1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "hashtab.h"
42 #include "hash-set.h"
43 #include "vec.h"
44 #include "machmode.h"
45 #include "hard-reg-set.h"
46 #include "input.h"
47 #include "function.h"
48 #include "obstack.h"
49 #include "toplev.h" /* get_random_seed */
50 #include "inchash.h"
51 #include "filenames.h"
52 #include "output.h"
53 #include "target.h"
54 #include "common/common-target.h"
55 #include "langhooks.h"
56 #include "tree-inline.h"
57 #include "tree-iterator.h"
58 #include "predict.h"
59 #include "dominance.h"
60 #include "cfg.h"
61 #include "basic-block.h"
62 #include "bitmap.h"
63 #include "tree-ssa-alias.h"
64 #include "internal-fn.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimple-iterator.h"
69 #include "gimplify.h"
70 #include "gimple-ssa.h"
71 #include "cgraph.h"
72 #include "tree-phinodes.h"
73 #include "stringpool.h"
74 #include "tree-ssanames.h"
75 #include "expr.h"
76 #include "tree-dfa.h"
77 #include "params.h"
78 #include "tree-pass.h"
79 #include "langhooks-def.h"
80 #include "diagnostic.h"
81 #include "tree-diagnostic.h"
82 #include "tree-pretty-print.h"
83 #include "except.h"
84 #include "debug.h"
85 #include "intl.h"
86 #include "wide-int.h"
87 #include "builtins.h"
88
89 /* Tree code classes. */
90
91 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
92 #define END_OF_BASE_TREE_CODES tcc_exceptional,
93
94 const enum tree_code_class tree_code_type[] = {
95 #include "all-tree.def"
96 };
97
98 #undef DEFTREECODE
99 #undef END_OF_BASE_TREE_CODES
100
101 /* Table indexed by tree code giving number of expression
102 operands beyond the fixed part of the node structure.
103 Not used for types or decls. */
104
105 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
106 #define END_OF_BASE_TREE_CODES 0,
107
108 const unsigned char tree_code_length[] = {
109 #include "all-tree.def"
110 };
111
112 #undef DEFTREECODE
113 #undef END_OF_BASE_TREE_CODES
114
115 /* Names of tree components.
116 Used for printing out the tree and error messages. */
117 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
118 #define END_OF_BASE_TREE_CODES "@dummy",
119
120 static const char *const tree_code_name[] = {
121 #include "all-tree.def"
122 };
123
124 #undef DEFTREECODE
125 #undef END_OF_BASE_TREE_CODES
126
127 /* Each tree code class has an associated string representation.
128 These must correspond to the tree_code_class entries. */
129
130 const char *const tree_code_class_strings[] =
131 {
132 "exceptional",
133 "constant",
134 "type",
135 "declaration",
136 "reference",
137 "comparison",
138 "unary",
139 "binary",
140 "statement",
141 "vl_exp",
142 "expression"
143 };
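
/* Illustrative sketch (hypothetical helper): tree_code_class_strings is
   indexed by the tcc_* value that TREE_CODE_CLASS yields for a code, so
   naming the class of a code, e.g. for a diagnostic, looks like this.  */

static const char *
code_class_name_sketch (enum tree_code code)
{
  /* E.g. "binary" for PLUS_EXPR, "declaration" for VAR_DECL.  */
  return tree_code_class_strings[TREE_CODE_CLASS (code)];
}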
144
145 /* obstack.[ch] explicitly declined to prototype this. */
146 extern int _obstack_allocated_p (struct obstack *h, void *obj);
147
148 /* Statistics-gathering stuff. */
149
150 static int tree_code_counts[MAX_TREE_CODES];
151 int tree_node_counts[(int) all_kinds];
152 int tree_node_sizes[(int) all_kinds];
153
154 /* Keep in sync with tree.h:enum tree_node_kind. */
155 static const char * const tree_node_kind_names[] = {
156 "decls",
157 "types",
158 "blocks",
159 "stmts",
160 "refs",
161 "exprs",
162 "constants",
163 "identifiers",
164 "vecs",
165 "binfos",
166 "ssa names",
167 "constructors",
168 "random kinds",
169 "lang_decl kinds",
170 "lang_type kinds",
171 "omp clauses",
172 };
173
174 /* Unique id for next decl created. */
175 static GTY(()) int next_decl_uid;
176 /* Unique id for next type created. */
177 static GTY(()) int next_type_uid = 1;
178 /* Unique id for next debug decl created. Use negative numbers,
179 to catch erroneous uses. */
180 static GTY(()) int next_debug_decl_uid;
181
182 /* Since we cannot rehash a type after it is in the table, we have to
183 keep the hash code. */
184
185 struct GTY(()) type_hash {
186 unsigned long hash;
187 tree type;
188 };
189
190 /* Initial size of the hash table (rounded to next prime). */
191 #define TYPE_HASH_INITIAL_SIZE 1000
192
193 /* Now here is the hash table. When recording a type, it is added to
194 the slot whose index is the hash code. Note that the hash table is
195 used for several kinds of types (function types, array types and
196 array index range types, for now). While all these live in the
197 same table, they are completely independent, and the hash code is
198 computed differently for each of these. */
199
200 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
201 htab_t type_hash_table;
202
203 /* Hash table and temporary node for larger integer const values. */
204 static GTY (()) tree int_cst_node;
205 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
206 htab_t int_cst_hash_table;
207
208 /* Hash table for optimization flags and target option flags. Use the same
209 hash table for both sets of options. Nodes for building the current
210 optimization and target option nodes. The assumption is most of the time
211 the options created will already be in the hash table, so we avoid
212 allocating and freeing up a node repeatedly. */
213 static GTY (()) tree cl_optimization_node;
214 static GTY (()) tree cl_target_option_node;
215 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
216 htab_t cl_option_hash_table;
217
218 /* General tree->tree mapping structure for use in hash tables. */
219
220
221 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
222 htab_t debug_expr_for_decl;
223
224 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
225 htab_t value_expr_for_decl;
226
227 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
228 htab_t debug_args_for_decl;
229
230 static void set_type_quals (tree, int);
231 static int type_hash_eq (const void *, const void *);
232 static hashval_t type_hash_hash (const void *);
233 static hashval_t int_cst_hash_hash (const void *);
234 static int int_cst_hash_eq (const void *, const void *);
235 static hashval_t cl_option_hash_hash (const void *);
236 static int cl_option_hash_eq (const void *, const void *);
237 static void print_type_hash_statistics (void);
238 static void print_debug_expr_statistics (void);
239 static void print_value_expr_statistics (void);
240 static int type_hash_marked_p (const void *);
241 static void type_hash_list (const_tree, inchash::hash &);
242 static void attribute_hash_list (const_tree, inchash::hash &);
243
244 tree global_trees[TI_MAX];
245 tree integer_types[itk_none];
246
247 bool int_n_enabled_p[NUM_INT_N_ENTS];
248 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
249
250 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
251
252 /* Number of operands for each OpenMP clause. */
253 unsigned const char omp_clause_num_ops[] =
254 {
255 0, /* OMP_CLAUSE_ERROR */
256 1, /* OMP_CLAUSE_PRIVATE */
257 1, /* OMP_CLAUSE_SHARED */
258 1, /* OMP_CLAUSE_FIRSTPRIVATE */
259 2, /* OMP_CLAUSE_LASTPRIVATE */
260 4, /* OMP_CLAUSE_REDUCTION */
261 1, /* OMP_CLAUSE_COPYIN */
262 1, /* OMP_CLAUSE_COPYPRIVATE */
263 3, /* OMP_CLAUSE_LINEAR */
264 2, /* OMP_CLAUSE_ALIGNED */
265 1, /* OMP_CLAUSE_DEPEND */
266 1, /* OMP_CLAUSE_UNIFORM */
267 2, /* OMP_CLAUSE_FROM */
268 2, /* OMP_CLAUSE_TO */
269 2, /* OMP_CLAUSE_MAP */
270 1, /* OMP_CLAUSE__LOOPTEMP_ */
271 1, /* OMP_CLAUSE_IF */
272 1, /* OMP_CLAUSE_NUM_THREADS */
273 1, /* OMP_CLAUSE_SCHEDULE */
274 0, /* OMP_CLAUSE_NOWAIT */
275 0, /* OMP_CLAUSE_ORDERED */
276 0, /* OMP_CLAUSE_DEFAULT */
277 3, /* OMP_CLAUSE_COLLAPSE */
278 0, /* OMP_CLAUSE_UNTIED */
279 1, /* OMP_CLAUSE_FINAL */
280 0, /* OMP_CLAUSE_MERGEABLE */
281 1, /* OMP_CLAUSE_DEVICE */
282 1, /* OMP_CLAUSE_DIST_SCHEDULE */
283 0, /* OMP_CLAUSE_INBRANCH */
284 0, /* OMP_CLAUSE_NOTINBRANCH */
285 1, /* OMP_CLAUSE_NUM_TEAMS */
286 1, /* OMP_CLAUSE_THREAD_LIMIT */
287 0, /* OMP_CLAUSE_PROC_BIND */
288 1, /* OMP_CLAUSE_SAFELEN */
289 1, /* OMP_CLAUSE_SIMDLEN */
290 0, /* OMP_CLAUSE_FOR */
291 0, /* OMP_CLAUSE_PARALLEL */
292 0, /* OMP_CLAUSE_SECTIONS */
293 0, /* OMP_CLAUSE_TASKGROUP */
294 1, /* OMP_CLAUSE__SIMDUID_ */
295 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
296 };
297
298 const char * const omp_clause_code_name[] =
299 {
300 "error_clause",
301 "private",
302 "shared",
303 "firstprivate",
304 "lastprivate",
305 "reduction",
306 "copyin",
307 "copyprivate",
308 "linear",
309 "aligned",
310 "depend",
311 "uniform",
312 "from",
313 "to",
314 "map",
315 "_looptemp_",
316 "if",
317 "num_threads",
318 "schedule",
319 "nowait",
320 "ordered",
321 "default",
322 "collapse",
323 "untied",
324 "final",
325 "mergeable",
326 "device",
327 "dist_schedule",
328 "inbranch",
329 "notinbranch",
330 "num_teams",
331 "thread_limit",
332 "proc_bind",
333 "safelen",
334 "simdlen",
335 "for",
336 "parallel",
337 "sections",
338 "taskgroup",
339 "_simduid_",
340 "_Cilk_for_count_"
341 };
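
/* Illustrative sketch (hypothetical helper): both tables above are indexed
   by OMP_CLAUSE_CODE, e.g. when walking the operands of a clause or when
   printing its name.  */

static int
omp_clause_operand_count_sketch (const_tree clause)
{
  gcc_assert (TREE_CODE (clause) == OMP_CLAUSE);
  return omp_clause_num_ops[OMP_CLAUSE_CODE (clause)];
}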
342
343
344 /* Return the tree node structure used by tree code CODE. */
345
346 static inline enum tree_node_structure_enum
347 tree_node_structure_for_code (enum tree_code code)
348 {
349 switch (TREE_CODE_CLASS (code))
350 {
351 case tcc_declaration:
352 {
353 switch (code)
354 {
355 case FIELD_DECL:
356 return TS_FIELD_DECL;
357 case PARM_DECL:
358 return TS_PARM_DECL;
359 case VAR_DECL:
360 return TS_VAR_DECL;
361 case LABEL_DECL:
362 return TS_LABEL_DECL;
363 case RESULT_DECL:
364 return TS_RESULT_DECL;
365 case DEBUG_EXPR_DECL:
366 return TS_DECL_WRTL;
367 case CONST_DECL:
368 return TS_CONST_DECL;
369 case TYPE_DECL:
370 return TS_TYPE_DECL;
371 case FUNCTION_DECL:
372 return TS_FUNCTION_DECL;
373 case TRANSLATION_UNIT_DECL:
374 return TS_TRANSLATION_UNIT_DECL;
375 default:
376 return TS_DECL_NON_COMMON;
377 }
378 }
379 case tcc_type:
380 return TS_TYPE_NON_COMMON;
381 case tcc_reference:
382 case tcc_comparison:
383 case tcc_unary:
384 case tcc_binary:
385 case tcc_expression:
386 case tcc_statement:
387 case tcc_vl_exp:
388 return TS_EXP;
389 default: /* tcc_constant and tcc_exceptional */
390 break;
391 }
392 switch (code)
393 {
394 /* tcc_constant cases. */
395 case VOID_CST: return TS_TYPED;
396 case INTEGER_CST: return TS_INT_CST;
397 case REAL_CST: return TS_REAL_CST;
398 case FIXED_CST: return TS_FIXED_CST;
399 case COMPLEX_CST: return TS_COMPLEX;
400 case VECTOR_CST: return TS_VECTOR;
401 case STRING_CST: return TS_STRING;
402 /* tcc_exceptional cases. */
403 case ERROR_MARK: return TS_COMMON;
404 case IDENTIFIER_NODE: return TS_IDENTIFIER;
405 case TREE_LIST: return TS_LIST;
406 case TREE_VEC: return TS_VEC;
407 case SSA_NAME: return TS_SSA_NAME;
408 case PLACEHOLDER_EXPR: return TS_COMMON;
409 case STATEMENT_LIST: return TS_STATEMENT_LIST;
410 case BLOCK: return TS_BLOCK;
411 case CONSTRUCTOR: return TS_CONSTRUCTOR;
412 case TREE_BINFO: return TS_BINFO;
413 case OMP_CLAUSE: return TS_OMP_CLAUSE;
414 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
415 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
416
417 default:
418 gcc_unreachable ();
419 }
420 }
421
422
423 /* Initialize tree_contains_struct to describe the hierarchy of tree
424 nodes. */
425
426 static void
427 initialize_tree_contains_struct (void)
428 {
429 unsigned i;
430
431 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
432 {
433 enum tree_code code;
434 enum tree_node_structure_enum ts_code;
435
436 code = (enum tree_code) i;
437 ts_code = tree_node_structure_for_code (code);
438
439 /* Mark the TS structure itself. */
440 tree_contains_struct[code][ts_code] = 1;
441
442 /* Mark all the structures that TS is derived from. */
443 switch (ts_code)
444 {
445 case TS_TYPED:
446 case TS_BLOCK:
447 MARK_TS_BASE (code);
448 break;
449
450 case TS_COMMON:
451 case TS_INT_CST:
452 case TS_REAL_CST:
453 case TS_FIXED_CST:
454 case TS_VECTOR:
455 case TS_STRING:
456 case TS_COMPLEX:
457 case TS_SSA_NAME:
458 case TS_CONSTRUCTOR:
459 case TS_EXP:
460 case TS_STATEMENT_LIST:
461 MARK_TS_TYPED (code);
462 break;
463
464 case TS_IDENTIFIER:
465 case TS_DECL_MINIMAL:
466 case TS_TYPE_COMMON:
467 case TS_LIST:
468 case TS_VEC:
469 case TS_BINFO:
470 case TS_OMP_CLAUSE:
471 case TS_OPTIMIZATION:
472 case TS_TARGET_OPTION:
473 MARK_TS_COMMON (code);
474 break;
475
476 case TS_TYPE_WITH_LANG_SPECIFIC:
477 MARK_TS_TYPE_COMMON (code);
478 break;
479
480 case TS_TYPE_NON_COMMON:
481 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
482 break;
483
484 case TS_DECL_COMMON:
485 MARK_TS_DECL_MINIMAL (code);
486 break;
487
488 case TS_DECL_WRTL:
489 case TS_CONST_DECL:
490 MARK_TS_DECL_COMMON (code);
491 break;
492
493 case TS_DECL_NON_COMMON:
494 MARK_TS_DECL_WITH_VIS (code);
495 break;
496
497 case TS_DECL_WITH_VIS:
498 case TS_PARM_DECL:
499 case TS_LABEL_DECL:
500 case TS_RESULT_DECL:
501 MARK_TS_DECL_WRTL (code);
502 break;
503
504 case TS_FIELD_DECL:
505 MARK_TS_DECL_COMMON (code);
506 break;
507
508 case TS_VAR_DECL:
509 MARK_TS_DECL_WITH_VIS (code);
510 break;
511
512 case TS_TYPE_DECL:
513 case TS_FUNCTION_DECL:
514 MARK_TS_DECL_NON_COMMON (code);
515 break;
516
517 case TS_TRANSLATION_UNIT_DECL:
518 MARK_TS_DECL_COMMON (code);
519 break;
520
521 default:
522 gcc_unreachable ();
523 }
524 }
525
526 /* Basic consistency checks for attributes used in fold. */
527 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
528 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
529 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
530 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
531 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
532 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
533 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
534 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
535 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
536 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
537 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
538 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
539 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
540 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
541 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
542 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
543 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
544 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
545 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
546 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
547 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
548 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
549 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
550 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
551 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
552 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
553 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
554 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
555 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
556 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
557 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
558 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
559 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
560 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
561 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
562 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
563 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
564 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
565 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
566 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
567 }
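
/* Illustrative sketch (hypothetical helper): tree_contains_struct is
   normally consulted through the CODE_CONTAINS_STRUCT macro, e.g. to
   verify that a node has a TS_DECL_WITH_VIS part before touching its
   visibility-related fields.  */

static bool
has_visibility_part_sketch (const_tree node)
{
  return CODE_CONTAINS_STRUCT (TREE_CODE (node), TS_DECL_WITH_VIS);
}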
568
569
570 /* Init tree.c. */
571
572 void
573 init_ttree (void)
574 {
575 /* Initialize the hash table of types. */
576 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
577 type_hash_eq, 0);
578
579 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
580 tree_decl_map_eq, 0);
581
582 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
583 tree_decl_map_eq, 0);
584
585 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
586 int_cst_hash_eq, NULL);
587
588 int_cst_node = make_int_cst (1, 1);
589
590 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
591 cl_option_hash_eq, NULL);
592
593 cl_optimization_node = make_node (OPTIMIZATION_NODE);
594 cl_target_option_node = make_node (TARGET_OPTION_NODE);
595
596 /* Initialize the tree_contains_struct array. */
597 initialize_tree_contains_struct ();
598 lang_hooks.init_ts ();
599 }
600
601 \f
602 /* The name of the object as the assembler will see it (but before any
603 translations made by ASM_OUTPUT_LABELREF). Often this is the same
604 as DECL_NAME. It is an IDENTIFIER_NODE. */
605 tree
606 decl_assembler_name (tree decl)
607 {
608 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
609 lang_hooks.set_decl_assembler_name (decl);
610 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
611 }
612
613 /* When the target supports COMDAT groups, this indicates which group the
614 DECL is associated with. This can be either an IDENTIFIER_NODE or a
615 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
616 tree
617 decl_comdat_group (const_tree node)
618 {
619 struct symtab_node *snode = symtab_node::get (node);
620 if (!snode)
621 return NULL;
622 return snode->get_comdat_group ();
623 }
624
625 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
626 tree
627 decl_comdat_group_id (const_tree node)
628 {
629 struct symtab_node *snode = symtab_node::get (node);
630 if (!snode)
631 return NULL;
632 return snode->get_comdat_group_id ();
633 }
634
635 /* When the target supports named sections, return the name of the section
636 NODE is placed in as a string, or NULL if it is in no section. */
637 const char *
638 decl_section_name (const_tree node)
639 {
640 struct symtab_node *snode = symtab_node::get (node);
641 if (!snode)
642 return NULL;
643 return snode->get_section ();
644 }
645
646 /* Set the section name of NODE to VALUE (a section name string), or
647 clear it when VALUE is NULL. */
648 void
649 set_decl_section_name (tree node, const char *value)
650 {
651 struct symtab_node *snode;
652
653 if (value == NULL)
654 {
655 snode = symtab_node::get (node);
656 if (!snode)
657 return;
658 }
659 else if (TREE_CODE (node) == VAR_DECL)
660 snode = varpool_node::get_create (node);
661 else
662 snode = cgraph_node::get_create (node);
663 snode->set_section (value);
664 }
665
666 /* Return TLS model of a variable NODE. */
667 enum tls_model
668 decl_tls_model (const_tree node)
669 {
670 struct varpool_node *snode = varpool_node::get (node);
671 if (!snode)
672 return TLS_MODEL_NONE;
673 return snode->tls_model;
674 }
675
676 /* Set TLS model of variable NODE to MODEL. */
677 void
678 set_decl_tls_model (tree node, enum tls_model model)
679 {
680 struct varpool_node *vnode;
681
682 if (model == TLS_MODEL_NONE)
683 {
684 vnode = varpool_node::get (node);
685 if (!vnode)
686 return;
687 }
688 else
689 vnode = varpool_node::get_create (node);
690 vnode->tls_model = model;
691 }
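
/* Illustrative sketch (hypothetical helper; the section name is an
   arbitrary example): placing a thread-local VAR_DECL in a named section
   and selecting its TLS model with the two setters above; both create the
   varpool node on demand.  */

static void
place_tls_var_sketch (tree var)
{
  gcc_assert (TREE_CODE (var) == VAR_DECL);
  set_decl_section_name (var, ".my_tls_data");
  set_decl_tls_model (var, TLS_MODEL_INITIAL_EXEC);
}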
692
693 /* Compute the number of bytes occupied by a tree with code CODE.
694 This function cannot be used for nodes that have variable sizes,
695 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
696 size_t
697 tree_code_size (enum tree_code code)
698 {
699 switch (TREE_CODE_CLASS (code))
700 {
701 case tcc_declaration: /* A decl node */
702 {
703 switch (code)
704 {
705 case FIELD_DECL:
706 return sizeof (struct tree_field_decl);
707 case PARM_DECL:
708 return sizeof (struct tree_parm_decl);
709 case VAR_DECL:
710 return sizeof (struct tree_var_decl);
711 case LABEL_DECL:
712 return sizeof (struct tree_label_decl);
713 case RESULT_DECL:
714 return sizeof (struct tree_result_decl);
715 case CONST_DECL:
716 return sizeof (struct tree_const_decl);
717 case TYPE_DECL:
718 return sizeof (struct tree_type_decl);
719 case FUNCTION_DECL:
720 return sizeof (struct tree_function_decl);
721 case DEBUG_EXPR_DECL:
722 return sizeof (struct tree_decl_with_rtl);
723 case TRANSLATION_UNIT_DECL:
724 return sizeof (struct tree_translation_unit_decl);
725 case NAMESPACE_DECL:
726 case IMPORTED_DECL:
727 case NAMELIST_DECL:
728 return sizeof (struct tree_decl_non_common);
729 default:
730 return lang_hooks.tree_size (code);
731 }
732 }
733
734 case tcc_type: /* a type node */
735 return sizeof (struct tree_type_non_common);
736
737 case tcc_reference: /* a reference */
738 case tcc_expression: /* an expression */
739 case tcc_statement: /* an expression with side effects */
740 case tcc_comparison: /* a comparison expression */
741 case tcc_unary: /* a unary arithmetic expression */
742 case tcc_binary: /* a binary arithmetic expression */
743 return (sizeof (struct tree_exp)
744 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
745
746 case tcc_constant: /* a constant */
747 switch (code)
748 {
749 case VOID_CST: return sizeof (struct tree_typed);
750 case INTEGER_CST: gcc_unreachable ();
751 case REAL_CST: return sizeof (struct tree_real_cst);
752 case FIXED_CST: return sizeof (struct tree_fixed_cst);
753 case COMPLEX_CST: return sizeof (struct tree_complex);
754 case VECTOR_CST: return sizeof (struct tree_vector);
755 case STRING_CST: gcc_unreachable ();
756 default:
757 return lang_hooks.tree_size (code);
758 }
759
760 case tcc_exceptional: /* something random, like an identifier. */
761 switch (code)
762 {
763 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
764 case TREE_LIST: return sizeof (struct tree_list);
765
766 case ERROR_MARK:
767 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
768
769 case TREE_VEC:
770 case OMP_CLAUSE: gcc_unreachable ();
771
772 case SSA_NAME: return sizeof (struct tree_ssa_name);
773
774 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
775 case BLOCK: return sizeof (struct tree_block);
776 case CONSTRUCTOR: return sizeof (struct tree_constructor);
777 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
778 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
779
780 default:
781 return lang_hooks.tree_size (code);
782 }
783
784 default:
785 gcc_unreachable ();
786 }
787 }
788
789 /* Compute the number of bytes occupied by NODE. This routine only
790 looks at TREE_CODE, except for those nodes that have variable sizes. */
791 size_t
792 tree_size (const_tree node)
793 {
794 const enum tree_code code = TREE_CODE (node);
795 switch (code)
796 {
797 case INTEGER_CST:
798 return (sizeof (struct tree_int_cst)
799 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
800
801 case TREE_BINFO:
802 return (offsetof (struct tree_binfo, base_binfos)
803 + vec<tree, va_gc>
804 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
805
806 case TREE_VEC:
807 return (sizeof (struct tree_vec)
808 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
809
810 case VECTOR_CST:
811 return (sizeof (struct tree_vector)
812 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
813
814 case STRING_CST:
815 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
816
817 case OMP_CLAUSE:
818 return (sizeof (struct tree_omp_clause)
819 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
820 * sizeof (tree));
821
822 default:
823 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
824 return (sizeof (struct tree_exp)
825 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
826 else
827 return tree_code_size (code);
828 }
829 }
830
831 /* Record interesting allocation statistics for a tree node with CODE
832 and LENGTH. */
833
834 static void
835 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
836 size_t length ATTRIBUTE_UNUSED)
837 {
838 enum tree_code_class type = TREE_CODE_CLASS (code);
839 tree_node_kind kind;
840
841 if (!GATHER_STATISTICS)
842 return;
843
844 switch (type)
845 {
846 case tcc_declaration: /* A decl node */
847 kind = d_kind;
848 break;
849
850 case tcc_type: /* a type node */
851 kind = t_kind;
852 break;
853
854 case tcc_statement: /* an expression with side effects */
855 kind = s_kind;
856 break;
857
858 case tcc_reference: /* a reference */
859 kind = r_kind;
860 break;
861
862 case tcc_expression: /* an expression */
863 case tcc_comparison: /* a comparison expression */
864 case tcc_unary: /* a unary arithmetic expression */
865 case tcc_binary: /* a binary arithmetic expression */
866 kind = e_kind;
867 break;
868
869 case tcc_constant: /* a constant */
870 kind = c_kind;
871 break;
872
873 case tcc_exceptional: /* something random, like an identifier. */
874 switch (code)
875 {
876 case IDENTIFIER_NODE:
877 kind = id_kind;
878 break;
879
880 case TREE_VEC:
881 kind = vec_kind;
882 break;
883
884 case TREE_BINFO:
885 kind = binfo_kind;
886 break;
887
888 case SSA_NAME:
889 kind = ssa_name_kind;
890 break;
891
892 case BLOCK:
893 kind = b_kind;
894 break;
895
896 case CONSTRUCTOR:
897 kind = constr_kind;
898 break;
899
900 case OMP_CLAUSE:
901 kind = omp_clause_kind;
902 break;
903
904 default:
905 kind = x_kind;
906 break;
907 }
908 break;
909
910 case tcc_vl_exp:
911 kind = e_kind;
912 break;
913
914 default:
915 gcc_unreachable ();
916 }
917
918 tree_code_counts[(int) code]++;
919 tree_node_counts[(int) kind]++;
920 tree_node_sizes[(int) kind] += length;
921 }
922
923 /* Allocate and return a new UID from the DECL_UID namespace. */
924
925 int
926 allocate_decl_uid (void)
927 {
928 return next_decl_uid++;
929 }
930
931 /* Return a newly allocated node of code CODE. For decl and type
932 nodes, some other fields are initialized. The rest of the node is
933 initialized to zero. This function cannot be used for TREE_VEC,
934 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
935 tree_code_size.
936
937 Achoo! I got a code in the node. */
938
939 tree
940 make_node_stat (enum tree_code code MEM_STAT_DECL)
941 {
942 tree t;
943 enum tree_code_class type = TREE_CODE_CLASS (code);
944 size_t length = tree_code_size (code);
945
946 record_node_allocation_statistics (code, length);
947
948 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
949 TREE_SET_CODE (t, code);
950
951 switch (type)
952 {
953 case tcc_statement:
954 TREE_SIDE_EFFECTS (t) = 1;
955 break;
956
957 case tcc_declaration:
958 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
959 {
960 if (code == FUNCTION_DECL)
961 {
962 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
963 DECL_MODE (t) = FUNCTION_MODE;
964 }
965 else
966 DECL_ALIGN (t) = 1;
967 }
968 DECL_SOURCE_LOCATION (t) = input_location;
969 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
970 DECL_UID (t) = --next_debug_decl_uid;
971 else
972 {
973 DECL_UID (t) = allocate_decl_uid ();
974 SET_DECL_PT_UID (t, -1);
975 }
976 if (TREE_CODE (t) == LABEL_DECL)
977 LABEL_DECL_UID (t) = -1;
978
979 break;
980
981 case tcc_type:
982 TYPE_UID (t) = next_type_uid++;
983 TYPE_ALIGN (t) = BITS_PER_UNIT;
984 TYPE_USER_ALIGN (t) = 0;
985 TYPE_MAIN_VARIANT (t) = t;
986 TYPE_CANONICAL (t) = t;
987
988 /* Default to no attributes for type, but let target change that. */
989 TYPE_ATTRIBUTES (t) = NULL_TREE;
990 targetm.set_default_type_attributes (t);
991
992 /* We have not yet computed the alias set for this type. */
993 TYPE_ALIAS_SET (t) = -1;
994 break;
995
996 case tcc_constant:
997 TREE_CONSTANT (t) = 1;
998 break;
999
1000 case tcc_expression:
1001 switch (code)
1002 {
1003 case INIT_EXPR:
1004 case MODIFY_EXPR:
1005 case VA_ARG_EXPR:
1006 case PREDECREMENT_EXPR:
1007 case PREINCREMENT_EXPR:
1008 case POSTDECREMENT_EXPR:
1009 case POSTINCREMENT_EXPR:
1010 /* All of these have side-effects, no matter what their
1011 operands are. */
1012 TREE_SIDE_EFFECTS (t) = 1;
1013 break;
1014
1015 default:
1016 break;
1017 }
1018 break;
1019
1020 default:
1021 /* Other classes need no special treatment. */
1022 break;
1023 }
1024
1025 return t;
1026 }
1027 \f
1028 /* Return a new node with the same contents as NODE except that its
1029 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1030
1031 tree
1032 copy_node_stat (tree node MEM_STAT_DECL)
1033 {
1034 tree t;
1035 enum tree_code code = TREE_CODE (node);
1036 size_t length;
1037
1038 gcc_assert (code != STATEMENT_LIST);
1039
1040 length = tree_size (node);
1041 record_node_allocation_statistics (code, length);
1042 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1043 memcpy (t, node, length);
1044
1045 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1046 TREE_CHAIN (t) = 0;
1047 TREE_ASM_WRITTEN (t) = 0;
1048 TREE_VISITED (t) = 0;
1049
1050 if (TREE_CODE_CLASS (code) == tcc_declaration)
1051 {
1052 if (code == DEBUG_EXPR_DECL)
1053 DECL_UID (t) = --next_debug_decl_uid;
1054 else
1055 {
1056 DECL_UID (t) = allocate_decl_uid ();
1057 if (DECL_PT_UID_SET_P (node))
1058 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1059 }
1060 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1061 && DECL_HAS_VALUE_EXPR_P (node))
1062 {
1063 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1064 DECL_HAS_VALUE_EXPR_P (t) = 1;
1065 }
1066 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1067 if (TREE_CODE (node) == VAR_DECL)
1068 {
1069 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1070 t->decl_with_vis.symtab_node = NULL;
1071 }
1072 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1073 {
1074 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1075 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1076 }
1077 if (TREE_CODE (node) == FUNCTION_DECL)
1078 {
1079 DECL_STRUCT_FUNCTION (t) = NULL;
1080 t->decl_with_vis.symtab_node = NULL;
1081 }
1082 }
1083 else if (TREE_CODE_CLASS (code) == tcc_type)
1084 {
1085 TYPE_UID (t) = next_type_uid++;
1086 /* The following is so that the debug code for
1087 the copy is different from the original type.
1088 The two statements usually duplicate each other
1089 (because they clear fields of the same union),
1090 but the optimizer should catch that. */
1091 TYPE_SYMTAB_POINTER (t) = 0;
1092 TYPE_SYMTAB_ADDRESS (t) = 0;
1093
1094 /* Do not copy the values cache. */
1095 if (TYPE_CACHED_VALUES_P (t))
1096 {
1097 TYPE_CACHED_VALUES_P (t) = 0;
1098 TYPE_CACHED_VALUES (t) = NULL_TREE;
1099 }
1100 }
1101
1102 return t;
1103 }
1104
1105 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1106 For example, this can copy a list made of TREE_LIST nodes. */
1107
1108 tree
1109 copy_list (tree list)
1110 {
1111 tree head;
1112 tree prev, next;
1113
1114 if (list == 0)
1115 return 0;
1116
1117 head = prev = copy_node (list);
1118 next = TREE_CHAIN (list);
1119 while (next)
1120 {
1121 TREE_CHAIN (prev) = copy_node (next);
1122 prev = TREE_CHAIN (prev);
1123 next = TREE_CHAIN (next);
1124 }
1125 return head;
1126 }
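
/* Illustrative sketch (hypothetical helper): building a two-element
   TREE_LIST with tree_cons and duplicating it with copy_list; the copy
   uses fresh list cells but shares the TREE_VALUE trees.  */

static tree
copy_list_sketch (tree value1, tree value2)
{
  tree original = tree_cons (NULL_TREE, value1,
			     tree_cons (NULL_TREE, value2, NULL_TREE));
  return copy_list (original);
}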
1127
1128 \f
1129 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1130 INTEGER_CST with value CST and type TYPE. */
1131
1132 static unsigned int
1133 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1134 {
1135 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1136 /* We need an extra zero HWI if CST is an unsigned integer with its
1137 upper bit set, and if CST occupies a whole number of HWIs. */
1138 if (TYPE_UNSIGNED (type)
1139 && wi::neg_p (cst)
1140 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1141 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1142 return cst.get_len ();
1143 }
1144
1145 /* Return a new INTEGER_CST with value CST and type TYPE. */
1146
1147 static tree
1148 build_new_int_cst (tree type, const wide_int &cst)
1149 {
1150 unsigned int len = cst.get_len ();
1151 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1152 tree nt = make_int_cst (len, ext_len);
1153
1154 if (len < ext_len)
1155 {
1156 --ext_len;
1157 TREE_INT_CST_ELT (nt, ext_len) = 0;
1158 for (unsigned int i = len; i < ext_len; ++i)
1159 TREE_INT_CST_ELT (nt, i) = -1;
1160 }
1161 else if (TYPE_UNSIGNED (type)
1162 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1163 {
1164 len--;
1165 TREE_INT_CST_ELT (nt, len)
1166 = zext_hwi (cst.elt (len),
1167 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1168 }
1169
1170 for (unsigned int i = 0; i < len; i++)
1171 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1172 TREE_TYPE (nt) = type;
1173 return nt;
1174 }
1175
1176 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1177
1178 tree
1179 build_int_cst (tree type, HOST_WIDE_INT low)
1180 {
1181 /* Support legacy code. */
1182 if (!type)
1183 type = integer_type_node;
1184
1185 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1186 }
1187
1188 tree
1189 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1190 {
1191 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1192 }
1193
1194 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1195
1196 tree
1197 build_int_cst_type (tree type, HOST_WIDE_INT low)
1198 {
1199 gcc_assert (type);
1200 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1201 }
1202
1203 /* Construct a tree of type TYPE with the value given by CST. The
1204 signedness of CST is assumed to be the same as the signedness of TYPE. */
1205
1206 tree
1207 double_int_to_tree (tree type, double_int cst)
1208 {
1209 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1210 }
1211
1212 /* We force the wide_int CST to the range of the type TYPE by sign or
1213 zero extending it. OVERFLOWABLE indicates if we are interested in
1214 overflow of the value: when > 0 we are only interested in signed
1215 overflow, when < 0 we are interested in any overflow.
1216 OVERFLOWED indicates whether overflow has already occurred.
1217 We force the value to be within the range of TYPE (by setting to 0
1218 or 1 all the bits outside the type's range). We set TREE_OVERFLOW
1219 on the result if
1220 OVERFLOWED is nonzero,
1221 or OVERFLOWABLE is > 0 and signed overflow occurs,
1222 or OVERFLOWABLE is < 0 and any overflow occurs.
1223 We return a new tree node for the extended wide_int. The node
1224 is shared if no overflow flags are set. */
1225
1226
1227 tree
1228 force_fit_type (tree type, const wide_int_ref &cst,
1229 int overflowable, bool overflowed)
1230 {
1231 signop sign = TYPE_SIGN (type);
1232
1233 /* If we need to set overflow flags, return a new unshared node. */
1234 if (overflowed || !wi::fits_to_tree_p (cst, type))
1235 {
1236 if (overflowed
1237 || overflowable < 0
1238 || (overflowable > 0 && sign == SIGNED))
1239 {
1240 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1241 tree t = build_new_int_cst (type, tmp);
1242 TREE_OVERFLOW (t) = 1;
1243 return t;
1244 }
1245 }
1246
1247 /* Else build a shared node. */
1248 return wide_int_to_tree (type, cst);
1249 }
1250
1251 /* These are the hash table functions for the hash table of INTEGER_CST
1252 nodes of a sizetype. */
1253
1254 /* Return the hash code of X, an INTEGER_CST. */
1255
1256 static hashval_t
1257 int_cst_hash_hash (const void *x)
1258 {
1259 const_tree const t = (const_tree) x;
1260 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1261 int i;
1262
1263 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1264 code ^= TREE_INT_CST_ELT (t, i);
1265
1266 return code;
1267 }
1268
1269 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1270 is the same as that given by *Y, which is also an INTEGER_CST tree node. */
1271
1272 static int
1273 int_cst_hash_eq (const void *x, const void *y)
1274 {
1275 const_tree const xt = (const_tree) x;
1276 const_tree const yt = (const_tree) y;
1277
1278 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1279 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1280 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1281 return false;
1282
1283 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1284 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1285 return false;
1286
1287 return true;
1288 }
1289
1290 /* Create an INT_CST node of TYPE and value CST.
1291 The returned node is always shared. For small integers we use a
1292 per-type vector cache, for larger ones we use a single hash table.
1293 The value is extended from its precision according to the sign of
1294 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1295 the upper bits and ensures that hashing and value equality based
1296 upon the underlying HOST_WIDE_INTs works without masking. */
1297
1298 tree
1299 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1300 {
1301 tree t;
1302 int ix = -1;
1303 int limit = 0;
1304
1305 gcc_assert (type);
1306 unsigned int prec = TYPE_PRECISION (type);
1307 signop sgn = TYPE_SIGN (type);
1308
1309 /* Verify that everything is canonical. */
1310 int l = pcst.get_len ();
1311 if (l > 1)
1312 {
1313 if (pcst.elt (l - 1) == 0)
1314 gcc_checking_assert (pcst.elt (l - 2) < 0);
1315 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1316 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1317 }
1318
1319 wide_int cst = wide_int::from (pcst, prec, sgn);
1320 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1321
1322 if (ext_len == 1)
1323 {
1324 /* We just need to store a single HOST_WIDE_INT. */
1325 HOST_WIDE_INT hwi;
1326 if (TYPE_UNSIGNED (type))
1327 hwi = cst.to_uhwi ();
1328 else
1329 hwi = cst.to_shwi ();
1330
1331 switch (TREE_CODE (type))
1332 {
1333 case NULLPTR_TYPE:
1334 gcc_assert (hwi == 0);
1335 /* Fallthru. */
1336
1337 case POINTER_TYPE:
1338 case REFERENCE_TYPE:
1339 /* Cache NULL pointer. */
1340 if (hwi == 0)
1341 {
1342 limit = 1;
1343 ix = 0;
1344 }
1345 break;
1346
1347 case BOOLEAN_TYPE:
1348 /* Cache false or true. */
1349 limit = 2;
1350 if (hwi < 2)
1351 ix = hwi;
1352 break;
1353
1354 case INTEGER_TYPE:
1355 case OFFSET_TYPE:
1356 if (TYPE_SIGN (type) == UNSIGNED)
1357 {
1358 /* Cache [0, N). */
1359 limit = INTEGER_SHARE_LIMIT;
1360 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1361 ix = hwi;
1362 }
1363 else
1364 {
1365 /* Cache [-1, N). */
1366 limit = INTEGER_SHARE_LIMIT + 1;
1367 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1368 ix = hwi + 1;
1369 }
1370 break;
1371
1372 case ENUMERAL_TYPE:
1373 break;
1374
1375 default:
1376 gcc_unreachable ();
1377 }
1378
1379 if (ix >= 0)
1380 {
1381 /* Look for it in the type's vector of small shared ints. */
1382 if (!TYPE_CACHED_VALUES_P (type))
1383 {
1384 TYPE_CACHED_VALUES_P (type) = 1;
1385 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1386 }
1387
1388 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1389 if (t)
1390 /* Make sure no one is clobbering the shared constant. */
1391 gcc_checking_assert (TREE_TYPE (t) == type
1392 && TREE_INT_CST_NUNITS (t) == 1
1393 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1394 && TREE_INT_CST_EXT_NUNITS (t) == 1
1395 && TREE_INT_CST_ELT (t, 0) == hwi);
1396 else
1397 {
1398 /* Create a new shared int. */
1399 t = build_new_int_cst (type, cst);
1400 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1401 }
1402 }
1403 else
1404 {
1405 /* Use the cache of larger shared ints, using int_cst_node as
1406 a temporary. */
1407 void **slot;
1408
1409 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1410 TREE_TYPE (int_cst_node) = type;
1411
1412 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1413 t = (tree) *slot;
1414 if (!t)
1415 {
1416 /* Insert this one into the hash table. */
1417 t = int_cst_node;
1418 *slot = t;
1419 /* Make a new node for next time round. */
1420 int_cst_node = make_int_cst (1, 1);
1421 }
1422 }
1423 }
1424 else
1425 {
1426 /* The value either hashes properly or we drop it on the floor
1427 for the gc to take care of. There will not be enough of them
1428 to worry about. */
1429 void **slot;
1430
1431 tree nt = build_new_int_cst (type, cst);
1432 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1433 t = (tree) *slot;
1434 if (!t)
1435 {
1436 /* Insert this one into the hash table. */
1437 t = nt;
1438 *slot = t;
1439 }
1440 }
1441
1442 return t;
1443 }
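
/* Illustrative sketch (hypothetical helper): small INTEGER_CSTs of a type
   are cached in TYPE_CACHED_VALUES as described above, so repeated
   requests for the same small value yield the very same node and pointer
   comparison is enough for them.  */

static void
int_cst_sharing_sketch (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (integer_type_node, 7);

  /* Both calls return the node cached at index 7 + 1 of the type's
     TYPE_CACHED_VALUES vector.  */
  gcc_checking_assert (a == b);
}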
1444
1445 void
1446 cache_integer_cst (tree t)
1447 {
1448 tree type = TREE_TYPE (t);
1449 int ix = -1;
1450 int limit = 0;
1451 int prec = TYPE_PRECISION (type);
1452
1453 gcc_assert (!TREE_OVERFLOW (t));
1454
1455 switch (TREE_CODE (type))
1456 {
1457 case NULLPTR_TYPE:
1458 gcc_assert (integer_zerop (t));
1459 /* Fallthru. */
1460
1461 case POINTER_TYPE:
1462 case REFERENCE_TYPE:
1463 /* Cache NULL pointer. */
1464 if (integer_zerop (t))
1465 {
1466 limit = 1;
1467 ix = 0;
1468 }
1469 break;
1470
1471 case BOOLEAN_TYPE:
1472 /* Cache false or true. */
1473 limit = 2;
1474 if (wi::ltu_p (t, 2))
1475 ix = TREE_INT_CST_ELT (t, 0);
1476 break;
1477
1478 case INTEGER_TYPE:
1479 case OFFSET_TYPE:
1480 if (TYPE_UNSIGNED (type))
1481 {
1482 /* Cache 0..N */
1483 limit = INTEGER_SHARE_LIMIT;
1484
1485 /* This is a little hokey, but if the prec is smaller than
1486 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1487 obvious test will not get the correct answer. */
1488 if (prec < HOST_BITS_PER_WIDE_INT)
1489 {
1490 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1491 ix = tree_to_uhwi (t);
1492 }
1493 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1494 ix = tree_to_uhwi (t);
1495 }
1496 else
1497 {
1498 /* Cache -1..N */
1499 limit = INTEGER_SHARE_LIMIT + 1;
1500
1501 if (integer_minus_onep (t))
1502 ix = 0;
1503 else if (!wi::neg_p (t))
1504 {
1505 if (prec < HOST_BITS_PER_WIDE_INT)
1506 {
1507 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1508 ix = tree_to_shwi (t) + 1;
1509 }
1510 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1511 ix = tree_to_shwi (t) + 1;
1512 }
1513 }
1514 break;
1515
1516 case ENUMERAL_TYPE:
1517 break;
1518
1519 default:
1520 gcc_unreachable ();
1521 }
1522
1523 if (ix >= 0)
1524 {
1525 /* Look for it in the type's vector of small shared ints. */
1526 if (!TYPE_CACHED_VALUES_P (type))
1527 {
1528 TYPE_CACHED_VALUES_P (type) = 1;
1529 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1530 }
1531
1532 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1533 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1534 }
1535 else
1536 {
1537 /* Use the cache of larger shared ints. */
1538 void **slot;
1539
1540 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1541 /* If there is already an entry for the number verify it's the
1542 same. */
1543 if (*slot)
1544 gcc_assert (wi::eq_p (tree (*slot), t));
1545 else
1546 /* Otherwise insert this one into the hash table. */
1547 *slot = t;
1548 }
1549 }
1550
1551
1552 /* Build an integer constant in TYPE such that the lowest BITS bits are
1553 ones and the rest are zeros. */
1554
1555 tree
1556 build_low_bits_mask (tree type, unsigned bits)
1557 {
1558 gcc_assert (bits <= TYPE_PRECISION (type));
1559
1560 return wide_int_to_tree (type, wi::mask (bits, false,
1561 TYPE_PRECISION (type)));
1562 }
1563
1564 /* Return true if X is an integer constant that can be expressed in an
1565 (unsigned) HOST_WIDE_INT without loss of precision. */
1566
1567 bool
1568 cst_and_fits_in_hwi (const_tree x)
1569 {
1570 if (TREE_CODE (x) != INTEGER_CST)
1571 return false;
1572
1573 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1574 return false;
1575
1576 return TREE_INT_CST_NUNITS (x) == 1;
1577 }
1578
1579 /* Build a newly constructed TREE_VEC node of length LEN. */
1580
1581 tree
1582 make_vector_stat (unsigned len MEM_STAT_DECL)
1583 {
1584 tree t;
1585 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1586
1587 record_node_allocation_statistics (VECTOR_CST, length);
1588
1589 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1590
1591 TREE_SET_CODE (t, VECTOR_CST);
1592 TREE_CONSTANT (t) = 1;
1593
1594 return t;
1595 }
1596
1597 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1598 are given by the array VALS. */
1599
1600 tree
1601 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1602 {
1603 int over = 0;
1604 unsigned cnt = 0;
1605 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1606 TREE_TYPE (v) = type;
1607
1608 /* Iterate through elements and check for overflow. */
1609 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1610 {
1611 tree value = vals[cnt];
1612
1613 VECTOR_CST_ELT (v, cnt) = value;
1614
1615 /* Don't crash if we get an address constant. */
1616 if (!CONSTANT_CLASS_P (value))
1617 continue;
1618
1619 over |= TREE_OVERFLOW (value);
1620 }
1621
1622 TREE_OVERFLOW (v) = over;
1623 return v;
1624 }
1625
1626 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1627 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1628
1629 tree
1630 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1631 {
1632 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1633 unsigned HOST_WIDE_INT idx;
1634 tree value;
1635
1636 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1637 vec[idx] = value;
1638 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1639 vec[idx] = build_zero_cst (TREE_TYPE (type));
1640
1641 return build_vector (type, vec);
1642 }
1643
1644 /* Build a vector of type VECTYPE where all the elements are SCs. */
1645 tree
1646 build_vector_from_val (tree vectype, tree sc)
1647 {
1648 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1649
1650 if (sc == error_mark_node)
1651 return sc;
1652
1653 /* Verify that the vector type is suitable for SC. Note that there
1654 is some inconsistency in the type-system with respect to restrict
1655 qualifications of pointers. Vector types always have a main-variant
1656 element type and the qualification is applied to the vector-type.
1657 So TREE_TYPE (vector-type) does not return a properly qualified
1658 vector element-type. */
1659 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1660 TREE_TYPE (vectype)));
1661
1662 if (CONSTANT_CLASS_P (sc))
1663 {
1664 tree *v = XALLOCAVEC (tree, nunits);
1665 for (i = 0; i < nunits; ++i)
1666 v[i] = sc;
1667 return build_vector (vectype, v);
1668 }
1669 else
1670 {
1671 vec<constructor_elt, va_gc> *v;
1672 vec_alloc (v, nunits);
1673 for (i = 0; i < nunits; ++i)
1674 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1675 return build_constructor (vectype, v);
1676 }
1677 }
1678
1679 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1680 are in the vec pointed to by VALS. */
1681 tree
1682 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1683 {
1684 tree c = make_node (CONSTRUCTOR);
1685 unsigned int i;
1686 constructor_elt *elt;
1687 bool constant_p = true;
1688 bool side_effects_p = false;
1689
1690 TREE_TYPE (c) = type;
1691 CONSTRUCTOR_ELTS (c) = vals;
1692
1693 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1694 {
1695 /* Mostly ctors will have elts that don't have side-effects, so
1696 the usual case is to scan all the elements. Hence a single
1697 loop for both const and side effects, rather than one loop
1698 each (with early outs). */
1699 if (!TREE_CONSTANT (elt->value))
1700 constant_p = false;
1701 if (TREE_SIDE_EFFECTS (elt->value))
1702 side_effects_p = true;
1703 }
1704
1705 TREE_SIDE_EFFECTS (c) = side_effects_p;
1706 TREE_CONSTANT (c) = constant_p;
1707
1708 return c;
1709 }
1710
1711 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1712 INDEX and VALUE. */
1713 tree
1714 build_constructor_single (tree type, tree index, tree value)
1715 {
1716 vec<constructor_elt, va_gc> *v;
1717 constructor_elt elt = {index, value};
1718
1719 vec_alloc (v, 1);
1720 v->quick_push (elt);
1721
1722 return build_constructor (type, v);
1723 }
1724
1725
1726 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1727 are in a list pointed to by VALS. */
1728 tree
1729 build_constructor_from_list (tree type, tree vals)
1730 {
1731 tree t;
1732 vec<constructor_elt, va_gc> *v = NULL;
1733
1734 if (vals)
1735 {
1736 vec_alloc (v, list_length (vals));
1737 for (t = vals; t; t = TREE_CHAIN (t))
1738 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1739 }
1740
1741 return build_constructor (type, v);
1742 }
1743
1744 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1745 of elements, provided as index/value pairs. */
1746
1747 tree
1748 build_constructor_va (tree type, int nelts, ...)
1749 {
1750 vec<constructor_elt, va_gc> *v = NULL;
1751 va_list p;
1752
1753 va_start (p, nelts);
1754 vec_alloc (v, nelts);
1755 while (nelts--)
1756 {
1757 tree index = va_arg (p, tree);
1758 tree value = va_arg (p, tree);
1759 CONSTRUCTOR_APPEND_ELT (v, index, value);
1760 }
1761 va_end (p);
1762 return build_constructor (type, v);
1763 }
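
/* Illustrative sketch (hypothetical helper, array type chosen only for the
   example): a two-element array initializer built from index/value
   pairs.  */

static tree
array_ctor_sketch (void)
{
  tree elt_type = integer_type_node;
  tree domain = build_index_type (size_int (1));
  tree array_type = build_array_type (elt_type, domain);

  return build_constructor_va (array_type, 2,
			       size_int (0), build_int_cst (elt_type, 10),
			       size_int (1), build_int_cst (elt_type, 20));
}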
1764
1765 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1766
1767 tree
1768 build_fixed (tree type, FIXED_VALUE_TYPE f)
1769 {
1770 tree v;
1771 FIXED_VALUE_TYPE *fp;
1772
1773 v = make_node (FIXED_CST);
1774 fp = ggc_alloc<fixed_value> ();
1775 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1776
1777 TREE_TYPE (v) = type;
1778 TREE_FIXED_CST_PTR (v) = fp;
1779 return v;
1780 }
1781
1782 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1783
1784 tree
1785 build_real (tree type, REAL_VALUE_TYPE d)
1786 {
1787 tree v;
1788 REAL_VALUE_TYPE *dp;
1789 int overflow = 0;
1790
1791 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1792 Consider doing it via real_convert now. */
1793
1794 v = make_node (REAL_CST);
1795 dp = ggc_alloc<real_value> ();
1796 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1797
1798 TREE_TYPE (v) = type;
1799 TREE_REAL_CST_PTR (v) = dp;
1800 TREE_OVERFLOW (v) = overflow;
1801 return v;
1802 }
1803
1804 /* Return a new REAL_CST node whose type is TYPE
1805 and whose value is the integer value of the INTEGER_CST node I. */
1806
1807 REAL_VALUE_TYPE
1808 real_value_from_int_cst (const_tree type, const_tree i)
1809 {
1810 REAL_VALUE_TYPE d;
1811
1812 /* Clear all bits of the real value type so that we can later do
1813 bitwise comparisons to see if two values are the same. */
1814 memset (&d, 0, sizeof d);
1815
1816 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1817 TYPE_SIGN (TREE_TYPE (i)));
1818 return d;
1819 }
1820
1821 /* Given a tree representing an integer constant I, return a tree
1822 representing the same value as a floating-point constant of type TYPE. */
1823
1824 tree
1825 build_real_from_int_cst (tree type, const_tree i)
1826 {
1827 tree v;
1828 int overflow = TREE_OVERFLOW (i);
1829
1830 v = build_real (type, real_value_from_int_cst (type, i));
1831
1832 TREE_OVERFLOW (v) |= overflow;
1833 return v;
1834 }
1835
1836 /* Return a newly constructed STRING_CST node whose value is
1837 the LEN characters at STR.
1838 Note that for a C string literal, LEN should include the trailing NUL.
1839 The TREE_TYPE is not initialized. */
1840
1841 tree
1842 build_string (int len, const char *str)
1843 {
1844 tree s;
1845 size_t length;
1846
1847 /* Do not waste bytes provided by padding of struct tree_string. */
1848 length = len + offsetof (struct tree_string, str) + 1;
1849
1850 record_node_allocation_statistics (STRING_CST, length);
1851
1852 s = (tree) ggc_internal_alloc (length);
1853
1854 memset (s, 0, sizeof (struct tree_typed));
1855 TREE_SET_CODE (s, STRING_CST);
1856 TREE_CONSTANT (s) = 1;
1857 TREE_STRING_LENGTH (s) = len;
1858 memcpy (s->string.str, str, len);
1859 s->string.str[len] = '\0';
1860
1861 return s;
1862 }
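
/* Illustrative sketch (hypothetical helper): callers are expected to give
   the STRING_CST a type themselves; for a C-style literal that is an
   array type covering the bytes including the trailing NUL.  */

static tree
string_cst_sketch (void)
{
  tree s = build_string (4, "abc");	/* Three characters plus the NUL.  */
  TREE_TYPE (s) = build_array_type (char_type_node,
				    build_index_type (size_int (3)));
  return s;
}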
1863
1864 /* Return a newly constructed COMPLEX_CST node whose value is
1865 specified by the real and imaginary parts REAL and IMAG.
1866 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1867 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1868
1869 tree
1870 build_complex (tree type, tree real, tree imag)
1871 {
1872 tree t = make_node (COMPLEX_CST);
1873
1874 TREE_REALPART (t) = real;
1875 TREE_IMAGPART (t) = imag;
1876 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1877 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1878 return t;
1879 }
1880
1881 /* Return a constant of arithmetic type TYPE which is the
1882 multiplicative identity of the set TYPE. */
1883
1884 tree
1885 build_one_cst (tree type)
1886 {
1887 switch (TREE_CODE (type))
1888 {
1889 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1890 case POINTER_TYPE: case REFERENCE_TYPE:
1891 case OFFSET_TYPE:
1892 return build_int_cst (type, 1);
1893
1894 case REAL_TYPE:
1895 return build_real (type, dconst1);
1896
1897 case FIXED_POINT_TYPE:
1898 /* We can only generate 1 for accum types. */
1899 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1900 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1901
1902 case VECTOR_TYPE:
1903 {
1904 tree scalar = build_one_cst (TREE_TYPE (type));
1905
1906 return build_vector_from_val (type, scalar);
1907 }
1908
1909 case COMPLEX_TYPE:
1910 return build_complex (type,
1911 build_one_cst (TREE_TYPE (type)),
1912 build_zero_cst (TREE_TYPE (type)));
1913
1914 default:
1915 gcc_unreachable ();
1916 }
1917 }
1918
1919 /* Return an integer of type TYPE with all of its bits set to 1, or a
1920 complex or vector whose subparts are such integers. */
1921
1922 tree
1923 build_all_ones_cst (tree type)
1924 {
1925 if (TREE_CODE (type) == COMPLEX_TYPE)
1926 {
1927 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1928 return build_complex (type, scalar, scalar);
1929 }
1930 else
1931 return build_minus_one_cst (type);
1932 }
1933
1934 /* Return a constant of arithmetic type TYPE which is the
1935 opposite of the multiplicative identity of the set TYPE. */
1936
1937 tree
1938 build_minus_one_cst (tree type)
1939 {
1940 switch (TREE_CODE (type))
1941 {
1942 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1943 case POINTER_TYPE: case REFERENCE_TYPE:
1944 case OFFSET_TYPE:
1945 return build_int_cst (type, -1);
1946
1947 case REAL_TYPE:
1948 return build_real (type, dconstm1);
1949
1950 case FIXED_POINT_TYPE:
1951 /* We can only generate -1 for accum types. */
1952 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1953 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1954 TYPE_MODE (type)));
1955
1956 case VECTOR_TYPE:
1957 {
1958 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1959
1960 return build_vector_from_val (type, scalar);
1961 }
1962
1963 case COMPLEX_TYPE:
1964 return build_complex (type,
1965 build_minus_one_cst (TREE_TYPE (type)),
1966 build_zero_cst (TREE_TYPE (type)));
1967
1968 default:
1969 gcc_unreachable ();
1970 }
1971 }
1972
1973 /* Build 0 constant of type TYPE. This is used by constructor folding
1974 and thus the constant should be represented in memory by
1975 zero(es). */
1976
1977 tree
1978 build_zero_cst (tree type)
1979 {
1980 switch (TREE_CODE (type))
1981 {
1982 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1983 case POINTER_TYPE: case REFERENCE_TYPE:
1984 case OFFSET_TYPE: case NULLPTR_TYPE:
1985 return build_int_cst (type, 0);
1986
1987 case REAL_TYPE:
1988 return build_real (type, dconst0);
1989
1990 case FIXED_POINT_TYPE:
1991 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1992
1993 case VECTOR_TYPE:
1994 {
1995 tree scalar = build_zero_cst (TREE_TYPE (type));
1996
1997 return build_vector_from_val (type, scalar);
1998 }
1999
2000 case COMPLEX_TYPE:
2001 {
2002 tree zero = build_zero_cst (TREE_TYPE (type));
2003
2004 return build_complex (type, zero, zero);
2005 }
2006
2007 default:
2008 if (!AGGREGATE_TYPE_P (type))
2009 return fold_convert (type, integer_zero_node);
2010 return build_constructor (type, NULL);
2011 }
2012 }
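
/* A minimal usage sketch of the constant builders above (illustrative
   only, kept under "#if 0"; the function name is hypothetical).  It
   shows how the same entry points cover scalar and complex types.  */
#if 0
static void
sketch_identity_constants (void)
{
  tree zero = build_zero_cst (integer_type_node);	     /* 0 */
  tree one = build_one_cst (integer_type_node);		     /* 1 */
  tree minus_one = build_minus_one_cst (integer_type_node); /* -1 */

  /* For a complex type the multiplicative identity is 1.0 + 0.0i,
     while build_all_ones_cst sets every part to all-ones.  */
  tree cone = build_one_cst (complex_double_type_node);
  tree callones = build_all_ones_cst (complex_double_type_node);
}
#endif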
2013
2014
2015 /* Build a BINFO with BASE_BINFOS slots for base binfos.  */
2016
2017 tree
2018 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2019 {
2020 tree t;
2021 size_t length = (offsetof (struct tree_binfo, base_binfos)
2022 + vec<tree, va_gc>::embedded_size (base_binfos));
2023
2024 record_node_allocation_statistics (TREE_BINFO, length);
2025
2026 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2027
2028 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2029
2030 TREE_SET_CODE (t, TREE_BINFO);
2031
2032 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2033
2034 return t;
2035 }
2036
2037 /* Create a CASE_LABEL_EXPR tree node and return it. */
2038
2039 tree
2040 build_case_label (tree low_value, tree high_value, tree label_decl)
2041 {
2042 tree t = make_node (CASE_LABEL_EXPR);
2043
2044 TREE_TYPE (t) = void_type_node;
2045 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2046
2047 CASE_LOW (t) = low_value;
2048 CASE_HIGH (t) = high_value;
2049 CASE_LABEL (t) = label_decl;
2050 CASE_CHAIN (t) = NULL_TREE;
2051
2052 return t;
2053 }
2054
2055 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2056 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2057 The latter determines the length of the HOST_WIDE_INT vector. */
2058
2059 tree
2060 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2061 {
2062 tree t;
2063 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2064 + sizeof (struct tree_int_cst));
2065
2066 gcc_assert (len);
2067 record_node_allocation_statistics (INTEGER_CST, length);
2068
2069 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2070
2071 TREE_SET_CODE (t, INTEGER_CST);
2072 TREE_INT_CST_NUNITS (t) = len;
2073 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2074 /* to_offset can only be applied to trees that are offset_int-sized
2075 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2076 must be exactly the precision of offset_int and so LEN is correct. */
2077 if (ext_len <= OFFSET_INT_ELTS)
2078 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2079 else
2080 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2081
2082 TREE_CONSTANT (t) = 1;
2083
2084 return t;
2085 }
2086
2087 /* Build a newly constructed TREE_VEC node of length LEN. */
2088
2089 tree
2090 make_tree_vec_stat (int len MEM_STAT_DECL)
2091 {
2092 tree t;
2093 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2094
2095 record_node_allocation_statistics (TREE_VEC, length);
2096
2097 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2098
2099 TREE_SET_CODE (t, TREE_VEC);
2100 TREE_VEC_LENGTH (t) = len;
2101
2102 return t;
2103 }
2104
2105 /* Grow a TREE_VEC node to new length LEN. */
2106
2107 tree
2108 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2109 {
2110 gcc_assert (TREE_CODE (v) == TREE_VEC);
2111
2112 int oldlen = TREE_VEC_LENGTH (v);
2113 gcc_assert (len > oldlen);
2114
2115 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2116 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2117
2118 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2119
2120 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2121
2122 TREE_VEC_LENGTH (v) = len;
2123
2124 return v;
2125 }
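
/* A usage sketch for the TREE_VEC allocators above (illustrative only,
   kept under "#if 0"); make_tree_vec and grow_tree_vec are assumed to
   be the usual MEM_STAT-less convenience wrappers from tree.h.  */
#if 0
static tree
sketch_tree_vec (void)
{
  tree v = make_tree_vec (2);
  TREE_VEC_ELT (v, 0) = integer_zero_node;
  TREE_VEC_ELT (v, 1) = integer_one_node;

  /* Growing may reallocate the node, so always use the return value,
     and initialize the newly added slots explicitly.  */
  v = grow_tree_vec (v, 4);
  TREE_VEC_ELT (v, 2) = NULL_TREE;
  TREE_VEC_ELT (v, 3) = NULL_TREE;
  return v;
}
#endif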
2126 \f
2127 /* Return 1 if EXPR is the integer constant zero or a complex constant
2128 of zero. */
2129
2130 int
2131 integer_zerop (const_tree expr)
2132 {
2133 STRIP_NOPS (expr);
2134
2135 switch (TREE_CODE (expr))
2136 {
2137 case INTEGER_CST:
2138 return wi::eq_p (expr, 0);
2139 case COMPLEX_CST:
2140 return (integer_zerop (TREE_REALPART (expr))
2141 && integer_zerop (TREE_IMAGPART (expr)));
2142 case VECTOR_CST:
2143 {
2144 unsigned i;
2145 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2146 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2147 return false;
2148 return true;
2149 }
2150 default:
2151 return false;
2152 }
2153 }
2154
2155 /* Return 1 if EXPR is the integer constant one or the corresponding
2156 complex constant. */
2157
2158 int
2159 integer_onep (const_tree expr)
2160 {
2161 STRIP_NOPS (expr);
2162
2163 switch (TREE_CODE (expr))
2164 {
2165 case INTEGER_CST:
2166 return wi::eq_p (wi::to_widest (expr), 1);
2167 case COMPLEX_CST:
2168 return (integer_onep (TREE_REALPART (expr))
2169 && integer_zerop (TREE_IMAGPART (expr)));
2170 case VECTOR_CST:
2171 {
2172 unsigned i;
2173 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2174 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2175 return false;
2176 return true;
2177 }
2178 default:
2179 return false;
2180 }
2181 }
2182
2183 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2184 return 1 if every piece is the integer constant one. */
2185
2186 int
2187 integer_each_onep (const_tree expr)
2188 {
2189 STRIP_NOPS (expr);
2190
2191 if (TREE_CODE (expr) == COMPLEX_CST)
2192 return (integer_onep (TREE_REALPART (expr))
2193 && integer_onep (TREE_IMAGPART (expr)));
2194 else
2195 return integer_onep (expr);
2196 }
2197
2198 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2199 it contains, or a complex or vector whose subparts are such integers. */
2200
2201 int
2202 integer_all_onesp (const_tree expr)
2203 {
2204 STRIP_NOPS (expr);
2205
2206 if (TREE_CODE (expr) == COMPLEX_CST
2207 && integer_all_onesp (TREE_REALPART (expr))
2208 && integer_all_onesp (TREE_IMAGPART (expr)))
2209 return 1;
2210
2211 else if (TREE_CODE (expr) == VECTOR_CST)
2212 {
2213 unsigned i;
2214 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2215 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2216 return 0;
2217 return 1;
2218 }
2219
2220 else if (TREE_CODE (expr) != INTEGER_CST)
2221 return 0;
2222
2223 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2224 }
2225
2226 /* Return 1 if EXPR is the integer constant minus one. */
2227
2228 int
2229 integer_minus_onep (const_tree expr)
2230 {
2231 STRIP_NOPS (expr);
2232
2233 if (TREE_CODE (expr) == COMPLEX_CST)
2234 return (integer_all_onesp (TREE_REALPART (expr))
2235 && integer_zerop (TREE_IMAGPART (expr)));
2236 else
2237 return integer_all_onesp (expr);
2238 }
2239
2240 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2241 one bit on). */
2242
2243 int
2244 integer_pow2p (const_tree expr)
2245 {
2246 STRIP_NOPS (expr);
2247
2248 if (TREE_CODE (expr) == COMPLEX_CST
2249 && integer_pow2p (TREE_REALPART (expr))
2250 && integer_zerop (TREE_IMAGPART (expr)))
2251 return 1;
2252
2253 if (TREE_CODE (expr) != INTEGER_CST)
2254 return 0;
2255
2256 return wi::popcount (expr) == 1;
2257 }
2258
2259 /* Return 1 if EXPR is an integer constant other than zero or a
2260 complex constant other than zero. */
2261
2262 int
2263 integer_nonzerop (const_tree expr)
2264 {
2265 STRIP_NOPS (expr);
2266
2267 return ((TREE_CODE (expr) == INTEGER_CST
2268 && !wi::eq_p (expr, 0))
2269 || (TREE_CODE (expr) == COMPLEX_CST
2270 && (integer_nonzerop (TREE_REALPART (expr))
2271 || integer_nonzerop (TREE_IMAGPART (expr)))));
2272 }
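
/* A usage sketch for the integer predicates above (illustrative only,
   kept under "#if 0").  They all strip no-op conversions first, so a
   same-width cast of a constant behaves like the constant itself.  */
#if 0
static void
sketch_integer_predicates (void)
{
  tree four = build_int_cst (integer_type_node, 4);
  gcc_checking_assert (!integer_zerop (four));
  gcc_checking_assert (integer_nonzerop (four));
  gcc_checking_assert (integer_pow2p (four));	/* exactly one bit set */

  tree m1 = build_minus_one_cst (integer_type_node);
  gcc_checking_assert (integer_minus_onep (m1));
  gcc_checking_assert (integer_all_onesp (m1));	/* all bits set */
}
#endif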
2273
2274 /* Return 1 if EXPR is the fixed-point constant zero. */
2275
2276 int
2277 fixed_zerop (const_tree expr)
2278 {
2279 return (TREE_CODE (expr) == FIXED_CST
2280 && TREE_FIXED_CST (expr).data.is_zero ());
2281 }
2282
2283 /* Return the power of two represented by a tree node known to be a
2284 power of two. */
2285
2286 int
2287 tree_log2 (const_tree expr)
2288 {
2289 STRIP_NOPS (expr);
2290
2291 if (TREE_CODE (expr) == COMPLEX_CST)
2292 return tree_log2 (TREE_REALPART (expr));
2293
2294 return wi::exact_log2 (expr);
2295 }
2296
2297 /* Similar, but return the largest integer Y such that 2 ** Y is less
2298 than or equal to EXPR. */
2299
2300 int
2301 tree_floor_log2 (const_tree expr)
2302 {
2303 STRIP_NOPS (expr);
2304
2305 if (TREE_CODE (expr) == COMPLEX_CST)
2306 return tree_log2 (TREE_REALPART (expr));
2307
2308 return wi::floor_log2 (expr);
2309 }
2310
2311 /* Return number of known trailing zero bits in EXPR, or, if the value of
2312 EXPR is known to be zero, the precision of its type.  */
2313
2314 unsigned int
2315 tree_ctz (const_tree expr)
2316 {
2317 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2318 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2319 return 0;
2320
2321 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2322 switch (TREE_CODE (expr))
2323 {
2324 case INTEGER_CST:
2325 ret1 = wi::ctz (expr);
2326 return MIN (ret1, prec);
2327 case SSA_NAME:
2328 ret1 = wi::ctz (get_nonzero_bits (expr));
2329 return MIN (ret1, prec);
2330 case PLUS_EXPR:
2331 case MINUS_EXPR:
2332 case BIT_IOR_EXPR:
2333 case BIT_XOR_EXPR:
2334 case MIN_EXPR:
2335 case MAX_EXPR:
2336 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2337 if (ret1 == 0)
2338 return ret1;
2339 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2340 return MIN (ret1, ret2);
2341 case POINTER_PLUS_EXPR:
2342 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2343 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2344 /* Second operand is sizetype, which could in theory be
2345 wider than the pointer's precision. Make sure we never
2346 return more than prec. */
2347 ret2 = MIN (ret2, prec);
2348 return MIN (ret1, ret2);
2349 case BIT_AND_EXPR:
2350 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2351 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2352 return MAX (ret1, ret2);
2353 case MULT_EXPR:
2354 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2355 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2356 return MIN (ret1 + ret2, prec);
2357 case LSHIFT_EXPR:
2358 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2359 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2360 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2361 {
2362 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2363 return MIN (ret1 + ret2, prec);
2364 }
2365 return ret1;
2366 case RSHIFT_EXPR:
2367 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2368 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2369 {
2370 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2371 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2372 if (ret1 > ret2)
2373 return ret1 - ret2;
2374 }
2375 return 0;
2376 case TRUNC_DIV_EXPR:
2377 case CEIL_DIV_EXPR:
2378 case FLOOR_DIV_EXPR:
2379 case ROUND_DIV_EXPR:
2380 case EXACT_DIV_EXPR:
2381 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2382 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2383 {
2384 int l = tree_log2 (TREE_OPERAND (expr, 1));
2385 if (l >= 0)
2386 {
2387 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2388 ret2 = l;
2389 if (ret1 > ret2)
2390 return ret1 - ret2;
2391 }
2392 }
2393 return 0;
2394 CASE_CONVERT:
2395 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2396 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2397 ret1 = prec;
2398 return MIN (ret1, prec);
2399 case SAVE_EXPR:
2400 return tree_ctz (TREE_OPERAND (expr, 0));
2401 case COND_EXPR:
2402 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2403 if (ret1 == 0)
2404 return 0;
2405 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2406 return MIN (ret1, ret2);
2407 case COMPOUND_EXPR:
2408 return tree_ctz (TREE_OPERAND (expr, 1));
2409 case ADDR_EXPR:
2410 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2411 if (ret1 > BITS_PER_UNIT)
2412 {
2413 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2414 return MIN (ret1, prec);
2415 }
2416 return 0;
2417 default:
2418 return 0;
2419 }
2420 }
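
/* A worked sketch of the recursion above (illustrative only, kept
   under "#if 0"): trailing-zero counts of the operands combine through
   arithmetic, e.g. a multiplication adds the two counts together.  */
#if 0
static void
sketch_tree_ctz (void)
{
  tree t40 = build_int_cst (integer_type_node, 40);	/* 0b101000 */
  gcc_checking_assert (tree_ctz (t40) == 3);

  tree t8 = build_int_cst (integer_type_node, 8);	/* 0b1000 */
  tree prod = build2 (MULT_EXPR, integer_type_node, t40, t8);
  gcc_checking_assert (tree_ctz (prod) == 6);	/* 3 + 3, capped at prec */
}
#endif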
2421
2422 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2423 decimal float constants, so don't return 1 for them. */
2424
2425 int
2426 real_zerop (const_tree expr)
2427 {
2428 STRIP_NOPS (expr);
2429
2430 switch (TREE_CODE (expr))
2431 {
2432 case REAL_CST:
2433 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2434 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2435 case COMPLEX_CST:
2436 return real_zerop (TREE_REALPART (expr))
2437 && real_zerop (TREE_IMAGPART (expr));
2438 case VECTOR_CST:
2439 {
2440 unsigned i;
2441 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2442 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2443 return false;
2444 return true;
2445 }
2446 default:
2447 return false;
2448 }
2449 }
2450
2451 /* Return 1 if EXPR is the real constant one in real or complex form.
2452 Trailing zeroes matter for decimal float constants, so don't return
2453 1 for them. */
2454
2455 int
2456 real_onep (const_tree expr)
2457 {
2458 STRIP_NOPS (expr);
2459
2460 switch (TREE_CODE (expr))
2461 {
2462 case REAL_CST:
2463 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2464 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2465 case COMPLEX_CST:
2466 return real_onep (TREE_REALPART (expr))
2467 && real_zerop (TREE_IMAGPART (expr));
2468 case VECTOR_CST:
2469 {
2470 unsigned i;
2471 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2472 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2473 return false;
2474 return true;
2475 }
2476 default:
2477 return false;
2478 }
2479 }
2480
2481 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2482 matter for decimal float constants, so don't return 1 for them. */
2483
2484 int
2485 real_minus_onep (const_tree expr)
2486 {
2487 STRIP_NOPS (expr);
2488
2489 switch (TREE_CODE (expr))
2490 {
2491 case REAL_CST:
2492 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2493 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2494 case COMPLEX_CST:
2495 return real_minus_onep (TREE_REALPART (expr))
2496 && real_zerop (TREE_IMAGPART (expr));
2497 case VECTOR_CST:
2498 {
2499 unsigned i;
2500 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2501 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2502 return false;
2503 return true;
2504 }
2505 default:
2506 return false;
2507 }
2508 }
2509
2510 /* Nonzero if EXP is a constant or a cast of a constant. */
2511
2512 int
2513 really_constant_p (const_tree exp)
2514 {
2515 /* This is not quite the same as STRIP_NOPS. It does more. */
2516 while (CONVERT_EXPR_P (exp)
2517 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2518 exp = TREE_OPERAND (exp, 0);
2519 return TREE_CONSTANT (exp);
2520 }
2521 \f
2522 /* Return first list element whose TREE_VALUE is ELEM.
2523 Return 0 if ELEM is not in LIST. */
2524
2525 tree
2526 value_member (tree elem, tree list)
2527 {
2528 while (list)
2529 {
2530 if (elem == TREE_VALUE (list))
2531 return list;
2532 list = TREE_CHAIN (list);
2533 }
2534 return NULL_TREE;
2535 }
2536
2537 /* Return first list element whose TREE_PURPOSE is ELEM.
2538 Return 0 if ELEM is not in LIST. */
2539
2540 tree
2541 purpose_member (const_tree elem, tree list)
2542 {
2543 while (list)
2544 {
2545 if (elem == TREE_PURPOSE (list))
2546 return list;
2547 list = TREE_CHAIN (list);
2548 }
2549 return NULL_TREE;
2550 }
2551
2552 /* Return true if ELEM is in V. */
2553
2554 bool
2555 vec_member (const_tree elem, vec<tree, va_gc> *v)
2556 {
2557 unsigned ix;
2558 tree t;
2559 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2560 if (elem == t)
2561 return true;
2562 return false;
2563 }
2564
2565 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2566 NULL_TREE. */
2567
2568 tree
2569 chain_index (int idx, tree chain)
2570 {
2571 for (; chain && idx > 0; --idx)
2572 chain = TREE_CHAIN (chain);
2573 return chain;
2574 }
2575
2576 /* Return nonzero if ELEM is part of the chain CHAIN. */
2577
2578 int
2579 chain_member (const_tree elem, const_tree chain)
2580 {
2581 while (chain)
2582 {
2583 if (elem == chain)
2584 return 1;
2585 chain = DECL_CHAIN (chain);
2586 }
2587
2588 return 0;
2589 }
2590
2591 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2592 We expect a null pointer to mark the end of the chain.
2593 This is the Lisp primitive `length'. */
2594
2595 int
2596 list_length (const_tree t)
2597 {
2598 const_tree p = t;
2599 #ifdef ENABLE_TREE_CHECKING
2600 const_tree q = t;
2601 #endif
2602 int len = 0;
2603
2604 while (p)
2605 {
2606 p = TREE_CHAIN (p);
2607 #ifdef ENABLE_TREE_CHECKING
2608 if (len % 2)
2609 q = TREE_CHAIN (q);
2610 gcc_assert (p != q);
2611 #endif
2612 len++;
2613 }
2614
2615 return len;
2616 }
2617
2618 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2619 UNION_TYPE TYPE, or NULL_TREE if none. */
2620
2621 tree
2622 first_field (const_tree type)
2623 {
2624 tree t = TYPE_FIELDS (type);
2625 while (t && TREE_CODE (t) != FIELD_DECL)
2626 t = TREE_CHAIN (t);
2627 return t;
2628 }
2629
2630 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2631 by modifying the last node in chain 1 to point to chain 2.
2632 This is the Lisp primitive `nconc'. */
2633
2634 tree
2635 chainon (tree op1, tree op2)
2636 {
2637 tree t1;
2638
2639 if (!op1)
2640 return op2;
2641 if (!op2)
2642 return op1;
2643
2644 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2645 continue;
2646 TREE_CHAIN (t1) = op2;
2647
2648 #ifdef ENABLE_TREE_CHECKING
2649 {
2650 tree t2;
2651 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2652 gcc_assert (t2 != t1);
2653 }
2654 #endif
2655
2656 return op1;
2657 }
2658
2659 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2660
2661 tree
2662 tree_last (tree chain)
2663 {
2664 tree next;
2665 if (chain)
2666 while ((next = TREE_CHAIN (chain)))
2667 chain = next;
2668 return chain;
2669 }
2670
2671 /* Reverse the order of elements in the chain T,
2672 and return the new head of the chain (old last element). */
2673
2674 tree
2675 nreverse (tree t)
2676 {
2677 tree prev = 0, decl, next;
2678 for (decl = t; decl; decl = next)
2679 {
2680 /* We shouldn't be using this function to reverse BLOCK chains; we
2681 have blocks_nreverse for that. */
2682 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2683 next = TREE_CHAIN (decl);
2684 TREE_CHAIN (decl) = prev;
2685 prev = decl;
2686 }
2687 return prev;
2688 }
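
/* A usage sketch for the chain utilities above (illustrative only,
   kept under "#if 0"); build_tree_list and tree_cons are the tree.h
   wrappers of the TREE_LIST allocators defined further below.  */
#if 0
static void
sketch_tree_chains (void)
{
  /* Build the list (a b), append (c), then reverse it in place.  */
  tree tail = build_tree_list (NULL_TREE, get_identifier ("b"));
  tree list = tree_cons (NULL_TREE, get_identifier ("a"), tail);
  tree last = build_tree_list (NULL_TREE, get_identifier ("c"));

  list = chainon (list, last);
  gcc_checking_assert (list_length (list) == 3);
  gcc_checking_assert (tree_last (list) == last);

  list = nreverse (list);	/* now (c b a); the old head is the new tail */
  gcc_checking_assert (TREE_VALUE (list) == get_identifier ("c"));
}
#endif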
2689 \f
2690 /* Return a newly created TREE_LIST node whose
2691 purpose and value fields are PARM and VALUE. */
2692
2693 tree
2694 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2695 {
2696 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2697 TREE_PURPOSE (t) = parm;
2698 TREE_VALUE (t) = value;
2699 return t;
2700 }
2701
2702 /* Build a chain of TREE_LIST nodes from a vector. */
2703
2704 tree
2705 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2706 {
2707 tree ret = NULL_TREE;
2708 tree *pp = &ret;
2709 unsigned int i;
2710 tree t;
2711 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2712 {
2713 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2714 pp = &TREE_CHAIN (*pp);
2715 }
2716 return ret;
2717 }
2718
2719 /* Return a newly created TREE_LIST node whose
2720 purpose and value fields are PURPOSE and VALUE
2721 and whose TREE_CHAIN is CHAIN. */
2722
2723 tree
2724 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2725 {
2726 tree node;
2727
2728 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2729 memset (node, 0, sizeof (struct tree_common));
2730
2731 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2732
2733 TREE_SET_CODE (node, TREE_LIST);
2734 TREE_CHAIN (node) = chain;
2735 TREE_PURPOSE (node) = purpose;
2736 TREE_VALUE (node) = value;
2737 return node;
2738 }
2739
2740 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2741 trees. */
2742
2743 vec<tree, va_gc> *
2744 ctor_to_vec (tree ctor)
2745 {
2746 vec<tree, va_gc> *vec;
2747 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2748 unsigned int ix;
2749 tree val;
2750
2751 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2752 vec->quick_push (val);
2753
2754 return vec;
2755 }
2756 \f
2757 /* Return the size nominally occupied by an object of type TYPE
2758 when it resides in memory. The value is measured in units of bytes,
2759 and its data type is that normally used for type sizes
2760 (which is the first type created by make_signed_type or
2761 make_unsigned_type). */
2762
2763 tree
2764 size_in_bytes (const_tree type)
2765 {
2766 tree t;
2767
2768 if (type == error_mark_node)
2769 return integer_zero_node;
2770
2771 type = TYPE_MAIN_VARIANT (type);
2772 t = TYPE_SIZE_UNIT (type);
2773
2774 if (t == 0)
2775 {
2776 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2777 return size_zero_node;
2778 }
2779
2780 return t;
2781 }
2782
2783 /* Return the size of TYPE (in bytes) as a wide integer
2784 or return -1 if the size can vary or is larger than an integer. */
2785
2786 HOST_WIDE_INT
2787 int_size_in_bytes (const_tree type)
2788 {
2789 tree t;
2790
2791 if (type == error_mark_node)
2792 return 0;
2793
2794 type = TYPE_MAIN_VARIANT (type);
2795 t = TYPE_SIZE_UNIT (type);
2796
2797 if (t && tree_fits_uhwi_p (t))
2798 return TREE_INT_CST_LOW (t);
2799 else
2800 return -1;
2801 }
2802
2803 /* Return the maximum size of TYPE (in bytes) as a wide integer
2804 or return -1 if the size can vary or is larger than an integer. */
2805
2806 HOST_WIDE_INT
2807 max_int_size_in_bytes (const_tree type)
2808 {
2809 HOST_WIDE_INT size = -1;
2810 tree size_tree;
2811
2812 /* If this is an array type, check for a possible MAX_SIZE attached. */
2813
2814 if (TREE_CODE (type) == ARRAY_TYPE)
2815 {
2816 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2817
2818 if (size_tree && tree_fits_uhwi_p (size_tree))
2819 size = tree_to_uhwi (size_tree);
2820 }
2821
2822 /* If we still haven't been able to get a size, see if the language
2823 can compute a maximum size. */
2824
2825 if (size == -1)
2826 {
2827 size_tree = lang_hooks.types.max_size (type);
2828
2829 if (size_tree && tree_fits_uhwi_p (size_tree))
2830 size = tree_to_uhwi (size_tree);
2831 }
2832
2833 return size;
2834 }
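
/* A usage sketch for the size queries above (illustrative only, kept
   under "#if 0").  int_size_in_bytes answers -1 when the size is
   variable or too large, whereas size_in_bytes always yields a tree.  */
#if 0
static void
sketch_type_sizes (void)
{
  gcc_checking_assert (int_size_in_bytes (char_type_node) == 1);

  tree sz = size_in_bytes (integer_type_node); /* TYPE_SIZE_UNIT, an INTEGER_CST */
  gcc_checking_assert (tree_fits_uhwi_p (sz));
}
#endif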
2835 \f
2836 /* Return the bit position of FIELD, in bits from the start of the record.
2837 This is a tree of type bitsizetype. */
2838
2839 tree
2840 bit_position (const_tree field)
2841 {
2842 return bit_from_pos (DECL_FIELD_OFFSET (field),
2843 DECL_FIELD_BIT_OFFSET (field));
2844 }
2845 \f
2846 /* Return the byte position of FIELD, in bytes from the start of the record.
2847 This is a tree of type sizetype. */
2848
2849 tree
2850 byte_position (const_tree field)
2851 {
2852 return byte_from_pos (DECL_FIELD_OFFSET (field),
2853 DECL_FIELD_BIT_OFFSET (field));
2854 }
2855
2856 /* Likewise, but return as an integer. It must be representable in
2857 that way (since it could be a signed value, we don't have the
2858 option of returning -1 like int_size_in_bytes can).  */
2859
2860 HOST_WIDE_INT
2861 int_byte_position (const_tree field)
2862 {
2863 return tree_to_shwi (byte_position (field));
2864 }
2865 \f
2866 /* Return the strictest alignment, in bits, that T is known to have. */
2867
2868 unsigned int
2869 expr_align (const_tree t)
2870 {
2871 unsigned int align0, align1;
2872
2873 switch (TREE_CODE (t))
2874 {
2875 CASE_CONVERT: case NON_LVALUE_EXPR:
2876 /* If we have conversions, we know that the alignment of the
2877 object must meet each of the alignments of the types. */
2878 align0 = expr_align (TREE_OPERAND (t, 0));
2879 align1 = TYPE_ALIGN (TREE_TYPE (t));
2880 return MAX (align0, align1);
2881
2882 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2883 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2884 case CLEANUP_POINT_EXPR:
2885 /* These don't change the alignment of an object. */
2886 return expr_align (TREE_OPERAND (t, 0));
2887
2888 case COND_EXPR:
2889 /* The best we can do is say that the alignment is the least aligned
2890 of the two arms. */
2891 align0 = expr_align (TREE_OPERAND (t, 1));
2892 align1 = expr_align (TREE_OPERAND (t, 2));
2893 return MIN (align0, align1);
2894
2895 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2896 meaningfully, it's always 1. */
2897 case LABEL_DECL: case CONST_DECL:
2898 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2899 case FUNCTION_DECL:
2900 gcc_assert (DECL_ALIGN (t) != 0);
2901 return DECL_ALIGN (t);
2902
2903 default:
2904 break;
2905 }
2906
2907 /* Otherwise take the alignment from that of the type. */
2908 return TYPE_ALIGN (TREE_TYPE (t));
2909 }
2910 \f
2911 /* Return, as a tree node, the number of elements for TYPE (which is an
2912 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2913
2914 tree
2915 array_type_nelts (const_tree type)
2916 {
2917 tree index_type, min, max;
2918
2919 /* If they did it with unspecified bounds, then we should have already
2920 given an error about it before we got here. */
2921 if (! TYPE_DOMAIN (type))
2922 return error_mark_node;
2923
2924 index_type = TYPE_DOMAIN (type);
2925 min = TYPE_MIN_VALUE (index_type);
2926 max = TYPE_MAX_VALUE (index_type);
2927
2928 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2929 if (!max)
2930 return error_mark_node;
2931
2932 return (integer_zerop (min)
2933 ? max
2934 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2935 }
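
/* A usage sketch (illustrative only, kept under "#if 0"): since
   array_type_nelts returns the element count minus one, a caller that
   wants the count itself adds one back in the same type.  */
#if 0
static tree
sketch_array_type_nelts (tree array_type)
{
  tree nelts_m1 = array_type_nelts (array_type);	/* e.g. 9 for int[10] */
  return fold_build2 (PLUS_EXPR, TREE_TYPE (nelts_m1), nelts_m1,
		      build_int_cst (TREE_TYPE (nelts_m1), 1));
}
#endif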
2936 \f
2937 /* If arg is static -- a reference to an object in static storage -- then
2938 return the object. This is not the same as the C meaning of `static'.
2939 If arg isn't static, return NULL. */
2940
2941 tree
2942 staticp (tree arg)
2943 {
2944 switch (TREE_CODE (arg))
2945 {
2946 case FUNCTION_DECL:
2947 /* Nested functions are static, even though taking their address will
2948 involve a trampoline as we unnest the nested function and create
2949 the trampoline on the tree level. */
2950 return arg;
2951
2952 case VAR_DECL:
2953 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2954 && ! DECL_THREAD_LOCAL_P (arg)
2955 && ! DECL_DLLIMPORT_P (arg)
2956 ? arg : NULL);
2957
2958 case CONST_DECL:
2959 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2960 ? arg : NULL);
2961
2962 case CONSTRUCTOR:
2963 return TREE_STATIC (arg) ? arg : NULL;
2964
2965 case LABEL_DECL:
2966 case STRING_CST:
2967 return arg;
2968
2969 case COMPONENT_REF:
2970 /* If the thing being referenced is not a field, then it is
2971 something language specific. */
2972 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2973
2974 /* If we are referencing a bitfield, we can't evaluate an
2975 ADDR_EXPR at compile time and so it isn't a constant. */
2976 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2977 return NULL;
2978
2979 return staticp (TREE_OPERAND (arg, 0));
2980
2981 case BIT_FIELD_REF:
2982 return NULL;
2983
2984 case INDIRECT_REF:
2985 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2986
2987 case ARRAY_REF:
2988 case ARRAY_RANGE_REF:
2989 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2990 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2991 return staticp (TREE_OPERAND (arg, 0));
2992 else
2993 return NULL;
2994
2995 case COMPOUND_LITERAL_EXPR:
2996 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2997
2998 default:
2999 return NULL;
3000 }
3001 }
3002
3003 \f
3004
3005
3006 /* Return whether OP is a DECL whose address is function-invariant. */
3007
3008 bool
3009 decl_address_invariant_p (const_tree op)
3010 {
3011 /* The conditions below are slightly less strict than the ones in
3012 staticp. */
3013
3014 switch (TREE_CODE (op))
3015 {
3016 case PARM_DECL:
3017 case RESULT_DECL:
3018 case LABEL_DECL:
3019 case FUNCTION_DECL:
3020 return true;
3021
3022 case VAR_DECL:
3023 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3024 || DECL_THREAD_LOCAL_P (op)
3025 || DECL_CONTEXT (op) == current_function_decl
3026 || decl_function_context (op) == current_function_decl)
3027 return true;
3028 break;
3029
3030 case CONST_DECL:
3031 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3032 || decl_function_context (op) == current_function_decl)
3033 return true;
3034 break;
3035
3036 default:
3037 break;
3038 }
3039
3040 return false;
3041 }
3042
3043 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3044
3045 bool
3046 decl_address_ip_invariant_p (const_tree op)
3047 {
3048 /* The conditions below are slightly less strict than the ones in
3049 staticp. */
3050
3051 switch (TREE_CODE (op))
3052 {
3053 case LABEL_DECL:
3054 case FUNCTION_DECL:
3055 case STRING_CST:
3056 return true;
3057
3058 case VAR_DECL:
3059 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3060 && !DECL_DLLIMPORT_P (op))
3061 || DECL_THREAD_LOCAL_P (op))
3062 return true;
3063 break;
3064
3065 case CONST_DECL:
3066 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3067 return true;
3068 break;
3069
3070 default:
3071 break;
3072 }
3073
3074 return false;
3075 }
3076
3077
3078 /* Return true if T is function-invariant (internal function, does
3079 not handle arithmetic; that's handled in skip_simple_arithmetic and
3080 tree_invariant_p). */
3081
3082 static bool tree_invariant_p (tree t);
3083
3084 static bool
3085 tree_invariant_p_1 (tree t)
3086 {
3087 tree op;
3088
3089 if (TREE_CONSTANT (t)
3090 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3091 return true;
3092
3093 switch (TREE_CODE (t))
3094 {
3095 case SAVE_EXPR:
3096 return true;
3097
3098 case ADDR_EXPR:
3099 op = TREE_OPERAND (t, 0);
3100 while (handled_component_p (op))
3101 {
3102 switch (TREE_CODE (op))
3103 {
3104 case ARRAY_REF:
3105 case ARRAY_RANGE_REF:
3106 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3107 || TREE_OPERAND (op, 2) != NULL_TREE
3108 || TREE_OPERAND (op, 3) != NULL_TREE)
3109 return false;
3110 break;
3111
3112 case COMPONENT_REF:
3113 if (TREE_OPERAND (op, 2) != NULL_TREE)
3114 return false;
3115 break;
3116
3117 default:;
3118 }
3119 op = TREE_OPERAND (op, 0);
3120 }
3121
3122 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3123
3124 default:
3125 break;
3126 }
3127
3128 return false;
3129 }
3130
3131 /* Return true if T is function-invariant. */
3132
3133 static bool
3134 tree_invariant_p (tree t)
3135 {
3136 tree inner = skip_simple_arithmetic (t);
3137 return tree_invariant_p_1 (inner);
3138 }
3139
3140 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3141 Do this to any expression which may be used in more than one place,
3142 but must be evaluated only once.
3143
3144 Normally, expand_expr would reevaluate the expression each time.
3145 Calling save_expr produces something that is evaluated and recorded
3146 the first time expand_expr is called on it. Subsequent calls to
3147 expand_expr just reuse the recorded value.
3148
3149 The call to expand_expr that generates code that actually computes
3150 the value is the first call *at compile time*. Subsequent calls
3151 *at compile time* generate code to use the saved value.
3152 This produces the correct result provided that *at run time* control
3153 always flows through the insns made by the first expand_expr
3154 before reaching the other places where the save_expr was evaluated.
3155 You, the caller of save_expr, must make sure this is so.
3156
3157 Constants, and certain read-only nodes, are returned with no
3158 SAVE_EXPR because that is safe. Expressions containing placeholders
3159 are not touched; see tree.def for an explanation of what these
3160 are used for. */
3161
3162 tree
3163 save_expr (tree expr)
3164 {
3165 tree t = fold (expr);
3166 tree inner;
3167
3168 /* If the tree evaluates to a constant, then we don't want to hide that
3169 fact (i.e. this allows further folding, and direct checks for constants).
3170 However, a read-only object that has side effects cannot be bypassed.
3171 Since it is no problem to reevaluate literals, we just return the
3172 literal node. */
3173 inner = skip_simple_arithmetic (t);
3174 if (TREE_CODE (inner) == ERROR_MARK)
3175 return inner;
3176
3177 if (tree_invariant_p_1 (inner))
3178 return t;
3179
3180 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3181 it means that the size or offset of some field of an object depends on
3182 the value within another field.
3183
3184 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3185 and some variable since it would then need to be both evaluated once and
3186 evaluated more than once. Front-ends must assure this case cannot
3187 happen by surrounding any such subexpressions in their own SAVE_EXPR
3188 and forcing evaluation at the proper time. */
3189 if (contains_placeholder_p (inner))
3190 return t;
3191
3192 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3193 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3194
3195 /* This expression might be placed ahead of a jump to ensure that the
3196 value was computed on both sides of the jump. So make sure it isn't
3197 eliminated as dead. */
3198 TREE_SIDE_EFFECTS (t) = 1;
3199 return t;
3200 }
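
/* A usage sketch for save_expr (illustrative only, kept under
   "#if 0"): a caller that needs EXPR in two places wraps it once so
   that any side effects are evaluated a single time at run time.  */
#if 0
static tree
sketch_square (tree expr)
{
  /* SAVED is a SAVE_EXPR unless EXPR was already invariant, in which
     case reusing it directly is just as safe.  */
  tree saved = save_expr (expr);
  return build2 (MULT_EXPR, TREE_TYPE (saved), saved, saved);
}
#endif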
3201
3202 /* Look inside EXPR into any simple arithmetic operations. Return the
3203 outermost non-arithmetic or non-invariant node. */
3204
3205 tree
3206 skip_simple_arithmetic (tree expr)
3207 {
3208 /* We don't care about whether this can be used as an lvalue in this
3209 context. */
3210 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3211 expr = TREE_OPERAND (expr, 0);
3212
3213 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3214 a constant, it will be more efficient to not make another SAVE_EXPR since
3215 it will allow better simplification and GCSE will be able to merge the
3216 computations if they actually occur. */
3217 while (true)
3218 {
3219 if (UNARY_CLASS_P (expr))
3220 expr = TREE_OPERAND (expr, 0);
3221 else if (BINARY_CLASS_P (expr))
3222 {
3223 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3224 expr = TREE_OPERAND (expr, 0);
3225 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3226 expr = TREE_OPERAND (expr, 1);
3227 else
3228 break;
3229 }
3230 else
3231 break;
3232 }
3233
3234 return expr;
3235 }
3236
3237 /* Look inside EXPR into simple arithmetic operations involving constants.
3238 Return the outermost non-arithmetic or non-constant node. */
3239
3240 tree
3241 skip_simple_constant_arithmetic (tree expr)
3242 {
3243 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3244 expr = TREE_OPERAND (expr, 0);
3245
3246 while (true)
3247 {
3248 if (UNARY_CLASS_P (expr))
3249 expr = TREE_OPERAND (expr, 0);
3250 else if (BINARY_CLASS_P (expr))
3251 {
3252 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3253 expr = TREE_OPERAND (expr, 0);
3254 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3255 expr = TREE_OPERAND (expr, 1);
3256 else
3257 break;
3258 }
3259 else
3260 break;
3261 }
3262
3263 return expr;
3264 }
3265
3266 /* Return which tree structure is used by T. */
3267
3268 enum tree_node_structure_enum
3269 tree_node_structure (const_tree t)
3270 {
3271 const enum tree_code code = TREE_CODE (t);
3272 return tree_node_structure_for_code (code);
3273 }
3274
3275 /* Set various status flags when building a CALL_EXPR object T. */
3276
3277 static void
3278 process_call_operands (tree t)
3279 {
3280 bool side_effects = TREE_SIDE_EFFECTS (t);
3281 bool read_only = false;
3282 int i = call_expr_flags (t);
3283
3284 /* Calls have side-effects, except those to const or pure functions. */
3285 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3286 side_effects = true;
3287 /* Propagate TREE_READONLY of arguments for const functions. */
3288 if (i & ECF_CONST)
3289 read_only = true;
3290
3291 if (!side_effects || read_only)
3292 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3293 {
3294 tree op = TREE_OPERAND (t, i);
3295 if (op && TREE_SIDE_EFFECTS (op))
3296 side_effects = true;
3297 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3298 read_only = false;
3299 }
3300
3301 TREE_SIDE_EFFECTS (t) = side_effects;
3302 TREE_READONLY (t) = read_only;
3303 }
3304 \f
3305 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3306 size or offset that depends on a field within a record. */
3307
3308 bool
3309 contains_placeholder_p (const_tree exp)
3310 {
3311 enum tree_code code;
3312
3313 if (!exp)
3314 return 0;
3315
3316 code = TREE_CODE (exp);
3317 if (code == PLACEHOLDER_EXPR)
3318 return 1;
3319
3320 switch (TREE_CODE_CLASS (code))
3321 {
3322 case tcc_reference:
3323 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3324 position computations since they will be converted into a
3325 WITH_RECORD_EXPR involving the reference, which we assume
3326 here will be valid. */
3327 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3328
3329 case tcc_exceptional:
3330 if (code == TREE_LIST)
3331 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3332 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3333 break;
3334
3335 case tcc_unary:
3336 case tcc_binary:
3337 case tcc_comparison:
3338 case tcc_expression:
3339 switch (code)
3340 {
3341 case COMPOUND_EXPR:
3342 /* Ignoring the first operand isn't quite right, but works best. */
3343 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3344
3345 case COND_EXPR:
3346 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3347 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3348 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3349
3350 case SAVE_EXPR:
3351 /* The save_expr function never wraps anything containing
3352 a PLACEHOLDER_EXPR. */
3353 return 0;
3354
3355 default:
3356 break;
3357 }
3358
3359 switch (TREE_CODE_LENGTH (code))
3360 {
3361 case 1:
3362 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3363 case 2:
3364 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3365 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3366 default:
3367 return 0;
3368 }
3369
3370 case tcc_vl_exp:
3371 switch (code)
3372 {
3373 case CALL_EXPR:
3374 {
3375 const_tree arg;
3376 const_call_expr_arg_iterator iter;
3377 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3378 if (CONTAINS_PLACEHOLDER_P (arg))
3379 return 1;
3380 return 0;
3381 }
3382 default:
3383 return 0;
3384 }
3385
3386 default:
3387 return 0;
3388 }
3389 return 0;
3390 }
3391
3392 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3393 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3394 field positions. */
3395
3396 static bool
3397 type_contains_placeholder_1 (const_tree type)
3398 {
3399 /* If the size contains a placeholder or the parent type (the component
3400 type in the case of arrays) involves a placeholder, this type does. */
3401 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3402 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3403 || (!POINTER_TYPE_P (type)
3404 && TREE_TYPE (type)
3405 && type_contains_placeholder_p (TREE_TYPE (type))))
3406 return true;
3407
3408 /* Now do type-specific checks. Note that the last part of the check above
3409 greatly limits what we have to do below. */
3410 switch (TREE_CODE (type))
3411 {
3412 case VOID_TYPE:
3413 case COMPLEX_TYPE:
3414 case ENUMERAL_TYPE:
3415 case BOOLEAN_TYPE:
3416 case POINTER_TYPE:
3417 case OFFSET_TYPE:
3418 case REFERENCE_TYPE:
3419 case METHOD_TYPE:
3420 case FUNCTION_TYPE:
3421 case VECTOR_TYPE:
3422 case NULLPTR_TYPE:
3423 return false;
3424
3425 case INTEGER_TYPE:
3426 case REAL_TYPE:
3427 case FIXED_POINT_TYPE:
3428 /* Here we just check the bounds. */
3429 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3430 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3431
3432 case ARRAY_TYPE:
3433 /* We have already checked the component type above, so just check the
3434 domain type. */
3435 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3436
3437 case RECORD_TYPE:
3438 case UNION_TYPE:
3439 case QUAL_UNION_TYPE:
3440 {
3441 tree field;
3442
3443 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3444 if (TREE_CODE (field) == FIELD_DECL
3445 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3446 || (TREE_CODE (type) == QUAL_UNION_TYPE
3447 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3448 || type_contains_placeholder_p (TREE_TYPE (field))))
3449 return true;
3450
3451 return false;
3452 }
3453
3454 default:
3455 gcc_unreachable ();
3456 }
3457 }
3458
3459 /* Wrapper around above function used to cache its result. */
3460
3461 bool
3462 type_contains_placeholder_p (tree type)
3463 {
3464 bool result;
3465
3466 /* If the contains_placeholder_bits field has been initialized,
3467 then we know the answer. */
3468 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3469 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3470
3471 /* Indicate that we've seen this type node, and the answer is false.
3472 This is what we want to return if we run into recursion via fields. */
3473 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3474
3475 /* Compute the real value. */
3476 result = type_contains_placeholder_1 (type);
3477
3478 /* Store the real value. */
3479 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3480
3481 return result;
3482 }
3483 \f
3484 /* Push tree EXP onto vector QUEUE if it is not already present. */
3485
3486 static void
3487 push_without_duplicates (tree exp, vec<tree> *queue)
3488 {
3489 unsigned int i;
3490 tree iter;
3491
3492 FOR_EACH_VEC_ELT (*queue, i, iter)
3493 if (simple_cst_equal (iter, exp) == 1)
3494 break;
3495
3496 if (!iter)
3497 queue->safe_push (exp);
3498 }
3499
3500 /* Given a tree EXP, find all occurrences of references to fields
3501 in a PLACEHOLDER_EXPR and place them in vector REFS without
3502 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3503 we assume here that EXP contains only arithmetic expressions
3504 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3505 argument list. */
3506
3507 void
3508 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3509 {
3510 enum tree_code code = TREE_CODE (exp);
3511 tree inner;
3512 int i;
3513
3514 /* We handle TREE_LIST and COMPONENT_REF separately. */
3515 if (code == TREE_LIST)
3516 {
3517 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3518 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3519 }
3520 else if (code == COMPONENT_REF)
3521 {
3522 for (inner = TREE_OPERAND (exp, 0);
3523 REFERENCE_CLASS_P (inner);
3524 inner = TREE_OPERAND (inner, 0))
3525 ;
3526
3527 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3528 push_without_duplicates (exp, refs);
3529 else
3530 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3531 }
3532 else
3533 switch (TREE_CODE_CLASS (code))
3534 {
3535 case tcc_constant:
3536 break;
3537
3538 case tcc_declaration:
3539 /* Variables allocated to static storage can stay. */
3540 if (!TREE_STATIC (exp))
3541 push_without_duplicates (exp, refs);
3542 break;
3543
3544 case tcc_expression:
3545 /* This is the pattern built in ada/make_aligning_type. */
3546 if (code == ADDR_EXPR
3547 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3548 {
3549 push_without_duplicates (exp, refs);
3550 break;
3551 }
3552
3553 /* Fall through... */
3554
3555 case tcc_exceptional:
3556 case tcc_unary:
3557 case tcc_binary:
3558 case tcc_comparison:
3559 case tcc_reference:
3560 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3561 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3562 break;
3563
3564 case tcc_vl_exp:
3565 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3566 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3567 break;
3568
3569 default:
3570 gcc_unreachable ();
3571 }
3572 }
3573
3574 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3575 return a tree with all occurrences of references to F in a
3576 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3577 CONST_DECLs. Note that we assume here that EXP contains only
3578 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3579 occurring only in their argument list. */
3580
3581 tree
3582 substitute_in_expr (tree exp, tree f, tree r)
3583 {
3584 enum tree_code code = TREE_CODE (exp);
3585 tree op0, op1, op2, op3;
3586 tree new_tree;
3587
3588 /* We handle TREE_LIST and COMPONENT_REF separately. */
3589 if (code == TREE_LIST)
3590 {
3591 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3592 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3593 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3594 return exp;
3595
3596 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3597 }
3598 else if (code == COMPONENT_REF)
3599 {
3600 tree inner;
3601
3602 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3603 and it is the right field, replace it with R. */
3604 for (inner = TREE_OPERAND (exp, 0);
3605 REFERENCE_CLASS_P (inner);
3606 inner = TREE_OPERAND (inner, 0))
3607 ;
3608
3609 /* The field. */
3610 op1 = TREE_OPERAND (exp, 1);
3611
3612 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3613 return r;
3614
3615 /* If this expression hasn't been completed yet, leave it alone. */
3616 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3617 return exp;
3618
3619 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3620 if (op0 == TREE_OPERAND (exp, 0))
3621 return exp;
3622
3623 new_tree
3624 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3625 }
3626 else
3627 switch (TREE_CODE_CLASS (code))
3628 {
3629 case tcc_constant:
3630 return exp;
3631
3632 case tcc_declaration:
3633 if (exp == f)
3634 return r;
3635 else
3636 return exp;
3637
3638 case tcc_expression:
3639 if (exp == f)
3640 return r;
3641
3642 /* Fall through... */
3643
3644 case tcc_exceptional:
3645 case tcc_unary:
3646 case tcc_binary:
3647 case tcc_comparison:
3648 case tcc_reference:
3649 switch (TREE_CODE_LENGTH (code))
3650 {
3651 case 0:
3652 return exp;
3653
3654 case 1:
3655 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3656 if (op0 == TREE_OPERAND (exp, 0))
3657 return exp;
3658
3659 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3660 break;
3661
3662 case 2:
3663 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3664 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3665
3666 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3667 return exp;
3668
3669 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3670 break;
3671
3672 case 3:
3673 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3674 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3675 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3676
3677 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3678 && op2 == TREE_OPERAND (exp, 2))
3679 return exp;
3680
3681 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3682 break;
3683
3684 case 4:
3685 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3686 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3687 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3688 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3689
3690 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3691 && op2 == TREE_OPERAND (exp, 2)
3692 && op3 == TREE_OPERAND (exp, 3))
3693 return exp;
3694
3695 new_tree
3696 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3697 break;
3698
3699 default:
3700 gcc_unreachable ();
3701 }
3702 break;
3703
3704 case tcc_vl_exp:
3705 {
3706 int i;
3707
3708 new_tree = NULL_TREE;
3709
3710 /* If we are trying to replace F with a constant, inline back
3711 functions which do nothing other than compute a value from
3712 the arguments they are passed. This makes it possible to
3713 fold partially or entirely the replacement expression. */
3714 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3715 {
3716 tree t = maybe_inline_call_in_expr (exp);
3717 if (t)
3718 return SUBSTITUTE_IN_EXPR (t, f, r);
3719 }
3720
3721 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3722 {
3723 tree op = TREE_OPERAND (exp, i);
3724 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3725 if (new_op != op)
3726 {
3727 if (!new_tree)
3728 new_tree = copy_node (exp);
3729 TREE_OPERAND (new_tree, i) = new_op;
3730 }
3731 }
3732
3733 if (new_tree)
3734 {
3735 new_tree = fold (new_tree);
3736 if (TREE_CODE (new_tree) == CALL_EXPR)
3737 process_call_operands (new_tree);
3738 }
3739 else
3740 return exp;
3741 }
3742 break;
3743
3744 default:
3745 gcc_unreachable ();
3746 }
3747
3748 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3749
3750 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3751 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3752
3753 return new_tree;
3754 }
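
/* A usage sketch for substitute_in_expr (illustrative only, kept under
   "#if 0"): given a size expression that refers to field F through a
   PLACEHOLDER_EXPR, as Ada builds for discriminated records, a known
   value of F can be folded in.  The function name is hypothetical.  */
#if 0
static tree
sketch_substitute_field (tree size_expr, tree field, HOST_WIDE_INT value)
{
  tree repl = build_int_cst (TREE_TYPE (field), value);
  return substitute_in_expr (size_expr, field, repl);
}
#endif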
3755
3756 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3757 for it within OBJ, a tree that is an object or a chain of references. */
3758
3759 tree
3760 substitute_placeholder_in_expr (tree exp, tree obj)
3761 {
3762 enum tree_code code = TREE_CODE (exp);
3763 tree op0, op1, op2, op3;
3764 tree new_tree;
3765
3766 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3767 in the chain of OBJ. */
3768 if (code == PLACEHOLDER_EXPR)
3769 {
3770 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3771 tree elt;
3772
3773 for (elt = obj; elt != 0;
3774 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3775 || TREE_CODE (elt) == COND_EXPR)
3776 ? TREE_OPERAND (elt, 1)
3777 : (REFERENCE_CLASS_P (elt)
3778 || UNARY_CLASS_P (elt)
3779 || BINARY_CLASS_P (elt)
3780 || VL_EXP_CLASS_P (elt)
3781 || EXPRESSION_CLASS_P (elt))
3782 ? TREE_OPERAND (elt, 0) : 0))
3783 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3784 return elt;
3785
3786 for (elt = obj; elt != 0;
3787 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3788 || TREE_CODE (elt) == COND_EXPR)
3789 ? TREE_OPERAND (elt, 1)
3790 : (REFERENCE_CLASS_P (elt)
3791 || UNARY_CLASS_P (elt)
3792 || BINARY_CLASS_P (elt)
3793 || VL_EXP_CLASS_P (elt)
3794 || EXPRESSION_CLASS_P (elt))
3795 ? TREE_OPERAND (elt, 0) : 0))
3796 if (POINTER_TYPE_P (TREE_TYPE (elt))
3797 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3798 == need_type))
3799 return fold_build1 (INDIRECT_REF, need_type, elt);
3800
3801 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3802 survives until RTL generation, there will be an error. */
3803 return exp;
3804 }
3805
3806 /* TREE_LIST is special because we need to look at TREE_VALUE
3807 and TREE_CHAIN, not TREE_OPERANDS. */
3808 else if (code == TREE_LIST)
3809 {
3810 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3811 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3812 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3813 return exp;
3814
3815 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3816 }
3817 else
3818 switch (TREE_CODE_CLASS (code))
3819 {
3820 case tcc_constant:
3821 case tcc_declaration:
3822 return exp;
3823
3824 case tcc_exceptional:
3825 case tcc_unary:
3826 case tcc_binary:
3827 case tcc_comparison:
3828 case tcc_expression:
3829 case tcc_reference:
3830 case tcc_statement:
3831 switch (TREE_CODE_LENGTH (code))
3832 {
3833 case 0:
3834 return exp;
3835
3836 case 1:
3837 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3838 if (op0 == TREE_OPERAND (exp, 0))
3839 return exp;
3840
3841 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3842 break;
3843
3844 case 2:
3845 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3846 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3847
3848 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3849 return exp;
3850
3851 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3852 break;
3853
3854 case 3:
3855 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3856 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3857 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3858
3859 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3860 && op2 == TREE_OPERAND (exp, 2))
3861 return exp;
3862
3863 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3864 break;
3865
3866 case 4:
3867 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3868 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3869 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3870 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3871
3872 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3873 && op2 == TREE_OPERAND (exp, 2)
3874 && op3 == TREE_OPERAND (exp, 3))
3875 return exp;
3876
3877 new_tree
3878 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3879 break;
3880
3881 default:
3882 gcc_unreachable ();
3883 }
3884 break;
3885
3886 case tcc_vl_exp:
3887 {
3888 int i;
3889
3890 new_tree = NULL_TREE;
3891
3892 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3893 {
3894 tree op = TREE_OPERAND (exp, i);
3895 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3896 if (new_op != op)
3897 {
3898 if (!new_tree)
3899 new_tree = copy_node (exp);
3900 TREE_OPERAND (new_tree, i) = new_op;
3901 }
3902 }
3903
3904 if (new_tree)
3905 {
3906 new_tree = fold (new_tree);
3907 if (TREE_CODE (new_tree) == CALL_EXPR)
3908 process_call_operands (new_tree);
3909 }
3910 else
3911 return exp;
3912 }
3913 break;
3914
3915 default:
3916 gcc_unreachable ();
3917 }
3918
3919 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3920
3921 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3922 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3923
3924 return new_tree;
3925 }
3926 \f
3927
3928 /* Subroutine of stabilize_reference; this is called for subtrees of
3929 references. Any expression with side-effects must be put in a SAVE_EXPR
3930 to ensure that it is only evaluated once.
3931
3932 We don't put SAVE_EXPR nodes around everything, because assigning very
3933 simple expressions to temporaries causes us to miss good opportunities
3934 for optimizations. Among other things, the opportunity to fold in the
3935 addition of a constant into an addressing mode often gets lost, e.g.
3936 "y[i+1] += x;". In general, we take the approach that we should not make
3937 an assignment unless we are forced into it - i.e., that any non-side effect
3938 operator should be allowed, and that cse should take care of coalescing
3939 multiple utterances of the same expression should that prove fruitful. */
3940
3941 static tree
3942 stabilize_reference_1 (tree e)
3943 {
3944 tree result;
3945 enum tree_code code = TREE_CODE (e);
3946
3947 /* We cannot ignore const expressions because it might be a reference
3948 to a const array whose index contains side-effects. But we can
3949 ignore things that are actually constant or that already have been
3950 handled by this function. */
3951
3952 if (tree_invariant_p (e))
3953 return e;
3954
3955 switch (TREE_CODE_CLASS (code))
3956 {
3957 case tcc_exceptional:
3958 case tcc_type:
3959 case tcc_declaration:
3960 case tcc_comparison:
3961 case tcc_statement:
3962 case tcc_expression:
3963 case tcc_reference:
3964 case tcc_vl_exp:
3965 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3966 so that it will only be evaluated once. */
3967 /* The reference (r) and comparison (<) classes could be handled as
3968 below, but it is generally faster to only evaluate them once. */
3969 if (TREE_SIDE_EFFECTS (e))
3970 return save_expr (e);
3971 return e;
3972
3973 case tcc_constant:
3974 /* Constants need no processing. In fact, we should never reach
3975 here. */
3976 return e;
3977
3978 case tcc_binary:
3979 /* Division is slow and tends to be compiled with jumps,
3980 especially the division by powers of 2 that is often
3981 found inside of an array reference. So do it just once. */
3982 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3983 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3984 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3985 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3986 return save_expr (e);
3987 /* Recursively stabilize each operand. */
3988 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3989 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3990 break;
3991
3992 case tcc_unary:
3993 /* Recursively stabilize each operand. */
3994 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3995 break;
3996
3997 default:
3998 gcc_unreachable ();
3999 }
4000
4001 TREE_TYPE (result) = TREE_TYPE (e);
4002 TREE_READONLY (result) = TREE_READONLY (e);
4003 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4004 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4005
4006 return result;
4007 }
4008
4009 /* Stabilize a reference so that we can use it any number of times
4010 without causing its operands to be evaluated more than once.
4011 Returns the stabilized reference. This works by means of save_expr,
4012 so see the caveats in the comments about save_expr.
4013
4014 Also allows conversion expressions whose operands are references.
4015 Any other kind of expression is returned unchanged. */
4016
4017 tree
4018 stabilize_reference (tree ref)
4019 {
4020 tree result;
4021 enum tree_code code = TREE_CODE (ref);
4022
4023 switch (code)
4024 {
4025 case VAR_DECL:
4026 case PARM_DECL:
4027 case RESULT_DECL:
4028 /* No action is needed in this case. */
4029 return ref;
4030
4031 CASE_CONVERT:
4032 case FLOAT_EXPR:
4033 case FIX_TRUNC_EXPR:
4034 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4035 break;
4036
4037 case INDIRECT_REF:
4038 result = build_nt (INDIRECT_REF,
4039 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4040 break;
4041
4042 case COMPONENT_REF:
4043 result = build_nt (COMPONENT_REF,
4044 stabilize_reference (TREE_OPERAND (ref, 0)),
4045 TREE_OPERAND (ref, 1), NULL_TREE);
4046 break;
4047
4048 case BIT_FIELD_REF:
4049 result = build_nt (BIT_FIELD_REF,
4050 stabilize_reference (TREE_OPERAND (ref, 0)),
4051 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4052 break;
4053
4054 case ARRAY_REF:
4055 result = build_nt (ARRAY_REF,
4056 stabilize_reference (TREE_OPERAND (ref, 0)),
4057 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4058 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4059 break;
4060
4061 case ARRAY_RANGE_REF:
4062 result = build_nt (ARRAY_RANGE_REF,
4063 stabilize_reference (TREE_OPERAND (ref, 0)),
4064 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4065 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4066 break;
4067
4068 case COMPOUND_EXPR:
4069 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4070 it wouldn't be ignored. This matters when dealing with
4071 volatiles. */
4072 return stabilize_reference_1 (ref);
4073
4074 /* If arg isn't a kind of lvalue we recognize, make no change.
4075 Caller should recognize the error for an invalid lvalue. */
4076 default:
4077 return ref;
4078
4079 case ERROR_MARK:
4080 return error_mark_node;
4081 }
4082
4083 TREE_TYPE (result) = TREE_TYPE (ref);
4084 TREE_READONLY (result) = TREE_READONLY (ref);
4085 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4086 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4087
4088 return result;
4089 }
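
/* A minimal usage sketch, kept out of the build: a front end expanding a
   compound assignment such as "a[i++] += 1" into a read and a write of the
   same lvalue could stabilize the reference first, so that the
   side-effecting index is evaluated only once via SAVE_EXPRs.  The helper
   name below is hypothetical.  */
#if 0
static tree
example_compound_assign (tree lhs, tree rhs)
{
  tree stable_lhs = stabilize_reference (lhs);
  tree sum = build2 (PLUS_EXPR, TREE_TYPE (stable_lhs), stable_lhs, rhs);
  return build2 (MODIFY_EXPR, TREE_TYPE (stable_lhs), stable_lhs, sum);
}
#endif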
4090 \f
4091 /* Low-level constructors for expressions. */
4092
4093 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4094 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4095
4096 void
4097 recompute_tree_invariant_for_addr_expr (tree t)
4098 {
4099 tree node;
4100 bool tc = true, se = false;
4101
4102 /* We started out assuming this address is both invariant and constant, and
4103 that it does not have side effects. Now go down any handled components and
4104 see if any of them involve offsets that are non-constant or non-invariant.
4105 Also check for side-effects.
4106
4107 ??? Note that this code makes no attempt to deal with the case where
4108 taking the address of something causes a copy due to misalignment. */
4109
4110 #define UPDATE_FLAGS(NODE) \
4111 do { tree _node = (NODE); \
4112 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4113 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4114
4115 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4116 node = TREE_OPERAND (node, 0))
4117 {
4118 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4119 array reference (probably made temporarily by the G++ front end),
4120 so ignore all the operands. */
4121 if ((TREE_CODE (node) == ARRAY_REF
4122 || TREE_CODE (node) == ARRAY_RANGE_REF)
4123 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4124 {
4125 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4126 if (TREE_OPERAND (node, 2))
4127 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4128 if (TREE_OPERAND (node, 3))
4129 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4130 }
4131 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4132 FIELD_DECL, apparently. The G++ front end can put something else
4133 there, at least temporarily. */
4134 else if (TREE_CODE (node) == COMPONENT_REF
4135 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4136 {
4137 if (TREE_OPERAND (node, 2))
4138 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4139 }
4140 }
4141
4142 node = lang_hooks.expr_to_decl (node, &tc, &se);
4143
4144 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4145 the address, since &(*a)->b is a form of addition. If it's a constant, the
4146 address is constant too. If it's a decl, its address is constant if the
4147 decl is static. Everything else is not constant and, furthermore,
4148 taking the address of a volatile variable is not volatile. */
4149 if (TREE_CODE (node) == INDIRECT_REF
4150 || TREE_CODE (node) == MEM_REF)
4151 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4152 else if (CONSTANT_CLASS_P (node))
4153 ;
4154 else if (DECL_P (node))
4155 tc &= (staticp (node) != NULL_TREE);
4156 else
4157 {
4158 tc = false;
4159 se |= TREE_SIDE_EFFECTS (node);
4160 }
4161
4162
4163 TREE_CONSTANT (t) = tc;
4164 TREE_SIDE_EFFECTS (t) = se;
4165 #undef UPDATE_FLAGS
4166 }
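
/* A minimal sketch, kept out of the build: if the operand of an existing
   ADDR_EXPR is rewritten in place, its cached TREE_CONSTANT and
   TREE_SIDE_EFFECTS bits can become stale; this shows the expected repair
   step.  The helper name is hypothetical.  */
#if 0
static void
example_retarget_address (tree addr_expr, tree new_base)
{
  TREE_OPERAND (addr_expr, 0) = new_base;
  recompute_tree_invariant_for_addr_expr (addr_expr);
}
#endif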
4167
4168 /* Build an expression of code CODE, data type TYPE, and operands as
4169 specified. Expressions and reference nodes can be created this way.
4170 Constants, decls, types and misc nodes cannot be.
4171
4172 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4173 enough for all extant tree codes. */
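
/* A hedged usage sketch, kept out of the build: constructing the GENERIC
   tree for "-(x + 1)" of type integer_type_node with the fixed-arity
   constructors defined below.  The helper name is hypothetical.  */
#if 0
static tree
example_build_negated_sum (tree x)
{
  tree one = build_int_cst (integer_type_node, 1);
  tree sum = build2 (PLUS_EXPR, integer_type_node, x, one);
  return build1 (NEGATE_EXPR, integer_type_node, sum);
}
#endif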
4174
4175 tree
4176 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4177 {
4178 tree t;
4179
4180 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4181
4182 t = make_node_stat (code PASS_MEM_STAT);
4183 TREE_TYPE (t) = tt;
4184
4185 return t;
4186 }
4187
4188 tree
4189 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4190 {
4191 int length = sizeof (struct tree_exp);
4192 tree t;
4193
4194 record_node_allocation_statistics (code, length);
4195
4196 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4197
4198 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4199
4200 memset (t, 0, sizeof (struct tree_common));
4201
4202 TREE_SET_CODE (t, code);
4203
4204 TREE_TYPE (t) = type;
4205 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4206 TREE_OPERAND (t, 0) = node;
4207 if (node && !TYPE_P (node))
4208 {
4209 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4210 TREE_READONLY (t) = TREE_READONLY (node);
4211 }
4212
4213 if (TREE_CODE_CLASS (code) == tcc_statement)
4214 TREE_SIDE_EFFECTS (t) = 1;
4215 else switch (code)
4216 {
4217 case VA_ARG_EXPR:
4218 /* All of these have side-effects, no matter what their
4219 operands are. */
4220 TREE_SIDE_EFFECTS (t) = 1;
4221 TREE_READONLY (t) = 0;
4222 break;
4223
4224 case INDIRECT_REF:
4225 /* Whether a dereference is readonly has nothing to do with whether
4226 its operand is readonly. */
4227 TREE_READONLY (t) = 0;
4228 break;
4229
4230 case ADDR_EXPR:
4231 if (node)
4232 recompute_tree_invariant_for_addr_expr (t);
4233 break;
4234
4235 default:
4236 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4237 && node && !TYPE_P (node)
4238 && TREE_CONSTANT (node))
4239 TREE_CONSTANT (t) = 1;
4240 if (TREE_CODE_CLASS (code) == tcc_reference
4241 && node && TREE_THIS_VOLATILE (node))
4242 TREE_THIS_VOLATILE (t) = 1;
4243 break;
4244 }
4245
4246 return t;
4247 }
4248
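/* Helper used by the buildN functions below: store ARG#N as operand N of T
   and accumulate its TREE_SIDE_EFFECTS, TREE_READONLY and TREE_CONSTANT
   flags into the local side_effects, read_only and constant variables.  */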
4249 #define PROCESS_ARG(N) \
4250 do { \
4251 TREE_OPERAND (t, N) = arg##N; \
4252 if (arg##N && !TYPE_P (arg##N)) \
4253 { \
4254 if (TREE_SIDE_EFFECTS (arg##N)) \
4255 side_effects = 1; \
4256 if (!TREE_READONLY (arg##N) \
4257 && !CONSTANT_CLASS_P (arg##N)) \
4258 (void) (read_only = 0); \
4259 if (!TREE_CONSTANT (arg##N)) \
4260 (void) (constant = 0); \
4261 } \
4262 } while (0)
4263
4264 tree
4265 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4266 {
4267 bool constant, read_only, side_effects;
4268 tree t;
4269
4270 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4271
4272 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4273 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4274 /* When sizetype precision doesn't match that of pointers
4275 we need to be able to build explicit extensions or truncations
4276 of the offset argument. */
4277 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4278 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4279 && TREE_CODE (arg1) == INTEGER_CST);
4280
4281 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4282 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4283 && ptrofftype_p (TREE_TYPE (arg1)));
4284
4285 t = make_node_stat (code PASS_MEM_STAT);
4286 TREE_TYPE (t) = tt;
4287
4288 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4289 result based on those same flags for the arguments. But if the
4290 arguments aren't really even `tree' expressions, we shouldn't be trying
4291 to do this. */
4292
4293 /* Expressions without side effects may be constant if their
4294 arguments are as well. */
4295 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4296 || TREE_CODE_CLASS (code) == tcc_binary);
4297 read_only = 1;
4298 side_effects = TREE_SIDE_EFFECTS (t);
4299
4300 PROCESS_ARG (0);
4301 PROCESS_ARG (1);
4302
4303 TREE_READONLY (t) = read_only;
4304 TREE_CONSTANT (t) = constant;
4305 TREE_SIDE_EFFECTS (t) = side_effects;
4306 TREE_THIS_VOLATILE (t)
4307 = (TREE_CODE_CLASS (code) == tcc_reference
4308 && arg0 && TREE_THIS_VOLATILE (arg0));
4309
4310 return t;
4311 }
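
/* A minimal sketch, kept out of the build: pointer arithmetic in GENERIC is
   expressed with POINTER_PLUS_EXPR and an offset of ptrofftype (for
   example sizetype), which is what the assertion in build2_stat above
   enforces.  The helper name is hypothetical.  */
#if 0
static tree
example_pointer_advance (tree ptr, HOST_WIDE_INT byte_offset)
{
  tree off = build_int_cst (sizetype, byte_offset);
  return build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);
}
#endif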
4312
4313
4314 tree
4315 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4316 tree arg2 MEM_STAT_DECL)
4317 {
4318 bool constant, read_only, side_effects;
4319 tree t;
4320
4321 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4322 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4323
4324 t = make_node_stat (code PASS_MEM_STAT);
4325 TREE_TYPE (t) = tt;
4326
4327 read_only = 1;
4328
4329 /* As a special exception, if COND_EXPR has NULL branches, we
4330 assume that it is a gimple statement and always consider
4331 it to have side effects. */
4332 if (code == COND_EXPR
4333 && tt == void_type_node
4334 && arg1 == NULL_TREE
4335 && arg2 == NULL_TREE)
4336 side_effects = true;
4337 else
4338 side_effects = TREE_SIDE_EFFECTS (t);
4339
4340 PROCESS_ARG (0);
4341 PROCESS_ARG (1);
4342 PROCESS_ARG (2);
4343
4344 if (code == COND_EXPR)
4345 TREE_READONLY (t) = read_only;
4346
4347 TREE_SIDE_EFFECTS (t) = side_effects;
4348 TREE_THIS_VOLATILE (t)
4349 = (TREE_CODE_CLASS (code) == tcc_reference
4350 && arg0 && TREE_THIS_VOLATILE (arg0));
4351
4352 return t;
4353 }
4354
4355 tree
4356 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4357 tree arg2, tree arg3 MEM_STAT_DECL)
4358 {
4359 bool constant, read_only, side_effects;
4360 tree t;
4361
4362 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4363
4364 t = make_node_stat (code PASS_MEM_STAT);
4365 TREE_TYPE (t) = tt;
4366
4367 side_effects = TREE_SIDE_EFFECTS (t);
4368
4369 PROCESS_ARG (0);
4370 PROCESS_ARG (1);
4371 PROCESS_ARG (2);
4372 PROCESS_ARG (3);
4373
4374 TREE_SIDE_EFFECTS (t) = side_effects;
4375 TREE_THIS_VOLATILE (t)
4376 = (TREE_CODE_CLASS (code) == tcc_reference
4377 && arg0 && TREE_THIS_VOLATILE (arg0));
4378
4379 return t;
4380 }
4381
4382 tree
4383 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4384 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4385 {
4386 bool constant, read_only, side_effects;
4387 tree t;
4388
4389 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4390
4391 t = make_node_stat (code PASS_MEM_STAT);
4392 TREE_TYPE (t) = tt;
4393
4394 side_effects = TREE_SIDE_EFFECTS (t);
4395
4396 PROCESS_ARG (0);
4397 PROCESS_ARG (1);
4398 PROCESS_ARG (2);
4399 PROCESS_ARG (3);
4400 PROCESS_ARG (4);
4401
4402 TREE_SIDE_EFFECTS (t) = side_effects;
4403 TREE_THIS_VOLATILE (t)
4404 = (TREE_CODE_CLASS (code) == tcc_reference
4405 && arg0 && TREE_THIS_VOLATILE (arg0));
4406
4407 return t;
4408 }
4409
4410 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4411 on the pointer PTR. */
4412
4413 tree
4414 build_simple_mem_ref_loc (location_t loc, tree ptr)
4415 {
4416 HOST_WIDE_INT offset = 0;
4417 tree ptype = TREE_TYPE (ptr);
4418 tree tem;
4419 /* For convenience allow addresses that collapse to a simple base
4420 and offset. */
4421 if (TREE_CODE (ptr) == ADDR_EXPR
4422 && (handled_component_p (TREE_OPERAND (ptr, 0))
4423 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4424 {
4425 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4426 gcc_assert (ptr);
4427 ptr = build_fold_addr_expr (ptr);
4428 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4429 }
4430 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4431 ptr, build_int_cst (ptype, offset));
4432 SET_EXPR_LOCATION (tem, loc);
4433 return tem;
4434 }
4435
4436 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4437
4438 offset_int
4439 mem_ref_offset (const_tree t)
4440 {
4441 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4442 }
4443
4444 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4445 offsetted by OFFSET units. */
4446
4447 tree
4448 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4449 {
4450 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4451 build_fold_addr_expr (base),
4452 build_int_cst (ptr_type_node, offset));
4453 tree addr = build1 (ADDR_EXPR, type, ref);
4454 recompute_tree_invariant_for_addr_expr (addr);
4455 return addr;
4456 }
4457
4458 /* Similar to the build functions above, except we don't specify the
4459 TREE_TYPE and we leave the TREE_SIDE_EFFECTS as 0.
4460 It is permissible for arguments to be null,
4461 or even garbage if their values do not matter. */
4462
4463 tree
4464 build_nt (enum tree_code code, ...)
4465 {
4466 tree t;
4467 int length;
4468 int i;
4469 va_list p;
4470
4471 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4472
4473 va_start (p, code);
4474
4475 t = make_node (code);
4476 length = TREE_CODE_LENGTH (code);
4477
4478 for (i = 0; i < length; i++)
4479 TREE_OPERAND (t, i) = va_arg (p, tree);
4480
4481 va_end (p);
4482 return t;
4483 }
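
/* A hedged sketch, kept out of the build: build_nt is used, as in
   stabilize_reference above, to create "skeleton" nodes whose TREE_TYPE
   and flags are filled in afterwards.  The helper name is hypothetical.  */
#if 0
static tree
example_untyped_component_ref (tree object, tree field)
{
  tree ref = build_nt (COMPONENT_REF, object, field, NULL_TREE);
  TREE_TYPE (ref) = TREE_TYPE (field);
  return ref;
}
#endif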
4484
4485 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4486 tree vec. */
4487
4488 tree
4489 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4490 {
4491 tree ret, t;
4492 unsigned int ix;
4493
4494 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4495 CALL_EXPR_FN (ret) = fn;
4496 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4497 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4498 CALL_EXPR_ARG (ret, ix) = t;
4499 return ret;
4500 }
4501 \f
4502 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4503 We do NOT enter this node in any sort of symbol table.
4504
4505 LOC is the location of the decl.
4506
4507 layout_decl is used to set up the decl's storage layout.
4508 Other slots are initialized to 0 or null pointers. */
4509
4510 tree
4511 build_decl_stat (location_t loc, enum tree_code code, tree name,
4512 tree type MEM_STAT_DECL)
4513 {
4514 tree t;
4515
4516 t = make_node_stat (code PASS_MEM_STAT);
4517 DECL_SOURCE_LOCATION (t) = loc;
4518
4519 /* if (type == error_mark_node)
4520 type = integer_type_node; */
4521 /* That is not done, deliberately, so that having error_mark_node
4522 as the type can suppress useless errors in the use of this variable. */
4523
4524 DECL_NAME (t) = name;
4525 TREE_TYPE (t) = type;
4526
4527 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4528 layout_decl (t, 0);
4529
4530 return t;
4531 }
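
/* A minimal sketch, kept out of the build: creating an artificial "int"
   variable; layout_decl has already been run by build_decl for VAR_DECLs,
   so DECL_SIZE and DECL_MODE are filled in.  The helper and variable names
   are hypothetical.  */
#if 0
static tree
example_make_int_var (location_t loc)
{
  tree var = build_decl (loc, VAR_DECL,
			 get_identifier ("example_tmp"), integer_type_node);
  DECL_ARTIFICIAL (var) = 1;
  return var;
}
#endif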
4532
4533 /* Builds and returns function declaration with NAME and TYPE. */
4534
4535 tree
4536 build_fn_decl (const char *name, tree type)
4537 {
4538 tree id = get_identifier (name);
4539 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4540
4541 DECL_EXTERNAL (decl) = 1;
4542 TREE_PUBLIC (decl) = 1;
4543 DECL_ARTIFICIAL (decl) = 1;
4544 TREE_NOTHROW (decl) = 1;
4545
4546 return decl;
4547 }
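
/* A hedged usage sketch, kept out of the build: declaring an external
   "void example_runtime_hook (void)" that generated code could call.  The
   helper and the runtime function name are hypothetical.  */
#if 0
static tree
example_declare_runtime_hook (void)
{
  tree fntype = build_function_type_list (void_type_node, NULL_TREE);
  return build_fn_decl ("example_runtime_hook", fntype);
}
#endif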
4548
4549 vec<tree, va_gc> *all_translation_units;
4550
4551 /* Builds a new translation-unit decl with name NAME, queues it in the
4552 global list of translation-unit decls and returns it. */
4553
4554 tree
4555 build_translation_unit_decl (tree name)
4556 {
4557 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4558 name, NULL_TREE);
4559 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4560 vec_safe_push (all_translation_units, tu);
4561 return tu;
4562 }
4563
4564 \f
4565 /* BLOCK nodes are used to represent the structure of binding contours
4566 and declarations, once those contours have been exited and their contents
4567 compiled. This information is used for outputting debugging info. */
4568
4569 tree
4570 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4571 {
4572 tree block = make_node (BLOCK);
4573
4574 BLOCK_VARS (block) = vars;
4575 BLOCK_SUBBLOCKS (block) = subblocks;
4576 BLOCK_SUPERCONTEXT (block) = supercontext;
4577 BLOCK_CHAIN (block) = chain;
4578 return block;
4579 }
4580
4581 \f
4582 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4583
4584 LOC is the location to use in tree T. */
4585
4586 void
4587 protected_set_expr_location (tree t, location_t loc)
4588 {
4589 if (CAN_HAVE_LOCATION_P (t))
4590 SET_EXPR_LOCATION (t, loc);
4591 }
4592 \f
4593 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4594 is ATTRIBUTE. */
4595
4596 tree
4597 build_decl_attribute_variant (tree ddecl, tree attribute)
4598 {
4599 DECL_ATTRIBUTES (ddecl) = attribute;
4600 return ddecl;
4601 }
4602
4603 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4604 is ATTRIBUTE and its qualifiers are QUALS.
4605
4606 Record such modified types already made so we don't make duplicates. */
4607
4608 tree
4609 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4610 {
4611 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4612 {
4613 inchash::hash hstate;
4614 tree ntype;
4615 int i;
4616 tree t;
4617 enum tree_code code = TREE_CODE (ttype);
4618
4619 /* Building a distinct copy of a tagged type is inappropriate; it
4620 causes breakage in code that expects there to be a one-to-one
4621 relationship between a struct and its fields.
4622 build_duplicate_type is another solution (as used in
4623 handle_transparent_union_attribute), but that doesn't play well
4624 with the stronger C++ type identity model. */
4625 if (TREE_CODE (ttype) == RECORD_TYPE
4626 || TREE_CODE (ttype) == UNION_TYPE
4627 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4628 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4629 {
4630 warning (OPT_Wattributes,
4631 "ignoring attributes applied to %qT after definition",
4632 TYPE_MAIN_VARIANT (ttype));
4633 return build_qualified_type (ttype, quals);
4634 }
4635
4636 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4637 ntype = build_distinct_type_copy (ttype);
4638
4639 TYPE_ATTRIBUTES (ntype) = attribute;
4640
4641 hstate.add_int (code);
4642 if (TREE_TYPE (ntype))
4643 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4644 attribute_hash_list (attribute, hstate);
4645
4646 switch (TREE_CODE (ntype))
4647 {
4648 case FUNCTION_TYPE:
4649 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4650 break;
4651 case ARRAY_TYPE:
4652 if (TYPE_DOMAIN (ntype))
4653 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4654 break;
4655 case INTEGER_TYPE:
4656 t = TYPE_MAX_VALUE (ntype);
4657 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4658 hstate.add_object (TREE_INT_CST_ELT (t, i));
4659 break;
4660 case REAL_TYPE:
4661 case FIXED_POINT_TYPE:
4662 {
4663 unsigned int precision = TYPE_PRECISION (ntype);
4664 hstate.add_object (precision);
4665 }
4666 break;
4667 default:
4668 break;
4669 }
4670
4671 ntype = type_hash_canon (hstate.end(), ntype);
4672
4673 /* If the target-dependent attributes make NTYPE different from
4674 its canonical type, we will need to use structural equality
4675 checks for this type. */
4676 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4677 || !comp_type_attributes (ntype, ttype))
4678 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4679 else if (TYPE_CANONICAL (ntype) == ntype)
4680 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4681
4682 ttype = build_qualified_type (ntype, quals);
4683 }
4684 else if (TYPE_QUALS (ttype) != quals)
4685 ttype = build_qualified_type (ttype, quals);
4686
4687 return ttype;
4688 }
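
/* A minimal sketch, kept out of the build: attaching a no-argument
   attribute NAME to TYPE; an existing variant is reused if one was already
   recorded by the hashing done above.  The helper name and the attribute
   are hypothetical.  */
#if 0
static tree
example_add_type_attribute (tree type, const char *name)
{
  tree attr = tree_cons (get_identifier (name), NULL_TREE,
			 TYPE_ATTRIBUTES (type));
  return build_type_attribute_variant (type, attr);
}
#endif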
4689
4690 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4691 the same. */
4692
4693 static bool
4694 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4695 {
4696 tree cl1, cl2;
4697 for (cl1 = clauses1, cl2 = clauses2;
4698 cl1 && cl2;
4699 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4700 {
4701 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4702 return false;
4703 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4704 {
4705 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4706 OMP_CLAUSE_DECL (cl2)) != 1)
4707 return false;
4708 }
4709 switch (OMP_CLAUSE_CODE (cl1))
4710 {
4711 case OMP_CLAUSE_ALIGNED:
4712 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4713 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4714 return false;
4715 break;
4716 case OMP_CLAUSE_LINEAR:
4717 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4718 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4719 return false;
4720 break;
4721 case OMP_CLAUSE_SIMDLEN:
4722 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4723 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4724 return false;
4725 default:
4726 break;
4727 }
4728 }
4729 return true;
4730 }
4731
4732 /* Compare two constructor-element-type constant lists. Return true if the
4733 lists are known to be equal; otherwise return false. */
4734
4735 static bool
4736 simple_cst_list_equal (const_tree l1, const_tree l2)
4737 {
4738 while (l1 != NULL_TREE && l2 != NULL_TREE)
4739 {
4740 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4741 return false;
4742
4743 l1 = TREE_CHAIN (l1);
4744 l2 = TREE_CHAIN (l2);
4745 }
4746
4747 return l1 == l2;
4748 }
4749
4750 /* Compare two attributes for their value identity. Return true if the
4751 attribute values are known to be equal; otherwise return false.
4752 */
4753
4754 static bool
4755 attribute_value_equal (const_tree attr1, const_tree attr2)
4756 {
4757 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4758 return true;
4759
4760 if (TREE_VALUE (attr1) != NULL_TREE
4761 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4762 && TREE_VALUE (attr2) != NULL
4763 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4764 return (simple_cst_list_equal (TREE_VALUE (attr1),
4765 TREE_VALUE (attr2)) == 1);
4766
4767 if ((flag_openmp || flag_openmp_simd)
4768 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4769 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4770 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4771 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4772 TREE_VALUE (attr2));
4773
4774 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4775 }
4776
4777 /* Return 0 if the attributes for two types are incompatible, 1 if they
4778 are compatible, and 2 if they are nearly compatible (which causes a
4779 warning to be generated). */
4780 int
4781 comp_type_attributes (const_tree type1, const_tree type2)
4782 {
4783 const_tree a1 = TYPE_ATTRIBUTES (type1);
4784 const_tree a2 = TYPE_ATTRIBUTES (type2);
4785 const_tree a;
4786
4787 if (a1 == a2)
4788 return 1;
4789 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4790 {
4791 const struct attribute_spec *as;
4792 const_tree attr;
4793
4794 as = lookup_attribute_spec (get_attribute_name (a));
4795 if (!as || as->affects_type_identity == false)
4796 continue;
4797
4798 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4799 if (!attr || !attribute_value_equal (a, attr))
4800 break;
4801 }
4802 if (!a)
4803 {
4804 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4805 {
4806 const struct attribute_spec *as;
4807
4808 as = lookup_attribute_spec (get_attribute_name (a));
4809 if (!as || as->affects_type_identity == false)
4810 continue;
4811
4812 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4813 break;
4814 /* We don't need to compare trees again, as we did this
4815 already in the first loop. */
4816 }
4817 /* All attributes that affect type identity are equal, so
4818 there is no need to call the target hook for comparison. */
4819 if (!a)
4820 return 1;
4821 }
4822 /* As some type combinations - like default calling-convention - might
4823 be compatible, we have to call the target hook to get the final result. */
4824 return targetm.comp_type_attributes (type1, type2);
4825 }
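
/* A hedged sketch, kept out of the build: a caller typically interprets the
   tri-state result of comp_type_attributes as shown; 0 means incompatible
   and 2 means compatible but worth a warning.  The helper name is
   hypothetical.  */
#if 0
static bool
example_type_attrs_compatible_p (tree t1, tree t2, bool *warn_p)
{
  int cmp = comp_type_attributes (t1, t2);
  *warn_p = (cmp == 2);
  return cmp != 0;
}
#endif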
4826
4827 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4828 is ATTRIBUTE.
4829
4830 Record such modified types already made so we don't make duplicates. */
4831
4832 tree
4833 build_type_attribute_variant (tree ttype, tree attribute)
4834 {
4835 return build_type_attribute_qual_variant (ttype, attribute,
4836 TYPE_QUALS (ttype));
4837 }
4838
4839
4840 /* Reset the expression *EXPR_P, a size or position.
4841
4842 ??? We could reset all non-constant sizes or positions. But it's cheap
4843 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4844
4845 We need to reset self-referential sizes or positions because they cannot
4846 be gimplified and thus can contain a CALL_EXPR after the gimplification
4847 is finished, which will run afoul of LTO streaming. And they need to be
4848 reset to something essentially dummy but not constant, so as to preserve
4849 the properties of the object they are attached to. */
4850
4851 static inline void
4852 free_lang_data_in_one_sizepos (tree *expr_p)
4853 {
4854 tree expr = *expr_p;
4855 if (CONTAINS_PLACEHOLDER_P (expr))
4856 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4857 }
4858
4859
4860 /* Reset all the fields in a binfo node BINFO. We only keep
4861 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4862
4863 static void
4864 free_lang_data_in_binfo (tree binfo)
4865 {
4866 unsigned i;
4867 tree t;
4868
4869 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4870
4871 BINFO_VIRTUALS (binfo) = NULL_TREE;
4872 BINFO_BASE_ACCESSES (binfo) = NULL;
4873 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4874 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4875
4876 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4877 free_lang_data_in_binfo (t);
4878 }
4879
4880
4881 /* Reset all language specific information still present in TYPE. */
4882
4883 static void
4884 free_lang_data_in_type (tree type)
4885 {
4886 gcc_assert (TYPE_P (type));
4887
4888 /* Give the FE a chance to remove its own data first. */
4889 lang_hooks.free_lang_data (type);
4890
4891 TREE_LANG_FLAG_0 (type) = 0;
4892 TREE_LANG_FLAG_1 (type) = 0;
4893 TREE_LANG_FLAG_2 (type) = 0;
4894 TREE_LANG_FLAG_3 (type) = 0;
4895 TREE_LANG_FLAG_4 (type) = 0;
4896 TREE_LANG_FLAG_5 (type) = 0;
4897 TREE_LANG_FLAG_6 (type) = 0;
4898
4899 if (TREE_CODE (type) == FUNCTION_TYPE)
4900 {
4901 /* Remove the const and volatile qualifiers from arguments. The
4902 C++ front end removes them, but the C front end does not,
4903 leading to false ODR violation errors when merging two
4904 instances of the same function signature compiled by
4905 different front ends. */
4906 tree p;
4907
4908 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4909 {
4910 tree arg_type = TREE_VALUE (p);
4911
4912 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4913 {
4914 int quals = TYPE_QUALS (arg_type)
4915 & ~TYPE_QUAL_CONST
4916 & ~TYPE_QUAL_VOLATILE;
4917 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4918 free_lang_data_in_type (TREE_VALUE (p));
4919 }
4920 }
4921 }
4922
4923 /* Remove members that are neither FIELD_DECLs nor TYPE_DECLs from the
4924 field list of an aggregate. Such members occur in C++. */
4925 if (RECORD_OR_UNION_TYPE_P (type))
4926 {
4927 tree prev, member;
4928
4929 /* Note that TYPE_FIELDS can be shared across distinct
4930 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4931 to be removed, we cannot set its TREE_CHAIN to NULL.
4932 Otherwise, we would not be able to find all the other fields
4933 in the other instances of this TREE_TYPE.
4934
4935 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4936 prev = NULL_TREE;
4937 member = TYPE_FIELDS (type);
4938 while (member)
4939 {
4940 if (TREE_CODE (member) == FIELD_DECL
4941 || TREE_CODE (member) == TYPE_DECL)
4942 {
4943 if (prev)
4944 TREE_CHAIN (prev) = member;
4945 else
4946 TYPE_FIELDS (type) = member;
4947 prev = member;
4948 }
4949
4950 member = TREE_CHAIN (member);
4951 }
4952
4953 if (prev)
4954 TREE_CHAIN (prev) = NULL_TREE;
4955 else
4956 TYPE_FIELDS (type) = NULL_TREE;
4957
4958 TYPE_METHODS (type) = NULL_TREE;
4959 if (TYPE_BINFO (type))
4960 free_lang_data_in_binfo (TYPE_BINFO (type));
4961 }
4962 else
4963 {
4964 /* For non-aggregate types, clear out the language slot (which
4965 overloads TYPE_BINFO). */
4966 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4967
4968 if (INTEGRAL_TYPE_P (type)
4969 || SCALAR_FLOAT_TYPE_P (type)
4970 || FIXED_POINT_TYPE_P (type))
4971 {
4972 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4973 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4974 }
4975 }
4976
4977 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4978 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4979
4980 if (TYPE_CONTEXT (type)
4981 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4982 {
4983 tree ctx = TYPE_CONTEXT (type);
4984 do
4985 {
4986 ctx = BLOCK_SUPERCONTEXT (ctx);
4987 }
4988 while (ctx && TREE_CODE (ctx) == BLOCK);
4989 TYPE_CONTEXT (type) = ctx;
4990 }
4991 }
4992
4993
4994 /* Return true if DECL may need an assembler name to be set. */
4995
4996 static inline bool
4997 need_assembler_name_p (tree decl)
4998 {
4999 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
5000 merging. */
5001 if (flag_lto_odr_type_mering
5002 && TREE_CODE (decl) == TYPE_DECL
5003 && DECL_NAME (decl)
5004 && decl == TYPE_NAME (TREE_TYPE (decl))
5005 && !is_lang_specific (TREE_TYPE (decl))
5006 && AGGREGATE_TYPE_P (TREE_TYPE (decl))
5007 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
5008 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
5009 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5010 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5011 if (TREE_CODE (decl) != FUNCTION_DECL
5012 && TREE_CODE (decl) != VAR_DECL)
5013 return false;
5014
5015 /* If DECL already has its assembler name set, it does not need a
5016 new one. */
5017 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5018 || DECL_ASSEMBLER_NAME_SET_P (decl))
5019 return false;
5020
5021 /* Abstract decls do not need an assembler name. */
5022 if (DECL_ABSTRACT_P (decl))
5023 return false;
5024
5025 /* For VAR_DECLs, only static, public and external symbols need an
5026 assembler name. */
5027 if (TREE_CODE (decl) == VAR_DECL
5028 && !TREE_STATIC (decl)
5029 && !TREE_PUBLIC (decl)
5030 && !DECL_EXTERNAL (decl))
5031 return false;
5032
5033 if (TREE_CODE (decl) == FUNCTION_DECL)
5034 {
5035 /* Do not set assembler name on builtins. Allow RTL expansion to
5036 decide whether to expand inline or via a regular call. */
5037 if (DECL_BUILT_IN (decl)
5038 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5039 return false;
5040
5041 /* Functions represented in the callgraph need an assembler name. */
5042 if (cgraph_node::get (decl) != NULL)
5043 return true;
5044
5045 /* Unused and not public functions don't need an assembler name. */
5046 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5047 return false;
5048 }
5049
5050 return true;
5051 }
5052
5053
5054 /* Reset all language specific information still present in symbol
5055 DECL. */
5056
5057 static void
5058 free_lang_data_in_decl (tree decl)
5059 {
5060 gcc_assert (DECL_P (decl));
5061
5062 /* Give the FE a chance to remove its own data first. */
5063 lang_hooks.free_lang_data (decl);
5064
5065 TREE_LANG_FLAG_0 (decl) = 0;
5066 TREE_LANG_FLAG_1 (decl) = 0;
5067 TREE_LANG_FLAG_2 (decl) = 0;
5068 TREE_LANG_FLAG_3 (decl) = 0;
5069 TREE_LANG_FLAG_4 (decl) = 0;
5070 TREE_LANG_FLAG_5 (decl) = 0;
5071 TREE_LANG_FLAG_6 (decl) = 0;
5072
5073 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5074 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5075 if (TREE_CODE (decl) == FIELD_DECL)
5076 {
5077 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5078 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5079 DECL_QUALIFIER (decl) = NULL_TREE;
5080 }
5081
5082 if (TREE_CODE (decl) == FUNCTION_DECL)
5083 {
5084 struct cgraph_node *node;
5085 if (!(node = cgraph_node::get (decl))
5086 || (!node->definition && !node->clones))
5087 {
5088 if (node)
5089 node->release_body ();
5090 else
5091 {
5092 release_function_body (decl);
5093 DECL_ARGUMENTS (decl) = NULL;
5094 DECL_RESULT (decl) = NULL;
5095 DECL_INITIAL (decl) = error_mark_node;
5096 }
5097 }
5098 if (gimple_has_body_p (decl))
5099 {
5100 tree t;
5101
5102 /* If DECL has a gimple body, then the context for its
5103 arguments must be DECL. Otherwise, it doesn't really
5104 matter, as we will not be emitting any code for DECL. In
5105 general, there may be other instances of DECL created by
5106 the front end and since PARM_DECLs are generally shared,
5107 their DECL_CONTEXT changes as the replicas of DECL are
5108 created. The only time where DECL_CONTEXT is important
5109 is for the FUNCTION_DECLs that have a gimple body (since
5110 the PARM_DECL will be used in the function's body). */
5111 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5112 DECL_CONTEXT (t) = decl;
5113 }
5114
5115 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5116 At this point, it is not needed anymore. */
5117 DECL_SAVED_TREE (decl) = NULL_TREE;
5118
5119 /* Clear the abstract origin if it refers to a method. Otherwise
5120 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5121 origin will not be output correctly. */
5122 if (DECL_ABSTRACT_ORIGIN (decl)
5123 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5124 && RECORD_OR_UNION_TYPE_P
5125 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5126 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5127
5128 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5129 DECL_VINDEX referring to itself into a vtable slot number as it
5130 should. Happens with functions that are copied and then forgotten
5131 about. Just clear it, it won't matter anymore. */
5132 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5133 DECL_VINDEX (decl) = NULL_TREE;
5134 }
5135 else if (TREE_CODE (decl) == VAR_DECL)
5136 {
5137 if ((DECL_EXTERNAL (decl)
5138 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5139 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5140 DECL_INITIAL (decl) = NULL_TREE;
5141 }
5142 else if (TREE_CODE (decl) == TYPE_DECL
5143 || TREE_CODE (decl) == FIELD_DECL)
5144 DECL_INITIAL (decl) = NULL_TREE;
5145 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5146 && DECL_INITIAL (decl)
5147 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5148 {
5149 /* Strip builtins from the translation-unit BLOCK. We still have targets
5150 without builtin_decl_explicit support and also builtins are shared
5151 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5152 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5153 while (*nextp)
5154 {
5155 tree var = *nextp;
5156 if (TREE_CODE (var) == FUNCTION_DECL
5157 && DECL_BUILT_IN (var))
5158 *nextp = TREE_CHAIN (var);
5159 else
5160 nextp = &TREE_CHAIN (var);
5161 }
5162 }
5163 }
5164
5165
5166 /* Data used when collecting DECLs and TYPEs for language data removal. */
5167
5168 struct free_lang_data_d
5169 {
5170 /* Worklist to avoid excessive recursion. */
5171 vec<tree> worklist;
5172
5173 /* Set of traversed objects. Used to avoid duplicate visits. */
5174 hash_set<tree> *pset;
5175
5176 /* Array of symbols to process with free_lang_data_in_decl. */
5177 vec<tree> decls;
5178
5179 /* Array of types to process with free_lang_data_in_type. */
5180 vec<tree> types;
5181 };
5182
5183
5184 /* Save all language fields needed to generate proper debug information
5185 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5186
5187 static void
5188 save_debug_info_for_decl (tree t)
5189 {
5190 /*struct saved_debug_info_d *sdi;*/
5191
5192 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5193
5194 /* FIXME. Partial implementation for saving debug info removed. */
5195 }
5196
5197
5198 /* Save all language fields needed to generate proper debug information
5199 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5200
5201 static void
5202 save_debug_info_for_type (tree t)
5203 {
5204 /*struct saved_debug_info_d *sdi;*/
5205
5206 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5207
5208 /* FIXME. Partial implementation for saving debug info removed. */
5209 }
5210
5211
5212 /* Add type or decl T to one of the lists of tree nodes that need their
5213 language data removed. The lists are held inside FLD. */
5214
5215 static void
5216 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5217 {
5218 if (DECL_P (t))
5219 {
5220 fld->decls.safe_push (t);
5221 if (debug_info_level > DINFO_LEVEL_TERSE)
5222 save_debug_info_for_decl (t);
5223 }
5224 else if (TYPE_P (t))
5225 {
5226 fld->types.safe_push (t);
5227 if (debug_info_level > DINFO_LEVEL_TERSE)
5228 save_debug_info_for_type (t);
5229 }
5230 else
5231 gcc_unreachable ();
5232 }
5233
5234 /* Push tree node T into FLD->WORKLIST. */
5235
5236 static inline void
5237 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5238 {
5239 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5240 fld->worklist.safe_push ((t));
5241 }
5242
5243
5244 /* Operand callback helper for free_lang_data_in_node. *TP is the
5245 subtree operand being considered. */
5246
5247 static tree
5248 find_decls_types_r (tree *tp, int *ws, void *data)
5249 {
5250 tree t = *tp;
5251 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5252
5253 if (TREE_CODE (t) == TREE_LIST)
5254 return NULL_TREE;
5255
5256 /* Language specific nodes will be removed, so there is no need
5257 to gather anything under them. */
5258 if (is_lang_specific (t))
5259 {
5260 *ws = 0;
5261 return NULL_TREE;
5262 }
5263
5264 if (DECL_P (t))
5265 {
5266 /* Note that walk_tree does not traverse every possible field in
5267 decls, so we have to do our own traversals here. */
5268 add_tree_to_fld_list (t, fld);
5269
5270 fld_worklist_push (DECL_NAME (t), fld);
5271 fld_worklist_push (DECL_CONTEXT (t), fld);
5272 fld_worklist_push (DECL_SIZE (t), fld);
5273 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5274
5275 /* We are going to remove everything under DECL_INITIAL for
5276 TYPE_DECLs. No point walking them. */
5277 if (TREE_CODE (t) != TYPE_DECL)
5278 fld_worklist_push (DECL_INITIAL (t), fld);
5279
5280 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5281 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5282
5283 if (TREE_CODE (t) == FUNCTION_DECL)
5284 {
5285 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5286 fld_worklist_push (DECL_RESULT (t), fld);
5287 }
5288 else if (TREE_CODE (t) == TYPE_DECL)
5289 {
5290 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5291 }
5292 else if (TREE_CODE (t) == FIELD_DECL)
5293 {
5294 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5295 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5296 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5297 fld_worklist_push (DECL_FCONTEXT (t), fld);
5298 }
5299
5300 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5301 && DECL_HAS_VALUE_EXPR_P (t))
5302 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5303
5304 if (TREE_CODE (t) != FIELD_DECL
5305 && TREE_CODE (t) != TYPE_DECL)
5306 fld_worklist_push (TREE_CHAIN (t), fld);
5307 *ws = 0;
5308 }
5309 else if (TYPE_P (t))
5310 {
5311 /* Note that walk_tree does not traverse every possible field in
5312 types, so we have to do our own traversals here. */
5313 add_tree_to_fld_list (t, fld);
5314
5315 if (!RECORD_OR_UNION_TYPE_P (t))
5316 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5317 fld_worklist_push (TYPE_SIZE (t), fld);
5318 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5319 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5320 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5321 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5322 fld_worklist_push (TYPE_NAME (t), fld);
5323 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5324 them and thus do not (and do not want to) reach unused pointer types
5325 this way. */
5326 if (!POINTER_TYPE_P (t))
5327 fld_worklist_push (TYPE_MINVAL (t), fld);
5328 if (!RECORD_OR_UNION_TYPE_P (t))
5329 fld_worklist_push (TYPE_MAXVAL (t), fld);
5330 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5331 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5332 do not (and do not want to) reach unused variants this way. */
5333 if (TYPE_CONTEXT (t))
5334 {
5335 tree ctx = TYPE_CONTEXT (t);
5336 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5337 So push that instead. */
5338 while (ctx && TREE_CODE (ctx) == BLOCK)
5339 ctx = BLOCK_SUPERCONTEXT (ctx);
5340 fld_worklist_push (ctx, fld);
5341 }
5342 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5343 (and do not want to) reach unused types this way. */
5344
5345 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5346 {
5347 unsigned i;
5348 tree tem;
5349 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5350 fld_worklist_push (TREE_TYPE (tem), fld);
5351 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5352 if (tem
5353 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5354 && TREE_CODE (tem) == TREE_LIST)
5355 do
5356 {
5357 fld_worklist_push (TREE_VALUE (tem), fld);
5358 tem = TREE_CHAIN (tem);
5359 }
5360 while (tem);
5361 }
5362 if (RECORD_OR_UNION_TYPE_P (t))
5363 {
5364 tree tem;
5365 /* Push all TYPE_FIELDS - there can be interleaving interesting
5366 and non-interesting things. */
5367 tem = TYPE_FIELDS (t);
5368 while (tem)
5369 {
5370 if (TREE_CODE (tem) == FIELD_DECL
5371 || TREE_CODE (tem) == TYPE_DECL)
5372 fld_worklist_push (tem, fld);
5373 tem = TREE_CHAIN (tem);
5374 }
5375 }
5376
5377 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5378 *ws = 0;
5379 }
5380 else if (TREE_CODE (t) == BLOCK)
5381 {
5382 tree tem;
5383 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5384 fld_worklist_push (tem, fld);
5385 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5386 fld_worklist_push (tem, fld);
5387 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5388 }
5389
5390 if (TREE_CODE (t) != IDENTIFIER_NODE
5391 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5392 fld_worklist_push (TREE_TYPE (t), fld);
5393
5394 return NULL_TREE;
5395 }
5396
5397
5398 /* Find decls and types in T. */
5399
5400 static void
5401 find_decls_types (tree t, struct free_lang_data_d *fld)
5402 {
5403 while (1)
5404 {
5405 if (!fld->pset->contains (t))
5406 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5407 if (fld->worklist.is_empty ())
5408 break;
5409 t = fld->worklist.pop ();
5410 }
5411 }
5412
5413 /* Replace all the types in LIST with the corresponding runtime
5414 types. */
5415
5416 static tree
5417 get_eh_types_for_runtime (tree list)
5418 {
5419 tree head, prev;
5420
5421 if (list == NULL_TREE)
5422 return NULL_TREE;
5423
5424 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5425 prev = head;
5426 list = TREE_CHAIN (list);
5427 while (list)
5428 {
5429 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5430 TREE_CHAIN (prev) = n;
5431 prev = TREE_CHAIN (prev);
5432 list = TREE_CHAIN (list);
5433 }
5434
5435 return head;
5436 }
5437
5438
5439 /* Find decls and types referenced in EH region R and store them in
5440 FLD->DECLS and FLD->TYPES. */
5441
5442 static void
5443 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5444 {
5445 switch (r->type)
5446 {
5447 case ERT_CLEANUP:
5448 break;
5449
5450 case ERT_TRY:
5451 {
5452 eh_catch c;
5453
5454 /* The types referenced in each catch must first be changed to the
5455 EH types used at runtime. This removes references to FE types
5456 in the region. */
5457 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5458 {
5459 c->type_list = get_eh_types_for_runtime (c->type_list);
5460 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5461 }
5462 }
5463 break;
5464
5465 case ERT_ALLOWED_EXCEPTIONS:
5466 r->u.allowed.type_list
5467 = get_eh_types_for_runtime (r->u.allowed.type_list);
5468 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5469 break;
5470
5471 case ERT_MUST_NOT_THROW:
5472 walk_tree (&r->u.must_not_throw.failure_decl,
5473 find_decls_types_r, fld, fld->pset);
5474 break;
5475 }
5476 }
5477
5478
5479 /* Find decls and types referenced in cgraph node N and store them in
5480 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5481 look for *every* kind of DECL and TYPE node reachable from N,
5482 including those embedded inside types and decls (i.e., TYPE_DECLs,
5483 NAMESPACE_DECLs, etc). */
5484
5485 static void
5486 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5487 {
5488 basic_block bb;
5489 struct function *fn;
5490 unsigned ix;
5491 tree t;
5492
5493 find_decls_types (n->decl, fld);
5494
5495 if (!gimple_has_body_p (n->decl))
5496 return;
5497
5498 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5499
5500 fn = DECL_STRUCT_FUNCTION (n->decl);
5501
5502 /* Traverse locals. */
5503 FOR_EACH_LOCAL_DECL (fn, ix, t)
5504 find_decls_types (t, fld);
5505
5506 /* Traverse EH regions in FN. */
5507 {
5508 eh_region r;
5509 FOR_ALL_EH_REGION_FN (r, fn)
5510 find_decls_types_in_eh_region (r, fld);
5511 }
5512
5513 /* Traverse every statement in FN. */
5514 FOR_EACH_BB_FN (bb, fn)
5515 {
5516 gimple_stmt_iterator si;
5517 unsigned i;
5518
5519 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5520 {
5521 gimple phi = gsi_stmt (si);
5522
5523 for (i = 0; i < gimple_phi_num_args (phi); i++)
5524 {
5525 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5526 find_decls_types (*arg_p, fld);
5527 }
5528 }
5529
5530 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5531 {
5532 gimple stmt = gsi_stmt (si);
5533
5534 if (is_gimple_call (stmt))
5535 find_decls_types (gimple_call_fntype (stmt), fld);
5536
5537 for (i = 0; i < gimple_num_ops (stmt); i++)
5538 {
5539 tree arg = gimple_op (stmt, i);
5540 find_decls_types (arg, fld);
5541 }
5542 }
5543 }
5544 }
5545
5546
5547 /* Find decls and types referenced in varpool node N and store them in
5548 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5549 look for *every* kind of DECL and TYPE node reachable from N,
5550 including those embedded inside types and decls (i.e., TYPE_DECLs,
5551 NAMESPACE_DECLs, etc). */
5552
5553 static void
5554 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5555 {
5556 find_decls_types (v->decl, fld);
5557 }
5558
5559 /* If T needs an assembler name, have one created for it. */
5560
5561 void
5562 assign_assembler_name_if_neeeded (tree t)
5563 {
5564 if (need_assembler_name_p (t))
5565 {
5566 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5567 diagnostics that use input_location to show locus
5568 information. The problem here is that, at this point,
5569 input_location is generally anchored to the end of the file
5570 (since the parser is long gone), so we don't have a good
5571 position to pin it to.
5572
5573 To alleviate this problem, this uses the location of T's
5574 declaration. Examples of this are
5575 testsuite/g++.dg/template/cond2.C and
5576 testsuite/g++.dg/template/pr35240.C. */
5577 location_t saved_location = input_location;
5578 input_location = DECL_SOURCE_LOCATION (t);
5579
5580 decl_assembler_name (t);
5581
5582 input_location = saved_location;
5583 }
5584 }
5585
5586
5587 /* Free language specific information for every operand and expression
5588 in every node of the call graph. This process operates in three stages:
5589
5590 1- Every callgraph node and varpool node is traversed looking for
5591 decls and types embedded in them. This is a more exhaustive
5592 search than that done by find_referenced_vars, because it will
5593 also collect individual fields, decls embedded in types, etc.
5594
5595 2- All the decls found are sent to free_lang_data_in_decl.
5596
5597 3- All the types found are sent to free_lang_data_in_type.
5598
5599 The ordering between decls and types is important because
5600 free_lang_data_in_decl sets assembler names, which includes
5601 mangling. So types cannot be freed up until assembler names have
5602 been set up. */
5603
5604 static void
5605 free_lang_data_in_cgraph (void)
5606 {
5607 struct cgraph_node *n;
5608 varpool_node *v;
5609 struct free_lang_data_d fld;
5610 tree t;
5611 unsigned i;
5612 alias_pair *p;
5613
5614 /* Initialize sets and arrays to store referenced decls and types. */
5615 fld.pset = new hash_set<tree>;
5616 fld.worklist.create (0);
5617 fld.decls.create (100);
5618 fld.types.create (100);
5619
5620 /* Find decls and types in the body of every function in the callgraph. */
5621 FOR_EACH_FUNCTION (n)
5622 find_decls_types_in_node (n, &fld);
5623
5624 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5625 find_decls_types (p->decl, &fld);
5626
5627 /* Find decls and types in every varpool symbol. */
5628 FOR_EACH_VARIABLE (v)
5629 find_decls_types_in_var (v, &fld);
5630
5631 /* Set the assembler name on every decl found. We need to do this
5632 now because free_lang_data_in_decl will invalidate data needed
5633 for mangling. This breaks mangling on interdependent decls. */
5634 FOR_EACH_VEC_ELT (fld.decls, i, t)
5635 assign_assembler_name_if_neeeded (t);
5636
5637 /* Traverse every decl found freeing its language data. */
5638 FOR_EACH_VEC_ELT (fld.decls, i, t)
5639 free_lang_data_in_decl (t);
5640
5641 /* Traverse every type found freeing its language data. */
5642 FOR_EACH_VEC_ELT (fld.types, i, t)
5643 free_lang_data_in_type (t);
5644
5645 delete fld.pset;
5646 fld.worklist.release ();
5647 fld.decls.release ();
5648 fld.types.release ();
5649 }
5650
5651
5652 /* Free resources that are used by the FE but are not needed once it is done. */
5653
5654 static unsigned
5655 free_lang_data (void)
5656 {
5657 unsigned i;
5658
5659 /* If we are the LTO frontend we have freed lang-specific data already. */
5660 if (in_lto_p
5661 || !flag_generate_lto)
5662 return 0;
5663
5664 /* Allocate and assign alias sets to the standard integer types
5665 while the slots are still set up the way the frontends generated them. */
5666 for (i = 0; i < itk_none; ++i)
5667 if (integer_types[i])
5668 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5669
5670 /* Traverse the IL resetting language specific information for
5671 operands, expressions, etc. */
5672 free_lang_data_in_cgraph ();
5673
5674 /* Create gimple variants for common types. */
5675 ptrdiff_type_node = integer_type_node;
5676 fileptr_type_node = ptr_type_node;
5677
5678 /* Reset some langhooks. Do not reset types_compatible_p, it may
5679 still be used indirectly via the get_alias_set langhook. */
5680 lang_hooks.dwarf_name = lhd_dwarf_name;
5681 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5682 /* We do not want the default decl_assembler_name implementation,
5683 rather if we have fixed everything we want a wrapper around it
5684 asserting that all non-local symbols already got their assembler
5685 name and only produce assembler names for local symbols. Or rather
5686 make sure we never call decl_assembler_name on local symbols and
5687 devise a separate, middle-end private scheme for it. */
5688
5689 /* Reset diagnostic machinery. */
5690 tree_diagnostics_defaults (global_dc);
5691
5692 return 0;
5693 }
5694
5695
5696 namespace {
5697
5698 const pass_data pass_data_ipa_free_lang_data =
5699 {
5700 SIMPLE_IPA_PASS, /* type */
5701 "*free_lang_data", /* name */
5702 OPTGROUP_NONE, /* optinfo_flags */
5703 TV_IPA_FREE_LANG_DATA, /* tv_id */
5704 0, /* properties_required */
5705 0, /* properties_provided */
5706 0, /* properties_destroyed */
5707 0, /* todo_flags_start */
5708 0, /* todo_flags_finish */
5709 };
5710
5711 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5712 {
5713 public:
5714 pass_ipa_free_lang_data (gcc::context *ctxt)
5715 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5716 {}
5717
5718 /* opt_pass methods: */
5719 virtual unsigned int execute (function *) { return free_lang_data (); }
5720
5721 }; // class pass_ipa_free_lang_data
5722
5723 } // anon namespace
5724
5725 simple_ipa_opt_pass *
5726 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5727 {
5728 return new pass_ipa_free_lang_data (ctxt);
5729 }
5730
5731 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5732 ATTR_NAME. Also used internally by remove_attribute(). */
5733 bool
5734 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5735 {
5736 size_t ident_len = IDENTIFIER_LENGTH (ident);
5737
5738 if (ident_len == attr_len)
5739 {
5740 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5741 return true;
5742 }
5743 else if (ident_len == attr_len + 4)
5744 {
5745 /* There is the possibility that ATTR is 'text' and IDENT is
5746 '__text__'. */
5747 const char *p = IDENTIFIER_POINTER (ident);
5748 if (p[0] == '_' && p[1] == '_'
5749 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5750 && strncmp (attr_name, p + 2, attr_len) == 0)
5751 return true;
5752 }
5753
5754 return false;
5755 }
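
/* A minimal sketch, kept out of the build: both spellings of an attribute
   name are accepted, so "packed" matches the identifiers "packed" and
   "__packed__".  The helper name is hypothetical; the usual entry point is
   the is_attribute_p macro built on this function.  */
#if 0
static bool
example_is_packed_attr (const_tree ident)
{
  return private_is_attribute_p ("packed", strlen ("packed"), ident);
}
#endif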
5756
5757 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5758 of ATTR_NAME, and LIST is not NULL_TREE. */
5759 tree
5760 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5761 {
5762 while (list)
5763 {
5764 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5765
5766 if (ident_len == attr_len)
5767 {
5768 if (!strcmp (attr_name,
5769 IDENTIFIER_POINTER (get_attribute_name (list))))
5770 break;
5771 }
5772 /* TODO: If we made sure that attributes were stored in the
5773 canonical form without '__...__' (i.e., as in 'text' as opposed
5774 to '__text__') then we could avoid the following case. */
5775 else if (ident_len == attr_len + 4)
5776 {
5777 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5778 if (p[0] == '_' && p[1] == '_'
5779 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5780 && strncmp (attr_name, p + 2, attr_len) == 0)
5781 break;
5782 }
5783 list = TREE_CHAIN (list);
5784 }
5785
5786 return list;
5787 }
5788
5789 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5790 return a pointer to the attribute's list first element if the attribute
5791 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5792 '__text__'). */
5793
5794 tree
5795 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5796 tree list)
5797 {
5798 while (list)
5799 {
5800 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5801
5802 if (attr_len > ident_len)
5803 {
5804 list = TREE_CHAIN (list);
5805 continue;
5806 }
5807
5808 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5809
5810 if (strncmp (attr_name, p, attr_len) == 0)
5811 break;
5812
5813 /* TODO: If we made sure that attributes were stored in the
5814 canonical form without '__...__' (i.e., as in 'text' as opposed
5815 to '__text__') then we could avoid the following case. */
5816 if (p[0] == '_' && p[1] == '_'
5817 && strncmp (attr_name, p + 2, attr_len) == 0)
5818 break;
5819
5820 list = TREE_CHAIN (list);
5821 }
5822
5823 return list;
5824 }
5825
5826
5827 /* A variant of lookup_attribute() that can be used with an identifier
5828 as the first argument, and where the identifier can be either
5829 'text' or '__text__'.
5830
5831 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5832 return a pointer to the attribute's list element if the attribute
5833 is part of the list, or NULL_TREE if not found. If the attribute
5834 appears more than once, this only returns the first occurrence; the
5835 TREE_CHAIN of the return value should be passed back in if further
5836 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5837 can be in the form 'text' or '__text__'. */
5838 static tree
5839 lookup_ident_attribute (tree attr_identifier, tree list)
5840 {
5841 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5842
5843 while (list)
5844 {
5845 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5846 == IDENTIFIER_NODE);
5847
5848 /* Identifiers can be compared directly for equality. */
5849 if (attr_identifier == get_attribute_name (list))
5850 break;
5851
5852 /* If they are not equal, they may still be one in the form
5853 'text' while the other one is in the form '__text__'. TODO:
5854 If we were storing attributes in normalized 'text' form, then
5855 this could all go away and we could take full advantage of
5856 the fact that we're comparing identifiers. :-) */
5857 {
5858 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5859 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5860
5861 if (ident_len == attr_len + 4)
5862 {
5863 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5864 const char *q = IDENTIFIER_POINTER (attr_identifier);
5865 if (p[0] == '_' && p[1] == '_'
5866 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5867 && strncmp (q, p + 2, attr_len) == 0)
5868 break;
5869 }
5870 else if (ident_len + 4 == attr_len)
5871 {
5872 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5873 const char *q = IDENTIFIER_POINTER (attr_identifier);
5874 if (q[0] == '_' && q[1] == '_'
5875 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5876 && strncmp (q + 2, p, ident_len) == 0)
5877 break;
5878 }
5879 }
5880 list = TREE_CHAIN (list);
5881 }
5882
5883 return list;
5884 }
5885
5886 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5887 modified list. */
5888
5889 tree
5890 remove_attribute (const char *attr_name, tree list)
5891 {
5892 tree *p;
5893 size_t attr_len = strlen (attr_name);
5894
5895 gcc_checking_assert (attr_name[0] != '_');
5896
5897 for (p = &list; *p; )
5898 {
5899 tree l = *p;
5900 /* TODO: If we were storing attributes in normalized form, here
5901 we could use a simple strcmp(). */
5902 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5903 *p = TREE_CHAIN (l);
5904 else
5905 p = &TREE_CHAIN (l);
5906 }
5907
5908 return list;
5909 }
5910
5911 /* Return an attribute list that is the union of a1 and a2. */
5912
5913 tree
5914 merge_attributes (tree a1, tree a2)
5915 {
5916 tree attributes;
5917
5918 /* Either one unset? Take the set one. */
5919
5920 if ((attributes = a1) == 0)
5921 attributes = a2;
5922
5923 /* One that completely contains the other? Take it. */
5924
5925 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5926 {
5927 if (attribute_list_contained (a2, a1))
5928 attributes = a2;
5929 else
5930 {
5931 /* Pick the longest list, and merge in the entries it lacks from the other. */
5932
5933 if (list_length (a1) < list_length (a2))
5934 attributes = a2, a2 = a1;
5935
5936 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5937 {
5938 tree a;
5939 for (a = lookup_ident_attribute (get_attribute_name (a2),
5940 attributes);
5941 a != NULL_TREE && !attribute_value_equal (a, a2);
5942 a = lookup_ident_attribute (get_attribute_name (a2),
5943 TREE_CHAIN (a)))
5944 ;
5945 if (a == NULL_TREE)
5946 {
5947 a1 = copy_node (a2);
5948 TREE_CHAIN (a1) = attributes;
5949 attributes = a1;
5950 }
5951 }
5952 }
5953 }
5954 return attributes;
5955 }
5956
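/* An illustrative example (hypothetical attribute spellings): merging
   { aligned(8), packed } with { packed, unused } keeps the longer list
   and copies over only the entries it lacks, giving a list that contains
   aligned(8), packed and unused exactly once. Entries with the same
   name but different values, e.g. aligned(4) and aligned(8), are both
   retained, since duplicates are detected with lookup_ident_attribute
   plus attribute_value_equal.  */
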
5957 /* Given types T1 and T2, merge their attributes and return
5958 the result. */
5959
5960 tree
5961 merge_type_attributes (tree t1, tree t2)
5962 {
5963 return merge_attributes (TYPE_ATTRIBUTES (t1),
5964 TYPE_ATTRIBUTES (t2));
5965 }
5966
5967 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5968 the result. */
5969
5970 tree
5971 merge_decl_attributes (tree olddecl, tree newdecl)
5972 {
5973 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5974 DECL_ATTRIBUTES (newdecl));
5975 }
5976
5977 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5978
5979 /* Specialization of merge_decl_attributes for various Windows targets.
5980
5981 This handles the following situation:
5982
5983 __declspec (dllimport) int foo;
5984 int foo;
5985
5986 The second instance of `foo' nullifies the dllimport. */
5987
5988 tree
5989 merge_dllimport_decl_attributes (tree old, tree new_tree)
5990 {
5991 tree a;
5992 int delete_dllimport_p = 1;
5993
5994 /* What we need to do here is remove from `old' dllimport if it doesn't
5995 appear in `new'. dllimport behaves like extern: if a declaration is
5996 marked dllimport and a definition appears later, then the object
5997 is not dllimport'd. We also remove a `new' dllimport if the old list
5998 contains dllexport: dllexport always overrides dllimport, regardless
5999 of the order of declaration. */
6000 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6001 delete_dllimport_p = 0;
6002 else if (DECL_DLLIMPORT_P (new_tree)
6003 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6004 {
6005 DECL_DLLIMPORT_P (new_tree) = 0;
6006 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6007 "dllimport ignored", new_tree);
6008 }
6009 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6010 {
6011 /* Warn about overriding a symbol that has already been used, e.g.:
6012 extern int __attribute__ ((dllimport)) foo;
6013 int* bar () {return &foo;}
6014 int foo;
6015 */
6016 if (TREE_USED (old))
6017 {
6018 warning (0, "%q+D redeclared without dllimport attribute "
6019 "after being referenced with dll linkage", new_tree);
6020 /* If we have used a variable's address with dllimport linkage,
6021 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6022 decl may already have had TREE_CONSTANT computed.
6023 We still remove the attribute so that assembler code refers
6024 to '&foo' rather than '_imp__foo'. */
6025 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6026 DECL_DLLIMPORT_P (new_tree) = 1;
6027 }
6028
6029 /* Let an inline definition silently override the external reference,
6030 but otherwise warn about attribute inconsistency. */
6031 else if (TREE_CODE (new_tree) == VAR_DECL
6032 || !DECL_DECLARED_INLINE_P (new_tree))
6033 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6034 "previous dllimport ignored", new_tree);
6035 }
6036 else
6037 delete_dllimport_p = 0;
6038
6039 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6040
6041 if (delete_dllimport_p)
6042 a = remove_attribute ("dllimport", a);
6043
6044 return a;
6045 }
6046
6047 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6048 struct attribute_spec.handler. */
6049
6050 tree
6051 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6052 bool *no_add_attrs)
6053 {
6054 tree node = *pnode;
6055 bool is_dllimport;
6056
6057 /* These attributes may apply to structure and union types being created,
6058 but otherwise should pass to the declaration involved. */
6059 if (!DECL_P (node))
6060 {
6061 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6062 | (int) ATTR_FLAG_ARRAY_NEXT))
6063 {
6064 *no_add_attrs = true;
6065 return tree_cons (name, args, NULL_TREE);
6066 }
6067 if (TREE_CODE (node) == RECORD_TYPE
6068 || TREE_CODE (node) == UNION_TYPE)
6069 {
6070 node = TYPE_NAME (node);
6071 if (!node)
6072 return NULL_TREE;
6073 }
6074 else
6075 {
6076 warning (OPT_Wattributes, "%qE attribute ignored",
6077 name);
6078 *no_add_attrs = true;
6079 return NULL_TREE;
6080 }
6081 }
6082
6083 if (TREE_CODE (node) != FUNCTION_DECL
6084 && TREE_CODE (node) != VAR_DECL
6085 && TREE_CODE (node) != TYPE_DECL)
6086 {
6087 *no_add_attrs = true;
6088 warning (OPT_Wattributes, "%qE attribute ignored",
6089 name);
6090 return NULL_TREE;
6091 }
6092
6093 if (TREE_CODE (node) == TYPE_DECL
6094 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6095 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6096 {
6097 *no_add_attrs = true;
6098 warning (OPT_Wattributes, "%qE attribute ignored",
6099 name);
6100 return NULL_TREE;
6101 }
6102
6103 is_dllimport = is_attribute_p ("dllimport", name);
6104
6105 /* Report errors on dllimport ambiguities seen now, before they
6106 cause any damage. */
6107 if (is_dllimport)
6108 {
6109 /* Honor any target-specific overrides. */
6110 if (!targetm.valid_dllimport_attribute_p (node))
6111 *no_add_attrs = true;
6112
6113 else if (TREE_CODE (node) == FUNCTION_DECL
6114 && DECL_DECLARED_INLINE_P (node))
6115 {
6116 warning (OPT_Wattributes, "inline function %q+D declared as "
6117 " dllimport: attribute ignored", node);
6118 *no_add_attrs = true;
6119 }
6120 /* Like MS, treat definitions of dllimport'd variables and
6121 non-inlined functions as errors. */
6122 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6123 {
6124 error ("function %q+D definition is marked dllimport", node);
6125 *no_add_attrs = true;
6126 }
6127
6128 else if (TREE_CODE (node) == VAR_DECL)
6129 {
6130 if (DECL_INITIAL (node))
6131 {
6132 error ("variable %q+D definition is marked dllimport",
6133 node);
6134 *no_add_attrs = true;
6135 }
6136
6137 /* `extern' needn't be specified with dllimport.
6138 Specify `extern' now and hope for the best. Sigh. */
6139 DECL_EXTERNAL (node) = 1;
6140 /* Also, implicitly give global scope to dllimport'd variables
6141 declared within a function, unless they are declared static. */
6142 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6143 TREE_PUBLIC (node) = 1;
6144 }
6145
6146 if (*no_add_attrs == false)
6147 DECL_DLLIMPORT_P (node) = 1;
6148 }
6149 else if (TREE_CODE (node) == FUNCTION_DECL
6150 && DECL_DECLARED_INLINE_P (node)
6151 && flag_keep_inline_dllexport)
6152 /* An exported function, even if inline, must be emitted. */
6153 DECL_EXTERNAL (node) = 0;
6154
6155 /* Report error if symbol is not accessible at global scope. */
6156 if (!TREE_PUBLIC (node)
6157 && (TREE_CODE (node) == VAR_DECL
6158 || TREE_CODE (node) == FUNCTION_DECL))
6159 {
6160 error ("external linkage required for symbol %q+D because of "
6161 "%qE attribute", node, name);
6162 *no_add_attrs = true;
6163 }
6164
6165 /* A dllexport'd entity must have default visibility so that other
6166 program units (shared libraries or the main executable) can see
6167 it. A dllimport'd entity must have default visibility so that
6168 the linker knows that undefined references within this program
6169 unit can be resolved by the dynamic linker. */
6170 if (!*no_add_attrs)
6171 {
6172 if (DECL_VISIBILITY_SPECIFIED (node)
6173 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6174 error ("%qE implies default visibility, but %qD has already "
6175 "been declared with a different visibility",
6176 name, node);
6177 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6178 DECL_VISIBILITY_SPECIFIED (node) = 1;
6179 }
6180
6181 return NULL_TREE;
6182 }
6183
6184 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6185 \f
6186 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6187 of the various TYPE_QUAL values. */
6188
6189 static void
6190 set_type_quals (tree type, int type_quals)
6191 {
6192 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6193 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6194 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6195 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6196 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6197 }
6198
6199 /* Returns true iff unqualified CAND and BASE are equivalent. */
6200
6201 bool
6202 check_base_type (const_tree cand, const_tree base)
6203 {
6204 return (TYPE_NAME (cand) == TYPE_NAME (base)
6205 /* Apparently this is needed for Objective-C. */
6206 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6207 /* Check alignment. */
6208 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6209 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6210 TYPE_ATTRIBUTES (base)));
6211 }
6212
6213 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6214
6215 bool
6216 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6217 {
6218 return (TYPE_QUALS (cand) == type_quals
6219 && check_base_type (cand, base));
6220 }
6221
6222 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6223
6224 static bool
6225 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6226 {
6227 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6228 && TYPE_NAME (cand) == TYPE_NAME (base)
6229 /* Apparently this is needed for Objective-C. */
6230 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6231 /* Check alignment. */
6232 && TYPE_ALIGN (cand) == align
6233 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6234 TYPE_ATTRIBUTES (base)));
6235 }
6236
6237 /* This function checks to see if TYPE matches the size of one of the
6238 built-in atomic types, and returns that core atomic type. */
6239
6240 static tree
6241 find_atomic_core_type (tree type)
6242 {
6243 tree base_atomic_type;
6244
6245 /* Only handle complete types. */
6246 if (TYPE_SIZE (type) == NULL_TREE)
6247 return NULL_TREE;
6248
6249 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6250 switch (type_size)
6251 {
6252 case 8:
6253 base_atomic_type = atomicQI_type_node;
6254 break;
6255
6256 case 16:
6257 base_atomic_type = atomicHI_type_node;
6258 break;
6259
6260 case 32:
6261 base_atomic_type = atomicSI_type_node;
6262 break;
6263
6264 case 64:
6265 base_atomic_type = atomicDI_type_node;
6266 break;
6267
6268 case 128:
6269 base_atomic_type = atomicTI_type_node;
6270 break;
6271
6272 default:
6273 base_atomic_type = NULL_TREE;
6274 }
6275
6276 return base_atomic_type;
6277 }
6278
6279 /* Return a version of the TYPE, qualified as indicated by the
6280 TYPE_QUALS, if one exists. If no qualified version exists yet,
6281 return NULL_TREE. */
6282
6283 tree
6284 get_qualified_type (tree type, int type_quals)
6285 {
6286 tree t;
6287
6288 if (TYPE_QUALS (type) == type_quals)
6289 return type;
6290
6291 /* Search the chain of variants to see if there is already one there just
6292 like the one we need to have. If so, use that existing one. We must
6293 preserve the TYPE_NAME, since there is code that depends on this. */
6294 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6295 if (check_qualified_type (t, type, type_quals))
6296 return t;
6297
6298 return NULL_TREE;
6299 }
6300
6301 /* Like get_qualified_type, but creates the type if it does not
6302 exist. This function never returns NULL_TREE. */
6303
6304 tree
6305 build_qualified_type (tree type, int type_quals)
6306 {
6307 tree t;
6308
6309 /* See if we already have the appropriate qualified variant. */
6310 t = get_qualified_type (type, type_quals);
6311
6312 /* If not, build it. */
6313 if (!t)
6314 {
6315 t = build_variant_type_copy (type);
6316 set_type_quals (t, type_quals);
6317
6318 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6319 {
6320 /* See if this object can map to a basic atomic type. */
6321 tree atomic_type = find_atomic_core_type (type);
6322 if (atomic_type)
6323 {
6324 /* Ensure the alignment of this type is compatible with
6325 the required alignment of the atomic type. */
6326 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6327 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6328 }
6329 }
6330
6331 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6332 /* Propagate structural equality. */
6333 SET_TYPE_STRUCTURAL_EQUALITY (t);
6334 else if (TYPE_CANONICAL (type) != type)
6335 /* Build the underlying canonical type, since it is different
6336 from TYPE. */
6337 {
6338 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6339 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6340 }
6341 else
6342 /* T is its own canonical type. */
6343 TYPE_CANONICAL (t) = t;
6344
6345 }
6346
6347 return t;
6348 }
6349
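/* A minimal usage sketch: qualified variants are shared on the variant
   chain of the main variant, so asking twice yields the same node.

     tree cint  = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     tree cint2 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     gcc_checking_assert (cint == cint2);
     gcc_checking_assert (TYPE_MAIN_VARIANT (cint) == integer_type_node);

   Sketch only; it assumes integer_type_node is itself unqualified, as it
   normally is.  */
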
6350 /* Create a variant of TYPE with alignment ALIGN. */
6351
6352 tree
6353 build_aligned_type (tree type, unsigned int align)
6354 {
6355 tree t;
6356
6357 if (TYPE_PACKED (type)
6358 || TYPE_ALIGN (type) == align)
6359 return type;
6360
6361 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6362 if (check_aligned_type (t, type, align))
6363 return t;
6364
6365 t = build_variant_type_copy (type);
6366 TYPE_ALIGN (t) = align;
6367
6368 return t;
6369 }
6370
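/* Note that ALIGN is measured in bits, as TYPE_ALIGN is; e.g. a 16-byte
   aligned variant of a hypothetical complete type TYPE is requested as

     tree t16 = build_aligned_type (type, 128);

   and packed types are returned unchanged, as the check above shows.  */
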
6371 /* Create a new distinct copy of TYPE. The new type is made its own
6372 MAIN_VARIANT. If TYPE requires structural equality checks, the
6373 resulting type requires structural equality checks; otherwise, its
6374 TYPE_CANONICAL points to itself. */
6375
6376 tree
6377 build_distinct_type_copy (tree type)
6378 {
6379 tree t = copy_node (type);
6380
6381 TYPE_POINTER_TO (t) = 0;
6382 TYPE_REFERENCE_TO (t) = 0;
6383
6384 /* Set the canonical type either to a new equivalence class, or
6385 propagate the need for structural equality checks. */
6386 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6387 SET_TYPE_STRUCTURAL_EQUALITY (t);
6388 else
6389 TYPE_CANONICAL (t) = t;
6390
6391 /* Make it its own variant. */
6392 TYPE_MAIN_VARIANT (t) = t;
6393 TYPE_NEXT_VARIANT (t) = 0;
6394
6395 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6396 whose TREE_TYPE is not t. This can also happen in the Ada
6397 frontend when using subtypes. */
6398
6399 return t;
6400 }
6401
6402 /* Create a new variant of TYPE, equivalent but distinct. This is so
6403 the caller can modify it. TYPE_CANONICAL for the return type will
6404 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6405 are considered equal by the language itself (or that both types
6406 require structural equality checks). */
6407
6408 tree
6409 build_variant_type_copy (tree type)
6410 {
6411 tree t, m = TYPE_MAIN_VARIANT (type);
6412
6413 t = build_distinct_type_copy (type);
6414
6415 /* Since we're building a variant, assume that it is a non-semantic
6416 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6417 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6418
6419 /* Add the new type to the chain of variants of TYPE. */
6420 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6421 TYPE_NEXT_VARIANT (m) = t;
6422 TYPE_MAIN_VARIANT (t) = m;
6423
6424 return t;
6425 }
6426 \f
6427 /* Return true if the 'from' trees in both tree maps are equal. */
6428
6429 int
6430 tree_map_base_eq (const void *va, const void *vb)
6431 {
6432 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6433 *const b = (const struct tree_map_base *) vb;
6434 return (a->from == b->from);
6435 }
6436
6437 /* Hash a 'from' tree in a tree_map_base. */
6438
6439 unsigned int
6440 tree_map_base_hash (const void *item)
6441 {
6442 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6443 }
6444
6445 /* Return true if this tree map structure is marked for garbage collection
6446 purposes. We simply return true if the from tree is marked, so that this
6447 structure goes away when the from tree goes away. */
6448
6449 int
6450 tree_map_base_marked_p (const void *p)
6451 {
6452 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6453 }
6454
6455 /* Hash a from tree in a tree_map. */
6456
6457 unsigned int
6458 tree_map_hash (const void *item)
6459 {
6460 return (((const struct tree_map *) item)->hash);
6461 }
6462
6463 /* Hash a from tree in a tree_decl_map. */
6464
6465 unsigned int
6466 tree_decl_map_hash (const void *item)
6467 {
6468 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6469 }
6470
6471 /* Return the initialization priority for DECL. */
6472
6473 priority_type
6474 decl_init_priority_lookup (tree decl)
6475 {
6476 symtab_node *snode = symtab_node::get (decl);
6477
6478 if (!snode)
6479 return DEFAULT_INIT_PRIORITY;
6480 return
6481 snode->get_init_priority ();
6482 }
6483
6484 /* Return the finalization priority for DECL. */
6485
6486 priority_type
6487 decl_fini_priority_lookup (tree decl)
6488 {
6489 cgraph_node *node = cgraph_node::get (decl);
6490
6491 if (!node)
6492 return DEFAULT_INIT_PRIORITY;
6493 return
6494 node->get_fini_priority ();
6495 }
6496
6497 /* Set the initialization priority for DECL to PRIORITY. */
6498
6499 void
6500 decl_init_priority_insert (tree decl, priority_type priority)
6501 {
6502 struct symtab_node *snode;
6503
6504 if (priority == DEFAULT_INIT_PRIORITY)
6505 {
6506 snode = symtab_node::get (decl);
6507 if (!snode)
6508 return;
6509 }
6510 else if (TREE_CODE (decl) == VAR_DECL)
6511 snode = varpool_node::get_create (decl);
6512 else
6513 snode = cgraph_node::get_create (decl);
6514 snode->set_init_priority (priority);
6515 }
6516
6517 /* Set the finalization priority for DECL to PRIORITY. */
6518
6519 void
6520 decl_fini_priority_insert (tree decl, priority_type priority)
6521 {
6522 struct cgraph_node *node;
6523
6524 if (priority == DEFAULT_INIT_PRIORITY)
6525 {
6526 node = cgraph_node::get (decl);
6527 if (!node)
6528 return;
6529 }
6530 else
6531 node = cgraph_node::get_create (decl);
6532 node->set_fini_priority (priority);
6533 }
6534
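/* Usage sketch for the priority helpers above, with CTOR_DECL standing
   for a hypothetical constructor FUNCTION_DECL:

     decl_init_priority_insert (ctor_decl, 200);
     gcc_checking_assert (decl_init_priority_lookup (ctor_decl) == 200);

   Inserting DEFAULT_INIT_PRIORITY when no symtab node exists yet is a
   no-op, and a lookup without a node simply returns
   DEFAULT_INIT_PRIORITY.  */
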
6535 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6536
6537 static void
6538 print_debug_expr_statistics (void)
6539 {
6540 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6541 (long) htab_size (debug_expr_for_decl),
6542 (long) htab_elements (debug_expr_for_decl),
6543 htab_collisions (debug_expr_for_decl));
6544 }
6545
6546 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6547
6548 static void
6549 print_value_expr_statistics (void)
6550 {
6551 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6552 (long) htab_size (value_expr_for_decl),
6553 (long) htab_elements (value_expr_for_decl),
6554 htab_collisions (value_expr_for_decl));
6555 }
6556
6557 /* Lookup a debug expression for FROM, and return it if we find one. */
6558
6559 tree
6560 decl_debug_expr_lookup (tree from)
6561 {
6562 struct tree_decl_map *h, in;
6563 in.base.from = from;
6564
6565 h = (struct tree_decl_map *)
6566 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6567 if (h)
6568 return h->to;
6569 return NULL_TREE;
6570 }
6571
6572 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6573
6574 void
6575 decl_debug_expr_insert (tree from, tree to)
6576 {
6577 struct tree_decl_map *h;
6578 void **loc;
6579
6580 h = ggc_alloc<tree_decl_map> ();
6581 h->base.from = from;
6582 h->to = to;
6583 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6584 INSERT);
6585 *(struct tree_decl_map **) loc = h;
6586 }
6587
6588 /* Lookup a value expression for FROM, and return it if we find one. */
6589
6590 tree
6591 decl_value_expr_lookup (tree from)
6592 {
6593 struct tree_decl_map *h, in;
6594 in.base.from = from;
6595
6596 h = (struct tree_decl_map *)
6597 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6598 if (h)
6599 return h->to;
6600 return NULL_TREE;
6601 }
6602
6603 /* Insert a mapping FROM->TO in the value expression hashtable. */
6604
6605 void
6606 decl_value_expr_insert (tree from, tree to)
6607 {
6608 struct tree_decl_map *h;
6609 void **loc;
6610
6611 h = ggc_alloc<tree_decl_map> ();
6612 h->base.from = from;
6613 h->to = to;
6614 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6615 INSERT);
6616 *(struct tree_decl_map **) loc = h;
6617 }
6618
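/* These two maps are normally reached through the tree.h wrappers rather
   than called directly; a minimal sketch, with DECL and REPL standing for
   hypothetical trees:

     SET_DECL_VALUE_EXPR (decl, repl);
     DECL_HAS_VALUE_EXPR_P (decl) = 1;
     ...
     if (DECL_HAS_VALUE_EXPR_P (decl))
       repl = DECL_VALUE_EXPR (decl);

   Note that decl_value_expr_insert does not set the flag itself; keeping
   DECL_HAS_VALUE_EXPR_P in sync is the caller's job.  */
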
6619 /* Lookup a vector of debug arguments for FROM, and return it if we
6620 find one. */
6621
6622 vec<tree, va_gc> **
6623 decl_debug_args_lookup (tree from)
6624 {
6625 struct tree_vec_map *h, in;
6626
6627 if (!DECL_HAS_DEBUG_ARGS_P (from))
6628 return NULL;
6629 gcc_checking_assert (debug_args_for_decl != NULL);
6630 in.base.from = from;
6631 h = (struct tree_vec_map *)
6632 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6633 if (h)
6634 return &h->to;
6635 return NULL;
6636 }
6637
6638 /* Insert a mapping FROM->empty vector of debug arguments in the
6639 debug arguments hashtable. */
6640
6641 vec<tree, va_gc> **
6642 decl_debug_args_insert (tree from)
6643 {
6644 struct tree_vec_map *h;
6645 void **loc;
6646
6647 if (DECL_HAS_DEBUG_ARGS_P (from))
6648 return decl_debug_args_lookup (from);
6649 if (debug_args_for_decl == NULL)
6650 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6651 tree_vec_map_eq, 0);
6652 h = ggc_alloc<tree_vec_map> ();
6653 h->base.from = from;
6654 h->to = NULL;
6655 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6656 INSERT);
6657 *(struct tree_vec_map **) loc = h;
6658 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6659 return &h->to;
6660 }
6661
6662 /* Hashing of types so that we don't make duplicates.
6663 The entry point is `type_hash_canon'. */
6664
6665 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6666 with types in the TREE_VALUE slots), by adding the hash codes
6667 of the individual types. */
6668
6669 static void
6670 type_hash_list (const_tree list, inchash::hash &hstate)
6671 {
6672 const_tree tail;
6673
6674 for (tail = list; tail; tail = TREE_CHAIN (tail))
6675 if (TREE_VALUE (tail) != error_mark_node)
6676 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6677 }
6678
6679 /* These are the Hashtable callback functions. */
6680
6681 /* Returns true iff the types are equivalent. */
6682
6683 static int
6684 type_hash_eq (const void *va, const void *vb)
6685 {
6686 const struct type_hash *const a = (const struct type_hash *) va,
6687 *const b = (const struct type_hash *) vb;
6688
6689 /* First test the things that are the same for all types. */
6690 if (a->hash != b->hash
6691 || TREE_CODE (a->type) != TREE_CODE (b->type)
6692 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6693 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6694 TYPE_ATTRIBUTES (b->type))
6695 || (TREE_CODE (a->type) != COMPLEX_TYPE
6696 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6697 return 0;
6698
6699 /* Be careful about comparing arrays before and after the element type
6700 has been completed; don't compare TYPE_ALIGN unless both types are
6701 complete. */
6702 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6703 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6704 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6705 return 0;
6706
6707 switch (TREE_CODE (a->type))
6708 {
6709 case VOID_TYPE:
6710 case COMPLEX_TYPE:
6711 case POINTER_TYPE:
6712 case REFERENCE_TYPE:
6713 case NULLPTR_TYPE:
6714 return 1;
6715
6716 case VECTOR_TYPE:
6717 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6718
6719 case ENUMERAL_TYPE:
6720 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6721 && !(TYPE_VALUES (a->type)
6722 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6723 && TYPE_VALUES (b->type)
6724 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6725 && type_list_equal (TYPE_VALUES (a->type),
6726 TYPE_VALUES (b->type))))
6727 return 0;
6728
6729 /* ... fall through ... */
6730
6731 case INTEGER_TYPE:
6732 case REAL_TYPE:
6733 case BOOLEAN_TYPE:
6734 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6735 return false;
6736 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6737 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6738 TYPE_MAX_VALUE (b->type)))
6739 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6740 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6741 TYPE_MIN_VALUE (b->type))));
6742
6743 case FIXED_POINT_TYPE:
6744 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6745
6746 case OFFSET_TYPE:
6747 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6748
6749 case METHOD_TYPE:
6750 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6751 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6752 || (TYPE_ARG_TYPES (a->type)
6753 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6754 && TYPE_ARG_TYPES (b->type)
6755 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6756 && type_list_equal (TYPE_ARG_TYPES (a->type),
6757 TYPE_ARG_TYPES (b->type)))))
6758 break;
6759 return 0;
6760 case ARRAY_TYPE:
6761 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6762
6763 case RECORD_TYPE:
6764 case UNION_TYPE:
6765 case QUAL_UNION_TYPE:
6766 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6767 || (TYPE_FIELDS (a->type)
6768 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6769 && TYPE_FIELDS (b->type)
6770 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6771 && type_list_equal (TYPE_FIELDS (a->type),
6772 TYPE_FIELDS (b->type))));
6773
6774 case FUNCTION_TYPE:
6775 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6776 || (TYPE_ARG_TYPES (a->type)
6777 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6778 && TYPE_ARG_TYPES (b->type)
6779 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6780 && type_list_equal (TYPE_ARG_TYPES (a->type),
6781 TYPE_ARG_TYPES (b->type))))
6782 break;
6783 return 0;
6784
6785 default:
6786 return 0;
6787 }
6788
6789 if (lang_hooks.types.type_hash_eq != NULL)
6790 return lang_hooks.types.type_hash_eq (a->type, b->type);
6791
6792 return 1;
6793 }
6794
6795 /* Return the cached hash value. */
6796
6797 static hashval_t
6798 type_hash_hash (const void *item)
6799 {
6800 return ((const struct type_hash *) item)->hash;
6801 }
6802
6803 /* Given TYPE, and HASHCODE its hash code, return the canonical
6804 object for an identical type if one already exists.
6805 Otherwise, return TYPE, and record it as the canonical object.
6806
6807 To use this function, first create a type of the sort you want.
6808 Then compute its hash code from the fields of the type that
6809 make it different from other similar types.
6810 Then call this function and use the value. */
6811
6812 tree
6813 type_hash_canon (unsigned int hashcode, tree type)
6814 {
6815 type_hash in;
6816 void **loc;
6817
6818 /* The hash table only contains main variants, so ensure that's what we're
6819 being passed. */
6820 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6821
6822 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6823 must call that routine before comparing TYPE_ALIGNs. */
6824 layout_type (type);
6825
6826 in.hash = hashcode;
6827 in.type = type;
6828
6829 loc = htab_find_slot_with_hash (type_hash_table, &in, hashcode, INSERT);
6830 if (*loc)
6831 {
6832 tree t1 = ((type_hash *) *loc)->type;
6833 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6834 if (GATHER_STATISTICS)
6835 {
6836 tree_code_counts[(int) TREE_CODE (type)]--;
6837 tree_node_counts[(int) t_kind]--;
6838 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6839 }
6840 return t1;
6841 }
6842 else
6843 {
6844 struct type_hash *h;
6845
6846 h = ggc_alloc<type_hash> ();
6847 h->hash = hashcode;
6848 h->type = type;
6849 *loc = (void *)h;
6850
6851 return type;
6852 }
6853 }
6854
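/* A typical caller (compare build_range_type_1 further down) follows the
   pattern sketched here:

     tree t = make_node (INTEGER_TYPE);
     ... fill in the fields that distinguish the type ...
     inchash::hash hstate;
     inchash::add_expr (TYPE_MAX_VALUE (t), hstate);
     hstate.merge_hash (TYPE_HASH (TREE_TYPE (t)));
     t = type_hash_canon (hstate.end (), t);

   If an identical type had already been registered, the freshly built
   node is abandoned and the existing one is returned instead.  */
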
6855 /* See if the data pointed to by the type hash table is marked. We
6856 consider it marked simply if the type it points to is marked, so that
6857 the hash entry stays alive only as long as the type does. */
6858
6859 static int
6860 type_hash_marked_p (const void *p)
6861 {
6862 const_tree const type = ((const struct type_hash *) p)->type;
6863
6864 return ggc_marked_p (type);
6865 }
6866
6867 static void
6868 print_type_hash_statistics (void)
6869 {
6870 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6871 (long) htab_size (type_hash_table),
6872 (long) htab_elements (type_hash_table),
6873 htab_collisions (type_hash_table));
6874 }
6875
6876 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6877 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6878 by adding the hash codes of the individual attributes. */
6879
6880 static void
6881 attribute_hash_list (const_tree list, inchash::hash &hstate)
6882 {
6883 const_tree tail;
6884
6885 for (tail = list; tail; tail = TREE_CHAIN (tail))
6886 /* ??? Do we want to add in TREE_VALUE too? */
6887 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6888 }
6889
6890 /* Given two lists of attributes, return true if list L2 is
6891 equivalent to L1. */
6892
6893 int
6894 attribute_list_equal (const_tree l1, const_tree l2)
6895 {
6896 if (l1 == l2)
6897 return 1;
6898
6899 return attribute_list_contained (l1, l2)
6900 && attribute_list_contained (l2, l1);
6901 }
6902
6903 /* Given two lists of attributes, return true if list L2 is
6904 completely contained within L1. */
6905 /* ??? This would be faster if attribute names were stored in a canonicalized
6906 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6907 must be used to show these elements are equivalent (which they are). */
6908 /* ??? It's not clear that attributes with arguments will always be handled
6909 correctly. */
6910
6911 int
6912 attribute_list_contained (const_tree l1, const_tree l2)
6913 {
6914 const_tree t1, t2;
6915
6916 /* First check the obvious: maybe the lists are identical. */
6917 if (l1 == l2)
6918 return 1;
6919
6920 /* Maybe the lists are similar. */
6921 for (t1 = l1, t2 = l2;
6922 t1 != 0 && t2 != 0
6923 && get_attribute_name (t1) == get_attribute_name (t2)
6924 && TREE_VALUE (t1) == TREE_VALUE (t2);
6925 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6926 ;
6927
6928 /* Maybe the lists are equal. */
6929 if (t1 == 0 && t2 == 0)
6930 return 1;
6931
6932 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6933 {
6934 const_tree attr;
6935 /* This CONST_CAST is okay because lookup_attribute does not
6936 modify its argument and the return value is assigned to a
6937 const_tree. */
6938 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6939 CONST_CAST_TREE (l1));
6940 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6941 attr = lookup_ident_attribute (get_attribute_name (t2),
6942 TREE_CHAIN (attr)))
6943 ;
6944
6945 if (attr == NULL_TREE)
6946 return 0;
6947 }
6948
6949 return 1;
6950 }
6951
6952 /* Given two lists of types
6953 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6954 return 1 if the lists contain the same types in the same order.
6955 Also, the TREE_PURPOSEs must match. */
6956
6957 int
6958 type_list_equal (const_tree l1, const_tree l2)
6959 {
6960 const_tree t1, t2;
6961
6962 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6963 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6964 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6965 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6966 && (TREE_TYPE (TREE_PURPOSE (t1))
6967 == TREE_TYPE (TREE_PURPOSE (t2))))))
6968 return 0;
6969
6970 return t1 == t2;
6971 }
6972
6973 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6974 given by TYPE. If the argument list accepts variable arguments,
6975 then this function counts only the ordinary arguments. */
6976
6977 int
6978 type_num_arguments (const_tree type)
6979 {
6980 int i = 0;
6981 tree t;
6982
6983 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6984 /* If the function does not take a variable number of arguments,
6985 the last element in the list will have type `void'. */
6986 if (VOID_TYPE_P (TREE_VALUE (t)))
6987 break;
6988 else
6989 ++i;
6990
6991 return i;
6992 }
6993
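/* For example, the FUNCTION_TYPE of 'int f (int, char *, ...)' has
   TYPE_ARG_TYPES of { int, char * } with no trailing void node, and
   type_num_arguments returns 2; without the ellipsis the list ends with
   a void node and the count is still 2.  */
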
6994 /* Nonzero if integer constants T1 and T2
6995 represent the same constant value. */
6996
6997 int
6998 tree_int_cst_equal (const_tree t1, const_tree t2)
6999 {
7000 if (t1 == t2)
7001 return 1;
7002
7003 if (t1 == 0 || t2 == 0)
7004 return 0;
7005
7006 if (TREE_CODE (t1) == INTEGER_CST
7007 && TREE_CODE (t2) == INTEGER_CST
7008 && wi::to_widest (t1) == wi::to_widest (t2))
7009 return 1;
7010
7011 return 0;
7012 }
7013
7014 /* Return true if T is an INTEGER_CST whose numerical value (extended
7015 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7016
7017 bool
7018 tree_fits_shwi_p (const_tree t)
7019 {
7020 return (t != NULL_TREE
7021 && TREE_CODE (t) == INTEGER_CST
7022 && wi::fits_shwi_p (wi::to_widest (t)));
7023 }
7024
7025 /* Return true if T is an INTEGER_CST whose numerical value (extended
7026 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7027
7028 bool
7029 tree_fits_uhwi_p (const_tree t)
7030 {
7031 return (t != NULL_TREE
7032 && TREE_CODE (t) == INTEGER_CST
7033 && wi::fits_uhwi_p (wi::to_widest (t)));
7034 }
7035
7036 /* T is an INTEGER_CST whose numerical value (extended according to
7037 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7038 HOST_WIDE_INT. */
7039
7040 HOST_WIDE_INT
7041 tree_to_shwi (const_tree t)
7042 {
7043 gcc_assert (tree_fits_shwi_p (t));
7044 return TREE_INT_CST_LOW (t);
7045 }
7046
7047 /* T is an INTEGER_CST whose numerical value (extended according to
7048 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7049 HOST_WIDE_INT. */
7050
7051 unsigned HOST_WIDE_INT
7052 tree_to_uhwi (const_tree t)
7053 {
7054 gcc_assert (tree_fits_uhwi_p (t));
7055 return TREE_INT_CST_LOW (t);
7056 }
7057
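/* The usual pattern is to test before converting, e.g. with LEN standing
   for some hypothetical INTEGER_CST:

     if (tree_fits_uhwi_p (len))
       {
         unsigned HOST_WIDE_INT n = tree_to_uhwi (len);
         ...
       }

   Calling tree_to_uhwi on a value that does not fit fails the gcc_assert
   above.  */
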
7058 /* Return the most significant (sign) bit of T. */
7059
7060 int
7061 tree_int_cst_sign_bit (const_tree t)
7062 {
7063 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7064
7065 return wi::extract_uhwi (t, bitno, 1);
7066 }
7067
7068 /* Return an indication of the sign of the integer constant T.
7069 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7070 Note that -1 will never be returned if T's type is unsigned. */
7071
7072 int
7073 tree_int_cst_sgn (const_tree t)
7074 {
7075 if (wi::eq_p (t, 0))
7076 return 0;
7077 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7078 return 1;
7079 else if (wi::neg_p (t))
7080 return -1;
7081 else
7082 return 1;
7083 }
7084
7085 /* Return the minimum number of bits needed to represent VALUE in a
7086 signed or unsigned type; SGN says which. */
7087
7088 unsigned int
7089 tree_int_cst_min_precision (tree value, signop sgn)
7090 {
7091 /* If the value is negative, compute its negative minus 1. The latter
7092 adjustment is because the absolute value of the largest negative value
7093 is one larger than the largest positive value. This is equivalent to
7094 a bit-wise negation, so use that operation instead. */
7095
7096 if (tree_int_cst_sgn (value) < 0)
7097 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7098
7099 /* Return the number of bits needed, taking into account the fact
7100 that we need one more bit for a signed type than for an unsigned one.
7101 If value is 0 or -1, the minimum precision is 1 no matter
7102 whether SGN is SIGNED or UNSIGNED. */
7103
7104 if (integer_zerop (value))
7105 return 1;
7106 else
7107 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7108 }
7109
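/* Worked examples: for VALUE 5 this returns 3 with SGN == UNSIGNED and 4
   with SGN == SIGNED (one extra bit for the sign); for VALUE 0 or -1 it
   returns 1 in either case, as noted above.  */
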
7110 /* Return truthvalue of whether T1 is the same tree structure as T2.
7111 Return 1 if they are the same.
7112 Return 0 if they are understandably different.
7113 Return -1 if either contains tree structure not understood by
7114 this function. */
7115
7116 int
7117 simple_cst_equal (const_tree t1, const_tree t2)
7118 {
7119 enum tree_code code1, code2;
7120 int cmp;
7121 int i;
7122
7123 if (t1 == t2)
7124 return 1;
7125 if (t1 == 0 || t2 == 0)
7126 return 0;
7127
7128 code1 = TREE_CODE (t1);
7129 code2 = TREE_CODE (t2);
7130
7131 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7132 {
7133 if (CONVERT_EXPR_CODE_P (code2)
7134 || code2 == NON_LVALUE_EXPR)
7135 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7136 else
7137 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7138 }
7139
7140 else if (CONVERT_EXPR_CODE_P (code2)
7141 || code2 == NON_LVALUE_EXPR)
7142 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7143
7144 if (code1 != code2)
7145 return 0;
7146
7147 switch (code1)
7148 {
7149 case INTEGER_CST:
7150 return wi::to_widest (t1) == wi::to_widest (t2);
7151
7152 case REAL_CST:
7153 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7154
7155 case FIXED_CST:
7156 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7157
7158 case STRING_CST:
7159 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7160 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7161 TREE_STRING_LENGTH (t1)));
7162
7163 case CONSTRUCTOR:
7164 {
7165 unsigned HOST_WIDE_INT idx;
7166 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7167 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7168
7169 if (vec_safe_length (v1) != vec_safe_length (v2))
7170 return false;
7171
7172 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7173 /* ??? Should we also handle fields here? */
7174 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7175 return false;
7176 return true;
7177 }
7178
7179 case SAVE_EXPR:
7180 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7181
7182 case CALL_EXPR:
7183 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7184 if (cmp <= 0)
7185 return cmp;
7186 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7187 return 0;
7188 {
7189 const_tree arg1, arg2;
7190 const_call_expr_arg_iterator iter1, iter2;
7191 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7192 arg2 = first_const_call_expr_arg (t2, &iter2);
7193 arg1 && arg2;
7194 arg1 = next_const_call_expr_arg (&iter1),
7195 arg2 = next_const_call_expr_arg (&iter2))
7196 {
7197 cmp = simple_cst_equal (arg1, arg2);
7198 if (cmp <= 0)
7199 return cmp;
7200 }
7201 return arg1 == arg2;
7202 }
7203
7204 case TARGET_EXPR:
7205 /* Special case: if either target is an unallocated VAR_DECL,
7206 it means that it's going to be unified with whatever the
7207 TARGET_EXPR is really supposed to initialize, so treat it
7208 as being equivalent to anything. */
7209 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7210 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7211 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7212 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7213 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7214 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7215 cmp = 1;
7216 else
7217 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7218
7219 if (cmp <= 0)
7220 return cmp;
7221
7222 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7223
7224 case WITH_CLEANUP_EXPR:
7225 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7226 if (cmp <= 0)
7227 return cmp;
7228
7229 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7230
7231 case COMPONENT_REF:
7232 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7233 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7234
7235 return 0;
7236
7237 case VAR_DECL:
7238 case PARM_DECL:
7239 case CONST_DECL:
7240 case FUNCTION_DECL:
7241 return 0;
7242
7243 default:
7244 break;
7245 }
7246
7247 /* This general rule works for most tree codes. All exceptions should be
7248 handled above. If this is a language-specific tree code, we can't
7249 trust what might be in the operand, so say we don't know
7250 the situation. */
7251 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7252 return -1;
7253
7254 switch (TREE_CODE_CLASS (code1))
7255 {
7256 case tcc_unary:
7257 case tcc_binary:
7258 case tcc_comparison:
7259 case tcc_expression:
7260 case tcc_reference:
7261 case tcc_statement:
7262 cmp = 1;
7263 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7264 {
7265 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7266 if (cmp <= 0)
7267 return cmp;
7268 }
7269
7270 return cmp;
7271
7272 default:
7273 return -1;
7274 }
7275 }
7276
7277 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7278 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7279 than U, respectively. */
7280
7281 int
7282 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7283 {
7284 if (tree_int_cst_sgn (t) < 0)
7285 return -1;
7286 else if (!tree_fits_uhwi_p (t))
7287 return 1;
7288 else if (TREE_INT_CST_LOW (t) == u)
7289 return 0;
7290 else if (TREE_INT_CST_LOW (t) < u)
7291 return -1;
7292 else
7293 return 1;
7294 }
7295
7296 /* Return true if SIZE represents a constant size that is in bounds of
7297 what the middle-end and the backend accepts (covering not more than
7298 half of the address-space). */
7299
7300 bool
7301 valid_constant_size_p (const_tree size)
7302 {
7303 if (! tree_fits_uhwi_p (size)
7304 || TREE_OVERFLOW (size)
7305 || tree_int_cst_sign_bit (size) != 0)
7306 return false;
7307 return true;
7308 }
7309
7310 /* Return the precision of the type, or for a complex or vector type the
7311 precision of the type of its elements. */
7312
7313 unsigned int
7314 element_precision (const_tree type)
7315 {
7316 enum tree_code code = TREE_CODE (type);
7317 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7318 type = TREE_TYPE (type);
7319
7320 return TYPE_PRECISION (type);
7321 }
7322
7323 /* Return true if CODE represents an associative tree code. Otherwise
7324 return false. */
7325 bool
7326 associative_tree_code (enum tree_code code)
7327 {
7328 switch (code)
7329 {
7330 case BIT_IOR_EXPR:
7331 case BIT_AND_EXPR:
7332 case BIT_XOR_EXPR:
7333 case PLUS_EXPR:
7334 case MULT_EXPR:
7335 case MIN_EXPR:
7336 case MAX_EXPR:
7337 return true;
7338
7339 default:
7340 break;
7341 }
7342 return false;
7343 }
7344
7345 /* Return true if CODE represents a commutative tree code. Otherwise
7346 return false. */
7347 bool
7348 commutative_tree_code (enum tree_code code)
7349 {
7350 switch (code)
7351 {
7352 case PLUS_EXPR:
7353 case MULT_EXPR:
7354 case MULT_HIGHPART_EXPR:
7355 case MIN_EXPR:
7356 case MAX_EXPR:
7357 case BIT_IOR_EXPR:
7358 case BIT_XOR_EXPR:
7359 case BIT_AND_EXPR:
7360 case NE_EXPR:
7361 case EQ_EXPR:
7362 case UNORDERED_EXPR:
7363 case ORDERED_EXPR:
7364 case UNEQ_EXPR:
7365 case LTGT_EXPR:
7366 case TRUTH_AND_EXPR:
7367 case TRUTH_XOR_EXPR:
7368 case TRUTH_OR_EXPR:
7369 case WIDEN_MULT_EXPR:
7370 case VEC_WIDEN_MULT_HI_EXPR:
7371 case VEC_WIDEN_MULT_LO_EXPR:
7372 case VEC_WIDEN_MULT_EVEN_EXPR:
7373 case VEC_WIDEN_MULT_ODD_EXPR:
7374 return true;
7375
7376 default:
7377 break;
7378 }
7379 return false;
7380 }
7381
7382 /* Return true if CODE represents a ternary tree code for which the
7383 first two operands are commutative. Otherwise return false. */
7384 bool
7385 commutative_ternary_tree_code (enum tree_code code)
7386 {
7387 switch (code)
7388 {
7389 case WIDEN_MULT_PLUS_EXPR:
7390 case WIDEN_MULT_MINUS_EXPR:
7391 case DOT_PROD_EXPR:
7392 case FMA_EXPR:
7393 return true;
7394
7395 default:
7396 break;
7397 }
7398 return false;
7399 }
7400
7401 namespace inchash
7402 {
7403
7404 /* Generate a hash value for an expression. This can be used iteratively
7405 by passing a previous result as the HSTATE argument.
7406
7407 This function is intended to produce the same hash for expressions which
7408 would compare equal using operand_equal_p. */
7409 void
7410 add_expr (const_tree t, inchash::hash &hstate)
7411 {
7412 int i;
7413 enum tree_code code;
7414 enum tree_code_class tclass;
7415
7416 if (t == NULL_TREE)
7417 {
7418 hstate.merge_hash (0);
7419 return;
7420 }
7421
7422 code = TREE_CODE (t);
7423
7424 switch (code)
7425 {
7426 /* Alas, constants aren't shared, so we can't rely on pointer
7427 identity. */
7428 case VOID_CST:
7429 hstate.merge_hash (0);
7430 return;
7431 case INTEGER_CST:
7432 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7433 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7434 return;
7435 case REAL_CST:
7436 {
7437 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7438 hstate.merge_hash (val2);
7439 return;
7440 }
7441 case FIXED_CST:
7442 {
7443 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7444 hstate.merge_hash (val2);
7445 return;
7446 }
7447 case STRING_CST:
7448 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7449 return;
7450 case COMPLEX_CST:
7451 inchash::add_expr (TREE_REALPART (t), hstate);
7452 inchash::add_expr (TREE_IMAGPART (t), hstate);
7453 return;
7454 case VECTOR_CST:
7455 {
7456 unsigned i;
7457 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7458 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7459 return;
7460 }
7461 case SSA_NAME:
7462 /* We can just compare by pointer. */
7463 hstate.add_wide_int (SSA_NAME_VERSION (t));
7464 return;
7465 case PLACEHOLDER_EXPR:
7466 /* The node itself doesn't matter. */
7467 return;
7468 case TREE_LIST:
7469 /* A list of expressions, for a CALL_EXPR or as the elements of a
7470 VECTOR_CST. */
7471 for (; t; t = TREE_CHAIN (t))
7472 inchash::add_expr (TREE_VALUE (t), hstate);
7473 return;
7474 case CONSTRUCTOR:
7475 {
7476 unsigned HOST_WIDE_INT idx;
7477 tree field, value;
7478 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7479 {
7480 inchash::add_expr (field, hstate);
7481 inchash::add_expr (value, hstate);
7482 }
7483 return;
7484 }
7485 case FUNCTION_DECL:
7486 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7487 Otherwise nodes that compare equal according to operand_equal_p might
7488 get different hash codes. However, don't do this for machine specific
7489 or front end builtins, since the function code is overloaded in those
7490 cases. */
7491 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7492 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7493 {
7494 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7495 code = TREE_CODE (t);
7496 }
7497 /* FALL THROUGH */
7498 default:
7499 tclass = TREE_CODE_CLASS (code);
7500
7501 if (tclass == tcc_declaration)
7502 {
7503 /* DECLs have a unique ID. */
7504 hstate.add_wide_int (DECL_UID (t));
7505 }
7506 else
7507 {
7508 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7509
7510 hstate.add_object (code);
7511
7512 /* Don't hash the type; that can lead to having nodes which
7513 compare equal according to operand_equal_p, but which
7514 have different hash codes. */
7515 if (CONVERT_EXPR_CODE_P (code)
7516 || code == NON_LVALUE_EXPR)
7517 {
7518 /* Make sure to include signedness in the hash computation. */
7519 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7520 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7521 }
7522
7523 else if (commutative_tree_code (code))
7524 {
7525 /* It's a commutative expression. We want to hash it the same
7526 however it appears. We do this by first hashing both operands
7527 and then rehashing based on the order of their independent
7528 hashes. */
7529 inchash::hash one, two;
7530 inchash::add_expr (TREE_OPERAND (t, 0), one);
7531 inchash::add_expr (TREE_OPERAND (t, 1), two);
7532 hstate.add_commutative (one, two);
7533 }
7534 else
7535 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7536 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7537 }
7538 return;
7539 }
7540 }
7541
7542 }
7543
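/* A minimal sketch of driving the hasher above, with OP0 and OP1 standing
   for arbitrary trees:

     inchash::hash hstate;
     inchash::add_expr (op0, hstate);
     inchash::add_expr (op1, hstate);
     hashval_t h = hstate.end ();

   Expressions that compare equal under operand_equal_p are intended to
   end up with the same value of H.  */
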
7544 /* Constructors for pointer, array and function types.
7545 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7546 constructed by language-dependent code, not here.) */
7547
7548 /* Construct, lay out and return the type of pointers to TO_TYPE with
7549 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7550 reference all of memory. If such a type has already been
7551 constructed, reuse it. */
7552
7553 tree
7554 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7555 bool can_alias_all)
7556 {
7557 tree t;
7558
7559 if (to_type == error_mark_node)
7560 return error_mark_node;
7561
7562 /* If the pointed-to type has the may_alias attribute set, force
7563 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7564 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7565 can_alias_all = true;
7566
7567 /* In some cases, languages will have things that aren't a POINTER_TYPE
7568 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7569 In that case, return that type without regard to the rest of our
7570 operands.
7571
7572 ??? This is a kludge, but consistent with the way this function has
7573 always operated and there doesn't seem to be a good way to avoid this
7574 at the moment. */
7575 if (TYPE_POINTER_TO (to_type) != 0
7576 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7577 return TYPE_POINTER_TO (to_type);
7578
7579 /* First, if we already have a type for pointers to TO_TYPE and it's
7580 the proper mode, use it. */
7581 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7582 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7583 return t;
7584
7585 t = make_node (POINTER_TYPE);
7586
7587 TREE_TYPE (t) = to_type;
7588 SET_TYPE_MODE (t, mode);
7589 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7590 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7591 TYPE_POINTER_TO (to_type) = t;
7592
7593 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7594 SET_TYPE_STRUCTURAL_EQUALITY (t);
7595 else if (TYPE_CANONICAL (to_type) != to_type)
7596 TYPE_CANONICAL (t)
7597 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7598 mode, can_alias_all);
7599
7600 /* Lay out the type. This function has many callers that are concerned
7601 with expression-construction, and this simplifies them all. */
7602 layout_type (t);
7603
7604 return t;
7605 }
7606
7607 /* Build pointers to TO_TYPE in the default pointer mode for its address space. */
7608
7609 tree
7610 build_pointer_type (tree to_type)
7611 {
7612 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7613 : TYPE_ADDR_SPACE (to_type);
7614 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7615 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7616 }
7617
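/* Sketch: pointer types are cached on the pointed-to type, so

     tree p1 = build_pointer_type (integer_type_node);
     tree p2 = build_pointer_type (integer_type_node);

   yields p1 == p2; both come from the TYPE_POINTER_TO chain of the
   target, and a new node is made only when the requested mode or
   TYPE_REF_CAN_ALIAS_ALL setting differs.  */
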
7618 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7619
7620 tree
7621 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7622 bool can_alias_all)
7623 {
7624 tree t;
7625
7626 if (to_type == error_mark_node)
7627 return error_mark_node;
7628
7629 /* If the pointed-to type has the may_alias attribute set, force
7630 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7631 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7632 can_alias_all = true;
7633
7634 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7635 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7636 In that case, return that type without regard to the rest of our
7637 operands.
7638
7639 ??? This is a kludge, but consistent with the way this function has
7640 always operated and there doesn't seem to be a good way to avoid this
7641 at the moment. */
7642 if (TYPE_REFERENCE_TO (to_type) != 0
7643 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7644 return TYPE_REFERENCE_TO (to_type);
7645
7646 /* First, if we already have a type for pointers to TO_TYPE and it's
7647 the proper mode, use it. */
7648 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7649 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7650 return t;
7651
7652 t = make_node (REFERENCE_TYPE);
7653
7654 TREE_TYPE (t) = to_type;
7655 SET_TYPE_MODE (t, mode);
7656 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7657 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7658 TYPE_REFERENCE_TO (to_type) = t;
7659
7660 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7661 SET_TYPE_STRUCTURAL_EQUALITY (t);
7662 else if (TYPE_CANONICAL (to_type) != to_type)
7663 TYPE_CANONICAL (t)
7664 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7665 mode, can_alias_all);
7666
7667 layout_type (t);
7668
7669 return t;
7670 }
7671
7672
7673 /* Build the node for the type of references-to-TO_TYPE, in the default
7674 pointer mode for its address space. */
7675
7676 tree
7677 build_reference_type (tree to_type)
7678 {
7679 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7680 : TYPE_ADDR_SPACE (to_type);
7681 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7682 return build_reference_type_for_mode (to_type, pointer_mode, false);
7683 }
7684
7685 #define MAX_INT_CACHED_PREC \
7686 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7687 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7688
7689 /* Builds a signed or unsigned integer type of precision PRECISION.
7690 Used for C bitfields whose precision does not match that of
7691 built-in target types. */
7692 tree
7693 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7694 int unsignedp)
7695 {
7696 tree itype, ret;
7697
7698 if (unsignedp)
7699 unsignedp = MAX_INT_CACHED_PREC + 1;
7700
7701 if (precision <= MAX_INT_CACHED_PREC)
7702 {
7703 itype = nonstandard_integer_type_cache[precision + unsignedp];
7704 if (itype)
7705 return itype;
7706 }
7707
7708 itype = make_node (INTEGER_TYPE);
7709 TYPE_PRECISION (itype) = precision;
7710
7711 if (unsignedp)
7712 fixup_unsigned_type (itype);
7713 else
7714 fixup_signed_type (itype);
7715
7716 ret = itype;
7717 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7718 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7719 if (precision <= MAX_INT_CACHED_PREC)
7720 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7721
7722 return ret;
7723 }
7724
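/* Sketch: a 24-bit unsigned bit-field type is obtained with

     tree t24 = build_nonstandard_integer_type (24, 1);

   and repeated requests for the same precision/signedness pair are served
   from the cache above for precisions up to MAX_INT_CACHED_PREC.  */
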
7725 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7726 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7727 is true, reuse such a type that has already been constructed. */
7728
7729 static tree
7730 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7731 {
7732 tree itype = make_node (INTEGER_TYPE);
7733 inchash::hash hstate;
7734
7735 TREE_TYPE (itype) = type;
7736
7737 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7738 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7739
7740 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7741 SET_TYPE_MODE (itype, TYPE_MODE (type));
7742 TYPE_SIZE (itype) = TYPE_SIZE (type);
7743 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7744 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7745 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7746
7747 if (!shared)
7748 return itype;
7749
7750 if ((TYPE_MIN_VALUE (itype)
7751 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7752 || (TYPE_MAX_VALUE (itype)
7753 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7754 {
7755 /* Since we cannot reliably merge this type, we need to compare it using
7756 structural equality checks. */
7757 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7758 return itype;
7759 }
7760
7761 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7762 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7763 hstate.merge_hash (TYPE_HASH (type));
7764 itype = type_hash_canon (hstate.end (), itype);
7765
7766 return itype;
7767 }
7768
7769 /* Wrapper around build_range_type_1 with SHARED set to true. */
7770
7771 tree
7772 build_range_type (tree type, tree lowval, tree highval)
7773 {
7774 return build_range_type_1 (type, lowval, highval, true);
7775 }
7776
7777 /* Wrapper around build_range_type_1 with SHARED set to false. */
7778
7779 tree
7780 build_nonshared_range_type (tree type, tree lowval, tree highval)
7781 {
7782 return build_range_type_1 (type, lowval, highval, false);
7783 }
7784
7785 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7786 MAXVAL should be the maximum value in the domain
7787 (one less than the length of the array).
7788
7789 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7790 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7791 The limit exists because the result is a signed type and we don't handle
7792 sizes that use more than one HOST_WIDE_INT. */
7793
7794 tree
7795 build_index_type (tree maxval)
7796 {
7797 return build_range_type (sizetype, size_zero_node, maxval);
7798 }
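
/* A minimal usage sketch, for illustration only: the TYPE_DOMAIN for a
   ten-element array runs from 0 to 9, so MAXVAL is one less than the number
   of elements.  The helper name below is hypothetical and the block is not
   compiled.  */
#if 0
static tree
example_index_domain (void)
{
  return build_index_type (size_int (9));  /* range 0 .. 9 in sizetype */
}
#endif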
7799
7800 /* Return true if the debug information for TYPE, a subtype, should be emitted
7801 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7802 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7803 debug info and doesn't reflect the source code. */
7804
7805 bool
7806 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7807 {
7808 tree base_type = TREE_TYPE (type), low, high;
7809
7810 /* Subrange types have a base type which is an integral type. */
7811 if (!INTEGRAL_TYPE_P (base_type))
7812 return false;
7813
7814 /* Get the real bounds of the subtype. */
7815 if (lang_hooks.types.get_subrange_bounds)
7816 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7817 else
7818 {
7819 low = TYPE_MIN_VALUE (type);
7820 high = TYPE_MAX_VALUE (type);
7821 }
7822
7823 /* If the type and its base type have the same representation and the same
7824 name, then the type is not a subrange but a copy of the base type. */
7825 if ((TREE_CODE (base_type) == INTEGER_TYPE
7826 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7827 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7828 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7829 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7830 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7831 return false;
7832
7833 if (lowval)
7834 *lowval = low;
7835 if (highval)
7836 *highval = high;
7837 return true;
7838 }
7839
7840 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7841 and number of elements specified by the range of values of INDEX_TYPE.
7842 If SHARED is true, reuse such a type that has already been constructed. */
7843
7844 static tree
7845 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7846 {
7847 tree t;
7848
7849 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7850 {
7851 error ("arrays of functions are not meaningful");
7852 elt_type = integer_type_node;
7853 }
7854
7855 t = make_node (ARRAY_TYPE);
7856 TREE_TYPE (t) = elt_type;
7857 TYPE_DOMAIN (t) = index_type;
7858 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7859 layout_type (t);
7860
7861 /* If the element type is incomplete at this point we get marked for
7862 structural equality. Do not record these types in the canonical
7863 type hashtable. */
7864 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7865 return t;
7866
7867 if (shared)
7868 {
7869 inchash::hash hstate;
7870 hstate.add_object (TYPE_HASH (elt_type));
7871 if (index_type)
7872 hstate.add_object (TYPE_HASH (index_type));
7873 t = type_hash_canon (hstate.end (), t);
7874 }
7875
7876 if (TYPE_CANONICAL (t) == t)
7877 {
7878 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7879 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7880 SET_TYPE_STRUCTURAL_EQUALITY (t);
7881 else if (TYPE_CANONICAL (elt_type) != elt_type
7882 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7883 TYPE_CANONICAL (t)
7884 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7885 index_type
7886 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7887 shared);
7888 }
7889
7890 return t;
7891 }
7892
7893 /* Wrapper around build_array_type_1 with SHARED set to true. */
7894
7895 tree
7896 build_array_type (tree elt_type, tree index_type)
7897 {
7898 return build_array_type_1 (elt_type, index_type, true);
7899 }
7900
7901 /* Wrapper around build_array_type_1 with SHARED set to false. */
7902
7903 tree
7904 build_nonshared_array_type (tree elt_type, tree index_type)
7905 {
7906 return build_array_type_1 (elt_type, index_type, false);
7907 }
7908
7909 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7910 sizetype. */
7911
7912 tree
7913 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7914 {
7915 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7916 }
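
/* A minimal sketch, for illustration only: since build_array_type_nelts is
   just a wrapper around build_index_type, the two calls below are intended
   to yield the same hash-consed node for char[16].  The helper name is
   hypothetical and the block is not compiled.  */
#if 0
static void
example_array_type_equivalence (void)
{
  tree a = build_array_type_nelts (char_type_node, 16);
  tree b = build_array_type (char_type_node,
			     build_index_type (size_int (15)));
  gcc_assert (a == b);	/* shared array types are hash-consed */
}
#endif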
7917
7918 /* Strips ARRAY_TYPE layers from TYPE until a non-array element type is
7919 found, and returns that element type. */
7920
7921 tree
7922 strip_array_types (tree type)
7923 {
7924 while (TREE_CODE (type) == ARRAY_TYPE)
7925 type = TREE_TYPE (type);
7926
7927 return type;
7928 }
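
/* A minimal usage sketch, for illustration only: for a two-dimensional
   array type such as int[3][4], strip_array_types peels both ARRAY_TYPE
   layers and returns the int element type.  The helper name below is
   hypothetical and the block is not compiled.  */
#if 0
static tree
example_strip_array_types (void)
{
  tree inner = build_array_type_nelts (integer_type_node, 4);	/* int[4] */
  tree outer = build_array_type_nelts (inner, 3);		/* int[3][4] */
  return strip_array_types (outer);	/* yields integer_type_node */
}
#endif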
7929
7930 /* Computes the canonical argument types from the argument type list
7931 ARGTYPES.
7932
7933 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7934 on entry to this function, or if any of the ARGTYPES are
7935 structural.
7936
7937 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7938 true on entry to this function, or if any of the ARGTYPES are
7939 non-canonical.
7940
7941 Returns a canonical argument list, which may be ARGTYPES when the
7942 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7943 true) or would not differ from ARGTYPES. */
7944
7945 static tree
7946 maybe_canonicalize_argtypes (tree argtypes,
7947 bool *any_structural_p,
7948 bool *any_noncanonical_p)
7949 {
7950 tree arg;
7951 bool any_noncanonical_argtypes_p = false;
7952
7953 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7954 {
7955 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7956 /* Fail gracefully by stating that the type is structural. */
7957 *any_structural_p = true;
7958 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7959 *any_structural_p = true;
7960 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7961 || TREE_PURPOSE (arg))
7962 /* If the argument has a default argument, we consider it
7963 non-canonical even though the type itself is canonical.
7964 That way, different variants of function and method types
7965 with default arguments will all point to the variant with
7966 no defaults as their canonical type. */
7967 any_noncanonical_argtypes_p = true;
7968 }
7969
7970 if (*any_structural_p)
7971 return argtypes;
7972
7973 if (any_noncanonical_argtypes_p)
7974 {
7975 /* Build the canonical list of argument types. */
7976 tree canon_argtypes = NULL_TREE;
7977 bool is_void = false;
7978
7979 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7980 {
7981 if (arg == void_list_node)
7982 is_void = true;
7983 else
7984 canon_argtypes = tree_cons (NULL_TREE,
7985 TYPE_CANONICAL (TREE_VALUE (arg)),
7986 canon_argtypes);
7987 }
7988
7989 canon_argtypes = nreverse (canon_argtypes);
7990 if (is_void)
7991 canon_argtypes = chainon (canon_argtypes, void_list_node);
7992
7993 /* There is a non-canonical type. */
7994 *any_noncanonical_p = true;
7995 return canon_argtypes;
7996 }
7997
7998 /* The canonical argument types are the same as ARGTYPES. */
7999 return argtypes;
8000 }
8001
8002 /* Construct, lay out and return
8003 the type of functions returning type VALUE_TYPE
8004 given arguments of types ARG_TYPES.
8005 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8006 are data type nodes for the arguments of the function.
8007 If such a type has already been constructed, reuse it. */
8008
8009 tree
8010 build_function_type (tree value_type, tree arg_types)
8011 {
8012 tree t;
8013 inchash::hash hstate;
8014 bool any_structural_p, any_noncanonical_p;
8015 tree canon_argtypes;
8016
8017 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8018 {
8019 error ("function return type cannot be function");
8020 value_type = integer_type_node;
8021 }
8022
8023 /* Make a node of the sort we want. */
8024 t = make_node (FUNCTION_TYPE);
8025 TREE_TYPE (t) = value_type;
8026 TYPE_ARG_TYPES (t) = arg_types;
8027
8028 /* If we already have such a type, use the old one. */
8029 hstate.add_object (TYPE_HASH (value_type));
8030 type_hash_list (arg_types, hstate);
8031 t = type_hash_canon (hstate.end (), t);
8032
8033 /* Set up the canonical type. */
8034 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8035 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8036 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8037 &any_structural_p,
8038 &any_noncanonical_p);
8039 if (any_structural_p)
8040 SET_TYPE_STRUCTURAL_EQUALITY (t);
8041 else if (any_noncanonical_p)
8042 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8043 canon_argtypes);
8044
8045 if (!COMPLETE_TYPE_P (t))
8046 layout_type (t);
8047 return t;
8048 }
8049
8050 /* Build a function type. The RETURN_TYPE is the type returned by the
8051 function. If VAARGS is set, no void_type_node is appended to
8052 the list. ARGP must always be terminated by a NULL_TREE. */
8053
8054 static tree
8055 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8056 {
8057 tree t, args, last;
8058
8059 t = va_arg (argp, tree);
8060 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8061 args = tree_cons (NULL_TREE, t, args);
8062
8063 if (vaargs)
8064 {
8065 last = args;
8066 if (args != NULL_TREE)
8067 args = nreverse (args);
8068 gcc_assert (last != void_list_node);
8069 }
8070 else if (args == NULL_TREE)
8071 args = void_list_node;
8072 else
8073 {
8074 last = args;
8075 args = nreverse (args);
8076 TREE_CHAIN (last) = void_list_node;
8077 }
8078 args = build_function_type (return_type, args);
8079
8080 return args;
8081 }
8082
8083 /* Build a function type. The RETURN_TYPE is the type returned by the
8084 function. If additional arguments are provided, they are
8085 additional argument types. The list of argument types must always
8086 be terminated by NULL_TREE. */
8087
8088 tree
8089 build_function_type_list (tree return_type, ...)
8090 {
8091 tree args;
8092 va_list p;
8093
8094 va_start (p, return_type);
8095 args = build_function_type_list_1 (false, return_type, p);
8096 va_end (p);
8097 return args;
8098 }
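
/* A minimal usage sketch, for illustration only: building the type of a
   function such as "int f (double, char *)".  The trailing NULL_TREE
   terminates the argument list.  The helper name below is hypothetical and
   the block is not compiled.  */
#if 0
static tree
example_function_type (void)
{
  return build_function_type_list (integer_type_node,
				   double_type_node,
				   build_pointer_type (char_type_node),
				   NULL_TREE);
}
#endif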
8099
8100 /* Build a variable argument function type. The RETURN_TYPE is the
8101 type returned by the function. If additional arguments are provided,
8102 they are additional argument types. The list of argument types must
8103 always be terminated by NULL_TREE. */
8104
8105 tree
8106 build_varargs_function_type_list (tree return_type, ...)
8107 {
8108 tree args;
8109 va_list p;
8110
8111 va_start (p, return_type);
8112 args = build_function_type_list_1 (true, return_type, p);
8113 va_end (p);
8114
8115 return args;
8116 }
8117
8118 /* Build a function type. RETURN_TYPE is the type returned by the
8119 function; VAARGS indicates whether the function takes varargs. The
8120 function takes N named arguments, the types of which are provided in
8121 ARG_TYPES. */
8122
8123 static tree
8124 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8125 tree *arg_types)
8126 {
8127 int i;
8128 tree t = vaargs ? NULL_TREE : void_list_node;
8129
8130 for (i = n - 1; i >= 0; i--)
8131 t = tree_cons (NULL_TREE, arg_types[i], t);
8132
8133 return build_function_type (return_type, t);
8134 }
8135
8136 /* Build a function type. RETURN_TYPE is the type returned by the
8137 function. The function takes N named arguments, the types of which
8138 are provided in ARG_TYPES. */
8139
8140 tree
8141 build_function_type_array (tree return_type, int n, tree *arg_types)
8142 {
8143 return build_function_type_array_1 (false, return_type, n, arg_types);
8144 }
8145
8146 /* Build a variable argument function type. RETURN_TYPE is the type
8147 returned by the function. The function takes N named arguments, the
8148 types of which are provided in ARG_TYPES. */
8149
8150 tree
8151 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8152 {
8153 return build_function_type_array_1 (true, return_type, n, arg_types);
8154 }
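
/* A minimal usage sketch, for illustration only: the array-based variants
   are convenient when the argument types are already collected in a vector,
   e.g. for a printf-like function taking a string and a variable argument
   list.  The helper name below is hypothetical and the block is not
   compiled.  */
#if 0
static tree
example_varargs_type (void)
{
  tree argtype = build_pointer_type (char_type_node);
  return build_varargs_function_type_array (integer_type_node, 1, &argtype);
}
#endif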
8155
8156 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8157 and ARGTYPES (a TREE_LIST) are the return type and argument types
8158 for the method. An implicit additional parameter (of type
8159 pointer-to-BASETYPE) is added to the ARGTYPES. */
8160
8161 tree
8162 build_method_type_directly (tree basetype,
8163 tree rettype,
8164 tree argtypes)
8165 {
8166 tree t;
8167 tree ptype;
8168 inchash::hash hstate;
8169 bool any_structural_p, any_noncanonical_p;
8170 tree canon_argtypes;
8171
8172 /* Make a node of the sort we want. */
8173 t = make_node (METHOD_TYPE);
8174
8175 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8176 TREE_TYPE (t) = rettype;
8177 ptype = build_pointer_type (basetype);
8178
8179 /* The actual arglist for this function includes a "hidden" argument
8180 which is "this". Put it into the list of argument types. */
8181 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8182 TYPE_ARG_TYPES (t) = argtypes;
8183
8184 /* If we already have such a type, use the old one. */
8185 hstate.add_object (TYPE_HASH (basetype));
8186 hstate.add_object (TYPE_HASH (rettype));
8187 type_hash_list (argtypes, hstate);
8188 t = type_hash_canon (hstate.end (), t);
8189
8190 /* Set up the canonical type. */
8191 any_structural_p
8192 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8193 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8194 any_noncanonical_p
8195 = (TYPE_CANONICAL (basetype) != basetype
8196 || TYPE_CANONICAL (rettype) != rettype);
8197 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8198 &any_structural_p,
8199 &any_noncanonical_p);
8200 if (any_structural_p)
8201 SET_TYPE_STRUCTURAL_EQUALITY (t);
8202 else if (any_noncanonical_p)
8203 TYPE_CANONICAL (t)
8204 = build_method_type_directly (TYPE_CANONICAL (basetype),
8205 TYPE_CANONICAL (rettype),
8206 canon_argtypes);
8207 if (!COMPLETE_TYPE_P (t))
8208 layout_type (t);
8209
8210 return t;
8211 }
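
/* A minimal usage sketch, for illustration only: a method such as
   "int C::f (double)" gets an implicit first argument of type "C *", so
   the resulting METHOD_TYPE's argument list starts with the
   pointer-to-basetype that this function prepends.  CLASS_TYPE is assumed
   to be a class RECORD_TYPE; the helper name is hypothetical and the block
   is not compiled.  */
#if 0
static tree
example_method_type (tree class_type)
{
  return build_method_type_directly (class_type, integer_type_node,
				     tree_cons (NULL_TREE, double_type_node,
						void_list_node));
}
#endif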
8212
8213 /* Construct, lay out and return the type of methods belonging to class
8214 BASETYPE and whose arguments and values are described by TYPE.
8215 If that type exists already, reuse it.
8216 TYPE must be a FUNCTION_TYPE node. */
8217
8218 tree
8219 build_method_type (tree basetype, tree type)
8220 {
8221 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8222
8223 return build_method_type_directly (basetype,
8224 TREE_TYPE (type),
8225 TYPE_ARG_TYPES (type));
8226 }
8227
8228 /* Construct, lay out and return the type of offsets to a value
8229 of type TYPE, within an object of type BASETYPE.
8230 If a suitable offset type exists already, reuse it. */
8231
8232 tree
8233 build_offset_type (tree basetype, tree type)
8234 {
8235 tree t;
8236 inchash::hash hstate;
8237
8238 /* Make a node of the sort we want. */
8239 t = make_node (OFFSET_TYPE);
8240
8241 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8242 TREE_TYPE (t) = type;
8243
8244 /* If we already have such a type, use the old one. */
8245 hstate.add_object (TYPE_HASH (basetype));
8246 hstate.add_object (TYPE_HASH (type));
8247 t = type_hash_canon (hstate.end (), t);
8248
8249 if (!COMPLETE_TYPE_P (t))
8250 layout_type (t);
8251
8252 if (TYPE_CANONICAL (t) == t)
8253 {
8254 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8255 || TYPE_STRUCTURAL_EQUALITY_P (type))
8256 SET_TYPE_STRUCTURAL_EQUALITY (t);
8257 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8258 || TYPE_CANONICAL (type) != type)
8259 TYPE_CANONICAL (t)
8260 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8261 TYPE_CANONICAL (type));
8262 }
8263
8264 return t;
8265 }
8266
8267 /* Create a complex type whose components are COMPONENT_TYPE. */
8268
8269 tree
8270 build_complex_type (tree component_type)
8271 {
8272 tree t;
8273 inchash::hash hstate;
8274
8275 gcc_assert (INTEGRAL_TYPE_P (component_type)
8276 || SCALAR_FLOAT_TYPE_P (component_type)
8277 || FIXED_POINT_TYPE_P (component_type));
8278
8279 /* Make a node of the sort we want. */
8280 t = make_node (COMPLEX_TYPE);
8281
8282 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8283
8284 /* If we already have such a type, use the old one. */
8285 hstate.add_object (TYPE_HASH (component_type));
8286 t = type_hash_canon (hstate.end (), t);
8287
8288 if (!COMPLETE_TYPE_P (t))
8289 layout_type (t);
8290
8291 if (TYPE_CANONICAL (t) == t)
8292 {
8293 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8294 SET_TYPE_STRUCTURAL_EQUALITY (t);
8295 else if (TYPE_CANONICAL (component_type) != component_type)
8296 TYPE_CANONICAL (t)
8297 = build_complex_type (TYPE_CANONICAL (component_type));
8298 }
8299
8300 /* We need to create a name, since complex is a fundamental type. */
8301 if (! TYPE_NAME (t))
8302 {
8303 const char *name;
8304 if (component_type == char_type_node)
8305 name = "complex char";
8306 else if (component_type == signed_char_type_node)
8307 name = "complex signed char";
8308 else if (component_type == unsigned_char_type_node)
8309 name = "complex unsigned char";
8310 else if (component_type == short_integer_type_node)
8311 name = "complex short int";
8312 else if (component_type == short_unsigned_type_node)
8313 name = "complex short unsigned int";
8314 else if (component_type == integer_type_node)
8315 name = "complex int";
8316 else if (component_type == unsigned_type_node)
8317 name = "complex unsigned int";
8318 else if (component_type == long_integer_type_node)
8319 name = "complex long int";
8320 else if (component_type == long_unsigned_type_node)
8321 name = "complex long unsigned int";
8322 else if (component_type == long_long_integer_type_node)
8323 name = "complex long long int";
8324 else if (component_type == long_long_unsigned_type_node)
8325 name = "complex long long unsigned int";
8326 else
8327 name = 0;
8328
8329 if (name != 0)
8330 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8331 get_identifier (name), t);
8332 }
8333
8334 return build_qualified_type (t, TYPE_QUALS (component_type));
8335 }
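
/* A minimal usage sketch, for illustration only: complex types are built
   from their component type, and qualifiers on the component are
   propagated to the result through build_qualified_type, so a "const
   double" component yields a "const complex double".  The helper name
   below is hypothetical and the block is not compiled.  */
#if 0
static tree
example_complex_double (void)
{
  return build_complex_type (double_type_node);	/* complex double */
}
#endif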
8336
8337 /* If TYPE is a real or complex floating-point type and the target
8338 does not directly support arithmetic on TYPE then return the wider
8339 type to be used for arithmetic on TYPE. Otherwise, return
8340 NULL_TREE. */
8341
8342 tree
8343 excess_precision_type (tree type)
8344 {
8345 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8346 {
8347 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8348 switch (TREE_CODE (type))
8349 {
8350 case REAL_TYPE:
8351 switch (flt_eval_method)
8352 {
8353 case 1:
8354 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8355 return double_type_node;
8356 break;
8357 case 2:
8358 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8359 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8360 return long_double_type_node;
8361 break;
8362 default:
8363 gcc_unreachable ();
8364 }
8365 break;
8366 case COMPLEX_TYPE:
8367 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8368 return NULL_TREE;
8369 switch (flt_eval_method)
8370 {
8371 case 1:
8372 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8373 return complex_double_type_node;
8374 break;
8375 case 2:
8376 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8377 || (TYPE_MODE (TREE_TYPE (type))
8378 == TYPE_MODE (double_type_node)))
8379 return complex_long_double_type_node;
8380 break;
8381 default:
8382 gcc_unreachable ();
8383 }
8384 break;
8385 default:
8386 break;
8387 }
8388 }
8389 return NULL_TREE;
8390 }
8391 \f
8392 /* Return OP, stripped of any conversions to wider types as much as is safe.
8393 Converting the value back to OP's type makes a value equivalent to OP.
8394
8395 If FOR_TYPE is nonzero, we return a value which, if converted to
8396 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8397
8398 OP must have integer, real or enumeral type. Pointers are not allowed!
8399
8400 There are some cases where the obvious value we could return
8401 would regenerate to OP if converted to OP's type,
8402 but would not extend like OP to wider types.
8403 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8404 For example, if OP is (unsigned short)(signed char)-1,
8405 we avoid returning (signed char)-1 if FOR_TYPE is int,
8406 even though extending that to an unsigned short would regenerate OP,
8407 since the result of extending (signed char)-1 to (int)
8408 is different from (int) OP. */
8409
8410 tree
8411 get_unwidened (tree op, tree for_type)
8412 {
8413 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8414 tree type = TREE_TYPE (op);
8415 unsigned final_prec
8416 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8417 int uns
8418 = (for_type != 0 && for_type != type
8419 && final_prec > TYPE_PRECISION (type)
8420 && TYPE_UNSIGNED (type));
8421 tree win = op;
8422
8423 while (CONVERT_EXPR_P (op))
8424 {
8425 int bitschange;
8426
8427 /* TYPE_PRECISION on vector types has different meaning
8428 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8429 so avoid them here. */
8430 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8431 break;
8432
8433 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8434 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8435
8436 /* Truncations are many-one so cannot be removed.
8437 Unless we are later going to truncate down even farther. */
8438 if (bitschange < 0
8439 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8440 break;
8441
8442 /* See what's inside this conversion. If we decide to strip it,
8443 we will set WIN. */
8444 op = TREE_OPERAND (op, 0);
8445
8446 /* If we have not stripped any zero-extensions (uns is 0),
8447 we can strip any kind of extension.
8448 If we have previously stripped a zero-extension,
8449 only zero-extensions can safely be stripped.
8450 Any extension can be stripped if the bits it would produce
8451 are all going to be discarded later by truncating to FOR_TYPE. */
8452
8453 if (bitschange > 0)
8454 {
8455 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8456 win = op;
8457 /* TYPE_UNSIGNED says whether this is a zero-extension.
8458 Let's avoid computing it if it does not affect WIN
8459 and if UNS will not be needed again. */
8460 if ((uns
8461 || CONVERT_EXPR_P (op))
8462 && TYPE_UNSIGNED (TREE_TYPE (op)))
8463 {
8464 uns = 1;
8465 win = op;
8466 }
8467 }
8468 }
8469
8470 /* If we finally reach a constant see if it fits in for_type and
8471 in that case convert it. */
8472 if (for_type
8473 && TREE_CODE (win) == INTEGER_CST
8474 && TREE_TYPE (win) != for_type
8475 && int_fits_type_p (win, for_type))
8476 win = fold_convert (for_type, win);
8477
8478 return win;
8479 }
8480 \f
8481 /* Return OP or a simpler expression for a narrower value
8482 which can be sign-extended or zero-extended to give back OP.
8483 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8484 or 0 if the value should be sign-extended. */
8485
8486 tree
8487 get_narrower (tree op, int *unsignedp_ptr)
8488 {
8489 int uns = 0;
8490 int first = 1;
8491 tree win = op;
8492 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8493
8494 while (TREE_CODE (op) == NOP_EXPR)
8495 {
8496 int bitschange
8497 = (TYPE_PRECISION (TREE_TYPE (op))
8498 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8499
8500 /* Truncations are many-one so cannot be removed. */
8501 if (bitschange < 0)
8502 break;
8503
8504 /* See what's inside this conversion. If we decide to strip it,
8505 we will set WIN. */
8506
8507 if (bitschange > 0)
8508 {
8509 op = TREE_OPERAND (op, 0);
8510 /* An extension: the outermost one can be stripped,
8511 but remember whether it is zero or sign extension. */
8512 if (first)
8513 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8514 /* Otherwise, if a sign extension has been stripped,
8515 only sign extensions can now be stripped;
8516 if a zero extension has been stripped, only zero-extensions. */
8517 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8518 break;
8519 first = 0;
8520 }
8521 else /* bitschange == 0 */
8522 {
8523 /* A change in nominal type can always be stripped, but we must
8524 preserve the unsignedness. */
8525 if (first)
8526 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8527 first = 0;
8528 op = TREE_OPERAND (op, 0);
8529 /* Keep trying to narrow, but don't assign op to win if it
8530 would turn an integral type into something else. */
8531 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8532 continue;
8533 }
8534
8535 win = op;
8536 }
8537
8538 if (TREE_CODE (op) == COMPONENT_REF
8539 /* Since type_for_size always gives an integer type. */
8540 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8541 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8542 /* Ensure field is laid out already. */
8543 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8544 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8545 {
8546 unsigned HOST_WIDE_INT innerprec
8547 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8548 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8549 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8550 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8551
8552 /* We can get this structure field in a narrower type that fits it,
8553 but the resulting extension to its nominal type (a fullword type)
8554 must satisfy the same conditions as for other extensions.
8555
8556 Do this only for fields that are aligned (not bit-fields),
8557 because when bit-field insns will be used there is no
8558 advantage in doing this. */
8559
8560 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8561 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8562 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8563 && type != 0)
8564 {
8565 if (first)
8566 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8567 win = fold_convert (type, op);
8568 }
8569 }
8570
8571 *unsignedp_ptr = uns;
8572 return win;
8573 }
8574 \f
8575 /* Returns true if integer constant C has a value that is permissible
8576 for type TYPE (an INTEGER_TYPE). */
8577
8578 bool
8579 int_fits_type_p (const_tree c, const_tree type)
8580 {
8581 tree type_low_bound, type_high_bound;
8582 bool ok_for_low_bound, ok_for_high_bound;
8583 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8584
8585 retry:
8586 type_low_bound = TYPE_MIN_VALUE (type);
8587 type_high_bound = TYPE_MAX_VALUE (type);
8588
8589 /* If at least one bound of the type is a constant integer, we can check
8590 ourselves and maybe make a decision. If no such decision is possible, but
8591 this type is a subtype, try checking against that. Otherwise, use
8592 fits_to_tree_p, which checks against the precision.
8593
8594 Compute the status for each possibly constant bound, returning false as
8595 soon as one of them is known not to hold. Use ok_for_xxx_bound to record
8596 whether the corresponding bound is a constant that C is known to
8597 satisfy. */
8598
8599 /* Check if c >= type_low_bound. */
8600 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8601 {
8602 if (tree_int_cst_lt (c, type_low_bound))
8603 return false;
8604 ok_for_low_bound = true;
8605 }
8606 else
8607 ok_for_low_bound = false;
8608
8609 /* Check if c <= type_high_bound. */
8610 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8611 {
8612 if (tree_int_cst_lt (type_high_bound, c))
8613 return false;
8614 ok_for_high_bound = true;
8615 }
8616 else
8617 ok_for_high_bound = false;
8618
8619 /* If the constant fits both bounds, the result is known. */
8620 if (ok_for_low_bound && ok_for_high_bound)
8621 return true;
8622
8623 /* Perform some generic filtering which may allow making a decision
8624 even if the bounds are not constant. First, negative integers
8625 never fit in unsigned types. */
8626 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8627 return false;
8628
8629 /* Second, narrower types always fit in wider ones. */
8630 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8631 return true;
8632
8633 /* Third, unsigned integers with top bit set never fit signed types. */
8634 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8635 {
8636 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8637 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8638 {
8639 /* When a tree_cst is converted to a wide-int, the precision
8640 is taken from the type. However, if the precision of the
8641 mode underneath the type is smaller than that, it is
8642 possible that the value will not fit. The test below
8643 fails if any bit is set between the sign bit of the
8644 underlying mode and the top bit of the type. */
8645 if (wi::ne_p (wi::zext (c, prec - 1), c))
8646 return false;
8647 }
8648 else if (wi::neg_p (c))
8649 return false;
8650 }
8651
8652 /* If we haven't been able to decide at this point, there is nothing more we
8653 can check ourselves here. Look at the base type if we have one and it
8654 has the same precision. */
8655 if (TREE_CODE (type) == INTEGER_TYPE
8656 && TREE_TYPE (type) != 0
8657 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8658 {
8659 type = TREE_TYPE (type);
8660 goto retry;
8661 }
8662
8663 /* Or to fits_to_tree_p, if nothing else. */
8664 return wi::fits_to_tree_p (c, type);
8665 }
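
/* A minimal usage sketch, for illustration only: the constant 300 does not
   fit an 8-bit unsigned type (maximum 255) but does fit a 16-bit one.  The
   helper name below is hypothetical and the block is not compiled.  */
#if 0
static void
example_int_fits_type_p (void)
{
  tree c = size_int (300);
  gcc_assert (!int_fits_type_p (c, build_nonstandard_integer_type (8, 1)));
  gcc_assert (int_fits_type_p (c, build_nonstandard_integer_type (16, 1)));
}
#endif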
8666
8667 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8668 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8669 represented (assuming two's-complement arithmetic) within the bit
8670 precision of the type are returned instead. */
8671
8672 void
8673 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8674 {
8675 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8676 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8677 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8678 else
8679 {
8680 if (TYPE_UNSIGNED (type))
8681 mpz_set_ui (min, 0);
8682 else
8683 {
8684 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8685 wi::to_mpz (mn, min, SIGNED);
8686 }
8687 }
8688
8689 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8690 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8691 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8692 else
8693 {
8694 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8695 wi::to_mpz (mn, max, TYPE_SIGN (type));
8696 }
8697 }
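
/* A minimal usage sketch, for illustration only: the caller owns the mpz_t
   objects, so a typical use initializes them, queries the bounds and
   clears them again.  The helper name below is hypothetical and the block
   is not compiled.  */
#if 0
static void
example_type_static_bounds (const_tree type)
{
  mpz_t min, max;

  mpz_init (min);
  mpz_init (max);
  get_type_static_bounds (type, min, max);
  /* ... use min and max ... */
  mpz_clear (min);
  mpz_clear (max);
}
#endif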
8698
8699 /* Return true if VAR is an automatic variable defined in function FN. */
8700
8701 bool
8702 auto_var_in_fn_p (const_tree var, const_tree fn)
8703 {
8704 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8705 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8706 || TREE_CODE (var) == PARM_DECL)
8707 && ! TREE_STATIC (var))
8708 || TREE_CODE (var) == LABEL_DECL
8709 || TREE_CODE (var) == RESULT_DECL));
8710 }
8711
8712 /* Subprogram of following function. Called by walk_tree.
8713
8714 Return *TP if it is an automatic variable or parameter of the
8715 function passed in as DATA. */
8716
8717 static tree
8718 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8719 {
8720 tree fn = (tree) data;
8721
8722 if (TYPE_P (*tp))
8723 *walk_subtrees = 0;
8724
8725 else if (DECL_P (*tp)
8726 && auto_var_in_fn_p (*tp, fn))
8727 return *tp;
8728
8729 return NULL_TREE;
8730 }
8731
8732 /* Returns true if T is, contains, or refers to a type with variable
8733 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8734 arguments, but not the return type. If FN is nonzero, only return
8735 true if a modifier of the type or position of FN is a variable or
8736 parameter inside FN.
8737
8738 This concept is more general than that of C99 'variably modified types':
8739 in C99, a struct type is never variably modified because a VLA may not
8740 appear as a structure member. However, in GNU C, code like:
8741
8742 struct S { int i[f()]; };
8743
8744 is valid, and other languages may define similar constructs. */
8745
8746 bool
8747 variably_modified_type_p (tree type, tree fn)
8748 {
8749 tree t;
8750
8751 /* Test if T is either variable (if FN is zero) or an expression containing
8752 a variable in FN. If TYPE isn't gimplified, return true also if
8753 gimplify_one_sizepos would gimplify the expression into a local
8754 variable. */
8755 #define RETURN_TRUE_IF_VAR(T) \
8756 do { tree _t = (T); \
8757 if (_t != NULL_TREE \
8758 && _t != error_mark_node \
8759 && TREE_CODE (_t) != INTEGER_CST \
8760 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8761 && (!fn \
8762 || (!TYPE_SIZES_GIMPLIFIED (type) \
8763 && !is_gimple_sizepos (_t)) \
8764 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8765 return true; } while (0)
8766
8767 if (type == error_mark_node)
8768 return false;
8769
8770 /* If TYPE itself has variable size, it is variably modified. */
8771 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8772 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8773
8774 switch (TREE_CODE (type))
8775 {
8776 case POINTER_TYPE:
8777 case REFERENCE_TYPE:
8778 case VECTOR_TYPE:
8779 if (variably_modified_type_p (TREE_TYPE (type), fn))
8780 return true;
8781 break;
8782
8783 case FUNCTION_TYPE:
8784 case METHOD_TYPE:
8785 /* If TYPE is a function type, it is variably modified if the
8786 return type is variably modified. */
8787 if (variably_modified_type_p (TREE_TYPE (type), fn))
8788 return true;
8789 break;
8790
8791 case INTEGER_TYPE:
8792 case REAL_TYPE:
8793 case FIXED_POINT_TYPE:
8794 case ENUMERAL_TYPE:
8795 case BOOLEAN_TYPE:
8796 /* Scalar types are variably modified if their end points
8797 aren't constant. */
8798 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8799 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8800 break;
8801
8802 case RECORD_TYPE:
8803 case UNION_TYPE:
8804 case QUAL_UNION_TYPE:
8805 /* We can't see if any of the fields are variably-modified by the
8806 definition we normally use, since that would produce infinite
8807 recursion via pointers. */
8808 /* This is variably modified if some field's type is. */
8809 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8810 if (TREE_CODE (t) == FIELD_DECL)
8811 {
8812 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8813 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8814 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8815
8816 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8817 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8818 }
8819 break;
8820
8821 case ARRAY_TYPE:
8822 /* Do not call ourselves to avoid infinite recursion. This is
8823 variably modified if the element type is. */
8824 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8825 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8826 break;
8827
8828 default:
8829 break;
8830 }
8831
8832 /* The current language may have other cases to check, but in general,
8833 all other types are not variably modified. */
8834 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8835
8836 #undef RETURN_TRUE_IF_VAR
8837 }
8838
8839 /* Given a DECL or TYPE, return the scope in which it was declared, or
8840 NULL_TREE if there is no containing scope. */
8841
8842 tree
8843 get_containing_scope (const_tree t)
8844 {
8845 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8846 }
8847
8848 /* Return the innermost context enclosing DECL that is
8849 a FUNCTION_DECL, or zero if none. */
8850
8851 tree
8852 decl_function_context (const_tree decl)
8853 {
8854 tree context;
8855
8856 if (TREE_CODE (decl) == ERROR_MARK)
8857 return 0;
8858
8859 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8860 where we look up the function at runtime. Such functions always take
8861 a first argument of type 'pointer to real context'.
8862
8863 C++ should really be fixed to use DECL_CONTEXT for the real context,
8864 and use something else for the "virtual context". */
8865 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8866 context
8867 = TYPE_MAIN_VARIANT
8868 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8869 else
8870 context = DECL_CONTEXT (decl);
8871
8872 while (context && TREE_CODE (context) != FUNCTION_DECL)
8873 {
8874 if (TREE_CODE (context) == BLOCK)
8875 context = BLOCK_SUPERCONTEXT (context);
8876 else
8877 context = get_containing_scope (context);
8878 }
8879
8880 return context;
8881 }
8882
8883 /* Return the innermost context enclosing DECL that is
8884 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8885 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8886
8887 tree
8888 decl_type_context (const_tree decl)
8889 {
8890 tree context = DECL_CONTEXT (decl);
8891
8892 while (context)
8893 switch (TREE_CODE (context))
8894 {
8895 case NAMESPACE_DECL:
8896 case TRANSLATION_UNIT_DECL:
8897 return NULL_TREE;
8898
8899 case RECORD_TYPE:
8900 case UNION_TYPE:
8901 case QUAL_UNION_TYPE:
8902 return context;
8903
8904 case TYPE_DECL:
8905 case FUNCTION_DECL:
8906 context = DECL_CONTEXT (context);
8907 break;
8908
8909 case BLOCK:
8910 context = BLOCK_SUPERCONTEXT (context);
8911 break;
8912
8913 default:
8914 gcc_unreachable ();
8915 }
8916
8917 return NULL_TREE;
8918 }
8919
8920 /* CALL is a CALL_EXPR. Return the declaration for the function
8921 called, or NULL_TREE if the called function cannot be
8922 determined. */
8923
8924 tree
8925 get_callee_fndecl (const_tree call)
8926 {
8927 tree addr;
8928
8929 if (call == error_mark_node)
8930 return error_mark_node;
8931
8932 /* It's invalid to call this function with anything but a
8933 CALL_EXPR. */
8934 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8935
8936 /* The first operand to the CALL is the address of the function
8937 called. */
8938 addr = CALL_EXPR_FN (call);
8939
8940 /* If there is no function, return early. */
8941 if (addr == NULL_TREE)
8942 return NULL_TREE;
8943
8944 STRIP_NOPS (addr);
8945
8946 /* If this is a readonly function pointer, extract its initial value. */
8947 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8948 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8949 && DECL_INITIAL (addr))
8950 addr = DECL_INITIAL (addr);
8951
8952 /* If the address is just `&f' for some function `f', then we know
8953 that `f' is being called. */
8954 if (TREE_CODE (addr) == ADDR_EXPR
8955 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8956 return TREE_OPERAND (addr, 0);
8957
8958 /* We couldn't figure out what was being called. */
8959 return NULL_TREE;
8960 }
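
/* A minimal usage sketch, for illustration only: a pass that wants to
   special-case direct calls checks the result for NULL_TREE, which is
   returned for indirect calls through a pointer that cannot be resolved.
   The helper name below is hypothetical and the block is not compiled.  */
#if 0
static bool
example_is_direct_call (tree call)
{
  tree fndecl = get_callee_fndecl (call);
  return fndecl != NULL_TREE && TREE_CODE (fndecl) == FUNCTION_DECL;
}
#endif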
8961
8962 /* Print debugging information about tree nodes generated during the compile,
8963 and any language-specific information. */
8964
8965 void
8966 dump_tree_statistics (void)
8967 {
8968 if (GATHER_STATISTICS)
8969 {
8970 int i;
8971 int total_nodes, total_bytes;
8972 fprintf (stderr, "Kind Nodes Bytes\n");
8973 fprintf (stderr, "---------------------------------------\n");
8974 total_nodes = total_bytes = 0;
8975 for (i = 0; i < (int) all_kinds; i++)
8976 {
8977 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8978 tree_node_counts[i], tree_node_sizes[i]);
8979 total_nodes += tree_node_counts[i];
8980 total_bytes += tree_node_sizes[i];
8981 }
8982 fprintf (stderr, "---------------------------------------\n");
8983 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8984 fprintf (stderr, "---------------------------------------\n");
8985 fprintf (stderr, "Code Nodes\n");
8986 fprintf (stderr, "----------------------------\n");
8987 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8988 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8989 tree_code_counts[i]);
8990 fprintf (stderr, "----------------------------\n");
8991 ssanames_print_statistics ();
8992 phinodes_print_statistics ();
8993 }
8994 else
8995 fprintf (stderr, "(No per-node statistics)\n");
8996
8997 print_type_hash_statistics ();
8998 print_debug_expr_statistics ();
8999 print_value_expr_statistics ();
9000 lang_hooks.print_statistics ();
9001 }
9002 \f
9003 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9004
9005 /* Fold the low BITS bits of VALUE into the running crc32 CHKSUM. */
9006
9007 static unsigned
9008 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9009 {
9010 unsigned ix;
9011
9012 for (ix = bits; ix--; value <<= 1)
9013 {
9014 unsigned feedback;
9015
9016 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9017 chksum <<= 1;
9018 chksum ^= feedback;
9019 }
9020 return chksum;
9021 }
9022
9023 /* Generate a crc32 of a 32-bit unsigned. */
9024
9025 unsigned
9026 crc32_unsigned (unsigned chksum, unsigned value)
9027 {
9028 return crc32_unsigned_bits (chksum, value, 32);
9029 }
9030
9031 /* Generate a crc32 of a byte. */
9032
9033 unsigned
9034 crc32_byte (unsigned chksum, char byte)
9035 {
9036 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9037 }
9038
9039 /* Generate a crc32 of a string. */
9040
9041 unsigned
9042 crc32_string (unsigned chksum, const char *string)
9043 {
9044 do
9045 {
9046 chksum = crc32_byte (chksum, *string);
9047 }
9048 while (*string++);
9049 return chksum;
9050 }
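
/* A minimal usage sketch, for illustration only: the crc32 helpers can be
   chained, feeding the running checksum from one call into the next, for
   example to mix a name with a 32-bit value.  The helper name below is
   hypothetical and the block is not compiled.  */
#if 0
static unsigned
example_mixed_crc (const char *name, unsigned value)
{
  unsigned chksum = crc32_string (0, name);
  return crc32_unsigned (chksum, value);
}
#endif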
9051
9052 /* P is a string that will be used in a symbol. Mask out any characters
9053 that are not valid in that context. */
9054
9055 void
9056 clean_symbol_name (char *p)
9057 {
9058 for (; *p; p++)
9059 if (! (ISALNUM (*p)
9060 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9061 || *p == '$'
9062 #endif
9063 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9064 || *p == '.'
9065 #endif
9066 ))
9067 *p = '_';
9068 }
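
/* A minimal usage sketch, for illustration only: on a target where neither
   '$' nor '.' is allowed in labels, a file name such as "foo-bar.c" becomes
   "foo_bar_c".  The buffer is modified in place, so it must be writable.
   The helper name below is hypothetical and the block is not compiled.  */
#if 0
static void
example_clean_symbol_name (void)
{
  char buf[] = "foo-bar.c";
  clean_symbol_name (buf);	/* buf is now "foo_bar_c" on such a target */
}
#endif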
9069
9070 /* Generate a name for a special-purpose function.
9071 The generated name may need to be unique across the whole link.
9072 Changes to this function may also require corresponding changes to
9073 xstrdup_mask_random.
9074 TYPE is some string to identify the purpose of this function to the
9075 linker or collect2; it must start with an uppercase letter,
9076 one of:
9077 I - for constructors
9078 D - for destructors
9079 N - for C++ anonymous namespaces
9080 F - for DWARF unwind frame information. */
9081
9082 tree
9083 get_file_function_name (const char *type)
9084 {
9085 char *buf;
9086 const char *p;
9087 char *q;
9088
9089 /* If we already have a name we know to be unique, just use that. */
9090 if (first_global_object_name)
9091 p = q = ASTRDUP (first_global_object_name);
9092 /* If the target is handling the constructors/destructors, they
9093 will be local to this file and the name is only necessary for
9094 debugging purposes.
9095 We also assign sub_I and sub_D suffixes to constructors called from
9096 the global static constructors. These are always local. */
9097 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9098 || (strncmp (type, "sub_", 4) == 0
9099 && (type[4] == 'I' || type[4] == 'D')))
9100 {
9101 const char *file = main_input_filename;
9102 if (! file)
9103 file = LOCATION_FILE (input_location);
9104 /* Just use the file's basename, because the full pathname
9105 might be quite long. */
9106 p = q = ASTRDUP (lbasename (file));
9107 }
9108 else
9109 {
9110 /* Otherwise, the name must be unique across the entire link.
9111 We don't have anything that we know to be unique to this translation
9112 unit, so use what we do have and throw in some randomness. */
9113 unsigned len;
9114 const char *name = weak_global_object_name;
9115 const char *file = main_input_filename;
9116
9117 if (! name)
9118 name = "";
9119 if (! file)
9120 file = LOCATION_FILE (input_location);
9121
9122 len = strlen (file);
9123 q = (char *) alloca (9 + 17 + len + 1);
9124 memcpy (q, file, len + 1);
9125
9126 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9127 crc32_string (0, name), get_random_seed (false));
9128
9129 p = q;
9130 }
9131
9132 clean_symbol_name (q);
9133 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9134 + strlen (type));
9135
9136 /* Set up the name of the file-level functions we may need.
9137 Use a global object (which is already required to be unique over
9138 the program) rather than the file name (which imposes extra
9139 constraints). */
9140 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9141
9142 return get_identifier (buf);
9143 }
9144 \f
9145 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9146
9147 /* Complain that the tree code of NODE does not match the expected 0
9148 terminated list of trailing codes. The trailing code list can be
9149 empty, for a more vague error message. FILE, LINE, and FUNCTION
9150 are of the caller. */
9151
9152 void
9153 tree_check_failed (const_tree node, const char *file,
9154 int line, const char *function, ...)
9155 {
9156 va_list args;
9157 const char *buffer;
9158 unsigned length = 0;
9159 enum tree_code code;
9160
9161 va_start (args, function);
9162 while ((code = (enum tree_code) va_arg (args, int)))
9163 length += 4 + strlen (get_tree_code_name (code));
9164 va_end (args);
9165 if (length)
9166 {
9167 char *tmp;
9168 va_start (args, function);
9169 length += strlen ("expected ");
9170 buffer = tmp = (char *) alloca (length);
9171 length = 0;
9172 while ((code = (enum tree_code) va_arg (args, int)))
9173 {
9174 const char *prefix = length ? " or " : "expected ";
9175
9176 strcpy (tmp + length, prefix);
9177 length += strlen (prefix);
9178 strcpy (tmp + length, get_tree_code_name (code));
9179 length += strlen (get_tree_code_name (code));
9180 }
9181 va_end (args);
9182 }
9183 else
9184 buffer = "unexpected node";
9185
9186 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9187 buffer, get_tree_code_name (TREE_CODE (node)),
9188 function, trim_filename (file), line);
9189 }
9190
9191 /* Complain that the tree code of NODE does match the expected 0
9192 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9193 the caller. */
9194
9195 void
9196 tree_not_check_failed (const_tree node, const char *file,
9197 int line, const char *function, ...)
9198 {
9199 va_list args;
9200 char *buffer;
9201 unsigned length = 0;
9202 enum tree_code code;
9203
9204 va_start (args, function);
9205 while ((code = (enum tree_code) va_arg (args, int)))
9206 length += 4 + strlen (get_tree_code_name (code));
9207 va_end (args);
9208 va_start (args, function);
9209 buffer = (char *) alloca (length);
9210 length = 0;
9211 while ((code = (enum tree_code) va_arg (args, int)))
9212 {
9213 if (length)
9214 {
9215 strcpy (buffer + length, " or ");
9216 length += 4;
9217 }
9218 strcpy (buffer + length, get_tree_code_name (code));
9219 length += strlen (get_tree_code_name (code));
9220 }
9221 va_end (args);
9222
9223 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9224 buffer, get_tree_code_name (TREE_CODE (node)),
9225 function, trim_filename (file), line);
9226 }
9227
9228 /* Similar to tree_check_failed, except that we check for a class of tree
9229 code, given in CL. */
9230
9231 void
9232 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9233 const char *file, int line, const char *function)
9234 {
9235 internal_error
9236 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9237 TREE_CODE_CLASS_STRING (cl),
9238 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9239 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9240 }
9241
9242 /* Similar to tree_check_failed, except that instead of specifying a
9243 dozen codes, use the knowledge that they're all sequential. */
9244
9245 void
9246 tree_range_check_failed (const_tree node, const char *file, int line,
9247 const char *function, enum tree_code c1,
9248 enum tree_code c2)
9249 {
9250 char *buffer;
9251 unsigned length = 0;
9252 unsigned int c;
9253
9254 for (c = c1; c <= c2; ++c)
9255 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9256
9257 length += strlen ("expected ");
9258 buffer = (char *) alloca (length);
9259 length = 0;
9260
9261 for (c = c1; c <= c2; ++c)
9262 {
9263 const char *prefix = length ? " or " : "expected ";
9264
9265 strcpy (buffer + length, prefix);
9266 length += strlen (prefix);
9267 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9268 length += strlen (get_tree_code_name ((enum tree_code) c));
9269 }
9270
9271 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9272 buffer, get_tree_code_name (TREE_CODE (node)),
9273 function, trim_filename (file), line);
9274 }
9275
9276
9277 /* Similar to tree_check_failed, except that we check that a tree does
9278 not have the specified code, given in CL. */
9279
9280 void
9281 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9282 const char *file, int line, const char *function)
9283 {
9284 internal_error
9285 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9286 TREE_CODE_CLASS_STRING (cl),
9287 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9288 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9289 }
9290
9291
9292 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9293
9294 void
9295 omp_clause_check_failed (const_tree node, const char *file, int line,
9296 const char *function, enum omp_clause_code code)
9297 {
9298 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9299 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9300 function, trim_filename (file), line);
9301 }
9302
9303
9304 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9305
9306 void
9307 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9308 const char *function, enum omp_clause_code c1,
9309 enum omp_clause_code c2)
9310 {
9311 char *buffer;
9312 unsigned length = 0;
9313 unsigned int c;
9314
9315 for (c = c1; c <= c2; ++c)
9316 length += 4 + strlen (omp_clause_code_name[c]);
9317
9318 length += strlen ("expected ");
9319 buffer = (char *) alloca (length);
9320 length = 0;
9321
9322 for (c = c1; c <= c2; ++c)
9323 {
9324 const char *prefix = length ? " or " : "expected ";
9325
9326 strcpy (buffer + length, prefix);
9327 length += strlen (prefix);
9328 strcpy (buffer + length, omp_clause_code_name[c]);
9329 length += strlen (omp_clause_code_name[c]);
9330 }
9331
9332 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9333 buffer, omp_clause_code_name[TREE_CODE (node)],
9334 function, trim_filename (file), line);
9335 }
9336
9337
9338 #undef DEFTREESTRUCT
9339 #define DEFTREESTRUCT(VAL, NAME) NAME,
9340
9341 static const char *ts_enum_names[] = {
9342 #include "treestruct.def"
9343 };
9344 #undef DEFTREESTRUCT
9345
9346 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9347
9348 /* Similar to tree_class_check_failed, except that we check for
9349 whether CODE contains the tree structure identified by EN. */
9350
9351 void
9352 tree_contains_struct_check_failed (const_tree node,
9353 const enum tree_node_structure_enum en,
9354 const char *file, int line,
9355 const char *function)
9356 {
9357 internal_error
9358 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9359 TS_ENUM_NAME (en),
9360 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9361 }
9362
9363
9364 /* Similar to above, except that the check is for the bounds of a
9365 tree_int_cst's (dynamically sized) vector of elements. */
9366
9367 void
9368 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9369 const char *function)
9370 {
9371 internal_error
9372 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9373 idx + 1, len, function, trim_filename (file), line);
9374 }
9375
9376 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9377 (dynamically sized) vector. */
9378
9379 void
9380 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9381 const char *function)
9382 {
9383 internal_error
9384 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9385 idx + 1, len, function, trim_filename (file), line);
9386 }
9387
9388 /* Similar to above, except that the check is for the bounds of the operand
9389 vector of an expression node EXP. */
9390
9391 void
9392 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9393 int line, const char *function)
9394 {
9395 enum tree_code code = TREE_CODE (exp);
9396 internal_error
9397 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9398 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9399 function, trim_filename (file), line);
9400 }
9401
9402 /* Similar to above, except that the check is for the number of
9403 operands of an OMP_CLAUSE node. */
9404
9405 void
9406 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9407 int line, const char *function)
9408 {
9409 internal_error
9410 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9411 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9412 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9413 trim_filename (file), line);
9414 }
9415 #endif /* ENABLE_TREE_CHECKING */
9416 \f
9417 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9418 and mapped to the machine mode MODE. Initialize its fields and build
9419 the information necessary for debugging output. */
9420
9421 static tree
9422 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9423 {
9424 tree t;
9425 inchash::hash hstate;
9426
9427 t = make_node (VECTOR_TYPE);
9428 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9429 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9430 SET_TYPE_MODE (t, mode);
9431
9432 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9433 SET_TYPE_STRUCTURAL_EQUALITY (t);
9434 else if (TYPE_CANONICAL (innertype) != innertype
9435 || mode != VOIDmode)
9436 TYPE_CANONICAL (t)
9437 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9438
9439 layout_type (t);
9440
9441 hstate.add_wide_int (VECTOR_TYPE);
9442 hstate.add_wide_int (nunits);
9443 hstate.add_wide_int (mode);
9444 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9445 t = type_hash_canon (hstate.end (), t);
9446
9447 /* We have built a main variant, based on the main variant of the
9448 inner type. Use it to build the variant we return. */
9449 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9450 && TREE_TYPE (t) != innertype)
9451 return build_type_attribute_qual_variant (t,
9452 TYPE_ATTRIBUTES (innertype),
9453 TYPE_QUALS (innertype));
9454
9455 return t;
9456 }
9457
9458 static tree
9459 make_or_reuse_type (unsigned size, int unsignedp)
9460 {
9461 int i;
9462
9463 if (size == INT_TYPE_SIZE)
9464 return unsignedp ? unsigned_type_node : integer_type_node;
9465 if (size == CHAR_TYPE_SIZE)
9466 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9467 if (size == SHORT_TYPE_SIZE)
9468 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9469 if (size == LONG_TYPE_SIZE)
9470 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9471 if (size == LONG_LONG_TYPE_SIZE)
9472 return (unsignedp ? long_long_unsigned_type_node
9473 : long_long_integer_type_node);
9474
9475 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9476 if (size == int_n_data[i].bitsize
9477 && int_n_enabled_p[i])
9478 return (unsignedp ? int_n_trees[i].unsigned_type
9479 : int_n_trees[i].signed_type);
9480
9481 if (unsignedp)
9482 return make_unsigned_type (size);
9483 else
9484 return make_signed_type (size);
9485 }
9486
9487 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9488
9489 static tree
9490 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9491 {
9492 if (satp)
9493 {
9494 if (size == SHORT_FRACT_TYPE_SIZE)
9495 return unsignedp ? sat_unsigned_short_fract_type_node
9496 : sat_short_fract_type_node;
9497 if (size == FRACT_TYPE_SIZE)
9498 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9499 if (size == LONG_FRACT_TYPE_SIZE)
9500 return unsignedp ? sat_unsigned_long_fract_type_node
9501 : sat_long_fract_type_node;
9502 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9503 return unsignedp ? sat_unsigned_long_long_fract_type_node
9504 : sat_long_long_fract_type_node;
9505 }
9506 else
9507 {
9508 if (size == SHORT_FRACT_TYPE_SIZE)
9509 return unsignedp ? unsigned_short_fract_type_node
9510 : short_fract_type_node;
9511 if (size == FRACT_TYPE_SIZE)
9512 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9513 if (size == LONG_FRACT_TYPE_SIZE)
9514 return unsignedp ? unsigned_long_fract_type_node
9515 : long_fract_type_node;
9516 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9517 return unsignedp ? unsigned_long_long_fract_type_node
9518 : long_long_fract_type_node;
9519 }
9520
9521 return make_fract_type (size, unsignedp, satp);
9522 }
9523
9524 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9525
9526 static tree
9527 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9528 {
9529 if (satp)
9530 {
9531 if (size == SHORT_ACCUM_TYPE_SIZE)
9532 return unsignedp ? sat_unsigned_short_accum_type_node
9533 : sat_short_accum_type_node;
9534 if (size == ACCUM_TYPE_SIZE)
9535 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9536 if (size == LONG_ACCUM_TYPE_SIZE)
9537 return unsignedp ? sat_unsigned_long_accum_type_node
9538 : sat_long_accum_type_node;
9539 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9540 return unsignedp ? sat_unsigned_long_long_accum_type_node
9541 : sat_long_long_accum_type_node;
9542 }
9543 else
9544 {
9545 if (size == SHORT_ACCUM_TYPE_SIZE)
9546 return unsignedp ? unsigned_short_accum_type_node
9547 : short_accum_type_node;
9548 if (size == ACCUM_TYPE_SIZE)
9549 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9550 if (size == LONG_ACCUM_TYPE_SIZE)
9551 return unsignedp ? unsigned_long_accum_type_node
9552 : long_accum_type_node;
9553 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9554 return unsignedp ? unsigned_long_long_accum_type_node
9555 : long_long_accum_type_node;
9556 }
9557
9558 return make_accum_type (size, unsignedp, satp);
9559 }
9560
9561
9562 /* Create an atomic variant node for TYPE. This routine is called
9563 during initialization of data types to create the 5 basic atomic
9564 types. The generic build_variant_type function requires these to
9565 already be set up in order to function properly, so cannot be
9566 called from there. If ALIGN is non-zero, then ensure alignment is
9567 overridden to this value. */
9568
9569 static tree
9570 build_atomic_base (tree type, unsigned int align)
9571 {
9572 tree t;
9573
9574 /* Make sure it's not already registered.  */
9575 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9576 return t;
9577
9578 t = build_variant_type_copy (type);
9579 set_type_quals (t, TYPE_QUAL_ATOMIC);
9580
9581 if (align)
9582 TYPE_ALIGN (t) = align;
9583
9584 return t;
9585 }
9586
9587 /* Create nodes for all integer types (and error_mark_node) using the sizes
9588 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9589 SHORT_DOUBLE specifies whether double should be of the same precision
9590 as float. */
9591
9592 void
9593 build_common_tree_nodes (bool signed_char, bool short_double)
9594 {
9595 int i;
9596
9597 error_mark_node = make_node (ERROR_MARK);
9598 TREE_TYPE (error_mark_node) = error_mark_node;
9599
9600 initialize_sizetypes ();
9601
9602 /* Define both `signed char' and `unsigned char'. */
9603 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9604 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9605 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9606 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9607
9608 /* Define `char', which is like either `signed char' or `unsigned char'
9609 but not the same as either. */
9610 char_type_node
9611 = (signed_char
9612 ? make_signed_type (CHAR_TYPE_SIZE)
9613 : make_unsigned_type (CHAR_TYPE_SIZE));
9614 TYPE_STRING_FLAG (char_type_node) = 1;
9615
9616 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9617 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9618 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9619 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9620 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9621 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9622 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9623 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9624
9625 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9626 {
9627 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9628 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9629 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9630 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9631
9632 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE)
9633 {
9634 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9635 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9636 }
9637 }
9638
9639 /* Define a boolean type. This type only represents boolean values but
9640 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9641 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9642 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9643 TYPE_PRECISION (boolean_type_node) = 1;
9644 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9645
9646 /* Define what type to use for size_t. */
9647 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9648 size_type_node = unsigned_type_node;
9649 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9650 size_type_node = long_unsigned_type_node;
9651 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9652 size_type_node = long_long_unsigned_type_node;
9653 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9654 size_type_node = short_unsigned_type_node;
9655 else
9656 {
9657 int i;
9658
9659 size_type_node = NULL_TREE;
9660 for (i = 0; i < NUM_INT_N_ENTS; i++)
9661 if (int_n_enabled_p[i])
9662 {
9663 char name[50];
9664 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9665
9666 if (strcmp (name, SIZE_TYPE) == 0)
9667 {
9668 size_type_node = int_n_trees[i].unsigned_type;
9669 }
9670 }
9671 if (size_type_node == NULL_TREE)
9672 gcc_unreachable ();
9673 }
9674
9675 /* Fill in the rest of the sized types. Reuse existing type nodes
9676 when possible. */
9677 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9678 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9679 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9680 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9681 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9682
9683 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9684 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9685 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9686 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9687 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9688
9689 /* Don't call build_qualified_type for atomics.  That routine does
9690 special processing for atomics, and until they are initialized
9691 it's better not to make that call.
9692
9693 Check to see if there is a target override for atomic types. */
9694
9695 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9696 targetm.atomic_align_for_mode (QImode));
9697 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9698 targetm.atomic_align_for_mode (HImode));
9699 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9700 targetm.atomic_align_for_mode (SImode));
9701 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9702 targetm.atomic_align_for_mode (DImode));
9703 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9704 targetm.atomic_align_for_mode (TImode));
9705
9706 access_public_node = get_identifier ("public");
9707 access_protected_node = get_identifier ("protected");
9708 access_private_node = get_identifier ("private");
9709
9710 /* Define these next since types below may use them.  */
9711 integer_zero_node = build_int_cst (integer_type_node, 0);
9712 integer_one_node = build_int_cst (integer_type_node, 1);
9713 integer_three_node = build_int_cst (integer_type_node, 3);
9714 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9715
9716 size_zero_node = size_int (0);
9717 size_one_node = size_int (1);
9718 bitsize_zero_node = bitsize_int (0);
9719 bitsize_one_node = bitsize_int (1);
9720 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9721
9722 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9723 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9724
9725 void_type_node = make_node (VOID_TYPE);
9726 layout_type (void_type_node);
9727
9728 /* We are not going to have real types in C with less than byte alignment,
9729 so we might as well not have any types that claim to have it. */
9730 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9731 TYPE_USER_ALIGN (void_type_node) = 0;
9732
9733 void_node = make_node (VOID_CST);
9734 TREE_TYPE (void_node) = void_type_node;
9735
9736 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9737 layout_type (TREE_TYPE (null_pointer_node));
9738
9739 ptr_type_node = build_pointer_type (void_type_node);
9740 const_ptr_type_node
9741 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9742 fileptr_type_node = ptr_type_node;
9743
9744 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9745
9746 float_type_node = make_node (REAL_TYPE);
9747 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9748 layout_type (float_type_node);
9749
9750 double_type_node = make_node (REAL_TYPE);
9751 if (short_double)
9752 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9753 else
9754 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9755 layout_type (double_type_node);
9756
9757 long_double_type_node = make_node (REAL_TYPE);
9758 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9759 layout_type (long_double_type_node);
9760
9761 float_ptr_type_node = build_pointer_type (float_type_node);
9762 double_ptr_type_node = build_pointer_type (double_type_node);
9763 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9764 integer_ptr_type_node = build_pointer_type (integer_type_node);
9765
9766 /* Fixed size integer types. */
9767 uint16_type_node = make_or_reuse_type (16, 1);
9768 uint32_type_node = make_or_reuse_type (32, 1);
9769 uint64_type_node = make_or_reuse_type (64, 1);
9770
9771 /* Decimal float types. */
9772 dfloat32_type_node = make_node (REAL_TYPE);
9773 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9774 layout_type (dfloat32_type_node);
9775 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9776 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9777
9778 dfloat64_type_node = make_node (REAL_TYPE);
9779 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9780 layout_type (dfloat64_type_node);
9781 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9782 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9783
9784 dfloat128_type_node = make_node (REAL_TYPE);
9785 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9786 layout_type (dfloat128_type_node);
9787 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9788 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9789
9790 complex_integer_type_node = build_complex_type (integer_type_node);
9791 complex_float_type_node = build_complex_type (float_type_node);
9792 complex_double_type_node = build_complex_type (double_type_node);
9793 complex_long_double_type_node = build_complex_type (long_double_type_node);
9794
9795 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9796 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9797 sat_ ## KIND ## _type_node = \
9798 make_sat_signed_ ## KIND ## _type (SIZE); \
9799 sat_unsigned_ ## KIND ## _type_node = \
9800 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9801 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9802 unsigned_ ## KIND ## _type_node = \
9803 make_unsigned_ ## KIND ## _type (SIZE);
9804
9805 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9806 sat_ ## WIDTH ## KIND ## _type_node = \
9807 make_sat_signed_ ## KIND ## _type (SIZE); \
9808 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9809 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9810 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9811 unsigned_ ## WIDTH ## KIND ## _type_node = \
9812 make_unsigned_ ## KIND ## _type (SIZE);
9813
9814 /* Make fixed-point type nodes based on four different widths. */
9815 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9816 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9817 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9818 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9819 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9820
9821 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9822 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9823 NAME ## _type_node = \
9824 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9825 u ## NAME ## _type_node = \
9826 make_or_reuse_unsigned_ ## KIND ## _type \
9827 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9828 sat_ ## NAME ## _type_node = \
9829 make_or_reuse_sat_signed_ ## KIND ## _type \
9830 (GET_MODE_BITSIZE (MODE ## mode)); \
9831 sat_u ## NAME ## _type_node = \
9832 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9833 (GET_MODE_BITSIZE (U ## MODE ## mode));
9834
9835 /* Fixed-point type and mode nodes. */
9836 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9837 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9838 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9839 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9840 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9841 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9842 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9843 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9844 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9845 MAKE_FIXED_MODE_NODE (accum, da, DA)
9846 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9847
9848 {
9849 tree t = targetm.build_builtin_va_list ();
9850
9851 /* Many back-ends define record types without setting TYPE_NAME.
9852 If we copied the record type here, we'd keep the original
9853 record type without a name. This breaks name mangling. So,
9854 don't copy record types and let c_common_nodes_and_builtins()
9855 declare the type to be __builtin_va_list. */
9856 if (TREE_CODE (t) != RECORD_TYPE)
9857 t = build_variant_type_copy (t);
9858
9859 va_list_type_node = t;
9860 }
9861 }
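/* Usage sketch (a hedged illustration, not tied to a particular front
   end): a language front end normally calls this once during its
   initialization, e.g.

     build_common_tree_nodes (flag_signed_char, flag_short_double);

   after which nodes such as integer_type_node, size_type_node and
   ptr_type_node are available for building further trees.  */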
9862
9863 /* Modify DECL for given flags.
9864 TM_PURE attribute is set only on types, so the function will modify
9865 DECL's type when ECF_TM_PURE is used. */
9866
9867 void
9868 set_call_expr_flags (tree decl, int flags)
9869 {
9870 if (flags & ECF_NOTHROW)
9871 TREE_NOTHROW (decl) = 1;
9872 if (flags & ECF_CONST)
9873 TREE_READONLY (decl) = 1;
9874 if (flags & ECF_PURE)
9875 DECL_PURE_P (decl) = 1;
9876 if (flags & ECF_LOOPING_CONST_OR_PURE)
9877 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9878 if (flags & ECF_NOVOPS)
9879 DECL_IS_NOVOPS (decl) = 1;
9880 if (flags & ECF_NORETURN)
9881 TREE_THIS_VOLATILE (decl) = 1;
9882 if (flags & ECF_MALLOC)
9883 DECL_IS_MALLOC (decl) = 1;
9884 if (flags & ECF_RETURNS_TWICE)
9885 DECL_IS_RETURNS_TWICE (decl) = 1;
9886 if (flags & ECF_LEAF)
9887 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9888 NULL, DECL_ATTRIBUTES (decl));
9889 if ((flags & ECF_TM_PURE) && flag_tm)
9890 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9891 /* Looping const or pure is implied by noreturn.
9892 There is currently no way to declare looping const or looping pure alone. */
9893 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9894 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9895 }
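/* Illustrative use (FNDECL is a hypothetical FUNCTION_DECL built
   elsewhere): marking a declaration as a nothrow, leaf allocator would
   look like

     set_call_expr_flags (fndecl, ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);

   which sets DECL_IS_MALLOC, TREE_NOTHROW and adds the "leaf" attribute
   as coded above.  */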
9896
9897
9898 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9899
9900 static void
9901 local_define_builtin (const char *name, tree type, enum built_in_function code,
9902 const char *library_name, int ecf_flags)
9903 {
9904 tree decl;
9905
9906 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9907 library_name, NULL_TREE);
9908 set_call_expr_flags (decl, ecf_flags);
9909
9910 set_builtin_decl (code, decl, true);
9911 }
9912
9913 /* Call this function after instantiating all builtins that the language
9914 front end cares about. This will build the rest of the builtins
9915 and internal functions that are relied upon by the tree optimizers and
9916 the middle-end. */
9917
9918 void
9919 build_common_builtin_nodes (void)
9920 {
9921 tree tmp, ftype;
9922 int ecf_flags;
9923
9924 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9925 {
9926 ftype = build_function_type (void_type_node, void_list_node);
9927 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9928 "__builtin_unreachable",
9929 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9930 | ECF_CONST);
9931 }
9932
9933 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9934 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9935 {
9936 ftype = build_function_type_list (ptr_type_node,
9937 ptr_type_node, const_ptr_type_node,
9938 size_type_node, NULL_TREE);
9939
9940 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9941 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9942 "memcpy", ECF_NOTHROW | ECF_LEAF);
9943 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9944 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9945 "memmove", ECF_NOTHROW | ECF_LEAF);
9946 }
9947
9948 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9949 {
9950 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9951 const_ptr_type_node, size_type_node,
9952 NULL_TREE);
9953 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9954 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9955 }
9956
9957 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9958 {
9959 ftype = build_function_type_list (ptr_type_node,
9960 ptr_type_node, integer_type_node,
9961 size_type_node, NULL_TREE);
9962 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9963 "memset", ECF_NOTHROW | ECF_LEAF);
9964 }
9965
9966 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9967 {
9968 ftype = build_function_type_list (ptr_type_node,
9969 size_type_node, NULL_TREE);
9970 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9971 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9972 }
9973
9974 ftype = build_function_type_list (ptr_type_node, size_type_node,
9975 size_type_node, NULL_TREE);
9976 local_define_builtin ("__builtin_alloca_with_align", ftype,
9977 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9978 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9979
9980 /* If we're checking the stack, `alloca' can throw. */
9981 if (flag_stack_check)
9982 {
9983 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9984 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9985 }
9986
9987 ftype = build_function_type_list (void_type_node,
9988 ptr_type_node, ptr_type_node,
9989 ptr_type_node, NULL_TREE);
9990 local_define_builtin ("__builtin_init_trampoline", ftype,
9991 BUILT_IN_INIT_TRAMPOLINE,
9992 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9993 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9994 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9995 "__builtin_init_heap_trampoline",
9996 ECF_NOTHROW | ECF_LEAF);
9997
9998 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9999 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10000 BUILT_IN_ADJUST_TRAMPOLINE,
10001 "__builtin_adjust_trampoline",
10002 ECF_CONST | ECF_NOTHROW);
10003
10004 ftype = build_function_type_list (void_type_node,
10005 ptr_type_node, ptr_type_node, NULL_TREE);
10006 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10007 BUILT_IN_NONLOCAL_GOTO,
10008 "__builtin_nonlocal_goto",
10009 ECF_NORETURN | ECF_NOTHROW);
10010
10011 ftype = build_function_type_list (void_type_node,
10012 ptr_type_node, ptr_type_node, NULL_TREE);
10013 local_define_builtin ("__builtin_setjmp_setup", ftype,
10014 BUILT_IN_SETJMP_SETUP,
10015 "__builtin_setjmp_setup", ECF_NOTHROW);
10016
10017 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10018 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10019 BUILT_IN_SETJMP_RECEIVER,
10020 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10021
10022 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10023 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10024 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10025
10026 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10027 local_define_builtin ("__builtin_stack_restore", ftype,
10028 BUILT_IN_STACK_RESTORE,
10029 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10030
10031 /* If there's a possibility that we might use the ARM EABI, build the
10032 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10033 if (targetm.arm_eabi_unwinder)
10034 {
10035 ftype = build_function_type_list (void_type_node, NULL_TREE);
10036 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10037 BUILT_IN_CXA_END_CLEANUP,
10038 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10039 }
10040
10041 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10042 local_define_builtin ("__builtin_unwind_resume", ftype,
10043 BUILT_IN_UNWIND_RESUME,
10044 ((targetm_common.except_unwind_info (&global_options)
10045 == UI_SJLJ)
10046 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10047 ECF_NORETURN);
10048
10049 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10050 {
10051 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10052 NULL_TREE);
10053 local_define_builtin ("__builtin_return_address", ftype,
10054 BUILT_IN_RETURN_ADDRESS,
10055 "__builtin_return_address",
10056 ECF_NOTHROW);
10057 }
10058
10059 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10060 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10061 {
10062 ftype = build_function_type_list (void_type_node, ptr_type_node,
10063 ptr_type_node, NULL_TREE);
10064 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10065 local_define_builtin ("__cyg_profile_func_enter", ftype,
10066 BUILT_IN_PROFILE_FUNC_ENTER,
10067 "__cyg_profile_func_enter", 0);
10068 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10069 local_define_builtin ("__cyg_profile_func_exit", ftype,
10070 BUILT_IN_PROFILE_FUNC_EXIT,
10071 "__cyg_profile_func_exit", 0);
10072 }
10073
10074 /* The exception object and filter values from the runtime. The argument
10075 must be zero before exception lowering, i.e. from the front end. After
10076 exception lowering, it will be the region number for the exception
10077 landing pad. These functions are PURE instead of CONST to prevent
10078 them from being hoisted past the exception edge that will initialize
10079 their value in the landing pad.  */
10080 ftype = build_function_type_list (ptr_type_node,
10081 integer_type_node, NULL_TREE);
10082 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10083 /* Only use TM_PURE if we have TM language support.  */
10084 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10085 ecf_flags |= ECF_TM_PURE;
10086 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10087 "__builtin_eh_pointer", ecf_flags);
10088
10089 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10090 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10091 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10092 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10093
10094 ftype = build_function_type_list (void_type_node,
10095 integer_type_node, integer_type_node,
10096 NULL_TREE);
10097 local_define_builtin ("__builtin_eh_copy_values", ftype,
10098 BUILT_IN_EH_COPY_VALUES,
10099 "__builtin_eh_copy_values", ECF_NOTHROW);
10100
10101 /* Complex multiplication and division. These are handled as builtins
10102 rather than optabs because emit_library_call_value doesn't support
10103 complex. Further, we can do slightly better with folding these
10104 beasties if the real and imaginary parts of the arguments are separate. */
10105 {
10106 int mode;
10107
10108 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10109 {
10110 char mode_name_buf[4], *q;
10111 const char *p;
10112 enum built_in_function mcode, dcode;
10113 tree type, inner_type;
10114 const char *prefix = "__";
10115
10116 if (targetm.libfunc_gnu_prefix)
10117 prefix = "__gnu_";
10118
10119 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10120 if (type == NULL)
10121 continue;
10122 inner_type = TREE_TYPE (type);
10123
10124 ftype = build_function_type_list (type, inner_type, inner_type,
10125 inner_type, inner_type, NULL_TREE);
10126
10127 mcode = ((enum built_in_function)
10128 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10129 dcode = ((enum built_in_function)
10130 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10131
10132 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10133 *q = TOLOWER (*p);
10134 *q = '\0';
10135
10136 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10137 NULL);
10138 local_define_builtin (built_in_names[mcode], ftype, mcode,
10139 built_in_names[mcode],
10140 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10141
10142 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10143 NULL);
10144 local_define_builtin (built_in_names[dcode], ftype, dcode,
10145 built_in_names[dcode],
10146 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10147 }
10148 }
10149
10150 init_internal_fns ();
10151 }
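/* For reference (derived from the complex multiply/divide loop above;
   the exact set depends on the target's complex float modes): with the
   default "__" prefix the loop registers builtins named like __mulsc3
   and __divsc3 for SCmode, __muldc3 and __divdc3 for DCmode, and so on;
   with targetm.libfunc_gnu_prefix they become __gnu_mulsc3 etc.  */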
10152
10153 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10154 better way.
10155
10156 If we requested a pointer to a vector, build up the pointers that
10157 we stripped off while looking for the inner type. Similarly for
10158 return values from functions.
10159
10160 The argument TYPE is the top of the chain, and BOTTOM is the
10161 new type which we will point to. */
10162
10163 tree
10164 reconstruct_complex_type (tree type, tree bottom)
10165 {
10166 tree inner, outer;
10167
10168 if (TREE_CODE (type) == POINTER_TYPE)
10169 {
10170 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10171 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10172 TYPE_REF_CAN_ALIAS_ALL (type));
10173 }
10174 else if (TREE_CODE (type) == REFERENCE_TYPE)
10175 {
10176 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10177 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10178 TYPE_REF_CAN_ALIAS_ALL (type));
10179 }
10180 else if (TREE_CODE (type) == ARRAY_TYPE)
10181 {
10182 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10183 outer = build_array_type (inner, TYPE_DOMAIN (type));
10184 }
10185 else if (TREE_CODE (type) == FUNCTION_TYPE)
10186 {
10187 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10188 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10189 }
10190 else if (TREE_CODE (type) == METHOD_TYPE)
10191 {
10192 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10193 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10194 so we must compensate by getting rid of it. */
10195 outer
10196 = build_method_type_directly
10197 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10198 inner,
10199 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10200 }
10201 else if (TREE_CODE (type) == OFFSET_TYPE)
10202 {
10203 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10204 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10205 }
10206 else
10207 return bottom;
10208
10209 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10210 TYPE_QUALS (type));
10211 }
10212
10213 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10214 the inner type. */
10215 tree
10216 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10217 {
10218 int nunits;
10219
10220 switch (GET_MODE_CLASS (mode))
10221 {
10222 case MODE_VECTOR_INT:
10223 case MODE_VECTOR_FLOAT:
10224 case MODE_VECTOR_FRACT:
10225 case MODE_VECTOR_UFRACT:
10226 case MODE_VECTOR_ACCUM:
10227 case MODE_VECTOR_UACCUM:
10228 nunits = GET_MODE_NUNITS (mode);
10229 break;
10230
10231 case MODE_INT:
10232 /* Check that there are no leftover bits. */
10233 gcc_assert (GET_MODE_BITSIZE (mode)
10234 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10235
10236 nunits = GET_MODE_BITSIZE (mode)
10237 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10238 break;
10239
10240 default:
10241 gcc_unreachable ();
10242 }
10243
10244 return make_vector_type (innertype, nunits, mode);
10245 }
10246
10247 /* Similarly, but takes the inner type and number of units, which must be
10248 a power of two. */
10249
10250 tree
10251 build_vector_type (tree innertype, int nunits)
10252 {
10253 return make_vector_type (innertype, nunits, VOIDmode);
10254 }
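/* Example (sketch): build_vector_type (float_type_node, 4) produces a
   4-element float vector type; because VOIDmode is passed here,
   layout_type picks the machine mode, e.g. a V4SF-like mode on targets
   that support one, or BLKmode otherwise.  */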
10255
10256 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10257
10258 tree
10259 build_opaque_vector_type (tree innertype, int nunits)
10260 {
10261 tree t = make_vector_type (innertype, nunits, VOIDmode);
10262 tree cand;
10263 /* We always build the non-opaque variant before the opaque one,
10264 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10265 cand = TYPE_NEXT_VARIANT (t);
10266 if (cand
10267 && TYPE_VECTOR_OPAQUE (cand)
10268 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10269 return cand;
10270 /* Otherwise build a variant type and make sure to queue it after
10271 the non-opaque type. */
10272 cand = build_distinct_type_copy (t);
10273 TYPE_VECTOR_OPAQUE (cand) = true;
10274 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10275 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10276 TYPE_NEXT_VARIANT (t) = cand;
10277 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10278 return cand;
10279 }
10280
10281
10282 /* Given an initializer INIT, return TRUE if INIT is zero or some
10283 aggregate of zeros. Otherwise return FALSE. */
10284 bool
10285 initializer_zerop (const_tree init)
10286 {
10287 tree elt;
10288
10289 STRIP_NOPS (init);
10290
10291 switch (TREE_CODE (init))
10292 {
10293 case INTEGER_CST:
10294 return integer_zerop (init);
10295
10296 case REAL_CST:
10297 /* ??? Note that this is not correct for C4X float formats. There,
10298 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10299 negative exponent. */
10300 return real_zerop (init)
10301 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10302
10303 case FIXED_CST:
10304 return fixed_zerop (init);
10305
10306 case COMPLEX_CST:
10307 return integer_zerop (init)
10308 || (real_zerop (init)
10309 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10310 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10311
10312 case VECTOR_CST:
10313 {
10314 unsigned i;
10315 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10316 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10317 return false;
10318 return true;
10319 }
10320
10321 case CONSTRUCTOR:
10322 {
10323 unsigned HOST_WIDE_INT idx;
10324
10325 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10326 if (!initializer_zerop (elt))
10327 return false;
10328 return true;
10329 }
10330
10331 case STRING_CST:
10332 {
10333 int i;
10334
10335 /* We need to loop through all elements to handle cases like
10336 "\0" and "\0foobar". */
10337 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10338 if (TREE_STRING_POINTER (init)[i] != '\0')
10339 return false;
10340
10341 return true;
10342 }
10343
10344 default:
10345 return false;
10346 }
10347 }
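/* Illustrative cases (not exhaustive): the function returns true for
   build_int_cst (integer_type_node, 0), for a CONSTRUCTOR whose elements
   are all zero, and for a STRING_CST consisting only of NUL characters;
   it returns false for a REAL_CST of -0.0 because of the signed-zero
   check above.  */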
10348
10349 /* Check whether VEC consists entirely of equal elements and whether
10350 the number of elements corresponds to the type of VEC.
10351 Return the first element of the vector,
10352 or NULL_TREE if the vector is not uniform.  */
10353 tree
10354 uniform_vector_p (const_tree vec)
10355 {
10356 tree first, t;
10357 unsigned i;
10358
10359 if (vec == NULL_TREE)
10360 return NULL_TREE;
10361
10362 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10363
10364 if (TREE_CODE (vec) == VECTOR_CST)
10365 {
10366 first = VECTOR_CST_ELT (vec, 0);
10367 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10368 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10369 return NULL_TREE;
10370
10371 return first;
10372 }
10373
10374 else if (TREE_CODE (vec) == CONSTRUCTOR)
10375 {
10376 first = error_mark_node;
10377
10378 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10379 {
10380 if (i == 0)
10381 {
10382 first = t;
10383 continue;
10384 }
10385 if (!operand_equal_p (first, t, 0))
10386 return NULL_TREE;
10387 }
10388 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10389 return NULL_TREE;
10390
10391 return first;
10392 }
10393
10394 return NULL_TREE;
10395 }
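/* Example (sketch): for a VECTOR_CST such as { 3, 3, 3, 3 } the function
   returns the element 3; for { 3, 3, 3, 4 }, or for a CONSTRUCTOR that
   provides fewer elements than TYPE_VECTOR_SUBPARTS, it returns
   NULL_TREE.  */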
10396
10397 /* Build an empty statement at location LOC. */
10398
10399 tree
10400 build_empty_stmt (location_t loc)
10401 {
10402 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10403 SET_EXPR_LOCATION (t, loc);
10404 return t;
10405 }
10406
10407
10408 /* Build an OpenMP clause with code CODE. LOC is the location of the
10409 clause. */
10410
10411 tree
10412 build_omp_clause (location_t loc, enum omp_clause_code code)
10413 {
10414 tree t;
10415 int size, length;
10416
10417 length = omp_clause_num_ops[code];
10418 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10419
10420 record_node_allocation_statistics (OMP_CLAUSE, size);
10421
10422 t = (tree) ggc_internal_alloc (size);
10423 memset (t, 0, size);
10424 TREE_SET_CODE (t, OMP_CLAUSE);
10425 OMP_CLAUSE_SET_CODE (t, code);
10426 OMP_CLAUSE_LOCATION (t) = loc;
10427
10428 return t;
10429 }
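/* Usage sketch (DECL and LOC are hypothetical values for the example):
   building a private clause and attaching a decl to it might look like

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = decl;

   with the clause then linked onto a directive's clause list via
   OMP_CLAUSE_CHAIN.  */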
10430
10431 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10432 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10433 Except for the CODE and operand count field, other storage for the
10434 object is initialized to zeros. */
10435
10436 tree
10437 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10438 {
10439 tree t;
10440 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10441
10442 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10443 gcc_assert (len >= 1);
10444
10445 record_node_allocation_statistics (code, length);
10446
10447 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10448
10449 TREE_SET_CODE (t, code);
10450
10451 /* Can't use TREE_OPERAND to store the length because if checking is
10452 enabled, it will try to check the length before we store it. :-P */
10453 t->exp.operands[0] = build_int_cst (sizetype, len);
10454
10455 return t;
10456 }
10457
10458 /* Helper function for build_call_* functions; build a CALL_EXPR with
10459 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10460 the argument slots. */
10461
10462 static tree
10463 build_call_1 (tree return_type, tree fn, int nargs)
10464 {
10465 tree t;
10466
10467 t = build_vl_exp (CALL_EXPR, nargs + 3);
10468 TREE_TYPE (t) = return_type;
10469 CALL_EXPR_FN (t) = fn;
10470 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10471
10472 return t;
10473 }
10474
10475 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10476 FN and a null static chain slot. NARGS is the number of call arguments
10477 which are specified as "..." arguments. */
10478
10479 tree
10480 build_call_nary (tree return_type, tree fn, int nargs, ...)
10481 {
10482 tree ret;
10483 va_list args;
10484 va_start (args, nargs);
10485 ret = build_call_valist (return_type, fn, nargs, args);
10486 va_end (args);
10487 return ret;
10488 }
10489
10490 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10491 FN and a null static chain slot. NARGS is the number of call arguments
10492 which are specified as a va_list ARGS. */
10493
10494 tree
10495 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10496 {
10497 tree t;
10498 int i;
10499
10500 t = build_call_1 (return_type, fn, nargs);
10501 for (i = 0; i < nargs; i++)
10502 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10503 process_call_operands (t);
10504 return t;
10505 }
10506
10507 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10508 FN and a null static chain slot. NARGS is the number of call arguments
10509 which are specified as a tree array ARGS. */
10510
10511 tree
10512 build_call_array_loc (location_t loc, tree return_type, tree fn,
10513 int nargs, const tree *args)
10514 {
10515 tree t;
10516 int i;
10517
10518 t = build_call_1 (return_type, fn, nargs);
10519 for (i = 0; i < nargs; i++)
10520 CALL_EXPR_ARG (t, i) = args[i];
10521 process_call_operands (t);
10522 SET_EXPR_LOCATION (t, loc);
10523 return t;
10524 }
10525
10526 /* Like build_call_array, but takes a vec. */
10527
10528 tree
10529 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10530 {
10531 tree ret, t;
10532 unsigned int ix;
10533
10534 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10535 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10536 CALL_EXPR_ARG (ret, ix) = t;
10537 process_call_operands (ret);
10538 return ret;
10539 }
10540
10541 /* Conveniently construct a function call expression. FNDECL names the
10542 function to be called and N arguments are passed in the array
10543 ARGARRAY. */
10544
10545 tree
10546 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10547 {
10548 tree fntype = TREE_TYPE (fndecl);
10549 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10550
10551 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10552 }
10553
10554 /* Conveniently construct a function call expression. FNDECL names the
10555 function to be called and the arguments are passed in the vector
10556 VEC. */
10557
10558 tree
10559 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10560 {
10561 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10562 vec_safe_address (vec));
10563 }
10564
10565
10566 /* Conveniently construct a function call expression. FNDECL names the
10567 function to be called, N is the number of arguments, and the "..."
10568 parameters are the argument expressions. */
10569
10570 tree
10571 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10572 {
10573 va_list ap;
10574 tree *argarray = XALLOCAVEC (tree, n);
10575 int i;
10576
10577 va_start (ap, n);
10578 for (i = 0; i < n; i++)
10579 argarray[i] = va_arg (ap, tree);
10580 va_end (ap);
10581 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10582 }
10583
10584 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10585 varargs macros aren't supported by all bootstrap compilers. */
10586
10587 tree
10588 build_call_expr (tree fndecl, int n, ...)
10589 {
10590 va_list ap;
10591 tree *argarray = XALLOCAVEC (tree, n);
10592 int i;
10593
10594 va_start (ap, n);
10595 for (i = 0; i < n; i++)
10596 argarray[i] = va_arg (ap, tree);
10597 va_end (ap);
10598 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10599 }
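/* Illustrative call (DST, SRC and LEN are hypothetical operand trees):

     tree memcpy_decl = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (memcpy_decl, 3, dst, src, len);

   builds a three-argument CALL_EXPR at UNKNOWN_LOCATION and lets
   fold_builtin_call_array simplify it where possible.  */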
10600
10601 /* Build an internal call expression.  This is just like a CALL_EXPR, except
10602 its CALL_EXPR_FN is NULL.  It will get gimplified later into an ordinary
10603 internal function call.  */
10604
10605 tree
10606 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10607 tree type, int n, ...)
10608 {
10609 va_list ap;
10610 int i;
10611
10612 tree fn = build_call_1 (type, NULL_TREE, n);
10613 va_start (ap, n);
10614 for (i = 0; i < n; i++)
10615 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10616 va_end (ap);
10617 SET_EXPR_LOCATION (fn, loc);
10618 CALL_EXPR_IFN (fn) = ifn;
10619 return fn;
10620 }
10621
10622 /* Create a new constant string literal and return a char* pointer to it.
10623 The STRING_CST value is the LEN characters at STR. */
10624 tree
10625 build_string_literal (int len, const char *str)
10626 {
10627 tree t, elem, index, type;
10628
10629 t = build_string (len, str);
10630 elem = build_type_variant (char_type_node, 1, 0);
10631 index = build_index_type (size_int (len - 1));
10632 type = build_array_type (elem, index);
10633 TREE_TYPE (t) = type;
10634 TREE_CONSTANT (t) = 1;
10635 TREE_READONLY (t) = 1;
10636 TREE_STATIC (t) = 1;
10637
10638 type = build_pointer_type (elem);
10639 t = build1 (ADDR_EXPR, type,
10640 build4 (ARRAY_REF, elem,
10641 t, integer_zero_node, NULL_TREE, NULL_TREE));
10642 return t;
10643 }
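/* Example (sketch): build_string_literal (6, "hello") creates a constant
   6-character array (including the terminating NUL) and returns an
   ADDR_EXPR pointing to its first element, with pointer-to-const-char
   type.  */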
10644
10645
10646
10647 /* Return true if T (assumed to be a DECL) must be assigned a memory
10648 location. */
10649
10650 bool
10651 needs_to_live_in_memory (const_tree t)
10652 {
10653 return (TREE_ADDRESSABLE (t)
10654 || is_global_var (t)
10655 || (TREE_CODE (t) == RESULT_DECL
10656 && !DECL_BY_REFERENCE (t)
10657 && aggregate_value_p (t, current_function_decl)));
10658 }
10659
10660 /* Return the value of the constant X, sign-extended.  */
10661
10662 HOST_WIDE_INT
10663 int_cst_value (const_tree x)
10664 {
10665 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10666 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10667
10668 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10669 gcc_assert (cst_and_fits_in_hwi (x));
10670
10671 if (bits < HOST_BITS_PER_WIDE_INT)
10672 {
10673 bool negative = ((val >> (bits - 1)) & 1) != 0;
10674 if (negative)
10675 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10676 else
10677 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10678 }
10679
10680 return val;
10681 }
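/* Worked illustration of the sign-extension step (the precision is an
   assumption for the example): with BITS == 16, a low value of 0x8000
   has its sign bit set, so the bits above position 15 are filled with
   ones and the result is -32768; a low value of 0x7fff yields 32767.  */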
10682
10683 /* If TYPE is an integral or pointer type, return an integer type with
10684 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10685 if TYPE is already an integer type of signedness UNSIGNEDP. */
10686
10687 tree
10688 signed_or_unsigned_type_for (int unsignedp, tree type)
10689 {
10690 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10691 return type;
10692
10693 if (TREE_CODE (type) == VECTOR_TYPE)
10694 {
10695 tree inner = TREE_TYPE (type);
10696 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10697 if (!inner2)
10698 return NULL_TREE;
10699 if (inner == inner2)
10700 return type;
10701 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10702 }
10703
10704 if (!INTEGRAL_TYPE_P (type)
10705 && !POINTER_TYPE_P (type)
10706 && TREE_CODE (type) != OFFSET_TYPE)
10707 return NULL_TREE;
10708
10709 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10710 }
10711
10712 /* If TYPE is an integral or pointer type, return an integer type with
10713 the same precision which is unsigned, or itself if TYPE is already an
10714 unsigned integer type. */
10715
10716 tree
10717 unsigned_type_for (tree type)
10718 {
10719 return signed_or_unsigned_type_for (1, type);
10720 }
10721
10722 /* If TYPE is an integral or pointer type, return an integer type with
10723 the same precision which is signed, or itself if TYPE is already a
10724 signed integer type. */
10725
10726 tree
10727 signed_type_for (tree type)
10728 {
10729 return signed_or_unsigned_type_for (0, type);
10730 }
10731
10732 /* If TYPE is a vector type, return a signed integer vector type with the
10733 same width and number of subparts. Otherwise return boolean_type_node. */
10734
10735 tree
10736 truth_type_for (tree type)
10737 {
10738 if (TREE_CODE (type) == VECTOR_TYPE)
10739 {
10740 tree elem = lang_hooks.types.type_for_size
10741 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10742 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10743 }
10744 else
10745 return boolean_type_node;
10746 }
10747
10748 /* Returns the largest value obtainable by casting something in INNER type to
10749 OUTER type. */
10750
10751 tree
10752 upper_bound_in_type (tree outer, tree inner)
10753 {
10754 unsigned int det = 0;
10755 unsigned oprec = TYPE_PRECISION (outer);
10756 unsigned iprec = TYPE_PRECISION (inner);
10757 unsigned prec;
10758
10759 /* Compute a unique number for every combination. */
10760 det |= (oprec > iprec) ? 4 : 0;
10761 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10762 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10763
10764 /* Determine the exponent to use. */
10765 switch (det)
10766 {
10767 case 0:
10768 case 1:
10769 /* oprec <= iprec, outer: signed, inner: don't care. */
10770 prec = oprec - 1;
10771 break;
10772 case 2:
10773 case 3:
10774 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10775 prec = oprec;
10776 break;
10777 case 4:
10778 /* oprec > iprec, outer: signed, inner: signed. */
10779 prec = iprec - 1;
10780 break;
10781 case 5:
10782 /* oprec > iprec, outer: signed, inner: unsigned. */
10783 prec = iprec;
10784 break;
10785 case 6:
10786 /* oprec > iprec, outer: unsigned, inner: signed. */
10787 prec = oprec;
10788 break;
10789 case 7:
10790 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10791 prec = iprec;
10792 break;
10793 default:
10794 gcc_unreachable ();
10795 }
10796
10797 return wide_int_to_tree (outer,
10798 wi::mask (prec, false, TYPE_PRECISION (outer)));
10799 }
10800
10801 /* Returns the smallest value obtainable by casting something in INNER type to
10802 OUTER type. */
10803
10804 tree
10805 lower_bound_in_type (tree outer, tree inner)
10806 {
10807 unsigned oprec = TYPE_PRECISION (outer);
10808 unsigned iprec = TYPE_PRECISION (inner);
10809
10810 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10811 and obtain 0. */
10812 if (TYPE_UNSIGNED (outer)
10813 /* If we are widening something of an unsigned type, OUTER type
10814 contains all values of INNER type. In particular, both INNER
10815 and OUTER types have zero in common. */
10816 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10817 return build_int_cst (outer, 0);
10818 else
10819 {
10820 /* If we are widening a signed type to another signed type, we
10821 want to obtain -2^(iprec-1).  If we are keeping the
10822 precision or narrowing to a signed type, we want to obtain
10823 -2^(oprec-1). */
10824 unsigned prec = oprec > iprec ? iprec : oprec;
10825 return wide_int_to_tree (outer,
10826 wi::mask (prec - 1, true,
10827 TYPE_PRECISION (outer)));
10828 }
10829 }
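/* Worked example for both bound functions (precisions chosen for
   illustration): widening a signed 8-bit INNER type to a signed 16-bit
   OUTER type gives upper_bound_in_type == 127 and
   lower_bound_in_type == -128, i.e. the range of the narrower type;
   widening the same INNER type to an unsigned 16-bit OUTER type gives
   65535 and 0, since casting a negative value to the unsigned type can
   produce any 16-bit pattern.  */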
10830
10831 /* Return nonzero if two operands that are suitable for PHI nodes are
10832 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10833 SSA_NAME or invariant. Note that this is strictly an optimization.
10834 That is, callers of this function can directly call operand_equal_p
10835 and get the same result, only slower. */
10836
10837 int
10838 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10839 {
10840 if (arg0 == arg1)
10841 return 1;
10842 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10843 return 0;
10844 return operand_equal_p (arg0, arg1, 0);
10845 }
10846
10847 /* Returns the number of zeros at the end of the binary representation of X.  */
10848
10849 tree
10850 num_ending_zeros (const_tree x)
10851 {
10852 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10853 }
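/* Example: for X == 24 (binary 11000) this returns the constant 3 in
   X's type, since wi::ctz counts the trailing zero bits.  */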
10854
10855
10856 #define WALK_SUBTREE(NODE) \
10857 do \
10858 { \
10859 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10860 if (result) \
10861 return result; \
10862 } \
10863 while (0)
10864
10865 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10866 to be walked whenever a type is seen in the tree.  The rest of the operands
10867 and the return value are as for walk_tree.  */
10868
10869 static tree
10870 walk_type_fields (tree type, walk_tree_fn func, void *data,
10871 hash_set<tree> *pset, walk_tree_lh lh)
10872 {
10873 tree result = NULL_TREE;
10874
10875 switch (TREE_CODE (type))
10876 {
10877 case POINTER_TYPE:
10878 case REFERENCE_TYPE:
10879 case VECTOR_TYPE:
10880 /* We have to worry about mutually recursive pointers. These can't
10881 be written in C. They can in Ada. It's pathological, but
10882 there's an ACATS test (c38102a) that checks it. Deal with this
10883 by checking if we're pointing to another pointer, that one
10884 points to another pointer, that one does too, and we have no htab.
10885 If so, get a hash table. We check three levels deep to avoid
10886 the cost of the hash table if we don't need one. */
10887 if (POINTER_TYPE_P (TREE_TYPE (type))
10888 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10889 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10890 && !pset)
10891 {
10892 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10893 func, data);
10894 if (result)
10895 return result;
10896
10897 break;
10898 }
10899
10900 /* ... fall through ... */
10901
10902 case COMPLEX_TYPE:
10903 WALK_SUBTREE (TREE_TYPE (type));
10904 break;
10905
10906 case METHOD_TYPE:
10907 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10908
10909 /* Fall through. */
10910
10911 case FUNCTION_TYPE:
10912 WALK_SUBTREE (TREE_TYPE (type));
10913 {
10914 tree arg;
10915
10916 /* We never want to walk into default arguments. */
10917 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10918 WALK_SUBTREE (TREE_VALUE (arg));
10919 }
10920 break;
10921
10922 case ARRAY_TYPE:
10923 /* Don't follow this node's type if it is a pointer, for fear
10924 that we'll have infinite recursion.  If we have a PSET, then we
10925 need not fear.  */
10926 if (pset
10927 || (!POINTER_TYPE_P (TREE_TYPE (type))
10928 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10929 WALK_SUBTREE (TREE_TYPE (type));
10930 WALK_SUBTREE (TYPE_DOMAIN (type));
10931 break;
10932
10933 case OFFSET_TYPE:
10934 WALK_SUBTREE (TREE_TYPE (type));
10935 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10936 break;
10937
10938 default:
10939 break;
10940 }
10941
10942 return NULL_TREE;
10943 }
10944
10945 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10946 called with the DATA and the address of each sub-tree. If FUNC returns a
10947 non-NULL value, the traversal is stopped, and the value returned by FUNC
10948 is returned. If PSET is non-NULL it is used to record the nodes visited,
10949 and to avoid visiting a node more than once. */
10950
10951 tree
10952 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10953 hash_set<tree> *pset, walk_tree_lh lh)
10954 {
10955 enum tree_code code;
10956 int walk_subtrees;
10957 tree result;
10958
10959 #define WALK_SUBTREE_TAIL(NODE) \
10960 do \
10961 { \
10962 tp = & (NODE); \
10963 goto tail_recurse; \
10964 } \
10965 while (0)
10966
10967 tail_recurse:
10968 /* Skip empty subtrees. */
10969 if (!*tp)
10970 return NULL_TREE;
10971
10972 /* Don't walk the same tree twice, if the user has requested
10973 that we avoid doing so. */
10974 if (pset && pset->add (*tp))
10975 return NULL_TREE;
10976
10977 /* Call the function. */
10978 walk_subtrees = 1;
10979 result = (*func) (tp, &walk_subtrees, data);
10980
10981 /* If we found something, return it. */
10982 if (result)
10983 return result;
10984
10985 code = TREE_CODE (*tp);
10986
10987 /* Even if we didn't, FUNC may have decided that there was nothing
10988 interesting below this point in the tree. */
10989 if (!walk_subtrees)
10990 {
10991 /* But we still need to check our siblings. */
10992 if (code == TREE_LIST)
10993 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10994 else if (code == OMP_CLAUSE)
10995 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10996 else
10997 return NULL_TREE;
10998 }
10999
11000 if (lh)
11001 {
11002 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11003 if (result || !walk_subtrees)
11004 return result;
11005 }
11006
11007 switch (code)
11008 {
11009 case ERROR_MARK:
11010 case IDENTIFIER_NODE:
11011 case INTEGER_CST:
11012 case REAL_CST:
11013 case FIXED_CST:
11014 case VECTOR_CST:
11015 case STRING_CST:
11016 case BLOCK:
11017 case PLACEHOLDER_EXPR:
11018 case SSA_NAME:
11019 case FIELD_DECL:
11020 case RESULT_DECL:
11021 /* None of these have subtrees other than those already walked
11022 above. */
11023 break;
11024
11025 case TREE_LIST:
11026 WALK_SUBTREE (TREE_VALUE (*tp));
11027 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11028 break;
11029
11030 case TREE_VEC:
11031 {
11032 int len = TREE_VEC_LENGTH (*tp);
11033
11034 if (len == 0)
11035 break;
11036
11037 /* Walk all elements but the first. */
11038 while (--len)
11039 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11040
11041 /* Now walk the first one as a tail call. */
11042 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11043 }
11044
11045 case COMPLEX_CST:
11046 WALK_SUBTREE (TREE_REALPART (*tp));
11047 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11048
11049 case CONSTRUCTOR:
11050 {
11051 unsigned HOST_WIDE_INT idx;
11052 constructor_elt *ce;
11053
11054 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11055 idx++)
11056 WALK_SUBTREE (ce->value);
11057 }
11058 break;
11059
11060 case SAVE_EXPR:
11061 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11062
11063 case BIND_EXPR:
11064 {
11065 tree decl;
11066 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11067 {
11068 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11069 into declarations that are just mentioned, rather than
11070 declared; they don't really belong to this part of the tree.
11071 And, we can see cycles: the initializer for a declaration
11072 can refer to the declaration itself. */
11073 WALK_SUBTREE (DECL_INITIAL (decl));
11074 WALK_SUBTREE (DECL_SIZE (decl));
11075 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11076 }
11077 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11078 }
11079
11080 case STATEMENT_LIST:
11081 {
11082 tree_stmt_iterator i;
11083 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11084 WALK_SUBTREE (*tsi_stmt_ptr (i));
11085 }
11086 break;
11087
11088 case OMP_CLAUSE:
11089 switch (OMP_CLAUSE_CODE (*tp))
11090 {
11091 case OMP_CLAUSE_PRIVATE:
11092 case OMP_CLAUSE_SHARED:
11093 case OMP_CLAUSE_FIRSTPRIVATE:
11094 case OMP_CLAUSE_COPYIN:
11095 case OMP_CLAUSE_COPYPRIVATE:
11096 case OMP_CLAUSE_FINAL:
11097 case OMP_CLAUSE_IF:
11098 case OMP_CLAUSE_NUM_THREADS:
11099 case OMP_CLAUSE_SCHEDULE:
11100 case OMP_CLAUSE_UNIFORM:
11101 case OMP_CLAUSE_DEPEND:
11102 case OMP_CLAUSE_NUM_TEAMS:
11103 case OMP_CLAUSE_THREAD_LIMIT:
11104 case OMP_CLAUSE_DEVICE:
11105 case OMP_CLAUSE_DIST_SCHEDULE:
11106 case OMP_CLAUSE_SAFELEN:
11107 case OMP_CLAUSE_SIMDLEN:
11108 case OMP_CLAUSE__LOOPTEMP_:
11109 case OMP_CLAUSE__SIMDUID_:
11110 case OMP_CLAUSE__CILK_FOR_COUNT_:
11111 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11112 /* FALLTHRU */
11113
11114 case OMP_CLAUSE_NOWAIT:
11115 case OMP_CLAUSE_ORDERED:
11116 case OMP_CLAUSE_DEFAULT:
11117 case OMP_CLAUSE_UNTIED:
11118 case OMP_CLAUSE_MERGEABLE:
11119 case OMP_CLAUSE_PROC_BIND:
11120 case OMP_CLAUSE_INBRANCH:
11121 case OMP_CLAUSE_NOTINBRANCH:
11122 case OMP_CLAUSE_FOR:
11123 case OMP_CLAUSE_PARALLEL:
11124 case OMP_CLAUSE_SECTIONS:
11125 case OMP_CLAUSE_TASKGROUP:
11126 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11127
11128 case OMP_CLAUSE_LASTPRIVATE:
11129 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11130 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11131 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11132
11133 case OMP_CLAUSE_COLLAPSE:
11134 {
11135 int i;
11136 for (i = 0; i < 3; i++)
11137 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11138 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11139 }
11140
11141 case OMP_CLAUSE_LINEAR:
11142 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11143 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11144 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11145 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11146
11147 case OMP_CLAUSE_ALIGNED:
11148 case OMP_CLAUSE_FROM:
11149 case OMP_CLAUSE_TO:
11150 case OMP_CLAUSE_MAP:
11151 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11152 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11153 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11154
11155 case OMP_CLAUSE_REDUCTION:
11156 {
11157 int i;
11158 for (i = 0; i < 4; i++)
11159 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11160 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11161 }
11162
11163 default:
11164 gcc_unreachable ();
11165 }
11166 break;
11167
11168 case TARGET_EXPR:
11169 {
11170 int i, len;
11171
11172 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11173 But, we only want to walk them once.  */
11174 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11175 for (i = 0; i < len; ++i)
11176 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11177 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11178 }
11179
11180 case DECL_EXPR:
11181 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11182 defining. We only want to walk into these fields of a type in this
11183 case and not in the general case of a mere reference to the type.
11184
11185 The criterion is as follows: if the field can be an expression, it
11186 must be walked only here. This should be in keeping with the fields
11187 that are directly gimplified in gimplify_type_sizes in order for the
11188 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11189 variable-sized types.
11190
11191 Note that DECLs get walked as part of processing the BIND_EXPR. */
11192 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11193 {
11194 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11195 if (TREE_CODE (*type_p) == ERROR_MARK)
11196 return NULL_TREE;
11197
11198 /* Call the function for the type. See if it returns anything or
11199 doesn't want us to continue. If we are to continue, walk both
11200 the normal fields and those for the declaration case. */
11201 result = (*func) (type_p, &walk_subtrees, data);
11202 if (result || !walk_subtrees)
11203 return result;
11204
11205 /* But do not walk a pointed-to type since it may itself need to
11206 be walked in the declaration case if it isn't anonymous. */
11207 if (!POINTER_TYPE_P (*type_p))
11208 {
11209 result = walk_type_fields (*type_p, func, data, pset, lh);
11210 if (result)
11211 return result;
11212 }
11213
11214 /* If this is a record type, also walk the fields. */
11215 if (RECORD_OR_UNION_TYPE_P (*type_p))
11216 {
11217 tree field;
11218
11219 for (field = TYPE_FIELDS (*type_p); field;
11220 field = DECL_CHAIN (field))
11221 {
11222 /* We'd like to look at the type of the field, but we can
11223 easily get infinite recursion. So assume it's pointed
11224 to elsewhere in the tree. Also, ignore things that
11225 aren't fields. */
11226 if (TREE_CODE (field) != FIELD_DECL)
11227 continue;
11228
11229 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11230 WALK_SUBTREE (DECL_SIZE (field));
11231 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11232 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11233 WALK_SUBTREE (DECL_QUALIFIER (field));
11234 }
11235 }
11236
11237 /* Same for scalar types. */
11238 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11239 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11240 || TREE_CODE (*type_p) == INTEGER_TYPE
11241 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11242 || TREE_CODE (*type_p) == REAL_TYPE)
11243 {
11244 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11245 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11246 }
11247
11248 WALK_SUBTREE (TYPE_SIZE (*type_p));
11249 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11250 }
11251 /* FALLTHRU */
11252
11253 default:
11254 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11255 {
11256 int i, len;
11257
11258 /* Walk over all the sub-trees of this operand. */
11259 len = TREE_OPERAND_LENGTH (*tp);
11260
11261 /* Go through the subtrees. We need to do this in forward order so
11262 that the scope of a FOR_EXPR is handled properly. */
11263 if (len)
11264 {
11265 for (i = 0; i < len - 1; ++i)
11266 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11267 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11268 }
11269 }
11270 /* If this is a type, walk the needed fields in the type. */
11271 else if (TYPE_P (*tp))
11272 return walk_type_fields (*tp, func, data, pset, lh);
11273 break;
11274 }
11275
11276 /* We didn't find what we were looking for. */
11277 return NULL_TREE;
11278
11279 #undef WALK_SUBTREE_TAIL
11280 }
11281 #undef WALK_SUBTREE
11282
11283 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11284
11285 tree
11286 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11287 walk_tree_lh lh)
11288 {
11289 tree result;
11290
11291 hash_set<tree> pset;
11292 result = walk_tree_1 (tp, func, data, &pset, lh);
11293 return result;
11294 }
11295
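/* Illustrative sketch, not part of GCC: a minimal walk_tree_fn callback
   showing how the duplicate-suppressing walker above is typically driven.
   The helper names count_addr_exprs_r and count_addr_exprs are
   hypothetical.  */

static tree
count_addr_exprs_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  /* Count every ADDR_EXPR we encounter; shared nodes are visited only once
     because the caller uses the duplicate-suppressing entry point.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    ++ (*(unsigned int *) data);
  /* Returning NULL_TREE continues the walk; returning *tp would stop it and
     propagate that node back to the caller.  */
  return NULL_TREE;
}

static unsigned int ATTRIBUTE_UNUSED
count_addr_exprs (tree expr)
{
  unsigned int count = 0;
  walk_tree_without_duplicates (&expr, count_addr_exprs_r, &count);
  return count;
}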
11296
11297 tree
11298 tree_block (tree t)
11299 {
11300 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11301
11302 if (IS_EXPR_CODE_CLASS (c))
11303 return LOCATION_BLOCK (t->exp.locus);
11304 gcc_unreachable ();
11305 return NULL;
11306 }
11307
11308 void
11309 tree_set_block (tree t, tree b)
11310 {
11311 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11312
11313 if (IS_EXPR_CODE_CLASS (c))
11314 {
11315 if (b)
11316 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11317 else
11318 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11319 }
11320 else
11321 gcc_unreachable ();
11322 }
11323
11324 /* Create a nameless artificial label and put it in the current
11325 function context. The label has a location of LOC. Returns the
11326 newly created label. */
11327
11328 tree
11329 create_artificial_label (location_t loc)
11330 {
11331 tree lab = build_decl (loc,
11332 LABEL_DECL, NULL_TREE, void_type_node);
11333
11334 DECL_ARTIFICIAL (lab) = 1;
11335 DECL_IGNORED_P (lab) = 1;
11336 DECL_CONTEXT (lab) = current_function_decl;
11337 return lab;
11338 }
11339
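/* Illustrative sketch, not part of GCC: how a front end might combine the
   helper above with a GOTO_EXPR when lowering control flow.  The function
   name build_artificial_goto is hypothetical.  */

static tree ATTRIBUTE_UNUSED
build_artificial_goto (location_t loc)
{
  /* The label lives in the current function and is invisible to the user
     and to the debugger.  */
  tree lab = create_artificial_label (loc);
  return build1_loc (loc, GOTO_EXPR, void_type_node, lab);
}
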
11340 /* Given a tree, try to return a useful variable name that we can use
11341 to prefix a temporary that is being assigned the value of the tree.
11342    I.e. given <temp> = &A, return A.  */
11343
11344 const char *
11345 get_name (tree t)
11346 {
11347 tree stripped_decl;
11348
11349 stripped_decl = t;
11350 STRIP_NOPS (stripped_decl);
11351 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11352 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11353 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11354 {
11355 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11356 if (!name)
11357 return NULL;
11358 return IDENTIFIER_POINTER (name);
11359 }
11360 else
11361 {
11362 switch (TREE_CODE (stripped_decl))
11363 {
11364 case ADDR_EXPR:
11365 return get_name (TREE_OPERAND (stripped_decl, 0));
11366 default:
11367 return NULL;
11368 }
11369 }
11370 }
11371
11372 /* Return true if function type FNTYPE has a variable argument list.  */
11373
11374 bool
11375 stdarg_p (const_tree fntype)
11376 {
11377 function_args_iterator args_iter;
11378 tree n = NULL_TREE, t;
11379
11380 if (!fntype)
11381 return false;
11382
11383 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11384 {
11385 n = t;
11386 }
11387
11388 return n != NULL_TREE && n != void_type_node;
11389 }
11390
11391 /* Return true if function type FNTYPE has a prototype.  */
11392
11393 bool
11394 prototype_p (tree fntype)
11395 {
11396 tree t;
11397
11398 gcc_assert (fntype != NULL_TREE);
11399
11400 t = TYPE_ARG_TYPES (fntype);
11401 return (t != NULL_TREE);
11402 }
11403
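/* Illustrative sketch, not part of GCC: combining the two predicates above.
   For "int f (int, ...)" both stdarg_p and prototype_p are true; for a
   prototyped "int f (int)" only prototype_p is true; for an unprototyped
   "int f ()" both are false.  The helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
fixed_args_prototype_p (tree fntype)
{
  return prototype_p (fntype) && !stdarg_p (fntype);
}
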
11404 /* If BLOCK is inlined from an __attribute__((__artificial__))
11405    routine, return a pointer to the location from which it was
11406    called.  */
11407 location_t *
11408 block_nonartificial_location (tree block)
11409 {
11410 location_t *ret = NULL;
11411
11412 while (block && TREE_CODE (block) == BLOCK
11413 && BLOCK_ABSTRACT_ORIGIN (block))
11414 {
11415 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11416
11417 while (TREE_CODE (ao) == BLOCK
11418 && BLOCK_ABSTRACT_ORIGIN (ao)
11419 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11420 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11421
11422 if (TREE_CODE (ao) == FUNCTION_DECL)
11423 {
11424 /* If AO is an artificial inline, point RET to the
11425 call site locus at which it has been inlined and continue
11426 the loop, in case AO's caller is also an artificial
11427 inline. */
11428 if (DECL_DECLARED_INLINE_P (ao)
11429 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11430 ret = &BLOCK_SOURCE_LOCATION (block);
11431 else
11432 break;
11433 }
11434 else if (TREE_CODE (ao) != BLOCK)
11435 break;
11436
11437 block = BLOCK_SUPERCONTEXT (block);
11438 }
11439 return ret;
11440 }
11441
11442
11443 /* If EXP is inlined from an __attribute__((__artificial__))
11444 function, return the location of the original call expression. */
11445
11446 location_t
11447 tree_nonartificial_location (tree exp)
11448 {
11449 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11450
11451 if (loc)
11452 return *loc;
11453 else
11454 return EXPR_LOCATION (exp);
11455 }
11456
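/* Illustrative sketch, not part of GCC as written here: a diagnostic about
   EXP is typically issued at its non-artificial location, so that it points
   at the user's call rather than into an always_inline, artificial wrapper.
   The helper name and the warning text are hypothetical.  */

static void ATTRIBUTE_UNUSED
warn_at_user_location (tree exp)
{
  warning_at (tree_nonartificial_location (exp), OPT_Wattributes,
	      "example diagnostic about this call");
}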
11457
11458 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11459    and TARGET_OPTION_NODE nodes.  */
11460
11461 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE.  */
11462
11463 static hashval_t
11464 cl_option_hash_hash (const void *x)
11465 {
11466 const_tree const t = (const_tree) x;
11467 const char *p;
11468 size_t i;
11469 size_t len = 0;
11470 hashval_t hash = 0;
11471
11472 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11473 {
11474 p = (const char *)TREE_OPTIMIZATION (t);
11475 len = sizeof (struct cl_optimization);
11476 }
11477
11478 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11479 {
11480 p = (const char *)TREE_TARGET_OPTION (t);
11481 len = sizeof (struct cl_target_option);
11482 }
11483
11484 else
11485 gcc_unreachable ();
11486
11487   /* Assume most optimization flags are just 0/1, some are 2-3, and a few
11488      might be something else.  */
11489 for (i = 0; i < len; i++)
11490 if (p[i])
11491 hash = (hash << 4) ^ ((i << 2) | p[i]);
11492
11493 return hash;
11494 }
11495
11496 /* Return nonzero if the value represented by *X (an OPTIMIZATION_NODE or
11497    TARGET_OPTION_NODE tree node) is the same as that given by *Y, which is
11498    a node of the same kind.  */
11499
11500 static int
11501 cl_option_hash_eq (const void *x, const void *y)
11502 {
11503 const_tree const xt = (const_tree) x;
11504 const_tree const yt = (const_tree) y;
11505 const char *xp;
11506 const char *yp;
11507 size_t len;
11508
11509 if (TREE_CODE (xt) != TREE_CODE (yt))
11510 return 0;
11511
11512 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11513 {
11514 xp = (const char *)TREE_OPTIMIZATION (xt);
11515 yp = (const char *)TREE_OPTIMIZATION (yt);
11516 len = sizeof (struct cl_optimization);
11517 }
11518
11519 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11520 {
11521 xp = (const char *)TREE_TARGET_OPTION (xt);
11522 yp = (const char *)TREE_TARGET_OPTION (yt);
11523 len = sizeof (struct cl_target_option);
11524 }
11525
11526 else
11527 gcc_unreachable ();
11528
11529 return (memcmp (xp, yp, len) == 0);
11530 }
11531
11532 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11533
11534 tree
11535 build_optimization_node (struct gcc_options *opts)
11536 {
11537 tree t;
11538 void **slot;
11539
11540 /* Use the cache of optimization nodes. */
11541
11542 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11543 opts);
11544
11545 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11546 t = (tree) *slot;
11547 if (!t)
11548 {
11549 /* Insert this one into the hash table. */
11550 t = cl_optimization_node;
11551 *slot = t;
11552
11553 /* Make a new node for next time round. */
11554 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11555 }
11556
11557 return t;
11558 }
11559
11560 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11561
11562 tree
11563 build_target_option_node (struct gcc_options *opts)
11564 {
11565 tree t;
11566 void **slot;
11567
11568   /* Use the cache of target option nodes.  */
11569
11570 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11571 opts);
11572
11573 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11574 t = (tree) *slot;
11575 if (!t)
11576 {
11577 /* Insert this one into the hash table. */
11578 t = cl_target_option_node;
11579 *slot = t;
11580
11581 /* Make a new node for next time round. */
11582 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11583 }
11584
11585 return t;
11586 }
11587
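/* Illustrative sketch, not part of GCC: because the nodes built above are
   hash-consed, building a node twice from identical option sets yields the
   same tree, so pointer equality is enough to compare option sets.  The
   helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
same_optimization_options_p (struct gcc_options *a, struct gcc_options *b)
{
  return build_optimization_node (a) == build_optimization_node (b);
}
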
11588 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11589 Called through htab_traverse. */
11590
11591 static int
11592 prepare_target_option_node_for_pch (void **slot, void *)
11593 {
11594 tree node = (tree) *slot;
11595 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11596 TREE_TARGET_GLOBALS (node) = NULL;
11597 return 1;
11598 }
11599
11600 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11601 so that they aren't saved during PCH writing. */
11602
11603 void
11604 prepare_target_option_nodes_for_pch (void)
11605 {
11606 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11607 NULL);
11608 }
11609
11610 /* Determine the "ultimate origin" of a block. The block may be an inlined
11611 instance of an inlined instance of a block which is local to an inline
11612 function, so we have to trace all of the way back through the origin chain
11613 to find out what sort of node actually served as the original seed for the
11614 given block. */
11615
11616 tree
11617 block_ultimate_origin (const_tree block)
11618 {
11619 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11620
11621 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11622 we're trying to output the abstract instance of this function. */
11623 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11624 return NULL_TREE;
11625
11626 if (immediate_origin == NULL_TREE)
11627 return NULL_TREE;
11628 else
11629 {
11630 tree ret_val;
11631 tree lookahead = immediate_origin;
11632
11633 do
11634 {
11635 ret_val = lookahead;
11636 lookahead = (TREE_CODE (ret_val) == BLOCK
11637 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11638 }
11639 while (lookahead != NULL && lookahead != ret_val);
11640
11641 /* The block's abstract origin chain may not be the *ultimate* origin of
11642 the block. It could lead to a DECL that has an abstract origin set.
11643 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11644 will give us if it has one). Note that DECL's abstract origins are
11645 supposed to be the most distant ancestor (or so decl_ultimate_origin
11646 claims), so we don't need to loop following the DECL origins. */
11647 if (DECL_P (ret_val))
11648 return DECL_ORIGIN (ret_val);
11649
11650 return ret_val;
11651 }
11652 }
11653
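/* Illustrative sketch, not part of GCC: a FUNCTION_DECL result from
   block_ultimate_origin indicates that BLOCK represents an inlined copy of
   a function body, which is how debug-info emitters recognize inlined
   subroutine scopes.  The helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
block_from_inlined_function_p (const_tree block)
{
  tree origin = block_ultimate_origin (block);
  return origin != NULL_TREE && TREE_CODE (origin) == FUNCTION_DECL;
}
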
11654 /* Return true iff conversion in EXP generates no instruction. Mark
11655    it inline so that it is fully inlined into the stripping functions even
11656 though we have two uses of this function. */
11657
11658 static inline bool
11659 tree_nop_conversion (const_tree exp)
11660 {
11661 tree outer_type, inner_type;
11662
11663 if (!CONVERT_EXPR_P (exp)
11664 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11665 return false;
11666 if (TREE_OPERAND (exp, 0) == error_mark_node)
11667 return false;
11668
11669 outer_type = TREE_TYPE (exp);
11670 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11671
11672 if (!inner_type)
11673 return false;
11674
11675   /* Use precision rather than machine mode when we can, which gives
11676 the correct answer even for submode (bit-field) types. */
11677 if ((INTEGRAL_TYPE_P (outer_type)
11678 || POINTER_TYPE_P (outer_type)
11679 || TREE_CODE (outer_type) == OFFSET_TYPE)
11680 && (INTEGRAL_TYPE_P (inner_type)
11681 || POINTER_TYPE_P (inner_type)
11682 || TREE_CODE (inner_type) == OFFSET_TYPE))
11683 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11684
11685 /* Otherwise fall back on comparing machine modes (e.g. for
11686 aggregate types, floats). */
11687 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11688 }
11689
11690 /* Return true iff conversion in EXP generates no instruction. Don't
11691 consider conversions changing the signedness. */
11692
11693 static bool
11694 tree_sign_nop_conversion (const_tree exp)
11695 {
11696 tree outer_type, inner_type;
11697
11698 if (!tree_nop_conversion (exp))
11699 return false;
11700
11701 outer_type = TREE_TYPE (exp);
11702 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11703
11704 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11705 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11706 }
11707
11708 /* Strip conversions from EXP according to tree_nop_conversion and
11709 return the resulting expression. */
11710
11711 tree
11712 tree_strip_nop_conversions (tree exp)
11713 {
11714 while (tree_nop_conversion (exp))
11715 exp = TREE_OPERAND (exp, 0);
11716 return exp;
11717 }
11718
11719 /* Strip conversions from EXP according to tree_sign_nop_conversion
11720 and return the resulting expression. */
11721
11722 tree
11723 tree_strip_sign_nop_conversions (tree exp)
11724 {
11725 while (tree_sign_nop_conversion (exp))
11726 exp = TREE_OPERAND (exp, 0);
11727 return exp;
11728 }
11729
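/* Illustrative example, not part of GCC: on a target where int and unsigned
   int have the same precision, the cast chain (int) (unsigned int) x is
   stripped all the way back to x by tree_strip_nop_conversions, whereas
   tree_strip_sign_nop_conversions leaves both casts in place because each
   of them changes the signedness.  */
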
11730 /* Avoid any floating point extensions from EXP. */
11731 tree
11732 strip_float_extensions (tree exp)
11733 {
11734 tree sub, expt, subt;
11735
11736   /* For a floating point constant, look up the narrowest type that can hold
11737 it properly and handle it like (type)(narrowest_type)constant.
11738 This way we can optimize for instance a=a*2.0 where "a" is float
11739      but 2.0 is a double constant.  */
11740 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11741 {
11742 REAL_VALUE_TYPE orig;
11743 tree type = NULL;
11744
11745 orig = TREE_REAL_CST (exp);
11746 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11747 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11748 type = float_type_node;
11749 else if (TYPE_PRECISION (TREE_TYPE (exp))
11750 > TYPE_PRECISION (double_type_node)
11751 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11752 type = double_type_node;
11753 if (type)
11754 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11755 }
11756
11757 if (!CONVERT_EXPR_P (exp))
11758 return exp;
11759
11760 sub = TREE_OPERAND (exp, 0);
11761 subt = TREE_TYPE (sub);
11762 expt = TREE_TYPE (exp);
11763
11764 if (!FLOAT_TYPE_P (subt))
11765 return exp;
11766
11767 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11768 return exp;
11769
11770 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11771 return exp;
11772
11773 return strip_float_extensions (sub);
11774 }
11775
11776 /* Strip out all handled components that produce invariant
11777 offsets. */
11778
11779 const_tree
11780 strip_invariant_refs (const_tree op)
11781 {
11782 while (handled_component_p (op))
11783 {
11784 switch (TREE_CODE (op))
11785 {
11786 case ARRAY_REF:
11787 case ARRAY_RANGE_REF:
11788 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11789 || TREE_OPERAND (op, 2) != NULL_TREE
11790 || TREE_OPERAND (op, 3) != NULL_TREE)
11791 return NULL;
11792 break;
11793
11794 case COMPONENT_REF:
11795 if (TREE_OPERAND (op, 2) != NULL_TREE)
11796 return NULL;
11797 break;
11798
11799 default:;
11800 }
11801 op = TREE_OPERAND (op, 0);
11802 }
11803
11804 return op;
11805 }
11806
11807 static GTY(()) tree gcc_eh_personality_decl;
11808
11809 /* Return the GCC personality function decl. */
11810
11811 tree
11812 lhd_gcc_personality (void)
11813 {
11814 if (!gcc_eh_personality_decl)
11815 gcc_eh_personality_decl = build_personality_function ("gcc");
11816 return gcc_eh_personality_decl;
11817 }
11818
11819 /* TARGET is a call target of a GIMPLE call statement
11820    (obtained by gimple_call_fn).  Return true if it is an
11821    OBJ_TYPE_REF representing a virtual call to a C++ method.
11822    (As opposed to an OBJ_TYPE_REF representing ObjC calls
11823    through a cast, where the middle-end devirtualization machinery
11824    can't apply.)  */
11825
11826 bool
11827 virtual_method_call_p (tree target)
11828 {
11829 if (TREE_CODE (target) != OBJ_TYPE_REF)
11830 return false;
11831 target = TREE_TYPE (target);
11832 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11833 target = TREE_TYPE (target);
11834 if (TREE_CODE (target) == FUNCTION_TYPE)
11835 return false;
11836 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11837 return true;
11838 }
11839
11840 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11841
11842 tree
11843 obj_type_ref_class (tree ref)
11844 {
11845 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11846 ref = TREE_TYPE (ref);
11847 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11848 ref = TREE_TYPE (ref);
11849   /* We look for the type that THIS points to.  ObjC also builds
11850      OBJ_TYPE_REF for non-method calls; their first parameter
11851      ID, however, also corresponds to the class type.  */
11852 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11853 || TREE_CODE (ref) == FUNCTION_TYPE);
11854 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11855 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11856 return TREE_TYPE (ref);
11857 }
11858
11859 /* Return true if T is in anonymous namespace. */
11860
11861 bool
11862 type_in_anonymous_namespace_p (const_tree t)
11863 {
11864 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11865      builtin types; those have a NULL TYPE_CONTEXT.  */
11866 if (!TYPE_CONTEXT (t))
11867 return false;
11868 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11869 }
11870
11871 /* Try to find a base info of BINFO that would have its field decl at offset
11872 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11873    found, return it; otherwise return NULL_TREE.  */
11874
11875 tree
11876 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11877 {
11878 tree type = BINFO_TYPE (binfo);
11879
11880 while (true)
11881 {
11882 HOST_WIDE_INT pos, size;
11883 tree fld;
11884 int i;
11885
11886 if (types_same_for_odr (type, expected_type))
11887 return binfo;
11888 if (offset < 0)
11889 return NULL_TREE;
11890
11891 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11892 {
11893 if (TREE_CODE (fld) != FIELD_DECL)
11894 continue;
11895
11896 pos = int_bit_position (fld);
11897 size = tree_to_uhwi (DECL_SIZE (fld));
11898 if (pos <= offset && (pos + size) > offset)
11899 break;
11900 }
11901 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11902 return NULL_TREE;
11903
11904 if (!DECL_ARTIFICIAL (fld))
11905 {
11906 binfo = TYPE_BINFO (TREE_TYPE (fld));
11907 if (!binfo)
11908 return NULL_TREE;
11909 }
11910 /* Offset 0 indicates the primary base, whose vtable contents are
11911 represented in the binfo for the derived class. */
11912 else if (offset != 0)
11913 {
11914 tree base_binfo, binfo2 = binfo;
11915
11916 	    /* Find the BINFO corresponding to FLD.  This is made a bit harder
11917 	       by the fact that with virtual inheritance we may need to walk down
11918 the non-virtual inheritance chain. */
11919 while (true)
11920 {
11921 tree containing_binfo = NULL, found_binfo = NULL;
11922 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11923 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11924 {
11925 found_binfo = base_binfo;
11926 break;
11927 }
11928 else
11929 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11930 - tree_to_shwi (BINFO_OFFSET (binfo)))
11931 * BITS_PER_UNIT < pos
11932 		    /* Rule out types with no virtual methods, or we can get confused
11933 		       here by zero-sized bases.  */
11934 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11935 && (!containing_binfo
11936 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11937 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11938 containing_binfo = base_binfo;
11939 if (found_binfo)
11940 {
11941 binfo = found_binfo;
11942 break;
11943 }
11944 if (!containing_binfo)
11945 return NULL_TREE;
11946 binfo2 = containing_binfo;
11947 }
11948 }
11949
11950 type = TREE_TYPE (fld);
11951 offset -= pos;
11952 }
11953 }
11954
11955 /* Returns true if X is a typedef decl. */
11956
11957 bool
11958 is_typedef_decl (tree x)
11959 {
11960 return (x && TREE_CODE (x) == TYPE_DECL
11961 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11962 }
11963
11964 /* Returns true iff TYPE is a type variant created for a typedef. */
11965
11966 bool
11967 typedef_variant_p (tree type)
11968 {
11969 return is_typedef_decl (TYPE_NAME (type));
11970 }
11971
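/* Illustrative example, not part of GCC: for "typedef int myint;" the
   TYPE_DECL of "myint" has DECL_ORIGINAL_TYPE set to "int", so
   is_typedef_decl returns true for that TYPE_DECL and typedef_variant_p
   returns true for the type variant the front end built for the typedef,
   while both return false for plain "int" and its TYPE_DECL.  */
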
11972 /* Warn about a use of an identifier which was marked deprecated. */
11973 void
11974 warn_deprecated_use (tree node, tree attr)
11975 {
11976 const char *msg;
11977
11978 if (node == 0 || !warn_deprecated_decl)
11979 return;
11980
11981 if (!attr)
11982 {
11983 if (DECL_P (node))
11984 attr = DECL_ATTRIBUTES (node);
11985 else if (TYPE_P (node))
11986 {
11987 tree decl = TYPE_STUB_DECL (node);
11988 if (decl)
11989 attr = lookup_attribute ("deprecated",
11990 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11991 }
11992 }
11993
11994 if (attr)
11995 attr = lookup_attribute ("deprecated", attr);
11996
11997 if (attr)
11998 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
11999 else
12000 msg = NULL;
12001
12002 if (DECL_P (node))
12003 {
12004 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
12005 if (msg)
12006 warning (OPT_Wdeprecated_declarations,
12007 "%qD is deprecated (declared at %r%s:%d%R): %s",
12008 node, "locus", xloc.file, xloc.line, msg);
12009 else
12010 warning (OPT_Wdeprecated_declarations,
12011 "%qD is deprecated (declared at %r%s:%d%R)",
12012 node, "locus", xloc.file, xloc.line);
12013 }
12014 else if (TYPE_P (node))
12015 {
12016 tree what = NULL_TREE;
12017 tree decl = TYPE_STUB_DECL (node);
12018
12019 if (TYPE_NAME (node))
12020 {
12021 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12022 what = TYPE_NAME (node);
12023 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12024 && DECL_NAME (TYPE_NAME (node)))
12025 what = DECL_NAME (TYPE_NAME (node));
12026 }
12027
12028 if (decl)
12029 {
12030 expanded_location xloc
12031 = expand_location (DECL_SOURCE_LOCATION (decl));
12032 if (what)
12033 {
12034 if (msg)
12035 warning (OPT_Wdeprecated_declarations,
12036 "%qE is deprecated (declared at %r%s:%d%R): %s",
12037 what, "locus", xloc.file, xloc.line, msg);
12038 else
12039 warning (OPT_Wdeprecated_declarations,
12040 "%qE is deprecated (declared at %r%s:%d%R)",
12041 what, "locus", xloc.file, xloc.line);
12042 }
12043 else
12044 {
12045 if (msg)
12046 warning (OPT_Wdeprecated_declarations,
12047 "type is deprecated (declared at %r%s:%d%R): %s",
12048 "locus", xloc.file, xloc.line, msg);
12049 else
12050 warning (OPT_Wdeprecated_declarations,
12051 "type is deprecated (declared at %r%s:%d%R)",
12052 "locus", xloc.file, xloc.line);
12053 }
12054 }
12055 else
12056 {
12057 if (what)
12058 {
12059 if (msg)
12060 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12061 what, msg);
12062 else
12063 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12064 }
12065 else
12066 {
12067 if (msg)
12068 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12069 msg);
12070 else
12071 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12072 }
12073 }
12074 }
12075 }
12076
12077 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12078 somewhere in it. */
12079
12080 bool
12081 contains_bitfld_component_ref_p (const_tree ref)
12082 {
12083 while (handled_component_p (ref))
12084 {
12085 if (TREE_CODE (ref) == COMPONENT_REF
12086 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12087 return true;
12088 ref = TREE_OPERAND (ref, 0);
12089 }
12090
12091 return false;
12092 }
12093
12094 /* Try to determine whether a TRY_CATCH expression can fall through.
12095 This is a subroutine of block_may_fallthru. */
12096
12097 static bool
12098 try_catch_may_fallthru (const_tree stmt)
12099 {
12100 tree_stmt_iterator i;
12101
12102 /* If the TRY block can fall through, the whole TRY_CATCH can
12103 fall through. */
12104 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12105 return true;
12106
12107 i = tsi_start (TREE_OPERAND (stmt, 1));
12108 switch (TREE_CODE (tsi_stmt (i)))
12109 {
12110 case CATCH_EXPR:
12111 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12112 catch expression and a body. The whole TRY_CATCH may fall
12113 through iff any of the catch bodies falls through. */
12114 for (; !tsi_end_p (i); tsi_next (&i))
12115 {
12116 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12117 return true;
12118 }
12119 return false;
12120
12121 case EH_FILTER_EXPR:
12122 /* The exception filter expression only matters if there is an
12123 exception. If the exception does not match EH_FILTER_TYPES,
12124 we will execute EH_FILTER_FAILURE, and we will fall through
12125 if that falls through. If the exception does match
12126 EH_FILTER_TYPES, the stack unwinder will continue up the
12127 stack, so we will not fall through. We don't know whether we
12128 will throw an exception which matches EH_FILTER_TYPES or not,
12129 so we just ignore EH_FILTER_TYPES and assume that we might
12130 throw an exception which doesn't match. */
12131 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12132
12133 default:
12134 /* This case represents statements to be executed when an
12135 exception occurs. Those statements are implicitly followed
12136 by a RESX statement to resume execution after the exception.
12137 So in this case the TRY_CATCH never falls through. */
12138 return false;
12139 }
12140 }
12141
12142 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12143 need not be 100% accurate; simply be conservative and return true if we
12144 don't know. This is used only to avoid stupidly generating extra code.
12145 If we're wrong, we'll just delete the extra code later. */
12146
12147 bool
12148 block_may_fallthru (const_tree block)
12149 {
12150 /* This CONST_CAST is okay because expr_last returns its argument
12151 unmodified and we assign it to a const_tree. */
12152 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12153
12154 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12155 {
12156 case GOTO_EXPR:
12157 case RETURN_EXPR:
12158 /* Easy cases. If the last statement of the block implies
12159 control transfer, then we can't fall through. */
12160 return false;
12161
12162 case SWITCH_EXPR:
12163 /* If SWITCH_LABELS is set, this is lowered, and represents a
12164      branch to a selected label and hence cannot fall through.
12165 Otherwise SWITCH_BODY is set, and the switch can fall
12166 through. */
12167 return SWITCH_LABELS (stmt) == NULL_TREE;
12168
12169 case COND_EXPR:
12170 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12171 return true;
12172 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12173
12174 case BIND_EXPR:
12175 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12176
12177 case TRY_CATCH_EXPR:
12178 return try_catch_may_fallthru (stmt);
12179
12180 case TRY_FINALLY_EXPR:
12181 /* The finally clause is always executed after the try clause,
12182 so if it does not fall through, then the try-finally will not
12183 fall through. Otherwise, if the try clause does not fall
12184 through, then when the finally clause falls through it will
12185 resume execution wherever the try clause was going. So the
12186 whole try-finally will only fall through if both the try
12187 clause and the finally clause fall through. */
12188 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12189 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12190
12191 case MODIFY_EXPR:
12192 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12193 stmt = TREE_OPERAND (stmt, 1);
12194 else
12195 return true;
12196 /* FALLTHRU */
12197
12198 case CALL_EXPR:
12199 /* Functions that do not return do not fall through. */
12200 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12201
12202 case CLEANUP_POINT_EXPR:
12203 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12204
12205 case TARGET_EXPR:
12206 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12207
12208 case ERROR_MARK:
12209 return true;
12210
12211 default:
12212 return lang_hooks.block_may_fallthru (stmt);
12213 }
12214 }
12215
12216 /* True if we are using EH to handle cleanups. */
12217 static bool using_eh_for_cleanups_flag = false;
12218
12219 /* This routine is called from front ends to indicate eh should be used for
12220 cleanups. */
12221 void
12222 using_eh_for_cleanups (void)
12223 {
12224 using_eh_for_cleanups_flag = true;
12225 }
12226
12227 /* Query whether EH is used for cleanups. */
12228 bool
12229 using_eh_for_cleanups_p (void)
12230 {
12231 return using_eh_for_cleanups_flag;
12232 }
12233
12234 /* Wrapper for tree_code_name to ensure that tree code is valid.  */
12235 const char *
12236 get_tree_code_name (enum tree_code code)
12237 {
12238 const char *invalid = "<invalid tree code>";
12239
12240 if (code >= MAX_TREE_CODES)
12241 return invalid;
12242
12243 return tree_code_name[code];
12244 }
12245
12246 /* Drops the TREE_OVERFLOW flag from T. */
12247
12248 tree
12249 drop_tree_overflow (tree t)
12250 {
12251 gcc_checking_assert (TREE_OVERFLOW (t));
12252
12253 /* For tree codes with a sharing machinery re-build the result. */
12254 if (TREE_CODE (t) == INTEGER_CST)
12255 return wide_int_to_tree (TREE_TYPE (t), t);
12256
12257 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12258 and drop the flag. */
12259 t = copy_node (t);
12260 TREE_OVERFLOW (t) = 0;
12261 return t;
12262 }
12263
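/* Illustrative sketch, not part of GCC: a caller that wants the numeric
   value of a constant but does not care about a benign overflow marker can
   strip the flag like this.  The helper name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
constant_without_overflow (tree cst)
{
  /* drop_tree_overflow never touches a possibly shared node in place; it
     rebuilds or copies CST as needed.  */
  return TREE_OVERFLOW_P (cst) ? drop_tree_overflow (cst) : cst;
}
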
12264 /* Given a memory reference expression T, return its base address.
12265 The base address of a memory reference expression is the main
12266 object being referenced. For instance, the base address for
12267 'array[i].fld[j]' is 'array'. You can think of this as stripping
12268 away the offset part from a memory address.
12269
12270 This function calls handled_component_p to strip away all the inner
12271 parts of the memory reference until it reaches the base object. */
12272
12273 tree
12274 get_base_address (tree t)
12275 {
12276 while (handled_component_p (t))
12277 t = TREE_OPERAND (t, 0);
12278
12279 if ((TREE_CODE (t) == MEM_REF
12280 || TREE_CODE (t) == TARGET_MEM_REF)
12281 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12282 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12283
12284 /* ??? Either the alias oracle or all callers need to properly deal
12285 with WITH_SIZE_EXPRs before we can look through those. */
12286 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12287 return NULL_TREE;
12288
12289 return t;
12290 }
12291
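/* Illustrative example, not part of GCC: the MEM_REF/TARGET_MEM_REF case
   above means that for MEM_REF[&q].x the returned base is the declaration
   "q" itself rather than the MEM_REF, while for MEM_REF[p].x with P an
   arbitrary pointer the MEM_REF itself is the base.  */
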
12292 #include "gt-tree.h"