gcc/tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "hashtab.h"
42 #include "hash-set.h"
43 #include "vec.h"
44 #include "machmode.h"
45 #include "hard-reg-set.h"
46 #include "input.h"
47 #include "function.h"
48 #include "obstack.h"
49 #include "toplev.h" /* get_random_seed */
50 #include "inchash.h"
51 #include "filenames.h"
52 #include "output.h"
53 #include "target.h"
54 #include "common/common-target.h"
55 #include "langhooks.h"
56 #include "tree-inline.h"
57 #include "tree-iterator.h"
58 #include "predict.h"
59 #include "dominance.h"
60 #include "cfg.h"
61 #include "basic-block.h"
62 #include "bitmap.h"
63 #include "tree-ssa-alias.h"
64 #include "internal-fn.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimple-iterator.h"
69 #include "gimplify.h"
70 #include "gimple-ssa.h"
71 #include "hash-map.h"
72 #include "plugin-api.h"
73 #include "ipa-ref.h"
74 #include "cgraph.h"
75 #include "tree-phinodes.h"
76 #include "stringpool.h"
77 #include "tree-ssanames.h"
78 #include "expr.h"
79 #include "tree-dfa.h"
80 #include "params.h"
81 #include "tree-pass.h"
82 #include "langhooks-def.h"
83 #include "diagnostic.h"
84 #include "tree-diagnostic.h"
85 #include "tree-pretty-print.h"
86 #include "except.h"
87 #include "debug.h"
88 #include "intl.h"
89 #include "wide-int.h"
90 #include "builtins.h"
91
92 /* Tree code classes. */
93
94 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
95 #define END_OF_BASE_TREE_CODES tcc_exceptional,
96
97 const enum tree_code_class tree_code_type[] = {
98 #include "all-tree.def"
99 };
100
101 #undef DEFTREECODE
102 #undef END_OF_BASE_TREE_CODES
103
104 /* Table indexed by tree code giving number of expression
105 operands beyond the fixed part of the node structure.
106 Not used for types or decls. */
107
108 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
109 #define END_OF_BASE_TREE_CODES 0,
110
111 const unsigned char tree_code_length[] = {
112 #include "all-tree.def"
113 };
114
115 #undef DEFTREECODE
116 #undef END_OF_BASE_TREE_CODES
117
118 /* Names of tree components.
119 Used for printing out the tree and error messages. */
120 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
121 #define END_OF_BASE_TREE_CODES "@dummy",
122
123 static const char *const tree_code_name[] = {
124 #include "all-tree.def"
125 };
126
127 #undef DEFTREECODE
128 #undef END_OF_BASE_TREE_CODES
129
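/* Illustrative sketch, not compiled: each DEFTREECODE entry in all-tree.def
   expands once per table above.  Taking the standard tree.def entry
   DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2) as an example, the
   three expansions contribute the following initializers.  */
#if 0
const enum tree_code_class tree_code_type[]  = { /* ..., */ tcc_binary,  /* ... */ };
const unsigned char tree_code_length[]       = { /* ..., */ 2,           /* ... */ };
static const char *const tree_code_name[]    = { /* ..., */ "plus_expr", /* ... */ };
#endif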
130 /* Each tree code class has an associated string representation.
131 These must correspond to the tree_code_class entries. */
132
133 const char *const tree_code_class_strings[] =
134 {
135 "exceptional",
136 "constant",
137 "type",
138 "declaration",
139 "reference",
140 "comparison",
141 "unary",
142 "binary",
143 "statement",
144 "vl_exp",
145 "expression"
146 };
147
148 /* obstack.[ch] explicitly declined to prototype this. */
149 extern int _obstack_allocated_p (struct obstack *h, void *obj);
150
151 /* Statistics-gathering stuff. */
152
153 static int tree_code_counts[MAX_TREE_CODES];
154 int tree_node_counts[(int) all_kinds];
155 int tree_node_sizes[(int) all_kinds];
156
157 /* Keep in sync with tree.h:enum tree_node_kind. */
158 static const char * const tree_node_kind_names[] = {
159 "decls",
160 "types",
161 "blocks",
162 "stmts",
163 "refs",
164 "exprs",
165 "constants",
166 "identifiers",
167 "vecs",
168 "binfos",
169 "ssa names",
170 "constructors",
171 "random kinds",
172 "lang_decl kinds",
173 "lang_type kinds",
174 "omp clauses",
175 };
176
177 /* Unique id for next decl created. */
178 static GTY(()) int next_decl_uid;
179 /* Unique id for next type created. */
180 static GTY(()) int next_type_uid = 1;
181 /* Unique id for next debug decl created. Use negative numbers,
182 to catch erroneous uses. */
183 static GTY(()) int next_debug_decl_uid;
184
185 /* Since we cannot rehash a type after it is in the table, we have to
186 keep the hash code. */
187
188 struct GTY(()) type_hash {
189 unsigned long hash;
190 tree type;
191 };
192
193 /* Initial size of the hash table (rounded to next prime). */
194 #define TYPE_HASH_INITIAL_SIZE 1000
195
196 /* Now here is the hash table. When recording a type, it is added to
197 the slot whose index is the hash code. Note that the hash table is
198 used for several kinds of types (function types, array types and
199 array index range types, for now). While all these live in the
200 same table, they are completely independent, and the hash code is
201 computed differently for each of these. */
202
203 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
204 htab_t type_hash_table;
205
206 /* Hash table and temporary node for larger integer const values. */
207 static GTY (()) tree int_cst_node;
208 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
209 htab_t int_cst_hash_table;
210
211 /* Hash table for optimization flags and target option flags. Use the same
212 hash table for both sets of options. Nodes for building the current
213 optimization and target option nodes. The assumption is most of the time
214 the options created will already be in the hash table, so we avoid
215 allocating and freeing up a node repeatedly. */
216 static GTY (()) tree cl_optimization_node;
217 static GTY (()) tree cl_target_option_node;
218 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
219 htab_t cl_option_hash_table;
220
221 /* General tree->tree mapping structure for use in hash tables. */
222
223
224 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
225 htab_t debug_expr_for_decl;
226
227 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
228 htab_t value_expr_for_decl;
229
230 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
231 htab_t debug_args_for_decl;
232
233 static void set_type_quals (tree, int);
234 static int type_hash_eq (const void *, const void *);
235 static hashval_t type_hash_hash (const void *);
236 static hashval_t int_cst_hash_hash (const void *);
237 static int int_cst_hash_eq (const void *, const void *);
238 static hashval_t cl_option_hash_hash (const void *);
239 static int cl_option_hash_eq (const void *, const void *);
240 static void print_type_hash_statistics (void);
241 static void print_debug_expr_statistics (void);
242 static void print_value_expr_statistics (void);
243 static int type_hash_marked_p (const void *);
244 static void type_hash_list (const_tree, inchash::hash &);
245 static void attribute_hash_list (const_tree, inchash::hash &);
246
247 tree global_trees[TI_MAX];
248 tree integer_types[itk_none];
249
250 bool int_n_enabled_p[NUM_INT_N_ENTS];
251 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
252
253 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
254
255 /* Number of operands for each OpenMP clause. */
256 unsigned const char omp_clause_num_ops[] =
257 {
258 0, /* OMP_CLAUSE_ERROR */
259 1, /* OMP_CLAUSE_PRIVATE */
260 1, /* OMP_CLAUSE_SHARED */
261 1, /* OMP_CLAUSE_FIRSTPRIVATE */
262 2, /* OMP_CLAUSE_LASTPRIVATE */
263 4, /* OMP_CLAUSE_REDUCTION */
264 1, /* OMP_CLAUSE_COPYIN */
265 1, /* OMP_CLAUSE_COPYPRIVATE */
266 3, /* OMP_CLAUSE_LINEAR */
267 2, /* OMP_CLAUSE_ALIGNED */
268 1, /* OMP_CLAUSE_DEPEND */
269 1, /* OMP_CLAUSE_UNIFORM */
270 2, /* OMP_CLAUSE_FROM */
271 2, /* OMP_CLAUSE_TO */
272 2, /* OMP_CLAUSE_MAP */
273 1, /* OMP_CLAUSE__LOOPTEMP_ */
274 1, /* OMP_CLAUSE_IF */
275 1, /* OMP_CLAUSE_NUM_THREADS */
276 1, /* OMP_CLAUSE_SCHEDULE */
277 0, /* OMP_CLAUSE_NOWAIT */
278 0, /* OMP_CLAUSE_ORDERED */
279 0, /* OMP_CLAUSE_DEFAULT */
280 3, /* OMP_CLAUSE_COLLAPSE */
281 0, /* OMP_CLAUSE_UNTIED */
282 1, /* OMP_CLAUSE_FINAL */
283 0, /* OMP_CLAUSE_MERGEABLE */
284 1, /* OMP_CLAUSE_DEVICE */
285 1, /* OMP_CLAUSE_DIST_SCHEDULE */
286 0, /* OMP_CLAUSE_INBRANCH */
287 0, /* OMP_CLAUSE_NOTINBRANCH */
288 1, /* OMP_CLAUSE_NUM_TEAMS */
289 1, /* OMP_CLAUSE_THREAD_LIMIT */
290 0, /* OMP_CLAUSE_PROC_BIND */
291 1, /* OMP_CLAUSE_SAFELEN */
292 1, /* OMP_CLAUSE_SIMDLEN */
293 0, /* OMP_CLAUSE_FOR */
294 0, /* OMP_CLAUSE_PARALLEL */
295 0, /* OMP_CLAUSE_SECTIONS */
296 0, /* OMP_CLAUSE_TASKGROUP */
297 1, /* OMP_CLAUSE__SIMDUID_ */
298 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
299 };
300
301 const char * const omp_clause_code_name[] =
302 {
303 "error_clause",
304 "private",
305 "shared",
306 "firstprivate",
307 "lastprivate",
308 "reduction",
309 "copyin",
310 "copyprivate",
311 "linear",
312 "aligned",
313 "depend",
314 "uniform",
315 "from",
316 "to",
317 "map",
318 "_looptemp_",
319 "if",
320 "num_threads",
321 "schedule",
322 "nowait",
323 "ordered",
324 "default",
325 "collapse",
326 "untied",
327 "final",
328 "mergeable",
329 "device",
330 "dist_schedule",
331 "inbranch",
332 "notinbranch",
333 "num_teams",
334 "thread_limit",
335 "proc_bind",
336 "safelen",
337 "simdlen",
338 "for",
339 "parallel",
340 "sections",
341 "taskgroup",
342 "_simduid_",
343 "_Cilk_for_count_"
344 };
345
346
347 /* Return the tree node structure used by tree code CODE. */
348
349 static inline enum tree_node_structure_enum
350 tree_node_structure_for_code (enum tree_code code)
351 {
352 switch (TREE_CODE_CLASS (code))
353 {
354 case tcc_declaration:
355 {
356 switch (code)
357 {
358 case FIELD_DECL:
359 return TS_FIELD_DECL;
360 case PARM_DECL:
361 return TS_PARM_DECL;
362 case VAR_DECL:
363 return TS_VAR_DECL;
364 case LABEL_DECL:
365 return TS_LABEL_DECL;
366 case RESULT_DECL:
367 return TS_RESULT_DECL;
368 case DEBUG_EXPR_DECL:
369 return TS_DECL_WRTL;
370 case CONST_DECL:
371 return TS_CONST_DECL;
372 case TYPE_DECL:
373 return TS_TYPE_DECL;
374 case FUNCTION_DECL:
375 return TS_FUNCTION_DECL;
376 case TRANSLATION_UNIT_DECL:
377 return TS_TRANSLATION_UNIT_DECL;
378 default:
379 return TS_DECL_NON_COMMON;
380 }
381 }
382 case tcc_type:
383 return TS_TYPE_NON_COMMON;
384 case tcc_reference:
385 case tcc_comparison:
386 case tcc_unary:
387 case tcc_binary:
388 case tcc_expression:
389 case tcc_statement:
390 case tcc_vl_exp:
391 return TS_EXP;
392 default: /* tcc_constant and tcc_exceptional */
393 break;
394 }
395 switch (code)
396 {
397 /* tcc_constant cases. */
398 case VOID_CST: return TS_TYPED;
399 case INTEGER_CST: return TS_INT_CST;
400 case REAL_CST: return TS_REAL_CST;
401 case FIXED_CST: return TS_FIXED_CST;
402 case COMPLEX_CST: return TS_COMPLEX;
403 case VECTOR_CST: return TS_VECTOR;
404 case STRING_CST: return TS_STRING;
405 /* tcc_exceptional cases. */
406 case ERROR_MARK: return TS_COMMON;
407 case IDENTIFIER_NODE: return TS_IDENTIFIER;
408 case TREE_LIST: return TS_LIST;
409 case TREE_VEC: return TS_VEC;
410 case SSA_NAME: return TS_SSA_NAME;
411 case PLACEHOLDER_EXPR: return TS_COMMON;
412 case STATEMENT_LIST: return TS_STATEMENT_LIST;
413 case BLOCK: return TS_BLOCK;
414 case CONSTRUCTOR: return TS_CONSTRUCTOR;
415 case TREE_BINFO: return TS_BINFO;
416 case OMP_CLAUSE: return TS_OMP_CLAUSE;
417 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
418 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
419
420 default:
421 gcc_unreachable ();
422 }
423 }
424
425
426 /* Initialize tree_contains_struct to describe the hierarchy of tree
427 nodes. */
428
429 static void
430 initialize_tree_contains_struct (void)
431 {
432 unsigned i;
433
434 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
435 {
436 enum tree_code code;
437 enum tree_node_structure_enum ts_code;
438
439 code = (enum tree_code) i;
440 ts_code = tree_node_structure_for_code (code);
441
442 /* Mark the TS structure itself. */
443 tree_contains_struct[code][ts_code] = 1;
444
445 /* Mark all the structures that TS is derived from. */
446 switch (ts_code)
447 {
448 case TS_TYPED:
449 case TS_BLOCK:
450 MARK_TS_BASE (code);
451 break;
452
453 case TS_COMMON:
454 case TS_INT_CST:
455 case TS_REAL_CST:
456 case TS_FIXED_CST:
457 case TS_VECTOR:
458 case TS_STRING:
459 case TS_COMPLEX:
460 case TS_SSA_NAME:
461 case TS_CONSTRUCTOR:
462 case TS_EXP:
463 case TS_STATEMENT_LIST:
464 MARK_TS_TYPED (code);
465 break;
466
467 case TS_IDENTIFIER:
468 case TS_DECL_MINIMAL:
469 case TS_TYPE_COMMON:
470 case TS_LIST:
471 case TS_VEC:
472 case TS_BINFO:
473 case TS_OMP_CLAUSE:
474 case TS_OPTIMIZATION:
475 case TS_TARGET_OPTION:
476 MARK_TS_COMMON (code);
477 break;
478
479 case TS_TYPE_WITH_LANG_SPECIFIC:
480 MARK_TS_TYPE_COMMON (code);
481 break;
482
483 case TS_TYPE_NON_COMMON:
484 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
485 break;
486
487 case TS_DECL_COMMON:
488 MARK_TS_DECL_MINIMAL (code);
489 break;
490
491 case TS_DECL_WRTL:
492 case TS_CONST_DECL:
493 MARK_TS_DECL_COMMON (code);
494 break;
495
496 case TS_DECL_NON_COMMON:
497 MARK_TS_DECL_WITH_VIS (code);
498 break;
499
500 case TS_DECL_WITH_VIS:
501 case TS_PARM_DECL:
502 case TS_LABEL_DECL:
503 case TS_RESULT_DECL:
504 MARK_TS_DECL_WRTL (code);
505 break;
506
507 case TS_FIELD_DECL:
508 MARK_TS_DECL_COMMON (code);
509 break;
510
511 case TS_VAR_DECL:
512 MARK_TS_DECL_WITH_VIS (code);
513 break;
514
515 case TS_TYPE_DECL:
516 case TS_FUNCTION_DECL:
517 MARK_TS_DECL_NON_COMMON (code);
518 break;
519
520 case TS_TRANSLATION_UNIT_DECL:
521 MARK_TS_DECL_COMMON (code);
522 break;
523
524 default:
525 gcc_unreachable ();
526 }
527 }
528
529 /* Basic consistency checks for attributes used in fold. */
530 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
531 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
532 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
533 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
534 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
535 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
536 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
537 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
538 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
539 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
540 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
541 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
542 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
543 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
544 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
545 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
546 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
547 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
548 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
549 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
550 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
551 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
552 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
553 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
554 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
555 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
556 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
557 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
558 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
559 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
560 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
561 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
562 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
563 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
564 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
565 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
566 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
567 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
568 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
569 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
570 }
571
572
573 /* Init tree.c. */
574
575 void
576 init_ttree (void)
577 {
578 /* Initialize the hash table of types. */
579 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
580 type_hash_eq, 0);
581
582 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
583 tree_decl_map_eq, 0);
584
585 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
586 tree_decl_map_eq, 0);
587
588 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
589 int_cst_hash_eq, NULL);
590
591 int_cst_node = make_int_cst (1, 1);
592
593 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
594 cl_option_hash_eq, NULL);
595
596 cl_optimization_node = make_node (OPTIMIZATION_NODE);
597 cl_target_option_node = make_node (TARGET_OPTION_NODE);
598
599 /* Initialize the tree_contains_struct array. */
600 initialize_tree_contains_struct ();
601 lang_hooks.init_ts ();
602 }
603
604 \f
605 /* The name of the object as the assembler will see it (but before any
606 translations made by ASM_OUTPUT_LABELREF). Often this is the same
607 as DECL_NAME. It is an IDENTIFIER_NODE. */
608 tree
609 decl_assembler_name (tree decl)
610 {
611 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
612 lang_hooks.set_decl_assembler_name (decl);
613 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
614 }
615
616 /* When the target supports COMDAT groups, this indicates which group the
617 DECL is associated with. This can be either an IDENTIFIER_NODE or a
618 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
619 tree
620 decl_comdat_group (const_tree node)
621 {
622 struct symtab_node *snode = symtab_node::get (node);
623 if (!snode)
624 return NULL;
625 return snode->get_comdat_group ();
626 }
627
628 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
629 tree
630 decl_comdat_group_id (const_tree node)
631 {
632 struct symtab_node *snode = symtab_node::get (node);
633 if (!snode)
634 return NULL;
635 return snode->get_comdat_group_id ();
636 }
637
638 /* When the target supports named sections, return the section name of
639 NODE as a string, or NULL if it is in no section. */
640 const char *
641 decl_section_name (const_tree node)
642 {
643 struct symtab_node *snode = symtab_node::get (node);
644 if (!snode)
645 return NULL;
646 return snode->get_section ();
647 }
648
649 /* Set the section name of NODE to the string VALUE, or clear it
650 when VALUE is NULL. */
651 void
652 set_decl_section_name (tree node, const char *value)
653 {
654 struct symtab_node *snode;
655
656 if (value == NULL)
657 {
658 snode = symtab_node::get (node);
659 if (!snode)
660 return;
661 }
662 else if (TREE_CODE (node) == VAR_DECL)
663 snode = varpool_node::get_create (node);
664 else
665 snode = cgraph_node::get_create (node);
666 snode->set_section (value);
667 }
668
669 /* Return TLS model of a variable NODE. */
670 enum tls_model
671 decl_tls_model (const_tree node)
672 {
673 struct varpool_node *snode = varpool_node::get (node);
674 if (!snode)
675 return TLS_MODEL_NONE;
676 return snode->tls_model;
677 }
678
679 /* Set TLS model of variable NODE to MODEL. */
680 void
681 set_decl_tls_model (tree node, enum tls_model model)
682 {
683 struct varpool_node *vnode;
684
685 if (model == TLS_MODEL_NONE)
686 {
687 vnode = varpool_node::get (node);
688 if (!vnode)
689 return;
690 }
691 else
692 vnode = varpool_node::get_create (node);
693 vnode->tls_model = model;
694 }
695
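/* Illustrative sketch, not compiled: the accessors above keep section and
   TLS information in the symbol table rather than in the decl itself.
   Assuming the symbol table has been initialized and DECL is a VAR_DECL,
   a round trip through set_decl_section_name/decl_section_name looks
   like this.  */
#if 0
static void
example_decl_section (tree decl)
{
  set_decl_section_name (decl, ".my_section");  /* creates the varpool node  */
  const char *name = decl_section_name (decl);  /* reads the name back       */
  gcc_assert (name != NULL && strcmp (name, ".my_section") == 0);
}
#endif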
696 /* Compute the number of bytes occupied by a tree with code CODE.
697 This function cannot be used for nodes that have variable sizes,
698 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
699 size_t
700 tree_code_size (enum tree_code code)
701 {
702 switch (TREE_CODE_CLASS (code))
703 {
704 case tcc_declaration: /* A decl node */
705 {
706 switch (code)
707 {
708 case FIELD_DECL:
709 return sizeof (struct tree_field_decl);
710 case PARM_DECL:
711 return sizeof (struct tree_parm_decl);
712 case VAR_DECL:
713 return sizeof (struct tree_var_decl);
714 case LABEL_DECL:
715 return sizeof (struct tree_label_decl);
716 case RESULT_DECL:
717 return sizeof (struct tree_result_decl);
718 case CONST_DECL:
719 return sizeof (struct tree_const_decl);
720 case TYPE_DECL:
721 return sizeof (struct tree_type_decl);
722 case FUNCTION_DECL:
723 return sizeof (struct tree_function_decl);
724 case DEBUG_EXPR_DECL:
725 return sizeof (struct tree_decl_with_rtl);
726 case TRANSLATION_UNIT_DECL:
727 return sizeof (struct tree_translation_unit_decl);
728 case NAMESPACE_DECL:
729 case IMPORTED_DECL:
730 case NAMELIST_DECL:
731 return sizeof (struct tree_decl_non_common);
732 default:
733 return lang_hooks.tree_size (code);
734 }
735 }
736
737 case tcc_type: /* a type node */
738 return sizeof (struct tree_type_non_common);
739
740 case tcc_reference: /* a reference */
741 case tcc_expression: /* an expression */
742 case tcc_statement: /* an expression with side effects */
743 case tcc_comparison: /* a comparison expression */
744 case tcc_unary: /* a unary arithmetic expression */
745 case tcc_binary: /* a binary arithmetic expression */
746 return (sizeof (struct tree_exp)
747 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
748
749 case tcc_constant: /* a constant */
750 switch (code)
751 {
752 case VOID_CST: return sizeof (struct tree_typed);
753 case INTEGER_CST: gcc_unreachable ();
754 case REAL_CST: return sizeof (struct tree_real_cst);
755 case FIXED_CST: return sizeof (struct tree_fixed_cst);
756 case COMPLEX_CST: return sizeof (struct tree_complex);
757 case VECTOR_CST: return sizeof (struct tree_vector);
758 case STRING_CST: gcc_unreachable ();
759 default:
760 return lang_hooks.tree_size (code);
761 }
762
763 case tcc_exceptional: /* something random, like an identifier. */
764 switch (code)
765 {
766 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
767 case TREE_LIST: return sizeof (struct tree_list);
768
769 case ERROR_MARK:
770 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
771
772 case TREE_VEC:
773 case OMP_CLAUSE: gcc_unreachable ();
774
775 case SSA_NAME: return sizeof (struct tree_ssa_name);
776
777 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
778 case BLOCK: return sizeof (struct tree_block);
779 case CONSTRUCTOR: return sizeof (struct tree_constructor);
780 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
781 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
782
783 default:
784 return lang_hooks.tree_size (code);
785 }
786
787 default:
788 gcc_unreachable ();
789 }
790 }
791
792 /* Compute the number of bytes occupied by NODE. This routine only
793 looks at TREE_CODE, except for those nodes that have variable sizes. */
794 size_t
795 tree_size (const_tree node)
796 {
797 const enum tree_code code = TREE_CODE (node);
798 switch (code)
799 {
800 case INTEGER_CST:
801 return (sizeof (struct tree_int_cst)
802 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
803
804 case TREE_BINFO:
805 return (offsetof (struct tree_binfo, base_binfos)
806 + vec<tree, va_gc>
807 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
808
809 case TREE_VEC:
810 return (sizeof (struct tree_vec)
811 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
812
813 case VECTOR_CST:
814 return (sizeof (struct tree_vector)
815 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
816
817 case STRING_CST:
818 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
819
820 case OMP_CLAUSE:
821 return (sizeof (struct tree_omp_clause)
822 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
823 * sizeof (tree));
824
825 default:
826 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
827 return (sizeof (struct tree_exp)
828 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
829 else
830 return tree_code_size (code);
831 }
832 }
833
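/* Illustrative sketch, not compiled: for a fixed-size binary expression code
   such as PLUS_EXPR (two operands), tree_code_size follows the tcc_binary
   formula above; tree_size simply defers to it for fixed-size codes.  */
#if 0
static void
example_tree_code_size (void)
{
  /* sizeof (struct tree_exp) already includes one operand slot.  */
  gcc_assert (tree_code_size (PLUS_EXPR)
	      == sizeof (struct tree_exp) + sizeof (tree));
}
#endif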
834 /* Record interesting allocation statistics for a tree node with CODE
835 and LENGTH. */
836
837 static void
838 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
839 size_t length ATTRIBUTE_UNUSED)
840 {
841 enum tree_code_class type = TREE_CODE_CLASS (code);
842 tree_node_kind kind;
843
844 if (!GATHER_STATISTICS)
845 return;
846
847 switch (type)
848 {
849 case tcc_declaration: /* A decl node */
850 kind = d_kind;
851 break;
852
853 case tcc_type: /* a type node */
854 kind = t_kind;
855 break;
856
857 case tcc_statement: /* an expression with side effects */
858 kind = s_kind;
859 break;
860
861 case tcc_reference: /* a reference */
862 kind = r_kind;
863 break;
864
865 case tcc_expression: /* an expression */
866 case tcc_comparison: /* a comparison expression */
867 case tcc_unary: /* a unary arithmetic expression */
868 case tcc_binary: /* a binary arithmetic expression */
869 kind = e_kind;
870 break;
871
872 case tcc_constant: /* a constant */
873 kind = c_kind;
874 break;
875
876 case tcc_exceptional: /* something random, like an identifier. */
877 switch (code)
878 {
879 case IDENTIFIER_NODE:
880 kind = id_kind;
881 break;
882
883 case TREE_VEC:
884 kind = vec_kind;
885 break;
886
887 case TREE_BINFO:
888 kind = binfo_kind;
889 break;
890
891 case SSA_NAME:
892 kind = ssa_name_kind;
893 break;
894
895 case BLOCK:
896 kind = b_kind;
897 break;
898
899 case CONSTRUCTOR:
900 kind = constr_kind;
901 break;
902
903 case OMP_CLAUSE:
904 kind = omp_clause_kind;
905 break;
906
907 default:
908 kind = x_kind;
909 break;
910 }
911 break;
912
913 case tcc_vl_exp:
914 kind = e_kind;
915 break;
916
917 default:
918 gcc_unreachable ();
919 }
920
921 tree_code_counts[(int) code]++;
922 tree_node_counts[(int) kind]++;
923 tree_node_sizes[(int) kind] += length;
924 }
925
926 /* Allocate and return a new UID from the DECL_UID namespace. */
927
928 int
929 allocate_decl_uid (void)
930 {
931 return next_decl_uid++;
932 }
933
934 /* Return a newly allocated node of code CODE. For decl and type
935 nodes, some other fields are initialized. The rest of the node is
936 initialized to zero. This function cannot be used for TREE_VEC,
937 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
938 tree_code_size.
939
940 Achoo! I got a code in the node. */
941
942 tree
943 make_node_stat (enum tree_code code MEM_STAT_DECL)
944 {
945 tree t;
946 enum tree_code_class type = TREE_CODE_CLASS (code);
947 size_t length = tree_code_size (code);
948
949 record_node_allocation_statistics (code, length);
950
951 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
952 TREE_SET_CODE (t, code);
953
954 switch (type)
955 {
956 case tcc_statement:
957 TREE_SIDE_EFFECTS (t) = 1;
958 break;
959
960 case tcc_declaration:
961 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
962 {
963 if (code == FUNCTION_DECL)
964 {
965 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
966 DECL_MODE (t) = FUNCTION_MODE;
967 }
968 else
969 DECL_ALIGN (t) = 1;
970 }
971 DECL_SOURCE_LOCATION (t) = input_location;
972 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
973 DECL_UID (t) = --next_debug_decl_uid;
974 else
975 {
976 DECL_UID (t) = allocate_decl_uid ();
977 SET_DECL_PT_UID (t, -1);
978 }
979 if (TREE_CODE (t) == LABEL_DECL)
980 LABEL_DECL_UID (t) = -1;
981
982 break;
983
984 case tcc_type:
985 TYPE_UID (t) = next_type_uid++;
986 TYPE_ALIGN (t) = BITS_PER_UNIT;
987 TYPE_USER_ALIGN (t) = 0;
988 TYPE_MAIN_VARIANT (t) = t;
989 TYPE_CANONICAL (t) = t;
990
991 /* Default to no attributes for type, but let target change that. */
992 TYPE_ATTRIBUTES (t) = NULL_TREE;
993 targetm.set_default_type_attributes (t);
994
995 /* We have not yet computed the alias set for this type. */
996 TYPE_ALIAS_SET (t) = -1;
997 break;
998
999 case tcc_constant:
1000 TREE_CONSTANT (t) = 1;
1001 break;
1002
1003 case tcc_expression:
1004 switch (code)
1005 {
1006 case INIT_EXPR:
1007 case MODIFY_EXPR:
1008 case VA_ARG_EXPR:
1009 case PREDECREMENT_EXPR:
1010 case PREINCREMENT_EXPR:
1011 case POSTDECREMENT_EXPR:
1012 case POSTINCREMENT_EXPR:
1013 /* All of these have side-effects, no matter what their
1014 operands are. */
1015 TREE_SIDE_EFFECTS (t) = 1;
1016 break;
1017
1018 default:
1019 break;
1020 }
1021 break;
1022
1023 default:
1024 /* Other classes need no special treatment. */
1025 break;
1026 }
1027
1028 return t;
1029 }
1030 \f
1031 /* Return a new node with the same contents as NODE except that its
1032 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1033
1034 tree
1035 copy_node_stat (tree node MEM_STAT_DECL)
1036 {
1037 tree t;
1038 enum tree_code code = TREE_CODE (node);
1039 size_t length;
1040
1041 gcc_assert (code != STATEMENT_LIST);
1042
1043 length = tree_size (node);
1044 record_node_allocation_statistics (code, length);
1045 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1046 memcpy (t, node, length);
1047
1048 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1049 TREE_CHAIN (t) = 0;
1050 TREE_ASM_WRITTEN (t) = 0;
1051 TREE_VISITED (t) = 0;
1052
1053 if (TREE_CODE_CLASS (code) == tcc_declaration)
1054 {
1055 if (code == DEBUG_EXPR_DECL)
1056 DECL_UID (t) = --next_debug_decl_uid;
1057 else
1058 {
1059 DECL_UID (t) = allocate_decl_uid ();
1060 if (DECL_PT_UID_SET_P (node))
1061 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1062 }
1063 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1064 && DECL_HAS_VALUE_EXPR_P (node))
1065 {
1066 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1067 DECL_HAS_VALUE_EXPR_P (t) = 1;
1068 }
1069 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1070 if (TREE_CODE (node) == VAR_DECL)
1071 {
1072 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1073 t->decl_with_vis.symtab_node = NULL;
1074 }
1075 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1076 {
1077 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1078 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1079 }
1080 if (TREE_CODE (node) == FUNCTION_DECL)
1081 {
1082 DECL_STRUCT_FUNCTION (t) = NULL;
1083 t->decl_with_vis.symtab_node = NULL;
1084 }
1085 }
1086 else if (TREE_CODE_CLASS (code) == tcc_type)
1087 {
1088 TYPE_UID (t) = next_type_uid++;
1089 /* The following is so that the debug code for
1090 the copy is different from the original type.
1091 The two statements usually duplicate each other
1092 (because they clear fields of the same union),
1093 but the optimizer should catch that. */
1094 TYPE_SYMTAB_POINTER (t) = 0;
1095 TYPE_SYMTAB_ADDRESS (t) = 0;
1096
1097 /* Do not copy the values cache. */
1098 if (TYPE_CACHED_VALUES_P (t))
1099 {
1100 TYPE_CACHED_VALUES_P (t) = 0;
1101 TYPE_CACHED_VALUES (t) = NULL_TREE;
1102 }
1103 }
1104
1105 return t;
1106 }
1107
1108 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1109 For example, this can copy a list made of TREE_LIST nodes. */
1110
1111 tree
1112 copy_list (tree list)
1113 {
1114 tree head;
1115 tree prev, next;
1116
1117 if (list == 0)
1118 return 0;
1119
1120 head = prev = copy_node (list);
1121 next = TREE_CHAIN (list);
1122 while (next)
1123 {
1124 TREE_CHAIN (prev) = copy_node (next);
1125 prev = TREE_CHAIN (prev);
1126 next = TREE_CHAIN (next);
1127 }
1128 return head;
1129 }
1130
1131 \f
1132 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1133 INTEGER_CST with value CST and type TYPE. */
1134
1135 static unsigned int
1136 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1137 {
1138 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1139 /* We need an extra zero HWI if CST is an unsigned integer with its
1140 upper bit set, and if CST occupies a whole number of HWIs. */
1141 if (TYPE_UNSIGNED (type)
1142 && wi::neg_p (cst)
1143 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1144 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1145 return cst.get_len ();
1146 }
1147
1148 /* Return a new INTEGER_CST with value CST and type TYPE. */
1149
1150 static tree
1151 build_new_int_cst (tree type, const wide_int &cst)
1152 {
1153 unsigned int len = cst.get_len ();
1154 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1155 tree nt = make_int_cst (len, ext_len);
1156
1157 if (len < ext_len)
1158 {
1159 --ext_len;
1160 TREE_INT_CST_ELT (nt, ext_len) = 0;
1161 for (unsigned int i = len; i < ext_len; ++i)
1162 TREE_INT_CST_ELT (nt, i) = -1;
1163 }
1164 else if (TYPE_UNSIGNED (type)
1165 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1166 {
1167 len--;
1168 TREE_INT_CST_ELT (nt, len)
1169 = zext_hwi (cst.elt (len),
1170 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1171 }
1172
1173 for (unsigned int i = 0; i < len; i++)
1174 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1175 TREE_TYPE (nt) = type;
1176 return nt;
1177 }
1178
1179 /* Create an INTEGER_CST node with value LOW sign-extended to the precision of TYPE. */
1180
1181 tree
1182 build_int_cst (tree type, HOST_WIDE_INT low)
1183 {
1184 /* Support legacy code. */
1185 if (!type)
1186 type = integer_type_node;
1187
1188 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1189 }
1190
1191 tree
1192 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1193 {
1194 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1195 }
1196
1197 /* Create an INTEGER_CST node with value LOW sign-extended to the precision of TYPE. */
1198
1199 tree
1200 build_int_cst_type (tree type, HOST_WIDE_INT low)
1201 {
1202 gcc_assert (type);
1203 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1204 }
1205
1206 /* Construct a tree of type TYPE with the value given by CST. Signedness
1207 of CST is assumed to be the same as the signedness of TYPE. */
1208
1209 tree
1210 double_int_to_tree (tree type, double_int cst)
1211 {
1212 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1213 }
1214
1215 /* Force the wide_int CST into the range of the type TYPE by sign- or
1216 zero-extending it. OVERFLOWABLE indicates whether we are interested
1217 in overflow of the value: when > 0 we only care about signed
1218 overflow, and when < 0 we care about any overflow. OVERFLOWED
1219 indicates whether overflow has already occurred in whatever
1220 computation produced CST. The returned constant has its value
1221 forced into the range of TYPE. We set TREE_OVERFLOW on it if
1222 OVERFLOWED is nonzero,
1223 or OVERFLOWABLE is > 0 and signed overflow occurs,
1224 or OVERFLOWABLE is < 0 and any overflow occurs.
1225 We return a tree node for the extended wide_int. The node is
1226 newly allocated (unshared) if an overflow flag is set; otherwise
1227 it is the shared node returned by wide_int_to_tree. */
1228
1229
1230 tree
1231 force_fit_type (tree type, const wide_int_ref &cst,
1232 int overflowable, bool overflowed)
1233 {
1234 signop sign = TYPE_SIGN (type);
1235
1236 /* If we need to set overflow flags, return a new unshared node. */
1237 if (overflowed || !wi::fits_to_tree_p (cst, type))
1238 {
1239 if (overflowed
1240 || overflowable < 0
1241 || (overflowable > 0 && sign == SIGNED))
1242 {
1243 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1244 tree t = build_new_int_cst (type, tmp);
1245 TREE_OVERFLOW (t) = 1;
1246 return t;
1247 }
1248 }
1249
1250 /* Else build a shared node. */
1251 return wide_int_to_tree (type, cst);
1252 }
1253
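/* Illustrative sketch, not compiled: forcing a value that does not fit a
   narrower signed type yields an unshared INTEGER_CST with TREE_OVERFLOW
   set, here assuming the usual 8-bit signed char.  */
#if 0
static void
example_force_fit_type (void)
{
  tree i = build_int_cst (integer_type_node, 130);
  tree t = force_fit_type (signed_char_type_node, wi::to_widest (i), 1, false);
  gcc_assert (TREE_OVERFLOW (t));
  gcc_assert (tree_to_shwi (t) == -126);  /* 130 truncated to 8 signed bits  */
}
#endif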
1254 /* These are the hash table functions for the hash table of shared
1255 INTEGER_CST nodes. */
1256
1257 /* Return the hash code of X, an INTEGER_CST. */
1258
1259 static hashval_t
1260 int_cst_hash_hash (const void *x)
1261 {
1262 const_tree const t = (const_tree) x;
1263 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1264 int i;
1265
1266 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1267 code ^= TREE_INT_CST_ELT (t, i);
1268
1269 return code;
1270 }
1271
1272 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1273 is the same as that given by *Y, also an INTEGER_CST tree node. */
1274
1275 static int
1276 int_cst_hash_eq (const void *x, const void *y)
1277 {
1278 const_tree const xt = (const_tree) x;
1279 const_tree const yt = (const_tree) y;
1280
1281 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1282 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1283 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1284 return false;
1285
1286 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1287 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1288 return false;
1289
1290 return true;
1291 }
1292
1293 /* Create an INT_CST node of TYPE and value CST.
1294 The returned node is always shared. For small integers we use a
1295 per-type vector cache, for larger ones we use a single hash table.
1296 The value is extended from its precision according to the sign of
1297 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1298 the upper bits and ensures that hashing and value equality based
1299 upon the underlying HOST_WIDE_INTs works without masking. */
1300
1301 tree
1302 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1303 {
1304 tree t;
1305 int ix = -1;
1306 int limit = 0;
1307
1308 gcc_assert (type);
1309 unsigned int prec = TYPE_PRECISION (type);
1310 signop sgn = TYPE_SIGN (type);
1311
1312 /* Verify that everything is canonical. */
1313 int l = pcst.get_len ();
1314 if (l > 1)
1315 {
1316 if (pcst.elt (l - 1) == 0)
1317 gcc_checking_assert (pcst.elt (l - 2) < 0);
1318 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1319 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1320 }
1321
1322 wide_int cst = wide_int::from (pcst, prec, sgn);
1323 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1324
1325 if (ext_len == 1)
1326 {
1327 /* We just need to store a single HOST_WIDE_INT. */
1328 HOST_WIDE_INT hwi;
1329 if (TYPE_UNSIGNED (type))
1330 hwi = cst.to_uhwi ();
1331 else
1332 hwi = cst.to_shwi ();
1333
1334 switch (TREE_CODE (type))
1335 {
1336 case NULLPTR_TYPE:
1337 gcc_assert (hwi == 0);
1338 /* Fallthru. */
1339
1340 case POINTER_TYPE:
1341 case REFERENCE_TYPE:
1342 /* Cache NULL pointer. */
1343 if (hwi == 0)
1344 {
1345 limit = 1;
1346 ix = 0;
1347 }
1348 break;
1349
1350 case BOOLEAN_TYPE:
1351 /* Cache false or true. */
1352 limit = 2;
1353 if (hwi < 2)
1354 ix = hwi;
1355 break;
1356
1357 case INTEGER_TYPE:
1358 case OFFSET_TYPE:
1359 if (TYPE_SIGN (type) == UNSIGNED)
1360 {
1361 /* Cache [0, N). */
1362 limit = INTEGER_SHARE_LIMIT;
1363 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1364 ix = hwi;
1365 }
1366 else
1367 {
1368 /* Cache [-1, N). */
1369 limit = INTEGER_SHARE_LIMIT + 1;
1370 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1371 ix = hwi + 1;
1372 }
1373 break;
1374
1375 case ENUMERAL_TYPE:
1376 break;
1377
1378 default:
1379 gcc_unreachable ();
1380 }
1381
1382 if (ix >= 0)
1383 {
1384 /* Look for it in the type's vector of small shared ints. */
1385 if (!TYPE_CACHED_VALUES_P (type))
1386 {
1387 TYPE_CACHED_VALUES_P (type) = 1;
1388 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1389 }
1390
1391 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1392 if (t)
1393 /* Make sure no one is clobbering the shared constant. */
1394 gcc_checking_assert (TREE_TYPE (t) == type
1395 && TREE_INT_CST_NUNITS (t) == 1
1396 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1397 && TREE_INT_CST_EXT_NUNITS (t) == 1
1398 && TREE_INT_CST_ELT (t, 0) == hwi);
1399 else
1400 {
1401 /* Create a new shared int. */
1402 t = build_new_int_cst (type, cst);
1403 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1404 }
1405 }
1406 else
1407 {
1408 /* Use the cache of larger shared ints, using int_cst_node as
1409 a temporary. */
1410 void **slot;
1411
1412 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1413 TREE_TYPE (int_cst_node) = type;
1414
1415 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1416 t = (tree) *slot;
1417 if (!t)
1418 {
1419 /* Insert this one into the hash table. */
1420 t = int_cst_node;
1421 *slot = t;
1422 /* Make a new node for next time round. */
1423 int_cst_node = make_int_cst (1, 1);
1424 }
1425 }
1426 }
1427 else
1428 {
1429 /* The value either hashes properly or we drop it on the floor
1430 for the gc to take care of. There will not be enough of them
1431 to worry about. */
1432 void **slot;
1433
1434 tree nt = build_new_int_cst (type, cst);
1435 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1436 t = (tree) *slot;
1437 if (!t)
1438 {
1439 /* Insert this one into the hash table. */
1440 t = nt;
1441 *slot = t;
1442 }
1443 }
1444
1445 return t;
1446 }
1447
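/* Illustrative sketch, not compiled: small constants of a given type are
   shared through TYPE_CACHED_VALUES, so repeated requests for the same
   value return pointer-identical nodes (assuming the value is below
   INTEGER_SHARE_LIMIT).  */
#if 0
static void
example_shared_int_cst (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = wide_int_to_tree (integer_type_node,
			     wi::shwi (7, TYPE_PRECISION (integer_type_node)));
  gcc_assert (a == b);
  gcc_assert (tree_to_shwi (a) == 7);
}
#endif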
1448 void
1449 cache_integer_cst (tree t)
1450 {
1451 tree type = TREE_TYPE (t);
1452 int ix = -1;
1453 int limit = 0;
1454 int prec = TYPE_PRECISION (type);
1455
1456 gcc_assert (!TREE_OVERFLOW (t));
1457
1458 switch (TREE_CODE (type))
1459 {
1460 case NULLPTR_TYPE:
1461 gcc_assert (integer_zerop (t));
1462 /* Fallthru. */
1463
1464 case POINTER_TYPE:
1465 case REFERENCE_TYPE:
1466 /* Cache NULL pointer. */
1467 if (integer_zerop (t))
1468 {
1469 limit = 1;
1470 ix = 0;
1471 }
1472 break;
1473
1474 case BOOLEAN_TYPE:
1475 /* Cache false or true. */
1476 limit = 2;
1477 if (wi::ltu_p (t, 2))
1478 ix = TREE_INT_CST_ELT (t, 0);
1479 break;
1480
1481 case INTEGER_TYPE:
1482 case OFFSET_TYPE:
1483 if (TYPE_UNSIGNED (type))
1484 {
1485 /* Cache 0..N */
1486 limit = INTEGER_SHARE_LIMIT;
1487
1488 /* This is a little hokey, but if the prec is smaller than
1489 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1490 obvious test will not get the correct answer. */
1491 if (prec < HOST_BITS_PER_WIDE_INT)
1492 {
1493 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1494 ix = tree_to_uhwi (t);
1495 }
1496 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1497 ix = tree_to_uhwi (t);
1498 }
1499 else
1500 {
1501 /* Cache -1..N */
1502 limit = INTEGER_SHARE_LIMIT + 1;
1503
1504 if (integer_minus_onep (t))
1505 ix = 0;
1506 else if (!wi::neg_p (t))
1507 {
1508 if (prec < HOST_BITS_PER_WIDE_INT)
1509 {
1510 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1511 ix = tree_to_shwi (t) + 1;
1512 }
1513 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1514 ix = tree_to_shwi (t) + 1;
1515 }
1516 }
1517 break;
1518
1519 case ENUMERAL_TYPE:
1520 break;
1521
1522 default:
1523 gcc_unreachable ();
1524 }
1525
1526 if (ix >= 0)
1527 {
1528 /* Look for it in the type's vector of small shared ints. */
1529 if (!TYPE_CACHED_VALUES_P (type))
1530 {
1531 TYPE_CACHED_VALUES_P (type) = 1;
1532 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1533 }
1534
1535 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1536 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1537 }
1538 else
1539 {
1540 /* Use the cache of larger shared ints. */
1541 void **slot;
1542
1543 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1544 /* If there is already an entry for the number verify it's the
1545 same. */
1546 if (*slot)
1547 gcc_assert (wi::eq_p (tree (*slot), t));
1548 else
1549 /* Otherwise insert this one into the hash table. */
1550 *slot = t;
1551 }
1552 }
1553
1554
1555 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1556 and the rest are zeros. */
1557
1558 tree
1559 build_low_bits_mask (tree type, unsigned bits)
1560 {
1561 gcc_assert (bits <= TYPE_PRECISION (type));
1562
1563 return wide_int_to_tree (type, wi::mask (bits, false,
1564 TYPE_PRECISION (type)));
1565 }
1566
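/* Illustrative sketch, not compiled: a mask of the low four bits of an
   unsigned type is simply the shared constant 0xf.  */
#if 0
static void
example_low_bits_mask (void)
{
  tree m = build_low_bits_mask (unsigned_type_node, 4);
  gcc_assert (tree_to_uhwi (m) == 0xf);
}
#endif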
1567 /* Checks that X is an integer constant that can be expressed in (unsigned)
1568 HOST_WIDE_INT without loss of precision. */
1569
1570 bool
1571 cst_and_fits_in_hwi (const_tree x)
1572 {
1573 if (TREE_CODE (x) != INTEGER_CST)
1574 return false;
1575
1576 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1577 return false;
1578
1579 return TREE_INT_CST_NUNITS (x) == 1;
1580 }
1581
1582 /* Build a newly constructed VECTOR_CST node with LEN element slots. */
1583
1584 tree
1585 make_vector_stat (unsigned len MEM_STAT_DECL)
1586 {
1587 tree t;
1588 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1589
1590 record_node_allocation_statistics (VECTOR_CST, length);
1591
1592 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1593
1594 TREE_SET_CODE (t, VECTOR_CST);
1595 TREE_CONSTANT (t) = 1;
1596
1597 return t;
1598 }
1599
1600 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1601 are given by the array VALS. */
1602
1603 tree
1604 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1605 {
1606 int over = 0;
1607 unsigned cnt = 0;
1608 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1609 TREE_TYPE (v) = type;
1610
1611 /* Iterate through elements and check for overflow. */
1612 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1613 {
1614 tree value = vals[cnt];
1615
1616 VECTOR_CST_ELT (v, cnt) = value;
1617
1618 /* Don't crash if we get an address constant. */
1619 if (!CONSTANT_CLASS_P (value))
1620 continue;
1621
1622 over |= TREE_OVERFLOW (value);
1623 }
1624
1625 TREE_OVERFLOW (v) = over;
1626 return v;
1627 }
1628
1629 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1630 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1631
1632 tree
1633 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1634 {
1635 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1636 unsigned HOST_WIDE_INT idx;
1637 tree value;
1638
1639 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1640 vec[idx] = value;
1641 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1642 vec[idx] = build_zero_cst (TREE_TYPE (type));
1643
1644 return build_vector (type, vec);
1645 }
1646
1647 /* Build a vector of type VECTYPE where all the elements are SCs. */
1648 tree
1649 build_vector_from_val (tree vectype, tree sc)
1650 {
1651 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1652
1653 if (sc == error_mark_node)
1654 return sc;
1655
1656 /* Verify that the vector type is suitable for SC. Note that there
1657 is some inconsistency in the type-system with respect to restrict
1658 qualifications of pointers. Vector types always have a main-variant
1659 element type and the qualification is applied to the vector-type.
1660 So TREE_TYPE (vector-type) does not return a properly qualified
1661 vector element-type. */
1662 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1663 TREE_TYPE (vectype)));
1664
1665 if (CONSTANT_CLASS_P (sc))
1666 {
1667 tree *v = XALLOCAVEC (tree, nunits);
1668 for (i = 0; i < nunits; ++i)
1669 v[i] = sc;
1670 return build_vector (vectype, v);
1671 }
1672 else
1673 {
1674 vec<constructor_elt, va_gc> *v;
1675 vec_alloc (v, nunits);
1676 for (i = 0; i < nunits; ++i)
1677 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1678 return build_constructor (vectype, v);
1679 }
1680 }
1681
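/* Illustrative sketch, not compiled: when the replicated element is a
   constant the splat is a VECTOR_CST, otherwise a CONSTRUCTOR.
   build_vector_type is defined further down in this file.  */
#if 0
static void
example_vector_splat (void)
{
  tree v4si = build_vector_type (integer_type_node, 4);
  tree splat = build_vector_from_val (v4si, build_int_cst (integer_type_node, 1));
  gcc_assert (TREE_CODE (splat) == VECTOR_CST);
  gcc_assert (integer_onep (VECTOR_CST_ELT (splat, 0)));
}
#endif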
1682 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1683 are in the vec pointed to by VALS. */
1684 tree
1685 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1686 {
1687 tree c = make_node (CONSTRUCTOR);
1688 unsigned int i;
1689 constructor_elt *elt;
1690 bool constant_p = true;
1691 bool side_effects_p = false;
1692
1693 TREE_TYPE (c) = type;
1694 CONSTRUCTOR_ELTS (c) = vals;
1695
1696 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1697 {
1698 /* Mostly ctors will have elts that don't have side-effects, so
1699 the usual case is to scan all the elements. Hence a single
1700 loop for both const and side effects, rather than one loop
1701 each (with early outs). */
1702 if (!TREE_CONSTANT (elt->value))
1703 constant_p = false;
1704 if (TREE_SIDE_EFFECTS (elt->value))
1705 side_effects_p = true;
1706 }
1707
1708 TREE_SIDE_EFFECTS (c) = side_effects_p;
1709 TREE_CONSTANT (c) = constant_p;
1710
1711 return c;
1712 }
1713
1714 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1715 INDEX and VALUE. */
1716 tree
1717 build_constructor_single (tree type, tree index, tree value)
1718 {
1719 vec<constructor_elt, va_gc> *v;
1720 constructor_elt elt = {index, value};
1721
1722 vec_alloc (v, 1);
1723 v->quick_push (elt);
1724
1725 return build_constructor (type, v);
1726 }
1727
1728
1729 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1730 are in a list pointed to by VALS. */
1731 tree
1732 build_constructor_from_list (tree type, tree vals)
1733 {
1734 tree t;
1735 vec<constructor_elt, va_gc> *v = NULL;
1736
1737 if (vals)
1738 {
1739 vec_alloc (v, list_length (vals));
1740 for (t = vals; t; t = TREE_CHAIN (t))
1741 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1742 }
1743
1744 return build_constructor (type, v);
1745 }
1746
1747 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1748 of elements, provided as index/value pairs. */
1749
1750 tree
1751 build_constructor_va (tree type, int nelts, ...)
1752 {
1753 vec<constructor_elt, va_gc> *v = NULL;
1754 va_list p;
1755
1756 va_start (p, nelts);
1757 vec_alloc (v, nelts);
1758 while (nelts--)
1759 {
1760 tree index = va_arg (p, tree);
1761 tree value = va_arg (p, tree);
1762 CONSTRUCTOR_APPEND_ELT (v, index, value);
1763 }
1764 va_end (p);
1765 return build_constructor (type, v);
1766 }
1767
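/* Illustrative sketch, not compiled: building an initializer for a
   hypothetical two-element array type ARRAY_TYPE with explicit
   index/value pairs.  */
#if 0
static tree
example_constructor_va (tree array_type, tree val0, tree val1)
{
  return build_constructor_va (array_type, 2,
			       size_int (0), val0,
			       size_int (1), val1);
}
#endif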
1768 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1769
1770 tree
1771 build_fixed (tree type, FIXED_VALUE_TYPE f)
1772 {
1773 tree v;
1774 FIXED_VALUE_TYPE *fp;
1775
1776 v = make_node (FIXED_CST);
1777 fp = ggc_alloc<fixed_value> ();
1778 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1779
1780 TREE_TYPE (v) = type;
1781 TREE_FIXED_CST_PTR (v) = fp;
1782 return v;
1783 }
1784
1785 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1786
1787 tree
1788 build_real (tree type, REAL_VALUE_TYPE d)
1789 {
1790 tree v;
1791 REAL_VALUE_TYPE *dp;
1792 int overflow = 0;
1793
1794 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1795 Consider doing it via real_convert now. */
1796
1797 v = make_node (REAL_CST);
1798 dp = ggc_alloc<real_value> ();
1799 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1800
1801 TREE_TYPE (v) = type;
1802 TREE_REAL_CST_PTR (v) = dp;
1803 TREE_OVERFLOW (v) = overflow;
1804 return v;
1805 }
1806
1807 /* Return a REAL_VALUE_TYPE holding the value of the INTEGER_CST node I,
1808 converted to the floating-point type TYPE. */
1809
1810 REAL_VALUE_TYPE
1811 real_value_from_int_cst (const_tree type, const_tree i)
1812 {
1813 REAL_VALUE_TYPE d;
1814
1815 /* Clear all bits of the real value type so that we can later do
1816 bitwise comparisons to see if two values are the same. */
1817 memset (&d, 0, sizeof d);
1818
1819 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1820 TYPE_SIGN (TREE_TYPE (i)));
1821 return d;
1822 }
1823
1824 /* Given a tree representing an integer constant I, return a tree
1825 representing the same value as a floating-point constant of type TYPE. */
1826
1827 tree
1828 build_real_from_int_cst (tree type, const_tree i)
1829 {
1830 tree v;
1831 int overflow = TREE_OVERFLOW (i);
1832
1833 v = build_real (type, real_value_from_int_cst (type, i));
1834
1835 TREE_OVERFLOW (v) |= overflow;
1836 return v;
1837 }
1838
1839 /* Return a newly constructed STRING_CST node whose value is
1840 the LEN characters at STR.
1841 Note that for a C string literal, LEN should include the trailing NUL.
1842 The TREE_TYPE is not initialized. */
1843
1844 tree
1845 build_string (int len, const char *str)
1846 {
1847 tree s;
1848 size_t length;
1849
1850 /* Do not waste bytes provided by padding of struct tree_string. */
1851 length = len + offsetof (struct tree_string, str) + 1;
1852
1853 record_node_allocation_statistics (STRING_CST, length);
1854
1855 s = (tree) ggc_internal_alloc (length);
1856
1857 memset (s, 0, sizeof (struct tree_typed));
1858 TREE_SET_CODE (s, STRING_CST);
1859 TREE_CONSTANT (s) = 1;
1860 TREE_STRING_LENGTH (s) = len;
1861 memcpy (s->string.str, str, len);
1862 s->string.str[len] = '\0';
1863
1864 return s;
1865 }
1866
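/* Illustrative sketch, not compiled: for a C string literal the length
   passed in includes the trailing NUL, and the caller must set TREE_TYPE
   afterwards since build_string leaves it uninitialized.  */
#if 0
static tree
example_build_string (void)
{
  tree s = build_string (6, "hello");  /* five characters plus the NUL  */
  TREE_TYPE (s) = build_array_type (char_type_node,
				    build_index_type (size_int (5)));
  gcc_assert (TREE_STRING_LENGTH (s) == 6);
  return s;
}
#endif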
1867 /* Return a newly constructed COMPLEX_CST node whose value is
1868 specified by the real and imaginary parts REAL and IMAG.
1869 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1870 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1871
1872 tree
1873 build_complex (tree type, tree real, tree imag)
1874 {
1875 tree t = make_node (COMPLEX_CST);
1876
1877 TREE_REALPART (t) = real;
1878 TREE_IMAGPART (t) = imag;
1879 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1880 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1881 return t;
1882 }
1883
1884 /* Return a constant of arithmetic type TYPE which is the
1885 multiplicative identity of the set TYPE. */
1886
1887 tree
1888 build_one_cst (tree type)
1889 {
1890 switch (TREE_CODE (type))
1891 {
1892 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1893 case POINTER_TYPE: case REFERENCE_TYPE:
1894 case OFFSET_TYPE:
1895 return build_int_cst (type, 1);
1896
1897 case REAL_TYPE:
1898 return build_real (type, dconst1);
1899
1900 case FIXED_POINT_TYPE:
1901 /* We can only generate 1 for accum types. */
1902 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1903 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1904
1905 case VECTOR_TYPE:
1906 {
1907 tree scalar = build_one_cst (TREE_TYPE (type));
1908
1909 return build_vector_from_val (type, scalar);
1910 }
1911
1912 case COMPLEX_TYPE:
1913 return build_complex (type,
1914 build_one_cst (TREE_TYPE (type)),
1915 build_zero_cst (TREE_TYPE (type)));
1916
1917 default:
1918 gcc_unreachable ();
1919 }
1920 }
1921
1922 /* Return an integer of type TYPE containing all 1's in as much precision as
1923 it contains, or a complex or vector whose subparts are such integers. */
1924
1925 tree
1926 build_all_ones_cst (tree type)
1927 {
1928 if (TREE_CODE (type) == COMPLEX_TYPE)
1929 {
1930 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1931 return build_complex (type, scalar, scalar);
1932 }
1933 else
1934 return build_minus_one_cst (type);
1935 }
1936
1937 /* Return a constant of arithmetic type TYPE which is the
1938 opposite of the multiplicative identity of the set TYPE. */
1939
1940 tree
1941 build_minus_one_cst (tree type)
1942 {
1943 switch (TREE_CODE (type))
1944 {
1945 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1946 case POINTER_TYPE: case REFERENCE_TYPE:
1947 case OFFSET_TYPE:
1948 return build_int_cst (type, -1);
1949
1950 case REAL_TYPE:
1951 return build_real (type, dconstm1);
1952
1953 case FIXED_POINT_TYPE:
1954 /* We can only generate -1 for accum types. */
1955 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1956 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1957 TYPE_MODE (type)));
1958
1959 case VECTOR_TYPE:
1960 {
1961 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1962
1963 return build_vector_from_val (type, scalar);
1964 }
1965
1966 case COMPLEX_TYPE:
1967 return build_complex (type,
1968 build_minus_one_cst (TREE_TYPE (type)),
1969 build_zero_cst (TREE_TYPE (type)));
1970
1971 default:
1972 gcc_unreachable ();
1973 }
1974 }
1975
1976 /* Build 0 constant of type TYPE. This is used by constructor folding
1977 and thus the constant should be represented in memory by
1978 zero(es). */
1979
1980 tree
1981 build_zero_cst (tree type)
1982 {
1983 switch (TREE_CODE (type))
1984 {
1985 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1986 case POINTER_TYPE: case REFERENCE_TYPE:
1987 case OFFSET_TYPE: case NULLPTR_TYPE:
1988 return build_int_cst (type, 0);
1989
1990 case REAL_TYPE:
1991 return build_real (type, dconst0);
1992
1993 case FIXED_POINT_TYPE:
1994 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1995
1996 case VECTOR_TYPE:
1997 {
1998 tree scalar = build_zero_cst (TREE_TYPE (type));
1999
2000 return build_vector_from_val (type, scalar);
2001 }
2002
2003 case COMPLEX_TYPE:
2004 {
2005 tree zero = build_zero_cst (TREE_TYPE (type));
2006
2007 return build_complex (type, zero, zero);
2008 }
2009
2010 default:
2011 if (!AGGREGATE_TYPE_P (type))
2012 return fold_convert (type, integer_zero_node);
2013 return build_constructor (type, NULL);
2014 }
2015 }
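/* Illustrative sketch, not part of the original source: a front end that
   wants the complex constant 1 - 1i for a given COMPLEX_TYPE could combine
   the constructors above.  The helper name is hypothetical.  */

static tree
example_build_one_minus_i (tree complex_type)
{
  tree elt_type = TREE_TYPE (complex_type);
  return build_complex (complex_type,
			build_one_cst (elt_type),
			build_minus_one_cst (elt_type));
}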
2016
2017
2018 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2019
2020 tree
2021 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2022 {
2023 tree t;
2024 size_t length = (offsetof (struct tree_binfo, base_binfos)
2025 + vec<tree, va_gc>::embedded_size (base_binfos));
2026
2027 record_node_allocation_statistics (TREE_BINFO, length);
2028
2029 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2030
2031 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2032
2033 TREE_SET_CODE (t, TREE_BINFO);
2034
2035 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2036
2037 return t;
2038 }
2039
2040 /* Create a CASE_LABEL_EXPR tree node and return it. */
2041
2042 tree
2043 build_case_label (tree low_value, tree high_value, tree label_decl)
2044 {
2045 tree t = make_node (CASE_LABEL_EXPR);
2046
2047 TREE_TYPE (t) = void_type_node;
2048 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2049
2050 CASE_LOW (t) = low_value;
2051 CASE_HIGH (t) = high_value;
2052 CASE_LABEL (t) = label_decl;
2053 CASE_CHAIN (t) = NULL_TREE;
2054
2055 return t;
2056 }
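/* Illustrative sketch, not part of the original source: by convention a
   "default:" label carries neither CASE_LOW nor CASE_HIGH, so a front end
   could build one as below.  The helper name is hypothetical.  */

static tree
example_build_default_case (tree label_decl)
{
  return build_case_label (NULL_TREE, NULL_TREE, label_decl);
}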
2057
2058 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2059 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2060 The latter determines the length of the HOST_WIDE_INT vector. */
2061
2062 tree
2063 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2064 {
2065 tree t;
2066 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2067 + sizeof (struct tree_int_cst));
2068
2069 gcc_assert (len);
2070 record_node_allocation_statistics (INTEGER_CST, length);
2071
2072 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2073
2074 TREE_SET_CODE (t, INTEGER_CST);
2075 TREE_INT_CST_NUNITS (t) = len;
2076 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2077 /* to_offset can only be applied to trees that are offset_int-sized
2078 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2079 must be exactly the precision of offset_int and so LEN is correct. */
2080 if (ext_len <= OFFSET_INT_ELTS)
2081 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2082 else
2083 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2084
2085 TREE_CONSTANT (t) = 1;
2086
2087 return t;
2088 }
2089
2090 /* Build a newly constructed TREE_VEC node of length LEN. */
2091
2092 tree
2093 make_tree_vec_stat (int len MEM_STAT_DECL)
2094 {
2095 tree t;
2096 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2097
2098 record_node_allocation_statistics (TREE_VEC, length);
2099
2100 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2101
2102 TREE_SET_CODE (t, TREE_VEC);
2103 TREE_VEC_LENGTH (t) = len;
2104
2105 return t;
2106 }
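/* Illustrative sketch, not part of the original source: callers normally go
   through the make_tree_vec wrapper macro from tree.h and then fill the
   cleared slots with TREE_VEC_ELT.  The helper name is hypothetical.  */

static tree
example_make_pair_vec (tree a, tree b)
{
  tree v = make_tree_vec (2);
  TREE_VEC_ELT (v, 0) = a;
  TREE_VEC_ELT (v, 1) = b;
  return v;
}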
2107
2108 /* Grow a TREE_VEC node to new length LEN. */
2109
2110 tree
2111 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2112 {
2113 gcc_assert (TREE_CODE (v) == TREE_VEC);
2114
2115 int oldlen = TREE_VEC_LENGTH (v);
2116 gcc_assert (len > oldlen);
2117
2118 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2119 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2120
2121 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2122
2123 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2124
2125 TREE_VEC_LENGTH (v) = len;
2126
2127 return v;
2128 }
2129 \f
2130 /* Return 1 if EXPR is the integer constant zero or a complex constant
2131 of zero. */
2132
2133 int
2134 integer_zerop (const_tree expr)
2135 {
2136 STRIP_NOPS (expr);
2137
2138 switch (TREE_CODE (expr))
2139 {
2140 case INTEGER_CST:
2141 return wi::eq_p (expr, 0);
2142 case COMPLEX_CST:
2143 return (integer_zerop (TREE_REALPART (expr))
2144 && integer_zerop (TREE_IMAGPART (expr)));
2145 case VECTOR_CST:
2146 {
2147 unsigned i;
2148 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2149 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2150 return false;
2151 return true;
2152 }
2153 default:
2154 return false;
2155 }
2156 }
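/* Illustrative sketch, not part of the original source: a typical use of
   integer_zerop is spotting a no-op addend; the predicate looks through
   nops and handles complex and vector constants as above.  The helper name
   is hypothetical.  */

static bool
example_is_add_of_zero (tree expr)
{
  return (TREE_CODE (expr) == PLUS_EXPR
	  && integer_zerop (TREE_OPERAND (expr, 1)));
}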
2157
2158 /* Return 1 if EXPR is the integer constant one or the corresponding
2159 complex constant. */
2160
2161 int
2162 integer_onep (const_tree expr)
2163 {
2164 STRIP_NOPS (expr);
2165
2166 switch (TREE_CODE (expr))
2167 {
2168 case INTEGER_CST:
2169 return wi::eq_p (wi::to_widest (expr), 1);
2170 case COMPLEX_CST:
2171 return (integer_onep (TREE_REALPART (expr))
2172 && integer_zerop (TREE_IMAGPART (expr)));
2173 case VECTOR_CST:
2174 {
2175 unsigned i;
2176 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2177 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2178 return false;
2179 return true;
2180 }
2181 default:
2182 return false;
2183 }
2184 }
2185
2186 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2187 return 1 if every piece is the integer constant one. */
2188
2189 int
2190 integer_each_onep (const_tree expr)
2191 {
2192 STRIP_NOPS (expr);
2193
2194 if (TREE_CODE (expr) == COMPLEX_CST)
2195 return (integer_onep (TREE_REALPART (expr))
2196 && integer_onep (TREE_IMAGPART (expr)));
2197 else
2198 return integer_onep (expr);
2199 }
2200
2201 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2202 it contains, or a complex or vector whose subparts are such integers. */
2203
2204 int
2205 integer_all_onesp (const_tree expr)
2206 {
2207 STRIP_NOPS (expr);
2208
2209 if (TREE_CODE (expr) == COMPLEX_CST
2210 && integer_all_onesp (TREE_REALPART (expr))
2211 && integer_all_onesp (TREE_IMAGPART (expr)))
2212 return 1;
2213
2214 else if (TREE_CODE (expr) == VECTOR_CST)
2215 {
2216 unsigned i;
2217 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2218 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2219 return 0;
2220 return 1;
2221 }
2222
2223 else if (TREE_CODE (expr) != INTEGER_CST)
2224 return 0;
2225
2226 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2227 }
2228
2229 /* Return 1 if EXPR is the integer constant minus one. */
2230
2231 int
2232 integer_minus_onep (const_tree expr)
2233 {
2234 STRIP_NOPS (expr);
2235
2236 if (TREE_CODE (expr) == COMPLEX_CST)
2237 return (integer_all_onesp (TREE_REALPART (expr))
2238 && integer_zerop (TREE_IMAGPART (expr)));
2239 else
2240 return integer_all_onesp (expr);
2241 }
2242
2243 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2244 one bit on). */
2245
2246 int
2247 integer_pow2p (const_tree expr)
2248 {
2249 STRIP_NOPS (expr);
2250
2251 if (TREE_CODE (expr) == COMPLEX_CST
2252 && integer_pow2p (TREE_REALPART (expr))
2253 && integer_zerop (TREE_IMAGPART (expr)))
2254 return 1;
2255
2256 if (TREE_CODE (expr) != INTEGER_CST)
2257 return 0;
2258
2259 return wi::popcount (expr) == 1;
2260 }
2261
2262 /* Return 1 if EXPR is an integer constant other than zero or a
2263 complex constant other than zero. */
2264
2265 int
2266 integer_nonzerop (const_tree expr)
2267 {
2268 STRIP_NOPS (expr);
2269
2270 return ((TREE_CODE (expr) == INTEGER_CST
2271 && !wi::eq_p (expr, 0))
2272 || (TREE_CODE (expr) == COMPLEX_CST
2273 && (integer_nonzerop (TREE_REALPART (expr))
2274 || integer_nonzerop (TREE_IMAGPART (expr)))));
2275 }
2276
2277 /* Return 1 if EXPR is the fixed-point constant zero. */
2278
2279 int
2280 fixed_zerop (const_tree expr)
2281 {
2282 return (TREE_CODE (expr) == FIXED_CST
2283 && TREE_FIXED_CST (expr).data.is_zero ());
2284 }
2285
2286 /* Return the base-2 logarithm of a tree node known to be a
2287 power of two. */
2288
2289 int
2290 tree_log2 (const_tree expr)
2291 {
2292 STRIP_NOPS (expr);
2293
2294 if (TREE_CODE (expr) == COMPLEX_CST)
2295 return tree_log2 (TREE_REALPART (expr));
2296
2297 return wi::exact_log2 (expr);
2298 }
2299
2300 /* Similar, but return the largest integer Y such that 2 ** Y is less
2301 than or equal to EXPR. */
2302
2303 int
2304 tree_floor_log2 (const_tree expr)
2305 {
2306 STRIP_NOPS (expr);
2307
2308 if (TREE_CODE (expr) == COMPLEX_CST)
2309 return tree_log2 (TREE_REALPART (expr));
2310
2311 return wi::floor_log2 (expr);
2312 }
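/* Illustrative sketch, not part of the original source: tree_log2 is
   normally guarded by integer_pow2p, for example when turning a division
   by a power of two into a shift count.  The helper name is
   hypothetical.  */

static tree
example_divisor_to_shift_count (tree divisor)
{
  if (TREE_CODE (divisor) == INTEGER_CST && integer_pow2p (divisor))
    return build_int_cst (integer_type_node, tree_log2 (divisor));
  return NULL_TREE;
}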
2313
2314 /* Return number of known trailing zero bits in EXPR, or, if the value of
2315 EXPR is known to be zero, the precision of its type. */
2316
2317 unsigned int
2318 tree_ctz (const_tree expr)
2319 {
2320 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2321 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2322 return 0;
2323
2324 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2325 switch (TREE_CODE (expr))
2326 {
2327 case INTEGER_CST:
2328 ret1 = wi::ctz (expr);
2329 return MIN (ret1, prec);
2330 case SSA_NAME:
2331 ret1 = wi::ctz (get_nonzero_bits (expr));
2332 return MIN (ret1, prec);
2333 case PLUS_EXPR:
2334 case MINUS_EXPR:
2335 case BIT_IOR_EXPR:
2336 case BIT_XOR_EXPR:
2337 case MIN_EXPR:
2338 case MAX_EXPR:
2339 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2340 if (ret1 == 0)
2341 return ret1;
2342 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2343 return MIN (ret1, ret2);
2344 case POINTER_PLUS_EXPR:
2345 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2346 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2347 /* Second operand is sizetype, which could in theory be
2348 wider than the pointer's precision. Make sure we never
2349 return more than prec. */
2350 ret2 = MIN (ret2, prec);
2351 return MIN (ret1, ret2);
2352 case BIT_AND_EXPR:
2353 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2354 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2355 return MAX (ret1, ret2);
2356 case MULT_EXPR:
2357 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2358 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2359 return MIN (ret1 + ret2, prec);
2360 case LSHIFT_EXPR:
2361 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2362 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2363 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2364 {
2365 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2366 return MIN (ret1 + ret2, prec);
2367 }
2368 return ret1;
2369 case RSHIFT_EXPR:
2370 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2371 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2372 {
2373 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2374 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2375 if (ret1 > ret2)
2376 return ret1 - ret2;
2377 }
2378 return 0;
2379 case TRUNC_DIV_EXPR:
2380 case CEIL_DIV_EXPR:
2381 case FLOOR_DIV_EXPR:
2382 case ROUND_DIV_EXPR:
2383 case EXACT_DIV_EXPR:
2384 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2385 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2386 {
2387 int l = tree_log2 (TREE_OPERAND (expr, 1));
2388 if (l >= 0)
2389 {
2390 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2391 ret2 = l;
2392 if (ret1 > ret2)
2393 return ret1 - ret2;
2394 }
2395 }
2396 return 0;
2397 CASE_CONVERT:
2398 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2399 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2400 ret1 = prec;
2401 return MIN (ret1, prec);
2402 case SAVE_EXPR:
2403 return tree_ctz (TREE_OPERAND (expr, 0));
2404 case COND_EXPR:
2405 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2406 if (ret1 == 0)
2407 return 0;
2408 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2409 return MIN (ret1, ret2);
2410 case COMPOUND_EXPR:
2411 return tree_ctz (TREE_OPERAND (expr, 1));
2412 case ADDR_EXPR:
2413 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2414 if (ret1 > BITS_PER_UNIT)
2415 {
2416 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2417 return MIN (ret1, prec);
2418 }
2419 return 0;
2420 default:
2421 return 0;
2422 }
2423 }
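/* Illustrative sketch, not part of the original source: tree_ctz is a
   conservative count of known trailing zero bits, so a pass could use it
   to prove that an offset expression is a multiple of four.  The helper
   name is hypothetical.  */

static bool
example_known_multiple_of_4 (const_tree expr)
{
  return tree_ctz (expr) >= 2;
}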
2424
2425 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2426 decimal float constants, so don't return 1 for them. */
2427
2428 int
2429 real_zerop (const_tree expr)
2430 {
2431 STRIP_NOPS (expr);
2432
2433 switch (TREE_CODE (expr))
2434 {
2435 case REAL_CST:
2436 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2437 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2438 case COMPLEX_CST:
2439 return real_zerop (TREE_REALPART (expr))
2440 && real_zerop (TREE_IMAGPART (expr));
2441 case VECTOR_CST:
2442 {
2443 unsigned i;
2444 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2445 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2446 return false;
2447 return true;
2448 }
2449 default:
2450 return false;
2451 }
2452 }
2453
2454 /* Return 1 if EXPR is the real constant one in real or complex form.
2455 Trailing zeroes matter for decimal float constants, so don't return
2456 1 for them. */
2457
2458 int
2459 real_onep (const_tree expr)
2460 {
2461 STRIP_NOPS (expr);
2462
2463 switch (TREE_CODE (expr))
2464 {
2465 case REAL_CST:
2466 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2467 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2468 case COMPLEX_CST:
2469 return real_onep (TREE_REALPART (expr))
2470 && real_zerop (TREE_IMAGPART (expr));
2471 case VECTOR_CST:
2472 {
2473 unsigned i;
2474 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2475 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2476 return false;
2477 return true;
2478 }
2479 default:
2480 return false;
2481 }
2482 }
2483
2484 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2485 matter for decimal float constants, so don't return 1 for them. */
2486
2487 int
2488 real_minus_onep (const_tree expr)
2489 {
2490 STRIP_NOPS (expr);
2491
2492 switch (TREE_CODE (expr))
2493 {
2494 case REAL_CST:
2495 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2496 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2497 case COMPLEX_CST:
2498 return real_minus_onep (TREE_REALPART (expr))
2499 && real_zerop (TREE_IMAGPART (expr));
2500 case VECTOR_CST:
2501 {
2502 unsigned i;
2503 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2504 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2505 return false;
2506 return true;
2507 }
2508 default:
2509 return false;
2510 }
2511 }
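/* Illustrative sketch, not part of the original source: real_onep is
   typically used to recognize multiplications by 1.0 that folding may
   drop (subject to the usual floating-point semantics checks, omitted
   here).  The helper name is hypothetical.  */

static bool
example_is_mult_by_real_one (tree expr)
{
  return (TREE_CODE (expr) == MULT_EXPR
	  && real_onep (TREE_OPERAND (expr, 1)));
}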
2512
2513 /* Nonzero if EXP is a constant or a cast of a constant. */
2514
2515 int
2516 really_constant_p (const_tree exp)
2517 {
2518 /* This is not quite the same as STRIP_NOPS. It does more. */
2519 while (CONVERT_EXPR_P (exp)
2520 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2521 exp = TREE_OPERAND (exp, 0);
2522 return TREE_CONSTANT (exp);
2523 }
2524 \f
2525 /* Return first list element whose TREE_VALUE is ELEM.
2526 Return 0 if ELEM is not in LIST. */
2527
2528 tree
2529 value_member (tree elem, tree list)
2530 {
2531 while (list)
2532 {
2533 if (elem == TREE_VALUE (list))
2534 return list;
2535 list = TREE_CHAIN (list);
2536 }
2537 return NULL_TREE;
2538 }
2539
2540 /* Return first list element whose TREE_PURPOSE is ELEM.
2541 Return 0 if ELEM is not in LIST. */
2542
2543 tree
2544 purpose_member (const_tree elem, tree list)
2545 {
2546 while (list)
2547 {
2548 if (elem == TREE_PURPOSE (list))
2549 return list;
2550 list = TREE_CHAIN (list);
2551 }
2552 return NULL_TREE;
2553 }
2554
2555 /* Return true if ELEM is in V. */
2556
2557 bool
2558 vec_member (const_tree elem, vec<tree, va_gc> *v)
2559 {
2560 unsigned ix;
2561 tree t;
2562 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2563 if (elem == t)
2564 return true;
2565 return false;
2566 }
2567
2568 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2569 NULL_TREE. */
2570
2571 tree
2572 chain_index (int idx, tree chain)
2573 {
2574 for (; chain && idx > 0; --idx)
2575 chain = TREE_CHAIN (chain);
2576 return chain;
2577 }
2578
2579 /* Return nonzero if ELEM is part of the chain CHAIN. */
2580
2581 int
2582 chain_member (const_tree elem, const_tree chain)
2583 {
2584 while (chain)
2585 {
2586 if (elem == chain)
2587 return 1;
2588 chain = DECL_CHAIN (chain);
2589 }
2590
2591 return 0;
2592 }
2593
2594 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2595 We expect a null pointer to mark the end of the chain.
2596 This is the Lisp primitive `length'. */
2597
2598 int
2599 list_length (const_tree t)
2600 {
2601 const_tree p = t;
2602 #ifdef ENABLE_TREE_CHECKING
2603 const_tree q = t;
2604 #endif
2605 int len = 0;
2606
2607 while (p)
2608 {
2609 p = TREE_CHAIN (p);
2610 #ifdef ENABLE_TREE_CHECKING
2611 if (len % 2)
2612 q = TREE_CHAIN (q);
2613 gcc_assert (p != q);
2614 #endif
2615 len++;
2616 }
2617
2618 return len;
2619 }
2620
2621 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2622 UNION_TYPE TYPE, or NULL_TREE if none. */
2623
2624 tree
2625 first_field (const_tree type)
2626 {
2627 tree t = TYPE_FIELDS (type);
2628 while (t && TREE_CODE (t) != FIELD_DECL)
2629 t = TREE_CHAIN (t);
2630 return t;
2631 }
2632
2633 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2634 by modifying the last node in chain 1 to point to chain 2.
2635 This is the Lisp primitive `nconc'. */
2636
2637 tree
2638 chainon (tree op1, tree op2)
2639 {
2640 tree t1;
2641
2642 if (!op1)
2643 return op2;
2644 if (!op2)
2645 return op1;
2646
2647 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2648 continue;
2649 TREE_CHAIN (t1) = op2;
2650
2651 #ifdef ENABLE_TREE_CHECKING
2652 {
2653 tree t2;
2654 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2655 gcc_assert (t2 != t1);
2656 }
2657 #endif
2658
2659 return op1;
2660 }
2661
2662 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2663
2664 tree
2665 tree_last (tree chain)
2666 {
2667 tree next;
2668 if (chain)
2669 while ((next = TREE_CHAIN (chain)))
2670 chain = next;
2671 return chain;
2672 }
2673
2674 /* Reverse the order of elements in the chain T,
2675 and return the new head of the chain (old last element). */
2676
2677 tree
2678 nreverse (tree t)
2679 {
2680 tree prev = 0, decl, next;
2681 for (decl = t; decl; decl = next)
2682 {
2683 /* We shouldn't be using this function to reverse BLOCK chains; we
2684 have blocks_nreverse for that. */
2685 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2686 next = TREE_CHAIN (decl);
2687 TREE_CHAIN (decl) = prev;
2688 prev = decl;
2689 }
2690 return prev;
2691 }
2692 \f
2693 /* Return a newly created TREE_LIST node whose
2694 purpose and value fields are PARM and VALUE. */
2695
2696 tree
2697 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2698 {
2699 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2700 TREE_PURPOSE (t) = parm;
2701 TREE_VALUE (t) = value;
2702 return t;
2703 }
2704
2705 /* Build a chain of TREE_LIST nodes from a vector. */
2706
2707 tree
2708 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2709 {
2710 tree ret = NULL_TREE;
2711 tree *pp = &ret;
2712 unsigned int i;
2713 tree t;
2714 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2715 {
2716 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2717 pp = &TREE_CHAIN (*pp);
2718 }
2719 return ret;
2720 }
2721
2722 /* Return a newly created TREE_LIST node whose
2723 purpose and value fields are PURPOSE and VALUE
2724 and whose TREE_CHAIN is CHAIN. */
2725
2726 tree
2727 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2728 {
2729 tree node;
2730
2731 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2732 memset (node, 0, sizeof (struct tree_common));
2733
2734 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2735
2736 TREE_SET_CODE (node, TREE_LIST);
2737 TREE_CHAIN (node) = chain;
2738 TREE_PURPOSE (node) = purpose;
2739 TREE_VALUE (node) = value;
2740 return node;
2741 }
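/* Illustrative sketch, not part of the original source: TREE_LIST chains
   are built front to back with tree_cons (the MEM_STAT wrapper macro for
   tree_cons_stat) and measured with list_length.  The helper name is
   hypothetical.  */

static tree
example_build_two_element_list (tree p1, tree v1, tree p2, tree v2)
{
  tree list = tree_cons (p2, v2, NULL_TREE);
  list = tree_cons (p1, v1, list);
  gcc_checking_assert (list_length (list) == 2);
  return list;
}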
2742
2743 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2744 trees. */
2745
2746 vec<tree, va_gc> *
2747 ctor_to_vec (tree ctor)
2748 {
2749 vec<tree, va_gc> *vec;
2750 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2751 unsigned int ix;
2752 tree val;
2753
2754 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2755 vec->quick_push (val);
2756
2757 return vec;
2758 }
2759 \f
2760 /* Return the size nominally occupied by an object of type TYPE
2761 when it resides in memory. The value is measured in units of bytes,
2762 and its data type is that normally used for type sizes
2763 (which is the first type created by make_signed_type or
2764 make_unsigned_type). */
2765
2766 tree
2767 size_in_bytes (const_tree type)
2768 {
2769 tree t;
2770
2771 if (type == error_mark_node)
2772 return integer_zero_node;
2773
2774 type = TYPE_MAIN_VARIANT (type);
2775 t = TYPE_SIZE_UNIT (type);
2776
2777 if (t == 0)
2778 {
2779 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2780 return size_zero_node;
2781 }
2782
2783 return t;
2784 }
2785
2786 /* Return the size of TYPE (in bytes) as a wide integer
2787 or return -1 if the size can vary or is larger than an integer. */
2788
2789 HOST_WIDE_INT
2790 int_size_in_bytes (const_tree type)
2791 {
2792 tree t;
2793
2794 if (type == error_mark_node)
2795 return 0;
2796
2797 type = TYPE_MAIN_VARIANT (type);
2798 t = TYPE_SIZE_UNIT (type);
2799
2800 if (t && tree_fits_uhwi_p (t))
2801 return TREE_INT_CST_LOW (t);
2802 else
2803 return -1;
2804 }
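/* Illustrative sketch, not part of the original source: because
   int_size_in_bytes returns -1 for variable-sized or oversized types,
   callers check for that before comparing against a limit such as
   UNITS_PER_WORD.  The helper name is hypothetical.  */

static bool
example_fits_in_one_word (const_tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  return size != -1 && size <= (HOST_WIDE_INT) UNITS_PER_WORD;
}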
2805
2806 /* Return the maximum size of TYPE (in bytes) as a wide integer
2807 or return -1 if the size can vary or is larger than an integer. */
2808
2809 HOST_WIDE_INT
2810 max_int_size_in_bytes (const_tree type)
2811 {
2812 HOST_WIDE_INT size = -1;
2813 tree size_tree;
2814
2815 /* If this is an array type, check for a possible MAX_SIZE attached. */
2816
2817 if (TREE_CODE (type) == ARRAY_TYPE)
2818 {
2819 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2820
2821 if (size_tree && tree_fits_uhwi_p (size_tree))
2822 size = tree_to_uhwi (size_tree);
2823 }
2824
2825 /* If we still haven't been able to get a size, see if the language
2826 can compute a maximum size. */
2827
2828 if (size == -1)
2829 {
2830 size_tree = lang_hooks.types.max_size (type);
2831
2832 if (size_tree && tree_fits_uhwi_p (size_tree))
2833 size = tree_to_uhwi (size_tree);
2834 }
2835
2836 return size;
2837 }
2838 \f
2839 /* Return the bit position of FIELD, in bits from the start of the record.
2840 This is a tree of type bitsizetype. */
2841
2842 tree
2843 bit_position (const_tree field)
2844 {
2845 return bit_from_pos (DECL_FIELD_OFFSET (field),
2846 DECL_FIELD_BIT_OFFSET (field));
2847 }
2848 \f
2849 /* Return the byte position of FIELD, in bytes from the start of the record.
2850 This is a tree of type sizetype. */
2851
2852 tree
2853 byte_position (const_tree field)
2854 {
2855 return byte_from_pos (DECL_FIELD_OFFSET (field),
2856 DECL_FIELD_BIT_OFFSET (field));
2857 }
2858
2859 /* Likewise, but return as an integer. It must be representable in
2860 that way (since it could be a signed value, we don't have the
2861 option of returning -1 like int_size_in_bytes can). */
2862
2863 HOST_WIDE_INT
2864 int_byte_position (const_tree field)
2865 {
2866 return tree_to_shwi (byte_position (field));
2867 }
2868 \f
2869 /* Return the strictest alignment, in bits, that T is known to have. */
2870
2871 unsigned int
2872 expr_align (const_tree t)
2873 {
2874 unsigned int align0, align1;
2875
2876 switch (TREE_CODE (t))
2877 {
2878 CASE_CONVERT: case NON_LVALUE_EXPR:
2879 /* If we have conversions, we know that the alignment of the
2880 object must meet each of the alignments of the types. */
2881 align0 = expr_align (TREE_OPERAND (t, 0));
2882 align1 = TYPE_ALIGN (TREE_TYPE (t));
2883 return MAX (align0, align1);
2884
2885 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2886 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2887 case CLEANUP_POINT_EXPR:
2888 /* These don't change the alignment of an object. */
2889 return expr_align (TREE_OPERAND (t, 0));
2890
2891 case COND_EXPR:
2892 /* The best we can do is say that the alignment is the least aligned
2893 of the two arms. */
2894 align0 = expr_align (TREE_OPERAND (t, 1));
2895 align1 = expr_align (TREE_OPERAND (t, 2));
2896 return MIN (align0, align1);
2897
2898 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2899 meaningfully, it's always 1. */
2900 case LABEL_DECL: case CONST_DECL:
2901 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2902 case FUNCTION_DECL:
2903 gcc_assert (DECL_ALIGN (t) != 0);
2904 return DECL_ALIGN (t);
2905
2906 default:
2907 break;
2908 }
2909
2910 /* Otherwise take the alignment from that of the type. */
2911 return TYPE_ALIGN (TREE_TYPE (t));
2912 }
2913 \f
2914 /* Return, as a tree node, the number of elements for TYPE (which is an
2915 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2916
2917 tree
2918 array_type_nelts (const_tree type)
2919 {
2920 tree index_type, min, max;
2921
2922 /* If they did it with unspecified bounds, then we should have already
2923 given an error about it before we got here. */
2924 if (! TYPE_DOMAIN (type))
2925 return error_mark_node;
2926
2927 index_type = TYPE_DOMAIN (type);
2928 min = TYPE_MIN_VALUE (index_type);
2929 max = TYPE_MAX_VALUE (index_type);
2930
2931 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2932 if (!max)
2933 return error_mark_node;
2934
2935 return (integer_zerop (min)
2936 ? max
2937 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2938 }
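/* Illustrative sketch, not part of the original source: array_type_nelts
   returns the element count minus one, so a caller wanting the actual
   count adds one back after checking for the error cases.  The helper
   name is hypothetical.  */

static tree
example_array_element_count (const_tree array_type)
{
  tree nelts_minus_one = array_type_nelts (array_type);
  if (nelts_minus_one == error_mark_node)
    return error_mark_node;
  return fold_build2 (PLUS_EXPR, TREE_TYPE (nelts_minus_one),
		      nelts_minus_one,
		      build_int_cst (TREE_TYPE (nelts_minus_one), 1));
}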
2939 \f
2940 /* If arg is static -- a reference to an object in static storage -- then
2941 return the object. This is not the same as the C meaning of `static'.
2942 If arg isn't static, return NULL. */
2943
2944 tree
2945 staticp (tree arg)
2946 {
2947 switch (TREE_CODE (arg))
2948 {
2949 case FUNCTION_DECL:
2950 /* Nested functions are static, even though taking their address will
2951 involve a trampoline as we unnest the nested function and create
2952 the trampoline on the tree level. */
2953 return arg;
2954
2955 case VAR_DECL:
2956 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2957 && ! DECL_THREAD_LOCAL_P (arg)
2958 && ! DECL_DLLIMPORT_P (arg)
2959 ? arg : NULL);
2960
2961 case CONST_DECL:
2962 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2963 ? arg : NULL);
2964
2965 case CONSTRUCTOR:
2966 return TREE_STATIC (arg) ? arg : NULL;
2967
2968 case LABEL_DECL:
2969 case STRING_CST:
2970 return arg;
2971
2972 case COMPONENT_REF:
2973 /* If the thing being referenced is not a field, then it is
2974 something language specific. */
2975 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2976
2977 /* If we are referencing a bitfield, we can't evaluate an
2978 ADDR_EXPR at compile time and so it isn't a constant. */
2979 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2980 return NULL;
2981
2982 return staticp (TREE_OPERAND (arg, 0));
2983
2984 case BIT_FIELD_REF:
2985 return NULL;
2986
2987 case INDIRECT_REF:
2988 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2989
2990 case ARRAY_REF:
2991 case ARRAY_RANGE_REF:
2992 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2993 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2994 return staticp (TREE_OPERAND (arg, 0));
2995 else
2996 return NULL;
2997
2998 case COMPOUND_LITERAL_EXPR:
2999 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3000
3001 default:
3002 return NULL;
3003 }
3004 }
3005
3006 \f
3007
3008
3009 /* Return whether OP is a DECL whose address is function-invariant. */
3010
3011 bool
3012 decl_address_invariant_p (const_tree op)
3013 {
3014 /* The conditions below are slightly less strict than the one in
3015 staticp. */
3016
3017 switch (TREE_CODE (op))
3018 {
3019 case PARM_DECL:
3020 case RESULT_DECL:
3021 case LABEL_DECL:
3022 case FUNCTION_DECL:
3023 return true;
3024
3025 case VAR_DECL:
3026 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3027 || DECL_THREAD_LOCAL_P (op)
3028 || DECL_CONTEXT (op) == current_function_decl
3029 || decl_function_context (op) == current_function_decl)
3030 return true;
3031 break;
3032
3033 case CONST_DECL:
3034 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3035 || decl_function_context (op) == current_function_decl)
3036 return true;
3037 break;
3038
3039 default:
3040 break;
3041 }
3042
3043 return false;
3044 }
3045
3046 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3047
3048 bool
3049 decl_address_ip_invariant_p (const_tree op)
3050 {
3051 /* The conditions below are slightly less strict than the one in
3052 staticp. */
3053
3054 switch (TREE_CODE (op))
3055 {
3056 case LABEL_DECL:
3057 case FUNCTION_DECL:
3058 case STRING_CST:
3059 return true;
3060
3061 case VAR_DECL:
3062 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3063 && !DECL_DLLIMPORT_P (op))
3064 || DECL_THREAD_LOCAL_P (op))
3065 return true;
3066 break;
3067
3068 case CONST_DECL:
3069 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3070 return true;
3071 break;
3072
3073 default:
3074 break;
3075 }
3076
3077 return false;
3078 }
3079
3080
3081 /* Return true if T is function-invariant (internal function, does
3082 not handle arithmetic; that's handled in skip_simple_arithmetic and
3083 tree_invariant_p). */
3084
3085 static bool tree_invariant_p (tree t);
3086
3087 static bool
3088 tree_invariant_p_1 (tree t)
3089 {
3090 tree op;
3091
3092 if (TREE_CONSTANT (t)
3093 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3094 return true;
3095
3096 switch (TREE_CODE (t))
3097 {
3098 case SAVE_EXPR:
3099 return true;
3100
3101 case ADDR_EXPR:
3102 op = TREE_OPERAND (t, 0);
3103 while (handled_component_p (op))
3104 {
3105 switch (TREE_CODE (op))
3106 {
3107 case ARRAY_REF:
3108 case ARRAY_RANGE_REF:
3109 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3110 || TREE_OPERAND (op, 2) != NULL_TREE
3111 || TREE_OPERAND (op, 3) != NULL_TREE)
3112 return false;
3113 break;
3114
3115 case COMPONENT_REF:
3116 if (TREE_OPERAND (op, 2) != NULL_TREE)
3117 return false;
3118 break;
3119
3120 default:;
3121 }
3122 op = TREE_OPERAND (op, 0);
3123 }
3124
3125 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3126
3127 default:
3128 break;
3129 }
3130
3131 return false;
3132 }
3133
3134 /* Return true if T is function-invariant. */
3135
3136 static bool
3137 tree_invariant_p (tree t)
3138 {
3139 tree inner = skip_simple_arithmetic (t);
3140 return tree_invariant_p_1 (inner);
3141 }
3142
3143 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3144 Do this to any expression which may be used in more than one place,
3145 but must be evaluated only once.
3146
3147 Normally, expand_expr would reevaluate the expression each time.
3148 Calling save_expr produces something that is evaluated and recorded
3149 the first time expand_expr is called on it. Subsequent calls to
3150 expand_expr just reuse the recorded value.
3151
3152 The call to expand_expr that generates code that actually computes
3153 the value is the first call *at compile time*. Subsequent calls
3154 *at compile time* generate code to use the saved value.
3155 This produces correct result provided that *at run time* control
3156 always flows through the insns made by the first expand_expr
3157 before reaching the other places where the save_expr was evaluated.
3158 You, the caller of save_expr, must make sure this is so.
3159
3160 Constants, and certain read-only nodes, are returned with no
3161 SAVE_EXPR because that is safe. Expressions containing placeholders
3162 are not touched; see tree.def for an explanation of what these
3163 are used for. */
3164
3165 tree
3166 save_expr (tree expr)
3167 {
3168 tree t = fold (expr);
3169 tree inner;
3170
3171 /* If the tree evaluates to a constant, then we don't want to hide that
3172 fact (i.e. this allows further folding, and direct checks for constants).
3173 However, a read-only object that has side effects cannot be bypassed.
3174 Since it is no problem to reevaluate literals, we just return the
3175 literal node. */
3176 inner = skip_simple_arithmetic (t);
3177 if (TREE_CODE (inner) == ERROR_MARK)
3178 return inner;
3179
3180 if (tree_invariant_p_1 (inner))
3181 return t;
3182
3183 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3184 it means that the size or offset of some field of an object depends on
3185 the value within another field.
3186
3187 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3188 and some variable since it would then need to be both evaluated once and
3189 evaluated more than once. Front-ends must assure this case cannot
3190 happen by surrounding any such subexpressions in their own SAVE_EXPR
3191 and forcing evaluation at the proper time. */
3192 if (contains_placeholder_p (inner))
3193 return t;
3194
3195 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3196 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3197
3198 /* This expression might be placed ahead of a jump to ensure that the
3199 value was computed on both sides of the jump. So make sure it isn't
3200 eliminated as dead. */
3201 TREE_SIDE_EFFECTS (t) = 1;
3202 return t;
3203 }
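/* Illustrative sketch, not part of the original source: a front end
   lowering something like x*x evaluates the operand twice, so it wraps it
   with save_expr first; constants and other invariants come back
   unwrapped, as described above.  The helper name is hypothetical.  */

static tree
example_build_square (tree expr)
{
  tree saved = save_expr (expr);
  return fold_build2 (MULT_EXPR, TREE_TYPE (saved), saved, saved);
}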
3204
3205 /* Look inside EXPR into any simple arithmetic operations. Return the
3206 outermost non-arithmetic or non-invariant node. */
3207
3208 tree
3209 skip_simple_arithmetic (tree expr)
3210 {
3211 /* We don't care about whether this can be used as an lvalue in this
3212 context. */
3213 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3214 expr = TREE_OPERAND (expr, 0);
3215
3216 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3217 a constant, it will be more efficient to not make another SAVE_EXPR since
3218 it will allow better simplification and GCSE will be able to merge the
3219 computations if they actually occur. */
3220 while (true)
3221 {
3222 if (UNARY_CLASS_P (expr))
3223 expr = TREE_OPERAND (expr, 0);
3224 else if (BINARY_CLASS_P (expr))
3225 {
3226 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3227 expr = TREE_OPERAND (expr, 0);
3228 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3229 expr = TREE_OPERAND (expr, 1);
3230 else
3231 break;
3232 }
3233 else
3234 break;
3235 }
3236
3237 return expr;
3238 }
3239
3240 /* Look inside EXPR into simple arithmetic operations involving constants.
3241 Return the outermost non-arithmetic or non-constant node. */
3242
3243 tree
3244 skip_simple_constant_arithmetic (tree expr)
3245 {
3246 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3247 expr = TREE_OPERAND (expr, 0);
3248
3249 while (true)
3250 {
3251 if (UNARY_CLASS_P (expr))
3252 expr = TREE_OPERAND (expr, 0);
3253 else if (BINARY_CLASS_P (expr))
3254 {
3255 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3256 expr = TREE_OPERAND (expr, 0);
3257 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3258 expr = TREE_OPERAND (expr, 1);
3259 else
3260 break;
3261 }
3262 else
3263 break;
3264 }
3265
3266 return expr;
3267 }
3268
3269 /* Return which tree structure is used by T. */
3270
3271 enum tree_node_structure_enum
3272 tree_node_structure (const_tree t)
3273 {
3274 const enum tree_code code = TREE_CODE (t);
3275 return tree_node_structure_for_code (code);
3276 }
3277
3278 /* Set various status flags when building a CALL_EXPR object T. */
3279
3280 static void
3281 process_call_operands (tree t)
3282 {
3283 bool side_effects = TREE_SIDE_EFFECTS (t);
3284 bool read_only = false;
3285 int i = call_expr_flags (t);
3286
3287 /* Calls have side-effects, except those to const or pure functions. */
3288 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3289 side_effects = true;
3290 /* Propagate TREE_READONLY of arguments for const functions. */
3291 if (i & ECF_CONST)
3292 read_only = true;
3293
3294 if (!side_effects || read_only)
3295 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3296 {
3297 tree op = TREE_OPERAND (t, i);
3298 if (op && TREE_SIDE_EFFECTS (op))
3299 side_effects = true;
3300 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3301 read_only = false;
3302 }
3303
3304 TREE_SIDE_EFFECTS (t) = side_effects;
3305 TREE_READONLY (t) = read_only;
3306 }
3307 \f
3308 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3309 size or offset that depends on a field within a record. */
3310
3311 bool
3312 contains_placeholder_p (const_tree exp)
3313 {
3314 enum tree_code code;
3315
3316 if (!exp)
3317 return 0;
3318
3319 code = TREE_CODE (exp);
3320 if (code == PLACEHOLDER_EXPR)
3321 return 1;
3322
3323 switch (TREE_CODE_CLASS (code))
3324 {
3325 case tcc_reference:
3326 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3327 position computations since they will be converted into a
3328 WITH_RECORD_EXPR involving the reference, which we assume
3329 here will be valid. */
3330 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3331
3332 case tcc_exceptional:
3333 if (code == TREE_LIST)
3334 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3335 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3336 break;
3337
3338 case tcc_unary:
3339 case tcc_binary:
3340 case tcc_comparison:
3341 case tcc_expression:
3342 switch (code)
3343 {
3344 case COMPOUND_EXPR:
3345 /* Ignoring the first operand isn't quite right, but works best. */
3346 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3347
3348 case COND_EXPR:
3349 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3350 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3351 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3352
3353 case SAVE_EXPR:
3354 /* The save_expr function never wraps anything containing
3355 a PLACEHOLDER_EXPR. */
3356 return 0;
3357
3358 default:
3359 break;
3360 }
3361
3362 switch (TREE_CODE_LENGTH (code))
3363 {
3364 case 1:
3365 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3366 case 2:
3367 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3368 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3369 default:
3370 return 0;
3371 }
3372
3373 case tcc_vl_exp:
3374 switch (code)
3375 {
3376 case CALL_EXPR:
3377 {
3378 const_tree arg;
3379 const_call_expr_arg_iterator iter;
3380 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3381 if (CONTAINS_PLACEHOLDER_P (arg))
3382 return 1;
3383 return 0;
3384 }
3385 default:
3386 return 0;
3387 }
3388
3389 default:
3390 return 0;
3391 }
3392 return 0;
3393 }
3394
3395 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3396 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3397 field positions. */
3398
3399 static bool
3400 type_contains_placeholder_1 (const_tree type)
3401 {
3402 /* If the size contains a placeholder or the parent type (component type in
3403 the case of arrays) involves a placeholder, this type does. */
3404 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3405 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3406 || (!POINTER_TYPE_P (type)
3407 && TREE_TYPE (type)
3408 && type_contains_placeholder_p (TREE_TYPE (type))))
3409 return true;
3410
3411 /* Now do type-specific checks. Note that the last part of the check above
3412 greatly limits what we have to do below. */
3413 switch (TREE_CODE (type))
3414 {
3415 case VOID_TYPE:
3416 case COMPLEX_TYPE:
3417 case ENUMERAL_TYPE:
3418 case BOOLEAN_TYPE:
3419 case POINTER_TYPE:
3420 case OFFSET_TYPE:
3421 case REFERENCE_TYPE:
3422 case METHOD_TYPE:
3423 case FUNCTION_TYPE:
3424 case VECTOR_TYPE:
3425 case NULLPTR_TYPE:
3426 return false;
3427
3428 case INTEGER_TYPE:
3429 case REAL_TYPE:
3430 case FIXED_POINT_TYPE:
3431 /* Here we just check the bounds. */
3432 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3433 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3434
3435 case ARRAY_TYPE:
3436 /* We have already checked the component type above, so just check the
3437 domain type. */
3438 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3439
3440 case RECORD_TYPE:
3441 case UNION_TYPE:
3442 case QUAL_UNION_TYPE:
3443 {
3444 tree field;
3445
3446 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3447 if (TREE_CODE (field) == FIELD_DECL
3448 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3449 || (TREE_CODE (type) == QUAL_UNION_TYPE
3450 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3451 || type_contains_placeholder_p (TREE_TYPE (field))))
3452 return true;
3453
3454 return false;
3455 }
3456
3457 default:
3458 gcc_unreachable ();
3459 }
3460 }
3461
3462 /* Wrapper around above function used to cache its result. */
3463
3464 bool
3465 type_contains_placeholder_p (tree type)
3466 {
3467 bool result;
3468
3469 /* If the contains_placeholder_bits field has been initialized,
3470 then we know the answer. */
3471 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3472 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3473
3474 /* Indicate that we've seen this type node, and the answer is false.
3475 This is what we want to return if we run into recursion via fields. */
3476 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3477
3478 /* Compute the real value. */
3479 result = type_contains_placeholder_1 (type);
3480
3481 /* Store the real value. */
3482 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3483
3484 return result;
3485 }
3486 \f
3487 /* Push tree EXP onto vector QUEUE if it is not already present. */
3488
3489 static void
3490 push_without_duplicates (tree exp, vec<tree> *queue)
3491 {
3492 unsigned int i;
3493 tree iter;
3494
3495 FOR_EACH_VEC_ELT (*queue, i, iter)
3496 if (simple_cst_equal (iter, exp) == 1)
3497 break;
3498
3499 if (!iter)
3500 queue->safe_push (exp);
3501 }
3502
3503 /* Given a tree EXP, find all occurrences of references to fields
3504 in a PLACEHOLDER_EXPR and place them in vector REFS without
3505 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3506 we assume here that EXP contains only arithmetic expressions
3507 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3508 argument list. */
3509
3510 void
3511 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3512 {
3513 enum tree_code code = TREE_CODE (exp);
3514 tree inner;
3515 int i;
3516
3517 /* We handle TREE_LIST and COMPONENT_REF separately. */
3518 if (code == TREE_LIST)
3519 {
3520 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3521 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3522 }
3523 else if (code == COMPONENT_REF)
3524 {
3525 for (inner = TREE_OPERAND (exp, 0);
3526 REFERENCE_CLASS_P (inner);
3527 inner = TREE_OPERAND (inner, 0))
3528 ;
3529
3530 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3531 push_without_duplicates (exp, refs);
3532 else
3533 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3534 }
3535 else
3536 switch (TREE_CODE_CLASS (code))
3537 {
3538 case tcc_constant:
3539 break;
3540
3541 case tcc_declaration:
3542 /* Variables allocated to static storage can stay. */
3543 if (!TREE_STATIC (exp))
3544 push_without_duplicates (exp, refs);
3545 break;
3546
3547 case tcc_expression:
3548 /* This is the pattern built in ada/make_aligning_type. */
3549 if (code == ADDR_EXPR
3550 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3551 {
3552 push_without_duplicates (exp, refs);
3553 break;
3554 }
3555
3556 /* Fall through... */
3557
3558 case tcc_exceptional:
3559 case tcc_unary:
3560 case tcc_binary:
3561 case tcc_comparison:
3562 case tcc_reference:
3563 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3564 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3565 break;
3566
3567 case tcc_vl_exp:
3568 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3569 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3570 break;
3571
3572 default:
3573 gcc_unreachable ();
3574 }
3575 }
3576
3577 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3578 return a tree with all occurrences of references to F in a
3579 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3580 CONST_DECLs. Note that we assume here that EXP contains only
3581 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3582 occurring only in their argument list. */
3583
3584 tree
3585 substitute_in_expr (tree exp, tree f, tree r)
3586 {
3587 enum tree_code code = TREE_CODE (exp);
3588 tree op0, op1, op2, op3;
3589 tree new_tree;
3590
3591 /* We handle TREE_LIST and COMPONENT_REF separately. */
3592 if (code == TREE_LIST)
3593 {
3594 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3595 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3596 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3597 return exp;
3598
3599 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3600 }
3601 else if (code == COMPONENT_REF)
3602 {
3603 tree inner;
3604
3605 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3606 and it is the right field, replace it with R. */
3607 for (inner = TREE_OPERAND (exp, 0);
3608 REFERENCE_CLASS_P (inner);
3609 inner = TREE_OPERAND (inner, 0))
3610 ;
3611
3612 /* The field. */
3613 op1 = TREE_OPERAND (exp, 1);
3614
3615 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3616 return r;
3617
3618 /* If this expression hasn't been completed yet, leave it alone. */
3619 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3620 return exp;
3621
3622 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3623 if (op0 == TREE_OPERAND (exp, 0))
3624 return exp;
3625
3626 new_tree
3627 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3628 }
3629 else
3630 switch (TREE_CODE_CLASS (code))
3631 {
3632 case tcc_constant:
3633 return exp;
3634
3635 case tcc_declaration:
3636 if (exp == f)
3637 return r;
3638 else
3639 return exp;
3640
3641 case tcc_expression:
3642 if (exp == f)
3643 return r;
3644
3645 /* Fall through... */
3646
3647 case tcc_exceptional:
3648 case tcc_unary:
3649 case tcc_binary:
3650 case tcc_comparison:
3651 case tcc_reference:
3652 switch (TREE_CODE_LENGTH (code))
3653 {
3654 case 0:
3655 return exp;
3656
3657 case 1:
3658 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3659 if (op0 == TREE_OPERAND (exp, 0))
3660 return exp;
3661
3662 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3663 break;
3664
3665 case 2:
3666 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3667 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3668
3669 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3670 return exp;
3671
3672 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3673 break;
3674
3675 case 3:
3676 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3677 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3678 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3679
3680 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3681 && op2 == TREE_OPERAND (exp, 2))
3682 return exp;
3683
3684 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3685 break;
3686
3687 case 4:
3688 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3689 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3690 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3691 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3692
3693 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3694 && op2 == TREE_OPERAND (exp, 2)
3695 && op3 == TREE_OPERAND (exp, 3))
3696 return exp;
3697
3698 new_tree
3699 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3700 break;
3701
3702 default:
3703 gcc_unreachable ();
3704 }
3705 break;
3706
3707 case tcc_vl_exp:
3708 {
3709 int i;
3710
3711 new_tree = NULL_TREE;
3712
3713 /* If we are trying to replace F with a constant, inline back
3714 functions which do nothing other than compute a value from
3715 the arguments they are passed. This makes it possible to
3716 fold partially or entirely the replacement expression. */
3717 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3718 {
3719 tree t = maybe_inline_call_in_expr (exp);
3720 if (t)
3721 return SUBSTITUTE_IN_EXPR (t, f, r);
3722 }
3723
3724 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3725 {
3726 tree op = TREE_OPERAND (exp, i);
3727 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3728 if (new_op != op)
3729 {
3730 if (!new_tree)
3731 new_tree = copy_node (exp);
3732 TREE_OPERAND (new_tree, i) = new_op;
3733 }
3734 }
3735
3736 if (new_tree)
3737 {
3738 new_tree = fold (new_tree);
3739 if (TREE_CODE (new_tree) == CALL_EXPR)
3740 process_call_operands (new_tree);
3741 }
3742 else
3743 return exp;
3744 }
3745 break;
3746
3747 default:
3748 gcc_unreachable ();
3749 }
3750
3751 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3752
3753 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3754 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3755
3756 return new_tree;
3757 }
3758
3759 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3760 for it within OBJ, a tree that is an object or a chain of references. */
3761
3762 tree
3763 substitute_placeholder_in_expr (tree exp, tree obj)
3764 {
3765 enum tree_code code = TREE_CODE (exp);
3766 tree op0, op1, op2, op3;
3767 tree new_tree;
3768
3769 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3770 in the chain of OBJ. */
3771 if (code == PLACEHOLDER_EXPR)
3772 {
3773 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3774 tree elt;
3775
3776 for (elt = obj; elt != 0;
3777 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3778 || TREE_CODE (elt) == COND_EXPR)
3779 ? TREE_OPERAND (elt, 1)
3780 : (REFERENCE_CLASS_P (elt)
3781 || UNARY_CLASS_P (elt)
3782 || BINARY_CLASS_P (elt)
3783 || VL_EXP_CLASS_P (elt)
3784 || EXPRESSION_CLASS_P (elt))
3785 ? TREE_OPERAND (elt, 0) : 0))
3786 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3787 return elt;
3788
3789 for (elt = obj; elt != 0;
3790 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3791 || TREE_CODE (elt) == COND_EXPR)
3792 ? TREE_OPERAND (elt, 1)
3793 : (REFERENCE_CLASS_P (elt)
3794 || UNARY_CLASS_P (elt)
3795 || BINARY_CLASS_P (elt)
3796 || VL_EXP_CLASS_P (elt)
3797 || EXPRESSION_CLASS_P (elt))
3798 ? TREE_OPERAND (elt, 0) : 0))
3799 if (POINTER_TYPE_P (TREE_TYPE (elt))
3800 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3801 == need_type))
3802 return fold_build1 (INDIRECT_REF, need_type, elt);
3803
3804 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3805 survives until RTL generation, there will be an error. */
3806 return exp;
3807 }
3808
3809 /* TREE_LIST is special because we need to look at TREE_VALUE
3810 and TREE_CHAIN, not TREE_OPERANDS. */
3811 else if (code == TREE_LIST)
3812 {
3813 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3814 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3815 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3816 return exp;
3817
3818 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3819 }
3820 else
3821 switch (TREE_CODE_CLASS (code))
3822 {
3823 case tcc_constant:
3824 case tcc_declaration:
3825 return exp;
3826
3827 case tcc_exceptional:
3828 case tcc_unary:
3829 case tcc_binary:
3830 case tcc_comparison:
3831 case tcc_expression:
3832 case tcc_reference:
3833 case tcc_statement:
3834 switch (TREE_CODE_LENGTH (code))
3835 {
3836 case 0:
3837 return exp;
3838
3839 case 1:
3840 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3841 if (op0 == TREE_OPERAND (exp, 0))
3842 return exp;
3843
3844 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3845 break;
3846
3847 case 2:
3848 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3849 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3850
3851 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3852 return exp;
3853
3854 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3855 break;
3856
3857 case 3:
3858 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3859 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3860 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3861
3862 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3863 && op2 == TREE_OPERAND (exp, 2))
3864 return exp;
3865
3866 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3867 break;
3868
3869 case 4:
3870 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3871 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3872 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3873 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3874
3875 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3876 && op2 == TREE_OPERAND (exp, 2)
3877 && op3 == TREE_OPERAND (exp, 3))
3878 return exp;
3879
3880 new_tree
3881 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3882 break;
3883
3884 default:
3885 gcc_unreachable ();
3886 }
3887 break;
3888
3889 case tcc_vl_exp:
3890 {
3891 int i;
3892
3893 new_tree = NULL_TREE;
3894
3895 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3896 {
3897 tree op = TREE_OPERAND (exp, i);
3898 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3899 if (new_op != op)
3900 {
3901 if (!new_tree)
3902 new_tree = copy_node (exp);
3903 TREE_OPERAND (new_tree, i) = new_op;
3904 }
3905 }
3906
3907 if (new_tree)
3908 {
3909 new_tree = fold (new_tree);
3910 if (TREE_CODE (new_tree) == CALL_EXPR)
3911 process_call_operands (new_tree);
3912 }
3913 else
3914 return exp;
3915 }
3916 break;
3917
3918 default:
3919 gcc_unreachable ();
3920 }
3921
3922 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3923
3924 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3925 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3926
3927 return new_tree;
3928 }
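/* Illustrative sketch, not part of the original source: self-referential
   sizes (as emitted by the Ada front end) contain PLACEHOLDER_EXPRs and
   only become computable once a concrete object is supplied.  The helper
   name is hypothetical.  */

static tree
example_size_for_object (tree type, tree obj)
{
  tree size = TYPE_SIZE_UNIT (type);
  if (size && CONTAINS_PLACEHOLDER_P (size))
    size = substitute_placeholder_in_expr (size, obj);
  return size;
}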
3929 \f
3930
3931 /* Subroutine of stabilize_reference; this is called for subtrees of
3932 references. Any expression with side-effects must be put in a SAVE_EXPR
3933 to ensure that it is only evaluated once.
3934
3935 We don't put SAVE_EXPR nodes around everything, because assigning very
3936 simple expressions to temporaries causes us to miss good opportunities
3937 for optimizations. Among other things, the opportunity to fold in the
3938 addition of a constant into an addressing mode often gets lost, e.g.
3939 "y[i+1] += x;". In general, we take the approach that we should not make
3940 an assignment unless we are forced into it - i.e., that any non-side effect
3941 operator should be allowed, and that cse should take care of coalescing
3942 multiple utterances of the same expression should that prove fruitful. */
3943
3944 static tree
3945 stabilize_reference_1 (tree e)
3946 {
3947 tree result;
3948 enum tree_code code = TREE_CODE (e);
3949
3950 /* We cannot ignore const expressions because it might be a reference
3951 to a const array whose index contains side-effects. But we can
3952 ignore things that are actually constant or that have already been
3953 handled by this function. */
3954
3955 if (tree_invariant_p (e))
3956 return e;
3957
3958 switch (TREE_CODE_CLASS (code))
3959 {
3960 case tcc_exceptional:
3961 case tcc_type:
3962 case tcc_declaration:
3963 case tcc_comparison:
3964 case tcc_statement:
3965 case tcc_expression:
3966 case tcc_reference:
3967 case tcc_vl_exp:
3968 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3969 so that it will only be evaluated once. */
3970 /* The reference (r) and comparison (<) classes could be handled as
3971 below, but it is generally faster to only evaluate them once. */
3972 if (TREE_SIDE_EFFECTS (e))
3973 return save_expr (e);
3974 return e;
3975
3976 case tcc_constant:
3977 /* Constants need no processing. In fact, we should never reach
3978 here. */
3979 return e;
3980
3981 case tcc_binary:
3982 /* Division is slow and tends to be compiled with jumps,
3983 especially the division by powers of 2 that is often
3984 found inside of an array reference. So do it just once. */
3985 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3986 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3987 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3988 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3989 return save_expr (e);
3990 /* Recursively stabilize each operand. */
3991 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3992 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3993 break;
3994
3995 case tcc_unary:
3996 /* Recursively stabilize each operand. */
3997 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3998 break;
3999
4000 default:
4001 gcc_unreachable ();
4002 }
4003
4004 TREE_TYPE (result) = TREE_TYPE (e);
4005 TREE_READONLY (result) = TREE_READONLY (e);
4006 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4007 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4008
4009 return result;
4010 }
4011
4012 /* Stabilize a reference so that we can use it any number of times
4013 without causing its operands to be evaluated more than once.
4014 Returns the stabilized reference. This works by means of save_expr,
4015 so see the caveats in the comments about save_expr.
4016
4017 Also allows conversion expressions whose operands are references.
4018 Any other kind of expression is returned unchanged. */
4019
4020 tree
4021 stabilize_reference (tree ref)
4022 {
4023 tree result;
4024 enum tree_code code = TREE_CODE (ref);
4025
4026 switch (code)
4027 {
4028 case VAR_DECL:
4029 case PARM_DECL:
4030 case RESULT_DECL:
4031 /* No action is needed in this case. */
4032 return ref;
4033
4034 CASE_CONVERT:
4035 case FLOAT_EXPR:
4036 case FIX_TRUNC_EXPR:
4037 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4038 break;
4039
4040 case INDIRECT_REF:
4041 result = build_nt (INDIRECT_REF,
4042 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4043 break;
4044
4045 case COMPONENT_REF:
4046 result = build_nt (COMPONENT_REF,
4047 stabilize_reference (TREE_OPERAND (ref, 0)),
4048 TREE_OPERAND (ref, 1), NULL_TREE);
4049 break;
4050
4051 case BIT_FIELD_REF:
4052 result = build_nt (BIT_FIELD_REF,
4053 stabilize_reference (TREE_OPERAND (ref, 0)),
4054 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4055 break;
4056
4057 case ARRAY_REF:
4058 result = build_nt (ARRAY_REF,
4059 stabilize_reference (TREE_OPERAND (ref, 0)),
4060 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4061 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4062 break;
4063
4064 case ARRAY_RANGE_REF:
4065 result = build_nt (ARRAY_RANGE_REF,
4066 stabilize_reference (TREE_OPERAND (ref, 0)),
4067 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4068 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4069 break;
4070
4071 case COMPOUND_EXPR:
4072 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4073 it wouldn't be ignored. This matters when dealing with
4074 volatiles. */
4075 return stabilize_reference_1 (ref);
4076
4077 /* If arg isn't a kind of lvalue we recognize, make no change.
4078 Caller should recognize the error for an invalid lvalue. */
4079 default:
4080 return ref;
4081
4082 case ERROR_MARK:
4083 return error_mark_node;
4084 }
4085
4086 TREE_TYPE (result) = TREE_TYPE (ref);
4087 TREE_READONLY (result) = TREE_READONLY (ref);
4088 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4089 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4090
4091 return result;
4092 }
4093 \f
4094 /* Low-level constructors for expressions. */
4095
4096 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4097 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
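
/* Illustrative example (a sketch of the effect, not additional semantics):
   for ADDR_EXPR <&a[i]> where A is a file-scope array, the walk below
   leaves TREE_CONSTANT set only if the index I is constant, and sets
   TREE_SIDE_EFFECTS if I has side effects (e.g. "i++").  */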
4098
4099 void
4100 recompute_tree_invariant_for_addr_expr (tree t)
4101 {
4102 tree node;
4103 bool tc = true, se = false;
4104
4105   /* We start out assuming this address is both invariant and constant, and
4106      that it has no side effects.  Now go down any handled components and
4107      see if any of them involve offsets that are either non-constant or
4108      non-invariant.  Also check for side effects.
4109
4110 ??? Note that this code makes no attempt to deal with the case where
4111 taking the address of something causes a copy due to misalignment. */
4112
4113 #define UPDATE_FLAGS(NODE) \
4114 do { tree _node = (NODE); \
4115 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4116 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4117
4118 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4119 node = TREE_OPERAND (node, 0))
4120 {
4121 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4122 array reference (probably made temporarily by the G++ front end),
4123 so ignore all the operands. */
4124 if ((TREE_CODE (node) == ARRAY_REF
4125 || TREE_CODE (node) == ARRAY_RANGE_REF)
4126 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4127 {
4128 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4129 if (TREE_OPERAND (node, 2))
4130 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4131 if (TREE_OPERAND (node, 3))
4132 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4133 }
4134 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4135 FIELD_DECL, apparently. The G++ front end can put something else
4136 there, at least temporarily. */
4137 else if (TREE_CODE (node) == COMPONENT_REF
4138 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4139 {
4140 if (TREE_OPERAND (node, 2))
4141 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4142 }
4143 }
4144
4145 node = lang_hooks.expr_to_decl (node, &tc, &se);
4146
4147 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4148 the address, since &(*a)->b is a form of addition. If it's a constant, the
4149 address is constant too. If it's a decl, its address is constant if the
4150 decl is static. Everything else is not constant and, furthermore,
4151 taking the address of a volatile variable is not volatile. */
4152 if (TREE_CODE (node) == INDIRECT_REF
4153 || TREE_CODE (node) == MEM_REF)
4154 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4155 else if (CONSTANT_CLASS_P (node))
4156 ;
4157 else if (DECL_P (node))
4158 tc &= (staticp (node) != NULL_TREE);
4159 else
4160 {
4161 tc = false;
4162 se |= TREE_SIDE_EFFECTS (node);
4163 }
4164
4165
4166 TREE_CONSTANT (t) = tc;
4167 TREE_SIDE_EFFECTS (t) = se;
4168 #undef UPDATE_FLAGS
4169 }
4170
4171 /* Build an expression of code CODE, data type TYPE, and operands as
4172 specified. Expressions and reference nodes can be created this way.
4173 Constants, decls, types and misc nodes cannot be.
4174
4175    We define 6 non-variadic functions, from 0 to 5 arguments.  This is
4176    enough for all extant tree codes.  */
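
/* A minimal usage sketch (illustrative; A, B and PRED are assumed to be
   pre-existing trees of suitable types):

       tree sum  = build2 (PLUS_EXPR, integer_type_node, a, b);
       tree pick = build3 (COND_EXPR, integer_type_node, pred, sum, b);

   The flags of each result are derived from its operands via PROCESS_ARG
   below.  */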
4177
4178 tree
4179 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4180 {
4181 tree t;
4182
4183 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4184
4185 t = make_node_stat (code PASS_MEM_STAT);
4186 TREE_TYPE (t) = tt;
4187
4188 return t;
4189 }
4190
4191 tree
4192 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4193 {
4194 int length = sizeof (struct tree_exp);
4195 tree t;
4196
4197 record_node_allocation_statistics (code, length);
4198
4199 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4200
4201 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4202
4203 memset (t, 0, sizeof (struct tree_common));
4204
4205 TREE_SET_CODE (t, code);
4206
4207 TREE_TYPE (t) = type;
4208 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4209 TREE_OPERAND (t, 0) = node;
4210 if (node && !TYPE_P (node))
4211 {
4212 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4213 TREE_READONLY (t) = TREE_READONLY (node);
4214 }
4215
4216 if (TREE_CODE_CLASS (code) == tcc_statement)
4217 TREE_SIDE_EFFECTS (t) = 1;
4218 else switch (code)
4219 {
4220 case VA_ARG_EXPR:
4221 /* All of these have side-effects, no matter what their
4222 operands are. */
4223 TREE_SIDE_EFFECTS (t) = 1;
4224 TREE_READONLY (t) = 0;
4225 break;
4226
4227 case INDIRECT_REF:
4228 /* Whether a dereference is readonly has nothing to do with whether
4229 its operand is readonly. */
4230 TREE_READONLY (t) = 0;
4231 break;
4232
4233 case ADDR_EXPR:
4234 if (node)
4235 recompute_tree_invariant_for_addr_expr (t);
4236 break;
4237
4238 default:
4239 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4240 && node && !TYPE_P (node)
4241 && TREE_CONSTANT (node))
4242 TREE_CONSTANT (t) = 1;
4243 if (TREE_CODE_CLASS (code) == tcc_reference
4244 && node && TREE_THIS_VOLATILE (node))
4245 TREE_THIS_VOLATILE (t) = 1;
4246 break;
4247 }
4248
4249 return t;
4250 }
4251
4252 #define PROCESS_ARG(N) \
4253 do { \
4254 TREE_OPERAND (t, N) = arg##N; \
4255 if (arg##N &&!TYPE_P (arg##N)) \
4256 { \
4257 if (TREE_SIDE_EFFECTS (arg##N)) \
4258 side_effects = 1; \
4259 if (!TREE_READONLY (arg##N) \
4260 && !CONSTANT_CLASS_P (arg##N)) \
4261 (void) (read_only = 0); \
4262 if (!TREE_CONSTANT (arg##N)) \
4263 (void) (constant = 0); \
4264 } \
4265 } while (0)
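
/* For instance (an illustrative reading of the macro above, not extra
   semantics): if ARG1 is a call such as "f ()", the local SIDE_EFFECTS
   flag is set; if ARG0 is an ordinary non-constant variable, the local
   READ_ONLY and CONSTANT flags are cleared for the node being built.  */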
4266
4267 tree
4268 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4269 {
4270 bool constant, read_only, side_effects;
4271 tree t;
4272
4273 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4274
4275 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4276 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4277 /* When sizetype precision doesn't match that of pointers
4278 we need to be able to build explicit extensions or truncations
4279 of the offset argument. */
4280 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4281 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4282 && TREE_CODE (arg1) == INTEGER_CST);
4283
4284 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4285 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4286 && ptrofftype_p (TREE_TYPE (arg1)));
4287
4288 t = make_node_stat (code PASS_MEM_STAT);
4289 TREE_TYPE (t) = tt;
4290
4291 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4292 result based on those same flags for the arguments. But if the
4293 arguments aren't really even `tree' expressions, we shouldn't be trying
4294 to do this. */
4295
4296 /* Expressions without side effects may be constant if their
4297 arguments are as well. */
4298 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4299 || TREE_CODE_CLASS (code) == tcc_binary);
4300 read_only = 1;
4301 side_effects = TREE_SIDE_EFFECTS (t);
4302
4303 PROCESS_ARG (0);
4304 PROCESS_ARG (1);
4305
4306 TREE_READONLY (t) = read_only;
4307 TREE_CONSTANT (t) = constant;
4308 TREE_SIDE_EFFECTS (t) = side_effects;
4309 TREE_THIS_VOLATILE (t)
4310 = (TREE_CODE_CLASS (code) == tcc_reference
4311 && arg0 && TREE_THIS_VOLATILE (arg0));
4312
4313 return t;
4314 }
4315
4316
4317 tree
4318 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4319 tree arg2 MEM_STAT_DECL)
4320 {
4321 bool constant, read_only, side_effects;
4322 tree t;
4323
4324 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4325 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4326
4327 t = make_node_stat (code PASS_MEM_STAT);
4328 TREE_TYPE (t) = tt;
4329
4330 read_only = 1;
4331
4332 /* As a special exception, if COND_EXPR has NULL branches, we
4333 assume that it is a gimple statement and always consider
4334 it to have side effects. */
4335 if (code == COND_EXPR
4336 && tt == void_type_node
4337 && arg1 == NULL_TREE
4338 && arg2 == NULL_TREE)
4339 side_effects = true;
4340 else
4341 side_effects = TREE_SIDE_EFFECTS (t);
4342
4343 PROCESS_ARG (0);
4344 PROCESS_ARG (1);
4345 PROCESS_ARG (2);
4346
4347 if (code == COND_EXPR)
4348 TREE_READONLY (t) = read_only;
4349
4350 TREE_SIDE_EFFECTS (t) = side_effects;
4351 TREE_THIS_VOLATILE (t)
4352 = (TREE_CODE_CLASS (code) == tcc_reference
4353 && arg0 && TREE_THIS_VOLATILE (arg0));
4354
4355 return t;
4356 }
4357
4358 tree
4359 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4360 tree arg2, tree arg3 MEM_STAT_DECL)
4361 {
4362 bool constant, read_only, side_effects;
4363 tree t;
4364
4365 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4366
4367 t = make_node_stat (code PASS_MEM_STAT);
4368 TREE_TYPE (t) = tt;
4369
4370 side_effects = TREE_SIDE_EFFECTS (t);
4371
4372 PROCESS_ARG (0);
4373 PROCESS_ARG (1);
4374 PROCESS_ARG (2);
4375 PROCESS_ARG (3);
4376
4377 TREE_SIDE_EFFECTS (t) = side_effects;
4378 TREE_THIS_VOLATILE (t)
4379 = (TREE_CODE_CLASS (code) == tcc_reference
4380 && arg0 && TREE_THIS_VOLATILE (arg0));
4381
4382 return t;
4383 }
4384
4385 tree
4386 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4387 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4388 {
4389 bool constant, read_only, side_effects;
4390 tree t;
4391
4392 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4393
4394 t = make_node_stat (code PASS_MEM_STAT);
4395 TREE_TYPE (t) = tt;
4396
4397 side_effects = TREE_SIDE_EFFECTS (t);
4398
4399 PROCESS_ARG (0);
4400 PROCESS_ARG (1);
4401 PROCESS_ARG (2);
4402 PROCESS_ARG (3);
4403 PROCESS_ARG (4);
4404
4405 TREE_SIDE_EFFECTS (t) = side_effects;
4406 TREE_THIS_VOLATILE (t)
4407 = (TREE_CODE_CLASS (code) == tcc_reference
4408 && arg0 && TREE_THIS_VOLATILE (arg0));
4409
4410 return t;
4411 }
4412
4413 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4414    on the pointer PTR.  */
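
/* Illustrative sketch (hypothetical caller, not part of this file):

       tree deref = build_simple_mem_ref_loc (loc, ptr);

   yields MEM_REF <ptr, 0>, the equivalent of "*ptr"; when PTR is an
   ADDR_EXPR of a component or MEM_REF, the constant offset is instead
   folded into the second operand.  */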
4415
4416 tree
4417 build_simple_mem_ref_loc (location_t loc, tree ptr)
4418 {
4419 HOST_WIDE_INT offset = 0;
4420 tree ptype = TREE_TYPE (ptr);
4421 tree tem;
4422 /* For convenience allow addresses that collapse to a simple base
4423 and offset. */
4424 if (TREE_CODE (ptr) == ADDR_EXPR
4425 && (handled_component_p (TREE_OPERAND (ptr, 0))
4426 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4427 {
4428 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4429 gcc_assert (ptr);
4430 ptr = build_fold_addr_expr (ptr);
4431 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4432 }
4433 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4434 ptr, build_int_cst (ptype, offset));
4435 SET_EXPR_LOCATION (tem, loc);
4436 return tem;
4437 }
4438
4439 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4440
4441 offset_int
4442 mem_ref_offset (const_tree t)
4443 {
4444 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4445 }
4446
4447 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4448 offsetted by OFFSET units. */
4449
4450 tree
4451 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4452 {
4453 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4454 build_fold_addr_expr (base),
4455 build_int_cst (ptr_type_node, offset));
4456 tree addr = build1 (ADDR_EXPR, type, ref);
4457 recompute_tree_invariant_for_addr_expr (addr);
4458 return addr;
4459 }
4460
4461 /* Similar to the buildN functions above, except that the TREE_TYPE is
4462    not specified and TREE_SIDE_EFFECTS is left as 0.
4463    It is permissible for arguments to be null,
4464    or even garbage if their values do not matter.  */
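
/* For example (a hedged sketch; BASE and INDEX are assumed trees):

       tree t = build_nt (ARRAY_REF, base, index, NULL_TREE, NULL_TREE);

   builds an ARRAY_REF with no TREE_TYPE set; the caller is expected to
   fill in the type and any flags later.  */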
4465
4466 tree
4467 build_nt (enum tree_code code, ...)
4468 {
4469 tree t;
4470 int length;
4471 int i;
4472 va_list p;
4473
4474 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4475
4476 va_start (p, code);
4477
4478 t = make_node (code);
4479 length = TREE_CODE_LENGTH (code);
4480
4481 for (i = 0; i < length; i++)
4482 TREE_OPERAND (t, i) = va_arg (p, tree);
4483
4484 va_end (p);
4485 return t;
4486 }
4487
4488 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4489 tree vec. */
4490
4491 tree
4492 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4493 {
4494 tree ret, t;
4495 unsigned int ix;
4496
4497 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4498 CALL_EXPR_FN (ret) = fn;
4499 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4500 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4501 CALL_EXPR_ARG (ret, ix) = t;
4502 return ret;
4503 }
4504 \f
4505 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4506 We do NOT enter this node in any sort of symbol table.
4507
4508 LOC is the location of the decl.
4509
4510 layout_decl is used to set up the decl's storage layout.
4511 Other slots are initialized to 0 or null pointers. */
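
/* A minimal usage sketch (illustrative; the name "tmp" is an assumption):

       tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                              get_identifier ("tmp"), integer_type_node);

   creates an int VAR_DECL named "tmp" with its storage layout computed by
   layout_decl, without entering it in any symbol table.  */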
4512
4513 tree
4514 build_decl_stat (location_t loc, enum tree_code code, tree name,
4515 tree type MEM_STAT_DECL)
4516 {
4517 tree t;
4518
4519 t = make_node_stat (code PASS_MEM_STAT);
4520 DECL_SOURCE_LOCATION (t) = loc;
4521
4522 /* if (type == error_mark_node)
4523 type = integer_type_node; */
4524 /* That is not done, deliberately, so that having error_mark_node
4525 as the type can suppress useless errors in the use of this variable. */
4526
4527 DECL_NAME (t) = name;
4528 TREE_TYPE (t) = type;
4529
4530 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4531 layout_decl (t, 0);
4532
4533 return t;
4534 }
4535
4536 /* Builds and returns function declaration with NAME and TYPE. */
4537
4538 tree
4539 build_fn_decl (const char *name, tree type)
4540 {
4541 tree id = get_identifier (name);
4542 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4543
4544 DECL_EXTERNAL (decl) = 1;
4545 TREE_PUBLIC (decl) = 1;
4546 DECL_ARTIFICIAL (decl) = 1;
4547 TREE_NOTHROW (decl) = 1;
4548
4549 return decl;
4550 }
4551
4552 vec<tree, va_gc> *all_translation_units;
4553
4554 /* Builds a new translation-unit decl with name NAME, queues it in the
4555 global list of translation-unit decls and returns it. */
4556
4557 tree
4558 build_translation_unit_decl (tree name)
4559 {
4560 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4561 name, NULL_TREE);
4562 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4563 vec_safe_push (all_translation_units, tu);
4564 return tu;
4565 }
4566
4567 \f
4568 /* BLOCK nodes are used to represent the structure of binding contours
4569 and declarations, once those contours have been exited and their contents
4570 compiled. This information is used for outputting debugging info. */
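
/* Sketch of a typical use (illustrative only; SCOPE_VARS and INNER_BLOCKS
   are assumed names):

       tree block = build_block (scope_vars, inner_blocks,
                                 NULL_TREE, NULL_TREE);

   A front end closing a lexical scope might build such a BLOCK and fill in
   BLOCK_SUPERCONTEXT and BLOCK_CHAIN once the enclosing scope is known.  */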
4571
4572 tree
4573 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4574 {
4575 tree block = make_node (BLOCK);
4576
4577 BLOCK_VARS (block) = vars;
4578 BLOCK_SUBBLOCKS (block) = subblocks;
4579 BLOCK_SUPERCONTEXT (block) = supercontext;
4580 BLOCK_CHAIN (block) = chain;
4581 return block;
4582 }
4583
4584 \f
4585 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4586
4587 LOC is the location to use in tree T. */
4588
4589 void
4590 protected_set_expr_location (tree t, location_t loc)
4591 {
4592 if (CAN_HAVE_LOCATION_P (t))
4593 SET_EXPR_LOCATION (t, loc);
4594 }
4595 \f
4596 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4597 is ATTRIBUTE. */
4598
4599 tree
4600 build_decl_attribute_variant (tree ddecl, tree attribute)
4601 {
4602 DECL_ATTRIBUTES (ddecl) = attribute;
4603 return ddecl;
4604 }
4605
4606 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4607    is ATTRIBUTE and its qualifiers are QUALS.
4608
4609 Record such modified types already made so we don't make duplicates. */
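
/* Hedged example (illustrative; T is assumed to be a non-tagged type such
   as an integer type, and the attribute list built here is an assumption,
   not a recommendation):

       tree attrs = tree_cons (get_identifier ("may_alias"), NULL_TREE,
                               NULL_TREE);
       tree newt  = build_type_attribute_qual_variant (t, attrs,
                                                       TYPE_QUAL_CONST);

   returns a const-qualified variant of T carrying ATTRS, reusing a
   previously built variant when the hash lookup below finds one.  */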
4610
4611 tree
4612 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4613 {
4614 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4615 {
4616 inchash::hash hstate;
4617 tree ntype;
4618 int i;
4619 tree t;
4620 enum tree_code code = TREE_CODE (ttype);
4621
4622 /* Building a distinct copy of a tagged type is inappropriate; it
4623 causes breakage in code that expects there to be a one-to-one
4624 relationship between a struct and its fields.
4625 build_duplicate_type is another solution (as used in
4626 handle_transparent_union_attribute), but that doesn't play well
4627 with the stronger C++ type identity model. */
4628 if (TREE_CODE (ttype) == RECORD_TYPE
4629 || TREE_CODE (ttype) == UNION_TYPE
4630 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4631 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4632 {
4633 warning (OPT_Wattributes,
4634 "ignoring attributes applied to %qT after definition",
4635 TYPE_MAIN_VARIANT (ttype));
4636 return build_qualified_type (ttype, quals);
4637 }
4638
4639 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4640 ntype = build_distinct_type_copy (ttype);
4641
4642 TYPE_ATTRIBUTES (ntype) = attribute;
4643
4644 hstate.add_int (code);
4645 if (TREE_TYPE (ntype))
4646 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4647 attribute_hash_list (attribute, hstate);
4648
4649 switch (TREE_CODE (ntype))
4650 {
4651 case FUNCTION_TYPE:
4652 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4653 break;
4654 case ARRAY_TYPE:
4655 if (TYPE_DOMAIN (ntype))
4656 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4657 break;
4658 case INTEGER_TYPE:
4659 t = TYPE_MAX_VALUE (ntype);
4660 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4661 hstate.add_object (TREE_INT_CST_ELT (t, i));
4662 break;
4663 case REAL_TYPE:
4664 case FIXED_POINT_TYPE:
4665 {
4666 unsigned int precision = TYPE_PRECISION (ntype);
4667 hstate.add_object (precision);
4668 }
4669 break;
4670 default:
4671 break;
4672 }
4673
4674 ntype = type_hash_canon (hstate.end(), ntype);
4675
4676 /* If the target-dependent attributes make NTYPE different from
4677 its canonical type, we will need to use structural equality
4678 checks for this type. */
4679 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4680 || !comp_type_attributes (ntype, ttype))
4681 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4682 else if (TYPE_CANONICAL (ntype) == ntype)
4683 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4684
4685 ttype = build_qualified_type (ntype, quals);
4686 }
4687 else if (TYPE_QUALS (ttype) != quals)
4688 ttype = build_qualified_type (ttype, quals);
4689
4690 return ttype;
4691 }
4692
4693 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4694 the same. */
4695
4696 static bool
4697 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4698 {
4699 tree cl1, cl2;
4700 for (cl1 = clauses1, cl2 = clauses2;
4701 cl1 && cl2;
4702 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4703 {
4704 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4705 return false;
4706 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4707 {
4708 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4709 OMP_CLAUSE_DECL (cl2)) != 1)
4710 return false;
4711 }
4712 switch (OMP_CLAUSE_CODE (cl1))
4713 {
4714 case OMP_CLAUSE_ALIGNED:
4715 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4716 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4717 return false;
4718 break;
4719 case OMP_CLAUSE_LINEAR:
4720 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4721 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4722 return false;
4723 break;
4724 case OMP_CLAUSE_SIMDLEN:
4725 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4726 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4727 return false;
4728 default:
4729 break;
4730 }
4731 }
4732 return true;
4733 }
4734
4735 /* Compare two constructor-element-type constants.  Return true if the
4736    lists are known to be equal; otherwise return false.  */
4737
4738 static bool
4739 simple_cst_list_equal (const_tree l1, const_tree l2)
4740 {
4741 while (l1 != NULL_TREE && l2 != NULL_TREE)
4742 {
4743 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4744 return false;
4745
4746 l1 = TREE_CHAIN (l1);
4747 l2 = TREE_CHAIN (l2);
4748 }
4749
4750 return l1 == l2;
4751 }
4752
4753 /* Compare two attributes for their value identity. Return true if the
4754 attribute values are known to be equal; otherwise return false.
4755 */
4756
4757 static bool
4758 attribute_value_equal (const_tree attr1, const_tree attr2)
4759 {
4760 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4761 return true;
4762
4763 if (TREE_VALUE (attr1) != NULL_TREE
4764 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4765 && TREE_VALUE (attr2) != NULL
4766 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4767 return (simple_cst_list_equal (TREE_VALUE (attr1),
4768 TREE_VALUE (attr2)) == 1);
4769
4770 if ((flag_openmp || flag_openmp_simd)
4771 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4772 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4773 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4774 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4775 TREE_VALUE (attr2));
4776
4777 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4778 }
4779
4780 /* Return 0 if the attributes for two types are incompatible, 1 if they
4781 are compatible, and 2 if they are nearly compatible (which causes a
4782 warning to be generated). */
4783 int
4784 comp_type_attributes (const_tree type1, const_tree type2)
4785 {
4786 const_tree a1 = TYPE_ATTRIBUTES (type1);
4787 const_tree a2 = TYPE_ATTRIBUTES (type2);
4788 const_tree a;
4789
4790 if (a1 == a2)
4791 return 1;
4792 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4793 {
4794 const struct attribute_spec *as;
4795 const_tree attr;
4796
4797 as = lookup_attribute_spec (get_attribute_name (a));
4798 if (!as || as->affects_type_identity == false)
4799 continue;
4800
4801 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4802 if (!attr || !attribute_value_equal (a, attr))
4803 break;
4804 }
4805 if (!a)
4806 {
4807 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4808 {
4809 const struct attribute_spec *as;
4810
4811 as = lookup_attribute_spec (get_attribute_name (a));
4812 if (!as || as->affects_type_identity == false)
4813 continue;
4814
4815 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4816 break;
4817 	  /* We don't need to compare trees again, as we did this
4818 	     already in the first loop. */
4819 }
4820       /* All attributes that affect type identity are equal, so
4821 	 there is no need to call the target hook for comparison.  */
4822 if (!a)
4823 return 1;
4824 }
4825   /* Since some type combinations, like the default calling convention,
4826      might be compatible, we have to call the target hook to get the final result. */
4827 return targetm.comp_type_attributes (type1, type2);
4828 }
4829
4830 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4831 is ATTRIBUTE.
4832
4833 Record such modified types already made so we don't make duplicates. */
4834
4835 tree
4836 build_type_attribute_variant (tree ttype, tree attribute)
4837 {
4838 return build_type_attribute_qual_variant (ttype, attribute,
4839 TYPE_QUALS (ttype));
4840 }
4841
4842
4843 /* Reset the expression *EXPR_P, a size or position.
4844
4845 ??? We could reset all non-constant sizes or positions. But it's cheap
4846 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4847
4848 We need to reset self-referential sizes or positions because they cannot
4849 be gimplified and thus can contain a CALL_EXPR after the gimplification
4850 is finished, which will run afoul of LTO streaming. And they need to be
4851 reset to something essentially dummy but not constant, so as to preserve
4852 the properties of the object they are attached to. */
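
/* Illustrative case (a sketch of the situation, not new behaviour): the
   DECL_SIZE of a field whose size depends on a discriminant of the
   enclosing record contains a PLACEHOLDER_EXPR; it is replaced here by a
   bare PLACEHOLDER_EXPR of the same type so that LTO streaming never sees
   a leftover CALL_EXPR from the ungimplified size expression.  */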
4853
4854 static inline void
4855 free_lang_data_in_one_sizepos (tree *expr_p)
4856 {
4857 tree expr = *expr_p;
4858 if (CONTAINS_PLACEHOLDER_P (expr))
4859 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4860 }
4861
4862
4863 /* Reset all the fields in a binfo node BINFO. We only keep
4864 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4865
4866 static void
4867 free_lang_data_in_binfo (tree binfo)
4868 {
4869 unsigned i;
4870 tree t;
4871
4872 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4873
4874 BINFO_VIRTUALS (binfo) = NULL_TREE;
4875 BINFO_BASE_ACCESSES (binfo) = NULL;
4876 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4877 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4878
4879 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4880 free_lang_data_in_binfo (t);
4881 }
4882
4883
4884 /* Reset all language specific information still present in TYPE. */
4885
4886 static void
4887 free_lang_data_in_type (tree type)
4888 {
4889 gcc_assert (TYPE_P (type));
4890
4891 /* Give the FE a chance to remove its own data first. */
4892 lang_hooks.free_lang_data (type);
4893
4894 TREE_LANG_FLAG_0 (type) = 0;
4895 TREE_LANG_FLAG_1 (type) = 0;
4896 TREE_LANG_FLAG_2 (type) = 0;
4897 TREE_LANG_FLAG_3 (type) = 0;
4898 TREE_LANG_FLAG_4 (type) = 0;
4899 TREE_LANG_FLAG_5 (type) = 0;
4900 TREE_LANG_FLAG_6 (type) = 0;
4901
4902 if (TREE_CODE (type) == FUNCTION_TYPE)
4903 {
4904 /* Remove the const and volatile qualifiers from arguments. The
4905 C++ front end removes them, but the C front end does not,
4906 leading to false ODR violation errors when merging two
4907 instances of the same function signature compiled by
4908 different front ends. */
4909 tree p;
4910
4911 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4912 {
4913 tree arg_type = TREE_VALUE (p);
4914
4915 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4916 {
4917 int quals = TYPE_QUALS (arg_type)
4918 & ~TYPE_QUAL_CONST
4919 & ~TYPE_QUAL_VOLATILE;
4920 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4921 free_lang_data_in_type (TREE_VALUE (p));
4922 }
4923 }
4924 }
4925
4926   /* Remove members that are not FIELD_DECLs or TYPE_DECLs from the
4927      field list of an aggregate.  These occur in C++.  */
4928 if (RECORD_OR_UNION_TYPE_P (type))
4929 {
4930 tree prev, member;
4931
4932 /* Note that TYPE_FIELDS can be shared across distinct
4933 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4934 to be removed, we cannot set its TREE_CHAIN to NULL.
4935 Otherwise, we would not be able to find all the other fields
4936 in the other instances of this TREE_TYPE.
4937
4938 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4939 prev = NULL_TREE;
4940 member = TYPE_FIELDS (type);
4941 while (member)
4942 {
4943 if (TREE_CODE (member) == FIELD_DECL
4944 || TREE_CODE (member) == TYPE_DECL)
4945 {
4946 if (prev)
4947 TREE_CHAIN (prev) = member;
4948 else
4949 TYPE_FIELDS (type) = member;
4950 prev = member;
4951 }
4952
4953 member = TREE_CHAIN (member);
4954 }
4955
4956 if (prev)
4957 TREE_CHAIN (prev) = NULL_TREE;
4958 else
4959 TYPE_FIELDS (type) = NULL_TREE;
4960
4961 TYPE_METHODS (type) = NULL_TREE;
4962 if (TYPE_BINFO (type))
4963 free_lang_data_in_binfo (TYPE_BINFO (type));
4964 }
4965 else
4966 {
4967 /* For non-aggregate types, clear out the language slot (which
4968 overloads TYPE_BINFO). */
4969 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4970
4971 if (INTEGRAL_TYPE_P (type)
4972 || SCALAR_FLOAT_TYPE_P (type)
4973 || FIXED_POINT_TYPE_P (type))
4974 {
4975 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4976 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4977 }
4978 }
4979
4980 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4981 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4982
4983 if (TYPE_CONTEXT (type)
4984 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4985 {
4986 tree ctx = TYPE_CONTEXT (type);
4987 do
4988 {
4989 ctx = BLOCK_SUPERCONTEXT (ctx);
4990 }
4991 while (ctx && TREE_CODE (ctx) == BLOCK);
4992 TYPE_CONTEXT (type) = ctx;
4993 }
4994 }
4995
4996
4997 /* Return true if DECL may need an assembler name to be set. */
4998
4999 static inline bool
5000 need_assembler_name_p (tree decl)
5001 {
5002 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
5003 merging. */
5004 if (flag_lto_odr_type_mering
5005 && TREE_CODE (decl) == TYPE_DECL
5006 && DECL_NAME (decl)
5007 && decl == TYPE_NAME (TREE_TYPE (decl))
5008 && !is_lang_specific (TREE_TYPE (decl))
5009 && AGGREGATE_TYPE_P (TREE_TYPE (decl))
5010 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
5011 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
5012 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5013 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5014 if (TREE_CODE (decl) != FUNCTION_DECL
5015 && TREE_CODE (decl) != VAR_DECL)
5016 return false;
5017
5018 /* If DECL already has its assembler name set, it does not need a
5019 new one. */
5020 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5021 || DECL_ASSEMBLER_NAME_SET_P (decl))
5022 return false;
5023
5024 /* Abstract decls do not need an assembler name. */
5025 if (DECL_ABSTRACT_P (decl))
5026 return false;
5027
5028 /* For VAR_DECLs, only static, public and external symbols need an
5029 assembler name. */
5030 if (TREE_CODE (decl) == VAR_DECL
5031 && !TREE_STATIC (decl)
5032 && !TREE_PUBLIC (decl)
5033 && !DECL_EXTERNAL (decl))
5034 return false;
5035
5036 if (TREE_CODE (decl) == FUNCTION_DECL)
5037 {
5038 /* Do not set assembler name on builtins. Allow RTL expansion to
5039 decide whether to expand inline or via a regular call. */
5040 if (DECL_BUILT_IN (decl)
5041 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5042 return false;
5043
5044 /* Functions represented in the callgraph need an assembler name. */
5045 if (cgraph_node::get (decl) != NULL)
5046 return true;
5047
5048 /* Unused and not public functions don't need an assembler name. */
5049 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5050 return false;
5051 }
5052
5053 return true;
5054 }
5055
5056
5057 /* Reset all language specific information still present in symbol
5058 DECL. */
5059
5060 static void
5061 free_lang_data_in_decl (tree decl)
5062 {
5063 gcc_assert (DECL_P (decl));
5064
5065 /* Give the FE a chance to remove its own data first. */
5066 lang_hooks.free_lang_data (decl);
5067
5068 TREE_LANG_FLAG_0 (decl) = 0;
5069 TREE_LANG_FLAG_1 (decl) = 0;
5070 TREE_LANG_FLAG_2 (decl) = 0;
5071 TREE_LANG_FLAG_3 (decl) = 0;
5072 TREE_LANG_FLAG_4 (decl) = 0;
5073 TREE_LANG_FLAG_5 (decl) = 0;
5074 TREE_LANG_FLAG_6 (decl) = 0;
5075
5076 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5077 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5078 if (TREE_CODE (decl) == FIELD_DECL)
5079 {
5080 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5081 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5082 DECL_QUALIFIER (decl) = NULL_TREE;
5083 }
5084
5085 if (TREE_CODE (decl) == FUNCTION_DECL)
5086 {
5087 struct cgraph_node *node;
5088 if (!(node = cgraph_node::get (decl))
5089 || (!node->definition && !node->clones))
5090 {
5091 if (node)
5092 node->release_body ();
5093 else
5094 {
5095 release_function_body (decl);
5096 DECL_ARGUMENTS (decl) = NULL;
5097 DECL_RESULT (decl) = NULL;
5098 DECL_INITIAL (decl) = error_mark_node;
5099 }
5100 }
5101 if (gimple_has_body_p (decl))
5102 {
5103 tree t;
5104
5105 /* If DECL has a gimple body, then the context for its
5106 arguments must be DECL. Otherwise, it doesn't really
5107 matter, as we will not be emitting any code for DECL. In
5108 general, there may be other instances of DECL created by
5109 the front end and since PARM_DECLs are generally shared,
5110 their DECL_CONTEXT changes as the replicas of DECL are
5111 created. The only time where DECL_CONTEXT is important
5112 is for the FUNCTION_DECLs that have a gimple body (since
5113 the PARM_DECL will be used in the function's body). */
5114 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5115 DECL_CONTEXT (t) = decl;
5116 }
5117
5118 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5119 At this point, it is not needed anymore. */
5120 DECL_SAVED_TREE (decl) = NULL_TREE;
5121
5122 /* Clear the abstract origin if it refers to a method. Otherwise
5123 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5124 origin will not be output correctly. */
5125 if (DECL_ABSTRACT_ORIGIN (decl)
5126 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5127 && RECORD_OR_UNION_TYPE_P
5128 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5129 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5130
5131 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5132 DECL_VINDEX referring to itself into a vtable slot number as it
5133 should. Happens with functions that are copied and then forgotten
5134 about. Just clear it, it won't matter anymore. */
5135 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5136 DECL_VINDEX (decl) = NULL_TREE;
5137 }
5138 else if (TREE_CODE (decl) == VAR_DECL)
5139 {
5140 if ((DECL_EXTERNAL (decl)
5141 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5142 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5143 DECL_INITIAL (decl) = NULL_TREE;
5144 }
5145 else if (TREE_CODE (decl) == TYPE_DECL
5146 || TREE_CODE (decl) == FIELD_DECL)
5147 DECL_INITIAL (decl) = NULL_TREE;
5148 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5149 && DECL_INITIAL (decl)
5150 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5151 {
5152       /* Strip builtins from the translation-unit BLOCK.  We still have targets
5153 	 without builtin_decl_explicit support, and since builtins are shared
5154 	 nodes we cannot use TREE_CHAIN in multiple lists.  */
5155 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5156 while (*nextp)
5157 {
5158 tree var = *nextp;
5159 if (TREE_CODE (var) == FUNCTION_DECL
5160 && DECL_BUILT_IN (var))
5161 *nextp = TREE_CHAIN (var);
5162 else
5163 nextp = &TREE_CHAIN (var);
5164 }
5165 }
5166 }
5167
5168
5169 /* Data used when collecting DECLs and TYPEs for language data removal. */
5170
5171 struct free_lang_data_d
5172 {
5173 /* Worklist to avoid excessive recursion. */
5174 vec<tree> worklist;
5175
5176 /* Set of traversed objects. Used to avoid duplicate visits. */
5177 hash_set<tree> *pset;
5178
5179 /* Array of symbols to process with free_lang_data_in_decl. */
5180 vec<tree> decls;
5181
5182 /* Array of types to process with free_lang_data_in_type. */
5183 vec<tree> types;
5184 };
5185
5186
5187 /* Save all language fields needed to generate proper debug information
5188 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5189
5190 static void
5191 save_debug_info_for_decl (tree t)
5192 {
5193 /*struct saved_debug_info_d *sdi;*/
5194
5195 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5196
5197 /* FIXME. Partial implementation for saving debug info removed. */
5198 }
5199
5200
5201 /* Save all language fields needed to generate proper debug information
5202 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5203
5204 static void
5205 save_debug_info_for_type (tree t)
5206 {
5207 /*struct saved_debug_info_d *sdi;*/
5208
5209 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5210
5211 /* FIXME. Partial implementation for saving debug info removed. */
5212 }
5213
5214
5215 /* Add type or decl T to one of the list of tree nodes that need their
5216 language data removed. The lists are held inside FLD. */
5217
5218 static void
5219 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5220 {
5221 if (DECL_P (t))
5222 {
5223 fld->decls.safe_push (t);
5224 if (debug_info_level > DINFO_LEVEL_TERSE)
5225 save_debug_info_for_decl (t);
5226 }
5227 else if (TYPE_P (t))
5228 {
5229 fld->types.safe_push (t);
5230 if (debug_info_level > DINFO_LEVEL_TERSE)
5231 save_debug_info_for_type (t);
5232 }
5233 else
5234 gcc_unreachable ();
5235 }
5236
5237 /* Push tree node T into FLD->WORKLIST. */
5238
5239 static inline void
5240 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5241 {
5242 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5243 fld->worklist.safe_push ((t));
5244 }
5245
5246
5247 /* Operand callback helper for free_lang_data_in_node. *TP is the
5248 subtree operand being considered. */
5249
5250 static tree
5251 find_decls_types_r (tree *tp, int *ws, void *data)
5252 {
5253 tree t = *tp;
5254 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5255
5256 if (TREE_CODE (t) == TREE_LIST)
5257 return NULL_TREE;
5258
5259 /* Language specific nodes will be removed, so there is no need
5260 to gather anything under them. */
5261 if (is_lang_specific (t))
5262 {
5263 *ws = 0;
5264 return NULL_TREE;
5265 }
5266
5267 if (DECL_P (t))
5268 {
5269 /* Note that walk_tree does not traverse every possible field in
5270 decls, so we have to do our own traversals here. */
5271 add_tree_to_fld_list (t, fld);
5272
5273 fld_worklist_push (DECL_NAME (t), fld);
5274 fld_worklist_push (DECL_CONTEXT (t), fld);
5275 fld_worklist_push (DECL_SIZE (t), fld);
5276 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5277
5278 /* We are going to remove everything under DECL_INITIAL for
5279 TYPE_DECLs. No point walking them. */
5280 if (TREE_CODE (t) != TYPE_DECL)
5281 fld_worklist_push (DECL_INITIAL (t), fld);
5282
5283 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5284 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5285
5286 if (TREE_CODE (t) == FUNCTION_DECL)
5287 {
5288 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5289 fld_worklist_push (DECL_RESULT (t), fld);
5290 }
5291 else if (TREE_CODE (t) == TYPE_DECL)
5292 {
5293 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5294 }
5295 else if (TREE_CODE (t) == FIELD_DECL)
5296 {
5297 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5298 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5299 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5300 fld_worklist_push (DECL_FCONTEXT (t), fld);
5301 }
5302
5303 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5304 && DECL_HAS_VALUE_EXPR_P (t))
5305 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5306
5307 if (TREE_CODE (t) != FIELD_DECL
5308 && TREE_CODE (t) != TYPE_DECL)
5309 fld_worklist_push (TREE_CHAIN (t), fld);
5310 *ws = 0;
5311 }
5312 else if (TYPE_P (t))
5313 {
5314 /* Note that walk_tree does not traverse every possible field in
5315 types, so we have to do our own traversals here. */
5316 add_tree_to_fld_list (t, fld);
5317
5318 if (!RECORD_OR_UNION_TYPE_P (t))
5319 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5320 fld_worklist_push (TYPE_SIZE (t), fld);
5321 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5322 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5323 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5324 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5325 fld_worklist_push (TYPE_NAME (t), fld);
5326       /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO.  We do not stream
5327          them and thus neither need nor want to reach unused pointer types
5328          this way.  */
5329 if (!POINTER_TYPE_P (t))
5330 fld_worklist_push (TYPE_MINVAL (t), fld);
5331 if (!RECORD_OR_UNION_TYPE_P (t))
5332 fld_worklist_push (TYPE_MAXVAL (t), fld);
5333 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5334       /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
5335          neither need nor want to reach unused variants this way.  */
5336 if (TYPE_CONTEXT (t))
5337 {
5338 tree ctx = TYPE_CONTEXT (t);
5339 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5340 So push that instead. */
5341 while (ctx && TREE_CODE (ctx) == BLOCK)
5342 ctx = BLOCK_SUPERCONTEXT (ctx);
5343 fld_worklist_push (ctx, fld);
5344 }
5345       /* Do not walk TYPE_CANONICAL.  We do not stream it and thus neither
5346 	 need nor want to reach unused types this way.  */
5347
5348 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5349 {
5350 unsigned i;
5351 tree tem;
5352 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5353 fld_worklist_push (TREE_TYPE (tem), fld);
5354 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5355 if (tem
5356 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5357 && TREE_CODE (tem) == TREE_LIST)
5358 do
5359 {
5360 fld_worklist_push (TREE_VALUE (tem), fld);
5361 tem = TREE_CHAIN (tem);
5362 }
5363 while (tem);
5364 }
5365 if (RECORD_OR_UNION_TYPE_P (t))
5366 {
5367 tree tem;
5368 	  /* Push all TYPE_FIELDS - interesting and uninteresting entries
5369 	     can be interleaved.  */
5370 tem = TYPE_FIELDS (t);
5371 while (tem)
5372 {
5373 if (TREE_CODE (tem) == FIELD_DECL
5374 || TREE_CODE (tem) == TYPE_DECL)
5375 fld_worklist_push (tem, fld);
5376 tem = TREE_CHAIN (tem);
5377 }
5378 }
5379
5380 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5381 *ws = 0;
5382 }
5383 else if (TREE_CODE (t) == BLOCK)
5384 {
5385 tree tem;
5386 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5387 fld_worklist_push (tem, fld);
5388 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5389 fld_worklist_push (tem, fld);
5390 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5391 }
5392
5393 if (TREE_CODE (t) != IDENTIFIER_NODE
5394 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5395 fld_worklist_push (TREE_TYPE (t), fld);
5396
5397 return NULL_TREE;
5398 }
5399
5400
5401 /* Find decls and types in T. */
5402
5403 static void
5404 find_decls_types (tree t, struct free_lang_data_d *fld)
5405 {
5406 while (1)
5407 {
5408 if (!fld->pset->contains (t))
5409 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5410 if (fld->worklist.is_empty ())
5411 break;
5412 t = fld->worklist.pop ();
5413 }
5414 }
5415
5416 /* Translate all the types in LIST with the corresponding runtime
5417 types. */
5418
5419 static tree
5420 get_eh_types_for_runtime (tree list)
5421 {
5422 tree head, prev;
5423
5424 if (list == NULL_TREE)
5425 return NULL_TREE;
5426
5427 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5428 prev = head;
5429 list = TREE_CHAIN (list);
5430 while (list)
5431 {
5432 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5433 TREE_CHAIN (prev) = n;
5434 prev = TREE_CHAIN (prev);
5435 list = TREE_CHAIN (list);
5436 }
5437
5438 return head;
5439 }
5440
5441
5442 /* Find decls and types referenced in EH region R and store them in
5443 FLD->DECLS and FLD->TYPES. */
5444
5445 static void
5446 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5447 {
5448 switch (r->type)
5449 {
5450 case ERT_CLEANUP:
5451 break;
5452
5453 case ERT_TRY:
5454 {
5455 eh_catch c;
5456
5457 /* The types referenced in each catch must first be changed to the
5458 EH types used at runtime. This removes references to FE types
5459 in the region. */
5460 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5461 {
5462 c->type_list = get_eh_types_for_runtime (c->type_list);
5463 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5464 }
5465 }
5466 break;
5467
5468 case ERT_ALLOWED_EXCEPTIONS:
5469 r->u.allowed.type_list
5470 = get_eh_types_for_runtime (r->u.allowed.type_list);
5471 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5472 break;
5473
5474 case ERT_MUST_NOT_THROW:
5475 walk_tree (&r->u.must_not_throw.failure_decl,
5476 find_decls_types_r, fld, fld->pset);
5477 break;
5478 }
5479 }
5480
5481
5482 /* Find decls and types referenced in cgraph node N and store them in
5483 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5484 look for *every* kind of DECL and TYPE node reachable from N,
5485    including those embedded inside types and decls (i.e., TYPE_DECLs,
5486    NAMESPACE_DECLs, etc.).  */
5487
5488 static void
5489 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5490 {
5491 basic_block bb;
5492 struct function *fn;
5493 unsigned ix;
5494 tree t;
5495
5496 find_decls_types (n->decl, fld);
5497
5498 if (!gimple_has_body_p (n->decl))
5499 return;
5500
5501 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5502
5503 fn = DECL_STRUCT_FUNCTION (n->decl);
5504
5505 /* Traverse locals. */
5506 FOR_EACH_LOCAL_DECL (fn, ix, t)
5507 find_decls_types (t, fld);
5508
5509 /* Traverse EH regions in FN. */
5510 {
5511 eh_region r;
5512 FOR_ALL_EH_REGION_FN (r, fn)
5513 find_decls_types_in_eh_region (r, fld);
5514 }
5515
5516 /* Traverse every statement in FN. */
5517 FOR_EACH_BB_FN (bb, fn)
5518 {
5519 gimple_stmt_iterator si;
5520 unsigned i;
5521
5522 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5523 {
5524 gimple phi = gsi_stmt (si);
5525
5526 for (i = 0; i < gimple_phi_num_args (phi); i++)
5527 {
5528 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5529 find_decls_types (*arg_p, fld);
5530 }
5531 }
5532
5533 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5534 {
5535 gimple stmt = gsi_stmt (si);
5536
5537 if (is_gimple_call (stmt))
5538 find_decls_types (gimple_call_fntype (stmt), fld);
5539
5540 for (i = 0; i < gimple_num_ops (stmt); i++)
5541 {
5542 tree arg = gimple_op (stmt, i);
5543 find_decls_types (arg, fld);
5544 }
5545 }
5546 }
5547 }
5548
5549
5550 /* Find decls and types referenced in varpool node N and store them in
5551 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5552 look for *every* kind of DECL and TYPE node reachable from N,
5553    including those embedded inside types and decls (i.e., TYPE_DECLs,
5554    NAMESPACE_DECLs, etc.).  */
5555
5556 static void
5557 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5558 {
5559 find_decls_types (v->decl, fld);
5560 }
5561
5562 /* If T needs an assembler name, have one created for it. */
5563
5564 void
5565 assign_assembler_name_if_neeeded (tree t)
5566 {
5567 if (need_assembler_name_p (t))
5568 {
5569 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5570 diagnostics that use input_location to show locus
5571 information. The problem here is that, at this point,
5572 input_location is generally anchored to the end of the file
5573 (since the parser is long gone), so we don't have a good
5574 position to pin it to.
5575
5576 To alleviate this problem, this uses the location of T's
5577 declaration. Examples of this are
5578 testsuite/g++.dg/template/cond2.C and
5579 testsuite/g++.dg/template/pr35240.C. */
5580 location_t saved_location = input_location;
5581 input_location = DECL_SOURCE_LOCATION (t);
5582
5583 decl_assembler_name (t);
5584
5585 input_location = saved_location;
5586 }
5587 }
5588
5589
5590 /* Free language specific information for every operand and expression
5591 in every node of the call graph. This process operates in three stages:
5592
5593 1- Every callgraph node and varpool node is traversed looking for
5594 decls and types embedded in them. This is a more exhaustive
5595 search than that done by find_referenced_vars, because it will
5596 also collect individual fields, decls embedded in types, etc.
5597
5598 2- All the decls found are sent to free_lang_data_in_decl.
5599
5600 3- All the types found are sent to free_lang_data_in_type.
5601
5602 The ordering between decls and types is important because
5603 free_lang_data_in_decl sets assembler names, which includes
5604 mangling. So types cannot be freed up until assembler names have
5605 been set up. */
5606
5607 static void
5608 free_lang_data_in_cgraph (void)
5609 {
5610 struct cgraph_node *n;
5611 varpool_node *v;
5612 struct free_lang_data_d fld;
5613 tree t;
5614 unsigned i;
5615 alias_pair *p;
5616
5617 /* Initialize sets and arrays to store referenced decls and types. */
5618 fld.pset = new hash_set<tree>;
5619 fld.worklist.create (0);
5620 fld.decls.create (100);
5621 fld.types.create (100);
5622
5623 /* Find decls and types in the body of every function in the callgraph. */
5624 FOR_EACH_FUNCTION (n)
5625 find_decls_types_in_node (n, &fld);
5626
5627 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5628 find_decls_types (p->decl, &fld);
5629
5630 /* Find decls and types in every varpool symbol. */
5631 FOR_EACH_VARIABLE (v)
5632 find_decls_types_in_var (v, &fld);
5633
5634 /* Set the assembler name on every decl found. We need to do this
5635 now because free_lang_data_in_decl will invalidate data needed
5636 for mangling. This breaks mangling on interdependent decls. */
5637 FOR_EACH_VEC_ELT (fld.decls, i, t)
5638 assign_assembler_name_if_neeeded (t);
5639
5640 /* Traverse every decl found freeing its language data. */
5641 FOR_EACH_VEC_ELT (fld.decls, i, t)
5642 free_lang_data_in_decl (t);
5643
5644 /* Traverse every type found freeing its language data. */
5645 FOR_EACH_VEC_ELT (fld.types, i, t)
5646 free_lang_data_in_type (t);
5647
5648 delete fld.pset;
5649 fld.worklist.release ();
5650 fld.decls.release ();
5651 fld.types.release ();
5652 }
5653
5654
5655 /* Free resources used by the front end that are no longer needed once it is done.  */
5656
5657 static unsigned
5658 free_lang_data (void)
5659 {
5660 unsigned i;
5661
5662 /* If we are the LTO frontend we have freed lang-specific data already. */
5663 if (in_lto_p
5664 || !flag_generate_lto)
5665 return 0;
5666
5667   /* Allocate and assign alias sets to the standard integer types
5668      while the slots are still set up the way the front ends generated them.  */
5669 for (i = 0; i < itk_none; ++i)
5670 if (integer_types[i])
5671 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5672
5673 /* Traverse the IL resetting language specific information for
5674 operands, expressions, etc. */
5675 free_lang_data_in_cgraph ();
5676
5677 /* Create gimple variants for common types. */
5678 ptrdiff_type_node = integer_type_node;
5679 fileptr_type_node = ptr_type_node;
5680
5681 /* Reset some langhooks. Do not reset types_compatible_p, it may
5682 still be used indirectly via the get_alias_set langhook. */
5683 lang_hooks.dwarf_name = lhd_dwarf_name;
5684 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5685   /* We do not want the default decl_assembler_name implementation.
5686      Rather, once everything is fixed, we want a wrapper around it that
5687      asserts that all non-local symbols already got their assembler
5688      name and that only produces assembler names for local symbols.  Or,
5689      better yet, make sure we never call decl_assembler_name on local
5690      symbols and devise a separate, middle-end private scheme for it.  */
5691
5692 /* Reset diagnostic machinery. */
5693 tree_diagnostics_defaults (global_dc);
5694
5695 return 0;
5696 }
5697
5698
5699 namespace {
5700
5701 const pass_data pass_data_ipa_free_lang_data =
5702 {
5703 SIMPLE_IPA_PASS, /* type */
5704 "*free_lang_data", /* name */
5705 OPTGROUP_NONE, /* optinfo_flags */
5706 TV_IPA_FREE_LANG_DATA, /* tv_id */
5707 0, /* properties_required */
5708 0, /* properties_provided */
5709 0, /* properties_destroyed */
5710 0, /* todo_flags_start */
5711 0, /* todo_flags_finish */
5712 };
5713
5714 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5715 {
5716 public:
5717 pass_ipa_free_lang_data (gcc::context *ctxt)
5718 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5719 {}
5720
5721 /* opt_pass methods: */
5722 virtual unsigned int execute (function *) { return free_lang_data (); }
5723
5724 }; // class pass_ipa_free_lang_data
5725
5726 } // anon namespace
5727
5728 simple_ipa_opt_pass *
5729 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5730 {
5731 return new pass_ipa_free_lang_data (ctxt);
5732 }
5733
5734 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5735 ATTR_NAME. Also used internally by remove_attribute(). */
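
/* Behaviour sketch (illustrative examples, not exhaustive):

       private_is_attribute_p ("packed", 6, get_identifier ("packed"))
	 => true
       private_is_attribute_p ("packed", 6, get_identifier ("__packed__"))
	 => true
       private_is_attribute_p ("packed", 6, get_identifier ("aligned"))
	 => false  */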
5736 bool
5737 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5738 {
5739 size_t ident_len = IDENTIFIER_LENGTH (ident);
5740
5741 if (ident_len == attr_len)
5742 {
5743 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5744 return true;
5745 }
5746 else if (ident_len == attr_len + 4)
5747 {
5748 /* There is the possibility that ATTR is 'text' and IDENT is
5749 '__text__'. */
5750 const char *p = IDENTIFIER_POINTER (ident);
5751 if (p[0] == '_' && p[1] == '_'
5752 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5753 && strncmp (attr_name, p + 2, attr_len) == 0)
5754 return true;
5755 }
5756
5757 return false;
5758 }
5759
5760 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5761 of ATTR_NAME, and LIST is not NULL_TREE. */
5762 tree
5763 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5764 {
5765 while (list)
5766 {
5767 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5768
5769 if (ident_len == attr_len)
5770 {
5771 if (!strcmp (attr_name,
5772 IDENTIFIER_POINTER (get_attribute_name (list))))
5773 break;
5774 }
5775 /* TODO: If we made sure that attributes were stored in the
5776 canonical form without '__...__' (ie, as in 'text' as opposed
5777 to '__text__') then we could avoid the following case. */
5778 else if (ident_len == attr_len + 4)
5779 {
5780 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5781 if (p[0] == '_' && p[1] == '_'
5782 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5783 && strncmp (attr_name, p + 2, attr_len) == 0)
5784 break;
5785 }
5786 list = TREE_CHAIN (list);
5787 }
5788
5789 return list;
5790 }
5791
5792 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5793    return a pointer to the first element of LIST whose attribute name
5794    starts with ATTR_NAME.  ATTR_NAME must be in the form 'text' (not
5795    '__text__').  */
5796
5797 tree
5798 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5799 tree list)
5800 {
5801 while (list)
5802 {
5803 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5804
5805 if (attr_len > ident_len)
5806 {
5807 list = TREE_CHAIN (list);
5808 continue;
5809 }
5810
5811 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5812
5813 if (strncmp (attr_name, p, attr_len) == 0)
5814 break;
5815
5816 /* TODO: If we made sure that attributes were stored in the
5817 canonical form without '__...__' (ie, as in 'text' as opposed
5818 to '__text__') then we could avoid the following case. */
5819       if (p[0] == '_' && p[1] == '_'
5820 	  && strncmp (attr_name, p + 2, attr_len) == 0)
5821 break;
5822
5823 list = TREE_CHAIN (list);
5824 }
5825
5826 return list;
5827 }
5828
5829
5830 /* A variant of lookup_attribute() that can be used with an identifier
5831 as the first argument, and where the identifier can be either
5832 'text' or '__text__'.
5833
5834 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5835 return a pointer to the attribute's list element if the attribute
5836 is part of the list, or NULL_TREE if not found. If the attribute
5837 appears more than once, this only returns the first occurrence; the
5838 TREE_CHAIN of the return value should be passed back in if further
5839 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5840 can be in the form 'text' or '__text__'. */
5841 static tree
5842 lookup_ident_attribute (tree attr_identifier, tree list)
5843 {
5844 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5845
5846 while (list)
5847 {
5848 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5849 == IDENTIFIER_NODE);
5850
5851 /* Identifiers can be compared directly for equality. */
5852 if (attr_identifier == get_attribute_name (list))
5853 break;
5854
5855       /* If they are not equal, one may still be in the form
5856 	 'text' while the other is in the form '__text__'.  TODO:
5857 If we were storing attributes in normalized 'text' form, then
5858 this could all go away and we could take full advantage of
5859 the fact that we're comparing identifiers. :-) */
5860 {
5861 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5862 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5863
5864 if (ident_len == attr_len + 4)
5865 {
5866 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5867 const char *q = IDENTIFIER_POINTER (attr_identifier);
5868 if (p[0] == '_' && p[1] == '_'
5869 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5870 && strncmp (q, p + 2, attr_len) == 0)
5871 break;
5872 }
5873 else if (ident_len + 4 == attr_len)
5874 {
5875 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5876 const char *q = IDENTIFIER_POINTER (attr_identifier);
5877 if (q[0] == '_' && q[1] == '_'
5878 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5879 && strncmp (q + 2, p, ident_len) == 0)
5880 break;
5881 }
5882 }
5883 list = TREE_CHAIN (list);
5884 }
5885
5886 return list;
5887 }
5888
5889 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5890 modified list. */
5891
5892 tree
5893 remove_attribute (const char *attr_name, tree list)
5894 {
5895 tree *p;
5896 size_t attr_len = strlen (attr_name);
5897
5898 gcc_checking_assert (attr_name[0] != '_');
5899
5900 for (p = &list; *p; )
5901 {
5902 tree l = *p;
5903 /* TODO: If we were storing attributes in normalized form, here
5904 we could use a simple strcmp(). */
5905 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5906 *p = TREE_CHAIN (l);
5907 else
5908 p = &TREE_CHAIN (l);
5909 }
5910
5911 return list;
5912 }
5913
5914 /* Return an attribute list that is the union of a1 and a2. */
5915
5916 tree
5917 merge_attributes (tree a1, tree a2)
5918 {
5919 tree attributes;
5920
5921 /* Either one unset? Take the set one. */
5922
5923 if ((attributes = a1) == 0)
5924 attributes = a2;
5925
5926 /* One that completely contains the other? Take it. */
5927
5928 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5929 {
5930 if (attribute_list_contained (a2, a1))
5931 attributes = a2;
5932 else
5933 {
5934 /* Pick the longer list, and hang the other list onto it. */
5935
5936 if (list_length (a1) < list_length (a2))
5937 attributes = a2, a2 = a1;
5938
5939 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5940 {
5941 tree a;
5942 for (a = lookup_ident_attribute (get_attribute_name (a2),
5943 attributes);
5944 a != NULL_TREE && !attribute_value_equal (a, a2);
5945 a = lookup_ident_attribute (get_attribute_name (a2),
5946 TREE_CHAIN (a)))
5947 ;
5948 if (a == NULL_TREE)
5949 {
5950 a1 = copy_node (a2);
5951 TREE_CHAIN (a1) = attributes;
5952 attributes = a1;
5953 }
5954 }
5955 }
5956 }
5957 return attributes;
5958 }
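/* An illustrative example of the merge above (the attribute spellings are
   hypothetical): merging {noreturn, aligned(8)} with {aligned(8), malloc}
   yields {malloc, noreturn, aligned(8)}.  An attribute whose name and
   value already appear in the longer list is not copied again, while one
   that is missing, or present only with a different value, is prepended
   to the result.  */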
5959
5960 /* Given types T1 and T2, merge their attributes and return
5961 the result. */
5962
5963 tree
5964 merge_type_attributes (tree t1, tree t2)
5965 {
5966 return merge_attributes (TYPE_ATTRIBUTES (t1),
5967 TYPE_ATTRIBUTES (t2));
5968 }
5969
5970 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5971 the result. */
5972
5973 tree
5974 merge_decl_attributes (tree olddecl, tree newdecl)
5975 {
5976 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5977 DECL_ATTRIBUTES (newdecl));
5978 }
5979
5980 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5981
5982 /* Specialization of merge_decl_attributes for various Windows targets.
5983
5984 This handles the following situation:
5985
5986 __declspec (dllimport) int foo;
5987 int foo;
5988
5989 The second instance of `foo' nullifies the dllimport. */
5990
5991 tree
5992 merge_dllimport_decl_attributes (tree old, tree new_tree)
5993 {
5994 tree a;
5995 int delete_dllimport_p = 1;
5996
5997 /* What we need to do here is remove from `old' dllimport if it doesn't
5998 appear in `new'. dllimport behaves like extern: if a declaration is
5999 marked dllimport and a definition appears later, then the object
6000 is not dllimport'd. We also remove a `new' dllimport if the old list
6001 contains dllexport: dllexport always overrides dllimport, regardless
6002 of the order of declaration. */
6003 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6004 delete_dllimport_p = 0;
6005 else if (DECL_DLLIMPORT_P (new_tree)
6006 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6007 {
6008 DECL_DLLIMPORT_P (new_tree) = 0;
6009 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6010 "dllimport ignored", new_tree);
6011 }
6012 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6013 {
6014 /* Warn about overriding a symbol that has already been used, e.g.:
6015 extern int __attribute__ ((dllimport)) foo;
6016 int* bar () {return &foo;}
6017 int foo;
6018 */
6019 if (TREE_USED (old))
6020 {
6021 warning (0, "%q+D redeclared without dllimport attribute "
6022 "after being referenced with dll linkage", new_tree);
6023 /* If we have used a variable's address with dllimport linkage,
6024 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6025 decl may already have had TREE_CONSTANT computed.
6026 We still remove the attribute so that assembler code refers
6027 to '&foo' rather than '_imp__foo'. */
6028 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6029 DECL_DLLIMPORT_P (new_tree) = 1;
6030 }
6031
6032 /* Let an inline definition silently override the external reference,
6033 but otherwise warn about attribute inconsistency. */
6034 else if (TREE_CODE (new_tree) == VAR_DECL
6035 || !DECL_DECLARED_INLINE_P (new_tree))
6036 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6037 "previous dllimport ignored", new_tree);
6038 }
6039 else
6040 delete_dllimport_p = 0;
6041
6042 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6043
6044 if (delete_dllimport_p)
6045 a = remove_attribute ("dllimport", a);
6046
6047 return a;
6048 }
6049
6050 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6051 struct attribute_spec.handler. */
6052
6053 tree
6054 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6055 bool *no_add_attrs)
6056 {
6057 tree node = *pnode;
6058 bool is_dllimport;
6059
6060 /* These attributes may apply to structure and union types being created,
6061 but otherwise should be passed on to the declaration involved. */
6062 if (!DECL_P (node))
6063 {
6064 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6065 | (int) ATTR_FLAG_ARRAY_NEXT))
6066 {
6067 *no_add_attrs = true;
6068 return tree_cons (name, args, NULL_TREE);
6069 }
6070 if (TREE_CODE (node) == RECORD_TYPE
6071 || TREE_CODE (node) == UNION_TYPE)
6072 {
6073 node = TYPE_NAME (node);
6074 if (!node)
6075 return NULL_TREE;
6076 }
6077 else
6078 {
6079 warning (OPT_Wattributes, "%qE attribute ignored",
6080 name);
6081 *no_add_attrs = true;
6082 return NULL_TREE;
6083 }
6084 }
6085
6086 if (TREE_CODE (node) != FUNCTION_DECL
6087 && TREE_CODE (node) != VAR_DECL
6088 && TREE_CODE (node) != TYPE_DECL)
6089 {
6090 *no_add_attrs = true;
6091 warning (OPT_Wattributes, "%qE attribute ignored",
6092 name);
6093 return NULL_TREE;
6094 }
6095
6096 if (TREE_CODE (node) == TYPE_DECL
6097 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6098 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6099 {
6100 *no_add_attrs = true;
6101 warning (OPT_Wattributes, "%qE attribute ignored",
6102 name);
6103 return NULL_TREE;
6104 }
6105
6106 is_dllimport = is_attribute_p ("dllimport", name);
6107
6108 /* Report error on dllimport ambiguities seen now before they cause
6109 any damage. */
6110 if (is_dllimport)
6111 {
6112 /* Honor any target-specific overrides. */
6113 if (!targetm.valid_dllimport_attribute_p (node))
6114 *no_add_attrs = true;
6115
6116 else if (TREE_CODE (node) == FUNCTION_DECL
6117 && DECL_DECLARED_INLINE_P (node))
6118 {
6119 warning (OPT_Wattributes, "inline function %q+D declared as "
6120 "dllimport: attribute ignored", node);
6121 *no_add_attrs = true;
6122 }
6123 /* Like MS, treat definition of dllimported variables and
6124 non-inlined functions on declaration as syntax errors. */
6125 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6126 {
6127 error ("function %q+D definition is marked dllimport", node);
6128 *no_add_attrs = true;
6129 }
6130
6131 else if (TREE_CODE (node) == VAR_DECL)
6132 {
6133 if (DECL_INITIAL (node))
6134 {
6135 error ("variable %q+D definition is marked dllimport",
6136 node);
6137 *no_add_attrs = true;
6138 }
6139
6140 /* `extern' needn't be specified with dllimport.
6141 Specify `extern' now and hope for the best. Sigh. */
6142 DECL_EXTERNAL (node) = 1;
6143 /* Also, implicitly give global scope to dllimport'd variables
6144 declared within a function, unless they are declared static. */
6145 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6146 TREE_PUBLIC (node) = 1;
6147 }
6148
6149 if (*no_add_attrs == false)
6150 DECL_DLLIMPORT_P (node) = 1;
6151 }
6152 else if (TREE_CODE (node) == FUNCTION_DECL
6153 && DECL_DECLARED_INLINE_P (node)
6154 && flag_keep_inline_dllexport)
6155 /* An exported function, even if inline, must be emitted. */
6156 DECL_EXTERNAL (node) = 0;
6157
6158 /* Report error if symbol is not accessible at global scope. */
6159 if (!TREE_PUBLIC (node)
6160 && (TREE_CODE (node) == VAR_DECL
6161 || TREE_CODE (node) == FUNCTION_DECL))
6162 {
6163 error ("external linkage required for symbol %q+D because of "
6164 "%qE attribute", node, name);
6165 *no_add_attrs = true;
6166 }
6167
6168 /* A dllexport'd entity must have default visibility so that other
6169 program units (shared libraries or the main executable) can see
6170 it. A dllimport'd entity must have default visibility so that
6171 the linker knows that undefined references within this program
6172 unit can be resolved by the dynamic linker. */
6173 if (!*no_add_attrs)
6174 {
6175 if (DECL_VISIBILITY_SPECIFIED (node)
6176 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6177 error ("%qE implies default visibility, but %qD has already "
6178 "been declared with a different visibility",
6179 name, node);
6180 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6181 DECL_VISIBILITY_SPECIFIED (node) = 1;
6182 }
6183
6184 return NULL_TREE;
6185 }
6186
6187 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6188 \f
6189 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6190 of the various TYPE_QUAL values. */
6191
6192 static void
6193 set_type_quals (tree type, int type_quals)
6194 {
6195 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6196 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6197 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6198 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6199 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6200 }
6201
6202 /* Returns true iff unqualified CAND and BASE are equivalent. */
6203
6204 bool
6205 check_base_type (const_tree cand, const_tree base)
6206 {
6207 return (TYPE_NAME (cand) == TYPE_NAME (base)
6208 /* Apparently this is needed for Objective-C. */
6209 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6210 /* Check alignment. */
6211 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6212 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6213 TYPE_ATTRIBUTES (base)));
6214 }
6215
6216 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6217
6218 bool
6219 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6220 {
6221 return (TYPE_QUALS (cand) == type_quals
6222 && check_base_type (cand, base));
6223 }
6224
6225 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6226
6227 static bool
6228 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6229 {
6230 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6231 && TYPE_NAME (cand) == TYPE_NAME (base)
6232 /* Apparently this is needed for Objective-C. */
6233 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6234 /* Check alignment. */
6235 && TYPE_ALIGN (cand) == align
6236 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6237 TYPE_ATTRIBUTES (base)));
6238 }
6239
6240 /* This function checks to see if TYPE matches the size of one of the
6241 built-in atomic types, and returns that core atomic type. */
6242
6243 static tree
6244 find_atomic_core_type (tree type)
6245 {
6246 tree base_atomic_type;
6247
6248 /* Only handle complete types. */
6249 if (TYPE_SIZE (type) == NULL_TREE)
6250 return NULL_TREE;
6251
6252 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6253 switch (type_size)
6254 {
6255 case 8:
6256 base_atomic_type = atomicQI_type_node;
6257 break;
6258
6259 case 16:
6260 base_atomic_type = atomicHI_type_node;
6261 break;
6262
6263 case 32:
6264 base_atomic_type = atomicSI_type_node;
6265 break;
6266
6267 case 64:
6268 base_atomic_type = atomicDI_type_node;
6269 break;
6270
6271 case 128:
6272 base_atomic_type = atomicTI_type_node;
6273 break;
6274
6275 default:
6276 base_atomic_type = NULL_TREE;
6277 }
6278
6279 return base_atomic_type;
6280 }
6281
6282 /* Return a version of the TYPE, qualified as indicated by the
6283 TYPE_QUALS, if one exists. If no qualified version exists yet,
6284 return NULL_TREE. */
6285
6286 tree
6287 get_qualified_type (tree type, int type_quals)
6288 {
6289 tree t;
6290
6291 if (TYPE_QUALS (type) == type_quals)
6292 return type;
6293
6294 /* Search the chain of variants to see if there is already one there just
6295 like the one we need to have. If so, use that existing one. We must
6296 preserve the TYPE_NAME, since there is code that depends on this. */
6297 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6298 if (check_qualified_type (t, type, type_quals))
6299 return t;
6300
6301 return NULL_TREE;
6302 }
6303
6304 /* Like get_qualified_type, but creates the type if it does not
6305 exist. This function never returns NULL_TREE. */
6306
6307 tree
6308 build_qualified_type (tree type, int type_quals)
6309 {
6310 tree t;
6311
6312 /* See if we already have the appropriate qualified variant. */
6313 t = get_qualified_type (type, type_quals);
6314
6315 /* If not, build it. */
6316 if (!t)
6317 {
6318 t = build_variant_type_copy (type);
6319 set_type_quals (t, type_quals);
6320
6321 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6322 {
6323 /* See if this object can map to a basic atomic type. */
6324 tree atomic_type = find_atomic_core_type (type);
6325 if (atomic_type)
6326 {
6327 /* Ensure the alignment of this type is compatible with
6328 the required alignment of the atomic type. */
6329 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6330 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6331 }
6332 }
6333
6334 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6335 /* Propagate structural equality. */
6336 SET_TYPE_STRUCTURAL_EQUALITY (t);
6337 else if (TYPE_CANONICAL (type) != type)
6338 /* Build the underlying canonical type, since it is different
6339 from TYPE. */
6340 {
6341 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6342 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6343 }
6344 else
6345 /* T is its own canonical type. */
6346 TYPE_CANONICAL (t) = t;
6347
6348 }
6349
6350 return t;
6351 }
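/* A typical use of build_qualified_type (an illustrative sketch, with
   TYPE standing for any existing type node) is layering an extra
   qualifier on top of whatever TYPE already carries:

     tree ctype = build_qualified_type (type,
                                        TYPE_QUALS (type) | TYPE_QUAL_CONST);

   This reuses a const-qualified variant already present on the variant
   chain, and otherwise builds one and links it in.  */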
6352
6353 /* Create a variant of type T with alignment ALIGN. */
6354
6355 tree
6356 build_aligned_type (tree type, unsigned int align)
6357 {
6358 tree t;
6359
6360 if (TYPE_PACKED (type)
6361 || TYPE_ALIGN (type) == align)
6362 return type;
6363
6364 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6365 if (check_aligned_type (t, type, align))
6366 return t;
6367
6368 t = build_variant_type_copy (type);
6369 TYPE_ALIGN (t) = align;
6370
6371 return t;
6372 }
6373
6374 /* Create a new distinct copy of TYPE. The new type is made its own
6375 MAIN_VARIANT. If TYPE requires structural equality checks, the
6376 resulting type requires structural equality checks; otherwise, its
6377 TYPE_CANONICAL points to itself. */
6378
6379 tree
6380 build_distinct_type_copy (tree type)
6381 {
6382 tree t = copy_node (type);
6383
6384 TYPE_POINTER_TO (t) = 0;
6385 TYPE_REFERENCE_TO (t) = 0;
6386
6387 /* Set the canonical type either to a new equivalence class, or
6388 propagate the need for structural equality checks. */
6389 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6390 SET_TYPE_STRUCTURAL_EQUALITY (t);
6391 else
6392 TYPE_CANONICAL (t) = t;
6393
6394 /* Make it its own variant. */
6395 TYPE_MAIN_VARIANT (t) = t;
6396 TYPE_NEXT_VARIANT (t) = 0;
6397
6398 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6399 whose TREE_TYPE is not t. This can also happen in the Ada
6400 frontend when using subtypes. */
6401
6402 return t;
6403 }
6404
6405 /* Create a new variant of TYPE, equivalent but distinct. This is so
6406 the caller can modify it. TYPE_CANONICAL for the return type will
6407 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6408 are considered equal by the language itself (or that both types
6409 require structural equality checks). */
6410
6411 tree
6412 build_variant_type_copy (tree type)
6413 {
6414 tree t, m = TYPE_MAIN_VARIANT (type);
6415
6416 t = build_distinct_type_copy (type);
6417
6418 /* Since we're building a variant, assume that it is a non-semantic
6419 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6420 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6421
6422 /* Add the new type to the chain of variants of TYPE. */
6423 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6424 TYPE_NEXT_VARIANT (m) = t;
6425 TYPE_MAIN_VARIANT (t) = m;
6426
6427 return t;
6428 }
6429 \f
6430 /* Return true if the from trees in both tree maps are equal. */
6431
6432 int
6433 tree_map_base_eq (const void *va, const void *vb)
6434 {
6435 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6436 *const b = (const struct tree_map_base *) vb;
6437 return (a->from == b->from);
6438 }
6439
6440 /* Hash a from tree in a tree_map_base. */
6441
6442 unsigned int
6443 tree_map_base_hash (const void *item)
6444 {
6445 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6446 }
6447
6448 /* Return true if this tree map structure is marked for garbage collection
6449 purposes. We simply return true if the from tree is marked, so that this
6450 structure goes away when the from tree goes away. */
6451
6452 int
6453 tree_map_base_marked_p (const void *p)
6454 {
6455 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6456 }
6457
6458 /* Hash a from tree in a tree_map. */
6459
6460 unsigned int
6461 tree_map_hash (const void *item)
6462 {
6463 return (((const struct tree_map *) item)->hash);
6464 }
6465
6466 /* Hash a from tree in a tree_decl_map. */
6467
6468 unsigned int
6469 tree_decl_map_hash (const void *item)
6470 {
6471 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6472 }
6473
6474 /* Return the initialization priority for DECL. */
6475
6476 priority_type
6477 decl_init_priority_lookup (tree decl)
6478 {
6479 symtab_node *snode = symtab_node::get (decl);
6480
6481 if (!snode)
6482 return DEFAULT_INIT_PRIORITY;
6483 return
6484 snode->get_init_priority ();
6485 }
6486
6487 /* Return the finalization priority for DECL. */
6488
6489 priority_type
6490 decl_fini_priority_lookup (tree decl)
6491 {
6492 cgraph_node *node = cgraph_node::get (decl);
6493
6494 if (!node)
6495 return DEFAULT_INIT_PRIORITY;
6496 return
6497 node->get_fini_priority ();
6498 }
6499
6500 /* Set the initialization priority for DECL to PRIORITY. */
6501
6502 void
6503 decl_init_priority_insert (tree decl, priority_type priority)
6504 {
6505 struct symtab_node *snode;
6506
6507 if (priority == DEFAULT_INIT_PRIORITY)
6508 {
6509 snode = symtab_node::get (decl);
6510 if (!snode)
6511 return;
6512 }
6513 else if (TREE_CODE (decl) == VAR_DECL)
6514 snode = varpool_node::get_create (decl);
6515 else
6516 snode = cgraph_node::get_create (decl);
6517 snode->set_init_priority (priority);
6518 }
6519
6520 /* Set the finalization priority for DECL to PRIORITY. */
6521
6522 void
6523 decl_fini_priority_insert (tree decl, priority_type priority)
6524 {
6525 struct cgraph_node *node;
6526
6527 if (priority == DEFAULT_INIT_PRIORITY)
6528 {
6529 node = cgraph_node::get (decl);
6530 if (!node)
6531 return;
6532 }
6533 else
6534 node = cgraph_node::get_create (decl);
6535 node->set_fini_priority (priority);
6536 }
6537
6538 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6539
6540 static void
6541 print_debug_expr_statistics (void)
6542 {
6543 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6544 (long) htab_size (debug_expr_for_decl),
6545 (long) htab_elements (debug_expr_for_decl),
6546 htab_collisions (debug_expr_for_decl));
6547 }
6548
6549 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6550
6551 static void
6552 print_value_expr_statistics (void)
6553 {
6554 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6555 (long) htab_size (value_expr_for_decl),
6556 (long) htab_elements (value_expr_for_decl),
6557 htab_collisions (value_expr_for_decl));
6558 }
6559
6560 /* Lookup a debug expression for FROM, and return it if we find one. */
6561
6562 tree
6563 decl_debug_expr_lookup (tree from)
6564 {
6565 struct tree_decl_map *h, in;
6566 in.base.from = from;
6567
6568 h = (struct tree_decl_map *)
6569 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6570 if (h)
6571 return h->to;
6572 return NULL_TREE;
6573 }
6574
6575 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6576
6577 void
6578 decl_debug_expr_insert (tree from, tree to)
6579 {
6580 struct tree_decl_map *h;
6581 void **loc;
6582
6583 h = ggc_alloc<tree_decl_map> ();
6584 h->base.from = from;
6585 h->to = to;
6586 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6587 INSERT);
6588 *(struct tree_decl_map **) loc = h;
6589 }
6590
6591 /* Lookup a value expression for FROM, and return it if we find one. */
6592
6593 tree
6594 decl_value_expr_lookup (tree from)
6595 {
6596 struct tree_decl_map *h, in;
6597 in.base.from = from;
6598
6599 h = (struct tree_decl_map *)
6600 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6601 if (h)
6602 return h->to;
6603 return NULL_TREE;
6604 }
6605
6606 /* Insert a mapping FROM->TO in the value expression hashtable. */
6607
6608 void
6609 decl_value_expr_insert (tree from, tree to)
6610 {
6611 struct tree_decl_map *h;
6612 void **loc;
6613
6614 h = ggc_alloc<tree_decl_map> ();
6615 h->base.from = from;
6616 h->to = to;
6617 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6618 INSERT);
6619 *(struct tree_decl_map **) loc = h;
6620 }
6621
6622 /* Lookup a vector of debug arguments for FROM, and return it if we
6623 find one. */
6624
6625 vec<tree, va_gc> **
6626 decl_debug_args_lookup (tree from)
6627 {
6628 struct tree_vec_map *h, in;
6629
6630 if (!DECL_HAS_DEBUG_ARGS_P (from))
6631 return NULL;
6632 gcc_checking_assert (debug_args_for_decl != NULL);
6633 in.base.from = from;
6634 h = (struct tree_vec_map *)
6635 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6636 if (h)
6637 return &h->to;
6638 return NULL;
6639 }
6640
6641 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6642 arguments hashtable. */
6643
6644 vec<tree, va_gc> **
6645 decl_debug_args_insert (tree from)
6646 {
6647 struct tree_vec_map *h;
6648 void **loc;
6649
6650 if (DECL_HAS_DEBUG_ARGS_P (from))
6651 return decl_debug_args_lookup (from);
6652 if (debug_args_for_decl == NULL)
6653 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6654 tree_vec_map_eq, 0);
6655 h = ggc_alloc<tree_vec_map> ();
6656 h->base.from = from;
6657 h->to = NULL;
6658 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6659 INSERT);
6660 *(struct tree_vec_map **) loc = h;
6661 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6662 return &h->to;
6663 }
6664
6665 /* Hashing of types so that we don't make duplicates.
6666 The entry point is `type_hash_canon'. */
6667
6668 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6669 with types in the TREE_VALUE slots), by adding the hash codes
6670 of the individual types. */
6671
6672 static void
6673 type_hash_list (const_tree list, inchash::hash &hstate)
6674 {
6675 const_tree tail;
6676
6677 for (tail = list; tail; tail = TREE_CHAIN (tail))
6678 if (TREE_VALUE (tail) != error_mark_node)
6679 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6680 }
6681
6682 /* These are the Hashtable callback functions. */
6683
6684 /* Returns true iff the types are equivalent. */
6685
6686 static int
6687 type_hash_eq (const void *va, const void *vb)
6688 {
6689 const struct type_hash *const a = (const struct type_hash *) va,
6690 *const b = (const struct type_hash *) vb;
6691
6692 /* First test the things that are the same for all types. */
6693 if (a->hash != b->hash
6694 || TREE_CODE (a->type) != TREE_CODE (b->type)
6695 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6696 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6697 TYPE_ATTRIBUTES (b->type))
6698 || (TREE_CODE (a->type) != COMPLEX_TYPE
6699 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6700 return 0;
6701
6702 /* Be careful about comparing arrays before and after the element type
6703 has been completed; don't compare TYPE_ALIGN unless both types are
6704 complete. */
6705 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6706 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6707 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6708 return 0;
6709
6710 switch (TREE_CODE (a->type))
6711 {
6712 case VOID_TYPE:
6713 case COMPLEX_TYPE:
6714 case POINTER_TYPE:
6715 case REFERENCE_TYPE:
6716 case NULLPTR_TYPE:
6717 return 1;
6718
6719 case VECTOR_TYPE:
6720 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6721
6722 case ENUMERAL_TYPE:
6723 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6724 && !(TYPE_VALUES (a->type)
6725 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6726 && TYPE_VALUES (b->type)
6727 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6728 && type_list_equal (TYPE_VALUES (a->type),
6729 TYPE_VALUES (b->type))))
6730 return 0;
6731
6732 /* ... fall through ... */
6733
6734 case INTEGER_TYPE:
6735 case REAL_TYPE:
6736 case BOOLEAN_TYPE:
6737 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6738 return false;
6739 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6740 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6741 TYPE_MAX_VALUE (b->type)))
6742 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6743 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6744 TYPE_MIN_VALUE (b->type))));
6745
6746 case FIXED_POINT_TYPE:
6747 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6748
6749 case OFFSET_TYPE:
6750 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6751
6752 case METHOD_TYPE:
6753 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6754 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6755 || (TYPE_ARG_TYPES (a->type)
6756 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6757 && TYPE_ARG_TYPES (b->type)
6758 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6759 && type_list_equal (TYPE_ARG_TYPES (a->type),
6760 TYPE_ARG_TYPES (b->type)))))
6761 break;
6762 return 0;
6763 case ARRAY_TYPE:
6764 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6765
6766 case RECORD_TYPE:
6767 case UNION_TYPE:
6768 case QUAL_UNION_TYPE:
6769 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6770 || (TYPE_FIELDS (a->type)
6771 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6772 && TYPE_FIELDS (b->type)
6773 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6774 && type_list_equal (TYPE_FIELDS (a->type),
6775 TYPE_FIELDS (b->type))));
6776
6777 case FUNCTION_TYPE:
6778 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6779 || (TYPE_ARG_TYPES (a->type)
6780 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6781 && TYPE_ARG_TYPES (b->type)
6782 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6783 && type_list_equal (TYPE_ARG_TYPES (a->type),
6784 TYPE_ARG_TYPES (b->type))))
6785 break;
6786 return 0;
6787
6788 default:
6789 return 0;
6790 }
6791
6792 if (lang_hooks.types.type_hash_eq != NULL)
6793 return lang_hooks.types.type_hash_eq (a->type, b->type);
6794
6795 return 1;
6796 }
6797
6798 /* Return the cached hash value. */
6799
6800 static hashval_t
6801 type_hash_hash (const void *item)
6802 {
6803 return ((const struct type_hash *) item)->hash;
6804 }
6805
6806 /* Given TYPE, and HASHCODE its hash code, return the canonical
6807 object for an identical type if one already exists.
6808 Otherwise, return TYPE, and record it as the canonical object.
6809
6810 To use this function, first create a type of the sort you want.
6811 Then compute its hash code from the fields of the type that
6812 make it different from other similar types.
6813 Then call this function and use the value. */
6814
6815 tree
6816 type_hash_canon (unsigned int hashcode, tree type)
6817 {
6818 type_hash in;
6819 void **loc;
6820
6821 /* The hash table only contains main variants, so ensure that's what we're
6822 being passed. */
6823 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6824
6825 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6826 must call that routine before comparing TYPE_ALIGNs. */
6827 layout_type (type);
6828
6829 in.hash = hashcode;
6830 in.type = type;
6831
6832 loc = htab_find_slot_with_hash (type_hash_table, &in, hashcode, INSERT);
6833 if (*loc)
6834 {
6835 tree t1 = ((type_hash *) *loc)->type;
6836 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6837 if (GATHER_STATISTICS)
6838 {
6839 tree_code_counts[(int) TREE_CODE (type)]--;
6840 tree_node_counts[(int) t_kind]--;
6841 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6842 }
6843 return t1;
6844 }
6845 else
6846 {
6847 struct type_hash *h;
6848
6849 h = ggc_alloc<type_hash> ();
6850 h->hash = hashcode;
6851 h->type = type;
6852 *loc = (void *)h;
6853
6854 return type;
6855 }
6856 }
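/* An illustrative sketch of the calling pattern described above (compare
   build_range_type_1 and build_nonstandard_integer_type below, which
   follow it):

     tree t = make_node (INTEGER_TYPE);
     ... fill in the fields that distinguish T from similar types ...
     inchash::hash hstate;
     inchash::add_expr (TYPE_MAX_VALUE (t), hstate);
     t = type_hash_canon (hstate.end (), t);

   Afterwards T is either the node just built, now registered as the
   canonical one, or an older structurally identical node taken from the
   table.  */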
6857
6858 /* See if the data pointed to by the type hash table is marked. We consider
6859 it marked if the type is marked or if a debug type number or symbol
6860 table entry has been made for the type. */
6861
6862 static int
6863 type_hash_marked_p (const void *p)
6864 {
6865 const_tree const type = ((const struct type_hash *) p)->type;
6866
6867 return ggc_marked_p (type);
6868 }
6869
6870 static void
6871 print_type_hash_statistics (void)
6872 {
6873 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6874 (long) htab_size (type_hash_table),
6875 (long) htab_elements (type_hash_table),
6876 htab_collisions (type_hash_table));
6877 }
6878
6879 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6880 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6881 by adding the hash codes of the individual attributes. */
6882
6883 static void
6884 attribute_hash_list (const_tree list, inchash::hash &hstate)
6885 {
6886 const_tree tail;
6887
6888 for (tail = list; tail; tail = TREE_CHAIN (tail))
6889 /* ??? Do we want to add in TREE_VALUE too? */
6890 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6891 }
6892
6893 /* Given two lists of attributes, return true if list l2 is
6894 equivalent to l1. */
6895
6896 int
6897 attribute_list_equal (const_tree l1, const_tree l2)
6898 {
6899 if (l1 == l2)
6900 return 1;
6901
6902 return attribute_list_contained (l1, l2)
6903 && attribute_list_contained (l2, l1);
6904 }
6905
6906 /* Given two lists of attributes, return true if list L2 is
6907 completely contained within L1. */
6908 /* ??? This would be faster if attribute names were stored in a canonicalized
6909 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6910 must be used to show these elements are equivalent (which they are). */
6911 /* ??? It's not clear that attributes with arguments will always be handled
6912 correctly. */
6913
6914 int
6915 attribute_list_contained (const_tree l1, const_tree l2)
6916 {
6917 const_tree t1, t2;
6918
6919 /* First check the obvious, maybe the lists are identical. */
6920 if (l1 == l2)
6921 return 1;
6922
6923 /* Maybe the lists are similar. */
6924 for (t1 = l1, t2 = l2;
6925 t1 != 0 && t2 != 0
6926 && get_attribute_name (t1) == get_attribute_name (t2)
6927 && TREE_VALUE (t1) == TREE_VALUE (t2);
6928 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6929 ;
6930
6931 /* Maybe the lists are equal. */
6932 if (t1 == 0 && t2 == 0)
6933 return 1;
6934
6935 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6936 {
6937 const_tree attr;
6938 /* This CONST_CAST is okay because lookup_attribute does not
6939 modify its argument and the return value is assigned to a
6940 const_tree. */
6941 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6942 CONST_CAST_TREE (l1));
6943 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6944 attr = lookup_ident_attribute (get_attribute_name (t2),
6945 TREE_CHAIN (attr)))
6946 ;
6947
6948 if (attr == NULL_TREE)
6949 return 0;
6950 }
6951
6952 return 1;
6953 }
6954
6955 /* Given two lists of types
6956 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6957 return 1 if the lists contain the same types in the same order.
6958 Also, the TREE_PURPOSEs must match. */
6959
6960 int
6961 type_list_equal (const_tree l1, const_tree l2)
6962 {
6963 const_tree t1, t2;
6964
6965 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6966 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6967 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6968 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6969 && (TREE_TYPE (TREE_PURPOSE (t1))
6970 == TREE_TYPE (TREE_PURPOSE (t2))))))
6971 return 0;
6972
6973 return t1 == t2;
6974 }
6975
6976 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6977 given by TYPE. If the argument list accepts variable arguments,
6978 then this function counts only the ordinary arguments. */
6979
6980 int
6981 type_num_arguments (const_tree type)
6982 {
6983 int i = 0;
6984 tree t;
6985
6986 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6987 /* If the function does not take a variable number of arguments,
6988 the last element in the list will have type `void'. */
6989 if (VOID_TYPE_P (TREE_VALUE (t)))
6990 break;
6991 else
6992 ++i;
6993
6994 return i;
6995 }
6996
6997 /* Nonzero if integer constants T1 and T2
6998 represent the same constant value. */
6999
7000 int
7001 tree_int_cst_equal (const_tree t1, const_tree t2)
7002 {
7003 if (t1 == t2)
7004 return 1;
7005
7006 if (t1 == 0 || t2 == 0)
7007 return 0;
7008
7009 if (TREE_CODE (t1) == INTEGER_CST
7010 && TREE_CODE (t2) == INTEGER_CST
7011 && wi::to_widest (t1) == wi::to_widest (t2))
7012 return 1;
7013
7014 return 0;
7015 }
7016
7017 /* Return true if T is an INTEGER_CST whose numerical value (extended
7018 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7019
7020 bool
7021 tree_fits_shwi_p (const_tree t)
7022 {
7023 return (t != NULL_TREE
7024 && TREE_CODE (t) == INTEGER_CST
7025 && wi::fits_shwi_p (wi::to_widest (t)));
7026 }
7027
7028 /* Return true if T is an INTEGER_CST whose numerical value (extended
7029 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7030
7031 bool
7032 tree_fits_uhwi_p (const_tree t)
7033 {
7034 return (t != NULL_TREE
7035 && TREE_CODE (t) == INTEGER_CST
7036 && wi::fits_uhwi_p (wi::to_widest (t)));
7037 }
7038
7039 /* T is an INTEGER_CST whose numerical value (extended according to
7040 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7041 HOST_WIDE_INT. */
7042
7043 HOST_WIDE_INT
7044 tree_to_shwi (const_tree t)
7045 {
7046 gcc_assert (tree_fits_shwi_p (t));
7047 return TREE_INT_CST_LOW (t);
7048 }
7049
7050 /* T is an INTEGER_CST whose numerical value (extended according to
7051 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7052 HOST_WIDE_INT. */
7053
7054 unsigned HOST_WIDE_INT
7055 tree_to_uhwi (const_tree t)
7056 {
7057 gcc_assert (tree_fits_uhwi_p (t));
7058 return TREE_INT_CST_LOW (t);
7059 }
7060
7061 /* Return the most significant (sign) bit of T. */
7062
7063 int
7064 tree_int_cst_sign_bit (const_tree t)
7065 {
7066 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7067
7068 return wi::extract_uhwi (t, bitno, 1);
7069 }
7070
7071 /* Return an indication of the sign of the integer constant T.
7072 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7073 Note that -1 will never be returned if T's type is unsigned. */
7074
7075 int
7076 tree_int_cst_sgn (const_tree t)
7077 {
7078 if (wi::eq_p (t, 0))
7079 return 0;
7080 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7081 return 1;
7082 else if (wi::neg_p (t))
7083 return -1;
7084 else
7085 return 1;
7086 }
7087
7088 /* Return the minimum number of bits needed to represent VALUE in a
7089 signed or unsigned type; SGN says which. */
7090
7091 unsigned int
7092 tree_int_cst_min_precision (tree value, signop sgn)
7093 {
7094 /* If the value is negative, compute its negative minus 1. The latter
7095 adjustment is because the absolute value of the largest negative value
7096 is one larger than the largest positive value. This is equivalent to
7097 a bit-wise negation, so use that operation instead. */
7098
7099 if (tree_int_cst_sgn (value) < 0)
7100 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7101
7102 /* Return the number of bits needed, taking into account the fact
7103 that we need one more bit for a signed than unsigned type.
7104 If value is 0 or -1, the minimum precision is 1 no matter
7105 whether SGN is SIGNED or UNSIGNED. */
7106
7107 if (integer_zerop (value))
7108 return 1;
7109 else
7110 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7111 }
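/* A worked example of the computation above (purely illustrative): for
   VALUE == -5 with SGN == SIGNED, the BIT_NOT_EXPR folds -5 to 4,
   tree_floor_log2 (4) is 2, and the result is 2 + 1 + 1 = 4 bits, the
   smallest signed width whose range [-8, 7] contains -5.  */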
7112
7113 /* Return truthvalue of whether T1 is the same tree structure as T2.
7114 Return 1 if they are the same.
7115 Return 0 if they are understandably different.
7116 Return -1 if either contains tree structure not understood by
7117 this function. */
7118
7119 int
7120 simple_cst_equal (const_tree t1, const_tree t2)
7121 {
7122 enum tree_code code1, code2;
7123 int cmp;
7124 int i;
7125
7126 if (t1 == t2)
7127 return 1;
7128 if (t1 == 0 || t2 == 0)
7129 return 0;
7130
7131 code1 = TREE_CODE (t1);
7132 code2 = TREE_CODE (t2);
7133
7134 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7135 {
7136 if (CONVERT_EXPR_CODE_P (code2)
7137 || code2 == NON_LVALUE_EXPR)
7138 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7139 else
7140 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7141 }
7142
7143 else if (CONVERT_EXPR_CODE_P (code2)
7144 || code2 == NON_LVALUE_EXPR)
7145 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7146
7147 if (code1 != code2)
7148 return 0;
7149
7150 switch (code1)
7151 {
7152 case INTEGER_CST:
7153 return wi::to_widest (t1) == wi::to_widest (t2);
7154
7155 case REAL_CST:
7156 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7157
7158 case FIXED_CST:
7159 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7160
7161 case STRING_CST:
7162 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7163 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7164 TREE_STRING_LENGTH (t1)));
7165
7166 case CONSTRUCTOR:
7167 {
7168 unsigned HOST_WIDE_INT idx;
7169 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7170 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7171
7172 if (vec_safe_length (v1) != vec_safe_length (v2))
7173 return false;
7174
7175 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7176 /* ??? Should we also handle fields here? */
7177 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7178 return false;
7179 return true;
7180 }
7181
7182 case SAVE_EXPR:
7183 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7184
7185 case CALL_EXPR:
7186 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7187 if (cmp <= 0)
7188 return cmp;
7189 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7190 return 0;
7191 {
7192 const_tree arg1, arg2;
7193 const_call_expr_arg_iterator iter1, iter2;
7194 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7195 arg2 = first_const_call_expr_arg (t2, &iter2);
7196 arg1 && arg2;
7197 arg1 = next_const_call_expr_arg (&iter1),
7198 arg2 = next_const_call_expr_arg (&iter2))
7199 {
7200 cmp = simple_cst_equal (arg1, arg2);
7201 if (cmp <= 0)
7202 return cmp;
7203 }
7204 return arg1 == arg2;
7205 }
7206
7207 case TARGET_EXPR:
7208 /* Special case: if either target is an unallocated VAR_DECL,
7209 it means that it's going to be unified with whatever the
7210 TARGET_EXPR is really supposed to initialize, so treat it
7211 as being equivalent to anything. */
7212 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7213 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7214 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7215 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7216 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7217 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7218 cmp = 1;
7219 else
7220 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7221
7222 if (cmp <= 0)
7223 return cmp;
7224
7225 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7226
7227 case WITH_CLEANUP_EXPR:
7228 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7229 if (cmp <= 0)
7230 return cmp;
7231
7232 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7233
7234 case COMPONENT_REF:
7235 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7236 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7237
7238 return 0;
7239
7240 case VAR_DECL:
7241 case PARM_DECL:
7242 case CONST_DECL:
7243 case FUNCTION_DECL:
7244 return 0;
7245
7246 default:
7247 break;
7248 }
7249
7250 /* This general rule works for most tree codes. All exceptions should be
7251 handled above. If this is a language-specific tree code, we can't
7252 trust what might be in the operand, so say we don't know
7253 the situation. */
7254 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7255 return -1;
7256
7257 switch (TREE_CODE_CLASS (code1))
7258 {
7259 case tcc_unary:
7260 case tcc_binary:
7261 case tcc_comparison:
7262 case tcc_expression:
7263 case tcc_reference:
7264 case tcc_statement:
7265 cmp = 1;
7266 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7267 {
7268 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7269 if (cmp <= 0)
7270 return cmp;
7271 }
7272
7273 return cmp;
7274
7275 default:
7276 return -1;
7277 }
7278 }
7279
7280 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7281 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7282 than U, respectively. */
7283
7284 int
7285 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7286 {
7287 if (tree_int_cst_sgn (t) < 0)
7288 return -1;
7289 else if (!tree_fits_uhwi_p (t))
7290 return 1;
7291 else if (TREE_INT_CST_LOW (t) == u)
7292 return 0;
7293 else if (TREE_INT_CST_LOW (t) < u)
7294 return -1;
7295 else
7296 return 1;
7297 }
7298
7299 /* Return true if SIZE represents a constant size that is in bounds of
7300 what the middle-end and the backend accepts (covering not more than
7301 half of the address-space). */
7302
7303 bool
7304 valid_constant_size_p (const_tree size)
7305 {
7306 if (! tree_fits_uhwi_p (size)
7307 || TREE_OVERFLOW (size)
7308 || tree_int_cst_sign_bit (size) != 0)
7309 return false;
7310 return true;
7311 }
7312
7313 /* Return the precision of the type, or for a complex or vector type the
7314 precision of the type of its elements. */
7315
7316 unsigned int
7317 element_precision (const_tree type)
7318 {
7319 enum tree_code code = TREE_CODE (type);
7320 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7321 type = TREE_TYPE (type);
7322
7323 return TYPE_PRECISION (type);
7324 }
7325
7326 /* Return true if CODE represents an associative tree code. Otherwise
7327 return false. */
7328 bool
7329 associative_tree_code (enum tree_code code)
7330 {
7331 switch (code)
7332 {
7333 case BIT_IOR_EXPR:
7334 case BIT_AND_EXPR:
7335 case BIT_XOR_EXPR:
7336 case PLUS_EXPR:
7337 case MULT_EXPR:
7338 case MIN_EXPR:
7339 case MAX_EXPR:
7340 return true;
7341
7342 default:
7343 break;
7344 }
7345 return false;
7346 }
7347
7348 /* Return true if CODE represents a commutative tree code. Otherwise
7349 return false. */
7350 bool
7351 commutative_tree_code (enum tree_code code)
7352 {
7353 switch (code)
7354 {
7355 case PLUS_EXPR:
7356 case MULT_EXPR:
7357 case MULT_HIGHPART_EXPR:
7358 case MIN_EXPR:
7359 case MAX_EXPR:
7360 case BIT_IOR_EXPR:
7361 case BIT_XOR_EXPR:
7362 case BIT_AND_EXPR:
7363 case NE_EXPR:
7364 case EQ_EXPR:
7365 case UNORDERED_EXPR:
7366 case ORDERED_EXPR:
7367 case UNEQ_EXPR:
7368 case LTGT_EXPR:
7369 case TRUTH_AND_EXPR:
7370 case TRUTH_XOR_EXPR:
7371 case TRUTH_OR_EXPR:
7372 case WIDEN_MULT_EXPR:
7373 case VEC_WIDEN_MULT_HI_EXPR:
7374 case VEC_WIDEN_MULT_LO_EXPR:
7375 case VEC_WIDEN_MULT_EVEN_EXPR:
7376 case VEC_WIDEN_MULT_ODD_EXPR:
7377 return true;
7378
7379 default:
7380 break;
7381 }
7382 return false;
7383 }
7384
7385 /* Return true if CODE represents a ternary tree code for which the
7386 first two operands are commutative. Otherwise return false. */
7387 bool
7388 commutative_ternary_tree_code (enum tree_code code)
7389 {
7390 switch (code)
7391 {
7392 case WIDEN_MULT_PLUS_EXPR:
7393 case WIDEN_MULT_MINUS_EXPR:
7394 case DOT_PROD_EXPR:
7395 case FMA_EXPR:
7396 return true;
7397
7398 default:
7399 break;
7400 }
7401 return false;
7402 }
7403
7404 namespace inchash
7405 {
7406
7407 /* Generate a hash value for an expression. This can be used iteratively
7408 by passing a previous result as the HSTATE argument.
7409
7410 This function is intended to produce the same hash for expressions which
7411 would compare equal using operand_equal_p. */
7412 void
7413 add_expr (const_tree t, inchash::hash &hstate)
7414 {
7415 int i;
7416 enum tree_code code;
7417 enum tree_code_class tclass;
7418
7419 if (t == NULL_TREE)
7420 {
7421 hstate.merge_hash (0);
7422 return;
7423 }
7424
7425 code = TREE_CODE (t);
7426
7427 switch (code)
7428 {
7429 /* Alas, constants aren't shared, so we can't rely on pointer
7430 identity. */
7431 case VOID_CST:
7432 hstate.merge_hash (0);
7433 return;
7434 case INTEGER_CST:
7435 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7436 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7437 return;
7438 case REAL_CST:
7439 {
7440 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7441 hstate.merge_hash (val2);
7442 return;
7443 }
7444 case FIXED_CST:
7445 {
7446 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7447 hstate.merge_hash (val2);
7448 return;
7449 }
7450 case STRING_CST:
7451 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7452 return;
7453 case COMPLEX_CST:
7454 inchash::add_expr (TREE_REALPART (t), hstate);
7455 inchash::add_expr (TREE_IMAGPART (t), hstate);
7456 return;
7457 case VECTOR_CST:
7458 {
7459 unsigned i;
7460 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7461 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7462 return;
7463 }
7464 case SSA_NAME:
7465 /* We can just compare by pointer. */
7466 hstate.add_wide_int (SSA_NAME_VERSION (t));
7467 return;
7468 case PLACEHOLDER_EXPR:
7469 /* The node itself doesn't matter. */
7470 return;
7471 case TREE_LIST:
7472 /* A list of expressions, for a CALL_EXPR or as the elements of a
7473 VECTOR_CST. */
7474 for (; t; t = TREE_CHAIN (t))
7475 inchash::add_expr (TREE_VALUE (t), hstate);
7476 return;
7477 case CONSTRUCTOR:
7478 {
7479 unsigned HOST_WIDE_INT idx;
7480 tree field, value;
7481 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7482 {
7483 inchash::add_expr (field, hstate);
7484 inchash::add_expr (value, hstate);
7485 }
7486 return;
7487 }
7488 case FUNCTION_DECL:
7489 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7490 Otherwise nodes that compare equal according to operand_equal_p might
7491 get different hash codes. However, don't do this for machine specific
7492 or front end builtins, since the function code is overloaded in those
7493 cases. */
7494 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7495 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7496 {
7497 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7498 code = TREE_CODE (t);
7499 }
7500 /* FALL THROUGH */
7501 default:
7502 tclass = TREE_CODE_CLASS (code);
7503
7504 if (tclass == tcc_declaration)
7505 {
7506 /* DECLs have a unique ID. */
7507 hstate.add_wide_int (DECL_UID (t));
7508 }
7509 else
7510 {
7511 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7512
7513 hstate.add_object (code);
7514
7515 /* Don't hash the type, that can lead to having nodes which
7516 compare equal according to operand_equal_p, but which
7517 have different hash codes. */
7518 if (CONVERT_EXPR_CODE_P (code)
7519 || code == NON_LVALUE_EXPR)
7520 {
7521 /* Make sure to include signedness in the hash computation. */
7522 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7523 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7524 }
7525
7526 else if (commutative_tree_code (code))
7527 {
7528 /* It's a commutative expression. We want to hash it the same
7529 however it appears. We do this by first hashing both operands
7530 and then rehashing based on the order of their independent
7531 hashes. */
7532 inchash::hash one, two;
7533 inchash::add_expr (TREE_OPERAND (t, 0), one);
7534 inchash::add_expr (TREE_OPERAND (t, 1), two);
7535 hstate.add_commutative (one, two);
7536 }
7537 else
7538 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7539 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7540 }
7541 return;
7542 }
7543 }
7544
7545 }
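/* An illustrative sketch of using inchash::add_expr from elsewhere (EXPR
   here is any tree expression a caller happens to have):

     inchash::hash hstate;
     inchash::add_expr (expr, hstate);
     hashval_t h = hstate.end ();

   Because commutative operands are combined order-independently above,
   a + b and b + a hash identically, in line with the operand_equal_p
   guarantee stated in the function comment.  */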
7546
7547 /* Constructors for pointer, array and function types.
7548 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7549 constructed by language-dependent code, not here.) */
7550
7551 /* Construct, lay out and return the type of pointers to TO_TYPE with
7552 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7553 reference all of memory. If such a type has already been
7554 constructed, reuse it. */
7555
7556 tree
7557 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7558 bool can_alias_all)
7559 {
7560 tree t;
7561
7562 if (to_type == error_mark_node)
7563 return error_mark_node;
7564
7565 /* If the pointed-to type has the may_alias attribute set, force
7566 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7567 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7568 can_alias_all = true;
7569
7570 /* In some cases, languages will have things that aren't a POINTER_TYPE
7571 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7572 In that case, return that type without regard to the rest of our
7573 operands.
7574
7575 ??? This is a kludge, but consistent with the way this function has
7576 always operated and there doesn't seem to be a good way to avoid this
7577 at the moment. */
7578 if (TYPE_POINTER_TO (to_type) != 0
7579 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7580 return TYPE_POINTER_TO (to_type);
7581
7582 /* First, if we already have a type for pointers to TO_TYPE and it's
7583 the proper mode, use it. */
7584 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7585 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7586 return t;
7587
7588 t = make_node (POINTER_TYPE);
7589
7590 TREE_TYPE (t) = to_type;
7591 SET_TYPE_MODE (t, mode);
7592 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7593 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7594 TYPE_POINTER_TO (to_type) = t;
7595
7596 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7597 SET_TYPE_STRUCTURAL_EQUALITY (t);
7598 else if (TYPE_CANONICAL (to_type) != to_type)
7599 TYPE_CANONICAL (t)
7600 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7601 mode, can_alias_all);
7602
7603 /* Lay out the type. This function has many callers that are concerned
7604 with expression-construction, and this simplifies them all. */
7605 layout_type (t);
7606
7607 return t;
7608 }
7609
7610 /* By default build pointers in ptr_mode. */
7611
7612 tree
7613 build_pointer_type (tree to_type)
7614 {
7615 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7616 : TYPE_ADDR_SPACE (to_type);
7617 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7618 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7619 }
7620
7621 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7622
7623 tree
7624 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7625 bool can_alias_all)
7626 {
7627 tree t;
7628
7629 if (to_type == error_mark_node)
7630 return error_mark_node;
7631
7632 /* If the pointed-to type has the may_alias attribute set, force
7633 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7634 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7635 can_alias_all = true;
7636
7637 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7638 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7639 In that case, return that type without regard to the rest of our
7640 operands.
7641
7642 ??? This is a kludge, but consistent with the way this function has
7643 always operated and there doesn't seem to be a good way to avoid this
7644 at the moment. */
7645 if (TYPE_REFERENCE_TO (to_type) != 0
7646 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7647 return TYPE_REFERENCE_TO (to_type);
7648
7649 /* First, if we already have a type for pointers to TO_TYPE and it's
7650 the proper mode, use it. */
7651 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7652 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7653 return t;
7654
7655 t = make_node (REFERENCE_TYPE);
7656
7657 TREE_TYPE (t) = to_type;
7658 SET_TYPE_MODE (t, mode);
7659 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7660 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7661 TYPE_REFERENCE_TO (to_type) = t;
7662
7663 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7664 SET_TYPE_STRUCTURAL_EQUALITY (t);
7665 else if (TYPE_CANONICAL (to_type) != to_type)
7666 TYPE_CANONICAL (t)
7667 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7668 mode, can_alias_all);
7669
7670 layout_type (t);
7671
7672 return t;
7673 }
7674
7675
7676 /* Build the node for the type of references-to-TO_TYPE by default
7677 in ptr_mode. */
7678
7679 tree
7680 build_reference_type (tree to_type)
7681 {
7682 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7683 : TYPE_ADDR_SPACE (to_type);
7684 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7685 return build_reference_type_for_mode (to_type, pointer_mode, false);
7686 }
7687
7688 #define MAX_INT_CACHED_PREC \
7689 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7690 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7691
7692 /* Builds a signed or unsigned integer type of precision PRECISION.
7693 Used for C bitfields whose precision does not match that of
7694 built-in target types. */
7695 tree
7696 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7697 int unsignedp)
7698 {
7699 tree itype, ret;
7700
7701 if (unsignedp)
7702 unsignedp = MAX_INT_CACHED_PREC + 1;
7703
7704 if (precision <= MAX_INT_CACHED_PREC)
7705 {
7706 itype = nonstandard_integer_type_cache[precision + unsignedp];
7707 if (itype)
7708 return itype;
7709 }
7710
7711 itype = make_node (INTEGER_TYPE);
7712 TYPE_PRECISION (itype) = precision;
7713
7714 if (unsignedp)
7715 fixup_unsigned_type (itype);
7716 else
7717 fixup_signed_type (itype);
7718
7719 ret = itype;
7720 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7721 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7722 if (precision <= MAX_INT_CACHED_PREC)
7723 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7724
7725 return ret;
7726 }
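/* A note on the cache indexing above (descriptive, not normative): the
   nonstandard_integer_type_cache array is indexed by PRECISION + UNSIGNEDP
   after UNSIGNEDP has been remapped to MAX_INT_CACHED_PREC + 1, so signed
   types occupy slots 0 .. MAX_INT_CACHED_PREC and unsigned types occupy
   the slots above them.  For instance, a 24-bit unsigned request lands in
   slot 24 + MAX_INT_CACHED_PREC + 1.  */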
7727
7728 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7729 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7730 is true, reuse such a type that has already been constructed. */
7731
7732 static tree
7733 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7734 {
7735 tree itype = make_node (INTEGER_TYPE);
7736 inchash::hash hstate;
7737
7738 TREE_TYPE (itype) = type;
7739
7740 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7741 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7742
7743 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7744 SET_TYPE_MODE (itype, TYPE_MODE (type));
7745 TYPE_SIZE (itype) = TYPE_SIZE (type);
7746 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7747 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7748 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7749
7750 if (!shared)
7751 return itype;
7752
7753 if ((TYPE_MIN_VALUE (itype)
7754 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7755 || (TYPE_MAX_VALUE (itype)
7756 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7757 {
7758 /* Since we cannot reliably merge this type, we need to compare it using
7759 structural equality checks. */
7760 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7761 return itype;
7762 }
7763
7764 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7765 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7766 hstate.merge_hash (TYPE_HASH (type));
7767 itype = type_hash_canon (hstate.end (), itype);
7768
7769 return itype;
7770 }
7771
7772 /* Wrapper around build_range_type_1 with SHARED set to true. */
7773
7774 tree
7775 build_range_type (tree type, tree lowval, tree highval)
7776 {
7777 return build_range_type_1 (type, lowval, highval, true);
7778 }
7779
7780 /* Wrapper around build_range_type_1 with SHARED set to false. */
7781
7782 tree
7783 build_nonshared_range_type (tree type, tree lowval, tree highval)
7784 {
7785 return build_range_type_1 (type, lowval, highval, false);
7786 }
7787
7788 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7789 MAXVAL should be the maximum value in the domain
7790 (one less than the length of the array).
7791
7792 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7793 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7794 The limit exists because the result is a signed type and we don't handle
7795 sizes that use more than one HOST_WIDE_INT. */
7796
7797 tree
7798 build_index_type (tree maxval)
7799 {
7800 return build_range_type (sizetype, size_zero_node, maxval);
7801 }
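
/* As an illustration, the index domain suitable for a ten-element array
   can be obtained with:

     tree domain = build_index_type (size_int (9));

   which is equivalent to build_range_type (sizetype, size_zero_node,
   size_int (9)) and describes the range [0, 9] over sizetype.  */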
7802
7803 /* Return true if the debug information for TYPE, a subtype, should be emitted
7804 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7805 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7806 debug info and doesn't reflect the source code. */
7807
7808 bool
7809 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7810 {
7811 tree base_type = TREE_TYPE (type), low, high;
7812
7813 /* Subrange types have a base type which is an integral type. */
7814 if (!INTEGRAL_TYPE_P (base_type))
7815 return false;
7816
7817 /* Get the real bounds of the subtype. */
7818 if (lang_hooks.types.get_subrange_bounds)
7819 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7820 else
7821 {
7822 low = TYPE_MIN_VALUE (type);
7823 high = TYPE_MAX_VALUE (type);
7824 }
7825
7826 /* If the type and its base type have the same representation and the same
7827 name, then the type is not a subrange but a copy of the base type. */
7828 if ((TREE_CODE (base_type) == INTEGER_TYPE
7829 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7830 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7831 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7832 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7833 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7834 return false;
7835
7836 if (lowval)
7837 *lowval = low;
7838 if (highval)
7839 *highval = high;
7840 return true;
7841 }
7842
7843 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7844 and number of elements specified by the range of values of INDEX_TYPE.
7845 If SHARED is true, reuse such a type that has already been constructed. */
7846
7847 static tree
7848 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7849 {
7850 tree t;
7851
7852 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7853 {
7854 error ("arrays of functions are not meaningful");
7855 elt_type = integer_type_node;
7856 }
7857
7858 t = make_node (ARRAY_TYPE);
7859 TREE_TYPE (t) = elt_type;
7860 TYPE_DOMAIN (t) = index_type;
7861 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7862 layout_type (t);
7863
7864 /* If the element type is incomplete at this point we get marked for
7865 structural equality. Do not record these types in the canonical
7866 type hashtable. */
7867 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7868 return t;
7869
7870 if (shared)
7871 {
7872 inchash::hash hstate;
7873 hstate.add_object (TYPE_HASH (elt_type));
7874 if (index_type)
7875 hstate.add_object (TYPE_HASH (index_type));
7876 t = type_hash_canon (hstate.end (), t);
7877 }
7878
7879 if (TYPE_CANONICAL (t) == t)
7880 {
7881 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7882 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7883 SET_TYPE_STRUCTURAL_EQUALITY (t);
7884 else if (TYPE_CANONICAL (elt_type) != elt_type
7885 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7886 TYPE_CANONICAL (t)
7887 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7888 index_type
7889 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7890 shared);
7891 }
7892
7893 return t;
7894 }
7895
7896 /* Wrapper around build_array_type_1 with SHARED set to true. */
7897
7898 tree
7899 build_array_type (tree elt_type, tree index_type)
7900 {
7901 return build_array_type_1 (elt_type, index_type, true);
7902 }
7903
7904 /* Wrapper around build_array_type_1 with SHARED set to false. */
7905
7906 tree
7907 build_nonshared_array_type (tree elt_type, tree index_type)
7908 {
7909 return build_array_type_1 (elt_type, index_type, false);
7910 }
7911
7912 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7913 sizetype. */
7914
7915 tree
7916 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7917 {
7918 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7919 }
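
/* For instance, a tree for the C type "int [10]" could be built as
   (illustrative only; the variable name is made up):

     tree a10 = build_array_type_nelts (integer_type_node, 10);

   which is shorthand for build_array_type (integer_type_node,
   build_index_type (size_int (9))).  */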
7920
7921 /* Recursively examines the array elements of TYPE, until a non-array
7922 element type is found. */
7923
7924 tree
7925 strip_array_types (tree type)
7926 {
7927 while (TREE_CODE (type) == ARRAY_TYPE)
7928 type = TREE_TYPE (type);
7929
7930 return type;
7931 }
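
/* Illustrative example: given the type of "int [3][4]" built as

     tree inner = build_array_type_nelts (integer_type_node, 4);
     tree outer = build_array_type_nelts (inner, 3);

   strip_array_types (outer) peels both ARRAY_TYPE layers and yields
   integer_type_node.  (The variable names are made up.)  */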
7932
7933 /* Computes the canonical argument types from the argument type list
7934 ARGTYPES.
7935
7936 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7937 on entry to this function, or if any of the ARGTYPES are
7938 structural.
7939
7940 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7941 true on entry to this function, or if any of the ARGTYPES are
7942 non-canonical.
7943
7944 Returns a canonical argument list, which may be ARGTYPES when the
7945 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7946 true) or would not differ from ARGTYPES. */
7947
7948 static tree
7949 maybe_canonicalize_argtypes (tree argtypes,
7950 bool *any_structural_p,
7951 bool *any_noncanonical_p)
7952 {
7953 tree arg;
7954 bool any_noncanonical_argtypes_p = false;
7955
7956 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7957 {
7958 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7959 /* Fail gracefully by stating that the type is structural. */
7960 *any_structural_p = true;
7961 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7962 *any_structural_p = true;
7963 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7964 || TREE_PURPOSE (arg))
7965 /* If the argument has a default argument, we consider it
7966 non-canonical even though the type itself is canonical.
7967 That way, different variants of function and method types
7968 with default arguments will all point to the variant with
7969 no defaults as their canonical type. */
7970 any_noncanonical_argtypes_p = true;
7971 }
7972
7973 if (*any_structural_p)
7974 return argtypes;
7975
7976 if (any_noncanonical_argtypes_p)
7977 {
7978 /* Build the canonical list of argument types. */
7979 tree canon_argtypes = NULL_TREE;
7980 bool is_void = false;
7981
7982 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7983 {
7984 if (arg == void_list_node)
7985 is_void = true;
7986 else
7987 canon_argtypes = tree_cons (NULL_TREE,
7988 TYPE_CANONICAL (TREE_VALUE (arg)),
7989 canon_argtypes);
7990 }
7991
7992 canon_argtypes = nreverse (canon_argtypes);
7993 if (is_void)
7994 canon_argtypes = chainon (canon_argtypes, void_list_node);
7995
7996 /* There is a non-canonical type. */
7997 *any_noncanonical_p = true;
7998 return canon_argtypes;
7999 }
8000
8001 /* The canonical argument types are the same as ARGTYPES. */
8002 return argtypes;
8003 }
8004
8005 /* Construct, lay out and return
8006 the type of functions returning type VALUE_TYPE
8007 given arguments of types ARG_TYPES.
8008 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8009 are data type nodes for the arguments of the function.
8010 If such a type has already been constructed, reuse it. */
8011
8012 tree
8013 build_function_type (tree value_type, tree arg_types)
8014 {
8015 tree t;
8016 inchash::hash hstate;
8017 bool any_structural_p, any_noncanonical_p;
8018 tree canon_argtypes;
8019
8020 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8021 {
8022 error ("function return type cannot be function");
8023 value_type = integer_type_node;
8024 }
8025
8026 /* Make a node of the sort we want. */
8027 t = make_node (FUNCTION_TYPE);
8028 TREE_TYPE (t) = value_type;
8029 TYPE_ARG_TYPES (t) = arg_types;
8030
8031 /* If we already have such a type, use the old one. */
8032 hstate.add_object (TYPE_HASH (value_type));
8033 type_hash_list (arg_types, hstate);
8034 t = type_hash_canon (hstate.end (), t);
8035
8036 /* Set up the canonical type. */
8037 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8038 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8039 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8040 &any_structural_p,
8041 &any_noncanonical_p);
8042 if (any_structural_p)
8043 SET_TYPE_STRUCTURAL_EQUALITY (t);
8044 else if (any_noncanonical_p)
8045 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8046 canon_argtypes);
8047
8048 if (!COMPLETE_TYPE_P (t))
8049 layout_type (t);
8050 return t;
8051 }
8052
8053 /* Build a function type. The RETURN_TYPE is the type returned by the
8054 function. If VAARGS is set, no void_type_node is appended to the
8055 list. ARGP must always be terminated by a NULL_TREE. */
8056
8057 static tree
8058 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8059 {
8060 tree t, args, last;
8061
8062 t = va_arg (argp, tree);
8063 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8064 args = tree_cons (NULL_TREE, t, args);
8065
8066 if (vaargs)
8067 {
8068 last = args;
8069 if (args != NULL_TREE)
8070 args = nreverse (args);
8071 gcc_assert (last != void_list_node);
8072 }
8073 else if (args == NULL_TREE)
8074 args = void_list_node;
8075 else
8076 {
8077 last = args;
8078 args = nreverse (args);
8079 TREE_CHAIN (last) = void_list_node;
8080 }
8081 args = build_function_type (return_type, args);
8082
8083 return args;
8084 }
8085
8086 /* Build a function type. The RETURN_TYPE is the type returned by the
8087 function. If additional arguments are provided, they are
8088 additional argument types. The list of argument types must always
8089 be terminated by NULL_TREE. */
8090
8091 tree
8092 build_function_type_list (tree return_type, ...)
8093 {
8094 tree args;
8095 va_list p;
8096
8097 va_start (p, return_type);
8098 args = build_function_type_list_1 (false, return_type, p);
8099 va_end (p);
8100 return args;
8101 }
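
/* As an illustrative sketch (the variable name is made up), the type of a
   function taking an int and a double and returning int can be built with:

     tree fntype = build_function_type_list (integer_type_node,
                                             integer_type_node,
                                             double_type_node,
                                             NULL_TREE);

   The trailing NULL_TREE terminates the argument list; void_list_node is
   appended automatically, so the type is not variadic.  */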
8102
8103 /* Build a variable argument function type. The RETURN_TYPE is the
8104 type returned by the function. If additional arguments are provided,
8105 they are additional argument types. The list of argument types must
8106 always be terminated by NULL_TREE. */
8107
8108 tree
8109 build_varargs_function_type_list (tree return_type, ...)
8110 {
8111 tree args;
8112 va_list p;
8113
8114 va_start (p, return_type);
8115 args = build_function_type_list_1 (true, return_type, p);
8116 va_end (p);
8117
8118 return args;
8119 }
8120
8121 /* Build a function type. RETURN_TYPE is the type returned by the
8122 function; VAARGS indicates whether the function takes varargs. The
8123 function takes N named arguments, the types of which are provided in
8124 ARG_TYPES. */
8125
8126 static tree
8127 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8128 tree *arg_types)
8129 {
8130 int i;
8131 tree t = vaargs ? NULL_TREE : void_list_node;
8132
8133 for (i = n - 1; i >= 0; i--)
8134 t = tree_cons (NULL_TREE, arg_types[i], t);
8135
8136 return build_function_type (return_type, t);
8137 }
8138
8139 /* Build a function type. RETURN_TYPE is the type returned by the
8140 function. The function takes N named arguments, the types of which
8141 are provided in ARG_TYPES. */
8142
8143 tree
8144 build_function_type_array (tree return_type, int n, tree *arg_types)
8145 {
8146 return build_function_type_array_1 (false, return_type, n, arg_types);
8147 }
8148
8149 /* Build a variable argument function type. RETURN_TYPE is the type
8150 returned by the function. The function takes N named arguments, the
8151 types of which are provided in ARG_TYPES. */
8152
8153 tree
8154 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8155 {
8156 return build_function_type_array_1 (true, return_type, n, arg_types);
8157 }
8158
8159 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8160 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8161 for the method. An implicit additional parameter (of type
8162 pointer-to-BASETYPE) is added to the ARGTYPES. */
8163
8164 tree
8165 build_method_type_directly (tree basetype,
8166 tree rettype,
8167 tree argtypes)
8168 {
8169 tree t;
8170 tree ptype;
8171 inchash::hash hstate;
8172 bool any_structural_p, any_noncanonical_p;
8173 tree canon_argtypes;
8174
8175 /* Make a node of the sort we want. */
8176 t = make_node (METHOD_TYPE);
8177
8178 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8179 TREE_TYPE (t) = rettype;
8180 ptype = build_pointer_type (basetype);
8181
8182 /* The actual arglist for this function includes a "hidden" argument
8183 which is "this". Put it into the list of argument types. */
8184 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8185 TYPE_ARG_TYPES (t) = argtypes;
8186
8187 /* If we already have such a type, use the old one. */
8188 hstate.add_object (TYPE_HASH (basetype));
8189 hstate.add_object (TYPE_HASH (rettype));
8190 type_hash_list (argtypes, hstate);
8191 t = type_hash_canon (hstate.end (), t);
8192
8193 /* Set up the canonical type. */
8194 any_structural_p
8195 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8196 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8197 any_noncanonical_p
8198 = (TYPE_CANONICAL (basetype) != basetype
8199 || TYPE_CANONICAL (rettype) != rettype);
8200 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8201 &any_structural_p,
8202 &any_noncanonical_p);
8203 if (any_structural_p)
8204 SET_TYPE_STRUCTURAL_EQUALITY (t);
8205 else if (any_noncanonical_p)
8206 TYPE_CANONICAL (t)
8207 = build_method_type_directly (TYPE_CANONICAL (basetype),
8208 TYPE_CANONICAL (rettype),
8209 canon_argtypes);
8210 if (!COMPLETE_TYPE_P (t))
8211 layout_type (t);
8212
8213 return t;
8214 }
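
/* Illustrative sketch, assuming some_class_type is a RECORD_TYPE already
   built elsewhere (the names here are made up): a method of that class
   taking one int and returning void could be typed as

     tree mtype
       = build_method_type_directly (some_class_type, void_type_node,
                                     tree_cons (NULL_TREE, integer_type_node,
                                                void_list_node));

   The implicit "this" argument (pointer to some_class_type) is prepended
   to the argument list by this function itself.  */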
8215
8216 /* Construct, lay out and return the type of methods belonging to class
8217 BASETYPE and whose arguments and values are described by TYPE.
8218 If that type exists already, reuse it.
8219 TYPE must be a FUNCTION_TYPE node. */
8220
8221 tree
8222 build_method_type (tree basetype, tree type)
8223 {
8224 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8225
8226 return build_method_type_directly (basetype,
8227 TREE_TYPE (type),
8228 TYPE_ARG_TYPES (type));
8229 }
8230
8231 /* Construct, lay out and return the type of offsets to a value
8232 of type TYPE, within an object of type BASETYPE.
8233 If a suitable offset type exists already, reuse it. */
8234
8235 tree
8236 build_offset_type (tree basetype, tree type)
8237 {
8238 tree t;
8239 inchash::hash hstate;
8240
8241 /* Make a node of the sort we want. */
8242 t = make_node (OFFSET_TYPE);
8243
8244 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8245 TREE_TYPE (t) = type;
8246
8247 /* If we already have such a type, use the old one. */
8248 hstate.add_object (TYPE_HASH (basetype));
8249 hstate.add_object (TYPE_HASH (type));
8250 t = type_hash_canon (hstate.end (), t);
8251
8252 if (!COMPLETE_TYPE_P (t))
8253 layout_type (t);
8254
8255 if (TYPE_CANONICAL (t) == t)
8256 {
8257 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8258 || TYPE_STRUCTURAL_EQUALITY_P (type))
8259 SET_TYPE_STRUCTURAL_EQUALITY (t);
8260 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8261 || TYPE_CANONICAL (type) != type)
8262 TYPE_CANONICAL (t)
8263 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8264 TYPE_CANONICAL (type));
8265 }
8266
8267 return t;
8268 }
8269
8270 /* Create a complex type whose components are COMPONENT_TYPE. */
8271
8272 tree
8273 build_complex_type (tree component_type)
8274 {
8275 tree t;
8276 inchash::hash hstate;
8277
8278 gcc_assert (INTEGRAL_TYPE_P (component_type)
8279 || SCALAR_FLOAT_TYPE_P (component_type)
8280 || FIXED_POINT_TYPE_P (component_type));
8281
8282 /* Make a node of the sort we want. */
8283 t = make_node (COMPLEX_TYPE);
8284
8285 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8286
8287 /* If we already have such a type, use the old one. */
8288 hstate.add_object (TYPE_HASH (component_type));
8289 t = type_hash_canon (hstate.end (), t);
8290
8291 if (!COMPLETE_TYPE_P (t))
8292 layout_type (t);
8293
8294 if (TYPE_CANONICAL (t) == t)
8295 {
8296 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8297 SET_TYPE_STRUCTURAL_EQUALITY (t);
8298 else if (TYPE_CANONICAL (component_type) != component_type)
8299 TYPE_CANONICAL (t)
8300 = build_complex_type (TYPE_CANONICAL (component_type));
8301 }
8302
8303 /* We need to create a name, since complex is a fundamental type. */
8304 if (! TYPE_NAME (t))
8305 {
8306 const char *name;
8307 if (component_type == char_type_node)
8308 name = "complex char";
8309 else if (component_type == signed_char_type_node)
8310 name = "complex signed char";
8311 else if (component_type == unsigned_char_type_node)
8312 name = "complex unsigned char";
8313 else if (component_type == short_integer_type_node)
8314 name = "complex short int";
8315 else if (component_type == short_unsigned_type_node)
8316 name = "complex short unsigned int";
8317 else if (component_type == integer_type_node)
8318 name = "complex int";
8319 else if (component_type == unsigned_type_node)
8320 name = "complex unsigned int";
8321 else if (component_type == long_integer_type_node)
8322 name = "complex long int";
8323 else if (component_type == long_unsigned_type_node)
8324 name = "complex long unsigned int";
8325 else if (component_type == long_long_integer_type_node)
8326 name = "complex long long int";
8327 else if (component_type == long_long_unsigned_type_node)
8328 name = "complex long long unsigned int";
8329 else
8330 name = 0;
8331
8332 if (name != 0)
8333 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8334 get_identifier (name), t);
8335 }
8336
8337 return build_qualified_type (t, TYPE_QUALS (component_type));
8338 }
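
/* For example, the C type "_Complex double" corresponds to

     tree cdouble = build_complex_type (double_type_node);

   which, thanks to the type hash, should normally be the same node as
   complex_double_type_node once the common tree nodes are set up.
   (The variable name is illustrative.)  */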
8339
8340 /* If TYPE is a real or complex floating-point type and the target
8341 does not directly support arithmetic on TYPE then return the wider
8342 type to be used for arithmetic on TYPE. Otherwise, return
8343 NULL_TREE. */
8344
8345 tree
8346 excess_precision_type (tree type)
8347 {
8348 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8349 {
8350 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8351 switch (TREE_CODE (type))
8352 {
8353 case REAL_TYPE:
8354 switch (flt_eval_method)
8355 {
8356 case 1:
8357 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8358 return double_type_node;
8359 break;
8360 case 2:
8361 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8362 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8363 return long_double_type_node;
8364 break;
8365 default:
8366 gcc_unreachable ();
8367 }
8368 break;
8369 case COMPLEX_TYPE:
8370 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8371 return NULL_TREE;
8372 switch (flt_eval_method)
8373 {
8374 case 1:
8375 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8376 return complex_double_type_node;
8377 break;
8378 case 2:
8379 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8380 || (TYPE_MODE (TREE_TYPE (type))
8381 == TYPE_MODE (double_type_node)))
8382 return complex_long_double_type_node;
8383 break;
8384 default:
8385 gcc_unreachable ();
8386 }
8387 break;
8388 default:
8389 break;
8390 }
8391 }
8392 return NULL_TREE;
8393 }
8394 \f
8395 /* Return OP, stripped of any conversions to wider types as much as is safe.
8396 Converting the value back to OP's type makes a value equivalent to OP.
8397
8398 If FOR_TYPE is nonzero, we return a value which, if converted to
8399 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8400
8401 OP must have integer, real or enumeral type. Pointers are not allowed!
8402
8403 There are some cases where the obvious value we could return
8404 would regenerate to OP if converted to OP's type,
8405 but would not extend like OP to wider types.
8406 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8407 For example, if OP is (unsigned short)(signed char)-1,
8408 we avoid returning (signed char)-1 if FOR_TYPE is int,
8409 even though extending that to an unsigned short would regenerate OP,
8410 since the result of extending (signed char)-1 to (int)
8411 is different from (int) OP. */
8412
8413 tree
8414 get_unwidened (tree op, tree for_type)
8415 {
8416 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8417 tree type = TREE_TYPE (op);
8418 unsigned final_prec
8419 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8420 int uns
8421 = (for_type != 0 && for_type != type
8422 && final_prec > TYPE_PRECISION (type)
8423 && TYPE_UNSIGNED (type));
8424 tree win = op;
8425
8426 while (CONVERT_EXPR_P (op))
8427 {
8428 int bitschange;
8429
8430 /* TYPE_PRECISION on vector types has different meaning
8431 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8432 so avoid them here. */
8433 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8434 break;
8435
8436 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8437 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8438
8439 /* Truncations are many-one so cannot be removed.
8440 Unless we are later going to truncate down even farther. */
8441 if (bitschange < 0
8442 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8443 break;
8444
8445 /* See what's inside this conversion. If we decide to strip it,
8446 we will set WIN. */
8447 op = TREE_OPERAND (op, 0);
8448
8449 /* If we have not stripped any zero-extensions (uns is 0),
8450 we can strip any kind of extension.
8451 If we have previously stripped a zero-extension,
8452 only zero-extensions can safely be stripped.
8453 Any extension can be stripped if the bits it would produce
8454 are all going to be discarded later by truncating to FOR_TYPE. */
8455
8456 if (bitschange > 0)
8457 {
8458 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8459 win = op;
8460 /* TYPE_UNSIGNED says whether this is a zero-extension.
8461 Let's avoid computing it if it does not affect WIN
8462 and if UNS will not be needed again. */
8463 if ((uns
8464 || CONVERT_EXPR_P (op))
8465 && TYPE_UNSIGNED (TREE_TYPE (op)))
8466 {
8467 uns = 1;
8468 win = op;
8469 }
8470 }
8471 }
8472
8473 /* If we finally reach a constant see if it fits in for_type and
8474 in that case convert it. */
8475 if (for_type
8476 && TREE_CODE (win) == INTEGER_CST
8477 && TREE_TYPE (win) != for_type
8478 && int_fits_type_p (win, for_type))
8479 win = fold_convert (for_type, win);
8480
8481 return win;
8482 }
8483 \f
8484 /* Return OP or a simpler expression for a narrower value
8485 which can be sign-extended or zero-extended to give back OP.
8486 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8487 or 0 if the value should be sign-extended. */
8488
8489 tree
8490 get_narrower (tree op, int *unsignedp_ptr)
8491 {
8492 int uns = 0;
8493 int first = 1;
8494 tree win = op;
8495 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8496
8497 while (TREE_CODE (op) == NOP_EXPR)
8498 {
8499 int bitschange
8500 = (TYPE_PRECISION (TREE_TYPE (op))
8501 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8502
8503 /* Truncations are many-one so cannot be removed. */
8504 if (bitschange < 0)
8505 break;
8506
8507 /* See what's inside this conversion. If we decide to strip it,
8508 we will set WIN. */
8509
8510 if (bitschange > 0)
8511 {
8512 op = TREE_OPERAND (op, 0);
8513 /* An extension: the outermost one can be stripped,
8514 but remember whether it is zero or sign extension. */
8515 if (first)
8516 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8517 /* Otherwise, if a sign extension has been stripped,
8518 only sign extensions can now be stripped;
8519 if a zero extension has been stripped, only zero-extensions. */
8520 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8521 break;
8522 first = 0;
8523 }
8524 else /* bitschange == 0 */
8525 {
8526 /* A change in nominal type can always be stripped, but we must
8527 preserve the unsignedness. */
8528 if (first)
8529 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8530 first = 0;
8531 op = TREE_OPERAND (op, 0);
8532 /* Keep trying to narrow, but don't assign op to win if it
8533 would turn an integral type into something else. */
8534 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8535 continue;
8536 }
8537
8538 win = op;
8539 }
8540
8541 if (TREE_CODE (op) == COMPONENT_REF
8542 /* Since type_for_size always gives an integer type. */
8543 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8544 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8545 /* Ensure field is laid out already. */
8546 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8547 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8548 {
8549 unsigned HOST_WIDE_INT innerprec
8550 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8551 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8552 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8553 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8554
8555 /* We can get this structure field in a narrower type that fits it,
8556 but the resulting extension to its nominal type (a fullword type)
8557 must satisfy the same conditions as for other extensions.
8558
8559 Do this only for fields that are aligned (not bit-fields),
8560 because there is no advantage in doing this when bit-field
8561 insns will be used. */
8562
8563 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8564 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8565 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8566 && type != 0)
8567 {
8568 if (first)
8569 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8570 win = fold_convert (type, op);
8571 }
8572 }
8573
8574 *unsignedp_ptr = uns;
8575 return win;
8576 }
8577 \f
8578 /* Returns true if integer constant C has a value that is permissible
8579 for type TYPE (an INTEGER_TYPE). */
8580
8581 bool
8582 int_fits_type_p (const_tree c, const_tree type)
8583 {
8584 tree type_low_bound, type_high_bound;
8585 bool ok_for_low_bound, ok_for_high_bound;
8586 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8587
8588 retry:
8589 type_low_bound = TYPE_MIN_VALUE (type);
8590 type_high_bound = TYPE_MAX_VALUE (type);
8591
8592 /* If at least one bound of the type is a constant integer, we can check
8593 ourselves and maybe make a decision. If no such decision is possible, but
8594 this type is a subtype, try checking against that. Otherwise, use
8595 fits_to_tree_p, which checks against the precision.
8596
8597 Compute the status for each possibly constant bound, and return if we see
8598 one does not match. Use ok_for_xxx_bound for this purpose: it is true
8599 iff the corresponding bound is an INTEGER_CST that C is known to satisfy;
8600 a constant bound that C violates causes an immediate false return. */
8601
8602 /* Check if c >= type_low_bound. */
8603 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8604 {
8605 if (tree_int_cst_lt (c, type_low_bound))
8606 return false;
8607 ok_for_low_bound = true;
8608 }
8609 else
8610 ok_for_low_bound = false;
8611
8612 /* Check if c <= type_high_bound. */
8613 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8614 {
8615 if (tree_int_cst_lt (type_high_bound, c))
8616 return false;
8617 ok_for_high_bound = true;
8618 }
8619 else
8620 ok_for_high_bound = false;
8621
8622 /* If the constant fits both bounds, the result is known. */
8623 if (ok_for_low_bound && ok_for_high_bound)
8624 return true;
8625
8626 /* Perform some generic filtering which may allow making a decision
8627 even if the bounds are not constant. First, negative integers
8628 never fit in unsigned types. */
8629 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8630 return false;
8631
8632 /* Second, narrower types always fit in wider ones. */
8633 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8634 return true;
8635
8636 /* Third, unsigned integers with top bit set never fit signed types. */
8637 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8638 {
8639 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8640 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8641 {
8642 /* When a tree_cst is converted to a wide-int, the precision
8643 is taken from the type. However, if the precision of the
8644 mode underneath the type is smaller than that, it is
8645 possible that the value will not fit. The test below
8646 fails if any bit is set between the sign bit of the
8647 underlying mode and the top bit of the type. */
8648 if (wi::ne_p (wi::zext (c, prec - 1), c))
8649 return false;
8650 }
8651 else if (wi::neg_p (c))
8652 return false;
8653 }
8654
8655 /* If we haven't been able to decide at this point, there is nothing more we
8656 can check ourselves here. Look at the base type if we have one and it
8657 has the same precision. */
8658 if (TREE_CODE (type) == INTEGER_TYPE
8659 && TREE_TYPE (type) != 0
8660 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8661 {
8662 type = TREE_TYPE (type);
8663 goto retry;
8664 }
8665
8666 /* Or to fits_to_tree_p, if nothing else. */
8667 return wi::fits_to_tree_p (c, type);
8668 }
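
/* Illustrative example, assuming the usual 8-bit range [0, 255] for
   unsigned char (the variable names are made up):

     tree c255 = build_int_cst (integer_type_node, 255);
     tree c300 = build_int_cst (integer_type_node, 300);

   int_fits_type_p (c255, unsigned_char_type_node) returns true, whereas
   int_fits_type_p (c300, unsigned_char_type_node) returns false.  */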
8669
8670 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8671 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8672 represented (assuming two's-complement arithmetic) within the bit
8673 precision of the type are returned instead. */
8674
8675 void
8676 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8677 {
8678 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8679 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8680 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8681 else
8682 {
8683 if (TYPE_UNSIGNED (type))
8684 mpz_set_ui (min, 0);
8685 else
8686 {
8687 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8688 wi::to_mpz (mn, min, SIGNED);
8689 }
8690 }
8691
8692 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8693 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8694 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8695 else
8696 {
8697 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8698 wi::to_mpz (mn, max, TYPE_SIGN (type));
8699 }
8700 }
8701
8702 /* Return true if VAR is an automatic variable defined in function FN. */
8703
8704 bool
8705 auto_var_in_fn_p (const_tree var, const_tree fn)
8706 {
8707 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8708 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8709 || TREE_CODE (var) == PARM_DECL)
8710 && ! TREE_STATIC (var))
8711 || TREE_CODE (var) == LABEL_DECL
8712 || TREE_CODE (var) == RESULT_DECL));
8713 }
8714
8715 /* Subprogram of following function. Called by walk_tree.
8716
8717 Return *TP if it is an automatic variable or parameter of the
8718 function passed in as DATA. */
8719
8720 static tree
8721 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8722 {
8723 tree fn = (tree) data;
8724
8725 if (TYPE_P (*tp))
8726 *walk_subtrees = 0;
8727
8728 else if (DECL_P (*tp)
8729 && auto_var_in_fn_p (*tp, fn))
8730 return *tp;
8731
8732 return NULL_TREE;
8733 }
8734
8735 /* Returns true if T is, contains, or refers to a type with variable
8736 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8737 arguments, but not the return type. If FN is nonzero, only return
8738 true if a modifier of the type or position of FN is a variable or
8739 parameter inside FN.
8740
8741 This concept is more general than that of C99 'variably modified types':
8742 in C99, a struct type is never variably modified because a VLA may not
8743 appear as a structure member. However, in GNU C, code like:
8744
8745 struct S { int i[f()]; };
8746
8747 is valid, and other languages may define similar constructs. */
8748
8749 bool
8750 variably_modified_type_p (tree type, tree fn)
8751 {
8752 tree t;
8753
8754 /* Test if T is either variable (if FN is zero) or an expression containing
8755 a variable in FN. If TYPE isn't gimplified, return true also if
8756 gimplify_one_sizepos would gimplify the expression into a local
8757 variable. */
8758 #define RETURN_TRUE_IF_VAR(T) \
8759 do { tree _t = (T); \
8760 if (_t != NULL_TREE \
8761 && _t != error_mark_node \
8762 && TREE_CODE (_t) != INTEGER_CST \
8763 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8764 && (!fn \
8765 || (!TYPE_SIZES_GIMPLIFIED (type) \
8766 && !is_gimple_sizepos (_t)) \
8767 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8768 return true; } while (0)
8769
8770 if (type == error_mark_node)
8771 return false;
8772
8773 /* If TYPE itself has variable size, it is variably modified. */
8774 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8775 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8776
8777 switch (TREE_CODE (type))
8778 {
8779 case POINTER_TYPE:
8780 case REFERENCE_TYPE:
8781 case VECTOR_TYPE:
8782 if (variably_modified_type_p (TREE_TYPE (type), fn))
8783 return true;
8784 break;
8785
8786 case FUNCTION_TYPE:
8787 case METHOD_TYPE:
8788 /* If TYPE is a function type, it is variably modified if the
8789 return type is variably modified. */
8790 if (variably_modified_type_p (TREE_TYPE (type), fn))
8791 return true;
8792 break;
8793
8794 case INTEGER_TYPE:
8795 case REAL_TYPE:
8796 case FIXED_POINT_TYPE:
8797 case ENUMERAL_TYPE:
8798 case BOOLEAN_TYPE:
8799 /* Scalar types are variably modified if their end points
8800 aren't constant. */
8801 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8802 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8803 break;
8804
8805 case RECORD_TYPE:
8806 case UNION_TYPE:
8807 case QUAL_UNION_TYPE:
8808 /* We can't see if any of the fields are variably-modified by the
8809 definition we normally use, since that would produce infinite
8810 recursion via pointers. */
8811 /* This is variably modified if some field's type is. */
8812 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8813 if (TREE_CODE (t) == FIELD_DECL)
8814 {
8815 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8816 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8817 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8818
8819 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8820 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8821 }
8822 break;
8823
8824 case ARRAY_TYPE:
8825 /* Do not call ourselves to avoid infinite recursion. This is
8826 variably modified if the element type is. */
8827 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8828 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8829 break;
8830
8831 default:
8832 break;
8833 }
8834
8835 /* The current language may have other cases to check, but in general,
8836 all other types are not variably modified. */
8837 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8838
8839 #undef RETURN_TRUE_IF_VAR
8840 }
8841
8842 /* Given a DECL or TYPE, return the scope in which it was declared, or
8843 NULL_TREE if there is no containing scope. */
8844
8845 tree
8846 get_containing_scope (const_tree t)
8847 {
8848 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8849 }
8850
8851 /* Return the innermost context enclosing DECL that is
8852 a FUNCTION_DECL, or zero if none. */
8853
8854 tree
8855 decl_function_context (const_tree decl)
8856 {
8857 tree context;
8858
8859 if (TREE_CODE (decl) == ERROR_MARK)
8860 return 0;
8861
8862 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8863 where we look up the function at runtime. Such functions always take
8864 a first argument of type 'pointer to real context'.
8865
8866 C++ should really be fixed to use DECL_CONTEXT for the real context,
8867 and use something else for the "virtual context". */
8868 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8869 context
8870 = TYPE_MAIN_VARIANT
8871 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8872 else
8873 context = DECL_CONTEXT (decl);
8874
8875 while (context && TREE_CODE (context) != FUNCTION_DECL)
8876 {
8877 if (TREE_CODE (context) == BLOCK)
8878 context = BLOCK_SUPERCONTEXT (context);
8879 else
8880 context = get_containing_scope (context);
8881 }
8882
8883 return context;
8884 }
8885
8886 /* Return the innermost context enclosing DECL that is
8887 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8888 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8889
8890 tree
8891 decl_type_context (const_tree decl)
8892 {
8893 tree context = DECL_CONTEXT (decl);
8894
8895 while (context)
8896 switch (TREE_CODE (context))
8897 {
8898 case NAMESPACE_DECL:
8899 case TRANSLATION_UNIT_DECL:
8900 return NULL_TREE;
8901
8902 case RECORD_TYPE:
8903 case UNION_TYPE:
8904 case QUAL_UNION_TYPE:
8905 return context;
8906
8907 case TYPE_DECL:
8908 case FUNCTION_DECL:
8909 context = DECL_CONTEXT (context);
8910 break;
8911
8912 case BLOCK:
8913 context = BLOCK_SUPERCONTEXT (context);
8914 break;
8915
8916 default:
8917 gcc_unreachable ();
8918 }
8919
8920 return NULL_TREE;
8921 }
8922
8923 /* CALL is a CALL_EXPR. Return the declaration for the function
8924 called, or NULL_TREE if the called function cannot be
8925 determined. */
8926
8927 tree
8928 get_callee_fndecl (const_tree call)
8929 {
8930 tree addr;
8931
8932 if (call == error_mark_node)
8933 return error_mark_node;
8934
8935 /* It's invalid to call this function with anything but a
8936 CALL_EXPR. */
8937 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8938
8939 /* The first operand to the CALL is the address of the function
8940 called. */
8941 addr = CALL_EXPR_FN (call);
8942
8943 /* If there is no function, return early. */
8944 if (addr == NULL_TREE)
8945 return NULL_TREE;
8946
8947 STRIP_NOPS (addr);
8948
8949 /* If this is a readonly function pointer, extract its initial value. */
8950 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8951 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8952 && DECL_INITIAL (addr))
8953 addr = DECL_INITIAL (addr);
8954
8955 /* If the address is just `&f' for some function `f', then we know
8956 that `f' is being called. */
8957 if (TREE_CODE (addr) == ADDR_EXPR
8958 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8959 return TREE_OPERAND (addr, 0);
8960
8961 /* We couldn't figure out what was being called. */
8962 return NULL_TREE;
8963 }
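
/* As an illustration, if FNDECL stands for some FUNCTION_DECL of a
   function taking a single integer argument (a made-up placeholder), then
   a call expression built directly from it,

     tree call = build_call_expr (fndecl, 1, integer_zero_node);

   has an ADDR_EXPR of FNDECL as its CALL_EXPR_FN, so
   get_callee_fndecl (call) returns FNDECL.  */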
8964
8965 /* Print debugging information about tree nodes generated during the compile,
8966 and any language-specific information. */
8967
8968 void
8969 dump_tree_statistics (void)
8970 {
8971 if (GATHER_STATISTICS)
8972 {
8973 int i;
8974 int total_nodes, total_bytes;
8975 fprintf (stderr, "Kind Nodes Bytes\n");
8976 fprintf (stderr, "---------------------------------------\n");
8977 total_nodes = total_bytes = 0;
8978 for (i = 0; i < (int) all_kinds; i++)
8979 {
8980 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8981 tree_node_counts[i], tree_node_sizes[i]);
8982 total_nodes += tree_node_counts[i];
8983 total_bytes += tree_node_sizes[i];
8984 }
8985 fprintf (stderr, "---------------------------------------\n");
8986 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8987 fprintf (stderr, "---------------------------------------\n");
8988 fprintf (stderr, "Code Nodes\n");
8989 fprintf (stderr, "----------------------------\n");
8990 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8991 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8992 tree_code_counts[i]);
8993 fprintf (stderr, "----------------------------\n");
8994 ssanames_print_statistics ();
8995 phinodes_print_statistics ();
8996 }
8997 else
8998 fprintf (stderr, "(No per-node statistics)\n");
8999
9000 print_type_hash_statistics ();
9001 print_debug_expr_statistics ();
9002 print_value_expr_statistics ();
9003 lang_hooks.print_statistics ();
9004 }
9005 \f
9006 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9007
9008 /* Generate a crc32 of the top BITS bits of VALUE, folded into CHKSUM. */
9009
9010 static unsigned
9011 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9012 {
9013 unsigned ix;
9014
9015 for (ix = bits; ix--; value <<= 1)
9016 {
9017 unsigned feedback;
9018
9019 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9020 chksum <<= 1;
9021 chksum ^= feedback;
9022 }
9023 return chksum;
9024 }
9025
9026 /* Generate a crc32 of a 32-bit unsigned. */
9027
9028 unsigned
9029 crc32_unsigned (unsigned chksum, unsigned value)
9030 {
9031 return crc32_unsigned_bits (chksum, value, 32);
9032 }
9033
9034 /* Generate a crc32 of a byte. */
9035
9036 unsigned
9037 crc32_byte (unsigned chksum, char byte)
9038 {
9039 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9040 }
9041
9042 /* Generate a crc32 of a string. */
9043
9044 unsigned
9045 crc32_string (unsigned chksum, const char *string)
9046 {
9047 do
9048 {
9049 chksum = crc32_byte (chksum, *string);
9050 }
9051 while (*string++);
9052 return chksum;
9053 }
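
/* As an illustrative sketch (the values are arbitrary), a checksum over a
   name and a 32-bit value can be accumulated by chaining these helpers:

     unsigned chksum = crc32_string (0, "some_symbol_name");
     chksum = crc32_unsigned (chksum, 0xdeadbeef);

   Note that crc32_string also folds the terminating NUL byte of the
   string into the checksum.  */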
9054
9055 /* P is a string that will be used in a symbol. Mask out any characters
9056 that are not valid in that context. */
9057
9058 void
9059 clean_symbol_name (char *p)
9060 {
9061 for (; *p; p++)
9062 if (! (ISALNUM (*p)
9063 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9064 || *p == '$'
9065 #endif
9066 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9067 || *p == '.'
9068 #endif
9069 ))
9070 *p = '_';
9071 }
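
/* Illustrative example: given a writable copy such as

     char buf[] = "foo-bar.c";
     clean_symbol_name (buf);

   the '-' always becomes '_', while the '.' is kept or replaced depending
   on whether the target defines NO_DOT_IN_LABEL.  */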
9072
9073 /* Generate a name for a special-purpose function.
9074 The generated name may need to be unique across the whole link.
9075 Changes to this function may also require corresponding changes to
9076 xstrdup_mask_random.
9077 TYPE is some string to identify the purpose of this function to the
9078 linker or collect2; it must start with an uppercase letter,
9079 one of:
9080 I - for constructors
9081 D - for destructors
9082 N - for C++ anonymous namespaces
9083 F - for DWARF unwind frame information. */
9084
9085 tree
9086 get_file_function_name (const char *type)
9087 {
9088 char *buf;
9089 const char *p;
9090 char *q;
9091
9092 /* If we already have a name we know to be unique, just use that. */
9093 if (first_global_object_name)
9094 p = q = ASTRDUP (first_global_object_name);
9095 /* If the target is handling the constructors/destructors, they
9096 will be local to this file and the name is only necessary for
9097 debugging purposes.
9098 We also assign sub_I and sub_D suffixes to constructors called from
9099 the global static constructors. These are always local. */
9100 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9101 || (strncmp (type, "sub_", 4) == 0
9102 && (type[4] == 'I' || type[4] == 'D')))
9103 {
9104 const char *file = main_input_filename;
9105 if (! file)
9106 file = LOCATION_FILE (input_location);
9107 /* Just use the file's basename, because the full pathname
9108 might be quite long. */
9109 p = q = ASTRDUP (lbasename (file));
9110 }
9111 else
9112 {
9113 /* Otherwise, the name must be unique across the entire link.
9114 We don't have anything that we know to be unique to this translation
9115 unit, so use what we do have and throw in some randomness. */
9116 unsigned len;
9117 const char *name = weak_global_object_name;
9118 const char *file = main_input_filename;
9119
9120 if (! name)
9121 name = "";
9122 if (! file)
9123 file = LOCATION_FILE (input_location);
9124
9125 len = strlen (file);
9126 q = (char *) alloca (9 + 17 + len + 1);
9127 memcpy (q, file, len + 1);
9128
9129 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9130 crc32_string (0, name), get_random_seed (false));
9131
9132 p = q;
9133 }
9134
9135 clean_symbol_name (q);
9136 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9137 + strlen (type));
9138
9139 /* Set up the name of the file-level functions we may need.
9140 Use a global object (which is already required to be unique over
9141 the program) rather than the file name (which imposes extra
9142 constraints). */
9143 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9144
9145 return get_identifier (buf);
9146 }
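
/* For example, the identifier for a file-level constructor is obtained with

     tree id = get_file_function_name ("I");

   which, via FILE_FUNCTION_FORMAT, yields a name of the form
   "_GLOBAL__I_<name>", where <name> comes from the first global object
   name, the input file's basename, or the randomized fallback described
   above.  (The variable name is illustrative.)  */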
9147 \f
9148 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9149
9150 /* Complain that the tree code of NODE does not match the expected 0
9151 terminated list of trailing codes. The trailing code list can be
9152 empty, for a more vague error message. FILE, LINE, and FUNCTION
9153 are of the caller. */
9154
9155 void
9156 tree_check_failed (const_tree node, const char *file,
9157 int line, const char *function, ...)
9158 {
9159 va_list args;
9160 const char *buffer;
9161 unsigned length = 0;
9162 enum tree_code code;
9163
9164 va_start (args, function);
9165 while ((code = (enum tree_code) va_arg (args, int)))
9166 length += 4 + strlen (get_tree_code_name (code));
9167 va_end (args);
9168 if (length)
9169 {
9170 char *tmp;
9171 va_start (args, function);
9172 length += strlen ("expected ");
9173 buffer = tmp = (char *) alloca (length);
9174 length = 0;
9175 while ((code = (enum tree_code) va_arg (args, int)))
9176 {
9177 const char *prefix = length ? " or " : "expected ";
9178
9179 strcpy (tmp + length, prefix);
9180 length += strlen (prefix);
9181 strcpy (tmp + length, get_tree_code_name (code));
9182 length += strlen (get_tree_code_name (code));
9183 }
9184 va_end (args);
9185 }
9186 else
9187 buffer = "unexpected node";
9188
9189 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9190 buffer, get_tree_code_name (TREE_CODE (node)),
9191 function, trim_filename (file), line);
9192 }
9193
9194 /* Complain that the tree code of NODE matches one of the 0 terminated
9195 list of prohibited trailing codes. FILE, LINE, and FUNCTION are of
9196 the caller. */
9197
9198 void
9199 tree_not_check_failed (const_tree node, const char *file,
9200 int line, const char *function, ...)
9201 {
9202 va_list args;
9203 char *buffer;
9204 unsigned length = 0;
9205 enum tree_code code;
9206
9207 va_start (args, function);
9208 while ((code = (enum tree_code) va_arg (args, int)))
9209 length += 4 + strlen (get_tree_code_name (code));
9210 va_end (args);
9211 va_start (args, function);
9212 buffer = (char *) alloca (length);
9213 length = 0;
9214 while ((code = (enum tree_code) va_arg (args, int)))
9215 {
9216 if (length)
9217 {
9218 strcpy (buffer + length, " or ");
9219 length += 4;
9220 }
9221 strcpy (buffer + length, get_tree_code_name (code));
9222 length += strlen (get_tree_code_name (code));
9223 }
9224 va_end (args);
9225
9226 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9227 buffer, get_tree_code_name (TREE_CODE (node)),
9228 function, trim_filename (file), line);
9229 }
9230
9231 /* Similar to tree_check_failed, except that we check for a class of tree
9232 code, given in CL. */
9233
9234 void
9235 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9236 const char *file, int line, const char *function)
9237 {
9238 internal_error
9239 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9240 TREE_CODE_CLASS_STRING (cl),
9241 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9242 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9243 }
9244
9245 /* Similar to tree_check_failed, except that instead of specifying a
9246 dozen codes, use the knowledge that they're all sequential. */
9247
9248 void
9249 tree_range_check_failed (const_tree node, const char *file, int line,
9250 const char *function, enum tree_code c1,
9251 enum tree_code c2)
9252 {
9253 char *buffer;
9254 unsigned length = 0;
9255 unsigned int c;
9256
9257 for (c = c1; c <= c2; ++c)
9258 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9259
9260 length += strlen ("expected ");
9261 buffer = (char *) alloca (length);
9262 length = 0;
9263
9264 for (c = c1; c <= c2; ++c)
9265 {
9266 const char *prefix = length ? " or " : "expected ";
9267
9268 strcpy (buffer + length, prefix);
9269 length += strlen (prefix);
9270 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9271 length += strlen (get_tree_code_name ((enum tree_code) c));
9272 }
9273
9274 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9275 buffer, get_tree_code_name (TREE_CODE (node)),
9276 function, trim_filename (file), line);
9277 }
9278
9279
9280 /* Similar to tree_check_failed, except that we check that a tree does
9281 not belong to the specified class, given in CL. */
9282
9283 void
9284 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9285 const char *file, int line, const char *function)
9286 {
9287 internal_error
9288 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9289 TREE_CODE_CLASS_STRING (cl),
9290 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9291 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9292 }
9293
9294
9295 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9296
9297 void
9298 omp_clause_check_failed (const_tree node, const char *file, int line,
9299 const char *function, enum omp_clause_code code)
9300 {
9301 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9302 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9303 function, trim_filename (file), line);
9304 }
9305
9306
9307 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9308
9309 void
9310 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9311 const char *function, enum omp_clause_code c1,
9312 enum omp_clause_code c2)
9313 {
9314 char *buffer;
9315 unsigned length = 0;
9316 unsigned int c;
9317
9318 for (c = c1; c <= c2; ++c)
9319 length += 4 + strlen (omp_clause_code_name[c]);
9320
9321 length += strlen ("expected ");
9322 buffer = (char *) alloca (length);
9323 length = 0;
9324
9325 for (c = c1; c <= c2; ++c)
9326 {
9327 const char *prefix = length ? " or " : "expected ";
9328
9329 strcpy (buffer + length, prefix);
9330 length += strlen (prefix);
9331 strcpy (buffer + length, omp_clause_code_name[c]);
9332 length += strlen (omp_clause_code_name[c]);
9333 }
9334
9335 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9336 buffer, omp_clause_code_name[TREE_CODE (node)],
9337 function, trim_filename (file), line);
9338 }
9339
9340
9341 #undef DEFTREESTRUCT
9342 #define DEFTREESTRUCT(VAL, NAME) NAME,
9343
9344 static const char *ts_enum_names[] = {
9345 #include "treestruct.def"
9346 };
9347 #undef DEFTREESTRUCT
9348
9349 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9350
9351 /* Similar to tree_class_check_failed, except that we check for
9352 whether CODE contains the tree structure identified by EN. */
9353
9354 void
9355 tree_contains_struct_check_failed (const_tree node,
9356 const enum tree_node_structure_enum en,
9357 const char *file, int line,
9358 const char *function)
9359 {
9360 internal_error
9361 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9362 TS_ENUM_NAME (en),
9363 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9364 }
9365
9366
9367 /* Similar to above, except that the check is for the bounds of a
9368 TREE_INT_CST's (dynamically sized) element vector. */
9369
9370 void
9371 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9372 const char *function)
9373 {
9374 internal_error
9375 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9376 idx + 1, len, function, trim_filename (file), line);
9377 }
9378
9379 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9380 (dynamically sized) vector. */
9381
9382 void
9383 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9384 const char *function)
9385 {
9386 internal_error
9387 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9388 idx + 1, len, function, trim_filename (file), line);
9389 }
9390
9391 /* Similar to above, except that the check is for the bounds of the operand
9392 vector of an expression node EXP. */
9393
9394 void
9395 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9396 int line, const char *function)
9397 {
9398 enum tree_code code = TREE_CODE (exp);
9399 internal_error
9400 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9401 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9402 function, trim_filename (file), line);
9403 }
9404
9405 /* Similar to above, except that the check is for the number of
9406 operands of an OMP_CLAUSE node. */
9407
9408 void
9409 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9410 int line, const char *function)
9411 {
9412 internal_error
9413 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9414 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9415 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9416 trim_filename (file), line);
9417 }
9418 #endif /* ENABLE_TREE_CHECKING */
9419 \f
9420 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9421 and mapped to the machine mode MODE. Initialize its fields and build
9422 the information necessary for debugging output. */
9423
9424 static tree
9425 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9426 {
9427 tree t;
9428 inchash::hash hstate;
9429
9430 t = make_node (VECTOR_TYPE);
9431 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9432 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9433 SET_TYPE_MODE (t, mode);
9434
9435 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9436 SET_TYPE_STRUCTURAL_EQUALITY (t);
9437 else if (TYPE_CANONICAL (innertype) != innertype
9438 || mode != VOIDmode)
9439 TYPE_CANONICAL (t)
9440 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9441
9442 layout_type (t);
9443
9444 hstate.add_wide_int (VECTOR_TYPE);
9445 hstate.add_wide_int (nunits);
9446 hstate.add_wide_int (mode);
9447 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9448 t = type_hash_canon (hstate.end (), t);
9449
9450 /* We have built a main variant, based on the main variant of the
9451 inner type. Use it to build the variant we return. */
9452 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9453 && TREE_TYPE (t) != innertype)
9454 return build_type_attribute_qual_variant (t,
9455 TYPE_ATTRIBUTES (innertype),
9456 TYPE_QUALS (innertype));
9457
9458 return t;
9459 }
9460
9461 static tree
9462 make_or_reuse_type (unsigned size, int unsignedp)
9463 {
9464 int i;
9465
9466 if (size == INT_TYPE_SIZE)
9467 return unsignedp ? unsigned_type_node : integer_type_node;
9468 if (size == CHAR_TYPE_SIZE)
9469 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9470 if (size == SHORT_TYPE_SIZE)
9471 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9472 if (size == LONG_TYPE_SIZE)
9473 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9474 if (size == LONG_LONG_TYPE_SIZE)
9475 return (unsignedp ? long_long_unsigned_type_node
9476 : long_long_integer_type_node);
9477
9478 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9479 if (size == int_n_data[i].bitsize
9480 && int_n_enabled_p[i])
9481 return (unsignedp ? int_n_trees[i].unsigned_type
9482 : int_n_trees[i].signed_type);
9483
9484 if (unsignedp)
9485 return make_unsigned_type (size);
9486 else
9487 return make_signed_type (size);
9488 }
9489
9490 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9491
9492 static tree
9493 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9494 {
9495 if (satp)
9496 {
9497 if (size == SHORT_FRACT_TYPE_SIZE)
9498 return unsignedp ? sat_unsigned_short_fract_type_node
9499 : sat_short_fract_type_node;
9500 if (size == FRACT_TYPE_SIZE)
9501 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9502 if (size == LONG_FRACT_TYPE_SIZE)
9503 return unsignedp ? sat_unsigned_long_fract_type_node
9504 : sat_long_fract_type_node;
9505 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9506 return unsignedp ? sat_unsigned_long_long_fract_type_node
9507 : sat_long_long_fract_type_node;
9508 }
9509 else
9510 {
9511 if (size == SHORT_FRACT_TYPE_SIZE)
9512 return unsignedp ? unsigned_short_fract_type_node
9513 : short_fract_type_node;
9514 if (size == FRACT_TYPE_SIZE)
9515 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9516 if (size == LONG_FRACT_TYPE_SIZE)
9517 return unsignedp ? unsigned_long_fract_type_node
9518 : long_fract_type_node;
9519 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9520 return unsignedp ? unsigned_long_long_fract_type_node
9521 : long_long_fract_type_node;
9522 }
9523
9524 return make_fract_type (size, unsignedp, satp);
9525 }
9526
9527 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9528
9529 static tree
9530 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9531 {
9532 if (satp)
9533 {
9534 if (size == SHORT_ACCUM_TYPE_SIZE)
9535 return unsignedp ? sat_unsigned_short_accum_type_node
9536 : sat_short_accum_type_node;
9537 if (size == ACCUM_TYPE_SIZE)
9538 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9539 if (size == LONG_ACCUM_TYPE_SIZE)
9540 return unsignedp ? sat_unsigned_long_accum_type_node
9541 : sat_long_accum_type_node;
9542 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9543 return unsignedp ? sat_unsigned_long_long_accum_type_node
9544 : sat_long_long_accum_type_node;
9545 }
9546 else
9547 {
9548 if (size == SHORT_ACCUM_TYPE_SIZE)
9549 return unsignedp ? unsigned_short_accum_type_node
9550 : short_accum_type_node;
9551 if (size == ACCUM_TYPE_SIZE)
9552 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9553 if (size == LONG_ACCUM_TYPE_SIZE)
9554 return unsignedp ? unsigned_long_accum_type_node
9555 : long_accum_type_node;
9556 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9557 return unsignedp ? unsigned_long_long_accum_type_node
9558 : long_long_accum_type_node;
9559 }
9560
9561 return make_accum_type (size, unsignedp, satp);
9562 }
9563
9564
9565 /* Create an atomic variant node for TYPE. This routine is called
9566 during initialization of data types to create the 5 basic atomic
9567 types. The generic build_variant_type function requires these to
9568 already be set up in order to function properly, so cannot be
9569 called from there. If ALIGN is non-zero, then ensure alignment is
9570 overridden to this value. */
9571
9572 static tree
9573 build_atomic_base (tree type, unsigned int align)
9574 {
9575 tree t;
9576
9577 /* Make sure it's not already registered. */
9578 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9579 return t;
9580
9581 t = build_variant_type_copy (type);
9582 set_type_quals (t, TYPE_QUAL_ATOMIC);
9583
9584 if (align)
9585 TYPE_ALIGN (t) = align;
9586
9587 return t;
9588 }
9589
9590 /* Create nodes for all integer types (and error_mark_node) using the sizes
9591 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9592 SHORT_DOUBLE specifies whether double should be of the same precision
9593 as float. */
9594
9595 void
9596 build_common_tree_nodes (bool signed_char, bool short_double)
9597 {
9598 int i;
9599
9600 error_mark_node = make_node (ERROR_MARK);
9601 TREE_TYPE (error_mark_node) = error_mark_node;
9602
9603 initialize_sizetypes ();
9604
9605 /* Define both `signed char' and `unsigned char'. */
9606 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9607 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9608 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9609 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9610
9611 /* Define `char', which is like either `signed char' or `unsigned char'
9612 but not the same as either. */
9613 char_type_node
9614 = (signed_char
9615 ? make_signed_type (CHAR_TYPE_SIZE)
9616 : make_unsigned_type (CHAR_TYPE_SIZE));
9617 TYPE_STRING_FLAG (char_type_node) = 1;
9618
9619 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9620 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9621 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9622 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9623 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9624 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9625 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9626 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9627
9628 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9629 {
9630 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9631 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9632 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9633 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9634
9635 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9636 && int_n_enabled_p[i])
9637 {
9638 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9639 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9640 }
9641 }
9642
9643 /* Define a boolean type. This type only represents boolean values but
9644 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9645 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9646 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9647 TYPE_PRECISION (boolean_type_node) = 1;
9648 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9649
9650 /* Define what type to use for size_t. */
9651 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9652 size_type_node = unsigned_type_node;
9653 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9654 size_type_node = long_unsigned_type_node;
9655 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9656 size_type_node = long_long_unsigned_type_node;
9657 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9658 size_type_node = short_unsigned_type_node;
9659 else
9660 {
9661 int i;
9662
9663 size_type_node = NULL_TREE;
9664 for (i = 0; i < NUM_INT_N_ENTS; i++)
9665 if (int_n_enabled_p[i])
9666 {
9667 char name[50];
9668 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9669
9670 if (strcmp (name, SIZE_TYPE) == 0)
9671 {
9672 size_type_node = int_n_trees[i].unsigned_type;
9673 }
9674 }
9675 if (size_type_node == NULL_TREE)
9676 gcc_unreachable ();
9677 }
9678
9679 /* Fill in the rest of the sized types. Reuse existing type nodes
9680 when possible. */
9681 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9682 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9683 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9684 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9685 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9686
9687 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9688 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9689 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9690 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9691 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9692
9693 /* Don't call build_qualified_type for atomics. That routine does
9694 special processing for atomics, and until they are initialized
9695 it's better not to make that call.
9696
9697 Check to see if there is a target override for atomic types. */
9698
9699 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9700 targetm.atomic_align_for_mode (QImode));
9701 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9702 targetm.atomic_align_for_mode (HImode));
9703 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9704 targetm.atomic_align_for_mode (SImode));
9705 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9706 targetm.atomic_align_for_mode (DImode));
9707 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9708 targetm.atomic_align_for_mode (TImode));
9709
9710 access_public_node = get_identifier ("public");
9711 access_protected_node = get_identifier ("protected");
9712 access_private_node = get_identifier ("private");
9713
9714 /* Define these next since types below may use them. */
9715 integer_zero_node = build_int_cst (integer_type_node, 0);
9716 integer_one_node = build_int_cst (integer_type_node, 1);
9717 integer_three_node = build_int_cst (integer_type_node, 3);
9718 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9719
9720 size_zero_node = size_int (0);
9721 size_one_node = size_int (1);
9722 bitsize_zero_node = bitsize_int (0);
9723 bitsize_one_node = bitsize_int (1);
9724 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9725
9726 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9727 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9728
9729 void_type_node = make_node (VOID_TYPE);
9730 layout_type (void_type_node);
9731
9732 /* We are not going to have real types in C with less than byte alignment,
9733 so we might as well not have any types that claim to have it. */
9734 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9735 TYPE_USER_ALIGN (void_type_node) = 0;
9736
9737 void_node = make_node (VOID_CST);
9738 TREE_TYPE (void_node) = void_type_node;
9739
9740 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9741 layout_type (TREE_TYPE (null_pointer_node));
9742
9743 ptr_type_node = build_pointer_type (void_type_node);
9744 const_ptr_type_node
9745 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9746 fileptr_type_node = ptr_type_node;
9747
9748 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9749
9750 float_type_node = make_node (REAL_TYPE);
9751 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9752 layout_type (float_type_node);
9753
9754 double_type_node = make_node (REAL_TYPE);
9755 if (short_double)
9756 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9757 else
9758 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9759 layout_type (double_type_node);
9760
9761 long_double_type_node = make_node (REAL_TYPE);
9762 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9763 layout_type (long_double_type_node);
9764
9765 float_ptr_type_node = build_pointer_type (float_type_node);
9766 double_ptr_type_node = build_pointer_type (double_type_node);
9767 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9768 integer_ptr_type_node = build_pointer_type (integer_type_node);
9769
9770 /* Fixed size integer types. */
9771 uint16_type_node = make_or_reuse_type (16, 1);
9772 uint32_type_node = make_or_reuse_type (32, 1);
9773 uint64_type_node = make_or_reuse_type (64, 1);
9774
9775 /* Decimal float types. */
9776 dfloat32_type_node = make_node (REAL_TYPE);
9777 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9778 layout_type (dfloat32_type_node);
9779 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9780 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9781
9782 dfloat64_type_node = make_node (REAL_TYPE);
9783 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9784 layout_type (dfloat64_type_node);
9785 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9786 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9787
9788 dfloat128_type_node = make_node (REAL_TYPE);
9789 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9790 layout_type (dfloat128_type_node);
9791 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9792 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9793
9794 complex_integer_type_node = build_complex_type (integer_type_node);
9795 complex_float_type_node = build_complex_type (float_type_node);
9796 complex_double_type_node = build_complex_type (double_type_node);
9797 complex_long_double_type_node = build_complex_type (long_double_type_node);
9798
9799 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9800 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9801 sat_ ## KIND ## _type_node = \
9802 make_sat_signed_ ## KIND ## _type (SIZE); \
9803 sat_unsigned_ ## KIND ## _type_node = \
9804 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9805 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9806 unsigned_ ## KIND ## _type_node = \
9807 make_unsigned_ ## KIND ## _type (SIZE);
9808
9809 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9810 sat_ ## WIDTH ## KIND ## _type_node = \
9811 make_sat_signed_ ## KIND ## _type (SIZE); \
9812 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9813 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9814 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9815 unsigned_ ## WIDTH ## KIND ## _type_node = \
9816 make_unsigned_ ## KIND ## _type (SIZE);
9817
9818 /* Make fixed-point type nodes based on four different widths. */
9819 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9820 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9821 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9822 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9823 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9824
9825 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9826 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9827 NAME ## _type_node = \
9828 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9829 u ## NAME ## _type_node = \
9830 make_or_reuse_unsigned_ ## KIND ## _type \
9831 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9832 sat_ ## NAME ## _type_node = \
9833 make_or_reuse_sat_signed_ ## KIND ## _type \
9834 (GET_MODE_BITSIZE (MODE ## mode)); \
9835 sat_u ## NAME ## _type_node = \
9836 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9837 (GET_MODE_BITSIZE (U ## MODE ## mode));
9838
9839 /* Fixed-point type and mode nodes. */
9840 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9841 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9842 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9843 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9844 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9845 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9846 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9847 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9848 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9849 MAKE_FIXED_MODE_NODE (accum, da, DA)
9850 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9851
9852 {
9853 tree t = targetm.build_builtin_va_list ();
9854
9855 /* Many back-ends define record types without setting TYPE_NAME.
9856 If we copied the record type here, we'd keep the original
9857 record type without a name. This breaks name mangling. So,
9858 don't copy record types and let c_common_nodes_and_builtins()
9859 declare the type to be __builtin_va_list. */
9860 if (TREE_CODE (t) != RECORD_TYPE)
9861 t = build_variant_type_copy (t);
9862
9863 va_list_type_node = t;
9864 }
9865 }
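
/* A minimal initialization sketch, assuming it runs once from a front end
   before any *_type_node global is used; the flag values are illustrative
   only (unsigned plain char, full-precision double).  */
#if 0
static void
build_common_tree_nodes_sketch (void)
{
  build_common_tree_nodes (false, false);
  /* The standard nodes are now usable, e.g. for building constants.  */
  tree one = build_int_cst (integer_type_node, 1);
  gcc_assert (integer_onep (one) && size_type_node != NULL_TREE);
}
#endif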
9866
9867 /* Modify DECL for given flags.
9868 TM_PURE attribute is set only on types, so the function will modify
9869 DECL's type when ECF_TM_PURE is used. */
9870
9871 void
9872 set_call_expr_flags (tree decl, int flags)
9873 {
9874 if (flags & ECF_NOTHROW)
9875 TREE_NOTHROW (decl) = 1;
9876 if (flags & ECF_CONST)
9877 TREE_READONLY (decl) = 1;
9878 if (flags & ECF_PURE)
9879 DECL_PURE_P (decl) = 1;
9880 if (flags & ECF_LOOPING_CONST_OR_PURE)
9881 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9882 if (flags & ECF_NOVOPS)
9883 DECL_IS_NOVOPS (decl) = 1;
9884 if (flags & ECF_NORETURN)
9885 TREE_THIS_VOLATILE (decl) = 1;
9886 if (flags & ECF_MALLOC)
9887 DECL_IS_MALLOC (decl) = 1;
9888 if (flags & ECF_RETURNS_TWICE)
9889 DECL_IS_RETURNS_TWICE (decl) = 1;
9890 if (flags & ECF_LEAF)
9891 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9892 NULL, DECL_ATTRIBUTES (decl));
9893 if ((flags & ECF_TM_PURE) && flag_tm)
9894 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9895 /* Looping const or pure is implied by noreturn.
9896 There is currently no way to declare looping const or looping pure alone. */
9897 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9898 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9899 }
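
/* A sketch of flagging a hypothetical FUNCTION_DECL as const, nothrow and
   leaf; FNDECL stands in for a decl created elsewhere.  */
#if 0
static void
set_call_expr_flags_sketch (tree fndecl)
{
  set_call_expr_flags (fndecl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);
  gcc_assert (TREE_READONLY (fndecl) && TREE_NOTHROW (fndecl));
}
#endif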
9900
9901
9902 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9903
9904 static void
9905 local_define_builtin (const char *name, tree type, enum built_in_function code,
9906 const char *library_name, int ecf_flags)
9907 {
9908 tree decl;
9909
9910 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9911 library_name, NULL_TREE);
9912 set_call_expr_flags (decl, ecf_flags);
9913
9914 set_builtin_decl (code, decl, true);
9915 }
9916
9917 /* Call this function after instantiating all builtins that the language
9918 front end cares about. This will build the rest of the builtins
9919 and internal functions that are relied upon by the tree optimizers and
9920 the middle-end. */
9921
9922 void
9923 build_common_builtin_nodes (void)
9924 {
9925 tree tmp, ftype;
9926 int ecf_flags;
9927
9928 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9929 {
9930 ftype = build_function_type (void_type_node, void_list_node);
9931 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9932 "__builtin_unreachable",
9933 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9934 | ECF_CONST);
9935 }
9936
9937 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9938 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9939 {
9940 ftype = build_function_type_list (ptr_type_node,
9941 ptr_type_node, const_ptr_type_node,
9942 size_type_node, NULL_TREE);
9943
9944 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9945 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9946 "memcpy", ECF_NOTHROW | ECF_LEAF);
9947 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9948 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9949 "memmove", ECF_NOTHROW | ECF_LEAF);
9950 }
9951
9952 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9953 {
9954 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9955 const_ptr_type_node, size_type_node,
9956 NULL_TREE);
9957 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9958 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9959 }
9960
9961 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9962 {
9963 ftype = build_function_type_list (ptr_type_node,
9964 ptr_type_node, integer_type_node,
9965 size_type_node, NULL_TREE);
9966 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9967 "memset", ECF_NOTHROW | ECF_LEAF);
9968 }
9969
9970 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9971 {
9972 ftype = build_function_type_list (ptr_type_node,
9973 size_type_node, NULL_TREE);
9974 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9975 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9976 }
9977
9978 ftype = build_function_type_list (ptr_type_node, size_type_node,
9979 size_type_node, NULL_TREE);
9980 local_define_builtin ("__builtin_alloca_with_align", ftype,
9981 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9982 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9983
9984 /* If we're checking the stack, `alloca' can throw. */
9985 if (flag_stack_check)
9986 {
9987 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9988 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9989 }
9990
9991 ftype = build_function_type_list (void_type_node,
9992 ptr_type_node, ptr_type_node,
9993 ptr_type_node, NULL_TREE);
9994 local_define_builtin ("__builtin_init_trampoline", ftype,
9995 BUILT_IN_INIT_TRAMPOLINE,
9996 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9997 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9998 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9999 "__builtin_init_heap_trampoline",
10000 ECF_NOTHROW | ECF_LEAF);
10001
10002 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10003 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10004 BUILT_IN_ADJUST_TRAMPOLINE,
10005 "__builtin_adjust_trampoline",
10006 ECF_CONST | ECF_NOTHROW);
10007
10008 ftype = build_function_type_list (void_type_node,
10009 ptr_type_node, ptr_type_node, NULL_TREE);
10010 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10011 BUILT_IN_NONLOCAL_GOTO,
10012 "__builtin_nonlocal_goto",
10013 ECF_NORETURN | ECF_NOTHROW);
10014
10015 ftype = build_function_type_list (void_type_node,
10016 ptr_type_node, ptr_type_node, NULL_TREE);
10017 local_define_builtin ("__builtin_setjmp_setup", ftype,
10018 BUILT_IN_SETJMP_SETUP,
10019 "__builtin_setjmp_setup", ECF_NOTHROW);
10020
10021 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10022 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10023 BUILT_IN_SETJMP_RECEIVER,
10024 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10025
10026 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10027 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10028 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10029
10030 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10031 local_define_builtin ("__builtin_stack_restore", ftype,
10032 BUILT_IN_STACK_RESTORE,
10033 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10034
10035 /* If there's a possibility that we might use the ARM EABI, build the
10036 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10037 if (targetm.arm_eabi_unwinder)
10038 {
10039 ftype = build_function_type_list (void_type_node, NULL_TREE);
10040 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10041 BUILT_IN_CXA_END_CLEANUP,
10042 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10043 }
10044
10045 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10046 local_define_builtin ("__builtin_unwind_resume", ftype,
10047 BUILT_IN_UNWIND_RESUME,
10048 ((targetm_common.except_unwind_info (&global_options)
10049 == UI_SJLJ)
10050 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10051 ECF_NORETURN);
10052
10053 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10054 {
10055 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10056 NULL_TREE);
10057 local_define_builtin ("__builtin_return_address", ftype,
10058 BUILT_IN_RETURN_ADDRESS,
10059 "__builtin_return_address",
10060 ECF_NOTHROW);
10061 }
10062
10063 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10064 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10065 {
10066 ftype = build_function_type_list (void_type_node, ptr_type_node,
10067 ptr_type_node, NULL_TREE);
10068 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10069 local_define_builtin ("__cyg_profile_func_enter", ftype,
10070 BUILT_IN_PROFILE_FUNC_ENTER,
10071 "__cyg_profile_func_enter", 0);
10072 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10073 local_define_builtin ("__cyg_profile_func_exit", ftype,
10074 BUILT_IN_PROFILE_FUNC_EXIT,
10075 "__cyg_profile_func_exit", 0);
10076 }
10077
10078 /* The exception object and filter values from the runtime. The argument
10079 must be zero before exception lowering, i.e. from the front end. After
10080 exception lowering, it will be the region number for the exception
10081 landing pad. These functions are PURE instead of CONST to prevent
10082 them from being hoisted past the exception edge that will initialize
10083 their values in the landing pad. */
10084 ftype = build_function_type_list (ptr_type_node,
10085 integer_type_node, NULL_TREE);
10086 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10087 /* Only use TM_PURE if we have TM language support. */
10088 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10089 ecf_flags |= ECF_TM_PURE;
10090 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10091 "__builtin_eh_pointer", ecf_flags);
10092
10093 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10094 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10095 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10096 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10097
10098 ftype = build_function_type_list (void_type_node,
10099 integer_type_node, integer_type_node,
10100 NULL_TREE);
10101 local_define_builtin ("__builtin_eh_copy_values", ftype,
10102 BUILT_IN_EH_COPY_VALUES,
10103 "__builtin_eh_copy_values", ECF_NOTHROW);
10104
10105 /* Complex multiplication and division. These are handled as builtins
10106 rather than optabs because emit_library_call_value doesn't support
10107 complex. Further, we can do slightly better with folding these
10108 beasties if the real and imaginary parts of the arguments are separate. */
10109 {
10110 int mode;
10111
10112 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10113 {
10114 char mode_name_buf[4], *q;
10115 const char *p;
10116 enum built_in_function mcode, dcode;
10117 tree type, inner_type;
10118 const char *prefix = "__";
10119
10120 if (targetm.libfunc_gnu_prefix)
10121 prefix = "__gnu_";
10122
10123 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10124 if (type == NULL)
10125 continue;
10126 inner_type = TREE_TYPE (type);
10127
10128 ftype = build_function_type_list (type, inner_type, inner_type,
10129 inner_type, inner_type, NULL_TREE);
10130
10131 mcode = ((enum built_in_function)
10132 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10133 dcode = ((enum built_in_function)
10134 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10135
10136 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10137 *q = TOLOWER (*p);
10138 *q = '\0';
10139
10140 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10141 NULL);
10142 local_define_builtin (built_in_names[mcode], ftype, mcode,
10143 built_in_names[mcode],
10144 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10145
10146 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10147 NULL);
10148 local_define_builtin (built_in_names[dcode], ftype, dcode,
10149 built_in_names[dcode],
10150 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10151 }
10152 }
10153
10154 init_internal_fns ();
10155 }
10156
10157 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10158 better way.
10159
10160 If we requested a pointer to a vector, build up the pointers that
10161 we stripped off while looking for the inner type. Similarly for
10162 return values from functions.
10163
10164 The argument TYPE is the top of the chain, and BOTTOM is the
10165 new type which we will point to. */
10166
10167 tree
10168 reconstruct_complex_type (tree type, tree bottom)
10169 {
10170 tree inner, outer;
10171
10172 if (TREE_CODE (type) == POINTER_TYPE)
10173 {
10174 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10175 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10176 TYPE_REF_CAN_ALIAS_ALL (type));
10177 }
10178 else if (TREE_CODE (type) == REFERENCE_TYPE)
10179 {
10180 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10181 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10182 TYPE_REF_CAN_ALIAS_ALL (type));
10183 }
10184 else if (TREE_CODE (type) == ARRAY_TYPE)
10185 {
10186 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10187 outer = build_array_type (inner, TYPE_DOMAIN (type));
10188 }
10189 else if (TREE_CODE (type) == FUNCTION_TYPE)
10190 {
10191 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10192 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10193 }
10194 else if (TREE_CODE (type) == METHOD_TYPE)
10195 {
10196 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10197 /* The build_method_type_directly() routine prepends 'this' to the argument
10198 list, so we must compensate by getting rid of it. */
10199 outer
10200 = build_method_type_directly
10201 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10202 inner,
10203 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10204 }
10205 else if (TREE_CODE (type) == OFFSET_TYPE)
10206 {
10207 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10208 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10209 }
10210 else
10211 return bottom;
10212
10213 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10214 TYPE_QUALS (type));
10215 }
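
/* A sketch of the rebuilding described above, assuming a target where a
   four-element float vector is a valid type: starting from `float *' and
   the vector as the new bottom type, the result is a pointer to the
   vector, with the original qualifiers preserved.  */
#if 0
static tree
reconstruct_complex_type_sketch (void)
{
  tree vec = build_vector_type (float_type_node, 4);
  tree ptr = build_pointer_type (float_type_node);
  return reconstruct_complex_type (ptr, vec);	/* Pointer to the vector.  */
}
#endif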
10216
10217 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10218 the inner type. */
10219 tree
10220 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10221 {
10222 int nunits;
10223
10224 switch (GET_MODE_CLASS (mode))
10225 {
10226 case MODE_VECTOR_INT:
10227 case MODE_VECTOR_FLOAT:
10228 case MODE_VECTOR_FRACT:
10229 case MODE_VECTOR_UFRACT:
10230 case MODE_VECTOR_ACCUM:
10231 case MODE_VECTOR_UACCUM:
10232 nunits = GET_MODE_NUNITS (mode);
10233 break;
10234
10235 case MODE_INT:
10236 /* Check that there are no leftover bits. */
10237 gcc_assert (GET_MODE_BITSIZE (mode)
10238 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10239
10240 nunits = GET_MODE_BITSIZE (mode)
10241 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10242 break;
10243
10244 default:
10245 gcc_unreachable ();
10246 }
10247
10248 return make_vector_type (innertype, nunits, mode);
10249 }
10250
10251 /* Similarly, but takes the inner type and number of units, which must be
10252 a power of two. */
10253
10254 tree
10255 build_vector_type (tree innertype, int nunits)
10256 {
10257 return make_vector_type (innertype, nunits, VOIDmode);
10258 }
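
/* A sketch of requesting a four-element float vector; passing VOIDmode lets
   layout_type choose the target vector mode (e.g. a V4SF-style mode when
   one is available).  */
#if 0
static tree
build_vector_type_sketch (void)
{
  tree v4f = build_vector_type (float_type_node, 4);
  gcc_assert (TYPE_VECTOR_SUBPARTS (v4f) == 4);
  return v4f;
}
#endif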
10259
10260 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10261
10262 tree
10263 build_opaque_vector_type (tree innertype, int nunits)
10264 {
10265 tree t = make_vector_type (innertype, nunits, VOIDmode);
10266 tree cand;
10267 /* We always build the non-opaque variant before the opaque one,
10268 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10269 cand = TYPE_NEXT_VARIANT (t);
10270 if (cand
10271 && TYPE_VECTOR_OPAQUE (cand)
10272 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10273 return cand;
10274 /* Otherwise build a variant type and make sure to queue it after
10275 the non-opaque type. */
10276 cand = build_distinct_type_copy (t);
10277 TYPE_VECTOR_OPAQUE (cand) = true;
10278 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10279 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10280 TYPE_NEXT_VARIANT (t) = cand;
10281 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10282 return cand;
10283 }
10284
10285
10286 /* Given an initializer INIT, return TRUE if INIT is zero or some
10287 aggregate of zeros. Otherwise return FALSE. */
10288 bool
10289 initializer_zerop (const_tree init)
10290 {
10291 tree elt;
10292
10293 STRIP_NOPS (init);
10294
10295 switch (TREE_CODE (init))
10296 {
10297 case INTEGER_CST:
10298 return integer_zerop (init);
10299
10300 case REAL_CST:
10301 /* ??? Note that this is not correct for C4X float formats. There,
10302 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10303 negative exponent. */
10304 return real_zerop (init)
10305 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10306
10307 case FIXED_CST:
10308 return fixed_zerop (init);
10309
10310 case COMPLEX_CST:
10311 return integer_zerop (init)
10312 || (real_zerop (init)
10313 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10314 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10315
10316 case VECTOR_CST:
10317 {
10318 unsigned i;
10319 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10320 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10321 return false;
10322 return true;
10323 }
10324
10325 case CONSTRUCTOR:
10326 {
10327 unsigned HOST_WIDE_INT idx;
10328
10329 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10330 if (!initializer_zerop (elt))
10331 return false;
10332 return true;
10333 }
10334
10335 case STRING_CST:
10336 {
10337 int i;
10338
10339 /* We need to loop through all elements to handle cases like
10340 "\0" and "\0foobar". */
10341 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10342 if (TREE_STRING_POINTER (init)[i] != '\0')
10343 return false;
10344
10345 return true;
10346 }
10347
10348 default:
10349 return false;
10350 }
10351 }
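
/* A sketch of the predicate on two trivial initializers: a literal zero is
   recognised, a literal one is not.  */
#if 0
static void
initializer_zerop_sketch (void)
{
  gcc_assert (initializer_zerop (build_int_cst (integer_type_node, 0)));
  gcc_assert (!initializer_zerop (integer_one_node));
}
#endif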
10352
10353 /* Check if vector VEC consists of all equal elements and
10354 that the number of elements corresponds to the type of VEC.
10355 The function returns the first element of the vector
10356 or NULL_TREE if the vector is not uniform. */
10357 tree
10358 uniform_vector_p (const_tree vec)
10359 {
10360 tree first, t;
10361 unsigned i;
10362
10363 if (vec == NULL_TREE)
10364 return NULL_TREE;
10365
10366 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10367
10368 if (TREE_CODE (vec) == VECTOR_CST)
10369 {
10370 first = VECTOR_CST_ELT (vec, 0);
10371 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10372 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10373 return NULL_TREE;
10374
10375 return first;
10376 }
10377
10378 else if (TREE_CODE (vec) == CONSTRUCTOR)
10379 {
10380 first = error_mark_node;
10381
10382 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10383 {
10384 if (i == 0)
10385 {
10386 first = t;
10387 continue;
10388 }
10389 if (!operand_equal_p (first, t, 0))
10390 return NULL_TREE;
10391 }
10392 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10393 return NULL_TREE;
10394
10395 return first;
10396 }
10397
10398 return NULL_TREE;
10399 }
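
/* A sketch of the uniformity check on a constant splat built with
   build_vector_from_val: every element is the same scalar, so the first
   element is returned rather than NULL_TREE.  */
#if 0
static void
uniform_vector_p_sketch (void)
{
  tree vectype = build_vector_type (integer_type_node, 4);
  tree splat = build_vector_from_val (vectype, integer_zero_node);
  gcc_assert (uniform_vector_p (splat) != NULL_TREE);
}
#endif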
10400
10401 /* Build an empty statement at location LOC. */
10402
10403 tree
10404 build_empty_stmt (location_t loc)
10405 {
10406 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10407 SET_EXPR_LOCATION (t, loc);
10408 return t;
10409 }
10410
10411
10412 /* Build an OpenMP clause with code CODE. LOC is the location of the
10413 clause. */
10414
10415 tree
10416 build_omp_clause (location_t loc, enum omp_clause_code code)
10417 {
10418 tree t;
10419 int size, length;
10420
10421 length = omp_clause_num_ops[code];
10422 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10423
10424 record_node_allocation_statistics (OMP_CLAUSE, size);
10425
10426 t = (tree) ggc_internal_alloc (size);
10427 memset (t, 0, size);
10428 TREE_SET_CODE (t, OMP_CLAUSE);
10429 OMP_CLAUSE_SET_CODE (t, code);
10430 OMP_CLAUSE_LOCATION (t) = loc;
10431
10432 return t;
10433 }
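
/* A sketch of building a private clause for a hypothetical VAR_DECL V and
   chaining it onto an existing clause list CLAUSES.  */
#if 0
static tree
build_omp_clause_sketch (tree v, tree clauses)
{
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_PRIVATE);
  OMP_CLAUSE_DECL (c) = v;
  OMP_CLAUSE_CHAIN (c) = clauses;
  return c;
}
#endif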
10434
10435 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10436 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10437 Except for the CODE and operand count field, other storage for the
10438 object is initialized to zeros. */
10439
10440 tree
10441 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10442 {
10443 tree t;
10444 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10445
10446 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10447 gcc_assert (len >= 1);
10448
10449 record_node_allocation_statistics (code, length);
10450
10451 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10452
10453 TREE_SET_CODE (t, code);
10454
10455 /* Can't use TREE_OPERAND to store the length because if checking is
10456 enabled, it will try to check the length before we store it. :-P */
10457 t->exp.operands[0] = build_int_cst (sizetype, len);
10458
10459 return t;
10460 }
10461
10462 /* Helper function for build_call_* functions; build a CALL_EXPR with
10463 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10464 the argument slots. */
10465
10466 static tree
10467 build_call_1 (tree return_type, tree fn, int nargs)
10468 {
10469 tree t;
10470
10471 t = build_vl_exp (CALL_EXPR, nargs + 3);
10472 TREE_TYPE (t) = return_type;
10473 CALL_EXPR_FN (t) = fn;
10474 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10475
10476 return t;
10477 }
10478
10479 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10480 FN and a null static chain slot. NARGS is the number of call arguments
10481 which are specified as "..." arguments. */
10482
10483 tree
10484 build_call_nary (tree return_type, tree fn, int nargs, ...)
10485 {
10486 tree ret;
10487 va_list args;
10488 va_start (args, nargs);
10489 ret = build_call_valist (return_type, fn, nargs, args);
10490 va_end (args);
10491 return ret;
10492 }
10493
10494 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10495 FN and a null static chain slot. NARGS is the number of call arguments
10496 which are specified as a va_list ARGS. */
10497
10498 tree
10499 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10500 {
10501 tree t;
10502 int i;
10503
10504 t = build_call_1 (return_type, fn, nargs);
10505 for (i = 0; i < nargs; i++)
10506 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10507 process_call_operands (t);
10508 return t;
10509 }
10510
10511 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10512 FN and a null static chain slot. NARGS is the number of call arguments
10513 which are specified as a tree array ARGS. */
10514
10515 tree
10516 build_call_array_loc (location_t loc, tree return_type, tree fn,
10517 int nargs, const tree *args)
10518 {
10519 tree t;
10520 int i;
10521
10522 t = build_call_1 (return_type, fn, nargs);
10523 for (i = 0; i < nargs; i++)
10524 CALL_EXPR_ARG (t, i) = args[i];
10525 process_call_operands (t);
10526 SET_EXPR_LOCATION (t, loc);
10527 return t;
10528 }
10529
10530 /* Like build_call_array, but takes a vec. */
10531
10532 tree
10533 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10534 {
10535 tree ret, t;
10536 unsigned int ix;
10537
10538 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10539 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10540 CALL_EXPR_ARG (ret, ix) = t;
10541 process_call_operands (ret);
10542 return ret;
10543 }
10544
10545 /* Conveniently construct a function call expression. FNDECL names the
10546 function to be called and N arguments are passed in the array
10547 ARGARRAY. */
10548
10549 tree
10550 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10551 {
10552 tree fntype = TREE_TYPE (fndecl);
10553 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10554
10555 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10556 }
10557
10558 /* Conveniently construct a function call expression. FNDECL names the
10559 function to be called and the arguments are passed in the vector
10560 VEC. */
10561
10562 tree
10563 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10564 {
10565 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10566 vec_safe_address (vec));
10567 }
10568
10569
10570 /* Conveniently construct a function call expression. FNDECL names the
10571 function to be called, N is the number of arguments, and the "..."
10572 parameters are the argument expressions. */
10573
10574 tree
10575 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10576 {
10577 va_list ap;
10578 tree *argarray = XALLOCAVEC (tree, n);
10579 int i;
10580
10581 va_start (ap, n);
10582 for (i = 0; i < n; i++)
10583 argarray[i] = va_arg (ap, tree);
10584 va_end (ap);
10585 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10586 }
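
/* A sketch of building a call to the memcpy builtin; DST and SRC stand in
   for pointer-valued trees produced elsewhere, and the length is a fixed
   16 bytes purely for illustration.  */
#if 0
static tree
build_call_expr_loc_sketch (location_t loc, tree dst, tree src)
{
  tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
  return build_call_expr_loc (loc, fn, 3, dst, src,
			      build_int_cst (size_type_node, 16));
}
#endif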
10587
10588 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10589 varargs macros aren't supported by all bootstrap compilers. */
10590
10591 tree
10592 build_call_expr (tree fndecl, int n, ...)
10593 {
10594 va_list ap;
10595 tree *argarray = XALLOCAVEC (tree, n);
10596 int i;
10597
10598 va_start (ap, n);
10599 for (i = 0; i < n; i++)
10600 argarray[i] = va_arg (ap, tree);
10601 va_end (ap);
10602 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10603 }
10604
10605 /* Build an internal call expression. This is just like CALL_EXPR, except
10606 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10607 internal function call. */
10608
10609 tree
10610 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10611 tree type, int n, ...)
10612 {
10613 va_list ap;
10614 int i;
10615
10616 tree fn = build_call_1 (type, NULL_TREE, n);
10617 va_start (ap, n);
10618 for (i = 0; i < n; i++)
10619 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10620 va_end (ap);
10621 SET_EXPR_LOCATION (fn, loc);
10622 CALL_EXPR_IFN (fn) = ifn;
10623 return fn;
10624 }
10625
10626 /* Create a new constant string literal and return a char* pointer to it.
10627 The STRING_CST value is the LEN characters at STR. */
10628 tree
10629 build_string_literal (int len, const char *str)
10630 {
10631 tree t, elem, index, type;
10632
10633 t = build_string (len, str);
10634 elem = build_type_variant (char_type_node, 1, 0);
10635 index = build_index_type (size_int (len - 1));
10636 type = build_array_type (elem, index);
10637 TREE_TYPE (t) = type;
10638 TREE_CONSTANT (t) = 1;
10639 TREE_READONLY (t) = 1;
10640 TREE_STATIC (t) = 1;
10641
10642 type = build_pointer_type (elem);
10643 t = build1 (ADDR_EXPR, type,
10644 build4 (ARRAY_REF, elem,
10645 t, integer_zero_node, NULL_TREE, NULL_TREE));
10646 return t;
10647 }
10648
10649
10650
10651 /* Return true if T (assumed to be a DECL) must be assigned a memory
10652 location. */
10653
10654 bool
10655 needs_to_live_in_memory (const_tree t)
10656 {
10657 return (TREE_ADDRESSABLE (t)
10658 || is_global_var (t)
10659 || (TREE_CODE (t) == RESULT_DECL
10660 && !DECL_BY_REFERENCE (t)
10661 && aggregate_value_p (t, current_function_decl)));
10662 }
10663
10664 /* Return the value of the constant X, sign-extended. */
10665
10666 HOST_WIDE_INT
10667 int_cst_value (const_tree x)
10668 {
10669 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10670 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10671
10672 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10673 gcc_assert (cst_and_fits_in_hwi (x));
10674
10675 if (bits < HOST_BITS_PER_WIDE_INT)
10676 {
10677 bool negative = ((val >> (bits - 1)) & 1) != 0;
10678 if (negative)
10679 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10680 else
10681 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10682 }
10683
10684 return val;
10685 }
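
/* A sketch of the sign extension above: the all-ones bit pattern of a
   signed char constant reads back as -1 in a HOST_WIDE_INT.  */
#if 0
static void
int_cst_value_sketch (void)
{
  tree m1 = build_int_cst (signed_char_type_node, -1);
  gcc_assert (int_cst_value (m1) == -1);
}
#endif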
10686
10687 /* If TYPE is an integral or pointer type, return an integer type with
10688 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10689 if TYPE is already an integer type of signedness UNSIGNEDP. */
10690
10691 tree
10692 signed_or_unsigned_type_for (int unsignedp, tree type)
10693 {
10694 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10695 return type;
10696
10697 if (TREE_CODE (type) == VECTOR_TYPE)
10698 {
10699 tree inner = TREE_TYPE (type);
10700 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10701 if (!inner2)
10702 return NULL_TREE;
10703 if (inner == inner2)
10704 return type;
10705 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10706 }
10707
10708 if (!INTEGRAL_TYPE_P (type)
10709 && !POINTER_TYPE_P (type)
10710 && TREE_CODE (type) != OFFSET_TYPE)
10711 return NULL_TREE;
10712
10713 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10714 }
10715
10716 /* If TYPE is an integral or pointer type, return an integer type with
10717 the same precision which is unsigned, or itself if TYPE is already an
10718 unsigned integer type. */
10719
10720 tree
10721 unsigned_type_for (tree type)
10722 {
10723 return signed_or_unsigned_type_for (1, type);
10724 }
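
/* A sketch of the signedness mapping: the unsigned counterpart of `int'
   keeps the precision and becomes unsigned, and a pointer type maps to an
   unsigned integer type of pointer precision.  */
#if 0
static void
unsigned_type_for_sketch (void)
{
  tree u = unsigned_type_for (integer_type_node);
  tree up = unsigned_type_for (ptr_type_node);
  gcc_assert (TYPE_UNSIGNED (u)
	      && TYPE_PRECISION (u) == TYPE_PRECISION (integer_type_node));
  gcc_assert (TYPE_UNSIGNED (up)
	      && TYPE_PRECISION (up) == TYPE_PRECISION (ptr_type_node));
}
#endif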
10725
10726 /* If TYPE is an integral or pointer type, return an integer type with
10727 the same precision which is signed, or itself if TYPE is already a
10728 signed integer type. */
10729
10730 tree
10731 signed_type_for (tree type)
10732 {
10733 return signed_or_unsigned_type_for (0, type);
10734 }
10735
10736 /* If TYPE is a vector type, return a signed integer vector type with the
10737 same width and number of subparts. Otherwise return boolean_type_node. */
10738
10739 tree
10740 truth_type_for (tree type)
10741 {
10742 if (TREE_CODE (type) == VECTOR_TYPE)
10743 {
10744 tree elem = lang_hooks.types.type_for_size
10745 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10746 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10747 }
10748 else
10749 return boolean_type_node;
10750 }
10751
10752 /* Returns the largest value obtainable by casting something in INNER type to
10753 OUTER type. */
10754
10755 tree
10756 upper_bound_in_type (tree outer, tree inner)
10757 {
10758 unsigned int det = 0;
10759 unsigned oprec = TYPE_PRECISION (outer);
10760 unsigned iprec = TYPE_PRECISION (inner);
10761 unsigned prec;
10762
10763 /* Compute a unique number for every combination. */
10764 det |= (oprec > iprec) ? 4 : 0;
10765 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10766 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10767
10768 /* Determine the exponent to use. */
10769 switch (det)
10770 {
10771 case 0:
10772 case 1:
10773 /* oprec <= iprec, outer: signed, inner: don't care. */
10774 prec = oprec - 1;
10775 break;
10776 case 2:
10777 case 3:
10778 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10779 prec = oprec;
10780 break;
10781 case 4:
10782 /* oprec > iprec, outer: signed, inner: signed. */
10783 prec = iprec - 1;
10784 break;
10785 case 5:
10786 /* oprec > iprec, outer: signed, inner: unsigned. */
10787 prec = iprec;
10788 break;
10789 case 6:
10790 /* oprec > iprec, outer: unsigned, inner: signed. */
10791 prec = oprec;
10792 break;
10793 case 7:
10794 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10795 prec = iprec;
10796 break;
10797 default:
10798 gcc_unreachable ();
10799 }
10800
10801 return wide_int_to_tree (outer,
10802 wi::mask (prec, false, TYPE_PRECISION (outer)));
10803 }
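
/* A worked sketch of the case table above, assuming an 8-bit char: casting
   from `int' down to `unsigned char' (oprec <= iprec, outer unsigned) can
   produce any unsigned char value, so the bound is 255.  */
#if 0
static void
upper_bound_in_type_sketch (void)
{
  tree ub = upper_bound_in_type (unsigned_char_type_node, integer_type_node);
  gcc_assert (tree_to_uhwi (ub) == 255);
}
#endif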
10804
10805 /* Returns the smallest value obtainable by casting something in INNER type to
10806 OUTER type. */
10807
10808 tree
10809 lower_bound_in_type (tree outer, tree inner)
10810 {
10811 unsigned oprec = TYPE_PRECISION (outer);
10812 unsigned iprec = TYPE_PRECISION (inner);
10813
10814 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10815 and obtain 0. */
10816 if (TYPE_UNSIGNED (outer)
10817 /* If we are widening something of an unsigned type, OUTER type
10818 contains all values of INNER type. In particular, both INNER
10819 and OUTER types have zero in common. */
10820 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10821 return build_int_cst (outer, 0);
10822 else
10823 {
10824 /* If we are widening a signed type to another signed type, we
10825 want to obtain -2^^(iprec-1). If we are keeping the
10826 precision or narrowing to a signed type, we want to obtain
10827 -2^(oprec-1). */
10828 unsigned prec = oprec > iprec ? iprec : oprec;
10829 return wide_int_to_tree (outer,
10830 wi::mask (prec - 1, true,
10831 TYPE_PRECISION (outer)));
10832 }
10833 }
10834
10835 /* Return nonzero if two operands that are suitable for PHI nodes are
10836 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10837 SSA_NAME or invariant. Note that this is strictly an optimization.
10838 That is, callers of this function can directly call operand_equal_p
10839 and get the same result, only slower. */
10840
10841 int
10842 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10843 {
10844 if (arg0 == arg1)
10845 return 1;
10846 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10847 return 0;
10848 return operand_equal_p (arg0, arg1, 0);
10849 }
10850
10851 /* Returns the number of zeros at the end of the binary representation of X. */
10852
10853 tree
10854 num_ending_zeros (const_tree x)
10855 {
10856 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10857 }
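
/* A sketch of the trailing-zero count: the constant 8 is 1000 in binary,
   so three zeros end its representation.  */
#if 0
static void
num_ending_zeros_sketch (void)
{
  tree eight = build_int_cst (integer_type_node, 8);
  gcc_assert (tree_to_uhwi (num_ending_zeros (eight)) == 3);
}
#endif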
10858
10859
10860 #define WALK_SUBTREE(NODE) \
10861 do \
10862 { \
10863 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10864 if (result) \
10865 return result; \
10866 } \
10867 while (0)
10868
10869 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10870 to be walked whenever a type is seen in the tree. The rest of the operands
10871 and the return value are as for walk_tree. */
10872
10873 static tree
10874 walk_type_fields (tree type, walk_tree_fn func, void *data,
10875 hash_set<tree> *pset, walk_tree_lh lh)
10876 {
10877 tree result = NULL_TREE;
10878
10879 switch (TREE_CODE (type))
10880 {
10881 case POINTER_TYPE:
10882 case REFERENCE_TYPE:
10883 case VECTOR_TYPE:
10884 /* We have to worry about mutually recursive pointers. These can't
10885 be written in C. They can in Ada. It's pathological, but
10886 there's an ACATS test (c38102a) that checks it. Deal with this
10887 by checking if we're pointing to another pointer, that one
10888 points to another pointer, that one does too, and we have no htab.
10889 If so, get a hash table. We check three levels deep to avoid
10890 the cost of the hash table if we don't need one. */
10891 if (POINTER_TYPE_P (TREE_TYPE (type))
10892 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10893 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10894 && !pset)
10895 {
10896 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10897 func, data);
10898 if (result)
10899 return result;
10900
10901 break;
10902 }
10903
10904 /* ... fall through ... */
10905
10906 case COMPLEX_TYPE:
10907 WALK_SUBTREE (TREE_TYPE (type));
10908 break;
10909
10910 case METHOD_TYPE:
10911 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10912
10913 /* Fall through. */
10914
10915 case FUNCTION_TYPE:
10916 WALK_SUBTREE (TREE_TYPE (type));
10917 {
10918 tree arg;
10919
10920 /* We never want to walk into default arguments. */
10921 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10922 WALK_SUBTREE (TREE_VALUE (arg));
10923 }
10924 break;
10925
10926 case ARRAY_TYPE:
10927 /* Don't follow this nodes's type if a pointer for fear that
10928 we'll have infinite recursion. If we have a PSET, then we
10929 need not fear. */
10930 if (pset
10931 || (!POINTER_TYPE_P (TREE_TYPE (type))
10932 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10933 WALK_SUBTREE (TREE_TYPE (type));
10934 WALK_SUBTREE (TYPE_DOMAIN (type));
10935 break;
10936
10937 case OFFSET_TYPE:
10938 WALK_SUBTREE (TREE_TYPE (type));
10939 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10940 break;
10941
10942 default:
10943 break;
10944 }
10945
10946 return NULL_TREE;
10947 }
10948
10949 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10950 called with the DATA and the address of each sub-tree. If FUNC returns a
10951 non-NULL value, the traversal is stopped, and the value returned by FUNC
10952 is returned. If PSET is non-NULL it is used to record the nodes visited,
10953 and to avoid visiting a node more than once. */
10954
10955 tree
10956 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10957 hash_set<tree> *pset, walk_tree_lh lh)
10958 {
10959 enum tree_code code;
10960 int walk_subtrees;
10961 tree result;
10962
10963 #define WALK_SUBTREE_TAIL(NODE) \
10964 do \
10965 { \
10966 tp = & (NODE); \
10967 goto tail_recurse; \
10968 } \
10969 while (0)
10970
10971 tail_recurse:
10972 /* Skip empty subtrees. */
10973 if (!*tp)
10974 return NULL_TREE;
10975
10976 /* Don't walk the same tree twice, if the user has requested
10977 that we avoid doing so. */
10978 if (pset && pset->add (*tp))
10979 return NULL_TREE;
10980
10981 /* Call the function. */
10982 walk_subtrees = 1;
10983 result = (*func) (tp, &walk_subtrees, data);
10984
10985 /* If we found something, return it. */
10986 if (result)
10987 return result;
10988
10989 code = TREE_CODE (*tp);
10990
10991 /* Even if we didn't, FUNC may have decided that there was nothing
10992 interesting below this point in the tree. */
10993 if (!walk_subtrees)
10994 {
10995 /* But we still need to check our siblings. */
10996 if (code == TREE_LIST)
10997 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10998 else if (code == OMP_CLAUSE)
10999 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11000 else
11001 return NULL_TREE;
11002 }
11003
11004 if (lh)
11005 {
11006 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11007 if (result || !walk_subtrees)
11008 return result;
11009 }
11010
11011 switch (code)
11012 {
11013 case ERROR_MARK:
11014 case IDENTIFIER_NODE:
11015 case INTEGER_CST:
11016 case REAL_CST:
11017 case FIXED_CST:
11018 case VECTOR_CST:
11019 case STRING_CST:
11020 case BLOCK:
11021 case PLACEHOLDER_EXPR:
11022 case SSA_NAME:
11023 case FIELD_DECL:
11024 case RESULT_DECL:
11025 /* None of these have subtrees other than those already walked
11026 above. */
11027 break;
11028
11029 case TREE_LIST:
11030 WALK_SUBTREE (TREE_VALUE (*tp));
11031 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11032 break;
11033
11034 case TREE_VEC:
11035 {
11036 int len = TREE_VEC_LENGTH (*tp);
11037
11038 if (len == 0)
11039 break;
11040
11041 /* Walk all elements but the first. */
11042 while (--len)
11043 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11044
11045 /* Now walk the first one as a tail call. */
11046 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11047 }
11048
11049 case COMPLEX_CST:
11050 WALK_SUBTREE (TREE_REALPART (*tp));
11051 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11052
11053 case CONSTRUCTOR:
11054 {
11055 unsigned HOST_WIDE_INT idx;
11056 constructor_elt *ce;
11057
11058 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11059 idx++)
11060 WALK_SUBTREE (ce->value);
11061 }
11062 break;
11063
11064 case SAVE_EXPR:
11065 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11066
11067 case BIND_EXPR:
11068 {
11069 tree decl;
11070 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11071 {
11072 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11073 into declarations that are just mentioned, rather than
11074 declared; they don't really belong to this part of the tree.
11075 And, we can see cycles: the initializer for a declaration
11076 can refer to the declaration itself. */
11077 WALK_SUBTREE (DECL_INITIAL (decl));
11078 WALK_SUBTREE (DECL_SIZE (decl));
11079 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11080 }
11081 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11082 }
11083
11084 case STATEMENT_LIST:
11085 {
11086 tree_stmt_iterator i;
11087 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11088 WALK_SUBTREE (*tsi_stmt_ptr (i));
11089 }
11090 break;
11091
11092 case OMP_CLAUSE:
11093 switch (OMP_CLAUSE_CODE (*tp))
11094 {
11095 case OMP_CLAUSE_PRIVATE:
11096 case OMP_CLAUSE_SHARED:
11097 case OMP_CLAUSE_FIRSTPRIVATE:
11098 case OMP_CLAUSE_COPYIN:
11099 case OMP_CLAUSE_COPYPRIVATE:
11100 case OMP_CLAUSE_FINAL:
11101 case OMP_CLAUSE_IF:
11102 case OMP_CLAUSE_NUM_THREADS:
11103 case OMP_CLAUSE_SCHEDULE:
11104 case OMP_CLAUSE_UNIFORM:
11105 case OMP_CLAUSE_DEPEND:
11106 case OMP_CLAUSE_NUM_TEAMS:
11107 case OMP_CLAUSE_THREAD_LIMIT:
11108 case OMP_CLAUSE_DEVICE:
11109 case OMP_CLAUSE_DIST_SCHEDULE:
11110 case OMP_CLAUSE_SAFELEN:
11111 case OMP_CLAUSE_SIMDLEN:
11112 case OMP_CLAUSE__LOOPTEMP_:
11113 case OMP_CLAUSE__SIMDUID_:
11114 case OMP_CLAUSE__CILK_FOR_COUNT_:
11115 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11116 /* FALLTHRU */
11117
11118 case OMP_CLAUSE_NOWAIT:
11119 case OMP_CLAUSE_ORDERED:
11120 case OMP_CLAUSE_DEFAULT:
11121 case OMP_CLAUSE_UNTIED:
11122 case OMP_CLAUSE_MERGEABLE:
11123 case OMP_CLAUSE_PROC_BIND:
11124 case OMP_CLAUSE_INBRANCH:
11125 case OMP_CLAUSE_NOTINBRANCH:
11126 case OMP_CLAUSE_FOR:
11127 case OMP_CLAUSE_PARALLEL:
11128 case OMP_CLAUSE_SECTIONS:
11129 case OMP_CLAUSE_TASKGROUP:
11130 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11131
11132 case OMP_CLAUSE_LASTPRIVATE:
11133 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11134 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11135 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11136
11137 case OMP_CLAUSE_COLLAPSE:
11138 {
11139 int i;
11140 for (i = 0; i < 3; i++)
11141 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11142 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11143 }
11144
11145 case OMP_CLAUSE_LINEAR:
11146 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11147 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11148 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11149 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11150
11151 case OMP_CLAUSE_ALIGNED:
11152 case OMP_CLAUSE_FROM:
11153 case OMP_CLAUSE_TO:
11154 case OMP_CLAUSE_MAP:
11155 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11156 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11157 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11158
11159 case OMP_CLAUSE_REDUCTION:
11160 {
11161 int i;
11162 for (i = 0; i < 4; i++)
11163 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11164 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11165 }
11166
11167 default:
11168 gcc_unreachable ();
11169 }
11170 break;
11171
11172 case TARGET_EXPR:
11173 {
11174 int i, len;
11175
11176 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11177 But, we only want to walk once. */
11178 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11179 for (i = 0; i < len; ++i)
11180 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11181 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11182 }
11183
11184 case DECL_EXPR:
11185 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11186 defining. We only want to walk into these fields of a type in this
11187 case and not in the general case of a mere reference to the type.
11188
11189 The criterion is as follows: if the field can be an expression, it
11190 must be walked only here. This should be in keeping with the fields
11191 that are directly gimplified in gimplify_type_sizes in order for the
11192 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11193 variable-sized types.
11194
11195 Note that DECLs get walked as part of processing the BIND_EXPR. */
11196 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11197 {
11198 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11199 if (TREE_CODE (*type_p) == ERROR_MARK)
11200 return NULL_TREE;
11201
11202 /* Call the function for the type. See if it returns anything or
11203 doesn't want us to continue. If we are to continue, walk both
11204 the normal fields and those for the declaration case. */
11205 result = (*func) (type_p, &walk_subtrees, data);
11206 if (result || !walk_subtrees)
11207 return result;
11208
11209 /* But do not walk a pointed-to type since it may itself need to
11210 be walked in the declaration case if it isn't anonymous. */
11211 if (!POINTER_TYPE_P (*type_p))
11212 {
11213 result = walk_type_fields (*type_p, func, data, pset, lh);
11214 if (result)
11215 return result;
11216 }
11217
11218 /* If this is a record type, also walk the fields. */
11219 if (RECORD_OR_UNION_TYPE_P (*type_p))
11220 {
11221 tree field;
11222
11223 for (field = TYPE_FIELDS (*type_p); field;
11224 field = DECL_CHAIN (field))
11225 {
11226 /* We'd like to look at the type of the field, but we can
11227 easily get infinite recursion. So assume it's pointed
11228 to elsewhere in the tree. Also, ignore things that
11229 aren't fields. */
11230 if (TREE_CODE (field) != FIELD_DECL)
11231 continue;
11232
11233 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11234 WALK_SUBTREE (DECL_SIZE (field));
11235 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11236 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11237 WALK_SUBTREE (DECL_QUALIFIER (field));
11238 }
11239 }
11240
11241 /* Same for scalar types. */
11242 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11243 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11244 || TREE_CODE (*type_p) == INTEGER_TYPE
11245 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11246 || TREE_CODE (*type_p) == REAL_TYPE)
11247 {
11248 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11249 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11250 }
11251
11252 WALK_SUBTREE (TYPE_SIZE (*type_p));
11253 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11254 }
11255 /* FALLTHRU */
11256
11257 default:
11258 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11259 {
11260 int i, len;
11261
11262 /* Walk over all the sub-trees of this operand. */
11263 len = TREE_OPERAND_LENGTH (*tp);
11264
11265 /* Go through the subtrees. We need to do this in forward order so
11266 that the scope of a FOR_EXPR is handled properly. */
11267 if (len)
11268 {
11269 for (i = 0; i < len - 1; ++i)
11270 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11271 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11272 }
11273 }
11274 /* If this is a type, walk the needed fields in the type. */
11275 else if (TYPE_P (*tp))
11276 return walk_type_fields (*tp, func, data, pset, lh);
11277 break;
11278 }
11279
11280 /* We didn't find what we were looking for. */
11281 return NULL_TREE;
11282
11283 #undef WALK_SUBTREE_TAIL
11284 }
11285 #undef WALK_SUBTREE
11286
11287 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11288
11289 tree
11290 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11291 walk_tree_lh lh)
11292 {
11293 tree result;
11294
11295 hash_set<tree> pset;
11296 result = walk_tree_1 (tp, func, data, &pset, lh);
11297 return result;
11298 }
11299
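/* Illustrative sketch (not part of this file): a minimal walk_tree_fn
   callback and the way it would be driven by the walker above.  The
   callback name and the counter it updates are hypothetical.

     static tree
     count_nodes_r (tree *tp, int *walk_subtrees, void *data)
     {
       ++*(int *) data;
       if (TYPE_P (*tp))
         *walk_subtrees = 0;
       return NULL_TREE;
     }

     int n = 0;
     walk_tree_without_duplicates (&expr, count_nodes_r, &n);

   Returning NULL_TREE keeps the walk going; returning any other tree
   stops the walk and that value becomes its result.  Clearing
   *WALK_SUBTREES skips the operands of the current node.  */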
11300
11301 tree
11302 tree_block (tree t)
11303 {
11304 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11305
11306 if (IS_EXPR_CODE_CLASS (c))
11307 return LOCATION_BLOCK (t->exp.locus);
11308 gcc_unreachable ();
11309 return NULL;
11310 }
11311
11312 void
11313 tree_set_block (tree t, tree b)
11314 {
11315 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11316
11317 if (IS_EXPR_CODE_CLASS (c))
11318 {
11319 if (b)
11320 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11321 else
11322 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11323 }
11324 else
11325 gcc_unreachable ();
11326 }
11327
11328 /* Create a nameless artificial label and put it in the current
11329 function context. The label has a location of LOC. Returns the
11330 newly created label. */
11331
11332 tree
11333 create_artificial_label (location_t loc)
11334 {
11335 tree lab = build_decl (loc,
11336 LABEL_DECL, NULL_TREE, void_type_node);
11337
11338 DECL_ARTIFICIAL (lab) = 1;
11339 DECL_IGNORED_P (lab) = 1;
11340 DECL_CONTEXT (lab) = current_function_decl;
11341 return lab;
11342 }
11343
11344 /* Given a tree, try to return a useful variable name that we can use
11345 to prefix a temporary that is being assigned the value of the tree.
11346 I.e., given <temp> = &A, return A. */
11347
11348 const char *
11349 get_name (tree t)
11350 {
11351 tree stripped_decl;
11352
11353 stripped_decl = t;
11354 STRIP_NOPS (stripped_decl);
11355 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11356 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11357 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11358 {
11359 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11360 if (!name)
11361 return NULL;
11362 return IDENTIFIER_POINTER (name);
11363 }
11364 else
11365 {
11366 switch (TREE_CODE (stripped_decl))
11367 {
11368 case ADDR_EXPR:
11369 return get_name (TREE_OPERAND (stripped_decl, 0));
11370 default:
11371 return NULL;
11372 }
11373 }
11374 }
11375
11376 /* Return true if FNTYPE has a variable argument list. */
11377
11378 bool
11379 stdarg_p (const_tree fntype)
11380 {
11381 function_args_iterator args_iter;
11382 tree n = NULL_TREE, t;
11383
11384 if (!fntype)
11385 return false;
11386
11387 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11388 {
11389 n = t;
11390 }
11391
11392 return n != NULL_TREE && n != void_type_node;
11393 }
11394
11395 /* Return true if FNTYPE has a prototype. */
11396
11397 bool
11398 prototype_p (tree fntype)
11399 {
11400 tree t;
11401
11402 gcc_assert (fntype != NULL_TREE);
11403
11404 t = TYPE_ARG_TYPES (fntype);
11405 return (t != NULL_TREE);
11406 }
11407
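/* Illustrative examples for the two predicates above (the C
   declarations are hypothetical):

     int f (int, ...);   stdarg_p -> true,  prototype_p -> true
     int g (int);        stdarg_p -> false, prototype_p -> true
     int h ();           stdarg_p -> false, prototype_p -> false

   That is, a varargs function is one whose argument list does not end
   with the implicit void marker, while an unprototyped function has no
   TYPE_ARG_TYPES at all.  */
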
11408 /* If BLOCK is inlined from an __attribute__((__artificial__))
11409 routine, return a pointer to the location from which it was
11410 called. */
11411 location_t *
11412 block_nonartificial_location (tree block)
11413 {
11414 location_t *ret = NULL;
11415
11416 while (block && TREE_CODE (block) == BLOCK
11417 && BLOCK_ABSTRACT_ORIGIN (block))
11418 {
11419 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11420
11421 while (TREE_CODE (ao) == BLOCK
11422 && BLOCK_ABSTRACT_ORIGIN (ao)
11423 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11424 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11425
11426 if (TREE_CODE (ao) == FUNCTION_DECL)
11427 {
11428 /* If AO is an artificial inline, point RET to the
11429 call site locus at which it has been inlined and continue
11430 the loop, in case AO's caller is also an artificial
11431 inline. */
11432 if (DECL_DECLARED_INLINE_P (ao)
11433 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11434 ret = &BLOCK_SOURCE_LOCATION (block);
11435 else
11436 break;
11437 }
11438 else if (TREE_CODE (ao) != BLOCK)
11439 break;
11440
11441 block = BLOCK_SUPERCONTEXT (block);
11442 }
11443 return ret;
11444 }
11445
11446
11447 /* If EXP is inlined from an __attribute__((__artificial__))
11448 function, return the location of the original call expression. */
11449
11450 location_t
11451 tree_nonartificial_location (tree exp)
11452 {
11453 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11454
11455 if (loc)
11456 return *loc;
11457 else
11458 return EXPR_LOCATION (exp);
11459 }
11460
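/* Illustrative example (the wrapper below is hypothetical): given

     static inline void __attribute__ ((__artificial__))
     checked_copy (void *d, const void *s, size_t n)
     { __builtin_memcpy (d, s, n); }

   a diagnostic for code inlined from checked_copy is reported at the
   location where checked_copy was called, as recovered by the two
   functions above, rather than at a line inside the wrapper itself.  */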
11461
11462 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11463 and TARGET_OPTION_NODE nodes. */
11464
11465 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11466
11467 static hashval_t
11468 cl_option_hash_hash (const void *x)
11469 {
11470 const_tree const t = (const_tree) x;
11471 const char *p;
11472 size_t i;
11473 size_t len = 0;
11474 hashval_t hash = 0;
11475
11476 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11477 {
11478 p = (const char *)TREE_OPTIMIZATION (t);
11479 len = sizeof (struct cl_optimization);
11480 }
11481
11482 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11483 {
11484 p = (const char *)TREE_TARGET_OPTION (t);
11485 len = sizeof (struct cl_target_option);
11486 }
11487
11488 else
11489 gcc_unreachable ();
11490
11491 /* Assume most optimization flags are just 0/1, some are 2-3, and a few
11492 might be something else. */
11493 for (i = 0; i < len; i++)
11494 if (p[i])
11495 hash = (hash << 4) ^ ((i << 2) | p[i]);
11496
11497 return hash;
11498 }
11499
11500 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11501 TARGET_OPTION tree node) is the same as that given by *Y, which is a
11502 node of the same kind. */
11503
11504 static int
11505 cl_option_hash_eq (const void *x, const void *y)
11506 {
11507 const_tree const xt = (const_tree) x;
11508 const_tree const yt = (const_tree) y;
11509 const char *xp;
11510 const char *yp;
11511 size_t len;
11512
11513 if (TREE_CODE (xt) != TREE_CODE (yt))
11514 return 0;
11515
11516 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11517 {
11518 xp = (const char *)TREE_OPTIMIZATION (xt);
11519 yp = (const char *)TREE_OPTIMIZATION (yt);
11520 len = sizeof (struct cl_optimization);
11521 }
11522
11523 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11524 {
11525 xp = (const char *)TREE_TARGET_OPTION (xt);
11526 yp = (const char *)TREE_TARGET_OPTION (yt);
11527 len = sizeof (struct cl_target_option);
11528 }
11529
11530 else
11531 gcc_unreachable ();
11532
11533 return (memcmp (xp, yp, len) == 0);
11534 }
11535
11536 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11537
11538 tree
11539 build_optimization_node (struct gcc_options *opts)
11540 {
11541 tree t;
11542 void **slot;
11543
11544 /* Use the cache of optimization nodes. */
11545
11546 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11547 opts);
11548
11549 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11550 t = (tree) *slot;
11551 if (!t)
11552 {
11553 /* Insert this one into the hash table. */
11554 t = cl_optimization_node;
11555 *slot = t;
11556
11557 /* Make a new node for next time round. */
11558 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11559 }
11560
11561 return t;
11562 }
11563
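/* Illustrative sketch: a front end would typically snapshot the current
   command-line state and hang it off a declaration (FNDECL here is
   hypothetical):

     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
       = build_optimization_node (&global_options);

   Because the nodes are hashed above, declarations built under the same
   set of options share a single OPTIMIZATION_NODE.  */
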
11564 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11565
11566 tree
11567 build_target_option_node (struct gcc_options *opts)
11568 {
11569 tree t;
11570 void **slot;
11571
11572 /* Use the cache of target option nodes. */
11573
11574 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11575 opts);
11576
11577 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11578 t = (tree) *slot;
11579 if (!t)
11580 {
11581 /* Insert this one into the hash table. */
11582 t = cl_target_option_node;
11583 *slot = t;
11584
11585 /* Make a new node for next time round. */
11586 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11587 }
11588
11589 return t;
11590 }
11591
11592 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11593 Called through htab_traverse. */
11594
11595 static int
11596 prepare_target_option_node_for_pch (void **slot, void *)
11597 {
11598 tree node = (tree) *slot;
11599 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11600 TREE_TARGET_GLOBALS (node) = NULL;
11601 return 1;
11602 }
11603
11604 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11605 so that they aren't saved during PCH writing. */
11606
11607 void
11608 prepare_target_option_nodes_for_pch (void)
11609 {
11610 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11611 NULL);
11612 }
11613
11614 /* Determine the "ultimate origin" of a block. The block may be an inlined
11615 instance of an inlined instance of a block which is local to an inline
11616 function, so we have to trace all the way back through the origin chain
11617 to find out what sort of node actually served as the original seed for the
11618 given block. */
11619
11620 tree
11621 block_ultimate_origin (const_tree block)
11622 {
11623 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11624
11625 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11626 we're trying to output the abstract instance of this function. */
11627 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11628 return NULL_TREE;
11629
11630 if (immediate_origin == NULL_TREE)
11631 return NULL_TREE;
11632 else
11633 {
11634 tree ret_val;
11635 tree lookahead = immediate_origin;
11636
11637 do
11638 {
11639 ret_val = lookahead;
11640 lookahead = (TREE_CODE (ret_val) == BLOCK
11641 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11642 }
11643 while (lookahead != NULL && lookahead != ret_val);
11644
11645 /* The block's abstract origin chain may not be the *ultimate* origin of
11646 the block. It could lead to a DECL that has an abstract origin set.
11647 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11648 will give us if it has one). Note that DECL's abstract origins are
11649 supposed to be the most distant ancestor (or so decl_ultimate_origin
11650 claims), so we don't need to loop following the DECL origins. */
11651 if (DECL_P (ret_val))
11652 return DECL_ORIGIN (ret_val);
11653
11654 return ret_val;
11655 }
11656 }
11657
11658 /* Return true iff conversion in EXP generates no instruction. Mark
11659 it inline so that we fully inline into the stripping functions even
11660 though we have two uses of this function. */
11661
11662 static inline bool
11663 tree_nop_conversion (const_tree exp)
11664 {
11665 tree outer_type, inner_type;
11666
11667 if (!CONVERT_EXPR_P (exp)
11668 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11669 return false;
11670 if (TREE_OPERAND (exp, 0) == error_mark_node)
11671 return false;
11672
11673 outer_type = TREE_TYPE (exp);
11674 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11675
11676 if (!inner_type)
11677 return false;
11678
11679 /* Use precision rather than machine mode when we can, which gives
11680 the correct answer even for submode (bit-field) types. */
11681 if ((INTEGRAL_TYPE_P (outer_type)
11682 || POINTER_TYPE_P (outer_type)
11683 || TREE_CODE (outer_type) == OFFSET_TYPE)
11684 && (INTEGRAL_TYPE_P (inner_type)
11685 || POINTER_TYPE_P (inner_type)
11686 || TREE_CODE (inner_type) == OFFSET_TYPE))
11687 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11688
11689 /* Otherwise fall back on comparing machine modes (e.g. for
11690 aggregate types, floats). */
11691 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11692 }
11693
11694 /* Return true iff conversion in EXP generates no instruction. Don't
11695 consider conversions changing the signedness. */
11696
11697 static bool
11698 tree_sign_nop_conversion (const_tree exp)
11699 {
11700 tree outer_type, inner_type;
11701
11702 if (!tree_nop_conversion (exp))
11703 return false;
11704
11705 outer_type = TREE_TYPE (exp);
11706 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11707
11708 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11709 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11710 }
11711
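/* Illustrative examples for the two predicates above, assuming int and
   unsigned int are both 32 bits wide and long is 64 bits:

     (int) (unsigned int) x   tree_nop_conversion      -> true
                              tree_sign_nop_conversion -> false
     (long) x, x an int       both                     -> false

   The first conversion merely reinterprets the sign and so needs no
   instruction; the second changes the precision and therefore does.  */
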
11712 /* Strip conversions from EXP according to tree_nop_conversion and
11713 return the resulting expression. */
11714
11715 tree
11716 tree_strip_nop_conversions (tree exp)
11717 {
11718 while (tree_nop_conversion (exp))
11719 exp = TREE_OPERAND (exp, 0);
11720 return exp;
11721 }
11722
11723 /* Strip conversions from EXP according to tree_sign_nop_conversion
11724 and return the resulting expression. */
11725
11726 tree
11727 tree_strip_sign_nop_conversions (tree exp)
11728 {
11729 while (tree_sign_nop_conversion (exp))
11730 exp = TREE_OPERAND (exp, 0);
11731 return exp;
11732 }
11733
11734 /* Avoid any floating point extensions from EXP. */
11735 tree
11736 strip_float_extensions (tree exp)
11737 {
11738 tree sub, expt, subt;
11739
11740 /* For a floating-point constant, look up the narrowest type that can hold
11741 it properly and handle it like (type)(narrowest_type)constant.
11742 This way we can optimize, for instance, a=a*2.0 where "a" is float
11743 but 2.0 is a double constant. */
11744 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11745 {
11746 REAL_VALUE_TYPE orig;
11747 tree type = NULL;
11748
11749 orig = TREE_REAL_CST (exp);
11750 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11751 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11752 type = float_type_node;
11753 else if (TYPE_PRECISION (TREE_TYPE (exp))
11754 > TYPE_PRECISION (double_type_node)
11755 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11756 type = double_type_node;
11757 if (type)
11758 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11759 }
11760
11761 if (!CONVERT_EXPR_P (exp))
11762 return exp;
11763
11764 sub = TREE_OPERAND (exp, 0);
11765 subt = TREE_TYPE (sub);
11766 expt = TREE_TYPE (exp);
11767
11768 if (!FLOAT_TYPE_P (subt))
11769 return exp;
11770
11771 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11772 return exp;
11773
11774 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11775 return exp;
11776
11777 return strip_float_extensions (sub);
11778 }
11779
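/* Illustrative example: for a float variable "a", the GENERIC form of
   a * 2.0 is (double) a * 2.0.  strip_float_extensions applied to
   (double) a yields "a", and applied to the REAL_CST 2.0 it returns the
   same value as a float constant, since 2.0 truncates to float exactly;
   callers can then perform the multiplication in single precision.  */
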
11780 /* Strip out all handled components that produce invariant
11781 offsets. */
11782
11783 const_tree
11784 strip_invariant_refs (const_tree op)
11785 {
11786 while (handled_component_p (op))
11787 {
11788 switch (TREE_CODE (op))
11789 {
11790 case ARRAY_REF:
11791 case ARRAY_RANGE_REF:
11792 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11793 || TREE_OPERAND (op, 2) != NULL_TREE
11794 || TREE_OPERAND (op, 3) != NULL_TREE)
11795 return NULL;
11796 break;
11797
11798 case COMPONENT_REF:
11799 if (TREE_OPERAND (op, 2) != NULL_TREE)
11800 return NULL;
11801 break;
11802
11803 default:;
11804 }
11805 op = TREE_OPERAND (op, 0);
11806 }
11807
11808 return op;
11809 }
11810
11811 static GTY(()) tree gcc_eh_personality_decl;
11812
11813 /* Return the GCC personality function decl. */
11814
11815 tree
11816 lhd_gcc_personality (void)
11817 {
11818 if (!gcc_eh_personality_decl)
11819 gcc_eh_personality_decl = build_personality_function ("gcc");
11820 return gcc_eh_personality_decl;
11821 }
11822
11823 /* TARGET is the call target of a GIMPLE call statement
11824 (obtained by gimple_call_fn). Return true if it is an
11825 OBJ_TYPE_REF representing a virtual call to a C++ method.
11826 (As opposed to an OBJ_TYPE_REF representing ObjC calls
11827 through a cast, where the middle-end devirtualization machinery
11828 can't apply.) */
11829
11830 bool
11831 virtual_method_call_p (tree target)
11832 {
11833 if (TREE_CODE (target) != OBJ_TYPE_REF)
11834 return false;
11835 target = TREE_TYPE (target);
11836 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11837 target = TREE_TYPE (target);
11838 if (TREE_CODE (target) == FUNCTION_TYPE)
11839 return false;
11840 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11841 return true;
11842 }
11843
11844 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
11845
11846 tree
11847 obj_type_ref_class (tree ref)
11848 {
11849 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11850 ref = TREE_TYPE (ref);
11851 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11852 ref = TREE_TYPE (ref);
11853 /* We look for the type that THIS points to. ObjC also builds
11854 OBJ_TYPE_REF with non-method calls; their first parameter
11855 ID, however, also corresponds to the class type. */
11856 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11857 || TREE_CODE (ref) == FUNCTION_TYPE);
11858 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11859 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11860 return TREE_TYPE (ref);
11861 }
11862
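/* Illustrative example: for a C++ virtual call p->f () where "p" has
   type "struct S { virtual void f (); } *", the call target is an
   OBJ_TYPE_REF whose type is a pointer to a METHOD_TYPE taking "S *" as
   its first (this) argument, so obj_type_ref_class returns the
   RECORD_TYPE for S.  */
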
11863 /* Return true if T is in an anonymous namespace. */
11864
11865 bool
11866 type_in_anonymous_namespace_p (const_tree t)
11867 {
11868 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11869 builtin types; those have CONTEXT NULL. */
11870 if (!TYPE_CONTEXT (t))
11871 return false;
11872 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11873 }
11874
11875 /* Try to find a base info of BINFO that would have its field decl at offset
11876 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11877 found, return it; otherwise return NULL_TREE. */
11878
11879 tree
11880 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11881 {
11882 tree type = BINFO_TYPE (binfo);
11883
11884 while (true)
11885 {
11886 HOST_WIDE_INT pos, size;
11887 tree fld;
11888 int i;
11889
11890 if (types_same_for_odr (type, expected_type))
11891 return binfo;
11892 if (offset < 0)
11893 return NULL_TREE;
11894
11895 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11896 {
11897 if (TREE_CODE (fld) != FIELD_DECL)
11898 continue;
11899
11900 pos = int_bit_position (fld);
11901 size = tree_to_uhwi (DECL_SIZE (fld));
11902 if (pos <= offset && (pos + size) > offset)
11903 break;
11904 }
11905 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11906 return NULL_TREE;
11907
11908 if (!DECL_ARTIFICIAL (fld))
11909 {
11910 binfo = TYPE_BINFO (TREE_TYPE (fld));
11911 if (!binfo)
11912 return NULL_TREE;
11913 }
11914 /* Offset 0 indicates the primary base, whose vtable contents are
11915 represented in the binfo for the derived class. */
11916 else if (offset != 0)
11917 {
11918 tree base_binfo, binfo2 = binfo;
11919
11920 /* Find the BINFO corresponding to FLD. This is made a bit harder
11921 by the fact that with virtual inheritance we may need to walk down
11922 the non-virtual inheritance chain. */
11923 while (true)
11924 {
11925 tree containing_binfo = NULL, found_binfo = NULL;
11926 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11927 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11928 {
11929 found_binfo = base_binfo;
11930 break;
11931 }
11932 else
11933 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11934 - tree_to_shwi (BINFO_OFFSET (binfo)))
11935 * BITS_PER_UNIT < pos
11936 /* Rule out types with no virtual methods, or we can get confused
11937 here by zero-sized bases. */
11938 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11939 && (!containing_binfo
11940 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11941 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11942 containing_binfo = base_binfo;
11943 if (found_binfo)
11944 {
11945 binfo = found_binfo;
11946 break;
11947 }
11948 if (!containing_binfo)
11949 return NULL_TREE;
11950 binfo2 = containing_binfo;
11951 }
11952 }
11953
11954 type = TREE_TYPE (fld);
11955 offset -= pos;
11956 }
11957 }
11958
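/* Illustrative example (the exact layout is target-dependent): given

     struct A { virtual void f (); };
     struct B { virtual void g (); };
     struct C : A, B { };

   calling get_binfo_at_offset with C's binfo, the bit offset of the B
   base subobject and EXPECTED_TYPE == B returns the BINFO describing
   that B base, while offset 0 with EXPECTED_TYPE == A returns the BINFO
   that was passed in, since the primary base's vtable contents are
   represented in the derived class's binfo.  */
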
11959 /* Returns true if X is a typedef decl. */
11960
11961 bool
11962 is_typedef_decl (tree x)
11963 {
11964 return (x && TREE_CODE (x) == TYPE_DECL
11965 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11966 }
11967
11968 /* Returns true iff TYPE is a type variant created for a typedef. */
11969
11970 bool
11971 typedef_variant_p (tree type)
11972 {
11973 return is_typedef_decl (TYPE_NAME (type));
11974 }
11975
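/* Illustrative example: after

     typedef int myint;

   the TYPE_DECL for "myint" has DECL_ORIGINAL_TYPE pointing to int, so
   is_typedef_decl is true for that decl, and typedef_variant_p is true
   for the variant of int whose TYPE_NAME is that decl, but false for
   plain int itself.  */
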
11976 /* Warn about a use of an identifier which was marked deprecated. */
11977 void
11978 warn_deprecated_use (tree node, tree attr)
11979 {
11980 const char *msg;
11981
11982 if (node == 0 || !warn_deprecated_decl)
11983 return;
11984
11985 if (!attr)
11986 {
11987 if (DECL_P (node))
11988 attr = DECL_ATTRIBUTES (node);
11989 else if (TYPE_P (node))
11990 {
11991 tree decl = TYPE_STUB_DECL (node);
11992 if (decl)
11993 attr = lookup_attribute ("deprecated",
11994 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11995 }
11996 }
11997
11998 if (attr)
11999 attr = lookup_attribute ("deprecated", attr);
12000
12001 if (attr)
12002 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12003 else
12004 msg = NULL;
12005
12006 if (DECL_P (node))
12007 {
12008 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
12009 if (msg)
12010 warning (OPT_Wdeprecated_declarations,
12011 "%qD is deprecated (declared at %r%s:%d%R): %s",
12012 node, "locus", xloc.file, xloc.line, msg);
12013 else
12014 warning (OPT_Wdeprecated_declarations,
12015 "%qD is deprecated (declared at %r%s:%d%R)",
12016 node, "locus", xloc.file, xloc.line);
12017 }
12018 else if (TYPE_P (node))
12019 {
12020 tree what = NULL_TREE;
12021 tree decl = TYPE_STUB_DECL (node);
12022
12023 if (TYPE_NAME (node))
12024 {
12025 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12026 what = TYPE_NAME (node);
12027 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12028 && DECL_NAME (TYPE_NAME (node)))
12029 what = DECL_NAME (TYPE_NAME (node));
12030 }
12031
12032 if (decl)
12033 {
12034 expanded_location xloc
12035 = expand_location (DECL_SOURCE_LOCATION (decl));
12036 if (what)
12037 {
12038 if (msg)
12039 warning (OPT_Wdeprecated_declarations,
12040 "%qE is deprecated (declared at %r%s:%d%R): %s",
12041 what, "locus", xloc.file, xloc.line, msg);
12042 else
12043 warning (OPT_Wdeprecated_declarations,
12044 "%qE is deprecated (declared at %r%s:%d%R)",
12045 what, "locus", xloc.file, xloc.line);
12046 }
12047 else
12048 {
12049 if (msg)
12050 warning (OPT_Wdeprecated_declarations,
12051 "type is deprecated (declared at %r%s:%d%R): %s",
12052 "locus", xloc.file, xloc.line, msg);
12053 else
12054 warning (OPT_Wdeprecated_declarations,
12055 "type is deprecated (declared at %r%s:%d%R)",
12056 "locus", xloc.file, xloc.line);
12057 }
12058 }
12059 else
12060 {
12061 if (what)
12062 {
12063 if (msg)
12064 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12065 what, msg);
12066 else
12067 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12068 }
12069 else
12070 {
12071 if (msg)
12072 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12073 msg);
12074 else
12075 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12076 }
12077 }
12078 }
12079 }
12080
12081 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12082 somewhere in it. */
12083
12084 bool
12085 contains_bitfld_component_ref_p (const_tree ref)
12086 {
12087 while (handled_component_p (ref))
12088 {
12089 if (TREE_CODE (ref) == COMPONENT_REF
12090 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12091 return true;
12092 ref = TREE_OPERAND (ref, 0);
12093 }
12094
12095 return false;
12096 }
12097
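/* Illustrative example: with

     struct S { int i : 3; int j; } s;

   contains_bitfld_component_ref_p is true for the reference "s.i" and
   false for "s.j" or for plain "s".  */
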
12098 /* Try to determine whether a TRY_CATCH expression can fall through.
12099 This is a subroutine of block_may_fallthru. */
12100
12101 static bool
12102 try_catch_may_fallthru (const_tree stmt)
12103 {
12104 tree_stmt_iterator i;
12105
12106 /* If the TRY block can fall through, the whole TRY_CATCH can
12107 fall through. */
12108 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12109 return true;
12110
12111 i = tsi_start (TREE_OPERAND (stmt, 1));
12112 switch (TREE_CODE (tsi_stmt (i)))
12113 {
12114 case CATCH_EXPR:
12115 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12116 catch expression and a body. The whole TRY_CATCH may fall
12117 through iff any of the catch bodies falls through. */
12118 for (; !tsi_end_p (i); tsi_next (&i))
12119 {
12120 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12121 return true;
12122 }
12123 return false;
12124
12125 case EH_FILTER_EXPR:
12126 /* The exception filter expression only matters if there is an
12127 exception. If the exception does not match EH_FILTER_TYPES,
12128 we will execute EH_FILTER_FAILURE, and we will fall through
12129 if that falls through. If the exception does match
12130 EH_FILTER_TYPES, the stack unwinder will continue up the
12131 stack, so we will not fall through. We don't know whether we
12132 will throw an exception which matches EH_FILTER_TYPES or not,
12133 so we just ignore EH_FILTER_TYPES and assume that we might
12134 throw an exception which doesn't match. */
12135 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12136
12137 default:
12138 /* This case represents statements to be executed when an
12139 exception occurs. Those statements are implicitly followed
12140 by a RESX statement to resume execution after the exception.
12141 So in this case the TRY_CATCH never falls through. */
12142 return false;
12143 }
12144 }
12145
12146 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12147 need not be 100% accurate; simply be conservative and return true if we
12148 don't know. This is used only to avoid stupidly generating extra code.
12149 If we're wrong, we'll just delete the extra code later. */
12150
12151 bool
12152 block_may_fallthru (const_tree block)
12153 {
12154 /* This CONST_CAST is okay because expr_last returns its argument
12155 unmodified and we assign it to a const_tree. */
12156 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12157
12158 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12159 {
12160 case GOTO_EXPR:
12161 case RETURN_EXPR:
12162 /* Easy cases. If the last statement of the block implies
12163 control transfer, then we can't fall through. */
12164 return false;
12165
12166 case SWITCH_EXPR:
12167 /* If SWITCH_LABELS is set, this is lowered, and represents a
12168 branch to a selected label and hence cannot fall through.
12169 Otherwise SWITCH_BODY is set, and the switch can fall
12170 through. */
12171 return SWITCH_LABELS (stmt) == NULL_TREE;
12172
12173 case COND_EXPR:
12174 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12175 return true;
12176 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12177
12178 case BIND_EXPR:
12179 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12180
12181 case TRY_CATCH_EXPR:
12182 return try_catch_may_fallthru (stmt);
12183
12184 case TRY_FINALLY_EXPR:
12185 /* The finally clause is always executed after the try clause,
12186 so if it does not fall through, then the try-finally will not
12187 fall through. Otherwise, if the try clause does not fall
12188 through, then when the finally clause falls through it will
12189 resume execution wherever the try clause was going. So the
12190 whole try-finally will only fall through if both the try
12191 clause and the finally clause fall through. */
12192 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12193 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12194
12195 case MODIFY_EXPR:
12196 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12197 stmt = TREE_OPERAND (stmt, 1);
12198 else
12199 return true;
12200 /* FALLTHRU */
12201
12202 case CALL_EXPR:
12203 /* Functions that do not return do not fall through. */
12204 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12205
12206 case CLEANUP_POINT_EXPR:
12207 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12208
12209 case TARGET_EXPR:
12210 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12211
12212 case ERROR_MARK:
12213 return true;
12214
12215 default:
12216 return lang_hooks.block_may_fallthru (stmt);
12217 }
12218 }
12219
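/* Illustrative examples for block_may_fallthru:

     { x = 1; }               -> true
     { x = 1; return x; }     -> false (ends in a RETURN_EXPR)
     { if (c) return 1; }     -> true (the empty else arm falls through)
     { abort (); }            -> false (the callee is ECF_NORETURN)  */
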
12220 /* True if we are using EH to handle cleanups. */
12221 static bool using_eh_for_cleanups_flag = false;
12222
12223 /* This routine is called from front ends to indicate eh should be used for
12224 cleanups. */
12225 void
12226 using_eh_for_cleanups (void)
12227 {
12228 using_eh_for_cleanups_flag = true;
12229 }
12230
12231 /* Query whether EH is used for cleanups. */
12232 bool
12233 using_eh_for_cleanups_p (void)
12234 {
12235 return using_eh_for_cleanups_flag;
12236 }
12237
12238 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12239 const char *
12240 get_tree_code_name (enum tree_code code)
12241 {
12242 const char *invalid = "<invalid tree code>";
12243
12244 if (code >= MAX_TREE_CODES)
12245 return invalid;
12246
12247 return tree_code_name[code];
12248 }
12249
12250 /* Drops the TREE_OVERFLOW flag from T. */
12251
12252 tree
12253 drop_tree_overflow (tree t)
12254 {
12255 gcc_checking_assert (TREE_OVERFLOW (t));
12256
12257 /* For tree codes with a sharing machinery re-build the result. */
12258 if (TREE_CODE (t) == INTEGER_CST)
12259 return wide_int_to_tree (TREE_TYPE (t), t);
12260
12261 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12262 and drop the flag. */
12263 t = copy_node (t);
12264 TREE_OVERFLOW (t) = 0;
12265 return t;
12266 }
12267
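/* Illustrative example: folding the signed 32-bit addition INT_MAX + 1
   produces an INTEGER_CST for INT_MIN with TREE_OVERFLOW set;
   drop_tree_overflow hands back the ordinary shared constant for that
   value and leaves the flagged node untouched.  */
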
12268 /* Given a memory reference expression T, return its base address.
12269 The base address of a memory reference expression is the main
12270 object being referenced. For instance, the base address for
12271 'array[i].fld[j]' is 'array'. You can think of this as stripping
12272 away the offset part from a memory address.
12273
12274 This function calls handled_component_p to strip away all the inner
12275 parts of the memory reference until it reaches the base object. */
12276
12277 tree
12278 get_base_address (tree t)
12279 {
12280 while (handled_component_p (t))
12281 t = TREE_OPERAND (t, 0);
12282
12283 if ((TREE_CODE (t) == MEM_REF
12284 || TREE_CODE (t) == TARGET_MEM_REF)
12285 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12286 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12287
12288 /* ??? Either the alias oracle or all callers need to properly deal
12289 with WITH_SIZE_EXPRs before we can look through those. */
12290 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12291 return NULL_TREE;
12292
12293 return t;
12294 }
12295
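/* Illustrative examples for get_base_address:

     a.b[i].c          -> a
     MEM_REF[&x + 4]   -> x
     WITH_SIZE_EXPR    -> NULL_TREE (deliberately not looked through)  */
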
12296 #include "gt-tree.h"