tree.h (int_bit_position): Turn into inline function; implement using wide int.
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "function.h"
42 #include "obstack.h"
43 #include "toplev.h" /* get_random_seed */
44 #include "hashtab.h"
45 #include "inchash.h"
46 #include "filenames.h"
47 #include "output.h"
48 #include "target.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "basic-block.h"
54 #include "bitmap.h"
55 #include "tree-ssa-alias.h"
56 #include "internal-fn.h"
57 #include "gimple-expr.h"
58 #include "is-a.h"
59 #include "gimple.h"
60 #include "gimple-iterator.h"
61 #include "gimplify.h"
62 #include "gimple-ssa.h"
63 #include "cgraph.h"
64 #include "tree-phinodes.h"
65 #include "stringpool.h"
66 #include "tree-ssanames.h"
67 #include "expr.h"
68 #include "tree-dfa.h"
69 #include "params.h"
70 #include "tree-pass.h"
71 #include "langhooks-def.h"
72 #include "diagnostic.h"
73 #include "tree-diagnostic.h"
74 #include "tree-pretty-print.h"
75 #include "except.h"
76 #include "debug.h"
77 #include "intl.h"
78 #include "wide-int.h"
79 #include "builtins.h"
80
81 /* Tree code classes. */
82
83 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
84 #define END_OF_BASE_TREE_CODES tcc_exceptional,
85
86 const enum tree_code_class tree_code_type[] = {
87 #include "all-tree.def"
88 };
89
90 #undef DEFTREECODE
91 #undef END_OF_BASE_TREE_CODES
92
93 /* Table indexed by tree code giving number of expression
94 operands beyond the fixed part of the node structure.
95 Not used for types or decls. */
96
97 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
98 #define END_OF_BASE_TREE_CODES 0,
99
100 const unsigned char tree_code_length[] = {
101 #include "all-tree.def"
102 };
103
104 #undef DEFTREECODE
105 #undef END_OF_BASE_TREE_CODES
106
107 /* Names of tree components.
108 Used for printing out the tree and error messages. */
109 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
110 #define END_OF_BASE_TREE_CODES "@dummy",
111
112 static const char *const tree_code_name[] = {
113 #include "all-tree.def"
114 };
115
116 #undef DEFTREECODE
117 #undef END_OF_BASE_TREE_CODES
118
119 /* Each tree code class has an associated string representation.
120 These must correspond to the tree_code_class entries. */
121
122 const char *const tree_code_class_strings[] =
123 {
124 "exceptional",
125 "constant",
126 "type",
127 "declaration",
128 "reference",
129 "comparison",
130 "unary",
131 "binary",
132 "statement",
133 "vl_exp",
134 "expression"
135 };
136
137 /* obstack.[ch] explicitly declined to prototype this. */
138 extern int _obstack_allocated_p (struct obstack *h, void *obj);
139
140 /* Statistics-gathering stuff. */
141
142 static int tree_code_counts[MAX_TREE_CODES];
143 int tree_node_counts[(int) all_kinds];
144 int tree_node_sizes[(int) all_kinds];
145
146 /* Keep in sync with tree.h:enum tree_node_kind. */
147 static const char * const tree_node_kind_names[] = {
148 "decls",
149 "types",
150 "blocks",
151 "stmts",
152 "refs",
153 "exprs",
154 "constants",
155 "identifiers",
156 "vecs",
157 "binfos",
158 "ssa names",
159 "constructors",
160 "random kinds",
161 "lang_decl kinds",
162 "lang_type kinds",
163 "omp clauses",
164 };
165
166 /* Unique id for next decl created. */
167 static GTY(()) int next_decl_uid;
168 /* Unique id for next type created. */
169 static GTY(()) int next_type_uid = 1;
170 /* Unique id for next debug decl created. Use negative numbers,
171 to catch erroneous uses. */
172 static GTY(()) int next_debug_decl_uid;
173
174 /* Since we cannot rehash a type after it is in the table, we have to
175 keep the hash code. */
176
177 struct GTY(()) type_hash {
178 unsigned long hash;
179 tree type;
180 };
181
182 /* Initial size of the hash table (rounded to next prime). */
183 #define TYPE_HASH_INITIAL_SIZE 1000
184
185 /* Now here is the hash table. When recording a type, it is added to
186 the slot whose index is the hash code. Note that the hash table is
187 used for several kinds of types (function types, array types and
188 array index range types, for now). While all these live in the
189 same table, they are completely independent, and the hash code is
190 computed differently for each of these. */
191
192 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
193 htab_t type_hash_table;
194
195 /* Hash table and temporary node for larger integer const values. */
196 static GTY (()) tree int_cst_node;
197 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
198 htab_t int_cst_hash_table;
199
200 /* Hash table for optimization flags and target option flags. Use the same
201 hash table for both sets of options. Nodes for building the current
202 optimization and target option nodes. The assumption is most of the time
203 the options created will already be in the hash table, so we avoid
204 allocating and freeing up a node repeatedly. */
205 static GTY (()) tree cl_optimization_node;
206 static GTY (()) tree cl_target_option_node;
207 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
208 htab_t cl_option_hash_table;
209
210 /* General tree->tree mapping structure for use in hash tables. */
211
212
213 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
214 htab_t debug_expr_for_decl;
215
216 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
217 htab_t value_expr_for_decl;
218
219 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
220 htab_t debug_args_for_decl;
221
222 static void set_type_quals (tree, int);
223 static int type_hash_eq (const void *, const void *);
224 static hashval_t type_hash_hash (const void *);
225 static hashval_t int_cst_hash_hash (const void *);
226 static int int_cst_hash_eq (const void *, const void *);
227 static hashval_t cl_option_hash_hash (const void *);
228 static int cl_option_hash_eq (const void *, const void *);
229 static void print_type_hash_statistics (void);
230 static void print_debug_expr_statistics (void);
231 static void print_value_expr_statistics (void);
232 static int type_hash_marked_p (const void *);
233 static void type_hash_list (const_tree, inchash::hash &);
234 static void attribute_hash_list (const_tree, inchash::hash &);
235
236 tree global_trees[TI_MAX];
237 tree integer_types[itk_none];
238
239 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
240
241 /* Number of operands for each OpenMP clause. */
242 unsigned const char omp_clause_num_ops[] =
243 {
244 0, /* OMP_CLAUSE_ERROR */
245 1, /* OMP_CLAUSE_PRIVATE */
246 1, /* OMP_CLAUSE_SHARED */
247 1, /* OMP_CLAUSE_FIRSTPRIVATE */
248 2, /* OMP_CLAUSE_LASTPRIVATE */
249 4, /* OMP_CLAUSE_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 2, /* OMP_CLAUSE_ALIGNED */
254 1, /* OMP_CLAUSE_DEPEND */
255 1, /* OMP_CLAUSE_UNIFORM */
256 2, /* OMP_CLAUSE_FROM */
257 2, /* OMP_CLAUSE_TO */
258 2, /* OMP_CLAUSE_MAP */
259 1, /* OMP_CLAUSE__LOOPTEMP_ */
260 1, /* OMP_CLAUSE_IF */
261 1, /* OMP_CLAUSE_NUM_THREADS */
262 1, /* OMP_CLAUSE_SCHEDULE */
263 0, /* OMP_CLAUSE_NOWAIT */
264 0, /* OMP_CLAUSE_ORDERED */
265 0, /* OMP_CLAUSE_DEFAULT */
266 3, /* OMP_CLAUSE_COLLAPSE */
267 0, /* OMP_CLAUSE_UNTIED */
268 1, /* OMP_CLAUSE_FINAL */
269 0, /* OMP_CLAUSE_MERGEABLE */
270 1, /* OMP_CLAUSE_DEVICE */
271 1, /* OMP_CLAUSE_DIST_SCHEDULE */
272 0, /* OMP_CLAUSE_INBRANCH */
273 0, /* OMP_CLAUSE_NOTINBRANCH */
274 1, /* OMP_CLAUSE_NUM_TEAMS */
275 1, /* OMP_CLAUSE_THREAD_LIMIT */
276 0, /* OMP_CLAUSE_PROC_BIND */
277 1, /* OMP_CLAUSE_SAFELEN */
278 1, /* OMP_CLAUSE_SIMDLEN */
279 0, /* OMP_CLAUSE_FOR */
280 0, /* OMP_CLAUSE_PARALLEL */
281 0, /* OMP_CLAUSE_SECTIONS */
282 0, /* OMP_CLAUSE_TASKGROUP */
283 1, /* OMP_CLAUSE__SIMDUID_ */
284 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
285 };
286
287 const char * const omp_clause_code_name[] =
288 {
289 "error_clause",
290 "private",
291 "shared",
292 "firstprivate",
293 "lastprivate",
294 "reduction",
295 "copyin",
296 "copyprivate",
297 "linear",
298 "aligned",
299 "depend",
300 "uniform",
301 "from",
302 "to",
303 "map",
304 "_looptemp_",
305 "if",
306 "num_threads",
307 "schedule",
308 "nowait",
309 "ordered",
310 "default",
311 "collapse",
312 "untied",
313 "final",
314 "mergeable",
315 "device",
316 "dist_schedule",
317 "inbranch",
318 "notinbranch",
319 "num_teams",
320 "thread_limit",
321 "proc_bind",
322 "safelen",
323 "simdlen",
324 "for",
325 "parallel",
326 "sections",
327 "taskgroup",
328 "_simduid_",
329 "_Cilk_for_count_"
330 };
331
332
333 /* Return the tree node structure used by tree code CODE. */
334
335 static inline enum tree_node_structure_enum
336 tree_node_structure_for_code (enum tree_code code)
337 {
338 switch (TREE_CODE_CLASS (code))
339 {
340 case tcc_declaration:
341 {
342 switch (code)
343 {
344 case FIELD_DECL:
345 return TS_FIELD_DECL;
346 case PARM_DECL:
347 return TS_PARM_DECL;
348 case VAR_DECL:
349 return TS_VAR_DECL;
350 case LABEL_DECL:
351 return TS_LABEL_DECL;
352 case RESULT_DECL:
353 return TS_RESULT_DECL;
354 case DEBUG_EXPR_DECL:
355 return TS_DECL_WRTL;
356 case CONST_DECL:
357 return TS_CONST_DECL;
358 case TYPE_DECL:
359 return TS_TYPE_DECL;
360 case FUNCTION_DECL:
361 return TS_FUNCTION_DECL;
362 case TRANSLATION_UNIT_DECL:
363 return TS_TRANSLATION_UNIT_DECL;
364 default:
365 return TS_DECL_NON_COMMON;
366 }
367 }
368 case tcc_type:
369 return TS_TYPE_NON_COMMON;
370 case tcc_reference:
371 case tcc_comparison:
372 case tcc_unary:
373 case tcc_binary:
374 case tcc_expression:
375 case tcc_statement:
376 case tcc_vl_exp:
377 return TS_EXP;
378 default: /* tcc_constant and tcc_exceptional */
379 break;
380 }
381 switch (code)
382 {
383 /* tcc_constant cases. */
384 case VOID_CST: return TS_TYPED;
385 case INTEGER_CST: return TS_INT_CST;
386 case REAL_CST: return TS_REAL_CST;
387 case FIXED_CST: return TS_FIXED_CST;
388 case COMPLEX_CST: return TS_COMPLEX;
389 case VECTOR_CST: return TS_VECTOR;
390 case STRING_CST: return TS_STRING;
391 /* tcc_exceptional cases. */
392 case ERROR_MARK: return TS_COMMON;
393 case IDENTIFIER_NODE: return TS_IDENTIFIER;
394 case TREE_LIST: return TS_LIST;
395 case TREE_VEC: return TS_VEC;
396 case SSA_NAME: return TS_SSA_NAME;
397 case PLACEHOLDER_EXPR: return TS_COMMON;
398 case STATEMENT_LIST: return TS_STATEMENT_LIST;
399 case BLOCK: return TS_BLOCK;
400 case CONSTRUCTOR: return TS_CONSTRUCTOR;
401 case TREE_BINFO: return TS_BINFO;
402 case OMP_CLAUSE: return TS_OMP_CLAUSE;
403 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
404 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
405
406 default:
407 gcc_unreachable ();
408 }
409 }
410
411
412 /* Initialize tree_contains_struct to describe the hierarchy of tree
413 nodes. */
414
415 static void
416 initialize_tree_contains_struct (void)
417 {
418 unsigned i;
419
420 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
421 {
422 enum tree_code code;
423 enum tree_node_structure_enum ts_code;
424
425 code = (enum tree_code) i;
426 ts_code = tree_node_structure_for_code (code);
427
428 /* Mark the TS structure itself. */
429 tree_contains_struct[code][ts_code] = 1;
430
431 /* Mark all the structures that TS is derived from. */
432 switch (ts_code)
433 {
434 case TS_TYPED:
435 case TS_BLOCK:
436 MARK_TS_BASE (code);
437 break;
438
439 case TS_COMMON:
440 case TS_INT_CST:
441 case TS_REAL_CST:
442 case TS_FIXED_CST:
443 case TS_VECTOR:
444 case TS_STRING:
445 case TS_COMPLEX:
446 case TS_SSA_NAME:
447 case TS_CONSTRUCTOR:
448 case TS_EXP:
449 case TS_STATEMENT_LIST:
450 MARK_TS_TYPED (code);
451 break;
452
453 case TS_IDENTIFIER:
454 case TS_DECL_MINIMAL:
455 case TS_TYPE_COMMON:
456 case TS_LIST:
457 case TS_VEC:
458 case TS_BINFO:
459 case TS_OMP_CLAUSE:
460 case TS_OPTIMIZATION:
461 case TS_TARGET_OPTION:
462 MARK_TS_COMMON (code);
463 break;
464
465 case TS_TYPE_WITH_LANG_SPECIFIC:
466 MARK_TS_TYPE_COMMON (code);
467 break;
468
469 case TS_TYPE_NON_COMMON:
470 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
471 break;
472
473 case TS_DECL_COMMON:
474 MARK_TS_DECL_MINIMAL (code);
475 break;
476
477 case TS_DECL_WRTL:
478 case TS_CONST_DECL:
479 MARK_TS_DECL_COMMON (code);
480 break;
481
482 case TS_DECL_NON_COMMON:
483 MARK_TS_DECL_WITH_VIS (code);
484 break;
485
486 case TS_DECL_WITH_VIS:
487 case TS_PARM_DECL:
488 case TS_LABEL_DECL:
489 case TS_RESULT_DECL:
490 MARK_TS_DECL_WRTL (code);
491 break;
492
493 case TS_FIELD_DECL:
494 MARK_TS_DECL_COMMON (code);
495 break;
496
497 case TS_VAR_DECL:
498 MARK_TS_DECL_WITH_VIS (code);
499 break;
500
501 case TS_TYPE_DECL:
502 case TS_FUNCTION_DECL:
503 MARK_TS_DECL_NON_COMMON (code);
504 break;
505
506 case TS_TRANSLATION_UNIT_DECL:
507 MARK_TS_DECL_COMMON (code);
508 break;
509
510 default:
511 gcc_unreachable ();
512 }
513 }
514
515 /* Basic consistency checks for attributes used in fold. */
516 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
517 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
518 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
519 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
520 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
521 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
522 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
523 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
524 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
525 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
526 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
527 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
528 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
529 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
530 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
531 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
532 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
533 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
534 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
535 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
536 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
537 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
538 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
539 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
540 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
541 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
542 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
543 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
544 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
545 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
546 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
547 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
548 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
549 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
550 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
551 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
552 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
553 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
554 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
555 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
556 }
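/* A brief illustration of what the table above buys us (a sketch, not a
   quote from any particular caller): once initialize_tree_contains_struct
   has run, a query such as

     CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_WITH_VIS)

   is a constant-time lookup in tree_contains_struct and returns nonzero,
   which is how code elsewhere decides whether a given accessor is valid
   for a tree code.  */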
557
558
559 /* Init tree.c. */
560
561 void
562 init_ttree (void)
563 {
564 /* Initialize the hash table of types. */
565 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
566 type_hash_eq, 0);
567
568 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
569 tree_decl_map_eq, 0);
570
571 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
572 tree_decl_map_eq, 0);
573
574 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
575 int_cst_hash_eq, NULL);
576
577 int_cst_node = make_int_cst (1, 1);
578
579 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
580 cl_option_hash_eq, NULL);
581
582 cl_optimization_node = make_node (OPTIMIZATION_NODE);
583 cl_target_option_node = make_node (TARGET_OPTION_NODE);
584
585 /* Initialize the tree_contains_struct array. */
586 initialize_tree_contains_struct ();
587 lang_hooks.init_ts ();
588 }
589
590 \f
591 /* The name of the object as the assembler will see it (but before any
592 translations made by ASM_OUTPUT_LABELREF). Often this is the same
593 as DECL_NAME. It is an IDENTIFIER_NODE. */
594 tree
595 decl_assembler_name (tree decl)
596 {
597 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
598 lang_hooks.set_decl_assembler_name (decl);
599 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
600 }
601
602 /* When the target supports COMDAT groups, this indicates which group the
603 DECL is associated with. This can be either an IDENTIFIER_NODE or a
604 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
605 tree
606 decl_comdat_group (const_tree node)
607 {
608 struct symtab_node *snode = symtab_node::get (node);
609 if (!snode)
610 return NULL;
611 return snode->get_comdat_group ();
612 }
613
614 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
615 tree
616 decl_comdat_group_id (const_tree node)
617 {
618 struct symtab_node *snode = symtab_node::get (node);
619 if (!snode)
620 return NULL;
621 return snode->get_comdat_group_id ();
622 }
623
624 /* When the target supports named sections, return the name of the section
625 into which NODE is placed, or NULL if it is in no section. */
626 const char *
627 decl_section_name (const_tree node)
628 {
629 struct symtab_node *snode = symtab_node::get (node);
630 if (!snode)
631 return NULL;
632 return snode->get_section ();
633 }
634
635 /* Set the section name of NODE to the string VALUE, or clear NODE's
636 section when VALUE is NULL. */
637 void
638 set_decl_section_name (tree node, const char *value)
639 {
640 struct symtab_node *snode;
641
642 if (value == NULL)
643 {
644 snode = symtab_node::get (node);
645 if (!snode)
646 return;
647 }
648 else if (TREE_CODE (node) == VAR_DECL)
649 snode = varpool_node::get_create (node);
650 else
651 snode = cgraph_node::get_create (node);
652 snode->set_section (value);
653 }
654
655 /* Return TLS model of a variable NODE. */
656 enum tls_model
657 decl_tls_model (const_tree node)
658 {
659 struct varpool_node *snode = varpool_node::get (node);
660 if (!snode)
661 return TLS_MODEL_NONE;
662 return snode->tls_model;
663 }
664
665 /* Set TLS model of variable NODE to MODEL. */
666 void
667 set_decl_tls_model (tree node, enum tls_model model)
668 {
669 struct varpool_node *vnode;
670
671 if (model == TLS_MODEL_NONE)
672 {
673 vnode = varpool_node::get (node);
674 if (!vnode)
675 return;
676 }
677 else
678 vnode = varpool_node::get_create (node);
679 vnode->tls_model = model;
680 }
681
682 /* Compute the number of bytes occupied by a tree with code CODE.
683 This function cannot be used for nodes that have variable sizes,
684 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
685 size_t
686 tree_code_size (enum tree_code code)
687 {
688 switch (TREE_CODE_CLASS (code))
689 {
690 case tcc_declaration: /* A decl node */
691 {
692 switch (code)
693 {
694 case FIELD_DECL:
695 return sizeof (struct tree_field_decl);
696 case PARM_DECL:
697 return sizeof (struct tree_parm_decl);
698 case VAR_DECL:
699 return sizeof (struct tree_var_decl);
700 case LABEL_DECL:
701 return sizeof (struct tree_label_decl);
702 case RESULT_DECL:
703 return sizeof (struct tree_result_decl);
704 case CONST_DECL:
705 return sizeof (struct tree_const_decl);
706 case TYPE_DECL:
707 return sizeof (struct tree_type_decl);
708 case FUNCTION_DECL:
709 return sizeof (struct tree_function_decl);
710 case DEBUG_EXPR_DECL:
711 return sizeof (struct tree_decl_with_rtl);
712 case TRANSLATION_UNIT_DECL:
713 return sizeof (struct tree_translation_unit_decl);
714 case NAMESPACE_DECL:
715 case IMPORTED_DECL:
716 case NAMELIST_DECL:
717 return sizeof (struct tree_decl_non_common);
718 default:
719 return lang_hooks.tree_size (code);
720 }
721 }
722
723 case tcc_type: /* a type node */
724 return sizeof (struct tree_type_non_common);
725
726 case tcc_reference: /* a reference */
727 case tcc_expression: /* an expression */
728 case tcc_statement: /* an expression with side effects */
729 case tcc_comparison: /* a comparison expression */
730 case tcc_unary: /* a unary arithmetic expression */
731 case tcc_binary: /* a binary arithmetic expression */
732 return (sizeof (struct tree_exp)
733 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
734
735 case tcc_constant: /* a constant */
736 switch (code)
737 {
738 case VOID_CST: return sizeof (struct tree_typed);
739 case INTEGER_CST: gcc_unreachable ();
740 case REAL_CST: return sizeof (struct tree_real_cst);
741 case FIXED_CST: return sizeof (struct tree_fixed_cst);
742 case COMPLEX_CST: return sizeof (struct tree_complex);
743 case VECTOR_CST: return sizeof (struct tree_vector);
744 case STRING_CST: gcc_unreachable ();
745 default:
746 return lang_hooks.tree_size (code);
747 }
748
749 case tcc_exceptional: /* something random, like an identifier. */
750 switch (code)
751 {
752 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
753 case TREE_LIST: return sizeof (struct tree_list);
754
755 case ERROR_MARK:
756 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
757
758 case TREE_VEC:
759 case OMP_CLAUSE: gcc_unreachable ();
760
761 case SSA_NAME: return sizeof (struct tree_ssa_name);
762
763 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
764 case BLOCK: return sizeof (struct tree_block);
765 case CONSTRUCTOR: return sizeof (struct tree_constructor);
766 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
767 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
768
769 default:
770 return lang_hooks.tree_size (code);
771 }
772
773 default:
774 gcc_unreachable ();
775 }
776 }
777
778 /* Compute the number of bytes occupied by NODE. This routine only
779 looks at TREE_CODE, except for those nodes that have variable sizes. */
780 size_t
781 tree_size (const_tree node)
782 {
783 const enum tree_code code = TREE_CODE (node);
784 switch (code)
785 {
786 case INTEGER_CST:
787 return (sizeof (struct tree_int_cst)
788 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
789
790 case TREE_BINFO:
791 return (offsetof (struct tree_binfo, base_binfos)
792 + vec<tree, va_gc>
793 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
794
795 case TREE_VEC:
796 return (sizeof (struct tree_vec)
797 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
798
799 case VECTOR_CST:
800 return (sizeof (struct tree_vector)
801 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
802
803 case STRING_CST:
804 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
805
806 case OMP_CLAUSE:
807 return (sizeof (struct tree_omp_clause)
808 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
809 * sizeof (tree));
810
811 default:
812 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
813 return (sizeof (struct tree_exp)
814 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
815 else
816 return tree_code_size (code);
817 }
818 }
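/* A worked example of the split between the two size functions: for a
   fixed-size code such as REAL_CST, tree_size and tree_code_size agree,
   whereas an INTEGER_CST whose TREE_INT_CST_EXT_NUNITS is 3 occupies
   sizeof (struct tree_int_cst) + 2 * sizeof (HOST_WIDE_INT) bytes, and
   tree_code_size (INTEGER_CST) deliberately aborts because the answer
   depends on the particular node.  */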
819
820 /* Record interesting allocation statistics for a tree node with CODE
821 and LENGTH. */
822
823 static void
824 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
825 size_t length ATTRIBUTE_UNUSED)
826 {
827 enum tree_code_class type = TREE_CODE_CLASS (code);
828 tree_node_kind kind;
829
830 if (!GATHER_STATISTICS)
831 return;
832
833 switch (type)
834 {
835 case tcc_declaration: /* A decl node */
836 kind = d_kind;
837 break;
838
839 case tcc_type: /* a type node */
840 kind = t_kind;
841 break;
842
843 case tcc_statement: /* an expression with side effects */
844 kind = s_kind;
845 break;
846
847 case tcc_reference: /* a reference */
848 kind = r_kind;
849 break;
850
851 case tcc_expression: /* an expression */
852 case tcc_comparison: /* a comparison expression */
853 case tcc_unary: /* a unary arithmetic expression */
854 case tcc_binary: /* a binary arithmetic expression */
855 kind = e_kind;
856 break;
857
858 case tcc_constant: /* a constant */
859 kind = c_kind;
860 break;
861
862 case tcc_exceptional: /* something random, like an identifier. */
863 switch (code)
864 {
865 case IDENTIFIER_NODE:
866 kind = id_kind;
867 break;
868
869 case TREE_VEC:
870 kind = vec_kind;
871 break;
872
873 case TREE_BINFO:
874 kind = binfo_kind;
875 break;
876
877 case SSA_NAME:
878 kind = ssa_name_kind;
879 break;
880
881 case BLOCK:
882 kind = b_kind;
883 break;
884
885 case CONSTRUCTOR:
886 kind = constr_kind;
887 break;
888
889 case OMP_CLAUSE:
890 kind = omp_clause_kind;
891 break;
892
893 default:
894 kind = x_kind;
895 break;
896 }
897 break;
898
899 case tcc_vl_exp:
900 kind = e_kind;
901 break;
902
903 default:
904 gcc_unreachable ();
905 }
906
907 tree_code_counts[(int) code]++;
908 tree_node_counts[(int) kind]++;
909 tree_node_sizes[(int) kind] += length;
910 }
911
912 /* Allocate and return a new UID from the DECL_UID namespace. */
913
914 int
915 allocate_decl_uid (void)
916 {
917 return next_decl_uid++;
918 }
919
920 /* Return a newly allocated node of code CODE. For decl and type
921 nodes, some other fields are initialized. The rest of the node is
922 initialized to zero. This function cannot be used for TREE_VEC,
923 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
924 tree_code_size.
925
926 Achoo! I got a code in the node. */
927
928 tree
929 make_node_stat (enum tree_code code MEM_STAT_DECL)
930 {
931 tree t;
932 enum tree_code_class type = TREE_CODE_CLASS (code);
933 size_t length = tree_code_size (code);
934
935 record_node_allocation_statistics (code, length);
936
937 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
938 TREE_SET_CODE (t, code);
939
940 switch (type)
941 {
942 case tcc_statement:
943 TREE_SIDE_EFFECTS (t) = 1;
944 break;
945
946 case tcc_declaration:
947 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
948 {
949 if (code == FUNCTION_DECL)
950 {
951 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
952 DECL_MODE (t) = FUNCTION_MODE;
953 }
954 else
955 DECL_ALIGN (t) = 1;
956 }
957 DECL_SOURCE_LOCATION (t) = input_location;
958 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
959 DECL_UID (t) = --next_debug_decl_uid;
960 else
961 {
962 DECL_UID (t) = allocate_decl_uid ();
963 SET_DECL_PT_UID (t, -1);
964 }
965 if (TREE_CODE (t) == LABEL_DECL)
966 LABEL_DECL_UID (t) = -1;
967
968 break;
969
970 case tcc_type:
971 TYPE_UID (t) = next_type_uid++;
972 TYPE_ALIGN (t) = BITS_PER_UNIT;
973 TYPE_USER_ALIGN (t) = 0;
974 TYPE_MAIN_VARIANT (t) = t;
975 TYPE_CANONICAL (t) = t;
976
977 /* Default to no attributes for type, but let target change that. */
978 TYPE_ATTRIBUTES (t) = NULL_TREE;
979 targetm.set_default_type_attributes (t);
980
981 /* We have not yet computed the alias set for this type. */
982 TYPE_ALIAS_SET (t) = -1;
983 break;
984
985 case tcc_constant:
986 TREE_CONSTANT (t) = 1;
987 break;
988
989 case tcc_expression:
990 switch (code)
991 {
992 case INIT_EXPR:
993 case MODIFY_EXPR:
994 case VA_ARG_EXPR:
995 case PREDECREMENT_EXPR:
996 case PREINCREMENT_EXPR:
997 case POSTDECREMENT_EXPR:
998 case POSTINCREMENT_EXPR:
999 /* All of these have side-effects, no matter what their
1000 operands are. */
1001 TREE_SIDE_EFFECTS (t) = 1;
1002 break;
1003
1004 default:
1005 break;
1006 }
1007 break;
1008
1009 default:
1010 /* Other classes need no special treatment. */
1011 break;
1012 }
1013
1014 return t;
1015 }
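/* A minimal usage sketch (the names here are illustrative, not taken from
   any particular caller): a front end creating a scratch variable might do

     tree var = make_node (VAR_DECL);
     TREE_TYPE (var) = integer_type_node;
     DECL_NAME (var) = get_identifier ("tmp");

   and rely on the defaults set above: the node is otherwise zeroed,
   DECL_ALIGN (var) is 1, DECL_SOURCE_LOCATION (var) is input_location,
   and a fresh DECL_UID has been assigned.  */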
1016 \f
1017 /* Return a new node with the same contents as NODE except that its
1018 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1019
1020 tree
1021 copy_node_stat (tree node MEM_STAT_DECL)
1022 {
1023 tree t;
1024 enum tree_code code = TREE_CODE (node);
1025 size_t length;
1026
1027 gcc_assert (code != STATEMENT_LIST);
1028
1029 length = tree_size (node);
1030 record_node_allocation_statistics (code, length);
1031 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1032 memcpy (t, node, length);
1033
1034 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1035 TREE_CHAIN (t) = 0;
1036 TREE_ASM_WRITTEN (t) = 0;
1037 TREE_VISITED (t) = 0;
1038
1039 if (TREE_CODE_CLASS (code) == tcc_declaration)
1040 {
1041 if (code == DEBUG_EXPR_DECL)
1042 DECL_UID (t) = --next_debug_decl_uid;
1043 else
1044 {
1045 DECL_UID (t) = allocate_decl_uid ();
1046 if (DECL_PT_UID_SET_P (node))
1047 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1048 }
1049 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1050 && DECL_HAS_VALUE_EXPR_P (node))
1051 {
1052 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1053 DECL_HAS_VALUE_EXPR_P (t) = 1;
1054 }
1055 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1056 if (TREE_CODE (node) == VAR_DECL)
1057 {
1058 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1059 t->decl_with_vis.symtab_node = NULL;
1060 }
1061 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1062 {
1063 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1064 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1065 }
1066 if (TREE_CODE (node) == FUNCTION_DECL)
1067 {
1068 DECL_STRUCT_FUNCTION (t) = NULL;
1069 t->decl_with_vis.symtab_node = NULL;
1070 }
1071 }
1072 else if (TREE_CODE_CLASS (code) == tcc_type)
1073 {
1074 TYPE_UID (t) = next_type_uid++;
1075 /* The following is so that the debug code for
1076 the copy is different from the original type.
1077 The two statements usually duplicate each other
1078 (because they clear fields of the same union),
1079 but the optimizer should catch that. */
1080 TYPE_SYMTAB_POINTER (t) = 0;
1081 TYPE_SYMTAB_ADDRESS (t) = 0;
1082
1083 /* Do not copy the values cache. */
1084 if (TYPE_CACHED_VALUES_P (t))
1085 {
1086 TYPE_CACHED_VALUES_P (t) = 0;
1087 TYPE_CACHED_VALUES (t) = NULL_TREE;
1088 }
1089 }
1090
1091 return t;
1092 }
1093
1094 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1095 For example, this can copy a list made of TREE_LIST nodes. */
1096
1097 tree
1098 copy_list (tree list)
1099 {
1100 tree head;
1101 tree prev, next;
1102
1103 if (list == 0)
1104 return 0;
1105
1106 head = prev = copy_node (list);
1107 next = TREE_CHAIN (list);
1108 while (next)
1109 {
1110 TREE_CHAIN (prev) = copy_node (next);
1111 prev = TREE_CHAIN (prev);
1112 next = TREE_CHAIN (next);
1113 }
1114 return head;
1115 }
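/* E.g. a caller that wants to edit an attribute list without disturbing
   the original might write (a sketch; ATTRS stands for any TREE_LIST the
   caller already has):

     tree attrs_copy = copy_list (attrs);

   Only the TREE_LIST nodes themselves are fresh; the TREE_PURPOSE and
   TREE_VALUE fields still point at the shared operands.  */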
1116
1117 \f
1118 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1119 INTEGER_CST with value CST and type TYPE. */
1120
1121 static unsigned int
1122 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1123 {
1124 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1125 /* We need an extra zero HWI if CST is an unsigned integer with its
1126 upper bit set, and if CST occupies a whole number of HWIs. */
1127 if (TYPE_UNSIGNED (type)
1128 && wi::neg_p (cst)
1129 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1130 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1131 return cst.get_len ();
1132 }
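/* Worked example, assuming 64-bit HOST_WIDE_INTs: for a 64-bit unsigned
   TYPE and CST equal to 0x8000000000000000, the single element that
   represents CST is negative and the precision is a whole number of HWIs,
   so the result is 64 / 64 + 1 == 2: one element for the value plus a
   zero element recording that the constant is not negative.  For the same
   bit pattern in a signed 64-bit type the result is just cst.get_len (),
   i.e. 1.  */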
1133
1134 /* Return a new INTEGER_CST with value CST and type TYPE. */
1135
1136 static tree
1137 build_new_int_cst (tree type, const wide_int &cst)
1138 {
1139 unsigned int len = cst.get_len ();
1140 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1141 tree nt = make_int_cst (len, ext_len);
1142
1143 if (len < ext_len)
1144 {
1145 --ext_len;
1146 TREE_INT_CST_ELT (nt, ext_len) = 0;
1147 for (unsigned int i = len; i < ext_len; ++i)
1148 TREE_INT_CST_ELT (nt, i) = -1;
1149 }
1150 else if (TYPE_UNSIGNED (type)
1151 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1152 {
1153 len--;
1154 TREE_INT_CST_ELT (nt, len)
1155 = zext_hwi (cst.elt (len),
1156 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1157 }
1158
1159 for (unsigned int i = 0; i < len; i++)
1160 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1161 TREE_TYPE (nt) = type;
1162 return nt;
1163 }
1164
1165 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1166
1167 tree
1168 build_int_cst (tree type, HOST_WIDE_INT low)
1169 {
1170 /* Support legacy code. */
1171 if (!type)
1172 type = integer_type_node;
1173
1174 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1175 }
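/* A usage sketch: because the result goes through wide_int_to_tree, small
   values are shared, so

     tree a = build_int_cst (integer_type_node, 10);
     tree b = build_int_cst (integer_type_node, 10);

   yields a == b as pointers, and tree_to_shwi (a) == 10.  Note that LOW
   is sign extended to the type, so build_int_cst (unsigned_type_node, -1)
   is the all-ones value of that type.  */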
1176
1177 tree
1178 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1179 {
1180 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1181 }
1182
1183 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1184
1185 tree
1186 build_int_cst_type (tree type, HOST_WIDE_INT low)
1187 {
1188 gcc_assert (type);
1189 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1190 }
1191
1192 /* Construct a tree of type TYPE with the value given by CST. The
1193 signedness of CST is assumed to be the same as the signedness of TYPE. */
1194
1195 tree
1196 double_int_to_tree (tree type, double_int cst)
1197 {
1198 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1199 }
1200
1201 /* Force the wide_int CST to the range of the type TYPE by sign- or
1202 zero-extending it. OVERFLOWABLE indicates whether we are interested
1203 in overflow of the value: when > 0 we are only interested in signed
1204 overflow, when < 0 we are interested in any overflow. OVERFLOWED
1205 indicates whether overflow has already occurred. We force CST's
1206 value to be within range of TYPE (by setting to 0 or 1 all the
1207 bits outside the type's range). We set TREE_OVERFLOW on the
1208 returned constant if
1209 OVERFLOWED is nonzero,
1210 or OVERFLOWABLE is > 0 and signed overflow occurs,
1211 or OVERFLOWABLE is < 0 and any overflow occurs.
1212 We return a new tree node for the extended wide_int. The node
1213 is shared if no overflow flags are set. */
1214
1215
1216 tree
1217 force_fit_type (tree type, const wide_int_ref &cst,
1218 int overflowable, bool overflowed)
1219 {
1220 signop sign = TYPE_SIGN (type);
1221
1222 /* If we need to set overflow flags, return a new unshared node. */
1223 if (overflowed || !wi::fits_to_tree_p (cst, type))
1224 {
1225 if (overflowed
1226 || overflowable < 0
1227 || (overflowable > 0 && sign == SIGNED))
1228 {
1229 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1230 tree t = build_new_int_cst (type, tmp);
1231 TREE_OVERFLOW (t) = 1;
1232 return t;
1233 }
1234 }
1235
1236 /* Else build a shared node. */
1237 return wide_int_to_tree (type, cst);
1238 }
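/* Worked example: suppose TYPE is an 8-bit unsigned type and CST is the
   wide_int 300.  wi::fits_to_tree_p is false, so with OVERFLOWED set or
   OVERFLOWABLE < 0 we build a fresh, unshared INTEGER_CST whose value is
   300 truncated to 8 bits (i.e. 44) with TREE_OVERFLOW set.  With
   OVERFLOWABLE > 0 and an unsigned type, only signed overflow matters,
   so we fall through to wide_int_to_tree and return the shared constant
   44 with no overflow flag.  */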
1239
1240 /* These are the hash table functions for the hash table of INTEGER_CST
1241 nodes of a sizetype. */
1242
1243 /* Return the hash code of X, an INTEGER_CST. */
1244
1245 static hashval_t
1246 int_cst_hash_hash (const void *x)
1247 {
1248 const_tree const t = (const_tree) x;
1249 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1250 int i;
1251
1252 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1253 code ^= TREE_INT_CST_ELT (t, i);
1254
1255 return code;
1256 }
1257
1258 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1259 is the same as that given by *Y, which is also an INTEGER_CST tree node. */
1260
1261 static int
1262 int_cst_hash_eq (const void *x, const void *y)
1263 {
1264 const_tree const xt = (const_tree) x;
1265 const_tree const yt = (const_tree) y;
1266
1267 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1268 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1269 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1270 return false;
1271
1272 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1273 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1274 return false;
1275
1276 return true;
1277 }
1278
1279 /* Create an INT_CST node of TYPE and value CST.
1280 The returned node is always shared. For small integers we use a
1281 per-type vector cache, for larger ones we use a single hash table.
1282 The value is extended from its precision according to the sign of
1283 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1284 the upper bits and ensures that hashing and value equality based
1285 upon the underlying HOST_WIDE_INTs works without masking. */
1286
1287 tree
1288 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1289 {
1290 tree t;
1291 int ix = -1;
1292 int limit = 0;
1293
1294 gcc_assert (type);
1295 unsigned int prec = TYPE_PRECISION (type);
1296 signop sgn = TYPE_SIGN (type);
1297
1298 /* Verify that everything is canonical. */
1299 int l = pcst.get_len ();
1300 if (l > 1)
1301 {
1302 if (pcst.elt (l - 1) == 0)
1303 gcc_checking_assert (pcst.elt (l - 2) < 0);
1304 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1305 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1306 }
1307
1308 wide_int cst = wide_int::from (pcst, prec, sgn);
1309 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1310
1311 if (ext_len == 1)
1312 {
1313 /* We just need to store a single HOST_WIDE_INT. */
1314 HOST_WIDE_INT hwi;
1315 if (TYPE_UNSIGNED (type))
1316 hwi = cst.to_uhwi ();
1317 else
1318 hwi = cst.to_shwi ();
1319
1320 switch (TREE_CODE (type))
1321 {
1322 case NULLPTR_TYPE:
1323 gcc_assert (hwi == 0);
1324 /* Fallthru. */
1325
1326 case POINTER_TYPE:
1327 case REFERENCE_TYPE:
1328 /* Cache NULL pointer. */
1329 if (hwi == 0)
1330 {
1331 limit = 1;
1332 ix = 0;
1333 }
1334 break;
1335
1336 case BOOLEAN_TYPE:
1337 /* Cache false or true. */
1338 limit = 2;
1339 if (hwi < 2)
1340 ix = hwi;
1341 break;
1342
1343 case INTEGER_TYPE:
1344 case OFFSET_TYPE:
1345 if (TYPE_SIGN (type) == UNSIGNED)
1346 {
1347 /* Cache [0, N). */
1348 limit = INTEGER_SHARE_LIMIT;
1349 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1350 ix = hwi;
1351 }
1352 else
1353 {
1354 /* Cache [-1, N). */
1355 limit = INTEGER_SHARE_LIMIT + 1;
1356 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1357 ix = hwi + 1;
1358 }
1359 break;
1360
1361 case ENUMERAL_TYPE:
1362 break;
1363
1364 default:
1365 gcc_unreachable ();
1366 }
1367
1368 if (ix >= 0)
1369 {
1370 /* Look for it in the type's vector of small shared ints. */
1371 if (!TYPE_CACHED_VALUES_P (type))
1372 {
1373 TYPE_CACHED_VALUES_P (type) = 1;
1374 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1375 }
1376
1377 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1378 if (t)
1379 /* Make sure no one is clobbering the shared constant. */
1380 gcc_checking_assert (TREE_TYPE (t) == type
1381 && TREE_INT_CST_NUNITS (t) == 1
1382 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1383 && TREE_INT_CST_EXT_NUNITS (t) == 1
1384 && TREE_INT_CST_ELT (t, 0) == hwi);
1385 else
1386 {
1387 /* Create a new shared int. */
1388 t = build_new_int_cst (type, cst);
1389 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1390 }
1391 }
1392 else
1393 {
1394 /* Use the cache of larger shared ints, using int_cst_node as
1395 a temporary. */
1396 void **slot;
1397
1398 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1399 TREE_TYPE (int_cst_node) = type;
1400
1401 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1402 t = (tree) *slot;
1403 if (!t)
1404 {
1405 /* Insert this one into the hash table. */
1406 t = int_cst_node;
1407 *slot = t;
1408 /* Make a new node for next time round. */
1409 int_cst_node = make_int_cst (1, 1);
1410 }
1411 }
1412 }
1413 else
1414 {
1415 /* The value either hashes properly or we drop it on the floor
1416 for the gc to take care of. There will not be enough of them
1417 to worry about. */
1418 void **slot;
1419
1420 tree nt = build_new_int_cst (type, cst);
1421 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1422 t = (tree) *slot;
1423 if (!t)
1424 {
1425 /* Insert this one into the hash table. */
1426 t = nt;
1427 *slot = t;
1428 }
1429 }
1430
1431 return t;
1432 }
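/* Sharing in practice (a small sketch; the values are illustrative):

     tree a = wide_int_to_tree (unsigned_type_node,
                                wi::uhwi (3, TYPE_PRECISION (unsigned_type_node)));
     tree b = build_int_cstu (unsigned_type_node, 3);

   Both requests are satisfied from the per-type TYPE_CACHED_VALUES
   vector, so a == b as pointers.  A value outside the small-integer
   cache, say 123456789, goes through int_cst_hash_table instead and is
   still shared, but only with constants of exactly the same type.  */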
1433
1434 void
1435 cache_integer_cst (tree t)
1436 {
1437 tree type = TREE_TYPE (t);
1438 int ix = -1;
1439 int limit = 0;
1440 int prec = TYPE_PRECISION (type);
1441
1442 gcc_assert (!TREE_OVERFLOW (t));
1443
1444 switch (TREE_CODE (type))
1445 {
1446 case NULLPTR_TYPE:
1447 gcc_assert (integer_zerop (t));
1448 /* Fallthru. */
1449
1450 case POINTER_TYPE:
1451 case REFERENCE_TYPE:
1452 /* Cache NULL pointer. */
1453 if (integer_zerop (t))
1454 {
1455 limit = 1;
1456 ix = 0;
1457 }
1458 break;
1459
1460 case BOOLEAN_TYPE:
1461 /* Cache false or true. */
1462 limit = 2;
1463 if (wi::ltu_p (t, 2))
1464 ix = TREE_INT_CST_ELT (t, 0);
1465 break;
1466
1467 case INTEGER_TYPE:
1468 case OFFSET_TYPE:
1469 if (TYPE_UNSIGNED (type))
1470 {
1471 /* Cache 0..N */
1472 limit = INTEGER_SHARE_LIMIT;
1473
1474 /* This is a little hokey, but if the prec is smaller than
1475 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1476 obvious test will not get the correct answer. */
1477 if (prec < HOST_BITS_PER_WIDE_INT)
1478 {
1479 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1480 ix = tree_to_uhwi (t);
1481 }
1482 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1483 ix = tree_to_uhwi (t);
1484 }
1485 else
1486 {
1487 /* Cache -1..N */
1488 limit = INTEGER_SHARE_LIMIT + 1;
1489
1490 if (integer_minus_onep (t))
1491 ix = 0;
1492 else if (!wi::neg_p (t))
1493 {
1494 if (prec < HOST_BITS_PER_WIDE_INT)
1495 {
1496 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1497 ix = tree_to_shwi (t) + 1;
1498 }
1499 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1500 ix = tree_to_shwi (t) + 1;
1501 }
1502 }
1503 break;
1504
1505 case ENUMERAL_TYPE:
1506 break;
1507
1508 default:
1509 gcc_unreachable ();
1510 }
1511
1512 if (ix >= 0)
1513 {
1514 /* Look for it in the type's vector of small shared ints. */
1515 if (!TYPE_CACHED_VALUES_P (type))
1516 {
1517 TYPE_CACHED_VALUES_P (type) = 1;
1518 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1519 }
1520
1521 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1522 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1523 }
1524 else
1525 {
1526 /* Use the cache of larger shared ints. */
1527 void **slot;
1528
1529 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1530 /* If there is already an entry for the number verify it's the
1531 same. */
1532 if (*slot)
1533 gcc_assert (wi::eq_p (tree (*slot), t));
1534 else
1535 /* Otherwise insert this one into the hash table. */
1536 *slot = t;
1537 }
1538 }
1539
1540
1541 /* Build an integer constant in TYPE such that the lowest BITS bits are ones
1542 and the rest are zeros. */
1543
1544 tree
1545 build_low_bits_mask (tree type, unsigned bits)
1546 {
1547 gcc_assert (bits <= TYPE_PRECISION (type));
1548
1549 return wide_int_to_tree (type, wi::mask (bits, false,
1550 TYPE_PRECISION (type)));
1551 }
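/* E.g. build_low_bits_mask (unsigned_type_node, 4) is the constant 0xf,
   and BITS equal to TYPE_PRECISION (type) yields the all-ones value of
   the type.  */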
1552
1553 /* Checks that X is an integer constant that can be expressed in (unsigned)
1554 HOST_WIDE_INT without loss of precision. */
1555
1556 bool
1557 cst_and_fits_in_hwi (const_tree x)
1558 {
1559 if (TREE_CODE (x) != INTEGER_CST)
1560 return false;
1561
1562 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1563 return false;
1564
1565 return TREE_INT_CST_NUNITS (x) == 1;
1566 }
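/* Typical use (a sketch; STEP stands for any tree the caller has):

     if (cst_and_fits_in_hwi (step))
       {
         HOST_WIDE_INT s = TREE_INT_CST_ELT (step, 0);
         ...
       }

   A 128-bit integer constant fails the precision test above and must be
   handled as a wide_int instead.  */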
1567
1568 /* Build a newly constructed TREE_VEC node of length LEN. */
1569
1570 tree
1571 make_vector_stat (unsigned len MEM_STAT_DECL)
1572 {
1573 tree t;
1574 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1575
1576 record_node_allocation_statistics (VECTOR_CST, length);
1577
1578 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1579
1580 TREE_SET_CODE (t, VECTOR_CST);
1581 TREE_CONSTANT (t) = 1;
1582
1583 return t;
1584 }
1585
1586 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1587 are in a list pointed to by VALS. */
1588
1589 tree
1590 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1591 {
1592 int over = 0;
1593 unsigned cnt = 0;
1594 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1595 TREE_TYPE (v) = type;
1596
1597 /* Iterate through elements and check for overflow. */
1598 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1599 {
1600 tree value = vals[cnt];
1601
1602 VECTOR_CST_ELT (v, cnt) = value;
1603
1604 /* Don't crash if we get an address constant. */
1605 if (!CONSTANT_CLASS_P (value))
1606 continue;
1607
1608 over |= TREE_OVERFLOW (value);
1609 }
1610
1611 TREE_OVERFLOW (v) = over;
1612 return v;
1613 }
1614
1615 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1616 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1617
1618 tree
1619 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1620 {
1621 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1622 unsigned HOST_WIDE_INT idx;
1623 tree value;
1624
1625 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1626 vec[idx] = value;
1627 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1628 vec[idx] = build_zero_cst (TREE_TYPE (type));
1629
1630 return build_vector (type, vec);
1631 }
1632
1633 /* Build a vector of type VECTYPE where all the elements are SCs. */
1634 tree
1635 build_vector_from_val (tree vectype, tree sc)
1636 {
1637 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1638
1639 if (sc == error_mark_node)
1640 return sc;
1641
1642 /* Verify that the vector type is suitable for SC. Note that there
1643 is some inconsistency in the type-system with respect to restrict
1644 qualifications of pointers. Vector types always have a main-variant
1645 element type and the qualification is applied to the vector-type.
1646 So TREE_TYPE (vector-type) does not return a properly qualified
1647 vector element-type. */
1648 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1649 TREE_TYPE (vectype)));
1650
1651 if (CONSTANT_CLASS_P (sc))
1652 {
1653 tree *v = XALLOCAVEC (tree, nunits);
1654 for (i = 0; i < nunits; ++i)
1655 v[i] = sc;
1656 return build_vector (vectype, v);
1657 }
1658 else
1659 {
1660 vec<constructor_elt, va_gc> *v;
1661 vec_alloc (v, nunits);
1662 for (i = 0; i < nunits; ++i)
1663 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1664 return build_constructor (vectype, v);
1665 }
1666 }
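/* For example, splatting zero across a vector type (a sketch; V4SI_TYPE
   stands for whatever vector type the caller already has):

     tree zero = build_zero_cst (TREE_TYPE (v4si_type));
     tree splat = build_vector_from_val (v4si_type, zero);

   Because the element is CONSTANT_CLASS_P this takes the build_vector
   path and yields a VECTOR_CST; a non-constant element would produce a
   CONSTRUCTOR instead.  */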
1667
1668 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1669 are in the vec pointed to by VALS. */
1670 tree
1671 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1672 {
1673 tree c = make_node (CONSTRUCTOR);
1674 unsigned int i;
1675 constructor_elt *elt;
1676 bool constant_p = true;
1677 bool side_effects_p = false;
1678
1679 TREE_TYPE (c) = type;
1680 CONSTRUCTOR_ELTS (c) = vals;
1681
1682 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1683 {
1684 /* Mostly ctors will have elts that don't have side-effects, so
1685 the usual case is to scan all the elements. Hence a single
1686 loop for both const and side effects, rather than one loop
1687 each (with early outs). */
1688 if (!TREE_CONSTANT (elt->value))
1689 constant_p = false;
1690 if (TREE_SIDE_EFFECTS (elt->value))
1691 side_effects_p = true;
1692 }
1693
1694 TREE_SIDE_EFFECTS (c) = side_effects_p;
1695 TREE_CONSTANT (c) = constant_p;
1696
1697 return c;
1698 }
1699
1700 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1701 INDEX and VALUE. */
1702 tree
1703 build_constructor_single (tree type, tree index, tree value)
1704 {
1705 vec<constructor_elt, va_gc> *v;
1706 constructor_elt elt = {index, value};
1707
1708 vec_alloc (v, 1);
1709 v->quick_push (elt);
1710
1711 return build_constructor (type, v);
1712 }
1713
1714
1715 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1716 are in a list pointed to by VALS. */
1717 tree
1718 build_constructor_from_list (tree type, tree vals)
1719 {
1720 tree t;
1721 vec<constructor_elt, va_gc> *v = NULL;
1722
1723 if (vals)
1724 {
1725 vec_alloc (v, list_length (vals));
1726 for (t = vals; t; t = TREE_CHAIN (t))
1727 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1728 }
1729
1730 return build_constructor (type, v);
1731 }
1732
1733 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1734 of elements, provided as index/value pairs. */
1735
1736 tree
1737 build_constructor_va (tree type, int nelts, ...)
1738 {
1739 vec<constructor_elt, va_gc> *v = NULL;
1740 va_list p;
1741
1742 va_start (p, nelts);
1743 vec_alloc (v, nelts);
1744 while (nelts--)
1745 {
1746 tree index = va_arg (p, tree);
1747 tree value = va_arg (p, tree);
1748 CONSTRUCTOR_APPEND_ELT (v, index, value);
1749 }
1750 va_end (p);
1751 return build_constructor (type, v);
1752 }
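/* A sketch of the variadic form, building the initializer
   { [0] = 0, [1] = 1 } for some array-of-int type ARRAY_TYPE supplied by
   the caller:

     tree ctor = build_constructor_va (array_type, 2,
                                       size_int (0), integer_zero_node,
                                       size_int (1), integer_one_node);
*/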
1753
1754 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1755
1756 tree
1757 build_fixed (tree type, FIXED_VALUE_TYPE f)
1758 {
1759 tree v;
1760 FIXED_VALUE_TYPE *fp;
1761
1762 v = make_node (FIXED_CST);
1763 fp = ggc_alloc<fixed_value> ();
1764 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1765
1766 TREE_TYPE (v) = type;
1767 TREE_FIXED_CST_PTR (v) = fp;
1768 return v;
1769 }
1770
1771 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1772
1773 tree
1774 build_real (tree type, REAL_VALUE_TYPE d)
1775 {
1776 tree v;
1777 REAL_VALUE_TYPE *dp;
1778 int overflow = 0;
1779
1780 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1781 Consider doing it via real_convert now. */
1782
1783 v = make_node (REAL_CST);
1784 dp = ggc_alloc<real_value> ();
1785 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1786
1787 TREE_TYPE (v) = type;
1788 TREE_REAL_CST_PTR (v) = dp;
1789 TREE_OVERFLOW (v) = overflow;
1790 return v;
1791 }
1792
1793 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1794 node I, converted in the mode of TYPE (or VOIDmode if TYPE is NULL). */
1795
1796 REAL_VALUE_TYPE
1797 real_value_from_int_cst (const_tree type, const_tree i)
1798 {
1799 REAL_VALUE_TYPE d;
1800
1801 /* Clear all bits of the real value type so that we can later do
1802 bitwise comparisons to see if two values are the same. */
1803 memset (&d, 0, sizeof d);
1804
1805 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1806 TYPE_SIGN (TREE_TYPE (i)));
1807 return d;
1808 }
1809
1810 /* Given a tree representing an integer constant I, return a tree
1811 representing the same value as a floating-point constant of type TYPE. */
1812
1813 tree
1814 build_real_from_int_cst (tree type, const_tree i)
1815 {
1816 tree v;
1817 int overflow = TREE_OVERFLOW (i);
1818
1819 v = build_real (type, real_value_from_int_cst (type, i));
1820
1821 TREE_OVERFLOW (v) |= overflow;
1822 return v;
1823 }
1824
1825 /* Return a newly constructed STRING_CST node whose value is
1826 the LEN characters at STR.
1827 Note that for a C string literal, LEN should include the trailing NUL.
1828 The TREE_TYPE is not initialized. */
1829
1830 tree
1831 build_string (int len, const char *str)
1832 {
1833 tree s;
1834 size_t length;
1835
1836 /* Do not waste bytes provided by padding of struct tree_string. */
1837 length = len + offsetof (struct tree_string, str) + 1;
1838
1839 record_node_allocation_statistics (STRING_CST, length);
1840
1841 s = (tree) ggc_internal_alloc (length);
1842
1843 memset (s, 0, sizeof (struct tree_typed));
1844 TREE_SET_CODE (s, STRING_CST);
1845 TREE_CONSTANT (s) = 1;
1846 TREE_STRING_LENGTH (s) = len;
1847 memcpy (s->string.str, str, len);
1848 s->string.str[len] = '\0';
1849
1850 return s;
1851 }
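/* E.g. for the C literal "hi" a front end would include the trailing NUL
   and set the type itself (a sketch):

     tree s = build_string (3, "hi");
     TREE_TYPE (s) = build_array_type (char_type_node,
                                       build_index_type (size_int (2)));

   TREE_STRING_LENGTH (s) is then 3 and TREE_STRING_POINTER (s) points at
   a NUL-terminated copy of the bytes.  */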
1852
1853 /* Return a newly constructed COMPLEX_CST node whose value is
1854 specified by the real and imaginary parts REAL and IMAG.
1855 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1856 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1857
1858 tree
1859 build_complex (tree type, tree real, tree imag)
1860 {
1861 tree t = make_node (COMPLEX_CST);
1862
1863 TREE_REALPART (t) = real;
1864 TREE_IMAGPART (t) = imag;
1865 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1866 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1867 return t;
1868 }
1869
1870 /* Return a constant of arithmetic type TYPE which is the
1871 multiplicative identity of the set TYPE. */
1872
1873 tree
1874 build_one_cst (tree type)
1875 {
1876 switch (TREE_CODE (type))
1877 {
1878 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1879 case POINTER_TYPE: case REFERENCE_TYPE:
1880 case OFFSET_TYPE:
1881 return build_int_cst (type, 1);
1882
1883 case REAL_TYPE:
1884 return build_real (type, dconst1);
1885
1886 case FIXED_POINT_TYPE:
1887 /* We can only generate 1 for accum types. */
1888 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1889 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1890
1891 case VECTOR_TYPE:
1892 {
1893 tree scalar = build_one_cst (TREE_TYPE (type));
1894
1895 return build_vector_from_val (type, scalar);
1896 }
1897
1898 case COMPLEX_TYPE:
1899 return build_complex (type,
1900 build_one_cst (TREE_TYPE (type)),
1901 build_zero_cst (TREE_TYPE (type)));
1902
1903 default:
1904 gcc_unreachable ();
1905 }
1906 }
1907
1908 /* Return an integer constant of type TYPE with all bits in its precision
1909 set, or a complex or vector constant whose elements are such integers. */
1910
1911 tree
1912 build_all_ones_cst (tree type)
1913 {
1914 if (TREE_CODE (type) == COMPLEX_TYPE)
1915 {
1916 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1917 return build_complex (type, scalar, scalar);
1918 }
1919 else
1920 return build_minus_one_cst (type);
1921 }
1922
1923 /* Return a constant of arithmetic type TYPE which is the
1924 opposite of the multiplicative identity of the set TYPE. */
1925
1926 tree
1927 build_minus_one_cst (tree type)
1928 {
1929 switch (TREE_CODE (type))
1930 {
1931 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1932 case POINTER_TYPE: case REFERENCE_TYPE:
1933 case OFFSET_TYPE:
1934 return build_int_cst (type, -1);
1935
1936 case REAL_TYPE:
1937 return build_real (type, dconstm1);
1938
1939 case FIXED_POINT_TYPE:
1940 /* We can only generate -1 for accum types. */
1941 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1942 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1943 TYPE_MODE (type)));
1944
1945 case VECTOR_TYPE:
1946 {
1947 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1948
1949 return build_vector_from_val (type, scalar);
1950 }
1951
1952 case COMPLEX_TYPE:
1953 return build_complex (type,
1954 build_minus_one_cst (TREE_TYPE (type)),
1955 build_zero_cst (TREE_TYPE (type)));
1956
1957 default:
1958 gcc_unreachable ();
1959 }
1960 }
1961
1962 /* Build 0 constant of type TYPE. This is used by constructor folding
1963 and thus the constant should be represented in memory by
1964 zero(es). */
1965
1966 tree
1967 build_zero_cst (tree type)
1968 {
1969 switch (TREE_CODE (type))
1970 {
1971 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1972 case POINTER_TYPE: case REFERENCE_TYPE:
1973 case OFFSET_TYPE: case NULLPTR_TYPE:
1974 return build_int_cst (type, 0);
1975
1976 case REAL_TYPE:
1977 return build_real (type, dconst0);
1978
1979 case FIXED_POINT_TYPE:
1980 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1981
1982 case VECTOR_TYPE:
1983 {
1984 tree scalar = build_zero_cst (TREE_TYPE (type));
1985
1986 return build_vector_from_val (type, scalar);
1987 }
1988
1989 case COMPLEX_TYPE:
1990 {
1991 tree zero = build_zero_cst (TREE_TYPE (type));
1992
1993 return build_complex (type, zero, zero);
1994 }
1995
1996 default:
1997 if (!AGGREGATE_TYPE_P (type))
1998 return fold_convert (type, integer_zero_node);
1999 return build_constructor (type, NULL);
2000 }
2001 }
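
/* A minimal usage sketch: build_zero_cst (double_type_node) yields the
   REAL_CST 0.0, while for an aggregate type REC (a hypothetical
   RECORD_TYPE node) the default case produces an empty CONSTRUCTOR:

     tree zero = build_zero_cst (rec);

   which constructor folding treats as an all-zero-bytes value.  */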
2002
2003
2004 /* Build a BINFO with LEN language slots. */
2005
2006 tree
2007 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2008 {
2009 tree t;
2010 size_t length = (offsetof (struct tree_binfo, base_binfos)
2011 + vec<tree, va_gc>::embedded_size (base_binfos));
2012
2013 record_node_allocation_statistics (TREE_BINFO, length);
2014
2015 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2016
2017 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2018
2019 TREE_SET_CODE (t, TREE_BINFO);
2020
2021 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2022
2023 return t;
2024 }
2025
2026 /* Create a CASE_LABEL_EXPR tree node and return it. */
2027
2028 tree
2029 build_case_label (tree low_value, tree high_value, tree label_decl)
2030 {
2031 tree t = make_node (CASE_LABEL_EXPR);
2032
2033 TREE_TYPE (t) = void_type_node;
2034 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2035
2036 CASE_LOW (t) = low_value;
2037 CASE_HIGH (t) = high_value;
2038 CASE_LABEL (t) = label_decl;
2039 CASE_CHAIN (t) = NULL_TREE;
2040
2041 return t;
2042 }
2043
2044 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2045 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2046 The latter determines the length of the HOST_WIDE_INT vector. */
2047
2048 tree
2049 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2050 {
2051 tree t;
2052 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2053 + sizeof (struct tree_int_cst));
2054
2055 gcc_assert (len);
2056 record_node_allocation_statistics (INTEGER_CST, length);
2057
2058 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2059
2060 TREE_SET_CODE (t, INTEGER_CST);
2061 TREE_INT_CST_NUNITS (t) = len;
2062 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2063 /* to_offset can only be applied to trees that are offset_int-sized
2064 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2065 must be exactly the precision of offset_int and so LEN is correct. */
2066 if (ext_len <= OFFSET_INT_ELTS)
2067 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2068 else
2069 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2070
2071 TREE_CONSTANT (t) = 1;
2072
2073 return t;
2074 }
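
/* For example, with an 8-byte HOST_WIDE_INT, an INTEGER_CST with
   EXT_LEN == 2 is allocated sizeof (struct tree_int_cst) + 8 bytes:
   the structure already embeds the first element of the value vector,
   hence the EXT_LEN - 1 in the length computation above.  */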
2075
2076 /* Build a newly constructed TREE_VEC node of length LEN. */
2077
2078 tree
2079 make_tree_vec_stat (int len MEM_STAT_DECL)
2080 {
2081 tree t;
2082 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2083
2084 record_node_allocation_statistics (TREE_VEC, length);
2085
2086 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2087
2088 TREE_SET_CODE (t, TREE_VEC);
2089 TREE_VEC_LENGTH (t) = len;
2090
2091 return t;
2092 }
2093
2094 /* Grow a TREE_VEC node to new length LEN. */
2095
2096 tree
2097 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2098 {
2099 gcc_assert (TREE_CODE (v) == TREE_VEC);
2100
2101 int oldlen = TREE_VEC_LENGTH (v);
2102 gcc_assert (len > oldlen);
2103
2104 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2105 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2106
2107 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2108
2109 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2110
2111 TREE_VEC_LENGTH (v) = len;
2112
2113 return v;
2114 }
2115 \f
2116 /* Return 1 if EXPR is the integer constant zero, or a complex or vector
2117    constant in which every element is zero.  */
2118
2119 int
2120 integer_zerop (const_tree expr)
2121 {
2122 STRIP_NOPS (expr);
2123
2124 switch (TREE_CODE (expr))
2125 {
2126 case INTEGER_CST:
2127 return wi::eq_p (expr, 0);
2128 case COMPLEX_CST:
2129 return (integer_zerop (TREE_REALPART (expr))
2130 && integer_zerop (TREE_IMAGPART (expr)));
2131 case VECTOR_CST:
2132 {
2133 unsigned i;
2134 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2135 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2136 return false;
2137 return true;
2138 }
2139 default:
2140 return false;
2141 }
2142 }
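
/* For example, integer_zerop returns 1 for (long) 0, for the complex
   constant 0 + 0i and for a vector constant of all zeros, but 0 for
   the vector { 0, 1 } because one element is nonzero; the STRIP_NOPS
   above is what lets it look through the cast.  */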
2143
2144 /* Return 1 if EXPR is the integer constant one, the corresponding
2145    complex constant, or a vector constant in which every element is one.  */
2146
2147 int
2148 integer_onep (const_tree expr)
2149 {
2150 STRIP_NOPS (expr);
2151
2152 switch (TREE_CODE (expr))
2153 {
2154 case INTEGER_CST:
2155 return wi::eq_p (wi::to_widest (expr), 1);
2156 case COMPLEX_CST:
2157 return (integer_onep (TREE_REALPART (expr))
2158 && integer_zerop (TREE_IMAGPART (expr)));
2159 case VECTOR_CST:
2160 {
2161 unsigned i;
2162 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2163 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2164 return false;
2165 return true;
2166 }
2167 default:
2168 return false;
2169 }
2170 }
2171
2172 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2173 return 1 if every piece is the integer constant one. */
2174
2175 int
2176 integer_each_onep (const_tree expr)
2177 {
2178 STRIP_NOPS (expr);
2179
2180 if (TREE_CODE (expr) == COMPLEX_CST)
2181 return (integer_onep (TREE_REALPART (expr))
2182 && integer_onep (TREE_IMAGPART (expr)));
2183 else
2184 return integer_onep (expr);
2185 }
2186
2187 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2188 it contains, or a complex or vector whose subparts are such integers. */
2189
2190 int
2191 integer_all_onesp (const_tree expr)
2192 {
2193 STRIP_NOPS (expr);
2194
2195 if (TREE_CODE (expr) == COMPLEX_CST
2196 && integer_all_onesp (TREE_REALPART (expr))
2197 && integer_all_onesp (TREE_IMAGPART (expr)))
2198 return 1;
2199
2200 else if (TREE_CODE (expr) == VECTOR_CST)
2201 {
2202 unsigned i;
2203 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2204 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2205 return 0;
2206 return 1;
2207 }
2208
2209 else if (TREE_CODE (expr) != INTEGER_CST)
2210 return 0;
2211
2212 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2213 }
2214
2215 /* Return 1 if EXPR is the integer constant minus one. */
2216
2217 int
2218 integer_minus_onep (const_tree expr)
2219 {
2220 STRIP_NOPS (expr);
2221
2222 if (TREE_CODE (expr) == COMPLEX_CST)
2223 return (integer_all_onesp (TREE_REALPART (expr))
2224 && integer_zerop (TREE_IMAGPART (expr)));
2225 else
2226 return integer_all_onesp (expr);
2227 }
2228
2229 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2230 one bit on). */
2231
2232 int
2233 integer_pow2p (const_tree expr)
2234 {
2235 STRIP_NOPS (expr);
2236
2237 if (TREE_CODE (expr) == COMPLEX_CST
2238 && integer_pow2p (TREE_REALPART (expr))
2239 && integer_zerop (TREE_IMAGPART (expr)))
2240 return 1;
2241
2242 if (TREE_CODE (expr) != INTEGER_CST)
2243 return 0;
2244
2245 return wi::popcount (expr) == 1;
2246 }
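
/* For example, integer_pow2p returns 1 for the constants 1, 2 and 8
   (a single bit set) and 0 for 0, 6 and -8.  */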
2247
2248 /* Return 1 if EXPR is an integer constant other than zero or a
2249 complex constant other than zero. */
2250
2251 int
2252 integer_nonzerop (const_tree expr)
2253 {
2254 STRIP_NOPS (expr);
2255
2256 return ((TREE_CODE (expr) == INTEGER_CST
2257 && !wi::eq_p (expr, 0))
2258 || (TREE_CODE (expr) == COMPLEX_CST
2259 && (integer_nonzerop (TREE_REALPART (expr))
2260 || integer_nonzerop (TREE_IMAGPART (expr)))));
2261 }
2262
2263 /* Return 1 if EXPR is the fixed-point constant zero. */
2264
2265 int
2266 fixed_zerop (const_tree expr)
2267 {
2268 return (TREE_CODE (expr) == FIXED_CST
2269 && TREE_FIXED_CST (expr).data.is_zero ());
2270 }
2271
2272 /* Return the power of two represented by a tree node known to be a
2273 power of two. */
2274
2275 int
2276 tree_log2 (const_tree expr)
2277 {
2278 STRIP_NOPS (expr);
2279
2280 if (TREE_CODE (expr) == COMPLEX_CST)
2281 return tree_log2 (TREE_REALPART (expr));
2282
2283 return wi::exact_log2 (expr);
2284 }
2285
2286 /* Similar, but return the largest integer Y such that 2 ** Y is less
2287 than or equal to EXPR. */
2288
2289 int
2290 tree_floor_log2 (const_tree expr)
2291 {
2292 STRIP_NOPS (expr);
2293
2294 if (TREE_CODE (expr) == COMPLEX_CST)
2295 return tree_log2 (TREE_REALPART (expr));
2296
2297 return wi::floor_log2 (expr);
2298 }
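
/* For example, for the constant 8 both tree_log2 and tree_floor_log2
   return 3; for 12, tree_floor_log2 returns 3 while tree_log2 returns
   -1 (via wi::exact_log2) because 12 is not a power of two.  */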
2299
2300 /* Return number of known trailing zero bits in EXPR, or, if the value of
2301    EXPR is known to be zero, the precision of its type.  */
2302
2303 unsigned int
2304 tree_ctz (const_tree expr)
2305 {
2306 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2307 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2308 return 0;
2309
2310 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2311 switch (TREE_CODE (expr))
2312 {
2313 case INTEGER_CST:
2314 ret1 = wi::ctz (expr);
2315 return MIN (ret1, prec);
2316 case SSA_NAME:
2317 ret1 = wi::ctz (get_nonzero_bits (expr));
2318 return MIN (ret1, prec);
2319 case PLUS_EXPR:
2320 case MINUS_EXPR:
2321 case BIT_IOR_EXPR:
2322 case BIT_XOR_EXPR:
2323 case MIN_EXPR:
2324 case MAX_EXPR:
2325 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2326 if (ret1 == 0)
2327 return ret1;
2328 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2329 return MIN (ret1, ret2);
2330 case POINTER_PLUS_EXPR:
2331 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2332 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2333 /* Second operand is sizetype, which could be in theory
2334 wider than pointer's precision. Make sure we never
2335 return more than prec. */
2336 ret2 = MIN (ret2, prec);
2337 return MIN (ret1, ret2);
2338 case BIT_AND_EXPR:
2339 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2340 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2341 return MAX (ret1, ret2);
2342 case MULT_EXPR:
2343 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2344 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2345 return MIN (ret1 + ret2, prec);
2346 case LSHIFT_EXPR:
2347 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2348 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2349 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2350 {
2351 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2352 return MIN (ret1 + ret2, prec);
2353 }
2354 return ret1;
2355 case RSHIFT_EXPR:
2356 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2357 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2358 {
2359 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2360 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2361 if (ret1 > ret2)
2362 return ret1 - ret2;
2363 }
2364 return 0;
2365 case TRUNC_DIV_EXPR:
2366 case CEIL_DIV_EXPR:
2367 case FLOOR_DIV_EXPR:
2368 case ROUND_DIV_EXPR:
2369 case EXACT_DIV_EXPR:
2370 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2371 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2372 {
2373 int l = tree_log2 (TREE_OPERAND (expr, 1));
2374 if (l >= 0)
2375 {
2376 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2377 ret2 = l;
2378 if (ret1 > ret2)
2379 return ret1 - ret2;
2380 }
2381 }
2382 return 0;
2383 CASE_CONVERT:
2384 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2385 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2386 ret1 = prec;
2387 return MIN (ret1, prec);
2388 case SAVE_EXPR:
2389 return tree_ctz (TREE_OPERAND (expr, 0));
2390 case COND_EXPR:
2391 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2392 if (ret1 == 0)
2393 return 0;
2394 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2395 return MIN (ret1, ret2);
2396 case COMPOUND_EXPR:
2397 return tree_ctz (TREE_OPERAND (expr, 1));
2398 case ADDR_EXPR:
2399 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2400 if (ret1 > BITS_PER_UNIT)
2401 {
2402 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2403 return MIN (ret1, prec);
2404 }
2405 return 0;
2406 default:
2407 return 0;
2408 }
2409 }
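
/* A worked example of the recursion above, assuming a wide enough
   precision: if X is an SSA_NAME whose nonzero bits imply 2 trailing
   zero bits, then X * 8 hits the MULT_EXPR case and yields
   MIN (2 + 3, prec) = 5, while X & 16 hits the BIT_AND_EXPR case and
   yields MAX (2, 4) = 4 known trailing zero bits.  */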
2410
2411 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2412 decimal float constants, so don't return 1 for them. */
2413
2414 int
2415 real_zerop (const_tree expr)
2416 {
2417 STRIP_NOPS (expr);
2418
2419 switch (TREE_CODE (expr))
2420 {
2421 case REAL_CST:
2422 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2423 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2424 case COMPLEX_CST:
2425 return real_zerop (TREE_REALPART (expr))
2426 && real_zerop (TREE_IMAGPART (expr));
2427 case VECTOR_CST:
2428 {
2429 unsigned i;
2430 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2431 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2432 return false;
2433 return true;
2434 }
2435 default:
2436 return false;
2437 }
2438 }
2439
2440 /* Return 1 if EXPR is the real constant one in real or complex form.
2441 Trailing zeroes matter for decimal float constants, so don't return
2442 1 for them. */
2443
2444 int
2445 real_onep (const_tree expr)
2446 {
2447 STRIP_NOPS (expr);
2448
2449 switch (TREE_CODE (expr))
2450 {
2451 case REAL_CST:
2452 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2453 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2454 case COMPLEX_CST:
2455 return real_onep (TREE_REALPART (expr))
2456 && real_zerop (TREE_IMAGPART (expr));
2457 case VECTOR_CST:
2458 {
2459 unsigned i;
2460 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2461 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2462 return false;
2463 return true;
2464 }
2465 default:
2466 return false;
2467 }
2468 }
2469
2470 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2471 matter for decimal float constants, so don't return 1 for them. */
2472
2473 int
2474 real_minus_onep (const_tree expr)
2475 {
2476 STRIP_NOPS (expr);
2477
2478 switch (TREE_CODE (expr))
2479 {
2480 case REAL_CST:
2481 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2482 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2483 case COMPLEX_CST:
2484 return real_minus_onep (TREE_REALPART (expr))
2485 && real_zerop (TREE_IMAGPART (expr));
2486 case VECTOR_CST:
2487 {
2488 unsigned i;
2489 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2490 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2491 return false;
2492 return true;
2493 }
2494 default:
2495 return false;
2496 }
2497 }
2498
2499 /* Nonzero if EXP is a constant or a cast of a constant. */
2500
2501 int
2502 really_constant_p (const_tree exp)
2503 {
2504 /* This is not quite the same as STRIP_NOPS. It does more. */
2505 while (CONVERT_EXPR_P (exp)
2506 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2507 exp = TREE_OPERAND (exp, 0);
2508 return TREE_CONSTANT (exp);
2509 }
2510 \f
2511 /* Return first list element whose TREE_VALUE is ELEM.
2512 Return 0 if ELEM is not in LIST. */
2513
2514 tree
2515 value_member (tree elem, tree list)
2516 {
2517 while (list)
2518 {
2519 if (elem == TREE_VALUE (list))
2520 return list;
2521 list = TREE_CHAIN (list);
2522 }
2523 return NULL_TREE;
2524 }
2525
2526 /* Return first list element whose TREE_PURPOSE is ELEM.
2527 Return 0 if ELEM is not in LIST. */
2528
2529 tree
2530 purpose_member (const_tree elem, tree list)
2531 {
2532 while (list)
2533 {
2534 if (elem == TREE_PURPOSE (list))
2535 return list;
2536 list = TREE_CHAIN (list);
2537 }
2538 return NULL_TREE;
2539 }
2540
2541 /* Return true if ELEM is in V. */
2542
2543 bool
2544 vec_member (const_tree elem, vec<tree, va_gc> *v)
2545 {
2546 unsigned ix;
2547 tree t;
2548 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2549 if (elem == t)
2550 return true;
2551 return false;
2552 }
2553
2554 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2555    NULL_TREE if the chain has fewer than IDX + 1 elements.  */
2556
2557 tree
2558 chain_index (int idx, tree chain)
2559 {
2560 for (; chain && idx > 0; --idx)
2561 chain = TREE_CHAIN (chain);
2562 return chain;
2563 }
2564
2565 /* Return nonzero if ELEM is part of the chain CHAIN. */
2566
2567 int
2568 chain_member (const_tree elem, const_tree chain)
2569 {
2570 while (chain)
2571 {
2572 if (elem == chain)
2573 return 1;
2574 chain = DECL_CHAIN (chain);
2575 }
2576
2577 return 0;
2578 }
2579
2580 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2581 We expect a null pointer to mark the end of the chain.
2582 This is the Lisp primitive `length'. */
2583
2584 int
2585 list_length (const_tree t)
2586 {
2587 const_tree p = t;
2588 #ifdef ENABLE_TREE_CHECKING
2589 const_tree q = t;
2590 #endif
2591 int len = 0;
2592
2593 while (p)
2594 {
2595 p = TREE_CHAIN (p);
2596 #ifdef ENABLE_TREE_CHECKING
2597 if (len % 2)
2598 q = TREE_CHAIN (q);
2599 gcc_assert (p != q);
2600 #endif
2601 len++;
2602 }
2603
2604 return len;
2605 }
2606
2607 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2608 UNION_TYPE TYPE, or NULL_TREE if none. */
2609
2610 tree
2611 first_field (const_tree type)
2612 {
2613 tree t = TYPE_FIELDS (type);
2614 while (t && TREE_CODE (t) != FIELD_DECL)
2615 t = TREE_CHAIN (t);
2616 return t;
2617 }
2618
2619 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2620 by modifying the last node in chain 1 to point to chain 2.
2621 This is the Lisp primitive `nconc'. */
2622
2623 tree
2624 chainon (tree op1, tree op2)
2625 {
2626 tree t1;
2627
2628 if (!op1)
2629 return op2;
2630 if (!op2)
2631 return op1;
2632
2633 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2634 continue;
2635 TREE_CHAIN (t1) = op2;
2636
2637 #ifdef ENABLE_TREE_CHECKING
2638 {
2639 tree t2;
2640 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2641 gcc_assert (t2 != t1);
2642 }
2643 #endif
2644
2645 return op1;
2646 }
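
/* A minimal usage sketch of the list primitives, where ARG1 and ARG2
   stand for arbitrary trees:

     tree l1 = tree_cons (NULL_TREE, arg1, NULL_TREE);
     tree l2 = tree_cons (NULL_TREE, arg2, NULL_TREE);
     tree all = chainon (l1, l2);

   afterwards list_length (all) is 2 and TREE_VALUE (TREE_CHAIN (all))
   is arg2.  Note that chainon modifies L1 in place.  */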
2647
2648 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2649
2650 tree
2651 tree_last (tree chain)
2652 {
2653 tree next;
2654 if (chain)
2655 while ((next = TREE_CHAIN (chain)))
2656 chain = next;
2657 return chain;
2658 }
2659
2660 /* Reverse the order of elements in the chain T,
2661 and return the new head of the chain (old last element). */
2662
2663 tree
2664 nreverse (tree t)
2665 {
2666 tree prev = 0, decl, next;
2667 for (decl = t; decl; decl = next)
2668 {
2669 /* We shouldn't be using this function to reverse BLOCK chains; we
2670 have blocks_nreverse for that. */
2671 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2672 next = TREE_CHAIN (decl);
2673 TREE_CHAIN (decl) = prev;
2674 prev = decl;
2675 }
2676 return prev;
2677 }
2678 \f
2679 /* Return a newly created TREE_LIST node whose
2680 purpose and value fields are PARM and VALUE. */
2681
2682 tree
2683 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2684 {
2685 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2686 TREE_PURPOSE (t) = parm;
2687 TREE_VALUE (t) = value;
2688 return t;
2689 }
2690
2691 /* Build a chain of TREE_LIST nodes from a vector. */
2692
2693 tree
2694 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2695 {
2696 tree ret = NULL_TREE;
2697 tree *pp = &ret;
2698 unsigned int i;
2699 tree t;
2700 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2701 {
2702 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2703 pp = &TREE_CHAIN (*pp);
2704 }
2705 return ret;
2706 }
2707
2708 /* Return a newly created TREE_LIST node whose
2709 purpose and value fields are PURPOSE and VALUE
2710 and whose TREE_CHAIN is CHAIN. */
2711
2712 tree
2713 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2714 {
2715 tree node;
2716
2717 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2718 memset (node, 0, sizeof (struct tree_common));
2719
2720 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2721
2722 TREE_SET_CODE (node, TREE_LIST);
2723 TREE_CHAIN (node) = chain;
2724 TREE_PURPOSE (node) = purpose;
2725 TREE_VALUE (node) = value;
2726 return node;
2727 }
2728
2729 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2730 trees. */
2731
2732 vec<tree, va_gc> *
2733 ctor_to_vec (tree ctor)
2734 {
2735 vec<tree, va_gc> *vec;
2736 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2737 unsigned int ix;
2738 tree val;
2739
2740 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2741 vec->quick_push (val);
2742
2743 return vec;
2744 }
2745 \f
2746 /* Return the size nominally occupied by an object of type TYPE
2747 when it resides in memory. The value is measured in units of bytes,
2748 and its data type is that normally used for type sizes
2749 (which is the first type created by make_signed_type or
2750 make_unsigned_type). */
2751
2752 tree
2753 size_in_bytes (const_tree type)
2754 {
2755 tree t;
2756
2757 if (type == error_mark_node)
2758 return integer_zero_node;
2759
2760 type = TYPE_MAIN_VARIANT (type);
2761 t = TYPE_SIZE_UNIT (type);
2762
2763 if (t == 0)
2764 {
2765 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2766 return size_zero_node;
2767 }
2768
2769 return t;
2770 }
2771
2772 /* Return the size of TYPE (in bytes) as a wide integer
2773    or return -1 if the size can vary or is larger than a HOST_WIDE_INT.  */
2774
2775 HOST_WIDE_INT
2776 int_size_in_bytes (const_tree type)
2777 {
2778 tree t;
2779
2780 if (type == error_mark_node)
2781 return 0;
2782
2783 type = TYPE_MAIN_VARIANT (type);
2784 t = TYPE_SIZE_UNIT (type);
2785
2786 if (t && tree_fits_uhwi_p (t))
2787 return TREE_INT_CST_LOW (t);
2788 else
2789 return -1;
2790 }
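
/* For example, int_size_in_bytes (char_type_node) is 1, whereas for an
   incomplete or variable-sized type (no constant TYPE_SIZE_UNIT) the
   function quietly returns -1 instead of reporting an error the way
   size_in_bytes above does.  */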
2791
2792 /* Return the maximum size of TYPE (in bytes) as a wide integer
2793    or return -1 if the size can vary or is larger than a HOST_WIDE_INT.  */
2794
2795 HOST_WIDE_INT
2796 max_int_size_in_bytes (const_tree type)
2797 {
2798 HOST_WIDE_INT size = -1;
2799 tree size_tree;
2800
2801 /* If this is an array type, check for a possible MAX_SIZE attached. */
2802
2803 if (TREE_CODE (type) == ARRAY_TYPE)
2804 {
2805 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2806
2807 if (size_tree && tree_fits_uhwi_p (size_tree))
2808 size = tree_to_uhwi (size_tree);
2809 }
2810
2811 /* If we still haven't been able to get a size, see if the language
2812 can compute a maximum size. */
2813
2814 if (size == -1)
2815 {
2816 size_tree = lang_hooks.types.max_size (type);
2817
2818 if (size_tree && tree_fits_uhwi_p (size_tree))
2819 size = tree_to_uhwi (size_tree);
2820 }
2821
2822 return size;
2823 }
2824 \f
2825 /* Return the bit position of FIELD, in bits from the start of the record.
2826 This is a tree of type bitsizetype. */
2827
2828 tree
2829 bit_position (const_tree field)
2830 {
2831 return bit_from_pos (DECL_FIELD_OFFSET (field),
2832 DECL_FIELD_BIT_OFFSET (field));
2833 }
2834 \f
2835 /* Return the byte position of FIELD, in bytes from the start of the record.
2836 This is a tree of type sizetype. */
2837
2838 tree
2839 byte_position (const_tree field)
2840 {
2841 return byte_from_pos (DECL_FIELD_OFFSET (field),
2842 DECL_FIELD_BIT_OFFSET (field));
2843 }
2844
2845 /* Likewise, but return as an integer. It must be representable in
2846 that way (since it could be a signed value, we don't have the
2847    option of returning -1 like int_size_in_bytes can).  */
2848
2849 HOST_WIDE_INT
2850 int_byte_position (const_tree field)
2851 {
2852 return tree_to_shwi (byte_position (field));
2853 }
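
/* For example, assuming BITS_PER_UNIT is 8, a FIELD_DECL with
   DECL_FIELD_OFFSET 4 and DECL_FIELD_BIT_OFFSET 16 has bit_position
   4 * 8 + 16 = 48 and int_byte_position 6.  */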
2854 \f
2855 /* Return the strictest alignment, in bits, that T is known to have. */
2856
2857 unsigned int
2858 expr_align (const_tree t)
2859 {
2860 unsigned int align0, align1;
2861
2862 switch (TREE_CODE (t))
2863 {
2864 CASE_CONVERT: case NON_LVALUE_EXPR:
2865 /* If we have conversions, we know that the alignment of the
2866 object must meet each of the alignments of the types. */
2867 align0 = expr_align (TREE_OPERAND (t, 0));
2868 align1 = TYPE_ALIGN (TREE_TYPE (t));
2869 return MAX (align0, align1);
2870
2871 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2872 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2873 case CLEANUP_POINT_EXPR:
2874 /* These don't change the alignment of an object. */
2875 return expr_align (TREE_OPERAND (t, 0));
2876
2877 case COND_EXPR:
2878 /* The best we can do is say that the alignment is the least aligned
2879 of the two arms. */
2880 align0 = expr_align (TREE_OPERAND (t, 1));
2881 align1 = expr_align (TREE_OPERAND (t, 2));
2882 return MIN (align0, align1);
2883
2884 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2885 meaningfully, it's always 1. */
2886 case LABEL_DECL: case CONST_DECL:
2887 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2888 case FUNCTION_DECL:
2889 gcc_assert (DECL_ALIGN (t) != 0);
2890 return DECL_ALIGN (t);
2891
2892 default:
2893 break;
2894 }
2895
2896 /* Otherwise take the alignment from that of the type. */
2897 return TYPE_ALIGN (TREE_TYPE (t));
2898 }
2899 \f
2900 /* Return, as a tree node, the number of elements for TYPE (which is an
2901 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2902
2903 tree
2904 array_type_nelts (const_tree type)
2905 {
2906 tree index_type, min, max;
2907
2908 /* If they did it with unspecified bounds, then we should have already
2909 given an error about it before we got here. */
2910 if (! TYPE_DOMAIN (type))
2911 return error_mark_node;
2912
2913 index_type = TYPE_DOMAIN (type);
2914 min = TYPE_MIN_VALUE (index_type);
2915 max = TYPE_MAX_VALUE (index_type);
2916
2917 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2918 if (!max)
2919 return error_mark_node;
2920
2921 return (integer_zerop (min)
2922 ? max
2923 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2924 }
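
/* For example, for the C array type int[10] the domain is [0, 9], so
   array_type_nelts returns the INTEGER_CST 9 -- the number of elements
   minus one, not the number of elements.  */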
2925 \f
2926 /* If arg is static -- a reference to an object in static storage -- then
2927 return the object. This is not the same as the C meaning of `static'.
2928 If arg isn't static, return NULL. */
2929
2930 tree
2931 staticp (tree arg)
2932 {
2933 switch (TREE_CODE (arg))
2934 {
2935 case FUNCTION_DECL:
2936 /* Nested functions are static, even though taking their address will
2937 involve a trampoline as we unnest the nested function and create
2938 the trampoline on the tree level. */
2939 return arg;
2940
2941 case VAR_DECL:
2942 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2943 && ! DECL_THREAD_LOCAL_P (arg)
2944 && ! DECL_DLLIMPORT_P (arg)
2945 ? arg : NULL);
2946
2947 case CONST_DECL:
2948 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2949 ? arg : NULL);
2950
2951 case CONSTRUCTOR:
2952 return TREE_STATIC (arg) ? arg : NULL;
2953
2954 case LABEL_DECL:
2955 case STRING_CST:
2956 return arg;
2957
2958 case COMPONENT_REF:
2959 /* If the thing being referenced is not a field, then it is
2960 something language specific. */
2961 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2962
2963 /* If we are referencing a bitfield, we can't evaluate an
2964 ADDR_EXPR at compile time and so it isn't a constant. */
2965 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2966 return NULL;
2967
2968 return staticp (TREE_OPERAND (arg, 0));
2969
2970 case BIT_FIELD_REF:
2971 return NULL;
2972
2973 case INDIRECT_REF:
2974 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2975
2976 case ARRAY_REF:
2977 case ARRAY_RANGE_REF:
2978 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2979 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2980 return staticp (TREE_OPERAND (arg, 0));
2981 else
2982 return NULL;
2983
2984 case COMPOUND_LITERAL_EXPR:
2985 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2986
2987 default:
2988 return NULL;
2989 }
2990 }
2991
2992 \f
2993
2994
2995 /* Return whether OP is a DECL whose address is function-invariant. */
2996
2997 bool
2998 decl_address_invariant_p (const_tree op)
2999 {
3000 /* The conditions below are slightly less strict than the one in
3001 staticp. */
3002
3003 switch (TREE_CODE (op))
3004 {
3005 case PARM_DECL:
3006 case RESULT_DECL:
3007 case LABEL_DECL:
3008 case FUNCTION_DECL:
3009 return true;
3010
3011 case VAR_DECL:
3012 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3013 || DECL_THREAD_LOCAL_P (op)
3014 || DECL_CONTEXT (op) == current_function_decl
3015 || decl_function_context (op) == current_function_decl)
3016 return true;
3017 break;
3018
3019 case CONST_DECL:
3020 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3021 || decl_function_context (op) == current_function_decl)
3022 return true;
3023 break;
3024
3025 default:
3026 break;
3027 }
3028
3029 return false;
3030 }
3031
3032 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3033
3034 bool
3035 decl_address_ip_invariant_p (const_tree op)
3036 {
3037 /* The conditions below are slightly less strict than the one in
3038 staticp. */
3039
3040 switch (TREE_CODE (op))
3041 {
3042 case LABEL_DECL:
3043 case FUNCTION_DECL:
3044 case STRING_CST:
3045 return true;
3046
3047 case VAR_DECL:
3048 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3049 && !DECL_DLLIMPORT_P (op))
3050 || DECL_THREAD_LOCAL_P (op))
3051 return true;
3052 break;
3053
3054 case CONST_DECL:
3055 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3056 return true;
3057 break;
3058
3059 default:
3060 break;
3061 }
3062
3063 return false;
3064 }
3065
3066
3067 /* Return true if T is function-invariant (internal function, does
3068 not handle arithmetic; that's handled in skip_simple_arithmetic and
3069 tree_invariant_p). */
3070
3071 static bool tree_invariant_p (tree t);
3072
3073 static bool
3074 tree_invariant_p_1 (tree t)
3075 {
3076 tree op;
3077
3078 if (TREE_CONSTANT (t)
3079 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3080 return true;
3081
3082 switch (TREE_CODE (t))
3083 {
3084 case SAVE_EXPR:
3085 return true;
3086
3087 case ADDR_EXPR:
3088 op = TREE_OPERAND (t, 0);
3089 while (handled_component_p (op))
3090 {
3091 switch (TREE_CODE (op))
3092 {
3093 case ARRAY_REF:
3094 case ARRAY_RANGE_REF:
3095 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3096 || TREE_OPERAND (op, 2) != NULL_TREE
3097 || TREE_OPERAND (op, 3) != NULL_TREE)
3098 return false;
3099 break;
3100
3101 case COMPONENT_REF:
3102 if (TREE_OPERAND (op, 2) != NULL_TREE)
3103 return false;
3104 break;
3105
3106 default:;
3107 }
3108 op = TREE_OPERAND (op, 0);
3109 }
3110
3111 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3112
3113 default:
3114 break;
3115 }
3116
3117 return false;
3118 }
3119
3120 /* Return true if T is function-invariant. */
3121
3122 static bool
3123 tree_invariant_p (tree t)
3124 {
3125 tree inner = skip_simple_arithmetic (t);
3126 return tree_invariant_p_1 (inner);
3127 }
3128
3129 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3130 Do this to any expression which may be used in more than one place,
3131 but must be evaluated only once.
3132
3133 Normally, expand_expr would reevaluate the expression each time.
3134 Calling save_expr produces something that is evaluated and recorded
3135 the first time expand_expr is called on it. Subsequent calls to
3136 expand_expr just reuse the recorded value.
3137
3138 The call to expand_expr that generates code that actually computes
3139 the value is the first call *at compile time*. Subsequent calls
3140 *at compile time* generate code to use the saved value.
3141    This produces correct results provided that *at run time* control
3142 always flows through the insns made by the first expand_expr
3143 before reaching the other places where the save_expr was evaluated.
3144 You, the caller of save_expr, must make sure this is so.
3145
3146 Constants, and certain read-only nodes, are returned with no
3147 SAVE_EXPR because that is safe. Expressions containing placeholders
3148 are not touched; see tree.def for an explanation of what these
3149 are used for. */
3150
3151 tree
3152 save_expr (tree expr)
3153 {
3154 tree t = fold (expr);
3155 tree inner;
3156
3157 /* If the tree evaluates to a constant, then we don't want to hide that
3158 fact (i.e. this allows further folding, and direct checks for constants).
3159 However, a read-only object that has side effects cannot be bypassed.
3160 Since it is no problem to reevaluate literals, we just return the
3161 literal node. */
3162 inner = skip_simple_arithmetic (t);
3163 if (TREE_CODE (inner) == ERROR_MARK)
3164 return inner;
3165
3166 if (tree_invariant_p_1 (inner))
3167 return t;
3168
3169 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3170 it means that the size or offset of some field of an object depends on
3171 the value within another field.
3172
3173 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3174 and some variable since it would then need to be both evaluated once and
3175 evaluated more than once. Front-ends must assure this case cannot
3176 happen by surrounding any such subexpressions in their own SAVE_EXPR
3177 and forcing evaluation at the proper time. */
3178 if (contains_placeholder_p (inner))
3179 return t;
3180
3181 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3182 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3183
3184 /* This expression might be placed ahead of a jump to ensure that the
3185 value was computed on both sides of the jump. So make sure it isn't
3186 eliminated as dead. */
3187 TREE_SIDE_EFFECTS (t) = 1;
3188 return t;
3189 }
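
/* A minimal usage sketch, with X standing for a possibly
   side-effecting operand tree: to expand X * X while evaluating X only
   once, a front end would write

     tree s = save_expr (x);
     tree sq = build2 (MULT_EXPR, TREE_TYPE (s), s, s);

   so that both operands of the MULT_EXPR share one SAVE_EXPR.  */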
3190
3191 /* Look inside EXPR into any simple arithmetic operations. Return the
3192 outermost non-arithmetic or non-invariant node. */
3193
3194 tree
3195 skip_simple_arithmetic (tree expr)
3196 {
3197 /* We don't care about whether this can be used as an lvalue in this
3198 context. */
3199 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3200 expr = TREE_OPERAND (expr, 0);
3201
3202 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3203 a constant, it will be more efficient to not make another SAVE_EXPR since
3204 it will allow better simplification and GCSE will be able to merge the
3205 computations if they actually occur. */
3206 while (true)
3207 {
3208 if (UNARY_CLASS_P (expr))
3209 expr = TREE_OPERAND (expr, 0);
3210 else if (BINARY_CLASS_P (expr))
3211 {
3212 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3213 expr = TREE_OPERAND (expr, 0);
3214 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3215 expr = TREE_OPERAND (expr, 1);
3216 else
3217 break;
3218 }
3219 else
3220 break;
3221 }
3222
3223 return expr;
3224 }
3225
3226 /* Look inside EXPR into simple arithmetic operations involving constants.
3227 Return the outermost non-arithmetic or non-constant node. */
3228
3229 tree
3230 skip_simple_constant_arithmetic (tree expr)
3231 {
3232 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3233 expr = TREE_OPERAND (expr, 0);
3234
3235 while (true)
3236 {
3237 if (UNARY_CLASS_P (expr))
3238 expr = TREE_OPERAND (expr, 0);
3239 else if (BINARY_CLASS_P (expr))
3240 {
3241 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3242 expr = TREE_OPERAND (expr, 0);
3243 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3244 expr = TREE_OPERAND (expr, 1);
3245 else
3246 break;
3247 }
3248 else
3249 break;
3250 }
3251
3252 return expr;
3253 }
3254
3255 /* Return which tree structure is used by T. */
3256
3257 enum tree_node_structure_enum
3258 tree_node_structure (const_tree t)
3259 {
3260 const enum tree_code code = TREE_CODE (t);
3261 return tree_node_structure_for_code (code);
3262 }
3263
3264 /* Set various status flags when building a CALL_EXPR object T. */
3265
3266 static void
3267 process_call_operands (tree t)
3268 {
3269 bool side_effects = TREE_SIDE_EFFECTS (t);
3270 bool read_only = false;
3271 int i = call_expr_flags (t);
3272
3273 /* Calls have side-effects, except those to const or pure functions. */
3274 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3275 side_effects = true;
3276 /* Propagate TREE_READONLY of arguments for const functions. */
3277 if (i & ECF_CONST)
3278 read_only = true;
3279
3280 if (!side_effects || read_only)
3281 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3282 {
3283 tree op = TREE_OPERAND (t, i);
3284 if (op && TREE_SIDE_EFFECTS (op))
3285 side_effects = true;
3286 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3287 read_only = false;
3288 }
3289
3290 TREE_SIDE_EFFECTS (t) = side_effects;
3291 TREE_READONLY (t) = read_only;
3292 }
3293 \f
3294 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3295 size or offset that depends on a field within a record. */
3296
3297 bool
3298 contains_placeholder_p (const_tree exp)
3299 {
3300 enum tree_code code;
3301
3302 if (!exp)
3303 return 0;
3304
3305 code = TREE_CODE (exp);
3306 if (code == PLACEHOLDER_EXPR)
3307 return 1;
3308
3309 switch (TREE_CODE_CLASS (code))
3310 {
3311 case tcc_reference:
3312 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3313 position computations since they will be converted into a
3314         WITH_RECORD_EXPR involving the reference, which we assume
3315         here will be valid.  */
3316 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3317
3318 case tcc_exceptional:
3319 if (code == TREE_LIST)
3320 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3321 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3322 break;
3323
3324 case tcc_unary:
3325 case tcc_binary:
3326 case tcc_comparison:
3327 case tcc_expression:
3328 switch (code)
3329 {
3330 case COMPOUND_EXPR:
3331 /* Ignoring the first operand isn't quite right, but works best. */
3332 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3333
3334 case COND_EXPR:
3335 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3336 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3337 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3338
3339 case SAVE_EXPR:
3340 /* The save_expr function never wraps anything containing
3341 a PLACEHOLDER_EXPR. */
3342 return 0;
3343
3344 default:
3345 break;
3346 }
3347
3348 switch (TREE_CODE_LENGTH (code))
3349 {
3350 case 1:
3351 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3352 case 2:
3353 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3354 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3355 default:
3356 return 0;
3357 }
3358
3359 case tcc_vl_exp:
3360 switch (code)
3361 {
3362 case CALL_EXPR:
3363 {
3364 const_tree arg;
3365 const_call_expr_arg_iterator iter;
3366 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3367 if (CONTAINS_PLACEHOLDER_P (arg))
3368 return 1;
3369 return 0;
3370 }
3371 default:
3372 return 0;
3373 }
3374
3375 default:
3376 return 0;
3377 }
3378 return 0;
3379 }
3380
3381 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3382 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3383 field positions. */
3384
3385 static bool
3386 type_contains_placeholder_1 (const_tree type)
3387 {
3388 /* If the size contains a placeholder or the parent type (component type in
3389 the case of arrays) type involves a placeholder, this type does. */
3390 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3391 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3392 || (!POINTER_TYPE_P (type)
3393 && TREE_TYPE (type)
3394 && type_contains_placeholder_p (TREE_TYPE (type))))
3395 return true;
3396
3397 /* Now do type-specific checks. Note that the last part of the check above
3398 greatly limits what we have to do below. */
3399 switch (TREE_CODE (type))
3400 {
3401 case VOID_TYPE:
3402 case COMPLEX_TYPE:
3403 case ENUMERAL_TYPE:
3404 case BOOLEAN_TYPE:
3405 case POINTER_TYPE:
3406 case OFFSET_TYPE:
3407 case REFERENCE_TYPE:
3408 case METHOD_TYPE:
3409 case FUNCTION_TYPE:
3410 case VECTOR_TYPE:
3411 case NULLPTR_TYPE:
3412 return false;
3413
3414 case INTEGER_TYPE:
3415 case REAL_TYPE:
3416 case FIXED_POINT_TYPE:
3417 /* Here we just check the bounds. */
3418 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3419 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3420
3421 case ARRAY_TYPE:
3422 /* We have already checked the component type above, so just check the
3423 domain type. */
3424 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3425
3426 case RECORD_TYPE:
3427 case UNION_TYPE:
3428 case QUAL_UNION_TYPE:
3429 {
3430 tree field;
3431
3432 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3433 if (TREE_CODE (field) == FIELD_DECL
3434 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3435 || (TREE_CODE (type) == QUAL_UNION_TYPE
3436 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3437 || type_contains_placeholder_p (TREE_TYPE (field))))
3438 return true;
3439
3440 return false;
3441 }
3442
3443 default:
3444 gcc_unreachable ();
3445 }
3446 }
3447
3448 /* Wrapper around above function used to cache its result. */
3449
3450 bool
3451 type_contains_placeholder_p (tree type)
3452 {
3453 bool result;
3454
3455 /* If the contains_placeholder_bits field has been initialized,
3456 then we know the answer. */
3457 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3458 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3459
3460 /* Indicate that we've seen this type node, and the answer is false.
3461 This is what we want to return if we run into recursion via fields. */
3462 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3463
3464 /* Compute the real value. */
3465 result = type_contains_placeholder_1 (type);
3466
3467 /* Store the real value. */
3468 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3469
3470 return result;
3471 }
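
/* In the wrapper above, TYPE_CONTAINS_PLACEHOLDER_INTERNAL acts as a
   ternary cache: 0 means not yet computed, 1 means computed and false,
   2 means computed and true, hence the +1 and -1 adjustments.  */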
3472 \f
3473 /* Push tree EXP onto vector QUEUE if it is not already present. */
3474
3475 static void
3476 push_without_duplicates (tree exp, vec<tree> *queue)
3477 {
3478 unsigned int i;
3479 tree iter;
3480
3481 FOR_EACH_VEC_ELT (*queue, i, iter)
3482 if (simple_cst_equal (iter, exp) == 1)
3483 break;
3484
3485 if (!iter)
3486 queue->safe_push (exp);
3487 }
3488
3489 /* Given a tree EXP, find all occurrences of references to fields
3490 in a PLACEHOLDER_EXPR and place them in vector REFS without
3491 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3492 we assume here that EXP contains only arithmetic expressions
3493 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3494 argument list. */
3495
3496 void
3497 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3498 {
3499 enum tree_code code = TREE_CODE (exp);
3500 tree inner;
3501 int i;
3502
3503 /* We handle TREE_LIST and COMPONENT_REF separately. */
3504 if (code == TREE_LIST)
3505 {
3506 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3507 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3508 }
3509 else if (code == COMPONENT_REF)
3510 {
3511 for (inner = TREE_OPERAND (exp, 0);
3512 REFERENCE_CLASS_P (inner);
3513 inner = TREE_OPERAND (inner, 0))
3514 ;
3515
3516 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3517 push_without_duplicates (exp, refs);
3518 else
3519 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3520 }
3521 else
3522 switch (TREE_CODE_CLASS (code))
3523 {
3524 case tcc_constant:
3525 break;
3526
3527 case tcc_declaration:
3528 /* Variables allocated to static storage can stay. */
3529 if (!TREE_STATIC (exp))
3530 push_without_duplicates (exp, refs);
3531 break;
3532
3533 case tcc_expression:
3534 /* This is the pattern built in ada/make_aligning_type. */
3535 if (code == ADDR_EXPR
3536 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3537 {
3538 push_without_duplicates (exp, refs);
3539 break;
3540 }
3541
3542 /* Fall through... */
3543
3544 case tcc_exceptional:
3545 case tcc_unary:
3546 case tcc_binary:
3547 case tcc_comparison:
3548 case tcc_reference:
3549 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3550 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3551 break;
3552
3553 case tcc_vl_exp:
3554 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3555 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3556 break;
3557
3558 default:
3559 gcc_unreachable ();
3560 }
3561 }
3562
3563 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3564 return a tree with all occurrences of references to F in a
3565 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3566 CONST_DECLs. Note that we assume here that EXP contains only
3567 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3568 occurring only in their argument list. */
3569
3570 tree
3571 substitute_in_expr (tree exp, tree f, tree r)
3572 {
3573 enum tree_code code = TREE_CODE (exp);
3574 tree op0, op1, op2, op3;
3575 tree new_tree;
3576
3577 /* We handle TREE_LIST and COMPONENT_REF separately. */
3578 if (code == TREE_LIST)
3579 {
3580 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3581 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3582 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3583 return exp;
3584
3585 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3586 }
3587 else if (code == COMPONENT_REF)
3588 {
3589 tree inner;
3590
3591 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3592 and it is the right field, replace it with R. */
3593 for (inner = TREE_OPERAND (exp, 0);
3594 REFERENCE_CLASS_P (inner);
3595 inner = TREE_OPERAND (inner, 0))
3596 ;
3597
3598 /* The field. */
3599 op1 = TREE_OPERAND (exp, 1);
3600
3601 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3602 return r;
3603
3604       /* If this expression hasn't been completed yet, leave it alone.  */
3605 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3606 return exp;
3607
3608 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3609 if (op0 == TREE_OPERAND (exp, 0))
3610 return exp;
3611
3612 new_tree
3613 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3614 }
3615 else
3616 switch (TREE_CODE_CLASS (code))
3617 {
3618 case tcc_constant:
3619 return exp;
3620
3621 case tcc_declaration:
3622 if (exp == f)
3623 return r;
3624 else
3625 return exp;
3626
3627 case tcc_expression:
3628 if (exp == f)
3629 return r;
3630
3631 /* Fall through... */
3632
3633 case tcc_exceptional:
3634 case tcc_unary:
3635 case tcc_binary:
3636 case tcc_comparison:
3637 case tcc_reference:
3638 switch (TREE_CODE_LENGTH (code))
3639 {
3640 case 0:
3641 return exp;
3642
3643 case 1:
3644 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3645 if (op0 == TREE_OPERAND (exp, 0))
3646 return exp;
3647
3648 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3649 break;
3650
3651 case 2:
3652 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3653 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3654
3655 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3656 return exp;
3657
3658 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3659 break;
3660
3661 case 3:
3662 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3663 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3664 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3665
3666 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3667 && op2 == TREE_OPERAND (exp, 2))
3668 return exp;
3669
3670 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3671 break;
3672
3673 case 4:
3674 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3675 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3676 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3677 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3678
3679 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3680 && op2 == TREE_OPERAND (exp, 2)
3681 && op3 == TREE_OPERAND (exp, 3))
3682 return exp;
3683
3684 new_tree
3685 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3686 break;
3687
3688 default:
3689 gcc_unreachable ();
3690 }
3691 break;
3692
3693 case tcc_vl_exp:
3694 {
3695 int i;
3696
3697 new_tree = NULL_TREE;
3698
3699 /* If we are trying to replace F with a constant, inline back
3700 functions which do nothing else than computing a value from
3701 the arguments they are passed. This makes it possible to
3702 fold partially or entirely the replacement expression. */
3703 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3704 {
3705 tree t = maybe_inline_call_in_expr (exp);
3706 if (t)
3707 return SUBSTITUTE_IN_EXPR (t, f, r);
3708 }
3709
3710 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3711 {
3712 tree op = TREE_OPERAND (exp, i);
3713 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3714 if (new_op != op)
3715 {
3716 if (!new_tree)
3717 new_tree = copy_node (exp);
3718 TREE_OPERAND (new_tree, i) = new_op;
3719 }
3720 }
3721
3722 if (new_tree)
3723 {
3724 new_tree = fold (new_tree);
3725 if (TREE_CODE (new_tree) == CALL_EXPR)
3726 process_call_operands (new_tree);
3727 }
3728 else
3729 return exp;
3730 }
3731 break;
3732
3733 default:
3734 gcc_unreachable ();
3735 }
3736
3737 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3738
3739 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3740 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3741
3742 return new_tree;
3743 }
3744
3745 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3746 for it within OBJ, a tree that is an object or a chain of references. */
3747
3748 tree
3749 substitute_placeholder_in_expr (tree exp, tree obj)
3750 {
3751 enum tree_code code = TREE_CODE (exp);
3752 tree op0, op1, op2, op3;
3753 tree new_tree;
3754
3755 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3756 in the chain of OBJ. */
3757 if (code == PLACEHOLDER_EXPR)
3758 {
3759 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3760 tree elt;
3761
3762 for (elt = obj; elt != 0;
3763 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3764 || TREE_CODE (elt) == COND_EXPR)
3765 ? TREE_OPERAND (elt, 1)
3766 : (REFERENCE_CLASS_P (elt)
3767 || UNARY_CLASS_P (elt)
3768 || BINARY_CLASS_P (elt)
3769 || VL_EXP_CLASS_P (elt)
3770 || EXPRESSION_CLASS_P (elt))
3771 ? TREE_OPERAND (elt, 0) : 0))
3772 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3773 return elt;
3774
3775 for (elt = obj; elt != 0;
3776 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3777 || TREE_CODE (elt) == COND_EXPR)
3778 ? TREE_OPERAND (elt, 1)
3779 : (REFERENCE_CLASS_P (elt)
3780 || UNARY_CLASS_P (elt)
3781 || BINARY_CLASS_P (elt)
3782 || VL_EXP_CLASS_P (elt)
3783 || EXPRESSION_CLASS_P (elt))
3784 ? TREE_OPERAND (elt, 0) : 0))
3785 if (POINTER_TYPE_P (TREE_TYPE (elt))
3786 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3787 == need_type))
3788 return fold_build1 (INDIRECT_REF, need_type, elt);
3789
3790 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3791 survives until RTL generation, there will be an error. */
3792 return exp;
3793 }
3794
3795 /* TREE_LIST is special because we need to look at TREE_VALUE
3796 and TREE_CHAIN, not TREE_OPERANDS. */
3797 else if (code == TREE_LIST)
3798 {
3799 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3800 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3801 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3802 return exp;
3803
3804 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3805 }
3806 else
3807 switch (TREE_CODE_CLASS (code))
3808 {
3809 case tcc_constant:
3810 case tcc_declaration:
3811 return exp;
3812
3813 case tcc_exceptional:
3814 case tcc_unary:
3815 case tcc_binary:
3816 case tcc_comparison:
3817 case tcc_expression:
3818 case tcc_reference:
3819 case tcc_statement:
3820 switch (TREE_CODE_LENGTH (code))
3821 {
3822 case 0:
3823 return exp;
3824
3825 case 1:
3826 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3827 if (op0 == TREE_OPERAND (exp, 0))
3828 return exp;
3829
3830 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3831 break;
3832
3833 case 2:
3834 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3835 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3836
3837 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3838 return exp;
3839
3840 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3841 break;
3842
3843 case 3:
3844 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3845 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3846 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3847
3848 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3849 && op2 == TREE_OPERAND (exp, 2))
3850 return exp;
3851
3852 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3853 break;
3854
3855 case 4:
3856 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3857 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3858 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3859 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3860
3861 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3862 && op2 == TREE_OPERAND (exp, 2)
3863 && op3 == TREE_OPERAND (exp, 3))
3864 return exp;
3865
3866 new_tree
3867 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3868 break;
3869
3870 default:
3871 gcc_unreachable ();
3872 }
3873 break;
3874
3875 case tcc_vl_exp:
3876 {
3877 int i;
3878
3879 new_tree = NULL_TREE;
3880
3881 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3882 {
3883 tree op = TREE_OPERAND (exp, i);
3884 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3885 if (new_op != op)
3886 {
3887 if (!new_tree)
3888 new_tree = copy_node (exp);
3889 TREE_OPERAND (new_tree, i) = new_op;
3890 }
3891 }
3892
3893 if (new_tree)
3894 {
3895 new_tree = fold (new_tree);
3896 if (TREE_CODE (new_tree) == CALL_EXPR)
3897 process_call_operands (new_tree);
3898 }
3899 else
3900 return exp;
3901 }
3902 break;
3903
3904 default:
3905 gcc_unreachable ();
3906 }
3907
3908 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3909
3910 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3911 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3912
3913 return new_tree;
3914 }
3915 \f
3916
3917 /* Subroutine of stabilize_reference; this is called for subtrees of
3918 references. Any expression with side-effects must be put in a SAVE_EXPR
3919 to ensure that it is only evaluated once.
3920
3921 We don't put SAVE_EXPR nodes around everything, because assigning very
3922 simple expressions to temporaries causes us to miss good opportunities
3923 for optimizations. Among other things, the opportunity to fold in the
3924 addition of a constant into an addressing mode often gets lost, e.g.
3925 "y[i+1] += x;". In general, we take the approach that we should not make
3926 an assignment unless we are forced into it - i.e., that any non-side effect
3927 operator should be allowed, and that cse should take care of coalescing
3928 multiple utterances of the same expression should that prove fruitful. */
3929
3930 static tree
3931 stabilize_reference_1 (tree e)
3932 {
3933 tree result;
3934 enum tree_code code = TREE_CODE (e);
3935
3936   /* We cannot ignore const expressions because the expression might be a
3937      reference to a const array whose index contains side-effects.  But we
3938      can ignore things that are actual constants or that have already been
3939      handled by this function.  */
3940
3941 if (tree_invariant_p (e))
3942 return e;
3943
3944 switch (TREE_CODE_CLASS (code))
3945 {
3946 case tcc_exceptional:
3947 case tcc_type:
3948 case tcc_declaration:
3949 case tcc_comparison:
3950 case tcc_statement:
3951 case tcc_expression:
3952 case tcc_reference:
3953 case tcc_vl_exp:
3954 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3955 so that it will only be evaluated once. */
3956 /* The reference (r) and comparison (<) classes could be handled as
3957 below, but it is generally faster to only evaluate them once. */
3958 if (TREE_SIDE_EFFECTS (e))
3959 return save_expr (e);
3960 return e;
3961
3962 case tcc_constant:
3963 /* Constants need no processing. In fact, we should never reach
3964 here. */
3965 return e;
3966
3967 case tcc_binary:
3968 /* Division is slow and tends to be compiled with jumps,
3969 especially the division by powers of 2 that is often
3970 found inside of an array reference. So do it just once. */
3971 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3972 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3973 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3974 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3975 return save_expr (e);
3976 /* Recursively stabilize each operand. */
3977 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3978 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3979 break;
3980
3981 case tcc_unary:
3982 /* Recursively stabilize each operand. */
3983 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3984 break;
3985
3986 default:
3987 gcc_unreachable ();
3988 }
3989
3990 TREE_TYPE (result) = TREE_TYPE (e);
3991 TREE_READONLY (result) = TREE_READONLY (e);
3992 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3993 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3994
3995 return result;
3996 }
3997
3998 /* Stabilize a reference so that we can use it any number of times
3999 without causing its operands to be evaluated more than once.
4000 Returns the stabilized reference. This works by means of save_expr,
4001 so see the caveats in the comments about save_expr.
4002
4003 Also allows conversion expressions whose operands are references.
4004 Any other kind of expression is returned unchanged. */
4005
4006 tree
4007 stabilize_reference (tree ref)
4008 {
4009 tree result;
4010 enum tree_code code = TREE_CODE (ref);
4011
4012 switch (code)
4013 {
4014 case VAR_DECL:
4015 case PARM_DECL:
4016 case RESULT_DECL:
4017 /* No action is needed in this case. */
4018 return ref;
4019
4020 CASE_CONVERT:
4021 case FLOAT_EXPR:
4022 case FIX_TRUNC_EXPR:
4023 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4024 break;
4025
4026 case INDIRECT_REF:
4027 result = build_nt (INDIRECT_REF,
4028 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4029 break;
4030
4031 case COMPONENT_REF:
4032 result = build_nt (COMPONENT_REF,
4033 stabilize_reference (TREE_OPERAND (ref, 0)),
4034 TREE_OPERAND (ref, 1), NULL_TREE);
4035 break;
4036
4037 case BIT_FIELD_REF:
4038 result = build_nt (BIT_FIELD_REF,
4039 stabilize_reference (TREE_OPERAND (ref, 0)),
4040 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4041 break;
4042
4043 case ARRAY_REF:
4044 result = build_nt (ARRAY_REF,
4045 stabilize_reference (TREE_OPERAND (ref, 0)),
4046 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4047 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4048 break;
4049
4050 case ARRAY_RANGE_REF:
4051 result = build_nt (ARRAY_RANGE_REF,
4052 stabilize_reference (TREE_OPERAND (ref, 0)),
4053 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4054 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4055 break;
4056
4057 case COMPOUND_EXPR:
4058 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4059 it wouldn't be ignored. This matters when dealing with
4060 volatiles. */
4061 return stabilize_reference_1 (ref);
4062
4063 /* If arg isn't a kind of lvalue we recognize, make no change.
4064 Caller should recognize the error for an invalid lvalue. */
4065 default:
4066 return ref;
4067
4068 case ERROR_MARK:
4069 return error_mark_node;
4070 }
4071
4072 TREE_TYPE (result) = TREE_TYPE (ref);
4073 TREE_READONLY (result) = TREE_READONLY (ref);
4074 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4075 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4076
4077 return result;
4078 }
4079 \f
4080 /* Low-level constructors for expressions. */
4081
4082 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4083 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
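/* As a rough illustration (not an exhaustive specification): for
ADDR_EXPR <&a[i]> where A is a static array, the result is
TREE_CONSTANT only if the index I is constant as well, and
TREE_SIDE_EFFECTS is set whenever I has side effects. */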
4084
4085 void
4086 recompute_tree_invariant_for_addr_expr (tree t)
4087 {
4088 tree node;
4089 bool tc = true, se = false;
4090
4091 /* We start out assuming this address is both invariant and constant, and
4092 that it has no side effects. Now go down any handled components and see if
4093 any of them involve offsets that are either non-constant or non-invariant.
4094 Also check for side-effects.
4095
4096 ??? Note that this code makes no attempt to deal with the case where
4097 taking the address of something causes a copy due to misalignment. */
4098
4099 #define UPDATE_FLAGS(NODE) \
4100 do { tree _node = (NODE); \
4101 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4102 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4103
4104 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4105 node = TREE_OPERAND (node, 0))
4106 {
4107 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4108 array reference (probably made temporarily by the G++ front end),
4109 so ignore all the operands. */
4110 if ((TREE_CODE (node) == ARRAY_REF
4111 || TREE_CODE (node) == ARRAY_RANGE_REF)
4112 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4113 {
4114 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4115 if (TREE_OPERAND (node, 2))
4116 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4117 if (TREE_OPERAND (node, 3))
4118 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4119 }
4120 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4121 FIELD_DECL, apparently. The G++ front end can put something else
4122 there, at least temporarily. */
4123 else if (TREE_CODE (node) == COMPONENT_REF
4124 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4125 {
4126 if (TREE_OPERAND (node, 2))
4127 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4128 }
4129 }
4130
4131 node = lang_hooks.expr_to_decl (node, &tc, &se);
4132
4133 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4134 the address, since &(*a)->b is a form of addition. If it's a constant, the
4135 address is constant too. If it's a decl, its address is constant if the
4136 decl is static. Everything else is not constant and, furthermore,
4137 taking the address of a volatile variable is not volatile. */
4138 if (TREE_CODE (node) == INDIRECT_REF
4139 || TREE_CODE (node) == MEM_REF)
4140 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4141 else if (CONSTANT_CLASS_P (node))
4142 ;
4143 else if (DECL_P (node))
4144 tc &= (staticp (node) != NULL_TREE);
4145 else
4146 {
4147 tc = false;
4148 se |= TREE_SIDE_EFFECTS (node);
4149 }
4150
4151
4152 TREE_CONSTANT (t) = tc;
4153 TREE_SIDE_EFFECTS (t) = se;
4154 #undef UPDATE_FLAGS
4155 }
4156
4157 /* Build an expression of code CODE, data type TYPE, and operands as
4158 specified. Expressions and reference nodes can be created this way.
4159 Constants, decls, types and misc nodes cannot be.
4160
4161 We define 6 non-variadic functions, build0 through build5, taking 0 to 5
4162 operands. This is enough for all extant tree codes. */
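/* A typical use, as an illustrative sketch (a and b stand for
previously built operand trees):

tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);

builds a PLUS_EXPR of type int whose TREE_SIDE_EFFECTS, TREE_READONLY
and TREE_CONSTANT flags are derived from the operands by the
PROCESS_ARG logic below. */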
4163
4164 tree
4165 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4166 {
4167 tree t;
4168
4169 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4170
4171 t = make_node_stat (code PASS_MEM_STAT);
4172 TREE_TYPE (t) = tt;
4173
4174 return t;
4175 }
4176
4177 tree
4178 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4179 {
4180 int length = sizeof (struct tree_exp);
4181 tree t;
4182
4183 record_node_allocation_statistics (code, length);
4184
4185 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4186
4187 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4188
4189 memset (t, 0, sizeof (struct tree_common));
4190
4191 TREE_SET_CODE (t, code);
4192
4193 TREE_TYPE (t) = type;
4194 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4195 TREE_OPERAND (t, 0) = node;
4196 if (node && !TYPE_P (node))
4197 {
4198 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4199 TREE_READONLY (t) = TREE_READONLY (node);
4200 }
4201
4202 if (TREE_CODE_CLASS (code) == tcc_statement)
4203 TREE_SIDE_EFFECTS (t) = 1;
4204 else switch (code)
4205 {
4206 case VA_ARG_EXPR:
4207 /* All of these have side-effects, no matter what their
4208 operands are. */
4209 TREE_SIDE_EFFECTS (t) = 1;
4210 TREE_READONLY (t) = 0;
4211 break;
4212
4213 case INDIRECT_REF:
4214 /* Whether a dereference is readonly has nothing to do with whether
4215 its operand is readonly. */
4216 TREE_READONLY (t) = 0;
4217 break;
4218
4219 case ADDR_EXPR:
4220 if (node)
4221 recompute_tree_invariant_for_addr_expr (t);
4222 break;
4223
4224 default:
4225 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4226 && node && !TYPE_P (node)
4227 && TREE_CONSTANT (node))
4228 TREE_CONSTANT (t) = 1;
4229 if (TREE_CODE_CLASS (code) == tcc_reference
4230 && node && TREE_THIS_VOLATILE (node))
4231 TREE_THIS_VOLATILE (t) = 1;
4232 break;
4233 }
4234
4235 return t;
4236 }
4237
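/* Helper for the buildN functions below. Store operand N of T and fold
the operand's TREE_SIDE_EFFECTS, TREE_READONLY and TREE_CONSTANT flags
into the local side_effects, read_only and constant variables, all of
which are expected to be in scope at the expansion point. */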
4238 #define PROCESS_ARG(N) \
4239 do { \
4240 TREE_OPERAND (t, N) = arg##N; \
4241 if (arg##N && !TYPE_P (arg##N)) \
4242 { \
4243 if (TREE_SIDE_EFFECTS (arg##N)) \
4244 side_effects = 1; \
4245 if (!TREE_READONLY (arg##N) \
4246 && !CONSTANT_CLASS_P (arg##N)) \
4247 (void) (read_only = 0); \
4248 if (!TREE_CONSTANT (arg##N)) \
4249 (void) (constant = 0); \
4250 } \
4251 } while (0)
4252
4253 tree
4254 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4255 {
4256 bool constant, read_only, side_effects;
4257 tree t;
4258
4259 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4260
4261 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4262 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4263 /* When sizetype precision doesn't match that of pointers
4264 we need to be able to build explicit extensions or truncations
4265 of the offset argument. */
4266 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4267 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4268 && TREE_CODE (arg1) == INTEGER_CST);
4269
4270 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4271 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4272 && ptrofftype_p (TREE_TYPE (arg1)));
4273
4274 t = make_node_stat (code PASS_MEM_STAT);
4275 TREE_TYPE (t) = tt;
4276
4277 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4278 result based on those same flags for the arguments. But if the
4279 arguments aren't really even `tree' expressions, we shouldn't be trying
4280 to do this. */
4281
4282 /* Expressions without side effects may be constant if their
4283 arguments are as well. */
4284 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4285 || TREE_CODE_CLASS (code) == tcc_binary);
4286 read_only = 1;
4287 side_effects = TREE_SIDE_EFFECTS (t);
4288
4289 PROCESS_ARG (0);
4290 PROCESS_ARG (1);
4291
4292 TREE_READONLY (t) = read_only;
4293 TREE_CONSTANT (t) = constant;
4294 TREE_SIDE_EFFECTS (t) = side_effects;
4295 TREE_THIS_VOLATILE (t)
4296 = (TREE_CODE_CLASS (code) == tcc_reference
4297 && arg0 && TREE_THIS_VOLATILE (arg0));
4298
4299 return t;
4300 }
4301
4302
4303 tree
4304 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4305 tree arg2 MEM_STAT_DECL)
4306 {
4307 bool constant, read_only, side_effects;
4308 tree t;
4309
4310 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4311 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4312
4313 t = make_node_stat (code PASS_MEM_STAT);
4314 TREE_TYPE (t) = tt;
4315
4316 read_only = 1;
4317
4318 /* As a special exception, if COND_EXPR has NULL branches, we
4319 assume that it is a gimple statement and always consider
4320 it to have side effects. */
4321 if (code == COND_EXPR
4322 && tt == void_type_node
4323 && arg1 == NULL_TREE
4324 && arg2 == NULL_TREE)
4325 side_effects = true;
4326 else
4327 side_effects = TREE_SIDE_EFFECTS (t);
4328
4329 PROCESS_ARG (0);
4330 PROCESS_ARG (1);
4331 PROCESS_ARG (2);
4332
4333 if (code == COND_EXPR)
4334 TREE_READONLY (t) = read_only;
4335
4336 TREE_SIDE_EFFECTS (t) = side_effects;
4337 TREE_THIS_VOLATILE (t)
4338 = (TREE_CODE_CLASS (code) == tcc_reference
4339 && arg0 && TREE_THIS_VOLATILE (arg0));
4340
4341 return t;
4342 }
4343
4344 tree
4345 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4346 tree arg2, tree arg3 MEM_STAT_DECL)
4347 {
4348 bool constant, read_only, side_effects;
4349 tree t;
4350
4351 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4352
4353 t = make_node_stat (code PASS_MEM_STAT);
4354 TREE_TYPE (t) = tt;
4355
4356 side_effects = TREE_SIDE_EFFECTS (t);
4357
4358 PROCESS_ARG (0);
4359 PROCESS_ARG (1);
4360 PROCESS_ARG (2);
4361 PROCESS_ARG (3);
4362
4363 TREE_SIDE_EFFECTS (t) = side_effects;
4364 TREE_THIS_VOLATILE (t)
4365 = (TREE_CODE_CLASS (code) == tcc_reference
4366 && arg0 && TREE_THIS_VOLATILE (arg0));
4367
4368 return t;
4369 }
4370
4371 tree
4372 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4373 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4374 {
4375 bool constant, read_only, side_effects;
4376 tree t;
4377
4378 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4379
4380 t = make_node_stat (code PASS_MEM_STAT);
4381 TREE_TYPE (t) = tt;
4382
4383 side_effects = TREE_SIDE_EFFECTS (t);
4384
4385 PROCESS_ARG (0);
4386 PROCESS_ARG (1);
4387 PROCESS_ARG (2);
4388 PROCESS_ARG (3);
4389 PROCESS_ARG (4);
4390
4391 TREE_SIDE_EFFECTS (t) = side_effects;
4392 TREE_THIS_VOLATILE (t)
4393 = (TREE_CODE_CLASS (code) == tcc_reference
4394 && arg0 && TREE_THIS_VOLATILE (arg0));
4395
4396 return t;
4397 }
4398
4399 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4400 on the pointer PTR. */
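/* For instance (illustrative only): for PTR of type int * this yields a
MEM_REF dereferencing PTR at offset 0; if PTR is &s.f for a field F at
a constant byte offset within S, the access is rewritten to use &s as
the base and F's byte offset as the MEM_REF offset. */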
4401
4402 tree
4403 build_simple_mem_ref_loc (location_t loc, tree ptr)
4404 {
4405 HOST_WIDE_INT offset = 0;
4406 tree ptype = TREE_TYPE (ptr);
4407 tree tem;
4408 /* For convenience allow addresses that collapse to a simple base
4409 and offset. */
4410 if (TREE_CODE (ptr) == ADDR_EXPR
4411 && (handled_component_p (TREE_OPERAND (ptr, 0))
4412 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4413 {
4414 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4415 gcc_assert (ptr);
4416 ptr = build_fold_addr_expr (ptr);
4417 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4418 }
4419 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4420 ptr, build_int_cst (ptype, offset));
4421 SET_EXPR_LOCATION (tem, loc);
4422 return tem;
4423 }
4424
4425 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4426
4427 offset_int
4428 mem_ref_offset (const_tree t)
4429 {
4430 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4431 }
4432
4433 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4434 offsetted by OFFSET units. */
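/* As a sketch (illustrative only): for TYPE == int *, BASE == A and
OFFSET == 4, the result is an ADDR_EXPR of type int * taking the
address of the int located 4 bytes past the start of A, with its
TREE_CONSTANT and TREE_SIDE_EFFECTS flags recomputed by
recompute_tree_invariant_for_addr_expr. */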
4435
4436 tree
4437 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4438 {
4439 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4440 build_fold_addr_expr (base),
4441 build_int_cst (ptr_type_node, offset));
4442 tree addr = build1 (ADDR_EXPR, type, ref);
4443 recompute_tree_invariant_for_addr_expr (addr);
4444 return addr;
4445 }
4446
4447 /* Similar except don't specify the TREE_TYPE
4448 and leave the TREE_SIDE_EFFECTS as 0.
4449 It is permissible for arguments to be null,
4450 or even garbage if their values do not matter. */
4451
4452 tree
4453 build_nt (enum tree_code code, ...)
4454 {
4455 tree t;
4456 int length;
4457 int i;
4458 va_list p;
4459
4460 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4461
4462 va_start (p, code);
4463
4464 t = make_node (code);
4465 length = TREE_CODE_LENGTH (code);
4466
4467 for (i = 0; i < length; i++)
4468 TREE_OPERAND (t, i) = va_arg (p, tree);
4469
4470 va_end (p);
4471 return t;
4472 }
4473
4474 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4475 tree vec. */
4476
4477 tree
4478 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4479 {
4480 tree ret, t;
4481 unsigned int ix;
4482
4483 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4484 CALL_EXPR_FN (ret) = fn;
4485 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4486 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4487 CALL_EXPR_ARG (ret, ix) = t;
4488 return ret;
4489 }
4490 \f
4491 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4492 We do NOT enter this node in any sort of symbol table.
4493
4494 LOC is the location of the decl.
4495
4496 layout_decl is used to set up the decl's storage layout.
4497 Other slots are initialized to 0 or null pointers. */
4498
4499 tree
4500 build_decl_stat (location_t loc, enum tree_code code, tree name,
4501 tree type MEM_STAT_DECL)
4502 {
4503 tree t;
4504
4505 t = make_node_stat (code PASS_MEM_STAT);
4506 DECL_SOURCE_LOCATION (t) = loc;
4507
4508 /* if (type == error_mark_node)
4509 type = integer_type_node; */
4510 /* That is not done, deliberately, so that having error_mark_node
4511 as the type can suppress useless errors in the use of this variable. */
4512
4513 DECL_NAME (t) = name;
4514 TREE_TYPE (t) = type;
4515
4516 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4517 layout_decl (t, 0);
4518
4519 return t;
4520 }
4521
4522 /* Builds and returns function declaration with NAME and TYPE. */
4523
4524 tree
4525 build_fn_decl (const char *name, tree type)
4526 {
4527 tree id = get_identifier (name);
4528 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4529
4530 DECL_EXTERNAL (decl) = 1;
4531 TREE_PUBLIC (decl) = 1;
4532 DECL_ARTIFICIAL (decl) = 1;
4533 TREE_NOTHROW (decl) = 1;
4534
4535 return decl;
4536 }
4537
4538 vec<tree, va_gc> *all_translation_units;
4539
4540 /* Builds a new translation-unit decl with name NAME, queues it in the
4541 global list of translation-unit decls and returns it. */
4542
4543 tree
4544 build_translation_unit_decl (tree name)
4545 {
4546 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4547 name, NULL_TREE);
4548 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4549 vec_safe_push (all_translation_units, tu);
4550 return tu;
4551 }
4552
4553 \f
4554 /* BLOCK nodes are used to represent the structure of binding contours
4555 and declarations, once those contours have been exited and their contents
4556 compiled. This information is used for outputting debugging info. */
4557
4558 tree
4559 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4560 {
4561 tree block = make_node (BLOCK);
4562
4563 BLOCK_VARS (block) = vars;
4564 BLOCK_SUBBLOCKS (block) = subblocks;
4565 BLOCK_SUPERCONTEXT (block) = supercontext;
4566 BLOCK_CHAIN (block) = chain;
4567 return block;
4568 }
4569
4570 \f
4571 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4572
4573 LOC is the location to use in tree T. */
4574
4575 void
4576 protected_set_expr_location (tree t, location_t loc)
4577 {
4578 if (CAN_HAVE_LOCATION_P (t))
4579 SET_EXPR_LOCATION (t, loc);
4580 }
4581 \f
4582 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4583 is ATTRIBUTE. */
4584
4585 tree
4586 build_decl_attribute_variant (tree ddecl, tree attribute)
4587 {
4588 DECL_ATTRIBUTES (ddecl) = attribute;
4589 return ddecl;
4590 }
4591
4592 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4593 is ATTRIBUTE and its qualifiers are QUALS.
4594
4595 Record such modified types already made so we don't make duplicates. */
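/* For example (a sketch only): attaching the attribute list built for
__attribute__ ((may_alias)) to a scalar type returns a distinct
variant carrying that TYPE_ATTRIBUTES; an equivalent variant built
earlier is reused via type_hash_canon rather than duplicated. */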
4596
4597 tree
4598 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4599 {
4600 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4601 {
4602 inchash::hash hstate;
4603 tree ntype;
4604 int i;
4605 tree t;
4606 enum tree_code code = TREE_CODE (ttype);
4607
4608 /* Building a distinct copy of a tagged type is inappropriate; it
4609 causes breakage in code that expects there to be a one-to-one
4610 relationship between a struct and its fields.
4611 build_duplicate_type is another solution (as used in
4612 handle_transparent_union_attribute), but that doesn't play well
4613 with the stronger C++ type identity model. */
4614 if (TREE_CODE (ttype) == RECORD_TYPE
4615 || TREE_CODE (ttype) == UNION_TYPE
4616 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4617 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4618 {
4619 warning (OPT_Wattributes,
4620 "ignoring attributes applied to %qT after definition",
4621 TYPE_MAIN_VARIANT (ttype));
4622 return build_qualified_type (ttype, quals);
4623 }
4624
4625 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4626 ntype = build_distinct_type_copy (ttype);
4627
4628 TYPE_ATTRIBUTES (ntype) = attribute;
4629
4630 hstate.add_int (code);
4631 if (TREE_TYPE (ntype))
4632 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4633 attribute_hash_list (attribute, hstate);
4634
4635 switch (TREE_CODE (ntype))
4636 {
4637 case FUNCTION_TYPE:
4638 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4639 break;
4640 case ARRAY_TYPE:
4641 if (TYPE_DOMAIN (ntype))
4642 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4643 break;
4644 case INTEGER_TYPE:
4645 t = TYPE_MAX_VALUE (ntype);
4646 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4647 hstate.add_object (TREE_INT_CST_ELT (t, i));
4648 break;
4649 case REAL_TYPE:
4650 case FIXED_POINT_TYPE:
4651 {
4652 unsigned int precision = TYPE_PRECISION (ntype);
4653 hstate.add_object (precision);
4654 }
4655 break;
4656 default:
4657 break;
4658 }
4659
4660 ntype = type_hash_canon (hstate.end(), ntype);
4661
4662 /* If the target-dependent attributes make NTYPE different from
4663 its canonical type, we will need to use structural equality
4664 checks for this type. */
4665 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4666 || !comp_type_attributes (ntype, ttype))
4667 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4668 else if (TYPE_CANONICAL (ntype) == ntype)
4669 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4670
4671 ttype = build_qualified_type (ntype, quals);
4672 }
4673 else if (TYPE_QUALS (ttype) != quals)
4674 ttype = build_qualified_type (ttype, quals);
4675
4676 return ttype;
4677 }
4678
4679 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4680 the same. */
4681
4682 static bool
4683 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4684 {
4685 tree cl1, cl2;
4686 for (cl1 = clauses1, cl2 = clauses2;
4687 cl1 && cl2;
4688 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4689 {
4690 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4691 return false;
4692 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4693 {
4694 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4695 OMP_CLAUSE_DECL (cl2)) != 1)
4696 return false;
4697 }
4698 switch (OMP_CLAUSE_CODE (cl1))
4699 {
4700 case OMP_CLAUSE_ALIGNED:
4701 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4702 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4703 return false;
4704 break;
4705 case OMP_CLAUSE_LINEAR:
4706 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4707 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4708 return false;
4709 break;
4710 case OMP_CLAUSE_SIMDLEN:
4711 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4712 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4713 return false;
4714 default:
4715 break;
4716 }
4717 }
4718 return true;
4719 }
4720
4721 /* Compare two constructor-element-type constants. Return true if the lists
4722 are known to be equal; otherwise return false. */
4723
4724 static bool
4725 simple_cst_list_equal (const_tree l1, const_tree l2)
4726 {
4727 while (l1 != NULL_TREE && l2 != NULL_TREE)
4728 {
4729 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4730 return false;
4731
4732 l1 = TREE_CHAIN (l1);
4733 l2 = TREE_CHAIN (l2);
4734 }
4735
4736 return l1 == l2;
4737 }
4738
4739 /* Compare two attributes for their value identity. Return true if the
4740 attribute values are known to be equal; otherwise return false.
4741 */
4742
4743 static bool
4744 attribute_value_equal (const_tree attr1, const_tree attr2)
4745 {
4746 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4747 return true;
4748
4749 if (TREE_VALUE (attr1) != NULL_TREE
4750 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4751 && TREE_VALUE (attr2) != NULL
4752 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4753 return (simple_cst_list_equal (TREE_VALUE (attr1),
4754 TREE_VALUE (attr2)) == 1);
4755
4756 if ((flag_openmp || flag_openmp_simd)
4757 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4758 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4759 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4760 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4761 TREE_VALUE (attr2));
4762
4763 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4764 }
4765
4766 /* Return 0 if the attributes for two types are incompatible, 1 if they
4767 are compatible, and 2 if they are nearly compatible (which causes a
4768 warning to be generated). */
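/* For instance (illustrative only), if TYPE1 carries a target
calling-convention attribute that affects type identity and TYPE2 does
not, the loops below detect the mismatch and the final verdict is left
to targetm.comp_type_attributes. */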
4769 int
4770 comp_type_attributes (const_tree type1, const_tree type2)
4771 {
4772 const_tree a1 = TYPE_ATTRIBUTES (type1);
4773 const_tree a2 = TYPE_ATTRIBUTES (type2);
4774 const_tree a;
4775
4776 if (a1 == a2)
4777 return 1;
4778 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4779 {
4780 const struct attribute_spec *as;
4781 const_tree attr;
4782
4783 as = lookup_attribute_spec (get_attribute_name (a));
4784 if (!as || as->affects_type_identity == false)
4785 continue;
4786
4787 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4788 if (!attr || !attribute_value_equal (a, attr))
4789 break;
4790 }
4791 if (!a)
4792 {
4793 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4794 {
4795 const struct attribute_spec *as;
4796
4797 as = lookup_attribute_spec (get_attribute_name (a));
4798 if (!as || as->affects_type_identity == false)
4799 continue;
4800
4801 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4802 break;
4803 /* We don't need to compare trees again, as we did this
4804 already in the first loop. */
4805 }
4806 /* All attributes affecting type identity are equal, so
4807 there is no need to call the target hook for comparison. */
4808 if (!a)
4809 return 1;
4810 }
4811 /* As some type combinations - like default calling-convention - might
4812 be compatible, we have to call the target hook to get the final result. */
4813 return targetm.comp_type_attributes (type1, type2);
4814 }
4815
4816 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4817 is ATTRIBUTE.
4818
4819 Record such modified types already made so we don't make duplicates. */
4820
4821 tree
4822 build_type_attribute_variant (tree ttype, tree attribute)
4823 {
4824 return build_type_attribute_qual_variant (ttype, attribute,
4825 TYPE_QUALS (ttype));
4826 }
4827
4828
4829 /* Reset the expression *EXPR_P, a size or position.
4830
4831 ??? We could reset all non-constant sizes or positions. But it's cheap
4832 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4833
4834 We need to reset self-referential sizes or positions because they cannot
4835 be gimplified and thus can contain a CALL_EXPR after the gimplification
4836 is finished, which will run afoul of LTO streaming. And they need to be
4837 reset to something essentially dummy but not constant, so as to preserve
4838 the properties of the object they are attached to. */
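/* For example (illustrative), a FIELD_DECL whose DECL_SIZE refers to
another field of the same record via a PLACEHOLDER_EXPR satisfies
CONTAINS_PLACEHOLDER_P and is reset below to a bare PLACEHOLDER_EXPR
of the same type. */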
4839
4840 static inline void
4841 free_lang_data_in_one_sizepos (tree *expr_p)
4842 {
4843 tree expr = *expr_p;
4844 if (CONTAINS_PLACEHOLDER_P (expr))
4845 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4846 }
4847
4848
4849 /* Reset all the fields in a binfo node BINFO. We only keep
4850 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4851
4852 static void
4853 free_lang_data_in_binfo (tree binfo)
4854 {
4855 unsigned i;
4856 tree t;
4857
4858 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4859
4860 BINFO_VIRTUALS (binfo) = NULL_TREE;
4861 BINFO_BASE_ACCESSES (binfo) = NULL;
4862 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4863 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4864
4865 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4866 free_lang_data_in_binfo (t);
4867 }
4868
4869
4870 /* Reset all language specific information still present in TYPE. */
4871
4872 static void
4873 free_lang_data_in_type (tree type)
4874 {
4875 gcc_assert (TYPE_P (type));
4876
4877 /* Give the FE a chance to remove its own data first. */
4878 lang_hooks.free_lang_data (type);
4879
4880 TREE_LANG_FLAG_0 (type) = 0;
4881 TREE_LANG_FLAG_1 (type) = 0;
4882 TREE_LANG_FLAG_2 (type) = 0;
4883 TREE_LANG_FLAG_3 (type) = 0;
4884 TREE_LANG_FLAG_4 (type) = 0;
4885 TREE_LANG_FLAG_5 (type) = 0;
4886 TREE_LANG_FLAG_6 (type) = 0;
4887
4888 if (TREE_CODE (type) == FUNCTION_TYPE)
4889 {
4890 /* Remove the const and volatile qualifiers from arguments. The
4891 C++ front end removes them, but the C front end does not,
4892 leading to false ODR violation errors when merging two
4893 instances of the same function signature compiled by
4894 different front ends. */
4895 tree p;
4896
4897 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4898 {
4899 tree arg_type = TREE_VALUE (p);
4900
4901 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4902 {
4903 int quals = TYPE_QUALS (arg_type)
4904 & ~TYPE_QUAL_CONST
4905 & ~TYPE_QUAL_VOLATILE;
4906 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4907 free_lang_data_in_type (TREE_VALUE (p));
4908 }
4909 }
4910 }
4911
4912 /* Remove members that are neither FIELD_DECLs nor TYPE_DECLs from the
4913 field list of an aggregate. These occur in C++. */
4914 if (RECORD_OR_UNION_TYPE_P (type))
4915 {
4916 tree prev, member;
4917
4918 /* Note that TYPE_FIELDS can be shared across distinct
4919 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4920 to be removed, we cannot set its TREE_CHAIN to NULL.
4921 Otherwise, we would not be able to find all the other fields
4922 in the other instances of this TREE_TYPE.
4923
4924 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4925 prev = NULL_TREE;
4926 member = TYPE_FIELDS (type);
4927 while (member)
4928 {
4929 if (TREE_CODE (member) == FIELD_DECL
4930 || TREE_CODE (member) == TYPE_DECL)
4931 {
4932 if (prev)
4933 TREE_CHAIN (prev) = member;
4934 else
4935 TYPE_FIELDS (type) = member;
4936 prev = member;
4937 }
4938
4939 member = TREE_CHAIN (member);
4940 }
4941
4942 if (prev)
4943 TREE_CHAIN (prev) = NULL_TREE;
4944 else
4945 TYPE_FIELDS (type) = NULL_TREE;
4946
4947 TYPE_METHODS (type) = NULL_TREE;
4948 if (TYPE_BINFO (type))
4949 free_lang_data_in_binfo (TYPE_BINFO (type));
4950 }
4951 else
4952 {
4953 /* For non-aggregate types, clear out the language slot (which
4954 overloads TYPE_BINFO). */
4955 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4956
4957 if (INTEGRAL_TYPE_P (type)
4958 || SCALAR_FLOAT_TYPE_P (type)
4959 || FIXED_POINT_TYPE_P (type))
4960 {
4961 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4962 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4963 }
4964 }
4965
4966 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4967 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4968
4969 if (TYPE_CONTEXT (type)
4970 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4971 {
4972 tree ctx = TYPE_CONTEXT (type);
4973 do
4974 {
4975 ctx = BLOCK_SUPERCONTEXT (ctx);
4976 }
4977 while (ctx && TREE_CODE (ctx) == BLOCK);
4978 TYPE_CONTEXT (type) = ctx;
4979 }
4980 }
4981
4982
4983 /* Return true if DECL may need an assembler name to be set. */
4984
4985 static inline bool
4986 need_assembler_name_p (tree decl)
4987 {
4988 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
4989 merging. */
4990 if (flag_lto_odr_type_mering
4991 && TREE_CODE (decl) == TYPE_DECL
4992 && DECL_NAME (decl)
4993 && decl == TYPE_NAME (TREE_TYPE (decl))
4994 && !is_lang_specific (TREE_TYPE (decl))
4995 && AGGREGATE_TYPE_P (TREE_TYPE (decl))
4996 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
4997 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
4998 return !DECL_ASSEMBLER_NAME_SET_P (decl);
4999 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5000 if (TREE_CODE (decl) != FUNCTION_DECL
5001 && TREE_CODE (decl) != VAR_DECL)
5002 return false;
5003
5004 /* If DECL already has its assembler name set, it does not need a
5005 new one. */
5006 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5007 || DECL_ASSEMBLER_NAME_SET_P (decl))
5008 return false;
5009
5010 /* Abstract decls do not need an assembler name. */
5011 if (DECL_ABSTRACT (decl))
5012 return false;
5013
5014 /* For VAR_DECLs, only static, public and external symbols need an
5015 assembler name. */
5016 if (TREE_CODE (decl) == VAR_DECL
5017 && !TREE_STATIC (decl)
5018 && !TREE_PUBLIC (decl)
5019 && !DECL_EXTERNAL (decl))
5020 return false;
5021
5022 if (TREE_CODE (decl) == FUNCTION_DECL)
5023 {
5024 /* Do not set assembler name on builtins. Allow RTL expansion to
5025 decide whether to expand inline or via a regular call. */
5026 if (DECL_BUILT_IN (decl)
5027 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5028 return false;
5029
5030 /* Functions represented in the callgraph need an assembler name. */
5031 if (cgraph_node::get (decl) != NULL)
5032 return true;
5033
5034 /* Unused and non-public functions don't need an assembler name. */
5035 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5036 return false;
5037 }
5038
5039 return true;
5040 }
5041
5042
5043 /* Reset all language specific information still present in symbol
5044 DECL. */
5045
5046 static void
5047 free_lang_data_in_decl (tree decl)
5048 {
5049 gcc_assert (DECL_P (decl));
5050
5051 /* Give the FE a chance to remove its own data first. */
5052 lang_hooks.free_lang_data (decl);
5053
5054 TREE_LANG_FLAG_0 (decl) = 0;
5055 TREE_LANG_FLAG_1 (decl) = 0;
5056 TREE_LANG_FLAG_2 (decl) = 0;
5057 TREE_LANG_FLAG_3 (decl) = 0;
5058 TREE_LANG_FLAG_4 (decl) = 0;
5059 TREE_LANG_FLAG_5 (decl) = 0;
5060 TREE_LANG_FLAG_6 (decl) = 0;
5061
5062 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5063 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5064 if (TREE_CODE (decl) == FIELD_DECL)
5065 {
5066 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5067 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5068 DECL_QUALIFIER (decl) = NULL_TREE;
5069 }
5070
5071 if (TREE_CODE (decl) == FUNCTION_DECL)
5072 {
5073 struct cgraph_node *node;
5074 if (!(node = cgraph_node::get (decl))
5075 || (!node->definition && !node->clones))
5076 {
5077 if (node)
5078 node->release_body ();
5079 else
5080 {
5081 release_function_body (decl);
5082 DECL_ARGUMENTS (decl) = NULL;
5083 DECL_RESULT (decl) = NULL;
5084 DECL_INITIAL (decl) = error_mark_node;
5085 }
5086 }
5087 if (gimple_has_body_p (decl))
5088 {
5089 tree t;
5090
5091 /* If DECL has a gimple body, then the context for its
5092 arguments must be DECL. Otherwise, it doesn't really
5093 matter, as we will not be emitting any code for DECL. In
5094 general, there may be other instances of DECL created by
5095 the front end and since PARM_DECLs are generally shared,
5096 their DECL_CONTEXT changes as the replicas of DECL are
5097 created. The only time where DECL_CONTEXT is important
5098 is for the FUNCTION_DECLs that have a gimple body (since
5099 the PARM_DECL will be used in the function's body). */
5100 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5101 DECL_CONTEXT (t) = decl;
5102 }
5103
5104 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5105 At this point, it is not needed anymore. */
5106 DECL_SAVED_TREE (decl) = NULL_TREE;
5107
5108 /* Clear the abstract origin if it refers to a method. Otherwise
5109 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5110 origin will not be output correctly. */
5111 if (DECL_ABSTRACT_ORIGIN (decl)
5112 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5113 && RECORD_OR_UNION_TYPE_P
5114 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5115 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5116
5117 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5118 DECL_VINDEX referring to itself into a vtable slot number as it
5119 should. Happens with functions that are copied and then forgotten
5120 about. Just clear it, it won't matter anymore. */
5121 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5122 DECL_VINDEX (decl) = NULL_TREE;
5123 }
5124 else if (TREE_CODE (decl) == VAR_DECL)
5125 {
5126 if ((DECL_EXTERNAL (decl)
5127 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5128 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5129 DECL_INITIAL (decl) = NULL_TREE;
5130 }
5131 else if (TREE_CODE (decl) == TYPE_DECL
5132 || TREE_CODE (decl) == FIELD_DECL)
5133 DECL_INITIAL (decl) = NULL_TREE;
5134 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5135 && DECL_INITIAL (decl)
5136 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5137 {
5138 /* Strip builtins from the translation-unit BLOCK. We still have targets
5139 without builtin_decl_explicit support, and builtins are shared
5140 nodes, so we can't use TREE_CHAIN in multiple lists. */
5141 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5142 while (*nextp)
5143 {
5144 tree var = *nextp;
5145 if (TREE_CODE (var) == FUNCTION_DECL
5146 && DECL_BUILT_IN (var))
5147 *nextp = TREE_CHAIN (var);
5148 else
5149 nextp = &TREE_CHAIN (var);
5150 }
5151 }
5152 }
5153
5154
5155 /* Data used when collecting DECLs and TYPEs for language data removal. */
5156
5157 struct free_lang_data_d
5158 {
5159 /* Worklist to avoid excessive recursion. */
5160 vec<tree> worklist;
5161
5162 /* Set of traversed objects. Used to avoid duplicate visits. */
5163 hash_set<tree> *pset;
5164
5165 /* Array of symbols to process with free_lang_data_in_decl. */
5166 vec<tree> decls;
5167
5168 /* Array of types to process with free_lang_data_in_type. */
5169 vec<tree> types;
5170 };
5171
5172
5173 /* Save all language fields needed to generate proper debug information
5174 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5175
5176 static void
5177 save_debug_info_for_decl (tree t)
5178 {
5179 /*struct saved_debug_info_d *sdi;*/
5180
5181 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5182
5183 /* FIXME. Partial implementation for saving debug info removed. */
5184 }
5185
5186
5187 /* Save all language fields needed to generate proper debug information
5188 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5189
5190 static void
5191 save_debug_info_for_type (tree t)
5192 {
5193 /*struct saved_debug_info_d *sdi;*/
5194
5195 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5196
5197 /* FIXME. Partial implementation for saving debug info removed. */
5198 }
5199
5200
5201 /* Add type or decl T to one of the list of tree nodes that need their
5202 language data removed. The lists are held inside FLD. */
5203
5204 static void
5205 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5206 {
5207 if (DECL_P (t))
5208 {
5209 fld->decls.safe_push (t);
5210 if (debug_info_level > DINFO_LEVEL_TERSE)
5211 save_debug_info_for_decl (t);
5212 }
5213 else if (TYPE_P (t))
5214 {
5215 fld->types.safe_push (t);
5216 if (debug_info_level > DINFO_LEVEL_TERSE)
5217 save_debug_info_for_type (t);
5218 }
5219 else
5220 gcc_unreachable ();
5221 }
5222
5223 /* Push tree node T into FLD->WORKLIST. */
5224
5225 static inline void
5226 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5227 {
5228 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5229 fld->worklist.safe_push ((t));
5230 }
5231
5232
5233 /* Operand callback helper for free_lang_data_in_node. *TP is the
5234 subtree operand being considered. */
5235
5236 static tree
5237 find_decls_types_r (tree *tp, int *ws, void *data)
5238 {
5239 tree t = *tp;
5240 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5241
5242 if (TREE_CODE (t) == TREE_LIST)
5243 return NULL_TREE;
5244
5245 /* Language specific nodes will be removed, so there is no need
5246 to gather anything under them. */
5247 if (is_lang_specific (t))
5248 {
5249 *ws = 0;
5250 return NULL_TREE;
5251 }
5252
5253 if (DECL_P (t))
5254 {
5255 /* Note that walk_tree does not traverse every possible field in
5256 decls, so we have to do our own traversals here. */
5257 add_tree_to_fld_list (t, fld);
5258
5259 fld_worklist_push (DECL_NAME (t), fld);
5260 fld_worklist_push (DECL_CONTEXT (t), fld);
5261 fld_worklist_push (DECL_SIZE (t), fld);
5262 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5263
5264 /* We are going to remove everything under DECL_INITIAL for
5265 TYPE_DECLs. No point walking them. */
5266 if (TREE_CODE (t) != TYPE_DECL)
5267 fld_worklist_push (DECL_INITIAL (t), fld);
5268
5269 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5270 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5271
5272 if (TREE_CODE (t) == FUNCTION_DECL)
5273 {
5274 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5275 fld_worklist_push (DECL_RESULT (t), fld);
5276 }
5277 else if (TREE_CODE (t) == TYPE_DECL)
5278 {
5279 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5280 }
5281 else if (TREE_CODE (t) == FIELD_DECL)
5282 {
5283 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5284 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5285 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5286 fld_worklist_push (DECL_FCONTEXT (t), fld);
5287 }
5288
5289 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5290 && DECL_HAS_VALUE_EXPR_P (t))
5291 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5292
5293 if (TREE_CODE (t) != FIELD_DECL
5294 && TREE_CODE (t) != TYPE_DECL)
5295 fld_worklist_push (TREE_CHAIN (t), fld);
5296 *ws = 0;
5297 }
5298 else if (TYPE_P (t))
5299 {
5300 /* Note that walk_tree does not traverse every possible field in
5301 types, so we have to do our own traversals here. */
5302 add_tree_to_fld_list (t, fld);
5303
5304 if (!RECORD_OR_UNION_TYPE_P (t))
5305 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5306 fld_worklist_push (TYPE_SIZE (t), fld);
5307 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5308 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5309 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5310 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5311 fld_worklist_push (TYPE_NAME (t), fld);
5312 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5313 them and thus do not want to reach unused pointer types
5314 this way. */
5315 if (!POINTER_TYPE_P (t))
5316 fld_worklist_push (TYPE_MINVAL (t), fld);
5317 if (!RECORD_OR_UNION_TYPE_P (t))
5318 fld_worklist_push (TYPE_MAXVAL (t), fld);
5319 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5320 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5321 do not want to reach unused variants this way. */
5322 if (TYPE_CONTEXT (t))
5323 {
5324 tree ctx = TYPE_CONTEXT (t);
5325 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5326 So push that instead. */
5327 while (ctx && TREE_CODE (ctx) == BLOCK)
5328 ctx = BLOCK_SUPERCONTEXT (ctx);
5329 fld_worklist_push (ctx, fld);
5330 }
5331 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5332 want to reach unused types this way. */
5333
5334 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5335 {
5336 unsigned i;
5337 tree tem;
5338 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5339 fld_worklist_push (TREE_TYPE (tem), fld);
5340 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5341 if (tem
5342 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5343 && TREE_CODE (tem) == TREE_LIST)
5344 do
5345 {
5346 fld_worklist_push (TREE_VALUE (tem), fld);
5347 tem = TREE_CHAIN (tem);
5348 }
5349 while (tem);
5350 }
5351 if (RECORD_OR_UNION_TYPE_P (t))
5352 {
5353 tree tem;
5354 /* Push all TYPE_FIELDS - interesting and non-interesting entries
5355 can be interleaved. */
5356 tem = TYPE_FIELDS (t);
5357 while (tem)
5358 {
5359 if (TREE_CODE (tem) == FIELD_DECL
5360 || TREE_CODE (tem) == TYPE_DECL)
5361 fld_worklist_push (tem, fld);
5362 tem = TREE_CHAIN (tem);
5363 }
5364 }
5365
5366 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5367 *ws = 0;
5368 }
5369 else if (TREE_CODE (t) == BLOCK)
5370 {
5371 tree tem;
5372 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5373 fld_worklist_push (tem, fld);
5374 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5375 fld_worklist_push (tem, fld);
5376 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5377 }
5378
5379 if (TREE_CODE (t) != IDENTIFIER_NODE
5380 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5381 fld_worklist_push (TREE_TYPE (t), fld);
5382
5383 return NULL_TREE;
5384 }
5385
5386
5387 /* Find decls and types in T. */
5388
5389 static void
5390 find_decls_types (tree t, struct free_lang_data_d *fld)
5391 {
5392 while (1)
5393 {
5394 if (!fld->pset->contains (t))
5395 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5396 if (fld->worklist.is_empty ())
5397 break;
5398 t = fld->worklist.pop ();
5399 }
5400 }
5401
5402 /* Translate all the types in LIST into the corresponding runtime
5403 types. */
5404
5405 static tree
5406 get_eh_types_for_runtime (tree list)
5407 {
5408 tree head, prev;
5409
5410 if (list == NULL_TREE)
5411 return NULL_TREE;
5412
5413 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5414 prev = head;
5415 list = TREE_CHAIN (list);
5416 while (list)
5417 {
5418 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5419 TREE_CHAIN (prev) = n;
5420 prev = TREE_CHAIN (prev);
5421 list = TREE_CHAIN (list);
5422 }
5423
5424 return head;
5425 }
5426
5427
5428 /* Find decls and types referenced in EH region R and store them in
5429 FLD->DECLS and FLD->TYPES. */
5430
5431 static void
5432 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5433 {
5434 switch (r->type)
5435 {
5436 case ERT_CLEANUP:
5437 break;
5438
5439 case ERT_TRY:
5440 {
5441 eh_catch c;
5442
5443 /* The types referenced in each catch must first be changed to the
5444 EH types used at runtime. This removes references to FE types
5445 in the region. */
5446 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5447 {
5448 c->type_list = get_eh_types_for_runtime (c->type_list);
5449 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5450 }
5451 }
5452 break;
5453
5454 case ERT_ALLOWED_EXCEPTIONS:
5455 r->u.allowed.type_list
5456 = get_eh_types_for_runtime (r->u.allowed.type_list);
5457 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5458 break;
5459
5460 case ERT_MUST_NOT_THROW:
5461 walk_tree (&r->u.must_not_throw.failure_decl,
5462 find_decls_types_r, fld, fld->pset);
5463 break;
5464 }
5465 }
5466
5467
5468 /* Find decls and types referenced in cgraph node N and store them in
5469 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5470 look for *every* kind of DECL and TYPE node reachable from N,
5471 including those embedded inside types and decls (i.e., TYPE_DECLs,
5472 NAMESPACE_DECLs, etc.). */
5473
5474 static void
5475 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5476 {
5477 basic_block bb;
5478 struct function *fn;
5479 unsigned ix;
5480 tree t;
5481
5482 find_decls_types (n->decl, fld);
5483
5484 if (!gimple_has_body_p (n->decl))
5485 return;
5486
5487 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5488
5489 fn = DECL_STRUCT_FUNCTION (n->decl);
5490
5491 /* Traverse locals. */
5492 FOR_EACH_LOCAL_DECL (fn, ix, t)
5493 find_decls_types (t, fld);
5494
5495 /* Traverse EH regions in FN. */
5496 {
5497 eh_region r;
5498 FOR_ALL_EH_REGION_FN (r, fn)
5499 find_decls_types_in_eh_region (r, fld);
5500 }
5501
5502 /* Traverse every statement in FN. */
5503 FOR_EACH_BB_FN (bb, fn)
5504 {
5505 gimple_stmt_iterator si;
5506 unsigned i;
5507
5508 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5509 {
5510 gimple phi = gsi_stmt (si);
5511
5512 for (i = 0; i < gimple_phi_num_args (phi); i++)
5513 {
5514 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5515 find_decls_types (*arg_p, fld);
5516 }
5517 }
5518
5519 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5520 {
5521 gimple stmt = gsi_stmt (si);
5522
5523 if (is_gimple_call (stmt))
5524 find_decls_types (gimple_call_fntype (stmt), fld);
5525
5526 for (i = 0; i < gimple_num_ops (stmt); i++)
5527 {
5528 tree arg = gimple_op (stmt, i);
5529 find_decls_types (arg, fld);
5530 }
5531 }
5532 }
5533 }
5534
5535
5536 /* Find decls and types referenced in varpool node N and store them in
5537 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5538 look for *every* kind of DECL and TYPE node reachable from N,
5539 including those embedded inside types and decls (i.e., TYPE_DECLs,
5540 NAMESPACE_DECLs, etc.). */
5541
5542 static void
5543 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5544 {
5545 find_decls_types (v->decl, fld);
5546 }
5547
5548 /* If T needs an assembler name, have one created for it. */
5549
5550 void
5551 assign_assembler_name_if_neeeded (tree t)
5552 {
5553 if (need_assembler_name_p (t))
5554 {
5555 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5556 diagnostics that use input_location to show locus
5557 information. The problem here is that, at this point,
5558 input_location is generally anchored to the end of the file
5559 (since the parser is long gone), so we don't have a good
5560 position to pin it to.
5561
5562 To alleviate this problem, this uses the location of T's
5563 declaration. Examples of this are
5564 testsuite/g++.dg/template/cond2.C and
5565 testsuite/g++.dg/template/pr35240.C. */
5566 location_t saved_location = input_location;
5567 input_location = DECL_SOURCE_LOCATION (t);
5568
5569 decl_assembler_name (t);
5570
5571 input_location = saved_location;
5572 }
5573 }
5574
5575
5576 /* Free language specific information for every operand and expression
5577 in every node of the call graph. This process operates in three stages:
5578
5579 1- Every callgraph node and varpool node is traversed looking for
5580 decls and types embedded in them. This is a more exhaustive
5581 search than that done by find_referenced_vars, because it will
5582 also collect individual fields, decls embedded in types, etc.
5583
5584 2- All the decls found are sent to free_lang_data_in_decl.
5585
5586 3- All the types found are sent to free_lang_data_in_type.
5587
5588 The ordering between decls and types is important because
5589 free_lang_data_in_decl sets assembler names, which includes
5590 mangling. So types cannot be freed up until assembler names have
5591 been set up. */
5592
5593 static void
5594 free_lang_data_in_cgraph (void)
5595 {
5596 struct cgraph_node *n;
5597 varpool_node *v;
5598 struct free_lang_data_d fld;
5599 tree t;
5600 unsigned i;
5601 alias_pair *p;
5602
5603 /* Initialize sets and arrays to store referenced decls and types. */
5604 fld.pset = new hash_set<tree>;
5605 fld.worklist.create (0);
5606 fld.decls.create (100);
5607 fld.types.create (100);
5608
5609 /* Find decls and types in the body of every function in the callgraph. */
5610 FOR_EACH_FUNCTION (n)
5611 find_decls_types_in_node (n, &fld);
5612
5613 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5614 find_decls_types (p->decl, &fld);
5615
5616 /* Find decls and types in every varpool symbol. */
5617 FOR_EACH_VARIABLE (v)
5618 find_decls_types_in_var (v, &fld);
5619
5620 /* Set the assembler name on every decl found. We need to do this
5621 now because free_lang_data_in_decl will invalidate data needed
5622 for mangling. This breaks mangling on interdependent decls. */
5623 FOR_EACH_VEC_ELT (fld.decls, i, t)
5624 assign_assembler_name_if_neeeded (t);
5625
5626 /* Traverse every decl found freeing its language data. */
5627 FOR_EACH_VEC_ELT (fld.decls, i, t)
5628 free_lang_data_in_decl (t);
5629
5630 /* Traverse every type found freeing its language data. */
5631 FOR_EACH_VEC_ELT (fld.types, i, t)
5632 free_lang_data_in_type (t);
5633
5634 delete fld.pset;
5635 fld.worklist.release ();
5636 fld.decls.release ();
5637 fld.types.release ();
5638 }
5639
5640
5641 /* Free resources that are used by the front end but are not needed once it is done. */
5642
5643 static unsigned
5644 free_lang_data (void)
5645 {
5646 unsigned i;
5647
5648 /* If we are the LTO frontend we have freed lang-specific data already. */
5649 if (in_lto_p
5650 || !flag_generate_lto)
5651 return 0;
5652
5653 /* Allocate and assign alias sets to the standard integer types
5654 while the slots are still set up the way the front ends generated them. */
5655 for (i = 0; i < itk_none; ++i)
5656 if (integer_types[i])
5657 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5658
5659 /* Traverse the IL resetting language specific information for
5660 operands, expressions, etc. */
5661 free_lang_data_in_cgraph ();
5662
5663 /* Create gimple variants for common types. */
5664 ptrdiff_type_node = integer_type_node;
5665 fileptr_type_node = ptr_type_node;
5666
5667 /* Reset some langhooks. Do not reset types_compatible_p, it may
5668 still be used indirectly via the get_alias_set langhook. */
5669 lang_hooks.dwarf_name = lhd_dwarf_name;
5670 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5671 /* We do not want the default decl_assembler_name implementation;
5672 rather, once everything has been fixed, we want a wrapper around it
5673 asserting that all non-local symbols already got their assembler
5674 names, and producing assembler names only for local symbols. Or,
5675 better, make sure we never call decl_assembler_name on local symbols
5676 and devise a separate, middle-end private scheme for it. */
5677
5678 /* Reset diagnostic machinery. */
5679 tree_diagnostics_defaults (global_dc);
5680
5681 return 0;
5682 }
5683
5684
5685 namespace {
5686
5687 const pass_data pass_data_ipa_free_lang_data =
5688 {
5689 SIMPLE_IPA_PASS, /* type */
5690 "*free_lang_data", /* name */
5691 OPTGROUP_NONE, /* optinfo_flags */
5692 TV_IPA_FREE_LANG_DATA, /* tv_id */
5693 0, /* properties_required */
5694 0, /* properties_provided */
5695 0, /* properties_destroyed */
5696 0, /* todo_flags_start */
5697 0, /* todo_flags_finish */
5698 };
5699
5700 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5701 {
5702 public:
5703 pass_ipa_free_lang_data (gcc::context *ctxt)
5704 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5705 {}
5706
5707 /* opt_pass methods: */
5708 virtual unsigned int execute (function *) { return free_lang_data (); }
5709
5710 }; // class pass_ipa_free_lang_data
5711
5712 } // anon namespace
5713
5714 simple_ipa_opt_pass *
5715 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5716 {
5717 return new pass_ipa_free_lang_data (ctxt);
5718 }
5719
5720 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5721 ATTR_NAME. Also used internally by remove_attribute(). */
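/* E.g. (illustrative) private_is_attribute_p ("packed", 6, ident)
returns true when IDENT is spelled either "packed" or "__packed__". */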
5722 bool
5723 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5724 {
5725 size_t ident_len = IDENTIFIER_LENGTH (ident);
5726
5727 if (ident_len == attr_len)
5728 {
5729 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5730 return true;
5731 }
5732 else if (ident_len == attr_len + 4)
5733 {
5734 /* There is the possibility that ATTR is 'text' and IDENT is
5735 '__text__'. */
5736 const char *p = IDENTIFIER_POINTER (ident);
5737 if (p[0] == '_' && p[1] == '_'
5738 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5739 && strncmp (attr_name, p + 2, attr_len) == 0)
5740 return true;
5741 }
5742
5743 return false;
5744 }
5745
5746 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5747 of ATTR_NAME, and LIST is not NULL_TREE. */
5748 tree
5749 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5750 {
5751 while (list)
5752 {
5753 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5754
5755 if (ident_len == attr_len)
5756 {
5757 if (!strcmp (attr_name,
5758 IDENTIFIER_POINTER (get_attribute_name (list))))
5759 break;
5760 }
5761 /* TODO: If we made sure that attributes were stored in the
5762 canonical form without '__...__' (i.e., as in 'text' as opposed
5763 to '__text__') then we could avoid the following case. */
5764 else if (ident_len == attr_len + 4)
5765 {
5766 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5767 if (p[0] == '_' && p[1] == '_'
5768 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5769 && strncmp (attr_name, p + 2, attr_len) == 0)
5770 break;
5771 }
5772 list = TREE_CHAIN (list);
5773 }
5774
5775 return list;
5776 }
5777
5778 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5779 return a pointer to the first list element whose attribute name
5780 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5781 '__text__'). */
5782
5783 tree
5784 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5785 tree list)
5786 {
5787 while (list)
5788 {
5789 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5790
5791 if (attr_len > ident_len)
5792 {
5793 list = TREE_CHAIN (list);
5794 continue;
5795 }
5796
5797 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5798
5799 if (strncmp (attr_name, p, attr_len) == 0)
5800 break;
5801
5802 /* TODO: If we made sure that attributes were stored in the
5803 canonical form without '__...__' (i.e., as in 'text' as opposed
5804 to '__text__') then we could avoid the following case. */
5805 if (p[0] == '_' && p[1] == '_' &&
5806 strncmp (attr_name, p + 2, attr_len) == 0)
5807 break;
5808
5809 list = TREE_CHAIN (list);
5810 }
5811
5812 return list;
5813 }
5814
5815
5816 /* A variant of lookup_attribute() that can be used with an identifier
5817 as the first argument, and where the identifier can be either
5818 'text' or '__text__'.
5819
5820 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5821 return a pointer to the attribute's list element if the attribute
5822 is part of the list, or NULL_TREE if not found. If the attribute
5823 appears more than once, this only returns the first occurrence; the
5824 TREE_CHAIN of the return value should be passed back in if further
5825 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5826 can be in the form 'text' or '__text__'. */
5827 static tree
5828 lookup_ident_attribute (tree attr_identifier, tree list)
5829 {
5830 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5831
5832 while (list)
5833 {
5834 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5835 == IDENTIFIER_NODE);
5836
5837 /* Identifiers can be compared directly for equality. */
5838 if (attr_identifier == get_attribute_name (list))
5839 break;
5840
5841 /* If they are not equal, they may still be one in the form
5842 'text' while the other one is in the form '__text__'. TODO:
5843 If we were storing attributes in normalized 'text' form, then
5844 this could all go away and we could take full advantage of
5845 the fact that we're comparing identifiers. :-) */
5846 {
5847 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5848 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5849
5850 if (ident_len == attr_len + 4)
5851 {
5852 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5853 const char *q = IDENTIFIER_POINTER (attr_identifier);
5854 if (p[0] == '_' && p[1] == '_'
5855 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5856 && strncmp (q, p + 2, attr_len) == 0)
5857 break;
5858 }
5859 else if (ident_len + 4 == attr_len)
5860 {
5861 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5862 const char *q = IDENTIFIER_POINTER (attr_identifier);
5863 if (q[0] == '_' && q[1] == '_'
5864 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5865 && strncmp (q + 2, p, ident_len) == 0)
5866 break;
5867 }
5868 }
5869 list = TREE_CHAIN (list);
5870 }
5871
5872 return list;
5873 }
5874
5875 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5876 modified list. */
5877
5878 tree
5879 remove_attribute (const char *attr_name, tree list)
5880 {
5881 tree *p;
5882 size_t attr_len = strlen (attr_name);
5883
5884 gcc_checking_assert (attr_name[0] != '_');
5885
5886 for (p = &list; *p; )
5887 {
5888 tree l = *p;
5889 /* TODO: If we were storing attributes in normalized form, here
5890 we could use a simple strcmp(). */
5891 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5892 *p = TREE_CHAIN (l);
5893 else
5894 p = &TREE_CHAIN (l);
5895 }
5896
5897 return list;
5898 }
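
/* Usage sketch (illustrative; DECL here is a hypothetical declaration, not
   part of this file).  Because the list head may change, callers are
   expected to store the result back, e.g.

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));

   Note the name is passed without the '__' decoration, as asserted above.  */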
5899
5900 /* Return an attribute list that is the union of a1 and a2. */
5901
5902 tree
5903 merge_attributes (tree a1, tree a2)
5904 {
5905 tree attributes;
5906
5907 /* Either one unset? Take the set one. */
5908
5909 if ((attributes = a1) == 0)
5910 attributes = a2;
5911
5912 /* One that completely contains the other? Take it. */
5913
5914 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5915 {
5916 if (attribute_list_contained (a2, a1))
5917 attributes = a2;
5918 else
5919 {
5920 /* Pick the longest list, and hang on the other list. */
5921
5922 if (list_length (a1) < list_length (a2))
5923 attributes = a2, a2 = a1;
5924
5925 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5926 {
5927 tree a;
5928 for (a = lookup_ident_attribute (get_attribute_name (a2),
5929 attributes);
5930 a != NULL_TREE && !attribute_value_equal (a, a2);
5931 a = lookup_ident_attribute (get_attribute_name (a2),
5932 TREE_CHAIN (a)))
5933 ;
5934 if (a == NULL_TREE)
5935 {
5936 a1 = copy_node (a2);
5937 TREE_CHAIN (a1) = attributes;
5938 attributes = a1;
5939 }
5940 }
5941 }
5942 }
5943 return attributes;
5944 }
5945
5946 /* Given types T1 and T2, merge their attributes and return
5947 the result. */
5948
5949 tree
5950 merge_type_attributes (tree t1, tree t2)
5951 {
5952 return merge_attributes (TYPE_ATTRIBUTES (t1),
5953 TYPE_ATTRIBUTES (t2));
5954 }
5955
5956 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5957 the result. */
5958
5959 tree
5960 merge_decl_attributes (tree olddecl, tree newdecl)
5961 {
5962 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5963 DECL_ATTRIBUTES (newdecl));
5964 }
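
/* Usage sketch (illustrative; OLDDECL and NEWDECL stand for a pair of
   declarations being merged by a front end, not objects in this file):

     DECL_ATTRIBUTES (newdecl)
       = merge_decl_attributes (olddecl, newdecl);

   i.e. the merged list is normally stored back on the newer declaration.  */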
5965
5966 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5967
5968 /* Specialization of merge_decl_attributes for various Windows targets.
5969
5970 This handles the following situation:
5971
5972 __declspec (dllimport) int foo;
5973 int foo;
5974
5975 The second instance of `foo' nullifies the dllimport. */
5976
5977 tree
5978 merge_dllimport_decl_attributes (tree old, tree new_tree)
5979 {
5980 tree a;
5981 int delete_dllimport_p = 1;
5982
5983 /* What we need to do here is remove from `old' dllimport if it doesn't
5984 appear in `new'. dllimport behaves like extern: if a declaration is
5985 marked dllimport and a definition appears later, then the object
5986 is not dllimport'd. We also remove a `new' dllimport if the old list
5987 contains dllexport: dllexport always overrides dllimport, regardless
5988 of the order of declaration. */
5989 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
5990 delete_dllimport_p = 0;
5991 else if (DECL_DLLIMPORT_P (new_tree)
5992 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
5993 {
5994 DECL_DLLIMPORT_P (new_tree) = 0;
5995 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
5996 "dllimport ignored", new_tree);
5997 }
5998 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
5999 {
6000 /* Warn about overriding a symbol that has already been used, e.g.:
6001 extern int __attribute__ ((dllimport)) foo;
6002 int* bar () {return &foo;}
6003 int foo;
6004 */
6005 if (TREE_USED (old))
6006 {
6007 warning (0, "%q+D redeclared without dllimport attribute "
6008 "after being referenced with dll linkage", new_tree);
6009 /* If we have used a variable's address with dllimport linkage,
6010 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6011 decl may already have had TREE_CONSTANT computed.
6012 We still remove the attribute so that assembler code refers
6013 to '&foo' rather than '_imp__foo'. */
6014 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6015 DECL_DLLIMPORT_P (new_tree) = 1;
6016 }
6017
6018 /* Let an inline definition silently override the external reference,
6019 but otherwise warn about attribute inconsistency. */
6020 else if (TREE_CODE (new_tree) == VAR_DECL
6021 || !DECL_DECLARED_INLINE_P (new_tree))
6022 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6023 "previous dllimport ignored", new_tree);
6024 }
6025 else
6026 delete_dllimport_p = 0;
6027
6028 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6029
6030 if (delete_dllimport_p)
6031 a = remove_attribute ("dllimport", a);
6032
6033 return a;
6034 }
6035
6036 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6037 struct attribute_spec.handler. */
6038
6039 tree
6040 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6041 bool *no_add_attrs)
6042 {
6043 tree node = *pnode;
6044 bool is_dllimport;
6045
6046 /* These attributes may apply to structure and union types being created,
6047 but otherwise should pass to the declaration involved. */
6048 if (!DECL_P (node))
6049 {
6050 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6051 | (int) ATTR_FLAG_ARRAY_NEXT))
6052 {
6053 *no_add_attrs = true;
6054 return tree_cons (name, args, NULL_TREE);
6055 }
6056 if (TREE_CODE (node) == RECORD_TYPE
6057 || TREE_CODE (node) == UNION_TYPE)
6058 {
6059 node = TYPE_NAME (node);
6060 if (!node)
6061 return NULL_TREE;
6062 }
6063 else
6064 {
6065 warning (OPT_Wattributes, "%qE attribute ignored",
6066 name);
6067 *no_add_attrs = true;
6068 return NULL_TREE;
6069 }
6070 }
6071
6072 if (TREE_CODE (node) != FUNCTION_DECL
6073 && TREE_CODE (node) != VAR_DECL
6074 && TREE_CODE (node) != TYPE_DECL)
6075 {
6076 *no_add_attrs = true;
6077 warning (OPT_Wattributes, "%qE attribute ignored",
6078 name);
6079 return NULL_TREE;
6080 }
6081
6082 if (TREE_CODE (node) == TYPE_DECL
6083 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6084 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6085 {
6086 *no_add_attrs = true;
6087 warning (OPT_Wattributes, "%qE attribute ignored",
6088 name);
6089 return NULL_TREE;
6090 }
6091
6092 is_dllimport = is_attribute_p ("dllimport", name);
6093
6094 /* Report error on dllimport ambiguities seen now before they cause
6095 any damage. */
6096 if (is_dllimport)
6097 {
6098 /* Honor any target-specific overrides. */
6099 if (!targetm.valid_dllimport_attribute_p (node))
6100 *no_add_attrs = true;
6101
6102 else if (TREE_CODE (node) == FUNCTION_DECL
6103 && DECL_DECLARED_INLINE_P (node))
6104 {
6105 warning (OPT_Wattributes, "inline function %q+D declared as "
6106 " dllimport: attribute ignored", node);
6107 *no_add_attrs = true;
6108 }
6109 /* Like MS, treat definition of dllimported variables and
6110 non-inlined functions on declaration as syntax errors. */
6111 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6112 {
6113 error ("function %q+D definition is marked dllimport", node);
6114 *no_add_attrs = true;
6115 }
6116
6117 else if (TREE_CODE (node) == VAR_DECL)
6118 {
6119 if (DECL_INITIAL (node))
6120 {
6121 error ("variable %q+D definition is marked dllimport",
6122 node);
6123 *no_add_attrs = true;
6124 }
6125
6126 /* `extern' needn't be specified with dllimport.
6127 Specify `extern' now and hope for the best. Sigh. */
6128 DECL_EXTERNAL (node) = 1;
6129 /* Also, implicitly give dllimport'd variables declared within
6130 a function global scope, unless declared static. */
6131 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6132 TREE_PUBLIC (node) = 1;
6133 }
6134
6135 if (!*no_add_attrs)
6136 DECL_DLLIMPORT_P (node) = 1;
6137 }
6138 else if (TREE_CODE (node) == FUNCTION_DECL
6139 && DECL_DECLARED_INLINE_P (node)
6140 && flag_keep_inline_dllexport)
6141 /* An exported function, even if inline, must be emitted. */
6142 DECL_EXTERNAL (node) = 0;
6143
6144 /* Report error if symbol is not accessible at global scope. */
6145 if (!TREE_PUBLIC (node)
6146 && (TREE_CODE (node) == VAR_DECL
6147 || TREE_CODE (node) == FUNCTION_DECL))
6148 {
6149 error ("external linkage required for symbol %q+D because of "
6150 "%qE attribute", node, name);
6151 *no_add_attrs = true;
6152 }
6153
6154 /* A dllexport'd entity must have default visibility so that other
6155 program units (shared libraries or the main executable) can see
6156 it. A dllimport'd entity must have default visibility so that
6157 the linker knows that undefined references within this program
6158 unit can be resolved by the dynamic linker. */
6159 if (!*no_add_attrs)
6160 {
6161 if (DECL_VISIBILITY_SPECIFIED (node)
6162 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6163 error ("%qE implies default visibility, but %qD has already "
6164 "been declared with a different visibility",
6165 name, node);
6166 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6167 DECL_VISIBILITY_SPECIFIED (node) = 1;
6168 }
6169
6170 return NULL_TREE;
6171 }
6172
6173 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6174 \f
6175 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6176 of the various TYPE_QUAL values. */
6177
6178 static void
6179 set_type_quals (tree type, int type_quals)
6180 {
6181 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6182 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6183 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6184 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6185 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6186 }
6187
6188 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6189
6190 bool
6191 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6192 {
6193 return (TYPE_QUALS (cand) == type_quals
6194 && TYPE_NAME (cand) == TYPE_NAME (base)
6195 /* Apparently this is needed for Objective-C. */
6196 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6197 /* Check alignment. */
6198 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6199 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6200 TYPE_ATTRIBUTES (base)));
6201 }
6202
6203 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6204
6205 static bool
6206 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6207 {
6208 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6209 && TYPE_NAME (cand) == TYPE_NAME (base)
6210 /* Apparently this is needed for Objective-C. */
6211 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6212 /* Check alignment. */
6213 && TYPE_ALIGN (cand) == align
6214 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6215 TYPE_ATTRIBUTES (base)));
6216 }
6217
6218 /* This function checks to see if TYPE matches the size of one of the
6219 built-in atomic types, and returns that core atomic type. */
6220
6221 static tree
6222 find_atomic_core_type (tree type)
6223 {
6224 tree base_atomic_type;
6225
6226 /* Only handle complete types. */
6227 if (TYPE_SIZE (type) == NULL_TREE)
6228 return NULL_TREE;
6229
6230 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6231 switch (type_size)
6232 {
6233 case 8:
6234 base_atomic_type = atomicQI_type_node;
6235 break;
6236
6237 case 16:
6238 base_atomic_type = atomicHI_type_node;
6239 break;
6240
6241 case 32:
6242 base_atomic_type = atomicSI_type_node;
6243 break;
6244
6245 case 64:
6246 base_atomic_type = atomicDI_type_node;
6247 break;
6248
6249 case 128:
6250 base_atomic_type = atomicTI_type_node;
6251 break;
6252
6253 default:
6254 base_atomic_type = NULL_TREE;
6255 }
6256
6257 return base_atomic_type;
6258 }
6259
6260 /* Return a version of the TYPE, qualified as indicated by the
6261 TYPE_QUALS, if one exists. If no qualified version exists yet,
6262 return NULL_TREE. */
6263
6264 tree
6265 get_qualified_type (tree type, int type_quals)
6266 {
6267 tree t;
6268
6269 if (TYPE_QUALS (type) == type_quals)
6270 return type;
6271
6272 /* Search the chain of variants to see if there is already one there just
6273 like the one we need to have. If so, use that existing one. We must
6274 preserve the TYPE_NAME, since there is code that depends on this. */
6275 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6276 if (check_qualified_type (t, type, type_quals))
6277 return t;
6278
6279 return NULL_TREE;
6280 }
6281
6282 /* Like get_qualified_type, but creates the type if it does not
6283 exist. This function never returns NULL_TREE. */
6284
6285 tree
6286 build_qualified_type (tree type, int type_quals)
6287 {
6288 tree t;
6289
6290 /* See if we already have the appropriate qualified variant. */
6291 t = get_qualified_type (type, type_quals);
6292
6293 /* If not, build it. */
6294 if (!t)
6295 {
6296 t = build_variant_type_copy (type);
6297 set_type_quals (t, type_quals);
6298
6299 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6300 {
6301 /* See if this object can map to a basic atomic type. */
6302 tree atomic_type = find_atomic_core_type (type);
6303 if (atomic_type)
6304 {
6305 /* Ensure the alignment of this type is compatible with
6306 the required alignment of the atomic type. */
6307 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6308 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6309 }
6310 }
6311
6312 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6313 /* Propagate structural equality. */
6314 SET_TYPE_STRUCTURAL_EQUALITY (t);
6315 else if (TYPE_CANONICAL (type) != type)
6316 /* Build the underlying canonical type, since it is different
6317 from TYPE. */
6318 {
6319 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6320 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6321 }
6322 else
6323 /* T is its own canonical type. */
6324 TYPE_CANONICAL (t) = t;
6325
6326 }
6327
6328 return t;
6329 }
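
/* Usage sketch (illustrative; TYPE stands for any already-built type node):
   requesting a const-volatile variant looks like

     tree cv_variant
       = build_qualified_type (type, TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);

   The variant is shared: asking again with the same qualifiers returns the
   node found by get_qualified_type above.  */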
6330
6331 /* Create a variant of TYPE with alignment ALIGN. */
6332
6333 tree
6334 build_aligned_type (tree type, unsigned int align)
6335 {
6336 tree t;
6337
6338 if (TYPE_PACKED (type)
6339 || TYPE_ALIGN (type) == align)
6340 return type;
6341
6342 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6343 if (check_aligned_type (t, type, align))
6344 return t;
6345
6346 t = build_variant_type_copy (type);
6347 TYPE_ALIGN (t) = align;
6348
6349 return t;
6350 }
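
/* Usage sketch (illustrative; TYPE stands for an existing type node).
   ALIGN is in bits, like TYPE_ALIGN:

     tree aligned_variant = build_aligned_type (type, 128);

   gives a variant aligned to 128 bits (16 bytes), reusing an existing
   variant whenever check_aligned_type above finds one.  */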
6351
6352 /* Create a new distinct copy of TYPE. The new type is made its own
6353 MAIN_VARIANT. If TYPE requires structural equality checks, the
6354 resulting type requires structural equality checks; otherwise, its
6355 TYPE_CANONICAL points to itself. */
6356
6357 tree
6358 build_distinct_type_copy (tree type)
6359 {
6360 tree t = copy_node (type);
6361
6362 TYPE_POINTER_TO (t) = 0;
6363 TYPE_REFERENCE_TO (t) = 0;
6364
6365 /* Set the canonical type either to a new equivalence class, or
6366 propagate the need for structural equality checks. */
6367 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6368 SET_TYPE_STRUCTURAL_EQUALITY (t);
6369 else
6370 TYPE_CANONICAL (t) = t;
6371
6372 /* Make it its own variant. */
6373 TYPE_MAIN_VARIANT (t) = t;
6374 TYPE_NEXT_VARIANT (t) = 0;
6375
6376 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6377 whose TREE_TYPE is not t. This can also happen in the Ada
6378 frontend when using subtypes. */
6379
6380 return t;
6381 }
6382
6383 /* Create a new variant of TYPE, equivalent but distinct. This is so
6384 the caller can modify it. TYPE_CANONICAL for the return type will
6385 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6386 are considered equal by the language itself (or that both types
6387 require structural equality checks). */
6388
6389 tree
6390 build_variant_type_copy (tree type)
6391 {
6392 tree t, m = TYPE_MAIN_VARIANT (type);
6393
6394 t = build_distinct_type_copy (type);
6395
6396 /* Since we're building a variant, assume that it is a non-semantic
6397 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6398 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6399
6400 /* Add the new type to the chain of variants of TYPE. */
6401 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6402 TYPE_NEXT_VARIANT (m) = t;
6403 TYPE_MAIN_VARIANT (t) = m;
6404
6405 return t;
6406 }
6407 \f
6408 /* Return true if the from trees in both tree maps are equal. */
6409
6410 int
6411 tree_map_base_eq (const void *va, const void *vb)
6412 {
6413 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6414 *const b = (const struct tree_map_base *) vb;
6415 return (a->from == b->from);
6416 }
6417
6418 /* Hash a from tree in a tree_map_base. */
6419
6420 unsigned int
6421 tree_map_base_hash (const void *item)
6422 {
6423 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6424 }
6425
6426 /* Return true if this tree map structure is marked for garbage collection
6427 purposes. We simply return true if the from tree is marked, so that this
6428 structure goes away when the from tree goes away. */
6429
6430 int
6431 tree_map_base_marked_p (const void *p)
6432 {
6433 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6434 }
6435
6436 /* Hash a from tree in a tree_map. */
6437
6438 unsigned int
6439 tree_map_hash (const void *item)
6440 {
6441 return (((const struct tree_map *) item)->hash);
6442 }
6443
6444 /* Hash a from tree in a tree_decl_map. */
6445
6446 unsigned int
6447 tree_decl_map_hash (const void *item)
6448 {
6449 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6450 }
6451
6452 /* Return the initialization priority for DECL. */
6453
6454 priority_type
6455 decl_init_priority_lookup (tree decl)
6456 {
6457 symtab_node *snode = symtab_node::get (decl);
6458
6459 if (!snode)
6460 return DEFAULT_INIT_PRIORITY;
6461 return snode->get_init_priority ();
6463 }
6464
6465 /* Return the finalization priority for DECL. */
6466
6467 priority_type
6468 decl_fini_priority_lookup (tree decl)
6469 {
6470 cgraph_node *node = cgraph_node::get (decl);
6471
6472 if (!node)
6473 return DEFAULT_INIT_PRIORITY;
6474 return node->get_fini_priority ();
6476 }
6477
6478 /* Set the initialization priority for DECL to PRIORITY. */
6479
6480 void
6481 decl_init_priority_insert (tree decl, priority_type priority)
6482 {
6483 struct symtab_node *snode;
6484
6485 if (priority == DEFAULT_INIT_PRIORITY)
6486 {
6487 snode = symtab_node::get (decl);
6488 if (!snode)
6489 return;
6490 }
6491 else if (TREE_CODE (decl) == VAR_DECL)
6492 snode = varpool_node::get_create (decl);
6493 else
6494 snode = cgraph_node::get_create (decl);
6495 snode->set_init_priority (priority);
6496 }
6497
6498 /* Set the finalization priority for DECL to PRIORITY. */
6499
6500 void
6501 decl_fini_priority_insert (tree decl, priority_type priority)
6502 {
6503 struct cgraph_node *node;
6504
6505 if (priority == DEFAULT_INIT_PRIORITY)
6506 {
6507 node = cgraph_node::get (decl);
6508 if (!node)
6509 return;
6510 }
6511 else
6512 node = cgraph_node::get_create (decl);
6513 node->set_fini_priority (priority);
6514 }
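
/* Usage sketch (illustrative; DECL stands for a static constructor's
   FUNCTION_DECL and 1000 is an arbitrary example priority):

     if (decl_init_priority_lookup (decl) == DEFAULT_INIT_PRIORITY)
       decl_init_priority_insert (decl, 1000);

   The priority itself lives on the symbol table node, as the lookup
   routines above show.  */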
6515
6516 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6517
6518 static void
6519 print_debug_expr_statistics (void)
6520 {
6521 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6522 (long) htab_size (debug_expr_for_decl),
6523 (long) htab_elements (debug_expr_for_decl),
6524 htab_collisions (debug_expr_for_decl));
6525 }
6526
6527 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6528
6529 static void
6530 print_value_expr_statistics (void)
6531 {
6532 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6533 (long) htab_size (value_expr_for_decl),
6534 (long) htab_elements (value_expr_for_decl),
6535 htab_collisions (value_expr_for_decl));
6536 }
6537
6538 /* Lookup a debug expression for FROM, and return it if we find one. */
6539
6540 tree
6541 decl_debug_expr_lookup (tree from)
6542 {
6543 struct tree_decl_map *h, in;
6544 in.base.from = from;
6545
6546 h = (struct tree_decl_map *)
6547 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6548 if (h)
6549 return h->to;
6550 return NULL_TREE;
6551 }
6552
6553 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6554
6555 void
6556 decl_debug_expr_insert (tree from, tree to)
6557 {
6558 struct tree_decl_map *h;
6559 void **loc;
6560
6561 h = ggc_alloc<tree_decl_map> ();
6562 h->base.from = from;
6563 h->to = to;
6564 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6565 INSERT);
6566 *(struct tree_decl_map **) loc = h;
6567 }
6568
6569 /* Lookup a value expression for FROM, and return it if we find one. */
6570
6571 tree
6572 decl_value_expr_lookup (tree from)
6573 {
6574 struct tree_decl_map *h, in;
6575 in.base.from = from;
6576
6577 h = (struct tree_decl_map *)
6578 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6579 if (h)
6580 return h->to;
6581 return NULL_TREE;
6582 }
6583
6584 /* Insert a mapping FROM->TO in the value expression hashtable. */
6585
6586 void
6587 decl_value_expr_insert (tree from, tree to)
6588 {
6589 struct tree_decl_map *h;
6590 void **loc;
6591
6592 h = ggc_alloc<tree_decl_map> ();
6593 h->base.from = from;
6594 h->to = to;
6595 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6596 INSERT);
6597 *(struct tree_decl_map **) loc = h;
6598 }
6599
6600 /* Lookup a vector of debug arguments for FROM, and return it if we
6601 find one. */
6602
6603 vec<tree, va_gc> **
6604 decl_debug_args_lookup (tree from)
6605 {
6606 struct tree_vec_map *h, in;
6607
6608 if (!DECL_HAS_DEBUG_ARGS_P (from))
6609 return NULL;
6610 gcc_checking_assert (debug_args_for_decl != NULL);
6611 in.base.from = from;
6612 h = (struct tree_vec_map *)
6613 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6614 if (h)
6615 return &h->to;
6616 return NULL;
6617 }
6618
6619 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6620 arguments hashtable. */
6621
6622 vec<tree, va_gc> **
6623 decl_debug_args_insert (tree from)
6624 {
6625 struct tree_vec_map *h;
6626 void **loc;
6627
6628 if (DECL_HAS_DEBUG_ARGS_P (from))
6629 return decl_debug_args_lookup (from);
6630 if (debug_args_for_decl == NULL)
6631 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6632 tree_vec_map_eq, 0);
6633 h = ggc_alloc<tree_vec_map> ();
6634 h->base.from = from;
6635 h->to = NULL;
6636 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6637 INSERT);
6638 *(struct tree_vec_map **) loc = h;
6639 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6640 return &h->to;
6641 }
6642
6643 /* Hashing of types so that we don't make duplicates.
6644 The entry point is `type_hash_canon'. */
6645
6646 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6647 with types in the TREE_VALUE slots), by adding the hash codes
6648 of the individual types. */
6649
6650 static void
6651 type_hash_list (const_tree list, inchash::hash &hstate)
6652 {
6653 const_tree tail;
6654
6655 for (tail = list; tail; tail = TREE_CHAIN (tail))
6656 if (TREE_VALUE (tail) != error_mark_node)
6657 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6658 }
6659
6660 /* These are the Hashtable callback functions. */
6661
6662 /* Returns true iff the types are equivalent. */
6663
6664 static int
6665 type_hash_eq (const void *va, const void *vb)
6666 {
6667 const struct type_hash *const a = (const struct type_hash *) va,
6668 *const b = (const struct type_hash *) vb;
6669
6670 /* First test the things that are the same for all types. */
6671 if (a->hash != b->hash
6672 || TREE_CODE (a->type) != TREE_CODE (b->type)
6673 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6674 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6675 TYPE_ATTRIBUTES (b->type))
6676 || (TREE_CODE (a->type) != COMPLEX_TYPE
6677 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6678 return 0;
6679
6680 /* Be careful about comparing arrays before and after the element type
6681 has been completed; don't compare TYPE_ALIGN unless both types are
6682 complete. */
6683 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6684 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6685 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6686 return 0;
6687
6688 switch (TREE_CODE (a->type))
6689 {
6690 case VOID_TYPE:
6691 case COMPLEX_TYPE:
6692 case POINTER_TYPE:
6693 case REFERENCE_TYPE:
6694 case NULLPTR_TYPE:
6695 return 1;
6696
6697 case VECTOR_TYPE:
6698 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6699
6700 case ENUMERAL_TYPE:
6701 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6702 && !(TYPE_VALUES (a->type)
6703 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6704 && TYPE_VALUES (b->type)
6705 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6706 && type_list_equal (TYPE_VALUES (a->type),
6707 TYPE_VALUES (b->type))))
6708 return 0;
6709
6710 /* ... fall through ... */
6711
6712 case INTEGER_TYPE:
6713 case REAL_TYPE:
6714 case BOOLEAN_TYPE:
6715 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6716 return false;
6717 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6718 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6719 TYPE_MAX_VALUE (b->type)))
6720 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6721 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6722 TYPE_MIN_VALUE (b->type))));
6723
6724 case FIXED_POINT_TYPE:
6725 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6726
6727 case OFFSET_TYPE:
6728 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6729
6730 case METHOD_TYPE:
6731 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6732 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6733 || (TYPE_ARG_TYPES (a->type)
6734 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6735 && TYPE_ARG_TYPES (b->type)
6736 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6737 && type_list_equal (TYPE_ARG_TYPES (a->type),
6738 TYPE_ARG_TYPES (b->type)))))
6739 break;
6740 return 0;
6741 case ARRAY_TYPE:
6742 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6743
6744 case RECORD_TYPE:
6745 case UNION_TYPE:
6746 case QUAL_UNION_TYPE:
6747 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6748 || (TYPE_FIELDS (a->type)
6749 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6750 && TYPE_FIELDS (b->type)
6751 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6752 && type_list_equal (TYPE_FIELDS (a->type),
6753 TYPE_FIELDS (b->type))));
6754
6755 case FUNCTION_TYPE:
6756 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6757 || (TYPE_ARG_TYPES (a->type)
6758 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6759 && TYPE_ARG_TYPES (b->type)
6760 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6761 && type_list_equal (TYPE_ARG_TYPES (a->type),
6762 TYPE_ARG_TYPES (b->type))))
6763 break;
6764 return 0;
6765
6766 default:
6767 return 0;
6768 }
6769
6770 if (lang_hooks.types.type_hash_eq != NULL)
6771 return lang_hooks.types.type_hash_eq (a->type, b->type);
6772
6773 return 1;
6774 }
6775
6776 /* Return the cached hash value. */
6777
6778 static hashval_t
6779 type_hash_hash (const void *item)
6780 {
6781 return ((const struct type_hash *) item)->hash;
6782 }
6783
6784 /* Given TYPE, and HASHCODE its hash code, return the canonical
6785 object for an identical type if one already exists.
6786 Otherwise, return TYPE, and record it as the canonical object.
6787
6788 To use this function, first create a type of the sort you want.
6789 Then compute its hash code from the fields of the type that
6790 make it different from other similar types.
6791 Then call this function and use the value. */
6792
6793 tree
6794 type_hash_canon (unsigned int hashcode, tree type)
6795 {
6796 type_hash in;
6797 void **loc;
6798
6799 /* The hash table only contains main variants, so ensure that's what we're
6800 being passed. */
6801 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6802
6803 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6804 must call that routine before comparing TYPE_ALIGNs. */
6805 layout_type (type);
6806
6807 in.hash = hashcode;
6808 in.type = type;
6809
6810 loc = htab_find_slot_with_hash (type_hash_table, &in, hashcode, INSERT);
6811 if (*loc)
6812 {
6813 tree t1 = ((type_hash *) *loc)->type;
6814 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6815 if (GATHER_STATISTICS)
6816 {
6817 tree_code_counts[(int) TREE_CODE (type)]--;
6818 tree_node_counts[(int) t_kind]--;
6819 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6820 }
6821 return t1;
6822 }
6823 else
6824 {
6825 struct type_hash *h;
6826
6827 h = ggc_alloc<type_hash> ();
6828 h->hash = hashcode;
6829 h->type = type;
6830 *loc = (void *)h;
6831
6832 return type;
6833 }
6834 }
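
/* Usage sketch (illustrative), following the recipe in the comment above;
   NEW_TYPE stands for a freshly built, not yet shared type node whose
   TREE_TYPE has already been set (a pointer or array type, say):

     inchash::hash hstate;
     hstate.add_object (TYPE_HASH (TREE_TYPE (new_type)));
     new_type = type_hash_canon (hstate.end (), new_type);

   This is the same pattern the type constructors later in this file use
   (see e.g. build_array_type_1).  */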
6835
6836 /* See if the data pointed to by the type hash table is marked. We consider
6837 it marked if the type is marked or if a debug type number or symbol
6838 table entry has been made for the type. */
6839
6840 static int
6841 type_hash_marked_p (const void *p)
6842 {
6843 const_tree const type = ((const struct type_hash *) p)->type;
6844
6845 return ggc_marked_p (type);
6846 }
6847
6848 static void
6849 print_type_hash_statistics (void)
6850 {
6851 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6852 (long) htab_size (type_hash_table),
6853 (long) htab_elements (type_hash_table),
6854 htab_collisions (type_hash_table));
6855 }
6856
6857 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6858 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6859 by adding the hash codes of the individual attributes. */
6860
6861 static void
6862 attribute_hash_list (const_tree list, inchash::hash &hstate)
6863 {
6864 const_tree tail;
6865
6866 for (tail = list; tail; tail = TREE_CHAIN (tail))
6867 /* ??? Do we want to add in TREE_VALUE too? */
6868 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6869 }
6870
6871 /* Given two lists of attributes, return true if list L2 is
6872 equivalent to L1. */
6873
6874 int
6875 attribute_list_equal (const_tree l1, const_tree l2)
6876 {
6877 if (l1 == l2)
6878 return 1;
6879
6880 return attribute_list_contained (l1, l2)
6881 && attribute_list_contained (l2, l1);
6882 }
6883
6884 /* Given two lists of attributes, return true if list L2 is
6885 completely contained within L1. */
6886 /* ??? This would be faster if attribute names were stored in a canonicalized
6887 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6888 must be used to show these elements are equivalent (which they are). */
6889 /* ??? It's not clear that attributes with arguments will always be handled
6890 correctly. */
6891
6892 int
6893 attribute_list_contained (const_tree l1, const_tree l2)
6894 {
6895 const_tree t1, t2;
6896
6897 /* First check the obvious, maybe the lists are identical. */
6898 if (l1 == l2)
6899 return 1;
6900
6901 /* Maybe the lists are similar. */
6902 for (t1 = l1, t2 = l2;
6903 t1 != 0 && t2 != 0
6904 && get_attribute_name (t1) == get_attribute_name (t2)
6905 && TREE_VALUE (t1) == TREE_VALUE (t2);
6906 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6907 ;
6908
6909 /* Maybe the lists are equal. */
6910 if (t1 == 0 && t2 == 0)
6911 return 1;
6912
6913 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6914 {
6915 const_tree attr;
6916 /* This CONST_CAST is okay because lookup_attribute does not
6917 modify its argument and the return value is assigned to a
6918 const_tree. */
6919 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6920 CONST_CAST_TREE (l1));
6921 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6922 attr = lookup_ident_attribute (get_attribute_name (t2),
6923 TREE_CHAIN (attr)))
6924 ;
6925
6926 if (attr == NULL_TREE)
6927 return 0;
6928 }
6929
6930 return 1;
6931 }
6932
6933 /* Given two lists of types
6934 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6935 return 1 if the lists contain the same types in the same order.
6936 Also, the TREE_PURPOSEs must match. */
6937
6938 int
6939 type_list_equal (const_tree l1, const_tree l2)
6940 {
6941 const_tree t1, t2;
6942
6943 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6944 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6945 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6946 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6947 && (TREE_TYPE (TREE_PURPOSE (t1))
6948 == TREE_TYPE (TREE_PURPOSE (t2))))))
6949 return 0;
6950
6951 return t1 == t2;
6952 }
6953
6954 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6955 given by TYPE. If the argument list accepts variable arguments,
6956 then this function counts only the ordinary arguments. */
6957
6958 int
6959 type_num_arguments (const_tree type)
6960 {
6961 int i = 0;
6962 tree t;
6963
6964 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6965 /* If the function does not take a variable number of arguments,
6966 the last element in the list will have type `void'. */
6967 if (VOID_TYPE_P (TREE_VALUE (t)))
6968 break;
6969 else
6970 ++i;
6971
6972 return i;
6973 }
6974
6975 /* Nonzero if integer constants T1 and T2
6976 represent the same constant value. */
6977
6978 int
6979 tree_int_cst_equal (const_tree t1, const_tree t2)
6980 {
6981 if (t1 == t2)
6982 return 1;
6983
6984 if (t1 == 0 || t2 == 0)
6985 return 0;
6986
6987 if (TREE_CODE (t1) == INTEGER_CST
6988 && TREE_CODE (t2) == INTEGER_CST
6989 && wi::to_widest (t1) == wi::to_widest (t2))
6990 return 1;
6991
6992 return 0;
6993 }
6994
6995 /* Return true if T is an INTEGER_CST whose numerical value (extended
6996 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6997
6998 bool
6999 tree_fits_shwi_p (const_tree t)
7000 {
7001 return (t != NULL_TREE
7002 && TREE_CODE (t) == INTEGER_CST
7003 && wi::fits_shwi_p (wi::to_widest (t)));
7004 }
7005
7006 /* Return true if T is an INTEGER_CST whose numerical value (extended
7007 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7008
7009 bool
7010 tree_fits_uhwi_p (const_tree t)
7011 {
7012 return (t != NULL_TREE
7013 && TREE_CODE (t) == INTEGER_CST
7014 && wi::fits_uhwi_p (wi::to_widest (t)));
7015 }
7016
7017 /* T is an INTEGER_CST whose numerical value (extended according to
7018 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7019 HOST_WIDE_INT. */
7020
7021 HOST_WIDE_INT
7022 tree_to_shwi (const_tree t)
7023 {
7024 gcc_assert (tree_fits_shwi_p (t));
7025 return TREE_INT_CST_LOW (t);
7026 }
7027
7028 /* T is an INTEGER_CST whose numerical value (extended according to
7029 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7030 HOST_WIDE_INT. */
7031
7032 unsigned HOST_WIDE_INT
7033 tree_to_uhwi (const_tree t)
7034 {
7035 gcc_assert (tree_fits_uhwi_p (t));
7036 return TREE_INT_CST_LOW (t);
7037 }
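
/* Usage sketch (illustrative; SIZE_TREE stands for some INTEGER_CST such
   as a TYPE_SIZE_UNIT, and use_the_byte_count for the caller's logic).
   The usual pattern is to test before extracting, since tree_to_uhwi
   asserts that the value fits:

     if (tree_fits_uhwi_p (size_tree))
       {
         unsigned HOST_WIDE_INT nbytes = tree_to_uhwi (size_tree);
         use_the_byte_count (nbytes);
       }  */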
7038
7039 /* Return the most significant (sign) bit of T. */
7040
7041 int
7042 tree_int_cst_sign_bit (const_tree t)
7043 {
7044 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7045
7046 return wi::extract_uhwi (t, bitno, 1);
7047 }
7048
7049 /* Return an indication of the sign of the integer constant T.
7050 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7051 Note that -1 will never be returned if T's type is unsigned. */
7052
7053 int
7054 tree_int_cst_sgn (const_tree t)
7055 {
7056 if (wi::eq_p (t, 0))
7057 return 0;
7058 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7059 return 1;
7060 else if (wi::neg_p (t))
7061 return -1;
7062 else
7063 return 1;
7064 }
7065
7066 /* Return the minimum number of bits needed to represent VALUE in a
7067 signed or unsigned type; SGN says which. */
7068
7069 unsigned int
7070 tree_int_cst_min_precision (tree value, signop sgn)
7071 {
7072 /* If the value is negative, compute its negative minus 1. The latter
7073 adjustment is because the absolute value of the largest negative value
7074 is one larger than the largest positive value. This is equivalent to
7075 a bit-wise negation, so use that operation instead. */
7076
7077 if (tree_int_cst_sgn (value) < 0)
7078 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7079
7080 /* Return the number of bits needed, taking into account the fact
7081 that we need one more bit for a signed than unsigned type.
7082 If value is 0 or -1, the minimum precision is 1 no matter
7083 whether SGN is SIGNED or UNSIGNED. */
7084
7085 if (integer_zerop (value))
7086 return 1;
7087 else
7088 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7089 }
7090
7091 /* Return truthvalue of whether T1 is the same tree structure as T2.
7092 Return 1 if they are the same.
7093 Return 0 if they are understandably different.
7094 Return -1 if either contains tree structure not understood by
7095 this function. */
7096
7097 int
7098 simple_cst_equal (const_tree t1, const_tree t2)
7099 {
7100 enum tree_code code1, code2;
7101 int cmp;
7102 int i;
7103
7104 if (t1 == t2)
7105 return 1;
7106 if (t1 == 0 || t2 == 0)
7107 return 0;
7108
7109 code1 = TREE_CODE (t1);
7110 code2 = TREE_CODE (t2);
7111
7112 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7113 {
7114 if (CONVERT_EXPR_CODE_P (code2)
7115 || code2 == NON_LVALUE_EXPR)
7116 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7117 else
7118 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7119 }
7120
7121 else if (CONVERT_EXPR_CODE_P (code2)
7122 || code2 == NON_LVALUE_EXPR)
7123 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7124
7125 if (code1 != code2)
7126 return 0;
7127
7128 switch (code1)
7129 {
7130 case INTEGER_CST:
7131 return wi::to_widest (t1) == wi::to_widest (t2);
7132
7133 case REAL_CST:
7134 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7135
7136 case FIXED_CST:
7137 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7138
7139 case STRING_CST:
7140 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7141 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7142 TREE_STRING_LENGTH (t1)));
7143
7144 case CONSTRUCTOR:
7145 {
7146 unsigned HOST_WIDE_INT idx;
7147 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7148 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7149
7150 if (vec_safe_length (v1) != vec_safe_length (v2))
7151 return false;
7152
7153 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7154 /* ??? Should we also handle fields here? */
7155 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7156 return false;
7157 return true;
7158 }
7159
7160 case SAVE_EXPR:
7161 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7162
7163 case CALL_EXPR:
7164 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7165 if (cmp <= 0)
7166 return cmp;
7167 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7168 return 0;
7169 {
7170 const_tree arg1, arg2;
7171 const_call_expr_arg_iterator iter1, iter2;
7172 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7173 arg2 = first_const_call_expr_arg (t2, &iter2);
7174 arg1 && arg2;
7175 arg1 = next_const_call_expr_arg (&iter1),
7176 arg2 = next_const_call_expr_arg (&iter2))
7177 {
7178 cmp = simple_cst_equal (arg1, arg2);
7179 if (cmp <= 0)
7180 return cmp;
7181 }
7182 return arg1 == arg2;
7183 }
7184
7185 case TARGET_EXPR:
7186 /* Special case: if either target is an unallocated VAR_DECL,
7187 it means that it's going to be unified with whatever the
7188 TARGET_EXPR is really supposed to initialize, so treat it
7189 as being equivalent to anything. */
7190 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7191 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7192 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7193 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7194 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7195 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7196 cmp = 1;
7197 else
7198 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7199
7200 if (cmp <= 0)
7201 return cmp;
7202
7203 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7204
7205 case WITH_CLEANUP_EXPR:
7206 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7207 if (cmp <= 0)
7208 return cmp;
7209
7210 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7211
7212 case COMPONENT_REF:
7213 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7214 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7215
7216 return 0;
7217
7218 case VAR_DECL:
7219 case PARM_DECL:
7220 case CONST_DECL:
7221 case FUNCTION_DECL:
7222 return 0;
7223
7224 default:
7225 break;
7226 }
7227
7228 /* This general rule works for most tree codes. All exceptions should be
7229 handled above. If this is a language-specific tree code, we can't
7230 trust what might be in the operand, so say we don't know
7231 the situation. */
7232 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7233 return -1;
7234
7235 switch (TREE_CODE_CLASS (code1))
7236 {
7237 case tcc_unary:
7238 case tcc_binary:
7239 case tcc_comparison:
7240 case tcc_expression:
7241 case tcc_reference:
7242 case tcc_statement:
7243 cmp = 1;
7244 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7245 {
7246 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7247 if (cmp <= 0)
7248 return cmp;
7249 }
7250
7251 return cmp;
7252
7253 default:
7254 return -1;
7255 }
7256 }
7257
7258 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7259 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7260 than U, respectively. */
7261
7262 int
7263 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7264 {
7265 if (tree_int_cst_sgn (t) < 0)
7266 return -1;
7267 else if (!tree_fits_uhwi_p (t))
7268 return 1;
7269 else if (TREE_INT_CST_LOW (t) == u)
7270 return 0;
7271 else if (TREE_INT_CST_LOW (t) < u)
7272 return -1;
7273 else
7274 return 1;
7275 }
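
/* Usage sketch (illustrative; LEN stands for some INTEGER_CST length):
   compare_tree_int avoids building a tree node for the constant operand,
   so bounds checks are commonly written as

     if (compare_tree_int (len, 100) > 0)
       return false;

   which rejects lengths greater than 100 without overflow concerns.  */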
7276
7277 /* Return true if SIZE represents a constant size that is in bounds of
7278 what the middle-end and the backend accept (covering not more than
7279 half of the address-space). */
7280
7281 bool
7282 valid_constant_size_p (const_tree size)
7283 {
7284 if (! tree_fits_uhwi_p (size)
7285 || TREE_OVERFLOW (size)
7286 || tree_int_cst_sign_bit (size) != 0)
7287 return false;
7288 return true;
7289 }
7290
7291 /* Return the precision of the type, or for a complex or vector type the
7292 precision of the type of its elements. */
7293
7294 unsigned int
7295 element_precision (const_tree type)
7296 {
7297 enum tree_code code = TREE_CODE (type);
7298 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7299 type = TREE_TYPE (type);
7300
7301 return TYPE_PRECISION (type);
7302 }
7303
7304 /* Return true if CODE represents an associative tree code. Otherwise
7305 return false. */
7306 bool
7307 associative_tree_code (enum tree_code code)
7308 {
7309 switch (code)
7310 {
7311 case BIT_IOR_EXPR:
7312 case BIT_AND_EXPR:
7313 case BIT_XOR_EXPR:
7314 case PLUS_EXPR:
7315 case MULT_EXPR:
7316 case MIN_EXPR:
7317 case MAX_EXPR:
7318 return true;
7319
7320 default:
7321 break;
7322 }
7323 return false;
7324 }
7325
7326 /* Return true if CODE represents a commutative tree code. Otherwise
7327 return false. */
7328 bool
7329 commutative_tree_code (enum tree_code code)
7330 {
7331 switch (code)
7332 {
7333 case PLUS_EXPR:
7334 case MULT_EXPR:
7335 case MULT_HIGHPART_EXPR:
7336 case MIN_EXPR:
7337 case MAX_EXPR:
7338 case BIT_IOR_EXPR:
7339 case BIT_XOR_EXPR:
7340 case BIT_AND_EXPR:
7341 case NE_EXPR:
7342 case EQ_EXPR:
7343 case UNORDERED_EXPR:
7344 case ORDERED_EXPR:
7345 case UNEQ_EXPR:
7346 case LTGT_EXPR:
7347 case TRUTH_AND_EXPR:
7348 case TRUTH_XOR_EXPR:
7349 case TRUTH_OR_EXPR:
7350 case WIDEN_MULT_EXPR:
7351 case VEC_WIDEN_MULT_HI_EXPR:
7352 case VEC_WIDEN_MULT_LO_EXPR:
7353 case VEC_WIDEN_MULT_EVEN_EXPR:
7354 case VEC_WIDEN_MULT_ODD_EXPR:
7355 return true;
7356
7357 default:
7358 break;
7359 }
7360 return false;
7361 }
7362
7363 /* Return true if CODE represents a ternary tree code for which the
7364 first two operands are commutative. Otherwise return false. */
7365 bool
7366 commutative_ternary_tree_code (enum tree_code code)
7367 {
7368 switch (code)
7369 {
7370 case WIDEN_MULT_PLUS_EXPR:
7371 case WIDEN_MULT_MINUS_EXPR:
7372 return true;
7373
7374 default:
7375 break;
7376 }
7377 return false;
7378 }
7379
7380 namespace inchash
7381 {
7382
7383 /* Generate a hash value for an expression. This can be used iteratively
7384 by passing a previous result as the HSTATE argument.
7385
7386 This function is intended to produce the same hash for expressions which
7387 would compare equal using operand_equal_p. */
7388 void
7389 add_expr (const_tree t, inchash::hash &hstate)
7390 {
7391 int i;
7392 enum tree_code code;
7393 enum tree_code_class tclass;
7394
7395 if (t == NULL_TREE)
7396 {
7397 hstate.merge_hash (0);
7398 return;
7399 }
7400
7401 code = TREE_CODE (t);
7402
7403 switch (code)
7404 {
7405 /* Alas, constants aren't shared, so we can't rely on pointer
7406 identity. */
7407 case VOID_CST:
7408 hstate.merge_hash (0);
7409 return;
7410 case INTEGER_CST:
7411 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7412 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7413 return;
7414 case REAL_CST:
7415 {
7416 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7417 hstate.merge_hash (val2);
7418 return;
7419 }
7420 case FIXED_CST:
7421 {
7422 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7423 hstate.merge_hash (val2);
7424 return;
7425 }
7426 case STRING_CST:
7427 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7428 return;
7429 case COMPLEX_CST:
7430 inchash::add_expr (TREE_REALPART (t), hstate);
7431 inchash::add_expr (TREE_IMAGPART (t), hstate);
7432 return;
7433 case VECTOR_CST:
7434 {
7435 unsigned i;
7436 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7437 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7438 return;
7439 }
7440 case SSA_NAME:
7441 /* We can just compare by pointer. */
7442 hstate.add_wide_int (SSA_NAME_VERSION (t));
7443 return;
7444 case PLACEHOLDER_EXPR:
7445 /* The node itself doesn't matter. */
7446 return;
7447 case TREE_LIST:
7448 /* A list of expressions, for a CALL_EXPR or as the elements of a
7449 VECTOR_CST. */
7450 for (; t; t = TREE_CHAIN (t))
7451 inchash::add_expr (TREE_VALUE (t), hstate);
7452 return;
7453 case CONSTRUCTOR:
7454 {
7455 unsigned HOST_WIDE_INT idx;
7456 tree field, value;
7457 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7458 {
7459 inchash::add_expr (field, hstate);
7460 inchash::add_expr (value, hstate);
7461 }
7462 return;
7463 }
7464 case FUNCTION_DECL:
7465 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7466 Otherwise nodes that compare equal according to operand_equal_p might
7467 get different hash codes. However, don't do this for machine specific
7468 or front end builtins, since the function code is overloaded in those
7469 cases. */
7470 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7471 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7472 {
7473 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7474 code = TREE_CODE (t);
7475 }
7476 /* FALL THROUGH */
7477 default:
7478 tclass = TREE_CODE_CLASS (code);
7479
7480 if (tclass == tcc_declaration)
7481 {
7482 /* DECLs have a unique ID. */
7483 hstate.add_wide_int (DECL_UID (t));
7484 }
7485 else
7486 {
7487 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7488
7489 hstate.add_object (code);
7490
7491 /* Don't hash the type; that can lead to having nodes which
7492 compare equal according to operand_equal_p, but which
7493 have different hash codes. */
7494 if (CONVERT_EXPR_CODE_P (code)
7495 || code == NON_LVALUE_EXPR)
7496 {
7497 /* Make sure to include signedness in the hash computation. */
7498 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7499 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7500 }
7501
7502 else if (commutative_tree_code (code))
7503 {
7504 /* It's a commutative expression. We want to hash it the same
7505 however it appears. We do this by first hashing both operands
7506 and then rehashing based on the order of their independent
7507 hashes. */
7508 inchash::hash one, two;
7509 inchash::add_expr (TREE_OPERAND (t, 0), one);
7510 inchash::add_expr (TREE_OPERAND (t, 1), two);
7511 hstate.add_commutative (one, two);
7512 }
7513 else
7514 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7515 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7516 }
7517 return;
7518 }
7519 }
7520
7521 }
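
/* Usage sketch (illustrative; EXPR stands for any tree expression):
   hashing an expression with the routine above typically looks like

     inchash::hash hstate;
     inchash::add_expr (expr, hstate);
     hashval_t h = hstate.end ();

   and two expressions for which operand_equal_p returns true are intended
   to end up with the same value of h.  */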
7522
7523 /* Constructors for pointer, array and function types.
7524 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7525 constructed by language-dependent code, not here.) */
7526
7527 /* Construct, lay out and return the type of pointers to TO_TYPE with
7528 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7529 reference all of memory. If such a type has already been
7530 constructed, reuse it. */
7531
7532 tree
7533 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7534 bool can_alias_all)
7535 {
7536 tree t;
7537
7538 if (to_type == error_mark_node)
7539 return error_mark_node;
7540
7541 /* If the pointed-to type has the may_alias attribute set, force
7542 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7543 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7544 can_alias_all = true;
7545
7546 /* In some cases, languages will have things that aren't a POINTER_TYPE
7547 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7548 In that case, return that type without regard to the rest of our
7549 operands.
7550
7551 ??? This is a kludge, but consistent with the way this function has
7552 always operated and there doesn't seem to be a good way to avoid this
7553 at the moment. */
7554 if (TYPE_POINTER_TO (to_type) != 0
7555 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7556 return TYPE_POINTER_TO (to_type);
7557
7558 /* First, if we already have a type for pointers to TO_TYPE and it's
7559 the proper mode, use it. */
7560 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7561 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7562 return t;
7563
7564 t = make_node (POINTER_TYPE);
7565
7566 TREE_TYPE (t) = to_type;
7567 SET_TYPE_MODE (t, mode);
7568 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7569 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7570 TYPE_POINTER_TO (to_type) = t;
7571
7572 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7573 SET_TYPE_STRUCTURAL_EQUALITY (t);
7574 else if (TYPE_CANONICAL (to_type) != to_type)
7575 TYPE_CANONICAL (t)
7576 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7577 mode, can_alias_all);
7578
7579 /* Lay out the type. This function has many callers that are concerned
7580 with expression-construction, and this simplifies them all. */
7581 layout_type (t);
7582
7583 return t;
7584 }
7585
7586 /* By default build pointers in ptr_mode. */
7587
7588 tree
7589 build_pointer_type (tree to_type)
7590 {
7591 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7592 : TYPE_ADDR_SPACE (to_type);
7593 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7594 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7595 }
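
/* Usage sketch (illustrative): pointer types are reused, so building the
   same one twice yields the same node.

     tree int_ptr = build_pointer_type (integer_type_node);
     tree int_ptr_again = build_pointer_type (integer_type_node);
     gcc_assert (int_ptr == int_ptr_again);

   Both calls walk the TYPE_POINTER_TO chain maintained above.  */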
7596
7597 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7598
7599 tree
7600 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7601 bool can_alias_all)
7602 {
7603 tree t;
7604
7605 if (to_type == error_mark_node)
7606 return error_mark_node;
7607
7608 /* If the pointed-to type has the may_alias attribute set, force
7609 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7610 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7611 can_alias_all = true;
7612
7613 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7614 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7615 In that case, return that type without regard to the rest of our
7616 operands.
7617
7618 ??? This is a kludge, but consistent with the way this function has
7619 always operated and there doesn't seem to be a good way to avoid this
7620 at the moment. */
7621 if (TYPE_REFERENCE_TO (to_type) != 0
7622 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7623 return TYPE_REFERENCE_TO (to_type);
7624
7625 /* First, if we already have a type for pointers to TO_TYPE and it's
7626 the proper mode, use it. */
7627 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7628 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7629 return t;
7630
7631 t = make_node (REFERENCE_TYPE);
7632
7633 TREE_TYPE (t) = to_type;
7634 SET_TYPE_MODE (t, mode);
7635 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7636 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7637 TYPE_REFERENCE_TO (to_type) = t;
7638
7639 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7640 SET_TYPE_STRUCTURAL_EQUALITY (t);
7641 else if (TYPE_CANONICAL (to_type) != to_type)
7642 TYPE_CANONICAL (t)
7643 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7644 mode, can_alias_all);
7645
7646 layout_type (t);
7647
7648 return t;
7649 }
7650
7651
7652 /* Build the node for the type of references-to-TO_TYPE by default
7653 in ptr_mode. */
7654
7655 tree
7656 build_reference_type (tree to_type)
7657 {
7658 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7659 : TYPE_ADDR_SPACE (to_type);
7660 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7661 return build_reference_type_for_mode (to_type, pointer_mode, false);
7662 }
7663
7664 #define MAX_INT_CACHED_PREC \
7665 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7666 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7667
7668 /* Builds a signed or unsigned integer type of precision PRECISION.
7669 Used for C bitfields whose precision does not match that of
7670 built-in target types. */
7671 tree
7672 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7673 int unsignedp)
7674 {
7675 tree itype, ret;
7676
7677 if (unsignedp)
7678 unsignedp = MAX_INT_CACHED_PREC + 1;
7679
7680 if (precision <= MAX_INT_CACHED_PREC)
7681 {
7682 itype = nonstandard_integer_type_cache[precision + unsignedp];
7683 if (itype)
7684 return itype;
7685 }
7686
7687 itype = make_node (INTEGER_TYPE);
7688 TYPE_PRECISION (itype) = precision;
7689
7690 if (unsignedp)
7691 fixup_unsigned_type (itype);
7692 else
7693 fixup_signed_type (itype);
7694
7695 ret = itype;
7696 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7697 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7698 if (precision <= MAX_INT_CACHED_PREC)
7699 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7700
7701 return ret;
7702 }
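
/* Usage sketch (illustrative): a 24-bit unsigned type, as might be needed
   for a C bit-field of that width.

     tree uint24_type = build_nonstandard_integer_type (24, 1);

   Small precisions are cached above, so repeated requests return the same
   node.  */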
7703
7704 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7705 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7706 is true, reuse such a type that has already been constructed. */
7707
7708 static tree
7709 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7710 {
7711 tree itype = make_node (INTEGER_TYPE);
7712 inchash::hash hstate;
7713
7714 TREE_TYPE (itype) = type;
7715
7716 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7717 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7718
7719 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7720 SET_TYPE_MODE (itype, TYPE_MODE (type));
7721 TYPE_SIZE (itype) = TYPE_SIZE (type);
7722 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7723 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7724 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7725
7726 if (!shared)
7727 return itype;
7728
7729 if ((TYPE_MIN_VALUE (itype)
7730 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7731 || (TYPE_MAX_VALUE (itype)
7732 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7733 {
7734 /* Since we cannot reliably merge this type, we need to compare it using
7735 structural equality checks. */
7736 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7737 return itype;
7738 }
7739
7740 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7741 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7742 hstate.merge_hash (TYPE_HASH (type));
7743 itype = type_hash_canon (hstate.end (), itype);
7744
7745 return itype;
7746 }
7747
7748 /* Wrapper around build_range_type_1 with SHARED set to true. */
7749
7750 tree
7751 build_range_type (tree type, tree lowval, tree highval)
7752 {
7753 return build_range_type_1 (type, lowval, highval, true);
7754 }
7755
7756 /* Wrapper around build_range_type_1 with SHARED set to false. */
7757
7758 tree
7759 build_nonshared_range_type (tree type, tree lowval, tree highval)
7760 {
7761 return build_range_type_1 (type, lowval, highval, false);
7762 }
7763
7764 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7765 MAXVAL should be the maximum value in the domain
7766 (one less than the length of the array).
7767
7768 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7769    We don't enforce this limit; that is up to the caller (e.g. the language
7770    front end).  The limit exists because the result is a signed type and we
7771    don't handle sizes that use more than one HOST_WIDE_INT.  */
7772
7773 tree
7774 build_index_type (tree maxval)
7775 {
7776 return build_range_type (sizetype, size_zero_node, maxval);
7777 }
7778
7779 /* Return true if the debug information for TYPE, a subtype, should be emitted
7780 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7781 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7782 debug info and doesn't reflect the source code. */
7783
7784 bool
7785 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7786 {
7787 tree base_type = TREE_TYPE (type), low, high;
7788
7789 /* Subrange types have a base type which is an integral type. */
7790 if (!INTEGRAL_TYPE_P (base_type))
7791 return false;
7792
7793 /* Get the real bounds of the subtype. */
7794 if (lang_hooks.types.get_subrange_bounds)
7795 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7796 else
7797 {
7798 low = TYPE_MIN_VALUE (type);
7799 high = TYPE_MAX_VALUE (type);
7800 }
7801
7802 /* If the type and its base type have the same representation and the same
7803 name, then the type is not a subrange but a copy of the base type. */
7804 if ((TREE_CODE (base_type) == INTEGER_TYPE
7805 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7806 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7807 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7808 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7809 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7810 return false;
7811
7812 if (lowval)
7813 *lowval = low;
7814 if (highval)
7815 *highval = high;
7816 return true;
7817 }
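
/* Usage sketch (illustrative only): a debug back end is the expected
   caller of the predicate above, along the lines of

     tree lo, hi;
     if (subrange_type_for_debug_p (type, &lo, &hi))
       emit_subrange_die (type, lo, hi);

   where emit_subrange_die is only a placeholder name for whatever the
   back end does with a subrange (for DWARF, a DW_TAG_subrange_type DIE
   with bounds LO and HI).  */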
7818
7819 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7820 and number of elements specified by the range of values of INDEX_TYPE.
7821 If SHARED is true, reuse such a type that has already been constructed. */
7822
7823 static tree
7824 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7825 {
7826 tree t;
7827
7828 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7829 {
7830 error ("arrays of functions are not meaningful");
7831 elt_type = integer_type_node;
7832 }
7833
7834 t = make_node (ARRAY_TYPE);
7835 TREE_TYPE (t) = elt_type;
7836 TYPE_DOMAIN (t) = index_type;
7837 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7838 layout_type (t);
7839
7840 /* If the element type is incomplete at this point we get marked for
7841 structural equality. Do not record these types in the canonical
7842 type hashtable. */
7843 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7844 return t;
7845
7846 if (shared)
7847 {
7848 inchash::hash hstate;
7849 hstate.add_object (TYPE_HASH (elt_type));
7850 if (index_type)
7851 hstate.add_object (TYPE_HASH (index_type));
7852 t = type_hash_canon (hstate.end (), t);
7853 }
7854
7855 if (TYPE_CANONICAL (t) == t)
7856 {
7857 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7858 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7859 SET_TYPE_STRUCTURAL_EQUALITY (t);
7860 else if (TYPE_CANONICAL (elt_type) != elt_type
7861 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7862 TYPE_CANONICAL (t)
7863 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7864 index_type
7865 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7866 shared);
7867 }
7868
7869 return t;
7870 }
7871
7872 /* Wrapper around build_array_type_1 with SHARED set to true. */
7873
7874 tree
7875 build_array_type (tree elt_type, tree index_type)
7876 {
7877 return build_array_type_1 (elt_type, index_type, true);
7878 }
7879
7880 /* Wrapper around build_array_type_1 with SHARED set to false. */
7881
7882 tree
7883 build_nonshared_array_type (tree elt_type, tree index_type)
7884 {
7885 return build_array_type_1 (elt_type, index_type, false);
7886 }
7887
7888 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7889 sizetype. */
7890
7891 tree
7892 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7893 {
7894 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7895 }
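
/* Usage sketch (illustrative only): the two entry points compose as one
   would expect, so the type of "int a[10]" can be built either way:

     tree t1 = build_array_type (integer_type_node,
                                 build_index_type (size_int (9)));
     tree t2 = build_array_type_nelts (integer_type_node, 10);

   Both calls hash to the same canonical ARRAY_TYPE node.  */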
7896
7897 /* Strip ARRAY_TYPE wrappers from TYPE until a non-array element type is
7898    found, and return that element type.  */
7899
7900 tree
7901 strip_array_types (tree type)
7902 {
7903 while (TREE_CODE (type) == ARRAY_TYPE)
7904 type = TREE_TYPE (type);
7905
7906 return type;
7907 }
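
/* Usage sketch (illustrative only): for the type of "int a[3][4]", built as

     tree t = build_array_type_nelts
                (build_array_type_nelts (integer_type_node, 4), 3);

   strip_array_types (t) returns integer_type_node, while TREE_TYPE (t)
   only peels off the outermost dimension.  */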
7908
7909 /* Computes the canonical argument types from the argument type list
7910 ARGTYPES.
7911
7912 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7913 on entry to this function, or if any of the ARGTYPES are
7914 structural.
7915
7916 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7917 true on entry to this function, or if any of the ARGTYPES are
7918 non-canonical.
7919
7920 Returns a canonical argument list, which may be ARGTYPES when the
7921 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7922 true) or would not differ from ARGTYPES. */
7923
7924 static tree
7925 maybe_canonicalize_argtypes (tree argtypes,
7926 bool *any_structural_p,
7927 bool *any_noncanonical_p)
7928 {
7929 tree arg;
7930 bool any_noncanonical_argtypes_p = false;
7931
7932 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7933 {
7934 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7935 /* Fail gracefully by stating that the type is structural. */
7936 *any_structural_p = true;
7937 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7938 *any_structural_p = true;
7939 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7940 || TREE_PURPOSE (arg))
7941 /* If the argument has a default argument, we consider it
7942 non-canonical even though the type itself is canonical.
7943 That way, different variants of function and method types
7944 with default arguments will all point to the variant with
7945 no defaults as their canonical type. */
7946 any_noncanonical_argtypes_p = true;
7947 }
7948
7949 if (*any_structural_p)
7950 return argtypes;
7951
7952 if (any_noncanonical_argtypes_p)
7953 {
7954 /* Build the canonical list of argument types. */
7955 tree canon_argtypes = NULL_TREE;
7956 bool is_void = false;
7957
7958 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7959 {
7960 if (arg == void_list_node)
7961 is_void = true;
7962 else
7963 canon_argtypes = tree_cons (NULL_TREE,
7964 TYPE_CANONICAL (TREE_VALUE (arg)),
7965 canon_argtypes);
7966 }
7967
7968 canon_argtypes = nreverse (canon_argtypes);
7969 if (is_void)
7970 canon_argtypes = chainon (canon_argtypes, void_list_node);
7971
7972 /* There is a non-canonical type. */
7973 *any_noncanonical_p = true;
7974 return canon_argtypes;
7975 }
7976
7977 /* The canonical argument types are the same as ARGTYPES. */
7978 return argtypes;
7979 }
7980
7981 /* Construct, lay out and return
7982 the type of functions returning type VALUE_TYPE
7983 given arguments of types ARG_TYPES.
7984 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7985 are data type nodes for the arguments of the function.
7986 If such a type has already been constructed, reuse it. */
7987
7988 tree
7989 build_function_type (tree value_type, tree arg_types)
7990 {
7991 tree t;
7992 inchash::hash hstate;
7993 bool any_structural_p, any_noncanonical_p;
7994 tree canon_argtypes;
7995
7996 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7997 {
7998 error ("function return type cannot be function");
7999 value_type = integer_type_node;
8000 }
8001
8002 /* Make a node of the sort we want. */
8003 t = make_node (FUNCTION_TYPE);
8004 TREE_TYPE (t) = value_type;
8005 TYPE_ARG_TYPES (t) = arg_types;
8006
8007 /* If we already have such a type, use the old one. */
8008 hstate.add_object (TYPE_HASH (value_type));
8009 type_hash_list (arg_types, hstate);
8010 t = type_hash_canon (hstate.end (), t);
8011
8012 /* Set up the canonical type. */
8013 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8014 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8015 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8016 &any_structural_p,
8017 &any_noncanonical_p);
8018 if (any_structural_p)
8019 SET_TYPE_STRUCTURAL_EQUALITY (t);
8020 else if (any_noncanonical_p)
8021 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8022 canon_argtypes);
8023
8024 if (!COMPLETE_TYPE_P (t))
8025 layout_type (t);
8026 return t;
8027 }
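
/* Usage sketch (illustrative only): ARG_TYPES is an ordinary TREE_LIST
   terminated by void_list_node for a fixed-argument prototype, so the
   type of "int f (double)" can be built by hand as

     tree args = tree_cons (NULL_TREE, double_type_node, void_list_node);
     tree fntype = build_function_type (integer_type_node, args);

   Most callers use the build_function_type_list wrappers below instead.  */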
8028
8029 /* Build a function type. The RETURN_TYPE is the type returned by the
8030    function.  If VAARGS is set, no void_type_node is appended to the list.
8031    ARGP must always be terminated by a NULL_TREE.  */
8032
8033 static tree
8034 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8035 {
8036 tree t, args, last;
8037
8038 t = va_arg (argp, tree);
8039 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8040 args = tree_cons (NULL_TREE, t, args);
8041
8042 if (vaargs)
8043 {
8044 last = args;
8045 if (args != NULL_TREE)
8046 args = nreverse (args);
8047 gcc_assert (last != void_list_node);
8048 }
8049 else if (args == NULL_TREE)
8050 args = void_list_node;
8051 else
8052 {
8053 last = args;
8054 args = nreverse (args);
8055 TREE_CHAIN (last) = void_list_node;
8056 }
8057 args = build_function_type (return_type, args);
8058
8059 return args;
8060 }
8061
8062 /* Build a function type. The RETURN_TYPE is the type returned by the
8063 function. If additional arguments are provided, they are
8064 additional argument types. The list of argument types must always
8065 be terminated by NULL_TREE. */
8066
8067 tree
8068 build_function_type_list (tree return_type, ...)
8069 {
8070 tree args;
8071 va_list p;
8072
8073 va_start (p, return_type);
8074 args = build_function_type_list_1 (false, return_type, p);
8075 va_end (p);
8076 return args;
8077 }
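
/* Usage sketch (illustrative only): the same "int f (double)" type as in
   the sketch above, built through the varargs interface; the trailing
   NULL_TREE sentinel is mandatory:

     tree fntype = build_function_type_list (integer_type_node,
                                             double_type_node, NULL_TREE);  */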
8078
8079 /* Build a variable argument function type. The RETURN_TYPE is the
8080 type returned by the function. If additional arguments are provided,
8081 they are additional argument types. The list of argument types must
8082 always be terminated by NULL_TREE. */
8083
8084 tree
8085 build_varargs_function_type_list (tree return_type, ...)
8086 {
8087 tree args;
8088 va_list p;
8089
8090 va_start (p, return_type);
8091 args = build_function_type_list_1 (true, return_type, p);
8092 va_end (p);
8093
8094 return args;
8095 }
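
/* Usage sketch (illustrative only): a printf-like type
   "int f (const char *, ...)"; the ellipsis comes from the absence of a
   terminating void_list_node in the argument chain:

     tree const_char_ptr
       = build_pointer_type (build_qualified_type (char_type_node,
                                                   TYPE_QUAL_CONST));
     tree fntype
       = build_varargs_function_type_list (integer_type_node,
                                           const_char_ptr, NULL_TREE);

   const_char_ptr is only a local name used for this sketch.  */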
8096
8097 /* Build a function type. RETURN_TYPE is the type returned by the
8098 function; VAARGS indicates whether the function takes varargs. The
8099 function takes N named arguments, the types of which are provided in
8100 ARG_TYPES. */
8101
8102 static tree
8103 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8104 tree *arg_types)
8105 {
8106 int i;
8107 tree t = vaargs ? NULL_TREE : void_list_node;
8108
8109 for (i = n - 1; i >= 0; i--)
8110 t = tree_cons (NULL_TREE, arg_types[i], t);
8111
8112 return build_function_type (return_type, t);
8113 }
8114
8115 /* Build a function type. RETURN_TYPE is the type returned by the
8116 function. The function takes N named arguments, the types of which
8117 are provided in ARG_TYPES. */
8118
8119 tree
8120 build_function_type_array (tree return_type, int n, tree *arg_types)
8121 {
8122 return build_function_type_array_1 (false, return_type, n, arg_types);
8123 }
8124
8125 /* Build a variable argument function type. RETURN_TYPE is the type
8126 returned by the function. The function takes N named arguments, the
8127 types of which are provided in ARG_TYPES. */
8128
8129 tree
8130 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8131 {
8132 return build_function_type_array_1 (true, return_type, n, arg_types);
8133 }
8134
8135 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8136    and ARGTYPES (a TREE_LIST) are the return type and argument types
8137 for the method. An implicit additional parameter (of type
8138 pointer-to-BASETYPE) is added to the ARGTYPES. */
8139
8140 tree
8141 build_method_type_directly (tree basetype,
8142 tree rettype,
8143 tree argtypes)
8144 {
8145 tree t;
8146 tree ptype;
8147 inchash::hash hstate;
8148 bool any_structural_p, any_noncanonical_p;
8149 tree canon_argtypes;
8150
8151 /* Make a node of the sort we want. */
8152 t = make_node (METHOD_TYPE);
8153
8154 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8155 TREE_TYPE (t) = rettype;
8156 ptype = build_pointer_type (basetype);
8157
8158 /* The actual arglist for this function includes a "hidden" argument
8159 which is "this". Put it into the list of argument types. */
8160 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8161 TYPE_ARG_TYPES (t) = argtypes;
8162
8163 /* If we already have such a type, use the old one. */
8164 hstate.add_object (TYPE_HASH (basetype));
8165 hstate.add_object (TYPE_HASH (rettype));
8166 type_hash_list (argtypes, hstate);
8167 t = type_hash_canon (hstate.end (), t);
8168
8169 /* Set up the canonical type. */
8170 any_structural_p
8171 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8172 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8173 any_noncanonical_p
8174 = (TYPE_CANONICAL (basetype) != basetype
8175 || TYPE_CANONICAL (rettype) != rettype);
8176 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8177 &any_structural_p,
8178 &any_noncanonical_p);
8179 if (any_structural_p)
8180 SET_TYPE_STRUCTURAL_EQUALITY (t);
8181 else if (any_noncanonical_p)
8182 TYPE_CANONICAL (t)
8183 = build_method_type_directly (TYPE_CANONICAL (basetype),
8184 TYPE_CANONICAL (rettype),
8185 canon_argtypes);
8186 if (!COMPLETE_TYPE_P (t))
8187 layout_type (t);
8188
8189 return t;
8190 }
8191
8192 /* Construct, lay out and return the type of methods belonging to class
8193 BASETYPE and whose arguments and values are described by TYPE.
8194 If that type exists already, reuse it.
8195 TYPE must be a FUNCTION_TYPE node. */
8196
8197 tree
8198 build_method_type (tree basetype, tree type)
8199 {
8200 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8201
8202 return build_method_type_directly (basetype,
8203 TREE_TYPE (type),
8204 TYPE_ARG_TYPES (type));
8205 }
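
/* Usage sketch (illustrative only): given a RECORD_TYPE named here
   "klass" (a placeholder, not an existing node), the type of the member
   function "int klass::f ()" is

     tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
     tree mtype = build_method_type (klass, fntype);

   The METHOD_TYPE gains the implicit "this" argument of type "klass *"
   at the head of its TYPE_ARG_TYPES.  */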
8206
8207 /* Construct, lay out and return the type of offsets to a value
8208 of type TYPE, within an object of type BASETYPE.
8209 If a suitable offset type exists already, reuse it. */
8210
8211 tree
8212 build_offset_type (tree basetype, tree type)
8213 {
8214 tree t;
8215 inchash::hash hstate;
8216
8217 /* Make a node of the sort we want. */
8218 t = make_node (OFFSET_TYPE);
8219
8220 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8221 TREE_TYPE (t) = type;
8222
8223 /* If we already have such a type, use the old one. */
8224 hstate.add_object (TYPE_HASH (basetype));
8225 hstate.add_object (TYPE_HASH (type));
8226 t = type_hash_canon (hstate.end (), t);
8227
8228 if (!COMPLETE_TYPE_P (t))
8229 layout_type (t);
8230
8231 if (TYPE_CANONICAL (t) == t)
8232 {
8233 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8234 || TYPE_STRUCTURAL_EQUALITY_P (type))
8235 SET_TYPE_STRUCTURAL_EQUALITY (t);
8236 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8237 || TYPE_CANONICAL (type) != type)
8238 TYPE_CANONICAL (t)
8239 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8240 TYPE_CANONICAL (type));
8241 }
8242
8243 return t;
8244 }
8245
8246 /* Create a complex type whose components are COMPONENT_TYPE. */
8247
8248 tree
8249 build_complex_type (tree component_type)
8250 {
8251 tree t;
8252 inchash::hash hstate;
8253
8254 gcc_assert (INTEGRAL_TYPE_P (component_type)
8255 || SCALAR_FLOAT_TYPE_P (component_type)
8256 || FIXED_POINT_TYPE_P (component_type));
8257
8258 /* Make a node of the sort we want. */
8259 t = make_node (COMPLEX_TYPE);
8260
8261 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8262
8263 /* If we already have such a type, use the old one. */
8264 hstate.add_object (TYPE_HASH (component_type));
8265 t = type_hash_canon (hstate.end (), t);
8266
8267 if (!COMPLETE_TYPE_P (t))
8268 layout_type (t);
8269
8270 if (TYPE_CANONICAL (t) == t)
8271 {
8272 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8273 SET_TYPE_STRUCTURAL_EQUALITY (t);
8274 else if (TYPE_CANONICAL (component_type) != component_type)
8275 TYPE_CANONICAL (t)
8276 = build_complex_type (TYPE_CANONICAL (component_type));
8277 }
8278
8279 /* We need to create a name, since complex is a fundamental type. */
8280 if (! TYPE_NAME (t))
8281 {
8282 const char *name;
8283 if (component_type == char_type_node)
8284 name = "complex char";
8285 else if (component_type == signed_char_type_node)
8286 name = "complex signed char";
8287 else if (component_type == unsigned_char_type_node)
8288 name = "complex unsigned char";
8289 else if (component_type == short_integer_type_node)
8290 name = "complex short int";
8291 else if (component_type == short_unsigned_type_node)
8292 name = "complex short unsigned int";
8293 else if (component_type == integer_type_node)
8294 name = "complex int";
8295 else if (component_type == unsigned_type_node)
8296 name = "complex unsigned int";
8297 else if (component_type == long_integer_type_node)
8298 name = "complex long int";
8299 else if (component_type == long_unsigned_type_node)
8300 name = "complex long unsigned int";
8301 else if (component_type == long_long_integer_type_node)
8302 name = "complex long long int";
8303 else if (component_type == long_long_unsigned_type_node)
8304 name = "complex long long unsigned int";
8305 else
8306 name = 0;
8307
8308 if (name != 0)
8309 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8310 get_identifier (name), t);
8311 }
8312
8313 return build_qualified_type (t, TYPE_QUALS (component_type));
8314 }
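
/* Usage sketch (illustrative only): the GNU C type "_Complex int" can be
   obtained with

     tree t = build_complex_type (integer_type_node);

   and the naming code above then attaches the TYPE_DECL "complex int" so
   that diagnostics and debug output have something readable to print.  */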
8315
8316 /* If TYPE is a real or complex floating-point type and the target
8317 does not directly support arithmetic on TYPE then return the wider
8318 type to be used for arithmetic on TYPE. Otherwise, return
8319 NULL_TREE. */
8320
8321 tree
8322 excess_precision_type (tree type)
8323 {
8324 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8325 {
8326 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8327 switch (TREE_CODE (type))
8328 {
8329 case REAL_TYPE:
8330 switch (flt_eval_method)
8331 {
8332 case 1:
8333 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8334 return double_type_node;
8335 break;
8336 case 2:
8337 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8338 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8339 return long_double_type_node;
8340 break;
8341 default:
8342 gcc_unreachable ();
8343 }
8344 break;
8345 case COMPLEX_TYPE:
8346 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8347 return NULL_TREE;
8348 switch (flt_eval_method)
8349 {
8350 case 1:
8351 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8352 return complex_double_type_node;
8353 break;
8354 case 2:
8355 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8356 || (TYPE_MODE (TREE_TYPE (type))
8357 == TYPE_MODE (double_type_node)))
8358 return complex_long_double_type_node;
8359 break;
8360 default:
8361 gcc_unreachable ();
8362 }
8363 break;
8364 default:
8365 break;
8366 }
8367 }
8368 return NULL_TREE;
8369 }
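
/* Usage sketch (illustrative only): on a target whose
   TARGET_FLT_EVAL_METHOD is 2 (classic x87 arithmetic, for instance) and
   with standard excess precision in effect,

     tree t = excess_precision_type (float_type_node);

   returns long_double_type_node, telling the caller to carry out the
   arithmetic in long double and only round when storing the result.  */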
8370 \f
8371 /* Return OP, stripped of any conversions to wider types as much as is safe.
8372 Converting the value back to OP's type makes a value equivalent to OP.
8373
8374 If FOR_TYPE is nonzero, we return a value which, if converted to
8375 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8376
8377 OP must have integer, real or enumeral type. Pointers are not allowed!
8378
8379 There are some cases where the obvious value we could return
8380 would regenerate to OP if converted to OP's type,
8381 but would not extend like OP to wider types.
8382 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8383 For example, if OP is (unsigned short)(signed char)-1,
8384 we avoid returning (signed char)-1 if FOR_TYPE is int,
8385 even though extending that to an unsigned short would regenerate OP,
8386 since the result of extending (signed char)-1 to (int)
8387 is different from (int) OP. */
8388
8389 tree
8390 get_unwidened (tree op, tree for_type)
8391 {
8392 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8393 tree type = TREE_TYPE (op);
8394 unsigned final_prec
8395 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8396 int uns
8397 = (for_type != 0 && for_type != type
8398 && final_prec > TYPE_PRECISION (type)
8399 && TYPE_UNSIGNED (type));
8400 tree win = op;
8401
8402 while (CONVERT_EXPR_P (op))
8403 {
8404 int bitschange;
8405
8406 /* TYPE_PRECISION on vector types has different meaning
8407 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8408 so avoid them here. */
8409 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8410 break;
8411
8412 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8413 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8414
8415 /* Truncations are many-one so cannot be removed.
8416 Unless we are later going to truncate down even farther. */
8417 if (bitschange < 0
8418 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8419 break;
8420
8421 /* See what's inside this conversion. If we decide to strip it,
8422 we will set WIN. */
8423 op = TREE_OPERAND (op, 0);
8424
8425 /* If we have not stripped any zero-extensions (uns is 0),
8426 we can strip any kind of extension.
8427 If we have previously stripped a zero-extension,
8428 only zero-extensions can safely be stripped.
8429 Any extension can be stripped if the bits it would produce
8430 are all going to be discarded later by truncating to FOR_TYPE. */
8431
8432 if (bitschange > 0)
8433 {
8434 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8435 win = op;
8436 /* TYPE_UNSIGNED says whether this is a zero-extension.
8437 Let's avoid computing it if it does not affect WIN
8438 and if UNS will not be needed again. */
8439 if ((uns
8440 || CONVERT_EXPR_P (op))
8441 && TYPE_UNSIGNED (TREE_TYPE (op)))
8442 {
8443 uns = 1;
8444 win = op;
8445 }
8446 }
8447 }
8448
8449 /* If we finally reach a constant see if it fits in for_type and
8450 in that case convert it. */
8451 if (for_type
8452 && TREE_CODE (win) == INTEGER_CST
8453 && TREE_TYPE (win) != for_type
8454 && int_fits_type_p (win, for_type))
8455 win = fold_convert (for_type, win);
8456
8457 return win;
8458 }
8459 \f
8460 /* Return OP or a simpler expression for a narrower value
8461 which can be sign-extended or zero-extended to give back OP.
8462 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8463 or 0 if the value should be sign-extended. */
8464
8465 tree
8466 get_narrower (tree op, int *unsignedp_ptr)
8467 {
8468 int uns = 0;
8469 int first = 1;
8470 tree win = op;
8471 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8472
8473 while (TREE_CODE (op) == NOP_EXPR)
8474 {
8475 int bitschange
8476 = (TYPE_PRECISION (TREE_TYPE (op))
8477 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8478
8479 /* Truncations are many-one so cannot be removed. */
8480 if (bitschange < 0)
8481 break;
8482
8483 /* See what's inside this conversion. If we decide to strip it,
8484 we will set WIN. */
8485
8486 if (bitschange > 0)
8487 {
8488 op = TREE_OPERAND (op, 0);
8489 /* An extension: the outermost one can be stripped,
8490 but remember whether it is zero or sign extension. */
8491 if (first)
8492 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8493 /* Otherwise, if a sign extension has been stripped,
8494 only sign extensions can now be stripped;
8495 if a zero extension has been stripped, only zero-extensions. */
8496 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8497 break;
8498 first = 0;
8499 }
8500 else /* bitschange == 0 */
8501 {
8502 /* A change in nominal type can always be stripped, but we must
8503 preserve the unsignedness. */
8504 if (first)
8505 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8506 first = 0;
8507 op = TREE_OPERAND (op, 0);
8508 /* Keep trying to narrow, but don't assign op to win if it
8509 would turn an integral type into something else. */
8510 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8511 continue;
8512 }
8513
8514 win = op;
8515 }
8516
8517 if (TREE_CODE (op) == COMPONENT_REF
8518 /* Since type_for_size always gives an integer type. */
8519 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8520 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8521 /* Ensure field is laid out already. */
8522 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8523 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8524 {
8525 unsigned HOST_WIDE_INT innerprec
8526 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8527 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8528 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8529 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8530
8531 /* We can get this structure field in a narrower type that fits it,
8532 but the resulting extension to its nominal type (a fullword type)
8533 must satisfy the same conditions as for other extensions.
8534
8535 Do this only for fields that are aligned (not bit-fields),
8536 because when bit-field insns will be used there is no
8537 advantage in doing this. */
8538
8539 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8540 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8541 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8542 && type != 0)
8543 {
8544 if (first)
8545 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8546 win = fold_convert (type, op);
8547 }
8548 }
8549
8550 *unsignedp_ptr = uns;
8551 return win;
8552 }
8553 \f
8554 /* Returns true if integer constant C has a value that is permissible
8555 for type TYPE (an INTEGER_TYPE). */
8556
8557 bool
8558 int_fits_type_p (const_tree c, const_tree type)
8559 {
8560 tree type_low_bound, type_high_bound;
8561 bool ok_for_low_bound, ok_for_high_bound;
8562 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8563
8564 retry:
8565 type_low_bound = TYPE_MIN_VALUE (type);
8566 type_high_bound = TYPE_MAX_VALUE (type);
8567
8568 /* If at least one bound of the type is a constant integer, we can check
8569 ourselves and maybe make a decision. If no such decision is possible, but
8570 this type is a subtype, try checking against that. Otherwise, use
8571 fits_to_tree_p, which checks against the precision.
8572
8573 Compute the status for each possibly constant bound, and return if we see
8574 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8575 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8576 for "constant known to fit". */
8577
8578 /* Check if c >= type_low_bound. */
8579 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8580 {
8581 if (tree_int_cst_lt (c, type_low_bound))
8582 return false;
8583 ok_for_low_bound = true;
8584 }
8585 else
8586 ok_for_low_bound = false;
8587
8588 /* Check if c <= type_high_bound. */
8589 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8590 {
8591 if (tree_int_cst_lt (type_high_bound, c))
8592 return false;
8593 ok_for_high_bound = true;
8594 }
8595 else
8596 ok_for_high_bound = false;
8597
8598 /* If the constant fits both bounds, the result is known. */
8599 if (ok_for_low_bound && ok_for_high_bound)
8600 return true;
8601
8602 /* Perform some generic filtering which may allow making a decision
8603 even if the bounds are not constant. First, negative integers
8604      never fit in unsigned types.  */
8605 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8606 return false;
8607
8608 /* Second, narrower types always fit in wider ones. */
8609 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8610 return true;
8611
8612 /* Third, unsigned integers with top bit set never fit signed types. */
8613 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8614 {
8615 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8616 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8617 {
8618 /* When a tree_cst is converted to a wide-int, the precision
8619 is taken from the type. However, if the precision of the
8620 mode underneath the type is smaller than that, it is
8621 possible that the value will not fit. The test below
8622 fails if any bit is set between the sign bit of the
8623 underlying mode and the top bit of the type. */
8624 if (wi::ne_p (wi::zext (c, prec - 1), c))
8625 return false;
8626 }
8627 else if (wi::neg_p (c))
8628 return false;
8629 }
8630
8631   /* If we haven't been able to decide at this point, there is nothing more we
8632 can check ourselves here. Look at the base type if we have one and it
8633 has the same precision. */
8634 if (TREE_CODE (type) == INTEGER_TYPE
8635 && TREE_TYPE (type) != 0
8636 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8637 {
8638 type = TREE_TYPE (type);
8639 goto retry;
8640 }
8641
8642 /* Or to fits_to_tree_p, if nothing else. */
8643 return wi::fits_to_tree_p (c, type);
8644 }
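
/* Usage sketch (illustrative only): on a target with an 8-bit char,

     tree c = build_int_cst (integer_type_node, 300);
     bool fits = int_fits_type_p (c, unsigned_char_type_node);

   leaves FITS false, because 300 exceeds the upper bound of the 8-bit
   unsigned type, whereas the same constant fits any integer type of 16 or
   more bits.  */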
8645
8646 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8647 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8648 represented (assuming two's-complement arithmetic) within the bit
8649 precision of the type are returned instead. */
8650
8651 void
8652 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8653 {
8654 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8655 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8656 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8657 else
8658 {
8659 if (TYPE_UNSIGNED (type))
8660 mpz_set_ui (min, 0);
8661 else
8662 {
8663 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8664 wi::to_mpz (mn, min, SIGNED);
8665 }
8666 }
8667
8668 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8669 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8670 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8671 else
8672 {
8673 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8674 wi::to_mpz (mn, max, TYPE_SIGN (type));
8675 }
8676 }
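
/* Usage sketch (illustrative only) of the calling convention: the function
   only assigns to MIN and MAX, it does not call mpz_init on them, so the
   caller owns their lifetime:

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (type, lo, hi);
     ...
     mpz_clear (lo);
     mpz_clear (hi);
*/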
8677
8678 /* Return true if VAR is an automatic variable defined in function FN. */
8679
8680 bool
8681 auto_var_in_fn_p (const_tree var, const_tree fn)
8682 {
8683 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8684 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8685 || TREE_CODE (var) == PARM_DECL)
8686 && ! TREE_STATIC (var))
8687 || TREE_CODE (var) == LABEL_DECL
8688 || TREE_CODE (var) == RESULT_DECL));
8689 }
8690
8691 /* Subprogram of following function. Called by walk_tree.
8692
8693 Return *TP if it is an automatic variable or parameter of the
8694 function passed in as DATA. */
8695
8696 static tree
8697 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8698 {
8699 tree fn = (tree) data;
8700
8701 if (TYPE_P (*tp))
8702 *walk_subtrees = 0;
8703
8704 else if (DECL_P (*tp)
8705 && auto_var_in_fn_p (*tp, fn))
8706 return *tp;
8707
8708 return NULL_TREE;
8709 }
8710
8711 /* Returns true if T is, contains, or refers to a type with variable
8712 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8713 arguments, but not the return type. If FN is nonzero, only return
8714 true if a modifier of the type or position of FN is a variable or
8715 parameter inside FN.
8716
8717 This concept is more general than that of C99 'variably modified types':
8718 in C99, a struct type is never variably modified because a VLA may not
8719 appear as a structure member. However, in GNU C code like:
8720
8721 struct S { int i[f()]; };
8722
8723 is valid, and other languages may define similar constructs. */
8724
8725 bool
8726 variably_modified_type_p (tree type, tree fn)
8727 {
8728 tree t;
8729
8730 /* Test if T is either variable (if FN is zero) or an expression containing
8731 a variable in FN. If TYPE isn't gimplified, return true also if
8732 gimplify_one_sizepos would gimplify the expression into a local
8733 variable. */
8734 #define RETURN_TRUE_IF_VAR(T) \
8735 do { tree _t = (T); \
8736 if (_t != NULL_TREE \
8737 && _t != error_mark_node \
8738 && TREE_CODE (_t) != INTEGER_CST \
8739 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8740 && (!fn \
8741 || (!TYPE_SIZES_GIMPLIFIED (type) \
8742 && !is_gimple_sizepos (_t)) \
8743 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8744 return true; } while (0)
8745
8746 if (type == error_mark_node)
8747 return false;
8748
8749 /* If TYPE itself has variable size, it is variably modified. */
8750 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8751 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8752
8753 switch (TREE_CODE (type))
8754 {
8755 case POINTER_TYPE:
8756 case REFERENCE_TYPE:
8757 case VECTOR_TYPE:
8758 if (variably_modified_type_p (TREE_TYPE (type), fn))
8759 return true;
8760 break;
8761
8762 case FUNCTION_TYPE:
8763 case METHOD_TYPE:
8764 /* If TYPE is a function type, it is variably modified if the
8765 return type is variably modified. */
8766 if (variably_modified_type_p (TREE_TYPE (type), fn))
8767 return true;
8768 break;
8769
8770 case INTEGER_TYPE:
8771 case REAL_TYPE:
8772 case FIXED_POINT_TYPE:
8773 case ENUMERAL_TYPE:
8774 case BOOLEAN_TYPE:
8775 /* Scalar types are variably modified if their end points
8776 aren't constant. */
8777 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8778 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8779 break;
8780
8781 case RECORD_TYPE:
8782 case UNION_TYPE:
8783 case QUAL_UNION_TYPE:
8784 /* We can't see if any of the fields are variably-modified by the
8785 definition we normally use, since that would produce infinite
8786 recursion via pointers. */
8787 /* This is variably modified if some field's type is. */
8788 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8789 if (TREE_CODE (t) == FIELD_DECL)
8790 {
8791 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8792 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8793 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8794
8795 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8796 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8797 }
8798 break;
8799
8800 case ARRAY_TYPE:
8801 /* Do not call ourselves to avoid infinite recursion. This is
8802 variably modified if the element type is. */
8803 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8804 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8805 break;
8806
8807 default:
8808 break;
8809 }
8810
8811 /* The current language may have other cases to check, but in general,
8812 all other types are not variably modified. */
8813 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8814
8815 #undef RETURN_TRUE_IF_VAR
8816 }
8817
8818 /* Given a DECL or TYPE, return the scope in which it was declared, or
8819 NULL_TREE if there is no containing scope. */
8820
8821 tree
8822 get_containing_scope (const_tree t)
8823 {
8824 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8825 }
8826
8827 /* Return the innermost context enclosing DECL that is
8828 a FUNCTION_DECL, or zero if none. */
8829
8830 tree
8831 decl_function_context (const_tree decl)
8832 {
8833 tree context;
8834
8835 if (TREE_CODE (decl) == ERROR_MARK)
8836 return 0;
8837
8838 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8839 where we look up the function at runtime. Such functions always take
8840 a first argument of type 'pointer to real context'.
8841
8842 C++ should really be fixed to use DECL_CONTEXT for the real context,
8843 and use something else for the "virtual context". */
8844 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8845 context
8846 = TYPE_MAIN_VARIANT
8847 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8848 else
8849 context = DECL_CONTEXT (decl);
8850
8851 while (context && TREE_CODE (context) != FUNCTION_DECL)
8852 {
8853 if (TREE_CODE (context) == BLOCK)
8854 context = BLOCK_SUPERCONTEXT (context);
8855 else
8856 context = get_containing_scope (context);
8857 }
8858
8859 return context;
8860 }
8861
8862 /* Return the innermost context enclosing DECL that is
8863 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8864 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8865
8866 tree
8867 decl_type_context (const_tree decl)
8868 {
8869 tree context = DECL_CONTEXT (decl);
8870
8871 while (context)
8872 switch (TREE_CODE (context))
8873 {
8874 case NAMESPACE_DECL:
8875 case TRANSLATION_UNIT_DECL:
8876 return NULL_TREE;
8877
8878 case RECORD_TYPE:
8879 case UNION_TYPE:
8880 case QUAL_UNION_TYPE:
8881 return context;
8882
8883 case TYPE_DECL:
8884 case FUNCTION_DECL:
8885 context = DECL_CONTEXT (context);
8886 break;
8887
8888 case BLOCK:
8889 context = BLOCK_SUPERCONTEXT (context);
8890 break;
8891
8892 default:
8893 gcc_unreachable ();
8894 }
8895
8896 return NULL_TREE;
8897 }
8898
8899 /* CALL is a CALL_EXPR. Return the declaration for the function
8900 called, or NULL_TREE if the called function cannot be
8901 determined. */
8902
8903 tree
8904 get_callee_fndecl (const_tree call)
8905 {
8906 tree addr;
8907
8908 if (call == error_mark_node)
8909 return error_mark_node;
8910
8911 /* It's invalid to call this function with anything but a
8912 CALL_EXPR. */
8913 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8914
8915 /* The first operand to the CALL is the address of the function
8916 called. */
8917 addr = CALL_EXPR_FN (call);
8918
8919 /* If there is no function, return early. */
8920 if (addr == NULL_TREE)
8921 return NULL_TREE;
8922
8923 STRIP_NOPS (addr);
8924
8925 /* If this is a readonly function pointer, extract its initial value. */
8926 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8927 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8928 && DECL_INITIAL (addr))
8929 addr = DECL_INITIAL (addr);
8930
8931 /* If the address is just `&f' for some function `f', then we know
8932 that `f' is being called. */
8933 if (TREE_CODE (addr) == ADDR_EXPR
8934 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8935 return TREE_OPERAND (addr, 0);
8936
8937 /* We couldn't figure out what was being called. */
8938 return NULL_TREE;
8939 }
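
/* Usage sketch (illustrative only): a typical caller is guarding builtin
   handling, e.g.

     tree fndecl = get_callee_fndecl (call);
     if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
       handle_builtin (call, fndecl);

   where handle_builtin is a placeholder for the caller's own logic; calls
   made through arbitrary function pointers simply yield NULL_TREE.  */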
8940
8941 /* Print debugging information about tree nodes generated during the compile,
8942 and any language-specific information. */
8943
8944 void
8945 dump_tree_statistics (void)
8946 {
8947 if (GATHER_STATISTICS)
8948 {
8949 int i;
8950 int total_nodes, total_bytes;
8951 fprintf (stderr, "Kind Nodes Bytes\n");
8952 fprintf (stderr, "---------------------------------------\n");
8953 total_nodes = total_bytes = 0;
8954 for (i = 0; i < (int) all_kinds; i++)
8955 {
8956 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8957 tree_node_counts[i], tree_node_sizes[i]);
8958 total_nodes += tree_node_counts[i];
8959 total_bytes += tree_node_sizes[i];
8960 }
8961 fprintf (stderr, "---------------------------------------\n");
8962 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8963 fprintf (stderr, "---------------------------------------\n");
8964 fprintf (stderr, "Code Nodes\n");
8965 fprintf (stderr, "----------------------------\n");
8966 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8967 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8968 tree_code_counts[i]);
8969 fprintf (stderr, "----------------------------\n");
8970 ssanames_print_statistics ();
8971 phinodes_print_statistics ();
8972 }
8973 else
8974 fprintf (stderr, "(No per-node statistics)\n");
8975
8976 print_type_hash_statistics ();
8977 print_debug_expr_statistics ();
8978 print_value_expr_statistics ();
8979 lang_hooks.print_statistics ();
8980 }
8981 \f
8982 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8983
8984 /* Generate a crc32 of the BITS most significant bits of VALUE.  */
8985
8986 static unsigned
8987 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
8988 {
8989 unsigned ix;
8990
8991 for (ix = bits; ix--; value <<= 1)
8992 {
8993 unsigned feedback;
8994
8995 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
8996 chksum <<= 1;
8997 chksum ^= feedback;
8998 }
8999 return chksum;
9000 }
9001
9002 /* Generate a crc32 of a 32-bit unsigned. */
9003
9004 unsigned
9005 crc32_unsigned (unsigned chksum, unsigned value)
9006 {
9007 return crc32_unsigned_bits (chksum, value, 32);
9008 }
9009
9010 /* Generate a crc32 of a byte. */
9011
9012 unsigned
9013 crc32_byte (unsigned chksum, char byte)
9014 {
9015 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9016 }
9017
9018 /* Generate a crc32 of a string. */
9019
9020 unsigned
9021 crc32_string (unsigned chksum, const char *string)
9022 {
9023 do
9024 {
9025 chksum = crc32_byte (chksum, *string);
9026 }
9027 while (*string++);
9028 return chksum;
9029 }
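
/* Usage sketch (illustrative only): checksums chain through the CHKSUM
   argument, so several inputs can be folded into one value:

     unsigned chk = crc32_string (0, "prefix");
     chk = crc32_unsigned (chk, some_value);

   where "prefix" and some_value stand for whatever string and 32-bit
   quantity the caller wants mixed in.  */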
9030
9031 /* P is a string that will be used in a symbol. Mask out any characters
9032 that are not valid in that context. */
9033
9034 void
9035 clean_symbol_name (char *p)
9036 {
9037 for (; *p; p++)
9038 if (! (ISALNUM (*p)
9039 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9040 || *p == '$'
9041 #endif
9042 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9043 || *p == '.'
9044 #endif
9045 ))
9046 *p = '_';
9047 }
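
/* Usage sketch (illustrative only): on a target where '.' is not valid in
   labels (NO_DOT_IN_LABEL defined),

     char buf[] = "foo.c";
     clean_symbol_name (buf);

   leaves "foo_c" in BUF; this is how source file names become part of the
   _GLOBAL__* identifiers built by get_file_function_name below.  */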
9048
9049 /* Generate a name for a special-purpose function.
9050 The generated name may need to be unique across the whole link.
9051 Changes to this function may also require corresponding changes to
9052 xstrdup_mask_random.
9053 TYPE is some string to identify the purpose of this function to the
9054 linker or collect2; it must start with an uppercase letter,
9055 one of:
9056 I - for constructors
9057 D - for destructors
9058 N - for C++ anonymous namespaces
9059 F - for DWARF unwind frame information. */
9060
9061 tree
9062 get_file_function_name (const char *type)
9063 {
9064 char *buf;
9065 const char *p;
9066 char *q;
9067
9068 /* If we already have a name we know to be unique, just use that. */
9069 if (first_global_object_name)
9070 p = q = ASTRDUP (first_global_object_name);
9071 /* If the target is handling the constructors/destructors, they
9072 will be local to this file and the name is only necessary for
9073 debugging purposes.
9074      We also assign sub_I and sub_D suffixes to constructors called from
9075 the global static constructors. These are always local. */
9076 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9077 || (strncmp (type, "sub_", 4) == 0
9078 && (type[4] == 'I' || type[4] == 'D')))
9079 {
9080 const char *file = main_input_filename;
9081 if (! file)
9082 file = LOCATION_FILE (input_location);
9083 /* Just use the file's basename, because the full pathname
9084 might be quite long. */
9085 p = q = ASTRDUP (lbasename (file));
9086 }
9087 else
9088 {
9089 /* Otherwise, the name must be unique across the entire link.
9090 We don't have anything that we know to be unique to this translation
9091 unit, so use what we do have and throw in some randomness. */
9092 unsigned len;
9093 const char *name = weak_global_object_name;
9094 const char *file = main_input_filename;
9095
9096 if (! name)
9097 name = "";
9098 if (! file)
9099 file = LOCATION_FILE (input_location);
9100
9101 len = strlen (file);
9102 q = (char *) alloca (9 + 17 + len + 1);
9103 memcpy (q, file, len + 1);
9104
9105 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9106 crc32_string (0, name), get_random_seed (false));
9107
9108 p = q;
9109 }
9110
9111 clean_symbol_name (q);
9112 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9113 + strlen (type));
9114
9115 /* Set up the name of the file-level functions we may need.
9116 Use a global object (which is already required to be unique over
9117 the program) rather than the file name (which imposes extra
9118 constraints). */
9119 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9120
9121 return get_identifier (buf);
9122 }
9123 \f
9124 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9125
9126 /* Complain that the tree code of NODE does not match the expected 0
9127 terminated list of trailing codes. The trailing code list can be
9128 empty, for a more vague error message. FILE, LINE, and FUNCTION
9129 are of the caller. */
9130
9131 void
9132 tree_check_failed (const_tree node, const char *file,
9133 int line, const char *function, ...)
9134 {
9135 va_list args;
9136 const char *buffer;
9137 unsigned length = 0;
9138 enum tree_code code;
9139
9140 va_start (args, function);
9141 while ((code = (enum tree_code) va_arg (args, int)))
9142 length += 4 + strlen (get_tree_code_name (code));
9143 va_end (args);
9144 if (length)
9145 {
9146 char *tmp;
9147 va_start (args, function);
9148 length += strlen ("expected ");
9149 buffer = tmp = (char *) alloca (length);
9150 length = 0;
9151 while ((code = (enum tree_code) va_arg (args, int)))
9152 {
9153 const char *prefix = length ? " or " : "expected ";
9154
9155 strcpy (tmp + length, prefix);
9156 length += strlen (prefix);
9157 strcpy (tmp + length, get_tree_code_name (code));
9158 length += strlen (get_tree_code_name (code));
9159 }
9160 va_end (args);
9161 }
9162 else
9163 buffer = "unexpected node";
9164
9165 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9166 buffer, get_tree_code_name (TREE_CODE (node)),
9167 function, trim_filename (file), line);
9168 }
9169
9170 /* Complain that the tree code of NODE does match the expected 0
9171 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9172 the caller. */
9173
9174 void
9175 tree_not_check_failed (const_tree node, const char *file,
9176 int line, const char *function, ...)
9177 {
9178 va_list args;
9179 char *buffer;
9180 unsigned length = 0;
9181 enum tree_code code;
9182
9183 va_start (args, function);
9184 while ((code = (enum tree_code) va_arg (args, int)))
9185 length += 4 + strlen (get_tree_code_name (code));
9186 va_end (args);
9187 va_start (args, function);
9188 buffer = (char *) alloca (length);
9189 length = 0;
9190 while ((code = (enum tree_code) va_arg (args, int)))
9191 {
9192 if (length)
9193 {
9194 strcpy (buffer + length, " or ");
9195 length += 4;
9196 }
9197 strcpy (buffer + length, get_tree_code_name (code));
9198 length += strlen (get_tree_code_name (code));
9199 }
9200 va_end (args);
9201
9202 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9203 buffer, get_tree_code_name (TREE_CODE (node)),
9204 function, trim_filename (file), line);
9205 }
9206
9207 /* Similar to tree_check_failed, except that we check for a class of tree
9208 code, given in CL. */
9209
9210 void
9211 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9212 const char *file, int line, const char *function)
9213 {
9214 internal_error
9215 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9216 TREE_CODE_CLASS_STRING (cl),
9217 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9218 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9219 }
9220
9221 /* Similar to tree_check_failed, except that instead of specifying a
9222 dozen codes, use the knowledge that they're all sequential. */
9223
9224 void
9225 tree_range_check_failed (const_tree node, const char *file, int line,
9226 const char *function, enum tree_code c1,
9227 enum tree_code c2)
9228 {
9229 char *buffer;
9230 unsigned length = 0;
9231 unsigned int c;
9232
9233 for (c = c1; c <= c2; ++c)
9234 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9235
9236 length += strlen ("expected ");
9237 buffer = (char *) alloca (length);
9238 length = 0;
9239
9240 for (c = c1; c <= c2; ++c)
9241 {
9242 const char *prefix = length ? " or " : "expected ";
9243
9244 strcpy (buffer + length, prefix);
9245 length += strlen (prefix);
9246 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9247 length += strlen (get_tree_code_name ((enum tree_code) c));
9248 }
9249
9250 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9251 buffer, get_tree_code_name (TREE_CODE (node)),
9252 function, trim_filename (file), line);
9253 }
9254
9255
9256 /* Similar to tree_check_failed, except that we check that a tree does
9257 not have the specified code, given in CL. */
9258
9259 void
9260 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9261 const char *file, int line, const char *function)
9262 {
9263 internal_error
9264 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9265 TREE_CODE_CLASS_STRING (cl),
9266 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9267 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9268 }
9269
9270
9271 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9272
9273 void
9274 omp_clause_check_failed (const_tree node, const char *file, int line,
9275 const char *function, enum omp_clause_code code)
9276 {
9277 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9278 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9279 function, trim_filename (file), line);
9280 }
9281
9282
9283 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9284
9285 void
9286 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9287 const char *function, enum omp_clause_code c1,
9288 enum omp_clause_code c2)
9289 {
9290 char *buffer;
9291 unsigned length = 0;
9292 unsigned int c;
9293
9294 for (c = c1; c <= c2; ++c)
9295 length += 4 + strlen (omp_clause_code_name[c]);
9296
9297 length += strlen ("expected ");
9298 buffer = (char *) alloca (length);
9299 length = 0;
9300
9301 for (c = c1; c <= c2; ++c)
9302 {
9303 const char *prefix = length ? " or " : "expected ";
9304
9305 strcpy (buffer + length, prefix);
9306 length += strlen (prefix);
9307 strcpy (buffer + length, omp_clause_code_name[c]);
9308 length += strlen (omp_clause_code_name[c]);
9309 }
9310
9311 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9312 buffer, omp_clause_code_name[TREE_CODE (node)],
9313 function, trim_filename (file), line);
9314 }
9315
9316
9317 #undef DEFTREESTRUCT
9318 #define DEFTREESTRUCT(VAL, NAME) NAME,
9319
9320 static const char *ts_enum_names[] = {
9321 #include "treestruct.def"
9322 };
9323 #undef DEFTREESTRUCT
9324
9325 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9326
9327 /* Similar to tree_class_check_failed, except that we check for
9328 whether CODE contains the tree structure identified by EN. */
9329
9330 void
9331 tree_contains_struct_check_failed (const_tree node,
9332 const enum tree_node_structure_enum en,
9333 const char *file, int line,
9334 const char *function)
9335 {
9336 internal_error
9337 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9338 TS_ENUM_NAME (en),
9339 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9340 }
9341
9342
9343 /* Similar to above, except that the check is for the bounds of a
9344    tree_int_cst's (dynamically sized) array of elements.  */
9345
9346 void
9347 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9348 const char *function)
9349 {
9350 internal_error
9351 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9352 idx + 1, len, function, trim_filename (file), line);
9353 }
9354
9355 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9356 (dynamically sized) vector. */
9357
9358 void
9359 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9360 const char *function)
9361 {
9362 internal_error
9363 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9364 idx + 1, len, function, trim_filename (file), line);
9365 }
9366
9367 /* Similar to above, except that the check is for the bounds of the operand
9368 vector of an expression node EXP. */
9369
9370 void
9371 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9372 int line, const char *function)
9373 {
9374 enum tree_code code = TREE_CODE (exp);
9375 internal_error
9376 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9377 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9378 function, trim_filename (file), line);
9379 }
9380
9381 /* Similar to above, except that the check is for the number of
9382 operands of an OMP_CLAUSE node. */
9383
9384 void
9385 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9386 int line, const char *function)
9387 {
9388 internal_error
9389 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9390 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9391 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9392 trim_filename (file), line);
9393 }
9394 #endif /* ENABLE_TREE_CHECKING */
9395 \f
9396 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9397 and mapped to the machine mode MODE. Initialize its fields and build
9398 the information necessary for debugging output. */
9399
9400 static tree
9401 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9402 {
9403 tree t;
9404 inchash::hash hstate;
9405
9406 t = make_node (VECTOR_TYPE);
9407 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9408 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9409 SET_TYPE_MODE (t, mode);
9410
9411 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9412 SET_TYPE_STRUCTURAL_EQUALITY (t);
9413 else if (TYPE_CANONICAL (innertype) != innertype
9414 || mode != VOIDmode)
9415 TYPE_CANONICAL (t)
9416 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9417
9418 layout_type (t);
9419
9420 hstate.add_wide_int (VECTOR_TYPE);
9421 hstate.add_wide_int (nunits);
9422 hstate.add_wide_int (mode);
9423 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9424 t = type_hash_canon (hstate.end (), t);
9425
9426 /* We have built a main variant, based on the main variant of the
9427 inner type. Use it to build the variant we return. */
9428 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9429 && TREE_TYPE (t) != innertype)
9430 return build_type_attribute_qual_variant (t,
9431 TYPE_ATTRIBUTES (innertype),
9432 TYPE_QUALS (innertype));
9433
9434 return t;
9435 }
9436
9437 static tree
9438 make_or_reuse_type (unsigned size, int unsignedp)
9439 {
9440 if (size == INT_TYPE_SIZE)
9441 return unsignedp ? unsigned_type_node : integer_type_node;
9442 if (size == CHAR_TYPE_SIZE)
9443 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9444 if (size == SHORT_TYPE_SIZE)
9445 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9446 if (size == LONG_TYPE_SIZE)
9447 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9448 if (size == LONG_LONG_TYPE_SIZE)
9449 return (unsignedp ? long_long_unsigned_type_node
9450 : long_long_integer_type_node);
9451 if (size == 128 && int128_integer_type_node)
9452 return (unsignedp ? int128_unsigned_type_node
9453 : int128_integer_type_node);
9454
9455 if (unsignedp)
9456 return make_unsigned_type (size);
9457 else
9458 return make_signed_type (size);
9459 }
9460
9461 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9462
9463 static tree
9464 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9465 {
9466 if (satp)
9467 {
9468 if (size == SHORT_FRACT_TYPE_SIZE)
9469 return unsignedp ? sat_unsigned_short_fract_type_node
9470 : sat_short_fract_type_node;
9471 if (size == FRACT_TYPE_SIZE)
9472 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9473 if (size == LONG_FRACT_TYPE_SIZE)
9474 return unsignedp ? sat_unsigned_long_fract_type_node
9475 : sat_long_fract_type_node;
9476 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9477 return unsignedp ? sat_unsigned_long_long_fract_type_node
9478 : sat_long_long_fract_type_node;
9479 }
9480 else
9481 {
9482 if (size == SHORT_FRACT_TYPE_SIZE)
9483 return unsignedp ? unsigned_short_fract_type_node
9484 : short_fract_type_node;
9485 if (size == FRACT_TYPE_SIZE)
9486 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9487 if (size == LONG_FRACT_TYPE_SIZE)
9488 return unsignedp ? unsigned_long_fract_type_node
9489 : long_fract_type_node;
9490 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9491 return unsignedp ? unsigned_long_long_fract_type_node
9492 : long_long_fract_type_node;
9493 }
9494
9495 return make_fract_type (size, unsignedp, satp);
9496 }
9497
9498 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9499
9500 static tree
9501 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9502 {
9503 if (satp)
9504 {
9505 if (size == SHORT_ACCUM_TYPE_SIZE)
9506 return unsignedp ? sat_unsigned_short_accum_type_node
9507 : sat_short_accum_type_node;
9508 if (size == ACCUM_TYPE_SIZE)
9509 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9510 if (size == LONG_ACCUM_TYPE_SIZE)
9511 return unsignedp ? sat_unsigned_long_accum_type_node
9512 : sat_long_accum_type_node;
9513 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9514 return unsignedp ? sat_unsigned_long_long_accum_type_node
9515 : sat_long_long_accum_type_node;
9516 }
9517 else
9518 {
9519 if (size == SHORT_ACCUM_TYPE_SIZE)
9520 return unsignedp ? unsigned_short_accum_type_node
9521 : short_accum_type_node;
9522 if (size == ACCUM_TYPE_SIZE)
9523 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9524 if (size == LONG_ACCUM_TYPE_SIZE)
9525 return unsignedp ? unsigned_long_accum_type_node
9526 : long_accum_type_node;
9527 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9528 return unsignedp ? unsigned_long_long_accum_type_node
9529 : long_long_accum_type_node;
9530 }
9531
9532 return make_accum_type (size, unsignedp, satp);
9533 }
9534
9535
9536 /* Create an atomic variant node for TYPE. This routine is called
9537 during initialization of data types to create the 5 basic atomic
9538 types. The generic build_variant_type function requires these to
9539 already be set up in order to function properly, so cannot be
9540 called from there. If ALIGN is non-zero, then ensure alignment is
9541 overridden to this value. */
9542
9543 static tree
9544 build_atomic_base (tree type, unsigned int align)
9545 {
9546 tree t;
9547
 9548 	  /* Make sure it's not already registered.  */
9549 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9550 return t;
9551
9552 t = build_variant_type_copy (type);
9553 set_type_quals (t, TYPE_QUAL_ATOMIC);
9554
9555 if (align)
9556 TYPE_ALIGN (t) = align;
9557
9558 return t;
9559 }
9560
9561 /* Create nodes for all integer types (and error_mark_node) using the sizes
9562 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9563 SHORT_DOUBLE specifies whether double should be of the same precision
9564 as float. */
9565
9566 void
9567 build_common_tree_nodes (bool signed_char, bool short_double)
9568 {
9569 error_mark_node = make_node (ERROR_MARK);
9570 TREE_TYPE (error_mark_node) = error_mark_node;
9571
9572 initialize_sizetypes ();
9573
9574 /* Define both `signed char' and `unsigned char'. */
9575 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9576 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9577 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9578 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9579
9580 /* Define `char', which is like either `signed char' or `unsigned char'
9581 but not the same as either. */
9582 char_type_node
9583 = (signed_char
9584 ? make_signed_type (CHAR_TYPE_SIZE)
9585 : make_unsigned_type (CHAR_TYPE_SIZE));
9586 TYPE_STRING_FLAG (char_type_node) = 1;
9587
9588 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9589 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9590 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9591 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9592 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9593 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9594 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9595 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9596 #if HOST_BITS_PER_WIDE_INT >= 64
 9597 	  /* TODO: This isn't correct, because at the moment the logic depends on
 9598 	     the host's wide integers instead of the target's.
 9599 	     If there is a target not supporting TImode, but that has a 128-bit
 9600 	     integer-scalar register, this target check needs to be adjusted. */
9601 if (targetm.scalar_mode_supported_p (TImode))
9602 {
9603 int128_integer_type_node = make_signed_type (128);
9604 int128_unsigned_type_node = make_unsigned_type (128);
9605 }
9606 #endif
9607
9608 /* Define a boolean type. This type only represents boolean values but
9609 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9610 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9611 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9612 TYPE_PRECISION (boolean_type_node) = 1;
9613 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9614
9615 /* Define what type to use for size_t. */
9616 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9617 size_type_node = unsigned_type_node;
9618 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9619 size_type_node = long_unsigned_type_node;
9620 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9621 size_type_node = long_long_unsigned_type_node;
9622 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9623 size_type_node = short_unsigned_type_node;
9624 else
9625 gcc_unreachable ();
9626
9627 /* Fill in the rest of the sized types. Reuse existing type nodes
9628 when possible. */
9629 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9630 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9631 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9632 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9633 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9634
9635 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9636 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9637 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9638 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9639 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9640
 9641 	  /* Don't call build_qualified_type for atomics.  That routine does
9642 special processing for atomics, and until they are initialized
9643 it's better not to make that call.
9644
9645 Check to see if there is a target override for atomic types. */
9646
9647 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9648 targetm.atomic_align_for_mode (QImode));
9649 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9650 targetm.atomic_align_for_mode (HImode));
9651 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9652 targetm.atomic_align_for_mode (SImode));
9653 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9654 targetm.atomic_align_for_mode (DImode));
9655 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9656 targetm.atomic_align_for_mode (TImode));
9657
9658 access_public_node = get_identifier ("public");
9659 access_protected_node = get_identifier ("protected");
9660 access_private_node = get_identifier ("private");
9661
 9662 	  /* Define these next since types below may use them.  */
9663 integer_zero_node = build_int_cst (integer_type_node, 0);
9664 integer_one_node = build_int_cst (integer_type_node, 1);
9665 integer_three_node = build_int_cst (integer_type_node, 3);
9666 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9667
9668 size_zero_node = size_int (0);
9669 size_one_node = size_int (1);
9670 bitsize_zero_node = bitsize_int (0);
9671 bitsize_one_node = bitsize_int (1);
9672 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9673
9674 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9675 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9676
9677 void_type_node = make_node (VOID_TYPE);
9678 layout_type (void_type_node);
9679
9680 /* We are not going to have real types in C with less than byte alignment,
9681 so we might as well not have any types that claim to have it. */
9682 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9683 TYPE_USER_ALIGN (void_type_node) = 0;
9684
9685 void_node = make_node (VOID_CST);
9686 TREE_TYPE (void_node) = void_type_node;
9687
9688 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9689 layout_type (TREE_TYPE (null_pointer_node));
9690
9691 ptr_type_node = build_pointer_type (void_type_node);
9692 const_ptr_type_node
9693 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9694 fileptr_type_node = ptr_type_node;
9695
9696 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9697
9698 float_type_node = make_node (REAL_TYPE);
9699 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9700 layout_type (float_type_node);
9701
9702 double_type_node = make_node (REAL_TYPE);
9703 if (short_double)
9704 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9705 else
9706 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9707 layout_type (double_type_node);
9708
9709 long_double_type_node = make_node (REAL_TYPE);
9710 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9711 layout_type (long_double_type_node);
9712
9713 float_ptr_type_node = build_pointer_type (float_type_node);
9714 double_ptr_type_node = build_pointer_type (double_type_node);
9715 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9716 integer_ptr_type_node = build_pointer_type (integer_type_node);
9717
9718 /* Fixed size integer types. */
9719 uint16_type_node = make_or_reuse_type (16, 1);
9720 uint32_type_node = make_or_reuse_type (32, 1);
9721 uint64_type_node = make_or_reuse_type (64, 1);
9722
9723 /* Decimal float types. */
9724 dfloat32_type_node = make_node (REAL_TYPE);
9725 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9726 layout_type (dfloat32_type_node);
9727 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9728 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9729
9730 dfloat64_type_node = make_node (REAL_TYPE);
9731 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9732 layout_type (dfloat64_type_node);
9733 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9734 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9735
9736 dfloat128_type_node = make_node (REAL_TYPE);
9737 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9738 layout_type (dfloat128_type_node);
9739 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9740 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9741
9742 complex_integer_type_node = build_complex_type (integer_type_node);
9743 complex_float_type_node = build_complex_type (float_type_node);
9744 complex_double_type_node = build_complex_type (double_type_node);
9745 complex_long_double_type_node = build_complex_type (long_double_type_node);
9746
9747 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9748 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9749 sat_ ## KIND ## _type_node = \
9750 make_sat_signed_ ## KIND ## _type (SIZE); \
9751 sat_unsigned_ ## KIND ## _type_node = \
9752 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9753 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9754 unsigned_ ## KIND ## _type_node = \
9755 make_unsigned_ ## KIND ## _type (SIZE);
9756
9757 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9758 sat_ ## WIDTH ## KIND ## _type_node = \
9759 make_sat_signed_ ## KIND ## _type (SIZE); \
9760 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9761 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9762 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9763 unsigned_ ## WIDTH ## KIND ## _type_node = \
9764 make_unsigned_ ## KIND ## _type (SIZE);
9765
9766 /* Make fixed-point type nodes based on four different widths. */
9767 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9768 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9769 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9770 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9771 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9772
9773 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9774 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9775 NAME ## _type_node = \
9776 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9777 u ## NAME ## _type_node = \
9778 make_or_reuse_unsigned_ ## KIND ## _type \
9779 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9780 sat_ ## NAME ## _type_node = \
9781 make_or_reuse_sat_signed_ ## KIND ## _type \
9782 (GET_MODE_BITSIZE (MODE ## mode)); \
9783 sat_u ## NAME ## _type_node = \
9784 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9785 (GET_MODE_BITSIZE (U ## MODE ## mode));
9786
9787 /* Fixed-point type and mode nodes. */
9788 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9789 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9790 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9791 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9792 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9793 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9794 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9795 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9796 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9797 MAKE_FIXED_MODE_NODE (accum, da, DA)
9798 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9799
9800 {
9801 tree t = targetm.build_builtin_va_list ();
9802
9803 /* Many back-ends define record types without setting TYPE_NAME.
9804 If we copied the record type here, we'd keep the original
9805 record type without a name. This breaks name mangling. So,
9806 don't copy record types and let c_common_nodes_and_builtins()
9807 declare the type to be __builtin_va_list. */
9808 if (TREE_CODE (t) != RECORD_TYPE)
9809 t = build_variant_type_copy (t);
9810
9811 va_list_type_node = t;
9812 }
9813 }
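
/* Illustrative sketch (kept in a comment, not compiled): a front end
   typically calls this routine once during its initialization, before it
   creates any of its own types, along the lines of

	build_common_tree_nodes (flag_signed_char, flag_short_double);
	build_common_builtin_nodes ();

   after which nodes such as integer_type_node, size_type_node and
   ptr_type_node are available.  The exact flags and call sites vary
   between front ends; the names above are only an example.  */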
9814
9815 /* Modify DECL for given flags.
9816 TM_PURE attribute is set only on types, so the function will modify
9817 DECL's type when ECF_TM_PURE is used. */
9818
9819 void
9820 set_call_expr_flags (tree decl, int flags)
9821 {
9822 if (flags & ECF_NOTHROW)
9823 TREE_NOTHROW (decl) = 1;
9824 if (flags & ECF_CONST)
9825 TREE_READONLY (decl) = 1;
9826 if (flags & ECF_PURE)
9827 DECL_PURE_P (decl) = 1;
9828 if (flags & ECF_LOOPING_CONST_OR_PURE)
9829 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9830 if (flags & ECF_NOVOPS)
9831 DECL_IS_NOVOPS (decl) = 1;
9832 if (flags & ECF_NORETURN)
9833 TREE_THIS_VOLATILE (decl) = 1;
9834 if (flags & ECF_MALLOC)
9835 DECL_IS_MALLOC (decl) = 1;
9836 if (flags & ECF_RETURNS_TWICE)
9837 DECL_IS_RETURNS_TWICE (decl) = 1;
9838 if (flags & ECF_LEAF)
9839 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9840 NULL, DECL_ATTRIBUTES (decl));
9841 if ((flags & ECF_TM_PURE) && flag_tm)
9842 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9843 /* Looping const or pure is implied by noreturn.
9844 There is currently no way to declare looping const or looping pure alone. */
9845 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9846 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9847 }
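
/* Illustrative sketch (comment only): the ECF_* flags handled above can be
   combined freely, so given some existing function DECL,

	set_call_expr_flags (decl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   marks DECL as readonly, non-throwing and leaf in one call.  */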
9848
9849
9850 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9851
9852 static void
9853 local_define_builtin (const char *name, tree type, enum built_in_function code,
9854 const char *library_name, int ecf_flags)
9855 {
9856 tree decl;
9857
9858 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9859 library_name, NULL_TREE);
9860 set_call_expr_flags (decl, ecf_flags);
9861
9862 set_builtin_decl (code, decl, true);
9863 }
9864
9865 /* Call this function after instantiating all builtins that the language
9866 front end cares about. This will build the rest of the builtins
9867 and internal functions that are relied upon by the tree optimizers and
9868 the middle-end. */
9869
9870 void
9871 build_common_builtin_nodes (void)
9872 {
9873 tree tmp, ftype;
9874 int ecf_flags;
9875
9876 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9877 {
9878 ftype = build_function_type (void_type_node, void_list_node);
9879 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9880 "__builtin_unreachable",
 9881 			    ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
 9882 			    | ECF_CONST);
9883 }
9884
9885 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9886 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9887 {
9888 ftype = build_function_type_list (ptr_type_node,
9889 ptr_type_node, const_ptr_type_node,
9890 size_type_node, NULL_TREE);
9891
9892 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9893 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9894 "memcpy", ECF_NOTHROW | ECF_LEAF);
9895 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9896 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9897 "memmove", ECF_NOTHROW | ECF_LEAF);
9898 }
9899
9900 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9901 {
9902 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9903 const_ptr_type_node, size_type_node,
9904 NULL_TREE);
9905 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9906 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9907 }
9908
9909 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9910 {
9911 ftype = build_function_type_list (ptr_type_node,
9912 ptr_type_node, integer_type_node,
9913 size_type_node, NULL_TREE);
9914 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9915 "memset", ECF_NOTHROW | ECF_LEAF);
9916 }
9917
9918 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9919 {
9920 ftype = build_function_type_list (ptr_type_node,
9921 size_type_node, NULL_TREE);
9922 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9923 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9924 }
9925
9926 ftype = build_function_type_list (ptr_type_node, size_type_node,
9927 size_type_node, NULL_TREE);
9928 local_define_builtin ("__builtin_alloca_with_align", ftype,
9929 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9930 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9931
9932 /* If we're checking the stack, `alloca' can throw. */
9933 if (flag_stack_check)
9934 {
9935 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9936 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9937 }
9938
9939 ftype = build_function_type_list (void_type_node,
9940 ptr_type_node, ptr_type_node,
9941 ptr_type_node, NULL_TREE);
9942 local_define_builtin ("__builtin_init_trampoline", ftype,
9943 BUILT_IN_INIT_TRAMPOLINE,
9944 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9945 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9946 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9947 "__builtin_init_heap_trampoline",
9948 ECF_NOTHROW | ECF_LEAF);
9949
9950 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9951 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9952 BUILT_IN_ADJUST_TRAMPOLINE,
9953 "__builtin_adjust_trampoline",
9954 ECF_CONST | ECF_NOTHROW);
9955
9956 ftype = build_function_type_list (void_type_node,
9957 ptr_type_node, ptr_type_node, NULL_TREE);
9958 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9959 BUILT_IN_NONLOCAL_GOTO,
9960 "__builtin_nonlocal_goto",
9961 ECF_NORETURN | ECF_NOTHROW);
9962
9963 ftype = build_function_type_list (void_type_node,
9964 ptr_type_node, ptr_type_node, NULL_TREE);
9965 local_define_builtin ("__builtin_setjmp_setup", ftype,
9966 BUILT_IN_SETJMP_SETUP,
9967 "__builtin_setjmp_setup", ECF_NOTHROW);
9968
9969 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9970 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9971 BUILT_IN_SETJMP_RECEIVER,
9972 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9973
9974 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9975 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9976 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9977
9978 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9979 local_define_builtin ("__builtin_stack_restore", ftype,
9980 BUILT_IN_STACK_RESTORE,
9981 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9982
9983 /* If there's a possibility that we might use the ARM EABI, build the
9984 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
9985 if (targetm.arm_eabi_unwinder)
9986 {
9987 ftype = build_function_type_list (void_type_node, NULL_TREE);
9988 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9989 BUILT_IN_CXA_END_CLEANUP,
9990 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9991 }
9992
9993 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9994 local_define_builtin ("__builtin_unwind_resume", ftype,
9995 BUILT_IN_UNWIND_RESUME,
9996 ((targetm_common.except_unwind_info (&global_options)
9997 == UI_SJLJ)
9998 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9999 ECF_NORETURN);
10000
10001 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10002 {
10003 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10004 NULL_TREE);
10005 local_define_builtin ("__builtin_return_address", ftype,
10006 BUILT_IN_RETURN_ADDRESS,
10007 "__builtin_return_address",
10008 ECF_NOTHROW);
10009 }
10010
10011 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10012 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10013 {
10014 ftype = build_function_type_list (void_type_node, ptr_type_node,
10015 ptr_type_node, NULL_TREE);
10016 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10017 local_define_builtin ("__cyg_profile_func_enter", ftype,
10018 BUILT_IN_PROFILE_FUNC_ENTER,
10019 "__cyg_profile_func_enter", 0);
10020 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10021 local_define_builtin ("__cyg_profile_func_exit", ftype,
10022 BUILT_IN_PROFILE_FUNC_EXIT,
10023 "__cyg_profile_func_exit", 0);
10024 }
10025
10026 /* The exception object and filter values from the runtime. The argument
10027 must be zero before exception lowering, i.e. from the front end. After
10028 exception lowering, it will be the region number for the exception
10029 landing pad. These functions are PURE instead of CONST to prevent
10030 them from being hoisted past the exception edge that will initialize
10031 its value in the landing pad. */
10032 ftype = build_function_type_list (ptr_type_node,
10033 integer_type_node, NULL_TREE);
10034 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
 10035 	  /* Only use TM_PURE if we have TM language support.  */
10036 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10037 ecf_flags |= ECF_TM_PURE;
10038 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10039 "__builtin_eh_pointer", ecf_flags);
10040
10041 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10042 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10043 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10044 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10045
10046 ftype = build_function_type_list (void_type_node,
10047 integer_type_node, integer_type_node,
10048 NULL_TREE);
10049 local_define_builtin ("__builtin_eh_copy_values", ftype,
10050 BUILT_IN_EH_COPY_VALUES,
10051 "__builtin_eh_copy_values", ECF_NOTHROW);
10052
10053 /* Complex multiplication and division. These are handled as builtins
10054 rather than optabs because emit_library_call_value doesn't support
10055 complex. Further, we can do slightly better with folding these
 10056 	   beasties if the real and imaginary parts of the arguments are separate.  */
10057 {
10058 int mode;
10059
10060 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10061 {
10062 char mode_name_buf[4], *q;
10063 const char *p;
10064 enum built_in_function mcode, dcode;
10065 tree type, inner_type;
10066 const char *prefix = "__";
10067
10068 if (targetm.libfunc_gnu_prefix)
10069 prefix = "__gnu_";
10070
10071 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10072 if (type == NULL)
10073 continue;
10074 inner_type = TREE_TYPE (type);
10075
10076 ftype = build_function_type_list (type, inner_type, inner_type,
10077 inner_type, inner_type, NULL_TREE);
10078
10079 mcode = ((enum built_in_function)
10080 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10081 dcode = ((enum built_in_function)
10082 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10083
10084 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10085 *q = TOLOWER (*p);
10086 *q = '\0';
10087
10088 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10089 NULL);
10090 local_define_builtin (built_in_names[mcode], ftype, mcode,
10091 built_in_names[mcode],
10092 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10093
10094 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10095 NULL);
10096 local_define_builtin (built_in_names[dcode], ftype, dcode,
10097 built_in_names[dcode],
10098 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10099 }
10100 }
10101
10102 init_internal_fns ();
10103 }
10104
10105 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10106 better way.
10107
10108 If we requested a pointer to a vector, build up the pointers that
10109 we stripped off while looking for the inner type. Similarly for
10110 return values from functions.
10111
10112 The argument TYPE is the top of the chain, and BOTTOM is the
10113 new type which we will point to. */
10114
10115 tree
10116 reconstruct_complex_type (tree type, tree bottom)
10117 {
10118 tree inner, outer;
10119
10120 if (TREE_CODE (type) == POINTER_TYPE)
10121 {
10122 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10123 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10124 TYPE_REF_CAN_ALIAS_ALL (type));
10125 }
10126 else if (TREE_CODE (type) == REFERENCE_TYPE)
10127 {
10128 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10129 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10130 TYPE_REF_CAN_ALIAS_ALL (type));
10131 }
10132 else if (TREE_CODE (type) == ARRAY_TYPE)
10133 {
10134 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10135 outer = build_array_type (inner, TYPE_DOMAIN (type));
10136 }
10137 else if (TREE_CODE (type) == FUNCTION_TYPE)
10138 {
10139 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10140 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10141 }
10142 else if (TREE_CODE (type) == METHOD_TYPE)
10143 {
10144 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
 10145 	      /* The build_method_type_directly() routine prepends 'this' to the
 10146 		 argument list, so we must compensate by getting rid of it.  */
10147 outer
10148 = build_method_type_directly
10149 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10150 inner,
10151 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10152 }
10153 else if (TREE_CODE (type) == OFFSET_TYPE)
10154 {
10155 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10156 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10157 }
10158 else
10159 return bottom;
10160
10161 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10162 TYPE_QUALS (type));
10163 }
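
/* Illustrative sketch (comment only): if TYPE is a "float *" pointer type
   and BOTTOM is a vector-of-float type built with the routines below, then

	tree newtype = reconstruct_complex_type (type, bottom);

   rebuilds the pointer wrapper around the new inner type, yielding a
   pointer-to-vector type with the original qualifiers and attributes.  */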
10164
10165 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10166 the inner type. */
10167 tree
10168 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10169 {
10170 int nunits;
10171
10172 switch (GET_MODE_CLASS (mode))
10173 {
10174 case MODE_VECTOR_INT:
10175 case MODE_VECTOR_FLOAT:
10176 case MODE_VECTOR_FRACT:
10177 case MODE_VECTOR_UFRACT:
10178 case MODE_VECTOR_ACCUM:
10179 case MODE_VECTOR_UACCUM:
10180 nunits = GET_MODE_NUNITS (mode);
10181 break;
10182
10183 case MODE_INT:
10184 /* Check that there are no leftover bits. */
10185 gcc_assert (GET_MODE_BITSIZE (mode)
10186 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10187
10188 nunits = GET_MODE_BITSIZE (mode)
10189 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10190 break;
10191
10192 default:
10193 gcc_unreachable ();
10194 }
10195
10196 return make_vector_type (innertype, nunits, mode);
10197 }
10198
10199 /* Similarly, but takes the inner type and number of units, which must be
10200 a power of two. */
10201
10202 tree
10203 build_vector_type (tree innertype, int nunits)
10204 {
10205 return make_vector_type (innertype, nunits, VOIDmode);
10206 }
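
/* Illustrative sketch (comment only): a vector of four floats can be
   obtained either from a mode or from an element count, e.g.

	tree v4sf_a = build_vector_type_for_mode (float_type_node, V4SFmode);
	tree v4sf_b = build_vector_type (float_type_node, 4);

   V4SFmode is only an example; whether it exists depends on the target's
   vector modes, whereas the element-count form works everywhere.  */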
10207
10208 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10209
10210 tree
10211 build_opaque_vector_type (tree innertype, int nunits)
10212 {
10213 tree t = make_vector_type (innertype, nunits, VOIDmode);
10214 tree cand;
10215 /* We always build the non-opaque variant before the opaque one,
10216 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10217 cand = TYPE_NEXT_VARIANT (t);
10218 if (cand
10219 && TYPE_VECTOR_OPAQUE (cand)
10220 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10221 return cand;
 10222 	  /* Otherwise build a variant type and make sure to queue it after
10223 the non-opaque type. */
10224 cand = build_distinct_type_copy (t);
10225 TYPE_VECTOR_OPAQUE (cand) = true;
10226 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10227 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10228 TYPE_NEXT_VARIANT (t) = cand;
10229 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10230 return cand;
10231 }
10232
10233
10234 /* Given an initializer INIT, return TRUE if INIT is zero or some
10235 aggregate of zeros. Otherwise return FALSE. */
10236 bool
10237 initializer_zerop (const_tree init)
10238 {
10239 tree elt;
10240
10241 STRIP_NOPS (init);
10242
10243 switch (TREE_CODE (init))
10244 {
10245 case INTEGER_CST:
10246 return integer_zerop (init);
10247
10248 case REAL_CST:
10249 /* ??? Note that this is not correct for C4X float formats. There,
10250 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10251 negative exponent. */
10252 return real_zerop (init)
10253 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10254
10255 case FIXED_CST:
10256 return fixed_zerop (init);
10257
10258 case COMPLEX_CST:
10259 return integer_zerop (init)
10260 || (real_zerop (init)
10261 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10262 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10263
10264 case VECTOR_CST:
10265 {
10266 unsigned i;
10267 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10268 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10269 return false;
10270 return true;
10271 }
10272
10273 case CONSTRUCTOR:
10274 {
10275 unsigned HOST_WIDE_INT idx;
10276
10277 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10278 if (!initializer_zerop (elt))
10279 return false;
10280 return true;
10281 }
10282
10283 case STRING_CST:
10284 {
10285 int i;
10286
10287 /* We need to loop through all elements to handle cases like
10288 "\0" and "\0foobar". */
10289 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10290 if (TREE_STRING_POINTER (init)[i] != '\0')
10291 return false;
10292
10293 return true;
10294 }
10295
10296 default:
10297 return false;
10298 }
10299 }
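
/* Illustrative sketch (comment only): both of the following count as zero
   initializers in the sense checked above,

	initializer_zerop (build_int_cst (integer_type_node, 0))
	initializer_zerop (build_string (3, "\0\0\0"))

   whereas a REAL_CST for -0.0 does not, because of the explicit
   REAL_VALUE_MINUS_ZERO check.  */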
10300
 10301 /* Check whether vector VEC consists of all equal elements and whether
 10302    the number of elements corresponds to the type of VEC.
 10303    The function returns the first element of the vector
 10304    or NULL_TREE if the vector is not uniform.  */
10305 tree
10306 uniform_vector_p (const_tree vec)
10307 {
10308 tree first, t;
10309 unsigned i;
10310
10311 if (vec == NULL_TREE)
10312 return NULL_TREE;
10313
10314 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10315
10316 if (TREE_CODE (vec) == VECTOR_CST)
10317 {
10318 first = VECTOR_CST_ELT (vec, 0);
10319 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10320 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10321 return NULL_TREE;
10322
10323 return first;
10324 }
10325
10326 else if (TREE_CODE (vec) == CONSTRUCTOR)
10327 {
10328 first = error_mark_node;
10329
10330 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10331 {
10332 if (i == 0)
10333 {
10334 first = t;
10335 continue;
10336 }
10337 if (!operand_equal_p (first, t, 0))
10338 return NULL_TREE;
10339 }
10340 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10341 return NULL_TREE;
10342
10343 return first;
10344 }
10345
10346 return NULL_TREE;
10347 }
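
/* Illustrative sketch (comment only): for a constant vector built as

	tree elt = build_int_cst (integer_type_node, 7);
	tree vec = build_vector_from_val (v4si_type, elt);

   uniform_vector_p (vec) returns the repeated element, while a vector
   whose elements differ returns NULL_TREE.  (v4si_type stands in for
   some existing 4-element integer vector type; it is only an example
   name.)  */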
10348
10349 /* Build an empty statement at location LOC. */
10350
10351 tree
10352 build_empty_stmt (location_t loc)
10353 {
10354 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10355 SET_EXPR_LOCATION (t, loc);
10356 return t;
10357 }
10358
10359
10360 /* Build an OpenMP clause with code CODE. LOC is the location of the
10361 clause. */
10362
10363 tree
10364 build_omp_clause (location_t loc, enum omp_clause_code code)
10365 {
10366 tree t;
10367 int size, length;
10368
10369 length = omp_clause_num_ops[code];
10370 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10371
10372 record_node_allocation_statistics (OMP_CLAUSE, size);
10373
10374 t = (tree) ggc_internal_alloc (size);
10375 memset (t, 0, size);
10376 TREE_SET_CODE (t, OMP_CLAUSE);
10377 OMP_CLAUSE_SET_CODE (t, code);
10378 OMP_CLAUSE_LOCATION (t) = loc;
10379
10380 return t;
10381 }
10382
10383 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10384 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10385 Except for the CODE and operand count field, other storage for the
10386 object is initialized to zeros. */
10387
10388 tree
10389 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10390 {
10391 tree t;
10392 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10393
10394 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10395 gcc_assert (len >= 1);
10396
10397 record_node_allocation_statistics (code, length);
10398
10399 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10400
10401 TREE_SET_CODE (t, code);
10402
10403 /* Can't use TREE_OPERAND to store the length because if checking is
10404 enabled, it will try to check the length before we store it. :-P */
10405 t->exp.operands[0] = build_int_cst (sizetype, len);
10406
10407 return t;
10408 }
10409
10410 /* Helper function for build_call_* functions; build a CALL_EXPR with
10411 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10412 the argument slots. */
10413
10414 static tree
10415 build_call_1 (tree return_type, tree fn, int nargs)
10416 {
10417 tree t;
10418
10419 t = build_vl_exp (CALL_EXPR, nargs + 3);
10420 TREE_TYPE (t) = return_type;
10421 CALL_EXPR_FN (t) = fn;
10422 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10423
10424 return t;
10425 }
10426
10427 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10428 FN and a null static chain slot. NARGS is the number of call arguments
10429 which are specified as "..." arguments. */
10430
10431 tree
10432 build_call_nary (tree return_type, tree fn, int nargs, ...)
10433 {
10434 tree ret;
10435 va_list args;
10436 va_start (args, nargs);
10437 ret = build_call_valist (return_type, fn, nargs, args);
10438 va_end (args);
10439 return ret;
10440 }
10441
10442 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10443 FN and a null static chain slot. NARGS is the number of call arguments
10444 which are specified as a va_list ARGS. */
10445
10446 tree
10447 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10448 {
10449 tree t;
10450 int i;
10451
10452 t = build_call_1 (return_type, fn, nargs);
10453 for (i = 0; i < nargs; i++)
10454 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10455 process_call_operands (t);
10456 return t;
10457 }
10458
10459 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10460 FN and a null static chain slot. NARGS is the number of call arguments
10461 which are specified as a tree array ARGS. */
10462
10463 tree
10464 build_call_array_loc (location_t loc, tree return_type, tree fn,
10465 int nargs, const tree *args)
10466 {
10467 tree t;
10468 int i;
10469
10470 t = build_call_1 (return_type, fn, nargs);
10471 for (i = 0; i < nargs; i++)
10472 CALL_EXPR_ARG (t, i) = args[i];
10473 process_call_operands (t);
10474 SET_EXPR_LOCATION (t, loc);
10475 return t;
10476 }
10477
10478 /* Like build_call_array, but takes a vec. */
10479
10480 tree
10481 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10482 {
10483 tree ret, t;
10484 unsigned int ix;
10485
10486 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10487 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10488 CALL_EXPR_ARG (ret, ix) = t;
10489 process_call_operands (ret);
10490 return ret;
10491 }
10492
10493 /* Conveniently construct a function call expression. FNDECL names the
10494 function to be called and N arguments are passed in the array
10495 ARGARRAY. */
10496
10497 tree
10498 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10499 {
10500 tree fntype = TREE_TYPE (fndecl);
10501 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10502
10503 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10504 }
10505
10506 /* Conveniently construct a function call expression. FNDECL names the
10507 function to be called and the arguments are passed in the vector
10508 VEC. */
10509
10510 tree
10511 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10512 {
10513 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10514 vec_safe_address (vec));
10515 }
10516
10517
10518 /* Conveniently construct a function call expression. FNDECL names the
10519 function to be called, N is the number of arguments, and the "..."
10520 parameters are the argument expressions. */
10521
10522 tree
10523 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10524 {
10525 va_list ap;
10526 tree *argarray = XALLOCAVEC (tree, n);
10527 int i;
10528
10529 va_start (ap, n);
10530 for (i = 0; i < n; i++)
10531 argarray[i] = va_arg (ap, tree);
10532 va_end (ap);
10533 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10534 }
10535
10536 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10537 varargs macros aren't supported by all bootstrap compilers. */
10538
10539 tree
10540 build_call_expr (tree fndecl, int n, ...)
10541 {
10542 va_list ap;
10543 tree *argarray = XALLOCAVEC (tree, n);
10544 int i;
10545
10546 va_start (ap, n);
10547 for (i = 0; i < n; i++)
10548 argarray[i] = va_arg (ap, tree);
10549 va_end (ap);
10550 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10551 }
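
/* Illustrative sketch (comment only): building a call to the memset
   builtin with three arguments,

	tree fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	tree call = build_call_expr (fn, 3, dst, integer_zero_node, len);

   where dst and len are assumed to be existing trees of pointer and
   size type respectively.  */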
10552
 10553 /* Build an internal call expression.  This is just like CALL_EXPR, except
 10554    its CALL_EXPR_FN is NULL.  It will get gimplified later into a call to an
 10555    internal function.  */
10556
10557 tree
10558 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10559 tree type, int n, ...)
10560 {
10561 va_list ap;
10562 int i;
10563
10564 tree fn = build_call_1 (type, NULL_TREE, n);
10565 va_start (ap, n);
10566 for (i = 0; i < n; i++)
10567 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10568 va_end (ap);
10569 SET_EXPR_LOCATION (fn, loc);
10570 CALL_EXPR_IFN (fn) = ifn;
10571 return fn;
10572 }
10573
10574 /* Create a new constant string literal and return a char* pointer to it.
10575 The STRING_CST value is the LEN characters at STR. */
10576 tree
10577 build_string_literal (int len, const char *str)
10578 {
10579 tree t, elem, index, type;
10580
10581 t = build_string (len, str);
10582 elem = build_type_variant (char_type_node, 1, 0);
10583 index = build_index_type (size_int (len - 1));
10584 type = build_array_type (elem, index);
10585 TREE_TYPE (t) = type;
10586 TREE_CONSTANT (t) = 1;
10587 TREE_READONLY (t) = 1;
10588 TREE_STATIC (t) = 1;
10589
10590 type = build_pointer_type (elem);
10591 t = build1 (ADDR_EXPR, type,
10592 build4 (ARRAY_REF, elem,
10593 t, integer_zero_node, NULL_TREE, NULL_TREE));
10594 return t;
10595 }
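
/* Illustrative sketch (comment only): the returned ADDR_EXPR can be used
   directly as a char * argument, e.g.

	tree fmt = build_string_literal (strlen ("%d\n") + 1, "%d\n");
	tree call = build_call_expr (builtin_decl_explicit (BUILT_IN_PRINTF),
				     2, fmt, arg);

   where arg is assumed to be an existing integer-valued tree.  Note that
   the length passed in includes the terminating NUL.  */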
10596
10597
10598
10599 /* Return true if T (assumed to be a DECL) must be assigned a memory
10600 location. */
10601
10602 bool
10603 needs_to_live_in_memory (const_tree t)
10604 {
10605 return (TREE_ADDRESSABLE (t)
10606 || is_global_var (t)
10607 || (TREE_CODE (t) == RESULT_DECL
10608 && !DECL_BY_REFERENCE (t)
10609 && aggregate_value_p (t, current_function_decl)));
10610 }
10611
 10612 /* Return the value of the constant X, sign-extended.  */
10613
10614 HOST_WIDE_INT
10615 int_cst_value (const_tree x)
10616 {
10617 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10618 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10619
10620 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10621 gcc_assert (cst_and_fits_in_hwi (x));
10622
10623 if (bits < HOST_BITS_PER_WIDE_INT)
10624 {
10625 bool negative = ((val >> (bits - 1)) & 1) != 0;
10626 if (negative)
10627 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10628 else
10629 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10630 }
10631
10632 return val;
10633 }
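
/* Illustrative sketch (comment only): the sign extension above is applied
   regardless of the signedness of the type, so on a target with an 8-bit
   char type

	int_cst_value (build_int_cst (unsigned_char_type_node, 255))

   returns -1 rather than 255, since bit 7 is treated as the sign bit of
   the 8-bit value; use TREE_INT_CST_LOW or tree_to_uhwi when the unsigned
   interpretation is wanted.  */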
10634
10635 /* If TYPE is an integral or pointer type, return an integer type with
10636 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10637 if TYPE is already an integer type of signedness UNSIGNEDP. */
10638
10639 tree
10640 signed_or_unsigned_type_for (int unsignedp, tree type)
10641 {
10642 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10643 return type;
10644
10645 if (TREE_CODE (type) == VECTOR_TYPE)
10646 {
10647 tree inner = TREE_TYPE (type);
10648 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10649 if (!inner2)
10650 return NULL_TREE;
10651 if (inner == inner2)
10652 return type;
10653 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10654 }
10655
10656 if (!INTEGRAL_TYPE_P (type)
10657 && !POINTER_TYPE_P (type)
10658 && TREE_CODE (type) != OFFSET_TYPE)
10659 return NULL_TREE;
10660
10661 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10662 }
10663
10664 /* If TYPE is an integral or pointer type, return an integer type with
10665 the same precision which is unsigned, or itself if TYPE is already an
10666 unsigned integer type. */
10667
10668 tree
10669 unsigned_type_for (tree type)
10670 {
10671 return signed_or_unsigned_type_for (1, type);
10672 }
10673
10674 /* If TYPE is an integral or pointer type, return an integer type with
10675 the same precision which is signed, or itself if TYPE is already a
10676 signed integer type. */
10677
10678 tree
10679 signed_type_for (tree type)
10680 {
10681 return signed_or_unsigned_type_for (0, type);
10682 }
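
/* Illustrative sketch (comment only): for instance,

	unsigned_type_for (integer_type_node)

   yields an unsigned integer type with the precision of int, while
   passing a type that is neither integral, pointer nor offset
   (a REAL_TYPE, say) yields NULL_TREE.  */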
10683
10684 /* If TYPE is a vector type, return a signed integer vector type with the
10685 same width and number of subparts. Otherwise return boolean_type_node. */
10686
10687 tree
10688 truth_type_for (tree type)
10689 {
10690 if (TREE_CODE (type) == VECTOR_TYPE)
10691 {
10692 tree elem = lang_hooks.types.type_for_size
10693 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10694 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10695 }
10696 else
10697 return boolean_type_node;
10698 }
10699
10700 /* Returns the largest value obtainable by casting something in INNER type to
10701 OUTER type. */
10702
10703 tree
10704 upper_bound_in_type (tree outer, tree inner)
10705 {
10706 unsigned int det = 0;
10707 unsigned oprec = TYPE_PRECISION (outer);
10708 unsigned iprec = TYPE_PRECISION (inner);
10709 unsigned prec;
10710
10711 /* Compute a unique number for every combination. */
10712 det |= (oprec > iprec) ? 4 : 0;
10713 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10714 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10715
10716 /* Determine the exponent to use. */
10717 switch (det)
10718 {
10719 case 0:
10720 case 1:
10721 /* oprec <= iprec, outer: signed, inner: don't care. */
10722 prec = oprec - 1;
10723 break;
10724 case 2:
10725 case 3:
10726 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10727 prec = oprec;
10728 break;
10729 case 4:
10730 /* oprec > iprec, outer: signed, inner: signed. */
10731 prec = iprec - 1;
10732 break;
10733 case 5:
10734 /* oprec > iprec, outer: signed, inner: unsigned. */
10735 prec = iprec;
10736 break;
10737 case 6:
10738 /* oprec > iprec, outer: unsigned, inner: signed. */
10739 prec = oprec;
10740 break;
10741 case 7:
10742 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10743 prec = iprec;
10744 break;
10745 default:
10746 gcc_unreachable ();
10747 }
10748
10749 return wide_int_to_tree (outer,
10750 wi::mask (prec, false, TYPE_PRECISION (outer)));
10751 }
10752
10753 /* Returns the smallest value obtainable by casting something in INNER type to
10754 OUTER type. */
10755
10756 tree
10757 lower_bound_in_type (tree outer, tree inner)
10758 {
10759 unsigned oprec = TYPE_PRECISION (outer);
10760 unsigned iprec = TYPE_PRECISION (inner);
10761
10762 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10763 and obtain 0. */
10764 if (TYPE_UNSIGNED (outer)
10765 /* If we are widening something of an unsigned type, OUTER type
10766 contains all values of INNER type. In particular, both INNER
10767 and OUTER types have zero in common. */
10768 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10769 return build_int_cst (outer, 0);
10770 else
10771 {
10772 /* If we are widening a signed type to another signed type, we
 10773 	 want to obtain -2^(iprec-1).  If we are keeping the
10774 precision or narrowing to a signed type, we want to obtain
10775 -2^(oprec-1). */
10776 unsigned prec = oprec > iprec ? iprec : oprec;
10777 return wide_int_to_tree (outer,
10778 wi::mask (prec - 1, true,
10779 TYPE_PRECISION (outer)));
10780 }
10781 }
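
/* Illustrative sketch (comment only): with an 8-bit signed char INNER type
   and a 16-bit unsigned short OUTER type,

	upper_bound_in_type (outer, inner)   is 2^16 - 1   (det == 6 above)
	lower_bound_in_type (outer, inner)   is 0

   whereas casting in the other direction, unsigned short to signed char,
   gives the range [-2^7, 2^7 - 1].  */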
10782
10783 /* Return nonzero if two operands that are suitable for PHI nodes are
10784 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10785 SSA_NAME or invariant. Note that this is strictly an optimization.
10786 That is, callers of this function can directly call operand_equal_p
10787 and get the same result, only slower. */
10788
10789 int
10790 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10791 {
10792 if (arg0 == arg1)
10793 return 1;
10794 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10795 return 0;
10796 return operand_equal_p (arg0, arg1, 0);
10797 }
10798
 10799 /* Returns the number of zeros at the end of the binary representation of X.  */
10800
10801 tree
10802 num_ending_zeros (const_tree x)
10803 {
10804 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10805 }
10806
10807
10808 #define WALK_SUBTREE(NODE) \
10809 do \
10810 { \
10811 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10812 if (result) \
10813 return result; \
10814 } \
10815 while (0)
10816
 10817 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
 10818    be walked whenever a type is seen in the tree.  The rest of the operands and
 10819    the return value are as for walk_tree.  */
10820
10821 static tree
10822 walk_type_fields (tree type, walk_tree_fn func, void *data,
10823 hash_set<tree> *pset, walk_tree_lh lh)
10824 {
10825 tree result = NULL_TREE;
10826
10827 switch (TREE_CODE (type))
10828 {
10829 case POINTER_TYPE:
10830 case REFERENCE_TYPE:
10831 case VECTOR_TYPE:
10832 /* We have to worry about mutually recursive pointers. These can't
10833 be written in C. They can in Ada. It's pathological, but
10834 there's an ACATS test (c38102a) that checks it. Deal with this
10835 by checking if we're pointing to another pointer, that one
10836 points to another pointer, that one does too, and we have no htab.
10837 If so, get a hash table. We check three levels deep to avoid
10838 the cost of the hash table if we don't need one. */
10839 if (POINTER_TYPE_P (TREE_TYPE (type))
10840 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10841 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10842 && !pset)
10843 {
10844 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10845 func, data);
10846 if (result)
10847 return result;
10848
10849 break;
10850 }
10851
10852 /* ... fall through ... */
10853
10854 case COMPLEX_TYPE:
10855 WALK_SUBTREE (TREE_TYPE (type));
10856 break;
10857
10858 case METHOD_TYPE:
10859 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10860
10861 /* Fall through. */
10862
10863 case FUNCTION_TYPE:
10864 WALK_SUBTREE (TREE_TYPE (type));
10865 {
10866 tree arg;
10867
10868 /* We never want to walk into default arguments. */
10869 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10870 WALK_SUBTREE (TREE_VALUE (arg));
10871 }
10872 break;
10873
10874 case ARRAY_TYPE:
 10875       /* Don't follow this node's type if it is a pointer, for fear that
 10876 	 we'll have infinite recursion.  If we have a PSET, then we
 10877 	 need not fear.  */
10878 if (pset
10879 || (!POINTER_TYPE_P (TREE_TYPE (type))
10880 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10881 WALK_SUBTREE (TREE_TYPE (type));
10882 WALK_SUBTREE (TYPE_DOMAIN (type));
10883 break;
10884
10885 case OFFSET_TYPE:
10886 WALK_SUBTREE (TREE_TYPE (type));
10887 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10888 break;
10889
10890 default:
10891 break;
10892 }
10893
10894 return NULL_TREE;
10895 }
10896
10897 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10898 called with the DATA and the address of each sub-tree. If FUNC returns a
10899 non-NULL value, the traversal is stopped, and the value returned by FUNC
10900 is returned. If PSET is non-NULL it is used to record the nodes visited,
10901 and to avoid visiting a node more than once. */
10902
10903 tree
10904 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10905 hash_set<tree> *pset, walk_tree_lh lh)
10906 {
10907 enum tree_code code;
10908 int walk_subtrees;
10909 tree result;
10910
10911 #define WALK_SUBTREE_TAIL(NODE) \
10912 do \
10913 { \
10914 tp = & (NODE); \
10915 goto tail_recurse; \
10916 } \
10917 while (0)
10918
10919 tail_recurse:
10920 /* Skip empty subtrees. */
10921 if (!*tp)
10922 return NULL_TREE;
10923
10924 /* Don't walk the same tree twice, if the user has requested
10925 that we avoid doing so. */
10926 if (pset && pset->add (*tp))
10927 return NULL_TREE;
10928
10929 /* Call the function. */
10930 walk_subtrees = 1;
10931 result = (*func) (tp, &walk_subtrees, data);
10932
10933 /* If we found something, return it. */
10934 if (result)
10935 return result;
10936
10937 code = TREE_CODE (*tp);
10938
10939 /* Even if we didn't, FUNC may have decided that there was nothing
10940 interesting below this point in the tree. */
10941 if (!walk_subtrees)
10942 {
10943 /* But we still need to check our siblings. */
10944 if (code == TREE_LIST)
10945 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10946 else if (code == OMP_CLAUSE)
10947 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10948 else
10949 return NULL_TREE;
10950 }
10951
10952 if (lh)
10953 {
10954 result = (*lh) (tp, &walk_subtrees, func, data, pset);
10955 if (result || !walk_subtrees)
10956 return result;
10957 }
10958
10959 switch (code)
10960 {
10961 case ERROR_MARK:
10962 case IDENTIFIER_NODE:
10963 case INTEGER_CST:
10964 case REAL_CST:
10965 case FIXED_CST:
10966 case VECTOR_CST:
10967 case STRING_CST:
10968 case BLOCK:
10969 case PLACEHOLDER_EXPR:
10970 case SSA_NAME:
10971 case FIELD_DECL:
10972 case RESULT_DECL:
10973 /* None of these have subtrees other than those already walked
10974 above. */
10975 break;
10976
10977 case TREE_LIST:
10978 WALK_SUBTREE (TREE_VALUE (*tp));
10979 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10980 break;
10981
10982 case TREE_VEC:
10983 {
10984 int len = TREE_VEC_LENGTH (*tp);
10985
10986 if (len == 0)
10987 break;
10988
10989 /* Walk all elements but the first. */
10990 while (--len)
10991 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
10992
10993 /* Now walk the first one as a tail call. */
10994 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
10995 }
10996
10997 case COMPLEX_CST:
10998 WALK_SUBTREE (TREE_REALPART (*tp));
10999 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11000
11001 case CONSTRUCTOR:
11002 {
11003 unsigned HOST_WIDE_INT idx;
11004 constructor_elt *ce;
11005
11006 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11007 idx++)
11008 WALK_SUBTREE (ce->value);
11009 }
11010 break;
11011
11012 case SAVE_EXPR:
11013 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11014
11015 case BIND_EXPR:
11016 {
11017 tree decl;
11018 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11019 {
11020 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11021 into declarations that are just mentioned, rather than
11022 declared; they don't really belong to this part of the tree.
11023 And, we can see cycles: the initializer for a declaration
11024 can refer to the declaration itself. */
11025 WALK_SUBTREE (DECL_INITIAL (decl));
11026 WALK_SUBTREE (DECL_SIZE (decl));
11027 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11028 }
11029 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11030 }
11031
11032 case STATEMENT_LIST:
11033 {
11034 tree_stmt_iterator i;
11035 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11036 WALK_SUBTREE (*tsi_stmt_ptr (i));
11037 }
11038 break;
11039
11040 case OMP_CLAUSE:
11041 switch (OMP_CLAUSE_CODE (*tp))
11042 {
11043 case OMP_CLAUSE_PRIVATE:
11044 case OMP_CLAUSE_SHARED:
11045 case OMP_CLAUSE_FIRSTPRIVATE:
11046 case OMP_CLAUSE_COPYIN:
11047 case OMP_CLAUSE_COPYPRIVATE:
11048 case OMP_CLAUSE_FINAL:
11049 case OMP_CLAUSE_IF:
11050 case OMP_CLAUSE_NUM_THREADS:
11051 case OMP_CLAUSE_SCHEDULE:
11052 case OMP_CLAUSE_UNIFORM:
11053 case OMP_CLAUSE_DEPEND:
11054 case OMP_CLAUSE_NUM_TEAMS:
11055 case OMP_CLAUSE_THREAD_LIMIT:
11056 case OMP_CLAUSE_DEVICE:
11057 case OMP_CLAUSE_DIST_SCHEDULE:
11058 case OMP_CLAUSE_SAFELEN:
11059 case OMP_CLAUSE_SIMDLEN:
11060 case OMP_CLAUSE__LOOPTEMP_:
11061 case OMP_CLAUSE__SIMDUID_:
11062 case OMP_CLAUSE__CILK_FOR_COUNT_:
11063 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11064 /* FALLTHRU */
11065
11066 case OMP_CLAUSE_NOWAIT:
11067 case OMP_CLAUSE_ORDERED:
11068 case OMP_CLAUSE_DEFAULT:
11069 case OMP_CLAUSE_UNTIED:
11070 case OMP_CLAUSE_MERGEABLE:
11071 case OMP_CLAUSE_PROC_BIND:
11072 case OMP_CLAUSE_INBRANCH:
11073 case OMP_CLAUSE_NOTINBRANCH:
11074 case OMP_CLAUSE_FOR:
11075 case OMP_CLAUSE_PARALLEL:
11076 case OMP_CLAUSE_SECTIONS:
11077 case OMP_CLAUSE_TASKGROUP:
11078 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11079
11080 case OMP_CLAUSE_LASTPRIVATE:
11081 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11082 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11083 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11084
11085 case OMP_CLAUSE_COLLAPSE:
11086 {
11087 int i;
11088 for (i = 0; i < 3; i++)
11089 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11090 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11091 }
11092
11093 case OMP_CLAUSE_LINEAR:
11094 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11095 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11096 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11097 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11098
11099 case OMP_CLAUSE_ALIGNED:
11100 case OMP_CLAUSE_FROM:
11101 case OMP_CLAUSE_TO:
11102 case OMP_CLAUSE_MAP:
11103 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11104 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11105 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11106
11107 case OMP_CLAUSE_REDUCTION:
11108 {
11109 int i;
11110 for (i = 0; i < 4; i++)
11111 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11112 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11113 }
11114
11115 default:
11116 gcc_unreachable ();
11117 }
11118 break;
11119
11120 case TARGET_EXPR:
11121 {
11122 int i, len;
11123
11124 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11125 But, we only want to walk once. */
11126 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11127 for (i = 0; i < len; ++i)
11128 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11129 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11130 }
11131
11132 case DECL_EXPR:
11133 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11134 defining. We only want to walk into these fields of a type in this
11135 case and not in the general case of a mere reference to the type.
11136
11137 The criterion is as follows: if the field can be an expression, it
11138 must be walked only here. This should be in keeping with the fields
11139 that are directly gimplified in gimplify_type_sizes in order for the
11140 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11141 variable-sized types.
11142
11143 Note that DECLs get walked as part of processing the BIND_EXPR. */
11144 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11145 {
11146 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11147 if (TREE_CODE (*type_p) == ERROR_MARK)
11148 return NULL_TREE;
11149
11150 /* Call the function for the type. See if it returns anything or
11151 doesn't want us to continue. If we are to continue, walk both
11152 the normal fields and those for the declaration case. */
11153 result = (*func) (type_p, &walk_subtrees, data);
11154 if (result || !walk_subtrees)
11155 return result;
11156
11157 /* But do not walk a pointed-to type since it may itself need to
11158 be walked in the declaration case if it isn't anonymous. */
11159 if (!POINTER_TYPE_P (*type_p))
11160 {
11161 result = walk_type_fields (*type_p, func, data, pset, lh);
11162 if (result)
11163 return result;
11164 }
11165
11166 /* If this is a record type, also walk the fields. */
11167 if (RECORD_OR_UNION_TYPE_P (*type_p))
11168 {
11169 tree field;
11170
11171 for (field = TYPE_FIELDS (*type_p); field;
11172 field = DECL_CHAIN (field))
11173 {
11174 /* We'd like to look at the type of the field, but we can
11175 easily get infinite recursion. So assume it's pointed
11176 to elsewhere in the tree. Also, ignore things that
11177 aren't fields. */
11178 if (TREE_CODE (field) != FIELD_DECL)
11179 continue;
11180
11181 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11182 WALK_SUBTREE (DECL_SIZE (field));
11183 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11184 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11185 WALK_SUBTREE (DECL_QUALIFIER (field));
11186 }
11187 }
11188
11189 /* Same for scalar types. */
11190 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11191 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11192 || TREE_CODE (*type_p) == INTEGER_TYPE
11193 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11194 || TREE_CODE (*type_p) == REAL_TYPE)
11195 {
11196 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11197 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11198 }
11199
11200 WALK_SUBTREE (TYPE_SIZE (*type_p));
11201 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11202 }
11203 /* FALLTHRU */
11204
11205 default:
11206 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11207 {
11208 int i, len;
11209
11210 /* Walk over all the sub-trees of this operand. */
11211 len = TREE_OPERAND_LENGTH (*tp);
11212
11213 /* Go through the subtrees. We need to do this in forward order so
11214 that the scope of a FOR_EXPR is handled properly. */
11215 if (len)
11216 {
11217 for (i = 0; i < len - 1; ++i)
11218 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11219 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11220 }
11221 }
11222 /* If this is a type, walk the needed fields in the type. */
11223 else if (TYPE_P (*tp))
11224 return walk_type_fields (*tp, func, data, pset, lh);
11225 break;
11226 }
11227
11228 /* We didn't find what we were looking for. */
11229 return NULL_TREE;
11230
11231 #undef WALK_SUBTREE_TAIL
11232 }
11233 #undef WALK_SUBTREE
11234
11235 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11236
11237 tree
11238 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11239 walk_tree_lh lh)
11240 {
11241 tree result;
11242
11243 hash_set<tree> pset;
11244 result = walk_tree_1 (tp, func, data, &pset, lh);
11245 return result;
11246 }
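
/* Illustrative sketch, not part of the original file: a minimal
   walk_tree_fn callback plus a hypothetical helper showing how the
   duplicate-free walker above is typically used.  The helper name
   count_tree_nodes is made up for this example.  */

static tree
count_nodes_r (tree *tp ATTRIBUTE_UNUSED, int *walk_subtrees ATTRIBUTE_UNUSED,
               void *data)
{
  /* DATA points to an int counter; bump it for every node visited.
     Returning NULL_TREE tells the walker to keep going.  */
  ++*(int *) data;
  return NULL_TREE;
}

static ATTRIBUTE_UNUSED int
count_tree_nodes (tree expr)
{
  int n = 0;
  walk_tree_without_duplicates (&expr, count_nodes_r, &n);
  return n;
}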
11247
11248
11249 tree
11250 tree_block (tree t)
11251 {
11252 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11253
11254 if (IS_EXPR_CODE_CLASS (c))
11255 return LOCATION_BLOCK (t->exp.locus);
11256 gcc_unreachable ();
11257 return NULL;
11258 }
11259
11260 void
11261 tree_set_block (tree t, tree b)
11262 {
11263 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11264
11265 if (IS_EXPR_CODE_CLASS (c))
11266 {
11267 if (b)
11268 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11269 else
11270 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11271 }
11272 else
11273 gcc_unreachable ();
11274 }
11275
11276 /* Create a nameless artificial label and put it in the current
11277 function context. The label has a location of LOC. Returns the
11278 newly created label. */
11279
11280 tree
11281 create_artificial_label (location_t loc)
11282 {
11283 tree lab = build_decl (loc,
11284 LABEL_DECL, NULL_TREE, void_type_node);
11285
11286 DECL_ARTIFICIAL (lab) = 1;
11287 DECL_IGNORED_P (lab) = 1;
11288 DECL_CONTEXT (lab) = current_function_decl;
11289 return lab;
11290 }
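
/* Illustrative sketch, not part of the original file: lowering code
   commonly wraps such a label in a LABEL_EXPR so it can be emitted
   into a statement list.  The helper name is hypothetical.  */

static ATTRIBUTE_UNUSED tree
build_artificial_label_expr (location_t loc)
{
  tree lab = create_artificial_label (loc);
  return build1 (LABEL_EXPR, void_type_node, lab);
}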
11291
11292 /* Given a tree, try to return a useful variable name that we can use
11293 to prefix a temporary that is being assigned the value of the tree.
11294 I.e., given <temp> = &A, return A. */
11295
11296 const char *
11297 get_name (tree t)
11298 {
11299 tree stripped_decl;
11300
11301 stripped_decl = t;
11302 STRIP_NOPS (stripped_decl);
11303 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11304 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11305 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11306 {
11307 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11308 if (!name)
11309 return NULL;
11310 return IDENTIFIER_POINTER (name);
11311 }
11312 else
11313 {
11314 switch (TREE_CODE (stripped_decl))
11315 {
11316 case ADDR_EXPR:
11317 return get_name (TREE_OPERAND (stripped_decl, 0));
11318 default:
11319 return NULL;
11320 }
11321 }
11322 }
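
/* Illustrative sketch, not part of the original file: a hypothetical
   helper that uses get_name to pick a readable prefix for a temporary,
   falling back to "tmp" when nothing useful is found.  */

static ATTRIBUTE_UNUSED const char *
temp_prefix_for (tree val)
{
  const char *name = get_name (val);
  return name ? name : "tmp";
}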
11323
11324 /* Return true if FNTYPE has a variable argument list. */
11325
11326 bool
11327 stdarg_p (const_tree fntype)
11328 {
11329 function_args_iterator args_iter;
11330 tree n = NULL_TREE, t;
11331
11332 if (!fntype)
11333 return false;
11334
11335 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11336 {
11337 n = t;
11338 }
11339
11340 return n != NULL_TREE && n != void_type_node;
11341 }
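
/* Illustrative sketch, not part of the original file: stdarg_p
   distinguishes "int f (int, ...)" from "int g (int)".  The type
   construction helpers are assumed from tree.h.  */

static ATTRIBUTE_UNUSED void
stdarg_p_example (void)
{
  /* int g (int) -- the argument list ends in void_type_node.  */
  tree fixed = build_function_type_list (integer_type_node,
                                         integer_type_node, NULL_TREE);
  /* int f (int, ...) -- the argument list does not end in
     void_type_node, so the type is stdarg.  */
  tree varargs = build_varargs_function_type_list (integer_type_node,
                                                   integer_type_node,
                                                   NULL_TREE);
  gcc_assert (!stdarg_p (fixed));
  gcc_assert (stdarg_p (varargs));
}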
11342
11343 /* Return true if FNTYPE has a prototype. */
11344
11345 bool
11346 prototype_p (tree fntype)
11347 {
11348 tree t;
11349
11350 gcc_assert (fntype != NULL_TREE);
11351
11352 t = TYPE_ARG_TYPES (fntype);
11353 return (t != NULL_TREE);
11354 }
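
/* Illustrative sketch, not part of the original file: an unprototyped
   K&R-style type such as "int h ()" has no TYPE_ARG_TYPES, whereas
   "int g (void)" does.  */

static ATTRIBUTE_UNUSED void
prototype_p_example (void)
{
  tree unprototyped = build_function_type (integer_type_node, NULL_TREE);
  tree prototyped = build_function_type_list (integer_type_node, NULL_TREE);
  gcc_assert (!prototype_p (unprototyped));
  gcc_assert (prototype_p (prototyped));
}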
11355
11356 /* If BLOCK is inlined from an __attribute__((__artificial__))
11357 routine, return a pointer to the location from which it was
11358 called. */
11359 location_t *
11360 block_nonartificial_location (tree block)
11361 {
11362 location_t *ret = NULL;
11363
11364 while (block && TREE_CODE (block) == BLOCK
11365 && BLOCK_ABSTRACT_ORIGIN (block))
11366 {
11367 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11368
11369 while (TREE_CODE (ao) == BLOCK
11370 && BLOCK_ABSTRACT_ORIGIN (ao)
11371 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11372 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11373
11374 if (TREE_CODE (ao) == FUNCTION_DECL)
11375 {
11376 /* If AO is an artificial inline, point RET to the
11377 call site locus at which it has been inlined and continue
11378 the loop, in case AO's caller is also an artificial
11379 inline. */
11380 if (DECL_DECLARED_INLINE_P (ao)
11381 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11382 ret = &BLOCK_SOURCE_LOCATION (block);
11383 else
11384 break;
11385 }
11386 else if (TREE_CODE (ao) != BLOCK)
11387 break;
11388
11389 block = BLOCK_SUPERCONTEXT (block);
11390 }
11391 return ret;
11392 }
11393
11394
11395 /* If EXP is inlined from an __attribute__((__artificial__))
11396 function, return the location of the original call expression. */
11397
11398 location_t
11399 tree_nonartificial_location (tree exp)
11400 {
11401 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11402
11403 if (loc)
11404 return *loc;
11405 else
11406 return EXPR_LOCATION (exp);
11407 }
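
/* Illustrative sketch, not part of the original file: diagnostics
   about code inlined from artificial helpers usually want to point at
   the user's call site, which is what tree_nonartificial_location
   provides.  The wrapper below is hypothetical.  */

static ATTRIBUTE_UNUSED void
warn_at_user_location (tree exp, const char *msg)
{
  warning_at (tree_nonartificial_location (exp), 0, "%s", msg);
}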
11408
11409
11410 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11411 nodes. */
11412
11413 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11414
11415 static hashval_t
11416 cl_option_hash_hash (const void *x)
11417 {
11418 const_tree const t = (const_tree) x;
11419 const char *p;
11420 size_t i;
11421 size_t len = 0;
11422 hashval_t hash = 0;
11423
11424 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11425 {
11426 p = (const char *)TREE_OPTIMIZATION (t);
11427 len = sizeof (struct cl_optimization);
11428 }
11429
11430 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11431 {
11432 p = (const char *)TREE_TARGET_OPTION (t);
11433 len = sizeof (struct cl_target_option);
11434 }
11435
11436 else
11437 gcc_unreachable ();
11438
11439 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11440 something else. */
11441 for (i = 0; i < len; i++)
11442 if (p[i])
11443 hash = (hash << 4) ^ ((i << 2) | p[i]);
11444
11445 return hash;
11446 }
11447
11448 /* Return nonzero if the value represented by *X (an OPTIMIZATION_NODE or
11449 TARGET_OPTION_NODE) is the same as that given by *Y, a node of the
11450 same kind. */
11451
11452 static int
11453 cl_option_hash_eq (const void *x, const void *y)
11454 {
11455 const_tree const xt = (const_tree) x;
11456 const_tree const yt = (const_tree) y;
11457 const char *xp;
11458 const char *yp;
11459 size_t len;
11460
11461 if (TREE_CODE (xt) != TREE_CODE (yt))
11462 return 0;
11463
11464 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11465 {
11466 xp = (const char *)TREE_OPTIMIZATION (xt);
11467 yp = (const char *)TREE_OPTIMIZATION (yt);
11468 len = sizeof (struct cl_optimization);
11469 }
11470
11471 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11472 {
11473 xp = (const char *)TREE_TARGET_OPTION (xt);
11474 yp = (const char *)TREE_TARGET_OPTION (yt);
11475 len = sizeof (struct cl_target_option);
11476 }
11477
11478 else
11479 gcc_unreachable ();
11480
11481 return (memcmp (xp, yp, len) == 0);
11482 }
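
/* Illustrative note, not part of the original file: the two callbacks
   above are meant to be installed together when the option hash table
   is created, e.g. with something along the lines of

     cl_option_hash_table
       = htab_create_ggc (64, cl_option_hash_hash, cl_option_hash_eq, NULL);

   so that structurally identical option sets share a single node.  */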
11483
11484 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11485
11486 tree
11487 build_optimization_node (struct gcc_options *opts)
11488 {
11489 tree t;
11490 void **slot;
11491
11492 /* Use the cache of optimization nodes. */
11493
11494 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11495 opts);
11496
11497 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11498 t = (tree) *slot;
11499 if (!t)
11500 {
11501 /* Insert this one into the hash table. */
11502 t = cl_optimization_node;
11503 *slot = t;
11504
11505 /* Make a new node for next time round. */
11506 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11507 }
11508
11509 return t;
11510 }
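
/* Illustrative note, not part of the original file: a typical caller
   snapshots the options currently in effect for a declaration, e.g.

     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
       = build_optimization_node (&global_options);

   which is roughly what handling of the "optimize" attribute does.  */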
11511
11512 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11513
11514 tree
11515 build_target_option_node (struct gcc_options *opts)
11516 {
11517 tree t;
11518 void **slot;
11519
11520 /* Use the cache of optimization nodes. */
11521
11522 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11523 opts);
11524
11525 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11526 t = (tree) *slot;
11527 if (!t)
11528 {
11529 /* Insert this one into the hash table. */
11530 t = cl_target_option_node;
11531 *slot = t;
11532
11533 /* Make a new node for next time round. */
11534 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11535 }
11536
11537 return t;
11538 }
11539
11540 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11541 Called through htab_traverse. */
11542
11543 static int
11544 prepare_target_option_node_for_pch (void **slot, void *)
11545 {
11546 tree node = (tree) *slot;
11547 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11548 TREE_TARGET_GLOBALS (node) = NULL;
11549 return 1;
11550 }
11551
11552 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11553 so that they aren't saved during PCH writing. */
11554
11555 void
11556 prepare_target_option_nodes_for_pch (void)
11557 {
11558 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11559 NULL);
11560 }
11561
11562 /* Determine the "ultimate origin" of a block. The block may be an inlined
11563 instance of an inlined instance of a block which is local to an inline
11564 function, so we have to trace all of the way back through the origin chain
11565 to find out what sort of node actually served as the original seed for the
11566 given block. */
11567
11568 tree
11569 block_ultimate_origin (const_tree block)
11570 {
11571 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11572
11573 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11574 we're trying to output the abstract instance of this function. */
11575 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11576 return NULL_TREE;
11577
11578 if (immediate_origin == NULL_TREE)
11579 return NULL_TREE;
11580 else
11581 {
11582 tree ret_val;
11583 tree lookahead = immediate_origin;
11584
11585 do
11586 {
11587 ret_val = lookahead;
11588 lookahead = (TREE_CODE (ret_val) == BLOCK
11589 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11590 }
11591 while (lookahead != NULL && lookahead != ret_val);
11592
11593 /* The block's abstract origin chain may not be the *ultimate* origin of
11594 the block. It could lead to a DECL that has an abstract origin set.
11595 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11596 will give us if it has one). Note that DECL's abstract origins are
11597 supposed to be the most distant ancestor (or so decl_ultimate_origin
11598 claims), so we don't need to loop following the DECL origins. */
11599 if (DECL_P (ret_val))
11600 return DECL_ORIGIN (ret_val);
11601
11602 return ret_val;
11603 }
11604 }
11605
11606 /* Return true iff conversion in EXP generates no instruction. Mark
11607 it inline so that we fully inline into the stripping functions even
11608 though we have two uses of this function. */
11609
11610 static inline bool
11611 tree_nop_conversion (const_tree exp)
11612 {
11613 tree outer_type, inner_type;
11614
11615 if (!CONVERT_EXPR_P (exp)
11616 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11617 return false;
11618 if (TREE_OPERAND (exp, 0) == error_mark_node)
11619 return false;
11620
11621 outer_type = TREE_TYPE (exp);
11622 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11623
11624 if (!inner_type)
11625 return false;
11626
11627 /* Use precision rather than machine mode when we can, which gives
11628 the correct answer even for submode (bit-field) types. */
11629 if ((INTEGRAL_TYPE_P (outer_type)
11630 || POINTER_TYPE_P (outer_type)
11631 || TREE_CODE (outer_type) == OFFSET_TYPE)
11632 && (INTEGRAL_TYPE_P (inner_type)
11633 || POINTER_TYPE_P (inner_type)
11634 || TREE_CODE (inner_type) == OFFSET_TYPE))
11635 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11636
11637 /* Otherwise fall back on comparing machine modes (e.g. for
11638 aggregate types, floats). */
11639 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11640 }
11641
11642 /* Return true iff conversion in EXP generates no instruction. Don't
11643 consider conversions changing the signedness. */
11644
11645 static bool
11646 tree_sign_nop_conversion (const_tree exp)
11647 {
11648 tree outer_type, inner_type;
11649
11650 if (!tree_nop_conversion (exp))
11651 return false;
11652
11653 outer_type = TREE_TYPE (exp);
11654 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11655
11656 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11657 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11658 }
11659
11660 /* Strip conversions from EXP according to tree_nop_conversion and
11661 return the resulting expression. */
11662
11663 tree
11664 tree_strip_nop_conversions (tree exp)
11665 {
11666 while (tree_nop_conversion (exp))
11667 exp = TREE_OPERAND (exp, 0);
11668 return exp;
11669 }
11670
11671 /* Strip conversions from EXP according to tree_sign_nop_conversion
11672 and return the resulting expression. */
11673
11674 tree
11675 tree_strip_sign_nop_conversions (tree exp)
11676 {
11677 while (tree_sign_nop_conversion (exp))
11678 exp = TREE_OPERAND (exp, 0);
11679 return exp;
11680 }
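
/* Illustrative sketch, not part of the original file: a cast between
   integer types of the same precision, e.g. int -> unsigned int, is a
   no-op conversion and is removed by tree_strip_nop_conversions, while
   tree_strip_sign_nop_conversions keeps it because the signedness
   changes.  INT_VAR is assumed to be an int-typed tree.  */

static ATTRIBUTE_UNUSED void
strip_nop_conversions_example (tree int_var)
{
  tree cast = build1 (NOP_EXPR, unsigned_type_node, int_var);
  gcc_assert (tree_strip_nop_conversions (cast) == int_var);
  gcc_assert (tree_strip_sign_nop_conversions (cast) == cast);
}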
11681
11682 /* Strip any floating-point extensions from EXP and return the result. */
11683 tree
11684 strip_float_extensions (tree exp)
11685 {
11686 tree sub, expt, subt;
11687
11688 /* For a floating-point constant, look up the narrowest type that can hold
11689 it properly and handle it like (type)(narrowest_type)constant.
11690 This way we can optimize, for instance, a=a*2.0 where "a" is a float
11691 but 2.0 is a double constant. */
11692 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11693 {
11694 REAL_VALUE_TYPE orig;
11695 tree type = NULL;
11696
11697 orig = TREE_REAL_CST (exp);
11698 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11699 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11700 type = float_type_node;
11701 else if (TYPE_PRECISION (TREE_TYPE (exp))
11702 > TYPE_PRECISION (double_type_node)
11703 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11704 type = double_type_node;
11705 if (type)
11706 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11707 }
11708
11709 if (!CONVERT_EXPR_P (exp))
11710 return exp;
11711
11712 sub = TREE_OPERAND (exp, 0);
11713 subt = TREE_TYPE (sub);
11714 expt = TREE_TYPE (exp);
11715
11716 if (!FLOAT_TYPE_P (subt))
11717 return exp;
11718
11719 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11720 return exp;
11721
11722 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11723 return exp;
11724
11725 return strip_float_extensions (sub);
11726 }
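
/* Illustrative sketch, not part of the original file: for a widening
   cast such as "(double) f" with F of type float, the function above
   hands back F itself; combined with the REAL_CST handling it lets
   callers evaluate f * 2.0 entirely in float.  */

static ATTRIBUTE_UNUSED tree
strip_float_extensions_example (tree f)
{
  tree widened = build1 (NOP_EXPR, double_type_node, f);
  return strip_float_extensions (widened);  /* Yields F again.  */
}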
11727
11728 /* Strip out all handled components that produce invariant
11729 offsets. */
11730
11731 const_tree
11732 strip_invariant_refs (const_tree op)
11733 {
11734 while (handled_component_p (op))
11735 {
11736 switch (TREE_CODE (op))
11737 {
11738 case ARRAY_REF:
11739 case ARRAY_RANGE_REF:
11740 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11741 || TREE_OPERAND (op, 2) != NULL_TREE
11742 || TREE_OPERAND (op, 3) != NULL_TREE)
11743 return NULL;
11744 break;
11745
11746 case COMPONENT_REF:
11747 if (TREE_OPERAND (op, 2) != NULL_TREE)
11748 return NULL;
11749 break;
11750
11751 default:;
11752 }
11753 op = TREE_OPERAND (op, 0);
11754 }
11755
11756 return op;
11757 }
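
/* For instance, given "a.b[2]" (constant index, no variable offsets)
   the loop above strips down to "a", whereas "a.b[i]" yields NULL
   because the index is not a gimple constant.  */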
11758
11759 static GTY(()) tree gcc_eh_personality_decl;
11760
11761 /* Return the GCC personality function decl. */
11762
11763 tree
11764 lhd_gcc_personality (void)
11765 {
11766 if (!gcc_eh_personality_decl)
11767 gcc_eh_personality_decl = build_personality_function ("gcc");
11768 return gcc_eh_personality_decl;
11769 }
11770
11771 /* TARGET is a call target of a GIMPLE call statement
11772 (obtained by gimple_call_fn). Return true if it is an
11773 OBJ_TYPE_REF representing a virtual call of a C++ method.
11774 (As opposed to an OBJ_TYPE_REF representing ObjC calls
11775 through a cast, where the middle-end devirtualization machinery
11776 can't apply.) */
11777
11778 bool
11779 virtual_method_call_p (tree target)
11780 {
11781 if (TREE_CODE (target) != OBJ_TYPE_REF)
11782 return false;
11783 target = TREE_TYPE (target);
11784 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11785 target = TREE_TYPE (target);
11786 if (TREE_CODE (target) == FUNCTION_TYPE)
11787 return false;
11788 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11789 return true;
11790 }
11791
11792 /* REF is an OBJ_TYPE_REF; return the class the reference corresponds to. */
11793
11794 tree
11795 obj_type_ref_class (tree ref)
11796 {
11797 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11798 ref = TREE_TYPE (ref);
11799 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11800 ref = TREE_TYPE (ref);
11801 /* We look for the type THIS points to. ObjC also builds
11802 OBJ_TYPE_REF for non-method calls; their first parameter
11803 ID, however, also corresponds to the class type. */
11804 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11805 || TREE_CODE (ref) == FUNCTION_TYPE);
11806 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11807 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11808 return TREE_TYPE (ref);
11809 }
11810
11811 /* Return true if T is in an anonymous namespace. */
11812
11813 bool
11814 type_in_anonymous_namespace_p (const_tree t)
11815 {
11816 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11817 builtin types; those have CONTEXT NULL. */
11818 if (!TYPE_CONTEXT (t))
11819 return false;
11820 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11821 }
11822
11823 /* Try to find a base info of BINFO that would have its field decl at offset
11824 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11825 found, return it; otherwise return NULL_TREE. */
11826
11827 tree
11828 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11829 {
11830 tree type = BINFO_TYPE (binfo);
11831
11832 while (true)
11833 {
11834 HOST_WIDE_INT pos, size;
11835 tree fld;
11836 int i;
11837
11838 if (types_same_for_odr (type, expected_type))
11839 return binfo;
11840 if (offset < 0)
11841 return NULL_TREE;
11842
11843 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11844 {
11845 if (TREE_CODE (fld) != FIELD_DECL)
11846 continue;
11847
11848 pos = int_bit_position (fld);
11849 size = tree_to_uhwi (DECL_SIZE (fld));
11850 if (pos <= offset && (pos + size) > offset)
11851 break;
11852 }
11853 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11854 return NULL_TREE;
11855
11856 if (!DECL_ARTIFICIAL (fld))
11857 {
11858 binfo = TYPE_BINFO (TREE_TYPE (fld));
11859 if (!binfo)
11860 return NULL_TREE;
11861 }
11862 /* Offset 0 indicates the primary base, whose vtable contents are
11863 represented in the binfo for the derived class. */
11864 else if (offset != 0)
11865 {
11866 tree base_binfo, binfo2 = binfo;
11867
11868 /* Find the BINFO corresponding to FLD. This is made a bit harder
11869 by the fact that with virtual inheritance we may need to walk down
11870 the non-virtual inheritance chain. */
11871 while (true)
11872 {
11873 tree containing_binfo = NULL, found_binfo = NULL;
11874 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11875 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11876 {
11877 found_binfo = base_binfo;
11878 break;
11879 }
11880 else
11881 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11882 - tree_to_shwi (BINFO_OFFSET (binfo)))
11883 * BITS_PER_UNIT < pos
11884 /* Rule out types with no virtual methods or we can get confused
11885 here by zero-sized bases. */
11886 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11887 && (!containing_binfo
11888 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11889 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11890 containing_binfo = base_binfo;
11891 if (found_binfo)
11892 {
11893 binfo = found_binfo;
11894 break;
11895 }
11896 if (!containing_binfo)
11897 return NULL_TREE;
11898 binfo2 = containing_binfo;
11899 }
11900 }
11901
11902 type = TREE_TYPE (fld);
11903 offset -= pos;
11904 }
11905 }
11906
11907 /* Returns true if X is a typedef decl. */
11908
11909 bool
11910 is_typedef_decl (tree x)
11911 {
11912 return (x && TREE_CODE (x) == TYPE_DECL
11913 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11914 }
11915
11916 /* Returns true iff TYPE is a type variant created for a typedef. */
11917
11918 bool
11919 typedef_variant_p (tree type)
11920 {
11921 return is_typedef_decl (TYPE_NAME (type));
11922 }
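
/* For example, "typedef int myint;" produces a TYPE_DECL whose
   DECL_ORIGINAL_TYPE is int, so is_typedef_decl is true for that decl
   and typedef_variant_p is true for the variant type it names.  */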
11923
11924 /* Warn about a use of an identifier which was marked deprecated. */
11925 void
11926 warn_deprecated_use (tree node, tree attr)
11927 {
11928 const char *msg;
11929
11930 if (node == 0 || !warn_deprecated_decl)
11931 return;
11932
11933 if (!attr)
11934 {
11935 if (DECL_P (node))
11936 attr = DECL_ATTRIBUTES (node);
11937 else if (TYPE_P (node))
11938 {
11939 tree decl = TYPE_STUB_DECL (node);
11940 if (decl)
11941 attr = lookup_attribute ("deprecated",
11942 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11943 }
11944 }
11945
11946 if (attr)
11947 attr = lookup_attribute ("deprecated", attr);
11948
11949 if (attr)
11950 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
11951 else
11952 msg = NULL;
11953
11954 if (DECL_P (node))
11955 {
11956 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
11957 if (msg)
11958 warning (OPT_Wdeprecated_declarations,
11959 "%qD is deprecated (declared at %r%s:%d%R): %s",
11960 node, "locus", xloc.file, xloc.line, msg);
11961 else
11962 warning (OPT_Wdeprecated_declarations,
11963 "%qD is deprecated (declared at %r%s:%d%R)",
11964 node, "locus", xloc.file, xloc.line);
11965 }
11966 else if (TYPE_P (node))
11967 {
11968 tree what = NULL_TREE;
11969 tree decl = TYPE_STUB_DECL (node);
11970
11971 if (TYPE_NAME (node))
11972 {
11973 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
11974 what = TYPE_NAME (node);
11975 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
11976 && DECL_NAME (TYPE_NAME (node)))
11977 what = DECL_NAME (TYPE_NAME (node));
11978 }
11979
11980 if (decl)
11981 {
11982 expanded_location xloc
11983 = expand_location (DECL_SOURCE_LOCATION (decl));
11984 if (what)
11985 {
11986 if (msg)
11987 warning (OPT_Wdeprecated_declarations,
11988 "%qE is deprecated (declared at %r%s:%d%R): %s",
11989 what, "locus", xloc.file, xloc.line, msg);
11990 else
11991 warning (OPT_Wdeprecated_declarations,
11992 "%qE is deprecated (declared at %r%s:%d%R)",
11993 what, "locus", xloc.file, xloc.line);
11994 }
11995 else
11996 {
11997 if (msg)
11998 warning (OPT_Wdeprecated_declarations,
11999 "type is deprecated (declared at %r%s:%d%R): %s",
12000 "locus", xloc.file, xloc.line, msg);
12001 else
12002 warning (OPT_Wdeprecated_declarations,
12003 "type is deprecated (declared at %r%s:%d%R)",
12004 "locus", xloc.file, xloc.line);
12005 }
12006 }
12007 else
12008 {
12009 if (what)
12010 {
12011 if (msg)
12012 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12013 what, msg);
12014 else
12015 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12016 }
12017 else
12018 {
12019 if (msg)
12020 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12021 msg);
12022 else
12023 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12024 }
12025 }
12026 }
12027 }
12028
12029 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12030 somewhere in it. */
12031
12032 bool
12033 contains_bitfld_component_ref_p (const_tree ref)
12034 {
12035 while (handled_component_p (ref))
12036 {
12037 if (TREE_CODE (ref) == COMPONENT_REF
12038 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12039 return true;
12040 ref = TREE_OPERAND (ref, 0);
12041 }
12042
12043 return false;
12044 }
12045
12046 /* Try to determine whether a TRY_CATCH expression can fall through.
12047 This is a subroutine of block_may_fallthru. */
12048
12049 static bool
12050 try_catch_may_fallthru (const_tree stmt)
12051 {
12052 tree_stmt_iterator i;
12053
12054 /* If the TRY block can fall through, the whole TRY_CATCH can
12055 fall through. */
12056 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12057 return true;
12058
12059 i = tsi_start (TREE_OPERAND (stmt, 1));
12060 switch (TREE_CODE (tsi_stmt (i)))
12061 {
12062 case CATCH_EXPR:
12063 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12064 catch expression and a body. The whole TRY_CATCH may fall
12065 through iff any of the catch bodies falls through. */
12066 for (; !tsi_end_p (i); tsi_next (&i))
12067 {
12068 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12069 return true;
12070 }
12071 return false;
12072
12073 case EH_FILTER_EXPR:
12074 /* The exception filter expression only matters if there is an
12075 exception. If the exception does not match EH_FILTER_TYPES,
12076 we will execute EH_FILTER_FAILURE, and we will fall through
12077 if that falls through. If the exception does match
12078 EH_FILTER_TYPES, the stack unwinder will continue up the
12079 stack, so we will not fall through. We don't know whether we
12080 will throw an exception which matches EH_FILTER_TYPES or not,
12081 so we just ignore EH_FILTER_TYPES and assume that we might
12082 throw an exception which doesn't match. */
12083 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12084
12085 default:
12086 /* This case represents statements to be executed when an
12087 exception occurs. Those statements are implicitly followed
12088 by a RESX statement to resume execution after the exception.
12089 So in this case the TRY_CATCH never falls through. */
12090 return false;
12091 }
12092 }
12093
12094 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12095 need not be 100% accurate; simply be conservative and return true if we
12096 don't know. This is used only to avoid stupidly generating extra code.
12097 If we're wrong, we'll just delete the extra code later. */
12098
12099 bool
12100 block_may_fallthru (const_tree block)
12101 {
12102 /* This CONST_CAST is okay because expr_last returns its argument
12103 unmodified and we assign it to a const_tree. */
12104 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12105
12106 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12107 {
12108 case GOTO_EXPR:
12109 case RETURN_EXPR:
12110 /* Easy cases. If the last statement of the block implies
12111 control transfer, then we can't fall through. */
12112 return false;
12113
12114 case SWITCH_EXPR:
12115 /* If SWITCH_LABELS is set, this is lowered, and represents a
12116 branch to a selected label and hence cannot fall through.
12117 Otherwise SWITCH_BODY is set, and the switch can fall
12118 through. */
12119 return SWITCH_LABELS (stmt) == NULL_TREE;
12120
12121 case COND_EXPR:
12122 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12123 return true;
12124 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12125
12126 case BIND_EXPR:
12127 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12128
12129 case TRY_CATCH_EXPR:
12130 return try_catch_may_fallthru (stmt);
12131
12132 case TRY_FINALLY_EXPR:
12133 /* The finally clause is always executed after the try clause,
12134 so if it does not fall through, then the try-finally will not
12135 fall through. Otherwise, if the try clause does not fall
12136 through, then when the finally clause falls through it will
12137 resume execution wherever the try clause was going. So the
12138 whole try-finally will only fall through if both the try
12139 clause and the finally clause fall through. */
12140 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12141 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12142
12143 case MODIFY_EXPR:
12144 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12145 stmt = TREE_OPERAND (stmt, 1);
12146 else
12147 return true;
12148 /* FALLTHRU */
12149
12150 case CALL_EXPR:
12151 /* Functions that do not return do not fall through. */
12152 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12153
12154 case CLEANUP_POINT_EXPR:
12155 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12156
12157 case TARGET_EXPR:
12158 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12159
12160 case ERROR_MARK:
12161 return true;
12162
12163 default:
12164 return lang_hooks.block_may_fallthru (stmt);
12165 }
12166 }
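
/* For example, a block ending in "return x;" cannot fall through;
   one ending in "if (c) return x;" can, via the implicit empty else
   arm; and for codes this switch does not know about, the language
   hook gets the final, conservative say.  */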
12167
12168 /* True if we are using EH to handle cleanups. */
12169 static bool using_eh_for_cleanups_flag = false;
12170
12171 /* This routine is called from front ends to indicate EH should be used for
12172 cleanups. */
12173 void
12174 using_eh_for_cleanups (void)
12175 {
12176 using_eh_for_cleanups_flag = true;
12177 }
12178
12179 /* Query whether EH is used for cleanups. */
12180 bool
12181 using_eh_for_cleanups_p (void)
12182 {
12183 return using_eh_for_cleanups_flag;
12184 }
12185
12186 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12187 const char *
12188 get_tree_code_name (enum tree_code code)
12189 {
12190 const char *invalid = "<invalid tree code>";
12191
12192 if (code >= MAX_TREE_CODES)
12193 return invalid;
12194
12195 return tree_code_name[code];
12196 }
12197
12198 /* Drops the TREE_OVERFLOW flag from T. */
12199
12200 tree
12201 drop_tree_overflow (tree t)
12202 {
12203 gcc_checking_assert (TREE_OVERFLOW (t));
12204
12205 /* For tree codes with a sharing machinery re-build the result. */
12206 if (TREE_CODE (t) == INTEGER_CST)
12207 return wide_int_to_tree (TREE_TYPE (t), t);
12208
12209 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12210 and drop the flag. */
12211 t = copy_node (t);
12212 TREE_OVERFLOW (t) = 0;
12213 return t;
12214 }
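
/* Illustrative sketch, not part of the original file: folding code
   that has proved a flagged overflow to be harmless can clear it
   before using the constant further.  */

static ATTRIBUTE_UNUSED tree
clear_harmless_overflow (tree cst)
{
  if (TREE_OVERFLOW_P (cst))
    cst = drop_tree_overflow (cst);
  return cst;
}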
12215
12216 /* Given a memory reference expression T, return its base address.
12217 The base address of a memory reference expression is the main
12218 object being referenced. For instance, the base address for
12219 'array[i].fld[j]' is 'array'. You can think of this as stripping
12220 away the offset part from a memory address.
12221
12222 This function calls handled_component_p to strip away all the inner
12223 parts of the memory reference until it reaches the base object. */
12224
12225 tree
12226 get_base_address (tree t)
12227 {
12228 while (handled_component_p (t))
12229 t = TREE_OPERAND (t, 0);
12230
12231 if ((TREE_CODE (t) == MEM_REF
12232 || TREE_CODE (t) == TARGET_MEM_REF)
12233 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12234 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12235
12236 /* ??? Either the alias oracle or all callers need to properly deal
12237 with WITH_SIZE_EXPRs before we can look through those. */
12238 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12239 return NULL_TREE;
12240
12241 return t;
12242 }
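
/* Illustrative sketch, not part of the original file: a common pattern
   is to look at the base of a reference and special-case plain
   declarations.  */

static ATTRIBUTE_UNUSED bool
ref_based_on_decl_p (tree ref)
{
  tree base = get_base_address (ref);
  return base != NULL_TREE && DECL_P (base);
}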
12243
12244 #include "gt-tree.h"