tree.c (type_hash_lookup, [...]): Merge into ...
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "function.h"
42 #include "obstack.h"
43 #include "toplev.h" /* get_random_seed */
44 #include "hashtab.h"
45 #include "inchash.h"
46 #include "filenames.h"
47 #include "output.h"
48 #include "target.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "basic-block.h"
54 #include "bitmap.h"
55 #include "tree-ssa-alias.h"
56 #include "internal-fn.h"
57 #include "gimple-expr.h"
58 #include "is-a.h"
59 #include "gimple.h"
60 #include "gimple-iterator.h"
61 #include "gimplify.h"
62 #include "gimple-ssa.h"
63 #include "cgraph.h"
64 #include "tree-phinodes.h"
65 #include "stringpool.h"
66 #include "tree-ssanames.h"
67 #include "expr.h"
68 #include "tree-dfa.h"
69 #include "params.h"
70 #include "tree-pass.h"
71 #include "langhooks-def.h"
72 #include "diagnostic.h"
73 #include "tree-diagnostic.h"
74 #include "tree-pretty-print.h"
75 #include "except.h"
76 #include "debug.h"
77 #include "intl.h"
78 #include "wide-int.h"
79 #include "builtins.h"
80
81 /* Tree code classes. */
82
83 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
84 #define END_OF_BASE_TREE_CODES tcc_exceptional,
85
86 const enum tree_code_class tree_code_type[] = {
87 #include "all-tree.def"
88 };
89
90 #undef DEFTREECODE
91 #undef END_OF_BASE_TREE_CODES
92
93 /* Table indexed by tree code giving number of expression
94 operands beyond the fixed part of the node structure.
95 Not used for types or decls. */
96
97 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
98 #define END_OF_BASE_TREE_CODES 0,
99
100 const unsigned char tree_code_length[] = {
101 #include "all-tree.def"
102 };
103
104 #undef DEFTREECODE
105 #undef END_OF_BASE_TREE_CODES
106
107 /* Names of tree components.
108 Used for printing out the tree and error messages. */
109 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
110 #define END_OF_BASE_TREE_CODES "@dummy",
111
112 static const char *const tree_code_name[] = {
113 #include "all-tree.def"
114 };
115
116 #undef DEFTREECODE
117 #undef END_OF_BASE_TREE_CODES
118
119 /* Each tree code class has an associated string representation.
120 These must correspond to the tree_code_class entries. */
121
122 const char *const tree_code_class_strings[] =
123 {
124 "exceptional",
125 "constant",
126 "type",
127 "declaration",
128 "reference",
129 "comparison",
130 "unary",
131 "binary",
132 "statement",
133 "vl_exp",
134 "expression"
135 };
136
137 /* obstack.[ch] explicitly declined to prototype this. */
138 extern int _obstack_allocated_p (struct obstack *h, void *obj);
139
140 /* Statistics-gathering stuff. */
141
142 static int tree_code_counts[MAX_TREE_CODES];
143 int tree_node_counts[(int) all_kinds];
144 int tree_node_sizes[(int) all_kinds];
145
146 /* Keep in sync with tree.h:enum tree_node_kind. */
147 static const char * const tree_node_kind_names[] = {
148 "decls",
149 "types",
150 "blocks",
151 "stmts",
152 "refs",
153 "exprs",
154 "constants",
155 "identifiers",
156 "vecs",
157 "binfos",
158 "ssa names",
159 "constructors",
160 "random kinds",
161 "lang_decl kinds",
162 "lang_type kinds",
163 "omp clauses",
164 };
165
166 /* Unique id for next decl created. */
167 static GTY(()) int next_decl_uid;
168 /* Unique id for next type created. */
169 static GTY(()) int next_type_uid = 1;
170 /* Unique id for next debug decl created. Use negative numbers,
171 to catch erroneous uses. */
172 static GTY(()) int next_debug_decl_uid;
173
174 /* Since we cannot rehash a type after it is in the table, we have to
175 keep the hash code. */
176
177 struct GTY(()) type_hash {
178 unsigned long hash;
179 tree type;
180 };
181
182 /* Initial size of the hash table (rounded to next prime). */
183 #define TYPE_HASH_INITIAL_SIZE 1000
184
185 /* Now here is the hash table. When recording a type, it is added to
186 the slot whose index is the hash code. Note that the hash table is
187 used for several kinds of types (function types, array types and
188 array index range types, for now). While all these live in the
189 same table, they are completely independent, and the hash code is
190 computed differently for each of these. */
191
192 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
193 htab_t type_hash_table;
194
195 /* Hash table and temporary node for larger integer const values. */
196 static GTY (()) tree int_cst_node;
197 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
198 htab_t int_cst_hash_table;
199
200 /* Hash table for optimization flags and target option flags. Use the same
201 hash table for both sets of options. Nodes for building the current
202 optimization and target option nodes. The assumption is most of the time
203 the options created will already be in the hash table, so we avoid
204 allocating and freeing up a node repeatably. */
205 static GTY (()) tree cl_optimization_node;
206 static GTY (()) tree cl_target_option_node;
207 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
208 htab_t cl_option_hash_table;
209
210 /* General tree->tree mapping structure for use in hash tables. */
211
212
213 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
214 htab_t debug_expr_for_decl;
215
216 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
217 htab_t value_expr_for_decl;
218
219 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
220 htab_t debug_args_for_decl;
221
222 static void set_type_quals (tree, int);
223 static int type_hash_eq (const void *, const void *);
224 static hashval_t type_hash_hash (const void *);
225 static hashval_t int_cst_hash_hash (const void *);
226 static int int_cst_hash_eq (const void *, const void *);
227 static hashval_t cl_option_hash_hash (const void *);
228 static int cl_option_hash_eq (const void *, const void *);
229 static void print_type_hash_statistics (void);
230 static void print_debug_expr_statistics (void);
231 static void print_value_expr_statistics (void);
232 static int type_hash_marked_p (const void *);
233 static void type_hash_list (const_tree, inchash::hash &);
234 static void attribute_hash_list (const_tree, inchash::hash &);
235
236 tree global_trees[TI_MAX];
237 tree integer_types[itk_none];
238
239 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
240
241 /* Number of operands for each OpenMP clause. */
242 unsigned const char omp_clause_num_ops[] =
243 {
244 0, /* OMP_CLAUSE_ERROR */
245 1, /* OMP_CLAUSE_PRIVATE */
246 1, /* OMP_CLAUSE_SHARED */
247 1, /* OMP_CLAUSE_FIRSTPRIVATE */
248 2, /* OMP_CLAUSE_LASTPRIVATE */
249 4, /* OMP_CLAUSE_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 2, /* OMP_CLAUSE_ALIGNED */
254 1, /* OMP_CLAUSE_DEPEND */
255 1, /* OMP_CLAUSE_UNIFORM */
256 2, /* OMP_CLAUSE_FROM */
257 2, /* OMP_CLAUSE_TO */
258 2, /* OMP_CLAUSE_MAP */
259 1, /* OMP_CLAUSE__LOOPTEMP_ */
260 1, /* OMP_CLAUSE_IF */
261 1, /* OMP_CLAUSE_NUM_THREADS */
262 1, /* OMP_CLAUSE_SCHEDULE */
263 0, /* OMP_CLAUSE_NOWAIT */
264 0, /* OMP_CLAUSE_ORDERED */
265 0, /* OMP_CLAUSE_DEFAULT */
266 3, /* OMP_CLAUSE_COLLAPSE */
267 0, /* OMP_CLAUSE_UNTIED */
268 1, /* OMP_CLAUSE_FINAL */
269 0, /* OMP_CLAUSE_MERGEABLE */
270 1, /* OMP_CLAUSE_DEVICE */
271 1, /* OMP_CLAUSE_DIST_SCHEDULE */
272 0, /* OMP_CLAUSE_INBRANCH */
273 0, /* OMP_CLAUSE_NOTINBRANCH */
274 1, /* OMP_CLAUSE_NUM_TEAMS */
275 1, /* OMP_CLAUSE_THREAD_LIMIT */
276 0, /* OMP_CLAUSE_PROC_BIND */
277 1, /* OMP_CLAUSE_SAFELEN */
278 1, /* OMP_CLAUSE_SIMDLEN */
279 0, /* OMP_CLAUSE_FOR */
280 0, /* OMP_CLAUSE_PARALLEL */
281 0, /* OMP_CLAUSE_SECTIONS */
282 0, /* OMP_CLAUSE_TASKGROUP */
283 1, /* OMP_CLAUSE__SIMDUID_ */
284 };
285
286 const char * const omp_clause_code_name[] =
287 {
288 "error_clause",
289 "private",
290 "shared",
291 "firstprivate",
292 "lastprivate",
293 "reduction",
294 "copyin",
295 "copyprivate",
296 "linear",
297 "aligned",
298 "depend",
299 "uniform",
300 "from",
301 "to",
302 "map",
303 "_looptemp_",
304 "if",
305 "num_threads",
306 "schedule",
307 "nowait",
308 "ordered",
309 "default",
310 "collapse",
311 "untied",
312 "final",
313 "mergeable",
314 "device",
315 "dist_schedule",
316 "inbranch",
317 "notinbranch",
318 "num_teams",
319 "thread_limit",
320 "proc_bind",
321 "safelen",
322 "simdlen",
323 "for",
324 "parallel",
325 "sections",
326 "taskgroup",
327 "_simduid_"
328 };
329
330
331 /* Return the tree node structure used by tree code CODE. */
332
333 static inline enum tree_node_structure_enum
334 tree_node_structure_for_code (enum tree_code code)
335 {
336 switch (TREE_CODE_CLASS (code))
337 {
338 case tcc_declaration:
339 {
340 switch (code)
341 {
342 case FIELD_DECL:
343 return TS_FIELD_DECL;
344 case PARM_DECL:
345 return TS_PARM_DECL;
346 case VAR_DECL:
347 return TS_VAR_DECL;
348 case LABEL_DECL:
349 return TS_LABEL_DECL;
350 case RESULT_DECL:
351 return TS_RESULT_DECL;
352 case DEBUG_EXPR_DECL:
353 return TS_DECL_WRTL;
354 case CONST_DECL:
355 return TS_CONST_DECL;
356 case TYPE_DECL:
357 return TS_TYPE_DECL;
358 case FUNCTION_DECL:
359 return TS_FUNCTION_DECL;
360 case TRANSLATION_UNIT_DECL:
361 return TS_TRANSLATION_UNIT_DECL;
362 default:
363 return TS_DECL_NON_COMMON;
364 }
365 }
366 case tcc_type:
367 return TS_TYPE_NON_COMMON;
368 case tcc_reference:
369 case tcc_comparison:
370 case tcc_unary:
371 case tcc_binary:
372 case tcc_expression:
373 case tcc_statement:
374 case tcc_vl_exp:
375 return TS_EXP;
376 default: /* tcc_constant and tcc_exceptional */
377 break;
378 }
379 switch (code)
380 {
381 /* tcc_constant cases. */
382 case VOID_CST: return TS_TYPED;
383 case INTEGER_CST: return TS_INT_CST;
384 case REAL_CST: return TS_REAL_CST;
385 case FIXED_CST: return TS_FIXED_CST;
386 case COMPLEX_CST: return TS_COMPLEX;
387 case VECTOR_CST: return TS_VECTOR;
388 case STRING_CST: return TS_STRING;
389 /* tcc_exceptional cases. */
390 case ERROR_MARK: return TS_COMMON;
391 case IDENTIFIER_NODE: return TS_IDENTIFIER;
392 case TREE_LIST: return TS_LIST;
393 case TREE_VEC: return TS_VEC;
394 case SSA_NAME: return TS_SSA_NAME;
395 case PLACEHOLDER_EXPR: return TS_COMMON;
396 case STATEMENT_LIST: return TS_STATEMENT_LIST;
397 case BLOCK: return TS_BLOCK;
398 case CONSTRUCTOR: return TS_CONSTRUCTOR;
399 case TREE_BINFO: return TS_BINFO;
400 case OMP_CLAUSE: return TS_OMP_CLAUSE;
401 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
402 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
403
404 default:
405 gcc_unreachable ();
406 }
407 }
408
409
410 /* Initialize tree_contains_struct to describe the hierarchy of tree
411 nodes. */
412
413 static void
414 initialize_tree_contains_struct (void)
415 {
416 unsigned i;
417
418 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
419 {
420 enum tree_code code;
421 enum tree_node_structure_enum ts_code;
422
423 code = (enum tree_code) i;
424 ts_code = tree_node_structure_for_code (code);
425
426 /* Mark the TS structure itself. */
427 tree_contains_struct[code][ts_code] = 1;
428
429 /* Mark all the structures that TS is derived from. */
430 switch (ts_code)
431 {
432 case TS_TYPED:
433 case TS_BLOCK:
434 MARK_TS_BASE (code);
435 break;
436
437 case TS_COMMON:
438 case TS_INT_CST:
439 case TS_REAL_CST:
440 case TS_FIXED_CST:
441 case TS_VECTOR:
442 case TS_STRING:
443 case TS_COMPLEX:
444 case TS_SSA_NAME:
445 case TS_CONSTRUCTOR:
446 case TS_EXP:
447 case TS_STATEMENT_LIST:
448 MARK_TS_TYPED (code);
449 break;
450
451 case TS_IDENTIFIER:
452 case TS_DECL_MINIMAL:
453 case TS_TYPE_COMMON:
454 case TS_LIST:
455 case TS_VEC:
456 case TS_BINFO:
457 case TS_OMP_CLAUSE:
458 case TS_OPTIMIZATION:
459 case TS_TARGET_OPTION:
460 MARK_TS_COMMON (code);
461 break;
462
463 case TS_TYPE_WITH_LANG_SPECIFIC:
464 MARK_TS_TYPE_COMMON (code);
465 break;
466
467 case TS_TYPE_NON_COMMON:
468 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
469 break;
470
471 case TS_DECL_COMMON:
472 MARK_TS_DECL_MINIMAL (code);
473 break;
474
475 case TS_DECL_WRTL:
476 case TS_CONST_DECL:
477 MARK_TS_DECL_COMMON (code);
478 break;
479
480 case TS_DECL_NON_COMMON:
481 MARK_TS_DECL_WITH_VIS (code);
482 break;
483
484 case TS_DECL_WITH_VIS:
485 case TS_PARM_DECL:
486 case TS_LABEL_DECL:
487 case TS_RESULT_DECL:
488 MARK_TS_DECL_WRTL (code);
489 break;
490
491 case TS_FIELD_DECL:
492 MARK_TS_DECL_COMMON (code);
493 break;
494
495 case TS_VAR_DECL:
496 MARK_TS_DECL_WITH_VIS (code);
497 break;
498
499 case TS_TYPE_DECL:
500 case TS_FUNCTION_DECL:
501 MARK_TS_DECL_NON_COMMON (code);
502 break;
503
504 case TS_TRANSLATION_UNIT_DECL:
505 MARK_TS_DECL_COMMON (code);
506 break;
507
508 default:
509 gcc_unreachable ();
510 }
511 }
512
513 /* Basic consistency checks for attributes used in fold. */
514 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
515 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
516 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
517 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
518 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
519 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
520 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
521 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
522 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
523 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
524 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
525 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
526 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
527 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
528 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
529 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
530 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
531 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
532 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
533 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
534 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
535 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
536 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
537 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
538 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
539 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
540 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
541 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
542 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
543 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
544 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
545 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
546 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
547 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
548 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
549 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
550 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
551 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
552 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
553 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
554 }
555
556
557 /* Init tree.c. */
558
559 void
560 init_ttree (void)
561 {
562 /* Initialize the hash table of types. */
563 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
564 type_hash_eq, 0);
565
566 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
567 tree_decl_map_eq, 0);
568
569 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
570 tree_decl_map_eq, 0);
571
572 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
573 int_cst_hash_eq, NULL);
574
575 int_cst_node = make_int_cst (1, 1);
576
577 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
578 cl_option_hash_eq, NULL);
579
580 cl_optimization_node = make_node (OPTIMIZATION_NODE);
581 cl_target_option_node = make_node (TARGET_OPTION_NODE);
582
583 /* Initialize the tree_contains_struct array. */
584 initialize_tree_contains_struct ();
585 lang_hooks.init_ts ();
586 }
587
588 \f
589 /* The name of the object as the assembler will see it (but before any
590 translations made by ASM_OUTPUT_LABELREF). Often this is the same
591 as DECL_NAME. It is an IDENTIFIER_NODE. */
592 tree
593 decl_assembler_name (tree decl)
594 {
595 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
596 lang_hooks.set_decl_assembler_name (decl);
597 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
598 }
599
600 /* When the target supports COMDAT groups, this indicates which group the
601 DECL is associated with. This can be either an IDENTIFIER_NODE or a
602 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
603 tree
604 decl_comdat_group (const_tree node)
605 {
606 struct symtab_node *snode = symtab_node::get (node);
607 if (!snode)
608 return NULL;
609 return snode->get_comdat_group ();
610 }
611
612 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
613 tree
614 decl_comdat_group_id (const_tree node)
615 {
616 struct symtab_node *snode = symtab_node::get (node);
617 if (!snode)
618 return NULL;
619 return snode->get_comdat_group_id ();
620 }
621
622 /* When the target supports named section, return its name as IDENTIFIER_NODE
623 or NULL if it is in no section. */
624 const char *
625 decl_section_name (const_tree node)
626 {
627 struct symtab_node *snode = symtab_node::get (node);
628 if (!snode)
629 return NULL;
630 return snode->get_section ();
631 }
632
633 /* Set section section name of NODE to VALUE (that is expected to
634 be identifier node) */
635 void
636 set_decl_section_name (tree node, const char *value)
637 {
638 struct symtab_node *snode;
639
640 if (value == NULL)
641 {
642 snode = symtab_node::get (node);
643 if (!snode)
644 return;
645 }
646 else if (TREE_CODE (node) == VAR_DECL)
647 snode = varpool_node::get_create (node);
648 else
649 snode = cgraph_node::get_create (node);
650 snode->set_section (value);
651 }
652
653 /* Return TLS model of a variable NODE. */
654 enum tls_model
655 decl_tls_model (const_tree node)
656 {
657 struct varpool_node *snode = varpool_node::get (node);
658 if (!snode)
659 return TLS_MODEL_NONE;
660 return snode->tls_model;
661 }
662
663 /* Set TLS model of variable NODE to MODEL. */
664 void
665 set_decl_tls_model (tree node, enum tls_model model)
666 {
667 struct varpool_node *vnode;
668
669 if (model == TLS_MODEL_NONE)
670 {
671 vnode = varpool_node::get (node);
672 if (!vnode)
673 return;
674 }
675 else
676 vnode = varpool_node::get_create (node);
677 vnode->tls_model = model;
678 }
679
680 /* Compute the number of bytes occupied by a tree with code CODE.
681 This function cannot be used for nodes that have variable sizes,
682 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
683 size_t
684 tree_code_size (enum tree_code code)
685 {
686 switch (TREE_CODE_CLASS (code))
687 {
688 case tcc_declaration: /* A decl node */
689 {
690 switch (code)
691 {
692 case FIELD_DECL:
693 return sizeof (struct tree_field_decl);
694 case PARM_DECL:
695 return sizeof (struct tree_parm_decl);
696 case VAR_DECL:
697 return sizeof (struct tree_var_decl);
698 case LABEL_DECL:
699 return sizeof (struct tree_label_decl);
700 case RESULT_DECL:
701 return sizeof (struct tree_result_decl);
702 case CONST_DECL:
703 return sizeof (struct tree_const_decl);
704 case TYPE_DECL:
705 return sizeof (struct tree_type_decl);
706 case FUNCTION_DECL:
707 return sizeof (struct tree_function_decl);
708 case DEBUG_EXPR_DECL:
709 return sizeof (struct tree_decl_with_rtl);
710 case TRANSLATION_UNIT_DECL:
711 return sizeof (struct tree_translation_unit_decl);
712 case NAMESPACE_DECL:
713 case IMPORTED_DECL:
714 case NAMELIST_DECL:
715 return sizeof (struct tree_decl_non_common);
716 default:
717 return lang_hooks.tree_size (code);
718 }
719 }
720
721 case tcc_type: /* a type node */
722 return sizeof (struct tree_type_non_common);
723
724 case tcc_reference: /* a reference */
725 case tcc_expression: /* an expression */
726 case tcc_statement: /* an expression with side effects */
727 case tcc_comparison: /* a comparison expression */
728 case tcc_unary: /* a unary arithmetic expression */
729 case tcc_binary: /* a binary arithmetic expression */
730 return (sizeof (struct tree_exp)
731 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
732
733 case tcc_constant: /* a constant */
734 switch (code)
735 {
736 case VOID_CST: return sizeof (struct tree_typed);
737 case INTEGER_CST: gcc_unreachable ();
738 case REAL_CST: return sizeof (struct tree_real_cst);
739 case FIXED_CST: return sizeof (struct tree_fixed_cst);
740 case COMPLEX_CST: return sizeof (struct tree_complex);
741 case VECTOR_CST: return sizeof (struct tree_vector);
742 case STRING_CST: gcc_unreachable ();
743 default:
744 return lang_hooks.tree_size (code);
745 }
746
747 case tcc_exceptional: /* something random, like an identifier. */
748 switch (code)
749 {
750 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
751 case TREE_LIST: return sizeof (struct tree_list);
752
753 case ERROR_MARK:
754 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
755
756 case TREE_VEC:
757 case OMP_CLAUSE: gcc_unreachable ();
758
759 case SSA_NAME: return sizeof (struct tree_ssa_name);
760
761 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
762 case BLOCK: return sizeof (struct tree_block);
763 case CONSTRUCTOR: return sizeof (struct tree_constructor);
764 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
765 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
766
767 default:
768 return lang_hooks.tree_size (code);
769 }
770
771 default:
772 gcc_unreachable ();
773 }
774 }
775
776 /* Compute the number of bytes occupied by NODE. This routine only
777 looks at TREE_CODE, except for those nodes that have variable sizes. */
778 size_t
779 tree_size (const_tree node)
780 {
781 const enum tree_code code = TREE_CODE (node);
782 switch (code)
783 {
784 case INTEGER_CST:
785 return (sizeof (struct tree_int_cst)
786 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
787
788 case TREE_BINFO:
789 return (offsetof (struct tree_binfo, base_binfos)
790 + vec<tree, va_gc>
791 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
792
793 case TREE_VEC:
794 return (sizeof (struct tree_vec)
795 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
796
797 case VECTOR_CST:
798 return (sizeof (struct tree_vector)
799 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
800
801 case STRING_CST:
802 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
803
804 case OMP_CLAUSE:
805 return (sizeof (struct tree_omp_clause)
806 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
807 * sizeof (tree));
808
809 default:
810 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
811 return (sizeof (struct tree_exp)
812 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
813 else
814 return tree_code_size (code);
815 }
816 }
817
818 /* Record interesting allocation statistics for a tree node with CODE
819 and LENGTH. */
820
821 static void
822 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
823 size_t length ATTRIBUTE_UNUSED)
824 {
825 enum tree_code_class type = TREE_CODE_CLASS (code);
826 tree_node_kind kind;
827
828 if (!GATHER_STATISTICS)
829 return;
830
831 switch (type)
832 {
833 case tcc_declaration: /* A decl node */
834 kind = d_kind;
835 break;
836
837 case tcc_type: /* a type node */
838 kind = t_kind;
839 break;
840
841 case tcc_statement: /* an expression with side effects */
842 kind = s_kind;
843 break;
844
845 case tcc_reference: /* a reference */
846 kind = r_kind;
847 break;
848
849 case tcc_expression: /* an expression */
850 case tcc_comparison: /* a comparison expression */
851 case tcc_unary: /* a unary arithmetic expression */
852 case tcc_binary: /* a binary arithmetic expression */
853 kind = e_kind;
854 break;
855
856 case tcc_constant: /* a constant */
857 kind = c_kind;
858 break;
859
860 case tcc_exceptional: /* something random, like an identifier. */
861 switch (code)
862 {
863 case IDENTIFIER_NODE:
864 kind = id_kind;
865 break;
866
867 case TREE_VEC:
868 kind = vec_kind;
869 break;
870
871 case TREE_BINFO:
872 kind = binfo_kind;
873 break;
874
875 case SSA_NAME:
876 kind = ssa_name_kind;
877 break;
878
879 case BLOCK:
880 kind = b_kind;
881 break;
882
883 case CONSTRUCTOR:
884 kind = constr_kind;
885 break;
886
887 case OMP_CLAUSE:
888 kind = omp_clause_kind;
889 break;
890
891 default:
892 kind = x_kind;
893 break;
894 }
895 break;
896
897 case tcc_vl_exp:
898 kind = e_kind;
899 break;
900
901 default:
902 gcc_unreachable ();
903 }
904
905 tree_code_counts[(int) code]++;
906 tree_node_counts[(int) kind]++;
907 tree_node_sizes[(int) kind] += length;
908 }
909
910 /* Allocate and return a new UID from the DECL_UID namespace. */
911
912 int
913 allocate_decl_uid (void)
914 {
915 return next_decl_uid++;
916 }
917
918 /* Return a newly allocated node of code CODE. For decl and type
919 nodes, some other fields are initialized. The rest of the node is
920 initialized to zero. This function cannot be used for TREE_VEC,
921 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
922 tree_code_size.
923
924 Achoo! I got a code in the node. */
925
926 tree
927 make_node_stat (enum tree_code code MEM_STAT_DECL)
928 {
929 tree t;
930 enum tree_code_class type = TREE_CODE_CLASS (code);
931 size_t length = tree_code_size (code);
932
933 record_node_allocation_statistics (code, length);
934
935 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
936 TREE_SET_CODE (t, code);
937
938 switch (type)
939 {
940 case tcc_statement:
941 TREE_SIDE_EFFECTS (t) = 1;
942 break;
943
944 case tcc_declaration:
945 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
946 {
947 if (code == FUNCTION_DECL)
948 {
949 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
950 DECL_MODE (t) = FUNCTION_MODE;
951 }
952 else
953 DECL_ALIGN (t) = 1;
954 }
955 DECL_SOURCE_LOCATION (t) = input_location;
956 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
957 DECL_UID (t) = --next_debug_decl_uid;
958 else
959 {
960 DECL_UID (t) = allocate_decl_uid ();
961 SET_DECL_PT_UID (t, -1);
962 }
963 if (TREE_CODE (t) == LABEL_DECL)
964 LABEL_DECL_UID (t) = -1;
965
966 break;
967
968 case tcc_type:
969 TYPE_UID (t) = next_type_uid++;
970 TYPE_ALIGN (t) = BITS_PER_UNIT;
971 TYPE_USER_ALIGN (t) = 0;
972 TYPE_MAIN_VARIANT (t) = t;
973 TYPE_CANONICAL (t) = t;
974
975 /* Default to no attributes for type, but let target change that. */
976 TYPE_ATTRIBUTES (t) = NULL_TREE;
977 targetm.set_default_type_attributes (t);
978
979 /* We have not yet computed the alias set for this type. */
980 TYPE_ALIAS_SET (t) = -1;
981 break;
982
983 case tcc_constant:
984 TREE_CONSTANT (t) = 1;
985 break;
986
987 case tcc_expression:
988 switch (code)
989 {
990 case INIT_EXPR:
991 case MODIFY_EXPR:
992 case VA_ARG_EXPR:
993 case PREDECREMENT_EXPR:
994 case PREINCREMENT_EXPR:
995 case POSTDECREMENT_EXPR:
996 case POSTINCREMENT_EXPR:
997 /* All of these have side-effects, no matter what their
998 operands are. */
999 TREE_SIDE_EFFECTS (t) = 1;
1000 break;
1001
1002 default:
1003 break;
1004 }
1005 break;
1006
1007 default:
1008 /* Other classes need no special treatment. */
1009 break;
1010 }
1011
1012 return t;
1013 }
1014 \f
1015 /* Return a new node with the same contents as NODE except that its
1016 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1017
1018 tree
1019 copy_node_stat (tree node MEM_STAT_DECL)
1020 {
1021 tree t;
1022 enum tree_code code = TREE_CODE (node);
1023 size_t length;
1024
1025 gcc_assert (code != STATEMENT_LIST);
1026
1027 length = tree_size (node);
1028 record_node_allocation_statistics (code, length);
1029 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1030 memcpy (t, node, length);
1031
1032 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1033 TREE_CHAIN (t) = 0;
1034 TREE_ASM_WRITTEN (t) = 0;
1035 TREE_VISITED (t) = 0;
1036
1037 if (TREE_CODE_CLASS (code) == tcc_declaration)
1038 {
1039 if (code == DEBUG_EXPR_DECL)
1040 DECL_UID (t) = --next_debug_decl_uid;
1041 else
1042 {
1043 DECL_UID (t) = allocate_decl_uid ();
1044 if (DECL_PT_UID_SET_P (node))
1045 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1046 }
1047 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1048 && DECL_HAS_VALUE_EXPR_P (node))
1049 {
1050 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1051 DECL_HAS_VALUE_EXPR_P (t) = 1;
1052 }
1053 /* DECL_DEBUG_EXPR is copied explicitely by callers. */
1054 if (TREE_CODE (node) == VAR_DECL)
1055 {
1056 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1057 t->decl_with_vis.symtab_node = NULL;
1058 }
1059 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1060 {
1061 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1062 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1063 }
1064 if (TREE_CODE (node) == FUNCTION_DECL)
1065 {
1066 DECL_STRUCT_FUNCTION (t) = NULL;
1067 t->decl_with_vis.symtab_node = NULL;
1068 }
1069 }
1070 else if (TREE_CODE_CLASS (code) == tcc_type)
1071 {
1072 TYPE_UID (t) = next_type_uid++;
1073 /* The following is so that the debug code for
1074 the copy is different from the original type.
1075 The two statements usually duplicate each other
1076 (because they clear fields of the same union),
1077 but the optimizer should catch that. */
1078 TYPE_SYMTAB_POINTER (t) = 0;
1079 TYPE_SYMTAB_ADDRESS (t) = 0;
1080
1081 /* Do not copy the values cache. */
1082 if (TYPE_CACHED_VALUES_P (t))
1083 {
1084 TYPE_CACHED_VALUES_P (t) = 0;
1085 TYPE_CACHED_VALUES (t) = NULL_TREE;
1086 }
1087 }
1088
1089 return t;
1090 }
1091
1092 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1093 For example, this can copy a list made of TREE_LIST nodes. */
1094
1095 tree
1096 copy_list (tree list)
1097 {
1098 tree head;
1099 tree prev, next;
1100
1101 if (list == 0)
1102 return 0;
1103
1104 head = prev = copy_node (list);
1105 next = TREE_CHAIN (list);
1106 while (next)
1107 {
1108 TREE_CHAIN (prev) = copy_node (next);
1109 prev = TREE_CHAIN (prev);
1110 next = TREE_CHAIN (next);
1111 }
1112 return head;
1113 }
1114
1115 \f
1116 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1117 INTEGER_CST with value CST and type TYPE. */
1118
1119 static unsigned int
1120 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1121 {
1122 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1123 /* We need an extra zero HWI if CST is an unsigned integer with its
1124 upper bit set, and if CST occupies a whole number of HWIs. */
1125 if (TYPE_UNSIGNED (type)
1126 && wi::neg_p (cst)
1127 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1128 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1129 return cst.get_len ();
1130 }
1131
1132 /* Return a new INTEGER_CST with value CST and type TYPE. */
1133
1134 static tree
1135 build_new_int_cst (tree type, const wide_int &cst)
1136 {
1137 unsigned int len = cst.get_len ();
1138 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1139 tree nt = make_int_cst (len, ext_len);
1140
1141 if (len < ext_len)
1142 {
1143 --ext_len;
1144 TREE_INT_CST_ELT (nt, ext_len) = 0;
1145 for (unsigned int i = len; i < ext_len; ++i)
1146 TREE_INT_CST_ELT (nt, i) = -1;
1147 }
1148 else if (TYPE_UNSIGNED (type)
1149 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1150 {
1151 len--;
1152 TREE_INT_CST_ELT (nt, len)
1153 = zext_hwi (cst.elt (len),
1154 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1155 }
1156
1157 for (unsigned int i = 0; i < len; i++)
1158 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1159 TREE_TYPE (nt) = type;
1160 return nt;
1161 }
1162
1163 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1164
1165 tree
1166 build_int_cst (tree type, HOST_WIDE_INT low)
1167 {
1168 /* Support legacy code. */
1169 if (!type)
1170 type = integer_type_node;
1171
1172 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1173 }
1174
1175 tree
1176 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1177 {
1178 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1179 }
1180
1181 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1182
1183 tree
1184 build_int_cst_type (tree type, HOST_WIDE_INT low)
1185 {
1186 gcc_assert (type);
1187 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1188 }
1189
1190 /* Constructs tree in type TYPE from with value given by CST. Signedness
1191 of CST is assumed to be the same as the signedness of TYPE. */
1192
1193 tree
1194 double_int_to_tree (tree type, double_int cst)
1195 {
1196 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1197 }
1198
1199 /* We force the wide_int CST to the range of the type TYPE by sign or
1200 zero extending it. OVERFLOWABLE indicates if we are interested in
1201 overflow of the value, when >0 we are only interested in signed
1202 overflow, for <0 we are interested in any overflow. OVERFLOWED
1203 indicates whether overflow has already occurred. CONST_OVERFLOWED
1204 indicates whether constant overflow has already occurred. We force
1205 T's value to be within range of T's type (by setting to 0 or 1 all
1206 the bits outside the type's range). We set TREE_OVERFLOWED if,
1207 OVERFLOWED is nonzero,
1208 or OVERFLOWABLE is >0 and signed overflow occurs
1209 or OVERFLOWABLE is <0 and any overflow occurs
1210 We return a new tree node for the extended wide_int. The node
1211 is shared if no overflow flags are set. */
1212
1213
1214 tree
1215 force_fit_type (tree type, const wide_int_ref &cst,
1216 int overflowable, bool overflowed)
1217 {
1218 signop sign = TYPE_SIGN (type);
1219
1220 /* If we need to set overflow flags, return a new unshared node. */
1221 if (overflowed || !wi::fits_to_tree_p (cst, type))
1222 {
1223 if (overflowed
1224 || overflowable < 0
1225 || (overflowable > 0 && sign == SIGNED))
1226 {
1227 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1228 tree t = build_new_int_cst (type, tmp);
1229 TREE_OVERFLOW (t) = 1;
1230 return t;
1231 }
1232 }
1233
1234 /* Else build a shared node. */
1235 return wide_int_to_tree (type, cst);
1236 }
1237
1238 /* These are the hash table functions for the hash table of INTEGER_CST
1239 nodes of a sizetype. */
1240
1241 /* Return the hash code code X, an INTEGER_CST. */
1242
1243 static hashval_t
1244 int_cst_hash_hash (const void *x)
1245 {
1246 const_tree const t = (const_tree) x;
1247 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1248 int i;
1249
1250 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1251 code ^= TREE_INT_CST_ELT (t, i);
1252
1253 return code;
1254 }
1255
1256 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1257 is the same as that given by *Y, which is the same. */
1258
1259 static int
1260 int_cst_hash_eq (const void *x, const void *y)
1261 {
1262 const_tree const xt = (const_tree) x;
1263 const_tree const yt = (const_tree) y;
1264
1265 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1266 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1267 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1268 return false;
1269
1270 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1271 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1272 return false;
1273
1274 return true;
1275 }
1276
1277 /* Create an INT_CST node of TYPE and value CST.
1278 The returned node is always shared. For small integers we use a
1279 per-type vector cache, for larger ones we use a single hash table.
1280 The value is extended from its precision according to the sign of
1281 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1282 the upper bits and ensures that hashing and value equality based
1283 upon the underlying HOST_WIDE_INTs works without masking. */
1284
1285 tree
1286 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1287 {
1288 tree t;
1289 int ix = -1;
1290 int limit = 0;
1291
1292 gcc_assert (type);
1293 unsigned int prec = TYPE_PRECISION (type);
1294 signop sgn = TYPE_SIGN (type);
1295
1296 /* Verify that everything is canonical. */
1297 int l = pcst.get_len ();
1298 if (l > 1)
1299 {
1300 if (pcst.elt (l - 1) == 0)
1301 gcc_checking_assert (pcst.elt (l - 2) < 0);
1302 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1303 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1304 }
1305
1306 wide_int cst = wide_int::from (pcst, prec, sgn);
1307 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1308
1309 if (ext_len == 1)
1310 {
1311 /* We just need to store a single HOST_WIDE_INT. */
1312 HOST_WIDE_INT hwi;
1313 if (TYPE_UNSIGNED (type))
1314 hwi = cst.to_uhwi ();
1315 else
1316 hwi = cst.to_shwi ();
1317
1318 switch (TREE_CODE (type))
1319 {
1320 case NULLPTR_TYPE:
1321 gcc_assert (hwi == 0);
1322 /* Fallthru. */
1323
1324 case POINTER_TYPE:
1325 case REFERENCE_TYPE:
1326 /* Cache NULL pointer. */
1327 if (hwi == 0)
1328 {
1329 limit = 1;
1330 ix = 0;
1331 }
1332 break;
1333
1334 case BOOLEAN_TYPE:
1335 /* Cache false or true. */
1336 limit = 2;
1337 if (hwi < 2)
1338 ix = hwi;
1339 break;
1340
1341 case INTEGER_TYPE:
1342 case OFFSET_TYPE:
1343 if (TYPE_SIGN (type) == UNSIGNED)
1344 {
1345 /* Cache [0, N). */
1346 limit = INTEGER_SHARE_LIMIT;
1347 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1348 ix = hwi;
1349 }
1350 else
1351 {
1352 /* Cache [-1, N). */
1353 limit = INTEGER_SHARE_LIMIT + 1;
1354 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1355 ix = hwi + 1;
1356 }
1357 break;
1358
1359 case ENUMERAL_TYPE:
1360 break;
1361
1362 default:
1363 gcc_unreachable ();
1364 }
1365
1366 if (ix >= 0)
1367 {
1368 /* Look for it in the type's vector of small shared ints. */
1369 if (!TYPE_CACHED_VALUES_P (type))
1370 {
1371 TYPE_CACHED_VALUES_P (type) = 1;
1372 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1373 }
1374
1375 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1376 if (t)
1377 /* Make sure no one is clobbering the shared constant. */
1378 gcc_checking_assert (TREE_TYPE (t) == type
1379 && TREE_INT_CST_NUNITS (t) == 1
1380 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1381 && TREE_INT_CST_EXT_NUNITS (t) == 1
1382 && TREE_INT_CST_ELT (t, 0) == hwi);
1383 else
1384 {
1385 /* Create a new shared int. */
1386 t = build_new_int_cst (type, cst);
1387 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1388 }
1389 }
1390 else
1391 {
1392 /* Use the cache of larger shared ints, using int_cst_node as
1393 a temporary. */
1394 void **slot;
1395
1396 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1397 TREE_TYPE (int_cst_node) = type;
1398
1399 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1400 t = (tree) *slot;
1401 if (!t)
1402 {
1403 /* Insert this one into the hash table. */
1404 t = int_cst_node;
1405 *slot = t;
1406 /* Make a new node for next time round. */
1407 int_cst_node = make_int_cst (1, 1);
1408 }
1409 }
1410 }
1411 else
1412 {
1413 /* The value either hashes properly or we drop it on the floor
1414 for the gc to take care of. There will not be enough of them
1415 to worry about. */
1416 void **slot;
1417
1418 tree nt = build_new_int_cst (type, cst);
1419 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1420 t = (tree) *slot;
1421 if (!t)
1422 {
1423 /* Insert this one into the hash table. */
1424 t = nt;
1425 *slot = t;
1426 }
1427 }
1428
1429 return t;
1430 }
1431
1432 void
1433 cache_integer_cst (tree t)
1434 {
1435 tree type = TREE_TYPE (t);
1436 int ix = -1;
1437 int limit = 0;
1438 int prec = TYPE_PRECISION (type);
1439
1440 gcc_assert (!TREE_OVERFLOW (t));
1441
1442 switch (TREE_CODE (type))
1443 {
1444 case NULLPTR_TYPE:
1445 gcc_assert (integer_zerop (t));
1446 /* Fallthru. */
1447
1448 case POINTER_TYPE:
1449 case REFERENCE_TYPE:
1450 /* Cache NULL pointer. */
1451 if (integer_zerop (t))
1452 {
1453 limit = 1;
1454 ix = 0;
1455 }
1456 break;
1457
1458 case BOOLEAN_TYPE:
1459 /* Cache false or true. */
1460 limit = 2;
1461 if (wi::ltu_p (t, 2))
1462 ix = TREE_INT_CST_ELT (t, 0);
1463 break;
1464
1465 case INTEGER_TYPE:
1466 case OFFSET_TYPE:
1467 if (TYPE_UNSIGNED (type))
1468 {
1469 /* Cache 0..N */
1470 limit = INTEGER_SHARE_LIMIT;
1471
1472 /* This is a little hokie, but if the prec is smaller than
1473 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1474 obvious test will not get the correct answer. */
1475 if (prec < HOST_BITS_PER_WIDE_INT)
1476 {
1477 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1478 ix = tree_to_uhwi (t);
1479 }
1480 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1481 ix = tree_to_uhwi (t);
1482 }
1483 else
1484 {
1485 /* Cache -1..N */
1486 limit = INTEGER_SHARE_LIMIT + 1;
1487
1488 if (integer_minus_onep (t))
1489 ix = 0;
1490 else if (!wi::neg_p (t))
1491 {
1492 if (prec < HOST_BITS_PER_WIDE_INT)
1493 {
1494 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1495 ix = tree_to_shwi (t) + 1;
1496 }
1497 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1498 ix = tree_to_shwi (t) + 1;
1499 }
1500 }
1501 break;
1502
1503 case ENUMERAL_TYPE:
1504 break;
1505
1506 default:
1507 gcc_unreachable ();
1508 }
1509
1510 if (ix >= 0)
1511 {
1512 /* Look for it in the type's vector of small shared ints. */
1513 if (!TYPE_CACHED_VALUES_P (type))
1514 {
1515 TYPE_CACHED_VALUES_P (type) = 1;
1516 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1517 }
1518
1519 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1520 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1521 }
1522 else
1523 {
1524 /* Use the cache of larger shared ints. */
1525 void **slot;
1526
1527 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1528 /* If there is already an entry for the number verify it's the
1529 same. */
1530 if (*slot)
1531 gcc_assert (wi::eq_p (tree (*slot), t));
1532 else
1533 /* Otherwise insert this one into the hash table. */
1534 *slot = t;
1535 }
1536 }
1537
1538
1539 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1540 and the rest are zeros. */
1541
1542 tree
1543 build_low_bits_mask (tree type, unsigned bits)
1544 {
1545 gcc_assert (bits <= TYPE_PRECISION (type));
1546
1547 return wide_int_to_tree (type, wi::mask (bits, false,
1548 TYPE_PRECISION (type)));
1549 }
1550
1551 /* Checks that X is integer constant that can be expressed in (unsigned)
1552 HOST_WIDE_INT without loss of precision. */
1553
1554 bool
1555 cst_and_fits_in_hwi (const_tree x)
1556 {
1557 if (TREE_CODE (x) != INTEGER_CST)
1558 return false;
1559
1560 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1561 return false;
1562
1563 return TREE_INT_CST_NUNITS (x) == 1;
1564 }
1565
1566 /* Build a newly constructed TREE_VEC node of length LEN. */
1567
1568 tree
1569 make_vector_stat (unsigned len MEM_STAT_DECL)
1570 {
1571 tree t;
1572 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1573
1574 record_node_allocation_statistics (VECTOR_CST, length);
1575
1576 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1577
1578 TREE_SET_CODE (t, VECTOR_CST);
1579 TREE_CONSTANT (t) = 1;
1580
1581 return t;
1582 }
1583
1584 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1585 are in a list pointed to by VALS. */
1586
1587 tree
1588 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1589 {
1590 int over = 0;
1591 unsigned cnt = 0;
1592 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1593 TREE_TYPE (v) = type;
1594
1595 /* Iterate through elements and check for overflow. */
1596 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1597 {
1598 tree value = vals[cnt];
1599
1600 VECTOR_CST_ELT (v, cnt) = value;
1601
1602 /* Don't crash if we get an address constant. */
1603 if (!CONSTANT_CLASS_P (value))
1604 continue;
1605
1606 over |= TREE_OVERFLOW (value);
1607 }
1608
1609 TREE_OVERFLOW (v) = over;
1610 return v;
1611 }
1612
1613 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1614 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1615
1616 tree
1617 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1618 {
1619 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1620 unsigned HOST_WIDE_INT idx;
1621 tree value;
1622
1623 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1624 vec[idx] = value;
1625 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1626 vec[idx] = build_zero_cst (TREE_TYPE (type));
1627
1628 return build_vector (type, vec);
1629 }
1630
1631 /* Build a vector of type VECTYPE where all the elements are SCs. */
1632 tree
1633 build_vector_from_val (tree vectype, tree sc)
1634 {
1635 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1636
1637 if (sc == error_mark_node)
1638 return sc;
1639
1640 /* Verify that the vector type is suitable for SC. Note that there
1641 is some inconsistency in the type-system with respect to restrict
1642 qualifications of pointers. Vector types always have a main-variant
1643 element type and the qualification is applied to the vector-type.
1644 So TREE_TYPE (vector-type) does not return a properly qualified
1645 vector element-type. */
1646 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1647 TREE_TYPE (vectype)));
1648
1649 if (CONSTANT_CLASS_P (sc))
1650 {
1651 tree *v = XALLOCAVEC (tree, nunits);
1652 for (i = 0; i < nunits; ++i)
1653 v[i] = sc;
1654 return build_vector (vectype, v);
1655 }
1656 else
1657 {
1658 vec<constructor_elt, va_gc> *v;
1659 vec_alloc (v, nunits);
1660 for (i = 0; i < nunits; ++i)
1661 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1662 return build_constructor (vectype, v);
1663 }
1664 }
1665
1666 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1667 are in the vec pointed to by VALS. */
1668 tree
1669 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1670 {
1671 tree c = make_node (CONSTRUCTOR);
1672 unsigned int i;
1673 constructor_elt *elt;
1674 bool constant_p = true;
1675 bool side_effects_p = false;
1676
1677 TREE_TYPE (c) = type;
1678 CONSTRUCTOR_ELTS (c) = vals;
1679
1680 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1681 {
1682 /* Mostly ctors will have elts that don't have side-effects, so
1683 the usual case is to scan all the elements. Hence a single
1684 loop for both const and side effects, rather than one loop
1685 each (with early outs). */
1686 if (!TREE_CONSTANT (elt->value))
1687 constant_p = false;
1688 if (TREE_SIDE_EFFECTS (elt->value))
1689 side_effects_p = true;
1690 }
1691
1692 TREE_SIDE_EFFECTS (c) = side_effects_p;
1693 TREE_CONSTANT (c) = constant_p;
1694
1695 return c;
1696 }
1697
1698 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1699 INDEX and VALUE. */
1700 tree
1701 build_constructor_single (tree type, tree index, tree value)
1702 {
1703 vec<constructor_elt, va_gc> *v;
1704 constructor_elt elt = {index, value};
1705
1706 vec_alloc (v, 1);
1707 v->quick_push (elt);
1708
1709 return build_constructor (type, v);
1710 }
1711
1712
1713 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1714 are in a list pointed to by VALS. */
1715 tree
1716 build_constructor_from_list (tree type, tree vals)
1717 {
1718 tree t;
1719 vec<constructor_elt, va_gc> *v = NULL;
1720
1721 if (vals)
1722 {
1723 vec_alloc (v, list_length (vals));
1724 for (t = vals; t; t = TREE_CHAIN (t))
1725 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1726 }
1727
1728 return build_constructor (type, v);
1729 }
1730
1731 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1732 of elements, provided as index/value pairs. */
1733
1734 tree
1735 build_constructor_va (tree type, int nelts, ...)
1736 {
1737 vec<constructor_elt, va_gc> *v = NULL;
1738 va_list p;
1739
1740 va_start (p, nelts);
1741 vec_alloc (v, nelts);
1742 while (nelts--)
1743 {
1744 tree index = va_arg (p, tree);
1745 tree value = va_arg (p, tree);
1746 CONSTRUCTOR_APPEND_ELT (v, index, value);
1747 }
1748 va_end (p);
1749 return build_constructor (type, v);
1750 }
1751
1752 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1753
1754 tree
1755 build_fixed (tree type, FIXED_VALUE_TYPE f)
1756 {
1757 tree v;
1758 FIXED_VALUE_TYPE *fp;
1759
1760 v = make_node (FIXED_CST);
1761 fp = ggc_alloc<fixed_value> ();
1762 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1763
1764 TREE_TYPE (v) = type;
1765 TREE_FIXED_CST_PTR (v) = fp;
1766 return v;
1767 }
1768
1769 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1770
1771 tree
1772 build_real (tree type, REAL_VALUE_TYPE d)
1773 {
1774 tree v;
1775 REAL_VALUE_TYPE *dp;
1776 int overflow = 0;
1777
1778 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1779 Consider doing it via real_convert now. */
1780
1781 v = make_node (REAL_CST);
1782 dp = ggc_alloc<real_value> ();
1783 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1784
1785 TREE_TYPE (v) = type;
1786 TREE_REAL_CST_PTR (v) = dp;
1787 TREE_OVERFLOW (v) = overflow;
1788 return v;
1789 }
1790
1791 /* Return a new REAL_CST node whose type is TYPE
1792 and whose value is the integer value of the INTEGER_CST node I. */
1793
1794 REAL_VALUE_TYPE
1795 real_value_from_int_cst (const_tree type, const_tree i)
1796 {
1797 REAL_VALUE_TYPE d;
1798
1799 /* Clear all bits of the real value type so that we can later do
1800 bitwise comparisons to see if two values are the same. */
1801 memset (&d, 0, sizeof d);
1802
1803 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1804 TYPE_SIGN (TREE_TYPE (i)));
1805 return d;
1806 }
1807
1808 /* Given a tree representing an integer constant I, return a tree
1809 representing the same value as a floating-point constant of type TYPE. */
1810
1811 tree
1812 build_real_from_int_cst (tree type, const_tree i)
1813 {
1814 tree v;
1815 int overflow = TREE_OVERFLOW (i);
1816
1817 v = build_real (type, real_value_from_int_cst (type, i));
1818
1819 TREE_OVERFLOW (v) |= overflow;
1820 return v;
1821 }
1822
1823 /* Return a newly constructed STRING_CST node whose value is
1824 the LEN characters at STR.
1825 Note that for a C string literal, LEN should include the trailing NUL.
1826 The TREE_TYPE is not initialized. */
1827
1828 tree
1829 build_string (int len, const char *str)
1830 {
1831 tree s;
1832 size_t length;
1833
1834 /* Do not waste bytes provided by padding of struct tree_string. */
1835 length = len + offsetof (struct tree_string, str) + 1;
1836
1837 record_node_allocation_statistics (STRING_CST, length);
1838
1839 s = (tree) ggc_internal_alloc (length);
1840
1841 memset (s, 0, sizeof (struct tree_typed));
1842 TREE_SET_CODE (s, STRING_CST);
1843 TREE_CONSTANT (s) = 1;
1844 TREE_STRING_LENGTH (s) = len;
1845 memcpy (s->string.str, str, len);
1846 s->string.str[len] = '\0';
1847
1848 return s;
1849 }
1850
1851 /* Return a newly constructed COMPLEX_CST node whose value is
1852 specified by the real and imaginary parts REAL and IMAG.
1853 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1854 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1855
1856 tree
1857 build_complex (tree type, tree real, tree imag)
1858 {
1859 tree t = make_node (COMPLEX_CST);
1860
1861 TREE_REALPART (t) = real;
1862 TREE_IMAGPART (t) = imag;
1863 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1864 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1865 return t;
1866 }
1867
1868 /* Return a constant of arithmetic type TYPE which is the
1869 multiplicative identity of the set TYPE. */
1870
1871 tree
1872 build_one_cst (tree type)
1873 {
1874 switch (TREE_CODE (type))
1875 {
1876 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1877 case POINTER_TYPE: case REFERENCE_TYPE:
1878 case OFFSET_TYPE:
1879 return build_int_cst (type, 1);
1880
1881 case REAL_TYPE:
1882 return build_real (type, dconst1);
1883
1884 case FIXED_POINT_TYPE:
1885 /* We can only generate 1 for accum types. */
1886 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1887 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1888
1889 case VECTOR_TYPE:
1890 {
1891 tree scalar = build_one_cst (TREE_TYPE (type));
1892
1893 return build_vector_from_val (type, scalar);
1894 }
1895
1896 case COMPLEX_TYPE:
1897 return build_complex (type,
1898 build_one_cst (TREE_TYPE (type)),
1899 build_zero_cst (TREE_TYPE (type)));
1900
1901 default:
1902 gcc_unreachable ();
1903 }
1904 }
1905
1906 /* Return an integer of type TYPE containing all 1's in as much precision as
1907 it contains, or a complex or vector whose subparts are such integers. */
1908
1909 tree
1910 build_all_ones_cst (tree type)
1911 {
1912 if (TREE_CODE (type) == COMPLEX_TYPE)
1913 {
1914 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1915 return build_complex (type, scalar, scalar);
1916 }
1917 else
1918 return build_minus_one_cst (type);
1919 }
1920
1921 /* Return a constant of arithmetic type TYPE which is the
1922 opposite of the multiplicative identity of the set TYPE. */
1923
1924 tree
1925 build_minus_one_cst (tree type)
1926 {
1927 switch (TREE_CODE (type))
1928 {
1929 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1930 case POINTER_TYPE: case REFERENCE_TYPE:
1931 case OFFSET_TYPE:
1932 return build_int_cst (type, -1);
1933
1934 case REAL_TYPE:
1935 return build_real (type, dconstm1);
1936
1937 case FIXED_POINT_TYPE:
1938 /* We can only generate 1 for accum types. */
1939 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1940 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1941 TYPE_MODE (type)));
1942
1943 case VECTOR_TYPE:
1944 {
1945 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1946
1947 return build_vector_from_val (type, scalar);
1948 }
1949
1950 case COMPLEX_TYPE:
1951 return build_complex (type,
1952 build_minus_one_cst (TREE_TYPE (type)),
1953 build_zero_cst (TREE_TYPE (type)));
1954
1955 default:
1956 gcc_unreachable ();
1957 }
1958 }
1959
1960 /* Build 0 constant of type TYPE. This is used by constructor folding
1961 and thus the constant should be represented in memory by
1962 zero(es). */
1963
1964 tree
1965 build_zero_cst (tree type)
1966 {
1967 switch (TREE_CODE (type))
1968 {
1969 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1970 case POINTER_TYPE: case REFERENCE_TYPE:
1971 case OFFSET_TYPE: case NULLPTR_TYPE:
1972 return build_int_cst (type, 0);
1973
1974 case REAL_TYPE:
1975 return build_real (type, dconst0);
1976
1977 case FIXED_POINT_TYPE:
1978 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1979
1980 case VECTOR_TYPE:
1981 {
1982 tree scalar = build_zero_cst (TREE_TYPE (type));
1983
1984 return build_vector_from_val (type, scalar);
1985 }
1986
1987 case COMPLEX_TYPE:
1988 {
1989 tree zero = build_zero_cst (TREE_TYPE (type));
1990
1991 return build_complex (type, zero, zero);
1992 }
1993
1994 default:
1995 if (!AGGREGATE_TYPE_P (type))
1996 return fold_convert (type, integer_zero_node);
1997 return build_constructor (type, NULL);
1998 }
1999 }
2000
2001
2002 /* Build a BINFO with LEN language slots. */
2003
2004 tree
2005 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2006 {
2007 tree t;
2008 size_t length = (offsetof (struct tree_binfo, base_binfos)
2009 + vec<tree, va_gc>::embedded_size (base_binfos));
2010
2011 record_node_allocation_statistics (TREE_BINFO, length);
2012
2013 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2014
2015 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2016
2017 TREE_SET_CODE (t, TREE_BINFO);
2018
2019 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2020
2021 return t;
2022 }
2023
2024 /* Create a CASE_LABEL_EXPR tree node and return it. */
2025
2026 tree
2027 build_case_label (tree low_value, tree high_value, tree label_decl)
2028 {
2029 tree t = make_node (CASE_LABEL_EXPR);
2030
2031 TREE_TYPE (t) = void_type_node;
2032 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2033
2034 CASE_LOW (t) = low_value;
2035 CASE_HIGH (t) = high_value;
2036 CASE_LABEL (t) = label_decl;
2037 CASE_CHAIN (t) = NULL_TREE;
2038
2039 return t;
2040 }
2041
2042 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2043 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2044 The latter determines the length of the HOST_WIDE_INT vector. */
2045
2046 tree
2047 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2048 {
2049 tree t;
2050 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2051 + sizeof (struct tree_int_cst));
2052
2053 gcc_assert (len);
2054 record_node_allocation_statistics (INTEGER_CST, length);
2055
2056 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2057
2058 TREE_SET_CODE (t, INTEGER_CST);
2059 TREE_INT_CST_NUNITS (t) = len;
2060 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2061 /* to_offset can only be applied to trees that are offset_int-sized
2062 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2063 must be exactly the precision of offset_int and so LEN is correct. */
2064 if (ext_len <= OFFSET_INT_ELTS)
2065 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2066 else
2067 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2068
2069 TREE_CONSTANT (t) = 1;
2070
2071 return t;
2072 }
2073
2074 /* Build a newly constructed TREE_VEC node of length LEN. */
2075
2076 tree
2077 make_tree_vec_stat (int len MEM_STAT_DECL)
2078 {
2079 tree t;
2080 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2081
2082 record_node_allocation_statistics (TREE_VEC, length);
2083
2084 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2085
2086 TREE_SET_CODE (t, TREE_VEC);
2087 TREE_VEC_LENGTH (t) = len;
2088
2089 return t;
2090 }
2091
2092 /* Grow a TREE_VEC node to new length LEN. */
2093
2094 tree
2095 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2096 {
2097 gcc_assert (TREE_CODE (v) == TREE_VEC);
2098
2099 int oldlen = TREE_VEC_LENGTH (v);
2100 gcc_assert (len > oldlen);
2101
2102 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2103 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2104
2105 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2106
2107 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2108
2109 TREE_VEC_LENGTH (v) = len;
2110
2111 return v;
2112 }
2113 \f
2114 /* Return 1 if EXPR is the integer constant zero or a complex constant
2115 of zero. */
2116
2117 int
2118 integer_zerop (const_tree expr)
2119 {
2120 STRIP_NOPS (expr);
2121
2122 switch (TREE_CODE (expr))
2123 {
2124 case INTEGER_CST:
2125 return wi::eq_p (expr, 0);
2126 case COMPLEX_CST:
2127 return (integer_zerop (TREE_REALPART (expr))
2128 && integer_zerop (TREE_IMAGPART (expr)));
2129 case VECTOR_CST:
2130 {
2131 unsigned i;
2132 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2133 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2134 return false;
2135 return true;
2136 }
2137 default:
2138 return false;
2139 }
2140 }
2141
2142 /* Return 1 if EXPR is the integer constant one or the corresponding
2143 complex constant. */
2144
2145 int
2146 integer_onep (const_tree expr)
2147 {
2148 STRIP_NOPS (expr);
2149
2150 switch (TREE_CODE (expr))
2151 {
2152 case INTEGER_CST:
2153 return wi::eq_p (wi::to_widest (expr), 1);
2154 case COMPLEX_CST:
2155 return (integer_onep (TREE_REALPART (expr))
2156 && integer_zerop (TREE_IMAGPART (expr)));
2157 case VECTOR_CST:
2158 {
2159 unsigned i;
2160 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2161 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2162 return false;
2163 return true;
2164 }
2165 default:
2166 return false;
2167 }
2168 }
2169
2170 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2171 it contains, or a complex or vector whose subparts are such integers. */
2172
2173 int
2174 integer_all_onesp (const_tree expr)
2175 {
2176 STRIP_NOPS (expr);
2177
2178 if (TREE_CODE (expr) == COMPLEX_CST
2179 && integer_all_onesp (TREE_REALPART (expr))
2180 && integer_all_onesp (TREE_IMAGPART (expr)))
2181 return 1;
2182
2183 else if (TREE_CODE (expr) == VECTOR_CST)
2184 {
2185 unsigned i;
2186 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2187 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2188 return 0;
2189 return 1;
2190 }
2191
2192 else if (TREE_CODE (expr) != INTEGER_CST)
2193 return 0;
2194
2195 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2196 }
2197
2198 /* Return 1 if EXPR is the integer constant minus one. */
2199
2200 int
2201 integer_minus_onep (const_tree expr)
2202 {
2203 STRIP_NOPS (expr);
2204
2205 if (TREE_CODE (expr) == COMPLEX_CST)
2206 return (integer_all_onesp (TREE_REALPART (expr))
2207 && integer_zerop (TREE_IMAGPART (expr)));
2208 else
2209 return integer_all_onesp (expr);
2210 }
2211
2212 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2213 one bit on). */
2214
2215 int
2216 integer_pow2p (const_tree expr)
2217 {
2218 STRIP_NOPS (expr);
2219
2220 if (TREE_CODE (expr) == COMPLEX_CST
2221 && integer_pow2p (TREE_REALPART (expr))
2222 && integer_zerop (TREE_IMAGPART (expr)))
2223 return 1;
2224
2225 if (TREE_CODE (expr) != INTEGER_CST)
2226 return 0;
2227
2228 return wi::popcount (expr) == 1;
2229 }
2230
2231 /* Return 1 if EXPR is an integer constant other than zero or a
2232 complex constant other than zero. */
2233
2234 int
2235 integer_nonzerop (const_tree expr)
2236 {
2237 STRIP_NOPS (expr);
2238
2239 return ((TREE_CODE (expr) == INTEGER_CST
2240 && !wi::eq_p (expr, 0))
2241 || (TREE_CODE (expr) == COMPLEX_CST
2242 && (integer_nonzerop (TREE_REALPART (expr))
2243 || integer_nonzerop (TREE_IMAGPART (expr)))));
2244 }
2245
2246 /* Return 1 if EXPR is the fixed-point constant zero. */
2247
2248 int
2249 fixed_zerop (const_tree expr)
2250 {
2251 return (TREE_CODE (expr) == FIXED_CST
2252 && TREE_FIXED_CST (expr).data.is_zero ());
2253 }
2254
2255 /* Return the power of two represented by a tree node known to be a
2256 power of two. */
2257
2258 int
2259 tree_log2 (const_tree expr)
2260 {
2261 STRIP_NOPS (expr);
2262
2263 if (TREE_CODE (expr) == COMPLEX_CST)
2264 return tree_log2 (TREE_REALPART (expr));
2265
2266 return wi::exact_log2 (expr);
2267 }
2268
2269 /* Similar, but return the largest integer Y such that 2 ** Y is less
2270 than or equal to EXPR. */
2271
2272 int
2273 tree_floor_log2 (const_tree expr)
2274 {
2275 STRIP_NOPS (expr);
2276
2277 if (TREE_CODE (expr) == COMPLEX_CST)
2278 return tree_log2 (TREE_REALPART (expr));
2279
2280 return wi::floor_log2 (expr);
2281 }
2282
2283 /* Return number of known trailing zero bits in EXPR, or, if the value of
2284 EXPR is known to be zero, the precision of it's type. */
2285
2286 unsigned int
2287 tree_ctz (const_tree expr)
2288 {
2289 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2290 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2291 return 0;
2292
2293 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2294 switch (TREE_CODE (expr))
2295 {
2296 case INTEGER_CST:
2297 ret1 = wi::ctz (expr);
2298 return MIN (ret1, prec);
2299 case SSA_NAME:
2300 ret1 = wi::ctz (get_nonzero_bits (expr));
2301 return MIN (ret1, prec);
2302 case PLUS_EXPR:
2303 case MINUS_EXPR:
2304 case BIT_IOR_EXPR:
2305 case BIT_XOR_EXPR:
2306 case MIN_EXPR:
2307 case MAX_EXPR:
2308 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2309 if (ret1 == 0)
2310 return ret1;
2311 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2312 return MIN (ret1, ret2);
2313 case POINTER_PLUS_EXPR:
2314 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2315 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2316 /* Second operand is sizetype, which could be in theory
2317 wider than pointer's precision. Make sure we never
2318 return more than prec. */
2319 ret2 = MIN (ret2, prec);
2320 return MIN (ret1, ret2);
2321 case BIT_AND_EXPR:
2322 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2323 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2324 return MAX (ret1, ret2);
2325 case MULT_EXPR:
2326 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2327 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2328 return MIN (ret1 + ret2, prec);
2329 case LSHIFT_EXPR:
2330 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2331 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2332 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2333 {
2334 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2335 return MIN (ret1 + ret2, prec);
2336 }
2337 return ret1;
2338 case RSHIFT_EXPR:
2339 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2340 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2341 {
2342 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2343 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2344 if (ret1 > ret2)
2345 return ret1 - ret2;
2346 }
2347 return 0;
2348 case TRUNC_DIV_EXPR:
2349 case CEIL_DIV_EXPR:
2350 case FLOOR_DIV_EXPR:
2351 case ROUND_DIV_EXPR:
2352 case EXACT_DIV_EXPR:
2353 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2354 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2355 {
2356 int l = tree_log2 (TREE_OPERAND (expr, 1));
2357 if (l >= 0)
2358 {
2359 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2360 ret2 = l;
2361 if (ret1 > ret2)
2362 return ret1 - ret2;
2363 }
2364 }
2365 return 0;
2366 CASE_CONVERT:
2367 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2368 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2369 ret1 = prec;
2370 return MIN (ret1, prec);
2371 case SAVE_EXPR:
2372 return tree_ctz (TREE_OPERAND (expr, 0));
2373 case COND_EXPR:
2374 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2375 if (ret1 == 0)
2376 return 0;
2377 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2378 return MIN (ret1, ret2);
2379 case COMPOUND_EXPR:
2380 return tree_ctz (TREE_OPERAND (expr, 1));
2381 case ADDR_EXPR:
2382 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2383 if (ret1 > BITS_PER_UNIT)
2384 {
2385 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2386 return MIN (ret1, prec);
2387 }
2388 return 0;
2389 default:
2390 return 0;
2391 }
2392 }
2393
2394 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2395 decimal float constants, so don't return 1 for them. */
2396
2397 int
2398 real_zerop (const_tree expr)
2399 {
2400 STRIP_NOPS (expr);
2401
2402 switch (TREE_CODE (expr))
2403 {
2404 case REAL_CST:
2405 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2406 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2407 case COMPLEX_CST:
2408 return real_zerop (TREE_REALPART (expr))
2409 && real_zerop (TREE_IMAGPART (expr));
2410 case VECTOR_CST:
2411 {
2412 unsigned i;
2413 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2414 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2415 return false;
2416 return true;
2417 }
2418 default:
2419 return false;
2420 }
2421 }
2422
2423 /* Return 1 if EXPR is the real constant one in real or complex form.
2424 Trailing zeroes matter for decimal float constants, so don't return
2425 1 for them. */
2426
2427 int
2428 real_onep (const_tree expr)
2429 {
2430 STRIP_NOPS (expr);
2431
2432 switch (TREE_CODE (expr))
2433 {
2434 case REAL_CST:
2435 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2436 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2437 case COMPLEX_CST:
2438 return real_onep (TREE_REALPART (expr))
2439 && real_zerop (TREE_IMAGPART (expr));
2440 case VECTOR_CST:
2441 {
2442 unsigned i;
2443 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2444 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2445 return false;
2446 return true;
2447 }
2448 default:
2449 return false;
2450 }
2451 }
2452
2453 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2454 matter for decimal float constants, so don't return 1 for them. */
2455
2456 int
2457 real_minus_onep (const_tree expr)
2458 {
2459 STRIP_NOPS (expr);
2460
2461 switch (TREE_CODE (expr))
2462 {
2463 case REAL_CST:
2464 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2465 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2466 case COMPLEX_CST:
2467 return real_minus_onep (TREE_REALPART (expr))
2468 && real_zerop (TREE_IMAGPART (expr));
2469 case VECTOR_CST:
2470 {
2471 unsigned i;
2472 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2473 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2474 return false;
2475 return true;
2476 }
2477 default:
2478 return false;
2479 }
2480 }
2481
2482 /* Nonzero if EXP is a constant or a cast of a constant. */
2483
2484 int
2485 really_constant_p (const_tree exp)
2486 {
2487 /* This is not quite the same as STRIP_NOPS. It does more. */
2488 while (CONVERT_EXPR_P (exp)
2489 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2490 exp = TREE_OPERAND (exp, 0);
2491 return TREE_CONSTANT (exp);
2492 }
2493 \f
2494 /* Return first list element whose TREE_VALUE is ELEM.
2495 Return 0 if ELEM is not in LIST. */
2496
2497 tree
2498 value_member (tree elem, tree list)
2499 {
2500 while (list)
2501 {
2502 if (elem == TREE_VALUE (list))
2503 return list;
2504 list = TREE_CHAIN (list);
2505 }
2506 return NULL_TREE;
2507 }
2508
2509 /* Return first list element whose TREE_PURPOSE is ELEM.
2510 Return 0 if ELEM is not in LIST. */
2511
2512 tree
2513 purpose_member (const_tree elem, tree list)
2514 {
2515 while (list)
2516 {
2517 if (elem == TREE_PURPOSE (list))
2518 return list;
2519 list = TREE_CHAIN (list);
2520 }
2521 return NULL_TREE;
2522 }
2523
2524 /* Return true if ELEM is in V. */
2525
2526 bool
2527 vec_member (const_tree elem, vec<tree, va_gc> *v)
2528 {
2529 unsigned ix;
2530 tree t;
2531 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2532 if (elem == t)
2533 return true;
2534 return false;
2535 }
2536
2537 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2538 NULL_TREE. */
2539
2540 tree
2541 chain_index (int idx, tree chain)
2542 {
2543 for (; chain && idx > 0; --idx)
2544 chain = TREE_CHAIN (chain);
2545 return chain;
2546 }
2547
2548 /* Return nonzero if ELEM is part of the chain CHAIN. */
2549
2550 int
2551 chain_member (const_tree elem, const_tree chain)
2552 {
2553 while (chain)
2554 {
2555 if (elem == chain)
2556 return 1;
2557 chain = DECL_CHAIN (chain);
2558 }
2559
2560 return 0;
2561 }
2562
2563 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2564 We expect a null pointer to mark the end of the chain.
2565 This is the Lisp primitive `length'. */
2566
2567 int
2568 list_length (const_tree t)
2569 {
2570 const_tree p = t;
2571 #ifdef ENABLE_TREE_CHECKING
2572 const_tree q = t;
2573 #endif
2574 int len = 0;
2575
2576 while (p)
2577 {
2578 p = TREE_CHAIN (p);
2579 #ifdef ENABLE_TREE_CHECKING
2580 if (len % 2)
2581 q = TREE_CHAIN (q);
2582 gcc_assert (p != q);
2583 #endif
2584 len++;
2585 }
2586
2587 return len;
2588 }
2589
2590 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2591 UNION_TYPE TYPE, or NULL_TREE if none. */
2592
2593 tree
2594 first_field (const_tree type)
2595 {
2596 tree t = TYPE_FIELDS (type);
2597 while (t && TREE_CODE (t) != FIELD_DECL)
2598 t = TREE_CHAIN (t);
2599 return t;
2600 }
2601
2602 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2603 by modifying the last node in chain 1 to point to chain 2.
2604 This is the Lisp primitive `nconc'. */
2605
2606 tree
2607 chainon (tree op1, tree op2)
2608 {
2609 tree t1;
2610
2611 if (!op1)
2612 return op2;
2613 if (!op2)
2614 return op1;
2615
2616 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2617 continue;
2618 TREE_CHAIN (t1) = op2;
2619
2620 #ifdef ENABLE_TREE_CHECKING
2621 {
2622 tree t2;
2623 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2624 gcc_assert (t2 != t1);
2625 }
2626 #endif
2627
2628 return op1;
2629 }
2630
2631 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2632
2633 tree
2634 tree_last (tree chain)
2635 {
2636 tree next;
2637 if (chain)
2638 while ((next = TREE_CHAIN (chain)))
2639 chain = next;
2640 return chain;
2641 }
2642
2643 /* Reverse the order of elements in the chain T,
2644 and return the new head of the chain (old last element). */
2645
2646 tree
2647 nreverse (tree t)
2648 {
2649 tree prev = 0, decl, next;
2650 for (decl = t; decl; decl = next)
2651 {
2652 /* We shouldn't be using this function to reverse BLOCK chains; we
2653 have blocks_nreverse for that. */
2654 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2655 next = TREE_CHAIN (decl);
2656 TREE_CHAIN (decl) = prev;
2657 prev = decl;
2658 }
2659 return prev;
2660 }
2661 \f
2662 /* Return a newly created TREE_LIST node whose
2663 purpose and value fields are PARM and VALUE. */
2664
2665 tree
2666 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2667 {
2668 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2669 TREE_PURPOSE (t) = parm;
2670 TREE_VALUE (t) = value;
2671 return t;
2672 }
2673
2674 /* Build a chain of TREE_LIST nodes from a vector. */
2675
2676 tree
2677 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2678 {
2679 tree ret = NULL_TREE;
2680 tree *pp = &ret;
2681 unsigned int i;
2682 tree t;
2683 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2684 {
2685 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2686 pp = &TREE_CHAIN (*pp);
2687 }
2688 return ret;
2689 }
2690
2691 /* Return a newly created TREE_LIST node whose
2692 purpose and value fields are PURPOSE and VALUE
2693 and whose TREE_CHAIN is CHAIN. */
2694
2695 tree
2696 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2697 {
2698 tree node;
2699
2700 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2701 memset (node, 0, sizeof (struct tree_common));
2702
2703 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2704
2705 TREE_SET_CODE (node, TREE_LIST);
2706 TREE_CHAIN (node) = chain;
2707 TREE_PURPOSE (node) = purpose;
2708 TREE_VALUE (node) = value;
2709 return node;
2710 }
2711
2712 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2713 trees. */
2714
2715 vec<tree, va_gc> *
2716 ctor_to_vec (tree ctor)
2717 {
2718 vec<tree, va_gc> *vec;
2719 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2720 unsigned int ix;
2721 tree val;
2722
2723 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2724 vec->quick_push (val);
2725
2726 return vec;
2727 }
2728 \f
2729 /* Return the size nominally occupied by an object of type TYPE
2730 when it resides in memory. The value is measured in units of bytes,
2731 and its data type is that normally used for type sizes
2732 (which is the first type created by make_signed_type or
2733 make_unsigned_type). */
2734
2735 tree
2736 size_in_bytes (const_tree type)
2737 {
2738 tree t;
2739
2740 if (type == error_mark_node)
2741 return integer_zero_node;
2742
2743 type = TYPE_MAIN_VARIANT (type);
2744 t = TYPE_SIZE_UNIT (type);
2745
2746 if (t == 0)
2747 {
2748 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2749 return size_zero_node;
2750 }
2751
2752 return t;
2753 }
2754
2755 /* Return the size of TYPE (in bytes) as a wide integer
2756 or return -1 if the size can vary or is larger than an integer. */
2757
2758 HOST_WIDE_INT
2759 int_size_in_bytes (const_tree type)
2760 {
2761 tree t;
2762
2763 if (type == error_mark_node)
2764 return 0;
2765
2766 type = TYPE_MAIN_VARIANT (type);
2767 t = TYPE_SIZE_UNIT (type);
2768
2769 if (t && tree_fits_uhwi_p (t))
2770 return TREE_INT_CST_LOW (t);
2771 else
2772 return -1;
2773 }
2774
2775 /* Return the maximum size of TYPE (in bytes) as a wide integer
2776 or return -1 if the size can vary or is larger than an integer. */
2777
2778 HOST_WIDE_INT
2779 max_int_size_in_bytes (const_tree type)
2780 {
2781 HOST_WIDE_INT size = -1;
2782 tree size_tree;
2783
2784 /* If this is an array type, check for a possible MAX_SIZE attached. */
2785
2786 if (TREE_CODE (type) == ARRAY_TYPE)
2787 {
2788 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2789
2790 if (size_tree && tree_fits_uhwi_p (size_tree))
2791 size = tree_to_uhwi (size_tree);
2792 }
2793
2794 /* If we still haven't been able to get a size, see if the language
2795 can compute a maximum size. */
2796
2797 if (size == -1)
2798 {
2799 size_tree = lang_hooks.types.max_size (type);
2800
2801 if (size_tree && tree_fits_uhwi_p (size_tree))
2802 size = tree_to_uhwi (size_tree);
2803 }
2804
2805 return size;
2806 }
2807 \f
2808 /* Return the bit position of FIELD, in bits from the start of the record.
2809 This is a tree of type bitsizetype. */
2810
2811 tree
2812 bit_position (const_tree field)
2813 {
2814 return bit_from_pos (DECL_FIELD_OFFSET (field),
2815 DECL_FIELD_BIT_OFFSET (field));
2816 }
2817
2818 /* Likewise, but return as an integer. It must be representable in
2819 that way (since it could be a signed value, we don't have the
2820 option of returning -1 like int_size_in_byte can. */
2821
2822 HOST_WIDE_INT
2823 int_bit_position (const_tree field)
2824 {
2825 return tree_to_shwi (bit_position (field));
2826 }
2827 \f
2828 /* Return the byte position of FIELD, in bytes from the start of the record.
2829 This is a tree of type sizetype. */
2830
2831 tree
2832 byte_position (const_tree field)
2833 {
2834 return byte_from_pos (DECL_FIELD_OFFSET (field),
2835 DECL_FIELD_BIT_OFFSET (field));
2836 }
2837
2838 /* Likewise, but return as an integer. It must be representable in
2839 that way (since it could be a signed value, we don't have the
2840 option of returning -1 like int_size_in_byte can. */
2841
2842 HOST_WIDE_INT
2843 int_byte_position (const_tree field)
2844 {
2845 return tree_to_shwi (byte_position (field));
2846 }
2847 \f
2848 /* Return the strictest alignment, in bits, that T is known to have. */
2849
2850 unsigned int
2851 expr_align (const_tree t)
2852 {
2853 unsigned int align0, align1;
2854
2855 switch (TREE_CODE (t))
2856 {
2857 CASE_CONVERT: case NON_LVALUE_EXPR:
2858 /* If we have conversions, we know that the alignment of the
2859 object must meet each of the alignments of the types. */
2860 align0 = expr_align (TREE_OPERAND (t, 0));
2861 align1 = TYPE_ALIGN (TREE_TYPE (t));
2862 return MAX (align0, align1);
2863
2864 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2865 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2866 case CLEANUP_POINT_EXPR:
2867 /* These don't change the alignment of an object. */
2868 return expr_align (TREE_OPERAND (t, 0));
2869
2870 case COND_EXPR:
2871 /* The best we can do is say that the alignment is the least aligned
2872 of the two arms. */
2873 align0 = expr_align (TREE_OPERAND (t, 1));
2874 align1 = expr_align (TREE_OPERAND (t, 2));
2875 return MIN (align0, align1);
2876
2877 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2878 meaningfully, it's always 1. */
2879 case LABEL_DECL: case CONST_DECL:
2880 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2881 case FUNCTION_DECL:
2882 gcc_assert (DECL_ALIGN (t) != 0);
2883 return DECL_ALIGN (t);
2884
2885 default:
2886 break;
2887 }
2888
2889 /* Otherwise take the alignment from that of the type. */
2890 return TYPE_ALIGN (TREE_TYPE (t));
2891 }
2892 \f
2893 /* Return, as a tree node, the number of elements for TYPE (which is an
2894 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2895
2896 tree
2897 array_type_nelts (const_tree type)
2898 {
2899 tree index_type, min, max;
2900
2901 /* If they did it with unspecified bounds, then we should have already
2902 given an error about it before we got here. */
2903 if (! TYPE_DOMAIN (type))
2904 return error_mark_node;
2905
2906 index_type = TYPE_DOMAIN (type);
2907 min = TYPE_MIN_VALUE (index_type);
2908 max = TYPE_MAX_VALUE (index_type);
2909
2910 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2911 if (!max)
2912 return error_mark_node;
2913
2914 return (integer_zerop (min)
2915 ? max
2916 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2917 }
2918 \f
2919 /* If arg is static -- a reference to an object in static storage -- then
2920 return the object. This is not the same as the C meaning of `static'.
2921 If arg isn't static, return NULL. */
2922
2923 tree
2924 staticp (tree arg)
2925 {
2926 switch (TREE_CODE (arg))
2927 {
2928 case FUNCTION_DECL:
2929 /* Nested functions are static, even though taking their address will
2930 involve a trampoline as we unnest the nested function and create
2931 the trampoline on the tree level. */
2932 return arg;
2933
2934 case VAR_DECL:
2935 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2936 && ! DECL_THREAD_LOCAL_P (arg)
2937 && ! DECL_DLLIMPORT_P (arg)
2938 ? arg : NULL);
2939
2940 case CONST_DECL:
2941 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2942 ? arg : NULL);
2943
2944 case CONSTRUCTOR:
2945 return TREE_STATIC (arg) ? arg : NULL;
2946
2947 case LABEL_DECL:
2948 case STRING_CST:
2949 return arg;
2950
2951 case COMPONENT_REF:
2952 /* If the thing being referenced is not a field, then it is
2953 something language specific. */
2954 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2955
2956 /* If we are referencing a bitfield, we can't evaluate an
2957 ADDR_EXPR at compile time and so it isn't a constant. */
2958 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2959 return NULL;
2960
2961 return staticp (TREE_OPERAND (arg, 0));
2962
2963 case BIT_FIELD_REF:
2964 return NULL;
2965
2966 case INDIRECT_REF:
2967 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2968
2969 case ARRAY_REF:
2970 case ARRAY_RANGE_REF:
2971 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2972 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2973 return staticp (TREE_OPERAND (arg, 0));
2974 else
2975 return NULL;
2976
2977 case COMPOUND_LITERAL_EXPR:
2978 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2979
2980 default:
2981 return NULL;
2982 }
2983 }
2984
2985 \f
2986
2987
2988 /* Return whether OP is a DECL whose address is function-invariant. */
2989
2990 bool
2991 decl_address_invariant_p (const_tree op)
2992 {
2993 /* The conditions below are slightly less strict than the one in
2994 staticp. */
2995
2996 switch (TREE_CODE (op))
2997 {
2998 case PARM_DECL:
2999 case RESULT_DECL:
3000 case LABEL_DECL:
3001 case FUNCTION_DECL:
3002 return true;
3003
3004 case VAR_DECL:
3005 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3006 || DECL_THREAD_LOCAL_P (op)
3007 || DECL_CONTEXT (op) == current_function_decl
3008 || decl_function_context (op) == current_function_decl)
3009 return true;
3010 break;
3011
3012 case CONST_DECL:
3013 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3014 || decl_function_context (op) == current_function_decl)
3015 return true;
3016 break;
3017
3018 default:
3019 break;
3020 }
3021
3022 return false;
3023 }
3024
3025 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3026
3027 bool
3028 decl_address_ip_invariant_p (const_tree op)
3029 {
3030 /* The conditions below are slightly less strict than the one in
3031 staticp. */
3032
3033 switch (TREE_CODE (op))
3034 {
3035 case LABEL_DECL:
3036 case FUNCTION_DECL:
3037 case STRING_CST:
3038 return true;
3039
3040 case VAR_DECL:
3041 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3042 && !DECL_DLLIMPORT_P (op))
3043 || DECL_THREAD_LOCAL_P (op))
3044 return true;
3045 break;
3046
3047 case CONST_DECL:
3048 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3049 return true;
3050 break;
3051
3052 default:
3053 break;
3054 }
3055
3056 return false;
3057 }
3058
3059
3060 /* Return true if T is function-invariant (internal function, does
3061 not handle arithmetic; that's handled in skip_simple_arithmetic and
3062 tree_invariant_p). */
3063
3064 static bool tree_invariant_p (tree t);
3065
3066 static bool
3067 tree_invariant_p_1 (tree t)
3068 {
3069 tree op;
3070
3071 if (TREE_CONSTANT (t)
3072 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3073 return true;
3074
3075 switch (TREE_CODE (t))
3076 {
3077 case SAVE_EXPR:
3078 return true;
3079
3080 case ADDR_EXPR:
3081 op = TREE_OPERAND (t, 0);
3082 while (handled_component_p (op))
3083 {
3084 switch (TREE_CODE (op))
3085 {
3086 case ARRAY_REF:
3087 case ARRAY_RANGE_REF:
3088 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3089 || TREE_OPERAND (op, 2) != NULL_TREE
3090 || TREE_OPERAND (op, 3) != NULL_TREE)
3091 return false;
3092 break;
3093
3094 case COMPONENT_REF:
3095 if (TREE_OPERAND (op, 2) != NULL_TREE)
3096 return false;
3097 break;
3098
3099 default:;
3100 }
3101 op = TREE_OPERAND (op, 0);
3102 }
3103
3104 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3105
3106 default:
3107 break;
3108 }
3109
3110 return false;
3111 }
3112
3113 /* Return true if T is function-invariant. */
3114
3115 static bool
3116 tree_invariant_p (tree t)
3117 {
3118 tree inner = skip_simple_arithmetic (t);
3119 return tree_invariant_p_1 (inner);
3120 }
3121
3122 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3123 Do this to any expression which may be used in more than one place,
3124 but must be evaluated only once.
3125
3126 Normally, expand_expr would reevaluate the expression each time.
3127 Calling save_expr produces something that is evaluated and recorded
3128 the first time expand_expr is called on it. Subsequent calls to
3129 expand_expr just reuse the recorded value.
3130
3131 The call to expand_expr that generates code that actually computes
3132 the value is the first call *at compile time*. Subsequent calls
3133 *at compile time* generate code to use the saved value.
3134 This produces correct result provided that *at run time* control
3135 always flows through the insns made by the first expand_expr
3136 before reaching the other places where the save_expr was evaluated.
3137 You, the caller of save_expr, must make sure this is so.
3138
3139 Constants, and certain read-only nodes, are returned with no
3140 SAVE_EXPR because that is safe. Expressions containing placeholders
3141 are not touched; see tree.def for an explanation of what these
3142 are used for. */
3143
3144 tree
3145 save_expr (tree expr)
3146 {
3147 tree t = fold (expr);
3148 tree inner;
3149
3150 /* If the tree evaluates to a constant, then we don't want to hide that
3151 fact (i.e. this allows further folding, and direct checks for constants).
3152 However, a read-only object that has side effects cannot be bypassed.
3153 Since it is no problem to reevaluate literals, we just return the
3154 literal node. */
3155 inner = skip_simple_arithmetic (t);
3156 if (TREE_CODE (inner) == ERROR_MARK)
3157 return inner;
3158
3159 if (tree_invariant_p_1 (inner))
3160 return t;
3161
3162 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3163 it means that the size or offset of some field of an object depends on
3164 the value within another field.
3165
3166 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3167 and some variable since it would then need to be both evaluated once and
3168 evaluated more than once. Front-ends must assure this case cannot
3169 happen by surrounding any such subexpressions in their own SAVE_EXPR
3170 and forcing evaluation at the proper time. */
3171 if (contains_placeholder_p (inner))
3172 return t;
3173
3174 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3175 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3176
3177 /* This expression might be placed ahead of a jump to ensure that the
3178 value was computed on both sides of the jump. So make sure it isn't
3179 eliminated as dead. */
3180 TREE_SIDE_EFFECTS (t) = 1;
3181 return t;
3182 }
3183
3184 /* Look inside EXPR into any simple arithmetic operations. Return the
3185 outermost non-arithmetic or non-invariant node. */
3186
3187 tree
3188 skip_simple_arithmetic (tree expr)
3189 {
3190 /* We don't care about whether this can be used as an lvalue in this
3191 context. */
3192 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3193 expr = TREE_OPERAND (expr, 0);
3194
3195 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3196 a constant, it will be more efficient to not make another SAVE_EXPR since
3197 it will allow better simplification and GCSE will be able to merge the
3198 computations if they actually occur. */
3199 while (true)
3200 {
3201 if (UNARY_CLASS_P (expr))
3202 expr = TREE_OPERAND (expr, 0);
3203 else if (BINARY_CLASS_P (expr))
3204 {
3205 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3206 expr = TREE_OPERAND (expr, 0);
3207 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3208 expr = TREE_OPERAND (expr, 1);
3209 else
3210 break;
3211 }
3212 else
3213 break;
3214 }
3215
3216 return expr;
3217 }
3218
3219 /* Look inside EXPR into simple arithmetic operations involving constants.
3220 Return the outermost non-arithmetic or non-constant node. */
3221
3222 tree
3223 skip_simple_constant_arithmetic (tree expr)
3224 {
3225 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3226 expr = TREE_OPERAND (expr, 0);
3227
3228 while (true)
3229 {
3230 if (UNARY_CLASS_P (expr))
3231 expr = TREE_OPERAND (expr, 0);
3232 else if (BINARY_CLASS_P (expr))
3233 {
3234 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3235 expr = TREE_OPERAND (expr, 0);
3236 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3237 expr = TREE_OPERAND (expr, 1);
3238 else
3239 break;
3240 }
3241 else
3242 break;
3243 }
3244
3245 return expr;
3246 }
3247
3248 /* Return which tree structure is used by T. */
3249
3250 enum tree_node_structure_enum
3251 tree_node_structure (const_tree t)
3252 {
3253 const enum tree_code code = TREE_CODE (t);
3254 return tree_node_structure_for_code (code);
3255 }
3256
3257 /* Set various status flags when building a CALL_EXPR object T. */
3258
3259 static void
3260 process_call_operands (tree t)
3261 {
3262 bool side_effects = TREE_SIDE_EFFECTS (t);
3263 bool read_only = false;
3264 int i = call_expr_flags (t);
3265
3266 /* Calls have side-effects, except those to const or pure functions. */
3267 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3268 side_effects = true;
3269 /* Propagate TREE_READONLY of arguments for const functions. */
3270 if (i & ECF_CONST)
3271 read_only = true;
3272
3273 if (!side_effects || read_only)
3274 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3275 {
3276 tree op = TREE_OPERAND (t, i);
3277 if (op && TREE_SIDE_EFFECTS (op))
3278 side_effects = true;
3279 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3280 read_only = false;
3281 }
3282
3283 TREE_SIDE_EFFECTS (t) = side_effects;
3284 TREE_READONLY (t) = read_only;
3285 }
3286 \f
3287 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3288 size or offset that depends on a field within a record. */
3289
3290 bool
3291 contains_placeholder_p (const_tree exp)
3292 {
3293 enum tree_code code;
3294
3295 if (!exp)
3296 return 0;
3297
3298 code = TREE_CODE (exp);
3299 if (code == PLACEHOLDER_EXPR)
3300 return 1;
3301
3302 switch (TREE_CODE_CLASS (code))
3303 {
3304 case tcc_reference:
3305 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3306 position computations since they will be converted into a
3307 WITH_RECORD_EXPR involving the reference, which will assume
3308 here will be valid. */
3309 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3310
3311 case tcc_exceptional:
3312 if (code == TREE_LIST)
3313 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3314 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3315 break;
3316
3317 case tcc_unary:
3318 case tcc_binary:
3319 case tcc_comparison:
3320 case tcc_expression:
3321 switch (code)
3322 {
3323 case COMPOUND_EXPR:
3324 /* Ignoring the first operand isn't quite right, but works best. */
3325 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3326
3327 case COND_EXPR:
3328 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3329 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3330 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3331
3332 case SAVE_EXPR:
3333 /* The save_expr function never wraps anything containing
3334 a PLACEHOLDER_EXPR. */
3335 return 0;
3336
3337 default:
3338 break;
3339 }
3340
3341 switch (TREE_CODE_LENGTH (code))
3342 {
3343 case 1:
3344 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3345 case 2:
3346 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3347 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3348 default:
3349 return 0;
3350 }
3351
3352 case tcc_vl_exp:
3353 switch (code)
3354 {
3355 case CALL_EXPR:
3356 {
3357 const_tree arg;
3358 const_call_expr_arg_iterator iter;
3359 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3360 if (CONTAINS_PLACEHOLDER_P (arg))
3361 return 1;
3362 return 0;
3363 }
3364 default:
3365 return 0;
3366 }
3367
3368 default:
3369 return 0;
3370 }
3371 return 0;
3372 }
3373
3374 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3375 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3376 field positions. */
3377
3378 static bool
3379 type_contains_placeholder_1 (const_tree type)
3380 {
3381 /* If the size contains a placeholder or the parent type (component type in
3382 the case of arrays) type involves a placeholder, this type does. */
3383 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3384 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3385 || (!POINTER_TYPE_P (type)
3386 && TREE_TYPE (type)
3387 && type_contains_placeholder_p (TREE_TYPE (type))))
3388 return true;
3389
3390 /* Now do type-specific checks. Note that the last part of the check above
3391 greatly limits what we have to do below. */
3392 switch (TREE_CODE (type))
3393 {
3394 case VOID_TYPE:
3395 case COMPLEX_TYPE:
3396 case ENUMERAL_TYPE:
3397 case BOOLEAN_TYPE:
3398 case POINTER_TYPE:
3399 case OFFSET_TYPE:
3400 case REFERENCE_TYPE:
3401 case METHOD_TYPE:
3402 case FUNCTION_TYPE:
3403 case VECTOR_TYPE:
3404 case NULLPTR_TYPE:
3405 return false;
3406
3407 case INTEGER_TYPE:
3408 case REAL_TYPE:
3409 case FIXED_POINT_TYPE:
3410 /* Here we just check the bounds. */
3411 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3412 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3413
3414 case ARRAY_TYPE:
3415 /* We have already checked the component type above, so just check the
3416 domain type. */
3417 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3418
3419 case RECORD_TYPE:
3420 case UNION_TYPE:
3421 case QUAL_UNION_TYPE:
3422 {
3423 tree field;
3424
3425 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3426 if (TREE_CODE (field) == FIELD_DECL
3427 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3428 || (TREE_CODE (type) == QUAL_UNION_TYPE
3429 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3430 || type_contains_placeholder_p (TREE_TYPE (field))))
3431 return true;
3432
3433 return false;
3434 }
3435
3436 default:
3437 gcc_unreachable ();
3438 }
3439 }
3440
3441 /* Wrapper around above function used to cache its result. */
3442
3443 bool
3444 type_contains_placeholder_p (tree type)
3445 {
3446 bool result;
3447
3448 /* If the contains_placeholder_bits field has been initialized,
3449 then we know the answer. */
3450 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3451 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3452
3453 /* Indicate that we've seen this type node, and the answer is false.
3454 This is what we want to return if we run into recursion via fields. */
3455 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3456
3457 /* Compute the real value. */
3458 result = type_contains_placeholder_1 (type);
3459
3460 /* Store the real value. */
3461 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3462
3463 return result;
3464 }
3465 \f
3466 /* Push tree EXP onto vector QUEUE if it is not already present. */
3467
3468 static void
3469 push_without_duplicates (tree exp, vec<tree> *queue)
3470 {
3471 unsigned int i;
3472 tree iter;
3473
3474 FOR_EACH_VEC_ELT (*queue, i, iter)
3475 if (simple_cst_equal (iter, exp) == 1)
3476 break;
3477
3478 if (!iter)
3479 queue->safe_push (exp);
3480 }
3481
3482 /* Given a tree EXP, find all occurrences of references to fields
3483 in a PLACEHOLDER_EXPR and place them in vector REFS without
3484 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3485 we assume here that EXP contains only arithmetic expressions
3486 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3487 argument list. */
3488
3489 void
3490 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3491 {
3492 enum tree_code code = TREE_CODE (exp);
3493 tree inner;
3494 int i;
3495
3496 /* We handle TREE_LIST and COMPONENT_REF separately. */
3497 if (code == TREE_LIST)
3498 {
3499 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3500 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3501 }
3502 else if (code == COMPONENT_REF)
3503 {
3504 for (inner = TREE_OPERAND (exp, 0);
3505 REFERENCE_CLASS_P (inner);
3506 inner = TREE_OPERAND (inner, 0))
3507 ;
3508
3509 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3510 push_without_duplicates (exp, refs);
3511 else
3512 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3513 }
3514 else
3515 switch (TREE_CODE_CLASS (code))
3516 {
3517 case tcc_constant:
3518 break;
3519
3520 case tcc_declaration:
3521 /* Variables allocated to static storage can stay. */
3522 if (!TREE_STATIC (exp))
3523 push_without_duplicates (exp, refs);
3524 break;
3525
3526 case tcc_expression:
3527 /* This is the pattern built in ada/make_aligning_type. */
3528 if (code == ADDR_EXPR
3529 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3530 {
3531 push_without_duplicates (exp, refs);
3532 break;
3533 }
3534
3535 /* Fall through... */
3536
3537 case tcc_exceptional:
3538 case tcc_unary:
3539 case tcc_binary:
3540 case tcc_comparison:
3541 case tcc_reference:
3542 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3543 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3544 break;
3545
3546 case tcc_vl_exp:
3547 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3548 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3549 break;
3550
3551 default:
3552 gcc_unreachable ();
3553 }
3554 }
3555
3556 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3557 return a tree with all occurrences of references to F in a
3558 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3559 CONST_DECLs. Note that we assume here that EXP contains only
3560 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3561 occurring only in their argument list. */
3562
3563 tree
3564 substitute_in_expr (tree exp, tree f, tree r)
3565 {
3566 enum tree_code code = TREE_CODE (exp);
3567 tree op0, op1, op2, op3;
3568 tree new_tree;
3569
3570 /* We handle TREE_LIST and COMPONENT_REF separately. */
3571 if (code == TREE_LIST)
3572 {
3573 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3574 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3575 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3576 return exp;
3577
3578 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3579 }
3580 else if (code == COMPONENT_REF)
3581 {
3582 tree inner;
3583
3584 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3585 and it is the right field, replace it with R. */
3586 for (inner = TREE_OPERAND (exp, 0);
3587 REFERENCE_CLASS_P (inner);
3588 inner = TREE_OPERAND (inner, 0))
3589 ;
3590
3591 /* The field. */
3592 op1 = TREE_OPERAND (exp, 1);
3593
3594 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3595 return r;
3596
3597 /* If this expression hasn't been completed let, leave it alone. */
3598 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3599 return exp;
3600
3601 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3602 if (op0 == TREE_OPERAND (exp, 0))
3603 return exp;
3604
3605 new_tree
3606 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3607 }
3608 else
3609 switch (TREE_CODE_CLASS (code))
3610 {
3611 case tcc_constant:
3612 return exp;
3613
3614 case tcc_declaration:
3615 if (exp == f)
3616 return r;
3617 else
3618 return exp;
3619
3620 case tcc_expression:
3621 if (exp == f)
3622 return r;
3623
3624 /* Fall through... */
3625
3626 case tcc_exceptional:
3627 case tcc_unary:
3628 case tcc_binary:
3629 case tcc_comparison:
3630 case tcc_reference:
3631 switch (TREE_CODE_LENGTH (code))
3632 {
3633 case 0:
3634 return exp;
3635
3636 case 1:
3637 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3638 if (op0 == TREE_OPERAND (exp, 0))
3639 return exp;
3640
3641 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3642 break;
3643
3644 case 2:
3645 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3646 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3647
3648 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3649 return exp;
3650
3651 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3652 break;
3653
3654 case 3:
3655 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3656 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3657 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3658
3659 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3660 && op2 == TREE_OPERAND (exp, 2))
3661 return exp;
3662
3663 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3664 break;
3665
3666 case 4:
3667 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3668 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3669 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3670 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3671
3672 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3673 && op2 == TREE_OPERAND (exp, 2)
3674 && op3 == TREE_OPERAND (exp, 3))
3675 return exp;
3676
3677 new_tree
3678 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3679 break;
3680
3681 default:
3682 gcc_unreachable ();
3683 }
3684 break;
3685
3686 case tcc_vl_exp:
3687 {
3688 int i;
3689
3690 new_tree = NULL_TREE;
3691
3692 /* If we are trying to replace F with a constant, inline back
3693 functions which do nothing else than computing a value from
3694 the arguments they are passed. This makes it possible to
3695 fold partially or entirely the replacement expression. */
3696 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3697 {
3698 tree t = maybe_inline_call_in_expr (exp);
3699 if (t)
3700 return SUBSTITUTE_IN_EXPR (t, f, r);
3701 }
3702
3703 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3704 {
3705 tree op = TREE_OPERAND (exp, i);
3706 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3707 if (new_op != op)
3708 {
3709 if (!new_tree)
3710 new_tree = copy_node (exp);
3711 TREE_OPERAND (new_tree, i) = new_op;
3712 }
3713 }
3714
3715 if (new_tree)
3716 {
3717 new_tree = fold (new_tree);
3718 if (TREE_CODE (new_tree) == CALL_EXPR)
3719 process_call_operands (new_tree);
3720 }
3721 else
3722 return exp;
3723 }
3724 break;
3725
3726 default:
3727 gcc_unreachable ();
3728 }
3729
3730 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3731
3732 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3733 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3734
3735 return new_tree;
3736 }
3737
3738 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3739 for it within OBJ, a tree that is an object or a chain of references. */
3740
3741 tree
3742 substitute_placeholder_in_expr (tree exp, tree obj)
3743 {
3744 enum tree_code code = TREE_CODE (exp);
3745 tree op0, op1, op2, op3;
3746 tree new_tree;
3747
3748 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3749 in the chain of OBJ. */
3750 if (code == PLACEHOLDER_EXPR)
3751 {
3752 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3753 tree elt;
3754
3755 for (elt = obj; elt != 0;
3756 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3757 || TREE_CODE (elt) == COND_EXPR)
3758 ? TREE_OPERAND (elt, 1)
3759 : (REFERENCE_CLASS_P (elt)
3760 || UNARY_CLASS_P (elt)
3761 || BINARY_CLASS_P (elt)
3762 || VL_EXP_CLASS_P (elt)
3763 || EXPRESSION_CLASS_P (elt))
3764 ? TREE_OPERAND (elt, 0) : 0))
3765 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3766 return elt;
3767
3768 for (elt = obj; elt != 0;
3769 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3770 || TREE_CODE (elt) == COND_EXPR)
3771 ? TREE_OPERAND (elt, 1)
3772 : (REFERENCE_CLASS_P (elt)
3773 || UNARY_CLASS_P (elt)
3774 || BINARY_CLASS_P (elt)
3775 || VL_EXP_CLASS_P (elt)
3776 || EXPRESSION_CLASS_P (elt))
3777 ? TREE_OPERAND (elt, 0) : 0))
3778 if (POINTER_TYPE_P (TREE_TYPE (elt))
3779 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3780 == need_type))
3781 return fold_build1 (INDIRECT_REF, need_type, elt);
3782
3783 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3784 survives until RTL generation, there will be an error. */
3785 return exp;
3786 }
3787
3788 /* TREE_LIST is special because we need to look at TREE_VALUE
3789 and TREE_CHAIN, not TREE_OPERANDS. */
3790 else if (code == TREE_LIST)
3791 {
3792 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3793 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3794 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3795 return exp;
3796
3797 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3798 }
3799 else
3800 switch (TREE_CODE_CLASS (code))
3801 {
3802 case tcc_constant:
3803 case tcc_declaration:
3804 return exp;
3805
3806 case tcc_exceptional:
3807 case tcc_unary:
3808 case tcc_binary:
3809 case tcc_comparison:
3810 case tcc_expression:
3811 case tcc_reference:
3812 case tcc_statement:
3813 switch (TREE_CODE_LENGTH (code))
3814 {
3815 case 0:
3816 return exp;
3817
3818 case 1:
3819 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3820 if (op0 == TREE_OPERAND (exp, 0))
3821 return exp;
3822
3823 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3824 break;
3825
3826 case 2:
3827 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3828 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3829
3830 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3831 return exp;
3832
3833 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3834 break;
3835
3836 case 3:
3837 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3838 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3839 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3840
3841 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3842 && op2 == TREE_OPERAND (exp, 2))
3843 return exp;
3844
3845 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3846 break;
3847
3848 case 4:
3849 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3850 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3851 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3852 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3853
3854 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3855 && op2 == TREE_OPERAND (exp, 2)
3856 && op3 == TREE_OPERAND (exp, 3))
3857 return exp;
3858
3859 new_tree
3860 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3861 break;
3862
3863 default:
3864 gcc_unreachable ();
3865 }
3866 break;
3867
3868 case tcc_vl_exp:
3869 {
3870 int i;
3871
3872 new_tree = NULL_TREE;
3873
3874 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3875 {
3876 tree op = TREE_OPERAND (exp, i);
3877 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3878 if (new_op != op)
3879 {
3880 if (!new_tree)
3881 new_tree = copy_node (exp);
3882 TREE_OPERAND (new_tree, i) = new_op;
3883 }
3884 }
3885
3886 if (new_tree)
3887 {
3888 new_tree = fold (new_tree);
3889 if (TREE_CODE (new_tree) == CALL_EXPR)
3890 process_call_operands (new_tree);
3891 }
3892 else
3893 return exp;
3894 }
3895 break;
3896
3897 default:
3898 gcc_unreachable ();
3899 }
3900
3901 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3902
3903 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3904 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3905
3906 return new_tree;
3907 }
3908 \f
3909
3910 /* Subroutine of stabilize_reference; this is called for subtrees of
3911 references. Any expression with side-effects must be put in a SAVE_EXPR
3912 to ensure that it is only evaluated once.
3913
3914 We don't put SAVE_EXPR nodes around everything, because assigning very
3915 simple expressions to temporaries causes us to miss good opportunities
3916 for optimizations. Among other things, the opportunity to fold in the
3917 addition of a constant into an addressing mode often gets lost, e.g.
3918 "y[i+1] += x;". In general, we take the approach that we should not make
3919 an assignment unless we are forced into it - i.e., that any non-side effect
3920 operator should be allowed, and that cse should take care of coalescing
3921 multiple utterances of the same expression should that prove fruitful. */
3922
3923 static tree
3924 stabilize_reference_1 (tree e)
3925 {
3926 tree result;
3927 enum tree_code code = TREE_CODE (e);
3928
3929 /* We cannot ignore const expressions because it might be a reference
3930 to a const array but whose index contains side-effects. But we can
3931 ignore things that are actual constant or that already have been
3932 handled by this function. */
3933
3934 if (tree_invariant_p (e))
3935 return e;
3936
3937 switch (TREE_CODE_CLASS (code))
3938 {
3939 case tcc_exceptional:
3940 case tcc_type:
3941 case tcc_declaration:
3942 case tcc_comparison:
3943 case tcc_statement:
3944 case tcc_expression:
3945 case tcc_reference:
3946 case tcc_vl_exp:
3947 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3948 so that it will only be evaluated once. */
3949 /* The reference (r) and comparison (<) classes could be handled as
3950 below, but it is generally faster to only evaluate them once. */
3951 if (TREE_SIDE_EFFECTS (e))
3952 return save_expr (e);
3953 return e;
3954
3955 case tcc_constant:
3956 /* Constants need no processing. In fact, we should never reach
3957 here. */
3958 return e;
3959
3960 case tcc_binary:
3961 /* Division is slow and tends to be compiled with jumps,
3962 especially the division by powers of 2 that is often
3963 found inside of an array reference. So do it just once. */
3964 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3965 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3966 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3967 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3968 return save_expr (e);
3969 /* Recursively stabilize each operand. */
3970 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3971 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3972 break;
3973
3974 case tcc_unary:
3975 /* Recursively stabilize each operand. */
3976 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3977 break;
3978
3979 default:
3980 gcc_unreachable ();
3981 }
3982
3983 TREE_TYPE (result) = TREE_TYPE (e);
3984 TREE_READONLY (result) = TREE_READONLY (e);
3985 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3986 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3987
3988 return result;
3989 }
3990
3991 /* Stabilize a reference so that we can use it any number of times
3992 without causing its operands to be evaluated more than once.
3993 Returns the stabilized reference. This works by means of save_expr,
3994 so see the caveats in the comments about save_expr.
3995
3996 Also allows conversion expressions whose operands are references.
3997 Any other kind of expression is returned unchanged. */
3998
3999 tree
4000 stabilize_reference (tree ref)
4001 {
4002 tree result;
4003 enum tree_code code = TREE_CODE (ref);
4004
4005 switch (code)
4006 {
4007 case VAR_DECL:
4008 case PARM_DECL:
4009 case RESULT_DECL:
4010 /* No action is needed in this case. */
4011 return ref;
4012
4013 CASE_CONVERT:
4014 case FLOAT_EXPR:
4015 case FIX_TRUNC_EXPR:
4016 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4017 break;
4018
4019 case INDIRECT_REF:
4020 result = build_nt (INDIRECT_REF,
4021 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4022 break;
4023
4024 case COMPONENT_REF:
4025 result = build_nt (COMPONENT_REF,
4026 stabilize_reference (TREE_OPERAND (ref, 0)),
4027 TREE_OPERAND (ref, 1), NULL_TREE);
4028 break;
4029
4030 case BIT_FIELD_REF:
4031 result = build_nt (BIT_FIELD_REF,
4032 stabilize_reference (TREE_OPERAND (ref, 0)),
4033 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4034 break;
4035
4036 case ARRAY_REF:
4037 result = build_nt (ARRAY_REF,
4038 stabilize_reference (TREE_OPERAND (ref, 0)),
4039 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4040 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4041 break;
4042
4043 case ARRAY_RANGE_REF:
4044 result = build_nt (ARRAY_RANGE_REF,
4045 stabilize_reference (TREE_OPERAND (ref, 0)),
4046 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4047 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4048 break;
4049
4050 case COMPOUND_EXPR:
4051 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4052 it wouldn't be ignored. This matters when dealing with
4053 volatiles. */
4054 return stabilize_reference_1 (ref);
4055
4056 /* If arg isn't a kind of lvalue we recognize, make no change.
4057 Caller should recognize the error for an invalid lvalue. */
4058 default:
4059 return ref;
4060
4061 case ERROR_MARK:
4062 return error_mark_node;
4063 }
4064
4065 TREE_TYPE (result) = TREE_TYPE (ref);
4066 TREE_READONLY (result) = TREE_READONLY (ref);
4067 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4068 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4069
4070 return result;
4071 }
4072 \f
4073 /* Low-level constructors for expressions. */
4074
4075 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4076 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4077
4078 void
4079 recompute_tree_invariant_for_addr_expr (tree t)
4080 {
4081 tree node;
4082 bool tc = true, se = false;
4083
4084 /* We started out assuming this address is both invariant and constant, but
4085 does not have side effects. Now go down any handled components and see if
4086 any of them involve offsets that are either non-constant or non-invariant.
4087 Also check for side-effects.
4088
4089 ??? Note that this code makes no attempt to deal with the case where
4090 taking the address of something causes a copy due to misalignment. */
4091
4092 #define UPDATE_FLAGS(NODE) \
4093 do { tree _node = (NODE); \
4094 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4095 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4096
4097 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4098 node = TREE_OPERAND (node, 0))
4099 {
4100 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4101 array reference (probably made temporarily by the G++ front end),
4102 so ignore all the operands. */
4103 if ((TREE_CODE (node) == ARRAY_REF
4104 || TREE_CODE (node) == ARRAY_RANGE_REF)
4105 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4106 {
4107 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4108 if (TREE_OPERAND (node, 2))
4109 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4110 if (TREE_OPERAND (node, 3))
4111 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4112 }
4113 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4114 FIELD_DECL, apparently. The G++ front end can put something else
4115 there, at least temporarily. */
4116 else if (TREE_CODE (node) == COMPONENT_REF
4117 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4118 {
4119 if (TREE_OPERAND (node, 2))
4120 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4121 }
4122 }
4123
4124 node = lang_hooks.expr_to_decl (node, &tc, &se);
4125
4126 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4127 the address, since &(*a)->b is a form of addition. If it's a constant, the
4128 address is constant too. If it's a decl, its address is constant if the
4129 decl is static. Everything else is not constant and, furthermore,
4130 taking the address of a volatile variable is not volatile. */
4131 if (TREE_CODE (node) == INDIRECT_REF
4132 || TREE_CODE (node) == MEM_REF)
4133 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4134 else if (CONSTANT_CLASS_P (node))
4135 ;
4136 else if (DECL_P (node))
4137 tc &= (staticp (node) != NULL_TREE);
4138 else
4139 {
4140 tc = false;
4141 se |= TREE_SIDE_EFFECTS (node);
4142 }
4143
4144
4145 TREE_CONSTANT (t) = tc;
4146 TREE_SIDE_EFFECTS (t) = se;
4147 #undef UPDATE_FLAGS
4148 }
4149
4150 /* Build an expression of code CODE, data type TYPE, and operands as
4151 specified. Expressions and reference nodes can be created this way.
4152 Constants, decls, types and misc nodes cannot be.
4153
4154 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4155 enough for all extant tree codes. */
4156
4157 tree
4158 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4159 {
4160 tree t;
4161
4162 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4163
4164 t = make_node_stat (code PASS_MEM_STAT);
4165 TREE_TYPE (t) = tt;
4166
4167 return t;
4168 }
4169
4170 tree
4171 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4172 {
4173 int length = sizeof (struct tree_exp);
4174 tree t;
4175
4176 record_node_allocation_statistics (code, length);
4177
4178 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4179
4180 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4181
4182 memset (t, 0, sizeof (struct tree_common));
4183
4184 TREE_SET_CODE (t, code);
4185
4186 TREE_TYPE (t) = type;
4187 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4188 TREE_OPERAND (t, 0) = node;
4189 if (node && !TYPE_P (node))
4190 {
4191 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4192 TREE_READONLY (t) = TREE_READONLY (node);
4193 }
4194
4195 if (TREE_CODE_CLASS (code) == tcc_statement)
4196 TREE_SIDE_EFFECTS (t) = 1;
4197 else switch (code)
4198 {
4199 case VA_ARG_EXPR:
4200 /* All of these have side-effects, no matter what their
4201 operands are. */
4202 TREE_SIDE_EFFECTS (t) = 1;
4203 TREE_READONLY (t) = 0;
4204 break;
4205
4206 case INDIRECT_REF:
4207 /* Whether a dereference is readonly has nothing to do with whether
4208 its operand is readonly. */
4209 TREE_READONLY (t) = 0;
4210 break;
4211
4212 case ADDR_EXPR:
4213 if (node)
4214 recompute_tree_invariant_for_addr_expr (t);
4215 break;
4216
4217 default:
4218 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4219 && node && !TYPE_P (node)
4220 && TREE_CONSTANT (node))
4221 TREE_CONSTANT (t) = 1;
4222 if (TREE_CODE_CLASS (code) == tcc_reference
4223 && node && TREE_THIS_VOLATILE (node))
4224 TREE_THIS_VOLATILE (t) = 1;
4225 break;
4226 }
4227
4228 return t;
4229 }
4230
4231 #define PROCESS_ARG(N) \
4232 do { \
4233 TREE_OPERAND (t, N) = arg##N; \
4234 if (arg##N &&!TYPE_P (arg##N)) \
4235 { \
4236 if (TREE_SIDE_EFFECTS (arg##N)) \
4237 side_effects = 1; \
4238 if (!TREE_READONLY (arg##N) \
4239 && !CONSTANT_CLASS_P (arg##N)) \
4240 (void) (read_only = 0); \
4241 if (!TREE_CONSTANT (arg##N)) \
4242 (void) (constant = 0); \
4243 } \
4244 } while (0)
4245
4246 tree
4247 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4248 {
4249 bool constant, read_only, side_effects;
4250 tree t;
4251
4252 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4253
4254 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4255 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4256 /* When sizetype precision doesn't match that of pointers
4257 we need to be able to build explicit extensions or truncations
4258 of the offset argument. */
4259 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4260 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4261 && TREE_CODE (arg1) == INTEGER_CST);
4262
4263 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4264 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4265 && ptrofftype_p (TREE_TYPE (arg1)));
4266
4267 t = make_node_stat (code PASS_MEM_STAT);
4268 TREE_TYPE (t) = tt;
4269
4270 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4271 result based on those same flags for the arguments. But if the
4272 arguments aren't really even `tree' expressions, we shouldn't be trying
4273 to do this. */
4274
4275 /* Expressions without side effects may be constant if their
4276 arguments are as well. */
4277 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4278 || TREE_CODE_CLASS (code) == tcc_binary);
4279 read_only = 1;
4280 side_effects = TREE_SIDE_EFFECTS (t);
4281
4282 PROCESS_ARG (0);
4283 PROCESS_ARG (1);
4284
4285 TREE_READONLY (t) = read_only;
4286 TREE_CONSTANT (t) = constant;
4287 TREE_SIDE_EFFECTS (t) = side_effects;
4288 TREE_THIS_VOLATILE (t)
4289 = (TREE_CODE_CLASS (code) == tcc_reference
4290 && arg0 && TREE_THIS_VOLATILE (arg0));
4291
4292 return t;
4293 }
4294
4295
4296 tree
4297 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4298 tree arg2 MEM_STAT_DECL)
4299 {
4300 bool constant, read_only, side_effects;
4301 tree t;
4302
4303 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4304 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4305
4306 t = make_node_stat (code PASS_MEM_STAT);
4307 TREE_TYPE (t) = tt;
4308
4309 read_only = 1;
4310
4311 /* As a special exception, if COND_EXPR has NULL branches, we
4312 assume that it is a gimple statement and always consider
4313 it to have side effects. */
4314 if (code == COND_EXPR
4315 && tt == void_type_node
4316 && arg1 == NULL_TREE
4317 && arg2 == NULL_TREE)
4318 side_effects = true;
4319 else
4320 side_effects = TREE_SIDE_EFFECTS (t);
4321
4322 PROCESS_ARG (0);
4323 PROCESS_ARG (1);
4324 PROCESS_ARG (2);
4325
4326 if (code == COND_EXPR)
4327 TREE_READONLY (t) = read_only;
4328
4329 TREE_SIDE_EFFECTS (t) = side_effects;
4330 TREE_THIS_VOLATILE (t)
4331 = (TREE_CODE_CLASS (code) == tcc_reference
4332 && arg0 && TREE_THIS_VOLATILE (arg0));
4333
4334 return t;
4335 }
4336
4337 tree
4338 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4339 tree arg2, tree arg3 MEM_STAT_DECL)
4340 {
4341 bool constant, read_only, side_effects;
4342 tree t;
4343
4344 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4345
4346 t = make_node_stat (code PASS_MEM_STAT);
4347 TREE_TYPE (t) = tt;
4348
4349 side_effects = TREE_SIDE_EFFECTS (t);
4350
4351 PROCESS_ARG (0);
4352 PROCESS_ARG (1);
4353 PROCESS_ARG (2);
4354 PROCESS_ARG (3);
4355
4356 TREE_SIDE_EFFECTS (t) = side_effects;
4357 TREE_THIS_VOLATILE (t)
4358 = (TREE_CODE_CLASS (code) == tcc_reference
4359 && arg0 && TREE_THIS_VOLATILE (arg0));
4360
4361 return t;
4362 }
4363
4364 tree
4365 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4366 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4367 {
4368 bool constant, read_only, side_effects;
4369 tree t;
4370
4371 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4372
4373 t = make_node_stat (code PASS_MEM_STAT);
4374 TREE_TYPE (t) = tt;
4375
4376 side_effects = TREE_SIDE_EFFECTS (t);
4377
4378 PROCESS_ARG (0);
4379 PROCESS_ARG (1);
4380 PROCESS_ARG (2);
4381 PROCESS_ARG (3);
4382 PROCESS_ARG (4);
4383
4384 TREE_SIDE_EFFECTS (t) = side_effects;
4385 TREE_THIS_VOLATILE (t)
4386 = (TREE_CODE_CLASS (code) == tcc_reference
4387 && arg0 && TREE_THIS_VOLATILE (arg0));
4388
4389 return t;
4390 }
4391
4392 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
4393 on the pointer PTR. */
4394
4395 tree
4396 build_simple_mem_ref_loc (location_t loc, tree ptr)
4397 {
4398 HOST_WIDE_INT offset = 0;
4399 tree ptype = TREE_TYPE (ptr);
4400 tree tem;
4401 /* For convenience allow addresses that collapse to a simple base
4402 and offset. */
4403 if (TREE_CODE (ptr) == ADDR_EXPR
4404 && (handled_component_p (TREE_OPERAND (ptr, 0))
4405 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4406 {
4407 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4408 gcc_assert (ptr);
4409 ptr = build_fold_addr_expr (ptr);
4410 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4411 }
4412 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4413 ptr, build_int_cst (ptype, offset));
4414 SET_EXPR_LOCATION (tem, loc);
4415 return tem;
4416 }
4417
4418 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4419
4420 offset_int
4421 mem_ref_offset (const_tree t)
4422 {
4423 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4424 }
4425
4426 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4427 offsetted by OFFSET units. */
4428
4429 tree
4430 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4431 {
4432 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4433 build_fold_addr_expr (base),
4434 build_int_cst (ptr_type_node, offset));
4435 tree addr = build1 (ADDR_EXPR, type, ref);
4436 recompute_tree_invariant_for_addr_expr (addr);
4437 return addr;
4438 }
4439
4440 /* Similar except don't specify the TREE_TYPE
4441 and leave the TREE_SIDE_EFFECTS as 0.
4442 It is permissible for arguments to be null,
4443 or even garbage if their values do not matter. */
4444
4445 tree
4446 build_nt (enum tree_code code, ...)
4447 {
4448 tree t;
4449 int length;
4450 int i;
4451 va_list p;
4452
4453 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4454
4455 va_start (p, code);
4456
4457 t = make_node (code);
4458 length = TREE_CODE_LENGTH (code);
4459
4460 for (i = 0; i < length; i++)
4461 TREE_OPERAND (t, i) = va_arg (p, tree);
4462
4463 va_end (p);
4464 return t;
4465 }
4466
4467 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4468 tree vec. */
4469
4470 tree
4471 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4472 {
4473 tree ret, t;
4474 unsigned int ix;
4475
4476 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4477 CALL_EXPR_FN (ret) = fn;
4478 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4479 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4480 CALL_EXPR_ARG (ret, ix) = t;
4481 return ret;
4482 }
4483 \f
4484 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4485 We do NOT enter this node in any sort of symbol table.
4486
4487 LOC is the location of the decl.
4488
4489 layout_decl is used to set up the decl's storage layout.
4490 Other slots are initialized to 0 or null pointers. */
4491
4492 tree
4493 build_decl_stat (location_t loc, enum tree_code code, tree name,
4494 tree type MEM_STAT_DECL)
4495 {
4496 tree t;
4497
4498 t = make_node_stat (code PASS_MEM_STAT);
4499 DECL_SOURCE_LOCATION (t) = loc;
4500
4501 /* if (type == error_mark_node)
4502 type = integer_type_node; */
4503 /* That is not done, deliberately, so that having error_mark_node
4504 as the type can suppress useless errors in the use of this variable. */
4505
4506 DECL_NAME (t) = name;
4507 TREE_TYPE (t) = type;
4508
4509 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4510 layout_decl (t, 0);
4511
4512 return t;
4513 }
4514
4515 /* Builds and returns function declaration with NAME and TYPE. */
4516
4517 tree
4518 build_fn_decl (const char *name, tree type)
4519 {
4520 tree id = get_identifier (name);
4521 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4522
4523 DECL_EXTERNAL (decl) = 1;
4524 TREE_PUBLIC (decl) = 1;
4525 DECL_ARTIFICIAL (decl) = 1;
4526 TREE_NOTHROW (decl) = 1;
4527
4528 return decl;
4529 }
4530
4531 vec<tree, va_gc> *all_translation_units;
4532
4533 /* Builds a new translation-unit decl with name NAME, queues it in the
4534 global list of translation-unit decls and returns it. */
4535
4536 tree
4537 build_translation_unit_decl (tree name)
4538 {
4539 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4540 name, NULL_TREE);
4541 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4542 vec_safe_push (all_translation_units, tu);
4543 return tu;
4544 }
4545
4546 \f
4547 /* BLOCK nodes are used to represent the structure of binding contours
4548 and declarations, once those contours have been exited and their contents
4549 compiled. This information is used for outputting debugging info. */
4550
4551 tree
4552 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4553 {
4554 tree block = make_node (BLOCK);
4555
4556 BLOCK_VARS (block) = vars;
4557 BLOCK_SUBBLOCKS (block) = subblocks;
4558 BLOCK_SUPERCONTEXT (block) = supercontext;
4559 BLOCK_CHAIN (block) = chain;
4560 return block;
4561 }
4562
4563 \f
4564 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4565
4566 LOC is the location to use in tree T. */
4567
4568 void
4569 protected_set_expr_location (tree t, location_t loc)
4570 {
4571 if (t && CAN_HAVE_LOCATION_P (t))
4572 SET_EXPR_LOCATION (t, loc);
4573 }
4574 \f
4575 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4576 is ATTRIBUTE. */
4577
4578 tree
4579 build_decl_attribute_variant (tree ddecl, tree attribute)
4580 {
4581 DECL_ATTRIBUTES (ddecl) = attribute;
4582 return ddecl;
4583 }
4584
4585 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4586 is ATTRIBUTE and its qualifiers are QUALS.
4587
4588 Record such modified types already made so we don't make duplicates. */
4589
4590 tree
4591 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4592 {
4593 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4594 {
4595 inchash::hash hstate;
4596 tree ntype;
4597 int i;
4598 tree t;
4599 enum tree_code code = TREE_CODE (ttype);
4600
4601 /* Building a distinct copy of a tagged type is inappropriate; it
4602 causes breakage in code that expects there to be a one-to-one
4603 relationship between a struct and its fields.
4604 build_duplicate_type is another solution (as used in
4605 handle_transparent_union_attribute), but that doesn't play well
4606 with the stronger C++ type identity model. */
4607 if (TREE_CODE (ttype) == RECORD_TYPE
4608 || TREE_CODE (ttype) == UNION_TYPE
4609 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4610 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4611 {
4612 warning (OPT_Wattributes,
4613 "ignoring attributes applied to %qT after definition",
4614 TYPE_MAIN_VARIANT (ttype));
4615 return build_qualified_type (ttype, quals);
4616 }
4617
4618 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4619 ntype = build_distinct_type_copy (ttype);
4620
4621 TYPE_ATTRIBUTES (ntype) = attribute;
4622
4623 hstate.add_int (code);
4624 if (TREE_TYPE (ntype))
4625 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4626 attribute_hash_list (attribute, hstate);
4627
4628 switch (TREE_CODE (ntype))
4629 {
4630 case FUNCTION_TYPE:
4631 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4632 break;
4633 case ARRAY_TYPE:
4634 if (TYPE_DOMAIN (ntype))
4635 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4636 break;
4637 case INTEGER_TYPE:
4638 t = TYPE_MAX_VALUE (ntype);
4639 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4640 hstate.add_object (TREE_INT_CST_ELT (t, i));
4641 break;
4642 case REAL_TYPE:
4643 case FIXED_POINT_TYPE:
4644 {
4645 unsigned int precision = TYPE_PRECISION (ntype);
4646 hstate.add_object (precision);
4647 }
4648 break;
4649 default:
4650 break;
4651 }
4652
4653 ntype = type_hash_canon (hstate.end(), ntype);
4654
4655 /* If the target-dependent attributes make NTYPE different from
4656 its canonical type, we will need to use structural equality
4657 checks for this type. */
4658 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4659 || !comp_type_attributes (ntype, ttype))
4660 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4661 else if (TYPE_CANONICAL (ntype) == ntype)
4662 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4663
4664 ttype = build_qualified_type (ntype, quals);
4665 }
4666 else if (TYPE_QUALS (ttype) != quals)
4667 ttype = build_qualified_type (ttype, quals);
4668
4669 return ttype;
4670 }
4671
4672 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4673 the same. */
4674
4675 static bool
4676 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4677 {
4678 tree cl1, cl2;
4679 for (cl1 = clauses1, cl2 = clauses2;
4680 cl1 && cl2;
4681 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4682 {
4683 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4684 return false;
4685 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4686 {
4687 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4688 OMP_CLAUSE_DECL (cl2)) != 1)
4689 return false;
4690 }
4691 switch (OMP_CLAUSE_CODE (cl1))
4692 {
4693 case OMP_CLAUSE_ALIGNED:
4694 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4695 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4696 return false;
4697 break;
4698 case OMP_CLAUSE_LINEAR:
4699 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4700 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4701 return false;
4702 break;
4703 case OMP_CLAUSE_SIMDLEN:
4704 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4705 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4706 return false;
4707 default:
4708 break;
4709 }
4710 }
4711 return true;
4712 }
4713
4714 /* Compare two constructor-element-type constants. Return 1 if the lists
4715 are known to be equal; otherwise return 0. */
4716
4717 static bool
4718 simple_cst_list_equal (const_tree l1, const_tree l2)
4719 {
4720 while (l1 != NULL_TREE && l2 != NULL_TREE)
4721 {
4722 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4723 return false;
4724
4725 l1 = TREE_CHAIN (l1);
4726 l2 = TREE_CHAIN (l2);
4727 }
4728
4729 return l1 == l2;
4730 }
4731
4732 /* Compare two attributes for their value identity. Return true if the
4733 attribute values are known to be equal; otherwise return false.
4734 */
4735
4736 static bool
4737 attribute_value_equal (const_tree attr1, const_tree attr2)
4738 {
4739 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4740 return true;
4741
4742 if (TREE_VALUE (attr1) != NULL_TREE
4743 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4744 && TREE_VALUE (attr2) != NULL
4745 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4746 return (simple_cst_list_equal (TREE_VALUE (attr1),
4747 TREE_VALUE (attr2)) == 1);
4748
4749 if ((flag_openmp || flag_openmp_simd)
4750 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4751 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4752 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4753 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4754 TREE_VALUE (attr2));
4755
4756 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4757 }
4758
4759 /* Return 0 if the attributes for two types are incompatible, 1 if they
4760 are compatible, and 2 if they are nearly compatible (which causes a
4761 warning to be generated). */
4762 int
4763 comp_type_attributes (const_tree type1, const_tree type2)
4764 {
4765 const_tree a1 = TYPE_ATTRIBUTES (type1);
4766 const_tree a2 = TYPE_ATTRIBUTES (type2);
4767 const_tree a;
4768
4769 if (a1 == a2)
4770 return 1;
4771 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4772 {
4773 const struct attribute_spec *as;
4774 const_tree attr;
4775
4776 as = lookup_attribute_spec (get_attribute_name (a));
4777 if (!as || as->affects_type_identity == false)
4778 continue;
4779
4780 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4781 if (!attr || !attribute_value_equal (a, attr))
4782 break;
4783 }
4784 if (!a)
4785 {
4786 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4787 {
4788 const struct attribute_spec *as;
4789
4790 as = lookup_attribute_spec (get_attribute_name (a));
4791 if (!as || as->affects_type_identity == false)
4792 continue;
4793
4794 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4795 break;
4796 /* We don't need to compare trees again, as we did this
4797 already in first loop. */
4798 }
4799 /* All types - affecting identity - are equal, so
4800 there is no need to call target hook for comparison. */
4801 if (!a)
4802 return 1;
4803 }
4804 /* As some type combinations - like default calling-convention - might
4805 be compatible, we have to call the target hook to get the final result. */
4806 return targetm.comp_type_attributes (type1, type2);
4807 }
4808
4809 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4810 is ATTRIBUTE.
4811
4812 Record such modified types already made so we don't make duplicates. */
4813
4814 tree
4815 build_type_attribute_variant (tree ttype, tree attribute)
4816 {
4817 return build_type_attribute_qual_variant (ttype, attribute,
4818 TYPE_QUALS (ttype));
4819 }
4820
4821
4822 /* Reset the expression *EXPR_P, a size or position.
4823
4824 ??? We could reset all non-constant sizes or positions. But it's cheap
4825 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4826
4827 We need to reset self-referential sizes or positions because they cannot
4828 be gimplified and thus can contain a CALL_EXPR after the gimplification
4829 is finished, which will run afoul of LTO streaming. And they need to be
4830 reset to something essentially dummy but not constant, so as to preserve
4831 the properties of the object they are attached to. */
4832
4833 static inline void
4834 free_lang_data_in_one_sizepos (tree *expr_p)
4835 {
4836 tree expr = *expr_p;
4837 if (CONTAINS_PLACEHOLDER_P (expr))
4838 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4839 }
4840
4841
4842 /* Reset all the fields in a binfo node BINFO. We only keep
4843 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4844
4845 static void
4846 free_lang_data_in_binfo (tree binfo)
4847 {
4848 unsigned i;
4849 tree t;
4850
4851 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4852
4853 BINFO_VIRTUALS (binfo) = NULL_TREE;
4854 BINFO_BASE_ACCESSES (binfo) = NULL;
4855 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4856 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4857
4858 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4859 free_lang_data_in_binfo (t);
4860 }
4861
4862
4863 /* Reset all language specific information still present in TYPE. */
4864
4865 static void
4866 free_lang_data_in_type (tree type)
4867 {
4868 gcc_assert (TYPE_P (type));
4869
4870 /* Give the FE a chance to remove its own data first. */
4871 lang_hooks.free_lang_data (type);
4872
4873 TREE_LANG_FLAG_0 (type) = 0;
4874 TREE_LANG_FLAG_1 (type) = 0;
4875 TREE_LANG_FLAG_2 (type) = 0;
4876 TREE_LANG_FLAG_3 (type) = 0;
4877 TREE_LANG_FLAG_4 (type) = 0;
4878 TREE_LANG_FLAG_5 (type) = 0;
4879 TREE_LANG_FLAG_6 (type) = 0;
4880
4881 if (TREE_CODE (type) == FUNCTION_TYPE)
4882 {
4883 /* Remove the const and volatile qualifiers from arguments. The
4884 C++ front end removes them, but the C front end does not,
4885 leading to false ODR violation errors when merging two
4886 instances of the same function signature compiled by
4887 different front ends. */
4888 tree p;
4889
4890 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4891 {
4892 tree arg_type = TREE_VALUE (p);
4893
4894 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4895 {
4896 int quals = TYPE_QUALS (arg_type)
4897 & ~TYPE_QUAL_CONST
4898 & ~TYPE_QUAL_VOLATILE;
4899 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4900 free_lang_data_in_type (TREE_VALUE (p));
4901 }
4902 }
4903 }
4904
4905 /* Remove members that are not actually FIELD_DECLs from the field
4906 list of an aggregate. These occur in C++. */
4907 if (RECORD_OR_UNION_TYPE_P (type))
4908 {
4909 tree prev, member;
4910
4911 /* Note that TYPE_FIELDS can be shared across distinct
4912 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4913 to be removed, we cannot set its TREE_CHAIN to NULL.
4914 Otherwise, we would not be able to find all the other fields
4915 in the other instances of this TREE_TYPE.
4916
4917 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4918 prev = NULL_TREE;
4919 member = TYPE_FIELDS (type);
4920 while (member)
4921 {
4922 if (TREE_CODE (member) == FIELD_DECL
4923 || TREE_CODE (member) == TYPE_DECL)
4924 {
4925 if (prev)
4926 TREE_CHAIN (prev) = member;
4927 else
4928 TYPE_FIELDS (type) = member;
4929 prev = member;
4930 }
4931
4932 member = TREE_CHAIN (member);
4933 }
4934
4935 if (prev)
4936 TREE_CHAIN (prev) = NULL_TREE;
4937 else
4938 TYPE_FIELDS (type) = NULL_TREE;
4939
4940 TYPE_METHODS (type) = NULL_TREE;
4941 if (TYPE_BINFO (type))
4942 free_lang_data_in_binfo (TYPE_BINFO (type));
4943 }
4944 else
4945 {
4946 /* For non-aggregate types, clear out the language slot (which
4947 overloads TYPE_BINFO). */
4948 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4949
4950 if (INTEGRAL_TYPE_P (type)
4951 || SCALAR_FLOAT_TYPE_P (type)
4952 || FIXED_POINT_TYPE_P (type))
4953 {
4954 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4955 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4956 }
4957 }
4958
4959 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4960 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4961
4962 if (TYPE_CONTEXT (type)
4963 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4964 {
4965 tree ctx = TYPE_CONTEXT (type);
4966 do
4967 {
4968 ctx = BLOCK_SUPERCONTEXT (ctx);
4969 }
4970 while (ctx && TREE_CODE (ctx) == BLOCK);
4971 TYPE_CONTEXT (type) = ctx;
4972 }
4973 }
4974
4975
4976 /* Return true if DECL may need an assembler name to be set. */
4977
4978 static inline bool
4979 need_assembler_name_p (tree decl)
4980 {
4981 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
4982 if (TREE_CODE (decl) != FUNCTION_DECL
4983 && TREE_CODE (decl) != VAR_DECL)
4984 return false;
4985
4986 /* If DECL already has its assembler name set, it does not need a
4987 new one. */
4988 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
4989 || DECL_ASSEMBLER_NAME_SET_P (decl))
4990 return false;
4991
4992 /* Abstract decls do not need an assembler name. */
4993 if (DECL_ABSTRACT (decl))
4994 return false;
4995
4996 /* For VAR_DECLs, only static, public and external symbols need an
4997 assembler name. */
4998 if (TREE_CODE (decl) == VAR_DECL
4999 && !TREE_STATIC (decl)
5000 && !TREE_PUBLIC (decl)
5001 && !DECL_EXTERNAL (decl))
5002 return false;
5003
5004 if (TREE_CODE (decl) == FUNCTION_DECL)
5005 {
5006 /* Do not set assembler name on builtins. Allow RTL expansion to
5007 decide whether to expand inline or via a regular call. */
5008 if (DECL_BUILT_IN (decl)
5009 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5010 return false;
5011
5012 /* Functions represented in the callgraph need an assembler name. */
5013 if (cgraph_node::get (decl) != NULL)
5014 return true;
5015
5016 /* Unused and not public functions don't need an assembler name. */
5017 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5018 return false;
5019 }
5020
5021 return true;
5022 }
5023
5024
5025 /* Reset all language specific information still present in symbol
5026 DECL. */
5027
5028 static void
5029 free_lang_data_in_decl (tree decl)
5030 {
5031 gcc_assert (DECL_P (decl));
5032
5033 /* Give the FE a chance to remove its own data first. */
5034 lang_hooks.free_lang_data (decl);
5035
5036 TREE_LANG_FLAG_0 (decl) = 0;
5037 TREE_LANG_FLAG_1 (decl) = 0;
5038 TREE_LANG_FLAG_2 (decl) = 0;
5039 TREE_LANG_FLAG_3 (decl) = 0;
5040 TREE_LANG_FLAG_4 (decl) = 0;
5041 TREE_LANG_FLAG_5 (decl) = 0;
5042 TREE_LANG_FLAG_6 (decl) = 0;
5043
5044 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5045 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5046 if (TREE_CODE (decl) == FIELD_DECL)
5047 {
5048 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5049 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5050 DECL_QUALIFIER (decl) = NULL_TREE;
5051 }
5052
5053 if (TREE_CODE (decl) == FUNCTION_DECL)
5054 {
5055 struct cgraph_node *node;
5056 if (!(node = cgraph_node::get (decl))
5057 || (!node->definition && !node->clones))
5058 {
5059 if (node)
5060 node->release_body ();
5061 else
5062 {
5063 release_function_body (decl);
5064 DECL_ARGUMENTS (decl) = NULL;
5065 DECL_RESULT (decl) = NULL;
5066 DECL_INITIAL (decl) = error_mark_node;
5067 }
5068 }
5069 if (gimple_has_body_p (decl))
5070 {
5071 tree t;
5072
5073 /* If DECL has a gimple body, then the context for its
5074 arguments must be DECL. Otherwise, it doesn't really
5075 matter, as we will not be emitting any code for DECL. In
5076 general, there may be other instances of DECL created by
5077 the front end and since PARM_DECLs are generally shared,
5078 their DECL_CONTEXT changes as the replicas of DECL are
5079 created. The only time where DECL_CONTEXT is important
5080 is for the FUNCTION_DECLs that have a gimple body (since
5081 the PARM_DECL will be used in the function's body). */
5082 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5083 DECL_CONTEXT (t) = decl;
5084 }
5085
5086 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5087 At this point, it is not needed anymore. */
5088 DECL_SAVED_TREE (decl) = NULL_TREE;
5089
5090 /* Clear the abstract origin if it refers to a method. Otherwise
5091 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5092 origin will not be output correctly. */
5093 if (DECL_ABSTRACT_ORIGIN (decl)
5094 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5095 && RECORD_OR_UNION_TYPE_P
5096 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5097 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5098
5099 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5100 DECL_VINDEX referring to itself into a vtable slot number as it
5101 should. Happens with functions that are copied and then forgotten
5102 about. Just clear it, it won't matter anymore. */
5103 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5104 DECL_VINDEX (decl) = NULL_TREE;
5105 }
5106 else if (TREE_CODE (decl) == VAR_DECL)
5107 {
5108 if ((DECL_EXTERNAL (decl)
5109 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5110 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5111 DECL_INITIAL (decl) = NULL_TREE;
5112 }
5113 else if (TREE_CODE (decl) == TYPE_DECL
5114 || TREE_CODE (decl) == FIELD_DECL)
5115 DECL_INITIAL (decl) = NULL_TREE;
5116 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5117 && DECL_INITIAL (decl)
5118 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5119 {
5120 /* Strip builtins from the translation-unit BLOCK. We still have targets
5121 without builtin_decl_explicit support and also builtins are shared
5122 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5123 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5124 while (*nextp)
5125 {
5126 tree var = *nextp;
5127 if (TREE_CODE (var) == FUNCTION_DECL
5128 && DECL_BUILT_IN (var))
5129 *nextp = TREE_CHAIN (var);
5130 else
5131 nextp = &TREE_CHAIN (var);
5132 }
5133 }
5134 }
5135
5136
5137 /* Data used when collecting DECLs and TYPEs for language data removal. */
5138
5139 struct free_lang_data_d
5140 {
5141 /* Worklist to avoid excessive recursion. */
5142 vec<tree> worklist;
5143
5144 /* Set of traversed objects. Used to avoid duplicate visits. */
5145 hash_set<tree> *pset;
5146
5147 /* Array of symbols to process with free_lang_data_in_decl. */
5148 vec<tree> decls;
5149
5150 /* Array of types to process with free_lang_data_in_type. */
5151 vec<tree> types;
5152 };
5153
5154
5155 /* Save all language fields needed to generate proper debug information
5156 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5157
5158 static void
5159 save_debug_info_for_decl (tree t)
5160 {
5161 /*struct saved_debug_info_d *sdi;*/
5162
5163 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5164
5165 /* FIXME. Partial implementation for saving debug info removed. */
5166 }
5167
5168
5169 /* Save all language fields needed to generate proper debug information
5170 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5171
5172 static void
5173 save_debug_info_for_type (tree t)
5174 {
5175 /*struct saved_debug_info_d *sdi;*/
5176
5177 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5178
5179 /* FIXME. Partial implementation for saving debug info removed. */
5180 }
5181
5182
5183 /* Add type or decl T to one of the list of tree nodes that need their
5184 language data removed. The lists are held inside FLD. */
5185
5186 static void
5187 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5188 {
5189 if (DECL_P (t))
5190 {
5191 fld->decls.safe_push (t);
5192 if (debug_info_level > DINFO_LEVEL_TERSE)
5193 save_debug_info_for_decl (t);
5194 }
5195 else if (TYPE_P (t))
5196 {
5197 fld->types.safe_push (t);
5198 if (debug_info_level > DINFO_LEVEL_TERSE)
5199 save_debug_info_for_type (t);
5200 }
5201 else
5202 gcc_unreachable ();
5203 }
5204
5205 /* Push tree node T into FLD->WORKLIST. */
5206
5207 static inline void
5208 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5209 {
5210 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5211 fld->worklist.safe_push ((t));
5212 }
5213
5214
5215 /* Operand callback helper for free_lang_data_in_node. *TP is the
5216 subtree operand being considered. */
5217
5218 static tree
5219 find_decls_types_r (tree *tp, int *ws, void *data)
5220 {
5221 tree t = *tp;
5222 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5223
5224 if (TREE_CODE (t) == TREE_LIST)
5225 return NULL_TREE;
5226
5227 /* Language specific nodes will be removed, so there is no need
5228 to gather anything under them. */
5229 if (is_lang_specific (t))
5230 {
5231 *ws = 0;
5232 return NULL_TREE;
5233 }
5234
5235 if (DECL_P (t))
5236 {
5237 /* Note that walk_tree does not traverse every possible field in
5238 decls, so we have to do our own traversals here. */
5239 add_tree_to_fld_list (t, fld);
5240
5241 fld_worklist_push (DECL_NAME (t), fld);
5242 fld_worklist_push (DECL_CONTEXT (t), fld);
5243 fld_worklist_push (DECL_SIZE (t), fld);
5244 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5245
5246 /* We are going to remove everything under DECL_INITIAL for
5247 TYPE_DECLs. No point walking them. */
5248 if (TREE_CODE (t) != TYPE_DECL)
5249 fld_worklist_push (DECL_INITIAL (t), fld);
5250
5251 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5252 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5253
5254 if (TREE_CODE (t) == FUNCTION_DECL)
5255 {
5256 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5257 fld_worklist_push (DECL_RESULT (t), fld);
5258 }
5259 else if (TREE_CODE (t) == TYPE_DECL)
5260 {
5261 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5262 }
5263 else if (TREE_CODE (t) == FIELD_DECL)
5264 {
5265 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5266 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5267 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5268 fld_worklist_push (DECL_FCONTEXT (t), fld);
5269 }
5270
5271 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5272 && DECL_HAS_VALUE_EXPR_P (t))
5273 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5274
5275 if (TREE_CODE (t) != FIELD_DECL
5276 && TREE_CODE (t) != TYPE_DECL)
5277 fld_worklist_push (TREE_CHAIN (t), fld);
5278 *ws = 0;
5279 }
5280 else if (TYPE_P (t))
5281 {
5282 /* Note that walk_tree does not traverse every possible field in
5283 types, so we have to do our own traversals here. */
5284 add_tree_to_fld_list (t, fld);
5285
5286 if (!RECORD_OR_UNION_TYPE_P (t))
5287 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5288 fld_worklist_push (TYPE_SIZE (t), fld);
5289 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5290 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5291 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5292 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5293 fld_worklist_push (TYPE_NAME (t), fld);
5294 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5295 them and thus do not and want not to reach unused pointer types
5296 this way. */
5297 if (!POINTER_TYPE_P (t))
5298 fld_worklist_push (TYPE_MINVAL (t), fld);
5299 if (!RECORD_OR_UNION_TYPE_P (t))
5300 fld_worklist_push (TYPE_MAXVAL (t), fld);
5301 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5302 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5303 do not and want not to reach unused variants this way. */
5304 if (TYPE_CONTEXT (t))
5305 {
5306 tree ctx = TYPE_CONTEXT (t);
5307 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5308 So push that instead. */
5309 while (ctx && TREE_CODE (ctx) == BLOCK)
5310 ctx = BLOCK_SUPERCONTEXT (ctx);
5311 fld_worklist_push (ctx, fld);
5312 }
5313 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5314 and want not to reach unused types this way. */
5315
5316 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5317 {
5318 unsigned i;
5319 tree tem;
5320 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5321 fld_worklist_push (TREE_TYPE (tem), fld);
5322 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5323 if (tem
5324 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5325 && TREE_CODE (tem) == TREE_LIST)
5326 do
5327 {
5328 fld_worklist_push (TREE_VALUE (tem), fld);
5329 tem = TREE_CHAIN (tem);
5330 }
5331 while (tem);
5332 }
5333 if (RECORD_OR_UNION_TYPE_P (t))
5334 {
5335 tree tem;
5336 /* Push all TYPE_FIELDS - there can be interleaving interesting
5337 and non-interesting things. */
5338 tem = TYPE_FIELDS (t);
5339 while (tem)
5340 {
5341 if (TREE_CODE (tem) == FIELD_DECL
5342 || TREE_CODE (tem) == TYPE_DECL)
5343 fld_worklist_push (tem, fld);
5344 tem = TREE_CHAIN (tem);
5345 }
5346 }
5347
5348 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5349 *ws = 0;
5350 }
5351 else if (TREE_CODE (t) == BLOCK)
5352 {
5353 tree tem;
5354 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5355 fld_worklist_push (tem, fld);
5356 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5357 fld_worklist_push (tem, fld);
5358 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5359 }
5360
5361 if (TREE_CODE (t) != IDENTIFIER_NODE
5362 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5363 fld_worklist_push (TREE_TYPE (t), fld);
5364
5365 return NULL_TREE;
5366 }
5367
5368
5369 /* Find decls and types in T. */
5370
5371 static void
5372 find_decls_types (tree t, struct free_lang_data_d *fld)
5373 {
5374 while (1)
5375 {
5376 if (!fld->pset->contains (t))
5377 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5378 if (fld->worklist.is_empty ())
5379 break;
5380 t = fld->worklist.pop ();
5381 }
5382 }
5383
5384 /* Translate all the types in LIST with the corresponding runtime
5385 types. */
5386
5387 static tree
5388 get_eh_types_for_runtime (tree list)
5389 {
5390 tree head, prev;
5391
5392 if (list == NULL_TREE)
5393 return NULL_TREE;
5394
5395 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5396 prev = head;
5397 list = TREE_CHAIN (list);
5398 while (list)
5399 {
5400 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5401 TREE_CHAIN (prev) = n;
5402 prev = TREE_CHAIN (prev);
5403 list = TREE_CHAIN (list);
5404 }
5405
5406 return head;
5407 }
5408
5409
5410 /* Find decls and types referenced in EH region R and store them in
5411 FLD->DECLS and FLD->TYPES. */
5412
5413 static void
5414 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5415 {
5416 switch (r->type)
5417 {
5418 case ERT_CLEANUP:
5419 break;
5420
5421 case ERT_TRY:
5422 {
5423 eh_catch c;
5424
5425 /* The types referenced in each catch must first be changed to the
5426 EH types used at runtime. This removes references to FE types
5427 in the region. */
5428 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5429 {
5430 c->type_list = get_eh_types_for_runtime (c->type_list);
5431 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5432 }
5433 }
5434 break;
5435
5436 case ERT_ALLOWED_EXCEPTIONS:
5437 r->u.allowed.type_list
5438 = get_eh_types_for_runtime (r->u.allowed.type_list);
5439 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5440 break;
5441
5442 case ERT_MUST_NOT_THROW:
5443 walk_tree (&r->u.must_not_throw.failure_decl,
5444 find_decls_types_r, fld, fld->pset);
5445 break;
5446 }
5447 }
5448
5449
5450 /* Find decls and types referenced in cgraph node N and store them in
5451 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5452 look for *every* kind of DECL and TYPE node reachable from N,
5453 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5454 NAMESPACE_DECLs, etc). */
5455
5456 static void
5457 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5458 {
5459 basic_block bb;
5460 struct function *fn;
5461 unsigned ix;
5462 tree t;
5463
5464 find_decls_types (n->decl, fld);
5465
5466 if (!gimple_has_body_p (n->decl))
5467 return;
5468
5469 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5470
5471 fn = DECL_STRUCT_FUNCTION (n->decl);
5472
5473 /* Traverse locals. */
5474 FOR_EACH_LOCAL_DECL (fn, ix, t)
5475 find_decls_types (t, fld);
5476
5477 /* Traverse EH regions in FN. */
5478 {
5479 eh_region r;
5480 FOR_ALL_EH_REGION_FN (r, fn)
5481 find_decls_types_in_eh_region (r, fld);
5482 }
5483
5484 /* Traverse every statement in FN. */
5485 FOR_EACH_BB_FN (bb, fn)
5486 {
5487 gimple_stmt_iterator si;
5488 unsigned i;
5489
5490 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5491 {
5492 gimple phi = gsi_stmt (si);
5493
5494 for (i = 0; i < gimple_phi_num_args (phi); i++)
5495 {
5496 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5497 find_decls_types (*arg_p, fld);
5498 }
5499 }
5500
5501 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5502 {
5503 gimple stmt = gsi_stmt (si);
5504
5505 if (is_gimple_call (stmt))
5506 find_decls_types (gimple_call_fntype (stmt), fld);
5507
5508 for (i = 0; i < gimple_num_ops (stmt); i++)
5509 {
5510 tree arg = gimple_op (stmt, i);
5511 find_decls_types (arg, fld);
5512 }
5513 }
5514 }
5515 }
5516
5517
5518 /* Find decls and types referenced in varpool node N and store them in
5519 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5520 look for *every* kind of DECL and TYPE node reachable from N,
5521 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5522 NAMESPACE_DECLs, etc). */
5523
5524 static void
5525 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5526 {
5527 find_decls_types (v->decl, fld);
5528 }
5529
5530 /* If T needs an assembler name, have one created for it. */
5531
5532 void
5533 assign_assembler_name_if_neeeded (tree t)
5534 {
5535 if (need_assembler_name_p (t))
5536 {
5537 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5538 diagnostics that use input_location to show locus
5539 information. The problem here is that, at this point,
5540 input_location is generally anchored to the end of the file
5541 (since the parser is long gone), so we don't have a good
5542 position to pin it to.
5543
5544 To alleviate this problem, this uses the location of T's
5545 declaration. Examples of this are
5546 testsuite/g++.dg/template/cond2.C and
5547 testsuite/g++.dg/template/pr35240.C. */
5548 location_t saved_location = input_location;
5549 input_location = DECL_SOURCE_LOCATION (t);
5550
5551 decl_assembler_name (t);
5552
5553 input_location = saved_location;
5554 }
5555 }
5556
5557
5558 /* Free language specific information for every operand and expression
5559 in every node of the call graph. This process operates in three stages:
5560
5561 1- Every callgraph node and varpool node is traversed looking for
5562 decls and types embedded in them. This is a more exhaustive
5563 search than that done by find_referenced_vars, because it will
5564 also collect individual fields, decls embedded in types, etc.
5565
5566 2- All the decls found are sent to free_lang_data_in_decl.
5567
5568 3- All the types found are sent to free_lang_data_in_type.
5569
5570 The ordering between decls and types is important because
5571 free_lang_data_in_decl sets assembler names, which includes
5572 mangling. So types cannot be freed up until assembler names have
5573 been set up. */
5574
5575 static void
5576 free_lang_data_in_cgraph (void)
5577 {
5578 struct cgraph_node *n;
5579 varpool_node *v;
5580 struct free_lang_data_d fld;
5581 tree t;
5582 unsigned i;
5583 alias_pair *p;
5584
5585 /* Initialize sets and arrays to store referenced decls and types. */
5586 fld.pset = new hash_set<tree>;
5587 fld.worklist.create (0);
5588 fld.decls.create (100);
5589 fld.types.create (100);
5590
5591 /* Find decls and types in the body of every function in the callgraph. */
5592 FOR_EACH_FUNCTION (n)
5593 find_decls_types_in_node (n, &fld);
5594
5595 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5596 find_decls_types (p->decl, &fld);
5597
5598 /* Find decls and types in every varpool symbol. */
5599 FOR_EACH_VARIABLE (v)
5600 find_decls_types_in_var (v, &fld);
5601
5602 /* Set the assembler name on every decl found. We need to do this
5603 now because free_lang_data_in_decl will invalidate data needed
5604 for mangling. This breaks mangling on interdependent decls. */
5605 FOR_EACH_VEC_ELT (fld.decls, i, t)
5606 assign_assembler_name_if_neeeded (t);
5607
5608 /* Traverse every decl found freeing its language data. */
5609 FOR_EACH_VEC_ELT (fld.decls, i, t)
5610 free_lang_data_in_decl (t);
5611
5612 /* Traverse every type found freeing its language data. */
5613 FOR_EACH_VEC_ELT (fld.types, i, t)
5614 free_lang_data_in_type (t);
5615
5616 delete fld.pset;
5617 fld.worklist.release ();
5618 fld.decls.release ();
5619 fld.types.release ();
5620 }
5621
5622
5623 /* Free resources that are used by FE but are not needed once they are done. */
5624
5625 static unsigned
5626 free_lang_data (void)
5627 {
5628 unsigned i;
5629
5630 /* If we are the LTO frontend we have freed lang-specific data already. */
5631 if (in_lto_p
5632 || !flag_generate_lto)
5633 return 0;
5634
5635 /* Allocate and assign alias sets to the standard integer types
5636 while the slots are still in the way the frontends generated them. */
5637 for (i = 0; i < itk_none; ++i)
5638 if (integer_types[i])
5639 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5640
5641 /* Traverse the IL resetting language specific information for
5642 operands, expressions, etc. */
5643 free_lang_data_in_cgraph ();
5644
5645 /* Create gimple variants for common types. */
5646 ptrdiff_type_node = integer_type_node;
5647 fileptr_type_node = ptr_type_node;
5648
5649 /* Reset some langhooks. Do not reset types_compatible_p, it may
5650 still be used indirectly via the get_alias_set langhook. */
5651 lang_hooks.dwarf_name = lhd_dwarf_name;
5652 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5653 /* We do not want the default decl_assembler_name implementation,
5654 rather if we have fixed everything we want a wrapper around it
5655 asserting that all non-local symbols already got their assembler
5656 name and only produce assembler names for local symbols. Or rather
5657 make sure we never call decl_assembler_name on local symbols and
5658 devise a separate, middle-end private scheme for it. */
5659
5660 /* Reset diagnostic machinery. */
5661 tree_diagnostics_defaults (global_dc);
5662
5663 return 0;
5664 }
5665
5666
5667 namespace {
5668
5669 const pass_data pass_data_ipa_free_lang_data =
5670 {
5671 SIMPLE_IPA_PASS, /* type */
5672 "*free_lang_data", /* name */
5673 OPTGROUP_NONE, /* optinfo_flags */
5674 TV_IPA_FREE_LANG_DATA, /* tv_id */
5675 0, /* properties_required */
5676 0, /* properties_provided */
5677 0, /* properties_destroyed */
5678 0, /* todo_flags_start */
5679 0, /* todo_flags_finish */
5680 };
5681
5682 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5683 {
5684 public:
5685 pass_ipa_free_lang_data (gcc::context *ctxt)
5686 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5687 {}
5688
5689 /* opt_pass methods: */
5690 virtual unsigned int execute (function *) { return free_lang_data (); }
5691
5692 }; // class pass_ipa_free_lang_data
5693
5694 } // anon namespace
5695
5696 simple_ipa_opt_pass *
5697 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5698 {
5699 return new pass_ipa_free_lang_data (ctxt);
5700 }
5701
5702 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5703 ATTR_NAME. Also used internally by remove_attribute(). */
5704 bool
5705 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5706 {
5707 size_t ident_len = IDENTIFIER_LENGTH (ident);
5708
5709 if (ident_len == attr_len)
5710 {
5711 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5712 return true;
5713 }
5714 else if (ident_len == attr_len + 4)
5715 {
5716 /* There is the possibility that ATTR is 'text' and IDENT is
5717 '__text__'. */
5718 const char *p = IDENTIFIER_POINTER (ident);
5719 if (p[0] == '_' && p[1] == '_'
5720 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5721 && strncmp (attr_name, p + 2, attr_len) == 0)
5722 return true;
5723 }
5724
5725 return false;
5726 }
5727
5728 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5729 of ATTR_NAME, and LIST is not NULL_TREE. */
5730 tree
5731 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5732 {
5733 while (list)
5734 {
5735 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5736
5737 if (ident_len == attr_len)
5738 {
5739 if (!strcmp (attr_name,
5740 IDENTIFIER_POINTER (get_attribute_name (list))))
5741 break;
5742 }
5743 /* TODO: If we made sure that attributes were stored in the
5744 canonical form without '__...__' (ie, as in 'text' as opposed
5745 to '__text__') then we could avoid the following case. */
5746 else if (ident_len == attr_len + 4)
5747 {
5748 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5749 if (p[0] == '_' && p[1] == '_'
5750 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5751 && strncmp (attr_name, p + 2, attr_len) == 0)
5752 break;
5753 }
5754 list = TREE_CHAIN (list);
5755 }
5756
5757 return list;
5758 }
5759
5760 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5761 return a pointer to the attribute's list first element if the attribute
5762 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5763 '__text__'). */
5764
5765 tree
5766 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5767 tree list)
5768 {
5769 while (list)
5770 {
5771 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5772
5773 if (attr_len > ident_len)
5774 {
5775 list = TREE_CHAIN (list);
5776 continue;
5777 }
5778
5779 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5780
5781 if (strncmp (attr_name, p, attr_len) == 0)
5782 break;
5783
5784 /* TODO: If we made sure that attributes were stored in the
5785 canonical form without '__...__' (ie, as in 'text' as opposed
5786 to '__text__') then we could avoid the following case. */
5787 if (p[0] == '_' && p[1] == '_' &&
5788 strncmp (attr_name, p + 2, attr_len) == 0)
5789 break;
5790
5791 list = TREE_CHAIN (list);
5792 }
5793
5794 return list;
5795 }
5796
5797
5798 /* A variant of lookup_attribute() that can be used with an identifier
5799 as the first argument, and where the identifier can be either
5800 'text' or '__text__'.
5801
5802 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5803 return a pointer to the attribute's list element if the attribute
5804 is part of the list, or NULL_TREE if not found. If the attribute
5805 appears more than once, this only returns the first occurrence; the
5806 TREE_CHAIN of the return value should be passed back in if further
5807 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5808 can be in the form 'text' or '__text__'. */
5809 static tree
5810 lookup_ident_attribute (tree attr_identifier, tree list)
5811 {
5812 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5813
5814 while (list)
5815 {
5816 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5817 == IDENTIFIER_NODE);
5818
5819 /* Identifiers can be compared directly for equality. */
5820 if (attr_identifier == get_attribute_name (list))
5821 break;
5822
5823 /* If they are not equal, they may still be one in the form
5824 'text' while the other one is in the form '__text__'. TODO:
5825 If we were storing attributes in normalized 'text' form, then
5826 this could all go away and we could take full advantage of
5827 the fact that we're comparing identifiers. :-) */
5828 {
5829 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5830 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5831
5832 if (ident_len == attr_len + 4)
5833 {
5834 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5835 const char *q = IDENTIFIER_POINTER (attr_identifier);
5836 if (p[0] == '_' && p[1] == '_'
5837 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5838 && strncmp (q, p + 2, attr_len) == 0)
5839 break;
5840 }
5841 else if (ident_len + 4 == attr_len)
5842 {
5843 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5844 const char *q = IDENTIFIER_POINTER (attr_identifier);
5845 if (q[0] == '_' && q[1] == '_'
5846 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5847 && strncmp (q + 2, p, ident_len) == 0)
5848 break;
5849 }
5850 }
5851 list = TREE_CHAIN (list);
5852 }
5853
5854 return list;
5855 }
5856
5857 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5858 modified list. */
5859
5860 tree
5861 remove_attribute (const char *attr_name, tree list)
5862 {
5863 tree *p;
5864 size_t attr_len = strlen (attr_name);
5865
5866 gcc_checking_assert (attr_name[0] != '_');
5867
5868 for (p = &list; *p; )
5869 {
5870 tree l = *p;
5871 /* TODO: If we were storing attributes in normalized form, here
5872 we could use a simple strcmp(). */
5873 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5874 *p = TREE_CHAIN (l);
5875 else
5876 p = &TREE_CHAIN (l);
5877 }
5878
5879 return list;
5880 }
5881
5882 /* Return an attribute list that is the union of a1 and a2. */
5883
5884 tree
5885 merge_attributes (tree a1, tree a2)
5886 {
5887 tree attributes;
5888
5889 /* Either one unset? Take the set one. */
5890
5891 if ((attributes = a1) == 0)
5892 attributes = a2;
5893
5894 /* One that completely contains the other? Take it. */
5895
5896 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5897 {
5898 if (attribute_list_contained (a2, a1))
5899 attributes = a2;
5900 else
5901 {
5902 /* Pick the longest list, and hang on the other list. */
5903
5904 if (list_length (a1) < list_length (a2))
5905 attributes = a2, a2 = a1;
5906
5907 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5908 {
5909 tree a;
5910 for (a = lookup_ident_attribute (get_attribute_name (a2),
5911 attributes);
5912 a != NULL_TREE && !attribute_value_equal (a, a2);
5913 a = lookup_ident_attribute (get_attribute_name (a2),
5914 TREE_CHAIN (a)))
5915 ;
5916 if (a == NULL_TREE)
5917 {
5918 a1 = copy_node (a2);
5919 TREE_CHAIN (a1) = attributes;
5920 attributes = a1;
5921 }
5922 }
5923 }
5924 }
5925 return attributes;
5926 }
5927
5928 /* Given types T1 and T2, merge their attributes and return
5929 the result. */
5930
5931 tree
5932 merge_type_attributes (tree t1, tree t2)
5933 {
5934 return merge_attributes (TYPE_ATTRIBUTES (t1),
5935 TYPE_ATTRIBUTES (t2));
5936 }
5937
5938 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5939 the result. */
5940
5941 tree
5942 merge_decl_attributes (tree olddecl, tree newdecl)
5943 {
5944 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5945 DECL_ATTRIBUTES (newdecl));
5946 }
5947
5948 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5949
5950 /* Specialization of merge_decl_attributes for various Windows targets.
5951
5952 This handles the following situation:
5953
5954 __declspec (dllimport) int foo;
5955 int foo;
5956
5957 The second instance of `foo' nullifies the dllimport. */
5958
5959 tree
5960 merge_dllimport_decl_attributes (tree old, tree new_tree)
5961 {
5962 tree a;
5963 int delete_dllimport_p = 1;
5964
5965 /* What we need to do here is remove from `old' dllimport if it doesn't
5966 appear in `new'. dllimport behaves like extern: if a declaration is
5967 marked dllimport and a definition appears later, then the object
5968 is not dllimport'd. We also remove a `new' dllimport if the old list
5969 contains dllexport: dllexport always overrides dllimport, regardless
5970 of the order of declaration. */
5971 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
5972 delete_dllimport_p = 0;
5973 else if (DECL_DLLIMPORT_P (new_tree)
5974 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
5975 {
5976 DECL_DLLIMPORT_P (new_tree) = 0;
5977 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
5978 "dllimport ignored", new_tree);
5979 }
5980 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
5981 {
5982 /* Warn about overriding a symbol that has already been used, e.g.:
5983 extern int __attribute__ ((dllimport)) foo;
5984 int* bar () {return &foo;}
5985 int foo;
5986 */
5987 if (TREE_USED (old))
5988 {
5989 warning (0, "%q+D redeclared without dllimport attribute "
5990 "after being referenced with dll linkage", new_tree);
5991 /* If we have used a variable's address with dllimport linkage,
5992 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
5993 decl may already have had TREE_CONSTANT computed.
5994 We still remove the attribute so that assembler code refers
5995 to '&foo rather than '_imp__foo'. */
5996 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
5997 DECL_DLLIMPORT_P (new_tree) = 1;
5998 }
5999
6000 /* Let an inline definition silently override the external reference,
6001 but otherwise warn about attribute inconsistency. */
6002 else if (TREE_CODE (new_tree) == VAR_DECL
6003 || !DECL_DECLARED_INLINE_P (new_tree))
6004 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6005 "previous dllimport ignored", new_tree);
6006 }
6007 else
6008 delete_dllimport_p = 0;
6009
6010 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6011
6012 if (delete_dllimport_p)
6013 a = remove_attribute ("dllimport", a);
6014
6015 return a;
6016 }
6017
6018 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6019 struct attribute_spec.handler. */
6020
6021 tree
6022 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6023 bool *no_add_attrs)
6024 {
6025 tree node = *pnode;
6026 bool is_dllimport;
6027
6028 /* These attributes may apply to structure and union types being created,
6029 but otherwise should pass to the declaration involved. */
6030 if (!DECL_P (node))
6031 {
6032 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6033 | (int) ATTR_FLAG_ARRAY_NEXT))
6034 {
6035 *no_add_attrs = true;
6036 return tree_cons (name, args, NULL_TREE);
6037 }
6038 if (TREE_CODE (node) == RECORD_TYPE
6039 || TREE_CODE (node) == UNION_TYPE)
6040 {
6041 node = TYPE_NAME (node);
6042 if (!node)
6043 return NULL_TREE;
6044 }
6045 else
6046 {
6047 warning (OPT_Wattributes, "%qE attribute ignored",
6048 name);
6049 *no_add_attrs = true;
6050 return NULL_TREE;
6051 }
6052 }
6053
6054 if (TREE_CODE (node) != FUNCTION_DECL
6055 && TREE_CODE (node) != VAR_DECL
6056 && TREE_CODE (node) != TYPE_DECL)
6057 {
6058 *no_add_attrs = true;
6059 warning (OPT_Wattributes, "%qE attribute ignored",
6060 name);
6061 return NULL_TREE;
6062 }
6063
6064 if (TREE_CODE (node) == TYPE_DECL
6065 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6066 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6067 {
6068 *no_add_attrs = true;
6069 warning (OPT_Wattributes, "%qE attribute ignored",
6070 name);
6071 return NULL_TREE;
6072 }
6073
6074 is_dllimport = is_attribute_p ("dllimport", name);
6075
6076 /* Report error on dllimport ambiguities seen now before they cause
6077 any damage. */
6078 if (is_dllimport)
6079 {
6080 /* Honor any target-specific overrides. */
6081 if (!targetm.valid_dllimport_attribute_p (node))
6082 *no_add_attrs = true;
6083
6084 else if (TREE_CODE (node) == FUNCTION_DECL
6085 && DECL_DECLARED_INLINE_P (node))
6086 {
6087 warning (OPT_Wattributes, "inline function %q+D declared as "
6088 " dllimport: attribute ignored", node);
6089 *no_add_attrs = true;
6090 }
6091 /* Like MS, treat definition of dllimported variables and
6092 non-inlined functions on declaration as syntax errors. */
6093 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6094 {
6095 error ("function %q+D definition is marked dllimport", node);
6096 *no_add_attrs = true;
6097 }
6098
6099 else if (TREE_CODE (node) == VAR_DECL)
6100 {
6101 if (DECL_INITIAL (node))
6102 {
6103 error ("variable %q+D definition is marked dllimport",
6104 node);
6105 *no_add_attrs = true;
6106 }
6107
6108 /* `extern' needn't be specified with dllimport.
6109 Specify `extern' now and hope for the best. Sigh. */
6110 DECL_EXTERNAL (node) = 1;
6111 /* Also, implicitly give dllimport'd variables declared within
6112 a function global scope, unless declared static. */
6113 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6114 TREE_PUBLIC (node) = 1;
6115 }
6116
6117 if (*no_add_attrs == false)
6118 DECL_DLLIMPORT_P (node) = 1;
6119 }
6120 else if (TREE_CODE (node) == FUNCTION_DECL
6121 && DECL_DECLARED_INLINE_P (node)
6122 && flag_keep_inline_dllexport)
6123 /* An exported function, even if inline, must be emitted. */
6124 DECL_EXTERNAL (node) = 0;
6125
6126 /* Report error if symbol is not accessible at global scope. */
6127 if (!TREE_PUBLIC (node)
6128 && (TREE_CODE (node) == VAR_DECL
6129 || TREE_CODE (node) == FUNCTION_DECL))
6130 {
6131 error ("external linkage required for symbol %q+D because of "
6132 "%qE attribute", node, name);
6133 *no_add_attrs = true;
6134 }
6135
6136 /* A dllexport'd entity must have default visibility so that other
6137 program units (shared libraries or the main executable) can see
6138 it. A dllimport'd entity must have default visibility so that
6139 the linker knows that undefined references within this program
6140 unit can be resolved by the dynamic linker. */
6141 if (!*no_add_attrs)
6142 {
6143 if (DECL_VISIBILITY_SPECIFIED (node)
6144 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6145 error ("%qE implies default visibility, but %qD has already "
6146 "been declared with a different visibility",
6147 name, node);
6148 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6149 DECL_VISIBILITY_SPECIFIED (node) = 1;
6150 }
6151
6152 return NULL_TREE;
6153 }
6154
6155 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6156 \f
6157 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6158 of the various TYPE_QUAL values. */
6159
6160 static void
6161 set_type_quals (tree type, int type_quals)
6162 {
6163 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6164 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6165 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6166 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6167 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6168 }
6169
6170 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6171
6172 bool
6173 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6174 {
6175 return (TYPE_QUALS (cand) == type_quals
6176 && TYPE_NAME (cand) == TYPE_NAME (base)
6177 /* Apparently this is needed for Objective-C. */
6178 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6179 /* Check alignment. */
6180 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6181 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6182 TYPE_ATTRIBUTES (base)));
6183 }
6184
6185 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6186
6187 static bool
6188 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6189 {
6190 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6191 && TYPE_NAME (cand) == TYPE_NAME (base)
6192 /* Apparently this is needed for Objective-C. */
6193 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6194 /* Check alignment. */
6195 && TYPE_ALIGN (cand) == align
6196 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6197 TYPE_ATTRIBUTES (base)));
6198 }
6199
6200 /* This function checks to see if TYPE matches the size one of the built-in
6201 atomic types, and returns that core atomic type. */
6202
6203 static tree
6204 find_atomic_core_type (tree type)
6205 {
6206 tree base_atomic_type;
6207
6208 /* Only handle complete types. */
6209 if (TYPE_SIZE (type) == NULL_TREE)
6210 return NULL_TREE;
6211
6212 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6213 switch (type_size)
6214 {
6215 case 8:
6216 base_atomic_type = atomicQI_type_node;
6217 break;
6218
6219 case 16:
6220 base_atomic_type = atomicHI_type_node;
6221 break;
6222
6223 case 32:
6224 base_atomic_type = atomicSI_type_node;
6225 break;
6226
6227 case 64:
6228 base_atomic_type = atomicDI_type_node;
6229 break;
6230
6231 case 128:
6232 base_atomic_type = atomicTI_type_node;
6233 break;
6234
6235 default:
6236 base_atomic_type = NULL_TREE;
6237 }
6238
6239 return base_atomic_type;
6240 }
6241
6242 /* Return a version of the TYPE, qualified as indicated by the
6243 TYPE_QUALS, if one exists. If no qualified version exists yet,
6244 return NULL_TREE. */
6245
6246 tree
6247 get_qualified_type (tree type, int type_quals)
6248 {
6249 tree t;
6250
6251 if (TYPE_QUALS (type) == type_quals)
6252 return type;
6253
6254 /* Search the chain of variants to see if there is already one there just
6255 like the one we need to have. If so, use that existing one. We must
6256 preserve the TYPE_NAME, since there is code that depends on this. */
6257 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6258 if (check_qualified_type (t, type, type_quals))
6259 return t;
6260
6261 return NULL_TREE;
6262 }
6263
6264 /* Like get_qualified_type, but creates the type if it does not
6265 exist. This function never returns NULL_TREE. */
6266
6267 tree
6268 build_qualified_type (tree type, int type_quals)
6269 {
6270 tree t;
6271
6272 /* See if we already have the appropriate qualified variant. */
6273 t = get_qualified_type (type, type_quals);
6274
6275 /* If not, build it. */
6276 if (!t)
6277 {
6278 t = build_variant_type_copy (type);
6279 set_type_quals (t, type_quals);
6280
6281 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6282 {
6283 /* See if this object can map to a basic atomic type. */
6284 tree atomic_type = find_atomic_core_type (type);
6285 if (atomic_type)
6286 {
6287 /* Ensure the alignment of this type is compatible with
6288 the required alignment of the atomic type. */
6289 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6290 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6291 }
6292 }
6293
6294 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6295 /* Propagate structural equality. */
6296 SET_TYPE_STRUCTURAL_EQUALITY (t);
6297 else if (TYPE_CANONICAL (type) != type)
6298 /* Build the underlying canonical type, since it is different
6299 from TYPE. */
6300 {
6301 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6302 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6303 }
6304 else
6305 /* T is its own canonical type. */
6306 TYPE_CANONICAL (t) = t;
6307
6308 }
6309
6310 return t;
6311 }
6312
6313 /* Create a variant of type T with alignment ALIGN. */
6314
6315 tree
6316 build_aligned_type (tree type, unsigned int align)
6317 {
6318 tree t;
6319
6320 if (TYPE_PACKED (type)
6321 || TYPE_ALIGN (type) == align)
6322 return type;
6323
6324 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6325 if (check_aligned_type (t, type, align))
6326 return t;
6327
6328 t = build_variant_type_copy (type);
6329 TYPE_ALIGN (t) = align;
6330
6331 return t;
6332 }
6333
6334 /* Create a new distinct copy of TYPE. The new type is made its own
6335 MAIN_VARIANT. If TYPE requires structural equality checks, the
6336 resulting type requires structural equality checks; otherwise, its
6337 TYPE_CANONICAL points to itself. */
6338
6339 tree
6340 build_distinct_type_copy (tree type)
6341 {
6342 tree t = copy_node (type);
6343
6344 TYPE_POINTER_TO (t) = 0;
6345 TYPE_REFERENCE_TO (t) = 0;
6346
6347 /* Set the canonical type either to a new equivalence class, or
6348 propagate the need for structural equality checks. */
6349 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6350 SET_TYPE_STRUCTURAL_EQUALITY (t);
6351 else
6352 TYPE_CANONICAL (t) = t;
6353
6354 /* Make it its own variant. */
6355 TYPE_MAIN_VARIANT (t) = t;
6356 TYPE_NEXT_VARIANT (t) = 0;
6357
6358 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6359 whose TREE_TYPE is not t. This can also happen in the Ada
6360 frontend when using subtypes. */
6361
6362 return t;
6363 }
6364
6365 /* Create a new variant of TYPE, equivalent but distinct. This is so
6366 the caller can modify it. TYPE_CANONICAL for the return type will
6367 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6368 are considered equal by the language itself (or that both types
6369 require structural equality checks). */
6370
6371 tree
6372 build_variant_type_copy (tree type)
6373 {
6374 tree t, m = TYPE_MAIN_VARIANT (type);
6375
6376 t = build_distinct_type_copy (type);
6377
6378 /* Since we're building a variant, assume that it is a non-semantic
6379 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6380 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6381
6382 /* Add the new type to the chain of variants of TYPE. */
6383 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6384 TYPE_NEXT_VARIANT (m) = t;
6385 TYPE_MAIN_VARIANT (t) = m;
6386
6387 return t;
6388 }
6389 \f
6390 /* Return true if the from tree in both tree maps are equal. */
6391
6392 int
6393 tree_map_base_eq (const void *va, const void *vb)
6394 {
6395 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6396 *const b = (const struct tree_map_base *) vb;
6397 return (a->from == b->from);
6398 }
6399
6400 /* Hash a from tree in a tree_base_map. */
6401
6402 unsigned int
6403 tree_map_base_hash (const void *item)
6404 {
6405 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6406 }
6407
6408 /* Return true if this tree map structure is marked for garbage collection
6409 purposes. We simply return true if the from tree is marked, so that this
6410 structure goes away when the from tree goes away. */
6411
6412 int
6413 tree_map_base_marked_p (const void *p)
6414 {
6415 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6416 }
6417
6418 /* Hash a from tree in a tree_map. */
6419
6420 unsigned int
6421 tree_map_hash (const void *item)
6422 {
6423 return (((const struct tree_map *) item)->hash);
6424 }
6425
6426 /* Hash a from tree in a tree_decl_map. */
6427
6428 unsigned int
6429 tree_decl_map_hash (const void *item)
6430 {
6431 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6432 }
6433
6434 /* Return the initialization priority for DECL. */
6435
6436 priority_type
6437 decl_init_priority_lookup (tree decl)
6438 {
6439 symtab_node *snode = symtab_node::get (decl);
6440
6441 if (!snode)
6442 return DEFAULT_INIT_PRIORITY;
6443 return
6444 snode->get_init_priority ();
6445 }
6446
6447 /* Return the finalization priority for DECL. */
6448
6449 priority_type
6450 decl_fini_priority_lookup (tree decl)
6451 {
6452 cgraph_node *node = cgraph_node::get (decl);
6453
6454 if (!node)
6455 return DEFAULT_INIT_PRIORITY;
6456 return
6457 node->get_fini_priority ();
6458 }
6459
6460 /* Set the initialization priority for DECL to PRIORITY. */
6461
6462 void
6463 decl_init_priority_insert (tree decl, priority_type priority)
6464 {
6465 struct symtab_node *snode;
6466
6467 if (priority == DEFAULT_INIT_PRIORITY)
6468 {
6469 snode = symtab_node::get (decl);
6470 if (!snode)
6471 return;
6472 }
6473 else if (TREE_CODE (decl) == VAR_DECL)
6474 snode = varpool_node::get_create (decl);
6475 else
6476 snode = cgraph_node::get_create (decl);
6477 snode->set_init_priority (priority);
6478 }
6479
6480 /* Set the finalization priority for DECL to PRIORITY. */
6481
6482 void
6483 decl_fini_priority_insert (tree decl, priority_type priority)
6484 {
6485 struct cgraph_node *node;
6486
6487 if (priority == DEFAULT_INIT_PRIORITY)
6488 {
6489 node = cgraph_node::get (decl);
6490 if (!node)
6491 return;
6492 }
6493 else
6494 node = cgraph_node::get_create (decl);
6495 node->set_fini_priority (priority);
6496 }
6497
6498 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6499
6500 static void
6501 print_debug_expr_statistics (void)
6502 {
6503 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6504 (long) htab_size (debug_expr_for_decl),
6505 (long) htab_elements (debug_expr_for_decl),
6506 htab_collisions (debug_expr_for_decl));
6507 }
6508
6509 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6510
6511 static void
6512 print_value_expr_statistics (void)
6513 {
6514 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6515 (long) htab_size (value_expr_for_decl),
6516 (long) htab_elements (value_expr_for_decl),
6517 htab_collisions (value_expr_for_decl));
6518 }
6519
6520 /* Lookup a debug expression for FROM, and return it if we find one. */
6521
6522 tree
6523 decl_debug_expr_lookup (tree from)
6524 {
6525 struct tree_decl_map *h, in;
6526 in.base.from = from;
6527
6528 h = (struct tree_decl_map *)
6529 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6530 if (h)
6531 return h->to;
6532 return NULL_TREE;
6533 }
6534
6535 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6536
6537 void
6538 decl_debug_expr_insert (tree from, tree to)
6539 {
6540 struct tree_decl_map *h;
6541 void **loc;
6542
6543 h = ggc_alloc<tree_decl_map> ();
6544 h->base.from = from;
6545 h->to = to;
6546 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6547 INSERT);
6548 *(struct tree_decl_map **) loc = h;
6549 }
6550
6551 /* Lookup a value expression for FROM, and return it if we find one. */
6552
6553 tree
6554 decl_value_expr_lookup (tree from)
6555 {
6556 struct tree_decl_map *h, in;
6557 in.base.from = from;
6558
6559 h = (struct tree_decl_map *)
6560 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6561 if (h)
6562 return h->to;
6563 return NULL_TREE;
6564 }
6565
6566 /* Insert a mapping FROM->TO in the value expression hashtable. */
6567
6568 void
6569 decl_value_expr_insert (tree from, tree to)
6570 {
6571 struct tree_decl_map *h;
6572 void **loc;
6573
6574 h = ggc_alloc<tree_decl_map> ();
6575 h->base.from = from;
6576 h->to = to;
6577 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6578 INSERT);
6579 *(struct tree_decl_map **) loc = h;
6580 }
6581
6582 /* Lookup a vector of debug arguments for FROM, and return it if we
6583 find one. */
6584
6585 vec<tree, va_gc> **
6586 decl_debug_args_lookup (tree from)
6587 {
6588 struct tree_vec_map *h, in;
6589
6590 if (!DECL_HAS_DEBUG_ARGS_P (from))
6591 return NULL;
6592 gcc_checking_assert (debug_args_for_decl != NULL);
6593 in.base.from = from;
6594 h = (struct tree_vec_map *)
6595 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6596 if (h)
6597 return &h->to;
6598 return NULL;
6599 }
6600
6601 /* Insert a mapping FROM->empty vector of debug arguments in the value
6602 expression hashtable. */
6603
6604 vec<tree, va_gc> **
6605 decl_debug_args_insert (tree from)
6606 {
6607 struct tree_vec_map *h;
6608 void **loc;
6609
6610 if (DECL_HAS_DEBUG_ARGS_P (from))
6611 return decl_debug_args_lookup (from);
6612 if (debug_args_for_decl == NULL)
6613 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6614 tree_vec_map_eq, 0);
6615 h = ggc_alloc<tree_vec_map> ();
6616 h->base.from = from;
6617 h->to = NULL;
6618 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6619 INSERT);
6620 *(struct tree_vec_map **) loc = h;
6621 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6622 return &h->to;
6623 }
6624
6625 /* Hashing of types so that we don't make duplicates.
6626 The entry point is `type_hash_canon'. */
6627
6628 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6629 with types in the TREE_VALUE slots), by adding the hash codes
6630 of the individual types. */
6631
6632 static void
6633 type_hash_list (const_tree list, inchash::hash &hstate)
6634 {
6635 const_tree tail;
6636
6637 for (tail = list; tail; tail = TREE_CHAIN (tail))
6638 if (TREE_VALUE (tail) != error_mark_node)
6639 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6640 }
6641
6642 /* These are the Hashtable callback functions. */
6643
6644 /* Returns true iff the types are equivalent. */
6645
6646 static int
6647 type_hash_eq (const void *va, const void *vb)
6648 {
6649 const struct type_hash *const a = (const struct type_hash *) va,
6650 *const b = (const struct type_hash *) vb;
6651
6652 /* First test the things that are the same for all types. */
6653 if (a->hash != b->hash
6654 || TREE_CODE (a->type) != TREE_CODE (b->type)
6655 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6656 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6657 TYPE_ATTRIBUTES (b->type))
6658 || (TREE_CODE (a->type) != COMPLEX_TYPE
6659 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6660 return 0;
6661
6662 /* Be careful about comparing arrays before and after the element type
6663 has been completed; don't compare TYPE_ALIGN unless both types are
6664 complete. */
6665 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6666 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6667 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6668 return 0;
6669
6670 switch (TREE_CODE (a->type))
6671 {
6672 case VOID_TYPE:
6673 case COMPLEX_TYPE:
6674 case POINTER_TYPE:
6675 case REFERENCE_TYPE:
6676 case NULLPTR_TYPE:
6677 return 1;
6678
6679 case VECTOR_TYPE:
6680 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6681
6682 case ENUMERAL_TYPE:
6683 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6684 && !(TYPE_VALUES (a->type)
6685 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6686 && TYPE_VALUES (b->type)
6687 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6688 && type_list_equal (TYPE_VALUES (a->type),
6689 TYPE_VALUES (b->type))))
6690 return 0;
6691
6692 /* ... fall through ... */
6693
6694 case INTEGER_TYPE:
6695 case REAL_TYPE:
6696 case BOOLEAN_TYPE:
6697 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6698 return false;
6699 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6700 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6701 TYPE_MAX_VALUE (b->type)))
6702 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6703 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6704 TYPE_MIN_VALUE (b->type))));
6705
6706 case FIXED_POINT_TYPE:
6707 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6708
6709 case OFFSET_TYPE:
6710 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6711
6712 case METHOD_TYPE:
6713 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6714 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6715 || (TYPE_ARG_TYPES (a->type)
6716 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6717 && TYPE_ARG_TYPES (b->type)
6718 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6719 && type_list_equal (TYPE_ARG_TYPES (a->type),
6720 TYPE_ARG_TYPES (b->type)))))
6721 break;
6722 return 0;
6723 case ARRAY_TYPE:
6724 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6725
6726 case RECORD_TYPE:
6727 case UNION_TYPE:
6728 case QUAL_UNION_TYPE:
6729 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6730 || (TYPE_FIELDS (a->type)
6731 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6732 && TYPE_FIELDS (b->type)
6733 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6734 && type_list_equal (TYPE_FIELDS (a->type),
6735 TYPE_FIELDS (b->type))));
6736
6737 case FUNCTION_TYPE:
6738 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6739 || (TYPE_ARG_TYPES (a->type)
6740 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6741 && TYPE_ARG_TYPES (b->type)
6742 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6743 && type_list_equal (TYPE_ARG_TYPES (a->type),
6744 TYPE_ARG_TYPES (b->type))))
6745 break;
6746 return 0;
6747
6748 default:
6749 return 0;
6750 }
6751
6752 if (lang_hooks.types.type_hash_eq != NULL)
6753 return lang_hooks.types.type_hash_eq (a->type, b->type);
6754
6755 return 1;
6756 }
6757
6758 /* Return the cached hash value. */
6759
6760 static hashval_t
6761 type_hash_hash (const void *item)
6762 {
6763 return ((const struct type_hash *) item)->hash;
6764 }
6765
6766 /* Given TYPE, and HASHCODE its hash code, return the canonical
6767 object for an identical type if one already exists.
6768 Otherwise, return TYPE, and record it as the canonical object.
6769
6770 To use this function, first create a type of the sort you want.
6771 Then compute its hash code from the fields of the type that
6772 make it different from other similar types.
6773 Then call this function and use the value. */
6774
6775 tree
6776 type_hash_canon (unsigned int hashcode, tree type)
6777 {
6778 type_hash in;
6779 void **loc;
6780
6781 /* The hash table only contains main variants, so ensure that's what we're
6782 being passed. */
6783 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6784
6785 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6786 must call that routine before comparing TYPE_ALIGNs. */
6787 layout_type (type);
6788
6789 in.hash = hashcode;
6790 in.type = type;
6791
6792 loc = htab_find_slot_with_hash (type_hash_table, &in, hashcode, INSERT);
6793 if (*loc)
6794 {
6795 tree t1 = ((type_hash *) *loc)->type;
6796 /* ??? We'd like to assert here that the hashtable only contains
6797 main variants but the C++ frontend breaks this by modifying
6798 types already in the hashtable in build_cplus_array_type. */
6799 /* gcc_assert (TYPE_MAIN_VARIANT (t1) == t1); */
6800 if (GATHER_STATISTICS)
6801 {
6802 tree_code_counts[(int) TREE_CODE (type)]--;
6803 tree_node_counts[(int) t_kind]--;
6804 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6805 }
6806 return t1;
6807 }
6808 else
6809 {
6810 struct type_hash *h;
6811
6812 h = ggc_alloc<type_hash> ();
6813 h->hash = hashcode;
6814 h->type = type;
6815 *loc = (void *)h;
6816
6817 return type;
6818 }
6819 }
6820
6821 /* See if the data pointed to by the type hash table is marked. We consider
6822 it marked if the type is marked or if a debug type number or symbol
6823 table entry has been made for the type. */
6824
6825 static int
6826 type_hash_marked_p (const void *p)
6827 {
6828 const_tree const type = ((const struct type_hash *) p)->type;
6829
6830 return ggc_marked_p (type);
6831 }
6832
6833 static void
6834 print_type_hash_statistics (void)
6835 {
6836 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6837 (long) htab_size (type_hash_table),
6838 (long) htab_elements (type_hash_table),
6839 htab_collisions (type_hash_table));
6840 }
6841
6842 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6843 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6844 by adding the hash codes of the individual attributes. */
6845
6846 static void
6847 attribute_hash_list (const_tree list, inchash::hash &hstate)
6848 {
6849 const_tree tail;
6850
6851 for (tail = list; tail; tail = TREE_CHAIN (tail))
6852 /* ??? Do we want to add in TREE_VALUE too? */
6853 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6854 }
6855
6856 /* Given two lists of attributes, return true if list l2 is
6857 equivalent to l1. */
6858
6859 int
6860 attribute_list_equal (const_tree l1, const_tree l2)
6861 {
6862 if (l1 == l2)
6863 return 1;
6864
6865 return attribute_list_contained (l1, l2)
6866 && attribute_list_contained (l2, l1);
6867 }
6868
6869 /* Given two lists of attributes, return true if list L2 is
6870 completely contained within L1. */
6871 /* ??? This would be faster if attribute names were stored in a canonicalized
6872 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6873 must be used to show these elements are equivalent (which they are). */
6874 /* ??? It's not clear that attributes with arguments will always be handled
6875 correctly. */
6876
6877 int
6878 attribute_list_contained (const_tree l1, const_tree l2)
6879 {
6880 const_tree t1, t2;
6881
6882 /* First check the obvious, maybe the lists are identical. */
6883 if (l1 == l2)
6884 return 1;
6885
6886 /* Maybe the lists are similar. */
6887 for (t1 = l1, t2 = l2;
6888 t1 != 0 && t2 != 0
6889 && get_attribute_name (t1) == get_attribute_name (t2)
6890 && TREE_VALUE (t1) == TREE_VALUE (t2);
6891 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6892 ;
6893
6894 /* Maybe the lists are equal. */
6895 if (t1 == 0 && t2 == 0)
6896 return 1;
6897
6898 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6899 {
6900 const_tree attr;
6901 /* This CONST_CAST is okay because lookup_attribute does not
6902 modify its argument and the return value is assigned to a
6903 const_tree. */
6904 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6905 CONST_CAST_TREE (l1));
6906 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6907 attr = lookup_ident_attribute (get_attribute_name (t2),
6908 TREE_CHAIN (attr)))
6909 ;
6910
6911 if (attr == NULL_TREE)
6912 return 0;
6913 }
6914
6915 return 1;
6916 }
6917
6918 /* Given two lists of types
6919 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6920 return 1 if the lists contain the same types in the same order.
6921 Also, the TREE_PURPOSEs must match. */
6922
6923 int
6924 type_list_equal (const_tree l1, const_tree l2)
6925 {
6926 const_tree t1, t2;
6927
6928 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6929 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6930 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6931 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6932 && (TREE_TYPE (TREE_PURPOSE (t1))
6933 == TREE_TYPE (TREE_PURPOSE (t2))))))
6934 return 0;
6935
6936 return t1 == t2;
6937 }
6938
6939 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6940 given by TYPE. If the argument list accepts variable arguments,
6941 then this function counts only the ordinary arguments. */
6942
6943 int
6944 type_num_arguments (const_tree type)
6945 {
6946 int i = 0;
6947 tree t;
6948
6949 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6950 /* If the function does not take a variable number of arguments,
6951 the last element in the list will have type `void'. */
6952 if (VOID_TYPE_P (TREE_VALUE (t)))
6953 break;
6954 else
6955 ++i;
6956
6957 return i;
6958 }
6959
6960 /* Nonzero if integer constants T1 and T2
6961 represent the same constant value. */
6962
6963 int
6964 tree_int_cst_equal (const_tree t1, const_tree t2)
6965 {
6966 if (t1 == t2)
6967 return 1;
6968
6969 if (t1 == 0 || t2 == 0)
6970 return 0;
6971
6972 if (TREE_CODE (t1) == INTEGER_CST
6973 && TREE_CODE (t2) == INTEGER_CST
6974 && wi::to_widest (t1) == wi::to_widest (t2))
6975 return 1;
6976
6977 return 0;
6978 }
6979
6980 /* Return true if T is an INTEGER_CST whose numerical value (extended
6981 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6982
6983 bool
6984 tree_fits_shwi_p (const_tree t)
6985 {
6986 return (t != NULL_TREE
6987 && TREE_CODE (t) == INTEGER_CST
6988 && wi::fits_shwi_p (wi::to_widest (t)));
6989 }
6990
6991 /* Return true if T is an INTEGER_CST whose numerical value (extended
6992 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6993
6994 bool
6995 tree_fits_uhwi_p (const_tree t)
6996 {
6997 return (t != NULL_TREE
6998 && TREE_CODE (t) == INTEGER_CST
6999 && wi::fits_uhwi_p (wi::to_widest (t)));
7000 }
7001
7002 /* T is an INTEGER_CST whose numerical value (extended according to
7003 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7004 HOST_WIDE_INT. */
7005
7006 HOST_WIDE_INT
7007 tree_to_shwi (const_tree t)
7008 {
7009 gcc_assert (tree_fits_shwi_p (t));
7010 return TREE_INT_CST_LOW (t);
7011 }
7012
7013 /* T is an INTEGER_CST whose numerical value (extended according to
7014 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7015 HOST_WIDE_INT. */
7016
7017 unsigned HOST_WIDE_INT
7018 tree_to_uhwi (const_tree t)
7019 {
7020 gcc_assert (tree_fits_uhwi_p (t));
7021 return TREE_INT_CST_LOW (t);
7022 }
7023
7024 /* Return the most significant (sign) bit of T. */
7025
7026 int
7027 tree_int_cst_sign_bit (const_tree t)
7028 {
7029 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7030
7031 return wi::extract_uhwi (t, bitno, 1);
7032 }
7033
7034 /* Return an indication of the sign of the integer constant T.
7035 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7036 Note that -1 will never be returned if T's type is unsigned. */
7037
7038 int
7039 tree_int_cst_sgn (const_tree t)
7040 {
7041 if (wi::eq_p (t, 0))
7042 return 0;
7043 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7044 return 1;
7045 else if (wi::neg_p (t))
7046 return -1;
7047 else
7048 return 1;
7049 }
7050
7051 /* Return the minimum number of bits needed to represent VALUE in a
7052 signed or unsigned type, UNSIGNEDP says which. */
7053
7054 unsigned int
7055 tree_int_cst_min_precision (tree value, signop sgn)
7056 {
7057 /* If the value is negative, compute its negative minus 1. The latter
7058 adjustment is because the absolute value of the largest negative value
7059 is one larger than the largest positive value. This is equivalent to
7060 a bit-wise negation, so use that operation instead. */
7061
7062 if (tree_int_cst_sgn (value) < 0)
7063 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7064
7065 /* Return the number of bits needed, taking into account the fact
7066 that we need one more bit for a signed than unsigned type.
7067 If value is 0 or -1, the minimum precision is 1 no matter
7068 whether unsignedp is true or false. */
7069
7070 if (integer_zerop (value))
7071 return 1;
7072 else
7073 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7074 }
7075
7076 /* Return truthvalue of whether T1 is the same tree structure as T2.
7077 Return 1 if they are the same.
7078 Return 0 if they are understandably different.
7079 Return -1 if either contains tree structure not understood by
7080 this function. */
7081
7082 int
7083 simple_cst_equal (const_tree t1, const_tree t2)
7084 {
7085 enum tree_code code1, code2;
7086 int cmp;
7087 int i;
7088
7089 if (t1 == t2)
7090 return 1;
7091 if (t1 == 0 || t2 == 0)
7092 return 0;
7093
7094 code1 = TREE_CODE (t1);
7095 code2 = TREE_CODE (t2);
7096
7097 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7098 {
7099 if (CONVERT_EXPR_CODE_P (code2)
7100 || code2 == NON_LVALUE_EXPR)
7101 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7102 else
7103 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7104 }
7105
7106 else if (CONVERT_EXPR_CODE_P (code2)
7107 || code2 == NON_LVALUE_EXPR)
7108 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7109
7110 if (code1 != code2)
7111 return 0;
7112
7113 switch (code1)
7114 {
7115 case INTEGER_CST:
7116 return wi::to_widest (t1) == wi::to_widest (t2);
7117
7118 case REAL_CST:
7119 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7120
7121 case FIXED_CST:
7122 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7123
7124 case STRING_CST:
7125 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7126 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7127 TREE_STRING_LENGTH (t1)));
7128
7129 case CONSTRUCTOR:
7130 {
7131 unsigned HOST_WIDE_INT idx;
7132 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7133 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7134
7135 if (vec_safe_length (v1) != vec_safe_length (v2))
7136 return false;
7137
7138 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7139 /* ??? Should we handle also fields here? */
7140 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7141 return false;
7142 return true;
7143 }
7144
7145 case SAVE_EXPR:
7146 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7147
7148 case CALL_EXPR:
7149 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7150 if (cmp <= 0)
7151 return cmp;
7152 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7153 return 0;
7154 {
7155 const_tree arg1, arg2;
7156 const_call_expr_arg_iterator iter1, iter2;
7157 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7158 arg2 = first_const_call_expr_arg (t2, &iter2);
7159 arg1 && arg2;
7160 arg1 = next_const_call_expr_arg (&iter1),
7161 arg2 = next_const_call_expr_arg (&iter2))
7162 {
7163 cmp = simple_cst_equal (arg1, arg2);
7164 if (cmp <= 0)
7165 return cmp;
7166 }
7167 return arg1 == arg2;
7168 }
7169
7170 case TARGET_EXPR:
7171 /* Special case: if either target is an unallocated VAR_DECL,
7172 it means that it's going to be unified with whatever the
7173 TARGET_EXPR is really supposed to initialize, so treat it
7174 as being equivalent to anything. */
7175 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7176 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7177 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7178 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7179 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7180 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7181 cmp = 1;
7182 else
7183 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7184
7185 if (cmp <= 0)
7186 return cmp;
7187
7188 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7189
7190 case WITH_CLEANUP_EXPR:
7191 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7192 if (cmp <= 0)
7193 return cmp;
7194
7195 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
7196
7197 case COMPONENT_REF:
7198 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7199 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7200
7201 return 0;
7202
7203 case VAR_DECL:
7204 case PARM_DECL:
7205 case CONST_DECL:
7206 case FUNCTION_DECL:
7207 return 0;
7208
7209 default:
7210 break;
7211 }
7212
7213 /* This general rule works for most tree codes. All exceptions should be
7214 handled above. If this is a language-specific tree code, we can't
7215 trust what might be in the operand, so say we don't know
7216 the situation. */
7217 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7218 return -1;
7219
7220 switch (TREE_CODE_CLASS (code1))
7221 {
7222 case tcc_unary:
7223 case tcc_binary:
7224 case tcc_comparison:
7225 case tcc_expression:
7226 case tcc_reference:
7227 case tcc_statement:
7228 cmp = 1;
7229 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7230 {
7231 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7232 if (cmp <= 0)
7233 return cmp;
7234 }
7235
7236 return cmp;
7237
7238 default:
7239 return -1;
7240 }
7241 }
7242
7243 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7244 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7245 than U, respectively. */
7246
7247 int
7248 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7249 {
7250 if (tree_int_cst_sgn (t) < 0)
7251 return -1;
7252 else if (!tree_fits_uhwi_p (t))
7253 return 1;
7254 else if (TREE_INT_CST_LOW (t) == u)
7255 return 0;
7256 else if (TREE_INT_CST_LOW (t) < u)
7257 return -1;
7258 else
7259 return 1;
7260 }
7261
7262 /* Return true if SIZE represents a constant size that is in bounds of
7263 what the middle-end and the backend accepts (covering not more than
7264 half of the address-space). */
7265
7266 bool
7267 valid_constant_size_p (const_tree size)
7268 {
7269 if (! tree_fits_uhwi_p (size)
7270 || TREE_OVERFLOW (size)
7271 || tree_int_cst_sign_bit (size) != 0)
7272 return false;
7273 return true;
7274 }
7275
7276 /* Return the precision of the type, or for a complex or vector type the
7277 precision of the type of its elements. */
7278
7279 unsigned int
7280 element_precision (const_tree type)
7281 {
7282 enum tree_code code = TREE_CODE (type);
7283 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7284 type = TREE_TYPE (type);
7285
7286 return TYPE_PRECISION (type);
7287 }
7288
7289 /* Return true if CODE represents an associative tree code. Otherwise
7290 return false. */
7291 bool
7292 associative_tree_code (enum tree_code code)
7293 {
7294 switch (code)
7295 {
7296 case BIT_IOR_EXPR:
7297 case BIT_AND_EXPR:
7298 case BIT_XOR_EXPR:
7299 case PLUS_EXPR:
7300 case MULT_EXPR:
7301 case MIN_EXPR:
7302 case MAX_EXPR:
7303 return true;
7304
7305 default:
7306 break;
7307 }
7308 return false;
7309 }
7310
7311 /* Return true if CODE represents a commutative tree code. Otherwise
7312 return false. */
7313 bool
7314 commutative_tree_code (enum tree_code code)
7315 {
7316 switch (code)
7317 {
7318 case PLUS_EXPR:
7319 case MULT_EXPR:
7320 case MULT_HIGHPART_EXPR:
7321 case MIN_EXPR:
7322 case MAX_EXPR:
7323 case BIT_IOR_EXPR:
7324 case BIT_XOR_EXPR:
7325 case BIT_AND_EXPR:
7326 case NE_EXPR:
7327 case EQ_EXPR:
7328 case UNORDERED_EXPR:
7329 case ORDERED_EXPR:
7330 case UNEQ_EXPR:
7331 case LTGT_EXPR:
7332 case TRUTH_AND_EXPR:
7333 case TRUTH_XOR_EXPR:
7334 case TRUTH_OR_EXPR:
7335 case WIDEN_MULT_EXPR:
7336 case VEC_WIDEN_MULT_HI_EXPR:
7337 case VEC_WIDEN_MULT_LO_EXPR:
7338 case VEC_WIDEN_MULT_EVEN_EXPR:
7339 case VEC_WIDEN_MULT_ODD_EXPR:
7340 return true;
7341
7342 default:
7343 break;
7344 }
7345 return false;
7346 }
7347
7348 /* Return true if CODE represents a ternary tree code for which the
7349 first two operands are commutative. Otherwise return false. */
7350 bool
7351 commutative_ternary_tree_code (enum tree_code code)
7352 {
7353 switch (code)
7354 {
7355 case WIDEN_MULT_PLUS_EXPR:
7356 case WIDEN_MULT_MINUS_EXPR:
7357 return true;
7358
7359 default:
7360 break;
7361 }
7362 return false;
7363 }
7364
7365 namespace inchash
7366 {
7367
7368 /* Generate a hash value for an expression. This can be used iteratively
7369 by passing a previous result as the HSTATE argument.
7370
7371 This function is intended to produce the same hash for expressions which
7372 would compare equal using operand_equal_p. */
7373 void
7374 add_expr (const_tree t, inchash::hash &hstate)
7375 {
7376 int i;
7377 enum tree_code code;
7378 enum tree_code_class tclass;
7379
7380 if (t == NULL_TREE)
7381 {
7382 hstate.merge_hash (0);
7383 return;
7384 }
7385
7386 code = TREE_CODE (t);
7387
7388 switch (code)
7389 {
7390 /* Alas, constants aren't shared, so we can't rely on pointer
7391 identity. */
7392 case VOID_CST:
7393 hstate.merge_hash (0);
7394 return;
7395 case INTEGER_CST:
7396 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7397 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7398 return;
7399 case REAL_CST:
7400 {
7401 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7402 hstate.merge_hash (val2);
7403 return;
7404 }
7405 case FIXED_CST:
7406 {
7407 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7408 hstate.merge_hash (val2);
7409 return;
7410 }
7411 case STRING_CST:
7412 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7413 return;
7414 case COMPLEX_CST:
7415 inchash::add_expr (TREE_REALPART (t), hstate);
7416 inchash::add_expr (TREE_IMAGPART (t), hstate);
7417 return;
7418 case VECTOR_CST:
7419 {
7420 unsigned i;
7421 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7422 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7423 return;
7424 }
7425 case SSA_NAME:
7426 /* We can just compare by pointer. */
7427 hstate.add_wide_int (SSA_NAME_VERSION (t));
7428 return;
7429 case PLACEHOLDER_EXPR:
7430 /* The node itself doesn't matter. */
7431 return;
7432 case TREE_LIST:
7433 /* A list of expressions, for a CALL_EXPR or as the elements of a
7434 VECTOR_CST. */
7435 for (; t; t = TREE_CHAIN (t))
7436 inchash::add_expr (TREE_VALUE (t), hstate);
7437 return;
7438 case CONSTRUCTOR:
7439 {
7440 unsigned HOST_WIDE_INT idx;
7441 tree field, value;
7442 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7443 {
7444 inchash::add_expr (field, hstate);
7445 inchash::add_expr (value, hstate);
7446 }
7447 return;
7448 }
7449 case FUNCTION_DECL:
7450 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7451 Otherwise nodes that compare equal according to operand_equal_p might
7452 get different hash codes. However, don't do this for machine specific
7453 or front end builtins, since the function code is overloaded in those
7454 cases. */
7455 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7456 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7457 {
7458 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7459 code = TREE_CODE (t);
7460 }
7461 /* FALL THROUGH */
7462 default:
7463 tclass = TREE_CODE_CLASS (code);
7464
7465 if (tclass == tcc_declaration)
7466 {
7467 /* DECL's have a unique ID */
7468 hstate.add_wide_int (DECL_UID (t));
7469 }
7470 else
7471 {
7472 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7473
7474 hstate.add_object (code);
7475
7476 /* Don't hash the type, that can lead to having nodes which
7477 compare equal according to operand_equal_p, but which
7478 have different hash codes. */
7479 if (CONVERT_EXPR_CODE_P (code)
7480 || code == NON_LVALUE_EXPR)
7481 {
7482 /* Make sure to include signness in the hash computation. */
7483 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7484 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7485 }
7486
7487 else if (commutative_tree_code (code))
7488 {
7489 /* It's a commutative expression. We want to hash it the same
7490 however it appears. We do this by first hashing both operands
7491 and then rehashing based on the order of their independent
7492 hashes. */
7493 inchash::hash one, two;
7494 inchash::add_expr (TREE_OPERAND (t, 0), one);
7495 inchash::add_expr (TREE_OPERAND (t, 1), two);
7496 hstate.add_commutative (one, two);
7497 }
7498 else
7499 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7500 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7501 }
7502 return;
7503 }
7504 }
7505
7506 }
7507
7508 /* Constructors for pointer, array and function types.
7509 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7510 constructed by language-dependent code, not here.) */
7511
7512 /* Construct, lay out and return the type of pointers to TO_TYPE with
7513 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7514 reference all of memory. If such a type has already been
7515 constructed, reuse it. */
7516
7517 tree
7518 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7519 bool can_alias_all)
7520 {
7521 tree t;
7522
7523 if (to_type == error_mark_node)
7524 return error_mark_node;
7525
7526 /* If the pointed-to type has the may_alias attribute set, force
7527 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7528 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7529 can_alias_all = true;
7530
7531 /* In some cases, languages will have things that aren't a POINTER_TYPE
7532 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7533 In that case, return that type without regard to the rest of our
7534 operands.
7535
7536 ??? This is a kludge, but consistent with the way this function has
7537 always operated and there doesn't seem to be a good way to avoid this
7538 at the moment. */
7539 if (TYPE_POINTER_TO (to_type) != 0
7540 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7541 return TYPE_POINTER_TO (to_type);
7542
7543 /* First, if we already have a type for pointers to TO_TYPE and it's
7544 the proper mode, use it. */
7545 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7546 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7547 return t;
7548
7549 t = make_node (POINTER_TYPE);
7550
7551 TREE_TYPE (t) = to_type;
7552 SET_TYPE_MODE (t, mode);
7553 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7554 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7555 TYPE_POINTER_TO (to_type) = t;
7556
7557 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7558 SET_TYPE_STRUCTURAL_EQUALITY (t);
7559 else if (TYPE_CANONICAL (to_type) != to_type)
7560 TYPE_CANONICAL (t)
7561 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7562 mode, can_alias_all);
7563
7564 /* Lay out the type. This function has many callers that are concerned
7565 with expression-construction, and this simplifies them all. */
7566 layout_type (t);
7567
7568 return t;
7569 }
7570
7571 /* By default build pointers in ptr_mode. */
7572
7573 tree
7574 build_pointer_type (tree to_type)
7575 {
7576 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7577 : TYPE_ADDR_SPACE (to_type);
7578 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7579 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7580 }
7581
7582 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7583
7584 tree
7585 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7586 bool can_alias_all)
7587 {
7588 tree t;
7589
7590 if (to_type == error_mark_node)
7591 return error_mark_node;
7592
7593 /* If the pointed-to type has the may_alias attribute set, force
7594 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7595 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7596 can_alias_all = true;
7597
7598 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7599 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7600 In that case, return that type without regard to the rest of our
7601 operands.
7602
7603 ??? This is a kludge, but consistent with the way this function has
7604 always operated and there doesn't seem to be a good way to avoid this
7605 at the moment. */
7606 if (TYPE_REFERENCE_TO (to_type) != 0
7607 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7608 return TYPE_REFERENCE_TO (to_type);
7609
7610 /* First, if we already have a type for pointers to TO_TYPE and it's
7611 the proper mode, use it. */
7612 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7613 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7614 return t;
7615
7616 t = make_node (REFERENCE_TYPE);
7617
7618 TREE_TYPE (t) = to_type;
7619 SET_TYPE_MODE (t, mode);
7620 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7621 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7622 TYPE_REFERENCE_TO (to_type) = t;
7623
7624 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7625 SET_TYPE_STRUCTURAL_EQUALITY (t);
7626 else if (TYPE_CANONICAL (to_type) != to_type)
7627 TYPE_CANONICAL (t)
7628 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7629 mode, can_alias_all);
7630
7631 layout_type (t);
7632
7633 return t;
7634 }
7635
7636
7637 /* Build the node for the type of references-to-TO_TYPE by default
7638 in ptr_mode. */
7639
7640 tree
7641 build_reference_type (tree to_type)
7642 {
7643 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7644 : TYPE_ADDR_SPACE (to_type);
7645 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7646 return build_reference_type_for_mode (to_type, pointer_mode, false);
7647 }
7648
7649 #define MAX_INT_CACHED_PREC \
7650 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7651 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7652
7653 /* Builds a signed or unsigned integer type of precision PRECISION.
7654 Used for C bitfields whose precision does not match that of
7655 built-in target types. */
7656 tree
7657 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7658 int unsignedp)
7659 {
7660 tree itype, ret;
7661
7662 if (unsignedp)
7663 unsignedp = MAX_INT_CACHED_PREC + 1;
7664
7665 if (precision <= MAX_INT_CACHED_PREC)
7666 {
7667 itype = nonstandard_integer_type_cache[precision + unsignedp];
7668 if (itype)
7669 return itype;
7670 }
7671
7672 itype = make_node (INTEGER_TYPE);
7673 TYPE_PRECISION (itype) = precision;
7674
7675 if (unsignedp)
7676 fixup_unsigned_type (itype);
7677 else
7678 fixup_signed_type (itype);
7679
7680 ret = itype;
7681 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7682 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7683 if (precision <= MAX_INT_CACHED_PREC)
7684 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7685
7686 return ret;
7687 }
7688
7689 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7690 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7691 is true, reuse such a type that has already been constructed. */
7692
7693 static tree
7694 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7695 {
7696 tree itype = make_node (INTEGER_TYPE);
7697 inchash::hash hstate;
7698
7699 TREE_TYPE (itype) = type;
7700
7701 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7702 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7703
7704 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7705 SET_TYPE_MODE (itype, TYPE_MODE (type));
7706 TYPE_SIZE (itype) = TYPE_SIZE (type);
7707 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7708 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7709 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7710
7711 if (!shared)
7712 return itype;
7713
7714 if ((TYPE_MIN_VALUE (itype)
7715 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7716 || (TYPE_MAX_VALUE (itype)
7717 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7718 {
7719 /* Since we cannot reliably merge this type, we need to compare it using
7720 structural equality checks. */
7721 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7722 return itype;
7723 }
7724
7725 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7726 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7727 hstate.merge_hash (TYPE_HASH (type));
7728 itype = type_hash_canon (hstate.end (), itype);
7729
7730 return itype;
7731 }
7732
7733 /* Wrapper around build_range_type_1 with SHARED set to true. */
7734
7735 tree
7736 build_range_type (tree type, tree lowval, tree highval)
7737 {
7738 return build_range_type_1 (type, lowval, highval, true);
7739 }
7740
7741 /* Wrapper around build_range_type_1 with SHARED set to false. */
7742
7743 tree
7744 build_nonshared_range_type (tree type, tree lowval, tree highval)
7745 {
7746 return build_range_type_1 (type, lowval, highval, false);
7747 }
7748
7749 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7750 MAXVAL should be the maximum value in the domain
7751 (one less than the length of the array).
7752
7753 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7754 We don't enforce this limit, that is up to caller (e.g. language front end).
7755 The limit exists because the result is a signed type and we don't handle
7756 sizes that use more than one HOST_WIDE_INT. */
7757
7758 tree
7759 build_index_type (tree maxval)
7760 {
7761 return build_range_type (sizetype, size_zero_node, maxval);
7762 }
7763
7764 /* Return true if the debug information for TYPE, a subtype, should be emitted
7765 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7766 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7767 debug info and doesn't reflect the source code. */
7768
7769 bool
7770 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7771 {
7772 tree base_type = TREE_TYPE (type), low, high;
7773
7774 /* Subrange types have a base type which is an integral type. */
7775 if (!INTEGRAL_TYPE_P (base_type))
7776 return false;
7777
7778 /* Get the real bounds of the subtype. */
7779 if (lang_hooks.types.get_subrange_bounds)
7780 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7781 else
7782 {
7783 low = TYPE_MIN_VALUE (type);
7784 high = TYPE_MAX_VALUE (type);
7785 }
7786
7787 /* If the type and its base type have the same representation and the same
7788 name, then the type is not a subrange but a copy of the base type. */
7789 if ((TREE_CODE (base_type) == INTEGER_TYPE
7790 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7791 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7792 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7793 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7794 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7795 return false;
7796
7797 if (lowval)
7798 *lowval = low;
7799 if (highval)
7800 *highval = high;
7801 return true;
7802 }
7803
7804 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7805 and number of elements specified by the range of values of INDEX_TYPE.
7806 If SHARED is true, reuse such a type that has already been constructed. */
7807
7808 static tree
7809 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7810 {
7811 tree t;
7812
7813 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7814 {
7815 error ("arrays of functions are not meaningful");
7816 elt_type = integer_type_node;
7817 }
7818
7819 t = make_node (ARRAY_TYPE);
7820 TREE_TYPE (t) = elt_type;
7821 TYPE_DOMAIN (t) = index_type;
7822 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7823 layout_type (t);
7824
7825 /* If the element type is incomplete at this point we get marked for
7826 structural equality. Do not record these types in the canonical
7827 type hashtable. */
7828 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7829 return t;
7830
7831 if (shared)
7832 {
7833 inchash::hash hstate;
7834 hstate.add_object (TYPE_HASH (elt_type));
7835 if (index_type)
7836 hstate.add_object (TYPE_HASH (index_type));
7837 t = type_hash_canon (hstate.end (), t);
7838 }
7839
7840 if (TYPE_CANONICAL (t) == t)
7841 {
7842 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7843 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7844 SET_TYPE_STRUCTURAL_EQUALITY (t);
7845 else if (TYPE_CANONICAL (elt_type) != elt_type
7846 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7847 TYPE_CANONICAL (t)
7848 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7849 index_type
7850 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7851 shared);
7852 }
7853
7854 return t;
7855 }
7856
7857 /* Wrapper around build_array_type_1 with SHARED set to true. */
7858
7859 tree
7860 build_array_type (tree elt_type, tree index_type)
7861 {
7862 return build_array_type_1 (elt_type, index_type, true);
7863 }
7864
7865 /* Wrapper around build_array_type_1 with SHARED set to false. */
7866
7867 tree
7868 build_nonshared_array_type (tree elt_type, tree index_type)
7869 {
7870 return build_array_type_1 (elt_type, index_type, false);
7871 }
7872
7873 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7874 sizetype. */
7875
7876 tree
7877 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7878 {
7879 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7880 }
7881
7882 /* Recursively examines the array elements of TYPE, until a non-array
7883 element type is found. */
7884
7885 tree
7886 strip_array_types (tree type)
7887 {
7888 while (TREE_CODE (type) == ARRAY_TYPE)
7889 type = TREE_TYPE (type);
7890
7891 return type;
7892 }
7893
7894 /* Computes the canonical argument types from the argument type list
7895 ARGTYPES.
7896
7897 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7898 on entry to this function, or if any of the ARGTYPES are
7899 structural.
7900
7901 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7902 true on entry to this function, or if any of the ARGTYPES are
7903 non-canonical.
7904
7905 Returns a canonical argument list, which may be ARGTYPES when the
7906 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7907 true) or would not differ from ARGTYPES. */
7908
7909 static tree
7910 maybe_canonicalize_argtypes (tree argtypes,
7911 bool *any_structural_p,
7912 bool *any_noncanonical_p)
7913 {
7914 tree arg;
7915 bool any_noncanonical_argtypes_p = false;
7916
7917 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7918 {
7919 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7920 /* Fail gracefully by stating that the type is structural. */
7921 *any_structural_p = true;
7922 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7923 *any_structural_p = true;
7924 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7925 || TREE_PURPOSE (arg))
7926 /* If the argument has a default argument, we consider it
7927 non-canonical even though the type itself is canonical.
7928 That way, different variants of function and method types
7929 with default arguments will all point to the variant with
7930 no defaults as their canonical type. */
7931 any_noncanonical_argtypes_p = true;
7932 }
7933
7934 if (*any_structural_p)
7935 return argtypes;
7936
7937 if (any_noncanonical_argtypes_p)
7938 {
7939 /* Build the canonical list of argument types. */
7940 tree canon_argtypes = NULL_TREE;
7941 bool is_void = false;
7942
7943 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7944 {
7945 if (arg == void_list_node)
7946 is_void = true;
7947 else
7948 canon_argtypes = tree_cons (NULL_TREE,
7949 TYPE_CANONICAL (TREE_VALUE (arg)),
7950 canon_argtypes);
7951 }
7952
7953 canon_argtypes = nreverse (canon_argtypes);
7954 if (is_void)
7955 canon_argtypes = chainon (canon_argtypes, void_list_node);
7956
7957 /* There is a non-canonical type. */
7958 *any_noncanonical_p = true;
7959 return canon_argtypes;
7960 }
7961
7962 /* The canonical argument types are the same as ARGTYPES. */
7963 return argtypes;
7964 }
7965
7966 /* Construct, lay out and return
7967 the type of functions returning type VALUE_TYPE
7968 given arguments of types ARG_TYPES.
7969 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7970 are data type nodes for the arguments of the function.
7971 If such a type has already been constructed, reuse it. */
7972
7973 tree
7974 build_function_type (tree value_type, tree arg_types)
7975 {
7976 tree t;
7977 inchash::hash hstate;
7978 bool any_structural_p, any_noncanonical_p;
7979 tree canon_argtypes;
7980
7981 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7982 {
7983 error ("function return type cannot be function");
7984 value_type = integer_type_node;
7985 }
7986
7987 /* Make a node of the sort we want. */
7988 t = make_node (FUNCTION_TYPE);
7989 TREE_TYPE (t) = value_type;
7990 TYPE_ARG_TYPES (t) = arg_types;
7991
7992 /* If we already have such a type, use the old one. */
7993 hstate.add_object (TYPE_HASH (value_type));
7994 type_hash_list (arg_types, hstate);
7995 t = type_hash_canon (hstate.end (), t);
7996
7997 /* Set up the canonical type. */
7998 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7999 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8000 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8001 &any_structural_p,
8002 &any_noncanonical_p);
8003 if (any_structural_p)
8004 SET_TYPE_STRUCTURAL_EQUALITY (t);
8005 else if (any_noncanonical_p)
8006 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8007 canon_argtypes);
8008
8009 if (!COMPLETE_TYPE_P (t))
8010 layout_type (t);
8011 return t;
8012 }
8013
8014 /* Build a function type. The RETURN_TYPE is the type returned by the
8015 function. If VAARGS is set, no void_type_node is appended to the
8016 the list. ARGP must be always be terminated be a NULL_TREE. */
8017
8018 static tree
8019 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8020 {
8021 tree t, args, last;
8022
8023 t = va_arg (argp, tree);
8024 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8025 args = tree_cons (NULL_TREE, t, args);
8026
8027 if (vaargs)
8028 {
8029 last = args;
8030 if (args != NULL_TREE)
8031 args = nreverse (args);
8032 gcc_assert (last != void_list_node);
8033 }
8034 else if (args == NULL_TREE)
8035 args = void_list_node;
8036 else
8037 {
8038 last = args;
8039 args = nreverse (args);
8040 TREE_CHAIN (last) = void_list_node;
8041 }
8042 args = build_function_type (return_type, args);
8043
8044 return args;
8045 }
8046
8047 /* Build a function type. The RETURN_TYPE is the type returned by the
8048 function. If additional arguments are provided, they are
8049 additional argument types. The list of argument types must always
8050 be terminated by NULL_TREE. */
8051
8052 tree
8053 build_function_type_list (tree return_type, ...)
8054 {
8055 tree args;
8056 va_list p;
8057
8058 va_start (p, return_type);
8059 args = build_function_type_list_1 (false, return_type, p);
8060 va_end (p);
8061 return args;
8062 }
8063
8064 /* Build a variable argument function type. The RETURN_TYPE is the
8065 type returned by the function. If additional arguments are provided,
8066 they are additional argument types. The list of argument types must
8067 always be terminated by NULL_TREE. */
8068
8069 tree
8070 build_varargs_function_type_list (tree return_type, ...)
8071 {
8072 tree args;
8073 va_list p;
8074
8075 va_start (p, return_type);
8076 args = build_function_type_list_1 (true, return_type, p);
8077 va_end (p);
8078
8079 return args;
8080 }
8081
8082 /* Build a function type. RETURN_TYPE is the type returned by the
8083 function; VAARGS indicates whether the function takes varargs. The
8084 function takes N named arguments, the types of which are provided in
8085 ARG_TYPES. */
8086
8087 static tree
8088 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8089 tree *arg_types)
8090 {
8091 int i;
8092 tree t = vaargs ? NULL_TREE : void_list_node;
8093
8094 for (i = n - 1; i >= 0; i--)
8095 t = tree_cons (NULL_TREE, arg_types[i], t);
8096
8097 return build_function_type (return_type, t);
8098 }
8099
8100 /* Build a function type. RETURN_TYPE is the type returned by the
8101 function. The function takes N named arguments, the types of which
8102 are provided in ARG_TYPES. */
8103
8104 tree
8105 build_function_type_array (tree return_type, int n, tree *arg_types)
8106 {
8107 return build_function_type_array_1 (false, return_type, n, arg_types);
8108 }
8109
8110 /* Build a variable argument function type. RETURN_TYPE is the type
8111 returned by the function. The function takes N named arguments, the
8112 types of which are provided in ARG_TYPES. */
8113
8114 tree
8115 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8116 {
8117 return build_function_type_array_1 (true, return_type, n, arg_types);
8118 }
8119
8120 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8121 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8122 for the method. An implicit additional parameter (of type
8123 pointer-to-BASETYPE) is added to the ARGTYPES. */
8124
8125 tree
8126 build_method_type_directly (tree basetype,
8127 tree rettype,
8128 tree argtypes)
8129 {
8130 tree t;
8131 tree ptype;
8132 inchash::hash hstate;
8133 bool any_structural_p, any_noncanonical_p;
8134 tree canon_argtypes;
8135
8136 /* Make a node of the sort we want. */
8137 t = make_node (METHOD_TYPE);
8138
8139 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8140 TREE_TYPE (t) = rettype;
8141 ptype = build_pointer_type (basetype);
8142
8143 /* The actual arglist for this function includes a "hidden" argument
8144 which is "this". Put it into the list of argument types. */
8145 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8146 TYPE_ARG_TYPES (t) = argtypes;
8147
8148 /* If we already have such a type, use the old one. */
8149 hstate.add_object (TYPE_HASH (basetype));
8150 hstate.add_object (TYPE_HASH (rettype));
8151 type_hash_list (argtypes, hstate);
8152 t = type_hash_canon (hstate.end (), t);
8153
8154 /* Set up the canonical type. */
8155 any_structural_p
8156 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8157 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8158 any_noncanonical_p
8159 = (TYPE_CANONICAL (basetype) != basetype
8160 || TYPE_CANONICAL (rettype) != rettype);
8161 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8162 &any_structural_p,
8163 &any_noncanonical_p);
8164 if (any_structural_p)
8165 SET_TYPE_STRUCTURAL_EQUALITY (t);
8166 else if (any_noncanonical_p)
8167 TYPE_CANONICAL (t)
8168 = build_method_type_directly (TYPE_CANONICAL (basetype),
8169 TYPE_CANONICAL (rettype),
8170 canon_argtypes);
8171 if (!COMPLETE_TYPE_P (t))
8172 layout_type (t);
8173
8174 return t;
8175 }
8176
8177 /* Construct, lay out and return the type of methods belonging to class
8178 BASETYPE and whose arguments and values are described by TYPE.
8179 If that type exists already, reuse it.
8180 TYPE must be a FUNCTION_TYPE node. */
8181
8182 tree
8183 build_method_type (tree basetype, tree type)
8184 {
8185 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8186
8187 return build_method_type_directly (basetype,
8188 TREE_TYPE (type),
8189 TYPE_ARG_TYPES (type));
8190 }
8191
8192 /* Construct, lay out and return the type of offsets to a value
8193 of type TYPE, within an object of type BASETYPE.
8194 If a suitable offset type exists already, reuse it. */
8195
8196 tree
8197 build_offset_type (tree basetype, tree type)
8198 {
8199 tree t;
8200 inchash::hash hstate;
8201
8202 /* Make a node of the sort we want. */
8203 t = make_node (OFFSET_TYPE);
8204
8205 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8206 TREE_TYPE (t) = type;
8207
8208 /* If we already have such a type, use the old one. */
8209 hstate.add_object (TYPE_HASH (basetype));
8210 hstate.add_object (TYPE_HASH (type));
8211 t = type_hash_canon (hstate.end (), t);
8212
8213 if (!COMPLETE_TYPE_P (t))
8214 layout_type (t);
8215
8216 if (TYPE_CANONICAL (t) == t)
8217 {
8218 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8219 || TYPE_STRUCTURAL_EQUALITY_P (type))
8220 SET_TYPE_STRUCTURAL_EQUALITY (t);
8221 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8222 || TYPE_CANONICAL (type) != type)
8223 TYPE_CANONICAL (t)
8224 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8225 TYPE_CANONICAL (type));
8226 }
8227
8228 return t;
8229 }
8230
8231 /* Create a complex type whose components are COMPONENT_TYPE. */
8232
8233 tree
8234 build_complex_type (tree component_type)
8235 {
8236 tree t;
8237 inchash::hash hstate;
8238
8239 gcc_assert (INTEGRAL_TYPE_P (component_type)
8240 || SCALAR_FLOAT_TYPE_P (component_type)
8241 || FIXED_POINT_TYPE_P (component_type));
8242
8243 /* Make a node of the sort we want. */
8244 t = make_node (COMPLEX_TYPE);
8245
8246 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8247
8248 /* If we already have such a type, use the old one. */
8249 hstate.add_object (TYPE_HASH (component_type));
8250 t = type_hash_canon (hstate.end (), t);
8251
8252 if (!COMPLETE_TYPE_P (t))
8253 layout_type (t);
8254
8255 if (TYPE_CANONICAL (t) == t)
8256 {
8257 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8258 SET_TYPE_STRUCTURAL_EQUALITY (t);
8259 else if (TYPE_CANONICAL (component_type) != component_type)
8260 TYPE_CANONICAL (t)
8261 = build_complex_type (TYPE_CANONICAL (component_type));
8262 }
8263
8264 /* We need to create a name, since complex is a fundamental type. */
8265 if (! TYPE_NAME (t))
8266 {
8267 const char *name;
8268 if (component_type == char_type_node)
8269 name = "complex char";
8270 else if (component_type == signed_char_type_node)
8271 name = "complex signed char";
8272 else if (component_type == unsigned_char_type_node)
8273 name = "complex unsigned char";
8274 else if (component_type == short_integer_type_node)
8275 name = "complex short int";
8276 else if (component_type == short_unsigned_type_node)
8277 name = "complex short unsigned int";
8278 else if (component_type == integer_type_node)
8279 name = "complex int";
8280 else if (component_type == unsigned_type_node)
8281 name = "complex unsigned int";
8282 else if (component_type == long_integer_type_node)
8283 name = "complex long int";
8284 else if (component_type == long_unsigned_type_node)
8285 name = "complex long unsigned int";
8286 else if (component_type == long_long_integer_type_node)
8287 name = "complex long long int";
8288 else if (component_type == long_long_unsigned_type_node)
8289 name = "complex long long unsigned int";
8290 else
8291 name = 0;
8292
8293 if (name != 0)
8294 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8295 get_identifier (name), t);
8296 }
8297
8298 return build_qualified_type (t, TYPE_QUALS (component_type));
8299 }
8300
8301 /* If TYPE is a real or complex floating-point type and the target
8302 does not directly support arithmetic on TYPE then return the wider
8303 type to be used for arithmetic on TYPE. Otherwise, return
8304 NULL_TREE. */
8305
8306 tree
8307 excess_precision_type (tree type)
8308 {
8309 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8310 {
8311 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8312 switch (TREE_CODE (type))
8313 {
8314 case REAL_TYPE:
8315 switch (flt_eval_method)
8316 {
8317 case 1:
8318 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8319 return double_type_node;
8320 break;
8321 case 2:
8322 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8323 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8324 return long_double_type_node;
8325 break;
8326 default:
8327 gcc_unreachable ();
8328 }
8329 break;
8330 case COMPLEX_TYPE:
8331 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8332 return NULL_TREE;
8333 switch (flt_eval_method)
8334 {
8335 case 1:
8336 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8337 return complex_double_type_node;
8338 break;
8339 case 2:
8340 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8341 || (TYPE_MODE (TREE_TYPE (type))
8342 == TYPE_MODE (double_type_node)))
8343 return complex_long_double_type_node;
8344 break;
8345 default:
8346 gcc_unreachable ();
8347 }
8348 break;
8349 default:
8350 break;
8351 }
8352 }
8353 return NULL_TREE;
8354 }
8355 \f
8356 /* Return OP, stripped of any conversions to wider types as much as is safe.
8357 Converting the value back to OP's type makes a value equivalent to OP.
8358
8359 If FOR_TYPE is nonzero, we return a value which, if converted to
8360 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8361
8362 OP must have integer, real or enumeral type. Pointers are not allowed!
8363
8364 There are some cases where the obvious value we could return
8365 would regenerate to OP if converted to OP's type,
8366 but would not extend like OP to wider types.
8367 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8368 For example, if OP is (unsigned short)(signed char)-1,
8369 we avoid returning (signed char)-1 if FOR_TYPE is int,
8370 even though extending that to an unsigned short would regenerate OP,
8371 since the result of extending (signed char)-1 to (int)
8372 is different from (int) OP. */
8373
8374 tree
8375 get_unwidened (tree op, tree for_type)
8376 {
8377 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8378 tree type = TREE_TYPE (op);
8379 unsigned final_prec
8380 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8381 int uns
8382 = (for_type != 0 && for_type != type
8383 && final_prec > TYPE_PRECISION (type)
8384 && TYPE_UNSIGNED (type));
8385 tree win = op;
8386
8387 while (CONVERT_EXPR_P (op))
8388 {
8389 int bitschange;
8390
8391 /* TYPE_PRECISION on vector types has different meaning
8392 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8393 so avoid them here. */
8394 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8395 break;
8396
8397 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8398 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8399
8400 /* Truncations are many-one so cannot be removed.
8401 Unless we are later going to truncate down even farther. */
8402 if (bitschange < 0
8403 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8404 break;
8405
8406 /* See what's inside this conversion. If we decide to strip it,
8407 we will set WIN. */
8408 op = TREE_OPERAND (op, 0);
8409
8410 /* If we have not stripped any zero-extensions (uns is 0),
8411 we can strip any kind of extension.
8412 If we have previously stripped a zero-extension,
8413 only zero-extensions can safely be stripped.
8414 Any extension can be stripped if the bits it would produce
8415 are all going to be discarded later by truncating to FOR_TYPE. */
8416
8417 if (bitschange > 0)
8418 {
8419 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8420 win = op;
8421 /* TYPE_UNSIGNED says whether this is a zero-extension.
8422 Let's avoid computing it if it does not affect WIN
8423 and if UNS will not be needed again. */
8424 if ((uns
8425 || CONVERT_EXPR_P (op))
8426 && TYPE_UNSIGNED (TREE_TYPE (op)))
8427 {
8428 uns = 1;
8429 win = op;
8430 }
8431 }
8432 }
8433
8434 /* If we finally reach a constant see if it fits in for_type and
8435 in that case convert it. */
8436 if (for_type
8437 && TREE_CODE (win) == INTEGER_CST
8438 && TREE_TYPE (win) != for_type
8439 && int_fits_type_p (win, for_type))
8440 win = fold_convert (for_type, win);
8441
8442 return win;
8443 }
8444 \f
8445 /* Return OP or a simpler expression for a narrower value
8446 which can be sign-extended or zero-extended to give back OP.
8447 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8448 or 0 if the value should be sign-extended. */
8449
8450 tree
8451 get_narrower (tree op, int *unsignedp_ptr)
8452 {
8453 int uns = 0;
8454 int first = 1;
8455 tree win = op;
8456 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8457
8458 while (TREE_CODE (op) == NOP_EXPR)
8459 {
8460 int bitschange
8461 = (TYPE_PRECISION (TREE_TYPE (op))
8462 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8463
8464 /* Truncations are many-one so cannot be removed. */
8465 if (bitschange < 0)
8466 break;
8467
8468 /* See what's inside this conversion. If we decide to strip it,
8469 we will set WIN. */
8470
8471 if (bitschange > 0)
8472 {
8473 op = TREE_OPERAND (op, 0);
8474 /* An extension: the outermost one can be stripped,
8475 but remember whether it is zero or sign extension. */
8476 if (first)
8477 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8478 /* Otherwise, if a sign extension has been stripped,
8479 only sign extensions can now be stripped;
8480 if a zero extension has been stripped, only zero-extensions. */
8481 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8482 break;
8483 first = 0;
8484 }
8485 else /* bitschange == 0 */
8486 {
8487 /* A change in nominal type can always be stripped, but we must
8488 preserve the unsignedness. */
8489 if (first)
8490 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8491 first = 0;
8492 op = TREE_OPERAND (op, 0);
8493 /* Keep trying to narrow, but don't assign op to win if it
8494 would turn an integral type into something else. */
8495 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8496 continue;
8497 }
8498
8499 win = op;
8500 }
8501
8502 if (TREE_CODE (op) == COMPONENT_REF
8503 /* Since type_for_size always gives an integer type. */
8504 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8505 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8506 /* Ensure field is laid out already. */
8507 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8508 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8509 {
8510 unsigned HOST_WIDE_INT innerprec
8511 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8512 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8513 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8514 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8515
8516 /* We can get this structure field in a narrower type that fits it,
8517 but the resulting extension to its nominal type (a fullword type)
8518 must satisfy the same conditions as for other extensions.
8519
8520 Do this only for fields that are aligned (not bit-fields),
8521 because when bit-field insns will be used there is no
8522 advantage in doing this. */
8523
8524 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8525 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8526 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8527 && type != 0)
8528 {
8529 if (first)
8530 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8531 win = fold_convert (type, op);
8532 }
8533 }
8534
8535 *unsignedp_ptr = uns;
8536 return win;
8537 }
8538 \f
8539 /* Returns true if integer constant C has a value that is permissible
8540 for type TYPE (an INTEGER_TYPE). */
8541
8542 bool
8543 int_fits_type_p (const_tree c, const_tree type)
8544 {
8545 tree type_low_bound, type_high_bound;
8546 bool ok_for_low_bound, ok_for_high_bound;
8547 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8548
8549 retry:
8550 type_low_bound = TYPE_MIN_VALUE (type);
8551 type_high_bound = TYPE_MAX_VALUE (type);
8552
8553 /* If at least one bound of the type is a constant integer, we can check
8554 ourselves and maybe make a decision. If no such decision is possible, but
8555 this type is a subtype, try checking against that. Otherwise, use
8556 fits_to_tree_p, which checks against the precision.
8557
8558 Compute the status for each possibly constant bound, and return if we see
8559 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8560 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8561 for "constant known to fit". */
8562
8563 /* Check if c >= type_low_bound. */
8564 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8565 {
8566 if (tree_int_cst_lt (c, type_low_bound))
8567 return false;
8568 ok_for_low_bound = true;
8569 }
8570 else
8571 ok_for_low_bound = false;
8572
8573 /* Check if c <= type_high_bound. */
8574 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8575 {
8576 if (tree_int_cst_lt (type_high_bound, c))
8577 return false;
8578 ok_for_high_bound = true;
8579 }
8580 else
8581 ok_for_high_bound = false;
8582
8583 /* If the constant fits both bounds, the result is known. */
8584 if (ok_for_low_bound && ok_for_high_bound)
8585 return true;
8586
8587 /* Perform some generic filtering which may allow making a decision
8588 even if the bounds are not constant. First, negative integers
8589 never fit in unsigned types, */
8590 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8591 return false;
8592
8593 /* Second, narrower types always fit in wider ones. */
8594 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8595 return true;
8596
8597 /* Third, unsigned integers with top bit set never fit signed types. */
8598 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8599 {
8600 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8601 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8602 {
8603 /* When a tree_cst is converted to a wide-int, the precision
8604 is taken from the type. However, if the precision of the
8605 mode underneath the type is smaller than that, it is
8606 possible that the value will not fit. The test below
8607 fails if any bit is set between the sign bit of the
8608 underlying mode and the top bit of the type. */
8609 if (wi::ne_p (wi::zext (c, prec - 1), c))
8610 return false;
8611 }
8612 else if (wi::neg_p (c))
8613 return false;
8614 }
8615
8616 /* If we haven't been able to decide at this point, there nothing more we
8617 can check ourselves here. Look at the base type if we have one and it
8618 has the same precision. */
8619 if (TREE_CODE (type) == INTEGER_TYPE
8620 && TREE_TYPE (type) != 0
8621 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8622 {
8623 type = TREE_TYPE (type);
8624 goto retry;
8625 }
8626
8627 /* Or to fits_to_tree_p, if nothing else. */
8628 return wi::fits_to_tree_p (c, type);
8629 }
8630
8631 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8632 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8633 represented (assuming two's-complement arithmetic) within the bit
8634 precision of the type are returned instead. */
8635
8636 void
8637 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8638 {
8639 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8640 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8641 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8642 else
8643 {
8644 if (TYPE_UNSIGNED (type))
8645 mpz_set_ui (min, 0);
8646 else
8647 {
8648 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8649 wi::to_mpz (mn, min, SIGNED);
8650 }
8651 }
8652
8653 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8654 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8655 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8656 else
8657 {
8658 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8659 wi::to_mpz (mn, max, TYPE_SIGN (type));
8660 }
8661 }
8662
8663 /* Return true if VAR is an automatic variable defined in function FN. */
8664
8665 bool
8666 auto_var_in_fn_p (const_tree var, const_tree fn)
8667 {
8668 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8669 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8670 || TREE_CODE (var) == PARM_DECL)
8671 && ! TREE_STATIC (var))
8672 || TREE_CODE (var) == LABEL_DECL
8673 || TREE_CODE (var) == RESULT_DECL));
8674 }
8675
8676 /* Subprogram of following function. Called by walk_tree.
8677
8678 Return *TP if it is an automatic variable or parameter of the
8679 function passed in as DATA. */
8680
8681 static tree
8682 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8683 {
8684 tree fn = (tree) data;
8685
8686 if (TYPE_P (*tp))
8687 *walk_subtrees = 0;
8688
8689 else if (DECL_P (*tp)
8690 && auto_var_in_fn_p (*tp, fn))
8691 return *tp;
8692
8693 return NULL_TREE;
8694 }
8695
8696 /* Returns true if T is, contains, or refers to a type with variable
8697 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8698 arguments, but not the return type. If FN is nonzero, only return
8699 true if a modifier of the type or position of FN is a variable or
8700 parameter inside FN.
8701
8702 This concept is more general than that of C99 'variably modified types':
8703 in C99, a struct type is never variably modified because a VLA may not
8704 appear as a structure member. However, in GNU C code like:
8705
8706 struct S { int i[f()]; };
8707
8708 is valid, and other languages may define similar constructs. */
8709
8710 bool
8711 variably_modified_type_p (tree type, tree fn)
8712 {
8713 tree t;
8714
8715 /* Test if T is either variable (if FN is zero) or an expression containing
8716 a variable in FN. If TYPE isn't gimplified, return true also if
8717 gimplify_one_sizepos would gimplify the expression into a local
8718 variable. */
8719 #define RETURN_TRUE_IF_VAR(T) \
8720 do { tree _t = (T); \
8721 if (_t != NULL_TREE \
8722 && _t != error_mark_node \
8723 && TREE_CODE (_t) != INTEGER_CST \
8724 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8725 && (!fn \
8726 || (!TYPE_SIZES_GIMPLIFIED (type) \
8727 && !is_gimple_sizepos (_t)) \
8728 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8729 return true; } while (0)
8730
8731 if (type == error_mark_node)
8732 return false;
8733
8734 /* If TYPE itself has variable size, it is variably modified. */
8735 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8736 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8737
8738 switch (TREE_CODE (type))
8739 {
8740 case POINTER_TYPE:
8741 case REFERENCE_TYPE:
8742 case VECTOR_TYPE:
8743 if (variably_modified_type_p (TREE_TYPE (type), fn))
8744 return true;
8745 break;
8746
8747 case FUNCTION_TYPE:
8748 case METHOD_TYPE:
8749 /* If TYPE is a function type, it is variably modified if the
8750 return type is variably modified. */
8751 if (variably_modified_type_p (TREE_TYPE (type), fn))
8752 return true;
8753 break;
8754
8755 case INTEGER_TYPE:
8756 case REAL_TYPE:
8757 case FIXED_POINT_TYPE:
8758 case ENUMERAL_TYPE:
8759 case BOOLEAN_TYPE:
8760 /* Scalar types are variably modified if their end points
8761 aren't constant. */
8762 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8763 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8764 break;
8765
8766 case RECORD_TYPE:
8767 case UNION_TYPE:
8768 case QUAL_UNION_TYPE:
8769 /* We can't see if any of the fields are variably-modified by the
8770 definition we normally use, since that would produce infinite
8771 recursion via pointers. */
8772 /* This is variably modified if some field's type is. */
8773 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8774 if (TREE_CODE (t) == FIELD_DECL)
8775 {
8776 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8777 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8778 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8779
8780 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8781 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8782 }
8783 break;
8784
8785 case ARRAY_TYPE:
8786 /* Do not call ourselves to avoid infinite recursion. This is
8787 variably modified if the element type is. */
8788 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8789 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8790 break;
8791
8792 default:
8793 break;
8794 }
8795
8796 /* The current language may have other cases to check, but in general,
8797 all other types are not variably modified. */
8798 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8799
8800 #undef RETURN_TRUE_IF_VAR
8801 }
8802
8803 /* Given a DECL or TYPE, return the scope in which it was declared, or
8804 NULL_TREE if there is no containing scope. */
8805
8806 tree
8807 get_containing_scope (const_tree t)
8808 {
8809 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8810 }
8811
8812 /* Return the innermost context enclosing DECL that is
8813 a FUNCTION_DECL, or zero if none. */
8814
8815 tree
8816 decl_function_context (const_tree decl)
8817 {
8818 tree context;
8819
8820 if (TREE_CODE (decl) == ERROR_MARK)
8821 return 0;
8822
8823 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8824 where we look up the function at runtime. Such functions always take
8825 a first argument of type 'pointer to real context'.
8826
8827 C++ should really be fixed to use DECL_CONTEXT for the real context,
8828 and use something else for the "virtual context". */
8829 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8830 context
8831 = TYPE_MAIN_VARIANT
8832 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8833 else
8834 context = DECL_CONTEXT (decl);
8835
8836 while (context && TREE_CODE (context) != FUNCTION_DECL)
8837 {
8838 if (TREE_CODE (context) == BLOCK)
8839 context = BLOCK_SUPERCONTEXT (context);
8840 else
8841 context = get_containing_scope (context);
8842 }
8843
8844 return context;
8845 }
8846
8847 /* Return the innermost context enclosing DECL that is
8848 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8849 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8850
8851 tree
8852 decl_type_context (const_tree decl)
8853 {
8854 tree context = DECL_CONTEXT (decl);
8855
8856 while (context)
8857 switch (TREE_CODE (context))
8858 {
8859 case NAMESPACE_DECL:
8860 case TRANSLATION_UNIT_DECL:
8861 return NULL_TREE;
8862
8863 case RECORD_TYPE:
8864 case UNION_TYPE:
8865 case QUAL_UNION_TYPE:
8866 return context;
8867
8868 case TYPE_DECL:
8869 case FUNCTION_DECL:
8870 context = DECL_CONTEXT (context);
8871 break;
8872
8873 case BLOCK:
8874 context = BLOCK_SUPERCONTEXT (context);
8875 break;
8876
8877 default:
8878 gcc_unreachable ();
8879 }
8880
8881 return NULL_TREE;
8882 }
8883
8884 /* CALL is a CALL_EXPR. Return the declaration for the function
8885 called, or NULL_TREE if the called function cannot be
8886 determined. */
8887
8888 tree
8889 get_callee_fndecl (const_tree call)
8890 {
8891 tree addr;
8892
8893 if (call == error_mark_node)
8894 return error_mark_node;
8895
8896 /* It's invalid to call this function with anything but a
8897 CALL_EXPR. */
8898 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8899
8900 /* The first operand to the CALL is the address of the function
8901 called. */
8902 addr = CALL_EXPR_FN (call);
8903
8904 /* If there is no function, return early. */
8905 if (addr == NULL_TREE)
8906 return NULL_TREE;
8907
8908 STRIP_NOPS (addr);
8909
8910 /* If this is a readonly function pointer, extract its initial value. */
8911 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8912 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8913 && DECL_INITIAL (addr))
8914 addr = DECL_INITIAL (addr);
8915
8916 /* If the address is just `&f' for some function `f', then we know
8917 that `f' is being called. */
8918 if (TREE_CODE (addr) == ADDR_EXPR
8919 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8920 return TREE_OPERAND (addr, 0);
8921
8922 /* We couldn't figure out what was being called. */
8923 return NULL_TREE;
8924 }
8925
8926 /* Print debugging information about tree nodes generated during the compile,
8927 and any language-specific information. */
8928
8929 void
8930 dump_tree_statistics (void)
8931 {
8932 if (GATHER_STATISTICS)
8933 {
8934 int i;
8935 int total_nodes, total_bytes;
8936 fprintf (stderr, "Kind Nodes Bytes\n");
8937 fprintf (stderr, "---------------------------------------\n");
8938 total_nodes = total_bytes = 0;
8939 for (i = 0; i < (int) all_kinds; i++)
8940 {
8941 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8942 tree_node_counts[i], tree_node_sizes[i]);
8943 total_nodes += tree_node_counts[i];
8944 total_bytes += tree_node_sizes[i];
8945 }
8946 fprintf (stderr, "---------------------------------------\n");
8947 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8948 fprintf (stderr, "---------------------------------------\n");
8949 fprintf (stderr, "Code Nodes\n");
8950 fprintf (stderr, "----------------------------\n");
8951 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8952 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8953 tree_code_counts[i]);
8954 fprintf (stderr, "----------------------------\n");
8955 ssanames_print_statistics ();
8956 phinodes_print_statistics ();
8957 }
8958 else
8959 fprintf (stderr, "(No per-node statistics)\n");
8960
8961 print_type_hash_statistics ();
8962 print_debug_expr_statistics ();
8963 print_value_expr_statistics ();
8964 lang_hooks.print_statistics ();
8965 }
8966 \f
8967 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8968
8969 /* Generate a crc32 of a byte. */
8970
8971 static unsigned
8972 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
8973 {
8974 unsigned ix;
8975
8976 for (ix = bits; ix--; value <<= 1)
8977 {
8978 unsigned feedback;
8979
8980 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
8981 chksum <<= 1;
8982 chksum ^= feedback;
8983 }
8984 return chksum;
8985 }
8986
8987 /* Generate a crc32 of a 32-bit unsigned. */
8988
8989 unsigned
8990 crc32_unsigned (unsigned chksum, unsigned value)
8991 {
8992 return crc32_unsigned_bits (chksum, value, 32);
8993 }
8994
8995 /* Generate a crc32 of a byte. */
8996
8997 unsigned
8998 crc32_byte (unsigned chksum, char byte)
8999 {
9000 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9001 }
9002
9003 /* Generate a crc32 of a string. */
9004
9005 unsigned
9006 crc32_string (unsigned chksum, const char *string)
9007 {
9008 do
9009 {
9010 chksum = crc32_byte (chksum, *string);
9011 }
9012 while (*string++);
9013 return chksum;
9014 }
9015
9016 /* P is a string that will be used in a symbol. Mask out any characters
9017 that are not valid in that context. */
9018
9019 void
9020 clean_symbol_name (char *p)
9021 {
9022 for (; *p; p++)
9023 if (! (ISALNUM (*p)
9024 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9025 || *p == '$'
9026 #endif
9027 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9028 || *p == '.'
9029 #endif
9030 ))
9031 *p = '_';
9032 }
9033
9034 /* Generate a name for a special-purpose function.
9035 The generated name may need to be unique across the whole link.
9036 Changes to this function may also require corresponding changes to
9037 xstrdup_mask_random.
9038 TYPE is some string to identify the purpose of this function to the
9039 linker or collect2; it must start with an uppercase letter,
9040 one of:
9041 I - for constructors
9042 D - for destructors
9043 N - for C++ anonymous namespaces
9044 F - for DWARF unwind frame information. */
9045
9046 tree
9047 get_file_function_name (const char *type)
9048 {
9049 char *buf;
9050 const char *p;
9051 char *q;
9052
9053 /* If we already have a name we know to be unique, just use that. */
9054 if (first_global_object_name)
9055 p = q = ASTRDUP (first_global_object_name);
9056 /* If the target is handling the constructors/destructors, they
9057 will be local to this file and the name is only necessary for
9058 debugging purposes.
9059 We also assign sub_I and sub_D sufixes to constructors called from
9060 the global static constructors. These are always local. */
9061 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9062 || (strncmp (type, "sub_", 4) == 0
9063 && (type[4] == 'I' || type[4] == 'D')))
9064 {
9065 const char *file = main_input_filename;
9066 if (! file)
9067 file = LOCATION_FILE (input_location);
9068 /* Just use the file's basename, because the full pathname
9069 might be quite long. */
9070 p = q = ASTRDUP (lbasename (file));
9071 }
9072 else
9073 {
9074 /* Otherwise, the name must be unique across the entire link.
9075 We don't have anything that we know to be unique to this translation
9076 unit, so use what we do have and throw in some randomness. */
9077 unsigned len;
9078 const char *name = weak_global_object_name;
9079 const char *file = main_input_filename;
9080
9081 if (! name)
9082 name = "";
9083 if (! file)
9084 file = LOCATION_FILE (input_location);
9085
9086 len = strlen (file);
9087 q = (char *) alloca (9 + 17 + len + 1);
9088 memcpy (q, file, len + 1);
9089
9090 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9091 crc32_string (0, name), get_random_seed (false));
9092
9093 p = q;
9094 }
9095
9096 clean_symbol_name (q);
9097 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9098 + strlen (type));
9099
9100 /* Set up the name of the file-level functions we may need.
9101 Use a global object (which is already required to be unique over
9102 the program) rather than the file name (which imposes extra
9103 constraints). */
9104 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9105
9106 return get_identifier (buf);
9107 }
9108 \f
9109 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9110
9111 /* Complain that the tree code of NODE does not match the expected 0
9112 terminated list of trailing codes. The trailing code list can be
9113 empty, for a more vague error message. FILE, LINE, and FUNCTION
9114 are of the caller. */
9115
9116 void
9117 tree_check_failed (const_tree node, const char *file,
9118 int line, const char *function, ...)
9119 {
9120 va_list args;
9121 const char *buffer;
9122 unsigned length = 0;
9123 enum tree_code code;
9124
9125 va_start (args, function);
9126 while ((code = (enum tree_code) va_arg (args, int)))
9127 length += 4 + strlen (get_tree_code_name (code));
9128 va_end (args);
9129 if (length)
9130 {
9131 char *tmp;
9132 va_start (args, function);
9133 length += strlen ("expected ");
9134 buffer = tmp = (char *) alloca (length);
9135 length = 0;
9136 while ((code = (enum tree_code) va_arg (args, int)))
9137 {
9138 const char *prefix = length ? " or " : "expected ";
9139
9140 strcpy (tmp + length, prefix);
9141 length += strlen (prefix);
9142 strcpy (tmp + length, get_tree_code_name (code));
9143 length += strlen (get_tree_code_name (code));
9144 }
9145 va_end (args);
9146 }
9147 else
9148 buffer = "unexpected node";
9149
9150 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9151 buffer, get_tree_code_name (TREE_CODE (node)),
9152 function, trim_filename (file), line);
9153 }
9154
9155 /* Complain that the tree code of NODE does match the expected 0
9156 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9157 the caller. */
9158
9159 void
9160 tree_not_check_failed (const_tree node, const char *file,
9161 int line, const char *function, ...)
9162 {
9163 va_list args;
9164 char *buffer;
9165 unsigned length = 0;
9166 enum tree_code code;
9167
9168 va_start (args, function);
9169 while ((code = (enum tree_code) va_arg (args, int)))
9170 length += 4 + strlen (get_tree_code_name (code));
9171 va_end (args);
9172 va_start (args, function);
9173 buffer = (char *) alloca (length);
9174 length = 0;
9175 while ((code = (enum tree_code) va_arg (args, int)))
9176 {
9177 if (length)
9178 {
9179 strcpy (buffer + length, " or ");
9180 length += 4;
9181 }
9182 strcpy (buffer + length, get_tree_code_name (code));
9183 length += strlen (get_tree_code_name (code));
9184 }
9185 va_end (args);
9186
9187 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9188 buffer, get_tree_code_name (TREE_CODE (node)),
9189 function, trim_filename (file), line);
9190 }
9191
9192 /* Similar to tree_check_failed, except that we check for a class of tree
9193 code, given in CL. */
9194
9195 void
9196 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9197 const char *file, int line, const char *function)
9198 {
9199 internal_error
9200 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9201 TREE_CODE_CLASS_STRING (cl),
9202 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9203 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9204 }
9205
9206 /* Similar to tree_check_failed, except that instead of specifying a
9207 dozen codes, use the knowledge that they're all sequential. */
9208
9209 void
9210 tree_range_check_failed (const_tree node, const char *file, int line,
9211 const char *function, enum tree_code c1,
9212 enum tree_code c2)
9213 {
9214 char *buffer;
9215 unsigned length = 0;
9216 unsigned int c;
9217
9218 for (c = c1; c <= c2; ++c)
9219 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9220
9221 length += strlen ("expected ");
9222 buffer = (char *) alloca (length);
9223 length = 0;
9224
9225 for (c = c1; c <= c2; ++c)
9226 {
9227 const char *prefix = length ? " or " : "expected ";
9228
9229 strcpy (buffer + length, prefix);
9230 length += strlen (prefix);
9231 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9232 length += strlen (get_tree_code_name ((enum tree_code) c));
9233 }
9234
9235 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9236 buffer, get_tree_code_name (TREE_CODE (node)),
9237 function, trim_filename (file), line);
9238 }
9239
9240
9241 /* Similar to tree_check_failed, except that we check that a tree does
9242 not have the specified code, given in CL. */
9243
9244 void
9245 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9246 const char *file, int line, const char *function)
9247 {
9248 internal_error
9249 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9250 TREE_CODE_CLASS_STRING (cl),
9251 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9252 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9253 }
9254
9255
9256 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9257
9258 void
9259 omp_clause_check_failed (const_tree node, const char *file, int line,
9260 const char *function, enum omp_clause_code code)
9261 {
9262 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9263 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9264 function, trim_filename (file), line);
9265 }
9266
9267
9268 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9269
9270 void
9271 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9272 const char *function, enum omp_clause_code c1,
9273 enum omp_clause_code c2)
9274 {
9275 char *buffer;
9276 unsigned length = 0;
9277 unsigned int c;
9278
9279 for (c = c1; c <= c2; ++c)
9280 length += 4 + strlen (omp_clause_code_name[c]);
9281
9282 length += strlen ("expected ");
9283 buffer = (char *) alloca (length);
9284 length = 0;
9285
9286 for (c = c1; c <= c2; ++c)
9287 {
9288 const char *prefix = length ? " or " : "expected ";
9289
9290 strcpy (buffer + length, prefix);
9291 length += strlen (prefix);
9292 strcpy (buffer + length, omp_clause_code_name[c]);
9293 length += strlen (omp_clause_code_name[c]);
9294 }
9295
9296 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9297 buffer, omp_clause_code_name[TREE_CODE (node)],
9298 function, trim_filename (file), line);
9299 }
9300
9301
9302 #undef DEFTREESTRUCT
9303 #define DEFTREESTRUCT(VAL, NAME) NAME,
9304
9305 static const char *ts_enum_names[] = {
9306 #include "treestruct.def"
9307 };
9308 #undef DEFTREESTRUCT
9309
9310 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9311
9312 /* Similar to tree_class_check_failed, except that we check for
9313 whether CODE contains the tree structure identified by EN. */
9314
9315 void
9316 tree_contains_struct_check_failed (const_tree node,
9317 const enum tree_node_structure_enum en,
9318 const char *file, int line,
9319 const char *function)
9320 {
9321 internal_error
9322 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9323 TS_ENUM_NAME (en),
9324 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9325 }
9326
9327
9328 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9329 (dynamically sized) vector. */
9330
9331 void
9332 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9333 const char *function)
9334 {
9335 internal_error
9336 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9337 idx + 1, len, function, trim_filename (file), line);
9338 }
9339
9340 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9341 (dynamically sized) vector. */
9342
9343 void
9344 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9345 const char *function)
9346 {
9347 internal_error
9348 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9349 idx + 1, len, function, trim_filename (file), line);
9350 }
9351
9352 /* Similar to above, except that the check is for the bounds of the operand
9353 vector of an expression node EXP. */
9354
9355 void
9356 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9357 int line, const char *function)
9358 {
9359 enum tree_code code = TREE_CODE (exp);
9360 internal_error
9361 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9362 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9363 function, trim_filename (file), line);
9364 }
9365
9366 /* Similar to above, except that the check is for the number of
9367 operands of an OMP_CLAUSE node. */
9368
9369 void
9370 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9371 int line, const char *function)
9372 {
9373 internal_error
9374 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9375 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9376 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9377 trim_filename (file), line);
9378 }
9379 #endif /* ENABLE_TREE_CHECKING */
9380 \f
9381 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9382 and mapped to the machine mode MODE. Initialize its fields and build
9383 the information necessary for debugging output. */
9384
9385 static tree
9386 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9387 {
9388 tree t;
9389 inchash::hash hstate;
9390
9391 t = make_node (VECTOR_TYPE);
9392 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9393 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9394 SET_TYPE_MODE (t, mode);
9395
9396 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9397 SET_TYPE_STRUCTURAL_EQUALITY (t);
9398 else if (TYPE_CANONICAL (innertype) != innertype
9399 || mode != VOIDmode)
9400 TYPE_CANONICAL (t)
9401 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9402
9403 layout_type (t);
9404
9405 hstate.add_wide_int (VECTOR_TYPE);
9406 hstate.add_wide_int (nunits);
9407 hstate.add_wide_int (mode);
9408 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9409 t = type_hash_canon (hstate.end (), t);
9410
9411 /* We have built a main variant, based on the main variant of the
9412 inner type. Use it to build the variant we return. */
9413 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9414 && TREE_TYPE (t) != innertype)
9415 return build_type_attribute_qual_variant (t,
9416 TYPE_ATTRIBUTES (innertype),
9417 TYPE_QUALS (innertype));
9418
9419 return t;
9420 }
9421
9422 static tree
9423 make_or_reuse_type (unsigned size, int unsignedp)
9424 {
9425 if (size == INT_TYPE_SIZE)
9426 return unsignedp ? unsigned_type_node : integer_type_node;
9427 if (size == CHAR_TYPE_SIZE)
9428 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9429 if (size == SHORT_TYPE_SIZE)
9430 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9431 if (size == LONG_TYPE_SIZE)
9432 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9433 if (size == LONG_LONG_TYPE_SIZE)
9434 return (unsignedp ? long_long_unsigned_type_node
9435 : long_long_integer_type_node);
9436 if (size == 128 && int128_integer_type_node)
9437 return (unsignedp ? int128_unsigned_type_node
9438 : int128_integer_type_node);
9439
9440 if (unsignedp)
9441 return make_unsigned_type (size);
9442 else
9443 return make_signed_type (size);
9444 }
9445
9446 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9447
9448 static tree
9449 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9450 {
9451 if (satp)
9452 {
9453 if (size == SHORT_FRACT_TYPE_SIZE)
9454 return unsignedp ? sat_unsigned_short_fract_type_node
9455 : sat_short_fract_type_node;
9456 if (size == FRACT_TYPE_SIZE)
9457 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9458 if (size == LONG_FRACT_TYPE_SIZE)
9459 return unsignedp ? sat_unsigned_long_fract_type_node
9460 : sat_long_fract_type_node;
9461 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9462 return unsignedp ? sat_unsigned_long_long_fract_type_node
9463 : sat_long_long_fract_type_node;
9464 }
9465 else
9466 {
9467 if (size == SHORT_FRACT_TYPE_SIZE)
9468 return unsignedp ? unsigned_short_fract_type_node
9469 : short_fract_type_node;
9470 if (size == FRACT_TYPE_SIZE)
9471 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9472 if (size == LONG_FRACT_TYPE_SIZE)
9473 return unsignedp ? unsigned_long_fract_type_node
9474 : long_fract_type_node;
9475 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9476 return unsignedp ? unsigned_long_long_fract_type_node
9477 : long_long_fract_type_node;
9478 }
9479
9480 return make_fract_type (size, unsignedp, satp);
9481 }
9482
9483 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9484
9485 static tree
9486 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9487 {
9488 if (satp)
9489 {
9490 if (size == SHORT_ACCUM_TYPE_SIZE)
9491 return unsignedp ? sat_unsigned_short_accum_type_node
9492 : sat_short_accum_type_node;
9493 if (size == ACCUM_TYPE_SIZE)
9494 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9495 if (size == LONG_ACCUM_TYPE_SIZE)
9496 return unsignedp ? sat_unsigned_long_accum_type_node
9497 : sat_long_accum_type_node;
9498 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9499 return unsignedp ? sat_unsigned_long_long_accum_type_node
9500 : sat_long_long_accum_type_node;
9501 }
9502 else
9503 {
9504 if (size == SHORT_ACCUM_TYPE_SIZE)
9505 return unsignedp ? unsigned_short_accum_type_node
9506 : short_accum_type_node;
9507 if (size == ACCUM_TYPE_SIZE)
9508 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9509 if (size == LONG_ACCUM_TYPE_SIZE)
9510 return unsignedp ? unsigned_long_accum_type_node
9511 : long_accum_type_node;
9512 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9513 return unsignedp ? unsigned_long_long_accum_type_node
9514 : long_long_accum_type_node;
9515 }
9516
9517 return make_accum_type (size, unsignedp, satp);
9518 }
9519
9520
9521 /* Create an atomic variant node for TYPE. This routine is called
9522 during initialization of data types to create the 5 basic atomic
9523 types. The generic build_variant_type function requires these to
9524 already be set up in order to function properly, so cannot be
9525 called from there. If ALIGN is non-zero, then ensure alignment is
9526 overridden to this value. */
9527
9528 static tree
9529 build_atomic_base (tree type, unsigned int align)
9530 {
9531 tree t;
9532
9533 /* Make sure its not already registered. */
9534 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9535 return t;
9536
9537 t = build_variant_type_copy (type);
9538 set_type_quals (t, TYPE_QUAL_ATOMIC);
9539
9540 if (align)
9541 TYPE_ALIGN (t) = align;
9542
9543 return t;
9544 }
9545
9546 /* Create nodes for all integer types (and error_mark_node) using the sizes
9547 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9548 SHORT_DOUBLE specifies whether double should be of the same precision
9549 as float. */
9550
9551 void
9552 build_common_tree_nodes (bool signed_char, bool short_double)
9553 {
9554 error_mark_node = make_node (ERROR_MARK);
9555 TREE_TYPE (error_mark_node) = error_mark_node;
9556
9557 initialize_sizetypes ();
9558
9559 /* Define both `signed char' and `unsigned char'. */
9560 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9561 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9562 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9563 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9564
9565 /* Define `char', which is like either `signed char' or `unsigned char'
9566 but not the same as either. */
9567 char_type_node
9568 = (signed_char
9569 ? make_signed_type (CHAR_TYPE_SIZE)
9570 : make_unsigned_type (CHAR_TYPE_SIZE));
9571 TYPE_STRING_FLAG (char_type_node) = 1;
9572
9573 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9574 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9575 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9576 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9577 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9578 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9579 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9580 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9581 #if HOST_BITS_PER_WIDE_INT >= 64
9582 /* TODO: This isn't correct, but as logic depends at the moment on
9583 host's instead of target's wide-integer.
9584 If there is a target not supporting TImode, but has an 128-bit
9585 integer-scalar register, this target check needs to be adjusted. */
9586 if (targetm.scalar_mode_supported_p (TImode))
9587 {
9588 int128_integer_type_node = make_signed_type (128);
9589 int128_unsigned_type_node = make_unsigned_type (128);
9590 }
9591 #endif
9592
9593 /* Define a boolean type. This type only represents boolean values but
9594 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9595 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9596 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9597 TYPE_PRECISION (boolean_type_node) = 1;
9598 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9599
9600 /* Define what type to use for size_t. */
9601 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9602 size_type_node = unsigned_type_node;
9603 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9604 size_type_node = long_unsigned_type_node;
9605 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9606 size_type_node = long_long_unsigned_type_node;
9607 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9608 size_type_node = short_unsigned_type_node;
9609 else
9610 gcc_unreachable ();
9611
9612 /* Fill in the rest of the sized types. Reuse existing type nodes
9613 when possible. */
9614 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9615 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9616 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9617 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9618 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9619
9620 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9621 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9622 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9623 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9624 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9625
9626 /* Don't call build_qualified type for atomics. That routine does
9627 special processing for atomics, and until they are initialized
9628 it's better not to make that call.
9629
9630 Check to see if there is a target override for atomic types. */
9631
9632 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9633 targetm.atomic_align_for_mode (QImode));
9634 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9635 targetm.atomic_align_for_mode (HImode));
9636 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9637 targetm.atomic_align_for_mode (SImode));
9638 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9639 targetm.atomic_align_for_mode (DImode));
9640 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9641 targetm.atomic_align_for_mode (TImode));
9642
9643 access_public_node = get_identifier ("public");
9644 access_protected_node = get_identifier ("protected");
9645 access_private_node = get_identifier ("private");
9646
9647 /* Define these next since types below may used them. */
9648 integer_zero_node = build_int_cst (integer_type_node, 0);
9649 integer_one_node = build_int_cst (integer_type_node, 1);
9650 integer_three_node = build_int_cst (integer_type_node, 3);
9651 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9652
9653 size_zero_node = size_int (0);
9654 size_one_node = size_int (1);
9655 bitsize_zero_node = bitsize_int (0);
9656 bitsize_one_node = bitsize_int (1);
9657 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9658
9659 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9660 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9661
9662 void_type_node = make_node (VOID_TYPE);
9663 layout_type (void_type_node);
9664
9665 /* We are not going to have real types in C with less than byte alignment,
9666 so we might as well not have any types that claim to have it. */
9667 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9668 TYPE_USER_ALIGN (void_type_node) = 0;
9669
9670 void_node = make_node (VOID_CST);
9671 TREE_TYPE (void_node) = void_type_node;
9672
9673 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9674 layout_type (TREE_TYPE (null_pointer_node));
9675
9676 ptr_type_node = build_pointer_type (void_type_node);
9677 const_ptr_type_node
9678 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9679 fileptr_type_node = ptr_type_node;
9680
9681 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9682
9683 float_type_node = make_node (REAL_TYPE);
9684 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9685 layout_type (float_type_node);
9686
9687 double_type_node = make_node (REAL_TYPE);
9688 if (short_double)
9689 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9690 else
9691 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9692 layout_type (double_type_node);
9693
9694 long_double_type_node = make_node (REAL_TYPE);
9695 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9696 layout_type (long_double_type_node);
9697
9698 float_ptr_type_node = build_pointer_type (float_type_node);
9699 double_ptr_type_node = build_pointer_type (double_type_node);
9700 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9701 integer_ptr_type_node = build_pointer_type (integer_type_node);
9702
9703 /* Fixed size integer types. */
9704 uint16_type_node = build_nonstandard_integer_type (16, true);
9705 uint32_type_node = build_nonstandard_integer_type (32, true);
9706 uint64_type_node = build_nonstandard_integer_type (64, true);
9707
9708 /* Decimal float types. */
9709 dfloat32_type_node = make_node (REAL_TYPE);
9710 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9711 layout_type (dfloat32_type_node);
9712 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9713 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9714
9715 dfloat64_type_node = make_node (REAL_TYPE);
9716 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9717 layout_type (dfloat64_type_node);
9718 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9719 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9720
9721 dfloat128_type_node = make_node (REAL_TYPE);
9722 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9723 layout_type (dfloat128_type_node);
9724 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9725 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9726
9727 complex_integer_type_node = build_complex_type (integer_type_node);
9728 complex_float_type_node = build_complex_type (float_type_node);
9729 complex_double_type_node = build_complex_type (double_type_node);
9730 complex_long_double_type_node = build_complex_type (long_double_type_node);
9731
9732 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9733 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9734 sat_ ## KIND ## _type_node = \
9735 make_sat_signed_ ## KIND ## _type (SIZE); \
9736 sat_unsigned_ ## KIND ## _type_node = \
9737 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9738 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9739 unsigned_ ## KIND ## _type_node = \
9740 make_unsigned_ ## KIND ## _type (SIZE);
9741
9742 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9743 sat_ ## WIDTH ## KIND ## _type_node = \
9744 make_sat_signed_ ## KIND ## _type (SIZE); \
9745 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9746 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9747 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9748 unsigned_ ## WIDTH ## KIND ## _type_node = \
9749 make_unsigned_ ## KIND ## _type (SIZE);
9750
9751 /* Make fixed-point type nodes based on four different widths. */
9752 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9753 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9754 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9755 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9756 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9757
9758 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9759 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9760 NAME ## _type_node = \
9761 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9762 u ## NAME ## _type_node = \
9763 make_or_reuse_unsigned_ ## KIND ## _type \
9764 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9765 sat_ ## NAME ## _type_node = \
9766 make_or_reuse_sat_signed_ ## KIND ## _type \
9767 (GET_MODE_BITSIZE (MODE ## mode)); \
9768 sat_u ## NAME ## _type_node = \
9769 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9770 (GET_MODE_BITSIZE (U ## MODE ## mode));
9771
9772 /* Fixed-point type and mode nodes. */
9773 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9774 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9775 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9776 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9777 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9778 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9779 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9780 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9781 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9782 MAKE_FIXED_MODE_NODE (accum, da, DA)
9783 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9784
9785 {
9786 tree t = targetm.build_builtin_va_list ();
9787
9788 /* Many back-ends define record types without setting TYPE_NAME.
9789 If we copied the record type here, we'd keep the original
9790 record type without a name. This breaks name mangling. So,
9791 don't copy record types and let c_common_nodes_and_builtins()
9792 declare the type to be __builtin_va_list. */
9793 if (TREE_CODE (t) != RECORD_TYPE)
9794 t = build_variant_type_copy (t);
9795
9796 va_list_type_node = t;
9797 }
9798 }
9799
9800 /* Modify DECL for given flags.
9801 TM_PURE attribute is set only on types, so the function will modify
9802 DECL's type when ECF_TM_PURE is used. */
9803
9804 void
9805 set_call_expr_flags (tree decl, int flags)
9806 {
9807 if (flags & ECF_NOTHROW)
9808 TREE_NOTHROW (decl) = 1;
9809 if (flags & ECF_CONST)
9810 TREE_READONLY (decl) = 1;
9811 if (flags & ECF_PURE)
9812 DECL_PURE_P (decl) = 1;
9813 if (flags & ECF_LOOPING_CONST_OR_PURE)
9814 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9815 if (flags & ECF_NOVOPS)
9816 DECL_IS_NOVOPS (decl) = 1;
9817 if (flags & ECF_NORETURN)
9818 TREE_THIS_VOLATILE (decl) = 1;
9819 if (flags & ECF_MALLOC)
9820 DECL_IS_MALLOC (decl) = 1;
9821 if (flags & ECF_RETURNS_TWICE)
9822 DECL_IS_RETURNS_TWICE (decl) = 1;
9823 if (flags & ECF_LEAF)
9824 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9825 NULL, DECL_ATTRIBUTES (decl));
9826 if ((flags & ECF_TM_PURE) && flag_tm)
9827 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9828 /* Looping const or pure is implied by noreturn.
9829 There is currently no way to declare looping const or looping pure alone. */
9830 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9831 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9832 }
9833
9834
9835 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9836
9837 static void
9838 local_define_builtin (const char *name, tree type, enum built_in_function code,
9839 const char *library_name, int ecf_flags)
9840 {
9841 tree decl;
9842
9843 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9844 library_name, NULL_TREE);
9845 set_call_expr_flags (decl, ecf_flags);
9846
9847 set_builtin_decl (code, decl, true);
9848 }
9849
9850 /* Call this function after instantiating all builtins that the language
9851 front end cares about. This will build the rest of the builtins
9852 and internal functions that are relied upon by the tree optimizers and
9853 the middle-end. */
9854
9855 void
9856 build_common_builtin_nodes (void)
9857 {
9858 tree tmp, ftype;
9859 int ecf_flags;
9860
9861 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9862 {
9863 ftype = build_function_type (void_type_node, void_list_node);
9864 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9865 "__builtin_unreachable",
9866 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9867 | ECF_CONST | ECF_LEAF);
9868 }
9869
9870 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9871 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9872 {
9873 ftype = build_function_type_list (ptr_type_node,
9874 ptr_type_node, const_ptr_type_node,
9875 size_type_node, NULL_TREE);
9876
9877 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9878 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9879 "memcpy", ECF_NOTHROW | ECF_LEAF);
9880 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9881 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9882 "memmove", ECF_NOTHROW | ECF_LEAF);
9883 }
9884
9885 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9886 {
9887 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9888 const_ptr_type_node, size_type_node,
9889 NULL_TREE);
9890 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9891 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9892 }
9893
9894 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9895 {
9896 ftype = build_function_type_list (ptr_type_node,
9897 ptr_type_node, integer_type_node,
9898 size_type_node, NULL_TREE);
9899 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9900 "memset", ECF_NOTHROW | ECF_LEAF);
9901 }
9902
9903 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9904 {
9905 ftype = build_function_type_list (ptr_type_node,
9906 size_type_node, NULL_TREE);
9907 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9908 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9909 }
9910
9911 ftype = build_function_type_list (ptr_type_node, size_type_node,
9912 size_type_node, NULL_TREE);
9913 local_define_builtin ("__builtin_alloca_with_align", ftype,
9914 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9915 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9916
9917 /* If we're checking the stack, `alloca' can throw. */
9918 if (flag_stack_check)
9919 {
9920 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9921 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9922 }
9923
9924 ftype = build_function_type_list (void_type_node,
9925 ptr_type_node, ptr_type_node,
9926 ptr_type_node, NULL_TREE);
9927 local_define_builtin ("__builtin_init_trampoline", ftype,
9928 BUILT_IN_INIT_TRAMPOLINE,
9929 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9930 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9931 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9932 "__builtin_init_heap_trampoline",
9933 ECF_NOTHROW | ECF_LEAF);
9934
9935 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9936 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9937 BUILT_IN_ADJUST_TRAMPOLINE,
9938 "__builtin_adjust_trampoline",
9939 ECF_CONST | ECF_NOTHROW);
9940
9941 ftype = build_function_type_list (void_type_node,
9942 ptr_type_node, ptr_type_node, NULL_TREE);
9943 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9944 BUILT_IN_NONLOCAL_GOTO,
9945 "__builtin_nonlocal_goto",
9946 ECF_NORETURN | ECF_NOTHROW);
9947
9948 ftype = build_function_type_list (void_type_node,
9949 ptr_type_node, ptr_type_node, NULL_TREE);
9950 local_define_builtin ("__builtin_setjmp_setup", ftype,
9951 BUILT_IN_SETJMP_SETUP,
9952 "__builtin_setjmp_setup", ECF_NOTHROW);
9953
9954 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9955 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9956 BUILT_IN_SETJMP_RECEIVER,
9957 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9958
9959 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9960 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9961 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9962
9963 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9964 local_define_builtin ("__builtin_stack_restore", ftype,
9965 BUILT_IN_STACK_RESTORE,
9966 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9967
9968 /* If there's a possibility that we might use the ARM EABI, build the
9969 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
9970 if (targetm.arm_eabi_unwinder)
9971 {
9972 ftype = build_function_type_list (void_type_node, NULL_TREE);
9973 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9974 BUILT_IN_CXA_END_CLEANUP,
9975 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9976 }
9977
9978 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9979 local_define_builtin ("__builtin_unwind_resume", ftype,
9980 BUILT_IN_UNWIND_RESUME,
9981 ((targetm_common.except_unwind_info (&global_options)
9982 == UI_SJLJ)
9983 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9984 ECF_NORETURN);
9985
9986 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9987 {
9988 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9989 NULL_TREE);
9990 local_define_builtin ("__builtin_return_address", ftype,
9991 BUILT_IN_RETURN_ADDRESS,
9992 "__builtin_return_address",
9993 ECF_NOTHROW);
9994 }
9995
9996 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9997 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9998 {
9999 ftype = build_function_type_list (void_type_node, ptr_type_node,
10000 ptr_type_node, NULL_TREE);
10001 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10002 local_define_builtin ("__cyg_profile_func_enter", ftype,
10003 BUILT_IN_PROFILE_FUNC_ENTER,
10004 "__cyg_profile_func_enter", 0);
10005 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10006 local_define_builtin ("__cyg_profile_func_exit", ftype,
10007 BUILT_IN_PROFILE_FUNC_EXIT,
10008 "__cyg_profile_func_exit", 0);
10009 }
10010
10011 /* The exception object and filter values from the runtime. The argument
10012 must be zero before exception lowering, i.e. from the front end. After
10013 exception lowering, it will be the region number for the exception
10014 landing pad. These functions are PURE instead of CONST to prevent
10015 them from being hoisted past the exception edge that will initialize
10016 its value in the landing pad. */
10017 ftype = build_function_type_list (ptr_type_node,
10018 integer_type_node, NULL_TREE);
10019 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10020 /* Only use TM_PURE if we we have TM language support. */
10021 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10022 ecf_flags |= ECF_TM_PURE;
10023 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10024 "__builtin_eh_pointer", ecf_flags);
10025
10026 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10027 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10028 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10029 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10030
10031 ftype = build_function_type_list (void_type_node,
10032 integer_type_node, integer_type_node,
10033 NULL_TREE);
10034 local_define_builtin ("__builtin_eh_copy_values", ftype,
10035 BUILT_IN_EH_COPY_VALUES,
10036 "__builtin_eh_copy_values", ECF_NOTHROW);
10037
10038 /* Complex multiplication and division. These are handled as builtins
10039 rather than optabs because emit_library_call_value doesn't support
10040 complex. Further, we can do slightly better with folding these
10041 beasties if the real and complex parts of the arguments are separate. */
10042 {
10043 int mode;
10044
10045 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10046 {
10047 char mode_name_buf[4], *q;
10048 const char *p;
10049 enum built_in_function mcode, dcode;
10050 tree type, inner_type;
10051 const char *prefix = "__";
10052
10053 if (targetm.libfunc_gnu_prefix)
10054 prefix = "__gnu_";
10055
10056 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10057 if (type == NULL)
10058 continue;
10059 inner_type = TREE_TYPE (type);
10060
10061 ftype = build_function_type_list (type, inner_type, inner_type,
10062 inner_type, inner_type, NULL_TREE);
10063
10064 mcode = ((enum built_in_function)
10065 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10066 dcode = ((enum built_in_function)
10067 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10068
10069 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10070 *q = TOLOWER (*p);
10071 *q = '\0';
10072
10073 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10074 NULL);
10075 local_define_builtin (built_in_names[mcode], ftype, mcode,
10076 built_in_names[mcode],
10077 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10078
10079 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10080 NULL);
10081 local_define_builtin (built_in_names[dcode], ftype, dcode,
10082 built_in_names[dcode],
10083 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10084 }
10085 }
10086
10087 init_internal_fns ();
10088 }
10089
10090 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10091 better way.
10092
10093 If we requested a pointer to a vector, build up the pointers that
10094 we stripped off while looking for the inner type. Similarly for
10095 return values from functions.
10096
10097 The argument TYPE is the top of the chain, and BOTTOM is the
10098 new type which we will point to. */
10099
10100 tree
10101 reconstruct_complex_type (tree type, tree bottom)
10102 {
10103 tree inner, outer;
10104
10105 if (TREE_CODE (type) == POINTER_TYPE)
10106 {
10107 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10108 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10109 TYPE_REF_CAN_ALIAS_ALL (type));
10110 }
10111 else if (TREE_CODE (type) == REFERENCE_TYPE)
10112 {
10113 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10114 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10115 TYPE_REF_CAN_ALIAS_ALL (type));
10116 }
10117 else if (TREE_CODE (type) == ARRAY_TYPE)
10118 {
10119 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10120 outer = build_array_type (inner, TYPE_DOMAIN (type));
10121 }
10122 else if (TREE_CODE (type) == FUNCTION_TYPE)
10123 {
10124 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10125 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10126 }
10127 else if (TREE_CODE (type) == METHOD_TYPE)
10128 {
10129 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10130 /* The build_method_type_directly() routine prepends 'this' to argument list,
10131 so we must compensate by getting rid of it. */
10132 outer
10133 = build_method_type_directly
10134 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10135 inner,
10136 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10137 }
10138 else if (TREE_CODE (type) == OFFSET_TYPE)
10139 {
10140 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10141 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10142 }
10143 else
10144 return bottom;
10145
10146 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10147 TYPE_QUALS (type));
10148 }
10149
10150 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10151 the inner type. */
10152 tree
10153 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10154 {
10155 int nunits;
10156
10157 switch (GET_MODE_CLASS (mode))
10158 {
10159 case MODE_VECTOR_INT:
10160 case MODE_VECTOR_FLOAT:
10161 case MODE_VECTOR_FRACT:
10162 case MODE_VECTOR_UFRACT:
10163 case MODE_VECTOR_ACCUM:
10164 case MODE_VECTOR_UACCUM:
10165 nunits = GET_MODE_NUNITS (mode);
10166 break;
10167
10168 case MODE_INT:
10169 /* Check that there are no leftover bits. */
10170 gcc_assert (GET_MODE_BITSIZE (mode)
10171 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10172
10173 nunits = GET_MODE_BITSIZE (mode)
10174 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10175 break;
10176
10177 default:
10178 gcc_unreachable ();
10179 }
10180
10181 return make_vector_type (innertype, nunits, mode);
10182 }
10183
10184 /* Similarly, but takes the inner type and number of units, which must be
10185 a power of two. */
10186
10187 tree
10188 build_vector_type (tree innertype, int nunits)
10189 {
10190 return make_vector_type (innertype, nunits, VOIDmode);
10191 }
10192
10193 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10194
10195 tree
10196 build_opaque_vector_type (tree innertype, int nunits)
10197 {
10198 tree t = make_vector_type (innertype, nunits, VOIDmode);
10199 tree cand;
10200 /* We always build the non-opaque variant before the opaque one,
10201 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10202 cand = TYPE_NEXT_VARIANT (t);
10203 if (cand
10204 && TYPE_VECTOR_OPAQUE (cand)
10205 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10206 return cand;
10207 /* Othewise build a variant type and make sure to queue it after
10208 the non-opaque type. */
10209 cand = build_distinct_type_copy (t);
10210 TYPE_VECTOR_OPAQUE (cand) = true;
10211 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10212 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10213 TYPE_NEXT_VARIANT (t) = cand;
10214 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10215 return cand;
10216 }
10217
10218
10219 /* Given an initializer INIT, return TRUE if INIT is zero or some
10220 aggregate of zeros. Otherwise return FALSE. */
10221 bool
10222 initializer_zerop (const_tree init)
10223 {
10224 tree elt;
10225
10226 STRIP_NOPS (init);
10227
10228 switch (TREE_CODE (init))
10229 {
10230 case INTEGER_CST:
10231 return integer_zerop (init);
10232
10233 case REAL_CST:
10234 /* ??? Note that this is not correct for C4X float formats. There,
10235 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10236 negative exponent. */
10237 return real_zerop (init)
10238 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10239
10240 case FIXED_CST:
10241 return fixed_zerop (init);
10242
10243 case COMPLEX_CST:
10244 return integer_zerop (init)
10245 || (real_zerop (init)
10246 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10247 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10248
10249 case VECTOR_CST:
10250 {
10251 unsigned i;
10252 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10253 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10254 return false;
10255 return true;
10256 }
10257
10258 case CONSTRUCTOR:
10259 {
10260 unsigned HOST_WIDE_INT idx;
10261
10262 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10263 if (!initializer_zerop (elt))
10264 return false;
10265 return true;
10266 }
10267
10268 case STRING_CST:
10269 {
10270 int i;
10271
10272 /* We need to loop through all elements to handle cases like
10273 "\0" and "\0foobar". */
10274 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10275 if (TREE_STRING_POINTER (init)[i] != '\0')
10276 return false;
10277
10278 return true;
10279 }
10280
10281 default:
10282 return false;
10283 }
10284 }
10285
10286 /* Check if vector VEC consists of all the equal elements and
10287 that the number of elements corresponds to the type of VEC.
10288 The function returns first element of the vector
10289 or NULL_TREE if the vector is not uniform. */
10290 tree
10291 uniform_vector_p (const_tree vec)
10292 {
10293 tree first, t;
10294 unsigned i;
10295
10296 if (vec == NULL_TREE)
10297 return NULL_TREE;
10298
10299 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10300
10301 if (TREE_CODE (vec) == VECTOR_CST)
10302 {
10303 first = VECTOR_CST_ELT (vec, 0);
10304 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10305 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10306 return NULL_TREE;
10307
10308 return first;
10309 }
10310
10311 else if (TREE_CODE (vec) == CONSTRUCTOR)
10312 {
10313 first = error_mark_node;
10314
10315 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10316 {
10317 if (i == 0)
10318 {
10319 first = t;
10320 continue;
10321 }
10322 if (!operand_equal_p (first, t, 0))
10323 return NULL_TREE;
10324 }
10325 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10326 return NULL_TREE;
10327
10328 return first;
10329 }
10330
10331 return NULL_TREE;
10332 }
10333
10334 /* Build an empty statement at location LOC. */
10335
10336 tree
10337 build_empty_stmt (location_t loc)
10338 {
10339 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10340 SET_EXPR_LOCATION (t, loc);
10341 return t;
10342 }
10343
10344
10345 /* Build an OpenMP clause with code CODE. LOC is the location of the
10346 clause. */
10347
10348 tree
10349 build_omp_clause (location_t loc, enum omp_clause_code code)
10350 {
10351 tree t;
10352 int size, length;
10353
10354 length = omp_clause_num_ops[code];
10355 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10356
10357 record_node_allocation_statistics (OMP_CLAUSE, size);
10358
10359 t = (tree) ggc_internal_alloc (size);
10360 memset (t, 0, size);
10361 TREE_SET_CODE (t, OMP_CLAUSE);
10362 OMP_CLAUSE_SET_CODE (t, code);
10363 OMP_CLAUSE_LOCATION (t) = loc;
10364
10365 return t;
10366 }
10367
10368 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10369 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10370 Except for the CODE and operand count field, other storage for the
10371 object is initialized to zeros. */
10372
10373 tree
10374 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10375 {
10376 tree t;
10377 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10378
10379 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10380 gcc_assert (len >= 1);
10381
10382 record_node_allocation_statistics (code, length);
10383
10384 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10385
10386 TREE_SET_CODE (t, code);
10387
10388 /* Can't use TREE_OPERAND to store the length because if checking is
10389 enabled, it will try to check the length before we store it. :-P */
10390 t->exp.operands[0] = build_int_cst (sizetype, len);
10391
10392 return t;
10393 }
10394
10395 /* Helper function for build_call_* functions; build a CALL_EXPR with
10396 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10397 the argument slots. */
10398
10399 static tree
10400 build_call_1 (tree return_type, tree fn, int nargs)
10401 {
10402 tree t;
10403
10404 t = build_vl_exp (CALL_EXPR, nargs + 3);
10405 TREE_TYPE (t) = return_type;
10406 CALL_EXPR_FN (t) = fn;
10407 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10408
10409 return t;
10410 }
10411
10412 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10413 FN and a null static chain slot. NARGS is the number of call arguments
10414 which are specified as "..." arguments. */
10415
10416 tree
10417 build_call_nary (tree return_type, tree fn, int nargs, ...)
10418 {
10419 tree ret;
10420 va_list args;
10421 va_start (args, nargs);
10422 ret = build_call_valist (return_type, fn, nargs, args);
10423 va_end (args);
10424 return ret;
10425 }
10426
10427 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10428 FN and a null static chain slot. NARGS is the number of call arguments
10429 which are specified as a va_list ARGS. */
10430
10431 tree
10432 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10433 {
10434 tree t;
10435 int i;
10436
10437 t = build_call_1 (return_type, fn, nargs);
10438 for (i = 0; i < nargs; i++)
10439 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10440 process_call_operands (t);
10441 return t;
10442 }
10443
10444 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10445 FN and a null static chain slot. NARGS is the number of call arguments
10446 which are specified as a tree array ARGS. */
10447
10448 tree
10449 build_call_array_loc (location_t loc, tree return_type, tree fn,
10450 int nargs, const tree *args)
10451 {
10452 tree t;
10453 int i;
10454
10455 t = build_call_1 (return_type, fn, nargs);
10456 for (i = 0; i < nargs; i++)
10457 CALL_EXPR_ARG (t, i) = args[i];
10458 process_call_operands (t);
10459 SET_EXPR_LOCATION (t, loc);
10460 return t;
10461 }
10462
10463 /* Like build_call_array, but takes a vec. */
10464
10465 tree
10466 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10467 {
10468 tree ret, t;
10469 unsigned int ix;
10470
10471 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10472 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10473 CALL_EXPR_ARG (ret, ix) = t;
10474 process_call_operands (ret);
10475 return ret;
10476 }
10477
10478 /* Conveniently construct a function call expression. FNDECL names the
10479 function to be called and N arguments are passed in the array
10480 ARGARRAY. */
10481
10482 tree
10483 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10484 {
10485 tree fntype = TREE_TYPE (fndecl);
10486 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10487
10488 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10489 }
10490
10491 /* Conveniently construct a function call expression. FNDECL names the
10492 function to be called and the arguments are passed in the vector
10493 VEC. */
10494
10495 tree
10496 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10497 {
10498 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10499 vec_safe_address (vec));
10500 }
10501
10502
10503 /* Conveniently construct a function call expression. FNDECL names the
10504 function to be called, N is the number of arguments, and the "..."
10505 parameters are the argument expressions. */
10506
10507 tree
10508 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10509 {
10510 va_list ap;
10511 tree *argarray = XALLOCAVEC (tree, n);
10512 int i;
10513
10514 va_start (ap, n);
10515 for (i = 0; i < n; i++)
10516 argarray[i] = va_arg (ap, tree);
10517 va_end (ap);
10518 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10519 }
10520
10521 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10522 varargs macros aren't supported by all bootstrap compilers. */
10523
10524 tree
10525 build_call_expr (tree fndecl, int n, ...)
10526 {
10527 va_list ap;
10528 tree *argarray = XALLOCAVEC (tree, n);
10529 int i;
10530
10531 va_start (ap, n);
10532 for (i = 0; i < n; i++)
10533 argarray[i] = va_arg (ap, tree);
10534 va_end (ap);
10535 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10536 }
10537
10538 /* Build internal call expression. This is just like CALL_EXPR, except
10539 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10540 internal function. */
10541
10542 tree
10543 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10544 tree type, int n, ...)
10545 {
10546 va_list ap;
10547 int i;
10548
10549 tree fn = build_call_1 (type, NULL_TREE, n);
10550 va_start (ap, n);
10551 for (i = 0; i < n; i++)
10552 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10553 va_end (ap);
10554 SET_EXPR_LOCATION (fn, loc);
10555 CALL_EXPR_IFN (fn) = ifn;
10556 return fn;
10557 }
10558
10559 /* Create a new constant string literal and return a char* pointer to it.
10560 The STRING_CST value is the LEN characters at STR. */
10561 tree
10562 build_string_literal (int len, const char *str)
10563 {
10564 tree t, elem, index, type;
10565
10566 t = build_string (len, str);
10567 elem = build_type_variant (char_type_node, 1, 0);
10568 index = build_index_type (size_int (len - 1));
10569 type = build_array_type (elem, index);
10570 TREE_TYPE (t) = type;
10571 TREE_CONSTANT (t) = 1;
10572 TREE_READONLY (t) = 1;
10573 TREE_STATIC (t) = 1;
10574
10575 type = build_pointer_type (elem);
10576 t = build1 (ADDR_EXPR, type,
10577 build4 (ARRAY_REF, elem,
10578 t, integer_zero_node, NULL_TREE, NULL_TREE));
10579 return t;
10580 }
10581
10582
10583
10584 /* Return true if T (assumed to be a DECL) must be assigned a memory
10585 location. */
10586
10587 bool
10588 needs_to_live_in_memory (const_tree t)
10589 {
10590 return (TREE_ADDRESSABLE (t)
10591 || is_global_var (t)
10592 || (TREE_CODE (t) == RESULT_DECL
10593 && !DECL_BY_REFERENCE (t)
10594 && aggregate_value_p (t, current_function_decl)));
10595 }
10596
10597 /* Return value of a constant X and sign-extend it. */
10598
10599 HOST_WIDE_INT
10600 int_cst_value (const_tree x)
10601 {
10602 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10603 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10604
10605 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10606 gcc_assert (cst_and_fits_in_hwi (x));
10607
10608 if (bits < HOST_BITS_PER_WIDE_INT)
10609 {
10610 bool negative = ((val >> (bits - 1)) & 1) != 0;
10611 if (negative)
10612 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10613 else
10614 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10615 }
10616
10617 return val;
10618 }
10619
10620 /* If TYPE is an integral or pointer type, return an integer type with
10621 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10622 if TYPE is already an integer type of signedness UNSIGNEDP. */
10623
10624 tree
10625 signed_or_unsigned_type_for (int unsignedp, tree type)
10626 {
10627 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10628 return type;
10629
10630 if (TREE_CODE (type) == VECTOR_TYPE)
10631 {
10632 tree inner = TREE_TYPE (type);
10633 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10634 if (!inner2)
10635 return NULL_TREE;
10636 if (inner == inner2)
10637 return type;
10638 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10639 }
10640
10641 if (!INTEGRAL_TYPE_P (type)
10642 && !POINTER_TYPE_P (type)
10643 && TREE_CODE (type) != OFFSET_TYPE)
10644 return NULL_TREE;
10645
10646 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10647 }
10648
10649 /* If TYPE is an integral or pointer type, return an integer type with
10650 the same precision which is unsigned, or itself if TYPE is already an
10651 unsigned integer type. */
10652
10653 tree
10654 unsigned_type_for (tree type)
10655 {
10656 return signed_or_unsigned_type_for (1, type);
10657 }
10658
10659 /* If TYPE is an integral or pointer type, return an integer type with
10660 the same precision which is signed, or itself if TYPE is already a
10661 signed integer type. */
10662
10663 tree
10664 signed_type_for (tree type)
10665 {
10666 return signed_or_unsigned_type_for (0, type);
10667 }
10668
10669 /* If TYPE is a vector type, return a signed integer vector type with the
10670 same width and number of subparts. Otherwise return boolean_type_node. */
10671
10672 tree
10673 truth_type_for (tree type)
10674 {
10675 if (TREE_CODE (type) == VECTOR_TYPE)
10676 {
10677 tree elem = lang_hooks.types.type_for_size
10678 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10679 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10680 }
10681 else
10682 return boolean_type_node;
10683 }
10684
10685 /* Returns the largest value obtainable by casting something in INNER type to
10686 OUTER type. */
10687
10688 tree
10689 upper_bound_in_type (tree outer, tree inner)
10690 {
10691 unsigned int det = 0;
10692 unsigned oprec = TYPE_PRECISION (outer);
10693 unsigned iprec = TYPE_PRECISION (inner);
10694 unsigned prec;
10695
10696 /* Compute a unique number for every combination. */
10697 det |= (oprec > iprec) ? 4 : 0;
10698 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10699 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10700
10701 /* Determine the exponent to use. */
10702 switch (det)
10703 {
10704 case 0:
10705 case 1:
10706 /* oprec <= iprec, outer: signed, inner: don't care. */
10707 prec = oprec - 1;
10708 break;
10709 case 2:
10710 case 3:
10711 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10712 prec = oprec;
10713 break;
10714 case 4:
10715 /* oprec > iprec, outer: signed, inner: signed. */
10716 prec = iprec - 1;
10717 break;
10718 case 5:
10719 /* oprec > iprec, outer: signed, inner: unsigned. */
10720 prec = iprec;
10721 break;
10722 case 6:
10723 /* oprec > iprec, outer: unsigned, inner: signed. */
10724 prec = oprec;
10725 break;
10726 case 7:
10727 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10728 prec = iprec;
10729 break;
10730 default:
10731 gcc_unreachable ();
10732 }
10733
10734 return wide_int_to_tree (outer,
10735 wi::mask (prec, false, TYPE_PRECISION (outer)));
10736 }
10737
10738 /* Returns the smallest value obtainable by casting something in INNER type to
10739 OUTER type. */
10740
10741 tree
10742 lower_bound_in_type (tree outer, tree inner)
10743 {
10744 unsigned oprec = TYPE_PRECISION (outer);
10745 unsigned iprec = TYPE_PRECISION (inner);
10746
10747 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10748 and obtain 0. */
10749 if (TYPE_UNSIGNED (outer)
10750 /* If we are widening something of an unsigned type, OUTER type
10751 contains all values of INNER type. In particular, both INNER
10752 and OUTER types have zero in common. */
10753 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10754 return build_int_cst (outer, 0);
10755 else
10756 {
10757 /* If we are widening a signed type to another signed type, we
10758 want to obtain -2^^(iprec-1). If we are keeping the
10759 precision or narrowing to a signed type, we want to obtain
10760 -2^(oprec-1). */
10761 unsigned prec = oprec > iprec ? iprec : oprec;
10762 return wide_int_to_tree (outer,
10763 wi::mask (prec - 1, true,
10764 TYPE_PRECISION (outer)));
10765 }
10766 }
10767
10768 /* Return nonzero if two operands that are suitable for PHI nodes are
10769 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10770 SSA_NAME or invariant. Note that this is strictly an optimization.
10771 That is, callers of this function can directly call operand_equal_p
10772 and get the same result, only slower. */
10773
10774 int
10775 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10776 {
10777 if (arg0 == arg1)
10778 return 1;
10779 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10780 return 0;
10781 return operand_equal_p (arg0, arg1, 0);
10782 }
10783
10784 /* Returns number of zeros at the end of binary representation of X. */
10785
10786 tree
10787 num_ending_zeros (const_tree x)
10788 {
10789 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10790 }
10791
10792
10793 #define WALK_SUBTREE(NODE) \
10794 do \
10795 { \
10796 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10797 if (result) \
10798 return result; \
10799 } \
10800 while (0)
10801
10802 /* This is a subroutine of walk_tree that walks field of TYPE that are to
10803 be walked whenever a type is seen in the tree. Rest of operands and return
10804 value are as for walk_tree. */
10805
10806 static tree
10807 walk_type_fields (tree type, walk_tree_fn func, void *data,
10808 hash_set<tree> *pset, walk_tree_lh lh)
10809 {
10810 tree result = NULL_TREE;
10811
10812 switch (TREE_CODE (type))
10813 {
10814 case POINTER_TYPE:
10815 case REFERENCE_TYPE:
10816 case VECTOR_TYPE:
10817 /* We have to worry about mutually recursive pointers. These can't
10818 be written in C. They can in Ada. It's pathological, but
10819 there's an ACATS test (c38102a) that checks it. Deal with this
10820 by checking if we're pointing to another pointer, that one
10821 points to another pointer, that one does too, and we have no htab.
10822 If so, get a hash table. We check three levels deep to avoid
10823 the cost of the hash table if we don't need one. */
10824 if (POINTER_TYPE_P (TREE_TYPE (type))
10825 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10826 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10827 && !pset)
10828 {
10829 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10830 func, data);
10831 if (result)
10832 return result;
10833
10834 break;
10835 }
10836
10837 /* ... fall through ... */
10838
10839 case COMPLEX_TYPE:
10840 WALK_SUBTREE (TREE_TYPE (type));
10841 break;
10842
10843 case METHOD_TYPE:
10844 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10845
10846 /* Fall through. */
10847
10848 case FUNCTION_TYPE:
10849 WALK_SUBTREE (TREE_TYPE (type));
10850 {
10851 tree arg;
10852
10853 /* We never want to walk into default arguments. */
10854 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10855 WALK_SUBTREE (TREE_VALUE (arg));
10856 }
10857 break;
10858
10859 case ARRAY_TYPE:
10860 /* Don't follow this nodes's type if a pointer for fear that
10861 we'll have infinite recursion. If we have a PSET, then we
10862 need not fear. */
10863 if (pset
10864 || (!POINTER_TYPE_P (TREE_TYPE (type))
10865 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10866 WALK_SUBTREE (TREE_TYPE (type));
10867 WALK_SUBTREE (TYPE_DOMAIN (type));
10868 break;
10869
10870 case OFFSET_TYPE:
10871 WALK_SUBTREE (TREE_TYPE (type));
10872 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10873 break;
10874
10875 default:
10876 break;
10877 }
10878
10879 return NULL_TREE;
10880 }
10881
10882 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10883 called with the DATA and the address of each sub-tree. If FUNC returns a
10884 non-NULL value, the traversal is stopped, and the value returned by FUNC
10885 is returned. If PSET is non-NULL it is used to record the nodes visited,
10886 and to avoid visiting a node more than once. */
10887
10888 tree
10889 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10890 hash_set<tree> *pset, walk_tree_lh lh)
10891 {
10892 enum tree_code code;
10893 int walk_subtrees;
10894 tree result;
10895
10896 #define WALK_SUBTREE_TAIL(NODE) \
10897 do \
10898 { \
10899 tp = & (NODE); \
10900 goto tail_recurse; \
10901 } \
10902 while (0)
10903
10904 tail_recurse:
10905 /* Skip empty subtrees. */
10906 if (!*tp)
10907 return NULL_TREE;
10908
10909 /* Don't walk the same tree twice, if the user has requested
10910 that we avoid doing so. */
10911 if (pset && pset->add (*tp))
10912 return NULL_TREE;
10913
10914 /* Call the function. */
10915 walk_subtrees = 1;
10916 result = (*func) (tp, &walk_subtrees, data);
10917
10918 /* If we found something, return it. */
10919 if (result)
10920 return result;
10921
10922 code = TREE_CODE (*tp);
10923
10924 /* Even if we didn't, FUNC may have decided that there was nothing
10925 interesting below this point in the tree. */
10926 if (!walk_subtrees)
10927 {
10928 /* But we still need to check our siblings. */
10929 if (code == TREE_LIST)
10930 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10931 else if (code == OMP_CLAUSE)
10932 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10933 else
10934 return NULL_TREE;
10935 }
10936
10937 if (lh)
10938 {
10939 result = (*lh) (tp, &walk_subtrees, func, data, pset);
10940 if (result || !walk_subtrees)
10941 return result;
10942 }
10943
10944 switch (code)
10945 {
10946 case ERROR_MARK:
10947 case IDENTIFIER_NODE:
10948 case INTEGER_CST:
10949 case REAL_CST:
10950 case FIXED_CST:
10951 case VECTOR_CST:
10952 case STRING_CST:
10953 case BLOCK:
10954 case PLACEHOLDER_EXPR:
10955 case SSA_NAME:
10956 case FIELD_DECL:
10957 case RESULT_DECL:
10958 /* None of these have subtrees other than those already walked
10959 above. */
10960 break;
10961
10962 case TREE_LIST:
10963 WALK_SUBTREE (TREE_VALUE (*tp));
10964 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10965 break;
10966
10967 case TREE_VEC:
10968 {
10969 int len = TREE_VEC_LENGTH (*tp);
10970
10971 if (len == 0)
10972 break;
10973
10974 /* Walk all elements but the first. */
10975 while (--len)
10976 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
10977
10978 /* Now walk the first one as a tail call. */
10979 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
10980 }
10981
10982 case COMPLEX_CST:
10983 WALK_SUBTREE (TREE_REALPART (*tp));
10984 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
10985
10986 case CONSTRUCTOR:
10987 {
10988 unsigned HOST_WIDE_INT idx;
10989 constructor_elt *ce;
10990
10991 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
10992 idx++)
10993 WALK_SUBTREE (ce->value);
10994 }
10995 break;
10996
10997 case SAVE_EXPR:
10998 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
10999
11000 case BIND_EXPR:
11001 {
11002 tree decl;
11003 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11004 {
11005 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11006 into declarations that are just mentioned, rather than
11007 declared; they don't really belong to this part of the tree.
11008 And, we can see cycles: the initializer for a declaration
11009 can refer to the declaration itself. */
11010 WALK_SUBTREE (DECL_INITIAL (decl));
11011 WALK_SUBTREE (DECL_SIZE (decl));
11012 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11013 }
11014 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11015 }
11016
11017 case STATEMENT_LIST:
11018 {
11019 tree_stmt_iterator i;
11020 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11021 WALK_SUBTREE (*tsi_stmt_ptr (i));
11022 }
11023 break;
11024
11025 case OMP_CLAUSE:
11026 switch (OMP_CLAUSE_CODE (*tp))
11027 {
11028 case OMP_CLAUSE_PRIVATE:
11029 case OMP_CLAUSE_SHARED:
11030 case OMP_CLAUSE_FIRSTPRIVATE:
11031 case OMP_CLAUSE_COPYIN:
11032 case OMP_CLAUSE_COPYPRIVATE:
11033 case OMP_CLAUSE_FINAL:
11034 case OMP_CLAUSE_IF:
11035 case OMP_CLAUSE_NUM_THREADS:
11036 case OMP_CLAUSE_SCHEDULE:
11037 case OMP_CLAUSE_UNIFORM:
11038 case OMP_CLAUSE_DEPEND:
11039 case OMP_CLAUSE_NUM_TEAMS:
11040 case OMP_CLAUSE_THREAD_LIMIT:
11041 case OMP_CLAUSE_DEVICE:
11042 case OMP_CLAUSE_DIST_SCHEDULE:
11043 case OMP_CLAUSE_SAFELEN:
11044 case OMP_CLAUSE_SIMDLEN:
11045 case OMP_CLAUSE__LOOPTEMP_:
11046 case OMP_CLAUSE__SIMDUID_:
11047 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11048 /* FALLTHRU */
11049
11050 case OMP_CLAUSE_NOWAIT:
11051 case OMP_CLAUSE_ORDERED:
11052 case OMP_CLAUSE_DEFAULT:
11053 case OMP_CLAUSE_UNTIED:
11054 case OMP_CLAUSE_MERGEABLE:
11055 case OMP_CLAUSE_PROC_BIND:
11056 case OMP_CLAUSE_INBRANCH:
11057 case OMP_CLAUSE_NOTINBRANCH:
11058 case OMP_CLAUSE_FOR:
11059 case OMP_CLAUSE_PARALLEL:
11060 case OMP_CLAUSE_SECTIONS:
11061 case OMP_CLAUSE_TASKGROUP:
11062 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11063
11064 case OMP_CLAUSE_LASTPRIVATE:
11065 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11066 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11067 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11068
11069 case OMP_CLAUSE_COLLAPSE:
11070 {
11071 int i;
11072 for (i = 0; i < 3; i++)
11073 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11074 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11075 }
11076
11077 case OMP_CLAUSE_LINEAR:
11078 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11079 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11080 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11081 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11082
11083 case OMP_CLAUSE_ALIGNED:
11084 case OMP_CLAUSE_FROM:
11085 case OMP_CLAUSE_TO:
11086 case OMP_CLAUSE_MAP:
11087 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11088 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11089 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11090
11091 case OMP_CLAUSE_REDUCTION:
11092 {
11093 int i;
11094 for (i = 0; i < 4; i++)
11095 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11096 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11097 }
11098
11099 default:
11100 gcc_unreachable ();
11101 }
11102 break;
11103
11104 case TARGET_EXPR:
11105 {
11106 int i, len;
11107
11108 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11109 But, we only want to walk once. */
11110 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11111 for (i = 0; i < len; ++i)
11112 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11113 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11114 }
11115
11116 case DECL_EXPR:
11117 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11118 defining. We only want to walk into these fields of a type in this
11119 case and not in the general case of a mere reference to the type.
11120
11121 The criterion is as follows: if the field can be an expression, it
11122 must be walked only here. This should be in keeping with the fields
11123 that are directly gimplified in gimplify_type_sizes in order for the
11124 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11125 variable-sized types.
11126
11127 Note that DECLs get walked as part of processing the BIND_EXPR. */
11128 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11129 {
11130 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11131 if (TREE_CODE (*type_p) == ERROR_MARK)
11132 return NULL_TREE;
11133
11134 /* Call the function for the type. See if it returns anything or
11135 doesn't want us to continue. If we are to continue, walk both
11136 the normal fields and those for the declaration case. */
11137 result = (*func) (type_p, &walk_subtrees, data);
11138 if (result || !walk_subtrees)
11139 return result;
11140
11141 /* But do not walk a pointed-to type since it may itself need to
11142 be walked in the declaration case if it isn't anonymous. */
11143 if (!POINTER_TYPE_P (*type_p))
11144 {
11145 result = walk_type_fields (*type_p, func, data, pset, lh);
11146 if (result)
11147 return result;
11148 }
11149
11150 /* If this is a record type, also walk the fields. */
11151 if (RECORD_OR_UNION_TYPE_P (*type_p))
11152 {
11153 tree field;
11154
11155 for (field = TYPE_FIELDS (*type_p); field;
11156 field = DECL_CHAIN (field))
11157 {
11158 /* We'd like to look at the type of the field, but we can
11159 easily get infinite recursion. So assume it's pointed
11160 to elsewhere in the tree. Also, ignore things that
11161 aren't fields. */
11162 if (TREE_CODE (field) != FIELD_DECL)
11163 continue;
11164
11165 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11166 WALK_SUBTREE (DECL_SIZE (field));
11167 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11168 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11169 WALK_SUBTREE (DECL_QUALIFIER (field));
11170 }
11171 }
11172
11173 /* Same for scalar types. */
11174 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11175 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11176 || TREE_CODE (*type_p) == INTEGER_TYPE
11177 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11178 || TREE_CODE (*type_p) == REAL_TYPE)
11179 {
11180 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11181 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11182 }
11183
11184 WALK_SUBTREE (TYPE_SIZE (*type_p));
11185 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11186 }
11187 /* FALLTHRU */
11188
11189 default:
11190 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11191 {
11192 int i, len;
11193
11194 /* Walk over all the sub-trees of this operand. */
11195 len = TREE_OPERAND_LENGTH (*tp);
11196
11197 /* Go through the subtrees. We need to do this in forward order so
11198 that the scope of a FOR_EXPR is handled properly. */
11199 if (len)
11200 {
11201 for (i = 0; i < len - 1; ++i)
11202 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11203 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11204 }
11205 }
11206 /* If this is a type, walk the needed fields in the type. */
11207 else if (TYPE_P (*tp))
11208 return walk_type_fields (*tp, func, data, pset, lh);
11209 break;
11210 }
11211
11212 /* We didn't find what we were looking for. */
11213 return NULL_TREE;
11214
11215 #undef WALK_SUBTREE_TAIL
11216 }
11217 #undef WALK_SUBTREE
11218
11219 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11220
11221 tree
11222 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11223 walk_tree_lh lh)
11224 {
11225 tree result;
11226
11227 hash_set<tree> pset;
11228 result = walk_tree_1 (tp, func, data, &pset, lh);
11229 return result;
11230 }
11231
11232
11233 tree
11234 tree_block (tree t)
11235 {
11236 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11237
11238 if (IS_EXPR_CODE_CLASS (c))
11239 return LOCATION_BLOCK (t->exp.locus);
11240 gcc_unreachable ();
11241 return NULL;
11242 }
11243
11244 void
11245 tree_set_block (tree t, tree b)
11246 {
11247 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11248
11249 if (IS_EXPR_CODE_CLASS (c))
11250 {
11251 if (b)
11252 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11253 else
11254 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11255 }
11256 else
11257 gcc_unreachable ();
11258 }
11259
11260 /* Create a nameless artificial label and put it in the current
11261 function context. The label has a location of LOC. Returns the
11262 newly created label. */
11263
11264 tree
11265 create_artificial_label (location_t loc)
11266 {
11267 tree lab = build_decl (loc,
11268 LABEL_DECL, NULL_TREE, void_type_node);
11269
11270 DECL_ARTIFICIAL (lab) = 1;
11271 DECL_IGNORED_P (lab) = 1;
11272 DECL_CONTEXT (lab) = current_function_decl;
11273 return lab;
11274 }
11275
11276 /* Given a tree, try to return a useful variable name that we can use
11277 to prefix a temporary that is being assigned the value of the tree.
11278 I.E. given <temp> = &A, return A. */
11279
11280 const char *
11281 get_name (tree t)
11282 {
11283 tree stripped_decl;
11284
11285 stripped_decl = t;
11286 STRIP_NOPS (stripped_decl);
11287 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11288 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11289 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11290 {
11291 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11292 if (!name)
11293 return NULL;
11294 return IDENTIFIER_POINTER (name);
11295 }
11296 else
11297 {
11298 switch (TREE_CODE (stripped_decl))
11299 {
11300 case ADDR_EXPR:
11301 return get_name (TREE_OPERAND (stripped_decl, 0));
11302 default:
11303 return NULL;
11304 }
11305 }
11306 }
11307
11308 /* Return true if TYPE has a variable argument list. */
11309
11310 bool
11311 stdarg_p (const_tree fntype)
11312 {
11313 function_args_iterator args_iter;
11314 tree n = NULL_TREE, t;
11315
11316 if (!fntype)
11317 return false;
11318
11319 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11320 {
11321 n = t;
11322 }
11323
11324 return n != NULL_TREE && n != void_type_node;
11325 }
11326
11327 /* Return true if TYPE has a prototype. */
11328
11329 bool
11330 prototype_p (tree fntype)
11331 {
11332 tree t;
11333
11334 gcc_assert (fntype != NULL_TREE);
11335
11336 t = TYPE_ARG_TYPES (fntype);
11337 return (t != NULL_TREE);
11338 }
11339
11340 /* If BLOCK is inlined from an __attribute__((__artificial__))
11341 routine, return pointer to location from where it has been
11342 called. */
11343 location_t *
11344 block_nonartificial_location (tree block)
11345 {
11346 location_t *ret = NULL;
11347
11348 while (block && TREE_CODE (block) == BLOCK
11349 && BLOCK_ABSTRACT_ORIGIN (block))
11350 {
11351 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11352
11353 while (TREE_CODE (ao) == BLOCK
11354 && BLOCK_ABSTRACT_ORIGIN (ao)
11355 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11356 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11357
11358 if (TREE_CODE (ao) == FUNCTION_DECL)
11359 {
11360 /* If AO is an artificial inline, point RET to the
11361 call site locus at which it has been inlined and continue
11362 the loop, in case AO's caller is also an artificial
11363 inline. */
11364 if (DECL_DECLARED_INLINE_P (ao)
11365 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11366 ret = &BLOCK_SOURCE_LOCATION (block);
11367 else
11368 break;
11369 }
11370 else if (TREE_CODE (ao) != BLOCK)
11371 break;
11372
11373 block = BLOCK_SUPERCONTEXT (block);
11374 }
11375 return ret;
11376 }
11377
11378
11379 /* If EXP is inlined from an __attribute__((__artificial__))
11380 function, return the location of the original call expression. */
11381
11382 location_t
11383 tree_nonartificial_location (tree exp)
11384 {
11385 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11386
11387 if (loc)
11388 return *loc;
11389 else
11390 return EXPR_LOCATION (exp);
11391 }
11392
11393
11394 /* These are the hash table functions for the hash table of OPTIMIZATION_NODEq
11395 nodes. */
11396
11397 /* Return the hash code code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11398
11399 static hashval_t
11400 cl_option_hash_hash (const void *x)
11401 {
11402 const_tree const t = (const_tree) x;
11403 const char *p;
11404 size_t i;
11405 size_t len = 0;
11406 hashval_t hash = 0;
11407
11408 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11409 {
11410 p = (const char *)TREE_OPTIMIZATION (t);
11411 len = sizeof (struct cl_optimization);
11412 }
11413
11414 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11415 {
11416 p = (const char *)TREE_TARGET_OPTION (t);
11417 len = sizeof (struct cl_target_option);
11418 }
11419
11420 else
11421 gcc_unreachable ();
11422
11423 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11424 something else. */
11425 for (i = 0; i < len; i++)
11426 if (p[i])
11427 hash = (hash << 4) ^ ((i << 2) | p[i]);
11428
11429 return hash;
11430 }
11431
11432 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11433 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11434 same. */
11435
11436 static int
11437 cl_option_hash_eq (const void *x, const void *y)
11438 {
11439 const_tree const xt = (const_tree) x;
11440 const_tree const yt = (const_tree) y;
11441 const char *xp;
11442 const char *yp;
11443 size_t len;
11444
11445 if (TREE_CODE (xt) != TREE_CODE (yt))
11446 return 0;
11447
11448 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11449 {
11450 xp = (const char *)TREE_OPTIMIZATION (xt);
11451 yp = (const char *)TREE_OPTIMIZATION (yt);
11452 len = sizeof (struct cl_optimization);
11453 }
11454
11455 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11456 {
11457 xp = (const char *)TREE_TARGET_OPTION (xt);
11458 yp = (const char *)TREE_TARGET_OPTION (yt);
11459 len = sizeof (struct cl_target_option);
11460 }
11461
11462 else
11463 gcc_unreachable ();
11464
11465 return (memcmp (xp, yp, len) == 0);
11466 }
11467
11468 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11469
11470 tree
11471 build_optimization_node (struct gcc_options *opts)
11472 {
11473 tree t;
11474 void **slot;
11475
11476 /* Use the cache of optimization nodes. */
11477
11478 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11479 opts);
11480
11481 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11482 t = (tree) *slot;
11483 if (!t)
11484 {
11485 /* Insert this one into the hash table. */
11486 t = cl_optimization_node;
11487 *slot = t;
11488
11489 /* Make a new node for next time round. */
11490 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11491 }
11492
11493 return t;
11494 }
11495
11496 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11497
11498 tree
11499 build_target_option_node (struct gcc_options *opts)
11500 {
11501 tree t;
11502 void **slot;
11503
11504 /* Use the cache of optimization nodes. */
11505
11506 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11507 opts);
11508
11509 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11510 t = (tree) *slot;
11511 if (!t)
11512 {
11513 /* Insert this one into the hash table. */
11514 t = cl_target_option_node;
11515 *slot = t;
11516
11517 /* Make a new node for next time round. */
11518 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11519 }
11520
11521 return t;
11522 }
11523
11524 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11525 Called through htab_traverse. */
11526
11527 static int
11528 prepare_target_option_node_for_pch (void **slot, void *)
11529 {
11530 tree node = (tree) *slot;
11531 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11532 TREE_TARGET_GLOBALS (node) = NULL;
11533 return 1;
11534 }
11535
11536 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11537 so that they aren't saved during PCH writing. */
11538
11539 void
11540 prepare_target_option_nodes_for_pch (void)
11541 {
11542 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11543 NULL);
11544 }
11545
11546 /* Determine the "ultimate origin" of a block. The block may be an inlined
11547 instance of an inlined instance of a block which is local to an inline
11548 function, so we have to trace all of the way back through the origin chain
11549 to find out what sort of node actually served as the original seed for the
11550 given block. */
11551
11552 tree
11553 block_ultimate_origin (const_tree block)
11554 {
11555 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11556
11557 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11558 nodes in the function to point to themselves; ignore that if
11559 we're trying to output the abstract instance of this function. */
11560 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11561 return NULL_TREE;
11562
11563 if (immediate_origin == NULL_TREE)
11564 return NULL_TREE;
11565 else
11566 {
11567 tree ret_val;
11568 tree lookahead = immediate_origin;
11569
11570 do
11571 {
11572 ret_val = lookahead;
11573 lookahead = (TREE_CODE (ret_val) == BLOCK
11574 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11575 }
11576 while (lookahead != NULL && lookahead != ret_val);
11577
11578 /* The block's abstract origin chain may not be the *ultimate* origin of
11579 the block. It could lead to a DECL that has an abstract origin set.
11580 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11581 will give us if it has one). Note that DECL's abstract origins are
11582 supposed to be the most distant ancestor (or so decl_ultimate_origin
11583 claims), so we don't need to loop following the DECL origins. */
11584 if (DECL_P (ret_val))
11585 return DECL_ORIGIN (ret_val);
11586
11587 return ret_val;
11588 }
11589 }
11590
11591 /* Return true iff conversion in EXP generates no instruction. Mark
11592 it inline so that we fully inline into the stripping functions even
11593 though we have two uses of this function. */
11594
11595 static inline bool
11596 tree_nop_conversion (const_tree exp)
11597 {
11598 tree outer_type, inner_type;
11599
11600 if (!CONVERT_EXPR_P (exp)
11601 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11602 return false;
11603 if (TREE_OPERAND (exp, 0) == error_mark_node)
11604 return false;
11605
11606 outer_type = TREE_TYPE (exp);
11607 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11608
11609 if (!inner_type)
11610 return false;
11611
11612 /* Use precision rather then machine mode when we can, which gives
11613 the correct answer even for submode (bit-field) types. */
11614 if ((INTEGRAL_TYPE_P (outer_type)
11615 || POINTER_TYPE_P (outer_type)
11616 || TREE_CODE (outer_type) == OFFSET_TYPE)
11617 && (INTEGRAL_TYPE_P (inner_type)
11618 || POINTER_TYPE_P (inner_type)
11619 || TREE_CODE (inner_type) == OFFSET_TYPE))
11620 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11621
11622 /* Otherwise fall back on comparing machine modes (e.g. for
11623 aggregate types, floats). */
11624 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11625 }
11626
11627 /* Return true iff conversion in EXP generates no instruction. Don't
11628 consider conversions changing the signedness. */
11629
11630 static bool
11631 tree_sign_nop_conversion (const_tree exp)
11632 {
11633 tree outer_type, inner_type;
11634
11635 if (!tree_nop_conversion (exp))
11636 return false;
11637
11638 outer_type = TREE_TYPE (exp);
11639 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11640
11641 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11642 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11643 }
11644
11645 /* Strip conversions from EXP according to tree_nop_conversion and
11646 return the resulting expression. */
11647
11648 tree
11649 tree_strip_nop_conversions (tree exp)
11650 {
11651 while (tree_nop_conversion (exp))
11652 exp = TREE_OPERAND (exp, 0);
11653 return exp;
11654 }
11655
11656 /* Strip conversions from EXP according to tree_sign_nop_conversion
11657 and return the resulting expression. */
11658
11659 tree
11660 tree_strip_sign_nop_conversions (tree exp)
11661 {
11662 while (tree_sign_nop_conversion (exp))
11663 exp = TREE_OPERAND (exp, 0);
11664 return exp;
11665 }
11666
11667 /* Avoid any floating point extensions from EXP. */
11668 tree
11669 strip_float_extensions (tree exp)
11670 {
11671 tree sub, expt, subt;
11672
11673 /* For floating point constant look up the narrowest type that can hold
11674 it properly and handle it like (type)(narrowest_type)constant.
11675 This way we can optimize for instance a=a*2.0 where "a" is float
11676 but 2.0 is double constant. */
11677 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11678 {
11679 REAL_VALUE_TYPE orig;
11680 tree type = NULL;
11681
11682 orig = TREE_REAL_CST (exp);
11683 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11684 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11685 type = float_type_node;
11686 else if (TYPE_PRECISION (TREE_TYPE (exp))
11687 > TYPE_PRECISION (double_type_node)
11688 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11689 type = double_type_node;
11690 if (type)
11691 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11692 }
11693
11694 if (!CONVERT_EXPR_P (exp))
11695 return exp;
11696
11697 sub = TREE_OPERAND (exp, 0);
11698 subt = TREE_TYPE (sub);
11699 expt = TREE_TYPE (exp);
11700
11701 if (!FLOAT_TYPE_P (subt))
11702 return exp;
11703
11704 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11705 return exp;
11706
11707 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11708 return exp;
11709
11710 return strip_float_extensions (sub);
11711 }
11712
11713 /* Strip out all handled components that produce invariant
11714 offsets. */
11715
11716 const_tree
11717 strip_invariant_refs (const_tree op)
11718 {
11719 while (handled_component_p (op))
11720 {
11721 switch (TREE_CODE (op))
11722 {
11723 case ARRAY_REF:
11724 case ARRAY_RANGE_REF:
11725 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11726 || TREE_OPERAND (op, 2) != NULL_TREE
11727 || TREE_OPERAND (op, 3) != NULL_TREE)
11728 return NULL;
11729 break;
11730
11731 case COMPONENT_REF:
11732 if (TREE_OPERAND (op, 2) != NULL_TREE)
11733 return NULL;
11734 break;
11735
11736 default:;
11737 }
11738 op = TREE_OPERAND (op, 0);
11739 }
11740
11741 return op;
11742 }
11743
11744 static GTY(()) tree gcc_eh_personality_decl;
11745
11746 /* Return the GCC personality function decl. */
11747
11748 tree
11749 lhd_gcc_personality (void)
11750 {
11751 if (!gcc_eh_personality_decl)
11752 gcc_eh_personality_decl = build_personality_function ("gcc");
11753 return gcc_eh_personality_decl;
11754 }
11755
11756 /* TARGET is a call target of GIMPLE call statement
11757 (obtained by gimple_call_fn). Return true if it is
11758 OBJ_TYPE_REF representing an virtual call of C++ method.
11759 (As opposed to OBJ_TYPE_REF representing objc calls
11760 through a cast where middle-end devirtualization machinery
11761 can't apply.) */
11762
11763 bool
11764 virtual_method_call_p (tree target)
11765 {
11766 if (TREE_CODE (target) != OBJ_TYPE_REF)
11767 return false;
11768 target = TREE_TYPE (target);
11769 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11770 target = TREE_TYPE (target);
11771 if (TREE_CODE (target) == FUNCTION_TYPE)
11772 return false;
11773 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11774 return true;
11775 }
11776
11777 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11778
11779 tree
11780 obj_type_ref_class (tree ref)
11781 {
11782 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11783 ref = TREE_TYPE (ref);
11784 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11785 ref = TREE_TYPE (ref);
11786 /* We look for type THIS points to. ObjC also builds
11787 OBJ_TYPE_REF with non-method calls, Their first parameter
11788 ID however also corresponds to class type. */
11789 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11790 || TREE_CODE (ref) == FUNCTION_TYPE);
11791 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11792 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11793 return TREE_TYPE (ref);
11794 }
11795
11796 /* Return true if T is in anonymous namespace. */
11797
11798 bool
11799 type_in_anonymous_namespace_p (const_tree t)
11800 {
11801 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11802 bulitin types; those have CONTEXT NULL. */
11803 if (!TYPE_CONTEXT (t))
11804 return false;
11805 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11806 }
11807
11808 /* Try to find a base info of BINFO that would have its field decl at offset
11809 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11810 found, return, otherwise return NULL_TREE. */
11811
11812 tree
11813 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11814 {
11815 tree type = BINFO_TYPE (binfo);
11816
11817 while (true)
11818 {
11819 HOST_WIDE_INT pos, size;
11820 tree fld;
11821 int i;
11822
11823 if (types_same_for_odr (type, expected_type))
11824 return binfo;
11825 if (offset < 0)
11826 return NULL_TREE;
11827
11828 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11829 {
11830 if (TREE_CODE (fld) != FIELD_DECL)
11831 continue;
11832
11833 pos = int_bit_position (fld);
11834 size = tree_to_uhwi (DECL_SIZE (fld));
11835 if (pos <= offset && (pos + size) > offset)
11836 break;
11837 }
11838 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11839 return NULL_TREE;
11840
11841 if (!DECL_ARTIFICIAL (fld))
11842 {
11843 binfo = TYPE_BINFO (TREE_TYPE (fld));
11844 if (!binfo)
11845 return NULL_TREE;
11846 }
11847 /* Offset 0 indicates the primary base, whose vtable contents are
11848 represented in the binfo for the derived class. */
11849 else if (offset != 0)
11850 {
11851 tree base_binfo, binfo2 = binfo;
11852
11853 /* Find BINFO corresponding to FLD. This is bit harder
11854 by a fact that in virtual inheritance we may need to walk down
11855 the non-virtual inheritance chain. */
11856 while (true)
11857 {
11858 tree containing_binfo = NULL, found_binfo = NULL;
11859 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11860 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11861 {
11862 found_binfo = base_binfo;
11863 break;
11864 }
11865 else
11866 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11867 - tree_to_shwi (BINFO_OFFSET (binfo)))
11868 * BITS_PER_UNIT < pos
11869 /* Rule out types with no virtual methods or we can get confused
11870 here by zero sized bases. */
11871 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11872 && (!containing_binfo
11873 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11874 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11875 containing_binfo = base_binfo;
11876 if (found_binfo)
11877 {
11878 binfo = found_binfo;
11879 break;
11880 }
11881 if (!containing_binfo)
11882 return NULL_TREE;
11883 binfo2 = containing_binfo;
11884 }
11885 }
11886
11887 type = TREE_TYPE (fld);
11888 offset -= pos;
11889 }
11890 }
11891
11892 /* Returns true if X is a typedef decl. */
11893
11894 bool
11895 is_typedef_decl (tree x)
11896 {
11897 return (x && TREE_CODE (x) == TYPE_DECL
11898 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11899 }
11900
11901 /* Returns true iff TYPE is a type variant created for a typedef. */
11902
11903 bool
11904 typedef_variant_p (tree type)
11905 {
11906 return is_typedef_decl (TYPE_NAME (type));
11907 }
11908
11909 /* Warn about a use of an identifier which was marked deprecated. */
11910 void
11911 warn_deprecated_use (tree node, tree attr)
11912 {
11913 const char *msg;
11914
11915 if (node == 0 || !warn_deprecated_decl)
11916 return;
11917
11918 if (!attr)
11919 {
11920 if (DECL_P (node))
11921 attr = DECL_ATTRIBUTES (node);
11922 else if (TYPE_P (node))
11923 {
11924 tree decl = TYPE_STUB_DECL (node);
11925 if (decl)
11926 attr = lookup_attribute ("deprecated",
11927 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11928 }
11929 }
11930
11931 if (attr)
11932 attr = lookup_attribute ("deprecated", attr);
11933
11934 if (attr)
11935 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
11936 else
11937 msg = NULL;
11938
11939 if (DECL_P (node))
11940 {
11941 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
11942 if (msg)
11943 warning (OPT_Wdeprecated_declarations,
11944 "%qD is deprecated (declared at %r%s:%d%R): %s",
11945 node, "locus", xloc.file, xloc.line, msg);
11946 else
11947 warning (OPT_Wdeprecated_declarations,
11948 "%qD is deprecated (declared at %r%s:%d%R)",
11949 node, "locus", xloc.file, xloc.line);
11950 }
11951 else if (TYPE_P (node))
11952 {
11953 tree what = NULL_TREE;
11954 tree decl = TYPE_STUB_DECL (node);
11955
11956 if (TYPE_NAME (node))
11957 {
11958 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
11959 what = TYPE_NAME (node);
11960 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
11961 && DECL_NAME (TYPE_NAME (node)))
11962 what = DECL_NAME (TYPE_NAME (node));
11963 }
11964
11965 if (decl)
11966 {
11967 expanded_location xloc
11968 = expand_location (DECL_SOURCE_LOCATION (decl));
11969 if (what)
11970 {
11971 if (msg)
11972 warning (OPT_Wdeprecated_declarations,
11973 "%qE is deprecated (declared at %r%s:%d%R): %s",
11974 what, "locus", xloc.file, xloc.line, msg);
11975 else
11976 warning (OPT_Wdeprecated_declarations,
11977 "%qE is deprecated (declared at %r%s:%d%R)",
11978 what, "locus", xloc.file, xloc.line);
11979 }
11980 else
11981 {
11982 if (msg)
11983 warning (OPT_Wdeprecated_declarations,
11984 "type is deprecated (declared at %r%s:%d%R): %s",
11985 "locus", xloc.file, xloc.line, msg);
11986 else
11987 warning (OPT_Wdeprecated_declarations,
11988 "type is deprecated (declared at %r%s:%d%R)",
11989 "locus", xloc.file, xloc.line);
11990 }
11991 }
11992 else
11993 {
11994 if (what)
11995 {
11996 if (msg)
11997 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
11998 what, msg);
11999 else
12000 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12001 }
12002 else
12003 {
12004 if (msg)
12005 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12006 msg);
12007 else
12008 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12009 }
12010 }
12011 }
12012 }
12013
12014 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12015 somewhere in it. */
12016
12017 bool
12018 contains_bitfld_component_ref_p (const_tree ref)
12019 {
12020 while (handled_component_p (ref))
12021 {
12022 if (TREE_CODE (ref) == COMPONENT_REF
12023 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12024 return true;
12025 ref = TREE_OPERAND (ref, 0);
12026 }
12027
12028 return false;
12029 }
12030
12031 /* Try to determine whether a TRY_CATCH expression can fall through.
12032 This is a subroutine of block_may_fallthru. */
12033
12034 static bool
12035 try_catch_may_fallthru (const_tree stmt)
12036 {
12037 tree_stmt_iterator i;
12038
12039 /* If the TRY block can fall through, the whole TRY_CATCH can
12040 fall through. */
12041 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12042 return true;
12043
12044 i = tsi_start (TREE_OPERAND (stmt, 1));
12045 switch (TREE_CODE (tsi_stmt (i)))
12046 {
12047 case CATCH_EXPR:
12048 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12049 catch expression and a body. The whole TRY_CATCH may fall
12050 through iff any of the catch bodies falls through. */
12051 for (; !tsi_end_p (i); tsi_next (&i))
12052 {
12053 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12054 return true;
12055 }
12056 return false;
12057
12058 case EH_FILTER_EXPR:
12059 /* The exception filter expression only matters if there is an
12060 exception. If the exception does not match EH_FILTER_TYPES,
12061 we will execute EH_FILTER_FAILURE, and we will fall through
12062 if that falls through. If the exception does match
12063 EH_FILTER_TYPES, the stack unwinder will continue up the
12064 stack, so we will not fall through. We don't know whether we
12065 will throw an exception which matches EH_FILTER_TYPES or not,
12066 so we just ignore EH_FILTER_TYPES and assume that we might
12067 throw an exception which doesn't match. */
12068 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12069
12070 default:
12071 /* This case represents statements to be executed when an
12072 exception occurs. Those statements are implicitly followed
12073 by a RESX statement to resume execution after the exception.
12074 So in this case the TRY_CATCH never falls through. */
12075 return false;
12076 }
12077 }
12078
12079 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12080 need not be 100% accurate; simply be conservative and return true if we
12081 don't know. This is used only to avoid stupidly generating extra code.
12082 If we're wrong, we'll just delete the extra code later. */
12083
12084 bool
12085 block_may_fallthru (const_tree block)
12086 {
12087 /* This CONST_CAST is okay because expr_last returns its argument
12088 unmodified and we assign it to a const_tree. */
12089 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12090
12091 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12092 {
12093 case GOTO_EXPR:
12094 case RETURN_EXPR:
12095 /* Easy cases. If the last statement of the block implies
12096 control transfer, then we can't fall through. */
12097 return false;
12098
12099 case SWITCH_EXPR:
12100 /* If SWITCH_LABELS is set, this is lowered, and represents a
12101 branch to a selected label and hence can not fall through.
12102 Otherwise SWITCH_BODY is set, and the switch can fall
12103 through. */
12104 return SWITCH_LABELS (stmt) == NULL_TREE;
12105
12106 case COND_EXPR:
12107 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12108 return true;
12109 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12110
12111 case BIND_EXPR:
12112 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12113
12114 case TRY_CATCH_EXPR:
12115 return try_catch_may_fallthru (stmt);
12116
12117 case TRY_FINALLY_EXPR:
12118 /* The finally clause is always executed after the try clause,
12119 so if it does not fall through, then the try-finally will not
12120 fall through. Otherwise, if the try clause does not fall
12121 through, then when the finally clause falls through it will
12122 resume execution wherever the try clause was going. So the
12123 whole try-finally will only fall through if both the try
12124 clause and the finally clause fall through. */
12125 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12126 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12127
12128 case MODIFY_EXPR:
12129 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12130 stmt = TREE_OPERAND (stmt, 1);
12131 else
12132 return true;
12133 /* FALLTHRU */
12134
12135 case CALL_EXPR:
12136 /* Functions that do not return do not fall through. */
12137 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12138
12139 case CLEANUP_POINT_EXPR:
12140 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12141
12142 case TARGET_EXPR:
12143 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12144
12145 case ERROR_MARK:
12146 return true;
12147
12148 default:
12149 return lang_hooks.block_may_fallthru (stmt);
12150 }
12151 }
12152
12153 /* True if we are using EH to handle cleanups. */
12154 static bool using_eh_for_cleanups_flag = false;
12155
12156 /* This routine is called from front ends to indicate eh should be used for
12157 cleanups. */
12158 void
12159 using_eh_for_cleanups (void)
12160 {
12161 using_eh_for_cleanups_flag = true;
12162 }
12163
12164 /* Query whether EH is used for cleanups. */
12165 bool
12166 using_eh_for_cleanups_p (void)
12167 {
12168 return using_eh_for_cleanups_flag;
12169 }
12170
12171 /* Wrapper for tree_code_name to ensure that tree code is valid */
12172 const char *
12173 get_tree_code_name (enum tree_code code)
12174 {
12175 const char *invalid = "<invalid tree code>";
12176
12177 if (code >= MAX_TREE_CODES)
12178 return invalid;
12179
12180 return tree_code_name[code];
12181 }
12182
12183 /* Drops the TREE_OVERFLOW flag from T. */
12184
12185 tree
12186 drop_tree_overflow (tree t)
12187 {
12188 gcc_checking_assert (TREE_OVERFLOW (t));
12189
12190 /* For tree codes with a sharing machinery re-build the result. */
12191 if (TREE_CODE (t) == INTEGER_CST)
12192 return wide_int_to_tree (TREE_TYPE (t), t);
12193
12194 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12195 and drop the flag. */
12196 t = copy_node (t);
12197 TREE_OVERFLOW (t) = 0;
12198 return t;
12199 }
12200
12201 /* Given a memory reference expression T, return its base address.
12202 The base address of a memory reference expression is the main
12203 object being referenced. For instance, the base address for
12204 'array[i].fld[j]' is 'array'. You can think of this as stripping
12205 away the offset part from a memory address.
12206
12207 This function calls handled_component_p to strip away all the inner
12208 parts of the memory reference until it reaches the base object. */
12209
12210 tree
12211 get_base_address (tree t)
12212 {
12213 while (handled_component_p (t))
12214 t = TREE_OPERAND (t, 0);
12215
12216 if ((TREE_CODE (t) == MEM_REF
12217 || TREE_CODE (t) == TARGET_MEM_REF)
12218 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12219 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12220
12221 /* ??? Either the alias oracle or all callers need to properly deal
12222 with WITH_SIZE_EXPRs before we can look through those. */
12223 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12224 return NULL_TREE;
12225
12226 return t;
12227 }
12228
12229 #include "gt-tree.h"