1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "function.h"
42 #include "obstack.h"
43 #include "toplev.h" /* get_random_seed */
44 #include "hashtab.h"
45 #include "filenames.h"
46 #include "output.h"
47 #include "target.h"
48 #include "common/common-target.h"
49 #include "langhooks.h"
50 #include "tree-inline.h"
51 #include "tree-iterator.h"
52 #include "basic-block.h"
53 #include "bitmap.h"
54 #include "pointer-set.h"
55 #include "tree-ssa-alias.h"
56 #include "internal-fn.h"
57 #include "gimple-expr.h"
58 #include "is-a.h"
59 #include "gimple.h"
60 #include "gimple-iterator.h"
61 #include "gimplify.h"
62 #include "gimple-ssa.h"
63 #include "cgraph.h"
64 #include "tree-phinodes.h"
65 #include "stringpool.h"
66 #include "tree-ssanames.h"
67 #include "expr.h"
68 #include "tree-dfa.h"
69 #include "params.h"
70 #include "tree-pass.h"
71 #include "langhooks-def.h"
72 #include "diagnostic.h"
73 #include "tree-diagnostic.h"
74 #include "tree-pretty-print.h"
75 #include "except.h"
76 #include "debug.h"
77 #include "intl.h"
78 #include "wide-int.h"
79 #include "builtins.h"
80
81 /* Tree code classes. */
82
83 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
84 #define END_OF_BASE_TREE_CODES tcc_exceptional,
85
86 const enum tree_code_class tree_code_type[] = {
87 #include "all-tree.def"
88 };
89
90 #undef DEFTREECODE
91 #undef END_OF_BASE_TREE_CODES
92
93 /* Table indexed by tree code giving number of expression
94 operands beyond the fixed part of the node structure.
95 Not used for types or decls. */
96
97 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
98 #define END_OF_BASE_TREE_CODES 0,
99
100 const unsigned char tree_code_length[] = {
101 #include "all-tree.def"
102 };
103
104 #undef DEFTREECODE
105 #undef END_OF_BASE_TREE_CODES
106
107 /* Names of tree components.
108 Used for printing out the tree and error messages. */
109 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
110 #define END_OF_BASE_TREE_CODES "@dummy",
111
112 static const char *const tree_code_name[] = {
113 #include "all-tree.def"
114 };
115
116 #undef DEFTREECODE
117 #undef END_OF_BASE_TREE_CODES
118
119 /* Each tree code class has an associated string representation.
120 These must correspond to the tree_code_class entries. */
121
122 const char *const tree_code_class_strings[] =
123 {
124 "exceptional",
125 "constant",
126 "type",
127 "declaration",
128 "reference",
129 "comparison",
130 "unary",
131 "binary",
132 "statement",
133 "vl_exp",
134 "expression"
135 };
136
137 /* obstack.[ch] explicitly declined to prototype this. */
138 extern int _obstack_allocated_p (struct obstack *h, void *obj);
139
140 /* Statistics-gathering stuff. */
141
142 static int tree_code_counts[MAX_TREE_CODES];
143 int tree_node_counts[(int) all_kinds];
144 int tree_node_sizes[(int) all_kinds];
145
146 /* Keep in sync with tree.h:enum tree_node_kind. */
147 static const char * const tree_node_kind_names[] = {
148 "decls",
149 "types",
150 "blocks",
151 "stmts",
152 "refs",
153 "exprs",
154 "constants",
155 "identifiers",
156 "vecs",
157 "binfos",
158 "ssa names",
159 "constructors",
160 "random kinds",
161 "lang_decl kinds",
162 "lang_type kinds",
163 "omp clauses",
164 };
165
166 /* Unique id for next decl created. */
167 static GTY(()) int next_decl_uid;
168 /* Unique id for next type created. */
169 static GTY(()) int next_type_uid = 1;
170 /* Unique id for next debug decl created. Use negative numbers,
171 to catch erroneous uses. */
172 static GTY(()) int next_debug_decl_uid;
173
174 /* Since we cannot rehash a type after it is in the table, we have to
175 keep the hash code. */
176
177 struct GTY(()) type_hash {
178 unsigned long hash;
179 tree type;
180 };
181
182 /* Initial size of the hash table (rounded to next prime). */
183 #define TYPE_HASH_INITIAL_SIZE 1000
184
185 /* Now here is the hash table. When recording a type, it is added to
186 the slot whose index is the hash code. Note that the hash table is
187 used for several kinds of types (function types, array types and
188 array index range types, for now). While all these live in the
189 same table, they are completely independent, and the hash code is
190 computed differently for each of these. */
191
192 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
193 htab_t type_hash_table;
194
195 /* Hash table and temporary node for larger integer const values. */
196 static GTY (()) tree int_cst_node;
197 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
198 htab_t int_cst_hash_table;
199
200 /* Hash table for optimization flags and target option flags. Use the same
201 hash table for both sets of options. Nodes for building the current
202 optimization and target option nodes. The assumption is most of the time
203 the options created will already be in the hash table, so we avoid
204 allocating and freeing a node repeatedly. */
205 static GTY (()) tree cl_optimization_node;
206 static GTY (()) tree cl_target_option_node;
207 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
208 htab_t cl_option_hash_table;
209
210 /* General tree->tree mapping structure for use in hash tables. */
211
212
213 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
214 htab_t debug_expr_for_decl;
215
216 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
217 htab_t value_expr_for_decl;
218
219 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
220 htab_t debug_args_for_decl;
221
222 static void set_type_quals (tree, int);
223 static int type_hash_eq (const void *, const void *);
224 static hashval_t type_hash_hash (const void *);
225 static hashval_t int_cst_hash_hash (const void *);
226 static int int_cst_hash_eq (const void *, const void *);
227 static hashval_t cl_option_hash_hash (const void *);
228 static int cl_option_hash_eq (const void *, const void *);
229 static void print_type_hash_statistics (void);
230 static void print_debug_expr_statistics (void);
231 static void print_value_expr_statistics (void);
232 static int type_hash_marked_p (const void *);
233 static unsigned int type_hash_list (const_tree, hashval_t);
234 static unsigned int attribute_hash_list (const_tree, hashval_t);
235 static bool decls_same_for_odr (tree decl1, tree decl2);
236
237 tree global_trees[TI_MAX];
238 tree integer_types[itk_none];
239
240 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
241
242 /* Number of operands for each OpenMP clause. */
243 unsigned const char omp_clause_num_ops[] =
244 {
245 0, /* OMP_CLAUSE_ERROR */
246 1, /* OMP_CLAUSE_PRIVATE */
247 1, /* OMP_CLAUSE_SHARED */
248 1, /* OMP_CLAUSE_FIRSTPRIVATE */
249 2, /* OMP_CLAUSE_LASTPRIVATE */
250 4, /* OMP_CLAUSE_REDUCTION */
251 1, /* OMP_CLAUSE_COPYIN */
252 1, /* OMP_CLAUSE_COPYPRIVATE */
253 3, /* OMP_CLAUSE_LINEAR */
254 2, /* OMP_CLAUSE_ALIGNED */
255 1, /* OMP_CLAUSE_DEPEND */
256 1, /* OMP_CLAUSE_UNIFORM */
257 2, /* OMP_CLAUSE_FROM */
258 2, /* OMP_CLAUSE_TO */
259 2, /* OMP_CLAUSE_MAP */
260 1, /* OMP_CLAUSE__LOOPTEMP_ */
261 1, /* OMP_CLAUSE_IF */
262 1, /* OMP_CLAUSE_NUM_THREADS */
263 1, /* OMP_CLAUSE_SCHEDULE */
264 0, /* OMP_CLAUSE_NOWAIT */
265 0, /* OMP_CLAUSE_ORDERED */
266 0, /* OMP_CLAUSE_DEFAULT */
267 3, /* OMP_CLAUSE_COLLAPSE */
268 0, /* OMP_CLAUSE_UNTIED */
269 1, /* OMP_CLAUSE_FINAL */
270 0, /* OMP_CLAUSE_MERGEABLE */
271 1, /* OMP_CLAUSE_DEVICE */
272 1, /* OMP_CLAUSE_DIST_SCHEDULE */
273 0, /* OMP_CLAUSE_INBRANCH */
274 0, /* OMP_CLAUSE_NOTINBRANCH */
275 1, /* OMP_CLAUSE_NUM_TEAMS */
276 1, /* OMP_CLAUSE_THREAD_LIMIT */
277 0, /* OMP_CLAUSE_PROC_BIND */
278 1, /* OMP_CLAUSE_SAFELEN */
279 1, /* OMP_CLAUSE_SIMDLEN */
280 0, /* OMP_CLAUSE_FOR */
281 0, /* OMP_CLAUSE_PARALLEL */
282 0, /* OMP_CLAUSE_SECTIONS */
283 0, /* OMP_CLAUSE_TASKGROUP */
284 1, /* OMP_CLAUSE__SIMDUID_ */
285 };
286
287 const char * const omp_clause_code_name[] =
288 {
289 "error_clause",
290 "private",
291 "shared",
292 "firstprivate",
293 "lastprivate",
294 "reduction",
295 "copyin",
296 "copyprivate",
297 "linear",
298 "aligned",
299 "depend",
300 "uniform",
301 "from",
302 "to",
303 "map",
304 "_looptemp_",
305 "if",
306 "num_threads",
307 "schedule",
308 "nowait",
309 "ordered",
310 "default",
311 "collapse",
312 "untied",
313 "final",
314 "mergeable",
315 "device",
316 "dist_schedule",
317 "inbranch",
318 "notinbranch",
319 "num_teams",
320 "thread_limit",
321 "proc_bind",
322 "safelen",
323 "simdlen",
324 "for",
325 "parallel",
326 "sections",
327 "taskgroup",
328 "_simduid_"
329 };
330
331
332 /* Return the tree node structure used by tree code CODE. */
333
334 static inline enum tree_node_structure_enum
335 tree_node_structure_for_code (enum tree_code code)
336 {
337 switch (TREE_CODE_CLASS (code))
338 {
339 case tcc_declaration:
340 {
341 switch (code)
342 {
343 case FIELD_DECL:
344 return TS_FIELD_DECL;
345 case PARM_DECL:
346 return TS_PARM_DECL;
347 case VAR_DECL:
348 return TS_VAR_DECL;
349 case LABEL_DECL:
350 return TS_LABEL_DECL;
351 case RESULT_DECL:
352 return TS_RESULT_DECL;
353 case DEBUG_EXPR_DECL:
354 return TS_DECL_WRTL;
355 case CONST_DECL:
356 return TS_CONST_DECL;
357 case TYPE_DECL:
358 return TS_TYPE_DECL;
359 case FUNCTION_DECL:
360 return TS_FUNCTION_DECL;
361 case TRANSLATION_UNIT_DECL:
362 return TS_TRANSLATION_UNIT_DECL;
363 default:
364 return TS_DECL_NON_COMMON;
365 }
366 }
367 case tcc_type:
368 return TS_TYPE_NON_COMMON;
369 case tcc_reference:
370 case tcc_comparison:
371 case tcc_unary:
372 case tcc_binary:
373 case tcc_expression:
374 case tcc_statement:
375 case tcc_vl_exp:
376 return TS_EXP;
377 default: /* tcc_constant and tcc_exceptional */
378 break;
379 }
380 switch (code)
381 {
382 /* tcc_constant cases. */
383 case VOID_CST: return TS_TYPED;
384 case INTEGER_CST: return TS_INT_CST;
385 case REAL_CST: return TS_REAL_CST;
386 case FIXED_CST: return TS_FIXED_CST;
387 case COMPLEX_CST: return TS_COMPLEX;
388 case VECTOR_CST: return TS_VECTOR;
389 case STRING_CST: return TS_STRING;
390 /* tcc_exceptional cases. */
391 case ERROR_MARK: return TS_COMMON;
392 case IDENTIFIER_NODE: return TS_IDENTIFIER;
393 case TREE_LIST: return TS_LIST;
394 case TREE_VEC: return TS_VEC;
395 case SSA_NAME: return TS_SSA_NAME;
396 case PLACEHOLDER_EXPR: return TS_COMMON;
397 case STATEMENT_LIST: return TS_STATEMENT_LIST;
398 case BLOCK: return TS_BLOCK;
399 case CONSTRUCTOR: return TS_CONSTRUCTOR;
400 case TREE_BINFO: return TS_BINFO;
401 case OMP_CLAUSE: return TS_OMP_CLAUSE;
402 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
403 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
404
405 default:
406 gcc_unreachable ();
407 }
408 }
409
410
411 /* Initialize tree_contains_struct to describe the hierarchy of tree
412 nodes. */
413
414 static void
415 initialize_tree_contains_struct (void)
416 {
417 unsigned i;
418
419 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
420 {
421 enum tree_code code;
422 enum tree_node_structure_enum ts_code;
423
424 code = (enum tree_code) i;
425 ts_code = tree_node_structure_for_code (code);
426
427 /* Mark the TS structure itself. */
428 tree_contains_struct[code][ts_code] = 1;
429
430 /* Mark all the structures that TS is derived from. */
431 switch (ts_code)
432 {
433 case TS_TYPED:
434 case TS_BLOCK:
435 MARK_TS_BASE (code);
436 break;
437
438 case TS_COMMON:
439 case TS_INT_CST:
440 case TS_REAL_CST:
441 case TS_FIXED_CST:
442 case TS_VECTOR:
443 case TS_STRING:
444 case TS_COMPLEX:
445 case TS_SSA_NAME:
446 case TS_CONSTRUCTOR:
447 case TS_EXP:
448 case TS_STATEMENT_LIST:
449 MARK_TS_TYPED (code);
450 break;
451
452 case TS_IDENTIFIER:
453 case TS_DECL_MINIMAL:
454 case TS_TYPE_COMMON:
455 case TS_LIST:
456 case TS_VEC:
457 case TS_BINFO:
458 case TS_OMP_CLAUSE:
459 case TS_OPTIMIZATION:
460 case TS_TARGET_OPTION:
461 MARK_TS_COMMON (code);
462 break;
463
464 case TS_TYPE_WITH_LANG_SPECIFIC:
465 MARK_TS_TYPE_COMMON (code);
466 break;
467
468 case TS_TYPE_NON_COMMON:
469 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
470 break;
471
472 case TS_DECL_COMMON:
473 MARK_TS_DECL_MINIMAL (code);
474 break;
475
476 case TS_DECL_WRTL:
477 case TS_CONST_DECL:
478 MARK_TS_DECL_COMMON (code);
479 break;
480
481 case TS_DECL_NON_COMMON:
482 MARK_TS_DECL_WITH_VIS (code);
483 break;
484
485 case TS_DECL_WITH_VIS:
486 case TS_PARM_DECL:
487 case TS_LABEL_DECL:
488 case TS_RESULT_DECL:
489 MARK_TS_DECL_WRTL (code);
490 break;
491
492 case TS_FIELD_DECL:
493 MARK_TS_DECL_COMMON (code);
494 break;
495
496 case TS_VAR_DECL:
497 MARK_TS_DECL_WITH_VIS (code);
498 break;
499
500 case TS_TYPE_DECL:
501 case TS_FUNCTION_DECL:
502 MARK_TS_DECL_NON_COMMON (code);
503 break;
504
505 case TS_TRANSLATION_UNIT_DECL:
506 MARK_TS_DECL_COMMON (code);
507 break;
508
509 default:
510 gcc_unreachable ();
511 }
512 }
513
514 /* Basic consistency checks for attributes used in fold. */
515 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
516 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
517 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
518 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
519 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
520 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
521 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
522 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
523 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
524 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
525 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
526 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
527 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
528 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
529 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
530 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
531 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
532 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
533 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
534 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
535 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
536 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
537 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
538 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
539 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
540 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
541 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
542 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
543 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
544 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
545 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
546 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
547 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
548 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
549 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
550 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
551 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
552 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
553 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
554 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
555 }
556
557
558 /* Init tree.c. */
559
560 void
561 init_ttree (void)
562 {
563 /* Initialize the hash table of types. */
564 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
565 type_hash_eq, 0);
566
567 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
568 tree_decl_map_eq, 0);
569
570 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
571 tree_decl_map_eq, 0);
572
573 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
574 int_cst_hash_eq, NULL);
575
576 int_cst_node = make_int_cst (1, 1);
577
578 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
579 cl_option_hash_eq, NULL);
580
581 cl_optimization_node = make_node (OPTIMIZATION_NODE);
582 cl_target_option_node = make_node (TARGET_OPTION_NODE);
583
584 /* Initialize the tree_contains_struct array. */
585 initialize_tree_contains_struct ();
586 lang_hooks.init_ts ();
587 }
588
589 \f
590 /* The name of the object as the assembler will see it (but before any
591 translations made by ASM_OUTPUT_LABELREF). Often this is the same
592 as DECL_NAME. It is an IDENTIFIER_NODE. */
593 tree
594 decl_assembler_name (tree decl)
595 {
596 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
597 lang_hooks.set_decl_assembler_name (decl);
598 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
599 }
600
601 /* When the target supports COMDAT groups, this indicates which group the
602 DECL is associated with. This can be either an IDENTIFIER_NODE or a
603 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
604 tree
605 decl_comdat_group (const_tree node)
606 {
607 struct symtab_node *snode = symtab_get_node (node);
608 if (!snode)
609 return NULL;
610 return snode->get_comdat_group ();
611 }
612
613 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
614 tree
615 decl_comdat_group_id (const_tree node)
616 {
617 struct symtab_node *snode = symtab_get_node (node);
618 if (!snode)
619 return NULL;
620 return snode->get_comdat_group_id ();
621 }
622
623 /* When the target supports named sections, return the name of the section
624 NODE is placed in, or NULL if it is in no section. */
625 const char *
626 decl_section_name (const_tree node)
627 {
628 struct symtab_node *snode = symtab_get_node (node);
629 if (!snode)
630 return NULL;
631 return snode->get_section ();
632 }
633
634 /* Set the section name of NODE to VALUE; a NULL VALUE removes NODE from
635 any explicitly assigned section. */
636 void
637 set_decl_section_name (tree node, const char *value)
638 {
639 struct symtab_node *snode;
640
641 if (value == NULL)
642 {
643 snode = symtab_get_node (node);
644 if (!snode)
645 return;
646 }
647 else if (TREE_CODE (node) == VAR_DECL)
648 snode = varpool_node_for_decl (node);
649 else
650 snode = cgraph_get_create_node (node);
651 snode->set_section (value);
652 }
653
654 /* Return TLS model of a variable NODE. */
655 enum tls_model
656 decl_tls_model (const_tree node)
657 {
658 struct varpool_node *snode = varpool_get_node (node);
659 if (!snode)
660 return TLS_MODEL_NONE;
661 return snode->tls_model;
662 }
663
664 /* Set TLS model of variable NODE to MODEL. */
665 void
666 set_decl_tls_model (tree node, enum tls_model model)
667 {
668 struct varpool_node *vnode;
669
670 if (model == TLS_MODEL_NONE)
671 {
672 vnode = varpool_get_node (node);
673 if (!vnode)
674 return;
675 }
676 else
677 vnode = varpool_node_for_decl (node);
678 vnode->tls_model = model;
679 }
680
681 /* Compute the number of bytes occupied by a tree with code CODE.
682 This function cannot be used for nodes that have variable sizes,
683 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
684 size_t
685 tree_code_size (enum tree_code code)
686 {
687 switch (TREE_CODE_CLASS (code))
688 {
689 case tcc_declaration: /* A decl node */
690 {
691 switch (code)
692 {
693 case FIELD_DECL:
694 return sizeof (struct tree_field_decl);
695 case PARM_DECL:
696 return sizeof (struct tree_parm_decl);
697 case VAR_DECL:
698 return sizeof (struct tree_var_decl);
699 case LABEL_DECL:
700 return sizeof (struct tree_label_decl);
701 case RESULT_DECL:
702 return sizeof (struct tree_result_decl);
703 case CONST_DECL:
704 return sizeof (struct tree_const_decl);
705 case TYPE_DECL:
706 return sizeof (struct tree_type_decl);
707 case FUNCTION_DECL:
708 return sizeof (struct tree_function_decl);
709 case DEBUG_EXPR_DECL:
710 return sizeof (struct tree_decl_with_rtl);
711 default:
712 return sizeof (struct tree_decl_non_common);
713 }
714 }
715
716 case tcc_type: /* a type node */
717 return sizeof (struct tree_type_non_common);
718
719 case tcc_reference: /* a reference */
720 case tcc_expression: /* an expression */
721 case tcc_statement: /* an expression with side effects */
722 case tcc_comparison: /* a comparison expression */
723 case tcc_unary: /* a unary arithmetic expression */
724 case tcc_binary: /* a binary arithmetic expression */
725 return (sizeof (struct tree_exp)
726 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
727
728 case tcc_constant: /* a constant */
729 switch (code)
730 {
731 case VOID_CST: return sizeof (struct tree_typed);
732 case INTEGER_CST: gcc_unreachable ();
733 case REAL_CST: return sizeof (struct tree_real_cst);
734 case FIXED_CST: return sizeof (struct tree_fixed_cst);
735 case COMPLEX_CST: return sizeof (struct tree_complex);
736 case VECTOR_CST: return sizeof (struct tree_vector);
737 case STRING_CST: gcc_unreachable ();
738 default:
739 return lang_hooks.tree_size (code);
740 }
741
742 case tcc_exceptional: /* something random, like an identifier. */
743 switch (code)
744 {
745 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
746 case TREE_LIST: return sizeof (struct tree_list);
747
748 case ERROR_MARK:
749 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
750
751 case TREE_VEC:
752 case OMP_CLAUSE: gcc_unreachable ();
753
754 case SSA_NAME: return sizeof (struct tree_ssa_name);
755
756 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
757 case BLOCK: return sizeof (struct tree_block);
758 case CONSTRUCTOR: return sizeof (struct tree_constructor);
759 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
760 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
761
762 default:
763 return lang_hooks.tree_size (code);
764 }
765
766 default:
767 gcc_unreachable ();
768 }
769 }
770
771 /* Compute the number of bytes occupied by NODE. This routine only
772 looks at TREE_CODE, except for those nodes that have variable sizes. */
773 size_t
774 tree_size (const_tree node)
775 {
776 const enum tree_code code = TREE_CODE (node);
777 switch (code)
778 {
779 case INTEGER_CST:
780 return (sizeof (struct tree_int_cst)
781 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
782
783 case TREE_BINFO:
784 return (offsetof (struct tree_binfo, base_binfos)
785 + vec<tree, va_gc>
786 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
787
788 case TREE_VEC:
789 return (sizeof (struct tree_vec)
790 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
791
792 case VECTOR_CST:
793 return (sizeof (struct tree_vector)
794 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
795
796 case STRING_CST:
797 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
798
799 case OMP_CLAUSE:
800 return (sizeof (struct tree_omp_clause)
801 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
802 * sizeof (tree));
803
804 default:
805 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
806 return (sizeof (struct tree_exp)
807 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
808 else
809 return tree_code_size (code);
810 }
811 }
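
/* Worked example of the variable-size cases above: an INTEGER_CST whose
   value needs two HOST_WIDE_INTs (TREE_INT_CST_EXT_NUNITS == 2) occupies
   sizeof (struct tree_int_cst) + 1 * sizeof (HOST_WIDE_INT) bytes, while
   a single-HWI constant occupies just sizeof (struct tree_int_cst). */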
812
813 /* Record interesting allocation statistics for a tree node with CODE
814 and LENGTH. */
815
816 static void
817 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
818 size_t length ATTRIBUTE_UNUSED)
819 {
820 enum tree_code_class type = TREE_CODE_CLASS (code);
821 tree_node_kind kind;
822
823 if (!GATHER_STATISTICS)
824 return;
825
826 switch (type)
827 {
828 case tcc_declaration: /* A decl node */
829 kind = d_kind;
830 break;
831
832 case tcc_type: /* a type node */
833 kind = t_kind;
834 break;
835
836 case tcc_statement: /* an expression with side effects */
837 kind = s_kind;
838 break;
839
840 case tcc_reference: /* a reference */
841 kind = r_kind;
842 break;
843
844 case tcc_expression: /* an expression */
845 case tcc_comparison: /* a comparison expression */
846 case tcc_unary: /* a unary arithmetic expression */
847 case tcc_binary: /* a binary arithmetic expression */
848 kind = e_kind;
849 break;
850
851 case tcc_constant: /* a constant */
852 kind = c_kind;
853 break;
854
855 case tcc_exceptional: /* something random, like an identifier. */
856 switch (code)
857 {
858 case IDENTIFIER_NODE:
859 kind = id_kind;
860 break;
861
862 case TREE_VEC:
863 kind = vec_kind;
864 break;
865
866 case TREE_BINFO:
867 kind = binfo_kind;
868 break;
869
870 case SSA_NAME:
871 kind = ssa_name_kind;
872 break;
873
874 case BLOCK:
875 kind = b_kind;
876 break;
877
878 case CONSTRUCTOR:
879 kind = constr_kind;
880 break;
881
882 case OMP_CLAUSE:
883 kind = omp_clause_kind;
884 break;
885
886 default:
887 kind = x_kind;
888 break;
889 }
890 break;
891
892 case tcc_vl_exp:
893 kind = e_kind;
894 break;
895
896 default:
897 gcc_unreachable ();
898 }
899
900 tree_code_counts[(int) code]++;
901 tree_node_counts[(int) kind]++;
902 tree_node_sizes[(int) kind] += length;
903 }
904
905 /* Allocate and return a new UID from the DECL_UID namespace. */
906
907 int
908 allocate_decl_uid (void)
909 {
910 return next_decl_uid++;
911 }
912
913 /* Return a newly allocated node of code CODE. For decl and type
914 nodes, some other fields are initialized. The rest of the node is
915 initialized to zero. This function cannot be used for TREE_VEC,
916 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
917 tree_code_size.
918
919 Achoo! I got a code in the node. */
920
921 tree
922 make_node_stat (enum tree_code code MEM_STAT_DECL)
923 {
924 tree t;
925 enum tree_code_class type = TREE_CODE_CLASS (code);
926 size_t length = tree_code_size (code);
927
928 record_node_allocation_statistics (code, length);
929
930 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
931 TREE_SET_CODE (t, code);
932
933 switch (type)
934 {
935 case tcc_statement:
936 TREE_SIDE_EFFECTS (t) = 1;
937 break;
938
939 case tcc_declaration:
940 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
941 {
942 if (code == FUNCTION_DECL)
943 {
944 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
945 DECL_MODE (t) = FUNCTION_MODE;
946 }
947 else
948 DECL_ALIGN (t) = 1;
949 }
950 DECL_SOURCE_LOCATION (t) = input_location;
951 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
952 DECL_UID (t) = --next_debug_decl_uid;
953 else
954 {
955 DECL_UID (t) = allocate_decl_uid ();
956 SET_DECL_PT_UID (t, -1);
957 }
958 if (TREE_CODE (t) == LABEL_DECL)
959 LABEL_DECL_UID (t) = -1;
960
961 break;
962
963 case tcc_type:
964 TYPE_UID (t) = next_type_uid++;
965 TYPE_ALIGN (t) = BITS_PER_UNIT;
966 TYPE_USER_ALIGN (t) = 0;
967 TYPE_MAIN_VARIANT (t) = t;
968 TYPE_CANONICAL (t) = t;
969
970 /* Default to no attributes for type, but let target change that. */
971 TYPE_ATTRIBUTES (t) = NULL_TREE;
972 targetm.set_default_type_attributes (t);
973
974 /* We have not yet computed the alias set for this type. */
975 TYPE_ALIAS_SET (t) = -1;
976 break;
977
978 case tcc_constant:
979 TREE_CONSTANT (t) = 1;
980 break;
981
982 case tcc_expression:
983 switch (code)
984 {
985 case INIT_EXPR:
986 case MODIFY_EXPR:
987 case VA_ARG_EXPR:
988 case PREDECREMENT_EXPR:
989 case PREINCREMENT_EXPR:
990 case POSTDECREMENT_EXPR:
991 case POSTINCREMENT_EXPR:
992 /* All of these have side-effects, no matter what their
993 operands are. */
994 TREE_SIDE_EFFECTS (t) = 1;
995 break;
996
997 default:
998 break;
999 }
1000 break;
1001
1002 default:
1003 /* Other classes need no special treatment. */
1004 break;
1005 }
1006
1007 return t;
1008 }
1009 \f
1010 /* Return a new node with the same contents as NODE except that its
1011 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1012
1013 tree
1014 copy_node_stat (tree node MEM_STAT_DECL)
1015 {
1016 tree t;
1017 enum tree_code code = TREE_CODE (node);
1018 size_t length;
1019
1020 gcc_assert (code != STATEMENT_LIST);
1021
1022 length = tree_size (node);
1023 record_node_allocation_statistics (code, length);
1024 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1025 memcpy (t, node, length);
1026
1027 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1028 TREE_CHAIN (t) = 0;
1029 TREE_ASM_WRITTEN (t) = 0;
1030 TREE_VISITED (t) = 0;
1031
1032 if (TREE_CODE_CLASS (code) == tcc_declaration)
1033 {
1034 if (code == DEBUG_EXPR_DECL)
1035 DECL_UID (t) = --next_debug_decl_uid;
1036 else
1037 {
1038 DECL_UID (t) = allocate_decl_uid ();
1039 if (DECL_PT_UID_SET_P (node))
1040 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1041 }
1042 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1043 && DECL_HAS_VALUE_EXPR_P (node))
1044 {
1045 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1046 DECL_HAS_VALUE_EXPR_P (t) = 1;
1047 }
1048 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1049 if (TREE_CODE (node) == VAR_DECL)
1050 {
1051 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1052 t->decl_with_vis.symtab_node = NULL;
1053 }
1054 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1055 {
1056 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1057 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1058 }
1059 if (TREE_CODE (node) == FUNCTION_DECL)
1060 {
1061 DECL_STRUCT_FUNCTION (t) = NULL;
1062 t->decl_with_vis.symtab_node = NULL;
1063 }
1064 }
1065 else if (TREE_CODE_CLASS (code) == tcc_type)
1066 {
1067 TYPE_UID (t) = next_type_uid++;
1068 /* The following is so that the debug code for
1069 the copy is different from the original type.
1070 The two statements usually duplicate each other
1071 (because they clear fields of the same union),
1072 but the optimizer should catch that. */
1073 TYPE_SYMTAB_POINTER (t) = 0;
1074 TYPE_SYMTAB_ADDRESS (t) = 0;
1075
1076 /* Do not copy the values cache. */
1077 if (TYPE_CACHED_VALUES_P (t))
1078 {
1079 TYPE_CACHED_VALUES_P (t) = 0;
1080 TYPE_CACHED_VALUES (t) = NULL_TREE;
1081 }
1082 }
1083
1084 return t;
1085 }
1086
1087 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1088 For example, this can copy a list made of TREE_LIST nodes. */
1089
1090 tree
1091 copy_list (tree list)
1092 {
1093 tree head;
1094 tree prev, next;
1095
1096 if (list == 0)
1097 return 0;
1098
1099 head = prev = copy_node (list);
1100 next = TREE_CHAIN (list);
1101 while (next)
1102 {
1103 TREE_CHAIN (prev) = copy_node (next);
1104 prev = TREE_CHAIN (prev);
1105 next = TREE_CHAIN (next);
1106 }
1107 return head;
1108 }
1109
1110 \f
1111 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1112 INTEGER_CST with value CST and type TYPE. */
1113
1114 static unsigned int
1115 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1116 {
1117 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1118 /* We need an extra zero HWI if CST is an unsigned integer with its
1119 upper bit set, and if CST occupies a whole number of HWIs. */
1120 if (TYPE_UNSIGNED (type)
1121 && wi::neg_p (cst)
1122 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1123 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1124 return cst.get_len ();
1125 }
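
/* For instance, on a host whose HOST_WIDE_INT is 64 bits, the value
   0x8000000000000000 in a 64-bit unsigned type has its upper bit set and
   fills a whole number of HWIs, so it needs 64 / 64 + 1 == 2 elements:
   the value itself plus an extra zero HWI so the stored form reads as a
   non-negative number. The same bit pattern in a signed 64-bit type
   needs only cst.get_len () == 1 element. */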
1126
1127 /* Return a new INTEGER_CST with value CST and type TYPE. */
1128
1129 static tree
1130 build_new_int_cst (tree type, const wide_int &cst)
1131 {
1132 unsigned int len = cst.get_len ();
1133 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1134 tree nt = make_int_cst (len, ext_len);
1135
1136 if (len < ext_len)
1137 {
1138 --ext_len;
1139 TREE_INT_CST_ELT (nt, ext_len) = 0;
1140 for (unsigned int i = len; i < ext_len; ++i)
1141 TREE_INT_CST_ELT (nt, i) = -1;
1142 }
1143 else if (TYPE_UNSIGNED (type)
1144 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1145 {
1146 len--;
1147 TREE_INT_CST_ELT (nt, len)
1148 = zext_hwi (cst.elt (len),
1149 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1150 }
1151
1152 for (unsigned int i = 0; i < len; i++)
1153 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1154 TREE_TYPE (nt) = type;
1155 return nt;
1156 }
1157
1158 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1159
1160 tree
1161 build_int_cst (tree type, HOST_WIDE_INT low)
1162 {
1163 /* Support legacy code. */
1164 if (!type)
1165 type = integer_type_node;
1166
1167 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1168 }
1169
1170 tree
1171 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1172 {
1173 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1174 }
1175
1176 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1177
1178 tree
1179 build_int_cst_type (tree type, HOST_WIDE_INT low)
1180 {
1181 gcc_assert (type);
1182 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1183 }
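
/* A rough sketch of typical uses of the build_int_cst family:

     build_int_cst (integer_type_node, 42)    the int constant 42
     build_int_cst (NULL_TREE, 0)             legacy callers; the type
                                              defaults to integer_type_node
     build_int_cstu (size_type_node, 8)       LOW treated as unsigned

   All of these funnel through wide_int_to_tree below, so equal values of
   the same type normally come back as the same shared node. */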
1184
1185 /* Construct a tree of type TYPE with the value given by CST. The signedness
1186 of CST is assumed to be the same as the signedness of TYPE. */
1187
1188 tree
1189 double_int_to_tree (tree type, double_int cst)
1190 {
1191 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1192 }
1193
1194 /* We force the wide_int CST to the range of the type TYPE by sign or
1195 zero extending it. OVERFLOWABLE indicates if we are interested in
1196 overflow of the value: when >0 we are only interested in signed
1197 overflow, when <0 we are interested in any overflow. OVERFLOWED
1198 indicates whether overflow has already occurred. The value of CST
1199 is forced to be within the range of TYPE (by setting to 0 or 1 all
1200 the bits outside the type's range). We set TREE_OVERFLOW on the
1201 result if
1202 OVERFLOWED is nonzero,
1203 or OVERFLOWABLE is >0 and signed overflow occurs,
1204 or OVERFLOWABLE is <0 and any overflow occurs.
1205 We return a new tree node for the extended wide_int. The node
1206 is shared if no overflow flags are set. */
1207
1208
1209 tree
1210 force_fit_type (tree type, const wide_int_ref &cst,
1211 int overflowable, bool overflowed)
1212 {
1213 signop sign = TYPE_SIGN (type);
1214
1215 /* If we need to set overflow flags, return a new unshared node. */
1216 if (overflowed || !wi::fits_to_tree_p (cst, type))
1217 {
1218 if (overflowed
1219 || overflowable < 0
1220 || (overflowable > 0 && sign == SIGNED))
1221 {
1222 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1223 tree t = build_new_int_cst (type, tmp);
1224 TREE_OVERFLOW (t) = 1;
1225 return t;
1226 }
1227 }
1228
1229 /* Else build a shared node. */
1230 return wide_int_to_tree (type, cst);
1231 }
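
/* A sketch of how the flags interact, using an 8-bit unsigned TYPE and
   the value 256 (which does not fit):

     force_fit_type (type, 256, -1, false)    any overflow is interesting,
       so a fresh node with value 0 and TREE_OVERFLOW set is returned;
     force_fit_type (type, 256, 1, false)     only signed overflow matters
       and TYPE is unsigned, so the value simply wraps to 0 and the
       shared constant is returned. */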
1232
1233 /* These are the hash table functions for the hash table of INTEGER_CST
1234 nodes. */
1235
1236 /* Return the hash code of X, an INTEGER_CST. */
1237
1238 static hashval_t
1239 int_cst_hash_hash (const void *x)
1240 {
1241 const_tree const t = (const_tree) x;
1242 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1243 int i;
1244
1245 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1246 code ^= TREE_INT_CST_ELT (t, i);
1247
1248 return code;
1249 }
1250
1251 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1252 is the same as that given by *Y, which is also an INTEGER_CST tree node. */
1253
1254 static int
1255 int_cst_hash_eq (const void *x, const void *y)
1256 {
1257 const_tree const xt = (const_tree) x;
1258 const_tree const yt = (const_tree) y;
1259
1260 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1261 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1262 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1263 return false;
1264
1265 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1266 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1267 return false;
1268
1269 return true;
1270 }
1271
1272 /* Create an INT_CST node of TYPE and value CST.
1273 The returned node is always shared. For small integers we use a
1274 per-type vector cache, for larger ones we use a single hash table.
1275 The value is extended from its precision according to the sign of
1276 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1277 the upper bits and ensures that hashing and value equality based
1278 upon the underlying HOST_WIDE_INTs works without masking. */
1279
1280 tree
1281 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1282 {
1283 tree t;
1284 int ix = -1;
1285 int limit = 0;
1286
1287 gcc_assert (type);
1288 unsigned int prec = TYPE_PRECISION (type);
1289 signop sgn = TYPE_SIGN (type);
1290
1291 /* Verify that everything is canonical. */
1292 int l = pcst.get_len ();
1293 if (l > 1)
1294 {
1295 if (pcst.elt (l - 1) == 0)
1296 gcc_checking_assert (pcst.elt (l - 2) < 0);
1297 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1298 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1299 }
1300
1301 wide_int cst = wide_int::from (pcst, prec, sgn);
1302 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1303
1304 if (ext_len == 1)
1305 {
1306 /* We just need to store a single HOST_WIDE_INT. */
1307 HOST_WIDE_INT hwi;
1308 if (TYPE_UNSIGNED (type))
1309 hwi = cst.to_uhwi ();
1310 else
1311 hwi = cst.to_shwi ();
1312
1313 switch (TREE_CODE (type))
1314 {
1315 case NULLPTR_TYPE:
1316 gcc_assert (hwi == 0);
1317 /* Fallthru. */
1318
1319 case POINTER_TYPE:
1320 case REFERENCE_TYPE:
1321 /* Cache NULL pointer. */
1322 if (hwi == 0)
1323 {
1324 limit = 1;
1325 ix = 0;
1326 }
1327 break;
1328
1329 case BOOLEAN_TYPE:
1330 /* Cache false or true. */
1331 limit = 2;
1332 if (hwi < 2)
1333 ix = hwi;
1334 break;
1335
1336 case INTEGER_TYPE:
1337 case OFFSET_TYPE:
1338 if (TYPE_SIGN (type) == UNSIGNED)
1339 {
1340 /* Cache [0, N). */
1341 limit = INTEGER_SHARE_LIMIT;
1342 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1343 ix = hwi;
1344 }
1345 else
1346 {
1347 /* Cache [-1, N). */
1348 limit = INTEGER_SHARE_LIMIT + 1;
1349 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1350 ix = hwi + 1;
1351 }
1352 break;
1353
1354 case ENUMERAL_TYPE:
1355 break;
1356
1357 default:
1358 gcc_unreachable ();
1359 }
1360
1361 if (ix >= 0)
1362 {
1363 /* Look for it in the type's vector of small shared ints. */
1364 if (!TYPE_CACHED_VALUES_P (type))
1365 {
1366 TYPE_CACHED_VALUES_P (type) = 1;
1367 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1368 }
1369
1370 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1371 if (t)
1372 /* Make sure no one is clobbering the shared constant. */
1373 gcc_checking_assert (TREE_TYPE (t) == type
1374 && TREE_INT_CST_NUNITS (t) == 1
1375 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1376 && TREE_INT_CST_EXT_NUNITS (t) == 1
1377 && TREE_INT_CST_ELT (t, 0) == hwi);
1378 else
1379 {
1380 /* Create a new shared int. */
1381 t = build_new_int_cst (type, cst);
1382 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1383 }
1384 }
1385 else
1386 {
1387 /* Use the cache of larger shared ints, using int_cst_node as
1388 a temporary. */
1389 void **slot;
1390
1391 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1392 TREE_TYPE (int_cst_node) = type;
1393
1394 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1395 t = (tree) *slot;
1396 if (!t)
1397 {
1398 /* Insert this one into the hash table. */
1399 t = int_cst_node;
1400 *slot = t;
1401 /* Make a new node for next time round. */
1402 int_cst_node = make_int_cst (1, 1);
1403 }
1404 }
1405 }
1406 else
1407 {
1408 /* The value either hashes properly or we drop it on the floor
1409 for the gc to take care of. There will not be enough of them
1410 to worry about. */
1411 void **slot;
1412
1413 tree nt = build_new_int_cst (type, cst);
1414 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1415 t = (tree) *slot;
1416 if (!t)
1417 {
1418 /* Insert this one into the hash table. */
1419 t = nt;
1420 *slot = t;
1421 }
1422 }
1423
1424 return t;
1425 }
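
/* The practical effect of the caching above is pointer equality for
   common constants: assuming INTEGER_SHARE_LIMIT is larger than 7, two
   separate calls to build_int_cst (integer_type_node, 7) return the very
   same node from TYPE_CACHED_VALUES, while a large value such as
   build_int_cstu (integer_type_node, 0x12345678) is shared through
   int_cst_hash_table instead. */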
1426
1427 void
1428 cache_integer_cst (tree t)
1429 {
1430 tree type = TREE_TYPE (t);
1431 int ix = -1;
1432 int limit = 0;
1433 int prec = TYPE_PRECISION (type);
1434
1435 gcc_assert (!TREE_OVERFLOW (t));
1436
1437 switch (TREE_CODE (type))
1438 {
1439 case NULLPTR_TYPE:
1440 gcc_assert (integer_zerop (t));
1441 /* Fallthru. */
1442
1443 case POINTER_TYPE:
1444 case REFERENCE_TYPE:
1445 /* Cache NULL pointer. */
1446 if (integer_zerop (t))
1447 {
1448 limit = 1;
1449 ix = 0;
1450 }
1451 break;
1452
1453 case BOOLEAN_TYPE:
1454 /* Cache false or true. */
1455 limit = 2;
1456 if (wi::ltu_p (t, 2))
1457 ix = TREE_INT_CST_ELT (t, 0);
1458 break;
1459
1460 case INTEGER_TYPE:
1461 case OFFSET_TYPE:
1462 if (TYPE_UNSIGNED (type))
1463 {
1464 /* Cache 0..N */
1465 limit = INTEGER_SHARE_LIMIT;
1466
1467 /* This is a little hokey, but if the prec is smaller than
1468 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1469 obvious test will not get the correct answer. */
1470 if (prec < HOST_BITS_PER_WIDE_INT)
1471 {
1472 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1473 ix = tree_to_uhwi (t);
1474 }
1475 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1476 ix = tree_to_uhwi (t);
1477 }
1478 else
1479 {
1480 /* Cache -1..N */
1481 limit = INTEGER_SHARE_LIMIT + 1;
1482
1483 if (integer_minus_onep (t))
1484 ix = 0;
1485 else if (!wi::neg_p (t))
1486 {
1487 if (prec < HOST_BITS_PER_WIDE_INT)
1488 {
1489 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1490 ix = tree_to_shwi (t) + 1;
1491 }
1492 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1493 ix = tree_to_shwi (t) + 1;
1494 }
1495 }
1496 break;
1497
1498 case ENUMERAL_TYPE:
1499 break;
1500
1501 default:
1502 gcc_unreachable ();
1503 }
1504
1505 if (ix >= 0)
1506 {
1507 /* Look for it in the type's vector of small shared ints. */
1508 if (!TYPE_CACHED_VALUES_P (type))
1509 {
1510 TYPE_CACHED_VALUES_P (type) = 1;
1511 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1512 }
1513
1514 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1515 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1516 }
1517 else
1518 {
1519 /* Use the cache of larger shared ints. */
1520 void **slot;
1521
1522 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1523 /* If there is already an entry for the number verify it's the
1524 same. */
1525 if (*slot)
1526 gcc_assert (wi::eq_p (tree (*slot), t));
1527 else
1528 /* Otherwise insert this one into the hash table. */
1529 *slot = t;
1530 }
1531 }
1532
1533
1534 /* Build an integer constant in TYPE such that the lowest BITS bits are ones
1535 and the rest are zeros. */
1536
1537 tree
1538 build_low_bits_mask (tree type, unsigned bits)
1539 {
1540 gcc_assert (bits <= TYPE_PRECISION (type));
1541
1542 return wide_int_to_tree (type, wi::mask (bits, false,
1543 TYPE_PRECISION (type)));
1544 }
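
/* For example, with an unsigned 32-bit TYPE, build_low_bits_mask (type, 4)
   yields the constant 0xf: wi::mask (4, false, 32) sets the low four bits
   and clears the rest. */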
1545
1546 /* Return true if X is an integer constant that can be expressed in (unsigned)
1547 HOST_WIDE_INT without loss of precision. */
1548
1549 bool
1550 cst_and_fits_in_hwi (const_tree x)
1551 {
1552 if (TREE_CODE (x) != INTEGER_CST)
1553 return false;
1554
1555 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1556 return false;
1557
1558 return TREE_INT_CST_NUNITS (x) == 1;
1559 }
1560
1561 /* Build a newly constructed VECTOR_CST node with space for LEN elements. */
1562
1563 tree
1564 make_vector_stat (unsigned len MEM_STAT_DECL)
1565 {
1566 tree t;
1567 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1568
1569 record_node_allocation_statistics (VECTOR_CST, length);
1570
1571 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1572
1573 TREE_SET_CODE (t, VECTOR_CST);
1574 TREE_CONSTANT (t) = 1;
1575
1576 return t;
1577 }
1578
1579 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1580 are in a list pointed to by VALS. */
1581
1582 tree
1583 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1584 {
1585 int over = 0;
1586 unsigned cnt = 0;
1587 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1588 TREE_TYPE (v) = type;
1589
1590 /* Iterate through elements and check for overflow. */
1591 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1592 {
1593 tree value = vals[cnt];
1594
1595 VECTOR_CST_ELT (v, cnt) = value;
1596
1597 /* Don't crash if we get an address constant. */
1598 if (!CONSTANT_CLASS_P (value))
1599 continue;
1600
1601 over |= TREE_OVERFLOW (value);
1602 }
1603
1604 TREE_OVERFLOW (v) = over;
1605 return v;
1606 }
1607
1608 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1609 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1610
1611 tree
1612 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1613 {
1614 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1615 unsigned HOST_WIDE_INT idx;
1616 tree value;
1617
1618 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1619 vec[idx] = value;
1620 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1621 vec[idx] = build_zero_cst (TREE_TYPE (type));
1622
1623 return build_vector (type, vec);
1624 }
1625
1626 /* Build a vector of type VECTYPE where all the elements are SCs. */
1627 tree
1628 build_vector_from_val (tree vectype, tree sc)
1629 {
1630 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1631
1632 if (sc == error_mark_node)
1633 return sc;
1634
1635 /* Verify that the vector type is suitable for SC. Note that there
1636 is some inconsistency in the type-system with respect to restrict
1637 qualifications of pointers. Vector types always have a main-variant
1638 element type and the qualification is applied to the vector-type.
1639 So TREE_TYPE (vector-type) does not return a properly qualified
1640 vector element-type. */
1641 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1642 TREE_TYPE (vectype)));
1643
1644 if (CONSTANT_CLASS_P (sc))
1645 {
1646 tree *v = XALLOCAVEC (tree, nunits);
1647 for (i = 0; i < nunits; ++i)
1648 v[i] = sc;
1649 return build_vector (vectype, v);
1650 }
1651 else
1652 {
1653 vec<constructor_elt, va_gc> *v;
1654 vec_alloc (v, nunits);
1655 for (i = 0; i < nunits; ++i)
1656 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1657 return build_constructor (vectype, v);
1658 }
1659 }
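
/* A common way to obtain a "splat" constant, e.g. a vector of all ones:

     tree ones = build_vector_from_val (vectype,
                                        build_one_cst (TREE_TYPE (vectype)));

   If the element is a constant the result is a VECTOR_CST; otherwise a
   CONSTRUCTOR with one element per lane is built, as the code above
   shows. */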
1660
1661 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1662 are in the vec pointed to by VALS. */
1663 tree
1664 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1665 {
1666 tree c = make_node (CONSTRUCTOR);
1667 unsigned int i;
1668 constructor_elt *elt;
1669 bool constant_p = true;
1670 bool side_effects_p = false;
1671
1672 TREE_TYPE (c) = type;
1673 CONSTRUCTOR_ELTS (c) = vals;
1674
1675 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1676 {
1677 /* Mostly ctors will have elts that don't have side-effects, so
1678 the usual case is to scan all the elements. Hence a single
1679 loop for both const and side effects, rather than one loop
1680 each (with early outs). */
1681 if (!TREE_CONSTANT (elt->value))
1682 constant_p = false;
1683 if (TREE_SIDE_EFFECTS (elt->value))
1684 side_effects_p = true;
1685 }
1686
1687 TREE_SIDE_EFFECTS (c) = side_effects_p;
1688 TREE_CONSTANT (c) = constant_p;
1689
1690 return c;
1691 }
1692
1693 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1694 INDEX and VALUE. */
1695 tree
1696 build_constructor_single (tree type, tree index, tree value)
1697 {
1698 vec<constructor_elt, va_gc> *v;
1699 constructor_elt elt = {index, value};
1700
1701 vec_alloc (v, 1);
1702 v->quick_push (elt);
1703
1704 return build_constructor (type, v);
1705 }
1706
1707
1708 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1709 are in a list pointed to by VALS. */
1710 tree
1711 build_constructor_from_list (tree type, tree vals)
1712 {
1713 tree t;
1714 vec<constructor_elt, va_gc> *v = NULL;
1715
1716 if (vals)
1717 {
1718 vec_alloc (v, list_length (vals));
1719 for (t = vals; t; t = TREE_CHAIN (t))
1720 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1721 }
1722
1723 return build_constructor (type, v);
1724 }
1725
1726 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1727 of elements, provided as index/value pairs. */
1728
1729 tree
1730 build_constructor_va (tree type, int nelts, ...)
1731 {
1732 vec<constructor_elt, va_gc> *v = NULL;
1733 va_list p;
1734
1735 va_start (p, nelts);
1736 vec_alloc (v, nelts);
1737 while (nelts--)
1738 {
1739 tree index = va_arg (p, tree);
1740 tree value = va_arg (p, tree);
1741 CONSTRUCTOR_APPEND_ELT (v, index, value);
1742 }
1743 va_end (p);
1744 return build_constructor (type, v);
1745 }
1746
1747 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1748
1749 tree
1750 build_fixed (tree type, FIXED_VALUE_TYPE f)
1751 {
1752 tree v;
1753 FIXED_VALUE_TYPE *fp;
1754
1755 v = make_node (FIXED_CST);
1756 fp = ggc_alloc<fixed_value> ();
1757 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1758
1759 TREE_TYPE (v) = type;
1760 TREE_FIXED_CST_PTR (v) = fp;
1761 return v;
1762 }
1763
1764 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1765
1766 tree
1767 build_real (tree type, REAL_VALUE_TYPE d)
1768 {
1769 tree v;
1770 REAL_VALUE_TYPE *dp;
1771 int overflow = 0;
1772
1773 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1774 Consider doing it via real_convert now. */
1775
1776 v = make_node (REAL_CST);
1777 dp = ggc_alloc<real_value> ();
1778 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1779
1780 TREE_TYPE (v) = type;
1781 TREE_REAL_CST_PTR (v) = dp;
1782 TREE_OVERFLOW (v) = overflow;
1783 return v;
1784 }
1785
1786 /* Return a REAL_VALUE_TYPE holding, in the mode of TYPE, the integer
1787 value of the INTEGER_CST node I. */
1788
1789 REAL_VALUE_TYPE
1790 real_value_from_int_cst (const_tree type, const_tree i)
1791 {
1792 REAL_VALUE_TYPE d;
1793
1794 /* Clear all bits of the real value type so that we can later do
1795 bitwise comparisons to see if two values are the same. */
1796 memset (&d, 0, sizeof d);
1797
1798 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1799 TYPE_SIGN (TREE_TYPE (i)));
1800 return d;
1801 }
1802
1803 /* Given a tree representing an integer constant I, return a tree
1804 representing the same value as a floating-point constant of type TYPE. */
1805
1806 tree
1807 build_real_from_int_cst (tree type, const_tree i)
1808 {
1809 tree v;
1810 int overflow = TREE_OVERFLOW (i);
1811
1812 v = build_real (type, real_value_from_int_cst (type, i));
1813
1814 TREE_OVERFLOW (v) |= overflow;
1815 return v;
1816 }
1817
1818 /* Return a newly constructed STRING_CST node whose value is
1819 the LEN characters at STR.
1820 Note that for a C string literal, LEN should include the trailing NUL.
1821 The TREE_TYPE is not initialized. */
1822
1823 tree
1824 build_string (int len, const char *str)
1825 {
1826 tree s;
1827 size_t length;
1828
1829 /* Do not waste bytes provided by padding of struct tree_string. */
1830 length = len + offsetof (struct tree_string, str) + 1;
1831
1832 record_node_allocation_statistics (STRING_CST, length);
1833
1834 s = (tree) ggc_internal_alloc (length);
1835
1836 memset (s, 0, sizeof (struct tree_typed));
1837 TREE_SET_CODE (s, STRING_CST);
1838 TREE_CONSTANT (s) = 1;
1839 TREE_STRING_LENGTH (s) = len;
1840 memcpy (s->string.str, str, len);
1841 s->string.str[len] = '\0';
1842
1843 return s;
1844 }
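
/* For a C string literal the length passed in includes the implicit
   terminating NUL, so "abc" would be built as build_string (4, "abc");
   the node then stores those four characters plus the extra NUL appended
   above. The caller must still set TREE_TYPE on the result, typically
   to an array type of the appropriate length. */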
1845
1846 /* Return a newly constructed COMPLEX_CST node whose value is
1847 specified by the real and imaginary parts REAL and IMAG.
1848 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1849 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1850
1851 tree
1852 build_complex (tree type, tree real, tree imag)
1853 {
1854 tree t = make_node (COMPLEX_CST);
1855
1856 TREE_REALPART (t) = real;
1857 TREE_IMAGPART (t) = imag;
1858 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1859 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1860 return t;
1861 }
1862
1863 /* Return a constant of arithmetic type TYPE which is the
1864 multiplicative identity of the set TYPE. */
1865
1866 tree
1867 build_one_cst (tree type)
1868 {
1869 switch (TREE_CODE (type))
1870 {
1871 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1872 case POINTER_TYPE: case REFERENCE_TYPE:
1873 case OFFSET_TYPE:
1874 return build_int_cst (type, 1);
1875
1876 case REAL_TYPE:
1877 return build_real (type, dconst1);
1878
1879 case FIXED_POINT_TYPE:
1880 /* We can only generate 1 for accum types. */
1881 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1882 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1883
1884 case VECTOR_TYPE:
1885 {
1886 tree scalar = build_one_cst (TREE_TYPE (type));
1887
1888 return build_vector_from_val (type, scalar);
1889 }
1890
1891 case COMPLEX_TYPE:
1892 return build_complex (type,
1893 build_one_cst (TREE_TYPE (type)),
1894 build_zero_cst (TREE_TYPE (type)));
1895
1896 default:
1897 gcc_unreachable ();
1898 }
1899 }
1900
1901 /* Return an integer constant of type TYPE with all of its bits set to 1,
1902 or a complex or vector constant whose subparts are such integers. */
1903
1904 tree
1905 build_all_ones_cst (tree type)
1906 {
1907 if (TREE_CODE (type) == COMPLEX_TYPE)
1908 {
1909 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1910 return build_complex (type, scalar, scalar);
1911 }
1912 else
1913 return build_minus_one_cst (type);
1914 }
1915
1916 /* Return a constant of arithmetic type TYPE which is the
1917 opposite of the multiplicative identity of the set TYPE. */
1918
1919 tree
1920 build_minus_one_cst (tree type)
1921 {
1922 switch (TREE_CODE (type))
1923 {
1924 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1925 case POINTER_TYPE: case REFERENCE_TYPE:
1926 case OFFSET_TYPE:
1927 return build_int_cst (type, -1);
1928
1929 case REAL_TYPE:
1930 return build_real (type, dconstm1);
1931
1932 case FIXED_POINT_TYPE:
1933 /* We can only generate -1 for accum types. */
1934 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1935 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1936 TYPE_MODE (type)));
1937
1938 case VECTOR_TYPE:
1939 {
1940 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1941
1942 return build_vector_from_val (type, scalar);
1943 }
1944
1945 case COMPLEX_TYPE:
1946 return build_complex (type,
1947 build_minus_one_cst (TREE_TYPE (type)),
1948 build_zero_cst (TREE_TYPE (type)));
1949
1950 default:
1951 gcc_unreachable ();
1952 }
1953 }
1954
1955 /* Build a zero constant of type TYPE. This is used by constructor folding
1956 and thus the constant should be represented in memory by
1957 zero(es). */
1958
1959 tree
1960 build_zero_cst (tree type)
1961 {
1962 switch (TREE_CODE (type))
1963 {
1964 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1965 case POINTER_TYPE: case REFERENCE_TYPE:
1966 case OFFSET_TYPE: case NULLPTR_TYPE:
1967 return build_int_cst (type, 0);
1968
1969 case REAL_TYPE:
1970 return build_real (type, dconst0);
1971
1972 case FIXED_POINT_TYPE:
1973 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1974
1975 case VECTOR_TYPE:
1976 {
1977 tree scalar = build_zero_cst (TREE_TYPE (type));
1978
1979 return build_vector_from_val (type, scalar);
1980 }
1981
1982 case COMPLEX_TYPE:
1983 {
1984 tree zero = build_zero_cst (TREE_TYPE (type));
1985
1986 return build_complex (type, zero, zero);
1987 }
1988
1989 default:
1990 if (!AGGREGATE_TYPE_P (type))
1991 return fold_convert (type, integer_zero_node);
1992 return build_constructor (type, NULL);
1993 }
1994 }
1995
1996
1997 /* Build a BINFO with LEN language slots. */
1998
1999 tree
2000 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2001 {
2002 tree t;
2003 size_t length = (offsetof (struct tree_binfo, base_binfos)
2004 + vec<tree, va_gc>::embedded_size (base_binfos));
2005
2006 record_node_allocation_statistics (TREE_BINFO, length);
2007
2008 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2009
2010 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2011
2012 TREE_SET_CODE (t, TREE_BINFO);
2013
2014 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2015
2016 return t;
2017 }
2018
2019 /* Create a CASE_LABEL_EXPR tree node and return it. */
2020
2021 tree
2022 build_case_label (tree low_value, tree high_value, tree label_decl)
2023 {
2024 tree t = make_node (CASE_LABEL_EXPR);
2025
2026 TREE_TYPE (t) = void_type_node;
2027 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2028
2029 CASE_LOW (t) = low_value;
2030 CASE_HIGH (t) = high_value;
2031 CASE_LABEL (t) = label_decl;
2032 CASE_CHAIN (t) = NULL_TREE;
2033
2034 return t;
2035 }
2036
2037 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2038 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2039 The latter determines the length of the HOST_WIDE_INT vector. */
2040
2041 tree
2042 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2043 {
2044 tree t;
2045 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2046 + sizeof (struct tree_int_cst));
2047
2048 gcc_assert (len);
2049 record_node_allocation_statistics (INTEGER_CST, length);
2050
2051 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2052
2053 TREE_SET_CODE (t, INTEGER_CST);
2054 TREE_INT_CST_NUNITS (t) = len;
2055 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2056 /* to_offset can only be applied to trees that are offset_int-sized
2057 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2058 must be exactly the precision of offset_int and so LEN is correct. */
2059 if (ext_len <= OFFSET_INT_ELTS)
2060 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2061 else
2062 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2063
2064 TREE_CONSTANT (t) = 1;
2065
2066 return t;
2067 }
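
/* Worked example of the size computation above (assuming an 8-byte
   HOST_WIDE_INT): struct tree_int_cst already embeds storage for one
   element, so a constant with EXT_LEN == 2 is allocated with

     length = (2 - 1) * 8 + sizeof (struct tree_int_cst)

   bytes, i.e. one additional HOST_WIDE_INT beyond the embedded one.  */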
2068
2069 /* Build a newly constructed TREE_VEC node of length LEN. */
2070
2071 tree
2072 make_tree_vec_stat (int len MEM_STAT_DECL)
2073 {
2074 tree t;
2075 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2076
2077 record_node_allocation_statistics (TREE_VEC, length);
2078
2079 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2080
2081 TREE_SET_CODE (t, TREE_VEC);
2082 TREE_VEC_LENGTH (t) = len;
2083
2084 return t;
2085 }
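
/* As with INTEGER_CSTs above, struct tree_vec already embeds one tree
   slot, which is why only LEN - 1 extra pointers are added to the
   allocation; e.g. with 8-byte pointers a 4-element TREE_VEC occupies
   3 * 8 + sizeof (struct tree_vec) bytes.  */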
2086
2087 /* Grow a TREE_VEC node to new length LEN. */
2088
2089 tree
2090 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2091 {
2092 gcc_assert (TREE_CODE (v) == TREE_VEC);
2093
2094 int oldlen = TREE_VEC_LENGTH (v);
2095 gcc_assert (len > oldlen);
2096
2097 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2098 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2099
2100 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2101
2102 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2103
2104 TREE_VEC_LENGTH (v) = len;
2105
2106 return v;
2107 }
2108 \f
2109 /* Return 1 if EXPR is the integer constant zero, a complex constant of
2110 zero, or a vector constant all of whose elements are zero. */
2111
2112 int
2113 integer_zerop (const_tree expr)
2114 {
2115 STRIP_NOPS (expr);
2116
2117 switch (TREE_CODE (expr))
2118 {
2119 case INTEGER_CST:
2120 return wi::eq_p (expr, 0);
2121 case COMPLEX_CST:
2122 return (integer_zerop (TREE_REALPART (expr))
2123 && integer_zerop (TREE_IMAGPART (expr)));
2124 case VECTOR_CST:
2125 {
2126 unsigned i;
2127 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2128 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2129 return false;
2130 return true;
2131 }
2132 default:
2133 return false;
2134 }
2135 }
2136
2137 /* Return 1 if EXPR is the integer constant one, the corresponding
2138 complex constant, or a vector constant all of whose elements are one. */
2139
2140 int
2141 integer_onep (const_tree expr)
2142 {
2143 STRIP_NOPS (expr);
2144
2145 switch (TREE_CODE (expr))
2146 {
2147 case INTEGER_CST:
2148 return wi::eq_p (wi::to_widest (expr), 1);
2149 case COMPLEX_CST:
2150 return (integer_onep (TREE_REALPART (expr))
2151 && integer_zerop (TREE_IMAGPART (expr)));
2152 case VECTOR_CST:
2153 {
2154 unsigned i;
2155 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2156 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2157 return false;
2158 return true;
2159 }
2160 default:
2161 return false;
2162 }
2163 }
2164
2165 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2166 it contains, or a complex or vector whose subparts are such integers. */
2167
2168 int
2169 integer_all_onesp (const_tree expr)
2170 {
2171 STRIP_NOPS (expr);
2172
2173 if (TREE_CODE (expr) == COMPLEX_CST
2174 && integer_all_onesp (TREE_REALPART (expr))
2175 && integer_all_onesp (TREE_IMAGPART (expr)))
2176 return 1;
2177
2178 else if (TREE_CODE (expr) == VECTOR_CST)
2179 {
2180 unsigned i;
2181 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2182 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2183 return 0;
2184 return 1;
2185 }
2186
2187 else if (TREE_CODE (expr) != INTEGER_CST)
2188 return 0;
2189
2190 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2191 }
2192
2193 /* Return 1 if EXPR is the integer constant minus one. */
2194
2195 int
2196 integer_minus_onep (const_tree expr)
2197 {
2198 STRIP_NOPS (expr);
2199
2200 if (TREE_CODE (expr) == COMPLEX_CST)
2201 return (integer_all_onesp (TREE_REALPART (expr))
2202 && integer_zerop (TREE_IMAGPART (expr)));
2203 else
2204 return integer_all_onesp (expr);
2205 }
2206
2207 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2208 one bit on). */
2209
2210 int
2211 integer_pow2p (const_tree expr)
2212 {
2213 STRIP_NOPS (expr);
2214
2215 if (TREE_CODE (expr) == COMPLEX_CST
2216 && integer_pow2p (TREE_REALPART (expr))
2217 && integer_zerop (TREE_IMAGPART (expr)))
2218 return 1;
2219
2220 if (TREE_CODE (expr) != INTEGER_CST)
2221 return 0;
2222
2223 return wi::popcount (expr) == 1;
2224 }
2225
2226 /* Return 1 if EXPR is an integer constant other than zero or a
2227 complex constant other than zero. */
2228
2229 int
2230 integer_nonzerop (const_tree expr)
2231 {
2232 STRIP_NOPS (expr);
2233
2234 return ((TREE_CODE (expr) == INTEGER_CST
2235 && !wi::eq_p (expr, 0))
2236 || (TREE_CODE (expr) == COMPLEX_CST
2237 && (integer_nonzerop (TREE_REALPART (expr))
2238 || integer_nonzerop (TREE_IMAGPART (expr)))));
2239 }
2240
2241 /* Return 1 if EXPR is the fixed-point constant zero. */
2242
2243 int
2244 fixed_zerop (const_tree expr)
2245 {
2246 return (TREE_CODE (expr) == FIXED_CST
2247 && TREE_FIXED_CST (expr).data.is_zero ());
2248 }
2249
2250 /* Return the base-2 logarithm (i.e. the exponent) of a tree node known
2251 to be a power of two. */
2252
2253 int
2254 tree_log2 (const_tree expr)
2255 {
2256 STRIP_NOPS (expr);
2257
2258 if (TREE_CODE (expr) == COMPLEX_CST)
2259 return tree_log2 (TREE_REALPART (expr));
2260
2261 return wi::exact_log2 (expr);
2262 }
2263
2264 /* Similar, but return the largest integer Y such that 2 ** Y is less
2265 than or equal to EXPR. */
2266
2267 int
2268 tree_floor_log2 (const_tree expr)
2269 {
2270 STRIP_NOPS (expr);
2271
2272 if (TREE_CODE (expr) == COMPLEX_CST)
2273 return tree_log2 (TREE_REALPART (expr));
2274
2275 return wi::floor_log2 (expr);
2276 }
2277
2278 /* Return the number of known trailing zero bits in EXPR, or, if the value of
2279 EXPR is known to be zero, the precision of its type. */
2280
2281 unsigned int
2282 tree_ctz (const_tree expr)
2283 {
2284 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2285 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2286 return 0;
2287
2288 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2289 switch (TREE_CODE (expr))
2290 {
2291 case INTEGER_CST:
2292 ret1 = wi::ctz (expr);
2293 return MIN (ret1, prec);
2294 case SSA_NAME:
2295 ret1 = wi::ctz (get_nonzero_bits (expr));
2296 return MIN (ret1, prec);
2297 case PLUS_EXPR:
2298 case MINUS_EXPR:
2299 case BIT_IOR_EXPR:
2300 case BIT_XOR_EXPR:
2301 case MIN_EXPR:
2302 case MAX_EXPR:
2303 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2304 if (ret1 == 0)
2305 return ret1;
2306 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2307 return MIN (ret1, ret2);
2308 case POINTER_PLUS_EXPR:
2309 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2310 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2311 /* The second operand is sizetype, which could in theory be
2312 wider than the pointer's precision. Make sure we never
2313 return more than PREC. */
2314 ret2 = MIN (ret2, prec);
2315 return MIN (ret1, ret2);
2316 case BIT_AND_EXPR:
2317 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2318 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2319 return MAX (ret1, ret2);
2320 case MULT_EXPR:
2321 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2322 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2323 return MIN (ret1 + ret2, prec);
2324 case LSHIFT_EXPR:
2325 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2326 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2327 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2328 {
2329 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2330 return MIN (ret1 + ret2, prec);
2331 }
2332 return ret1;
2333 case RSHIFT_EXPR:
2334 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2335 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2336 {
2337 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2338 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2339 if (ret1 > ret2)
2340 return ret1 - ret2;
2341 }
2342 return 0;
2343 case TRUNC_DIV_EXPR:
2344 case CEIL_DIV_EXPR:
2345 case FLOOR_DIV_EXPR:
2346 case ROUND_DIV_EXPR:
2347 case EXACT_DIV_EXPR:
2348 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2349 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2350 {
2351 int l = tree_log2 (TREE_OPERAND (expr, 1));
2352 if (l >= 0)
2353 {
2354 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2355 ret2 = l;
2356 if (ret1 > ret2)
2357 return ret1 - ret2;
2358 }
2359 }
2360 return 0;
2361 CASE_CONVERT:
2362 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2363 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2364 ret1 = prec;
2365 return MIN (ret1, prec);
2366 case SAVE_EXPR:
2367 return tree_ctz (TREE_OPERAND (expr, 0));
2368 case COND_EXPR:
2369 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2370 if (ret1 == 0)
2371 return 0;
2372 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2373 return MIN (ret1, ret2);
2374 case COMPOUND_EXPR:
2375 return tree_ctz (TREE_OPERAND (expr, 1));
2376 case ADDR_EXPR:
2377 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2378 if (ret1 > BITS_PER_UNIT)
2379 {
2380 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2381 return MIN (ret1, prec);
2382 }
2383 return 0;
2384 default:
2385 return 0;
2386 }
2387 }
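
/* Worked example (illustrative only).  For an expression of the form
   (x * 8) + 4 where nothing is known about x and the precision is at
   least 3 bits:

     tree_ctz (x)             == 0
     tree_ctz (x * 8)         == MIN (0 + 3, prec) == 3   (MULT_EXPR)
     tree_ctz ((x * 8) + 4)   == MIN (3, 2) == 2          (PLUS_EXPR)

   so two low-order zero bits are guaranteed whatever the value of x.  */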
2388
2389 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2390 decimal float constants, so don't return 1 for them. */
2391
2392 int
2393 real_zerop (const_tree expr)
2394 {
2395 STRIP_NOPS (expr);
2396
2397 switch (TREE_CODE (expr))
2398 {
2399 case REAL_CST:
2400 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2401 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2402 case COMPLEX_CST:
2403 return real_zerop (TREE_REALPART (expr))
2404 && real_zerop (TREE_IMAGPART (expr));
2405 case VECTOR_CST:
2406 {
2407 unsigned i;
2408 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2409 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2410 return false;
2411 return true;
2412 }
2413 default:
2414 return false;
2415 }
2416 }
2417
2418 /* Return 1 if EXPR is the real constant one in real or complex form.
2419 Trailing zeroes matter for decimal float constants, so don't return
2420 1 for them. */
2421
2422 int
2423 real_onep (const_tree expr)
2424 {
2425 STRIP_NOPS (expr);
2426
2427 switch (TREE_CODE (expr))
2428 {
2429 case REAL_CST:
2430 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2431 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2432 case COMPLEX_CST:
2433 return real_onep (TREE_REALPART (expr))
2434 && real_zerop (TREE_IMAGPART (expr));
2435 case VECTOR_CST:
2436 {
2437 unsigned i;
2438 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2439 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2440 return false;
2441 return true;
2442 }
2443 default:
2444 return false;
2445 }
2446 }
2447
2448 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2449 matter for decimal float constants, so don't return 1 for them. */
2450
2451 int
2452 real_minus_onep (const_tree expr)
2453 {
2454 STRIP_NOPS (expr);
2455
2456 switch (TREE_CODE (expr))
2457 {
2458 case REAL_CST:
2459 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2460 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2461 case COMPLEX_CST:
2462 return real_minus_onep (TREE_REALPART (expr))
2463 && real_zerop (TREE_IMAGPART (expr));
2464 case VECTOR_CST:
2465 {
2466 unsigned i;
2467 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2468 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2469 return false;
2470 return true;
2471 }
2472 default:
2473 return false;
2474 }
2475 }
2476
2477 /* Nonzero if EXP is a constant or a cast of a constant. */
2478
2479 int
2480 really_constant_p (const_tree exp)
2481 {
2482 /* This is not quite the same as STRIP_NOPS. It does more. */
2483 while (CONVERT_EXPR_P (exp)
2484 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2485 exp = TREE_OPERAND (exp, 0);
2486 return TREE_CONSTANT (exp);
2487 }
2488 \f
2489 /* Return first list element whose TREE_VALUE is ELEM.
2490 Return 0 if ELEM is not in LIST. */
2491
2492 tree
2493 value_member (tree elem, tree list)
2494 {
2495 while (list)
2496 {
2497 if (elem == TREE_VALUE (list))
2498 return list;
2499 list = TREE_CHAIN (list);
2500 }
2501 return NULL_TREE;
2502 }
2503
2504 /* Return first list element whose TREE_PURPOSE is ELEM.
2505 Return 0 if ELEM is not in LIST. */
2506
2507 tree
2508 purpose_member (const_tree elem, tree list)
2509 {
2510 while (list)
2511 {
2512 if (elem == TREE_PURPOSE (list))
2513 return list;
2514 list = TREE_CHAIN (list);
2515 }
2516 return NULL_TREE;
2517 }
2518
2519 /* Return true if ELEM is in V. */
2520
2521 bool
2522 vec_member (const_tree elem, vec<tree, va_gc> *v)
2523 {
2524 unsigned ix;
2525 tree t;
2526 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2527 if (elem == t)
2528 return true;
2529 return false;
2530 }
2531
2532 /* Return element number IDX (zero-origin) of chain CHAIN, or
2533 NULL_TREE if the chain has fewer than IDX + 1 elements. */
2534
2535 tree
2536 chain_index (int idx, tree chain)
2537 {
2538 for (; chain && idx > 0; --idx)
2539 chain = TREE_CHAIN (chain);
2540 return chain;
2541 }
2542
2543 /* Return nonzero if ELEM is part of the chain CHAIN. */
2544
2545 int
2546 chain_member (const_tree elem, const_tree chain)
2547 {
2548 while (chain)
2549 {
2550 if (elem == chain)
2551 return 1;
2552 chain = DECL_CHAIN (chain);
2553 }
2554
2555 return 0;
2556 }
2557
2558 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2559 We expect a null pointer to mark the end of the chain.
2560 This is the Lisp primitive `length'. */
2561
2562 int
2563 list_length (const_tree t)
2564 {
2565 const_tree p = t;
2566 #ifdef ENABLE_TREE_CHECKING
2567 const_tree q = t;
2568 #endif
2569 int len = 0;
2570
2571 while (p)
2572 {
2573 p = TREE_CHAIN (p);
2574 #ifdef ENABLE_TREE_CHECKING
2575 if (len % 2)
2576 q = TREE_CHAIN (q);
2577 gcc_assert (p != q);
2578 #endif
2579 len++;
2580 }
2581
2582 return len;
2583 }
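
/* Note on the ENABLE_TREE_CHECKING code above: it is a tortoise-and-hare
   cycle check -- Q advances one link for every two links P advances, so
   if the chain is circular P eventually catches up with Q and the
   assertion fails instead of the loop running forever.  */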
2584
2585 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2586 UNION_TYPE TYPE, or NULL_TREE if none. */
2587
2588 tree
2589 first_field (const_tree type)
2590 {
2591 tree t = TYPE_FIELDS (type);
2592 while (t && TREE_CODE (t) != FIELD_DECL)
2593 t = TREE_CHAIN (t);
2594 return t;
2595 }
2596
2597 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2598 by modifying the last node in chain 1 to point to chain 2.
2599 This is the Lisp primitive `nconc'. */
2600
2601 tree
2602 chainon (tree op1, tree op2)
2603 {
2604 tree t1;
2605
2606 if (!op1)
2607 return op2;
2608 if (!op2)
2609 return op1;
2610
2611 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2612 continue;
2613 TREE_CHAIN (t1) = op2;
2614
2615 #ifdef ENABLE_TREE_CHECKING
2616 {
2617 tree t2;
2618 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2619 gcc_assert (t2 != t1);
2620 }
2621 #endif
2622
2623 return op1;
2624 }
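
/* As an illustrative sketch (the variable names "args1" and "args2" are
   hypothetical), given two TREE_LIST chains,

     args1 = chainon (args1, args2);

   destructively appends ARGS2 to the end of ARGS1 and returns the head;
   if ARGS1 is NULL_TREE the result is simply ARGS2.  ARGS2 is not
   copied, so it must not already be part of ARGS1.  */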
2625
2626 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2627
2628 tree
2629 tree_last (tree chain)
2630 {
2631 tree next;
2632 if (chain)
2633 while ((next = TREE_CHAIN (chain)))
2634 chain = next;
2635 return chain;
2636 }
2637
2638 /* Reverse the order of elements in the chain T,
2639 and return the new head of the chain (old last element). */
2640
2641 tree
2642 nreverse (tree t)
2643 {
2644 tree prev = 0, decl, next;
2645 for (decl = t; decl; decl = next)
2646 {
2647 /* We shouldn't be using this function to reverse BLOCK chains; we
2648 have blocks_nreverse for that. */
2649 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2650 next = TREE_CHAIN (decl);
2651 TREE_CHAIN (decl) = prev;
2652 prev = decl;
2653 }
2654 return prev;
2655 }
2656 \f
2657 /* Return a newly created TREE_LIST node whose
2658 purpose and value fields are PARM and VALUE. */
2659
2660 tree
2661 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2662 {
2663 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2664 TREE_PURPOSE (t) = parm;
2665 TREE_VALUE (t) = value;
2666 return t;
2667 }
2668
2669 /* Build a chain of TREE_LIST nodes from a vector. */
2670
2671 tree
2672 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2673 {
2674 tree ret = NULL_TREE;
2675 tree *pp = &ret;
2676 unsigned int i;
2677 tree t;
2678 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2679 {
2680 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2681 pp = &TREE_CHAIN (*pp);
2682 }
2683 return ret;
2684 }
2685
2686 /* Return a newly created TREE_LIST node whose
2687 purpose and value fields are PURPOSE and VALUE
2688 and whose TREE_CHAIN is CHAIN. */
2689
2690 tree
2691 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2692 {
2693 tree node;
2694
2695 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2696 memset (node, 0, sizeof (struct tree_common));
2697
2698 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2699
2700 TREE_SET_CODE (node, TREE_LIST);
2701 TREE_CHAIN (node) = chain;
2702 TREE_PURPOSE (node) = purpose;
2703 TREE_VALUE (node) = value;
2704 return node;
2705 }
2706
2707 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2708 trees. */
2709
2710 vec<tree, va_gc> *
2711 ctor_to_vec (tree ctor)
2712 {
2713 vec<tree, va_gc> *vec;
2714 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2715 unsigned int ix;
2716 tree val;
2717
2718 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2719 vec->quick_push (val);
2720
2721 return vec;
2722 }
2723 \f
2724 /* Return the size nominally occupied by an object of type TYPE
2725 when it resides in memory. The value is measured in units of bytes,
2726 and its data type is that normally used for type sizes
2727 (which is the first type created by make_signed_type or
2728 make_unsigned_type). */
2729
2730 tree
2731 size_in_bytes (const_tree type)
2732 {
2733 tree t;
2734
2735 if (type == error_mark_node)
2736 return integer_zero_node;
2737
2738 type = TYPE_MAIN_VARIANT (type);
2739 t = TYPE_SIZE_UNIT (type);
2740
2741 if (t == 0)
2742 {
2743 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2744 return size_zero_node;
2745 }
2746
2747 return t;
2748 }
2749
2750 /* Return the size of TYPE (in bytes) as a wide integer
2751 or return -1 if the size can vary or does not fit in a HOST_WIDE_INT. */
2752
2753 HOST_WIDE_INT
2754 int_size_in_bytes (const_tree type)
2755 {
2756 tree t;
2757
2758 if (type == error_mark_node)
2759 return 0;
2760
2761 type = TYPE_MAIN_VARIANT (type);
2762 t = TYPE_SIZE_UNIT (type);
2763
2764 if (t && tree_fits_uhwi_p (t))
2765 return TREE_INT_CST_LOW (t);
2766 else
2767 return -1;
2768 }
2769
2770 /* Return the maximum size of TYPE (in bytes) as a wide integer
2771 or return -1 if the size can vary or does not fit in a HOST_WIDE_INT. */
2772
2773 HOST_WIDE_INT
2774 max_int_size_in_bytes (const_tree type)
2775 {
2776 HOST_WIDE_INT size = -1;
2777 tree size_tree;
2778
2779 /* If this is an array type, check for a possible MAX_SIZE attached. */
2780
2781 if (TREE_CODE (type) == ARRAY_TYPE)
2782 {
2783 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2784
2785 if (size_tree && tree_fits_uhwi_p (size_tree))
2786 size = tree_to_uhwi (size_tree);
2787 }
2788
2789 /* If we still haven't been able to get a size, see if the language
2790 can compute a maximum size. */
2791
2792 if (size == -1)
2793 {
2794 size_tree = lang_hooks.types.max_size (type);
2795
2796 if (size_tree && tree_fits_uhwi_p (size_tree))
2797 size = tree_to_uhwi (size_tree);
2798 }
2799
2800 return size;
2801 }
2802 \f
2803 /* Return the bit position of FIELD, in bits from the start of the record.
2804 This is a tree of type bitsizetype. */
2805
2806 tree
2807 bit_position (const_tree field)
2808 {
2809 return bit_from_pos (DECL_FIELD_OFFSET (field),
2810 DECL_FIELD_BIT_OFFSET (field));
2811 }
2812
2813 /* Likewise, but return it as an integer. It must be representable in
2814 that way (since it could be a signed value, we don't have the
2815 option of returning -1 like int_size_in_bytes can). */
2816
2817 HOST_WIDE_INT
2818 int_bit_position (const_tree field)
2819 {
2820 return tree_to_shwi (bit_position (field));
2821 }
2822 \f
2823 /* Return the byte position of FIELD, in bytes from the start of the record.
2824 This is a tree of type sizetype. */
2825
2826 tree
2827 byte_position (const_tree field)
2828 {
2829 return byte_from_pos (DECL_FIELD_OFFSET (field),
2830 DECL_FIELD_BIT_OFFSET (field));
2831 }
2832
2833 /* Likewise, but return it as an integer. It must be representable in
2834 that way (since it could be a signed value, we don't have the
2835 option of returning -1 like int_size_in_bytes can). */
2836
2837 HOST_WIDE_INT
2838 int_byte_position (const_tree field)
2839 {
2840 return tree_to_shwi (byte_position (field));
2841 }
2842 \f
2843 /* Return the strictest alignment, in bits, that T is known to have. */
2844
2845 unsigned int
2846 expr_align (const_tree t)
2847 {
2848 unsigned int align0, align1;
2849
2850 switch (TREE_CODE (t))
2851 {
2852 CASE_CONVERT: case NON_LVALUE_EXPR:
2853 /* If we have conversions, we know that the alignment of the
2854 object must meet each of the alignments of the types. */
2855 align0 = expr_align (TREE_OPERAND (t, 0));
2856 align1 = TYPE_ALIGN (TREE_TYPE (t));
2857 return MAX (align0, align1);
2858
2859 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2860 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2861 case CLEANUP_POINT_EXPR:
2862 /* These don't change the alignment of an object. */
2863 return expr_align (TREE_OPERAND (t, 0));
2864
2865 case COND_EXPR:
2866 /* The best we can do is say that the alignment is the least aligned
2867 of the two arms. */
2868 align0 = expr_align (TREE_OPERAND (t, 1));
2869 align1 = expr_align (TREE_OPERAND (t, 2));
2870 return MIN (align0, align1);
2871
2872 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2873 meaningfully; it is always 1. */
2874 case LABEL_DECL: case CONST_DECL:
2875 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2876 case FUNCTION_DECL:
2877 gcc_assert (DECL_ALIGN (t) != 0);
2878 return DECL_ALIGN (t);
2879
2880 default:
2881 break;
2882 }
2883
2884 /* Otherwise take the alignment from that of the type. */
2885 return TYPE_ALIGN (TREE_TYPE (t));
2886 }
2887 \f
2888 /* Return, as a tree node, the number of elements for TYPE (which is an
2889 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2890
2891 tree
2892 array_type_nelts (const_tree type)
2893 {
2894 tree index_type, min, max;
2895
2896 /* If the array was declared with unspecified bounds, then we should have
2897 already given an error about it before we got here. */
2898 if (! TYPE_DOMAIN (type))
2899 return error_mark_node;
2900
2901 index_type = TYPE_DOMAIN (type);
2902 min = TYPE_MIN_VALUE (index_type);
2903 max = TYPE_MAX_VALUE (index_type);
2904
2905 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2906 if (!max)
2907 return error_mark_node;
2908
2909 return (integer_zerop (min)
2910 ? max
2911 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2912 }
2913 \f
2914 /* If arg is static -- a reference to an object in static storage -- then
2915 return the object. This is not the same as the C meaning of `static'.
2916 If arg isn't static, return NULL. */
2917
2918 tree
2919 staticp (tree arg)
2920 {
2921 switch (TREE_CODE (arg))
2922 {
2923 case FUNCTION_DECL:
2924 /* Nested functions are static, even though taking their address will
2925 involve a trampoline as we unnest the nested function and create
2926 the trampoline on the tree level. */
2927 return arg;
2928
2929 case VAR_DECL:
2930 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2931 && ! DECL_THREAD_LOCAL_P (arg)
2932 && ! DECL_DLLIMPORT_P (arg)
2933 ? arg : NULL);
2934
2935 case CONST_DECL:
2936 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2937 ? arg : NULL);
2938
2939 case CONSTRUCTOR:
2940 return TREE_STATIC (arg) ? arg : NULL;
2941
2942 case LABEL_DECL:
2943 case STRING_CST:
2944 return arg;
2945
2946 case COMPONENT_REF:
2947 /* If the thing being referenced is not a field, then it is
2948 something language specific. */
2949 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2950
2951 /* If we are referencing a bitfield, we can't evaluate an
2952 ADDR_EXPR at compile time and so it isn't a constant. */
2953 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2954 return NULL;
2955
2956 return staticp (TREE_OPERAND (arg, 0));
2957
2958 case BIT_FIELD_REF:
2959 return NULL;
2960
2961 case INDIRECT_REF:
2962 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2963
2964 case ARRAY_REF:
2965 case ARRAY_RANGE_REF:
2966 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2967 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2968 return staticp (TREE_OPERAND (arg, 0));
2969 else
2970 return NULL;
2971
2972 case COMPOUND_LITERAL_EXPR:
2973 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2974
2975 default:
2976 return NULL;
2977 }
2978 }
2979
2980 \f
2981
2982
2983 /* Return whether OP is a DECL whose address is function-invariant. */
2984
2985 bool
2986 decl_address_invariant_p (const_tree op)
2987 {
2988 /* The conditions below are slightly less strict than the one in
2989 staticp. */
2990
2991 switch (TREE_CODE (op))
2992 {
2993 case PARM_DECL:
2994 case RESULT_DECL:
2995 case LABEL_DECL:
2996 case FUNCTION_DECL:
2997 return true;
2998
2999 case VAR_DECL:
3000 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3001 || DECL_THREAD_LOCAL_P (op)
3002 || DECL_CONTEXT (op) == current_function_decl
3003 || decl_function_context (op) == current_function_decl)
3004 return true;
3005 break;
3006
3007 case CONST_DECL:
3008 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3009 || decl_function_context (op) == current_function_decl)
3010 return true;
3011 break;
3012
3013 default:
3014 break;
3015 }
3016
3017 return false;
3018 }
3019
3020 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3021
3022 bool
3023 decl_address_ip_invariant_p (const_tree op)
3024 {
3025 /* The conditions below are slightly less strict than the one in
3026 staticp. */
3027
3028 switch (TREE_CODE (op))
3029 {
3030 case LABEL_DECL:
3031 case FUNCTION_DECL:
3032 case STRING_CST:
3033 return true;
3034
3035 case VAR_DECL:
3036 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3037 && !DECL_DLLIMPORT_P (op))
3038 || DECL_THREAD_LOCAL_P (op))
3039 return true;
3040 break;
3041
3042 case CONST_DECL:
3043 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3044 return true;
3045 break;
3046
3047 default:
3048 break;
3049 }
3050
3051 return false;
3052 }
3053
3054
3055 /* Return true if T is function-invariant (internal function, does
3056 not handle arithmetic; that's handled in skip_simple_arithmetic and
3057 tree_invariant_p). */
3058
3059 static bool tree_invariant_p (tree t);
3060
3061 static bool
3062 tree_invariant_p_1 (tree t)
3063 {
3064 tree op;
3065
3066 if (TREE_CONSTANT (t)
3067 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3068 return true;
3069
3070 switch (TREE_CODE (t))
3071 {
3072 case SAVE_EXPR:
3073 return true;
3074
3075 case ADDR_EXPR:
3076 op = TREE_OPERAND (t, 0);
3077 while (handled_component_p (op))
3078 {
3079 switch (TREE_CODE (op))
3080 {
3081 case ARRAY_REF:
3082 case ARRAY_RANGE_REF:
3083 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3084 || TREE_OPERAND (op, 2) != NULL_TREE
3085 || TREE_OPERAND (op, 3) != NULL_TREE)
3086 return false;
3087 break;
3088
3089 case COMPONENT_REF:
3090 if (TREE_OPERAND (op, 2) != NULL_TREE)
3091 return false;
3092 break;
3093
3094 default:;
3095 }
3096 op = TREE_OPERAND (op, 0);
3097 }
3098
3099 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3100
3101 default:
3102 break;
3103 }
3104
3105 return false;
3106 }
3107
3108 /* Return true if T is function-invariant. */
3109
3110 static bool
3111 tree_invariant_p (tree t)
3112 {
3113 tree inner = skip_simple_arithmetic (t);
3114 return tree_invariant_p_1 (inner);
3115 }
3116
3117 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3118 Do this to any expression which may be used in more than one place,
3119 but must be evaluated only once.
3120
3121 Normally, expand_expr would reevaluate the expression each time.
3122 Calling save_expr produces something that is evaluated and recorded
3123 the first time expand_expr is called on it. Subsequent calls to
3124 expand_expr just reuse the recorded value.
3125
3126 The call to expand_expr that generates code that actually computes
3127 the value is the first call *at compile time*. Subsequent calls
3128 *at compile time* generate code to use the saved value.
3129 This produces the correct result provided that *at run time* control
3130 always flows through the insns made by the first expand_expr
3131 before reaching the other places where the save_expr was evaluated.
3132 You, the caller of save_expr, must make sure this is so.
3133
3134 Constants, and certain read-only nodes, are returned with no
3135 SAVE_EXPR because that is safe. Expressions containing placeholders
3136 are not touched; see tree.def for an explanation of what these
3137 are used for. */
3138
3139 tree
3140 save_expr (tree expr)
3141 {
3142 tree t = fold (expr);
3143 tree inner;
3144
3145 /* If the tree evaluates to a constant, then we don't want to hide that
3146 fact (i.e. this allows further folding, and direct checks for constants).
3147 However, a read-only object that has side effects cannot be bypassed.
3148 Since it is no problem to reevaluate literals, we just return the
3149 literal node. */
3150 inner = skip_simple_arithmetic (t);
3151 if (TREE_CODE (inner) == ERROR_MARK)
3152 return inner;
3153
3154 if (tree_invariant_p_1 (inner))
3155 return t;
3156
3157 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3158 it means that the size or offset of some field of an object depends on
3159 the value within another field.
3160
3161 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3162 and some variable since it would then need to be both evaluated once and
3163 evaluated more than once. Front ends must ensure this case cannot
3164 happen by surrounding any such subexpressions in their own SAVE_EXPR
3165 and forcing evaluation at the proper time. */
3166 if (contains_placeholder_p (inner))
3167 return t;
3168
3169 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3170 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3171
3172 /* This expression might be placed ahead of a jump to ensure that the
3173 value was computed on both sides of the jump. So make sure it isn't
3174 eliminated as dead. */
3175 TREE_SIDE_EFFECTS (t) = 1;
3176 return t;
3177 }
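
/* As an illustrative sketch (the construct and the function name "f" are
   hypothetical): a front end expanding "a[f ()] += 1", where the index
   expression has a side effect, can wrap the index once,

     idx = save_expr (idx);

   and then use IDX both to read the old element and to store the new
   one; the first expansion computes and records the value, later uses
   reuse it, so the call to f happens exactly once.  */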
3178
3179 /* Look inside EXPR into any simple arithmetic operations. Return the
3180 outermost non-arithmetic or non-invariant node. */
3181
3182 tree
3183 skip_simple_arithmetic (tree expr)
3184 {
3185 /* We don't care about whether this can be used as an lvalue in this
3186 context. */
3187 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3188 expr = TREE_OPERAND (expr, 0);
3189
3190 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3191 a constant, it will be more efficient to not make another SAVE_EXPR since
3192 it will allow better simplification and GCSE will be able to merge the
3193 computations if they actually occur. */
3194 while (true)
3195 {
3196 if (UNARY_CLASS_P (expr))
3197 expr = TREE_OPERAND (expr, 0);
3198 else if (BINARY_CLASS_P (expr))
3199 {
3200 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3201 expr = TREE_OPERAND (expr, 0);
3202 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3203 expr = TREE_OPERAND (expr, 1);
3204 else
3205 break;
3206 }
3207 else
3208 break;
3209 }
3210
3211 return expr;
3212 }
3213
3214 /* Look inside EXPR into simple arithmetic operations involving constants.
3215 Return the outermost non-arithmetic or non-constant node. */
3216
3217 tree
3218 skip_simple_constant_arithmetic (tree expr)
3219 {
3220 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3221 expr = TREE_OPERAND (expr, 0);
3222
3223 while (true)
3224 {
3225 if (UNARY_CLASS_P (expr))
3226 expr = TREE_OPERAND (expr, 0);
3227 else if (BINARY_CLASS_P (expr))
3228 {
3229 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3230 expr = TREE_OPERAND (expr, 0);
3231 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3232 expr = TREE_OPERAND (expr, 1);
3233 else
3234 break;
3235 }
3236 else
3237 break;
3238 }
3239
3240 return expr;
3241 }
3242
3243 /* Return which tree structure is used by T. */
3244
3245 enum tree_node_structure_enum
3246 tree_node_structure (const_tree t)
3247 {
3248 const enum tree_code code = TREE_CODE (t);
3249 return tree_node_structure_for_code (code);
3250 }
3251
3252 /* Set various status flags when building a CALL_EXPR object T. */
3253
3254 static void
3255 process_call_operands (tree t)
3256 {
3257 bool side_effects = TREE_SIDE_EFFECTS (t);
3258 bool read_only = false;
3259 int i = call_expr_flags (t);
3260
3261 /* Calls have side-effects, except those to const or pure functions. */
3262 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3263 side_effects = true;
3264 /* Propagate TREE_READONLY of arguments for const functions. */
3265 if (i & ECF_CONST)
3266 read_only = true;
3267
3268 if (!side_effects || read_only)
3269 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3270 {
3271 tree op = TREE_OPERAND (t, i);
3272 if (op && TREE_SIDE_EFFECTS (op))
3273 side_effects = true;
3274 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3275 read_only = false;
3276 }
3277
3278 TREE_SIDE_EFFECTS (t) = side_effects;
3279 TREE_READONLY (t) = read_only;
3280 }
3281 \f
3282 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3283 size or offset that depends on a field within a record. */
3284
3285 bool
3286 contains_placeholder_p (const_tree exp)
3287 {
3288 enum tree_code code;
3289
3290 if (!exp)
3291 return 0;
3292
3293 code = TREE_CODE (exp);
3294 if (code == PLACEHOLDER_EXPR)
3295 return 1;
3296
3297 switch (TREE_CODE_CLASS (code))
3298 {
3299 case tcc_reference:
3300 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3301 position computations since they will be converted into a
3302 WITH_RECORD_EXPR involving the reference, which we assume
3303 here will be valid. */
3304 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3305
3306 case tcc_exceptional:
3307 if (code == TREE_LIST)
3308 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3309 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3310 break;
3311
3312 case tcc_unary:
3313 case tcc_binary:
3314 case tcc_comparison:
3315 case tcc_expression:
3316 switch (code)
3317 {
3318 case COMPOUND_EXPR:
3319 /* Ignoring the first operand isn't quite right, but works best. */
3320 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3321
3322 case COND_EXPR:
3323 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3324 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3325 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3326
3327 case SAVE_EXPR:
3328 /* The save_expr function never wraps anything containing
3329 a PLACEHOLDER_EXPR. */
3330 return 0;
3331
3332 default:
3333 break;
3334 }
3335
3336 switch (TREE_CODE_LENGTH (code))
3337 {
3338 case 1:
3339 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3340 case 2:
3341 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3342 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3343 default:
3344 return 0;
3345 }
3346
3347 case tcc_vl_exp:
3348 switch (code)
3349 {
3350 case CALL_EXPR:
3351 {
3352 const_tree arg;
3353 const_call_expr_arg_iterator iter;
3354 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3355 if (CONTAINS_PLACEHOLDER_P (arg))
3356 return 1;
3357 return 0;
3358 }
3359 default:
3360 return 0;
3361 }
3362
3363 default:
3364 return 0;
3365 }
3366 return 0;
3367 }
3368
3369 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3370 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3371 field positions. */
3372
3373 static bool
3374 type_contains_placeholder_1 (const_tree type)
3375 {
3376 /* If the size contains a placeholder or the parent type (component type in
3377 the case of arrays) type involves a placeholder, this type does. */
3378 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3379 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3380 || (!POINTER_TYPE_P (type)
3381 && TREE_TYPE (type)
3382 && type_contains_placeholder_p (TREE_TYPE (type))))
3383 return true;
3384
3385 /* Now do type-specific checks. Note that the last part of the check above
3386 greatly limits what we have to do below. */
3387 switch (TREE_CODE (type))
3388 {
3389 case VOID_TYPE:
3390 case COMPLEX_TYPE:
3391 case ENUMERAL_TYPE:
3392 case BOOLEAN_TYPE:
3393 case POINTER_TYPE:
3394 case OFFSET_TYPE:
3395 case REFERENCE_TYPE:
3396 case METHOD_TYPE:
3397 case FUNCTION_TYPE:
3398 case VECTOR_TYPE:
3399 case NULLPTR_TYPE:
3400 return false;
3401
3402 case INTEGER_TYPE:
3403 case REAL_TYPE:
3404 case FIXED_POINT_TYPE:
3405 /* Here we just check the bounds. */
3406 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3407 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3408
3409 case ARRAY_TYPE:
3410 /* We have already checked the component type above, so just check the
3411 domain type. */
3412 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3413
3414 case RECORD_TYPE:
3415 case UNION_TYPE:
3416 case QUAL_UNION_TYPE:
3417 {
3418 tree field;
3419
3420 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3421 if (TREE_CODE (field) == FIELD_DECL
3422 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3423 || (TREE_CODE (type) == QUAL_UNION_TYPE
3424 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3425 || type_contains_placeholder_p (TREE_TYPE (field))))
3426 return true;
3427
3428 return false;
3429 }
3430
3431 default:
3432 gcc_unreachable ();
3433 }
3434 }
3435
3436 /* Wrapper around above function used to cache its result. */
3437
3438 bool
3439 type_contains_placeholder_p (tree type)
3440 {
3441 bool result;
3442
3443 /* If the contains_placeholder_bits field has been initialized,
3444 then we know the answer. */
3445 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3446 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3447
3448 /* Indicate that we've seen this type node, and the answer is false.
3449 This is what we want to return if we run into recursion via fields. */
3450 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3451
3452 /* Compute the real value. */
3453 result = type_contains_placeholder_1 (type);
3454
3455 /* Store the real value. */
3456 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3457
3458 return result;
3459 }
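
/* The cache above stores the answer biased by one -- 0 means "not yet
   computed", 1 means "no placeholder", 2 means "contains a placeholder"
   -- which is why the function returns the stored value minus 1 and
   pre-seeds the field with 1 to break recursion through
   self-referential field types.  */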
3460 \f
3461 /* Push tree EXP onto vector QUEUE if it is not already present. */
3462
3463 static void
3464 push_without_duplicates (tree exp, vec<tree> *queue)
3465 {
3466 unsigned int i;
3467 tree iter;
3468
3469 FOR_EACH_VEC_ELT (*queue, i, iter)
3470 if (simple_cst_equal (iter, exp) == 1)
3471 break;
3472
3473 if (!iter)
3474 queue->safe_push (exp);
3475 }
3476
3477 /* Given a tree EXP, find all occurrences of references to fields
3478 in a PLACEHOLDER_EXPR and place them in vector REFS without
3479 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3480 we assume here that EXP contains only arithmetic expressions
3481 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3482 argument list. */
3483
3484 void
3485 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3486 {
3487 enum tree_code code = TREE_CODE (exp);
3488 tree inner;
3489 int i;
3490
3491 /* We handle TREE_LIST and COMPONENT_REF separately. */
3492 if (code == TREE_LIST)
3493 {
3494 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3495 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3496 }
3497 else if (code == COMPONENT_REF)
3498 {
3499 for (inner = TREE_OPERAND (exp, 0);
3500 REFERENCE_CLASS_P (inner);
3501 inner = TREE_OPERAND (inner, 0))
3502 ;
3503
3504 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3505 push_without_duplicates (exp, refs);
3506 else
3507 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3508 }
3509 else
3510 switch (TREE_CODE_CLASS (code))
3511 {
3512 case tcc_constant:
3513 break;
3514
3515 case tcc_declaration:
3516 /* Variables allocated to static storage can stay. */
3517 if (!TREE_STATIC (exp))
3518 push_without_duplicates (exp, refs);
3519 break;
3520
3521 case tcc_expression:
3522 /* This is the pattern built in ada/make_aligning_type. */
3523 if (code == ADDR_EXPR
3524 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3525 {
3526 push_without_duplicates (exp, refs);
3527 break;
3528 }
3529
3530 /* Fall through... */
3531
3532 case tcc_exceptional:
3533 case tcc_unary:
3534 case tcc_binary:
3535 case tcc_comparison:
3536 case tcc_reference:
3537 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3538 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3539 break;
3540
3541 case tcc_vl_exp:
3542 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3543 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3544 break;
3545
3546 default:
3547 gcc_unreachable ();
3548 }
3549 }
3550
3551 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3552 return a tree with all occurrences of references to F in a
3553 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3554 CONST_DECLs. Note that we assume here that EXP contains only
3555 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3556 occurring only in their argument list. */
3557
3558 tree
3559 substitute_in_expr (tree exp, tree f, tree r)
3560 {
3561 enum tree_code code = TREE_CODE (exp);
3562 tree op0, op1, op2, op3;
3563 tree new_tree;
3564
3565 /* We handle TREE_LIST and COMPONENT_REF separately. */
3566 if (code == TREE_LIST)
3567 {
3568 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3569 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3570 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3571 return exp;
3572
3573 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3574 }
3575 else if (code == COMPONENT_REF)
3576 {
3577 tree inner;
3578
3579 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3580 and it is the right field, replace it with R. */
3581 for (inner = TREE_OPERAND (exp, 0);
3582 REFERENCE_CLASS_P (inner);
3583 inner = TREE_OPERAND (inner, 0))
3584 ;
3585
3586 /* The field. */
3587 op1 = TREE_OPERAND (exp, 1);
3588
3589 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3590 return r;
3591
3592 /* If this expression hasn't been completed yet, leave it alone. */
3593 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3594 return exp;
3595
3596 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3597 if (op0 == TREE_OPERAND (exp, 0))
3598 return exp;
3599
3600 new_tree
3601 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3602 }
3603 else
3604 switch (TREE_CODE_CLASS (code))
3605 {
3606 case tcc_constant:
3607 return exp;
3608
3609 case tcc_declaration:
3610 if (exp == f)
3611 return r;
3612 else
3613 return exp;
3614
3615 case tcc_expression:
3616 if (exp == f)
3617 return r;
3618
3619 /* Fall through... */
3620
3621 case tcc_exceptional:
3622 case tcc_unary:
3623 case tcc_binary:
3624 case tcc_comparison:
3625 case tcc_reference:
3626 switch (TREE_CODE_LENGTH (code))
3627 {
3628 case 0:
3629 return exp;
3630
3631 case 1:
3632 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3633 if (op0 == TREE_OPERAND (exp, 0))
3634 return exp;
3635
3636 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3637 break;
3638
3639 case 2:
3640 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3641 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3642
3643 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3644 return exp;
3645
3646 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3647 break;
3648
3649 case 3:
3650 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3651 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3652 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3653
3654 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3655 && op2 == TREE_OPERAND (exp, 2))
3656 return exp;
3657
3658 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3659 break;
3660
3661 case 4:
3662 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3663 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3664 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3665 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3666
3667 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3668 && op2 == TREE_OPERAND (exp, 2)
3669 && op3 == TREE_OPERAND (exp, 3))
3670 return exp;
3671
3672 new_tree
3673 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3674 break;
3675
3676 default:
3677 gcc_unreachable ();
3678 }
3679 break;
3680
3681 case tcc_vl_exp:
3682 {
3683 int i;
3684
3685 new_tree = NULL_TREE;
3686
3687 /* If we are trying to replace F with a constant, inline back
3688 functions which do nothing else than computing a value from
3689 the arguments they are passed. This makes it possible to
3690 fold partially or entirely the replacement expression. */
3691 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3692 {
3693 tree t = maybe_inline_call_in_expr (exp);
3694 if (t)
3695 return SUBSTITUTE_IN_EXPR (t, f, r);
3696 }
3697
3698 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3699 {
3700 tree op = TREE_OPERAND (exp, i);
3701 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3702 if (new_op != op)
3703 {
3704 if (!new_tree)
3705 new_tree = copy_node (exp);
3706 TREE_OPERAND (new_tree, i) = new_op;
3707 }
3708 }
3709
3710 if (new_tree)
3711 {
3712 new_tree = fold (new_tree);
3713 if (TREE_CODE (new_tree) == CALL_EXPR)
3714 process_call_operands (new_tree);
3715 }
3716 else
3717 return exp;
3718 }
3719 break;
3720
3721 default:
3722 gcc_unreachable ();
3723 }
3724
3725 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3726
3727 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3728 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3729
3730 return new_tree;
3731 }
3732
3733 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3734 for it within OBJ, a tree that is an object or a chain of references. */
3735
3736 tree
3737 substitute_placeholder_in_expr (tree exp, tree obj)
3738 {
3739 enum tree_code code = TREE_CODE (exp);
3740 tree op0, op1, op2, op3;
3741 tree new_tree;
3742
3743 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3744 in the chain of OBJ. */
3745 if (code == PLACEHOLDER_EXPR)
3746 {
3747 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3748 tree elt;
3749
3750 for (elt = obj; elt != 0;
3751 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3752 || TREE_CODE (elt) == COND_EXPR)
3753 ? TREE_OPERAND (elt, 1)
3754 : (REFERENCE_CLASS_P (elt)
3755 || UNARY_CLASS_P (elt)
3756 || BINARY_CLASS_P (elt)
3757 || VL_EXP_CLASS_P (elt)
3758 || EXPRESSION_CLASS_P (elt))
3759 ? TREE_OPERAND (elt, 0) : 0))
3760 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3761 return elt;
3762
3763 for (elt = obj; elt != 0;
3764 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3765 || TREE_CODE (elt) == COND_EXPR)
3766 ? TREE_OPERAND (elt, 1)
3767 : (REFERENCE_CLASS_P (elt)
3768 || UNARY_CLASS_P (elt)
3769 || BINARY_CLASS_P (elt)
3770 || VL_EXP_CLASS_P (elt)
3771 || EXPRESSION_CLASS_P (elt))
3772 ? TREE_OPERAND (elt, 0) : 0))
3773 if (POINTER_TYPE_P (TREE_TYPE (elt))
3774 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3775 == need_type))
3776 return fold_build1 (INDIRECT_REF, need_type, elt);
3777
3778 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3779 survives until RTL generation, there will be an error. */
3780 return exp;
3781 }
3782
3783 /* TREE_LIST is special because we need to look at TREE_VALUE
3784 and TREE_CHAIN, not TREE_OPERANDS. */
3785 else if (code == TREE_LIST)
3786 {
3787 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3788 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3789 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3790 return exp;
3791
3792 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3793 }
3794 else
3795 switch (TREE_CODE_CLASS (code))
3796 {
3797 case tcc_constant:
3798 case tcc_declaration:
3799 return exp;
3800
3801 case tcc_exceptional:
3802 case tcc_unary:
3803 case tcc_binary:
3804 case tcc_comparison:
3805 case tcc_expression:
3806 case tcc_reference:
3807 case tcc_statement:
3808 switch (TREE_CODE_LENGTH (code))
3809 {
3810 case 0:
3811 return exp;
3812
3813 case 1:
3814 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3815 if (op0 == TREE_OPERAND (exp, 0))
3816 return exp;
3817
3818 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3819 break;
3820
3821 case 2:
3822 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3823 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3824
3825 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3826 return exp;
3827
3828 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3829 break;
3830
3831 case 3:
3832 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3833 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3834 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3835
3836 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3837 && op2 == TREE_OPERAND (exp, 2))
3838 return exp;
3839
3840 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3841 break;
3842
3843 case 4:
3844 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3845 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3846 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3847 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3848
3849 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3850 && op2 == TREE_OPERAND (exp, 2)
3851 && op3 == TREE_OPERAND (exp, 3))
3852 return exp;
3853
3854 new_tree
3855 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3856 break;
3857
3858 default:
3859 gcc_unreachable ();
3860 }
3861 break;
3862
3863 case tcc_vl_exp:
3864 {
3865 int i;
3866
3867 new_tree = NULL_TREE;
3868
3869 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3870 {
3871 tree op = TREE_OPERAND (exp, i);
3872 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3873 if (new_op != op)
3874 {
3875 if (!new_tree)
3876 new_tree = copy_node (exp);
3877 TREE_OPERAND (new_tree, i) = new_op;
3878 }
3879 }
3880
3881 if (new_tree)
3882 {
3883 new_tree = fold (new_tree);
3884 if (TREE_CODE (new_tree) == CALL_EXPR)
3885 process_call_operands (new_tree);
3886 }
3887 else
3888 return exp;
3889 }
3890 break;
3891
3892 default:
3893 gcc_unreachable ();
3894 }
3895
3896 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3897
3898 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3899 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3900
3901 return new_tree;
3902 }
3903 \f
3904
3905 /* Subroutine of stabilize_reference; this is called for subtrees of
3906 references. Any expression with side-effects must be put in a SAVE_EXPR
3907 to ensure that it is only evaluated once.
3908
3909 We don't put SAVE_EXPR nodes around everything, because assigning very
3910 simple expressions to temporaries causes us to miss good opportunities
3911 for optimizations. Among other things, the opportunity to fold in the
3912 addition of a constant into an addressing mode often gets lost, e.g.
3913 "y[i+1] += x;". In general, we take the approach that we should not make
3914 an assignment unless we are forced into it - i.e., that any non-side effect
3915 operator should be allowed, and that cse should take care of coalescing
3916 multiple utterances of the same expression should that prove fruitful. */
3917
3918 static tree
3919 stabilize_reference_1 (tree e)
3920 {
3921 tree result;
3922 enum tree_code code = TREE_CODE (e);
3923
3924 /* We cannot ignore const expressions because one might be a reference
3925 to a const array whose index contains side effects. But we can
3926 ignore things that are actual constants or that have already been
3927 handled by this function. */
3928
3929 if (tree_invariant_p (e))
3930 return e;
3931
3932 switch (TREE_CODE_CLASS (code))
3933 {
3934 case tcc_exceptional:
3935 case tcc_type:
3936 case tcc_declaration:
3937 case tcc_comparison:
3938 case tcc_statement:
3939 case tcc_expression:
3940 case tcc_reference:
3941 case tcc_vl_exp:
3942 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3943 so that it will only be evaluated once. */
3944 /* The reference (r) and comparison (<) classes could be handled as
3945 below, but it is generally faster to only evaluate them once. */
3946 if (TREE_SIDE_EFFECTS (e))
3947 return save_expr (e);
3948 return e;
3949
3950 case tcc_constant:
3951 /* Constants need no processing. In fact, we should never reach
3952 here. */
3953 return e;
3954
3955 case tcc_binary:
3956 /* Division is slow and tends to be compiled with jumps,
3957 especially the division by powers of 2 that is often
3958 found inside of an array reference. So do it just once. */
3959 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3960 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3961 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3962 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3963 return save_expr (e);
3964 /* Recursively stabilize each operand. */
3965 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3966 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3967 break;
3968
3969 case tcc_unary:
3970 /* Recursively stabilize each operand. */
3971 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3972 break;
3973
3974 default:
3975 gcc_unreachable ();
3976 }
3977
3978 TREE_TYPE (result) = TREE_TYPE (e);
3979 TREE_READONLY (result) = TREE_READONLY (e);
3980 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3981 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3982
3983 return result;
3984 }
3985
3986 /* Stabilize a reference so that we can use it any number of times
3987 without causing its operands to be evaluated more than once.
3988 Returns the stabilized reference. This works by means of save_expr,
3989 so see the caveats in the comments about save_expr.
3990
3991 Also allows conversion expressions whose operands are references.
3992 Any other kind of expression is returned unchanged. */
3993
3994 tree
3995 stabilize_reference (tree ref)
3996 {
3997 tree result;
3998 enum tree_code code = TREE_CODE (ref);
3999
4000 switch (code)
4001 {
4002 case VAR_DECL:
4003 case PARM_DECL:
4004 case RESULT_DECL:
4005 /* No action is needed in this case. */
4006 return ref;
4007
4008 CASE_CONVERT:
4009 case FLOAT_EXPR:
4010 case FIX_TRUNC_EXPR:
4011 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4012 break;
4013
4014 case INDIRECT_REF:
4015 result = build_nt (INDIRECT_REF,
4016 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4017 break;
4018
4019 case COMPONENT_REF:
4020 result = build_nt (COMPONENT_REF,
4021 stabilize_reference (TREE_OPERAND (ref, 0)),
4022 TREE_OPERAND (ref, 1), NULL_TREE);
4023 break;
4024
4025 case BIT_FIELD_REF:
4026 result = build_nt (BIT_FIELD_REF,
4027 stabilize_reference (TREE_OPERAND (ref, 0)),
4028 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4029 break;
4030
4031 case ARRAY_REF:
4032 result = build_nt (ARRAY_REF,
4033 stabilize_reference (TREE_OPERAND (ref, 0)),
4034 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4035 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4036 break;
4037
4038 case ARRAY_RANGE_REF:
4039 result = build_nt (ARRAY_RANGE_REF,
4040 stabilize_reference (TREE_OPERAND (ref, 0)),
4041 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4042 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4043 break;
4044
4045 case COMPOUND_EXPR:
4046 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4047 it wouldn't be ignored. This matters when dealing with
4048 volatiles. */
4049 return stabilize_reference_1 (ref);
4050
4051 /* If arg isn't a kind of lvalue we recognize, make no change.
4052 Caller should recognize the error for an invalid lvalue. */
4053 default:
4054 return ref;
4055
4056 case ERROR_MARK:
4057 return error_mark_node;
4058 }
4059
4060 TREE_TYPE (result) = TREE_TYPE (ref);
4061 TREE_READONLY (result) = TREE_READONLY (ref);
4062 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4063 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4064
4065 return result;
4066 }
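
/* Usage sketch (illustrative only, not taken from the GCC sources; the
   variables LHS and RHS below are hypothetical front-end trees). A front
   end that needs to use an lvalue twice, e.g. for a compound assignment,
   can stabilize it first:

     tree stable = stabilize_reference (lhs);
     tree update = build2 (MODIFY_EXPR, TREE_TYPE (stable), stable,
                           build2 (PLUS_EXPR, TREE_TYPE (stable),
                                   stable, rhs));

   Any side-effecting array index or divisor inside LHS is wrapped in a
   SAVE_EXPR, so it is evaluated only once even though STABLE appears
   several times in the result. */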
4067 \f
4068 /* Low-level constructors for expressions. */
4069
4070 /* A helper function for build1 and constant folders. Set TREE_CONSTANT
4071 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4072
4073 void
4074 recompute_tree_invariant_for_addr_expr (tree t)
4075 {
4076 tree node;
4077 bool tc = true, se = false;
4078
4079 /* We start out assuming this address is both invariant and constant and
4080 that it has no side effects. Now go down any handled components and see if
4081 any of them involve offsets that are either non-constant or non-invariant.
4082 Also check for side-effects.
4083
4084 ??? Note that this code makes no attempt to deal with the case where
4085 taking the address of something causes a copy due to misalignment. */
4086
4087 #define UPDATE_FLAGS(NODE) \
4088 do { tree _node = (NODE); \
4089 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4090 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4091
4092 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4093 node = TREE_OPERAND (node, 0))
4094 {
4095 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4096 array reference (probably made temporarily by the G++ front end),
4097 so ignore all the operands. */
4098 if ((TREE_CODE (node) == ARRAY_REF
4099 || TREE_CODE (node) == ARRAY_RANGE_REF)
4100 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4101 {
4102 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4103 if (TREE_OPERAND (node, 2))
4104 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4105 if (TREE_OPERAND (node, 3))
4106 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4107 }
4108 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4109 FIELD_DECL, apparently. The G++ front end can put something else
4110 there, at least temporarily. */
4111 else if (TREE_CODE (node) == COMPONENT_REF
4112 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4113 {
4114 if (TREE_OPERAND (node, 2))
4115 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4116 }
4117 }
4118
4119 node = lang_hooks.expr_to_decl (node, &tc, &se);
4120
4121 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4122 the address, since &(*a)->b is a form of addition. If it's a constant, the
4123 address is constant too. If it's a decl, its address is constant if the
4124 decl is static. Everything else is not constant and, furthermore,
4125 taking the address of a volatile variable is not volatile. */
4126 if (TREE_CODE (node) == INDIRECT_REF
4127 || TREE_CODE (node) == MEM_REF)
4128 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4129 else if (CONSTANT_CLASS_P (node))
4130 ;
4131 else if (DECL_P (node))
4132 tc &= (staticp (node) != NULL_TREE);
4133 else
4134 {
4135 tc = false;
4136 se |= TREE_SIDE_EFFECTS (node);
4137 }
4138
4139
4140 TREE_CONSTANT (t) = tc;
4141 TREE_SIDE_EFFECTS (t) = se;
4142 #undef UPDATE_FLAGS
4143 }
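
/* For example (an illustrative sketch; V is a hypothetical VAR_DECL):

     tree addr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (v)), v);

   build1 calls this function for the ADDR_EXPR, and the result gets
   TREE_CONSTANT only when staticp (v) is non-NULL; taking the address of
   an automatic variable therefore does not yield a constant address. */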
4144
4145 /* Build an expression of code CODE, data type TYPE, and operands as
4146 specified. Expressions and reference nodes can be created this way.
4147 Constants, decls, types and misc nodes cannot be.
4148
4149 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4150 enough for all extant tree codes. */
4151
4152 tree
4153 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4154 {
4155 tree t;
4156
4157 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4158
4159 t = make_node_stat (code PASS_MEM_STAT);
4160 TREE_TYPE (t) = tt;
4161
4162 return t;
4163 }
4164
4165 tree
4166 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4167 {
4168 int length = sizeof (struct tree_exp);
4169 tree t;
4170
4171 record_node_allocation_statistics (code, length);
4172
4173 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4174
4175 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4176
4177 memset (t, 0, sizeof (struct tree_common));
4178
4179 TREE_SET_CODE (t, code);
4180
4181 TREE_TYPE (t) = type;
4182 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4183 TREE_OPERAND (t, 0) = node;
4184 if (node && !TYPE_P (node))
4185 {
4186 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4187 TREE_READONLY (t) = TREE_READONLY (node);
4188 }
4189
4190 if (TREE_CODE_CLASS (code) == tcc_statement)
4191 TREE_SIDE_EFFECTS (t) = 1;
4192 else switch (code)
4193 {
4194 case VA_ARG_EXPR:
4195 /* All of these have side-effects, no matter what their
4196 operands are. */
4197 TREE_SIDE_EFFECTS (t) = 1;
4198 TREE_READONLY (t) = 0;
4199 break;
4200
4201 case INDIRECT_REF:
4202 /* Whether a dereference is readonly has nothing to do with whether
4203 its operand is readonly. */
4204 TREE_READONLY (t) = 0;
4205 break;
4206
4207 case ADDR_EXPR:
4208 if (node)
4209 recompute_tree_invariant_for_addr_expr (t);
4210 break;
4211
4212 default:
4213 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4214 && node && !TYPE_P (node)
4215 && TREE_CONSTANT (node))
4216 TREE_CONSTANT (t) = 1;
4217 if (TREE_CODE_CLASS (code) == tcc_reference
4218 && node && TREE_THIS_VOLATILE (node))
4219 TREE_THIS_VOLATILE (t) = 1;
4220 break;
4221 }
4222
4223 return t;
4224 }
4225
4226 #define PROCESS_ARG(N) \
4227 do { \
4228 TREE_OPERAND (t, N) = arg##N; \
4229 if (arg##N && !TYPE_P (arg##N)) \
4230 { \
4231 if (TREE_SIDE_EFFECTS (arg##N)) \
4232 side_effects = 1; \
4233 if (!TREE_READONLY (arg##N) \
4234 && !CONSTANT_CLASS_P (arg##N)) \
4235 (void) (read_only = 0); \
4236 if (!TREE_CONSTANT (arg##N)) \
4237 (void) (constant = 0); \
4238 } \
4239 } while (0)
4240
4241 tree
4242 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4243 {
4244 bool constant, read_only, side_effects;
4245 tree t;
4246
4247 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4248
4249 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4250 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4251 /* When sizetype precision doesn't match that of pointers
4252 we need to be able to build explicit extensions or truncations
4253 of the offset argument. */
4254 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4255 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4256 && TREE_CODE (arg1) == INTEGER_CST);
4257
4258 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4259 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4260 && ptrofftype_p (TREE_TYPE (arg1)));
4261
4262 t = make_node_stat (code PASS_MEM_STAT);
4263 TREE_TYPE (t) = tt;
4264
4265 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4266 result based on those same flags for the arguments. But if the
4267 arguments aren't really even `tree' expressions, we shouldn't be trying
4268 to do this. */
4269
4270 /* Expressions without side effects may be constant if their
4271 arguments are as well. */
4272 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4273 || TREE_CODE_CLASS (code) == tcc_binary);
4274 read_only = 1;
4275 side_effects = TREE_SIDE_EFFECTS (t);
4276
4277 PROCESS_ARG (0);
4278 PROCESS_ARG (1);
4279
4280 TREE_READONLY (t) = read_only;
4281 TREE_CONSTANT (t) = constant;
4282 TREE_SIDE_EFFECTS (t) = side_effects;
4283 TREE_THIS_VOLATILE (t)
4284 = (TREE_CODE_CLASS (code) == tcc_reference
4285 && arg0 && TREE_THIS_VOLATILE (arg0));
4286
4287 return t;
4288 }
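
/* A minimal example of the flag propagation above (illustrative only;
   both operands are INTEGER_CSTs built on the spot):

     tree sum = build2 (PLUS_EXPR, integer_type_node,
                        build_int_cst (integer_type_node, 40),
                        build_int_cst (integer_type_node, 2));

   Since both arguments are constants, PROCESS_ARG leaves `constant' and
   `read_only' set, so SUM ends up with TREE_CONSTANT and TREE_READONLY
   set and TREE_SIDE_EFFECTS clear. */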
4289
4290
4291 tree
4292 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4293 tree arg2 MEM_STAT_DECL)
4294 {
4295 bool constant, read_only, side_effects;
4296 tree t;
4297
4298 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4299 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4300
4301 t = make_node_stat (code PASS_MEM_STAT);
4302 TREE_TYPE (t) = tt;
4303
4304 read_only = 1;
4305
4306 /* As a special exception, if COND_EXPR has NULL branches, we
4307 assume that it is a gimple statement and always consider
4308 it to have side effects. */
4309 if (code == COND_EXPR
4310 && tt == void_type_node
4311 && arg1 == NULL_TREE
4312 && arg2 == NULL_TREE)
4313 side_effects = true;
4314 else
4315 side_effects = TREE_SIDE_EFFECTS (t);
4316
4317 PROCESS_ARG (0);
4318 PROCESS_ARG (1);
4319 PROCESS_ARG (2);
4320
4321 if (code == COND_EXPR)
4322 TREE_READONLY (t) = read_only;
4323
4324 TREE_SIDE_EFFECTS (t) = side_effects;
4325 TREE_THIS_VOLATILE (t)
4326 = (TREE_CODE_CLASS (code) == tcc_reference
4327 && arg0 && TREE_THIS_VOLATILE (arg0));
4328
4329 return t;
4330 }
4331
4332 tree
4333 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4334 tree arg2, tree arg3 MEM_STAT_DECL)
4335 {
4336 bool constant, read_only, side_effects;
4337 tree t;
4338
4339 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4340
4341 t = make_node_stat (code PASS_MEM_STAT);
4342 TREE_TYPE (t) = tt;
4343
4344 side_effects = TREE_SIDE_EFFECTS (t);
4345
4346 PROCESS_ARG (0);
4347 PROCESS_ARG (1);
4348 PROCESS_ARG (2);
4349 PROCESS_ARG (3);
4350
4351 TREE_SIDE_EFFECTS (t) = side_effects;
4352 TREE_THIS_VOLATILE (t)
4353 = (TREE_CODE_CLASS (code) == tcc_reference
4354 && arg0 && TREE_THIS_VOLATILE (arg0));
4355
4356 return t;
4357 }
4358
4359 tree
4360 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4361 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4362 {
4363 bool constant, read_only, side_effects;
4364 tree t;
4365
4366 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4367
4368 t = make_node_stat (code PASS_MEM_STAT);
4369 TREE_TYPE (t) = tt;
4370
4371 side_effects = TREE_SIDE_EFFECTS (t);
4372
4373 PROCESS_ARG (0);
4374 PROCESS_ARG (1);
4375 PROCESS_ARG (2);
4376 PROCESS_ARG (3);
4377 PROCESS_ARG (4);
4378
4379 TREE_SIDE_EFFECTS (t) = side_effects;
4380 TREE_THIS_VOLATILE (t)
4381 = (TREE_CODE_CLASS (code) == tcc_reference
4382 && arg0 && TREE_THIS_VOLATILE (arg0));
4383
4384 return t;
4385 }
4386
4387 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4388 on the pointer PTR. */
4389
4390 tree
4391 build_simple_mem_ref_loc (location_t loc, tree ptr)
4392 {
4393 HOST_WIDE_INT offset = 0;
4394 tree ptype = TREE_TYPE (ptr);
4395 tree tem;
4396 /* For convenience allow addresses that collapse to a simple base
4397 and offset. */
4398 if (TREE_CODE (ptr) == ADDR_EXPR
4399 && (handled_component_p (TREE_OPERAND (ptr, 0))
4400 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4401 {
4402 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4403 gcc_assert (ptr);
4404 ptr = build_fold_addr_expr (ptr);
4405 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4406 }
4407 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4408 ptr, build_int_cst (ptype, offset));
4409 SET_EXPR_LOCATION (tem, loc);
4410 return tem;
4411 }
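
/* Usage sketch (illustrative; P is a hypothetical pointer decl or SSA name
   of type int *):

     tree deref = build_simple_mem_ref_loc (input_location, p);

   The result is a MEM_REF of type int whose second operand is a zero
   offset of P's pointer type; when P is an ADDR_EXPR of a handled
   component, the component's constant offset is folded into that second
   operand instead. */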
4412
4413 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4414
4415 offset_int
4416 mem_ref_offset (const_tree t)
4417 {
4418 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4419 }
4420
4421 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4422 offset by OFFSET units. */
4423
4424 tree
4425 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4426 {
4427 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4428 build_fold_addr_expr (base),
4429 build_int_cst (ptr_type_node, offset));
4430 tree addr = build1 (ADDR_EXPR, type, ref);
4431 recompute_tree_invariant_for_addr_expr (addr);
4432 return addr;
4433 }
4434
4435 /* Like the build<N> functions, except don't specify the TREE_TYPE
4436 and leave the TREE_SIDE_EFFECTS as 0.
4437 It is permissible for arguments to be null,
4438 or even garbage if their values do not matter. */
4439
4440 tree
4441 build_nt (enum tree_code code, ...)
4442 {
4443 tree t;
4444 int length;
4445 int i;
4446 va_list p;
4447
4448 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4449
4450 va_start (p, code);
4451
4452 t = make_node (code);
4453 length = TREE_CODE_LENGTH (code);
4454
4455 for (i = 0; i < length; i++)
4456 TREE_OPERAND (t, i) = va_arg (p, tree);
4457
4458 va_end (p);
4459 return t;
4460 }
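
/* For instance (an illustrative sketch; OBJECT and FIELD are hypothetical
   trees), a typeless COMPONENT_REF skeleton like the ones built by
   stabilize_reference above can be created with:

     tree ref = build_nt (COMPONENT_REF, object, field, NULL_TREE);

   The caller is then responsible for filling in TREE_TYPE and the various
   flags, as stabilize_reference does. */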
4461
4462 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4463 tree vec. */
4464
4465 tree
4466 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4467 {
4468 tree ret, t;
4469 unsigned int ix;
4470
4471 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4472 CALL_EXPR_FN (ret) = fn;
4473 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4474 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4475 CALL_EXPR_ARG (ret, ix) = t;
4476 return ret;
4477 }
4478 \f
4479 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4480 We do NOT enter this node in any sort of symbol table.
4481
4482 LOC is the location of the decl.
4483
4484 layout_decl is used to set up the decl's storage layout.
4485 Other slots are initialized to 0 or null pointers. */
4486
4487 tree
4488 build_decl_stat (location_t loc, enum tree_code code, tree name,
4489 tree type MEM_STAT_DECL)
4490 {
4491 tree t;
4492
4493 t = make_node_stat (code PASS_MEM_STAT);
4494 DECL_SOURCE_LOCATION (t) = loc;
4495
4496 /* if (type == error_mark_node)
4497 type = integer_type_node; */
4498 /* That is not done, deliberately, so that having error_mark_node
4499 as the type can suppress useless errors in the use of this variable. */
4500
4501 DECL_NAME (t) = name;
4502 TREE_TYPE (t) = type;
4503
4504 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4505 layout_decl (t, 0);
4506
4507 return t;
4508 }
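
/* Usage sketch (illustrative only; the name "tmp" is arbitrary):

     tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                            get_identifier ("tmp"), integer_type_node);

   Because the code is VAR_DECL, layout_decl has already filled in the
   size and alignment of VAR from integer_type_node. */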
4509
4510 /* Build and return a FUNCTION_DECL with NAME and TYPE. */
4511
4512 tree
4513 build_fn_decl (const char *name, tree type)
4514 {
4515 tree id = get_identifier (name);
4516 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4517
4518 DECL_EXTERNAL (decl) = 1;
4519 TREE_PUBLIC (decl) = 1;
4520 DECL_ARTIFICIAL (decl) = 1;
4521 TREE_NOTHROW (decl) = 1;
4522
4523 return decl;
4524 }
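
/* For example (an illustrative sketch; the function name is hypothetical):

     tree fntype = build_function_type_list (void_type_node,
                                             ptr_type_node, NULL_TREE);
     tree decl = build_fn_decl ("__frobnicate", fntype);

   The result is an external, public, artificial and nothrow
   FUNCTION_DECL, suitable for emitting calls to a runtime routine. */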
4525
4526 vec<tree, va_gc> *all_translation_units;
4527
4528 /* Builds a new translation-unit decl with name NAME, queues it in the
4529 global list of translation-unit decls and returns it. */
4530
4531 tree
4532 build_translation_unit_decl (tree name)
4533 {
4534 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4535 name, NULL_TREE);
4536 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4537 vec_safe_push (all_translation_units, tu);
4538 return tu;
4539 }
4540
4541 \f
4542 /* BLOCK nodes are used to represent the structure of binding contours
4543 and declarations, once those contours have been exited and their contents
4544 compiled. This information is used for outputting debugging info. */
4545
4546 tree
4547 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4548 {
4549 tree block = make_node (BLOCK);
4550
4551 BLOCK_VARS (block) = vars;
4552 BLOCK_SUBBLOCKS (block) = subblocks;
4553 BLOCK_SUPERCONTEXT (block) = supercontext;
4554 BLOCK_CHAIN (block) = chain;
4555 return block;
4556 }
4557
4558 \f
4559 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4560
4561 LOC is the location to use in tree T. */
4562
4563 void
4564 protected_set_expr_location (tree t, location_t loc)
4565 {
4566 if (t && CAN_HAVE_LOCATION_P (t))
4567 SET_EXPR_LOCATION (t, loc);
4568 }
4569 \f
4570 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4571 is ATTRIBUTE. */
4572
4573 tree
4574 build_decl_attribute_variant (tree ddecl, tree attribute)
4575 {
4576 DECL_ATTRIBUTES (ddecl) = attribute;
4577 return ddecl;
4578 }
4579
4580 /* Borrowed from hashtab.c iterative_hash implementation. */
4581 #define mix(a,b,c) \
4582 { \
4583 a -= b; a -= c; a ^= (c>>13); \
4584 b -= c; b -= a; b ^= (a<< 8); \
4585 c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \
4586 a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \
4587 b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \
4588 c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \
4589 a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \
4590 b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \
4591 c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \
4592 }
4593
4594
4595 /* Produce a good hash value combining VAL and VAL2. */
4596 hashval_t
4597 iterative_hash_hashval_t (hashval_t val, hashval_t val2)
4598 {
4599 /* the golden ratio; an arbitrary value. */
4600 hashval_t a = 0x9e3779b9;
4601
4602 mix (a, val, val2);
4603 return val2;
4604 }
4605
4606 /* Produce a good hash value combining VAL and VAL2. */
4607 hashval_t
4608 iterative_hash_host_wide_int (HOST_WIDE_INT val, hashval_t val2)
4609 {
4610 if (sizeof (HOST_WIDE_INT) == sizeof (hashval_t))
4611 return iterative_hash_hashval_t (val, val2);
4612 else
4613 {
4614 hashval_t a = (hashval_t) val;
4615 /* Avoid warnings about shifting of more than the width of the type on
4616 hosts that won't execute this path. */
4617 int zero = 0;
4618 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 8 + zero));
4619 mix (a, b, val2);
4620 if (sizeof (HOST_WIDE_INT) > 2 * sizeof (hashval_t))
4621 {
4622 hashval_t a = (hashval_t) (val >> (sizeof (hashval_t) * 16 + zero));
4623 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 24 + zero));
4624 mix (a, b, val2);
4625 }
4626 return val2;
4627 }
4628 }
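
/* Combining several values into one hash (illustrative sketch; TYPE and
   CST are hypothetical trees):

     hashval_t h = 0;
     h = iterative_hash_hashval_t (TYPE_HASH (type), h);
     h = iterative_hash_host_wide_int (TREE_INT_CST_LOW (cst), h);

   Each call folds the new value into the running hash H. */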
4629
4630 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4631 is ATTRIBUTE and its qualifiers are QUALS.
4632
4633 Record such modified types already made so we don't make duplicates. */
4634
4635 tree
4636 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4637 {
4638 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4639 {
4640 hashval_t hashcode = 0;
4641 tree ntype;
4642 int i;
4643 tree t;
4644 enum tree_code code = TREE_CODE (ttype);
4645
4646 /* Building a distinct copy of a tagged type is inappropriate; it
4647 causes breakage in code that expects there to be a one-to-one
4648 relationship between a struct and its fields.
4649 build_duplicate_type is another solution (as used in
4650 handle_transparent_union_attribute), but that doesn't play well
4651 with the stronger C++ type identity model. */
4652 if (TREE_CODE (ttype) == RECORD_TYPE
4653 || TREE_CODE (ttype) == UNION_TYPE
4654 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4655 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4656 {
4657 warning (OPT_Wattributes,
4658 "ignoring attributes applied to %qT after definition",
4659 TYPE_MAIN_VARIANT (ttype));
4660 return build_qualified_type (ttype, quals);
4661 }
4662
4663 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4664 ntype = build_distinct_type_copy (ttype);
4665
4666 TYPE_ATTRIBUTES (ntype) = attribute;
4667
4668 hashcode = iterative_hash_object (code, hashcode);
4669 if (TREE_TYPE (ntype))
4670 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype)),
4671 hashcode);
4672 hashcode = attribute_hash_list (attribute, hashcode);
4673
4674 switch (TREE_CODE (ntype))
4675 {
4676 case FUNCTION_TYPE:
4677 hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode);
4678 break;
4679 case ARRAY_TYPE:
4680 if (TYPE_DOMAIN (ntype))
4681 hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
4682 hashcode);
4683 break;
4684 case INTEGER_TYPE:
4685 t = TYPE_MAX_VALUE (ntype);
4686 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4687 hashcode = iterative_hash_object (TREE_INT_CST_ELT (t, i), hashcode);
4688 break;
4689 case REAL_TYPE:
4690 case FIXED_POINT_TYPE:
4691 {
4692 unsigned int precision = TYPE_PRECISION (ntype);
4693 hashcode = iterative_hash_object (precision, hashcode);
4694 }
4695 break;
4696 default:
4697 break;
4698 }
4699
4700 ntype = type_hash_canon (hashcode, ntype);
4701
4702 /* If the target-dependent attributes make NTYPE different from
4703 its canonical type, we will need to use structural equality
4704 checks for this type. */
4705 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4706 || !comp_type_attributes (ntype, ttype))
4707 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4708 else if (TYPE_CANONICAL (ntype) == ntype)
4709 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4710
4711 ttype = build_qualified_type (ntype, quals);
4712 }
4713 else if (TYPE_QUALS (ttype) != quals)
4714 ttype = build_qualified_type (ttype, quals);
4715
4716 return ttype;
4717 }
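
/* Usage sketch (illustrative; TYPE is a hypothetical non-aggregate type):

     tree attrs = tree_cons (get_identifier ("may_alias"), NULL_TREE,
                             TYPE_ATTRIBUTES (type));
     type = build_type_attribute_qual_variant (type, attrs,
                                               TYPE_QUALS (type));

   For a RECORD_TYPE or other tagged type the new attributes would instead
   be dropped with a -Wattributes warning, as the code above shows. */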
4718
4719 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4720 the same. */
4721
4722 static bool
4723 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4724 {
4725 tree cl1, cl2;
4726 for (cl1 = clauses1, cl2 = clauses2;
4727 cl1 && cl2;
4728 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4729 {
4730 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4731 return false;
4732 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4733 {
4734 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4735 OMP_CLAUSE_DECL (cl2)) != 1)
4736 return false;
4737 }
4738 switch (OMP_CLAUSE_CODE (cl1))
4739 {
4740 case OMP_CLAUSE_ALIGNED:
4741 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4742 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4743 return false;
4744 break;
4745 case OMP_CLAUSE_LINEAR:
4746 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4747 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4748 return false;
4749 break;
4750 case OMP_CLAUSE_SIMDLEN:
4751 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4752 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4753 return false;
4754 default:
4755 break;
4756 }
4757 }
4758 return true;
4759 }
4760
4761 /* Compare two constructor-element-type constants. Return true if the lists
4762 are known to be equal; otherwise return false. */
4763
4764 static bool
4765 simple_cst_list_equal (const_tree l1, const_tree l2)
4766 {
4767 while (l1 != NULL_TREE && l2 != NULL_TREE)
4768 {
4769 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4770 return false;
4771
4772 l1 = TREE_CHAIN (l1);
4773 l2 = TREE_CHAIN (l2);
4774 }
4775
4776 return l1 == l2;
4777 }
4778
4779 /* Compare two attributes for their value identity. Return true if the
4780 attribute values are known to be equal; otherwise return false.
4781 */
4782
4783 static bool
4784 attribute_value_equal (const_tree attr1, const_tree attr2)
4785 {
4786 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4787 return true;
4788
4789 if (TREE_VALUE (attr1) != NULL_TREE
4790 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4791 && TREE_VALUE (attr2) != NULL
4792 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4793 return (simple_cst_list_equal (TREE_VALUE (attr1),
4794 TREE_VALUE (attr2)) == 1);
4795
4796 if ((flag_openmp || flag_openmp_simd)
4797 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4798 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4799 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4800 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4801 TREE_VALUE (attr2));
4802
4803 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4804 }
4805
4806 /* Return 0 if the attributes for two types are incompatible, 1 if they
4807 are compatible, and 2 if they are nearly compatible (which causes a
4808 warning to be generated). */
4809 int
4810 comp_type_attributes (const_tree type1, const_tree type2)
4811 {
4812 const_tree a1 = TYPE_ATTRIBUTES (type1);
4813 const_tree a2 = TYPE_ATTRIBUTES (type2);
4814 const_tree a;
4815
4816 if (a1 == a2)
4817 return 1;
4818 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4819 {
4820 const struct attribute_spec *as;
4821 const_tree attr;
4822
4823 as = lookup_attribute_spec (get_attribute_name (a));
4824 if (!as || as->affects_type_identity == false)
4825 continue;
4826
4827 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4828 if (!attr || !attribute_value_equal (a, attr))
4829 break;
4830 }
4831 if (!a)
4832 {
4833 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4834 {
4835 const struct attribute_spec *as;
4836
4837 as = lookup_attribute_spec (get_attribute_name (a));
4838 if (!as || as->affects_type_identity == false)
4839 continue;
4840
4841 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4842 break;
4843 /* We don't need to compare trees again, as we did this
4844 already in the first loop. */
4845 }
4846 /* All attributes that affect type identity are equal, so
4847 there is no need to call the target hook for comparison. */
4848 if (!a)
4849 return 1;
4850 }
4851 /* Some type combinations, like the default calling convention, might still
4852 be compatible, so we have to call the target hook to get the final result. */
4853 return targetm.comp_type_attributes (type1, type2);
4854 }
4855
4856 /* Return a type like TTYPE except that its TYPE_ATTRIBUTES
4857 is ATTRIBUTE.
4858
4859 Record such modified types already made so we don't make duplicates. */
4860
4861 tree
4862 build_type_attribute_variant (tree ttype, tree attribute)
4863 {
4864 return build_type_attribute_qual_variant (ttype, attribute,
4865 TYPE_QUALS (ttype));
4866 }
4867
4868
4869 /* Reset the expression *EXPR_P, a size or position.
4870
4871 ??? We could reset all non-constant sizes or positions. But it's cheap
4872 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4873
4874 We need to reset self-referential sizes or positions because they cannot
4875 be gimplified and thus can contain a CALL_EXPR after the gimplification
4876 is finished, which will run afoul of LTO streaming. And they need to be
4877 reset to something essentially dummy but not constant, so as to preserve
4878 the properties of the object they are attached to. */
4879
4880 static inline void
4881 free_lang_data_in_one_sizepos (tree *expr_p)
4882 {
4883 tree expr = *expr_p;
4884 if (CONTAINS_PLACEHOLDER_P (expr))
4885 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4886 }
4887
4888
4889 /* Reset all the fields in a binfo node BINFO. We only keep
4890 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4891
4892 static void
4893 free_lang_data_in_binfo (tree binfo)
4894 {
4895 unsigned i;
4896 tree t;
4897
4898 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4899
4900 BINFO_VIRTUALS (binfo) = NULL_TREE;
4901 BINFO_BASE_ACCESSES (binfo) = NULL;
4902 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4903 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4904
4905 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4906 free_lang_data_in_binfo (t);
4907 }
4908
4909
4910 /* Reset all language specific information still present in TYPE. */
4911
4912 static void
4913 free_lang_data_in_type (tree type)
4914 {
4915 gcc_assert (TYPE_P (type));
4916
4917 /* Give the FE a chance to remove its own data first. */
4918 lang_hooks.free_lang_data (type);
4919
4920 TREE_LANG_FLAG_0 (type) = 0;
4921 TREE_LANG_FLAG_1 (type) = 0;
4922 TREE_LANG_FLAG_2 (type) = 0;
4923 TREE_LANG_FLAG_3 (type) = 0;
4924 TREE_LANG_FLAG_4 (type) = 0;
4925 TREE_LANG_FLAG_5 (type) = 0;
4926 TREE_LANG_FLAG_6 (type) = 0;
4927
4928 if (TREE_CODE (type) == FUNCTION_TYPE)
4929 {
4930 /* Remove the const and volatile qualifiers from arguments. The
4931 C++ front end removes them, but the C front end does not,
4932 leading to false ODR violation errors when merging two
4933 instances of the same function signature compiled by
4934 different front ends. */
4935 tree p;
4936
4937 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4938 {
4939 tree arg_type = TREE_VALUE (p);
4940
4941 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4942 {
4943 int quals = TYPE_QUALS (arg_type)
4944 & ~TYPE_QUAL_CONST
4945 & ~TYPE_QUAL_VOLATILE;
4946 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4947 free_lang_data_in_type (TREE_VALUE (p));
4948 }
4949 }
4950 }
4951
4952 /* Remove members that are not actually FIELD_DECLs from the field
4953 list of an aggregate. These occur in C++. */
4954 if (RECORD_OR_UNION_TYPE_P (type))
4955 {
4956 tree prev, member;
4957
4958 /* Note that TYPE_FIELDS can be shared across distinct
4959 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4960 to be removed, we cannot set its TREE_CHAIN to NULL.
4961 Otherwise, we would not be able to find all the other fields
4962 in the other instances of this TREE_TYPE.
4963
4964 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4965 prev = NULL_TREE;
4966 member = TYPE_FIELDS (type);
4967 while (member)
4968 {
4969 if (TREE_CODE (member) == FIELD_DECL
4970 || TREE_CODE (member) == TYPE_DECL)
4971 {
4972 if (prev)
4973 TREE_CHAIN (prev) = member;
4974 else
4975 TYPE_FIELDS (type) = member;
4976 prev = member;
4977 }
4978
4979 member = TREE_CHAIN (member);
4980 }
4981
4982 if (prev)
4983 TREE_CHAIN (prev) = NULL_TREE;
4984 else
4985 TYPE_FIELDS (type) = NULL_TREE;
4986
4987 TYPE_METHODS (type) = NULL_TREE;
4988 if (TYPE_BINFO (type))
4989 free_lang_data_in_binfo (TYPE_BINFO (type));
4990 }
4991 else
4992 {
4993 /* For non-aggregate types, clear out the language slot (which
4994 overloads TYPE_BINFO). */
4995 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4996
4997 if (INTEGRAL_TYPE_P (type)
4998 || SCALAR_FLOAT_TYPE_P (type)
4999 || FIXED_POINT_TYPE_P (type))
5000 {
5001 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5002 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5003 }
5004 }
5005
5006 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5007 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5008
5009 if (TYPE_CONTEXT (type)
5010 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5011 {
5012 tree ctx = TYPE_CONTEXT (type);
5013 do
5014 {
5015 ctx = BLOCK_SUPERCONTEXT (ctx);
5016 }
5017 while (ctx && TREE_CODE (ctx) == BLOCK);
5018 TYPE_CONTEXT (type) = ctx;
5019 }
5020 }
5021
5022
5023 /* Return true if DECL may need an assembler name to be set. */
5024
5025 static inline bool
5026 need_assembler_name_p (tree decl)
5027 {
5028 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5029 if (TREE_CODE (decl) != FUNCTION_DECL
5030 && TREE_CODE (decl) != VAR_DECL)
5031 return false;
5032
5033 /* If DECL already has its assembler name set, it does not need a
5034 new one. */
5035 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5036 || DECL_ASSEMBLER_NAME_SET_P (decl))
5037 return false;
5038
5039 /* Abstract decls do not need an assembler name. */
5040 if (DECL_ABSTRACT (decl))
5041 return false;
5042
5043 /* For VAR_DECLs, only static, public and external symbols need an
5044 assembler name. */
5045 if (TREE_CODE (decl) == VAR_DECL
5046 && !TREE_STATIC (decl)
5047 && !TREE_PUBLIC (decl)
5048 && !DECL_EXTERNAL (decl))
5049 return false;
5050
5051 if (TREE_CODE (decl) == FUNCTION_DECL)
5052 {
5053 /* Do not set assembler name on builtins. Allow RTL expansion to
5054 decide whether to expand inline or via a regular call. */
5055 if (DECL_BUILT_IN (decl)
5056 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5057 return false;
5058
5059 /* Functions represented in the callgraph need an assembler name. */
5060 if (cgraph_get_node (decl) != NULL)
5061 return true;
5062
5063 /* Unused and not public functions don't need an assembler name. */
5064 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5065 return false;
5066 }
5067
5068 return true;
5069 }
5070
5071
5072 /* Reset all language specific information still present in symbol
5073 DECL. */
5074
5075 static void
5076 free_lang_data_in_decl (tree decl)
5077 {
5078 gcc_assert (DECL_P (decl));
5079
5080 /* Give the FE a chance to remove its own data first. */
5081 lang_hooks.free_lang_data (decl);
5082
5083 TREE_LANG_FLAG_0 (decl) = 0;
5084 TREE_LANG_FLAG_1 (decl) = 0;
5085 TREE_LANG_FLAG_2 (decl) = 0;
5086 TREE_LANG_FLAG_3 (decl) = 0;
5087 TREE_LANG_FLAG_4 (decl) = 0;
5088 TREE_LANG_FLAG_5 (decl) = 0;
5089 TREE_LANG_FLAG_6 (decl) = 0;
5090
5091 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5092 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5093 if (TREE_CODE (decl) == FIELD_DECL)
5094 {
5095 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5096 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5097 DECL_QUALIFIER (decl) = NULL_TREE;
5098 }
5099
5100 if (TREE_CODE (decl) == FUNCTION_DECL)
5101 {
5102 struct cgraph_node *node;
5103 if (!(node = cgraph_get_node (decl))
5104 || (!node->definition && !node->clones))
5105 {
5106 if (node)
5107 cgraph_release_function_body (node);
5108 else
5109 {
5110 release_function_body (decl);
5111 DECL_ARGUMENTS (decl) = NULL;
5112 DECL_RESULT (decl) = NULL;
5113 DECL_INITIAL (decl) = error_mark_node;
5114 }
5115 }
5116 if (gimple_has_body_p (decl))
5117 {
5118 tree t;
5119
5120 /* If DECL has a gimple body, then the context for its
5121 arguments must be DECL. Otherwise, it doesn't really
5122 matter, as we will not be emitting any code for DECL. In
5123 general, there may be other instances of DECL created by
5124 the front end and since PARM_DECLs are generally shared,
5125 their DECL_CONTEXT changes as the replicas of DECL are
5126 created. The only time where DECL_CONTEXT is important
5127 is for the FUNCTION_DECLs that have a gimple body (since
5128 the PARM_DECL will be used in the function's body). */
5129 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5130 DECL_CONTEXT (t) = decl;
5131 }
5132
5133 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5134 At this point, it is not needed anymore. */
5135 DECL_SAVED_TREE (decl) = NULL_TREE;
5136
5137 /* Clear the abstract origin if it refers to a method. Otherwise
5138 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5139 origin will not be output correctly. */
5140 if (DECL_ABSTRACT_ORIGIN (decl)
5141 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5142 && RECORD_OR_UNION_TYPE_P
5143 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5144 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5145
5146 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5147 DECL_VINDEX referring to itself into a vtable slot number as it
5148 should. Happens with functions that are copied and then forgotten
5149 about. Just clear it; it won't matter anymore. */
5150 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5151 DECL_VINDEX (decl) = NULL_TREE;
5152 }
5153 else if (TREE_CODE (decl) == VAR_DECL)
5154 {
5155 if ((DECL_EXTERNAL (decl)
5156 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5157 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5158 DECL_INITIAL (decl) = NULL_TREE;
5159 }
5160 else if (TREE_CODE (decl) == TYPE_DECL
5161 || TREE_CODE (decl) == FIELD_DECL)
5162 DECL_INITIAL (decl) = NULL_TREE;
5163 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5164 && DECL_INITIAL (decl)
5165 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5166 {
5167 /* Strip builtins from the translation-unit BLOCK. We still have targets
5168 without builtin_decl_explicit support, and builtins are shared
5169 nodes, so we cannot use TREE_CHAIN in multiple lists. */
5170 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5171 while (*nextp)
5172 {
5173 tree var = *nextp;
5174 if (TREE_CODE (var) == FUNCTION_DECL
5175 && DECL_BUILT_IN (var))
5176 *nextp = TREE_CHAIN (var);
5177 else
5178 nextp = &TREE_CHAIN (var);
5179 }
5180 }
5181 }
5182
5183
5184 /* Data used when collecting DECLs and TYPEs for language data removal. */
5185
5186 struct free_lang_data_d
5187 {
5188 /* Worklist to avoid excessive recursion. */
5189 vec<tree> worklist;
5190
5191 /* Set of traversed objects. Used to avoid duplicate visits. */
5192 struct pointer_set_t *pset;
5193
5194 /* Array of symbols to process with free_lang_data_in_decl. */
5195 vec<tree> decls;
5196
5197 /* Array of types to process with free_lang_data_in_type. */
5198 vec<tree> types;
5199 };
5200
5201
5202 /* Save all language fields needed to generate proper debug information
5203 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5204
5205 static void
5206 save_debug_info_for_decl (tree t)
5207 {
5208 /*struct saved_debug_info_d *sdi;*/
5209
5210 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5211
5212 /* FIXME. Partial implementation for saving debug info removed. */
5213 }
5214
5215
5216 /* Save all language fields needed to generate proper debug information
5217 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5218
5219 static void
5220 save_debug_info_for_type (tree t)
5221 {
5222 /*struct saved_debug_info_d *sdi;*/
5223
5224 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5225
5226 /* FIXME. Partial implementation for saving debug info removed. */
5227 }
5228
5229
5230 /* Add type or decl T to one of the list of tree nodes that need their
5231 language data removed. The lists are held inside FLD. */
5232
5233 static void
5234 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5235 {
5236 if (DECL_P (t))
5237 {
5238 fld->decls.safe_push (t);
5239 if (debug_info_level > DINFO_LEVEL_TERSE)
5240 save_debug_info_for_decl (t);
5241 }
5242 else if (TYPE_P (t))
5243 {
5244 fld->types.safe_push (t);
5245 if (debug_info_level > DINFO_LEVEL_TERSE)
5246 save_debug_info_for_type (t);
5247 }
5248 else
5249 gcc_unreachable ();
5250 }
5251
5252 /* Push tree node T into FLD->WORKLIST. */
5253
5254 static inline void
5255 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5256 {
5257 if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
5258 fld->worklist.safe_push ((t));
5259 }
5260
5261
5262 /* Operand callback helper for free_lang_data_in_node. *TP is the
5263 subtree operand being considered. */
5264
5265 static tree
5266 find_decls_types_r (tree *tp, int *ws, void *data)
5267 {
5268 tree t = *tp;
5269 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5270
5271 if (TREE_CODE (t) == TREE_LIST)
5272 return NULL_TREE;
5273
5274 /* Language specific nodes will be removed, so there is no need
5275 to gather anything under them. */
5276 if (is_lang_specific (t))
5277 {
5278 *ws = 0;
5279 return NULL_TREE;
5280 }
5281
5282 if (DECL_P (t))
5283 {
5284 /* Note that walk_tree does not traverse every possible field in
5285 decls, so we have to do our own traversals here. */
5286 add_tree_to_fld_list (t, fld);
5287
5288 fld_worklist_push (DECL_NAME (t), fld);
5289 fld_worklist_push (DECL_CONTEXT (t), fld);
5290 fld_worklist_push (DECL_SIZE (t), fld);
5291 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5292
5293 /* We are going to remove everything under DECL_INITIAL for
5294 TYPE_DECLs. No point walking them. */
5295 if (TREE_CODE (t) != TYPE_DECL)
5296 fld_worklist_push (DECL_INITIAL (t), fld);
5297
5298 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5299 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5300
5301 if (TREE_CODE (t) == FUNCTION_DECL)
5302 {
5303 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5304 fld_worklist_push (DECL_RESULT (t), fld);
5305 }
5306 else if (TREE_CODE (t) == TYPE_DECL)
5307 {
5308 fld_worklist_push (DECL_ARGUMENT_FLD (t), fld);
5309 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5310 }
5311 else if (TREE_CODE (t) == FIELD_DECL)
5312 {
5313 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5314 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5315 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5316 fld_worklist_push (DECL_FCONTEXT (t), fld);
5317 }
5318
5319 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5320 && DECL_HAS_VALUE_EXPR_P (t))
5321 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5322
5323 if (TREE_CODE (t) != FIELD_DECL
5324 && TREE_CODE (t) != TYPE_DECL)
5325 fld_worklist_push (TREE_CHAIN (t), fld);
5326 *ws = 0;
5327 }
5328 else if (TYPE_P (t))
5329 {
5330 /* Note that walk_tree does not traverse every possible field in
5331 types, so we have to do our own traversals here. */
5332 add_tree_to_fld_list (t, fld);
5333
5334 if (!RECORD_OR_UNION_TYPE_P (t))
5335 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5336 fld_worklist_push (TYPE_SIZE (t), fld);
5337 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5338 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5339 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5340 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5341 fld_worklist_push (TYPE_NAME (t), fld);
5342 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5343 them and thus do not, and do not want to, reach unused pointer types
5344 this way. */
5345 if (!POINTER_TYPE_P (t))
5346 fld_worklist_push (TYPE_MINVAL (t), fld);
5347 if (!RECORD_OR_UNION_TYPE_P (t))
5348 fld_worklist_push (TYPE_MAXVAL (t), fld);
5349 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5350 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5351 do not, and do not want to, reach unused variants this way. */
5352 if (TYPE_CONTEXT (t))
5353 {
5354 tree ctx = TYPE_CONTEXT (t);
5355 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5356 So push that instead. */
5357 while (ctx && TREE_CODE (ctx) == BLOCK)
5358 ctx = BLOCK_SUPERCONTEXT (ctx);
5359 fld_worklist_push (ctx, fld);
5360 }
5361 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not,
5362 and do not want to, reach unused types this way. */
5363
5364 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5365 {
5366 unsigned i;
5367 tree tem;
5368 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5369 fld_worklist_push (TREE_TYPE (tem), fld);
5370 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5371 if (tem
5372 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5373 && TREE_CODE (tem) == TREE_LIST)
5374 do
5375 {
5376 fld_worklist_push (TREE_VALUE (tem), fld);
5377 tem = TREE_CHAIN (tem);
5378 }
5379 while (tem);
5380 }
5381 if (RECORD_OR_UNION_TYPE_P (t))
5382 {
5383 tree tem;
5384 /* Push all TYPE_FIELDS; interesting and non-interesting entries
5385 can be interleaved. */
5386 tem = TYPE_FIELDS (t);
5387 while (tem)
5388 {
5389 if (TREE_CODE (tem) == FIELD_DECL
5390 || TREE_CODE (tem) == TYPE_DECL)
5391 fld_worklist_push (tem, fld);
5392 tem = TREE_CHAIN (tem);
5393 }
5394 }
5395
5396 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5397 *ws = 0;
5398 }
5399 else if (TREE_CODE (t) == BLOCK)
5400 {
5401 tree tem;
5402 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5403 fld_worklist_push (tem, fld);
5404 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5405 fld_worklist_push (tem, fld);
5406 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5407 }
5408
5409 if (TREE_CODE (t) != IDENTIFIER_NODE
5410 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5411 fld_worklist_push (TREE_TYPE (t), fld);
5412
5413 return NULL_TREE;
5414 }
5415
5416
5417 /* Find decls and types in T. */
5418
5419 static void
5420 find_decls_types (tree t, struct free_lang_data_d *fld)
5421 {
5422 while (1)
5423 {
5424 if (!pointer_set_contains (fld->pset, t))
5425 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5426 if (fld->worklist.is_empty ())
5427 break;
5428 t = fld->worklist.pop ();
5429 }
5430 }
5431
5432 /* Translate all the types in LIST into the corresponding runtime
5433 types. */
5434
5435 static tree
5436 get_eh_types_for_runtime (tree list)
5437 {
5438 tree head, prev;
5439
5440 if (list == NULL_TREE)
5441 return NULL_TREE;
5442
5443 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5444 prev = head;
5445 list = TREE_CHAIN (list);
5446 while (list)
5447 {
5448 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5449 TREE_CHAIN (prev) = n;
5450 prev = TREE_CHAIN (prev);
5451 list = TREE_CHAIN (list);
5452 }
5453
5454 return head;
5455 }
5456
5457
5458 /* Find decls and types referenced in EH region R and store them in
5459 FLD->DECLS and FLD->TYPES. */
5460
5461 static void
5462 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5463 {
5464 switch (r->type)
5465 {
5466 case ERT_CLEANUP:
5467 break;
5468
5469 case ERT_TRY:
5470 {
5471 eh_catch c;
5472
5473 /* The types referenced in each catch must first be changed to the
5474 EH types used at runtime. This removes references to FE types
5475 in the region. */
5476 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5477 {
5478 c->type_list = get_eh_types_for_runtime (c->type_list);
5479 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5480 }
5481 }
5482 break;
5483
5484 case ERT_ALLOWED_EXCEPTIONS:
5485 r->u.allowed.type_list
5486 = get_eh_types_for_runtime (r->u.allowed.type_list);
5487 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5488 break;
5489
5490 case ERT_MUST_NOT_THROW:
5491 walk_tree (&r->u.must_not_throw.failure_decl,
5492 find_decls_types_r, fld, fld->pset);
5493 break;
5494 }
5495 }
5496
5497
5498 /* Find decls and types referenced in cgraph node N and store them in
5499 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5500 look for *every* kind of DECL and TYPE node reachable from N,
5501 including those embedded inside types and decls (i.e., TYPE_DECLs,
5502 NAMESPACE_DECLs, etc). */
5503
5504 static void
5505 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5506 {
5507 basic_block bb;
5508 struct function *fn;
5509 unsigned ix;
5510 tree t;
5511
5512 find_decls_types (n->decl, fld);
5513
5514 if (!gimple_has_body_p (n->decl))
5515 return;
5516
5517 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5518
5519 fn = DECL_STRUCT_FUNCTION (n->decl);
5520
5521 /* Traverse locals. */
5522 FOR_EACH_LOCAL_DECL (fn, ix, t)
5523 find_decls_types (t, fld);
5524
5525 /* Traverse EH regions in FN. */
5526 {
5527 eh_region r;
5528 FOR_ALL_EH_REGION_FN (r, fn)
5529 find_decls_types_in_eh_region (r, fld);
5530 }
5531
5532 /* Traverse every statement in FN. */
5533 FOR_EACH_BB_FN (bb, fn)
5534 {
5535 gimple_stmt_iterator si;
5536 unsigned i;
5537
5538 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5539 {
5540 gimple phi = gsi_stmt (si);
5541
5542 for (i = 0; i < gimple_phi_num_args (phi); i++)
5543 {
5544 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5545 find_decls_types (*arg_p, fld);
5546 }
5547 }
5548
5549 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5550 {
5551 gimple stmt = gsi_stmt (si);
5552
5553 if (is_gimple_call (stmt))
5554 find_decls_types (gimple_call_fntype (stmt), fld);
5555
5556 for (i = 0; i < gimple_num_ops (stmt); i++)
5557 {
5558 tree arg = gimple_op (stmt, i);
5559 find_decls_types (arg, fld);
5560 }
5561 }
5562 }
5563 }
5564
5565
5566 /* Find decls and types referenced in varpool node N and store them in
5567 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5568 look for *every* kind of DECL and TYPE node reachable from N,
5569 including those embedded inside types and decls (i.e., TYPE_DECLs,
5570 NAMESPACE_DECLs, etc). */
5571
5572 static void
5573 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5574 {
5575 find_decls_types (v->decl, fld);
5576 }
5577
5578 /* If T needs an assembler name, have one created for it. */
5579
5580 void
5581 assign_assembler_name_if_neeeded (tree t)
5582 {
5583 if (need_assembler_name_p (t))
5584 {
5585 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5586 diagnostics that use input_location to show locus
5587 information. The problem here is that, at this point,
5588 input_location is generally anchored to the end of the file
5589 (since the parser is long gone), so we don't have a good
5590 position to pin it to.
5591
5592 To alleviate this problem, this uses the location of T's
5593 declaration. Examples of this are
5594 testsuite/g++.dg/template/cond2.C and
5595 testsuite/g++.dg/template/pr35240.C. */
5596 location_t saved_location = input_location;
5597 input_location = DECL_SOURCE_LOCATION (t);
5598
5599 decl_assembler_name (t);
5600
5601 input_location = saved_location;
5602 }
5603 }
5604
5605
5606 /* Free language specific information for every operand and expression
5607 in every node of the call graph. This process operates in three stages:
5608
5609 1- Every callgraph node and varpool node is traversed looking for
5610 decls and types embedded in them. This is a more exhaustive
5611 search than that done by find_referenced_vars, because it will
5612 also collect individual fields, decls embedded in types, etc.
5613
5614 2- All the decls found are sent to free_lang_data_in_decl.
5615
5616 3- All the types found are sent to free_lang_data_in_type.
5617
5618 The ordering between decls and types is important because
5619 free_lang_data_in_decl sets assembler names, which includes
5620 mangling. So types cannot be freed up until assembler names have
5621 been set up. */
5622
5623 static void
5624 free_lang_data_in_cgraph (void)
5625 {
5626 struct cgraph_node *n;
5627 varpool_node *v;
5628 struct free_lang_data_d fld;
5629 tree t;
5630 unsigned i;
5631 alias_pair *p;
5632
5633 /* Initialize sets and arrays to store referenced decls and types. */
5634 fld.pset = pointer_set_create ();
5635 fld.worklist.create (0);
5636 fld.decls.create (100);
5637 fld.types.create (100);
5638
5639 /* Find decls and types in the body of every function in the callgraph. */
5640 FOR_EACH_FUNCTION (n)
5641 find_decls_types_in_node (n, &fld);
5642
5643 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5644 find_decls_types (p->decl, &fld);
5645
5646 /* Find decls and types in every varpool symbol. */
5647 FOR_EACH_VARIABLE (v)
5648 find_decls_types_in_var (v, &fld);
5649
5650 /* Set the assembler name on every decl found. We need to do this
5651 now because free_lang_data_in_decl will invalidate data needed
5652 for mangling. This breaks mangling on interdependent decls. */
5653 FOR_EACH_VEC_ELT (fld.decls, i, t)
5654 assign_assembler_name_if_neeeded (t);
5655
5656 /* Traverse every decl found freeing its language data. */
5657 FOR_EACH_VEC_ELT (fld.decls, i, t)
5658 free_lang_data_in_decl (t);
5659
5660 /* Traverse every type found freeing its language data. */
5661 FOR_EACH_VEC_ELT (fld.types, i, t)
5662 free_lang_data_in_type (t);
5663
5664 pointer_set_destroy (fld.pset);
5665 fld.worklist.release ();
5666 fld.decls.release ();
5667 fld.types.release ();
5668 }
5669
5670
5671 /* Free resources that are used by the front end but are not needed once it is done. */
5672
5673 static unsigned
5674 free_lang_data (void)
5675 {
5676 unsigned i;
5677
5678 /* If we are the LTO frontend we have freed lang-specific data already. */
5679 if (in_lto_p
5680 || !flag_generate_lto)
5681 return 0;
5682
5683 /* Allocate and assign alias sets to the standard integer types
5684 while the slots still hold the types in the form the front ends generated them. */
5685 for (i = 0; i < itk_none; ++i)
5686 if (integer_types[i])
5687 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5688
5689 /* Traverse the IL resetting language specific information for
5690 operands, expressions, etc. */
5691 free_lang_data_in_cgraph ();
5692
5693 /* Create gimple variants for common types. */
5694 ptrdiff_type_node = integer_type_node;
5695 fileptr_type_node = ptr_type_node;
5696
5697 /* Reset some langhooks. Do not reset types_compatible_p, it may
5698 still be used indirectly via the get_alias_set langhook. */
5699 lang_hooks.dwarf_name = lhd_dwarf_name;
5700 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5701 /* We do not want the default decl_assembler_name implementation,
5702 rather if we have fixed everything we want a wrapper around it
5703 asserting that all non-local symbols already got their assembler
5704 name and only produce assembler names for local symbols. Or rather
5705 make sure we never call decl_assembler_name on local symbols and
5706 devise a separate, middle-end private scheme for it. */
5707
5708 /* Reset diagnostic machinery. */
5709 tree_diagnostics_defaults (global_dc);
5710
5711 return 0;
5712 }
5713
5714
5715 namespace {
5716
5717 const pass_data pass_data_ipa_free_lang_data =
5718 {
5719 SIMPLE_IPA_PASS, /* type */
5720 "*free_lang_data", /* name */
5721 OPTGROUP_NONE, /* optinfo_flags */
5722 true, /* has_execute */
5723 TV_IPA_FREE_LANG_DATA, /* tv_id */
5724 0, /* properties_required */
5725 0, /* properties_provided */
5726 0, /* properties_destroyed */
5727 0, /* todo_flags_start */
5728 0, /* todo_flags_finish */
5729 };
5730
5731 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5732 {
5733 public:
5734 pass_ipa_free_lang_data (gcc::context *ctxt)
5735 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5736 {}
5737
5738 /* opt_pass methods: */
5739 virtual unsigned int execute (function *) { return free_lang_data (); }
5740
5741 }; // class pass_ipa_free_lang_data
5742
5743 } // anon namespace
5744
5745 simple_ipa_opt_pass *
5746 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5747 {
5748 return new pass_ipa_free_lang_data (ctxt);
5749 }
5750
5751 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5752 ATTR_NAME. Also used internally by remove_attribute(). */
5753 bool
5754 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5755 {
5756 size_t ident_len = IDENTIFIER_LENGTH (ident);
5757
5758 if (ident_len == attr_len)
5759 {
5760 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5761 return true;
5762 }
5763 else if (ident_len == attr_len + 4)
5764 {
5765 /* There is the possibility that ATTR is 'text' and IDENT is
5766 '__text__'. */
5767 const char *p = IDENTIFIER_POINTER (ident);
5768 if (p[0] == '_' && p[1] == '_'
5769 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5770 && strncmp (attr_name, p + 2, attr_len) == 0)
5771 return true;
5772 }
5773
5774 return false;
5775 }
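
/* The usual entry point is is_attribute_p (see tree.h), which computes
   ATTR_LEN from a strlen call. Usage sketch (illustrative; ATTR is a
   hypothetical attribute list element):

     if (is_attribute_p ("packed", get_attribute_name (attr)))
       ... handle the attribute ...

   Both the canonical spelling "packed" and the underscored spelling
   "__packed__" of the identifier match. */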
5776
5777 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5778 of ATTR_NAME, and LIST is not NULL_TREE. */
5779 tree
5780 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5781 {
5782 while (list)
5783 {
5784 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5785
5786 if (ident_len == attr_len)
5787 {
5788 if (!strcmp (attr_name,
5789 IDENTIFIER_POINTER (get_attribute_name (list))))
5790 break;
5791 }
5792 /* TODO: If we made sure that attributes were stored in the
5793 canonical form without '__...__' (i.e., as in 'text' as opposed
5794 to '__text__') then we could avoid the following case. */
5795 else if (ident_len == attr_len + 4)
5796 {
5797 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5798 if (p[0] == '_' && p[1] == '_'
5799 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5800 && strncmp (attr_name, p + 2, attr_len) == 0)
5801 break;
5802 }
5803 list = TREE_CHAIN (list);
5804 }
5805
5806 return list;
5807 }
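
/* The usual entry point is the lookup_attribute wrapper in tree.h, which
   computes ATTR_LEN and handles an empty LIST. Usage sketch (illustrative;
   DECL is hypothetical):

     tree a = lookup_attribute ("noreturn", DECL_ATTRIBUTES (decl));
     if (a != NULL_TREE)
       ... the decl carries the attribute ...

   To find further occurrences, pass TREE_CHAIN (a) back in. */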
5808
5809 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5810 return a pointer to the first list element whose attribute name
5811 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5812 '__text__'). */
5813
5814 tree
5815 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5816 tree list)
5817 {
5818 while (list)
5819 {
5820 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5821
5822 if (attr_len > ident_len)
5823 {
5824 list = TREE_CHAIN (list);
5825 continue;
5826 }
5827
5828 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5829
5830 if (strncmp (attr_name, p, attr_len) == 0)
5831 break;
5832
5833 /* TODO: If we made sure that attributes were stored in the
5834 canonical form without '__...__' (i.e., as in 'text' as opposed
5835 to '__text__') then we could avoid the following case. */
5836 if (p[0] == '_' && p[1] == '_'
5837 && strncmp (attr_name, p + 2, attr_len) == 0)
5838 break;
5839
5840 list = TREE_CHAIN (list);
5841 }
5842
5843 return list;
5844 }
5845
5846
5847 /* A variant of lookup_attribute() that can be used with an identifier
5848 as the first argument, and where the identifier can be either
5849 'text' or '__text__'.
5850
5851 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5852 return a pointer to the attribute's list element if the attribute
5853 is part of the list, or NULL_TREE if not found. If the attribute
5854 appears more than once, this only returns the first occurrence; the
5855 TREE_CHAIN of the return value should be passed back in if further
5856 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5857 can be in the form 'text' or '__text__'. */
5858 static tree
5859 lookup_ident_attribute (tree attr_identifier, tree list)
5860 {
5861 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5862
5863 while (list)
5864 {
5865 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5866 == IDENTIFIER_NODE);
5867
5868 /* Identifiers can be compared directly for equality. */
5869 if (attr_identifier == get_attribute_name (list))
5870 break;
5871
5872 /* If they are not equal, they may still be one in the form
5873 'text' while the other one is in the form '__text__'. TODO:
5874 If we were storing attributes in normalized 'text' form, then
5875 this could all go away and we could take full advantage of
5876 the fact that we're comparing identifiers. :-) */
5877 {
5878 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5879 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5880
5881 if (ident_len == attr_len + 4)
5882 {
5883 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5884 const char *q = IDENTIFIER_POINTER (attr_identifier);
5885 if (p[0] == '_' && p[1] == '_'
5886 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5887 && strncmp (q, p + 2, attr_len) == 0)
5888 break;
5889 }
5890 else if (ident_len + 4 == attr_len)
5891 {
5892 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5893 const char *q = IDENTIFIER_POINTER (attr_identifier);
5894 if (q[0] == '_' && q[1] == '_'
5895 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5896 && strncmp (q + 2, p, ident_len) == 0)
5897 break;
5898 }
5899 }
5900 list = TREE_CHAIN (list);
5901 }
5902
5903 return list;
5904 }
5905
5906 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5907 modified list. */
5908
5909 tree
5910 remove_attribute (const char *attr_name, tree list)
5911 {
5912 tree *p;
5913 size_t attr_len = strlen (attr_name);
5914
5915 gcc_checking_assert (attr_name[0] != '_');
5916
5917 for (p = &list; *p; )
5918 {
5919 tree l = *p;
5920 /* TODO: If we were storing attributes in normalized form, here
5921 we could use a simple strcmp(). */
5922 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5923 *p = TREE_CHAIN (l);
5924 else
5925 p = &TREE_CHAIN (l);
5926 }
5927
5928 return list;
5929 }
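
/* Illustrative usage sketch (added commentary, not part of the original
   source): the loop above walks the list through a pointer-to-pointer so
   that matching entries can be spliced out without special-casing the
   head of the chain.  Assuming DECL is some decl carrying a 'deprecated'
   attribute:

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));

   Every 'deprecated' (or '__deprecated__') entry is dropped and the
   possibly-new head of the list is stored back on the decl.  */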
5930
5931 /* Return an attribute list that is the union of a1 and a2. */
5932
5933 tree
5934 merge_attributes (tree a1, tree a2)
5935 {
5936 tree attributes;
5937
5938 /* Either one unset? Take the set one. */
5939
5940 if ((attributes = a1) == 0)
5941 attributes = a2;
5942
5943 /* One that completely contains the other? Take it. */
5944
5945 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5946 {
5947 if (attribute_list_contained (a2, a1))
5948 attributes = a2;
5949 else
5950 {
5951 /* Pick the longest list, and hang on the other list. */
5952
5953 if (list_length (a1) < list_length (a2))
5954 attributes = a2, a2 = a1;
5955
5956 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5957 {
5958 tree a;
5959 for (a = lookup_ident_attribute (get_attribute_name (a2),
5960 attributes);
5961 a != NULL_TREE && !attribute_value_equal (a, a2);
5962 a = lookup_ident_attribute (get_attribute_name (a2),
5963 TREE_CHAIN (a)))
5964 ;
5965 if (a == NULL_TREE)
5966 {
5967 a1 = copy_node (a2);
5968 TREE_CHAIN (a1) = attributes;
5969 attributes = a1;
5970 }
5971 }
5972 }
5973 }
5974 return attributes;
5975 }
5976
5977 /* Given types T1 and T2, merge their attributes and return
5978 the result. */
5979
5980 tree
5981 merge_type_attributes (tree t1, tree t2)
5982 {
5983 return merge_attributes (TYPE_ATTRIBUTES (t1),
5984 TYPE_ATTRIBUTES (t2));
5985 }
5986
5987 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5988 the result. */
5989
5990 tree
5991 merge_decl_attributes (tree olddecl, tree newdecl)
5992 {
5993 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5994 DECL_ATTRIBUTES (newdecl));
5995 }
5996
5997 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5998
5999 /* Specialization of merge_decl_attributes for various Windows targets.
6000
6001 This handles the following situation:
6002
6003 __declspec (dllimport) int foo;
6004 int foo;
6005
6006 The second instance of `foo' nullifies the dllimport. */
6007
6008 tree
6009 merge_dllimport_decl_attributes (tree old, tree new_tree)
6010 {
6011 tree a;
6012 int delete_dllimport_p = 1;
6013
6014 /* What we need to do here is remove from `old' dllimport if it doesn't
6015 appear in `new'. dllimport behaves like extern: if a declaration is
6016 marked dllimport and a definition appears later, then the object
6017 is not dllimport'd. We also remove a `new' dllimport if the old list
6018 contains dllexport: dllexport always overrides dllimport, regardless
6019 of the order of declaration. */
6020 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6021 delete_dllimport_p = 0;
6022 else if (DECL_DLLIMPORT_P (new_tree)
6023 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6024 {
6025 DECL_DLLIMPORT_P (new_tree) = 0;
6026 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6027 "dllimport ignored", new_tree);
6028 }
6029 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6030 {
6031 /* Warn about overriding a symbol that has already been used, e.g.:
6032 extern int __attribute__ ((dllimport)) foo;
6033 int* bar () {return &foo;}
6034 int foo;
6035 */
6036 if (TREE_USED (old))
6037 {
6038 warning (0, "%q+D redeclared without dllimport attribute "
6039 "after being referenced with dll linkage", new_tree);
6040 /* If we have used a variable's address with dllimport linkage,
6041 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6042 decl may already have had TREE_CONSTANT computed.
6043 We still remove the attribute so that assembler code refers
6044 to '&foo' rather than '_imp__foo'. */
6045 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6046 DECL_DLLIMPORT_P (new_tree) = 1;
6047 }
6048
6049 /* Let an inline definition silently override the external reference,
6050 but otherwise warn about attribute inconsistency. */
6051 else if (TREE_CODE (new_tree) == VAR_DECL
6052 || !DECL_DECLARED_INLINE_P (new_tree))
6053 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6054 "previous dllimport ignored", new_tree);
6055 }
6056 else
6057 delete_dllimport_p = 0;
6058
6059 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6060
6061 if (delete_dllimport_p)
6062 a = remove_attribute ("dllimport", a);
6063
6064 return a;
6065 }
6066
6067 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6068 struct attribute_spec.handler. */
6069
6070 tree
6071 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6072 bool *no_add_attrs)
6073 {
6074 tree node = *pnode;
6075 bool is_dllimport;
6076
6077 /* These attributes may apply to structure and union types being created,
6078 but otherwise should pass to the declaration involved. */
6079 if (!DECL_P (node))
6080 {
6081 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6082 | (int) ATTR_FLAG_ARRAY_NEXT))
6083 {
6084 *no_add_attrs = true;
6085 return tree_cons (name, args, NULL_TREE);
6086 }
6087 if (TREE_CODE (node) == RECORD_TYPE
6088 || TREE_CODE (node) == UNION_TYPE)
6089 {
6090 node = TYPE_NAME (node);
6091 if (!node)
6092 return NULL_TREE;
6093 }
6094 else
6095 {
6096 warning (OPT_Wattributes, "%qE attribute ignored",
6097 name);
6098 *no_add_attrs = true;
6099 return NULL_TREE;
6100 }
6101 }
6102
6103 if (TREE_CODE (node) != FUNCTION_DECL
6104 && TREE_CODE (node) != VAR_DECL
6105 && TREE_CODE (node) != TYPE_DECL)
6106 {
6107 *no_add_attrs = true;
6108 warning (OPT_Wattributes, "%qE attribute ignored",
6109 name);
6110 return NULL_TREE;
6111 }
6112
6113 if (TREE_CODE (node) == TYPE_DECL
6114 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6115 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6116 {
6117 *no_add_attrs = true;
6118 warning (OPT_Wattributes, "%qE attribute ignored",
6119 name);
6120 return NULL_TREE;
6121 }
6122
6123 is_dllimport = is_attribute_p ("dllimport", name);
6124
6125 /* Report error on dllimport ambiguities seen now before they cause
6126 any damage. */
6127 if (is_dllimport)
6128 {
6129 /* Honor any target-specific overrides. */
6130 if (!targetm.valid_dllimport_attribute_p (node))
6131 *no_add_attrs = true;
6132
6133 else if (TREE_CODE (node) == FUNCTION_DECL
6134 && DECL_DECLARED_INLINE_P (node))
6135 {
6136 warning (OPT_Wattributes, "inline function %q+D declared as "
6137 " dllimport: attribute ignored", node);
6138 *no_add_attrs = true;
6139 }
6140 /* Like MS, treat definition of dllimported variables and
6141 non-inlined functions on declaration as syntax errors. */
6142 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6143 {
6144 error ("function %q+D definition is marked dllimport", node);
6145 *no_add_attrs = true;
6146 }
6147
6148 else if (TREE_CODE (node) == VAR_DECL)
6149 {
6150 if (DECL_INITIAL (node))
6151 {
6152 error ("variable %q+D definition is marked dllimport",
6153 node);
6154 *no_add_attrs = true;
6155 }
6156
6157 /* `extern' needn't be specified with dllimport.
6158 Specify `extern' now and hope for the best. Sigh. */
6159 DECL_EXTERNAL (node) = 1;
6160 /* Also, implicitly give global scope to dllimport'd variables
6161 declared within a function, unless they are declared static. */
6162 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6163 TREE_PUBLIC (node) = 1;
6164 }
6165
6166 if (!*no_add_attrs)
6167 DECL_DLLIMPORT_P (node) = 1;
6168 }
6169 else if (TREE_CODE (node) == FUNCTION_DECL
6170 && DECL_DECLARED_INLINE_P (node)
6171 && flag_keep_inline_dllexport)
6172 /* An exported function, even if inline, must be emitted. */
6173 DECL_EXTERNAL (node) = 0;
6174
6175 /* Report error if symbol is not accessible at global scope. */
6176 if (!TREE_PUBLIC (node)
6177 && (TREE_CODE (node) == VAR_DECL
6178 || TREE_CODE (node) == FUNCTION_DECL))
6179 {
6180 error ("external linkage required for symbol %q+D because of "
6181 "%qE attribute", node, name);
6182 *no_add_attrs = true;
6183 }
6184
6185 /* A dllexport'd entity must have default visibility so that other
6186 program units (shared libraries or the main executable) can see
6187 it. A dllimport'd entity must have default visibility so that
6188 the linker knows that undefined references within this program
6189 unit can be resolved by the dynamic linker. */
6190 if (!*no_add_attrs)
6191 {
6192 if (DECL_VISIBILITY_SPECIFIED (node)
6193 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6194 error ("%qE implies default visibility, but %qD has already "
6195 "been declared with a different visibility",
6196 name, node);
6197 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6198 DECL_VISIBILITY_SPECIFIED (node) = 1;
6199 }
6200
6201 return NULL_TREE;
6202 }
6203
6204 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6205 \f
6206 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6207 of the various TYPE_QUAL values. */
6208
6209 static void
6210 set_type_quals (tree type, int type_quals)
6211 {
6212 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6213 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6214 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6215 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6216 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6217 }
6218
6219 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6220
6221 bool
6222 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6223 {
6224 return (TYPE_QUALS (cand) == type_quals
6225 && TYPE_NAME (cand) == TYPE_NAME (base)
6226 /* Apparently this is needed for Objective-C. */
6227 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6228 /* Check alignment. */
6229 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6230 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6231 TYPE_ATTRIBUTES (base)));
6232 }
6233
6234 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6235
6236 static bool
6237 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6238 {
6239 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6240 && TYPE_NAME (cand) == TYPE_NAME (base)
6241 /* Apparently this is needed for Objective-C. */
6242 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6243 /* Check alignment. */
6244 && TYPE_ALIGN (cand) == align
6245 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6246 TYPE_ATTRIBUTES (base)));
6247 }
6248
6249 /* This function checks to see if TYPE matches the size of one of the
6250 built-in atomic types, and returns that core atomic type. */
6251
6252 static tree
6253 find_atomic_core_type (tree type)
6254 {
6255 tree base_atomic_type;
6256
6257 /* Only handle complete types. */
6258 if (TYPE_SIZE (type) == NULL_TREE)
6259 return NULL_TREE;
6260
6261 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6262 switch (type_size)
6263 {
6264 case 8:
6265 base_atomic_type = atomicQI_type_node;
6266 break;
6267
6268 case 16:
6269 base_atomic_type = atomicHI_type_node;
6270 break;
6271
6272 case 32:
6273 base_atomic_type = atomicSI_type_node;
6274 break;
6275
6276 case 64:
6277 base_atomic_type = atomicDI_type_node;
6278 break;
6279
6280 case 128:
6281 base_atomic_type = atomicTI_type_node;
6282 break;
6283
6284 default:
6285 base_atomic_type = NULL_TREE;
6286 }
6287
6288 return base_atomic_type;
6289 }
6290
6291 /* Return a version of the TYPE, qualified as indicated by the
6292 TYPE_QUALS, if one exists. If no qualified version exists yet,
6293 return NULL_TREE. */
6294
6295 tree
6296 get_qualified_type (tree type, int type_quals)
6297 {
6298 tree t;
6299
6300 if (TYPE_QUALS (type) == type_quals)
6301 return type;
6302
6303 /* Search the chain of variants to see if there is already one there just
6304 like the one we need to have. If so, use that existing one. We must
6305 preserve the TYPE_NAME, since there is code that depends on this. */
6306 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6307 if (check_qualified_type (t, type, type_quals))
6308 return t;
6309
6310 return NULL_TREE;
6311 }
6312
6313 /* Like get_qualified_type, but creates the type if it does not
6314 exist. This function never returns NULL_TREE. */
6315
6316 tree
6317 build_qualified_type (tree type, int type_quals)
6318 {
6319 tree t;
6320
6321 /* See if we already have the appropriate qualified variant. */
6322 t = get_qualified_type (type, type_quals);
6323
6324 /* If not, build it. */
6325 if (!t)
6326 {
6327 t = build_variant_type_copy (type);
6328 set_type_quals (t, type_quals);
6329
6330 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6331 {
6332 /* See if this object can map to a basic atomic type. */
6333 tree atomic_type = find_atomic_core_type (type);
6334 if (atomic_type)
6335 {
6336 /* Ensure the alignment of this type is compatible with
6337 the required alignment of the atomic type. */
6338 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6339 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6340 }
6341 }
6342
6343 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6344 /* Propagate structural equality. */
6345 SET_TYPE_STRUCTURAL_EQUALITY (t);
6346 else if (TYPE_CANONICAL (type) != type)
6347 /* Build the underlying canonical type, since it is different
6348 from TYPE. */
6349 {
6350 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6351 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6352 }
6353 else
6354 /* T is its own canonical type. */
6355 TYPE_CANONICAL (t) = t;
6356
6357 }
6358
6359 return t;
6360 }
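
/* Illustrative usage sketch (added commentary, not part of the original
   source): qualified variants are shared, so asking twice for the same
   qualifiers yields the same node:

     tree cv_int = build_qualified_type (integer_type_node,
                                         TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
     gcc_assert (cv_int
                 == build_qualified_type (integer_type_node,
                                          TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE));
     gcc_assert (TYPE_MAIN_VARIANT (cv_int) == integer_type_node);

   The second call finds the variant built by the first one on the
   TYPE_MAIN_VARIANT chain via get_qualified_type.  */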
6361
6362 /* Create a variant of type T with alignment ALIGN. */
6363
6364 tree
6365 build_aligned_type (tree type, unsigned int align)
6366 {
6367 tree t;
6368
6369 if (TYPE_PACKED (type)
6370 || TYPE_ALIGN (type) == align)
6371 return type;
6372
6373 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6374 if (check_aligned_type (t, type, align))
6375 return t;
6376
6377 t = build_variant_type_copy (type);
6378 TYPE_ALIGN (t) = align;
6379
6380 return t;
6381 }
6382
6383 /* Create a new distinct copy of TYPE. The new type is made its own
6384 MAIN_VARIANT. If TYPE requires structural equality checks, the
6385 resulting type requires structural equality checks; otherwise, its
6386 TYPE_CANONICAL points to itself. */
6387
6388 tree
6389 build_distinct_type_copy (tree type)
6390 {
6391 tree t = copy_node (type);
6392
6393 TYPE_POINTER_TO (t) = 0;
6394 TYPE_REFERENCE_TO (t) = 0;
6395
6396 /* Set the canonical type either to a new equivalence class, or
6397 propagate the need for structural equality checks. */
6398 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6399 SET_TYPE_STRUCTURAL_EQUALITY (t);
6400 else
6401 TYPE_CANONICAL (t) = t;
6402
6403 /* Make it its own variant. */
6404 TYPE_MAIN_VARIANT (t) = t;
6405 TYPE_NEXT_VARIANT (t) = 0;
6406
6407 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6408 whose TREE_TYPE is not t. This can also happen in the Ada
6409 frontend when using subtypes. */
6410
6411 return t;
6412 }
6413
6414 /* Create a new variant of TYPE, equivalent but distinct. This is so
6415 the caller can modify it. TYPE_CANONICAL for the return type will
6416 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6417 are considered equal by the language itself (or that both types
6418 require structural equality checks). */
6419
6420 tree
6421 build_variant_type_copy (tree type)
6422 {
6423 tree t, m = TYPE_MAIN_VARIANT (type);
6424
6425 t = build_distinct_type_copy (type);
6426
6427 /* Since we're building a variant, assume that it is a non-semantic
6428 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6429 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6430
6431 /* Add the new type to the chain of variants of TYPE. */
6432 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6433 TYPE_NEXT_VARIANT (m) = t;
6434 TYPE_MAIN_VARIANT (t) = m;
6435
6436 return t;
6437 }
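
/* Illustrative sketch of the difference between the two copy routines
   above (added commentary, not part of the original source):

     tree distinct = build_distinct_type_copy (some_record_type);
     tree variant  = build_variant_type_copy (some_record_type);

     // A distinct copy starts a new equivalence class ...
     gcc_assert (TYPE_MAIN_VARIANT (distinct) == distinct);
     // ... while a variant copy stays on the original variant chain
     // and keeps the original's canonical type.
     gcc_assert (TYPE_MAIN_VARIANT (variant)
                 == TYPE_MAIN_VARIANT (some_record_type));

   Here SOME_RECORD_TYPE stands for any previously built type node.  */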
6438 \f
6439 /* Return true if the "from" trees in both tree maps are equal. */
6440
6441 int
6442 tree_map_base_eq (const void *va, const void *vb)
6443 {
6444 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6445 *const b = (const struct tree_map_base *) vb;
6446 return (a->from == b->from);
6447 }
6448
6449 /* Hash a "from" tree in a tree_map_base. */
6450
6451 unsigned int
6452 tree_map_base_hash (const void *item)
6453 {
6454 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6455 }
6456
6457 /* Return true if this tree map structure is marked for garbage collection
6458 purposes. We simply return true if the from tree is marked, so that this
6459 structure goes away when the from tree goes away. */
6460
6461 int
6462 tree_map_base_marked_p (const void *p)
6463 {
6464 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6465 }
6466
6467 /* Hash a from tree in a tree_map. */
6468
6469 unsigned int
6470 tree_map_hash (const void *item)
6471 {
6472 return (((const struct tree_map *) item)->hash);
6473 }
6474
6475 /* Hash a from tree in a tree_decl_map. */
6476
6477 unsigned int
6478 tree_decl_map_hash (const void *item)
6479 {
6480 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6481 }
6482
6483 /* Return the initialization priority for DECL. */
6484
6485 priority_type
6486 decl_init_priority_lookup (tree decl)
6487 {
6488 symtab_node *snode = symtab_get_node (decl);
6489
6490 if (!snode)
6491 return DEFAULT_INIT_PRIORITY;
6492 return snode->get_init_priority ();
6494 }
6495
6496 /* Return the finalization priority for DECL. */
6497
6498 priority_type
6499 decl_fini_priority_lookup (tree decl)
6500 {
6501 cgraph_node *node = cgraph_get_node (decl);
6502
6503 if (!node)
6504 return DEFAULT_INIT_PRIORITY;
6505 return node->get_fini_priority ();
6507 }
6508
6509 /* Set the initialization priority for DECL to PRIORITY. */
6510
6511 void
6512 decl_init_priority_insert (tree decl, priority_type priority)
6513 {
6514 struct symtab_node *snode;
6515
6516 if (priority == DEFAULT_INIT_PRIORITY)
6517 {
6518 snode = symtab_get_node (decl);
6519 if (!snode)
6520 return;
6521 }
6522 else if (TREE_CODE (decl) == VAR_DECL)
6523 snode = varpool_node_for_decl (decl);
6524 else
6525 snode = cgraph_get_create_node (decl);
6526 snode->set_init_priority (priority);
6527 }
6528
6529 /* Set the finalization priority for DECL to PRIORITY. */
6530
6531 void
6532 decl_fini_priority_insert (tree decl, priority_type priority)
6533 {
6534 struct cgraph_node *node;
6535
6536 if (priority == DEFAULT_INIT_PRIORITY)
6537 {
6538 node = cgraph_get_node (decl);
6539 if (!node)
6540 return;
6541 }
6542 else
6543 node = cgraph_get_create_node (decl);
6544 node->set_fini_priority (priority);
6545 }
6546
6547 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6548
6549 static void
6550 print_debug_expr_statistics (void)
6551 {
6552 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6553 (long) htab_size (debug_expr_for_decl),
6554 (long) htab_elements (debug_expr_for_decl),
6555 htab_collisions (debug_expr_for_decl));
6556 }
6557
6558 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6559
6560 static void
6561 print_value_expr_statistics (void)
6562 {
6563 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6564 (long) htab_size (value_expr_for_decl),
6565 (long) htab_elements (value_expr_for_decl),
6566 htab_collisions (value_expr_for_decl));
6567 }
6568
6569 /* Lookup a debug expression for FROM, and return it if we find one. */
6570
6571 tree
6572 decl_debug_expr_lookup (tree from)
6573 {
6574 struct tree_decl_map *h, in;
6575 in.base.from = from;
6576
6577 h = (struct tree_decl_map *)
6578 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6579 if (h)
6580 return h->to;
6581 return NULL_TREE;
6582 }
6583
6584 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6585
6586 void
6587 decl_debug_expr_insert (tree from, tree to)
6588 {
6589 struct tree_decl_map *h;
6590 void **loc;
6591
6592 h = ggc_alloc<tree_decl_map> ();
6593 h->base.from = from;
6594 h->to = to;
6595 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6596 INSERT);
6597 *(struct tree_decl_map **) loc = h;
6598 }
6599
6600 /* Lookup a value expression for FROM, and return it if we find one. */
6601
6602 tree
6603 decl_value_expr_lookup (tree from)
6604 {
6605 struct tree_decl_map *h, in;
6606 in.base.from = from;
6607
6608 h = (struct tree_decl_map *)
6609 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6610 if (h)
6611 return h->to;
6612 return NULL_TREE;
6613 }
6614
6615 /* Insert a mapping FROM->TO in the value expression hashtable. */
6616
6617 void
6618 decl_value_expr_insert (tree from, tree to)
6619 {
6620 struct tree_decl_map *h;
6621 void **loc;
6622
6623 h = ggc_alloc<tree_decl_map> ();
6624 h->base.from = from;
6625 h->to = to;
6626 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6627 INSERT);
6628 *(struct tree_decl_map **) loc = h;
6629 }
6630
6631 /* Lookup a vector of debug arguments for FROM, and return it if we
6632 find one. */
6633
6634 vec<tree, va_gc> **
6635 decl_debug_args_lookup (tree from)
6636 {
6637 struct tree_vec_map *h, in;
6638
6639 if (!DECL_HAS_DEBUG_ARGS_P (from))
6640 return NULL;
6641 gcc_checking_assert (debug_args_for_decl != NULL);
6642 in.base.from = from;
6643 h = (struct tree_vec_map *)
6644 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6645 if (h)
6646 return &h->to;
6647 return NULL;
6648 }
6649
6650 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6651 arguments hashtable. */
6652
6653 vec<tree, va_gc> **
6654 decl_debug_args_insert (tree from)
6655 {
6656 struct tree_vec_map *h;
6657 void **loc;
6658
6659 if (DECL_HAS_DEBUG_ARGS_P (from))
6660 return decl_debug_args_lookup (from);
6661 if (debug_args_for_decl == NULL)
6662 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6663 tree_vec_map_eq, 0);
6664 h = ggc_alloc<tree_vec_map> ();
6665 h->base.from = from;
6666 h->to = NULL;
6667 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6668 INSERT);
6669 *(struct tree_vec_map **) loc = h;
6670 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6671 return &h->to;
6672 }
6673
6674 /* Hashing of types so that we don't make duplicates.
6675 The entry point is `type_hash_canon'. */
6676
6677 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6678 with types in the TREE_VALUE slots), by adding the hash codes
6679 of the individual types. */
6680
6681 static unsigned int
6682 type_hash_list (const_tree list, hashval_t hashcode)
6683 {
6684 const_tree tail;
6685
6686 for (tail = list; tail; tail = TREE_CHAIN (tail))
6687 if (TREE_VALUE (tail) != error_mark_node)
6688 hashcode = iterative_hash_object (TYPE_HASH (TREE_VALUE (tail)),
6689 hashcode);
6690
6691 return hashcode;
6692 }
6693
6694 /* These are the Hashtable callback functions. */
6695
6696 /* Returns true iff the types are equivalent. */
6697
6698 static int
6699 type_hash_eq (const void *va, const void *vb)
6700 {
6701 const struct type_hash *const a = (const struct type_hash *) va,
6702 *const b = (const struct type_hash *) vb;
6703
6704 /* First test the things that are the same for all types. */
6705 if (a->hash != b->hash
6706 || TREE_CODE (a->type) != TREE_CODE (b->type)
6707 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6708 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6709 TYPE_ATTRIBUTES (b->type))
6710 || (TREE_CODE (a->type) != COMPLEX_TYPE
6711 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6712 return 0;
6713
6714 /* Be careful about comparing arrays before and after the element type
6715 has been completed; don't compare TYPE_ALIGN unless both types are
6716 complete. */
6717 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6718 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6719 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6720 return 0;
6721
6722 switch (TREE_CODE (a->type))
6723 {
6724 case VOID_TYPE:
6725 case COMPLEX_TYPE:
6726 case POINTER_TYPE:
6727 case REFERENCE_TYPE:
6728 case NULLPTR_TYPE:
6729 return 1;
6730
6731 case VECTOR_TYPE:
6732 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6733
6734 case ENUMERAL_TYPE:
6735 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6736 && !(TYPE_VALUES (a->type)
6737 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6738 && TYPE_VALUES (b->type)
6739 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6740 && type_list_equal (TYPE_VALUES (a->type),
6741 TYPE_VALUES (b->type))))
6742 return 0;
6743
6744 /* ... fall through ... */
6745
6746 case INTEGER_TYPE:
6747 case REAL_TYPE:
6748 case BOOLEAN_TYPE:
6749 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6750 return false;
6751 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6752 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6753 TYPE_MAX_VALUE (b->type)))
6754 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6755 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6756 TYPE_MIN_VALUE (b->type))));
6757
6758 case FIXED_POINT_TYPE:
6759 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6760
6761 case OFFSET_TYPE:
6762 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6763
6764 case METHOD_TYPE:
6765 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6766 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6767 || (TYPE_ARG_TYPES (a->type)
6768 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6769 && TYPE_ARG_TYPES (b->type)
6770 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6771 && type_list_equal (TYPE_ARG_TYPES (a->type),
6772 TYPE_ARG_TYPES (b->type)))))
6773 break;
6774 return 0;
6775 case ARRAY_TYPE:
6776 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6777
6778 case RECORD_TYPE:
6779 case UNION_TYPE:
6780 case QUAL_UNION_TYPE:
6781 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6782 || (TYPE_FIELDS (a->type)
6783 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6784 && TYPE_FIELDS (b->type)
6785 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6786 && type_list_equal (TYPE_FIELDS (a->type),
6787 TYPE_FIELDS (b->type))));
6788
6789 case FUNCTION_TYPE:
6790 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6791 || (TYPE_ARG_TYPES (a->type)
6792 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6793 && TYPE_ARG_TYPES (b->type)
6794 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6795 && type_list_equal (TYPE_ARG_TYPES (a->type),
6796 TYPE_ARG_TYPES (b->type))))
6797 break;
6798 return 0;
6799
6800 default:
6801 return 0;
6802 }
6803
6804 if (lang_hooks.types.type_hash_eq != NULL)
6805 return lang_hooks.types.type_hash_eq (a->type, b->type);
6806
6807 return 1;
6808 }
6809
6810 /* Return the cached hash value. */
6811
6812 static hashval_t
6813 type_hash_hash (const void *item)
6814 {
6815 return ((const struct type_hash *) item)->hash;
6816 }
6817
6818 /* Look in the type hash table for a type isomorphic to TYPE.
6819 If one is found, return it. Otherwise return 0. */
6820
6821 static tree
6822 type_hash_lookup (hashval_t hashcode, tree type)
6823 {
6824 struct type_hash *h, in;
6825
6826 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6827 must call that routine before comparing TYPE_ALIGNs. */
6828 layout_type (type);
6829
6830 in.hash = hashcode;
6831 in.type = type;
6832
6833 h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in,
6834 hashcode);
6835 if (h)
6836 return h->type;
6837 return NULL_TREE;
6838 }
6839
6840 /* Add an entry to the type-hash-table
6841 for a type TYPE whose hash code is HASHCODE. */
6842
6843 static void
6844 type_hash_add (hashval_t hashcode, tree type)
6845 {
6846 struct type_hash *h;
6847 void **loc;
6848
6849 h = ggc_alloc<type_hash> ();
6850 h->hash = hashcode;
6851 h->type = type;
6852 loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
6853 *loc = (void *)h;
6854 }
6855
6856 /* Given TYPE, and HASHCODE its hash code, return the canonical
6857 object for an identical type if one already exists.
6858 Otherwise, return TYPE, and record it as the canonical object.
6859
6860 To use this function, first create a type of the sort you want.
6861 Then compute its hash code from the fields of the type that
6862 make it different from other similar types.
6863 Then call this function and use the value. */
6864
6865 tree
6866 type_hash_canon (unsigned int hashcode, tree type)
6867 {
6868 tree t1;
6869
6870 /* The hash table only contains main variants, so ensure that's what we're
6871 being passed. */
6872 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6873
6874 /* See if the type is in the hash table already. If so, return it.
6875 Otherwise, add the type. */
6876 t1 = type_hash_lookup (hashcode, type);
6877 if (t1 != 0)
6878 {
6879 if (GATHER_STATISTICS)
6880 {
6881 tree_code_counts[(int) TREE_CODE (type)]--;
6882 tree_node_counts[(int) t_kind]--;
6883 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6884 }
6885 return t1;
6886 }
6887 else
6888 {
6889 type_hash_add (hashcode, type);
6890 return type;
6891 }
6892 }
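
/* Illustrative usage sketch (added commentary, not part of the original
   source), following the recipe in the comment above: create the node,
   hash the distinguishing fields, then canonicalize.

     tree itype = make_node (INTEGER_TYPE);
     TYPE_PRECISION (itype) = 24;
     fixup_unsigned_type (itype);
     itype = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);

   If an identical 24-bit type was canonicalized earlier, that node is
   returned and the freshly built one is abandoned; otherwise the new
   node is recorded.  build_nonstandard_integer_type below is the real
   in-tree user of exactly this pattern.  */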
6893
6894 /* See if the data pointed to by the type hash table is marked. We consider
6895 it marked simply if the type itself is marked, so that the entry goes
6896 away when the type does. */
6897
6898 static int
6899 type_hash_marked_p (const void *p)
6900 {
6901 const_tree const type = ((const struct type_hash *) p)->type;
6902
6903 return ggc_marked_p (type);
6904 }
6905
6906 static void
6907 print_type_hash_statistics (void)
6908 {
6909 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6910 (long) htab_size (type_hash_table),
6911 (long) htab_elements (type_hash_table),
6912 htab_collisions (type_hash_table));
6913 }
6914
6915 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6916 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6917 by adding the hash codes of the individual attributes. */
6918
6919 static unsigned int
6920 attribute_hash_list (const_tree list, hashval_t hashcode)
6921 {
6922 const_tree tail;
6923
6924 for (tail = list; tail; tail = TREE_CHAIN (tail))
6925 /* ??? Do we want to add in TREE_VALUE too? */
6926 hashcode = iterative_hash_object
6927 (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)), hashcode);
6928 return hashcode;
6929 }
6930
6931 /* Given two lists of attributes, return true if list L2 is
6932 equivalent to L1. */
6933
6934 int
6935 attribute_list_equal (const_tree l1, const_tree l2)
6936 {
6937 if (l1 == l2)
6938 return 1;
6939
6940 return attribute_list_contained (l1, l2)
6941 && attribute_list_contained (l2, l1);
6942 }
6943
6944 /* Given two lists of attributes, return true if list L2 is
6945 completely contained within L1. */
6946 /* ??? This would be faster if attribute names were stored in a canonicalized
6947 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6948 must be used to show these elements are equivalent (which they are). */
6949 /* ??? It's not clear that attributes with arguments will always be handled
6950 correctly. */
6951
6952 int
6953 attribute_list_contained (const_tree l1, const_tree l2)
6954 {
6955 const_tree t1, t2;
6956
6957 /* First check the obvious, maybe the lists are identical. */
6958 if (l1 == l2)
6959 return 1;
6960
6961 /* Maybe the lists are similar. */
6962 for (t1 = l1, t2 = l2;
6963 t1 != 0 && t2 != 0
6964 && get_attribute_name (t1) == get_attribute_name (t2)
6965 && TREE_VALUE (t1) == TREE_VALUE (t2);
6966 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6967 ;
6968
6969 /* Maybe the lists are equal. */
6970 if (t1 == 0 && t2 == 0)
6971 return 1;
6972
6973 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6974 {
6975 const_tree attr;
6976 /* This CONST_CAST is okay because lookup_attribute does not
6977 modify its argument and the return value is assigned to a
6978 const_tree. */
6979 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6980 CONST_CAST_TREE (l1));
6981 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6982 attr = lookup_ident_attribute (get_attribute_name (t2),
6983 TREE_CHAIN (attr)))
6984 ;
6985
6986 if (attr == NULL_TREE)
6987 return 0;
6988 }
6989
6990 return 1;
6991 }
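
/* Illustrative sketch (added commentary, not part of the original
   source): containment is checked per attribute, not positionally, so
   reordered lists still compare as contained.  With hypothetical lists

     L1: packed, aligned(8)
     L2: aligned(8), packed

   attribute_list_contained (L1, L2) and attribute_list_contained (L2, L1)
   both hold, and attribute_list_equal therefore returns nonzero even
   though the head pointers differ.  */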
6992
6993 /* Given two lists of types
6994 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6995 return 1 if the lists contain the same types in the same order.
6996 Also, the TREE_PURPOSEs must match. */
6997
6998 int
6999 type_list_equal (const_tree l1, const_tree l2)
7000 {
7001 const_tree t1, t2;
7002
7003 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7004 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7005 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7006 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7007 && (TREE_TYPE (TREE_PURPOSE (t1))
7008 == TREE_TYPE (TREE_PURPOSE (t2))))))
7009 return 0;
7010
7011 return t1 == t2;
7012 }
7013
7014 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7015 given by TYPE. If the argument list accepts variable arguments,
7016 then this function counts only the ordinary arguments. */
7017
7018 int
7019 type_num_arguments (const_tree type)
7020 {
7021 int i = 0;
7022 tree t;
7023
7024 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7025 /* If the function does not take a variable number of arguments,
7026 the last element in the list will have type `void'. */
7027 if (VOID_TYPE_P (TREE_VALUE (t)))
7028 break;
7029 else
7030 ++i;
7031
7032 return i;
7033 }
7034
7035 /* Nonzero if integer constants T1 and T2
7036 represent the same constant value. */
7037
7038 int
7039 tree_int_cst_equal (const_tree t1, const_tree t2)
7040 {
7041 if (t1 == t2)
7042 return 1;
7043
7044 if (t1 == 0 || t2 == 0)
7045 return 0;
7046
7047 if (TREE_CODE (t1) == INTEGER_CST
7048 && TREE_CODE (t2) == INTEGER_CST
7049 && wi::to_widest (t1) == wi::to_widest (t2))
7050 return 1;
7051
7052 return 0;
7053 }
7054
7055 /* Return true if T is an INTEGER_CST whose numerical value (extended
7056 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7057
7058 bool
7059 tree_fits_shwi_p (const_tree t)
7060 {
7061 return (t != NULL_TREE
7062 && TREE_CODE (t) == INTEGER_CST
7063 && wi::fits_shwi_p (wi::to_widest (t)));
7064 }
7065
7066 /* Return true if T is an INTEGER_CST whose numerical value (extended
7067 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7068
7069 bool
7070 tree_fits_uhwi_p (const_tree t)
7071 {
7072 return (t != NULL_TREE
7073 && TREE_CODE (t) == INTEGER_CST
7074 && wi::fits_uhwi_p (wi::to_widest (t)));
7075 }
7076
7077 /* T is an INTEGER_CST whose numerical value (extended according to
7078 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7079 HOST_WIDE_INT. */
7080
7081 HOST_WIDE_INT
7082 tree_to_shwi (const_tree t)
7083 {
7084 gcc_assert (tree_fits_shwi_p (t));
7085 return TREE_INT_CST_LOW (t);
7086 }
7087
7088 /* T is an INTEGER_CST whose numerical value (extended according to
7089 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7090 HOST_WIDE_INT. */
7091
7092 unsigned HOST_WIDE_INT
7093 tree_to_uhwi (const_tree t)
7094 {
7095 gcc_assert (tree_fits_uhwi_p (t));
7096 return TREE_INT_CST_LOW (t);
7097 }
7098
7099 /* Return the most significant (sign) bit of T. */
7100
7101 int
7102 tree_int_cst_sign_bit (const_tree t)
7103 {
7104 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7105
7106 return wi::extract_uhwi (t, bitno, 1);
7107 }
7108
7109 /* Return an indication of the sign of the integer constant T.
7110 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7111 Note that -1 will never be returned if T's type is unsigned. */
7112
7113 int
7114 tree_int_cst_sgn (const_tree t)
7115 {
7116 if (wi::eq_p (t, 0))
7117 return 0;
7118 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7119 return 1;
7120 else if (wi::neg_p (t))
7121 return -1;
7122 else
7123 return 1;
7124 }
7125
7126 /* Return the minimum number of bits needed to represent VALUE in a
7127 signed or unsigned type; SGN says which. */
7128
7129 unsigned int
7130 tree_int_cst_min_precision (tree value, signop sgn)
7131 {
7132 /* If the value is negative, compute its negative minus 1. The latter
7133 adjustment is because the absolute value of the largest negative value
7134 is one larger than the largest positive value. This is equivalent to
7135 a bit-wise negation, so use that operation instead. */
7136
7137 if (tree_int_cst_sgn (value) < 0)
7138 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7139
7140 /* Return the number of bits needed, taking into account the fact
7141 that we need one more bit for a signed than unsigned type.
7142 If value is 0 or -1, the minimum precision is 1 no matter
7143 whether SGN is SIGNED or UNSIGNED. */
7144
7145 if (integer_zerop (value))
7146 return 1;
7147 else
7148 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7149 }
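
/* Worked example for the function above (added commentary, not part of
   the original source): for VALUE 5 the computation is
   floor_log2 (5) + 1 = 3 bits when SGN is UNSIGNED, plus one extra sign
   bit, i.e. 4, when SGN is SIGNED.  For VALUE -3 the bit-wise negation
   yields 2, so floor_log2 (2) + 1 + 1 = 3 bits, matching the signed
   range [-4, 3]:

     gcc_assert (tree_int_cst_min_precision
                   (build_int_cst (integer_type_node, 5), UNSIGNED) == 3);
     gcc_assert (tree_int_cst_min_precision
                   (build_int_cst (integer_type_node, -3), SIGNED) == 3);
*/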
7150
7151 /* Return truthvalue of whether T1 is the same tree structure as T2.
7152 Return 1 if they are the same.
7153 Return 0 if they are understandably different.
7154 Return -1 if either contains tree structure not understood by
7155 this function. */
7156
7157 int
7158 simple_cst_equal (const_tree t1, const_tree t2)
7159 {
7160 enum tree_code code1, code2;
7161 int cmp;
7162 int i;
7163
7164 if (t1 == t2)
7165 return 1;
7166 if (t1 == 0 || t2 == 0)
7167 return 0;
7168
7169 code1 = TREE_CODE (t1);
7170 code2 = TREE_CODE (t2);
7171
7172 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7173 {
7174 if (CONVERT_EXPR_CODE_P (code2)
7175 || code2 == NON_LVALUE_EXPR)
7176 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7177 else
7178 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7179 }
7180
7181 else if (CONVERT_EXPR_CODE_P (code2)
7182 || code2 == NON_LVALUE_EXPR)
7183 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7184
7185 if (code1 != code2)
7186 return 0;
7187
7188 switch (code1)
7189 {
7190 case INTEGER_CST:
7191 return wi::to_widest (t1) == wi::to_widest (t2);
7192
7193 case REAL_CST:
7194 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7195
7196 case FIXED_CST:
7197 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7198
7199 case STRING_CST:
7200 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7201 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7202 TREE_STRING_LENGTH (t1)));
7203
7204 case CONSTRUCTOR:
7205 {
7206 unsigned HOST_WIDE_INT idx;
7207 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7208 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7209
7210 if (vec_safe_length (v1) != vec_safe_length (v2))
7211 return false;
7212
7213 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7214 /* ??? Should we also handle fields here? */
7215 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7216 return false;
7217 return true;
7218 }
7219
7220 case SAVE_EXPR:
7221 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7222
7223 case CALL_EXPR:
7224 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7225 if (cmp <= 0)
7226 return cmp;
7227 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7228 return 0;
7229 {
7230 const_tree arg1, arg2;
7231 const_call_expr_arg_iterator iter1, iter2;
7232 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7233 arg2 = first_const_call_expr_arg (t2, &iter2);
7234 arg1 && arg2;
7235 arg1 = next_const_call_expr_arg (&iter1),
7236 arg2 = next_const_call_expr_arg (&iter2))
7237 {
7238 cmp = simple_cst_equal (arg1, arg2);
7239 if (cmp <= 0)
7240 return cmp;
7241 }
7242 return arg1 == arg2;
7243 }
7244
7245 case TARGET_EXPR:
7246 /* Special case: if either target is an unallocated VAR_DECL,
7247 it means that it's going to be unified with whatever the
7248 TARGET_EXPR is really supposed to initialize, so treat it
7249 as being equivalent to anything. */
7250 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7251 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7252 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7253 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7254 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7255 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7256 cmp = 1;
7257 else
7258 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7259
7260 if (cmp <= 0)
7261 return cmp;
7262
7263 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7264
7265 case WITH_CLEANUP_EXPR:
7266 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7267 if (cmp <= 0)
7268 return cmp;
7269
7270 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7271
7272 case COMPONENT_REF:
7273 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7274 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7275
7276 return 0;
7277
7278 case VAR_DECL:
7279 case PARM_DECL:
7280 case CONST_DECL:
7281 case FUNCTION_DECL:
7282 return 0;
7283
7284 default:
7285 break;
7286 }
7287
7288 /* This general rule works for most tree codes. All exceptions should be
7289 handled above. If this is a language-specific tree code, we can't
7290 trust what might be in the operand, so say we don't know
7291 the situation. */
7292 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7293 return -1;
7294
7295 switch (TREE_CODE_CLASS (code1))
7296 {
7297 case tcc_unary:
7298 case tcc_binary:
7299 case tcc_comparison:
7300 case tcc_expression:
7301 case tcc_reference:
7302 case tcc_statement:
7303 cmp = 1;
7304 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7305 {
7306 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7307 if (cmp <= 0)
7308 return cmp;
7309 }
7310
7311 return cmp;
7312
7313 default:
7314 return -1;
7315 }
7316 }
7317
7318 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7319 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7320 than U, respectively. */
7321
7322 int
7323 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7324 {
7325 if (tree_int_cst_sgn (t) < 0)
7326 return -1;
7327 else if (!tree_fits_uhwi_p (t))
7328 return 1;
7329 else if (TREE_INT_CST_LOW (t) == u)
7330 return 0;
7331 else if (TREE_INT_CST_LOW (t) < u)
7332 return -1;
7333 else
7334 return 1;
7335 }
7336
7337 /* Return true if SIZE represents a constant size that is in bounds of
7338 what the middle-end and the backend accept (covering not more than
7339 half of the address-space). */
7340
7341 bool
7342 valid_constant_size_p (const_tree size)
7343 {
7344 if (! tree_fits_uhwi_p (size)
7345 || TREE_OVERFLOW (size)
7346 || tree_int_cst_sign_bit (size) != 0)
7347 return false;
7348 return true;
7349 }
7350
7351 /* Return the precision of the type, or for a complex or vector type the
7352 precision of the type of its elements. */
7353
7354 unsigned int
7355 element_precision (const_tree type)
7356 {
7357 enum tree_code code = TREE_CODE (type);
7358 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7359 type = TREE_TYPE (type);
7360
7361 return TYPE_PRECISION (type);
7362 }
7363
7364 /* Return true if CODE represents an associative tree code. Otherwise
7365 return false. */
7366 bool
7367 associative_tree_code (enum tree_code code)
7368 {
7369 switch (code)
7370 {
7371 case BIT_IOR_EXPR:
7372 case BIT_AND_EXPR:
7373 case BIT_XOR_EXPR:
7374 case PLUS_EXPR:
7375 case MULT_EXPR:
7376 case MIN_EXPR:
7377 case MAX_EXPR:
7378 return true;
7379
7380 default:
7381 break;
7382 }
7383 return false;
7384 }
7385
7386 /* Return true if CODE represents a commutative tree code. Otherwise
7387 return false. */
7388 bool
7389 commutative_tree_code (enum tree_code code)
7390 {
7391 switch (code)
7392 {
7393 case PLUS_EXPR:
7394 case MULT_EXPR:
7395 case MULT_HIGHPART_EXPR:
7396 case MIN_EXPR:
7397 case MAX_EXPR:
7398 case BIT_IOR_EXPR:
7399 case BIT_XOR_EXPR:
7400 case BIT_AND_EXPR:
7401 case NE_EXPR:
7402 case EQ_EXPR:
7403 case UNORDERED_EXPR:
7404 case ORDERED_EXPR:
7405 case UNEQ_EXPR:
7406 case LTGT_EXPR:
7407 case TRUTH_AND_EXPR:
7408 case TRUTH_XOR_EXPR:
7409 case TRUTH_OR_EXPR:
7410 case WIDEN_MULT_EXPR:
7411 case VEC_WIDEN_MULT_HI_EXPR:
7412 case VEC_WIDEN_MULT_LO_EXPR:
7413 case VEC_WIDEN_MULT_EVEN_EXPR:
7414 case VEC_WIDEN_MULT_ODD_EXPR:
7415 return true;
7416
7417 default:
7418 break;
7419 }
7420 return false;
7421 }
7422
7423 /* Return true if CODE represents a ternary tree code for which the
7424 first two operands are commutative. Otherwise return false. */
7425 bool
7426 commutative_ternary_tree_code (enum tree_code code)
7427 {
7428 switch (code)
7429 {
7430 case WIDEN_MULT_PLUS_EXPR:
7431 case WIDEN_MULT_MINUS_EXPR:
7432 return true;
7433
7434 default:
7435 break;
7436 }
7437 return false;
7438 }
7439
7440 /* Generate a hash value for an expression. This can be used iteratively
7441 by passing a previous result as the VAL argument.
7442
7443 This function is intended to produce the same hash for expressions which
7444 would compare equal using operand_equal_p. */
7445
7446 hashval_t
7447 iterative_hash_expr (const_tree t, hashval_t val)
7448 {
7449 int i;
7450 enum tree_code code;
7451 enum tree_code_class tclass;
7452
7453 if (t == NULL_TREE)
7454 return iterative_hash_hashval_t (0, val);
7455
7456 code = TREE_CODE (t);
7457
7458 switch (code)
7459 {
7460 /* Alas, constants aren't shared, so we can't rely on pointer
7461 identity. */
7462 case VOID_CST:
7463 return iterative_hash_hashval_t (0, val);
7464 case INTEGER_CST:
7465 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7466 val = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), val);
7467 return val;
7468 case REAL_CST:
7469 {
7470 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7471
7472 return iterative_hash_hashval_t (val2, val);
7473 }
7474 case FIXED_CST:
7475 {
7476 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7477
7478 return iterative_hash_hashval_t (val2, val);
7479 }
7480 case STRING_CST:
7481 return iterative_hash (TREE_STRING_POINTER (t),
7482 TREE_STRING_LENGTH (t), val);
7483 case COMPLEX_CST:
7484 val = iterative_hash_expr (TREE_REALPART (t), val);
7485 return iterative_hash_expr (TREE_IMAGPART (t), val);
7486 case VECTOR_CST:
7487 {
7488 unsigned i;
7489 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7490 val = iterative_hash_expr (VECTOR_CST_ELT (t, i), val);
7491 return val;
7492 }
7493 case SSA_NAME:
7494 /* We can just compare by pointer. */
7495 return iterative_hash_host_wide_int (SSA_NAME_VERSION (t), val);
7496 case PLACEHOLDER_EXPR:
7497 /* The node itself doesn't matter. */
7498 return val;
7499 case TREE_LIST:
7500 /* A list of expressions, for a CALL_EXPR or as the elements of a
7501 VECTOR_CST. */
7502 for (; t; t = TREE_CHAIN (t))
7503 val = iterative_hash_expr (TREE_VALUE (t), val);
7504 return val;
7505 case CONSTRUCTOR:
7506 {
7507 unsigned HOST_WIDE_INT idx;
7508 tree field, value;
7509 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7510 {
7511 val = iterative_hash_expr (field, val);
7512 val = iterative_hash_expr (value, val);
7513 }
7514 return val;
7515 }
7516 case FUNCTION_DECL:
7517 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7518 Otherwise nodes that compare equal according to operand_equal_p might
7519 get different hash codes. However, don't do this for machine specific
7520 or front end builtins, since the function code is overloaded in those
7521 cases. */
7522 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7523 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7524 {
7525 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7526 code = TREE_CODE (t);
7527 }
7528 /* FALL THROUGH */
7529 default:
7530 tclass = TREE_CODE_CLASS (code);
7531
7532 if (tclass == tcc_declaration)
7533 {
7534 /* DECLs have a unique ID. */
7535 val = iterative_hash_host_wide_int (DECL_UID (t), val);
7536 }
7537 else
7538 {
7539 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7540
7541 val = iterative_hash_object (code, val);
7542
7543 /* Don't hash the type, that can lead to having nodes which
7544 compare equal according to operand_equal_p, but which
7545 have different hash codes. */
7546 if (CONVERT_EXPR_CODE_P (code)
7547 || code == NON_LVALUE_EXPR)
7548 {
7549 /* Make sure to include signedness in the hash computation. */
7550 val += TYPE_UNSIGNED (TREE_TYPE (t));
7551 val = iterative_hash_expr (TREE_OPERAND (t, 0), val);
7552 }
7553
7554 else if (commutative_tree_code (code))
7555 {
7556 /* It's a commutative expression. We want to hash it the same
7557 however it appears. We do this by first hashing both operands
7558 and then rehashing based on the order of their independent
7559 hashes. */
7560 hashval_t one = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
7561 hashval_t two = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
7562 hashval_t tmp;
7563
7564 if (one > two)
7565 tmp = one, one = two, two = tmp;
7566
7567 val = iterative_hash_hashval_t (one, val);
7568 val = iterative_hash_hashval_t (two, val);
7569 }
7570 else
7571 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7572 val = iterative_hash_expr (TREE_OPERAND (t, i), val);
7573 }
7574 return val;
7575 }
7576 }
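
/* Illustrative sketch (added commentary, not part of the original
   source): because commutative operands are hashed independently and
   then combined in sorted order, A + B and B + A hash identically, and
   the VAL argument lets several expressions feed one running hash:

     hashval_t h = iterative_hash_expr (op0, 0);
     h = iterative_hash_expr (op1, h);

   Here OP0 and OP1 stand for arbitrary expression trees; expressions
   that compare equal under operand_equal_p are intended to reach the
   same final value.  */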
7577
7578 /* Constructors for pointer, array and function types.
7579 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7580 constructed by language-dependent code, not here.) */
7581
7582 /* Construct, lay out and return the type of pointers to TO_TYPE with
7583 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7584 reference all of memory. If such a type has already been
7585 constructed, reuse it. */
7586
7587 tree
7588 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7589 bool can_alias_all)
7590 {
7591 tree t;
7592
7593 if (to_type == error_mark_node)
7594 return error_mark_node;
7595
7596 /* If the pointed-to type has the may_alias attribute set, force
7597 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7598 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7599 can_alias_all = true;
7600
7601 /* In some cases, languages will have things that aren't a POINTER_TYPE
7602 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7603 In that case, return that type without regard to the rest of our
7604 operands.
7605
7606 ??? This is a kludge, but consistent with the way this function has
7607 always operated and there doesn't seem to be a good way to avoid this
7608 at the moment. */
7609 if (TYPE_POINTER_TO (to_type) != 0
7610 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7611 return TYPE_POINTER_TO (to_type);
7612
7613 /* First, if we already have a type for pointers to TO_TYPE and it's
7614 the proper mode, use it. */
7615 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7616 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7617 return t;
7618
7619 t = make_node (POINTER_TYPE);
7620
7621 TREE_TYPE (t) = to_type;
7622 SET_TYPE_MODE (t, mode);
7623 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7624 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7625 TYPE_POINTER_TO (to_type) = t;
7626
7627 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7628 SET_TYPE_STRUCTURAL_EQUALITY (t);
7629 else if (TYPE_CANONICAL (to_type) != to_type)
7630 TYPE_CANONICAL (t)
7631 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7632 mode, can_alias_all);
7633
7634 /* Lay out the type. This function has many callers that are concerned
7635 with expression-construction, and this simplifies them all. */
7636 layout_type (t);
7637
7638 return t;
7639 }
7640
7641 /* By default build pointers in ptr_mode. */
7642
7643 tree
7644 build_pointer_type (tree to_type)
7645 {
7646 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7647 : TYPE_ADDR_SPACE (to_type);
7648 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7649 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7650 }
7651
7652 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7653
7654 tree
7655 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7656 bool can_alias_all)
7657 {
7658 tree t;
7659
7660 if (to_type == error_mark_node)
7661 return error_mark_node;
7662
7663 /* If the pointed-to type has the may_alias attribute set, force
7664 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7665 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7666 can_alias_all = true;
7667
7668 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7669 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7670 In that case, return that type without regard to the rest of our
7671 operands.
7672
7673 ??? This is a kludge, but consistent with the way this function has
7674 always operated and there doesn't seem to be a good way to avoid this
7675 at the moment. */
7676 if (TYPE_REFERENCE_TO (to_type) != 0
7677 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7678 return TYPE_REFERENCE_TO (to_type);
7679
7680 /* First, if we already have a type for pointers to TO_TYPE and it's
7681 the proper mode, use it. */
7682 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7683 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7684 return t;
7685
7686 t = make_node (REFERENCE_TYPE);
7687
7688 TREE_TYPE (t) = to_type;
7689 SET_TYPE_MODE (t, mode);
7690 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7691 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7692 TYPE_REFERENCE_TO (to_type) = t;
7693
7694 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7695 SET_TYPE_STRUCTURAL_EQUALITY (t);
7696 else if (TYPE_CANONICAL (to_type) != to_type)
7697 TYPE_CANONICAL (t)
7698 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7699 mode, can_alias_all);
7700
7701 layout_type (t);
7702
7703 return t;
7704 }
7705
7706
7707 /* Build the node for the type of references-to-TO_TYPE by default
7708 in ptr_mode. */
7709
7710 tree
7711 build_reference_type (tree to_type)
7712 {
7713 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7714 : TYPE_ADDR_SPACE (to_type);
7715 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7716 return build_reference_type_for_mode (to_type, pointer_mode, false);
7717 }
7718
7719 #define MAX_INT_CACHED_PREC \
7720 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7721 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7722
7723 /* Builds a signed or unsigned integer type of precision PRECISION.
7724 Used for C bitfields whose precision does not match that of
7725 built-in target types. */
7726 tree
7727 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7728 int unsignedp)
7729 {
7730 tree itype, ret;
7731
7732 if (unsignedp)
7733 unsignedp = MAX_INT_CACHED_PREC + 1;
7734
7735 if (precision <= MAX_INT_CACHED_PREC)
7736 {
7737 itype = nonstandard_integer_type_cache[precision + unsignedp];
7738 if (itype)
7739 return itype;
7740 }
7741
7742 itype = make_node (INTEGER_TYPE);
7743 TYPE_PRECISION (itype) = precision;
7744
7745 if (unsignedp)
7746 fixup_unsigned_type (itype);
7747 else
7748 fixup_signed_type (itype);
7749
7750 ret = itype;
7751 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7752 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7753 if (precision <= MAX_INT_CACHED_PREC)
7754 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7755
7756 return ret;
7757 }
7758
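/* Usage sketch (illustrative only): a C bit-field such as
   "unsigned f : 24" needs an integer type whose width matches no
   built-in target type:

     tree uint24 = build_nonstandard_integer_type (24, 1);

   TYPE_PRECISION (uint24) is 24, and because 24 <= MAX_INT_CACHED_PREC
   the node is cached, so a second identical call returns the same
   tree.  */
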
7759 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7760 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7761 is true, reuse such a type that has already been constructed. */
7762
7763 static tree
7764 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7765 {
7766 tree itype = make_node (INTEGER_TYPE);
7767 hashval_t hashcode = 0;
7768
7769 TREE_TYPE (itype) = type;
7770
7771 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7772 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7773
7774 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7775 SET_TYPE_MODE (itype, TYPE_MODE (type));
7776 TYPE_SIZE (itype) = TYPE_SIZE (type);
7777 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7778 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7779 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7780
7781 if (!shared)
7782 return itype;
7783
7784 if ((TYPE_MIN_VALUE (itype)
7785 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7786 || (TYPE_MAX_VALUE (itype)
7787 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7788 {
7789 /* Since we cannot reliably merge this type, we need to compare it using
7790 structural equality checks. */
7791 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7792 return itype;
7793 }
7794
7795 hashcode = iterative_hash_expr (TYPE_MIN_VALUE (itype), hashcode);
7796 hashcode = iterative_hash_expr (TYPE_MAX_VALUE (itype), hashcode);
7797 hashcode = iterative_hash_hashval_t (TYPE_HASH (type), hashcode);
7798 itype = type_hash_canon (hashcode, itype);
7799
7800 return itype;
7801 }
7802
7803 /* Wrapper around build_range_type_1 with SHARED set to true. */
7804
7805 tree
7806 build_range_type (tree type, tree lowval, tree highval)
7807 {
7808 return build_range_type_1 (type, lowval, highval, true);
7809 }
7810
7811 /* Wrapper around build_range_type_1 with SHARED set to false. */
7812
7813 tree
7814 build_nonshared_range_type (tree type, tree lowval, tree highval)
7815 {
7816 return build_range_type_1 (type, lowval, highval, false);
7817 }
7818
7819 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7820 MAXVAL should be the maximum value in the domain
7821 (one less than the length of the array).
7822
7823 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7824 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7825 The limit exists because the result is a signed type and we don't handle
7826 sizes that use more than one HOST_WIDE_INT. */
7827
7828 tree
7829 build_index_type (tree maxval)
7830 {
7831 return build_range_type (sizetype, size_zero_node, maxval);
7832 }
7833
7834 /* Return true if the debug information for TYPE, a subtype, should be emitted
7835 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7836 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7837 debug info and doesn't reflect the source code. */
7838
7839 bool
7840 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7841 {
7842 tree base_type = TREE_TYPE (type), low, high;
7843
7844 /* Subrange types have a base type which is an integral type. */
7845 if (!INTEGRAL_TYPE_P (base_type))
7846 return false;
7847
7848 /* Get the real bounds of the subtype. */
7849 if (lang_hooks.types.get_subrange_bounds)
7850 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7851 else
7852 {
7853 low = TYPE_MIN_VALUE (type);
7854 high = TYPE_MAX_VALUE (type);
7855 }
7856
7857 /* If the type and its base type have the same representation and the same
7858 name, then the type is not a subrange but a copy of the base type. */
7859 if ((TREE_CODE (base_type) == INTEGER_TYPE
7860 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7861 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7862 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7863 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7864 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7865 return false;
7866
7867 if (lowval)
7868 *lowval = low;
7869 if (highval)
7870 *highval = high;
7871 return true;
7872 }
7873
7874 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7875 and number of elements specified by the range of values of INDEX_TYPE.
7876 If SHARED is true, reuse such a type that has already been constructed. */
7877
7878 static tree
7879 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7880 {
7881 tree t;
7882
7883 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7884 {
7885 error ("arrays of functions are not meaningful");
7886 elt_type = integer_type_node;
7887 }
7888
7889 t = make_node (ARRAY_TYPE);
7890 TREE_TYPE (t) = elt_type;
7891 TYPE_DOMAIN (t) = index_type;
7892 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7893 layout_type (t);
7894
7895 /* If the element type is incomplete at this point we get marked for
7896 structural equality. Do not record these types in the canonical
7897 type hashtable. */
7898 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7899 return t;
7900
7901 if (shared)
7902 {
7903 hashval_t hashcode = iterative_hash_object (TYPE_HASH (elt_type), 0);
7904 if (index_type)
7905 hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode);
7906 t = type_hash_canon (hashcode, t);
7907 }
7908
7909 if (TYPE_CANONICAL (t) == t)
7910 {
7911 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7912 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7913 SET_TYPE_STRUCTURAL_EQUALITY (t);
7914 else if (TYPE_CANONICAL (elt_type) != elt_type
7915 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7916 TYPE_CANONICAL (t)
7917 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7918 index_type
7919 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7920 shared);
7921 }
7922
7923 return t;
7924 }
7925
7926 /* Wrapper around build_array_type_1 with SHARED set to true. */
7927
7928 tree
7929 build_array_type (tree elt_type, tree index_type)
7930 {
7931 return build_array_type_1 (elt_type, index_type, true);
7932 }
7933
7934 /* Wrapper around build_array_type_1 with SHARED set to false. */
7935
7936 tree
7937 build_nonshared_array_type (tree elt_type, tree index_type)
7938 {
7939 return build_array_type_1 (elt_type, index_type, false);
7940 }
7941
7942 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7943 sizetype. */
7944
7945 tree
7946 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7947 {
7948 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7949 }
7950
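/* Usage sketch (illustrative only): the two calls below describe the
   same type as "char buf[16]", since build_array_type_nelts simply
   wraps build_index_type around NELTS - 1:

     tree a1 = build_array_type_nelts (char_type_node, 16);
     tree a2 = build_array_type (char_type_node,
                                 build_index_type (size_int (15)));

   Both go through type_hash_canon, so they should yield the same
   shared ARRAY_TYPE with domain [0, 15].  */
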
7951 /* Recursively examines the array elements of TYPE, until a non-array
7952 element type is found. */
7953
7954 tree
7955 strip_array_types (tree type)
7956 {
7957 while (TREE_CODE (type) == ARRAY_TYPE)
7958 type = TREE_TYPE (type);
7959
7960 return type;
7961 }
7962
7963 /* Computes the canonical argument types from the argument type list
7964 ARGTYPES.
7965
7966 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7967 on entry to this function, or if any of the ARGTYPES are
7968 structural.
7969
7970 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7971 true on entry to this function, or if any of the ARGTYPES are
7972 non-canonical.
7973
7974 Returns a canonical argument list, which may be ARGTYPES when the
7975 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7976 true) or would not differ from ARGTYPES. */
7977
7978 static tree
7979 maybe_canonicalize_argtypes (tree argtypes,
7980 bool *any_structural_p,
7981 bool *any_noncanonical_p)
7982 {
7983 tree arg;
7984 bool any_noncanonical_argtypes_p = false;
7985
7986 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7987 {
7988 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7989 /* Fail gracefully by stating that the type is structural. */
7990 *any_structural_p = true;
7991 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7992 *any_structural_p = true;
7993 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7994 || TREE_PURPOSE (arg))
7995 /* If the argument has a default argument, we consider it
7996 non-canonical even though the type itself is canonical.
7997 That way, different variants of function and method types
7998 with default arguments will all point to the variant with
7999 no defaults as their canonical type. */
8000 any_noncanonical_argtypes_p = true;
8001 }
8002
8003 if (*any_structural_p)
8004 return argtypes;
8005
8006 if (any_noncanonical_argtypes_p)
8007 {
8008 /* Build the canonical list of argument types. */
8009 tree canon_argtypes = NULL_TREE;
8010 bool is_void = false;
8011
8012 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8013 {
8014 if (arg == void_list_node)
8015 is_void = true;
8016 else
8017 canon_argtypes = tree_cons (NULL_TREE,
8018 TYPE_CANONICAL (TREE_VALUE (arg)),
8019 canon_argtypes);
8020 }
8021
8022 canon_argtypes = nreverse (canon_argtypes);
8023 if (is_void)
8024 canon_argtypes = chainon (canon_argtypes, void_list_node);
8025
8026 /* There is a non-canonical type. */
8027 *any_noncanonical_p = true;
8028 return canon_argtypes;
8029 }
8030
8031 /* The canonical argument types are the same as ARGTYPES. */
8032 return argtypes;
8033 }
8034
8035 /* Construct, lay out and return
8036 the type of functions returning type VALUE_TYPE
8037 given arguments of types ARG_TYPES.
8038 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8039 are data type nodes for the arguments of the function.
8040 If such a type has already been constructed, reuse it. */
8041
8042 tree
8043 build_function_type (tree value_type, tree arg_types)
8044 {
8045 tree t;
8046 hashval_t hashcode = 0;
8047 bool any_structural_p, any_noncanonical_p;
8048 tree canon_argtypes;
8049
8050 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8051 {
8052 error ("function return type cannot be function");
8053 value_type = integer_type_node;
8054 }
8055
8056 /* Make a node of the sort we want. */
8057 t = make_node (FUNCTION_TYPE);
8058 TREE_TYPE (t) = value_type;
8059 TYPE_ARG_TYPES (t) = arg_types;
8060
8061 /* If we already have such a type, use the old one. */
8062 hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode);
8063 hashcode = type_hash_list (arg_types, hashcode);
8064 t = type_hash_canon (hashcode, t);
8065
8066 /* Set up the canonical type. */
8067 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8068 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8069 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8070 &any_structural_p,
8071 &any_noncanonical_p);
8072 if (any_structural_p)
8073 SET_TYPE_STRUCTURAL_EQUALITY (t);
8074 else if (any_noncanonical_p)
8075 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8076 canon_argtypes);
8077
8078 if (!COMPLETE_TYPE_P (t))
8079 layout_type (t);
8080 return t;
8081 }
8082
8083 /* Build a function type. The RETURN_TYPE is the type returned by the
8084 function. If VAARGS is set, no void_type_node is appended to
8085 the list. ARGP must always be terminated by a NULL_TREE. */
8086
8087 static tree
8088 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8089 {
8090 tree t, args, last;
8091
8092 t = va_arg (argp, tree);
8093 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8094 args = tree_cons (NULL_TREE, t, args);
8095
8096 if (vaargs)
8097 {
8098 last = args;
8099 if (args != NULL_TREE)
8100 args = nreverse (args);
8101 gcc_assert (last != void_list_node);
8102 }
8103 else if (args == NULL_TREE)
8104 args = void_list_node;
8105 else
8106 {
8107 last = args;
8108 args = nreverse (args);
8109 TREE_CHAIN (last) = void_list_node;
8110 }
8111 args = build_function_type (return_type, args);
8112
8113 return args;
8114 }
8115
8116 /* Build a function type. The RETURN_TYPE is the type returned by the
8117 function. If additional arguments are provided, they are
8118 additional argument types. The list of argument types must always
8119 be terminated by NULL_TREE. */
8120
8121 tree
8122 build_function_type_list (tree return_type, ...)
8123 {
8124 tree args;
8125 va_list p;
8126
8127 va_start (p, return_type);
8128 args = build_function_type_list_1 (false, return_type, p);
8129 va_end (p);
8130 return args;
8131 }
8132
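/* Usage sketch (illustrative only): the type of a function declared as
   "int f (char *, double)" can be built with

     tree fntype
       = build_function_type_list (integer_type_node,
                                   build_pointer_type (char_type_node),
                                   double_type_node, NULL_TREE);

   The NULL_TREE sentinel ends the variadic argument list of the call
   itself; the trailing void_list_node added internally marks the
   function type as non-variadic.  */
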
8133 /* Build a variable argument function type. The RETURN_TYPE is the
8134 type returned by the function. If additional arguments are provided,
8135 they are additional argument types. The list of argument types must
8136 always be terminated by NULL_TREE. */
8137
8138 tree
8139 build_varargs_function_type_list (tree return_type, ...)
8140 {
8141 tree args;
8142 va_list p;
8143
8144 va_start (p, return_type);
8145 args = build_function_type_list_1 (true, return_type, p);
8146 va_end (p);
8147
8148 return args;
8149 }
8150
8151 /* Build a function type. RETURN_TYPE is the type returned by the
8152 function; VAARGS indicates whether the function takes varargs. The
8153 function takes N named arguments, the types of which are provided in
8154 ARG_TYPES. */
8155
8156 static tree
8157 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8158 tree *arg_types)
8159 {
8160 int i;
8161 tree t = vaargs ? NULL_TREE : void_list_node;
8162
8163 for (i = n - 1; i >= 0; i--)
8164 t = tree_cons (NULL_TREE, arg_types[i], t);
8165
8166 return build_function_type (return_type, t);
8167 }
8168
8169 /* Build a function type. RETURN_TYPE is the type returned by the
8170 function. The function takes N named arguments, the types of which
8171 are provided in ARG_TYPES. */
8172
8173 tree
8174 build_function_type_array (tree return_type, int n, tree *arg_types)
8175 {
8176 return build_function_type_array_1 (false, return_type, n, arg_types);
8177 }
8178
8179 /* Build a variable argument function type. RETURN_TYPE is the type
8180 returned by the function. The function takes N named arguments, the
8181 types of which are provided in ARG_TYPES. */
8182
8183 tree
8184 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8185 {
8186 return build_function_type_array_1 (true, return_type, n, arg_types);
8187 }
8188
8189 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8190 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8191 for the method. An implicit additional parameter (of type
8192 pointer-to-BASETYPE) is added to the ARGTYPES. */
8193
8194 tree
8195 build_method_type_directly (tree basetype,
8196 tree rettype,
8197 tree argtypes)
8198 {
8199 tree t;
8200 tree ptype;
8201 int hashcode = 0;
8202 bool any_structural_p, any_noncanonical_p;
8203 tree canon_argtypes;
8204
8205 /* Make a node of the sort we want. */
8206 t = make_node (METHOD_TYPE);
8207
8208 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8209 TREE_TYPE (t) = rettype;
8210 ptype = build_pointer_type (basetype);
8211
8212 /* The actual arglist for this function includes a "hidden" argument
8213 which is "this". Put it into the list of argument types. */
8214 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8215 TYPE_ARG_TYPES (t) = argtypes;
8216
8217 /* If we already have such a type, use the old one. */
8218 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8219 hashcode = iterative_hash_object (TYPE_HASH (rettype), hashcode);
8220 hashcode = type_hash_list (argtypes, hashcode);
8221 t = type_hash_canon (hashcode, t);
8222
8223 /* Set up the canonical type. */
8224 any_structural_p
8225 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8226 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8227 any_noncanonical_p
8228 = (TYPE_CANONICAL (basetype) != basetype
8229 || TYPE_CANONICAL (rettype) != rettype);
8230 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8231 &any_structural_p,
8232 &any_noncanonical_p);
8233 if (any_structural_p)
8234 SET_TYPE_STRUCTURAL_EQUALITY (t);
8235 else if (any_noncanonical_p)
8236 TYPE_CANONICAL (t)
8237 = build_method_type_directly (TYPE_CANONICAL (basetype),
8238 TYPE_CANONICAL (rettype),
8239 canon_argtypes);
8240 if (!COMPLETE_TYPE_P (t))
8241 layout_type (t);
8242
8243 return t;
8244 }
8245
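/* Usage sketch (illustrative only): for a member function such as
   "int C::get ()" a front end would pass the class type, the return
   type and an argument list containing only void_list_node:

     tree mtype
       = build_method_type_directly (class_type, integer_type_node,
                                     void_list_node);

   Here class_type stands for the RECORD_TYPE of C (a hypothetical
   name).  The first entry of TYPE_ARG_TYPES (mtype) is then the
   implicit "this" argument of type pointer-to-class_type.  */
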
8246 /* Construct, lay out and return the type of methods belonging to class
8247 BASETYPE and whose arguments and values are described by TYPE.
8248 If that type exists already, reuse it.
8249 TYPE must be a FUNCTION_TYPE node. */
8250
8251 tree
8252 build_method_type (tree basetype, tree type)
8253 {
8254 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8255
8256 return build_method_type_directly (basetype,
8257 TREE_TYPE (type),
8258 TYPE_ARG_TYPES (type));
8259 }
8260
8261 /* Construct, lay out and return the type of offsets to a value
8262 of type TYPE, within an object of type BASETYPE.
8263 If a suitable offset type exists already, reuse it. */
8264
8265 tree
8266 build_offset_type (tree basetype, tree type)
8267 {
8268 tree t;
8269 hashval_t hashcode = 0;
8270
8271 /* Make a node of the sort we want. */
8272 t = make_node (OFFSET_TYPE);
8273
8274 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8275 TREE_TYPE (t) = type;
8276
8277 /* If we already have such a type, use the old one. */
8278 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8279 hashcode = iterative_hash_object (TYPE_HASH (type), hashcode);
8280 t = type_hash_canon (hashcode, t);
8281
8282 if (!COMPLETE_TYPE_P (t))
8283 layout_type (t);
8284
8285 if (TYPE_CANONICAL (t) == t)
8286 {
8287 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8288 || TYPE_STRUCTURAL_EQUALITY_P (type))
8289 SET_TYPE_STRUCTURAL_EQUALITY (t);
8290 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8291 || TYPE_CANONICAL (type) != type)
8292 TYPE_CANONICAL (t)
8293 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8294 TYPE_CANONICAL (type));
8295 }
8296
8297 return t;
8298 }
8299
8300 /* Create a complex type whose components are COMPONENT_TYPE. */
8301
8302 tree
8303 build_complex_type (tree component_type)
8304 {
8305 tree t;
8306 hashval_t hashcode;
8307
8308 gcc_assert (INTEGRAL_TYPE_P (component_type)
8309 || SCALAR_FLOAT_TYPE_P (component_type)
8310 || FIXED_POINT_TYPE_P (component_type));
8311
8312 /* Make a node of the sort we want. */
8313 t = make_node (COMPLEX_TYPE);
8314
8315 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8316
8317 /* If we already have such a type, use the old one. */
8318 hashcode = iterative_hash_object (TYPE_HASH (component_type), 0);
8319 t = type_hash_canon (hashcode, t);
8320
8321 if (!COMPLETE_TYPE_P (t))
8322 layout_type (t);
8323
8324 if (TYPE_CANONICAL (t) == t)
8325 {
8326 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8327 SET_TYPE_STRUCTURAL_EQUALITY (t);
8328 else if (TYPE_CANONICAL (component_type) != component_type)
8329 TYPE_CANONICAL (t)
8330 = build_complex_type (TYPE_CANONICAL (component_type));
8331 }
8332
8333 /* We need to create a name, since complex is a fundamental type. */
8334 if (! TYPE_NAME (t))
8335 {
8336 const char *name;
8337 if (component_type == char_type_node)
8338 name = "complex char";
8339 else if (component_type == signed_char_type_node)
8340 name = "complex signed char";
8341 else if (component_type == unsigned_char_type_node)
8342 name = "complex unsigned char";
8343 else if (component_type == short_integer_type_node)
8344 name = "complex short int";
8345 else if (component_type == short_unsigned_type_node)
8346 name = "complex short unsigned int";
8347 else if (component_type == integer_type_node)
8348 name = "complex int";
8349 else if (component_type == unsigned_type_node)
8350 name = "complex unsigned int";
8351 else if (component_type == long_integer_type_node)
8352 name = "complex long int";
8353 else if (component_type == long_unsigned_type_node)
8354 name = "complex long unsigned int";
8355 else if (component_type == long_long_integer_type_node)
8356 name = "complex long long int";
8357 else if (component_type == long_long_unsigned_type_node)
8358 name = "complex long long unsigned int";
8359 else
8360 name = 0;
8361
8362 if (name != 0)
8363 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8364 get_identifier (name), t);
8365 }
8366
8367 return build_qualified_type (t, TYPE_QUALS (component_type));
8368 }
8369
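/* Usage sketch (illustrative only):

     tree cint = build_complex_type (integer_type_node);

   returns a COMPLEX_TYPE whose component type is int and whose
   TYPE_NAME is set to "complex int" by the table above; qualifiers of
   the component type are propagated to the result through
   build_qualified_type.  */
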
8370 /* If TYPE is a real or complex floating-point type and the target
8371 does not directly support arithmetic on TYPE then return the wider
8372 type to be used for arithmetic on TYPE. Otherwise, return
8373 NULL_TREE. */
8374
8375 tree
8376 excess_precision_type (tree type)
8377 {
8378 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8379 {
8380 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8381 switch (TREE_CODE (type))
8382 {
8383 case REAL_TYPE:
8384 switch (flt_eval_method)
8385 {
8386 case 1:
8387 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8388 return double_type_node;
8389 break;
8390 case 2:
8391 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8392 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8393 return long_double_type_node;
8394 break;
8395 default:
8396 gcc_unreachable ();
8397 }
8398 break;
8399 case COMPLEX_TYPE:
8400 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8401 return NULL_TREE;
8402 switch (flt_eval_method)
8403 {
8404 case 1:
8405 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8406 return complex_double_type_node;
8407 break;
8408 case 2:
8409 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8410 || (TYPE_MODE (TREE_TYPE (type))
8411 == TYPE_MODE (double_type_node)))
8412 return complex_long_double_type_node;
8413 break;
8414 default:
8415 gcc_unreachable ();
8416 }
8417 break;
8418 default:
8419 break;
8420 }
8421 }
8422 return NULL_TREE;
8423 }
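
/* For example (illustrative only): when flag_excess_precision is not
   EXCESS_PRECISION_FAST and TARGET_FLT_EVAL_METHOD is 2, as is typical
   for x87 floating point, excess_precision_type (float_type_node)
   returns long_double_type_node, so float arithmetic is carried out in
   long double.  */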
8424 \f
8425 /* Return OP, stripped of any conversions to wider types as much as is safe.
8426 Converting the value back to OP's type makes a value equivalent to OP.
8427
8428 If FOR_TYPE is nonzero, we return a value which, if converted to
8429 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8430
8431 OP must have integer, real or enumeral type. Pointers are not allowed!
8432
8433 There are some cases where the obvious value we could return
8434 would regenerate to OP if converted to OP's type,
8435 but would not extend like OP to wider types.
8436 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8437 For example, if OP is (unsigned short)(signed char)-1,
8438 we avoid returning (signed char)-1 if FOR_TYPE is int,
8439 even though extending that to an unsigned short would regenerate OP,
8440 since the result of extending (signed char)-1 to (int)
8441 is different from (int) OP. */
8442
8443 tree
8444 get_unwidened (tree op, tree for_type)
8445 {
8446 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8447 tree type = TREE_TYPE (op);
8448 unsigned final_prec
8449 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8450 int uns
8451 = (for_type != 0 && for_type != type
8452 && final_prec > TYPE_PRECISION (type)
8453 && TYPE_UNSIGNED (type));
8454 tree win = op;
8455
8456 while (CONVERT_EXPR_P (op))
8457 {
8458 int bitschange;
8459
8460 /* TYPE_PRECISION on vector types has different meaning
8461 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8462 so avoid them here. */
8463 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8464 break;
8465
8466 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8467 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8468
8469 /* Truncations are many-one so cannot be removed.
8470 Unless we are later going to truncate down even farther. */
8471 if (bitschange < 0
8472 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8473 break;
8474
8475 /* See what's inside this conversion. If we decide to strip it,
8476 we will set WIN. */
8477 op = TREE_OPERAND (op, 0);
8478
8479 /* If we have not stripped any zero-extensions (uns is 0),
8480 we can strip any kind of extension.
8481 If we have previously stripped a zero-extension,
8482 only zero-extensions can safely be stripped.
8483 Any extension can be stripped if the bits it would produce
8484 are all going to be discarded later by truncating to FOR_TYPE. */
8485
8486 if (bitschange > 0)
8487 {
8488 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8489 win = op;
8490 /* TYPE_UNSIGNED says whether this is a zero-extension.
8491 Let's avoid computing it if it does not affect WIN
8492 and if UNS will not be needed again. */
8493 if ((uns
8494 || CONVERT_EXPR_P (op))
8495 && TYPE_UNSIGNED (TREE_TYPE (op)))
8496 {
8497 uns = 1;
8498 win = op;
8499 }
8500 }
8501 }
8502
8503 /* If we finally reach a constant see if it fits in for_type and
8504 in that case convert it. */
8505 if (for_type
8506 && TREE_CODE (win) == INTEGER_CST
8507 && TREE_TYPE (win) != for_type
8508 && int_fits_type_p (win, for_type))
8509 win = fold_convert (for_type, win);
8510
8511 return win;
8512 }
8513 \f
8514 /* Return OP or a simpler expression for a narrower value
8515 which can be sign-extended or zero-extended to give back OP.
8516 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8517 or 0 if the value should be sign-extended. */
8518
8519 tree
8520 get_narrower (tree op, int *unsignedp_ptr)
8521 {
8522 int uns = 0;
8523 int first = 1;
8524 tree win = op;
8525 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8526
8527 while (TREE_CODE (op) == NOP_EXPR)
8528 {
8529 int bitschange
8530 = (TYPE_PRECISION (TREE_TYPE (op))
8531 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8532
8533 /* Truncations are many-one so cannot be removed. */
8534 if (bitschange < 0)
8535 break;
8536
8537 /* See what's inside this conversion. If we decide to strip it,
8538 we will set WIN. */
8539
8540 if (bitschange > 0)
8541 {
8542 op = TREE_OPERAND (op, 0);
8543 /* An extension: the outermost one can be stripped,
8544 but remember whether it is zero or sign extension. */
8545 if (first)
8546 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8547 /* Otherwise, if a sign extension has been stripped,
8548 only sign extensions can now be stripped;
8549 if a zero extension has been stripped, only zero-extensions. */
8550 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8551 break;
8552 first = 0;
8553 }
8554 else /* bitschange == 0 */
8555 {
8556 /* A change in nominal type can always be stripped, but we must
8557 preserve the unsignedness. */
8558 if (first)
8559 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8560 first = 0;
8561 op = TREE_OPERAND (op, 0);
8562 /* Keep trying to narrow, but don't assign op to win if it
8563 would turn an integral type into something else. */
8564 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8565 continue;
8566 }
8567
8568 win = op;
8569 }
8570
8571 if (TREE_CODE (op) == COMPONENT_REF
8572 /* Since type_for_size always gives an integer type. */
8573 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8574 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8575 /* Ensure field is laid out already. */
8576 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8577 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8578 {
8579 unsigned HOST_WIDE_INT innerprec
8580 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8581 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8582 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8583 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8584
8585 /* We can get this structure field in a narrower type that fits it,
8586 but the resulting extension to its nominal type (a fullword type)
8587 must satisfy the same conditions as for other extensions.
8588
8589 Do this only for fields that are aligned (not bit-fields),
8590 because when bit-field insns will be used there is no
8591 advantage in doing this. */
8592
8593 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8594 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8595 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8596 && type != 0)
8597 {
8598 if (first)
8599 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8600 win = fold_convert (type, op);
8601 }
8602 }
8603
8604 *unsignedp_ptr = uns;
8605 return win;
8606 }
8607 \f
8608 /* Returns true if integer constant C has a value that is permissible
8609 for type TYPE (an INTEGER_TYPE). */
8610
8611 bool
8612 int_fits_type_p (const_tree c, const_tree type)
8613 {
8614 tree type_low_bound, type_high_bound;
8615 bool ok_for_low_bound, ok_for_high_bound;
8616 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8617
8618 retry:
8619 type_low_bound = TYPE_MIN_VALUE (type);
8620 type_high_bound = TYPE_MAX_VALUE (type);
8621
8622 /* If at least one bound of the type is a constant integer, we can check
8623 ourselves and maybe make a decision. If no such decision is possible, but
8624 this type is a subtype, try checking against that. Otherwise, use
8625 fits_to_tree_p, which checks against the precision.
8626
8627 Compute the status for each possibly constant bound, and return if we see
8628 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8629 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8630 for "constant known to fit". */
8631
8632 /* Check if c >= type_low_bound. */
8633 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8634 {
8635 if (tree_int_cst_lt (c, type_low_bound))
8636 return false;
8637 ok_for_low_bound = true;
8638 }
8639 else
8640 ok_for_low_bound = false;
8641
8642 /* Check if c <= type_high_bound. */
8643 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8644 {
8645 if (tree_int_cst_lt (type_high_bound, c))
8646 return false;
8647 ok_for_high_bound = true;
8648 }
8649 else
8650 ok_for_high_bound = false;
8651
8652 /* If the constant fits both bounds, the result is known. */
8653 if (ok_for_low_bound && ok_for_high_bound)
8654 return true;
8655
8656 /* Perform some generic filtering which may allow making a decision
8657 even if the bounds are not constant. First, negative integers
8658 never fit in unsigned types. */
8659 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8660 return false;
8661
8662 /* Second, narrower types always fit in wider ones. */
8663 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8664 return true;
8665
8666 /* Third, unsigned integers with top bit set never fit signed types. */
8667 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8668 {
8669 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8670 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8671 {
8672 /* When a tree_cst is converted to a wide-int, the precision
8673 is taken from the type. However, if the precision of the
8674 mode underneath the type is smaller than that, it is
8675 possible that the value will not fit. The test below
8676 fails if any bit is set between the sign bit of the
8677 underlying mode and the top bit of the type. */
8678 if (wi::ne_p (wi::zext (c, prec - 1), c))
8679 return false;
8680 }
8681 else if (wi::neg_p (c))
8682 return false;
8683 }
8684
8685 /* If we haven't been able to decide at this point, there is nothing more we
8686 can check ourselves here. Look at the base type if we have one and it
8687 has the same precision. */
8688 if (TREE_CODE (type) == INTEGER_TYPE
8689 && TREE_TYPE (type) != 0
8690 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8691 {
8692 type = TREE_TYPE (type);
8693 goto retry;
8694 }
8695
8696 /* Or to fits_to_tree_p, if nothing else. */
8697 return wi::fits_to_tree_p (c, type);
8698 }
8699
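/* Usage sketch (illustrative only): with UC standing for
   unsigned_char_type_node on a target with 8-bit chars,

     int_fits_type_p (build_int_cst (integer_type_node, 255), UC)

   is true, whereas the same call with 256 is false because 256 exceeds
   TYPE_MAX_VALUE (UC).  */
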
8700 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8701 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8702 represented (assuming two's-complement arithmetic) within the bit
8703 precision of the type are returned instead. */
8704
8705 void
8706 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8707 {
8708 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8709 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8710 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8711 else
8712 {
8713 if (TYPE_UNSIGNED (type))
8714 mpz_set_ui (min, 0);
8715 else
8716 {
8717 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8718 wi::to_mpz (mn, min, SIGNED);
8719 }
8720 }
8721
8722 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8723 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8724 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8725 else
8726 {
8727 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8728 wi::to_mpz (mn, max, TYPE_SIGN (type));
8729 }
8730 }
8731
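/* Usage sketch (illustrative only), reading the static bounds of a
   type into GMP integers; afterwards lo is 0 and hi is 255 on a target
   with 8-bit chars:

     mpz_t lo, hi;
     mpz_init (lo);
     mpz_init (hi);
     get_type_static_bounds (unsigned_char_type_node, lo, hi);
     mpz_clear (lo);
     mpz_clear (hi);  */
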
8732 /* Return true if VAR is an automatic variable defined in function FN. */
8733
8734 bool
8735 auto_var_in_fn_p (const_tree var, const_tree fn)
8736 {
8737 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8738 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8739 || TREE_CODE (var) == PARM_DECL)
8740 && ! TREE_STATIC (var))
8741 || TREE_CODE (var) == LABEL_DECL
8742 || TREE_CODE (var) == RESULT_DECL));
8743 }
8744
8745 /* Subprogram of following function. Called by walk_tree.
8746
8747 Return *TP if it is an automatic variable or parameter of the
8748 function passed in as DATA. */
8749
8750 static tree
8751 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8752 {
8753 tree fn = (tree) data;
8754
8755 if (TYPE_P (*tp))
8756 *walk_subtrees = 0;
8757
8758 else if (DECL_P (*tp)
8759 && auto_var_in_fn_p (*tp, fn))
8760 return *tp;
8761
8762 return NULL_TREE;
8763 }
8764
8765 /* Returns true if T is, contains, or refers to a type with variable
8766 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8767 arguments, but not the return type. If FN is nonzero, only return
8768 true if a modifier of the type or position of FN is a variable or
8769 parameter inside FN.
8770
8771 This concept is more general than that of C99 'variably modified types':
8772 in C99, a struct type is never variably modified because a VLA may not
8773 appear as a structure member. However, in GNU C, code like:
8774
8775 struct S { int i[f()]; };
8776
8777 is valid, and other languages may define similar constructs. */
8778
8779 bool
8780 variably_modified_type_p (tree type, tree fn)
8781 {
8782 tree t;
8783
8784 /* Test if T is either variable (if FN is zero) or an expression containing
8785 a variable in FN. If TYPE isn't gimplified, return true also if
8786 gimplify_one_sizepos would gimplify the expression into a local
8787 variable. */
8788 #define RETURN_TRUE_IF_VAR(T) \
8789 do { tree _t = (T); \
8790 if (_t != NULL_TREE \
8791 && _t != error_mark_node \
8792 && TREE_CODE (_t) != INTEGER_CST \
8793 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8794 && (!fn \
8795 || (!TYPE_SIZES_GIMPLIFIED (type) \
8796 && !is_gimple_sizepos (_t)) \
8797 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8798 return true; } while (0)
8799
8800 if (type == error_mark_node)
8801 return false;
8802
8803 /* If TYPE itself has variable size, it is variably modified. */
8804 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8805 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8806
8807 switch (TREE_CODE (type))
8808 {
8809 case POINTER_TYPE:
8810 case REFERENCE_TYPE:
8811 case VECTOR_TYPE:
8812 if (variably_modified_type_p (TREE_TYPE (type), fn))
8813 return true;
8814 break;
8815
8816 case FUNCTION_TYPE:
8817 case METHOD_TYPE:
8818 /* If TYPE is a function type, it is variably modified if the
8819 return type is variably modified. */
8820 if (variably_modified_type_p (TREE_TYPE (type), fn))
8821 return true;
8822 break;
8823
8824 case INTEGER_TYPE:
8825 case REAL_TYPE:
8826 case FIXED_POINT_TYPE:
8827 case ENUMERAL_TYPE:
8828 case BOOLEAN_TYPE:
8829 /* Scalar types are variably modified if their end points
8830 aren't constant. */
8831 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8832 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8833 break;
8834
8835 case RECORD_TYPE:
8836 case UNION_TYPE:
8837 case QUAL_UNION_TYPE:
8838 /* We can't see if any of the fields are variably-modified by the
8839 definition we normally use, since that would produce infinite
8840 recursion via pointers. */
8841 /* This is variably modified if some field's type is. */
8842 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8843 if (TREE_CODE (t) == FIELD_DECL)
8844 {
8845 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8846 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8847 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8848
8849 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8850 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8851 }
8852 break;
8853
8854 case ARRAY_TYPE:
8855 /* Do not call ourselves to avoid infinite recursion. This is
8856 variably modified if the element type is. */
8857 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8858 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8859 break;
8860
8861 default:
8862 break;
8863 }
8864
8865 /* The current language may have other cases to check, but in general,
8866 all other types are not variably modified. */
8867 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8868
8869 #undef RETURN_TRUE_IF_VAR
8870 }
8871
8872 /* Given a DECL or TYPE, return the scope in which it was declared, or
8873 NULL_TREE if there is no containing scope. */
8874
8875 tree
8876 get_containing_scope (const_tree t)
8877 {
8878 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8879 }
8880
8881 /* Return the innermost context enclosing DECL that is
8882 a FUNCTION_DECL, or zero if none. */
8883
8884 tree
8885 decl_function_context (const_tree decl)
8886 {
8887 tree context;
8888
8889 if (TREE_CODE (decl) == ERROR_MARK)
8890 return 0;
8891
8892 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8893 where we look up the function at runtime. Such functions always take
8894 a first argument of type 'pointer to real context'.
8895
8896 C++ should really be fixed to use DECL_CONTEXT for the real context,
8897 and use something else for the "virtual context". */
8898 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8899 context
8900 = TYPE_MAIN_VARIANT
8901 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8902 else
8903 context = DECL_CONTEXT (decl);
8904
8905 while (context && TREE_CODE (context) != FUNCTION_DECL)
8906 {
8907 if (TREE_CODE (context) == BLOCK)
8908 context = BLOCK_SUPERCONTEXT (context);
8909 else
8910 context = get_containing_scope (context);
8911 }
8912
8913 return context;
8914 }
8915
8916 /* Return the innermost context enclosing DECL that is
8917 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8918 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8919
8920 tree
8921 decl_type_context (const_tree decl)
8922 {
8923 tree context = DECL_CONTEXT (decl);
8924
8925 while (context)
8926 switch (TREE_CODE (context))
8927 {
8928 case NAMESPACE_DECL:
8929 case TRANSLATION_UNIT_DECL:
8930 return NULL_TREE;
8931
8932 case RECORD_TYPE:
8933 case UNION_TYPE:
8934 case QUAL_UNION_TYPE:
8935 return context;
8936
8937 case TYPE_DECL:
8938 case FUNCTION_DECL:
8939 context = DECL_CONTEXT (context);
8940 break;
8941
8942 case BLOCK:
8943 context = BLOCK_SUPERCONTEXT (context);
8944 break;
8945
8946 default:
8947 gcc_unreachable ();
8948 }
8949
8950 return NULL_TREE;
8951 }
8952
8953 /* CALL is a CALL_EXPR. Return the declaration for the function
8954 called, or NULL_TREE if the called function cannot be
8955 determined. */
8956
8957 tree
8958 get_callee_fndecl (const_tree call)
8959 {
8960 tree addr;
8961
8962 if (call == error_mark_node)
8963 return error_mark_node;
8964
8965 /* It's invalid to call this function with anything but a
8966 CALL_EXPR. */
8967 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8968
8969 /* The first operand to the CALL is the address of the function
8970 called. */
8971 addr = CALL_EXPR_FN (call);
8972
8973 /* If there is no function, return early. */
8974 if (addr == NULL_TREE)
8975 return NULL_TREE;
8976
8977 STRIP_NOPS (addr);
8978
8979 /* If this is a readonly function pointer, extract its initial value. */
8980 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8981 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8982 && DECL_INITIAL (addr))
8983 addr = DECL_INITIAL (addr);
8984
8985 /* If the address is just `&f' for some function `f', then we know
8986 that `f' is being called. */
8987 if (TREE_CODE (addr) == ADDR_EXPR
8988 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8989 return TREE_OPERAND (addr, 0);
8990
8991 /* We couldn't figure out what was being called. */
8992 return NULL_TREE;
8993 }
8994
8995 /* Print debugging information about tree nodes generated during the compile,
8996 and any language-specific information. */
8997
8998 void
8999 dump_tree_statistics (void)
9000 {
9001 if (GATHER_STATISTICS)
9002 {
9003 int i;
9004 int total_nodes, total_bytes;
9005 fprintf (stderr, "Kind Nodes Bytes\n");
9006 fprintf (stderr, "---------------------------------------\n");
9007 total_nodes = total_bytes = 0;
9008 for (i = 0; i < (int) all_kinds; i++)
9009 {
9010 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9011 tree_node_counts[i], tree_node_sizes[i]);
9012 total_nodes += tree_node_counts[i];
9013 total_bytes += tree_node_sizes[i];
9014 }
9015 fprintf (stderr, "---------------------------------------\n");
9016 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9017 fprintf (stderr, "---------------------------------------\n");
9018 fprintf (stderr, "Code Nodes\n");
9019 fprintf (stderr, "----------------------------\n");
9020 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9021 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9022 tree_code_counts[i]);
9023 fprintf (stderr, "----------------------------\n");
9024 ssanames_print_statistics ();
9025 phinodes_print_statistics ();
9026 }
9027 else
9028 fprintf (stderr, "(No per-node statistics)\n");
9029
9030 print_type_hash_statistics ();
9031 print_debug_expr_statistics ();
9032 print_value_expr_statistics ();
9033 lang_hooks.print_statistics ();
9034 }
9035 \f
9036 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9037
9038 /* Generate a crc32 of the most significant BITS bits of VALUE. */
9039
9040 static unsigned
9041 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9042 {
9043 unsigned ix;
9044
9045 for (ix = bits; ix--; value <<= 1)
9046 {
9047 unsigned feedback;
9048
9049 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9050 chksum <<= 1;
9051 chksum ^= feedback;
9052 }
9053 return chksum;
9054 }
9055
9056 /* Generate a crc32 of a 32-bit unsigned. */
9057
9058 unsigned
9059 crc32_unsigned (unsigned chksum, unsigned value)
9060 {
9061 return crc32_unsigned_bits (chksum, value, 32);
9062 }
9063
9064 /* Generate a crc32 of a byte. */
9065
9066 unsigned
9067 crc32_byte (unsigned chksum, char byte)
9068 {
9069 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9070 }
9071
9072 /* Generate a crc32 of a string. */
9073
9074 unsigned
9075 crc32_string (unsigned chksum, const char *string)
9076 {
9077 do
9078 {
9079 chksum = crc32_byte (chksum, *string);
9080 }
9081 while (*string++);
9082 return chksum;
9083 }
9084
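/* Usage sketch (illustrative only): the checksums compose, so several
   inputs can be folded into a single value incrementally:

     unsigned chk = crc32_string (0, "foo");
     chk = crc32_unsigned (chk, 42);

   get_file_function_name below uses crc32_string this way on
   weak_global_object_name when it has to invent a link-wide unique
   name.  */
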
9085 /* P is a string that will be used in a symbol. Mask out any characters
9086 that are not valid in that context. */
9087
9088 void
9089 clean_symbol_name (char *p)
9090 {
9091 for (; *p; p++)
9092 if (! (ISALNUM (*p)
9093 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9094 || *p == '$'
9095 #endif
9096 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9097 || *p == '.'
9098 #endif
9099 ))
9100 *p = '_';
9101 }
9102
9103 /* Generate a name for a special-purpose function.
9104 The generated name may need to be unique across the whole link.
9105 Changes to this function may also require corresponding changes to
9106 xstrdup_mask_random.
9107 TYPE is some string to identify the purpose of this function to the
9108 linker or collect2; it must start with an uppercase letter,
9109 one of:
9110 I - for constructors
9111 D - for destructors
9112 N - for C++ anonymous namespaces
9113 F - for DWARF unwind frame information. */
9114
9115 tree
9116 get_file_function_name (const char *type)
9117 {
9118 char *buf;
9119 const char *p;
9120 char *q;
9121
9122 /* If we already have a name we know to be unique, just use that. */
9123 if (first_global_object_name)
9124 p = q = ASTRDUP (first_global_object_name);
9125 /* If the target is handling the constructors/destructors, they
9126 will be local to this file and the name is only necessary for
9127 debugging purposes.
9128 We also assign sub_I and sub_D suffixes to constructors called from
9129 the global static constructors. These are always local. */
9130 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9131 || (strncmp (type, "sub_", 4) == 0
9132 && (type[4] == 'I' || type[4] == 'D')))
9133 {
9134 const char *file = main_input_filename;
9135 if (! file)
9136 file = LOCATION_FILE (input_location);
9137 /* Just use the file's basename, because the full pathname
9138 might be quite long. */
9139 p = q = ASTRDUP (lbasename (file));
9140 }
9141 else
9142 {
9143 /* Otherwise, the name must be unique across the entire link.
9144 We don't have anything that we know to be unique to this translation
9145 unit, so use what we do have and throw in some randomness. */
9146 unsigned len;
9147 const char *name = weak_global_object_name;
9148 const char *file = main_input_filename;
9149
9150 if (! name)
9151 name = "";
9152 if (! file)
9153 file = LOCATION_FILE (input_location);
9154
9155 len = strlen (file);
9156 q = (char *) alloca (9 + 17 + len + 1);
9157 memcpy (q, file, len + 1);
9158
9159 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9160 crc32_string (0, name), get_random_seed (false));
9161
9162 p = q;
9163 }
9164
9165 clean_symbol_name (q);
9166 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9167 + strlen (type));
9168
9169 /* Set up the name of the file-level functions we may need.
9170 Use a global object (which is already required to be unique over
9171 the program) rather than the file name (which imposes extra
9172 constraints). */
9173 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9174
9175 return get_identifier (buf);
9176 }
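
/* For example (illustrative only): with TYPE "I" and a cleaned-up
   basename such as "foo_c", FILE_FUNCTION_FORMAT above expands to the
   identifier "_GLOBAL__I_foo_c".  */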
9177 \f
9178 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9179
9180 /* Complain that the tree code of NODE does not match the expected 0
9181 terminated list of trailing codes. The trailing code list can be
9182 empty, for a more vague error message. FILE, LINE, and FUNCTION
9183 are of the caller. */
9184
9185 void
9186 tree_check_failed (const_tree node, const char *file,
9187 int line, const char *function, ...)
9188 {
9189 va_list args;
9190 const char *buffer;
9191 unsigned length = 0;
9192 enum tree_code code;
9193
9194 va_start (args, function);
9195 while ((code = (enum tree_code) va_arg (args, int)))
9196 length += 4 + strlen (get_tree_code_name (code));
9197 va_end (args);
9198 if (length)
9199 {
9200 char *tmp;
9201 va_start (args, function);
9202 length += strlen ("expected ");
9203 buffer = tmp = (char *) alloca (length);
9204 length = 0;
9205 while ((code = (enum tree_code) va_arg (args, int)))
9206 {
9207 const char *prefix = length ? " or " : "expected ";
9208
9209 strcpy (tmp + length, prefix);
9210 length += strlen (prefix);
9211 strcpy (tmp + length, get_tree_code_name (code));
9212 length += strlen (get_tree_code_name (code));
9213 }
9214 va_end (args);
9215 }
9216 else
9217 buffer = "unexpected node";
9218
9219 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9220 buffer, get_tree_code_name (TREE_CODE (node)),
9221 function, trim_filename (file), line);
9222 }
9223
9224 /* Complain that the tree code of NODE does match the expected 0
9225 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9226 the caller. */
9227
9228 void
9229 tree_not_check_failed (const_tree node, const char *file,
9230 int line, const char *function, ...)
9231 {
9232 va_list args;
9233 char *buffer;
9234 unsigned length = 0;
9235 enum tree_code code;
9236
9237 va_start (args, function);
9238 while ((code = (enum tree_code) va_arg (args, int)))
9239 length += 4 + strlen (get_tree_code_name (code));
9240 va_end (args);
9241 va_start (args, function);
9242 buffer = (char *) alloca (length);
9243 length = 0;
9244 while ((code = (enum tree_code) va_arg (args, int)))
9245 {
9246 if (length)
9247 {
9248 strcpy (buffer + length, " or ");
9249 length += 4;
9250 }
9251 strcpy (buffer + length, get_tree_code_name (code));
9252 length += strlen (get_tree_code_name (code));
9253 }
9254 va_end (args);
9255
9256 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9257 buffer, get_tree_code_name (TREE_CODE (node)),
9258 function, trim_filename (file), line);
9259 }
9260
9261 /* Similar to tree_check_failed, except that we check for a class of tree
9262 code, given in CL. */
9263
9264 void
9265 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9266 const char *file, int line, const char *function)
9267 {
9268 internal_error
9269 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9270 TREE_CODE_CLASS_STRING (cl),
9271 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9272 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9273 }
9274
9275 /* Similar to tree_check_failed, except that instead of specifying a
9276 dozen codes, use the knowledge that they're all sequential. */
9277
9278 void
9279 tree_range_check_failed (const_tree node, const char *file, int line,
9280 const char *function, enum tree_code c1,
9281 enum tree_code c2)
9282 {
9283 char *buffer;
9284 unsigned length = 0;
9285 unsigned int c;
9286
9287 for (c = c1; c <= c2; ++c)
9288 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9289
9290 length += strlen ("expected ");
9291 buffer = (char *) alloca (length);
9292 length = 0;
9293
9294 for (c = c1; c <= c2; ++c)
9295 {
9296 const char *prefix = length ? " or " : "expected ";
9297
9298 strcpy (buffer + length, prefix);
9299 length += strlen (prefix);
9300 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9301 length += strlen (get_tree_code_name ((enum tree_code) c));
9302 }
9303
9304 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9305 buffer, get_tree_code_name (TREE_CODE (node)),
9306 function, trim_filename (file), line);
9307 }
9308
9309
9310 /* Similar to tree_check_failed, except that we check that a tree does
9311 not have the specified code, given in CL. */
9312
9313 void
9314 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9315 const char *file, int line, const char *function)
9316 {
9317 internal_error
9318 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9319 TREE_CODE_CLASS_STRING (cl),
9320 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9321 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9322 }
9323
9324
9325 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9326
9327 void
9328 omp_clause_check_failed (const_tree node, const char *file, int line,
9329 const char *function, enum omp_clause_code code)
9330 {
9331 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9332 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9333 function, trim_filename (file), line);
9334 }
9335
9336
9337 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9338
9339 void
9340 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9341 const char *function, enum omp_clause_code c1,
9342 enum omp_clause_code c2)
9343 {
9344 char *buffer;
9345 unsigned length = 0;
9346 unsigned int c;
9347
9348 for (c = c1; c <= c2; ++c)
9349 length += 4 + strlen (omp_clause_code_name[c]);
9350
9351 length += strlen ("expected ");
9352 buffer = (char *) alloca (length);
9353 length = 0;
9354
9355 for (c = c1; c <= c2; ++c)
9356 {
9357 const char *prefix = length ? " or " : "expected ";
9358
9359 strcpy (buffer + length, prefix);
9360 length += strlen (prefix);
9361 strcpy (buffer + length, omp_clause_code_name[c]);
9362 length += strlen (omp_clause_code_name[c]);
9363 }
9364
9365 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9366 buffer, omp_clause_code_name[TREE_CODE (node)],
9367 function, trim_filename (file), line);
9368 }
9369
9370
9371 #undef DEFTREESTRUCT
9372 #define DEFTREESTRUCT(VAL, NAME) NAME,
9373
9374 static const char *ts_enum_names[] = {
9375 #include "treestruct.def"
9376 };
9377 #undef DEFTREESTRUCT
9378
9379 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9380
9381 /* Similar to tree_class_check_failed, except that we check for
9382 whether CODE contains the tree structure identified by EN. */
9383
9384 void
9385 tree_contains_struct_check_failed (const_tree node,
9386 const enum tree_node_structure_enum en,
9387 const char *file, int line,
9388 const char *function)
9389 {
9390 internal_error
9391 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9392 TS_ENUM_NAME (en),
9393 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9394 }
9395
9396
9397 /* Similar to above, except that the check is for the bounds of a
9398 tree_int_cst's (dynamically sized) vector of elements. */
9399
9400 void
9401 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9402 const char *function)
9403 {
9404 internal_error
9405 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9406 idx + 1, len, function, trim_filename (file), line);
9407 }
9408
9409 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9410 (dynamically sized) vector. */
9411
9412 void
9413 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9414 const char *function)
9415 {
9416 internal_error
9417 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9418 idx + 1, len, function, trim_filename (file), line);
9419 }
9420
9421 /* Similar to above, except that the check is for the bounds of the operand
9422 vector of an expression node EXP. */
9423
9424 void
9425 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9426 int line, const char *function)
9427 {
9428 enum tree_code code = TREE_CODE (exp);
9429 internal_error
9430 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9431 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9432 function, trim_filename (file), line);
9433 }
9434
9435 /* Similar to above, except that the check is for the number of
9436 operands of an OMP_CLAUSE node. */
9437
9438 void
9439 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9440 int line, const char *function)
9441 {
9442 internal_error
9443 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9444 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9445 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9446 trim_filename (file), line);
9447 }
9448 #endif /* ENABLE_TREE_CHECKING */
9449 \f
9450 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9451 and mapped to the machine mode MODE. Initialize its fields and build
9452 the information necessary for debugging output. */
9453
9454 static tree
9455 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9456 {
9457 tree t;
9458 hashval_t hashcode = 0;
9459
9460 t = make_node (VECTOR_TYPE);
9461 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9462 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9463 SET_TYPE_MODE (t, mode);
9464
9465 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9466 SET_TYPE_STRUCTURAL_EQUALITY (t);
9467 else if (TYPE_CANONICAL (innertype) != innertype
9468 || mode != VOIDmode)
9469 TYPE_CANONICAL (t)
9470 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9471
9472 layout_type (t);
9473
9474 hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode);
9475 hashcode = iterative_hash_host_wide_int (nunits, hashcode);
9476 hashcode = iterative_hash_host_wide_int (mode, hashcode);
9477 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (t)), hashcode);
9478 t = type_hash_canon (hashcode, t);
9479
9480 /* We have built a main variant, based on the main variant of the
9481 inner type. Use it to build the variant we return. */
9482 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9483 && TREE_TYPE (t) != innertype)
9484 return build_type_attribute_qual_variant (t,
9485 TYPE_ATTRIBUTES (innertype),
9486 TYPE_QUALS (innertype));
9487
9488 return t;
9489 }
9490
9491 static tree
9492 make_or_reuse_type (unsigned size, int unsignedp)
9493 {
9494 if (size == INT_TYPE_SIZE)
9495 return unsignedp ? unsigned_type_node : integer_type_node;
9496 if (size == CHAR_TYPE_SIZE)
9497 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9498 if (size == SHORT_TYPE_SIZE)
9499 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9500 if (size == LONG_TYPE_SIZE)
9501 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9502 if (size == LONG_LONG_TYPE_SIZE)
9503 return (unsignedp ? long_long_unsigned_type_node
9504 : long_long_integer_type_node);
9505 if (size == 128 && int128_integer_type_node)
9506 return (unsignedp ? int128_unsigned_type_node
9507 : int128_integer_type_node);
9508
9509 if (unsignedp)
9510 return make_unsigned_type (size);
9511 else
9512 return make_signed_type (size);
9513 }
9514
9515 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9516
9517 static tree
9518 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9519 {
9520 if (satp)
9521 {
9522 if (size == SHORT_FRACT_TYPE_SIZE)
9523 return unsignedp ? sat_unsigned_short_fract_type_node
9524 : sat_short_fract_type_node;
9525 if (size == FRACT_TYPE_SIZE)
9526 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9527 if (size == LONG_FRACT_TYPE_SIZE)
9528 return unsignedp ? sat_unsigned_long_fract_type_node
9529 : sat_long_fract_type_node;
9530 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9531 return unsignedp ? sat_unsigned_long_long_fract_type_node
9532 : sat_long_long_fract_type_node;
9533 }
9534 else
9535 {
9536 if (size == SHORT_FRACT_TYPE_SIZE)
9537 return unsignedp ? unsigned_short_fract_type_node
9538 : short_fract_type_node;
9539 if (size == FRACT_TYPE_SIZE)
9540 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9541 if (size == LONG_FRACT_TYPE_SIZE)
9542 return unsignedp ? unsigned_long_fract_type_node
9543 : long_fract_type_node;
9544 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9545 return unsignedp ? unsigned_long_long_fract_type_node
9546 : long_long_fract_type_node;
9547 }
9548
9549 return make_fract_type (size, unsignedp, satp);
9550 }
9551
9552 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9553
9554 static tree
9555 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9556 {
9557 if (satp)
9558 {
9559 if (size == SHORT_ACCUM_TYPE_SIZE)
9560 return unsignedp ? sat_unsigned_short_accum_type_node
9561 : sat_short_accum_type_node;
9562 if (size == ACCUM_TYPE_SIZE)
9563 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9564 if (size == LONG_ACCUM_TYPE_SIZE)
9565 return unsignedp ? sat_unsigned_long_accum_type_node
9566 : sat_long_accum_type_node;
9567 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9568 return unsignedp ? sat_unsigned_long_long_accum_type_node
9569 : sat_long_long_accum_type_node;
9570 }
9571 else
9572 {
9573 if (size == SHORT_ACCUM_TYPE_SIZE)
9574 return unsignedp ? unsigned_short_accum_type_node
9575 : short_accum_type_node;
9576 if (size == ACCUM_TYPE_SIZE)
9577 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9578 if (size == LONG_ACCUM_TYPE_SIZE)
9579 return unsignedp ? unsigned_long_accum_type_node
9580 : long_accum_type_node;
9581 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9582 return unsignedp ? unsigned_long_long_accum_type_node
9583 : long_long_accum_type_node;
9584 }
9585
9586 return make_accum_type (size, unsignedp, satp);
9587 }
9588
9589
9590 /* Create an atomic variant node for TYPE. This routine is called
9591 during initialization of data types to create the 5 basic atomic
9592 types. The generic build_variant_type function requires these to
9593 already be set up in order to function properly, so cannot be
9594 called from there. If ALIGN is non-zero, then ensure alignment is
9595 overridden to this value. */
9596
9597 static tree
9598 build_atomic_base (tree type, unsigned int align)
9599 {
9600 tree t;
9601
9602 /* Make sure it's not already registered. */
9603 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9604 return t;
9605
9606 t = build_variant_type_copy (type);
9607 set_type_quals (t, TYPE_QUAL_ATOMIC);
9608
9609 if (align)
9610 TYPE_ALIGN (t) = align;
9611
9612 return t;
9613 }
9614
9615 /* Create nodes for all integer types (and error_mark_node) using the sizes
9616 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9617 SHORT_DOUBLE specifies whether double should be of the same precision
9618 as float. */
9619
9620 void
9621 build_common_tree_nodes (bool signed_char, bool short_double)
9622 {
9623 error_mark_node = make_node (ERROR_MARK);
9624 TREE_TYPE (error_mark_node) = error_mark_node;
9625
9626 initialize_sizetypes ();
9627
9628 /* Define both `signed char' and `unsigned char'. */
9629 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9630 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9631 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9632 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9633
9634 /* Define `char', which is like either `signed char' or `unsigned char'
9635 but not the same as either. */
9636 char_type_node
9637 = (signed_char
9638 ? make_signed_type (CHAR_TYPE_SIZE)
9639 : make_unsigned_type (CHAR_TYPE_SIZE));
9640 TYPE_STRING_FLAG (char_type_node) = 1;
9641
9642 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9643 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9644 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9645 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9646 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9647 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9648 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9649 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9650 #if HOST_BITS_PER_WIDE_INT >= 64
9651 /* TODO: This isn't correct, but at the moment the logic depends on the
9652 host's wide integers rather than the target's.
9653 If there is a target that does not support TImode but has a 128-bit
9654 integer-scalar register, this target check needs to be adjusted. */
9655 if (targetm.scalar_mode_supported_p (TImode))
9656 {
9657 int128_integer_type_node = make_signed_type (128);
9658 int128_unsigned_type_node = make_unsigned_type (128);
9659 }
9660 #endif
9661
9662 /* Define a boolean type. This type only represents boolean values but
9663 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9664 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9665 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9666 TYPE_PRECISION (boolean_type_node) = 1;
9667 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9668
9669 /* Define what type to use for size_t. */
9670 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9671 size_type_node = unsigned_type_node;
9672 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9673 size_type_node = long_unsigned_type_node;
9674 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9675 size_type_node = long_long_unsigned_type_node;
9676 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9677 size_type_node = short_unsigned_type_node;
9678 else
9679 gcc_unreachable ();
9680
9681 /* Fill in the rest of the sized types. Reuse existing type nodes
9682 when possible. */
9683 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9684 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9685 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9686 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9687 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9688
9689 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9690 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9691 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9692 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9693 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9694
9695 /* Don't call build_qualified_type for atomics. That routine does
9696 special processing for atomics, and until they are initialized
9697 it's better not to make that call.
9698
9699 Check to see if there is a target override for atomic types. */
9700
9701 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9702 targetm.atomic_align_for_mode (QImode));
9703 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9704 targetm.atomic_align_for_mode (HImode));
9705 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9706 targetm.atomic_align_for_mode (SImode));
9707 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9708 targetm.atomic_align_for_mode (DImode));
9709 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9710 targetm.atomic_align_for_mode (TImode));
9711
9712 access_public_node = get_identifier ("public");
9713 access_protected_node = get_identifier ("protected");
9714 access_private_node = get_identifier ("private");
9715
9716 /* Define these next since types below may use them. */
9717 integer_zero_node = build_int_cst (integer_type_node, 0);
9718 integer_one_node = build_int_cst (integer_type_node, 1);
9719 integer_three_node = build_int_cst (integer_type_node, 3);
9720 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9721
9722 size_zero_node = size_int (0);
9723 size_one_node = size_int (1);
9724 bitsize_zero_node = bitsize_int (0);
9725 bitsize_one_node = bitsize_int (1);
9726 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9727
9728 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9729 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9730
9731 void_type_node = make_node (VOID_TYPE);
9732 layout_type (void_type_node);
9733
9734 /* We are not going to have real types in C with less than byte alignment,
9735 so we might as well not have any types that claim to have it. */
9736 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9737 TYPE_USER_ALIGN (void_type_node) = 0;
9738
9739 void_node = make_node (VOID_CST);
9740 TREE_TYPE (void_node) = void_type_node;
9741
9742 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9743 layout_type (TREE_TYPE (null_pointer_node));
9744
9745 ptr_type_node = build_pointer_type (void_type_node);
9746 const_ptr_type_node
9747 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9748 fileptr_type_node = ptr_type_node;
9749
9750 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9751
9752 float_type_node = make_node (REAL_TYPE);
9753 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9754 layout_type (float_type_node);
9755
9756 double_type_node = make_node (REAL_TYPE);
9757 if (short_double)
9758 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9759 else
9760 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9761 layout_type (double_type_node);
9762
9763 long_double_type_node = make_node (REAL_TYPE);
9764 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9765 layout_type (long_double_type_node);
9766
9767 float_ptr_type_node = build_pointer_type (float_type_node);
9768 double_ptr_type_node = build_pointer_type (double_type_node);
9769 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9770 integer_ptr_type_node = build_pointer_type (integer_type_node);
9771
9772 /* Fixed size integer types. */
9773 uint16_type_node = build_nonstandard_integer_type (16, true);
9774 uint32_type_node = build_nonstandard_integer_type (32, true);
9775 uint64_type_node = build_nonstandard_integer_type (64, true);
9776
9777 /* Decimal float types. */
9778 dfloat32_type_node = make_node (REAL_TYPE);
9779 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9780 layout_type (dfloat32_type_node);
9781 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9782 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9783
9784 dfloat64_type_node = make_node (REAL_TYPE);
9785 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9786 layout_type (dfloat64_type_node);
9787 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9788 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9789
9790 dfloat128_type_node = make_node (REAL_TYPE);
9791 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9792 layout_type (dfloat128_type_node);
9793 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9794 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9795
9796 complex_integer_type_node = build_complex_type (integer_type_node);
9797 complex_float_type_node = build_complex_type (float_type_node);
9798 complex_double_type_node = build_complex_type (double_type_node);
9799 complex_long_double_type_node = build_complex_type (long_double_type_node);
9800
9801 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9802 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9803 sat_ ## KIND ## _type_node = \
9804 make_sat_signed_ ## KIND ## _type (SIZE); \
9805 sat_unsigned_ ## KIND ## _type_node = \
9806 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9807 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9808 unsigned_ ## KIND ## _type_node = \
9809 make_unsigned_ ## KIND ## _type (SIZE);
9810
9811 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9812 sat_ ## WIDTH ## KIND ## _type_node = \
9813 make_sat_signed_ ## KIND ## _type (SIZE); \
9814 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9815 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9816 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9817 unsigned_ ## WIDTH ## KIND ## _type_node = \
9818 make_unsigned_ ## KIND ## _type (SIZE);
9819
9820 /* Make fixed-point type nodes based on four different widths. */
9821 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9822 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9823 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9824 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9825 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9826
9827 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9828 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9829 NAME ## _type_node = \
9830 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9831 u ## NAME ## _type_node = \
9832 make_or_reuse_unsigned_ ## KIND ## _type \
9833 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9834 sat_ ## NAME ## _type_node = \
9835 make_or_reuse_sat_signed_ ## KIND ## _type \
9836 (GET_MODE_BITSIZE (MODE ## mode)); \
9837 sat_u ## NAME ## _type_node = \
9838 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9839 (GET_MODE_BITSIZE (U ## MODE ## mode));
9840
9841 /* Fixed-point type and mode nodes. */
9842 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9843 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9844 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9845 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9846 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9847 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9848 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9849 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9850 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9851 MAKE_FIXED_MODE_NODE (accum, da, DA)
9852 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9853
9854 {
9855 tree t = targetm.build_builtin_va_list ();
9856
9857 /* Many back-ends define record types without setting TYPE_NAME.
9858 If we copied the record type here, we'd keep the original
9859 record type without a name. This breaks name mangling. So,
9860 don't copy record types and let c_common_nodes_and_builtins()
9861 declare the type to be __builtin_va_list. */
9862 if (TREE_CODE (t) != RECORD_TYPE)
9863 t = build_variant_type_copy (t);
9864
9865 va_list_type_node = t;
9866 }
9867 }
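
/* Illustrative sketch (hypothetical front-end initialization order, not
   code from this file): a language front end typically builds the common
   tree nodes first, then registers its own builtin decls, and finally
   requests the middle-end builtins:

     build_common_tree_nodes (flag_signed_char, false);
     ... front end registers its builtins ...
     build_common_builtin_nodes ();  */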
9868
9869 /* Modify DECL for given flags.
9870 TM_PURE attribute is set only on types, so the function will modify
9871 DECL's type when ECF_TM_PURE is used. */
9872
9873 void
9874 set_call_expr_flags (tree decl, int flags)
9875 {
9876 if (flags & ECF_NOTHROW)
9877 TREE_NOTHROW (decl) = 1;
9878 if (flags & ECF_CONST)
9879 TREE_READONLY (decl) = 1;
9880 if (flags & ECF_PURE)
9881 DECL_PURE_P (decl) = 1;
9882 if (flags & ECF_LOOPING_CONST_OR_PURE)
9883 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9884 if (flags & ECF_NOVOPS)
9885 DECL_IS_NOVOPS (decl) = 1;
9886 if (flags & ECF_NORETURN)
9887 TREE_THIS_VOLATILE (decl) = 1;
9888 if (flags & ECF_MALLOC)
9889 DECL_IS_MALLOC (decl) = 1;
9890 if (flags & ECF_RETURNS_TWICE)
9891 DECL_IS_RETURNS_TWICE (decl) = 1;
9892 if (flags & ECF_LEAF)
9893 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9894 NULL, DECL_ATTRIBUTES (decl));
9895 if ((flags & ECF_TM_PURE) && flag_tm)
9896 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9897 /* Looping const or pure is implied by noreturn.
9898 There is currently no way to declare looping const or looping pure alone. */
9899 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9900 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9901 }
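
/* Example (illustrative sketch, hypothetical DECL): a function known
   never to throw and never to call back into the current unit could be
   marked with

     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF);

   which sets TREE_NOTHROW and attaches the "leaf" attribute as above.  */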
9902
9903
9904 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9905
9906 static void
9907 local_define_builtin (const char *name, tree type, enum built_in_function code,
9908 const char *library_name, int ecf_flags)
9909 {
9910 tree decl;
9911
9912 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9913 library_name, NULL_TREE);
9914 set_call_expr_flags (decl, ecf_flags);
9915
9916 set_builtin_decl (code, decl, true);
9917 }
9918
9919 /* Call this function after instantiating all builtins that the language
9920 front end cares about. This will build the rest of the builtins that
9921 are relied upon by the tree optimizers and the middle-end. */
9922
9923 void
9924 build_common_builtin_nodes (void)
9925 {
9926 tree tmp, ftype;
9927 int ecf_flags;
9928
9929 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9930 {
9931 ftype = build_function_type (void_type_node, void_list_node);
9932 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9933 "__builtin_unreachable",
9934 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9935 | ECF_CONST);
9936 }
9937
9938 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9939 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9940 {
9941 ftype = build_function_type_list (ptr_type_node,
9942 ptr_type_node, const_ptr_type_node,
9943 size_type_node, NULL_TREE);
9944
9945 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9946 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9947 "memcpy", ECF_NOTHROW | ECF_LEAF);
9948 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9949 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9950 "memmove", ECF_NOTHROW | ECF_LEAF);
9951 }
9952
9953 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9954 {
9955 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9956 const_ptr_type_node, size_type_node,
9957 NULL_TREE);
9958 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9959 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9960 }
9961
9962 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9963 {
9964 ftype = build_function_type_list (ptr_type_node,
9965 ptr_type_node, integer_type_node,
9966 size_type_node, NULL_TREE);
9967 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9968 "memset", ECF_NOTHROW | ECF_LEAF);
9969 }
9970
9971 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9972 {
9973 ftype = build_function_type_list (ptr_type_node,
9974 size_type_node, NULL_TREE);
9975 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9976 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9977 }
9978
9979 ftype = build_function_type_list (ptr_type_node, size_type_node,
9980 size_type_node, NULL_TREE);
9981 local_define_builtin ("__builtin_alloca_with_align", ftype,
9982 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9983 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9984
9985 /* If we're checking the stack, `alloca' can throw. */
9986 if (flag_stack_check)
9987 {
9988 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9989 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9990 }
9991
9992 ftype = build_function_type_list (void_type_node,
9993 ptr_type_node, ptr_type_node,
9994 ptr_type_node, NULL_TREE);
9995 local_define_builtin ("__builtin_init_trampoline", ftype,
9996 BUILT_IN_INIT_TRAMPOLINE,
9997 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9998 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9999 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10000 "__builtin_init_heap_trampoline",
10001 ECF_NOTHROW | ECF_LEAF);
10002
10003 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10004 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10005 BUILT_IN_ADJUST_TRAMPOLINE,
10006 "__builtin_adjust_trampoline",
10007 ECF_CONST | ECF_NOTHROW);
10008
10009 ftype = build_function_type_list (void_type_node,
10010 ptr_type_node, ptr_type_node, NULL_TREE);
10011 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10012 BUILT_IN_NONLOCAL_GOTO,
10013 "__builtin_nonlocal_goto",
10014 ECF_NORETURN | ECF_NOTHROW);
10015
10016 ftype = build_function_type_list (void_type_node,
10017 ptr_type_node, ptr_type_node, NULL_TREE);
10018 local_define_builtin ("__builtin_setjmp_setup", ftype,
10019 BUILT_IN_SETJMP_SETUP,
10020 "__builtin_setjmp_setup", ECF_NOTHROW);
10021
10022 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10023 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10024 BUILT_IN_SETJMP_RECEIVER,
10025 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10026
10027 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10028 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10029 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10030
10031 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10032 local_define_builtin ("__builtin_stack_restore", ftype,
10033 BUILT_IN_STACK_RESTORE,
10034 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10035
10036 /* If there's a possibility that we might use the ARM EABI, build the
10037 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10038 if (targetm.arm_eabi_unwinder)
10039 {
10040 ftype = build_function_type_list (void_type_node, NULL_TREE);
10041 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10042 BUILT_IN_CXA_END_CLEANUP,
10043 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10044 }
10045
10046 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10047 local_define_builtin ("__builtin_unwind_resume", ftype,
10048 BUILT_IN_UNWIND_RESUME,
10049 ((targetm_common.except_unwind_info (&global_options)
10050 == UI_SJLJ)
10051 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10052 ECF_NORETURN);
10053
10054 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10055 {
10056 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10057 NULL_TREE);
10058 local_define_builtin ("__builtin_return_address", ftype,
10059 BUILT_IN_RETURN_ADDRESS,
10060 "__builtin_return_address",
10061 ECF_NOTHROW);
10062 }
10063
10064 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10065 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10066 {
10067 ftype = build_function_type_list (void_type_node, ptr_type_node,
10068 ptr_type_node, NULL_TREE);
10069 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10070 local_define_builtin ("__cyg_profile_func_enter", ftype,
10071 BUILT_IN_PROFILE_FUNC_ENTER,
10072 "__cyg_profile_func_enter", 0);
10073 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10074 local_define_builtin ("__cyg_profile_func_exit", ftype,
10075 BUILT_IN_PROFILE_FUNC_EXIT,
10076 "__cyg_profile_func_exit", 0);
10077 }
10078
10079 /* The exception object and filter values from the runtime. The argument
10080 must be zero before exception lowering, i.e. from the front end. After
10081 exception lowering, it will be the region number for the exception
10082 landing pad. These functions are PURE instead of CONST to prevent
10083 them from being hoisted past the exception edge that will initialize
10084 its value in the landing pad. */
10085 ftype = build_function_type_list (ptr_type_node,
10086 integer_type_node, NULL_TREE);
10087 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10088 /* Only use TM_PURE if we have TM language support. */
10089 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10090 ecf_flags |= ECF_TM_PURE;
10091 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10092 "__builtin_eh_pointer", ecf_flags);
10093
10094 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10095 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10096 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10097 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10098
10099 ftype = build_function_type_list (void_type_node,
10100 integer_type_node, integer_type_node,
10101 NULL_TREE);
10102 local_define_builtin ("__builtin_eh_copy_values", ftype,
10103 BUILT_IN_EH_COPY_VALUES,
10104 "__builtin_eh_copy_values", ECF_NOTHROW);
10105
10106 /* Complex multiplication and division. These are handled as builtins
10107 rather than optabs because emit_library_call_value doesn't support
10108 complex. Further, we can do slightly better with folding these
10109 beasties if the real and imaginary parts of the arguments are separate. */
10110 {
10111 int mode;
10112
10113 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10114 {
10115 char mode_name_buf[4], *q;
10116 const char *p;
10117 enum built_in_function mcode, dcode;
10118 tree type, inner_type;
10119 const char *prefix = "__";
10120
10121 if (targetm.libfunc_gnu_prefix)
10122 prefix = "__gnu_";
10123
10124 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10125 if (type == NULL)
10126 continue;
10127 inner_type = TREE_TYPE (type);
10128
10129 ftype = build_function_type_list (type, inner_type, inner_type,
10130 inner_type, inner_type, NULL_TREE);
10131
10132 mcode = ((enum built_in_function)
10133 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10134 dcode = ((enum built_in_function)
10135 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10136
10137 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10138 *q = TOLOWER (*p);
10139 *q = '\0';
10140
10141 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10142 NULL);
10143 local_define_builtin (built_in_names[mcode], ftype, mcode,
10144 built_in_names[mcode],
10145 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10146
10147 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10148 NULL);
10149 local_define_builtin (built_in_names[dcode], ftype, dcode,
10150 built_in_names[dcode],
10151 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10152 }
10153 }
10154 }
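
/* Worked example of the complex libcall naming above: for SCmode the
   generated names are "__mulsc3" and "__divsc3" (or "__gnu_mulsc3" and
   "__gnu_divsc3" when targetm.libfunc_gnu_prefix is set), matching the
   libgcc helpers for single-precision complex arithmetic.  */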
10155
10156 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10157 better way.
10158
10159 If we requested a pointer to a vector, build up the pointers that
10160 we stripped off while looking for the inner type. Similarly for
10161 return values from functions.
10162
10163 The argument TYPE is the top of the chain, and BOTTOM is the
10164 new type which we will point to. */
10165
10166 tree
10167 reconstruct_complex_type (tree type, tree bottom)
10168 {
10169 tree inner, outer;
10170
10171 if (TREE_CODE (type) == POINTER_TYPE)
10172 {
10173 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10174 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10175 TYPE_REF_CAN_ALIAS_ALL (type));
10176 }
10177 else if (TREE_CODE (type) == REFERENCE_TYPE)
10178 {
10179 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10180 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10181 TYPE_REF_CAN_ALIAS_ALL (type));
10182 }
10183 else if (TREE_CODE (type) == ARRAY_TYPE)
10184 {
10185 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10186 outer = build_array_type (inner, TYPE_DOMAIN (type));
10187 }
10188 else if (TREE_CODE (type) == FUNCTION_TYPE)
10189 {
10190 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10191 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10192 }
10193 else if (TREE_CODE (type) == METHOD_TYPE)
10194 {
10195 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10196 /* The build_method_type_directly() routine prepends 'this' to the argument
10197 list, so we must compensate by stripping it off here. */
10198 outer
10199 = build_method_type_directly
10200 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10201 inner,
10202 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10203 }
10204 else if (TREE_CODE (type) == OFFSET_TYPE)
10205 {
10206 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10207 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10208 }
10209 else
10210 return bottom;
10211
10212 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10213 TYPE_QUALS (type));
10214 }
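
/* Example (illustrative sketch): if TYPE is "float *" and BOTTOM is a
   V4SF vector type, the result is "V4SF *": the pointer wrapper that was
   stripped off while looking for the inner type is rebuilt around BOTTOM.  */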
10215
10216 /* Returns a vector tree node given a vector or integer machine mode and
10217 the inner type. */
10218 tree
10219 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10220 {
10221 int nunits;
10222
10223 switch (GET_MODE_CLASS (mode))
10224 {
10225 case MODE_VECTOR_INT:
10226 case MODE_VECTOR_FLOAT:
10227 case MODE_VECTOR_FRACT:
10228 case MODE_VECTOR_UFRACT:
10229 case MODE_VECTOR_ACCUM:
10230 case MODE_VECTOR_UACCUM:
10231 nunits = GET_MODE_NUNITS (mode);
10232 break;
10233
10234 case MODE_INT:
10235 /* Check that there are no leftover bits. */
10236 gcc_assert (GET_MODE_BITSIZE (mode)
10237 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10238
10239 nunits = GET_MODE_BITSIZE (mode)
10240 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10241 break;
10242
10243 default:
10244 gcc_unreachable ();
10245 }
10246
10247 return make_vector_type (innertype, nunits, mode);
10248 }
10249
10250 /* Similarly, but takes the inner type and number of units, which must be
10251 a power of two. */
10252
10253 tree
10254 build_vector_type (tree innertype, int nunits)
10255 {
10256 return make_vector_type (innertype, nunits, VOIDmode);
10257 }
10258
10259 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10260
10261 tree
10262 build_opaque_vector_type (tree innertype, int nunits)
10263 {
10264 tree t = make_vector_type (innertype, nunits, VOIDmode);
10265 tree cand;
10266 /* We always build the non-opaque variant before the opaque one,
10267 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10268 cand = TYPE_NEXT_VARIANT (t);
10269 if (cand
10270 && TYPE_VECTOR_OPAQUE (cand)
10271 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10272 return cand;
10273 /* Otherwise build a variant type and make sure to queue it after
10274 the non-opaque type. */
10275 cand = build_distinct_type_copy (t);
10276 TYPE_VECTOR_OPAQUE (cand) = true;
10277 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10278 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10279 TYPE_NEXT_VARIANT (t) = cand;
10280 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10281 return cand;
10282 }
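
/* Usage sketch (hypothetical): a 4-element vector of 32-bit integers and
   its opaque variant:

     tree v4si  = build_vector_type (intSI_type_node, 4);
     tree ov4si = build_opaque_vector_type (intSI_type_node, 4);

   Both share the same TYPE_MAIN_VARIANT; the opaque copy differs only in
   TYPE_VECTOR_OPAQUE.  */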
10283
10284
10285 /* Given an initializer INIT, return TRUE if INIT is zero or some
10286 aggregate of zeros. Otherwise return FALSE. */
10287 bool
10288 initializer_zerop (const_tree init)
10289 {
10290 tree elt;
10291
10292 STRIP_NOPS (init);
10293
10294 switch (TREE_CODE (init))
10295 {
10296 case INTEGER_CST:
10297 return integer_zerop (init);
10298
10299 case REAL_CST:
10300 /* ??? Note that this is not correct for C4X float formats. There,
10301 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10302 negative exponent. */
10303 return real_zerop (init)
10304 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10305
10306 case FIXED_CST:
10307 return fixed_zerop (init);
10308
10309 case COMPLEX_CST:
10310 return integer_zerop (init)
10311 || (real_zerop (init)
10312 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10313 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10314
10315 case VECTOR_CST:
10316 {
10317 unsigned i;
10318 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10319 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10320 return false;
10321 return true;
10322 }
10323
10324 case CONSTRUCTOR:
10325 {
10326 unsigned HOST_WIDE_INT idx;
10327
10328 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10329 if (!initializer_zerop (elt))
10330 return false;
10331 return true;
10332 }
10333
10334 case STRING_CST:
10335 {
10336 int i;
10337
10338 /* We need to loop through all elements to handle cases like
10339 "\0" and "\0foobar". */
10340 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10341 if (TREE_STRING_POINTER (init)[i] != '\0')
10342 return false;
10343
10344 return true;
10345 }
10346
10347 default:
10348 return false;
10349 }
10350 }
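
/* For example, initializer_zerop is true for the initializers of

     int i = 0;
     struct s t = { 0, { 0.0, 0 } };
     char buf[4] = "\0\0\0";

   but false for -0.0 in IEEE formats, since that value has its sign bit
   set.  */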
10351
10352 /* Check whether the vector VEC consists entirely of equal elements and
10353 that the number of elements corresponds to the type of VEC.
10354 The function returns the first element of the vector,
10355 or NULL_TREE if the vector is not uniform. */
10356 tree
10357 uniform_vector_p (const_tree vec)
10358 {
10359 tree first, t;
10360 unsigned i;
10361
10362 if (vec == NULL_TREE)
10363 return NULL_TREE;
10364
10365 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10366
10367 if (TREE_CODE (vec) == VECTOR_CST)
10368 {
10369 first = VECTOR_CST_ELT (vec, 0);
10370 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10371 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10372 return NULL_TREE;
10373
10374 return first;
10375 }
10376
10377 else if (TREE_CODE (vec) == CONSTRUCTOR)
10378 {
10379 first = error_mark_node;
10380
10381 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10382 {
10383 if (i == 0)
10384 {
10385 first = t;
10386 continue;
10387 }
10388 if (!operand_equal_p (first, t, 0))
10389 return NULL_TREE;
10390 }
10391 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10392 return NULL_TREE;
10393
10394 return first;
10395 }
10396
10397 return NULL_TREE;
10398 }
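
/* Sketch: for a VECTOR_CST { 7, 7, 7, 7 } of a four-element vector type,
   uniform_vector_p returns the INTEGER_CST 7; for { 7, 7, 7, 8 } it
   returns NULL_TREE.  */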
10399
10400 /* Build an empty statement at location LOC. */
10401
10402 tree
10403 build_empty_stmt (location_t loc)
10404 {
10405 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10406 SET_EXPR_LOCATION (t, loc);
10407 return t;
10408 }
10409
10410
10411 /* Build an OpenMP clause with code CODE. LOC is the location of the
10412 clause. */
10413
10414 tree
10415 build_omp_clause (location_t loc, enum omp_clause_code code)
10416 {
10417 tree t;
10418 int size, length;
10419
10420 length = omp_clause_num_ops[code];
10421 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10422
10423 record_node_allocation_statistics (OMP_CLAUSE, size);
10424
10425 t = (tree) ggc_internal_alloc (size);
10426 memset (t, 0, size);
10427 TREE_SET_CODE (t, OMP_CLAUSE);
10428 OMP_CLAUSE_SET_CODE (t, code);
10429 OMP_CLAUSE_LOCATION (t) = loc;
10430
10431 return t;
10432 }
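
/* Usage sketch (hypothetical): building a "num_threads (4)" clause at
   location LOC:

     tree c = build_omp_clause (loc, OMP_CLAUSE_NUM_THREADS);
     OMP_CLAUSE_NUM_THREADS_EXPR (c) = build_int_cst (integer_type_node, 4);  */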
10433
10434 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10435 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10436 Except for the CODE and operand count field, other storage for the
10437 object is initialized to zeros. */
10438
10439 tree
10440 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10441 {
10442 tree t;
10443 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10444
10445 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10446 gcc_assert (len >= 1);
10447
10448 record_node_allocation_statistics (code, length);
10449
10450 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10451
10452 TREE_SET_CODE (t, code);
10453
10454 /* Can't use TREE_OPERAND to store the length because if checking is
10455 enabled, it will try to check the length before we store it. :-P */
10456 t->exp.operands[0] = build_int_cst (sizetype, len);
10457
10458 return t;
10459 }
10460
10461 /* Helper function for build_call_* functions; build a CALL_EXPR with
10462 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10463 the argument slots. */
10464
10465 static tree
10466 build_call_1 (tree return_type, tree fn, int nargs)
10467 {
10468 tree t;
10469
10470 t = build_vl_exp (CALL_EXPR, nargs + 3);
10471 TREE_TYPE (t) = return_type;
10472 CALL_EXPR_FN (t) = fn;
10473 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10474
10475 return t;
10476 }
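
/* Note on the operand layout assumed above: operand 0 of a CALL_EXPR
   holds the operand count, operand 1 the function, operand 2 the static
   chain, and the call arguments start at operand 3, which is why
   build_vl_exp is asked for NARGS + 3 slots.  */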
10477
10478 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10479 FN and a null static chain slot. NARGS is the number of call arguments
10480 which are specified as "..." arguments. */
10481
10482 tree
10483 build_call_nary (tree return_type, tree fn, int nargs, ...)
10484 {
10485 tree ret;
10486 va_list args;
10487 va_start (args, nargs);
10488 ret = build_call_valist (return_type, fn, nargs, args);
10489 va_end (args);
10490 return ret;
10491 }
10492
10493 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10494 FN and a null static chain slot. NARGS is the number of call arguments
10495 which are specified as a va_list ARGS. */
10496
10497 tree
10498 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10499 {
10500 tree t;
10501 int i;
10502
10503 t = build_call_1 (return_type, fn, nargs);
10504 for (i = 0; i < nargs; i++)
10505 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10506 process_call_operands (t);
10507 return t;
10508 }
10509
10510 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10511 FN and a null static chain slot. NARGS is the number of call arguments
10512 which are specified as a tree array ARGS. */
10513
10514 tree
10515 build_call_array_loc (location_t loc, tree return_type, tree fn,
10516 int nargs, const tree *args)
10517 {
10518 tree t;
10519 int i;
10520
10521 t = build_call_1 (return_type, fn, nargs);
10522 for (i = 0; i < nargs; i++)
10523 CALL_EXPR_ARG (t, i) = args[i];
10524 process_call_operands (t);
10525 SET_EXPR_LOCATION (t, loc);
10526 return t;
10527 }
10528
10529 /* Like build_call_array, but takes a vec. */
10530
10531 tree
10532 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10533 {
10534 tree ret, t;
10535 unsigned int ix;
10536
10537 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10538 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10539 CALL_EXPR_ARG (ret, ix) = t;
10540 process_call_operands (ret);
10541 return ret;
10542 }
10543
10544 /* Conveniently construct a function call expression. FNDECL names the
10545 function to be called and N arguments are passed in the array
10546 ARGARRAY. */
10547
10548 tree
10549 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10550 {
10551 tree fntype = TREE_TYPE (fndecl);
10552 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10553
10554 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10555 }
10556
10557 /* Conveniently construct a function call expression. FNDECL names the
10558 function to be called and the arguments are passed in the vector
10559 VEC. */
10560
10561 tree
10562 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10563 {
10564 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10565 vec_safe_address (vec));
10566 }
10567
10568
10569 /* Conveniently construct a function call expression. FNDECL names the
10570 function to be called, N is the number of arguments, and the "..."
10571 parameters are the argument expressions. */
10572
10573 tree
10574 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10575 {
10576 va_list ap;
10577 tree *argarray = XALLOCAVEC (tree, n);
10578 int i;
10579
10580 va_start (ap, n);
10581 for (i = 0; i < n; i++)
10582 argarray[i] = va_arg (ap, tree);
10583 va_end (ap);
10584 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10585 }
10586
10587 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10588 varargs macros aren't supported by all bootstrap compilers. */
10589
10590 tree
10591 build_call_expr (tree fndecl, int n, ...)
10592 {
10593 va_list ap;
10594 tree *argarray = XALLOCAVEC (tree, n);
10595 int i;
10596
10597 va_start (ap, n);
10598 for (i = 0; i < n; i++)
10599 argarray[i] = va_arg (ap, tree);
10600 va_end (ap);
10601 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10602 }
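
/* Usage sketch (hypothetical operands DST, SRC and SIZE): calling a
   builtin through the convenience wrappers:

     tree fn   = builtin_decl_explicit (BUILT_IN_MEMCPY);
     tree call = build_call_expr (fn, 3, dst, src, size);  */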
10603
10604 /* Build an internal call expression. This is just like CALL_EXPR, except
10605 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10606 call to the internal function. */
10607
10608 tree
10609 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10610 tree type, int n, ...)
10611 {
10612 va_list ap;
10613 int i;
10614
10615 tree fn = build_call_1 (type, NULL_TREE, n);
10616 va_start (ap, n);
10617 for (i = 0; i < n; i++)
10618 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10619 va_end (ap);
10620 SET_EXPR_LOCATION (fn, loc);
10621 CALL_EXPR_IFN (fn) = ifn;
10622 return fn;
10623 }
10624
10625 /* Create a new constant string literal and return a char* pointer to it.
10626 The STRING_CST value is the LEN characters at STR. */
10627 tree
10628 build_string_literal (int len, const char *str)
10629 {
10630 tree t, elem, index, type;
10631
10632 t = build_string (len, str);
10633 elem = build_type_variant (char_type_node, 1, 0);
10634 index = build_index_type (size_int (len - 1));
10635 type = build_array_type (elem, index);
10636 TREE_TYPE (t) = type;
10637 TREE_CONSTANT (t) = 1;
10638 TREE_READONLY (t) = 1;
10639 TREE_STATIC (t) = 1;
10640
10641 type = build_pointer_type (elem);
10642 t = build1 (ADDR_EXPR, type,
10643 build4 (ARRAY_REF, elem,
10644 t, integer_zero_node, NULL_TREE, NULL_TREE));
10645 return t;
10646 }
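
/* For example (sketch), the C literal "hi" would be built as

     build_string_literal (3, "hi");

   note that LEN counts the terminating NUL as well.  */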
10647
10648
10649
10650 /* Return true if T (assumed to be a DECL) must be assigned a memory
10651 location. */
10652
10653 bool
10654 needs_to_live_in_memory (const_tree t)
10655 {
10656 return (TREE_ADDRESSABLE (t)
10657 || is_global_var (t)
10658 || (TREE_CODE (t) == RESULT_DECL
10659 && !DECL_BY_REFERENCE (t)
10660 && aggregate_value_p (t, current_function_decl)));
10661 }
10662
10663 /* Return the value of the constant X, sign-extended. */
10664
10665 HOST_WIDE_INT
10666 int_cst_value (const_tree x)
10667 {
10668 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10669 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10670
10671 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10672 gcc_assert (cst_and_fits_in_hwi (x));
10673
10674 if (bits < HOST_BITS_PER_WIDE_INT)
10675 {
10676 bool negative = ((val >> (bits - 1)) & 1) != 0;
10677 if (negative)
10678 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10679 else
10680 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10681 }
10682
10683 return val;
10684 }
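
/* Worked example: for an 8-bit signed constant whose low word is 0xff,
   the sign-extension above turns 255 into -1, which is the value the
   constant represents in its own type.  */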
10685
10686 /* If TYPE is an integral or pointer type, return an integer type with
10687 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10688 if TYPE is already an integer type of signedness UNSIGNEDP. */
10689
10690 tree
10691 signed_or_unsigned_type_for (int unsignedp, tree type)
10692 {
10693 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10694 return type;
10695
10696 if (TREE_CODE (type) == VECTOR_TYPE)
10697 {
10698 tree inner = TREE_TYPE (type);
10699 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10700 if (!inner2)
10701 return NULL_TREE;
10702 if (inner == inner2)
10703 return type;
10704 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10705 }
10706
10707 if (!INTEGRAL_TYPE_P (type)
10708 && !POINTER_TYPE_P (type)
10709 && TREE_CODE (type) != OFFSET_TYPE)
10710 return NULL_TREE;
10711
10712 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10713 }
10714
10715 /* If TYPE is an integral or pointer type, return an integer type with
10716 the same precision which is unsigned, or itself if TYPE is already an
10717 unsigned integer type. */
10718
10719 tree
10720 unsigned_type_for (tree type)
10721 {
10722 return signed_or_unsigned_type_for (1, type);
10723 }
10724
10725 /* If TYPE is an integral or pointer type, return an integer type with
10726 the same precision which is signed, or itself if TYPE is already a
10727 signed integer type. */
10728
10729 tree
10730 signed_type_for (tree type)
10731 {
10732 return signed_or_unsigned_type_for (0, type);
10733 }
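
/* Sketch: unsigned_type_for (long_integer_type_node) yields an unsigned
   integer type of the same precision as "long", and for a pointer type
   it yields an unsigned integer type of pointer width; signed_type_for
   is the mirror image.  */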
10734
10735 /* If TYPE is a vector type, return a signed integer vector type with the
10736 same width and number of subparts. Otherwise return boolean_type_node. */
10737
10738 tree
10739 truth_type_for (tree type)
10740 {
10741 if (TREE_CODE (type) == VECTOR_TYPE)
10742 {
10743 tree elem = lang_hooks.types.type_for_size
10744 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10745 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10746 }
10747 else
10748 return boolean_type_node;
10749 }
10750
10751 /* Returns the largest value obtainable by casting something in INNER type to
10752 OUTER type. */
10753
10754 tree
10755 upper_bound_in_type (tree outer, tree inner)
10756 {
10757 unsigned int det = 0;
10758 unsigned oprec = TYPE_PRECISION (outer);
10759 unsigned iprec = TYPE_PRECISION (inner);
10760 unsigned prec;
10761
10762 /* Compute a unique number for every combination. */
10763 det |= (oprec > iprec) ? 4 : 0;
10764 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10765 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10766
10767 /* Determine the exponent to use. */
10768 switch (det)
10769 {
10770 case 0:
10771 case 1:
10772 /* oprec <= iprec, outer: signed, inner: don't care. */
10773 prec = oprec - 1;
10774 break;
10775 case 2:
10776 case 3:
10777 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10778 prec = oprec;
10779 break;
10780 case 4:
10781 /* oprec > iprec, outer: signed, inner: signed. */
10782 prec = iprec - 1;
10783 break;
10784 case 5:
10785 /* oprec > iprec, outer: signed, inner: unsigned. */
10786 prec = iprec;
10787 break;
10788 case 6:
10789 /* oprec > iprec, outer: unsigned, inner: signed. */
10790 prec = oprec;
10791 break;
10792 case 7:
10793 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10794 prec = iprec;
10795 break;
10796 default:
10797 gcc_unreachable ();
10798 }
10799
10800 return wide_int_to_tree (outer,
10801 wi::mask (prec, false, TYPE_PRECISION (outer)));
10802 }
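
/* Worked example: upper_bound_in_type (unsigned short, signed char) has
   oprec = 16 > iprec = 8, outer unsigned, inner signed, so det == 6 and
   prec == oprec; the largest obtainable value is 2^16 - 1 == 65535,
   e.g. (unsigned short) (signed char) -1.  */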
10803
10804 /* Returns the smallest value obtainable by casting something in INNER type to
10805 OUTER type. */
10806
10807 tree
10808 lower_bound_in_type (tree outer, tree inner)
10809 {
10810 unsigned oprec = TYPE_PRECISION (outer);
10811 unsigned iprec = TYPE_PRECISION (inner);
10812
10813 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10814 and obtain 0. */
10815 if (TYPE_UNSIGNED (outer)
10816 /* If we are widening something of an unsigned type, OUTER type
10817 contains all values of INNER type. In particular, both INNER
10818 and OUTER types have zero in common. */
10819 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10820 return build_int_cst (outer, 0);
10821 else
10822 {
10823 /* If we are widening a signed type to another signed type, we
10824 want to obtain -2^(iprec-1). If we are keeping the
10825 precision or narrowing to a signed type, we want to obtain
10826 -2^(oprec-1). */
10827 unsigned prec = oprec > iprec ? iprec : oprec;
10828 return wide_int_to_tree (outer,
10829 wi::mask (prec - 1, true,
10830 TYPE_PRECISION (outer)));
10831 }
10832 }
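
/* Worked example: lower_bound_in_type (short, signed char) widens a
   signed type to a signed type, so prec == iprec == 8 and the result is
   -2^7 == -128.  */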
10833
10834 /* Return nonzero if two operands that are suitable for PHI nodes are
10835 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10836 SSA_NAME or invariant. Note that this is strictly an optimization.
10837 That is, callers of this function can directly call operand_equal_p
10838 and get the same result, only slower. */
10839
10840 int
10841 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10842 {
10843 if (arg0 == arg1)
10844 return 1;
10845 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10846 return 0;
10847 return operand_equal_p (arg0, arg1, 0);
10848 }
10849
10850 /* Returns the number of zeros at the end of the binary representation of X. */
10851
10852 tree
10853 num_ending_zeros (const_tree x)
10854 {
10855 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10856 }
10857
10858
10859 #define WALK_SUBTREE(NODE) \
10860 do \
10861 { \
10862 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10863 if (result) \
10864 return result; \
10865 } \
10866 while (0)
10867
10868 /* This is a subroutine of walk_tree that walks the fields of TYPE that are to
10869 be walked whenever a type is seen in the tree. The rest of the operands and the
10870 return value are as for walk_tree. */
10871
10872 static tree
10873 walk_type_fields (tree type, walk_tree_fn func, void *data,
10874 struct pointer_set_t *pset, walk_tree_lh lh)
10875 {
10876 tree result = NULL_TREE;
10877
10878 switch (TREE_CODE (type))
10879 {
10880 case POINTER_TYPE:
10881 case REFERENCE_TYPE:
10882 case VECTOR_TYPE:
10883 /* We have to worry about mutually recursive pointers. These can't
10884 be written in C. They can in Ada. It's pathological, but
10885 there's an ACATS test (c38102a) that checks it. Deal with this
10886 by checking if we're pointing to another pointer, that one
10887 points to another pointer, that one does too, and we have no htab.
10888 If so, get a hash table. We check three levels deep to avoid
10889 the cost of the hash table if we don't need one. */
10890 if (POINTER_TYPE_P (TREE_TYPE (type))
10891 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10892 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10893 && !pset)
10894 {
10895 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10896 func, data);
10897 if (result)
10898 return result;
10899
10900 break;
10901 }
10902
10903 /* ... fall through ... */
10904
10905 case COMPLEX_TYPE:
10906 WALK_SUBTREE (TREE_TYPE (type));
10907 break;
10908
10909 case METHOD_TYPE:
10910 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10911
10912 /* Fall through. */
10913
10914 case FUNCTION_TYPE:
10915 WALK_SUBTREE (TREE_TYPE (type));
10916 {
10917 tree arg;
10918
10919 /* We never want to walk into default arguments. */
10920 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10921 WALK_SUBTREE (TREE_VALUE (arg));
10922 }
10923 break;
10924
10925 case ARRAY_TYPE:
10926 /* Don't follow this node's type if it is a pointer, for fear that
10927 we'll have infinite recursion. If we have a PSET, then we
10928 need not fear. */
10929 if (pset
10930 || (!POINTER_TYPE_P (TREE_TYPE (type))
10931 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10932 WALK_SUBTREE (TREE_TYPE (type));
10933 WALK_SUBTREE (TYPE_DOMAIN (type));
10934 break;
10935
10936 case OFFSET_TYPE:
10937 WALK_SUBTREE (TREE_TYPE (type));
10938 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10939 break;
10940
10941 default:
10942 break;
10943 }
10944
10945 return NULL_TREE;
10946 }
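
/* Usage sketch for the walker below (hypothetical callback, not part of
   this file): a walk_tree_fn that counts CALL_EXPRs:

     static tree
     count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                    void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(int *) data;
       return NULL_TREE;
     }

   invoked as walk_tree (&expr, count_calls_r, &count, NULL); returning a
   non-NULL tree from the callback stops the walk and that value is
   propagated back to the caller.  */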
10947
10948 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10949 called with the DATA and the address of each sub-tree. If FUNC returns a
10950 non-NULL value, the traversal is stopped, and the value returned by FUNC
10951 is returned. If PSET is non-NULL it is used to record the nodes visited,
10952 and to avoid visiting a node more than once. */
10953
10954 tree
10955 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10956 struct pointer_set_t *pset, walk_tree_lh lh)
10957 {
10958 enum tree_code code;
10959 int walk_subtrees;
10960 tree result;
10961
10962 #define WALK_SUBTREE_TAIL(NODE) \
10963 do \
10964 { \
10965 tp = & (NODE); \
10966 goto tail_recurse; \
10967 } \
10968 while (0)
10969
10970 tail_recurse:
10971 /* Skip empty subtrees. */
10972 if (!*tp)
10973 return NULL_TREE;
10974
10975 /* Don't walk the same tree twice, if the user has requested
10976 that we avoid doing so. */
10977 if (pset && pointer_set_insert (pset, *tp))
10978 return NULL_TREE;
10979
10980 /* Call the function. */
10981 walk_subtrees = 1;
10982 result = (*func) (tp, &walk_subtrees, data);
10983
10984 /* If we found something, return it. */
10985 if (result)
10986 return result;
10987
10988 code = TREE_CODE (*tp);
10989
10990 /* Even if we didn't, FUNC may have decided that there was nothing
10991 interesting below this point in the tree. */
10992 if (!walk_subtrees)
10993 {
10994 /* But we still need to check our siblings. */
10995 if (code == TREE_LIST)
10996 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10997 else if (code == OMP_CLAUSE)
10998 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10999 else
11000 return NULL_TREE;
11001 }
11002
11003 if (lh)
11004 {
11005 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11006 if (result || !walk_subtrees)
11007 return result;
11008 }
11009
11010 switch (code)
11011 {
11012 case ERROR_MARK:
11013 case IDENTIFIER_NODE:
11014 case INTEGER_CST:
11015 case REAL_CST:
11016 case FIXED_CST:
11017 case VECTOR_CST:
11018 case STRING_CST:
11019 case BLOCK:
11020 case PLACEHOLDER_EXPR:
11021 case SSA_NAME:
11022 case FIELD_DECL:
11023 case RESULT_DECL:
11024 /* None of these have subtrees other than those already walked
11025 above. */
11026 break;
11027
11028 case TREE_LIST:
11029 WALK_SUBTREE (TREE_VALUE (*tp));
11030 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11031 break;
11032
11033 case TREE_VEC:
11034 {
11035 int len = TREE_VEC_LENGTH (*tp);
11036
11037 if (len == 0)
11038 break;
11039
11040 /* Walk all elements but the first. */
11041 while (--len)
11042 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11043
11044 /* Now walk the first one as a tail call. */
11045 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11046 }
11047
11048 case COMPLEX_CST:
11049 WALK_SUBTREE (TREE_REALPART (*tp));
11050 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11051
11052 case CONSTRUCTOR:
11053 {
11054 unsigned HOST_WIDE_INT idx;
11055 constructor_elt *ce;
11056
11057 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11058 idx++)
11059 WALK_SUBTREE (ce->value);
11060 }
11061 break;
11062
11063 case SAVE_EXPR:
11064 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11065
11066 case BIND_EXPR:
11067 {
11068 tree decl;
11069 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11070 {
11071 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11072 into declarations that are just mentioned, rather than
11073 declared; they don't really belong to this part of the tree.
11074 And, we can see cycles: the initializer for a declaration
11075 can refer to the declaration itself. */
11076 WALK_SUBTREE (DECL_INITIAL (decl));
11077 WALK_SUBTREE (DECL_SIZE (decl));
11078 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11079 }
11080 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11081 }
11082
11083 case STATEMENT_LIST:
11084 {
11085 tree_stmt_iterator i;
11086 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11087 WALK_SUBTREE (*tsi_stmt_ptr (i));
11088 }
11089 break;
11090
11091 case OMP_CLAUSE:
11092 switch (OMP_CLAUSE_CODE (*tp))
11093 {
11094 case OMP_CLAUSE_PRIVATE:
11095 case OMP_CLAUSE_SHARED:
11096 case OMP_CLAUSE_FIRSTPRIVATE:
11097 case OMP_CLAUSE_COPYIN:
11098 case OMP_CLAUSE_COPYPRIVATE:
11099 case OMP_CLAUSE_FINAL:
11100 case OMP_CLAUSE_IF:
11101 case OMP_CLAUSE_NUM_THREADS:
11102 case OMP_CLAUSE_SCHEDULE:
11103 case OMP_CLAUSE_UNIFORM:
11104 case OMP_CLAUSE_DEPEND:
11105 case OMP_CLAUSE_NUM_TEAMS:
11106 case OMP_CLAUSE_THREAD_LIMIT:
11107 case OMP_CLAUSE_DEVICE:
11108 case OMP_CLAUSE_DIST_SCHEDULE:
11109 case OMP_CLAUSE_SAFELEN:
11110 case OMP_CLAUSE_SIMDLEN:
11111 case OMP_CLAUSE__LOOPTEMP_:
11112 case OMP_CLAUSE__SIMDUID_:
11113 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11114 /* FALLTHRU */
11115
11116 case OMP_CLAUSE_NOWAIT:
11117 case OMP_CLAUSE_ORDERED:
11118 case OMP_CLAUSE_DEFAULT:
11119 case OMP_CLAUSE_UNTIED:
11120 case OMP_CLAUSE_MERGEABLE:
11121 case OMP_CLAUSE_PROC_BIND:
11122 case OMP_CLAUSE_INBRANCH:
11123 case OMP_CLAUSE_NOTINBRANCH:
11124 case OMP_CLAUSE_FOR:
11125 case OMP_CLAUSE_PARALLEL:
11126 case OMP_CLAUSE_SECTIONS:
11127 case OMP_CLAUSE_TASKGROUP:
11128 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11129
11130 case OMP_CLAUSE_LASTPRIVATE:
11131 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11132 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11133 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11134
11135 case OMP_CLAUSE_COLLAPSE:
11136 {
11137 int i;
11138 for (i = 0; i < 3; i++)
11139 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11140 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11141 }
11142
11143 case OMP_CLAUSE_LINEAR:
11144 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11145 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11146 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11147 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11148
11149 case OMP_CLAUSE_ALIGNED:
11150 case OMP_CLAUSE_FROM:
11151 case OMP_CLAUSE_TO:
11152 case OMP_CLAUSE_MAP:
11153 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11154 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11155 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11156
11157 case OMP_CLAUSE_REDUCTION:
11158 {
11159 int i;
11160 for (i = 0; i < 4; i++)
11161 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11162 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11163 }
11164
11165 default:
11166 gcc_unreachable ();
11167 }
11168 break;
11169
11170 case TARGET_EXPR:
11171 {
11172 int i, len;
11173
11174 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11175 But we only want to walk it once. */
11176 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11177 for (i = 0; i < len; ++i)
11178 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11179 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11180 }
11181
11182 case DECL_EXPR:
11183 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11184 defining. We only want to walk into these fields of a type in this
11185 case and not in the general case of a mere reference to the type.
11186
11187 The criterion is as follows: if the field can be an expression, it
11188 must be walked only here. This should be in keeping with the fields
11189 that are directly gimplified in gimplify_type_sizes in order for the
11190 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11191 variable-sized types.
11192
11193 Note that DECLs get walked as part of processing the BIND_EXPR. */
11194 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11195 {
11196 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11197 if (TREE_CODE (*type_p) == ERROR_MARK)
11198 return NULL_TREE;
11199
11200 /* Call the function for the type. See if it returns anything or
11201 doesn't want us to continue. If we are to continue, walk both
11202 the normal fields and those for the declaration case. */
11203 result = (*func) (type_p, &walk_subtrees, data);
11204 if (result || !walk_subtrees)
11205 return result;
11206
11207 /* But do not walk a pointed-to type since it may itself need to
11208 be walked in the declaration case if it isn't anonymous. */
11209 if (!POINTER_TYPE_P (*type_p))
11210 {
11211 result = walk_type_fields (*type_p, func, data, pset, lh);
11212 if (result)
11213 return result;
11214 }
11215
11216 /* If this is a record type, also walk the fields. */
11217 if (RECORD_OR_UNION_TYPE_P (*type_p))
11218 {
11219 tree field;
11220
11221 for (field = TYPE_FIELDS (*type_p); field;
11222 field = DECL_CHAIN (field))
11223 {
11224 /* We'd like to look at the type of the field, but we can
11225 easily get infinite recursion. So assume it's pointed
11226 to elsewhere in the tree. Also, ignore things that
11227 aren't fields. */
11228 if (TREE_CODE (field) != FIELD_DECL)
11229 continue;
11230
11231 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11232 WALK_SUBTREE (DECL_SIZE (field));
11233 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11234 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11235 WALK_SUBTREE (DECL_QUALIFIER (field));
11236 }
11237 }
11238
11239 /* Same for scalar types. */
11240 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11241 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11242 || TREE_CODE (*type_p) == INTEGER_TYPE
11243 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11244 || TREE_CODE (*type_p) == REAL_TYPE)
11245 {
11246 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11247 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11248 }
11249
11250 WALK_SUBTREE (TYPE_SIZE (*type_p));
11251 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11252 }
11253 /* FALLTHRU */
11254
11255 default:
11256 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11257 {
11258 int i, len;
11259
11260 /* Walk over all the sub-trees of this operand. */
11261 len = TREE_OPERAND_LENGTH (*tp);
11262
11263 /* Go through the subtrees. We need to do this in forward order so
11264 that the scope of a FOR_EXPR is handled properly. */
11265 if (len)
11266 {
11267 for (i = 0; i < len - 1; ++i)
11268 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11269 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11270 }
11271 }
11272 /* If this is a type, walk the needed fields in the type. */
11273 else if (TYPE_P (*tp))
11274 return walk_type_fields (*tp, func, data, pset, lh);
11275 break;
11276 }
11277
11278 /* We didn't find what we were looking for. */
11279 return NULL_TREE;
11280
11281 #undef WALK_SUBTREE_TAIL
11282 }
11283 #undef WALK_SUBTREE
11284
11285 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11286
11287 tree
11288 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11289 walk_tree_lh lh)
11290 {
11291 tree result;
11292 struct pointer_set_t *pset;
11293
11294 pset = pointer_set_create ();
11295 result = walk_tree_1 (tp, func, data, pset, lh);
11296 pointer_set_destroy (pset);
11297 return result;
11298 }
11299
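/* An illustrative usage sketch (not part of the original file; the helper
   names below are hypothetical): a walk_tree_fn callback together with a
   driver that counts every distinct node reachable from an expression,
   relying on the pointer set kept by walk_tree_without_duplicates_1 so
   that shared subtrees are visited only once.  */
#if 0
static tree
count_nodes_r (tree *tp ATTRIBUTE_UNUSED, int *walk_subtrees ATTRIBUTE_UNUSED,
               void *data)
{
  /* DATA points at the running count; returning NULL_TREE keeps walking.  */
  ++*(int *) data;
  return NULL_TREE;
}

static int
count_nodes (tree expr)
{
  int count = 0;
  walk_tree_without_duplicates_1 (&expr, count_nodes_r, &count, NULL);
  return count;
}
#endif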
11300
11301 tree
11302 tree_block (tree t)
11303 {
11304 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11305
11306 if (IS_EXPR_CODE_CLASS (c))
11307 return LOCATION_BLOCK (t->exp.locus);
11308 gcc_unreachable ();
11309 return NULL;
11310 }
11311
11312 void
11313 tree_set_block (tree t, tree b)
11314 {
11315 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11316
11317 if (IS_EXPR_CODE_CLASS (c))
11318 {
11319 if (b)
11320 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11321 else
11322 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11323 }
11324 else
11325 gcc_unreachable ();
11326 }
11327
11328 /* Create a nameless artificial label and put it in the current
11329 function context. The label has a location of LOC. Returns the
11330 newly created label. */
11331
11332 tree
11333 create_artificial_label (location_t loc)
11334 {
11335 tree lab = build_decl (loc,
11336 LABEL_DECL, NULL_TREE, void_type_node);
11337
11338 DECL_ARTIFICIAL (lab) = 1;
11339 DECL_IGNORED_P (lab) = 1;
11340 DECL_CONTEXT (lab) = current_function_decl;
11341 return lab;
11342 }
11343
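/* A hedged usage sketch (hypothetical helper, not part of the original
   file): lowering code typically wraps such a label in a LABEL_EXPR so
   that it can be emitted into a statement sequence and targeted by
   GOTO_EXPRs.  */
#if 0
static tree
build_jump_target (location_t loc)
{
  tree lab = create_artificial_label (loc);
  return build1 (LABEL_EXPR, void_type_node, lab);
}
#endif
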
11344 /* Given a tree, try to return a useful variable name that we can use
11345 to prefix a temporary that is being assigned the value of the tree.
11346 I.E. given <temp> = &A, return A. */
11347
11348 const char *
11349 get_name (tree t)
11350 {
11351 tree stripped_decl;
11352
11353 stripped_decl = t;
11354 STRIP_NOPS (stripped_decl);
11355 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11356 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11357 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11358 {
11359 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11360 if (!name)
11361 return NULL;
11362 return IDENTIFIER_POINTER (name);
11363 }
11364 else
11365 {
11366 switch (TREE_CODE (stripped_decl))
11367 {
11368 case ADDR_EXPR:
11369 return get_name (TREE_OPERAND (stripped_decl, 0));
11370 default:
11371 return NULL;
11372 }
11373 }
11374 }
11375
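/* A hedged sketch of the intended use (hypothetical helper, not part of
   the original file): name a temporary after the value it copies, so that
   dumps read "a.1" rather than an anonymous "tmp.1".  */
#if 0
static tree
make_named_temp (tree val, tree type)
{
  const char *name = get_name (val);
  return create_tmp_var (type, name ? name : "tmp");
}
#endif
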
11376 /* Return true if the function type FNTYPE has a variable argument list. */
11377
11378 bool
11379 stdarg_p (const_tree fntype)
11380 {
11381 function_args_iterator args_iter;
11382 tree n = NULL_TREE, t;
11383
11384 if (!fntype)
11385 return false;
11386
11387 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11388 {
11389 n = t;
11390 }
11391
11392 return n != NULL_TREE && n != void_type_node;
11393 }
11394
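/* Worked example (illustrative): for "int f (int, ...)" the argument type
   list is {int} and its last element is not void_type_node, so stdarg_p
   returns true; for the prototype "int f (int)" the list is {int, void}
   and stdarg_p returns false; for an unprototyped "int f ()" the list is
   empty and stdarg_p also returns false.  */
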
11395 /* Return true if the function type FNTYPE has a prototype. */
11396
11397 bool
11398 prototype_p (tree fntype)
11399 {
11400 tree t;
11401
11402 gcc_assert (fntype != NULL_TREE);
11403
11404 t = TYPE_ARG_TYPES (fntype);
11405 return (t != NULL_TREE);
11406 }
11407
11408 /* If BLOCK is inlined from an __attribute__((__artificial__))
11409 routine, return a pointer to the location from where it has been
11410 called. */
11411 location_t *
11412 block_nonartificial_location (tree block)
11413 {
11414 location_t *ret = NULL;
11415
11416 while (block && TREE_CODE (block) == BLOCK
11417 && BLOCK_ABSTRACT_ORIGIN (block))
11418 {
11419 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11420
11421 while (TREE_CODE (ao) == BLOCK
11422 && BLOCK_ABSTRACT_ORIGIN (ao)
11423 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11424 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11425
11426 if (TREE_CODE (ao) == FUNCTION_DECL)
11427 {
11428 /* If AO is an artificial inline, point RET to the
11429 call site locus at which it has been inlined and continue
11430 the loop, in case AO's caller is also an artificial
11431 inline. */
11432 if (DECL_DECLARED_INLINE_P (ao)
11433 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11434 ret = &BLOCK_SOURCE_LOCATION (block);
11435 else
11436 break;
11437 }
11438 else if (TREE_CODE (ao) != BLOCK)
11439 break;
11440
11441 block = BLOCK_SUPERCONTEXT (block);
11442 }
11443 return ret;
11444 }
11445
11446
11447 /* If EXP is inlined from an __attribute__((__artificial__))
11448 function, return the location of the original call expression. */
11449
11450 location_t
11451 tree_nonartificial_location (tree exp)
11452 {
11453 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11454
11455 if (loc)
11456 return *loc;
11457 else
11458 return EXPR_LOCATION (exp);
11459 }
11460
11461
11462 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11463 nodes. */
11464
11465 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11466
11467 static hashval_t
11468 cl_option_hash_hash (const void *x)
11469 {
11470 const_tree const t = (const_tree) x;
11471 const char *p;
11472 size_t i;
11473 size_t len = 0;
11474 hashval_t hash = 0;
11475
11476 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11477 {
11478 p = (const char *)TREE_OPTIMIZATION (t);
11479 len = sizeof (struct cl_optimization);
11480 }
11481
11482 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11483 {
11484 p = (const char *)TREE_TARGET_OPTION (t);
11485 len = sizeof (struct cl_target_option);
11486 }
11487
11488 else
11489 gcc_unreachable ();
11490
11491 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11492 something else. */
11493 for (i = 0; i < len; i++)
11494 if (p[i])
11495 hash = (hash << 4) ^ ((i << 2) | p[i]);
11496
11497 return hash;
11498 }
11499
11500 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11501 TARGET_OPTION tree node) is the same as that given by *Y, which is a
11502 node of the same kind. */
11503
11504 static int
11505 cl_option_hash_eq (const void *x, const void *y)
11506 {
11507 const_tree const xt = (const_tree) x;
11508 const_tree const yt = (const_tree) y;
11509 const char *xp;
11510 const char *yp;
11511 size_t len;
11512
11513 if (TREE_CODE (xt) != TREE_CODE (yt))
11514 return 0;
11515
11516 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11517 {
11518 xp = (const char *)TREE_OPTIMIZATION (xt);
11519 yp = (const char *)TREE_OPTIMIZATION (yt);
11520 len = sizeof (struct cl_optimization);
11521 }
11522
11523 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11524 {
11525 xp = (const char *)TREE_TARGET_OPTION (xt);
11526 yp = (const char *)TREE_TARGET_OPTION (yt);
11527 len = sizeof (struct cl_target_option);
11528 }
11529
11530 else
11531 gcc_unreachable ();
11532
11533 return (memcmp (xp, yp, len) == 0);
11534 }
11535
11536 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11537
11538 tree
11539 build_optimization_node (struct gcc_options *opts)
11540 {
11541 tree t;
11542 void **slot;
11543
11544 /* Use the cache of optimization nodes. */
11545
11546 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11547 opts);
11548
11549 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11550 t = (tree) *slot;
11551 if (!t)
11552 {
11553 /* Insert this one into the hash table. */
11554 t = cl_optimization_node;
11555 *slot = t;
11556
11557 /* Make a new node for next time round. */
11558 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11559 }
11560
11561 return t;
11562 }
11563
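/* A hedged usage sketch (hypothetical context, not part of the original
   file): attribute handlers record per-function optimization settings
   like this; because of the hash table above, functions compiled with
   identical option sets end up sharing a single OPTIMIZATION_NODE.  */
#if 0
static void
record_function_options (tree fndecl, struct gcc_options *opts)
{
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
    = build_optimization_node (opts);
}
#endif
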
11564 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11565
11566 tree
11567 build_target_option_node (struct gcc_options *opts)
11568 {
11569 tree t;
11570 void **slot;
11571
11572 /* Use the cache of target option nodes. */
11573
11574 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11575 opts);
11576
11577 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11578 t = (tree) *slot;
11579 if (!t)
11580 {
11581 /* Insert this one into the hash table. */
11582 t = cl_target_option_node;
11583 *slot = t;
11584
11585 /* Make a new node for next time round. */
11586 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11587 }
11588
11589 return t;
11590 }
11591
11592 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11593 Called through htab_traverse. */
11594
11595 static int
11596 prepare_target_option_node_for_pch (void **slot, void *)
11597 {
11598 tree node = (tree) *slot;
11599 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11600 TREE_TARGET_GLOBALS (node) = NULL;
11601 return 1;
11602 }
11603
11604 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11605 so that they aren't saved during PCH writing. */
11606
11607 void
11608 prepare_target_option_nodes_for_pch (void)
11609 {
11610 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11611 NULL);
11612 }
11613
11614 /* Determine the "ultimate origin" of a block. The block may be an inlined
11615 instance of an inlined instance of a block which is local to an inline
11616 function, so we have to trace all of the way back through the origin chain
11617 to find out what sort of node actually served as the original seed for the
11618 given block. */
11619
11620 tree
11621 block_ultimate_origin (const_tree block)
11622 {
11623 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11624
11625 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11626 nodes in the function to point to themselves; ignore that if
11627 we're trying to output the abstract instance of this function. */
11628 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11629 return NULL_TREE;
11630
11631 if (immediate_origin == NULL_TREE)
11632 return NULL_TREE;
11633 else
11634 {
11635 tree ret_val;
11636 tree lookahead = immediate_origin;
11637
11638 do
11639 {
11640 ret_val = lookahead;
11641 lookahead = (TREE_CODE (ret_val) == BLOCK
11642 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11643 }
11644 while (lookahead != NULL && lookahead != ret_val);
11645
11646 /* The block's abstract origin chain may not be the *ultimate* origin of
11647 the block. It could lead to a DECL that has an abstract origin set.
11648 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11649 will give us if it has one). Note that DECL's abstract origins are
11650 supposed to be the most distant ancestor (or so decl_ultimate_origin
11651 claims), so we don't need to loop following the DECL origins. */
11652 if (DECL_P (ret_val))
11653 return DECL_ORIGIN (ret_val);
11654
11655 return ret_val;
11656 }
11657 }
11658
11659 /* Return true iff conversion in EXP generates no instruction. Mark
11660 it inline so that we fully inline into the stripping functions even
11661 though we have two uses of this function. */
11662
11663 static inline bool
11664 tree_nop_conversion (const_tree exp)
11665 {
11666 tree outer_type, inner_type;
11667
11668 if (!CONVERT_EXPR_P (exp)
11669 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11670 return false;
11671 if (TREE_OPERAND (exp, 0) == error_mark_node)
11672 return false;
11673
11674 outer_type = TREE_TYPE (exp);
11675 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11676
11677 if (!inner_type)
11678 return false;
11679
11680 /* Use precision rather than machine mode when we can, which gives
11681 the correct answer even for submode (bit-field) types. */
11682 if ((INTEGRAL_TYPE_P (outer_type)
11683 || POINTER_TYPE_P (outer_type)
11684 || TREE_CODE (outer_type) == OFFSET_TYPE)
11685 && (INTEGRAL_TYPE_P (inner_type)
11686 || POINTER_TYPE_P (inner_type)
11687 || TREE_CODE (inner_type) == OFFSET_TYPE))
11688 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11689
11690 /* Otherwise fall back on comparing machine modes (e.g. for
11691 aggregate types, floats). */
11692 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11693 }
11694
11695 /* Return true iff conversion in EXP generates no instruction. Don't
11696 consider conversions changing the signedness. */
11697
11698 static bool
11699 tree_sign_nop_conversion (const_tree exp)
11700 {
11701 tree outer_type, inner_type;
11702
11703 if (!tree_nop_conversion (exp))
11704 return false;
11705
11706 outer_type = TREE_TYPE (exp);
11707 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11708
11709 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11710 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11711 }
11712
11713 /* Strip conversions from EXP according to tree_nop_conversion and
11714 return the resulting expression. */
11715
11716 tree
11717 tree_strip_nop_conversions (tree exp)
11718 {
11719 while (tree_nop_conversion (exp))
11720 exp = TREE_OPERAND (exp, 0);
11721 return exp;
11722 }
11723
11724 /* Strip conversions from EXP according to tree_sign_nop_conversion
11725 and return the resulting expression. */
11726
11727 tree
11728 tree_strip_sign_nop_conversions (tree exp)
11729 {
11730 while (tree_sign_nop_conversion (exp))
11731 exp = TREE_OPERAND (exp, 0);
11732 return exp;
11733 }
11734
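/* Worked example (illustrative, assuming a target where long and pointers
   are both 64 bits wide): for the expression (long) (unsigned long) p,
   where p has pointer type, tree_strip_nop_conversions strips both
   conversions and returns p, while tree_strip_sign_nop_conversions
   returns the expression unchanged because the outermost conversion
   changes signedness.  */
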
11735 /* Strip any floating point extensions from EXP and return the result. */
11736 tree
11737 strip_float_extensions (tree exp)
11738 {
11739 tree sub, expt, subt;
11740
11741 /* For a floating point constant, look up the narrowest type that can hold
11742 it properly and handle it like (type)(narrowest_type)constant.
11743 This way we can optimize, for instance, a=a*2.0 where "a" is a float
11744 but 2.0 is a double constant. */
11745 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11746 {
11747 REAL_VALUE_TYPE orig;
11748 tree type = NULL;
11749
11750 orig = TREE_REAL_CST (exp);
11751 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11752 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11753 type = float_type_node;
11754 else if (TYPE_PRECISION (TREE_TYPE (exp))
11755 > TYPE_PRECISION (double_type_node)
11756 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11757 type = double_type_node;
11758 if (type)
11759 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11760 }
11761
11762 if (!CONVERT_EXPR_P (exp))
11763 return exp;
11764
11765 sub = TREE_OPERAND (exp, 0);
11766 subt = TREE_TYPE (sub);
11767 expt = TREE_TYPE (exp);
11768
11769 if (!FLOAT_TYPE_P (subt))
11770 return exp;
11771
11772 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11773 return exp;
11774
11775 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11776 return exp;
11777
11778 return strip_float_extensions (sub);
11779 }
11780
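/* Worked example (illustrative): in a = a * 2.0 with a of type float, the
   REAL_CST 2.0 has type double but truncates exactly to float, so
   strip_float_extensions rebuilds it as a float constant; similarly a
   widening conversion (double) a is stripped back to a, which lets the
   multiplication be carried out in single precision.  */
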
11781 /* Strip out all handled components that produce invariant
11782 offsets. */
11783
11784 const_tree
11785 strip_invariant_refs (const_tree op)
11786 {
11787 while (handled_component_p (op))
11788 {
11789 switch (TREE_CODE (op))
11790 {
11791 case ARRAY_REF:
11792 case ARRAY_RANGE_REF:
11793 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11794 || TREE_OPERAND (op, 2) != NULL_TREE
11795 || TREE_OPERAND (op, 3) != NULL_TREE)
11796 return NULL;
11797 break;
11798
11799 case COMPONENT_REF:
11800 if (TREE_OPERAND (op, 2) != NULL_TREE)
11801 return NULL;
11802 break;
11803
11804 default:;
11805 }
11806 op = TREE_OPERAND (op, 0);
11807 }
11808
11809 return op;
11810 }
11811
11812 static GTY(()) tree gcc_eh_personality_decl;
11813
11814 /* Return the GCC personality function decl. */
11815
11816 tree
11817 lhd_gcc_personality (void)
11818 {
11819 if (!gcc_eh_personality_decl)
11820 gcc_eh_personality_decl = build_personality_function ("gcc");
11821 return gcc_eh_personality_decl;
11822 }
11823
11824 /* For languages with One Definition Rule, work out if
11825 trees are actually the same even if the tree representation
11826 differs. This handles only decls appearing in TYPE_NAME
11827 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11828 RECORD_TYPE and IDENTIFIER_NODE. */
11829
11830 static bool
11831 same_for_odr (tree t1, tree t2)
11832 {
11833 if (t1 == t2)
11834 return true;
11835 if (!t1 || !t2)
11836 return false;
11837 /* C and C++ FEs differ by using IDENTIFIER_NODE and TYPE_DECL. */
11838 if (TREE_CODE (t1) == IDENTIFIER_NODE
11839 && TREE_CODE (t2) == TYPE_DECL
11840 && DECL_FILE_SCOPE_P (t1))
11841 {
11842 t2 = DECL_NAME (t2);
11843 gcc_assert (TREE_CODE (t2) == IDENTIFIER_NODE);
11844 }
11845 if (TREE_CODE (t2) == IDENTIFIER_NODE
11846 && TREE_CODE (t1) == TYPE_DECL
11847 && DECL_FILE_SCOPE_P (t2))
11848 {
11849 t1 = DECL_NAME (t1);
11850 gcc_assert (TREE_CODE (t1) == IDENTIFIER_NODE);
11851 }
11852 if (TREE_CODE (t1) != TREE_CODE (t2))
11853 return false;
11854 if (TYPE_P (t1))
11855 return types_same_for_odr (t1, t2);
11856 if (DECL_P (t1))
11857 return decls_same_for_odr (t1, t2);
11858 return false;
11859 }
11860
11861 /* For languages with One Definition Rule, work out if
11862 decls are actually the same even if the tree representation
11863 differs. This handles only decls appearing in TYPE_NAME
11864 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11865 RECORD_TYPE and IDENTIFIER_NODE. */
11866
11867 static bool
11868 decls_same_for_odr (tree decl1, tree decl2)
11869 {
11870 if (decl1 && TREE_CODE (decl1) == TYPE_DECL
11871 && DECL_ORIGINAL_TYPE (decl1))
11872 decl1 = DECL_ORIGINAL_TYPE (decl1);
11873 if (decl2 && TREE_CODE (decl2) == TYPE_DECL
11874 && DECL_ORIGINAL_TYPE (decl2))
11875 decl2 = DECL_ORIGINAL_TYPE (decl2);
11876 if (decl1 == decl2)
11877 return true;
11878 if (!decl1 || !decl2)
11879 return false;
11880 gcc_checking_assert (DECL_P (decl1) && DECL_P (decl2));
11881 if (TREE_CODE (decl1) != TREE_CODE (decl2))
11882 return false;
11883 if (TREE_CODE (decl1) == TRANSLATION_UNIT_DECL)
11884 return true;
11885 if (TREE_CODE (decl1) != NAMESPACE_DECL
11886 && TREE_CODE (decl1) != TYPE_DECL)
11887 return false;
11888 if (!DECL_NAME (decl1))
11889 return false;
11890 gcc_checking_assert (TREE_CODE (DECL_NAME (decl1)) == IDENTIFIER_NODE);
11891 gcc_checking_assert (!DECL_NAME (decl2)
11892 || TREE_CODE (DECL_NAME (decl2)) == IDENTIFIER_NODE);
11893 if (DECL_NAME (decl1) != DECL_NAME (decl2))
11894 return false;
11895 return same_for_odr (DECL_CONTEXT (decl1),
11896 DECL_CONTEXT (decl2));
11897 }
11898
11899 /* For languages with One Definition Rule, work out if
11900 types are the same even if the tree representation differs.
11901 This is non-trivial for LTO, where minor differences in
11902 the type representation may have prevented type merging
11903 from merging two copies of an otherwise equivalent type. */
11904
11905 bool
11906 types_same_for_odr (tree type1, tree type2)
11907 {
11908 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
11909 type1 = TYPE_MAIN_VARIANT (type1);
11910 type2 = TYPE_MAIN_VARIANT (type2);
11911 if (type1 == type2)
11912 return true;
11913
11914 #ifndef ENABLE_CHECKING
11915 if (!in_lto_p)
11916 return false;
11917 #endif
11918
11919 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
11920 on the corresponding TYPE_STUB_DECL. */
11921 if (type_in_anonymous_namespace_p (type1)
11922 || type_in_anonymous_namespace_p (type2))
11923 return false;
11924 /* When the assembler name of the virtual table is available, it is
11925 easy to compare types for equivalence. */
11926 if (TYPE_BINFO (type1) && TYPE_BINFO (type2)
11927 && BINFO_VTABLE (TYPE_BINFO (type1))
11928 && BINFO_VTABLE (TYPE_BINFO (type2)))
11929 {
11930 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
11931 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
11932
11933 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
11934 {
11935 if (TREE_CODE (v2) != POINTER_PLUS_EXPR
11936 || !operand_equal_p (TREE_OPERAND (v1, 1),
11937 TREE_OPERAND (v2, 1), 0))
11938 return false;
11939 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
11940 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
11941 }
11942 v1 = DECL_ASSEMBLER_NAME (v1);
11943 v2 = DECL_ASSEMBLER_NAME (v2);
11944 return (v1 == v2);
11945 }
11946
11947 /* FIXME: the code comparing type names considers all instantiations of the
11948 same template to have the same name. This is because we have no access
11949 to the template parameters. For types with no virtual method tables
11950 we can thus return false positives. At the moment we do not need
11951 to compare types in scenarios other than devirtualization. */
11952
11953 /* If the types are not structurally the same, do not bother to continue.
11954 A match in the remainder of the code would mean an ODR violation. */
11955 if (!types_compatible_p (type1, type2))
11956 return false;
11957 if (!TYPE_NAME (type1))
11958 return false;
11959 if (!decls_same_for_odr (TYPE_NAME (type1), TYPE_NAME (type2)))
11960 return false;
11961 if (!same_for_odr (TYPE_CONTEXT (type1), TYPE_CONTEXT (type2)))
11962 return false;
11963 /* When not in LTO, the TYPE_MAIN_VARIANT check above should already have matched. */
11964 gcc_assert (in_lto_p);
11965
11966 return true;
11967 }
11968
11969 /* TARGET is the call target of a GIMPLE call statement
11970 (obtained by gimple_call_fn). Return true if it is an
11971 OBJ_TYPE_REF representing a virtual call to a C++ method.
11972 (As opposed to an OBJ_TYPE_REF representing ObjC calls
11973 through a cast, where the middle-end devirtualization machinery
11974 can't apply.) */
11975
11976 bool
11977 virtual_method_call_p (tree target)
11978 {
11979 if (TREE_CODE (target) != OBJ_TYPE_REF)
11980 return false;
11981 target = TREE_TYPE (target);
11982 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11983 target = TREE_TYPE (target);
11984 if (TREE_CODE (target) == FUNCTION_TYPE)
11985 return false;
11986 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11987 return true;
11988 }
11989
11990 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11991
11992 tree
11993 obj_type_ref_class (tree ref)
11994 {
11995 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11996 ref = TREE_TYPE (ref);
11997 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11998 ref = TREE_TYPE (ref);
11999 /* We look for the type THIS points to. ObjC also builds
12000 OBJ_TYPE_REF with non-method calls; their first parameter
12001 ID, however, also corresponds to the class type. */
12002 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
12003 || TREE_CODE (ref) == FUNCTION_TYPE);
12004 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
12005 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
12006 return TREE_TYPE (ref);
12007 }
12008
12009 /* Return true if T is in an anonymous namespace. */
12010
12011 bool
12012 type_in_anonymous_namespace_p (tree t)
12013 {
12014 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
12015 }
12016
12017 /* Try to find a base info of BINFO that would have its field decl at offset
12018 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12019 found, return it, otherwise return NULL_TREE. */
12020
12021 tree
12022 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12023 {
12024 tree type = BINFO_TYPE (binfo);
12025
12026 while (true)
12027 {
12028 HOST_WIDE_INT pos, size;
12029 tree fld;
12030 int i;
12031
12032 if (types_same_for_odr (type, expected_type))
12033 return binfo;
12034 if (offset < 0)
12035 return NULL_TREE;
12036
12037 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12038 {
12039 if (TREE_CODE (fld) != FIELD_DECL)
12040 continue;
12041
12042 pos = int_bit_position (fld);
12043 size = tree_to_uhwi (DECL_SIZE (fld));
12044 if (pos <= offset && (pos + size) > offset)
12045 break;
12046 }
12047 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12048 return NULL_TREE;
12049
12050 if (!DECL_ARTIFICIAL (fld))
12051 {
12052 binfo = TYPE_BINFO (TREE_TYPE (fld));
12053 if (!binfo)
12054 return NULL_TREE;
12055 }
12056 /* Offset 0 indicates the primary base, whose vtable contents are
12057 represented in the binfo for the derived class. */
12058 else if (offset != 0)
12059 {
12060 tree base_binfo, binfo2 = binfo;
12061
12062 /* Find the BINFO corresponding to FLD. This is made a bit harder
12063 by the fact that with virtual inheritance we may need to walk down
12064 the non-virtual inheritance chain. */
12065 while (true)
12066 {
12067 tree containing_binfo = NULL, found_binfo = NULL;
12068 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
12069 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12070 {
12071 found_binfo = base_binfo;
12072 break;
12073 }
12074 else
12075 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
12076 - tree_to_shwi (BINFO_OFFSET (binfo)))
12077 * BITS_PER_UNIT < pos
12078 /* Rule out types with no virtual methods, or we can get confused
12079 here by zero-sized bases. */
12080 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
12081 && (!containing_binfo
12082 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
12083 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
12084 containing_binfo = base_binfo;
12085 if (found_binfo)
12086 {
12087 binfo = found_binfo;
12088 break;
12089 }
12090 if (!containing_binfo)
12091 return NULL_TREE;
12092 binfo2 = containing_binfo;
12093 }
12094 }
12095
12096 type = TREE_TYPE (fld);
12097 offset -= pos;
12098 }
12099 }
12100
12101 /* Returns true if X is a typedef decl. */
12102
12103 bool
12104 is_typedef_decl (tree x)
12105 {
12106 return (x && TREE_CODE (x) == TYPE_DECL
12107 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12108 }
12109
12110 /* Returns true iff TYPE is a type variant created for a typedef. */
12111
12112 bool
12113 typedef_variant_p (tree type)
12114 {
12115 return is_typedef_decl (TYPE_NAME (type));
12116 }
12117
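/* Worked example (illustrative): after "typedef int myint;" the front end
   creates a TYPE_DECL named "myint" whose DECL_ORIGINAL_TYPE is int, so
   is_typedef_decl is true for that decl, and typedef_variant_p is true
   for the variant of int whose TYPE_NAME is that decl.  */
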
12118 /* Warn about a use of an identifier which was marked deprecated. */
12119 void
12120 warn_deprecated_use (tree node, tree attr)
12121 {
12122 const char *msg;
12123
12124 if (node == 0 || !warn_deprecated_decl)
12125 return;
12126
12127 if (!attr)
12128 {
12129 if (DECL_P (node))
12130 attr = DECL_ATTRIBUTES (node);
12131 else if (TYPE_P (node))
12132 {
12133 tree decl = TYPE_STUB_DECL (node);
12134 if (decl)
12135 attr = lookup_attribute ("deprecated",
12136 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12137 }
12138 }
12139
12140 if (attr)
12141 attr = lookup_attribute ("deprecated", attr);
12142
12143 if (attr)
12144 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12145 else
12146 msg = NULL;
12147
12148 if (DECL_P (node))
12149 {
12150 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
12151 if (msg)
12152 warning (OPT_Wdeprecated_declarations,
12153 "%qD is deprecated (declared at %r%s:%d%R): %s",
12154 node, "locus", xloc.file, xloc.line, msg);
12155 else
12156 warning (OPT_Wdeprecated_declarations,
12157 "%qD is deprecated (declared at %r%s:%d%R)",
12158 node, "locus", xloc.file, xloc.line);
12159 }
12160 else if (TYPE_P (node))
12161 {
12162 tree what = NULL_TREE;
12163 tree decl = TYPE_STUB_DECL (node);
12164
12165 if (TYPE_NAME (node))
12166 {
12167 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12168 what = TYPE_NAME (node);
12169 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12170 && DECL_NAME (TYPE_NAME (node)))
12171 what = DECL_NAME (TYPE_NAME (node));
12172 }
12173
12174 if (decl)
12175 {
12176 expanded_location xloc
12177 = expand_location (DECL_SOURCE_LOCATION (decl));
12178 if (what)
12179 {
12180 if (msg)
12181 warning (OPT_Wdeprecated_declarations,
12182 "%qE is deprecated (declared at %r%s:%d%R): %s",
12183 what, "locus", xloc.file, xloc.line, msg);
12184 else
12185 warning (OPT_Wdeprecated_declarations,
12186 "%qE is deprecated (declared at %r%s:%d%R)",
12187 what, "locus", xloc.file, xloc.line);
12188 }
12189 else
12190 {
12191 if (msg)
12192 warning (OPT_Wdeprecated_declarations,
12193 "type is deprecated (declared at %r%s:%d%R): %s",
12194 "locus", xloc.file, xloc.line, msg);
12195 else
12196 warning (OPT_Wdeprecated_declarations,
12197 "type is deprecated (declared at %r%s:%d%R)",
12198 "locus", xloc.file, xloc.line);
12199 }
12200 }
12201 else
12202 {
12203 if (what)
12204 {
12205 if (msg)
12206 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12207 what, msg);
12208 else
12209 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12210 }
12211 else
12212 {
12213 if (msg)
12214 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12215 msg);
12216 else
12217 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12218 }
12219 }
12220 }
12221 }
12222
12223 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12224 somewhere in it. */
12225
12226 bool
12227 contains_bitfld_component_ref_p (const_tree ref)
12228 {
12229 while (handled_component_p (ref))
12230 {
12231 if (TREE_CODE (ref) == COMPONENT_REF
12232 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12233 return true;
12234 ref = TREE_OPERAND (ref, 0);
12235 }
12236
12237 return false;
12238 }
12239
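/* Worked example (illustrative): for "struct S { int bf : 3; int i; } s;"
   the reference s.bf is a COMPONENT_REF whose FIELD_DECL has
   DECL_BIT_FIELD set, so this returns true, whereas s.i yields false.  */
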
12240 /* Try to determine whether a TRY_CATCH expression can fall through.
12241 This is a subroutine of block_may_fallthru. */
12242
12243 static bool
12244 try_catch_may_fallthru (const_tree stmt)
12245 {
12246 tree_stmt_iterator i;
12247
12248 /* If the TRY block can fall through, the whole TRY_CATCH can
12249 fall through. */
12250 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12251 return true;
12252
12253 i = tsi_start (TREE_OPERAND (stmt, 1));
12254 switch (TREE_CODE (tsi_stmt (i)))
12255 {
12256 case CATCH_EXPR:
12257 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12258 catch expression and a body. The whole TRY_CATCH may fall
12259 through iff any of the catch bodies falls through. */
12260 for (; !tsi_end_p (i); tsi_next (&i))
12261 {
12262 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12263 return true;
12264 }
12265 return false;
12266
12267 case EH_FILTER_EXPR:
12268 /* The exception filter expression only matters if there is an
12269 exception. If the exception does not match EH_FILTER_TYPES,
12270 we will execute EH_FILTER_FAILURE, and we will fall through
12271 if that falls through. If the exception does match
12272 EH_FILTER_TYPES, the stack unwinder will continue up the
12273 stack, so we will not fall through. We don't know whether we
12274 will throw an exception which matches EH_FILTER_TYPES or not,
12275 so we just ignore EH_FILTER_TYPES and assume that we might
12276 throw an exception which doesn't match. */
12277 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12278
12279 default:
12280 /* This case represents statements to be executed when an
12281 exception occurs. Those statements are implicitly followed
12282 by a RESX statement to resume execution after the exception.
12283 So in this case the TRY_CATCH never falls through. */
12284 return false;
12285 }
12286 }
12287
12288 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12289 need not be 100% accurate; simply be conservative and return true if we
12290 don't know. This is used only to avoid stupidly generating extra code.
12291 If we're wrong, we'll just delete the extra code later. */
12292
12293 bool
12294 block_may_fallthru (const_tree block)
12295 {
12296 /* This CONST_CAST is okay because expr_last returns its argument
12297 unmodified and we assign it to a const_tree. */
12298 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12299
12300 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12301 {
12302 case GOTO_EXPR:
12303 case RETURN_EXPR:
12304 /* Easy cases. If the last statement of the block implies
12305 control transfer, then we can't fall through. */
12306 return false;
12307
12308 case SWITCH_EXPR:
12309 /* If SWITCH_LABELS is set, this is lowered, and represents a
12310 branch to a selected label and hence can not fall through.
12311 Otherwise SWITCH_BODY is set, and the switch can fall
12312 through. */
12313 return SWITCH_LABELS (stmt) == NULL_TREE;
12314
12315 case COND_EXPR:
12316 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12317 return true;
12318 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12319
12320 case BIND_EXPR:
12321 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12322
12323 case TRY_CATCH_EXPR:
12324 return try_catch_may_fallthru (stmt);
12325
12326 case TRY_FINALLY_EXPR:
12327 /* The finally clause is always executed after the try clause,
12328 so if it does not fall through, then the try-finally will not
12329 fall through. Otherwise, if the try clause does not fall
12330 through, then when the finally clause falls through it will
12331 resume execution wherever the try clause was going. So the
12332 whole try-finally will only fall through if both the try
12333 clause and the finally clause fall through. */
12334 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12335 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12336
12337 case MODIFY_EXPR:
12338 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12339 stmt = TREE_OPERAND (stmt, 1);
12340 else
12341 return true;
12342 /* FALLTHRU */
12343
12344 case CALL_EXPR:
12345 /* Functions that do not return do not fall through. */
12346 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12347
12348 case CLEANUP_POINT_EXPR:
12349 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12350
12351 case TARGET_EXPR:
12352 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12353
12354 case ERROR_MARK:
12355 return true;
12356
12357 default:
12358 return lang_hooks.block_may_fallthru (stmt);
12359 }
12360 }
12361
12362 /* True if we are using EH to handle cleanups. */
12363 static bool using_eh_for_cleanups_flag = false;
12364
12365 /* This routine is called from front ends to indicate that EH should be used
12366 for cleanups. */
12367 void
12368 using_eh_for_cleanups (void)
12369 {
12370 using_eh_for_cleanups_flag = true;
12371 }
12372
12373 /* Query whether EH is used for cleanups. */
12374 bool
12375 using_eh_for_cleanups_p (void)
12376 {
12377 return using_eh_for_cleanups_flag;
12378 }
12379
12380 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12381 const char *
12382 get_tree_code_name (enum tree_code code)
12383 {
12384 const char *invalid = "<invalid tree code>";
12385
12386 if (code >= MAX_TREE_CODES)
12387 return invalid;
12388
12389 return tree_code_name[code];
12390 }
12391
12392 /* Drops the TREE_OVERFLOW flag from T. */
12393
12394 tree
12395 drop_tree_overflow (tree t)
12396 {
12397 gcc_checking_assert (TREE_OVERFLOW (t));
12398
12399 /* For tree codes with a sharing machinery re-build the result. */
12400 if (TREE_CODE (t) == INTEGER_CST)
12401 return wide_int_to_tree (TREE_TYPE (t), t);
12402
12403 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12404 and drop the flag. */
12405 t = copy_node (t);
12406 TREE_OVERFLOW (t) = 0;
12407 return t;
12408 }
12409
12410 /* Given a memory reference expression T, return its base address.
12411 The base address of a memory reference expression is the main
12412 object being referenced. For instance, the base address for
12413 'array[i].fld[j]' is 'array'. You can think of this as stripping
12414 away the offset part from a memory address.
12415
12416 This function calls handled_component_p to strip away all the inner
12417 parts of the memory reference until it reaches the base object. */
12418
12419 tree
12420 get_base_address (tree t)
12421 {
12422 while (handled_component_p (t))
12423 t = TREE_OPERAND (t, 0);
12424
12425 if ((TREE_CODE (t) == MEM_REF
12426 || TREE_CODE (t) == TARGET_MEM_REF)
12427 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12428 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12429
12430 /* ??? Either the alias oracle or all callers need to properly deal
12431 with WITH_SIZE_EXPRs before we can look through those. */
12432 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12433 return NULL_TREE;
12434
12435 return t;
12436 }
12437
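/* Worked example (illustrative): for array[i].fld[j] the ARRAY_REFs and
   COMPONENT_REF are handled components and are stripped away, leaving the
   VAR_DECL for array; for a MEM_REF whose address operand is &a the
   declaration a itself is returned.  */
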
12438 #include "gt-tree.h"