Add an abstract incremental hash data type
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "function.h"
42 #include "obstack.h"
43 #include "toplev.h" /* get_random_seed */
44 #include "hashtab.h"
45 #include "inchash.h"
46 #include "filenames.h"
47 #include "output.h"
48 #include "target.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "basic-block.h"
54 #include "bitmap.h"
55 #include "pointer-set.h"
56 #include "tree-ssa-alias.h"
57 #include "internal-fn.h"
58 #include "gimple-expr.h"
59 #include "is-a.h"
60 #include "gimple.h"
61 #include "gimple-iterator.h"
62 #include "gimplify.h"
63 #include "gimple-ssa.h"
64 #include "cgraph.h"
65 #include "tree-phinodes.h"
66 #include "stringpool.h"
67 #include "tree-ssanames.h"
68 #include "expr.h"
69 #include "tree-dfa.h"
70 #include "params.h"
71 #include "tree-pass.h"
72 #include "langhooks-def.h"
73 #include "diagnostic.h"
74 #include "tree-diagnostic.h"
75 #include "tree-pretty-print.h"
76 #include "except.h"
77 #include "debug.h"
78 #include "intl.h"
79 #include "wide-int.h"
80 #include "builtins.h"
81
82 /* Tree code classes. */
83
84 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
85 #define END_OF_BASE_TREE_CODES tcc_exceptional,
86
87 const enum tree_code_class tree_code_type[] = {
88 #include "all-tree.def"
89 };
90
91 #undef DEFTREECODE
92 #undef END_OF_BASE_TREE_CODES
93
94 /* Table indexed by tree code giving number of expression
95 operands beyond the fixed part of the node structure.
96 Not used for types or decls. */
97
98 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
99 #define END_OF_BASE_TREE_CODES 0,
100
101 const unsigned char tree_code_length[] = {
102 #include "all-tree.def"
103 };
104
105 #undef DEFTREECODE
106 #undef END_OF_BASE_TREE_CODES
107
108 /* Names of tree components.
109 Used for printing out the tree and error messages. */
110 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
111 #define END_OF_BASE_TREE_CODES "@dummy",
112
113 static const char *const tree_code_name[] = {
114 #include "all-tree.def"
115 };
116
117 #undef DEFTREECODE
118 #undef END_OF_BASE_TREE_CODES
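/* Illustrative sketch (not part of the original file) of how the tables
   above are generated.  tree.def contains entries such as

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   so the three inclusions of all-tree.def expand into parallel tables
   indexed by tree code; for that entry the expansions amount to:  */
#if 0
gcc_checking_assert (tree_code_type[PLUS_EXPR] == tcc_binary);
gcc_checking_assert (tree_code_length[PLUS_EXPR] == 2);
gcc_checking_assert (!strcmp (tree_code_name[PLUS_EXPR], "plus_expr"));
#endif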
119
120 /* Each tree code class has an associated string representation.
121 These must correspond to the tree_code_class entries. */
122
123 const char *const tree_code_class_strings[] =
124 {
125 "exceptional",
126 "constant",
127 "type",
128 "declaration",
129 "reference",
130 "comparison",
131 "unary",
132 "binary",
133 "statement",
134 "vl_exp",
135 "expression"
136 };
137
138 /* obstack.[ch] explicitly declined to prototype this. */
139 extern int _obstack_allocated_p (struct obstack *h, void *obj);
140
141 /* Statistics-gathering stuff. */
142
143 static int tree_code_counts[MAX_TREE_CODES];
144 int tree_node_counts[(int) all_kinds];
145 int tree_node_sizes[(int) all_kinds];
146
147 /* Keep in sync with tree.h:enum tree_node_kind. */
148 static const char * const tree_node_kind_names[] = {
149 "decls",
150 "types",
151 "blocks",
152 "stmts",
153 "refs",
154 "exprs",
155 "constants",
156 "identifiers",
157 "vecs",
158 "binfos",
159 "ssa names",
160 "constructors",
161 "random kinds",
162 "lang_decl kinds",
163 "lang_type kinds",
164 "omp clauses",
165 };
166
167 /* Unique id for next decl created. */
168 static GTY(()) int next_decl_uid;
169 /* Unique id for next type created. */
170 static GTY(()) int next_type_uid = 1;
171 /* Unique id for next debug decl created. Use negative numbers,
172 to catch erroneous uses. */
173 static GTY(()) int next_debug_decl_uid;
174
175 /* Since we cannot rehash a type after it is in the table, we have to
176 keep the hash code. */
177
178 struct GTY(()) type_hash {
179 unsigned long hash;
180 tree type;
181 };
182
183 /* Initial size of the hash table (rounded to next prime). */
184 #define TYPE_HASH_INITIAL_SIZE 1000
185
186 /* Now here is the hash table. When recording a type, it is added to
187 the slot whose index is the hash code. Note that the hash table is
188 used for several kinds of types (function types, array types and
189 array index range types, for now). While all these live in the
190 same table, they are completely independent, and the hash code is
191 computed differently for each of these. */
192
193 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
194 htab_t type_hash_table;
195
196 /* Hash table and temporary node for larger integer const values. */
197 static GTY (()) tree int_cst_node;
198 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
199 htab_t int_cst_hash_table;
200
201 /* Hash table for optimization flags and target option flags. Use the same
202 hash table for both sets of options. Nodes for building the current
203 optimization and target option nodes. The assumption is that most of the
204 time the options created will already be in the hash table, so we avoid
205 allocating and freeing up a node repeatedly. */
206 static GTY (()) tree cl_optimization_node;
207 static GTY (()) tree cl_target_option_node;
208 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
209 htab_t cl_option_hash_table;
210
211 /* General tree->tree mapping structure for use in hash tables. */
212
213
214 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
215 htab_t debug_expr_for_decl;
216
217 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
218 htab_t value_expr_for_decl;
219
220 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
221 htab_t debug_args_for_decl;
222
223 static void set_type_quals (tree, int);
224 static int type_hash_eq (const void *, const void *);
225 static hashval_t type_hash_hash (const void *);
226 static hashval_t int_cst_hash_hash (const void *);
227 static int int_cst_hash_eq (const void *, const void *);
228 static hashval_t cl_option_hash_hash (const void *);
229 static int cl_option_hash_eq (const void *, const void *);
230 static void print_type_hash_statistics (void);
231 static void print_debug_expr_statistics (void);
232 static void print_value_expr_statistics (void);
233 static int type_hash_marked_p (const void *);
234 static unsigned int type_hash_list (const_tree, hashval_t);
235 static unsigned int attribute_hash_list (const_tree, hashval_t);
236
237 tree global_trees[TI_MAX];
238 tree integer_types[itk_none];
239
240 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
241
242 /* Number of operands for each OpenMP clause. */
243 unsigned const char omp_clause_num_ops[] =
244 {
245 0, /* OMP_CLAUSE_ERROR */
246 1, /* OMP_CLAUSE_PRIVATE */
247 1, /* OMP_CLAUSE_SHARED */
248 1, /* OMP_CLAUSE_FIRSTPRIVATE */
249 2, /* OMP_CLAUSE_LASTPRIVATE */
250 4, /* OMP_CLAUSE_REDUCTION */
251 1, /* OMP_CLAUSE_COPYIN */
252 1, /* OMP_CLAUSE_COPYPRIVATE */
253 3, /* OMP_CLAUSE_LINEAR */
254 2, /* OMP_CLAUSE_ALIGNED */
255 1, /* OMP_CLAUSE_DEPEND */
256 1, /* OMP_CLAUSE_UNIFORM */
257 2, /* OMP_CLAUSE_FROM */
258 2, /* OMP_CLAUSE_TO */
259 2, /* OMP_CLAUSE_MAP */
260 1, /* OMP_CLAUSE__LOOPTEMP_ */
261 1, /* OMP_CLAUSE_IF */
262 1, /* OMP_CLAUSE_NUM_THREADS */
263 1, /* OMP_CLAUSE_SCHEDULE */
264 0, /* OMP_CLAUSE_NOWAIT */
265 0, /* OMP_CLAUSE_ORDERED */
266 0, /* OMP_CLAUSE_DEFAULT */
267 3, /* OMP_CLAUSE_COLLAPSE */
268 0, /* OMP_CLAUSE_UNTIED */
269 1, /* OMP_CLAUSE_FINAL */
270 0, /* OMP_CLAUSE_MERGEABLE */
271 1, /* OMP_CLAUSE_DEVICE */
272 1, /* OMP_CLAUSE_DIST_SCHEDULE */
273 0, /* OMP_CLAUSE_INBRANCH */
274 0, /* OMP_CLAUSE_NOTINBRANCH */
275 1, /* OMP_CLAUSE_NUM_TEAMS */
276 1, /* OMP_CLAUSE_THREAD_LIMIT */
277 0, /* OMP_CLAUSE_PROC_BIND */
278 1, /* OMP_CLAUSE_SAFELEN */
279 1, /* OMP_CLAUSE_SIMDLEN */
280 0, /* OMP_CLAUSE_FOR */
281 0, /* OMP_CLAUSE_PARALLEL */
282 0, /* OMP_CLAUSE_SECTIONS */
283 0, /* OMP_CLAUSE_TASKGROUP */
284 1, /* OMP_CLAUSE__SIMDUID_ */
285 };
286
287 const char * const omp_clause_code_name[] =
288 {
289 "error_clause",
290 "private",
291 "shared",
292 "firstprivate",
293 "lastprivate",
294 "reduction",
295 "copyin",
296 "copyprivate",
297 "linear",
298 "aligned",
299 "depend",
300 "uniform",
301 "from",
302 "to",
303 "map",
304 "_looptemp_",
305 "if",
306 "num_threads",
307 "schedule",
308 "nowait",
309 "ordered",
310 "default",
311 "collapse",
312 "untied",
313 "final",
314 "mergeable",
315 "device",
316 "dist_schedule",
317 "inbranch",
318 "notinbranch",
319 "num_teams",
320 "thread_limit",
321 "proc_bind",
322 "safelen",
323 "simdlen",
324 "for",
325 "parallel",
326 "sections",
327 "taskgroup",
328 "_simduid_"
329 };
330
331
332 /* Return the tree node structure used by tree code CODE. */
333
334 static inline enum tree_node_structure_enum
335 tree_node_structure_for_code (enum tree_code code)
336 {
337 switch (TREE_CODE_CLASS (code))
338 {
339 case tcc_declaration:
340 {
341 switch (code)
342 {
343 case FIELD_DECL:
344 return TS_FIELD_DECL;
345 case PARM_DECL:
346 return TS_PARM_DECL;
347 case VAR_DECL:
348 return TS_VAR_DECL;
349 case LABEL_DECL:
350 return TS_LABEL_DECL;
351 case RESULT_DECL:
352 return TS_RESULT_DECL;
353 case DEBUG_EXPR_DECL:
354 return TS_DECL_WRTL;
355 case CONST_DECL:
356 return TS_CONST_DECL;
357 case TYPE_DECL:
358 return TS_TYPE_DECL;
359 case FUNCTION_DECL:
360 return TS_FUNCTION_DECL;
361 case TRANSLATION_UNIT_DECL:
362 return TS_TRANSLATION_UNIT_DECL;
363 default:
364 return TS_DECL_NON_COMMON;
365 }
366 }
367 case tcc_type:
368 return TS_TYPE_NON_COMMON;
369 case tcc_reference:
370 case tcc_comparison:
371 case tcc_unary:
372 case tcc_binary:
373 case tcc_expression:
374 case tcc_statement:
375 case tcc_vl_exp:
376 return TS_EXP;
377 default: /* tcc_constant and tcc_exceptional */
378 break;
379 }
380 switch (code)
381 {
382 /* tcc_constant cases. */
383 case VOID_CST: return TS_TYPED;
384 case INTEGER_CST: return TS_INT_CST;
385 case REAL_CST: return TS_REAL_CST;
386 case FIXED_CST: return TS_FIXED_CST;
387 case COMPLEX_CST: return TS_COMPLEX;
388 case VECTOR_CST: return TS_VECTOR;
389 case STRING_CST: return TS_STRING;
390 /* tcc_exceptional cases. */
391 case ERROR_MARK: return TS_COMMON;
392 case IDENTIFIER_NODE: return TS_IDENTIFIER;
393 case TREE_LIST: return TS_LIST;
394 case TREE_VEC: return TS_VEC;
395 case SSA_NAME: return TS_SSA_NAME;
396 case PLACEHOLDER_EXPR: return TS_COMMON;
397 case STATEMENT_LIST: return TS_STATEMENT_LIST;
398 case BLOCK: return TS_BLOCK;
399 case CONSTRUCTOR: return TS_CONSTRUCTOR;
400 case TREE_BINFO: return TS_BINFO;
401 case OMP_CLAUSE: return TS_OMP_CLAUSE;
402 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
403 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
404
405 default:
406 gcc_unreachable ();
407 }
408 }
409
410
411 /* Initialize tree_contains_struct to describe the hierarchy of tree
412 nodes. */
413
414 static void
415 initialize_tree_contains_struct (void)
416 {
417 unsigned i;
418
419 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
420 {
421 enum tree_code code;
422 enum tree_node_structure_enum ts_code;
423
424 code = (enum tree_code) i;
425 ts_code = tree_node_structure_for_code (code);
426
427 /* Mark the TS structure itself. */
428 tree_contains_struct[code][ts_code] = 1;
429
430 /* Mark all the structures that TS is derived from. */
431 switch (ts_code)
432 {
433 case TS_TYPED:
434 case TS_BLOCK:
435 MARK_TS_BASE (code);
436 break;
437
438 case TS_COMMON:
439 case TS_INT_CST:
440 case TS_REAL_CST:
441 case TS_FIXED_CST:
442 case TS_VECTOR:
443 case TS_STRING:
444 case TS_COMPLEX:
445 case TS_SSA_NAME:
446 case TS_CONSTRUCTOR:
447 case TS_EXP:
448 case TS_STATEMENT_LIST:
449 MARK_TS_TYPED (code);
450 break;
451
452 case TS_IDENTIFIER:
453 case TS_DECL_MINIMAL:
454 case TS_TYPE_COMMON:
455 case TS_LIST:
456 case TS_VEC:
457 case TS_BINFO:
458 case TS_OMP_CLAUSE:
459 case TS_OPTIMIZATION:
460 case TS_TARGET_OPTION:
461 MARK_TS_COMMON (code);
462 break;
463
464 case TS_TYPE_WITH_LANG_SPECIFIC:
465 MARK_TS_TYPE_COMMON (code);
466 break;
467
468 case TS_TYPE_NON_COMMON:
469 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
470 break;
471
472 case TS_DECL_COMMON:
473 MARK_TS_DECL_MINIMAL (code);
474 break;
475
476 case TS_DECL_WRTL:
477 case TS_CONST_DECL:
478 MARK_TS_DECL_COMMON (code);
479 break;
480
481 case TS_DECL_NON_COMMON:
482 MARK_TS_DECL_WITH_VIS (code);
483 break;
484
485 case TS_DECL_WITH_VIS:
486 case TS_PARM_DECL:
487 case TS_LABEL_DECL:
488 case TS_RESULT_DECL:
489 MARK_TS_DECL_WRTL (code);
490 break;
491
492 case TS_FIELD_DECL:
493 MARK_TS_DECL_COMMON (code);
494 break;
495
496 case TS_VAR_DECL:
497 MARK_TS_DECL_WITH_VIS (code);
498 break;
499
500 case TS_TYPE_DECL:
501 case TS_FUNCTION_DECL:
502 MARK_TS_DECL_NON_COMMON (code);
503 break;
504
505 case TS_TRANSLATION_UNIT_DECL:
506 MARK_TS_DECL_COMMON (code);
507 break;
508
509 default:
510 gcc_unreachable ();
511 }
512 }
513
514 /* Basic consistency checks for attributes used in fold. */
515 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
516 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
517 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
518 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
519 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
520 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
521 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
522 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
523 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
524 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
525 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
526 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
527 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
528 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
529 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
530 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
531 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
532 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
533 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
534 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
535 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
536 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
537 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
538 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
539 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
540 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
541 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
542 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
543 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
544 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
545 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
546 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
547 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
548 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
549 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
550 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
551 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
552 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
553 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
554 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
555 }
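/* A minimal sketch (hypothetical helper, not in the original file) of how
   the table built above is consumed: accessors guard structure access with
   CODE_CONTAINS_STRUCT before touching fields of a given TS kind.  */
#if 0
static bool
example_has_assembler_name_p (const_tree decl)
{
  /* Only codes marked with TS_DECL_WITH_VIS carry an assembler name.  */
  if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_WITH_VIS))
    return false;
  return DECL_ASSEMBLER_NAME_SET_P (decl);
}
#endif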
556
557
558 /* Init tree.c. */
559
560 void
561 init_ttree (void)
562 {
563 /* Initialize the hash table of types. */
564 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
565 type_hash_eq, 0);
566
567 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
568 tree_decl_map_eq, 0);
569
570 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
571 tree_decl_map_eq, 0);
572
573 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
574 int_cst_hash_eq, NULL);
575
576 int_cst_node = make_int_cst (1, 1);
577
578 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
579 cl_option_hash_eq, NULL);
580
581 cl_optimization_node = make_node (OPTIMIZATION_NODE);
582 cl_target_option_node = make_node (TARGET_OPTION_NODE);
583
584 /* Initialize the tree_contains_struct array. */
585 initialize_tree_contains_struct ();
586 lang_hooks.init_ts ();
587 }
588
589 \f
590 /* The name of the object as the assembler will see it (but before any
591 translations made by ASM_OUTPUT_LABELREF). Often this is the same
592 as DECL_NAME. It is an IDENTIFIER_NODE. */
593 tree
594 decl_assembler_name (tree decl)
595 {
596 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
597 lang_hooks.set_decl_assembler_name (decl);
598 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
599 }
600
601 /* When the target supports COMDAT groups, this indicates which group the
602 DECL is associated with. This can be either an IDENTIFIER_NODE or a
603 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
604 tree
605 decl_comdat_group (const_tree node)
606 {
607 struct symtab_node *snode = symtab_node::get (node);
608 if (!snode)
609 return NULL;
610 return snode->get_comdat_group ();
611 }
612
613 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
614 tree
615 decl_comdat_group_id (const_tree node)
616 {
617 struct symtab_node *snode = symtab_node::get (node);
618 if (!snode)
619 return NULL;
620 return snode->get_comdat_group_id ();
621 }
622
623 /* When the target supports named sections, return the section name of
624 NODE as a string, or NULL if it is in no section. */
625 const char *
626 decl_section_name (const_tree node)
627 {
628 struct symtab_node *snode = symtab_node::get (node);
629 if (!snode)
630 return NULL;
631 return snode->get_section ();
632 }
633
634 /* Set the section name of NODE to VALUE (a section name string), or
635 clear it when VALUE is NULL. */
636 void
637 set_decl_section_name (tree node, const char *value)
638 {
639 struct symtab_node *snode;
640
641 if (value == NULL)
642 {
643 snode = symtab_node::get (node);
644 if (!snode)
645 return;
646 }
647 else if (TREE_CODE (node) == VAR_DECL)
648 snode = varpool_node::get_create (node);
649 else
650 snode = cgraph_node::get_create (node);
651 snode->set_section (value);
652 }
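/* Usage sketch (DECL is a hypothetical VAR_DECL or FUNCTION_DECL): placing
   a declaration in a named section and later clearing the association.  */
#if 0
set_decl_section_name (decl, ".my_section");  /* creates the symtab node */
set_decl_section_name (decl, NULL);           /* clears it, if one exists */
#endif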
653
654 /* Return TLS model of a variable NODE. */
655 enum tls_model
656 decl_tls_model (const_tree node)
657 {
658 struct varpool_node *snode = varpool_node::get (node);
659 if (!snode)
660 return TLS_MODEL_NONE;
661 return snode->tls_model;
662 }
663
664 /* Set TLS model of variable NODE to MODEL. */
665 void
666 set_decl_tls_model (tree node, enum tls_model model)
667 {
668 struct varpool_node *vnode;
669
670 if (model == TLS_MODEL_NONE)
671 {
672 vnode = varpool_node::get (node);
673 if (!vnode)
674 return;
675 }
676 else
677 vnode = varpool_node::get_create (node);
678 vnode->tls_model = model;
679 }
680
681 /* Compute the number of bytes occupied by a tree with code CODE.
682 This function cannot be used for nodes that have variable sizes,
683 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
684 size_t
685 tree_code_size (enum tree_code code)
686 {
687 switch (TREE_CODE_CLASS (code))
688 {
689 case tcc_declaration: /* A decl node */
690 {
691 switch (code)
692 {
693 case FIELD_DECL:
694 return sizeof (struct tree_field_decl);
695 case PARM_DECL:
696 return sizeof (struct tree_parm_decl);
697 case VAR_DECL:
698 return sizeof (struct tree_var_decl);
699 case LABEL_DECL:
700 return sizeof (struct tree_label_decl);
701 case RESULT_DECL:
702 return sizeof (struct tree_result_decl);
703 case CONST_DECL:
704 return sizeof (struct tree_const_decl);
705 case TYPE_DECL:
706 return sizeof (struct tree_type_decl);
707 case FUNCTION_DECL:
708 return sizeof (struct tree_function_decl);
709 case DEBUG_EXPR_DECL:
710 return sizeof (struct tree_decl_with_rtl);
711 case TRANSLATION_UNIT_DECL:
712 return sizeof (struct tree_translation_unit_decl);
713 case NAMESPACE_DECL:
714 case IMPORTED_DECL:
715 case NAMELIST_DECL:
716 return sizeof (struct tree_decl_non_common);
717 default:
718 return lang_hooks.tree_size (code);
719 }
720 }
721
722 case tcc_type: /* a type node */
723 return sizeof (struct tree_type_non_common);
724
725 case tcc_reference: /* a reference */
726 case tcc_expression: /* an expression */
727 case tcc_statement: /* an expression with side effects */
728 case tcc_comparison: /* a comparison expression */
729 case tcc_unary: /* a unary arithmetic expression */
730 case tcc_binary: /* a binary arithmetic expression */
731 return (sizeof (struct tree_exp)
732 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
733
734 case tcc_constant: /* a constant */
735 switch (code)
736 {
737 case VOID_CST: return sizeof (struct tree_typed);
738 case INTEGER_CST: gcc_unreachable ();
739 case REAL_CST: return sizeof (struct tree_real_cst);
740 case FIXED_CST: return sizeof (struct tree_fixed_cst);
741 case COMPLEX_CST: return sizeof (struct tree_complex);
742 case VECTOR_CST: return sizeof (struct tree_vector);
743 case STRING_CST: gcc_unreachable ();
744 default:
745 return lang_hooks.tree_size (code);
746 }
747
748 case tcc_exceptional: /* something random, like an identifier. */
749 switch (code)
750 {
751 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
752 case TREE_LIST: return sizeof (struct tree_list);
753
754 case ERROR_MARK:
755 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
756
757 case TREE_VEC:
758 case OMP_CLAUSE: gcc_unreachable ();
759
760 case SSA_NAME: return sizeof (struct tree_ssa_name);
761
762 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
763 case BLOCK: return sizeof (struct tree_block);
764 case CONSTRUCTOR: return sizeof (struct tree_constructor);
765 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
766 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
767
768 default:
769 return lang_hooks.tree_size (code);
770 }
771
772 default:
773 gcc_unreachable ();
774 }
775 }
776
777 /* Compute the number of bytes occupied by NODE. This routine only
778 looks at TREE_CODE, except for those nodes that have variable sizes. */
779 size_t
780 tree_size (const_tree node)
781 {
782 const enum tree_code code = TREE_CODE (node);
783 switch (code)
784 {
785 case INTEGER_CST:
786 return (sizeof (struct tree_int_cst)
787 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
788
789 case TREE_BINFO:
790 return (offsetof (struct tree_binfo, base_binfos)
791 + vec<tree, va_gc>
792 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
793
794 case TREE_VEC:
795 return (sizeof (struct tree_vec)
796 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
797
798 case VECTOR_CST:
799 return (sizeof (struct tree_vector)
800 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
801
802 case STRING_CST:
803 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
804
805 case OMP_CLAUSE:
806 return (sizeof (struct tree_omp_clause)
807 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
808 * sizeof (tree));
809
810 default:
811 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
812 return (sizeof (struct tree_exp)
813 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
814 else
815 return tree_code_size (code);
816 }
817 }
818
819 /* Record interesting allocation statistics for a tree node with CODE
820 and LENGTH. */
821
822 static void
823 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
824 size_t length ATTRIBUTE_UNUSED)
825 {
826 enum tree_code_class type = TREE_CODE_CLASS (code);
827 tree_node_kind kind;
828
829 if (!GATHER_STATISTICS)
830 return;
831
832 switch (type)
833 {
834 case tcc_declaration: /* A decl node */
835 kind = d_kind;
836 break;
837
838 case tcc_type: /* a type node */
839 kind = t_kind;
840 break;
841
842 case tcc_statement: /* an expression with side effects */
843 kind = s_kind;
844 break;
845
846 case tcc_reference: /* a reference */
847 kind = r_kind;
848 break;
849
850 case tcc_expression: /* an expression */
851 case tcc_comparison: /* a comparison expression */
852 case tcc_unary: /* a unary arithmetic expression */
853 case tcc_binary: /* a binary arithmetic expression */
854 kind = e_kind;
855 break;
856
857 case tcc_constant: /* a constant */
858 kind = c_kind;
859 break;
860
861 case tcc_exceptional: /* something random, like an identifier. */
862 switch (code)
863 {
864 case IDENTIFIER_NODE:
865 kind = id_kind;
866 break;
867
868 case TREE_VEC:
869 kind = vec_kind;
870 break;
871
872 case TREE_BINFO:
873 kind = binfo_kind;
874 break;
875
876 case SSA_NAME:
877 kind = ssa_name_kind;
878 break;
879
880 case BLOCK:
881 kind = b_kind;
882 break;
883
884 case CONSTRUCTOR:
885 kind = constr_kind;
886 break;
887
888 case OMP_CLAUSE:
889 kind = omp_clause_kind;
890 break;
891
892 default:
893 kind = x_kind;
894 break;
895 }
896 break;
897
898 case tcc_vl_exp:
899 kind = e_kind;
900 break;
901
902 default:
903 gcc_unreachable ();
904 }
905
906 tree_code_counts[(int) code]++;
907 tree_node_counts[(int) kind]++;
908 tree_node_sizes[(int) kind] += length;
909 }
910
911 /* Allocate and return a new UID from the DECL_UID namespace. */
912
913 int
914 allocate_decl_uid (void)
915 {
916 return next_decl_uid++;
917 }
918
919 /* Return a newly allocated node of code CODE. For decl and type
920 nodes, some other fields are initialized. The rest of the node is
921 initialized to zero. This function cannot be used for TREE_VEC,
922 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
923 tree_code_size.
924
925 Achoo! I got a code in the node. */
926
927 tree
928 make_node_stat (enum tree_code code MEM_STAT_DECL)
929 {
930 tree t;
931 enum tree_code_class type = TREE_CODE_CLASS (code);
932 size_t length = tree_code_size (code);
933
934 record_node_allocation_statistics (code, length);
935
936 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
937 TREE_SET_CODE (t, code);
938
939 switch (type)
940 {
941 case tcc_statement:
942 TREE_SIDE_EFFECTS (t) = 1;
943 break;
944
945 case tcc_declaration:
946 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
947 {
948 if (code == FUNCTION_DECL)
949 {
950 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
951 DECL_MODE (t) = FUNCTION_MODE;
952 }
953 else
954 DECL_ALIGN (t) = 1;
955 }
956 DECL_SOURCE_LOCATION (t) = input_location;
957 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
958 DECL_UID (t) = --next_debug_decl_uid;
959 else
960 {
961 DECL_UID (t) = allocate_decl_uid ();
962 SET_DECL_PT_UID (t, -1);
963 }
964 if (TREE_CODE (t) == LABEL_DECL)
965 LABEL_DECL_UID (t) = -1;
966
967 break;
968
969 case tcc_type:
970 TYPE_UID (t) = next_type_uid++;
971 TYPE_ALIGN (t) = BITS_PER_UNIT;
972 TYPE_USER_ALIGN (t) = 0;
973 TYPE_MAIN_VARIANT (t) = t;
974 TYPE_CANONICAL (t) = t;
975
976 /* Default to no attributes for type, but let target change that. */
977 TYPE_ATTRIBUTES (t) = NULL_TREE;
978 targetm.set_default_type_attributes (t);
979
980 /* We have not yet computed the alias set for this type. */
981 TYPE_ALIAS_SET (t) = -1;
982 break;
983
984 case tcc_constant:
985 TREE_CONSTANT (t) = 1;
986 break;
987
988 case tcc_expression:
989 switch (code)
990 {
991 case INIT_EXPR:
992 case MODIFY_EXPR:
993 case VA_ARG_EXPR:
994 case PREDECREMENT_EXPR:
995 case PREINCREMENT_EXPR:
996 case POSTDECREMENT_EXPR:
997 case POSTINCREMENT_EXPR:
998 /* All of these have side-effects, no matter what their
999 operands are. */
1000 TREE_SIDE_EFFECTS (t) = 1;
1001 break;
1002
1003 default:
1004 break;
1005 }
1006 break;
1007
1008 default:
1009 /* Other classes need no special treatment. */
1010 break;
1011 }
1012
1013 return t;
1014 }
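/* A sketch of typical use (assumes the global type nodes are initialized;
   not part of the original file): make_node zeroes the node and applies the
   class-specific defaults above, and the caller fills in everything else.  */
#if 0
static tree
example_make_label_decl (void)
{
  tree label = make_node (LABEL_DECL);   /* fresh DECL_UID, LABEL_DECL_UID = -1 */
  DECL_NAME (label) = get_identifier ("L0");
  TREE_TYPE (label) = void_type_node;
  return label;
}
#endif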
1015 \f
1016 /* Return a new node with the same contents as NODE except that its
1017 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1018
1019 tree
1020 copy_node_stat (tree node MEM_STAT_DECL)
1021 {
1022 tree t;
1023 enum tree_code code = TREE_CODE (node);
1024 size_t length;
1025
1026 gcc_assert (code != STATEMENT_LIST);
1027
1028 length = tree_size (node);
1029 record_node_allocation_statistics (code, length);
1030 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1031 memcpy (t, node, length);
1032
1033 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1034 TREE_CHAIN (t) = 0;
1035 TREE_ASM_WRITTEN (t) = 0;
1036 TREE_VISITED (t) = 0;
1037
1038 if (TREE_CODE_CLASS (code) == tcc_declaration)
1039 {
1040 if (code == DEBUG_EXPR_DECL)
1041 DECL_UID (t) = --next_debug_decl_uid;
1042 else
1043 {
1044 DECL_UID (t) = allocate_decl_uid ();
1045 if (DECL_PT_UID_SET_P (node))
1046 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1047 }
1048 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1049 && DECL_HAS_VALUE_EXPR_P (node))
1050 {
1051 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1052 DECL_HAS_VALUE_EXPR_P (t) = 1;
1053 }
1054 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1055 if (TREE_CODE (node) == VAR_DECL)
1056 {
1057 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1058 t->decl_with_vis.symtab_node = NULL;
1059 }
1060 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1061 {
1062 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1063 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1064 }
1065 if (TREE_CODE (node) == FUNCTION_DECL)
1066 {
1067 DECL_STRUCT_FUNCTION (t) = NULL;
1068 t->decl_with_vis.symtab_node = NULL;
1069 }
1070 }
1071 else if (TREE_CODE_CLASS (code) == tcc_type)
1072 {
1073 TYPE_UID (t) = next_type_uid++;
1074 /* The following is so that the debug code for
1075 the copy is different from the original type.
1076 The two statements usually duplicate each other
1077 (because they clear fields of the same union),
1078 but the optimizer should catch that. */
1079 TYPE_SYMTAB_POINTER (t) = 0;
1080 TYPE_SYMTAB_ADDRESS (t) = 0;
1081
1082 /* Do not copy the values cache. */
1083 if (TYPE_CACHED_VALUES_P (t))
1084 {
1085 TYPE_CACHED_VALUES_P (t) = 0;
1086 TYPE_CACHED_VALUES (t) = NULL_TREE;
1087 }
1088 }
1089
1090 return t;
1091 }
1092
1093 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1094 For example, this can copy a list made of TREE_LIST nodes. */
1095
1096 tree
1097 copy_list (tree list)
1098 {
1099 tree head;
1100 tree prev, next;
1101
1102 if (list == 0)
1103 return 0;
1104
1105 head = prev = copy_node (list);
1106 next = TREE_CHAIN (list);
1107 while (next)
1108 {
1109 TREE_CHAIN (prev) = copy_node (next);
1110 prev = TREE_CHAIN (prev);
1111 next = TREE_CHAIN (next);
1112 }
1113 return head;
1114 }
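/* Sketch (not in the original file): copy_list duplicates the spine of a
   TREE_LIST chain while sharing the TREE_PURPOSE/TREE_VALUE payloads.  */
#if 0
static void
example_copy_list (void)
{
  tree orig = tree_cons (NULL_TREE, integer_one_node,
			 tree_cons (NULL_TREE, integer_zero_node, NULL_TREE));
  tree dup = copy_list (orig);
  /* New TREE_LIST nodes, but the values are the same shared constants.  */
  gcc_checking_assert (dup != orig
		       && TREE_VALUE (dup) == TREE_VALUE (orig));
}
#endif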
1115
1116 \f
1117 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1118 INTEGER_CST with value CST and type TYPE. */
1119
1120 static unsigned int
1121 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1122 {
1123 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1124 /* We need an extra zero HWI if CST is an unsigned integer with its
1125 upper bit set, and if CST occupies a whole number of HWIs. */
1126 if (TYPE_UNSIGNED (type)
1127 && wi::neg_p (cst)
1128 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1129 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1130 return cst.get_len ();
1131 }
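/* Worked example (assuming HOST_BITS_PER_WIDE_INT == 64): for a 64-bit
   unsigned TYPE and CST == 0x8000000000000000, the value fits in one HWI
   but looks negative when viewed as a signed HWI and fills whole HWIs, so
   an extra zero HWI is needed and the function returns 2; for CST == 1 it
   simply returns cst.get_len (), i.e. 1.  */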
1132
1133 /* Return a new INTEGER_CST with value CST and type TYPE. */
1134
1135 static tree
1136 build_new_int_cst (tree type, const wide_int &cst)
1137 {
1138 unsigned int len = cst.get_len ();
1139 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1140 tree nt = make_int_cst (len, ext_len);
1141
1142 if (len < ext_len)
1143 {
1144 --ext_len;
1145 TREE_INT_CST_ELT (nt, ext_len) = 0;
1146 for (unsigned int i = len; i < ext_len; ++i)
1147 TREE_INT_CST_ELT (nt, i) = -1;
1148 }
1149 else if (TYPE_UNSIGNED (type)
1150 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1151 {
1152 len--;
1153 TREE_INT_CST_ELT (nt, len)
1154 = zext_hwi (cst.elt (len),
1155 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1156 }
1157
1158 for (unsigned int i = 0; i < len; i++)
1159 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1160 TREE_TYPE (nt) = type;
1161 return nt;
1162 }
1163
1164 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1165
1166 tree
1167 build_int_cst (tree type, HOST_WIDE_INT low)
1168 {
1169 /* Support legacy code. */
1170 if (!type)
1171 type = integer_type_node;
1172
1173 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1174 }
1175
1176 tree
1177 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1178 {
1179 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1180 }
1181
1182 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1183
1184 tree
1185 build_int_cst_type (tree type, HOST_WIDE_INT low)
1186 {
1187 gcc_assert (type);
1188 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1189 }
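/* Usage sketch for the three entry points above (assumes the standard
   global type nodes; not part of the original file).  */
#if 0
static void
example_build_int_csts (void)
{
  tree a = build_int_cst (integer_type_node, 42);    /* int 42 */
  tree b = build_int_cstu (size_type_node, 42);      /* size_t 42, zero extended */
  tree c = build_int_cst_type (char_type_node, -1);  /* requires a non-NULL type */
  (void) a; (void) b; (void) c;
}
#endif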
1190
1191 /* Construct a tree of type TYPE with the value given by CST. The signedness
1192 of CST is assumed to be the same as the signedness of TYPE. */
1193
1194 tree
1195 double_int_to_tree (tree type, double_int cst)
1196 {
1197 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1198 }
1199
1200 /* We force the wide_int CST to the range of the type TYPE by sign or
1201 zero extending it. OVERFLOWABLE indicates if we are interested in
1202 overflow of the value: when > 0 we are only interested in signed
1203 overflow, when < 0 we are interested in any overflow. OVERFLOWED
1204 indicates whether overflow has already occurred. We force the value
1205 to be within the range of TYPE by sign or zero extending the bits
1206 outside the type's precision. We set TREE_OVERFLOW on the returned
1207 constant if
1208 OVERFLOWED is nonzero, or
1209 OVERFLOWABLE is > 0 and signed overflow occurs, or
1210 OVERFLOWABLE is < 0 and any overflow occurs.
1211 We return a new tree node for the extended wide_int. The node
1212 is shared if no overflow flags are set. */
1213
1214
1215 tree
1216 force_fit_type (tree type, const wide_int_ref &cst,
1217 int overflowable, bool overflowed)
1218 {
1219 signop sign = TYPE_SIGN (type);
1220
1221 /* If we need to set overflow flags, return a new unshared node. */
1222 if (overflowed || !wi::fits_to_tree_p (cst, type))
1223 {
1224 if (overflowed
1225 || overflowable < 0
1226 || (overflowable > 0 && sign == SIGNED))
1227 {
1228 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1229 tree t = build_new_int_cst (type, tmp);
1230 TREE_OVERFLOW (t) = 1;
1231 return t;
1232 }
1233 }
1234
1235 /* Else build a shared node. */
1236 return wide_int_to_tree (type, cst);
1237 }
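/* Sketch of the overflow behaviour described above (not part of the
   original file): fitting a value that is out of range for the target
   type with OVERFLOWABLE > 0 records signed overflow on an unshared node.  */
#if 0
static tree
example_force_fit (void)
{
  wide_int w = wi::shwi (300, TYPE_PRECISION (integer_type_node));
  tree t = force_fit_type (signed_char_type_node, w, 1, false);
  gcc_checking_assert (TREE_OVERFLOW (t));  /* 300 does not fit in signed char */
  return t;
}
#endif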
1238
1239 /* These are the hash table functions for the hash table of INTEGER_CST
1240 nodes of a sizetype. */
1241
1242 /* Return the hash code of X, an INTEGER_CST. */
1243
1244 static hashval_t
1245 int_cst_hash_hash (const void *x)
1246 {
1247 const_tree const t = (const_tree) x;
1248 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1249 int i;
1250
1251 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1252 code ^= TREE_INT_CST_ELT (t, i);
1253
1254 return code;
1255 }
1256
1257 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1258 is the same as that given by *Y, also an INTEGER_CST tree node. */
1259
1260 static int
1261 int_cst_hash_eq (const void *x, const void *y)
1262 {
1263 const_tree const xt = (const_tree) x;
1264 const_tree const yt = (const_tree) y;
1265
1266 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1267 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1268 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1269 return false;
1270
1271 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1272 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1273 return false;
1274
1275 return true;
1276 }
1277
1278 /* Create an INT_CST node of TYPE and value CST.
1279 The returned node is always shared. For small integers we use a
1280 per-type vector cache, for larger ones we use a single hash table.
1281 The value is extended from its precision according to the sign of
1282 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1283 the upper bits and ensures that hashing and value equality based
1284 upon the underlying HOST_WIDE_INTs works without masking. */
1285
1286 tree
1287 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1288 {
1289 tree t;
1290 int ix = -1;
1291 int limit = 0;
1292
1293 gcc_assert (type);
1294 unsigned int prec = TYPE_PRECISION (type);
1295 signop sgn = TYPE_SIGN (type);
1296
1297 /* Verify that everything is canonical. */
1298 int l = pcst.get_len ();
1299 if (l > 1)
1300 {
1301 if (pcst.elt (l - 1) == 0)
1302 gcc_checking_assert (pcst.elt (l - 2) < 0);
1303 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1304 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1305 }
1306
1307 wide_int cst = wide_int::from (pcst, prec, sgn);
1308 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1309
1310 if (ext_len == 1)
1311 {
1312 /* We just need to store a single HOST_WIDE_INT. */
1313 HOST_WIDE_INT hwi;
1314 if (TYPE_UNSIGNED (type))
1315 hwi = cst.to_uhwi ();
1316 else
1317 hwi = cst.to_shwi ();
1318
1319 switch (TREE_CODE (type))
1320 {
1321 case NULLPTR_TYPE:
1322 gcc_assert (hwi == 0);
1323 /* Fallthru. */
1324
1325 case POINTER_TYPE:
1326 case REFERENCE_TYPE:
1327 /* Cache NULL pointer. */
1328 if (hwi == 0)
1329 {
1330 limit = 1;
1331 ix = 0;
1332 }
1333 break;
1334
1335 case BOOLEAN_TYPE:
1336 /* Cache false or true. */
1337 limit = 2;
1338 if (hwi < 2)
1339 ix = hwi;
1340 break;
1341
1342 case INTEGER_TYPE:
1343 case OFFSET_TYPE:
1344 if (TYPE_SIGN (type) == UNSIGNED)
1345 {
1346 /* Cache [0, N). */
1347 limit = INTEGER_SHARE_LIMIT;
1348 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1349 ix = hwi;
1350 }
1351 else
1352 {
1353 /* Cache [-1, N). */
1354 limit = INTEGER_SHARE_LIMIT + 1;
1355 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1356 ix = hwi + 1;
1357 }
1358 break;
1359
1360 case ENUMERAL_TYPE:
1361 break;
1362
1363 default:
1364 gcc_unreachable ();
1365 }
1366
1367 if (ix >= 0)
1368 {
1369 /* Look for it in the type's vector of small shared ints. */
1370 if (!TYPE_CACHED_VALUES_P (type))
1371 {
1372 TYPE_CACHED_VALUES_P (type) = 1;
1373 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1374 }
1375
1376 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1377 if (t)
1378 /* Make sure no one is clobbering the shared constant. */
1379 gcc_checking_assert (TREE_TYPE (t) == type
1380 && TREE_INT_CST_NUNITS (t) == 1
1381 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1382 && TREE_INT_CST_EXT_NUNITS (t) == 1
1383 && TREE_INT_CST_ELT (t, 0) == hwi);
1384 else
1385 {
1386 /* Create a new shared int. */
1387 t = build_new_int_cst (type, cst);
1388 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1389 }
1390 }
1391 else
1392 {
1393 /* Use the cache of larger shared ints, using int_cst_node as
1394 a temporary. */
1395 void **slot;
1396
1397 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1398 TREE_TYPE (int_cst_node) = type;
1399
1400 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1401 t = (tree) *slot;
1402 if (!t)
1403 {
1404 /* Insert this one into the hash table. */
1405 t = int_cst_node;
1406 *slot = t;
1407 /* Make a new node for next time round. */
1408 int_cst_node = make_int_cst (1, 1);
1409 }
1410 }
1411 }
1412 else
1413 {
1414 /* The value either hashes properly or we drop it on the floor
1415 for the gc to take care of. There will not be enough of them
1416 to worry about. */
1417 void **slot;
1418
1419 tree nt = build_new_int_cst (type, cst);
1420 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1421 t = (tree) *slot;
1422 if (!t)
1423 {
1424 /* Insert this one into the hash table. */
1425 t = nt;
1426 *slot = t;
1427 }
1428 }
1429
1430 return t;
1431 }
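/* Illustration of the sharing guarantee documented above (not part of the
   original file): small constants of the same type come back as the very
   same node on repeated calls.  */
#if 0
static void
example_int_cst_sharing (void)
{
  tree a = build_int_cst (integer_type_node, 7);
  tree b = build_int_cst (integer_type_node, 7);
  /* Both hit the per-type TYPE_CACHED_VALUES vector (7 < INTEGER_SHARE_LIMIT).  */
  gcc_checking_assert (a == b);
}
#endif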
1432
1433 void
1434 cache_integer_cst (tree t)
1435 {
1436 tree type = TREE_TYPE (t);
1437 int ix = -1;
1438 int limit = 0;
1439 int prec = TYPE_PRECISION (type);
1440
1441 gcc_assert (!TREE_OVERFLOW (t));
1442
1443 switch (TREE_CODE (type))
1444 {
1445 case NULLPTR_TYPE:
1446 gcc_assert (integer_zerop (t));
1447 /* Fallthru. */
1448
1449 case POINTER_TYPE:
1450 case REFERENCE_TYPE:
1451 /* Cache NULL pointer. */
1452 if (integer_zerop (t))
1453 {
1454 limit = 1;
1455 ix = 0;
1456 }
1457 break;
1458
1459 case BOOLEAN_TYPE:
1460 /* Cache false or true. */
1461 limit = 2;
1462 if (wi::ltu_p (t, 2))
1463 ix = TREE_INT_CST_ELT (t, 0);
1464 break;
1465
1466 case INTEGER_TYPE:
1467 case OFFSET_TYPE:
1468 if (TYPE_UNSIGNED (type))
1469 {
1470 /* Cache 0..N */
1471 limit = INTEGER_SHARE_LIMIT;
1472
1473 /* This is a little hokey, but if the prec is smaller than
1474 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1475 obvious test will not get the correct answer. */
1476 if (prec < HOST_BITS_PER_WIDE_INT)
1477 {
1478 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1479 ix = tree_to_uhwi (t);
1480 }
1481 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1482 ix = tree_to_uhwi (t);
1483 }
1484 else
1485 {
1486 /* Cache -1..N */
1487 limit = INTEGER_SHARE_LIMIT + 1;
1488
1489 if (integer_minus_onep (t))
1490 ix = 0;
1491 else if (!wi::neg_p (t))
1492 {
1493 if (prec < HOST_BITS_PER_WIDE_INT)
1494 {
1495 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1496 ix = tree_to_shwi (t) + 1;
1497 }
1498 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1499 ix = tree_to_shwi (t) + 1;
1500 }
1501 }
1502 break;
1503
1504 case ENUMERAL_TYPE:
1505 break;
1506
1507 default:
1508 gcc_unreachable ();
1509 }
1510
1511 if (ix >= 0)
1512 {
1513 /* Look for it in the type's vector of small shared ints. */
1514 if (!TYPE_CACHED_VALUES_P (type))
1515 {
1516 TYPE_CACHED_VALUES_P (type) = 1;
1517 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1518 }
1519
1520 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1521 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1522 }
1523 else
1524 {
1525 /* Use the cache of larger shared ints. */
1526 void **slot;
1527
1528 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1529 /* If there is already an entry for the number verify it's the
1530 same. */
1531 if (*slot)
1532 gcc_assert (wi::eq_p (tree (*slot), t));
1533 else
1534 /* Otherwise insert this one into the hash table. */
1535 *slot = t;
1536 }
1537 }
1538
1539
1540 /* Build an integer constant in TYPE whose lowest BITS bits are ones
1541 and the rest are zeros. */
1542
1543 tree
1544 build_low_bits_mask (tree type, unsigned bits)
1545 {
1546 gcc_assert (bits <= TYPE_PRECISION (type));
1547
1548 return wide_int_to_tree (type, wi::mask (bits, false,
1549 TYPE_PRECISION (type)));
1550 }
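/* For example, build_low_bits_mask (unsigned_char_type_node, 3) yields the
   unsigned char constant 7 (binary 00000111).  */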
1551
1552 /* Check that X is an integer constant that can be expressed in an (unsigned)
1553 HOST_WIDE_INT without loss of precision. */
1554
1555 bool
1556 cst_and_fits_in_hwi (const_tree x)
1557 {
1558 if (TREE_CODE (x) != INTEGER_CST)
1559 return false;
1560
1561 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1562 return false;
1563
1564 return TREE_INT_CST_NUNITS (x) == 1;
1565 }
1566
1567 /* Build a newly constructed VECTOR_CST node of length LEN. */
1568
1569 tree
1570 make_vector_stat (unsigned len MEM_STAT_DECL)
1571 {
1572 tree t;
1573 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1574
1575 record_node_allocation_statistics (VECTOR_CST, length);
1576
1577 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1578
1579 TREE_SET_CODE (t, VECTOR_CST);
1580 TREE_CONSTANT (t) = 1;
1581
1582 return t;
1583 }
1584
1585 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1586 are in a list pointed to by VALS. */
1587
1588 tree
1589 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1590 {
1591 int over = 0;
1592 unsigned cnt = 0;
1593 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1594 TREE_TYPE (v) = type;
1595
1596 /* Iterate through elements and check for overflow. */
1597 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1598 {
1599 tree value = vals[cnt];
1600
1601 VECTOR_CST_ELT (v, cnt) = value;
1602
1603 /* Don't crash if we get an address constant. */
1604 if (!CONSTANT_CLASS_P (value))
1605 continue;
1606
1607 over |= TREE_OVERFLOW (value);
1608 }
1609
1610 TREE_OVERFLOW (v) = over;
1611 return v;
1612 }
1613
1614 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1615 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1616
1617 tree
1618 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1619 {
1620 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1621 unsigned HOST_WIDE_INT idx;
1622 tree value;
1623
1624 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1625 vec[idx] = value;
1626 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1627 vec[idx] = build_zero_cst (TREE_TYPE (type));
1628
1629 return build_vector (type, vec);
1630 }
1631
1632 /* Build a vector of type VECTYPE in which every element is SC. */
1633 tree
1634 build_vector_from_val (tree vectype, tree sc)
1635 {
1636 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1637
1638 if (sc == error_mark_node)
1639 return sc;
1640
1641 /* Verify that the vector type is suitable for SC. Note that there
1642 is some inconsistency in the type-system with respect to restrict
1643 qualifications of pointers. Vector types always have a main-variant
1644 element type and the qualification is applied to the vector-type.
1645 So TREE_TYPE (vector-type) does not return a properly qualified
1646 vector element-type. */
1647 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1648 TREE_TYPE (vectype)));
1649
1650 if (CONSTANT_CLASS_P (sc))
1651 {
1652 tree *v = XALLOCAVEC (tree, nunits);
1653 for (i = 0; i < nunits; ++i)
1654 v[i] = sc;
1655 return build_vector (vectype, v);
1656 }
1657 else
1658 {
1659 vec<constructor_elt, va_gc> *v;
1660 vec_alloc (v, nunits);
1661 for (i = 0; i < nunits; ++i)
1662 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1663 return build_constructor (vectype, v);
1664 }
1665 }
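/* Usage sketch (hypothetical helper, not in the original file): splatting a
   scalar into a vector type.  A constant element yields a VECTOR_CST; a
   non-constant element yields a CONSTRUCTOR instead.  */
#if 0
static tree
example_splat (void)
{
  tree v4si = build_vector_type (integer_type_node, 4);
  return build_vector_from_val (v4si, build_int_cst (integer_type_node, 5));
}
#endif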
1666
1667 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1668 are in the vec pointed to by VALS. */
1669 tree
1670 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1671 {
1672 tree c = make_node (CONSTRUCTOR);
1673 unsigned int i;
1674 constructor_elt *elt;
1675 bool constant_p = true;
1676 bool side_effects_p = false;
1677
1678 TREE_TYPE (c) = type;
1679 CONSTRUCTOR_ELTS (c) = vals;
1680
1681 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1682 {
1683 /* Mostly ctors will have elts that don't have side-effects, so
1684 the usual case is to scan all the elements. Hence a single
1685 loop for both const and side effects, rather than one loop
1686 each (with early outs). */
1687 if (!TREE_CONSTANT (elt->value))
1688 constant_p = false;
1689 if (TREE_SIDE_EFFECTS (elt->value))
1690 side_effects_p = true;
1691 }
1692
1693 TREE_SIDE_EFFECTS (c) = side_effects_p;
1694 TREE_CONSTANT (c) = constant_p;
1695
1696 return c;
1697 }
1698
1699 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1700 INDEX and VALUE. */
1701 tree
1702 build_constructor_single (tree type, tree index, tree value)
1703 {
1704 vec<constructor_elt, va_gc> *v;
1705 constructor_elt elt = {index, value};
1706
1707 vec_alloc (v, 1);
1708 v->quick_push (elt);
1709
1710 return build_constructor (type, v);
1711 }
1712
1713
1714 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1715 are in a list pointed to by VALS. */
1716 tree
1717 build_constructor_from_list (tree type, tree vals)
1718 {
1719 tree t;
1720 vec<constructor_elt, va_gc> *v = NULL;
1721
1722 if (vals)
1723 {
1724 vec_alloc (v, list_length (vals));
1725 for (t = vals; t; t = TREE_CHAIN (t))
1726 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1727 }
1728
1729 return build_constructor (type, v);
1730 }
1731
1732 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1733 of elements, provided as index/value pairs. */
1734
1735 tree
1736 build_constructor_va (tree type, int nelts, ...)
1737 {
1738 vec<constructor_elt, va_gc> *v = NULL;
1739 va_list p;
1740
1741 va_start (p, nelts);
1742 vec_alloc (v, nelts);
1743 while (nelts--)
1744 {
1745 tree index = va_arg (p, tree);
1746 tree value = va_arg (p, tree);
1747 CONSTRUCTOR_APPEND_ELT (v, index, value);
1748 }
1749 va_end (p);
1750 return build_constructor (type, v);
1751 }
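/* Usage sketch (not in the original file): building a two-element
   initializer for a hypothetical RECORD_TYPE REC_TYPE with FIELD_DECLs
   F1 and F2; all three names are assumptions for illustration.  */
#if 0
static tree
example_record_init (tree rec_type, tree f1, tree f2)
{
  /* Index/value pairs follow NELTS.  */
  return build_constructor_va (rec_type, 2,
			       f1, build_int_cst (integer_type_node, 1),
			       f2, build_int_cst (integer_type_node, 2));
}
#endif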
1752
1753 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1754
1755 tree
1756 build_fixed (tree type, FIXED_VALUE_TYPE f)
1757 {
1758 tree v;
1759 FIXED_VALUE_TYPE *fp;
1760
1761 v = make_node (FIXED_CST);
1762 fp = ggc_alloc<fixed_value> ();
1763 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1764
1765 TREE_TYPE (v) = type;
1766 TREE_FIXED_CST_PTR (v) = fp;
1767 return v;
1768 }
1769
1770 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1771
1772 tree
1773 build_real (tree type, REAL_VALUE_TYPE d)
1774 {
1775 tree v;
1776 REAL_VALUE_TYPE *dp;
1777 int overflow = 0;
1778
1779 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1780 Consider doing it via real_convert now. */
1781
1782 v = make_node (REAL_CST);
1783 dp = ggc_alloc<real_value> ();
1784 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1785
1786 TREE_TYPE (v) = type;
1787 TREE_REAL_CST_PTR (v) = dp;
1788 TREE_OVERFLOW (v) = overflow;
1789 return v;
1790 }
1791
1792 /* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1793 node I, converted according to TYPE (which may be NULL). */
1794
1795 REAL_VALUE_TYPE
1796 real_value_from_int_cst (const_tree type, const_tree i)
1797 {
1798 REAL_VALUE_TYPE d;
1799
1800 /* Clear all bits of the real value type so that we can later do
1801 bitwise comparisons to see if two values are the same. */
1802 memset (&d, 0, sizeof d);
1803
1804 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1805 TYPE_SIGN (TREE_TYPE (i)));
1806 return d;
1807 }
1808
1809 /* Given a tree representing an integer constant I, return a tree
1810 representing the same value as a floating-point constant of type TYPE. */
1811
1812 tree
1813 build_real_from_int_cst (tree type, const_tree i)
1814 {
1815 tree v;
1816 int overflow = TREE_OVERFLOW (i);
1817
1818 v = build_real (type, real_value_from_int_cst (type, i));
1819
1820 TREE_OVERFLOW (v) |= overflow;
1821 return v;
1822 }
1823
1824 /* Return a newly constructed STRING_CST node whose value is
1825 the LEN characters at STR.
1826 Note that for a C string literal, LEN should include the trailing NUL.
1827 The TREE_TYPE is not initialized. */
1828
1829 tree
1830 build_string (int len, const char *str)
1831 {
1832 tree s;
1833 size_t length;
1834
1835 /* Do not waste bytes provided by padding of struct tree_string. */
1836 length = len + offsetof (struct tree_string, str) + 1;
1837
1838 record_node_allocation_statistics (STRING_CST, length);
1839
1840 s = (tree) ggc_internal_alloc (length);
1841
1842 memset (s, 0, sizeof (struct tree_typed));
1843 TREE_SET_CODE (s, STRING_CST);
1844 TREE_CONSTANT (s) = 1;
1845 TREE_STRING_LENGTH (s) = len;
1846 memcpy (s->string.str, str, len);
1847 s->string.str[len] = '\0';
1848
1849 return s;
1850 }
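/* Usage sketch for a C string literal (not part of the original file):
   LEN includes the trailing NUL, and the caller supplies the type
   afterwards (cf. build_string_literal for the complete recipe).  */
#if 0
static tree
example_build_string (void)
{
  tree s = build_string (6, "hello");                 /* 5 chars + NUL */
  TREE_TYPE (s) = build_array_type (char_type_node,
				    build_index_type (size_int (5)));
  return s;
}
#endif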
1851
1852 /* Return a newly constructed COMPLEX_CST node whose value is
1853 specified by the real and imaginary parts REAL and IMAG.
1854 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1855 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1856
1857 tree
1858 build_complex (tree type, tree real, tree imag)
1859 {
1860 tree t = make_node (COMPLEX_CST);
1861
1862 TREE_REALPART (t) = real;
1863 TREE_IMAGPART (t) = imag;
1864 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1865 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1866 return t;
1867 }
1868
1869 /* Return a constant of arithmetic type TYPE which is the
1870 multiplicative identity of the set TYPE. */
1871
1872 tree
1873 build_one_cst (tree type)
1874 {
1875 switch (TREE_CODE (type))
1876 {
1877 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1878 case POINTER_TYPE: case REFERENCE_TYPE:
1879 case OFFSET_TYPE:
1880 return build_int_cst (type, 1);
1881
1882 case REAL_TYPE:
1883 return build_real (type, dconst1);
1884
1885 case FIXED_POINT_TYPE:
1886 /* We can only generate 1 for accum types. */
1887 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1888 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1889
1890 case VECTOR_TYPE:
1891 {
1892 tree scalar = build_one_cst (TREE_TYPE (type));
1893
1894 return build_vector_from_val (type, scalar);
1895 }
1896
1897 case COMPLEX_TYPE:
1898 return build_complex (type,
1899 build_one_cst (TREE_TYPE (type)),
1900 build_zero_cst (TREE_TYPE (type)));
1901
1902 default:
1903 gcc_unreachable ();
1904 }
1905 }
1906
1907 /* Return an integer of type TYPE containing all 1's in as much precision as
1908 it contains, or a complex or vector whose subparts are such integers. */
1909
1910 tree
1911 build_all_ones_cst (tree type)
1912 {
1913 if (TREE_CODE (type) == COMPLEX_TYPE)
1914 {
1915 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1916 return build_complex (type, scalar, scalar);
1917 }
1918 else
1919 return build_minus_one_cst (type);
1920 }
1921
1922 /* Return a constant of arithmetic type TYPE which is the
1923 opposite of the multiplicative identity of the set TYPE. */
1924
1925 tree
1926 build_minus_one_cst (tree type)
1927 {
1928 switch (TREE_CODE (type))
1929 {
1930 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1931 case POINTER_TYPE: case REFERENCE_TYPE:
1932 case OFFSET_TYPE:
1933 return build_int_cst (type, -1);
1934
1935 case REAL_TYPE:
1936 return build_real (type, dconstm1);
1937
1938 case FIXED_POINT_TYPE:
1939 /* We can only generate -1 for accum types. */
1940 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1941 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1942 TYPE_MODE (type)));
1943
1944 case VECTOR_TYPE:
1945 {
1946 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1947
1948 return build_vector_from_val (type, scalar);
1949 }
1950
1951 case COMPLEX_TYPE:
1952 return build_complex (type,
1953 build_minus_one_cst (TREE_TYPE (type)),
1954 build_zero_cst (TREE_TYPE (type)));
1955
1956 default:
1957 gcc_unreachable ();
1958 }
1959 }
1960
1961 /* Build 0 constant of type TYPE. This is used by constructor folding
1962 and thus the constant should be represented in memory by
1963 zero(es). */
1964
1965 tree
1966 build_zero_cst (tree type)
1967 {
1968 switch (TREE_CODE (type))
1969 {
1970 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1971 case POINTER_TYPE: case REFERENCE_TYPE:
1972 case OFFSET_TYPE: case NULLPTR_TYPE:
1973 return build_int_cst (type, 0);
1974
1975 case REAL_TYPE:
1976 return build_real (type, dconst0);
1977
1978 case FIXED_POINT_TYPE:
1979 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1980
1981 case VECTOR_TYPE:
1982 {
1983 tree scalar = build_zero_cst (TREE_TYPE (type));
1984
1985 return build_vector_from_val (type, scalar);
1986 }
1987
1988 case COMPLEX_TYPE:
1989 {
1990 tree zero = build_zero_cst (TREE_TYPE (type));
1991
1992 return build_complex (type, zero, zero);
1993 }
1994
1995 default:
1996 if (!AGGREGATE_TYPE_P (type))
1997 return fold_convert (type, integer_zero_node);
1998 return build_constructor (type, NULL);
1999 }
2000 }
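
/* A small usage sketch (not from the original sources; `v4si_type' is a
   hypothetical 4 x int VECTOR_TYPE node):

     tree zero = build_zero_cst (integer_type_node);     // INTEGER_CST 0
     tree ones = build_one_cst (v4si_type);              // VECTOR_CST {1,1,1,1}
     tree mone = build_minus_one_cst (double_type_node); // REAL_CST -1.0

   For aggregate types, build_zero_cst falls back to an empty CONSTRUCTOR,
   i.e. an object represented entirely by zero bytes.  */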
2001
2002
2003 /* Build a BINFO with room for BASE_BINFOS base binfos. */
2004
2005 tree
2006 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2007 {
2008 tree t;
2009 size_t length = (offsetof (struct tree_binfo, base_binfos)
2010 + vec<tree, va_gc>::embedded_size (base_binfos));
2011
2012 record_node_allocation_statistics (TREE_BINFO, length);
2013
2014 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2015
2016 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2017
2018 TREE_SET_CODE (t, TREE_BINFO);
2019
2020 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2021
2022 return t;
2023 }
2024
2025 /* Create a CASE_LABEL_EXPR tree node and return it. */
2026
2027 tree
2028 build_case_label (tree low_value, tree high_value, tree label_decl)
2029 {
2030 tree t = make_node (CASE_LABEL_EXPR);
2031
2032 TREE_TYPE (t) = void_type_node;
2033 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2034
2035 CASE_LOW (t) = low_value;
2036 CASE_HIGH (t) = high_value;
2037 CASE_LABEL (t) = label_decl;
2038 CASE_CHAIN (t) = NULL_TREE;
2039
2040 return t;
2041 }
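
/* Sketch of how a front end might use build_case_label (the LABEL_DECL
   here is hypothetical; a real front end gets it from its own label
   handling):

     tree label = build_decl (input_location, LABEL_DECL,
                              NULL_TREE, void_type_node);
     tree case_3 = build_case_label (build_int_cst (integer_type_node, 3),
                                     NULL_TREE, label);

   A NULL_TREE CASE_LOW denotes the default label, and a non-null
   CASE_HIGH turns the node into a case range.  */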
2042
2043 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2044 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2045 The latter determines the length of the HOST_WIDE_INT vector. */
2046
2047 tree
2048 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2049 {
2050 tree t;
2051 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2052 + sizeof (struct tree_int_cst));
2053
2054 gcc_assert (len);
2055 record_node_allocation_statistics (INTEGER_CST, length);
2056
2057 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2058
2059 TREE_SET_CODE (t, INTEGER_CST);
2060 TREE_INT_CST_NUNITS (t) = len;
2061 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2062 /* to_offset can only be applied to trees that are offset_int-sized
2063 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2064 must be exactly the precision of offset_int and so LEN is correct. */
2065 if (ext_len <= OFFSET_INT_ELTS)
2066 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2067 else
2068 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2069
2070 TREE_CONSTANT (t) = 1;
2071
2072 return t;
2073 }
2074
2075 /* Build a newly constructed TREE_VEC node of length LEN. */
2076
2077 tree
2078 make_tree_vec_stat (int len MEM_STAT_DECL)
2079 {
2080 tree t;
2081 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2082
2083 record_node_allocation_statistics (TREE_VEC, length);
2084
2085 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2086
2087 TREE_SET_CODE (t, TREE_VEC);
2088 TREE_VEC_LENGTH (t) = len;
2089
2090 return t;
2091 }
2092
2093 /* Grow a TREE_VEC node to new length LEN. */
2094
2095 tree
2096 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2097 {
2098 gcc_assert (TREE_CODE (v) == TREE_VEC);
2099
2100 int oldlen = TREE_VEC_LENGTH (v);
2101 gcc_assert (len > oldlen);
2102
2103 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2104 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2105
2106 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2107
2108 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2109
2110 TREE_VEC_LENGTH (v) = len;
2111
2112 return v;
2113 }
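
/* Sketch, assuming the usual make_tree_vec/grow_tree_vec wrapper macros
   from tree.h that supply the memory-statistics arguments:

     tree v = make_tree_vec (2);
     TREE_VEC_ELT (v, 0) = integer_zero_node;
     TREE_VEC_ELT (v, 1) = integer_one_node;
     v = grow_tree_vec (v, 4);   // the first two elements are preserved

   Growing may move the node, so callers must use the returned pointer
   rather than the old one.  */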
2114 \f
2115 /* Return 1 if EXPR is the integer constant zero, a complex constant
2116 of zero, or a vector constant whose elements are all zero. */
2117
2118 int
2119 integer_zerop (const_tree expr)
2120 {
2121 STRIP_NOPS (expr);
2122
2123 switch (TREE_CODE (expr))
2124 {
2125 case INTEGER_CST:
2126 return wi::eq_p (expr, 0);
2127 case COMPLEX_CST:
2128 return (integer_zerop (TREE_REALPART (expr))
2129 && integer_zerop (TREE_IMAGPART (expr)));
2130 case VECTOR_CST:
2131 {
2132 unsigned i;
2133 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2134 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2135 return false;
2136 return true;
2137 }
2138 default:
2139 return false;
2140 }
2141 }
2142
2143 /* Return 1 if EXPR is the integer constant one, the corresponding
2144 complex constant, or a vector constant whose elements are all one. */
2145
2146 int
2147 integer_onep (const_tree expr)
2148 {
2149 STRIP_NOPS (expr);
2150
2151 switch (TREE_CODE (expr))
2152 {
2153 case INTEGER_CST:
2154 return wi::eq_p (wi::to_widest (expr), 1);
2155 case COMPLEX_CST:
2156 return (integer_onep (TREE_REALPART (expr))
2157 && integer_zerop (TREE_IMAGPART (expr)));
2158 case VECTOR_CST:
2159 {
2160 unsigned i;
2161 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2162 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2163 return false;
2164 return true;
2165 }
2166 default:
2167 return false;
2168 }
2169 }
2170
2171 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2172 it contains, or a complex or vector whose subparts are such integers. */
2173
2174 int
2175 integer_all_onesp (const_tree expr)
2176 {
2177 STRIP_NOPS (expr);
2178
2179 if (TREE_CODE (expr) == COMPLEX_CST
2180 && integer_all_onesp (TREE_REALPART (expr))
2181 && integer_all_onesp (TREE_IMAGPART (expr)))
2182 return 1;
2183
2184 else if (TREE_CODE (expr) == VECTOR_CST)
2185 {
2186 unsigned i;
2187 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2188 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2189 return 0;
2190 return 1;
2191 }
2192
2193 else if (TREE_CODE (expr) != INTEGER_CST)
2194 return 0;
2195
2196 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2197 }
2198
2199 /* Return 1 if EXPR is the integer constant minus one. */
2200
2201 int
2202 integer_minus_onep (const_tree expr)
2203 {
2204 STRIP_NOPS (expr);
2205
2206 if (TREE_CODE (expr) == COMPLEX_CST)
2207 return (integer_all_onesp (TREE_REALPART (expr))
2208 && integer_zerop (TREE_IMAGPART (expr)));
2209 else
2210 return integer_all_onesp (expr);
2211 }
2212
2213 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2214 one bit on). */
2215
2216 int
2217 integer_pow2p (const_tree expr)
2218 {
2219 STRIP_NOPS (expr);
2220
2221 if (TREE_CODE (expr) == COMPLEX_CST
2222 && integer_pow2p (TREE_REALPART (expr))
2223 && integer_zerop (TREE_IMAGPART (expr)))
2224 return 1;
2225
2226 if (TREE_CODE (expr) != INTEGER_CST)
2227 return 0;
2228
2229 return wi::popcount (expr) == 1;
2230 }
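
/* Quick illustration of the predicates above (a sketch, not part of the
   original file):

     tree c0 = build_int_cst (integer_type_node, 0);
     tree c8 = build_int_cst (integer_type_node, 8);

     integer_zerop (c0);   // 1
     integer_onep (c8);    // 0
     integer_pow2p (c8);   // 1: exactly one bit set

   Each predicate applies STRIP_NOPS first, so some no-op conversions
   wrapped around a constant are looked through.  */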
2231
2232 /* Return 1 if EXPR is an integer constant other than zero or a
2233 complex constant other than zero. */
2234
2235 int
2236 integer_nonzerop (const_tree expr)
2237 {
2238 STRIP_NOPS (expr);
2239
2240 return ((TREE_CODE (expr) == INTEGER_CST
2241 && !wi::eq_p (expr, 0))
2242 || (TREE_CODE (expr) == COMPLEX_CST
2243 && (integer_nonzerop (TREE_REALPART (expr))
2244 || integer_nonzerop (TREE_IMAGPART (expr)))));
2245 }
2246
2247 /* Return 1 if EXPR is the fixed-point constant zero. */
2248
2249 int
2250 fixed_zerop (const_tree expr)
2251 {
2252 return (TREE_CODE (expr) == FIXED_CST
2253 && TREE_FIXED_CST (expr).data.is_zero ());
2254 }
2255
2256 /* Return the base-2 logarithm of a tree node known to be a
2257 power of two. */
2258
2259 int
2260 tree_log2 (const_tree expr)
2261 {
2262 STRIP_NOPS (expr);
2263
2264 if (TREE_CODE (expr) == COMPLEX_CST)
2265 return tree_log2 (TREE_REALPART (expr));
2266
2267 return wi::exact_log2 (expr);
2268 }
2269
2270 /* Similar, but return the largest integer Y such that 2 ** Y is less
2271 than or equal to EXPR. */
2272
2273 int
2274 tree_floor_log2 (const_tree expr)
2275 {
2276 STRIP_NOPS (expr);
2277
2278 if (TREE_CODE (expr) == COMPLEX_CST)
2279 return tree_log2 (TREE_REALPART (expr));
2280
2281 return wi::floor_log2 (expr);
2282 }
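
/* For example, for the INTEGER_CST 8 both tree_log2 and tree_floor_log2
   return 3, while for 10 tree_log2 returns -1 (not an exact power of two)
   and tree_floor_log2 returns 3.  A sketch:

     tree ten = build_int_cst (integer_type_node, 10);
     int l = tree_floor_log2 (ten);   // 3

   Callers of tree_log2 are expected to have checked integer_pow2p
   beforehand.  */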
2283
2284 /* Return number of known trailing zero bits in EXPR, or, if the value of
2285 EXPR is known to be zero, the precision of it's type. */
2286
2287 unsigned int
2288 tree_ctz (const_tree expr)
2289 {
2290 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2291 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2292 return 0;
2293
2294 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2295 switch (TREE_CODE (expr))
2296 {
2297 case INTEGER_CST:
2298 ret1 = wi::ctz (expr);
2299 return MIN (ret1, prec);
2300 case SSA_NAME:
2301 ret1 = wi::ctz (get_nonzero_bits (expr));
2302 return MIN (ret1, prec);
2303 case PLUS_EXPR:
2304 case MINUS_EXPR:
2305 case BIT_IOR_EXPR:
2306 case BIT_XOR_EXPR:
2307 case MIN_EXPR:
2308 case MAX_EXPR:
2309 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2310 if (ret1 == 0)
2311 return ret1;
2312 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2313 return MIN (ret1, ret2);
2314 case POINTER_PLUS_EXPR:
2315 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2316 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2317 /* Second operand is sizetype, which could be in theory
2318 wider than pointer's precision. Make sure we never
2319 return more than prec. */
2320 ret2 = MIN (ret2, prec);
2321 return MIN (ret1, ret2);
2322 case BIT_AND_EXPR:
2323 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2324 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2325 return MAX (ret1, ret2);
2326 case MULT_EXPR:
2327 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2328 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2329 return MIN (ret1 + ret2, prec);
2330 case LSHIFT_EXPR:
2331 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2332 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2333 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2334 {
2335 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2336 return MIN (ret1 + ret2, prec);
2337 }
2338 return ret1;
2339 case RSHIFT_EXPR:
2340 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2341 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2342 {
2343 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2344 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2345 if (ret1 > ret2)
2346 return ret1 - ret2;
2347 }
2348 return 0;
2349 case TRUNC_DIV_EXPR:
2350 case CEIL_DIV_EXPR:
2351 case FLOOR_DIV_EXPR:
2352 case ROUND_DIV_EXPR:
2353 case EXACT_DIV_EXPR:
2354 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2355 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2356 {
2357 int l = tree_log2 (TREE_OPERAND (expr, 1));
2358 if (l >= 0)
2359 {
2360 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2361 ret2 = l;
2362 if (ret1 > ret2)
2363 return ret1 - ret2;
2364 }
2365 }
2366 return 0;
2367 CASE_CONVERT:
2368 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2369 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2370 ret1 = prec;
2371 return MIN (ret1, prec);
2372 case SAVE_EXPR:
2373 return tree_ctz (TREE_OPERAND (expr, 0));
2374 case COND_EXPR:
2375 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2376 if (ret1 == 0)
2377 return 0;
2378 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2379 return MIN (ret1, ret2);
2380 case COMPOUND_EXPR:
2381 return tree_ctz (TREE_OPERAND (expr, 1));
2382 case ADDR_EXPR:
2383 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2384 if (ret1 > BITS_PER_UNIT)
2385 {
2386 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2387 return MIN (ret1, prec);
2388 }
2389 return 0;
2390 default:
2391 return 0;
2392 }
2393 }
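
/* A worked example of the recursion above (a sketch; X stands for an
   SSA_NAME with no known nonzero-bits information):

     tree_ctz of  X * 8        = ctz (X) + ctz (8) = 0 + 3 = 3
     tree_ctz of (X * 8) + 4   = MIN (3, 2)        = 2
     tree_ctz of (X * 8) & 16  = MAX (3, 4)        = 4

   i.e. multiplication adds trailing zeros, addition keeps only what both
   operands guarantee, and BIT_AND_EXPR keeps the stronger guarantee.  */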
2394
2395 /* Return 1 if EXPR is the real constant zero, in real, complex or vector
2396 form. Trailing zeroes matter for decimal float constants, so don't return 1 for them. */
2397
2398 int
2399 real_zerop (const_tree expr)
2400 {
2401 STRIP_NOPS (expr);
2402
2403 switch (TREE_CODE (expr))
2404 {
2405 case REAL_CST:
2406 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2407 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2408 case COMPLEX_CST:
2409 return real_zerop (TREE_REALPART (expr))
2410 && real_zerop (TREE_IMAGPART (expr));
2411 case VECTOR_CST:
2412 {
2413 unsigned i;
2414 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2415 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2416 return false;
2417 return true;
2418 }
2419 default:
2420 return false;
2421 }
2422 }
2423
2424 /* Return 1 if EXPR is the real constant one in real or complex form.
2425 Trailing zeroes matter for decimal float constants, so don't return
2426 1 for them. */
2427
2428 int
2429 real_onep (const_tree expr)
2430 {
2431 STRIP_NOPS (expr);
2432
2433 switch (TREE_CODE (expr))
2434 {
2435 case REAL_CST:
2436 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2437 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2438 case COMPLEX_CST:
2439 return real_onep (TREE_REALPART (expr))
2440 && real_zerop (TREE_IMAGPART (expr));
2441 case VECTOR_CST:
2442 {
2443 unsigned i;
2444 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2445 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2446 return false;
2447 return true;
2448 }
2449 default:
2450 return false;
2451 }
2452 }
2453
2454 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2455 matter for decimal float constants, so don't return 1 for them. */
2456
2457 int
2458 real_minus_onep (const_tree expr)
2459 {
2460 STRIP_NOPS (expr);
2461
2462 switch (TREE_CODE (expr))
2463 {
2464 case REAL_CST:
2465 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2466 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2467 case COMPLEX_CST:
2468 return real_minus_onep (TREE_REALPART (expr))
2469 && real_zerop (TREE_IMAGPART (expr));
2470 case VECTOR_CST:
2471 {
2472 unsigned i;
2473 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2474 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2475 return false;
2476 return true;
2477 }
2478 default:
2479 return false;
2480 }
2481 }
2482
2483 /* Nonzero if EXP is a constant or a cast of a constant. */
2484
2485 int
2486 really_constant_p (const_tree exp)
2487 {
2488 /* This is not quite the same as STRIP_NOPS. It does more. */
2489 while (CONVERT_EXPR_P (exp)
2490 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2491 exp = TREE_OPERAND (exp, 0);
2492 return TREE_CONSTANT (exp);
2493 }
2494 \f
2495 /* Return first list element whose TREE_VALUE is ELEM.
2496 Return 0 if ELEM is not in LIST. */
2497
2498 tree
2499 value_member (tree elem, tree list)
2500 {
2501 while (list)
2502 {
2503 if (elem == TREE_VALUE (list))
2504 return list;
2505 list = TREE_CHAIN (list);
2506 }
2507 return NULL_TREE;
2508 }
2509
2510 /* Return first list element whose TREE_PURPOSE is ELEM.
2511 Return 0 if ELEM is not in LIST. */
2512
2513 tree
2514 purpose_member (const_tree elem, tree list)
2515 {
2516 while (list)
2517 {
2518 if (elem == TREE_PURPOSE (list))
2519 return list;
2520 list = TREE_CHAIN (list);
2521 }
2522 return NULL_TREE;
2523 }
2524
2525 /* Return true if ELEM is in V. */
2526
2527 bool
2528 vec_member (const_tree elem, vec<tree, va_gc> *v)
2529 {
2530 unsigned ix;
2531 tree t;
2532 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2533 if (elem == t)
2534 return true;
2535 return false;
2536 }
2537
2538 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2539 NULL_TREE if the chain has fewer than IDX + 1 elements. */
2540
2541 tree
2542 chain_index (int idx, tree chain)
2543 {
2544 for (; chain && idx > 0; --idx)
2545 chain = TREE_CHAIN (chain);
2546 return chain;
2547 }
2548
2549 /* Return nonzero if ELEM is part of the chain CHAIN. */
2550
2551 int
2552 chain_member (const_tree elem, const_tree chain)
2553 {
2554 while (chain)
2555 {
2556 if (elem == chain)
2557 return 1;
2558 chain = DECL_CHAIN (chain);
2559 }
2560
2561 return 0;
2562 }
2563
2564 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2565 We expect a null pointer to mark the end of the chain.
2566 This is the Lisp primitive `length'. */
2567
2568 int
2569 list_length (const_tree t)
2570 {
2571 const_tree p = t;
2572 #ifdef ENABLE_TREE_CHECKING
2573 const_tree q = t;
2574 #endif
2575 int len = 0;
2576
2577 while (p)
2578 {
2579 p = TREE_CHAIN (p);
2580 #ifdef ENABLE_TREE_CHECKING
2581 if (len % 2)
2582 q = TREE_CHAIN (q);
2583 gcc_assert (p != q);
2584 #endif
2585 len++;
2586 }
2587
2588 return len;
2589 }
2590
2591 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2592 UNION_TYPE TYPE, or NULL_TREE if none. */
2593
2594 tree
2595 first_field (const_tree type)
2596 {
2597 tree t = TYPE_FIELDS (type);
2598 while (t && TREE_CODE (t) != FIELD_DECL)
2599 t = TREE_CHAIN (t);
2600 return t;
2601 }
2602
2603 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2604 by modifying the last node in chain 1 to point to chain 2.
2605 This is the Lisp primitive `nconc'. */
2606
2607 tree
2608 chainon (tree op1, tree op2)
2609 {
2610 tree t1;
2611
2612 if (!op1)
2613 return op2;
2614 if (!op2)
2615 return op1;
2616
2617 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2618 continue;
2619 TREE_CHAIN (t1) = op2;
2620
2621 #ifdef ENABLE_TREE_CHECKING
2622 {
2623 tree t2;
2624 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2625 gcc_assert (t2 != t1);
2626 }
2627 #endif
2628
2629 return op1;
2630 }
2631
2632 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2633
2634 tree
2635 tree_last (tree chain)
2636 {
2637 tree next;
2638 if (chain)
2639 while ((next = TREE_CHAIN (chain)))
2640 chain = next;
2641 return chain;
2642 }
2643
2644 /* Reverse the order of elements in the chain T,
2645 and return the new head of the chain (old last element). */
2646
2647 tree
2648 nreverse (tree t)
2649 {
2650 tree prev = 0, decl, next;
2651 for (decl = t; decl; decl = next)
2652 {
2653 /* We shouldn't be using this function to reverse BLOCK chains; we
2654 have blocks_nreverse for that. */
2655 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2656 next = TREE_CHAIN (decl);
2657 TREE_CHAIN (decl) = prev;
2658 prev = decl;
2659 }
2660 return prev;
2661 }
2662 \f
2663 /* Return a newly created TREE_LIST node whose
2664 purpose and value fields are PARM and VALUE. */
2665
2666 tree
2667 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2668 {
2669 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2670 TREE_PURPOSE (t) = parm;
2671 TREE_VALUE (t) = value;
2672 return t;
2673 }
2674
2675 /* Build a chain of TREE_LIST nodes from a vector. */
2676
2677 tree
2678 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2679 {
2680 tree ret = NULL_TREE;
2681 tree *pp = &ret;
2682 unsigned int i;
2683 tree t;
2684 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2685 {
2686 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2687 pp = &TREE_CHAIN (*pp);
2688 }
2689 return ret;
2690 }
2691
2692 /* Return a newly created TREE_LIST node whose
2693 purpose and value fields are PURPOSE and VALUE
2694 and whose TREE_CHAIN is CHAIN. */
2695
2696 tree
2697 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2698 {
2699 tree node;
2700
2701 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2702 memset (node, 0, sizeof (struct tree_common));
2703
2704 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2705
2706 TREE_SET_CODE (node, TREE_LIST);
2707 TREE_CHAIN (node) = chain;
2708 TREE_PURPOSE (node) = purpose;
2709 TREE_VALUE (node) = value;
2710 return node;
2711 }
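
/* The chain helpers above compose in the obvious way.  A sketch using the
   usual tree_cons wrapper macro from tree.h:

     tree l = NULL_TREE;
     l = tree_cons (NULL_TREE, integer_zero_node, l);
     l = tree_cons (NULL_TREE, integer_one_node, l);   // list (1 0)
     l = nreverse (l);                                 // list (0 1)

     list_length (l);                        // 2
     value_member (integer_one_node, l);     // the second TREE_LIST cell
     chain_index (1, l);                     // likewise  */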
2712
2713 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2714 trees. */
2715
2716 vec<tree, va_gc> *
2717 ctor_to_vec (tree ctor)
2718 {
2719 vec<tree, va_gc> *vec;
2720 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2721 unsigned int ix;
2722 tree val;
2723
2724 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2725 vec->quick_push (val);
2726
2727 return vec;
2728 }
2729 \f
2730 /* Return the size nominally occupied by an object of type TYPE
2731 when it resides in memory. The value is measured in units of bytes,
2732 and its data type is that normally used for type sizes
2733 (which is the first type created by make_signed_type or
2734 make_unsigned_type). */
2735
2736 tree
2737 size_in_bytes (const_tree type)
2738 {
2739 tree t;
2740
2741 if (type == error_mark_node)
2742 return integer_zero_node;
2743
2744 type = TYPE_MAIN_VARIANT (type);
2745 t = TYPE_SIZE_UNIT (type);
2746
2747 if (t == 0)
2748 {
2749 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2750 return size_zero_node;
2751 }
2752
2753 return t;
2754 }
2755
2756 /* Return the size of TYPE (in bytes) as a wide integer
2757 or return -1 if the size can vary or is larger than an integer. */
2758
2759 HOST_WIDE_INT
2760 int_size_in_bytes (const_tree type)
2761 {
2762 tree t;
2763
2764 if (type == error_mark_node)
2765 return 0;
2766
2767 type = TYPE_MAIN_VARIANT (type);
2768 t = TYPE_SIZE_UNIT (type);
2769
2770 if (t && tree_fits_uhwi_p (t))
2771 return TREE_INT_CST_LOW (t);
2772 else
2773 return -1;
2774 }
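
/* Sketch: for a complete, fixed-size type both queries agree; for a
   variable-sized type, size_in_bytes returns the non-constant size tree
   while int_size_in_bytes returns -1.

     tree sz = size_in_bytes (integer_type_node);              // INTEGER_CST
     HOST_WIDE_INT n = int_size_in_bytes (integer_type_node);  // e.g. 4

   Callers that need an exact compile-time size must therefore check
   the result of int_size_in_bytes for -1.  */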
2775
2776 /* Return the maximum size of TYPE (in bytes) as a wide integer
2777 or return -1 if the size can vary or is larger than an integer. */
2778
2779 HOST_WIDE_INT
2780 max_int_size_in_bytes (const_tree type)
2781 {
2782 HOST_WIDE_INT size = -1;
2783 tree size_tree;
2784
2785 /* If this is an array type, check for a possible MAX_SIZE attached. */
2786
2787 if (TREE_CODE (type) == ARRAY_TYPE)
2788 {
2789 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2790
2791 if (size_tree && tree_fits_uhwi_p (size_tree))
2792 size = tree_to_uhwi (size_tree);
2793 }
2794
2795 /* If we still haven't been able to get a size, see if the language
2796 can compute a maximum size. */
2797
2798 if (size == -1)
2799 {
2800 size_tree = lang_hooks.types.max_size (type);
2801
2802 if (size_tree && tree_fits_uhwi_p (size_tree))
2803 size = tree_to_uhwi (size_tree);
2804 }
2805
2806 return size;
2807 }
2808 \f
2809 /* Return the bit position of FIELD, in bits from the start of the record.
2810 This is a tree of type bitsizetype. */
2811
2812 tree
2813 bit_position (const_tree field)
2814 {
2815 return bit_from_pos (DECL_FIELD_OFFSET (field),
2816 DECL_FIELD_BIT_OFFSET (field));
2817 }
2818
2819 /* Likewise, but return as an integer. It must be representable in
2820 that way (since it could be a signed value, we don't have the
2821 option of returning -1 like int_size_in_bytes can). */
2822
2823 HOST_WIDE_INT
2824 int_bit_position (const_tree field)
2825 {
2826 return tree_to_shwi (bit_position (field));
2827 }
2828 \f
2829 /* Return the byte position of FIELD, in bytes from the start of the record.
2830 This is a tree of type sizetype. */
2831
2832 tree
2833 byte_position (const_tree field)
2834 {
2835 return byte_from_pos (DECL_FIELD_OFFSET (field),
2836 DECL_FIELD_BIT_OFFSET (field));
2837 }
2838
2839 /* Likewise, but return as an integer. It must be representable in
2840 that way (since it could be a signed value, we don't have the
2841 option of returning -1 like int_size_in_bytes can). */
2842
2843 HOST_WIDE_INT
2844 int_byte_position (const_tree field)
2845 {
2846 return tree_to_shwi (byte_position (field));
2847 }
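
/* Sketch, assuming FIELD is a FIELD_DECL laid out at a fixed position:

     tree bitpos = bit_position (FIELD);    // bitsizetype INTEGER_CST
     tree bytepos = byte_position (FIELD);  // sizetype INTEGER_CST
     HOST_WIDE_INT off = int_byte_position (FIELD);

   For a field at a variable position (e.g. one following a variable-sized
   member), the tree forms may be non-constant and the int_* forms must
   not be used.  */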
2848 \f
2849 /* Return the strictest alignment, in bits, that T is known to have. */
2850
2851 unsigned int
2852 expr_align (const_tree t)
2853 {
2854 unsigned int align0, align1;
2855
2856 switch (TREE_CODE (t))
2857 {
2858 CASE_CONVERT: case NON_LVALUE_EXPR:
2859 /* If we have conversions, we know that the alignment of the
2860 object must meet each of the alignments of the types. */
2861 align0 = expr_align (TREE_OPERAND (t, 0));
2862 align1 = TYPE_ALIGN (TREE_TYPE (t));
2863 return MAX (align0, align1);
2864
2865 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2866 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2867 case CLEANUP_POINT_EXPR:
2868 /* These don't change the alignment of an object. */
2869 return expr_align (TREE_OPERAND (t, 0));
2870
2871 case COND_EXPR:
2872 /* The best we can do is say that the alignment is the least aligned
2873 of the two arms. */
2874 align0 = expr_align (TREE_OPERAND (t, 1));
2875 align1 = expr_align (TREE_OPERAND (t, 2));
2876 return MIN (align0, align1);
2877
2878 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2879 meaningfully, it's always 1. */
2880 case LABEL_DECL: case CONST_DECL:
2881 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2882 case FUNCTION_DECL:
2883 gcc_assert (DECL_ALIGN (t) != 0);
2884 return DECL_ALIGN (t);
2885
2886 default:
2887 break;
2888 }
2889
2890 /* Otherwise take the alignment from that of the type. */
2891 return TYPE_ALIGN (TREE_TYPE (t));
2892 }
2893 \f
2894 /* Return, as a tree node, the number of elements for TYPE (which is an
2895 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2896
2897 tree
2898 array_type_nelts (const_tree type)
2899 {
2900 tree index_type, min, max;
2901
2902 /* If they did it with unspecified bounds, then we should have already
2903 given an error about it before we got here. */
2904 if (! TYPE_DOMAIN (type))
2905 return error_mark_node;
2906
2907 index_type = TYPE_DOMAIN (type);
2908 min = TYPE_MIN_VALUE (index_type);
2909 max = TYPE_MAX_VALUE (index_type);
2910
2911 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2912 if (!max)
2913 return error_mark_node;
2914
2915 return (integer_zerop (min)
2916 ? max
2917 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2918 }
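
/* Sketch: for the C type `int a[10]' the domain is [0, 9], so
   array_type_nelts returns the INTEGER_CST 9 (the element count minus one):

     tree idx = build_index_type (size_int (9));
     tree arr = build_array_type (integer_type_node, idx);
     tree n_minus_1 = array_type_nelts (arr);   // INTEGER_CST 9  */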
2919 \f
2920 /* If arg is static -- a reference to an object in static storage -- then
2921 return the object. This is not the same as the C meaning of `static'.
2922 If arg isn't static, return NULL. */
2923
2924 tree
2925 staticp (tree arg)
2926 {
2927 switch (TREE_CODE (arg))
2928 {
2929 case FUNCTION_DECL:
2930 /* Nested functions are static, even though taking their address will
2931 involve a trampoline as we unnest the nested function and create
2932 the trampoline on the tree level. */
2933 return arg;
2934
2935 case VAR_DECL:
2936 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2937 && ! DECL_THREAD_LOCAL_P (arg)
2938 && ! DECL_DLLIMPORT_P (arg)
2939 ? arg : NULL);
2940
2941 case CONST_DECL:
2942 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2943 ? arg : NULL);
2944
2945 case CONSTRUCTOR:
2946 return TREE_STATIC (arg) ? arg : NULL;
2947
2948 case LABEL_DECL:
2949 case STRING_CST:
2950 return arg;
2951
2952 case COMPONENT_REF:
2953 /* If the thing being referenced is not a field, then it is
2954 something language specific. */
2955 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2956
2957 /* If we are referencing a bitfield, we can't evaluate an
2958 ADDR_EXPR at compile time and so it isn't a constant. */
2959 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2960 return NULL;
2961
2962 return staticp (TREE_OPERAND (arg, 0));
2963
2964 case BIT_FIELD_REF:
2965 return NULL;
2966
2967 case INDIRECT_REF:
2968 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2969
2970 case ARRAY_REF:
2971 case ARRAY_RANGE_REF:
2972 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2973 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2974 return staticp (TREE_OPERAND (arg, 0));
2975 else
2976 return NULL;
2977
2978 case COMPOUND_LITERAL_EXPR:
2979 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2980
2981 default:
2982 return NULL;
2983 }
2984 }
2985
2986 \f
2987
2988
2989 /* Return whether OP is a DECL whose address is function-invariant. */
2990
2991 bool
2992 decl_address_invariant_p (const_tree op)
2993 {
2994 /* The conditions below are slightly less strict than the one in
2995 staticp. */
2996
2997 switch (TREE_CODE (op))
2998 {
2999 case PARM_DECL:
3000 case RESULT_DECL:
3001 case LABEL_DECL:
3002 case FUNCTION_DECL:
3003 return true;
3004
3005 case VAR_DECL:
3006 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3007 || DECL_THREAD_LOCAL_P (op)
3008 || DECL_CONTEXT (op) == current_function_decl
3009 || decl_function_context (op) == current_function_decl)
3010 return true;
3011 break;
3012
3013 case CONST_DECL:
3014 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3015 || decl_function_context (op) == current_function_decl)
3016 return true;
3017 break;
3018
3019 default:
3020 break;
3021 }
3022
3023 return false;
3024 }
3025
3026 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3027
3028 bool
3029 decl_address_ip_invariant_p (const_tree op)
3030 {
3031 /* The conditions below are slightly less strict than the one in
3032 staticp. */
3033
3034 switch (TREE_CODE (op))
3035 {
3036 case LABEL_DECL:
3037 case FUNCTION_DECL:
3038 case STRING_CST:
3039 return true;
3040
3041 case VAR_DECL:
3042 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3043 && !DECL_DLLIMPORT_P (op))
3044 || DECL_THREAD_LOCAL_P (op))
3045 return true;
3046 break;
3047
3048 case CONST_DECL:
3049 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3050 return true;
3051 break;
3052
3053 default:
3054 break;
3055 }
3056
3057 return false;
3058 }
3059
3060
3061 /* Return true if T is function-invariant (internal function, does
3062 not handle arithmetic; that's handled in skip_simple_arithmetic and
3063 tree_invariant_p). */
3064
3065 static bool tree_invariant_p (tree t);
3066
3067 static bool
3068 tree_invariant_p_1 (tree t)
3069 {
3070 tree op;
3071
3072 if (TREE_CONSTANT (t)
3073 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3074 return true;
3075
3076 switch (TREE_CODE (t))
3077 {
3078 case SAVE_EXPR:
3079 return true;
3080
3081 case ADDR_EXPR:
3082 op = TREE_OPERAND (t, 0);
3083 while (handled_component_p (op))
3084 {
3085 switch (TREE_CODE (op))
3086 {
3087 case ARRAY_REF:
3088 case ARRAY_RANGE_REF:
3089 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3090 || TREE_OPERAND (op, 2) != NULL_TREE
3091 || TREE_OPERAND (op, 3) != NULL_TREE)
3092 return false;
3093 break;
3094
3095 case COMPONENT_REF:
3096 if (TREE_OPERAND (op, 2) != NULL_TREE)
3097 return false;
3098 break;
3099
3100 default:;
3101 }
3102 op = TREE_OPERAND (op, 0);
3103 }
3104
3105 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3106
3107 default:
3108 break;
3109 }
3110
3111 return false;
3112 }
3113
3114 /* Return true if T is function-invariant. */
3115
3116 static bool
3117 tree_invariant_p (tree t)
3118 {
3119 tree inner = skip_simple_arithmetic (t);
3120 return tree_invariant_p_1 (inner);
3121 }
3122
3123 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3124 Do this to any expression which may be used in more than one place,
3125 but must be evaluated only once.
3126
3127 Normally, expand_expr would reevaluate the expression each time.
3128 Calling save_expr produces something that is evaluated and recorded
3129 the first time expand_expr is called on it. Subsequent calls to
3130 expand_expr just reuse the recorded value.
3131
3132 The call to expand_expr that generates code that actually computes
3133 the value is the first call *at compile time*. Subsequent calls
3134 *at compile time* generate code to use the saved value.
3135 This produces the correct result provided that *at run time* control
3136 always flows through the insns made by the first expand_expr
3137 before reaching the other places where the save_expr was evaluated.
3138 You, the caller of save_expr, must make sure this is so.
3139
3140 Constants, and certain read-only nodes, are returned with no
3141 SAVE_EXPR because that is safe. Expressions containing placeholders
3142 are not touched; see tree.def for an explanation of what these
3143 are used for. */
3144
3145 tree
3146 save_expr (tree expr)
3147 {
3148 tree t = fold (expr);
3149 tree inner;
3150
3151 /* If the tree evaluates to a constant, then we don't want to hide that
3152 fact (i.e. this allows further folding, and direct checks for constants).
3153 However, a read-only object that has side effects cannot be bypassed.
3154 Since it is no problem to reevaluate literals, we just return the
3155 literal node. */
3156 inner = skip_simple_arithmetic (t);
3157 if (TREE_CODE (inner) == ERROR_MARK)
3158 return inner;
3159
3160 if (tree_invariant_p_1 (inner))
3161 return t;
3162
3163 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3164 it means that the size or offset of some field of an object depends on
3165 the value within another field.
3166
3167 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3168 and some variable since it would then need to be both evaluated once and
3169 evaluated more than once. Front-ends must ensure this case cannot
3170 happen by surrounding any such subexpressions in their own SAVE_EXPR
3171 and forcing evaluation at the proper time. */
3172 if (contains_placeholder_p (inner))
3173 return t;
3174
3175 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3176 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3177
3178 /* This expression might be placed ahead of a jump to ensure that the
3179 value was computed on both sides of the jump. So make sure it isn't
3180 eliminated as dead. */
3181 TREE_SIDE_EFFECTS (t) = 1;
3182 return t;
3183 }
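
/* Typical use (a sketch; SOME_EXPR is a hypothetical operand tree).  To
   build |x| without evaluating the operand twice:

     tree arg = save_expr (some_expr);
     tree type = TREE_TYPE (arg);
     tree absv = build3 (COND_EXPR, type,
                         build2 (LT_EXPR, boolean_type_node, arg,
                                 build_zero_cst (type)),
                         build1 (NEGATE_EXPR, type, arg),
                         arg);

   Both occurrences of ARG share one node, so SOME_EXPR is evaluated at
   most once even though the result is used in two places.  */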
3184
3185 /* Look inside EXPR into any simple arithmetic operations. Return the
3186 outermost non-arithmetic or non-invariant node. */
3187
3188 tree
3189 skip_simple_arithmetic (tree expr)
3190 {
3191 /* We don't care about whether this can be used as an lvalue in this
3192 context. */
3193 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3194 expr = TREE_OPERAND (expr, 0);
3195
3196 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3197 a constant, it will be more efficient to not make another SAVE_EXPR since
3198 it will allow better simplification and GCSE will be able to merge the
3199 computations if they actually occur. */
3200 while (true)
3201 {
3202 if (UNARY_CLASS_P (expr))
3203 expr = TREE_OPERAND (expr, 0);
3204 else if (BINARY_CLASS_P (expr))
3205 {
3206 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3207 expr = TREE_OPERAND (expr, 0);
3208 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3209 expr = TREE_OPERAND (expr, 1);
3210 else
3211 break;
3212 }
3213 else
3214 break;
3215 }
3216
3217 return expr;
3218 }
3219
3220 /* Look inside EXPR into simple arithmetic operations involving constants.
3221 Return the outermost non-arithmetic or non-constant node. */
3222
3223 tree
3224 skip_simple_constant_arithmetic (tree expr)
3225 {
3226 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3227 expr = TREE_OPERAND (expr, 0);
3228
3229 while (true)
3230 {
3231 if (UNARY_CLASS_P (expr))
3232 expr = TREE_OPERAND (expr, 0);
3233 else if (BINARY_CLASS_P (expr))
3234 {
3235 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3236 expr = TREE_OPERAND (expr, 0);
3237 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3238 expr = TREE_OPERAND (expr, 1);
3239 else
3240 break;
3241 }
3242 else
3243 break;
3244 }
3245
3246 return expr;
3247 }
3248
3249 /* Return which tree structure is used by T. */
3250
3251 enum tree_node_structure_enum
3252 tree_node_structure (const_tree t)
3253 {
3254 const enum tree_code code = TREE_CODE (t);
3255 return tree_node_structure_for_code (code);
3256 }
3257
3258 /* Set various status flags when building a CALL_EXPR object T. */
3259
3260 static void
3261 process_call_operands (tree t)
3262 {
3263 bool side_effects = TREE_SIDE_EFFECTS (t);
3264 bool read_only = false;
3265 int i = call_expr_flags (t);
3266
3267 /* Calls have side-effects, except those to const or pure functions. */
3268 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3269 side_effects = true;
3270 /* Propagate TREE_READONLY of arguments for const functions. */
3271 if (i & ECF_CONST)
3272 read_only = true;
3273
3274 if (!side_effects || read_only)
3275 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3276 {
3277 tree op = TREE_OPERAND (t, i);
3278 if (op && TREE_SIDE_EFFECTS (op))
3279 side_effects = true;
3280 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3281 read_only = false;
3282 }
3283
3284 TREE_SIDE_EFFECTS (t) = side_effects;
3285 TREE_READONLY (t) = read_only;
3286 }
3287 \f
3288 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3289 size or offset that depends on a field within a record. */
3290
3291 bool
3292 contains_placeholder_p (const_tree exp)
3293 {
3294 enum tree_code code;
3295
3296 if (!exp)
3297 return 0;
3298
3299 code = TREE_CODE (exp);
3300 if (code == PLACEHOLDER_EXPR)
3301 return 1;
3302
3303 switch (TREE_CODE_CLASS (code))
3304 {
3305 case tcc_reference:
3306 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3307 position computations since they will be converted into a
3308 WITH_RECORD_EXPR involving the reference, which we assume
3309 here will be valid. */
3310 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3311
3312 case tcc_exceptional:
3313 if (code == TREE_LIST)
3314 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3315 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3316 break;
3317
3318 case tcc_unary:
3319 case tcc_binary:
3320 case tcc_comparison:
3321 case tcc_expression:
3322 switch (code)
3323 {
3324 case COMPOUND_EXPR:
3325 /* Ignoring the first operand isn't quite right, but works best. */
3326 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3327
3328 case COND_EXPR:
3329 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3330 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3331 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3332
3333 case SAVE_EXPR:
3334 /* The save_expr function never wraps anything containing
3335 a PLACEHOLDER_EXPR. */
3336 return 0;
3337
3338 default:
3339 break;
3340 }
3341
3342 switch (TREE_CODE_LENGTH (code))
3343 {
3344 case 1:
3345 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3346 case 2:
3347 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3348 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3349 default:
3350 return 0;
3351 }
3352
3353 case tcc_vl_exp:
3354 switch (code)
3355 {
3356 case CALL_EXPR:
3357 {
3358 const_tree arg;
3359 const_call_expr_arg_iterator iter;
3360 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3361 if (CONTAINS_PLACEHOLDER_P (arg))
3362 return 1;
3363 return 0;
3364 }
3365 default:
3366 return 0;
3367 }
3368
3369 default:
3370 return 0;
3371 }
3372 return 0;
3373 }
3374
3375 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3376 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3377 field positions. */
3378
3379 static bool
3380 type_contains_placeholder_1 (const_tree type)
3381 {
3382 /* If the size contains a placeholder or the parent type (component type in
3383 the case of arrays) involves a placeholder, this type does. */
3384 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3385 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3386 || (!POINTER_TYPE_P (type)
3387 && TREE_TYPE (type)
3388 && type_contains_placeholder_p (TREE_TYPE (type))))
3389 return true;
3390
3391 /* Now do type-specific checks. Note that the last part of the check above
3392 greatly limits what we have to do below. */
3393 switch (TREE_CODE (type))
3394 {
3395 case VOID_TYPE:
3396 case COMPLEX_TYPE:
3397 case ENUMERAL_TYPE:
3398 case BOOLEAN_TYPE:
3399 case POINTER_TYPE:
3400 case OFFSET_TYPE:
3401 case REFERENCE_TYPE:
3402 case METHOD_TYPE:
3403 case FUNCTION_TYPE:
3404 case VECTOR_TYPE:
3405 case NULLPTR_TYPE:
3406 return false;
3407
3408 case INTEGER_TYPE:
3409 case REAL_TYPE:
3410 case FIXED_POINT_TYPE:
3411 /* Here we just check the bounds. */
3412 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3413 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3414
3415 case ARRAY_TYPE:
3416 /* We have already checked the component type above, so just check the
3417 domain type. */
3418 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3419
3420 case RECORD_TYPE:
3421 case UNION_TYPE:
3422 case QUAL_UNION_TYPE:
3423 {
3424 tree field;
3425
3426 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3427 if (TREE_CODE (field) == FIELD_DECL
3428 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3429 || (TREE_CODE (type) == QUAL_UNION_TYPE
3430 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3431 || type_contains_placeholder_p (TREE_TYPE (field))))
3432 return true;
3433
3434 return false;
3435 }
3436
3437 default:
3438 gcc_unreachable ();
3439 }
3440 }
3441
3442 /* Wrapper around above function used to cache its result. */
3443
3444 bool
3445 type_contains_placeholder_p (tree type)
3446 {
3447 bool result;
3448
3449 /* If the contains_placeholder_bits field has been initialized,
3450 then we know the answer. */
3451 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3452 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3453
3454 /* Indicate that we've seen this type node, and the answer is false.
3455 This is what we want to return if we run into recursion via fields. */
3456 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3457
3458 /* Compute the real value. */
3459 result = type_contains_placeholder_1 (type);
3460
3461 /* Store the real value. */
3462 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3463
3464 return result;
3465 }
3466 \f
3467 /* Push tree EXP onto vector QUEUE if it is not already present. */
3468
3469 static void
3470 push_without_duplicates (tree exp, vec<tree> *queue)
3471 {
3472 unsigned int i;
3473 tree iter;
3474
3475 FOR_EACH_VEC_ELT (*queue, i, iter)
3476 if (simple_cst_equal (iter, exp) == 1)
3477 break;
3478
3479 if (!iter)
3480 queue->safe_push (exp);
3481 }
3482
3483 /* Given a tree EXP, find all occurrences of references to fields
3484 in a PLACEHOLDER_EXPR and place them in vector REFS without
3485 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3486 we assume here that EXP contains only arithmetic expressions
3487 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3488 argument list. */
3489
3490 void
3491 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3492 {
3493 enum tree_code code = TREE_CODE (exp);
3494 tree inner;
3495 int i;
3496
3497 /* We handle TREE_LIST and COMPONENT_REF separately. */
3498 if (code == TREE_LIST)
3499 {
3500 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3501 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3502 }
3503 else if (code == COMPONENT_REF)
3504 {
3505 for (inner = TREE_OPERAND (exp, 0);
3506 REFERENCE_CLASS_P (inner);
3507 inner = TREE_OPERAND (inner, 0))
3508 ;
3509
3510 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3511 push_without_duplicates (exp, refs);
3512 else
3513 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3514 }
3515 else
3516 switch (TREE_CODE_CLASS (code))
3517 {
3518 case tcc_constant:
3519 break;
3520
3521 case tcc_declaration:
3522 /* Variables allocated to static storage can stay. */
3523 if (!TREE_STATIC (exp))
3524 push_without_duplicates (exp, refs);
3525 break;
3526
3527 case tcc_expression:
3528 /* This is the pattern built in ada/make_aligning_type. */
3529 if (code == ADDR_EXPR
3530 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3531 {
3532 push_without_duplicates (exp, refs);
3533 break;
3534 }
3535
3536 /* Fall through... */
3537
3538 case tcc_exceptional:
3539 case tcc_unary:
3540 case tcc_binary:
3541 case tcc_comparison:
3542 case tcc_reference:
3543 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3544 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3545 break;
3546
3547 case tcc_vl_exp:
3548 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3549 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3550 break;
3551
3552 default:
3553 gcc_unreachable ();
3554 }
3555 }
3556
3557 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3558 return a tree with all occurrences of references to F in a
3559 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3560 CONST_DECLs. Note that we assume here that EXP contains only
3561 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3562 occurring only in their argument list. */
3563
3564 tree
3565 substitute_in_expr (tree exp, tree f, tree r)
3566 {
3567 enum tree_code code = TREE_CODE (exp);
3568 tree op0, op1, op2, op3;
3569 tree new_tree;
3570
3571 /* We handle TREE_LIST and COMPONENT_REF separately. */
3572 if (code == TREE_LIST)
3573 {
3574 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3575 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3576 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3577 return exp;
3578
3579 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3580 }
3581 else if (code == COMPONENT_REF)
3582 {
3583 tree inner;
3584
3585 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3586 and it is the right field, replace it with R. */
3587 for (inner = TREE_OPERAND (exp, 0);
3588 REFERENCE_CLASS_P (inner);
3589 inner = TREE_OPERAND (inner, 0))
3590 ;
3591
3592 /* The field. */
3593 op1 = TREE_OPERAND (exp, 1);
3594
3595 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3596 return r;
3597
3598 /* If this expression hasn't been completed yet, leave it alone. */
3599 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3600 return exp;
3601
3602 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3603 if (op0 == TREE_OPERAND (exp, 0))
3604 return exp;
3605
3606 new_tree
3607 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3608 }
3609 else
3610 switch (TREE_CODE_CLASS (code))
3611 {
3612 case tcc_constant:
3613 return exp;
3614
3615 case tcc_declaration:
3616 if (exp == f)
3617 return r;
3618 else
3619 return exp;
3620
3621 case tcc_expression:
3622 if (exp == f)
3623 return r;
3624
3625 /* Fall through... */
3626
3627 case tcc_exceptional:
3628 case tcc_unary:
3629 case tcc_binary:
3630 case tcc_comparison:
3631 case tcc_reference:
3632 switch (TREE_CODE_LENGTH (code))
3633 {
3634 case 0:
3635 return exp;
3636
3637 case 1:
3638 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3639 if (op0 == TREE_OPERAND (exp, 0))
3640 return exp;
3641
3642 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3643 break;
3644
3645 case 2:
3646 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3647 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3648
3649 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3650 return exp;
3651
3652 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3653 break;
3654
3655 case 3:
3656 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3657 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3658 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3659
3660 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3661 && op2 == TREE_OPERAND (exp, 2))
3662 return exp;
3663
3664 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3665 break;
3666
3667 case 4:
3668 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3669 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3670 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3671 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3672
3673 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3674 && op2 == TREE_OPERAND (exp, 2)
3675 && op3 == TREE_OPERAND (exp, 3))
3676 return exp;
3677
3678 new_tree
3679 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3680 break;
3681
3682 default:
3683 gcc_unreachable ();
3684 }
3685 break;
3686
3687 case tcc_vl_exp:
3688 {
3689 int i;
3690
3691 new_tree = NULL_TREE;
3692
3693 /* If we are trying to replace F with a constant, inline back
3694 functions which do nothing else than computing a value from
3695 the arguments they are passed. This makes it possible to
3696 fold partially or entirely the replacement expression. */
3697 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3698 {
3699 tree t = maybe_inline_call_in_expr (exp);
3700 if (t)
3701 return SUBSTITUTE_IN_EXPR (t, f, r);
3702 }
3703
3704 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3705 {
3706 tree op = TREE_OPERAND (exp, i);
3707 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3708 if (new_op != op)
3709 {
3710 if (!new_tree)
3711 new_tree = copy_node (exp);
3712 TREE_OPERAND (new_tree, i) = new_op;
3713 }
3714 }
3715
3716 if (new_tree)
3717 {
3718 new_tree = fold (new_tree);
3719 if (TREE_CODE (new_tree) == CALL_EXPR)
3720 process_call_operands (new_tree);
3721 }
3722 else
3723 return exp;
3724 }
3725 break;
3726
3727 default:
3728 gcc_unreachable ();
3729 }
3730
3731 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3732
3733 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3734 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3735
3736 return new_tree;
3737 }
3738
3739 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3740 for it within OBJ, a tree that is an object or a chain of references. */
3741
3742 tree
3743 substitute_placeholder_in_expr (tree exp, tree obj)
3744 {
3745 enum tree_code code = TREE_CODE (exp);
3746 tree op0, op1, op2, op3;
3747 tree new_tree;
3748
3749 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3750 in the chain of OBJ. */
3751 if (code == PLACEHOLDER_EXPR)
3752 {
3753 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3754 tree elt;
3755
3756 for (elt = obj; elt != 0;
3757 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3758 || TREE_CODE (elt) == COND_EXPR)
3759 ? TREE_OPERAND (elt, 1)
3760 : (REFERENCE_CLASS_P (elt)
3761 || UNARY_CLASS_P (elt)
3762 || BINARY_CLASS_P (elt)
3763 || VL_EXP_CLASS_P (elt)
3764 || EXPRESSION_CLASS_P (elt))
3765 ? TREE_OPERAND (elt, 0) : 0))
3766 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3767 return elt;
3768
3769 for (elt = obj; elt != 0;
3770 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3771 || TREE_CODE (elt) == COND_EXPR)
3772 ? TREE_OPERAND (elt, 1)
3773 : (REFERENCE_CLASS_P (elt)
3774 || UNARY_CLASS_P (elt)
3775 || BINARY_CLASS_P (elt)
3776 || VL_EXP_CLASS_P (elt)
3777 || EXPRESSION_CLASS_P (elt))
3778 ? TREE_OPERAND (elt, 0) : 0))
3779 if (POINTER_TYPE_P (TREE_TYPE (elt))
3780 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3781 == need_type))
3782 return fold_build1 (INDIRECT_REF, need_type, elt);
3783
3784 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3785 survives until RTL generation, there will be an error. */
3786 return exp;
3787 }
3788
3789 /* TREE_LIST is special because we need to look at TREE_VALUE
3790 and TREE_CHAIN, not TREE_OPERANDS. */
3791 else if (code == TREE_LIST)
3792 {
3793 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3794 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3795 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3796 return exp;
3797
3798 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3799 }
3800 else
3801 switch (TREE_CODE_CLASS (code))
3802 {
3803 case tcc_constant:
3804 case tcc_declaration:
3805 return exp;
3806
3807 case tcc_exceptional:
3808 case tcc_unary:
3809 case tcc_binary:
3810 case tcc_comparison:
3811 case tcc_expression:
3812 case tcc_reference:
3813 case tcc_statement:
3814 switch (TREE_CODE_LENGTH (code))
3815 {
3816 case 0:
3817 return exp;
3818
3819 case 1:
3820 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3821 if (op0 == TREE_OPERAND (exp, 0))
3822 return exp;
3823
3824 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3825 break;
3826
3827 case 2:
3828 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3829 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3830
3831 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3832 return exp;
3833
3834 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3835 break;
3836
3837 case 3:
3838 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3839 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3840 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3841
3842 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3843 && op2 == TREE_OPERAND (exp, 2))
3844 return exp;
3845
3846 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3847 break;
3848
3849 case 4:
3850 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3851 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3852 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3853 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3854
3855 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3856 && op2 == TREE_OPERAND (exp, 2)
3857 && op3 == TREE_OPERAND (exp, 3))
3858 return exp;
3859
3860 new_tree
3861 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3862 break;
3863
3864 default:
3865 gcc_unreachable ();
3866 }
3867 break;
3868
3869 case tcc_vl_exp:
3870 {
3871 int i;
3872
3873 new_tree = NULL_TREE;
3874
3875 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3876 {
3877 tree op = TREE_OPERAND (exp, i);
3878 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3879 if (new_op != op)
3880 {
3881 if (!new_tree)
3882 new_tree = copy_node (exp);
3883 TREE_OPERAND (new_tree, i) = new_op;
3884 }
3885 }
3886
3887 if (new_tree)
3888 {
3889 new_tree = fold (new_tree);
3890 if (TREE_CODE (new_tree) == CALL_EXPR)
3891 process_call_operands (new_tree);
3892 }
3893 else
3894 return exp;
3895 }
3896 break;
3897
3898 default:
3899 gcc_unreachable ();
3900 }
3901
3902 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3903
3904 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3905 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3906
3907 return new_tree;
3908 }
3909 \f
3910
3911 /* Subroutine of stabilize_reference; this is called for subtrees of
3912 references. Any expression with side-effects must be put in a SAVE_EXPR
3913 to ensure that it is only evaluated once.
3914
3915 We don't put SAVE_EXPR nodes around everything, because assigning very
3916 simple expressions to temporaries causes us to miss good opportunities
3917 for optimizations. Among other things, the opportunity to fold in the
3918 addition of a constant into an addressing mode often gets lost, e.g.
3919 "y[i+1] += x;". In general, we take the approach that we should not make
3920 an assignment unless we are forced into it - i.e., that any non-side effect
3921 operator should be allowed, and that cse should take care of coalescing
3922 multiple utterances of the same expression should that prove fruitful. */
3923
3924 static tree
3925 stabilize_reference_1 (tree e)
3926 {
3927 tree result;
3928 enum tree_code code = TREE_CODE (e);
3929
3930 /* We cannot ignore const expressions, because one might be a reference
3931 to a const array whose index contains side-effects. But we can
3932 ignore things that are actually constant or that already have been
3933 handled by this function. */
3934
3935 if (tree_invariant_p (e))
3936 return e;
3937
3938 switch (TREE_CODE_CLASS (code))
3939 {
3940 case tcc_exceptional:
3941 case tcc_type:
3942 case tcc_declaration:
3943 case tcc_comparison:
3944 case tcc_statement:
3945 case tcc_expression:
3946 case tcc_reference:
3947 case tcc_vl_exp:
3948 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3949 so that it will only be evaluated once. */
3950 /* The reference (r) and comparison (<) classes could be handled as
3951 below, but it is generally faster to only evaluate them once. */
3952 if (TREE_SIDE_EFFECTS (e))
3953 return save_expr (e);
3954 return e;
3955
3956 case tcc_constant:
3957 /* Constants need no processing. In fact, we should never reach
3958 here. */
3959 return e;
3960
3961 case tcc_binary:
3962 /* Division is slow and tends to be compiled with jumps,
3963 especially the division by powers of 2 that is often
3964 found inside of an array reference. So do it just once. */
3965 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3966 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3967 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3968 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3969 return save_expr (e);
3970 /* Recursively stabilize each operand. */
3971 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3972 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3973 break;
3974
3975 case tcc_unary:
3976 /* Recursively stabilize each operand. */
3977 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3978 break;
3979
3980 default:
3981 gcc_unreachable ();
3982 }
3983
3984 TREE_TYPE (result) = TREE_TYPE (e);
3985 TREE_READONLY (result) = TREE_READONLY (e);
3986 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3987 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3988
3989 return result;
3990 }
3991
3992 /* Stabilize a reference so that we can use it any number of times
3993 without causing its operands to be evaluated more than once.
3994 Returns the stabilized reference. This works by means of save_expr,
3995 so see the caveats in the comments about save_expr.
3996
3997 Also allows conversion expressions whose operands are references.
3998 Any other kind of expression is returned unchanged. */
3999
4000 tree
4001 stabilize_reference (tree ref)
4002 {
4003 tree result;
4004 enum tree_code code = TREE_CODE (ref);
4005
4006 switch (code)
4007 {
4008 case VAR_DECL:
4009 case PARM_DECL:
4010 case RESULT_DECL:
4011 /* No action is needed in this case. */
4012 return ref;
4013
4014 CASE_CONVERT:
4015 case FLOAT_EXPR:
4016 case FIX_TRUNC_EXPR:
4017 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4018 break;
4019
4020 case INDIRECT_REF:
4021 result = build_nt (INDIRECT_REF,
4022 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4023 break;
4024
4025 case COMPONENT_REF:
4026 result = build_nt (COMPONENT_REF,
4027 stabilize_reference (TREE_OPERAND (ref, 0)),
4028 TREE_OPERAND (ref, 1), NULL_TREE);
4029 break;
4030
4031 case BIT_FIELD_REF:
4032 result = build_nt (BIT_FIELD_REF,
4033 stabilize_reference (TREE_OPERAND (ref, 0)),
4034 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4035 break;
4036
4037 case ARRAY_REF:
4038 result = build_nt (ARRAY_REF,
4039 stabilize_reference (TREE_OPERAND (ref, 0)),
4040 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4041 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4042 break;
4043
4044 case ARRAY_RANGE_REF:
4045 result = build_nt (ARRAY_RANGE_REF,
4046 stabilize_reference (TREE_OPERAND (ref, 0)),
4047 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4048 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4049 break;
4050
4051 case COMPOUND_EXPR:
4052 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4053 it wouldn't be ignored. This matters when dealing with
4054 volatiles. */
4055 return stabilize_reference_1 (ref);
4056
4057 /* If arg isn't a kind of lvalue we recognize, make no change.
4058 Caller should recognize the error for an invalid lvalue. */
4059 default:
4060 return ref;
4061
4062 case ERROR_MARK:
4063 return error_mark_node;
4064 }
4065
4066 TREE_TYPE (result) = TREE_TYPE (ref);
4067 TREE_READONLY (result) = TREE_READONLY (ref);
4068 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4069 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4070
4071 return result;
4072 }
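/* Illustrative sketch, not part of the original source and kept out of
   the build: a caller expanding `lhs += rhs' might stabilize the lvalue
   first so that side effects in LHS, e.g. the index in a[i++], are
   evaluated only once.  The function name here is hypothetical.  */
#if 0
static tree
example_expand_plus_assign (tree lhs, tree rhs)
{
  tree stable_lhs = stabilize_reference (lhs);
  tree sum = build2 (PLUS_EXPR, TREE_TYPE (stable_lhs), stable_lhs, rhs);
  return build2 (MODIFY_EXPR, TREE_TYPE (stable_lhs), stable_lhs, sum);
}
#endif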
4073 \f
4074 /* Low-level constructors for expressions. */
4075
4076 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4077 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4078
4079 void
4080 recompute_tree_invariant_for_addr_expr (tree t)
4081 {
4082 tree node;
4083 bool tc = true, se = false;
4084
4085 /* We started out assuming this address is both invariant and constant, but
4086 does not have side effects. Now go down any handled components and see if
4087 any of them involve offsets that are either non-constant or non-invariant.
4088 Also check for side-effects.
4089
4090 ??? Note that this code makes no attempt to deal with the case where
4091 taking the address of something causes a copy due to misalignment. */
4092
4093 #define UPDATE_FLAGS(NODE) \
4094 do { tree _node = (NODE); \
4095 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4096 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4097
4098 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4099 node = TREE_OPERAND (node, 0))
4100 {
4101 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4102 array reference (probably made temporarily by the G++ front end),
4103 so ignore all the operands. */
4104 if ((TREE_CODE (node) == ARRAY_REF
4105 || TREE_CODE (node) == ARRAY_RANGE_REF)
4106 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4107 {
4108 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4109 if (TREE_OPERAND (node, 2))
4110 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4111 if (TREE_OPERAND (node, 3))
4112 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4113 }
4114 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4115 FIELD_DECL, apparently. The G++ front end can put something else
4116 there, at least temporarily. */
4117 else if (TREE_CODE (node) == COMPONENT_REF
4118 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4119 {
4120 if (TREE_OPERAND (node, 2))
4121 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4122 }
4123 }
4124
4125 node = lang_hooks.expr_to_decl (node, &tc, &se);
4126
4127 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4128 the address, since &(*a)->b is a form of addition. If it's a constant, the
4129 address is constant too. If it's a decl, its address is constant if the
4130 decl is static. Everything else is not constant and, furthermore,
4131 taking the address of a volatile variable is not volatile. */
4132 if (TREE_CODE (node) == INDIRECT_REF
4133 || TREE_CODE (node) == MEM_REF)
4134 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4135 else if (CONSTANT_CLASS_P (node))
4136 ;
4137 else if (DECL_P (node))
4138 tc &= (staticp (node) != NULL_TREE);
4139 else
4140 {
4141 tc = false;
4142 se |= TREE_SIDE_EFFECTS (node);
4143 }
4144
4145
4146 TREE_CONSTANT (t) = tc;
4147 TREE_SIDE_EFFECTS (t) = se;
4148 #undef UPDATE_FLAGS
4149 }
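/* Illustrative sketch, not part of the original source and kept out of
   the build: build1 below calls recompute_tree_invariant_for_addr_expr
   for ADDR_EXPR, so a hand-built address gets its TREE_CONSTANT and
   TREE_SIDE_EFFECTS flags derived from the operand.  */
#if 0
static tree
example_take_address (tree decl)
{
  tree addr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (decl)),
		      decl);
  /* For a plain decl operand, TREE_CONSTANT (addr) ends up set exactly
     when the decl is static (modulo the expr_to_decl langhook).  */
  return addr;
}
#endif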
4150
4151 /* Build an expression of code CODE, data type TYPE, and operands as
4152 specified. Expressions and reference nodes can be created this way.
4153 Constants, decls, types and misc nodes cannot be.
4154
4155 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4156 enough for all extant tree codes. */
4157
4158 tree
4159 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4160 {
4161 tree t;
4162
4163 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4164
4165 t = make_node_stat (code PASS_MEM_STAT);
4166 TREE_TYPE (t) = tt;
4167
4168 return t;
4169 }
4170
4171 tree
4172 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4173 {
4174 int length = sizeof (struct tree_exp);
4175 tree t;
4176
4177 record_node_allocation_statistics (code, length);
4178
4179 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4180
4181 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4182
4183 memset (t, 0, sizeof (struct tree_common));
4184
4185 TREE_SET_CODE (t, code);
4186
4187 TREE_TYPE (t) = type;
4188 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4189 TREE_OPERAND (t, 0) = node;
4190 if (node && !TYPE_P (node))
4191 {
4192 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4193 TREE_READONLY (t) = TREE_READONLY (node);
4194 }
4195
4196 if (TREE_CODE_CLASS (code) == tcc_statement)
4197 TREE_SIDE_EFFECTS (t) = 1;
4198 else switch (code)
4199 {
4200 case VA_ARG_EXPR:
4201 /* All of these have side-effects, no matter what their
4202 operands are. */
4203 TREE_SIDE_EFFECTS (t) = 1;
4204 TREE_READONLY (t) = 0;
4205 break;
4206
4207 case INDIRECT_REF:
4208 /* Whether a dereference is readonly has nothing to do with whether
4209 its operand is readonly. */
4210 TREE_READONLY (t) = 0;
4211 break;
4212
4213 case ADDR_EXPR:
4214 if (node)
4215 recompute_tree_invariant_for_addr_expr (t);
4216 break;
4217
4218 default:
4219 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4220 && node && !TYPE_P (node)
4221 && TREE_CONSTANT (node))
4222 TREE_CONSTANT (t) = 1;
4223 if (TREE_CODE_CLASS (code) == tcc_reference
4224 && node && TREE_THIS_VOLATILE (node))
4225 TREE_THIS_VOLATILE (t) = 1;
4226 break;
4227 }
4228
4229 return t;
4230 }
4231
4232 #define PROCESS_ARG(N) \
4233 do { \
4234 TREE_OPERAND (t, N) = arg##N; \
4235 if (arg##N && !TYPE_P (arg##N)) \
4236 { \
4237 if (TREE_SIDE_EFFECTS (arg##N)) \
4238 side_effects = 1; \
4239 if (!TREE_READONLY (arg##N) \
4240 && !CONSTANT_CLASS_P (arg##N)) \
4241 (void) (read_only = 0); \
4242 if (!TREE_CONSTANT (arg##N)) \
4243 (void) (constant = 0); \
4244 } \
4245 } while (0)
4246
4247 tree
4248 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4249 {
4250 bool constant, read_only, side_effects;
4251 tree t;
4252
4253 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4254
4255 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4256 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4257 /* When sizetype precision doesn't match that of pointers
4258 we need to be able to build explicit extensions or truncations
4259 of the offset argument. */
4260 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4261 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4262 && TREE_CODE (arg1) == INTEGER_CST);
4263
4264 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4265 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4266 && ptrofftype_p (TREE_TYPE (arg1)));
4267
4268 t = make_node_stat (code PASS_MEM_STAT);
4269 TREE_TYPE (t) = tt;
4270
4271 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4272 result based on those same flags for the arguments. But if the
4273 arguments aren't really even `tree' expressions, we shouldn't be trying
4274 to do this. */
4275
4276 /* Expressions without side effects may be constant if their
4277 arguments are as well. */
4278 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4279 || TREE_CODE_CLASS (code) == tcc_binary);
4280 read_only = 1;
4281 side_effects = TREE_SIDE_EFFECTS (t);
4282
4283 PROCESS_ARG (0);
4284 PROCESS_ARG (1);
4285
4286 TREE_READONLY (t) = read_only;
4287 TREE_CONSTANT (t) = constant;
4288 TREE_SIDE_EFFECTS (t) = side_effects;
4289 TREE_THIS_VOLATILE (t)
4290 = (TREE_CODE_CLASS (code) == tcc_reference
4291 && arg0 && TREE_THIS_VOLATILE (arg0));
4292
4293 return t;
4294 }
4295
4296
4297 tree
4298 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4299 tree arg2 MEM_STAT_DECL)
4300 {
4301 bool constant, read_only, side_effects;
4302 tree t;
4303
4304 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4305 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4306
4307 t = make_node_stat (code PASS_MEM_STAT);
4308 TREE_TYPE (t) = tt;
4309
4310 read_only = 1;
4311
4312 /* As a special exception, if COND_EXPR has NULL branches, we
4313 assume that it is a gimple statement and always consider
4314 it to have side effects. */
4315 if (code == COND_EXPR
4316 && tt == void_type_node
4317 && arg1 == NULL_TREE
4318 && arg2 == NULL_TREE)
4319 side_effects = true;
4320 else
4321 side_effects = TREE_SIDE_EFFECTS (t);
4322
4323 PROCESS_ARG (0);
4324 PROCESS_ARG (1);
4325 PROCESS_ARG (2);
4326
4327 if (code == COND_EXPR)
4328 TREE_READONLY (t) = read_only;
4329
4330 TREE_SIDE_EFFECTS (t) = side_effects;
4331 TREE_THIS_VOLATILE (t)
4332 = (TREE_CODE_CLASS (code) == tcc_reference
4333 && arg0 && TREE_THIS_VOLATILE (arg0));
4334
4335 return t;
4336 }
4337
4338 tree
4339 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4340 tree arg2, tree arg3 MEM_STAT_DECL)
4341 {
4342 bool constant, read_only, side_effects;
4343 tree t;
4344
4345 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4346
4347 t = make_node_stat (code PASS_MEM_STAT);
4348 TREE_TYPE (t) = tt;
4349
4350 side_effects = TREE_SIDE_EFFECTS (t);
4351
4352 PROCESS_ARG (0);
4353 PROCESS_ARG (1);
4354 PROCESS_ARG (2);
4355 PROCESS_ARG (3);
4356
4357 TREE_SIDE_EFFECTS (t) = side_effects;
4358 TREE_THIS_VOLATILE (t)
4359 = (TREE_CODE_CLASS (code) == tcc_reference
4360 && arg0 && TREE_THIS_VOLATILE (arg0));
4361
4362 return t;
4363 }
4364
4365 tree
4366 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4367 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4368 {
4369 bool constant, read_only, side_effects;
4370 tree t;
4371
4372 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4373
4374 t = make_node_stat (code PASS_MEM_STAT);
4375 TREE_TYPE (t) = tt;
4376
4377 side_effects = TREE_SIDE_EFFECTS (t);
4378
4379 PROCESS_ARG (0);
4380 PROCESS_ARG (1);
4381 PROCESS_ARG (2);
4382 PROCESS_ARG (3);
4383 PROCESS_ARG (4);
4384
4385 TREE_SIDE_EFFECTS (t) = side_effects;
4386 TREE_THIS_VOLATILE (t)
4387 = (TREE_CODE_CLASS (code) == tcc_reference
4388 && arg0 && TREE_THIS_VOLATILE (arg0));
4389
4390 return t;
4391 }
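/* Illustrative sketch, not part of the original source and kept out of
   the build: typical use of the buildN family above.  TYPE and the
   operand trees are assumed to come from the caller.  */
#if 0
static tree
example_build_a_plus_b_times_c (tree type, tree a, tree b, tree c)
{
  /* TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS of each result
     are derived from its operands by build2_stat above.  */
  tree prod = build2 (MULT_EXPR, type, b, c);
  return build2 (PLUS_EXPR, type, a, prod);
}
#endif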
4392
4393 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4394 on the pointer PTR. */
4395
4396 tree
4397 build_simple_mem_ref_loc (location_t loc, tree ptr)
4398 {
4399 HOST_WIDE_INT offset = 0;
4400 tree ptype = TREE_TYPE (ptr);
4401 tree tem;
4402 /* For convenience allow addresses that collapse to a simple base
4403 and offset. */
4404 if (TREE_CODE (ptr) == ADDR_EXPR
4405 && (handled_component_p (TREE_OPERAND (ptr, 0))
4406 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4407 {
4408 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4409 gcc_assert (ptr);
4410 ptr = build_fold_addr_expr (ptr);
4411 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4412 }
4413 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4414 ptr, build_int_cst (ptype, offset));
4415 SET_EXPR_LOCATION (tem, loc);
4416 return tem;
4417 }
4418
4419 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4420
4421 offset_int
4422 mem_ref_offset (const_tree t)
4423 {
4424 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4425 }
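/* Illustrative sketch, not part of the original source and kept out of
   the build: building a dereference of PTR and reading the constant
   offset back.  wi::eq_p is assumed to be available from wide-int.h.  */
#if 0
static bool
example_mem_ref_at_offset_zero (tree ptr)
{
  tree ref = build_simple_mem_ref_loc (UNKNOWN_LOCATION, ptr);
  /* The offset operand of the MEM_REF is an INTEGER_CST of the pointer
     type; mem_ref_offset above converts it to an offset_int.  */
  return wi::eq_p (mem_ref_offset (ref), 0);
}
#endif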
4426
4427 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4428 offsetted by OFFSET units. */
4429
4430 tree
4431 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4432 {
4433 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4434 build_fold_addr_expr (base),
4435 build_int_cst (ptr_type_node, offset));
4436 tree addr = build1 (ADDR_EXPR, type, ref);
4437 recompute_tree_invariant_for_addr_expr (addr);
4438 return addr;
4439 }
4440
4441 /* Similar to the buildN functions above, except don't specify the
4442 TREE_TYPE and leave the TREE_SIDE_EFFECTS as 0.
4443 It is permissible for arguments to be null,
4444 or even garbage if their values do not matter. */
4445
4446 tree
4447 build_nt (enum tree_code code, ...)
4448 {
4449 tree t;
4450 int length;
4451 int i;
4452 va_list p;
4453
4454 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4455
4456 va_start (p, code);
4457
4458 t = make_node (code);
4459 length = TREE_CODE_LENGTH (code);
4460
4461 for (i = 0; i < length; i++)
4462 TREE_OPERAND (t, i) = va_arg (p, tree);
4463
4464 va_end (p);
4465 return t;
4466 }
4467
4468 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4469 tree vec. */
4470
4471 tree
4472 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4473 {
4474 tree ret, t;
4475 unsigned int ix;
4476
4477 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4478 CALL_EXPR_FN (ret) = fn;
4479 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4480 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4481 CALL_EXPR_ARG (ret, ix) = t;
4482 return ret;
4483 }
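/* Illustrative sketch, not part of the original source and kept out of
   the build: collecting arguments in a GC-allocated vec and building an
   unchecked CALL_EXPR from them.  FN, ARG0 and ARG1 come from the
   caller.  */
#if 0
static tree
example_build_call (tree fn, tree arg0, tree arg1)
{
  vec<tree, va_gc> *args = NULL;
  vec_safe_push (args, arg0);
  vec_safe_push (args, arg1);
  return build_nt_call_vec (fn, args);
}
#endif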
4484 \f
4485 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4486 We do NOT enter this node in any sort of symbol table.
4487
4488 LOC is the location of the decl.
4489
4490 layout_decl is used to set up the decl's storage layout.
4491 Other slots are initialized to 0 or null pointers. */
4492
4493 tree
4494 build_decl_stat (location_t loc, enum tree_code code, tree name,
4495 tree type MEM_STAT_DECL)
4496 {
4497 tree t;
4498
4499 t = make_node_stat (code PASS_MEM_STAT);
4500 DECL_SOURCE_LOCATION (t) = loc;
4501
4502 /* if (type == error_mark_node)
4503 type = integer_type_node; */
4504 /* That is not done, deliberately, so that having error_mark_node
4505 as the type can suppress useless errors in the use of this variable. */
4506
4507 DECL_NAME (t) = name;
4508 TREE_TYPE (t) = type;
4509
4510 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4511 layout_decl (t, 0);
4512
4513 return t;
4514 }
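/* Illustrative sketch, not part of the original source and kept out of
   the build: creating a simple artificial variable via the build_decl
   wrapper around build_decl_stat.  The variable name is an example
   only.  */
#if 0
static tree
example_make_tmp_var (tree type)
{
  tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
			 get_identifier ("example_tmp"), type);
  DECL_ARTIFICIAL (var) = 1;
  TREE_USED (var) = 1;
  return var;
}
#endif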
4515
4516 /* Builds and returns function declaration with NAME and TYPE. */
4517
4518 tree
4519 build_fn_decl (const char *name, tree type)
4520 {
4521 tree id = get_identifier (name);
4522 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4523
4524 DECL_EXTERNAL (decl) = 1;
4525 TREE_PUBLIC (decl) = 1;
4526 DECL_ARTIFICIAL (decl) = 1;
4527 TREE_NOTHROW (decl) = 1;
4528
4529 return decl;
4530 }
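/* Illustrative sketch, not part of the original source and kept out of
   the build: declaring an external helper routine.  The name and
   signature are made up; build_function_type_list is assumed to be
   available as it is elsewhere in GCC.  */
#if 0
static tree
example_declare_runtime_helper (void)
{
  tree fntype = build_function_type_list (void_type_node, ptr_type_node,
					  NULL_TREE);
  /* build_fn_decl marks the decl external, public, artificial and
     nothrow.  */
  return build_fn_decl ("__example_runtime_helper", fntype);
}
#endif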
4531
4532 vec<tree, va_gc> *all_translation_units;
4533
4534 /* Builds a new translation-unit decl with name NAME, queues it in the
4535 global list of translation-unit decls and returns it. */
4536
4537 tree
4538 build_translation_unit_decl (tree name)
4539 {
4540 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4541 name, NULL_TREE);
4542 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4543 vec_safe_push (all_translation_units, tu);
4544 return tu;
4545 }
4546
4547 \f
4548 /* BLOCK nodes are used to represent the structure of binding contours
4549 and declarations, once those contours have been exited and their contents
4550 compiled. This information is used for outputting debugging info. */
4551
4552 tree
4553 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4554 {
4555 tree block = make_node (BLOCK);
4556
4557 BLOCK_VARS (block) = vars;
4558 BLOCK_SUBBLOCKS (block) = subblocks;
4559 BLOCK_SUPERCONTEXT (block) = supercontext;
4560 BLOCK_CHAIN (block) = chain;
4561 return block;
4562 }
4563
4564 \f
4565 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4566
4567 LOC is the location to use in tree T. */
4568
4569 void
4570 protected_set_expr_location (tree t, location_t loc)
4571 {
4572 if (t && CAN_HAVE_LOCATION_P (t))
4573 SET_EXPR_LOCATION (t, loc);
4574 }
4575 \f
4576 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4577 is ATTRIBUTE. */
4578
4579 tree
4580 build_decl_attribute_variant (tree ddecl, tree attribute)
4581 {
4582 DECL_ATTRIBUTES (ddecl) = attribute;
4583 return ddecl;
4584 }
4585
4586 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4587 is ATTRIBUTE and its qualifiers are QUALS.
4588
4589 Record such modified types already made so we don't make duplicates. */
4590
4591 tree
4592 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4593 {
4594 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4595 {
4596 hashval_t hashcode = 0;
4597 tree ntype;
4598 int i;
4599 tree t;
4600 enum tree_code code = TREE_CODE (ttype);
4601
4602 /* Building a distinct copy of a tagged type is inappropriate; it
4603 causes breakage in code that expects there to be a one-to-one
4604 relationship between a struct and its fields.
4605 build_duplicate_type is another solution (as used in
4606 handle_transparent_union_attribute), but that doesn't play well
4607 with the stronger C++ type identity model. */
4608 if (TREE_CODE (ttype) == RECORD_TYPE
4609 || TREE_CODE (ttype) == UNION_TYPE
4610 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4611 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4612 {
4613 warning (OPT_Wattributes,
4614 "ignoring attributes applied to %qT after definition",
4615 TYPE_MAIN_VARIANT (ttype));
4616 return build_qualified_type (ttype, quals);
4617 }
4618
4619 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4620 ntype = build_distinct_type_copy (ttype);
4621
4622 TYPE_ATTRIBUTES (ntype) = attribute;
4623
4624 hashcode = iterative_hash_object (code, hashcode);
4625 if (TREE_TYPE (ntype))
4626 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype)),
4627 hashcode);
4628 hashcode = attribute_hash_list (attribute, hashcode);
4629
4630 switch (TREE_CODE (ntype))
4631 {
4632 case FUNCTION_TYPE:
4633 hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode);
4634 break;
4635 case ARRAY_TYPE:
4636 if (TYPE_DOMAIN (ntype))
4637 hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
4638 hashcode);
4639 break;
4640 case INTEGER_TYPE:
4641 t = TYPE_MAX_VALUE (ntype);
4642 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4643 hashcode = iterative_hash_object (TREE_INT_CST_ELT (t, i), hashcode);
4644 break;
4645 case REAL_TYPE:
4646 case FIXED_POINT_TYPE:
4647 {
4648 unsigned int precision = TYPE_PRECISION (ntype);
4649 hashcode = iterative_hash_object (precision, hashcode);
4650 }
4651 break;
4652 default:
4653 break;
4654 }
4655
4656 ntype = type_hash_canon (hashcode, ntype);
4657
4658 /* If the target-dependent attributes make NTYPE different from
4659 its canonical type, we will need to use structural equality
4660 checks for this type. */
4661 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4662 || !comp_type_attributes (ntype, ttype))
4663 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4664 else if (TYPE_CANONICAL (ntype) == ntype)
4665 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4666
4667 ttype = build_qualified_type (ntype, quals);
4668 }
4669 else if (TYPE_QUALS (ttype) != quals)
4670 ttype = build_qualified_type (ttype, quals);
4671
4672 return ttype;
4673 }
4674
4675 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4676 the same. */
4677
4678 static bool
4679 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4680 {
4681 tree cl1, cl2;
4682 for (cl1 = clauses1, cl2 = clauses2;
4683 cl1 && cl2;
4684 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4685 {
4686 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4687 return false;
4688 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4689 {
4690 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4691 OMP_CLAUSE_DECL (cl2)) != 1)
4692 return false;
4693 }
4694 switch (OMP_CLAUSE_CODE (cl1))
4695 {
4696 case OMP_CLAUSE_ALIGNED:
4697 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4698 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4699 return false;
4700 break;
4701 case OMP_CLAUSE_LINEAR:
4702 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4703 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4704 return false;
4705 break;
4706 case OMP_CLAUSE_SIMDLEN:
4707 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4708 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4709 return false;
4710 default:
4711 break;
4712 }
4713 }
4714 return true;
4715 }
4716
4717 /* Compare two constructor-element-type constants. Return true if the
4718 lists are known to be equal; otherwise return false. */
4719
4720 static bool
4721 simple_cst_list_equal (const_tree l1, const_tree l2)
4722 {
4723 while (l1 != NULL_TREE && l2 != NULL_TREE)
4724 {
4725 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4726 return false;
4727
4728 l1 = TREE_CHAIN (l1);
4729 l2 = TREE_CHAIN (l2);
4730 }
4731
4732 return l1 == l2;
4733 }
4734
4735 /* Compare two attributes for their value identity. Return true if the
4736 attribute values are known to be equal; otherwise return false.
4737 */
4738
4739 static bool
4740 attribute_value_equal (const_tree attr1, const_tree attr2)
4741 {
4742 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4743 return true;
4744
4745 if (TREE_VALUE (attr1) != NULL_TREE
4746 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4747 && TREE_VALUE (attr2) != NULL
4748 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4749 return (simple_cst_list_equal (TREE_VALUE (attr1),
4750 TREE_VALUE (attr2)) == 1);
4751
4752 if ((flag_openmp || flag_openmp_simd)
4753 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4754 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4755 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4756 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4757 TREE_VALUE (attr2));
4758
4759 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4760 }
4761
4762 /* Return 0 if the attributes for two types are incompatible, 1 if they
4763 are compatible, and 2 if they are nearly compatible (which causes a
4764 warning to be generated). */
4765 int
4766 comp_type_attributes (const_tree type1, const_tree type2)
4767 {
4768 const_tree a1 = TYPE_ATTRIBUTES (type1);
4769 const_tree a2 = TYPE_ATTRIBUTES (type2);
4770 const_tree a;
4771
4772 if (a1 == a2)
4773 return 1;
4774 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4775 {
4776 const struct attribute_spec *as;
4777 const_tree attr;
4778
4779 as = lookup_attribute_spec (get_attribute_name (a));
4780 if (!as || as->affects_type_identity == false)
4781 continue;
4782
4783 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4784 if (!attr || !attribute_value_equal (a, attr))
4785 break;
4786 }
4787 if (!a)
4788 {
4789 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4790 {
4791 const struct attribute_spec *as;
4792
4793 as = lookup_attribute_spec (get_attribute_name (a));
4794 if (!as || as->affects_type_identity == false)
4795 continue;
4796
4797 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4798 break;
4799 /* We don't need to compare trees again, as we did this
4800 already in first loop. */
4801 }
4802 /* All attributes affecting type identity are equal, so
4803 there is no need to call the target hook for comparison. */
4804 if (!a)
4805 return 1;
4806 }
4807 /* As some type combinations - such as the default calling convention -
4808 might be compatible, we have to call the target hook to get the final result. */
4809 return targetm.comp_type_attributes (type1, type2);
4810 }
4811
4812 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4813 is ATTRIBUTE.
4814
4815 Record such modified types already made so we don't make duplicates. */
4816
4817 tree
4818 build_type_attribute_variant (tree ttype, tree attribute)
4819 {
4820 return build_type_attribute_qual_variant (ttype, attribute,
4821 TYPE_QUALS (ttype));
4822 }
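/* Illustrative sketch, not part of the original source and kept out of
   the build: attaching an argument-less attribute to a type.  The
   attribute list is a TREE_LIST whose TREE_PURPOSE is the name and
   whose TREE_VALUE is the (here empty) argument list; the attribute
   chosen is only an example.  */
#if 0
static tree
example_add_may_alias (tree type)
{
  tree attr = build_tree_list (get_identifier ("may_alias"), NULL_TREE);
  /* Shares a previously built variant if an identical one exists.  */
  return build_type_attribute_variant (type, attr);
}
#endif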
4823
4824
4825 /* Reset the expression *EXPR_P, a size or position.
4826
4827 ??? We could reset all non-constant sizes or positions. But it's cheap
4828 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4829
4830 We need to reset self-referential sizes or positions because they cannot
4831 be gimplified and thus can contain a CALL_EXPR after the gimplification
4832 is finished, which will run afoul of LTO streaming. And they need to be
4833 reset to something essentially dummy but not constant, so as to preserve
4834 the properties of the object they are attached to. */
4835
4836 static inline void
4837 free_lang_data_in_one_sizepos (tree *expr_p)
4838 {
4839 tree expr = *expr_p;
4840 if (CONTAINS_PLACEHOLDER_P (expr))
4841 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4842 }
4843
4844
4845 /* Reset all the fields in a binfo node BINFO. We only keep
4846 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4847
4848 static void
4849 free_lang_data_in_binfo (tree binfo)
4850 {
4851 unsigned i;
4852 tree t;
4853
4854 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4855
4856 BINFO_VIRTUALS (binfo) = NULL_TREE;
4857 BINFO_BASE_ACCESSES (binfo) = NULL;
4858 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4859 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4860
4861 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4862 free_lang_data_in_binfo (t);
4863 }
4864
4865
4866 /* Reset all language specific information still present in TYPE. */
4867
4868 static void
4869 free_lang_data_in_type (tree type)
4870 {
4871 gcc_assert (TYPE_P (type));
4872
4873 /* Give the FE a chance to remove its own data first. */
4874 lang_hooks.free_lang_data (type);
4875
4876 TREE_LANG_FLAG_0 (type) = 0;
4877 TREE_LANG_FLAG_1 (type) = 0;
4878 TREE_LANG_FLAG_2 (type) = 0;
4879 TREE_LANG_FLAG_3 (type) = 0;
4880 TREE_LANG_FLAG_4 (type) = 0;
4881 TREE_LANG_FLAG_5 (type) = 0;
4882 TREE_LANG_FLAG_6 (type) = 0;
4883
4884 if (TREE_CODE (type) == FUNCTION_TYPE)
4885 {
4886 /* Remove the const and volatile qualifiers from arguments. The
4887 C++ front end removes them, but the C front end does not,
4888 leading to false ODR violation errors when merging two
4889 instances of the same function signature compiled by
4890 different front ends. */
4891 tree p;
4892
4893 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4894 {
4895 tree arg_type = TREE_VALUE (p);
4896
4897 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4898 {
4899 int quals = TYPE_QUALS (arg_type)
4900 & ~TYPE_QUAL_CONST
4901 & ~TYPE_QUAL_VOLATILE;
4902 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4903 free_lang_data_in_type (TREE_VALUE (p));
4904 }
4905 }
4906 }
4907
4908 /* Remove members that are not actually FIELD_DECLs from the field
4909 list of an aggregate. These occur in C++. */
4910 if (RECORD_OR_UNION_TYPE_P (type))
4911 {
4912 tree prev, member;
4913
4914 /* Note that TYPE_FIELDS can be shared across distinct
4915 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4916 to be removed, we cannot set its TREE_CHAIN to NULL.
4917 Otherwise, we would not be able to find all the other fields
4918 in the other instances of this TREE_TYPE.
4919
4920 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4921 prev = NULL_TREE;
4922 member = TYPE_FIELDS (type);
4923 while (member)
4924 {
4925 if (TREE_CODE (member) == FIELD_DECL
4926 || TREE_CODE (member) == TYPE_DECL)
4927 {
4928 if (prev)
4929 TREE_CHAIN (prev) = member;
4930 else
4931 TYPE_FIELDS (type) = member;
4932 prev = member;
4933 }
4934
4935 member = TREE_CHAIN (member);
4936 }
4937
4938 if (prev)
4939 TREE_CHAIN (prev) = NULL_TREE;
4940 else
4941 TYPE_FIELDS (type) = NULL_TREE;
4942
4943 TYPE_METHODS (type) = NULL_TREE;
4944 if (TYPE_BINFO (type))
4945 free_lang_data_in_binfo (TYPE_BINFO (type));
4946 }
4947 else
4948 {
4949 /* For non-aggregate types, clear out the language slot (which
4950 overloads TYPE_BINFO). */
4951 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4952
4953 if (INTEGRAL_TYPE_P (type)
4954 || SCALAR_FLOAT_TYPE_P (type)
4955 || FIXED_POINT_TYPE_P (type))
4956 {
4957 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4958 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4959 }
4960 }
4961
4962 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4963 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4964
4965 if (TYPE_CONTEXT (type)
4966 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4967 {
4968 tree ctx = TYPE_CONTEXT (type);
4969 do
4970 {
4971 ctx = BLOCK_SUPERCONTEXT (ctx);
4972 }
4973 while (ctx && TREE_CODE (ctx) == BLOCK);
4974 TYPE_CONTEXT (type) = ctx;
4975 }
4976 }
4977
4978
4979 /* Return true if DECL may need an assembler name to be set. */
4980
4981 static inline bool
4982 need_assembler_name_p (tree decl)
4983 {
4984 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
4985 if (TREE_CODE (decl) != FUNCTION_DECL
4986 && TREE_CODE (decl) != VAR_DECL)
4987 return false;
4988
4989 /* If DECL already has its assembler name set, it does not need a
4990 new one. */
4991 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
4992 || DECL_ASSEMBLER_NAME_SET_P (decl))
4993 return false;
4994
4995 /* Abstract decls do not need an assembler name. */
4996 if (DECL_ABSTRACT (decl))
4997 return false;
4998
4999 /* For VAR_DECLs, only static, public and external symbols need an
5000 assembler name. */
5001 if (TREE_CODE (decl) == VAR_DECL
5002 && !TREE_STATIC (decl)
5003 && !TREE_PUBLIC (decl)
5004 && !DECL_EXTERNAL (decl))
5005 return false;
5006
5007 if (TREE_CODE (decl) == FUNCTION_DECL)
5008 {
5009 /* Do not set assembler name on builtins. Allow RTL expansion to
5010 decide whether to expand inline or via a regular call. */
5011 if (DECL_BUILT_IN (decl)
5012 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5013 return false;
5014
5015 /* Functions represented in the callgraph need an assembler name. */
5016 if (cgraph_node::get (decl) != NULL)
5017 return true;
5018
5019 /* Unused and non-public functions don't need an assembler name. */
5020 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5021 return false;
5022 }
5023
5024 return true;
5025 }
5026
5027
5028 /* Reset all language specific information still present in symbol
5029 DECL. */
5030
5031 static void
5032 free_lang_data_in_decl (tree decl)
5033 {
5034 gcc_assert (DECL_P (decl));
5035
5036 /* Give the FE a chance to remove its own data first. */
5037 lang_hooks.free_lang_data (decl);
5038
5039 TREE_LANG_FLAG_0 (decl) = 0;
5040 TREE_LANG_FLAG_1 (decl) = 0;
5041 TREE_LANG_FLAG_2 (decl) = 0;
5042 TREE_LANG_FLAG_3 (decl) = 0;
5043 TREE_LANG_FLAG_4 (decl) = 0;
5044 TREE_LANG_FLAG_5 (decl) = 0;
5045 TREE_LANG_FLAG_6 (decl) = 0;
5046
5047 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5048 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5049 if (TREE_CODE (decl) == FIELD_DECL)
5050 {
5051 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5052 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5053 DECL_QUALIFIER (decl) = NULL_TREE;
5054 }
5055
5056 if (TREE_CODE (decl) == FUNCTION_DECL)
5057 {
5058 struct cgraph_node *node;
5059 if (!(node = cgraph_node::get (decl))
5060 || (!node->definition && !node->clones))
5061 {
5062 if (node)
5063 node->release_body ();
5064 else
5065 {
5066 release_function_body (decl);
5067 DECL_ARGUMENTS (decl) = NULL;
5068 DECL_RESULT (decl) = NULL;
5069 DECL_INITIAL (decl) = error_mark_node;
5070 }
5071 }
5072 if (gimple_has_body_p (decl))
5073 {
5074 tree t;
5075
5076 /* If DECL has a gimple body, then the context for its
5077 arguments must be DECL. Otherwise, it doesn't really
5078 matter, as we will not be emitting any code for DECL. In
5079 general, there may be other instances of DECL created by
5080 the front end and since PARM_DECLs are generally shared,
5081 their DECL_CONTEXT changes as the replicas of DECL are
5082 created. The only time where DECL_CONTEXT is important
5083 is for the FUNCTION_DECLs that have a gimple body (since
5084 the PARM_DECL will be used in the function's body). */
5085 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5086 DECL_CONTEXT (t) = decl;
5087 }
5088
5089 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5090 At this point, it is not needed anymore. */
5091 DECL_SAVED_TREE (decl) = NULL_TREE;
5092
5093 /* Clear the abstract origin if it refers to a method. Otherwise
5094 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5095 origin will not be output correctly. */
5096 if (DECL_ABSTRACT_ORIGIN (decl)
5097 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5098 && RECORD_OR_UNION_TYPE_P
5099 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5100 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5101
5102 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5103 DECL_VINDEX referring to itself into a vtable slot number as it
5104 should. Happens with functions that are copied and then forgotten
5105 about. Just clear it, it won't matter anymore. */
5106 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5107 DECL_VINDEX (decl) = NULL_TREE;
5108 }
5109 else if (TREE_CODE (decl) == VAR_DECL)
5110 {
5111 if ((DECL_EXTERNAL (decl)
5112 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5113 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5114 DECL_INITIAL (decl) = NULL_TREE;
5115 }
5116 else if (TREE_CODE (decl) == TYPE_DECL
5117 || TREE_CODE (decl) == FIELD_DECL)
5118 DECL_INITIAL (decl) = NULL_TREE;
5119 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5120 && DECL_INITIAL (decl)
5121 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5122 {
5123 /* Strip builtins from the translation-unit BLOCK. We still have targets
5124 without builtin_decl_explicit support and also builtins are shared
5125 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5126 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5127 while (*nextp)
5128 {
5129 tree var = *nextp;
5130 if (TREE_CODE (var) == FUNCTION_DECL
5131 && DECL_BUILT_IN (var))
5132 *nextp = TREE_CHAIN (var);
5133 else
5134 nextp = &TREE_CHAIN (var);
5135 }
5136 }
5137 }
5138
5139
5140 /* Data used when collecting DECLs and TYPEs for language data removal. */
5141
5142 struct free_lang_data_d
5143 {
5144 /* Worklist to avoid excessive recursion. */
5145 vec<tree> worklist;
5146
5147 /* Set of traversed objects. Used to avoid duplicate visits. */
5148 struct pointer_set_t *pset;
5149
5150 /* Array of symbols to process with free_lang_data_in_decl. */
5151 vec<tree> decls;
5152
5153 /* Array of types to process with free_lang_data_in_type. */
5154 vec<tree> types;
5155 };
5156
5157
5158 /* Save all language fields needed to generate proper debug information
5159 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5160
5161 static void
5162 save_debug_info_for_decl (tree t)
5163 {
5164 /*struct saved_debug_info_d *sdi;*/
5165
5166 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5167
5168 /* FIXME. Partial implementation for saving debug info removed. */
5169 }
5170
5171
5172 /* Save all language fields needed to generate proper debug information
5173 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5174
5175 static void
5176 save_debug_info_for_type (tree t)
5177 {
5178 /*struct saved_debug_info_d *sdi;*/
5179
5180 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5181
5182 /* FIXME. Partial implementation for saving debug info removed. */
5183 }
5184
5185
5186 /* Add type or decl T to one of the list of tree nodes that need their
5187 language data removed. The lists are held inside FLD. */
5188
5189 static void
5190 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5191 {
5192 if (DECL_P (t))
5193 {
5194 fld->decls.safe_push (t);
5195 if (debug_info_level > DINFO_LEVEL_TERSE)
5196 save_debug_info_for_decl (t);
5197 }
5198 else if (TYPE_P (t))
5199 {
5200 fld->types.safe_push (t);
5201 if (debug_info_level > DINFO_LEVEL_TERSE)
5202 save_debug_info_for_type (t);
5203 }
5204 else
5205 gcc_unreachable ();
5206 }
5207
5208 /* Push tree node T into FLD->WORKLIST. */
5209
5210 static inline void
5211 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5212 {
5213 if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
5214 fld->worklist.safe_push ((t));
5215 }
5216
5217
5218 /* Operand callback helper for free_lang_data_in_node. *TP is the
5219 subtree operand being considered. */
5220
5221 static tree
5222 find_decls_types_r (tree *tp, int *ws, void *data)
5223 {
5224 tree t = *tp;
5225 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5226
5227 if (TREE_CODE (t) == TREE_LIST)
5228 return NULL_TREE;
5229
5230 /* Language specific nodes will be removed, so there is no need
5231 to gather anything under them. */
5232 if (is_lang_specific (t))
5233 {
5234 *ws = 0;
5235 return NULL_TREE;
5236 }
5237
5238 if (DECL_P (t))
5239 {
5240 /* Note that walk_tree does not traverse every possible field in
5241 decls, so we have to do our own traversals here. */
5242 add_tree_to_fld_list (t, fld);
5243
5244 fld_worklist_push (DECL_NAME (t), fld);
5245 fld_worklist_push (DECL_CONTEXT (t), fld);
5246 fld_worklist_push (DECL_SIZE (t), fld);
5247 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5248
5249 /* We are going to remove everything under DECL_INITIAL for
5250 TYPE_DECLs. No point walking them. */
5251 if (TREE_CODE (t) != TYPE_DECL)
5252 fld_worklist_push (DECL_INITIAL (t), fld);
5253
5254 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5255 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5256
5257 if (TREE_CODE (t) == FUNCTION_DECL)
5258 {
5259 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5260 fld_worklist_push (DECL_RESULT (t), fld);
5261 }
5262 else if (TREE_CODE (t) == TYPE_DECL)
5263 {
5264 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5265 }
5266 else if (TREE_CODE (t) == FIELD_DECL)
5267 {
5268 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5269 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5270 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5271 fld_worklist_push (DECL_FCONTEXT (t), fld);
5272 }
5273
5274 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5275 && DECL_HAS_VALUE_EXPR_P (t))
5276 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5277
5278 if (TREE_CODE (t) != FIELD_DECL
5279 && TREE_CODE (t) != TYPE_DECL)
5280 fld_worklist_push (TREE_CHAIN (t), fld);
5281 *ws = 0;
5282 }
5283 else if (TYPE_P (t))
5284 {
5285 /* Note that walk_tree does not traverse every possible field in
5286 types, so we have to do our own traversals here. */
5287 add_tree_to_fld_list (t, fld);
5288
5289 if (!RECORD_OR_UNION_TYPE_P (t))
5290 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5291 fld_worklist_push (TYPE_SIZE (t), fld);
5292 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5293 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5294 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5295 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5296 fld_worklist_push (TYPE_NAME (t), fld);
5297 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not
5298 stream them and thus do not want to reach unused pointer types
5299 this way. */
5300 if (!POINTER_TYPE_P (t))
5301 fld_worklist_push (TYPE_MINVAL (t), fld);
5302 if (!RECORD_OR_UNION_TYPE_P (t))
5303 fld_worklist_push (TYPE_MAXVAL (t), fld);
5304 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5305 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5306 do not want to reach unused variants this way. */
5307 if (TYPE_CONTEXT (t))
5308 {
5309 tree ctx = TYPE_CONTEXT (t);
5310 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5311 So push that instead. */
5312 while (ctx && TREE_CODE (ctx) == BLOCK)
5313 ctx = BLOCK_SUPERCONTEXT (ctx);
5314 fld_worklist_push (ctx, fld);
5315 }
5316 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5317 want to reach unused types this way. */
5318
5319 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5320 {
5321 unsigned i;
5322 tree tem;
5323 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5324 fld_worklist_push (TREE_TYPE (tem), fld);
5325 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5326 if (tem
5327 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5328 && TREE_CODE (tem) == TREE_LIST)
5329 do
5330 {
5331 fld_worklist_push (TREE_VALUE (tem), fld);
5332 tem = TREE_CHAIN (tem);
5333 }
5334 while (tem);
5335 }
5336 if (RECORD_OR_UNION_TYPE_P (t))
5337 {
5338 tree tem;
5339 /* Push all TYPE_FIELDS - there can be interleaving interesting
5340 and non-interesting things. */
5341 tem = TYPE_FIELDS (t);
5342 while (tem)
5343 {
5344 if (TREE_CODE (tem) == FIELD_DECL
5345 || TREE_CODE (tem) == TYPE_DECL)
5346 fld_worklist_push (tem, fld);
5347 tem = TREE_CHAIN (tem);
5348 }
5349 }
5350
5351 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5352 *ws = 0;
5353 }
5354 else if (TREE_CODE (t) == BLOCK)
5355 {
5356 tree tem;
5357 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5358 fld_worklist_push (tem, fld);
5359 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5360 fld_worklist_push (tem, fld);
5361 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5362 }
5363
5364 if (TREE_CODE (t) != IDENTIFIER_NODE
5365 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5366 fld_worklist_push (TREE_TYPE (t), fld);
5367
5368 return NULL_TREE;
5369 }
5370
5371
5372 /* Find decls and types in T. */
5373
5374 static void
5375 find_decls_types (tree t, struct free_lang_data_d *fld)
5376 {
5377 while (1)
5378 {
5379 if (!pointer_set_contains (fld->pset, t))
5380 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5381 if (fld->worklist.is_empty ())
5382 break;
5383 t = fld->worklist.pop ();
5384 }
5385 }
5386
5387 /* Translate all the types in LIST with the corresponding runtime
5388 types. */
5389
5390 static tree
5391 get_eh_types_for_runtime (tree list)
5392 {
5393 tree head, prev;
5394
5395 if (list == NULL_TREE)
5396 return NULL_TREE;
5397
5398 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5399 prev = head;
5400 list = TREE_CHAIN (list);
5401 while (list)
5402 {
5403 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5404 TREE_CHAIN (prev) = n;
5405 prev = TREE_CHAIN (prev);
5406 list = TREE_CHAIN (list);
5407 }
5408
5409 return head;
5410 }
5411
5412
5413 /* Find decls and types referenced in EH region R and store them in
5414 FLD->DECLS and FLD->TYPES. */
5415
5416 static void
5417 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5418 {
5419 switch (r->type)
5420 {
5421 case ERT_CLEANUP:
5422 break;
5423
5424 case ERT_TRY:
5425 {
5426 eh_catch c;
5427
5428 /* The types referenced in each catch must first be changed to the
5429 EH types used at runtime. This removes references to FE types
5430 in the region. */
5431 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5432 {
5433 c->type_list = get_eh_types_for_runtime (c->type_list);
5434 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5435 }
5436 }
5437 break;
5438
5439 case ERT_ALLOWED_EXCEPTIONS:
5440 r->u.allowed.type_list
5441 = get_eh_types_for_runtime (r->u.allowed.type_list);
5442 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5443 break;
5444
5445 case ERT_MUST_NOT_THROW:
5446 walk_tree (&r->u.must_not_throw.failure_decl,
5447 find_decls_types_r, fld, fld->pset);
5448 break;
5449 }
5450 }
5451
5452
5453 /* Find decls and types referenced in cgraph node N and store them in
5454 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5455 look for *every* kind of DECL and TYPE node reachable from N,
5456 including those embedded inside types and decls (i.e., TYPE_DECLs,
5457 NAMESPACE_DECLs, etc.). */
5458
5459 static void
5460 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5461 {
5462 basic_block bb;
5463 struct function *fn;
5464 unsigned ix;
5465 tree t;
5466
5467 find_decls_types (n->decl, fld);
5468
5469 if (!gimple_has_body_p (n->decl))
5470 return;
5471
5472 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5473
5474 fn = DECL_STRUCT_FUNCTION (n->decl);
5475
5476 /* Traverse locals. */
5477 FOR_EACH_LOCAL_DECL (fn, ix, t)
5478 find_decls_types (t, fld);
5479
5480 /* Traverse EH regions in FN. */
5481 {
5482 eh_region r;
5483 FOR_ALL_EH_REGION_FN (r, fn)
5484 find_decls_types_in_eh_region (r, fld);
5485 }
5486
5487 /* Traverse every statement in FN. */
5488 FOR_EACH_BB_FN (bb, fn)
5489 {
5490 gimple_stmt_iterator si;
5491 unsigned i;
5492
5493 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5494 {
5495 gimple phi = gsi_stmt (si);
5496
5497 for (i = 0; i < gimple_phi_num_args (phi); i++)
5498 {
5499 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5500 find_decls_types (*arg_p, fld);
5501 }
5502 }
5503
5504 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5505 {
5506 gimple stmt = gsi_stmt (si);
5507
5508 if (is_gimple_call (stmt))
5509 find_decls_types (gimple_call_fntype (stmt), fld);
5510
5511 for (i = 0; i < gimple_num_ops (stmt); i++)
5512 {
5513 tree arg = gimple_op (stmt, i);
5514 find_decls_types (arg, fld);
5515 }
5516 }
5517 }
5518 }
5519
5520
5521 /* Find decls and types referenced in varpool node N and store them in
5522 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5523 look for *every* kind of DECL and TYPE node reachable from N,
5524 including those embedded inside types and decls (i.e., TYPE_DECLs,
5525 NAMESPACE_DECLs, etc.). */
5526
5527 static void
5528 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5529 {
5530 find_decls_types (v->decl, fld);
5531 }
5532
5533 /* If T needs an assembler name, have one created for it. */
5534
5535 void
5536 assign_assembler_name_if_neeeded (tree t)
5537 {
5538 if (need_assembler_name_p (t))
5539 {
5540 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5541 diagnostics that use input_location to show locus
5542 information. The problem here is that, at this point,
5543 input_location is generally anchored to the end of the file
5544 (since the parser is long gone), so we don't have a good
5545 position to pin it to.
5546
5547 To alleviate this problem, this uses the location of T's
5548 declaration. Examples of this are
5549 testsuite/g++.dg/template/cond2.C and
5550 testsuite/g++.dg/template/pr35240.C. */
5551 location_t saved_location = input_location;
5552 input_location = DECL_SOURCE_LOCATION (t);
5553
5554 decl_assembler_name (t);
5555
5556 input_location = saved_location;
5557 }
5558 }
5559
5560
5561 /* Free language specific information for every operand and expression
5562 in every node of the call graph. This process operates in three stages:
5563
5564 1- Every callgraph node and varpool node is traversed looking for
5565 decls and types embedded in them. This is a more exhaustive
5566 search than that done by find_referenced_vars, because it will
5567 also collect individual fields, decls embedded in types, etc.
5568
5569 2- All the decls found are sent to free_lang_data_in_decl.
5570
5571 3- All the types found are sent to free_lang_data_in_type.
5572
5573 The ordering between decls and types is important because
5574 free_lang_data_in_decl sets assembler names, which includes
5575 mangling. So types cannot be freed up until assembler names have
5576 been set up. */
5577
5578 static void
5579 free_lang_data_in_cgraph (void)
5580 {
5581 struct cgraph_node *n;
5582 varpool_node *v;
5583 struct free_lang_data_d fld;
5584 tree t;
5585 unsigned i;
5586 alias_pair *p;
5587
5588 /* Initialize sets and arrays to store referenced decls and types. */
5589 fld.pset = pointer_set_create ();
5590 fld.worklist.create (0);
5591 fld.decls.create (100);
5592 fld.types.create (100);
5593
5594 /* Find decls and types in the body of every function in the callgraph. */
5595 FOR_EACH_FUNCTION (n)
5596 find_decls_types_in_node (n, &fld);
5597
5598 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5599 find_decls_types (p->decl, &fld);
5600
5601 /* Find decls and types in every varpool symbol. */
5602 FOR_EACH_VARIABLE (v)
5603 find_decls_types_in_var (v, &fld);
5604
5605 /* Set the assembler name on every decl found. We need to do this
5606 now because free_lang_data_in_decl will invalidate data needed
5607 for mangling. This breaks mangling on interdependent decls. */
5608 FOR_EACH_VEC_ELT (fld.decls, i, t)
5609 assign_assembler_name_if_neeeded (t);
5610
5611 /* Traverse every decl found freeing its language data. */
5612 FOR_EACH_VEC_ELT (fld.decls, i, t)
5613 free_lang_data_in_decl (t);
5614
5615 /* Traverse every type found freeing its language data. */
5616 FOR_EACH_VEC_ELT (fld.types, i, t)
5617 free_lang_data_in_type (t);
5618
5619 pointer_set_destroy (fld.pset);
5620 fld.worklist.release ();
5621 fld.decls.release ();
5622 fld.types.release ();
5623 }
5624
5625
5626 /* Free resources that are used by the FE but are not needed once it is done. */
5627
5628 static unsigned
5629 free_lang_data (void)
5630 {
5631 unsigned i;
5632
5633 /* If we are the LTO frontend we have freed lang-specific data already. */
5634 if (in_lto_p
5635 || !flag_generate_lto)
5636 return 0;
5637
5638 /* Allocate and assign alias sets to the standard integer types
5639 while the slots are still in the way the frontends generated them. */
5640 for (i = 0; i < itk_none; ++i)
5641 if (integer_types[i])
5642 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5643
5644 /* Traverse the IL resetting language specific information for
5645 operands, expressions, etc. */
5646 free_lang_data_in_cgraph ();
5647
5648 /* Create gimple variants for common types. */
5649 ptrdiff_type_node = integer_type_node;
5650 fileptr_type_node = ptr_type_node;
5651
5652 /* Reset some langhooks. Do not reset types_compatible_p, it may
5653 still be used indirectly via the get_alias_set langhook. */
5654 lang_hooks.dwarf_name = lhd_dwarf_name;
5655 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5656 /* We do not want the default decl_assembler_name implementation,
5657 rather if we have fixed everything we want a wrapper around it
5658 asserting that all non-local symbols already got their assembler
5659 name and only produce assembler names for local symbols. Or rather
5660 make sure we never call decl_assembler_name on local symbols and
5661 devise a separate, middle-end private scheme for it. */
5662
5663 /* Reset diagnostic machinery. */
5664 tree_diagnostics_defaults (global_dc);
5665
5666 return 0;
5667 }
5668
5669
5670 namespace {
5671
5672 const pass_data pass_data_ipa_free_lang_data =
5673 {
5674 SIMPLE_IPA_PASS, /* type */
5675 "*free_lang_data", /* name */
5676 OPTGROUP_NONE, /* optinfo_flags */
5677 TV_IPA_FREE_LANG_DATA, /* tv_id */
5678 0, /* properties_required */
5679 0, /* properties_provided */
5680 0, /* properties_destroyed */
5681 0, /* todo_flags_start */
5682 0, /* todo_flags_finish */
5683 };
5684
5685 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5686 {
5687 public:
5688 pass_ipa_free_lang_data (gcc::context *ctxt)
5689 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5690 {}
5691
5692 /* opt_pass methods: */
5693 virtual unsigned int execute (function *) { return free_lang_data (); }
5694
5695 }; // class pass_ipa_free_lang_data
5696
5697 } // anon namespace
5698
5699 simple_ipa_opt_pass *
5700 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5701 {
5702 return new pass_ipa_free_lang_data (ctxt);
5703 }
5704
5705 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5706 ATTR_NAME. Also used internally by remove_attribute(). */
5707 bool
5708 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5709 {
5710 size_t ident_len = IDENTIFIER_LENGTH (ident);
5711
5712 if (ident_len == attr_len)
5713 {
5714 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5715 return true;
5716 }
5717 else if (ident_len == attr_len + 4)
5718 {
5719 /* There is the possibility that ATTR is 'text' and IDENT is
5720 '__text__'. */
5721 const char *p = IDENTIFIER_POINTER (ident);
5722 if (p[0] == '_' && p[1] == '_'
5723 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5724 && strncmp (attr_name, p + 2, attr_len) == 0)
5725 return true;
5726 }
5727
5728 return false;
5729 }
5730
5731 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5732 of ATTR_NAME, and LIST is not NULL_TREE. */
5733 tree
5734 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5735 {
5736 while (list)
5737 {
5738 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5739
5740 if (ident_len == attr_len)
5741 {
5742 if (!strcmp (attr_name,
5743 IDENTIFIER_POINTER (get_attribute_name (list))))
5744 break;
5745 }
5746 /* TODO: If we made sure that attributes were stored in the
5747 canonical form without '__...__' (i.e., as in 'text' as opposed
5748 to '__text__') then we could avoid the following case. */
5749 else if (ident_len == attr_len + 4)
5750 {
5751 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5752 if (p[0] == '_' && p[1] == '_'
5753 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5754 && strncmp (attr_name, p + 2, attr_len) == 0)
5755 break;
5756 }
5757 list = TREE_CHAIN (list);
5758 }
5759
5760 return list;
5761 }
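/* Illustrative sketch, not part of the original source and kept out of
   the build: the usual entry point is the lookup_attribute wrapper in
   tree.h, which passes strlen (ATTR_NAME) down to
   private_lookup_attribute above.  */
#if 0
static bool
example_type_is_packed (tree type)
{
  return lookup_attribute ("packed", TYPE_ATTRIBUTES (type)) != NULL_TREE;
}
#endif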
5762
5763 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5764 return a pointer to the first element of LIST whose attribute name
5765 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5766 '__text__'). */
5767
5768 tree
5769 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5770 tree list)
5771 {
5772 while (list)
5773 {
5774 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5775
5776 if (attr_len > ident_len)
5777 {
5778 list = TREE_CHAIN (list);
5779 continue;
5780 }
5781
5782 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5783
5784 if (strncmp (attr_name, p, attr_len) == 0)
5785 break;
5786
5787 /* TODO: If we made sure that attributes were stored in the
5788 canonical form without '__...__' (i.e., as in 'text' as opposed
5789 to '__text__') then we could avoid the following case. */
5790 if (p[0] == '_' && p[1] == '_' &&
5791 strncmp (attr_name, p + 2, attr_len) == 0)
5792 break;
5793
5794 list = TREE_CHAIN (list);
5795 }
5796
5797 return list;
5798 }
5799
5800
5801 /* A variant of lookup_attribute() that can be used with an identifier
5802 as the first argument, and where the identifier can be either
5803 'text' or '__text__'.
5804
5805 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5806 return a pointer to the attribute's list element if the attribute
5807 is part of the list, or NULL_TREE if not found. If the attribute
5808 appears more than once, this only returns the first occurrence; the
5809 TREE_CHAIN of the return value should be passed back in if further
5810 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5811 can be in the form 'text' or '__text__'. */
5812 static tree
5813 lookup_ident_attribute (tree attr_identifier, tree list)
5814 {
5815 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5816
5817 while (list)
5818 {
5819 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5820 == IDENTIFIER_NODE);
5821
5822 /* Identifiers can be compared directly for equality. */
5823 if (attr_identifier == get_attribute_name (list))
5824 break;
5825
5826 /* If they are not equal, they may still be one in the form
5827 'text' while the other one is in the form '__text__'. TODO:
5828 If we were storing attributes in normalized 'text' form, then
5829 this could all go away and we could take full advantage of
5830 the fact that we're comparing identifiers. :-) */
5831 {
5832 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5833 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5834
5835 if (ident_len == attr_len + 4)
5836 {
5837 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5838 const char *q = IDENTIFIER_POINTER (attr_identifier);
5839 if (p[0] == '_' && p[1] == '_'
5840 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5841 && strncmp (q, p + 2, attr_len) == 0)
5842 break;
5843 }
5844 else if (ident_len + 4 == attr_len)
5845 {
5846 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5847 const char *q = IDENTIFIER_POINTER (attr_identifier);
5848 if (q[0] == '_' && q[1] == '_'
5849 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5850 && strncmp (q + 2, p, ident_len) == 0)
5851 break;
5852 }
5853 }
5854 list = TREE_CHAIN (list);
5855 }
5856
5857 return list;
5858 }
5859
5860 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5861 modified list. */
5862
5863 tree
5864 remove_attribute (const char *attr_name, tree list)
5865 {
5866 tree *p;
5867 size_t attr_len = strlen (attr_name);
5868
5869 gcc_checking_assert (attr_name[0] != '_');
5870
5871 for (p = &list; *p; )
5872 {
5873 tree l = *p;
5874 /* TODO: If we were storing attributes in normalized form, here
5875 we could use a simple strcmp(). */
5876 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5877 *p = TREE_CHAIN (l);
5878 else
5879 p = &TREE_CHAIN (l);
5880 }
5881
5882 return list;
5883 }
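
/* Editor's sketch (not part of tree.c): one plausible way remove_attribute
   pairs with tree_cons when manipulating a DECL_ATTRIBUTES chain.  The decl
   and the "deprecated" attribute name are only an illustration; the helper
   below is hypothetical.  */
#if 0  /* Example only; not compiled.  */
static void
example_strip_deprecated (tree decl)
{
  /* Attach an argument-less attribute...  */
  DECL_ATTRIBUTES (decl)
    = tree_cons (get_identifier ("deprecated"), NULL_TREE,
		 DECL_ATTRIBUTES (decl));

  /* ...and later drop every instance of it again.  remove_attribute
     expects the name without the '__' wrappers.  */
  DECL_ATTRIBUTES (decl)
    = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));
}
#endif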
5884
5885 /* Return an attribute list that is the union of A1 and A2. */
5886
5887 tree
5888 merge_attributes (tree a1, tree a2)
5889 {
5890 tree attributes;
5891
5892 /* Either one unset? Take the set one. */
5893
5894 if ((attributes = a1) == 0)
5895 attributes = a2;
5896
5897 /* One that completely contains the other? Take it. */
5898
5899 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5900 {
5901 if (attribute_list_contained (a2, a1))
5902 attributes = a2;
5903 else
5904 {
5905 /* Pick the longest list, and hang on the other list. */
5906
5907 if (list_length (a1) < list_length (a2))
5908 attributes = a2, a2 = a1;
5909
5910 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5911 {
5912 tree a;
5913 for (a = lookup_ident_attribute (get_attribute_name (a2),
5914 attributes);
5915 a != NULL_TREE && !attribute_value_equal (a, a2);
5916 a = lookup_ident_attribute (get_attribute_name (a2),
5917 TREE_CHAIN (a)))
5918 ;
5919 if (a == NULL_TREE)
5920 {
5921 a1 = copy_node (a2);
5922 TREE_CHAIN (a1) = attributes;
5923 attributes = a1;
5924 }
5925 }
5926 }
5927 }
5928 return attributes;
5929 }
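
/* Editor's sketch (not part of tree.c): merge_attributes keeps a single copy
   of attributes that appear in both lists with equal values.  The attribute
   names used here are only an illustration.  */
#if 0  /* Example only; not compiled.  */
static tree
example_merge (void)
{
  tree a1 = tree_cons (get_identifier ("used"), NULL_TREE, NULL_TREE);
  tree a2 = tree_cons (get_identifier ("used"), NULL_TREE,
		       tree_cons (get_identifier ("cold"), NULL_TREE,
				  NULL_TREE));

  /* The result contains "used" once and "cold" once.  */
  return merge_attributes (a1, a2);
}
#endif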
5930
5931 /* Given types T1 and T2, merge their attributes and return
5932 the result. */
5933
5934 tree
5935 merge_type_attributes (tree t1, tree t2)
5936 {
5937 return merge_attributes (TYPE_ATTRIBUTES (t1),
5938 TYPE_ATTRIBUTES (t2));
5939 }
5940
5941 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5942 the result. */
5943
5944 tree
5945 merge_decl_attributes (tree olddecl, tree newdecl)
5946 {
5947 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5948 DECL_ATTRIBUTES (newdecl));
5949 }
5950
5951 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5952
5953 /* Specialization of merge_decl_attributes for various Windows targets.
5954
5955 This handles the following situation:
5956
5957 __declspec (dllimport) int foo;
5958 int foo;
5959
5960 The second instance of `foo' nullifies the dllimport. */
5961
5962 tree
5963 merge_dllimport_decl_attributes (tree old, tree new_tree)
5964 {
5965 tree a;
5966 int delete_dllimport_p = 1;
5967
5968 /* What we need to do here is remove dllimport from `old' if it doesn't
5969 appear in `new'. dllimport behaves like extern: if a declaration is
5970 marked dllimport and a definition appears later, then the object
5971 is not dllimport'd. We also remove a `new' dllimport if the old list
5972 contains dllexport: dllexport always overrides dllimport, regardless
5973 of the order of declaration. */
5974 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
5975 delete_dllimport_p = 0;
5976 else if (DECL_DLLIMPORT_P (new_tree)
5977 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
5978 {
5979 DECL_DLLIMPORT_P (new_tree) = 0;
5980 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
5981 "dllimport ignored", new_tree);
5982 }
5983 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
5984 {
5985 /* Warn about overriding a symbol that has already been used, e.g.:
5986 extern int __attribute__ ((dllimport)) foo;
5987 int* bar () {return &foo;}
5988 int foo;
5989 */
5990 if (TREE_USED (old))
5991 {
5992 warning (0, "%q+D redeclared without dllimport attribute "
5993 "after being referenced with dll linkage", new_tree);
5994 /* If we have used a variable's address with dllimport linkage,
5995 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
5996 decl may already have had TREE_CONSTANT computed.
5997 We still remove the attribute so that assembler code refers
5998 to '&foo' rather than '_imp__foo'. */
5999 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6000 DECL_DLLIMPORT_P (new_tree) = 1;
6001 }
6002
6003 /* Let an inline definition silently override the external reference,
6004 but otherwise warn about attribute inconsistency. */
6005 else if (TREE_CODE (new_tree) == VAR_DECL
6006 || !DECL_DECLARED_INLINE_P (new_tree))
6007 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6008 "previous dllimport ignored", new_tree);
6009 }
6010 else
6011 delete_dllimport_p = 0;
6012
6013 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6014
6015 if (delete_dllimport_p)
6016 a = remove_attribute ("dllimport", a);
6017
6018 return a;
6019 }
6020
6021 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6022 struct attribute_spec.handler. */
6023
6024 tree
6025 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6026 bool *no_add_attrs)
6027 {
6028 tree node = *pnode;
6029 bool is_dllimport;
6030
6031 /* These attributes may apply to structure and union types being created,
6032 but otherwise should pass to the declaration involved. */
6033 if (!DECL_P (node))
6034 {
6035 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6036 | (int) ATTR_FLAG_ARRAY_NEXT))
6037 {
6038 *no_add_attrs = true;
6039 return tree_cons (name, args, NULL_TREE);
6040 }
6041 if (TREE_CODE (node) == RECORD_TYPE
6042 || TREE_CODE (node) == UNION_TYPE)
6043 {
6044 node = TYPE_NAME (node);
6045 if (!node)
6046 return NULL_TREE;
6047 }
6048 else
6049 {
6050 warning (OPT_Wattributes, "%qE attribute ignored",
6051 name);
6052 *no_add_attrs = true;
6053 return NULL_TREE;
6054 }
6055 }
6056
6057 if (TREE_CODE (node) != FUNCTION_DECL
6058 && TREE_CODE (node) != VAR_DECL
6059 && TREE_CODE (node) != TYPE_DECL)
6060 {
6061 *no_add_attrs = true;
6062 warning (OPT_Wattributes, "%qE attribute ignored",
6063 name);
6064 return NULL_TREE;
6065 }
6066
6067 if (TREE_CODE (node) == TYPE_DECL
6068 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6069 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6070 {
6071 *no_add_attrs = true;
6072 warning (OPT_Wattributes, "%qE attribute ignored",
6073 name);
6074 return NULL_TREE;
6075 }
6076
6077 is_dllimport = is_attribute_p ("dllimport", name);
6078
6079 /* Report error on dllimport ambiguities seen now before they cause
6080 any damage. */
6081 if (is_dllimport)
6082 {
6083 /* Honor any target-specific overrides. */
6084 if (!targetm.valid_dllimport_attribute_p (node))
6085 *no_add_attrs = true;
6086
6087 else if (TREE_CODE (node) == FUNCTION_DECL
6088 && DECL_DECLARED_INLINE_P (node))
6089 {
6090 warning (OPT_Wattributes, "inline function %q+D declared as "
6091 " dllimport: attribute ignored", node);
6092 *no_add_attrs = true;
6093 }
6094 /* Like MS, treat definition of dllimported variables and
6095 non-inlined functions on declaration as syntax errors. */
6096 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6097 {
6098 error ("function %q+D definition is marked dllimport", node);
6099 *no_add_attrs = true;
6100 }
6101
6102 else if (TREE_CODE (node) == VAR_DECL)
6103 {
6104 if (DECL_INITIAL (node))
6105 {
6106 error ("variable %q+D definition is marked dllimport",
6107 node);
6108 *no_add_attrs = true;
6109 }
6110
6111 /* `extern' needn't be specified with dllimport.
6112 Specify `extern' now and hope for the best. Sigh. */
6113 DECL_EXTERNAL (node) = 1;
6114 /* Also, implicitly give global scope to dllimport'd variables
6115 declared within a function, unless they are declared static. */
6116 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6117 TREE_PUBLIC (node) = 1;
6118 }
6119
6120 if (*no_add_attrs == false)
6121 DECL_DLLIMPORT_P (node) = 1;
6122 }
6123 else if (TREE_CODE (node) == FUNCTION_DECL
6124 && DECL_DECLARED_INLINE_P (node)
6125 && flag_keep_inline_dllexport)
6126 /* An exported function, even if inline, must be emitted. */
6127 DECL_EXTERNAL (node) = 0;
6128
6129 /* Report error if symbol is not accessible at global scope. */
6130 if (!TREE_PUBLIC (node)
6131 && (TREE_CODE (node) == VAR_DECL
6132 || TREE_CODE (node) == FUNCTION_DECL))
6133 {
6134 error ("external linkage required for symbol %q+D because of "
6135 "%qE attribute", node, name);
6136 *no_add_attrs = true;
6137 }
6138
6139 /* A dllexport'd entity must have default visibility so that other
6140 program units (shared libraries or the main executable) can see
6141 it. A dllimport'd entity must have default visibility so that
6142 the linker knows that undefined references within this program
6143 unit can be resolved by the dynamic linker. */
6144 if (!*no_add_attrs)
6145 {
6146 if (DECL_VISIBILITY_SPECIFIED (node)
6147 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6148 error ("%qE implies default visibility, but %qD has already "
6149 "been declared with a different visibility",
6150 name, node);
6151 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6152 DECL_VISIBILITY_SPECIFIED (node) = 1;
6153 }
6154
6155 return NULL_TREE;
6156 }
6157
6158 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6159 \f
6160 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6161 of the various TYPE_QUAL values. */
6162
6163 static void
6164 set_type_quals (tree type, int type_quals)
6165 {
6166 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6167 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6168 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6169 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6170 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6171 }
6172
6173 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6174
6175 bool
6176 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6177 {
6178 return (TYPE_QUALS (cand) == type_quals
6179 && TYPE_NAME (cand) == TYPE_NAME (base)
6180 /* Apparently this is needed for Objective-C. */
6181 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6182 /* Check alignment. */
6183 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6184 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6185 TYPE_ATTRIBUTES (base)));
6186 }
6187
6188 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6189
6190 static bool
6191 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6192 {
6193 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6194 && TYPE_NAME (cand) == TYPE_NAME (base)
6195 /* Apparently this is needed for Objective-C. */
6196 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6197 /* Check alignment. */
6198 && TYPE_ALIGN (cand) == align
6199 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6200 TYPE_ATTRIBUTES (base)));
6201 }
6202
6203 /* This function checks to see if TYPE matches the size of one of the
6204 built-in atomic types, and returns that core atomic type. */
6205
6206 static tree
6207 find_atomic_core_type (tree type)
6208 {
6209 tree base_atomic_type;
6210
6211 /* Only handle complete types. */
6212 if (TYPE_SIZE (type) == NULL_TREE)
6213 return NULL_TREE;
6214
6215 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6216 switch (type_size)
6217 {
6218 case 8:
6219 base_atomic_type = atomicQI_type_node;
6220 break;
6221
6222 case 16:
6223 base_atomic_type = atomicHI_type_node;
6224 break;
6225
6226 case 32:
6227 base_atomic_type = atomicSI_type_node;
6228 break;
6229
6230 case 64:
6231 base_atomic_type = atomicDI_type_node;
6232 break;
6233
6234 case 128:
6235 base_atomic_type = atomicTI_type_node;
6236 break;
6237
6238 default:
6239 base_atomic_type = NULL_TREE;
6240 }
6241
6242 return base_atomic_type;
6243 }
6244
6245 /* Return a version of the TYPE, qualified as indicated by the
6246 TYPE_QUALS, if one exists. If no qualified version exists yet,
6247 return NULL_TREE. */
6248
6249 tree
6250 get_qualified_type (tree type, int type_quals)
6251 {
6252 tree t;
6253
6254 if (TYPE_QUALS (type) == type_quals)
6255 return type;
6256
6257 /* Search the chain of variants to see if there is already one there just
6258 like the one we need to have. If so, use that existing one. We must
6259 preserve the TYPE_NAME, since there is code that depends on this. */
6260 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6261 if (check_qualified_type (t, type, type_quals))
6262 return t;
6263
6264 return NULL_TREE;
6265 }
6266
6267 /* Like get_qualified_type, but creates the type if it does not
6268 exist. This function never returns NULL_TREE. */
6269
6270 tree
6271 build_qualified_type (tree type, int type_quals)
6272 {
6273 tree t;
6274
6275 /* See if we already have the appropriate qualified variant. */
6276 t = get_qualified_type (type, type_quals);
6277
6278 /* If not, build it. */
6279 if (!t)
6280 {
6281 t = build_variant_type_copy (type);
6282 set_type_quals (t, type_quals);
6283
6284 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6285 {
6286 /* See if this object can map to a basic atomic type. */
6287 tree atomic_type = find_atomic_core_type (type);
6288 if (atomic_type)
6289 {
6290 /* Ensure the alignment of this type is compatible with
6291 the required alignment of the atomic type. */
6292 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6293 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6294 }
6295 }
6296
6297 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6298 /* Propagate structural equality. */
6299 SET_TYPE_STRUCTURAL_EQUALITY (t);
6300 else if (TYPE_CANONICAL (type) != type)
6301 /* Build the underlying canonical type, since it is different
6302 from TYPE. */
6303 {
6304 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6305 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6306 }
6307 else
6308 /* T is its own canonical type. */
6309 TYPE_CANONICAL (t) = t;
6310
6311 }
6312
6313 return t;
6314 }
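
/* Editor's sketch (not part of tree.c): the usual pattern is to ask for an
   existing variant first and fall back to building one; build_qualified_type
   already does both.  A const-volatile variant of integer_type_node is used
   purely as an illustration.  */
#if 0  /* Example only; not compiled.  */
static tree
example_const_volatile_int (void)
{
  int quals = TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE;

  /* Returns NULL_TREE if no such variant has been created yet...  */
  tree cached = get_qualified_type (integer_type_node, quals);

  /* ...whereas this always succeeds, reusing CACHED when it exists.  */
  tree cv_int = build_qualified_type (integer_type_node, quals);

  gcc_assert (cached == NULL_TREE || cached == cv_int);
  return cv_int;
}
#endif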
6315
6316 /* Create a variant of type T with alignment ALIGN. */
6317
6318 tree
6319 build_aligned_type (tree type, unsigned int align)
6320 {
6321 tree t;
6322
6323 if (TYPE_PACKED (type)
6324 || TYPE_ALIGN (type) == align)
6325 return type;
6326
6327 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6328 if (check_aligned_type (t, type, align))
6329 return t;
6330
6331 t = build_variant_type_copy (type);
6332 TYPE_ALIGN (t) = align;
6333
6334 return t;
6335 }
6336
6337 /* Create a new distinct copy of TYPE. The new type is made its own
6338 MAIN_VARIANT. If TYPE requires structural equality checks, the
6339 resulting type requires structural equality checks; otherwise, its
6340 TYPE_CANONICAL points to itself. */
6341
6342 tree
6343 build_distinct_type_copy (tree type)
6344 {
6345 tree t = copy_node (type);
6346
6347 TYPE_POINTER_TO (t) = 0;
6348 TYPE_REFERENCE_TO (t) = 0;
6349
6350 /* Set the canonical type either to a new equivalence class, or
6351 propagate the need for structural equality checks. */
6352 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6353 SET_TYPE_STRUCTURAL_EQUALITY (t);
6354 else
6355 TYPE_CANONICAL (t) = t;
6356
6357 /* Make it its own variant. */
6358 TYPE_MAIN_VARIANT (t) = t;
6359 TYPE_NEXT_VARIANT (t) = 0;
6360
6361 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6362 whose TREE_TYPE is not t. This can also happen in the Ada
6363 frontend when using subtypes. */
6364
6365 return t;
6366 }
6367
6368 /* Create a new variant of TYPE, equivalent but distinct. This is so
6369 the caller can modify it. TYPE_CANONICAL for the return type will
6370 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6371 are considered equal by the language itself (or that both types
6372 require structural equality checks). */
6373
6374 tree
6375 build_variant_type_copy (tree type)
6376 {
6377 tree t, m = TYPE_MAIN_VARIANT (type);
6378
6379 t = build_distinct_type_copy (type);
6380
6381 /* Since we're building a variant, assume that it is a non-semantic
6382 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6383 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6384
6385 /* Add the new type to the chain of variants of TYPE. */
6386 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6387 TYPE_NEXT_VARIANT (m) = t;
6388 TYPE_MAIN_VARIANT (t) = m;
6389
6390 return t;
6391 }
6392 \f
6393 /* Return true if the from trees in both tree maps are equal. */
6394
6395 int
6396 tree_map_base_eq (const void *va, const void *vb)
6397 {
6398 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6399 *const b = (const struct tree_map_base *) vb;
6400 return (a->from == b->from);
6401 }
6402
6403 /* Hash a from tree in a tree_map_base. */
6404
6405 unsigned int
6406 tree_map_base_hash (const void *item)
6407 {
6408 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6409 }
6410
6411 /* Return true if this tree map structure is marked for garbage collection
6412 purposes. We simply return true if the from tree is marked, so that this
6413 structure goes away when the from tree goes away. */
6414
6415 int
6416 tree_map_base_marked_p (const void *p)
6417 {
6418 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6419 }
6420
6421 /* Hash a from tree in a tree_map. */
6422
6423 unsigned int
6424 tree_map_hash (const void *item)
6425 {
6426 return (((const struct tree_map *) item)->hash);
6427 }
6428
6429 /* Hash a from tree in a tree_decl_map. */
6430
6431 unsigned int
6432 tree_decl_map_hash (const void *item)
6433 {
6434 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6435 }
6436
6437 /* Return the initialization priority for DECL. */
6438
6439 priority_type
6440 decl_init_priority_lookup (tree decl)
6441 {
6442 symtab_node *snode = symtab_node::get (decl);
6443
6444 if (!snode)
6445 return DEFAULT_INIT_PRIORITY;
6446 return
6447 snode->get_init_priority ();
6448 }
6449
6450 /* Return the finalization priority for DECL. */
6451
6452 priority_type
6453 decl_fini_priority_lookup (tree decl)
6454 {
6455 cgraph_node *node = cgraph_node::get (decl);
6456
6457 if (!node)
6458 return DEFAULT_INIT_PRIORITY;
6459 return
6460 node->get_fini_priority ();
6461 }
6462
6463 /* Set the initialization priority for DECL to PRIORITY. */
6464
6465 void
6466 decl_init_priority_insert (tree decl, priority_type priority)
6467 {
6468 struct symtab_node *snode;
6469
6470 if (priority == DEFAULT_INIT_PRIORITY)
6471 {
6472 snode = symtab_node::get (decl);
6473 if (!snode)
6474 return;
6475 }
6476 else if (TREE_CODE (decl) == VAR_DECL)
6477 snode = varpool_node::get_create (decl);
6478 else
6479 snode = cgraph_node::get_create (decl);
6480 snode->set_init_priority (priority);
6481 }
6482
6483 /* Set the finalization priority for DECL to PRIORITY. */
6484
6485 void
6486 decl_fini_priority_insert (tree decl, priority_type priority)
6487 {
6488 struct cgraph_node *node;
6489
6490 if (priority == DEFAULT_INIT_PRIORITY)
6491 {
6492 node = cgraph_node::get (decl);
6493 if (!node)
6494 return;
6495 }
6496 else
6497 node = cgraph_node::get_create (decl);
6498 node->set_fini_priority (priority);
6499 }
6500
6501 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6502
6503 static void
6504 print_debug_expr_statistics (void)
6505 {
6506 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6507 (long) htab_size (debug_expr_for_decl),
6508 (long) htab_elements (debug_expr_for_decl),
6509 htab_collisions (debug_expr_for_decl));
6510 }
6511
6512 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6513
6514 static void
6515 print_value_expr_statistics (void)
6516 {
6517 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6518 (long) htab_size (value_expr_for_decl),
6519 (long) htab_elements (value_expr_for_decl),
6520 htab_collisions (value_expr_for_decl));
6521 }
6522
6523 /* Lookup a debug expression for FROM, and return it if we find one. */
6524
6525 tree
6526 decl_debug_expr_lookup (tree from)
6527 {
6528 struct tree_decl_map *h, in;
6529 in.base.from = from;
6530
6531 h = (struct tree_decl_map *)
6532 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6533 if (h)
6534 return h->to;
6535 return NULL_TREE;
6536 }
6537
6538 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6539
6540 void
6541 decl_debug_expr_insert (tree from, tree to)
6542 {
6543 struct tree_decl_map *h;
6544 void **loc;
6545
6546 h = ggc_alloc<tree_decl_map> ();
6547 h->base.from = from;
6548 h->to = to;
6549 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6550 INSERT);
6551 *(struct tree_decl_map **) loc = h;
6552 }
6553
6554 /* Lookup a value expression for FROM, and return it if we find one. */
6555
6556 tree
6557 decl_value_expr_lookup (tree from)
6558 {
6559 struct tree_decl_map *h, in;
6560 in.base.from = from;
6561
6562 h = (struct tree_decl_map *)
6563 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6564 if (h)
6565 return h->to;
6566 return NULL_TREE;
6567 }
6568
6569 /* Insert a mapping FROM->TO in the value expression hashtable. */
6570
6571 void
6572 decl_value_expr_insert (tree from, tree to)
6573 {
6574 struct tree_decl_map *h;
6575 void **loc;
6576
6577 h = ggc_alloc<tree_decl_map> ();
6578 h->base.from = from;
6579 h->to = to;
6580 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6581 INSERT);
6582 *(struct tree_decl_map **) loc = h;
6583 }
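
/* Editor's sketch (not part of tree.c): the DECL_VALUE_EXPR side table maps
   a decl to a replacement expression.  Callers normally maintain the
   DECL_HAS_VALUE_EXPR_P flag so lookups are only done when a mapping is
   known to exist; VAR and REPL below are hypothetical trees.  */
#if 0  /* Example only; not compiled.  */
static void
example_value_expr (tree var, tree repl)
{
  decl_value_expr_insert (var, repl);
  DECL_HAS_VALUE_EXPR_P (var) = 1;

  if (DECL_HAS_VALUE_EXPR_P (var))
    gcc_assert (decl_value_expr_lookup (var) == repl);
}
#endif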
6584
6585 /* Lookup a vector of debug arguments for FROM, and return it if we
6586 find one. */
6587
6588 vec<tree, va_gc> **
6589 decl_debug_args_lookup (tree from)
6590 {
6591 struct tree_vec_map *h, in;
6592
6593 if (!DECL_HAS_DEBUG_ARGS_P (from))
6594 return NULL;
6595 gcc_checking_assert (debug_args_for_decl != NULL);
6596 in.base.from = from;
6597 h = (struct tree_vec_map *)
6598 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6599 if (h)
6600 return &h->to;
6601 return NULL;
6602 }
6603
6604 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6605 arguments hashtable. */
6606
6607 vec<tree, va_gc> **
6608 decl_debug_args_insert (tree from)
6609 {
6610 struct tree_vec_map *h;
6611 void **loc;
6612
6613 if (DECL_HAS_DEBUG_ARGS_P (from))
6614 return decl_debug_args_lookup (from);
6615 if (debug_args_for_decl == NULL)
6616 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6617 tree_vec_map_eq, 0);
6618 h = ggc_alloc<tree_vec_map> ();
6619 h->base.from = from;
6620 h->to = NULL;
6621 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6622 INSERT);
6623 *(struct tree_vec_map **) loc = h;
6624 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6625 return &h->to;
6626 }
6627
6628 /* Hashing of types so that we don't make duplicates.
6629 The entry point is `type_hash_canon'. */
6630
6631 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6632 with types in the TREE_VALUE slots), by adding the hash codes
6633 of the individual types. */
6634
6635 static unsigned int
6636 type_hash_list (const_tree list, hashval_t hashcode)
6637 {
6638 const_tree tail;
6639
6640 for (tail = list; tail; tail = TREE_CHAIN (tail))
6641 if (TREE_VALUE (tail) != error_mark_node)
6642 hashcode = iterative_hash_object (TYPE_HASH (TREE_VALUE (tail)),
6643 hashcode);
6644
6645 return hashcode;
6646 }
6647
6648 /* These are the Hashtable callback functions. */
6649
6650 /* Returns true iff the types are equivalent. */
6651
6652 static int
6653 type_hash_eq (const void *va, const void *vb)
6654 {
6655 const struct type_hash *const a = (const struct type_hash *) va,
6656 *const b = (const struct type_hash *) vb;
6657
6658 /* First test the things that are the same for all types. */
6659 if (a->hash != b->hash
6660 || TREE_CODE (a->type) != TREE_CODE (b->type)
6661 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6662 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6663 TYPE_ATTRIBUTES (b->type))
6664 || (TREE_CODE (a->type) != COMPLEX_TYPE
6665 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6666 return 0;
6667
6668 /* Be careful about comparing arrays before and after the element type
6669 has been completed; don't compare TYPE_ALIGN unless both types are
6670 complete. */
6671 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6672 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6673 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6674 return 0;
6675
6676 switch (TREE_CODE (a->type))
6677 {
6678 case VOID_TYPE:
6679 case COMPLEX_TYPE:
6680 case POINTER_TYPE:
6681 case REFERENCE_TYPE:
6682 case NULLPTR_TYPE:
6683 return 1;
6684
6685 case VECTOR_TYPE:
6686 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6687
6688 case ENUMERAL_TYPE:
6689 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6690 && !(TYPE_VALUES (a->type)
6691 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6692 && TYPE_VALUES (b->type)
6693 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6694 && type_list_equal (TYPE_VALUES (a->type),
6695 TYPE_VALUES (b->type))))
6696 return 0;
6697
6698 /* ... fall through ... */
6699
6700 case INTEGER_TYPE:
6701 case REAL_TYPE:
6702 case BOOLEAN_TYPE:
6703 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6704 return false;
6705 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6706 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6707 TYPE_MAX_VALUE (b->type)))
6708 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6709 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6710 TYPE_MIN_VALUE (b->type))));
6711
6712 case FIXED_POINT_TYPE:
6713 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6714
6715 case OFFSET_TYPE:
6716 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6717
6718 case METHOD_TYPE:
6719 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6720 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6721 || (TYPE_ARG_TYPES (a->type)
6722 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6723 && TYPE_ARG_TYPES (b->type)
6724 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6725 && type_list_equal (TYPE_ARG_TYPES (a->type),
6726 TYPE_ARG_TYPES (b->type)))))
6727 break;
6728 return 0;
6729 case ARRAY_TYPE:
6730 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6731
6732 case RECORD_TYPE:
6733 case UNION_TYPE:
6734 case QUAL_UNION_TYPE:
6735 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6736 || (TYPE_FIELDS (a->type)
6737 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6738 && TYPE_FIELDS (b->type)
6739 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6740 && type_list_equal (TYPE_FIELDS (a->type),
6741 TYPE_FIELDS (b->type))));
6742
6743 case FUNCTION_TYPE:
6744 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6745 || (TYPE_ARG_TYPES (a->type)
6746 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6747 && TYPE_ARG_TYPES (b->type)
6748 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6749 && type_list_equal (TYPE_ARG_TYPES (a->type),
6750 TYPE_ARG_TYPES (b->type))))
6751 break;
6752 return 0;
6753
6754 default:
6755 return 0;
6756 }
6757
6758 if (lang_hooks.types.type_hash_eq != NULL)
6759 return lang_hooks.types.type_hash_eq (a->type, b->type);
6760
6761 return 1;
6762 }
6763
6764 /* Return the cached hash value. */
6765
6766 static hashval_t
6767 type_hash_hash (const void *item)
6768 {
6769 return ((const struct type_hash *) item)->hash;
6770 }
6771
6772 /* Look in the type hash table for a type isomorphic to TYPE.
6773 If one is found, return it. Otherwise return 0. */
6774
6775 static tree
6776 type_hash_lookup (hashval_t hashcode, tree type)
6777 {
6778 struct type_hash *h, in;
6779
6780 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6781 must call that routine before comparing TYPE_ALIGNs. */
6782 layout_type (type);
6783
6784 in.hash = hashcode;
6785 in.type = type;
6786
6787 h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in,
6788 hashcode);
6789 if (h)
6790 return h->type;
6791 return NULL_TREE;
6792 }
6793
6794 /* Add an entry to the type-hash-table
6795 for a type TYPE whose hash code is HASHCODE. */
6796
6797 static void
6798 type_hash_add (hashval_t hashcode, tree type)
6799 {
6800 struct type_hash *h;
6801 void **loc;
6802
6803 h = ggc_alloc<type_hash> ();
6804 h->hash = hashcode;
6805 h->type = type;
6806 loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
6807 *loc = (void *)h;
6808 }
6809
6810 /* Given TYPE, and HASHCODE its hash code, return the canonical
6811 object for an identical type if one already exists.
6812 Otherwise, return TYPE, and record it as the canonical object.
6813
6814 To use this function, first create a type of the sort you want.
6815 Then compute its hash code from the fields of the type that
6816 make it different from other similar types.
6817 Then call this function and use the value. */
6818
6819 tree
6820 type_hash_canon (unsigned int hashcode, tree type)
6821 {
6822 tree t1;
6823
6824 /* The hash table only contains main variants, so ensure that's what we're
6825 being passed. */
6826 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6827
6828 /* See if the type is in the hash table already. If so, return it.
6829 Otherwise, add the type. */
6830 t1 = type_hash_lookup (hashcode, type);
6831 if (t1 != 0)
6832 {
6833 if (GATHER_STATISTICS)
6834 {
6835 tree_code_counts[(int) TREE_CODE (type)]--;
6836 tree_node_counts[(int) t_kind]--;
6837 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6838 }
6839 return t1;
6840 }
6841 else
6842 {
6843 type_hash_add (hashcode, type);
6844 return type;
6845 }
6846 }
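
/* Editor's sketch (not part of tree.c): the recipe from the comment above,
   applied to a FUNCTION_TYPE roughly the way build_function_type does it
   elsewhere in this file -- build the node, hash its distinguishing fields,
   then let type_hash_canon either return an existing identical type or
   register the new one.  */
#if 0  /* Example only; not compiled.  */
static tree
example_canonical_function_type (tree value_type, tree arg_types)
{
  tree t = make_node (FUNCTION_TYPE);
  hashval_t hashcode = 0;

  TREE_TYPE (t) = value_type;
  TYPE_ARG_TYPES (t) = arg_types;

  hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode);
  hashcode = type_hash_list (arg_types, hashcode);

  return type_hash_canon (hashcode, t);
}
#endif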
6847
6848 /* See if the data pointed to by the type hash table entry is marked. We
6849 simply consider the entry marked if the type itself is marked, so that
6850 the entry goes away when the type does. */
6851
6852 static int
6853 type_hash_marked_p (const void *p)
6854 {
6855 const_tree const type = ((const struct type_hash *) p)->type;
6856
6857 return ggc_marked_p (type);
6858 }
6859
6860 static void
6861 print_type_hash_statistics (void)
6862 {
6863 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6864 (long) htab_size (type_hash_table),
6865 (long) htab_elements (type_hash_table),
6866 htab_collisions (type_hash_table));
6867 }
6868
6869 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6870 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6871 by adding the hash codes of the individual attributes. */
6872
6873 static unsigned int
6874 attribute_hash_list (const_tree list, hashval_t hashcode)
6875 {
6876 const_tree tail;
6877
6878 for (tail = list; tail; tail = TREE_CHAIN (tail))
6879 /* ??? Do we want to add in TREE_VALUE too? */
6880 hashcode = iterative_hash_object
6881 (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)), hashcode);
6882 return hashcode;
6883 }
6884
6885 /* Given two lists of attributes, return true if list l2 is
6886 equivalent to l1. */
6887
6888 int
6889 attribute_list_equal (const_tree l1, const_tree l2)
6890 {
6891 if (l1 == l2)
6892 return 1;
6893
6894 return attribute_list_contained (l1, l2)
6895 && attribute_list_contained (l2, l1);
6896 }
6897
6898 /* Given two lists of attributes, return true if list L2 is
6899 completely contained within L1. */
6900 /* ??? This would be faster if attribute names were stored in a canonicalized
6901 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6902 must be used to show these elements are equivalent (which they are). */
6903 /* ??? It's not clear that attributes with arguments will always be handled
6904 correctly. */
6905
6906 int
6907 attribute_list_contained (const_tree l1, const_tree l2)
6908 {
6909 const_tree t1, t2;
6910
6911 /* First check the obvious, maybe the lists are identical. */
6912 if (l1 == l2)
6913 return 1;
6914
6915 /* Maybe the lists are similar. */
6916 for (t1 = l1, t2 = l2;
6917 t1 != 0 && t2 != 0
6918 && get_attribute_name (t1) == get_attribute_name (t2)
6919 && TREE_VALUE (t1) == TREE_VALUE (t2);
6920 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6921 ;
6922
6923 /* Maybe the lists are equal. */
6924 if (t1 == 0 && t2 == 0)
6925 return 1;
6926
6927 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6928 {
6929 const_tree attr;
6930 /* This CONST_CAST is okay because lookup_attribute does not
6931 modify its argument and the return value is assigned to a
6932 const_tree. */
6933 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6934 CONST_CAST_TREE (l1));
6935 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6936 attr = lookup_ident_attribute (get_attribute_name (t2),
6937 TREE_CHAIN (attr)))
6938 ;
6939
6940 if (attr == NULL_TREE)
6941 return 0;
6942 }
6943
6944 return 1;
6945 }
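
/* Editor's sketch (not part of tree.c): containment is one-directional, so
   equality is simply containment both ways (see attribute_list_equal above).
   The lists here are hypothetical.  */
#if 0  /* Example only; not compiled.  */
static void
example_attribute_containment (void)
{
  tree small = tree_cons (get_identifier ("pure"), NULL_TREE, NULL_TREE);
  tree big = tree_cons (get_identifier ("nothrow"), NULL_TREE, small);

  gcc_assert (attribute_list_contained (big, small));   /* small within big */
  gcc_assert (!attribute_list_contained (small, big));  /* but not vice versa */
  gcc_assert (!attribute_list_equal (big, small));
}
#endif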
6946
6947 /* Given two lists of types
6948 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6949 return 1 if the lists contain the same types in the same order.
6950 Also, the TREE_PURPOSEs must match. */
6951
6952 int
6953 type_list_equal (const_tree l1, const_tree l2)
6954 {
6955 const_tree t1, t2;
6956
6957 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6958 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6959 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6960 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6961 && (TREE_TYPE (TREE_PURPOSE (t1))
6962 == TREE_TYPE (TREE_PURPOSE (t2))))))
6963 return 0;
6964
6965 return t1 == t2;
6966 }
6967
6968 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6969 given by TYPE. If the argument list accepts variable arguments,
6970 then this function counts only the ordinary arguments. */
6971
6972 int
6973 type_num_arguments (const_tree type)
6974 {
6975 int i = 0;
6976 tree t;
6977
6978 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6979 /* If the function does not take a variable number of arguments,
6980 the last element in the list will have type `void'. */
6981 if (VOID_TYPE_P (TREE_VALUE (t)))
6982 break;
6983 else
6984 ++i;
6985
6986 return i;
6987 }
6988
6989 /* Nonzero if integer constants T1 and T2
6990 represent the same constant value. */
6991
6992 int
6993 tree_int_cst_equal (const_tree t1, const_tree t2)
6994 {
6995 if (t1 == t2)
6996 return 1;
6997
6998 if (t1 == 0 || t2 == 0)
6999 return 0;
7000
7001 if (TREE_CODE (t1) == INTEGER_CST
7002 && TREE_CODE (t2) == INTEGER_CST
7003 && wi::to_widest (t1) == wi::to_widest (t2))
7004 return 1;
7005
7006 return 0;
7007 }
7008
7009 /* Return true if T is an INTEGER_CST whose numerical value (extended
7010 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7011
7012 bool
7013 tree_fits_shwi_p (const_tree t)
7014 {
7015 return (t != NULL_TREE
7016 && TREE_CODE (t) == INTEGER_CST
7017 && wi::fits_shwi_p (wi::to_widest (t)));
7018 }
7019
7020 /* Return true if T is an INTEGER_CST whose numerical value (extended
7021 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7022
7023 bool
7024 tree_fits_uhwi_p (const_tree t)
7025 {
7026 return (t != NULL_TREE
7027 && TREE_CODE (t) == INTEGER_CST
7028 && wi::fits_uhwi_p (wi::to_widest (t)));
7029 }
7030
7031 /* T is an INTEGER_CST whose numerical value (extended according to
7032 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7033 HOST_WIDE_INT. */
7034
7035 HOST_WIDE_INT
7036 tree_to_shwi (const_tree t)
7037 {
7038 gcc_assert (tree_fits_shwi_p (t));
7039 return TREE_INT_CST_LOW (t);
7040 }
7041
7042 /* T is an INTEGER_CST whose numerical value (extended according to
7043 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7044 HOST_WIDE_INT. */
7045
7046 unsigned HOST_WIDE_INT
7047 tree_to_uhwi (const_tree t)
7048 {
7049 gcc_assert (tree_fits_uhwi_p (t));
7050 return TREE_INT_CST_LOW (t);
7051 }
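
/* Editor's sketch (not part of tree.c): the predicates above guard the
   accessors, so the safe pattern is check-then-convert.  TYPE is any
   complete type; the -1 fallback is a hypothetical convention.  */
#if 0  /* Example only; not compiled.  */
static HOST_WIDE_INT
example_size_in_bytes_or_minus_1 (tree type)
{
  tree size = TYPE_SIZE_UNIT (type);

  if (size != NULL_TREE && tree_fits_shwi_p (size))
    return tree_to_shwi (size);

  return -1;  /* Variable-sized or too large for a HOST_WIDE_INT.  */
}
#endif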
7052
7053 /* Return the most significant (sign) bit of T. */
7054
7055 int
7056 tree_int_cst_sign_bit (const_tree t)
7057 {
7058 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7059
7060 return wi::extract_uhwi (t, bitno, 1);
7061 }
7062
7063 /* Return an indication of the sign of the integer constant T.
7064 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7065 Note that -1 will never be returned if T's type is unsigned. */
7066
7067 int
7068 tree_int_cst_sgn (const_tree t)
7069 {
7070 if (wi::eq_p (t, 0))
7071 return 0;
7072 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7073 return 1;
7074 else if (wi::neg_p (t))
7075 return -1;
7076 else
7077 return 1;
7078 }
7079
7080 /* Return the minimum number of bits needed to represent VALUE in a
7081 signed or unsigned type; SGN says which. */
7082
7083 unsigned int
7084 tree_int_cst_min_precision (tree value, signop sgn)
7085 {
7086 /* If the value is negative, compute its negative minus 1. The latter
7087 adjustment is because the absolute value of the largest negative value
7088 is one larger than the largest positive value. This is equivalent to
7089 a bit-wise negation, so use that operation instead. */
7090
7091 if (tree_int_cst_sgn (value) < 0)
7092 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7093
7094 /* Return the number of bits needed, taking into account the fact
7095 that we need one more bit for a signed than unsigned type.
7096 If value is 0 or -1, the minimum precision is 1 no matter
7097 whether SGN is SIGNED or UNSIGNED. */
7098
7099 if (integer_zerop (value))
7100 return 1;
7101 else
7102 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0);
7103 }
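
/* Editor's sketch (not part of tree.c): a couple of data points for the
   function above, using constants of integer_type_node purely as examples.  */
#if 0  /* Example only; not compiled.  */
static void
example_min_precision (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  tree minus_one = build_int_cst (integer_type_node, -1);

  gcc_assert (tree_int_cst_min_precision (five, UNSIGNED) == 3); /* 101  */
  gcc_assert (tree_int_cst_min_precision (five, SIGNED) == 4);   /* 0101 */
  gcc_assert (tree_int_cst_min_precision (minus_one, SIGNED) == 1);
}
#endif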
7104
7105 /* Return truthvalue of whether T1 is the same tree structure as T2.
7106 Return 1 if they are the same.
7107 Return 0 if they are understandably different.
7108 Return -1 if either contains tree structure not understood by
7109 this function. */
7110
7111 int
7112 simple_cst_equal (const_tree t1, const_tree t2)
7113 {
7114 enum tree_code code1, code2;
7115 int cmp;
7116 int i;
7117
7118 if (t1 == t2)
7119 return 1;
7120 if (t1 == 0 || t2 == 0)
7121 return 0;
7122
7123 code1 = TREE_CODE (t1);
7124 code2 = TREE_CODE (t2);
7125
7126 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7127 {
7128 if (CONVERT_EXPR_CODE_P (code2)
7129 || code2 == NON_LVALUE_EXPR)
7130 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7131 else
7132 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7133 }
7134
7135 else if (CONVERT_EXPR_CODE_P (code2)
7136 || code2 == NON_LVALUE_EXPR)
7137 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7138
7139 if (code1 != code2)
7140 return 0;
7141
7142 switch (code1)
7143 {
7144 case INTEGER_CST:
7145 return wi::to_widest (t1) == wi::to_widest (t2);
7146
7147 case REAL_CST:
7148 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7149
7150 case FIXED_CST:
7151 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7152
7153 case STRING_CST:
7154 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7155 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7156 TREE_STRING_LENGTH (t1)));
7157
7158 case CONSTRUCTOR:
7159 {
7160 unsigned HOST_WIDE_INT idx;
7161 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7162 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7163
7164 if (vec_safe_length (v1) != vec_safe_length (v2))
7165 return false;
7166
7167 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7168 /* ??? Should we handle also fields here? */
7169 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7170 return false;
7171 return true;
7172 }
7173
7174 case SAVE_EXPR:
7175 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7176
7177 case CALL_EXPR:
7178 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7179 if (cmp <= 0)
7180 return cmp;
7181 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7182 return 0;
7183 {
7184 const_tree arg1, arg2;
7185 const_call_expr_arg_iterator iter1, iter2;
7186 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7187 arg2 = first_const_call_expr_arg (t2, &iter2);
7188 arg1 && arg2;
7189 arg1 = next_const_call_expr_arg (&iter1),
7190 arg2 = next_const_call_expr_arg (&iter2))
7191 {
7192 cmp = simple_cst_equal (arg1, arg2);
7193 if (cmp <= 0)
7194 return cmp;
7195 }
7196 return arg1 == arg2;
7197 }
7198
7199 case TARGET_EXPR:
7200 /* Special case: if either target is an unallocated VAR_DECL,
7201 it means that it's going to be unified with whatever the
7202 TARGET_EXPR is really supposed to initialize, so treat it
7203 as being equivalent to anything. */
7204 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7205 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7206 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7207 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7208 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7209 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7210 cmp = 1;
7211 else
7212 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7213
7214 if (cmp <= 0)
7215 return cmp;
7216
7217 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7218
7219 case WITH_CLEANUP_EXPR:
7220 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7221 if (cmp <= 0)
7222 return cmp;
7223
7224 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7225
7226 case COMPONENT_REF:
7227 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7228 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7229
7230 return 0;
7231
7232 case VAR_DECL:
7233 case PARM_DECL:
7234 case CONST_DECL:
7235 case FUNCTION_DECL:
7236 return 0;
7237
7238 default:
7239 break;
7240 }
7241
7242 /* This general rule works for most tree codes. All exceptions should be
7243 handled above. If this is a language-specific tree code, we can't
7244 trust what might be in the operand, so say we don't know
7245 the situation. */
7246 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7247 return -1;
7248
7249 switch (TREE_CODE_CLASS (code1))
7250 {
7251 case tcc_unary:
7252 case tcc_binary:
7253 case tcc_comparison:
7254 case tcc_expression:
7255 case tcc_reference:
7256 case tcc_statement:
7257 cmp = 1;
7258 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7259 {
7260 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7261 if (cmp <= 0)
7262 return cmp;
7263 }
7264
7265 return cmp;
7266
7267 default:
7268 return -1;
7269 }
7270 }
7271
7272 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7273 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7274 than U, respectively. */
7275
7276 int
7277 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7278 {
7279 if (tree_int_cst_sgn (t) < 0)
7280 return -1;
7281 else if (!tree_fits_uhwi_p (t))
7282 return 1;
7283 else if (TREE_INT_CST_LOW (t) == u)
7284 return 0;
7285 else if (TREE_INT_CST_LOW (t) < u)
7286 return -1;
7287 else
7288 return 1;
7289 }
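
/* Editor's sketch (not part of tree.c): a typical use is bounding a tree
   constant against a plain host integer without converting it first.  The
   64-byte limit is arbitrary and only for illustration.  */
#if 0  /* Example only; not compiled.  */
static bool
example_fits_in_64_bytes (tree type)
{
  tree size = TYPE_SIZE_UNIT (type);

  return (size != NULL_TREE
	  && TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, 64) <= 0);
}
#endif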
7290
7291 /* Return true if SIZE represents a constant size that is in bounds of
7292 what the middle-end and the backend accept (covering not more than
7293 half of the address-space). */
7294
7295 bool
7296 valid_constant_size_p (const_tree size)
7297 {
7298 if (! tree_fits_uhwi_p (size)
7299 || TREE_OVERFLOW (size)
7300 || tree_int_cst_sign_bit (size) != 0)
7301 return false;
7302 return true;
7303 }
7304
7305 /* Return the precision of the type, or for a complex or vector type the
7306 precision of the type of its elements. */
7307
7308 unsigned int
7309 element_precision (const_tree type)
7310 {
7311 enum tree_code code = TREE_CODE (type);
7312 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7313 type = TREE_TYPE (type);
7314
7315 return TYPE_PRECISION (type);
7316 }
7317
7318 /* Return true if CODE represents an associative tree code. Otherwise
7319 return false. */
7320 bool
7321 associative_tree_code (enum tree_code code)
7322 {
7323 switch (code)
7324 {
7325 case BIT_IOR_EXPR:
7326 case BIT_AND_EXPR:
7327 case BIT_XOR_EXPR:
7328 case PLUS_EXPR:
7329 case MULT_EXPR:
7330 case MIN_EXPR:
7331 case MAX_EXPR:
7332 return true;
7333
7334 default:
7335 break;
7336 }
7337 return false;
7338 }
7339
7340 /* Return true if CODE represents a commutative tree code. Otherwise
7341 return false. */
7342 bool
7343 commutative_tree_code (enum tree_code code)
7344 {
7345 switch (code)
7346 {
7347 case PLUS_EXPR:
7348 case MULT_EXPR:
7349 case MULT_HIGHPART_EXPR:
7350 case MIN_EXPR:
7351 case MAX_EXPR:
7352 case BIT_IOR_EXPR:
7353 case BIT_XOR_EXPR:
7354 case BIT_AND_EXPR:
7355 case NE_EXPR:
7356 case EQ_EXPR:
7357 case UNORDERED_EXPR:
7358 case ORDERED_EXPR:
7359 case UNEQ_EXPR:
7360 case LTGT_EXPR:
7361 case TRUTH_AND_EXPR:
7362 case TRUTH_XOR_EXPR:
7363 case TRUTH_OR_EXPR:
7364 case WIDEN_MULT_EXPR:
7365 case VEC_WIDEN_MULT_HI_EXPR:
7366 case VEC_WIDEN_MULT_LO_EXPR:
7367 case VEC_WIDEN_MULT_EVEN_EXPR:
7368 case VEC_WIDEN_MULT_ODD_EXPR:
7369 return true;
7370
7371 default:
7372 break;
7373 }
7374 return false;
7375 }
7376
7377 /* Return true if CODE represents a ternary tree code for which the
7378 first two operands are commutative. Otherwise return false. */
7379 bool
7380 commutative_ternary_tree_code (enum tree_code code)
7381 {
7382 switch (code)
7383 {
7384 case WIDEN_MULT_PLUS_EXPR:
7385 case WIDEN_MULT_MINUS_EXPR:
7386 return true;
7387
7388 default:
7389 break;
7390 }
7391 return false;
7392 }
7393
7394 /* Generate a hash value for an expression. This can be used iteratively
7395 by passing a previous result as the VAL argument.
7396
7397 This function is intended to produce the same hash for expressions which
7398 would compare equal using operand_equal_p. */
7399
7400 hashval_t
7401 iterative_hash_expr (const_tree t, hashval_t val)
7402 {
7403 int i;
7404 enum tree_code code;
7405 enum tree_code_class tclass;
7406
7407 if (t == NULL_TREE)
7408 return iterative_hash_hashval_t (0, val);
7409
7410 code = TREE_CODE (t);
7411
7412 switch (code)
7413 {
7414 /* Alas, constants aren't shared, so we can't rely on pointer
7415 identity. */
7416 case VOID_CST:
7417 return iterative_hash_hashval_t (0, val);
7418 case INTEGER_CST:
7419 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7420 val = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), val);
7421 return val;
7422 case REAL_CST:
7423 {
7424 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7425
7426 return iterative_hash_hashval_t (val2, val);
7427 }
7428 case FIXED_CST:
7429 {
7430 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7431
7432 return iterative_hash_hashval_t (val2, val);
7433 }
7434 case STRING_CST:
7435 return iterative_hash (TREE_STRING_POINTER (t),
7436 TREE_STRING_LENGTH (t), val);
7437 case COMPLEX_CST:
7438 val = iterative_hash_expr (TREE_REALPART (t), val);
7439 return iterative_hash_expr (TREE_IMAGPART (t), val);
7440 case VECTOR_CST:
7441 {
7442 unsigned i;
7443 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7444 val = iterative_hash_expr (VECTOR_CST_ELT (t, i), val);
7445 return val;
7446 }
7447 case SSA_NAME:
7448 /* Hash SSA names by their version number. */
7449 return iterative_hash_host_wide_int (SSA_NAME_VERSION (t), val);
7450 case PLACEHOLDER_EXPR:
7451 /* The node itself doesn't matter. */
7452 return val;
7453 case TREE_LIST:
7454 /* A list of expressions, for a CALL_EXPR or as the elements of a
7455 VECTOR_CST. */
7456 for (; t; t = TREE_CHAIN (t))
7457 val = iterative_hash_expr (TREE_VALUE (t), val);
7458 return val;
7459 case CONSTRUCTOR:
7460 {
7461 unsigned HOST_WIDE_INT idx;
7462 tree field, value;
7463 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7464 {
7465 val = iterative_hash_expr (field, val);
7466 val = iterative_hash_expr (value, val);
7467 }
7468 return val;
7469 }
7470 case FUNCTION_DECL:
7471 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7472 Otherwise nodes that compare equal according to operand_equal_p might
7473 get different hash codes. However, don't do this for machine specific
7474 or front end builtins, since the function code is overloaded in those
7475 cases. */
7476 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7477 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7478 {
7479 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7480 code = TREE_CODE (t);
7481 }
7482 /* FALL THROUGH */
7483 default:
7484 tclass = TREE_CODE_CLASS (code);
7485
7486 if (tclass == tcc_declaration)
7487 {
7488 /* DECLs have a unique ID. */
7489 val = iterative_hash_host_wide_int (DECL_UID (t), val);
7490 }
7491 else
7492 {
7493 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7494
7495 val = iterative_hash_object (code, val);
7496
7497 /* Don't hash the type, that can lead to having nodes which
7498 compare equal according to operand_equal_p, but which
7499 have different hash codes. */
7500 if (CONVERT_EXPR_CODE_P (code)
7501 || code == NON_LVALUE_EXPR)
7502 {
7503 /* Make sure to include signedness in the hash computation. */
7504 val += TYPE_UNSIGNED (TREE_TYPE (t));
7505 val = iterative_hash_expr (TREE_OPERAND (t, 0), val);
7506 }
7507
7508 else if (commutative_tree_code (code))
7509 {
7510 /* It's a commutative expression. We want to hash it the same
7511 however it appears. We do this by first hashing both operands
7512 and then rehashing based on the order of their independent
7513 hashes. */
7514 hashval_t one = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
7515 hashval_t two = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
7516 hashval_t tmp;
7517
7518 if (one > two)
7519 tmp = one, one = two, two = tmp;
7520
7521 val = iterative_hash_hashval_t (one, val);
7522 val = iterative_hash_hashval_t (two, val);
7523 }
7524 else
7525 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7526 val = iterative_hash_expr (TREE_OPERAND (t, i), val);
7527 }
7528 return val;
7529 }
7530 }
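
/* Editor's sketch (not part of tree.c): "iteratively" means the previous
   result feeds the VAL argument of the next call, so a composite key can be
   hashed piecewise.  E1 and E2 are hypothetical expressions.  */
#if 0  /* Example only; not compiled.  */
static hashval_t
example_hash_pair (tree e1, tree e2)
{
  hashval_t val = 0;

  val = iterative_hash_expr (e1, val);
  val = iterative_hash_expr (e2, val);
  return val;
}
#endif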
7531
7532 /* Constructors for pointer, array and function types.
7533 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7534 constructed by language-dependent code, not here.) */
7535
7536 /* Construct, lay out and return the type of pointers to TO_TYPE with
7537 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7538 reference all of memory. If such a type has already been
7539 constructed, reuse it. */
7540
7541 tree
7542 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7543 bool can_alias_all)
7544 {
7545 tree t;
7546
7547 if (to_type == error_mark_node)
7548 return error_mark_node;
7549
7550 /* If the pointed-to type has the may_alias attribute set, force
7551 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7552 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7553 can_alias_all = true;
7554
7555 /* In some cases, languages will have things that aren't a POINTER_TYPE
7556 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7557 In that case, return that type without regard to the rest of our
7558 operands.
7559
7560 ??? This is a kludge, but consistent with the way this function has
7561 always operated and there doesn't seem to be a good way to avoid this
7562 at the moment. */
7563 if (TYPE_POINTER_TO (to_type) != 0
7564 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7565 return TYPE_POINTER_TO (to_type);
7566
7567 /* First, if we already have a type for pointers to TO_TYPE and it's
7568 the proper mode, use it. */
7569 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7570 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7571 return t;
7572
7573 t = make_node (POINTER_TYPE);
7574
7575 TREE_TYPE (t) = to_type;
7576 SET_TYPE_MODE (t, mode);
7577 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7578 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7579 TYPE_POINTER_TO (to_type) = t;
7580
7581 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7582 SET_TYPE_STRUCTURAL_EQUALITY (t);
7583 else if (TYPE_CANONICAL (to_type) != to_type)
7584 TYPE_CANONICAL (t)
7585 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7586 mode, can_alias_all);
7587
7588 /* Lay out the type. This function has many callers that are concerned
7589 with expression-construction, and this simplifies them all. */
7590 layout_type (t);
7591
7592 return t;
7593 }
7594
7595 /* By default build pointers in ptr_mode. */
7596
7597 tree
7598 build_pointer_type (tree to_type)
7599 {
7600 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7601 : TYPE_ADDR_SPACE (to_type);
7602 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7603 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7604 }
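
/* Editor's sketch (not part of tree.c): pointer types are cached on the
   pointed-to type, so repeated requests return the same node.  char is used
   only as an example.  */
#if 0  /* Example only; not compiled.  */
static void
example_pointer_type_sharing (void)
{
  tree p1 = build_pointer_type (char_type_node);
  tree p2 = build_pointer_type (char_type_node);

  /* Both requests come back with the node recorded in TYPE_POINTER_TO.  */
  gcc_assert (p1 == p2);
  gcc_assert (TREE_TYPE (p1) == char_type_node);
}
#endif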
7605
7606 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7607
7608 tree
7609 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7610 bool can_alias_all)
7611 {
7612 tree t;
7613
7614 if (to_type == error_mark_node)
7615 return error_mark_node;
7616
7617 /* If the pointed-to type has the may_alias attribute set, force
7618 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7619 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7620 can_alias_all = true;
7621
7622 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7623 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7624 In that case, return that type without regard to the rest of our
7625 operands.
7626
7627 ??? This is a kludge, but consistent with the way this function has
7628 always operated and there doesn't seem to be a good way to avoid this
7629 at the moment. */
7630 if (TYPE_REFERENCE_TO (to_type) != 0
7631 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7632 return TYPE_REFERENCE_TO (to_type);
7633
7634 /* First, if we already have a type for pointers to TO_TYPE and it's
7635 the proper mode, use it. */
7636 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7637 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7638 return t;
7639
7640 t = make_node (REFERENCE_TYPE);
7641
7642 TREE_TYPE (t) = to_type;
7643 SET_TYPE_MODE (t, mode);
7644 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7645 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7646 TYPE_REFERENCE_TO (to_type) = t;
7647
7648 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7649 SET_TYPE_STRUCTURAL_EQUALITY (t);
7650 else if (TYPE_CANONICAL (to_type) != to_type)
7651 TYPE_CANONICAL (t)
7652 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7653 mode, can_alias_all);
7654
7655 layout_type (t);
7656
7657 return t;
7658 }
7659
7660
7661 /* Build the node for the type of references-to-TO_TYPE by default
7662 in ptr_mode. */
7663
7664 tree
7665 build_reference_type (tree to_type)
7666 {
7667 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7668 : TYPE_ADDR_SPACE (to_type);
7669 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7670 return build_reference_type_for_mode (to_type, pointer_mode, false);
7671 }
7672
7673 #define MAX_INT_CACHED_PREC \
7674 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7675 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7676
7677 /* Builds a signed or unsigned integer type of precision PRECISION.
7678 Used for C bitfields whose precision does not match that of
7679 built-in target types. */
7680 tree
7681 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7682 int unsignedp)
7683 {
7684 tree itype, ret;
7685
7686 if (unsignedp)
7687 unsignedp = MAX_INT_CACHED_PREC + 1;
7688
7689 if (precision <= MAX_INT_CACHED_PREC)
7690 {
7691 itype = nonstandard_integer_type_cache[precision + unsignedp];
7692 if (itype)
7693 return itype;
7694 }
7695
7696 itype = make_node (INTEGER_TYPE);
7697 TYPE_PRECISION (itype) = precision;
7698
7699 if (unsignedp)
7700 fixup_unsigned_type (itype);
7701 else
7702 fixup_signed_type (itype);
7703
7704 ret = itype;
7705 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7706 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7707 if (precision <= MAX_INT_CACHED_PREC)
7708 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7709
7710 return ret;
7711 }
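
/* Editor's sketch (not part of tree.c): this is how a 24-bit unsigned
   bit-field type would be obtained; small precisions are cached, so the
   second request returns the same node.  */
#if 0  /* Example only; not compiled.  */
static void
example_nonstandard_int (void)
{
  tree u24_a = build_nonstandard_integer_type (24, /*unsignedp=*/1);
  tree u24_b = build_nonstandard_integer_type (24, /*unsignedp=*/1);

  gcc_assert (TYPE_PRECISION (u24_a) == 24);
  gcc_assert (TYPE_UNSIGNED (u24_a));
  gcc_assert (u24_a == u24_b);  /* Served from the cache the second time.  */
}
#endif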
7712
7713 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7714 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7715 is true, reuse such a type that has already been constructed. */
7716
7717 static tree
7718 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7719 {
7720 tree itype = make_node (INTEGER_TYPE);
7721 hashval_t hashcode = 0;
7722
7723 TREE_TYPE (itype) = type;
7724
7725 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7726 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7727
7728 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7729 SET_TYPE_MODE (itype, TYPE_MODE (type));
7730 TYPE_SIZE (itype) = TYPE_SIZE (type);
7731 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7732 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7733 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7734
7735 if (!shared)
7736 return itype;
7737
7738 if ((TYPE_MIN_VALUE (itype)
7739 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7740 || (TYPE_MAX_VALUE (itype)
7741 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7742 {
7743 /* Since we cannot reliably merge this type, we need to compare it using
7744 structural equality checks. */
7745 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7746 return itype;
7747 }
7748
7749 hashcode = iterative_hash_expr (TYPE_MIN_VALUE (itype), hashcode);
7750 hashcode = iterative_hash_expr (TYPE_MAX_VALUE (itype), hashcode);
7751 hashcode = iterative_hash_hashval_t (TYPE_HASH (type), hashcode);
7752 itype = type_hash_canon (hashcode, itype);
7753
7754 return itype;
7755 }
7756
7757 /* Wrapper around build_range_type_1 with SHARED set to true. */
7758
7759 tree
7760 build_range_type (tree type, tree lowval, tree highval)
7761 {
7762 return build_range_type_1 (type, lowval, highval, true);
7763 }
7764
7765 /* Wrapper around build_range_type_1 with SHARED set to false. */
7766
7767 tree
7768 build_nonshared_range_type (tree type, tree lowval, tree highval)
7769 {
7770 return build_range_type_1 (type, lowval, highval, false);
7771 }
7772
7773 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7774 MAXVAL should be the maximum value in the domain
7775 (one less than the length of the array).
7776
7777 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7778 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7779 The limit exists because the result is a signed type and we don't handle
7780 sizes that use more than one HOST_WIDE_INT. */
7781
7782 tree
7783 build_index_type (tree maxval)
7784 {
7785 return build_range_type (sizetype, size_zero_node, maxval);
7786 }
7787
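/* Editor's illustration (not part of GCC; never compiled): the index type
   of a 10-element array, i.e. the sizetype range [0, 9].  The function
   name is hypothetical.  */
#if 0
static tree
example_index_type_for_10_elements (void)
{
  /* MAXVAL is the highest valid index, one less than the length.  */
  return build_index_type (size_int (9));
}
#endif
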
7788 /* Return true if the debug information for TYPE, a subtype, should be emitted
7789 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7790 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7791 debug info and doesn't reflect the source code. */
7792
7793 bool
7794 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7795 {
7796 tree base_type = TREE_TYPE (type), low, high;
7797
7798 /* Subrange types have a base type which is an integral type. */
7799 if (!INTEGRAL_TYPE_P (base_type))
7800 return false;
7801
7802 /* Get the real bounds of the subtype. */
7803 if (lang_hooks.types.get_subrange_bounds)
7804 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7805 else
7806 {
7807 low = TYPE_MIN_VALUE (type);
7808 high = TYPE_MAX_VALUE (type);
7809 }
7810
7811 /* If the type and its base type have the same representation and the same
7812 name, then the type is not a subrange but a copy of the base type. */
7813 if ((TREE_CODE (base_type) == INTEGER_TYPE
7814 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7815 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7816 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7817 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7818 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7819 return false;
7820
7821 if (lowval)
7822 *lowval = low;
7823 if (highval)
7824 *highval = high;
7825 return true;
7826 }
7827
7828 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7829 and number of elements specified by the range of values of INDEX_TYPE.
7830 If SHARED is true, reuse such a type that has already been constructed. */
7831
7832 static tree
7833 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7834 {
7835 tree t;
7836
7837 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7838 {
7839 error ("arrays of functions are not meaningful");
7840 elt_type = integer_type_node;
7841 }
7842
7843 t = make_node (ARRAY_TYPE);
7844 TREE_TYPE (t) = elt_type;
7845 TYPE_DOMAIN (t) = index_type;
7846 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7847 layout_type (t);
7848
7849 /* If the element type is incomplete at this point we get marked for
7850 structural equality. Do not record these types in the canonical
7851 type hashtable. */
7852 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7853 return t;
7854
7855 if (shared)
7856 {
7857 hashval_t hashcode = iterative_hash_object (TYPE_HASH (elt_type), 0);
7858 if (index_type)
7859 hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode);
7860 t = type_hash_canon (hashcode, t);
7861 }
7862
7863 if (TYPE_CANONICAL (t) == t)
7864 {
7865 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7866 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7867 SET_TYPE_STRUCTURAL_EQUALITY (t);
7868 else if (TYPE_CANONICAL (elt_type) != elt_type
7869 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7870 TYPE_CANONICAL (t)
7871 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7872 index_type
7873 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7874 shared);
7875 }
7876
7877 return t;
7878 }
7879
7880 /* Wrapper around build_array_type_1 with SHARED set to true. */
7881
7882 tree
7883 build_array_type (tree elt_type, tree index_type)
7884 {
7885 return build_array_type_1 (elt_type, index_type, true);
7886 }
7887
7888 /* Wrapper around build_array_type_1 with SHARED set to false. */
7889
7890 tree
7891 build_nonshared_array_type (tree elt_type, tree index_type)
7892 {
7893 return build_array_type_1 (elt_type, index_type, false);
7894 }
7895
7896 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7897 sizetype. */
7898
7899 tree
7900 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7901 {
7902 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7903 }
7904
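/* Editor's illustration (not part of GCC; never compiled): two equivalent
   ways of building the type "double[16]".  The function name is
   hypothetical.  */
#if 0
static void
example_array_types (void)
{
  /* Spelled out: element type plus an explicit index domain [0, 15].  */
  tree t1 = build_array_type (double_type_node,
			      build_index_type (size_int (15)));

  /* The convenience wrapper takes the element count directly.  */
  tree t2 = build_array_type_nelts (double_type_node, 16);

  /* Both requests are canonicalized through type_hash_canon, so they
     yield the same shared node.  */
  gcc_assert (t1 == t2);
}
#endif
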
7905 /* Strips ARRAY_TYPE wrappers from TYPE until a non-array element type
7906 is found, and returns that element type. */
7907
7908 tree
7909 strip_array_types (tree type)
7910 {
7911 while (TREE_CODE (type) == ARRAY_TYPE)
7912 type = TREE_TYPE (type);
7913
7914 return type;
7915 }
7916
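/* Editor's illustration (not part of GCC; never compiled): peeling a
   multi-dimensional array type down to its scalar element type.  The
   function name is hypothetical.  */
#if 0
static void
example_strip_array_types (void)
{
  /* "int[4][8]" -- an array of arrays.  */
  tree inner = build_array_type_nelts (integer_type_node, 8);
  tree outer = build_array_type_nelts (inner, 4);

  /* strip_array_types walks through every ARRAY_TYPE level.  */
  gcc_assert (strip_array_types (outer) == integer_type_node);
}
#endif
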
7917 /* Computes the canonical argument types from the argument type list
7918 ARGTYPES.
7919
7920 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7921 on entry to this function, or if any of the ARGTYPES are
7922 structural.
7923
7924 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7925 true on entry to this function, or if any of the ARGTYPES are
7926 non-canonical.
7927
7928 Returns a canonical argument list, which may be ARGTYPES when the
7929 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7930 true) or would not differ from ARGTYPES. */
7931
7932 static tree
7933 maybe_canonicalize_argtypes (tree argtypes,
7934 bool *any_structural_p,
7935 bool *any_noncanonical_p)
7936 {
7937 tree arg;
7938 bool any_noncanonical_argtypes_p = false;
7939
7940 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7941 {
7942 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7943 /* Fail gracefully by stating that the type is structural. */
7944 *any_structural_p = true;
7945 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7946 *any_structural_p = true;
7947 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7948 || TREE_PURPOSE (arg))
7949 /* If the argument has a default argument, we consider it
7950 non-canonical even though the type itself is canonical.
7951 That way, different variants of function and method types
7952 with default arguments will all point to the variant with
7953 no defaults as their canonical type. */
7954 any_noncanonical_argtypes_p = true;
7955 }
7956
7957 if (*any_structural_p)
7958 return argtypes;
7959
7960 if (any_noncanonical_argtypes_p)
7961 {
7962 /* Build the canonical list of argument types. */
7963 tree canon_argtypes = NULL_TREE;
7964 bool is_void = false;
7965
7966 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7967 {
7968 if (arg == void_list_node)
7969 is_void = true;
7970 else
7971 canon_argtypes = tree_cons (NULL_TREE,
7972 TYPE_CANONICAL (TREE_VALUE (arg)),
7973 canon_argtypes);
7974 }
7975
7976 canon_argtypes = nreverse (canon_argtypes);
7977 if (is_void)
7978 canon_argtypes = chainon (canon_argtypes, void_list_node);
7979
7980 /* There is a non-canonical type. */
7981 *any_noncanonical_p = true;
7982 return canon_argtypes;
7983 }
7984
7985 /* The canonical argument types are the same as ARGTYPES. */
7986 return argtypes;
7987 }
7988
7989 /* Construct, lay out and return
7990 the type of functions returning type VALUE_TYPE
7991 given arguments of types ARG_TYPES.
7992 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7993 are data type nodes for the arguments of the function.
7994 If such a type has already been constructed, reuse it. */
7995
7996 tree
7997 build_function_type (tree value_type, tree arg_types)
7998 {
7999 tree t;
8000 hashval_t hashcode = 0;
8001 bool any_structural_p, any_noncanonical_p;
8002 tree canon_argtypes;
8003
8004 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8005 {
8006 error ("function return type cannot be function");
8007 value_type = integer_type_node;
8008 }
8009
8010 /* Make a node of the sort we want. */
8011 t = make_node (FUNCTION_TYPE);
8012 TREE_TYPE (t) = value_type;
8013 TYPE_ARG_TYPES (t) = arg_types;
8014
8015 /* If we already have such a type, use the old one. */
8016 hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode);
8017 hashcode = type_hash_list (arg_types, hashcode);
8018 t = type_hash_canon (hashcode, t);
8019
8020 /* Set up the canonical type. */
8021 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8022 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8023 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8024 &any_structural_p,
8025 &any_noncanonical_p);
8026 if (any_structural_p)
8027 SET_TYPE_STRUCTURAL_EQUALITY (t);
8028 else if (any_noncanonical_p)
8029 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8030 canon_argtypes);
8031
8032 if (!COMPLETE_TYPE_P (t))
8033 layout_type (t);
8034 return t;
8035 }
8036
8037 /* Build a function type. The RETURN_TYPE is the type returned by the
8038 function. If VAARGS is set, no void_type_node is appended to the
8039 list. ARGP must always be terminated by a NULL_TREE. */
8040
8041 static tree
8042 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8043 {
8044 tree t, args, last;
8045
8046 t = va_arg (argp, tree);
8047 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8048 args = tree_cons (NULL_TREE, t, args);
8049
8050 if (vaargs)
8051 {
8052 last = args;
8053 if (args != NULL_TREE)
8054 args = nreverse (args);
8055 gcc_assert (last != void_list_node);
8056 }
8057 else if (args == NULL_TREE)
8058 args = void_list_node;
8059 else
8060 {
8061 last = args;
8062 args = nreverse (args);
8063 TREE_CHAIN (last) = void_list_node;
8064 }
8065 args = build_function_type (return_type, args);
8066
8067 return args;
8068 }
8069
8070 /* Build a function type. The RETURN_TYPE is the type returned by the
8071 function. If additional arguments are provided, they are
8072 additional argument types. The list of argument types must always
8073 be terminated by NULL_TREE. */
8074
8075 tree
8076 build_function_type_list (tree return_type, ...)
8077 {
8078 tree args;
8079 va_list p;
8080
8081 va_start (p, return_type);
8082 args = build_function_type_list_1 (false, return_type, p);
8083 va_end (p);
8084 return args;
8085 }
8086
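/* Editor's illustration (not part of GCC; never compiled): building the
   type "int (*) (double, char *)", i.e. a pointer to a prototyped,
   non-variadic function.  The function name is hypothetical.  */
#if 0
static tree
example_function_pointer_type (void)
{
  /* The argument types are given inline and must end with NULL_TREE;
     because this is the non-varargs entry point, void_list_node is
     appended internally to mark the prototype as complete.  */
  tree fntype = build_function_type_list (integer_type_node,
					  double_type_node,
					  build_pointer_type (char_type_node),
					  NULL_TREE);
  return build_pointer_type (fntype);
}
#endif
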
8087 /* Build a variable argument function type. The RETURN_TYPE is the
8088 type returned by the function. If additional arguments are provided,
8089 they are additional argument types. The list of argument types must
8090 always be terminated by NULL_TREE. */
8091
8092 tree
8093 build_varargs_function_type_list (tree return_type, ...)
8094 {
8095 tree args;
8096 va_list p;
8097
8098 va_start (p, return_type);
8099 args = build_function_type_list_1 (true, return_type, p);
8100 va_end (p);
8101
8102 return args;
8103 }
8104
8105 /* Build a function type. RETURN_TYPE is the type returned by the
8106 function; VAARGS indicates whether the function takes varargs. The
8107 function takes N named arguments, the types of which are provided in
8108 ARG_TYPES. */
8109
8110 static tree
8111 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8112 tree *arg_types)
8113 {
8114 int i;
8115 tree t = vaargs ? NULL_TREE : void_list_node;
8116
8117 for (i = n - 1; i >= 0; i--)
8118 t = tree_cons (NULL_TREE, arg_types[i], t);
8119
8120 return build_function_type (return_type, t);
8121 }
8122
8123 /* Build a function type. RETURN_TYPE is the type returned by the
8124 function. The function takes N named arguments, the types of which
8125 are provided in ARG_TYPES. */
8126
8127 tree
8128 build_function_type_array (tree return_type, int n, tree *arg_types)
8129 {
8130 return build_function_type_array_1 (false, return_type, n, arg_types);
8131 }
8132
8133 /* Build a variable argument function type. RETURN_TYPE is the type
8134 returned by the function. The function takes N named arguments, the
8135 types of which are provided in ARG_TYPES. */
8136
8137 tree
8138 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8139 {
8140 return build_function_type_array_1 (true, return_type, n, arg_types);
8141 }
8142
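/* Editor's illustration (not part of GCC; never compiled): the array-based
   builders are convenient when the argument types are already collected in
   a buffer, e.g. for "int printf (const char *, ...)".  Names are
   hypothetical.  */
#if 0
static tree
example_printf_like_type (void)
{
  tree argtypes[1]
    = { build_pointer_type (build_qualified_type (char_type_node,
						  TYPE_QUAL_CONST)) };

  /* The varargs variant omits the trailing void_list_node, which is what
     marks the type as taking a variable number of arguments.  */
  return build_varargs_function_type_array (integer_type_node,
					    1, argtypes);
}
#endif
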
8143 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8144 and ARGTYPES (a TREE_LIST) are the return type and argument types
8145 for the method. An implicit additional parameter (of type
8146 pointer-to-BASETYPE) is added to the ARGTYPES. */
8147
8148 tree
8149 build_method_type_directly (tree basetype,
8150 tree rettype,
8151 tree argtypes)
8152 {
8153 tree t;
8154 tree ptype;
8155 int hashcode = 0;
8156 bool any_structural_p, any_noncanonical_p;
8157 tree canon_argtypes;
8158
8159 /* Make a node of the sort we want. */
8160 t = make_node (METHOD_TYPE);
8161
8162 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8163 TREE_TYPE (t) = rettype;
8164 ptype = build_pointer_type (basetype);
8165
8166 /* The actual arglist for this function includes a "hidden" argument
8167 which is "this". Put it into the list of argument types. */
8168 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8169 TYPE_ARG_TYPES (t) = argtypes;
8170
8171 /* If we already have such a type, use the old one. */
8172 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8173 hashcode = iterative_hash_object (TYPE_HASH (rettype), hashcode);
8174 hashcode = type_hash_list (argtypes, hashcode);
8175 t = type_hash_canon (hashcode, t);
8176
8177 /* Set up the canonical type. */
8178 any_structural_p
8179 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8180 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8181 any_noncanonical_p
8182 = (TYPE_CANONICAL (basetype) != basetype
8183 || TYPE_CANONICAL (rettype) != rettype);
8184 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8185 &any_structural_p,
8186 &any_noncanonical_p);
8187 if (any_structural_p)
8188 SET_TYPE_STRUCTURAL_EQUALITY (t);
8189 else if (any_noncanonical_p)
8190 TYPE_CANONICAL (t)
8191 = build_method_type_directly (TYPE_CANONICAL (basetype),
8192 TYPE_CANONICAL (rettype),
8193 canon_argtypes);
8194 if (!COMPLETE_TYPE_P (t))
8195 layout_type (t);
8196
8197 return t;
8198 }
8199
8200 /* Construct, lay out and return the type of methods belonging to class
8201 BASETYPE and whose arguments and values are described by TYPE.
8202 If that type exists already, reuse it.
8203 TYPE must be a FUNCTION_TYPE node. */
8204
8205 tree
8206 build_method_type (tree basetype, tree type)
8207 {
8208 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8209
8210 return build_method_type_directly (basetype,
8211 TREE_TYPE (type),
8212 TYPE_ARG_TYPES (type));
8213 }
8214
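/* Editor's illustration (not part of GCC; never compiled): given some class
   type CLASS_TYPE (hypothetical), build the type of the member function
   "int CLASS_TYPE::f ()".  */
#if 0
static tree
example_method_type (tree class_type)
{
  /* void_list_node terminates the explicit argument list; the hidden
     "pointer to class_type" parameter for "this" is prepended by
     build_method_type_directly itself.  */
  return build_method_type_directly (class_type, integer_type_node,
				     void_list_node);
}
#endif
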
8215 /* Construct, lay out and return the type of offsets to a value
8216 of type TYPE, within an object of type BASETYPE.
8217 If a suitable offset type exists already, reuse it. */
8218
8219 tree
8220 build_offset_type (tree basetype, tree type)
8221 {
8222 tree t;
8223 hashval_t hashcode = 0;
8224
8225 /* Make a node of the sort we want. */
8226 t = make_node (OFFSET_TYPE);
8227
8228 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8229 TREE_TYPE (t) = type;
8230
8231 /* If we already have such a type, use the old one. */
8232 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8233 hashcode = iterative_hash_object (TYPE_HASH (type), hashcode);
8234 t = type_hash_canon (hashcode, t);
8235
8236 if (!COMPLETE_TYPE_P (t))
8237 layout_type (t);
8238
8239 if (TYPE_CANONICAL (t) == t)
8240 {
8241 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8242 || TYPE_STRUCTURAL_EQUALITY_P (type))
8243 SET_TYPE_STRUCTURAL_EQUALITY (t);
8244 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8245 || TYPE_CANONICAL (type) != type)
8246 TYPE_CANONICAL (t)
8247 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8248 TYPE_CANONICAL (type));
8249 }
8250
8251 return t;
8252 }
8253
8254 /* Create a complex type whose components are COMPONENT_TYPE. */
8255
8256 tree
8257 build_complex_type (tree component_type)
8258 {
8259 tree t;
8260 hashval_t hashcode;
8261
8262 gcc_assert (INTEGRAL_TYPE_P (component_type)
8263 || SCALAR_FLOAT_TYPE_P (component_type)
8264 || FIXED_POINT_TYPE_P (component_type));
8265
8266 /* Make a node of the sort we want. */
8267 t = make_node (COMPLEX_TYPE);
8268
8269 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8270
8271 /* If we already have such a type, use the old one. */
8272 hashcode = iterative_hash_object (TYPE_HASH (component_type), 0);
8273 t = type_hash_canon (hashcode, t);
8274
8275 if (!COMPLETE_TYPE_P (t))
8276 layout_type (t);
8277
8278 if (TYPE_CANONICAL (t) == t)
8279 {
8280 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8281 SET_TYPE_STRUCTURAL_EQUALITY (t);
8282 else if (TYPE_CANONICAL (component_type) != component_type)
8283 TYPE_CANONICAL (t)
8284 = build_complex_type (TYPE_CANONICAL (component_type));
8285 }
8286
8287 /* We need to create a name, since complex is a fundamental type. */
8288 if (! TYPE_NAME (t))
8289 {
8290 const char *name;
8291 if (component_type == char_type_node)
8292 name = "complex char";
8293 else if (component_type == signed_char_type_node)
8294 name = "complex signed char";
8295 else if (component_type == unsigned_char_type_node)
8296 name = "complex unsigned char";
8297 else if (component_type == short_integer_type_node)
8298 name = "complex short int";
8299 else if (component_type == short_unsigned_type_node)
8300 name = "complex short unsigned int";
8301 else if (component_type == integer_type_node)
8302 name = "complex int";
8303 else if (component_type == unsigned_type_node)
8304 name = "complex unsigned int";
8305 else if (component_type == long_integer_type_node)
8306 name = "complex long int";
8307 else if (component_type == long_unsigned_type_node)
8308 name = "complex long unsigned int";
8309 else if (component_type == long_long_integer_type_node)
8310 name = "complex long long int";
8311 else if (component_type == long_long_unsigned_type_node)
8312 name = "complex long long unsigned int";
8313 else
8314 name = 0;
8315
8316 if (name != 0)
8317 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8318 get_identifier (name), t);
8319 }
8320
8321 return build_qualified_type (t, TYPE_QUALS (component_type));
8322 }
8323
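/* Editor's illustration (not part of GCC; never compiled): asking for the
   complex counterpart of "double".  The function name is hypothetical.  */
#if 0
static tree
example_complex_double (void)
{
  /* The result is hash-canonicalized, so this returns the same node as
     the predefined complex_double_type_node.  */
  return build_complex_type (double_type_node);
}
#endif
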
8324 /* If TYPE is a real or complex floating-point type and the target
8325 does not directly support arithmetic on TYPE then return the wider
8326 type to be used for arithmetic on TYPE. Otherwise, return
8327 NULL_TREE. */
8328
8329 tree
8330 excess_precision_type (tree type)
8331 {
8332 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8333 {
8334 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8335 switch (TREE_CODE (type))
8336 {
8337 case REAL_TYPE:
8338 switch (flt_eval_method)
8339 {
8340 case 1:
8341 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8342 return double_type_node;
8343 break;
8344 case 2:
8345 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8346 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8347 return long_double_type_node;
8348 break;
8349 default:
8350 gcc_unreachable ();
8351 }
8352 break;
8353 case COMPLEX_TYPE:
8354 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8355 return NULL_TREE;
8356 switch (flt_eval_method)
8357 {
8358 case 1:
8359 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8360 return complex_double_type_node;
8361 break;
8362 case 2:
8363 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8364 || (TYPE_MODE (TREE_TYPE (type))
8365 == TYPE_MODE (double_type_node)))
8366 return complex_long_double_type_node;
8367 break;
8368 default:
8369 gcc_unreachable ();
8370 }
8371 break;
8372 default:
8373 break;
8374 }
8375 }
8376 return NULL_TREE;
8377 }
8378 \f
8379 /* Return OP, stripped of any conversions to wider types as much as is safe.
8380 Converting the value back to OP's type makes a value equivalent to OP.
8381
8382 If FOR_TYPE is nonzero, we return a value which, if converted to
8383 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8384
8385 OP must have integer, real or enumeral type. Pointers are not allowed!
8386
8387 There are some cases where the obvious value we could return
8388 would regenerate to OP if converted to OP's type,
8389 but would not extend like OP to wider types.
8390 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8391 For example, if OP is (unsigned short)(signed char)-1,
8392 we avoid returning (signed char)-1 if FOR_TYPE is int,
8393 even though extending that to an unsigned short would regenerate OP,
8394 since the result of extending (signed char)-1 to (int)
8395 is different from (int) OP. */
8396
8397 tree
8398 get_unwidened (tree op, tree for_type)
8399 {
8400 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8401 tree type = TREE_TYPE (op);
8402 unsigned final_prec
8403 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8404 int uns
8405 = (for_type != 0 && for_type != type
8406 && final_prec > TYPE_PRECISION (type)
8407 && TYPE_UNSIGNED (type));
8408 tree win = op;
8409
8410 while (CONVERT_EXPR_P (op))
8411 {
8412 int bitschange;
8413
8414 /* TYPE_PRECISION on vector types has different meaning
8415 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8416 so avoid them here. */
8417 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8418 break;
8419
8420 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8421 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8422
8423 /* Truncations are many-one so cannot be removed, unless we are later
8424 going to truncate down even farther. */
8425 if (bitschange < 0
8426 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8427 break;
8428
8429 /* See what's inside this conversion. If we decide to strip it,
8430 we will set WIN. */
8431 op = TREE_OPERAND (op, 0);
8432
8433 /* If we have not stripped any zero-extensions (uns is 0),
8434 we can strip any kind of extension.
8435 If we have previously stripped a zero-extension,
8436 only zero-extensions can safely be stripped.
8437 Any extension can be stripped if the bits it would produce
8438 are all going to be discarded later by truncating to FOR_TYPE. */
8439
8440 if (bitschange > 0)
8441 {
8442 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8443 win = op;
8444 /* TYPE_UNSIGNED says whether this is a zero-extension.
8445 Let's avoid computing it if it does not affect WIN
8446 and if UNS will not be needed again. */
8447 if ((uns
8448 || CONVERT_EXPR_P (op))
8449 && TYPE_UNSIGNED (TREE_TYPE (op)))
8450 {
8451 uns = 1;
8452 win = op;
8453 }
8454 }
8455 }
8456
8457 /* If we finally reach a constant see if it fits in for_type and
8458 in that case convert it. */
8459 if (for_type
8460 && TREE_CODE (win) == INTEGER_CST
8461 && TREE_TYPE (win) != for_type
8462 && int_fits_type_p (win, for_type))
8463 win = fold_convert (for_type, win);
8464
8465 return win;
8466 }
8467 \f
8468 /* Return OP or a simpler expression for a narrower value
8469 which can be sign-extended or zero-extended to give back OP.
8470 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8471 or 0 if the value should be sign-extended. */
8472
8473 tree
8474 get_narrower (tree op, int *unsignedp_ptr)
8475 {
8476 int uns = 0;
8477 int first = 1;
8478 tree win = op;
8479 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8480
8481 while (TREE_CODE (op) == NOP_EXPR)
8482 {
8483 int bitschange
8484 = (TYPE_PRECISION (TREE_TYPE (op))
8485 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8486
8487 /* Truncations are many-one so cannot be removed. */
8488 if (bitschange < 0)
8489 break;
8490
8491 /* See what's inside this conversion. If we decide to strip it,
8492 we will set WIN. */
8493
8494 if (bitschange > 0)
8495 {
8496 op = TREE_OPERAND (op, 0);
8497 /* An extension: the outermost one can be stripped,
8498 but remember whether it is zero or sign extension. */
8499 if (first)
8500 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8501 /* Otherwise, if a sign extension has been stripped,
8502 only sign extensions can now be stripped;
8503 if a zero extension has been stripped, only zero-extensions. */
8504 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8505 break;
8506 first = 0;
8507 }
8508 else /* bitschange == 0 */
8509 {
8510 /* A change in nominal type can always be stripped, but we must
8511 preserve the unsignedness. */
8512 if (first)
8513 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8514 first = 0;
8515 op = TREE_OPERAND (op, 0);
8516 /* Keep trying to narrow, but don't assign op to win if it
8517 would turn an integral type into something else. */
8518 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8519 continue;
8520 }
8521
8522 win = op;
8523 }
8524
8525 if (TREE_CODE (op) == COMPONENT_REF
8526 /* Since type_for_size always gives an integer type. */
8527 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8528 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8529 /* Ensure field is laid out already. */
8530 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8531 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8532 {
8533 unsigned HOST_WIDE_INT innerprec
8534 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8535 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8536 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8537 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8538
8539 /* We can get this structure field in a narrower type that fits it,
8540 but the resulting extension to its nominal type (a fullword type)
8541 must satisfy the same conditions as for other extensions.
8542
8543 Do this only for fields that are aligned (not bit-fields),
8544 because when bit-field insns will be used there is no
8545 advantage in doing this. */
8546
8547 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8548 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8549 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8550 && type != 0)
8551 {
8552 if (first)
8553 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8554 win = fold_convert (type, op);
8555 }
8556 }
8557
8558 *unsignedp_ptr = uns;
8559 return win;
8560 }
8561 \f
8562 /* Returns true if integer constant C has a value that is permissible
8563 for type TYPE (an INTEGER_TYPE). */
8564
8565 bool
8566 int_fits_type_p (const_tree c, const_tree type)
8567 {
8568 tree type_low_bound, type_high_bound;
8569 bool ok_for_low_bound, ok_for_high_bound;
8570 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8571
8572 retry:
8573 type_low_bound = TYPE_MIN_VALUE (type);
8574 type_high_bound = TYPE_MAX_VALUE (type);
8575
8576 /* If at least one bound of the type is a constant integer, we can check
8577 ourselves and maybe make a decision. If no such decision is possible, but
8578 this type is a subtype, try checking against that. Otherwise, use
8579 fits_to_tree_p, which checks against the precision.
8580
8581 Compute the status for each possibly constant bound, and return if we see
8582 one that C does not satisfy: in that case C is known not to fit. The
8583 flags ok_for_low_bound and ok_for_high_bound record whether the
8584 corresponding bound is a constant that C is known to satisfy. */
8585
8586 /* Check if c >= type_low_bound. */
8587 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8588 {
8589 if (tree_int_cst_lt (c, type_low_bound))
8590 return false;
8591 ok_for_low_bound = true;
8592 }
8593 else
8594 ok_for_low_bound = false;
8595
8596 /* Check if c <= type_high_bound. */
8597 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8598 {
8599 if (tree_int_cst_lt (type_high_bound, c))
8600 return false;
8601 ok_for_high_bound = true;
8602 }
8603 else
8604 ok_for_high_bound = false;
8605
8606 /* If the constant fits both bounds, the result is known. */
8607 if (ok_for_low_bound && ok_for_high_bound)
8608 return true;
8609
8610 /* Perform some generic filtering which may allow making a decision
8611 even if the bounds are not constant. First, negative integers
8612 never fit in unsigned types. */
8613 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8614 return false;
8615
8616 /* Second, narrower types always fit in wider ones. */
8617 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8618 return true;
8619
8620 /* Third, unsigned integers with top bit set never fit signed types. */
8621 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8622 {
8623 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8624 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8625 {
8626 /* When a tree_cst is converted to a wide-int, the precision
8627 is taken from the type. However, if the precision of the
8628 mode underneath the type is smaller than that, it is
8629 possible that the value will not fit. The test below
8630 fails if any bit is set between the sign bit of the
8631 underlying mode and the top bit of the type. */
8632 if (wi::ne_p (wi::zext (c, prec - 1), c))
8633 return false;
8634 }
8635 else if (wi::neg_p (c))
8636 return false;
8637 }
8638
8639 /* If we haven't been able to decide at this point, there is nothing more we
8640 can check ourselves here. Look at the base type if we have one and it
8641 has the same precision. */
8642 if (TREE_CODE (type) == INTEGER_TYPE
8643 && TREE_TYPE (type) != 0
8644 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8645 {
8646 type = TREE_TYPE (type);
8647 goto retry;
8648 }
8649
8650 /* Or to fits_to_tree_p, if nothing else. */
8651 return wi::fits_to_tree_p (c, type);
8652 }
8653
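/* Editor's illustration (not part of GCC; never compiled): range checks a
   front end might perform before folding a constant into a narrower type.
   The function name is hypothetical.  */
#if 0
static void
example_int_fits_type_p (void)
{
  tree c100 = build_int_cst (integer_type_node, 100);
  tree c300 = build_int_cst (integer_type_node, 300);

  /* unsigned char ranges over [0, 255] on the usual 8-bit-char targets.  */
  gcc_assert (int_fits_type_p (c100, unsigned_char_type_node));
  gcc_assert (!int_fits_type_p (c300, unsigned_char_type_node));
}
#endif
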
8654 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8655 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8656 represented (assuming two's-complement arithmetic) within the bit
8657 precision of the type are returned instead. */
8658
8659 void
8660 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8661 {
8662 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8663 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8664 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8665 else
8666 {
8667 if (TYPE_UNSIGNED (type))
8668 mpz_set_ui (min, 0);
8669 else
8670 {
8671 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8672 wi::to_mpz (mn, min, SIGNED);
8673 }
8674 }
8675
8676 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8677 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8678 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8679 else
8680 {
8681 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8682 wi::to_mpz (mn, max, TYPE_SIGN (type));
8683 }
8684 }
8685
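/* Editor's illustration (not part of GCC; never compiled): fetching the
   representable range of a type into GMP integers, in the style of the
   loop optimizers when they reason about induction variables.  Names are
   hypothetical.  */
#if 0
static void
example_get_type_static_bounds (tree type)
{
  mpz_t lo, hi;

  mpz_init (lo);
  mpz_init (hi);

  /* For e.g. a 32-bit "int", LO becomes -2147483648 and HI 2147483647;
     for pointer types the full two's-complement range is returned.  */
  get_type_static_bounds (type, lo, hi);

  /* ... use LO and HI ...  */

  mpz_clear (lo);
  mpz_clear (hi);
}
#endif
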
8686 /* Return true if VAR is an automatic variable defined in function FN. */
8687
8688 bool
8689 auto_var_in_fn_p (const_tree var, const_tree fn)
8690 {
8691 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8692 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8693 || TREE_CODE (var) == PARM_DECL)
8694 && ! TREE_STATIC (var))
8695 || TREE_CODE (var) == LABEL_DECL
8696 || TREE_CODE (var) == RESULT_DECL));
8697 }
8698
8699 /* Subprogram of following function. Called by walk_tree.
8700
8701 Return *TP if it is an automatic variable or parameter of the
8702 function passed in as DATA. */
8703
8704 static tree
8705 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8706 {
8707 tree fn = (tree) data;
8708
8709 if (TYPE_P (*tp))
8710 *walk_subtrees = 0;
8711
8712 else if (DECL_P (*tp)
8713 && auto_var_in_fn_p (*tp, fn))
8714 return *tp;
8715
8716 return NULL_TREE;
8717 }
8718
8719 /* Returns true if T is, contains, or refers to a type with variable
8720 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8721 arguments, but not the return type. If FN is nonzero, only return
8722 true if a modifier of the type or position of FN is a variable or
8723 parameter inside FN.
8724
8725 This concept is more general than that of C99 'variably modified types':
8726 in C99, a struct type is never variably modified because a VLA may not
8727 appear as a structure member. However, in GNU C, code like:
8728
8729 struct S { int i[f()]; };
8730
8731 is valid, and other languages may define similar constructs. */
8732
8733 bool
8734 variably_modified_type_p (tree type, tree fn)
8735 {
8736 tree t;
8737
8738 /* Test if T is either variable (if FN is zero) or an expression containing
8739 a variable in FN. If TYPE isn't gimplified, return true also if
8740 gimplify_one_sizepos would gimplify the expression into a local
8741 variable. */
8742 #define RETURN_TRUE_IF_VAR(T) \
8743 do { tree _t = (T); \
8744 if (_t != NULL_TREE \
8745 && _t != error_mark_node \
8746 && TREE_CODE (_t) != INTEGER_CST \
8747 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8748 && (!fn \
8749 || (!TYPE_SIZES_GIMPLIFIED (type) \
8750 && !is_gimple_sizepos (_t)) \
8751 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8752 return true; } while (0)
8753
8754 if (type == error_mark_node)
8755 return false;
8756
8757 /* If TYPE itself has variable size, it is variably modified. */
8758 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8759 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8760
8761 switch (TREE_CODE (type))
8762 {
8763 case POINTER_TYPE:
8764 case REFERENCE_TYPE:
8765 case VECTOR_TYPE:
8766 if (variably_modified_type_p (TREE_TYPE (type), fn))
8767 return true;
8768 break;
8769
8770 case FUNCTION_TYPE:
8771 case METHOD_TYPE:
8772 /* If TYPE is a function type, it is variably modified if the
8773 return type is variably modified. */
8774 if (variably_modified_type_p (TREE_TYPE (type), fn))
8775 return true;
8776 break;
8777
8778 case INTEGER_TYPE:
8779 case REAL_TYPE:
8780 case FIXED_POINT_TYPE:
8781 case ENUMERAL_TYPE:
8782 case BOOLEAN_TYPE:
8783 /* Scalar types are variably modified if their end points
8784 aren't constant. */
8785 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8786 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8787 break;
8788
8789 case RECORD_TYPE:
8790 case UNION_TYPE:
8791 case QUAL_UNION_TYPE:
8792 /* We can't see if any of the fields are variably-modified by the
8793 definition we normally use, since that would produce infinite
8794 recursion via pointers. */
8795 /* This is variably modified if some field's type is. */
8796 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8797 if (TREE_CODE (t) == FIELD_DECL)
8798 {
8799 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8800 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8801 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8802
8803 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8804 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8805 }
8806 break;
8807
8808 case ARRAY_TYPE:
8809 /* Do not call ourselves to avoid infinite recursion. This is
8810 variably modified if the element type is. */
8811 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8812 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8813 break;
8814
8815 default:
8816 break;
8817 }
8818
8819 /* The current language may have other cases to check, but in general,
8820 all other types are not variably modified. */
8821 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8822
8823 #undef RETURN_TRUE_IF_VAR
8824 }
8825
8826 /* Given a DECL or TYPE, return the scope in which it was declared, or
8827 NULL_TREE if there is no containing scope. */
8828
8829 tree
8830 get_containing_scope (const_tree t)
8831 {
8832 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8833 }
8834
8835 /* Return the innermost context enclosing DECL that is
8836 a FUNCTION_DECL, or zero if none. */
8837
8838 tree
8839 decl_function_context (const_tree decl)
8840 {
8841 tree context;
8842
8843 if (TREE_CODE (decl) == ERROR_MARK)
8844 return 0;
8845
8846 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8847 where we look up the function at runtime. Such functions always take
8848 a first argument of type 'pointer to real context'.
8849
8850 C++ should really be fixed to use DECL_CONTEXT for the real context,
8851 and use something else for the "virtual context". */
8852 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8853 context
8854 = TYPE_MAIN_VARIANT
8855 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8856 else
8857 context = DECL_CONTEXT (decl);
8858
8859 while (context && TREE_CODE (context) != FUNCTION_DECL)
8860 {
8861 if (TREE_CODE (context) == BLOCK)
8862 context = BLOCK_SUPERCONTEXT (context);
8863 else
8864 context = get_containing_scope (context);
8865 }
8866
8867 return context;
8868 }
8869
8870 /* Return the innermost context enclosing DECL that is
8871 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8872 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8873
8874 tree
8875 decl_type_context (const_tree decl)
8876 {
8877 tree context = DECL_CONTEXT (decl);
8878
8879 while (context)
8880 switch (TREE_CODE (context))
8881 {
8882 case NAMESPACE_DECL:
8883 case TRANSLATION_UNIT_DECL:
8884 return NULL_TREE;
8885
8886 case RECORD_TYPE:
8887 case UNION_TYPE:
8888 case QUAL_UNION_TYPE:
8889 return context;
8890
8891 case TYPE_DECL:
8892 case FUNCTION_DECL:
8893 context = DECL_CONTEXT (context);
8894 break;
8895
8896 case BLOCK:
8897 context = BLOCK_SUPERCONTEXT (context);
8898 break;
8899
8900 default:
8901 gcc_unreachable ();
8902 }
8903
8904 return NULL_TREE;
8905 }
8906
8907 /* CALL is a CALL_EXPR. Return the declaration for the function
8908 called, or NULL_TREE if the called function cannot be
8909 determined. */
8910
8911 tree
8912 get_callee_fndecl (const_tree call)
8913 {
8914 tree addr;
8915
8916 if (call == error_mark_node)
8917 return error_mark_node;
8918
8919 /* It's invalid to call this function with anything but a
8920 CALL_EXPR. */
8921 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8922
8923 /* The first operand to the CALL is the address of the function
8924 called. */
8925 addr = CALL_EXPR_FN (call);
8926
8927 /* If there is no function, return early. */
8928 if (addr == NULL_TREE)
8929 return NULL_TREE;
8930
8931 STRIP_NOPS (addr);
8932
8933 /* If this is a readonly function pointer, extract its initial value. */
8934 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8935 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8936 && DECL_INITIAL (addr))
8937 addr = DECL_INITIAL (addr);
8938
8939 /* If the address is just `&f' for some function `f', then we know
8940 that `f' is being called. */
8941 if (TREE_CODE (addr) == ADDR_EXPR
8942 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8943 return TREE_OPERAND (addr, 0);
8944
8945 /* We couldn't figure out what was being called. */
8946 return NULL_TREE;
8947 }
8948
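/* Editor's illustration (not part of GCC; never compiled): the typical
   pattern for inspecting a direct call.  Names are hypothetical.  */
#if 0
static void
example_get_callee_fndecl (tree call_expr)
{
  tree fndecl = get_callee_fndecl (call_expr);

  /* NULL_TREE means the call is indirect (made through a pointer we could
     not see through), so always check before using the result.  */
  if (fndecl && DECL_NAME (fndecl))
    fprintf (stderr, "direct call to %s\n",
	     IDENTIFIER_POINTER (DECL_NAME (fndecl)));
}
#endif
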
8949 /* Print debugging information about tree nodes generated during the compile,
8950 and any language-specific information. */
8951
8952 void
8953 dump_tree_statistics (void)
8954 {
8955 if (GATHER_STATISTICS)
8956 {
8957 int i;
8958 int total_nodes, total_bytes;
8959 fprintf (stderr, "Kind Nodes Bytes\n");
8960 fprintf (stderr, "---------------------------------------\n");
8961 total_nodes = total_bytes = 0;
8962 for (i = 0; i < (int) all_kinds; i++)
8963 {
8964 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8965 tree_node_counts[i], tree_node_sizes[i]);
8966 total_nodes += tree_node_counts[i];
8967 total_bytes += tree_node_sizes[i];
8968 }
8969 fprintf (stderr, "---------------------------------------\n");
8970 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8971 fprintf (stderr, "---------------------------------------\n");
8972 fprintf (stderr, "Code Nodes\n");
8973 fprintf (stderr, "----------------------------\n");
8974 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8975 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8976 tree_code_counts[i]);
8977 fprintf (stderr, "----------------------------\n");
8978 ssanames_print_statistics ();
8979 phinodes_print_statistics ();
8980 }
8981 else
8982 fprintf (stderr, "(No per-node statistics)\n");
8983
8984 print_type_hash_statistics ();
8985 print_debug_expr_statistics ();
8986 print_value_expr_statistics ();
8987 lang_hooks.print_statistics ();
8988 }
8989 \f
8990 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8991
8992 /* Generate a crc32 of a byte. */
8993
8994 static unsigned
8995 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
8996 {
8997 unsigned ix;
8998
8999 for (ix = bits; ix--; value <<= 1)
9000 {
9001 unsigned feedback;
9002
9003 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9004 chksum <<= 1;
9005 chksum ^= feedback;
9006 }
9007 return chksum;
9008 }
9009
9010 /* Generate a crc32 of a 32-bit unsigned. */
9011
9012 unsigned
9013 crc32_unsigned (unsigned chksum, unsigned value)
9014 {
9015 return crc32_unsigned_bits (chksum, value, 32);
9016 }
9017
9018 /* Generate a crc32 of a byte. */
9019
9020 unsigned
9021 crc32_byte (unsigned chksum, char byte)
9022 {
9023 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9024 }
9025
9026 /* Generate a crc32 of a string. */
9027
9028 unsigned
9029 crc32_string (unsigned chksum, const char *string)
9030 {
9031 do
9032 {
9033 chksum = crc32_byte (chksum, *string);
9034 }
9035 while (*string++);
9036 return chksum;
9037 }
9038
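/* Editor's illustration (not part of GCC; never compiled): the crc32
   helpers chain, so a checksum can be accumulated over several pieces of
   data.  The function name is hypothetical.  */
#if 0
static unsigned
example_crc32_chain (const char *name, unsigned version)
{
  unsigned chksum = crc32_string (0, name);

  /* Feed a 32-bit value into the same running checksum.  */
  chksum = crc32_unsigned (chksum, version);
  return chksum;
}
#endif
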
9039 /* P is a string that will be used in a symbol. Mask out any characters
9040 that are not valid in that context. */
9041
9042 void
9043 clean_symbol_name (char *p)
9044 {
9045 for (; *p; p++)
9046 if (! (ISALNUM (*p)
9047 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9048 || *p == '$'
9049 #endif
9050 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9051 || *p == '.'
9052 #endif
9053 ))
9054 *p = '_';
9055 }
9056
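/* Editor's illustration (not part of GCC; never compiled): the buffer is
   rewritten in place, so callers pass a private copy.  Names are
   hypothetical; whether '.' and '$' survive depends on NO_DOT_IN_LABEL
   and NO_DOLLAR_IN_LABEL.  */
#if 0
static void
example_clean_symbol_name (void)
{
  char buf[] = "my file-name.c";

  /* Every character that is not valid in an assembler label becomes '_',
     e.g. "my_file_name_c" when '.' is not allowed in labels.  */
  clean_symbol_name (buf);
}
#endif
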
9057 /* Generate a name for a special-purpose function.
9058 The generated name may need to be unique across the whole link.
9059 Changes to this function may also require corresponding changes to
9060 xstrdup_mask_random.
9061 TYPE is some string to identify the purpose of this function to the
9062 linker or collect2; it must start with an uppercase letter,
9063 one of:
9064 I - for constructors
9065 D - for destructors
9066 N - for C++ anonymous namespaces
9067 F - for DWARF unwind frame information. */
9068
9069 tree
9070 get_file_function_name (const char *type)
9071 {
9072 char *buf;
9073 const char *p;
9074 char *q;
9075
9076 /* If we already have a name we know to be unique, just use that. */
9077 if (first_global_object_name)
9078 p = q = ASTRDUP (first_global_object_name);
9079 /* If the target is handling the constructors/destructors, they
9080 will be local to this file and the name is only necessary for
9081 debugging purposes.
9082 We also assign sub_I and sub_D suffixes to constructors called from
9083 the global static constructors. These are always local. */
9084 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9085 || (strncmp (type, "sub_", 4) == 0
9086 && (type[4] == 'I' || type[4] == 'D')))
9087 {
9088 const char *file = main_input_filename;
9089 if (! file)
9090 file = LOCATION_FILE (input_location);
9091 /* Just use the file's basename, because the full pathname
9092 might be quite long. */
9093 p = q = ASTRDUP (lbasename (file));
9094 }
9095 else
9096 {
9097 /* Otherwise, the name must be unique across the entire link.
9098 We don't have anything that we know to be unique to this translation
9099 unit, so use what we do have and throw in some randomness. */
9100 unsigned len;
9101 const char *name = weak_global_object_name;
9102 const char *file = main_input_filename;
9103
9104 if (! name)
9105 name = "";
9106 if (! file)
9107 file = LOCATION_FILE (input_location);
9108
9109 len = strlen (file);
9110 q = (char *) alloca (9 + 17 + len + 1);
9111 memcpy (q, file, len + 1);
9112
9113 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9114 crc32_string (0, name), get_random_seed (false));
9115
9116 p = q;
9117 }
9118
9119 clean_symbol_name (q);
9120 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9121 + strlen (type));
9122
9123 /* Set up the name of the file-level functions we may need.
9124 Use a global object (which is already required to be unique over
9125 the program) rather than the file name (which imposes extra
9126 constraints). */
9127 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9128
9129 return get_identifier (buf);
9130 }
9131 \f
9132 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9133
9134 /* Complain that the tree code of NODE does not match the expected 0
9135 terminated list of trailing codes. The trailing code list can be
9136 empty, for a more vague error message. FILE, LINE, and FUNCTION
9137 are of the caller. */
9138
9139 void
9140 tree_check_failed (const_tree node, const char *file,
9141 int line, const char *function, ...)
9142 {
9143 va_list args;
9144 const char *buffer;
9145 unsigned length = 0;
9146 enum tree_code code;
9147
9148 va_start (args, function);
9149 while ((code = (enum tree_code) va_arg (args, int)))
9150 length += 4 + strlen (get_tree_code_name (code));
9151 va_end (args);
9152 if (length)
9153 {
9154 char *tmp;
9155 va_start (args, function);
9156 length += strlen ("expected ");
9157 buffer = tmp = (char *) alloca (length);
9158 length = 0;
9159 while ((code = (enum tree_code) va_arg (args, int)))
9160 {
9161 const char *prefix = length ? " or " : "expected ";
9162
9163 strcpy (tmp + length, prefix);
9164 length += strlen (prefix);
9165 strcpy (tmp + length, get_tree_code_name (code));
9166 length += strlen (get_tree_code_name (code));
9167 }
9168 va_end (args);
9169 }
9170 else
9171 buffer = "unexpected node";
9172
9173 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9174 buffer, get_tree_code_name (TREE_CODE (node)),
9175 function, trim_filename (file), line);
9176 }
9177
9178 /* Complain that the tree code of NODE does match the expected 0
9179 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9180 the caller. */
9181
9182 void
9183 tree_not_check_failed (const_tree node, const char *file,
9184 int line, const char *function, ...)
9185 {
9186 va_list args;
9187 char *buffer;
9188 unsigned length = 0;
9189 enum tree_code code;
9190
9191 va_start (args, function);
9192 while ((code = (enum tree_code) va_arg (args, int)))
9193 length += 4 + strlen (get_tree_code_name (code));
9194 va_end (args);
9195 va_start (args, function);
9196 buffer = (char *) alloca (length);
9197 length = 0;
9198 while ((code = (enum tree_code) va_arg (args, int)))
9199 {
9200 if (length)
9201 {
9202 strcpy (buffer + length, " or ");
9203 length += 4;
9204 }
9205 strcpy (buffer + length, get_tree_code_name (code));
9206 length += strlen (get_tree_code_name (code));
9207 }
9208 va_end (args);
9209
9210 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9211 buffer, get_tree_code_name (TREE_CODE (node)),
9212 function, trim_filename (file), line);
9213 }
9214
9215 /* Similar to tree_check_failed, except that we check for a class of tree
9216 code, given in CL. */
9217
9218 void
9219 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9220 const char *file, int line, const char *function)
9221 {
9222 internal_error
9223 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9224 TREE_CODE_CLASS_STRING (cl),
9225 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9226 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9227 }
9228
9229 /* Similar to tree_check_failed, except that instead of specifying a
9230 dozen codes, use the knowledge that they're all sequential. */
9231
9232 void
9233 tree_range_check_failed (const_tree node, const char *file, int line,
9234 const char *function, enum tree_code c1,
9235 enum tree_code c2)
9236 {
9237 char *buffer;
9238 unsigned length = 0;
9239 unsigned int c;
9240
9241 for (c = c1; c <= c2; ++c)
9242 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9243
9244 length += strlen ("expected ");
9245 buffer = (char *) alloca (length);
9246 length = 0;
9247
9248 for (c = c1; c <= c2; ++c)
9249 {
9250 const char *prefix = length ? " or " : "expected ";
9251
9252 strcpy (buffer + length, prefix);
9253 length += strlen (prefix);
9254 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9255 length += strlen (get_tree_code_name ((enum tree_code) c));
9256 }
9257
9258 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9259 buffer, get_tree_code_name (TREE_CODE (node)),
9260 function, trim_filename (file), line);
9261 }
9262
9263
9264 /* Similar to tree_check_failed, except that we check that a tree does
9265 not belong to the specified tree code class, given in CL. */
9266
9267 void
9268 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9269 const char *file, int line, const char *function)
9270 {
9271 internal_error
9272 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9273 TREE_CODE_CLASS_STRING (cl),
9274 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9275 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9276 }
9277
9278
9279 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9280
9281 void
9282 omp_clause_check_failed (const_tree node, const char *file, int line,
9283 const char *function, enum omp_clause_code code)
9284 {
9285 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9286 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9287 function, trim_filename (file), line);
9288 }
9289
9290
9291 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9292
9293 void
9294 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9295 const char *function, enum omp_clause_code c1,
9296 enum omp_clause_code c2)
9297 {
9298 char *buffer;
9299 unsigned length = 0;
9300 unsigned int c;
9301
9302 for (c = c1; c <= c2; ++c)
9303 length += 4 + strlen (omp_clause_code_name[c]);
9304
9305 length += strlen ("expected ");
9306 buffer = (char *) alloca (length);
9307 length = 0;
9308
9309 for (c = c1; c <= c2; ++c)
9310 {
9311 const char *prefix = length ? " or " : "expected ";
9312
9313 strcpy (buffer + length, prefix);
9314 length += strlen (prefix);
9315 strcpy (buffer + length, omp_clause_code_name[c]);
9316 length += strlen (omp_clause_code_name[c]);
9317 }
9318
9319 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9320 buffer, omp_clause_code_name[TREE_CODE (node)],
9321 function, trim_filename (file), line);
9322 }
9323
9324
9325 #undef DEFTREESTRUCT
9326 #define DEFTREESTRUCT(VAL, NAME) NAME,
9327
9328 static const char *ts_enum_names[] = {
9329 #include "treestruct.def"
9330 };
9331 #undef DEFTREESTRUCT
9332
9333 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9334
9335 /* Similar to tree_class_check_failed, except that we check for
9336 whether CODE contains the tree structure identified by EN. */
9337
9338 void
9339 tree_contains_struct_check_failed (const_tree node,
9340 const enum tree_node_structure_enum en,
9341 const char *file, int line,
9342 const char *function)
9343 {
9344 internal_error
9345 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9346 TS_ENUM_NAME (en),
9347 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9348 }
9349
9350
9351 /* Similar to above, except that the check is for the bounds of the
9352 (dynamically sized) vector of elements of a tree_int_cst. */
9353
9354 void
9355 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9356 const char *function)
9357 {
9358 internal_error
9359 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9360 idx + 1, len, function, trim_filename (file), line);
9361 }
9362
9363 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9364 (dynamically sized) vector. */
9365
9366 void
9367 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9368 const char *function)
9369 {
9370 internal_error
9371 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9372 idx + 1, len, function, trim_filename (file), line);
9373 }
9374
9375 /* Similar to above, except that the check is for the bounds of the operand
9376 vector of an expression node EXP. */
9377
9378 void
9379 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9380 int line, const char *function)
9381 {
9382 enum tree_code code = TREE_CODE (exp);
9383 internal_error
9384 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9385 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9386 function, trim_filename (file), line);
9387 }
9388
9389 /* Similar to above, except that the check is for the number of
9390 operands of an OMP_CLAUSE node. */
9391
9392 void
9393 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9394 int line, const char *function)
9395 {
9396 internal_error
9397 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9398 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9399 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9400 trim_filename (file), line);
9401 }
9402 #endif /* ENABLE_TREE_CHECKING */
9403 \f
9404 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9405 and mapped to the machine mode MODE. Initialize its fields and build
9406 the information necessary for debugging output. */
9407
9408 static tree
9409 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9410 {
9411 tree t;
9412 hashval_t hashcode = 0;
9413
9414 t = make_node (VECTOR_TYPE);
9415 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9416 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9417 SET_TYPE_MODE (t, mode);
9418
9419 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9420 SET_TYPE_STRUCTURAL_EQUALITY (t);
9421 else if (TYPE_CANONICAL (innertype) != innertype
9422 || mode != VOIDmode)
9423 TYPE_CANONICAL (t)
9424 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9425
9426 layout_type (t);
9427
9428 hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode);
9429 hashcode = iterative_hash_host_wide_int (nunits, hashcode);
9430 hashcode = iterative_hash_host_wide_int (mode, hashcode);
9431 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (t)), hashcode);
9432 t = type_hash_canon (hashcode, t);
9433
9434 /* We have built a main variant, based on the main variant of the
9435 inner type. Use it to build the variant we return. */
9436 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9437 && TREE_TYPE (t) != innertype)
9438 return build_type_attribute_qual_variant (t,
9439 TYPE_ATTRIBUTES (innertype),
9440 TYPE_QUALS (innertype));
9441
9442 return t;
9443 }
9444
9445 static tree
9446 make_or_reuse_type (unsigned size, int unsignedp)
9447 {
9448 if (size == INT_TYPE_SIZE)
9449 return unsignedp ? unsigned_type_node : integer_type_node;
9450 if (size == CHAR_TYPE_SIZE)
9451 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9452 if (size == SHORT_TYPE_SIZE)
9453 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9454 if (size == LONG_TYPE_SIZE)
9455 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9456 if (size == LONG_LONG_TYPE_SIZE)
9457 return (unsignedp ? long_long_unsigned_type_node
9458 : long_long_integer_type_node);
9459 if (size == 128 && int128_integer_type_node)
9460 return (unsignedp ? int128_unsigned_type_node
9461 : int128_integer_type_node);
9462
9463 if (unsignedp)
9464 return make_unsigned_type (size);
9465 else
9466 return make_signed_type (size);
9467 }
9468
9469 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9470
9471 static tree
9472 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9473 {
9474 if (satp)
9475 {
9476 if (size == SHORT_FRACT_TYPE_SIZE)
9477 return unsignedp ? sat_unsigned_short_fract_type_node
9478 : sat_short_fract_type_node;
9479 if (size == FRACT_TYPE_SIZE)
9480 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9481 if (size == LONG_FRACT_TYPE_SIZE)
9482 return unsignedp ? sat_unsigned_long_fract_type_node
9483 : sat_long_fract_type_node;
9484 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9485 return unsignedp ? sat_unsigned_long_long_fract_type_node
9486 : sat_long_long_fract_type_node;
9487 }
9488 else
9489 {
9490 if (size == SHORT_FRACT_TYPE_SIZE)
9491 return unsignedp ? unsigned_short_fract_type_node
9492 : short_fract_type_node;
9493 if (size == FRACT_TYPE_SIZE)
9494 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9495 if (size == LONG_FRACT_TYPE_SIZE)
9496 return unsignedp ? unsigned_long_fract_type_node
9497 : long_fract_type_node;
9498 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9499 return unsignedp ? unsigned_long_long_fract_type_node
9500 : long_long_fract_type_node;
9501 }
9502
9503 return make_fract_type (size, unsignedp, satp);
9504 }
9505
9506 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9507
9508 static tree
9509 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9510 {
9511 if (satp)
9512 {
9513 if (size == SHORT_ACCUM_TYPE_SIZE)
9514 return unsignedp ? sat_unsigned_short_accum_type_node
9515 : sat_short_accum_type_node;
9516 if (size == ACCUM_TYPE_SIZE)
9517 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9518 if (size == LONG_ACCUM_TYPE_SIZE)
9519 return unsignedp ? sat_unsigned_long_accum_type_node
9520 : sat_long_accum_type_node;
9521 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9522 return unsignedp ? sat_unsigned_long_long_accum_type_node
9523 : sat_long_long_accum_type_node;
9524 }
9525 else
9526 {
9527 if (size == SHORT_ACCUM_TYPE_SIZE)
9528 return unsignedp ? unsigned_short_accum_type_node
9529 : short_accum_type_node;
9530 if (size == ACCUM_TYPE_SIZE)
9531 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9532 if (size == LONG_ACCUM_TYPE_SIZE)
9533 return unsignedp ? unsigned_long_accum_type_node
9534 : long_accum_type_node;
9535 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9536 return unsignedp ? unsigned_long_long_accum_type_node
9537 : long_long_accum_type_node;
9538 }
9539
9540 return make_accum_type (size, unsignedp, satp);
9541 }
9542
9543
9544 /* Create an atomic variant node for TYPE. This routine is called
9545 during initialization of data types to create the 5 basic atomic
9546 types. The generic build_variant_type function requires these to
9547 already be set up in order to function properly, so cannot be
9548 called from there. If ALIGN is non-zero, then ensure alignment is
9549 overridden to this value. */
9550
9551 static tree
9552 build_atomic_base (tree type, unsigned int align)
9553 {
9554 tree t;
9555
9556   /* Make sure it's not already registered. */
9557 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9558 return t;
9559
9560 t = build_variant_type_copy (type);
9561 set_type_quals (t, TYPE_QUAL_ATOMIC);
9562
9563 if (align)
9564 TYPE_ALIGN (t) = align;
9565
9566 return t;
9567 }
9568
9569 /* Create nodes for all integer types (and error_mark_node) using the sizes
9570 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9571 SHORT_DOUBLE specifies whether double should be of the same precision
9572 as float. */
9573
9574 void
9575 build_common_tree_nodes (bool signed_char, bool short_double)
9576 {
9577 error_mark_node = make_node (ERROR_MARK);
9578 TREE_TYPE (error_mark_node) = error_mark_node;
9579
9580 initialize_sizetypes ();
9581
9582 /* Define both `signed char' and `unsigned char'. */
9583 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9584 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9585 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9586 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9587
9588 /* Define `char', which is like either `signed char' or `unsigned char'
9589 but not the same as either. */
9590 char_type_node
9591 = (signed_char
9592 ? make_signed_type (CHAR_TYPE_SIZE)
9593 : make_unsigned_type (CHAR_TYPE_SIZE));
9594 TYPE_STRING_FLAG (char_type_node) = 1;
9595
9596 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9597 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9598 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9599 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9600 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9601 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9602 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9603 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9604 #if HOST_BITS_PER_WIDE_INT >= 64
9605   /* TODO: This isn't correct, but at the moment the logic depends on
9606      the host's wide integers instead of the target's.
9607      If there is a target that does not support TImode but has a 128-bit
9608      integer-scalar register, this target check needs to be adjusted. */
9609 if (targetm.scalar_mode_supported_p (TImode))
9610 {
9611 int128_integer_type_node = make_signed_type (128);
9612 int128_unsigned_type_node = make_unsigned_type (128);
9613 }
9614 #endif
9615
9616 /* Define a boolean type. This type only represents boolean values but
9617 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9618 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9619 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9620 TYPE_PRECISION (boolean_type_node) = 1;
9621 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9622
9623 /* Define what type to use for size_t. */
9624 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9625 size_type_node = unsigned_type_node;
9626 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9627 size_type_node = long_unsigned_type_node;
9628 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9629 size_type_node = long_long_unsigned_type_node;
9630 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9631 size_type_node = short_unsigned_type_node;
9632 else
9633 gcc_unreachable ();
9634
9635 /* Fill in the rest of the sized types. Reuse existing type nodes
9636 when possible. */
9637 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9638 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9639 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9640 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9641 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9642
9643 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9644 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9645 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9646 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9647 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9648
9649   /* Don't call build_qualified_type for atomics.  That routine does
9650 special processing for atomics, and until they are initialized
9651 it's better not to make that call.
9652
9653 Check to see if there is a target override for atomic types. */
9654
9655 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9656 targetm.atomic_align_for_mode (QImode));
9657 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9658 targetm.atomic_align_for_mode (HImode));
9659 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9660 targetm.atomic_align_for_mode (SImode));
9661 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9662 targetm.atomic_align_for_mode (DImode));
9663 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9664 targetm.atomic_align_for_mode (TImode));
9665
9666 access_public_node = get_identifier ("public");
9667 access_protected_node = get_identifier ("protected");
9668 access_private_node = get_identifier ("private");
9669
9670   /* Define these next since types below may use them. */
9671 integer_zero_node = build_int_cst (integer_type_node, 0);
9672 integer_one_node = build_int_cst (integer_type_node, 1);
9673 integer_three_node = build_int_cst (integer_type_node, 3);
9674 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9675
9676 size_zero_node = size_int (0);
9677 size_one_node = size_int (1);
9678 bitsize_zero_node = bitsize_int (0);
9679 bitsize_one_node = bitsize_int (1);
9680 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9681
9682 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9683 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9684
9685 void_type_node = make_node (VOID_TYPE);
9686 layout_type (void_type_node);
9687
9688 /* We are not going to have real types in C with less than byte alignment,
9689 so we might as well not have any types that claim to have it. */
9690 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9691 TYPE_USER_ALIGN (void_type_node) = 0;
9692
9693 void_node = make_node (VOID_CST);
9694 TREE_TYPE (void_node) = void_type_node;
9695
9696 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9697 layout_type (TREE_TYPE (null_pointer_node));
9698
9699 ptr_type_node = build_pointer_type (void_type_node);
9700 const_ptr_type_node
9701 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9702 fileptr_type_node = ptr_type_node;
9703
9704 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9705
9706 float_type_node = make_node (REAL_TYPE);
9707 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9708 layout_type (float_type_node);
9709
9710 double_type_node = make_node (REAL_TYPE);
9711 if (short_double)
9712 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9713 else
9714 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9715 layout_type (double_type_node);
9716
9717 long_double_type_node = make_node (REAL_TYPE);
9718 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9719 layout_type (long_double_type_node);
9720
9721 float_ptr_type_node = build_pointer_type (float_type_node);
9722 double_ptr_type_node = build_pointer_type (double_type_node);
9723 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9724 integer_ptr_type_node = build_pointer_type (integer_type_node);
9725
9726 /* Fixed size integer types. */
9727 uint16_type_node = build_nonstandard_integer_type (16, true);
9728 uint32_type_node = build_nonstandard_integer_type (32, true);
9729 uint64_type_node = build_nonstandard_integer_type (64, true);
9730
9731 /* Decimal float types. */
9732 dfloat32_type_node = make_node (REAL_TYPE);
9733 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9734 layout_type (dfloat32_type_node);
9735 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9736 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9737
9738 dfloat64_type_node = make_node (REAL_TYPE);
9739 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9740 layout_type (dfloat64_type_node);
9741 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9742 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9743
9744 dfloat128_type_node = make_node (REAL_TYPE);
9745 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9746 layout_type (dfloat128_type_node);
9747 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9748 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9749
9750 complex_integer_type_node = build_complex_type (integer_type_node);
9751 complex_float_type_node = build_complex_type (float_type_node);
9752 complex_double_type_node = build_complex_type (double_type_node);
9753 complex_long_double_type_node = build_complex_type (long_double_type_node);
9754
9755 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9756 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9757 sat_ ## KIND ## _type_node = \
9758 make_sat_signed_ ## KIND ## _type (SIZE); \
9759 sat_unsigned_ ## KIND ## _type_node = \
9760 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9761 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9762 unsigned_ ## KIND ## _type_node = \
9763 make_unsigned_ ## KIND ## _type (SIZE);
9764
9765 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9766 sat_ ## WIDTH ## KIND ## _type_node = \
9767 make_sat_signed_ ## KIND ## _type (SIZE); \
9768 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9769 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9770 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9771 unsigned_ ## WIDTH ## KIND ## _type_node = \
9772 make_unsigned_ ## KIND ## _type (SIZE);
9773
9774 /* Make fixed-point type nodes based on four different widths. */
9775 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9776 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9777 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9778 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9779 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9780
9781 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9782 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9783 NAME ## _type_node = \
9784 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9785 u ## NAME ## _type_node = \
9786 make_or_reuse_unsigned_ ## KIND ## _type \
9787 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9788 sat_ ## NAME ## _type_node = \
9789 make_or_reuse_sat_signed_ ## KIND ## _type \
9790 (GET_MODE_BITSIZE (MODE ## mode)); \
9791 sat_u ## NAME ## _type_node = \
9792 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9793 (GET_MODE_BITSIZE (U ## MODE ## mode));
9794
9795 /* Fixed-point type and mode nodes. */
9796 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9797 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9798 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9799 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9800 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9801 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9802 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9803 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9804 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9805 MAKE_FIXED_MODE_NODE (accum, da, DA)
9806 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9807
9808 {
9809 tree t = targetm.build_builtin_va_list ();
9810
9811 /* Many back-ends define record types without setting TYPE_NAME.
9812 If we copied the record type here, we'd keep the original
9813 record type without a name. This breaks name mangling. So,
9814 don't copy record types and let c_common_nodes_and_builtins()
9815 declare the type to be __builtin_va_list. */
9816 if (TREE_CODE (t) != RECORD_TYPE)
9817 t = build_variant_type_copy (t);
9818
9819 va_list_type_node = t;
9820 }
9821 }
9822
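/* Usage sketch (illustrative, not part of the original file): a front end
   typically calls the function above once during its own initialization,
   before creating any language-specific types, along the lines of

       build_common_tree_nodes (flag_signed_char, flag_short_double);

   after which nodes such as integer_type_node, size_type_node and
   boolean_type_node are ready for use.  */
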
9823 /* Modify DECL for given flags.
9824 TM_PURE attribute is set only on types, so the function will modify
9825 DECL's type when ECF_TM_PURE is used. */
9826
9827 void
9828 set_call_expr_flags (tree decl, int flags)
9829 {
9830 if (flags & ECF_NOTHROW)
9831 TREE_NOTHROW (decl) = 1;
9832 if (flags & ECF_CONST)
9833 TREE_READONLY (decl) = 1;
9834 if (flags & ECF_PURE)
9835 DECL_PURE_P (decl) = 1;
9836 if (flags & ECF_LOOPING_CONST_OR_PURE)
9837 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9838 if (flags & ECF_NOVOPS)
9839 DECL_IS_NOVOPS (decl) = 1;
9840 if (flags & ECF_NORETURN)
9841 TREE_THIS_VOLATILE (decl) = 1;
9842 if (flags & ECF_MALLOC)
9843 DECL_IS_MALLOC (decl) = 1;
9844 if (flags & ECF_RETURNS_TWICE)
9845 DECL_IS_RETURNS_TWICE (decl) = 1;
9846 if (flags & ECF_LEAF)
9847 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9848 NULL, DECL_ATTRIBUTES (decl));
9849 if ((flags & ECF_TM_PURE) && flag_tm)
9850 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9851 /* Looping const or pure is implied by noreturn.
9852 There is currently no way to declare looping const or looping pure alone. */
9853 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9854 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9855 }
9856
9857
9858 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9859
9860 static void
9861 local_define_builtin (const char *name, tree type, enum built_in_function code,
9862 const char *library_name, int ecf_flags)
9863 {
9864 tree decl;
9865
9866 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9867 library_name, NULL_TREE);
9868 set_call_expr_flags (decl, ecf_flags);
9869
9870 set_builtin_decl (code, decl, true);
9871 }
9872
9873 /* Call this function after instantiating all builtins that the language
9874 front end cares about. This will build the rest of the builtins that
9875 are relied upon by the tree optimizers and the middle-end. */
9876
9877 void
9878 build_common_builtin_nodes (void)
9879 {
9880 tree tmp, ftype;
9881 int ecf_flags;
9882
9883 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9884 {
9885 ftype = build_function_type (void_type_node, void_list_node);
9886 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9887 "__builtin_unreachable",
9888 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9889 				  | ECF_CONST);
9890 }
9891
9892 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9893 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9894 {
9895 ftype = build_function_type_list (ptr_type_node,
9896 ptr_type_node, const_ptr_type_node,
9897 size_type_node, NULL_TREE);
9898
9899 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9900 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9901 "memcpy", ECF_NOTHROW | ECF_LEAF);
9902 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9903 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9904 "memmove", ECF_NOTHROW | ECF_LEAF);
9905 }
9906
9907 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9908 {
9909 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9910 const_ptr_type_node, size_type_node,
9911 NULL_TREE);
9912 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9913 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9914 }
9915
9916 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9917 {
9918 ftype = build_function_type_list (ptr_type_node,
9919 ptr_type_node, integer_type_node,
9920 size_type_node, NULL_TREE);
9921 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9922 "memset", ECF_NOTHROW | ECF_LEAF);
9923 }
9924
9925 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9926 {
9927 ftype = build_function_type_list (ptr_type_node,
9928 size_type_node, NULL_TREE);
9929 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9930 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9931 }
9932
9933 ftype = build_function_type_list (ptr_type_node, size_type_node,
9934 size_type_node, NULL_TREE);
9935 local_define_builtin ("__builtin_alloca_with_align", ftype,
9936 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9937 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9938
9939 /* If we're checking the stack, `alloca' can throw. */
9940 if (flag_stack_check)
9941 {
9942 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9943 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9944 }
9945
9946 ftype = build_function_type_list (void_type_node,
9947 ptr_type_node, ptr_type_node,
9948 ptr_type_node, NULL_TREE);
9949 local_define_builtin ("__builtin_init_trampoline", ftype,
9950 BUILT_IN_INIT_TRAMPOLINE,
9951 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9952 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9953 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9954 "__builtin_init_heap_trampoline",
9955 ECF_NOTHROW | ECF_LEAF);
9956
9957 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9958 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9959 BUILT_IN_ADJUST_TRAMPOLINE,
9960 "__builtin_adjust_trampoline",
9961 ECF_CONST | ECF_NOTHROW);
9962
9963 ftype = build_function_type_list (void_type_node,
9964 ptr_type_node, ptr_type_node, NULL_TREE);
9965 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9966 BUILT_IN_NONLOCAL_GOTO,
9967 "__builtin_nonlocal_goto",
9968 ECF_NORETURN | ECF_NOTHROW);
9969
9970 ftype = build_function_type_list (void_type_node,
9971 ptr_type_node, ptr_type_node, NULL_TREE);
9972 local_define_builtin ("__builtin_setjmp_setup", ftype,
9973 BUILT_IN_SETJMP_SETUP,
9974 "__builtin_setjmp_setup", ECF_NOTHROW);
9975
9976 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9977 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9978 BUILT_IN_SETJMP_RECEIVER,
9979 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9980
9981 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9982 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9983 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9984
9985 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9986 local_define_builtin ("__builtin_stack_restore", ftype,
9987 BUILT_IN_STACK_RESTORE,
9988 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9989
9990 /* If there's a possibility that we might use the ARM EABI, build the
9991 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
9992 if (targetm.arm_eabi_unwinder)
9993 {
9994 ftype = build_function_type_list (void_type_node, NULL_TREE);
9995 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9996 BUILT_IN_CXA_END_CLEANUP,
9997 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9998 }
9999
10000 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10001 local_define_builtin ("__builtin_unwind_resume", ftype,
10002 BUILT_IN_UNWIND_RESUME,
10003 ((targetm_common.except_unwind_info (&global_options)
10004 == UI_SJLJ)
10005 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10006 ECF_NORETURN);
10007
10008 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10009 {
10010 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10011 NULL_TREE);
10012 local_define_builtin ("__builtin_return_address", ftype,
10013 BUILT_IN_RETURN_ADDRESS,
10014 "__builtin_return_address",
10015 ECF_NOTHROW);
10016 }
10017
10018 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10019 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10020 {
10021 ftype = build_function_type_list (void_type_node, ptr_type_node,
10022 ptr_type_node, NULL_TREE);
10023 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10024 local_define_builtin ("__cyg_profile_func_enter", ftype,
10025 BUILT_IN_PROFILE_FUNC_ENTER,
10026 "__cyg_profile_func_enter", 0);
10027 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10028 local_define_builtin ("__cyg_profile_func_exit", ftype,
10029 BUILT_IN_PROFILE_FUNC_EXIT,
10030 "__cyg_profile_func_exit", 0);
10031 }
10032
10033 /* The exception object and filter values from the runtime. The argument
10034 must be zero before exception lowering, i.e. from the front end. After
10035 exception lowering, it will be the region number for the exception
10036 landing pad. These functions are PURE instead of CONST to prevent
10037 them from being hoisted past the exception edge that will initialize
10038 its value in the landing pad. */
10039 ftype = build_function_type_list (ptr_type_node,
10040 integer_type_node, NULL_TREE);
10041 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10042   /* Only use TM_PURE if we have TM language support. */
10043 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10044 ecf_flags |= ECF_TM_PURE;
10045 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10046 "__builtin_eh_pointer", ecf_flags);
10047
10048 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10049 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10050 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10051 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10052
10053 ftype = build_function_type_list (void_type_node,
10054 integer_type_node, integer_type_node,
10055 NULL_TREE);
10056 local_define_builtin ("__builtin_eh_copy_values", ftype,
10057 BUILT_IN_EH_COPY_VALUES,
10058 "__builtin_eh_copy_values", ECF_NOTHROW);
10059
10060 /* Complex multiplication and division. These are handled as builtins
10061 rather than optabs because emit_library_call_value doesn't support
10062 complex. Further, we can do slightly better with folding these
10063      beasties if the real and imaginary parts of the arguments are separate. */
10064 {
10065 int mode;
10066
10067 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10068 {
10069 char mode_name_buf[4], *q;
10070 const char *p;
10071 enum built_in_function mcode, dcode;
10072 tree type, inner_type;
10073 const char *prefix = "__";
10074
10075 if (targetm.libfunc_gnu_prefix)
10076 prefix = "__gnu_";
10077
10078 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10079 if (type == NULL)
10080 continue;
10081 inner_type = TREE_TYPE (type);
10082
10083 ftype = build_function_type_list (type, inner_type, inner_type,
10084 inner_type, inner_type, NULL_TREE);
10085
10086 mcode = ((enum built_in_function)
10087 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10088 dcode = ((enum built_in_function)
10089 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10090
10091 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10092 *q = TOLOWER (*p);
10093 *q = '\0';
10094
10095 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10096 NULL);
10097 local_define_builtin (built_in_names[mcode], ftype, mcode,
10098 built_in_names[mcode],
10099 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10100
10101 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10102 NULL);
10103 local_define_builtin (built_in_names[dcode], ftype, dcode,
10104 built_in_names[dcode],
10105 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10106 }
10107 }
10108 }
10109
10110 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10111 better way.
10112
10113 If we requested a pointer to a vector, build up the pointers that
10114 we stripped off while looking for the inner type. Similarly for
10115 return values from functions.
10116
10117 The argument TYPE is the top of the chain, and BOTTOM is the
10118 new type which we will point to. */
10119
10120 tree
10121 reconstruct_complex_type (tree type, tree bottom)
10122 {
10123 tree inner, outer;
10124
10125 if (TREE_CODE (type) == POINTER_TYPE)
10126 {
10127 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10128 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10129 TYPE_REF_CAN_ALIAS_ALL (type));
10130 }
10131 else if (TREE_CODE (type) == REFERENCE_TYPE)
10132 {
10133 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10134 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10135 TYPE_REF_CAN_ALIAS_ALL (type));
10136 }
10137 else if (TREE_CODE (type) == ARRAY_TYPE)
10138 {
10139 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10140 outer = build_array_type (inner, TYPE_DOMAIN (type));
10141 }
10142 else if (TREE_CODE (type) == FUNCTION_TYPE)
10143 {
10144 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10145 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10146 }
10147 else if (TREE_CODE (type) == METHOD_TYPE)
10148 {
10149 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10150       /* The build_method_type_directly() routine prepends 'this' to the
10151          argument list, so we must compensate by getting rid of it. */
10152 outer
10153 = build_method_type_directly
10154 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10155 inner,
10156 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10157 }
10158 else if (TREE_CODE (type) == OFFSET_TYPE)
10159 {
10160 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10161 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10162 }
10163 else
10164 return bottom;
10165
10166 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10167 TYPE_QUALS (type));
10168 }
10169
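/* Usage sketch (illustrative only): given a pointer to a scalar element type,
   reconstruct_complex_type rebuilds the same wrapper around a vector of that
   element, which is how pointer-to-vector and vector-returning function types
   are recovered from their scalar templates:

       tree scalar_ptr = build_pointer_type (float_type_node);
       tree v4sf       = build_vector_type (float_type_node, 4);
       tree vector_ptr = reconstruct_complex_type (scalar_ptr, v4sf);

   Here VECTOR_PTR is a pointer to the four-element float vector type.  */
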
10170 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10171 the inner type. */
10172 tree
10173 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10174 {
10175 int nunits;
10176
10177 switch (GET_MODE_CLASS (mode))
10178 {
10179 case MODE_VECTOR_INT:
10180 case MODE_VECTOR_FLOAT:
10181 case MODE_VECTOR_FRACT:
10182 case MODE_VECTOR_UFRACT:
10183 case MODE_VECTOR_ACCUM:
10184 case MODE_VECTOR_UACCUM:
10185 nunits = GET_MODE_NUNITS (mode);
10186 break;
10187
10188 case MODE_INT:
10189 /* Check that there are no leftover bits. */
10190 gcc_assert (GET_MODE_BITSIZE (mode)
10191 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10192
10193 nunits = GET_MODE_BITSIZE (mode)
10194 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10195 break;
10196
10197 default:
10198 gcc_unreachable ();
10199 }
10200
10201 return make_vector_type (innertype, nunits, mode);
10202 }
10203
10204 /* Similarly, but takes the inner type and number of units, which must be
10205 a power of two. */
10206
10207 tree
10208 build_vector_type (tree innertype, int nunits)
10209 {
10210 return make_vector_type (innertype, nunits, VOIDmode);
10211 }
10212
10213 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10214
10215 tree
10216 build_opaque_vector_type (tree innertype, int nunits)
10217 {
10218 tree t = make_vector_type (innertype, nunits, VOIDmode);
10219 tree cand;
10220 /* We always build the non-opaque variant before the opaque one,
10221 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10222 cand = TYPE_NEXT_VARIANT (t);
10223 if (cand
10224 && TYPE_VECTOR_OPAQUE (cand)
10225 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10226 return cand;
10227   /* Otherwise build a variant type and make sure to queue it after
10228 the non-opaque type. */
10229 cand = build_distinct_type_copy (t);
10230 TYPE_VECTOR_OPAQUE (cand) = true;
10231 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10232 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10233 TYPE_NEXT_VARIANT (t) = cand;
10234 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10235 return cand;
10236 }
10237
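/* Usage sketches for the vector type builders above (illustrative only):

       tree v4sf  = build_vector_type (float_type_node, 4);
       tree v16qi = build_vector_type_for_mode (intQI_type_node, V16QImode);

   The first form leaves mode selection to layout_type; the second pins the
   result to a specific machine mode.  V16QImode here is only an example and
   exists only on targets whose modes.def defines it.  */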
10238
10239 /* Given an initializer INIT, return TRUE if INIT is zero or some
10240 aggregate of zeros. Otherwise return FALSE. */
10241 bool
10242 initializer_zerop (const_tree init)
10243 {
10244 tree elt;
10245
10246 STRIP_NOPS (init);
10247
10248 switch (TREE_CODE (init))
10249 {
10250 case INTEGER_CST:
10251 return integer_zerop (init);
10252
10253 case REAL_CST:
10254 /* ??? Note that this is not correct for C4X float formats. There,
10255 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10256 negative exponent. */
10257 return real_zerop (init)
10258 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10259
10260 case FIXED_CST:
10261 return fixed_zerop (init);
10262
10263 case COMPLEX_CST:
10264 return integer_zerop (init)
10265 || (real_zerop (init)
10266 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10267 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10268
10269 case VECTOR_CST:
10270 {
10271 unsigned i;
10272 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10273 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10274 return false;
10275 return true;
10276 }
10277
10278 case CONSTRUCTOR:
10279 {
10280 unsigned HOST_WIDE_INT idx;
10281
10282 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10283 if (!initializer_zerop (elt))
10284 return false;
10285 return true;
10286 }
10287
10288 case STRING_CST:
10289 {
10290 int i;
10291
10292 /* We need to loop through all elements to handle cases like
10293 "\0" and "\0foobar". */
10294 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10295 if (TREE_STRING_POINTER (init)[i] != '\0')
10296 return false;
10297
10298 return true;
10299 }
10300
10301 default:
10302 return false;
10303 }
10304 }
10305
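/* Usage sketch (illustrative only): a typical caller asks whether a
   variable's initializer is all zero bits, e.g. when deciding that the
   initializer does not need to be emitted explicitly:

       if (DECL_INITIAL (decl) == NULL_TREE
           || initializer_zerop (DECL_INITIAL (decl)))
         treat_as_zero_initialized (decl);

   where DECL is a VAR_DECL supplied by the caller and
   treat_as_zero_initialized is a hypothetical helper.  */
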
10306 /* Check if vector VEC consists of all equal elements and that the
10307    number of elements corresponds to the type of VEC.
10308    The function returns the first element of the vector
10309    or NULL_TREE if the vector is not uniform. */
10310 tree
10311 uniform_vector_p (const_tree vec)
10312 {
10313 tree first, t;
10314 unsigned i;
10315
10316 if (vec == NULL_TREE)
10317 return NULL_TREE;
10318
10319 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10320
10321 if (TREE_CODE (vec) == VECTOR_CST)
10322 {
10323 first = VECTOR_CST_ELT (vec, 0);
10324 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10325 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10326 return NULL_TREE;
10327
10328 return first;
10329 }
10330
10331 else if (TREE_CODE (vec) == CONSTRUCTOR)
10332 {
10333 first = error_mark_node;
10334
10335 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10336 {
10337 if (i == 0)
10338 {
10339 first = t;
10340 continue;
10341 }
10342 if (!operand_equal_p (first, t, 0))
10343 return NULL_TREE;
10344 }
10345 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10346 return NULL_TREE;
10347
10348 return first;
10349 }
10350
10351 return NULL_TREE;
10352 }
10353
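/* Illustrative example: for a VECTOR_CST such as { 3, 3, 3, 3 } the function
   above returns the INTEGER_CST 3, while for { 1, 2, 3, 4 } it returns
   NULL_TREE.  A caller testing whether an operand is a splat might write:

       tree elt = uniform_vector_p (op);
       if (elt != NULL_TREE)
         handle_splat (op, elt);

   where OP is a vector-typed tree and handle_splat is a hypothetical
   helper.  */
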
10354 /* Build an empty statement at location LOC. */
10355
10356 tree
10357 build_empty_stmt (location_t loc)
10358 {
10359 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10360 SET_EXPR_LOCATION (t, loc);
10361 return t;
10362 }
10363
10364
10365 /* Build an OpenMP clause with code CODE. LOC is the location of the
10366 clause. */
10367
10368 tree
10369 build_omp_clause (location_t loc, enum omp_clause_code code)
10370 {
10371 tree t;
10372 int size, length;
10373
10374 length = omp_clause_num_ops[code];
10375 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10376
10377 record_node_allocation_statistics (OMP_CLAUSE, size);
10378
10379 t = (tree) ggc_internal_alloc (size);
10380 memset (t, 0, size);
10381 TREE_SET_CODE (t, OMP_CLAUSE);
10382 OMP_CLAUSE_SET_CODE (t, code);
10383 OMP_CLAUSE_LOCATION (t) = loc;
10384
10385 return t;
10386 }
10387
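/* Usage sketch (illustrative only): building a private(x) clause and chaining
   it onto an existing clause list:

       tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
       OMP_CLAUSE_DECL (c) = x;
       OMP_CLAUSE_CHAIN (c) = clauses;
       clauses = c;

   where X is the privatized VAR_DECL and CLAUSES is the list built so far
   (NULL_TREE initially).  */
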
10388 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10389 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10390 Except for the CODE and operand count field, other storage for the
10391 object is initialized to zeros. */
10392
10393 tree
10394 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10395 {
10396 tree t;
10397 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10398
10399 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10400 gcc_assert (len >= 1);
10401
10402 record_node_allocation_statistics (code, length);
10403
10404 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10405
10406 TREE_SET_CODE (t, code);
10407
10408 /* Can't use TREE_OPERAND to store the length because if checking is
10409 enabled, it will try to check the length before we store it. :-P */
10410 t->exp.operands[0] = build_int_cst (sizetype, len);
10411
10412 return t;
10413 }
10414
10415 /* Helper function for build_call_* functions; build a CALL_EXPR with
10416 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10417 the argument slots. */
10418
10419 static tree
10420 build_call_1 (tree return_type, tree fn, int nargs)
10421 {
10422 tree t;
10423
10424 t = build_vl_exp (CALL_EXPR, nargs + 3);
10425 TREE_TYPE (t) = return_type;
10426 CALL_EXPR_FN (t) = fn;
10427 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10428
10429 return t;
10430 }
10431
10432 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10433 FN and a null static chain slot. NARGS is the number of call arguments
10434 which are specified as "..." arguments. */
10435
10436 tree
10437 build_call_nary (tree return_type, tree fn, int nargs, ...)
10438 {
10439 tree ret;
10440 va_list args;
10441 va_start (args, nargs);
10442 ret = build_call_valist (return_type, fn, nargs, args);
10443 va_end (args);
10444 return ret;
10445 }
10446
10447 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10448 FN and a null static chain slot. NARGS is the number of call arguments
10449 which are specified as a va_list ARGS. */
10450
10451 tree
10452 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10453 {
10454 tree t;
10455 int i;
10456
10457 t = build_call_1 (return_type, fn, nargs);
10458 for (i = 0; i < nargs; i++)
10459 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10460 process_call_operands (t);
10461 return t;
10462 }
10463
10464 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10465 FN and a null static chain slot. NARGS is the number of call arguments
10466 which are specified as a tree array ARGS. */
10467
10468 tree
10469 build_call_array_loc (location_t loc, tree return_type, tree fn,
10470 int nargs, const tree *args)
10471 {
10472 tree t;
10473 int i;
10474
10475 t = build_call_1 (return_type, fn, nargs);
10476 for (i = 0; i < nargs; i++)
10477 CALL_EXPR_ARG (t, i) = args[i];
10478 process_call_operands (t);
10479 SET_EXPR_LOCATION (t, loc);
10480 return t;
10481 }
10482
10483 /* Like build_call_array, but takes a vec. */
10484
10485 tree
10486 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10487 {
10488 tree ret, t;
10489 unsigned int ix;
10490
10491 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10492 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10493 CALL_EXPR_ARG (ret, ix) = t;
10494 process_call_operands (ret);
10495 return ret;
10496 }
10497
10498 /* Conveniently construct a function call expression. FNDECL names the
10499 function to be called and N arguments are passed in the array
10500 ARGARRAY. */
10501
10502 tree
10503 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10504 {
10505 tree fntype = TREE_TYPE (fndecl);
10506 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10507
10508 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10509 }
10510
10511 /* Conveniently construct a function call expression. FNDECL names the
10512 function to be called and the arguments are passed in the vector
10513 VEC. */
10514
10515 tree
10516 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10517 {
10518 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10519 vec_safe_address (vec));
10520 }
10521
10522
10523 /* Conveniently construct a function call expression. FNDECL names the
10524 function to be called, N is the number of arguments, and the "..."
10525 parameters are the argument expressions. */
10526
10527 tree
10528 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10529 {
10530 va_list ap;
10531 tree *argarray = XALLOCAVEC (tree, n);
10532 int i;
10533
10534 va_start (ap, n);
10535 for (i = 0; i < n; i++)
10536 argarray[i] = va_arg (ap, tree);
10537 va_end (ap);
10538 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10539 }
10540
10541 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10542 varargs macros aren't supported by all bootstrap compilers. */
10543
10544 tree
10545 build_call_expr (tree fndecl, int n, ...)
10546 {
10547 va_list ap;
10548 tree *argarray = XALLOCAVEC (tree, n);
10549 int i;
10550
10551 va_start (ap, n);
10552 for (i = 0; i < n; i++)
10553 argarray[i] = va_arg (ap, tree);
10554 va_end (ap);
10555 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10556 }
10557
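/* Usage sketch (illustrative only): building a call to the memcpy builtin
   with three arguments:

       tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
       tree call = build_call_expr_loc (loc, fn, 3, dest, src, size);

   where DEST, SRC and SIZE are trees supplied by the caller.  The variants
   above differ only in how the arguments are passed (varargs, va_list, array
   or vec).  */
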
10558 /* Build an internal call expression.  This is just like CALL_EXPR, except
10559    its CALL_EXPR_FN is NULL.  It will be gimplified later into a call to an
10560    internal function. */
10561
10562 tree
10563 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10564 tree type, int n, ...)
10565 {
10566 va_list ap;
10567 int i;
10568
10569 tree fn = build_call_1 (type, NULL_TREE, n);
10570 va_start (ap, n);
10571 for (i = 0; i < n; i++)
10572 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10573 va_end (ap);
10574 SET_EXPR_LOCATION (fn, loc);
10575 CALL_EXPR_IFN (fn) = ifn;
10576 return fn;
10577 }
10578
10579 /* Create a new constant string literal and return a char* pointer to it.
10580 The STRING_CST value is the LEN characters at STR. */
10581 tree
10582 build_string_literal (int len, const char *str)
10583 {
10584 tree t, elem, index, type;
10585
10586 t = build_string (len, str);
10587 elem = build_type_variant (char_type_node, 1, 0);
10588 index = build_index_type (size_int (len - 1));
10589 type = build_array_type (elem, index);
10590 TREE_TYPE (t) = type;
10591 TREE_CONSTANT (t) = 1;
10592 TREE_READONLY (t) = 1;
10593 TREE_STATIC (t) = 1;
10594
10595 type = build_pointer_type (elem);
10596 t = build1 (ADDR_EXPR, type,
10597 build4 (ARRAY_REF, elem,
10598 t, integer_zero_node, NULL_TREE, NULL_TREE));
10599 return t;
10600 }
10601
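/* Usage sketch (illustrative only): callers normally include the trailing
   NUL in LEN, e.g. when building the format argument of a generated printf
   call:

       const char *fmt = "%s\n";
       tree arg = build_string_literal (strlen (fmt) + 1, fmt);

   The result is an ADDR_EXPR whose type is pointer to const char, pointing
   at the first character of the STRING_CST.  */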
10602
10603
10604 /* Return true if T (assumed to be a DECL) must be assigned a memory
10605 location. */
10606
10607 bool
10608 needs_to_live_in_memory (const_tree t)
10609 {
10610 return (TREE_ADDRESSABLE (t)
10611 || is_global_var (t)
10612 || (TREE_CODE (t) == RESULT_DECL
10613 && !DECL_BY_REFERENCE (t)
10614 && aggregate_value_p (t, current_function_decl)));
10615 }
10616
10617 /* Return the value of the constant X, sign-extended. */
10618
10619 HOST_WIDE_INT
10620 int_cst_value (const_tree x)
10621 {
10622 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10623 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10624
10625 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10626 gcc_assert (cst_and_fits_in_hwi (x));
10627
10628 if (bits < HOST_BITS_PER_WIDE_INT)
10629 {
10630 bool negative = ((val >> (bits - 1)) & 1) != 0;
10631 if (negative)
10632 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10633 else
10634 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10635 }
10636
10637 return val;
10638 }
10639
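/* Worked example (illustrative): for a constant 255 of an 8-bit type, the
   low HOST_WIDE_INT word has bit 7 set, so the sign-extension step above
   widens it to all ones and the function returns -1, whereas 127 is returned
   unchanged.  Constants that do not fit in a HOST_WIDE_INT trip the
   cst_and_fits_in_hwi assertion instead.  */
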
10640 /* If TYPE is an integral or pointer type, return an integer type with
10641 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10642 if TYPE is already an integer type of signedness UNSIGNEDP. */
10643
10644 tree
10645 signed_or_unsigned_type_for (int unsignedp, tree type)
10646 {
10647 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10648 return type;
10649
10650 if (TREE_CODE (type) == VECTOR_TYPE)
10651 {
10652 tree inner = TREE_TYPE (type);
10653 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10654 if (!inner2)
10655 return NULL_TREE;
10656 if (inner == inner2)
10657 return type;
10658 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10659 }
10660
10661 if (!INTEGRAL_TYPE_P (type)
10662 && !POINTER_TYPE_P (type)
10663 && TREE_CODE (type) != OFFSET_TYPE)
10664 return NULL_TREE;
10665
10666 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10667 }
10668
10669 /* If TYPE is an integral or pointer type, return an integer type with
10670 the same precision which is unsigned, or itself if TYPE is already an
10671 unsigned integer type. */
10672
10673 tree
10674 unsigned_type_for (tree type)
10675 {
10676 return signed_or_unsigned_type_for (1, type);
10677 }
10678
10679 /* If TYPE is an integral or pointer type, return an integer type with
10680 the same precision which is signed, or itself if TYPE is already a
10681 signed integer type. */
10682
10683 tree
10684 signed_type_for (tree type)
10685 {
10686 return signed_or_unsigned_type_for (0, type);
10687 }
10688
10689 /* If TYPE is a vector type, return a signed integer vector type with the
10690 same width and number of subparts. Otherwise return boolean_type_node. */
10691
10692 tree
10693 truth_type_for (tree type)
10694 {
10695 if (TREE_CODE (type) == VECTOR_TYPE)
10696 {
10697 tree elem = lang_hooks.types.type_for_size
10698 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10699 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10700 }
10701 else
10702 return boolean_type_node;
10703 }
10704
10705 /* Returns the largest value obtainable by casting something in INNER type to
10706 OUTER type. */
10707
10708 tree
10709 upper_bound_in_type (tree outer, tree inner)
10710 {
10711 unsigned int det = 0;
10712 unsigned oprec = TYPE_PRECISION (outer);
10713 unsigned iprec = TYPE_PRECISION (inner);
10714 unsigned prec;
10715
10716 /* Compute a unique number for every combination. */
10717 det |= (oprec > iprec) ? 4 : 0;
10718 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10719 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10720
10721 /* Determine the exponent to use. */
10722 switch (det)
10723 {
10724 case 0:
10725 case 1:
10726 /* oprec <= iprec, outer: signed, inner: don't care. */
10727 prec = oprec - 1;
10728 break;
10729 case 2:
10730 case 3:
10731 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10732 prec = oprec;
10733 break;
10734 case 4:
10735 /* oprec > iprec, outer: signed, inner: signed. */
10736 prec = iprec - 1;
10737 break;
10738 case 5:
10739 /* oprec > iprec, outer: signed, inner: unsigned. */
10740 prec = iprec;
10741 break;
10742 case 6:
10743 /* oprec > iprec, outer: unsigned, inner: signed. */
10744 prec = oprec;
10745 break;
10746 case 7:
10747 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10748 prec = iprec;
10749 break;
10750 default:
10751 gcc_unreachable ();
10752 }
10753
10754 return wide_int_to_tree (outer,
10755 wi::mask (prec, false, TYPE_PRECISION (outer)));
10756 }
10757
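/* Worked example (illustrative): casting from a 32-bit signed INNER type to
   a 16-bit unsigned OUTER type gives det = 2 (oprec <= iprec, outer
   unsigned, inner signed), so prec = oprec = 16 and the result is the OUTER
   constant 0xffff, i.e. 65535, the largest value such a cast can produce.  */
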
10758 /* Returns the smallest value obtainable by casting something in INNER type to
10759 OUTER type. */
10760
10761 tree
10762 lower_bound_in_type (tree outer, tree inner)
10763 {
10764 unsigned oprec = TYPE_PRECISION (outer);
10765 unsigned iprec = TYPE_PRECISION (inner);
10766
10767 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10768 and obtain 0. */
10769 if (TYPE_UNSIGNED (outer)
10770 /* If we are widening something of an unsigned type, OUTER type
10771 contains all values of INNER type. In particular, both INNER
10772 and OUTER types have zero in common. */
10773 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10774 return build_int_cst (outer, 0);
10775 else
10776 {
10777 /* If we are widening a signed type to another signed type, we
10778          want to obtain -2^(iprec-1).  If we are keeping the
10779 precision or narrowing to a signed type, we want to obtain
10780 -2^(oprec-1). */
10781 unsigned prec = oprec > iprec ? iprec : oprec;
10782 return wide_int_to_tree (outer,
10783 wi::mask (prec - 1, true,
10784 TYPE_PRECISION (outer)));
10785 }
10786 }
10787
10788 /* Return nonzero if two operands that are suitable for PHI nodes are
10789 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10790 SSA_NAME or invariant. Note that this is strictly an optimization.
10791 That is, callers of this function can directly call operand_equal_p
10792 and get the same result, only slower. */
10793
10794 int
10795 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10796 {
10797 if (arg0 == arg1)
10798 return 1;
10799 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10800 return 0;
10801 return operand_equal_p (arg0, arg1, 0);
10802 }
10803
10804 /* Returns the number of zeros at the end of the binary representation of X. */
10805
10806 tree
10807 num_ending_zeros (const_tree x)
10808 {
10809 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10810 }
10811
10812
10813 #define WALK_SUBTREE(NODE) \
10814 do \
10815 { \
10816 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10817 if (result) \
10818 return result; \
10819 } \
10820 while (0)
10821
10822 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10823    to be walked whenever a type is seen in the tree.  The rest of the operands
10824    and the return value are as for walk_tree. */
10825
10826 static tree
10827 walk_type_fields (tree type, walk_tree_fn func, void *data,
10828 struct pointer_set_t *pset, walk_tree_lh lh)
10829 {
10830 tree result = NULL_TREE;
10831
10832 switch (TREE_CODE (type))
10833 {
10834 case POINTER_TYPE:
10835 case REFERENCE_TYPE:
10836 case VECTOR_TYPE:
10837 /* We have to worry about mutually recursive pointers. These can't
10838 be written in C. They can in Ada. It's pathological, but
10839 there's an ACATS test (c38102a) that checks it. Deal with this
10840 by checking if we're pointing to another pointer, that one
10841 points to another pointer, that one does too, and we have no htab.
10842 If so, get a hash table. We check three levels deep to avoid
10843 the cost of the hash table if we don't need one. */
10844 if (POINTER_TYPE_P (TREE_TYPE (type))
10845 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10846 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10847 && !pset)
10848 {
10849 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10850 func, data);
10851 if (result)
10852 return result;
10853
10854 break;
10855 }
10856
10857 /* ... fall through ... */
10858
10859 case COMPLEX_TYPE:
10860 WALK_SUBTREE (TREE_TYPE (type));
10861 break;
10862
10863 case METHOD_TYPE:
10864 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10865
10866 /* Fall through. */
10867
10868 case FUNCTION_TYPE:
10869 WALK_SUBTREE (TREE_TYPE (type));
10870 {
10871 tree arg;
10872
10873 /* We never want to walk into default arguments. */
10874 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10875 WALK_SUBTREE (TREE_VALUE (arg));
10876 }
10877 break;
10878
10879 case ARRAY_TYPE:
10880       /* Don't follow this node's type if it is a pointer, for fear that
10881 we'll have infinite recursion. If we have a PSET, then we
10882 need not fear. */
10883 if (pset
10884 || (!POINTER_TYPE_P (TREE_TYPE (type))
10885 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10886 WALK_SUBTREE (TREE_TYPE (type));
10887 WALK_SUBTREE (TYPE_DOMAIN (type));
10888 break;
10889
10890 case OFFSET_TYPE:
10891 WALK_SUBTREE (TREE_TYPE (type));
10892 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10893 break;
10894
10895 default:
10896 break;
10897 }
10898
10899 return NULL_TREE;
10900 }
10901
10902 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10903 called with the DATA and the address of each sub-tree. If FUNC returns a
10904 non-NULL value, the traversal is stopped, and the value returned by FUNC
10905 is returned. If PSET is non-NULL it is used to record the nodes visited,
10906 and to avoid visiting a node more than once. */
10907
10908 tree
10909 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10910 struct pointer_set_t *pset, walk_tree_lh lh)
10911 {
10912 enum tree_code code;
10913 int walk_subtrees;
10914 tree result;
10915
10916 #define WALK_SUBTREE_TAIL(NODE) \
10917 do \
10918 { \
10919 tp = & (NODE); \
10920 goto tail_recurse; \
10921 } \
10922 while (0)
10923
10924 tail_recurse:
10925 /* Skip empty subtrees. */
10926 if (!*tp)
10927 return NULL_TREE;
10928
10929 /* Don't walk the same tree twice, if the user has requested
10930 that we avoid doing so. */
10931 if (pset && pointer_set_insert (pset, *tp))
10932 return NULL_TREE;
10933
10934 /* Call the function. */
10935 walk_subtrees = 1;
10936 result = (*func) (tp, &walk_subtrees, data);
10937
10938 /* If we found something, return it. */
10939 if (result)
10940 return result;
10941
10942 code = TREE_CODE (*tp);
10943
10944 /* Even if we didn't, FUNC may have decided that there was nothing
10945 interesting below this point in the tree. */
10946 if (!walk_subtrees)
10947 {
10948 /* But we still need to check our siblings. */
10949 if (code == TREE_LIST)
10950 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10951 else if (code == OMP_CLAUSE)
10952 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10953 else
10954 return NULL_TREE;
10955 }
10956
10957 if (lh)
10958 {
10959 result = (*lh) (tp, &walk_subtrees, func, data, pset);
10960 if (result || !walk_subtrees)
10961 return result;
10962 }
10963
10964 switch (code)
10965 {
10966 case ERROR_MARK:
10967 case IDENTIFIER_NODE:
10968 case INTEGER_CST:
10969 case REAL_CST:
10970 case FIXED_CST:
10971 case VECTOR_CST:
10972 case STRING_CST:
10973 case BLOCK:
10974 case PLACEHOLDER_EXPR:
10975 case SSA_NAME:
10976 case FIELD_DECL:
10977 case RESULT_DECL:
10978 /* None of these have subtrees other than those already walked
10979 above. */
10980 break;
10981
10982 case TREE_LIST:
10983 WALK_SUBTREE (TREE_VALUE (*tp));
10984 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10985 break;
10986
10987 case TREE_VEC:
10988 {
10989 int len = TREE_VEC_LENGTH (*tp);
10990
10991 if (len == 0)
10992 break;
10993
10994 /* Walk all elements but the first. */
10995 while (--len)
10996 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
10997
10998 /* Now walk the first one as a tail call. */
10999 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11000 }
11001
11002 case COMPLEX_CST:
11003 WALK_SUBTREE (TREE_REALPART (*tp));
11004 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11005
11006 case CONSTRUCTOR:
11007 {
11008 unsigned HOST_WIDE_INT idx;
11009 constructor_elt *ce;
11010
11011 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11012 idx++)
11013 WALK_SUBTREE (ce->value);
11014 }
11015 break;
11016
11017 case SAVE_EXPR:
11018 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11019
11020 case BIND_EXPR:
11021 {
11022 tree decl;
11023 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11024 {
11025 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11026 into declarations that are just mentioned, rather than
11027 declared; they don't really belong to this part of the tree.
11028 And, we can see cycles: the initializer for a declaration
11029 can refer to the declaration itself. */
11030 WALK_SUBTREE (DECL_INITIAL (decl));
11031 WALK_SUBTREE (DECL_SIZE (decl));
11032 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11033 }
11034 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11035 }
11036
11037 case STATEMENT_LIST:
11038 {
11039 tree_stmt_iterator i;
11040 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11041 WALK_SUBTREE (*tsi_stmt_ptr (i));
11042 }
11043 break;
11044
11045 case OMP_CLAUSE:
11046 switch (OMP_CLAUSE_CODE (*tp))
11047 {
11048 case OMP_CLAUSE_PRIVATE:
11049 case OMP_CLAUSE_SHARED:
11050 case OMP_CLAUSE_FIRSTPRIVATE:
11051 case OMP_CLAUSE_COPYIN:
11052 case OMP_CLAUSE_COPYPRIVATE:
11053 case OMP_CLAUSE_FINAL:
11054 case OMP_CLAUSE_IF:
11055 case OMP_CLAUSE_NUM_THREADS:
11056 case OMP_CLAUSE_SCHEDULE:
11057 case OMP_CLAUSE_UNIFORM:
11058 case OMP_CLAUSE_DEPEND:
11059 case OMP_CLAUSE_NUM_TEAMS:
11060 case OMP_CLAUSE_THREAD_LIMIT:
11061 case OMP_CLAUSE_DEVICE:
11062 case OMP_CLAUSE_DIST_SCHEDULE:
11063 case OMP_CLAUSE_SAFELEN:
11064 case OMP_CLAUSE_SIMDLEN:
11065 case OMP_CLAUSE__LOOPTEMP_:
11066 case OMP_CLAUSE__SIMDUID_:
11067 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11068 /* FALLTHRU */
11069
11070 case OMP_CLAUSE_NOWAIT:
11071 case OMP_CLAUSE_ORDERED:
11072 case OMP_CLAUSE_DEFAULT:
11073 case OMP_CLAUSE_UNTIED:
11074 case OMP_CLAUSE_MERGEABLE:
11075 case OMP_CLAUSE_PROC_BIND:
11076 case OMP_CLAUSE_INBRANCH:
11077 case OMP_CLAUSE_NOTINBRANCH:
11078 case OMP_CLAUSE_FOR:
11079 case OMP_CLAUSE_PARALLEL:
11080 case OMP_CLAUSE_SECTIONS:
11081 case OMP_CLAUSE_TASKGROUP:
11082 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11083
11084 case OMP_CLAUSE_LASTPRIVATE:
11085 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11086 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11087 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11088
11089 case OMP_CLAUSE_COLLAPSE:
11090 {
11091 int i;
11092 for (i = 0; i < 3; i++)
11093 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11094 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11095 }
11096
11097 case OMP_CLAUSE_LINEAR:
11098 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11099 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11100 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11101 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11102
11103 case OMP_CLAUSE_ALIGNED:
11104 case OMP_CLAUSE_FROM:
11105 case OMP_CLAUSE_TO:
11106 case OMP_CLAUSE_MAP:
11107 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11108 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11109 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11110
11111 case OMP_CLAUSE_REDUCTION:
11112 {
11113 int i;
11114 for (i = 0; i < 4; i++)
11115 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11116 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11117 }
11118
11119 default:
11120 gcc_unreachable ();
11121 }
11122 break;
11123
11124 case TARGET_EXPR:
11125 {
11126 int i, len;
11127
11128 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11129 But, we only want to walk once. */
11130 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11131 for (i = 0; i < len; ++i)
11132 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11133 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11134 }
11135
11136 case DECL_EXPR:
11137 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11138 defining. We only want to walk into these fields of a type in this
11139 case and not in the general case of a mere reference to the type.
11140
11141 The criterion is as follows: if the field can be an expression, it
11142 must be walked only here. This should be in keeping with the fields
11143 that are directly gimplified in gimplify_type_sizes in order for the
11144 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11145 variable-sized types.
11146
11147 Note that DECLs get walked as part of processing the BIND_EXPR. */
11148 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11149 {
11150 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11151 if (TREE_CODE (*type_p) == ERROR_MARK)
11152 return NULL_TREE;
11153
11154 /* Call the function for the type. See if it returns anything or
11155 doesn't want us to continue. If we are to continue, walk both
11156 the normal fields and those for the declaration case. */
11157 result = (*func) (type_p, &walk_subtrees, data);
11158 if (result || !walk_subtrees)
11159 return result;
11160
11161 /* But do not walk a pointed-to type since it may itself need to
11162 be walked in the declaration case if it isn't anonymous. */
11163 if (!POINTER_TYPE_P (*type_p))
11164 {
11165 result = walk_type_fields (*type_p, func, data, pset, lh);
11166 if (result)
11167 return result;
11168 }
11169
11170 /* If this is a record type, also walk the fields. */
11171 if (RECORD_OR_UNION_TYPE_P (*type_p))
11172 {
11173 tree field;
11174
11175 for (field = TYPE_FIELDS (*type_p); field;
11176 field = DECL_CHAIN (field))
11177 {
11178 /* We'd like to look at the type of the field, but we can
11179 easily get infinite recursion. So assume it's pointed
11180 to elsewhere in the tree. Also, ignore things that
11181 aren't fields. */
11182 if (TREE_CODE (field) != FIELD_DECL)
11183 continue;
11184
11185 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11186 WALK_SUBTREE (DECL_SIZE (field));
11187 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11188 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11189 WALK_SUBTREE (DECL_QUALIFIER (field));
11190 }
11191 }
11192
11193 /* Same for scalar types. */
11194 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11195 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11196 || TREE_CODE (*type_p) == INTEGER_TYPE
11197 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11198 || TREE_CODE (*type_p) == REAL_TYPE)
11199 {
11200 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11201 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11202 }
11203
11204 WALK_SUBTREE (TYPE_SIZE (*type_p));
11205 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11206 }
11207 /* FALLTHRU */
11208
11209 default:
11210 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11211 {
11212 int i, len;
11213
11214 /* Walk over all the sub-trees of this operand. */
11215 len = TREE_OPERAND_LENGTH (*tp);
11216
11217 /* Go through the subtrees. We need to do this in forward order so
11218 that the scope of a FOR_EXPR is handled properly. */
11219 if (len)
11220 {
11221 for (i = 0; i < len - 1; ++i)
11222 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11223 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11224 }
11225 }
11226 /* If this is a type, walk the needed fields in the type. */
11227 else if (TYPE_P (*tp))
11228 return walk_type_fields (*tp, func, data, pset, lh);
11229 break;
11230 }
11231
11232 /* We didn't find what we were looking for. */
11233 return NULL_TREE;
11234
11235 #undef WALK_SUBTREE_TAIL
11236 }
11237 #undef WALK_SUBTREE
11238
11239 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11240
11241 tree
11242 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11243 walk_tree_lh lh)
11244 {
11245 tree result;
11246 struct pointer_set_t *pset;
11247
11248 pset = pointer_set_create ();
11249 result = walk_tree_1 (tp, func, data, pset, lh);
11250 pointer_set_destroy (pset);
11251 return result;
11252 }
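
/* Editor's sketch (not part of the original source): a hypothetical client
   of walk_tree_without_duplicates.  The callback follows the walk_tree_fn
   contract: return NULL_TREE to keep walking, set *walk_subtrees to 0 to
   skip children, or return a non-NULL tree to stop the walk and propagate
   that value.  The function and counter names below are made up for
   illustration.  */

static tree
count_call_exprs_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (TREE_CODE (*tp) == CALL_EXPR)
    ++*(unsigned *) data;	/* Bump the caller-supplied counter.  */
  return NULL_TREE;		/* Keep walking.  */
}

static unsigned
count_call_exprs (tree expr)
{
  unsigned count = 0;
  /* Shared subtrees are visited only once thanks to the pointer set
     managed by walk_tree_without_duplicates_1.  */
  walk_tree_without_duplicates (&expr, count_call_exprs_r, &count);
  return count;
}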
11253
11254
11255 tree
11256 tree_block (tree t)
11257 {
11258 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11259
11260 if (IS_EXPR_CODE_CLASS (c))
11261 return LOCATION_BLOCK (t->exp.locus);
11262 gcc_unreachable ();
11263 return NULL;
11264 }
11265
11266 void
11267 tree_set_block (tree t, tree b)
11268 {
11269 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11270
11271 if (IS_EXPR_CODE_CLASS (c))
11272 {
11273 if (b)
11274 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11275 else
11276 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11277 }
11278 else
11279 gcc_unreachable ();
11280 }
11281
11282 /* Create a nameless artificial label and put it in the current
11283 function context. The label has a location of LOC. Returns the
11284 newly created label. */
11285
11286 tree
11287 create_artificial_label (location_t loc)
11288 {
11289 tree lab = build_decl (loc,
11290 LABEL_DECL, NULL_TREE, void_type_node);
11291
11292 DECL_ARTIFICIAL (lab) = 1;
11293 DECL_IGNORED_P (lab) = 1;
11294 DECL_CONTEXT (lab) = current_function_decl;
11295 return lab;
11296 }
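
/* Editor's sketch (hypothetical, not part of the original source): a front
   end synthesizing control flow would typically wrap such a label in a
   LABEL_EXPR marking the point the label refers to, and branch to it with a
   matching GOTO_EXPR built elsewhere.  */

static tree
build_artificial_label_expr (location_t loc)
{
  tree lab = create_artificial_label (loc);
  return build1 (LABEL_EXPR, void_type_node, lab);
}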
11297
11298 /* Given a tree, try to return a useful variable name that we can use
11299 to prefix a temporary that is being assigned the value of the tree.
11300 I.e. given <temp> = &A, return A. */
11301
11302 const char *
11303 get_name (tree t)
11304 {
11305 tree stripped_decl;
11306
11307 stripped_decl = t;
11308 STRIP_NOPS (stripped_decl);
11309 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11310 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11311 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11312 {
11313 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11314 if (!name)
11315 return NULL;
11316 return IDENTIFIER_POINTER (name);
11317 }
11318 else
11319 {
11320 switch (TREE_CODE (stripped_decl))
11321 {
11322 case ADDR_EXPR:
11323 return get_name (TREE_OPERAND (stripped_decl, 0));
11324 default:
11325 return NULL;
11326 }
11327 }
11328 }
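
/* Editor's note, a usage sketch: get_name is handy for giving temporaries
   readable names.  Assuming a value VAL and the standard create_tmp_var
   interface, something like

     tree tmp = create_tmp_var (TREE_TYPE (val), get_name (val));

   yields a temporary prefixed with VAL's name when one can be recovered;
   get_name returns NULL when no useful name exists, which create_tmp_var
   treats as "no prefix".  */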
11329
11330 /* Return true if FNTYPE has a variable argument list. */
11331
11332 bool
11333 stdarg_p (const_tree fntype)
11334 {
11335 function_args_iterator args_iter;
11336 tree n = NULL_TREE, t;
11337
11338 if (!fntype)
11339 return false;
11340
11341 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11342 {
11343 n = t;
11344 }
11345
11346 return n != NULL_TREE && n != void_type_node;
11347 }
11348
11349 /* Return true if FNTYPE has a prototype. */
11350
11351 bool
11352 prototype_p (tree fntype)
11353 {
11354 tree t;
11355
11356 gcc_assert (fntype != NULL_TREE);
11357
11358 t = TYPE_ARG_TYPES (fntype);
11359 return (t != NULL_TREE);
11360 }
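
/* Editor's sketch illustrating the two predicates above on hypothetical
   declarations F, G and H:

     int f (int, ...);   stdarg_p (TREE_TYPE (f)) is true,
                         prototype_p (TREE_TYPE (f)) is true.
     int g (void);       stdarg_p (TREE_TYPE (g)) is false,
                         prototype_p (TREE_TYPE (g)) is true.
     int h ();           stdarg_p (TREE_TYPE (h)) is false,
                         prototype_p (TREE_TYPE (h)) is false.  */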
11361
11362 /* If BLOCK is inlined from an __attribute__((__artificial__))
11363 routine, return a pointer to the location from which it has been
11364 called. */
11365 location_t *
11366 block_nonartificial_location (tree block)
11367 {
11368 location_t *ret = NULL;
11369
11370 while (block && TREE_CODE (block) == BLOCK
11371 && BLOCK_ABSTRACT_ORIGIN (block))
11372 {
11373 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11374
11375 while (TREE_CODE (ao) == BLOCK
11376 && BLOCK_ABSTRACT_ORIGIN (ao)
11377 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11378 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11379
11380 if (TREE_CODE (ao) == FUNCTION_DECL)
11381 {
11382 /* If AO is an artificial inline, point RET to the
11383 call site locus at which it has been inlined and continue
11384 the loop, in case AO's caller is also an artificial
11385 inline. */
11386 if (DECL_DECLARED_INLINE_P (ao)
11387 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11388 ret = &BLOCK_SOURCE_LOCATION (block);
11389 else
11390 break;
11391 }
11392 else if (TREE_CODE (ao) != BLOCK)
11393 break;
11394
11395 block = BLOCK_SUPERCONTEXT (block);
11396 }
11397 return ret;
11398 }
11399
11400
11401 /* If EXP is inlined from an __attribute__((__artificial__))
11402 function, return the location of the original call expression. */
11403
11404 location_t
11405 tree_nonartificial_location (tree exp)
11406 {
11407 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11408
11409 if (loc)
11410 return *loc;
11411 else
11412 return EXPR_LOCATION (exp);
11413 }
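
/* Editor's sketch: a diagnostic that wants to point at user code rather
   than at the body of an always-inlined artificial helper might use the
   function above roughly as follows (option and message are hypothetical):

     warning_at (tree_nonartificial_location (exp), OPT_Wattributes,
                 "this call is expanded from an artificial function");  */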
11414
11415
11416 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11417 nodes. */
11418
11419 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11420
11421 static hashval_t
11422 cl_option_hash_hash (const void *x)
11423 {
11424 const_tree const t = (const_tree) x;
11425 const char *p;
11426 size_t i;
11427 size_t len = 0;
11428 hashval_t hash = 0;
11429
11430 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11431 {
11432 p = (const char *)TREE_OPTIMIZATION (t);
11433 len = sizeof (struct cl_optimization);
11434 }
11435
11436 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11437 {
11438 p = (const char *)TREE_TARGET_OPTION (t);
11439 len = sizeof (struct cl_target_option);
11440 }
11441
11442 else
11443 gcc_unreachable ();
11444
11445 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11446 something else. */
11447 for (i = 0; i < len; i++)
11448 if (p[i])
11449 hash = (hash << 4) ^ ((i << 2) | p[i]);
11450
11451 return hash;
11452 }
11453
11454 /* Return nonzero if the value represented by *X (an OPTIMIZATION_NODE or
11455 TARGET_OPTION_NODE tree node) is the same as that given by *Y, a node
11456 of the same kind. */
11457
11458 static int
11459 cl_option_hash_eq (const void *x, const void *y)
11460 {
11461 const_tree const xt = (const_tree) x;
11462 const_tree const yt = (const_tree) y;
11463 const char *xp;
11464 const char *yp;
11465 size_t len;
11466
11467 if (TREE_CODE (xt) != TREE_CODE (yt))
11468 return 0;
11469
11470 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11471 {
11472 xp = (const char *)TREE_OPTIMIZATION (xt);
11473 yp = (const char *)TREE_OPTIMIZATION (yt);
11474 len = sizeof (struct cl_optimization);
11475 }
11476
11477 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11478 {
11479 xp = (const char *)TREE_TARGET_OPTION (xt);
11480 yp = (const char *)TREE_TARGET_OPTION (yt);
11481 len = sizeof (struct cl_target_option);
11482 }
11483
11484 else
11485 gcc_unreachable ();
11486
11487 return (memcmp (xp, yp, len) == 0);
11488 }
11489
11490 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11491
11492 tree
11493 build_optimization_node (struct gcc_options *opts)
11494 {
11495 tree t;
11496 void **slot;
11497
11498 /* Use the cache of optimization nodes. */
11499
11500 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11501 opts);
11502
11503 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11504 t = (tree) *slot;
11505 if (!t)
11506 {
11507 /* Insert this one into the hash table. */
11508 t = cl_optimization_node;
11509 *slot = t;
11510
11511 /* Make a new node for next time round. */
11512 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11513 }
11514
11515 return t;
11516 }
11517
11518 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11519
11520 tree
11521 build_target_option_node (struct gcc_options *opts)
11522 {
11523 tree t;
11524 void **slot;
11525
11526 /* Use the cache of target option nodes. */
11527
11528 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11529 opts);
11530
11531 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11532 t = (tree) *slot;
11533 if (!t)
11534 {
11535 /* Insert this one into the hash table. */
11536 t = cl_target_option_node;
11537 *slot = t;
11538
11539 /* Make a new node for next time round. */
11540 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11541 }
11542
11543 return t;
11544 }
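
/* Editor's sketch (assuming the standard DECL_FUNCTION_SPECIFIC_* accessors
   and the global_options variable): attribute handlers typically snapshot
   the options currently in effect and attach the resulting nodes to a
   function declaration, roughly like this.  The helper name is made up.  */

static void
record_current_options_for_decl (tree fndecl)
{
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
    = build_optimization_node (&global_options);
  DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
    = build_target_option_node (&global_options);
}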
11545
11546 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11547 Called through htab_traverse. */
11548
11549 static int
11550 prepare_target_option_node_for_pch (void **slot, void *)
11551 {
11552 tree node = (tree) *slot;
11553 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11554 TREE_TARGET_GLOBALS (node) = NULL;
11555 return 1;
11556 }
11557
11558 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11559 so that they aren't saved during PCH writing. */
11560
11561 void
11562 prepare_target_option_nodes_for_pch (void)
11563 {
11564 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11565 NULL);
11566 }
11567
11568 /* Determine the "ultimate origin" of a block. The block may be an inlined
11569 instance of an inlined instance of a block which is local to an inline
11570 function, so we have to trace all of the way back through the origin chain
11571 to find out what sort of node actually served as the original seed for the
11572 given block. */
11573
11574 tree
11575 block_ultimate_origin (const_tree block)
11576 {
11577 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11578
11579 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11580 nodes in the function to point to themselves; ignore that if
11581 we're trying to output the abstract instance of this function. */
11582 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11583 return NULL_TREE;
11584
11585 if (immediate_origin == NULL_TREE)
11586 return NULL_TREE;
11587 else
11588 {
11589 tree ret_val;
11590 tree lookahead = immediate_origin;
11591
11592 do
11593 {
11594 ret_val = lookahead;
11595 lookahead = (TREE_CODE (ret_val) == BLOCK
11596 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11597 }
11598 while (lookahead != NULL && lookahead != ret_val);
11599
11600 /* The block's abstract origin chain may not be the *ultimate* origin of
11601 the block. It could lead to a DECL that has an abstract origin set.
11602 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11603 will give us if it has one). Note that DECL's abstract origins are
11604 supposed to be the most distant ancestor (or so decl_ultimate_origin
11605 claims), so we don't need to loop following the DECL origins. */
11606 if (DECL_P (ret_val))
11607 return DECL_ORIGIN (ret_val);
11608
11609 return ret_val;
11610 }
11611 }
11612
11613 /* Return true iff conversion in EXP generates no instruction. Mark
11614 it inline so that we fully inline into the stripping functions even
11615 though we have two uses of this function. */
11616
11617 static inline bool
11618 tree_nop_conversion (const_tree exp)
11619 {
11620 tree outer_type, inner_type;
11621
11622 if (!CONVERT_EXPR_P (exp)
11623 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11624 return false;
11625 if (TREE_OPERAND (exp, 0) == error_mark_node)
11626 return false;
11627
11628 outer_type = TREE_TYPE (exp);
11629 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11630
11631 if (!inner_type)
11632 return false;
11633
11634 /* Use precision rather than machine mode when we can, which gives
11635 the correct answer even for submode (bit-field) types. */
11636 if ((INTEGRAL_TYPE_P (outer_type)
11637 || POINTER_TYPE_P (outer_type)
11638 || TREE_CODE (outer_type) == OFFSET_TYPE)
11639 && (INTEGRAL_TYPE_P (inner_type)
11640 || POINTER_TYPE_P (inner_type)
11641 || TREE_CODE (inner_type) == OFFSET_TYPE))
11642 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11643
11644 /* Otherwise fall back on comparing machine modes (e.g. for
11645 aggregate types, floats). */
11646 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11647 }
11648
11649 /* Return true iff conversion in EXP generates no instruction. Don't
11650 consider conversions changing the signedness. */
11651
11652 static bool
11653 tree_sign_nop_conversion (const_tree exp)
11654 {
11655 tree outer_type, inner_type;
11656
11657 if (!tree_nop_conversion (exp))
11658 return false;
11659
11660 outer_type = TREE_TYPE (exp);
11661 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11662
11663 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11664 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11665 }
11666
11667 /* Strip conversions from EXP according to tree_nop_conversion and
11668 return the resulting expression. */
11669
11670 tree
11671 tree_strip_nop_conversions (tree exp)
11672 {
11673 while (tree_nop_conversion (exp))
11674 exp = TREE_OPERAND (exp, 0);
11675 return exp;
11676 }
11677
11678 /* Strip conversions from EXP according to tree_sign_nop_conversion
11679 and return the resulting expression. */
11680
11681 tree
11682 tree_strip_sign_nop_conversions (tree exp)
11683 {
11684 while (tree_sign_nop_conversion (exp))
11685 exp = TREE_OPERAND (exp, 0);
11686 return exp;
11687 }
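
/* Editor's note, a usage sketch: both strippers are typically used to look
   through useless conversion layers before inspecting an operand, e.g.

     tree op = tree_strip_nop_conversions (rhs);
     if (TREE_CODE (op) == SSA_NAME)
       ...

   tree_strip_sign_nop_conversions is the stricter variant to use when the
   caller cares about signedness being preserved.  */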
11688
11689 /* Avoid any floating point extensions from EXP. */
11690 tree
11691 strip_float_extensions (tree exp)
11692 {
11693 tree sub, expt, subt;
11694
11695 /* For a floating point constant, look up the narrowest type that can hold
11696 it properly and handle it like (type)(narrowest_type)constant.
11697 This way we can optimize for instance a=a*2.0 where "a" is a float
11698 but 2.0 is a double constant. */
11699 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11700 {
11701 REAL_VALUE_TYPE orig;
11702 tree type = NULL;
11703
11704 orig = TREE_REAL_CST (exp);
11705 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11706 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11707 type = float_type_node;
11708 else if (TYPE_PRECISION (TREE_TYPE (exp))
11709 > TYPE_PRECISION (double_type_node)
11710 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11711 type = double_type_node;
11712 if (type)
11713 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11714 }
11715
11716 if (!CONVERT_EXPR_P (exp))
11717 return exp;
11718
11719 sub = TREE_OPERAND (exp, 0);
11720 subt = TREE_TYPE (sub);
11721 expt = TREE_TYPE (exp);
11722
11723 if (!FLOAT_TYPE_P (subt))
11724 return exp;
11725
11726 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11727 return exp;
11728
11729 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11730 return exp;
11731
11732 return strip_float_extensions (sub);
11733 }
11734
11735 /* Strip out all handled components that produce invariant
11736 offsets. */
11737
11738 const_tree
11739 strip_invariant_refs (const_tree op)
11740 {
11741 while (handled_component_p (op))
11742 {
11743 switch (TREE_CODE (op))
11744 {
11745 case ARRAY_REF:
11746 case ARRAY_RANGE_REF:
11747 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11748 || TREE_OPERAND (op, 2) != NULL_TREE
11749 || TREE_OPERAND (op, 3) != NULL_TREE)
11750 return NULL;
11751 break;
11752
11753 case COMPONENT_REF:
11754 if (TREE_OPERAND (op, 2) != NULL_TREE)
11755 return NULL;
11756 break;
11757
11758 default:;
11759 }
11760 op = TREE_OPERAND (op, 0);
11761 }
11762
11763 return op;
11764 }
11765
11766 static GTY(()) tree gcc_eh_personality_decl;
11767
11768 /* Return the GCC personality function decl. */
11769
11770 tree
11771 lhd_gcc_personality (void)
11772 {
11773 if (!gcc_eh_personality_decl)
11774 gcc_eh_personality_decl = build_personality_function ("gcc");
11775 return gcc_eh_personality_decl;
11776 }
11777
11778 /* TARGET is the call target of a GIMPLE call statement
11779 (obtained by gimple_call_fn). Return true if it is an
11780 OBJ_TYPE_REF representing a virtual call to a C++ method.
11781 (As opposed to an OBJ_TYPE_REF representing ObjC calls
11782 through a cast, where the middle-end devirtualization machinery
11783 can't apply.) */
11784
11785 bool
11786 virtual_method_call_p (tree target)
11787 {
11788 if (TREE_CODE (target) != OBJ_TYPE_REF)
11789 return false;
11790 target = TREE_TYPE (target);
11791 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11792 target = TREE_TYPE (target);
11793 if (TREE_CODE (target) == FUNCTION_TYPE)
11794 return false;
11795 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11796 return true;
11797 }
11798
11799 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
11800
11801 tree
11802 obj_type_ref_class (tree ref)
11803 {
11804 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11805 ref = TREE_TYPE (ref);
11806 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11807 ref = TREE_TYPE (ref);
11808 /* We look for the type that THIS points to. ObjC also builds
11809 OBJ_TYPE_REF with non-method calls; their first parameter
11810 ID, however, also corresponds to the class type. */
11811 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11812 || TREE_CODE (ref) == FUNCTION_TYPE);
11813 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11814 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11815 return TREE_TYPE (ref);
11816 }
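
/* Editor's sketch: devirtualization code inspecting a GIMPLE call could
   combine the two helpers above roughly as follows (GS is a hypothetical
   gimple call statement):

     tree fn = gimple_call_fn (gs);
     if (fn && virtual_method_call_p (fn))
       {
         tree klass = obj_type_ref_class (fn);
         ...
       }  */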
11817
11818 /* Return true if T is in an anonymous namespace. */
11819
11820 bool
11821 type_in_anonymous_namespace_p (const_tree t)
11822 {
11823 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11824 builtin types; those have a NULL TYPE_CONTEXT. */
11825 if (!TYPE_CONTEXT (t))
11826 return false;
11827 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11828 }
11829
11830 /* Try to find a base info of BINFO that would have its field decl at offset
11831 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11832 found, return it; otherwise return NULL_TREE. */
11833
11834 tree
11835 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11836 {
11837 tree type = BINFO_TYPE (binfo);
11838
11839 while (true)
11840 {
11841 HOST_WIDE_INT pos, size;
11842 tree fld;
11843 int i;
11844
11845 if (types_same_for_odr (type, expected_type))
11846 return binfo;
11847 if (offset < 0)
11848 return NULL_TREE;
11849
11850 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11851 {
11852 if (TREE_CODE (fld) != FIELD_DECL)
11853 continue;
11854
11855 pos = int_bit_position (fld);
11856 size = tree_to_uhwi (DECL_SIZE (fld));
11857 if (pos <= offset && (pos + size) > offset)
11858 break;
11859 }
11860 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11861 return NULL_TREE;
11862
11863 if (!DECL_ARTIFICIAL (fld))
11864 {
11865 binfo = TYPE_BINFO (TREE_TYPE (fld));
11866 if (!binfo)
11867 return NULL_TREE;
11868 }
11869 /* Offset 0 indicates the primary base, whose vtable contents are
11870 represented in the binfo for the derived class. */
11871 else if (offset != 0)
11872 {
11873 tree base_binfo, binfo2 = binfo;
11874
11875 /* Find the BINFO corresponding to FLD. This is made a bit harder
11876 by the fact that with virtual inheritance we may need to walk down
11877 the non-virtual inheritance chain. */
11878 while (true)
11879 {
11880 tree containing_binfo = NULL, found_binfo = NULL;
11881 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11882 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11883 {
11884 found_binfo = base_binfo;
11885 break;
11886 }
11887 else
11888 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11889 - tree_to_shwi (BINFO_OFFSET (binfo)))
11890 * BITS_PER_UNIT < pos
11891 /* Rule out types with no virtual methods, or we can get confused
11892 here by zero-sized bases. */
11893 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11894 && (!containing_binfo
11895 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11896 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11897 containing_binfo = base_binfo;
11898 if (found_binfo)
11899 {
11900 binfo = found_binfo;
11901 break;
11902 }
11903 if (!containing_binfo)
11904 return NULL_TREE;
11905 binfo2 = containing_binfo;
11906 }
11907 }
11908
11909 type = TREE_TYPE (fld);
11910 offset -= pos;
11911 }
11912 }
11913
11914 /* Returns true if X is a typedef decl. */
11915
11916 bool
11917 is_typedef_decl (tree x)
11918 {
11919 return (x && TREE_CODE (x) == TYPE_DECL
11920 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11921 }
11922
11923 /* Returns true iff TYPE is a type variant created for a typedef. */
11924
11925 bool
11926 typedef_variant_p (tree type)
11927 {
11928 return is_typedef_decl (TYPE_NAME (type));
11929 }
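
/* Editor's note, an illustration: for a source-level typedef such as

     typedef int myint;

   the front end records the original type on the typedef's TYPE_DECL, so
   is_typedef_decl is true for that TYPE_DECL and typedef_variant_p is true
   for the variant type it names, whereas both are false for plain int,
   whose TYPE_DECL has no DECL_ORIGINAL_TYPE.  */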
11930
11931 /* Warn about a use of an identifier which was marked deprecated. */
11932 void
11933 warn_deprecated_use (tree node, tree attr)
11934 {
11935 const char *msg;
11936
11937 if (node == 0 || !warn_deprecated_decl)
11938 return;
11939
11940 if (!attr)
11941 {
11942 if (DECL_P (node))
11943 attr = DECL_ATTRIBUTES (node);
11944 else if (TYPE_P (node))
11945 {
11946 tree decl = TYPE_STUB_DECL (node);
11947 if (decl)
11948 attr = lookup_attribute ("deprecated",
11949 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11950 }
11951 }
11952
11953 if (attr)
11954 attr = lookup_attribute ("deprecated", attr);
11955
11956 if (attr)
11957 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
11958 else
11959 msg = NULL;
11960
11961 if (DECL_P (node))
11962 {
11963 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
11964 if (msg)
11965 warning (OPT_Wdeprecated_declarations,
11966 "%qD is deprecated (declared at %r%s:%d%R): %s",
11967 node, "locus", xloc.file, xloc.line, msg);
11968 else
11969 warning (OPT_Wdeprecated_declarations,
11970 "%qD is deprecated (declared at %r%s:%d%R)",
11971 node, "locus", xloc.file, xloc.line);
11972 }
11973 else if (TYPE_P (node))
11974 {
11975 tree what = NULL_TREE;
11976 tree decl = TYPE_STUB_DECL (node);
11977
11978 if (TYPE_NAME (node))
11979 {
11980 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
11981 what = TYPE_NAME (node);
11982 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
11983 && DECL_NAME (TYPE_NAME (node)))
11984 what = DECL_NAME (TYPE_NAME (node));
11985 }
11986
11987 if (decl)
11988 {
11989 expanded_location xloc
11990 = expand_location (DECL_SOURCE_LOCATION (decl));
11991 if (what)
11992 {
11993 if (msg)
11994 warning (OPT_Wdeprecated_declarations,
11995 "%qE is deprecated (declared at %r%s:%d%R): %s",
11996 what, "locus", xloc.file, xloc.line, msg);
11997 else
11998 warning (OPT_Wdeprecated_declarations,
11999 "%qE is deprecated (declared at %r%s:%d%R)",
12000 what, "locus", xloc.file, xloc.line);
12001 }
12002 else
12003 {
12004 if (msg)
12005 warning (OPT_Wdeprecated_declarations,
12006 "type is deprecated (declared at %r%s:%d%R): %s",
12007 "locus", xloc.file, xloc.line, msg);
12008 else
12009 warning (OPT_Wdeprecated_declarations,
12010 "type is deprecated (declared at %r%s:%d%R)",
12011 "locus", xloc.file, xloc.line);
12012 }
12013 }
12014 else
12015 {
12016 if (what)
12017 {
12018 if (msg)
12019 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12020 what, msg);
12021 else
12022 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12023 }
12024 else
12025 {
12026 if (msg)
12027 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12028 msg);
12029 else
12030 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12031 }
12032 }
12033 }
12034 }
12035
12036 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12037 somewhere in it. */
12038
12039 bool
12040 contains_bitfld_component_ref_p (const_tree ref)
12041 {
12042 while (handled_component_p (ref))
12043 {
12044 if (TREE_CODE (ref) == COMPONENT_REF
12045 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12046 return true;
12047 ref = TREE_OPERAND (ref, 0);
12048 }
12049
12050 return false;
12051 }
12052
12053 /* Try to determine whether a TRY_CATCH expression can fall through.
12054 This is a subroutine of block_may_fallthru. */
12055
12056 static bool
12057 try_catch_may_fallthru (const_tree stmt)
12058 {
12059 tree_stmt_iterator i;
12060
12061 /* If the TRY block can fall through, the whole TRY_CATCH can
12062 fall through. */
12063 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12064 return true;
12065
12066 i = tsi_start (TREE_OPERAND (stmt, 1));
12067 switch (TREE_CODE (tsi_stmt (i)))
12068 {
12069 case CATCH_EXPR:
12070 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12071 catch expression and a body. The whole TRY_CATCH may fall
12072 through iff any of the catch bodies falls through. */
12073 for (; !tsi_end_p (i); tsi_next (&i))
12074 {
12075 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12076 return true;
12077 }
12078 return false;
12079
12080 case EH_FILTER_EXPR:
12081 /* The exception filter expression only matters if there is an
12082 exception. If the exception does not match EH_FILTER_TYPES,
12083 we will execute EH_FILTER_FAILURE, and we will fall through
12084 if that falls through. If the exception does match
12085 EH_FILTER_TYPES, the stack unwinder will continue up the
12086 stack, so we will not fall through. We don't know whether we
12087 will throw an exception which matches EH_FILTER_TYPES or not,
12088 so we just ignore EH_FILTER_TYPES and assume that we might
12089 throw an exception which doesn't match. */
12090 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12091
12092 default:
12093 /* This case represents statements to be executed when an
12094 exception occurs. Those statements are implicitly followed
12095 by a RESX statement to resume execution after the exception.
12096 So in this case the TRY_CATCH never falls through. */
12097 return false;
12098 }
12099 }
12100
12101 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12102 need not be 100% accurate; simply be conservative and return true if we
12103 don't know. This is used only to avoid stupidly generating extra code.
12104 If we're wrong, we'll just delete the extra code later. */
12105
12106 bool
12107 block_may_fallthru (const_tree block)
12108 {
12109 /* This CONST_CAST is okay because expr_last returns its argument
12110 unmodified and we assign it to a const_tree. */
12111 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12112
12113 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12114 {
12115 case GOTO_EXPR:
12116 case RETURN_EXPR:
12117 /* Easy cases. If the last statement of the block implies
12118 control transfer, then we can't fall through. */
12119 return false;
12120
12121 case SWITCH_EXPR:
12122 /* If SWITCH_LABELS is set, this is lowered, and represents a
12123 branch to a selected label and hence cannot fall through.
12124 Otherwise SWITCH_BODY is set, and the switch can fall
12125 through. */
12126 return SWITCH_LABELS (stmt) == NULL_TREE;
12127
12128 case COND_EXPR:
12129 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12130 return true;
12131 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12132
12133 case BIND_EXPR:
12134 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12135
12136 case TRY_CATCH_EXPR:
12137 return try_catch_may_fallthru (stmt);
12138
12139 case TRY_FINALLY_EXPR:
12140 /* The finally clause is always executed after the try clause,
12141 so if it does not fall through, then the try-finally will not
12142 fall through. Otherwise, if the try clause does not fall
12143 through, then when the finally clause falls through it will
12144 resume execution wherever the try clause was going. So the
12145 whole try-finally will only fall through if both the try
12146 clause and the finally clause fall through. */
12147 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12148 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12149
12150 case MODIFY_EXPR:
12151 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12152 stmt = TREE_OPERAND (stmt, 1);
12153 else
12154 return true;
12155 /* FALLTHRU */
12156
12157 case CALL_EXPR:
12158 /* Functions that do not return do not fall through. */
12159 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12160
12161 case CLEANUP_POINT_EXPR:
12162 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12163
12164 case TARGET_EXPR:
12165 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12166
12167 case ERROR_MARK:
12168 return true;
12169
12170 default:
12171 return lang_hooks.block_may_fallthru (stmt);
12172 }
12173 }
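
/* Editor's sketch: a front end deciding whether a function body needs an
   implicit return (or a "control reaches end of non-void function" style
   diagnostic) might use the predicate above roughly as:

     if (block_may_fallthru (DECL_SAVED_TREE (fndecl)))
       ... handle falling off the end of FNDECL ...  */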
12174
12175 /* True if we are using EH to handle cleanups. */
12176 static bool using_eh_for_cleanups_flag = false;
12177
12178 /* This routine is called from front ends to indicate that EH should be
12179 used for cleanups. */
12180 void
12181 using_eh_for_cleanups (void)
12182 {
12183 using_eh_for_cleanups_flag = true;
12184 }
12185
12186 /* Query whether EH is used for cleanups. */
12187 bool
12188 using_eh_for_cleanups_p (void)
12189 {
12190 return using_eh_for_cleanups_flag;
12191 }
12192
12193 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12194 const char *
12195 get_tree_code_name (enum tree_code code)
12196 {
12197 const char *invalid = "<invalid tree code>";
12198
12199 if (code >= MAX_TREE_CODES)
12200 return invalid;
12201
12202 return tree_code_name[code];
12203 }
12204
12205 /* Drops the TREE_OVERFLOW flag from T. */
12206
12207 tree
12208 drop_tree_overflow (tree t)
12209 {
12210 gcc_checking_assert (TREE_OVERFLOW (t));
12211
12212 /* For tree codes with sharing machinery, re-build the result. */
12213 if (TREE_CODE (t) == INTEGER_CST)
12214 return wide_int_to_tree (TREE_TYPE (t), t);
12215
12216 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12217 and drop the flag. */
12218 t = copy_node (t);
12219 TREE_OVERFLOW (t) = 0;
12220 return t;
12221 }
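
/* Editor's note, a usage sketch: callers only drop the flag when it is
   actually set, since drop_tree_overflow asserts TREE_OVERFLOW:

     if (TREE_OVERFLOW_P (cst))
       cst = drop_tree_overflow (cst);  */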
12222
12223 /* Given a memory reference expression T, return its base address.
12224 The base address of a memory reference expression is the main
12225 object being referenced. For instance, the base address for
12226 'array[i].fld[j]' is 'array'. You can think of this as stripping
12227 away the offset part from a memory address.
12228
12229 This function calls handled_component_p to strip away all the inner
12230 parts of the memory reference until it reaches the base object. */
12231
12232 tree
12233 get_base_address (tree t)
12234 {
12235 while (handled_component_p (t))
12236 t = TREE_OPERAND (t, 0);
12237
12238 if ((TREE_CODE (t) == MEM_REF
12239 || TREE_CODE (t) == TARGET_MEM_REF)
12240 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12241 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12242
12243 /* ??? Either the alias oracle or all callers need to properly deal
12244 with WITH_SIZE_EXPRs before we can look through those. */
12245 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12246 return NULL_TREE;
12247
12248 return t;
12249 }
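
/* Editor's sketch: a pass checking whether a reference is based on a
   declaration (as opposed to an arbitrary pointer dereference) could use
   the function above like this:

     tree base = get_base_address (ref);
     if (base && DECL_P (base))
       ... REF accesses the object declared by BASE ...  */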
12250
12251 #include "gt-tree.h"