1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "function.h"
42 #include "obstack.h"
43 #include "toplev.h" /* get_random_seed */
44 #include "hashtab.h"
45 #include "filenames.h"
46 #include "output.h"
47 #include "target.h"
48 #include "common/common-target.h"
49 #include "langhooks.h"
50 #include "tree-inline.h"
51 #include "tree-iterator.h"
52 #include "basic-block.h"
53 #include "bitmap.h"
54 #include "pointer-set.h"
55 #include "tree-ssa-alias.h"
56 #include "internal-fn.h"
57 #include "gimple-expr.h"
58 #include "is-a.h"
59 #include "gimple.h"
60 #include "gimple-iterator.h"
61 #include "gimplify.h"
62 #include "gimple-ssa.h"
63 #include "cgraph.h"
64 #include "tree-phinodes.h"
65 #include "stringpool.h"
66 #include "tree-ssanames.h"
67 #include "expr.h"
68 #include "tree-dfa.h"
69 #include "params.h"
70 #include "tree-pass.h"
71 #include "langhooks-def.h"
72 #include "diagnostic.h"
73 #include "tree-diagnostic.h"
74 #include "tree-pretty-print.h"
75 #include "except.h"
76 #include "debug.h"
77 #include "intl.h"
78 #include "wide-int.h"
79 #include "builtins.h"
80
81 /* Tree code classes. */
82
83 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
84 #define END_OF_BASE_TREE_CODES tcc_exceptional,
85
86 const enum tree_code_class tree_code_type[] = {
87 #include "all-tree.def"
88 };
89
90 #undef DEFTREECODE
91 #undef END_OF_BASE_TREE_CODES
92
93 /* Table indexed by tree code giving number of expression
94 operands beyond the fixed part of the node structure.
95 Not used for types or decls. */
96
97 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
98 #define END_OF_BASE_TREE_CODES 0,
99
100 const unsigned char tree_code_length[] = {
101 #include "all-tree.def"
102 };
103
104 #undef DEFTREECODE
105 #undef END_OF_BASE_TREE_CODES
106
107 /* Names of tree components.
108 Used for printing out the tree and error messages. */
109 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
110 #define END_OF_BASE_TREE_CODES "@dummy",
111
112 static const char *const tree_code_name[] = {
113 #include "all-tree.def"
114 };
115
116 #undef DEFTREECODE
117 #undef END_OF_BASE_TREE_CODES
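/* Illustrative note on how the three tables above are populated from
   all-tree.def: the tree.def entry

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   contributes tcc_binary to tree_code_type, 2 to tree_code_length and
   "plus_expr" to tree_code_name, all at index PLUS_EXPR.  */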
118
119 /* Each tree code class has an associated string representation.
120 These must correspond to the tree_code_class entries. */
121
122 const char *const tree_code_class_strings[] =
123 {
124 "exceptional",
125 "constant",
126 "type",
127 "declaration",
128 "reference",
129 "comparison",
130 "unary",
131 "binary",
132 "statement",
133 "vl_exp",
134 "expression"
135 };
136
137 /* obstack.[ch] explicitly declined to prototype this. */
138 extern int _obstack_allocated_p (struct obstack *h, void *obj);
139
140 /* Statistics-gathering stuff. */
141
142 static int tree_code_counts[MAX_TREE_CODES];
143 int tree_node_counts[(int) all_kinds];
144 int tree_node_sizes[(int) all_kinds];
145
146 /* Keep in sync with tree.h:enum tree_node_kind. */
147 static const char * const tree_node_kind_names[] = {
148 "decls",
149 "types",
150 "blocks",
151 "stmts",
152 "refs",
153 "exprs",
154 "constants",
155 "identifiers",
156 "vecs",
157 "binfos",
158 "ssa names",
159 "constructors",
160 "random kinds",
161 "lang_decl kinds",
162 "lang_type kinds",
163 "omp clauses",
164 };
165
166 /* Unique id for next decl created. */
167 static GTY(()) int next_decl_uid;
168 /* Unique id for next type created. */
169 static GTY(()) int next_type_uid = 1;
170 /* Unique id for next debug decl created. Use negative numbers,
171 to catch erroneous uses. */
172 static GTY(()) int next_debug_decl_uid;
173
174 /* Since we cannot rehash a type after it is in the table, we have to
175 keep the hash code. */
176
177 struct GTY(()) type_hash {
178 unsigned long hash;
179 tree type;
180 };
181
182 /* Initial size of the hash table (rounded to next prime). */
183 #define TYPE_HASH_INITIAL_SIZE 1000
184
185 /* Now here is the hash table. When recording a type, it is added to
186 the slot whose index is the hash code. Note that the hash table is
187 used for several kinds of types (function types, array types and
188 array index range types, for now). While all these live in the
189 same table, they are completely independent, and the hash code is
190 computed differently for each of these. */
191
192 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
193 htab_t type_hash_table;
194
195 /* Hash table and temporary node for larger integer const values. */
196 static GTY (()) tree int_cst_node;
197 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
198 htab_t int_cst_hash_table;
199
200 /* Hash table for optimization flags and target option flags. Use the same
201 hash table for both sets of options. Nodes for building the current
202 optimization and target option nodes. The assumption is most of the time
203 the options created will already be in the hash table, so we avoid
204 allocating and freeing up a node repeatedly. */
205 static GTY (()) tree cl_optimization_node;
206 static GTY (()) tree cl_target_option_node;
207 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
208 htab_t cl_option_hash_table;
209
210 /* General tree->tree mapping structure for use in hash tables. */
211
212
213 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
214 htab_t debug_expr_for_decl;
215
216 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
217 htab_t value_expr_for_decl;
218
219 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
220 htab_t debug_args_for_decl;
221
222 static void set_type_quals (tree, int);
223 static int type_hash_eq (const void *, const void *);
224 static hashval_t type_hash_hash (const void *);
225 static hashval_t int_cst_hash_hash (const void *);
226 static int int_cst_hash_eq (const void *, const void *);
227 static hashval_t cl_option_hash_hash (const void *);
228 static int cl_option_hash_eq (const void *, const void *);
229 static void print_type_hash_statistics (void);
230 static void print_debug_expr_statistics (void);
231 static void print_value_expr_statistics (void);
232 static int type_hash_marked_p (const void *);
233 static unsigned int type_hash_list (const_tree, hashval_t);
234 static unsigned int attribute_hash_list (const_tree, hashval_t);
235
236 tree global_trees[TI_MAX];
237 tree integer_types[itk_none];
238
239 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
240
241 /* Number of operands for each OpenMP clause. */
242 unsigned const char omp_clause_num_ops[] =
243 {
244 0, /* OMP_CLAUSE_ERROR */
245 1, /* OMP_CLAUSE_PRIVATE */
246 1, /* OMP_CLAUSE_SHARED */
247 1, /* OMP_CLAUSE_FIRSTPRIVATE */
248 2, /* OMP_CLAUSE_LASTPRIVATE */
249 4, /* OMP_CLAUSE_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 2, /* OMP_CLAUSE_ALIGNED */
254 1, /* OMP_CLAUSE_DEPEND */
255 1, /* OMP_CLAUSE_UNIFORM */
256 2, /* OMP_CLAUSE_FROM */
257 2, /* OMP_CLAUSE_TO */
258 2, /* OMP_CLAUSE_MAP */
259 1, /* OMP_CLAUSE__LOOPTEMP_ */
260 1, /* OMP_CLAUSE_IF */
261 1, /* OMP_CLAUSE_NUM_THREADS */
262 1, /* OMP_CLAUSE_SCHEDULE */
263 0, /* OMP_CLAUSE_NOWAIT */
264 0, /* OMP_CLAUSE_ORDERED */
265 0, /* OMP_CLAUSE_DEFAULT */
266 3, /* OMP_CLAUSE_COLLAPSE */
267 0, /* OMP_CLAUSE_UNTIED */
268 1, /* OMP_CLAUSE_FINAL */
269 0, /* OMP_CLAUSE_MERGEABLE */
270 1, /* OMP_CLAUSE_DEVICE */
271 1, /* OMP_CLAUSE_DIST_SCHEDULE */
272 0, /* OMP_CLAUSE_INBRANCH */
273 0, /* OMP_CLAUSE_NOTINBRANCH */
274 1, /* OMP_CLAUSE_NUM_TEAMS */
275 1, /* OMP_CLAUSE_THREAD_LIMIT */
276 0, /* OMP_CLAUSE_PROC_BIND */
277 1, /* OMP_CLAUSE_SAFELEN */
278 1, /* OMP_CLAUSE_SIMDLEN */
279 0, /* OMP_CLAUSE_FOR */
280 0, /* OMP_CLAUSE_PARALLEL */
281 0, /* OMP_CLAUSE_SECTIONS */
282 0, /* OMP_CLAUSE_TASKGROUP */
283 1, /* OMP_CLAUSE__SIMDUID_ */
284 };
285
286 const char * const omp_clause_code_name[] =
287 {
288 "error_clause",
289 "private",
290 "shared",
291 "firstprivate",
292 "lastprivate",
293 "reduction",
294 "copyin",
295 "copyprivate",
296 "linear",
297 "aligned",
298 "depend",
299 "uniform",
300 "from",
301 "to",
302 "map",
303 "_looptemp_",
304 "if",
305 "num_threads",
306 "schedule",
307 "nowait",
308 "ordered",
309 "default",
310 "collapse",
311 "untied",
312 "final",
313 "mergeable",
314 "device",
315 "dist_schedule",
316 "inbranch",
317 "notinbranch",
318 "num_teams",
319 "thread_limit",
320 "proc_bind",
321 "safelen",
322 "simdlen",
323 "for",
324 "parallel",
325 "sections",
326 "taskgroup",
327 "_simduid_"
328 };
329
330
331 /* Return the tree node structure used by tree code CODE. */
332
333 static inline enum tree_node_structure_enum
334 tree_node_structure_for_code (enum tree_code code)
335 {
336 switch (TREE_CODE_CLASS (code))
337 {
338 case tcc_declaration:
339 {
340 switch (code)
341 {
342 case FIELD_DECL:
343 return TS_FIELD_DECL;
344 case PARM_DECL:
345 return TS_PARM_DECL;
346 case VAR_DECL:
347 return TS_VAR_DECL;
348 case LABEL_DECL:
349 return TS_LABEL_DECL;
350 case RESULT_DECL:
351 return TS_RESULT_DECL;
352 case DEBUG_EXPR_DECL:
353 return TS_DECL_WRTL;
354 case CONST_DECL:
355 return TS_CONST_DECL;
356 case TYPE_DECL:
357 return TS_TYPE_DECL;
358 case FUNCTION_DECL:
359 return TS_FUNCTION_DECL;
360 case TRANSLATION_UNIT_DECL:
361 return TS_TRANSLATION_UNIT_DECL;
362 default:
363 return TS_DECL_NON_COMMON;
364 }
365 }
366 case tcc_type:
367 return TS_TYPE_NON_COMMON;
368 case tcc_reference:
369 case tcc_comparison:
370 case tcc_unary:
371 case tcc_binary:
372 case tcc_expression:
373 case tcc_statement:
374 case tcc_vl_exp:
375 return TS_EXP;
376 default: /* tcc_constant and tcc_exceptional */
377 break;
378 }
379 switch (code)
380 {
381 /* tcc_constant cases. */
382 case VOID_CST: return TS_TYPED;
383 case INTEGER_CST: return TS_INT_CST;
384 case REAL_CST: return TS_REAL_CST;
385 case FIXED_CST: return TS_FIXED_CST;
386 case COMPLEX_CST: return TS_COMPLEX;
387 case VECTOR_CST: return TS_VECTOR;
388 case STRING_CST: return TS_STRING;
389 /* tcc_exceptional cases. */
390 case ERROR_MARK: return TS_COMMON;
391 case IDENTIFIER_NODE: return TS_IDENTIFIER;
392 case TREE_LIST: return TS_LIST;
393 case TREE_VEC: return TS_VEC;
394 case SSA_NAME: return TS_SSA_NAME;
395 case PLACEHOLDER_EXPR: return TS_COMMON;
396 case STATEMENT_LIST: return TS_STATEMENT_LIST;
397 case BLOCK: return TS_BLOCK;
398 case CONSTRUCTOR: return TS_CONSTRUCTOR;
399 case TREE_BINFO: return TS_BINFO;
400 case OMP_CLAUSE: return TS_OMP_CLAUSE;
401 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
402 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
403
404 default:
405 gcc_unreachable ();
406 }
407 }
408
409
410 /* Initialize tree_contains_struct to describe the hierarchy of tree
411 nodes. */
412
413 static void
414 initialize_tree_contains_struct (void)
415 {
416 unsigned i;
417
418 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
419 {
420 enum tree_code code;
421 enum tree_node_structure_enum ts_code;
422
423 code = (enum tree_code) i;
424 ts_code = tree_node_structure_for_code (code);
425
426 /* Mark the TS structure itself. */
427 tree_contains_struct[code][ts_code] = 1;
428
429 /* Mark all the structures that TS is derived from. */
430 switch (ts_code)
431 {
432 case TS_TYPED:
433 case TS_BLOCK:
434 MARK_TS_BASE (code);
435 break;
436
437 case TS_COMMON:
438 case TS_INT_CST:
439 case TS_REAL_CST:
440 case TS_FIXED_CST:
441 case TS_VECTOR:
442 case TS_STRING:
443 case TS_COMPLEX:
444 case TS_SSA_NAME:
445 case TS_CONSTRUCTOR:
446 case TS_EXP:
447 case TS_STATEMENT_LIST:
448 MARK_TS_TYPED (code);
449 break;
450
451 case TS_IDENTIFIER:
452 case TS_DECL_MINIMAL:
453 case TS_TYPE_COMMON:
454 case TS_LIST:
455 case TS_VEC:
456 case TS_BINFO:
457 case TS_OMP_CLAUSE:
458 case TS_OPTIMIZATION:
459 case TS_TARGET_OPTION:
460 MARK_TS_COMMON (code);
461 break;
462
463 case TS_TYPE_WITH_LANG_SPECIFIC:
464 MARK_TS_TYPE_COMMON (code);
465 break;
466
467 case TS_TYPE_NON_COMMON:
468 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
469 break;
470
471 case TS_DECL_COMMON:
472 MARK_TS_DECL_MINIMAL (code);
473 break;
474
475 case TS_DECL_WRTL:
476 case TS_CONST_DECL:
477 MARK_TS_DECL_COMMON (code);
478 break;
479
480 case TS_DECL_NON_COMMON:
481 MARK_TS_DECL_WITH_VIS (code);
482 break;
483
484 case TS_DECL_WITH_VIS:
485 case TS_PARM_DECL:
486 case TS_LABEL_DECL:
487 case TS_RESULT_DECL:
488 MARK_TS_DECL_WRTL (code);
489 break;
490
491 case TS_FIELD_DECL:
492 MARK_TS_DECL_COMMON (code);
493 break;
494
495 case TS_VAR_DECL:
496 MARK_TS_DECL_WITH_VIS (code);
497 break;
498
499 case TS_TYPE_DECL:
500 case TS_FUNCTION_DECL:
501 MARK_TS_DECL_NON_COMMON (code);
502 break;
503
504 case TS_TRANSLATION_UNIT_DECL:
505 MARK_TS_DECL_COMMON (code);
506 break;
507
508 default:
509 gcc_unreachable ();
510 }
511 }
512
513 /* Basic consistency checks for attributes used in fold. */
514 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
515 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
516 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
517 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
518 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
519 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
520 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
521 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
522 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
523 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
524 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
525 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
526 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
527 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
528 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
529 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
530 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
531 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
532 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
533 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
534 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
535 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
536 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
537 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
538 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
539 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
540 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
541 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
542 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
543 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
544 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
545 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
546 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
547 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
548 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
549 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
550 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
551 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
552 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
553 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
554 }
555
556
557 /* Init tree.c. */
558
559 void
560 init_ttree (void)
561 {
562 /* Initialize the hash table of types. */
563 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
564 type_hash_eq, 0);
565
566 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
567 tree_decl_map_eq, 0);
568
569 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
570 tree_decl_map_eq, 0);
571
572 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
573 int_cst_hash_eq, NULL);
574
575 int_cst_node = make_int_cst (1, 1);
576
577 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
578 cl_option_hash_eq, NULL);
579
580 cl_optimization_node = make_node (OPTIMIZATION_NODE);
581 cl_target_option_node = make_node (TARGET_OPTION_NODE);
582
583 /* Initialize the tree_contains_struct array. */
584 initialize_tree_contains_struct ();
585 lang_hooks.init_ts ();
586 }
587
588 \f
589 /* The name of the object as the assembler will see it (but before any
590 translations made by ASM_OUTPUT_LABELREF). Often this is the same
591 as DECL_NAME. It is an IDENTIFIER_NODE. */
592 tree
593 decl_assembler_name (tree decl)
594 {
595 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
596 lang_hooks.set_decl_assembler_name (decl);
597 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
598 }
599
600 /* When the target supports COMDAT groups, this indicates which group the
601 DECL is associated with. This can be either an IDENTIFIER_NODE or a
602 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
603 tree
604 decl_comdat_group (const_tree node)
605 {
606 struct symtab_node *snode = symtab_get_node (node);
607 if (!snode)
608 return NULL;
609 return snode->get_comdat_group ();
610 }
611
612 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
613 tree
614 decl_comdat_group_id (const_tree node)
615 {
616 struct symtab_node *snode = symtab_get_node (node);
617 if (!snode)
618 return NULL;
619 return snode->get_comdat_group_id ();
620 }
621
622 /* When the target supports named sections, return the section name of NODE
623 as a string, or NULL if NODE is in no section. */
624 const char *
625 decl_section_name (const_tree node)
626 {
627 struct symtab_node *snode = symtab_get_node (node);
628 if (!snode)
629 return NULL;
630 return snode->get_section ();
631 }
632
633 /* Set the section name of NODE to the string VALUE, or clear it
634 when VALUE is NULL. */
635 void
636 set_decl_section_name (tree node, const char *value)
637 {
638 struct symtab_node *snode;
639
640 if (value == NULL)
641 {
642 snode = symtab_get_node (node);
643 if (!snode)
644 return;
645 }
646 else if (TREE_CODE (node) == VAR_DECL)
647 snode = varpool_node_for_decl (node);
648 else
649 snode = cgraph_get_create_node (node);
650 snode->set_section (value);
651 }
652
653 /* Return TLS model of a variable NODE. */
654 enum tls_model
655 decl_tls_model (const_tree node)
656 {
657 struct varpool_node *snode = varpool_get_node (node);
658 if (!snode)
659 return TLS_MODEL_NONE;
660 return snode->tls_model;
661 }
662
663 /* Set TLS model of variable NODE to MODEL. */
664 void
665 set_decl_tls_model (tree node, enum tls_model model)
666 {
667 struct varpool_node *vnode;
668
669 if (model == TLS_MODEL_NONE)
670 {
671 vnode = varpool_get_node (node);
672 if (!vnode)
673 return;
674 }
675 else
676 vnode = varpool_node_for_decl (node);
677 vnode->tls_model = model;
678 }
679
680 /* Compute the number of bytes occupied by a tree with code CODE.
681 This function cannot be used for nodes that have variable sizes,
682 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
683 size_t
684 tree_code_size (enum tree_code code)
685 {
686 switch (TREE_CODE_CLASS (code))
687 {
688 case tcc_declaration: /* A decl node */
689 {
690 switch (code)
691 {
692 case FIELD_DECL:
693 return sizeof (struct tree_field_decl);
694 case PARM_DECL:
695 return sizeof (struct tree_parm_decl);
696 case VAR_DECL:
697 return sizeof (struct tree_var_decl);
698 case LABEL_DECL:
699 return sizeof (struct tree_label_decl);
700 case RESULT_DECL:
701 return sizeof (struct tree_result_decl);
702 case CONST_DECL:
703 return sizeof (struct tree_const_decl);
704 case TYPE_DECL:
705 return sizeof (struct tree_type_decl);
706 case FUNCTION_DECL:
707 return sizeof (struct tree_function_decl);
708 case DEBUG_EXPR_DECL:
709 return sizeof (struct tree_decl_with_rtl);
710 default:
711 return sizeof (struct tree_decl_non_common);
712 }
713 }
714
715 case tcc_type: /* a type node */
716 return sizeof (struct tree_type_non_common);
717
718 case tcc_reference: /* a reference */
719 case tcc_expression: /* an expression */
720 case tcc_statement: /* an expression with side effects */
721 case tcc_comparison: /* a comparison expression */
722 case tcc_unary: /* a unary arithmetic expression */
723 case tcc_binary: /* a binary arithmetic expression */
724 return (sizeof (struct tree_exp)
725 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
726
727 case tcc_constant: /* a constant */
728 switch (code)
729 {
730 case VOID_CST: return sizeof (struct tree_typed);
731 case INTEGER_CST: gcc_unreachable ();
732 case REAL_CST: return sizeof (struct tree_real_cst);
733 case FIXED_CST: return sizeof (struct tree_fixed_cst);
734 case COMPLEX_CST: return sizeof (struct tree_complex);
735 case VECTOR_CST: return sizeof (struct tree_vector);
736 case STRING_CST: gcc_unreachable ();
737 default:
738 return lang_hooks.tree_size (code);
739 }
740
741 case tcc_exceptional: /* something random, like an identifier. */
742 switch (code)
743 {
744 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
745 case TREE_LIST: return sizeof (struct tree_list);
746
747 case ERROR_MARK:
748 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
749
750 case TREE_VEC:
751 case OMP_CLAUSE: gcc_unreachable ();
752
753 case SSA_NAME: return sizeof (struct tree_ssa_name);
754
755 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
756 case BLOCK: return sizeof (struct tree_block);
757 case CONSTRUCTOR: return sizeof (struct tree_constructor);
758 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
759 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
760
761 default:
762 return lang_hooks.tree_size (code);
763 }
764
765 default:
766 gcc_unreachable ();
767 }
768 }
769
770 /* Compute the number of bytes occupied by NODE. This routine only
771 looks at TREE_CODE, except for those nodes that have variable sizes. */
772 size_t
773 tree_size (const_tree node)
774 {
775 const enum tree_code code = TREE_CODE (node);
776 switch (code)
777 {
778 case INTEGER_CST:
779 return (sizeof (struct tree_int_cst)
780 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
781
782 case TREE_BINFO:
783 return (offsetof (struct tree_binfo, base_binfos)
784 + vec<tree, va_gc>
785 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
786
787 case TREE_VEC:
788 return (sizeof (struct tree_vec)
789 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
790
791 case VECTOR_CST:
792 return (sizeof (struct tree_vector)
793 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
794
795 case STRING_CST:
796 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
797
798 case OMP_CLAUSE:
799 return (sizeof (struct tree_omp_clause)
800 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
801 * sizeof (tree));
802
803 default:
804 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
805 return (sizeof (struct tree_exp)
806 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
807 else
808 return tree_code_size (code);
809 }
810 }
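/* Worked example for the variable-size cases above: a TREE_VEC with
   TREE_VEC_LENGTH of 3 occupies sizeof (struct tree_vec)
   + 2 * sizeof (tree), since one element is embedded in the structure;
   likewise an OMP_CLAUSE_COLLAPSE node (3 operands per
   omp_clause_num_ops) occupies sizeof (struct tree_omp_clause)
   + 2 * sizeof (tree).  */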
811
812 /* Record interesting allocation statistics for a tree node with CODE
813 and LENGTH. */
814
815 static void
816 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
817 size_t length ATTRIBUTE_UNUSED)
818 {
819 enum tree_code_class type = TREE_CODE_CLASS (code);
820 tree_node_kind kind;
821
822 if (!GATHER_STATISTICS)
823 return;
824
825 switch (type)
826 {
827 case tcc_declaration: /* A decl node */
828 kind = d_kind;
829 break;
830
831 case tcc_type: /* a type node */
832 kind = t_kind;
833 break;
834
835 case tcc_statement: /* an expression with side effects */
836 kind = s_kind;
837 break;
838
839 case tcc_reference: /* a reference */
840 kind = r_kind;
841 break;
842
843 case tcc_expression: /* an expression */
844 case tcc_comparison: /* a comparison expression */
845 case tcc_unary: /* a unary arithmetic expression */
846 case tcc_binary: /* a binary arithmetic expression */
847 kind = e_kind;
848 break;
849
850 case tcc_constant: /* a constant */
851 kind = c_kind;
852 break;
853
854 case tcc_exceptional: /* something random, like an identifier. */
855 switch (code)
856 {
857 case IDENTIFIER_NODE:
858 kind = id_kind;
859 break;
860
861 case TREE_VEC:
862 kind = vec_kind;
863 break;
864
865 case TREE_BINFO:
866 kind = binfo_kind;
867 break;
868
869 case SSA_NAME:
870 kind = ssa_name_kind;
871 break;
872
873 case BLOCK:
874 kind = b_kind;
875 break;
876
877 case CONSTRUCTOR:
878 kind = constr_kind;
879 break;
880
881 case OMP_CLAUSE:
882 kind = omp_clause_kind;
883 break;
884
885 default:
886 kind = x_kind;
887 break;
888 }
889 break;
890
891 case tcc_vl_exp:
892 kind = e_kind;
893 break;
894
895 default:
896 gcc_unreachable ();
897 }
898
899 tree_code_counts[(int) code]++;
900 tree_node_counts[(int) kind]++;
901 tree_node_sizes[(int) kind] += length;
902 }
903
904 /* Allocate and return a new UID from the DECL_UID namespace. */
905
906 int
907 allocate_decl_uid (void)
908 {
909 return next_decl_uid++;
910 }
911
912 /* Return a newly allocated node of code CODE. For decl and type
913 nodes, some other fields are initialized. The rest of the node is
914 initialized to zero. This function cannot be used for TREE_VEC,
915 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
916 tree_code_size.
917
918 Achoo! I got a code in the node. */
919
920 tree
921 make_node_stat (enum tree_code code MEM_STAT_DECL)
922 {
923 tree t;
924 enum tree_code_class type = TREE_CODE_CLASS (code);
925 size_t length = tree_code_size (code);
926
927 record_node_allocation_statistics (code, length);
928
929 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
930 TREE_SET_CODE (t, code);
931
932 switch (type)
933 {
934 case tcc_statement:
935 TREE_SIDE_EFFECTS (t) = 1;
936 break;
937
938 case tcc_declaration:
939 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
940 {
941 if (code == FUNCTION_DECL)
942 {
943 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
944 DECL_MODE (t) = FUNCTION_MODE;
945 }
946 else
947 DECL_ALIGN (t) = 1;
948 }
949 DECL_SOURCE_LOCATION (t) = input_location;
950 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
951 DECL_UID (t) = --next_debug_decl_uid;
952 else
953 {
954 DECL_UID (t) = allocate_decl_uid ();
955 SET_DECL_PT_UID (t, -1);
956 }
957 if (TREE_CODE (t) == LABEL_DECL)
958 LABEL_DECL_UID (t) = -1;
959
960 break;
961
962 case tcc_type:
963 TYPE_UID (t) = next_type_uid++;
964 TYPE_ALIGN (t) = BITS_PER_UNIT;
965 TYPE_USER_ALIGN (t) = 0;
966 TYPE_MAIN_VARIANT (t) = t;
967 TYPE_CANONICAL (t) = t;
968
969 /* Default to no attributes for type, but let target change that. */
970 TYPE_ATTRIBUTES (t) = NULL_TREE;
971 targetm.set_default_type_attributes (t);
972
973 /* We have not yet computed the alias set for this type. */
974 TYPE_ALIAS_SET (t) = -1;
975 break;
976
977 case tcc_constant:
978 TREE_CONSTANT (t) = 1;
979 break;
980
981 case tcc_expression:
982 switch (code)
983 {
984 case INIT_EXPR:
985 case MODIFY_EXPR:
986 case VA_ARG_EXPR:
987 case PREDECREMENT_EXPR:
988 case PREINCREMENT_EXPR:
989 case POSTDECREMENT_EXPR:
990 case POSTINCREMENT_EXPR:
991 /* All of these have side-effects, no matter what their
992 operands are. */
993 TREE_SIDE_EFFECTS (t) = 1;
994 break;
995
996 default:
997 break;
998 }
999 break;
1000
1001 default:
1002 /* Other classes need no special treatment. */
1003 break;
1004 }
1005
1006 return t;
1007 }
1008 \f
1009 /* Return a new node with the same contents as NODE except that its
1010 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1011
1012 tree
1013 copy_node_stat (tree node MEM_STAT_DECL)
1014 {
1015 tree t;
1016 enum tree_code code = TREE_CODE (node);
1017 size_t length;
1018
1019 gcc_assert (code != STATEMENT_LIST);
1020
1021 length = tree_size (node);
1022 record_node_allocation_statistics (code, length);
1023 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1024 memcpy (t, node, length);
1025
1026 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1027 TREE_CHAIN (t) = 0;
1028 TREE_ASM_WRITTEN (t) = 0;
1029 TREE_VISITED (t) = 0;
1030
1031 if (TREE_CODE_CLASS (code) == tcc_declaration)
1032 {
1033 if (code == DEBUG_EXPR_DECL)
1034 DECL_UID (t) = --next_debug_decl_uid;
1035 else
1036 {
1037 DECL_UID (t) = allocate_decl_uid ();
1038 if (DECL_PT_UID_SET_P (node))
1039 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1040 }
1041 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1042 && DECL_HAS_VALUE_EXPR_P (node))
1043 {
1044 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1045 DECL_HAS_VALUE_EXPR_P (t) = 1;
1046 }
1047 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1048 if (TREE_CODE (node) == VAR_DECL)
1049 {
1050 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1051 t->decl_with_vis.symtab_node = NULL;
1052 }
1053 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1054 {
1055 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1056 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1057 }
1058 if (TREE_CODE (node) == FUNCTION_DECL)
1059 {
1060 DECL_STRUCT_FUNCTION (t) = NULL;
1061 t->decl_with_vis.symtab_node = NULL;
1062 }
1063 }
1064 else if (TREE_CODE_CLASS (code) == tcc_type)
1065 {
1066 TYPE_UID (t) = next_type_uid++;
1067 /* The following is so that the debug code for
1068 the copy is different from the original type.
1069 The two statements usually duplicate each other
1070 (because they clear fields of the same union),
1071 but the optimizer should catch that. */
1072 TYPE_SYMTAB_POINTER (t) = 0;
1073 TYPE_SYMTAB_ADDRESS (t) = 0;
1074
1075 /* Do not copy the values cache. */
1076 if (TYPE_CACHED_VALUES_P (t))
1077 {
1078 TYPE_CACHED_VALUES_P (t) = 0;
1079 TYPE_CACHED_VALUES (t) = NULL_TREE;
1080 }
1081 }
1082
1083 return t;
1084 }
1085
1086 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1087 For example, this can copy a list made of TREE_LIST nodes. */
1088
1089 tree
1090 copy_list (tree list)
1091 {
1092 tree head;
1093 tree prev, next;
1094
1095 if (list == 0)
1096 return 0;
1097
1098 head = prev = copy_node (list);
1099 next = TREE_CHAIN (list);
1100 while (next)
1101 {
1102 TREE_CHAIN (prev) = copy_node (next);
1103 prev = TREE_CHAIN (prev);
1104 next = TREE_CHAIN (next);
1105 }
1106 return head;
1107 }
1108
1109 \f
1110 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1111 INTEGER_CST with value CST and type TYPE. */
1112
1113 static unsigned int
1114 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1115 {
1116 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1117 /* We need an extra zero HWI if CST is an unsigned integer with its
1118 upper bit set, and if CST occupies a whole number of HWIs. */
1119 if (TYPE_UNSIGNED (type)
1120 && wi::neg_p (cst)
1121 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1122 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1123 return cst.get_len ();
1124 }
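/* Example, assuming 64-bit HOST_WIDE_INTs: the value 0x8000000000000000
   in a 64-bit unsigned type is negative when read as a signed HWI and
   fills a whole number of HWIs, so the result is 2 (one extra zero HWI);
   the same bit pattern in a 64-bit signed type simply yields
   cst.get_len (), i.e. 1.  */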
1125
1126 /* Return a new INTEGER_CST with value CST and type TYPE. */
1127
1128 static tree
1129 build_new_int_cst (tree type, const wide_int &cst)
1130 {
1131 unsigned int len = cst.get_len ();
1132 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1133 tree nt = make_int_cst (len, ext_len);
1134
1135 if (len < ext_len)
1136 {
1137 --ext_len;
1138 TREE_INT_CST_ELT (nt, ext_len) = 0;
1139 for (unsigned int i = len; i < ext_len; ++i)
1140 TREE_INT_CST_ELT (nt, i) = -1;
1141 }
1142 else if (TYPE_UNSIGNED (type)
1143 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1144 {
1145 len--;
1146 TREE_INT_CST_ELT (nt, len)
1147 = zext_hwi (cst.elt (len),
1148 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1149 }
1150
1151 for (unsigned int i = 0; i < len; i++)
1152 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1153 TREE_TYPE (nt) = type;
1154 return nt;
1155 }
1156
1157 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1158
1159 tree
1160 build_int_cst (tree type, HOST_WIDE_INT low)
1161 {
1162 /* Support legacy code. */
1163 if (!type)
1164 type = integer_type_node;
1165
1166 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1167 }
1168
1169 tree
1170 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1171 {
1172 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1173 }
1174
1175 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1176
1177 tree
1178 build_int_cst_type (tree type, HOST_WIDE_INT low)
1179 {
1180 gcc_assert (type);
1181 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1182 }
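/* Usage sketch: because LOW is sign extended, calling
   build_int_cst (size_type_node, -1) yields the all-ones (maximum)
   value of size_type_node, and build_int_cst (NULL_TREE, 42) falls
   back to integer_type_node.  */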
1183
1184 /* Construct a tree of type TYPE with the value given by CST. The signedness
1185 of CST is assumed to be the same as the signedness of TYPE. */
1186
1187 tree
1188 double_int_to_tree (tree type, double_int cst)
1189 {
1190 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1191 }
1192
1193 /* We force the wide_int CST to the range of the type TYPE by sign or
1194 zero extending it. OVERFLOWABLE indicates if we are interested in
1195 overflow of the value: when > 0 we are only interested in signed
1196 overflow, when < 0 we are interested in any overflow. OVERFLOWED
1197 indicates whether overflow has already occurred. We force the
1198 value to be within the range of TYPE (by setting to 0 or 1 all
1199 the bits outside the type's range). We set TREE_OVERFLOW if
1200 OVERFLOWED is nonzero,
1201 or OVERFLOWABLE is > 0 and signed overflow occurs,
1202 or OVERFLOWABLE is < 0 and any overflow occurs.
1203 We return a new tree node for the extended wide_int.
1204 The node is shared if no overflow
1205 flags are set. */
1206
1207
1208 tree
1209 force_fit_type (tree type, const wide_int_ref &cst,
1210 int overflowable, bool overflowed)
1211 {
1212 signop sign = TYPE_SIGN (type);
1213
1214 /* If we need to set overflow flags, return a new unshared node. */
1215 if (overflowed || !wi::fits_to_tree_p (cst, type))
1216 {
1217 if (overflowed
1218 || overflowable < 0
1219 || (overflowable > 0 && sign == SIGNED))
1220 {
1221 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1222 tree t = build_new_int_cst (type, tmp);
1223 TREE_OVERFLOW (t) = 1;
1224 return t;
1225 }
1226 }
1227
1228 /* Else build a shared node. */
1229 return wide_int_to_tree (type, cst);
1230 }
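/* Worked example: forcing the value 300 into a signed 8-bit type does
   not fit, so with OVERFLOWABLE > 0 the result is an unshared
   INTEGER_CST holding 300 truncated to 8 bits (i.e. 44) with
   TREE_OVERFLOW set; with OVERFLOWABLE == 0 and !OVERFLOWED the same
   truncated value is returned as a shared node via wide_int_to_tree.  */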
1231
1232 /* These are the hash table functions for the hash table of INTEGER_CST
1233 nodes of a sizetype. */
1234
1235 /* Return the hash code of X, an INTEGER_CST. */
1236
1237 static hashval_t
1238 int_cst_hash_hash (const void *x)
1239 {
1240 const_tree const t = (const_tree) x;
1241 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1242 int i;
1243
1244 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1245 code ^= TREE_INT_CST_ELT (t, i);
1246
1247 return code;
1248 }
1249
1250 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1251 is the same as that given by *Y, also an INTEGER_CST tree node. */
1252
1253 static int
1254 int_cst_hash_eq (const void *x, const void *y)
1255 {
1256 const_tree const xt = (const_tree) x;
1257 const_tree const yt = (const_tree) y;
1258
1259 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1260 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1261 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1262 return false;
1263
1264 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1265 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1266 return false;
1267
1268 return true;
1269 }
1270
1271 /* Create an INT_CST node of TYPE and value CST.
1272 The returned node is always shared. For small integers we use a
1273 per-type vector cache, for larger ones we use a single hash table.
1274 The value is extended from its precision according to the sign of
1275 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1276 the upper bits and ensures that hashing and value equality based
1277 upon the underlying HOST_WIDE_INTs works without masking. */
1278
1279 tree
1280 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1281 {
1282 tree t;
1283 int ix = -1;
1284 int limit = 0;
1285
1286 gcc_assert (type);
1287 unsigned int prec = TYPE_PRECISION (type);
1288 signop sgn = TYPE_SIGN (type);
1289
1290 /* Verify that everything is canonical. */
1291 int l = pcst.get_len ();
1292 if (l > 1)
1293 {
1294 if (pcst.elt (l - 1) == 0)
1295 gcc_checking_assert (pcst.elt (l - 2) < 0);
1296 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1297 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1298 }
1299
1300 wide_int cst = wide_int::from (pcst, prec, sgn);
1301 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1302
1303 if (ext_len == 1)
1304 {
1305 /* We just need to store a single HOST_WIDE_INT. */
1306 HOST_WIDE_INT hwi;
1307 if (TYPE_UNSIGNED (type))
1308 hwi = cst.to_uhwi ();
1309 else
1310 hwi = cst.to_shwi ();
1311
1312 switch (TREE_CODE (type))
1313 {
1314 case NULLPTR_TYPE:
1315 gcc_assert (hwi == 0);
1316 /* Fallthru. */
1317
1318 case POINTER_TYPE:
1319 case REFERENCE_TYPE:
1320 /* Cache NULL pointer. */
1321 if (hwi == 0)
1322 {
1323 limit = 1;
1324 ix = 0;
1325 }
1326 break;
1327
1328 case BOOLEAN_TYPE:
1329 /* Cache false or true. */
1330 limit = 2;
1331 if (hwi < 2)
1332 ix = hwi;
1333 break;
1334
1335 case INTEGER_TYPE:
1336 case OFFSET_TYPE:
1337 if (TYPE_SIGN (type) == UNSIGNED)
1338 {
1339 /* Cache [0, N). */
1340 limit = INTEGER_SHARE_LIMIT;
1341 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1342 ix = hwi;
1343 }
1344 else
1345 {
1346 /* Cache [-1, N). */
1347 limit = INTEGER_SHARE_LIMIT + 1;
1348 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1349 ix = hwi + 1;
1350 }
1351 break;
1352
1353 case ENUMERAL_TYPE:
1354 break;
1355
1356 default:
1357 gcc_unreachable ();
1358 }
1359
1360 if (ix >= 0)
1361 {
1362 /* Look for it in the type's vector of small shared ints. */
1363 if (!TYPE_CACHED_VALUES_P (type))
1364 {
1365 TYPE_CACHED_VALUES_P (type) = 1;
1366 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1367 }
1368
1369 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1370 if (t)
1371 /* Make sure no one is clobbering the shared constant. */
1372 gcc_checking_assert (TREE_TYPE (t) == type
1373 && TREE_INT_CST_NUNITS (t) == 1
1374 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1375 && TREE_INT_CST_EXT_NUNITS (t) == 1
1376 && TREE_INT_CST_ELT (t, 0) == hwi);
1377 else
1378 {
1379 /* Create a new shared int. */
1380 t = build_new_int_cst (type, cst);
1381 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1382 }
1383 }
1384 else
1385 {
1386 /* Use the cache of larger shared ints, using int_cst_node as
1387 a temporary. */
1388 void **slot;
1389
1390 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1391 TREE_TYPE (int_cst_node) = type;
1392
1393 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1394 t = (tree) *slot;
1395 if (!t)
1396 {
1397 /* Insert this one into the hash table. */
1398 t = int_cst_node;
1399 *slot = t;
1400 /* Make a new node for next time round. */
1401 int_cst_node = make_int_cst (1, 1);
1402 }
1403 }
1404 }
1405 else
1406 {
1407 /* The value either hashes properly or we drop it on the floor
1408 for the gc to take care of. There will not be enough of them
1409 to worry about. */
1410 void **slot;
1411
1412 tree nt = build_new_int_cst (type, cst);
1413 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1414 t = (tree) *slot;
1415 if (!t)
1416 {
1417 /* Insert this one into the hash table. */
1418 t = nt;
1419 *slot = t;
1420 }
1421 }
1422
1423 return t;
1424 }
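/* Caching sketch: for a signed INTEGER_TYPE such as integer_type_node,
   the value 1 maps to ix == 2 in the per-type TYPE_CACHED_VALUES
   vector, so repeated calls to build_int_cst (integer_type_node, 1)
   return the same shared node; a value outside the small-cache range,
   e.g. 12345, leaves ix == -1 and is shared through int_cst_hash_table
   instead.  */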
1425
1426 void
1427 cache_integer_cst (tree t)
1428 {
1429 tree type = TREE_TYPE (t);
1430 int ix = -1;
1431 int limit = 0;
1432 int prec = TYPE_PRECISION (type);
1433
1434 gcc_assert (!TREE_OVERFLOW (t));
1435
1436 switch (TREE_CODE (type))
1437 {
1438 case NULLPTR_TYPE:
1439 gcc_assert (integer_zerop (t));
1440 /* Fallthru. */
1441
1442 case POINTER_TYPE:
1443 case REFERENCE_TYPE:
1444 /* Cache NULL pointer. */
1445 if (integer_zerop (t))
1446 {
1447 limit = 1;
1448 ix = 0;
1449 }
1450 break;
1451
1452 case BOOLEAN_TYPE:
1453 /* Cache false or true. */
1454 limit = 2;
1455 if (wi::ltu_p (t, 2))
1456 ix = TREE_INT_CST_ELT (t, 0);
1457 break;
1458
1459 case INTEGER_TYPE:
1460 case OFFSET_TYPE:
1461 if (TYPE_UNSIGNED (type))
1462 {
1463 /* Cache 0..N */
1464 limit = INTEGER_SHARE_LIMIT;
1465
1466 /* This is a little hokey, but if the prec is smaller than
1467 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1468 obvious test will not get the correct answer. */
1469 if (prec < HOST_BITS_PER_WIDE_INT)
1470 {
1471 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1472 ix = tree_to_uhwi (t);
1473 }
1474 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1475 ix = tree_to_uhwi (t);
1476 }
1477 else
1478 {
1479 /* Cache -1..N */
1480 limit = INTEGER_SHARE_LIMIT + 1;
1481
1482 if (integer_minus_onep (t))
1483 ix = 0;
1484 else if (!wi::neg_p (t))
1485 {
1486 if (prec < HOST_BITS_PER_WIDE_INT)
1487 {
1488 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1489 ix = tree_to_shwi (t) + 1;
1490 }
1491 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1492 ix = tree_to_shwi (t) + 1;
1493 }
1494 }
1495 break;
1496
1497 case ENUMERAL_TYPE:
1498 break;
1499
1500 default:
1501 gcc_unreachable ();
1502 }
1503
1504 if (ix >= 0)
1505 {
1506 /* Look for it in the type's vector of small shared ints. */
1507 if (!TYPE_CACHED_VALUES_P (type))
1508 {
1509 TYPE_CACHED_VALUES_P (type) = 1;
1510 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1511 }
1512
1513 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1514 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1515 }
1516 else
1517 {
1518 /* Use the cache of larger shared ints. */
1519 void **slot;
1520
1521 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1522 /* If there is already an entry for the number verify it's the
1523 same. */
1524 if (*slot)
1525 gcc_assert (wi::eq_p (tree (*slot), t));
1526 else
1527 /* Otherwise insert this one into the hash table. */
1528 *slot = t;
1529 }
1530 }
1531
1532
1533 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1534 and the rest are zeros. */
1535
1536 tree
1537 build_low_bits_mask (tree type, unsigned bits)
1538 {
1539 gcc_assert (bits <= TYPE_PRECISION (type));
1540
1541 return wide_int_to_tree (type, wi::mask (bits, false,
1542 TYPE_PRECISION (type)));
1543 }
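/* Example: build_low_bits_mask (unsigned_type_node, 4) returns the
   constant 15, i.e. a mask with only the four low-order bits set.  */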
1544
1545 /* Checks that X is an integer constant that can be expressed in an (unsigned)
1546 HOST_WIDE_INT without loss of precision. */
1547
1548 bool
1549 cst_and_fits_in_hwi (const_tree x)
1550 {
1551 if (TREE_CODE (x) != INTEGER_CST)
1552 return false;
1553
1554 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1555 return false;
1556
1557 return TREE_INT_CST_NUNITS (x) == 1;
1558 }
1559
1560 /* Build a newly constructed TREE_VEC node of length LEN. */
1561
1562 tree
1563 make_vector_stat (unsigned len MEM_STAT_DECL)
1564 {
1565 tree t;
1566 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1567
1568 record_node_allocation_statistics (VECTOR_CST, length);
1569
1570 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1571
1572 TREE_SET_CODE (t, VECTOR_CST);
1573 TREE_CONSTANT (t) = 1;
1574
1575 return t;
1576 }
1577
1578 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1579 are given by the array VALS. */
1580
1581 tree
1582 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1583 {
1584 int over = 0;
1585 unsigned cnt = 0;
1586 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1587 TREE_TYPE (v) = type;
1588
1589 /* Iterate through elements and check for overflow. */
1590 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1591 {
1592 tree value = vals[cnt];
1593
1594 VECTOR_CST_ELT (v, cnt) = value;
1595
1596 /* Don't crash if we get an address constant. */
1597 if (!CONSTANT_CLASS_P (value))
1598 continue;
1599
1600 over |= TREE_OVERFLOW (value);
1601 }
1602
1603 TREE_OVERFLOW (v) = over;
1604 return v;
1605 }
1606
1607 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1608 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1609
1610 tree
1611 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1612 {
1613 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1614 unsigned HOST_WIDE_INT idx;
1615 tree value;
1616
1617 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1618 vec[idx] = value;
1619 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1620 vec[idx] = build_zero_cst (TREE_TYPE (type));
1621
1622 return build_vector (type, vec);
1623 }
1624
1625 /* Build a vector of type VECTYPE in which every element is SC. */
1626 tree
1627 build_vector_from_val (tree vectype, tree sc)
1628 {
1629 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1630
1631 if (sc == error_mark_node)
1632 return sc;
1633
1634 /* Verify that the vector type is suitable for SC. Note that there
1635 is some inconsistency in the type-system with respect to restrict
1636 qualifications of pointers. Vector types always have a main-variant
1637 element type and the qualification is applied to the vector-type.
1638 So TREE_TYPE (vector-type) does not return a properly qualified
1639 vector element-type. */
1640 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1641 TREE_TYPE (vectype)));
1642
1643 if (CONSTANT_CLASS_P (sc))
1644 {
1645 tree *v = XALLOCAVEC (tree, nunits);
1646 for (i = 0; i < nunits; ++i)
1647 v[i] = sc;
1648 return build_vector (vectype, v);
1649 }
1650 else
1651 {
1652 vec<constructor_elt, va_gc> *v;
1653 vec_alloc (v, nunits);
1654 for (i = 0; i < nunits; ++i)
1655 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1656 return build_constructor (vectype, v);
1657 }
1658 }
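/* Usage sketch with a hypothetical four-int vector type V4SI:
   build_vector_from_val (V4SI, integer_one_node) produces the constant
   { 1, 1, 1, 1 } as a VECTOR_CST, whereas a non-constant scalar SC
   yields a CONSTRUCTOR containing four copies of SC.  */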
1659
1660 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1661 are in the vec pointed to by VALS. */
1662 tree
1663 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1664 {
1665 tree c = make_node (CONSTRUCTOR);
1666 unsigned int i;
1667 constructor_elt *elt;
1668 bool constant_p = true;
1669 bool side_effects_p = false;
1670
1671 TREE_TYPE (c) = type;
1672 CONSTRUCTOR_ELTS (c) = vals;
1673
1674 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1675 {
1676 /* Mostly ctors will have elts that don't have side-effects, so
1677 the usual case is to scan all the elements. Hence a single
1678 loop for both const and side effects, rather than one loop
1679 each (with early outs). */
1680 if (!TREE_CONSTANT (elt->value))
1681 constant_p = false;
1682 if (TREE_SIDE_EFFECTS (elt->value))
1683 side_effects_p = true;
1684 }
1685
1686 TREE_SIDE_EFFECTS (c) = side_effects_p;
1687 TREE_CONSTANT (c) = constant_p;
1688
1689 return c;
1690 }
1691
1692 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1693 INDEX and VALUE. */
1694 tree
1695 build_constructor_single (tree type, tree index, tree value)
1696 {
1697 vec<constructor_elt, va_gc> *v;
1698 constructor_elt elt = {index, value};
1699
1700 vec_alloc (v, 1);
1701 v->quick_push (elt);
1702
1703 return build_constructor (type, v);
1704 }
1705
1706
1707 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1708 are in a list pointed to by VALS. */
1709 tree
1710 build_constructor_from_list (tree type, tree vals)
1711 {
1712 tree t;
1713 vec<constructor_elt, va_gc> *v = NULL;
1714
1715 if (vals)
1716 {
1717 vec_alloc (v, list_length (vals));
1718 for (t = vals; t; t = TREE_CHAIN (t))
1719 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1720 }
1721
1722 return build_constructor (type, v);
1723 }
1724
1725 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1726 of elements, provided as index/value pairs. */
1727
1728 tree
1729 build_constructor_va (tree type, int nelts, ...)
1730 {
1731 vec<constructor_elt, va_gc> *v = NULL;
1732 va_list p;
1733
1734 va_start (p, nelts);
1735 vec_alloc (v, nelts);
1736 while (nelts--)
1737 {
1738 tree index = va_arg (p, tree);
1739 tree value = va_arg (p, tree);
1740 CONSTRUCTOR_APPEND_ELT (v, index, value);
1741 }
1742 va_end (p);
1743 return build_constructor (type, v);
1744 }
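/* Usage sketch:

     build_constructor_va (type, 2,
			   size_zero_node, integer_one_node,
			   size_one_node, integer_zero_node)

   builds a two-element CONSTRUCTOR for an integer array type,
   equivalent to pushing the two index/value pairs by hand with
   CONSTRUCTOR_APPEND_ELT.  */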
1745
1746 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1747
1748 tree
1749 build_fixed (tree type, FIXED_VALUE_TYPE f)
1750 {
1751 tree v;
1752 FIXED_VALUE_TYPE *fp;
1753
1754 v = make_node (FIXED_CST);
1755 fp = ggc_alloc<fixed_value> ();
1756 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1757
1758 TREE_TYPE (v) = type;
1759 TREE_FIXED_CST_PTR (v) = fp;
1760 return v;
1761 }
1762
1763 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1764
1765 tree
1766 build_real (tree type, REAL_VALUE_TYPE d)
1767 {
1768 tree v;
1769 REAL_VALUE_TYPE *dp;
1770 int overflow = 0;
1771
1772 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1773 Consider doing it via real_convert now. */
1774
1775 v = make_node (REAL_CST);
1776 dp = ggc_alloc<real_value> ();
1777 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1778
1779 TREE_TYPE (v) = type;
1780 TREE_REAL_CST_PTR (v) = dp;
1781 TREE_OVERFLOW (v) = overflow;
1782 return v;
1783 }
1784
1785 /* Return a REAL_VALUE_TYPE corresponding to the integer value of the
1786 INTEGER_CST node I, in the format of TYPE. */
1787
1788 REAL_VALUE_TYPE
1789 real_value_from_int_cst (const_tree type, const_tree i)
1790 {
1791 REAL_VALUE_TYPE d;
1792
1793 /* Clear all bits of the real value type so that we can later do
1794 bitwise comparisons to see if two values are the same. */
1795 memset (&d, 0, sizeof d);
1796
1797 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1798 TYPE_SIGN (TREE_TYPE (i)));
1799 return d;
1800 }
1801
1802 /* Given a tree representing an integer constant I, return a tree
1803 representing the same value as a floating-point constant of type TYPE. */
1804
1805 tree
1806 build_real_from_int_cst (tree type, const_tree i)
1807 {
1808 tree v;
1809 int overflow = TREE_OVERFLOW (i);
1810
1811 v = build_real (type, real_value_from_int_cst (type, i));
1812
1813 TREE_OVERFLOW (v) |= overflow;
1814 return v;
1815 }
1816
1817 /* Return a newly constructed STRING_CST node whose value is
1818 the LEN characters at STR.
1819 Note that for a C string literal, LEN should include the trailing NUL.
1820 The TREE_TYPE is not initialized. */
1821
1822 tree
1823 build_string (int len, const char *str)
1824 {
1825 tree s;
1826 size_t length;
1827
1828 /* Do not waste bytes provided by padding of struct tree_string. */
1829 length = len + offsetof (struct tree_string, str) + 1;
1830
1831 record_node_allocation_statistics (STRING_CST, length);
1832
1833 s = (tree) ggc_internal_alloc (length);
1834
1835 memset (s, 0, sizeof (struct tree_typed));
1836 TREE_SET_CODE (s, STRING_CST);
1837 TREE_CONSTANT (s) = 1;
1838 TREE_STRING_LENGTH (s) = len;
1839 memcpy (s->string.str, str, len);
1840 s->string.str[len] = '\0';
1841
1842 return s;
1843 }
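/* Usage sketch: for the C literal "hi", call build_string (3, "hi") so
   that LEN counts the trailing NUL; the node stores those three bytes
   plus one extra terminating '\0' added here, and the caller must still
   set TREE_TYPE.  */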
1844
1845 /* Return a newly constructed COMPLEX_CST node whose value is
1846 specified by the real and imaginary parts REAL and IMAG.
1847 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1848 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1849
1850 tree
1851 build_complex (tree type, tree real, tree imag)
1852 {
1853 tree t = make_node (COMPLEX_CST);
1854
1855 TREE_REALPART (t) = real;
1856 TREE_IMAGPART (t) = imag;
1857 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1858 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1859 return t;
1860 }
1861
1862 /* Return a constant of arithmetic type TYPE which is the
1863 multiplicative identity of the set TYPE. */
1864
1865 tree
1866 build_one_cst (tree type)
1867 {
1868 switch (TREE_CODE (type))
1869 {
1870 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1871 case POINTER_TYPE: case REFERENCE_TYPE:
1872 case OFFSET_TYPE:
1873 return build_int_cst (type, 1);
1874
1875 case REAL_TYPE:
1876 return build_real (type, dconst1);
1877
1878 case FIXED_POINT_TYPE:
1879 /* We can only generate 1 for accum types. */
1880 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1881 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1882
1883 case VECTOR_TYPE:
1884 {
1885 tree scalar = build_one_cst (TREE_TYPE (type));
1886
1887 return build_vector_from_val (type, scalar);
1888 }
1889
1890 case COMPLEX_TYPE:
1891 return build_complex (type,
1892 build_one_cst (TREE_TYPE (type)),
1893 build_zero_cst (TREE_TYPE (type)));
1894
1895 default:
1896 gcc_unreachable ();
1897 }
1898 }
1899
1900 /* Return an integer of type TYPE containing all 1's in as much precision as
1901 it contains, or a complex or vector whose subparts are such integers. */
1902
1903 tree
1904 build_all_ones_cst (tree type)
1905 {
1906 if (TREE_CODE (type) == COMPLEX_TYPE)
1907 {
1908 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1909 return build_complex (type, scalar, scalar);
1910 }
1911 else
1912 return build_minus_one_cst (type);
1913 }
1914
1915 /* Return a constant of arithmetic type TYPE which is the
1916 opposite of the multiplicative identity of the set TYPE. */
1917
1918 tree
1919 build_minus_one_cst (tree type)
1920 {
1921 switch (TREE_CODE (type))
1922 {
1923 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1924 case POINTER_TYPE: case REFERENCE_TYPE:
1925 case OFFSET_TYPE:
1926 return build_int_cst (type, -1);
1927
1928 case REAL_TYPE:
1929 return build_real (type, dconstm1);
1930
1931 case FIXED_POINT_TYPE:
1932 /* We can only generate -1 for accum types. */
1933 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1934 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1935 TYPE_MODE (type)));
1936
1937 case VECTOR_TYPE:
1938 {
1939 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1940
1941 return build_vector_from_val (type, scalar);
1942 }
1943
1944 case COMPLEX_TYPE:
1945 return build_complex (type,
1946 build_minus_one_cst (TREE_TYPE (type)),
1947 build_zero_cst (TREE_TYPE (type)));
1948
1949 default:
1950 gcc_unreachable ();
1951 }
1952 }
1953
1954 /* Build 0 constant of type TYPE. This is used by constructor folding
1955 and thus the constant should be represented in memory by
1956 zero(es). */
1957
1958 tree
1959 build_zero_cst (tree type)
1960 {
1961 switch (TREE_CODE (type))
1962 {
1963 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1964 case POINTER_TYPE: case REFERENCE_TYPE:
1965 case OFFSET_TYPE: case NULLPTR_TYPE:
1966 return build_int_cst (type, 0);
1967
1968 case REAL_TYPE:
1969 return build_real (type, dconst0);
1970
1971 case FIXED_POINT_TYPE:
1972 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1973
1974 case VECTOR_TYPE:
1975 {
1976 tree scalar = build_zero_cst (TREE_TYPE (type));
1977
1978 return build_vector_from_val (type, scalar);
1979 }
1980
1981 case COMPLEX_TYPE:
1982 {
1983 tree zero = build_zero_cst (TREE_TYPE (type));
1984
1985 return build_complex (type, zero, zero);
1986 }
1987
1988 default:
1989 if (!AGGREGATE_TYPE_P (type))
1990 return fold_convert (type, integer_zero_node);
1991 return build_constructor (type, NULL);
1992 }
1993 }
1994
1995
1996 /* Build a BINFO with room for BASE_BINFOS base binfos. */
1997
1998 tree
1999 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2000 {
2001 tree t;
2002 size_t length = (offsetof (struct tree_binfo, base_binfos)
2003 + vec<tree, va_gc>::embedded_size (base_binfos));
2004
2005 record_node_allocation_statistics (TREE_BINFO, length);
2006
2007 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2008
2009 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2010
2011 TREE_SET_CODE (t, TREE_BINFO);
2012
2013 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2014
2015 return t;
2016 }
2017
2018 /* Create a CASE_LABEL_EXPR tree node and return it. */
2019
2020 tree
2021 build_case_label (tree low_value, tree high_value, tree label_decl)
2022 {
2023 tree t = make_node (CASE_LABEL_EXPR);
2024
2025 TREE_TYPE (t) = void_type_node;
2026 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2027
2028 CASE_LOW (t) = low_value;
2029 CASE_HIGH (t) = high_value;
2030 CASE_LABEL (t) = label_decl;
2031 CASE_CHAIN (t) = NULL_TREE;
2032
2033 return t;
2034 }
2035
2036 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2037 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2038 The latter determines the length of the HOST_WIDE_INT vector. */
2039
2040 tree
2041 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2042 {
2043 tree t;
2044 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2045 + sizeof (struct tree_int_cst));
2046
2047 gcc_assert (len);
2048 record_node_allocation_statistics (INTEGER_CST, length);
2049
2050 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2051
2052 TREE_SET_CODE (t, INTEGER_CST);
2053 TREE_INT_CST_NUNITS (t) = len;
2054 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2055 /* to_offset can only be applied to trees that are offset_int-sized
2056 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2057 must be exactly the precision of offset_int and so LEN is correct. */
2058 if (ext_len <= OFFSET_INT_ELTS)
2059 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2060 else
2061 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2062
2063 TREE_CONSTANT (t) = 1;
2064
2065 return t;
2066 }
2067
2068 /* Build a newly constructed TREE_VEC node of length LEN. */
2069
2070 tree
2071 make_tree_vec_stat (int len MEM_STAT_DECL)
2072 {
2073 tree t;
2074 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2075
2076 record_node_allocation_statistics (TREE_VEC, length);
2077
2078 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2079
2080 TREE_SET_CODE (t, TREE_VEC);
2081 TREE_VEC_LENGTH (t) = len;
2082
2083 return t;
2084 }
2085
2086 /* Grow a TREE_VEC node to new length LEN. */
2087
2088 tree
2089 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2090 {
2091 gcc_assert (TREE_CODE (v) == TREE_VEC);
2092
2093 int oldlen = TREE_VEC_LENGTH (v);
2094 gcc_assert (len > oldlen);
2095
2096 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2097 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2098
2099 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2100
2101 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2102
2103 TREE_VEC_LENGTH (v) = len;
2104
2105 return v;
2106 }
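
/* Editorial illustration (not part of the original file): a minimal
   sketch of how a caller typically builds and fills a TREE_VEC.

     tree v = make_tree_vec (3);
     TREE_VEC_ELT (v, 0) = integer_zero_node;
     TREE_VEC_ELT (v, 1) = integer_one_node;
     TREE_VEC_ELT (v, 2) = size_zero_node;
     gcc_assert (TREE_VEC_LENGTH (v) == 3);

   make_tree_vec is the usual entry point; the _stat variants above are
   reached through the MEM_STAT machinery.  */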
2107 \f
2108 /* Return 1 if EXPR is the integer constant zero or a complex constant
2109 of zero. */
2110
2111 int
2112 integer_zerop (const_tree expr)
2113 {
2114 STRIP_NOPS (expr);
2115
2116 switch (TREE_CODE (expr))
2117 {
2118 case INTEGER_CST:
2119 return wi::eq_p (expr, 0);
2120 case COMPLEX_CST:
2121 return (integer_zerop (TREE_REALPART (expr))
2122 && integer_zerop (TREE_IMAGPART (expr)));
2123 case VECTOR_CST:
2124 {
2125 unsigned i;
2126 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2127 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2128 return false;
2129 return true;
2130 }
2131 default:
2132 return false;
2133 }
2134 }
2135
2136 /* Return 1 if EXPR is the integer constant one or the corresponding
2137 complex constant. */
2138
2139 int
2140 integer_onep (const_tree expr)
2141 {
2142 STRIP_NOPS (expr);
2143
2144 switch (TREE_CODE (expr))
2145 {
2146 case INTEGER_CST:
2147 return wi::eq_p (wi::to_widest (expr), 1);
2148 case COMPLEX_CST:
2149 return (integer_onep (TREE_REALPART (expr))
2150 && integer_zerop (TREE_IMAGPART (expr)));
2151 case VECTOR_CST:
2152 {
2153 unsigned i;
2154 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2155 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2156 return false;
2157 return true;
2158 }
2159 default:
2160 return false;
2161 }
2162 }
2163
2164 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2165 it contains, or a complex or vector whose subparts are such integers. */
2166
2167 int
2168 integer_all_onesp (const_tree expr)
2169 {
2170 STRIP_NOPS (expr);
2171
2172 if (TREE_CODE (expr) == COMPLEX_CST
2173 && integer_all_onesp (TREE_REALPART (expr))
2174 && integer_all_onesp (TREE_IMAGPART (expr)))
2175 return 1;
2176
2177 else if (TREE_CODE (expr) == VECTOR_CST)
2178 {
2179 unsigned i;
2180 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2181 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2182 return 0;
2183 return 1;
2184 }
2185
2186 else if (TREE_CODE (expr) != INTEGER_CST)
2187 return 0;
2188
2189 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2190 }
2191
2192 /* Return 1 if EXPR is the integer constant minus one. */
2193
2194 int
2195 integer_minus_onep (const_tree expr)
2196 {
2197 STRIP_NOPS (expr);
2198
2199 if (TREE_CODE (expr) == COMPLEX_CST)
2200 return (integer_all_onesp (TREE_REALPART (expr))
2201 && integer_zerop (TREE_IMAGPART (expr)));
2202 else
2203 return integer_all_onesp (expr);
2204 }
2205
2206 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2207 one bit on). */
2208
2209 int
2210 integer_pow2p (const_tree expr)
2211 {
2212 STRIP_NOPS (expr);
2213
2214 if (TREE_CODE (expr) == COMPLEX_CST
2215 && integer_pow2p (TREE_REALPART (expr))
2216 && integer_zerop (TREE_IMAGPART (expr)))
2217 return 1;
2218
2219 if (TREE_CODE (expr) != INTEGER_CST)
2220 return 0;
2221
2222 return wi::popcount (expr) == 1;
2223 }
2224
2225 /* Return 1 if EXPR is an integer constant other than zero or a
2226 complex constant other than zero. */
2227
2228 int
2229 integer_nonzerop (const_tree expr)
2230 {
2231 STRIP_NOPS (expr);
2232
2233 return ((TREE_CODE (expr) == INTEGER_CST
2234 && !wi::eq_p (expr, 0))
2235 || (TREE_CODE (expr) == COMPLEX_CST
2236 && (integer_nonzerop (TREE_REALPART (expr))
2237 || integer_nonzerop (TREE_IMAGPART (expr)))));
2238 }
2239
2240 /* Return 1 if EXPR is the fixed-point constant zero. */
2241
2242 int
2243 fixed_zerop (const_tree expr)
2244 {
2245 return (TREE_CODE (expr) == FIXED_CST
2246 && TREE_FIXED_CST (expr).data.is_zero ());
2247 }
2248
2249 /* Return the power of two represented by a tree node known to be a
2250 power of two. */
2251
2252 int
2253 tree_log2 (const_tree expr)
2254 {
2255 STRIP_NOPS (expr);
2256
2257 if (TREE_CODE (expr) == COMPLEX_CST)
2258 return tree_log2 (TREE_REALPART (expr));
2259
2260 return wi::exact_log2 (expr);
2261 }
2262
2263 /* Similar, but return the largest integer Y such that 2 ** Y is less
2264 than or equal to EXPR. */
2265
2266 int
2267 tree_floor_log2 (const_tree expr)
2268 {
2269 STRIP_NOPS (expr);
2270
2271 if (TREE_CODE (expr) == COMPLEX_CST)
2272 return tree_log2 (TREE_REALPART (expr));
2273
2274 return wi::floor_log2 (expr);
2275 }
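
/* Editorial illustration (not part of the original file): worked values
   for the two helpers above, on plain integer constants.

     tree_log2 (build_int_cst (integer_type_node, 8))       => 3
     tree_log2 (build_int_cst (integer_type_node, 6))       => -1  (not a power of 2)
     tree_floor_log2 (build_int_cst (integer_type_node, 6)) => 2   (2**2 <= 6 < 2**3)
*/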
2276
2277 /* Return number of known trailing zero bits in EXPR, or, if the value of
2278    EXPR is known to be zero, the precision of its type.  */
2279
2280 unsigned int
2281 tree_ctz (const_tree expr)
2282 {
2283 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2284 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2285 return 0;
2286
2287 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2288 switch (TREE_CODE (expr))
2289 {
2290 case INTEGER_CST:
2291 ret1 = wi::ctz (expr);
2292 return MIN (ret1, prec);
2293 case SSA_NAME:
2294 ret1 = wi::ctz (get_nonzero_bits (expr));
2295 return MIN (ret1, prec);
2296 case PLUS_EXPR:
2297 case MINUS_EXPR:
2298 case BIT_IOR_EXPR:
2299 case BIT_XOR_EXPR:
2300 case MIN_EXPR:
2301 case MAX_EXPR:
2302 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2303 if (ret1 == 0)
2304 return ret1;
2305 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2306 return MIN (ret1, ret2);
2307 case POINTER_PLUS_EXPR:
2308 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2309 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2310 /* Second operand is sizetype, which could be in theory
2311 wider than pointer's precision. Make sure we never
2312 return more than prec. */
2313 ret2 = MIN (ret2, prec);
2314 return MIN (ret1, ret2);
2315 case BIT_AND_EXPR:
2316 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2317 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2318 return MAX (ret1, ret2);
2319 case MULT_EXPR:
2320 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2321 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2322 return MIN (ret1 + ret2, prec);
2323 case LSHIFT_EXPR:
2324 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2325 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2326 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2327 {
2328 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2329 return MIN (ret1 + ret2, prec);
2330 }
2331 return ret1;
2332 case RSHIFT_EXPR:
2333 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2334 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2335 {
2336 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2337 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2338 if (ret1 > ret2)
2339 return ret1 - ret2;
2340 }
2341 return 0;
2342 case TRUNC_DIV_EXPR:
2343 case CEIL_DIV_EXPR:
2344 case FLOOR_DIV_EXPR:
2345 case ROUND_DIV_EXPR:
2346 case EXACT_DIV_EXPR:
2347 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2348 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2349 {
2350 int l = tree_log2 (TREE_OPERAND (expr, 1));
2351 if (l >= 0)
2352 {
2353 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2354 ret2 = l;
2355 if (ret1 > ret2)
2356 return ret1 - ret2;
2357 }
2358 }
2359 return 0;
2360 CASE_CONVERT:
2361 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2362 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2363 ret1 = prec;
2364 return MIN (ret1, prec);
2365 case SAVE_EXPR:
2366 return tree_ctz (TREE_OPERAND (expr, 0));
2367 case COND_EXPR:
2368 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2369 if (ret1 == 0)
2370 return 0;
2371 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2372 return MIN (ret1, ret2);
2373 case COMPOUND_EXPR:
2374 return tree_ctz (TREE_OPERAND (expr, 1));
2375 case ADDR_EXPR:
2376 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2377 if (ret1 > BITS_PER_UNIT)
2378 {
2379 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2380 return MIN (ret1, prec);
2381 }
2382 return 0;
2383 default:
2384 return 0;
2385 }
2386 }
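
/* Editorial illustration (not part of the original file): how the
   recursion above composes.  For an expression x * 4 + 16, where x is
   an SSA_NAME with no recorded nonzero-bits information:

     tree_ctz (x)          => 0
     tree_ctz (x * 4)      => 0 + 2 = 2       (MULT_EXPR adds the counts)
     tree_ctz (x * 4 + 16) => MIN (2, 4) = 2  (PLUS_EXPR takes the minimum)

   so callers learn that the whole expression is a multiple of 4.  */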
2387
2388 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2389 decimal float constants, so don't return 1 for them. */
2390
2391 int
2392 real_zerop (const_tree expr)
2393 {
2394 STRIP_NOPS (expr);
2395
2396 switch (TREE_CODE (expr))
2397 {
2398 case REAL_CST:
2399 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2400 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2401 case COMPLEX_CST:
2402 return real_zerop (TREE_REALPART (expr))
2403 && real_zerop (TREE_IMAGPART (expr));
2404 case VECTOR_CST:
2405 {
2406 unsigned i;
2407 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2408 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2409 return false;
2410 return true;
2411 }
2412 default:
2413 return false;
2414 }
2415 }
2416
2417 /* Return 1 if EXPR is the real constant one in real or complex form.
2418 Trailing zeroes matter for decimal float constants, so don't return
2419 1 for them. */
2420
2421 int
2422 real_onep (const_tree expr)
2423 {
2424 STRIP_NOPS (expr);
2425
2426 switch (TREE_CODE (expr))
2427 {
2428 case REAL_CST:
2429 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2430 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2431 case COMPLEX_CST:
2432 return real_onep (TREE_REALPART (expr))
2433 && real_zerop (TREE_IMAGPART (expr));
2434 case VECTOR_CST:
2435 {
2436 unsigned i;
2437 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2438 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2439 return false;
2440 return true;
2441 }
2442 default:
2443 return false;
2444 }
2445 }
2446
2447 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2448 matter for decimal float constants, so don't return 1 for them. */
2449
2450 int
2451 real_minus_onep (const_tree expr)
2452 {
2453 STRIP_NOPS (expr);
2454
2455 switch (TREE_CODE (expr))
2456 {
2457 case REAL_CST:
2458 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2459 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2460 case COMPLEX_CST:
2461 return real_minus_onep (TREE_REALPART (expr))
2462 && real_zerop (TREE_IMAGPART (expr));
2463 case VECTOR_CST:
2464 {
2465 unsigned i;
2466 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2467 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2468 return false;
2469 return true;
2470 }
2471 default:
2472 return false;
2473 }
2474 }
2475
2476 /* Nonzero if EXP is a constant or a cast of a constant. */
2477
2478 int
2479 really_constant_p (const_tree exp)
2480 {
2481 /* This is not quite the same as STRIP_NOPS. It does more. */
2482 while (CONVERT_EXPR_P (exp)
2483 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2484 exp = TREE_OPERAND (exp, 0);
2485 return TREE_CONSTANT (exp);
2486 }
2487 \f
2488 /* Return first list element whose TREE_VALUE is ELEM.
2489 Return 0 if ELEM is not in LIST. */
2490
2491 tree
2492 value_member (tree elem, tree list)
2493 {
2494 while (list)
2495 {
2496 if (elem == TREE_VALUE (list))
2497 return list;
2498 list = TREE_CHAIN (list);
2499 }
2500 return NULL_TREE;
2501 }
2502
2503 /* Return first list element whose TREE_PURPOSE is ELEM.
2504 Return 0 if ELEM is not in LIST. */
2505
2506 tree
2507 purpose_member (const_tree elem, tree list)
2508 {
2509 while (list)
2510 {
2511 if (elem == TREE_PURPOSE (list))
2512 return list;
2513 list = TREE_CHAIN (list);
2514 }
2515 return NULL_TREE;
2516 }
2517
2518 /* Return true if ELEM is in V. */
2519
2520 bool
2521 vec_member (const_tree elem, vec<tree, va_gc> *v)
2522 {
2523 unsigned ix;
2524 tree t;
2525 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2526 if (elem == t)
2527 return true;
2528 return false;
2529 }
2530
2531 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2532    NULL_TREE if CHAIN has fewer than IDX + 1 elements.  */
2533
2534 tree
2535 chain_index (int idx, tree chain)
2536 {
2537 for (; chain && idx > 0; --idx)
2538 chain = TREE_CHAIN (chain);
2539 return chain;
2540 }
2541
2542 /* Return nonzero if ELEM is part of the chain CHAIN. */
2543
2544 int
2545 chain_member (const_tree elem, const_tree chain)
2546 {
2547 while (chain)
2548 {
2549 if (elem == chain)
2550 return 1;
2551 chain = DECL_CHAIN (chain);
2552 }
2553
2554 return 0;
2555 }
2556
2557 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2558 We expect a null pointer to mark the end of the chain.
2559 This is the Lisp primitive `length'. */
2560
2561 int
2562 list_length (const_tree t)
2563 {
2564 const_tree p = t;
2565 #ifdef ENABLE_TREE_CHECKING
2566 const_tree q = t;
2567 #endif
2568 int len = 0;
2569
2570 while (p)
2571 {
2572 p = TREE_CHAIN (p);
2573 #ifdef ENABLE_TREE_CHECKING
2574 if (len % 2)
2575 q = TREE_CHAIN (q);
2576 gcc_assert (p != q);
2577 #endif
2578 len++;
2579 }
2580
2581 return len;
2582 }
2583
2584 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2585 UNION_TYPE TYPE, or NULL_TREE if none. */
2586
2587 tree
2588 first_field (const_tree type)
2589 {
2590 tree t = TYPE_FIELDS (type);
2591 while (t && TREE_CODE (t) != FIELD_DECL)
2592 t = TREE_CHAIN (t);
2593 return t;
2594 }
2595
2596 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2597 by modifying the last node in chain 1 to point to chain 2.
2598 This is the Lisp primitive `nconc'. */
2599
2600 tree
2601 chainon (tree op1, tree op2)
2602 {
2603 tree t1;
2604
2605 if (!op1)
2606 return op2;
2607 if (!op2)
2608 return op1;
2609
2610 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2611 continue;
2612 TREE_CHAIN (t1) = op2;
2613
2614 #ifdef ENABLE_TREE_CHECKING
2615 {
2616 tree t2;
2617 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2618 gcc_assert (t2 != t1);
2619 }
2620 #endif
2621
2622 return op1;
2623 }
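
/* Editorial illustration (not part of the original file): chainon is
   destructive on its first argument, so lists are usually built with
   build_tree_list / tree_cons and then combined:

     tree l1 = build_tree_list (NULL_TREE, integer_zero_node);
     tree l2 = build_tree_list (NULL_TREE, integer_one_node);
     tree l  = chainon (l1, l2);   (l == l1, now two elements long)
     gcc_assert (list_length (l) == 2);
*/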
2624
2625 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2626
2627 tree
2628 tree_last (tree chain)
2629 {
2630 tree next;
2631 if (chain)
2632 while ((next = TREE_CHAIN (chain)))
2633 chain = next;
2634 return chain;
2635 }
2636
2637 /* Reverse the order of elements in the chain T,
2638 and return the new head of the chain (old last element). */
2639
2640 tree
2641 nreverse (tree t)
2642 {
2643 tree prev = 0, decl, next;
2644 for (decl = t; decl; decl = next)
2645 {
2646 /* We shouldn't be using this function to reverse BLOCK chains; we
2647 have blocks_nreverse for that. */
2648 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2649 next = TREE_CHAIN (decl);
2650 TREE_CHAIN (decl) = prev;
2651 prev = decl;
2652 }
2653 return prev;
2654 }
2655 \f
2656 /* Return a newly created TREE_LIST node whose
2657 purpose and value fields are PARM and VALUE. */
2658
2659 tree
2660 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2661 {
2662 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2663 TREE_PURPOSE (t) = parm;
2664 TREE_VALUE (t) = value;
2665 return t;
2666 }
2667
2668 /* Build a chain of TREE_LIST nodes from a vector. */
2669
2670 tree
2671 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2672 {
2673 tree ret = NULL_TREE;
2674 tree *pp = &ret;
2675 unsigned int i;
2676 tree t;
2677 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2678 {
2679 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2680 pp = &TREE_CHAIN (*pp);
2681 }
2682 return ret;
2683 }
2684
2685 /* Return a newly created TREE_LIST node whose
2686 purpose and value fields are PURPOSE and VALUE
2687 and whose TREE_CHAIN is CHAIN. */
2688
2689 tree
2690 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2691 {
2692 tree node;
2693
2694 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2695 memset (node, 0, sizeof (struct tree_common));
2696
2697 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2698
2699 TREE_SET_CODE (node, TREE_LIST);
2700 TREE_CHAIN (node) = chain;
2701 TREE_PURPOSE (node) = purpose;
2702 TREE_VALUE (node) = value;
2703 return node;
2704 }
2705
2706 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2707 trees. */
2708
2709 vec<tree, va_gc> *
2710 ctor_to_vec (tree ctor)
2711 {
2712 vec<tree, va_gc> *vec;
2713 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2714 unsigned int ix;
2715 tree val;
2716
2717 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2718 vec->quick_push (val);
2719
2720 return vec;
2721 }
2722 \f
2723 /* Return the size nominally occupied by an object of type TYPE
2724 when it resides in memory. The value is measured in units of bytes,
2725 and its data type is that normally used for type sizes
2726 (which is the first type created by make_signed_type or
2727 make_unsigned_type). */
2728
2729 tree
2730 size_in_bytes (const_tree type)
2731 {
2732 tree t;
2733
2734 if (type == error_mark_node)
2735 return integer_zero_node;
2736
2737 type = TYPE_MAIN_VARIANT (type);
2738 t = TYPE_SIZE_UNIT (type);
2739
2740 if (t == 0)
2741 {
2742 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2743 return size_zero_node;
2744 }
2745
2746 return t;
2747 }
2748
2749 /* Return the size of TYPE (in bytes) as a wide integer
2750 or return -1 if the size can vary or is larger than an integer. */
2751
2752 HOST_WIDE_INT
2753 int_size_in_bytes (const_tree type)
2754 {
2755 tree t;
2756
2757 if (type == error_mark_node)
2758 return 0;
2759
2760 type = TYPE_MAIN_VARIANT (type);
2761 t = TYPE_SIZE_UNIT (type);
2762
2763 if (t && tree_fits_uhwi_p (t))
2764 return TREE_INT_CST_LOW (t);
2765 else
2766 return -1;
2767 }
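
/* Editorial illustration (not part of the original file): on a target
   where int is 32 bits,

     int_size_in_bytes (integer_type_node) => 4

   while for an incomplete or variable-length type TYPE_SIZE_UNIT is
   either missing or not a constant, so the function returns -1.  */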
2768
2769 /* Return the maximum size of TYPE (in bytes) as a wide integer
2770 or return -1 if the size can vary or is larger than an integer. */
2771
2772 HOST_WIDE_INT
2773 max_int_size_in_bytes (const_tree type)
2774 {
2775 HOST_WIDE_INT size = -1;
2776 tree size_tree;
2777
2778 /* If this is an array type, check for a possible MAX_SIZE attached. */
2779
2780 if (TREE_CODE (type) == ARRAY_TYPE)
2781 {
2782 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2783
2784 if (size_tree && tree_fits_uhwi_p (size_tree))
2785 size = tree_to_uhwi (size_tree);
2786 }
2787
2788 /* If we still haven't been able to get a size, see if the language
2789 can compute a maximum size. */
2790
2791 if (size == -1)
2792 {
2793 size_tree = lang_hooks.types.max_size (type);
2794
2795 if (size_tree && tree_fits_uhwi_p (size_tree))
2796 size = tree_to_uhwi (size_tree);
2797 }
2798
2799 return size;
2800 }
2801 \f
2802 /* Return the bit position of FIELD, in bits from the start of the record.
2803 This is a tree of type bitsizetype. */
2804
2805 tree
2806 bit_position (const_tree field)
2807 {
2808 return bit_from_pos (DECL_FIELD_OFFSET (field),
2809 DECL_FIELD_BIT_OFFSET (field));
2810 }
2811
2812 /* Likewise, but return as an integer. It must be representable in
2813 that way (since it could be a signed value, we don't have the
2814    option of returning -1 like int_size_in_bytes can).  */
2815
2816 HOST_WIDE_INT
2817 int_bit_position (const_tree field)
2818 {
2819 return tree_to_shwi (bit_position (field));
2820 }
2821 \f
2822 /* Return the byte position of FIELD, in bytes from the start of the record.
2823 This is a tree of type sizetype. */
2824
2825 tree
2826 byte_position (const_tree field)
2827 {
2828 return byte_from_pos (DECL_FIELD_OFFSET (field),
2829 DECL_FIELD_BIT_OFFSET (field));
2830 }
2831
2832 /* Likewise, but return as an integer. It must be representable in
2833 that way (since it could be a signed value, we don't have the
2834    option of returning -1 like int_size_in_bytes can).  */
2835
2836 HOST_WIDE_INT
2837 int_byte_position (const_tree field)
2838 {
2839 return tree_to_shwi (byte_position (field));
2840 }
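
/* Editorial illustration (not part of the original file): for a field
   laid out 4 bytes into its record (and not a bit-field straddling a
   byte boundary), the accessors above are related by

     int_bit_position (field)  => 32
     int_byte_position (field) => 4

   i.e. bit_position == byte_position * BITS_PER_UNIT for byte-aligned
   fields.  */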
2841 \f
2842 /* Return the strictest alignment, in bits, that T is known to have. */
2843
2844 unsigned int
2845 expr_align (const_tree t)
2846 {
2847 unsigned int align0, align1;
2848
2849 switch (TREE_CODE (t))
2850 {
2851 CASE_CONVERT: case NON_LVALUE_EXPR:
2852 /* If we have conversions, we know that the alignment of the
2853 object must meet each of the alignments of the types. */
2854 align0 = expr_align (TREE_OPERAND (t, 0));
2855 align1 = TYPE_ALIGN (TREE_TYPE (t));
2856 return MAX (align0, align1);
2857
2858 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2859 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2860 case CLEANUP_POINT_EXPR:
2861 /* These don't change the alignment of an object. */
2862 return expr_align (TREE_OPERAND (t, 0));
2863
2864 case COND_EXPR:
2865 /* The best we can do is say that the alignment is the least aligned
2866 of the two arms. */
2867 align0 = expr_align (TREE_OPERAND (t, 1));
2868 align1 = expr_align (TREE_OPERAND (t, 2));
2869 return MIN (align0, align1);
2870
2871 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2872 meaningfully, it's always 1. */
2873 case LABEL_DECL: case CONST_DECL:
2874 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2875 case FUNCTION_DECL:
2876 gcc_assert (DECL_ALIGN (t) != 0);
2877 return DECL_ALIGN (t);
2878
2879 default:
2880 break;
2881 }
2882
2883 /* Otherwise take the alignment from that of the type. */
2884 return TYPE_ALIGN (TREE_TYPE (t));
2885 }
2886 \f
2887 /* Return, as a tree node, the number of elements for TYPE (which is an
2888 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2889
2890 tree
2891 array_type_nelts (const_tree type)
2892 {
2893 tree index_type, min, max;
2894
2895 /* If they did it with unspecified bounds, then we should have already
2896 given an error about it before we got here. */
2897 if (! TYPE_DOMAIN (type))
2898 return error_mark_node;
2899
2900 index_type = TYPE_DOMAIN (type);
2901 min = TYPE_MIN_VALUE (index_type);
2902 max = TYPE_MAX_VALUE (index_type);
2903
2904 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2905 if (!max)
2906 return error_mark_node;
2907
2908 return (integer_zerop (min)
2909 ? max
2910 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2911 }
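
/* Editorial illustration (not part of the original file): for the C
   declaration int a[10] the domain is [0, 9] and the minimum is zero,
   so array_type_nelts returns the INTEGER_CST 9, i.e. the element
   count minus one, not the element count itself.  */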
2912 \f
2913 /* If arg is static -- a reference to an object in static storage -- then
2914 return the object. This is not the same as the C meaning of `static'.
2915 If arg isn't static, return NULL. */
2916
2917 tree
2918 staticp (tree arg)
2919 {
2920 switch (TREE_CODE (arg))
2921 {
2922 case FUNCTION_DECL:
2923 /* Nested functions are static, even though taking their address will
2924 involve a trampoline as we unnest the nested function and create
2925 the trampoline on the tree level. */
2926 return arg;
2927
2928 case VAR_DECL:
2929 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2930 && ! DECL_THREAD_LOCAL_P (arg)
2931 && ! DECL_DLLIMPORT_P (arg)
2932 ? arg : NULL);
2933
2934 case CONST_DECL:
2935 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2936 ? arg : NULL);
2937
2938 case CONSTRUCTOR:
2939 return TREE_STATIC (arg) ? arg : NULL;
2940
2941 case LABEL_DECL:
2942 case STRING_CST:
2943 return arg;
2944
2945 case COMPONENT_REF:
2946 /* If the thing being referenced is not a field, then it is
2947 something language specific. */
2948 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2949
2950 /* If we are referencing a bitfield, we can't evaluate an
2951 ADDR_EXPR at compile time and so it isn't a constant. */
2952 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2953 return NULL;
2954
2955 return staticp (TREE_OPERAND (arg, 0));
2956
2957 case BIT_FIELD_REF:
2958 return NULL;
2959
2960 case INDIRECT_REF:
2961 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2962
2963 case ARRAY_REF:
2964 case ARRAY_RANGE_REF:
2965 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2966 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2967 return staticp (TREE_OPERAND (arg, 0));
2968 else
2969 return NULL;
2970
2971 case COMPOUND_LITERAL_EXPR:
2972 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2973
2974 default:
2975 return NULL;
2976 }
2977 }
2978
2979 \f
2980
2981
2982 /* Return whether OP is a DECL whose address is function-invariant. */
2983
2984 bool
2985 decl_address_invariant_p (const_tree op)
2986 {
2987   /* The conditions below are slightly less strict than those in
2988      staticp.  */
2989
2990 switch (TREE_CODE (op))
2991 {
2992 case PARM_DECL:
2993 case RESULT_DECL:
2994 case LABEL_DECL:
2995 case FUNCTION_DECL:
2996 return true;
2997
2998 case VAR_DECL:
2999 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3000 || DECL_THREAD_LOCAL_P (op)
3001 || DECL_CONTEXT (op) == current_function_decl
3002 || decl_function_context (op) == current_function_decl)
3003 return true;
3004 break;
3005
3006 case CONST_DECL:
3007 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3008 || decl_function_context (op) == current_function_decl)
3009 return true;
3010 break;
3011
3012 default:
3013 break;
3014 }
3015
3016 return false;
3017 }
3018
3019 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3020
3021 bool
3022 decl_address_ip_invariant_p (const_tree op)
3023 {
3024   /* The conditions below are slightly less strict than those in
3025      staticp.  */
3026
3027 switch (TREE_CODE (op))
3028 {
3029 case LABEL_DECL:
3030 case FUNCTION_DECL:
3031 case STRING_CST:
3032 return true;
3033
3034 case VAR_DECL:
3035 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3036 && !DECL_DLLIMPORT_P (op))
3037 || DECL_THREAD_LOCAL_P (op))
3038 return true;
3039 break;
3040
3041 case CONST_DECL:
3042 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3043 return true;
3044 break;
3045
3046 default:
3047 break;
3048 }
3049
3050 return false;
3051 }
3052
3053
3054 /* Return true if T is function-invariant (internal function, does
3055 not handle arithmetic; that's handled in skip_simple_arithmetic and
3056 tree_invariant_p). */
3057
3058 static bool tree_invariant_p (tree t);
3059
3060 static bool
3061 tree_invariant_p_1 (tree t)
3062 {
3063 tree op;
3064
3065 if (TREE_CONSTANT (t)
3066 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3067 return true;
3068
3069 switch (TREE_CODE (t))
3070 {
3071 case SAVE_EXPR:
3072 return true;
3073
3074 case ADDR_EXPR:
3075 op = TREE_OPERAND (t, 0);
3076 while (handled_component_p (op))
3077 {
3078 switch (TREE_CODE (op))
3079 {
3080 case ARRAY_REF:
3081 case ARRAY_RANGE_REF:
3082 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3083 || TREE_OPERAND (op, 2) != NULL_TREE
3084 || TREE_OPERAND (op, 3) != NULL_TREE)
3085 return false;
3086 break;
3087
3088 case COMPONENT_REF:
3089 if (TREE_OPERAND (op, 2) != NULL_TREE)
3090 return false;
3091 break;
3092
3093 default:;
3094 }
3095 op = TREE_OPERAND (op, 0);
3096 }
3097
3098 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3099
3100 default:
3101 break;
3102 }
3103
3104 return false;
3105 }
3106
3107 /* Return true if T is function-invariant. */
3108
3109 static bool
3110 tree_invariant_p (tree t)
3111 {
3112 tree inner = skip_simple_arithmetic (t);
3113 return tree_invariant_p_1 (inner);
3114 }
3115
3116 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3117 Do this to any expression which may be used in more than one place,
3118 but must be evaluated only once.
3119
3120 Normally, expand_expr would reevaluate the expression each time.
3121 Calling save_expr produces something that is evaluated and recorded
3122 the first time expand_expr is called on it. Subsequent calls to
3123 expand_expr just reuse the recorded value.
3124
3125 The call to expand_expr that generates code that actually computes
3126 the value is the first call *at compile time*. Subsequent calls
3127 *at compile time* generate code to use the saved value.
3128    This produces the correct result provided that *at run time* control
3129 always flows through the insns made by the first expand_expr
3130 before reaching the other places where the save_expr was evaluated.
3131 You, the caller of save_expr, must make sure this is so.
3132
3133 Constants, and certain read-only nodes, are returned with no
3134 SAVE_EXPR because that is safe. Expressions containing placeholders
3135 are not touched; see tree.def for an explanation of what these
3136 are used for. */
3137
3138 tree
3139 save_expr (tree expr)
3140 {
3141 tree t = fold (expr);
3142 tree inner;
3143
3144 /* If the tree evaluates to a constant, then we don't want to hide that
3145 fact (i.e. this allows further folding, and direct checks for constants).
3146 However, a read-only object that has side effects cannot be bypassed.
3147 Since it is no problem to reevaluate literals, we just return the
3148 literal node. */
3149 inner = skip_simple_arithmetic (t);
3150 if (TREE_CODE (inner) == ERROR_MARK)
3151 return inner;
3152
3153 if (tree_invariant_p_1 (inner))
3154 return t;
3155
3156 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3157 it means that the size or offset of some field of an object depends on
3158 the value within another field.
3159
3160 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3161 and some variable since it would then need to be both evaluated once and
3162      evaluated more than once.  Front-ends must ensure this case cannot
3163 happen by surrounding any such subexpressions in their own SAVE_EXPR
3164 and forcing evaluation at the proper time. */
3165 if (contains_placeholder_p (inner))
3166 return t;
3167
3168 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3169 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3170
3171 /* This expression might be placed ahead of a jump to ensure that the
3172 value was computed on both sides of the jump. So make sure it isn't
3173 eliminated as dead. */
3174 TREE_SIDE_EFFECTS (t) = 1;
3175 return t;
3176 }
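
/* Editorial illustration (not part of the original file): a typical use
   of save_expr when an operand must feed two places but be evaluated
   only once, e.g. squaring a possibly side-effecting expression:

     tree t = save_expr (expr);
     tree sq = fold_build2 (MULT_EXPR, TREE_TYPE (t), t, t);

   The first expansion of T computes and records the value; the second
   reference reuses it, per the comments above.  */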
3177
3178 /* Look inside EXPR into any simple arithmetic operations. Return the
3179 outermost non-arithmetic or non-invariant node. */
3180
3181 tree
3182 skip_simple_arithmetic (tree expr)
3183 {
3184 /* We don't care about whether this can be used as an lvalue in this
3185 context. */
3186 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3187 expr = TREE_OPERAND (expr, 0);
3188
3189 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3190 a constant, it will be more efficient to not make another SAVE_EXPR since
3191 it will allow better simplification and GCSE will be able to merge the
3192 computations if they actually occur. */
3193 while (true)
3194 {
3195 if (UNARY_CLASS_P (expr))
3196 expr = TREE_OPERAND (expr, 0);
3197 else if (BINARY_CLASS_P (expr))
3198 {
3199 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3200 expr = TREE_OPERAND (expr, 0);
3201 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3202 expr = TREE_OPERAND (expr, 1);
3203 else
3204 break;
3205 }
3206 else
3207 break;
3208 }
3209
3210 return expr;
3211 }
3212
3213 /* Look inside EXPR into simple arithmetic operations involving constants.
3214 Return the outermost non-arithmetic or non-constant node. */
3215
3216 tree
3217 skip_simple_constant_arithmetic (tree expr)
3218 {
3219 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3220 expr = TREE_OPERAND (expr, 0);
3221
3222 while (true)
3223 {
3224 if (UNARY_CLASS_P (expr))
3225 expr = TREE_OPERAND (expr, 0);
3226 else if (BINARY_CLASS_P (expr))
3227 {
3228 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3229 expr = TREE_OPERAND (expr, 0);
3230 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3231 expr = TREE_OPERAND (expr, 1);
3232 else
3233 break;
3234 }
3235 else
3236 break;
3237 }
3238
3239 return expr;
3240 }
3241
3242 /* Return which tree structure is used by T. */
3243
3244 enum tree_node_structure_enum
3245 tree_node_structure (const_tree t)
3246 {
3247 const enum tree_code code = TREE_CODE (t);
3248 return tree_node_structure_for_code (code);
3249 }
3250
3251 /* Set various status flags when building a CALL_EXPR object T. */
3252
3253 static void
3254 process_call_operands (tree t)
3255 {
3256 bool side_effects = TREE_SIDE_EFFECTS (t);
3257 bool read_only = false;
3258 int i = call_expr_flags (t);
3259
3260 /* Calls have side-effects, except those to const or pure functions. */
3261 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3262 side_effects = true;
3263 /* Propagate TREE_READONLY of arguments for const functions. */
3264 if (i & ECF_CONST)
3265 read_only = true;
3266
3267 if (!side_effects || read_only)
3268 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3269 {
3270 tree op = TREE_OPERAND (t, i);
3271 if (op && TREE_SIDE_EFFECTS (op))
3272 side_effects = true;
3273 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3274 read_only = false;
3275 }
3276
3277 TREE_SIDE_EFFECTS (t) = side_effects;
3278 TREE_READONLY (t) = read_only;
3279 }
3280 \f
3281 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3282 size or offset that depends on a field within a record. */
3283
3284 bool
3285 contains_placeholder_p (const_tree exp)
3286 {
3287 enum tree_code code;
3288
3289 if (!exp)
3290 return 0;
3291
3292 code = TREE_CODE (exp);
3293 if (code == PLACEHOLDER_EXPR)
3294 return 1;
3295
3296 switch (TREE_CODE_CLASS (code))
3297 {
3298 case tcc_reference:
3299 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3300 position computations since they will be converted into a
3301 	 WITH_RECORD_EXPR involving the reference, which we assume
3302 	 here will be valid.  */
3303 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3304
3305 case tcc_exceptional:
3306 if (code == TREE_LIST)
3307 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3308 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3309 break;
3310
3311 case tcc_unary:
3312 case tcc_binary:
3313 case tcc_comparison:
3314 case tcc_expression:
3315 switch (code)
3316 {
3317 case COMPOUND_EXPR:
3318 /* Ignoring the first operand isn't quite right, but works best. */
3319 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3320
3321 case COND_EXPR:
3322 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3323 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3324 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3325
3326 case SAVE_EXPR:
3327 /* The save_expr function never wraps anything containing
3328 a PLACEHOLDER_EXPR. */
3329 return 0;
3330
3331 default:
3332 break;
3333 }
3334
3335 switch (TREE_CODE_LENGTH (code))
3336 {
3337 case 1:
3338 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3339 case 2:
3340 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3341 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3342 default:
3343 return 0;
3344 }
3345
3346 case tcc_vl_exp:
3347 switch (code)
3348 {
3349 case CALL_EXPR:
3350 {
3351 const_tree arg;
3352 const_call_expr_arg_iterator iter;
3353 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3354 if (CONTAINS_PLACEHOLDER_P (arg))
3355 return 1;
3356 return 0;
3357 }
3358 default:
3359 return 0;
3360 }
3361
3362 default:
3363 return 0;
3364 }
3365 return 0;
3366 }
3367
3368 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3369 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3370 field positions. */
3371
3372 static bool
3373 type_contains_placeholder_1 (const_tree type)
3374 {
3375 /* If the size contains a placeholder or the parent type (component type in
3376 the case of arrays) type involves a placeholder, this type does. */
3377 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3378 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3379 || (!POINTER_TYPE_P (type)
3380 && TREE_TYPE (type)
3381 && type_contains_placeholder_p (TREE_TYPE (type))))
3382 return true;
3383
3384 /* Now do type-specific checks. Note that the last part of the check above
3385 greatly limits what we have to do below. */
3386 switch (TREE_CODE (type))
3387 {
3388 case VOID_TYPE:
3389 case COMPLEX_TYPE:
3390 case ENUMERAL_TYPE:
3391 case BOOLEAN_TYPE:
3392 case POINTER_TYPE:
3393 case OFFSET_TYPE:
3394 case REFERENCE_TYPE:
3395 case METHOD_TYPE:
3396 case FUNCTION_TYPE:
3397 case VECTOR_TYPE:
3398 case NULLPTR_TYPE:
3399 return false;
3400
3401 case INTEGER_TYPE:
3402 case REAL_TYPE:
3403 case FIXED_POINT_TYPE:
3404 /* Here we just check the bounds. */
3405 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3406 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3407
3408 case ARRAY_TYPE:
3409 /* We have already checked the component type above, so just check the
3410 domain type. */
3411 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3412
3413 case RECORD_TYPE:
3414 case UNION_TYPE:
3415 case QUAL_UNION_TYPE:
3416 {
3417 tree field;
3418
3419 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3420 if (TREE_CODE (field) == FIELD_DECL
3421 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3422 || (TREE_CODE (type) == QUAL_UNION_TYPE
3423 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3424 || type_contains_placeholder_p (TREE_TYPE (field))))
3425 return true;
3426
3427 return false;
3428 }
3429
3430 default:
3431 gcc_unreachable ();
3432 }
3433 }
3434
3435 /* Wrapper around above function used to cache its result. */
3436
3437 bool
3438 type_contains_placeholder_p (tree type)
3439 {
3440 bool result;
3441
3442 /* If the contains_placeholder_bits field has been initialized,
3443 then we know the answer. */
3444 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3445 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3446
3447 /* Indicate that we've seen this type node, and the answer is false.
3448 This is what we want to return if we run into recursion via fields. */
3449 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3450
3451 /* Compute the real value. */
3452 result = type_contains_placeholder_1 (type);
3453
3454 /* Store the real value. */
3455 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3456
3457 return result;
3458 }
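
/* Editorial note (not part of the original file): the cache above packs
   a tri-state into TYPE_CONTAINS_PLACEHOLDER_INTERNAL:

     0  not computed yet
     1  computed, answer is false
     2  computed, answer is true

   which is why the store uses "result + 1" and the load uses "- 1", and
   why recursion through fields sees a provisional "false".  */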
3459 \f
3460 /* Push tree EXP onto vector QUEUE if it is not already present. */
3461
3462 static void
3463 push_without_duplicates (tree exp, vec<tree> *queue)
3464 {
3465 unsigned int i;
3466 tree iter;
3467
3468 FOR_EACH_VEC_ELT (*queue, i, iter)
3469 if (simple_cst_equal (iter, exp) == 1)
3470 break;
3471
3472 if (!iter)
3473 queue->safe_push (exp);
3474 }
3475
3476 /* Given a tree EXP, find all occurrences of references to fields
3477 in a PLACEHOLDER_EXPR and place them in vector REFS without
3478 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3479 we assume here that EXP contains only arithmetic expressions
3480 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3481 argument list. */
3482
3483 void
3484 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3485 {
3486 enum tree_code code = TREE_CODE (exp);
3487 tree inner;
3488 int i;
3489
3490 /* We handle TREE_LIST and COMPONENT_REF separately. */
3491 if (code == TREE_LIST)
3492 {
3493 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3494 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3495 }
3496 else if (code == COMPONENT_REF)
3497 {
3498 for (inner = TREE_OPERAND (exp, 0);
3499 REFERENCE_CLASS_P (inner);
3500 inner = TREE_OPERAND (inner, 0))
3501 ;
3502
3503 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3504 push_without_duplicates (exp, refs);
3505 else
3506 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3507 }
3508 else
3509 switch (TREE_CODE_CLASS (code))
3510 {
3511 case tcc_constant:
3512 break;
3513
3514 case tcc_declaration:
3515 /* Variables allocated to static storage can stay. */
3516 if (!TREE_STATIC (exp))
3517 push_without_duplicates (exp, refs);
3518 break;
3519
3520 case tcc_expression:
3521 /* This is the pattern built in ada/make_aligning_type. */
3522 if (code == ADDR_EXPR
3523 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3524 {
3525 push_without_duplicates (exp, refs);
3526 break;
3527 }
3528
3529 /* Fall through... */
3530
3531 case tcc_exceptional:
3532 case tcc_unary:
3533 case tcc_binary:
3534 case tcc_comparison:
3535 case tcc_reference:
3536 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3537 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3538 break;
3539
3540 case tcc_vl_exp:
3541 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3542 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3543 break;
3544
3545 default:
3546 gcc_unreachable ();
3547 }
3548 }
3549
3550 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3551 return a tree with all occurrences of references to F in a
3552 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3553 CONST_DECLs. Note that we assume here that EXP contains only
3554 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3555 occurring only in their argument list. */
3556
3557 tree
3558 substitute_in_expr (tree exp, tree f, tree r)
3559 {
3560 enum tree_code code = TREE_CODE (exp);
3561 tree op0, op1, op2, op3;
3562 tree new_tree;
3563
3564 /* We handle TREE_LIST and COMPONENT_REF separately. */
3565 if (code == TREE_LIST)
3566 {
3567 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3568 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3569 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3570 return exp;
3571
3572 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3573 }
3574 else if (code == COMPONENT_REF)
3575 {
3576 tree inner;
3577
3578 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3579 and it is the right field, replace it with R. */
3580 for (inner = TREE_OPERAND (exp, 0);
3581 REFERENCE_CLASS_P (inner);
3582 inner = TREE_OPERAND (inner, 0))
3583 ;
3584
3585 /* The field. */
3586 op1 = TREE_OPERAND (exp, 1);
3587
3588 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3589 return r;
3590
3591       /* If this expression hasn't been completed yet, leave it alone.  */
3592 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3593 return exp;
3594
3595 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3596 if (op0 == TREE_OPERAND (exp, 0))
3597 return exp;
3598
3599 new_tree
3600 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3601 }
3602 else
3603 switch (TREE_CODE_CLASS (code))
3604 {
3605 case tcc_constant:
3606 return exp;
3607
3608 case tcc_declaration:
3609 if (exp == f)
3610 return r;
3611 else
3612 return exp;
3613
3614 case tcc_expression:
3615 if (exp == f)
3616 return r;
3617
3618 /* Fall through... */
3619
3620 case tcc_exceptional:
3621 case tcc_unary:
3622 case tcc_binary:
3623 case tcc_comparison:
3624 case tcc_reference:
3625 switch (TREE_CODE_LENGTH (code))
3626 {
3627 case 0:
3628 return exp;
3629
3630 case 1:
3631 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3632 if (op0 == TREE_OPERAND (exp, 0))
3633 return exp;
3634
3635 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3636 break;
3637
3638 case 2:
3639 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3640 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3641
3642 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3643 return exp;
3644
3645 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3646 break;
3647
3648 case 3:
3649 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3650 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3651 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3652
3653 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3654 && op2 == TREE_OPERAND (exp, 2))
3655 return exp;
3656
3657 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3658 break;
3659
3660 case 4:
3661 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3662 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3663 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3664 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3665
3666 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3667 && op2 == TREE_OPERAND (exp, 2)
3668 && op3 == TREE_OPERAND (exp, 3))
3669 return exp;
3670
3671 new_tree
3672 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3673 break;
3674
3675 default:
3676 gcc_unreachable ();
3677 }
3678 break;
3679
3680 case tcc_vl_exp:
3681 {
3682 int i;
3683
3684 new_tree = NULL_TREE;
3685
3686 /* If we are trying to replace F with a constant, inline back
3687 functions which do nothing else than computing a value from
3688 the arguments they are passed. This makes it possible to
3689 fold partially or entirely the replacement expression. */
3690 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3691 {
3692 tree t = maybe_inline_call_in_expr (exp);
3693 if (t)
3694 return SUBSTITUTE_IN_EXPR (t, f, r);
3695 }
3696
3697 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3698 {
3699 tree op = TREE_OPERAND (exp, i);
3700 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3701 if (new_op != op)
3702 {
3703 if (!new_tree)
3704 new_tree = copy_node (exp);
3705 TREE_OPERAND (new_tree, i) = new_op;
3706 }
3707 }
3708
3709 if (new_tree)
3710 {
3711 new_tree = fold (new_tree);
3712 if (TREE_CODE (new_tree) == CALL_EXPR)
3713 process_call_operands (new_tree);
3714 }
3715 else
3716 return exp;
3717 }
3718 break;
3719
3720 default:
3721 gcc_unreachable ();
3722 }
3723
3724 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3725
3726 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3727 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3728
3729 return new_tree;
3730 }
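
/* Editorial illustration (not part of the original file): given a size
   expression that reads a record field through a PLACEHOLDER_EXPR, say
   (PLACEHOLDER_EXPR).len * 2, substituting a constant for the field
   folds on the way out:

     tree r = build_int_cst (sizetype, 8);
     tree n = substitute_in_expr (exp, len_field, r);
     (n is the folded INTEGER_CST 16)

   Here exp and len_field are assumptions standing in for a real
   self-referential size expression.  */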
3731
3732 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3733 for it within OBJ, a tree that is an object or a chain of references. */
3734
3735 tree
3736 substitute_placeholder_in_expr (tree exp, tree obj)
3737 {
3738 enum tree_code code = TREE_CODE (exp);
3739 tree op0, op1, op2, op3;
3740 tree new_tree;
3741
3742 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3743 in the chain of OBJ. */
3744 if (code == PLACEHOLDER_EXPR)
3745 {
3746 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3747 tree elt;
3748
3749 for (elt = obj; elt != 0;
3750 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3751 || TREE_CODE (elt) == COND_EXPR)
3752 ? TREE_OPERAND (elt, 1)
3753 : (REFERENCE_CLASS_P (elt)
3754 || UNARY_CLASS_P (elt)
3755 || BINARY_CLASS_P (elt)
3756 || VL_EXP_CLASS_P (elt)
3757 || EXPRESSION_CLASS_P (elt))
3758 ? TREE_OPERAND (elt, 0) : 0))
3759 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3760 return elt;
3761
3762 for (elt = obj; elt != 0;
3763 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3764 || TREE_CODE (elt) == COND_EXPR)
3765 ? TREE_OPERAND (elt, 1)
3766 : (REFERENCE_CLASS_P (elt)
3767 || UNARY_CLASS_P (elt)
3768 || BINARY_CLASS_P (elt)
3769 || VL_EXP_CLASS_P (elt)
3770 || EXPRESSION_CLASS_P (elt))
3771 ? TREE_OPERAND (elt, 0) : 0))
3772 if (POINTER_TYPE_P (TREE_TYPE (elt))
3773 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3774 == need_type))
3775 return fold_build1 (INDIRECT_REF, need_type, elt);
3776
3777 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3778 survives until RTL generation, there will be an error. */
3779 return exp;
3780 }
3781
3782 /* TREE_LIST is special because we need to look at TREE_VALUE
3783 and TREE_CHAIN, not TREE_OPERANDS. */
3784 else if (code == TREE_LIST)
3785 {
3786 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3787 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3788 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3789 return exp;
3790
3791 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3792 }
3793 else
3794 switch (TREE_CODE_CLASS (code))
3795 {
3796 case tcc_constant:
3797 case tcc_declaration:
3798 return exp;
3799
3800 case tcc_exceptional:
3801 case tcc_unary:
3802 case tcc_binary:
3803 case tcc_comparison:
3804 case tcc_expression:
3805 case tcc_reference:
3806 case tcc_statement:
3807 switch (TREE_CODE_LENGTH (code))
3808 {
3809 case 0:
3810 return exp;
3811
3812 case 1:
3813 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3814 if (op0 == TREE_OPERAND (exp, 0))
3815 return exp;
3816
3817 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3818 break;
3819
3820 case 2:
3821 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3822 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3823
3824 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3825 return exp;
3826
3827 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3828 break;
3829
3830 case 3:
3831 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3832 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3833 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3834
3835 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3836 && op2 == TREE_OPERAND (exp, 2))
3837 return exp;
3838
3839 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3840 break;
3841
3842 case 4:
3843 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3844 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3845 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3846 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3847
3848 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3849 && op2 == TREE_OPERAND (exp, 2)
3850 && op3 == TREE_OPERAND (exp, 3))
3851 return exp;
3852
3853 new_tree
3854 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3855 break;
3856
3857 default:
3858 gcc_unreachable ();
3859 }
3860 break;
3861
3862 case tcc_vl_exp:
3863 {
3864 int i;
3865
3866 new_tree = NULL_TREE;
3867
3868 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3869 {
3870 tree op = TREE_OPERAND (exp, i);
3871 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3872 if (new_op != op)
3873 {
3874 if (!new_tree)
3875 new_tree = copy_node (exp);
3876 TREE_OPERAND (new_tree, i) = new_op;
3877 }
3878 }
3879
3880 if (new_tree)
3881 {
3882 new_tree = fold (new_tree);
3883 if (TREE_CODE (new_tree) == CALL_EXPR)
3884 process_call_operands (new_tree);
3885 }
3886 else
3887 return exp;
3888 }
3889 break;
3890
3891 default:
3892 gcc_unreachable ();
3893 }
3894
3895 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3896
3897 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3898 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3899
3900 return new_tree;
3901 }
3902 \f
3903
3904 /* Subroutine of stabilize_reference; this is called for subtrees of
3905 references. Any expression with side-effects must be put in a SAVE_EXPR
3906 to ensure that it is only evaluated once.
3907
3908 We don't put SAVE_EXPR nodes around everything, because assigning very
3909 simple expressions to temporaries causes us to miss good opportunities
3910 for optimizations. Among other things, the opportunity to fold in the
3911 addition of a constant into an addressing mode often gets lost, e.g.
3912 "y[i+1] += x;". In general, we take the approach that we should not make
3913 an assignment unless we are forced into it - i.e., that any non-side effect
3914 operator should be allowed, and that cse should take care of coalescing
3915 multiple utterances of the same expression should that prove fruitful. */
3916
3917 static tree
3918 stabilize_reference_1 (tree e)
3919 {
3920 tree result;
3921 enum tree_code code = TREE_CODE (e);
3922
3923 /* We cannot ignore const expressions because it might be a reference
3924 to a const array but whose index contains side-effects. But we can
3925 ignore things that are actual constant or that already have been
3926 handled by this function. */
3927
3928 if (tree_invariant_p (e))
3929 return e;
3930
3931 switch (TREE_CODE_CLASS (code))
3932 {
3933 case tcc_exceptional:
3934 case tcc_type:
3935 case tcc_declaration:
3936 case tcc_comparison:
3937 case tcc_statement:
3938 case tcc_expression:
3939 case tcc_reference:
3940 case tcc_vl_exp:
3941 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3942 so that it will only be evaluated once. */
3943 /* The reference (r) and comparison (<) classes could be handled as
3944 below, but it is generally faster to only evaluate them once. */
3945 if (TREE_SIDE_EFFECTS (e))
3946 return save_expr (e);
3947 return e;
3948
3949 case tcc_constant:
3950 /* Constants need no processing. In fact, we should never reach
3951 here. */
3952 return e;
3953
3954 case tcc_binary:
3955 /* Division is slow and tends to be compiled with jumps,
3956 especially the division by powers of 2 that is often
3957 found inside of an array reference. So do it just once. */
3958 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3959 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3960 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3961 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3962 return save_expr (e);
3963 /* Recursively stabilize each operand. */
3964 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3965 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3966 break;
3967
3968 case tcc_unary:
3969 /* Recursively stabilize each operand. */
3970 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3971 break;
3972
3973 default:
3974 gcc_unreachable ();
3975 }
3976
3977 TREE_TYPE (result) = TREE_TYPE (e);
3978 TREE_READONLY (result) = TREE_READONLY (e);
3979 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3980 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3981
3982 return result;
3983 }
3984
3985 /* Stabilize a reference so that we can use it any number of times
3986 without causing its operands to be evaluated more than once.
3987 Returns the stabilized reference. This works by means of save_expr,
3988 so see the caveats in the comments about save_expr.
3989
3990 Also allows conversion expressions whose operands are references.
3991 Any other kind of expression is returned unchanged. */
3992
3993 tree
3994 stabilize_reference (tree ref)
3995 {
3996 tree result;
3997 enum tree_code code = TREE_CODE (ref);
3998
3999 switch (code)
4000 {
4001 case VAR_DECL:
4002 case PARM_DECL:
4003 case RESULT_DECL:
4004 /* No action is needed in this case. */
4005 return ref;
4006
4007 CASE_CONVERT:
4008 case FLOAT_EXPR:
4009 case FIX_TRUNC_EXPR:
4010 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4011 break;
4012
4013 case INDIRECT_REF:
4014 result = build_nt (INDIRECT_REF,
4015 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4016 break;
4017
4018 case COMPONENT_REF:
4019 result = build_nt (COMPONENT_REF,
4020 stabilize_reference (TREE_OPERAND (ref, 0)),
4021 TREE_OPERAND (ref, 1), NULL_TREE);
4022 break;
4023
4024 case BIT_FIELD_REF:
4025 result = build_nt (BIT_FIELD_REF,
4026 stabilize_reference (TREE_OPERAND (ref, 0)),
4027 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4028 break;
4029
4030 case ARRAY_REF:
4031 result = build_nt (ARRAY_REF,
4032 stabilize_reference (TREE_OPERAND (ref, 0)),
4033 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4034 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4035 break;
4036
4037 case ARRAY_RANGE_REF:
4038 result = build_nt (ARRAY_RANGE_REF,
4039 stabilize_reference (TREE_OPERAND (ref, 0)),
4040 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4041 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4042 break;
4043
4044 case COMPOUND_EXPR:
4045 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4046 it wouldn't be ignored. This matters when dealing with
4047 volatiles. */
4048 return stabilize_reference_1 (ref);
4049
4050 /* If arg isn't a kind of lvalue we recognize, make no change.
4051 Caller should recognize the error for an invalid lvalue. */
4052 default:
4053 return ref;
4054
4055 case ERROR_MARK:
4056 return error_mark_node;
4057 }
4058
4059 TREE_TYPE (result) = TREE_TYPE (ref);
4060 TREE_READONLY (result) = TREE_READONLY (ref);
4061 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4062 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4063
4064 return result;
4065 }
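
/* Usage sketch (editorial illustration, not part of the original source):
   a front end that must reuse an lvalue, e.g. for a compound assignment
   like a[i++] += 1, can stabilize it so the index side effect runs once.
   The trees ARRAY, IDX and ELT_TYPE below are hypothetical.

     tree ref = build4 (ARRAY_REF, elt_type, array, idx,
			NULL_TREE, NULL_TREE);
     tree stable = stabilize_reference (ref);

   STABLE can then appear on both sides of the MODIFY_EXPR; IDX is wrapped
   in a SAVE_EXPR by stabilize_reference_1 and thus evaluated only once.  */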
4066 \f
4067 /* Low-level constructors for expressions. */
4068
4069 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4070 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4071
4072 void
4073 recompute_tree_invariant_for_addr_expr (tree t)
4074 {
4075 tree node;
4076 bool tc = true, se = false;
4077
4078 /* We start out assuming this address is both invariant and constant and
4079 has no side effects. Now go down any handled components and see if
4080 any of them involve offsets that are either non-constant or non-invariant.
4081 Also check for side-effects.
4082
4083 ??? Note that this code makes no attempt to deal with the case where
4084 taking the address of something causes a copy due to misalignment. */
4085
4086 #define UPDATE_FLAGS(NODE) \
4087 do { tree _node = (NODE); \
4088 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4089 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4090
4091 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4092 node = TREE_OPERAND (node, 0))
4093 {
4094 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4095 array reference (probably made temporarily by the G++ front end),
4096 so ignore all the operands. */
4097 if ((TREE_CODE (node) == ARRAY_REF
4098 || TREE_CODE (node) == ARRAY_RANGE_REF)
4099 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4100 {
4101 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4102 if (TREE_OPERAND (node, 2))
4103 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4104 if (TREE_OPERAND (node, 3))
4105 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4106 }
4107 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4108 FIELD_DECL, apparently. The G++ front end can put something else
4109 there, at least temporarily. */
4110 else if (TREE_CODE (node) == COMPONENT_REF
4111 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4112 {
4113 if (TREE_OPERAND (node, 2))
4114 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4115 }
4116 }
4117
4118 node = lang_hooks.expr_to_decl (node, &tc, &se);
4119
4120 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4121 the address, since &(*a)->b is a form of addition. If it's a constant, the
4122 address is constant too. If it's a decl, its address is constant if the
4123 decl is static. Everything else is not constant and, furthermore,
4124 taking the address of a volatile variable is not volatile. */
4125 if (TREE_CODE (node) == INDIRECT_REF
4126 || TREE_CODE (node) == MEM_REF)
4127 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4128 else if (CONSTANT_CLASS_P (node))
4129 ;
4130 else if (DECL_P (node))
4131 tc &= (staticp (node) != NULL_TREE);
4132 else
4133 {
4134 tc = false;
4135 se |= TREE_SIDE_EFFECTS (node);
4136 }
4137
4138
4139 TREE_CONSTANT (t) = tc;
4140 TREE_SIDE_EFFECTS (t) = se;
4141 #undef UPDATE_FLAGS
4142 }
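
/* Usage sketch (editorial illustration, not part of the original source):
   after rewriting the operand of an existing ADDR_EXPR in place, the
   cached flags must be refreshed.  ADDR and NEW_BASE are hypothetical.

     TREE_OPERAND (addr, 0) = new_base;
     recompute_tree_invariant_for_addr_expr (addr);

   TREE_CONSTANT (addr) and TREE_SIDE_EFFECTS (addr) now reflect NEW_BASE
   rather than the old operand.  */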
4143
4144 /* Build an expression of code CODE, data type TYPE, and operands as
4145 specified. Expressions and reference nodes can be created this way.
4146 Constants, decls, types and misc nodes cannot be.
4147
4148 We define six non-variadic functions, from 0 to 5 arguments. This is
4149 enough for all extant tree codes. */
4150
4151 tree
4152 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4153 {
4154 tree t;
4155
4156 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4157
4158 t = make_node_stat (code PASS_MEM_STAT);
4159 TREE_TYPE (t) = tt;
4160
4161 return t;
4162 }
4163
4164 tree
4165 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4166 {
4167 int length = sizeof (struct tree_exp);
4168 tree t;
4169
4170 record_node_allocation_statistics (code, length);
4171
4172 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4173
4174 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4175
4176 memset (t, 0, sizeof (struct tree_common));
4177
4178 TREE_SET_CODE (t, code);
4179
4180 TREE_TYPE (t) = type;
4181 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4182 TREE_OPERAND (t, 0) = node;
4183 if (node && !TYPE_P (node))
4184 {
4185 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4186 TREE_READONLY (t) = TREE_READONLY (node);
4187 }
4188
4189 if (TREE_CODE_CLASS (code) == tcc_statement)
4190 TREE_SIDE_EFFECTS (t) = 1;
4191 else switch (code)
4192 {
4193 case VA_ARG_EXPR:
4194 /* All of these have side-effects, no matter what their
4195 operands are. */
4196 TREE_SIDE_EFFECTS (t) = 1;
4197 TREE_READONLY (t) = 0;
4198 break;
4199
4200 case INDIRECT_REF:
4201 /* Whether a dereference is readonly has nothing to do with whether
4202 its operand is readonly. */
4203 TREE_READONLY (t) = 0;
4204 break;
4205
4206 case ADDR_EXPR:
4207 if (node)
4208 recompute_tree_invariant_for_addr_expr (t);
4209 break;
4210
4211 default:
4212 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4213 && node && !TYPE_P (node)
4214 && TREE_CONSTANT (node))
4215 TREE_CONSTANT (t) = 1;
4216 if (TREE_CODE_CLASS (code) == tcc_reference
4217 && node && TREE_THIS_VOLATILE (node))
4218 TREE_THIS_VOLATILE (t) = 1;
4219 break;
4220 }
4221
4222 return t;
4223 }
4224
4225 #define PROCESS_ARG(N) \
4226 do { \
4227 TREE_OPERAND (t, N) = arg##N; \
4228 if (arg##N && !TYPE_P (arg##N)) \
4229 { \
4230 if (TREE_SIDE_EFFECTS (arg##N)) \
4231 side_effects = 1; \
4232 if (!TREE_READONLY (arg##N) \
4233 && !CONSTANT_CLASS_P (arg##N)) \
4234 (void) (read_only = 0); \
4235 if (!TREE_CONSTANT (arg##N)) \
4236 (void) (constant = 0); \
4237 } \
4238 } while (0)
4239
4240 tree
4241 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4242 {
4243 bool constant, read_only, side_effects;
4244 tree t;
4245
4246 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4247
4248 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4249 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4250 /* When sizetype precision doesn't match that of pointers
4251 we need to be able to build explicit extensions or truncations
4252 of the offset argument. */
4253 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4254 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4255 && TREE_CODE (arg1) == INTEGER_CST);
4256
4257 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4258 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4259 && ptrofftype_p (TREE_TYPE (arg1)));
4260
4261 t = make_node_stat (code PASS_MEM_STAT);
4262 TREE_TYPE (t) = tt;
4263
4264 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4265 result based on those same flags for the arguments. But if the
4266 arguments aren't really even `tree' expressions, we shouldn't be trying
4267 to do this. */
4268
4269 /* Expressions without side effects may be constant if their
4270 arguments are as well. */
4271 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4272 || TREE_CODE_CLASS (code) == tcc_binary);
4273 read_only = 1;
4274 side_effects = TREE_SIDE_EFFECTS (t);
4275
4276 PROCESS_ARG (0);
4277 PROCESS_ARG (1);
4278
4279 TREE_READONLY (t) = read_only;
4280 TREE_CONSTANT (t) = constant;
4281 TREE_SIDE_EFFECTS (t) = side_effects;
4282 TREE_THIS_VOLATILE (t)
4283 = (TREE_CODE_CLASS (code) == tcc_reference
4284 && arg0 && TREE_THIS_VOLATILE (arg0));
4285
4286 return t;
4287 }
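
/* Usage sketch (editorial illustration, not part of the original source):
   the buildN_stat entry points are normally reached through the
   build1 .. build5 macros.  Building an addition whose flags are derived
   from its operands, with hypothetical operands OP0 and OP1:

     tree sum = build2 (PLUS_EXPR, integer_type_node, op0, op1);

   TREE_CONSTANT (sum) is set only if both operands are TREE_CONSTANT, and
   TREE_SIDE_EFFECTS (sum) is set if either operand has side effects.  */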
4288
4289
4290 tree
4291 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4292 tree arg2 MEM_STAT_DECL)
4293 {
4294 bool constant, read_only, side_effects;
4295 tree t;
4296
4297 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4298 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4299
4300 t = make_node_stat (code PASS_MEM_STAT);
4301 TREE_TYPE (t) = tt;
4302
4303 read_only = 1;
4304
4305 /* As a special exception, if COND_EXPR has NULL branches, we
4306 assume that it is a gimple statement and always consider
4307 it to have side effects. */
4308 if (code == COND_EXPR
4309 && tt == void_type_node
4310 && arg1 == NULL_TREE
4311 && arg2 == NULL_TREE)
4312 side_effects = true;
4313 else
4314 side_effects = TREE_SIDE_EFFECTS (t);
4315
4316 PROCESS_ARG (0);
4317 PROCESS_ARG (1);
4318 PROCESS_ARG (2);
4319
4320 if (code == COND_EXPR)
4321 TREE_READONLY (t) = read_only;
4322
4323 TREE_SIDE_EFFECTS (t) = side_effects;
4324 TREE_THIS_VOLATILE (t)
4325 = (TREE_CODE_CLASS (code) == tcc_reference
4326 && arg0 && TREE_THIS_VOLATILE (arg0));
4327
4328 return t;
4329 }
4330
4331 tree
4332 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4333 tree arg2, tree arg3 MEM_STAT_DECL)
4334 {
4335 bool constant, read_only, side_effects;
4336 tree t;
4337
4338 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4339
4340 t = make_node_stat (code PASS_MEM_STAT);
4341 TREE_TYPE (t) = tt;
4342
4343 side_effects = TREE_SIDE_EFFECTS (t);
4344
4345 PROCESS_ARG (0);
4346 PROCESS_ARG (1);
4347 PROCESS_ARG (2);
4348 PROCESS_ARG (3);
4349
4350 TREE_SIDE_EFFECTS (t) = side_effects;
4351 TREE_THIS_VOLATILE (t)
4352 = (TREE_CODE_CLASS (code) == tcc_reference
4353 && arg0 && TREE_THIS_VOLATILE (arg0));
4354
4355 return t;
4356 }
4357
4358 tree
4359 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4360 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4361 {
4362 bool constant, read_only, side_effects;
4363 tree t;
4364
4365 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4366
4367 t = make_node_stat (code PASS_MEM_STAT);
4368 TREE_TYPE (t) = tt;
4369
4370 side_effects = TREE_SIDE_EFFECTS (t);
4371
4372 PROCESS_ARG (0);
4373 PROCESS_ARG (1);
4374 PROCESS_ARG (2);
4375 PROCESS_ARG (3);
4376 PROCESS_ARG (4);
4377
4378 TREE_SIDE_EFFECTS (t) = side_effects;
4379 TREE_THIS_VOLATILE (t)
4380 = (TREE_CODE_CLASS (code) == tcc_reference
4381 && arg0 && TREE_THIS_VOLATILE (arg0));
4382
4383 return t;
4384 }
4385
4386 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4387 on the pointer PTR. */
4388
4389 tree
4390 build_simple_mem_ref_loc (location_t loc, tree ptr)
4391 {
4392 HOST_WIDE_INT offset = 0;
4393 tree ptype = TREE_TYPE (ptr);
4394 tree tem;
4395 /* For convenience allow addresses that collapse to a simple base
4396 and offset. */
4397 if (TREE_CODE (ptr) == ADDR_EXPR
4398 && (handled_component_p (TREE_OPERAND (ptr, 0))
4399 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4400 {
4401 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4402 gcc_assert (ptr);
4403 ptr = build_fold_addr_expr (ptr);
4404 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4405 }
4406 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4407 ptr, build_int_cst (ptype, offset));
4408 SET_EXPR_LOCATION (tem, loc);
4409 return tem;
4410 }
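
/* Usage sketch (editorial illustration, not part of the original source):
   dereferencing a pointer-typed tree PTR (hypothetical) at location LOC:

     tree deref = build_simple_mem_ref_loc (loc, ptr);

   DEREF is a MEM_REF with a zero (or collapsed) offset, typed as
   TREE_TYPE (TREE_TYPE (ptr)), i.e. the equivalent of dereferencing PTR.  */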
4411
4412 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4413
4414 offset_int
4415 mem_ref_offset (const_tree t)
4416 {
4417 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4418 }
4419
4420 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4421 offsetted by OFFSET units. */
4422
4423 tree
4424 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4425 {
4426 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4427 build_fold_addr_expr (base),
4428 build_int_cst (ptr_type_node, offset));
4429 tree addr = build1 (ADDR_EXPR, type, ref);
4430 recompute_tree_invariant_for_addr_expr (addr);
4431 return addr;
4432 }
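
/* Usage sketch (editorial illustration, not part of the original source):
   taking the address of a variable VAR (hypothetical) at a byte offset
   of 4, with the invariant and constant flags recomputed on the result:

     tree ptype = build_pointer_type (TREE_TYPE (var));
     tree addr = build_invariant_address (ptype, var, 4);  */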
4433
4434 /* Similar to the build functions above, except that the TREE_TYPE is not
4435 specified and TREE_SIDE_EFFECTS is left as 0.
4436 It is permissible for arguments to be null,
4437 or even garbage if their values do not matter. */
4438
4439 tree
4440 build_nt (enum tree_code code, ...)
4441 {
4442 tree t;
4443 int length;
4444 int i;
4445 va_list p;
4446
4447 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4448
4449 va_start (p, code);
4450
4451 t = make_node (code);
4452 length = TREE_CODE_LENGTH (code);
4453
4454 for (i = 0; i < length; i++)
4455 TREE_OPERAND (t, i) = va_arg (p, tree);
4456
4457 va_end (p);
4458 return t;
4459 }
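
/* Usage sketch (editorial illustration, not part of the original source):
   build_nt creates a bare node; the caller is responsible for TREE_TYPE
   and the flag bits, as stabilize_reference does above.  OBJECT and FIELD
   are hypothetical.

     tree raw = build_nt (COMPONENT_REF, object, field, NULL_TREE);
     TREE_TYPE (raw) = TREE_TYPE (field);  */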
4460
4461 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4462 tree vec. */
4463
4464 tree
4465 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4466 {
4467 tree ret, t;
4468 unsigned int ix;
4469
4470 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4471 CALL_EXPR_FN (ret) = fn;
4472 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4473 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4474 CALL_EXPR_ARG (ret, ix) = t;
4475 return ret;
4476 }
4477 \f
4478 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4479 We do NOT enter this node in any sort of symbol table.
4480
4481 LOC is the location of the decl.
4482
4483 layout_decl is used to set up the decl's storage layout.
4484 Other slots are initialized to 0 or null pointers. */
4485
4486 tree
4487 build_decl_stat (location_t loc, enum tree_code code, tree name,
4488 tree type MEM_STAT_DECL)
4489 {
4490 tree t;
4491
4492 t = make_node_stat (code PASS_MEM_STAT);
4493 DECL_SOURCE_LOCATION (t) = loc;
4494
4495 /* if (type == error_mark_node)
4496 type = integer_type_node; */
4497 /* That is not done, deliberately, so that having error_mark_node
4498 as the type can suppress useless errors in the use of this variable. */
4499
4500 DECL_NAME (t) = name;
4501 TREE_TYPE (t) = type;
4502
4503 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4504 layout_decl (t, 0);
4505
4506 return t;
4507 }
4508
4509 /* Builds and returns function declaration with NAME and TYPE. */
4510
4511 tree
4512 build_fn_decl (const char *name, tree type)
4513 {
4514 tree id = get_identifier (name);
4515 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4516
4517 DECL_EXTERNAL (decl) = 1;
4518 TREE_PUBLIC (decl) = 1;
4519 DECL_ARTIFICIAL (decl) = 1;
4520 TREE_NOTHROW (decl) = 1;
4521
4522 return decl;
4523 }
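
/* Usage sketch (editorial illustration, not part of the original source):
   declaring an external, nothrow runtime helper that takes one pointer
   argument.  The helper name below is hypothetical.

     tree fntype = build_function_type_list (void_type_node, ptr_type_node,
					     NULL_TREE);
     tree decl = build_fn_decl ("__example_runtime_hook", fntype);  */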
4524
4525 vec<tree, va_gc> *all_translation_units;
4526
4527 /* Builds a new translation-unit decl with name NAME, queues it in the
4528 global list of translation-unit decls and returns it. */
4529
4530 tree
4531 build_translation_unit_decl (tree name)
4532 {
4533 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4534 name, NULL_TREE);
4535 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4536 vec_safe_push (all_translation_units, tu);
4537 return tu;
4538 }
4539
4540 \f
4541 /* BLOCK nodes are used to represent the structure of binding contours
4542 and declarations, once those contours have been exited and their contents
4543 compiled. This information is used for outputting debugging info. */
4544
4545 tree
4546 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4547 {
4548 tree block = make_node (BLOCK);
4549
4550 BLOCK_VARS (block) = vars;
4551 BLOCK_SUBBLOCKS (block) = subblocks;
4552 BLOCK_SUPERCONTEXT (block) = supercontext;
4553 BLOCK_CHAIN (block) = chain;
4554 return block;
4555 }
4556
4557 \f
4558 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4559
4560 LOC is the location to use in tree T. */
4561
4562 void
4563 protected_set_expr_location (tree t, location_t loc)
4564 {
4565 if (t && CAN_HAVE_LOCATION_P (t))
4566 SET_EXPR_LOCATION (t, loc);
4567 }
4568 \f
4569 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4570 is ATTRIBUTE. */
4571
4572 tree
4573 build_decl_attribute_variant (tree ddecl, tree attribute)
4574 {
4575 DECL_ATTRIBUTES (ddecl) = attribute;
4576 return ddecl;
4577 }
4578
4579 /* Borrowed from hashtab.c iterative_hash implementation. */
4580 #define mix(a,b,c) \
4581 { \
4582 a -= b; a -= c; a ^= (c>>13); \
4583 b -= c; b -= a; b ^= (a<< 8); \
4584 c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \
4585 a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \
4586 b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \
4587 c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \
4588 a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \
4589 b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \
4590 c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \
4591 }
4592
4593
4594 /* Produce good hash value combining VAL and VAL2. */
4595 hashval_t
4596 iterative_hash_hashval_t (hashval_t val, hashval_t val2)
4597 {
4598 /* the golden ratio; an arbitrary value. */
4599 hashval_t a = 0x9e3779b9;
4600
4601 mix (a, val, val2);
4602 return val2;
4603 }
4604
4605 /* Produce good hash value combining VAL and VAL2. */
4606 hashval_t
4607 iterative_hash_host_wide_int (HOST_WIDE_INT val, hashval_t val2)
4608 {
4609 if (sizeof (HOST_WIDE_INT) == sizeof (hashval_t))
4610 return iterative_hash_hashval_t (val, val2);
4611 else
4612 {
4613 hashval_t a = (hashval_t) val;
4614 /* Avoid warnings about shifting of more than the width of the type on
4615 hosts that won't execute this path. */
4616 int zero = 0;
4617 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 8 + zero));
4618 mix (a, b, val2);
4619 if (sizeof (HOST_WIDE_INT) > 2 * sizeof (hashval_t))
4620 {
4621 hashval_t a = (hashval_t) (val >> (sizeof (hashval_t) * 16 + zero));
4622 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 24 + zero));
4623 mix (a, b, val2);
4624 }
4625 return val2;
4626 }
4627 }
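
/* Usage sketch (editorial illustration, not part of the original source):
   combining a tree code and a HOST_WIDE_INT constant into one hash value,
   in the style used by the type hashing below.  CODE and VALUE are
   hypothetical.

     hashval_t h = 0;
     h = iterative_hash_hashval_t ((hashval_t) code, h);
     h = iterative_hash_host_wide_int (value, h);  */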
4628
4629 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4630 is ATTRIBUTE and its qualifiers are QUALS.
4631
4632 Record such modified types already made so we don't make duplicates. */
4633
4634 tree
4635 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4636 {
4637 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4638 {
4639 hashval_t hashcode = 0;
4640 tree ntype;
4641 int i;
4642 tree t;
4643 enum tree_code code = TREE_CODE (ttype);
4644
4645 /* Building a distinct copy of a tagged type is inappropriate; it
4646 causes breakage in code that expects there to be a one-to-one
4647 relationship between a struct and its fields.
4648 build_duplicate_type is another solution (as used in
4649 handle_transparent_union_attribute), but that doesn't play well
4650 with the stronger C++ type identity model. */
4651 if (TREE_CODE (ttype) == RECORD_TYPE
4652 || TREE_CODE (ttype) == UNION_TYPE
4653 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4654 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4655 {
4656 warning (OPT_Wattributes,
4657 "ignoring attributes applied to %qT after definition",
4658 TYPE_MAIN_VARIANT (ttype));
4659 return build_qualified_type (ttype, quals);
4660 }
4661
4662 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4663 ntype = build_distinct_type_copy (ttype);
4664
4665 TYPE_ATTRIBUTES (ntype) = attribute;
4666
4667 hashcode = iterative_hash_object (code, hashcode);
4668 if (TREE_TYPE (ntype))
4669 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype)),
4670 hashcode);
4671 hashcode = attribute_hash_list (attribute, hashcode);
4672
4673 switch (TREE_CODE (ntype))
4674 {
4675 case FUNCTION_TYPE:
4676 hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode);
4677 break;
4678 case ARRAY_TYPE:
4679 if (TYPE_DOMAIN (ntype))
4680 hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
4681 hashcode);
4682 break;
4683 case INTEGER_TYPE:
4684 t = TYPE_MAX_VALUE (ntype);
4685 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4686 hashcode = iterative_hash_object (TREE_INT_CST_ELT (t, i), hashcode);
4687 break;
4688 case REAL_TYPE:
4689 case FIXED_POINT_TYPE:
4690 {
4691 unsigned int precision = TYPE_PRECISION (ntype);
4692 hashcode = iterative_hash_object (precision, hashcode);
4693 }
4694 break;
4695 default:
4696 break;
4697 }
4698
4699 ntype = type_hash_canon (hashcode, ntype);
4700
4701 /* If the target-dependent attributes make NTYPE different from
4702 its canonical type, we will need to use structural equality
4703 checks for this type. */
4704 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4705 || !comp_type_attributes (ntype, ttype))
4706 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4707 else if (TYPE_CANONICAL (ntype) == ntype)
4708 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4709
4710 ttype = build_qualified_type (ntype, quals);
4711 }
4712 else if (TYPE_QUALS (ttype) != quals)
4713 ttype = build_qualified_type (ttype, quals);
4714
4715 return ttype;
4716 }
4717
4718 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4719 the same. */
4720
4721 static bool
4722 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4723 {
4724 tree cl1, cl2;
4725 for (cl1 = clauses1, cl2 = clauses2;
4726 cl1 && cl2;
4727 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4728 {
4729 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4730 return false;
4731 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4732 {
4733 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4734 OMP_CLAUSE_DECL (cl2)) != 1)
4735 return false;
4736 }
4737 switch (OMP_CLAUSE_CODE (cl1))
4738 {
4739 case OMP_CLAUSE_ALIGNED:
4740 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4741 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4742 return false;
4743 break;
4744 case OMP_CLAUSE_LINEAR:
4745 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4746 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4747 return false;
4748 break;
4749 case OMP_CLAUSE_SIMDLEN:
4750 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4751 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4752 return false;
4753 default:
4754 break;
4755 }
4756 }
4757 return true;
4758 }
4759
4760 /* Compare two constructor-element-type constants. Return true if the lists
4761 are known to be equal; otherwise return false. */
4762
4763 static bool
4764 simple_cst_list_equal (const_tree l1, const_tree l2)
4765 {
4766 while (l1 != NULL_TREE && l2 != NULL_TREE)
4767 {
4768 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4769 return false;
4770
4771 l1 = TREE_CHAIN (l1);
4772 l2 = TREE_CHAIN (l2);
4773 }
4774
4775 return l1 == l2;
4776 }
4777
4778 /* Compare two attributes for their value identity. Return true if the
4779 attribute values are known to be equal; otherwise return false.
4780 */
4781
4782 static bool
4783 attribute_value_equal (const_tree attr1, const_tree attr2)
4784 {
4785 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4786 return true;
4787
4788 if (TREE_VALUE (attr1) != NULL_TREE
4789 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4790 && TREE_VALUE (attr2) != NULL
4791 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4792 return (simple_cst_list_equal (TREE_VALUE (attr1),
4793 TREE_VALUE (attr2)) == 1);
4794
4795 if ((flag_openmp || flag_openmp_simd)
4796 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4797 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4798 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4799 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4800 TREE_VALUE (attr2));
4801
4802 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4803 }
4804
4805 /* Return 0 if the attributes for two types are incompatible, 1 if they
4806 are compatible, and 2 if they are nearly compatible (which causes a
4807 warning to be generated). */
4808 int
4809 comp_type_attributes (const_tree type1, const_tree type2)
4810 {
4811 const_tree a1 = TYPE_ATTRIBUTES (type1);
4812 const_tree a2 = TYPE_ATTRIBUTES (type2);
4813 const_tree a;
4814
4815 if (a1 == a2)
4816 return 1;
4817 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4818 {
4819 const struct attribute_spec *as;
4820 const_tree attr;
4821
4822 as = lookup_attribute_spec (get_attribute_name (a));
4823 if (!as || as->affects_type_identity == false)
4824 continue;
4825
4826 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4827 if (!attr || !attribute_value_equal (a, attr))
4828 break;
4829 }
4830 if (!a)
4831 {
4832 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4833 {
4834 const struct attribute_spec *as;
4835
4836 as = lookup_attribute_spec (get_attribute_name (a));
4837 if (!as || as->affects_type_identity == false)
4838 continue;
4839
4840 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4841 break;
4842 /* We don't need to compare trees again, as we did this
4843 already in the first loop. */
4844 }
4845 /* All attributes that affect type identity are equal, so
4846 there is no need to call the target hook for comparison. */
4847 if (!a)
4848 return 1;
4849 }
4850 /* As some type combinations - like the default calling convention - might
4851 still be compatible, we have to call the target hook to get the final result. */
4852 return targetm.comp_type_attributes (type1, type2);
4853 }
4854
4855 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4856 is ATTRIBUTE.
4857
4858 Record such modified types already made so we don't make duplicates. */
4859
4860 tree
4861 build_type_attribute_variant (tree ttype, tree attribute)
4862 {
4863 return build_type_attribute_qual_variant (ttype, attribute,
4864 TYPE_QUALS (ttype));
4865 }
4866
4867
4868 /* Reset the expression *EXPR_P, a size or position.
4869
4870 ??? We could reset all non-constant sizes or positions. But it's cheap
4871 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4872
4873 We need to reset self-referential sizes or positions because they cannot
4874 be gimplified and thus can contain a CALL_EXPR after the gimplification
4875 is finished, which will run afoul of LTO streaming. And they need to be
4876 reset to something essentially dummy but not constant, so as to preserve
4877 the properties of the object they are attached to. */
4878
4879 static inline void
4880 free_lang_data_in_one_sizepos (tree *expr_p)
4881 {
4882 tree expr = *expr_p;
4883 if (CONTAINS_PLACEHOLDER_P (expr))
4884 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4885 }
4886
4887
4888 /* Reset all the fields in a binfo node BINFO. We only keep
4889 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4890
4891 static void
4892 free_lang_data_in_binfo (tree binfo)
4893 {
4894 unsigned i;
4895 tree t;
4896
4897 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4898
4899 BINFO_VIRTUALS (binfo) = NULL_TREE;
4900 BINFO_BASE_ACCESSES (binfo) = NULL;
4901 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4902 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4903
4904 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4905 free_lang_data_in_binfo (t);
4906 }
4907
4908
4909 /* Reset all language specific information still present in TYPE. */
4910
4911 static void
4912 free_lang_data_in_type (tree type)
4913 {
4914 gcc_assert (TYPE_P (type));
4915
4916 /* Give the FE a chance to remove its own data first. */
4917 lang_hooks.free_lang_data (type);
4918
4919 TREE_LANG_FLAG_0 (type) = 0;
4920 TREE_LANG_FLAG_1 (type) = 0;
4921 TREE_LANG_FLAG_2 (type) = 0;
4922 TREE_LANG_FLAG_3 (type) = 0;
4923 TREE_LANG_FLAG_4 (type) = 0;
4924 TREE_LANG_FLAG_5 (type) = 0;
4925 TREE_LANG_FLAG_6 (type) = 0;
4926
4927 if (TREE_CODE (type) == FUNCTION_TYPE)
4928 {
4929 /* Remove the const and volatile qualifiers from arguments. The
4930 C++ front end removes them, but the C front end does not,
4931 leading to false ODR violation errors when merging two
4932 instances of the same function signature compiled by
4933 different front ends. */
4934 tree p;
4935
4936 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4937 {
4938 tree arg_type = TREE_VALUE (p);
4939
4940 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4941 {
4942 int quals = TYPE_QUALS (arg_type)
4943 & ~TYPE_QUAL_CONST
4944 & ~TYPE_QUAL_VOLATILE;
4945 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4946 free_lang_data_in_type (TREE_VALUE (p));
4947 }
4948 }
4949 }
4950
4951 /* Remove members that are not actually FIELD_DECLs from the field
4952 list of an aggregate. These occur in C++. */
4953 if (RECORD_OR_UNION_TYPE_P (type))
4954 {
4955 tree prev, member;
4956
4957 /* Note that TYPE_FIELDS can be shared across distinct
4958 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4959 to be removed, we cannot set its TREE_CHAIN to NULL.
4960 Otherwise, we would not be able to find all the other fields
4961 in the other instances of this TREE_TYPE.
4962
4963 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4964 prev = NULL_TREE;
4965 member = TYPE_FIELDS (type);
4966 while (member)
4967 {
4968 if (TREE_CODE (member) == FIELD_DECL
4969 || TREE_CODE (member) == TYPE_DECL)
4970 {
4971 if (prev)
4972 TREE_CHAIN (prev) = member;
4973 else
4974 TYPE_FIELDS (type) = member;
4975 prev = member;
4976 }
4977
4978 member = TREE_CHAIN (member);
4979 }
4980
4981 if (prev)
4982 TREE_CHAIN (prev) = NULL_TREE;
4983 else
4984 TYPE_FIELDS (type) = NULL_TREE;
4985
4986 TYPE_METHODS (type) = NULL_TREE;
4987 if (TYPE_BINFO (type))
4988 free_lang_data_in_binfo (TYPE_BINFO (type));
4989 }
4990 else
4991 {
4992 /* For non-aggregate types, clear out the language slot (which
4993 overloads TYPE_BINFO). */
4994 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4995
4996 if (INTEGRAL_TYPE_P (type)
4997 || SCALAR_FLOAT_TYPE_P (type)
4998 || FIXED_POINT_TYPE_P (type))
4999 {
5000 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5001 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5002 }
5003 }
5004
5005 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5006 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5007
5008 if (TYPE_CONTEXT (type)
5009 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5010 {
5011 tree ctx = TYPE_CONTEXT (type);
5012 do
5013 {
5014 ctx = BLOCK_SUPERCONTEXT (ctx);
5015 }
5016 while (ctx && TREE_CODE (ctx) == BLOCK);
5017 TYPE_CONTEXT (type) = ctx;
5018 }
5019 }
5020
5021
5022 /* Return true if DECL may need an assembler name to be set. */
5023
5024 static inline bool
5025 need_assembler_name_p (tree decl)
5026 {
5027 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5028 if (TREE_CODE (decl) != FUNCTION_DECL
5029 && TREE_CODE (decl) != VAR_DECL)
5030 return false;
5031
5032 /* If DECL already has its assembler name set, it does not need a
5033 new one. */
5034 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5035 || DECL_ASSEMBLER_NAME_SET_P (decl))
5036 return false;
5037
5038 /* Abstract decls do not need an assembler name. */
5039 if (DECL_ABSTRACT (decl))
5040 return false;
5041
5042 /* For VAR_DECLs, only static, public and external symbols need an
5043 assembler name. */
5044 if (TREE_CODE (decl) == VAR_DECL
5045 && !TREE_STATIC (decl)
5046 && !TREE_PUBLIC (decl)
5047 && !DECL_EXTERNAL (decl))
5048 return false;
5049
5050 if (TREE_CODE (decl) == FUNCTION_DECL)
5051 {
5052 /* Do not set assembler name on builtins. Allow RTL expansion to
5053 decide whether to expand inline or via a regular call. */
5054 if (DECL_BUILT_IN (decl)
5055 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5056 return false;
5057
5058 /* Functions represented in the callgraph need an assembler name. */
5059 if (cgraph_get_node (decl) != NULL)
5060 return true;
5061
5062 /* Unused and not public functions don't need an assembler name. */
5063 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5064 return false;
5065 }
5066
5067 return true;
5068 }
5069
5070
5071 /* Reset all language specific information still present in symbol
5072 DECL. */
5073
5074 static void
5075 free_lang_data_in_decl (tree decl)
5076 {
5077 gcc_assert (DECL_P (decl));
5078
5079 /* Give the FE a chance to remove its own data first. */
5080 lang_hooks.free_lang_data (decl);
5081
5082 TREE_LANG_FLAG_0 (decl) = 0;
5083 TREE_LANG_FLAG_1 (decl) = 0;
5084 TREE_LANG_FLAG_2 (decl) = 0;
5085 TREE_LANG_FLAG_3 (decl) = 0;
5086 TREE_LANG_FLAG_4 (decl) = 0;
5087 TREE_LANG_FLAG_5 (decl) = 0;
5088 TREE_LANG_FLAG_6 (decl) = 0;
5089
5090 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5091 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5092 if (TREE_CODE (decl) == FIELD_DECL)
5093 {
5094 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5095 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5096 DECL_QUALIFIER (decl) = NULL_TREE;
5097 }
5098
5099 if (TREE_CODE (decl) == FUNCTION_DECL)
5100 {
5101 struct cgraph_node *node;
5102 if (!(node = cgraph_get_node (decl))
5103 || (!node->definition && !node->clones))
5104 {
5105 if (node)
5106 cgraph_release_function_body (node);
5107 else
5108 {
5109 release_function_body (decl);
5110 DECL_ARGUMENTS (decl) = NULL;
5111 DECL_RESULT (decl) = NULL;
5112 DECL_INITIAL (decl) = error_mark_node;
5113 }
5114 }
5115 if (gimple_has_body_p (decl))
5116 {
5117 tree t;
5118
5119 /* If DECL has a gimple body, then the context for its
5120 arguments must be DECL. Otherwise, it doesn't really
5121 matter, as we will not be emitting any code for DECL. In
5122 general, there may be other instances of DECL created by
5123 the front end and since PARM_DECLs are generally shared,
5124 their DECL_CONTEXT changes as the replicas of DECL are
5125 created. The only time where DECL_CONTEXT is important
5126 is for the FUNCTION_DECLs that have a gimple body (since
5127 the PARM_DECL will be used in the function's body). */
5128 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5129 DECL_CONTEXT (t) = decl;
5130 }
5131
5132 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5133 At this point, it is not needed anymore. */
5134 DECL_SAVED_TREE (decl) = NULL_TREE;
5135
5136 /* Clear the abstract origin if it refers to a method. Otherwise
5137 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5138 origin will not be output correctly. */
5139 if (DECL_ABSTRACT_ORIGIN (decl)
5140 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5141 && RECORD_OR_UNION_TYPE_P
5142 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5143 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5144
5145 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5146 DECL_VINDEX referring to itself into a vtable slot number as it
5147 should. This happens with functions that are copied and then forgotten
5148 about. Just clear it; it won't matter anymore. */
5149 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5150 DECL_VINDEX (decl) = NULL_TREE;
5151 }
5152 else if (TREE_CODE (decl) == VAR_DECL)
5153 {
5154 if ((DECL_EXTERNAL (decl)
5155 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5156 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5157 DECL_INITIAL (decl) = NULL_TREE;
5158 }
5159 else if (TREE_CODE (decl) == TYPE_DECL
5160 || TREE_CODE (decl) == FIELD_DECL)
5161 DECL_INITIAL (decl) = NULL_TREE;
5162 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5163 && DECL_INITIAL (decl)
5164 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5165 {
5166 /* Strip builtins from the translation-unit BLOCK. We still have targets
5167 without builtin_decl_explicit support, and builtins are also shared
5168 nodes, so we can't use TREE_CHAIN in multiple lists. */
5169 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5170 while (*nextp)
5171 {
5172 tree var = *nextp;
5173 if (TREE_CODE (var) == FUNCTION_DECL
5174 && DECL_BUILT_IN (var))
5175 *nextp = TREE_CHAIN (var);
5176 else
5177 nextp = &TREE_CHAIN (var);
5178 }
5179 }
5180 }
5181
5182
5183 /* Data used when collecting DECLs and TYPEs for language data removal. */
5184
5185 struct free_lang_data_d
5186 {
5187 /* Worklist to avoid excessive recursion. */
5188 vec<tree> worklist;
5189
5190 /* Set of traversed objects. Used to avoid duplicate visits. */
5191 struct pointer_set_t *pset;
5192
5193 /* Array of symbols to process with free_lang_data_in_decl. */
5194 vec<tree> decls;
5195
5196 /* Array of types to process with free_lang_data_in_type. */
5197 vec<tree> types;
5198 };
5199
5200
5201 /* Save all language fields needed to generate proper debug information
5202 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5203
5204 static void
5205 save_debug_info_for_decl (tree t)
5206 {
5207 /*struct saved_debug_info_d *sdi;*/
5208
5209 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5210
5211 /* FIXME. Partial implementation for saving debug info removed. */
5212 }
5213
5214
5215 /* Save all language fields needed to generate proper debug information
5216 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5217
5218 static void
5219 save_debug_info_for_type (tree t)
5220 {
5221 /*struct saved_debug_info_d *sdi;*/
5222
5223 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5224
5225 /* FIXME. Partial implementation for saving debug info removed. */
5226 }
5227
5228
5229 /* Add type or decl T to one of the list of tree nodes that need their
5230 language data removed. The lists are held inside FLD. */
5231
5232 static void
5233 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5234 {
5235 if (DECL_P (t))
5236 {
5237 fld->decls.safe_push (t);
5238 if (debug_info_level > DINFO_LEVEL_TERSE)
5239 save_debug_info_for_decl (t);
5240 }
5241 else if (TYPE_P (t))
5242 {
5243 fld->types.safe_push (t);
5244 if (debug_info_level > DINFO_LEVEL_TERSE)
5245 save_debug_info_for_type (t);
5246 }
5247 else
5248 gcc_unreachable ();
5249 }
5250
5251 /* Push tree node T into FLD->WORKLIST. */
5252
5253 static inline void
5254 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5255 {
5256 if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
5257 fld->worklist.safe_push ((t));
5258 }
5259
5260
5261 /* Operand callback helper for free_lang_data_in_node. *TP is the
5262 subtree operand being considered. */
5263
5264 static tree
5265 find_decls_types_r (tree *tp, int *ws, void *data)
5266 {
5267 tree t = *tp;
5268 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5269
5270 if (TREE_CODE (t) == TREE_LIST)
5271 return NULL_TREE;
5272
5273 /* Language specific nodes will be removed, so there is no need
5274 to gather anything under them. */
5275 if (is_lang_specific (t))
5276 {
5277 *ws = 0;
5278 return NULL_TREE;
5279 }
5280
5281 if (DECL_P (t))
5282 {
5283 /* Note that walk_tree does not traverse every possible field in
5284 decls, so we have to do our own traversals here. */
5285 add_tree_to_fld_list (t, fld);
5286
5287 fld_worklist_push (DECL_NAME (t), fld);
5288 fld_worklist_push (DECL_CONTEXT (t), fld);
5289 fld_worklist_push (DECL_SIZE (t), fld);
5290 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5291
5292 /* We are going to remove everything under DECL_INITIAL for
5293 TYPE_DECLs. No point walking them. */
5294 if (TREE_CODE (t) != TYPE_DECL)
5295 fld_worklist_push (DECL_INITIAL (t), fld);
5296
5297 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5298 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5299
5300 if (TREE_CODE (t) == FUNCTION_DECL)
5301 {
5302 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5303 fld_worklist_push (DECL_RESULT (t), fld);
5304 }
5305 else if (TREE_CODE (t) == TYPE_DECL)
5306 {
5307 fld_worklist_push (DECL_ARGUMENT_FLD (t), fld);
5308 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5309 }
5310 else if (TREE_CODE (t) == FIELD_DECL)
5311 {
5312 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5313 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5314 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5315 fld_worklist_push (DECL_FCONTEXT (t), fld);
5316 }
5317
5318 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5319 && DECL_HAS_VALUE_EXPR_P (t))
5320 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5321
5322 if (TREE_CODE (t) != FIELD_DECL
5323 && TREE_CODE (t) != TYPE_DECL)
5324 fld_worklist_push (TREE_CHAIN (t), fld);
5325 *ws = 0;
5326 }
5327 else if (TYPE_P (t))
5328 {
5329 /* Note that walk_tree does not traverse every possible field in
5330 types, so we have to do our own traversals here. */
5331 add_tree_to_fld_list (t, fld);
5332
5333 if (!RECORD_OR_UNION_TYPE_P (t))
5334 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5335 fld_worklist_push (TYPE_SIZE (t), fld);
5336 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5337 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5338 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5339 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5340 fld_worklist_push (TYPE_NAME (t), fld);
5341 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5342 them and thus do not, and do not want to, reach unused pointer types
5343 this way. */
5344 if (!POINTER_TYPE_P (t))
5345 fld_worklist_push (TYPE_MINVAL (t), fld);
5346 if (!RECORD_OR_UNION_TYPE_P (t))
5347 fld_worklist_push (TYPE_MAXVAL (t), fld);
5348 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5349 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5350 do not, and do not want to, reach unused variants this way. */
5351 if (TYPE_CONTEXT (t))
5352 {
5353 tree ctx = TYPE_CONTEXT (t);
5354 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5355 So push that instead. */
5356 while (ctx && TREE_CODE (ctx) == BLOCK)
5357 ctx = BLOCK_SUPERCONTEXT (ctx);
5358 fld_worklist_push (ctx, fld);
5359 }
5360 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not,
5361 and do not want to, reach unused types this way. */
5362
5363 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5364 {
5365 unsigned i;
5366 tree tem;
5367 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5368 fld_worklist_push (TREE_TYPE (tem), fld);
5369 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5370 if (tem
5371 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5372 && TREE_CODE (tem) == TREE_LIST)
5373 do
5374 {
5375 fld_worklist_push (TREE_VALUE (tem), fld);
5376 tem = TREE_CHAIN (tem);
5377 }
5378 while (tem);
5379 }
5380 if (RECORD_OR_UNION_TYPE_P (t))
5381 {
5382 tree tem;
5383 /* Push all TYPE_FIELDS - interesting and non-interesting
5384 entries can be interleaved. */
5385 tem = TYPE_FIELDS (t);
5386 while (tem)
5387 {
5388 if (TREE_CODE (tem) == FIELD_DECL
5389 || TREE_CODE (tem) == TYPE_DECL)
5390 fld_worklist_push (tem, fld);
5391 tem = TREE_CHAIN (tem);
5392 }
5393 }
5394
5395 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5396 *ws = 0;
5397 }
5398 else if (TREE_CODE (t) == BLOCK)
5399 {
5400 tree tem;
5401 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5402 fld_worklist_push (tem, fld);
5403 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5404 fld_worklist_push (tem, fld);
5405 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5406 }
5407
5408 if (TREE_CODE (t) != IDENTIFIER_NODE
5409 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5410 fld_worklist_push (TREE_TYPE (t), fld);
5411
5412 return NULL_TREE;
5413 }
5414
5415
5416 /* Find decls and types in T. */
5417
5418 static void
5419 find_decls_types (tree t, struct free_lang_data_d *fld)
5420 {
5421 while (1)
5422 {
5423 if (!pointer_set_contains (fld->pset, t))
5424 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5425 if (fld->worklist.is_empty ())
5426 break;
5427 t = fld->worklist.pop ();
5428 }
5429 }
5430
5431 /* Translate all the types in LIST into the corresponding runtime
5432 types. */
5433
5434 static tree
5435 get_eh_types_for_runtime (tree list)
5436 {
5437 tree head, prev;
5438
5439 if (list == NULL_TREE)
5440 return NULL_TREE;
5441
5442 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5443 prev = head;
5444 list = TREE_CHAIN (list);
5445 while (list)
5446 {
5447 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5448 TREE_CHAIN (prev) = n;
5449 prev = TREE_CHAIN (prev);
5450 list = TREE_CHAIN (list);
5451 }
5452
5453 return head;
5454 }
5455
5456
5457 /* Find decls and types referenced in EH region R and store them in
5458 FLD->DECLS and FLD->TYPES. */
5459
5460 static void
5461 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5462 {
5463 switch (r->type)
5464 {
5465 case ERT_CLEANUP:
5466 break;
5467
5468 case ERT_TRY:
5469 {
5470 eh_catch c;
5471
5472 /* The types referenced in each catch must first be changed to the
5473 EH types used at runtime. This removes references to FE types
5474 in the region. */
5475 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5476 {
5477 c->type_list = get_eh_types_for_runtime (c->type_list);
5478 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5479 }
5480 }
5481 break;
5482
5483 case ERT_ALLOWED_EXCEPTIONS:
5484 r->u.allowed.type_list
5485 = get_eh_types_for_runtime (r->u.allowed.type_list);
5486 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5487 break;
5488
5489 case ERT_MUST_NOT_THROW:
5490 walk_tree (&r->u.must_not_throw.failure_decl,
5491 find_decls_types_r, fld, fld->pset);
5492 break;
5493 }
5494 }
5495
5496
5497 /* Find decls and types referenced in cgraph node N and store them in
5498 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5499 look for *every* kind of DECL and TYPE node reachable from N,
5500 including those embedded inside types and decls (i.e., TYPE_DECLs,
5501 NAMESPACE_DECLs, etc.). */
5502
5503 static void
5504 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5505 {
5506 basic_block bb;
5507 struct function *fn;
5508 unsigned ix;
5509 tree t;
5510
5511 find_decls_types (n->decl, fld);
5512
5513 if (!gimple_has_body_p (n->decl))
5514 return;
5515
5516 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5517
5518 fn = DECL_STRUCT_FUNCTION (n->decl);
5519
5520 /* Traverse locals. */
5521 FOR_EACH_LOCAL_DECL (fn, ix, t)
5522 find_decls_types (t, fld);
5523
5524 /* Traverse EH regions in FN. */
5525 {
5526 eh_region r;
5527 FOR_ALL_EH_REGION_FN (r, fn)
5528 find_decls_types_in_eh_region (r, fld);
5529 }
5530
5531 /* Traverse every statement in FN. */
5532 FOR_EACH_BB_FN (bb, fn)
5533 {
5534 gimple_stmt_iterator si;
5535 unsigned i;
5536
5537 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5538 {
5539 gimple phi = gsi_stmt (si);
5540
5541 for (i = 0; i < gimple_phi_num_args (phi); i++)
5542 {
5543 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5544 find_decls_types (*arg_p, fld);
5545 }
5546 }
5547
5548 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5549 {
5550 gimple stmt = gsi_stmt (si);
5551
5552 if (is_gimple_call (stmt))
5553 find_decls_types (gimple_call_fntype (stmt), fld);
5554
5555 for (i = 0; i < gimple_num_ops (stmt); i++)
5556 {
5557 tree arg = gimple_op (stmt, i);
5558 find_decls_types (arg, fld);
5559 }
5560 }
5561 }
5562 }
5563
5564
5565 /* Find decls and types referenced in varpool node N and store them in
5566 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5567 look for *every* kind of DECL and TYPE node reachable from N,
5568 including those embedded inside types and decls (i.e., TYPE_DECLs,
5569 NAMESPACE_DECLs, etc.). */
5570
5571 static void
5572 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5573 {
5574 find_decls_types (v->decl, fld);
5575 }
5576
5577 /* If T needs an assembler name, have one created for it. */
5578
5579 void
5580 assign_assembler_name_if_neeeded (tree t)
5581 {
5582 if (need_assembler_name_p (t))
5583 {
5584 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5585 diagnostics that use input_location to show locus
5586 information. The problem here is that, at this point,
5587 input_location is generally anchored to the end of the file
5588 (since the parser is long gone), so we don't have a good
5589 position to pin it to.
5590
5591 To alleviate this problem, this uses the location of T's
5592 declaration. Examples of this are
5593 testsuite/g++.dg/template/cond2.C and
5594 testsuite/g++.dg/template/pr35240.C. */
5595 location_t saved_location = input_location;
5596 input_location = DECL_SOURCE_LOCATION (t);
5597
5598 decl_assembler_name (t);
5599
5600 input_location = saved_location;
5601 }
5602 }
5603
5604
5605 /* Free language specific information for every operand and expression
5606 in every node of the call graph. This process operates in three stages:
5607
5608 1- Every callgraph node and varpool node is traversed looking for
5609 decls and types embedded in them. This is a more exhaustive
5610 search than that done by find_referenced_vars, because it will
5611 also collect individual fields, decls embedded in types, etc.
5612
5613 2- All the decls found are sent to free_lang_data_in_decl.
5614
5615 3- All the types found are sent to free_lang_data_in_type.
5616
5617 The ordering between decls and types is important because
5618 free_lang_data_in_decl sets assembler names, which includes
5619 mangling. So types cannot be freed up until assembler names have
5620 been set up. */
5621
5622 static void
5623 free_lang_data_in_cgraph (void)
5624 {
5625 struct cgraph_node *n;
5626 varpool_node *v;
5627 struct free_lang_data_d fld;
5628 tree t;
5629 unsigned i;
5630 alias_pair *p;
5631
5632 /* Initialize sets and arrays to store referenced decls and types. */
5633 fld.pset = pointer_set_create ();
5634 fld.worklist.create (0);
5635 fld.decls.create (100);
5636 fld.types.create (100);
5637
5638 /* Find decls and types in the body of every function in the callgraph. */
5639 FOR_EACH_FUNCTION (n)
5640 find_decls_types_in_node (n, &fld);
5641
5642 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5643 find_decls_types (p->decl, &fld);
5644
5645 /* Find decls and types in every varpool symbol. */
5646 FOR_EACH_VARIABLE (v)
5647 find_decls_types_in_var (v, &fld);
5648
5649 /* Set the assembler name on every decl found. We need to do this
5650 now because free_lang_data_in_decl will invalidate data needed
5651 for mangling. This breaks mangling on interdependent decls. */
5652 FOR_EACH_VEC_ELT (fld.decls, i, t)
5653 assign_assembler_name_if_neeeded (t);
5654
5655 /* Traverse every decl found freeing its language data. */
5656 FOR_EACH_VEC_ELT (fld.decls, i, t)
5657 free_lang_data_in_decl (t);
5658
5659 /* Traverse every type found freeing its language data. */
5660 FOR_EACH_VEC_ELT (fld.types, i, t)
5661 free_lang_data_in_type (t);
5662
5663 pointer_set_destroy (fld.pset);
5664 fld.worklist.release ();
5665 fld.decls.release ();
5666 fld.types.release ();
5667 }
5668
5669
5670 /* Free resources that are used by the FE but are not needed once it is done. */
5671
5672 static unsigned
5673 free_lang_data (void)
5674 {
5675 unsigned i;
5676
5677 /* If we are the LTO frontend we have freed lang-specific data already. */
5678 if (in_lto_p
5679 || !flag_generate_lto)
5680 return 0;
5681
5682 /* Allocate and assign alias sets to the standard integer types
5683 while the slots are still laid out the way the frontends generated them. */
5684 for (i = 0; i < itk_none; ++i)
5685 if (integer_types[i])
5686 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5687
5688 /* Traverse the IL resetting language specific information for
5689 operands, expressions, etc. */
5690 free_lang_data_in_cgraph ();
5691
5692 /* Create gimple variants for common types. */
5693 ptrdiff_type_node = integer_type_node;
5694 fileptr_type_node = ptr_type_node;
5695
5696 /* Reset some langhooks. Do not reset types_compatible_p, it may
5697 still be used indirectly via the get_alias_set langhook. */
5698 lang_hooks.dwarf_name = lhd_dwarf_name;
5699 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5700 /* We do not want the default decl_assembler_name implementation;
5701 rather, once we have fixed everything, we want a wrapper around it
5702 that asserts all non-local symbols already got their assembler
5703 name and only produces assembler names for local symbols. Or, better,
5704 make sure we never call decl_assembler_name on local symbols and
5705 devise a separate, middle-end private scheme for it. */
5706
5707 /* Reset diagnostic machinery. */
5708 tree_diagnostics_defaults (global_dc);
5709
5710 return 0;
5711 }
5712
5713
5714 namespace {
5715
5716 const pass_data pass_data_ipa_free_lang_data =
5717 {
5718 SIMPLE_IPA_PASS, /* type */
5719 "*free_lang_data", /* name */
5720 OPTGROUP_NONE, /* optinfo_flags */
5721 true, /* has_execute */
5722 TV_IPA_FREE_LANG_DATA, /* tv_id */
5723 0, /* properties_required */
5724 0, /* properties_provided */
5725 0, /* properties_destroyed */
5726 0, /* todo_flags_start */
5727 0, /* todo_flags_finish */
5728 };
5729
5730 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5731 {
5732 public:
5733 pass_ipa_free_lang_data (gcc::context *ctxt)
5734 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5735 {}
5736
5737 /* opt_pass methods: */
5738 virtual unsigned int execute (function *) { return free_lang_data (); }
5739
5740 }; // class pass_ipa_free_lang_data
5741
5742 } // anon namespace
5743
5744 simple_ipa_opt_pass *
5745 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5746 {
5747 return new pass_ipa_free_lang_data (ctxt);
5748 }
5749
5750 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5751 ATTR_NAME. Also used internally by remove_attribute(). */
5752 bool
5753 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5754 {
5755 size_t ident_len = IDENTIFIER_LENGTH (ident);
5756
5757 if (ident_len == attr_len)
5758 {
5759 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5760 return true;
5761 }
5762 else if (ident_len == attr_len + 4)
5763 {
5764 /* There is the possibility that ATTR_NAME is 'text' and IDENT is
5765 '__text__'. */
5766 const char *p = IDENTIFIER_POINTER (ident);
5767 if (p[0] == '_' && p[1] == '_'
5768 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5769 && strncmp (attr_name, p + 2, attr_len) == 0)
5770 return true;
5771 }
5772
5773 return false;
5774 }
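
/* Usage sketch (editorial illustration, not part of the original source):
   callers normally go through the is_attribute_p wrapper in tree.h, which
   supplies the string length.  IDENT is a hypothetical attribute
   identifier.

     if (is_attribute_p ("packed", ident))

   The test matches both the 'packed' and '__packed__' spellings.  */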
5775
5776 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5777 of ATTR_NAME, and LIST is not NULL_TREE. */
5778 tree
5779 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5780 {
5781 while (list)
5782 {
5783 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5784
5785 if (ident_len == attr_len)
5786 {
5787 if (!strcmp (attr_name,
5788 IDENTIFIER_POINTER (get_attribute_name (list))))
5789 break;
5790 }
5791 /* TODO: If we made sure that attributes were stored in the
5792 canonical form without '__...__' (i.e., as in 'text' as opposed
5793 to '__text__') then we could avoid the following case. */
5794 else if (ident_len == attr_len + 4)
5795 {
5796 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5797 if (p[0] == '_' && p[1] == '_'
5798 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5799 && strncmp (attr_name, p + 2, attr_len) == 0)
5800 break;
5801 }
5802 list = TREE_CHAIN (list);
5803 }
5804
5805 return list;
5806 }
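
/* Usage sketch (editorial illustration, not part of the original source):
   callers normally go through the lookup_attribute wrapper in tree.h.
   TYPE is a hypothetical type node.

     tree a = lookup_attribute ("aligned", TYPE_ATTRIBUTES (type));

   If A is non-null, TREE_VALUE (a) is the attribute's argument list and
   TREE_VALUE (TREE_VALUE (a)) the first argument, if any.  */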
5807
5808 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5809 return a pointer to the first element of LIST whose attribute name
5810 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5811 '__text__'). */
5812
5813 tree
5814 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5815 tree list)
5816 {
5817 while (list)
5818 {
5819 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5820
5821 if (attr_len > ident_len)
5822 {
5823 list = TREE_CHAIN (list);
5824 continue;
5825 }
5826
5827 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5828
5829 if (strncmp (attr_name, p, attr_len) == 0)
5830 break;
5831
5832 /* TODO: If we made sure that attributes were stored in the
5833 canonical form without '__...__' (i.e., as in 'text' as opposed
5834 to '__text__') then we could avoid the following case. */
5835 if (p[0] == '_' && p[1] == '_' &&
5836 strncmp (attr_name, p + 2, attr_len) == 0)
5837 break;
5838
5839 list = TREE_CHAIN (list);
5840 }
5841
5842 return list;
5843 }
5844
5845
5846 /* A variant of lookup_attribute() that can be used with an identifier
5847 as the first argument, and where the identifier can be either
5848 'text' or '__text__'.
5849
5850 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5851 return a pointer to the attribute's list element if the attribute
5852 is part of the list, or NULL_TREE if not found. If the attribute
5853 appears more than once, this only returns the first occurrence; the
5854 TREE_CHAIN of the return value should be passed back in if further
5855 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5856 can be in the form 'text' or '__text__'. */
5857 static tree
5858 lookup_ident_attribute (tree attr_identifier, tree list)
5859 {
5860 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5861
5862 while (list)
5863 {
5864 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5865 == IDENTIFIER_NODE);
5866
5867 /* Identifiers can be compared directly for equality. */
5868 if (attr_identifier == get_attribute_name (list))
5869 break;
5870
5871 /* If they are not equal, they may still be one in the form
5872 'text' while the other one is in the form '__text__'. TODO:
5873 If we were storing attributes in normalized 'text' form, then
5874 this could all go away and we could take full advantage of
5875 the fact that we're comparing identifiers. :-) */
5876 {
5877 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5878 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5879
5880 if (ident_len == attr_len + 4)
5881 {
5882 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5883 const char *q = IDENTIFIER_POINTER (attr_identifier);
5884 if (p[0] == '_' && p[1] == '_'
5885 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5886 && strncmp (q, p + 2, attr_len) == 0)
5887 break;
5888 }
5889 else if (ident_len + 4 == attr_len)
5890 {
5891 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5892 const char *q = IDENTIFIER_POINTER (attr_identifier);
5893 if (q[0] == '_' && q[1] == '_'
5894 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5895 && strncmp (q + 2, p, ident_len) == 0)
5896 break;
5897 }
5898 }
5899 list = TREE_CHAIN (list);
5900 }
5901
5902 return list;
5903 }
5904
5905 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5906 modified list. */
5907
5908 tree
5909 remove_attribute (const char *attr_name, tree list)
5910 {
5911 tree *p;
5912 size_t attr_len = strlen (attr_name);
5913
5914 gcc_checking_assert (attr_name[0] != '_');
5915
5916 for (p = &list; *p; )
5917 {
5918 tree l = *p;
5919 /* TODO: If we were storing attributes in normalized form, here
5920 we could use a simple strcmp(). */
5921 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5922 *p = TREE_CHAIN (l);
5923 else
5924 p = &TREE_CHAIN (l);
5925 }
5926
5927 return list;
5928 }
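
/* Illustrative usage (editorial sketch, not part of the original source).
   Since the list head may be removed, the result must be stored back;
   DECL below is a hypothetical declaration:

       DECL_ATTRIBUTES (decl)
         = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));

   As the assert above requires, the name is passed without the leading
   and trailing underscores.  */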
5929
5930 /* Return an attribute list that is the union of a1 and a2. */
5931
5932 tree
5933 merge_attributes (tree a1, tree a2)
5934 {
5935 tree attributes;
5936
5937 /* Either one unset? Take the set one. */
5938
5939 if ((attributes = a1) == 0)
5940 attributes = a2;
5941
5942 /* One that completely contains the other? Take it. */
5943
5944 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5945 {
5946 if (attribute_list_contained (a2, a1))
5947 attributes = a2;
5948 else
5949 {
5950 /* Pick the longest list, and hang on the other list. */
5951
5952 if (list_length (a1) < list_length (a2))
5953 attributes = a2, a2 = a1;
5954
5955 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5956 {
5957 tree a;
5958 for (a = lookup_ident_attribute (get_attribute_name (a2),
5959 attributes);
5960 a != NULL_TREE && !attribute_value_equal (a, a2);
5961 a = lookup_ident_attribute (get_attribute_name (a2),
5962 TREE_CHAIN (a)))
5963 ;
5964 if (a == NULL_TREE)
5965 {
5966 a1 = copy_node (a2);
5967 TREE_CHAIN (a1) = attributes;
5968 attributes = a1;
5969 }
5970 }
5971 }
5972 }
5973 return attributes;
5974 }
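
/* Illustrative usage (editorial sketch, not part of the original source).
   Attribute lists are TREE_LIST chains with the name in TREE_PURPOSE and
   the arguments in TREE_VALUE, so two single-entry lists built by hand
   merge into a two-entry result:

       tree a1 = tree_cons (get_identifier ("packed"), NULL_TREE, NULL_TREE);
       tree a2 = tree_cons (get_identifier ("aligned"),
                            build_tree_list (NULL_TREE, size_int (8)),
                            NULL_TREE);
       tree merged = merge_attributes (a1, a2);

   An attribute already present with an equal value is not duplicated.  */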
5975
5976 /* Given types T1 and T2, merge their attributes and return
5977 the result. */
5978
5979 tree
5980 merge_type_attributes (tree t1, tree t2)
5981 {
5982 return merge_attributes (TYPE_ATTRIBUTES (t1),
5983 TYPE_ATTRIBUTES (t2));
5984 }
5985
5986 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5987 the result. */
5988
5989 tree
5990 merge_decl_attributes (tree olddecl, tree newdecl)
5991 {
5992 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5993 DECL_ATTRIBUTES (newdecl));
5994 }
5995
5996 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5997
5998 /* Specialization of merge_decl_attributes for various Windows targets.
5999
6000 This handles the following situation:
6001
6002 __declspec (dllimport) int foo;
6003 int foo;
6004
6005 The second instance of `foo' nullifies the dllimport. */
6006
6007 tree
6008 merge_dllimport_decl_attributes (tree old, tree new_tree)
6009 {
6010 tree a;
6011 int delete_dllimport_p = 1;
6012
6013 /* What we need to do here is remove from `old' dllimport if it doesn't
6014 appear in `new'. dllimport behaves like extern: if a declaration is
6015 marked dllimport and a definition appears later, then the object
6016 is not dllimport'd. We also remove a `new' dllimport if the old list
6017 contains dllexport: dllexport always overrides dllimport, regardless
6018 of the order of declaration. */
6019 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6020 delete_dllimport_p = 0;
6021 else if (DECL_DLLIMPORT_P (new_tree)
6022 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6023 {
6024 DECL_DLLIMPORT_P (new_tree) = 0;
6025 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6026 "dllimport ignored", new_tree);
6027 }
6028 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6029 {
6030 /* Warn about overriding a symbol that has already been used, e.g.:
6031 extern int __attribute__ ((dllimport)) foo;
6032 int* bar () {return &foo;}
6033 int foo;
6034 */
6035 if (TREE_USED (old))
6036 {
6037 warning (0, "%q+D redeclared without dllimport attribute "
6038 "after being referenced with dll linkage", new_tree);
6039 /* If we have used a variable's address with dllimport linkage,
6040 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6041 decl may already have had TREE_CONSTANT computed.
6042 We still remove the attribute so that assembler code refers
6043 to '&foo' rather than '_imp__foo'. */
6044 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6045 DECL_DLLIMPORT_P (new_tree) = 1;
6046 }
6047
6048 /* Let an inline definition silently override the external reference,
6049 but otherwise warn about attribute inconsistency. */
6050 else if (TREE_CODE (new_tree) == VAR_DECL
6051 || !DECL_DECLARED_INLINE_P (new_tree))
6052 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6053 "previous dllimport ignored", new_tree);
6054 }
6055 else
6056 delete_dllimport_p = 0;
6057
6058 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6059
6060 if (delete_dllimport_p)
6061 a = remove_attribute ("dllimport", a);
6062
6063 return a;
6064 }
6065
6066 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6067 struct attribute_spec.handler. */
6068
6069 tree
6070 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6071 bool *no_add_attrs)
6072 {
6073 tree node = *pnode;
6074 bool is_dllimport;
6075
6076 /* These attributes may apply to structure and union types being created,
6077 but otherwise should pass to the declaration involved. */
6078 if (!DECL_P (node))
6079 {
6080 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6081 | (int) ATTR_FLAG_ARRAY_NEXT))
6082 {
6083 *no_add_attrs = true;
6084 return tree_cons (name, args, NULL_TREE);
6085 }
6086 if (TREE_CODE (node) == RECORD_TYPE
6087 || TREE_CODE (node) == UNION_TYPE)
6088 {
6089 node = TYPE_NAME (node);
6090 if (!node)
6091 return NULL_TREE;
6092 }
6093 else
6094 {
6095 warning (OPT_Wattributes, "%qE attribute ignored",
6096 name);
6097 *no_add_attrs = true;
6098 return NULL_TREE;
6099 }
6100 }
6101
6102 if (TREE_CODE (node) != FUNCTION_DECL
6103 && TREE_CODE (node) != VAR_DECL
6104 && TREE_CODE (node) != TYPE_DECL)
6105 {
6106 *no_add_attrs = true;
6107 warning (OPT_Wattributes, "%qE attribute ignored",
6108 name);
6109 return NULL_TREE;
6110 }
6111
6112 if (TREE_CODE (node) == TYPE_DECL
6113 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6114 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6115 {
6116 *no_add_attrs = true;
6117 warning (OPT_Wattributes, "%qE attribute ignored",
6118 name);
6119 return NULL_TREE;
6120 }
6121
6122 is_dllimport = is_attribute_p ("dllimport", name);
6123
6124 /* Report error on dllimport ambiguities seen now before they cause
6125 any damage. */
6126 if (is_dllimport)
6127 {
6128 /* Honor any target-specific overrides. */
6129 if (!targetm.valid_dllimport_attribute_p (node))
6130 *no_add_attrs = true;
6131
6132 else if (TREE_CODE (node) == FUNCTION_DECL
6133 && DECL_DECLARED_INLINE_P (node))
6134 {
6135 warning (OPT_Wattributes, "inline function %q+D declared as "
6136 " dllimport: attribute ignored", node);
6137 *no_add_attrs = true;
6138 }
6139 /* Like MS, treat definition of dllimported variables and
6140 non-inlined functions on declaration as syntax errors. */
6141 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6142 {
6143 error ("function %q+D definition is marked dllimport", node);
6144 *no_add_attrs = true;
6145 }
6146
6147 else if (TREE_CODE (node) == VAR_DECL)
6148 {
6149 if (DECL_INITIAL (node))
6150 {
6151 error ("variable %q+D definition is marked dllimport",
6152 node);
6153 *no_add_attrs = true;
6154 }
6155
6156 /* `extern' needn't be specified with dllimport.
6157 Specify `extern' now and hope for the best. Sigh. */
6158 DECL_EXTERNAL (node) = 1;
6159 /* Also, implicitly give global scope to dllimport'd variables
6160 declared within a function, unless they are declared static. */
6161 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6162 TREE_PUBLIC (node) = 1;
6163 }
6164
6165 if (*no_add_attrs == false)
6166 DECL_DLLIMPORT_P (node) = 1;
6167 }
6168 else if (TREE_CODE (node) == FUNCTION_DECL
6169 && DECL_DECLARED_INLINE_P (node)
6170 && flag_keep_inline_dllexport)
6171 /* An exported function, even if inline, must be emitted. */
6172 DECL_EXTERNAL (node) = 0;
6173
6174 /* Report error if symbol is not accessible at global scope. */
6175 if (!TREE_PUBLIC (node)
6176 && (TREE_CODE (node) == VAR_DECL
6177 || TREE_CODE (node) == FUNCTION_DECL))
6178 {
6179 error ("external linkage required for symbol %q+D because of "
6180 "%qE attribute", node, name);
6181 *no_add_attrs = true;
6182 }
6183
6184 /* A dllexport'd entity must have default visibility so that other
6185 program units (shared libraries or the main executable) can see
6186 it. A dllimport'd entity must have default visibility so that
6187 the linker knows that undefined references within this program
6188 unit can be resolved by the dynamic linker. */
6189 if (!*no_add_attrs)
6190 {
6191 if (DECL_VISIBILITY_SPECIFIED (node)
6192 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6193 error ("%qE implies default visibility, but %qD has already "
6194 "been declared with a different visibility",
6195 name, node);
6196 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6197 DECL_VISIBILITY_SPECIFIED (node) = 1;
6198 }
6199
6200 return NULL_TREE;
6201 }
6202
6203 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6204 \f
6205 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6206 of the various TYPE_QUAL values. */
6207
6208 static void
6209 set_type_quals (tree type, int type_quals)
6210 {
6211 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6212 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6213 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6214 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6215 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6216 }
6217
6218 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6219
6220 bool
6221 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6222 {
6223 return (TYPE_QUALS (cand) == type_quals
6224 && TYPE_NAME (cand) == TYPE_NAME (base)
6225 /* Apparently this is needed for Objective-C. */
6226 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6227 /* Check alignment. */
6228 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6229 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6230 TYPE_ATTRIBUTES (base)));
6231 }
6232
6233 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6234
6235 static bool
6236 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6237 {
6238 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6239 && TYPE_NAME (cand) == TYPE_NAME (base)
6240 /* Apparently this is needed for Objective-C. */
6241 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6242 /* Check alignment. */
6243 && TYPE_ALIGN (cand) == align
6244 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6245 TYPE_ATTRIBUTES (base)));
6246 }
6247
6248 /* This function checks to see if TYPE matches the size of one of the
6249 built-in atomic types, and returns that core atomic type. */
6250
6251 static tree
6252 find_atomic_core_type (tree type)
6253 {
6254 tree base_atomic_type;
6255
6256 /* Only handle complete types. */
6257 if (TYPE_SIZE (type) == NULL_TREE)
6258 return NULL_TREE;
6259
6260 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6261 switch (type_size)
6262 {
6263 case 8:
6264 base_atomic_type = atomicQI_type_node;
6265 break;
6266
6267 case 16:
6268 base_atomic_type = atomicHI_type_node;
6269 break;
6270
6271 case 32:
6272 base_atomic_type = atomicSI_type_node;
6273 break;
6274
6275 case 64:
6276 base_atomic_type = atomicDI_type_node;
6277 break;
6278
6279 case 128:
6280 base_atomic_type = atomicTI_type_node;
6281 break;
6282
6283 default:
6284 base_atomic_type = NULL_TREE;
6285 }
6286
6287 return base_atomic_type;
6288 }
6289
6290 /* Return a version of the TYPE, qualified as indicated by the
6291 TYPE_QUALS, if one exists. If no qualified version exists yet,
6292 return NULL_TREE. */
6293
6294 tree
6295 get_qualified_type (tree type, int type_quals)
6296 {
6297 tree t;
6298
6299 if (TYPE_QUALS (type) == type_quals)
6300 return type;
6301
6302 /* Search the chain of variants to see if there is already one there just
6303 like the one we need to have. If so, use that existing one. We must
6304 preserve the TYPE_NAME, since there is code that depends on this. */
6305 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6306 if (check_qualified_type (t, type, type_quals))
6307 return t;
6308
6309 return NULL_TREE;
6310 }
6311
6312 /* Like get_qualified_type, but creates the type if it does not
6313 exist. This function never returns NULL_TREE. */
6314
6315 tree
6316 build_qualified_type (tree type, int type_quals)
6317 {
6318 tree t;
6319
6320 /* See if we already have the appropriate qualified variant. */
6321 t = get_qualified_type (type, type_quals);
6322
6323 /* If not, build it. */
6324 if (!t)
6325 {
6326 t = build_variant_type_copy (type);
6327 set_type_quals (t, type_quals);
6328
6329 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6330 {
6331 /* See if this object can map to a basic atomic type. */
6332 tree atomic_type = find_atomic_core_type (type);
6333 if (atomic_type)
6334 {
6335 /* Ensure the alignment of this type is compatible with
6336 the required alignment of the atomic type. */
6337 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6338 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6339 }
6340 }
6341
6342 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6343 /* Propagate structural equality. */
6344 SET_TYPE_STRUCTURAL_EQUALITY (t);
6345 else if (TYPE_CANONICAL (type) != type)
6346 /* Build the underlying canonical type, since it is different
6347 from TYPE. */
6348 {
6349 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6350 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6351 }
6352 else
6353 /* T is its own canonical type. */
6354 TYPE_CANONICAL (t) = t;
6355
6356 }
6357
6358 return t;
6359 }
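
/* Illustrative usage (editorial sketch, not part of the original source):

       tree cv_int = build_qualified_type (integer_type_node,
                                           TYPE_QUAL_CONST
                                           | TYPE_QUAL_VOLATILE);

   A second call with the same arguments returns the very same node,
   because get_qualified_type finds it on integer_type_node's variant
   chain.  */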
6360
6361 /* Create a variant of type T with alignment ALIGN. */
6362
6363 tree
6364 build_aligned_type (tree type, unsigned int align)
6365 {
6366 tree t;
6367
6368 if (TYPE_PACKED (type)
6369 || TYPE_ALIGN (type) == align)
6370 return type;
6371
6372 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6373 if (check_aligned_type (t, type, align))
6374 return t;
6375
6376 t = build_variant_type_copy (type);
6377 TYPE_ALIGN (t) = align;
6378
6379 return t;
6380 }
6381
6382 /* Create a new distinct copy of TYPE. The new type is made its own
6383 MAIN_VARIANT. If TYPE requires structural equality checks, the
6384 resulting type requires structural equality checks; otherwise, its
6385 TYPE_CANONICAL points to itself. */
6386
6387 tree
6388 build_distinct_type_copy (tree type)
6389 {
6390 tree t = copy_node (type);
6391
6392 TYPE_POINTER_TO (t) = 0;
6393 TYPE_REFERENCE_TO (t) = 0;
6394
6395 /* Set the canonical type either to a new equivalence class, or
6396 propagate the need for structural equality checks. */
6397 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6398 SET_TYPE_STRUCTURAL_EQUALITY (t);
6399 else
6400 TYPE_CANONICAL (t) = t;
6401
6402 /* Make it its own variant. */
6403 TYPE_MAIN_VARIANT (t) = t;
6404 TYPE_NEXT_VARIANT (t) = 0;
6405
6406 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6407 whose TREE_TYPE is not t. This can also happen in the Ada
6408 frontend when using subtypes. */
6409
6410 return t;
6411 }
6412
6413 /* Create a new variant of TYPE, equivalent but distinct. This is so
6414 the caller can modify it. TYPE_CANONICAL for the return type will
6415 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6416 are considered equal by the language itself (or that both types
6417 require structural equality checks). */
6418
6419 tree
6420 build_variant_type_copy (tree type)
6421 {
6422 tree t, m = TYPE_MAIN_VARIANT (type);
6423
6424 t = build_distinct_type_copy (type);
6425
6426 /* Since we're building a variant, assume that it is a non-semantic
6427 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6428 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6429
6430 /* Add the new type to the chain of variants of TYPE. */
6431 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6432 TYPE_NEXT_VARIANT (m) = t;
6433 TYPE_MAIN_VARIANT (t) = m;
6434
6435 return t;
6436 }
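
/* Illustrative contrast (editorial sketch, not part of the original source):

       tree v = build_variant_type_copy (integer_type_node);
       tree d = build_distinct_type_copy (integer_type_node);

   V stays on integer_type_node's variant chain and shares its
   TYPE_CANONICAL, so the middle end still treats it as the same type;
   D becomes its own main variant and its own canonical type, i.e. a
   genuinely new type.  */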
6437 \f
6438 /* Return true if the from trees in both tree maps are equal. */
6439
6440 int
6441 tree_map_base_eq (const void *va, const void *vb)
6442 {
6443 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6444 *const b = (const struct tree_map_base *) vb;
6445 return (a->from == b->from);
6446 }
6447
6448 /* Hash a from tree in a tree_map_base. */
6449
6450 unsigned int
6451 tree_map_base_hash (const void *item)
6452 {
6453 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6454 }
6455
6456 /* Return true if this tree map structure is marked for garbage collection
6457 purposes. We simply return true if the from tree is marked, so that this
6458 structure goes away when the from tree goes away. */
6459
6460 int
6461 tree_map_base_marked_p (const void *p)
6462 {
6463 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6464 }
6465
6466 /* Hash a from tree in a tree_map. */
6467
6468 unsigned int
6469 tree_map_hash (const void *item)
6470 {
6471 return (((const struct tree_map *) item)->hash);
6472 }
6473
6474 /* Hash a from tree in a tree_decl_map. */
6475
6476 unsigned int
6477 tree_decl_map_hash (const void *item)
6478 {
6479 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6480 }
6481
6482 /* Return the initialization priority for DECL. */
6483
6484 priority_type
6485 decl_init_priority_lookup (tree decl)
6486 {
6487 symtab_node *snode = symtab_get_node (decl);
6488
6489 if (!snode)
6490 return DEFAULT_INIT_PRIORITY;
6491 return
6492 snode->get_init_priority ();
6493 }
6494
6495 /* Return the finalization priority for DECL. */
6496
6497 priority_type
6498 decl_fini_priority_lookup (tree decl)
6499 {
6500 cgraph_node *node = cgraph_get_node (decl);
6501
6502 if (!node)
6503 return DEFAULT_INIT_PRIORITY;
6504 return
6505 node->get_fini_priority ();
6506 }
6507
6508 /* Set the initialization priority for DECL to PRIORITY. */
6509
6510 void
6511 decl_init_priority_insert (tree decl, priority_type priority)
6512 {
6513 struct symtab_node *snode;
6514
6515 if (priority == DEFAULT_INIT_PRIORITY)
6516 {
6517 snode = symtab_get_node (decl);
6518 if (!snode)
6519 return;
6520 }
6521 else if (TREE_CODE (decl) == VAR_DECL)
6522 snode = varpool_node_for_decl (decl);
6523 else
6524 snode = cgraph_get_create_node (decl);
6525 snode->set_init_priority (priority);
6526 }
6527
6528 /* Set the finalization priority for DECL to PRIORITY. */
6529
6530 void
6531 decl_fini_priority_insert (tree decl, priority_type priority)
6532 {
6533 struct cgraph_node *node;
6534
6535 if (priority == DEFAULT_INIT_PRIORITY)
6536 {
6537 node = cgraph_get_node (decl);
6538 if (!node)
6539 return;
6540 }
6541 else
6542 node = cgraph_get_create_node (decl);
6543 node->set_fini_priority (priority);
6544 }
6545
6546 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6547
6548 static void
6549 print_debug_expr_statistics (void)
6550 {
6551 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6552 (long) htab_size (debug_expr_for_decl),
6553 (long) htab_elements (debug_expr_for_decl),
6554 htab_collisions (debug_expr_for_decl));
6555 }
6556
6557 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6558
6559 static void
6560 print_value_expr_statistics (void)
6561 {
6562 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6563 (long) htab_size (value_expr_for_decl),
6564 (long) htab_elements (value_expr_for_decl),
6565 htab_collisions (value_expr_for_decl));
6566 }
6567
6568 /* Lookup a debug expression for FROM, and return it if we find one. */
6569
6570 tree
6571 decl_debug_expr_lookup (tree from)
6572 {
6573 struct tree_decl_map *h, in;
6574 in.base.from = from;
6575
6576 h = (struct tree_decl_map *)
6577 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6578 if (h)
6579 return h->to;
6580 return NULL_TREE;
6581 }
6582
6583 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6584
6585 void
6586 decl_debug_expr_insert (tree from, tree to)
6587 {
6588 struct tree_decl_map *h;
6589 void **loc;
6590
6591 h = ggc_alloc<tree_decl_map> ();
6592 h->base.from = from;
6593 h->to = to;
6594 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6595 INSERT);
6596 *(struct tree_decl_map **) loc = h;
6597 }
6598
6599 /* Lookup a value expression for FROM, and return it if we find one. */
6600
6601 tree
6602 decl_value_expr_lookup (tree from)
6603 {
6604 struct tree_decl_map *h, in;
6605 in.base.from = from;
6606
6607 h = (struct tree_decl_map *)
6608 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6609 if (h)
6610 return h->to;
6611 return NULL_TREE;
6612 }
6613
6614 /* Insert a mapping FROM->TO in the value expression hashtable. */
6615
6616 void
6617 decl_value_expr_insert (tree from, tree to)
6618 {
6619 struct tree_decl_map *h;
6620 void **loc;
6621
6622 h = ggc_alloc<tree_decl_map> ();
6623 h->base.from = from;
6624 h->to = to;
6625 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6626 INSERT);
6627 *(struct tree_decl_map **) loc = h;
6628 }
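
/* Illustrative usage (editorial sketch, not part of the original source).
   The usual pattern goes through the tree.h accessors, which bottom out
   in the lookup/insert routines above; DECL and REPL are hypothetical
   trees:

       SET_DECL_VALUE_EXPR (decl, repl);
       DECL_HAS_VALUE_EXPR_P (decl) = 1;
       ...
       if (DECL_HAS_VALUE_EXPR_P (decl))
         repl = DECL_VALUE_EXPR (decl);  */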
6629
6630 /* Lookup a vector of debug arguments for FROM, and return it if we
6631 find one. */
6632
6633 vec<tree, va_gc> **
6634 decl_debug_args_lookup (tree from)
6635 {
6636 struct tree_vec_map *h, in;
6637
6638 if (!DECL_HAS_DEBUG_ARGS_P (from))
6639 return NULL;
6640 gcc_checking_assert (debug_args_for_decl != NULL);
6641 in.base.from = from;
6642 h = (struct tree_vec_map *)
6643 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6644 if (h)
6645 return &h->to;
6646 return NULL;
6647 }
6648
6649 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6650 arguments hashtable. */
6651
6652 vec<tree, va_gc> **
6653 decl_debug_args_insert (tree from)
6654 {
6655 struct tree_vec_map *h;
6656 void **loc;
6657
6658 if (DECL_HAS_DEBUG_ARGS_P (from))
6659 return decl_debug_args_lookup (from);
6660 if (debug_args_for_decl == NULL)
6661 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6662 tree_vec_map_eq, 0);
6663 h = ggc_alloc<tree_vec_map> ();
6664 h->base.from = from;
6665 h->to = NULL;
6666 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6667 INSERT);
6668 *(struct tree_vec_map **) loc = h;
6669 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6670 return &h->to;
6671 }
6672
6673 /* Hashing of types so that we don't make duplicates.
6674 The entry point is `type_hash_canon'. */
6675
6676 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6677 with types in the TREE_VALUE slots), by adding the hash codes
6678 of the individual types. */
6679
6680 static unsigned int
6681 type_hash_list (const_tree list, hashval_t hashcode)
6682 {
6683 const_tree tail;
6684
6685 for (tail = list; tail; tail = TREE_CHAIN (tail))
6686 if (TREE_VALUE (tail) != error_mark_node)
6687 hashcode = iterative_hash_object (TYPE_HASH (TREE_VALUE (tail)),
6688 hashcode);
6689
6690 return hashcode;
6691 }
6692
6693 /* These are the Hashtable callback functions. */
6694
6695 /* Returns true iff the types are equivalent. */
6696
6697 static int
6698 type_hash_eq (const void *va, const void *vb)
6699 {
6700 const struct type_hash *const a = (const struct type_hash *) va,
6701 *const b = (const struct type_hash *) vb;
6702
6703 /* First test the things that are the same for all types. */
6704 if (a->hash != b->hash
6705 || TREE_CODE (a->type) != TREE_CODE (b->type)
6706 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6707 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6708 TYPE_ATTRIBUTES (b->type))
6709 || (TREE_CODE (a->type) != COMPLEX_TYPE
6710 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6711 return 0;
6712
6713 /* Be careful about comparing arrays before and after the element type
6714 has been completed; don't compare TYPE_ALIGN unless both types are
6715 complete. */
6716 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6717 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6718 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6719 return 0;
6720
6721 switch (TREE_CODE (a->type))
6722 {
6723 case VOID_TYPE:
6724 case COMPLEX_TYPE:
6725 case POINTER_TYPE:
6726 case REFERENCE_TYPE:
6727 case NULLPTR_TYPE:
6728 return 1;
6729
6730 case VECTOR_TYPE:
6731 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6732
6733 case ENUMERAL_TYPE:
6734 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6735 && !(TYPE_VALUES (a->type)
6736 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6737 && TYPE_VALUES (b->type)
6738 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6739 && type_list_equal (TYPE_VALUES (a->type),
6740 TYPE_VALUES (b->type))))
6741 return 0;
6742
6743 /* ... fall through ... */
6744
6745 case INTEGER_TYPE:
6746 case REAL_TYPE:
6747 case BOOLEAN_TYPE:
6748 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6749 return false;
6750 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6751 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6752 TYPE_MAX_VALUE (b->type)))
6753 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6754 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6755 TYPE_MIN_VALUE (b->type))));
6756
6757 case FIXED_POINT_TYPE:
6758 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6759
6760 case OFFSET_TYPE:
6761 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6762
6763 case METHOD_TYPE:
6764 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6765 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6766 || (TYPE_ARG_TYPES (a->type)
6767 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6768 && TYPE_ARG_TYPES (b->type)
6769 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6770 && type_list_equal (TYPE_ARG_TYPES (a->type),
6771 TYPE_ARG_TYPES (b->type)))))
6772 break;
6773 return 0;
6774 case ARRAY_TYPE:
6775 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6776
6777 case RECORD_TYPE:
6778 case UNION_TYPE:
6779 case QUAL_UNION_TYPE:
6780 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6781 || (TYPE_FIELDS (a->type)
6782 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6783 && TYPE_FIELDS (b->type)
6784 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6785 && type_list_equal (TYPE_FIELDS (a->type),
6786 TYPE_FIELDS (b->type))));
6787
6788 case FUNCTION_TYPE:
6789 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6790 || (TYPE_ARG_TYPES (a->type)
6791 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6792 && TYPE_ARG_TYPES (b->type)
6793 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6794 && type_list_equal (TYPE_ARG_TYPES (a->type),
6795 TYPE_ARG_TYPES (b->type))))
6796 break;
6797 return 0;
6798
6799 default:
6800 return 0;
6801 }
6802
6803 if (lang_hooks.types.type_hash_eq != NULL)
6804 return lang_hooks.types.type_hash_eq (a->type, b->type);
6805
6806 return 1;
6807 }
6808
6809 /* Return the cached hash value. */
6810
6811 static hashval_t
6812 type_hash_hash (const void *item)
6813 {
6814 return ((const struct type_hash *) item)->hash;
6815 }
6816
6817 /* Look in the type hash table for a type isomorphic to TYPE.
6818 If one is found, return it. Otherwise return 0. */
6819
6820 static tree
6821 type_hash_lookup (hashval_t hashcode, tree type)
6822 {
6823 struct type_hash *h, in;
6824
6825 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6826 must call that routine before comparing TYPE_ALIGNs. */
6827 layout_type (type);
6828
6829 in.hash = hashcode;
6830 in.type = type;
6831
6832 h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in,
6833 hashcode);
6834 if (h)
6835 return h->type;
6836 return NULL_TREE;
6837 }
6838
6839 /* Add an entry to the type-hash-table
6840 for a type TYPE whose hash code is HASHCODE. */
6841
6842 static void
6843 type_hash_add (hashval_t hashcode, tree type)
6844 {
6845 struct type_hash *h;
6846 void **loc;
6847
6848 h = ggc_alloc<type_hash> ();
6849 h->hash = hashcode;
6850 h->type = type;
6851 loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
6852 *loc = (void *)h;
6853 }
6854
6855 /* Given TYPE, and HASHCODE its hash code, return the canonical
6856 object for an identical type if one already exists.
6857 Otherwise, return TYPE, and record it as the canonical object.
6858
6859 To use this function, first create a type of the sort you want.
6860 Then compute its hash code from the fields of the type that
6861 make it different from other similar types.
6862 Then call this function and use the value. */
6863
6864 tree
6865 type_hash_canon (unsigned int hashcode, tree type)
6866 {
6867 tree t1;
6868
6869 /* The hash table only contains main variants, so ensure that's what we're
6870 being passed. */
6871 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6872
6873 /* See if the type is in the hash table already. If so, return it.
6874 Otherwise, add the type. */
6875 t1 = type_hash_lookup (hashcode, type);
6876 if (t1 != 0)
6877 {
6878 if (GATHER_STATISTICS)
6879 {
6880 tree_code_counts[(int) TREE_CODE (type)]--;
6881 tree_node_counts[(int) t_kind]--;
6882 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6883 }
6884 return t1;
6885 }
6886 else
6887 {
6888 type_hash_add (hashcode, type);
6889 return type;
6890 }
6891 }
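
/* Illustrative usage (editorial sketch, not part of the original source),
   following the recipe in the comment above; this mirrors what
   build_nonstandard_integer_type does further down in this file:

       tree t = make_node (INTEGER_TYPE);
       TYPE_PRECISION (t) = 24;
       fixup_unsigned_type (t);
       t = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (t)), t);

   If an identical 24-bit type was canonicalized earlier, that node is
   returned and the freshly built one is discarded.  */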
6892
6893 /* See if the data pointed to by the type hash table is marked. We consider
6894 it marked if the type is marked or if a debug type number or symbol
6895 table entry has been made for the type. */
6896
6897 static int
6898 type_hash_marked_p (const void *p)
6899 {
6900 const_tree const type = ((const struct type_hash *) p)->type;
6901
6902 return ggc_marked_p (type);
6903 }
6904
6905 static void
6906 print_type_hash_statistics (void)
6907 {
6908 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6909 (long) htab_size (type_hash_table),
6910 (long) htab_elements (type_hash_table),
6911 htab_collisions (type_hash_table));
6912 }
6913
6914 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6915 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6916 by adding the hash codes of the individual attributes. */
6917
6918 static unsigned int
6919 attribute_hash_list (const_tree list, hashval_t hashcode)
6920 {
6921 const_tree tail;
6922
6923 for (tail = list; tail; tail = TREE_CHAIN (tail))
6924 /* ??? Do we want to add in TREE_VALUE too? */
6925 hashcode = iterative_hash_object
6926 (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)), hashcode);
6927 return hashcode;
6928 }
6929
6930 /* Given two lists of attributes, return true if list L2 is
6931 equivalent to L1. */
6932
6933 int
6934 attribute_list_equal (const_tree l1, const_tree l2)
6935 {
6936 if (l1 == l2)
6937 return 1;
6938
6939 return attribute_list_contained (l1, l2)
6940 && attribute_list_contained (l2, l1);
6941 }
6942
6943 /* Given two lists of attributes, return true if list L2 is
6944 completely contained within L1. */
6945 /* ??? This would be faster if attribute names were stored in a canonicalized
6946 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6947 must be used to show these elements are equivalent (which they are). */
6948 /* ??? It's not clear that attributes with arguments will always be handled
6949 correctly. */
6950
6951 int
6952 attribute_list_contained (const_tree l1, const_tree l2)
6953 {
6954 const_tree t1, t2;
6955
6956 /* First check the obvious, maybe the lists are identical. */
6957 if (l1 == l2)
6958 return 1;
6959
6960 /* Maybe the lists are similar. */
6961 for (t1 = l1, t2 = l2;
6962 t1 != 0 && t2 != 0
6963 && get_attribute_name (t1) == get_attribute_name (t2)
6964 && TREE_VALUE (t1) == TREE_VALUE (t2);
6965 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6966 ;
6967
6968 /* Maybe the lists are equal. */
6969 if (t1 == 0 && t2 == 0)
6970 return 1;
6971
6972 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6973 {
6974 const_tree attr;
6975 /* This CONST_CAST is okay because lookup_attribute does not
6976 modify its argument and the return value is assigned to a
6977 const_tree. */
6978 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6979 CONST_CAST_TREE (l1));
6980 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6981 attr = lookup_ident_attribute (get_attribute_name (t2),
6982 TREE_CHAIN (attr)))
6983 ;
6984
6985 if (attr == NULL_TREE)
6986 return 0;
6987 }
6988
6989 return 1;
6990 }
6991
6992 /* Given two lists of types
6993 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6994 return 1 if the lists contain the same types in the same order.
6995 Also, the TREE_PURPOSEs must match. */
6996
6997 int
6998 type_list_equal (const_tree l1, const_tree l2)
6999 {
7000 const_tree t1, t2;
7001
7002 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7003 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7004 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7005 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7006 && (TREE_TYPE (TREE_PURPOSE (t1))
7007 == TREE_TYPE (TREE_PURPOSE (t2))))))
7008 return 0;
7009
7010 return t1 == t2;
7011 }
7012
7013 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7014 given by TYPE. If the argument list accepts variable arguments,
7015 then this function counts only the ordinary arguments. */
7016
7017 int
7018 type_num_arguments (const_tree type)
7019 {
7020 int i = 0;
7021 tree t;
7022
7023 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7024 /* If the function does not take a variable number of arguments,
7025 the last element in the list will have type `void'. */
7026 if (VOID_TYPE_P (TREE_VALUE (t)))
7027 break;
7028 else
7029 ++i;
7030
7031 return i;
7032 }
7033
7034 /* Nonzero if integer constants T1 and T2
7035 represent the same constant value. */
7036
7037 int
7038 tree_int_cst_equal (const_tree t1, const_tree t2)
7039 {
7040 if (t1 == t2)
7041 return 1;
7042
7043 if (t1 == 0 || t2 == 0)
7044 return 0;
7045
7046 if (TREE_CODE (t1) == INTEGER_CST
7047 && TREE_CODE (t2) == INTEGER_CST
7048 && wi::to_widest (t1) == wi::to_widest (t2))
7049 return 1;
7050
7051 return 0;
7052 }
7053
7054 /* Return true if T is an INTEGER_CST whose numerical value (extended
7055 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7056
7057 bool
7058 tree_fits_shwi_p (const_tree t)
7059 {
7060 return (t != NULL_TREE
7061 && TREE_CODE (t) == INTEGER_CST
7062 && wi::fits_shwi_p (wi::to_widest (t)));
7063 }
7064
7065 /* Return true if T is an INTEGER_CST whose numerical value (extended
7066 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7067
7068 bool
7069 tree_fits_uhwi_p (const_tree t)
7070 {
7071 return (t != NULL_TREE
7072 && TREE_CODE (t) == INTEGER_CST
7073 && wi::fits_uhwi_p (wi::to_widest (t)));
7074 }
7075
7076 /* T is an INTEGER_CST whose numerical value (extended according to
7077 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7078 HOST_WIDE_INT. */
7079
7080 HOST_WIDE_INT
7081 tree_to_shwi (const_tree t)
7082 {
7083 gcc_assert (tree_fits_shwi_p (t));
7084 return TREE_INT_CST_LOW (t);
7085 }
7086
7087 /* T is an INTEGER_CST whose numerical value (extended according to
7088 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7089 HOST_WIDE_INT. */
7090
7091 unsigned HOST_WIDE_INT
7092 tree_to_uhwi (const_tree t)
7093 {
7094 gcc_assert (tree_fits_uhwi_p (t));
7095 return TREE_INT_CST_LOW (t);
7096 }
7097
7098 /* Return the most significant (sign) bit of T. */
7099
7100 int
7101 tree_int_cst_sign_bit (const_tree t)
7102 {
7103 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7104
7105 return wi::extract_uhwi (t, bitno, 1);
7106 }
7107
7108 /* Return an indication of the sign of the integer constant T.
7109 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7110 Note that -1 will never be returned if T's type is unsigned. */
7111
7112 int
7113 tree_int_cst_sgn (const_tree t)
7114 {
7115 if (wi::eq_p (t, 0))
7116 return 0;
7117 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7118 return 1;
7119 else if (wi::neg_p (t))
7120 return -1;
7121 else
7122 return 1;
7123 }
7124
7125 /* Return the minimum number of bits needed to represent VALUE in a
7126 signed or unsigned type, SGN says which. */
7127
7128 unsigned int
7129 tree_int_cst_min_precision (tree value, signop sgn)
7130 {
7131 /* If the value is negative, compute its negative minus 1. The latter
7132 adjustment is because the absolute value of the largest negative value
7133 is one larger than the largest positive value. This is equivalent to
7134 a bit-wise negation, so use that operation instead. */
7135
7136 if (tree_int_cst_sgn (value) < 0)
7137 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7138
7139 /* Return the number of bits needed, taking into account the fact
7140 that we need one more bit for a signed than unsigned type.
7141 If value is 0 or -1, the minimum precision is 1 no matter
7142 whether SGN is SIGNED or UNSIGNED. */
7143
7144 if (integer_zerop (value))
7145 return 1;
7146 else
7147 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7148 }
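
/* Worked example (editorial, not part of the original source):

       tree five = build_int_cst (integer_type_node, 5);
       tree_int_cst_min_precision (five, UNSIGNED);   returns 3
       tree_int_cst_min_precision (five, SIGNED);     returns 4

   The signed form needs one extra bit for the sign; 0 and -1 always
   report a minimum precision of 1.  */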
7149
7150 /* Return truthvalue of whether T1 is the same tree structure as T2.
7151 Return 1 if they are the same.
7152 Return 0 if they are understandably different.
7153 Return -1 if either contains tree structure not understood by
7154 this function. */
7155
7156 int
7157 simple_cst_equal (const_tree t1, const_tree t2)
7158 {
7159 enum tree_code code1, code2;
7160 int cmp;
7161 int i;
7162
7163 if (t1 == t2)
7164 return 1;
7165 if (t1 == 0 || t2 == 0)
7166 return 0;
7167
7168 code1 = TREE_CODE (t1);
7169 code2 = TREE_CODE (t2);
7170
7171 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7172 {
7173 if (CONVERT_EXPR_CODE_P (code2)
7174 || code2 == NON_LVALUE_EXPR)
7175 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7176 else
7177 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7178 }
7179
7180 else if (CONVERT_EXPR_CODE_P (code2)
7181 || code2 == NON_LVALUE_EXPR)
7182 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7183
7184 if (code1 != code2)
7185 return 0;
7186
7187 switch (code1)
7188 {
7189 case INTEGER_CST:
7190 return wi::to_widest (t1) == wi::to_widest (t2);
7191
7192 case REAL_CST:
7193 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7194
7195 case FIXED_CST:
7196 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7197
7198 case STRING_CST:
7199 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7200 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7201 TREE_STRING_LENGTH (t1)));
7202
7203 case CONSTRUCTOR:
7204 {
7205 unsigned HOST_WIDE_INT idx;
7206 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7207 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7208
7209 if (vec_safe_length (v1) != vec_safe_length (v2))
7210 return false;
7211
7212 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7213 /* ??? Should we handle also fields here? */
7214 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7215 return false;
7216 return true;
7217 }
7218
7219 case SAVE_EXPR:
7220 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7221
7222 case CALL_EXPR:
7223 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7224 if (cmp <= 0)
7225 return cmp;
7226 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7227 return 0;
7228 {
7229 const_tree arg1, arg2;
7230 const_call_expr_arg_iterator iter1, iter2;
7231 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7232 arg2 = first_const_call_expr_arg (t2, &iter2);
7233 arg1 && arg2;
7234 arg1 = next_const_call_expr_arg (&iter1),
7235 arg2 = next_const_call_expr_arg (&iter2))
7236 {
7237 cmp = simple_cst_equal (arg1, arg2);
7238 if (cmp <= 0)
7239 return cmp;
7240 }
7241 return arg1 == arg2;
7242 }
7243
7244 case TARGET_EXPR:
7245 /* Special case: if either target is an unallocated VAR_DECL,
7246 it means that it's going to be unified with whatever the
7247 TARGET_EXPR is really supposed to initialize, so treat it
7248 as being equivalent to anything. */
7249 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7250 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7251 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7252 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7253 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7254 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7255 cmp = 1;
7256 else
7257 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7258
7259 if (cmp <= 0)
7260 return cmp;
7261
7262 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7263
7264 case WITH_CLEANUP_EXPR:
7265 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7266 if (cmp <= 0)
7267 return cmp;
7268
7269 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7270
7271 case COMPONENT_REF:
7272 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7273 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7274
7275 return 0;
7276
7277 case VAR_DECL:
7278 case PARM_DECL:
7279 case CONST_DECL:
7280 case FUNCTION_DECL:
7281 return 0;
7282
7283 default:
7284 break;
7285 }
7286
7287 /* This general rule works for most tree codes. All exceptions should be
7288 handled above. If this is a language-specific tree code, we can't
7289 trust what might be in the operand, so say we don't know
7290 the situation. */
7291 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7292 return -1;
7293
7294 switch (TREE_CODE_CLASS (code1))
7295 {
7296 case tcc_unary:
7297 case tcc_binary:
7298 case tcc_comparison:
7299 case tcc_expression:
7300 case tcc_reference:
7301 case tcc_statement:
7302 cmp = 1;
7303 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7304 {
7305 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7306 if (cmp <= 0)
7307 return cmp;
7308 }
7309
7310 return cmp;
7311
7312 default:
7313 return -1;
7314 }
7315 }
7316
7317 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7318 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7319 than U, respectively. */
7320
7321 int
7322 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7323 {
7324 if (tree_int_cst_sgn (t) < 0)
7325 return -1;
7326 else if (!tree_fits_uhwi_p (t))
7327 return 1;
7328 else if (TREE_INT_CST_LOW (t) == u)
7329 return 0;
7330 else if (TREE_INT_CST_LOW (t) < u)
7331 return -1;
7332 else
7333 return 1;
7334 }
7335
7336 /* Return true if SIZE represents a constant size that is in bounds of
7337 what the middle-end and the backend accept (covering not more than
7338 half of the address-space). */
7339
7340 bool
7341 valid_constant_size_p (const_tree size)
7342 {
7343 if (! tree_fits_uhwi_p (size)
7344 || TREE_OVERFLOW (size)
7345 || tree_int_cst_sign_bit (size) != 0)
7346 return false;
7347 return true;
7348 }
7349
7350 /* Return the precision of the type, or for a complex or vector type the
7351 precision of the type of its elements. */
7352
7353 unsigned int
7354 element_precision (const_tree type)
7355 {
7356 enum tree_code code = TREE_CODE (type);
7357 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7358 type = TREE_TYPE (type);
7359
7360 return TYPE_PRECISION (type);
7361 }
7362
7363 /* Return true if CODE represents an associative tree code. Otherwise
7364 return false. */
7365 bool
7366 associative_tree_code (enum tree_code code)
7367 {
7368 switch (code)
7369 {
7370 case BIT_IOR_EXPR:
7371 case BIT_AND_EXPR:
7372 case BIT_XOR_EXPR:
7373 case PLUS_EXPR:
7374 case MULT_EXPR:
7375 case MIN_EXPR:
7376 case MAX_EXPR:
7377 return true;
7378
7379 default:
7380 break;
7381 }
7382 return false;
7383 }
7384
7385 /* Return true if CODE represents a commutative tree code. Otherwise
7386 return false. */
7387 bool
7388 commutative_tree_code (enum tree_code code)
7389 {
7390 switch (code)
7391 {
7392 case PLUS_EXPR:
7393 case MULT_EXPR:
7394 case MULT_HIGHPART_EXPR:
7395 case MIN_EXPR:
7396 case MAX_EXPR:
7397 case BIT_IOR_EXPR:
7398 case BIT_XOR_EXPR:
7399 case BIT_AND_EXPR:
7400 case NE_EXPR:
7401 case EQ_EXPR:
7402 case UNORDERED_EXPR:
7403 case ORDERED_EXPR:
7404 case UNEQ_EXPR:
7405 case LTGT_EXPR:
7406 case TRUTH_AND_EXPR:
7407 case TRUTH_XOR_EXPR:
7408 case TRUTH_OR_EXPR:
7409 case WIDEN_MULT_EXPR:
7410 case VEC_WIDEN_MULT_HI_EXPR:
7411 case VEC_WIDEN_MULT_LO_EXPR:
7412 case VEC_WIDEN_MULT_EVEN_EXPR:
7413 case VEC_WIDEN_MULT_ODD_EXPR:
7414 return true;
7415
7416 default:
7417 break;
7418 }
7419 return false;
7420 }
7421
7422 /* Return true if CODE represents a ternary tree code for which the
7423 first two operands are commutative. Otherwise return false. */
7424 bool
7425 commutative_ternary_tree_code (enum tree_code code)
7426 {
7427 switch (code)
7428 {
7429 case WIDEN_MULT_PLUS_EXPR:
7430 case WIDEN_MULT_MINUS_EXPR:
7431 return true;
7432
7433 default:
7434 break;
7435 }
7436 return false;
7437 }
7438
7439 /* Generate a hash value for an expression. This can be used iteratively
7440 by passing a previous result as the VAL argument.
7441
7442 This function is intended to produce the same hash for expressions which
7443 would compare equal using operand_equal_p. */
7444
7445 hashval_t
7446 iterative_hash_expr (const_tree t, hashval_t val)
7447 {
7448 int i;
7449 enum tree_code code;
7450 enum tree_code_class tclass;
7451
7452 if (t == NULL_TREE)
7453 return iterative_hash_hashval_t (0, val);
7454
7455 code = TREE_CODE (t);
7456
7457 switch (code)
7458 {
7459 /* Alas, constants aren't shared, so we can't rely on pointer
7460 identity. */
7461 case VOID_CST:
7462 return iterative_hash_hashval_t (0, val);
7463 case INTEGER_CST:
7464 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7465 val = iterative_hash_host_wide_int (TREE_INT_CST_ELT (t, i), val);
7466 return val;
7467 case REAL_CST:
7468 {
7469 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7470
7471 return iterative_hash_hashval_t (val2, val);
7472 }
7473 case FIXED_CST:
7474 {
7475 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7476
7477 return iterative_hash_hashval_t (val2, val);
7478 }
7479 case STRING_CST:
7480 return iterative_hash (TREE_STRING_POINTER (t),
7481 TREE_STRING_LENGTH (t), val);
7482 case COMPLEX_CST:
7483 val = iterative_hash_expr (TREE_REALPART (t), val);
7484 return iterative_hash_expr (TREE_IMAGPART (t), val);
7485 case VECTOR_CST:
7486 {
7487 unsigned i;
7488 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7489 val = iterative_hash_expr (VECTOR_CST_ELT (t, i), val);
7490 return val;
7491 }
7492 case SSA_NAME:
7493 /* We can just compare by pointer. */
7494 return iterative_hash_host_wide_int (SSA_NAME_VERSION (t), val);
7495 case PLACEHOLDER_EXPR:
7496 /* The node itself doesn't matter. */
7497 return val;
7498 case TREE_LIST:
7499 /* A list of expressions, for a CALL_EXPR or as the elements of a
7500 VECTOR_CST. */
7501 for (; t; t = TREE_CHAIN (t))
7502 val = iterative_hash_expr (TREE_VALUE (t), val);
7503 return val;
7504 case CONSTRUCTOR:
7505 {
7506 unsigned HOST_WIDE_INT idx;
7507 tree field, value;
7508 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7509 {
7510 val = iterative_hash_expr (field, val);
7511 val = iterative_hash_expr (value, val);
7512 }
7513 return val;
7514 }
7515 case FUNCTION_DECL:
7516 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7517 Otherwise nodes that compare equal according to operand_equal_p might
7518 get different hash codes. However, don't do this for machine specific
7519 or front end builtins, since the function code is overloaded in those
7520 cases. */
7521 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7522 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7523 {
7524 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7525 code = TREE_CODE (t);
7526 }
7527 /* FALL THROUGH */
7528 default:
7529 tclass = TREE_CODE_CLASS (code);
7530
7531 if (tclass == tcc_declaration)
7532 {
7533 /* DECLs have a unique ID. */
7534 val = iterative_hash_host_wide_int (DECL_UID (t), val);
7535 }
7536 else
7537 {
7538 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7539
7540 val = iterative_hash_object (code, val);
7541
7542 /* Don't hash the type, that can lead to having nodes which
7543 compare equal according to operand_equal_p, but which
7544 have different hash codes. */
7545 if (CONVERT_EXPR_CODE_P (code)
7546 || code == NON_LVALUE_EXPR)
7547 {
7548 /* Make sure to include signedness in the hash computation. */
7549 val += TYPE_UNSIGNED (TREE_TYPE (t));
7550 val = iterative_hash_expr (TREE_OPERAND (t, 0), val);
7551 }
7552
7553 else if (commutative_tree_code (code))
7554 {
7555 /* It's a commutative expression. We want to hash it the same
7556 however it appears. We do this by first hashing both operands
7557 and then rehashing based on the order of their independent
7558 hashes. */
7559 hashval_t one = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
7560 hashval_t two = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
7561 hashval_t tmp;
7562
7563 if (one > two)
7564 tmp = one, one = two, two = tmp;
7565
7566 val = iterative_hash_hashval_t (one, val);
7567 val = iterative_hash_hashval_t (two, val);
7568 }
7569 else
7570 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7571 val = iterative_hash_expr (TREE_OPERAND (t, i), val);
7572 }
7573 return val;
7574 }
7575 }
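
/* Illustrative usage (editorial sketch, not part of the original source).
   Hash values are meant to be chained, and commutative operands hash
   identically in either order; E1 and E2 are hypothetical expressions:

       hashval_t h = iterative_hash_expr (e1, 0);
       h = iterative_hash_expr (e2, h);

   As documented above, trees that operand_equal_p considers equal end
   up with the same hash value.  */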
7576
7577 /* Constructors for pointer, array and function types.
7578 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7579 constructed by language-dependent code, not here.) */
7580
7581 /* Construct, lay out and return the type of pointers to TO_TYPE with
7582 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7583 reference all of memory. If such a type has already been
7584 constructed, reuse it. */
7585
7586 tree
7587 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7588 bool can_alias_all)
7589 {
7590 tree t;
7591
7592 if (to_type == error_mark_node)
7593 return error_mark_node;
7594
7595 /* If the pointed-to type has the may_alias attribute set, force
7596 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7597 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7598 can_alias_all = true;
7599
7600 /* In some cases, languages will have things that aren't a POINTER_TYPE
7601 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7602 In that case, return that type without regard to the rest of our
7603 operands.
7604
7605 ??? This is a kludge, but consistent with the way this function has
7606 always operated and there doesn't seem to be a good way to avoid this
7607 at the moment. */
7608 if (TYPE_POINTER_TO (to_type) != 0
7609 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7610 return TYPE_POINTER_TO (to_type);
7611
7612 /* First, if we already have a type for pointers to TO_TYPE and it's
7613 the proper mode, use it. */
7614 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7615 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7616 return t;
7617
7618 t = make_node (POINTER_TYPE);
7619
7620 TREE_TYPE (t) = to_type;
7621 SET_TYPE_MODE (t, mode);
7622 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7623 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7624 TYPE_POINTER_TO (to_type) = t;
7625
7626 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7627 SET_TYPE_STRUCTURAL_EQUALITY (t);
7628 else if (TYPE_CANONICAL (to_type) != to_type)
7629 TYPE_CANONICAL (t)
7630 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7631 mode, can_alias_all);
7632
7633 /* Lay out the type. This function has many callers that are concerned
7634 with expression-construction, and this simplifies them all. */
7635 layout_type (t);
7636
7637 return t;
7638 }
7639
7640 /* By default build pointers in ptr_mode. */
7641
7642 tree
7643 build_pointer_type (tree to_type)
7644 {
7645 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7646 : TYPE_ADDR_SPACE (to_type);
7647 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7648 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7649 }
7650
7651 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7652
7653 tree
7654 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7655 bool can_alias_all)
7656 {
7657 tree t;
7658
7659 if (to_type == error_mark_node)
7660 return error_mark_node;
7661
7662 /* If the pointed-to type has the may_alias attribute set, force
7663 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7664 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7665 can_alias_all = true;
7666
7667 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7668 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7669 In that case, return that type without regard to the rest of our
7670 operands.
7671
7672 ??? This is a kludge, but consistent with the way this function has
7673 always operated and there doesn't seem to be a good way to avoid this
7674 at the moment. */
7675 if (TYPE_REFERENCE_TO (to_type) != 0
7676 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7677 return TYPE_REFERENCE_TO (to_type);
7678
7679 /* First, if we already have a type for pointers to TO_TYPE and it's
7680 the proper mode, use it. */
7681 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7682 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7683 return t;
7684
7685 t = make_node (REFERENCE_TYPE);
7686
7687 TREE_TYPE (t) = to_type;
7688 SET_TYPE_MODE (t, mode);
7689 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7690 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7691 TYPE_REFERENCE_TO (to_type) = t;
7692
7693 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7694 SET_TYPE_STRUCTURAL_EQUALITY (t);
7695 else if (TYPE_CANONICAL (to_type) != to_type)
7696 TYPE_CANONICAL (t)
7697 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7698 mode, can_alias_all);
7699
7700 layout_type (t);
7701
7702 return t;
7703 }
7704
7705
7706 /* Build the node for the type of references-to-TO_TYPE by default
7707 in ptr_mode. */
7708
7709 tree
7710 build_reference_type (tree to_type)
7711 {
7712 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7713 : TYPE_ADDR_SPACE (to_type);
7714 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7715 return build_reference_type_for_mode (to_type, pointer_mode, false);
7716 }
7717
7718 #define MAX_INT_CACHED_PREC \
7719 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7720 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7721
7722 /* Builds a signed or unsigned integer type of precision PRECISION.
7723 Used for C bitfields whose precision does not match that of
7724 built-in target types. */
7725 tree
7726 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7727 int unsignedp)
7728 {
7729 tree itype, ret;
7730
7731 if (unsignedp)
7732 unsignedp = MAX_INT_CACHED_PREC + 1;
7733
7734 if (precision <= MAX_INT_CACHED_PREC)
7735 {
7736 itype = nonstandard_integer_type_cache[precision + unsignedp];
7737 if (itype)
7738 return itype;
7739 }
7740
7741 itype = make_node (INTEGER_TYPE);
7742 TYPE_PRECISION (itype) = precision;
7743
7744 if (unsignedp)
7745 fixup_unsigned_type (itype);
7746 else
7747 fixup_signed_type (itype);
7748
7749 ret = itype;
7750 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7751 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7752 if (precision <= MAX_INT_CACHED_PREC)
7753 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7754
7755 return ret;
7756 }
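
/* For example, an unsigned 24-bit bit-field type can be obtained with

     tree uint24 = build_nonstandard_integer_type (24, 1);

   and, because 24 <= MAX_INT_CACHED_PREC, a second call with the same
   arguments returns the cached node.  */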
7757
7758 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7759 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7760 is true, reuse such a type that has already been constructed. */
7761
7762 static tree
7763 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7764 {
7765 tree itype = make_node (INTEGER_TYPE);
7766 hashval_t hashcode = 0;
7767
7768 TREE_TYPE (itype) = type;
7769
7770 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7771 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7772
7773 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7774 SET_TYPE_MODE (itype, TYPE_MODE (type));
7775 TYPE_SIZE (itype) = TYPE_SIZE (type);
7776 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7777 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7778 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7779
7780 if (!shared)
7781 return itype;
7782
7783 if ((TYPE_MIN_VALUE (itype)
7784 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7785 || (TYPE_MAX_VALUE (itype)
7786 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7787 {
7788 /* Since we cannot reliably merge this type, we need to compare it using
7789 structural equality checks. */
7790 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7791 return itype;
7792 }
7793
7794 hashcode = iterative_hash_expr (TYPE_MIN_VALUE (itype), hashcode);
7795 hashcode = iterative_hash_expr (TYPE_MAX_VALUE (itype), hashcode);
7796 hashcode = iterative_hash_hashval_t (TYPE_HASH (type), hashcode);
7797 itype = type_hash_canon (hashcode, itype);
7798
7799 return itype;
7800 }
7801
7802 /* Wrapper around build_range_type_1 with SHARED set to true. */
7803
7804 tree
7805 build_range_type (tree type, tree lowval, tree highval)
7806 {
7807 return build_range_type_1 (type, lowval, highval, true);
7808 }
7809
7810 /* Wrapper around build_range_type_1 with SHARED set to false. */
7811
7812 tree
7813 build_nonshared_range_type (tree type, tree lowval, tree highval)
7814 {
7815 return build_range_type_1 (type, lowval, highval, false);
7816 }
7817
7818 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7819 MAXVAL should be the maximum value in the domain
7820 (one less than the length of the array).
7821
7822 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7823 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7824 The limit exists because the result is a signed type and we don't handle
7825 sizes that use more than one HOST_WIDE_INT. */
7826
7827 tree
7828 build_index_type (tree maxval)
7829 {
7830 return build_range_type (sizetype, size_zero_node, maxval);
7831 }
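
/* For example, the domain of a 10-element array is

     tree domain = build_index_type (size_int (9));

   which is then typically used as the index type of an array:

     tree int_10 = build_array_type (integer_type_node, domain);  */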
7832
7833 /* Return true if the debug information for TYPE, a subtype, should be emitted
7834 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7835 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7836 debug info and doesn't reflect the source code. */
7837
7838 bool
7839 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7840 {
7841 tree base_type = TREE_TYPE (type), low, high;
7842
7843 /* Subrange types have a base type which is an integral type. */
7844 if (!INTEGRAL_TYPE_P (base_type))
7845 return false;
7846
7847 /* Get the real bounds of the subtype. */
7848 if (lang_hooks.types.get_subrange_bounds)
7849 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7850 else
7851 {
7852 low = TYPE_MIN_VALUE (type);
7853 high = TYPE_MAX_VALUE (type);
7854 }
7855
7856 /* If the type and its base type have the same representation and the same
7857 name, then the type is not a subrange but a copy of the base type. */
7858 if ((TREE_CODE (base_type) == INTEGER_TYPE
7859 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7860 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7861 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7862 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7863 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7864 return false;
7865
7866 if (lowval)
7867 *lowval = low;
7868 if (highval)
7869 *highval = high;
7870 return true;
7871 }
7872
7873 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7874 and number of elements specified by the range of values of INDEX_TYPE.
7875 If SHARED is true, reuse such a type that has already been constructed. */
7876
7877 static tree
7878 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7879 {
7880 tree t;
7881
7882 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7883 {
7884 error ("arrays of functions are not meaningful");
7885 elt_type = integer_type_node;
7886 }
7887
7888 t = make_node (ARRAY_TYPE);
7889 TREE_TYPE (t) = elt_type;
7890 TYPE_DOMAIN (t) = index_type;
7891 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7892 layout_type (t);
7893
7894 /* If the element type is incomplete at this point we get marked for
7895 structural equality. Do not record these types in the canonical
7896 type hashtable. */
7897 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7898 return t;
7899
7900 if (shared)
7901 {
7902 hashval_t hashcode = iterative_hash_object (TYPE_HASH (elt_type), 0);
7903 if (index_type)
7904 hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode);
7905 t = type_hash_canon (hashcode, t);
7906 }
7907
7908 if (TYPE_CANONICAL (t) == t)
7909 {
7910 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7911 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7912 SET_TYPE_STRUCTURAL_EQUALITY (t);
7913 else if (TYPE_CANONICAL (elt_type) != elt_type
7914 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7915 TYPE_CANONICAL (t)
7916 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7917 index_type
7918 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7919 shared);
7920 }
7921
7922 return t;
7923 }
7924
7925 /* Wrapper around build_array_type_1 with SHARED set to true. */
7926
7927 tree
7928 build_array_type (tree elt_type, tree index_type)
7929 {
7930 return build_array_type_1 (elt_type, index_type, true);
7931 }
7932
7933 /* Wrapper around build_array_type_1 with SHARED set to false. */
7934
7935 tree
7936 build_nonshared_array_type (tree elt_type, tree index_type)
7937 {
7938 return build_array_type_1 (elt_type, index_type, false);
7939 }
7940
7941 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7942 sizetype. */
7943
7944 tree
7945 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7946 {
7947 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7948 }
7949
7950 /* Recursively examines the array elements of TYPE, until a non-array
7951 element type is found. */
7952
7953 tree
7954 strip_array_types (tree type)
7955 {
7956 while (TREE_CODE (type) == ARRAY_TYPE)
7957 type = TREE_TYPE (type);
7958
7959 return type;
7960 }
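
/* For example, applied to the type of "int a[2][3]", strip_array_types
   returns integer_type_node:

     tree elt = strip_array_types (TREE_TYPE (a_decl));

   where a_decl stands for the VAR_DECL of "a".  */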
7961
7962 /* Computes the canonical argument types from the argument type list
7963 ARGTYPES.
7964
7965 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7966 on entry to this function, or if any of the ARGTYPES are
7967 structural.
7968
7969 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7970 true on entry to this function, or if any of the ARGTYPES are
7971 non-canonical.
7972
7973 Returns a canonical argument list, which may be ARGTYPES when the
7974 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7975 true) or would not differ from ARGTYPES. */
7976
7977 static tree
7978 maybe_canonicalize_argtypes (tree argtypes,
7979 bool *any_structural_p,
7980 bool *any_noncanonical_p)
7981 {
7982 tree arg;
7983 bool any_noncanonical_argtypes_p = false;
7984
7985 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7986 {
7987 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7988 /* Fail gracefully by stating that the type is structural. */
7989 *any_structural_p = true;
7990 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7991 *any_structural_p = true;
7992 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7993 || TREE_PURPOSE (arg))
7994 /* If the argument has a default argument, we consider it
7995 non-canonical even though the type itself is canonical.
7996 That way, different variants of function and method types
7997 with default arguments will all point to the variant with
7998 no defaults as their canonical type. */
7999 any_noncanonical_argtypes_p = true;
8000 }
8001
8002 if (*any_structural_p)
8003 return argtypes;
8004
8005 if (any_noncanonical_argtypes_p)
8006 {
8007 /* Build the canonical list of argument types. */
8008 tree canon_argtypes = NULL_TREE;
8009 bool is_void = false;
8010
8011 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8012 {
8013 if (arg == void_list_node)
8014 is_void = true;
8015 else
8016 canon_argtypes = tree_cons (NULL_TREE,
8017 TYPE_CANONICAL (TREE_VALUE (arg)),
8018 canon_argtypes);
8019 }
8020
8021 canon_argtypes = nreverse (canon_argtypes);
8022 if (is_void)
8023 canon_argtypes = chainon (canon_argtypes, void_list_node);
8024
8025 /* There is a non-canonical type. */
8026 *any_noncanonical_p = true;
8027 return canon_argtypes;
8028 }
8029
8030 /* The canonical argument types are the same as ARGTYPES. */
8031 return argtypes;
8032 }
8033
8034 /* Construct, lay out and return
8035 the type of functions returning type VALUE_TYPE
8036 given arguments of types ARG_TYPES.
8037 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8038 are data type nodes for the arguments of the function.
8039 If such a type has already been constructed, reuse it. */
8040
8041 tree
8042 build_function_type (tree value_type, tree arg_types)
8043 {
8044 tree t;
8045 hashval_t hashcode = 0;
8046 bool any_structural_p, any_noncanonical_p;
8047 tree canon_argtypes;
8048
8049 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8050 {
8051 error ("function return type cannot be function");
8052 value_type = integer_type_node;
8053 }
8054
8055 /* Make a node of the sort we want. */
8056 t = make_node (FUNCTION_TYPE);
8057 TREE_TYPE (t) = value_type;
8058 TYPE_ARG_TYPES (t) = arg_types;
8059
8060 /* If we already have such a type, use the old one. */
8061 hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode);
8062 hashcode = type_hash_list (arg_types, hashcode);
8063 t = type_hash_canon (hashcode, t);
8064
8065 /* Set up the canonical type. */
8066 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8067 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8068 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8069 &any_structural_p,
8070 &any_noncanonical_p);
8071 if (any_structural_p)
8072 SET_TYPE_STRUCTURAL_EQUALITY (t);
8073 else if (any_noncanonical_p)
8074 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8075 canon_argtypes);
8076
8077 if (!COMPLETE_TYPE_P (t))
8078 layout_type (t);
8079 return t;
8080 }
8081
8082 /* Build a function type. The RETURN_TYPE is the type returned by the
8083 function. If VAARGS is set, no void_type_node is appended to
8084 the list. ARGP must always be terminated by a NULL_TREE. */
8085
8086 static tree
8087 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8088 {
8089 tree t, args, last;
8090
8091 t = va_arg (argp, tree);
8092 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8093 args = tree_cons (NULL_TREE, t, args);
8094
8095 if (vaargs)
8096 {
8097 last = args;
8098 if (args != NULL_TREE)
8099 args = nreverse (args);
8100 gcc_assert (last != void_list_node);
8101 }
8102 else if (args == NULL_TREE)
8103 args = void_list_node;
8104 else
8105 {
8106 last = args;
8107 args = nreverse (args);
8108 TREE_CHAIN (last) = void_list_node;
8109 }
8110 args = build_function_type (return_type, args);
8111
8112 return args;
8113 }
8114
8115 /* Build a function type. The RETURN_TYPE is the type returned by the
8116 function. If additional arguments are provided, they are
8117 additional argument types. The list of argument types must always
8118 be terminated by NULL_TREE. */
8119
8120 tree
8121 build_function_type_list (tree return_type, ...)
8122 {
8123 tree args;
8124 va_list p;
8125
8126 va_start (p, return_type);
8127 args = build_function_type_list_1 (false, return_type, p);
8128 va_end (p);
8129 return args;
8130 }
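
/* For example, the type of a function taking one double and returning
   int is built with

     tree fntype = build_function_type_list (integer_type_node,
                                             double_type_node, NULL_TREE);

   the trailing NULL_TREE terminates the argument list.  */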
8131
8132 /* Build a variable argument function type. The RETURN_TYPE is the
8133 type returned by the function. If additional arguments are provided,
8134 they are additional argument types. The list of argument types must
8135 always be terminated by NULL_TREE. */
8136
8137 tree
8138 build_varargs_function_type_list (tree return_type, ...)
8139 {
8140 tree args;
8141 va_list p;
8142
8143 va_start (p, return_type);
8144 args = build_function_type_list_1 (true, return_type, p);
8145 va_end (p);
8146
8147 return args;
8148 }
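
/* For example, a variadic signature taking one pointer argument followed
   by a variable argument list can be built with

     tree fntype = build_varargs_function_type_list (integer_type_node,
                                                     ptr_type_node,
                                                     NULL_TREE);

   no terminating void_list_node is appended, which is what marks the
   resulting FUNCTION_TYPE as variadic.  */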
8149
8150 /* Build a function type. RETURN_TYPE is the type returned by the
8151 function; VAARGS indicates whether the function takes varargs. The
8152 function takes N named arguments, the types of which are provided in
8153 ARG_TYPES. */
8154
8155 static tree
8156 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8157 tree *arg_types)
8158 {
8159 int i;
8160 tree t = vaargs ? NULL_TREE : void_list_node;
8161
8162 for (i = n - 1; i >= 0; i--)
8163 t = tree_cons (NULL_TREE, arg_types[i], t);
8164
8165 return build_function_type (return_type, t);
8166 }
8167
8168 /* Build a function type. RETURN_TYPE is the type returned by the
8169 function. The function takes N named arguments, the types of which
8170 are provided in ARG_TYPES. */
8171
8172 tree
8173 build_function_type_array (tree return_type, int n, tree *arg_types)
8174 {
8175 return build_function_type_array_1 (false, return_type, n, arg_types);
8176 }
8177
8178 /* Build a variable argument function type. RETURN_TYPE is the type
8179 returned by the function. The function takes N named arguments, the
8180 types of which are provided in ARG_TYPES. */
8181
8182 tree
8183 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8184 {
8185 return build_function_type_array_1 (true, return_type, n, arg_types);
8186 }
8187
8188 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8189 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8190 for the method. An implicit additional parameter (of type
8191 pointer-to-BASETYPE) is added to the ARGTYPES. */
8192
8193 tree
8194 build_method_type_directly (tree basetype,
8195 tree rettype,
8196 tree argtypes)
8197 {
8198 tree t;
8199 tree ptype;
8200 int hashcode = 0;
8201 bool any_structural_p, any_noncanonical_p;
8202 tree canon_argtypes;
8203
8204 /* Make a node of the sort we want. */
8205 t = make_node (METHOD_TYPE);
8206
8207 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8208 TREE_TYPE (t) = rettype;
8209 ptype = build_pointer_type (basetype);
8210
8211 /* The actual arglist for this function includes a "hidden" argument
8212 which is "this". Put it into the list of argument types. */
8213 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8214 TYPE_ARG_TYPES (t) = argtypes;
8215
8216 /* If we already have such a type, use the old one. */
8217 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8218 hashcode = iterative_hash_object (TYPE_HASH (rettype), hashcode);
8219 hashcode = type_hash_list (argtypes, hashcode);
8220 t = type_hash_canon (hashcode, t);
8221
8222 /* Set up the canonical type. */
8223 any_structural_p
8224 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8225 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8226 any_noncanonical_p
8227 = (TYPE_CANONICAL (basetype) != basetype
8228 || TYPE_CANONICAL (rettype) != rettype);
8229 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8230 &any_structural_p,
8231 &any_noncanonical_p);
8232 if (any_structural_p)
8233 SET_TYPE_STRUCTURAL_EQUALITY (t);
8234 else if (any_noncanonical_p)
8235 TYPE_CANONICAL (t)
8236 = build_method_type_directly (TYPE_CANONICAL (basetype),
8237 TYPE_CANONICAL (rettype),
8238 canon_argtypes);
8239 if (!COMPLETE_TYPE_P (t))
8240 layout_type (t);
8241
8242 return t;
8243 }
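
/* For example, the type of a method "int f (double)" of some class can be
   built with

     tree argtypes = tree_cons (NULL_TREE, double_type_node, void_list_node);
     tree mtype = build_method_type_directly (class_type, integer_type_node,
                                              argtypes);

   where class_type stands for the RECORD_TYPE of the class; the implicit
   "this" argument is added by this function itself.  */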
8244
8245 /* Construct, lay out and return the type of methods belonging to class
8246 BASETYPE and whose arguments and values are described by TYPE.
8247 If that type exists already, reuse it.
8248 TYPE must be a FUNCTION_TYPE node. */
8249
8250 tree
8251 build_method_type (tree basetype, tree type)
8252 {
8253 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8254
8255 return build_method_type_directly (basetype,
8256 TREE_TYPE (type),
8257 TYPE_ARG_TYPES (type));
8258 }
8259
8260 /* Construct, lay out and return the type of offsets to a value
8261 of type TYPE, within an object of type BASETYPE.
8262 If a suitable offset type exists already, reuse it. */
8263
8264 tree
8265 build_offset_type (tree basetype, tree type)
8266 {
8267 tree t;
8268 hashval_t hashcode = 0;
8269
8270 /* Make a node of the sort we want. */
8271 t = make_node (OFFSET_TYPE);
8272
8273 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8274 TREE_TYPE (t) = type;
8275
8276 /* If we already have such a type, use the old one. */
8277 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8278 hashcode = iterative_hash_object (TYPE_HASH (type), hashcode);
8279 t = type_hash_canon (hashcode, t);
8280
8281 if (!COMPLETE_TYPE_P (t))
8282 layout_type (t);
8283
8284 if (TYPE_CANONICAL (t) == t)
8285 {
8286 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8287 || TYPE_STRUCTURAL_EQUALITY_P (type))
8288 SET_TYPE_STRUCTURAL_EQUALITY (t);
8289 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8290 || TYPE_CANONICAL (type) != type)
8291 TYPE_CANONICAL (t)
8292 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8293 TYPE_CANONICAL (type));
8294 }
8295
8296 return t;
8297 }
8298
8299 /* Create a complex type whose components are COMPONENT_TYPE. */
8300
8301 tree
8302 build_complex_type (tree component_type)
8303 {
8304 tree t;
8305 hashval_t hashcode;
8306
8307 gcc_assert (INTEGRAL_TYPE_P (component_type)
8308 || SCALAR_FLOAT_TYPE_P (component_type)
8309 || FIXED_POINT_TYPE_P (component_type));
8310
8311 /* Make a node of the sort we want. */
8312 t = make_node (COMPLEX_TYPE);
8313
8314 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8315
8316 /* If we already have such a type, use the old one. */
8317 hashcode = iterative_hash_object (TYPE_HASH (component_type), 0);
8318 t = type_hash_canon (hashcode, t);
8319
8320 if (!COMPLETE_TYPE_P (t))
8321 layout_type (t);
8322
8323 if (TYPE_CANONICAL (t) == t)
8324 {
8325 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8326 SET_TYPE_STRUCTURAL_EQUALITY (t);
8327 else if (TYPE_CANONICAL (component_type) != component_type)
8328 TYPE_CANONICAL (t)
8329 = build_complex_type (TYPE_CANONICAL (component_type));
8330 }
8331
8332 /* We need to create a name, since complex is a fundamental type. */
8333 if (! TYPE_NAME (t))
8334 {
8335 const char *name;
8336 if (component_type == char_type_node)
8337 name = "complex char";
8338 else if (component_type == signed_char_type_node)
8339 name = "complex signed char";
8340 else if (component_type == unsigned_char_type_node)
8341 name = "complex unsigned char";
8342 else if (component_type == short_integer_type_node)
8343 name = "complex short int";
8344 else if (component_type == short_unsigned_type_node)
8345 name = "complex short unsigned int";
8346 else if (component_type == integer_type_node)
8347 name = "complex int";
8348 else if (component_type == unsigned_type_node)
8349 name = "complex unsigned int";
8350 else if (component_type == long_integer_type_node)
8351 name = "complex long int";
8352 else if (component_type == long_unsigned_type_node)
8353 name = "complex long unsigned int";
8354 else if (component_type == long_long_integer_type_node)
8355 name = "complex long long int";
8356 else if (component_type == long_long_unsigned_type_node)
8357 name = "complex long long unsigned int";
8358 else
8359 name = 0;
8360
8361 if (name != 0)
8362 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8363 get_identifier (name), t);
8364 }
8365
8366 return build_qualified_type (t, TYPE_QUALS (component_type));
8367 }
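
/* For example,

     tree ctype = build_complex_type (double_type_node);

   yields the COMPLEX_TYPE whose components are double; no name is assigned
   in that case, since the table above only covers integer component
   types.  */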
8368
8369 /* If TYPE is a real or complex floating-point type and the target
8370 does not directly support arithmetic on TYPE then return the wider
8371 type to be used for arithmetic on TYPE. Otherwise, return
8372 NULL_TREE. */
8373
8374 tree
8375 excess_precision_type (tree type)
8376 {
8377 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8378 {
8379 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8380 switch (TREE_CODE (type))
8381 {
8382 case REAL_TYPE:
8383 switch (flt_eval_method)
8384 {
8385 case 1:
8386 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8387 return double_type_node;
8388 break;
8389 case 2:
8390 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8391 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8392 return long_double_type_node;
8393 break;
8394 default:
8395 gcc_unreachable ();
8396 }
8397 break;
8398 case COMPLEX_TYPE:
8399 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8400 return NULL_TREE;
8401 switch (flt_eval_method)
8402 {
8403 case 1:
8404 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8405 return complex_double_type_node;
8406 break;
8407 case 2:
8408 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8409 || (TYPE_MODE (TREE_TYPE (type))
8410 == TYPE_MODE (double_type_node)))
8411 return complex_long_double_type_node;
8412 break;
8413 default:
8414 gcc_unreachable ();
8415 }
8416 break;
8417 default:
8418 break;
8419 }
8420 }
8421 return NULL_TREE;
8422 }
8423 \f
8424 /* Return OP, stripped of any conversions to wider types as much as is safe.
8425 Converting the value back to OP's type makes a value equivalent to OP.
8426
8427 If FOR_TYPE is nonzero, we return a value which, if converted to
8428 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8429
8430 OP must have integer, real or enumeral type. Pointers are not allowed!
8431
8432 There are some cases where the obvious value we could return
8433 would regenerate to OP if converted to OP's type,
8434 but would not extend like OP to wider types.
8435 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8436 For example, if OP is (unsigned short)(signed char)-1,
8437 we avoid returning (signed char)-1 if FOR_TYPE is int,
8438 even though extending that to an unsigned short would regenerate OP,
8439 since the result of extending (signed char)-1 to (int)
8440 is different from (int) OP. */
8441
8442 tree
8443 get_unwidened (tree op, tree for_type)
8444 {
8445 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8446 tree type = TREE_TYPE (op);
8447 unsigned final_prec
8448 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8449 int uns
8450 = (for_type != 0 && for_type != type
8451 && final_prec > TYPE_PRECISION (type)
8452 && TYPE_UNSIGNED (type));
8453 tree win = op;
8454
8455 while (CONVERT_EXPR_P (op))
8456 {
8457 int bitschange;
8458
8459 /* TYPE_PRECISION on vector types has different meaning
8460 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8461 so avoid them here. */
8462 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8463 break;
8464
8465 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8466 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8467
8468 /* Truncations are many-to-one and so cannot be removed, unless we
8469 are later going to truncate down even further. */
8470 if (bitschange < 0
8471 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8472 break;
8473
8474 /* See what's inside this conversion. If we decide to strip it,
8475 we will set WIN. */
8476 op = TREE_OPERAND (op, 0);
8477
8478 /* If we have not stripped any zero-extensions (uns is 0),
8479 we can strip any kind of extension.
8480 If we have previously stripped a zero-extension,
8481 only zero-extensions can safely be stripped.
8482 Any extension can be stripped if the bits it would produce
8483 are all going to be discarded later by truncating to FOR_TYPE. */
8484
8485 if (bitschange > 0)
8486 {
8487 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8488 win = op;
8489 /* TYPE_UNSIGNED says whether this is a zero-extension.
8490 Let's avoid computing it if it does not affect WIN
8491 and if UNS will not be needed again. */
8492 if ((uns
8493 || CONVERT_EXPR_P (op))
8494 && TYPE_UNSIGNED (TREE_TYPE (op)))
8495 {
8496 uns = 1;
8497 win = op;
8498 }
8499 }
8500 }
8501
8502 /* If we finally reach a constant, see if it fits in FOR_TYPE and,
8503 if so, convert it. */
8504 if (for_type
8505 && TREE_CODE (win) == INTEGER_CST
8506 && TREE_TYPE (win) != for_type
8507 && int_fits_type_p (win, for_type))
8508 win = fold_convert (for_type, win);
8509
8510 return win;
8511 }
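
/* For example, if OP is the tree for "(int) c", where c is a variable of
   type char, then

     tree inner = get_unwidened (op, NULL_TREE);

   returns the tree for c itself, since converting c back to int
   regenerates OP.  */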
8512 \f
8513 /* Return OP or a simpler expression for a narrower value
8514 which can be sign-extended or zero-extended to give back OP.
8515 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8516 or 0 if the value should be sign-extended. */
8517
8518 tree
8519 get_narrower (tree op, int *unsignedp_ptr)
8520 {
8521 int uns = 0;
8522 int first = 1;
8523 tree win = op;
8524 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8525
8526 while (TREE_CODE (op) == NOP_EXPR)
8527 {
8528 int bitschange
8529 = (TYPE_PRECISION (TREE_TYPE (op))
8530 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8531
8532 /* Truncations are many-one so cannot be removed. */
8533 if (bitschange < 0)
8534 break;
8535
8536 /* See what's inside this conversion. If we decide to strip it,
8537 we will set WIN. */
8538
8539 if (bitschange > 0)
8540 {
8541 op = TREE_OPERAND (op, 0);
8542 /* An extension: the outermost one can be stripped,
8543 but remember whether it is zero or sign extension. */
8544 if (first)
8545 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8546 /* Otherwise, if a sign extension has been stripped,
8547 only sign extensions can now be stripped;
8548 if a zero extension has been stripped, only zero-extensions. */
8549 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8550 break;
8551 first = 0;
8552 }
8553 else /* bitschange == 0 */
8554 {
8555 /* A change in nominal type can always be stripped, but we must
8556 preserve the unsignedness. */
8557 if (first)
8558 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8559 first = 0;
8560 op = TREE_OPERAND (op, 0);
8561 /* Keep trying to narrow, but don't assign op to win if it
8562 would turn an integral type into something else. */
8563 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8564 continue;
8565 }
8566
8567 win = op;
8568 }
8569
8570 if (TREE_CODE (op) == COMPONENT_REF
8571 /* Since type_for_size always gives an integer type. */
8572 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8573 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8574 /* Ensure field is laid out already. */
8575 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8576 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8577 {
8578 unsigned HOST_WIDE_INT innerprec
8579 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8580 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8581 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8582 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8583
8584 /* We can get this structure field in a narrower type that fits it,
8585 but the resulting extension to its nominal type (a fullword type)
8586 must satisfy the same conditions as for other extensions.
8587
8588 Do this only for fields that are aligned (not bit-fields),
8589 because when bit-field insns will be used there is no
8590 advantage in doing this. */
8591
8592 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8593 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8594 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8595 && type != 0)
8596 {
8597 if (first)
8598 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8599 win = fold_convert (type, op);
8600 }
8601 }
8602
8603 *unsignedp_ptr = uns;
8604 return win;
8605 }
8606 \f
8607 /* Returns true if integer constant C has a value that is permissible
8608 for type TYPE (an INTEGER_TYPE). */
8609
8610 bool
8611 int_fits_type_p (const_tree c, const_tree type)
8612 {
8613 tree type_low_bound, type_high_bound;
8614 bool ok_for_low_bound, ok_for_high_bound;
8615 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8616
8617 retry:
8618 type_low_bound = TYPE_MIN_VALUE (type);
8619 type_high_bound = TYPE_MAX_VALUE (type);
8620
8621 /* If at least one bound of the type is a constant integer, we can check
8622 ourselves and maybe make a decision. If no such decision is possible, but
8623 this type is a subtype, try checking against that. Otherwise, use
8624 fits_to_tree_p, which checks against the precision.
8625
8626 Compute the status for each possibly constant bound, and return
8627 false immediately if we see that the constant does not fit. Use
8628 ok_for_xxx_bound to record whether the corresponding bound is a
8629 constant that the value is known to satisfy. */
8630
8631 /* Check if c >= type_low_bound. */
8632 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8633 {
8634 if (tree_int_cst_lt (c, type_low_bound))
8635 return false;
8636 ok_for_low_bound = true;
8637 }
8638 else
8639 ok_for_low_bound = false;
8640
8641 /* Check if c <= type_high_bound. */
8642 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8643 {
8644 if (tree_int_cst_lt (type_high_bound, c))
8645 return false;
8646 ok_for_high_bound = true;
8647 }
8648 else
8649 ok_for_high_bound = false;
8650
8651 /* If the constant fits both bounds, the result is known. */
8652 if (ok_for_low_bound && ok_for_high_bound)
8653 return true;
8654
8655 /* Perform some generic filtering which may allow making a decision
8656 even if the bounds are not constant. First, negative integers
8657 never fit in unsigned types. */
8658 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8659 return false;
8660
8661 /* Second, narrower types always fit in wider ones. */
8662 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8663 return true;
8664
8665 /* Third, unsigned integers with top bit set never fit signed types. */
8666 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8667 {
8668 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8669 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8670 {
8671 /* When a tree_cst is converted to a wide-int, the precision
8672 is taken from the type. However, if the precision of the
8673 mode underneath the type is smaller than that, it is
8674 possible that the value will not fit. The test below
8675 fails if any bit is set between the sign bit of the
8676 underlying mode and the top bit of the type. */
8677 if (wi::ne_p (wi::zext (c, prec - 1), c))
8678 return false;
8679 }
8680 else if (wi::neg_p (c))
8681 return false;
8682 }
8683
8684 /* If we haven't been able to decide at this point, there is nothing more
8685 we can check ourselves here. Look at the base type if we have one and it
8686 has the same precision. */
8687 if (TREE_CODE (type) == INTEGER_TYPE
8688 && TREE_TYPE (type) != 0
8689 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8690 {
8691 type = TREE_TYPE (type);
8692 goto retry;
8693 }
8694
8695 /* Or to fits_to_tree_p, if nothing else. */
8696 return wi::fits_to_tree_p (c, type);
8697 }
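
/* For example, on a target where unsigned char is 8 bits wide,

     int_fits_type_p (build_int_cst (integer_type_node, 300),
                      unsigned_char_type_node)

   returns false, because 300 exceeds the constant TYPE_MAX_VALUE of 255,
   while the same call with 200 returns true.  */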
8698
8699 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8700 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8701 represented (assuming two's-complement arithmetic) within the bit
8702 precision of the type are returned instead. */
8703
8704 void
8705 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8706 {
8707 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8708 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8709 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8710 else
8711 {
8712 if (TYPE_UNSIGNED (type))
8713 mpz_set_ui (min, 0);
8714 else
8715 {
8716 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8717 wi::to_mpz (mn, min, SIGNED);
8718 }
8719 }
8720
8721 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8722 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8723 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8724 else
8725 {
8726 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8727 wi::to_mpz (mn, max, TYPE_SIGN (type));
8728 }
8729 }
8730
8731 /* Return true if VAR is an automatic variable defined in function FN. */
8732
8733 bool
8734 auto_var_in_fn_p (const_tree var, const_tree fn)
8735 {
8736 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8737 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8738 || TREE_CODE (var) == PARM_DECL)
8739 && ! TREE_STATIC (var))
8740 || TREE_CODE (var) == LABEL_DECL
8741 || TREE_CODE (var) == RESULT_DECL));
8742 }
8743
8744 /* Subprogram of the following function. Called by walk_tree.
8745
8746 Return *TP if it is an automatic variable or parameter of the
8747 function passed in as DATA. */
8748
8749 static tree
8750 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8751 {
8752 tree fn = (tree) data;
8753
8754 if (TYPE_P (*tp))
8755 *walk_subtrees = 0;
8756
8757 else if (DECL_P (*tp)
8758 && auto_var_in_fn_p (*tp, fn))
8759 return *tp;
8760
8761 return NULL_TREE;
8762 }
8763
8764 /* Returns true if T is, contains, or refers to a type with variable
8765 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8766 arguments, but not the return type. If FN is nonzero, only return
8767 true if a modifier of the type or position of FN is a variable or
8768 parameter inside FN.
8769
8770 This concept is more general than that of C99 'variably modified types':
8771 in C99, a struct type is never variably modified because a VLA may not
8772 appear as a structure member. However, in GNU C, code like:
8773
8774 struct S { int i[f()]; };
8775
8776 is valid, and other languages may define similar constructs. */
8777
8778 bool
8779 variably_modified_type_p (tree type, tree fn)
8780 {
8781 tree t;
8782
8783 /* Test if T is either variable (if FN is zero) or an expression containing
8784 a variable in FN. If TYPE isn't gimplified, return true also if
8785 gimplify_one_sizepos would gimplify the expression into a local
8786 variable. */
8787 #define RETURN_TRUE_IF_VAR(T) \
8788 do { tree _t = (T); \
8789 if (_t != NULL_TREE \
8790 && _t != error_mark_node \
8791 && TREE_CODE (_t) != INTEGER_CST \
8792 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8793 && (!fn \
8794 || (!TYPE_SIZES_GIMPLIFIED (type) \
8795 && !is_gimple_sizepos (_t)) \
8796 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8797 return true; } while (0)
8798
8799 if (type == error_mark_node)
8800 return false;
8801
8802 /* If TYPE itself has variable size, it is variably modified. */
8803 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8804 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8805
8806 switch (TREE_CODE (type))
8807 {
8808 case POINTER_TYPE:
8809 case REFERENCE_TYPE:
8810 case VECTOR_TYPE:
8811 if (variably_modified_type_p (TREE_TYPE (type), fn))
8812 return true;
8813 break;
8814
8815 case FUNCTION_TYPE:
8816 case METHOD_TYPE:
8817 /* If TYPE is a function type, it is variably modified if the
8818 return type is variably modified. */
8819 if (variably_modified_type_p (TREE_TYPE (type), fn))
8820 return true;
8821 break;
8822
8823 case INTEGER_TYPE:
8824 case REAL_TYPE:
8825 case FIXED_POINT_TYPE:
8826 case ENUMERAL_TYPE:
8827 case BOOLEAN_TYPE:
8828 /* Scalar types are variably modified if their end points
8829 aren't constant. */
8830 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8831 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8832 break;
8833
8834 case RECORD_TYPE:
8835 case UNION_TYPE:
8836 case QUAL_UNION_TYPE:
8837 /* We can't see if any of the fields are variably-modified by the
8838 definition we normally use, since that would produce infinite
8839 recursion via pointers. */
8840 /* This is variably modified if some field's type is. */
8841 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8842 if (TREE_CODE (t) == FIELD_DECL)
8843 {
8844 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8845 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8846 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8847
8848 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8849 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8850 }
8851 break;
8852
8853 case ARRAY_TYPE:
8854 /* Do not call ourselves to avoid infinite recursion. This is
8855 variably modified if the element type is. */
8856 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8857 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8858 break;
8859
8860 default:
8861 break;
8862 }
8863
8864 /* The current language may have other cases to check, but in general,
8865 all other types are not variably modified. */
8866 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8867
8868 #undef RETURN_TRUE_IF_VAR
8869 }
8870
8871 /* Given a DECL or TYPE, return the scope in which it was declared, or
8872 NULL_TREE if there is no containing scope. */
8873
8874 tree
8875 get_containing_scope (const_tree t)
8876 {
8877 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8878 }
8879
8880 /* Return the innermost context enclosing DECL that is
8881 a FUNCTION_DECL, or zero if none. */
8882
8883 tree
8884 decl_function_context (const_tree decl)
8885 {
8886 tree context;
8887
8888 if (TREE_CODE (decl) == ERROR_MARK)
8889 return 0;
8890
8891 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8892 where we look up the function at runtime. Such functions always take
8893 a first argument of type 'pointer to real context'.
8894
8895 C++ should really be fixed to use DECL_CONTEXT for the real context,
8896 and use something else for the "virtual context". */
8897 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8898 context
8899 = TYPE_MAIN_VARIANT
8900 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8901 else
8902 context = DECL_CONTEXT (decl);
8903
8904 while (context && TREE_CODE (context) != FUNCTION_DECL)
8905 {
8906 if (TREE_CODE (context) == BLOCK)
8907 context = BLOCK_SUPERCONTEXT (context);
8908 else
8909 context = get_containing_scope (context);
8910 }
8911
8912 return context;
8913 }
8914
8915 /* Return the innermost context enclosing DECL that is
8916 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8917 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8918
8919 tree
8920 decl_type_context (const_tree decl)
8921 {
8922 tree context = DECL_CONTEXT (decl);
8923
8924 while (context)
8925 switch (TREE_CODE (context))
8926 {
8927 case NAMESPACE_DECL:
8928 case TRANSLATION_UNIT_DECL:
8929 return NULL_TREE;
8930
8931 case RECORD_TYPE:
8932 case UNION_TYPE:
8933 case QUAL_UNION_TYPE:
8934 return context;
8935
8936 case TYPE_DECL:
8937 case FUNCTION_DECL:
8938 context = DECL_CONTEXT (context);
8939 break;
8940
8941 case BLOCK:
8942 context = BLOCK_SUPERCONTEXT (context);
8943 break;
8944
8945 default:
8946 gcc_unreachable ();
8947 }
8948
8949 return NULL_TREE;
8950 }
8951
8952 /* CALL is a CALL_EXPR. Return the declaration for the function
8953 called, or NULL_TREE if the called function cannot be
8954 determined. */
8955
8956 tree
8957 get_callee_fndecl (const_tree call)
8958 {
8959 tree addr;
8960
8961 if (call == error_mark_node)
8962 return error_mark_node;
8963
8964 /* It's invalid to call this function with anything but a
8965 CALL_EXPR. */
8966 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8967
8968 /* The first operand to the CALL is the address of the function
8969 called. */
8970 addr = CALL_EXPR_FN (call);
8971
8972 /* If there is no function, return early. */
8973 if (addr == NULL_TREE)
8974 return NULL_TREE;
8975
8976 STRIP_NOPS (addr);
8977
8978 /* If this is a readonly function pointer, extract its initial value. */
8979 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8980 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8981 && DECL_INITIAL (addr))
8982 addr = DECL_INITIAL (addr);
8983
8984 /* If the address is just `&f' for some function `f', then we know
8985 that `f' is being called. */
8986 if (TREE_CODE (addr) == ADDR_EXPR
8987 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8988 return TREE_OPERAND (addr, 0);
8989
8990 /* We couldn't figure out what was being called. */
8991 return NULL_TREE;
8992 }
8993
8994 /* Print debugging information about tree nodes generated during the compile,
8995 and any language-specific information. */
8996
8997 void
8998 dump_tree_statistics (void)
8999 {
9000 if (GATHER_STATISTICS)
9001 {
9002 int i;
9003 int total_nodes, total_bytes;
9004 fprintf (stderr, "Kind Nodes Bytes\n");
9005 fprintf (stderr, "---------------------------------------\n");
9006 total_nodes = total_bytes = 0;
9007 for (i = 0; i < (int) all_kinds; i++)
9008 {
9009 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9010 tree_node_counts[i], tree_node_sizes[i]);
9011 total_nodes += tree_node_counts[i];
9012 total_bytes += tree_node_sizes[i];
9013 }
9014 fprintf (stderr, "---------------------------------------\n");
9015 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9016 fprintf (stderr, "---------------------------------------\n");
9017 fprintf (stderr, "Code Nodes\n");
9018 fprintf (stderr, "----------------------------\n");
9019 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9020 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9021 tree_code_counts[i]);
9022 fprintf (stderr, "----------------------------\n");
9023 ssanames_print_statistics ();
9024 phinodes_print_statistics ();
9025 }
9026 else
9027 fprintf (stderr, "(No per-node statistics)\n");
9028
9029 print_type_hash_statistics ();
9030 print_debug_expr_statistics ();
9031 print_value_expr_statistics ();
9032 lang_hooks.print_statistics ();
9033 }
9034 \f
9035 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9036
9037 /* Generate a crc32 of the BITS most significant bits of VALUE. */
9038
9039 static unsigned
9040 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9041 {
9042 unsigned ix;
9043
9044 for (ix = bits; ix--; value <<= 1)
9045 {
9046 unsigned feedback;
9047
9048 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9049 chksum <<= 1;
9050 chksum ^= feedback;
9051 }
9052 return chksum;
9053 }
9054
9055 /* Generate a crc32 of a 32-bit unsigned. */
9056
9057 unsigned
9058 crc32_unsigned (unsigned chksum, unsigned value)
9059 {
9060 return crc32_unsigned_bits (chksum, value, 32);
9061 }
9062
9063 /* Generate a crc32 of a byte. */
9064
9065 unsigned
9066 crc32_byte (unsigned chksum, char byte)
9067 {
9068 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9069 }
9070
9071 /* Generate a crc32 of a string. */
9072
9073 unsigned
9074 crc32_string (unsigned chksum, const char *string)
9075 {
9076 do
9077 {
9078 chksum = crc32_byte (chksum, *string);
9079 }
9080 while (*string++);
9081 return chksum;
9082 }
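
/* For example, a checksum covering both a string and a 32-bit value can
   be accumulated by chaining the routines above:

     unsigned chk = crc32_string (0, name);
     chk = crc32_unsigned (chk, value);

   where name and value stand for the data being summed; note that
   crc32_string also folds in the terminating NUL byte.  */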
9083
9084 /* P is a string that will be used in a symbol. Mask out any characters
9085 that are not valid in that context. */
9086
9087 void
9088 clean_symbol_name (char *p)
9089 {
9090 for (; *p; p++)
9091 if (! (ISALNUM (*p)
9092 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9093 || *p == '$'
9094 #endif
9095 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9096 || *p == '.'
9097 #endif
9098 ))
9099 *p = '_';
9100 }
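
/* For example, "foo-bar.c" becomes "foo_bar.c" on targets that allow '.'
   in labels (NO_DOT_IN_LABEL undefined) and "foo_bar_c" otherwise:

     char buf[] = "foo-bar.c";
     clean_symbol_name (buf);  */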
9101
9102 /* Generate a name for a special-purpose function.
9103 The generated name may need to be unique across the whole link.
9104 Changes to this function may also require corresponding changes to
9105 xstrdup_mask_random.
9106 TYPE is some string to identify the purpose of this function to the
9107 linker or collect2; it must start with an uppercase letter,
9108 one of:
9109 I - for constructors
9110 D - for destructors
9111 N - for C++ anonymous namespaces
9112 F - for DWARF unwind frame information. */
9113
9114 tree
9115 get_file_function_name (const char *type)
9116 {
9117 char *buf;
9118 const char *p;
9119 char *q;
9120
9121 /* If we already have a name we know to be unique, just use that. */
9122 if (first_global_object_name)
9123 p = q = ASTRDUP (first_global_object_name);
9124 /* If the target is handling the constructors/destructors, they
9125 will be local to this file and the name is only necessary for
9126 debugging purposes.
9127 We also assign sub_I and sub_D suffixes to constructors called from
9128 the global static constructors. These are always local. */
9129 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9130 || (strncmp (type, "sub_", 4) == 0
9131 && (type[4] == 'I' || type[4] == 'D')))
9132 {
9133 const char *file = main_input_filename;
9134 if (! file)
9135 file = LOCATION_FILE (input_location);
9136 /* Just use the file's basename, because the full pathname
9137 might be quite long. */
9138 p = q = ASTRDUP (lbasename (file));
9139 }
9140 else
9141 {
9142 /* Otherwise, the name must be unique across the entire link.
9143 We don't have anything that we know to be unique to this translation
9144 unit, so use what we do have and throw in some randomness. */
9145 unsigned len;
9146 const char *name = weak_global_object_name;
9147 const char *file = main_input_filename;
9148
9149 if (! name)
9150 name = "";
9151 if (! file)
9152 file = LOCATION_FILE (input_location);
9153
9154 len = strlen (file);
9155 q = (char *) alloca (9 + 17 + len + 1);
9156 memcpy (q, file, len + 1);
9157
9158 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9159 crc32_string (0, name), get_random_seed (false));
9160
9161 p = q;
9162 }
9163
9164 clean_symbol_name (q);
9165 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9166 + strlen (type));
9167
9168 /* Set up the name of the file-level functions we may need.
9169 Use a global object (which is already required to be unique over
9170 the program) rather than the file name (which imposes extra
9171 constraints). */
9172 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9173
9174 return get_identifier (buf);
9175 }
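
/* For example, when first_global_object_name is "foo", the request

     tree id = get_file_function_name ("I");

   yields the identifier "_GLOBAL__I_foo", per FILE_FUNCTION_FORMAT
   above.  */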
9176 \f
9177 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9178
9179 /* Complain that the tree code of NODE does not match the expected 0
9180 terminated list of trailing codes. The trailing code list can be
9181 empty, for a more vague error message. FILE, LINE, and FUNCTION
9182 are of the caller. */
9183
9184 void
9185 tree_check_failed (const_tree node, const char *file,
9186 int line, const char *function, ...)
9187 {
9188 va_list args;
9189 const char *buffer;
9190 unsigned length = 0;
9191 enum tree_code code;
9192
9193 va_start (args, function);
9194 while ((code = (enum tree_code) va_arg (args, int)))
9195 length += 4 + strlen (get_tree_code_name (code));
9196 va_end (args);
9197 if (length)
9198 {
9199 char *tmp;
9200 va_start (args, function);
9201 length += strlen ("expected ");
9202 buffer = tmp = (char *) alloca (length);
9203 length = 0;
9204 while ((code = (enum tree_code) va_arg (args, int)))
9205 {
9206 const char *prefix = length ? " or " : "expected ";
9207
9208 strcpy (tmp + length, prefix);
9209 length += strlen (prefix);
9210 strcpy (tmp + length, get_tree_code_name (code));
9211 length += strlen (get_tree_code_name (code));
9212 }
9213 va_end (args);
9214 }
9215 else
9216 buffer = "unexpected node";
9217
9218 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9219 buffer, get_tree_code_name (TREE_CODE (node)),
9220 function, trim_filename (file), line);
9221 }
9222
9223 /* Complain that the tree code of NODE does match the expected 0
9224 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9225 the caller. */
9226
9227 void
9228 tree_not_check_failed (const_tree node, const char *file,
9229 int line, const char *function, ...)
9230 {
9231 va_list args;
9232 char *buffer;
9233 unsigned length = 0;
9234 enum tree_code code;
9235
9236 va_start (args, function);
9237 while ((code = (enum tree_code) va_arg (args, int)))
9238 length += 4 + strlen (get_tree_code_name (code));
9239 va_end (args);
9240 va_start (args, function);
9241 buffer = (char *) alloca (length);
9242 length = 0;
9243 while ((code = (enum tree_code) va_arg (args, int)))
9244 {
9245 if (length)
9246 {
9247 strcpy (buffer + length, " or ");
9248 length += 4;
9249 }
9250 strcpy (buffer + length, get_tree_code_name (code));
9251 length += strlen (get_tree_code_name (code));
9252 }
9253 va_end (args);
9254
9255 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9256 buffer, get_tree_code_name (TREE_CODE (node)),
9257 function, trim_filename (file), line);
9258 }
9259
9260 /* Similar to tree_check_failed, except that we check for a class of tree
9261 code, given in CL. */
9262
9263 void
9264 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9265 const char *file, int line, const char *function)
9266 {
9267 internal_error
9268 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9269 TREE_CODE_CLASS_STRING (cl),
9270 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9271 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9272 }
9273
9274 /* Similar to tree_check_failed, except that instead of specifying a
9275 dozen codes, use the knowledge that they're all sequential. */
9276
9277 void
9278 tree_range_check_failed (const_tree node, const char *file, int line,
9279 const char *function, enum tree_code c1,
9280 enum tree_code c2)
9281 {
9282 char *buffer;
9283 unsigned length = 0;
9284 unsigned int c;
9285
9286 for (c = c1; c <= c2; ++c)
9287 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9288
9289 length += strlen ("expected ");
9290 buffer = (char *) alloca (length);
9291 length = 0;
9292
9293 for (c = c1; c <= c2; ++c)
9294 {
9295 const char *prefix = length ? " or " : "expected ";
9296
9297 strcpy (buffer + length, prefix);
9298 length += strlen (prefix);
9299 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9300 length += strlen (get_tree_code_name ((enum tree_code) c));
9301 }
9302
9303 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9304 buffer, get_tree_code_name (TREE_CODE (node)),
9305 function, trim_filename (file), line);
9306 }
9307
9308
9309 /* Similar to tree_check_failed, except that we check that a tree does
9310 not belong to the specified class, given in CL. */
9311
9312 void
9313 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9314 const char *file, int line, const char *function)
9315 {
9316 internal_error
9317 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9318 TREE_CODE_CLASS_STRING (cl),
9319 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9320 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9321 }
9322
9323
9324 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9325
9326 void
9327 omp_clause_check_failed (const_tree node, const char *file, int line,
9328 const char *function, enum omp_clause_code code)
9329 {
9330 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9331 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9332 function, trim_filename (file), line);
9333 }
9334
9335
9336 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9337
9338 void
9339 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9340 const char *function, enum omp_clause_code c1,
9341 enum omp_clause_code c2)
9342 {
9343 char *buffer;
9344 unsigned length = 0;
9345 unsigned int c;
9346
9347 for (c = c1; c <= c2; ++c)
9348 length += 4 + strlen (omp_clause_code_name[c]);
9349
9350 length += strlen ("expected ");
9351 buffer = (char *) alloca (length);
9352 length = 0;
9353
9354 for (c = c1; c <= c2; ++c)
9355 {
9356 const char *prefix = length ? " or " : "expected ";
9357
9358 strcpy (buffer + length, prefix);
9359 length += strlen (prefix);
9360 strcpy (buffer + length, omp_clause_code_name[c]);
9361 length += strlen (omp_clause_code_name[c]);
9362 }
9363
9364 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9365 buffer, omp_clause_code_name[TREE_CODE (node)],
9366 function, trim_filename (file), line);
9367 }
9368
9369
9370 #undef DEFTREESTRUCT
9371 #define DEFTREESTRUCT(VAL, NAME) NAME,
9372
9373 static const char *ts_enum_names[] = {
9374 #include "treestruct.def"
9375 };
9376 #undef DEFTREESTRUCT
9377
9378 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9379
9380 /* Similar to tree_class_check_failed, except that we check for
9381 whether CODE contains the tree structure identified by EN. */
9382
9383 void
9384 tree_contains_struct_check_failed (const_tree node,
9385 const enum tree_node_structure_enum en,
9386 const char *file, int line,
9387 const char *function)
9388 {
9389 internal_error
9390 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9391 TS_ENUM_NAME (en),
9392 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9393 }
9394
9395
9396 /* Similar to above, except that the check is for the bounds of the
9397 (dynamically sized) element array of a tree_int_cst. */
9398
9399 void
9400 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9401 const char *function)
9402 {
9403 internal_error
9404 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9405 idx + 1, len, function, trim_filename (file), line);
9406 }
9407
9408 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9409 (dynamically sized) vector. */
9410
9411 void
9412 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9413 const char *function)
9414 {
9415 internal_error
9416 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9417 idx + 1, len, function, trim_filename (file), line);
9418 }
9419
9420 /* Similar to above, except that the check is for the bounds of the operand
9421 vector of an expression node EXP. */
9422
9423 void
9424 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9425 int line, const char *function)
9426 {
9427 enum tree_code code = TREE_CODE (exp);
9428 internal_error
9429 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9430 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9431 function, trim_filename (file), line);
9432 }
9433
9434 /* Similar to above, except that the check is for the number of
9435 operands of an OMP_CLAUSE node. */
9436
9437 void
9438 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9439 int line, const char *function)
9440 {
9441 internal_error
9442 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9443 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9444 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9445 trim_filename (file), line);
9446 }
9447 #endif /* ENABLE_TREE_CHECKING */
9448 \f
9449 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9450 and mapped to the machine mode MODE. Initialize its fields and build
9451 the information necessary for debugging output. */
9452
9453 static tree
9454 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9455 {
9456 tree t;
9457 hashval_t hashcode = 0;
9458
9459 t = make_node (VECTOR_TYPE);
9460 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9461 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9462 SET_TYPE_MODE (t, mode);
9463
9464 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9465 SET_TYPE_STRUCTURAL_EQUALITY (t);
9466 else if (TYPE_CANONICAL (innertype) != innertype
9467 || mode != VOIDmode)
9468 TYPE_CANONICAL (t)
9469 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9470
9471 layout_type (t);
9472
9473 hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode);
9474 hashcode = iterative_hash_host_wide_int (nunits, hashcode);
9475 hashcode = iterative_hash_host_wide_int (mode, hashcode);
9476 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (t)), hashcode);
9477 t = type_hash_canon (hashcode, t);
9478
9479 /* We have built a main variant, based on the main variant of the
9480 inner type. Use it to build the variant we return. */
9481 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9482 && TREE_TYPE (t) != innertype)
9483 return build_type_attribute_qual_variant (t,
9484 TYPE_ATTRIBUTES (innertype),
9485 TYPE_QUALS (innertype));
9486
9487 return t;
9488 }
9489
9490 static tree
9491 make_or_reuse_type (unsigned size, int unsignedp)
9492 {
9493 if (size == INT_TYPE_SIZE)
9494 return unsignedp ? unsigned_type_node : integer_type_node;
9495 if (size == CHAR_TYPE_SIZE)
9496 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9497 if (size == SHORT_TYPE_SIZE)
9498 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9499 if (size == LONG_TYPE_SIZE)
9500 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9501 if (size == LONG_LONG_TYPE_SIZE)
9502 return (unsignedp ? long_long_unsigned_type_node
9503 : long_long_integer_type_node);
9504 if (size == 128 && int128_integer_type_node)
9505 return (unsignedp ? int128_unsigned_type_node
9506 : int128_integer_type_node);
9507
9508 if (unsignedp)
9509 return make_unsigned_type (size);
9510 else
9511 return make_signed_type (size);
9512 }
9513
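/* Illustrative sketch (editorial addition, not part of the original source):
   when the requested width matches one of the standard C type sizes, this
   simply hands back the node created by build_common_tree_nodes below, e.g.

     tree t1 = make_or_reuse_type (INT_TYPE_SIZE, 1);   returns unsigned_type_node
     tree t2 = make_or_reuse_type (24, 0);              no 24-bit standard type,
                                                        so make_signed_type (24)

   Only the fallback path allocates a new type node.  */
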
9514 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9515
9516 static tree
9517 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9518 {
9519 if (satp)
9520 {
9521 if (size == SHORT_FRACT_TYPE_SIZE)
9522 return unsignedp ? sat_unsigned_short_fract_type_node
9523 : sat_short_fract_type_node;
9524 if (size == FRACT_TYPE_SIZE)
9525 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9526 if (size == LONG_FRACT_TYPE_SIZE)
9527 return unsignedp ? sat_unsigned_long_fract_type_node
9528 : sat_long_fract_type_node;
9529 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9530 return unsignedp ? sat_unsigned_long_long_fract_type_node
9531 : sat_long_long_fract_type_node;
9532 }
9533 else
9534 {
9535 if (size == SHORT_FRACT_TYPE_SIZE)
9536 return unsignedp ? unsigned_short_fract_type_node
9537 : short_fract_type_node;
9538 if (size == FRACT_TYPE_SIZE)
9539 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9540 if (size == LONG_FRACT_TYPE_SIZE)
9541 return unsignedp ? unsigned_long_fract_type_node
9542 : long_fract_type_node;
9543 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9544 return unsignedp ? unsigned_long_long_fract_type_node
9545 : long_long_fract_type_node;
9546 }
9547
9548 return make_fract_type (size, unsignedp, satp);
9549 }
9550
9551 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9552
9553 static tree
9554 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9555 {
9556 if (satp)
9557 {
9558 if (size == SHORT_ACCUM_TYPE_SIZE)
9559 return unsignedp ? sat_unsigned_short_accum_type_node
9560 : sat_short_accum_type_node;
9561 if (size == ACCUM_TYPE_SIZE)
9562 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9563 if (size == LONG_ACCUM_TYPE_SIZE)
9564 return unsignedp ? sat_unsigned_long_accum_type_node
9565 : sat_long_accum_type_node;
9566 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9567 return unsignedp ? sat_unsigned_long_long_accum_type_node
9568 : sat_long_long_accum_type_node;
9569 }
9570 else
9571 {
9572 if (size == SHORT_ACCUM_TYPE_SIZE)
9573 return unsignedp ? unsigned_short_accum_type_node
9574 : short_accum_type_node;
9575 if (size == ACCUM_TYPE_SIZE)
9576 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9577 if (size == LONG_ACCUM_TYPE_SIZE)
9578 return unsignedp ? unsigned_long_accum_type_node
9579 : long_accum_type_node;
9580 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9581 return unsignedp ? unsigned_long_long_accum_type_node
9582 : long_long_accum_type_node;
9583 }
9584
9585 return make_accum_type (size, unsignedp, satp);
9586 }
9587
9588
9589 /* Create an atomic variant node for TYPE. This routine is called
9590 during initialization of data types to create the 5 basic atomic
9591 types. The generic build_qualified_type routine requires these to
9592 already be set up in order to function properly, so cannot be
9593 called from there. If ALIGN is non-zero, then ensure alignment is
9594 overridden to this value. */
9595
9596 static tree
9597 build_atomic_base (tree type, unsigned int align)
9598 {
9599 tree t;
9600
9601 /* Make sure it's not already registered. */
9602 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9603 return t;
9604
9605 t = build_variant_type_copy (type);
9606 set_type_quals (t, TYPE_QUAL_ATOMIC);
9607
9608 if (align)
9609 TYPE_ALIGN (t) = align;
9610
9611 return t;
9612 }
9613
9614 /* Create nodes for all integer types (and error_mark_node) using the sizes
9615 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9616 SHORT_DOUBLE specifies whether double should be of the same precision
9617 as float. */
9618
9619 void
9620 build_common_tree_nodes (bool signed_char, bool short_double)
9621 {
9622 error_mark_node = make_node (ERROR_MARK);
9623 TREE_TYPE (error_mark_node) = error_mark_node;
9624
9625 initialize_sizetypes ();
9626
9627 /* Define both `signed char' and `unsigned char'. */
9628 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9629 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9630 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9631 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9632
9633 /* Define `char', which is like either `signed char' or `unsigned char'
9634 but not the same as either. */
9635 char_type_node
9636 = (signed_char
9637 ? make_signed_type (CHAR_TYPE_SIZE)
9638 : make_unsigned_type (CHAR_TYPE_SIZE));
9639 TYPE_STRING_FLAG (char_type_node) = 1;
9640
9641 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9642 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9643 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9644 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9645 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9646 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9647 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9648 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9649 #if HOST_BITS_PER_WIDE_INT >= 64
9650 /* TODO: This isn't correct; at the moment the logic depends on the
9651 host's wide integers instead of the target's.
9652 If there is a target that does not support TImode but has a 128-bit
9653 integer-scalar register, this target check needs to be adjusted. */
9654 if (targetm.scalar_mode_supported_p (TImode))
9655 {
9656 int128_integer_type_node = make_signed_type (128);
9657 int128_unsigned_type_node = make_unsigned_type (128);
9658 }
9659 #endif
9660
9661 /* Define a boolean type. This type only represents boolean values but
9662 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9663 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9664 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9665 TYPE_PRECISION (boolean_type_node) = 1;
9666 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9667
9668 /* Define what type to use for size_t. */
9669 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9670 size_type_node = unsigned_type_node;
9671 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9672 size_type_node = long_unsigned_type_node;
9673 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9674 size_type_node = long_long_unsigned_type_node;
9675 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9676 size_type_node = short_unsigned_type_node;
9677 else
9678 gcc_unreachable ();
9679
9680 /* Fill in the rest of the sized types. Reuse existing type nodes
9681 when possible. */
9682 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9683 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9684 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9685 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9686 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9687
9688 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9689 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9690 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9691 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9692 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9693
9694 /* Don't call build_qualified_type for atomics. That routine does
9695 special processing for atomics, and until they are initialized
9696 it's better not to make that call.
9697
9698 Check to see if there is a target override for atomic types. */
9699
9700 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9701 targetm.atomic_align_for_mode (QImode));
9702 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9703 targetm.atomic_align_for_mode (HImode));
9704 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9705 targetm.atomic_align_for_mode (SImode));
9706 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9707 targetm.atomic_align_for_mode (DImode));
9708 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9709 targetm.atomic_align_for_mode (TImode));
9710
9711 access_public_node = get_identifier ("public");
9712 access_protected_node = get_identifier ("protected");
9713 access_private_node = get_identifier ("private");
9714
9715 /* Define these next since types below may use them. */
9716 integer_zero_node = build_int_cst (integer_type_node, 0);
9717 integer_one_node = build_int_cst (integer_type_node, 1);
9718 integer_three_node = build_int_cst (integer_type_node, 3);
9719 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9720
9721 size_zero_node = size_int (0);
9722 size_one_node = size_int (1);
9723 bitsize_zero_node = bitsize_int (0);
9724 bitsize_one_node = bitsize_int (1);
9725 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9726
9727 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9728 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9729
9730 void_type_node = make_node (VOID_TYPE);
9731 layout_type (void_type_node);
9732
9733 /* We are not going to have real types in C with less than byte alignment,
9734 so we might as well not have any types that claim to have it. */
9735 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9736 TYPE_USER_ALIGN (void_type_node) = 0;
9737
9738 void_node = make_node (VOID_CST);
9739 TREE_TYPE (void_node) = void_type_node;
9740
9741 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9742 layout_type (TREE_TYPE (null_pointer_node));
9743
9744 ptr_type_node = build_pointer_type (void_type_node);
9745 const_ptr_type_node
9746 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9747 fileptr_type_node = ptr_type_node;
9748
9749 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9750
9751 float_type_node = make_node (REAL_TYPE);
9752 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9753 layout_type (float_type_node);
9754
9755 double_type_node = make_node (REAL_TYPE);
9756 if (short_double)
9757 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9758 else
9759 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9760 layout_type (double_type_node);
9761
9762 long_double_type_node = make_node (REAL_TYPE);
9763 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9764 layout_type (long_double_type_node);
9765
9766 float_ptr_type_node = build_pointer_type (float_type_node);
9767 double_ptr_type_node = build_pointer_type (double_type_node);
9768 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9769 integer_ptr_type_node = build_pointer_type (integer_type_node);
9770
9771 /* Fixed size integer types. */
9772 uint16_type_node = build_nonstandard_integer_type (16, true);
9773 uint32_type_node = build_nonstandard_integer_type (32, true);
9774 uint64_type_node = build_nonstandard_integer_type (64, true);
9775
9776 /* Decimal float types. */
9777 dfloat32_type_node = make_node (REAL_TYPE);
9778 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9779 layout_type (dfloat32_type_node);
9780 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9781 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9782
9783 dfloat64_type_node = make_node (REAL_TYPE);
9784 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9785 layout_type (dfloat64_type_node);
9786 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9787 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9788
9789 dfloat128_type_node = make_node (REAL_TYPE);
9790 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9791 layout_type (dfloat128_type_node);
9792 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9793 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9794
9795 complex_integer_type_node = build_complex_type (integer_type_node);
9796 complex_float_type_node = build_complex_type (float_type_node);
9797 complex_double_type_node = build_complex_type (double_type_node);
9798 complex_long_double_type_node = build_complex_type (long_double_type_node);
9799
9800 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9801 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9802 sat_ ## KIND ## _type_node = \
9803 make_sat_signed_ ## KIND ## _type (SIZE); \
9804 sat_unsigned_ ## KIND ## _type_node = \
9805 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9806 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9807 unsigned_ ## KIND ## _type_node = \
9808 make_unsigned_ ## KIND ## _type (SIZE);
9809
9810 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9811 sat_ ## WIDTH ## KIND ## _type_node = \
9812 make_sat_signed_ ## KIND ## _type (SIZE); \
9813 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9814 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9815 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9816 unsigned_ ## WIDTH ## KIND ## _type_node = \
9817 make_unsigned_ ## KIND ## _type (SIZE);
9818
9819 /* Make fixed-point type nodes based on four different widths. */
9820 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9821 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9822 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9823 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9824 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9825
9826 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9827 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9828 NAME ## _type_node = \
9829 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9830 u ## NAME ## _type_node = \
9831 make_or_reuse_unsigned_ ## KIND ## _type \
9832 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9833 sat_ ## NAME ## _type_node = \
9834 make_or_reuse_sat_signed_ ## KIND ## _type \
9835 (GET_MODE_BITSIZE (MODE ## mode)); \
9836 sat_u ## NAME ## _type_node = \
9837 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9838 (GET_MODE_BITSIZE (U ## MODE ## mode));
9839
9840 /* Fixed-point type and mode nodes. */
9841 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9842 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9843 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9844 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9845 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9846 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9847 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9848 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9849 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9850 MAKE_FIXED_MODE_NODE (accum, da, DA)
9851 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9852
9853 {
9854 tree t = targetm.build_builtin_va_list ();
9855
9856 /* Many back-ends define record types without setting TYPE_NAME.
9857 If we copied the record type here, we'd keep the original
9858 record type without a name. This breaks name mangling. So,
9859 don't copy record types and let c_common_nodes_and_builtins()
9860 declare the type to be __builtin_va_list. */
9861 if (TREE_CODE (t) != RECORD_TYPE)
9862 t = build_variant_type_copy (t);
9863
9864 va_list_type_node = t;
9865 }
9866 }
9867
9868 /* Modify DECL for the given flags.
9869 The TM_PURE attribute is set only on types, so the function will modify
9870 DECL's type when ECF_TM_PURE is used. */
9871
9872 void
9873 set_call_expr_flags (tree decl, int flags)
9874 {
9875 if (flags & ECF_NOTHROW)
9876 TREE_NOTHROW (decl) = 1;
9877 if (flags & ECF_CONST)
9878 TREE_READONLY (decl) = 1;
9879 if (flags & ECF_PURE)
9880 DECL_PURE_P (decl) = 1;
9881 if (flags & ECF_LOOPING_CONST_OR_PURE)
9882 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9883 if (flags & ECF_NOVOPS)
9884 DECL_IS_NOVOPS (decl) = 1;
9885 if (flags & ECF_NORETURN)
9886 TREE_THIS_VOLATILE (decl) = 1;
9887 if (flags & ECF_MALLOC)
9888 DECL_IS_MALLOC (decl) = 1;
9889 if (flags & ECF_RETURNS_TWICE)
9890 DECL_IS_RETURNS_TWICE (decl) = 1;
9891 if (flags & ECF_LEAF)
9892 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9893 NULL, DECL_ATTRIBUTES (decl));
9894 if ((flags & ECF_TM_PURE) && flag_tm)
9895 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9896 /* Looping const or pure is implied by noreturn.
9897 There is currently no way to declare looping const or looping pure alone. */
9898 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9899 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9900 }
9901
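/* Usage sketch (editorial addition): a typical caller marks a freshly built
   FUNCTION_DECL with a combination of ECF_* flags, for instance

     set_call_expr_flags (decl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   which sets TREE_READONLY and TREE_NOTHROW and attaches the "leaf"
   attribute, exactly as the branches above show.  DECL here stands for
   whatever declaration the caller has already created.  */
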
9902
9903 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9904
9905 static void
9906 local_define_builtin (const char *name, tree type, enum built_in_function code,
9907 const char *library_name, int ecf_flags)
9908 {
9909 tree decl;
9910
9911 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9912 library_name, NULL_TREE);
9913 set_call_expr_flags (decl, ecf_flags);
9914
9915 set_builtin_decl (code, decl, true);
9916 }
9917
9918 /* Call this function after instantiating all builtins that the language
9919 front end cares about. This will build the rest of the builtins that
9920 are relied upon by the tree optimizers and the middle-end. */
9921
9922 void
9923 build_common_builtin_nodes (void)
9924 {
9925 tree tmp, ftype;
9926 int ecf_flags;
9927
9928 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9929 {
9930 ftype = build_function_type (void_type_node, void_list_node);
9931 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9932 "__builtin_unreachable",
9933 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9934 | ECF_CONST);
9935 }
9936
9937 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9938 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9939 {
9940 ftype = build_function_type_list (ptr_type_node,
9941 ptr_type_node, const_ptr_type_node,
9942 size_type_node, NULL_TREE);
9943
9944 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9945 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9946 "memcpy", ECF_NOTHROW | ECF_LEAF);
9947 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9948 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9949 "memmove", ECF_NOTHROW | ECF_LEAF);
9950 }
9951
9952 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9953 {
9954 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9955 const_ptr_type_node, size_type_node,
9956 NULL_TREE);
9957 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9958 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9959 }
9960
9961 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9962 {
9963 ftype = build_function_type_list (ptr_type_node,
9964 ptr_type_node, integer_type_node,
9965 size_type_node, NULL_TREE);
9966 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9967 "memset", ECF_NOTHROW | ECF_LEAF);
9968 }
9969
9970 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9971 {
9972 ftype = build_function_type_list (ptr_type_node,
9973 size_type_node, NULL_TREE);
9974 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9975 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9976 }
9977
9978 ftype = build_function_type_list (ptr_type_node, size_type_node,
9979 size_type_node, NULL_TREE);
9980 local_define_builtin ("__builtin_alloca_with_align", ftype,
9981 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9982 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9983
9984 /* If we're checking the stack, `alloca' can throw. */
9985 if (flag_stack_check)
9986 {
9987 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9988 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9989 }
9990
9991 ftype = build_function_type_list (void_type_node,
9992 ptr_type_node, ptr_type_node,
9993 ptr_type_node, NULL_TREE);
9994 local_define_builtin ("__builtin_init_trampoline", ftype,
9995 BUILT_IN_INIT_TRAMPOLINE,
9996 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9997 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9998 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9999 "__builtin_init_heap_trampoline",
10000 ECF_NOTHROW | ECF_LEAF);
10001
10002 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10003 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10004 BUILT_IN_ADJUST_TRAMPOLINE,
10005 "__builtin_adjust_trampoline",
10006 ECF_CONST | ECF_NOTHROW);
10007
10008 ftype = build_function_type_list (void_type_node,
10009 ptr_type_node, ptr_type_node, NULL_TREE);
10010 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10011 BUILT_IN_NONLOCAL_GOTO,
10012 "__builtin_nonlocal_goto",
10013 ECF_NORETURN | ECF_NOTHROW);
10014
10015 ftype = build_function_type_list (void_type_node,
10016 ptr_type_node, ptr_type_node, NULL_TREE);
10017 local_define_builtin ("__builtin_setjmp_setup", ftype,
10018 BUILT_IN_SETJMP_SETUP,
10019 "__builtin_setjmp_setup", ECF_NOTHROW);
10020
10021 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10022 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10023 BUILT_IN_SETJMP_RECEIVER,
10024 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10025
10026 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10027 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10028 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10029
10030 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10031 local_define_builtin ("__builtin_stack_restore", ftype,
10032 BUILT_IN_STACK_RESTORE,
10033 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10034
10035 /* If there's a possibility that we might use the ARM EABI, build the
10036 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10037 if (targetm.arm_eabi_unwinder)
10038 {
10039 ftype = build_function_type_list (void_type_node, NULL_TREE);
10040 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10041 BUILT_IN_CXA_END_CLEANUP,
10042 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10043 }
10044
10045 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10046 local_define_builtin ("__builtin_unwind_resume", ftype,
10047 BUILT_IN_UNWIND_RESUME,
10048 ((targetm_common.except_unwind_info (&global_options)
10049 == UI_SJLJ)
10050 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10051 ECF_NORETURN);
10052
10053 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10054 {
10055 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10056 NULL_TREE);
10057 local_define_builtin ("__builtin_return_address", ftype,
10058 BUILT_IN_RETURN_ADDRESS,
10059 "__builtin_return_address",
10060 ECF_NOTHROW);
10061 }
10062
10063 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10064 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10065 {
10066 ftype = build_function_type_list (void_type_node, ptr_type_node,
10067 ptr_type_node, NULL_TREE);
10068 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10069 local_define_builtin ("__cyg_profile_func_enter", ftype,
10070 BUILT_IN_PROFILE_FUNC_ENTER,
10071 "__cyg_profile_func_enter", 0);
10072 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10073 local_define_builtin ("__cyg_profile_func_exit", ftype,
10074 BUILT_IN_PROFILE_FUNC_EXIT,
10075 "__cyg_profile_func_exit", 0);
10076 }
10077
10078 /* The exception object and filter values from the runtime. The argument
10079 must be zero before exception lowering, i.e. from the front end. After
10080 exception lowering, it will be the region number for the exception
10081 landing pad. These functions are PURE instead of CONST to prevent
10082 them from being hoisted past the exception edge that will initialize
10083 its value in the landing pad. */
10084 ftype = build_function_type_list (ptr_type_node,
10085 integer_type_node, NULL_TREE);
10086 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10087 /* Only use TM_PURE if we have TM language support. */
10088 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10089 ecf_flags |= ECF_TM_PURE;
10090 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10091 "__builtin_eh_pointer", ecf_flags);
10092
10093 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10094 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10095 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10096 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10097
10098 ftype = build_function_type_list (void_type_node,
10099 integer_type_node, integer_type_node,
10100 NULL_TREE);
10101 local_define_builtin ("__builtin_eh_copy_values", ftype,
10102 BUILT_IN_EH_COPY_VALUES,
10103 "__builtin_eh_copy_values", ECF_NOTHROW);
10104
10105 /* Complex multiplication and division. These are handled as builtins
10106 rather than optabs because emit_library_call_value doesn't support
10107 complex. Further, we can do slightly better with folding these
10108 beasties if the real and imaginary parts of the arguments are separate. */
10109 {
10110 int mode;
10111
10112 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10113 {
10114 char mode_name_buf[4], *q;
10115 const char *p;
10116 enum built_in_function mcode, dcode;
10117 tree type, inner_type;
10118 const char *prefix = "__";
10119
10120 if (targetm.libfunc_gnu_prefix)
10121 prefix = "__gnu_";
10122
10123 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10124 if (type == NULL)
10125 continue;
10126 inner_type = TREE_TYPE (type);
10127
10128 ftype = build_function_type_list (type, inner_type, inner_type,
10129 inner_type, inner_type, NULL_TREE);
10130
10131 mcode = ((enum built_in_function)
10132 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10133 dcode = ((enum built_in_function)
10134 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10135
10136 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10137 *q = TOLOWER (*p);
10138 *q = '\0';
10139
10140 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10141 NULL);
10142 local_define_builtin (built_in_names[mcode], ftype, mcode,
10143 built_in_names[mcode],
10144 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10145
10146 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10147 NULL);
10148 local_define_builtin (built_in_names[dcode], ftype, dcode,
10149 built_in_names[dcode],
10150 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10151 }
10152 }
10153 }
10154
10155 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10156 better way.
10157
10158 If we requested a pointer to a vector, build up the pointers that
10159 we stripped off while looking for the inner type. Similarly for
10160 return values from functions.
10161
10162 The argument TYPE is the top of the chain, and BOTTOM is the
10163 new type which we will point to. */
10164
10165 tree
10166 reconstruct_complex_type (tree type, tree bottom)
10167 {
10168 tree inner, outer;
10169
10170 if (TREE_CODE (type) == POINTER_TYPE)
10171 {
10172 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10173 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10174 TYPE_REF_CAN_ALIAS_ALL (type));
10175 }
10176 else if (TREE_CODE (type) == REFERENCE_TYPE)
10177 {
10178 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10179 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10180 TYPE_REF_CAN_ALIAS_ALL (type));
10181 }
10182 else if (TREE_CODE (type) == ARRAY_TYPE)
10183 {
10184 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10185 outer = build_array_type (inner, TYPE_DOMAIN (type));
10186 }
10187 else if (TREE_CODE (type) == FUNCTION_TYPE)
10188 {
10189 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10190 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10191 }
10192 else if (TREE_CODE (type) == METHOD_TYPE)
10193 {
10194 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10195 /* The build_method_type_directly() routine prepends 'this' to the
10196 argument list, so we must compensate by getting rid of it. */
10197 outer
10198 = build_method_type_directly
10199 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10200 inner,
10201 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10202 }
10203 else if (TREE_CODE (type) == OFFSET_TYPE)
10204 {
10205 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10206 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10207 }
10208 else
10209 return bottom;
10210
10211 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10212 TYPE_QUALS (type));
10213 }
10214
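/* Illustrative example (editorial addition): if TYPE is "pointer to array of
   float" and BOTTOM is a vector type such as V4SF, the recursion above strips
   the pointer and array wrappers, substitutes BOTTOM for the innermost float,
   and returns "pointer to array of V4SF", with the original attributes and
   qualifiers restored by build_type_attribute_qual_variant.  */
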
10215 /* Returns a vector tree node given a mode (a vector mode, or an integer mode
10216 whose bitsize is a multiple of the inner type's size) and the inner type. */
10217 tree
10218 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10219 {
10220 int nunits;
10221
10222 switch (GET_MODE_CLASS (mode))
10223 {
10224 case MODE_VECTOR_INT:
10225 case MODE_VECTOR_FLOAT:
10226 case MODE_VECTOR_FRACT:
10227 case MODE_VECTOR_UFRACT:
10228 case MODE_VECTOR_ACCUM:
10229 case MODE_VECTOR_UACCUM:
10230 nunits = GET_MODE_NUNITS (mode);
10231 break;
10232
10233 case MODE_INT:
10234 /* Check that there are no leftover bits. */
10235 gcc_assert (GET_MODE_BITSIZE (mode)
10236 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10237
10238 nunits = GET_MODE_BITSIZE (mode)
10239 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10240 break;
10241
10242 default:
10243 gcc_unreachable ();
10244 }
10245
10246 return make_vector_type (innertype, nunits, mode);
10247 }
10248
10249 /* Similarly, but takes the inner type and number of units, which must be
10250 a power of two. */
10251
10252 tree
10253 build_vector_type (tree innertype, int nunits)
10254 {
10255 return make_vector_type (innertype, nunits, VOIDmode);
10256 }
10257
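/* Usage sketch (editorial addition): the mode can either be supplied or left
   for layout_type to compute, e.g.

     tree v4si = build_vector_type (intSI_type_node, 4);
     tree v4sf = build_vector_type_for_mode (float_type_node, V4SFmode);

   V4SFmode is only an illustration; which vector modes exist depends on the
   target's modes definition.  */
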
10258 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10259
10260 tree
10261 build_opaque_vector_type (tree innertype, int nunits)
10262 {
10263 tree t = make_vector_type (innertype, nunits, VOIDmode);
10264 tree cand;
10265 /* We always build the non-opaque variant before the opaque one,
10266 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10267 cand = TYPE_NEXT_VARIANT (t);
10268 if (cand
10269 && TYPE_VECTOR_OPAQUE (cand)
10270 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10271 return cand;
10272 /* Otherwise build a variant type and make sure to queue it after
10273 the non-opaque type. */
10274 cand = build_distinct_type_copy (t);
10275 TYPE_VECTOR_OPAQUE (cand) = true;
10276 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10277 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10278 TYPE_NEXT_VARIANT (t) = cand;
10279 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10280 return cand;
10281 }
10282
10283
10284 /* Given an initializer INIT, return TRUE if INIT is zero or some
10285 aggregate of zeros. Otherwise return FALSE. */
10286 bool
10287 initializer_zerop (const_tree init)
10288 {
10289 tree elt;
10290
10291 STRIP_NOPS (init);
10292
10293 switch (TREE_CODE (init))
10294 {
10295 case INTEGER_CST:
10296 return integer_zerop (init);
10297
10298 case REAL_CST:
10299 /* ??? Note that this is not correct for C4X float formats. There,
10300 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10301 negative exponent. */
10302 return real_zerop (init)
10303 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10304
10305 case FIXED_CST:
10306 return fixed_zerop (init);
10307
10308 case COMPLEX_CST:
10309 return integer_zerop (init)
10310 || (real_zerop (init)
10311 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10312 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10313
10314 case VECTOR_CST:
10315 {
10316 unsigned i;
10317 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10318 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10319 return false;
10320 return true;
10321 }
10322
10323 case CONSTRUCTOR:
10324 {
10325 unsigned HOST_WIDE_INT idx;
10326
10327 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10328 if (!initializer_zerop (elt))
10329 return false;
10330 return true;
10331 }
10332
10333 case STRING_CST:
10334 {
10335 int i;
10336
10337 /* We need to loop through all elements to handle cases like
10338 "\0" and "\0foobar". */
10339 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10340 if (TREE_STRING_POINTER (init)[i] != '\0')
10341 return false;
10342
10343 return true;
10344 }
10345
10346 default:
10347 return false;
10348 }
10349 }
10350
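/* Illustrative examples (editorial addition):
   - build_int_cst (integer_type_node, 0) satisfies this predicate;
   - a REAL_CST holding -0.0 does not, because negative zero is rejected above;
   - an empty CONSTRUCTOR, or one whose elements all satisfy the predicate,
     does as well.  */
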
10351 /* Check whether VEC consists entirely of equal elements and whether the
10352 number of elements corresponds to the type of VEC.
10353 The function returns the first element of the vector,
10354 or NULL_TREE if the vector is not uniform. */
10355 tree
10356 uniform_vector_p (const_tree vec)
10357 {
10358 tree first, t;
10359 unsigned i;
10360
10361 if (vec == NULL_TREE)
10362 return NULL_TREE;
10363
10364 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10365
10366 if (TREE_CODE (vec) == VECTOR_CST)
10367 {
10368 first = VECTOR_CST_ELT (vec, 0);
10369 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10370 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10371 return NULL_TREE;
10372
10373 return first;
10374 }
10375
10376 else if (TREE_CODE (vec) == CONSTRUCTOR)
10377 {
10378 first = error_mark_node;
10379
10380 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10381 {
10382 if (i == 0)
10383 {
10384 first = t;
10385 continue;
10386 }
10387 if (!operand_equal_p (first, t, 0))
10388 return NULL_TREE;
10389 }
10390 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10391 return NULL_TREE;
10392
10393 return first;
10394 }
10395
10396 return NULL_TREE;
10397 }
10398
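/* Illustrative example (editorial addition): for a VECTOR_CST such as
   { 7, 7, 7, 7 } this returns the element 7, whereas { 1, 2, 3, 4 } and a
   CONSTRUCTOR with fewer elements than TYPE_VECTOR_SUBPARTS both return
   NULL_TREE.  */
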
10399 /* Build an empty statement at location LOC. */
10400
10401 tree
10402 build_empty_stmt (location_t loc)
10403 {
10404 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10405 SET_EXPR_LOCATION (t, loc);
10406 return t;
10407 }
10408
10409
10410 /* Build an OpenMP clause with code CODE. LOC is the location of the
10411 clause. */
10412
10413 tree
10414 build_omp_clause (location_t loc, enum omp_clause_code code)
10415 {
10416 tree t;
10417 int size, length;
10418
10419 length = omp_clause_num_ops[code];
10420 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10421
10422 record_node_allocation_statistics (OMP_CLAUSE, size);
10423
10424 t = (tree) ggc_internal_alloc (size);
10425 memset (t, 0, size);
10426 TREE_SET_CODE (t, OMP_CLAUSE);
10427 OMP_CLAUSE_SET_CODE (t, code);
10428 OMP_CLAUSE_LOCATION (t) = loc;
10429
10430 return t;
10431 }
10432
10433 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10434 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10435 Except for the CODE and operand count field, other storage for the
10436 object is initialized to zeros. */
10437
10438 tree
10439 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10440 {
10441 tree t;
10442 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10443
10444 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10445 gcc_assert (len >= 1);
10446
10447 record_node_allocation_statistics (code, length);
10448
10449 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10450
10451 TREE_SET_CODE (t, code);
10452
10453 /* Can't use TREE_OPERAND to store the length because if checking is
10454 enabled, it will try to check the length before we store it. :-P */
10455 t->exp.operands[0] = build_int_cst (sizetype, len);
10456
10457 return t;
10458 }
10459
10460 /* Helper function for build_call_* functions; build a CALL_EXPR with
10461 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10462 the argument slots. */
10463
10464 static tree
10465 build_call_1 (tree return_type, tree fn, int nargs)
10466 {
10467 tree t;
10468
10469 t = build_vl_exp (CALL_EXPR, nargs + 3);
10470 TREE_TYPE (t) = return_type;
10471 CALL_EXPR_FN (t) = fn;
10472 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10473
10474 return t;
10475 }
10476
10477 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10478 FN and a null static chain slot. NARGS is the number of call arguments
10479 which are specified as "..." arguments. */
10480
10481 tree
10482 build_call_nary (tree return_type, tree fn, int nargs, ...)
10483 {
10484 tree ret;
10485 va_list args;
10486 va_start (args, nargs);
10487 ret = build_call_valist (return_type, fn, nargs, args);
10488 va_end (args);
10489 return ret;
10490 }
10491
10492 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10493 FN and a null static chain slot. NARGS is the number of call arguments
10494 which are specified as a va_list ARGS. */
10495
10496 tree
10497 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10498 {
10499 tree t;
10500 int i;
10501
10502 t = build_call_1 (return_type, fn, nargs);
10503 for (i = 0; i < nargs; i++)
10504 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10505 process_call_operands (t);
10506 return t;
10507 }
10508
10509 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10510 FN and a null static chain slot. NARGS is the number of call arguments
10511 which are specified as a tree array ARGS. */
10512
10513 tree
10514 build_call_array_loc (location_t loc, tree return_type, tree fn,
10515 int nargs, const tree *args)
10516 {
10517 tree t;
10518 int i;
10519
10520 t = build_call_1 (return_type, fn, nargs);
10521 for (i = 0; i < nargs; i++)
10522 CALL_EXPR_ARG (t, i) = args[i];
10523 process_call_operands (t);
10524 SET_EXPR_LOCATION (t, loc);
10525 return t;
10526 }
10527
10528 /* Like build_call_array, but takes a vec. */
10529
10530 tree
10531 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10532 {
10533 tree ret, t;
10534 unsigned int ix;
10535
10536 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10537 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10538 CALL_EXPR_ARG (ret, ix) = t;
10539 process_call_operands (ret);
10540 return ret;
10541 }
10542
10543 /* Conveniently construct a function call expression. FNDECL names the
10544 function to be called and N arguments are passed in the array
10545 ARGARRAY. */
10546
10547 tree
10548 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10549 {
10550 tree fntype = TREE_TYPE (fndecl);
10551 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10552
10553 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10554 }
10555
10556 /* Conveniently construct a function call expression. FNDECL names the
10557 function to be called and the arguments are passed in the vector
10558 VEC. */
10559
10560 tree
10561 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10562 {
10563 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10564 vec_safe_address (vec));
10565 }
10566
10567
10568 /* Conveniently construct a function call expression. FNDECL names the
10569 function to be called, N is the number of arguments, and the "..."
10570 parameters are the argument expressions. */
10571
10572 tree
10573 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10574 {
10575 va_list ap;
10576 tree *argarray = XALLOCAVEC (tree, n);
10577 int i;
10578
10579 va_start (ap, n);
10580 for (i = 0; i < n; i++)
10581 argarray[i] = va_arg (ap, tree);
10582 va_end (ap);
10583 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10584 }
10585
10586 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10587 varargs macros aren't supported by all bootstrap compilers. */
10588
10589 tree
10590 build_call_expr (tree fndecl, int n, ...)
10591 {
10592 va_list ap;
10593 tree *argarray = XALLOCAVEC (tree, n);
10594 int i;
10595
10596 va_start (ap, n);
10597 for (i = 0; i < n; i++)
10598 argarray[i] = va_arg (ap, tree);
10599 va_end (ap);
10600 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10601 }
10602
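/* Usage sketch (editorial addition): given trees DST, SRC and SIZE already
   built by the caller, a memcpy call can be constructed with

     tree call = build_call_expr (builtin_decl_explicit (BUILT_IN_MEMCPY),
                                  3, dst, src, size);

   The helpers above go through fold_builtin_call_array, so the result is
   already folded where possible.  */
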
10603 /* Build an internal call expression. This is just like a CALL_EXPR, except
10604 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10605 internal function call. */
10606
10607 tree
10608 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10609 tree type, int n, ...)
10610 {
10611 va_list ap;
10612 int i;
10613
10614 tree fn = build_call_1 (type, NULL_TREE, n);
10615 va_start (ap, n);
10616 for (i = 0; i < n; i++)
10617 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10618 va_end (ap);
10619 SET_EXPR_LOCATION (fn, loc);
10620 CALL_EXPR_IFN (fn) = ifn;
10621 return fn;
10622 }
10623
10624 /* Create a new constant string literal and return a char* pointer to it.
10625 The STRING_CST value is the LEN characters at STR. */
10626 tree
10627 build_string_literal (int len, const char *str)
10628 {
10629 tree t, elem, index, type;
10630
10631 t = build_string (len, str);
10632 elem = build_type_variant (char_type_node, 1, 0);
10633 index = build_index_type (size_int (len - 1));
10634 type = build_array_type (elem, index);
10635 TREE_TYPE (t) = type;
10636 TREE_CONSTANT (t) = 1;
10637 TREE_READONLY (t) = 1;
10638 TREE_STATIC (t) = 1;
10639
10640 type = build_pointer_type (elem);
10641 t = build1 (ADDR_EXPR, type,
10642 build4 (ARRAY_REF, elem,
10643 t, integer_zero_node, NULL_TREE, NULL_TREE));
10644 return t;
10645 }
10646
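/* Usage sketch (editorial addition): the returned ADDR_EXPR can be passed
   directly as a call argument, e.g.

     const char *msg = "oops";
     tree arg = build_string_literal (strlen (msg) + 1, msg);
     tree call = build_call_expr (builtin_decl_explicit (BUILT_IN_PUTS),
                                  1, arg);

   Callers conventionally include the terminating NUL in LEN.  */
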
10647
10648
10649 /* Return true if T (assumed to be a DECL) must be assigned a memory
10650 location. */
10651
10652 bool
10653 needs_to_live_in_memory (const_tree t)
10654 {
10655 return (TREE_ADDRESSABLE (t)
10656 || is_global_var (t)
10657 || (TREE_CODE (t) == RESULT_DECL
10658 && !DECL_BY_REFERENCE (t)
10659 && aggregate_value_p (t, current_function_decl)));
10660 }
10661
10662 /* Return the value of the constant X, sign-extended. */
10663
10664 HOST_WIDE_INT
10665 int_cst_value (const_tree x)
10666 {
10667 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10668 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10669
10670 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10671 gcc_assert (cst_and_fits_in_hwi (x));
10672
10673 if (bits < HOST_BITS_PER_WIDE_INT)
10674 {
10675 bool negative = ((val >> (bits - 1)) & 1) != 0;
10676 if (negative)
10677 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10678 else
10679 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10680 }
10681
10682 return val;
10683 }
10684
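/* Worked example (editorial addition): for an 8-bit signed type, BITS == 8,
   so the bit pattern 0xff has its sign bit copied upwards and the function
   returns -1, while the pattern 0x7f yields 127.  */
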
10685 /* If TYPE is an integral or pointer type, return an integer type with
10686 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10687 if TYPE is already an integer type of signedness UNSIGNEDP. */
10688
10689 tree
10690 signed_or_unsigned_type_for (int unsignedp, tree type)
10691 {
10692 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10693 return type;
10694
10695 if (TREE_CODE (type) == VECTOR_TYPE)
10696 {
10697 tree inner = TREE_TYPE (type);
10698 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10699 if (!inner2)
10700 return NULL_TREE;
10701 if (inner == inner2)
10702 return type;
10703 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10704 }
10705
10706 if (!INTEGRAL_TYPE_P (type)
10707 && !POINTER_TYPE_P (type)
10708 && TREE_CODE (type) != OFFSET_TYPE)
10709 return NULL_TREE;
10710
10711 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10712 }
10713
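/* Illustrative examples (editorial addition): a type that already has the
   requested signedness is returned unchanged, e.g.

     signed_or_unsigned_type_for (1, unsigned_type_node) == unsigned_type_node

   whereas a signed, pointer or OFFSET_TYPE argument yields an integer type of
   the same precision from build_nonstandard_integer_type, and a vector type
   is rebuilt element-wise via build_vector_type.  */
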
10714 /* If TYPE is an integral or pointer type, return an integer type with
10715 the same precision which is unsigned, or itself if TYPE is already an
10716 unsigned integer type. */
10717
10718 tree
10719 unsigned_type_for (tree type)
10720 {
10721 return signed_or_unsigned_type_for (1, type);
10722 }
10723
10724 /* If TYPE is an integral or pointer type, return an integer type with
10725 the same precision which is signed, or itself if TYPE is already a
10726 signed integer type. */
10727
10728 tree
10729 signed_type_for (tree type)
10730 {
10731 return signed_or_unsigned_type_for (0, type);
10732 }
10733
10734 /* If TYPE is a vector type, return a signed integer vector type with the
10735 same width and number of subparts. Otherwise return boolean_type_node. */
10736
10737 tree
10738 truth_type_for (tree type)
10739 {
10740 if (TREE_CODE (type) == VECTOR_TYPE)
10741 {
10742 tree elem = lang_hooks.types.type_for_size
10743 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10744 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10745 }
10746 else
10747 return boolean_type_node;
10748 }
10749
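/* Example (editorial addition): for a vector of floats this yields an opaque
   vector of equally wide signed integers with the same number of subparts,
   suitable for holding the result of a vector comparison; for any scalar
   type it is simply boolean_type_node.  */
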
10750 /* Returns the largest value obtainable by casting something in INNER type to
10751 OUTER type. */
10752
10753 tree
10754 upper_bound_in_type (tree outer, tree inner)
10755 {
10756 unsigned int det = 0;
10757 unsigned oprec = TYPE_PRECISION (outer);
10758 unsigned iprec = TYPE_PRECISION (inner);
10759 unsigned prec;
10760
10761 /* Compute a unique number for every combination. */
10762 det |= (oprec > iprec) ? 4 : 0;
10763 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10764 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10765
10766 /* Determine the exponent to use. */
10767 switch (det)
10768 {
10769 case 0:
10770 case 1:
10771 /* oprec <= iprec, outer: signed, inner: don't care. */
10772 prec = oprec - 1;
10773 break;
10774 case 2:
10775 case 3:
10776 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10777 prec = oprec;
10778 break;
10779 case 4:
10780 /* oprec > iprec, outer: signed, inner: signed. */
10781 prec = iprec - 1;
10782 break;
10783 case 5:
10784 /* oprec > iprec, outer: signed, inner: unsigned. */
10785 prec = iprec;
10786 break;
10787 case 6:
10788 /* oprec > iprec, outer: unsigned, inner: signed. */
10789 prec = oprec;
10790 break;
10791 case 7:
10792 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10793 prec = iprec;
10794 break;
10795 default:
10796 gcc_unreachable ();
10797 }
10798
10799 return wide_int_to_tree (outer,
10800 wi::mask (prec, false, TYPE_PRECISION (outer)));
10801 }
10802
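/* Worked example (editorial addition): casting signed char (iprec 8) to
   unsigned int (oprec 32) gives det = 4 | 2 | 0 = 6, so prec = oprec and the
   bound is 2^32 - 1, which is what (unsigned int) -1 produces.  Narrowing
   int to signed char gives det = 0 and the bound 2^7 - 1.  */
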
10803 /* Returns the smallest value obtainable by casting something in INNER type to
10804 OUTER type. */
10805
10806 tree
10807 lower_bound_in_type (tree outer, tree inner)
10808 {
10809 unsigned oprec = TYPE_PRECISION (outer);
10810 unsigned iprec = TYPE_PRECISION (inner);
10811
10812 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10813 and obtain 0. */
10814 if (TYPE_UNSIGNED (outer)
10815 /* If we are widening something of an unsigned type, OUTER type
10816 contains all values of INNER type. In particular, both INNER
10817 and OUTER types have zero in common. */
10818 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10819 return build_int_cst (outer, 0);
10820 else
10821 {
10822 /* If we are widening a signed type to another signed type, we
10823 want to obtain -2^(iprec-1). If we are keeping the
10824 precision or narrowing to a signed type, we want to obtain
10825 -2^(oprec-1). */
10826 unsigned prec = oprec > iprec ? iprec : oprec;
10827 return wide_int_to_tree (outer,
10828 wi::mask (prec - 1, true,
10829 TYPE_PRECISION (outer)));
10830 }
10831 }
10832
10833 /* Return nonzero if two operands that are suitable for PHI nodes are
10834 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10835 SSA_NAME or invariant. Note that this is strictly an optimization.
10836 That is, callers of this function can directly call operand_equal_p
10837 and get the same result, only slower. */
10838
10839 int
10840 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10841 {
10842 if (arg0 == arg1)
10843 return 1;
10844 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10845 return 0;
10846 return operand_equal_p (arg0, arg1, 0);
10847 }
10848
10849 /* Returns the number of zeros at the end of the binary representation of X. */
10850
10851 tree
10852 num_ending_zeros (const_tree x)
10853 {
10854 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10855 }
10856
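/* Example (editorial addition): for X == 24 (binary 11000) this returns the
   constant 3, since wi::ctz counts the trailing zero bits.  */
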
10857
10858 #define WALK_SUBTREE(NODE) \
10859 do \
10860 { \
10861 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10862 if (result) \
10863 return result; \
10864 } \
10865 while (0)
10866
10867 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10868 to be walked whenever a type is seen in the tree. The rest of the operands
10869 and the return value are as for walk_tree. */
10870
10871 static tree
10872 walk_type_fields (tree type, walk_tree_fn func, void *data,
10873 struct pointer_set_t *pset, walk_tree_lh lh)
10874 {
10875 tree result = NULL_TREE;
10876
10877 switch (TREE_CODE (type))
10878 {
10879 case POINTER_TYPE:
10880 case REFERENCE_TYPE:
10881 case VECTOR_TYPE:
10882 /* We have to worry about mutually recursive pointers. These can't
10883 be written in C. They can in Ada. It's pathological, but
10884 there's an ACATS test (c38102a) that checks it. Deal with this
10885 by checking if we're pointing to another pointer, that one
10886 points to another pointer, that one does too, and we have no htab.
10887 If so, get a hash table. We check three levels deep to avoid
10888 the cost of the hash table if we don't need one. */
10889 if (POINTER_TYPE_P (TREE_TYPE (type))
10890 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10891 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10892 && !pset)
10893 {
10894 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10895 func, data);
10896 if (result)
10897 return result;
10898
10899 break;
10900 }
10901
10902 /* ... fall through ... */
10903
10904 case COMPLEX_TYPE:
10905 WALK_SUBTREE (TREE_TYPE (type));
10906 break;
10907
10908 case METHOD_TYPE:
10909 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10910
10911 /* Fall through. */
10912
10913 case FUNCTION_TYPE:
10914 WALK_SUBTREE (TREE_TYPE (type));
10915 {
10916 tree arg;
10917
10918 /* We never want to walk into default arguments. */
10919 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10920 WALK_SUBTREE (TREE_VALUE (arg));
10921 }
10922 break;
10923
10924 case ARRAY_TYPE:
10925 /* Don't follow this node's type if it is a pointer, for fear that
10926 we'll have infinite recursion. If we have a PSET, then we
10927 need not fear. */
10928 if (pset
10929 || (!POINTER_TYPE_P (TREE_TYPE (type))
10930 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10931 WALK_SUBTREE (TREE_TYPE (type));
10932 WALK_SUBTREE (TYPE_DOMAIN (type));
10933 break;
10934
10935 case OFFSET_TYPE:
10936 WALK_SUBTREE (TREE_TYPE (type));
10937 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10938 break;
10939
10940 default:
10941 break;
10942 }
10943
10944 return NULL_TREE;
10945 }
10946
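/* Usage sketch (editorial addition, assuming the walk_tree_fn signature from
   tree.h): a callback receives the address of each subtree, a flag it may
   clear to prune the walk, and the caller's DATA pointer.

     static tree
     count_int_csts (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       if (TREE_CODE (*tp) == INTEGER_CST)
         ++*(int *) data;
       return NULL_TREE;        (return non-NULL to stop the walk)
     }

     int n = 0;
     walk_tree_without_duplicates (&expr, count_int_csts, &n);

   EXPR here stands for whatever tree the caller wants to scan.  */
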
10947 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10948 called with the DATA and the address of each sub-tree. If FUNC returns a
10949 non-NULL value, the traversal is stopped, and the value returned by FUNC
10950 is returned. If PSET is non-NULL it is used to record the nodes visited,
10951 and to avoid visiting a node more than once. */
10952
10953 tree
10954 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10955 struct pointer_set_t *pset, walk_tree_lh lh)
10956 {
10957 enum tree_code code;
10958 int walk_subtrees;
10959 tree result;
10960
10961 #define WALK_SUBTREE_TAIL(NODE) \
10962 do \
10963 { \
10964 tp = & (NODE); \
10965 goto tail_recurse; \
10966 } \
10967 while (0)
10968
10969 tail_recurse:
10970 /* Skip empty subtrees. */
10971 if (!*tp)
10972 return NULL_TREE;
10973
10974 /* Don't walk the same tree twice, if the user has requested
10975 that we avoid doing so. */
10976 if (pset && pointer_set_insert (pset, *tp))
10977 return NULL_TREE;
10978
10979 /* Call the function. */
10980 walk_subtrees = 1;
10981 result = (*func) (tp, &walk_subtrees, data);
10982
10983 /* If we found something, return it. */
10984 if (result)
10985 return result;
10986
10987 code = TREE_CODE (*tp);
10988
10989 /* Even if we didn't, FUNC may have decided that there was nothing
10990 interesting below this point in the tree. */
10991 if (!walk_subtrees)
10992 {
10993 /* But we still need to check our siblings. */
10994 if (code == TREE_LIST)
10995 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10996 else if (code == OMP_CLAUSE)
10997 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10998 else
10999 return NULL_TREE;
11000 }
11001
11002 if (lh)
11003 {
11004 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11005 if (result || !walk_subtrees)
11006 return result;
11007 }
11008
11009 switch (code)
11010 {
11011 case ERROR_MARK:
11012 case IDENTIFIER_NODE:
11013 case INTEGER_CST:
11014 case REAL_CST:
11015 case FIXED_CST:
11016 case VECTOR_CST:
11017 case STRING_CST:
11018 case BLOCK:
11019 case PLACEHOLDER_EXPR:
11020 case SSA_NAME:
11021 case FIELD_DECL:
11022 case RESULT_DECL:
11023 /* None of these have subtrees other than those already walked
11024 above. */
11025 break;
11026
11027 case TREE_LIST:
11028 WALK_SUBTREE (TREE_VALUE (*tp));
11029 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11030 break;
11031
11032 case TREE_VEC:
11033 {
11034 int len = TREE_VEC_LENGTH (*tp);
11035
11036 if (len == 0)
11037 break;
11038
11039 /* Walk all elements but the first. */
11040 while (--len)
11041 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11042
11043 /* Now walk the first one as a tail call. */
11044 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11045 }
11046
11047 case COMPLEX_CST:
11048 WALK_SUBTREE (TREE_REALPART (*tp));
11049 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11050
11051 case CONSTRUCTOR:
11052 {
11053 unsigned HOST_WIDE_INT idx;
11054 constructor_elt *ce;
11055
11056 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11057 idx++)
11058 WALK_SUBTREE (ce->value);
11059 }
11060 break;
11061
11062 case SAVE_EXPR:
11063 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11064
11065 case BIND_EXPR:
11066 {
11067 tree decl;
11068 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11069 {
11070 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11071 into declarations that are just mentioned, rather than
11072 declared; they don't really belong to this part of the tree.
11073 And, we can see cycles: the initializer for a declaration
11074 can refer to the declaration itself. */
11075 WALK_SUBTREE (DECL_INITIAL (decl));
11076 WALK_SUBTREE (DECL_SIZE (decl));
11077 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11078 }
11079 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11080 }
11081
11082 case STATEMENT_LIST:
11083 {
11084 tree_stmt_iterator i;
11085 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11086 WALK_SUBTREE (*tsi_stmt_ptr (i));
11087 }
11088 break;
11089
11090 case OMP_CLAUSE:
11091 switch (OMP_CLAUSE_CODE (*tp))
11092 {
11093 case OMP_CLAUSE_PRIVATE:
11094 case OMP_CLAUSE_SHARED:
11095 case OMP_CLAUSE_FIRSTPRIVATE:
11096 case OMP_CLAUSE_COPYIN:
11097 case OMP_CLAUSE_COPYPRIVATE:
11098 case OMP_CLAUSE_FINAL:
11099 case OMP_CLAUSE_IF:
11100 case OMP_CLAUSE_NUM_THREADS:
11101 case OMP_CLAUSE_SCHEDULE:
11102 case OMP_CLAUSE_UNIFORM:
11103 case OMP_CLAUSE_DEPEND:
11104 case OMP_CLAUSE_NUM_TEAMS:
11105 case OMP_CLAUSE_THREAD_LIMIT:
11106 case OMP_CLAUSE_DEVICE:
11107 case OMP_CLAUSE_DIST_SCHEDULE:
11108 case OMP_CLAUSE_SAFELEN:
11109 case OMP_CLAUSE_SIMDLEN:
11110 case OMP_CLAUSE__LOOPTEMP_:
11111 case OMP_CLAUSE__SIMDUID_:
11112 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11113 /* FALLTHRU */
11114
11115 case OMP_CLAUSE_NOWAIT:
11116 case OMP_CLAUSE_ORDERED:
11117 case OMP_CLAUSE_DEFAULT:
11118 case OMP_CLAUSE_UNTIED:
11119 case OMP_CLAUSE_MERGEABLE:
11120 case OMP_CLAUSE_PROC_BIND:
11121 case OMP_CLAUSE_INBRANCH:
11122 case OMP_CLAUSE_NOTINBRANCH:
11123 case OMP_CLAUSE_FOR:
11124 case OMP_CLAUSE_PARALLEL:
11125 case OMP_CLAUSE_SECTIONS:
11126 case OMP_CLAUSE_TASKGROUP:
11127 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11128
11129 case OMP_CLAUSE_LASTPRIVATE:
11130 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11131 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11132 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11133
11134 case OMP_CLAUSE_COLLAPSE:
11135 {
11136 int i;
11137 for (i = 0; i < 3; i++)
11138 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11139 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11140 }
11141
11142 case OMP_CLAUSE_LINEAR:
11143 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11144 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11145 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11146 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11147
11148 case OMP_CLAUSE_ALIGNED:
11149 case OMP_CLAUSE_FROM:
11150 case OMP_CLAUSE_TO:
11151 case OMP_CLAUSE_MAP:
11152 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11153 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11154 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11155
11156 case OMP_CLAUSE_REDUCTION:
11157 {
11158 int i;
11159 for (i = 0; i < 4; i++)
11160 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11161 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11162 }
11163
11164 default:
11165 gcc_unreachable ();
11166 }
11167 break;
11168
11169 case TARGET_EXPR:
11170 {
11171 int i, len;
11172
11173 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11174 But, we only want to walk once. */
11175 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11176 for (i = 0; i < len; ++i)
11177 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11178 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11179 }
11180
11181 case DECL_EXPR:
11182 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11183 defining. We only want to walk into these fields of a type in this
11184 case and not in the general case of a mere reference to the type.
11185
11186 The criterion is as follows: if the field can be an expression, it
11187 must be walked only here. This should be in keeping with the fields
11188 that are directly gimplified in gimplify_type_sizes in order for the
11189 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11190 variable-sized types.
11191
11192 Note that DECLs get walked as part of processing the BIND_EXPR. */
11193 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11194 {
11195 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11196 if (TREE_CODE (*type_p) == ERROR_MARK)
11197 return NULL_TREE;
11198
11199 /* Call the function for the type. See if it returns anything or
11200 doesn't want us to continue. If we are to continue, walk both
11201 the normal fields and those for the declaration case. */
11202 result = (*func) (type_p, &walk_subtrees, data);
11203 if (result || !walk_subtrees)
11204 return result;
11205
11206 /* But do not walk a pointed-to type since it may itself need to
11207 be walked in the declaration case if it isn't anonymous. */
11208 if (!POINTER_TYPE_P (*type_p))
11209 {
11210 result = walk_type_fields (*type_p, func, data, pset, lh);
11211 if (result)
11212 return result;
11213 }
11214
11215 /* If this is a record type, also walk the fields. */
11216 if (RECORD_OR_UNION_TYPE_P (*type_p))
11217 {
11218 tree field;
11219
11220 for (field = TYPE_FIELDS (*type_p); field;
11221 field = DECL_CHAIN (field))
11222 {
11223 /* We'd like to look at the type of the field, but we can
11224 easily get infinite recursion. So assume it's pointed
11225 to elsewhere in the tree. Also, ignore things that
11226 aren't fields. */
11227 if (TREE_CODE (field) != FIELD_DECL)
11228 continue;
11229
11230 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11231 WALK_SUBTREE (DECL_SIZE (field));
11232 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11233 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11234 WALK_SUBTREE (DECL_QUALIFIER (field));
11235 }
11236 }
11237
11238 /* Same for scalar types. */
11239 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11240 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11241 || TREE_CODE (*type_p) == INTEGER_TYPE
11242 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11243 || TREE_CODE (*type_p) == REAL_TYPE)
11244 {
11245 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11246 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11247 }
11248
11249 WALK_SUBTREE (TYPE_SIZE (*type_p));
11250 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11251 }
11252 /* FALLTHRU */
11253
11254 default:
11255 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11256 {
11257 int i, len;
11258
11259 /* Walk over all the sub-trees of this operand. */
11260 len = TREE_OPERAND_LENGTH (*tp);
11261
11262 /* Go through the subtrees. We need to do this in forward order so
11263 that the scope of a FOR_EXPR is handled properly. */
11264 if (len)
11265 {
11266 for (i = 0; i < len - 1; ++i)
11267 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11268 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11269 }
11270 }
11271 /* If this is a type, walk the needed fields in the type. */
11272 else if (TYPE_P (*tp))
11273 return walk_type_fields (*tp, func, data, pset, lh);
11274 break;
11275 }
11276
11277 /* We didn't find what we were looking for. */
11278 return NULL_TREE;
11279
11280 #undef WALK_SUBTREE_TAIL
11281 }
11282 #undef WALK_SUBTREE
11283
11284 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11285
11286 tree
11287 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11288 walk_tree_lh lh)
11289 {
11290 tree result;
11291 struct pointer_set_t *pset;
11292
11293 pset = pointer_set_create ();
11294 result = walk_tree_1 (tp, func, data, pset, lh);
11295 pointer_set_destroy (pset);
11296 return result;
11297 }
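
/* Editorial usage sketch (not part of the original tree.c): a minimal
   walk_tree callback that counts CALL_EXPR nodes in a hypothetical tree
   BODY. The names count_calls_r, body and n_calls are illustrative
   assumptions; returning NULL_TREE keeps the walk going, and a non-NULL
   return value would stop the walk and be propagated to the caller.

     static tree
     count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                    void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(unsigned int *) data;   // count and keep walking
       return NULL_TREE;
     }

     unsigned int n_calls = 0;
     walk_tree_without_duplicates (&body, count_calls_r, &n_calls);
*/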
11298
11299
11300 tree
11301 tree_block (tree t)
11302 {
11303 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11304
11305 if (IS_EXPR_CODE_CLASS (c))
11306 return LOCATION_BLOCK (t->exp.locus);
11307 gcc_unreachable ();
11308 return NULL;
11309 }
11310
11311 void
11312 tree_set_block (tree t, tree b)
11313 {
11314 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11315
11316 if (IS_EXPR_CODE_CLASS (c))
11317 {
11318 if (b)
11319 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11320 else
11321 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11322 }
11323 else
11324 gcc_unreachable ();
11325 }
11326
11327 /* Create a nameless artificial label and put it in the current
11328 function context. The label has a location of LOC. Returns the
11329 newly created label. */
11330
11331 tree
11332 create_artificial_label (location_t loc)
11333 {
11334 tree lab = build_decl (loc,
11335 LABEL_DECL, NULL_TREE, void_type_node);
11336
11337 DECL_ARTIFICIAL (lab) = 1;
11338 DECL_IGNORED_P (lab) = 1;
11339 DECL_CONTEXT (lab) = current_function_decl;
11340 return lab;
11341 }
11342
11343 /* Given a tree, try to return a useful variable name that we can use
11344 to prefix a temporary that is being assigned the value of the tree.
11345 I.e., given <temp> = &A, return A. */
11346
11347 const char *
11348 get_name (tree t)
11349 {
11350 tree stripped_decl;
11351
11352 stripped_decl = t;
11353 STRIP_NOPS (stripped_decl);
11354 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11355 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11356 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11357 {
11358 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11359 if (!name)
11360 return NULL;
11361 return IDENTIFIER_POINTER (name);
11362 }
11363 else
11364 {
11365 switch (TREE_CODE (stripped_decl))
11366 {
11367 case ADDR_EXPR:
11368 return get_name (TREE_OPERAND (stripped_decl, 0));
11369 default:
11370 return NULL;
11371 }
11372 }
11373 }
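
/* Editorial usage sketch (an assumption about typical use, not from the
   original sources; RHS is a hypothetical expression tree): get_name is
   commonly used to derive a readable prefix for a temporary.

     const char *prefix = get_name (rhs);   // e.g. "a" for &a, or NULL
     tree tmp = create_tmp_var (TREE_TYPE (rhs), prefix ? prefix : "tmp");
*/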
11374
11375 /* Return true if FNTYPE has a variable argument list. */
11376
11377 bool
11378 stdarg_p (const_tree fntype)
11379 {
11380 function_args_iterator args_iter;
11381 tree n = NULL_TREE, t;
11382
11383 if (!fntype)
11384 return false;
11385
11386 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11387 {
11388 n = t;
11389 }
11390
11391 return n != NULL_TREE && n != void_type_node;
11392 }
11393
11394 /* Return true if FNTYPE has a prototype. */
11395
11396 bool
11397 prototype_p (tree fntype)
11398 {
11399 tree t;
11400
11401 gcc_assert (fntype != NULL_TREE);
11402
11403 t = TYPE_ARG_TYPES (fntype);
11404 return (t != NULL_TREE);
11405 }
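
/* Editorial sketch (hypothetical helper, not part of the original file):
   combining the two predicates above to test for a fully prototyped,
   fixed-argument function type.

     static bool
     takes_fixed_args_p (tree fndecl)
     {
       tree fntype = TREE_TYPE (fndecl);   // FUNCTION_TYPE of the decl
       // True for "int f (int)"; false for "int f ()" and "int f (int, ...)".
       return prototype_p (fntype) && !stdarg_p (fntype);
     }
*/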
11406
11407 /* If BLOCK is inlined from an __attribute__((__artificial__))
11408 routine, return a pointer to the location from which it has
11409 been called. */
11410 location_t *
11411 block_nonartificial_location (tree block)
11412 {
11413 location_t *ret = NULL;
11414
11415 while (block && TREE_CODE (block) == BLOCK
11416 && BLOCK_ABSTRACT_ORIGIN (block))
11417 {
11418 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11419
11420 while (TREE_CODE (ao) == BLOCK
11421 && BLOCK_ABSTRACT_ORIGIN (ao)
11422 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11423 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11424
11425 if (TREE_CODE (ao) == FUNCTION_DECL)
11426 {
11427 /* If AO is an artificial inline, point RET to the
11428 call site locus at which it has been inlined and continue
11429 the loop, in case AO's caller is also an artificial
11430 inline. */
11431 if (DECL_DECLARED_INLINE_P (ao)
11432 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11433 ret = &BLOCK_SOURCE_LOCATION (block);
11434 else
11435 break;
11436 }
11437 else if (TREE_CODE (ao) != BLOCK)
11438 break;
11439
11440 block = BLOCK_SUPERCONTEXT (block);
11441 }
11442 return ret;
11443 }
11444
11445
11446 /* If EXP is inlined from an __attribute__((__artificial__))
11447 function, return the location of the original call expression. */
11448
11449 location_t
11450 tree_nonartificial_location (tree exp)
11451 {
11452 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11453
11454 if (loc)
11455 return *loc;
11456 else
11457 return EXPR_LOCATION (exp);
11458 }
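
/* Editorial sketch of the source-level pattern the two functions above
   serve (an assumption about typical use, not from this file; checked_get
   and do_get are hypothetical names):

     __attribute__ ((__artificial__, __always_inline__))
     static inline int checked_get (int *p) { return do_get (p); }

   When a diagnostic is emitted for an expression EXP inlined from
   checked_get, tree_nonartificial_location (EXP) yields the location of
   the call to checked_get rather than a location inside its body, e.g.:

     warning_at (tree_nonartificial_location (exp), 0, "...");
*/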
11459
11460
11461 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11462 nodes. */
11463
11464 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11465
11466 static hashval_t
11467 cl_option_hash_hash (const void *x)
11468 {
11469 const_tree const t = (const_tree) x;
11470 const char *p;
11471 size_t i;
11472 size_t len = 0;
11473 hashval_t hash = 0;
11474
11475 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11476 {
11477 p = (const char *)TREE_OPTIMIZATION (t);
11478 len = sizeof (struct cl_optimization);
11479 }
11480
11481 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11482 {
11483 p = (const char *)TREE_TARGET_OPTION (t);
11484 len = sizeof (struct cl_target_option);
11485 }
11486
11487 else
11488 gcc_unreachable ();
11489
11490 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11491 something else. */
11492 for (i = 0; i < len; i++)
11493 if (p[i])
11494 hash = (hash << 4) ^ ((i << 2) | p[i]);
11495
11496 return hash;
11497 }
11498
11499 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11500 TARGET_OPTION tree node) is the same as that given by *Y, another
11501 tree node of the same kind. */
11502
11503 static int
11504 cl_option_hash_eq (const void *x, const void *y)
11505 {
11506 const_tree const xt = (const_tree) x;
11507 const_tree const yt = (const_tree) y;
11508 const char *xp;
11509 const char *yp;
11510 size_t len;
11511
11512 if (TREE_CODE (xt) != TREE_CODE (yt))
11513 return 0;
11514
11515 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11516 {
11517 xp = (const char *)TREE_OPTIMIZATION (xt);
11518 yp = (const char *)TREE_OPTIMIZATION (yt);
11519 len = sizeof (struct cl_optimization);
11520 }
11521
11522 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11523 {
11524 xp = (const char *)TREE_TARGET_OPTION (xt);
11525 yp = (const char *)TREE_TARGET_OPTION (yt);
11526 len = sizeof (struct cl_target_option);
11527 }
11528
11529 else
11530 gcc_unreachable ();
11531
11532 return (memcmp (xp, yp, len) == 0);
11533 }
11534
11535 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11536
11537 tree
11538 build_optimization_node (struct gcc_options *opts)
11539 {
11540 tree t;
11541 void **slot;
11542
11543 /* Use the cache of optimization nodes. */
11544
11545 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11546 opts);
11547
11548 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11549 t = (tree) *slot;
11550 if (!t)
11551 {
11552 /* Insert this one into the hash table. */
11553 t = cl_optimization_node;
11554 *slot = t;
11555
11556 /* Make a new node for next time round. */
11557 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11558 }
11559
11560 return t;
11561 }
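
/* Editorial sketch (an assumption about intended use, not from the
   original sources): because identical option sets map to the same
   cached node, pointer equality is enough to compare them.

     tree a = build_optimization_node (&global_options);
     tree b = build_optimization_node (&global_options);
     gcc_checking_assert (a == b);   // shared OPTIMIZATION_NODE
*/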
11562
11563 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11564
11565 tree
11566 build_target_option_node (struct gcc_options *opts)
11567 {
11568 tree t;
11569 void **slot;
11570
11571 /* Use the cache of optimization nodes. */
11572
11573 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11574 opts);
11575
11576 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11577 t = (tree) *slot;
11578 if (!t)
11579 {
11580 /* Insert this one into the hash table. */
11581 t = cl_target_option_node;
11582 *slot = t;
11583
11584 /* Make a new node for next time round. */
11585 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11586 }
11587
11588 return t;
11589 }
11590
11591 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11592 Called through htab_traverse. */
11593
11594 static int
11595 prepare_target_option_node_for_pch (void **slot, void *)
11596 {
11597 tree node = (tree) *slot;
11598 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11599 TREE_TARGET_GLOBALS (node) = NULL;
11600 return 1;
11601 }
11602
11603 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11604 so that they aren't saved during PCH writing. */
11605
11606 void
11607 prepare_target_option_nodes_for_pch (void)
11608 {
11609 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11610 NULL);
11611 }
11612
11613 /* Determine the "ultimate origin" of a block. The block may be an inlined
11614 instance of an inlined instance of a block which is local to an inline
11615 function, so we have to trace all of the way back through the origin chain
11616 to find out what sort of node actually served as the original seed for the
11617 given block. */
11618
11619 tree
11620 block_ultimate_origin (const_tree block)
11621 {
11622 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11623
11624 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11625 nodes in the function to point to themselves; ignore that if
11626 we're trying to output the abstract instance of this function. */
11627 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11628 return NULL_TREE;
11629
11630 if (immediate_origin == NULL_TREE)
11631 return NULL_TREE;
11632 else
11633 {
11634 tree ret_val;
11635 tree lookahead = immediate_origin;
11636
11637 do
11638 {
11639 ret_val = lookahead;
11640 lookahead = (TREE_CODE (ret_val) == BLOCK
11641 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11642 }
11643 while (lookahead != NULL && lookahead != ret_val);
11644
11645 /* The block's abstract origin chain may not be the *ultimate* origin of
11646 the block. It could lead to a DECL that has an abstract origin set.
11647 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11648 will give us if it has one). Note that DECL's abstract origins are
11649 supposed to be the most distant ancestor (or so decl_ultimate_origin
11650 claims), so we don't need to loop following the DECL origins. */
11651 if (DECL_P (ret_val))
11652 return DECL_ORIGIN (ret_val);
11653
11654 return ret_val;
11655 }
11656 }
11657
11658 /* Return true iff the conversion in EXP generates no instruction.
11659 Mark it inline so that it is fully inlined into the stripping
11660 functions even though it has two uses. */
11661
11662 static inline bool
11663 tree_nop_conversion (const_tree exp)
11664 {
11665 tree outer_type, inner_type;
11666
11667 if (!CONVERT_EXPR_P (exp)
11668 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11669 return false;
11670 if (TREE_OPERAND (exp, 0) == error_mark_node)
11671 return false;
11672
11673 outer_type = TREE_TYPE (exp);
11674 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11675
11676 if (!inner_type)
11677 return false;
11678
11679 /* Use precision rather than machine mode when we can, which gives
11680 the correct answer even for submode (bit-field) types. */
11681 if ((INTEGRAL_TYPE_P (outer_type)
11682 || POINTER_TYPE_P (outer_type)
11683 || TREE_CODE (outer_type) == OFFSET_TYPE)
11684 && (INTEGRAL_TYPE_P (inner_type)
11685 || POINTER_TYPE_P (inner_type)
11686 || TREE_CODE (inner_type) == OFFSET_TYPE))
11687 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11688
11689 /* Otherwise fall back on comparing machine modes (e.g. for
11690 aggregate types, floats). */
11691 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11692 }
11693
11694 /* Return true iff the conversion in EXP generates no instruction. Don't
11695 consider conversions that change the signedness. */
11696
11697 static bool
11698 tree_sign_nop_conversion (const_tree exp)
11699 {
11700 tree outer_type, inner_type;
11701
11702 if (!tree_nop_conversion (exp))
11703 return false;
11704
11705 outer_type = TREE_TYPE (exp);
11706 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11707
11708 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11709 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11710 }
11711
11712 /* Strip conversions from EXP according to tree_nop_conversion and
11713 return the resulting expression. */
11714
11715 tree
11716 tree_strip_nop_conversions (tree exp)
11717 {
11718 while (tree_nop_conversion (exp))
11719 exp = TREE_OPERAND (exp, 0);
11720 return exp;
11721 }
11722
11723 /* Strip conversions from EXP according to tree_sign_nop_conversion
11724 and return the resulting expression. */
11725
11726 tree
11727 tree_strip_sign_nop_conversions (tree exp)
11728 {
11729 while (tree_sign_nop_conversion (exp))
11730 exp = TREE_OPERAND (exp, 0);
11731 return exp;
11732 }
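
/* Editorial sketch (illustrative, not from the original sources; the
   operand names are hypothetical): conversions are stripped from the
   outside in, and only for as long as each one is a no-op. With X of
   type int on a target where int and unsigned int are 32 bits:

     tree a = tree_strip_nop_conversions (unsigned_cast_of_x);
     // a is X: same precision, only the signedness changed.
     tree b = tree_strip_sign_nop_conversions (unsigned_cast_of_x);
     // b is still the cast: sign-changing conversions are kept here.
     tree c = tree_strip_nop_conversions (long_cast_of_x);
     // c is still the cast on an LP64 target: the precision differs.
*/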
11733
11734 /* Strip any floating point extensions from EXP and return the result. */
11735 tree
11736 strip_float_extensions (tree exp)
11737 {
11738 tree sub, expt, subt;
11739
11740 /* For a floating point constant, look up the narrowest type that can hold
11741 it properly and handle it like (type)(narrowest_type)constant.
11742 This way we can optimize for instance a=a*2.0 where "a" is a float
11743 but 2.0 is a double constant. */
11744 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11745 {
11746 REAL_VALUE_TYPE orig;
11747 tree type = NULL;
11748
11749 orig = TREE_REAL_CST (exp);
11750 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11751 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11752 type = float_type_node;
11753 else if (TYPE_PRECISION (TREE_TYPE (exp))
11754 > TYPE_PRECISION (double_type_node)
11755 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11756 type = double_type_node;
11757 if (type)
11758 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11759 }
11760
11761 if (!CONVERT_EXPR_P (exp))
11762 return exp;
11763
11764 sub = TREE_OPERAND (exp, 0);
11765 subt = TREE_TYPE (sub);
11766 expt = TREE_TYPE (exp);
11767
11768 if (!FLOAT_TYPE_P (subt))
11769 return exp;
11770
11771 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11772 return exp;
11773
11774 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11775 return exp;
11776
11777 return strip_float_extensions (sub);
11778 }
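
/* Editorial sketch (an assumption about typical use, not from this file;
   ARG0/ARG1 are hypothetical operand trees): for a float variable f, the
   tree for (double) f strips back to f, and the double REAL_CST 2.0,
   being exactly representable as float, strips to a float constant;
   together these allow a = a * 2.0 to be evaluated in single precision.

     tree op0 = strip_float_extensions (arg0);  // e.g. f for (double) f
     tree op1 = strip_float_extensions (arg1);  // e.g. 2.0f for 2.0
*/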
11779
11780 /* Strip out all handled components that produce invariant
11781 offsets. */
11782
11783 const_tree
11784 strip_invariant_refs (const_tree op)
11785 {
11786 while (handled_component_p (op))
11787 {
11788 switch (TREE_CODE (op))
11789 {
11790 case ARRAY_REF:
11791 case ARRAY_RANGE_REF:
11792 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11793 || TREE_OPERAND (op, 2) != NULL_TREE
11794 || TREE_OPERAND (op, 3) != NULL_TREE)
11795 return NULL;
11796 break;
11797
11798 case COMPONENT_REF:
11799 if (TREE_OPERAND (op, 2) != NULL_TREE)
11800 return NULL;
11801 break;
11802
11803 default:;
11804 }
11805 op = TREE_OPERAND (op, 0);
11806 }
11807
11808 return op;
11809 }
11810
11811 static GTY(()) tree gcc_eh_personality_decl;
11812
11813 /* Return the GCC personality function decl. */
11814
11815 tree
11816 lhd_gcc_personality (void)
11817 {
11818 if (!gcc_eh_personality_decl)
11819 gcc_eh_personality_decl = build_personality_function ("gcc");
11820 return gcc_eh_personality_decl;
11821 }
11822
11823 /* TARGET is a call target of a GIMPLE call statement
11824 (obtained by gimple_call_fn). Return true if it is an
11825 OBJ_TYPE_REF representing a virtual call of a C++ method.
11826 (As opposed to an OBJ_TYPE_REF representing ObjC calls
11827 through a cast, where the middle-end devirtualization machinery
11828 can't apply.) */
11829
11830 bool
11831 virtual_method_call_p (tree target)
11832 {
11833 if (TREE_CODE (target) != OBJ_TYPE_REF)
11834 return false;
11835 target = TREE_TYPE (target);
11836 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11837 target = TREE_TYPE (target);
11838 if (TREE_CODE (target) == FUNCTION_TYPE)
11839 return false;
11840 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11841 return true;
11842 }
11843
11844 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11845
11846 tree
11847 obj_type_ref_class (tree ref)
11848 {
11849 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11850 ref = TREE_TYPE (ref);
11851 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11852 ref = TREE_TYPE (ref);
11853 /* We look for the type that THIS points to. ObjC also builds
11854 OBJ_TYPE_REF with non-method calls; their first parameter
11855 ID, however, also corresponds to the class type. */
11856 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11857 || TREE_CODE (ref) == FUNCTION_TYPE);
11858 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11859 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11860 return TREE_TYPE (ref);
11861 }
11862
11863 /* Return true if T is in an anonymous namespace. */
11864
11865 bool
11866 type_in_anonymous_namespace_p (const_tree t)
11867 {
11868 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11869 }
11870
11871 /* Try to find a base info of BINFO that would have its field decl at offset
11872 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11873 found, return it; otherwise return NULL_TREE. */
11874
11875 tree
11876 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11877 {
11878 tree type = BINFO_TYPE (binfo);
11879
11880 while (true)
11881 {
11882 HOST_WIDE_INT pos, size;
11883 tree fld;
11884 int i;
11885
11886 if (types_same_for_odr (type, expected_type))
11887 return binfo;
11888 if (offset < 0)
11889 return NULL_TREE;
11890
11891 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11892 {
11893 if (TREE_CODE (fld) != FIELD_DECL)
11894 continue;
11895
11896 pos = int_bit_position (fld);
11897 size = tree_to_uhwi (DECL_SIZE (fld));
11898 if (pos <= offset && (pos + size) > offset)
11899 break;
11900 }
11901 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11902 return NULL_TREE;
11903
11904 if (!DECL_ARTIFICIAL (fld))
11905 {
11906 binfo = TYPE_BINFO (TREE_TYPE (fld));
11907 if (!binfo)
11908 return NULL_TREE;
11909 }
11910 /* Offset 0 indicates the primary base, whose vtable contents are
11911 represented in the binfo for the derived class. */
11912 else if (offset != 0)
11913 {
11914 tree base_binfo, binfo2 = binfo;
11915
11916 /* Find the BINFO corresponding to FLD. This is made a bit harder
11917 by the fact that with virtual inheritance we may need to walk down
11918 the non-virtual inheritance chain. */
11919 while (true)
11920 {
11921 tree containing_binfo = NULL, found_binfo = NULL;
11922 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11923 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11924 {
11925 found_binfo = base_binfo;
11926 break;
11927 }
11928 else
11929 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11930 - tree_to_shwi (BINFO_OFFSET (binfo)))
11931 * BITS_PER_UNIT < pos
11932 /* Rule out types with no virtual methods, or we can get confused
11933 here by zero-sized bases. */
11934 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11935 && (!containing_binfo
11936 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11937 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11938 containing_binfo = base_binfo;
11939 if (found_binfo)
11940 {
11941 binfo = found_binfo;
11942 break;
11943 }
11944 if (!containing_binfo)
11945 return NULL_TREE;
11946 binfo2 = containing_binfo;
11947 }
11948 }
11949
11950 type = TREE_TYPE (fld);
11951 offset -= pos;
11952 }
11953 }
11954
11955 /* Returns true if X is a typedef decl. */
11956
11957 bool
11958 is_typedef_decl (tree x)
11959 {
11960 return (x && TREE_CODE (x) == TYPE_DECL
11961 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11962 }
11963
11964 /* Returns true iff TYPE is a type variant created for a typedef. */
11965
11966 bool
11967 typedef_variant_p (tree type)
11968 {
11969 return is_typedef_decl (TYPE_NAME (type));
11970 }
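
/* Editorial sketch (an assumption about how front ends set this up,
   kept deliberately informal): for the C declaration

     typedef unsigned int uint32;

   the front end builds a TYPE_DECL named "uint32" whose
   DECL_ORIGINAL_TYPE is unsigned int, so is_typedef_decl is true for
   that TYPE_DECL, and typedef_variant_p is true for the type variant
   whose TYPE_NAME it is. */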
11971
11972 /* Warn about a use of an identifier which was marked deprecated. */
11973 void
11974 warn_deprecated_use (tree node, tree attr)
11975 {
11976 const char *msg;
11977
11978 if (node == 0 || !warn_deprecated_decl)
11979 return;
11980
11981 if (!attr)
11982 {
11983 if (DECL_P (node))
11984 attr = DECL_ATTRIBUTES (node);
11985 else if (TYPE_P (node))
11986 {
11987 tree decl = TYPE_STUB_DECL (node);
11988 if (decl)
11989 attr = lookup_attribute ("deprecated",
11990 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11991 }
11992 }
11993
11994 if (attr)
11995 attr = lookup_attribute ("deprecated", attr);
11996
11997 if (attr)
11998 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
11999 else
12000 msg = NULL;
12001
12002 if (DECL_P (node))
12003 {
12004 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
12005 if (msg)
12006 warning (OPT_Wdeprecated_declarations,
12007 "%qD is deprecated (declared at %r%s:%d%R): %s",
12008 node, "locus", xloc.file, xloc.line, msg);
12009 else
12010 warning (OPT_Wdeprecated_declarations,
12011 "%qD is deprecated (declared at %r%s:%d%R)",
12012 node, "locus", xloc.file, xloc.line);
12013 }
12014 else if (TYPE_P (node))
12015 {
12016 tree what = NULL_TREE;
12017 tree decl = TYPE_STUB_DECL (node);
12018
12019 if (TYPE_NAME (node))
12020 {
12021 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12022 what = TYPE_NAME (node);
12023 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12024 && DECL_NAME (TYPE_NAME (node)))
12025 what = DECL_NAME (TYPE_NAME (node));
12026 }
12027
12028 if (decl)
12029 {
12030 expanded_location xloc
12031 = expand_location (DECL_SOURCE_LOCATION (decl));
12032 if (what)
12033 {
12034 if (msg)
12035 warning (OPT_Wdeprecated_declarations,
12036 "%qE is deprecated (declared at %r%s:%d%R): %s",
12037 what, "locus", xloc.file, xloc.line, msg);
12038 else
12039 warning (OPT_Wdeprecated_declarations,
12040 "%qE is deprecated (declared at %r%s:%d%R)",
12041 what, "locus", xloc.file, xloc.line);
12042 }
12043 else
12044 {
12045 if (msg)
12046 warning (OPT_Wdeprecated_declarations,
12047 "type is deprecated (declared at %r%s:%d%R): %s",
12048 "locus", xloc.file, xloc.line, msg);
12049 else
12050 warning (OPT_Wdeprecated_declarations,
12051 "type is deprecated (declared at %r%s:%d%R)",
12052 "locus", xloc.file, xloc.line);
12053 }
12054 }
12055 else
12056 {
12057 if (what)
12058 {
12059 if (msg)
12060 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12061 what, msg);
12062 else
12063 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12064 }
12065 else
12066 {
12067 if (msg)
12068 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12069 msg);
12070 else
12071 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12072 }
12073 }
12074 }
12075 }
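
/* Editorial sketch of the user-level pattern that reaches the function
   above (illustrative only; old_api/new_api are hypothetical names):

     int old_api (void) __attribute__ ((deprecated ("use new_api")));

     void
     caller (void)
     {
       old_api ();   // warns: 'old_api' is deprecated ...: use new_api
     }

   The string argument of the attribute, when present, is the MSG
   extracted above from the attribute's TREE_VALUE. */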
12076
12077 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12078 somewhere in it. */
12079
12080 bool
12081 contains_bitfld_component_ref_p (const_tree ref)
12082 {
12083 while (handled_component_p (ref))
12084 {
12085 if (TREE_CODE (ref) == COMPONENT_REF
12086 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12087 return true;
12088 ref = TREE_OPERAND (ref, 0);
12089 }
12090
12091 return false;
12092 }
12093
12094 /* Try to determine whether a TRY_CATCH expression can fall through.
12095 This is a subroutine of block_may_fallthru. */
12096
12097 static bool
12098 try_catch_may_fallthru (const_tree stmt)
12099 {
12100 tree_stmt_iterator i;
12101
12102 /* If the TRY block can fall through, the whole TRY_CATCH can
12103 fall through. */
12104 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12105 return true;
12106
12107 i = tsi_start (TREE_OPERAND (stmt, 1));
12108 switch (TREE_CODE (tsi_stmt (i)))
12109 {
12110 case CATCH_EXPR:
12111 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12112 catch expression and a body. The whole TRY_CATCH may fall
12113 through iff any of the catch bodies falls through. */
12114 for (; !tsi_end_p (i); tsi_next (&i))
12115 {
12116 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12117 return true;
12118 }
12119 return false;
12120
12121 case EH_FILTER_EXPR:
12122 /* The exception filter expression only matters if there is an
12123 exception. If the exception does not match EH_FILTER_TYPES,
12124 we will execute EH_FILTER_FAILURE, and we will fall through
12125 if that falls through. If the exception does match
12126 EH_FILTER_TYPES, the stack unwinder will continue up the
12127 stack, so we will not fall through. We don't know whether we
12128 will throw an exception which matches EH_FILTER_TYPES or not,
12129 so we just ignore EH_FILTER_TYPES and assume that we might
12130 throw an exception which doesn't match. */
12131 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12132
12133 default:
12134 /* This case represents statements to be executed when an
12135 exception occurs. Those statements are implicitly followed
12136 by a RESX statement to resume execution after the exception.
12137 So in this case the TRY_CATCH never falls through. */
12138 return false;
12139 }
12140 }
12141
12142 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12143 need not be 100% accurate; simply be conservative and return true if we
12144 don't know. This is used only to avoid stupidly generating extra code.
12145 If we're wrong, we'll just delete the extra code later. */
12146
12147 bool
12148 block_may_fallthru (const_tree block)
12149 {
12150 /* This CONST_CAST is okay because expr_last returns its argument
12151 unmodified and we assign it to a const_tree. */
12152 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12153
12154 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12155 {
12156 case GOTO_EXPR:
12157 case RETURN_EXPR:
12158 /* Easy cases. If the last statement of the block implies
12159 control transfer, then we can't fall through. */
12160 return false;
12161
12162 case SWITCH_EXPR:
12163 /* If SWITCH_LABELS is set, this is lowered, and represents a
12164 branch to a selected label and hence cannot fall through.
12165 Otherwise SWITCH_BODY is set, and the switch can fall
12166 through. */
12167 return SWITCH_LABELS (stmt) == NULL_TREE;
12168
12169 case COND_EXPR:
12170 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12171 return true;
12172 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12173
12174 case BIND_EXPR:
12175 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12176
12177 case TRY_CATCH_EXPR:
12178 return try_catch_may_fallthru (stmt);
12179
12180 case TRY_FINALLY_EXPR:
12181 /* The finally clause is always executed after the try clause,
12182 so if it does not fall through, then the try-finally will not
12183 fall through. Otherwise, if the try clause does not fall
12184 through, then when the finally clause falls through it will
12185 resume execution wherever the try clause was going. So the
12186 whole try-finally will only fall through if both the try
12187 clause and the finally clause fall through. */
12188 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12189 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12190
12191 case MODIFY_EXPR:
12192 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12193 stmt = TREE_OPERAND (stmt, 1);
12194 else
12195 return true;
12196 /* FALLTHRU */
12197
12198 case CALL_EXPR:
12199 /* Functions that do not return do not fall through. */
12200 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12201
12202 case CLEANUP_POINT_EXPR:
12203 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12204
12205 case TARGET_EXPR:
12206 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12207
12208 case ERROR_MARK:
12209 return true;
12210
12211 default:
12212 return lang_hooks.block_may_fallthru (stmt);
12213 }
12214 }
12215
12216 /* True if we are using EH to handle cleanups. */
12217 static bool using_eh_for_cleanups_flag = false;
12218
12219 /* This routine is called from front ends to indicate that EH should be used
12220 for cleanups. */
12221 void
12222 using_eh_for_cleanups (void)
12223 {
12224 using_eh_for_cleanups_flag = true;
12225 }
12226
12227 /* Query whether EH is used for cleanups. */
12228 bool
12229 using_eh_for_cleanups_p (void)
12230 {
12231 return using_eh_for_cleanups_flag;
12232 }
12233
12234 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12235 const char *
12236 get_tree_code_name (enum tree_code code)
12237 {
12238 const char *invalid = "<invalid tree code>";
12239
12240 if (code >= MAX_TREE_CODES)
12241 return invalid;
12242
12243 return tree_code_name[code];
12244 }
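
/* Editorial sketch (illustrative): a typical debugging use that stays
   safe even for an out-of-range code value.

     fprintf (stderr, "code: %s\n", get_tree_code_name (TREE_CODE (t)));
*/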
12245
12246 /* Drops the TREE_OVERFLOW flag from T. */
12247
12248 tree
12249 drop_tree_overflow (tree t)
12250 {
12251 gcc_checking_assert (TREE_OVERFLOW (t));
12252
12253 /* For tree codes with a sharing machinery re-build the result. */
12254 if (TREE_CODE (t) == INTEGER_CST)
12255 return wide_int_to_tree (TREE_TYPE (t), t);
12256
12257 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12258 and drop the flag. */
12259 t = copy_node (t);
12260 TREE_OVERFLOW (t) = 0;
12261 return t;
12262 }
12263
12264 /* Given a memory reference expression T, return its base address.
12265 The base address of a memory reference expression is the main
12266 object being referenced. For instance, the base address for
12267 'array[i].fld[j]' is 'array'. You can think of this as stripping
12268 away the offset part from a memory address.
12269
12270 This function calls handled_component_p to strip away all the inner
12271 parts of the memory reference until it reaches the base object. */
12272
12273 tree
12274 get_base_address (tree t)
12275 {
12276 while (handled_component_p (t))
12277 t = TREE_OPERAND (t, 0);
12278
12279 if ((TREE_CODE (t) == MEM_REF
12280 || TREE_CODE (t) == TARGET_MEM_REF)
12281 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12282 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12283
12284 /* ??? Either the alias oracle or all callers need to properly deal
12285 with WITH_SIZE_EXPRs before we can look through those. */
12286 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12287 return NULL_TREE;
12288
12289 return t;
12290 }
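
/* Editorial sketch (illustrative; REF is a hypothetical reference tree):
   for the reference a.b[i].c this returns the VAR_DECL for "a", and for
   a MEM_REF whose address operand is &a it likewise returns "a".

     tree base = get_base_address (ref);
     if (base && DECL_P (base) && TREE_STATIC (base))
       ;   // the access is based on a variable with static storage
*/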
12291
12292 #include "gt-tree.h"