1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "function.h"
42 #include "obstack.h"
43 #include "toplev.h" /* get_random_seed */
44 #include "hashtab.h"
45 #include "inchash.h"
46 #include "filenames.h"
47 #include "output.h"
48 #include "target.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "basic-block.h"
54 #include "bitmap.h"
55 #include "tree-ssa-alias.h"
56 #include "internal-fn.h"
57 #include "gimple-expr.h"
58 #include "is-a.h"
59 #include "gimple.h"
60 #include "gimple-iterator.h"
61 #include "gimplify.h"
62 #include "gimple-ssa.h"
63 #include "cgraph.h"
64 #include "tree-phinodes.h"
65 #include "stringpool.h"
66 #include "tree-ssanames.h"
67 #include "expr.h"
68 #include "tree-dfa.h"
69 #include "params.h"
70 #include "tree-pass.h"
71 #include "langhooks-def.h"
72 #include "diagnostic.h"
73 #include "tree-diagnostic.h"
74 #include "tree-pretty-print.h"
75 #include "except.h"
76 #include "debug.h"
77 #include "intl.h"
78 #include "wide-int.h"
79 #include "builtins.h"
80
81 /* Tree code classes. */
82
83 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
84 #define END_OF_BASE_TREE_CODES tcc_exceptional,
85
86 const enum tree_code_class tree_code_type[] = {
87 #include "all-tree.def"
88 };
89
90 #undef DEFTREECODE
91 #undef END_OF_BASE_TREE_CODES
92
93 /* Table indexed by tree code giving number of expression
94 operands beyond the fixed part of the node structure.
95 Not used for types or decls. */
96
97 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
98 #define END_OF_BASE_TREE_CODES 0,
99
100 const unsigned char tree_code_length[] = {
101 #include "all-tree.def"
102 };
103
104 #undef DEFTREECODE
105 #undef END_OF_BASE_TREE_CODES
106
107 /* Names of tree components.
108 Used for printing out the tree and error messages. */
109 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
110 #define END_OF_BASE_TREE_CODES "@dummy",
111
112 static const char *const tree_code_name[] = {
113 #include "all-tree.def"
114 };
115
116 #undef DEFTREECODE
117 #undef END_OF_BASE_TREE_CODES
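/* Illustrative note, not part of the original source: all-tree.def pulls in
   tree.def followed by the language front ends' tree .def files, so a
   definition such as
       DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)
   expands, under the three DEFTREECODE definitions above, to the entries
   tcc_binary, 2 and "plus_expr" in tree_code_type, tree_code_length and
   tree_code_name respectively.  */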
118
119 /* Each tree code class has an associated string representation.
120 These must correspond to the tree_code_class entries. */
121
122 const char *const tree_code_class_strings[] =
123 {
124 "exceptional",
125 "constant",
126 "type",
127 "declaration",
128 "reference",
129 "comparison",
130 "unary",
131 "binary",
132 "statement",
133 "vl_exp",
134 "expression"
135 };
136
137 /* obstack.[ch] explicitly declined to prototype this. */
138 extern int _obstack_allocated_p (struct obstack *h, void *obj);
139
140 /* Statistics-gathering stuff. */
141
142 static int tree_code_counts[MAX_TREE_CODES];
143 int tree_node_counts[(int) all_kinds];
144 int tree_node_sizes[(int) all_kinds];
145
146 /* Keep in sync with tree.h:enum tree_node_kind. */
147 static const char * const tree_node_kind_names[] = {
148 "decls",
149 "types",
150 "blocks",
151 "stmts",
152 "refs",
153 "exprs",
154 "constants",
155 "identifiers",
156 "vecs",
157 "binfos",
158 "ssa names",
159 "constructors",
160 "random kinds",
161 "lang_decl kinds",
162 "lang_type kinds",
163 "omp clauses",
164 };
165
166 /* Unique id for next decl created. */
167 static GTY(()) int next_decl_uid;
168 /* Unique id for next type created. */
169 static GTY(()) int next_type_uid = 1;
170 /* Unique id for next debug decl created. Use negative numbers,
171 to catch erroneous uses. */
172 static GTY(()) int next_debug_decl_uid;
173
174 /* Since we cannot rehash a type after it is in the table, we have to
175 keep the hash code. */
176
177 struct GTY(()) type_hash {
178 unsigned long hash;
179 tree type;
180 };
181
182 /* Initial size of the hash table (rounded to next prime). */
183 #define TYPE_HASH_INITIAL_SIZE 1000
184
185 /* Now here is the hash table. When recording a type, it is added to
186 the slot whose index is the hash code. Note that the hash table is
187 used for several kinds of types (function types, array types and
188 array index range types, for now). While all these live in the
189 same table, they are completely independent, and the hash code is
190 computed differently for each of these. */
191
192 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
193 htab_t type_hash_table;
194
195 /* Hash table and temporary node for larger integer const values. */
196 static GTY (()) tree int_cst_node;
197 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
198 htab_t int_cst_hash_table;
199
200 /* Hash table for optimization flags and target option flags. Use the same
201 hash table for both sets of options. Nodes for building the current
202 optimization and target option nodes. The assumption is most of the time
203 the options created will already be in the hash table, so we avoid
204 allocating and freeing up a node repeatedly. */
205 static GTY (()) tree cl_optimization_node;
206 static GTY (()) tree cl_target_option_node;
207 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
208 htab_t cl_option_hash_table;
209
210 /* General tree->tree mapping structure for use in hash tables. */
211
212
213 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
214 htab_t debug_expr_for_decl;
215
216 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
217 htab_t value_expr_for_decl;
218
219 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
220 htab_t debug_args_for_decl;
221
222 static void set_type_quals (tree, int);
223 static int type_hash_eq (const void *, const void *);
224 static hashval_t type_hash_hash (const void *);
225 static hashval_t int_cst_hash_hash (const void *);
226 static int int_cst_hash_eq (const void *, const void *);
227 static hashval_t cl_option_hash_hash (const void *);
228 static int cl_option_hash_eq (const void *, const void *);
229 static void print_type_hash_statistics (void);
230 static void print_debug_expr_statistics (void);
231 static void print_value_expr_statistics (void);
232 static int type_hash_marked_p (const void *);
233 static void type_hash_list (const_tree, inchash::hash &);
234 static void attribute_hash_list (const_tree, inchash::hash &);
235
236 tree global_trees[TI_MAX];
237 tree integer_types[itk_none];
238
239 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
240
241 /* Number of operands for each OpenMP clause. */
242 unsigned const char omp_clause_num_ops[] =
243 {
244 0, /* OMP_CLAUSE_ERROR */
245 1, /* OMP_CLAUSE_PRIVATE */
246 1, /* OMP_CLAUSE_SHARED */
247 1, /* OMP_CLAUSE_FIRSTPRIVATE */
248 2, /* OMP_CLAUSE_LASTPRIVATE */
249 4, /* OMP_CLAUSE_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 2, /* OMP_CLAUSE_ALIGNED */
254 1, /* OMP_CLAUSE_DEPEND */
255 1, /* OMP_CLAUSE_UNIFORM */
256 2, /* OMP_CLAUSE_FROM */
257 2, /* OMP_CLAUSE_TO */
258 2, /* OMP_CLAUSE_MAP */
259 1, /* OMP_CLAUSE__LOOPTEMP_ */
260 1, /* OMP_CLAUSE_IF */
261 1, /* OMP_CLAUSE_NUM_THREADS */
262 1, /* OMP_CLAUSE_SCHEDULE */
263 0, /* OMP_CLAUSE_NOWAIT */
264 0, /* OMP_CLAUSE_ORDERED */
265 0, /* OMP_CLAUSE_DEFAULT */
266 3, /* OMP_CLAUSE_COLLAPSE */
267 0, /* OMP_CLAUSE_UNTIED */
268 1, /* OMP_CLAUSE_FINAL */
269 0, /* OMP_CLAUSE_MERGEABLE */
270 1, /* OMP_CLAUSE_DEVICE */
271 1, /* OMP_CLAUSE_DIST_SCHEDULE */
272 0, /* OMP_CLAUSE_INBRANCH */
273 0, /* OMP_CLAUSE_NOTINBRANCH */
274 1, /* OMP_CLAUSE_NUM_TEAMS */
275 1, /* OMP_CLAUSE_THREAD_LIMIT */
276 0, /* OMP_CLAUSE_PROC_BIND */
277 1, /* OMP_CLAUSE_SAFELEN */
278 1, /* OMP_CLAUSE_SIMDLEN */
279 0, /* OMP_CLAUSE_FOR */
280 0, /* OMP_CLAUSE_PARALLEL */
281 0, /* OMP_CLAUSE_SECTIONS */
282 0, /* OMP_CLAUSE_TASKGROUP */
283 1, /* OMP_CLAUSE__SIMDUID_ */
284 };
285
286 const char * const omp_clause_code_name[] =
287 {
288 "error_clause",
289 "private",
290 "shared",
291 "firstprivate",
292 "lastprivate",
293 "reduction",
294 "copyin",
295 "copyprivate",
296 "linear",
297 "aligned",
298 "depend",
299 "uniform",
300 "from",
301 "to",
302 "map",
303 "_looptemp_",
304 "if",
305 "num_threads",
306 "schedule",
307 "nowait",
308 "ordered",
309 "default",
310 "collapse",
311 "untied",
312 "final",
313 "mergeable",
314 "device",
315 "dist_schedule",
316 "inbranch",
317 "notinbranch",
318 "num_teams",
319 "thread_limit",
320 "proc_bind",
321 "safelen",
322 "simdlen",
323 "for",
324 "parallel",
325 "sections",
326 "taskgroup",
327 "_simduid_"
328 };
329
330
331 /* Return the tree node structure used by tree code CODE. */
332
333 static inline enum tree_node_structure_enum
334 tree_node_structure_for_code (enum tree_code code)
335 {
336 switch (TREE_CODE_CLASS (code))
337 {
338 case tcc_declaration:
339 {
340 switch (code)
341 {
342 case FIELD_DECL:
343 return TS_FIELD_DECL;
344 case PARM_DECL:
345 return TS_PARM_DECL;
346 case VAR_DECL:
347 return TS_VAR_DECL;
348 case LABEL_DECL:
349 return TS_LABEL_DECL;
350 case RESULT_DECL:
351 return TS_RESULT_DECL;
352 case DEBUG_EXPR_DECL:
353 return TS_DECL_WRTL;
354 case CONST_DECL:
355 return TS_CONST_DECL;
356 case TYPE_DECL:
357 return TS_TYPE_DECL;
358 case FUNCTION_DECL:
359 return TS_FUNCTION_DECL;
360 case TRANSLATION_UNIT_DECL:
361 return TS_TRANSLATION_UNIT_DECL;
362 default:
363 return TS_DECL_NON_COMMON;
364 }
365 }
366 case tcc_type:
367 return TS_TYPE_NON_COMMON;
368 case tcc_reference:
369 case tcc_comparison:
370 case tcc_unary:
371 case tcc_binary:
372 case tcc_expression:
373 case tcc_statement:
374 case tcc_vl_exp:
375 return TS_EXP;
376 default: /* tcc_constant and tcc_exceptional */
377 break;
378 }
379 switch (code)
380 {
381 /* tcc_constant cases. */
382 case VOID_CST: return TS_TYPED;
383 case INTEGER_CST: return TS_INT_CST;
384 case REAL_CST: return TS_REAL_CST;
385 case FIXED_CST: return TS_FIXED_CST;
386 case COMPLEX_CST: return TS_COMPLEX;
387 case VECTOR_CST: return TS_VECTOR;
388 case STRING_CST: return TS_STRING;
389 /* tcc_exceptional cases. */
390 case ERROR_MARK: return TS_COMMON;
391 case IDENTIFIER_NODE: return TS_IDENTIFIER;
392 case TREE_LIST: return TS_LIST;
393 case TREE_VEC: return TS_VEC;
394 case SSA_NAME: return TS_SSA_NAME;
395 case PLACEHOLDER_EXPR: return TS_COMMON;
396 case STATEMENT_LIST: return TS_STATEMENT_LIST;
397 case BLOCK: return TS_BLOCK;
398 case CONSTRUCTOR: return TS_CONSTRUCTOR;
399 case TREE_BINFO: return TS_BINFO;
400 case OMP_CLAUSE: return TS_OMP_CLAUSE;
401 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
402 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
403
404 default:
405 gcc_unreachable ();
406 }
407 }
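/* Illustrative sketch, not part of the original source: the mapping above
   gives, for example,
       tree_node_structure_for_code (VAR_DECL)  == TS_VAR_DECL
       tree_node_structure_for_code (PLUS_EXPR) == TS_EXP
       tree_node_structure_for_code (TREE_LIST) == TS_LIST
   and initialize_tree_contains_struct below uses this result to mark every
   base structure that each tree code contains.  */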
408
409
410 /* Initialize tree_contains_struct to describe the hierarchy of tree
411 nodes. */
412
413 static void
414 initialize_tree_contains_struct (void)
415 {
416 unsigned i;
417
418 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
419 {
420 enum tree_code code;
421 enum tree_node_structure_enum ts_code;
422
423 code = (enum tree_code) i;
424 ts_code = tree_node_structure_for_code (code);
425
426 /* Mark the TS structure itself. */
427 tree_contains_struct[code][ts_code] = 1;
428
429 /* Mark all the structures that TS is derived from. */
430 switch (ts_code)
431 {
432 case TS_TYPED:
433 case TS_BLOCK:
434 MARK_TS_BASE (code);
435 break;
436
437 case TS_COMMON:
438 case TS_INT_CST:
439 case TS_REAL_CST:
440 case TS_FIXED_CST:
441 case TS_VECTOR:
442 case TS_STRING:
443 case TS_COMPLEX:
444 case TS_SSA_NAME:
445 case TS_CONSTRUCTOR:
446 case TS_EXP:
447 case TS_STATEMENT_LIST:
448 MARK_TS_TYPED (code);
449 break;
450
451 case TS_IDENTIFIER:
452 case TS_DECL_MINIMAL:
453 case TS_TYPE_COMMON:
454 case TS_LIST:
455 case TS_VEC:
456 case TS_BINFO:
457 case TS_OMP_CLAUSE:
458 case TS_OPTIMIZATION:
459 case TS_TARGET_OPTION:
460 MARK_TS_COMMON (code);
461 break;
462
463 case TS_TYPE_WITH_LANG_SPECIFIC:
464 MARK_TS_TYPE_COMMON (code);
465 break;
466
467 case TS_TYPE_NON_COMMON:
468 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
469 break;
470
471 case TS_DECL_COMMON:
472 MARK_TS_DECL_MINIMAL (code);
473 break;
474
475 case TS_DECL_WRTL:
476 case TS_CONST_DECL:
477 MARK_TS_DECL_COMMON (code);
478 break;
479
480 case TS_DECL_NON_COMMON:
481 MARK_TS_DECL_WITH_VIS (code);
482 break;
483
484 case TS_DECL_WITH_VIS:
485 case TS_PARM_DECL:
486 case TS_LABEL_DECL:
487 case TS_RESULT_DECL:
488 MARK_TS_DECL_WRTL (code);
489 break;
490
491 case TS_FIELD_DECL:
492 MARK_TS_DECL_COMMON (code);
493 break;
494
495 case TS_VAR_DECL:
496 MARK_TS_DECL_WITH_VIS (code);
497 break;
498
499 case TS_TYPE_DECL:
500 case TS_FUNCTION_DECL:
501 MARK_TS_DECL_NON_COMMON (code);
502 break;
503
504 case TS_TRANSLATION_UNIT_DECL:
505 MARK_TS_DECL_COMMON (code);
506 break;
507
508 default:
509 gcc_unreachable ();
510 }
511 }
512
513 /* Basic consistency checks for attributes used in fold. */
514 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
515 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
516 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
517 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
518 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
519 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
520 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
521 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
522 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
523 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
524 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
525 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
526 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
527 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
528 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
529 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
530 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
531 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
532 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
533 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
534 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
535 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
536 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
537 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
538 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
539 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
540 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
541 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
542 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
543 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
544 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
545 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
546 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
547 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
548 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
549 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
550 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
551 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
552 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
553 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
554 }
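/* Illustrative sketch, not part of the original source: once the marking
   above is done, structure containment can be tested cheaply, e.g.
       if (CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_WITH_VIS))
         ...
   holds for VAR_DECL, FUNCTION_DECL and TYPE_DECL, which is exactly what
   the gcc_asserts above verify for the codes the fold machinery relies on.  */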
555
556
557 /* Init tree.c. */
558
559 void
560 init_ttree (void)
561 {
562 /* Initialize the hash table of types. */
563 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
564 type_hash_eq, 0);
565
566 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
567 tree_decl_map_eq, 0);
568
569 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
570 tree_decl_map_eq, 0);
571
572 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
573 int_cst_hash_eq, NULL);
574
575 int_cst_node = make_int_cst (1, 1);
576
577 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
578 cl_option_hash_eq, NULL);
579
580 cl_optimization_node = make_node (OPTIMIZATION_NODE);
581 cl_target_option_node = make_node (TARGET_OPTION_NODE);
582
583 /* Initialize the tree_contains_struct array. */
584 initialize_tree_contains_struct ();
585 lang_hooks.init_ts ();
586 }
587
588 \f
589 /* The name of the object as the assembler will see it (but before any
590 translations made by ASM_OUTPUT_LABELREF). Often this is the same
591 as DECL_NAME. It is an IDENTIFIER_NODE. */
592 tree
593 decl_assembler_name (tree decl)
594 {
595 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
596 lang_hooks.set_decl_assembler_name (decl);
597 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
598 }
599
600 /* When the target supports COMDAT groups, this indicates which group the
601 DECL is associated with. This can be either an IDENTIFIER_NODE or a
602 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
603 tree
604 decl_comdat_group (const_tree node)
605 {
606 struct symtab_node *snode = symtab_node::get (node);
607 if (!snode)
608 return NULL;
609 return snode->get_comdat_group ();
610 }
611
612 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
613 tree
614 decl_comdat_group_id (const_tree node)
615 {
616 struct symtab_node *snode = symtab_node::get (node);
617 if (!snode)
618 return NULL;
619 return snode->get_comdat_group_id ();
620 }
621
622 /* When the target supports named sections, return the name of the section
623 NODE is placed in as a string, or NULL if it is in no section. */
624 const char *
625 decl_section_name (const_tree node)
626 {
627 struct symtab_node *snode = symtab_node::get (node);
628 if (!snode)
629 return NULL;
630 return snode->get_section ();
631 }
632
633 /* Set the section name of NODE to the string VALUE, or clear the
634 section association when VALUE is NULL. */
635 void
636 set_decl_section_name (tree node, const char *value)
637 {
638 struct symtab_node *snode;
639
640 if (value == NULL)
641 {
642 snode = symtab_node::get (node);
643 if (!snode)
644 return;
645 }
646 else if (TREE_CODE (node) == VAR_DECL)
647 snode = varpool_node::get_create (node);
648 else
649 snode = cgraph_node::get_create (node);
650 snode->set_section (value);
651 }
652
653 /* Return TLS model of a variable NODE. */
654 enum tls_model
655 decl_tls_model (const_tree node)
656 {
657 struct varpool_node *snode = varpool_node::get (node);
658 if (!snode)
659 return TLS_MODEL_NONE;
660 return snode->tls_model;
661 }
662
663 /* Set TLS model of variable NODE to MODEL. */
664 void
665 set_decl_tls_model (tree node, enum tls_model model)
666 {
667 struct varpool_node *vnode;
668
669 if (model == TLS_MODEL_NONE)
670 {
671 vnode = varpool_node::get (node);
672 if (!vnode)
673 return;
674 }
675 else
676 vnode = varpool_node::get_create (node);
677 vnode->tls_model = model;
678 }
679
680 /* Compute the number of bytes occupied by a tree with code CODE.
681 This function cannot be used for nodes that have variable sizes,
682 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
683 size_t
684 tree_code_size (enum tree_code code)
685 {
686 switch (TREE_CODE_CLASS (code))
687 {
688 case tcc_declaration: /* A decl node */
689 {
690 switch (code)
691 {
692 case FIELD_DECL:
693 return sizeof (struct tree_field_decl);
694 case PARM_DECL:
695 return sizeof (struct tree_parm_decl);
696 case VAR_DECL:
697 return sizeof (struct tree_var_decl);
698 case LABEL_DECL:
699 return sizeof (struct tree_label_decl);
700 case RESULT_DECL:
701 return sizeof (struct tree_result_decl);
702 case CONST_DECL:
703 return sizeof (struct tree_const_decl);
704 case TYPE_DECL:
705 return sizeof (struct tree_type_decl);
706 case FUNCTION_DECL:
707 return sizeof (struct tree_function_decl);
708 case DEBUG_EXPR_DECL:
709 return sizeof (struct tree_decl_with_rtl);
710 case TRANSLATION_UNIT_DECL:
711 return sizeof (struct tree_translation_unit_decl);
712 case NAMESPACE_DECL:
713 case IMPORTED_DECL:
714 case NAMELIST_DECL:
715 return sizeof (struct tree_decl_non_common);
716 default:
717 return lang_hooks.tree_size (code);
718 }
719 }
720
721 case tcc_type: /* a type node */
722 return sizeof (struct tree_type_non_common);
723
724 case tcc_reference: /* a reference */
725 case tcc_expression: /* an expression */
726 case tcc_statement: /* an expression with side effects */
727 case tcc_comparison: /* a comparison expression */
728 case tcc_unary: /* a unary arithmetic expression */
729 case tcc_binary: /* a binary arithmetic expression */
730 return (sizeof (struct tree_exp)
731 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
732
733 case tcc_constant: /* a constant */
734 switch (code)
735 {
736 case VOID_CST: return sizeof (struct tree_typed);
737 case INTEGER_CST: gcc_unreachable ();
738 case REAL_CST: return sizeof (struct tree_real_cst);
739 case FIXED_CST: return sizeof (struct tree_fixed_cst);
740 case COMPLEX_CST: return sizeof (struct tree_complex);
741 case VECTOR_CST: return sizeof (struct tree_vector);
742 case STRING_CST: gcc_unreachable ();
743 default:
744 return lang_hooks.tree_size (code);
745 }
746
747 case tcc_exceptional: /* something random, like an identifier. */
748 switch (code)
749 {
750 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
751 case TREE_LIST: return sizeof (struct tree_list);
752
753 case ERROR_MARK:
754 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
755
756 case TREE_VEC:
757 case OMP_CLAUSE: gcc_unreachable ();
758
759 case SSA_NAME: return sizeof (struct tree_ssa_name);
760
761 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
762 case BLOCK: return sizeof (struct tree_block);
763 case CONSTRUCTOR: return sizeof (struct tree_constructor);
764 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
765 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
766
767 default:
768 return lang_hooks.tree_size (code);
769 }
770
771 default:
772 gcc_unreachable ();
773 }
774 }
775
776 /* Compute the number of bytes occupied by NODE. This routine only
777 looks at TREE_CODE, except for those nodes that have variable sizes. */
778 size_t
779 tree_size (const_tree node)
780 {
781 const enum tree_code code = TREE_CODE (node);
782 switch (code)
783 {
784 case INTEGER_CST:
785 return (sizeof (struct tree_int_cst)
786 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
787
788 case TREE_BINFO:
789 return (offsetof (struct tree_binfo, base_binfos)
790 + vec<tree, va_gc>
791 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
792
793 case TREE_VEC:
794 return (sizeof (struct tree_vec)
795 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
796
797 case VECTOR_CST:
798 return (sizeof (struct tree_vector)
799 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
800
801 case STRING_CST:
802 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
803
804 case OMP_CLAUSE:
805 return (sizeof (struct tree_omp_clause)
806 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
807 * sizeof (tree));
808
809 default:
810 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
811 return (sizeof (struct tree_exp)
812 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
813 else
814 return tree_code_size (code);
815 }
816 }
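/* Illustrative example, not part of the original source: for codes with a
   variable-length tail the size comes from the node itself; an INTEGER_CST
   with TREE_INT_CST_EXT_NUNITS equal to 3 occupies
       sizeof (struct tree_int_cst) + 2 * sizeof (HOST_WIDE_INT)
   bytes, while fixed-size codes simply fall through to tree_code_size.  */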
817
818 /* Record interesting allocation statistics for a tree node with CODE
819 and LENGTH. */
820
821 static void
822 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
823 size_t length ATTRIBUTE_UNUSED)
824 {
825 enum tree_code_class type = TREE_CODE_CLASS (code);
826 tree_node_kind kind;
827
828 if (!GATHER_STATISTICS)
829 return;
830
831 switch (type)
832 {
833 case tcc_declaration: /* A decl node */
834 kind = d_kind;
835 break;
836
837 case tcc_type: /* a type node */
838 kind = t_kind;
839 break;
840
841 case tcc_statement: /* an expression with side effects */
842 kind = s_kind;
843 break;
844
845 case tcc_reference: /* a reference */
846 kind = r_kind;
847 break;
848
849 case tcc_expression: /* an expression */
850 case tcc_comparison: /* a comparison expression */
851 case tcc_unary: /* a unary arithmetic expression */
852 case tcc_binary: /* a binary arithmetic expression */
853 kind = e_kind;
854 break;
855
856 case tcc_constant: /* a constant */
857 kind = c_kind;
858 break;
859
860 case tcc_exceptional: /* something random, like an identifier. */
861 switch (code)
862 {
863 case IDENTIFIER_NODE:
864 kind = id_kind;
865 break;
866
867 case TREE_VEC:
868 kind = vec_kind;
869 break;
870
871 case TREE_BINFO:
872 kind = binfo_kind;
873 break;
874
875 case SSA_NAME:
876 kind = ssa_name_kind;
877 break;
878
879 case BLOCK:
880 kind = b_kind;
881 break;
882
883 case CONSTRUCTOR:
884 kind = constr_kind;
885 break;
886
887 case OMP_CLAUSE:
888 kind = omp_clause_kind;
889 break;
890
891 default:
892 kind = x_kind;
893 break;
894 }
895 break;
896
897 case tcc_vl_exp:
898 kind = e_kind;
899 break;
900
901 default:
902 gcc_unreachable ();
903 }
904
905 tree_code_counts[(int) code]++;
906 tree_node_counts[(int) kind]++;
907 tree_node_sizes[(int) kind] += length;
908 }
909
910 /* Allocate and return a new UID from the DECL_UID namespace. */
911
912 int
913 allocate_decl_uid (void)
914 {
915 return next_decl_uid++;
916 }
917
918 /* Return a newly allocated node of code CODE. For decl and type
919 nodes, some other fields are initialized. The rest of the node is
920 initialized to zero. This function cannot be used for TREE_VEC,
921 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
922 tree_code_size.
923
924 Achoo! I got a code in the node. */
925
926 tree
927 make_node_stat (enum tree_code code MEM_STAT_DECL)
928 {
929 tree t;
930 enum tree_code_class type = TREE_CODE_CLASS (code);
931 size_t length = tree_code_size (code);
932
933 record_node_allocation_statistics (code, length);
934
935 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
936 TREE_SET_CODE (t, code);
937
938 switch (type)
939 {
940 case tcc_statement:
941 TREE_SIDE_EFFECTS (t) = 1;
942 break;
943
944 case tcc_declaration:
945 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
946 {
947 if (code == FUNCTION_DECL)
948 {
949 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
950 DECL_MODE (t) = FUNCTION_MODE;
951 }
952 else
953 DECL_ALIGN (t) = 1;
954 }
955 DECL_SOURCE_LOCATION (t) = input_location;
956 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
957 DECL_UID (t) = --next_debug_decl_uid;
958 else
959 {
960 DECL_UID (t) = allocate_decl_uid ();
961 SET_DECL_PT_UID (t, -1);
962 }
963 if (TREE_CODE (t) == LABEL_DECL)
964 LABEL_DECL_UID (t) = -1;
965
966 break;
967
968 case tcc_type:
969 TYPE_UID (t) = next_type_uid++;
970 TYPE_ALIGN (t) = BITS_PER_UNIT;
971 TYPE_USER_ALIGN (t) = 0;
972 TYPE_MAIN_VARIANT (t) = t;
973 TYPE_CANONICAL (t) = t;
974
975 /* Default to no attributes for type, but let target change that. */
976 TYPE_ATTRIBUTES (t) = NULL_TREE;
977 targetm.set_default_type_attributes (t);
978
979 /* We have not yet computed the alias set for this type. */
980 TYPE_ALIAS_SET (t) = -1;
981 break;
982
983 case tcc_constant:
984 TREE_CONSTANT (t) = 1;
985 break;
986
987 case tcc_expression:
988 switch (code)
989 {
990 case INIT_EXPR:
991 case MODIFY_EXPR:
992 case VA_ARG_EXPR:
993 case PREDECREMENT_EXPR:
994 case PREINCREMENT_EXPR:
995 case POSTDECREMENT_EXPR:
996 case POSTINCREMENT_EXPR:
997 /* All of these have side-effects, no matter what their
998 operands are. */
999 TREE_SIDE_EFFECTS (t) = 1;
1000 break;
1001
1002 default:
1003 break;
1004 }
1005 break;
1006
1007 default:
1008 /* Other classes need no special treatment. */
1009 break;
1010 }
1011
1012 return t;
1013 }
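/* Illustrative usage, not part of the original source: this is roughly how
   make_unsigned_type in stor-layout.c builds a fresh integer type:
       tree t = make_node (INTEGER_TYPE);
       TYPE_PRECISION (t) = precision;
       fixup_unsigned_type (t);
   make_node has already assigned a new TYPE_UID, default alignment and an
   unset alias set, as described above.  */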
1014 \f
1015 /* Return a new node with the same contents as NODE except that its
1016 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1017
1018 tree
1019 copy_node_stat (tree node MEM_STAT_DECL)
1020 {
1021 tree t;
1022 enum tree_code code = TREE_CODE (node);
1023 size_t length;
1024
1025 gcc_assert (code != STATEMENT_LIST);
1026
1027 length = tree_size (node);
1028 record_node_allocation_statistics (code, length);
1029 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1030 memcpy (t, node, length);
1031
1032 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1033 TREE_CHAIN (t) = 0;
1034 TREE_ASM_WRITTEN (t) = 0;
1035 TREE_VISITED (t) = 0;
1036
1037 if (TREE_CODE_CLASS (code) == tcc_declaration)
1038 {
1039 if (code == DEBUG_EXPR_DECL)
1040 DECL_UID (t) = --next_debug_decl_uid;
1041 else
1042 {
1043 DECL_UID (t) = allocate_decl_uid ();
1044 if (DECL_PT_UID_SET_P (node))
1045 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1046 }
1047 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1048 && DECL_HAS_VALUE_EXPR_P (node))
1049 {
1050 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1051 DECL_HAS_VALUE_EXPR_P (t) = 1;
1052 }
1053 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1054 if (TREE_CODE (node) == VAR_DECL)
1055 {
1056 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1057 t->decl_with_vis.symtab_node = NULL;
1058 }
1059 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1060 {
1061 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1062 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1063 }
1064 if (TREE_CODE (node) == FUNCTION_DECL)
1065 {
1066 DECL_STRUCT_FUNCTION (t) = NULL;
1067 t->decl_with_vis.symtab_node = NULL;
1068 }
1069 }
1070 else if (TREE_CODE_CLASS (code) == tcc_type)
1071 {
1072 TYPE_UID (t) = next_type_uid++;
1073 /* The following is so that the debug code for
1074 the copy is different from the original type.
1075 The two statements usually duplicate each other
1076 (because they clear fields of the same union),
1077 but the optimizer should catch that. */
1078 TYPE_SYMTAB_POINTER (t) = 0;
1079 TYPE_SYMTAB_ADDRESS (t) = 0;
1080
1081 /* Do not copy the values cache. */
1082 if (TYPE_CACHED_VALUES_P (t))
1083 {
1084 TYPE_CACHED_VALUES_P (t) = 0;
1085 TYPE_CACHED_VALUES (t) = NULL_TREE;
1086 }
1087 }
1088
1089 return t;
1090 }
1091
1092 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1093 For example, this can copy a list made of TREE_LIST nodes. */
1094
1095 tree
1096 copy_list (tree list)
1097 {
1098 tree head;
1099 tree prev, next;
1100
1101 if (list == 0)
1102 return 0;
1103
1104 head = prev = copy_node (list);
1105 next = TREE_CHAIN (list);
1106 while (next)
1107 {
1108 TREE_CHAIN (prev) = copy_node (next);
1109 prev = TREE_CHAIN (prev);
1110 next = TREE_CHAIN (next);
1111 }
1112 return head;
1113 }
1114
1115 \f
1116 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1117 INTEGER_CST with value CST and type TYPE. */
1118
1119 static unsigned int
1120 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1121 {
1122 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1123 /* We need an extra zero HWI if CST is an unsigned integer with its
1124 upper bit set, and if CST occupies a whole number of HWIs. */
1125 if (TYPE_UNSIGNED (type)
1126 && wi::neg_p (cst)
1127 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1128 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1129 return cst.get_len ();
1130 }
1131
1132 /* Return a new INTEGER_CST with value CST and type TYPE. */
1133
1134 static tree
1135 build_new_int_cst (tree type, const wide_int &cst)
1136 {
1137 unsigned int len = cst.get_len ();
1138 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1139 tree nt = make_int_cst (len, ext_len);
1140
1141 if (len < ext_len)
1142 {
1143 --ext_len;
1144 TREE_INT_CST_ELT (nt, ext_len) = 0;
1145 for (unsigned int i = len; i < ext_len; ++i)
1146 TREE_INT_CST_ELT (nt, i) = -1;
1147 }
1148 else if (TYPE_UNSIGNED (type)
1149 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1150 {
1151 len--;
1152 TREE_INT_CST_ELT (nt, len)
1153 = zext_hwi (cst.elt (len),
1154 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1155 }
1156
1157 for (unsigned int i = 0; i < len; i++)
1158 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1159 TREE_TYPE (nt) = type;
1160 return nt;
1161 }
1162
1163 /* Create an INT_CST node with value LOW sign-extended to TYPE. */
1164
1165 tree
1166 build_int_cst (tree type, HOST_WIDE_INT low)
1167 {
1168 /* Support legacy code. */
1169 if (!type)
1170 type = integer_type_node;
1171
1172 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1173 }
1174
1175 tree
1176 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1177 {
1178 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1179 }
1180
1181 /* Create an INT_CST node with value LOW sign-extended to TYPE. */
1182
1183 tree
1184 build_int_cst_type (tree type, HOST_WIDE_INT low)
1185 {
1186 gcc_assert (type);
1187 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1188 }
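/* Illustrative usage, not part of the original source:
       tree forty_two = build_int_cst (integer_type_node, 42);
       tree kibi      = build_int_cstu (size_type_node, 1024);
   Both requests go through wide_int_to_tree below, so repeated small values
   of the same type come back as the same shared node.  */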
1189
1190 /* Construct a tree of type TYPE with the value given by CST. The signedness
1191 of CST is assumed to be the same as the signedness of TYPE. */
1192
1193 tree
1194 double_int_to_tree (tree type, double_int cst)
1195 {
1196 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1197 }
1198
1199 /* We force the wide_int CST to the range of the type TYPE by sign or
1200 zero extending it. OVERFLOWABLE indicates if we are interested in
1201 overflow of the value: when > 0 we are only interested in signed
1202 overflow, when < 0 we are interested in any overflow. OVERFLOWED
1203 indicates whether overflow has already occurred. We force the
1204 value to be within the range of TYPE (by setting to 0 or 1 all
1205 the bits outside the type's range). We set TREE_OVERFLOW on the
1206 result if
1207 OVERFLOWED is nonzero,
1208 or OVERFLOWABLE is > 0 and signed overflow occurs,
1209 or OVERFLOWABLE is < 0 and any overflow occurs.
1210 We return a new tree node for the extended wide_int. The node
1211 is shared if no overflow flags are set. */
1212
1213
1214 tree
1215 force_fit_type (tree type, const wide_int_ref &cst,
1216 int overflowable, bool overflowed)
1217 {
1218 signop sign = TYPE_SIGN (type);
1219
1220 /* If we need to set overflow flags, return a new unshared node. */
1221 if (overflowed || !wi::fits_to_tree_p (cst, type))
1222 {
1223 if (overflowed
1224 || overflowable < 0
1225 || (overflowable > 0 && sign == SIGNED))
1226 {
1227 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1228 tree t = build_new_int_cst (type, tmp);
1229 TREE_OVERFLOW (t) = 1;
1230 return t;
1231 }
1232 }
1233
1234 /* Else build a shared node. */
1235 return wide_int_to_tree (type, cst);
1236 }
1237
1238 /* These are the hash table functions for the hash table of INTEGER_CST
1239 nodes. */
1240
1241 /* Return the hash code of X, an INTEGER_CST. */
1242
1243 static hashval_t
1244 int_cst_hash_hash (const void *x)
1245 {
1246 const_tree const t = (const_tree) x;
1247 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1248 int i;
1249
1250 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1251 code ^= TREE_INT_CST_ELT (t, i);
1252
1253 return code;
1254 }
1255
1256 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1257 is the same as that given by *Y, which is also an INTEGER_CST tree node. */
1258
1259 static int
1260 int_cst_hash_eq (const void *x, const void *y)
1261 {
1262 const_tree const xt = (const_tree) x;
1263 const_tree const yt = (const_tree) y;
1264
1265 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1266 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1267 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1268 return false;
1269
1270 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1271 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1272 return false;
1273
1274 return true;
1275 }
1276
1277 /* Create an INT_CST node of TYPE and value CST.
1278 The returned node is always shared. For small integers we use a
1279 per-type vector cache, for larger ones we use a single hash table.
1280 The value is extended from its precision according to the sign of
1281 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1282 the upper bits and ensures that hashing and value equality based
1283 upon the underlying HOST_WIDE_INTs works without masking. */
1284
1285 tree
1286 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1287 {
1288 tree t;
1289 int ix = -1;
1290 int limit = 0;
1291
1292 gcc_assert (type);
1293 unsigned int prec = TYPE_PRECISION (type);
1294 signop sgn = TYPE_SIGN (type);
1295
1296 /* Verify that everything is canonical. */
1297 int l = pcst.get_len ();
1298 if (l > 1)
1299 {
1300 if (pcst.elt (l - 1) == 0)
1301 gcc_checking_assert (pcst.elt (l - 2) < 0);
1302 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1303 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1304 }
1305
1306 wide_int cst = wide_int::from (pcst, prec, sgn);
1307 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1308
1309 if (ext_len == 1)
1310 {
1311 /* We just need to store a single HOST_WIDE_INT. */
1312 HOST_WIDE_INT hwi;
1313 if (TYPE_UNSIGNED (type))
1314 hwi = cst.to_uhwi ();
1315 else
1316 hwi = cst.to_shwi ();
1317
1318 switch (TREE_CODE (type))
1319 {
1320 case NULLPTR_TYPE:
1321 gcc_assert (hwi == 0);
1322 /* Fallthru. */
1323
1324 case POINTER_TYPE:
1325 case REFERENCE_TYPE:
1326 /* Cache NULL pointer. */
1327 if (hwi == 0)
1328 {
1329 limit = 1;
1330 ix = 0;
1331 }
1332 break;
1333
1334 case BOOLEAN_TYPE:
1335 /* Cache false or true. */
1336 limit = 2;
1337 if (hwi < 2)
1338 ix = hwi;
1339 break;
1340
1341 case INTEGER_TYPE:
1342 case OFFSET_TYPE:
1343 if (TYPE_SIGN (type) == UNSIGNED)
1344 {
1345 /* Cache [0, N). */
1346 limit = INTEGER_SHARE_LIMIT;
1347 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1348 ix = hwi;
1349 }
1350 else
1351 {
1352 /* Cache [-1, N). */
1353 limit = INTEGER_SHARE_LIMIT + 1;
1354 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1355 ix = hwi + 1;
1356 }
1357 break;
1358
1359 case ENUMERAL_TYPE:
1360 break;
1361
1362 default:
1363 gcc_unreachable ();
1364 }
1365
1366 if (ix >= 0)
1367 {
1368 /* Look for it in the type's vector of small shared ints. */
1369 if (!TYPE_CACHED_VALUES_P (type))
1370 {
1371 TYPE_CACHED_VALUES_P (type) = 1;
1372 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1373 }
1374
1375 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1376 if (t)
1377 /* Make sure no one is clobbering the shared constant. */
1378 gcc_checking_assert (TREE_TYPE (t) == type
1379 && TREE_INT_CST_NUNITS (t) == 1
1380 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1381 && TREE_INT_CST_EXT_NUNITS (t) == 1
1382 && TREE_INT_CST_ELT (t, 0) == hwi);
1383 else
1384 {
1385 /* Create a new shared int. */
1386 t = build_new_int_cst (type, cst);
1387 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1388 }
1389 }
1390 else
1391 {
1392 /* Use the cache of larger shared ints, using int_cst_node as
1393 a temporary. */
1394 void **slot;
1395
1396 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1397 TREE_TYPE (int_cst_node) = type;
1398
1399 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1400 t = (tree) *slot;
1401 if (!t)
1402 {
1403 /* Insert this one into the hash table. */
1404 t = int_cst_node;
1405 *slot = t;
1406 /* Make a new node for next time round. */
1407 int_cst_node = make_int_cst (1, 1);
1408 }
1409 }
1410 }
1411 else
1412 {
1413 /* The value either hashes properly or we drop it on the floor
1414 for the gc to take care of. There will not be enough of them
1415 to worry about. */
1416 void **slot;
1417
1418 tree nt = build_new_int_cst (type, cst);
1419 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1420 t = (tree) *slot;
1421 if (!t)
1422 {
1423 /* Insert this one into the hash table. */
1424 t = nt;
1425 *slot = t;
1426 }
1427 }
1428
1429 return t;
1430 }
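/* Illustrative consequence, not part of the original source: because small
   values are cached per type and larger ones are hashed, repeated requests
   yield the very same node, e.g.
       build_int_cst (integer_type_node, 0) == integer_zero_node
   which is also why shared INTEGER_CSTs must never be modified in place.  */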
1431
1432 void
1433 cache_integer_cst (tree t)
1434 {
1435 tree type = TREE_TYPE (t);
1436 int ix = -1;
1437 int limit = 0;
1438 int prec = TYPE_PRECISION (type);
1439
1440 gcc_assert (!TREE_OVERFLOW (t));
1441
1442 switch (TREE_CODE (type))
1443 {
1444 case NULLPTR_TYPE:
1445 gcc_assert (integer_zerop (t));
1446 /* Fallthru. */
1447
1448 case POINTER_TYPE:
1449 case REFERENCE_TYPE:
1450 /* Cache NULL pointer. */
1451 if (integer_zerop (t))
1452 {
1453 limit = 1;
1454 ix = 0;
1455 }
1456 break;
1457
1458 case BOOLEAN_TYPE:
1459 /* Cache false or true. */
1460 limit = 2;
1461 if (wi::ltu_p (t, 2))
1462 ix = TREE_INT_CST_ELT (t, 0);
1463 break;
1464
1465 case INTEGER_TYPE:
1466 case OFFSET_TYPE:
1467 if (TYPE_UNSIGNED (type))
1468 {
1469 /* Cache 0..N */
1470 limit = INTEGER_SHARE_LIMIT;
1471
1472 /* This is a little hokey, but if the prec is smaller than
1473 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1474 obvious test will not get the correct answer. */
1475 if (prec < HOST_BITS_PER_WIDE_INT)
1476 {
1477 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1478 ix = tree_to_uhwi (t);
1479 }
1480 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1481 ix = tree_to_uhwi (t);
1482 }
1483 else
1484 {
1485 /* Cache -1..N */
1486 limit = INTEGER_SHARE_LIMIT + 1;
1487
1488 if (integer_minus_onep (t))
1489 ix = 0;
1490 else if (!wi::neg_p (t))
1491 {
1492 if (prec < HOST_BITS_PER_WIDE_INT)
1493 {
1494 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1495 ix = tree_to_shwi (t) + 1;
1496 }
1497 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1498 ix = tree_to_shwi (t) + 1;
1499 }
1500 }
1501 break;
1502
1503 case ENUMERAL_TYPE:
1504 break;
1505
1506 default:
1507 gcc_unreachable ();
1508 }
1509
1510 if (ix >= 0)
1511 {
1512 /* Look for it in the type's vector of small shared ints. */
1513 if (!TYPE_CACHED_VALUES_P (type))
1514 {
1515 TYPE_CACHED_VALUES_P (type) = 1;
1516 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1517 }
1518
1519 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1520 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1521 }
1522 else
1523 {
1524 /* Use the cache of larger shared ints. */
1525 void **slot;
1526
1527 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1528 /* If there is already an entry for the number verify it's the
1529 same. */
1530 if (*slot)
1531 gcc_assert (wi::eq_p (tree (*slot), t));
1532 else
1533 /* Otherwise insert this one into the hash table. */
1534 *slot = t;
1535 }
1536 }
1537
1538
1539 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1540 and the rest are zeros. */
1541
1542 tree
1543 build_low_bits_mask (tree type, unsigned bits)
1544 {
1545 gcc_assert (bits <= TYPE_PRECISION (type));
1546
1547 return wide_int_to_tree (type, wi::mask (bits, false,
1548 TYPE_PRECISION (type)));
1549 }
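/* Illustrative example, not part of the original source:
       build_low_bits_mask (unsigned_type_node, 4)
   returns the shared INTEGER_CST 0xf of that type, i.e. wi::mask (4, false,
   TYPE_PRECISION (unsigned_type_node)) converted by wide_int_to_tree.  */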
1550
1551 /* Checks that X is an integer constant that can be expressed in an (unsigned)
1552 HOST_WIDE_INT without loss of precision. */
1553
1554 bool
1555 cst_and_fits_in_hwi (const_tree x)
1556 {
1557 if (TREE_CODE (x) != INTEGER_CST)
1558 return false;
1559
1560 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1561 return false;
1562
1563 return TREE_INT_CST_NUNITS (x) == 1;
1564 }
1565
1566 /* Build a newly constructed VECTOR_CST node with LEN elements. */
1567
1568 tree
1569 make_vector_stat (unsigned len MEM_STAT_DECL)
1570 {
1571 tree t;
1572 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1573
1574 record_node_allocation_statistics (VECTOR_CST, length);
1575
1576 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1577
1578 TREE_SET_CODE (t, VECTOR_CST);
1579 TREE_CONSTANT (t) = 1;
1580
1581 return t;
1582 }
1583
1584 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1585 are given by the array VALS. */
1586
1587 tree
1588 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1589 {
1590 int over = 0;
1591 unsigned cnt = 0;
1592 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1593 TREE_TYPE (v) = type;
1594
1595 /* Iterate through elements and check for overflow. */
1596 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1597 {
1598 tree value = vals[cnt];
1599
1600 VECTOR_CST_ELT (v, cnt) = value;
1601
1602 /* Don't crash if we get an address constant. */
1603 if (!CONSTANT_CLASS_P (value))
1604 continue;
1605
1606 over |= TREE_OVERFLOW (value);
1607 }
1608
1609 TREE_OVERFLOW (v) = over;
1610 return v;
1611 }
1612
1613 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1614 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1615
1616 tree
1617 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1618 {
1619 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1620 unsigned HOST_WIDE_INT idx;
1621 tree value;
1622
1623 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1624 vec[idx] = value;
1625 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1626 vec[idx] = build_zero_cst (TREE_TYPE (type));
1627
1628 return build_vector (type, vec);
1629 }
1630
1631 /* Build a vector of type VECTYPE where all the elements are SCs. */
1632 tree
1633 build_vector_from_val (tree vectype, tree sc)
1634 {
1635 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1636
1637 if (sc == error_mark_node)
1638 return sc;
1639
1640 /* Verify that the vector type is suitable for SC. Note that there
1641 is some inconsistency in the type-system with respect to restrict
1642 qualifications of pointers. Vector types always have a main-variant
1643 element type and the qualification is applied to the vector-type.
1644 So TREE_TYPE (vector-type) does not return a properly qualified
1645 vector element-type. */
1646 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1647 TREE_TYPE (vectype)));
1648
1649 if (CONSTANT_CLASS_P (sc))
1650 {
1651 tree *v = XALLOCAVEC (tree, nunits);
1652 for (i = 0; i < nunits; ++i)
1653 v[i] = sc;
1654 return build_vector (vectype, v);
1655 }
1656 else
1657 {
1658 vec<constructor_elt, va_gc> *v;
1659 vec_alloc (v, nunits);
1660 for (i = 0; i < nunits; ++i)
1661 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1662 return build_constructor (vectype, v);
1663 }
1664 }
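/* Illustrative usage, not part of the original source: splatting a scalar
   across some vector type vectype, e.g. for loop-invariant operands during
   vectorization:
       tree splat
         = build_vector_from_val (vectype, build_one_cst (TREE_TYPE (vectype)));
   A constant element yields a VECTOR_CST; a non-constant one yields a
   CONSTRUCTOR, as implemented above.  */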
1665
1666 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1667 are in the vec pointed to by VALS. */
1668 tree
1669 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1670 {
1671 tree c = make_node (CONSTRUCTOR);
1672 unsigned int i;
1673 constructor_elt *elt;
1674 bool constant_p = true;
1675 bool side_effects_p = false;
1676
1677 TREE_TYPE (c) = type;
1678 CONSTRUCTOR_ELTS (c) = vals;
1679
1680 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1681 {
1682 /* Mostly ctors will have elts that don't have side-effects, so
1683 the usual case is to scan all the elements. Hence a single
1684 loop for both const and side effects, rather than one loop
1685 each (with early outs). */
1686 if (!TREE_CONSTANT (elt->value))
1687 constant_p = false;
1688 if (TREE_SIDE_EFFECTS (elt->value))
1689 side_effects_p = true;
1690 }
1691
1692 TREE_SIDE_EFFECTS (c) = side_effects_p;
1693 TREE_CONSTANT (c) = constant_p;
1694
1695 return c;
1696 }
1697
1698 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1699 INDEX and VALUE. */
1700 tree
1701 build_constructor_single (tree type, tree index, tree value)
1702 {
1703 vec<constructor_elt, va_gc> *v;
1704 constructor_elt elt = {index, value};
1705
1706 vec_alloc (v, 1);
1707 v->quick_push (elt);
1708
1709 return build_constructor (type, v);
1710 }
1711
1712
1713 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1714 are in a list pointed to by VALS. */
1715 tree
1716 build_constructor_from_list (tree type, tree vals)
1717 {
1718 tree t;
1719 vec<constructor_elt, va_gc> *v = NULL;
1720
1721 if (vals)
1722 {
1723 vec_alloc (v, list_length (vals));
1724 for (t = vals; t; t = TREE_CHAIN (t))
1725 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1726 }
1727
1728 return build_constructor (type, v);
1729 }
1730
1731 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1732 of elements, provided as index/value pairs. */
1733
1734 tree
1735 build_constructor_va (tree type, int nelts, ...)
1736 {
1737 vec<constructor_elt, va_gc> *v = NULL;
1738 va_list p;
1739
1740 va_start (p, nelts);
1741 vec_alloc (v, nelts);
1742 while (nelts--)
1743 {
1744 tree index = va_arg (p, tree);
1745 tree value = va_arg (p, tree);
1746 CONSTRUCTOR_APPEND_ELT (v, index, value);
1747 }
1748 va_end (p);
1749 return build_constructor (type, v);
1750 }
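/* Illustrative usage, not part of the original source, assuming array_type
   is a two-element array of integer_type_node: the initializer { 0, 1 }
   could be built as
       tree ctor = build_constructor_va (array_type, 2,
                                         size_int (0), integer_zero_node,
                                         size_int (1), integer_one_node);
   i.e. NELTS index/value pairs follow the element count.  */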
1751
1752 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1753
1754 tree
1755 build_fixed (tree type, FIXED_VALUE_TYPE f)
1756 {
1757 tree v;
1758 FIXED_VALUE_TYPE *fp;
1759
1760 v = make_node (FIXED_CST);
1761 fp = ggc_alloc<fixed_value> ();
1762 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1763
1764 TREE_TYPE (v) = type;
1765 TREE_FIXED_CST_PTR (v) = fp;
1766 return v;
1767 }
1768
1769 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1770
1771 tree
1772 build_real (tree type, REAL_VALUE_TYPE d)
1773 {
1774 tree v;
1775 REAL_VALUE_TYPE *dp;
1776 int overflow = 0;
1777
1778 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1779 Consider doing it via real_convert now. */
1780
1781 v = make_node (REAL_CST);
1782 dp = ggc_alloc<real_value> ();
1783 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1784
1785 TREE_TYPE (v) = type;
1786 TREE_REAL_CST_PTR (v) = dp;
1787 TREE_OVERFLOW (v) = overflow;
1788 return v;
1789 }
1790
1791 /* Return a REAL_VALUE_TYPE whose value is the integer value of the
1792 INTEGER_CST node I, converted for type TYPE. */
1793
1794 REAL_VALUE_TYPE
1795 real_value_from_int_cst (const_tree type, const_tree i)
1796 {
1797 REAL_VALUE_TYPE d;
1798
1799 /* Clear all bits of the real value type so that we can later do
1800 bitwise comparisons to see if two values are the same. */
1801 memset (&d, 0, sizeof d);
1802
1803 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1804 TYPE_SIGN (TREE_TYPE (i)));
1805 return d;
1806 }
1807
1808 /* Given a tree representing an integer constant I, return a tree
1809 representing the same value as a floating-point constant of type TYPE. */
1810
1811 tree
1812 build_real_from_int_cst (tree type, const_tree i)
1813 {
1814 tree v;
1815 int overflow = TREE_OVERFLOW (i);
1816
1817 v = build_real (type, real_value_from_int_cst (type, i));
1818
1819 TREE_OVERFLOW (v) |= overflow;
1820 return v;
1821 }
1822
1823 /* Return a newly constructed STRING_CST node whose value is
1824 the LEN characters at STR.
1825 Note that for a C string literal, LEN should include the trailing NUL.
1826 The TREE_TYPE is not initialized. */
1827
1828 tree
1829 build_string (int len, const char *str)
1830 {
1831 tree s;
1832 size_t length;
1833
1834 /* Do not waste bytes provided by padding of struct tree_string. */
1835 length = len + offsetof (struct tree_string, str) + 1;
1836
1837 record_node_allocation_statistics (STRING_CST, length);
1838
1839 s = (tree) ggc_internal_alloc (length);
1840
1841 memset (s, 0, sizeof (struct tree_typed));
1842 TREE_SET_CODE (s, STRING_CST);
1843 TREE_CONSTANT (s) = 1;
1844 TREE_STRING_LENGTH (s) = len;
1845 memcpy (s->string.str, str, len);
1846 s->string.str[len] = '\0';
1847
1848 return s;
1849 }
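/* Illustrative usage, not part of the original source: for the C literal
   "hi" the length passed in includes the trailing NUL, so
       tree s = build_string (3, "hi");
       TREE_TYPE (s) = build_array_type (char_type_node,
                                         build_index_type (size_int (2)));
   where the caller supplies the type because, as noted above, build_string
   leaves TREE_TYPE uninitialized.  */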
1850
1851 /* Return a newly constructed COMPLEX_CST node whose value is
1852 specified by the real and imaginary parts REAL and IMAG.
1853 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1854 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1855
1856 tree
1857 build_complex (tree type, tree real, tree imag)
1858 {
1859 tree t = make_node (COMPLEX_CST);
1860
1861 TREE_REALPART (t) = real;
1862 TREE_IMAGPART (t) = imag;
1863 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1864 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1865 return t;
1866 }
1867
1868 /* Return a constant of arithmetic type TYPE which is the
1869 multiplicative identity of the set TYPE. */
1870
1871 tree
1872 build_one_cst (tree type)
1873 {
1874 switch (TREE_CODE (type))
1875 {
1876 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1877 case POINTER_TYPE: case REFERENCE_TYPE:
1878 case OFFSET_TYPE:
1879 return build_int_cst (type, 1);
1880
1881 case REAL_TYPE:
1882 return build_real (type, dconst1);
1883
1884 case FIXED_POINT_TYPE:
1885 /* We can only generate 1 for accum types. */
1886 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1887 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1888
1889 case VECTOR_TYPE:
1890 {
1891 tree scalar = build_one_cst (TREE_TYPE (type));
1892
1893 return build_vector_from_val (type, scalar);
1894 }
1895
1896 case COMPLEX_TYPE:
1897 return build_complex (type,
1898 build_one_cst (TREE_TYPE (type)),
1899 build_zero_cst (TREE_TYPE (type)));
1900
1901 default:
1902 gcc_unreachable ();
1903 }
1904 }
1905
1906 /* Return an integer constant of type TYPE with all of its bits set, or a
1907 complex or vector constant whose subparts are such integers. */
1908
1909 tree
1910 build_all_ones_cst (tree type)
1911 {
1912 if (TREE_CODE (type) == COMPLEX_TYPE)
1913 {
1914 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1915 return build_complex (type, scalar, scalar);
1916 }
1917 else
1918 return build_minus_one_cst (type);
1919 }
1920
1921 /* Return a constant of arithmetic type TYPE which is the
1922 opposite of the multiplicative identity of the set TYPE. */
1923
1924 tree
1925 build_minus_one_cst (tree type)
1926 {
1927 switch (TREE_CODE (type))
1928 {
1929 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1930 case POINTER_TYPE: case REFERENCE_TYPE:
1931 case OFFSET_TYPE:
1932 return build_int_cst (type, -1);
1933
1934 case REAL_TYPE:
1935 return build_real (type, dconstm1);
1936
1937 case FIXED_POINT_TYPE:
1938 /* We can only generate -1 for accum types. */
1939 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1940 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1941 TYPE_MODE (type)));
1942
1943 case VECTOR_TYPE:
1944 {
1945 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1946
1947 return build_vector_from_val (type, scalar);
1948 }
1949
1950 case COMPLEX_TYPE:
1951 return build_complex (type,
1952 build_minus_one_cst (TREE_TYPE (type)),
1953 build_zero_cst (TREE_TYPE (type)));
1954
1955 default:
1956 gcc_unreachable ();
1957 }
1958 }
1959
1960 /* Build 0 constant of type TYPE. This is used by constructor folding
1961 and thus the constant should be represented in memory by
1962 zero(es). */
1963
1964 tree
1965 build_zero_cst (tree type)
1966 {
1967 switch (TREE_CODE (type))
1968 {
1969 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1970 case POINTER_TYPE: case REFERENCE_TYPE:
1971 case OFFSET_TYPE: case NULLPTR_TYPE:
1972 return build_int_cst (type, 0);
1973
1974 case REAL_TYPE:
1975 return build_real (type, dconst0);
1976
1977 case FIXED_POINT_TYPE:
1978 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1979
1980 case VECTOR_TYPE:
1981 {
1982 tree scalar = build_zero_cst (TREE_TYPE (type));
1983
1984 return build_vector_from_val (type, scalar);
1985 }
1986
1987 case COMPLEX_TYPE:
1988 {
1989 tree zero = build_zero_cst (TREE_TYPE (type));
1990
1991 return build_complex (type, zero, zero);
1992 }
1993
1994 default:
1995 if (!AGGREGATE_TYPE_P (type))
1996 return fold_convert (type, integer_zero_node);
1997 return build_constructor (type, NULL);
1998 }
1999 }
2000
2001
2002 /* Build a BINFO with LEN language slots. */
2003
2004 tree
2005 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2006 {
2007 tree t;
2008 size_t length = (offsetof (struct tree_binfo, base_binfos)
2009 + vec<tree, va_gc>::embedded_size (base_binfos));
2010
2011 record_node_allocation_statistics (TREE_BINFO, length);
2012
2013 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2014
2015 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2016
2017 TREE_SET_CODE (t, TREE_BINFO);
2018
2019 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2020
2021 return t;
2022 }
2023
2024 /* Create a CASE_LABEL_EXPR tree node and return it. */
2025
2026 tree
2027 build_case_label (tree low_value, tree high_value, tree label_decl)
2028 {
2029 tree t = make_node (CASE_LABEL_EXPR);
2030
2031 TREE_TYPE (t) = void_type_node;
2032 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2033
2034 CASE_LOW (t) = low_value;
2035 CASE_HIGH (t) = high_value;
2036 CASE_LABEL (t) = label_decl;
2037 CASE_CHAIN (t) = NULL_TREE;
2038
2039 return t;
2040 }
2041
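/* Hypothetical sketch (expository, not from the original file): a front
   end lowering the range label "case 1 ... 3:" attached to LABEL could do

	tree lo = build_int_cst (integer_type_node, 1);
	tree hi = build_int_cst (integer_type_node, 3);
	tree c = build_case_label (lo, hi, label);

   where LABEL is assumed to be a LABEL_DECL whose source location has
   already been set, since build_case_label copies it onto the node.  */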
2042 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2043 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2044 The latter determines the length of the HOST_WIDE_INT vector. */
2045
2046 tree
2047 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2048 {
2049 tree t;
2050 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2051 + sizeof (struct tree_int_cst));
2052
2053 gcc_assert (len);
2054 record_node_allocation_statistics (INTEGER_CST, length);
2055
2056 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2057
2058 TREE_SET_CODE (t, INTEGER_CST);
2059 TREE_INT_CST_NUNITS (t) = len;
2060 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2061 /* to_offset can only be applied to trees that are offset_int-sized
2062 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2063 must be exactly the precision of offset_int and so LEN is correct. */
2064 if (ext_len <= OFFSET_INT_ELTS)
2065 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2066 else
2067 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2068
2069 TREE_CONSTANT (t) = 1;
2070
2071 return t;
2072 }
2073
2074 /* Build a newly constructed TREE_VEC node of length LEN. */
2075
2076 tree
2077 make_tree_vec_stat (int len MEM_STAT_DECL)
2078 {
2079 tree t;
2080 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2081
2082 record_node_allocation_statistics (TREE_VEC, length);
2083
2084 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2085
2086 TREE_SET_CODE (t, TREE_VEC);
2087 TREE_VEC_LENGTH (t) = len;
2088
2089 return t;
2090 }
2091
2092 /* Grow a TREE_VEC node to new length LEN. */
2093
2094 tree
2095 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2096 {
2097 gcc_assert (TREE_CODE (v) == TREE_VEC);
2098
2099 int oldlen = TREE_VEC_LENGTH (v);
2100 gcc_assert (len > oldlen);
2101
2102 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2103 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2104
2105 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2106
2107 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2108
2109 TREE_VEC_LENGTH (v) = len;
2110
2111 return v;
2112 }
2113 \f
2114 /* Return 1 if EXPR is the integer constant zero or a complex constant
2115 of zero. */
2116
2117 int
2118 integer_zerop (const_tree expr)
2119 {
2120 STRIP_NOPS (expr);
2121
2122 switch (TREE_CODE (expr))
2123 {
2124 case INTEGER_CST:
2125 return wi::eq_p (expr, 0);
2126 case COMPLEX_CST:
2127 return (integer_zerop (TREE_REALPART (expr))
2128 && integer_zerop (TREE_IMAGPART (expr)));
2129 case VECTOR_CST:
2130 {
2131 unsigned i;
2132 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2133 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2134 return false;
2135 return true;
2136 }
2137 default:
2138 return false;
2139 }
2140 }
2141
2142 /* Return 1 if EXPR is the integer constant one or the corresponding
2143 complex constant. */
2144
2145 int
2146 integer_onep (const_tree expr)
2147 {
2148 STRIP_NOPS (expr);
2149
2150 switch (TREE_CODE (expr))
2151 {
2152 case INTEGER_CST:
2153 return wi::eq_p (wi::to_widest (expr), 1);
2154 case COMPLEX_CST:
2155 return (integer_onep (TREE_REALPART (expr))
2156 && integer_zerop (TREE_IMAGPART (expr)));
2157 case VECTOR_CST:
2158 {
2159 unsigned i;
2160 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2161 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2162 return false;
2163 return true;
2164 }
2165 default:
2166 return false;
2167 }
2168 }
2169
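/* Usage sketch (expository placeholders, not from a real caller): because
   these predicates look through nops, a fold routine can test operands
   directly, e.g.

	if (integer_zerop (op1))
	  return op0;		(x + 0 simplifies to x)

   where op0 and op1 stand for the operands of some binary expression.  */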
2170 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2171 it contains, or a complex or vector whose subparts are such integers. */
2172
2173 int
2174 integer_all_onesp (const_tree expr)
2175 {
2176 STRIP_NOPS (expr);
2177
2178 if (TREE_CODE (expr) == COMPLEX_CST
2179 && integer_all_onesp (TREE_REALPART (expr))
2180 && integer_all_onesp (TREE_IMAGPART (expr)))
2181 return 1;
2182
2183 else if (TREE_CODE (expr) == VECTOR_CST)
2184 {
2185 unsigned i;
2186 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2187 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2188 return 0;
2189 return 1;
2190 }
2191
2192 else if (TREE_CODE (expr) != INTEGER_CST)
2193 return 0;
2194
2195 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2196 }
2197
2198 /* Return 1 if EXPR is the integer constant minus one. */
2199
2200 int
2201 integer_minus_onep (const_tree expr)
2202 {
2203 STRIP_NOPS (expr);
2204
2205 if (TREE_CODE (expr) == COMPLEX_CST)
2206 return (integer_all_onesp (TREE_REALPART (expr))
2207 && integer_zerop (TREE_IMAGPART (expr)));
2208 else
2209 return integer_all_onesp (expr);
2210 }
2211
2212 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2213 one bit on). */
2214
2215 int
2216 integer_pow2p (const_tree expr)
2217 {
2218 STRIP_NOPS (expr);
2219
2220 if (TREE_CODE (expr) == COMPLEX_CST
2221 && integer_pow2p (TREE_REALPART (expr))
2222 && integer_zerop (TREE_IMAGPART (expr)))
2223 return 1;
2224
2225 if (TREE_CODE (expr) != INTEGER_CST)
2226 return 0;
2227
2228 return wi::popcount (expr) == 1;
2229 }
2230
2231 /* Return 1 if EXPR is an integer constant other than zero or a
2232 complex constant other than zero. */
2233
2234 int
2235 integer_nonzerop (const_tree expr)
2236 {
2237 STRIP_NOPS (expr);
2238
2239 return ((TREE_CODE (expr) == INTEGER_CST
2240 && !wi::eq_p (expr, 0))
2241 || (TREE_CODE (expr) == COMPLEX_CST
2242 && (integer_nonzerop (TREE_REALPART (expr))
2243 || integer_nonzerop (TREE_IMAGPART (expr)))));
2244 }
2245
2246 /* Return 1 if EXPR is the fixed-point constant zero. */
2247
2248 int
2249 fixed_zerop (const_tree expr)
2250 {
2251 return (TREE_CODE (expr) == FIXED_CST
2252 && TREE_FIXED_CST (expr).data.is_zero ());
2253 }
2254
2255 /* Return the power of two represented by a tree node known to be a
2256 power of two. */
2257
2258 int
2259 tree_log2 (const_tree expr)
2260 {
2261 STRIP_NOPS (expr);
2262
2263 if (TREE_CODE (expr) == COMPLEX_CST)
2264 return tree_log2 (TREE_REALPART (expr));
2265
2266 return wi::exact_log2 (expr);
2267 }
2268
2269 /* Similar, but return the largest integer Y such that 2 ** Y is less
2270 than or equal to EXPR. */
2271
2272 int
2273 tree_floor_log2 (const_tree expr)
2274 {
2275 STRIP_NOPS (expr);
2276
2277 if (TREE_CODE (expr) == COMPLEX_CST)
2278 return tree_log2 (TREE_REALPART (expr));
2279
2280 return wi::floor_log2 (expr);
2281 }
2282
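/* Worked example (added for exposition): for the INTEGER_CST built by
   build_int_cst (integer_type_node, 8), wi::exact_log2 yields 3, so
   tree_log2 returns 3; tree_floor_log2 of the constant 10 also returns
   3, since it rounds down instead of requiring an exact power of two.  */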
2283 /* Return number of known trailing zero bits in EXPR, or, if the value of
2284 EXPR is known to be zero, the precision of its type. */
2285
2286 unsigned int
2287 tree_ctz (const_tree expr)
2288 {
2289 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2290 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2291 return 0;
2292
2293 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2294 switch (TREE_CODE (expr))
2295 {
2296 case INTEGER_CST:
2297 ret1 = wi::ctz (expr);
2298 return MIN (ret1, prec);
2299 case SSA_NAME:
2300 ret1 = wi::ctz (get_nonzero_bits (expr));
2301 return MIN (ret1, prec);
2302 case PLUS_EXPR:
2303 case MINUS_EXPR:
2304 case BIT_IOR_EXPR:
2305 case BIT_XOR_EXPR:
2306 case MIN_EXPR:
2307 case MAX_EXPR:
2308 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2309 if (ret1 == 0)
2310 return ret1;
2311 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2312 return MIN (ret1, ret2);
2313 case POINTER_PLUS_EXPR:
2314 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2315 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2316 /* Second operand is sizetype, which could in theory be
2317 wider than the pointer's precision. Make sure we never
2318 return more than prec. */
2319 ret2 = MIN (ret2, prec);
2320 return MIN (ret1, ret2);
2321 case BIT_AND_EXPR:
2322 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2323 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2324 return MAX (ret1, ret2);
2325 case MULT_EXPR:
2326 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2327 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2328 return MIN (ret1 + ret2, prec);
2329 case LSHIFT_EXPR:
2330 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2331 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2332 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2333 {
2334 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2335 return MIN (ret1 + ret2, prec);
2336 }
2337 return ret1;
2338 case RSHIFT_EXPR:
2339 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2340 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2341 {
2342 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2343 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2344 if (ret1 > ret2)
2345 return ret1 - ret2;
2346 }
2347 return 0;
2348 case TRUNC_DIV_EXPR:
2349 case CEIL_DIV_EXPR:
2350 case FLOOR_DIV_EXPR:
2351 case ROUND_DIV_EXPR:
2352 case EXACT_DIV_EXPR:
2353 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2354 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2355 {
2356 int l = tree_log2 (TREE_OPERAND (expr, 1));
2357 if (l >= 0)
2358 {
2359 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2360 ret2 = l;
2361 if (ret1 > ret2)
2362 return ret1 - ret2;
2363 }
2364 }
2365 return 0;
2366 CASE_CONVERT:
2367 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2368 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2369 ret1 = prec;
2370 return MIN (ret1, prec);
2371 case SAVE_EXPR:
2372 return tree_ctz (TREE_OPERAND (expr, 0));
2373 case COND_EXPR:
2374 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2375 if (ret1 == 0)
2376 return 0;
2377 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2378 return MIN (ret1, ret2);
2379 case COMPOUND_EXPR:
2380 return tree_ctz (TREE_OPERAND (expr, 1));
2381 case ADDR_EXPR:
2382 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2383 if (ret1 > BITS_PER_UNIT)
2384 {
2385 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2386 return MIN (ret1, prec);
2387 }
2388 return 0;
2389 default:
2390 return 0;
2391 }
2392 }
2393
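/* Worked example (expository): for the expression (x * 8) + 16, where x
   is an SSA_NAME with no recorded nonzero-bits information, the MULT_EXPR
   arm gives ctz(x) + ctz(8) = 0 + 3 = 3, the constant 16 gives 4, and the
   PLUS_EXPR case takes the minimum, so tree_ctz reports 3 known trailing
   zero bits.  */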
2394 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2395 decimal float constants, so don't return 1 for them. */
2396
2397 int
2398 real_zerop (const_tree expr)
2399 {
2400 STRIP_NOPS (expr);
2401
2402 switch (TREE_CODE (expr))
2403 {
2404 case REAL_CST:
2405 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2406 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2407 case COMPLEX_CST:
2408 return real_zerop (TREE_REALPART (expr))
2409 && real_zerop (TREE_IMAGPART (expr));
2410 case VECTOR_CST:
2411 {
2412 unsigned i;
2413 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2414 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2415 return false;
2416 return true;
2417 }
2418 default:
2419 return false;
2420 }
2421 }
2422
2423 /* Return 1 if EXPR is the real constant one in real or complex form.
2424 Trailing zeroes matter for decimal float constants, so don't return
2425 1 for them. */
2426
2427 int
2428 real_onep (const_tree expr)
2429 {
2430 STRIP_NOPS (expr);
2431
2432 switch (TREE_CODE (expr))
2433 {
2434 case REAL_CST:
2435 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2436 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2437 case COMPLEX_CST:
2438 return real_onep (TREE_REALPART (expr))
2439 && real_zerop (TREE_IMAGPART (expr));
2440 case VECTOR_CST:
2441 {
2442 unsigned i;
2443 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2444 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2445 return false;
2446 return true;
2447 }
2448 default:
2449 return false;
2450 }
2451 }
2452
2453 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2454 matter for decimal float constants, so don't return 1 for them. */
2455
2456 int
2457 real_minus_onep (const_tree expr)
2458 {
2459 STRIP_NOPS (expr);
2460
2461 switch (TREE_CODE (expr))
2462 {
2463 case REAL_CST:
2464 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2465 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2466 case COMPLEX_CST:
2467 return real_minus_onep (TREE_REALPART (expr))
2468 && real_zerop (TREE_IMAGPART (expr));
2469 case VECTOR_CST:
2470 {
2471 unsigned i;
2472 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2473 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2474 return false;
2475 return true;
2476 }
2477 default:
2478 return false;
2479 }
2480 }
2481
2482 /* Nonzero if EXP is a constant or a cast of a constant. */
2483
2484 int
2485 really_constant_p (const_tree exp)
2486 {
2487 /* This is not quite the same as STRIP_NOPS. It does more. */
2488 while (CONVERT_EXPR_P (exp)
2489 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2490 exp = TREE_OPERAND (exp, 0);
2491 return TREE_CONSTANT (exp);
2492 }
2493 \f
2494 /* Return first list element whose TREE_VALUE is ELEM.
2495 Return 0 if ELEM is not in LIST. */
2496
2497 tree
2498 value_member (tree elem, tree list)
2499 {
2500 while (list)
2501 {
2502 if (elem == TREE_VALUE (list))
2503 return list;
2504 list = TREE_CHAIN (list);
2505 }
2506 return NULL_TREE;
2507 }
2508
2509 /* Return first list element whose TREE_PURPOSE is ELEM.
2510 Return 0 if ELEM is not in LIST. */
2511
2512 tree
2513 purpose_member (const_tree elem, tree list)
2514 {
2515 while (list)
2516 {
2517 if (elem == TREE_PURPOSE (list))
2518 return list;
2519 list = TREE_CHAIN (list);
2520 }
2521 return NULL_TREE;
2522 }
2523
2524 /* Return true if ELEM is in V. */
2525
2526 bool
2527 vec_member (const_tree elem, vec<tree, va_gc> *v)
2528 {
2529 unsigned ix;
2530 tree t;
2531 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2532 if (elem == t)
2533 return true;
2534 return false;
2535 }
2536
2537 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2538 NULL_TREE if there is no such element. */
2539
2540 tree
2541 chain_index (int idx, tree chain)
2542 {
2543 for (; chain && idx > 0; --idx)
2544 chain = TREE_CHAIN (chain);
2545 return chain;
2546 }
2547
2548 /* Return nonzero if ELEM is part of the chain CHAIN. */
2549
2550 int
2551 chain_member (const_tree elem, const_tree chain)
2552 {
2553 while (chain)
2554 {
2555 if (elem == chain)
2556 return 1;
2557 chain = DECL_CHAIN (chain);
2558 }
2559
2560 return 0;
2561 }
2562
2563 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2564 We expect a null pointer to mark the end of the chain.
2565 This is the Lisp primitive `length'. */
2566
2567 int
2568 list_length (const_tree t)
2569 {
2570 const_tree p = t;
2571 #ifdef ENABLE_TREE_CHECKING
2572 const_tree q = t;
2573 #endif
2574 int len = 0;
2575
2576 while (p)
2577 {
2578 p = TREE_CHAIN (p);
2579 #ifdef ENABLE_TREE_CHECKING
2580 if (len % 2)
2581 q = TREE_CHAIN (q);
2582 gcc_assert (p != q);
2583 #endif
2584 len++;
2585 }
2586
2587 return len;
2588 }
2589
2590 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2591 UNION_TYPE TYPE, or NULL_TREE if none. */
2592
2593 tree
2594 first_field (const_tree type)
2595 {
2596 tree t = TYPE_FIELDS (type);
2597 while (t && TREE_CODE (t) != FIELD_DECL)
2598 t = TREE_CHAIN (t);
2599 return t;
2600 }
2601
2602 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2603 by modifying the last node in chain 1 to point to chain 2.
2604 This is the Lisp primitive `nconc'. */
2605
2606 tree
2607 chainon (tree op1, tree op2)
2608 {
2609 tree t1;
2610
2611 if (!op1)
2612 return op2;
2613 if (!op2)
2614 return op1;
2615
2616 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2617 continue;
2618 TREE_CHAIN (t1) = op2;
2619
2620 #ifdef ENABLE_TREE_CHECKING
2621 {
2622 tree t2;
2623 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2624 gcc_assert (t2 != t1);
2625 }
2626 #endif
2627
2628 return op1;
2629 }
2630
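/* Usage sketch (hypothetical names, for illustration only): merging two
   attribute lists typically looks like

	attrs = chainon (attrs, more_attrs);

   Note that chainon destructively rewrites the last TREE_CHAIN of its
   first argument, so neither argument should share a tail with a chain
   that must remain intact.  */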
2631 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2632
2633 tree
2634 tree_last (tree chain)
2635 {
2636 tree next;
2637 if (chain)
2638 while ((next = TREE_CHAIN (chain)))
2639 chain = next;
2640 return chain;
2641 }
2642
2643 /* Reverse the order of elements in the chain T,
2644 and return the new head of the chain (old last element). */
2645
2646 tree
2647 nreverse (tree t)
2648 {
2649 tree prev = 0, decl, next;
2650 for (decl = t; decl; decl = next)
2651 {
2652 /* We shouldn't be using this function to reverse BLOCK chains; we
2653 have blocks_nreverse for that. */
2654 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2655 next = TREE_CHAIN (decl);
2656 TREE_CHAIN (decl) = prev;
2657 prev = decl;
2658 }
2659 return prev;
2660 }
2661 \f
2662 /* Return a newly created TREE_LIST node whose
2663 purpose and value fields are PARM and VALUE. */
2664
2665 tree
2666 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2667 {
2668 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2669 TREE_PURPOSE (t) = parm;
2670 TREE_VALUE (t) = value;
2671 return t;
2672 }
2673
2674 /* Build a chain of TREE_LIST nodes from a vector. */
2675
2676 tree
2677 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2678 {
2679 tree ret = NULL_TREE;
2680 tree *pp = &ret;
2681 unsigned int i;
2682 tree t;
2683 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2684 {
2685 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2686 pp = &TREE_CHAIN (*pp);
2687 }
2688 return ret;
2689 }
2690
2691 /* Return a newly created TREE_LIST node whose
2692 purpose and value fields are PURPOSE and VALUE
2693 and whose TREE_CHAIN is CHAIN. */
2694
2695 tree
2696 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2697 {
2698 tree node;
2699
2700 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2701 memset (node, 0, sizeof (struct tree_common));
2702
2703 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2704
2705 TREE_SET_CODE (node, TREE_LIST);
2706 TREE_CHAIN (node) = chain;
2707 TREE_PURPOSE (node) = purpose;
2708 TREE_VALUE (node) = value;
2709 return node;
2710 }
2711
2712 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2713 trees. */
2714
2715 vec<tree, va_gc> *
2716 ctor_to_vec (tree ctor)
2717 {
2718 vec<tree, va_gc> *vec;
2719 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2720 unsigned int ix;
2721 tree val;
2722
2723 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2724 vec->quick_push (val);
2725
2726 return vec;
2727 }
2728 \f
2729 /* Return the size nominally occupied by an object of type TYPE
2730 when it resides in memory. The value is measured in units of bytes,
2731 and its data type is that normally used for type sizes
2732 (which is the first type created by make_signed_type or
2733 make_unsigned_type). */
2734
2735 tree
2736 size_in_bytes (const_tree type)
2737 {
2738 tree t;
2739
2740 if (type == error_mark_node)
2741 return integer_zero_node;
2742
2743 type = TYPE_MAIN_VARIANT (type);
2744 t = TYPE_SIZE_UNIT (type);
2745
2746 if (t == 0)
2747 {
2748 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2749 return size_zero_node;
2750 }
2751
2752 return t;
2753 }
2754
2755 /* Return the size of TYPE (in bytes) as a wide integer
2756 or return -1 if the size can vary or is larger than an integer. */
2757
2758 HOST_WIDE_INT
2759 int_size_in_bytes (const_tree type)
2760 {
2761 tree t;
2762
2763 if (type == error_mark_node)
2764 return 0;
2765
2766 type = TYPE_MAIN_VARIANT (type);
2767 t = TYPE_SIZE_UNIT (type);
2768
2769 if (t && tree_fits_uhwi_p (t))
2770 return TREE_INT_CST_LOW (t);
2771 else
2772 return -1;
2773 }
2774
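/* Worked example (expository): for a complete RECORD_TYPE laid out as 12
   bytes, size_in_bytes returns the INTEGER_CST 12 and int_size_in_bytes
   returns the HOST_WIDE_INT 12; for a variable-length array type whose
   TYPE_SIZE_UNIT is not constant, int_size_in_bytes returns -1 while
   size_in_bytes still returns the non-constant size tree.  */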
2775 /* Return the maximum size of TYPE (in bytes) as a wide integer
2776 or return -1 if the size can vary or is larger than an integer. */
2777
2778 HOST_WIDE_INT
2779 max_int_size_in_bytes (const_tree type)
2780 {
2781 HOST_WIDE_INT size = -1;
2782 tree size_tree;
2783
2784 /* If this is an array type, check for a possible MAX_SIZE attached. */
2785
2786 if (TREE_CODE (type) == ARRAY_TYPE)
2787 {
2788 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2789
2790 if (size_tree && tree_fits_uhwi_p (size_tree))
2791 size = tree_to_uhwi (size_tree);
2792 }
2793
2794 /* If we still haven't been able to get a size, see if the language
2795 can compute a maximum size. */
2796
2797 if (size == -1)
2798 {
2799 size_tree = lang_hooks.types.max_size (type);
2800
2801 if (size_tree && tree_fits_uhwi_p (size_tree))
2802 size = tree_to_uhwi (size_tree);
2803 }
2804
2805 return size;
2806 }
2807 \f
2808 /* Return the bit position of FIELD, in bits from the start of the record.
2809 This is a tree of type bitsizetype. */
2810
2811 tree
2812 bit_position (const_tree field)
2813 {
2814 return bit_from_pos (DECL_FIELD_OFFSET (field),
2815 DECL_FIELD_BIT_OFFSET (field));
2816 }
2817
2818 /* Likewise, but return as an integer. It must be representable in
2819 that way (since it could be a signed value, we don't have the
2820 option of returning -1 like int_size_in_bytes can). */
2821
2822 HOST_WIDE_INT
2823 int_bit_position (const_tree field)
2824 {
2825 return tree_to_shwi (bit_position (field));
2826 }
2827 \f
2828 /* Return the byte position of FIELD, in bytes from the start of the record.
2829 This is a tree of type sizetype. */
2830
2831 tree
2832 byte_position (const_tree field)
2833 {
2834 return byte_from_pos (DECL_FIELD_OFFSET (field),
2835 DECL_FIELD_BIT_OFFSET (field));
2836 }
2837
2838 /* Likewise, but return as an integer. It must be representable in
2839 that way (since it could be a signed value, we don't have the
2840 option of returning -1 like int_size_in_bytes can). */
2841
2842 HOST_WIDE_INT
2843 int_byte_position (const_tree field)
2844 {
2845 return tree_to_shwi (byte_position (field));
2846 }
2847 \f
2848 /* Return the strictest alignment, in bits, that T is known to have. */
2849
2850 unsigned int
2851 expr_align (const_tree t)
2852 {
2853 unsigned int align0, align1;
2854
2855 switch (TREE_CODE (t))
2856 {
2857 CASE_CONVERT: case NON_LVALUE_EXPR:
2858 /* If we have conversions, we know that the alignment of the
2859 object must meet each of the alignments of the types. */
2860 align0 = expr_align (TREE_OPERAND (t, 0));
2861 align1 = TYPE_ALIGN (TREE_TYPE (t));
2862 return MAX (align0, align1);
2863
2864 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2865 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2866 case CLEANUP_POINT_EXPR:
2867 /* These don't change the alignment of an object. */
2868 return expr_align (TREE_OPERAND (t, 0));
2869
2870 case COND_EXPR:
2871 /* The best we can do is say that the alignment is the least aligned
2872 of the two arms. */
2873 align0 = expr_align (TREE_OPERAND (t, 1));
2874 align1 = expr_align (TREE_OPERAND (t, 2));
2875 return MIN (align0, align1);
2876
2877 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2878 meaningfully; it's always 1. */
2879 case LABEL_DECL: case CONST_DECL:
2880 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2881 case FUNCTION_DECL:
2882 gcc_assert (DECL_ALIGN (t) != 0);
2883 return DECL_ALIGN (t);
2884
2885 default:
2886 break;
2887 }
2888
2889 /* Otherwise take the alignment from that of the type. */
2890 return TYPE_ALIGN (TREE_TYPE (t));
2891 }
2892 \f
2893 /* Return, as a tree node, the number of elements for TYPE (which is an
2894 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2895
2896 tree
2897 array_type_nelts (const_tree type)
2898 {
2899 tree index_type, min, max;
2900
2901 /* If they did it with unspecified bounds, then we should have already
2902 given an error about it before we got here. */
2903 if (! TYPE_DOMAIN (type))
2904 return error_mark_node;
2905
2906 index_type = TYPE_DOMAIN (type);
2907 min = TYPE_MIN_VALUE (index_type);
2908 max = TYPE_MAX_VALUE (index_type);
2909
2910 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2911 if (!max)
2912 return error_mark_node;
2913
2914 return (integer_zerop (min)
2915 ? max
2916 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2917 }
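/* Worked example (added for exposition): for the C type int[10] the
   domain is [0, 9], so array_type_nelts returns the INTEGER_CST 9; for
   a domain of [1, 10] it folds 10 - 1 and likewise returns 9. Callers
   wanting the element count itself add one back.  */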
2918 \f
2919 /* If arg is static -- a reference to an object in static storage -- then
2920 return the object. This is not the same as the C meaning of `static'.
2921 If arg isn't static, return NULL. */
2922
2923 tree
2924 staticp (tree arg)
2925 {
2926 switch (TREE_CODE (arg))
2927 {
2928 case FUNCTION_DECL:
2929 /* Nested functions are static, even though taking their address will
2930 involve a trampoline as we unnest the nested function and create
2931 the trampoline on the tree level. */
2932 return arg;
2933
2934 case VAR_DECL:
2935 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2936 && ! DECL_THREAD_LOCAL_P (arg)
2937 && ! DECL_DLLIMPORT_P (arg)
2938 ? arg : NULL);
2939
2940 case CONST_DECL:
2941 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2942 ? arg : NULL);
2943
2944 case CONSTRUCTOR:
2945 return TREE_STATIC (arg) ? arg : NULL;
2946
2947 case LABEL_DECL:
2948 case STRING_CST:
2949 return arg;
2950
2951 case COMPONENT_REF:
2952 /* If the thing being referenced is not a field, then it is
2953 something language specific. */
2954 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2955
2956 /* If we are referencing a bitfield, we can't evaluate an
2957 ADDR_EXPR at compile time and so it isn't a constant. */
2958 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2959 return NULL;
2960
2961 return staticp (TREE_OPERAND (arg, 0));
2962
2963 case BIT_FIELD_REF:
2964 return NULL;
2965
2966 case INDIRECT_REF:
2967 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2968
2969 case ARRAY_REF:
2970 case ARRAY_RANGE_REF:
2971 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2972 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2973 return staticp (TREE_OPERAND (arg, 0));
2974 else
2975 return NULL;
2976
2977 case COMPOUND_LITERAL_EXPR:
2978 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2979
2980 default:
2981 return NULL;
2982 }
2983 }
2984
2985 \f
2986
2987
2988 /* Return whether OP is a DECL whose address is function-invariant. */
2989
2990 bool
2991 decl_address_invariant_p (const_tree op)
2992 {
2993 /* The conditions below are slightly less strict than the one in
2994 staticp. */
2995
2996 switch (TREE_CODE (op))
2997 {
2998 case PARM_DECL:
2999 case RESULT_DECL:
3000 case LABEL_DECL:
3001 case FUNCTION_DECL:
3002 return true;
3003
3004 case VAR_DECL:
3005 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3006 || DECL_THREAD_LOCAL_P (op)
3007 || DECL_CONTEXT (op) == current_function_decl
3008 || decl_function_context (op) == current_function_decl)
3009 return true;
3010 break;
3011
3012 case CONST_DECL:
3013 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3014 || decl_function_context (op) == current_function_decl)
3015 return true;
3016 break;
3017
3018 default:
3019 break;
3020 }
3021
3022 return false;
3023 }
3024
3025 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3026
3027 bool
3028 decl_address_ip_invariant_p (const_tree op)
3029 {
3030 /* The conditions below are slightly less strict than the one in
3031 staticp. */
3032
3033 switch (TREE_CODE (op))
3034 {
3035 case LABEL_DECL:
3036 case FUNCTION_DECL:
3037 case STRING_CST:
3038 return true;
3039
3040 case VAR_DECL:
3041 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3042 && !DECL_DLLIMPORT_P (op))
3043 || DECL_THREAD_LOCAL_P (op))
3044 return true;
3045 break;
3046
3047 case CONST_DECL:
3048 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3049 return true;
3050 break;
3051
3052 default:
3053 break;
3054 }
3055
3056 return false;
3057 }
3058
3059
3060 /* Return true if T is function-invariant (internal function, does
3061 not handle arithmetic; that's handled in skip_simple_arithmetic and
3062 tree_invariant_p). */
3063
3064 static bool tree_invariant_p (tree t);
3065
3066 static bool
3067 tree_invariant_p_1 (tree t)
3068 {
3069 tree op;
3070
3071 if (TREE_CONSTANT (t)
3072 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3073 return true;
3074
3075 switch (TREE_CODE (t))
3076 {
3077 case SAVE_EXPR:
3078 return true;
3079
3080 case ADDR_EXPR:
3081 op = TREE_OPERAND (t, 0);
3082 while (handled_component_p (op))
3083 {
3084 switch (TREE_CODE (op))
3085 {
3086 case ARRAY_REF:
3087 case ARRAY_RANGE_REF:
3088 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3089 || TREE_OPERAND (op, 2) != NULL_TREE
3090 || TREE_OPERAND (op, 3) != NULL_TREE)
3091 return false;
3092 break;
3093
3094 case COMPONENT_REF:
3095 if (TREE_OPERAND (op, 2) != NULL_TREE)
3096 return false;
3097 break;
3098
3099 default:;
3100 }
3101 op = TREE_OPERAND (op, 0);
3102 }
3103
3104 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3105
3106 default:
3107 break;
3108 }
3109
3110 return false;
3111 }
3112
3113 /* Return true if T is function-invariant. */
3114
3115 static bool
3116 tree_invariant_p (tree t)
3117 {
3118 tree inner = skip_simple_arithmetic (t);
3119 return tree_invariant_p_1 (inner);
3120 }
3121
3122 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3123 Do this to any expression which may be used in more than one place,
3124 but must be evaluated only once.
3125
3126 Normally, expand_expr would reevaluate the expression each time.
3127 Calling save_expr produces something that is evaluated and recorded
3128 the first time expand_expr is called on it. Subsequent calls to
3129 expand_expr just reuse the recorded value.
3130
3131 The call to expand_expr that generates code that actually computes
3132 the value is the first call *at compile time*. Subsequent calls
3133 *at compile time* generate code to use the saved value.
3134 This produces correct result provided that *at run time* control
3135 always flows through the insns made by the first expand_expr
3136 before reaching the other places where the save_expr was evaluated.
3137 You, the caller of save_expr, must make sure this is so.
3138
3139 Constants, and certain read-only nodes, are returned with no
3140 SAVE_EXPR because that is safe. Expressions containing placeholders
3141 are not touched; see tree.def for an explanation of what these
3142 are used for. */
3143
3144 tree
3145 save_expr (tree expr)
3146 {
3147 tree t = fold (expr);
3148 tree inner;
3149
3150 /* If the tree evaluates to a constant, then we don't want to hide that
3151 fact (i.e. this allows further folding, and direct checks for constants).
3152 However, a read-only object that has side effects cannot be bypassed.
3153 Since it is no problem to reevaluate literals, we just return the
3154 literal node. */
3155 inner = skip_simple_arithmetic (t);
3156 if (TREE_CODE (inner) == ERROR_MARK)
3157 return inner;
3158
3159 if (tree_invariant_p_1 (inner))
3160 return t;
3161
3162 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3163 it means that the size or offset of some field of an object depends on
3164 the value within another field.
3165
3166 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3167 and some variable since it would then need to be both evaluated once and
3168 evaluated more than once. Front-ends must ensure this case cannot
3169 happen by surrounding any such subexpressions in their own SAVE_EXPR
3170 and forcing evaluation at the proper time. */
3171 if (contains_placeholder_p (inner))
3172 return t;
3173
3174 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3175 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3176
3177 /* This expression might be placed ahead of a jump to ensure that the
3178 value was computed on both sides of the jump. So make sure it isn't
3179 eliminated as dead. */
3180 TREE_SIDE_EFFECTS (t) = 1;
3181 return t;
3182 }
3183
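/* Usage sketch (hypothetical, not taken from a real caller): a front end
   that needs an array index both for a load and for the matching store
   can wrap it once so both references see the same evaluation:

	tree idx = save_expr (index_expr);
	tree ref = build4 (ARRAY_REF, elt_type, array, idx,
			   NULL_TREE, NULL_TREE);

   index_expr, elt_type and array are placeholders; the point is only
   that reusing IDX reuses the single evaluation recorded by the
   SAVE_EXPR.  */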
3184 /* Look inside EXPR into any simple arithmetic operations. Return the
3185 outermost non-arithmetic or non-invariant node. */
3186
3187 tree
3188 skip_simple_arithmetic (tree expr)
3189 {
3190 /* We don't care about whether this can be used as an lvalue in this
3191 context. */
3192 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3193 expr = TREE_OPERAND (expr, 0);
3194
3195 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3196 a constant, it will be more efficient to not make another SAVE_EXPR since
3197 it will allow better simplification and GCSE will be able to merge the
3198 computations if they actually occur. */
3199 while (true)
3200 {
3201 if (UNARY_CLASS_P (expr))
3202 expr = TREE_OPERAND (expr, 0);
3203 else if (BINARY_CLASS_P (expr))
3204 {
3205 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3206 expr = TREE_OPERAND (expr, 0);
3207 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3208 expr = TREE_OPERAND (expr, 1);
3209 else
3210 break;
3211 }
3212 else
3213 break;
3214 }
3215
3216 return expr;
3217 }
3218
3219 /* Look inside EXPR into simple arithmetic operations involving constants.
3220 Return the outermost non-arithmetic or non-constant node. */
3221
3222 tree
3223 skip_simple_constant_arithmetic (tree expr)
3224 {
3225 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3226 expr = TREE_OPERAND (expr, 0);
3227
3228 while (true)
3229 {
3230 if (UNARY_CLASS_P (expr))
3231 expr = TREE_OPERAND (expr, 0);
3232 else if (BINARY_CLASS_P (expr))
3233 {
3234 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3235 expr = TREE_OPERAND (expr, 0);
3236 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3237 expr = TREE_OPERAND (expr, 1);
3238 else
3239 break;
3240 }
3241 else
3242 break;
3243 }
3244
3245 return expr;
3246 }
3247
3248 /* Return which tree structure is used by T. */
3249
3250 enum tree_node_structure_enum
3251 tree_node_structure (const_tree t)
3252 {
3253 const enum tree_code code = TREE_CODE (t);
3254 return tree_node_structure_for_code (code);
3255 }
3256
3257 /* Set various status flags when building a CALL_EXPR object T. */
3258
3259 static void
3260 process_call_operands (tree t)
3261 {
3262 bool side_effects = TREE_SIDE_EFFECTS (t);
3263 bool read_only = false;
3264 int i = call_expr_flags (t);
3265
3266 /* Calls have side-effects, except those to const or pure functions. */
3267 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3268 side_effects = true;
3269 /* Propagate TREE_READONLY of arguments for const functions. */
3270 if (i & ECF_CONST)
3271 read_only = true;
3272
3273 if (!side_effects || read_only)
3274 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3275 {
3276 tree op = TREE_OPERAND (t, i);
3277 if (op && TREE_SIDE_EFFECTS (op))
3278 side_effects = true;
3279 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3280 read_only = false;
3281 }
3282
3283 TREE_SIDE_EFFECTS (t) = side_effects;
3284 TREE_READONLY (t) = read_only;
3285 }
3286 \f
3287 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3288 size or offset that depends on a field within a record. */
3289
3290 bool
3291 contains_placeholder_p (const_tree exp)
3292 {
3293 enum tree_code code;
3294
3295 if (!exp)
3296 return 0;
3297
3298 code = TREE_CODE (exp);
3299 if (code == PLACEHOLDER_EXPR)
3300 return 1;
3301
3302 switch (TREE_CODE_CLASS (code))
3303 {
3304 case tcc_reference:
3305 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3306 position computations since they will be converted into a
3307 WITH_RECORD_EXPR involving the reference, which we assume
3308 here will be valid. */
3309 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3310
3311 case tcc_exceptional:
3312 if (code == TREE_LIST)
3313 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3314 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3315 break;
3316
3317 case tcc_unary:
3318 case tcc_binary:
3319 case tcc_comparison:
3320 case tcc_expression:
3321 switch (code)
3322 {
3323 case COMPOUND_EXPR:
3324 /* Ignoring the first operand isn't quite right, but works best. */
3325 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3326
3327 case COND_EXPR:
3328 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3329 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3330 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3331
3332 case SAVE_EXPR:
3333 /* The save_expr function never wraps anything containing
3334 a PLACEHOLDER_EXPR. */
3335 return 0;
3336
3337 default:
3338 break;
3339 }
3340
3341 switch (TREE_CODE_LENGTH (code))
3342 {
3343 case 1:
3344 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3345 case 2:
3346 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3347 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3348 default:
3349 return 0;
3350 }
3351
3352 case tcc_vl_exp:
3353 switch (code)
3354 {
3355 case CALL_EXPR:
3356 {
3357 const_tree arg;
3358 const_call_expr_arg_iterator iter;
3359 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3360 if (CONTAINS_PLACEHOLDER_P (arg))
3361 return 1;
3362 return 0;
3363 }
3364 default:
3365 return 0;
3366 }
3367
3368 default:
3369 return 0;
3370 }
3371 return 0;
3372 }
3373
3374 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3375 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3376 field positions. */
3377
3378 static bool
3379 type_contains_placeholder_1 (const_tree type)
3380 {
3381 /* If the size contains a placeholder or the parent type (component type in
3382 the case of arrays) type involves a placeholder, this type does. */
3383 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3384 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3385 || (!POINTER_TYPE_P (type)
3386 && TREE_TYPE (type)
3387 && type_contains_placeholder_p (TREE_TYPE (type))))
3388 return true;
3389
3390 /* Now do type-specific checks. Note that the last part of the check above
3391 greatly limits what we have to do below. */
3392 switch (TREE_CODE (type))
3393 {
3394 case VOID_TYPE:
3395 case COMPLEX_TYPE:
3396 case ENUMERAL_TYPE:
3397 case BOOLEAN_TYPE:
3398 case POINTER_TYPE:
3399 case OFFSET_TYPE:
3400 case REFERENCE_TYPE:
3401 case METHOD_TYPE:
3402 case FUNCTION_TYPE:
3403 case VECTOR_TYPE:
3404 case NULLPTR_TYPE:
3405 return false;
3406
3407 case INTEGER_TYPE:
3408 case REAL_TYPE:
3409 case FIXED_POINT_TYPE:
3410 /* Here we just check the bounds. */
3411 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3412 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3413
3414 case ARRAY_TYPE:
3415 /* We have already checked the component type above, so just check the
3416 domain type. */
3417 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3418
3419 case RECORD_TYPE:
3420 case UNION_TYPE:
3421 case QUAL_UNION_TYPE:
3422 {
3423 tree field;
3424
3425 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3426 if (TREE_CODE (field) == FIELD_DECL
3427 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3428 || (TREE_CODE (type) == QUAL_UNION_TYPE
3429 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3430 || type_contains_placeholder_p (TREE_TYPE (field))))
3431 return true;
3432
3433 return false;
3434 }
3435
3436 default:
3437 gcc_unreachable ();
3438 }
3439 }
3440
3441 /* Wrapper around above function used to cache its result. */
3442
3443 bool
3444 type_contains_placeholder_p (tree type)
3445 {
3446 bool result;
3447
3448 /* If the contains_placeholder_bits field has been initialized,
3449 then we know the answer. */
3450 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3451 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3452
3453 /* Indicate that we've seen this type node, and the answer is false.
3454 This is what we want to return if we run into recursion via fields. */
3455 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3456
3457 /* Compute the real value. */
3458 result = type_contains_placeholder_1 (type);
3459
3460 /* Store the real value. */
3461 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3462
3463 return result;
3464 }
3465 \f
3466 /* Push tree EXP onto vector QUEUE if it is not already present. */
3467
3468 static void
3469 push_without_duplicates (tree exp, vec<tree> *queue)
3470 {
3471 unsigned int i;
3472 tree iter;
3473
3474 FOR_EACH_VEC_ELT (*queue, i, iter)
3475 if (simple_cst_equal (iter, exp) == 1)
3476 break;
3477
3478 if (!iter)
3479 queue->safe_push (exp);
3480 }
3481
3482 /* Given a tree EXP, find all occurrences of references to fields
3483 in a PLACEHOLDER_EXPR and place them in vector REFS without
3484 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3485 we assume here that EXP contains only arithmetic expressions
3486 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3487 argument list. */
3488
3489 void
3490 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3491 {
3492 enum tree_code code = TREE_CODE (exp);
3493 tree inner;
3494 int i;
3495
3496 /* We handle TREE_LIST and COMPONENT_REF separately. */
3497 if (code == TREE_LIST)
3498 {
3499 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3500 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3501 }
3502 else if (code == COMPONENT_REF)
3503 {
3504 for (inner = TREE_OPERAND (exp, 0);
3505 REFERENCE_CLASS_P (inner);
3506 inner = TREE_OPERAND (inner, 0))
3507 ;
3508
3509 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3510 push_without_duplicates (exp, refs);
3511 else
3512 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3513 }
3514 else
3515 switch (TREE_CODE_CLASS (code))
3516 {
3517 case tcc_constant:
3518 break;
3519
3520 case tcc_declaration:
3521 /* Variables allocated to static storage can stay. */
3522 if (!TREE_STATIC (exp))
3523 push_without_duplicates (exp, refs);
3524 break;
3525
3526 case tcc_expression:
3527 /* This is the pattern built in ada/make_aligning_type. */
3528 if (code == ADDR_EXPR
3529 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3530 {
3531 push_without_duplicates (exp, refs);
3532 break;
3533 }
3534
3535 /* Fall through... */
3536
3537 case tcc_exceptional:
3538 case tcc_unary:
3539 case tcc_binary:
3540 case tcc_comparison:
3541 case tcc_reference:
3542 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3543 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3544 break;
3545
3546 case tcc_vl_exp:
3547 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3548 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3549 break;
3550
3551 default:
3552 gcc_unreachable ();
3553 }
3554 }
3555
3556 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3557 return a tree with all occurrences of references to F in a
3558 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3559 CONST_DECLs. Note that we assume here that EXP contains only
3560 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3561 occurring only in their argument list. */
3562
3563 tree
3564 substitute_in_expr (tree exp, tree f, tree r)
3565 {
3566 enum tree_code code = TREE_CODE (exp);
3567 tree op0, op1, op2, op3;
3568 tree new_tree;
3569
3570 /* We handle TREE_LIST and COMPONENT_REF separately. */
3571 if (code == TREE_LIST)
3572 {
3573 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3574 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3575 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3576 return exp;
3577
3578 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3579 }
3580 else if (code == COMPONENT_REF)
3581 {
3582 tree inner;
3583
3584 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3585 and it is the right field, replace it with R. */
3586 for (inner = TREE_OPERAND (exp, 0);
3587 REFERENCE_CLASS_P (inner);
3588 inner = TREE_OPERAND (inner, 0))
3589 ;
3590
3591 /* The field. */
3592 op1 = TREE_OPERAND (exp, 1);
3593
3594 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3595 return r;
3596
3597 /* If this expression hasn't been completed yet, leave it alone. */
3598 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3599 return exp;
3600
3601 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3602 if (op0 == TREE_OPERAND (exp, 0))
3603 return exp;
3604
3605 new_tree
3606 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3607 }
3608 else
3609 switch (TREE_CODE_CLASS (code))
3610 {
3611 case tcc_constant:
3612 return exp;
3613
3614 case tcc_declaration:
3615 if (exp == f)
3616 return r;
3617 else
3618 return exp;
3619
3620 case tcc_expression:
3621 if (exp == f)
3622 return r;
3623
3624 /* Fall through... */
3625
3626 case tcc_exceptional:
3627 case tcc_unary:
3628 case tcc_binary:
3629 case tcc_comparison:
3630 case tcc_reference:
3631 switch (TREE_CODE_LENGTH (code))
3632 {
3633 case 0:
3634 return exp;
3635
3636 case 1:
3637 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3638 if (op0 == TREE_OPERAND (exp, 0))
3639 return exp;
3640
3641 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3642 break;
3643
3644 case 2:
3645 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3646 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3647
3648 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3649 return exp;
3650
3651 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3652 break;
3653
3654 case 3:
3655 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3656 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3657 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3658
3659 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3660 && op2 == TREE_OPERAND (exp, 2))
3661 return exp;
3662
3663 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3664 break;
3665
3666 case 4:
3667 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3668 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3669 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3670 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3671
3672 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3673 && op2 == TREE_OPERAND (exp, 2)
3674 && op3 == TREE_OPERAND (exp, 3))
3675 return exp;
3676
3677 new_tree
3678 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3679 break;
3680
3681 default:
3682 gcc_unreachable ();
3683 }
3684 break;
3685
3686 case tcc_vl_exp:
3687 {
3688 int i;
3689
3690 new_tree = NULL_TREE;
3691
3692 /* If we are trying to replace F with a constant, inline back
3693 functions which do nothing else than computing a value from
3694 the arguments they are passed. This makes it possible to
3695 fold partially or entirely the replacement expression. */
3696 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3697 {
3698 tree t = maybe_inline_call_in_expr (exp);
3699 if (t)
3700 return SUBSTITUTE_IN_EXPR (t, f, r);
3701 }
3702
3703 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3704 {
3705 tree op = TREE_OPERAND (exp, i);
3706 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3707 if (new_op != op)
3708 {
3709 if (!new_tree)
3710 new_tree = copy_node (exp);
3711 TREE_OPERAND (new_tree, i) = new_op;
3712 }
3713 }
3714
3715 if (new_tree)
3716 {
3717 new_tree = fold (new_tree);
3718 if (TREE_CODE (new_tree) == CALL_EXPR)
3719 process_call_operands (new_tree);
3720 }
3721 else
3722 return exp;
3723 }
3724 break;
3725
3726 default:
3727 gcc_unreachable ();
3728 }
3729
3730 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3731
3732 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3733 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3734
3735 return new_tree;
3736 }
3737
3738 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3739 for it within OBJ, a tree that is an object or a chain of references. */
3740
3741 tree
3742 substitute_placeholder_in_expr (tree exp, tree obj)
3743 {
3744 enum tree_code code = TREE_CODE (exp);
3745 tree op0, op1, op2, op3;
3746 tree new_tree;
3747
3748 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3749 in the chain of OBJ. */
3750 if (code == PLACEHOLDER_EXPR)
3751 {
3752 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3753 tree elt;
3754
3755 for (elt = obj; elt != 0;
3756 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3757 || TREE_CODE (elt) == COND_EXPR)
3758 ? TREE_OPERAND (elt, 1)
3759 : (REFERENCE_CLASS_P (elt)
3760 || UNARY_CLASS_P (elt)
3761 || BINARY_CLASS_P (elt)
3762 || VL_EXP_CLASS_P (elt)
3763 || EXPRESSION_CLASS_P (elt))
3764 ? TREE_OPERAND (elt, 0) : 0))
3765 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3766 return elt;
3767
3768 for (elt = obj; elt != 0;
3769 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3770 || TREE_CODE (elt) == COND_EXPR)
3771 ? TREE_OPERAND (elt, 1)
3772 : (REFERENCE_CLASS_P (elt)
3773 || UNARY_CLASS_P (elt)
3774 || BINARY_CLASS_P (elt)
3775 || VL_EXP_CLASS_P (elt)
3776 || EXPRESSION_CLASS_P (elt))
3777 ? TREE_OPERAND (elt, 0) : 0))
3778 if (POINTER_TYPE_P (TREE_TYPE (elt))
3779 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3780 == need_type))
3781 return fold_build1 (INDIRECT_REF, need_type, elt);
3782
3783 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3784 survives until RTL generation, there will be an error. */
3785 return exp;
3786 }
3787
3788 /* TREE_LIST is special because we need to look at TREE_VALUE
3789 and TREE_CHAIN, not TREE_OPERANDS. */
3790 else if (code == TREE_LIST)
3791 {
3792 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3793 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3794 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3795 return exp;
3796
3797 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3798 }
3799 else
3800 switch (TREE_CODE_CLASS (code))
3801 {
3802 case tcc_constant:
3803 case tcc_declaration:
3804 return exp;
3805
3806 case tcc_exceptional:
3807 case tcc_unary:
3808 case tcc_binary:
3809 case tcc_comparison:
3810 case tcc_expression:
3811 case tcc_reference:
3812 case tcc_statement:
3813 switch (TREE_CODE_LENGTH (code))
3814 {
3815 case 0:
3816 return exp;
3817
3818 case 1:
3819 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3820 if (op0 == TREE_OPERAND (exp, 0))
3821 return exp;
3822
3823 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3824 break;
3825
3826 case 2:
3827 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3828 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3829
3830 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3831 return exp;
3832
3833 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3834 break;
3835
3836 case 3:
3837 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3838 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3839 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3840
3841 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3842 && op2 == TREE_OPERAND (exp, 2))
3843 return exp;
3844
3845 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3846 break;
3847
3848 case 4:
3849 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3850 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3851 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3852 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3853
3854 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3855 && op2 == TREE_OPERAND (exp, 2)
3856 && op3 == TREE_OPERAND (exp, 3))
3857 return exp;
3858
3859 new_tree
3860 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3861 break;
3862
3863 default:
3864 gcc_unreachable ();
3865 }
3866 break;
3867
3868 case tcc_vl_exp:
3869 {
3870 int i;
3871
3872 new_tree = NULL_TREE;
3873
3874 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3875 {
3876 tree op = TREE_OPERAND (exp, i);
3877 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3878 if (new_op != op)
3879 {
3880 if (!new_tree)
3881 new_tree = copy_node (exp);
3882 TREE_OPERAND (new_tree, i) = new_op;
3883 }
3884 }
3885
3886 if (new_tree)
3887 {
3888 new_tree = fold (new_tree);
3889 if (TREE_CODE (new_tree) == CALL_EXPR)
3890 process_call_operands (new_tree);
3891 }
3892 else
3893 return exp;
3894 }
3895 break;
3896
3897 default:
3898 gcc_unreachable ();
3899 }
3900
3901 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3902
3903 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3904 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3905
3906 return new_tree;
3907 }
3908 \f
3909
3910 /* Subroutine of stabilize_reference; this is called for subtrees of
3911 references. Any expression with side-effects must be put in a SAVE_EXPR
3912 to ensure that it is only evaluated once.
3913
3914 We don't put SAVE_EXPR nodes around everything, because assigning very
3915 simple expressions to temporaries causes us to miss good opportunities
3916 for optimizations. Among other things, the opportunity to fold in the
3917 addition of a constant into an addressing mode often gets lost, e.g.
3918 "y[i+1] += x;". In general, we take the approach that we should not make
3919 an assignment unless we are forced into it - i.e., that any non-side effect
3920 operator should be allowed, and that cse should take care of coalescing
3921 multiple utterances of the same expression should that prove fruitful. */
3922
3923 static tree
3924 stabilize_reference_1 (tree e)
3925 {
3926 tree result;
3927 enum tree_code code = TREE_CODE (e);
3928
3929 /* We cannot ignore const expressions because one might be a reference
3930 to a const array whose index contains side-effects. But we can
3931 ignore things that are actual constants or that have already been
3932 handled by this function. */
3933
3934 if (tree_invariant_p (e))
3935 return e;
3936
3937 switch (TREE_CODE_CLASS (code))
3938 {
3939 case tcc_exceptional:
3940 case tcc_type:
3941 case tcc_declaration:
3942 case tcc_comparison:
3943 case tcc_statement:
3944 case tcc_expression:
3945 case tcc_reference:
3946 case tcc_vl_exp:
3947 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3948 so that it will only be evaluated once. */
3949 /* The reference (r) and comparison (<) classes could be handled as
3950 below, but it is generally faster to only evaluate them once. */
3951 if (TREE_SIDE_EFFECTS (e))
3952 return save_expr (e);
3953 return e;
3954
3955 case tcc_constant:
3956 /* Constants need no processing. In fact, we should never reach
3957 here. */
3958 return e;
3959
3960 case tcc_binary:
3961 /* Division is slow and tends to be compiled with jumps,
3962 especially the division by powers of 2 that is often
3963 found inside of an array reference. So do it just once. */
3964 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3965 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3966 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3967 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3968 return save_expr (e);
3969 /* Recursively stabilize each operand. */
3970 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3971 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3972 break;
3973
3974 case tcc_unary:
3975 /* Recursively stabilize each operand. */
3976 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3977 break;
3978
3979 default:
3980 gcc_unreachable ();
3981 }
3982
3983 TREE_TYPE (result) = TREE_TYPE (e);
3984 TREE_READONLY (result) = TREE_READONLY (e);
3985 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3986 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3987
3988 return result;
3989 }
3990
3991 /* Stabilize a reference so that we can use it any number of times
3992 without causing its operands to be evaluated more than once.
3993 Returns the stabilized reference. This works by means of save_expr,
3994 so see the caveats in the comments about save_expr.
3995
3996 Also allows conversion expressions whose operands are references.
3997 Any other kind of expression is returned unchanged. */
3998
3999 tree
4000 stabilize_reference (tree ref)
4001 {
4002 tree result;
4003 enum tree_code code = TREE_CODE (ref);
4004
4005 switch (code)
4006 {
4007 case VAR_DECL:
4008 case PARM_DECL:
4009 case RESULT_DECL:
4010 /* No action is needed in this case. */
4011 return ref;
4012
4013 CASE_CONVERT:
4014 case FLOAT_EXPR:
4015 case FIX_TRUNC_EXPR:
4016 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4017 break;
4018
4019 case INDIRECT_REF:
4020 result = build_nt (INDIRECT_REF,
4021 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4022 break;
4023
4024 case COMPONENT_REF:
4025 result = build_nt (COMPONENT_REF,
4026 stabilize_reference (TREE_OPERAND (ref, 0)),
4027 TREE_OPERAND (ref, 1), NULL_TREE);
4028 break;
4029
4030 case BIT_FIELD_REF:
4031 result = build_nt (BIT_FIELD_REF,
4032 stabilize_reference (TREE_OPERAND (ref, 0)),
4033 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4034 break;
4035
4036 case ARRAY_REF:
4037 result = build_nt (ARRAY_REF,
4038 stabilize_reference (TREE_OPERAND (ref, 0)),
4039 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4040 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4041 break;
4042
4043 case ARRAY_RANGE_REF:
4044 result = build_nt (ARRAY_RANGE_REF,
4045 stabilize_reference (TREE_OPERAND (ref, 0)),
4046 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4047 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4048 break;
4049
4050 case COMPOUND_EXPR:
4051 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4052 it wouldn't be ignored. This matters when dealing with
4053 volatiles. */
4054 return stabilize_reference_1 (ref);
4055
4056 /* If arg isn't a kind of lvalue we recognize, make no change.
4057 Caller should recognize the error for an invalid lvalue. */
4058 default:
4059 return ref;
4060
4061 case ERROR_MARK:
4062 return error_mark_node;
4063 }
4064
4065 TREE_TYPE (result) = TREE_TYPE (ref);
4066 TREE_READONLY (result) = TREE_READONLY (ref);
4067 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4068 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4069
4070 return result;
4071 }
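/* Illustrative sketch only (not part of the interface): a front end that
   must read and then write the same lvalue, as when expanding a compound
   assignment, can stabilize it once so that a side-effecting index such as
   `a[i++]' is evaluated a single time.  LHS below stands for a hypothetical
   int-typed ARRAY_REF supplied by the caller:

     tree stable_lhs = stabilize_reference (lhs);
     tree incremented = build2 (PLUS_EXPR, TREE_TYPE (stable_lhs),
				stable_lhs, integer_one_node);
     tree assign = build2 (MODIFY_EXPR, TREE_TYPE (stable_lhs),
			   stable_lhs, incremented);  */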
4072 \f
4073 /* Low-level constructors for expressions. */
4074
4075 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4076 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4077
4078 void
4079 recompute_tree_invariant_for_addr_expr (tree t)
4080 {
4081 tree node;
4082 bool tc = true, se = false;
4083
4084   /* We started out assuming this address is both invariant and constant,
4085      and has no side effects.  Now go down any handled components and see if
4086 any of them involve offsets that are either non-constant or non-invariant.
4087 Also check for side-effects.
4088
4089 ??? Note that this code makes no attempt to deal with the case where
4090 taking the address of something causes a copy due to misalignment. */
4091
4092 #define UPDATE_FLAGS(NODE) \
4093 do { tree _node = (NODE); \
4094 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4095 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4096
4097 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4098 node = TREE_OPERAND (node, 0))
4099 {
4100 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4101 array reference (probably made temporarily by the G++ front end),
4102 so ignore all the operands. */
4103 if ((TREE_CODE (node) == ARRAY_REF
4104 || TREE_CODE (node) == ARRAY_RANGE_REF)
4105 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4106 {
4107 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4108 if (TREE_OPERAND (node, 2))
4109 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4110 if (TREE_OPERAND (node, 3))
4111 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4112 }
4113 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4114 FIELD_DECL, apparently. The G++ front end can put something else
4115 there, at least temporarily. */
4116 else if (TREE_CODE (node) == COMPONENT_REF
4117 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4118 {
4119 if (TREE_OPERAND (node, 2))
4120 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4121 }
4122 }
4123
4124 node = lang_hooks.expr_to_decl (node, &tc, &se);
4125
4126 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4127 the address, since &(*a)->b is a form of addition. If it's a constant, the
4128 address is constant too. If it's a decl, its address is constant if the
4129 decl is static. Everything else is not constant and, furthermore,
4130 taking the address of a volatile variable is not volatile. */
4131 if (TREE_CODE (node) == INDIRECT_REF
4132 || TREE_CODE (node) == MEM_REF)
4133 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4134 else if (CONSTANT_CLASS_P (node))
4135 ;
4136 else if (DECL_P (node))
4137 tc &= (staticp (node) != NULL_TREE);
4138 else
4139 {
4140 tc = false;
4141 se |= TREE_SIDE_EFFECTS (node);
4142 }
4143
4144
4145 TREE_CONSTANT (t) = tc;
4146 TREE_SIDE_EFFECTS (t) = se;
4147 #undef UPDATE_FLAGS
4148 }
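/* Illustrative sketch: build1 calls this automatically for ADDR_EXPR (see
   below), but code that rewrites the operand of an existing ADDR_EXPR in
   place should refresh the flags itself.  ADDR and NEW_BASE are
   hypothetical trees:

     TREE_OPERAND (addr, 0) = new_base;
     recompute_tree_invariant_for_addr_expr (addr);  */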
4149
4150 /* Build an expression of code CODE, data type TYPE, and operands as
4151 specified. Expressions and reference nodes can be created this way.
4152 Constants, decls, types and misc nodes cannot be.
4153
4154    We define 6 non-variadic functions, from 0 to 5 arguments.  This is
4155 enough for all extant tree codes. */
4156
4157 tree
4158 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4159 {
4160 tree t;
4161
4162 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4163
4164 t = make_node_stat (code PASS_MEM_STAT);
4165 TREE_TYPE (t) = tt;
4166
4167 return t;
4168 }
4169
4170 tree
4171 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4172 {
4173 int length = sizeof (struct tree_exp);
4174 tree t;
4175
4176 record_node_allocation_statistics (code, length);
4177
4178 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4179
4180 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4181
4182 memset (t, 0, sizeof (struct tree_common));
4183
4184 TREE_SET_CODE (t, code);
4185
4186 TREE_TYPE (t) = type;
4187 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4188 TREE_OPERAND (t, 0) = node;
4189 if (node && !TYPE_P (node))
4190 {
4191 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4192 TREE_READONLY (t) = TREE_READONLY (node);
4193 }
4194
4195 if (TREE_CODE_CLASS (code) == tcc_statement)
4196 TREE_SIDE_EFFECTS (t) = 1;
4197 else switch (code)
4198 {
4199 case VA_ARG_EXPR:
4200 /* All of these have side-effects, no matter what their
4201 operands are. */
4202 TREE_SIDE_EFFECTS (t) = 1;
4203 TREE_READONLY (t) = 0;
4204 break;
4205
4206 case INDIRECT_REF:
4207 /* Whether a dereference is readonly has nothing to do with whether
4208 its operand is readonly. */
4209 TREE_READONLY (t) = 0;
4210 break;
4211
4212 case ADDR_EXPR:
4213 if (node)
4214 recompute_tree_invariant_for_addr_expr (t);
4215 break;
4216
4217 default:
4218 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4219 && node && !TYPE_P (node)
4220 && TREE_CONSTANT (node))
4221 TREE_CONSTANT (t) = 1;
4222 if (TREE_CODE_CLASS (code) == tcc_reference
4223 && node && TREE_THIS_VOLATILE (node))
4224 TREE_THIS_VOLATILE (t) = 1;
4225 break;
4226 }
4227
4228 return t;
4229 }
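/* Illustrative sketch of the unary constructor.  VAR names a hypothetical
   VAR_DECL of type int; the second call relies on the ADDR_EXPR handling
   above to set TREE_CONSTANT and TREE_SIDE_EFFECTS:

     tree widened = build1 (NOP_EXPR, long_integer_type_node, var);
     tree address = build1 (ADDR_EXPR,
			    build_pointer_type (TREE_TYPE (var)), var);  */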
4230
4231 #define PROCESS_ARG(N) \
4232 do { \
4233 TREE_OPERAND (t, N) = arg##N; \
4234     if (arg##N && !TYPE_P (arg##N)) \
4235 { \
4236 if (TREE_SIDE_EFFECTS (arg##N)) \
4237 side_effects = 1; \
4238 if (!TREE_READONLY (arg##N) \
4239 && !CONSTANT_CLASS_P (arg##N)) \
4240 (void) (read_only = 0); \
4241 if (!TREE_CONSTANT (arg##N)) \
4242 (void) (constant = 0); \
4243 } \
4244 } while (0)
4245
4246 tree
4247 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4248 {
4249 bool constant, read_only, side_effects;
4250 tree t;
4251
4252 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4253
4254 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4255 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4256 /* When sizetype precision doesn't match that of pointers
4257 we need to be able to build explicit extensions or truncations
4258 of the offset argument. */
4259 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4260 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4261 && TREE_CODE (arg1) == INTEGER_CST);
4262
4263 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4264 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4265 && ptrofftype_p (TREE_TYPE (arg1)));
4266
4267 t = make_node_stat (code PASS_MEM_STAT);
4268 TREE_TYPE (t) = tt;
4269
4270 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4271 result based on those same flags for the arguments. But if the
4272 arguments aren't really even `tree' expressions, we shouldn't be trying
4273 to do this. */
4274
4275 /* Expressions without side effects may be constant if their
4276 arguments are as well. */
4277 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4278 || TREE_CODE_CLASS (code) == tcc_binary);
4279 read_only = 1;
4280 side_effects = TREE_SIDE_EFFECTS (t);
4281
4282 PROCESS_ARG (0);
4283 PROCESS_ARG (1);
4284
4285 TREE_READONLY (t) = read_only;
4286 TREE_CONSTANT (t) = constant;
4287 TREE_SIDE_EFFECTS (t) = side_effects;
4288 TREE_THIS_VOLATILE (t)
4289 = (TREE_CODE_CLASS (code) == tcc_reference
4290 && arg0 && TREE_THIS_VOLATILE (arg0));
4291
4292 return t;
4293 }
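/* Illustrative sketch of the binary constructor, matching the assertions
   above: pointer arithmetic is expressed as POINTER_PLUS_EXPR with an
   offset of ptrofftype (sizetype), not as PLUS_EXPR on the pointer type.
   PTR is a hypothetical expression of pointer type:

     tree off = build_int_cst (sizetype, 4);
     tree plus4 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, off);  */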
4294
4295
4296 tree
4297 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4298 tree arg2 MEM_STAT_DECL)
4299 {
4300 bool constant, read_only, side_effects;
4301 tree t;
4302
4303 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4304 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4305
4306 t = make_node_stat (code PASS_MEM_STAT);
4307 TREE_TYPE (t) = tt;
4308
4309 read_only = 1;
4310
4311 /* As a special exception, if COND_EXPR has NULL branches, we
4312 assume that it is a gimple statement and always consider
4313 it to have side effects. */
4314 if (code == COND_EXPR
4315 && tt == void_type_node
4316 && arg1 == NULL_TREE
4317 && arg2 == NULL_TREE)
4318 side_effects = true;
4319 else
4320 side_effects = TREE_SIDE_EFFECTS (t);
4321
4322 PROCESS_ARG (0);
4323 PROCESS_ARG (1);
4324 PROCESS_ARG (2);
4325
4326 if (code == COND_EXPR)
4327 TREE_READONLY (t) = read_only;
4328
4329 TREE_SIDE_EFFECTS (t) = side_effects;
4330 TREE_THIS_VOLATILE (t)
4331 = (TREE_CODE_CLASS (code) == tcc_reference
4332 && arg0 && TREE_THIS_VOLATILE (arg0));
4333
4334 return t;
4335 }
4336
4337 tree
4338 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4339 tree arg2, tree arg3 MEM_STAT_DECL)
4340 {
4341 bool constant, read_only, side_effects;
4342 tree t;
4343
4344 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4345
4346 t = make_node_stat (code PASS_MEM_STAT);
4347 TREE_TYPE (t) = tt;
4348
4349 side_effects = TREE_SIDE_EFFECTS (t);
4350
4351 PROCESS_ARG (0);
4352 PROCESS_ARG (1);
4353 PROCESS_ARG (2);
4354 PROCESS_ARG (3);
4355
4356 TREE_SIDE_EFFECTS (t) = side_effects;
4357 TREE_THIS_VOLATILE (t)
4358 = (TREE_CODE_CLASS (code) == tcc_reference
4359 && arg0 && TREE_THIS_VOLATILE (arg0));
4360
4361 return t;
4362 }
4363
4364 tree
4365 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4366 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4367 {
4368 bool constant, read_only, side_effects;
4369 tree t;
4370
4371 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4372
4373 t = make_node_stat (code PASS_MEM_STAT);
4374 TREE_TYPE (t) = tt;
4375
4376 side_effects = TREE_SIDE_EFFECTS (t);
4377
4378 PROCESS_ARG (0);
4379 PROCESS_ARG (1);
4380 PROCESS_ARG (2);
4381 PROCESS_ARG (3);
4382 PROCESS_ARG (4);
4383
4384 TREE_SIDE_EFFECTS (t) = side_effects;
4385 TREE_THIS_VOLATILE (t)
4386 = (TREE_CODE_CLASS (code) == tcc_reference
4387 && arg0 && TREE_THIS_VOLATILE (arg0));
4388
4389 return t;
4390 }
4391
4392 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4393 on the pointer PTR. */
4394
4395 tree
4396 build_simple_mem_ref_loc (location_t loc, tree ptr)
4397 {
4398 HOST_WIDE_INT offset = 0;
4399 tree ptype = TREE_TYPE (ptr);
4400 tree tem;
4401 /* For convenience allow addresses that collapse to a simple base
4402 and offset. */
4403 if (TREE_CODE (ptr) == ADDR_EXPR
4404 && (handled_component_p (TREE_OPERAND (ptr, 0))
4405 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4406 {
4407 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4408 gcc_assert (ptr);
4409 ptr = build_fold_addr_expr (ptr);
4410 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4411 }
4412 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4413 ptr, build_int_cst (ptype, offset));
4414 SET_EXPR_LOCATION (tem, loc);
4415 return tem;
4416 }
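/* Illustrative sketch: dereferencing a hypothetical pointer P of type
   int * produces the MEM_REF form of *p, i.e. a MEM_REF of P with a zero
   offset whose type is the pointer type:

     tree deref = build_simple_mem_ref_loc (loc, p);

   The build_simple_mem_ref macro in tree.h is the same call with
   UNKNOWN_LOCATION.  */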
4417
4418 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4419
4420 offset_int
4421 mem_ref_offset (const_tree t)
4422 {
4423 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4424 }
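/* Illustrative sketch, for a hypothetical MEM_REF REF such as the one
   built above (the offset operand is always an INTEGER_CST):

     offset_int off = mem_ref_offset (ref);
     if (wi::fits_shwi_p (off))
       {
	 HOST_WIDE_INT byte_off = off.to_shwi ();
	 ...
       }  */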
4425
4426 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4427 offsetted by OFFSET units. */
4428
4429 tree
4430 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4431 {
4432 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4433 build_fold_addr_expr (base),
4434 build_int_cst (ptr_type_node, offset));
4435 tree addr = build1 (ADDR_EXPR, type, ref);
4436 recompute_tree_invariant_for_addr_expr (addr);
4437 return addr;
4438 }
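/* Illustrative sketch: taking the invariant address of the byte at offset 4
   in a hypothetical static VAR_DECL BASE:

     tree addr = build_invariant_address (build_pointer_type (char_type_node),
					  base, 4);  */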
4439
4440 /* Similar to the build functions above, except don't specify the TREE_TYPE
4441    and leave the TREE_SIDE_EFFECTS as 0.
4442    It is permissible for arguments to be null,
4443    or even garbage, if their values do not matter.  */
4444
4445 tree
4446 build_nt (enum tree_code code, ...)
4447 {
4448 tree t;
4449 int length;
4450 int i;
4451 va_list p;
4452
4453 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4454
4455 va_start (p, code);
4456
4457 t = make_node (code);
4458 length = TREE_CODE_LENGTH (code);
4459
4460 for (i = 0; i < length; i++)
4461 TREE_OPERAND (t, i) = va_arg (p, tree);
4462
4463 va_end (p);
4464 return t;
4465 }
4466
4467 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4468 tree vec. */
4469
4470 tree
4471 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4472 {
4473 tree ret, t;
4474 unsigned int ix;
4475
4476 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4477 CALL_EXPR_FN (ret) = fn;
4478 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4479 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4480 CALL_EXPR_ARG (ret, ix) = t;
4481 return ret;
4482 }
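/* Illustrative sketch: building a raw CALL_EXPR to a hypothetical
   FUNCTION_DECL FNDECL with two hypothetical argument trees A0 and A1:

     vec<tree, va_gc> *args = NULL;
     vec_safe_push (args, a0);
     vec_safe_push (args, a1);
     tree call = build_nt_call_vec (build_fold_addr_expr (fndecl), args);  */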
4483 \f
4484 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4485 We do NOT enter this node in any sort of symbol table.
4486
4487 LOC is the location of the decl.
4488
4489 layout_decl is used to set up the decl's storage layout.
4490 Other slots are initialized to 0 or null pointers. */
4491
4492 tree
4493 build_decl_stat (location_t loc, enum tree_code code, tree name,
4494 tree type MEM_STAT_DECL)
4495 {
4496 tree t;
4497
4498 t = make_node_stat (code PASS_MEM_STAT);
4499 DECL_SOURCE_LOCATION (t) = loc;
4500
4501 /* if (type == error_mark_node)
4502 type = integer_type_node; */
4503 /* That is not done, deliberately, so that having error_mark_node
4504 as the type can suppress useless errors in the use of this variable. */
4505
4506 DECL_NAME (t) = name;
4507 TREE_TYPE (t) = type;
4508
4509 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4510 layout_decl (t, 0);
4511
4512 return t;
4513 }
4514
4515 /* Builds and returns function declaration with NAME and TYPE. */
4516
4517 tree
4518 build_fn_decl (const char *name, tree type)
4519 {
4520 tree id = get_identifier (name);
4521 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4522
4523 DECL_EXTERNAL (decl) = 1;
4524 TREE_PUBLIC (decl) = 1;
4525 DECL_ARTIFICIAL (decl) = 1;
4526 TREE_NOTHROW (decl) = 1;
4527
4528 return decl;
4529 }
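/* Illustrative sketch: declaring an external function the middle end could
   emit calls to.  The name is made up; the type describes
   "int hypothetical_hook (void)":

     tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
     tree fndecl = build_fn_decl ("hypothetical_hook", fntype);  */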
4530
4531 vec<tree, va_gc> *all_translation_units;
4532
4533 /* Builds a new translation-unit decl with name NAME, queues it in the
4534 global list of translation-unit decls and returns it. */
4535
4536 tree
4537 build_translation_unit_decl (tree name)
4538 {
4539 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4540 name, NULL_TREE);
4541 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4542 vec_safe_push (all_translation_units, tu);
4543 return tu;
4544 }
4545
4546 \f
4547 /* BLOCK nodes are used to represent the structure of binding contours
4548 and declarations, once those contours have been exited and their contents
4549 compiled. This information is used for outputting debugging info. */
4550
4551 tree
4552 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4553 {
4554 tree block = make_node (BLOCK);
4555
4556 BLOCK_VARS (block) = vars;
4557 BLOCK_SUBBLOCKS (block) = subblocks;
4558 BLOCK_SUPERCONTEXT (block) = supercontext;
4559 BLOCK_CHAIN (block) = chain;
4560 return block;
4561 }
4562
4563 \f
4564 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4565
4566 LOC is the location to use in tree T. */
4567
4568 void
4569 protected_set_expr_location (tree t, location_t loc)
4570 {
4571 if (t && CAN_HAVE_LOCATION_P (t))
4572 SET_EXPR_LOCATION (t, loc);
4573 }
4574 \f
4575 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4576 is ATTRIBUTE. */
4577
4578 tree
4579 build_decl_attribute_variant (tree ddecl, tree attribute)
4580 {
4581 DECL_ATTRIBUTES (ddecl) = attribute;
4582 return ddecl;
4583 }
4584
4585 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4586 is ATTRIBUTE and its qualifiers are QUALS.
4587
4588 Record such modified types already made so we don't make duplicates. */
4589
4590 tree
4591 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4592 {
4593 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4594 {
4595 inchash::hash hstate;
4596 tree ntype;
4597 int i;
4598 tree t;
4599 enum tree_code code = TREE_CODE (ttype);
4600
4601 /* Building a distinct copy of a tagged type is inappropriate; it
4602 causes breakage in code that expects there to be a one-to-one
4603 relationship between a struct and its fields.
4604 build_duplicate_type is another solution (as used in
4605 handle_transparent_union_attribute), but that doesn't play well
4606 with the stronger C++ type identity model. */
4607 if (TREE_CODE (ttype) == RECORD_TYPE
4608 || TREE_CODE (ttype) == UNION_TYPE
4609 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4610 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4611 {
4612 warning (OPT_Wattributes,
4613 "ignoring attributes applied to %qT after definition",
4614 TYPE_MAIN_VARIANT (ttype));
4615 return build_qualified_type (ttype, quals);
4616 }
4617
4618 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4619 ntype = build_distinct_type_copy (ttype);
4620
4621 TYPE_ATTRIBUTES (ntype) = attribute;
4622
4623 hstate.add_int (code);
4624 if (TREE_TYPE (ntype))
4625 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4626 attribute_hash_list (attribute, hstate);
4627
4628 switch (TREE_CODE (ntype))
4629 {
4630 case FUNCTION_TYPE:
4631 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4632 break;
4633 case ARRAY_TYPE:
4634 if (TYPE_DOMAIN (ntype))
4635 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4636 break;
4637 case INTEGER_TYPE:
4638 t = TYPE_MAX_VALUE (ntype);
4639 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4640 hstate.add_object (TREE_INT_CST_ELT (t, i));
4641 break;
4642 case REAL_TYPE:
4643 case FIXED_POINT_TYPE:
4644 {
4645 unsigned int precision = TYPE_PRECISION (ntype);
4646 hstate.add_object (precision);
4647 }
4648 break;
4649 default:
4650 break;
4651 }
4652
4653 ntype = type_hash_canon (hstate.end(), ntype);
4654
4655 /* If the target-dependent attributes make NTYPE different from
4656 its canonical type, we will need to use structural equality
4657 checks for this type. */
4658 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4659 || !comp_type_attributes (ntype, ttype))
4660 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4661 else if (TYPE_CANONICAL (ntype) == ntype)
4662 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4663
4664 ttype = build_qualified_type (ntype, quals);
4665 }
4666 else if (TYPE_QUALS (ttype) != quals)
4667 ttype = build_qualified_type (ttype, quals);
4668
4669 return ttype;
4670 }
4671
4672 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4673 the same. */
4674
4675 static bool
4676 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4677 {
4678 tree cl1, cl2;
4679 for (cl1 = clauses1, cl2 = clauses2;
4680 cl1 && cl2;
4681 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4682 {
4683 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4684 return false;
4685 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4686 {
4687 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4688 OMP_CLAUSE_DECL (cl2)) != 1)
4689 return false;
4690 }
4691 switch (OMP_CLAUSE_CODE (cl1))
4692 {
4693 case OMP_CLAUSE_ALIGNED:
4694 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4695 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4696 return false;
4697 break;
4698 case OMP_CLAUSE_LINEAR:
4699 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4700 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4701 return false;
4702 break;
4703 case OMP_CLAUSE_SIMDLEN:
4704 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4705 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4706 return false;
4707 default:
4708 break;
4709 }
4710 }
4711 return true;
4712 }
4713
4714 /* Compare two constructor-element-type constants.  Return true if the lists
4715    are known to be equal; otherwise return false.  */
4716
4717 static bool
4718 simple_cst_list_equal (const_tree l1, const_tree l2)
4719 {
4720 while (l1 != NULL_TREE && l2 != NULL_TREE)
4721 {
4722 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4723 return false;
4724
4725 l1 = TREE_CHAIN (l1);
4726 l2 = TREE_CHAIN (l2);
4727 }
4728
4729 return l1 == l2;
4730 }
4731
4732 /* Compare two attributes for their value identity. Return true if the
4733 attribute values are known to be equal; otherwise return false.
4734 */
4735
4736 static bool
4737 attribute_value_equal (const_tree attr1, const_tree attr2)
4738 {
4739 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4740 return true;
4741
4742 if (TREE_VALUE (attr1) != NULL_TREE
4743 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4744 && TREE_VALUE (attr2) != NULL
4745 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4746 return (simple_cst_list_equal (TREE_VALUE (attr1),
4747 TREE_VALUE (attr2)) == 1);
4748
4749 if ((flag_openmp || flag_openmp_simd)
4750 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4751 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4752 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4753 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4754 TREE_VALUE (attr2));
4755
4756 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4757 }
4758
4759 /* Return 0 if the attributes for two types are incompatible, 1 if they
4760 are compatible, and 2 if they are nearly compatible (which causes a
4761 warning to be generated). */
4762 int
4763 comp_type_attributes (const_tree type1, const_tree type2)
4764 {
4765 const_tree a1 = TYPE_ATTRIBUTES (type1);
4766 const_tree a2 = TYPE_ATTRIBUTES (type2);
4767 const_tree a;
4768
4769 if (a1 == a2)
4770 return 1;
4771 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4772 {
4773 const struct attribute_spec *as;
4774 const_tree attr;
4775
4776 as = lookup_attribute_spec (get_attribute_name (a));
4777 if (!as || as->affects_type_identity == false)
4778 continue;
4779
4780 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4781 if (!attr || !attribute_value_equal (a, attr))
4782 break;
4783 }
4784 if (!a)
4785 {
4786 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4787 {
4788 const struct attribute_spec *as;
4789
4790 as = lookup_attribute_spec (get_attribute_name (a));
4791 if (!as || as->affects_type_identity == false)
4792 continue;
4793
4794 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4795 break;
4796 	  /* We don't need to compare trees again, as we did this
4797 	     already in the first loop.  */
4798 }
4799       /* All attributes that affect type identity are equal, so
4800 	 there is no need to call the target hook for a comparison.  */
4801 if (!a)
4802 return 1;
4803 }
4804   /* As some type combinations (like the default calling convention) might
4805      be compatible, we have to call the target hook to get the final result.  */
4806 return targetm.comp_type_attributes (type1, type2);
4807 }
4808
4809 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4810 is ATTRIBUTE.
4811
4812 Record such modified types already made so we don't make duplicates. */
4813
4814 tree
4815 build_type_attribute_variant (tree ttype, tree attribute)
4816 {
4817 return build_type_attribute_qual_variant (ttype, attribute,
4818 TYPE_QUALS (ttype));
4819 }
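/* Illustrative sketch: attaching the argument-less "may_alias" attribute to
   a hypothetical type T.  Attribute lists are TREE_LISTs whose TREE_PURPOSE
   is the attribute name and whose TREE_VALUE is the argument list:

     tree attrs = tree_cons (get_identifier ("may_alias"), NULL_TREE,
			     TYPE_ATTRIBUTES (t));
     tree variant = build_type_attribute_variant (t, attrs);  */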
4820
4821
4822 /* Reset the expression *EXPR_P, a size or position.
4823
4824 ??? We could reset all non-constant sizes or positions. But it's cheap
4825 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4826
4827 We need to reset self-referential sizes or positions because they cannot
4828 be gimplified and thus can contain a CALL_EXPR after the gimplification
4829 is finished, which will run afoul of LTO streaming. And they need to be
4830 reset to something essentially dummy but not constant, so as to preserve
4831 the properties of the object they are attached to. */
4832
4833 static inline void
4834 free_lang_data_in_one_sizepos (tree *expr_p)
4835 {
4836 tree expr = *expr_p;
4837 if (CONTAINS_PLACEHOLDER_P (expr))
4838 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4839 }
4840
4841
4842 /* Reset all the fields in a binfo node BINFO. We only keep
4843 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4844
4845 static void
4846 free_lang_data_in_binfo (tree binfo)
4847 {
4848 unsigned i;
4849 tree t;
4850
4851 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4852
4853 BINFO_VIRTUALS (binfo) = NULL_TREE;
4854 BINFO_BASE_ACCESSES (binfo) = NULL;
4855 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4856 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4857
4858 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4859 free_lang_data_in_binfo (t);
4860 }
4861
4862
4863 /* Reset all language specific information still present in TYPE. */
4864
4865 static void
4866 free_lang_data_in_type (tree type)
4867 {
4868 gcc_assert (TYPE_P (type));
4869
4870 /* Give the FE a chance to remove its own data first. */
4871 lang_hooks.free_lang_data (type);
4872
4873 TREE_LANG_FLAG_0 (type) = 0;
4874 TREE_LANG_FLAG_1 (type) = 0;
4875 TREE_LANG_FLAG_2 (type) = 0;
4876 TREE_LANG_FLAG_3 (type) = 0;
4877 TREE_LANG_FLAG_4 (type) = 0;
4878 TREE_LANG_FLAG_5 (type) = 0;
4879 TREE_LANG_FLAG_6 (type) = 0;
4880
4881 if (TREE_CODE (type) == FUNCTION_TYPE)
4882 {
4883 /* Remove the const and volatile qualifiers from arguments. The
4884 C++ front end removes them, but the C front end does not,
4885 leading to false ODR violation errors when merging two
4886 instances of the same function signature compiled by
4887 different front ends. */
4888 tree p;
4889
4890 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4891 {
4892 tree arg_type = TREE_VALUE (p);
4893
4894 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4895 {
4896 int quals = TYPE_QUALS (arg_type)
4897 & ~TYPE_QUAL_CONST
4898 & ~TYPE_QUAL_VOLATILE;
4899 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4900 free_lang_data_in_type (TREE_VALUE (p));
4901 }
4902 }
4903 }
4904
4905 /* Remove members that are not actually FIELD_DECLs from the field
4906 list of an aggregate. These occur in C++. */
4907 if (RECORD_OR_UNION_TYPE_P (type))
4908 {
4909 tree prev, member;
4910
4911 /* Note that TYPE_FIELDS can be shared across distinct
4912 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4913 to be removed, we cannot set its TREE_CHAIN to NULL.
4914 Otherwise, we would not be able to find all the other fields
4915 in the other instances of this TREE_TYPE.
4916
4917 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4918 prev = NULL_TREE;
4919 member = TYPE_FIELDS (type);
4920 while (member)
4921 {
4922 if (TREE_CODE (member) == FIELD_DECL
4923 || TREE_CODE (member) == TYPE_DECL)
4924 {
4925 if (prev)
4926 TREE_CHAIN (prev) = member;
4927 else
4928 TYPE_FIELDS (type) = member;
4929 prev = member;
4930 }
4931
4932 member = TREE_CHAIN (member);
4933 }
4934
4935 if (prev)
4936 TREE_CHAIN (prev) = NULL_TREE;
4937 else
4938 TYPE_FIELDS (type) = NULL_TREE;
4939
4940 TYPE_METHODS (type) = NULL_TREE;
4941 if (TYPE_BINFO (type))
4942 free_lang_data_in_binfo (TYPE_BINFO (type));
4943 }
4944 else
4945 {
4946 /* For non-aggregate types, clear out the language slot (which
4947 overloads TYPE_BINFO). */
4948 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4949
4950 if (INTEGRAL_TYPE_P (type)
4951 || SCALAR_FLOAT_TYPE_P (type)
4952 || FIXED_POINT_TYPE_P (type))
4953 {
4954 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4955 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4956 }
4957 }
4958
4959 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4960 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4961
4962 if (TYPE_CONTEXT (type)
4963 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4964 {
4965 tree ctx = TYPE_CONTEXT (type);
4966 do
4967 {
4968 ctx = BLOCK_SUPERCONTEXT (ctx);
4969 }
4970 while (ctx && TREE_CODE (ctx) == BLOCK);
4971 TYPE_CONTEXT (type) = ctx;
4972 }
4973 }
4974
4975
4976 /* Return true if DECL may need an assembler name to be set. */
4977
4978 static inline bool
4979 need_assembler_name_p (tree decl)
4980 {
4981 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
4982 if (TREE_CODE (decl) != FUNCTION_DECL
4983 && TREE_CODE (decl) != VAR_DECL)
4984 return false;
4985
4986 /* If DECL already has its assembler name set, it does not need a
4987 new one. */
4988 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
4989 || DECL_ASSEMBLER_NAME_SET_P (decl))
4990 return false;
4991
4992 /* Abstract decls do not need an assembler name. */
4993 if (DECL_ABSTRACT (decl))
4994 return false;
4995
4996 /* For VAR_DECLs, only static, public and external symbols need an
4997 assembler name. */
4998 if (TREE_CODE (decl) == VAR_DECL
4999 && !TREE_STATIC (decl)
5000 && !TREE_PUBLIC (decl)
5001 && !DECL_EXTERNAL (decl))
5002 return false;
5003
5004 if (TREE_CODE (decl) == FUNCTION_DECL)
5005 {
5006 /* Do not set assembler name on builtins. Allow RTL expansion to
5007 decide whether to expand inline or via a regular call. */
5008 if (DECL_BUILT_IN (decl)
5009 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5010 return false;
5011
5012 /* Functions represented in the callgraph need an assembler name. */
5013 if (cgraph_node::get (decl) != NULL)
5014 return true;
5015
5016 /* Unused and not public functions don't need an assembler name. */
5017 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5018 return false;
5019 }
5020
5021 return true;
5022 }
5023
5024
5025 /* Reset all language specific information still present in symbol
5026 DECL. */
5027
5028 static void
5029 free_lang_data_in_decl (tree decl)
5030 {
5031 gcc_assert (DECL_P (decl));
5032
5033 /* Give the FE a chance to remove its own data first. */
5034 lang_hooks.free_lang_data (decl);
5035
5036 TREE_LANG_FLAG_0 (decl) = 0;
5037 TREE_LANG_FLAG_1 (decl) = 0;
5038 TREE_LANG_FLAG_2 (decl) = 0;
5039 TREE_LANG_FLAG_3 (decl) = 0;
5040 TREE_LANG_FLAG_4 (decl) = 0;
5041 TREE_LANG_FLAG_5 (decl) = 0;
5042 TREE_LANG_FLAG_6 (decl) = 0;
5043
5044 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5045 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5046 if (TREE_CODE (decl) == FIELD_DECL)
5047 {
5048 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5049 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5050 DECL_QUALIFIER (decl) = NULL_TREE;
5051 }
5052
5053 if (TREE_CODE (decl) == FUNCTION_DECL)
5054 {
5055 struct cgraph_node *node;
5056 if (!(node = cgraph_node::get (decl))
5057 || (!node->definition && !node->clones))
5058 {
5059 if (node)
5060 node->release_body ();
5061 else
5062 {
5063 release_function_body (decl);
5064 DECL_ARGUMENTS (decl) = NULL;
5065 DECL_RESULT (decl) = NULL;
5066 DECL_INITIAL (decl) = error_mark_node;
5067 }
5068 }
5069 if (gimple_has_body_p (decl))
5070 {
5071 tree t;
5072
5073 /* If DECL has a gimple body, then the context for its
5074 arguments must be DECL. Otherwise, it doesn't really
5075 matter, as we will not be emitting any code for DECL. In
5076 general, there may be other instances of DECL created by
5077 the front end and since PARM_DECLs are generally shared,
5078 their DECL_CONTEXT changes as the replicas of DECL are
5079 created. The only time where DECL_CONTEXT is important
5080 is for the FUNCTION_DECLs that have a gimple body (since
5081 the PARM_DECL will be used in the function's body). */
5082 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5083 DECL_CONTEXT (t) = decl;
5084 }
5085
5086 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5087 At this point, it is not needed anymore. */
5088 DECL_SAVED_TREE (decl) = NULL_TREE;
5089
5090 /* Clear the abstract origin if it refers to a method. Otherwise
5091 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5092 origin will not be output correctly. */
5093 if (DECL_ABSTRACT_ORIGIN (decl)
5094 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5095 && RECORD_OR_UNION_TYPE_P
5096 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5097 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5098
5099 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5100 DECL_VINDEX referring to itself into a vtable slot number as it
5101 should. Happens with functions that are copied and then forgotten
5102 about. Just clear it, it won't matter anymore. */
5103 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5104 DECL_VINDEX (decl) = NULL_TREE;
5105 }
5106 else if (TREE_CODE (decl) == VAR_DECL)
5107 {
5108 if ((DECL_EXTERNAL (decl)
5109 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5110 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5111 DECL_INITIAL (decl) = NULL_TREE;
5112 }
5113 else if (TREE_CODE (decl) == TYPE_DECL
5114 || TREE_CODE (decl) == FIELD_DECL)
5115 DECL_INITIAL (decl) = NULL_TREE;
5116 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5117 && DECL_INITIAL (decl)
5118 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5119 {
5120       /* Strip builtins from the translation-unit BLOCK.  We still have targets
5121 	 without builtin_decl_explicit support; also, builtins are shared
5122 	 nodes, so we can't use TREE_CHAIN in multiple lists.  */
5123 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5124 while (*nextp)
5125 {
5126 tree var = *nextp;
5127 if (TREE_CODE (var) == FUNCTION_DECL
5128 && DECL_BUILT_IN (var))
5129 *nextp = TREE_CHAIN (var);
5130 else
5131 nextp = &TREE_CHAIN (var);
5132 }
5133 }
5134 }
5135
5136
5137 /* Data used when collecting DECLs and TYPEs for language data removal. */
5138
5139 struct free_lang_data_d
5140 {
5141 /* Worklist to avoid excessive recursion. */
5142 vec<tree> worklist;
5143
5144 /* Set of traversed objects. Used to avoid duplicate visits. */
5145 hash_set<tree> *pset;
5146
5147 /* Array of symbols to process with free_lang_data_in_decl. */
5148 vec<tree> decls;
5149
5150 /* Array of types to process with free_lang_data_in_type. */
5151 vec<tree> types;
5152 };
5153
5154
5155 /* Save all language fields needed to generate proper debug information
5156 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5157
5158 static void
5159 save_debug_info_for_decl (tree t)
5160 {
5161 /*struct saved_debug_info_d *sdi;*/
5162
5163 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5164
5165 /* FIXME. Partial implementation for saving debug info removed. */
5166 }
5167
5168
5169 /* Save all language fields needed to generate proper debug information
5170 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5171
5172 static void
5173 save_debug_info_for_type (tree t)
5174 {
5175 /*struct saved_debug_info_d *sdi;*/
5176
5177 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5178
5179 /* FIXME. Partial implementation for saving debug info removed. */
5180 }
5181
5182
5183 /* Add type or decl T to one of the list of tree nodes that need their
5184 language data removed. The lists are held inside FLD. */
5185
5186 static void
5187 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5188 {
5189 if (DECL_P (t))
5190 {
5191 fld->decls.safe_push (t);
5192 if (debug_info_level > DINFO_LEVEL_TERSE)
5193 save_debug_info_for_decl (t);
5194 }
5195 else if (TYPE_P (t))
5196 {
5197 fld->types.safe_push (t);
5198 if (debug_info_level > DINFO_LEVEL_TERSE)
5199 save_debug_info_for_type (t);
5200 }
5201 else
5202 gcc_unreachable ();
5203 }
5204
5205 /* Push tree node T into FLD->WORKLIST. */
5206
5207 static inline void
5208 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5209 {
5210 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5211 fld->worklist.safe_push ((t));
5212 }
5213
5214
5215 /* Operand callback helper for free_lang_data_in_node. *TP is the
5216 subtree operand being considered. */
5217
5218 static tree
5219 find_decls_types_r (tree *tp, int *ws, void *data)
5220 {
5221 tree t = *tp;
5222 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5223
5224 if (TREE_CODE (t) == TREE_LIST)
5225 return NULL_TREE;
5226
5227 /* Language specific nodes will be removed, so there is no need
5228 to gather anything under them. */
5229 if (is_lang_specific (t))
5230 {
5231 *ws = 0;
5232 return NULL_TREE;
5233 }
5234
5235 if (DECL_P (t))
5236 {
5237 /* Note that walk_tree does not traverse every possible field in
5238 decls, so we have to do our own traversals here. */
5239 add_tree_to_fld_list (t, fld);
5240
5241 fld_worklist_push (DECL_NAME (t), fld);
5242 fld_worklist_push (DECL_CONTEXT (t), fld);
5243 fld_worklist_push (DECL_SIZE (t), fld);
5244 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5245
5246 /* We are going to remove everything under DECL_INITIAL for
5247 TYPE_DECLs. No point walking them. */
5248 if (TREE_CODE (t) != TYPE_DECL)
5249 fld_worklist_push (DECL_INITIAL (t), fld);
5250
5251 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5252 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5253
5254 if (TREE_CODE (t) == FUNCTION_DECL)
5255 {
5256 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5257 fld_worklist_push (DECL_RESULT (t), fld);
5258 }
5259 else if (TREE_CODE (t) == TYPE_DECL)
5260 {
5261 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5262 }
5263 else if (TREE_CODE (t) == FIELD_DECL)
5264 {
5265 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5266 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5267 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5268 fld_worklist_push (DECL_FCONTEXT (t), fld);
5269 }
5270
5271 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5272 && DECL_HAS_VALUE_EXPR_P (t))
5273 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5274
5275 if (TREE_CODE (t) != FIELD_DECL
5276 && TREE_CODE (t) != TYPE_DECL)
5277 fld_worklist_push (TREE_CHAIN (t), fld);
5278 *ws = 0;
5279 }
5280 else if (TYPE_P (t))
5281 {
5282 /* Note that walk_tree does not traverse every possible field in
5283 types, so we have to do our own traversals here. */
5284 add_tree_to_fld_list (t, fld);
5285
5286 if (!RECORD_OR_UNION_TYPE_P (t))
5287 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5288 fld_worklist_push (TYPE_SIZE (t), fld);
5289 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5290 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5291 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5292 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5293 fld_worklist_push (TYPE_NAME (t), fld);
5294       /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO.  We do not stream
5295 	 them and thus do not want to reach unused pointer types
5296 	 this way.  */
5297 if (!POINTER_TYPE_P (t))
5298 fld_worklist_push (TYPE_MINVAL (t), fld);
5299 if (!RECORD_OR_UNION_TYPE_P (t))
5300 fld_worklist_push (TYPE_MAXVAL (t), fld);
5301 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5302       /* Do not walk TYPE_NEXT_VARIANT.  We do not stream it and thus
5303 	 do not want to reach unused variants this way.  */
5304 if (TYPE_CONTEXT (t))
5305 {
5306 tree ctx = TYPE_CONTEXT (t);
5307 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5308 So push that instead. */
5309 while (ctx && TREE_CODE (ctx) == BLOCK)
5310 ctx = BLOCK_SUPERCONTEXT (ctx);
5311 fld_worklist_push (ctx, fld);
5312 }
5313       /* Do not walk TYPE_CANONICAL.  We do not stream it and thus do not
5314 	 want to reach unused types this way.  */
5315
5316 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5317 {
5318 unsigned i;
5319 tree tem;
5320 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5321 fld_worklist_push (TREE_TYPE (tem), fld);
5322 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5323 if (tem
5324 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5325 && TREE_CODE (tem) == TREE_LIST)
5326 do
5327 {
5328 fld_worklist_push (TREE_VALUE (tem), fld);
5329 tem = TREE_CHAIN (tem);
5330 }
5331 while (tem);
5332 }
5333 if (RECORD_OR_UNION_TYPE_P (t))
5334 {
5335 tree tem;
5336 /* Push all TYPE_FIELDS - there can be interleaving interesting
5337 and non-interesting things. */
5338 tem = TYPE_FIELDS (t);
5339 while (tem)
5340 {
5341 if (TREE_CODE (tem) == FIELD_DECL
5342 || TREE_CODE (tem) == TYPE_DECL)
5343 fld_worklist_push (tem, fld);
5344 tem = TREE_CHAIN (tem);
5345 }
5346 }
5347
5348 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5349 *ws = 0;
5350 }
5351 else if (TREE_CODE (t) == BLOCK)
5352 {
5353 tree tem;
5354 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5355 fld_worklist_push (tem, fld);
5356 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5357 fld_worklist_push (tem, fld);
5358 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5359 }
5360
5361 if (TREE_CODE (t) != IDENTIFIER_NODE
5362 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5363 fld_worklist_push (TREE_TYPE (t), fld);
5364
5365 return NULL_TREE;
5366 }
5367
5368
5369 /* Find decls and types in T. */
5370
5371 static void
5372 find_decls_types (tree t, struct free_lang_data_d *fld)
5373 {
5374 while (1)
5375 {
5376 if (!fld->pset->contains (t))
5377 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5378 if (fld->worklist.is_empty ())
5379 break;
5380 t = fld->worklist.pop ();
5381 }
5382 }
5383
5384 /* Translate all the types in LIST with the corresponding runtime
5385 types. */
5386
5387 static tree
5388 get_eh_types_for_runtime (tree list)
5389 {
5390 tree head, prev;
5391
5392 if (list == NULL_TREE)
5393 return NULL_TREE;
5394
5395 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5396 prev = head;
5397 list = TREE_CHAIN (list);
5398 while (list)
5399 {
5400 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5401 TREE_CHAIN (prev) = n;
5402 prev = TREE_CHAIN (prev);
5403 list = TREE_CHAIN (list);
5404 }
5405
5406 return head;
5407 }
5408
5409
5410 /* Find decls and types referenced in EH region R and store them in
5411 FLD->DECLS and FLD->TYPES. */
5412
5413 static void
5414 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5415 {
5416 switch (r->type)
5417 {
5418 case ERT_CLEANUP:
5419 break;
5420
5421 case ERT_TRY:
5422 {
5423 eh_catch c;
5424
5425 /* The types referenced in each catch must first be changed to the
5426 EH types used at runtime. This removes references to FE types
5427 in the region. */
5428 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5429 {
5430 c->type_list = get_eh_types_for_runtime (c->type_list);
5431 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5432 }
5433 }
5434 break;
5435
5436 case ERT_ALLOWED_EXCEPTIONS:
5437 r->u.allowed.type_list
5438 = get_eh_types_for_runtime (r->u.allowed.type_list);
5439 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5440 break;
5441
5442 case ERT_MUST_NOT_THROW:
5443 walk_tree (&r->u.must_not_throw.failure_decl,
5444 find_decls_types_r, fld, fld->pset);
5445 break;
5446 }
5447 }
5448
5449
5450 /* Find decls and types referenced in cgraph node N and store them in
5451 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5452 look for *every* kind of DECL and TYPE node reachable from N,
5453    including those embedded inside types and decls (i.e., TYPE_DECLs,
5454    NAMESPACE_DECLs, etc.).  */
5455
5456 static void
5457 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5458 {
5459 basic_block bb;
5460 struct function *fn;
5461 unsigned ix;
5462 tree t;
5463
5464 find_decls_types (n->decl, fld);
5465
5466 if (!gimple_has_body_p (n->decl))
5467 return;
5468
5469 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5470
5471 fn = DECL_STRUCT_FUNCTION (n->decl);
5472
5473 /* Traverse locals. */
5474 FOR_EACH_LOCAL_DECL (fn, ix, t)
5475 find_decls_types (t, fld);
5476
5477 /* Traverse EH regions in FN. */
5478 {
5479 eh_region r;
5480 FOR_ALL_EH_REGION_FN (r, fn)
5481 find_decls_types_in_eh_region (r, fld);
5482 }
5483
5484 /* Traverse every statement in FN. */
5485 FOR_EACH_BB_FN (bb, fn)
5486 {
5487 gimple_stmt_iterator si;
5488 unsigned i;
5489
5490 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5491 {
5492 gimple phi = gsi_stmt (si);
5493
5494 for (i = 0; i < gimple_phi_num_args (phi); i++)
5495 {
5496 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5497 find_decls_types (*arg_p, fld);
5498 }
5499 }
5500
5501 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5502 {
5503 gimple stmt = gsi_stmt (si);
5504
5505 if (is_gimple_call (stmt))
5506 find_decls_types (gimple_call_fntype (stmt), fld);
5507
5508 for (i = 0; i < gimple_num_ops (stmt); i++)
5509 {
5510 tree arg = gimple_op (stmt, i);
5511 find_decls_types (arg, fld);
5512 }
5513 }
5514 }
5515 }
5516
5517
5518 /* Find decls and types referenced in varpool node N and store them in
5519 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5520 look for *every* kind of DECL and TYPE node reachable from N,
5521    including those embedded inside types and decls (i.e., TYPE_DECLs,
5522    NAMESPACE_DECLs, etc.).  */
5523
5524 static void
5525 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5526 {
5527 find_decls_types (v->decl, fld);
5528 }
5529
5530 /* If T needs an assembler name, have one created for it. */
5531
5532 void
5533 assign_assembler_name_if_neeeded (tree t)
5534 {
5535 if (need_assembler_name_p (t))
5536 {
5537 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5538 diagnostics that use input_location to show locus
5539 information. The problem here is that, at this point,
5540 input_location is generally anchored to the end of the file
5541 (since the parser is long gone), so we don't have a good
5542 position to pin it to.
5543
5544 To alleviate this problem, this uses the location of T's
5545 declaration. Examples of this are
5546 testsuite/g++.dg/template/cond2.C and
5547 testsuite/g++.dg/template/pr35240.C. */
5548 location_t saved_location = input_location;
5549 input_location = DECL_SOURCE_LOCATION (t);
5550
5551 decl_assembler_name (t);
5552
5553 input_location = saved_location;
5554 }
5555 }
5556
5557
5558 /* Free language specific information for every operand and expression
5559 in every node of the call graph. This process operates in three stages:
5560
5561 1- Every callgraph node and varpool node is traversed looking for
5562 decls and types embedded in them. This is a more exhaustive
5563 search than that done by find_referenced_vars, because it will
5564 also collect individual fields, decls embedded in types, etc.
5565
5566 2- All the decls found are sent to free_lang_data_in_decl.
5567
5568 3- All the types found are sent to free_lang_data_in_type.
5569
5570 The ordering between decls and types is important because
5571 free_lang_data_in_decl sets assembler names, which includes
5572 mangling. So types cannot be freed up until assembler names have
5573 been set up. */
5574
5575 static void
5576 free_lang_data_in_cgraph (void)
5577 {
5578 struct cgraph_node *n;
5579 varpool_node *v;
5580 struct free_lang_data_d fld;
5581 tree t;
5582 unsigned i;
5583 alias_pair *p;
5584
5585 /* Initialize sets and arrays to store referenced decls and types. */
5586 fld.pset = new hash_set<tree>;
5587 fld.worklist.create (0);
5588 fld.decls.create (100);
5589 fld.types.create (100);
5590
5591 /* Find decls and types in the body of every function in the callgraph. */
5592 FOR_EACH_FUNCTION (n)
5593 find_decls_types_in_node (n, &fld);
5594
5595 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5596 find_decls_types (p->decl, &fld);
5597
5598 /* Find decls and types in every varpool symbol. */
5599 FOR_EACH_VARIABLE (v)
5600 find_decls_types_in_var (v, &fld);
5601
5602   /* Set the assembler name on every decl found.  We need to do this
5603      now because free_lang_data_in_decl will invalidate data needed
5604      for mangling, which would break mangling on interdependent decls.  */
5605 FOR_EACH_VEC_ELT (fld.decls, i, t)
5606 assign_assembler_name_if_neeeded (t);
5607
5608 /* Traverse every decl found freeing its language data. */
5609 FOR_EACH_VEC_ELT (fld.decls, i, t)
5610 free_lang_data_in_decl (t);
5611
5612 /* Traverse every type found freeing its language data. */
5613 FOR_EACH_VEC_ELT (fld.types, i, t)
5614 free_lang_data_in_type (t);
5615
5616 delete fld.pset;
5617 fld.worklist.release ();
5618 fld.decls.release ();
5619 fld.types.release ();
5620 }
5621
5622
5623 /* Free resources used by the front end that are not needed once it is done.  */
5624
5625 static unsigned
5626 free_lang_data (void)
5627 {
5628 unsigned i;
5629
5630 /* If we are the LTO frontend we have freed lang-specific data already. */
5631 if (in_lto_p
5632 || !flag_generate_lto)
5633 return 0;
5634
5635   /* Allocate and assign alias sets to the standard integer types
5636      while the slots still hold the types the front ends generated.  */
5637 for (i = 0; i < itk_none; ++i)
5638 if (integer_types[i])
5639 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5640
5641 /* Traverse the IL resetting language specific information for
5642 operands, expressions, etc. */
5643 free_lang_data_in_cgraph ();
5644
5645 /* Create gimple variants for common types. */
5646 ptrdiff_type_node = integer_type_node;
5647 fileptr_type_node = ptr_type_node;
5648
5649 /* Reset some langhooks. Do not reset types_compatible_p, it may
5650 still be used indirectly via the get_alias_set langhook. */
5651 lang_hooks.dwarf_name = lhd_dwarf_name;
5652 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5653 /* We do not want the default decl_assembler_name implementation,
5654 rather if we have fixed everything we want a wrapper around it
5655 asserting that all non-local symbols already got their assembler
5656 name and only produce assembler names for local symbols. Or rather
5657 make sure we never call decl_assembler_name on local symbols and
5658 devise a separate, middle-end private scheme for it. */
5659
5660 /* Reset diagnostic machinery. */
5661 tree_diagnostics_defaults (global_dc);
5662
5663 return 0;
5664 }
5665
5666
5667 namespace {
5668
5669 const pass_data pass_data_ipa_free_lang_data =
5670 {
5671 SIMPLE_IPA_PASS, /* type */
5672 "*free_lang_data", /* name */
5673 OPTGROUP_NONE, /* optinfo_flags */
5674 TV_IPA_FREE_LANG_DATA, /* tv_id */
5675 0, /* properties_required */
5676 0, /* properties_provided */
5677 0, /* properties_destroyed */
5678 0, /* todo_flags_start */
5679 0, /* todo_flags_finish */
5680 };
5681
5682 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5683 {
5684 public:
5685 pass_ipa_free_lang_data (gcc::context *ctxt)
5686 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5687 {}
5688
5689 /* opt_pass methods: */
5690 virtual unsigned int execute (function *) { return free_lang_data (); }
5691
5692 }; // class pass_ipa_free_lang_data
5693
5694 } // anon namespace
5695
5696 simple_ipa_opt_pass *
5697 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5698 {
5699 return new pass_ipa_free_lang_data (ctxt);
5700 }
5701
5702 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5703 ATTR_NAME. Also used internally by remove_attribute(). */
5704 bool
5705 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5706 {
5707 size_t ident_len = IDENTIFIER_LENGTH (ident);
5708
5709 if (ident_len == attr_len)
5710 {
5711 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5712 return true;
5713 }
5714 else if (ident_len == attr_len + 4)
5715 {
5716 /* There is the possibility that ATTR is 'text' and IDENT is
5717 '__text__'. */
5718 const char *p = IDENTIFIER_POINTER (ident);
5719 if (p[0] == '_' && p[1] == '_'
5720 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5721 && strncmp (attr_name, p + 2, attr_len) == 0)
5722 return true;
5723 }
5724
5725 return false;
5726 }
5727
5728 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5729 of ATTR_NAME, and LIST is not NULL_TREE. */
5730 tree
5731 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5732 {
5733 while (list)
5734 {
5735 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5736
5737 if (ident_len == attr_len)
5738 {
5739 if (!strcmp (attr_name,
5740 IDENTIFIER_POINTER (get_attribute_name (list))))
5741 break;
5742 }
5743 /* TODO: If we made sure that attributes were stored in the
5744 	 canonical form without '__...__' (i.e., as in 'text' as opposed
5745 to '__text__') then we could avoid the following case. */
5746 else if (ident_len == attr_len + 4)
5747 {
5748 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5749 if (p[0] == '_' && p[1] == '_'
5750 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5751 && strncmp (attr_name, p + 2, attr_len) == 0)
5752 break;
5753 }
5754 list = TREE_CHAIN (list);
5755 }
5756
5757 return list;
5758 }
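/* Illustrative sketch: callers normally go through the lookup_attribute
   wrapper in tree.h, which funnels into the helper above.  Testing whether
   a hypothetical decl D is marked noreturn:

     if (lookup_attribute ("noreturn", DECL_ATTRIBUTES (d)) != NULL_TREE)
       ...  */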
5759
5760 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5761    return a pointer to the first list element whose attribute name
5762    starts with ATTR_NAME.  ATTR_NAME must be in the form 'text' (not
5763    '__text__').  */
5764
5765 tree
5766 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5767 tree list)
5768 {
5769 while (list)
5770 {
5771 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5772
5773 if (attr_len > ident_len)
5774 {
5775 list = TREE_CHAIN (list);
5776 continue;
5777 }
5778
5779 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5780
5781 if (strncmp (attr_name, p, attr_len) == 0)
5782 break;
5783
5784 /* TODO: If we made sure that attributes were stored in the
5785 	 canonical form without '__...__' (i.e., as in 'text' as opposed
5786 to '__text__') then we could avoid the following case. */
5787       if (p[0] == '_' && p[1] == '_'
5788 	  && strncmp (attr_name, p + 2, attr_len) == 0)
5789 break;
5790
5791 list = TREE_CHAIN (list);
5792 }
5793
5794 return list;
5795 }
5796
5797
5798 /* A variant of lookup_attribute() that can be used with an identifier
5799 as the first argument, and where the identifier can be either
5800 'text' or '__text__'.
5801
5802 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5803 return a pointer to the attribute's list element if the attribute
5804 is part of the list, or NULL_TREE if not found. If the attribute
5805 appears more than once, this only returns the first occurrence; the
5806 TREE_CHAIN of the return value should be passed back in if further
5807 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5808 can be in the form 'text' or '__text__'. */
5809 static tree
5810 lookup_ident_attribute (tree attr_identifier, tree list)
5811 {
5812 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5813
5814 while (list)
5815 {
5816 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5817 == IDENTIFIER_NODE);
5818
5819 /* Identifiers can be compared directly for equality. */
5820 if (attr_identifier == get_attribute_name (list))
5821 break;
5822
5823 /* If they are not equal, they may still be one in the form
5824 'text' while the other one is in the form '__text__'. TODO:
5825 If we were storing attributes in normalized 'text' form, then
5826 this could all go away and we could take full advantage of
5827 the fact that we're comparing identifiers. :-) */
5828 {
5829 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5830 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5831
5832 if (ident_len == attr_len + 4)
5833 {
5834 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5835 const char *q = IDENTIFIER_POINTER (attr_identifier);
5836 if (p[0] == '_' && p[1] == '_'
5837 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5838 && strncmp (q, p + 2, attr_len) == 0)
5839 break;
5840 }
5841 else if (ident_len + 4 == attr_len)
5842 {
5843 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5844 const char *q = IDENTIFIER_POINTER (attr_identifier);
5845 if (q[0] == '_' && q[1] == '_'
5846 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5847 && strncmp (q + 2, p, ident_len) == 0)
5848 break;
5849 }
5850 }
5851 list = TREE_CHAIN (list);
5852 }
5853
5854 return list;
5855 }
5856
5857 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5858 modified list. */
5859
5860 tree
5861 remove_attribute (const char *attr_name, tree list)
5862 {
5863 tree *p;
5864 size_t attr_len = strlen (attr_name);
5865
5866 gcc_checking_assert (attr_name[0] != '_');
5867
5868 for (p = &list; *p; )
5869 {
5870 tree l = *p;
5871 /* TODO: If we were storing attributes in normalized form, here
5872 we could use a simple strcmp(). */
5873 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5874 *p = TREE_CHAIN (l);
5875 else
5876 p = &TREE_CHAIN (l);
5877 }
5878
5879 return list;
5880 }
5881
5882 /* Return an attribute list that is the union of a1 and a2. */
5883
5884 tree
5885 merge_attributes (tree a1, tree a2)
5886 {
5887 tree attributes;
5888
5889 /* Either one unset? Take the set one. */
5890
5891 if ((attributes = a1) == 0)
5892 attributes = a2;
5893
5894 /* One that completely contains the other? Take it. */
5895
5896 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5897 {
5898 if (attribute_list_contained (a2, a1))
5899 attributes = a2;
5900 else
5901 {
5902 /* Pick the longest list, and hang the other one on it. */
5903
5904 if (list_length (a1) < list_length (a2))
5905 attributes = a2, a2 = a1;
5906
5907 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5908 {
5909 tree a;
5910 for (a = lookup_ident_attribute (get_attribute_name (a2),
5911 attributes);
5912 a != NULL_TREE && !attribute_value_equal (a, a2);
5913 a = lookup_ident_attribute (get_attribute_name (a2),
5914 TREE_CHAIN (a)))
5915 ;
5916 if (a == NULL_TREE)
5917 {
5918 a1 = copy_node (a2);
5919 TREE_CHAIN (a1) = attributes;
5920 attributes = a1;
5921 }
5922 }
5923 }
5924 }
5925 return attributes;
5926 }
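
/* Worked example of the union semantics above (the attribute names are used
   purely for illustration): if A1 is ((aligned (16))) and A2 is
   ((aligned (16)) (packed)), the merged list contains exactly one
   "aligned (16)" entry and one "packed" entry; an attribute already present
   with an equal value is never duplicated, e.g.

     tree merged = merge_attributes (a1, a2);

   where A1 and A2 are any two attribute chains the caller holds.  */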
5927
5928 /* Given types T1 and T2, merge their attributes and return
5929 the result. */
5930
5931 tree
5932 merge_type_attributes (tree t1, tree t2)
5933 {
5934 return merge_attributes (TYPE_ATTRIBUTES (t1),
5935 TYPE_ATTRIBUTES (t2));
5936 }
5937
5938 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5939 the result. */
5940
5941 tree
5942 merge_decl_attributes (tree olddecl, tree newdecl)
5943 {
5944 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5945 DECL_ATTRIBUTES (newdecl));
5946 }
5947
5948 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5949
5950 /* Specialization of merge_decl_attributes for various Windows targets.
5951
5952 This handles the following situation:
5953
5954 __declspec (dllimport) int foo;
5955 int foo;
5956
5957 The second instance of `foo' nullifies the dllimport. */
5958
5959 tree
5960 merge_dllimport_decl_attributes (tree old, tree new_tree)
5961 {
5962 tree a;
5963 int delete_dllimport_p = 1;
5964
5965 /* What we need to do here is remove dllimport from `old' if it doesn't
5966 appear in `new'. dllimport behaves like extern: if a declaration is
5967 marked dllimport and a definition appears later, then the object
5968 is not dllimport'd. We also remove a `new' dllimport if the old list
5969 contains dllexport: dllexport always overrides dllimport, regardless
5970 of the order of declaration. */
5971 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
5972 delete_dllimport_p = 0;
5973 else if (DECL_DLLIMPORT_P (new_tree)
5974 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
5975 {
5976 DECL_DLLIMPORT_P (new_tree) = 0;
5977 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
5978 "dllimport ignored", new_tree);
5979 }
5980 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
5981 {
5982 /* Warn about overriding a symbol that has already been used, e.g.:
5983 extern int __attribute__ ((dllimport)) foo;
5984 int* bar () {return &foo;}
5985 int foo;
5986 */
5987 if (TREE_USED (old))
5988 {
5989 warning (0, "%q+D redeclared without dllimport attribute "
5990 "after being referenced with dll linkage", new_tree);
5991 /* If we have used a variable's address with dllimport linkage,
5992 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
5993 decl may already have had TREE_CONSTANT computed.
5994 We still remove the attribute so that assembler code refers
5995 to '&foo' rather than '_imp__foo'. */
5996 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
5997 DECL_DLLIMPORT_P (new_tree) = 1;
5998 }
5999
6000 /* Let an inline definition silently override the external reference,
6001 but otherwise warn about attribute inconsistency. */
6002 else if (TREE_CODE (new_tree) == VAR_DECL
6003 || !DECL_DECLARED_INLINE_P (new_tree))
6004 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6005 "previous dllimport ignored", new_tree);
6006 }
6007 else
6008 delete_dllimport_p = 0;
6009
6010 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6011
6012 if (delete_dllimport_p)
6013 a = remove_attribute ("dllimport", a);
6014
6015 return a;
6016 }
6017
6018 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6019 struct attribute_spec.handler. */
6020
6021 tree
6022 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6023 bool *no_add_attrs)
6024 {
6025 tree node = *pnode;
6026 bool is_dllimport;
6027
6028 /* These attributes may apply to structure and union types being created,
6029 but otherwise should pass to the declaration involved. */
6030 if (!DECL_P (node))
6031 {
6032 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6033 | (int) ATTR_FLAG_ARRAY_NEXT))
6034 {
6035 *no_add_attrs = true;
6036 return tree_cons (name, args, NULL_TREE);
6037 }
6038 if (TREE_CODE (node) == RECORD_TYPE
6039 || TREE_CODE (node) == UNION_TYPE)
6040 {
6041 node = TYPE_NAME (node);
6042 if (!node)
6043 return NULL_TREE;
6044 }
6045 else
6046 {
6047 warning (OPT_Wattributes, "%qE attribute ignored",
6048 name);
6049 *no_add_attrs = true;
6050 return NULL_TREE;
6051 }
6052 }
6053
6054 if (TREE_CODE (node) != FUNCTION_DECL
6055 && TREE_CODE (node) != VAR_DECL
6056 && TREE_CODE (node) != TYPE_DECL)
6057 {
6058 *no_add_attrs = true;
6059 warning (OPT_Wattributes, "%qE attribute ignored",
6060 name);
6061 return NULL_TREE;
6062 }
6063
6064 if (TREE_CODE (node) == TYPE_DECL
6065 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6066 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6067 {
6068 *no_add_attrs = true;
6069 warning (OPT_Wattributes, "%qE attribute ignored",
6070 name);
6071 return NULL_TREE;
6072 }
6073
6074 is_dllimport = is_attribute_p ("dllimport", name);
6075
6076 /* Report error on dllimport ambiguities seen now before they cause
6077 any damage. */
6078 if (is_dllimport)
6079 {
6080 /* Honor any target-specific overrides. */
6081 if (!targetm.valid_dllimport_attribute_p (node))
6082 *no_add_attrs = true;
6083
6084 else if (TREE_CODE (node) == FUNCTION_DECL
6085 && DECL_DECLARED_INLINE_P (node))
6086 {
6087 warning (OPT_Wattributes, "inline function %q+D declared as "
6088 " dllimport: attribute ignored", node);
6089 *no_add_attrs = true;
6090 }
6091 /* Like MS, treat definition of dllimported variables and
6092 non-inlined functions on declaration as syntax errors. */
6093 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6094 {
6095 error ("function %q+D definition is marked dllimport", node);
6096 *no_add_attrs = true;
6097 }
6098
6099 else if (TREE_CODE (node) == VAR_DECL)
6100 {
6101 if (DECL_INITIAL (node))
6102 {
6103 error ("variable %q+D definition is marked dllimport",
6104 node);
6105 *no_add_attrs = true;
6106 }
6107
6108 /* `extern' needn't be specified with dllimport.
6109 Specify `extern' now and hope for the best. Sigh. */
6110 DECL_EXTERNAL (node) = 1;
6111 /* Also, implicitly give dllimport'd variables declared within
6112 a function global scope, unless declared static. */
6113 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6114 TREE_PUBLIC (node) = 1;
6115 }
6116
6117 if (*no_add_attrs == false)
6118 DECL_DLLIMPORT_P (node) = 1;
6119 }
6120 else if (TREE_CODE (node) == FUNCTION_DECL
6121 && DECL_DECLARED_INLINE_P (node)
6122 && flag_keep_inline_dllexport)
6123 /* An exported function, even if inline, must be emitted. */
6124 DECL_EXTERNAL (node) = 0;
6125
6126 /* Report error if symbol is not accessible at global scope. */
6127 if (!TREE_PUBLIC (node)
6128 && (TREE_CODE (node) == VAR_DECL
6129 || TREE_CODE (node) == FUNCTION_DECL))
6130 {
6131 error ("external linkage required for symbol %q+D because of "
6132 "%qE attribute", node, name);
6133 *no_add_attrs = true;
6134 }
6135
6136 /* A dllexport'd entity must have default visibility so that other
6137 program units (shared libraries or the main executable) can see
6138 it. A dllimport'd entity must have default visibility so that
6139 the linker knows that undefined references within this program
6140 unit can be resolved by the dynamic linker. */
6141 if (!*no_add_attrs)
6142 {
6143 if (DECL_VISIBILITY_SPECIFIED (node)
6144 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6145 error ("%qE implies default visibility, but %qD has already "
6146 "been declared with a different visibility",
6147 name, node);
6148 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6149 DECL_VISIBILITY_SPECIFIED (node) = 1;
6150 }
6151
6152 return NULL_TREE;
6153 }
6154
6155 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6156 \f
6157 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6158 of the various TYPE_QUAL values. */
6159
6160 static void
6161 set_type_quals (tree type, int type_quals)
6162 {
6163 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6164 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6165 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6166 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6167 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6168 }
6169
6170 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6171
6172 bool
6173 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6174 {
6175 return (TYPE_QUALS (cand) == type_quals
6176 && TYPE_NAME (cand) == TYPE_NAME (base)
6177 /* Apparently this is needed for Objective-C. */
6178 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6179 /* Check alignment. */
6180 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6181 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6182 TYPE_ATTRIBUTES (base)));
6183 }
6184
6185 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6186
6187 static bool
6188 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6189 {
6190 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6191 && TYPE_NAME (cand) == TYPE_NAME (base)
6192 /* Apparently this is needed for Objective-C. */
6193 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6194 /* Check alignment. */
6195 && TYPE_ALIGN (cand) == align
6196 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6197 TYPE_ATTRIBUTES (base)));
6198 }
6199
6200 /* This function checks to see if TYPE matches the size of one of the
6201 built-in atomic types, and returns that core atomic type. */
6202
6203 static tree
6204 find_atomic_core_type (tree type)
6205 {
6206 tree base_atomic_type;
6207
6208 /* Only handle complete types. */
6209 if (TYPE_SIZE (type) == NULL_TREE)
6210 return NULL_TREE;
6211
6212 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6213 switch (type_size)
6214 {
6215 case 8:
6216 base_atomic_type = atomicQI_type_node;
6217 break;
6218
6219 case 16:
6220 base_atomic_type = atomicHI_type_node;
6221 break;
6222
6223 case 32:
6224 base_atomic_type = atomicSI_type_node;
6225 break;
6226
6227 case 64:
6228 base_atomic_type = atomicDI_type_node;
6229 break;
6230
6231 case 128:
6232 base_atomic_type = atomicTI_type_node;
6233 break;
6234
6235 default:
6236 base_atomic_type = NULL_TREE;
6237 }
6238
6239 return base_atomic_type;
6240 }
6241
6242 /* Return a version of the TYPE, qualified as indicated by the
6243 TYPE_QUALS, if one exists. If no qualified version exists yet,
6244 return NULL_TREE. */
6245
6246 tree
6247 get_qualified_type (tree type, int type_quals)
6248 {
6249 tree t;
6250
6251 if (TYPE_QUALS (type) == type_quals)
6252 return type;
6253
6254 /* Search the chain of variants to see if there is already one there just
6255 like the one we need to have. If so, use that existing one. We must
6256 preserve the TYPE_NAME, since there is code that depends on this. */
6257 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6258 if (check_qualified_type (t, type, type_quals))
6259 return t;
6260
6261 return NULL_TREE;
6262 }
6263
6264 /* Like get_qualified_type, but creates the type if it does not
6265 exist. This function never returns NULL_TREE. */
6266
6267 tree
6268 build_qualified_type (tree type, int type_quals)
6269 {
6270 tree t;
6271
6272 /* See if we already have the appropriate qualified variant. */
6273 t = get_qualified_type (type, type_quals);
6274
6275 /* If not, build it. */
6276 if (!t)
6277 {
6278 t = build_variant_type_copy (type);
6279 set_type_quals (t, type_quals);
6280
6281 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6282 {
6283 /* See if this object can map to a basic atomic type. */
6284 tree atomic_type = find_atomic_core_type (type);
6285 if (atomic_type)
6286 {
6287 /* Ensure the alignment of this type is compatible with
6288 the required alignment of the atomic type. */
6289 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6290 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6291 }
6292 }
6293
6294 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6295 /* Propagate structural equality. */
6296 SET_TYPE_STRUCTURAL_EQUALITY (t);
6297 else if (TYPE_CANONICAL (type) != type)
6298 /* Build the underlying canonical type, since it is different
6299 from TYPE. */
6300 {
6301 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6302 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6303 }
6304 else
6305 /* T is its own canonical type. */
6306 TYPE_CANONICAL (t) = t;
6307
6308 }
6309
6310 return t;
6311 }
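
/* Usage sketch for build_qualified_type: the qualifier argument is a
   bitmask, so qualifiers combine with '|'.  An existing matching variant
   on TYPE's variant chain is reused; otherwise a new one is built as above.

     tree cv_int = build_qualified_type (integer_type_node,
					 TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
     tree atomic_int = build_qualified_type (integer_type_node,
					     TYPE_QUAL_ATOMIC);

   For the atomic variant the alignment may be raised to that of the
   matching atomicQI/HI/SI/DI/TI node via find_atomic_core_type above.  */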
6312
6313 /* Create a variant of type T with alignment ALIGN. */
6314
6315 tree
6316 build_aligned_type (tree type, unsigned int align)
6317 {
6318 tree t;
6319
6320 if (TYPE_PACKED (type)
6321 || TYPE_ALIGN (type) == align)
6322 return type;
6323
6324 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6325 if (check_aligned_type (t, type, align))
6326 return t;
6327
6328 t = build_variant_type_copy (type);
6329 TYPE_ALIGN (t) = align;
6330
6331 return t;
6332 }
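
/* Usage sketch: ALIGN is expressed in bits, so requesting 16-byte alignment
   looks like

     tree aligned16 = build_aligned_type (integer_type_node,
					  16 * BITS_PER_UNIT);

   Packed types and types that already have the requested alignment are
   returned unchanged, as the early return above shows.  */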
6333
6334 /* Create a new distinct copy of TYPE. The new type is made its own
6335 MAIN_VARIANT. If TYPE requires structural equality checks, the
6336 resulting type requires structural equality checks; otherwise, its
6337 TYPE_CANONICAL points to itself. */
6338
6339 tree
6340 build_distinct_type_copy (tree type)
6341 {
6342 tree t = copy_node (type);
6343
6344 TYPE_POINTER_TO (t) = 0;
6345 TYPE_REFERENCE_TO (t) = 0;
6346
6347 /* Set the canonical type either to a new equivalence class, or
6348 propagate the need for structural equality checks. */
6349 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6350 SET_TYPE_STRUCTURAL_EQUALITY (t);
6351 else
6352 TYPE_CANONICAL (t) = t;
6353
6354 /* Make it its own variant. */
6355 TYPE_MAIN_VARIANT (t) = t;
6356 TYPE_NEXT_VARIANT (t) = 0;
6357
6358 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6359 whose TREE_TYPE is not t. This can also happen in the Ada
6360 frontend when using subtypes. */
6361
6362 return t;
6363 }
6364
6365 /* Create a new variant of TYPE, equivalent but distinct. This is so
6366 the caller can modify it. TYPE_CANONICAL for the return type will
6367 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6368 are considered equal by the language itself (or that both types
6369 require structural equality checks). */
6370
6371 tree
6372 build_variant_type_copy (tree type)
6373 {
6374 tree t, m = TYPE_MAIN_VARIANT (type);
6375
6376 t = build_distinct_type_copy (type);
6377
6378 /* Since we're building a variant, assume that it is a non-semantic
6379 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6380 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6381
6382 /* Add the new type to the chain of variants of TYPE. */
6383 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6384 TYPE_NEXT_VARIANT (m) = t;
6385 TYPE_MAIN_VARIANT (t) = m;
6386
6387 return t;
6388 }
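
/* Sketch of the intended difference between the two copies above:

     tree d = build_distinct_type_copy (type);  // own TYPE_MAIN_VARIANT and
						// own canonical type
     tree v = build_variant_type_copy (type);   // stays on TYPE's variant
						// chain, same TYPE_CANONICAL

   A distinct copy behaves as a genuinely different type, while a variant
   copy remains interchangeable with TYPE as far as canonical type equality
   is concerned.  */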
6389 \f
6390 /* Return true if the FROM trees in both tree maps are equal. */
6391
6392 int
6393 tree_map_base_eq (const void *va, const void *vb)
6394 {
6395 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6396 *const b = (const struct tree_map_base *) vb;
6397 return (a->from == b->from);
6398 }
6399
6400 /* Hash a FROM tree in a tree_map_base. */
6401
6402 unsigned int
6403 tree_map_base_hash (const void *item)
6404 {
6405 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6406 }
6407
6408 /* Return true if this tree map structure is marked for garbage collection
6409 purposes. We simply return true if the from tree is marked, so that this
6410 structure goes away when the from tree goes away. */
6411
6412 int
6413 tree_map_base_marked_p (const void *p)
6414 {
6415 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6416 }
6417
6418 /* Hash a from tree in a tree_map. */
6419
6420 unsigned int
6421 tree_map_hash (const void *item)
6422 {
6423 return (((const struct tree_map *) item)->hash);
6424 }
6425
6426 /* Hash a from tree in a tree_decl_map. */
6427
6428 unsigned int
6429 tree_decl_map_hash (const void *item)
6430 {
6431 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6432 }
6433
6434 /* Return the initialization priority for DECL. */
6435
6436 priority_type
6437 decl_init_priority_lookup (tree decl)
6438 {
6439 symtab_node *snode = symtab_node::get (decl);
6440
6441 if (!snode)
6442 return DEFAULT_INIT_PRIORITY;
6443 return
6444 snode->get_init_priority ();
6445 }
6446
6447 /* Return the finalization priority for DECL. */
6448
6449 priority_type
6450 decl_fini_priority_lookup (tree decl)
6451 {
6452 cgraph_node *node = cgraph_node::get (decl);
6453
6454 if (!node)
6455 return DEFAULT_INIT_PRIORITY;
6456 return
6457 node->get_fini_priority ();
6458 }
6459
6460 /* Set the initialization priority for DECL to PRIORITY. */
6461
6462 void
6463 decl_init_priority_insert (tree decl, priority_type priority)
6464 {
6465 struct symtab_node *snode;
6466
6467 if (priority == DEFAULT_INIT_PRIORITY)
6468 {
6469 snode = symtab_node::get (decl);
6470 if (!snode)
6471 return;
6472 }
6473 else if (TREE_CODE (decl) == VAR_DECL)
6474 snode = varpool_node::get_create (decl);
6475 else
6476 snode = cgraph_node::get_create (decl);
6477 snode->set_init_priority (priority);
6478 }
6479
6480 /* Set the finalization priority for DECL to PRIORITY. */
6481
6482 void
6483 decl_fini_priority_insert (tree decl, priority_type priority)
6484 {
6485 struct cgraph_node *node;
6486
6487 if (priority == DEFAULT_INIT_PRIORITY)
6488 {
6489 node = cgraph_node::get (decl);
6490 if (!node)
6491 return;
6492 }
6493 else
6494 node = cgraph_node::get_create (decl);
6495 node->set_fini_priority (priority);
6496 }
6497
6498 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6499
6500 static void
6501 print_debug_expr_statistics (void)
6502 {
6503 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6504 (long) htab_size (debug_expr_for_decl),
6505 (long) htab_elements (debug_expr_for_decl),
6506 htab_collisions (debug_expr_for_decl));
6507 }
6508
6509 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6510
6511 static void
6512 print_value_expr_statistics (void)
6513 {
6514 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6515 (long) htab_size (value_expr_for_decl),
6516 (long) htab_elements (value_expr_for_decl),
6517 htab_collisions (value_expr_for_decl));
6518 }
6519
6520 /* Lookup a debug expression for FROM, and return it if we find one. */
6521
6522 tree
6523 decl_debug_expr_lookup (tree from)
6524 {
6525 struct tree_decl_map *h, in;
6526 in.base.from = from;
6527
6528 h = (struct tree_decl_map *)
6529 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6530 if (h)
6531 return h->to;
6532 return NULL_TREE;
6533 }
6534
6535 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6536
6537 void
6538 decl_debug_expr_insert (tree from, tree to)
6539 {
6540 struct tree_decl_map *h;
6541 void **loc;
6542
6543 h = ggc_alloc<tree_decl_map> ();
6544 h->base.from = from;
6545 h->to = to;
6546 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6547 INSERT);
6548 *(struct tree_decl_map **) loc = h;
6549 }
6550
6551 /* Lookup a value expression for FROM, and return it if we find one. */
6552
6553 tree
6554 decl_value_expr_lookup (tree from)
6555 {
6556 struct tree_decl_map *h, in;
6557 in.base.from = from;
6558
6559 h = (struct tree_decl_map *)
6560 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6561 if (h)
6562 return h->to;
6563 return NULL_TREE;
6564 }
6565
6566 /* Insert a mapping FROM->TO in the value expression hashtable. */
6567
6568 void
6569 decl_value_expr_insert (tree from, tree to)
6570 {
6571 struct tree_decl_map *h;
6572 void **loc;
6573
6574 h = ggc_alloc<tree_decl_map> ();
6575 h->base.from = from;
6576 h->to = to;
6577 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6578 INSERT);
6579 *(struct tree_decl_map **) loc = h;
6580 }
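
/* Usage sketch for the two DECL_VALUE_EXPR helpers above: after

     decl_value_expr_insert (decl, expr);

   a later decl_value_expr_lookup (decl) returns EXPR, while NULL_TREE is
   returned for declarations that were never inserted.  DECL and EXPR stand
   for an arbitrary declaration and its replacement expression.  */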
6581
6582 /* Lookup a vector of debug arguments for FROM, and return it if we
6583 find one. */
6584
6585 vec<tree, va_gc> **
6586 decl_debug_args_lookup (tree from)
6587 {
6588 struct tree_vec_map *h, in;
6589
6590 if (!DECL_HAS_DEBUG_ARGS_P (from))
6591 return NULL;
6592 gcc_checking_assert (debug_args_for_decl != NULL);
6593 in.base.from = from;
6594 h = (struct tree_vec_map *)
6595 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6596 if (h)
6597 return &h->to;
6598 return NULL;
6599 }
6600
6601 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6602 arguments hashtable. */
6603
6604 vec<tree, va_gc> **
6605 decl_debug_args_insert (tree from)
6606 {
6607 struct tree_vec_map *h;
6608 void **loc;
6609
6610 if (DECL_HAS_DEBUG_ARGS_P (from))
6611 return decl_debug_args_lookup (from);
6612 if (debug_args_for_decl == NULL)
6613 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6614 tree_vec_map_eq, 0);
6615 h = ggc_alloc<tree_vec_map> ();
6616 h->base.from = from;
6617 h->to = NULL;
6618 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6619 INSERT);
6620 *(struct tree_vec_map **) loc = h;
6621 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6622 return &h->to;
6623 }
6624
6625 /* Hashing of types so that we don't make duplicates.
6626 The entry point is `type_hash_canon'. */
6627
6628 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6629 with types in the TREE_VALUE slots), by adding the hash codes
6630 of the individual types. */
6631
6632 static void
6633 type_hash_list (const_tree list, inchash::hash &hstate)
6634 {
6635 const_tree tail;
6636
6637 for (tail = list; tail; tail = TREE_CHAIN (tail))
6638 if (TREE_VALUE (tail) != error_mark_node)
6639 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6640 }
6641
6642 /* These are the Hashtable callback functions. */
6643
6644 /* Returns true iff the types are equivalent. */
6645
6646 static int
6647 type_hash_eq (const void *va, const void *vb)
6648 {
6649 const struct type_hash *const a = (const struct type_hash *) va,
6650 *const b = (const struct type_hash *) vb;
6651
6652 /* First test the things that are the same for all types. */
6653 if (a->hash != b->hash
6654 || TREE_CODE (a->type) != TREE_CODE (b->type)
6655 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6656 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6657 TYPE_ATTRIBUTES (b->type))
6658 || (TREE_CODE (a->type) != COMPLEX_TYPE
6659 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6660 return 0;
6661
6662 /* Be careful about comparing arrays before and after the element type
6663 has been completed; don't compare TYPE_ALIGN unless both types are
6664 complete. */
6665 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6666 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6667 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6668 return 0;
6669
6670 switch (TREE_CODE (a->type))
6671 {
6672 case VOID_TYPE:
6673 case COMPLEX_TYPE:
6674 case POINTER_TYPE:
6675 case REFERENCE_TYPE:
6676 case NULLPTR_TYPE:
6677 return 1;
6678
6679 case VECTOR_TYPE:
6680 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6681
6682 case ENUMERAL_TYPE:
6683 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6684 && !(TYPE_VALUES (a->type)
6685 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6686 && TYPE_VALUES (b->type)
6687 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6688 && type_list_equal (TYPE_VALUES (a->type),
6689 TYPE_VALUES (b->type))))
6690 return 0;
6691
6692 /* ... fall through ... */
6693
6694 case INTEGER_TYPE:
6695 case REAL_TYPE:
6696 case BOOLEAN_TYPE:
6697 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6698 return false;
6699 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6700 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6701 TYPE_MAX_VALUE (b->type)))
6702 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6703 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6704 TYPE_MIN_VALUE (b->type))));
6705
6706 case FIXED_POINT_TYPE:
6707 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6708
6709 case OFFSET_TYPE:
6710 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6711
6712 case METHOD_TYPE:
6713 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6714 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6715 || (TYPE_ARG_TYPES (a->type)
6716 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6717 && TYPE_ARG_TYPES (b->type)
6718 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6719 && type_list_equal (TYPE_ARG_TYPES (a->type),
6720 TYPE_ARG_TYPES (b->type)))))
6721 break;
6722 return 0;
6723 case ARRAY_TYPE:
6724 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6725
6726 case RECORD_TYPE:
6727 case UNION_TYPE:
6728 case QUAL_UNION_TYPE:
6729 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6730 || (TYPE_FIELDS (a->type)
6731 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6732 && TYPE_FIELDS (b->type)
6733 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6734 && type_list_equal (TYPE_FIELDS (a->type),
6735 TYPE_FIELDS (b->type))));
6736
6737 case FUNCTION_TYPE:
6738 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6739 || (TYPE_ARG_TYPES (a->type)
6740 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6741 && TYPE_ARG_TYPES (b->type)
6742 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6743 && type_list_equal (TYPE_ARG_TYPES (a->type),
6744 TYPE_ARG_TYPES (b->type))))
6745 break;
6746 return 0;
6747
6748 default:
6749 return 0;
6750 }
6751
6752 if (lang_hooks.types.type_hash_eq != NULL)
6753 return lang_hooks.types.type_hash_eq (a->type, b->type);
6754
6755 return 1;
6756 }
6757
6758 /* Return the cached hash value. */
6759
6760 static hashval_t
6761 type_hash_hash (const void *item)
6762 {
6763 return ((const struct type_hash *) item)->hash;
6764 }
6765
6766 /* Look in the type hash table for a type isomorphic to TYPE.
6767 If one is found, return it. Otherwise return 0. */
6768
6769 static tree
6770 type_hash_lookup (hashval_t hashcode, tree type)
6771 {
6772 struct type_hash *h, in;
6773
6774 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6775 must call that routine before comparing TYPE_ALIGNs. */
6776 layout_type (type);
6777
6778 in.hash = hashcode;
6779 in.type = type;
6780
6781 h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in,
6782 hashcode);
6783 if (h)
6784 return h->type;
6785 return NULL_TREE;
6786 }
6787
6788 /* Add an entry to the type-hash-table
6789 for a type TYPE whose hash code is HASHCODE. */
6790
6791 static void
6792 type_hash_add (hashval_t hashcode, tree type)
6793 {
6794 struct type_hash *h;
6795 void **loc;
6796
6797 h = ggc_alloc<type_hash> ();
6798 h->hash = hashcode;
6799 h->type = type;
6800 loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
6801 *loc = (void *)h;
6802 }
6803
6804 /* Given TYPE, and HASHCODE its hash code, return the canonical
6805 object for an identical type if one already exists.
6806 Otherwise, return TYPE, and record it as the canonical object.
6807
6808 To use this function, first create a type of the sort you want.
6809 Then compute its hash code from the fields of the type that
6810 make it different from other similar types.
6811 Then call this function and use the value. */
6812
6813 tree
6814 type_hash_canon (unsigned int hashcode, tree type)
6815 {
6816 tree t1;
6817
6818 /* The hash table only contains main variants, so ensure that's what we're
6819 being passed. */
6820 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6821
6822 /* See if the type is in the hash table already. If so, return it.
6823 Otherwise, add the type. */
6824 t1 = type_hash_lookup (hashcode, type);
6825 if (t1 != 0)
6826 {
6827 if (GATHER_STATISTICS)
6828 {
6829 tree_code_counts[(int) TREE_CODE (type)]--;
6830 tree_node_counts[(int) t_kind]--;
6831 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6832 }
6833 return t1;
6834 }
6835 else
6836 {
6837 type_hash_add (hashcode, type);
6838 return type;
6839 }
6840 }
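
/* Usage sketch, mirroring the comment above and the call made later in this
   file by build_nonstandard_integer_type (PRECISION is a placeholder for
   the desired bit width): create the candidate type, hash its
   distinguishing fields, then canonicalize:

     tree itype = make_node (INTEGER_TYPE);
     TYPE_PRECISION (itype) = precision;
     fixup_unsigned_type (itype);
     itype = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);

   If an isomorphic type was already registered, the existing node is
   returned and the freshly built one is simply abandoned to the garbage
   collector.  */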
6841
6842 /* See if the data pointed to by the type hash table is marked. We consider
6843 it marked if the type is marked or if a debug type number or symbol
6844 table entry has been made for the type. */
6845
6846 static int
6847 type_hash_marked_p (const void *p)
6848 {
6849 const_tree const type = ((const struct type_hash *) p)->type;
6850
6851 return ggc_marked_p (type);
6852 }
6853
6854 static void
6855 print_type_hash_statistics (void)
6856 {
6857 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6858 (long) htab_size (type_hash_table),
6859 (long) htab_elements (type_hash_table),
6860 htab_collisions (type_hash_table));
6861 }
6862
6863 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6864 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6865 by adding the hash codes of the individual attributes. */
6866
6867 static void
6868 attribute_hash_list (const_tree list, inchash::hash &hstate)
6869 {
6870 const_tree tail;
6871
6872 for (tail = list; tail; tail = TREE_CHAIN (tail))
6873 /* ??? Do we want to add in TREE_VALUE too? */
6874 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6875 }
6876
6877 /* Given two lists of attributes, return true if list l2 is
6878 equivalent to l1. */
6879
6880 int
6881 attribute_list_equal (const_tree l1, const_tree l2)
6882 {
6883 if (l1 == l2)
6884 return 1;
6885
6886 return attribute_list_contained (l1, l2)
6887 && attribute_list_contained (l2, l1);
6888 }
6889
6890 /* Given two lists of attributes, return true if list L2 is
6891 completely contained within L1. */
6892 /* ??? This would be faster if attribute names were stored in a canonicalized
6893 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6894 must be used to show these elements are equivalent (which they are). */
6895 /* ??? It's not clear that attributes with arguments will always be handled
6896 correctly. */
6897
6898 int
6899 attribute_list_contained (const_tree l1, const_tree l2)
6900 {
6901 const_tree t1, t2;
6902
6903 /* First check the obvious, maybe the lists are identical. */
6904 if (l1 == l2)
6905 return 1;
6906
6907 /* Maybe the lists are similar. */
6908 for (t1 = l1, t2 = l2;
6909 t1 != 0 && t2 != 0
6910 && get_attribute_name (t1) == get_attribute_name (t2)
6911 && TREE_VALUE (t1) == TREE_VALUE (t2);
6912 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6913 ;
6914
6915 /* Maybe the lists are equal. */
6916 if (t1 == 0 && t2 == 0)
6917 return 1;
6918
6919 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6920 {
6921 const_tree attr;
6922 /* This CONST_CAST is okay because lookup_attribute does not
6923 modify its argument and the return value is assigned to a
6924 const_tree. */
6925 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6926 CONST_CAST_TREE (l1));
6927 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6928 attr = lookup_ident_attribute (get_attribute_name (t2),
6929 TREE_CHAIN (attr)))
6930 ;
6931
6932 if (attr == NULL_TREE)
6933 return 0;
6934 }
6935
6936 return 1;
6937 }
6938
6939 /* Given two lists of types
6940 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6941 return 1 if the lists contain the same types in the same order.
6942 Also, the TREE_PURPOSEs must match. */
6943
6944 int
6945 type_list_equal (const_tree l1, const_tree l2)
6946 {
6947 const_tree t1, t2;
6948
6949 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6950 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6951 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6952 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6953 && (TREE_TYPE (TREE_PURPOSE (t1))
6954 == TREE_TYPE (TREE_PURPOSE (t2))))))
6955 return 0;
6956
6957 return t1 == t2;
6958 }
6959
6960 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6961 given by TYPE. If the argument list accepts variable arguments,
6962 then this function counts only the ordinary arguments. */
6963
6964 int
6965 type_num_arguments (const_tree type)
6966 {
6967 int i = 0;
6968 tree t;
6969
6970 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6971 /* If the function does not take a variable number of arguments,
6972 the last element in the list will have type `void'. */
6973 if (VOID_TYPE_P (TREE_VALUE (t)))
6974 break;
6975 else
6976 ++i;
6977
6978 return i;
6979 }
6980
6981 /* Nonzero if integer constants T1 and T2
6982 represent the same constant value. */
6983
6984 int
6985 tree_int_cst_equal (const_tree t1, const_tree t2)
6986 {
6987 if (t1 == t2)
6988 return 1;
6989
6990 if (t1 == 0 || t2 == 0)
6991 return 0;
6992
6993 if (TREE_CODE (t1) == INTEGER_CST
6994 && TREE_CODE (t2) == INTEGER_CST
6995 && wi::to_widest (t1) == wi::to_widest (t2))
6996 return 1;
6997
6998 return 0;
6999 }
7000
7001 /* Return true if T is an INTEGER_CST whose numerical value (extended
7002 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7003
7004 bool
7005 tree_fits_shwi_p (const_tree t)
7006 {
7007 return (t != NULL_TREE
7008 && TREE_CODE (t) == INTEGER_CST
7009 && wi::fits_shwi_p (wi::to_widest (t)));
7010 }
7011
7012 /* Return true if T is an INTEGER_CST whose numerical value (extended
7013 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7014
7015 bool
7016 tree_fits_uhwi_p (const_tree t)
7017 {
7018 return (t != NULL_TREE
7019 && TREE_CODE (t) == INTEGER_CST
7020 && wi::fits_uhwi_p (wi::to_widest (t)));
7021 }
7022
7023 /* T is an INTEGER_CST whose numerical value (extended according to
7024 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7025 HOST_WIDE_INT. */
7026
7027 HOST_WIDE_INT
7028 tree_to_shwi (const_tree t)
7029 {
7030 gcc_assert (tree_fits_shwi_p (t));
7031 return TREE_INT_CST_LOW (t);
7032 }
7033
7034 /* T is an INTEGER_CST whose numerical value (extended according to
7035 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7036 HOST_WIDE_INT. */
7037
7038 unsigned HOST_WIDE_INT
7039 tree_to_uhwi (const_tree t)
7040 {
7041 gcc_assert (tree_fits_uhwi_p (t));
7042 return TREE_INT_CST_LOW (t);
7043 }
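
/* Usage sketch for the fits/convert pairs above: the predicate must be
   checked before converting, since tree_to_shwi and tree_to_uhwi assert it:

     if (tree_fits_uhwi_p (size_tree))
       {
	 unsigned HOST_WIDE_INT nbytes = tree_to_uhwi (size_tree);
	 use_size (nbytes);  // use_size and size_tree are placeholders
       }

   SIZE_TREE stands for whatever tree the caller happens to have, which may
   be NULL_TREE or a non-constant.  */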
7044
7045 /* Return the most significant (sign) bit of T. */
7046
7047 int
7048 tree_int_cst_sign_bit (const_tree t)
7049 {
7050 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7051
7052 return wi::extract_uhwi (t, bitno, 1);
7053 }
7054
7055 /* Return an indication of the sign of the integer constant T.
7056 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7057 Note that -1 will never be returned if T's type is unsigned. */
7058
7059 int
7060 tree_int_cst_sgn (const_tree t)
7061 {
7062 if (wi::eq_p (t, 0))
7063 return 0;
7064 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7065 return 1;
7066 else if (wi::neg_p (t))
7067 return -1;
7068 else
7069 return 1;
7070 }
7071
7072 /* Return the minimum number of bits needed to represent VALUE in a
7073 signed or unsigned type; SGN says which. */
7074
7075 unsigned int
7076 tree_int_cst_min_precision (tree value, signop sgn)
7077 {
7078 /* If the value is negative, compute its negative minus 1. The latter
7079 adjustment is because the absolute value of the largest negative value
7080 is one larger than the largest positive value. This is equivalent to
7081 a bit-wise negation, so use that operation instead. */
7082
7083 if (tree_int_cst_sgn (value) < 0)
7084 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7085
7086 /* Return the number of bits needed, taking into account the fact
7087 that we need one more bit for a signed than unsigned type.
7088 If value is 0 or -1, the minimum precision is 1 no matter
7089 whether SGN is SIGNED or UNSIGNED. */
7090
7091 if (integer_zerop (value))
7092 return 1;
7093 else
7094 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7095 }
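
/* Worked example of the computation above: for VALUE == 5, tree_floor_log2
   gives 2, so 3 bits suffice for UNSIGNED and 4 bits for SIGNED (one extra
   sign bit).  For VALUE == -3, the bitwise negation yields 2, giving
   floor_log2 (2) + 1 == 2 magnitude bits plus the sign bit, i.e. 3 bits,
   which indeed holds -3 in two's complement (binary 101).  */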
7096
7097 /* Return truthvalue of whether T1 is the same tree structure as T2.
7098 Return 1 if they are the same.
7099 Return 0 if they are understandably different.
7100 Return -1 if either contains tree structure not understood by
7101 this function. */
7102
7103 int
7104 simple_cst_equal (const_tree t1, const_tree t2)
7105 {
7106 enum tree_code code1, code2;
7107 int cmp;
7108 int i;
7109
7110 if (t1 == t2)
7111 return 1;
7112 if (t1 == 0 || t2 == 0)
7113 return 0;
7114
7115 code1 = TREE_CODE (t1);
7116 code2 = TREE_CODE (t2);
7117
7118 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7119 {
7120 if (CONVERT_EXPR_CODE_P (code2)
7121 || code2 == NON_LVALUE_EXPR)
7122 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7123 else
7124 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7125 }
7126
7127 else if (CONVERT_EXPR_CODE_P (code2)
7128 || code2 == NON_LVALUE_EXPR)
7129 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7130
7131 if (code1 != code2)
7132 return 0;
7133
7134 switch (code1)
7135 {
7136 case INTEGER_CST:
7137 return wi::to_widest (t1) == wi::to_widest (t2);
7138
7139 case REAL_CST:
7140 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7141
7142 case FIXED_CST:
7143 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7144
7145 case STRING_CST:
7146 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7147 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7148 TREE_STRING_LENGTH (t1)));
7149
7150 case CONSTRUCTOR:
7151 {
7152 unsigned HOST_WIDE_INT idx;
7153 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7154 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7155
7156 if (vec_safe_length (v1) != vec_safe_length (v2))
7157 return false;
7158
7159 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7160 /* ??? Should we also handle fields here? */
7161 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7162 return false;
7163 return true;
7164 }
7165
7166 case SAVE_EXPR:
7167 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7168
7169 case CALL_EXPR:
7170 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7171 if (cmp <= 0)
7172 return cmp;
7173 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7174 return 0;
7175 {
7176 const_tree arg1, arg2;
7177 const_call_expr_arg_iterator iter1, iter2;
7178 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7179 arg2 = first_const_call_expr_arg (t2, &iter2);
7180 arg1 && arg2;
7181 arg1 = next_const_call_expr_arg (&iter1),
7182 arg2 = next_const_call_expr_arg (&iter2))
7183 {
7184 cmp = simple_cst_equal (arg1, arg2);
7185 if (cmp <= 0)
7186 return cmp;
7187 }
7188 return arg1 == arg2;
7189 }
7190
7191 case TARGET_EXPR:
7192 /* Special case: if either target is an unallocated VAR_DECL,
7193 it means that it's going to be unified with whatever the
7194 TARGET_EXPR is really supposed to initialize, so treat it
7195 as being equivalent to anything. */
7196 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7197 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7198 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7199 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7200 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7201 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7202 cmp = 1;
7203 else
7204 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7205
7206 if (cmp <= 0)
7207 return cmp;
7208
7209 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7210
7211 case WITH_CLEANUP_EXPR:
7212 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7213 if (cmp <= 0)
7214 return cmp;
7215
7216 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7217
7218 case COMPONENT_REF:
7219 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7220 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7221
7222 return 0;
7223
7224 case VAR_DECL:
7225 case PARM_DECL:
7226 case CONST_DECL:
7227 case FUNCTION_DECL:
7228 return 0;
7229
7230 default:
7231 break;
7232 }
7233
7234 /* This general rule works for most tree codes. All exceptions should be
7235 handled above. If this is a language-specific tree code, we can't
7236 trust what might be in the operand, so say we don't know
7237 the situation. */
7238 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7239 return -1;
7240
7241 switch (TREE_CODE_CLASS (code1))
7242 {
7243 case tcc_unary:
7244 case tcc_binary:
7245 case tcc_comparison:
7246 case tcc_expression:
7247 case tcc_reference:
7248 case tcc_statement:
7249 cmp = 1;
7250 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7251 {
7252 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7253 if (cmp <= 0)
7254 return cmp;
7255 }
7256
7257 return cmp;
7258
7259 default:
7260 return -1;
7261 }
7262 }
7263
7264 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7265 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7266 than U, respectively. */
7267
7268 int
7269 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7270 {
7271 if (tree_int_cst_sgn (t) < 0)
7272 return -1;
7273 else if (!tree_fits_uhwi_p (t))
7274 return 1;
7275 else if (TREE_INT_CST_LOW (t) == u)
7276 return 0;
7277 else if (TREE_INT_CST_LOW (t) < u)
7278 return -1;
7279 else
7280 return 1;
7281 }
7282
7283 /* Return true if SIZE represents a constant size that is in bounds of
7284 what the middle-end and the backend accepts (covering not more than
7285 half of the address-space). */
7286
7287 bool
7288 valid_constant_size_p (const_tree size)
7289 {
7290 if (! tree_fits_uhwi_p (size)
7291 || TREE_OVERFLOW (size)
7292 || tree_int_cst_sign_bit (size) != 0)
7293 return false;
7294 return true;
7295 }
7296
7297 /* Return the precision of the type, or for a complex or vector type the
7298 precision of the type of its elements. */
7299
7300 unsigned int
7301 element_precision (const_tree type)
7302 {
7303 enum tree_code code = TREE_CODE (type);
7304 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7305 type = TREE_TYPE (type);
7306
7307 return TYPE_PRECISION (type);
7308 }
7309
7310 /* Return true if CODE represents an associative tree code. Otherwise
7311 return false. */
7312 bool
7313 associative_tree_code (enum tree_code code)
7314 {
7315 switch (code)
7316 {
7317 case BIT_IOR_EXPR:
7318 case BIT_AND_EXPR:
7319 case BIT_XOR_EXPR:
7320 case PLUS_EXPR:
7321 case MULT_EXPR:
7322 case MIN_EXPR:
7323 case MAX_EXPR:
7324 return true;
7325
7326 default:
7327 break;
7328 }
7329 return false;
7330 }
7331
7332 /* Return true if CODE represents a commutative tree code. Otherwise
7333 return false. */
7334 bool
7335 commutative_tree_code (enum tree_code code)
7336 {
7337 switch (code)
7338 {
7339 case PLUS_EXPR:
7340 case MULT_EXPR:
7341 case MULT_HIGHPART_EXPR:
7342 case MIN_EXPR:
7343 case MAX_EXPR:
7344 case BIT_IOR_EXPR:
7345 case BIT_XOR_EXPR:
7346 case BIT_AND_EXPR:
7347 case NE_EXPR:
7348 case EQ_EXPR:
7349 case UNORDERED_EXPR:
7350 case ORDERED_EXPR:
7351 case UNEQ_EXPR:
7352 case LTGT_EXPR:
7353 case TRUTH_AND_EXPR:
7354 case TRUTH_XOR_EXPR:
7355 case TRUTH_OR_EXPR:
7356 case WIDEN_MULT_EXPR:
7357 case VEC_WIDEN_MULT_HI_EXPR:
7358 case VEC_WIDEN_MULT_LO_EXPR:
7359 case VEC_WIDEN_MULT_EVEN_EXPR:
7360 case VEC_WIDEN_MULT_ODD_EXPR:
7361 return true;
7362
7363 default:
7364 break;
7365 }
7366 return false;
7367 }
7368
7369 /* Return true if CODE represents a ternary tree code for which the
7370 first two operands are commutative. Otherwise return false. */
7371 bool
7372 commutative_ternary_tree_code (enum tree_code code)
7373 {
7374 switch (code)
7375 {
7376 case WIDEN_MULT_PLUS_EXPR:
7377 case WIDEN_MULT_MINUS_EXPR:
7378 return true;
7379
7380 default:
7381 break;
7382 }
7383 return false;
7384 }
7385
7386 namespace inchash
7387 {
7388
7389 /* Generate a hash value for an expression. This can be used iteratively
7390 by passing a previous result as the HSTATE argument.
7391
7392 This function is intended to produce the same hash for expressions which
7393 would compare equal using operand_equal_p. */
7394 void
7395 add_expr (const_tree t, inchash::hash &hstate)
7396 {
7397 int i;
7398 enum tree_code code;
7399 enum tree_code_class tclass;
7400
7401 if (t == NULL_TREE)
7402 {
7403 hstate.merge_hash (0);
7404 return;
7405 }
7406
7407 code = TREE_CODE (t);
7408
7409 switch (code)
7410 {
7411 /* Alas, constants aren't shared, so we can't rely on pointer
7412 identity. */
7413 case VOID_CST:
7414 hstate.merge_hash (0);
7415 return;
7416 case INTEGER_CST:
7417 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7418 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7419 return;
7420 case REAL_CST:
7421 {
7422 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7423 hstate.merge_hash (val2);
7424 return;
7425 }
7426 case FIXED_CST:
7427 {
7428 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7429 hstate.merge_hash (val2);
7430 return;
7431 }
7432 case STRING_CST:
7433 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7434 return;
7435 case COMPLEX_CST:
7436 inchash::add_expr (TREE_REALPART (t), hstate);
7437 inchash::add_expr (TREE_IMAGPART (t), hstate);
7438 return;
7439 case VECTOR_CST:
7440 {
7441 unsigned i;
7442 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7443 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7444 return;
7445 }
7446 case SSA_NAME:
7447 /* We can just compare by pointer. */
7448 hstate.add_wide_int (SSA_NAME_VERSION (t));
7449 return;
7450 case PLACEHOLDER_EXPR:
7451 /* The node itself doesn't matter. */
7452 return;
7453 case TREE_LIST:
7454 /* A list of expressions, for a CALL_EXPR or as the elements of a
7455 VECTOR_CST. */
7456 for (; t; t = TREE_CHAIN (t))
7457 inchash::add_expr (TREE_VALUE (t), hstate);
7458 return;
7459 case CONSTRUCTOR:
7460 {
7461 unsigned HOST_WIDE_INT idx;
7462 tree field, value;
7463 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7464 {
7465 inchash::add_expr (field, hstate);
7466 inchash::add_expr (value, hstate);
7467 }
7468 return;
7469 }
7470 case FUNCTION_DECL:
7471 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7472 Otherwise nodes that compare equal according to operand_equal_p might
7473 get different hash codes. However, don't do this for machine specific
7474 or front end builtins, since the function code is overloaded in those
7475 cases. */
7476 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7477 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7478 {
7479 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7480 code = TREE_CODE (t);
7481 }
7482 /* FALL THROUGH */
7483 default:
7484 tclass = TREE_CODE_CLASS (code);
7485
7486 if (tclass == tcc_declaration)
7487 {
7488 /* DECLs have a unique ID. */
7489 hstate.add_wide_int (DECL_UID (t));
7490 }
7491 else
7492 {
7493 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7494
7495 hstate.add_object (code);
7496
7497 /* Don't hash the type, that can lead to having nodes which
7498 compare equal according to operand_equal_p, but which
7499 have different hash codes. */
7500 if (CONVERT_EXPR_CODE_P (code)
7501 || code == NON_LVALUE_EXPR)
7502 {
7503 /* Make sure to include signedness in the hash computation. */
7504 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7505 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7506 }
7507
7508 else if (commutative_tree_code (code))
7509 {
7510 /* It's a commutative expression. We want to hash it the same
7511 however it appears. We do this by first hashing both operands
7512 and then rehashing based on the order of their independent
7513 hashes. */
7514 inchash::hash one, two;
7515 inchash::add_expr (TREE_OPERAND (t, 0), one);
7516 inchash::add_expr (TREE_OPERAND (t, 1), two);
7517 hstate.add_commutative (one, two);
7518 }
7519 else
7520 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7521 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7522 }
7523 return;
7524 }
7525 }
7526
7527 }
7528
7529 /* Constructors for pointer, array and function types.
7530 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7531 constructed by language-dependent code, not here.) */
7532
7533 /* Construct, lay out and return the type of pointers to TO_TYPE with
7534 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7535 reference all of memory. If such a type has already been
7536 constructed, reuse it. */
7537
7538 tree
7539 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7540 bool can_alias_all)
7541 {
7542 tree t;
7543
7544 if (to_type == error_mark_node)
7545 return error_mark_node;
7546
7547 /* If the pointed-to type has the may_alias attribute set, force
7548 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7549 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7550 can_alias_all = true;
7551
7552 /* In some cases, languages will have things that aren't a POINTER_TYPE
7553 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7554 In that case, return that type without regard to the rest of our
7555 operands.
7556
7557 ??? This is a kludge, but consistent with the way this function has
7558 always operated and there doesn't seem to be a good way to avoid this
7559 at the moment. */
7560 if (TYPE_POINTER_TO (to_type) != 0
7561 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7562 return TYPE_POINTER_TO (to_type);
7563
7564 /* First, if we already have a type for pointers to TO_TYPE and it's
7565 the proper mode, use it. */
7566 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7567 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7568 return t;
7569
7570 t = make_node (POINTER_TYPE);
7571
7572 TREE_TYPE (t) = to_type;
7573 SET_TYPE_MODE (t, mode);
7574 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7575 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7576 TYPE_POINTER_TO (to_type) = t;
7577
7578 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7579 SET_TYPE_STRUCTURAL_EQUALITY (t);
7580 else if (TYPE_CANONICAL (to_type) != to_type)
7581 TYPE_CANONICAL (t)
7582 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7583 mode, can_alias_all);
7584
7585 /* Lay out the type. This function has many callers that are concerned
7586 with expression-construction, and this simplifies them all. */
7587 layout_type (t);
7588
7589 return t;
7590 }
7591
7592 /* By default build pointers in ptr_mode. */
7593
7594 tree
7595 build_pointer_type (tree to_type)
7596 {
7597 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7598 : TYPE_ADDR_SPACE (to_type);
7599 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7600 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7601 }
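
/* Usage sketch: most callers want the default, so

     tree pint = build_pointer_type (integer_type_node);

   yields (and thereafter reuses) the POINTER_TYPE to 'int' in the pointer
   mode of the pointed-to type's address space.  build_pointer_type_for_mode
   is only needed when a non-default mode or the can-alias-all flag is
   required.  */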
7602
7603 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7604
7605 tree
7606 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7607 bool can_alias_all)
7608 {
7609 tree t;
7610
7611 if (to_type == error_mark_node)
7612 return error_mark_node;
7613
7614 /* If the pointed-to type has the may_alias attribute set, force
7615 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7616 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7617 can_alias_all = true;
7618
7619 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7620 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7621 In that case, return that type without regard to the rest of our
7622 operands.
7623
7624 ??? This is a kludge, but consistent with the way this function has
7625 always operated and there doesn't seem to be a good way to avoid this
7626 at the moment. */
7627 if (TYPE_REFERENCE_TO (to_type) != 0
7628 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7629 return TYPE_REFERENCE_TO (to_type);
7630
7631 /* First, if we already have a type for pointers to TO_TYPE and it's
7632 the proper mode, use it. */
7633 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7634 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7635 return t;
7636
7637 t = make_node (REFERENCE_TYPE);
7638
7639 TREE_TYPE (t) = to_type;
7640 SET_TYPE_MODE (t, mode);
7641 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7642 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7643 TYPE_REFERENCE_TO (to_type) = t;
7644
7645 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7646 SET_TYPE_STRUCTURAL_EQUALITY (t);
7647 else if (TYPE_CANONICAL (to_type) != to_type)
7648 TYPE_CANONICAL (t)
7649 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7650 mode, can_alias_all);
7651
7652 layout_type (t);
7653
7654 return t;
7655 }
7656
7657
7658 /* Build the node for the type of references-to-TO_TYPE by default
7659 in ptr_mode. */
7660
7661 tree
7662 build_reference_type (tree to_type)
7663 {
7664 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7665 : TYPE_ADDR_SPACE (to_type);
7666 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7667 return build_reference_type_for_mode (to_type, pointer_mode, false);
7668 }
7669
7670 #define MAX_INT_CACHED_PREC \
7671 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7672 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7673
7674 /* Builds a signed or unsigned integer type of precision PRECISION.
7675 Used for C bitfields whose precision does not match that of
7676 built-in target types. */
7677 tree
7678 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7679 int unsignedp)
7680 {
7681 tree itype, ret;
7682
7683 if (unsignedp)
7684 unsignedp = MAX_INT_CACHED_PREC + 1;
7685
7686 if (precision <= MAX_INT_CACHED_PREC)
7687 {
7688 itype = nonstandard_integer_type_cache[precision + unsignedp];
7689 if (itype)
7690 return itype;
7691 }
7692
7693 itype = make_node (INTEGER_TYPE);
7694 TYPE_PRECISION (itype) = precision;
7695
7696 if (unsignedp)
7697 fixup_unsigned_type (itype);
7698 else
7699 fixup_signed_type (itype);
7700
7701 ret = itype;
7702 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7703 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7704 if (precision <= MAX_INT_CACHED_PREC)
7705 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7706
7707 return ret;
7708 }
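
/* Usage sketch: a C bit-field such as 'unsigned x : 24' needs an integer
   type of exactly 24 bits, which none of the standard type nodes provide:

     tree uint24 = build_nonstandard_integer_type (24, 1);

   The second argument of 1 requests the unsigned flavour.  Repeated
   requests with the same precision and signedness come out of the small
   cache above (and the type hash table) rather than creating new nodes.  */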
7709
7710 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7711 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7712 is true, reuse such a type that has already been constructed. */
7713
7714 static tree
7715 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7716 {
7717 tree itype = make_node (INTEGER_TYPE);
7718 inchash::hash hstate;
7719
7720 TREE_TYPE (itype) = type;
7721
7722 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7723 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7724
7725 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7726 SET_TYPE_MODE (itype, TYPE_MODE (type));
7727 TYPE_SIZE (itype) = TYPE_SIZE (type);
7728 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7729 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7730 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7731
7732 if (!shared)
7733 return itype;
7734
7735 if ((TYPE_MIN_VALUE (itype)
7736 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7737 || (TYPE_MAX_VALUE (itype)
7738 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7739 {
7740 /* Since we cannot reliably merge this type, we need to compare it using
7741 structural equality checks. */
7742 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7743 return itype;
7744 }
7745
7746 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7747 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7748 hstate.merge_hash (TYPE_HASH (type));
7749 itype = type_hash_canon (hstate.end (), itype);
7750
7751 return itype;
7752 }
7753
7754 /* Wrapper around build_range_type_1 with SHARED set to true. */
7755
7756 tree
7757 build_range_type (tree type, tree lowval, tree highval)
7758 {
7759 return build_range_type_1 (type, lowval, highval, true);
7760 }
7761
7762 /* Wrapper around build_range_type_1 with SHARED set to false. */
7763
7764 tree
7765 build_nonshared_range_type (tree type, tree lowval, tree highval)
7766 {
7767 return build_range_type_1 (type, lowval, highval, false);
7768 }
7769
7770 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7771 MAXVAL should be the maximum value in the domain
7772 (one less than the length of the array).
7773
7774 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7775 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7776 The limit exists because the result is a signed type and we don't handle
7777 sizes that use more than one HOST_WIDE_INT. */
7778
7779 tree
7780 build_index_type (tree maxval)
7781 {
7782 return build_range_type (sizetype, size_zero_node, maxval);
7783 }
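
/* Illustrative sketch (hypothetical usage, not part of this file): the
   TYPE_DOMAIN for a 10-element array is

     tree domain = build_index_type (size_int (9));

   i.e. the sizetype range [0, 9].  */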
7784
7785 /* Return true if the debug information for TYPE, a subtype, should be emitted
7786 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7787 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7788 debug info and doesn't reflect the source code. */
7789
7790 bool
7791 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7792 {
7793 tree base_type = TREE_TYPE (type), low, high;
7794
7795 /* Subrange types have a base type which is an integral type. */
7796 if (!INTEGRAL_TYPE_P (base_type))
7797 return false;
7798
7799 /* Get the real bounds of the subtype. */
7800 if (lang_hooks.types.get_subrange_bounds)
7801 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7802 else
7803 {
7804 low = TYPE_MIN_VALUE (type);
7805 high = TYPE_MAX_VALUE (type);
7806 }
7807
7808 /* If the type and its base type have the same representation and the same
7809 name, then the type is not a subrange but a copy of the base type. */
7810 if ((TREE_CODE (base_type) == INTEGER_TYPE
7811 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7812 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7813 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7814 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7815 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7816 return false;
7817
7818 if (lowval)
7819 *lowval = low;
7820 if (highval)
7821 *highval = high;
7822 return true;
7823 }
7824
7825 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7826 and number of elements specified by the range of values of INDEX_TYPE.
7827 If SHARED is true, reuse such a type that has already been constructed. */
7828
7829 static tree
7830 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7831 {
7832 tree t;
7833
7834 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7835 {
7836 error ("arrays of functions are not meaningful");
7837 elt_type = integer_type_node;
7838 }
7839
7840 t = make_node (ARRAY_TYPE);
7841 TREE_TYPE (t) = elt_type;
7842 TYPE_DOMAIN (t) = index_type;
7843 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7844 layout_type (t);
7845
7846 /* If the element type is incomplete at this point we get marked for
7847 structural equality. Do not record these types in the canonical
7848 type hashtable. */
7849 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7850 return t;
7851
7852 if (shared)
7853 {
7854 inchash::hash hstate;
7855 hstate.add_object (TYPE_HASH (elt_type));
7856 if (index_type)
7857 hstate.add_object (TYPE_HASH (index_type));
7858 t = type_hash_canon (hstate.end (), t);
7859 }
7860
7861 if (TYPE_CANONICAL (t) == t)
7862 {
7863 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7864 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7865 SET_TYPE_STRUCTURAL_EQUALITY (t);
7866 else if (TYPE_CANONICAL (elt_type) != elt_type
7867 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7868 TYPE_CANONICAL (t)
7869 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7870 index_type
7871 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7872 shared);
7873 }
7874
7875 return t;
7876 }
7877
7878 /* Wrapper around build_array_type_1 with SHARED set to true. */
7879
7880 tree
7881 build_array_type (tree elt_type, tree index_type)
7882 {
7883 return build_array_type_1 (elt_type, index_type, true);
7884 }
7885
7886 /* Wrapper around build_array_type_1 with SHARED set to false. */
7887
7888 tree
7889 build_nonshared_array_type (tree elt_type, tree index_type)
7890 {
7891 return build_array_type_1 (elt_type, index_type, false);
7892 }
7893
7894 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7895 sizetype. */
7896
7897 tree
7898 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7899 {
7900 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7901 }
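
/* Illustrative sketch: the type char[16] can be obtained as

     tree t = build_array_type_nelts (char_type_node, 16);

   which is shorthand for build_array_type with the index domain
   [0, 15].  */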
7902
7903 /* Strip all ARRAY_TYPE wrappers from TYPE and return the underlying
7904 (non-array) element type. */
7905
7906 tree
7907 strip_array_types (tree type)
7908 {
7909 while (TREE_CODE (type) == ARRAY_TYPE)
7910 type = TREE_TYPE (type);
7911
7912 return type;
7913 }
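
/* Illustrative sketch (A_DECL is a hypothetical VAR_DECL used only for
   this example): for a declaration like "int a[2][3]",

     tree elt = strip_array_types (TREE_TYPE (a_decl));

   peels both ARRAY_TYPE layers and yields integer_type_node.  */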
7914
7915 /* Computes the canonical argument types from the argument type list
7916 ARGTYPES.
7917
7918 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7919 on entry to this function, or if any of the ARGTYPES are
7920 structural.
7921
7922 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7923 true on entry to this function, or if any of the ARGTYPES are
7924 non-canonical.
7925
7926 Returns a canonical argument list, which may be ARGTYPES when the
7927 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7928 true) or would not differ from ARGTYPES. */
7929
7930 static tree
7931 maybe_canonicalize_argtypes (tree argtypes,
7932 bool *any_structural_p,
7933 bool *any_noncanonical_p)
7934 {
7935 tree arg;
7936 bool any_noncanonical_argtypes_p = false;
7937
7938 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7939 {
7940 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7941 /* Fail gracefully by stating that the type is structural. */
7942 *any_structural_p = true;
7943 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7944 *any_structural_p = true;
7945 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7946 || TREE_PURPOSE (arg))
7947 /* If the argument has a default argument, we consider it
7948 non-canonical even though the type itself is canonical.
7949 That way, different variants of function and method types
7950 with default arguments will all point to the variant with
7951 no defaults as their canonical type. */
7952 any_noncanonical_argtypes_p = true;
7953 }
7954
7955 if (*any_structural_p)
7956 return argtypes;
7957
7958 if (any_noncanonical_argtypes_p)
7959 {
7960 /* Build the canonical list of argument types. */
7961 tree canon_argtypes = NULL_TREE;
7962 bool is_void = false;
7963
7964 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7965 {
7966 if (arg == void_list_node)
7967 is_void = true;
7968 else
7969 canon_argtypes = tree_cons (NULL_TREE,
7970 TYPE_CANONICAL (TREE_VALUE (arg)),
7971 canon_argtypes);
7972 }
7973
7974 canon_argtypes = nreverse (canon_argtypes);
7975 if (is_void)
7976 canon_argtypes = chainon (canon_argtypes, void_list_node);
7977
7978 /* There is a non-canonical type. */
7979 *any_noncanonical_p = true;
7980 return canon_argtypes;
7981 }
7982
7983 /* The canonical argument types are the same as ARGTYPES. */
7984 return argtypes;
7985 }
7986
7987 /* Construct, lay out and return
7988 the type of functions returning type VALUE_TYPE
7989 given arguments of types ARG_TYPES.
7990 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7991 are data type nodes for the arguments of the function.
7992 If such a type has already been constructed, reuse it. */
7993
7994 tree
7995 build_function_type (tree value_type, tree arg_types)
7996 {
7997 tree t;
7998 inchash::hash hstate;
7999 bool any_structural_p, any_noncanonical_p;
8000 tree canon_argtypes;
8001
8002 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8003 {
8004 error ("function return type cannot be function");
8005 value_type = integer_type_node;
8006 }
8007
8008 /* Make a node of the sort we want. */
8009 t = make_node (FUNCTION_TYPE);
8010 TREE_TYPE (t) = value_type;
8011 TYPE_ARG_TYPES (t) = arg_types;
8012
8013 /* If we already have such a type, use the old one. */
8014 hstate.add_object (TYPE_HASH (value_type));
8015 type_hash_list (arg_types, hstate);
8016 t = type_hash_canon (hstate.end (), t);
8017
8018 /* Set up the canonical type. */
8019 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8020 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8021 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8022 &any_structural_p,
8023 &any_noncanonical_p);
8024 if (any_structural_p)
8025 SET_TYPE_STRUCTURAL_EQUALITY (t);
8026 else if (any_noncanonical_p)
8027 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8028 canon_argtypes);
8029
8030 if (!COMPLETE_TYPE_P (t))
8031 layout_type (t);
8032 return t;
8033 }
8034
8035 /* Build a function type. The RETURN_TYPE is the type returned by the
8036 function. If VAARGS is set, no void_type_node is appended to
8037 the list. ARGP must always be terminated by a NULL_TREE. */
8038
8039 static tree
8040 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8041 {
8042 tree t, args, last;
8043
8044 t = va_arg (argp, tree);
8045 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8046 args = tree_cons (NULL_TREE, t, args);
8047
8048 if (vaargs)
8049 {
8050 last = args;
8051 if (args != NULL_TREE)
8052 args = nreverse (args);
8053 gcc_assert (last != void_list_node);
8054 }
8055 else if (args == NULL_TREE)
8056 args = void_list_node;
8057 else
8058 {
8059 last = args;
8060 args = nreverse (args);
8061 TREE_CHAIN (last) = void_list_node;
8062 }
8063 args = build_function_type (return_type, args);
8064
8065 return args;
8066 }
8067
8068 /* Build a function type. The RETURN_TYPE is the type returned by the
8069 function. If additional arguments are provided, they are
8070 additional argument types. The list of argument types must always
8071 be terminated by NULL_TREE. */
8072
8073 tree
8074 build_function_type_list (tree return_type, ...)
8075 {
8076 tree args;
8077 va_list p;
8078
8079 va_start (p, return_type);
8080 args = build_function_type_list_1 (false, return_type, p);
8081 va_end (p);
8082 return args;
8083 }
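
/* Usage sketch, for illustration only: the type of a function taking a
   void * and a size_t and returning int can be built as

     tree fntype
       = build_function_type_list (integer_type_node, ptr_type_node,
                                   size_type_node, NULL_TREE);

   The trailing NULL_TREE terminates the argument list; since this is the
   non-varargs entry point, void_list_node is appended for the caller.  */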
8084
8085 /* Build a variable argument function type. The RETURN_TYPE is the
8086 type returned by the function. If additional arguments are provided,
8087 they are additional argument types. The list of argument types must
8088 always be terminated by NULL_TREE. */
8089
8090 tree
8091 build_varargs_function_type_list (tree return_type, ...)
8092 {
8093 tree args;
8094 va_list p;
8095
8096 va_start (p, return_type);
8097 args = build_function_type_list_1 (true, return_type, p);
8098 va_end (p);
8099
8100 return args;
8101 }
8102
8103 /* Build a function type. RETURN_TYPE is the type returned by the
8104 function; VAARGS indicates whether the function takes varargs. The
8105 function takes N named arguments, the types of which are provided in
8106 ARG_TYPES. */
8107
8108 static tree
8109 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8110 tree *arg_types)
8111 {
8112 int i;
8113 tree t = vaargs ? NULL_TREE : void_list_node;
8114
8115 for (i = n - 1; i >= 0; i--)
8116 t = tree_cons (NULL_TREE, arg_types[i], t);
8117
8118 return build_function_type (return_type, t);
8119 }
8120
8121 /* Build a function type. RETURN_TYPE is the type returned by the
8122 function. The function takes N named arguments, the types of which
8123 are provided in ARG_TYPES. */
8124
8125 tree
8126 build_function_type_array (tree return_type, int n, tree *arg_types)
8127 {
8128 return build_function_type_array_1 (false, return_type, n, arg_types);
8129 }
8130
8131 /* Build a variable argument function type. RETURN_TYPE is the type
8132 returned by the function. The function takes N named arguments, the
8133 types of which are provided in ARG_TYPES. */
8134
8135 tree
8136 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8137 {
8138 return build_function_type_array_1 (true, return_type, n, arg_types);
8139 }
8140
8141 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8142 and ARGTYPES (a TREE_LIST) are the return type and argument types
8143 for the method. An implicit additional parameter (of type
8144 pointer-to-BASETYPE) is added to the ARGTYPES. */
8145
8146 tree
8147 build_method_type_directly (tree basetype,
8148 tree rettype,
8149 tree argtypes)
8150 {
8151 tree t;
8152 tree ptype;
8153 inchash::hash hstate;
8154 bool any_structural_p, any_noncanonical_p;
8155 tree canon_argtypes;
8156
8157 /* Make a node of the sort we want. */
8158 t = make_node (METHOD_TYPE);
8159
8160 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8161 TREE_TYPE (t) = rettype;
8162 ptype = build_pointer_type (basetype);
8163
8164 /* The actual arglist for this function includes a "hidden" argument
8165 which is "this". Put it into the list of argument types. */
8166 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8167 TYPE_ARG_TYPES (t) = argtypes;
8168
8169 /* If we already have such a type, use the old one. */
8170 hstate.add_object (TYPE_HASH (basetype));
8171 hstate.add_object (TYPE_HASH (rettype));
8172 type_hash_list (argtypes, hstate);
8173 t = type_hash_canon (hstate.end (), t);
8174
8175 /* Set up the canonical type. */
8176 any_structural_p
8177 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8178 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8179 any_noncanonical_p
8180 = (TYPE_CANONICAL (basetype) != basetype
8181 || TYPE_CANONICAL (rettype) != rettype);
8182 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8183 &any_structural_p,
8184 &any_noncanonical_p);
8185 if (any_structural_p)
8186 SET_TYPE_STRUCTURAL_EQUALITY (t);
8187 else if (any_noncanonical_p)
8188 TYPE_CANONICAL (t)
8189 = build_method_type_directly (TYPE_CANONICAL (basetype),
8190 TYPE_CANONICAL (rettype),
8191 canon_argtypes);
8192 if (!COMPLETE_TYPE_P (t))
8193 layout_type (t);
8194
8195 return t;
8196 }
8197
8198 /* Construct, lay out and return the type of methods belonging to class
8199 BASETYPE and whose arguments and values are described by TYPE.
8200 If that type exists already, reuse it.
8201 TYPE must be a FUNCTION_TYPE node. */
8202
8203 tree
8204 build_method_type (tree basetype, tree type)
8205 {
8206 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8207
8208 return build_method_type_directly (basetype,
8209 TREE_TYPE (type),
8210 TYPE_ARG_TYPES (type));
8211 }
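
/* Illustrative sketch (KLASS and FNTYPE are hypothetical names): if
   FNTYPE is the FUNCTION_TYPE "int (void)" and KLASS is a RECORD_TYPE,

     tree mtype = build_method_type (klass, fntype);

   yields a METHOD_TYPE whose TYPE_ARG_TYPES start with the implicit
   "this" parameter of type pointer-to-KLASS, followed by
   void_list_node.  */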
8212
8213 /* Construct, lay out and return the type of offsets to a value
8214 of type TYPE, within an object of type BASETYPE.
8215 If a suitable offset type exists already, reuse it. */
8216
8217 tree
8218 build_offset_type (tree basetype, tree type)
8219 {
8220 tree t;
8221 inchash::hash hstate;
8222
8223 /* Make a node of the sort we want. */
8224 t = make_node (OFFSET_TYPE);
8225
8226 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8227 TREE_TYPE (t) = type;
8228
8229 /* If we already have such a type, use the old one. */
8230 hstate.add_object (TYPE_HASH (basetype));
8231 hstate.add_object (TYPE_HASH (type));
8232 t = type_hash_canon (hstate.end (), t);
8233
8234 if (!COMPLETE_TYPE_P (t))
8235 layout_type (t);
8236
8237 if (TYPE_CANONICAL (t) == t)
8238 {
8239 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8240 || TYPE_STRUCTURAL_EQUALITY_P (type))
8241 SET_TYPE_STRUCTURAL_EQUALITY (t);
8242 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8243 || TYPE_CANONICAL (type) != type)
8244 TYPE_CANONICAL (t)
8245 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8246 TYPE_CANONICAL (type));
8247 }
8248
8249 return t;
8250 }
8251
8252 /* Create a complex type whose components are COMPONENT_TYPE. */
8253
8254 tree
8255 build_complex_type (tree component_type)
8256 {
8257 tree t;
8258 inchash::hash hstate;
8259
8260 gcc_assert (INTEGRAL_TYPE_P (component_type)
8261 || SCALAR_FLOAT_TYPE_P (component_type)
8262 || FIXED_POINT_TYPE_P (component_type));
8263
8264 /* Make a node of the sort we want. */
8265 t = make_node (COMPLEX_TYPE);
8266
8267 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8268
8269 /* If we already have such a type, use the old one. */
8270 hstate.add_object (TYPE_HASH (component_type));
8271 t = type_hash_canon (hstate.end (), t);
8272
8273 if (!COMPLETE_TYPE_P (t))
8274 layout_type (t);
8275
8276 if (TYPE_CANONICAL (t) == t)
8277 {
8278 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8279 SET_TYPE_STRUCTURAL_EQUALITY (t);
8280 else if (TYPE_CANONICAL (component_type) != component_type)
8281 TYPE_CANONICAL (t)
8282 = build_complex_type (TYPE_CANONICAL (component_type));
8283 }
8284
8285 /* We need to create a name, since complex is a fundamental type. */
8286 if (! TYPE_NAME (t))
8287 {
8288 const char *name;
8289 if (component_type == char_type_node)
8290 name = "complex char";
8291 else if (component_type == signed_char_type_node)
8292 name = "complex signed char";
8293 else if (component_type == unsigned_char_type_node)
8294 name = "complex unsigned char";
8295 else if (component_type == short_integer_type_node)
8296 name = "complex short int";
8297 else if (component_type == short_unsigned_type_node)
8298 name = "complex short unsigned int";
8299 else if (component_type == integer_type_node)
8300 name = "complex int";
8301 else if (component_type == unsigned_type_node)
8302 name = "complex unsigned int";
8303 else if (component_type == long_integer_type_node)
8304 name = "complex long int";
8305 else if (component_type == long_unsigned_type_node)
8306 name = "complex long unsigned int";
8307 else if (component_type == long_long_integer_type_node)
8308 name = "complex long long int";
8309 else if (component_type == long_long_unsigned_type_node)
8310 name = "complex long long unsigned int";
8311 else
8312 name = 0;
8313
8314 if (name != 0)
8315 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8316 get_identifier (name), t);
8317 }
8318
8319 return build_qualified_type (t, TYPE_QUALS (component_type));
8320 }
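
/* Illustrative sketch: build_complex_type (double_type_node) yields the
   COMPLEX_TYPE with double components (C's _Complex double); building
   it a second time returns the same node because of the type_hash_canon
   sharing above.  */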
8321
8322 /* If TYPE is a real or complex floating-point type and the target
8323 does not directly support arithmetic on TYPE then return the wider
8324 type to be used for arithmetic on TYPE. Otherwise, return
8325 NULL_TREE. */
8326
8327 tree
8328 excess_precision_type (tree type)
8329 {
8330 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8331 {
8332 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8333 switch (TREE_CODE (type))
8334 {
8335 case REAL_TYPE:
8336 switch (flt_eval_method)
8337 {
8338 case 1:
8339 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8340 return double_type_node;
8341 break;
8342 case 2:
8343 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8344 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8345 return long_double_type_node;
8346 break;
8347 default:
8348 gcc_unreachable ();
8349 }
8350 break;
8351 case COMPLEX_TYPE:
8352 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8353 return NULL_TREE;
8354 switch (flt_eval_method)
8355 {
8356 case 1:
8357 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8358 return complex_double_type_node;
8359 break;
8360 case 2:
8361 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8362 || (TYPE_MODE (TREE_TYPE (type))
8363 == TYPE_MODE (double_type_node)))
8364 return complex_long_double_type_node;
8365 break;
8366 default:
8367 gcc_unreachable ();
8368 }
8369 break;
8370 default:
8371 break;
8372 }
8373 }
8374 return NULL_TREE;
8375 }
8376 \f
8377 /* Return OP, stripped of any conversions to wider types as much as is safe.
8378 Converting the value back to OP's type makes a value equivalent to OP.
8379
8380 If FOR_TYPE is nonzero, we return a value which, if converted to
8381 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8382
8383 OP must have integer, real or enumeral type. Pointers are not allowed!
8384
8385 There are some cases where the obvious value we could return
8386 would regenerate to OP if converted to OP's type,
8387 but would not extend like OP to wider types.
8388 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8389 For example, if OP is (unsigned short)(signed char)-1,
8390 we avoid returning (signed char)-1 if FOR_TYPE is int,
8391 even though extending that to an unsigned short would regenerate OP,
8392 since the result of extending (signed char)-1 to (int)
8393 is different from (int) OP. */
8394
8395 tree
8396 get_unwidened (tree op, tree for_type)
8397 {
8398 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8399 tree type = TREE_TYPE (op);
8400 unsigned final_prec
8401 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8402 int uns
8403 = (for_type != 0 && for_type != type
8404 && final_prec > TYPE_PRECISION (type)
8405 && TYPE_UNSIGNED (type));
8406 tree win = op;
8407
8408 while (CONVERT_EXPR_P (op))
8409 {
8410 int bitschange;
8411
8412 /* TYPE_PRECISION on vector types has different meaning
8413 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8414 so avoid them here. */
8415 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8416 break;
8417
8418 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8419 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8420
8421 /* Truncations are many-one so cannot be removed, unless we are
8422 later going to truncate down even further. */
8423 if (bitschange < 0
8424 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8425 break;
8426
8427 /* See what's inside this conversion. If we decide to strip it,
8428 we will set WIN. */
8429 op = TREE_OPERAND (op, 0);
8430
8431 /* If we have not stripped any zero-extensions (uns is 0),
8432 we can strip any kind of extension.
8433 If we have previously stripped a zero-extension,
8434 only zero-extensions can safely be stripped.
8435 Any extension can be stripped if the bits it would produce
8436 are all going to be discarded later by truncating to FOR_TYPE. */
8437
8438 if (bitschange > 0)
8439 {
8440 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8441 win = op;
8442 /* TYPE_UNSIGNED says whether this is a zero-extension.
8443 Let's avoid computing it if it does not affect WIN
8444 and if UNS will not be needed again. */
8445 if ((uns
8446 || CONVERT_EXPR_P (op))
8447 && TYPE_UNSIGNED (TREE_TYPE (op)))
8448 {
8449 uns = 1;
8450 win = op;
8451 }
8452 }
8453 }
8454
8455 /* If we finally reach a constant see if it fits in for_type and
8456 in that case convert it. */
8457 if (for_type
8458 && TREE_CODE (win) == INTEGER_CST
8459 && TREE_TYPE (win) != for_type
8460 && int_fits_type_p (win, for_type))
8461 win = fold_convert (for_type, win);
8462
8463 return win;
8464 }
8465 \f
8466 /* Return OP or a simpler expression for a narrower value
8467 which can be sign-extended or zero-extended to give back OP.
8468 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8469 or 0 if the value should be sign-extended. */
8470
8471 tree
8472 get_narrower (tree op, int *unsignedp_ptr)
8473 {
8474 int uns = 0;
8475 int first = 1;
8476 tree win = op;
8477 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8478
8479 while (TREE_CODE (op) == NOP_EXPR)
8480 {
8481 int bitschange
8482 = (TYPE_PRECISION (TREE_TYPE (op))
8483 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8484
8485 /* Truncations are many-one so cannot be removed. */
8486 if (bitschange < 0)
8487 break;
8488
8489 /* See what's inside this conversion. If we decide to strip it,
8490 we will set WIN. */
8491
8492 if (bitschange > 0)
8493 {
8494 op = TREE_OPERAND (op, 0);
8495 /* An extension: the outermost one can be stripped,
8496 but remember whether it is zero or sign extension. */
8497 if (first)
8498 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8499 /* Otherwise, if a sign extension has been stripped,
8500 only sign extensions can now be stripped;
8501 if a zero extension has been stripped, only zero-extensions. */
8502 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8503 break;
8504 first = 0;
8505 }
8506 else /* bitschange == 0 */
8507 {
8508 /* A change in nominal type can always be stripped, but we must
8509 preserve the unsignedness. */
8510 if (first)
8511 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8512 first = 0;
8513 op = TREE_OPERAND (op, 0);
8514 /* Keep trying to narrow, but don't assign op to win if it
8515 would turn an integral type into something else. */
8516 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8517 continue;
8518 }
8519
8520 win = op;
8521 }
8522
8523 if (TREE_CODE (op) == COMPONENT_REF
8524 /* Since type_for_size always gives an integer type. */
8525 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8526 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8527 /* Ensure field is laid out already. */
8528 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8529 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8530 {
8531 unsigned HOST_WIDE_INT innerprec
8532 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8533 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8534 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8535 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8536
8537 /* We can get this structure field in a narrower type that fits it,
8538 but the resulting extension to its nominal type (a fullword type)
8539 must satisfy the same conditions as for other extensions.
8540
8541 Do this only for fields that are aligned (not bit-fields),
8542 because there is no advantage in doing this when bit-field
8543 insns will be used. */
8544
8545 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8546 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8547 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8548 && type != 0)
8549 {
8550 if (first)
8551 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8552 win = fold_convert (type, op);
8553 }
8554 }
8555
8556 *unsignedp_ptr = uns;
8557 return win;
8558 }
8559 \f
8560 /* Returns true if integer constant C has a value that is permissible
8561 for type TYPE (an INTEGER_TYPE). */
8562
8563 bool
8564 int_fits_type_p (const_tree c, const_tree type)
8565 {
8566 tree type_low_bound, type_high_bound;
8567 bool ok_for_low_bound, ok_for_high_bound;
8568 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8569
8570 retry:
8571 type_low_bound = TYPE_MIN_VALUE (type);
8572 type_high_bound = TYPE_MAX_VALUE (type);
8573
8574 /* If at least one bound of the type is a constant integer, we can check
8575 ourselves and maybe make a decision. If no such decision is possible, but
8576 this type is a subtype, try checking against that. Otherwise, use
8577 fits_to_tree_p, which checks against the precision.
8578
8579 Compute the status for each possibly constant bound, and return if we see
8580 one does not match. Use ok_for_xxx_bound for this purpose: true means
8581 "constant bound present and C known to fit it", false means "no constant
8582 bound to check against"; a bound known not to fit returns immediately. */
8583
8584 /* Check if c >= type_low_bound. */
8585 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8586 {
8587 if (tree_int_cst_lt (c, type_low_bound))
8588 return false;
8589 ok_for_low_bound = true;
8590 }
8591 else
8592 ok_for_low_bound = false;
8593
8594 /* Check if c <= type_high_bound. */
8595 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8596 {
8597 if (tree_int_cst_lt (type_high_bound, c))
8598 return false;
8599 ok_for_high_bound = true;
8600 }
8601 else
8602 ok_for_high_bound = false;
8603
8604 /* If the constant fits both bounds, the result is known. */
8605 if (ok_for_low_bound && ok_for_high_bound)
8606 return true;
8607
8608 /* Perform some generic filtering which may allow making a decision
8609 even if the bounds are not constant. First, negative integers
8610 never fit in unsigned types. */
8611 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8612 return false;
8613
8614 /* Second, narrower types always fit in wider ones. */
8615 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8616 return true;
8617
8618 /* Third, unsigned integers with top bit set never fit signed types. */
8619 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8620 {
8621 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8622 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8623 {
8624 /* When a tree_cst is converted to a wide-int, the precision
8625 is taken from the type. However, if the precision of the
8626 mode underneath the type is smaller than that, it is
8627 possible that the value will not fit. The test below
8628 fails if any bit is set between the sign bit of the
8629 underlying mode and the top bit of the type. */
8630 if (wi::ne_p (wi::zext (c, prec - 1), c))
8631 return false;
8632 }
8633 else if (wi::neg_p (c))
8634 return false;
8635 }
8636
8637 /* If we haven't been able to decide at this point, there is nothing more we
8638 can check ourselves here. Look at the base type if we have one and it
8639 has the same precision. */
8640 if (TREE_CODE (type) == INTEGER_TYPE
8641 && TREE_TYPE (type) != 0
8642 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8643 {
8644 type = TREE_TYPE (type);
8645 goto retry;
8646 }
8647
8648 /* Or to fits_to_tree_p, if nothing else. */
8649 return wi::fits_to_tree_p (c, type);
8650 }
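
/* Illustrative examples (assuming an 8-bit signed char and a short of
   at least 16 bits): with C being the INTEGER_CST 300 of type int,

     int_fits_type_p (c, signed_char_type_node)    -> false
     int_fits_type_p (c, short_integer_type_node)  -> true  */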
8651
8652 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8653 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8654 represented (assuming two's-complement arithmetic) within the bit
8655 precision of the type are returned instead. */
8656
8657 void
8658 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8659 {
8660 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8661 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8662 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8663 else
8664 {
8665 if (TYPE_UNSIGNED (type))
8666 mpz_set_ui (min, 0);
8667 else
8668 {
8669 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8670 wi::to_mpz (mn, min, SIGNED);
8671 }
8672 }
8673
8674 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8675 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8676 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8677 else
8678 {
8679 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8680 wi::to_mpz (mn, max, TYPE_SIGN (type));
8681 }
8682 }
8683
8684 /* Return true if VAR is an automatic variable defined in function FN. */
8685
8686 bool
8687 auto_var_in_fn_p (const_tree var, const_tree fn)
8688 {
8689 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8690 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8691 || TREE_CODE (var) == PARM_DECL)
8692 && ! TREE_STATIC (var))
8693 || TREE_CODE (var) == LABEL_DECL
8694 || TREE_CODE (var) == RESULT_DECL));
8695 }
8696
8697 /* Subprogram of following function. Called by walk_tree.
8698
8699 Return *TP if it is an automatic variable or parameter of the
8700 function passed in as DATA. */
8701
8702 static tree
8703 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8704 {
8705 tree fn = (tree) data;
8706
8707 if (TYPE_P (*tp))
8708 *walk_subtrees = 0;
8709
8710 else if (DECL_P (*tp)
8711 && auto_var_in_fn_p (*tp, fn))
8712 return *tp;
8713
8714 return NULL_TREE;
8715 }
8716
8717 /* Returns true if T is, contains, or refers to a type with variable
8718 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8719 arguments, but not the return type. If FN is nonzero, only return
8720 true if a modifier of the type or position of FN is a variable or
8721 parameter inside FN.
8722
8723 This concept is more general than that of C99 'variably modified types':
8724 in C99, a struct type is never variably modified because a VLA may not
8725 appear as a structure member. However, in GNU C, code like:
8726
8727 struct S { int i[f()]; };
8728
8729 is valid, and other languages may define similar constructs. */
8730
8731 bool
8732 variably_modified_type_p (tree type, tree fn)
8733 {
8734 tree t;
8735
8736 /* Test if T is either variable (if FN is zero) or an expression containing
8737 a variable in FN. If TYPE isn't gimplified, return true also if
8738 gimplify_one_sizepos would gimplify the expression into a local
8739 variable. */
8740 #define RETURN_TRUE_IF_VAR(T) \
8741 do { tree _t = (T); \
8742 if (_t != NULL_TREE \
8743 && _t != error_mark_node \
8744 && TREE_CODE (_t) != INTEGER_CST \
8745 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8746 && (!fn \
8747 || (!TYPE_SIZES_GIMPLIFIED (type) \
8748 && !is_gimple_sizepos (_t)) \
8749 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8750 return true; } while (0)
8751
8752 if (type == error_mark_node)
8753 return false;
8754
8755 /* If TYPE itself has variable size, it is variably modified. */
8756 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8757 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8758
8759 switch (TREE_CODE (type))
8760 {
8761 case POINTER_TYPE:
8762 case REFERENCE_TYPE:
8763 case VECTOR_TYPE:
8764 if (variably_modified_type_p (TREE_TYPE (type), fn))
8765 return true;
8766 break;
8767
8768 case FUNCTION_TYPE:
8769 case METHOD_TYPE:
8770 /* If TYPE is a function type, it is variably modified if the
8771 return type is variably modified. */
8772 if (variably_modified_type_p (TREE_TYPE (type), fn))
8773 return true;
8774 break;
8775
8776 case INTEGER_TYPE:
8777 case REAL_TYPE:
8778 case FIXED_POINT_TYPE:
8779 case ENUMERAL_TYPE:
8780 case BOOLEAN_TYPE:
8781 /* Scalar types are variably modified if their end points
8782 aren't constant. */
8783 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8784 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8785 break;
8786
8787 case RECORD_TYPE:
8788 case UNION_TYPE:
8789 case QUAL_UNION_TYPE:
8790 /* We can't see if any of the fields are variably-modified by the
8791 definition we normally use, since that would produce infinite
8792 recursion via pointers. */
8793 /* This is variably modified if some field's type is. */
8794 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8795 if (TREE_CODE (t) == FIELD_DECL)
8796 {
8797 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8798 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8799 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8800
8801 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8802 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8803 }
8804 break;
8805
8806 case ARRAY_TYPE:
8807 /* Do not call ourselves to avoid infinite recursion. This is
8808 variably modified if the element type is. */
8809 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8810 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8811 break;
8812
8813 default:
8814 break;
8815 }
8816
8817 /* The current language may have other cases to check, but in general,
8818 all other types are not variably modified. */
8819 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8820
8821 #undef RETURN_TRUE_IF_VAR
8822 }
8823
8824 /* Given a DECL or TYPE, return the scope in which it was declared, or
8825 NULL_TREE if there is no containing scope. */
8826
8827 tree
8828 get_containing_scope (const_tree t)
8829 {
8830 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8831 }
8832
8833 /* Return the innermost context enclosing DECL that is
8834 a FUNCTION_DECL, or zero if none. */
8835
8836 tree
8837 decl_function_context (const_tree decl)
8838 {
8839 tree context;
8840
8841 if (TREE_CODE (decl) == ERROR_MARK)
8842 return 0;
8843
8844 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8845 where we look up the function at runtime. Such functions always take
8846 a first argument of type 'pointer to real context'.
8847
8848 C++ should really be fixed to use DECL_CONTEXT for the real context,
8849 and use something else for the "virtual context". */
8850 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8851 context
8852 = TYPE_MAIN_VARIANT
8853 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8854 else
8855 context = DECL_CONTEXT (decl);
8856
8857 while (context && TREE_CODE (context) != FUNCTION_DECL)
8858 {
8859 if (TREE_CODE (context) == BLOCK)
8860 context = BLOCK_SUPERCONTEXT (context);
8861 else
8862 context = get_containing_scope (context);
8863 }
8864
8865 return context;
8866 }
8867
8868 /* Return the innermost context enclosing DECL that is
8869 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8870 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8871
8872 tree
8873 decl_type_context (const_tree decl)
8874 {
8875 tree context = DECL_CONTEXT (decl);
8876
8877 while (context)
8878 switch (TREE_CODE (context))
8879 {
8880 case NAMESPACE_DECL:
8881 case TRANSLATION_UNIT_DECL:
8882 return NULL_TREE;
8883
8884 case RECORD_TYPE:
8885 case UNION_TYPE:
8886 case QUAL_UNION_TYPE:
8887 return context;
8888
8889 case TYPE_DECL:
8890 case FUNCTION_DECL:
8891 context = DECL_CONTEXT (context);
8892 break;
8893
8894 case BLOCK:
8895 context = BLOCK_SUPERCONTEXT (context);
8896 break;
8897
8898 default:
8899 gcc_unreachable ();
8900 }
8901
8902 return NULL_TREE;
8903 }
8904
8905 /* CALL is a CALL_EXPR. Return the declaration for the function
8906 called, or NULL_TREE if the called function cannot be
8907 determined. */
8908
8909 tree
8910 get_callee_fndecl (const_tree call)
8911 {
8912 tree addr;
8913
8914 if (call == error_mark_node)
8915 return error_mark_node;
8916
8917 /* It's invalid to call this function with anything but a
8918 CALL_EXPR. */
8919 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8920
8921 /* The first operand to the CALL is the address of the function
8922 called. */
8923 addr = CALL_EXPR_FN (call);
8924
8925 /* If there is no function, return early. */
8926 if (addr == NULL_TREE)
8927 return NULL_TREE;
8928
8929 STRIP_NOPS (addr);
8930
8931 /* If this is a readonly function pointer, extract its initial value. */
8932 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8933 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8934 && DECL_INITIAL (addr))
8935 addr = DECL_INITIAL (addr);
8936
8937 /* If the address is just `&f' for some function `f', then we know
8938 that `f' is being called. */
8939 if (TREE_CODE (addr) == ADDR_EXPR
8940 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8941 return TREE_OPERAND (addr, 0);
8942
8943 /* We couldn't figure out what was being called. */
8944 return NULL_TREE;
8945 }
8946
8947 /* Print debugging information about tree nodes generated during the compile,
8948 and any language-specific information. */
8949
8950 void
8951 dump_tree_statistics (void)
8952 {
8953 if (GATHER_STATISTICS)
8954 {
8955 int i;
8956 int total_nodes, total_bytes;
8957 fprintf (stderr, "Kind Nodes Bytes\n");
8958 fprintf (stderr, "---------------------------------------\n");
8959 total_nodes = total_bytes = 0;
8960 for (i = 0; i < (int) all_kinds; i++)
8961 {
8962 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8963 tree_node_counts[i], tree_node_sizes[i]);
8964 total_nodes += tree_node_counts[i];
8965 total_bytes += tree_node_sizes[i];
8966 }
8967 fprintf (stderr, "---------------------------------------\n");
8968 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8969 fprintf (stderr, "---------------------------------------\n");
8970 fprintf (stderr, "Code Nodes\n");
8971 fprintf (stderr, "----------------------------\n");
8972 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8973 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8974 tree_code_counts[i]);
8975 fprintf (stderr, "----------------------------\n");
8976 ssanames_print_statistics ();
8977 phinodes_print_statistics ();
8978 }
8979 else
8980 fprintf (stderr, "(No per-node statistics)\n");
8981
8982 print_type_hash_statistics ();
8983 print_debug_expr_statistics ();
8984 print_value_expr_statistics ();
8985 lang_hooks.print_statistics ();
8986 }
8987 \f
8988 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8989
8990 /* Accumulate into CHKSUM a crc32 over the topmost BITS bits of VALUE. */
8991
8992 static unsigned
8993 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
8994 {
8995 unsigned ix;
8996
8997 for (ix = bits; ix--; value <<= 1)
8998 {
8999 unsigned feedback;
9000
9001 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9002 chksum <<= 1;
9003 chksum ^= feedback;
9004 }
9005 return chksum;
9006 }
9007
9008 /* Generate a crc32 of a 32-bit unsigned. */
9009
9010 unsigned
9011 crc32_unsigned (unsigned chksum, unsigned value)
9012 {
9013 return crc32_unsigned_bits (chksum, value, 32);
9014 }
9015
9016 /* Generate a crc32 of a byte. */
9017
9018 unsigned
9019 crc32_byte (unsigned chksum, char byte)
9020 {
9021 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9022 }
9023
9024 /* Generate a crc32 of a string. */
9025
9026 unsigned
9027 crc32_string (unsigned chksum, const char *string)
9028 {
9029 do
9030 {
9031 chksum = crc32_byte (chksum, *string);
9032 }
9033 while (*string++);
9034 return chksum;
9035 }
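
/* Note that the do/while loop above folds the terminating NUL into the
   checksum as well; get_file_function_name below uses this routine to
   mix a name into a link-unique symbol.  */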
9036
9037 /* P is a string that will be used in a symbol. Mask out any characters
9038 that are not valid in that context. */
9039
9040 void
9041 clean_symbol_name (char *p)
9042 {
9043 for (; *p; p++)
9044 if (! (ISALNUM (*p)
9045 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9046 || *p == '$'
9047 #endif
9048 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9049 || *p == '.'
9050 #endif
9051 ))
9052 *p = '_';
9053 }
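
/* Illustrative sketch, assuming a target where neither NO_DOLLAR_IN_LABEL
   nor NO_DOT_IN_LABEL is defined: the buffer "foo bar-baz.c" is rewritten
   in place to "foo_bar_baz.c"; the '.' survives, while every other
   non-alphanumeric character becomes '_'.  */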
9054
9055 /* Generate a name for a special-purpose function.
9056 The generated name may need to be unique across the whole link.
9057 Changes to this function may also require corresponding changes to
9058 xstrdup_mask_random.
9059 TYPE is some string to identify the purpose of this function to the
9060 linker or collect2; it must start with an uppercase letter,
9061 one of:
9062 I - for constructors
9063 D - for destructors
9064 N - for C++ anonymous namespaces
9065 F - for DWARF unwind frame information. */
9066
9067 tree
9068 get_file_function_name (const char *type)
9069 {
9070 char *buf;
9071 const char *p;
9072 char *q;
9073
9074 /* If we already have a name we know to be unique, just use that. */
9075 if (first_global_object_name)
9076 p = q = ASTRDUP (first_global_object_name);
9077 /* If the target is handling the constructors/destructors, they
9078 will be local to this file and the name is only necessary for
9079 debugging purposes.
9080 We also assign sub_I and sub_D suffixes to constructors called from
9081 the global static constructors. These are always local. */
9082 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9083 || (strncmp (type, "sub_", 4) == 0
9084 && (type[4] == 'I' || type[4] == 'D')))
9085 {
9086 const char *file = main_input_filename;
9087 if (! file)
9088 file = LOCATION_FILE (input_location);
9089 /* Just use the file's basename, because the full pathname
9090 might be quite long. */
9091 p = q = ASTRDUP (lbasename (file));
9092 }
9093 else
9094 {
9095 /* Otherwise, the name must be unique across the entire link.
9096 We don't have anything that we know to be unique to this translation
9097 unit, so use what we do have and throw in some randomness. */
9098 unsigned len;
9099 const char *name = weak_global_object_name;
9100 const char *file = main_input_filename;
9101
9102 if (! name)
9103 name = "";
9104 if (! file)
9105 file = LOCATION_FILE (input_location);
9106
9107 len = strlen (file);
9108 q = (char *) alloca (9 + 17 + len + 1);
9109 memcpy (q, file, len + 1);
9110
9111 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9112 crc32_string (0, name), get_random_seed (false));
9113
9114 p = q;
9115 }
9116
9117 clean_symbol_name (q);
9118 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9119 + strlen (type));
9120
9121 /* Set up the name of the file-level functions we may need.
9122 Use a global object (which is already required to be unique over
9123 the program) rather than the file name (which imposes extra
9124 constraints). */
9125 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9126
9127 return get_identifier (buf);
9128 }
9129 \f
9130 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9131
9132 /* Complain that the tree code of NODE does not match the expected 0
9133 terminated list of trailing codes. The trailing code list can be
9134 empty, for a more vague error message. FILE, LINE, and FUNCTION
9135 are of the caller. */
9136
9137 void
9138 tree_check_failed (const_tree node, const char *file,
9139 int line, const char *function, ...)
9140 {
9141 va_list args;
9142 const char *buffer;
9143 unsigned length = 0;
9144 enum tree_code code;
9145
9146 va_start (args, function);
9147 while ((code = (enum tree_code) va_arg (args, int)))
9148 length += 4 + strlen (get_tree_code_name (code));
9149 va_end (args);
9150 if (length)
9151 {
9152 char *tmp;
9153 va_start (args, function);
9154 length += strlen ("expected ");
9155 buffer = tmp = (char *) alloca (length);
9156 length = 0;
9157 while ((code = (enum tree_code) va_arg (args, int)))
9158 {
9159 const char *prefix = length ? " or " : "expected ";
9160
9161 strcpy (tmp + length, prefix);
9162 length += strlen (prefix);
9163 strcpy (tmp + length, get_tree_code_name (code));
9164 length += strlen (get_tree_code_name (code));
9165 }
9166 va_end (args);
9167 }
9168 else
9169 buffer = "unexpected node";
9170
9171 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9172 buffer, get_tree_code_name (TREE_CODE (node)),
9173 function, trim_filename (file), line);
9174 }
9175
9176 /* Complain that the tree code of NODE matches one of the codes in the
9177 0-terminated list of trailing codes, which it must not. FILE, LINE,
9178 and FUNCTION are of the caller. */
9179
9180 void
9181 tree_not_check_failed (const_tree node, const char *file,
9182 int line, const char *function, ...)
9183 {
9184 va_list args;
9185 char *buffer;
9186 unsigned length = 0;
9187 enum tree_code code;
9188
9189 va_start (args, function);
9190 while ((code = (enum tree_code) va_arg (args, int)))
9191 length += 4 + strlen (get_tree_code_name (code));
9192 va_end (args);
9193 va_start (args, function);
9194 buffer = (char *) alloca (length);
9195 length = 0;
9196 while ((code = (enum tree_code) va_arg (args, int)))
9197 {
9198 if (length)
9199 {
9200 strcpy (buffer + length, " or ");
9201 length += 4;
9202 }
9203 strcpy (buffer + length, get_tree_code_name (code));
9204 length += strlen (get_tree_code_name (code));
9205 }
9206 va_end (args);
9207
9208 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9209 buffer, get_tree_code_name (TREE_CODE (node)),
9210 function, trim_filename (file), line);
9211 }
9212
9213 /* Similar to tree_check_failed, except that we check for a class of tree
9214 code, given in CL. */
9215
9216 void
9217 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9218 const char *file, int line, const char *function)
9219 {
9220 internal_error
9221 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9222 TREE_CODE_CLASS_STRING (cl),
9223 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9224 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9225 }
9226
9227 /* Similar to tree_check_failed, except that instead of specifying a
9228 dozen codes, use the knowledge that they're all sequential. */
9229
9230 void
9231 tree_range_check_failed (const_tree node, const char *file, int line,
9232 const char *function, enum tree_code c1,
9233 enum tree_code c2)
9234 {
9235 char *buffer;
9236 unsigned length = 0;
9237 unsigned int c;
9238
9239 for (c = c1; c <= c2; ++c)
9240 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9241
9242 length += strlen ("expected ");
9243 buffer = (char *) alloca (length);
9244 length = 0;
9245
9246 for (c = c1; c <= c2; ++c)
9247 {
9248 const char *prefix = length ? " or " : "expected ";
9249
9250 strcpy (buffer + length, prefix);
9251 length += strlen (prefix);
9252 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9253 length += strlen (get_tree_code_name ((enum tree_code) c));
9254 }
9255
9256 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9257 buffer, get_tree_code_name (TREE_CODE (node)),
9258 function, trim_filename (file), line);
9259 }
9260
9261
9262 /* Similar to tree_check_failed, except that we check that a tree does
9263 not have the specified code, given in CL. */
9264
9265 void
9266 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9267 const char *file, int line, const char *function)
9268 {
9269 internal_error
9270 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9271 TREE_CODE_CLASS_STRING (cl),
9272 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9273 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9274 }
9275
9276
9277 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9278
9279 void
9280 omp_clause_check_failed (const_tree node, const char *file, int line,
9281 const char *function, enum omp_clause_code code)
9282 {
9283 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9284 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9285 function, trim_filename (file), line);
9286 }
9287
9288
9289 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9290
9291 void
9292 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9293 const char *function, enum omp_clause_code c1,
9294 enum omp_clause_code c2)
9295 {
9296 char *buffer;
9297 unsigned length = 0;
9298 unsigned int c;
9299
9300 for (c = c1; c <= c2; ++c)
9301 length += 4 + strlen (omp_clause_code_name[c]);
9302
9303 length += strlen ("expected ");
9304 buffer = (char *) alloca (length);
9305 length = 0;
9306
9307 for (c = c1; c <= c2; ++c)
9308 {
9309 const char *prefix = length ? " or " : "expected ";
9310
9311 strcpy (buffer + length, prefix);
9312 length += strlen (prefix);
9313 strcpy (buffer + length, omp_clause_code_name[c]);
9314 length += strlen (omp_clause_code_name[c]);
9315 }
9316
9317 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9318 buffer, omp_clause_code_name[TREE_CODE (node)],
9319 function, trim_filename (file), line);
9320 }
9321
9322
9323 #undef DEFTREESTRUCT
9324 #define DEFTREESTRUCT(VAL, NAME) NAME,
9325
9326 static const char *ts_enum_names[] = {
9327 #include "treestruct.def"
9328 };
9329 #undef DEFTREESTRUCT
9330
9331 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9332
9333 /* Similar to tree_class_check_failed, except that we check for
9334 whether CODE contains the tree structure identified by EN. */
9335
9336 void
9337 tree_contains_struct_check_failed (const_tree node,
9338 const enum tree_node_structure_enum en,
9339 const char *file, int line,
9340 const char *function)
9341 {
9342 internal_error
9343 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9344 TS_ENUM_NAME (en),
9345 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9346 }
9347
9348
9349 /* Similar to above, except that the check is for the bounds of a
9350 TREE_INT_CST's (dynamically sized) vector of elements. */
9351
9352 void
9353 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9354 const char *function)
9355 {
9356 internal_error
9357 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9358 idx + 1, len, function, trim_filename (file), line);
9359 }
9360
9361 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9362 (dynamically sized) vector. */
9363
9364 void
9365 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9366 const char *function)
9367 {
9368 internal_error
9369 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9370 idx + 1, len, function, trim_filename (file), line);
9371 }
9372
9373 /* Similar to above, except that the check is for the bounds of the operand
9374 vector of an expression node EXP. */
9375
9376 void
9377 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9378 int line, const char *function)
9379 {
9380 enum tree_code code = TREE_CODE (exp);
9381 internal_error
9382 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9383 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9384 function, trim_filename (file), line);
9385 }
9386
9387 /* Similar to above, except that the check is for the number of
9388 operands of an OMP_CLAUSE node. */
9389
9390 void
9391 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9392 int line, const char *function)
9393 {
9394 internal_error
9395 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9396 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9397 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9398 trim_filename (file), line);
9399 }
9400 #endif /* ENABLE_TREE_CHECKING */
9401 \f
9402 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9403 and mapped to the machine mode MODE. Initialize its fields and build
9404 the information necessary for debugging output. */
9405
9406 static tree
9407 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9408 {
9409 tree t;
9410 inchash::hash hstate;
9411
9412 t = make_node (VECTOR_TYPE);
9413 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9414 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9415 SET_TYPE_MODE (t, mode);
9416
9417 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9418 SET_TYPE_STRUCTURAL_EQUALITY (t);
9419 else if (TYPE_CANONICAL (innertype) != innertype
9420 || mode != VOIDmode)
9421 TYPE_CANONICAL (t)
9422 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9423
9424 layout_type (t);
9425
9426 hstate.add_wide_int (VECTOR_TYPE);
9427 hstate.add_wide_int (nunits);
9428 hstate.add_wide_int (mode);
9429 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9430 t = type_hash_canon (hstate.end (), t);
9431
9432 /* We have built a main variant, based on the main variant of the
9433 inner type. Use it to build the variant we return. */
9434 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9435 && TREE_TYPE (t) != innertype)
9436 return build_type_attribute_qual_variant (t,
9437 TYPE_ATTRIBUTES (innertype),
9438 TYPE_QUALS (innertype));
9439
9440 return t;
9441 }
9442
9443 static tree
9444 make_or_reuse_type (unsigned size, int unsignedp)
9445 {
9446 if (size == INT_TYPE_SIZE)
9447 return unsignedp ? unsigned_type_node : integer_type_node;
9448 if (size == CHAR_TYPE_SIZE)
9449 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9450 if (size == SHORT_TYPE_SIZE)
9451 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9452 if (size == LONG_TYPE_SIZE)
9453 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9454 if (size == LONG_LONG_TYPE_SIZE)
9455 return (unsignedp ? long_long_unsigned_type_node
9456 : long_long_integer_type_node);
9457 if (size == 128 && int128_integer_type_node)
9458 return (unsignedp ? int128_unsigned_type_node
9459 : int128_integer_type_node);
9460
9461 if (unsignedp)
9462 return make_unsigned_type (size);
9463 else
9464 return make_signed_type (size);
9465 }
9466
9467 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9468
9469 static tree
9470 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9471 {
9472 if (satp)
9473 {
9474 if (size == SHORT_FRACT_TYPE_SIZE)
9475 return unsignedp ? sat_unsigned_short_fract_type_node
9476 : sat_short_fract_type_node;
9477 if (size == FRACT_TYPE_SIZE)
9478 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9479 if (size == LONG_FRACT_TYPE_SIZE)
9480 return unsignedp ? sat_unsigned_long_fract_type_node
9481 : sat_long_fract_type_node;
9482 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9483 return unsignedp ? sat_unsigned_long_long_fract_type_node
9484 : sat_long_long_fract_type_node;
9485 }
9486 else
9487 {
9488 if (size == SHORT_FRACT_TYPE_SIZE)
9489 return unsignedp ? unsigned_short_fract_type_node
9490 : short_fract_type_node;
9491 if (size == FRACT_TYPE_SIZE)
9492 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9493 if (size == LONG_FRACT_TYPE_SIZE)
9494 return unsignedp ? unsigned_long_fract_type_node
9495 : long_fract_type_node;
9496 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9497 return unsignedp ? unsigned_long_long_fract_type_node
9498 : long_long_fract_type_node;
9499 }
9500
9501 return make_fract_type (size, unsignedp, satp);
9502 }
9503
9504 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9505
9506 static tree
9507 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9508 {
9509 if (satp)
9510 {
9511 if (size == SHORT_ACCUM_TYPE_SIZE)
9512 return unsignedp ? sat_unsigned_short_accum_type_node
9513 : sat_short_accum_type_node;
9514 if (size == ACCUM_TYPE_SIZE)
9515 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9516 if (size == LONG_ACCUM_TYPE_SIZE)
9517 return unsignedp ? sat_unsigned_long_accum_type_node
9518 : sat_long_accum_type_node;
9519 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9520 return unsignedp ? sat_unsigned_long_long_accum_type_node
9521 : sat_long_long_accum_type_node;
9522 }
9523 else
9524 {
9525 if (size == SHORT_ACCUM_TYPE_SIZE)
9526 return unsignedp ? unsigned_short_accum_type_node
9527 : short_accum_type_node;
9528 if (size == ACCUM_TYPE_SIZE)
9529 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9530 if (size == LONG_ACCUM_TYPE_SIZE)
9531 return unsignedp ? unsigned_long_accum_type_node
9532 : long_accum_type_node;
9533 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9534 return unsignedp ? unsigned_long_long_accum_type_node
9535 : long_long_accum_type_node;
9536 }
9537
9538 return make_accum_type (size, unsignedp, satp);
9539 }
9540
9541
9542 /* Create an atomic variant node for TYPE. This routine is called
9543 during initialization of data types to create the 5 basic atomic
9544 types. The generic build_variant_type function requires these to
9545 already be set up in order to function properly, so it cannot be
9546 called from there. If ALIGN is non-zero, then ensure alignment is
9547 overridden to this value. */
9548
9549 static tree
9550 build_atomic_base (tree type, unsigned int align)
9551 {
9552 tree t;
9553
9554 /* Make sure it's not already registered. */
9555 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9556 return t;
9557
9558 t = build_variant_type_copy (type);
9559 set_type_quals (t, TYPE_QUAL_ATOMIC);
9560
9561 if (align)
9562 TYPE_ALIGN (t) = align;
9563
9564 return t;
9565 }
9566
9567 /* Create nodes for all integer types (and error_mark_node) using the sizes
9568 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9569 SHORT_DOUBLE specifies whether double should be of the same precision
9570 as float. */
9571
9572 void
9573 build_common_tree_nodes (bool signed_char, bool short_double)
9574 {
9575 error_mark_node = make_node (ERROR_MARK);
9576 TREE_TYPE (error_mark_node) = error_mark_node;
9577
9578 initialize_sizetypes ();
9579
9580 /* Define both `signed char' and `unsigned char'. */
9581 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9582 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9583 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9584 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9585
9586 /* Define `char', which is like either `signed char' or `unsigned char'
9587 but not the same as either. */
9588 char_type_node
9589 = (signed_char
9590 ? make_signed_type (CHAR_TYPE_SIZE)
9591 : make_unsigned_type (CHAR_TYPE_SIZE));
9592 TYPE_STRING_FLAG (char_type_node) = 1;
9593
9594 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9595 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9596 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9597 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9598 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9599 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9600 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9601 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9602 #if HOST_BITS_PER_WIDE_INT >= 64
9603 /* TODO: This isn't correct, but at the moment the logic depends on the
9604 host's wide integers instead of the target's.
9605 If there is a target that does not support TImode but has a 128-bit
9606 integer-scalar register, this target check needs to be adjusted. */
9607 if (targetm.scalar_mode_supported_p (TImode))
9608 {
9609 int128_integer_type_node = make_signed_type (128);
9610 int128_unsigned_type_node = make_unsigned_type (128);
9611 }
9612 #endif
9613
9614 /* Define a boolean type. This type only represents boolean values but
9615 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9616 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9617 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9618 TYPE_PRECISION (boolean_type_node) = 1;
9619 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9620
9621 /* Define what type to use for size_t. */
9622 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9623 size_type_node = unsigned_type_node;
9624 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9625 size_type_node = long_unsigned_type_node;
9626 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9627 size_type_node = long_long_unsigned_type_node;
9628 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9629 size_type_node = short_unsigned_type_node;
9630 else
9631 gcc_unreachable ();
9632
9633 /* Fill in the rest of the sized types. Reuse existing type nodes
9634 when possible. */
9635 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9636 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9637 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9638 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9639 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9640
9641 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9642 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9643 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9644 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9645 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9646
9647 /* Don't call build_qualified_type for atomics. That routine does
9648 special processing for atomics, and until they are initialized
9649 it's better not to make that call.
9650
9651 Check to see if there is a target override for atomic types. */
9652
9653 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9654 targetm.atomic_align_for_mode (QImode));
9655 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9656 targetm.atomic_align_for_mode (HImode));
9657 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9658 targetm.atomic_align_for_mode (SImode));
9659 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9660 targetm.atomic_align_for_mode (DImode));
9661 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9662 targetm.atomic_align_for_mode (TImode));
9663
9664 access_public_node = get_identifier ("public");
9665 access_protected_node = get_identifier ("protected");
9666 access_private_node = get_identifier ("private");
9667
9668 /* Define these next since types below may use them. */
9669 integer_zero_node = build_int_cst (integer_type_node, 0);
9670 integer_one_node = build_int_cst (integer_type_node, 1);
9671 integer_three_node = build_int_cst (integer_type_node, 3);
9672 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9673
9674 size_zero_node = size_int (0);
9675 size_one_node = size_int (1);
9676 bitsize_zero_node = bitsize_int (0);
9677 bitsize_one_node = bitsize_int (1);
9678 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9679
9680 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9681 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9682
9683 void_type_node = make_node (VOID_TYPE);
9684 layout_type (void_type_node);
9685
9686 /* We are not going to have real types in C with less than byte alignment,
9687 so we might as well not have any types that claim to have it. */
9688 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9689 TYPE_USER_ALIGN (void_type_node) = 0;
9690
9691 void_node = make_node (VOID_CST);
9692 TREE_TYPE (void_node) = void_type_node;
9693
9694 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9695 layout_type (TREE_TYPE (null_pointer_node));
9696
9697 ptr_type_node = build_pointer_type (void_type_node);
9698 const_ptr_type_node
9699 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9700 fileptr_type_node = ptr_type_node;
9701
9702 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9703
9704 float_type_node = make_node (REAL_TYPE);
9705 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9706 layout_type (float_type_node);
9707
9708 double_type_node = make_node (REAL_TYPE);
9709 if (short_double)
9710 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9711 else
9712 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9713 layout_type (double_type_node);
9714
9715 long_double_type_node = make_node (REAL_TYPE);
9716 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9717 layout_type (long_double_type_node);
9718
9719 float_ptr_type_node = build_pointer_type (float_type_node);
9720 double_ptr_type_node = build_pointer_type (double_type_node);
9721 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9722 integer_ptr_type_node = build_pointer_type (integer_type_node);
9723
9724 /* Fixed size integer types. */
9725 uint16_type_node = build_nonstandard_integer_type (16, true);
9726 uint32_type_node = build_nonstandard_integer_type (32, true);
9727 uint64_type_node = build_nonstandard_integer_type (64, true);
9728
9729 /* Decimal float types. */
9730 dfloat32_type_node = make_node (REAL_TYPE);
9731 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9732 layout_type (dfloat32_type_node);
9733 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9734 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9735
9736 dfloat64_type_node = make_node (REAL_TYPE);
9737 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9738 layout_type (dfloat64_type_node);
9739 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9740 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9741
9742 dfloat128_type_node = make_node (REAL_TYPE);
9743 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9744 layout_type (dfloat128_type_node);
9745 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9746 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9747
9748 complex_integer_type_node = build_complex_type (integer_type_node);
9749 complex_float_type_node = build_complex_type (float_type_node);
9750 complex_double_type_node = build_complex_type (double_type_node);
9751 complex_long_double_type_node = build_complex_type (long_double_type_node);
9752
9753 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9754 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9755 sat_ ## KIND ## _type_node = \
9756 make_sat_signed_ ## KIND ## _type (SIZE); \
9757 sat_unsigned_ ## KIND ## _type_node = \
9758 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9759 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9760 unsigned_ ## KIND ## _type_node = \
9761 make_unsigned_ ## KIND ## _type (SIZE);
9762
9763 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9764 sat_ ## WIDTH ## KIND ## _type_node = \
9765 make_sat_signed_ ## KIND ## _type (SIZE); \
9766 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9767 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9768 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9769 unsigned_ ## WIDTH ## KIND ## _type_node = \
9770 make_unsigned_ ## KIND ## _type (SIZE);
9771
9772 /* Make fixed-point type nodes based on four different widths. */
9773 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9774 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9775 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9776 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9777 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9778
9779 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9780 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9781 NAME ## _type_node = \
9782 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9783 u ## NAME ## _type_node = \
9784 make_or_reuse_unsigned_ ## KIND ## _type \
9785 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9786 sat_ ## NAME ## _type_node = \
9787 make_or_reuse_sat_signed_ ## KIND ## _type \
9788 (GET_MODE_BITSIZE (MODE ## mode)); \
9789 sat_u ## NAME ## _type_node = \
9790 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9791 (GET_MODE_BITSIZE (U ## MODE ## mode));
9792
9793 /* Fixed-point type and mode nodes. */
9794 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9795 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9796 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9797 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9798 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9799 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9800 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9801 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9802 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9803 MAKE_FIXED_MODE_NODE (accum, da, DA)
9804 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9805
9806 {
9807 tree t = targetm.build_builtin_va_list ();
9808
9809 /* Many back-ends define record types without setting TYPE_NAME.
9810 If we copied the record type here, we'd keep the original
9811 record type without a name. This breaks name mangling. So,
9812 don't copy record types and let c_common_nodes_and_builtins()
9813 declare the type to be __builtin_va_list. */
9814 if (TREE_CODE (t) != RECORD_TYPE)
9815 t = build_variant_type_copy (t);
9816
9817 va_list_type_node = t;
9818 }
9819 }
9820
9821 /* Modify DECL for the given flags.
9822 The TM_PURE attribute is set only on types, so the function will modify
9823 DECL's type when ECF_TM_PURE is used. */
9824
9825 void
9826 set_call_expr_flags (tree decl, int flags)
9827 {
9828 if (flags & ECF_NOTHROW)
9829 TREE_NOTHROW (decl) = 1;
9830 if (flags & ECF_CONST)
9831 TREE_READONLY (decl) = 1;
9832 if (flags & ECF_PURE)
9833 DECL_PURE_P (decl) = 1;
9834 if (flags & ECF_LOOPING_CONST_OR_PURE)
9835 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9836 if (flags & ECF_NOVOPS)
9837 DECL_IS_NOVOPS (decl) = 1;
9838 if (flags & ECF_NORETURN)
9839 TREE_THIS_VOLATILE (decl) = 1;
9840 if (flags & ECF_MALLOC)
9841 DECL_IS_MALLOC (decl) = 1;
9842 if (flags & ECF_RETURNS_TWICE)
9843 DECL_IS_RETURNS_TWICE (decl) = 1;
9844 if (flags & ECF_LEAF)
9845 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9846 NULL, DECL_ATTRIBUTES (decl));
9847 if ((flags & ECF_TM_PURE) && flag_tm)
9848 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9849 /* Looping const or pure is implied by noreturn.
9850 There is currently no way to declare looping const or looping pure alone. */
9851 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9852 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9853 }
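/* As a purely illustrative sketch (FNDECL here stands for any function
   declaration the caller has already built), a front end creating a helper
   that neither throws nor reads or writes memory could mark it with

     set_call_expr_flags (fndecl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   which sets TREE_READONLY and TREE_NOTHROW and attaches the "leaf"
   attribute, exactly as the individual checks above do.  */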
9854
9855
9856 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9857
9858 static void
9859 local_define_builtin (const char *name, tree type, enum built_in_function code,
9860 const char *library_name, int ecf_flags)
9861 {
9862 tree decl;
9863
9864 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9865 library_name, NULL_TREE);
9866 set_call_expr_flags (decl, ecf_flags);
9867
9868 set_builtin_decl (code, decl, true);
9869 }
9870
9871 /* Call this function after instantiating all builtins that the language
9872 front end cares about. This will build the rest of the builtins
9873 and internal functions that are relied upon by the tree optimizers and
9874 the middle-end. */
9875
9876 void
9877 build_common_builtin_nodes (void)
9878 {
9879 tree tmp, ftype;
9880 int ecf_flags;
9881
9882 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9883 {
9884 ftype = build_function_type (void_type_node, void_list_node);
9885 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9886 "__builtin_unreachable",
9887 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9888 | ECF_CONST);
9889 }
9890
9891 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9892 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9893 {
9894 ftype = build_function_type_list (ptr_type_node,
9895 ptr_type_node, const_ptr_type_node,
9896 size_type_node, NULL_TREE);
9897
9898 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9899 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9900 "memcpy", ECF_NOTHROW | ECF_LEAF);
9901 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9902 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9903 "memmove", ECF_NOTHROW | ECF_LEAF);
9904 }
9905
9906 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9907 {
9908 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9909 const_ptr_type_node, size_type_node,
9910 NULL_TREE);
9911 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9912 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9913 }
9914
9915 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9916 {
9917 ftype = build_function_type_list (ptr_type_node,
9918 ptr_type_node, integer_type_node,
9919 size_type_node, NULL_TREE);
9920 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9921 "memset", ECF_NOTHROW | ECF_LEAF);
9922 }
9923
9924 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9925 {
9926 ftype = build_function_type_list (ptr_type_node,
9927 size_type_node, NULL_TREE);
9928 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9929 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9930 }
9931
9932 ftype = build_function_type_list (ptr_type_node, size_type_node,
9933 size_type_node, NULL_TREE);
9934 local_define_builtin ("__builtin_alloca_with_align", ftype,
9935 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9936 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9937
9938 /* If we're checking the stack, `alloca' can throw. */
9939 if (flag_stack_check)
9940 {
9941 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9942 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9943 }
9944
9945 ftype = build_function_type_list (void_type_node,
9946 ptr_type_node, ptr_type_node,
9947 ptr_type_node, NULL_TREE);
9948 local_define_builtin ("__builtin_init_trampoline", ftype,
9949 BUILT_IN_INIT_TRAMPOLINE,
9950 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9951 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9952 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9953 "__builtin_init_heap_trampoline",
9954 ECF_NOTHROW | ECF_LEAF);
9955
9956 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9957 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9958 BUILT_IN_ADJUST_TRAMPOLINE,
9959 "__builtin_adjust_trampoline",
9960 ECF_CONST | ECF_NOTHROW);
9961
9962 ftype = build_function_type_list (void_type_node,
9963 ptr_type_node, ptr_type_node, NULL_TREE);
9964 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9965 BUILT_IN_NONLOCAL_GOTO,
9966 "__builtin_nonlocal_goto",
9967 ECF_NORETURN | ECF_NOTHROW);
9968
9969 ftype = build_function_type_list (void_type_node,
9970 ptr_type_node, ptr_type_node, NULL_TREE);
9971 local_define_builtin ("__builtin_setjmp_setup", ftype,
9972 BUILT_IN_SETJMP_SETUP,
9973 "__builtin_setjmp_setup", ECF_NOTHROW);
9974
9975 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9976 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9977 BUILT_IN_SETJMP_RECEIVER,
9978 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9979
9980 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9981 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9982 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9983
9984 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9985 local_define_builtin ("__builtin_stack_restore", ftype,
9986 BUILT_IN_STACK_RESTORE,
9987 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9988
9989 /* If there's a possibility that we might use the ARM EABI, build the
9990 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
9991 if (targetm.arm_eabi_unwinder)
9992 {
9993 ftype = build_function_type_list (void_type_node, NULL_TREE);
9994 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9995 BUILT_IN_CXA_END_CLEANUP,
9996 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9997 }
9998
9999 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10000 local_define_builtin ("__builtin_unwind_resume", ftype,
10001 BUILT_IN_UNWIND_RESUME,
10002 ((targetm_common.except_unwind_info (&global_options)
10003 == UI_SJLJ)
10004 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10005 ECF_NORETURN);
10006
10007 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10008 {
10009 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10010 NULL_TREE);
10011 local_define_builtin ("__builtin_return_address", ftype,
10012 BUILT_IN_RETURN_ADDRESS,
10013 "__builtin_return_address",
10014 ECF_NOTHROW);
10015 }
10016
10017 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10018 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10019 {
10020 ftype = build_function_type_list (void_type_node, ptr_type_node,
10021 ptr_type_node, NULL_TREE);
10022 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10023 local_define_builtin ("__cyg_profile_func_enter", ftype,
10024 BUILT_IN_PROFILE_FUNC_ENTER,
10025 "__cyg_profile_func_enter", 0);
10026 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10027 local_define_builtin ("__cyg_profile_func_exit", ftype,
10028 BUILT_IN_PROFILE_FUNC_EXIT,
10029 "__cyg_profile_func_exit", 0);
10030 }
10031
10032 /* The exception object and filter values from the runtime. The argument
10033 must be zero before exception lowering, i.e. from the front end. After
10034 exception lowering, it will be the region number for the exception
10035 landing pad. These functions are PURE instead of CONST to prevent
10036 them from being hoisted past the exception edge that will initialize
10037 their values in the landing pad. */
10038 ftype = build_function_type_list (ptr_type_node,
10039 integer_type_node, NULL_TREE);
10040 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10041 /* Only use TM_PURE if we have TM language support. */
10042 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10043 ecf_flags |= ECF_TM_PURE;
10044 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10045 "__builtin_eh_pointer", ecf_flags);
10046
10047 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10048 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10049 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10050 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10051
10052 ftype = build_function_type_list (void_type_node,
10053 integer_type_node, integer_type_node,
10054 NULL_TREE);
10055 local_define_builtin ("__builtin_eh_copy_values", ftype,
10056 BUILT_IN_EH_COPY_VALUES,
10057 "__builtin_eh_copy_values", ECF_NOTHROW);
10058
10059 /* Complex multiplication and division. These are handled as builtins
10060 rather than optabs because emit_library_call_value doesn't support
10061 complex. Further, we can do slightly better with folding these
10062 beasties if the real and imaginary parts of the arguments are separate. */
10063 {
10064 int mode;
10065
10066 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10067 {
10068 char mode_name_buf[4], *q;
10069 const char *p;
10070 enum built_in_function mcode, dcode;
10071 tree type, inner_type;
10072 const char *prefix = "__";
10073
10074 if (targetm.libfunc_gnu_prefix)
10075 prefix = "__gnu_";
10076
10077 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10078 if (type == NULL)
10079 continue;
10080 inner_type = TREE_TYPE (type);
10081
10082 ftype = build_function_type_list (type, inner_type, inner_type,
10083 inner_type, inner_type, NULL_TREE);
10084
10085 mcode = ((enum built_in_function)
10086 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10087 dcode = ((enum built_in_function)
10088 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10089
10090 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10091 *q = TOLOWER (*p);
10092 *q = '\0';
10093
10094 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10095 NULL);
10096 local_define_builtin (built_in_names[mcode], ftype, mcode,
10097 built_in_names[mcode],
10098 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10099
10100 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10101 NULL);
10102 local_define_builtin (built_in_names[dcode], ftype, dcode,
10103 built_in_names[dcode],
10104 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10105 }
10106 }
10107
10108 init_internal_fns ();
10109 }
10110
10111 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10112 better way.
10113
10114 If we requested a pointer to a vector, build up the pointers that
10115 we stripped off while looking for the inner type. Similarly for
10116 return values from functions.
10117
10118 The argument TYPE is the top of the chain, and BOTTOM is the
10119 new type which we will point to. */
10120
10121 tree
10122 reconstruct_complex_type (tree type, tree bottom)
10123 {
10124 tree inner, outer;
10125
10126 if (TREE_CODE (type) == POINTER_TYPE)
10127 {
10128 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10129 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10130 TYPE_REF_CAN_ALIAS_ALL (type));
10131 }
10132 else if (TREE_CODE (type) == REFERENCE_TYPE)
10133 {
10134 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10135 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10136 TYPE_REF_CAN_ALIAS_ALL (type));
10137 }
10138 else if (TREE_CODE (type) == ARRAY_TYPE)
10139 {
10140 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10141 outer = build_array_type (inner, TYPE_DOMAIN (type));
10142 }
10143 else if (TREE_CODE (type) == FUNCTION_TYPE)
10144 {
10145 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10146 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10147 }
10148 else if (TREE_CODE (type) == METHOD_TYPE)
10149 {
10150 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10151 /* The build_method_type_directly() routine prepends 'this' to the
10152 argument list, so we must compensate by getting rid of it. */
10153 outer
10154 = build_method_type_directly
10155 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10156 inner,
10157 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10158 }
10159 else if (TREE_CODE (type) == OFFSET_TYPE)
10160 {
10161 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10162 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10163 }
10164 else
10165 return bottom;
10166
10167 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10168 TYPE_QUALS (type));
10169 }
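/* For illustration (a hypothetical sketch): if TYPE is `float *' and BOTTOM
   is a vector type such as the one returned by
   build_vector_type (float_type_node, 4), the POINTER_TYPE case above
   rebuilds the stripped pointer around BOTTOM, so the result is a pointer
   to that vector type, with TYPE's qualifiers and attributes reapplied by
   build_type_attribute_qual_variant.  */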
10170
10171 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10172 the inner type. */
10173 tree
10174 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10175 {
10176 int nunits;
10177
10178 switch (GET_MODE_CLASS (mode))
10179 {
10180 case MODE_VECTOR_INT:
10181 case MODE_VECTOR_FLOAT:
10182 case MODE_VECTOR_FRACT:
10183 case MODE_VECTOR_UFRACT:
10184 case MODE_VECTOR_ACCUM:
10185 case MODE_VECTOR_UACCUM:
10186 nunits = GET_MODE_NUNITS (mode);
10187 break;
10188
10189 case MODE_INT:
10190 /* Check that there are no leftover bits. */
10191 gcc_assert (GET_MODE_BITSIZE (mode)
10192 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10193
10194 nunits = GET_MODE_BITSIZE (mode)
10195 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10196 break;
10197
10198 default:
10199 gcc_unreachable ();
10200 }
10201
10202 return make_vector_type (innertype, nunits, mode);
10203 }
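/* Example (an illustrative sketch): a MODE_INT request such as

     tree v = build_vector_type_for_mode (intQI_type_node, DImode);

   takes the MODE_INT branch above, divides GET_MODE_BITSIZE (DImode) by
   the 8-bit element size, and so builds an 8-element QImode vector on a
   target whose DImode is 64 bits wide.  */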
10204
10205 /* Similarly, but takes the inner type and number of units, which must be
10206 a power of two. */
10207
10208 tree
10209 build_vector_type (tree innertype, int nunits)
10210 {
10211 return make_vector_type (innertype, nunits, VOIDmode);
10212 }
10213
10214 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10215
10216 tree
10217 build_opaque_vector_type (tree innertype, int nunits)
10218 {
10219 tree t = make_vector_type (innertype, nunits, VOIDmode);
10220 tree cand;
10221 /* We always build the non-opaque variant before the opaque one,
10222 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10223 cand = TYPE_NEXT_VARIANT (t);
10224 if (cand
10225 && TYPE_VECTOR_OPAQUE (cand)
10226 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10227 return cand;
10228 /* Otherwise build a variant type and make sure to queue it after
10229 the non-opaque type. */
10230 cand = build_distinct_type_copy (t);
10231 TYPE_VECTOR_OPAQUE (cand) = true;
10232 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10233 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10234 TYPE_NEXT_VARIANT (t) = cand;
10235 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10236 return cand;
10237 }
10238
10239
10240 /* Given an initializer INIT, return TRUE if INIT is zero or some
10241 aggregate of zeros. Otherwise return FALSE. */
10242 bool
10243 initializer_zerop (const_tree init)
10244 {
10245 tree elt;
10246
10247 STRIP_NOPS (init);
10248
10249 switch (TREE_CODE (init))
10250 {
10251 case INTEGER_CST:
10252 return integer_zerop (init);
10253
10254 case REAL_CST:
10255 /* ??? Note that this is not correct for C4X float formats. There,
10256 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10257 negative exponent. */
10258 return real_zerop (init)
10259 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10260
10261 case FIXED_CST:
10262 return fixed_zerop (init);
10263
10264 case COMPLEX_CST:
10265 return integer_zerop (init)
10266 || (real_zerop (init)
10267 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10268 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10269
10270 case VECTOR_CST:
10271 {
10272 unsigned i;
10273 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10274 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10275 return false;
10276 return true;
10277 }
10278
10279 case CONSTRUCTOR:
10280 {
10281 unsigned HOST_WIDE_INT idx;
10282
10283 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10284 if (!initializer_zerop (elt))
10285 return false;
10286 return true;
10287 }
10288
10289 case STRING_CST:
10290 {
10291 int i;
10292
10293 /* We need to loop through all elements to handle cases like
10294 "\0" and "\0foobar". */
10295 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10296 if (TREE_STRING_POINTER (init)[i] != '\0')
10297 return false;
10298
10299 return true;
10300 }
10301
10302 default:
10303 return false;
10304 }
10305 }
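/* A few illustrative cases (sketch only): initializer_zerop is true for
   build_int_cst (integer_type_node, 0), for a CONSTRUCTOR whose elements
   are all zero, and for a STRING_CST consisting only of NUL characters,
   but false for a REAL_CST holding -0.0, whose bit pattern is not an
   all-zero block of memory.  */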
10306
10307 /* Check if the vector VEC consists of all equal elements and that
10308 the number of elements corresponds to the type of VEC.
10309 The function returns the first element of the vector
10310 or NULL_TREE if the vector is not uniform. */
10311 tree
10312 uniform_vector_p (const_tree vec)
10313 {
10314 tree first, t;
10315 unsigned i;
10316
10317 if (vec == NULL_TREE)
10318 return NULL_TREE;
10319
10320 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10321
10322 if (TREE_CODE (vec) == VECTOR_CST)
10323 {
10324 first = VECTOR_CST_ELT (vec, 0);
10325 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10326 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10327 return NULL_TREE;
10328
10329 return first;
10330 }
10331
10332 else if (TREE_CODE (vec) == CONSTRUCTOR)
10333 {
10334 first = error_mark_node;
10335
10336 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10337 {
10338 if (i == 0)
10339 {
10340 first = t;
10341 continue;
10342 }
10343 if (!operand_equal_p (first, t, 0))
10344 return NULL_TREE;
10345 }
10346 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10347 return NULL_TREE;
10348
10349 return first;
10350 }
10351
10352 return NULL_TREE;
10353 }
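/* For example (an illustrative sketch), for a VECTOR_CST built from four
   copies of integer_one_node uniform_vector_p returns that shared element,
   whereas for the elements { 1, 2, 1, 1 } the mismatch at the second
   position makes it return NULL_TREE.  */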
10354
10355 /* Build an empty statement at location LOC. */
10356
10357 tree
10358 build_empty_stmt (location_t loc)
10359 {
10360 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10361 SET_EXPR_LOCATION (t, loc);
10362 return t;
10363 }
10364
10365
10366 /* Build an OpenMP clause with code CODE. LOC is the location of the
10367 clause. */
10368
10369 tree
10370 build_omp_clause (location_t loc, enum omp_clause_code code)
10371 {
10372 tree t;
10373 int size, length;
10374
10375 length = omp_clause_num_ops[code];
10376 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10377
10378 record_node_allocation_statistics (OMP_CLAUSE, size);
10379
10380 t = (tree) ggc_internal_alloc (size);
10381 memset (t, 0, size);
10382 TREE_SET_CODE (t, OMP_CLAUSE);
10383 OMP_CLAUSE_SET_CODE (t, code);
10384 OMP_CLAUSE_LOCATION (t) = loc;
10385
10386 return t;
10387 }
10388
10389 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10390 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10391 Except for the CODE and operand count field, other storage for the
10392 object is initialized to zeros. */
10393
10394 tree
10395 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10396 {
10397 tree t;
10398 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10399
10400 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10401 gcc_assert (len >= 1);
10402
10403 record_node_allocation_statistics (code, length);
10404
10405 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10406
10407 TREE_SET_CODE (t, code);
10408
10409 /* Can't use TREE_OPERAND to store the length because if checking is
10410 enabled, it will try to check the length before we store it. :-P */
10411 t->exp.operands[0] = build_int_cst (sizetype, len);
10412
10413 return t;
10414 }
10415
10416 /* Helper function for build_call_* functions; build a CALL_EXPR with
10417 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10418 the argument slots. */
10419
10420 static tree
10421 build_call_1 (tree return_type, tree fn, int nargs)
10422 {
10423 tree t;
10424
10425 t = build_vl_exp (CALL_EXPR, nargs + 3);
10426 TREE_TYPE (t) = return_type;
10427 CALL_EXPR_FN (t) = fn;
10428 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10429
10430 return t;
10431 }
10432
10433 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10434 FN and a null static chain slot. NARGS is the number of call arguments
10435 which are specified as "..." arguments. */
10436
10437 tree
10438 build_call_nary (tree return_type, tree fn, int nargs, ...)
10439 {
10440 tree ret;
10441 va_list args;
10442 va_start (args, nargs);
10443 ret = build_call_valist (return_type, fn, nargs, args);
10444 va_end (args);
10445 return ret;
10446 }
10447
10448 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10449 FN and a null static chain slot. NARGS is the number of call arguments
10450 which are specified as a va_list ARGS. */
10451
10452 tree
10453 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10454 {
10455 tree t;
10456 int i;
10457
10458 t = build_call_1 (return_type, fn, nargs);
10459 for (i = 0; i < nargs; i++)
10460 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10461 process_call_operands (t);
10462 return t;
10463 }
10464
10465 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10466 FN and a null static chain slot. NARGS is the number of call arguments
10467 which are specified as a tree array ARGS. */
10468
10469 tree
10470 build_call_array_loc (location_t loc, tree return_type, tree fn,
10471 int nargs, const tree *args)
10472 {
10473 tree t;
10474 int i;
10475
10476 t = build_call_1 (return_type, fn, nargs);
10477 for (i = 0; i < nargs; i++)
10478 CALL_EXPR_ARG (t, i) = args[i];
10479 process_call_operands (t);
10480 SET_EXPR_LOCATION (t, loc);
10481 return t;
10482 }
10483
10484 /* Like build_call_array, but takes a vec. */
10485
10486 tree
10487 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10488 {
10489 tree ret, t;
10490 unsigned int ix;
10491
10492 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10493 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10494 CALL_EXPR_ARG (ret, ix) = t;
10495 process_call_operands (ret);
10496 return ret;
10497 }
10498
10499 /* Conveniently construct a function call expression. FNDECL names the
10500 function to be called and N arguments are passed in the array
10501 ARGARRAY. */
10502
10503 tree
10504 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10505 {
10506 tree fntype = TREE_TYPE (fndecl);
10507 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10508
10509 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10510 }
10511
10512 /* Conveniently construct a function call expression. FNDECL names the
10513 function to be called and the arguments are passed in the vector
10514 VEC. */
10515
10516 tree
10517 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10518 {
10519 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10520 vec_safe_address (vec));
10521 }
10522
10523
10524 /* Conveniently construct a function call expression. FNDECL names the
10525 function to be called, N is the number of arguments, and the "..."
10526 parameters are the argument expressions. */
10527
10528 tree
10529 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10530 {
10531 va_list ap;
10532 tree *argarray = XALLOCAVEC (tree, n);
10533 int i;
10534
10535 va_start (ap, n);
10536 for (i = 0; i < n; i++)
10537 argarray[i] = va_arg (ap, tree);
10538 va_end (ap);
10539 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10540 }
10541
10542 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10543 varargs macros aren't supported by all bootstrap compilers. */
10544
10545 tree
10546 build_call_expr (tree fndecl, int n, ...)
10547 {
10548 va_list ap;
10549 tree *argarray = XALLOCAVEC (tree, n);
10550 int i;
10551
10552 va_start (ap, n);
10553 for (i = 0; i < n; i++)
10554 argarray[i] = va_arg (ap, tree);
10555 va_end (ap);
10556 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10557 }
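/* Usage sketch (illustrative only; DST, SRC and LEN stand for trees the
   caller has already built):

     tree call = build_call_expr (builtin_decl_explicit (BUILT_IN_MEMCPY),
                                  3, dst, src, len);

   builds a three-argument CALL_EXPR to the memcpy builtin.  */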
10558
10559 /* Build an internal call expression. This is just like a CALL_EXPR, except
10560 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10561 call to the internal function. */
10562
10563 tree
10564 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10565 tree type, int n, ...)
10566 {
10567 va_list ap;
10568 int i;
10569
10570 tree fn = build_call_1 (type, NULL_TREE, n);
10571 va_start (ap, n);
10572 for (i = 0; i < n; i++)
10573 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10574 va_end (ap);
10575 SET_EXPR_LOCATION (fn, loc);
10576 CALL_EXPR_IFN (fn) = ifn;
10577 return fn;
10578 }
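/* Illustration (a hypothetical sketch; LOC, OP0 and OP1 are placeholders,
   and IFN_UBSAN_CHECK_ADD is assumed to be among the internal functions
   listed in internal-fn.def):

     tree t = build_call_expr_internal_loc (loc, IFN_UBSAN_CHECK_ADD,
                                            TREE_TYPE (op0), 2, op0, op1);

   The resulting CALL_EXPR has a NULL CALL_EXPR_FN and carries the internal
   function code in CALL_EXPR_IFN instead.  */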
10579
10580 /* Create a new constant string literal and return a char* pointer to it.
10581 The STRING_CST value is the LEN characters at STR. */
10582 tree
10583 build_string_literal (int len, const char *str)
10584 {
10585 tree t, elem, index, type;
10586
10587 t = build_string (len, str);
10588 elem = build_type_variant (char_type_node, 1, 0);
10589 index = build_index_type (size_int (len - 1));
10590 type = build_array_type (elem, index);
10591 TREE_TYPE (t) = type;
10592 TREE_CONSTANT (t) = 1;
10593 TREE_READONLY (t) = 1;
10594 TREE_STATIC (t) = 1;
10595
10596 type = build_pointer_type (elem);
10597 t = build1 (ADDR_EXPR, type,
10598 build4 (ARRAY_REF, elem,
10599 t, integer_zero_node, NULL_TREE, NULL_TREE));
10600 return t;
10601 }
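/* Example (illustrative sketch): build_string_literal (4, "%d\n") creates
   a STRING_CST of four characters (LEN counts the terminating NUL) with an
   array type whose domain is 0 .. 3, and returns the address of its first
   element, suitable as an argument to a printf-style builtin.  */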
10602
10603
10604
10605 /* Return true if T (assumed to be a DECL) must be assigned a memory
10606 location. */
10607
10608 bool
10609 needs_to_live_in_memory (const_tree t)
10610 {
10611 return (TREE_ADDRESSABLE (t)
10612 || is_global_var (t)
10613 || (TREE_CODE (t) == RESULT_DECL
10614 && !DECL_BY_REFERENCE (t)
10615 && aggregate_value_p (t, current_function_decl)));
10616 }
10617
10618 /* Return the value of the constant X, sign-extended as a HOST_WIDE_INT. */
10619
10620 HOST_WIDE_INT
10621 int_cst_value (const_tree x)
10622 {
10623 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10624 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10625
10626 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10627 gcc_assert (cst_and_fits_in_hwi (x));
10628
10629 if (bits < HOST_BITS_PER_WIDE_INT)
10630 {
10631 bool negative = ((val >> (bits - 1)) & 1) != 0;
10632 if (negative)
10633 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10634 else
10635 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10636 }
10637
10638 return val;
10639 }
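/* Worked example (sketch only): for an INTEGER_CST of an 8-bit type whose
   low HOST_WIDE_INT bits are 0xff, BITS is 8 and the top bit of the value
   is set, so the sign-extension step above ORs in the high-order mask and
   int_cst_value returns -1 rather than 255, regardless of whether the type
   itself is signed.  */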
10640
10641 /* If TYPE is an integral or pointer type, return an integer type with
10642 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10643 if TYPE is already an integer type of signedness UNSIGNEDP. */
10644
10645 tree
10646 signed_or_unsigned_type_for (int unsignedp, tree type)
10647 {
10648 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10649 return type;
10650
10651 if (TREE_CODE (type) == VECTOR_TYPE)
10652 {
10653 tree inner = TREE_TYPE (type);
10654 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10655 if (!inner2)
10656 return NULL_TREE;
10657 if (inner == inner2)
10658 return type;
10659 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10660 }
10661
10662 if (!INTEGRAL_TYPE_P (type)
10663 && !POINTER_TYPE_P (type)
10664 && TREE_CODE (type) != OFFSET_TYPE)
10665 return NULL_TREE;
10666
10667 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10668 }
10669
10670 /* If TYPE is an integral or pointer type, return an integer type with
10671 the same precision which is unsigned, or itself if TYPE is already an
10672 unsigned integer type. */
10673
10674 tree
10675 unsigned_type_for (tree type)
10676 {
10677 return signed_or_unsigned_type_for (1, type);
10678 }
10679
10680 /* If TYPE is an integral or pointer type, return an integer type with
10681 the same precision which is signed, or itself if TYPE is already a
10682 signed integer type. */
10683
10684 tree
10685 signed_type_for (tree type)
10686 {
10687 return signed_or_unsigned_type_for (0, type);
10688 }
10689
10690 /* If TYPE is a vector type, return a signed integer vector type with the
10691 same width and number of subparts. Otherwise return boolean_type_node. */
10692
10693 tree
10694 truth_type_for (tree type)
10695 {
10696 if (TREE_CODE (type) == VECTOR_TYPE)
10697 {
10698 tree elem = lang_hooks.types.type_for_size
10699 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10700 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10701 }
10702 else
10703 return boolean_type_node;
10704 }
10705
10706 /* Returns the largest value obtainable by casting something in INNER type to
10707 OUTER type. */
10708
10709 tree
10710 upper_bound_in_type (tree outer, tree inner)
10711 {
10712 unsigned int det = 0;
10713 unsigned oprec = TYPE_PRECISION (outer);
10714 unsigned iprec = TYPE_PRECISION (inner);
10715 unsigned prec;
10716
10717 /* Compute a unique number for every combination. */
10718 det |= (oprec > iprec) ? 4 : 0;
10719 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10720 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10721
10722 /* Determine the exponent to use. */
10723 switch (det)
10724 {
10725 case 0:
10726 case 1:
10727 /* oprec <= iprec, outer: signed, inner: don't care. */
10728 prec = oprec - 1;
10729 break;
10730 case 2:
10731 case 3:
10732 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10733 prec = oprec;
10734 break;
10735 case 4:
10736 /* oprec > iprec, outer: signed, inner: signed. */
10737 prec = iprec - 1;
10738 break;
10739 case 5:
10740 /* oprec > iprec, outer: signed, inner: unsigned. */
10741 prec = iprec;
10742 break;
10743 case 6:
10744 /* oprec > iprec, outer: unsigned, inner: signed. */
10745 prec = oprec;
10746 break;
10747 case 7:
10748 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10749 prec = iprec;
10750 break;
10751 default:
10752 gcc_unreachable ();
10753 }
10754
10755 return wide_int_to_tree (outer,
10756 wi::mask (prec, false, TYPE_PRECISION (outer)));
10757 }
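/* Worked example (sketch only, assuming a target with 32-bit int and 8-bit
   signed char): for OUTER = unsigned_type_node and
   INNER = signed_char_type_node, DET is 4 | 2 = 6 and PREC is the full
   32-bit OPREC, because casting the signed char -1 to unsigned int yields
   0xffffffff.  With OUTER = integer_type_node instead, DET is 4 and PREC
   is IPREC - 1 = 7, giving an upper bound of 127.  */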
10758
10759 /* Returns the smallest value obtainable by casting something in INNER type to
10760 OUTER type. */
10761
10762 tree
10763 lower_bound_in_type (tree outer, tree inner)
10764 {
10765 unsigned oprec = TYPE_PRECISION (outer);
10766 unsigned iprec = TYPE_PRECISION (inner);
10767
10768 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10769 and obtain 0. */
10770 if (TYPE_UNSIGNED (outer)
10771 /* If we are widening something of an unsigned type, OUTER type
10772 contains all values of INNER type. In particular, both INNER
10773 and OUTER types have zero in common. */
10774 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10775 return build_int_cst (outer, 0);
10776 else
10777 {
10778 /* If we are widening a signed type to another signed type, we
10779 want to obtain -2^(iprec-1). If we are keeping the
10780 precision or narrowing to a signed type, we want to obtain
10781 -2^(oprec-1). */
10782 unsigned prec = oprec > iprec ? iprec : oprec;
10783 return wide_int_to_tree (outer,
10784 wi::mask (prec - 1, true,
10785 TYPE_PRECISION (outer)));
10786 }
10787 }
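/* Worked example (sketch only): widening signed char to signed int gives
   -2^7 = -128, since only values representable in the 8-bit INNER type can
   appear; keeping the precision or narrowing between signed types gives
   -2^(OPREC-1); and whenever OUTER is unsigned the bound is simply 0.  */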
10788
10789 /* Return nonzero if two operands that are suitable for PHI nodes are
10790 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10791 SSA_NAME or invariant. Note that this is strictly an optimization.
10792 That is, callers of this function can directly call operand_equal_p
10793 and get the same result, only slower. */
10794
10795 int
10796 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10797 {
10798 if (arg0 == arg1)
10799 return 1;
10800 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10801 return 0;
10802 return operand_equal_p (arg0, arg1, 0);
10803 }
10804
10805 /* Returns the number of zeros at the end of the binary representation of X. */
10806
10807 tree
10808 num_ending_zeros (const_tree x)
10809 {
10810 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10811 }
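/* For instance (illustrative sketch), for an INTEGER_CST holding 24
   (binary 11000) num_ending_zeros returns the constant 3 in the same type,
   and for any odd constant it returns 0.  */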
10812
10813
10814 #define WALK_SUBTREE(NODE) \
10815 do \
10816 { \
10817 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10818 if (result) \
10819 return result; \
10820 } \
10821 while (0)
10822
10823 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10824 to be walked whenever a type is seen in the tree. The rest of the operands
10825 and the return value are as for walk_tree. */
10826
10827 static tree
10828 walk_type_fields (tree type, walk_tree_fn func, void *data,
10829 hash_set<tree> *pset, walk_tree_lh lh)
10830 {
10831 tree result = NULL_TREE;
10832
10833 switch (TREE_CODE (type))
10834 {
10835 case POINTER_TYPE:
10836 case REFERENCE_TYPE:
10837 case VECTOR_TYPE:
10838 /* We have to worry about mutually recursive pointers. These can't
10839 be written in C. They can in Ada. It's pathological, but
10840 there's an ACATS test (c38102a) that checks it. Deal with this
10841 by checking if we're pointing to another pointer, that one
10842 points to another pointer, that one does too, and we have no htab.
10843 If so, get a hash table. We check three levels deep to avoid
10844 the cost of the hash table if we don't need one. */
10845 if (POINTER_TYPE_P (TREE_TYPE (type))
10846 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10847 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10848 && !pset)
10849 {
10850 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10851 func, data);
10852 if (result)
10853 return result;
10854
10855 break;
10856 }
10857
10858 /* ... fall through ... */
10859
10860 case COMPLEX_TYPE:
10861 WALK_SUBTREE (TREE_TYPE (type));
10862 break;
10863
10864 case METHOD_TYPE:
10865 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10866
10867 /* Fall through. */
10868
10869 case FUNCTION_TYPE:
10870 WALK_SUBTREE (TREE_TYPE (type));
10871 {
10872 tree arg;
10873
10874 /* We never want to walk into default arguments. */
10875 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10876 WALK_SUBTREE (TREE_VALUE (arg));
10877 }
10878 break;
10879
10880 case ARRAY_TYPE:
10881 /* Don't follow this node's type if it is a pointer, for fear that
10882 we'll have infinite recursion. If we have a PSET, then we
10883 need not fear. */
10884 if (pset
10885 || (!POINTER_TYPE_P (TREE_TYPE (type))
10886 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10887 WALK_SUBTREE (TREE_TYPE (type));
10888 WALK_SUBTREE (TYPE_DOMAIN (type));
10889 break;
10890
10891 case OFFSET_TYPE:
10892 WALK_SUBTREE (TREE_TYPE (type));
10893 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10894 break;
10895
10896 default:
10897 break;
10898 }
10899
10900 return NULL_TREE;
10901 }
10902
10903 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10904 called with the DATA and the address of each sub-tree. If FUNC returns a
10905 non-NULL value, the traversal is stopped, and the value returned by FUNC
10906 is returned. If PSET is non-NULL it is used to record the nodes visited,
10907 and to avoid visiting a node more than once. */
10908
10909 tree
10910 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10911 hash_set<tree> *pset, walk_tree_lh lh)
10912 {
10913 enum tree_code code;
10914 int walk_subtrees;
10915 tree result;
10916
10917 #define WALK_SUBTREE_TAIL(NODE) \
10918 do \
10919 { \
10920 tp = & (NODE); \
10921 goto tail_recurse; \
10922 } \
10923 while (0)
10924
10925 tail_recurse:
10926 /* Skip empty subtrees. */
10927 if (!*tp)
10928 return NULL_TREE;
10929
10930 /* Don't walk the same tree twice, if the user has requested
10931 that we avoid doing so. */
10932 if (pset && pset->add (*tp))
10933 return NULL_TREE;
10934
10935 /* Call the function. */
10936 walk_subtrees = 1;
10937 result = (*func) (tp, &walk_subtrees, data);
10938
10939 /* If we found something, return it. */
10940 if (result)
10941 return result;
10942
10943 code = TREE_CODE (*tp);
10944
10945 /* Even if we didn't, FUNC may have decided that there was nothing
10946 interesting below this point in the tree. */
10947 if (!walk_subtrees)
10948 {
10949 /* But we still need to check our siblings. */
10950 if (code == TREE_LIST)
10951 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10952 else if (code == OMP_CLAUSE)
10953 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10954 else
10955 return NULL_TREE;
10956 }
10957
10958 if (lh)
10959 {
10960 result = (*lh) (tp, &walk_subtrees, func, data, pset);
10961 if (result || !walk_subtrees)
10962 return result;
10963 }
10964
10965 switch (code)
10966 {
10967 case ERROR_MARK:
10968 case IDENTIFIER_NODE:
10969 case INTEGER_CST:
10970 case REAL_CST:
10971 case FIXED_CST:
10972 case VECTOR_CST:
10973 case STRING_CST:
10974 case BLOCK:
10975 case PLACEHOLDER_EXPR:
10976 case SSA_NAME:
10977 case FIELD_DECL:
10978 case RESULT_DECL:
10979 /* None of these have subtrees other than those already walked
10980 above. */
10981 break;
10982
10983 case TREE_LIST:
10984 WALK_SUBTREE (TREE_VALUE (*tp));
10985 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10986 break;
10987
10988 case TREE_VEC:
10989 {
10990 int len = TREE_VEC_LENGTH (*tp);
10991
10992 if (len == 0)
10993 break;
10994
10995 /* Walk all elements but the first. */
10996 while (--len)
10997 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
10998
10999 /* Now walk the first one as a tail call. */
11000 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11001 }
11002
11003 case COMPLEX_CST:
11004 WALK_SUBTREE (TREE_REALPART (*tp));
11005 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11006
11007 case CONSTRUCTOR:
11008 {
11009 unsigned HOST_WIDE_INT idx;
11010 constructor_elt *ce;
11011
11012 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11013 idx++)
11014 WALK_SUBTREE (ce->value);
11015 }
11016 break;
11017
11018 case SAVE_EXPR:
11019 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11020
11021 case BIND_EXPR:
11022 {
11023 tree decl;
11024 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11025 {
11026 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11027 into declarations that are just mentioned, rather than
11028 declared; they don't really belong to this part of the tree.
11029 And, we can see cycles: the initializer for a declaration
11030 can refer to the declaration itself. */
11031 WALK_SUBTREE (DECL_INITIAL (decl));
11032 WALK_SUBTREE (DECL_SIZE (decl));
11033 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11034 }
11035 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11036 }
11037
11038 case STATEMENT_LIST:
11039 {
11040 tree_stmt_iterator i;
11041 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11042 WALK_SUBTREE (*tsi_stmt_ptr (i));
11043 }
11044 break;
11045
11046 case OMP_CLAUSE:
11047 switch (OMP_CLAUSE_CODE (*tp))
11048 {
11049 case OMP_CLAUSE_PRIVATE:
11050 case OMP_CLAUSE_SHARED:
11051 case OMP_CLAUSE_FIRSTPRIVATE:
11052 case OMP_CLAUSE_COPYIN:
11053 case OMP_CLAUSE_COPYPRIVATE:
11054 case OMP_CLAUSE_FINAL:
11055 case OMP_CLAUSE_IF:
11056 case OMP_CLAUSE_NUM_THREADS:
11057 case OMP_CLAUSE_SCHEDULE:
11058 case OMP_CLAUSE_UNIFORM:
11059 case OMP_CLAUSE_DEPEND:
11060 case OMP_CLAUSE_NUM_TEAMS:
11061 case OMP_CLAUSE_THREAD_LIMIT:
11062 case OMP_CLAUSE_DEVICE:
11063 case OMP_CLAUSE_DIST_SCHEDULE:
11064 case OMP_CLAUSE_SAFELEN:
11065 case OMP_CLAUSE_SIMDLEN:
11066 case OMP_CLAUSE__LOOPTEMP_:
11067 case OMP_CLAUSE__SIMDUID_:
11068 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11069 /* FALLTHRU */
11070
11071 case OMP_CLAUSE_NOWAIT:
11072 case OMP_CLAUSE_ORDERED:
11073 case OMP_CLAUSE_DEFAULT:
11074 case OMP_CLAUSE_UNTIED:
11075 case OMP_CLAUSE_MERGEABLE:
11076 case OMP_CLAUSE_PROC_BIND:
11077 case OMP_CLAUSE_INBRANCH:
11078 case OMP_CLAUSE_NOTINBRANCH:
11079 case OMP_CLAUSE_FOR:
11080 case OMP_CLAUSE_PARALLEL:
11081 case OMP_CLAUSE_SECTIONS:
11082 case OMP_CLAUSE_TASKGROUP:
11083 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11084
11085 case OMP_CLAUSE_LASTPRIVATE:
11086 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11087 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11088 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11089
11090 case OMP_CLAUSE_COLLAPSE:
11091 {
11092 int i;
11093 for (i = 0; i < 3; i++)
11094 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11095 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11096 }
11097
11098 case OMP_CLAUSE_LINEAR:
11099 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11100 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11101 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11102 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11103
11104 case OMP_CLAUSE_ALIGNED:
11105 case OMP_CLAUSE_FROM:
11106 case OMP_CLAUSE_TO:
11107 case OMP_CLAUSE_MAP:
11108 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11109 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11110 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11111
11112 case OMP_CLAUSE_REDUCTION:
11113 {
11114 int i;
11115 for (i = 0; i < 4; i++)
11116 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11117 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11118 }
11119
11120 default:
11121 gcc_unreachable ();
11122 }
11123 break;
11124
11125 case TARGET_EXPR:
11126 {
11127 int i, len;
11128
11129 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11130 But, we only want to walk them once. */
11131 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11132 for (i = 0; i < len; ++i)
11133 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11134 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11135 }
11136
11137 case DECL_EXPR:
11138 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11139 defining. We only want to walk into these fields of a type in this
11140 case and not in the general case of a mere reference to the type.
11141
11142 The criterion is as follows: if the field can be an expression, it
11143 must be walked only here. This should be in keeping with the fields
11144 that are directly gimplified in gimplify_type_sizes in order for the
11145 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11146 variable-sized types.
11147
11148 Note that DECLs get walked as part of processing the BIND_EXPR. */
11149 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11150 {
11151 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11152 if (TREE_CODE (*type_p) == ERROR_MARK)
11153 return NULL_TREE;
11154
11155 /* Call the function for the type. See if it returns anything or
11156 doesn't want us to continue. If we are to continue, walk both
11157 the normal fields and those for the declaration case. */
11158 result = (*func) (type_p, &walk_subtrees, data);
11159 if (result || !walk_subtrees)
11160 return result;
11161
11162 /* But do not walk a pointed-to type since it may itself need to
11163 be walked in the declaration case if it isn't anonymous. */
11164 if (!POINTER_TYPE_P (*type_p))
11165 {
11166 result = walk_type_fields (*type_p, func, data, pset, lh);
11167 if (result)
11168 return result;
11169 }
11170
11171 /* If this is a record type, also walk the fields. */
11172 if (RECORD_OR_UNION_TYPE_P (*type_p))
11173 {
11174 tree field;
11175
11176 for (field = TYPE_FIELDS (*type_p); field;
11177 field = DECL_CHAIN (field))
11178 {
11179 /* We'd like to look at the type of the field, but we can
11180 easily get infinite recursion. So assume it's pointed
11181 to elsewhere in the tree. Also, ignore things that
11182 aren't fields. */
11183 if (TREE_CODE (field) != FIELD_DECL)
11184 continue;
11185
11186 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11187 WALK_SUBTREE (DECL_SIZE (field));
11188 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11189 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11190 WALK_SUBTREE (DECL_QUALIFIER (field));
11191 }
11192 }
11193
11194 /* Same for scalar types. */
11195 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11196 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11197 || TREE_CODE (*type_p) == INTEGER_TYPE
11198 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11199 || TREE_CODE (*type_p) == REAL_TYPE)
11200 {
11201 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11202 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11203 }
11204
11205 WALK_SUBTREE (TYPE_SIZE (*type_p));
11206 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11207 }
11208 /* FALLTHRU */
11209
11210 default:
11211 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11212 {
11213 int i, len;
11214
11215 /* Walk over all the sub-trees of this operand. */
11216 len = TREE_OPERAND_LENGTH (*tp);
11217
11218 /* Go through the subtrees. We need to do this in forward order so
11219 that the scope of a FOR_EXPR is handled properly. */
11220 if (len)
11221 {
11222 for (i = 0; i < len - 1; ++i)
11223 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11224 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11225 }
11226 }
11227 /* If this is a type, walk the needed fields in the type. */
11228 else if (TYPE_P (*tp))
11229 return walk_type_fields (*tp, func, data, pset, lh);
11230 break;
11231 }
11232
11233 /* We didn't find what we were looking for. */
11234 return NULL_TREE;
11235
11236 #undef WALK_SUBTREE_TAIL
11237 }
11238 #undef WALK_SUBTREE
11239
11240 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11241
11242 tree
11243 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11244 walk_tree_lh lh)
11245 {
11246 tree result;
11247
11248 hash_set<tree> pset;
11249 result = walk_tree_1 (tp, func, data, &pset, lh);
11250 return result;
11251 }
11252
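/* Illustrative sketch (not used by GCC itself): a walk_tree_fn callback
   that counts the nodes reachable from an expression, driven through the
   walk_tree_without_duplicates macro, which expands to
   walk_tree_without_duplicates_1 above.  The names count_nodes_r and
   count_tree_nodes are hypothetical.  */
#if 0
/* Hypothetical callback; DATA points to the running count.  */
static tree
count_nodes_r (tree *tp ATTRIBUTE_UNUSED, int *walk_subtrees ATTRIBUTE_UNUSED,
               void *data)
{
  unsigned *count = (unsigned *) data;
  (*count)++;
  return NULL_TREE;  /* NULL_TREE means "keep walking".  */
}

/* Hypothetical driver: count the distinct nodes under EXPR.  */
static unsigned
count_tree_nodes (tree expr)
{
  unsigned count = 0;
  walk_tree_without_duplicates (&expr, count_nodes_r, &count);
  return count;
}
#endif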
11253
11254 tree
11255 tree_block (tree t)
11256 {
11257 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11258
11259 if (IS_EXPR_CODE_CLASS (c))
11260 return LOCATION_BLOCK (t->exp.locus);
11261 gcc_unreachable ();
11262 return NULL;
11263 }
11264
11265 void
11266 tree_set_block (tree t, tree b)
11267 {
11268 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11269
11270 if (IS_EXPR_CODE_CLASS (c))
11271 {
11272 if (b)
11273 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11274 else
11275 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11276 }
11277 else
11278 gcc_unreachable ();
11279 }
11280
11281 /* Create a nameless artificial label and put it in the current
11282 function context. The label has a location of LOC. Returns the
11283 newly created label. */
11284
11285 tree
11286 create_artificial_label (location_t loc)
11287 {
11288 tree lab = build_decl (loc,
11289 LABEL_DECL, NULL_TREE, void_type_node);
11290
11291 DECL_ARTIFICIAL (lab) = 1;
11292 DECL_IGNORED_P (lab) = 1;
11293 DECL_CONTEXT (lab) = current_function_decl;
11294 return lab;
11295 }
11296
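/* Illustrative sketch only: how a caller might use the label created by
   create_artificial_label at GENERIC level.  The surrounding context and
   the variable names are hypothetical.  */
#if 0
  tree lab = create_artificial_label (UNKNOWN_LOCATION);
  tree lab_expr = build1 (LABEL_EXPR, void_type_node, lab);   /* lab:      */
  tree jump = build1 (GOTO_EXPR, void_type_node, lab);        /* goto lab; */
#endif
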
11297 /* Given a tree, try to return a useful variable name that we can use
11298 to prefix a temporary that is being assigned the value of the tree.
11299 I.e. given <temp> = &A, return A.  */
11300
11301 const char *
11302 get_name (tree t)
11303 {
11304 tree stripped_decl;
11305
11306 stripped_decl = t;
11307 STRIP_NOPS (stripped_decl);
11308 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11309 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11310 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11311 {
11312 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11313 if (!name)
11314 return NULL;
11315 return IDENTIFIER_POINTER (name);
11316 }
11317 else
11318 {
11319 switch (TREE_CODE (stripped_decl))
11320 {
11321 case ADDR_EXPR:
11322 return get_name (TREE_OPERAND (stripped_decl, 0));
11323 default:
11324 return NULL;
11325 }
11326 }
11327 }
11328
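/* Illustrative sketch only: DECL stands for some hypothetical named
   VAR_DECL "foo"; nothing here is compiled.  */
#if 0
  const char *n1 = get_name (decl);                          /* "foo" */
  /* &foo is handled by the ADDR_EXPR case above.  */
  const char *n2 = get_name (build_fold_addr_expr (decl));   /* "foo" */
#endif
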
11329 /* Return true if function type FNTYPE has a variable argument list. */
11330
11331 bool
11332 stdarg_p (const_tree fntype)
11333 {
11334 function_args_iterator args_iter;
11335 tree n = NULL_TREE, t;
11336
11337 if (!fntype)
11338 return false;
11339
11340 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11341 {
11342 n = t;
11343 }
11344
11345 return n != NULL_TREE && n != void_type_node;
11346 }
11347
11348 /* Return true if function type FNTYPE has a prototype. */
11349
11350 bool
11351 prototype_p (tree fntype)
11352 {
11353 tree t;
11354
11355 gcc_assert (fntype != NULL_TREE);
11356
11357 t = TYPE_ARG_TYPES (fntype);
11358 return (t != NULL_TREE);
11359 }
11360
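/* Illustrative sketch of the two predicates above; the FNTYPE_* names
   are hypothetical FUNCTION_TYPEs and nothing here is compiled.  */
#if 0
  stdarg_p (fntype_varargs);      /* int f (const char *, ...): true,
                                     the argument list does not end in void.  */
  stdarg_p (fntype_void);         /* int g (void): false.  */
  stdarg_p (fntype_unproto);      /* int h (): false, no argument list.  */
  prototype_p (fntype_unproto);   /* false, TYPE_ARG_TYPES is NULL.  */
#endif
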
11361 /* If BLOCK is inlined from an __attribute__((__artificial__))
11362 routine, return a pointer to the location from where it has been
11363 called. */
11364 location_t *
11365 block_nonartificial_location (tree block)
11366 {
11367 location_t *ret = NULL;
11368
11369 while (block && TREE_CODE (block) == BLOCK
11370 && BLOCK_ABSTRACT_ORIGIN (block))
11371 {
11372 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11373
11374 while (TREE_CODE (ao) == BLOCK
11375 && BLOCK_ABSTRACT_ORIGIN (ao)
11376 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11377 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11378
11379 if (TREE_CODE (ao) == FUNCTION_DECL)
11380 {
11381 /* If AO is an artificial inline, point RET to the
11382 call site locus at which it has been inlined and continue
11383 the loop, in case AO's caller is also an artificial
11384 inline. */
11385 if (DECL_DECLARED_INLINE_P (ao)
11386 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11387 ret = &BLOCK_SOURCE_LOCATION (block);
11388 else
11389 break;
11390 }
11391 else if (TREE_CODE (ao) != BLOCK)
11392 break;
11393
11394 block = BLOCK_SUPERCONTEXT (block);
11395 }
11396 return ret;
11397 }
11398
11399
11400 /* If EXP is inlined from an __attribute__((__artificial__))
11401 function, return the location of the original call expression. */
11402
11403 location_t
11404 tree_nonartificial_location (tree exp)
11405 {
11406 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11407
11408 if (loc)
11409 return *loc;
11410 else
11411 return EXPR_LOCATION (exp);
11412 }
11413
11414
11415 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11416 and TARGET_OPTION_NODE nodes. */
11417
11418 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11419
11420 static hashval_t
11421 cl_option_hash_hash (const void *x)
11422 {
11423 const_tree const t = (const_tree) x;
11424 const char *p;
11425 size_t i;
11426 size_t len = 0;
11427 hashval_t hash = 0;
11428
11429 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11430 {
11431 p = (const char *)TREE_OPTIMIZATION (t);
11432 len = sizeof (struct cl_optimization);
11433 }
11434
11435 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11436 {
11437 p = (const char *)TREE_TARGET_OPTION (t);
11438 len = sizeof (struct cl_target_option);
11439 }
11440
11441 else
11442 gcc_unreachable ();
11443
11444 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11445 something else. */
11446 for (i = 0; i < len; i++)
11447 if (p[i])
11448 hash = (hash << 4) ^ ((i << 2) | p[i]);
11449
11450 return hash;
11451 }
11452
11453 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11454 TARGET_OPTION tree node) is the same as that given by *Y, which is a
11455 node of the same kind. */
11456
11457 static int
11458 cl_option_hash_eq (const void *x, const void *y)
11459 {
11460 const_tree const xt = (const_tree) x;
11461 const_tree const yt = (const_tree) y;
11462 const char *xp;
11463 const char *yp;
11464 size_t len;
11465
11466 if (TREE_CODE (xt) != TREE_CODE (yt))
11467 return 0;
11468
11469 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11470 {
11471 xp = (const char *)TREE_OPTIMIZATION (xt);
11472 yp = (const char *)TREE_OPTIMIZATION (yt);
11473 len = sizeof (struct cl_optimization);
11474 }
11475
11476 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11477 {
11478 xp = (const char *)TREE_TARGET_OPTION (xt);
11479 yp = (const char *)TREE_TARGET_OPTION (yt);
11480 len = sizeof (struct cl_target_option);
11481 }
11482
11483 else
11484 gcc_unreachable ();
11485
11486 return (memcmp (xp, yp, len) == 0);
11487 }
11488
11489 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11490
11491 tree
11492 build_optimization_node (struct gcc_options *opts)
11493 {
11494 tree t;
11495 void **slot;
11496
11497 /* Use the cache of optimization nodes. */
11498
11499 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11500 opts);
11501
11502 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11503 t = (tree) *slot;
11504 if (!t)
11505 {
11506 /* Insert this one into the hash table. */
11507 t = cl_optimization_node;
11508 *slot = t;
11509
11510 /* Make a new node for next time round. */
11511 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11512 }
11513
11514 return t;
11515 }
11516
11517 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11518
11519 tree
11520 build_target_option_node (struct gcc_options *opts)
11521 {
11522 tree t;
11523 void **slot;
11524
11525 /* Use the cache of target option nodes. */
11526
11527 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11528 opts);
11529
11530 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11531 t = (tree) *slot;
11532 if (!t)
11533 {
11534 /* Insert this one into the hash table. */
11535 t = cl_target_option_node;
11536 *slot = t;
11537
11538 /* Make a new node for next time round. */
11539 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11540 }
11541
11542 return t;
11543 }
11544
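/* Illustrative sketch (assumed usage, not part of these functions): the
   per-function option nodes built above are typically recorded on a
   FUNCTION_DECL; FNDECL here is hypothetical.  */
#if 0
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
    = build_optimization_node (&global_options);
  DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
    = build_target_option_node (&global_options);
#endif
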
11545 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11546 Called through htab_traverse. */
11547
11548 static int
11549 prepare_target_option_node_for_pch (void **slot, void *)
11550 {
11551 tree node = (tree) *slot;
11552 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11553 TREE_TARGET_GLOBALS (node) = NULL;
11554 return 1;
11555 }
11556
11557 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11558 so that they aren't saved during PCH writing. */
11559
11560 void
11561 prepare_target_option_nodes_for_pch (void)
11562 {
11563 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11564 NULL);
11565 }
11566
11567 /* Determine the "ultimate origin" of a block. The block may be an inlined
11568 instance of an inlined instance of a block which is local to an inline
11569 function, so we have to trace all of the way back through the origin chain
11570 to find out what sort of node actually served as the original seed for the
11571 given block. */
11572
11573 tree
11574 block_ultimate_origin (const_tree block)
11575 {
11576 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11577
11578 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11579 nodes in the function to point to themselves; ignore that if
11580 we're trying to output the abstract instance of this function. */
11581 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11582 return NULL_TREE;
11583
11584 if (immediate_origin == NULL_TREE)
11585 return NULL_TREE;
11586 else
11587 {
11588 tree ret_val;
11589 tree lookahead = immediate_origin;
11590
11591 do
11592 {
11593 ret_val = lookahead;
11594 lookahead = (TREE_CODE (ret_val) == BLOCK
11595 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11596 }
11597 while (lookahead != NULL && lookahead != ret_val);
11598
11599 /* The block's abstract origin chain may not be the *ultimate* origin of
11600 the block. It could lead to a DECL that has an abstract origin set.
11601 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11602 will give us if it has one). Note that DECL's abstract origins are
11603 supposed to be the most distant ancestor (or so decl_ultimate_origin
11604 claims), so we don't need to loop following the DECL origins. */
11605 if (DECL_P (ret_val))
11606 return DECL_ORIGIN (ret_val);
11607
11608 return ret_val;
11609 }
11610 }
11611
11612 /* Return true iff conversion in EXP generates no instruction. Mark
11613 it inline so that we fully inline into the stripping functions even
11614 though we have two uses of this function. */
11615
11616 static inline bool
11617 tree_nop_conversion (const_tree exp)
11618 {
11619 tree outer_type, inner_type;
11620
11621 if (!CONVERT_EXPR_P (exp)
11622 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11623 return false;
11624 if (TREE_OPERAND (exp, 0) == error_mark_node)
11625 return false;
11626
11627 outer_type = TREE_TYPE (exp);
11628 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11629
11630 if (!inner_type)
11631 return false;
11632
11633 /* Use precision rather than machine mode when we can, which gives
11634 the correct answer even for submode (bit-field) types. */
11635 if ((INTEGRAL_TYPE_P (outer_type)
11636 || POINTER_TYPE_P (outer_type)
11637 || TREE_CODE (outer_type) == OFFSET_TYPE)
11638 && (INTEGRAL_TYPE_P (inner_type)
11639 || POINTER_TYPE_P (inner_type)
11640 || TREE_CODE (inner_type) == OFFSET_TYPE))
11641 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11642
11643 /* Otherwise fall back on comparing machine modes (e.g. for
11644 aggregate types, floats). */
11645 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11646 }
11647
11648 /* Return true iff conversion in EXP generates no instruction. Don't
11649 consider conversions changing the signedness. */
11650
11651 static bool
11652 tree_sign_nop_conversion (const_tree exp)
11653 {
11654 tree outer_type, inner_type;
11655
11656 if (!tree_nop_conversion (exp))
11657 return false;
11658
11659 outer_type = TREE_TYPE (exp);
11660 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11661
11662 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11663 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11664 }
11665
11666 /* Strip conversions from EXP according to tree_nop_conversion and
11667 return the resulting expression. */
11668
11669 tree
11670 tree_strip_nop_conversions (tree exp)
11671 {
11672 while (tree_nop_conversion (exp))
11673 exp = TREE_OPERAND (exp, 0);
11674 return exp;
11675 }
11676
11677 /* Strip conversions from EXP according to tree_sign_nop_conversion
11678 and return the resulting expression. */
11679
11680 tree
11681 tree_strip_sign_nop_conversions (tree exp)
11682 {
11683 while (tree_sign_nop_conversion (exp))
11684 exp = TREE_OPERAND (exp, 0);
11685 return exp;
11686 }
11687
11688 /* Strip any floating point extensions from EXP and return the result. */
11689 tree
11690 strip_float_extensions (tree exp)
11691 {
11692 tree sub, expt, subt;
11693
11694 /* For a floating point constant, look up the narrowest type that can hold
11695 it properly and handle it like (type)(narrowest_type)constant.
11696 This way we can optimize for instance a=a*2.0 where "a" is float
11697 but 2.0 is a double constant. */
11698 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11699 {
11700 REAL_VALUE_TYPE orig;
11701 tree type = NULL;
11702
11703 orig = TREE_REAL_CST (exp);
11704 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11705 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11706 type = float_type_node;
11707 else if (TYPE_PRECISION (TREE_TYPE (exp))
11708 > TYPE_PRECISION (double_type_node)
11709 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11710 type = double_type_node;
11711 if (type)
11712 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11713 }
11714
11715 if (!CONVERT_EXPR_P (exp))
11716 return exp;
11717
11718 sub = TREE_OPERAND (exp, 0);
11719 subt = TREE_TYPE (sub);
11720 expt = TREE_TYPE (exp);
11721
11722 if (!FLOAT_TYPE_P (subt))
11723 return exp;
11724
11725 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11726 return exp;
11727
11728 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11729 return exp;
11730
11731 return strip_float_extensions (sub);
11732 }
11733
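/* Illustrative sketch only: FLOAT_VAR is a hypothetical float variable.
   The widening conversion is stripped again; likewise a double REAL_CST
   2.0 would be rebuilt as a float constant, since it is exactly
   representable in float.  */
#if 0
  tree widened = fold_convert (double_type_node, float_var);
  tree narrow = strip_float_extensions (widened);   /* FLOAT_VAR again.  */
#endif
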
11734 /* Strip out all handled components that produce invariant
11735 offsets. */
11736
11737 const_tree
11738 strip_invariant_refs (const_tree op)
11739 {
11740 while (handled_component_p (op))
11741 {
11742 switch (TREE_CODE (op))
11743 {
11744 case ARRAY_REF:
11745 case ARRAY_RANGE_REF:
11746 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11747 || TREE_OPERAND (op, 2) != NULL_TREE
11748 || TREE_OPERAND (op, 3) != NULL_TREE)
11749 return NULL;
11750 break;
11751
11752 case COMPONENT_REF:
11753 if (TREE_OPERAND (op, 2) != NULL_TREE)
11754 return NULL;
11755 break;
11756
11757 default:;
11758 }
11759 op = TREE_OPERAND (op, 0);
11760 }
11761
11762 return op;
11763 }
11764
11765 static GTY(()) tree gcc_eh_personality_decl;
11766
11767 /* Return the GCC personality function decl. */
11768
11769 tree
11770 lhd_gcc_personality (void)
11771 {
11772 if (!gcc_eh_personality_decl)
11773 gcc_eh_personality_decl = build_personality_function ("gcc");
11774 return gcc_eh_personality_decl;
11775 }
11776
11777 /* TARGET is the call target of a GIMPLE call statement
11778 (obtained by gimple_call_fn). Return true if it is an
11779 OBJ_TYPE_REF representing a virtual call to a C++ method.
11780 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
11781 through a cast, where the middle-end devirtualization machinery
11782 can't apply.) */
11783
11784 bool
11785 virtual_method_call_p (tree target)
11786 {
11787 if (TREE_CODE (target) != OBJ_TYPE_REF)
11788 return false;
11789 target = TREE_TYPE (target);
11790 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11791 target = TREE_TYPE (target);
11792 if (TREE_CODE (target) == FUNCTION_TYPE)
11793 return false;
11794 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11795 return true;
11796 }
11797
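/* Illustrative sketch of a caller (CALL_STMT is hypothetical): check a
   GIMPLE call for a C++ virtual call and recover the class of the
   OBJ_TYPE_REF via obj_type_ref_class below.  */
#if 0
  tree fn = gimple_call_fn (call_stmt);
  if (fn && virtual_method_call_p (fn))
    {
      tree klass = obj_type_ref_class (fn);
      /* ... hand KLASS to the devirtualization machinery ...  */
    }
#endif
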
11798 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
11799
11800 tree
11801 obj_type_ref_class (tree ref)
11802 {
11803 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11804 ref = TREE_TYPE (ref);
11805 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11806 ref = TREE_TYPE (ref);
11807 /* We look for the type that THIS points to. ObjC also builds
11808 OBJ_TYPE_REF for non-method calls; their first parameter
11809 ID, however, also corresponds to the class type. */
11810 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11811 || TREE_CODE (ref) == FUNCTION_TYPE);
11812 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11813 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11814 return TREE_TYPE (ref);
11815 }
11816
11817 /* Return true if T is in an anonymous namespace. */
11818
11819 bool
11820 type_in_anonymous_namespace_p (const_tree t)
11821 {
11822 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11823 builtin types; those have a NULL TYPE_CONTEXT. */
11824 if (!TYPE_CONTEXT (t))
11825 return false;
11826 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11827 }
11828
11829 /* Try to find a base info of BINFO that would have its field decl at offset
11830 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11831 found, return it; otherwise return NULL_TREE. */
11832
11833 tree
11834 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11835 {
11836 tree type = BINFO_TYPE (binfo);
11837
11838 while (true)
11839 {
11840 HOST_WIDE_INT pos, size;
11841 tree fld;
11842 int i;
11843
11844 if (types_same_for_odr (type, expected_type))
11845 return binfo;
11846 if (offset < 0)
11847 return NULL_TREE;
11848
11849 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11850 {
11851 if (TREE_CODE (fld) != FIELD_DECL)
11852 continue;
11853
11854 pos = int_bit_position (fld);
11855 size = tree_to_uhwi (DECL_SIZE (fld));
11856 if (pos <= offset && (pos + size) > offset)
11857 break;
11858 }
11859 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11860 return NULL_TREE;
11861
11862 if (!DECL_ARTIFICIAL (fld))
11863 {
11864 binfo = TYPE_BINFO (TREE_TYPE (fld));
11865 if (!binfo)
11866 return NULL_TREE;
11867 }
11868 /* Offset 0 indicates the primary base, whose vtable contents are
11869 represented in the binfo for the derived class. */
11870 else if (offset != 0)
11871 {
11872 tree base_binfo, binfo2 = binfo;
11873
11874 /* Find the BINFO corresponding to FLD. This is made a bit harder
11875 by the fact that with virtual inheritance we may need to walk down
11876 the non-virtual inheritance chain. */
11877 while (true)
11878 {
11879 tree containing_binfo = NULL, found_binfo = NULL;
11880 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11881 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11882 {
11883 found_binfo = base_binfo;
11884 break;
11885 }
11886 else
11887 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11888 - tree_to_shwi (BINFO_OFFSET (binfo)))
11889 * BITS_PER_UNIT < pos
11890 /* Rule out types with no virtual methods, or we can get confused
11891 here by zero-sized bases. */
11892 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11893 && (!containing_binfo
11894 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11895 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11896 containing_binfo = base_binfo;
11897 if (found_binfo)
11898 {
11899 binfo = found_binfo;
11900 break;
11901 }
11902 if (!containing_binfo)
11903 return NULL_TREE;
11904 binfo2 = containing_binfo;
11905 }
11906 }
11907
11908 type = TREE_TYPE (fld);
11909 offset -= pos;
11910 }
11911 }
11912
11913 /* Returns true if X is a typedef decl. */
11914
11915 bool
11916 is_typedef_decl (tree x)
11917 {
11918 return (x && TREE_CODE (x) == TYPE_DECL
11919 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11920 }
11921
11922 /* Returns true iff TYPE is a type variant created for a typedef. */
11923
11924 bool
11925 typedef_variant_p (tree type)
11926 {
11927 return is_typedef_decl (TYPE_NAME (type));
11928 }
11929
11930 /* Warn about a use of an identifier which was marked deprecated. */
11931 void
11932 warn_deprecated_use (tree node, tree attr)
11933 {
11934 const char *msg;
11935
11936 if (node == 0 || !warn_deprecated_decl)
11937 return;
11938
11939 if (!attr)
11940 {
11941 if (DECL_P (node))
11942 attr = DECL_ATTRIBUTES (node);
11943 else if (TYPE_P (node))
11944 {
11945 tree decl = TYPE_STUB_DECL (node);
11946 if (decl)
11947 attr = lookup_attribute ("deprecated",
11948 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11949 }
11950 }
11951
11952 if (attr)
11953 attr = lookup_attribute ("deprecated", attr);
11954
11955 if (attr)
11956 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
11957 else
11958 msg = NULL;
11959
11960 if (DECL_P (node))
11961 {
11962 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
11963 if (msg)
11964 warning (OPT_Wdeprecated_declarations,
11965 "%qD is deprecated (declared at %r%s:%d%R): %s",
11966 node, "locus", xloc.file, xloc.line, msg);
11967 else
11968 warning (OPT_Wdeprecated_declarations,
11969 "%qD is deprecated (declared at %r%s:%d%R)",
11970 node, "locus", xloc.file, xloc.line);
11971 }
11972 else if (TYPE_P (node))
11973 {
11974 tree what = NULL_TREE;
11975 tree decl = TYPE_STUB_DECL (node);
11976
11977 if (TYPE_NAME (node))
11978 {
11979 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
11980 what = TYPE_NAME (node);
11981 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
11982 && DECL_NAME (TYPE_NAME (node)))
11983 what = DECL_NAME (TYPE_NAME (node));
11984 }
11985
11986 if (decl)
11987 {
11988 expanded_location xloc
11989 = expand_location (DECL_SOURCE_LOCATION (decl));
11990 if (what)
11991 {
11992 if (msg)
11993 warning (OPT_Wdeprecated_declarations,
11994 "%qE is deprecated (declared at %r%s:%d%R): %s",
11995 what, "locus", xloc.file, xloc.line, msg);
11996 else
11997 warning (OPT_Wdeprecated_declarations,
11998 "%qE is deprecated (declared at %r%s:%d%R)",
11999 what, "locus", xloc.file, xloc.line);
12000 }
12001 else
12002 {
12003 if (msg)
12004 warning (OPT_Wdeprecated_declarations,
12005 "type is deprecated (declared at %r%s:%d%R): %s",
12006 "locus", xloc.file, xloc.line, msg);
12007 else
12008 warning (OPT_Wdeprecated_declarations,
12009 "type is deprecated (declared at %r%s:%d%R)",
12010 "locus", xloc.file, xloc.line);
12011 }
12012 }
12013 else
12014 {
12015 if (what)
12016 {
12017 if (msg)
12018 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12019 what, msg);
12020 else
12021 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12022 }
12023 else
12024 {
12025 if (msg)
12026 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12027 msg);
12028 else
12029 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12030 }
12031 }
12032 }
12033 }
12034
12035 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12036 somewhere in it. */
12037
12038 bool
12039 contains_bitfld_component_ref_p (const_tree ref)
12040 {
12041 while (handled_component_p (ref))
12042 {
12043 if (TREE_CODE (ref) == COMPONENT_REF
12044 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12045 return true;
12046 ref = TREE_OPERAND (ref, 0);
12047 }
12048
12049 return false;
12050 }
12051
12052 /* Try to determine whether a TRY_CATCH expression can fall through.
12053 This is a subroutine of block_may_fallthru. */
12054
12055 static bool
12056 try_catch_may_fallthru (const_tree stmt)
12057 {
12058 tree_stmt_iterator i;
12059
12060 /* If the TRY block can fall through, the whole TRY_CATCH can
12061 fall through. */
12062 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12063 return true;
12064
12065 i = tsi_start (TREE_OPERAND (stmt, 1));
12066 switch (TREE_CODE (tsi_stmt (i)))
12067 {
12068 case CATCH_EXPR:
12069 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12070 catch expression and a body. The whole TRY_CATCH may fall
12071 through iff any of the catch bodies falls through. */
12072 for (; !tsi_end_p (i); tsi_next (&i))
12073 {
12074 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12075 return true;
12076 }
12077 return false;
12078
12079 case EH_FILTER_EXPR:
12080 /* The exception filter expression only matters if there is an
12081 exception. If the exception does not match EH_FILTER_TYPES,
12082 we will execute EH_FILTER_FAILURE, and we will fall through
12083 if that falls through. If the exception does match
12084 EH_FILTER_TYPES, the stack unwinder will continue up the
12085 stack, so we will not fall through. We don't know whether we
12086 will throw an exception which matches EH_FILTER_TYPES or not,
12087 so we just ignore EH_FILTER_TYPES and assume that we might
12088 throw an exception which doesn't match. */
12089 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12090
12091 default:
12092 /* This case represents statements to be executed when an
12093 exception occurs. Those statements are implicitly followed
12094 by a RESX statement to resume execution after the exception.
12095 So in this case the TRY_CATCH never falls through. */
12096 return false;
12097 }
12098 }
12099
12100 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12101 need not be 100% accurate; simply be conservative and return true if we
12102 don't know. This is used only to avoid stupidly generating extra code.
12103 If we're wrong, we'll just delete the extra code later. */
12104
12105 bool
12106 block_may_fallthru (const_tree block)
12107 {
12108 /* This CONST_CAST is okay because expr_last returns its argument
12109 unmodified and we assign it to a const_tree. */
12110 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12111
12112 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12113 {
12114 case GOTO_EXPR:
12115 case RETURN_EXPR:
12116 /* Easy cases. If the last statement of the block implies
12117 control transfer, then we can't fall through. */
12118 return false;
12119
12120 case SWITCH_EXPR:
12121 /* If SWITCH_LABELS is set, this is lowered, and represents a
12122 branch to a selected label and hence cannot fall through.
12123 Otherwise SWITCH_BODY is set, and the switch can fall
12124 through. */
12125 return SWITCH_LABELS (stmt) == NULL_TREE;
12126
12127 case COND_EXPR:
12128 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12129 return true;
12130 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12131
12132 case BIND_EXPR:
12133 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12134
12135 case TRY_CATCH_EXPR:
12136 return try_catch_may_fallthru (stmt);
12137
12138 case TRY_FINALLY_EXPR:
12139 /* The finally clause is always executed after the try clause,
12140 so if it does not fall through, then the try-finally will not
12141 fall through. Otherwise, if the try clause does not fall
12142 through, then when the finally clause falls through it will
12143 resume execution wherever the try clause was going. So the
12144 whole try-finally will only fall through if both the try
12145 clause and the finally clause fall through. */
12146 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12147 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12148
12149 case MODIFY_EXPR:
12150 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12151 stmt = TREE_OPERAND (stmt, 1);
12152 else
12153 return true;
12154 /* FALLTHRU */
12155
12156 case CALL_EXPR:
12157 /* Functions that do not return do not fall through. */
12158 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12159
12160 case CLEANUP_POINT_EXPR:
12161 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12162
12163 case TARGET_EXPR:
12164 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12165
12166 case ERROR_MARK:
12167 return true;
12168
12169 default:
12170 return lang_hooks.block_may_fallthru (stmt);
12171 }
12172 }
12173
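/* Illustrative sketch only: BIND is a hypothetical BIND_EXPR.  A body
   ending in a RETURN_EXPR or GOTO_EXPR does not fall through; one ending
   in a plain MODIFY_EXPR does; a call to a noreturn function does not,
   via the ECF_NORETURN check above.  */
#if 0
  if (block_may_fallthru (BIND_EXPR_BODY (bind)))
    append_to_statement_list (build1 (RETURN_EXPR, void_type_node, NULL_TREE),
                              &BIND_EXPR_BODY (bind));
#endif
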
12174 /* True if we are using EH to handle cleanups. */
12175 static bool using_eh_for_cleanups_flag = false;
12176
12177 /* This routine is called from front ends to indicate eh should be used for
12178 cleanups. */
12179 void
12180 using_eh_for_cleanups (void)
12181 {
12182 using_eh_for_cleanups_flag = true;
12183 }
12184
12185 /* Query whether EH is used for cleanups. */
12186 bool
12187 using_eh_for_cleanups_p (void)
12188 {
12189 return using_eh_for_cleanups_flag;
12190 }
12191
12192 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12193 const char *
12194 get_tree_code_name (enum tree_code code)
12195 {
12196 const char *invalid = "<invalid tree code>";
12197
12198 if (code >= MAX_TREE_CODES)
12199 return invalid;
12200
12201 return tree_code_name[code];
12202 }
12203
12204 /* Drops the TREE_OVERFLOW flag from T. */
12205
12206 tree
12207 drop_tree_overflow (tree t)
12208 {
12209 gcc_checking_assert (TREE_OVERFLOW (t));
12210
12211 /* For tree codes with a sharing machinery re-build the result. */
12212 if (TREE_CODE (t) == INTEGER_CST)
12213 return wide_int_to_tree (TREE_TYPE (t), t);
12214
12215 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12216 and drop the flag. */
12217 t = copy_node (t);
12218 TREE_OVERFLOW (t) = 0;
12219 return t;
12220 }
12221
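/* Typical (sketched) use on a hypothetical constant CST: folding may
   produce an INTEGER_CST with TREE_OVERFLOW set, and callers that do not
   care about the overflow bit clear it before reusing the value.  */
#if 0
  if (TREE_OVERFLOW_P (cst))
    cst = drop_tree_overflow (cst);
#endif
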
12222 /* Given a memory reference expression T, return its base address.
12223 The base address of a memory reference expression is the main
12224 object being referenced. For instance, the base address for
12225 'array[i].fld[j]' is 'array'. You can think of this as stripping
12226 away the offset part from a memory address.
12227
12228 This function calls handled_component_p to strip away all the inner
12229 parts of the memory reference until it reaches the base object. */
12230
12231 tree
12232 get_base_address (tree t)
12233 {
12234 while (handled_component_p (t))
12235 t = TREE_OPERAND (t, 0);
12236
12237 if ((TREE_CODE (t) == MEM_REF
12238 || TREE_CODE (t) == TARGET_MEM_REF)
12239 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12240 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12241
12242 /* ??? Either the alias oracle or all callers need to properly deal
12243 with WITH_SIZE_EXPRs before we can look through those. */
12244 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12245 return NULL_TREE;
12246
12247 return t;
12248 }
12249
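/* Illustrative sketch only: REF is a hypothetical memory reference.
   get_base_address ('a.b[i]') is 'a'; for 'MEM_REF[&x, 8]' it is 'x';
   for an indirect access through a pointer it is the MEM_REF itself.  */
#if 0
  tree base = get_base_address (ref);
  if (base != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && !TREE_STATIC (base))
    {
      /* REF ultimately accesses a function-local variable.  */
    }
#endif
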
12250 #include "gt-tree.h"