gcc/tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in c-typeck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "function.h"
42 #include "obstack.h"
43 #include "toplev.h" /* get_random_seed */
44 #include "hashtab.h"
45 #include "inchash.h"
46 #include "filenames.h"
47 #include "output.h"
48 #include "target.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "basic-block.h"
54 #include "bitmap.h"
55 #include "tree-ssa-alias.h"
56 #include "internal-fn.h"
57 #include "gimple-expr.h"
58 #include "is-a.h"
59 #include "gimple.h"
60 #include "gimple-iterator.h"
61 #include "gimplify.h"
62 #include "gimple-ssa.h"
63 #include "cgraph.h"
64 #include "tree-phinodes.h"
65 #include "stringpool.h"
66 #include "tree-ssanames.h"
67 #include "expr.h"
68 #include "tree-dfa.h"
69 #include "params.h"
70 #include "tree-pass.h"
71 #include "langhooks-def.h"
72 #include "diagnostic.h"
73 #include "tree-diagnostic.h"
74 #include "tree-pretty-print.h"
75 #include "except.h"
76 #include "debug.h"
77 #include "intl.h"
78 #include "wide-int.h"
79 #include "builtins.h"
80
81 /* Tree code classes. */
82
83 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
84 #define END_OF_BASE_TREE_CODES tcc_exceptional,
85
86 const enum tree_code_class tree_code_type[] = {
87 #include "all-tree.def"
88 };
89
90 #undef DEFTREECODE
91 #undef END_OF_BASE_TREE_CODES
92
93 /* Table indexed by tree code giving number of expression
94 operands beyond the fixed part of the node structure.
95 Not used for types or decls. */
96
97 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
98 #define END_OF_BASE_TREE_CODES 0,
99
100 const unsigned char tree_code_length[] = {
101 #include "all-tree.def"
102 };
103
104 #undef DEFTREECODE
105 #undef END_OF_BASE_TREE_CODES
106
107 /* Names of tree components.
108 Used for printing out the tree and error messages. */
109 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
110 #define END_OF_BASE_TREE_CODES "@dummy",
111
112 static const char *const tree_code_name[] = {
113 #include "all-tree.def"
114 };
115
116 #undef DEFTREECODE
117 #undef END_OF_BASE_TREE_CODES
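
/* Illustrative note (not part of the original source): a tree.def entry
   such as

     DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   contributes "tcc_binary," to tree_code_type, "2," to tree_code_length
   and "\"plus_expr\"," to tree_code_name above, so the three tables stay
   in sync with the codes listed in all-tree.def.  */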
118
119 /* Each tree code class has an associated string representation.
120 These must correspond to the tree_code_class entries. */
121
122 const char *const tree_code_class_strings[] =
123 {
124 "exceptional",
125 "constant",
126 "type",
127 "declaration",
128 "reference",
129 "comparison",
130 "unary",
131 "binary",
132 "statement",
133 "vl_exp",
134 "expression"
135 };
136
137 /* obstack.[ch] explicitly declined to prototype this. */
138 extern int _obstack_allocated_p (struct obstack *h, void *obj);
139
140 /* Statistics-gathering stuff. */
141
142 static int tree_code_counts[MAX_TREE_CODES];
143 int tree_node_counts[(int) all_kinds];
144 int tree_node_sizes[(int) all_kinds];
145
146 /* Keep in sync with tree.h:enum tree_node_kind. */
147 static const char * const tree_node_kind_names[] = {
148 "decls",
149 "types",
150 "blocks",
151 "stmts",
152 "refs",
153 "exprs",
154 "constants",
155 "identifiers",
156 "vecs",
157 "binfos",
158 "ssa names",
159 "constructors",
160 "random kinds",
161 "lang_decl kinds",
162 "lang_type kinds",
163 "omp clauses",
164 };
165
166 /* Unique id for next decl created. */
167 static GTY(()) int next_decl_uid;
168 /* Unique id for next type created. */
169 static GTY(()) int next_type_uid = 1;
170 /* Unique id for next debug decl created. Use negative numbers,
171 to catch erroneous uses. */
172 static GTY(()) int next_debug_decl_uid;
173
174 /* Since we cannot rehash a type after it is in the table, we have to
175 keep the hash code. */
176
177 struct GTY(()) type_hash {
178 unsigned long hash;
179 tree type;
180 };
181
182 /* Initial size of the hash table (rounded to next prime). */
183 #define TYPE_HASH_INITIAL_SIZE 1000
184
185 /* Now here is the hash table. When recording a type, it is added to
186 the slot whose index is the hash code. Note that the hash table is
187 used for several kinds of types (function types, array types and
188 array index range types, for now). While all these live in the
189 same table, they are completely independent, and the hash code is
190 computed differently for each of these. */
191
192 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
193 htab_t type_hash_table;
194
195 /* Hash table and temporary node for larger integer const values. */
196 static GTY (()) tree int_cst_node;
197 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
198 htab_t int_cst_hash_table;
199
200 /* Hash table for optimization flags and target option flags. Use the same
201 hash table for both sets of options. Nodes for building the current
202 optimization and target option nodes. The assumption is that most of the
203 time the options created will already be in the hash table, so we avoid
204 allocating and freeing up a node repeatedly. */
205 static GTY (()) tree cl_optimization_node;
206 static GTY (()) tree cl_target_option_node;
207 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
208 htab_t cl_option_hash_table;
209
210 /* General tree->tree mapping structure for use in hash tables. */
211
212
213 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
214 htab_t debug_expr_for_decl;
215
216 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
217 htab_t value_expr_for_decl;
218
219 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
220 htab_t debug_args_for_decl;
221
222 static void set_type_quals (tree, int);
223 static int type_hash_eq (const void *, const void *);
224 static hashval_t type_hash_hash (const void *);
225 static hashval_t int_cst_hash_hash (const void *);
226 static int int_cst_hash_eq (const void *, const void *);
227 static hashval_t cl_option_hash_hash (const void *);
228 static int cl_option_hash_eq (const void *, const void *);
229 static void print_type_hash_statistics (void);
230 static void print_debug_expr_statistics (void);
231 static void print_value_expr_statistics (void);
232 static int type_hash_marked_p (const void *);
233 static void type_hash_list (const_tree, inchash::hash &);
234 static void attribute_hash_list (const_tree, inchash::hash &);
235
236 tree global_trees[TI_MAX];
237 tree integer_types[itk_none];
238
239 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
240
241 /* Number of operands for each OpenMP clause. */
242 unsigned const char omp_clause_num_ops[] =
243 {
244 0, /* OMP_CLAUSE_ERROR */
245 1, /* OMP_CLAUSE_PRIVATE */
246 1, /* OMP_CLAUSE_SHARED */
247 1, /* OMP_CLAUSE_FIRSTPRIVATE */
248 2, /* OMP_CLAUSE_LASTPRIVATE */
249 4, /* OMP_CLAUSE_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 2, /* OMP_CLAUSE_ALIGNED */
254 1, /* OMP_CLAUSE_DEPEND */
255 1, /* OMP_CLAUSE_UNIFORM */
256 2, /* OMP_CLAUSE_FROM */
257 2, /* OMP_CLAUSE_TO */
258 2, /* OMP_CLAUSE_MAP */
259 1, /* OMP_CLAUSE__LOOPTEMP_ */
260 1, /* OMP_CLAUSE_IF */
261 1, /* OMP_CLAUSE_NUM_THREADS */
262 1, /* OMP_CLAUSE_SCHEDULE */
263 0, /* OMP_CLAUSE_NOWAIT */
264 0, /* OMP_CLAUSE_ORDERED */
265 0, /* OMP_CLAUSE_DEFAULT */
266 3, /* OMP_CLAUSE_COLLAPSE */
267 0, /* OMP_CLAUSE_UNTIED */
268 1, /* OMP_CLAUSE_FINAL */
269 0, /* OMP_CLAUSE_MERGEABLE */
270 1, /* OMP_CLAUSE_DEVICE */
271 1, /* OMP_CLAUSE_DIST_SCHEDULE */
272 0, /* OMP_CLAUSE_INBRANCH */
273 0, /* OMP_CLAUSE_NOTINBRANCH */
274 1, /* OMP_CLAUSE_NUM_TEAMS */
275 1, /* OMP_CLAUSE_THREAD_LIMIT */
276 0, /* OMP_CLAUSE_PROC_BIND */
277 1, /* OMP_CLAUSE_SAFELEN */
278 1, /* OMP_CLAUSE_SIMDLEN */
279 0, /* OMP_CLAUSE_FOR */
280 0, /* OMP_CLAUSE_PARALLEL */
281 0, /* OMP_CLAUSE_SECTIONS */
282 0, /* OMP_CLAUSE_TASKGROUP */
283 1, /* OMP_CLAUSE__SIMDUID_ */
284 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
285 };
286
287 const char * const omp_clause_code_name[] =
288 {
289 "error_clause",
290 "private",
291 "shared",
292 "firstprivate",
293 "lastprivate",
294 "reduction",
295 "copyin",
296 "copyprivate",
297 "linear",
298 "aligned",
299 "depend",
300 "uniform",
301 "from",
302 "to",
303 "map",
304 "_looptemp_",
305 "if",
306 "num_threads",
307 "schedule",
308 "nowait",
309 "ordered",
310 "default",
311 "collapse",
312 "untied",
313 "final",
314 "mergeable",
315 "device",
316 "dist_schedule",
317 "inbranch",
318 "notinbranch",
319 "num_teams",
320 "thread_limit",
321 "proc_bind",
322 "safelen",
323 "simdlen",
324 "for",
325 "parallel",
326 "sections",
327 "taskgroup",
328 "_simduid_",
329 "_Cilk_for_count_"
330 };
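
/* Illustrative example (not part of the original source): both tables are
   indexed by the omp_clause_code enumeration, so for a reduction clause

     omp_clause_num_ops[OMP_CLAUSE_REDUCTION]    is 4
     omp_clause_code_name[OMP_CLAUSE_REDUCTION]  is "reduction"

   matching the entries above; tree_size uses the first table to compute
   the size of an OMP_CLAUSE node.  */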
331
332
333 /* Return the tree node structure used by tree code CODE. */
334
335 static inline enum tree_node_structure_enum
336 tree_node_structure_for_code (enum tree_code code)
337 {
338 switch (TREE_CODE_CLASS (code))
339 {
340 case tcc_declaration:
341 {
342 switch (code)
343 {
344 case FIELD_DECL:
345 return TS_FIELD_DECL;
346 case PARM_DECL:
347 return TS_PARM_DECL;
348 case VAR_DECL:
349 return TS_VAR_DECL;
350 case LABEL_DECL:
351 return TS_LABEL_DECL;
352 case RESULT_DECL:
353 return TS_RESULT_DECL;
354 case DEBUG_EXPR_DECL:
355 return TS_DECL_WRTL;
356 case CONST_DECL:
357 return TS_CONST_DECL;
358 case TYPE_DECL:
359 return TS_TYPE_DECL;
360 case FUNCTION_DECL:
361 return TS_FUNCTION_DECL;
362 case TRANSLATION_UNIT_DECL:
363 return TS_TRANSLATION_UNIT_DECL;
364 default:
365 return TS_DECL_NON_COMMON;
366 }
367 }
368 case tcc_type:
369 return TS_TYPE_NON_COMMON;
370 case tcc_reference:
371 case tcc_comparison:
372 case tcc_unary:
373 case tcc_binary:
374 case tcc_expression:
375 case tcc_statement:
376 case tcc_vl_exp:
377 return TS_EXP;
378 default: /* tcc_constant and tcc_exceptional */
379 break;
380 }
381 switch (code)
382 {
383 /* tcc_constant cases. */
384 case VOID_CST: return TS_TYPED;
385 case INTEGER_CST: return TS_INT_CST;
386 case REAL_CST: return TS_REAL_CST;
387 case FIXED_CST: return TS_FIXED_CST;
388 case COMPLEX_CST: return TS_COMPLEX;
389 case VECTOR_CST: return TS_VECTOR;
390 case STRING_CST: return TS_STRING;
391 /* tcc_exceptional cases. */
392 case ERROR_MARK: return TS_COMMON;
393 case IDENTIFIER_NODE: return TS_IDENTIFIER;
394 case TREE_LIST: return TS_LIST;
395 case TREE_VEC: return TS_VEC;
396 case SSA_NAME: return TS_SSA_NAME;
397 case PLACEHOLDER_EXPR: return TS_COMMON;
398 case STATEMENT_LIST: return TS_STATEMENT_LIST;
399 case BLOCK: return TS_BLOCK;
400 case CONSTRUCTOR: return TS_CONSTRUCTOR;
401 case TREE_BINFO: return TS_BINFO;
402 case OMP_CLAUSE: return TS_OMP_CLAUSE;
403 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
404 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
405
406 default:
407 gcc_unreachable ();
408 }
409 }
410
411
412 /* Initialize tree_contains_struct to describe the hierarchy of tree
413 nodes. */
414
415 static void
416 initialize_tree_contains_struct (void)
417 {
418 unsigned i;
419
420 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
421 {
422 enum tree_code code;
423 enum tree_node_structure_enum ts_code;
424
425 code = (enum tree_code) i;
426 ts_code = tree_node_structure_for_code (code);
427
428 /* Mark the TS structure itself. */
429 tree_contains_struct[code][ts_code] = 1;
430
431 /* Mark all the structures that TS is derived from. */
432 switch (ts_code)
433 {
434 case TS_TYPED:
435 case TS_BLOCK:
436 MARK_TS_BASE (code);
437 break;
438
439 case TS_COMMON:
440 case TS_INT_CST:
441 case TS_REAL_CST:
442 case TS_FIXED_CST:
443 case TS_VECTOR:
444 case TS_STRING:
445 case TS_COMPLEX:
446 case TS_SSA_NAME:
447 case TS_CONSTRUCTOR:
448 case TS_EXP:
449 case TS_STATEMENT_LIST:
450 MARK_TS_TYPED (code);
451 break;
452
453 case TS_IDENTIFIER:
454 case TS_DECL_MINIMAL:
455 case TS_TYPE_COMMON:
456 case TS_LIST:
457 case TS_VEC:
458 case TS_BINFO:
459 case TS_OMP_CLAUSE:
460 case TS_OPTIMIZATION:
461 case TS_TARGET_OPTION:
462 MARK_TS_COMMON (code);
463 break;
464
465 case TS_TYPE_WITH_LANG_SPECIFIC:
466 MARK_TS_TYPE_COMMON (code);
467 break;
468
469 case TS_TYPE_NON_COMMON:
470 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
471 break;
472
473 case TS_DECL_COMMON:
474 MARK_TS_DECL_MINIMAL (code);
475 break;
476
477 case TS_DECL_WRTL:
478 case TS_CONST_DECL:
479 MARK_TS_DECL_COMMON (code);
480 break;
481
482 case TS_DECL_NON_COMMON:
483 MARK_TS_DECL_WITH_VIS (code);
484 break;
485
486 case TS_DECL_WITH_VIS:
487 case TS_PARM_DECL:
488 case TS_LABEL_DECL:
489 case TS_RESULT_DECL:
490 MARK_TS_DECL_WRTL (code);
491 break;
492
493 case TS_FIELD_DECL:
494 MARK_TS_DECL_COMMON (code);
495 break;
496
497 case TS_VAR_DECL:
498 MARK_TS_DECL_WITH_VIS (code);
499 break;
500
501 case TS_TYPE_DECL:
502 case TS_FUNCTION_DECL:
503 MARK_TS_DECL_NON_COMMON (code);
504 break;
505
506 case TS_TRANSLATION_UNIT_DECL:
507 MARK_TS_DECL_COMMON (code);
508 break;
509
510 default:
511 gcc_unreachable ();
512 }
513 }
514
515 /* Basic consistency checks for attributes used in fold. */
516 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
517 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
518 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
519 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
520 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
521 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
522 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
523 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
524 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
525 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
526 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
527 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
528 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
529 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
530 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
531 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
532 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
533 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
534 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
535 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
536 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
537 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
538 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
539 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
540 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
541 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
542 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
543 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
544 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
545 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
546 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
547 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
548 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
549 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
550 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
551 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
552 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
553 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
554 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
555 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
556 }
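
/* Illustrative note (not part of the original source): once the table is
   filled in, a query such as

     CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_WITH_VIS)

   is a plain lookup in tree_contains_struct and is nonzero here, because
   TS_VAR_DECL is marked above as derived from TS_DECL_WITH_VIS.  */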
557
558
559 /* Init tree.c. */
560
561 void
562 init_ttree (void)
563 {
564 /* Initialize the hash table of types. */
565 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
566 type_hash_eq, 0);
567
568 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
569 tree_decl_map_eq, 0);
570
571 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
572 tree_decl_map_eq, 0);
573
574 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
575 int_cst_hash_eq, NULL);
576
577 int_cst_node = make_int_cst (1, 1);
578
579 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
580 cl_option_hash_eq, NULL);
581
582 cl_optimization_node = make_node (OPTIMIZATION_NODE);
583 cl_target_option_node = make_node (TARGET_OPTION_NODE);
584
585 /* Initialize the tree_contains_struct array. */
586 initialize_tree_contains_struct ();
587 lang_hooks.init_ts ();
588 }
589
590 \f
591 /* The name of the object as the assembler will see it (but before any
592 translations made by ASM_OUTPUT_LABELREF). Often this is the same
593 as DECL_NAME. It is an IDENTIFIER_NODE. */
594 tree
595 decl_assembler_name (tree decl)
596 {
597 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
598 lang_hooks.set_decl_assembler_name (decl);
599 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
600 }
601
602 /* When the target supports COMDAT groups, this indicates which group the
603 DECL is associated with. This can be either an IDENTIFIER_NODE or a
604 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
605 tree
606 decl_comdat_group (const_tree node)
607 {
608 struct symtab_node *snode = symtab_node::get (node);
609 if (!snode)
610 return NULL;
611 return snode->get_comdat_group ();
612 }
613
614 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
615 tree
616 decl_comdat_group_id (const_tree node)
617 {
618 struct symtab_node *snode = symtab_node::get (node);
619 if (!snode)
620 return NULL;
621 return snode->get_comdat_group_id ();
622 }
623
624 /* When the target supports named sections, return the section name of
625 NODE as a string, or NULL if NODE is in no section. */
626 const char *
627 decl_section_name (const_tree node)
628 {
629 struct symtab_node *snode = symtab_node::get (node);
630 if (!snode)
631 return NULL;
632 return snode->get_section ();
633 }
634
635 /* Set the section name of NODE to the string VALUE, or clear it when
636 VALUE is NULL. */
637 void
638 set_decl_section_name (tree node, const char *value)
639 {
640 struct symtab_node *snode;
641
642 if (value == NULL)
643 {
644 snode = symtab_node::get (node);
645 if (!snode)
646 return;
647 }
648 else if (TREE_CODE (node) == VAR_DECL)
649 snode = varpool_node::get_create (node);
650 else
651 snode = cgraph_node::get_create (node);
652 snode->set_section (value);
653 }
654
655 /* Return TLS model of a variable NODE. */
656 enum tls_model
657 decl_tls_model (const_tree node)
658 {
659 struct varpool_node *snode = varpool_node::get (node);
660 if (!snode)
661 return TLS_MODEL_NONE;
662 return snode->tls_model;
663 }
664
665 /* Set TLS model of variable NODE to MODEL. */
666 void
667 set_decl_tls_model (tree node, enum tls_model model)
668 {
669 struct varpool_node *vnode;
670
671 if (model == TLS_MODEL_NONE)
672 {
673 vnode = varpool_node::get (node);
674 if (!vnode)
675 return;
676 }
677 else
678 vnode = varpool_node::get_create (node);
679 vnode->tls_model = model;
680 }
681
682 /* Compute the number of bytes occupied by a tree with code CODE.
683 This function cannot be used for nodes that have variable sizes,
684 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
685 size_t
686 tree_code_size (enum tree_code code)
687 {
688 switch (TREE_CODE_CLASS (code))
689 {
690 case tcc_declaration: /* A decl node */
691 {
692 switch (code)
693 {
694 case FIELD_DECL:
695 return sizeof (struct tree_field_decl);
696 case PARM_DECL:
697 return sizeof (struct tree_parm_decl);
698 case VAR_DECL:
699 return sizeof (struct tree_var_decl);
700 case LABEL_DECL:
701 return sizeof (struct tree_label_decl);
702 case RESULT_DECL:
703 return sizeof (struct tree_result_decl);
704 case CONST_DECL:
705 return sizeof (struct tree_const_decl);
706 case TYPE_DECL:
707 return sizeof (struct tree_type_decl);
708 case FUNCTION_DECL:
709 return sizeof (struct tree_function_decl);
710 case DEBUG_EXPR_DECL:
711 return sizeof (struct tree_decl_with_rtl);
712 case TRANSLATION_UNIT_DECL:
713 return sizeof (struct tree_translation_unit_decl);
714 case NAMESPACE_DECL:
715 case IMPORTED_DECL:
716 case NAMELIST_DECL:
717 return sizeof (struct tree_decl_non_common);
718 default:
719 return lang_hooks.tree_size (code);
720 }
721 }
722
723 case tcc_type: /* a type node */
724 return sizeof (struct tree_type_non_common);
725
726 case tcc_reference: /* a reference */
727 case tcc_expression: /* an expression */
728 case tcc_statement: /* an expression with side effects */
729 case tcc_comparison: /* a comparison expression */
730 case tcc_unary: /* a unary arithmetic expression */
731 case tcc_binary: /* a binary arithmetic expression */
732 return (sizeof (struct tree_exp)
733 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
734
735 case tcc_constant: /* a constant */
736 switch (code)
737 {
738 case VOID_CST: return sizeof (struct tree_typed);
739 case INTEGER_CST: gcc_unreachable ();
740 case REAL_CST: return sizeof (struct tree_real_cst);
741 case FIXED_CST: return sizeof (struct tree_fixed_cst);
742 case COMPLEX_CST: return sizeof (struct tree_complex);
743 case VECTOR_CST: return sizeof (struct tree_vector);
744 case STRING_CST: gcc_unreachable ();
745 default:
746 return lang_hooks.tree_size (code);
747 }
748
749 case tcc_exceptional: /* something random, like an identifier. */
750 switch (code)
751 {
752 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
753 case TREE_LIST: return sizeof (struct tree_list);
754
755 case ERROR_MARK:
756 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
757
758 case TREE_VEC:
759 case OMP_CLAUSE: gcc_unreachable ();
760
761 case SSA_NAME: return sizeof (struct tree_ssa_name);
762
763 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
764 case BLOCK: return sizeof (struct tree_block);
765 case CONSTRUCTOR: return sizeof (struct tree_constructor);
766 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
767 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
768
769 default:
770 return lang_hooks.tree_size (code);
771 }
772
773 default:
774 gcc_unreachable ();
775 }
776 }
777
778 /* Compute the number of bytes occupied by NODE. This routine only
779 looks at TREE_CODE, except for those nodes that have variable sizes. */
780 size_t
781 tree_size (const_tree node)
782 {
783 const enum tree_code code = TREE_CODE (node);
784 switch (code)
785 {
786 case INTEGER_CST:
787 return (sizeof (struct tree_int_cst)
788 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
789
790 case TREE_BINFO:
791 return (offsetof (struct tree_binfo, base_binfos)
792 + vec<tree, va_gc>
793 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
794
795 case TREE_VEC:
796 return (sizeof (struct tree_vec)
797 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
798
799 case VECTOR_CST:
800 return (sizeof (struct tree_vector)
801 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
802
803 case STRING_CST:
804 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
805
806 case OMP_CLAUSE:
807 return (sizeof (struct tree_omp_clause)
808 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
809 * sizeof (tree));
810
811 default:
812 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
813 return (sizeof (struct tree_exp)
814 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
815 else
816 return tree_code_size (code);
817 }
818 }
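
/* Illustrative example (not part of the original source): a TREE_VEC
   holding three elements occupies

     sizeof (struct tree_vec) + 2 * sizeof (tree)

   bytes, since one operand is already embedded in struct tree_vec;
   fixed-size codes simply fall through to tree_code_size above.  */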
819
820 /* Record interesting allocation statistics for a tree node with CODE
821 and LENGTH. */
822
823 static void
824 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
825 size_t length ATTRIBUTE_UNUSED)
826 {
827 enum tree_code_class type = TREE_CODE_CLASS (code);
828 tree_node_kind kind;
829
830 if (!GATHER_STATISTICS)
831 return;
832
833 switch (type)
834 {
835 case tcc_declaration: /* A decl node */
836 kind = d_kind;
837 break;
838
839 case tcc_type: /* a type node */
840 kind = t_kind;
841 break;
842
843 case tcc_statement: /* an expression with side effects */
844 kind = s_kind;
845 break;
846
847 case tcc_reference: /* a reference */
848 kind = r_kind;
849 break;
850
851 case tcc_expression: /* an expression */
852 case tcc_comparison: /* a comparison expression */
853 case tcc_unary: /* a unary arithmetic expression */
854 case tcc_binary: /* a binary arithmetic expression */
855 kind = e_kind;
856 break;
857
858 case tcc_constant: /* a constant */
859 kind = c_kind;
860 break;
861
862 case tcc_exceptional: /* something random, like an identifier. */
863 switch (code)
864 {
865 case IDENTIFIER_NODE:
866 kind = id_kind;
867 break;
868
869 case TREE_VEC:
870 kind = vec_kind;
871 break;
872
873 case TREE_BINFO:
874 kind = binfo_kind;
875 break;
876
877 case SSA_NAME:
878 kind = ssa_name_kind;
879 break;
880
881 case BLOCK:
882 kind = b_kind;
883 break;
884
885 case CONSTRUCTOR:
886 kind = constr_kind;
887 break;
888
889 case OMP_CLAUSE:
890 kind = omp_clause_kind;
891 break;
892
893 default:
894 kind = x_kind;
895 break;
896 }
897 break;
898
899 case tcc_vl_exp:
900 kind = e_kind;
901 break;
902
903 default:
904 gcc_unreachable ();
905 }
906
907 tree_code_counts[(int) code]++;
908 tree_node_counts[(int) kind]++;
909 tree_node_sizes[(int) kind] += length;
910 }
911
912 /* Allocate and return a new UID from the DECL_UID namespace. */
913
914 int
915 allocate_decl_uid (void)
916 {
917 return next_decl_uid++;
918 }
919
920 /* Return a newly allocated node of code CODE. For decl and type
921 nodes, some other fields are initialized. The rest of the node is
922 initialized to zero. This function cannot be used for TREE_VEC,
923 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
924 tree_code_size.
925
926 Achoo! I got a code in the node. */
927
928 tree
929 make_node_stat (enum tree_code code MEM_STAT_DECL)
930 {
931 tree t;
932 enum tree_code_class type = TREE_CODE_CLASS (code);
933 size_t length = tree_code_size (code);
934
935 record_node_allocation_statistics (code, length);
936
937 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
938 TREE_SET_CODE (t, code);
939
940 switch (type)
941 {
942 case tcc_statement:
943 TREE_SIDE_EFFECTS (t) = 1;
944 break;
945
946 case tcc_declaration:
947 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
948 {
949 if (code == FUNCTION_DECL)
950 {
951 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
952 DECL_MODE (t) = FUNCTION_MODE;
953 }
954 else
955 DECL_ALIGN (t) = 1;
956 }
957 DECL_SOURCE_LOCATION (t) = input_location;
958 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
959 DECL_UID (t) = --next_debug_decl_uid;
960 else
961 {
962 DECL_UID (t) = allocate_decl_uid ();
963 SET_DECL_PT_UID (t, -1);
964 }
965 if (TREE_CODE (t) == LABEL_DECL)
966 LABEL_DECL_UID (t) = -1;
967
968 break;
969
970 case tcc_type:
971 TYPE_UID (t) = next_type_uid++;
972 TYPE_ALIGN (t) = BITS_PER_UNIT;
973 TYPE_USER_ALIGN (t) = 0;
974 TYPE_MAIN_VARIANT (t) = t;
975 TYPE_CANONICAL (t) = t;
976
977 /* Default to no attributes for type, but let target change that. */
978 TYPE_ATTRIBUTES (t) = NULL_TREE;
979 targetm.set_default_type_attributes (t);
980
981 /* We have not yet computed the alias set for this type. */
982 TYPE_ALIAS_SET (t) = -1;
983 break;
984
985 case tcc_constant:
986 TREE_CONSTANT (t) = 1;
987 break;
988
989 case tcc_expression:
990 switch (code)
991 {
992 case INIT_EXPR:
993 case MODIFY_EXPR:
994 case VA_ARG_EXPR:
995 case PREDECREMENT_EXPR:
996 case PREINCREMENT_EXPR:
997 case POSTDECREMENT_EXPR:
998 case POSTINCREMENT_EXPR:
999 /* All of these have side-effects, no matter what their
1000 operands are. */
1001 TREE_SIDE_EFFECTS (t) = 1;
1002 break;
1003
1004 default:
1005 break;
1006 }
1007 break;
1008
1009 default:
1010 /* Other classes need no special treatment. */
1011 break;
1012 }
1013
1014 return t;
1015 }
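
/* Illustrative sketch (not part of the original source, assuming the usual
   make_node wrapper macro): building a fresh type node

     tree t = make_node (INTEGER_TYPE);

   returns a node with TYPE_ALIGN (t) == BITS_PER_UNIT,
   TYPE_MAIN_VARIANT (t) == t and TYPE_ALIAS_SET (t) == -1 as set above;
   the caller then fills in precision, size and bounds itself.  */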
1016 \f
1017 /* Return a new node with the same contents as NODE except that its
1018 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1019
1020 tree
1021 copy_node_stat (tree node MEM_STAT_DECL)
1022 {
1023 tree t;
1024 enum tree_code code = TREE_CODE (node);
1025 size_t length;
1026
1027 gcc_assert (code != STATEMENT_LIST);
1028
1029 length = tree_size (node);
1030 record_node_allocation_statistics (code, length);
1031 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1032 memcpy (t, node, length);
1033
1034 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1035 TREE_CHAIN (t) = 0;
1036 TREE_ASM_WRITTEN (t) = 0;
1037 TREE_VISITED (t) = 0;
1038
1039 if (TREE_CODE_CLASS (code) == tcc_declaration)
1040 {
1041 if (code == DEBUG_EXPR_DECL)
1042 DECL_UID (t) = --next_debug_decl_uid;
1043 else
1044 {
1045 DECL_UID (t) = allocate_decl_uid ();
1046 if (DECL_PT_UID_SET_P (node))
1047 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1048 }
1049 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1050 && DECL_HAS_VALUE_EXPR_P (node))
1051 {
1052 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1053 DECL_HAS_VALUE_EXPR_P (t) = 1;
1054 }
1055 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1056 if (TREE_CODE (node) == VAR_DECL)
1057 {
1058 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1059 t->decl_with_vis.symtab_node = NULL;
1060 }
1061 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1062 {
1063 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1064 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1065 }
1066 if (TREE_CODE (node) == FUNCTION_DECL)
1067 {
1068 DECL_STRUCT_FUNCTION (t) = NULL;
1069 t->decl_with_vis.symtab_node = NULL;
1070 }
1071 }
1072 else if (TREE_CODE_CLASS (code) == tcc_type)
1073 {
1074 TYPE_UID (t) = next_type_uid++;
1075 /* The following is so that the debug code for
1076 the copy is different from the original type.
1077 The two statements usually duplicate each other
1078 (because they clear fields of the same union),
1079 but the optimizer should catch that. */
1080 TYPE_SYMTAB_POINTER (t) = 0;
1081 TYPE_SYMTAB_ADDRESS (t) = 0;
1082
1083 /* Do not copy the values cache. */
1084 if (TYPE_CACHED_VALUES_P (t))
1085 {
1086 TYPE_CACHED_VALUES_P (t) = 0;
1087 TYPE_CACHED_VALUES (t) = NULL_TREE;
1088 }
1089 }
1090
1091 return t;
1092 }
1093
1094 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1095 For example, this can copy a list made of TREE_LIST nodes. */
1096
1097 tree
1098 copy_list (tree list)
1099 {
1100 tree head;
1101 tree prev, next;
1102
1103 if (list == 0)
1104 return 0;
1105
1106 head = prev = copy_node (list);
1107 next = TREE_CHAIN (list);
1108 while (next)
1109 {
1110 TREE_CHAIN (prev) = copy_node (next);
1111 prev = TREE_CHAIN (prev);
1112 next = TREE_CHAIN (next);
1113 }
1114 return head;
1115 }
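
/* Illustrative usage (not part of the original source; the attribute name
   is only an example):

     tree attrs = tree_cons (get_identifier ("packed"), NULL_TREE, NULL_TREE);
     tree copy  = copy_list (attrs);

   gives a freshly allocated TREE_LIST chain whose TREE_PURPOSE and
   TREE_VALUE fields still point at the original nodes.  */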
1116
1117 \f
1118 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1119 INTEGER_CST with value CST and type TYPE. */
1120
1121 static unsigned int
1122 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1123 {
1124 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1125 /* We need an extra zero HWI if CST is an unsigned integer with its
1126 upper bit set, and if CST occupies a whole number of HWIs. */
1127 if (TYPE_UNSIGNED (type)
1128 && wi::neg_p (cst)
1129 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1130 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1131 return cst.get_len ();
1132 }
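
/* Illustrative example (not part of the original source): with a 64-bit
   HOST_WIDE_INT and a 64-bit unsigned type, the value 0x8000000000000000
   has its top bit set and exactly fills one HWI, so len is 1 but ext_len
   is 2; the extra element is the leading zero that keeps the value
   non-negative when the HWI sequence is read as signed.  */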
1133
1134 /* Return a new INTEGER_CST with value CST and type TYPE. */
1135
1136 static tree
1137 build_new_int_cst (tree type, const wide_int &cst)
1138 {
1139 unsigned int len = cst.get_len ();
1140 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1141 tree nt = make_int_cst (len, ext_len);
1142
1143 if (len < ext_len)
1144 {
1145 --ext_len;
1146 TREE_INT_CST_ELT (nt, ext_len) = 0;
1147 for (unsigned int i = len; i < ext_len; ++i)
1148 TREE_INT_CST_ELT (nt, i) = -1;
1149 }
1150 else if (TYPE_UNSIGNED (type)
1151 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1152 {
1153 len--;
1154 TREE_INT_CST_ELT (nt, len)
1155 = zext_hwi (cst.elt (len),
1156 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1157 }
1158
1159 for (unsigned int i = 0; i < len; i++)
1160 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1161 TREE_TYPE (nt) = type;
1162 return nt;
1163 }
1164
1165 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1166
1167 tree
1168 build_int_cst (tree type, HOST_WIDE_INT low)
1169 {
1170 /* Support legacy code. */
1171 if (!type)
1172 type = integer_type_node;
1173
1174 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1175 }
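
/* Illustrative usage (not part of the original source):

     tree forty_two = build_int_cst (integer_type_node, 42);
     tree minus_one = build_int_cst (ssizetype, -1);

   both end up in wide_int_to_tree, so small values like these are
   returned from the per-type cache and shared between callers.  */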
1176
1177 tree
1178 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1179 {
1180 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1181 }
1182
1183 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1184
1185 tree
1186 build_int_cst_type (tree type, HOST_WIDE_INT low)
1187 {
1188 gcc_assert (type);
1189 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1190 }
1191
1192 /* Construct a tree of type TYPE with the value given by CST. The
1193 signedness of CST is assumed to be the same as the signedness of TYPE. */
1194
1195 tree
1196 double_int_to_tree (tree type, double_int cst)
1197 {
1198 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1199 }
1200
1201 /* We force the wide_int CST to the range of the type TYPE by sign or
1202 zero extending it. OVERFLOWABLE indicates if we are interested in
1203 overflow of the value: when >0 we are only interested in signed
1204 overflow, when <0 we are interested in any overflow. OVERFLOWED
1205 indicates whether overflow has already occurred. We force the
1206 value to be within the range of TYPE (by setting to 0 or 1 all
1207 the bits outside the type's range). We set TREE_OVERFLOW if
1208 OVERFLOWED is nonzero,
1209 or OVERFLOWABLE is >0 and signed overflow occurs,
1210 or OVERFLOWABLE is <0 and any overflow occurs.
1211 We return a new tree node for the extended wide_int.
1212 The node is shared if no overflow flags are set. */
1214
1215
1216 tree
1217 force_fit_type (tree type, const wide_int_ref &cst,
1218 int overflowable, bool overflowed)
1219 {
1220 signop sign = TYPE_SIGN (type);
1221
1222 /* If we need to set overflow flags, return a new unshared node. */
1223 if (overflowed || !wi::fits_to_tree_p (cst, type))
1224 {
1225 if (overflowed
1226 || overflowable < 0
1227 || (overflowable > 0 && sign == SIGNED))
1228 {
1229 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1230 tree t = build_new_int_cst (type, tmp);
1231 TREE_OVERFLOW (t) = 1;
1232 return t;
1233 }
1234 }
1235
1236 /* Else build a shared node. */
1237 return wide_int_to_tree (type, cst);
1238 }
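
/* Illustrative example (not part of the original source, assuming an
   8-bit unsigned char): the 9-bit value 300 does not fit, so

     force_fit_type (unsigned_char_type_node, wi::uhwi (300, 9), 1, false)

   truncates to 44 and returns the shared constant with no TREE_OVERFLOW,
   because unsigned overflow with OVERFLOWABLE > 0 is not recorded; passing
   OVERFLOWABLE as -1 instead would return a fresh node with TREE_OVERFLOW
   set.  */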
1239
1240 /* These are the hash table functions for the hash table of shared
1241 INTEGER_CST nodes. */
1242
1243 /* Return the hash code of X, an INTEGER_CST. */
1244
1245 static hashval_t
1246 int_cst_hash_hash (const void *x)
1247 {
1248 const_tree const t = (const_tree) x;
1249 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1250 int i;
1251
1252 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1253 code ^= TREE_INT_CST_ELT (t, i);
1254
1255 return code;
1256 }
1257
1258 /* Return nonzero if the value represented by X (an INTEGER_CST tree node)
1259 is the same as that represented by Y, also an INTEGER_CST tree node. */
1260
1261 static int
1262 int_cst_hash_eq (const void *x, const void *y)
1263 {
1264 const_tree const xt = (const_tree) x;
1265 const_tree const yt = (const_tree) y;
1266
1267 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1268 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1269 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1270 return false;
1271
1272 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1273 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1274 return false;
1275
1276 return true;
1277 }
1278
1279 /* Create an INT_CST node of TYPE and value CST.
1280 The returned node is always shared. For small integers we use a
1281 per-type vector cache, for larger ones we use a single hash table.
1282 The value is extended from its precision according to the sign of
1283 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1284 the upper bits and ensures that hashing and value equality based
1285 upon the underlying HOST_WIDE_INTs works without masking. */
1286
1287 tree
1288 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1289 {
1290 tree t;
1291 int ix = -1;
1292 int limit = 0;
1293
1294 gcc_assert (type);
1295 unsigned int prec = TYPE_PRECISION (type);
1296 signop sgn = TYPE_SIGN (type);
1297
1298 /* Verify that everything is canonical. */
1299 int l = pcst.get_len ();
1300 if (l > 1)
1301 {
1302 if (pcst.elt (l - 1) == 0)
1303 gcc_checking_assert (pcst.elt (l - 2) < 0);
1304 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1305 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1306 }
1307
1308 wide_int cst = wide_int::from (pcst, prec, sgn);
1309 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1310
1311 if (ext_len == 1)
1312 {
1313 /* We just need to store a single HOST_WIDE_INT. */
1314 HOST_WIDE_INT hwi;
1315 if (TYPE_UNSIGNED (type))
1316 hwi = cst.to_uhwi ();
1317 else
1318 hwi = cst.to_shwi ();
1319
1320 switch (TREE_CODE (type))
1321 {
1322 case NULLPTR_TYPE:
1323 gcc_assert (hwi == 0);
1324 /* Fallthru. */
1325
1326 case POINTER_TYPE:
1327 case REFERENCE_TYPE:
1328 /* Cache NULL pointer. */
1329 if (hwi == 0)
1330 {
1331 limit = 1;
1332 ix = 0;
1333 }
1334 break;
1335
1336 case BOOLEAN_TYPE:
1337 /* Cache false or true. */
1338 limit = 2;
1339 if (hwi < 2)
1340 ix = hwi;
1341 break;
1342
1343 case INTEGER_TYPE:
1344 case OFFSET_TYPE:
1345 if (TYPE_SIGN (type) == UNSIGNED)
1346 {
1347 /* Cache [0, N). */
1348 limit = INTEGER_SHARE_LIMIT;
1349 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1350 ix = hwi;
1351 }
1352 else
1353 {
1354 /* Cache [-1, N). */
1355 limit = INTEGER_SHARE_LIMIT + 1;
1356 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1357 ix = hwi + 1;
1358 }
1359 break;
1360
1361 case ENUMERAL_TYPE:
1362 break;
1363
1364 default:
1365 gcc_unreachable ();
1366 }
1367
1368 if (ix >= 0)
1369 {
1370 /* Look for it in the type's vector of small shared ints. */
1371 if (!TYPE_CACHED_VALUES_P (type))
1372 {
1373 TYPE_CACHED_VALUES_P (type) = 1;
1374 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1375 }
1376
1377 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1378 if (t)
1379 /* Make sure no one is clobbering the shared constant. */
1380 gcc_checking_assert (TREE_TYPE (t) == type
1381 && TREE_INT_CST_NUNITS (t) == 1
1382 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1383 && TREE_INT_CST_EXT_NUNITS (t) == 1
1384 && TREE_INT_CST_ELT (t, 0) == hwi);
1385 else
1386 {
1387 /* Create a new shared int. */
1388 t = build_new_int_cst (type, cst);
1389 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1390 }
1391 }
1392 else
1393 {
1394 /* Use the cache of larger shared ints, using int_cst_node as
1395 a temporary. */
1396 void **slot;
1397
1398 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1399 TREE_TYPE (int_cst_node) = type;
1400
1401 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1402 t = (tree) *slot;
1403 if (!t)
1404 {
1405 /* Insert this one into the hash table. */
1406 t = int_cst_node;
1407 *slot = t;
1408 /* Make a new node for next time round. */
1409 int_cst_node = make_int_cst (1, 1);
1410 }
1411 }
1412 }
1413 else
1414 {
1415 /* The value either hashes properly or we drop it on the floor
1416 for the gc to take care of. There will not be enough of them
1417 to worry about. */
1418 void **slot;
1419
1420 tree nt = build_new_int_cst (type, cst);
1421 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1422 t = (tree) *slot;
1423 if (!t)
1424 {
1425 /* Insert this one into the hash table. */
1426 t = nt;
1427 *slot = t;
1428 }
1429 }
1430
1431 return t;
1432 }
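
/* Illustrative consequence (not part of the original source): thanks to
   the caches above,

     tree a = build_int_cst (integer_type_node, 1);
     tree b = build_int_cst (integer_type_node, 1);

   yield the same node (a == b), since 1 falls into the small per-type
   vector; values outside that range are still shared through
   int_cst_hash_table.  */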
1433
1434 void
1435 cache_integer_cst (tree t)
1436 {
1437 tree type = TREE_TYPE (t);
1438 int ix = -1;
1439 int limit = 0;
1440 int prec = TYPE_PRECISION (type);
1441
1442 gcc_assert (!TREE_OVERFLOW (t));
1443
1444 switch (TREE_CODE (type))
1445 {
1446 case NULLPTR_TYPE:
1447 gcc_assert (integer_zerop (t));
1448 /* Fallthru. */
1449
1450 case POINTER_TYPE:
1451 case REFERENCE_TYPE:
1452 /* Cache NULL pointer. */
1453 if (integer_zerop (t))
1454 {
1455 limit = 1;
1456 ix = 0;
1457 }
1458 break;
1459
1460 case BOOLEAN_TYPE:
1461 /* Cache false or true. */
1462 limit = 2;
1463 if (wi::ltu_p (t, 2))
1464 ix = TREE_INT_CST_ELT (t, 0);
1465 break;
1466
1467 case INTEGER_TYPE:
1468 case OFFSET_TYPE:
1469 if (TYPE_UNSIGNED (type))
1470 {
1471 /* Cache 0..N */
1472 limit = INTEGER_SHARE_LIMIT;
1473
1474 /* This is a little hokey, but if the precision is smaller than
1475 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1476 obvious test will not get the correct answer. */
1477 if (prec < HOST_BITS_PER_WIDE_INT)
1478 {
1479 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1480 ix = tree_to_uhwi (t);
1481 }
1482 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1483 ix = tree_to_uhwi (t);
1484 }
1485 else
1486 {
1487 /* Cache -1..N */
1488 limit = INTEGER_SHARE_LIMIT + 1;
1489
1490 if (integer_minus_onep (t))
1491 ix = 0;
1492 else if (!wi::neg_p (t))
1493 {
1494 if (prec < HOST_BITS_PER_WIDE_INT)
1495 {
1496 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1497 ix = tree_to_shwi (t) + 1;
1498 }
1499 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1500 ix = tree_to_shwi (t) + 1;
1501 }
1502 }
1503 break;
1504
1505 case ENUMERAL_TYPE:
1506 break;
1507
1508 default:
1509 gcc_unreachable ();
1510 }
1511
1512 if (ix >= 0)
1513 {
1514 /* Look for it in the type's vector of small shared ints. */
1515 if (!TYPE_CACHED_VALUES_P (type))
1516 {
1517 TYPE_CACHED_VALUES_P (type) = 1;
1518 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1519 }
1520
1521 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1522 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1523 }
1524 else
1525 {
1526 /* Use the cache of larger shared ints. */
1527 void **slot;
1528
1529 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1530 /* If there is already an entry for the number verify it's the
1531 same. */
1532 if (*slot)
1533 gcc_assert (wi::eq_p (tree (*slot), t));
1534 else
1535 /* Otherwise insert this one into the hash table. */
1536 *slot = t;
1537 }
1538 }
1539
1540
1541 /* Builds an integer constant in TYPE such that the lowest BITS bits are
1542 ones and the rest are zeros. */
1543
1544 tree
1545 build_low_bits_mask (tree type, unsigned bits)
1546 {
1547 gcc_assert (bits <= TYPE_PRECISION (type));
1548
1549 return wide_int_to_tree (type, wi::mask (bits, false,
1550 TYPE_PRECISION (type)));
1551 }
1552
1553 /* Checks that X is an integer constant that can be expressed in an
1554 (unsigned) HOST_WIDE_INT without loss of precision. */
1555
1556 bool
1557 cst_and_fits_in_hwi (const_tree x)
1558 {
1559 if (TREE_CODE (x) != INTEGER_CST)
1560 return false;
1561
1562 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1563 return false;
1564
1565 return TREE_INT_CST_NUNITS (x) == 1;
1566 }
1567
1568 /* Build a newly constructed VECTOR_CST node with room for LEN elements. */
1569
1570 tree
1571 make_vector_stat (unsigned len MEM_STAT_DECL)
1572 {
1573 tree t;
1574 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1575
1576 record_node_allocation_statistics (VECTOR_CST, length);
1577
1578 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1579
1580 TREE_SET_CODE (t, VECTOR_CST);
1581 TREE_CONSTANT (t) = 1;
1582
1583 return t;
1584 }
1585
1586 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1587 are given by the array VALS. */
1588
1589 tree
1590 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1591 {
1592 int over = 0;
1593 unsigned cnt = 0;
1594 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1595 TREE_TYPE (v) = type;
1596
1597 /* Iterate through elements and check for overflow. */
1598 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1599 {
1600 tree value = vals[cnt];
1601
1602 VECTOR_CST_ELT (v, cnt) = value;
1603
1604 /* Don't crash if we get an address constant. */
1605 if (!CONSTANT_CLASS_P (value))
1606 continue;
1607
1608 over |= TREE_OVERFLOW (value);
1609 }
1610
1611 TREE_OVERFLOW (v) = over;
1612 return v;
1613 }
1614
1615 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1616 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1617
1618 tree
1619 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1620 {
1621 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1622 unsigned HOST_WIDE_INT idx;
1623 tree value;
1624
1625 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1626 vec[idx] = value;
1627 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1628 vec[idx] = build_zero_cst (TREE_TYPE (type));
1629
1630 return build_vector (type, vec);
1631 }
1632
1633 /* Build a vector of type VECTYPE in which every element is SC. */
1634 tree
1635 build_vector_from_val (tree vectype, tree sc)
1636 {
1637 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1638
1639 if (sc == error_mark_node)
1640 return sc;
1641
1642 /* Verify that the vector type is suitable for SC. Note that there
1643 is some inconsistency in the type-system with respect to restrict
1644 qualifications of pointers. Vector types always have a main-variant
1645 element type and the qualification is applied to the vector-type.
1646 So TREE_TYPE (vector-type) does not return a properly qualified
1647 vector element-type. */
1648 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1649 TREE_TYPE (vectype)));
1650
1651 if (CONSTANT_CLASS_P (sc))
1652 {
1653 tree *v = XALLOCAVEC (tree, nunits);
1654 for (i = 0; i < nunits; ++i)
1655 v[i] = sc;
1656 return build_vector (vectype, v);
1657 }
1658 else
1659 {
1660 vec<constructor_elt, va_gc> *v;
1661 vec_alloc (v, nunits);
1662 for (i = 0; i < nunits; ++i)
1663 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1664 return build_constructor (vectype, v);
1665 }
1666 }
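
/* Illustrative usage (not part of the original source; v4si_type stands
   for some previously built vector of four ints):

     tree four  = build_int_cst (TREE_TYPE (v4si_type), 4);
     tree splat = build_vector_from_val (v4si_type, four);

   a constant element like this yields a VECTOR_CST, while a non-constant
   element yields a CONSTRUCTOR instead.  */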
1667
1668 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1669 are in the vec pointed to by VALS. */
1670 tree
1671 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1672 {
1673 tree c = make_node (CONSTRUCTOR);
1674 unsigned int i;
1675 constructor_elt *elt;
1676 bool constant_p = true;
1677 bool side_effects_p = false;
1678
1679 TREE_TYPE (c) = type;
1680 CONSTRUCTOR_ELTS (c) = vals;
1681
1682 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1683 {
1684 /* Mostly ctors will have elts that don't have side-effects, so
1685 the usual case is to scan all the elements. Hence a single
1686 loop for both const and side effects, rather than one loop
1687 each (with early outs). */
1688 if (!TREE_CONSTANT (elt->value))
1689 constant_p = false;
1690 if (TREE_SIDE_EFFECTS (elt->value))
1691 side_effects_p = true;
1692 }
1693
1694 TREE_SIDE_EFFECTS (c) = side_effects_p;
1695 TREE_CONSTANT (c) = constant_p;
1696
1697 return c;
1698 }
1699
1700 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1701 INDEX and VALUE. */
1702 tree
1703 build_constructor_single (tree type, tree index, tree value)
1704 {
1705 vec<constructor_elt, va_gc> *v;
1706 constructor_elt elt = {index, value};
1707
1708 vec_alloc (v, 1);
1709 v->quick_push (elt);
1710
1711 return build_constructor (type, v);
1712 }
1713
1714
1715 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1716 are in a list pointed to by VALS. */
1717 tree
1718 build_constructor_from_list (tree type, tree vals)
1719 {
1720 tree t;
1721 vec<constructor_elt, va_gc> *v = NULL;
1722
1723 if (vals)
1724 {
1725 vec_alloc (v, list_length (vals));
1726 for (t = vals; t; t = TREE_CHAIN (t))
1727 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1728 }
1729
1730 return build_constructor (type, v);
1731 }
1732
1733 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1734 of elements, provided as index/value pairs. */
1735
1736 tree
1737 build_constructor_va (tree type, int nelts, ...)
1738 {
1739 vec<constructor_elt, va_gc> *v = NULL;
1740 va_list p;
1741
1742 va_start (p, nelts);
1743 vec_alloc (v, nelts);
1744 while (nelts--)
1745 {
1746 tree index = va_arg (p, tree);
1747 tree value = va_arg (p, tree);
1748 CONSTRUCTOR_APPEND_ELT (v, index, value);
1749 }
1750 va_end (p);
1751 return build_constructor (type, v);
1752 }
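
/* Illustrative usage (not part of the original source; array_type and
   elt_type stand for a previously built array type and its element type):

     tree ctor = build_constructor_va (array_type, 2,
                                       size_int (0), build_int_cst (elt_type, 1),
                                       size_int (1), build_int_cst (elt_type, 2));

   the NELTS index/value pairs are consumed from the varargs in order.  */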
1753
1754 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1755
1756 tree
1757 build_fixed (tree type, FIXED_VALUE_TYPE f)
1758 {
1759 tree v;
1760 FIXED_VALUE_TYPE *fp;
1761
1762 v = make_node (FIXED_CST);
1763 fp = ggc_alloc<fixed_value> ();
1764 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1765
1766 TREE_TYPE (v) = type;
1767 TREE_FIXED_CST_PTR (v) = fp;
1768 return v;
1769 }
1770
1771 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1772
1773 tree
1774 build_real (tree type, REAL_VALUE_TYPE d)
1775 {
1776 tree v;
1777 REAL_VALUE_TYPE *dp;
1778 int overflow = 0;
1779
1780 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1781 Consider doing it via real_convert now. */
1782
1783 v = make_node (REAL_CST);
1784 dp = ggc_alloc<real_value> ();
1785 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1786
1787 TREE_TYPE (v) = type;
1788 TREE_REAL_CST_PTR (v) = dp;
1789 TREE_OVERFLOW (v) = overflow;
1790 return v;
1791 }
1792
1793 /* Return a REAL_VALUE_TYPE holding the value of the INTEGER_CST node I,
1794 converted using the mode of TYPE (VOIDmode if TYPE is NULL). */
1795
1796 REAL_VALUE_TYPE
1797 real_value_from_int_cst (const_tree type, const_tree i)
1798 {
1799 REAL_VALUE_TYPE d;
1800
1801 /* Clear all bits of the real value type so that we can later do
1802 bitwise comparisons to see if two values are the same. */
1803 memset (&d, 0, sizeof d);
1804
1805 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1806 TYPE_SIGN (TREE_TYPE (i)));
1807 return d;
1808 }
1809
1810 /* Given a tree representing an integer constant I, return a tree
1811 representing the same value as a floating-point constant of type TYPE. */
1812
1813 tree
1814 build_real_from_int_cst (tree type, const_tree i)
1815 {
1816 tree v;
1817 int overflow = TREE_OVERFLOW (i);
1818
1819 v = build_real (type, real_value_from_int_cst (type, i));
1820
1821 TREE_OVERFLOW (v) |= overflow;
1822 return v;
1823 }
1824
1825 /* Return a newly constructed STRING_CST node whose value is
1826 the LEN characters at STR.
1827 Note that for a C string literal, LEN should include the trailing NUL.
1828 The TREE_TYPE is not initialized. */
1829
1830 tree
1831 build_string (int len, const char *str)
1832 {
1833 tree s;
1834 size_t length;
1835
1836 /* Do not waste bytes provided by padding of struct tree_string. */
1837 length = len + offsetof (struct tree_string, str) + 1;
1838
1839 record_node_allocation_statistics (STRING_CST, length);
1840
1841 s = (tree) ggc_internal_alloc (length);
1842
1843 memset (s, 0, sizeof (struct tree_typed));
1844 TREE_SET_CODE (s, STRING_CST);
1845 TREE_CONSTANT (s) = 1;
1846 TREE_STRING_LENGTH (s) = len;
1847 memcpy (s->string.str, str, len);
1848 s->string.str[len] = '\0';
1849
1850 return s;
1851 }
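
/* Illustrative usage (not part of the original source): for the C literal
   "hi",

     tree s = build_string (3, "hi");

   passes LEN == 3 so the trailing NUL is part of the constant; as noted
   above, the caller still has to set TREE_TYPE (s) itself.  */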
1852
1853 /* Return a newly constructed COMPLEX_CST node whose value is
1854 specified by the real and imaginary parts REAL and IMAG.
1855 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1856 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1857
1858 tree
1859 build_complex (tree type, tree real, tree imag)
1860 {
1861 tree t = make_node (COMPLEX_CST);
1862
1863 TREE_REALPART (t) = real;
1864 TREE_IMAGPART (t) = imag;
1865 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1866 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1867 return t;
1868 }
1869
1870 /* Return a constant of arithmetic type TYPE which is the
1871 multiplicative identity of the set TYPE. */
1872
1873 tree
1874 build_one_cst (tree type)
1875 {
1876 switch (TREE_CODE (type))
1877 {
1878 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1879 case POINTER_TYPE: case REFERENCE_TYPE:
1880 case OFFSET_TYPE:
1881 return build_int_cst (type, 1);
1882
1883 case REAL_TYPE:
1884 return build_real (type, dconst1);
1885
1886 case FIXED_POINT_TYPE:
1887 /* We can only generate 1 for accum types. */
1888 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1889 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1890
1891 case VECTOR_TYPE:
1892 {
1893 tree scalar = build_one_cst (TREE_TYPE (type));
1894
1895 return build_vector_from_val (type, scalar);
1896 }
1897
1898 case COMPLEX_TYPE:
1899 return build_complex (type,
1900 build_one_cst (TREE_TYPE (type)),
1901 build_zero_cst (TREE_TYPE (type)));
1902
1903 default:
1904 gcc_unreachable ();
1905 }
1906 }
1907
1908 /* Return an integer of type TYPE containing all 1's in as much precision as
1909 it contains, or a complex or vector whose subparts are such integers. */
1910
1911 tree
1912 build_all_ones_cst (tree type)
1913 {
1914 if (TREE_CODE (type) == COMPLEX_TYPE)
1915 {
1916 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1917 return build_complex (type, scalar, scalar);
1918 }
1919 else
1920 return build_minus_one_cst (type);
1921 }
1922
1923 /* Return a constant of arithmetic type TYPE which is the
1924 opposite of the multiplicative identity of the set TYPE. */
1925
1926 tree
1927 build_minus_one_cst (tree type)
1928 {
1929 switch (TREE_CODE (type))
1930 {
1931 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1932 case POINTER_TYPE: case REFERENCE_TYPE:
1933 case OFFSET_TYPE:
1934 return build_int_cst (type, -1);
1935
1936 case REAL_TYPE:
1937 return build_real (type, dconstm1);
1938
1939 case FIXED_POINT_TYPE:
1940 /* We can only generate -1 for accum types. */
1941 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1942 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1943 TYPE_MODE (type)));
1944
1945 case VECTOR_TYPE:
1946 {
1947 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1948
1949 return build_vector_from_val (type, scalar);
1950 }
1951
1952 case COMPLEX_TYPE:
1953 return build_complex (type,
1954 build_minus_one_cst (TREE_TYPE (type)),
1955 build_zero_cst (TREE_TYPE (type)));
1956
1957 default:
1958 gcc_unreachable ();
1959 }
1960 }
1961
1962 /* Build 0 constant of type TYPE. This is used by constructor folding
1963 and thus the constant should be represented in memory by
1964 zero(es). */
1965
1966 tree
1967 build_zero_cst (tree type)
1968 {
1969 switch (TREE_CODE (type))
1970 {
1971 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1972 case POINTER_TYPE: case REFERENCE_TYPE:
1973 case OFFSET_TYPE: case NULLPTR_TYPE:
1974 return build_int_cst (type, 0);
1975
1976 case REAL_TYPE:
1977 return build_real (type, dconst0);
1978
1979 case FIXED_POINT_TYPE:
1980 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1981
1982 case VECTOR_TYPE:
1983 {
1984 tree scalar = build_zero_cst (TREE_TYPE (type));
1985
1986 return build_vector_from_val (type, scalar);
1987 }
1988
1989 case COMPLEX_TYPE:
1990 {
1991 tree zero = build_zero_cst (TREE_TYPE (type));
1992
1993 return build_complex (type, zero, zero);
1994 }
1995
1996 default:
1997 if (!AGGREGATE_TYPE_P (type))
1998 return fold_convert (type, integer_zero_node);
1999 return build_constructor (type, NULL);
2000 }
2001 }
2002
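/* Illustrative sketch: the three builders above compose naturally when a
   caller needs the additive or multiplicative identity for an arbitrary
   arithmetic type.  The helper name below is hypothetical.

     static tree
     build_identity_example (tree type, bool multiplicative)
     {
       /* build_zero_cst also copes with aggregates; build_one_cst
          expects an arithmetic, pointer or offset type.  */
       return multiplicative ? build_one_cst (type) : build_zero_cst (type);
     }

   For example, build_identity_example (double_type_node, true) yields the
   REAL_CST 1.0 and build_identity_example (integer_type_node, false)
   yields integer zero.  */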
2003
2004 /* Build a BINFO with room for BASE_BINFOS base binfos. */
2005
2006 tree
2007 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2008 {
2009 tree t;
2010 size_t length = (offsetof (struct tree_binfo, base_binfos)
2011 + vec<tree, va_gc>::embedded_size (base_binfos));
2012
2013 record_node_allocation_statistics (TREE_BINFO, length);
2014
2015 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2016
2017 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2018
2019 TREE_SET_CODE (t, TREE_BINFO);
2020
2021 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2022
2023 return t;
2024 }
2025
2026 /* Create a CASE_LABEL_EXPR tree node and return it. */
2027
2028 tree
2029 build_case_label (tree low_value, tree high_value, tree label_decl)
2030 {
2031 tree t = make_node (CASE_LABEL_EXPR);
2032
2033 TREE_TYPE (t) = void_type_node;
2034 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2035
2036 CASE_LOW (t) = low_value;
2037 CASE_HIGH (t) = high_value;
2038 CASE_LABEL (t) = label_decl;
2039 CASE_CHAIN (t) = NULL_TREE;
2040
2041 return t;
2042 }
2043
2044 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2045 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2046 The latter determines the length of the HOST_WIDE_INT vector. */
2047
2048 tree
2049 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2050 {
2051 tree t;
2052 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2053 + sizeof (struct tree_int_cst));
2054
2055 gcc_assert (len);
2056 record_node_allocation_statistics (INTEGER_CST, length);
2057
2058 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2059
2060 TREE_SET_CODE (t, INTEGER_CST);
2061 TREE_INT_CST_NUNITS (t) = len;
2062 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2063 /* to_offset can only be applied to trees that are offset_int-sized
2064 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2065 must be exactly the precision of offset_int and so LEN is correct. */
2066 if (ext_len <= OFFSET_INT_ELTS)
2067 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2068 else
2069 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2070
2071 TREE_CONSTANT (t) = 1;
2072
2073 return t;
2074 }
2075
2076 /* Build a newly constructed TREE_VEC node of length LEN. */
2077
2078 tree
2079 make_tree_vec_stat (int len MEM_STAT_DECL)
2080 {
2081 tree t;
2082 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2083
2084 record_node_allocation_statistics (TREE_VEC, length);
2085
2086 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2087
2088 TREE_SET_CODE (t, TREE_VEC);
2089 TREE_VEC_LENGTH (t) = len;
2090
2091 return t;
2092 }
2093
2094 /* Grow a TREE_VEC node to new length LEN. */
2095
2096 tree
2097 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2098 {
2099 gcc_assert (TREE_CODE (v) == TREE_VEC);
2100
2101 int oldlen = TREE_VEC_LENGTH (v);
2102 gcc_assert (len > oldlen);
2103
2104 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2105 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2106
2107 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2108
2109 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2110
2111 TREE_VEC_LENGTH (v) = len;
2112
2113 return v;
2114 }
2115 \f
2116 /* Return 1 if EXPR is the integer constant zero or a complex constant
2117 of zero. */
2118
2119 int
2120 integer_zerop (const_tree expr)
2121 {
2122 STRIP_NOPS (expr);
2123
2124 switch (TREE_CODE (expr))
2125 {
2126 case INTEGER_CST:
2127 return wi::eq_p (expr, 0);
2128 case COMPLEX_CST:
2129 return (integer_zerop (TREE_REALPART (expr))
2130 && integer_zerop (TREE_IMAGPART (expr)));
2131 case VECTOR_CST:
2132 {
2133 unsigned i;
2134 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2135 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2136 return false;
2137 return true;
2138 }
2139 default:
2140 return false;
2141 }
2142 }
2143
2144 /* Return 1 if EXPR is the integer constant one or the corresponding
2145 complex constant. */
2146
2147 int
2148 integer_onep (const_tree expr)
2149 {
2150 STRIP_NOPS (expr);
2151
2152 switch (TREE_CODE (expr))
2153 {
2154 case INTEGER_CST:
2155 return wi::eq_p (wi::to_widest (expr), 1);
2156 case COMPLEX_CST:
2157 return (integer_onep (TREE_REALPART (expr))
2158 && integer_zerop (TREE_IMAGPART (expr)));
2159 case VECTOR_CST:
2160 {
2161 unsigned i;
2162 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2163 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2164 return false;
2165 return true;
2166 }
2167 default:
2168 return false;
2169 }
2170 }
2171
2172 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2173 it contains, or a complex or vector whose subparts are such integers. */
2174
2175 int
2176 integer_all_onesp (const_tree expr)
2177 {
2178 STRIP_NOPS (expr);
2179
2180 if (TREE_CODE (expr) == COMPLEX_CST
2181 && integer_all_onesp (TREE_REALPART (expr))
2182 && integer_all_onesp (TREE_IMAGPART (expr)))
2183 return 1;
2184
2185 else if (TREE_CODE (expr) == VECTOR_CST)
2186 {
2187 unsigned i;
2188 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2189 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2190 return 0;
2191 return 1;
2192 }
2193
2194 else if (TREE_CODE (expr) != INTEGER_CST)
2195 return 0;
2196
2197 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2198 }
2199
2200 /* Return 1 if EXPR is the integer constant minus one. */
2201
2202 int
2203 integer_minus_onep (const_tree expr)
2204 {
2205 STRIP_NOPS (expr);
2206
2207 if (TREE_CODE (expr) == COMPLEX_CST)
2208 return (integer_all_onesp (TREE_REALPART (expr))
2209 && integer_zerop (TREE_IMAGPART (expr)));
2210 else
2211 return integer_all_onesp (expr);
2212 }
2213
2214 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2215 one bit on). */
2216
2217 int
2218 integer_pow2p (const_tree expr)
2219 {
2220 STRIP_NOPS (expr);
2221
2222 if (TREE_CODE (expr) == COMPLEX_CST
2223 && integer_pow2p (TREE_REALPART (expr))
2224 && integer_zerop (TREE_IMAGPART (expr)))
2225 return 1;
2226
2227 if (TREE_CODE (expr) != INTEGER_CST)
2228 return 0;
2229
2230 return wi::popcount (expr) == 1;
2231 }
2232
2233 /* Return 1 if EXPR is an integer constant other than zero or a
2234 complex constant other than zero. */
2235
2236 int
2237 integer_nonzerop (const_tree expr)
2238 {
2239 STRIP_NOPS (expr);
2240
2241 return ((TREE_CODE (expr) == INTEGER_CST
2242 && !wi::eq_p (expr, 0))
2243 || (TREE_CODE (expr) == COMPLEX_CST
2244 && (integer_nonzerop (TREE_REALPART (expr))
2245 || integer_nonzerop (TREE_IMAGPART (expr)))));
2246 }
2247
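/* Illustrative sketch: these predicates are usually consulted after
   folding to catch algebraic identities cheaply.  A simplifier might do
   something along these lines (the helper name is hypothetical; real
   rules of this kind live in fold-const.c):

     static tree
     simplify_mult_example (tree type, tree op0, tree op1)
     {
       if (integer_zerop (op1))
         return build_zero_cst (type);  /* x * 0 -> 0, if op0 has no
                                           side effects */
       if (integer_onep (op1))
         return op0;                    /* x * 1 -> x */
       return fold_build2 (MULT_EXPR, type, op0, op1);
     }
*/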
2248 /* Return 1 if EXPR is the fixed-point constant zero. */
2249
2250 int
2251 fixed_zerop (const_tree expr)
2252 {
2253 return (TREE_CODE (expr) == FIXED_CST
2254 && TREE_FIXED_CST (expr).data.is_zero ());
2255 }
2256
2257 /* Return the base-2 logarithm of a tree node known to be a
2258 power of two. */
2259
2260 int
2261 tree_log2 (const_tree expr)
2262 {
2263 STRIP_NOPS (expr);
2264
2265 if (TREE_CODE (expr) == COMPLEX_CST)
2266 return tree_log2 (TREE_REALPART (expr));
2267
2268 return wi::exact_log2 (expr);
2269 }
2270
2271 /* Similar, but return the largest integer Y such that 2 ** Y is less
2272 than or equal to EXPR. */
2273
2274 int
2275 tree_floor_log2 (const_tree expr)
2276 {
2277 STRIP_NOPS (expr);
2278
2279 if (TREE_CODE (expr) == COMPLEX_CST)
2280 return tree_log2 (TREE_REALPART (expr));
2281
2282 return wi::floor_log2 (expr);
2283 }
2284
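/* Illustrative sketch: tree_log2 is only meaningful when integer_pow2p
   holds, while tree_floor_log2 is defined for any positive constant.  A
   division-by-power-of-two strength reduction might use it like this
   (hypothetical helper; assumes unsigned division):

     static tree
     udiv_pow2_example (tree type, tree op0, tree op1)
     {
       if (TREE_CODE (op1) == INTEGER_CST && integer_pow2p (op1))
         return fold_build2 (RSHIFT_EXPR, type, op0,
                             build_int_cst (integer_type_node,
                                            tree_log2 (op1)));
       return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
     }

   For op1 == 8 this rewrites op0 / 8 as op0 >> 3.  */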
2285 /* Return number of known trailing zero bits in EXPR, or, if the value of
2286 EXPR is known to be zero, the precision of its type. */
2287
2288 unsigned int
2289 tree_ctz (const_tree expr)
2290 {
2291 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2292 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2293 return 0;
2294
2295 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2296 switch (TREE_CODE (expr))
2297 {
2298 case INTEGER_CST:
2299 ret1 = wi::ctz (expr);
2300 return MIN (ret1, prec);
2301 case SSA_NAME:
2302 ret1 = wi::ctz (get_nonzero_bits (expr));
2303 return MIN (ret1, prec);
2304 case PLUS_EXPR:
2305 case MINUS_EXPR:
2306 case BIT_IOR_EXPR:
2307 case BIT_XOR_EXPR:
2308 case MIN_EXPR:
2309 case MAX_EXPR:
2310 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2311 if (ret1 == 0)
2312 return ret1;
2313 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2314 return MIN (ret1, ret2);
2315 case POINTER_PLUS_EXPR:
2316 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2317 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2318 /* Second operand is sizetype, which could be in theory
2319 wider than pointer's precision. Make sure we never
2320 return more than prec. */
2321 ret2 = MIN (ret2, prec);
2322 return MIN (ret1, ret2);
2323 case BIT_AND_EXPR:
2324 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2325 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2326 return MAX (ret1, ret2);
2327 case MULT_EXPR:
2328 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2329 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2330 return MIN (ret1 + ret2, prec);
2331 case LSHIFT_EXPR:
2332 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2333 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2334 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2335 {
2336 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2337 return MIN (ret1 + ret2, prec);
2338 }
2339 return ret1;
2340 case RSHIFT_EXPR:
2341 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2342 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2343 {
2344 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2345 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2346 if (ret1 > ret2)
2347 return ret1 - ret2;
2348 }
2349 return 0;
2350 case TRUNC_DIV_EXPR:
2351 case CEIL_DIV_EXPR:
2352 case FLOOR_DIV_EXPR:
2353 case ROUND_DIV_EXPR:
2354 case EXACT_DIV_EXPR:
2355 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2356 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2357 {
2358 int l = tree_log2 (TREE_OPERAND (expr, 1));
2359 if (l >= 0)
2360 {
2361 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2362 ret2 = l;
2363 if (ret1 > ret2)
2364 return ret1 - ret2;
2365 }
2366 }
2367 return 0;
2368 CASE_CONVERT:
2369 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2370 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2371 ret1 = prec;
2372 return MIN (ret1, prec);
2373 case SAVE_EXPR:
2374 return tree_ctz (TREE_OPERAND (expr, 0));
2375 case COND_EXPR:
2376 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2377 if (ret1 == 0)
2378 return 0;
2379 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2380 return MIN (ret1, ret2);
2381 case COMPOUND_EXPR:
2382 return tree_ctz (TREE_OPERAND (expr, 1));
2383 case ADDR_EXPR:
2384 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2385 if (ret1 > BITS_PER_UNIT)
2386 {
2387 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2388 return MIN (ret1, prec);
2389 }
2390 return 0;
2391 default:
2392 return 0;
2393 }
2394 }
2395
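/* Worked example for the recursion above: tree_ctz combines the known
   trailing-zero counts of the operands, e.g.

     ctz (x * y)  >= ctz (x) + ctz (y)
     ctz (x + y)  >= MIN (ctz (x), ctz (y))
     ctz (x & y)  >= MAX (ctz (x), ctz (y))

   so for a (hypothetical) expression (n * 8) + 16 the result is at least
   3, i.e. the value is known to be a multiple of 8, without any full
   range or bit tracking.  */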
2396 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2397 decimal float constants, so don't return 1 for them. */
2398
2399 int
2400 real_zerop (const_tree expr)
2401 {
2402 STRIP_NOPS (expr);
2403
2404 switch (TREE_CODE (expr))
2405 {
2406 case REAL_CST:
2407 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2408 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2409 case COMPLEX_CST:
2410 return real_zerop (TREE_REALPART (expr))
2411 && real_zerop (TREE_IMAGPART (expr));
2412 case VECTOR_CST:
2413 {
2414 unsigned i;
2415 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2416 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2417 return false;
2418 return true;
2419 }
2420 default:
2421 return false;
2422 }
2423 }
2424
2425 /* Return 1 if EXPR is the real constant one in real or complex form.
2426 Trailing zeroes matter for decimal float constants, so don't return
2427 1 for them. */
2428
2429 int
2430 real_onep (const_tree expr)
2431 {
2432 STRIP_NOPS (expr);
2433
2434 switch (TREE_CODE (expr))
2435 {
2436 case REAL_CST:
2437 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2438 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2439 case COMPLEX_CST:
2440 return real_onep (TREE_REALPART (expr))
2441 && real_zerop (TREE_IMAGPART (expr));
2442 case VECTOR_CST:
2443 {
2444 unsigned i;
2445 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2446 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2447 return false;
2448 return true;
2449 }
2450 default:
2451 return false;
2452 }
2453 }
2454
2455 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2456 matter for decimal float constants, so don't return 1 for them. */
2457
2458 int
2459 real_minus_onep (const_tree expr)
2460 {
2461 STRIP_NOPS (expr);
2462
2463 switch (TREE_CODE (expr))
2464 {
2465 case REAL_CST:
2466 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2467 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2468 case COMPLEX_CST:
2469 return real_minus_onep (TREE_REALPART (expr))
2470 && real_zerop (TREE_IMAGPART (expr));
2471 case VECTOR_CST:
2472 {
2473 unsigned i;
2474 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2475 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2476 return false;
2477 return true;
2478 }
2479 default:
2480 return false;
2481 }
2482 }
2483
2484 /* Nonzero if EXP is a constant or a cast of a constant. */
2485
2486 int
2487 really_constant_p (const_tree exp)
2488 {
2489 /* This is not quite the same as STRIP_NOPS. It does more. */
2490 while (CONVERT_EXPR_P (exp)
2491 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2492 exp = TREE_OPERAND (exp, 0);
2493 return TREE_CONSTANT (exp);
2494 }
2495 \f
2496 /* Return first list element whose TREE_VALUE is ELEM.
2497 Return 0 if ELEM is not in LIST. */
2498
2499 tree
2500 value_member (tree elem, tree list)
2501 {
2502 while (list)
2503 {
2504 if (elem == TREE_VALUE (list))
2505 return list;
2506 list = TREE_CHAIN (list);
2507 }
2508 return NULL_TREE;
2509 }
2510
2511 /* Return first list element whose TREE_PURPOSE is ELEM.
2512 Return 0 if ELEM is not in LIST. */
2513
2514 tree
2515 purpose_member (const_tree elem, tree list)
2516 {
2517 while (list)
2518 {
2519 if (elem == TREE_PURPOSE (list))
2520 return list;
2521 list = TREE_CHAIN (list);
2522 }
2523 return NULL_TREE;
2524 }
2525
2526 /* Return true if ELEM is in V. */
2527
2528 bool
2529 vec_member (const_tree elem, vec<tree, va_gc> *v)
2530 {
2531 unsigned ix;
2532 tree t;
2533 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2534 if (elem == t)
2535 return true;
2536 return false;
2537 }
2538
2539 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2540 NULL_TREE if CHAIN has fewer than IDX + 1 elements. */
2541
2542 tree
2543 chain_index (int idx, tree chain)
2544 {
2545 for (; chain && idx > 0; --idx)
2546 chain = TREE_CHAIN (chain);
2547 return chain;
2548 }
2549
2550 /* Return nonzero if ELEM is part of the chain CHAIN. */
2551
2552 int
2553 chain_member (const_tree elem, const_tree chain)
2554 {
2555 while (chain)
2556 {
2557 if (elem == chain)
2558 return 1;
2559 chain = DECL_CHAIN (chain);
2560 }
2561
2562 return 0;
2563 }
2564
2565 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2566 We expect a null pointer to mark the end of the chain.
2567 This is the Lisp primitive `length'. */
2568
2569 int
2570 list_length (const_tree t)
2571 {
2572 const_tree p = t;
2573 #ifdef ENABLE_TREE_CHECKING
2574 const_tree q = t;
2575 #endif
2576 int len = 0;
2577
2578 while (p)
2579 {
2580 p = TREE_CHAIN (p);
2581 #ifdef ENABLE_TREE_CHECKING
2582 if (len % 2)
2583 q = TREE_CHAIN (q);
2584 gcc_assert (p != q);
2585 #endif
2586 len++;
2587 }
2588
2589 return len;
2590 }
2591
2592 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2593 UNION_TYPE TYPE, or NULL_TREE if none. */
2594
2595 tree
2596 first_field (const_tree type)
2597 {
2598 tree t = TYPE_FIELDS (type);
2599 while (t && TREE_CODE (t) != FIELD_DECL)
2600 t = TREE_CHAIN (t);
2601 return t;
2602 }
2603
2604 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2605 by modifying the last node in chain 1 to point to chain 2.
2606 This is the Lisp primitive `nconc'. */
2607
2608 tree
2609 chainon (tree op1, tree op2)
2610 {
2611 tree t1;
2612
2613 if (!op1)
2614 return op2;
2615 if (!op2)
2616 return op1;
2617
2618 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2619 continue;
2620 TREE_CHAIN (t1) = op2;
2621
2622 #ifdef ENABLE_TREE_CHECKING
2623 {
2624 tree t2;
2625 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2626 gcc_assert (t2 != t1);
2627 }
2628 #endif
2629
2630 return op1;
2631 }
2632
2633 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2634
2635 tree
2636 tree_last (tree chain)
2637 {
2638 tree next;
2639 if (chain)
2640 while ((next = TREE_CHAIN (chain)))
2641 chain = next;
2642 return chain;
2643 }
2644
2645 /* Reverse the order of elements in the chain T,
2646 and return the new head of the chain (old last element). */
2647
2648 tree
2649 nreverse (tree t)
2650 {
2651 tree prev = 0, decl, next;
2652 for (decl = t; decl; decl = next)
2653 {
2654 /* We shouldn't be using this function to reverse BLOCK chains; we
2655 have blocks_nreverse for that. */
2656 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2657 next = TREE_CHAIN (decl);
2658 TREE_CHAIN (decl) = prev;
2659 prev = decl;
2660 }
2661 return prev;
2662 }
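/* Illustrative sketch: the TREE_CHAIN primitives above compose in the
   usual singly-linked-list fashion.  A front end building a declaration
   list back to front might write (more_decls_to_parse, build_next_decl
   and expected are hypothetical):

     tree decls = NULL_TREE;
     while (more_decls_to_parse ())
       {
         tree d = build_next_decl ();
         DECL_CHAIN (d) = decls;        /* push onto the front */
         decls = d;
       }
     decls = nreverse (decls);          /* restore source order */
     gcc_assert (list_length (decls) == expected);

   and chainon could then destructively append a second list.  */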
2663 \f
2664 /* Return a newly created TREE_LIST node whose
2665 purpose and value fields are PARM and VALUE. */
2666
2667 tree
2668 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2669 {
2670 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2671 TREE_PURPOSE (t) = parm;
2672 TREE_VALUE (t) = value;
2673 return t;
2674 }
2675
2676 /* Build a chain of TREE_LIST nodes from a vector. */
2677
2678 tree
2679 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2680 {
2681 tree ret = NULL_TREE;
2682 tree *pp = &ret;
2683 unsigned int i;
2684 tree t;
2685 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2686 {
2687 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2688 pp = &TREE_CHAIN (*pp);
2689 }
2690 return ret;
2691 }
2692
2693 /* Return a newly created TREE_LIST node whose
2694 purpose and value fields are PURPOSE and VALUE
2695 and whose TREE_CHAIN is CHAIN. */
2696
2697 tree
2698 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2699 {
2700 tree node;
2701
2702 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2703 memset (node, 0, sizeof (struct tree_common));
2704
2705 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2706
2707 TREE_SET_CODE (node, TREE_LIST);
2708 TREE_CHAIN (node) = chain;
2709 TREE_PURPOSE (node) = purpose;
2710 TREE_VALUE (node) = value;
2711 return node;
2712 }
2713
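/* Illustrative sketch: TREE_LIST nodes are the generic (purpose, value)
   association lists used for things like attribute arguments.  A
   two-element list can be built front to back with tree_cons:

     tree args = tree_cons (NULL_TREE,
                            build_int_cst (integer_type_node, 4),
                            build_tree_list (NULL_TREE, size_zero_node));

   Here list_length (args) == 2, TREE_VALUE (args) is the INTEGER_CST 4,
   and value_member / purpose_member can search the chain.  */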
2714 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2715 trees. */
2716
2717 vec<tree, va_gc> *
2718 ctor_to_vec (tree ctor)
2719 {
2720 vec<tree, va_gc> *vec;
2721 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2722 unsigned int ix;
2723 tree val;
2724
2725 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2726 vec->quick_push (val);
2727
2728 return vec;
2729 }
2730 \f
2731 /* Return the size nominally occupied by an object of type TYPE
2732 when it resides in memory. The value is measured in units of bytes,
2733 and its data type is that normally used for type sizes
2734 (which is the first type created by make_signed_type or
2735 make_unsigned_type). */
2736
2737 tree
2738 size_in_bytes (const_tree type)
2739 {
2740 tree t;
2741
2742 if (type == error_mark_node)
2743 return integer_zero_node;
2744
2745 type = TYPE_MAIN_VARIANT (type);
2746 t = TYPE_SIZE_UNIT (type);
2747
2748 if (t == 0)
2749 {
2750 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2751 return size_zero_node;
2752 }
2753
2754 return t;
2755 }
2756
2757 /* Return the size of TYPE (in bytes) as a wide integer
2758 or return -1 if the size can vary or is larger than an integer. */
2759
2760 HOST_WIDE_INT
2761 int_size_in_bytes (const_tree type)
2762 {
2763 tree t;
2764
2765 if (type == error_mark_node)
2766 return 0;
2767
2768 type = TYPE_MAIN_VARIANT (type);
2769 t = TYPE_SIZE_UNIT (type);
2770
2771 if (t && tree_fits_uhwi_p (t))
2772 return TREE_INT_CST_LOW (t);
2773 else
2774 return -1;
2775 }
2776
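/* Illustrative sketch: int_size_in_bytes is the convenient form when a
   host integer is wanted and -1 is an acceptable "don't know", while
   size_in_bytes always yields a tree, possibly non-constant for
   variable-sized types.  A caller might combine them as follows
   (size_expr is a hypothetical local):

     HOST_WIDE_INT sz = int_size_in_bytes (type);
     tree size_expr;
     if (sz == -1)
       size_expr = size_in_bytes (type);   /* variable or too large */
     else
       size_expr = size_int (sz);          /* sizetype constant */
*/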
2777 /* Return the maximum size of TYPE (in bytes) as a wide integer
2778 or return -1 if the size can vary or is larger than an integer. */
2779
2780 HOST_WIDE_INT
2781 max_int_size_in_bytes (const_tree type)
2782 {
2783 HOST_WIDE_INT size = -1;
2784 tree size_tree;
2785
2786 /* If this is an array type, check for a possible MAX_SIZE attached. */
2787
2788 if (TREE_CODE (type) == ARRAY_TYPE)
2789 {
2790 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2791
2792 if (size_tree && tree_fits_uhwi_p (size_tree))
2793 size = tree_to_uhwi (size_tree);
2794 }
2795
2796 /* If we still haven't been able to get a size, see if the language
2797 can compute a maximum size. */
2798
2799 if (size == -1)
2800 {
2801 size_tree = lang_hooks.types.max_size (type);
2802
2803 if (size_tree && tree_fits_uhwi_p (size_tree))
2804 size = tree_to_uhwi (size_tree);
2805 }
2806
2807 return size;
2808 }
2809 \f
2810 /* Return the bit position of FIELD, in bits from the start of the record.
2811 This is a tree of type bitsizetype. */
2812
2813 tree
2814 bit_position (const_tree field)
2815 {
2816 return bit_from_pos (DECL_FIELD_OFFSET (field),
2817 DECL_FIELD_BIT_OFFSET (field));
2818 }
2819
2820 /* Likewise, but return as an integer. It must be representable in
2821 that way (since it could be a signed value, we don't have the
2822 option of returning -1 like int_size_in_bytes can). */
2823
2824 HOST_WIDE_INT
2825 int_bit_position (const_tree field)
2826 {
2827 return tree_to_shwi (bit_position (field));
2828 }
2829 \f
2830 /* Return the byte position of FIELD, in bytes from the start of the record.
2831 This is a tree of type sizetype. */
2832
2833 tree
2834 byte_position (const_tree field)
2835 {
2836 return byte_from_pos (DECL_FIELD_OFFSET (field),
2837 DECL_FIELD_BIT_OFFSET (field));
2838 }
2839
2840 /* Likewise, but return as an integer. It must be representable in
2841 that way (since it could be a signed value, we don't have the
2842 option of returning -1 like int_size_in_bytes can). */
2843
2844 HOST_WIDE_INT
2845 int_byte_position (const_tree field)
2846 {
2847 return tree_to_shwi (byte_position (field));
2848 }
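/* Worked relation between the two views above: bit_position folds
   DECL_FIELD_BIT_OFFSET into the result, byte_position only its whole
   bytes, so for any laid-out FIELD_DECL F

     int_bit_position (f)
       == int_byte_position (f) * BITS_PER_UNIT
          + the sub-byte part of DECL_FIELD_BIT_OFFSET (f)

   and for an ordinary (non-bit-field) member the second term is zero,
   i.e. int_bit_position (f) == int_byte_position (f) * BITS_PER_UNIT.  */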
2849 \f
2850 /* Return the strictest alignment, in bits, that T is known to have. */
2851
2852 unsigned int
2853 expr_align (const_tree t)
2854 {
2855 unsigned int align0, align1;
2856
2857 switch (TREE_CODE (t))
2858 {
2859 CASE_CONVERT: case NON_LVALUE_EXPR:
2860 /* If we have conversions, we know that the alignment of the
2861 object must meet each of the alignments of the types. */
2862 align0 = expr_align (TREE_OPERAND (t, 0));
2863 align1 = TYPE_ALIGN (TREE_TYPE (t));
2864 return MAX (align0, align1);
2865
2866 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2867 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2868 case CLEANUP_POINT_EXPR:
2869 /* These don't change the alignment of an object. */
2870 return expr_align (TREE_OPERAND (t, 0));
2871
2872 case COND_EXPR:
2873 /* The best we can do is say that the alignment is the least aligned
2874 of the two arms. */
2875 align0 = expr_align (TREE_OPERAND (t, 1));
2876 align1 = expr_align (TREE_OPERAND (t, 2));
2877 return MIN (align0, align1);
2878
2879 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2880 meaningfully, it's always 1. */
2881 case LABEL_DECL: case CONST_DECL:
2882 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2883 case FUNCTION_DECL:
2884 gcc_assert (DECL_ALIGN (t) != 0);
2885 return DECL_ALIGN (t);
2886
2887 default:
2888 break;
2889 }
2890
2891 /* Otherwise take the alignment from that of the type. */
2892 return TYPE_ALIGN (TREE_TYPE (t));
2893 }
2894 \f
2895 /* Return, as a tree node, the number of elements for TYPE (which is an
2896 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2897
2898 tree
2899 array_type_nelts (const_tree type)
2900 {
2901 tree index_type, min, max;
2902
2903 /* If they did it with unspecified bounds, then we should have already
2904 given an error about it before we got here. */
2905 if (! TYPE_DOMAIN (type))
2906 return error_mark_node;
2907
2908 index_type = TYPE_DOMAIN (type);
2909 min = TYPE_MIN_VALUE (index_type);
2910 max = TYPE_MAX_VALUE (index_type);
2911
2912 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2913 if (!max)
2914 return error_mark_node;
2915
2916 return (integer_zerop (min)
2917 ? max
2918 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2919 }
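/* Illustrative sketch: note that array_type_nelts returns the number of
   elements MINUS ONE (the upper bound of a zero-based domain).  For a C
   array type corresponding to "int a[10]" it returns the INTEGER_CST 9,
   so a caller that wants the element count adds one, e.g. (atype is a
   hypothetical ARRAY_TYPE):

     tree nelts = fold_build2 (PLUS_EXPR, sizetype,
                               array_type_nelts (atype), size_one_node);
*/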
2920 \f
2921 /* If arg is static -- a reference to an object in static storage -- then
2922 return the object. This is not the same as the C meaning of `static'.
2923 If arg isn't static, return NULL. */
2924
2925 tree
2926 staticp (tree arg)
2927 {
2928 switch (TREE_CODE (arg))
2929 {
2930 case FUNCTION_DECL:
2931 /* Nested functions are static, even though taking their address will
2932 involve a trampoline as we unnest the nested function and create
2933 the trampoline on the tree level. */
2934 return arg;
2935
2936 case VAR_DECL:
2937 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2938 && ! DECL_THREAD_LOCAL_P (arg)
2939 && ! DECL_DLLIMPORT_P (arg)
2940 ? arg : NULL);
2941
2942 case CONST_DECL:
2943 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2944 ? arg : NULL);
2945
2946 case CONSTRUCTOR:
2947 return TREE_STATIC (arg) ? arg : NULL;
2948
2949 case LABEL_DECL:
2950 case STRING_CST:
2951 return arg;
2952
2953 case COMPONENT_REF:
2954 /* If the thing being referenced is not a field, then it is
2955 something language specific. */
2956 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2957
2958 /* If we are referencing a bitfield, we can't evaluate an
2959 ADDR_EXPR at compile time and so it isn't a constant. */
2960 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2961 return NULL;
2962
2963 return staticp (TREE_OPERAND (arg, 0));
2964
2965 case BIT_FIELD_REF:
2966 return NULL;
2967
2968 case INDIRECT_REF:
2969 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2970
2971 case ARRAY_REF:
2972 case ARRAY_RANGE_REF:
2973 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2974 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2975 return staticp (TREE_OPERAND (arg, 0));
2976 else
2977 return NULL;
2978
2979 case COMPOUND_LITERAL_EXPR:
2980 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2981
2982 default:
2983 return NULL;
2984 }
2985 }
2986
2987 \f
2988
2989
2990 /* Return whether OP is a DECL whose address is function-invariant. */
2991
2992 bool
2993 decl_address_invariant_p (const_tree op)
2994 {
2995 /* The conditions below are slightly less strict than the one in
2996 staticp. */
2997
2998 switch (TREE_CODE (op))
2999 {
3000 case PARM_DECL:
3001 case RESULT_DECL:
3002 case LABEL_DECL:
3003 case FUNCTION_DECL:
3004 return true;
3005
3006 case VAR_DECL:
3007 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3008 || DECL_THREAD_LOCAL_P (op)
3009 || DECL_CONTEXT (op) == current_function_decl
3010 || decl_function_context (op) == current_function_decl)
3011 return true;
3012 break;
3013
3014 case CONST_DECL:
3015 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3016 || decl_function_context (op) == current_function_decl)
3017 return true;
3018 break;
3019
3020 default:
3021 break;
3022 }
3023
3024 return false;
3025 }
3026
3027 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3028
3029 bool
3030 decl_address_ip_invariant_p (const_tree op)
3031 {
3032 /* The conditions below are slightly less strict than the one in
3033 staticp. */
3034
3035 switch (TREE_CODE (op))
3036 {
3037 case LABEL_DECL:
3038 case FUNCTION_DECL:
3039 case STRING_CST:
3040 return true;
3041
3042 case VAR_DECL:
3043 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3044 && !DECL_DLLIMPORT_P (op))
3045 || DECL_THREAD_LOCAL_P (op))
3046 return true;
3047 break;
3048
3049 case CONST_DECL:
3050 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3051 return true;
3052 break;
3053
3054 default:
3055 break;
3056 }
3057
3058 return false;
3059 }
3060
3061
3062 /* Return true if T is function-invariant (internal function, does
3063 not handle arithmetic; that's handled in skip_simple_arithmetic and
3064 tree_invariant_p). */
3065
3066 static bool tree_invariant_p (tree t);
3067
3068 static bool
3069 tree_invariant_p_1 (tree t)
3070 {
3071 tree op;
3072
3073 if (TREE_CONSTANT (t)
3074 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3075 return true;
3076
3077 switch (TREE_CODE (t))
3078 {
3079 case SAVE_EXPR:
3080 return true;
3081
3082 case ADDR_EXPR:
3083 op = TREE_OPERAND (t, 0);
3084 while (handled_component_p (op))
3085 {
3086 switch (TREE_CODE (op))
3087 {
3088 case ARRAY_REF:
3089 case ARRAY_RANGE_REF:
3090 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3091 || TREE_OPERAND (op, 2) != NULL_TREE
3092 || TREE_OPERAND (op, 3) != NULL_TREE)
3093 return false;
3094 break;
3095
3096 case COMPONENT_REF:
3097 if (TREE_OPERAND (op, 2) != NULL_TREE)
3098 return false;
3099 break;
3100
3101 default:;
3102 }
3103 op = TREE_OPERAND (op, 0);
3104 }
3105
3106 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3107
3108 default:
3109 break;
3110 }
3111
3112 return false;
3113 }
3114
3115 /* Return true if T is function-invariant. */
3116
3117 static bool
3118 tree_invariant_p (tree t)
3119 {
3120 tree inner = skip_simple_arithmetic (t);
3121 return tree_invariant_p_1 (inner);
3122 }
3123
3124 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3125 Do this to any expression which may be used in more than one place,
3126 but must be evaluated only once.
3127
3128 Normally, expand_expr would reevaluate the expression each time.
3129 Calling save_expr produces something that is evaluated and recorded
3130 the first time expand_expr is called on it. Subsequent calls to
3131 expand_expr just reuse the recorded value.
3132
3133 The call to expand_expr that generates code that actually computes
3134 the value is the first call *at compile time*. Subsequent calls
3135 *at compile time* generate code to use the saved value.
3136 This produces correct result provided that *at run time* control
3137 always flows through the insns made by the first expand_expr
3138 before reaching the other places where the save_expr was evaluated.
3139 You, the caller of save_expr, must make sure this is so.
3140
3141 Constants, and certain read-only nodes, are returned with no
3142 SAVE_EXPR because that is safe. Expressions containing placeholders
3143 are not touched; see tree.def for an explanation of what these
3144 are used for. */
3145
3146 tree
3147 save_expr (tree expr)
3148 {
3149 tree t = fold (expr);
3150 tree inner;
3151
3152 /* If the tree evaluates to a constant, then we don't want to hide that
3153 fact (i.e. this allows further folding, and direct checks for constants).
3154 However, a read-only object that has side effects cannot be bypassed.
3155 Since it is no problem to reevaluate literals, we just return the
3156 literal node. */
3157 inner = skip_simple_arithmetic (t);
3158 if (TREE_CODE (inner) == ERROR_MARK)
3159 return inner;
3160
3161 if (tree_invariant_p_1 (inner))
3162 return t;
3163
3164 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3165 it means that the size or offset of some field of an object depends on
3166 the value within another field.
3167
3168 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3169 and some variable since it would then need to be both evaluated once and
3170 evaluated more than once. Front-ends must assure this case cannot
3171 happen by surrounding any such subexpressions in their own SAVE_EXPR
3172 and forcing evaluation at the proper time. */
3173 if (contains_placeholder_p (inner))
3174 return t;
3175
3176 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3177 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3178
3179 /* This expression might be placed ahead of a jump to ensure that the
3180 value was computed on both sides of the jump. So make sure it isn't
3181 eliminated as dead. */
3182 TREE_SIDE_EFFECTS (t) = 1;
3183 return t;
3184 }
3185
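/* Illustrative sketch: the classic client of save_expr is an expression
   that will be emitted more than once.  Hand-building a MIN-like value
   (hypothetical helper name):

     static tree
     build_min_example (tree type, tree a, tree b)
     {
       a = save_expr (a);
       b = save_expr (b);
       return fold_build3 (COND_EXPR, type,
                           fold_build2 (LT_EXPR, boolean_type_node, a, b),
                           a, b);
     }

   Without the save_exprs, A and B would each appear twice in the result
   and any side effects in them could be evaluated twice at run time.  */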
3186 /* Look inside EXPR into any simple arithmetic operations. Return the
3187 outermost non-arithmetic or non-invariant node. */
3188
3189 tree
3190 skip_simple_arithmetic (tree expr)
3191 {
3192 /* We don't care about whether this can be used as an lvalue in this
3193 context. */
3194 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3195 expr = TREE_OPERAND (expr, 0);
3196
3197 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3198 a constant, it will be more efficient to not make another SAVE_EXPR since
3199 it will allow better simplification and GCSE will be able to merge the
3200 computations if they actually occur. */
3201 while (true)
3202 {
3203 if (UNARY_CLASS_P (expr))
3204 expr = TREE_OPERAND (expr, 0);
3205 else if (BINARY_CLASS_P (expr))
3206 {
3207 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3208 expr = TREE_OPERAND (expr, 0);
3209 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3210 expr = TREE_OPERAND (expr, 1);
3211 else
3212 break;
3213 }
3214 else
3215 break;
3216 }
3217
3218 return expr;
3219 }
3220
3221 /* Look inside EXPR into simple arithmetic operations involving constants.
3222 Return the outermost non-arithmetic or non-constant node. */
3223
3224 tree
3225 skip_simple_constant_arithmetic (tree expr)
3226 {
3227 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3228 expr = TREE_OPERAND (expr, 0);
3229
3230 while (true)
3231 {
3232 if (UNARY_CLASS_P (expr))
3233 expr = TREE_OPERAND (expr, 0);
3234 else if (BINARY_CLASS_P (expr))
3235 {
3236 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3237 expr = TREE_OPERAND (expr, 0);
3238 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3239 expr = TREE_OPERAND (expr, 1);
3240 else
3241 break;
3242 }
3243 else
3244 break;
3245 }
3246
3247 return expr;
3248 }
3249
3250 /* Return which tree structure is used by T. */
3251
3252 enum tree_node_structure_enum
3253 tree_node_structure (const_tree t)
3254 {
3255 const enum tree_code code = TREE_CODE (t);
3256 return tree_node_structure_for_code (code);
3257 }
3258
3259 /* Set various status flags when building a CALL_EXPR object T. */
3260
3261 static void
3262 process_call_operands (tree t)
3263 {
3264 bool side_effects = TREE_SIDE_EFFECTS (t);
3265 bool read_only = false;
3266 int i = call_expr_flags (t);
3267
3268 /* Calls have side-effects, except those to const or pure functions. */
3269 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3270 side_effects = true;
3271 /* Propagate TREE_READONLY of arguments for const functions. */
3272 if (i & ECF_CONST)
3273 read_only = true;
3274
3275 if (!side_effects || read_only)
3276 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3277 {
3278 tree op = TREE_OPERAND (t, i);
3279 if (op && TREE_SIDE_EFFECTS (op))
3280 side_effects = true;
3281 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3282 read_only = false;
3283 }
3284
3285 TREE_SIDE_EFFECTS (t) = side_effects;
3286 TREE_READONLY (t) = read_only;
3287 }
3288 \f
3289 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3290 size or offset that depends on a field within a record. */
3291
3292 bool
3293 contains_placeholder_p (const_tree exp)
3294 {
3295 enum tree_code code;
3296
3297 if (!exp)
3298 return 0;
3299
3300 code = TREE_CODE (exp);
3301 if (code == PLACEHOLDER_EXPR)
3302 return 1;
3303
3304 switch (TREE_CODE_CLASS (code))
3305 {
3306 case tcc_reference:
3307 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3308 position computations since they will be converted into a
3309 WITH_RECORD_EXPR involving the reference, which we will assume
3310 here will be valid. */
3311 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3312
3313 case tcc_exceptional:
3314 if (code == TREE_LIST)
3315 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3316 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3317 break;
3318
3319 case tcc_unary:
3320 case tcc_binary:
3321 case tcc_comparison:
3322 case tcc_expression:
3323 switch (code)
3324 {
3325 case COMPOUND_EXPR:
3326 /* Ignoring the first operand isn't quite right, but works best. */
3327 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3328
3329 case COND_EXPR:
3330 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3331 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3332 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3333
3334 case SAVE_EXPR:
3335 /* The save_expr function never wraps anything containing
3336 a PLACEHOLDER_EXPR. */
3337 return 0;
3338
3339 default:
3340 break;
3341 }
3342
3343 switch (TREE_CODE_LENGTH (code))
3344 {
3345 case 1:
3346 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3347 case 2:
3348 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3349 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3350 default:
3351 return 0;
3352 }
3353
3354 case tcc_vl_exp:
3355 switch (code)
3356 {
3357 case CALL_EXPR:
3358 {
3359 const_tree arg;
3360 const_call_expr_arg_iterator iter;
3361 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3362 if (CONTAINS_PLACEHOLDER_P (arg))
3363 return 1;
3364 return 0;
3365 }
3366 default:
3367 return 0;
3368 }
3369
3370 default:
3371 return 0;
3372 }
3373 return 0;
3374 }
3375
3376 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3377 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3378 field positions. */
3379
3380 static bool
3381 type_contains_placeholder_1 (const_tree type)
3382 {
3383 /* If the size contains a placeholder or the parent type (component type in
3384 the case of arrays) involves a placeholder, this type does. */
3385 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3386 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3387 || (!POINTER_TYPE_P (type)
3388 && TREE_TYPE (type)
3389 && type_contains_placeholder_p (TREE_TYPE (type))))
3390 return true;
3391
3392 /* Now do type-specific checks. Note that the last part of the check above
3393 greatly limits what we have to do below. */
3394 switch (TREE_CODE (type))
3395 {
3396 case VOID_TYPE:
3397 case COMPLEX_TYPE:
3398 case ENUMERAL_TYPE:
3399 case BOOLEAN_TYPE:
3400 case POINTER_TYPE:
3401 case OFFSET_TYPE:
3402 case REFERENCE_TYPE:
3403 case METHOD_TYPE:
3404 case FUNCTION_TYPE:
3405 case VECTOR_TYPE:
3406 case NULLPTR_TYPE:
3407 return false;
3408
3409 case INTEGER_TYPE:
3410 case REAL_TYPE:
3411 case FIXED_POINT_TYPE:
3412 /* Here we just check the bounds. */
3413 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3414 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3415
3416 case ARRAY_TYPE:
3417 /* We have already checked the component type above, so just check the
3418 domain type. */
3419 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3420
3421 case RECORD_TYPE:
3422 case UNION_TYPE:
3423 case QUAL_UNION_TYPE:
3424 {
3425 tree field;
3426
3427 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3428 if (TREE_CODE (field) == FIELD_DECL
3429 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3430 || (TREE_CODE (type) == QUAL_UNION_TYPE
3431 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3432 || type_contains_placeholder_p (TREE_TYPE (field))))
3433 return true;
3434
3435 return false;
3436 }
3437
3438 default:
3439 gcc_unreachable ();
3440 }
3441 }
3442
3443 /* Wrapper around above function used to cache its result. */
3444
3445 bool
3446 type_contains_placeholder_p (tree type)
3447 {
3448 bool result;
3449
3450 /* If the contains_placeholder_bits field has been initialized,
3451 then we know the answer. */
3452 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3453 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3454
3455 /* Indicate that we've seen this type node, and the answer is false.
3456 This is what we want to return if we run into recursion via fields. */
3457 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3458
3459 /* Compute the real value. */
3460 result = type_contains_placeholder_1 (type);
3461
3462 /* Store the real value. */
3463 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3464
3465 return result;
3466 }
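/* Worked note on the cache above: TYPE_CONTAINS_PLACEHOLDER_INTERNAL
   stores the answer biased by one, so the field encodes three states:

     0   not yet computed
     1   computed (or computation in progress), answer is false
     2   computed, answer is true

   Pre-seeding the field with 1 before recursing is what makes
   self-referential types (e.g. a record containing a pointer to itself)
   terminate, with the conservative answer "false" used for the inner
   occurrence.  */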
3467 \f
3468 /* Push tree EXP onto vector QUEUE if it is not already present. */
3469
3470 static void
3471 push_without_duplicates (tree exp, vec<tree> *queue)
3472 {
3473 unsigned int i;
3474 tree iter;
3475
3476 FOR_EACH_VEC_ELT (*queue, i, iter)
3477 if (simple_cst_equal (iter, exp) == 1)
3478 break;
3479
3480 if (!iter)
3481 queue->safe_push (exp);
3482 }
3483
3484 /* Given a tree EXP, find all occurrences of references to fields
3485 in a PLACEHOLDER_EXPR and place them in vector REFS without
3486 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3487 we assume here that EXP contains only arithmetic expressions
3488 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3489 argument list. */
3490
3491 void
3492 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3493 {
3494 enum tree_code code = TREE_CODE (exp);
3495 tree inner;
3496 int i;
3497
3498 /* We handle TREE_LIST and COMPONENT_REF separately. */
3499 if (code == TREE_LIST)
3500 {
3501 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3502 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3503 }
3504 else if (code == COMPONENT_REF)
3505 {
3506 for (inner = TREE_OPERAND (exp, 0);
3507 REFERENCE_CLASS_P (inner);
3508 inner = TREE_OPERAND (inner, 0))
3509 ;
3510
3511 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3512 push_without_duplicates (exp, refs);
3513 else
3514 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3515 }
3516 else
3517 switch (TREE_CODE_CLASS (code))
3518 {
3519 case tcc_constant:
3520 break;
3521
3522 case tcc_declaration:
3523 /* Variables allocated to static storage can stay. */
3524 if (!TREE_STATIC (exp))
3525 push_without_duplicates (exp, refs);
3526 break;
3527
3528 case tcc_expression:
3529 /* This is the pattern built in ada/make_aligning_type. */
3530 if (code == ADDR_EXPR
3531 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3532 {
3533 push_without_duplicates (exp, refs);
3534 break;
3535 }
3536
3537 /* Fall through... */
3538
3539 case tcc_exceptional:
3540 case tcc_unary:
3541 case tcc_binary:
3542 case tcc_comparison:
3543 case tcc_reference:
3544 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3545 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3546 break;
3547
3548 case tcc_vl_exp:
3549 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3550 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3551 break;
3552
3553 default:
3554 gcc_unreachable ();
3555 }
3556 }
3557
3558 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3559 return a tree with all occurrences of references to F in a
3560 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3561 CONST_DECLs. Note that we assume here that EXP contains only
3562 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3563 occurring only in their argument list. */
3564
3565 tree
3566 substitute_in_expr (tree exp, tree f, tree r)
3567 {
3568 enum tree_code code = TREE_CODE (exp);
3569 tree op0, op1, op2, op3;
3570 tree new_tree;
3571
3572 /* We handle TREE_LIST and COMPONENT_REF separately. */
3573 if (code == TREE_LIST)
3574 {
3575 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3576 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3577 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3578 return exp;
3579
3580 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3581 }
3582 else if (code == COMPONENT_REF)
3583 {
3584 tree inner;
3585
3586 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3587 and it is the right field, replace it with R. */
3588 for (inner = TREE_OPERAND (exp, 0);
3589 REFERENCE_CLASS_P (inner);
3590 inner = TREE_OPERAND (inner, 0))
3591 ;
3592
3593 /* The field. */
3594 op1 = TREE_OPERAND (exp, 1);
3595
3596 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3597 return r;
3598
3599 /* If this expression hasn't been completed yet, leave it alone. */
3600 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3601 return exp;
3602
3603 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3604 if (op0 == TREE_OPERAND (exp, 0))
3605 return exp;
3606
3607 new_tree
3608 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3609 }
3610 else
3611 switch (TREE_CODE_CLASS (code))
3612 {
3613 case tcc_constant:
3614 return exp;
3615
3616 case tcc_declaration:
3617 if (exp == f)
3618 return r;
3619 else
3620 return exp;
3621
3622 case tcc_expression:
3623 if (exp == f)
3624 return r;
3625
3626 /* Fall through... */
3627
3628 case tcc_exceptional:
3629 case tcc_unary:
3630 case tcc_binary:
3631 case tcc_comparison:
3632 case tcc_reference:
3633 switch (TREE_CODE_LENGTH (code))
3634 {
3635 case 0:
3636 return exp;
3637
3638 case 1:
3639 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3640 if (op0 == TREE_OPERAND (exp, 0))
3641 return exp;
3642
3643 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3644 break;
3645
3646 case 2:
3647 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3648 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3649
3650 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3651 return exp;
3652
3653 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3654 break;
3655
3656 case 3:
3657 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3658 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3659 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3660
3661 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3662 && op2 == TREE_OPERAND (exp, 2))
3663 return exp;
3664
3665 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3666 break;
3667
3668 case 4:
3669 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3670 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3671 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3672 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3673
3674 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3675 && op2 == TREE_OPERAND (exp, 2)
3676 && op3 == TREE_OPERAND (exp, 3))
3677 return exp;
3678
3679 new_tree
3680 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3681 break;
3682
3683 default:
3684 gcc_unreachable ();
3685 }
3686 break;
3687
3688 case tcc_vl_exp:
3689 {
3690 int i;
3691
3692 new_tree = NULL_TREE;
3693
3694 /* If we are trying to replace F with a constant, inline back
3695 functions that do nothing other than compute a value from
3696 the arguments they are passed. This makes it possible to
3697 fold partially or entirely the replacement expression. */
3698 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3699 {
3700 tree t = maybe_inline_call_in_expr (exp);
3701 if (t)
3702 return SUBSTITUTE_IN_EXPR (t, f, r);
3703 }
3704
3705 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3706 {
3707 tree op = TREE_OPERAND (exp, i);
3708 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3709 if (new_op != op)
3710 {
3711 if (!new_tree)
3712 new_tree = copy_node (exp);
3713 TREE_OPERAND (new_tree, i) = new_op;
3714 }
3715 }
3716
3717 if (new_tree)
3718 {
3719 new_tree = fold (new_tree);
3720 if (TREE_CODE (new_tree) == CALL_EXPR)
3721 process_call_operands (new_tree);
3722 }
3723 else
3724 return exp;
3725 }
3726 break;
3727
3728 default:
3729 gcc_unreachable ();
3730 }
3731
3732 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3733
3734 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3735 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3736
3737 return new_tree;
3738 }
3739
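/* Illustrative sketch: SUBSTITUTE_IN_EXPR is mostly used for
   self-referential sizes (Ada).  If a field's size expression mentions
   another field F through a PLACEHOLDER_EXPR, the size for a particular
   value of F is obtained by substitution (variant_field and f_decl are
   hypothetical FIELD_DECLs):

     tree size = DECL_SIZE_UNIT (variant_field);   /* refers to F */
     tree concrete
       = SUBSTITUTE_IN_EXPR (size, f_decl,
                             build_int_cst (TREE_TYPE (f_decl), 3));

   The result is the size expression with every such reference replaced
   by the constant 3 and then folded.  */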
3740 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3741 for it within OBJ, a tree that is an object or a chain of references. */
3742
3743 tree
3744 substitute_placeholder_in_expr (tree exp, tree obj)
3745 {
3746 enum tree_code code = TREE_CODE (exp);
3747 tree op0, op1, op2, op3;
3748 tree new_tree;
3749
3750 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3751 in the chain of OBJ. */
3752 if (code == PLACEHOLDER_EXPR)
3753 {
3754 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3755 tree elt;
3756
3757 for (elt = obj; elt != 0;
3758 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3759 || TREE_CODE (elt) == COND_EXPR)
3760 ? TREE_OPERAND (elt, 1)
3761 : (REFERENCE_CLASS_P (elt)
3762 || UNARY_CLASS_P (elt)
3763 || BINARY_CLASS_P (elt)
3764 || VL_EXP_CLASS_P (elt)
3765 || EXPRESSION_CLASS_P (elt))
3766 ? TREE_OPERAND (elt, 0) : 0))
3767 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3768 return elt;
3769
3770 for (elt = obj; elt != 0;
3771 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3772 || TREE_CODE (elt) == COND_EXPR)
3773 ? TREE_OPERAND (elt, 1)
3774 : (REFERENCE_CLASS_P (elt)
3775 || UNARY_CLASS_P (elt)
3776 || BINARY_CLASS_P (elt)
3777 || VL_EXP_CLASS_P (elt)
3778 || EXPRESSION_CLASS_P (elt))
3779 ? TREE_OPERAND (elt, 0) : 0))
3780 if (POINTER_TYPE_P (TREE_TYPE (elt))
3781 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3782 == need_type))
3783 return fold_build1 (INDIRECT_REF, need_type, elt);
3784
3785 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3786 survives until RTL generation, there will be an error. */
3787 return exp;
3788 }
3789
3790 /* TREE_LIST is special because we need to look at TREE_VALUE
3791 and TREE_CHAIN, not TREE_OPERANDS. */
3792 else if (code == TREE_LIST)
3793 {
3794 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3795 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3796 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3797 return exp;
3798
3799 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3800 }
3801 else
3802 switch (TREE_CODE_CLASS (code))
3803 {
3804 case tcc_constant:
3805 case tcc_declaration:
3806 return exp;
3807
3808 case tcc_exceptional:
3809 case tcc_unary:
3810 case tcc_binary:
3811 case tcc_comparison:
3812 case tcc_expression:
3813 case tcc_reference:
3814 case tcc_statement:
3815 switch (TREE_CODE_LENGTH (code))
3816 {
3817 case 0:
3818 return exp;
3819
3820 case 1:
3821 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3822 if (op0 == TREE_OPERAND (exp, 0))
3823 return exp;
3824
3825 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3826 break;
3827
3828 case 2:
3829 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3830 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3831
3832 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3833 return exp;
3834
3835 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3836 break;
3837
3838 case 3:
3839 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3840 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3841 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3842
3843 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3844 && op2 == TREE_OPERAND (exp, 2))
3845 return exp;
3846
3847 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3848 break;
3849
3850 case 4:
3851 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3852 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3853 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3854 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3855
3856 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3857 && op2 == TREE_OPERAND (exp, 2)
3858 && op3 == TREE_OPERAND (exp, 3))
3859 return exp;
3860
3861 new_tree
3862 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3863 break;
3864
3865 default:
3866 gcc_unreachable ();
3867 }
3868 break;
3869
3870 case tcc_vl_exp:
3871 {
3872 int i;
3873
3874 new_tree = NULL_TREE;
3875
3876 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3877 {
3878 tree op = TREE_OPERAND (exp, i);
3879 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3880 if (new_op != op)
3881 {
3882 if (!new_tree)
3883 new_tree = copy_node (exp);
3884 TREE_OPERAND (new_tree, i) = new_op;
3885 }
3886 }
3887
3888 if (new_tree)
3889 {
3890 new_tree = fold (new_tree);
3891 if (TREE_CODE (new_tree) == CALL_EXPR)
3892 process_call_operands (new_tree);
3893 }
3894 else
3895 return exp;
3896 }
3897 break;
3898
3899 default:
3900 gcc_unreachable ();
3901 }
3902
3903 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3904
3905 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3906 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3907
3908 return new_tree;
3909 }
3910 \f
3911
3912 /* Subroutine of stabilize_reference; this is called for subtrees of
3913 references. Any expression with side-effects must be put in a SAVE_EXPR
3914 to ensure that it is only evaluated once.
3915
3916 We don't put SAVE_EXPR nodes around everything, because assigning very
3917 simple expressions to temporaries causes us to miss good opportunities
3918 for optimizations. Among other things, the opportunity to fold in the
3919 addition of a constant into an addressing mode often gets lost, e.g.
3920 "y[i+1] += x;". In general, we take the approach that we should not make
3921 an assignment unless we are forced into it - i.e., that any non-side effect
3922 operator should be allowed, and that cse should take care of coalescing
3923 multiple utterances of the same expression should that prove fruitful. */
3924
3925 static tree
3926 stabilize_reference_1 (tree e)
3927 {
3928 tree result;
3929 enum tree_code code = TREE_CODE (e);
3930
3931 /* We cannot ignore const expressions because it might be a reference
3932 to a const array but whose index contains side-effects. But we can
3933 ignore things that are actually constant or that have already been
3934 handled by this function. */
3935
3936 if (tree_invariant_p (e))
3937 return e;
3938
3939 switch (TREE_CODE_CLASS (code))
3940 {
3941 case tcc_exceptional:
3942 case tcc_type:
3943 case tcc_declaration:
3944 case tcc_comparison:
3945 case tcc_statement:
3946 case tcc_expression:
3947 case tcc_reference:
3948 case tcc_vl_exp:
3949 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3950 so that it will only be evaluated once. */
3951 /* The reference (r) and comparison (<) classes could be handled as
3952 below, but it is generally faster to only evaluate them once. */
3953 if (TREE_SIDE_EFFECTS (e))
3954 return save_expr (e);
3955 return e;
3956
3957 case tcc_constant:
3958 /* Constants need no processing. In fact, we should never reach
3959 here. */
3960 return e;
3961
3962 case tcc_binary:
3963 /* Division is slow and tends to be compiled with jumps,
3964 especially the division by powers of 2 that is often
3965 found inside of an array reference. So do it just once. */
3966 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3967 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3968 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3969 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3970 return save_expr (e);
3971 /* Recursively stabilize each operand. */
3972 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3973 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3974 break;
3975
3976 case tcc_unary:
3977 /* Recursively stabilize each operand. */
3978 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3979 break;
3980
3981 default:
3982 gcc_unreachable ();
3983 }
3984
3985 TREE_TYPE (result) = TREE_TYPE (e);
3986 TREE_READONLY (result) = TREE_READONLY (e);
3987 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3988 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3989
3990 return result;
3991 }
3992
3993 /* Stabilize a reference so that we can use it any number of times
3994 without causing its operands to be evaluated more than once.
3995 Returns the stabilized reference. This works by means of save_expr,
3996 so see the caveats in the comments about save_expr.
3997
3998 Also allows conversion expressions whose operands are references.
3999 Any other kind of expression is returned unchanged. */
4000
4001 tree
4002 stabilize_reference (tree ref)
4003 {
4004 tree result;
4005 enum tree_code code = TREE_CODE (ref);
4006
4007 switch (code)
4008 {
4009 case VAR_DECL:
4010 case PARM_DECL:
4011 case RESULT_DECL:
4012 /* No action is needed in this case. */
4013 return ref;
4014
4015 CASE_CONVERT:
4016 case FLOAT_EXPR:
4017 case FIX_TRUNC_EXPR:
4018 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4019 break;
4020
4021 case INDIRECT_REF:
4022 result = build_nt (INDIRECT_REF,
4023 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4024 break;
4025
4026 case COMPONENT_REF:
4027 result = build_nt (COMPONENT_REF,
4028 stabilize_reference (TREE_OPERAND (ref, 0)),
4029 TREE_OPERAND (ref, 1), NULL_TREE);
4030 break;
4031
4032 case BIT_FIELD_REF:
4033 result = build_nt (BIT_FIELD_REF,
4034 stabilize_reference (TREE_OPERAND (ref, 0)),
4035 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4036 break;
4037
4038 case ARRAY_REF:
4039 result = build_nt (ARRAY_REF,
4040 stabilize_reference (TREE_OPERAND (ref, 0)),
4041 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4042 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4043 break;
4044
4045 case ARRAY_RANGE_REF:
4046 result = build_nt (ARRAY_RANGE_REF,
4047 stabilize_reference (TREE_OPERAND (ref, 0)),
4048 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4049 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4050 break;
4051
4052 case COMPOUND_EXPR:
4053 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4054 it wouldn't be ignored. This matters when dealing with
4055 volatiles. */
4056 return stabilize_reference_1 (ref);
4057
4058 /* If arg isn't a kind of lvalue we recognize, make no change.
4059 Caller should recognize the error for an invalid lvalue. */
4060 default:
4061 return ref;
4062
4063 case ERROR_MARK:
4064 return error_mark_node;
4065 }
4066
4067 TREE_TYPE (result) = TREE_TYPE (ref);
4068 TREE_READONLY (result) = TREE_READONLY (ref);
4069 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4070 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4071
4072 return result;
4073 }
4074 \f
4075 /* Low-level constructors for expressions. */
4076
4077 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4078 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
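
/* Illustrative example: for an ADDR_EXPR such as &a[i], where A is a static
   array and I is a variable, the walk below sees the non-constant index and
   clears TREE_CONSTANT on the ADDR_EXPR; &a[3] would stay TREE_CONSTANT,
   and a call in the index would set TREE_SIDE_EFFECTS.  */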
4079
4080 void
4081 recompute_tree_invariant_for_addr_expr (tree t)
4082 {
4083 tree node;
4084 bool tc = true, se = false;
4085
4086 /* We started out assuming this address is both invariant and constant, and
4087 that it has no side effects. Now go down any handled components and see if
4088 any of them involve offsets that are either non-constant or non-invariant.
4089 Also check for side-effects.
4090
4091 ??? Note that this code makes no attempt to deal with the case where
4092 taking the address of something causes a copy due to misalignment. */
4093
4094 #define UPDATE_FLAGS(NODE) \
4095 do { tree _node = (NODE); \
4096 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4097 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4098
4099 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4100 node = TREE_OPERAND (node, 0))
4101 {
4102 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4103 array reference (probably made temporarily by the G++ front end),
4104 so ignore all the operands. */
4105 if ((TREE_CODE (node) == ARRAY_REF
4106 || TREE_CODE (node) == ARRAY_RANGE_REF)
4107 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4108 {
4109 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4110 if (TREE_OPERAND (node, 2))
4111 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4112 if (TREE_OPERAND (node, 3))
4113 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4114 }
4115 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4116 FIELD_DECL, apparently. The G++ front end can put something else
4117 there, at least temporarily. */
4118 else if (TREE_CODE (node) == COMPONENT_REF
4119 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4120 {
4121 if (TREE_OPERAND (node, 2))
4122 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4123 }
4124 }
4125
4126 node = lang_hooks.expr_to_decl (node, &tc, &se);
4127
4128 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4129 the address, since &(*a)->b is a form of addition. If it's a constant, the
4130 address is constant too. If it's a decl, its address is constant if the
4131 decl is static. Everything else is not constant and, furthermore,
4132 taking the address of a volatile variable is not volatile. */
4133 if (TREE_CODE (node) == INDIRECT_REF
4134 || TREE_CODE (node) == MEM_REF)
4135 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4136 else if (CONSTANT_CLASS_P (node))
4137 ;
4138 else if (DECL_P (node))
4139 tc &= (staticp (node) != NULL_TREE);
4140 else
4141 {
4142 tc = false;
4143 se |= TREE_SIDE_EFFECTS (node);
4144 }
4145
4146
4147 TREE_CONSTANT (t) = tc;
4148 TREE_SIDE_EFFECTS (t) = se;
4149 #undef UPDATE_FLAGS
4150 }
4151
4152 /* Build an expression of code CODE, data type TYPE, and operands as
4153 specified. Expressions and reference nodes can be created this way.
4154 Constants, decls, types and misc nodes cannot be.
4155
4156 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4157 enough for all extant tree codes. */
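
/* A purely illustrative sketch of typical use: with two existing integer
   trees A and B (hypothetical), a caller could write

     tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);

   and rely on build2_stat below to derive TREE_CONSTANT, TREE_READONLY and
   TREE_SIDE_EFFECTS for SUM from the corresponding flags of A and B.  */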
4158
4159 tree
4160 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4161 {
4162 tree t;
4163
4164 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4165
4166 t = make_node_stat (code PASS_MEM_STAT);
4167 TREE_TYPE (t) = tt;
4168
4169 return t;
4170 }
4171
4172 tree
4173 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4174 {
4175 int length = sizeof (struct tree_exp);
4176 tree t;
4177
4178 record_node_allocation_statistics (code, length);
4179
4180 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4181
4182 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4183
4184 memset (t, 0, sizeof (struct tree_common));
4185
4186 TREE_SET_CODE (t, code);
4187
4188 TREE_TYPE (t) = type;
4189 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4190 TREE_OPERAND (t, 0) = node;
4191 if (node && !TYPE_P (node))
4192 {
4193 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4194 TREE_READONLY (t) = TREE_READONLY (node);
4195 }
4196
4197 if (TREE_CODE_CLASS (code) == tcc_statement)
4198 TREE_SIDE_EFFECTS (t) = 1;
4199 else switch (code)
4200 {
4201 case VA_ARG_EXPR:
4202 /* All of these have side-effects, no matter what their
4203 operands are. */
4204 TREE_SIDE_EFFECTS (t) = 1;
4205 TREE_READONLY (t) = 0;
4206 break;
4207
4208 case INDIRECT_REF:
4209 /* Whether a dereference is readonly has nothing to do with whether
4210 its operand is readonly. */
4211 TREE_READONLY (t) = 0;
4212 break;
4213
4214 case ADDR_EXPR:
4215 if (node)
4216 recompute_tree_invariant_for_addr_expr (t);
4217 break;
4218
4219 default:
4220 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4221 && node && !TYPE_P (node)
4222 && TREE_CONSTANT (node))
4223 TREE_CONSTANT (t) = 1;
4224 if (TREE_CODE_CLASS (code) == tcc_reference
4225 && node && TREE_THIS_VOLATILE (node))
4226 TREE_THIS_VOLATILE (t) = 1;
4227 break;
4228 }
4229
4230 return t;
4231 }
4232
4233 #define PROCESS_ARG(N) \
4234 do { \
4235 TREE_OPERAND (t, N) = arg##N; \
4236 if (arg##N && !TYPE_P (arg##N)) \
4237 { \
4238 if (TREE_SIDE_EFFECTS (arg##N)) \
4239 side_effects = 1; \
4240 if (!TREE_READONLY (arg##N) \
4241 && !CONSTANT_CLASS_P (arg##N)) \
4242 (void) (read_only = 0); \
4243 if (!TREE_CONSTANT (arg##N)) \
4244 (void) (constant = 0); \
4245 } \
4246 } while (0)
4247
4248 tree
4249 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4250 {
4251 bool constant, read_only, side_effects;
4252 tree t;
4253
4254 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4255
4256 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4257 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4258 /* When sizetype precision doesn't match that of pointers
4259 we need to be able to build explicit extensions or truncations
4260 of the offset argument. */
4261 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4262 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4263 && TREE_CODE (arg1) == INTEGER_CST);
4264
4265 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4266 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4267 && ptrofftype_p (TREE_TYPE (arg1)));
4268
4269 t = make_node_stat (code PASS_MEM_STAT);
4270 TREE_TYPE (t) = tt;
4271
4272 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4273 result based on those same flags for the arguments. But if the
4274 arguments aren't really even `tree' expressions, we shouldn't be trying
4275 to do this. */
4276
4277 /* Expressions without side effects may be constant if their
4278 arguments are as well. */
4279 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4280 || TREE_CODE_CLASS (code) == tcc_binary);
4281 read_only = 1;
4282 side_effects = TREE_SIDE_EFFECTS (t);
4283
4284 PROCESS_ARG (0);
4285 PROCESS_ARG (1);
4286
4287 TREE_READONLY (t) = read_only;
4288 TREE_CONSTANT (t) = constant;
4289 TREE_SIDE_EFFECTS (t) = side_effects;
4290 TREE_THIS_VOLATILE (t)
4291 = (TREE_CODE_CLASS (code) == tcc_reference
4292 && arg0 && TREE_THIS_VOLATILE (arg0));
4293
4294 return t;
4295 }
4296
4297
4298 tree
4299 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4300 tree arg2 MEM_STAT_DECL)
4301 {
4302 bool constant, read_only, side_effects;
4303 tree t;
4304
4305 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4306 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4307
4308 t = make_node_stat (code PASS_MEM_STAT);
4309 TREE_TYPE (t) = tt;
4310
4311 read_only = 1;
4312
4313 /* As a special exception, if COND_EXPR has NULL branches, we
4314 assume that it is a gimple statement and always consider
4315 it to have side effects. */
4316 if (code == COND_EXPR
4317 && tt == void_type_node
4318 && arg1 == NULL_TREE
4319 && arg2 == NULL_TREE)
4320 side_effects = true;
4321 else
4322 side_effects = TREE_SIDE_EFFECTS (t);
4323
4324 PROCESS_ARG (0);
4325 PROCESS_ARG (1);
4326 PROCESS_ARG (2);
4327
4328 if (code == COND_EXPR)
4329 TREE_READONLY (t) = read_only;
4330
4331 TREE_SIDE_EFFECTS (t) = side_effects;
4332 TREE_THIS_VOLATILE (t)
4333 = (TREE_CODE_CLASS (code) == tcc_reference
4334 && arg0 && TREE_THIS_VOLATILE (arg0));
4335
4336 return t;
4337 }
4338
4339 tree
4340 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4341 tree arg2, tree arg3 MEM_STAT_DECL)
4342 {
4343 bool constant, read_only, side_effects;
4344 tree t;
4345
4346 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4347
4348 t = make_node_stat (code PASS_MEM_STAT);
4349 TREE_TYPE (t) = tt;
4350
4351 side_effects = TREE_SIDE_EFFECTS (t);
4352
4353 PROCESS_ARG (0);
4354 PROCESS_ARG (1);
4355 PROCESS_ARG (2);
4356 PROCESS_ARG (3);
4357
4358 TREE_SIDE_EFFECTS (t) = side_effects;
4359 TREE_THIS_VOLATILE (t)
4360 = (TREE_CODE_CLASS (code) == tcc_reference
4361 && arg0 && TREE_THIS_VOLATILE (arg0));
4362
4363 return t;
4364 }
4365
4366 tree
4367 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4368 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4369 {
4370 bool constant, read_only, side_effects;
4371 tree t;
4372
4373 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4374
4375 t = make_node_stat (code PASS_MEM_STAT);
4376 TREE_TYPE (t) = tt;
4377
4378 side_effects = TREE_SIDE_EFFECTS (t);
4379
4380 PROCESS_ARG (0);
4381 PROCESS_ARG (1);
4382 PROCESS_ARG (2);
4383 PROCESS_ARG (3);
4384 PROCESS_ARG (4);
4385
4386 TREE_SIDE_EFFECTS (t) = side_effects;
4387 TREE_THIS_VOLATILE (t)
4388 = (TREE_CODE_CLASS (code) == tcc_reference
4389 && arg0 && TREE_THIS_VOLATILE (arg0));
4390
4391 return t;
4392 }
4393
4394 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4395 on the pointer PTR. */
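
/* For illustration only: given a pointer tree P of type int * (hypothetical),
   build_simple_mem_ref_loc (loc, p) produces the equivalent of *p, i.e. a
   MEM_REF of type int whose second operand is a zero offset carrying the
   pointer type.  */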
4396
4397 tree
4398 build_simple_mem_ref_loc (location_t loc, tree ptr)
4399 {
4400 HOST_WIDE_INT offset = 0;
4401 tree ptype = TREE_TYPE (ptr);
4402 tree tem;
4403 /* For convenience allow addresses that collapse to a simple base
4404 and offset. */
4405 if (TREE_CODE (ptr) == ADDR_EXPR
4406 && (handled_component_p (TREE_OPERAND (ptr, 0))
4407 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4408 {
4409 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4410 gcc_assert (ptr);
4411 ptr = build_fold_addr_expr (ptr);
4412 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4413 }
4414 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4415 ptr, build_int_cst (ptype, offset));
4416 SET_EXPR_LOCATION (tem, loc);
4417 return tem;
4418 }
4419
4420 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4421
4422 offset_int
4423 mem_ref_offset (const_tree t)
4424 {
4425 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4426 }
4427
4428 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4429 offsetted by OFFSET units. */
4430
4431 tree
4432 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4433 {
4434 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4435 build_fold_addr_expr (base),
4436 build_int_cst (ptr_type_node, offset));
4437 tree addr = build1 (ADDR_EXPR, type, ref);
4438 recompute_tree_invariant_for_addr_expr (addr);
4439 return addr;
4440 }
4441
4442 /* Similar to the buildN functions above, except don't specify the TREE_TYPE
4443 and leave the TREE_SIDE_EFFECTS as 0.
4444 It is permissible for arguments to be null,
4445 or even garbage if their values do not matter. */
4446
4447 tree
4448 build_nt (enum tree_code code, ...)
4449 {
4450 tree t;
4451 int length;
4452 int i;
4453 va_list p;
4454
4455 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4456
4457 va_start (p, code);
4458
4459 t = make_node (code);
4460 length = TREE_CODE_LENGTH (code);
4461
4462 for (i = 0; i < length; i++)
4463 TREE_OPERAND (t, i) = va_arg (p, tree);
4464
4465 va_end (p);
4466 return t;
4467 }
4468
4469 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4470 tree vec. */
4471
4472 tree
4473 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4474 {
4475 tree ret, t;
4476 unsigned int ix;
4477
4478 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4479 CALL_EXPR_FN (ret) = fn;
4480 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4481 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4482 CALL_EXPR_ARG (ret, ix) = t;
4483 return ret;
4484 }
4485 \f
4486 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4487 We do NOT enter this node in any sort of symbol table.
4488
4489 LOC is the location of the decl.
4490
4491 layout_decl is used to set up the decl's storage layout.
4492 Other slots are initialized to 0 or null pointers. */
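
/* A small illustrative sketch (the names are hypothetical): a front end
   creating an artificial variable might write

     tree var = build_decl (loc, VAR_DECL, get_identifier ("tmp"),
                            integer_type_node);

   layout_decl is then run automatically because the code is VAR_DECL.  */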
4493
4494 tree
4495 build_decl_stat (location_t loc, enum tree_code code, tree name,
4496 tree type MEM_STAT_DECL)
4497 {
4498 tree t;
4499
4500 t = make_node_stat (code PASS_MEM_STAT);
4501 DECL_SOURCE_LOCATION (t) = loc;
4502
4503 /* if (type == error_mark_node)
4504 type = integer_type_node; */
4505 /* That is not done, deliberately, so that having error_mark_node
4506 as the type can suppress useless errors in the use of this variable. */
4507
4508 DECL_NAME (t) = name;
4509 TREE_TYPE (t) = type;
4510
4511 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4512 layout_decl (t, 0);
4513
4514 return t;
4515 }
4516
4517 /* Builds and returns function declaration with NAME and TYPE. */
4518
4519 tree
4520 build_fn_decl (const char *name, tree type)
4521 {
4522 tree id = get_identifier (name);
4523 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4524
4525 DECL_EXTERNAL (decl) = 1;
4526 TREE_PUBLIC (decl) = 1;
4527 DECL_ARTIFICIAL (decl) = 1;
4528 TREE_NOTHROW (decl) = 1;
4529
4530 return decl;
4531 }
4532
4533 vec<tree, va_gc> *all_translation_units;
4534
4535 /* Builds a new translation-unit decl with name NAME, queues it in the
4536 global list of translation-unit decls and returns it. */
4537
4538 tree
4539 build_translation_unit_decl (tree name)
4540 {
4541 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4542 name, NULL_TREE);
4543 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4544 vec_safe_push (all_translation_units, tu);
4545 return tu;
4546 }
4547
4548 \f
4549 /* BLOCK nodes are used to represent the structure of binding contours
4550 and declarations, once those contours have been exited and their contents
4551 compiled. This information is used for outputting debugging info. */
4552
4553 tree
4554 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4555 {
4556 tree block = make_node (BLOCK);
4557
4558 BLOCK_VARS (block) = vars;
4559 BLOCK_SUBBLOCKS (block) = subblocks;
4560 BLOCK_SUPERCONTEXT (block) = supercontext;
4561 BLOCK_CHAIN (block) = chain;
4562 return block;
4563 }
4564
4565 \f
4566 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4567
4568 LOC is the location to use in tree T. */
4569
4570 void
4571 protected_set_expr_location (tree t, location_t loc)
4572 {
4573 if (t && CAN_HAVE_LOCATION_P (t))
4574 SET_EXPR_LOCATION (t, loc);
4575 }
4576 \f
4577 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4578 is ATTRIBUTE. */
4579
4580 tree
4581 build_decl_attribute_variant (tree ddecl, tree attribute)
4582 {
4583 DECL_ATTRIBUTES (ddecl) = attribute;
4584 return ddecl;
4585 }
4586
4587 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4588 is ATTRIBUTE and its qualifiers are QUALS.
4589
4590 Record such modified types already made so we don't make duplicates. */
4591
4592 tree
4593 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4594 {
4595 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4596 {
4597 inchash::hash hstate;
4598 tree ntype;
4599 int i;
4600 tree t;
4601 enum tree_code code = TREE_CODE (ttype);
4602
4603 /* Building a distinct copy of a tagged type is inappropriate; it
4604 causes breakage in code that expects there to be a one-to-one
4605 relationship between a struct and its fields.
4606 build_duplicate_type is another solution (as used in
4607 handle_transparent_union_attribute), but that doesn't play well
4608 with the stronger C++ type identity model. */
4609 if (TREE_CODE (ttype) == RECORD_TYPE
4610 || TREE_CODE (ttype) == UNION_TYPE
4611 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4612 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4613 {
4614 warning (OPT_Wattributes,
4615 "ignoring attributes applied to %qT after definition",
4616 TYPE_MAIN_VARIANT (ttype));
4617 return build_qualified_type (ttype, quals);
4618 }
4619
4620 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4621 ntype = build_distinct_type_copy (ttype);
4622
4623 TYPE_ATTRIBUTES (ntype) = attribute;
4624
4625 hstate.add_int (code);
4626 if (TREE_TYPE (ntype))
4627 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4628 attribute_hash_list (attribute, hstate);
4629
4630 switch (TREE_CODE (ntype))
4631 {
4632 case FUNCTION_TYPE:
4633 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4634 break;
4635 case ARRAY_TYPE:
4636 if (TYPE_DOMAIN (ntype))
4637 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4638 break;
4639 case INTEGER_TYPE:
4640 t = TYPE_MAX_VALUE (ntype);
4641 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4642 hstate.add_object (TREE_INT_CST_ELT (t, i));
4643 break;
4644 case REAL_TYPE:
4645 case FIXED_POINT_TYPE:
4646 {
4647 unsigned int precision = TYPE_PRECISION (ntype);
4648 hstate.add_object (precision);
4649 }
4650 break;
4651 default:
4652 break;
4653 }
4654
4655 ntype = type_hash_canon (hstate.end(), ntype);
4656
4657 /* If the target-dependent attributes make NTYPE different from
4658 its canonical type, we will need to use structural equality
4659 checks for this type. */
4660 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4661 || !comp_type_attributes (ntype, ttype))
4662 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4663 else if (TYPE_CANONICAL (ntype) == ntype)
4664 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4665
4666 ttype = build_qualified_type (ntype, quals);
4667 }
4668 else if (TYPE_QUALS (ttype) != quals)
4669 ttype = build_qualified_type (ttype, quals);
4670
4671 return ttype;
4672 }
4673
4674 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4675 the same. */
4676
4677 static bool
4678 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4679 {
4680 tree cl1, cl2;
4681 for (cl1 = clauses1, cl2 = clauses2;
4682 cl1 && cl2;
4683 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4684 {
4685 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4686 return false;
4687 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4688 {
4689 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4690 OMP_CLAUSE_DECL (cl2)) != 1)
4691 return false;
4692 }
4693 switch (OMP_CLAUSE_CODE (cl1))
4694 {
4695 case OMP_CLAUSE_ALIGNED:
4696 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4697 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4698 return false;
4699 break;
4700 case OMP_CLAUSE_LINEAR:
4701 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4702 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4703 return false;
4704 break;
4705 case OMP_CLAUSE_SIMDLEN:
4706 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4707 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4708 return false;
4709 default:
4710 break;
4711 }
4712 }
4713 return true;
4714 }
4715
4716 /* Compare two constructor-element-type constants. Return true if the lists
4717 are known to be equal; otherwise return false. */
4718
4719 static bool
4720 simple_cst_list_equal (const_tree l1, const_tree l2)
4721 {
4722 while (l1 != NULL_TREE && l2 != NULL_TREE)
4723 {
4724 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4725 return false;
4726
4727 l1 = TREE_CHAIN (l1);
4728 l2 = TREE_CHAIN (l2);
4729 }
4730
4731 return l1 == l2;
4732 }
4733
4734 /* Compare two attributes for their value identity. Return true if the
4735 attribute values are known to be equal; otherwise return false.
4736 */
4737
4738 static bool
4739 attribute_value_equal (const_tree attr1, const_tree attr2)
4740 {
4741 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4742 return true;
4743
4744 if (TREE_VALUE (attr1) != NULL_TREE
4745 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4746 && TREE_VALUE (attr2) != NULL
4747 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4748 return (simple_cst_list_equal (TREE_VALUE (attr1),
4749 TREE_VALUE (attr2)) == 1);
4750
4751 if ((flag_openmp || flag_openmp_simd)
4752 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4753 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4754 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4755 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4756 TREE_VALUE (attr2));
4757
4758 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4759 }
4760
4761 /* Return 0 if the attributes for two types are incompatible, 1 if they
4762 are compatible, and 2 if they are nearly compatible (which causes a
4763 warning to be generated). */
4764 int
4765 comp_type_attributes (const_tree type1, const_tree type2)
4766 {
4767 const_tree a1 = TYPE_ATTRIBUTES (type1);
4768 const_tree a2 = TYPE_ATTRIBUTES (type2);
4769 const_tree a;
4770
4771 if (a1 == a2)
4772 return 1;
4773 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4774 {
4775 const struct attribute_spec *as;
4776 const_tree attr;
4777
4778 as = lookup_attribute_spec (get_attribute_name (a));
4779 if (!as || as->affects_type_identity == false)
4780 continue;
4781
4782 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4783 if (!attr || !attribute_value_equal (a, attr))
4784 break;
4785 }
4786 if (!a)
4787 {
4788 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4789 {
4790 const struct attribute_spec *as;
4791
4792 as = lookup_attribute_spec (get_attribute_name (a));
4793 if (!as || as->affects_type_identity == false)
4794 continue;
4795
4796 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4797 break;
4798 /* We don't need to compare trees again, as we did this
4799 already in the first loop. */
4800 }
4801 /* All attributes affecting type identity are equal, so
4802 there is no need to call the target hook for comparison. */
4803 if (!a)
4804 return 1;
4805 }
4806 /* As some type combinations - like default calling-convention - might
4807 be compatible, we have to call the target hook to get the final result. */
4808 return targetm.comp_type_attributes (type1, type2);
4809 }
4810
4811 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4812 is ATTRIBUTE.
4813
4814 Record such modified types already made so we don't make duplicates. */
4815
4816 tree
4817 build_type_attribute_variant (tree ttype, tree attribute)
4818 {
4819 return build_type_attribute_qual_variant (ttype, attribute,
4820 TYPE_QUALS (ttype));
4821 }
4822
4823
4824 /* Reset the expression *EXPR_P, a size or position.
4825
4826 ??? We could reset all non-constant sizes or positions. But it's cheap
4827 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4828
4829 We need to reset self-referential sizes or positions because they cannot
4830 be gimplified and thus can contain a CALL_EXPR after the gimplification
4831 is finished, which will run afoul of LTO streaming. And they need to be
4832 reset to something essentially dummy but not constant, so as to preserve
4833 the properties of the object they are attached to. */
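
/* Illustrative example: a self-referential field size such as
   MULT_EXPR <PLACEHOLDER_EXPR, 8>, as can arise for variable-sized fields,
   is replaced below by a bare PLACEHOLDER_EXPR of the same type, which is
   non-constant but safe to stream for LTO.  */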
4834
4835 static inline void
4836 free_lang_data_in_one_sizepos (tree *expr_p)
4837 {
4838 tree expr = *expr_p;
4839 if (CONTAINS_PLACEHOLDER_P (expr))
4840 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4841 }
4842
4843
4844 /* Reset all the fields in a binfo node BINFO. We only keep
4845 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4846
4847 static void
4848 free_lang_data_in_binfo (tree binfo)
4849 {
4850 unsigned i;
4851 tree t;
4852
4853 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4854
4855 BINFO_VIRTUALS (binfo) = NULL_TREE;
4856 BINFO_BASE_ACCESSES (binfo) = NULL;
4857 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4858 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4859
4860 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4861 free_lang_data_in_binfo (t);
4862 }
4863
4864
4865 /* Reset all language specific information still present in TYPE. */
4866
4867 static void
4868 free_lang_data_in_type (tree type)
4869 {
4870 gcc_assert (TYPE_P (type));
4871
4872 /* Give the FE a chance to remove its own data first. */
4873 lang_hooks.free_lang_data (type);
4874
4875 TREE_LANG_FLAG_0 (type) = 0;
4876 TREE_LANG_FLAG_1 (type) = 0;
4877 TREE_LANG_FLAG_2 (type) = 0;
4878 TREE_LANG_FLAG_3 (type) = 0;
4879 TREE_LANG_FLAG_4 (type) = 0;
4880 TREE_LANG_FLAG_5 (type) = 0;
4881 TREE_LANG_FLAG_6 (type) = 0;
4882
4883 if (TREE_CODE (type) == FUNCTION_TYPE)
4884 {
4885 /* Remove the const and volatile qualifiers from arguments. The
4886 C++ front end removes them, but the C front end does not,
4887 leading to false ODR violation errors when merging two
4888 instances of the same function signature compiled by
4889 different front ends. */
4890 tree p;
4891
4892 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4893 {
4894 tree arg_type = TREE_VALUE (p);
4895
4896 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4897 {
4898 int quals = TYPE_QUALS (arg_type)
4899 & ~TYPE_QUAL_CONST
4900 & ~TYPE_QUAL_VOLATILE;
4901 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4902 free_lang_data_in_type (TREE_VALUE (p));
4903 }
4904 }
4905 }
4906
4907 /* Remove members that are neither FIELD_DECLs nor TYPE_DECLs from the
4908 field list of an aggregate. These occur in C++. */
4909 if (RECORD_OR_UNION_TYPE_P (type))
4910 {
4911 tree prev, member;
4912
4913 /* Note that TYPE_FIELDS can be shared across distinct
4914 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4915 to be removed, we cannot set its TREE_CHAIN to NULL.
4916 Otherwise, we would not be able to find all the other fields
4917 in the other instances of this TREE_TYPE.
4918
4919 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4920 prev = NULL_TREE;
4921 member = TYPE_FIELDS (type);
4922 while (member)
4923 {
4924 if (TREE_CODE (member) == FIELD_DECL
4925 || TREE_CODE (member) == TYPE_DECL)
4926 {
4927 if (prev)
4928 TREE_CHAIN (prev) = member;
4929 else
4930 TYPE_FIELDS (type) = member;
4931 prev = member;
4932 }
4933
4934 member = TREE_CHAIN (member);
4935 }
4936
4937 if (prev)
4938 TREE_CHAIN (prev) = NULL_TREE;
4939 else
4940 TYPE_FIELDS (type) = NULL_TREE;
4941
4942 TYPE_METHODS (type) = NULL_TREE;
4943 if (TYPE_BINFO (type))
4944 free_lang_data_in_binfo (TYPE_BINFO (type));
4945 }
4946 else
4947 {
4948 /* For non-aggregate types, clear out the language slot (which
4949 overloads TYPE_BINFO). */
4950 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4951
4952 if (INTEGRAL_TYPE_P (type)
4953 || SCALAR_FLOAT_TYPE_P (type)
4954 || FIXED_POINT_TYPE_P (type))
4955 {
4956 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4957 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4958 }
4959 }
4960
4961 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4962 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4963
4964 if (TYPE_CONTEXT (type)
4965 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4966 {
4967 tree ctx = TYPE_CONTEXT (type);
4968 do
4969 {
4970 ctx = BLOCK_SUPERCONTEXT (ctx);
4971 }
4972 while (ctx && TREE_CODE (ctx) == BLOCK);
4973 TYPE_CONTEXT (type) = ctx;
4974 }
4975 }
4976
4977
4978 /* Return true if DECL may need an assembler name to be set. */
4979
4980 static inline bool
4981 need_assembler_name_p (tree decl)
4982 {
4983 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
4984 if (TREE_CODE (decl) != FUNCTION_DECL
4985 && TREE_CODE (decl) != VAR_DECL)
4986 return false;
4987
4988 /* If DECL already has its assembler name set, it does not need a
4989 new one. */
4990 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
4991 || DECL_ASSEMBLER_NAME_SET_P (decl))
4992 return false;
4993
4994 /* Abstract decls do not need an assembler name. */
4995 if (DECL_ABSTRACT (decl))
4996 return false;
4997
4998 /* For VAR_DECLs, only static, public and external symbols need an
4999 assembler name. */
5000 if (TREE_CODE (decl) == VAR_DECL
5001 && !TREE_STATIC (decl)
5002 && !TREE_PUBLIC (decl)
5003 && !DECL_EXTERNAL (decl))
5004 return false;
5005
5006 if (TREE_CODE (decl) == FUNCTION_DECL)
5007 {
5008 /* Do not set assembler name on builtins. Allow RTL expansion to
5009 decide whether to expand inline or via a regular call. */
5010 if (DECL_BUILT_IN (decl)
5011 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5012 return false;
5013
5014 /* Functions represented in the callgraph need an assembler name. */
5015 if (cgraph_node::get (decl) != NULL)
5016 return true;
5017
5018 /* Unused and not public functions don't need an assembler name. */
5019 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5020 return false;
5021 }
5022
5023 return true;
5024 }
5025
5026
5027 /* Reset all language specific information still present in symbol
5028 DECL. */
5029
5030 static void
5031 free_lang_data_in_decl (tree decl)
5032 {
5033 gcc_assert (DECL_P (decl));
5034
5035 /* Early dumping of DECLs before we lose language data. */
5036 if (debug_info_level > DINFO_LEVEL_NONE)
5037 dwarf2out_early_decl (decl);
5038
5039 /* Give the FE a chance to remove its own data first. */
5040 lang_hooks.free_lang_data (decl);
5041
5042 TREE_LANG_FLAG_0 (decl) = 0;
5043 TREE_LANG_FLAG_1 (decl) = 0;
5044 TREE_LANG_FLAG_2 (decl) = 0;
5045 TREE_LANG_FLAG_3 (decl) = 0;
5046 TREE_LANG_FLAG_4 (decl) = 0;
5047 TREE_LANG_FLAG_5 (decl) = 0;
5048 TREE_LANG_FLAG_6 (decl) = 0;
5049
5050 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5051 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5052 if (TREE_CODE (decl) == FIELD_DECL)
5053 {
5054 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5055 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5056 DECL_QUALIFIER (decl) = NULL_TREE;
5057 }
5058
5059 if (TREE_CODE (decl) == FUNCTION_DECL)
5060 {
5061 struct cgraph_node *node;
5062 if (!(node = cgraph_node::get (decl))
5063 || (!node->definition && !node->clones))
5064 {
5065 if (node)
5066 node->release_body ();
5067 else
5068 {
5069 release_function_body (decl);
5070 DECL_ARGUMENTS (decl) = NULL;
5071 DECL_RESULT (decl) = NULL;
5072 DECL_INITIAL (decl) = error_mark_node;
5073 }
5074 }
5075 if (gimple_has_body_p (decl))
5076 {
5077 tree t;
5078
5079 /* If DECL has a gimple body, then the context for its
5080 arguments must be DECL. Otherwise, it doesn't really
5081 matter, as we will not be emitting any code for DECL. In
5082 general, there may be other instances of DECL created by
5083 the front end and since PARM_DECLs are generally shared,
5084 their DECL_CONTEXT changes as the replicas of DECL are
5085 created. The only time where DECL_CONTEXT is important
5086 is for the FUNCTION_DECLs that have a gimple body (since
5087 the PARM_DECL will be used in the function's body). */
5088 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5089 DECL_CONTEXT (t) = decl;
5090 }
5091
5092 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5093 At this point, it is not needed anymore. */
5094 DECL_SAVED_TREE (decl) = NULL_TREE;
5095
5096 /* Clear the abstract origin if it refers to a method. Otherwise
5097 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5098 origin will not be output correctly. */
5099 if (DECL_ABSTRACT_ORIGIN (decl)
5100 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5101 && RECORD_OR_UNION_TYPE_P
5102 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5103 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5104
5105 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5106 DECL_VINDEX referring to itself into a vtable slot number as it
5107 should. Happens with functions that are copied and then forgotten
5108 about. Just clear it, it won't matter anymore. */
5109 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5110 DECL_VINDEX (decl) = NULL_TREE;
5111 }
5112 else if (TREE_CODE (decl) == VAR_DECL)
5113 {
5114 if ((DECL_EXTERNAL (decl)
5115 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5116 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5117 DECL_INITIAL (decl) = NULL_TREE;
5118 }
5119 else if (TREE_CODE (decl) == TYPE_DECL
5120 || TREE_CODE (decl) == FIELD_DECL)
5121 DECL_INITIAL (decl) = NULL_TREE;
5122 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5123 && DECL_INITIAL (decl)
5124 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5125 {
5126 /* Strip builtins from the translation-unit BLOCK. We still have targets
5127 without builtin_decl_explicit support, and builtins are shared
5128 nodes, so we can't use TREE_CHAIN in multiple lists. */
5129 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5130 while (*nextp)
5131 {
5132 tree var = *nextp;
5133 if (TREE_CODE (var) == FUNCTION_DECL
5134 && DECL_BUILT_IN (var))
5135 *nextp = TREE_CHAIN (var);
5136 else
5137 nextp = &TREE_CHAIN (var);
5138 }
5139 }
5140 }
5141
5142
5143 /* Data used when collecting DECLs and TYPEs for language data removal. */
5144
5145 struct free_lang_data_d
5146 {
5147 /* Worklist to avoid excessive recursion. */
5148 vec<tree> worklist;
5149
5150 /* Set of traversed objects. Used to avoid duplicate visits. */
5151 hash_set<tree> *pset;
5152
5153 /* Array of symbols to process with free_lang_data_in_decl. */
5154 vec<tree> decls;
5155
5156 /* Array of types to process with free_lang_data_in_type. */
5157 vec<tree> types;
5158 };
5159
5160
5161 /* Save all language fields needed to generate proper debug information
5162 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5163
5164 static void
5165 save_debug_info_for_decl (tree t)
5166 {
5167 /*struct saved_debug_info_d *sdi;*/
5168
5169 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5170
5171 /* FIXME. Partial implementation for saving debug info removed. */
5172 }
5173
5174
5175 /* Save all language fields needed to generate proper debug information
5176 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5177
5178 static void
5179 save_debug_info_for_type (tree t)
5180 {
5181 /*struct saved_debug_info_d *sdi;*/
5182
5183 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5184
5185 /* FIXME. Partial implementation for saving debug info removed. */
5186 }
5187
5188
5189 /* Add type or decl T to one of the list of tree nodes that need their
5190 language data removed. The lists are held inside FLD. */
5191
5192 static void
5193 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5194 {
5195 if (DECL_P (t))
5196 {
5197 fld->decls.safe_push (t);
5198 if (debug_info_level > DINFO_LEVEL_TERSE)
5199 save_debug_info_for_decl (t);
5200 }
5201 else if (TYPE_P (t))
5202 {
5203 fld->types.safe_push (t);
5204 if (debug_info_level > DINFO_LEVEL_TERSE)
5205 save_debug_info_for_type (t);
5206 }
5207 else
5208 gcc_unreachable ();
5209 }
5210
5211 /* Push tree node T into FLD->WORKLIST. */
5212
5213 static inline void
5214 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5215 {
5216 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5217 fld->worklist.safe_push ((t));
5218 }
5219
5220
5221 /* Operand callback helper for free_lang_data_in_node. *TP is the
5222 subtree operand being considered. */
5223
5224 static tree
5225 find_decls_types_r (tree *tp, int *ws, void *data)
5226 {
5227 tree t = *tp;
5228 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5229
5230 if (TREE_CODE (t) == TREE_LIST)
5231 return NULL_TREE;
5232
5233 /* Language specific nodes will be removed, so there is no need
5234 to gather anything under them. */
5235 if (is_lang_specific (t))
5236 {
5237 *ws = 0;
5238 return NULL_TREE;
5239 }
5240
5241 if (DECL_P (t))
5242 {
5243 /* Note that walk_tree does not traverse every possible field in
5244 decls, so we have to do our own traversals here. */
5245 add_tree_to_fld_list (t, fld);
5246
5247 fld_worklist_push (DECL_NAME (t), fld);
5248 fld_worklist_push (DECL_CONTEXT (t), fld);
5249 fld_worklist_push (DECL_SIZE (t), fld);
5250 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5251
5252 /* We are going to remove everything under DECL_INITIAL for
5253 TYPE_DECLs. No point walking them. */
5254 if (TREE_CODE (t) != TYPE_DECL)
5255 fld_worklist_push (DECL_INITIAL (t), fld);
5256
5257 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5258 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5259
5260 if (TREE_CODE (t) == FUNCTION_DECL)
5261 {
5262 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5263 fld_worklist_push (DECL_RESULT (t), fld);
5264 }
5265 else if (TREE_CODE (t) == TYPE_DECL)
5266 {
5267 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5268 }
5269 else if (TREE_CODE (t) == FIELD_DECL)
5270 {
5271 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5272 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5273 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5274 fld_worklist_push (DECL_FCONTEXT (t), fld);
5275 }
5276
5277 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5278 && DECL_HAS_VALUE_EXPR_P (t))
5279 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5280
5281 if (TREE_CODE (t) != FIELD_DECL
5282 && TREE_CODE (t) != TYPE_DECL)
5283 fld_worklist_push (TREE_CHAIN (t), fld);
5284 *ws = 0;
5285 }
5286 else if (TYPE_P (t))
5287 {
5288 /* Note that walk_tree does not traverse every possible field in
5289 types, so we have to do our own traversals here. */
5290 add_tree_to_fld_list (t, fld);
5291
5292 if (!RECORD_OR_UNION_TYPE_P (t))
5293 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5294 fld_worklist_push (TYPE_SIZE (t), fld);
5295 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5296 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5297 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5298 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5299 fld_worklist_push (TYPE_NAME (t), fld);
5300 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5301 them and thus do not want to reach unused pointer types
5302 this way. */
5303 if (!POINTER_TYPE_P (t))
5304 fld_worklist_push (TYPE_MINVAL (t), fld);
5305 if (!RECORD_OR_UNION_TYPE_P (t))
5306 fld_worklist_push (TYPE_MAXVAL (t), fld);
5307 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5308 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5309 do not want to reach unused variants this way. */
5310 if (TYPE_CONTEXT (t))
5311 {
5312 tree ctx = TYPE_CONTEXT (t);
5313 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5314 So push that instead. */
5315 while (ctx && TREE_CODE (ctx) == BLOCK)
5316 ctx = BLOCK_SUPERCONTEXT (ctx);
5317 fld_worklist_push (ctx, fld);
5318 }
5319 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5320 want to reach unused types this way. */
5321
5322 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5323 {
5324 unsigned i;
5325 tree tem;
5326 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5327 fld_worklist_push (TREE_TYPE (tem), fld);
5328 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5329 if (tem
5330 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5331 && TREE_CODE (tem) == TREE_LIST)
5332 do
5333 {
5334 fld_worklist_push (TREE_VALUE (tem), fld);
5335 tem = TREE_CHAIN (tem);
5336 }
5337 while (tem);
5338 }
5339 if (RECORD_OR_UNION_TYPE_P (t))
5340 {
5341 tree tem;
5342 /* Push all TYPE_FIELDS - the list can interleave interesting
5343 and non-interesting things. */
5344 tem = TYPE_FIELDS (t);
5345 while (tem)
5346 {
5347 if (TREE_CODE (tem) == FIELD_DECL
5348 || TREE_CODE (tem) == TYPE_DECL)
5349 fld_worklist_push (tem, fld);
5350 tem = TREE_CHAIN (tem);
5351 }
5352 }
5353
5354 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5355 *ws = 0;
5356 }
5357 else if (TREE_CODE (t) == BLOCK)
5358 {
5359 tree tem;
5360 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5361 fld_worklist_push (tem, fld);
5362 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5363 fld_worklist_push (tem, fld);
5364 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5365 }
5366
5367 if (TREE_CODE (t) != IDENTIFIER_NODE
5368 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5369 fld_worklist_push (TREE_TYPE (t), fld);
5370
5371 return NULL_TREE;
5372 }
5373
5374
5375 /* Find decls and types in T. */
5376
5377 static void
5378 find_decls_types (tree t, struct free_lang_data_d *fld)
5379 {
5380 while (1)
5381 {
5382 if (!fld->pset->contains (t))
5383 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5384 if (fld->worklist.is_empty ())
5385 break;
5386 t = fld->worklist.pop ();
5387 }
5388 }
5389
5390 /* Translate all the types in LIST into the corresponding runtime
5391 types. */
5392
5393 static tree
5394 get_eh_types_for_runtime (tree list)
5395 {
5396 tree head, prev;
5397
5398 if (list == NULL_TREE)
5399 return NULL_TREE;
5400
5401 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5402 prev = head;
5403 list = TREE_CHAIN (list);
5404 while (list)
5405 {
5406 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5407 TREE_CHAIN (prev) = n;
5408 prev = TREE_CHAIN (prev);
5409 list = TREE_CHAIN (list);
5410 }
5411
5412 return head;
5413 }
5414
5415
5416 /* Find decls and types referenced in EH region R and store them in
5417 FLD->DECLS and FLD->TYPES. */
5418
5419 static void
5420 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5421 {
5422 switch (r->type)
5423 {
5424 case ERT_CLEANUP:
5425 break;
5426
5427 case ERT_TRY:
5428 {
5429 eh_catch c;
5430
5431 /* The types referenced in each catch must first be changed to the
5432 EH types used at runtime. This removes references to FE types
5433 in the region. */
5434 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5435 {
5436 c->type_list = get_eh_types_for_runtime (c->type_list);
5437 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5438 }
5439 }
5440 break;
5441
5442 case ERT_ALLOWED_EXCEPTIONS:
5443 r->u.allowed.type_list
5444 = get_eh_types_for_runtime (r->u.allowed.type_list);
5445 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5446 break;
5447
5448 case ERT_MUST_NOT_THROW:
5449 walk_tree (&r->u.must_not_throw.failure_decl,
5450 find_decls_types_r, fld, fld->pset);
5451 break;
5452 }
5453 }
5454
5455
5456 /* Find decls and types referenced in cgraph node N and store them in
5457 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5458 look for *every* kind of DECL and TYPE node reachable from N,
5459 including those embedded inside types and decls (i.e., TYPE_DECLs,
5460 NAMESPACE_DECLs, etc.). */
5461
5462 static void
5463 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5464 {
5465 basic_block bb;
5466 struct function *fn;
5467 unsigned ix;
5468 tree t;
5469
5470 find_decls_types (n->decl, fld);
5471
5472 if (!gimple_has_body_p (n->decl))
5473 return;
5474
5475 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5476
5477 fn = DECL_STRUCT_FUNCTION (n->decl);
5478
5479 /* Traverse locals. */
5480 FOR_EACH_LOCAL_DECL (fn, ix, t)
5481 find_decls_types (t, fld);
5482
5483 /* Traverse EH regions in FN. */
5484 {
5485 eh_region r;
5486 FOR_ALL_EH_REGION_FN (r, fn)
5487 find_decls_types_in_eh_region (r, fld);
5488 }
5489
5490 /* Traverse every statement in FN. */
5491 FOR_EACH_BB_FN (bb, fn)
5492 {
5493 gimple_stmt_iterator si;
5494 unsigned i;
5495
5496 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5497 {
5498 gimple phi = gsi_stmt (si);
5499
5500 for (i = 0; i < gimple_phi_num_args (phi); i++)
5501 {
5502 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5503 find_decls_types (*arg_p, fld);
5504 }
5505 }
5506
5507 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5508 {
5509 gimple stmt = gsi_stmt (si);
5510
5511 if (is_gimple_call (stmt))
5512 find_decls_types (gimple_call_fntype (stmt), fld);
5513
5514 for (i = 0; i < gimple_num_ops (stmt); i++)
5515 {
5516 tree arg = gimple_op (stmt, i);
5517 find_decls_types (arg, fld);
5518 }
5519 }
5520 }
5521 }
5522
5523
5524 /* Find decls and types referenced in varpool node N and store them in
5525 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5526 look for *every* kind of DECL and TYPE node reachable from N,
5527 including those embedded inside types and decls (i.e., TYPE_DECLs,
5528 NAMESPACE_DECLs, etc.). */
5529
5530 static void
5531 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5532 {
5533 find_decls_types (v->decl, fld);
5534 }
5535
5536 /* If T needs an assembler name, have one created for it. */
5537
5538 void
5539 assign_assembler_name_if_neeeded (tree t)
5540 {
5541 if (need_assembler_name_p (t))
5542 {
5543 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5544 diagnostics that use input_location to show locus
5545 information. The problem here is that, at this point,
5546 input_location is generally anchored to the end of the file
5547 (since the parser is long gone), so we don't have a good
5548 position to pin it to.
5549
5550 To alleviate this problem, this uses the location of T's
5551 declaration. Examples of this are
5552 testsuite/g++.dg/template/cond2.C and
5553 testsuite/g++.dg/template/pr35240.C. */
5554 location_t saved_location = input_location;
5555 input_location = DECL_SOURCE_LOCATION (t);
5556
5557 decl_assembler_name (t);
5558
5559 input_location = saved_location;
5560 }
5561 }
5562
5563
5564 /* Free language specific information for every operand and expression
5565 in every node of the call graph. This process operates in three stages:
5566
5567 1- Every callgraph node and varpool node is traversed looking for
5568 decls and types embedded in them. This is a more exhaustive
5569 search than that done by find_referenced_vars, because it will
5570 also collect individual fields, decls embedded in types, etc.
5571
5572 2- All the decls found are sent to free_lang_data_in_decl.
5573
5574 3- All the types found are sent to free_lang_data_in_type.
5575
5576 The ordering between decls and types is important because
5577 free_lang_data_in_decl sets assembler names, which includes
5578 mangling. So types cannot be freed up until assembler names have
5579 been set up. */
5580
5581 static void
5582 free_lang_data_in_cgraph (void)
5583 {
5584 struct cgraph_node *n;
5585 varpool_node *v;
5586 struct free_lang_data_d fld;
5587 tree t;
5588 unsigned i;
5589 alias_pair *p;
5590
5591 /* Initialize sets and arrays to store referenced decls and types. */
5592 fld.pset = new hash_set<tree>;
5593 fld.worklist.create (0);
5594 fld.decls.create (100);
5595 fld.types.create (100);
5596
5597 /* Find decls and types in the body of every function in the callgraph. */
5598 FOR_EACH_FUNCTION (n)
5599 find_decls_types_in_node (n, &fld);
5600
5601 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5602 find_decls_types (p->decl, &fld);
5603
5604 /* Find decls and types in every varpool symbol. */
5605 FOR_EACH_VARIABLE (v)
5606 find_decls_types_in_var (v, &fld);
5607
5608 /* Set the assembler name on every decl found. We need to do this
5609 now because free_lang_data_in_decl will invalidate data needed
5610 for mangling. This breaks mangling on interdependent decls. */
5611 FOR_EACH_VEC_ELT (fld.decls, i, t)
5612 assign_assembler_name_if_neeeded (t);
5613
5614 /* Traverse every decl found freeing its language data. */
5615 FOR_EACH_VEC_ELT (fld.decls, i, t)
5616 free_lang_data_in_decl (t);
5617
5618 /* Traverse every type found freeing its language data. */
5619 FOR_EACH_VEC_ELT (fld.types, i, t)
5620 free_lang_data_in_type (t);
5621
5622 delete fld.pset;
5623 fld.worklist.release ();
5624 fld.decls.release ();
5625 fld.types.release ();
5626 }
5627
5628
5629 /* Free resources that are used by the FE but are not needed once it is done. */
5630
5631 static unsigned
5632 free_lang_data (void)
5633 {
5634 unsigned i;
5635
5636 /* If we are the LTO frontend we have freed lang-specific data already. */
5637 if (in_lto_p
5638 || !flag_generate_lto)
5639 return 0;
5640
5641 /* Allocate and assign alias sets to the standard integer types
5642 while the slots are still set up the way the frontends generated them. */
5643 for (i = 0; i < itk_none; ++i)
5644 if (integer_types[i])
5645 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5646
5647 /* Traverse the IL resetting language specific information for
5648 operands, expressions, etc. */
5649 free_lang_data_in_cgraph ();
5650
5651 /* Create gimple variants for common types. */
5652 ptrdiff_type_node = integer_type_node;
5653 fileptr_type_node = ptr_type_node;
5654
5655 /* Reset some langhooks. Do not reset types_compatible_p, it may
5656 still be used indirectly via the get_alias_set langhook. */
5657 lang_hooks.dwarf_name = lhd_dwarf_name;
5658 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5659 /* We do not want the default decl_assembler_name implementation,
5660 rather, once we have fixed everything, we want a wrapper around it
5661 asserting that all non-local symbols already got their assembler
5662 name and only produce assembler names for local symbols. Or rather
5663 make sure we never call decl_assembler_name on local symbols and
5664 devise a separate, middle-end private scheme for it. */
5665
5666 /* Reset diagnostic machinery. */
5667 tree_diagnostics_defaults (global_dc);
5668
5669 return 0;
5670 }
5671
5672
5673 namespace {
5674
5675 const pass_data pass_data_ipa_free_lang_data =
5676 {
5677 SIMPLE_IPA_PASS, /* type */
5678 "*free_lang_data", /* name */
5679 OPTGROUP_NONE, /* optinfo_flags */
5680 TV_IPA_FREE_LANG_DATA, /* tv_id */
5681 0, /* properties_required */
5682 0, /* properties_provided */
5683 0, /* properties_destroyed */
5684 0, /* todo_flags_start */
5685 0, /* todo_flags_finish */
5686 };
5687
5688 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5689 {
5690 public:
5691 pass_ipa_free_lang_data (gcc::context *ctxt)
5692 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5693 {}
5694
5695 /* opt_pass methods: */
5696 virtual unsigned int execute (function *) { return free_lang_data (); }
5697
5698 }; // class pass_ipa_free_lang_data
5699
5700 } // anon namespace
5701
5702 simple_ipa_opt_pass *
5703 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5704 {
5705 return new pass_ipa_free_lang_data (ctxt);
5706 }
5707
5708 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5709 ATTR_NAME. Also used internally by remove_attribute(). */
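
/* E.g. (illustrative): private_is_attribute_p ("packed", 6, ident) returns
   true when IDENT spells either "packed" or "__packed__", and false
   otherwise.  */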
5710 bool
5711 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5712 {
5713 size_t ident_len = IDENTIFIER_LENGTH (ident);
5714
5715 if (ident_len == attr_len)
5716 {
5717 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5718 return true;
5719 }
5720 else if (ident_len == attr_len + 4)
5721 {
5722 /* There is the possibility that ATTR_NAME is 'text' and IDENT is
5723 '__text__'. */
5724 const char *p = IDENTIFIER_POINTER (ident);
5725 if (p[0] == '_' && p[1] == '_'
5726 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5727 && strncmp (attr_name, p + 2, attr_len) == 0)
5728 return true;
5729 }
5730
5731 return false;
5732 }
5733
5734 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5735 of ATTR_NAME, and LIST is not NULL_TREE. */
5736 tree
5737 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5738 {
5739 while (list)
5740 {
5741 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5742
5743 if (ident_len == attr_len)
5744 {
5745 if (!strcmp (attr_name,
5746 IDENTIFIER_POINTER (get_attribute_name (list))))
5747 break;
5748 }
5749 /* TODO: If we made sure that attributes were stored in the
5750 canonical form without '__...__' (i.e., as in 'text' as opposed
5751 to '__text__') then we could avoid the following case. */
5752 else if (ident_len == attr_len + 4)
5753 {
5754 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5755 if (p[0] == '_' && p[1] == '_'
5756 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5757 && strncmp (attr_name, p + 2, attr_len) == 0)
5758 break;
5759 }
5760 list = TREE_CHAIN (list);
5761 }
5762
5763 return list;
5764 }
5765
5766 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5767 return a pointer to the first list element whose attribute name
5768 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5769 '__text__'). */
5770
5771 tree
5772 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5773 tree list)
5774 {
5775 while (list)
5776 {
5777 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5778
5779 if (attr_len > ident_len)
5780 {
5781 list = TREE_CHAIN (list);
5782 continue;
5783 }
5784
5785 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5786
5787 if (strncmp (attr_name, p, attr_len) == 0)
5788 break;
5789
5790 /* TODO: If we made sure that attributes were stored in the
5791 canonical form without '__...__' (i.e., as in 'text' as opposed
5792 to '__text__') then we could avoid the following case. */
5793 if (p[0] == '_' && p[1] == '_'
5794 && strncmp (attr_name, p + 2, attr_len) == 0)
5795 break;
5796
5797 list = TREE_CHAIN (list);
5798 }
5799
5800 return list;
5801 }
5802
5803
5804 /* A variant of lookup_attribute() that can be used with an identifier
5805 as the first argument, and where the identifier can be either
5806 'text' or '__text__'.
5807
5808 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5809 return a pointer to the attribute's list element if the attribute
5810 is part of the list, or NULL_TREE if not found. If the attribute
5811 appears more than once, this only returns the first occurrence; the
5812 TREE_CHAIN of the return value should be passed back in if further
5813 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5814 can be in the form 'text' or '__text__'. */
5815 static tree
5816 lookup_ident_attribute (tree attr_identifier, tree list)
5817 {
5818 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5819
5820 while (list)
5821 {
5822 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5823 == IDENTIFIER_NODE);
5824
5825 /* Identifiers can be compared directly for equality. */
5826 if (attr_identifier == get_attribute_name (list))
5827 break;
5828
5829 /* If they are not equal, they may still be one in the form
5830 'text' while the other one is in the form '__text__'. TODO:
5831 If we were storing attributes in normalized 'text' form, then
5832 this could all go away and we could take full advantage of
5833 the fact that we're comparing identifiers. :-) */
5834 {
5835 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5836 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5837
5838 if (ident_len == attr_len + 4)
5839 {
5840 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5841 const char *q = IDENTIFIER_POINTER (attr_identifier);
5842 if (p[0] == '_' && p[1] == '_'
5843 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5844 && strncmp (q, p + 2, attr_len) == 0)
5845 break;
5846 }
5847 else if (ident_len + 4 == attr_len)
5848 {
5849 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5850 const char *q = IDENTIFIER_POINTER (attr_identifier);
5851 if (q[0] == '_' && q[1] == '_'
5852 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5853 && strncmp (q + 2, p, ident_len) == 0)
5854 break;
5855 }
5856 }
5857 list = TREE_CHAIN (list);
5858 }
5859
5860 return list;
5861 }
5862
5863 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5864 modified list. */
5865
5866 tree
5867 remove_attribute (const char *attr_name, tree list)
5868 {
5869 tree *p;
5870 size_t attr_len = strlen (attr_name);
5871
5872 gcc_checking_assert (attr_name[0] != '_');
5873
5874 for (p = &list; *p; )
5875 {
5876 tree l = *p;
5877 /* TODO: If we were storing attributes in normalized form, here
5878 we could use a simple strcmp(). */
5879 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5880 *p = TREE_CHAIN (l);
5881 else
5882 p = &TREE_CHAIN (l);
5883 }
5884
5885 return list;
5886 }
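
/* Illustrative sketch (hypothetical decl NEWDECL), mirroring the dllimport
   handling further below: strip every "dllimport" attribute from a decl's
   attribute list and store the filtered list back.  Note that ATTR_NAME
   must be given in its canonical 'text' spelling; the assert above rejects
   the '__text__' form.

     DECL_ATTRIBUTES (newdecl)
       = remove_attribute ("dllimport", DECL_ATTRIBUTES (newdecl));  */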
5887
5888 /* Return an attribute list that is the union of a1 and a2. */
5889
5890 tree
5891 merge_attributes (tree a1, tree a2)
5892 {
5893 tree attributes;
5894
5895 /* Either one unset? Take the set one. */
5896
5897 if ((attributes = a1) == 0)
5898 attributes = a2;
5899
5900 /* One that completely contains the other? Take it. */
5901
5902 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5903 {
5904 if (attribute_list_contained (a2, a1))
5905 attributes = a2;
5906 else
5907 {
5908 /* Pick the longest list, and hang the other list on it. */
5909
5910 if (list_length (a1) < list_length (a2))
5911 attributes = a2, a2 = a1;
5912
5913 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5914 {
5915 tree a;
5916 for (a = lookup_ident_attribute (get_attribute_name (a2),
5917 attributes);
5918 a != NULL_TREE && !attribute_value_equal (a, a2);
5919 a = lookup_ident_attribute (get_attribute_name (a2),
5920 TREE_CHAIN (a)))
5921 ;
5922 if (a == NULL_TREE)
5923 {
5924 a1 = copy_node (a2);
5925 TREE_CHAIN (a1) = attributes;
5926 attributes = a1;
5927 }
5928 }
5929 }
5930 }
5931 return attributes;
5932 }
5933
5934 /* Given types T1 and T2, merge their attributes and return
5935 the result. */
5936
5937 tree
5938 merge_type_attributes (tree t1, tree t2)
5939 {
5940 return merge_attributes (TYPE_ATTRIBUTES (t1),
5941 TYPE_ATTRIBUTES (t2));
5942 }
5943
5944 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5945 the result. */
5946
5947 tree
5948 merge_decl_attributes (tree olddecl, tree newdecl)
5949 {
5950 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5951 DECL_ATTRIBUTES (newdecl));
5952 }
5953
5954 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5955
5956 /* Specialization of merge_decl_attributes for various Windows targets.
5957
5958 This handles the following situation:
5959
5960 __declspec (dllimport) int foo;
5961 int foo;
5962
5963 The second instance of `foo' nullifies the dllimport. */
5964
5965 tree
5966 merge_dllimport_decl_attributes (tree old, tree new_tree)
5967 {
5968 tree a;
5969 int delete_dllimport_p = 1;
5970
5971 /* What we need to do here is remove from `old' dllimport if it doesn't
5972 appear in `new'. dllimport behaves like extern: if a declaration is
5973 marked dllimport and a definition appears later, then the object
5974 is not dllimport'd. We also remove a `new' dllimport if the old list
5975 contains dllexport: dllexport always overrides dllimport, regardless
5976 of the order of declaration. */
5977 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
5978 delete_dllimport_p = 0;
5979 else if (DECL_DLLIMPORT_P (new_tree)
5980 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
5981 {
5982 DECL_DLLIMPORT_P (new_tree) = 0;
5983 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
5984 "dllimport ignored", new_tree);
5985 }
5986 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
5987 {
5988 /* Warn about overriding a symbol that has already been used, e.g.:
5989 extern int __attribute__ ((dllimport)) foo;
5990 int* bar () {return &foo;}
5991 int foo;
5992 */
5993 if (TREE_USED (old))
5994 {
5995 warning (0, "%q+D redeclared without dllimport attribute "
5996 "after being referenced with dll linkage", new_tree);
5997 /* If we have used a variable's address with dllimport linkage,
5998 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
5999 decl may already have had TREE_CONSTANT computed.
6000 We still remove the attribute so that assembler code refers
6001 to '&foo' rather than '_imp__foo'. */
6002 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6003 DECL_DLLIMPORT_P (new_tree) = 1;
6004 }
6005
6006 /* Let an inline definition silently override the external reference,
6007 but otherwise warn about attribute inconsistency. */
6008 else if (TREE_CODE (new_tree) == VAR_DECL
6009 || !DECL_DECLARED_INLINE_P (new_tree))
6010 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6011 "previous dllimport ignored", new_tree);
6012 }
6013 else
6014 delete_dllimport_p = 0;
6015
6016 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6017
6018 if (delete_dllimport_p)
6019 a = remove_attribute ("dllimport", a);
6020
6021 return a;
6022 }
6023
6024 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6025 struct attribute_spec.handler. */
6026
6027 tree
6028 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6029 bool *no_add_attrs)
6030 {
6031 tree node = *pnode;
6032 bool is_dllimport;
6033
6034 /* These attributes may apply to structure and union types being created,
6035 but otherwise should be passed on to the declaration involved. */
6036 if (!DECL_P (node))
6037 {
6038 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6039 | (int) ATTR_FLAG_ARRAY_NEXT))
6040 {
6041 *no_add_attrs = true;
6042 return tree_cons (name, args, NULL_TREE);
6043 }
6044 if (TREE_CODE (node) == RECORD_TYPE
6045 || TREE_CODE (node) == UNION_TYPE)
6046 {
6047 node = TYPE_NAME (node);
6048 if (!node)
6049 return NULL_TREE;
6050 }
6051 else
6052 {
6053 warning (OPT_Wattributes, "%qE attribute ignored",
6054 name);
6055 *no_add_attrs = true;
6056 return NULL_TREE;
6057 }
6058 }
6059
6060 if (TREE_CODE (node) != FUNCTION_DECL
6061 && TREE_CODE (node) != VAR_DECL
6062 && TREE_CODE (node) != TYPE_DECL)
6063 {
6064 *no_add_attrs = true;
6065 warning (OPT_Wattributes, "%qE attribute ignored",
6066 name);
6067 return NULL_TREE;
6068 }
6069
6070 if (TREE_CODE (node) == TYPE_DECL
6071 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6072 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6073 {
6074 *no_add_attrs = true;
6075 warning (OPT_Wattributes, "%qE attribute ignored",
6076 name);
6077 return NULL_TREE;
6078 }
6079
6080 is_dllimport = is_attribute_p ("dllimport", name);
6081
6082 /* Report an error on dllimport ambiguities now, before they cause
6083 any damage. */
6084 if (is_dllimport)
6085 {
6086 /* Honor any target-specific overrides. */
6087 if (!targetm.valid_dllimport_attribute_p (node))
6088 *no_add_attrs = true;
6089
6090 else if (TREE_CODE (node) == FUNCTION_DECL
6091 && DECL_DECLARED_INLINE_P (node))
6092 {
6093 warning (OPT_Wattributes, "inline function %q+D declared as "
6094 " dllimport: attribute ignored", node);
6095 *no_add_attrs = true;
6096 }
6097 /* Like MS, treat a definition of a dllimport'd variable or of a
6098 non-inlined dllimport'd function as an error. */
6099 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6100 {
6101 error ("function %q+D definition is marked dllimport", node);
6102 *no_add_attrs = true;
6103 }
6104
6105 else if (TREE_CODE (node) == VAR_DECL)
6106 {
6107 if (DECL_INITIAL (node))
6108 {
6109 error ("variable %q+D definition is marked dllimport",
6110 node);
6111 *no_add_attrs = true;
6112 }
6113
6114 /* `extern' needn't be specified with dllimport.
6115 Specify `extern' now and hope for the best. Sigh. */
6116 DECL_EXTERNAL (node) = 1;
6117 /* Also, implicitly give global scope to dllimport'd variables
6118 declared within a function, unless they are declared static. */
6119 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6120 TREE_PUBLIC (node) = 1;
6121 }
6122
6123 if (*no_add_attrs == false)
6124 DECL_DLLIMPORT_P (node) = 1;
6125 }
6126 else if (TREE_CODE (node) == FUNCTION_DECL
6127 && DECL_DECLARED_INLINE_P (node)
6128 && flag_keep_inline_dllexport)
6129 /* An exported function, even if inline, must be emitted. */
6130 DECL_EXTERNAL (node) = 0;
6131
6132 /* Report error if symbol is not accessible at global scope. */
6133 if (!TREE_PUBLIC (node)
6134 && (TREE_CODE (node) == VAR_DECL
6135 || TREE_CODE (node) == FUNCTION_DECL))
6136 {
6137 error ("external linkage required for symbol %q+D because of "
6138 "%qE attribute", node, name);
6139 *no_add_attrs = true;
6140 }
6141
6142 /* A dllexport'd entity must have default visibility so that other
6143 program units (shared libraries or the main executable) can see
6144 it. A dllimport'd entity must have default visibility so that
6145 the linker knows that undefined references within this program
6146 unit can be resolved by the dynamic linker. */
6147 if (!*no_add_attrs)
6148 {
6149 if (DECL_VISIBILITY_SPECIFIED (node)
6150 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6151 error ("%qE implies default visibility, but %qD has already "
6152 "been declared with a different visibility",
6153 name, node);
6154 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6155 DECL_VISIBILITY_SPECIFIED (node) = 1;
6156 }
6157
6158 return NULL_TREE;
6159 }
6160
6161 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6162 \f
6163 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6164 of the various TYPE_QUAL values. */
6165
6166 static void
6167 set_type_quals (tree type, int type_quals)
6168 {
6169 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6170 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6171 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6172 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6173 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6174 }
6175
6176 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6177
6178 bool
6179 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6180 {
6181 return (TYPE_QUALS (cand) == type_quals
6182 && TYPE_NAME (cand) == TYPE_NAME (base)
6183 /* Apparently this is needed for Objective-C. */
6184 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6185 /* Check alignment. */
6186 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6187 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6188 TYPE_ATTRIBUTES (base)));
6189 }
6190
6191 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6192
6193 static bool
6194 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6195 {
6196 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6197 && TYPE_NAME (cand) == TYPE_NAME (base)
6198 /* Apparently this is needed for Objective-C. */
6199 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6200 /* Check alignment. */
6201 && TYPE_ALIGN (cand) == align
6202 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6203 TYPE_ATTRIBUTES (base)));
6204 }
6205
6206 /* This function checks to see if TYPE matches the size of one of the
6207 built-in atomic types, and if so returns that core atomic type. */
6208
6209 static tree
6210 find_atomic_core_type (tree type)
6211 {
6212 tree base_atomic_type;
6213
6214 /* Only handle complete types. */
6215 if (TYPE_SIZE (type) == NULL_TREE)
6216 return NULL_TREE;
6217
6218 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6219 switch (type_size)
6220 {
6221 case 8:
6222 base_atomic_type = atomicQI_type_node;
6223 break;
6224
6225 case 16:
6226 base_atomic_type = atomicHI_type_node;
6227 break;
6228
6229 case 32:
6230 base_atomic_type = atomicSI_type_node;
6231 break;
6232
6233 case 64:
6234 base_atomic_type = atomicDI_type_node;
6235 break;
6236
6237 case 128:
6238 base_atomic_type = atomicTI_type_node;
6239 break;
6240
6241 default:
6242 base_atomic_type = NULL_TREE;
6243 }
6244
6245 return base_atomic_type;
6246 }
6247
6248 /* Return a version of the TYPE, qualified as indicated by the
6249 TYPE_QUALS, if one exists. If no qualified version exists yet,
6250 return NULL_TREE. */
6251
6252 tree
6253 get_qualified_type (tree type, int type_quals)
6254 {
6255 tree t;
6256
6257 if (TYPE_QUALS (type) == type_quals)
6258 return type;
6259
6260 /* Search the chain of variants to see if there is already one there just
6261 like the one we need to have. If so, use that existing one. We must
6262 preserve the TYPE_NAME, since there is code that depends on this. */
6263 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6264 if (check_qualified_type (t, type, type_quals))
6265 return t;
6266
6267 return NULL_TREE;
6268 }
6269
6270 /* Like get_qualified_type, but creates the type if it does not
6271 exist. This function never returns NULL_TREE. */
6272
6273 tree
6274 build_qualified_type (tree type, int type_quals)
6275 {
6276 tree t;
6277
6278 /* See if we already have the appropriate qualified variant. */
6279 t = get_qualified_type (type, type_quals);
6280
6281 /* If not, build it. */
6282 if (!t)
6283 {
6284 t = build_variant_type_copy (type);
6285 set_type_quals (t, type_quals);
6286
6287 if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6288 {
6289 /* See if this object can map to a basic atomic type. */
6290 tree atomic_type = find_atomic_core_type (type);
6291 if (atomic_type)
6292 {
6293 /* Ensure the alignment of this type is compatible with
6294 the required alignment of the atomic type. */
6295 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6296 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6297 }
6298 }
6299
6300 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6301 /* Propagate structural equality. */
6302 SET_TYPE_STRUCTURAL_EQUALITY (t);
6303 else if (TYPE_CANONICAL (type) != type)
6304 /* Build the underlying canonical type, since it is different
6305 from TYPE. */
6306 {
6307 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6308 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6309 }
6310 else
6311 /* T is its own canonical type. */
6312 TYPE_CANONICAL (t) = t;
6313
6314 }
6315
6316 return t;
6317 }
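
/* Usage sketch: building the type 'const volatile int' from the shared
   integer_type_node either finds an existing qualified variant on the
   main variant chain or creates and links a new one:

     tree cv_int
       = build_qualified_type (integer_type_node,
			       TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);  */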
6318
6319 /* Create a variant of type T with alignment ALIGN. */
6320
6321 tree
6322 build_aligned_type (tree type, unsigned int align)
6323 {
6324 tree t;
6325
6326 if (TYPE_PACKED (type)
6327 || TYPE_ALIGN (type) == align)
6328 return type;
6329
6330 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6331 if (check_aligned_type (t, type, align))
6332 return t;
6333
6334 t = build_variant_type_copy (type);
6335 TYPE_ALIGN (t) = align;
6336
6337 return t;
6338 }
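
/* Usage sketch: ALIGN is measured in bits, like TYPE_ALIGN, so requesting
   a 16-byte aligned variant of some (hypothetical) type T looks like:

     tree aligned_t = build_aligned_type (t, 128);  */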
6339
6340 /* Create a new distinct copy of TYPE. The new type is made its own
6341 MAIN_VARIANT. If TYPE requires structural equality checks, the
6342 resulting type requires structural equality checks; otherwise, its
6343 TYPE_CANONICAL points to itself. */
6344
6345 tree
6346 build_distinct_type_copy (tree type)
6347 {
6348 tree t = copy_node (type);
6349
6350 TYPE_POINTER_TO (t) = 0;
6351 TYPE_REFERENCE_TO (t) = 0;
6352
6353 /* Set the canonical type either to a new equivalence class, or
6354 propagate the need for structural equality checks. */
6355 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6356 SET_TYPE_STRUCTURAL_EQUALITY (t);
6357 else
6358 TYPE_CANONICAL (t) = t;
6359
6360 /* Make it its own variant. */
6361 TYPE_MAIN_VARIANT (t) = t;
6362 TYPE_NEXT_VARIANT (t) = 0;
6363
6364 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6365 whose TREE_TYPE is not t. This can also happen in the Ada
6366 frontend when using subtypes. */
6367
6368 return t;
6369 }
6370
6371 /* Create a new variant of TYPE, equivalent but distinct. This is so
6372 the caller can modify it. TYPE_CANONICAL for the return type will
6373 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6374 are considered equal by the language itself (or that both types
6375 require structural equality checks). */
6376
6377 tree
6378 build_variant_type_copy (tree type)
6379 {
6380 tree t, m = TYPE_MAIN_VARIANT (type);
6381
6382 t = build_distinct_type_copy (type);
6383
6384 /* Since we're building a variant, assume that it is a non-semantic
6385 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6386 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6387
6388 /* Add the new type to the chain of variants of TYPE. */
6389 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6390 TYPE_NEXT_VARIANT (m) = t;
6391 TYPE_MAIN_VARIANT (t) = m;
6392
6393 return t;
6394 }
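
/* Sketch of the difference between the two copies, for a hypothetical
   complete type T:

     tree v = build_variant_type_copy (t);
     tree d = build_distinct_type_copy (t);

   V shares TYPE_MAIN_VARIANT and TYPE_CANONICAL with T, whereas D is its
   own main variant and (unless structural equality is required) its own
   canonical type.  */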
6395 \f
6396 /* Return true if the from trees of the two tree maps are equal. */
6397
6398 int
6399 tree_map_base_eq (const void *va, const void *vb)
6400 {
6401 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6402 *const b = (const struct tree_map_base *) vb;
6403 return (a->from == b->from);
6404 }
6405
6406 /* Hash a from tree in a tree_map_base. */
6407
6408 unsigned int
6409 tree_map_base_hash (const void *item)
6410 {
6411 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6412 }
6413
6414 /* Return true if this tree map structure is marked for garbage collection
6415 purposes. We simply return true if the from tree is marked, so that this
6416 structure goes away when the from tree goes away. */
6417
6418 int
6419 tree_map_base_marked_p (const void *p)
6420 {
6421 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6422 }
6423
6424 /* Hash a from tree in a tree_map. */
6425
6426 unsigned int
6427 tree_map_hash (const void *item)
6428 {
6429 return (((const struct tree_map *) item)->hash);
6430 }
6431
6432 /* Hash a from tree in a tree_decl_map. */
6433
6434 unsigned int
6435 tree_decl_map_hash (const void *item)
6436 {
6437 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6438 }
6439
6440 /* Return the initialization priority for DECL. */
6441
6442 priority_type
6443 decl_init_priority_lookup (tree decl)
6444 {
6445 symtab_node *snode = symtab_node::get (decl);
6446
6447 if (!snode)
6448 return DEFAULT_INIT_PRIORITY;
6449 return
6450 snode->get_init_priority ();
6451 }
6452
6453 /* Return the finalization priority for DECL. */
6454
6455 priority_type
6456 decl_fini_priority_lookup (tree decl)
6457 {
6458 cgraph_node *node = cgraph_node::get (decl);
6459
6460 if (!node)
6461 return DEFAULT_INIT_PRIORITY;
6462 return
6463 node->get_fini_priority ();
6464 }
6465
6466 /* Set the initialization priority for DECL to PRIORITY. */
6467
6468 void
6469 decl_init_priority_insert (tree decl, priority_type priority)
6470 {
6471 struct symtab_node *snode;
6472
6473 if (priority == DEFAULT_INIT_PRIORITY)
6474 {
6475 snode = symtab_node::get (decl);
6476 if (!snode)
6477 return;
6478 }
6479 else if (TREE_CODE (decl) == VAR_DECL)
6480 snode = varpool_node::get_create (decl);
6481 else
6482 snode = cgraph_node::get_create (decl);
6483 snode->set_init_priority (priority);
6484 }
6485
6486 /* Set the finalization priority for DECL to PRIORITY. */
6487
6488 void
6489 decl_fini_priority_insert (tree decl, priority_type priority)
6490 {
6491 struct cgraph_node *node;
6492
6493 if (priority == DEFAULT_INIT_PRIORITY)
6494 {
6495 node = cgraph_node::get (decl);
6496 if (!node)
6497 return;
6498 }
6499 else
6500 node = cgraph_node::get_create (decl);
6501 node->set_fini_priority (priority);
6502 }
6503
6504 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6505
6506 static void
6507 print_debug_expr_statistics (void)
6508 {
6509 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6510 (long) htab_size (debug_expr_for_decl),
6511 (long) htab_elements (debug_expr_for_decl),
6512 htab_collisions (debug_expr_for_decl));
6513 }
6514
6515 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6516
6517 static void
6518 print_value_expr_statistics (void)
6519 {
6520 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6521 (long) htab_size (value_expr_for_decl),
6522 (long) htab_elements (value_expr_for_decl),
6523 htab_collisions (value_expr_for_decl));
6524 }
6525
6526 /* Lookup a debug expression for FROM, and return it if we find one. */
6527
6528 tree
6529 decl_debug_expr_lookup (tree from)
6530 {
6531 struct tree_decl_map *h, in;
6532 in.base.from = from;
6533
6534 h = (struct tree_decl_map *)
6535 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6536 if (h)
6537 return h->to;
6538 return NULL_TREE;
6539 }
6540
6541 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6542
6543 void
6544 decl_debug_expr_insert (tree from, tree to)
6545 {
6546 struct tree_decl_map *h;
6547 void **loc;
6548
6549 h = ggc_alloc<tree_decl_map> ();
6550 h->base.from = from;
6551 h->to = to;
6552 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6553 INSERT);
6554 *(struct tree_decl_map **) loc = h;
6555 }
6556
6557 /* Lookup a value expression for FROM, and return it if we find one. */
6558
6559 tree
6560 decl_value_expr_lookup (tree from)
6561 {
6562 struct tree_decl_map *h, in;
6563 in.base.from = from;
6564
6565 h = (struct tree_decl_map *)
6566 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6567 if (h)
6568 return h->to;
6569 return NULL_TREE;
6570 }
6571
6572 /* Insert a mapping FROM->TO in the value expression hashtable. */
6573
6574 void
6575 decl_value_expr_insert (tree from, tree to)
6576 {
6577 struct tree_decl_map *h;
6578 void **loc;
6579
6580 h = ggc_alloc<tree_decl_map> ();
6581 h->base.from = from;
6582 h->to = to;
6583 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6584 INSERT);
6585 *(struct tree_decl_map **) loc = h;
6586 }
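
/* Sketch of the usual way front ends reach the routine above: through the
   tree.h macro SET_DECL_VALUE_EXPR, followed by setting the flag that
   makes the mapping visible (VAR and REPLACEMENT are hypothetical):

     SET_DECL_VALUE_EXPR (var, replacement);
     DECL_HAS_VALUE_EXPR_P (var) = 1;

   Later calls to decl_value_expr_lookup (var) then return REPLACEMENT.  */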
6587
6588 /* Lookup a vector of debug arguments for FROM, and return it if we
6589 find one. */
6590
6591 vec<tree, va_gc> **
6592 decl_debug_args_lookup (tree from)
6593 {
6594 struct tree_vec_map *h, in;
6595
6596 if (!DECL_HAS_DEBUG_ARGS_P (from))
6597 return NULL;
6598 gcc_checking_assert (debug_args_for_decl != NULL);
6599 in.base.from = from;
6600 h = (struct tree_vec_map *)
6601 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6602 if (h)
6603 return &h->to;
6604 return NULL;
6605 }
6606
6607 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6608 arguments hashtable. */
6609
6610 vec<tree, va_gc> **
6611 decl_debug_args_insert (tree from)
6612 {
6613 struct tree_vec_map *h;
6614 void **loc;
6615
6616 if (DECL_HAS_DEBUG_ARGS_P (from))
6617 return decl_debug_args_lookup (from);
6618 if (debug_args_for_decl == NULL)
6619 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6620 tree_vec_map_eq, 0);
6621 h = ggc_alloc<tree_vec_map> ();
6622 h->base.from = from;
6623 h->to = NULL;
6624 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6625 INSERT);
6626 *(struct tree_vec_map **) loc = h;
6627 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6628 return &h->to;
6629 }
6630
6631 /* Hashing of types so that we don't make duplicates.
6632 The entry point is `type_hash_canon'. */
6633
6634 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6635 with types in the TREE_VALUE slots), by adding the hash codes
6636 of the individual types. */
6637
6638 static void
6639 type_hash_list (const_tree list, inchash::hash &hstate)
6640 {
6641 const_tree tail;
6642
6643 for (tail = list; tail; tail = TREE_CHAIN (tail))
6644 if (TREE_VALUE (tail) != error_mark_node)
6645 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6646 }
6647
6648 /* These are the Hashtable callback functions. */
6649
6650 /* Returns true iff the types are equivalent. */
6651
6652 static int
6653 type_hash_eq (const void *va, const void *vb)
6654 {
6655 const struct type_hash *const a = (const struct type_hash *) va,
6656 *const b = (const struct type_hash *) vb;
6657
6658 /* First test the things that are the same for all types. */
6659 if (a->hash != b->hash
6660 || TREE_CODE (a->type) != TREE_CODE (b->type)
6661 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6662 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6663 TYPE_ATTRIBUTES (b->type))
6664 || (TREE_CODE (a->type) != COMPLEX_TYPE
6665 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6666 return 0;
6667
6668 /* Be careful about comparing arrays before and after the element type
6669 has been completed; don't compare TYPE_ALIGN unless both types are
6670 complete. */
6671 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6672 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6673 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6674 return 0;
6675
6676 switch (TREE_CODE (a->type))
6677 {
6678 case VOID_TYPE:
6679 case COMPLEX_TYPE:
6680 case POINTER_TYPE:
6681 case REFERENCE_TYPE:
6682 case NULLPTR_TYPE:
6683 return 1;
6684
6685 case VECTOR_TYPE:
6686 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6687
6688 case ENUMERAL_TYPE:
6689 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6690 && !(TYPE_VALUES (a->type)
6691 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6692 && TYPE_VALUES (b->type)
6693 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6694 && type_list_equal (TYPE_VALUES (a->type),
6695 TYPE_VALUES (b->type))))
6696 return 0;
6697
6698 /* ... fall through ... */
6699
6700 case INTEGER_TYPE:
6701 case REAL_TYPE:
6702 case BOOLEAN_TYPE:
6703 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6704 return false;
6705 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6706 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6707 TYPE_MAX_VALUE (b->type)))
6708 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6709 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6710 TYPE_MIN_VALUE (b->type))));
6711
6712 case FIXED_POINT_TYPE:
6713 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6714
6715 case OFFSET_TYPE:
6716 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6717
6718 case METHOD_TYPE:
6719 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6720 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6721 || (TYPE_ARG_TYPES (a->type)
6722 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6723 && TYPE_ARG_TYPES (b->type)
6724 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6725 && type_list_equal (TYPE_ARG_TYPES (a->type),
6726 TYPE_ARG_TYPES (b->type)))))
6727 break;
6728 return 0;
6729 case ARRAY_TYPE:
6730 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6731
6732 case RECORD_TYPE:
6733 case UNION_TYPE:
6734 case QUAL_UNION_TYPE:
6735 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6736 || (TYPE_FIELDS (a->type)
6737 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6738 && TYPE_FIELDS (b->type)
6739 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6740 && type_list_equal (TYPE_FIELDS (a->type),
6741 TYPE_FIELDS (b->type))));
6742
6743 case FUNCTION_TYPE:
6744 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6745 || (TYPE_ARG_TYPES (a->type)
6746 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6747 && TYPE_ARG_TYPES (b->type)
6748 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6749 && type_list_equal (TYPE_ARG_TYPES (a->type),
6750 TYPE_ARG_TYPES (b->type))))
6751 break;
6752 return 0;
6753
6754 default:
6755 return 0;
6756 }
6757
6758 if (lang_hooks.types.type_hash_eq != NULL)
6759 return lang_hooks.types.type_hash_eq (a->type, b->type);
6760
6761 return 1;
6762 }
6763
6764 /* Return the cached hash value. */
6765
6766 static hashval_t
6767 type_hash_hash (const void *item)
6768 {
6769 return ((const struct type_hash *) item)->hash;
6770 }
6771
6772 /* Given TYPE, and HASHCODE its hash code, return the canonical
6773 object for an identical type if one already exists.
6774 Otherwise, return TYPE, and record it as the canonical object.
6775
6776 To use this function, first create a type of the sort you want.
6777 Then compute its hash code from the fields of the type that
6778 make it different from other similar types.
6779 Then call this function and use the value. */
6780
6781 tree
6782 type_hash_canon (unsigned int hashcode, tree type)
6783 {
6784 type_hash in;
6785 void **loc;
6786
6787 /* The hash table only contains main variants, so ensure that's what we're
6788 being passed. */
6789 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6790
6791 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6792 must call that routine before comparing TYPE_ALIGNs. */
6793 layout_type (type);
6794
6795 in.hash = hashcode;
6796 in.type = type;
6797
6798 loc = htab_find_slot_with_hash (type_hash_table, &in, hashcode, INSERT);
6799 if (*loc)
6800 {
6801 tree t1 = ((type_hash *) *loc)->type;
6802 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6803 if (GATHER_STATISTICS)
6804 {
6805 tree_code_counts[(int) TREE_CODE (type)]--;
6806 tree_node_counts[(int) t_kind]--;
6807 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6808 }
6809 return t1;
6810 }
6811 else
6812 {
6813 struct type_hash *h;
6814
6815 h = ggc_alloc<type_hash> ();
6816 h->hash = hashcode;
6817 h->type = type;
6818 *loc = (void *)h;
6819
6820 return type;
6821 }
6822 }
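
/* Reduced sketch of the usage pattern described above, modelled on
   build_array_type_1 further below (ELT_TYPE is hypothetical; real callers
   fill in all distinguishing fields before hashing):

     tree t = make_node (ARRAY_TYPE);
     TREE_TYPE (t) = elt_type;
     inchash::hash hstate;
     hstate.add_object (TYPE_HASH (elt_type));
     t = type_hash_canon (hstate.end (), t);  */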
6823
6824 /* See if the data pointed to by the type hash table entry is marked. We
6825 simply consider it marked if the type itself is marked, so that the
6826 entry goes away when the type does. */
6827
6828 static int
6829 type_hash_marked_p (const void *p)
6830 {
6831 const_tree const type = ((const struct type_hash *) p)->type;
6832
6833 return ggc_marked_p (type);
6834 }
6835
6836 static void
6837 print_type_hash_statistics (void)
6838 {
6839 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6840 (long) htab_size (type_hash_table),
6841 (long) htab_elements (type_hash_table),
6842 htab_collisions (type_hash_table));
6843 }
6844
6845 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6846 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6847 by adding the hash codes of the individual attributes. */
6848
6849 static void
6850 attribute_hash_list (const_tree list, inchash::hash &hstate)
6851 {
6852 const_tree tail;
6853
6854 for (tail = list; tail; tail = TREE_CHAIN (tail))
6855 /* ??? Do we want to add in TREE_VALUE too? */
6856 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6857 }
6858
6859 /* Given two lists of attributes, return true if list L2 is
6860 equivalent to L1. */
6861
6862 int
6863 attribute_list_equal (const_tree l1, const_tree l2)
6864 {
6865 if (l1 == l2)
6866 return 1;
6867
6868 return attribute_list_contained (l1, l2)
6869 && attribute_list_contained (l2, l1);
6870 }
6871
6872 /* Given two lists of attributes, return true if list L2 is
6873 completely contained within L1. */
6874 /* ??? This would be faster if attribute names were stored in a canonicalized
6875 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6876 must be used to show these elements are equivalent (which they are). */
6877 /* ??? It's not clear that attributes with arguments will always be handled
6878 correctly. */
6879
6880 int
6881 attribute_list_contained (const_tree l1, const_tree l2)
6882 {
6883 const_tree t1, t2;
6884
6885 /* First check the obvious, maybe the lists are identical. */
6886 if (l1 == l2)
6887 return 1;
6888
6889 /* Maybe the lists are similar. */
6890 for (t1 = l1, t2 = l2;
6891 t1 != 0 && t2 != 0
6892 && get_attribute_name (t1) == get_attribute_name (t2)
6893 && TREE_VALUE (t1) == TREE_VALUE (t2);
6894 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6895 ;
6896
6897 /* Maybe the lists are equal. */
6898 if (t1 == 0 && t2 == 0)
6899 return 1;
6900
6901 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6902 {
6903 const_tree attr;
6904 /* This CONST_CAST is okay because lookup_attribute does not
6905 modify its argument and the return value is assigned to a
6906 const_tree. */
6907 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6908 CONST_CAST_TREE (l1));
6909 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6910 attr = lookup_ident_attribute (get_attribute_name (t2),
6911 TREE_CHAIN (attr)))
6912 ;
6913
6914 if (attr == NULL_TREE)
6915 return 0;
6916 }
6917
6918 return 1;
6919 }
6920
6921 /* Given two lists of types
6922 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6923 return 1 if the lists contain the same types in the same order.
6924 Also, the TREE_PURPOSEs must match. */
6925
6926 int
6927 type_list_equal (const_tree l1, const_tree l2)
6928 {
6929 const_tree t1, t2;
6930
6931 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6932 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6933 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6934 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6935 && (TREE_TYPE (TREE_PURPOSE (t1))
6936 == TREE_TYPE (TREE_PURPOSE (t2))))))
6937 return 0;
6938
6939 return t1 == t2;
6940 }
6941
6942 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6943 given by TYPE. If the argument list accepts variable arguments,
6944 then this function counts only the ordinary arguments. */
6945
6946 int
6947 type_num_arguments (const_tree type)
6948 {
6949 int i = 0;
6950 tree t;
6951
6952 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6953 /* If the function does not take a variable number of arguments,
6954 the last element in the list will have type `void'. */
6955 if (VOID_TYPE_P (TREE_VALUE (t)))
6956 break;
6957 else
6958 ++i;
6959
6960 return i;
6961 }
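
/* Worked example: for the FUNCTION_TYPE of 'int f (int, double, ...)' the
   TYPE_ARG_TYPES chain is { int, double } with no trailing void node, so
   the result is 2; for 'int g (int, double)' the chain ends in void and
   the result is likewise 2.  */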
6962
6963 /* Nonzero if integer constants T1 and T2
6964 represent the same constant value. */
6965
6966 int
6967 tree_int_cst_equal (const_tree t1, const_tree t2)
6968 {
6969 if (t1 == t2)
6970 return 1;
6971
6972 if (t1 == 0 || t2 == 0)
6973 return 0;
6974
6975 if (TREE_CODE (t1) == INTEGER_CST
6976 && TREE_CODE (t2) == INTEGER_CST
6977 && wi::to_widest (t1) == wi::to_widest (t2))
6978 return 1;
6979
6980 return 0;
6981 }
6982
6983 /* Return true if T is an INTEGER_CST whose numerical value (extended
6984 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
6985
6986 bool
6987 tree_fits_shwi_p (const_tree t)
6988 {
6989 return (t != NULL_TREE
6990 && TREE_CODE (t) == INTEGER_CST
6991 && wi::fits_shwi_p (wi::to_widest (t)));
6992 }
6993
6994 /* Return true if T is an INTEGER_CST whose numerical value (extended
6995 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
6996
6997 bool
6998 tree_fits_uhwi_p (const_tree t)
6999 {
7000 return (t != NULL_TREE
7001 && TREE_CODE (t) == INTEGER_CST
7002 && wi::fits_uhwi_p (wi::to_widest (t)));
7003 }
7004
7005 /* T is an INTEGER_CST whose numerical value (extended according to
7006 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7007 HOST_WIDE_INT. */
7008
7009 HOST_WIDE_INT
7010 tree_to_shwi (const_tree t)
7011 {
7012 gcc_assert (tree_fits_shwi_p (t));
7013 return TREE_INT_CST_LOW (t);
7014 }
7015
7016 /* T is an INTEGER_CST whose numerical value (extended according to
7017 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7018 HOST_WIDE_INT. */
7019
7020 unsigned HOST_WIDE_INT
7021 tree_to_uhwi (const_tree t)
7022 {
7023 gcc_assert (tree_fits_uhwi_p (t));
7024 return TREE_INT_CST_LOW (t);
7025 }
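
/* The fits/to pairs above are meant to be used together, as in this sketch
   (and as done below when caching nonstandard integer types):

     if (tree_fits_uhwi_p (size_tree))
       {
	 unsigned HOST_WIDE_INT size = tree_to_uhwi (size_tree);
	 ...
       }

   SIZE_TREE is a hypothetical INTEGER_CST.  */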
7026
7027 /* Return the most significant (sign) bit of T. */
7028
7029 int
7030 tree_int_cst_sign_bit (const_tree t)
7031 {
7032 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7033
7034 return wi::extract_uhwi (t, bitno, 1);
7035 }
7036
7037 /* Return an indication of the sign of the integer constant T.
7038 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7039 Note that -1 will never be returned if T's type is unsigned. */
7040
7041 int
7042 tree_int_cst_sgn (const_tree t)
7043 {
7044 if (wi::eq_p (t, 0))
7045 return 0;
7046 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7047 return 1;
7048 else if (wi::neg_p (t))
7049 return -1;
7050 else
7051 return 1;
7052 }
7053
7054 /* Return the minimum number of bits needed to represent VALUE in a
7055 signed or unsigned type; SGN says which. */
7056
7057 unsigned int
7058 tree_int_cst_min_precision (tree value, signop sgn)
7059 {
7060 /* If the value is negative, compute its negative minus 1. The latter
7061 adjustment is because the absolute value of the largest negative value
7062 is one larger than the largest positive value. This is equivalent to
7063 a bit-wise negation, so use that operation instead. */
7064
7065 if (tree_int_cst_sgn (value) < 0)
7066 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7067
7068 /* Return the number of bits needed, taking into account the fact
7069 that we need one more bit for a signed than unsigned type.
7070 If value is 0 or -1, the minimum precision is 1 no matter
7071 whether SGN is SIGNED or UNSIGNED. */
7072
7073 if (integer_zerop (value))
7074 return 1;
7075 else
7076 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7077 }
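
/* Worked examples of the computation above: for VALUE 5,
   tree_floor_log2 (5) == 2, so 3 bits suffice for an unsigned type and
   4 bits for a signed one.  For VALUE -3, the bit-wise negation yields 2,
   tree_floor_log2 (2) == 1, so 1 + 1 + 1 == 3 bits are needed in a signed
   type (3 signed bits cover -4 .. 3).  */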
7078
7079 /* Return truthvalue of whether T1 is the same tree structure as T2.
7080 Return 1 if they are the same.
7081 Return 0 if they are understandably different.
7082 Return -1 if either contains tree structure not understood by
7083 this function. */
7084
7085 int
7086 simple_cst_equal (const_tree t1, const_tree t2)
7087 {
7088 enum tree_code code1, code2;
7089 int cmp;
7090 int i;
7091
7092 if (t1 == t2)
7093 return 1;
7094 if (t1 == 0 || t2 == 0)
7095 return 0;
7096
7097 code1 = TREE_CODE (t1);
7098 code2 = TREE_CODE (t2);
7099
7100 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7101 {
7102 if (CONVERT_EXPR_CODE_P (code2)
7103 || code2 == NON_LVALUE_EXPR)
7104 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7105 else
7106 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7107 }
7108
7109 else if (CONVERT_EXPR_CODE_P (code2)
7110 || code2 == NON_LVALUE_EXPR)
7111 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7112
7113 if (code1 != code2)
7114 return 0;
7115
7116 switch (code1)
7117 {
7118 case INTEGER_CST:
7119 return wi::to_widest (t1) == wi::to_widest (t2);
7120
7121 case REAL_CST:
7122 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7123
7124 case FIXED_CST:
7125 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7126
7127 case STRING_CST:
7128 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7129 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7130 TREE_STRING_LENGTH (t1)));
7131
7132 case CONSTRUCTOR:
7133 {
7134 unsigned HOST_WIDE_INT idx;
7135 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7136 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7137
7138 if (vec_safe_length (v1) != vec_safe_length (v2))
7139 return false;
7140
7141 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7142 /* ??? Should we handle also fields here? */
7143 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7144 return false;
7145 return true;
7146 }
7147
7148 case SAVE_EXPR:
7149 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7150
7151 case CALL_EXPR:
7152 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7153 if (cmp <= 0)
7154 return cmp;
7155 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7156 return 0;
7157 {
7158 const_tree arg1, arg2;
7159 const_call_expr_arg_iterator iter1, iter2;
7160 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7161 arg2 = first_const_call_expr_arg (t2, &iter2);
7162 arg1 && arg2;
7163 arg1 = next_const_call_expr_arg (&iter1),
7164 arg2 = next_const_call_expr_arg (&iter2))
7165 {
7166 cmp = simple_cst_equal (arg1, arg2);
7167 if (cmp <= 0)
7168 return cmp;
7169 }
7170 return arg1 == arg2;
7171 }
7172
7173 case TARGET_EXPR:
7174 /* Special case: if either target is an unallocated VAR_DECL,
7175 it means that it's going to be unified with whatever the
7176 TARGET_EXPR is really supposed to initialize, so treat it
7177 as being equivalent to anything. */
7178 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7179 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7180 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7181 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7182 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7183 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7184 cmp = 1;
7185 else
7186 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7187
7188 if (cmp <= 0)
7189 return cmp;
7190
7191 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7192
7193 case WITH_CLEANUP_EXPR:
7194 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7195 if (cmp <= 0)
7196 return cmp;
7197
7198 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7199
7200 case COMPONENT_REF:
7201 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7202 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7203
7204 return 0;
7205
7206 case VAR_DECL:
7207 case PARM_DECL:
7208 case CONST_DECL:
7209 case FUNCTION_DECL:
7210 return 0;
7211
7212 default:
7213 break;
7214 }
7215
7216 /* This general rule works for most tree codes. All exceptions should be
7217 handled above. If this is a language-specific tree code, we can't
7218 trust what might be in the operand, so say we don't know
7219 the situation. */
7220 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7221 return -1;
7222
7223 switch (TREE_CODE_CLASS (code1))
7224 {
7225 case tcc_unary:
7226 case tcc_binary:
7227 case tcc_comparison:
7228 case tcc_expression:
7229 case tcc_reference:
7230 case tcc_statement:
7231 cmp = 1;
7232 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7233 {
7234 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7235 if (cmp <= 0)
7236 return cmp;
7237 }
7238
7239 return cmp;
7240
7241 default:
7242 return -1;
7243 }
7244 }
7245
7246 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7247 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7248 than U, respectively. */
7249
7250 int
7251 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7252 {
7253 if (tree_int_cst_sgn (t) < 0)
7254 return -1;
7255 else if (!tree_fits_uhwi_p (t))
7256 return 1;
7257 else if (TREE_INT_CST_LOW (t) == u)
7258 return 0;
7259 else if (TREE_INT_CST_LOW (t) < u)
7260 return -1;
7261 else
7262 return 1;
7263 }
7264
7265 /* Return true if SIZE represents a constant size that is in bounds of
7266 what the middle-end and the backend accept (covering not more than
7267 half of the address-space). */
7268
7269 bool
7270 valid_constant_size_p (const_tree size)
7271 {
7272 if (! tree_fits_uhwi_p (size)
7273 || TREE_OVERFLOW (size)
7274 || tree_int_cst_sign_bit (size) != 0)
7275 return false;
7276 return true;
7277 }
7278
7279 /* Return the precision of the type, or for a complex or vector type the
7280 precision of the type of its elements. */
7281
7282 unsigned int
7283 element_precision (const_tree type)
7284 {
7285 enum tree_code code = TREE_CODE (type);
7286 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7287 type = TREE_TYPE (type);
7288
7289 return TYPE_PRECISION (type);
7290 }
7291
7292 /* Return true if CODE represents an associative tree code. Otherwise
7293 return false. */
7294 bool
7295 associative_tree_code (enum tree_code code)
7296 {
7297 switch (code)
7298 {
7299 case BIT_IOR_EXPR:
7300 case BIT_AND_EXPR:
7301 case BIT_XOR_EXPR:
7302 case PLUS_EXPR:
7303 case MULT_EXPR:
7304 case MIN_EXPR:
7305 case MAX_EXPR:
7306 return true;
7307
7308 default:
7309 break;
7310 }
7311 return false;
7312 }
7313
7314 /* Return true if CODE represents a commutative tree code. Otherwise
7315 return false. */
7316 bool
7317 commutative_tree_code (enum tree_code code)
7318 {
7319 switch (code)
7320 {
7321 case PLUS_EXPR:
7322 case MULT_EXPR:
7323 case MULT_HIGHPART_EXPR:
7324 case MIN_EXPR:
7325 case MAX_EXPR:
7326 case BIT_IOR_EXPR:
7327 case BIT_XOR_EXPR:
7328 case BIT_AND_EXPR:
7329 case NE_EXPR:
7330 case EQ_EXPR:
7331 case UNORDERED_EXPR:
7332 case ORDERED_EXPR:
7333 case UNEQ_EXPR:
7334 case LTGT_EXPR:
7335 case TRUTH_AND_EXPR:
7336 case TRUTH_XOR_EXPR:
7337 case TRUTH_OR_EXPR:
7338 case WIDEN_MULT_EXPR:
7339 case VEC_WIDEN_MULT_HI_EXPR:
7340 case VEC_WIDEN_MULT_LO_EXPR:
7341 case VEC_WIDEN_MULT_EVEN_EXPR:
7342 case VEC_WIDEN_MULT_ODD_EXPR:
7343 return true;
7344
7345 default:
7346 break;
7347 }
7348 return false;
7349 }
7350
7351 /* Return true if CODE represents a ternary tree code for which the
7352 first two operands are commutative. Otherwise return false. */
7353 bool
7354 commutative_ternary_tree_code (enum tree_code code)
7355 {
7356 switch (code)
7357 {
7358 case WIDEN_MULT_PLUS_EXPR:
7359 case WIDEN_MULT_MINUS_EXPR:
7360 return true;
7361
7362 default:
7363 break;
7364 }
7365 return false;
7366 }
7367
7368 namespace inchash
7369 {
7370
7371 /* Generate a hash value for an expression. This can be used iteratively
7372 by passing a previous result as the HSTATE argument.
7373
7374 This function is intended to produce the same hash for expressions which
7375 would compare equal using operand_equal_p. */
7376 void
7377 add_expr (const_tree t, inchash::hash &hstate)
7378 {
7379 int i;
7380 enum tree_code code;
7381 enum tree_code_class tclass;
7382
7383 if (t == NULL_TREE)
7384 {
7385 hstate.merge_hash (0);
7386 return;
7387 }
7388
7389 code = TREE_CODE (t);
7390
7391 switch (code)
7392 {
7393 /* Alas, constants aren't shared, so we can't rely on pointer
7394 identity. */
7395 case VOID_CST:
7396 hstate.merge_hash (0);
7397 return;
7398 case INTEGER_CST:
7399 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7400 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7401 return;
7402 case REAL_CST:
7403 {
7404 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7405 hstate.merge_hash (val2);
7406 return;
7407 }
7408 case FIXED_CST:
7409 {
7410 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7411 hstate.merge_hash (val2);
7412 return;
7413 }
7414 case STRING_CST:
7415 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7416 return;
7417 case COMPLEX_CST:
7418 inchash::add_expr (TREE_REALPART (t), hstate);
7419 inchash::add_expr (TREE_IMAGPART (t), hstate);
7420 return;
7421 case VECTOR_CST:
7422 {
7423 unsigned i;
7424 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7425 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7426 return;
7427 }
7428 case SSA_NAME:
7429 /* We can just compare by pointer. */
7430 hstate.add_wide_int (SSA_NAME_VERSION (t));
7431 return;
7432 case PLACEHOLDER_EXPR:
7433 /* The node itself doesn't matter. */
7434 return;
7435 case TREE_LIST:
7436 /* A list of expressions, for a CALL_EXPR or as the elements of a
7437 VECTOR_CST. */
7438 for (; t; t = TREE_CHAIN (t))
7439 inchash::add_expr (TREE_VALUE (t), hstate);
7440 return;
7441 case CONSTRUCTOR:
7442 {
7443 unsigned HOST_WIDE_INT idx;
7444 tree field, value;
7445 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7446 {
7447 inchash::add_expr (field, hstate);
7448 inchash::add_expr (value, hstate);
7449 }
7450 return;
7451 }
7452 case FUNCTION_DECL:
7453 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7454 Otherwise nodes that compare equal according to operand_equal_p might
7455 get different hash codes. However, don't do this for machine specific
7456 or front end builtins, since the function code is overloaded in those
7457 cases. */
7458 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7459 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7460 {
7461 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7462 code = TREE_CODE (t);
7463 }
7464 /* FALL THROUGH */
7465 default:
7466 tclass = TREE_CODE_CLASS (code);
7467
7468 if (tclass == tcc_declaration)
7469 {
7470 /* DECLs have a unique ID. */
7471 hstate.add_wide_int (DECL_UID (t));
7472 }
7473 else
7474 {
7475 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7476
7477 hstate.add_object (code);
7478
7479 /* Don't hash the type, that can lead to having nodes which
7480 compare equal according to operand_equal_p, but which
7481 have different hash codes. */
7482 if (CONVERT_EXPR_CODE_P (code)
7483 || code == NON_LVALUE_EXPR)
7484 {
7485 /* Make sure to include signedness in the hash computation. */
7486 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7487 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7488 }
7489
7490 else if (commutative_tree_code (code))
7491 {
7492 /* It's a commutative expression. We want to hash it the same
7493 however it appears. We do this by first hashing both operands
7494 and then rehashing based on the order of their independent
7495 hashes. */
7496 inchash::hash one, two;
7497 inchash::add_expr (TREE_OPERAND (t, 0), one);
7498 inchash::add_expr (TREE_OPERAND (t, 1), two);
7499 hstate.add_commutative (one, two);
7500 }
7501 else
7502 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7503 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7504 }
7505 return;
7506 }
7507 }
7508
7509 }
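
/* Minimal sketch of driving the hasher above, mirroring its use in
   build_range_type_1 further below (EXPR is hypothetical):

     inchash::hash hstate;
     inchash::add_expr (expr, hstate);
     hashval_t h = hstate.end ();  */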
7510
7511 /* Constructors for pointer, array and function types.
7512 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7513 constructed by language-dependent code, not here.) */
7514
7515 /* Construct, lay out and return the type of pointers to TO_TYPE with
7516 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7517 reference all of memory. If such a type has already been
7518 constructed, reuse it. */
7519
7520 tree
7521 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7522 bool can_alias_all)
7523 {
7524 tree t;
7525
7526 if (to_type == error_mark_node)
7527 return error_mark_node;
7528
7529 /* If the pointed-to type has the may_alias attribute set, force
7530 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7531 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7532 can_alias_all = true;
7533
7534 /* In some cases, languages will have things that aren't a POINTER_TYPE
7535 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7536 In that case, return that type without regard to the rest of our
7537 operands.
7538
7539 ??? This is a kludge, but consistent with the way this function has
7540 always operated and there doesn't seem to be a good way to avoid this
7541 at the moment. */
7542 if (TYPE_POINTER_TO (to_type) != 0
7543 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7544 return TYPE_POINTER_TO (to_type);
7545
7546 /* First, if we already have a type for pointers to TO_TYPE and it's
7547 the proper mode, use it. */
7548 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7549 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7550 return t;
7551
7552 t = make_node (POINTER_TYPE);
7553
7554 TREE_TYPE (t) = to_type;
7555 SET_TYPE_MODE (t, mode);
7556 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7557 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7558 TYPE_POINTER_TO (to_type) = t;
7559
7560 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7561 SET_TYPE_STRUCTURAL_EQUALITY (t);
7562 else if (TYPE_CANONICAL (to_type) != to_type)
7563 TYPE_CANONICAL (t)
7564 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7565 mode, can_alias_all);
7566
7567 /* Lay out the type. This function has many callers that are concerned
7568 with expression-construction, and this simplifies them all. */
7569 layout_type (t);
7570
7571 return t;
7572 }
7573
7574 /* By default build pointers in ptr_mode. */
7575
7576 tree
7577 build_pointer_type (tree to_type)
7578 {
7579 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7580 : TYPE_ADDR_SPACE (to_type);
7581 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7582 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7583 }
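
/* Usage sketch: because pointer types to a given TO_TYPE are chained off
   TYPE_POINTER_TO and reused, repeated requests yield the same node:

     tree p1 = build_pointer_type (integer_type_node);
     tree p2 = build_pointer_type (integer_type_node);
     gcc_assert (p1 == p2);  */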
7584
7585 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7586
7587 tree
7588 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7589 bool can_alias_all)
7590 {
7591 tree t;
7592
7593 if (to_type == error_mark_node)
7594 return error_mark_node;
7595
7596 /* If the pointed-to type has the may_alias attribute set, force
7597 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7598 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7599 can_alias_all = true;
7600
7601 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7602 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7603 In that case, return that type without regard to the rest of our
7604 operands.
7605
7606 ??? This is a kludge, but consistent with the way this function has
7607 always operated and there doesn't seem to be a good way to avoid this
7608 at the moment. */
7609 if (TYPE_REFERENCE_TO (to_type) != 0
7610 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7611 return TYPE_REFERENCE_TO (to_type);
7612
7613 /* First, if we already have a type for pointers to TO_TYPE and it's
7614 the proper mode, use it. */
7615 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7616 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7617 return t;
7618
7619 t = make_node (REFERENCE_TYPE);
7620
7621 TREE_TYPE (t) = to_type;
7622 SET_TYPE_MODE (t, mode);
7623 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7624 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7625 TYPE_REFERENCE_TO (to_type) = t;
7626
7627 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7628 SET_TYPE_STRUCTURAL_EQUALITY (t);
7629 else if (TYPE_CANONICAL (to_type) != to_type)
7630 TYPE_CANONICAL (t)
7631 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7632 mode, can_alias_all);
7633
7634 layout_type (t);
7635
7636 return t;
7637 }
7638
7639
7640 /* Build the node for the type of references-to-TO_TYPE by default
7641 in ptr_mode. */
7642
7643 tree
7644 build_reference_type (tree to_type)
7645 {
7646 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7647 : TYPE_ADDR_SPACE (to_type);
7648 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7649 return build_reference_type_for_mode (to_type, pointer_mode, false);
7650 }
7651
7652 #define MAX_INT_CACHED_PREC \
7653 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7654 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7655
7656 /* Builds a signed or unsigned integer type of precision PRECISION.
7657 Used for C bitfields whose precision does not match that of
7658 built-in target types. */
7659 tree
7660 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7661 int unsignedp)
7662 {
7663 tree itype, ret;
7664
7665 if (unsignedp)
7666 unsignedp = MAX_INT_CACHED_PREC + 1;
7667
7668 if (precision <= MAX_INT_CACHED_PREC)
7669 {
7670 itype = nonstandard_integer_type_cache[precision + unsignedp];
7671 if (itype)
7672 return itype;
7673 }
7674
7675 itype = make_node (INTEGER_TYPE);
7676 TYPE_PRECISION (itype) = precision;
7677
7678 if (unsignedp)
7679 fixup_unsigned_type (itype);
7680 else
7681 fixup_signed_type (itype);
7682
7683 ret = itype;
7684 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7685 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7686 if (precision <= MAX_INT_CACHED_PREC)
7687 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7688
7689 return ret;
7690 }
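
/* Usage sketch: the type of a 24-bit unsigned bit-field; the second
   argument requests an unsigned type.  Repeated calls with the same
   arguments return the cached node:

     tree uint24_type = build_nonstandard_integer_type (24, 1);  */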
7691
7692 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7693 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7694 is true, reuse such a type that has already been constructed. */
7695
7696 static tree
7697 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7698 {
7699 tree itype = make_node (INTEGER_TYPE);
7700 inchash::hash hstate;
7701
7702 TREE_TYPE (itype) = type;
7703
7704 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7705 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7706
7707 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7708 SET_TYPE_MODE (itype, TYPE_MODE (type));
7709 TYPE_SIZE (itype) = TYPE_SIZE (type);
7710 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7711 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7712 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7713
7714 if (!shared)
7715 return itype;
7716
7717 if ((TYPE_MIN_VALUE (itype)
7718 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7719 || (TYPE_MAX_VALUE (itype)
7720 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7721 {
7722 /* Since we cannot reliably merge this type, we need to compare it using
7723 structural equality checks. */
7724 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7725 return itype;
7726 }
7727
7728 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7729 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7730 hstate.merge_hash (TYPE_HASH (type));
7731 itype = type_hash_canon (hstate.end (), itype);
7732
7733 return itype;
7734 }
7735
7736 /* Wrapper around build_range_type_1 with SHARED set to true. */
7737
7738 tree
7739 build_range_type (tree type, tree lowval, tree highval)
7740 {
7741 return build_range_type_1 (type, lowval, highval, true);
7742 }
7743
7744 /* Wrapper around build_range_type_1 with SHARED set to false. */
7745
7746 tree
7747 build_nonshared_range_type (tree type, tree lowval, tree highval)
7748 {
7749 return build_range_type_1 (type, lowval, highval, false);
7750 }
7751
7752 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7753 MAXVAL should be the maximum value in the domain
7754 (one less than the length of the array).
7755
7756 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7757 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7758 The limit exists because the result is a signed type and we don't handle
7759 sizes that use more than one HOST_WIDE_INT. */
7760
7761 tree
7762 build_index_type (tree maxval)
7763 {
7764 return build_range_type (sizetype, size_zero_node, maxval);
7765 }
7766
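/* Illustrative usage sketch (an addition for exposition, not original to
   this file): the TYPE_DOMAIN of a ten-element array can be built with
   build_index_type above; the low bound is size_zero_node and MAXVAL is
   the last valid index.

     tree domain = build_index_type (size_int (9));

   The name domain is hypothetical and only for illustration; the
   resulting range covers indices 0 through 9.  */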
7767 /* Return true if the debug information for TYPE, a subtype, should be emitted
7768 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7769 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7770 debug info and doesn't reflect the source code. */
7771
7772 bool
7773 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7774 {
7775 tree base_type = TREE_TYPE (type), low, high;
7776
7777 /* Subrange types have a base type which is an integral type. */
7778 if (!INTEGRAL_TYPE_P (base_type))
7779 return false;
7780
7781 /* Get the real bounds of the subtype. */
7782 if (lang_hooks.types.get_subrange_bounds)
7783 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7784 else
7785 {
7786 low = TYPE_MIN_VALUE (type);
7787 high = TYPE_MAX_VALUE (type);
7788 }
7789
7790 /* If the type and its base type have the same representation and the same
7791 name, then the type is not a subrange but a copy of the base type. */
7792 if ((TREE_CODE (base_type) == INTEGER_TYPE
7793 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7794 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7795 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7796 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7797 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7798 return false;
7799
7800 if (lowval)
7801 *lowval = low;
7802 if (highval)
7803 *highval = high;
7804 return true;
7805 }
7806
7807 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7808 and number of elements specified by the range of values of INDEX_TYPE.
7809 If SHARED is true, reuse such a type that has already been constructed. */
7810
7811 static tree
7812 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7813 {
7814 tree t;
7815
7816 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7817 {
7818 error ("arrays of functions are not meaningful");
7819 elt_type = integer_type_node;
7820 }
7821
7822 t = make_node (ARRAY_TYPE);
7823 TREE_TYPE (t) = elt_type;
7824 TYPE_DOMAIN (t) = index_type;
7825 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7826 layout_type (t);
7827
7828 /* If the element type is incomplete at this point we get marked for
7829 structural equality. Do not record these types in the canonical
7830 type hashtable. */
7831 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7832 return t;
7833
7834 if (shared)
7835 {
7836 inchash::hash hstate;
7837 hstate.add_object (TYPE_HASH (elt_type));
7838 if (index_type)
7839 hstate.add_object (TYPE_HASH (index_type));
7840 t = type_hash_canon (hstate.end (), t);
7841 }
7842
7843 if (TYPE_CANONICAL (t) == t)
7844 {
7845 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7846 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7847 SET_TYPE_STRUCTURAL_EQUALITY (t);
7848 else if (TYPE_CANONICAL (elt_type) != elt_type
7849 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7850 TYPE_CANONICAL (t)
7851 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7852 index_type
7853 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7854 shared);
7855 }
7856
7857 return t;
7858 }
7859
7860 /* Wrapper around build_array_type_1 with SHARED set to true. */
7861
7862 tree
7863 build_array_type (tree elt_type, tree index_type)
7864 {
7865 return build_array_type_1 (elt_type, index_type, true);
7866 }
7867
7868 /* Wrapper around build_array_type_1 with SHARED set to false. */
7869
7870 tree
7871 build_nonshared_array_type (tree elt_type, tree index_type)
7872 {
7873 return build_array_type_1 (elt_type, index_type, false);
7874 }
7875
7876 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7877 sizetype. */
7878
7879 tree
7880 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7881 {
7882 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7883 }
7884
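/* Illustrative usage sketch (an addition for exposition, not original to
   this file): the two calls below yield the same shared representation of
   "int[10]"; build_array_type_nelts is just the convenience wrapper.

     tree a1 = build_array_type_nelts (integer_type_node, 10);
     tree a2 = build_array_type (integer_type_node,
                                 build_index_type (size_int (9)));

   The names a1 and a2 are hypothetical and only for illustration.  */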
7885 /* Strips ARRAY_TYPE levels from TYPE until a non-array
7886 element type is found, and returns that element type. */
7887
7888 tree
7889 strip_array_types (tree type)
7890 {
7891 while (TREE_CODE (type) == ARRAY_TYPE)
7892 type = TREE_TYPE (type);
7893
7894 return type;
7895 }
7896
7897 /* Computes the canonical argument types from the argument type list
7898 ARGTYPES.
7899
7900 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7901 on entry to this function, or if any of the ARGTYPES are
7902 structural.
7903
7904 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7905 true on entry to this function, or if any of the ARGTYPES are
7906 non-canonical.
7907
7908 Returns a canonical argument list, which may be ARGTYPES when the
7909 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7910 true) or would not differ from ARGTYPES. */
7911
7912 static tree
7913 maybe_canonicalize_argtypes (tree argtypes,
7914 bool *any_structural_p,
7915 bool *any_noncanonical_p)
7916 {
7917 tree arg;
7918 bool any_noncanonical_argtypes_p = false;
7919
7920 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7921 {
7922 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7923 /* Fail gracefully by stating that the type is structural. */
7924 *any_structural_p = true;
7925 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7926 *any_structural_p = true;
7927 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7928 || TREE_PURPOSE (arg))
7929 /* If the argument has a default argument, we consider it
7930 non-canonical even though the type itself is canonical.
7931 That way, different variants of function and method types
7932 with default arguments will all point to the variant with
7933 no defaults as their canonical type. */
7934 any_noncanonical_argtypes_p = true;
7935 }
7936
7937 if (*any_structural_p)
7938 return argtypes;
7939
7940 if (any_noncanonical_argtypes_p)
7941 {
7942 /* Build the canonical list of argument types. */
7943 tree canon_argtypes = NULL_TREE;
7944 bool is_void = false;
7945
7946 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7947 {
7948 if (arg == void_list_node)
7949 is_void = true;
7950 else
7951 canon_argtypes = tree_cons (NULL_TREE,
7952 TYPE_CANONICAL (TREE_VALUE (arg)),
7953 canon_argtypes);
7954 }
7955
7956 canon_argtypes = nreverse (canon_argtypes);
7957 if (is_void)
7958 canon_argtypes = chainon (canon_argtypes, void_list_node);
7959
7960 /* There is a non-canonical type. */
7961 *any_noncanonical_p = true;
7962 return canon_argtypes;
7963 }
7964
7965 /* The canonical argument types are the same as ARGTYPES. */
7966 return argtypes;
7967 }
7968
7969 /* Construct, lay out and return
7970 the type of functions returning type VALUE_TYPE
7971 given arguments of types ARG_TYPES.
7972 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7973 are data type nodes for the arguments of the function.
7974 If such a type has already been constructed, reuse it. */
7975
7976 tree
7977 build_function_type (tree value_type, tree arg_types)
7978 {
7979 tree t;
7980 inchash::hash hstate;
7981 bool any_structural_p, any_noncanonical_p;
7982 tree canon_argtypes;
7983
7984 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7985 {
7986 error ("function return type cannot be function");
7987 value_type = integer_type_node;
7988 }
7989
7990 /* Make a node of the sort we want. */
7991 t = make_node (FUNCTION_TYPE);
7992 TREE_TYPE (t) = value_type;
7993 TYPE_ARG_TYPES (t) = arg_types;
7994
7995 /* If we already have such a type, use the old one. */
7996 hstate.add_object (TYPE_HASH (value_type));
7997 type_hash_list (arg_types, hstate);
7998 t = type_hash_canon (hstate.end (), t);
7999
8000 /* Set up the canonical type. */
8001 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8002 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8003 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8004 &any_structural_p,
8005 &any_noncanonical_p);
8006 if (any_structural_p)
8007 SET_TYPE_STRUCTURAL_EQUALITY (t);
8008 else if (any_noncanonical_p)
8009 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8010 canon_argtypes);
8011
8012 if (!COMPLETE_TYPE_P (t))
8013 layout_type (t);
8014 return t;
8015 }
8016
8017 /* Build a function type. The RETURN_TYPE is the type returned by the
8018 function. If VAARGS is set, no void_type_node is appended to the
8019 list. ARGP must always be terminated by a NULL_TREE. */
8020
8021 static tree
8022 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8023 {
8024 tree t, args, last;
8025
8026 t = va_arg (argp, tree);
8027 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8028 args = tree_cons (NULL_TREE, t, args);
8029
8030 if (vaargs)
8031 {
8032 last = args;
8033 if (args != NULL_TREE)
8034 args = nreverse (args);
8035 gcc_assert (last != void_list_node);
8036 }
8037 else if (args == NULL_TREE)
8038 args = void_list_node;
8039 else
8040 {
8041 last = args;
8042 args = nreverse (args);
8043 TREE_CHAIN (last) = void_list_node;
8044 }
8045 args = build_function_type (return_type, args);
8046
8047 return args;
8048 }
8049
8050 /* Build a function type. The RETURN_TYPE is the type returned by the
8051 function. If additional arguments are provided, they are
8052 additional argument types. The list of argument types must always
8053 be terminated by NULL_TREE. */
8054
8055 tree
8056 build_function_type_list (tree return_type, ...)
8057 {
8058 tree args;
8059 va_list p;
8060
8061 va_start (p, return_type);
8062 args = build_function_type_list_1 (false, return_type, p);
8063 va_end (p);
8064 return args;
8065 }
8066
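/* Illustrative usage sketch (an addition for exposition, not original to
   this file): the type of a function "int f (char *, double)" can be
   built with build_function_type_list above; the argument list is closed
   with NULL_TREE and the void_list_node terminator is added internally.

     tree fntype = build_function_type_list (integer_type_node,
                                             build_pointer_type (char_type_node),
                                             double_type_node,
                                             NULL_TREE);

   The name fntype is hypothetical and only for illustration.  */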
8067 /* Build a variable argument function type. The RETURN_TYPE is the
8068 type returned by the function. If additional arguments are provided,
8069 they are additional argument types. The list of argument types must
8070 always be terminated by NULL_TREE. */
8071
8072 tree
8073 build_varargs_function_type_list (tree return_type, ...)
8074 {
8075 tree args;
8076 va_list p;
8077
8078 va_start (p, return_type);
8079 args = build_function_type_list_1 (true, return_type, p);
8080 va_end (p);
8081
8082 return args;
8083 }
8084
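/* Illustrative usage sketch (an addition for exposition, not original to
   this file): a printf-like type "int (const char *, ...)" can be built
   with build_varargs_function_type_list above; because no void_list_node
   terminator is appended, the resulting FUNCTION_TYPE takes a variable
   number of arguments.

     tree cst_char_ptr
       = build_pointer_type (build_qualified_type (char_type_node,
                                                   TYPE_QUAL_CONST));
     tree printf_like
       = build_varargs_function_type_list (integer_type_node,
                                           cst_char_ptr, NULL_TREE);

   The names cst_char_ptr and printf_like are hypothetical and only for
   illustration.  */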
8085 /* Build a function type. RETURN_TYPE is the type returned by the
8086 function; VAARGS indicates whether the function takes varargs. The
8087 function takes N named arguments, the types of which are provided in
8088 ARG_TYPES. */
8089
8090 static tree
8091 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8092 tree *arg_types)
8093 {
8094 int i;
8095 tree t = vaargs ? NULL_TREE : void_list_node;
8096
8097 for (i = n - 1; i >= 0; i--)
8098 t = tree_cons (NULL_TREE, arg_types[i], t);
8099
8100 return build_function_type (return_type, t);
8101 }
8102
8103 /* Build a function type. RETURN_TYPE is the type returned by the
8104 function. The function takes N named arguments, the types of which
8105 are provided in ARG_TYPES. */
8106
8107 tree
8108 build_function_type_array (tree return_type, int n, tree *arg_types)
8109 {
8110 return build_function_type_array_1 (false, return_type, n, arg_types);
8111 }
8112
8113 /* Build a variable argument function type. RETURN_TYPE is the type
8114 returned by the function. The function takes N named arguments, the
8115 types of which are provided in ARG_TYPES. */
8116
8117 tree
8118 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8119 {
8120 return build_function_type_array_1 (true, return_type, n, arg_types);
8121 }
8122
8123 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8124 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8125 for the method. An implicit additional parameter (of type
8126 pointer-to-BASETYPE) is added to the ARGTYPES. */
8127
8128 tree
8129 build_method_type_directly (tree basetype,
8130 tree rettype,
8131 tree argtypes)
8132 {
8133 tree t;
8134 tree ptype;
8135 inchash::hash hstate;
8136 bool any_structural_p, any_noncanonical_p;
8137 tree canon_argtypes;
8138
8139 /* Make a node of the sort we want. */
8140 t = make_node (METHOD_TYPE);
8141
8142 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8143 TREE_TYPE (t) = rettype;
8144 ptype = build_pointer_type (basetype);
8145
8146 /* The actual arglist for this function includes a "hidden" argument
8147 which is "this". Put it into the list of argument types. */
8148 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8149 TYPE_ARG_TYPES (t) = argtypes;
8150
8151 /* If we already have such a type, use the old one. */
8152 hstate.add_object (TYPE_HASH (basetype));
8153 hstate.add_object (TYPE_HASH (rettype));
8154 type_hash_list (argtypes, hstate);
8155 t = type_hash_canon (hstate.end (), t);
8156
8157 /* Set up the canonical type. */
8158 any_structural_p
8159 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8160 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8161 any_noncanonical_p
8162 = (TYPE_CANONICAL (basetype) != basetype
8163 || TYPE_CANONICAL (rettype) != rettype);
8164 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8165 &any_structural_p,
8166 &any_noncanonical_p);
8167 if (any_structural_p)
8168 SET_TYPE_STRUCTURAL_EQUALITY (t);
8169 else if (any_noncanonical_p)
8170 TYPE_CANONICAL (t)
8171 = build_method_type_directly (TYPE_CANONICAL (basetype),
8172 TYPE_CANONICAL (rettype),
8173 canon_argtypes);
8174 if (!COMPLETE_TYPE_P (t))
8175 layout_type (t);
8176
8177 return t;
8178 }
8179
8180 /* Construct, lay out and return the type of methods belonging to class
8181 BASETYPE and whose arguments and values are described by TYPE.
8182 If that type exists already, reuse it.
8183 TYPE must be a FUNCTION_TYPE node. */
8184
8185 tree
8186 build_method_type (tree basetype, tree type)
8187 {
8188 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8189
8190 return build_method_type_directly (basetype,
8191 TREE_TYPE (type),
8192 TYPE_ARG_TYPES (type));
8193 }
8194
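/* Illustrative usage sketch (an addition for exposition, not original to
   this file): given some class type KLASS (a RECORD_TYPE, hypothetical
   here), the type of a member function "void KLASS::f ()" can be built
   as below; the implicit "this" argument of type pointer-to-KLASS is
   prepended automatically.

     tree mtype = build_method_type_directly (klass, void_type_node,
                                              void_list_node);

   The names klass and mtype are hypothetical and only for illustration.  */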
8195 /* Construct, lay out and return the type of offsets to a value
8196 of type TYPE, within an object of type BASETYPE.
8197 If a suitable offset type exists already, reuse it. */
8198
8199 tree
8200 build_offset_type (tree basetype, tree type)
8201 {
8202 tree t;
8203 inchash::hash hstate;
8204
8205 /* Make a node of the sort we want. */
8206 t = make_node (OFFSET_TYPE);
8207
8208 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8209 TREE_TYPE (t) = type;
8210
8211 /* If we already have such a type, use the old one. */
8212 hstate.add_object (TYPE_HASH (basetype));
8213 hstate.add_object (TYPE_HASH (type));
8214 t = type_hash_canon (hstate.end (), t);
8215
8216 if (!COMPLETE_TYPE_P (t))
8217 layout_type (t);
8218
8219 if (TYPE_CANONICAL (t) == t)
8220 {
8221 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8222 || TYPE_STRUCTURAL_EQUALITY_P (type))
8223 SET_TYPE_STRUCTURAL_EQUALITY (t);
8224 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8225 || TYPE_CANONICAL (type) != type)
8226 TYPE_CANONICAL (t)
8227 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8228 TYPE_CANONICAL (type));
8229 }
8230
8231 return t;
8232 }
8233
8234 /* Create a complex type whose components are COMPONENT_TYPE. */
8235
8236 tree
8237 build_complex_type (tree component_type)
8238 {
8239 tree t;
8240 inchash::hash hstate;
8241
8242 gcc_assert (INTEGRAL_TYPE_P (component_type)
8243 || SCALAR_FLOAT_TYPE_P (component_type)
8244 || FIXED_POINT_TYPE_P (component_type));
8245
8246 /* Make a node of the sort we want. */
8247 t = make_node (COMPLEX_TYPE);
8248
8249 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8250
8251 /* If we already have such a type, use the old one. */
8252 hstate.add_object (TYPE_HASH (component_type));
8253 t = type_hash_canon (hstate.end (), t);
8254
8255 if (!COMPLETE_TYPE_P (t))
8256 layout_type (t);
8257
8258 if (TYPE_CANONICAL (t) == t)
8259 {
8260 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8261 SET_TYPE_STRUCTURAL_EQUALITY (t);
8262 else if (TYPE_CANONICAL (component_type) != component_type)
8263 TYPE_CANONICAL (t)
8264 = build_complex_type (TYPE_CANONICAL (component_type));
8265 }
8266
8267 /* We need to create a name, since complex is a fundamental type. */
8268 if (! TYPE_NAME (t))
8269 {
8270 const char *name;
8271 if (component_type == char_type_node)
8272 name = "complex char";
8273 else if (component_type == signed_char_type_node)
8274 name = "complex signed char";
8275 else if (component_type == unsigned_char_type_node)
8276 name = "complex unsigned char";
8277 else if (component_type == short_integer_type_node)
8278 name = "complex short int";
8279 else if (component_type == short_unsigned_type_node)
8280 name = "complex short unsigned int";
8281 else if (component_type == integer_type_node)
8282 name = "complex int";
8283 else if (component_type == unsigned_type_node)
8284 name = "complex unsigned int";
8285 else if (component_type == long_integer_type_node)
8286 name = "complex long int";
8287 else if (component_type == long_unsigned_type_node)
8288 name = "complex long unsigned int";
8289 else if (component_type == long_long_integer_type_node)
8290 name = "complex long long int";
8291 else if (component_type == long_long_unsigned_type_node)
8292 name = "complex long long unsigned int";
8293 else
8294 name = 0;
8295
8296 if (name != 0)
8297 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8298 get_identifier (name), t);
8299 }
8300
8301 return build_qualified_type (t, TYPE_QUALS (component_type));
8302 }
8303
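/* Illustrative usage sketch (an addition for exposition, not original to
   this file): the node for the C type "_Complex double" can be built as
   below; since the result carries the qualifiers of its component type,
   a const component would yield a const complex type.

     tree cplx_double = build_complex_type (double_type_node);

   The name cplx_double is hypothetical and only for illustration.  */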
8304 /* If TYPE is a real or complex floating-point type and the target
8305 does not directly support arithmetic on TYPE then return the wider
8306 type to be used for arithmetic on TYPE. Otherwise, return
8307 NULL_TREE. */
8308
8309 tree
8310 excess_precision_type (tree type)
8311 {
8312 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8313 {
8314 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8315 switch (TREE_CODE (type))
8316 {
8317 case REAL_TYPE:
8318 switch (flt_eval_method)
8319 {
8320 case 1:
8321 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8322 return double_type_node;
8323 break;
8324 case 2:
8325 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8326 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8327 return long_double_type_node;
8328 break;
8329 default:
8330 gcc_unreachable ();
8331 }
8332 break;
8333 case COMPLEX_TYPE:
8334 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8335 return NULL_TREE;
8336 switch (flt_eval_method)
8337 {
8338 case 1:
8339 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8340 return complex_double_type_node;
8341 break;
8342 case 2:
8343 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8344 || (TYPE_MODE (TREE_TYPE (type))
8345 == TYPE_MODE (double_type_node)))
8346 return complex_long_double_type_node;
8347 break;
8348 default:
8349 gcc_unreachable ();
8350 }
8351 break;
8352 default:
8353 break;
8354 }
8355 }
8356 return NULL_TREE;
8357 }
8358 \f
8359 /* Return OP, stripped of any conversions to wider types as much as is safe.
8360 Converting the value back to OP's type makes a value equivalent to OP.
8361
8362 If FOR_TYPE is nonzero, we return a value which, if converted to
8363 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8364
8365 OP must have integer, real or enumeral type. Pointers are not allowed!
8366
8367 There are some cases where the obvious value we could return
8368 would regenerate to OP if converted to OP's type,
8369 but would not extend like OP to wider types.
8370 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8371 For example, if OP is (unsigned short)(signed char)-1,
8372 we avoid returning (signed char)-1 if FOR_TYPE is int,
8373 even though extending that to an unsigned short would regenerate OP,
8374 since the result of extending (signed char)-1 to (int)
8375 is different from (int) OP. */
8376
8377 tree
8378 get_unwidened (tree op, tree for_type)
8379 {
8380 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8381 tree type = TREE_TYPE (op);
8382 unsigned final_prec
8383 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8384 int uns
8385 = (for_type != 0 && for_type != type
8386 && final_prec > TYPE_PRECISION (type)
8387 && TYPE_UNSIGNED (type));
8388 tree win = op;
8389
8390 while (CONVERT_EXPR_P (op))
8391 {
8392 int bitschange;
8393
8394 /* TYPE_PRECISION on vector types has different meaning
8395 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8396 so avoid them here. */
8397 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8398 break;
8399
8400 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8401 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8402
8403 /* Truncations are many-one so cannot be removed,
8404 unless we are later going to truncate down even farther. */
8405 if (bitschange < 0
8406 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8407 break;
8408
8409 /* See what's inside this conversion. If we decide to strip it,
8410 we will set WIN. */
8411 op = TREE_OPERAND (op, 0);
8412
8413 /* If we have not stripped any zero-extensions (uns is 0),
8414 we can strip any kind of extension.
8415 If we have previously stripped a zero-extension,
8416 only zero-extensions can safely be stripped.
8417 Any extension can be stripped if the bits it would produce
8418 are all going to be discarded later by truncating to FOR_TYPE. */
8419
8420 if (bitschange > 0)
8421 {
8422 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8423 win = op;
8424 /* TYPE_UNSIGNED says whether this is a zero-extension.
8425 Let's avoid computing it if it does not affect WIN
8426 and if UNS will not be needed again. */
8427 if ((uns
8428 || CONVERT_EXPR_P (op))
8429 && TYPE_UNSIGNED (TREE_TYPE (op)))
8430 {
8431 uns = 1;
8432 win = op;
8433 }
8434 }
8435 }
8436
8437 /* If we finally reach a constant see if it fits in for_type and
8438 in that case convert it. */
8439 if (for_type
8440 && TREE_CODE (win) == INTEGER_CST
8441 && TREE_TYPE (win) != for_type
8442 && int_fits_type_p (win, for_type))
8443 win = fold_convert (for_type, win);
8444
8445 return win;
8446 }
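
/* Illustrative sketch (an addition for exposition, not original to this
   file): if UC is some expression of type unsigned char (a hypothetical
   VAR_DECL, say), the widening conversion below can be stripped again,
   because converting UC back to int regenerates the widened value.

     tree widened = build1 (NOP_EXPR, integer_type_node, uc);
     tree narrow = get_unwidened (widened, NULL_TREE);

   Here narrow is uc itself.  The names uc, widened and narrow are
   hypothetical and only for illustration.  */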
8447 \f
8448 /* Return OP or a simpler expression for a narrower value
8449 which can be sign-extended or zero-extended to give back OP.
8450 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8451 or 0 if the value should be sign-extended. */
8452
8453 tree
8454 get_narrower (tree op, int *unsignedp_ptr)
8455 {
8456 int uns = 0;
8457 int first = 1;
8458 tree win = op;
8459 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8460
8461 while (TREE_CODE (op) == NOP_EXPR)
8462 {
8463 int bitschange
8464 = (TYPE_PRECISION (TREE_TYPE (op))
8465 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8466
8467 /* Truncations are many-one so cannot be removed. */
8468 if (bitschange < 0)
8469 break;
8470
8471 /* See what's inside this conversion. If we decide to strip it,
8472 we will set WIN. */
8473
8474 if (bitschange > 0)
8475 {
8476 op = TREE_OPERAND (op, 0);
8477 /* An extension: the outermost one can be stripped,
8478 but remember whether it is zero or sign extension. */
8479 if (first)
8480 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8481 /* Otherwise, if a sign extension has been stripped,
8482 only sign extensions can now be stripped;
8483 if a zero extension has been stripped, only zero-extensions. */
8484 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8485 break;
8486 first = 0;
8487 }
8488 else /* bitschange == 0 */
8489 {
8490 /* A change in nominal type can always be stripped, but we must
8491 preserve the unsignedness. */
8492 if (first)
8493 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8494 first = 0;
8495 op = TREE_OPERAND (op, 0);
8496 /* Keep trying to narrow, but don't assign op to win if it
8497 would turn an integral type into something else. */
8498 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8499 continue;
8500 }
8501
8502 win = op;
8503 }
8504
8505 if (TREE_CODE (op) == COMPONENT_REF
8506 /* Since type_for_size always gives an integer type. */
8507 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8508 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8509 /* Ensure field is laid out already. */
8510 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8511 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8512 {
8513 unsigned HOST_WIDE_INT innerprec
8514 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8515 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8516 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8517 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8518
8519 /* We can get this structure field in a narrower type that fits it,
8520 but the resulting extension to its nominal type (a fullword type)
8521 must satisfy the same conditions as for other extensions.
8522
8523 Do this only for fields that are aligned (not bit-fields),
8524 because when bit-field insns will be used there is no
8525 advantage in doing this. */
8526
8527 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8528 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8529 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8530 && type != 0)
8531 {
8532 if (first)
8533 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8534 win = fold_convert (type, op);
8535 }
8536 }
8537
8538 *unsignedp_ptr = uns;
8539 return win;
8540 }
8541 \f
8542 /* Returns true if integer constant C has a value that is permissible
8543 for type TYPE (an INTEGER_TYPE). */
8544
8545 bool
8546 int_fits_type_p (const_tree c, const_tree type)
8547 {
8548 tree type_low_bound, type_high_bound;
8549 bool ok_for_low_bound, ok_for_high_bound;
8550 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8551
8552 retry:
8553 type_low_bound = TYPE_MIN_VALUE (type);
8554 type_high_bound = TYPE_MAX_VALUE (type);
8555
8556 /* If at least one bound of the type is a constant integer, we can check
8557 ourselves and maybe make a decision. If no such decision is possible, but
8558 this type is a subtype, try checking against that. Otherwise, use
8559 fits_to_tree_p, which checks against the precision.
8560
8561 Compute the status for each possibly constant bound, and return if we see
8562 one does not match. Use ok_for_xxx_bound for this purpose: it is true
8563 when the corresponding bound is a constant that the value is known to
8564 satisfy, and false when that bound does not settle the question. */
8565
8566 /* Check if c >= type_low_bound. */
8567 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8568 {
8569 if (tree_int_cst_lt (c, type_low_bound))
8570 return false;
8571 ok_for_low_bound = true;
8572 }
8573 else
8574 ok_for_low_bound = false;
8575
8576 /* Check if c <= type_high_bound. */
8577 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8578 {
8579 if (tree_int_cst_lt (type_high_bound, c))
8580 return false;
8581 ok_for_high_bound = true;
8582 }
8583 else
8584 ok_for_high_bound = false;
8585
8586 /* If the constant fits both bounds, the result is known. */
8587 if (ok_for_low_bound && ok_for_high_bound)
8588 return true;
8589
8590 /* Perform some generic filtering which may allow making a decision
8591 even if the bounds are not constant. First, negative integers
8592 never fit in unsigned types. */
8593 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8594 return false;
8595
8596 /* Second, narrower types always fit in wider ones. */
8597 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8598 return true;
8599
8600 /* Third, unsigned integers with top bit set never fit signed types. */
8601 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8602 {
8603 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8604 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8605 {
8606 /* When a tree_cst is converted to a wide-int, the precision
8607 is taken from the type. However, if the precision of the
8608 mode underneath the type is smaller than that, it is
8609 possible that the value will not fit. The test below
8610 fails if any bit is set between the sign bit of the
8611 underlying mode and the top bit of the type. */
8612 if (wi::ne_p (wi::zext (c, prec - 1), c))
8613 return false;
8614 }
8615 else if (wi::neg_p (c))
8616 return false;
8617 }
8618
8619 /* If we haven't been able to decide at this point, there is nothing more we
8620 can check ourselves here. Look at the base type if we have one and it
8621 has the same precision. */
8622 if (TREE_CODE (type) == INTEGER_TYPE
8623 && TREE_TYPE (type) != 0
8624 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8625 {
8626 type = TREE_TYPE (type);
8627 goto retry;
8628 }
8629
8630 /* Or to fits_to_tree_p, if nothing else. */
8631 return wi::fits_to_tree_p (c, type);
8632 }
8633
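/* Illustrative usage sketch (an addition for exposition, not original to
   this file): checking whether integer constants can be represented in
   unsigned char.

     tree c1 = build_int_cst (integer_type_node, 200);
     tree c2 = build_int_cst (integer_type_node, 300);

   On a target where unsigned char has 8 bits of precision,
   int_fits_type_p (c1, unsigned_char_type_node) returns true while
   int_fits_type_p (c2, unsigned_char_type_node) returns false, because
   300 exceeds TYPE_MAX_VALUE of the type.  The names c1 and c2 are
   hypothetical and only for illustration.  */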
8634 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8635 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8636 represented (assuming two's-complement arithmetic) within the bit
8637 precision of the type are returned instead. */
8638
8639 void
8640 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8641 {
8642 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8643 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8644 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8645 else
8646 {
8647 if (TYPE_UNSIGNED (type))
8648 mpz_set_ui (min, 0);
8649 else
8650 {
8651 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8652 wi::to_mpz (mn, min, SIGNED);
8653 }
8654 }
8655
8656 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8657 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8658 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8659 else
8660 {
8661 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8662 wi::to_mpz (mn, max, TYPE_SIGN (type));
8663 }
8664 }
8665
8666 /* Return true if VAR is an automatic variable defined in function FN. */
8667
8668 bool
8669 auto_var_in_fn_p (const_tree var, const_tree fn)
8670 {
8671 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8672 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8673 || TREE_CODE (var) == PARM_DECL)
8674 && ! TREE_STATIC (var))
8675 || TREE_CODE (var) == LABEL_DECL
8676 || TREE_CODE (var) == RESULT_DECL));
8677 }
8678
8679 /* Subprogram of following function. Called by walk_tree.
8680
8681 Return *TP if it is an automatic variable or parameter of the
8682 function passed in as DATA. */
8683
8684 static tree
8685 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8686 {
8687 tree fn = (tree) data;
8688
8689 if (TYPE_P (*tp))
8690 *walk_subtrees = 0;
8691
8692 else if (DECL_P (*tp)
8693 && auto_var_in_fn_p (*tp, fn))
8694 return *tp;
8695
8696 return NULL_TREE;
8697 }
8698
8699 /* Returns true if T is, contains, or refers to a type with variable
8700 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8701 arguments, but not the return type. If FN is nonzero, only return
8702 true if a modifier of the type or position of FN is a variable or
8703 parameter inside FN.
8704
8705 This concept is more general than that of C99 'variably modified types':
8706 in C99, a struct type is never variably modified because a VLA may not
8707 appear as a structure member. However, in GNU C, code like:
8708
8709 struct S { int i[f()]; };
8710
8711 is valid, and other languages may define similar constructs. */
8712
8713 bool
8714 variably_modified_type_p (tree type, tree fn)
8715 {
8716 tree t;
8717
8718 /* Test if T is either variable (if FN is zero) or an expression containing
8719 a variable in FN. If TYPE isn't gimplified, return true also if
8720 gimplify_one_sizepos would gimplify the expression into a local
8721 variable. */
8722 #define RETURN_TRUE_IF_VAR(T) \
8723 do { tree _t = (T); \
8724 if (_t != NULL_TREE \
8725 && _t != error_mark_node \
8726 && TREE_CODE (_t) != INTEGER_CST \
8727 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8728 && (!fn \
8729 || (!TYPE_SIZES_GIMPLIFIED (type) \
8730 && !is_gimple_sizepos (_t)) \
8731 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8732 return true; } while (0)
8733
8734 if (type == error_mark_node)
8735 return false;
8736
8737 /* If TYPE itself has variable size, it is variably modified. */
8738 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8739 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8740
8741 switch (TREE_CODE (type))
8742 {
8743 case POINTER_TYPE:
8744 case REFERENCE_TYPE:
8745 case VECTOR_TYPE:
8746 if (variably_modified_type_p (TREE_TYPE (type), fn))
8747 return true;
8748 break;
8749
8750 case FUNCTION_TYPE:
8751 case METHOD_TYPE:
8752 /* If TYPE is a function type, it is variably modified if the
8753 return type is variably modified. */
8754 if (variably_modified_type_p (TREE_TYPE (type), fn))
8755 return true;
8756 break;
8757
8758 case INTEGER_TYPE:
8759 case REAL_TYPE:
8760 case FIXED_POINT_TYPE:
8761 case ENUMERAL_TYPE:
8762 case BOOLEAN_TYPE:
8763 /* Scalar types are variably modified if their end points
8764 aren't constant. */
8765 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8766 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8767 break;
8768
8769 case RECORD_TYPE:
8770 case UNION_TYPE:
8771 case QUAL_UNION_TYPE:
8772 /* We can't see if any of the fields are variably-modified by the
8773 definition we normally use, since that would produce infinite
8774 recursion via pointers. */
8775 /* This is variably modified if some field's type is. */
8776 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8777 if (TREE_CODE (t) == FIELD_DECL)
8778 {
8779 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8780 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8781 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8782
8783 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8784 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8785 }
8786 break;
8787
8788 case ARRAY_TYPE:
8789 /* Do not call ourselves to avoid infinite recursion. This is
8790 variably modified if the element type is. */
8791 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8792 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8793 break;
8794
8795 default:
8796 break;
8797 }
8798
8799 /* The current language may have other cases to check, but in general,
8800 all other types are not variably modified. */
8801 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8802
8803 #undef RETURN_TRUE_IF_VAR
8804 }
8805
8806 /* Given a DECL or TYPE, return the scope in which it was declared, or
8807 NULL_TREE if there is no containing scope. */
8808
8809 tree
8810 get_containing_scope (const_tree t)
8811 {
8812 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8813 }
8814
8815 /* Return the innermost context enclosing DECL that is
8816 a FUNCTION_DECL, or zero if none. */
8817
8818 tree
8819 decl_function_context (const_tree decl)
8820 {
8821 tree context;
8822
8823 if (TREE_CODE (decl) == ERROR_MARK)
8824 return 0;
8825
8826 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8827 where we look up the function at runtime. Such functions always take
8828 a first argument of type 'pointer to real context'.
8829
8830 C++ should really be fixed to use DECL_CONTEXT for the real context,
8831 and use something else for the "virtual context". */
8832 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8833 context
8834 = TYPE_MAIN_VARIANT
8835 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8836 else
8837 context = DECL_CONTEXT (decl);
8838
8839 while (context && TREE_CODE (context) != FUNCTION_DECL)
8840 {
8841 if (TREE_CODE (context) == BLOCK)
8842 context = BLOCK_SUPERCONTEXT (context);
8843 else
8844 context = get_containing_scope (context);
8845 }
8846
8847 return context;
8848 }
8849
8850 /* Return the innermost context enclosing DECL that is
8851 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8852 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8853
8854 tree
8855 decl_type_context (const_tree decl)
8856 {
8857 tree context = DECL_CONTEXT (decl);
8858
8859 while (context)
8860 switch (TREE_CODE (context))
8861 {
8862 case NAMESPACE_DECL:
8863 case TRANSLATION_UNIT_DECL:
8864 return NULL_TREE;
8865
8866 case RECORD_TYPE:
8867 case UNION_TYPE:
8868 case QUAL_UNION_TYPE:
8869 return context;
8870
8871 case TYPE_DECL:
8872 case FUNCTION_DECL:
8873 context = DECL_CONTEXT (context);
8874 break;
8875
8876 case BLOCK:
8877 context = BLOCK_SUPERCONTEXT (context);
8878 break;
8879
8880 default:
8881 gcc_unreachable ();
8882 }
8883
8884 return NULL_TREE;
8885 }
8886
8887 /* CALL is a CALL_EXPR. Return the declaration for the function
8888 called, or NULL_TREE if the called function cannot be
8889 determined. */
8890
8891 tree
8892 get_callee_fndecl (const_tree call)
8893 {
8894 tree addr;
8895
8896 if (call == error_mark_node)
8897 return error_mark_node;
8898
8899 /* It's invalid to call this function with anything but a
8900 CALL_EXPR. */
8901 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8902
8903 /* The first operand to the CALL is the address of the function
8904 called. */
8905 addr = CALL_EXPR_FN (call);
8906
8907 /* If there is no function, return early. */
8908 if (addr == NULL_TREE)
8909 return NULL_TREE;
8910
8911 STRIP_NOPS (addr);
8912
8913 /* If this is a readonly function pointer, extract its initial value. */
8914 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8915 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8916 && DECL_INITIAL (addr))
8917 addr = DECL_INITIAL (addr);
8918
8919 /* If the address is just `&f' for some function `f', then we know
8920 that `f' is being called. */
8921 if (TREE_CODE (addr) == ADDR_EXPR
8922 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8923 return TREE_OPERAND (addr, 0);
8924
8925 /* We couldn't figure out what was being called. */
8926 return NULL_TREE;
8927 }
8928
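/* Illustrative usage sketch (an addition for exposition, not original to
   this file): for a direct call built from a known FUNCTION_DECL FNDECL
   (hypothetical here), the callee can be recovered from the CALL_EXPR.

     tree call = build_call_expr (fndecl, 0);
     tree callee = get_callee_fndecl (call);

   Here callee is fndecl again, because CALL_EXPR_FN (call) is the
   ADDR_EXPR taking the address of fndecl.  The names fndecl, call and
   callee are hypothetical and only for illustration.  */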
8929 /* Print debugging information about tree nodes generated during the compile,
8930 and any language-specific information. */
8931
8932 void
8933 dump_tree_statistics (void)
8934 {
8935 if (GATHER_STATISTICS)
8936 {
8937 int i;
8938 int total_nodes, total_bytes;
8939 fprintf (stderr, "Kind Nodes Bytes\n");
8940 fprintf (stderr, "---------------------------------------\n");
8941 total_nodes = total_bytes = 0;
8942 for (i = 0; i < (int) all_kinds; i++)
8943 {
8944 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8945 tree_node_counts[i], tree_node_sizes[i]);
8946 total_nodes += tree_node_counts[i];
8947 total_bytes += tree_node_sizes[i];
8948 }
8949 fprintf (stderr, "---------------------------------------\n");
8950 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8951 fprintf (stderr, "---------------------------------------\n");
8952 fprintf (stderr, "Code Nodes\n");
8953 fprintf (stderr, "----------------------------\n");
8954 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8955 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8956 tree_code_counts[i]);
8957 fprintf (stderr, "----------------------------\n");
8958 ssanames_print_statistics ();
8959 phinodes_print_statistics ();
8960 }
8961 else
8962 fprintf (stderr, "(No per-node statistics)\n");
8963
8964 print_type_hash_statistics ();
8965 print_debug_expr_statistics ();
8966 print_value_expr_statistics ();
8967 lang_hooks.print_statistics ();
8968 }
8969 \f
8970 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8971
8972 /* Generate a crc32 of a byte. */
8973
8974 static unsigned
8975 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
8976 {
8977 unsigned ix;
8978
8979 for (ix = bits; ix--; value <<= 1)
8980 {
8981 unsigned feedback;
8982
8983 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
8984 chksum <<= 1;
8985 chksum ^= feedback;
8986 }
8987 return chksum;
8988 }
8989
8990 /* Generate a crc32 of a 32-bit unsigned. */
8991
8992 unsigned
8993 crc32_unsigned (unsigned chksum, unsigned value)
8994 {
8995 return crc32_unsigned_bits (chksum, value, 32);
8996 }
8997
8998 /* Generate a crc32 of a byte. */
8999
9000 unsigned
9001 crc32_byte (unsigned chksum, char byte)
9002 {
9003 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9004 }
9005
9006 /* Generate a crc32 of a string. */
9007
9008 unsigned
9009 crc32_string (unsigned chksum, const char *string)
9010 {
9011 do
9012 {
9013 chksum = crc32_byte (chksum, *string);
9014 }
9015 while (*string++);
9016 return chksum;
9017 }
9018
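/* Illustrative usage sketch (an addition for exposition, not original to
   this file): the crc32 helpers above can be chained by threading the
   running checksum through each call.

     unsigned chksum = crc32_string (0, "main");
     chksum = crc32_unsigned (chksum, 42);
     chksum = crc32_byte (chksum, 'x');

   The literals used here are arbitrary and only for illustration.  */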
9019 /* P is a string that will be used in a symbol. Mask out any characters
9020 that are not valid in that context. */
9021
9022 void
9023 clean_symbol_name (char *p)
9024 {
9025 for (; *p; p++)
9026 if (! (ISALNUM (*p)
9027 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9028 || *p == '$'
9029 #endif
9030 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9031 || *p == '.'
9032 #endif
9033 ))
9034 *p = '_';
9035 }
9036
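/* Illustrative usage sketch (an addition for exposition, not original to
   this file): characters never valid in a symbol are rewritten to
   underscores in place.

     char buf[] = "foo-bar+baz";
     clean_symbol_name (buf);

   Afterwards buf holds "foo_bar_baz".  Whether '$' and '.' survive
   depends on NO_DOLLAR_IN_LABEL and NO_DOT_IN_LABEL for the target.
   The name buf is hypothetical and only for illustration.  */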
9037 /* Generate a name for a special-purpose function.
9038 The generated name may need to be unique across the whole link.
9039 Changes to this function may also require corresponding changes to
9040 xstrdup_mask_random.
9041 TYPE is some string to identify the purpose of this function to the
9042 linker or collect2; it must start with an uppercase letter,
9043 one of:
9044 I - for constructors
9045 D - for destructors
9046 N - for C++ anonymous namespaces
9047 F - for DWARF unwind frame information. */
9048
9049 tree
9050 get_file_function_name (const char *type)
9051 {
9052 char *buf;
9053 const char *p;
9054 char *q;
9055
9056 /* If we already have a name we know to be unique, just use that. */
9057 if (first_global_object_name)
9058 p = q = ASTRDUP (first_global_object_name);
9059 /* If the target is handling the constructors/destructors, they
9060 will be local to this file and the name is only necessary for
9061 debugging purposes.
9062 We also assign sub_I and sub_D suffixes to constructors called from
9063 the global static constructors. These are always local. */
9064 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9065 || (strncmp (type, "sub_", 4) == 0
9066 && (type[4] == 'I' || type[4] == 'D')))
9067 {
9068 const char *file = main_input_filename;
9069 if (! file)
9070 file = LOCATION_FILE (input_location);
9071 /* Just use the file's basename, because the full pathname
9072 might be quite long. */
9073 p = q = ASTRDUP (lbasename (file));
9074 }
9075 else
9076 {
9077 /* Otherwise, the name must be unique across the entire link.
9078 We don't have anything that we know to be unique to this translation
9079 unit, so use what we do have and throw in some randomness. */
9080 unsigned len;
9081 const char *name = weak_global_object_name;
9082 const char *file = main_input_filename;
9083
9084 if (! name)
9085 name = "";
9086 if (! file)
9087 file = LOCATION_FILE (input_location);
9088
9089 len = strlen (file);
9090 q = (char *) alloca (9 + 17 + len + 1);
9091 memcpy (q, file, len + 1);
9092
9093 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9094 crc32_string (0, name), get_random_seed (false));
9095
9096 p = q;
9097 }
9098
9099 clean_symbol_name (q);
9100 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9101 + strlen (type));
9102
9103 /* Set up the name of the file-level functions we may need.
9104 Use a global object (which is already required to be unique over
9105 the program) rather than the file name (which imposes extra
9106 constraints). */
9107 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9108
9109 return get_identifier (buf);
9110 }
9111 \f
9112 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9113
9114 /* Complain that the tree code of NODE does not match the expected 0
9115 terminated list of trailing codes. The trailing code list can be
9116 empty, for a more vague error message. FILE, LINE, and FUNCTION
9117 are of the caller. */
9118
9119 void
9120 tree_check_failed (const_tree node, const char *file,
9121 int line, const char *function, ...)
9122 {
9123 va_list args;
9124 const char *buffer;
9125 unsigned length = 0;
9126 enum tree_code code;
9127
9128 va_start (args, function);
9129 while ((code = (enum tree_code) va_arg (args, int)))
9130 length += 4 + strlen (get_tree_code_name (code));
9131 va_end (args);
9132 if (length)
9133 {
9134 char *tmp;
9135 va_start (args, function);
9136 length += strlen ("expected ");
9137 buffer = tmp = (char *) alloca (length);
9138 length = 0;
9139 while ((code = (enum tree_code) va_arg (args, int)))
9140 {
9141 const char *prefix = length ? " or " : "expected ";
9142
9143 strcpy (tmp + length, prefix);
9144 length += strlen (prefix);
9145 strcpy (tmp + length, get_tree_code_name (code));
9146 length += strlen (get_tree_code_name (code));
9147 }
9148 va_end (args);
9149 }
9150 else
9151 buffer = "unexpected node";
9152
9153 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9154 buffer, get_tree_code_name (TREE_CODE (node)),
9155 function, trim_filename (file), line);
9156 }
9157
9158 /* Complain that the tree code of NODE does match the expected 0
9159 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9160 the caller. */
9161
9162 void
9163 tree_not_check_failed (const_tree node, const char *file,
9164 int line, const char *function, ...)
9165 {
9166 va_list args;
9167 char *buffer;
9168 unsigned length = 0;
9169 enum tree_code code;
9170
9171 va_start (args, function);
9172 while ((code = (enum tree_code) va_arg (args, int)))
9173 length += 4 + strlen (get_tree_code_name (code));
9174 va_end (args);
9175 va_start (args, function);
9176 buffer = (char *) alloca (length);
9177 length = 0;
9178 while ((code = (enum tree_code) va_arg (args, int)))
9179 {
9180 if (length)
9181 {
9182 strcpy (buffer + length, " or ");
9183 length += 4;
9184 }
9185 strcpy (buffer + length, get_tree_code_name (code));
9186 length += strlen (get_tree_code_name (code));
9187 }
9188 va_end (args);
9189
9190 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9191 buffer, get_tree_code_name (TREE_CODE (node)),
9192 function, trim_filename (file), line);
9193 }
9194
9195 /* Similar to tree_check_failed, except that we check for a class of tree
9196 code, given in CL. */
9197
9198 void
9199 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9200 const char *file, int line, const char *function)
9201 {
9202 internal_error
9203 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9204 TREE_CODE_CLASS_STRING (cl),
9205 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9206 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9207 }
9208
9209 /* Similar to tree_check_failed, except that instead of specifying a
9210 dozen codes, use the knowledge that they're all sequential. */
9211
9212 void
9213 tree_range_check_failed (const_tree node, const char *file, int line,
9214 const char *function, enum tree_code c1,
9215 enum tree_code c2)
9216 {
9217 char *buffer;
9218 unsigned length = 0;
9219 unsigned int c;
9220
9221 for (c = c1; c <= c2; ++c)
9222 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9223
9224 length += strlen ("expected ");
9225 buffer = (char *) alloca (length);
9226 length = 0;
9227
9228 for (c = c1; c <= c2; ++c)
9229 {
9230 const char *prefix = length ? " or " : "expected ";
9231
9232 strcpy (buffer + length, prefix);
9233 length += strlen (prefix);
9234 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9235 length += strlen (get_tree_code_name ((enum tree_code) c));
9236 }
9237
9238 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9239 buffer, get_tree_code_name (TREE_CODE (node)),
9240 function, trim_filename (file), line);
9241 }
9242
9243
9244 /* Similar to tree_check_failed, except that we check that a tree does
9245 not belong to the specified class, given in CL. */
9246
9247 void
9248 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9249 const char *file, int line, const char *function)
9250 {
9251 internal_error
9252 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9253 TREE_CODE_CLASS_STRING (cl),
9254 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9255 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9256 }
9257
9258
9259 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9260
9261 void
9262 omp_clause_check_failed (const_tree node, const char *file, int line,
9263 const char *function, enum omp_clause_code code)
9264 {
9265 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9266 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9267 function, trim_filename (file), line);
9268 }
9269
9270
9271 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9272
9273 void
9274 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9275 const char *function, enum omp_clause_code c1,
9276 enum omp_clause_code c2)
9277 {
9278 char *buffer;
9279 unsigned length = 0;
9280 unsigned int c;
9281
9282 for (c = c1; c <= c2; ++c)
9283 length += 4 + strlen (omp_clause_code_name[c]);
9284
9285 length += strlen ("expected ");
9286 buffer = (char *) alloca (length);
9287 length = 0;
9288
9289 for (c = c1; c <= c2; ++c)
9290 {
9291 const char *prefix = length ? " or " : "expected ";
9292
9293 strcpy (buffer + length, prefix);
9294 length += strlen (prefix);
9295 strcpy (buffer + length, omp_clause_code_name[c]);
9296 length += strlen (omp_clause_code_name[c]);
9297 }
9298
9299 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9300 buffer, omp_clause_code_name[TREE_CODE (node)],
9301 function, trim_filename (file), line);
9302 }
9303
9304
9305 #undef DEFTREESTRUCT
9306 #define DEFTREESTRUCT(VAL, NAME) NAME,
9307
9308 static const char *ts_enum_names[] = {
9309 #include "treestruct.def"
9310 };
9311 #undef DEFTREESTRUCT
9312
9313 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9314
9315 /* Similar to tree_class_check_failed, except that we check for
9316 whether CODE contains the tree structure identified by EN. */
9317
9318 void
9319 tree_contains_struct_check_failed (const_tree node,
9320 const enum tree_node_structure_enum en,
9321 const char *file, int line,
9322 const char *function)
9323 {
9324 internal_error
9325 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9326 TS_ENUM_NAME (en),
9327 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9328 }
9329
9330
9331 /* Similar to above, except that the check is for the bounds of the
9332 (dynamically sized) vector of elements of a tree_int_cst. */
9333
9334 void
9335 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9336 const char *function)
9337 {
9338 internal_error
9339 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9340 idx + 1, len, function, trim_filename (file), line);
9341 }
9342
9343 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9344 (dynamically sized) vector. */
9345
9346 void
9347 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9348 const char *function)
9349 {
9350 internal_error
9351 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9352 idx + 1, len, function, trim_filename (file), line);
9353 }
9354
9355 /* Similar to above, except that the check is for the bounds of the operand
9356 vector of an expression node EXP. */
9357
9358 void
9359 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9360 int line, const char *function)
9361 {
9362 enum tree_code code = TREE_CODE (exp);
9363 internal_error
9364 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9365 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9366 function, trim_filename (file), line);
9367 }
9368
9369 /* Similar to above, except that the check is for the number of
9370 operands of an OMP_CLAUSE node. */
9371
9372 void
9373 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9374 int line, const char *function)
9375 {
9376 internal_error
9377 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9378 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9379 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9380 trim_filename (file), line);
9381 }
9382 #endif /* ENABLE_TREE_CHECKING */
9383 \f
9384 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9385 and mapped to the machine mode MODE. Initialize its fields and build
9386 the information necessary for debugging output. */
9387
9388 static tree
9389 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9390 {
9391 tree t;
9392 inchash::hash hstate;
9393
9394 t = make_node (VECTOR_TYPE);
9395 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9396 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9397 SET_TYPE_MODE (t, mode);
9398
9399 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9400 SET_TYPE_STRUCTURAL_EQUALITY (t);
9401 else if (TYPE_CANONICAL (innertype) != innertype
9402 || mode != VOIDmode)
9403 TYPE_CANONICAL (t)
9404 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9405
9406 layout_type (t);
9407
9408 hstate.add_wide_int (VECTOR_TYPE);
9409 hstate.add_wide_int (nunits);
9410 hstate.add_wide_int (mode);
9411 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9412 t = type_hash_canon (hstate.end (), t);
9413
9414 /* We have built a main variant, based on the main variant of the
9415 inner type. Use it to build the variant we return. */
9416 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9417 && TREE_TYPE (t) != innertype)
9418 return build_type_attribute_qual_variant (t,
9419 TYPE_ATTRIBUTES (innertype),
9420 TYPE_QUALS (innertype));
9421
9422 return t;
9423 }
9424
9425 static tree
9426 make_or_reuse_type (unsigned size, int unsignedp)
9427 {
9428 if (size == INT_TYPE_SIZE)
9429 return unsignedp ? unsigned_type_node : integer_type_node;
9430 if (size == CHAR_TYPE_SIZE)
9431 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9432 if (size == SHORT_TYPE_SIZE)
9433 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9434 if (size == LONG_TYPE_SIZE)
9435 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9436 if (size == LONG_LONG_TYPE_SIZE)
9437 return (unsignedp ? long_long_unsigned_type_node
9438 : long_long_integer_type_node);
9439 if (size == 128 && int128_integer_type_node)
9440 return (unsignedp ? int128_unsigned_type_node
9441 : int128_integer_type_node);
9442
9443 if (unsignedp)
9444 return make_unsigned_type (size);
9445 else
9446 return make_signed_type (size);
9447 }
9448
9449 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9450
9451 static tree
9452 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9453 {
9454 if (satp)
9455 {
9456 if (size == SHORT_FRACT_TYPE_SIZE)
9457 return unsignedp ? sat_unsigned_short_fract_type_node
9458 : sat_short_fract_type_node;
9459 if (size == FRACT_TYPE_SIZE)
9460 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9461 if (size == LONG_FRACT_TYPE_SIZE)
9462 return unsignedp ? sat_unsigned_long_fract_type_node
9463 : sat_long_fract_type_node;
9464 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9465 return unsignedp ? sat_unsigned_long_long_fract_type_node
9466 : sat_long_long_fract_type_node;
9467 }
9468 else
9469 {
9470 if (size == SHORT_FRACT_TYPE_SIZE)
9471 return unsignedp ? unsigned_short_fract_type_node
9472 : short_fract_type_node;
9473 if (size == FRACT_TYPE_SIZE)
9474 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9475 if (size == LONG_FRACT_TYPE_SIZE)
9476 return unsignedp ? unsigned_long_fract_type_node
9477 : long_fract_type_node;
9478 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9479 return unsignedp ? unsigned_long_long_fract_type_node
9480 : long_long_fract_type_node;
9481 }
9482
9483 return make_fract_type (size, unsignedp, satp);
9484 }
9485
9486 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9487
9488 static tree
9489 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9490 {
9491 if (satp)
9492 {
9493 if (size == SHORT_ACCUM_TYPE_SIZE)
9494 return unsignedp ? sat_unsigned_short_accum_type_node
9495 : sat_short_accum_type_node;
9496 if (size == ACCUM_TYPE_SIZE)
9497 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9498 if (size == LONG_ACCUM_TYPE_SIZE)
9499 return unsignedp ? sat_unsigned_long_accum_type_node
9500 : sat_long_accum_type_node;
9501 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9502 return unsignedp ? sat_unsigned_long_long_accum_type_node
9503 : sat_long_long_accum_type_node;
9504 }
9505 else
9506 {
9507 if (size == SHORT_ACCUM_TYPE_SIZE)
9508 return unsignedp ? unsigned_short_accum_type_node
9509 : short_accum_type_node;
9510 if (size == ACCUM_TYPE_SIZE)
9511 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9512 if (size == LONG_ACCUM_TYPE_SIZE)
9513 return unsignedp ? unsigned_long_accum_type_node
9514 : long_accum_type_node;
9515 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9516 return unsignedp ? unsigned_long_long_accum_type_node
9517 : long_long_accum_type_node;
9518 }
9519
9520 return make_accum_type (size, unsignedp, satp);
9521 }
9522
9523
9524 /* Create an atomic variant node for TYPE. This routine is called
9525 during initialization of data types to create the 5 basic atomic
9526 types. The generic build_variant_type function requires these to
9527 already be set up in order to function properly, so cannot be
9528 called from there. If ALIGN is non-zero, then ensure alignment is
9529 overridden to this value. */
9530
9531 static tree
9532 build_atomic_base (tree type, unsigned int align)
9533 {
9534 tree t;
9535
9536 /* Make sure it's not already registered. */
9537 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9538 return t;
9539
9540 t = build_variant_type_copy (type);
9541 set_type_quals (t, TYPE_QUAL_ATOMIC);
9542
9543 if (align)
9544 TYPE_ALIGN (t) = align;
9545
9546 return t;
9547 }
9548
9549 /* Create nodes for all integer types (and error_mark_node) using the sizes
9550 of C datatypes. SIGNED_CHAR specifies whether char is signed, and
9551 SHORT_DOUBLE specifies whether double should be of the same precision
9552 as float. */
9553
9554 void
9555 build_common_tree_nodes (bool signed_char, bool short_double)
9556 {
9557 error_mark_node = make_node (ERROR_MARK);
9558 TREE_TYPE (error_mark_node) = error_mark_node;
9559
9560 initialize_sizetypes ();
9561
9562 /* Define both `signed char' and `unsigned char'. */
9563 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9564 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9565 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9566 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9567
9568 /* Define `char', which is like either `signed char' or `unsigned char'
9569 but not the same as either. */
9570 char_type_node
9571 = (signed_char
9572 ? make_signed_type (CHAR_TYPE_SIZE)
9573 : make_unsigned_type (CHAR_TYPE_SIZE));
9574 TYPE_STRING_FLAG (char_type_node) = 1;
9575
9576 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9577 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9578 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9579 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9580 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9581 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9582 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9583 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9584 #if HOST_BITS_PER_WIDE_INT >= 64
9585 /* TODO: This isn't correct, but at the moment the logic depends on
9586 the host's wide integers rather than the target's.
9587 If there is a target that does not support TImode but has a 128-bit
9588 integer-scalar register, this target check needs to be adjusted. */
9589 if (targetm.scalar_mode_supported_p (TImode))
9590 {
9591 int128_integer_type_node = make_signed_type (128);
9592 int128_unsigned_type_node = make_unsigned_type (128);
9593 }
9594 #endif
9595
9596 /* Define a boolean type. This type only represents boolean values but
9597 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9598 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9599 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9600 TYPE_PRECISION (boolean_type_node) = 1;
9601 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9602
9603 /* Define what type to use for size_t. */
9604 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9605 size_type_node = unsigned_type_node;
9606 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9607 size_type_node = long_unsigned_type_node;
9608 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9609 size_type_node = long_long_unsigned_type_node;
9610 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9611 size_type_node = short_unsigned_type_node;
9612 else
9613 gcc_unreachable ();
9614
9615 /* Fill in the rest of the sized types. Reuse existing type nodes
9616 when possible. */
9617 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9618 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9619 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9620 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9621 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9622
9623 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9624 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9625 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9626 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9627 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9628
9629 /* Don't call build_qualified_type for atomics. That routine does
9630 special processing for atomics, and until they are initialized
9631 it's better not to make that call.
9632
9633 Check to see if there is a target override for atomic types. */
9634
9635 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9636 targetm.atomic_align_for_mode (QImode));
9637 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9638 targetm.atomic_align_for_mode (HImode));
9639 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9640 targetm.atomic_align_for_mode (SImode));
9641 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9642 targetm.atomic_align_for_mode (DImode));
9643 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9644 targetm.atomic_align_for_mode (TImode));
9645
9646 access_public_node = get_identifier ("public");
9647 access_protected_node = get_identifier ("protected");
9648 access_private_node = get_identifier ("private");
9649
9650 /* Define these next since types below may use them. */
9651 integer_zero_node = build_int_cst (integer_type_node, 0);
9652 integer_one_node = build_int_cst (integer_type_node, 1);
9653 integer_three_node = build_int_cst (integer_type_node, 3);
9654 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9655
9656 size_zero_node = size_int (0);
9657 size_one_node = size_int (1);
9658 bitsize_zero_node = bitsize_int (0);
9659 bitsize_one_node = bitsize_int (1);
9660 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9661
9662 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9663 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9664
9665 void_type_node = make_node (VOID_TYPE);
9666 layout_type (void_type_node);
9667
9668 /* We are not going to have real types in C with less than byte alignment,
9669 so we might as well not have any types that claim to have it. */
9670 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9671 TYPE_USER_ALIGN (void_type_node) = 0;
9672
9673 void_node = make_node (VOID_CST);
9674 TREE_TYPE (void_node) = void_type_node;
9675
9676 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9677 layout_type (TREE_TYPE (null_pointer_node));
9678
9679 ptr_type_node = build_pointer_type (void_type_node);
9680 const_ptr_type_node
9681 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9682 fileptr_type_node = ptr_type_node;
9683
9684 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9685
9686 float_type_node = make_node (REAL_TYPE);
9687 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9688 layout_type (float_type_node);
9689
9690 double_type_node = make_node (REAL_TYPE);
9691 if (short_double)
9692 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9693 else
9694 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9695 layout_type (double_type_node);
9696
9697 long_double_type_node = make_node (REAL_TYPE);
9698 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9699 layout_type (long_double_type_node);
9700
9701 float_ptr_type_node = build_pointer_type (float_type_node);
9702 double_ptr_type_node = build_pointer_type (double_type_node);
9703 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9704 integer_ptr_type_node = build_pointer_type (integer_type_node);
9705
9706 /* Fixed size integer types. */
9707 uint16_type_node = make_or_reuse_type (16, 1);
9708 uint32_type_node = make_or_reuse_type (32, 1);
9709 uint64_type_node = make_or_reuse_type (64, 1);
9710
9711 /* Decimal float types. */
9712 dfloat32_type_node = make_node (REAL_TYPE);
9713 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9714 layout_type (dfloat32_type_node);
9715 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9716 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9717
9718 dfloat64_type_node = make_node (REAL_TYPE);
9719 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9720 layout_type (dfloat64_type_node);
9721 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9722 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9723
9724 dfloat128_type_node = make_node (REAL_TYPE);
9725 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9726 layout_type (dfloat128_type_node);
9727 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9728 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9729
9730 complex_integer_type_node = build_complex_type (integer_type_node);
9731 complex_float_type_node = build_complex_type (float_type_node);
9732 complex_double_type_node = build_complex_type (double_type_node);
9733 complex_long_double_type_node = build_complex_type (long_double_type_node);
9734
9735 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9736 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9737 sat_ ## KIND ## _type_node = \
9738 make_sat_signed_ ## KIND ## _type (SIZE); \
9739 sat_unsigned_ ## KIND ## _type_node = \
9740 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9741 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9742 unsigned_ ## KIND ## _type_node = \
9743 make_unsigned_ ## KIND ## _type (SIZE);
9744
9745 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9746 sat_ ## WIDTH ## KIND ## _type_node = \
9747 make_sat_signed_ ## KIND ## _type (SIZE); \
9748 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9749 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9750 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9751 unsigned_ ## WIDTH ## KIND ## _type_node = \
9752 make_unsigned_ ## KIND ## _type (SIZE);
9753
9754 /* Make fixed-point type nodes based on four different widths. */
9755 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9756 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9757 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9758 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9759 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9760
9761 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9762 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9763 NAME ## _type_node = \
9764 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9765 u ## NAME ## _type_node = \
9766 make_or_reuse_unsigned_ ## KIND ## _type \
9767 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9768 sat_ ## NAME ## _type_node = \
9769 make_or_reuse_sat_signed_ ## KIND ## _type \
9770 (GET_MODE_BITSIZE (MODE ## mode)); \
9771 sat_u ## NAME ## _type_node = \
9772 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9773 (GET_MODE_BITSIZE (U ## MODE ## mode));
9774
9775 /* Fixed-point type and mode nodes. */
9776 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9777 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9778 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9779 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9780 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9781 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9782 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9783 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9784 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9785 MAKE_FIXED_MODE_NODE (accum, da, DA)
9786 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9787
9788 {
9789 tree t = targetm.build_builtin_va_list ();
9790
9791 /* Many back-ends define record types without setting TYPE_NAME.
9792 If we copied the record type here, we'd keep the original
9793 record type without a name. This breaks name mangling. So,
9794 don't copy record types and let c_common_nodes_and_builtins()
9795 declare the type to be __builtin_va_list. */
9796 if (TREE_CODE (t) != RECORD_TYPE)
9797 t = build_variant_type_copy (t);
9798
9799 va_list_type_node = t;
9800 }
9801 }
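
/* A sketch of typical use: a front end calls this once during its global
   initialization, before creating any of its own types, e.g.

     build_common_tree_nodes (flag_signed_char, false);

   after which nodes such as integer_type_node and size_type_node above are
   available.  (flag_signed_char here stands for whatever notion of char
   signedness the front end uses.)  */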
9802
9803 /* Modify DECL for given flags.
9804 TM_PURE attribute is set only on types, so the function will modify
9805 DECL's type when ECF_TM_PURE is used. */
9806
9807 void
9808 set_call_expr_flags (tree decl, int flags)
9809 {
9810 if (flags & ECF_NOTHROW)
9811 TREE_NOTHROW (decl) = 1;
9812 if (flags & ECF_CONST)
9813 TREE_READONLY (decl) = 1;
9814 if (flags & ECF_PURE)
9815 DECL_PURE_P (decl) = 1;
9816 if (flags & ECF_LOOPING_CONST_OR_PURE)
9817 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9818 if (flags & ECF_NOVOPS)
9819 DECL_IS_NOVOPS (decl) = 1;
9820 if (flags & ECF_NORETURN)
9821 TREE_THIS_VOLATILE (decl) = 1;
9822 if (flags & ECF_MALLOC)
9823 DECL_IS_MALLOC (decl) = 1;
9824 if (flags & ECF_RETURNS_TWICE)
9825 DECL_IS_RETURNS_TWICE (decl) = 1;
9826 if (flags & ECF_LEAF)
9827 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9828 NULL, DECL_ATTRIBUTES (decl));
9829 if ((flags & ECF_TM_PURE) && flag_tm)
9830 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9831 /* Looping const or pure is implied by noreturn.
9832 There is currently no way to declare looping const or looping pure alone. */
9833 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9834 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9835 }
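
/* A sketch of typical use, marking DECL as a const, nothrow, leaf
   function:

     set_call_expr_flags (decl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   which sets TREE_READONLY and TREE_NOTHROW on DECL and attaches the
   "leaf" attribute as above.  */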
9836
9837
9838 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9839
9840 static void
9841 local_define_builtin (const char *name, tree type, enum built_in_function code,
9842 const char *library_name, int ecf_flags)
9843 {
9844 tree decl;
9845
9846 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9847 library_name, NULL_TREE);
9848 set_call_expr_flags (decl, ecf_flags);
9849
9850 set_builtin_decl (code, decl, true);
9851 }
9852
9853 /* Call this function after instantiating all builtins that the language
9854 front end cares about. This will build the rest of the builtins
9855 and internal functions that are relied upon by the tree optimizers and
9856 the middle-end. */
9857
9858 void
9859 build_common_builtin_nodes (void)
9860 {
9861 tree tmp, ftype;
9862 int ecf_flags;
9863
9864 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9865 {
9866 ftype = build_function_type (void_type_node, void_list_node);
9867 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9868 "__builtin_unreachable",
9869 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9870 | ECF_CONST);
9871 }
9872
9873 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9874 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9875 {
9876 ftype = build_function_type_list (ptr_type_node,
9877 ptr_type_node, const_ptr_type_node,
9878 size_type_node, NULL_TREE);
9879
9880 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9881 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9882 "memcpy", ECF_NOTHROW | ECF_LEAF);
9883 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9884 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9885 "memmove", ECF_NOTHROW | ECF_LEAF);
9886 }
9887
9888 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9889 {
9890 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9891 const_ptr_type_node, size_type_node,
9892 NULL_TREE);
9893 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9894 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9895 }
9896
9897 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9898 {
9899 ftype = build_function_type_list (ptr_type_node,
9900 ptr_type_node, integer_type_node,
9901 size_type_node, NULL_TREE);
9902 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9903 "memset", ECF_NOTHROW | ECF_LEAF);
9904 }
9905
9906 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9907 {
9908 ftype = build_function_type_list (ptr_type_node,
9909 size_type_node, NULL_TREE);
9910 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9911 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9912 }
9913
9914 ftype = build_function_type_list (ptr_type_node, size_type_node,
9915 size_type_node, NULL_TREE);
9916 local_define_builtin ("__builtin_alloca_with_align", ftype,
9917 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9918 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9919
9920 /* If we're checking the stack, `alloca' can throw. */
9921 if (flag_stack_check)
9922 {
9923 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9924 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9925 }
9926
9927 ftype = build_function_type_list (void_type_node,
9928 ptr_type_node, ptr_type_node,
9929 ptr_type_node, NULL_TREE);
9930 local_define_builtin ("__builtin_init_trampoline", ftype,
9931 BUILT_IN_INIT_TRAMPOLINE,
9932 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9933 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9934 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9935 "__builtin_init_heap_trampoline",
9936 ECF_NOTHROW | ECF_LEAF);
9937
9938 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9939 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9940 BUILT_IN_ADJUST_TRAMPOLINE,
9941 "__builtin_adjust_trampoline",
9942 ECF_CONST | ECF_NOTHROW);
9943
9944 ftype = build_function_type_list (void_type_node,
9945 ptr_type_node, ptr_type_node, NULL_TREE);
9946 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9947 BUILT_IN_NONLOCAL_GOTO,
9948 "__builtin_nonlocal_goto",
9949 ECF_NORETURN | ECF_NOTHROW);
9950
9951 ftype = build_function_type_list (void_type_node,
9952 ptr_type_node, ptr_type_node, NULL_TREE);
9953 local_define_builtin ("__builtin_setjmp_setup", ftype,
9954 BUILT_IN_SETJMP_SETUP,
9955 "__builtin_setjmp_setup", ECF_NOTHROW);
9956
9957 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9958 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9959 BUILT_IN_SETJMP_RECEIVER,
9960 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9961
9962 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9963 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9964 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9965
9966 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9967 local_define_builtin ("__builtin_stack_restore", ftype,
9968 BUILT_IN_STACK_RESTORE,
9969 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9970
9971 /* If there's a possibility that we might use the ARM EABI, build the
9972 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
9973 if (targetm.arm_eabi_unwinder)
9974 {
9975 ftype = build_function_type_list (void_type_node, NULL_TREE);
9976 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9977 BUILT_IN_CXA_END_CLEANUP,
9978 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9979 }
9980
9981 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9982 local_define_builtin ("__builtin_unwind_resume", ftype,
9983 BUILT_IN_UNWIND_RESUME,
9984 ((targetm_common.except_unwind_info (&global_options)
9985 == UI_SJLJ)
9986 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9987 ECF_NORETURN);
9988
9989 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9990 {
9991 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9992 NULL_TREE);
9993 local_define_builtin ("__builtin_return_address", ftype,
9994 BUILT_IN_RETURN_ADDRESS,
9995 "__builtin_return_address",
9996 ECF_NOTHROW);
9997 }
9998
9999 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10000 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10001 {
10002 ftype = build_function_type_list (void_type_node, ptr_type_node,
10003 ptr_type_node, NULL_TREE);
10004 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10005 local_define_builtin ("__cyg_profile_func_enter", ftype,
10006 BUILT_IN_PROFILE_FUNC_ENTER,
10007 "__cyg_profile_func_enter", 0);
10008 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10009 local_define_builtin ("__cyg_profile_func_exit", ftype,
10010 BUILT_IN_PROFILE_FUNC_EXIT,
10011 "__cyg_profile_func_exit", 0);
10012 }
10013
10014 /* The exception object and filter values from the runtime. The argument
10015 must be zero before exception lowering, i.e. from the front end. After
10016 exception lowering, it will be the region number for the exception
10017 landing pad. These functions are PURE instead of CONST to prevent
10018 them from being hoisted past the exception edge that will initialize
10019 its value in the landing pad. */
10020 ftype = build_function_type_list (ptr_type_node,
10021 integer_type_node, NULL_TREE);
10022 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10023 /* Only use TM_PURE if we have TM language support. */
10024 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10025 ecf_flags |= ECF_TM_PURE;
10026 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10027 "__builtin_eh_pointer", ecf_flags);
10028
10029 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10030 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10031 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10032 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10033
10034 ftype = build_function_type_list (void_type_node,
10035 integer_type_node, integer_type_node,
10036 NULL_TREE);
10037 local_define_builtin ("__builtin_eh_copy_values", ftype,
10038 BUILT_IN_EH_COPY_VALUES,
10039 "__builtin_eh_copy_values", ECF_NOTHROW);
10040
10041 /* Complex multiplication and division. These are handled as builtins
10042 rather than optabs because emit_library_call_value doesn't support
10043 complex. Further, we can do slightly better with folding these
10044 beasties if the real and imaginary parts of the arguments are separate. */
10045 {
10046 int mode;
10047
10048 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10049 {
10050 char mode_name_buf[4], *q;
10051 const char *p;
10052 enum built_in_function mcode, dcode;
10053 tree type, inner_type;
10054 const char *prefix = "__";
10055
10056 if (targetm.libfunc_gnu_prefix)
10057 prefix = "__gnu_";
10058
10059 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10060 if (type == NULL)
10061 continue;
10062 inner_type = TREE_TYPE (type);
10063
10064 ftype = build_function_type_list (type, inner_type, inner_type,
10065 inner_type, inner_type, NULL_TREE);
10066
10067 mcode = ((enum built_in_function)
10068 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10069 dcode = ((enum built_in_function)
10070 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10071
10072 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10073 *q = TOLOWER (*p);
10074 *q = '\0';
10075
10076 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10077 NULL);
10078 local_define_builtin (built_in_names[mcode], ftype, mcode,
10079 built_in_names[mcode],
10080 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10081
10082 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10083 NULL);
10084 local_define_builtin (built_in_names[dcode], ftype, dcode,
10085 built_in_names[dcode],
10086 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10087 }
10088 }
10089
10090 init_internal_fns ();
10091 }
10092
10093 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10094 better way.
10095
10096 If we requested a pointer to a vector, build up the pointers that
10097 we stripped off while looking for the inner type. Similarly for
10098 return values from functions.
10099
10100 The argument TYPE is the top of the chain, and BOTTOM is the
10101 new type which we will point to. */
10102
10103 tree
10104 reconstruct_complex_type (tree type, tree bottom)
10105 {
10106 tree inner, outer;
10107
10108 if (TREE_CODE (type) == POINTER_TYPE)
10109 {
10110 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10111 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10112 TYPE_REF_CAN_ALIAS_ALL (type));
10113 }
10114 else if (TREE_CODE (type) == REFERENCE_TYPE)
10115 {
10116 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10117 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10118 TYPE_REF_CAN_ALIAS_ALL (type));
10119 }
10120 else if (TREE_CODE (type) == ARRAY_TYPE)
10121 {
10122 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10123 outer = build_array_type (inner, TYPE_DOMAIN (type));
10124 }
10125 else if (TREE_CODE (type) == FUNCTION_TYPE)
10126 {
10127 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10128 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10129 }
10130 else if (TREE_CODE (type) == METHOD_TYPE)
10131 {
10132 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10133 /* The build_method_type_directly() routine prepends 'this' to the argument
10134 list, so we must compensate by getting rid of it. */
10135 outer
10136 = build_method_type_directly
10137 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10138 inner,
10139 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10140 }
10141 else if (TREE_CODE (type) == OFFSET_TYPE)
10142 {
10143 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10144 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10145 }
10146 else
10147 return bottom;
10148
10149 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10150 TYPE_QUALS (type));
10151 }
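
/* A sketch of the effect: given a scalar-based type such as "float *" and
   a vector BOTTOM type,

     tree v4sf = build_vector_type (float_type_node, 4);
     tree p = reconstruct_complex_type (float_ptr_type_node, v4sf);

   P ends up as a pointer to the four-element float vector rather than to
   float itself; the qualifiers and attributes of the original pointer type
   are preserved.  */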
10152
10153 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10154 the inner type. */
10155 tree
10156 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10157 {
10158 int nunits;
10159
10160 switch (GET_MODE_CLASS (mode))
10161 {
10162 case MODE_VECTOR_INT:
10163 case MODE_VECTOR_FLOAT:
10164 case MODE_VECTOR_FRACT:
10165 case MODE_VECTOR_UFRACT:
10166 case MODE_VECTOR_ACCUM:
10167 case MODE_VECTOR_UACCUM:
10168 nunits = GET_MODE_NUNITS (mode);
10169 break;
10170
10171 case MODE_INT:
10172 /* Check that there are no leftover bits. */
10173 gcc_assert (GET_MODE_BITSIZE (mode)
10174 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10175
10176 nunits = GET_MODE_BITSIZE (mode)
10177 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10178 break;
10179
10180 default:
10181 gcc_unreachable ();
10182 }
10183
10184 return make_vector_type (innertype, nunits, mode);
10185 }
10186
10187 /* Similarly, but takes the inner type and number of units, which must be
10188 a power of two. */
10189
10190 tree
10191 build_vector_type (tree innertype, int nunits)
10192 {
10193 return make_vector_type (innertype, nunits, VOIDmode);
10194 }
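
/* A sketch of the two entry points above, assuming a target that provides
   a V4SImode vector mode:

     tree v4si_a = build_vector_type (intSI_type_node, 4);
     tree v4si_b = build_vector_type_for_mode (intSI_type_node, V4SImode);

   both describe a four-element vector of SImode integers; the first form
   leaves the machine mode for make_vector_type/layout_type to choose.  */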
10195
10196 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10197
10198 tree
10199 build_opaque_vector_type (tree innertype, int nunits)
10200 {
10201 tree t = make_vector_type (innertype, nunits, VOIDmode);
10202 tree cand;
10203 /* We always build the non-opaque variant before the opaque one,
10204 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10205 cand = TYPE_NEXT_VARIANT (t);
10206 if (cand
10207 && TYPE_VECTOR_OPAQUE (cand)
10208 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10209 return cand;
10210 /* Otherwise build a variant type and make sure to queue it after
10211 the non-opaque type. */
10212 cand = build_distinct_type_copy (t);
10213 TYPE_VECTOR_OPAQUE (cand) = true;
10214 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10215 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10216 TYPE_NEXT_VARIANT (t) = cand;
10217 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10218 return cand;
10219 }
10220
10221
10222 /* Given an initializer INIT, return TRUE if INIT is zero or some
10223 aggregate of zeros. Otherwise return FALSE. */
10224 bool
10225 initializer_zerop (const_tree init)
10226 {
10227 tree elt;
10228
10229 STRIP_NOPS (init);
10230
10231 switch (TREE_CODE (init))
10232 {
10233 case INTEGER_CST:
10234 return integer_zerop (init);
10235
10236 case REAL_CST:
10237 /* ??? Note that this is not correct for C4X float formats. There,
10238 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10239 negative exponent. */
10240 return real_zerop (init)
10241 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10242
10243 case FIXED_CST:
10244 return fixed_zerop (init);
10245
10246 case COMPLEX_CST:
10247 return integer_zerop (init)
10248 || (real_zerop (init)
10249 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10250 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10251
10252 case VECTOR_CST:
10253 {
10254 unsigned i;
10255 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10256 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10257 return false;
10258 return true;
10259 }
10260
10261 case CONSTRUCTOR:
10262 {
10263 unsigned HOST_WIDE_INT idx;
10264
10265 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10266 if (!initializer_zerop (elt))
10267 return false;
10268 return true;
10269 }
10270
10271 case STRING_CST:
10272 {
10273 int i;
10274
10275 /* We need to loop through all elements to handle cases like
10276 "\0" and "\0foobar". */
10277 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10278 if (TREE_STRING_POINTER (init)[i] != '\0')
10279 return false;
10280
10281 return true;
10282 }
10283
10284 default:
10285 return false;
10286 }
10287 }
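
/* For instance, initializer_zerop is true for

     build_int_cst (integer_type_node, 0)

   and for a CONSTRUCTOR all of whose elements are themselves zero
   initializers, but false for a REAL_CST of -0.0, whose bit pattern is
   not all zeros.  */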
10288
10289 /* Check if vector VEC consists of all equal elements and
10290 that the number of elements corresponds to the type of VEC.
10291 The function returns the first element of the vector
10292 or NULL_TREE if the vector is not uniform. */
10293 tree
10294 uniform_vector_p (const_tree vec)
10295 {
10296 tree first, t;
10297 unsigned i;
10298
10299 if (vec == NULL_TREE)
10300 return NULL_TREE;
10301
10302 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10303
10304 if (TREE_CODE (vec) == VECTOR_CST)
10305 {
10306 first = VECTOR_CST_ELT (vec, 0);
10307 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10308 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10309 return NULL_TREE;
10310
10311 return first;
10312 }
10313
10314 else if (TREE_CODE (vec) == CONSTRUCTOR)
10315 {
10316 first = error_mark_node;
10317
10318 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10319 {
10320 if (i == 0)
10321 {
10322 first = t;
10323 continue;
10324 }
10325 if (!operand_equal_p (first, t, 0))
10326 return NULL_TREE;
10327 }
10328 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10329 return NULL_TREE;
10330
10331 return first;
10332 }
10333
10334 return NULL_TREE;
10335 }
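
/* A sketch of the expected behavior:

     tree elt = build_int_cst (integer_type_node, 7);
     tree type = build_vector_type (integer_type_node, 4);
     tree vec = build_vector_from_val (type, elt);

   here uniform_vector_p (vec) returns the element holding 7, while a
   VECTOR_CST or CONSTRUCTOR mixing different values yields NULL_TREE.  */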
10336
10337 /* Build an empty statement at location LOC. */
10338
10339 tree
10340 build_empty_stmt (location_t loc)
10341 {
10342 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10343 SET_EXPR_LOCATION (t, loc);
10344 return t;
10345 }
10346
10347
10348 /* Build an OpenMP clause with code CODE. LOC is the location of the
10349 clause. */
10350
10351 tree
10352 build_omp_clause (location_t loc, enum omp_clause_code code)
10353 {
10354 tree t;
10355 int size, length;
10356
10357 length = omp_clause_num_ops[code];
10358 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10359
10360 record_node_allocation_statistics (OMP_CLAUSE, size);
10361
10362 t = (tree) ggc_internal_alloc (size);
10363 memset (t, 0, size);
10364 TREE_SET_CODE (t, OMP_CLAUSE);
10365 OMP_CLAUSE_SET_CODE (t, code);
10366 OMP_CLAUSE_LOCATION (t) = loc;
10367
10368 return t;
10369 }
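
/* A sketch of typical use: a clause is allocated empty and its operands
   are filled in afterwards, e.g.

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = var;
     OMP_CLAUSE_CHAIN (c) = clauses;

   where VAR and CLAUSES stand for the privatized decl and the clause list
   being built up.  */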
10370
10371 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10372 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10373 Except for the CODE and operand count field, other storage for the
10374 object is initialized to zeros. */
10375
10376 tree
10377 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10378 {
10379 tree t;
10380 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10381
10382 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10383 gcc_assert (len >= 1);
10384
10385 record_node_allocation_statistics (code, length);
10386
10387 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10388
10389 TREE_SET_CODE (t, code);
10390
10391 /* Can't use TREE_OPERAND to store the length because if checking is
10392 enabled, it will try to check the length before we store it. :-P */
10393 t->exp.operands[0] = build_int_cst (sizetype, len);
10394
10395 return t;
10396 }
10397
10398 /* Helper function for build_call_* functions; build a CALL_EXPR with
10399 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10400 the argument slots. */
10401
10402 static tree
10403 build_call_1 (tree return_type, tree fn, int nargs)
10404 {
10405 tree t;
10406
10407 t = build_vl_exp (CALL_EXPR, nargs + 3);
10408 TREE_TYPE (t) = return_type;
10409 CALL_EXPR_FN (t) = fn;
10410 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10411
10412 return t;
10413 }
10414
10415 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10416 FN and a null static chain slot. NARGS is the number of call arguments
10417 which are specified as "..." arguments. */
10418
10419 tree
10420 build_call_nary (tree return_type, tree fn, int nargs, ...)
10421 {
10422 tree ret;
10423 va_list args;
10424 va_start (args, nargs);
10425 ret = build_call_valist (return_type, fn, nargs, args);
10426 va_end (args);
10427 return ret;
10428 }
10429
10430 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10431 FN and a null static chain slot. NARGS is the number of call arguments
10432 which are specified as a va_list ARGS. */
10433
10434 tree
10435 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10436 {
10437 tree t;
10438 int i;
10439
10440 t = build_call_1 (return_type, fn, nargs);
10441 for (i = 0; i < nargs; i++)
10442 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10443 process_call_operands (t);
10444 return t;
10445 }
10446
10447 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10448 FN and a null static chain slot. NARGS is the number of call arguments
10449 which are specified as a tree array ARGS. */
10450
10451 tree
10452 build_call_array_loc (location_t loc, tree return_type, tree fn,
10453 int nargs, const tree *args)
10454 {
10455 tree t;
10456 int i;
10457
10458 t = build_call_1 (return_type, fn, nargs);
10459 for (i = 0; i < nargs; i++)
10460 CALL_EXPR_ARG (t, i) = args[i];
10461 process_call_operands (t);
10462 SET_EXPR_LOCATION (t, loc);
10463 return t;
10464 }
10465
10466 /* Like build_call_array, but takes a vec. */
10467
10468 tree
10469 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10470 {
10471 tree ret, t;
10472 unsigned int ix;
10473
10474 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10475 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10476 CALL_EXPR_ARG (ret, ix) = t;
10477 process_call_operands (ret);
10478 return ret;
10479 }
10480
10481 /* Conveniently construct a function call expression. FNDECL names the
10482 function to be called and N arguments are passed in the array
10483 ARGARRAY. */
10484
10485 tree
10486 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10487 {
10488 tree fntype = TREE_TYPE (fndecl);
10489 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10490
10491 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10492 }
10493
10494 /* Conveniently construct a function call expression. FNDECL names the
10495 function to be called and the arguments are passed in the vector
10496 VEC. */
10497
10498 tree
10499 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10500 {
10501 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10502 vec_safe_address (vec));
10503 }
10504
10505
10506 /* Conveniently construct a function call expression. FNDECL names the
10507 function to be called, N is the number of arguments, and the "..."
10508 parameters are the argument expressions. */
10509
10510 tree
10511 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10512 {
10513 va_list ap;
10514 tree *argarray = XALLOCAVEC (tree, n);
10515 int i;
10516
10517 va_start (ap, n);
10518 for (i = 0; i < n; i++)
10519 argarray[i] = va_arg (ap, tree);
10520 va_end (ap);
10521 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10522 }
10523
10524 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10525 varargs macros aren't supported by all bootstrap compilers. */
10526
10527 tree
10528 build_call_expr (tree fndecl, int n, ...)
10529 {
10530 va_list ap;
10531 tree *argarray = XALLOCAVEC (tree, n);
10532 int i;
10533
10534 va_start (ap, n);
10535 for (i = 0; i < n; i++)
10536 argarray[i] = va_arg (ap, tree);
10537 va_end (ap);
10538 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10539 }
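
/* A sketch of typical use, building a call to one of the builtins defined
   earlier in this file (DST, SRC and LEN stand for previously built
   argument trees):

     tree call = build_call_expr (builtin_decl_explicit (BUILT_IN_MEMCPY),
                                  3, dst, src, len);

   the result goes through fold_builtin_call_array, so trivial calls may
   be folded on the spot.  */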
10540
10541 /* Build an internal call expression. This is just like CALL_EXPR, except
10542 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10543 internal function call. */
10544
10545 tree
10546 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10547 tree type, int n, ...)
10548 {
10549 va_list ap;
10550 int i;
10551
10552 tree fn = build_call_1 (type, NULL_TREE, n);
10553 va_start (ap, n);
10554 for (i = 0; i < n; i++)
10555 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10556 va_end (ap);
10557 SET_EXPR_LOCATION (fn, loc);
10558 CALL_EXPR_IFN (fn) = ifn;
10559 return fn;
10560 }
10561
10562 /* Create a new constant string literal and return a char* pointer to it.
10563 The STRING_CST value is the LEN characters at STR. */
10564 tree
10565 build_string_literal (int len, const char *str)
10566 {
10567 tree t, elem, index, type;
10568
10569 t = build_string (len, str);
10570 elem = build_type_variant (char_type_node, 1, 0);
10571 index = build_index_type (size_int (len - 1));
10572 type = build_array_type (elem, index);
10573 TREE_TYPE (t) = type;
10574 TREE_CONSTANT (t) = 1;
10575 TREE_READONLY (t) = 1;
10576 TREE_STATIC (t) = 1;
10577
10578 type = build_pointer_type (elem);
10579 t = build1 (ADDR_EXPR, type,
10580 build4 (ARRAY_REF, elem,
10581 t, integer_zero_node, NULL_TREE, NULL_TREE));
10582 return t;
10583 }
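
/* A sketch of typical use: LEN counts the bytes stored, so a NUL-terminated
   C string passes strlen + 1:

     tree hello = build_string_literal (strlen ("hello") + 1, "hello");

   which yields an ADDR_EXPR pointing at the first character of the
   literal.  */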
10584
10585
10586
10587 /* Return true if T (assumed to be a DECL) must be assigned a memory
10588 location. */
10589
10590 bool
10591 needs_to_live_in_memory (const_tree t)
10592 {
10593 return (TREE_ADDRESSABLE (t)
10594 || is_global_var (t)
10595 || (TREE_CODE (t) == RESULT_DECL
10596 && !DECL_BY_REFERENCE (t)
10597 && aggregate_value_p (t, current_function_decl)));
10598 }
10599
10600 /* Return the value of constant X, sign-extended. */
10601
10602 HOST_WIDE_INT
10603 int_cst_value (const_tree x)
10604 {
10605 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10606 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10607
10608 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10609 gcc_assert (cst_and_fits_in_hwi (x));
10610
10611 if (bits < HOST_BITS_PER_WIDE_INT)
10612 {
10613 bool negative = ((val >> (bits - 1)) & 1) != 0;
10614 if (negative)
10615 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10616 else
10617 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10618 }
10619
10620 return val;
10621 }
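
/* For instance, with an 8-bit signed type the low byte 0xff has its sign
   bit set, so

     int_cst_value (build_int_cst (signed_char_type_node, -1))

   returns (HOST_WIDE_INT) -1 rather than 255: bits above the precision
   are filled with copies of bit 7.  */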
10622
10623 /* If TYPE is an integral or pointer type, return an integer type with
10624 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10625 if TYPE is already an integer type of signedness UNSIGNEDP. */
10626
10627 tree
10628 signed_or_unsigned_type_for (int unsignedp, tree type)
10629 {
10630 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10631 return type;
10632
10633 if (TREE_CODE (type) == VECTOR_TYPE)
10634 {
10635 tree inner = TREE_TYPE (type);
10636 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10637 if (!inner2)
10638 return NULL_TREE;
10639 if (inner == inner2)
10640 return type;
10641 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10642 }
10643
10644 if (!INTEGRAL_TYPE_P (type)
10645 && !POINTER_TYPE_P (type)
10646 && TREE_CODE (type) != OFFSET_TYPE)
10647 return NULL_TREE;
10648
10649 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10650 }
10651
10652 /* If TYPE is an integral or pointer type, return an integer type with
10653 the same precision which is unsigned, or itself if TYPE is already an
10654 unsigned integer type. */
10655
10656 tree
10657 unsigned_type_for (tree type)
10658 {
10659 return signed_or_unsigned_type_for (1, type);
10660 }
10661
10662 /* If TYPE is an integral or pointer type, return an integer type with
10663 the same precision which is signed, or itself if TYPE is already a
10664 signed integer type. */
10665
10666 tree
10667 signed_type_for (tree type)
10668 {
10669 return signed_or_unsigned_type_for (0, type);
10670 }
10671
10672 /* If TYPE is a vector type, return a signed integer vector type with the
10673 same width and number of subparts. Otherwise return boolean_type_node. */
10674
10675 tree
10676 truth_type_for (tree type)
10677 {
10678 if (TREE_CODE (type) == VECTOR_TYPE)
10679 {
10680 tree elem = lang_hooks.types.type_for_size
10681 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10682 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10683 }
10684 else
10685 return boolean_type_node;
10686 }
10687
10688 /* Returns the largest value obtainable by casting something in INNER type to
10689 OUTER type. */
10690
10691 tree
10692 upper_bound_in_type (tree outer, tree inner)
10693 {
10694 unsigned int det = 0;
10695 unsigned oprec = TYPE_PRECISION (outer);
10696 unsigned iprec = TYPE_PRECISION (inner);
10697 unsigned prec;
10698
10699 /* Compute a unique number for every combination. */
10700 det |= (oprec > iprec) ? 4 : 0;
10701 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10702 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10703
10704 /* Determine the exponent to use. */
10705 switch (det)
10706 {
10707 case 0:
10708 case 1:
10709 /* oprec <= iprec, outer: signed, inner: don't care. */
10710 prec = oprec - 1;
10711 break;
10712 case 2:
10713 case 3:
10714 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10715 prec = oprec;
10716 break;
10717 case 4:
10718 /* oprec > iprec, outer: signed, inner: signed. */
10719 prec = iprec - 1;
10720 break;
10721 case 5:
10722 /* oprec > iprec, outer: signed, inner: unsigned. */
10723 prec = iprec;
10724 break;
10725 case 6:
10726 /* oprec > iprec, outer: unsigned, inner: signed. */
10727 prec = oprec;
10728 break;
10729 case 7:
10730 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10731 prec = iprec;
10732 break;
10733 default:
10734 gcc_unreachable ();
10735 }
10736
10737 return wide_int_to_tree (outer,
10738 wi::mask (prec, false, TYPE_PRECISION (outer)));
10739 }
10740
10741 /* Returns the smallest value obtainable by casting something in INNER type to
10742 OUTER type. */
10743
10744 tree
10745 lower_bound_in_type (tree outer, tree inner)
10746 {
10747 unsigned oprec = TYPE_PRECISION (outer);
10748 unsigned iprec = TYPE_PRECISION (inner);
10749
10750 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10751 and obtain 0. */
10752 if (TYPE_UNSIGNED (outer)
10753 /* If we are widening something of an unsigned type, OUTER type
10754 contains all values of INNER type. In particular, both INNER
10755 and OUTER types have zero in common. */
10756 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10757 return build_int_cst (outer, 0);
10758 else
10759 {
10760 /* If we are widening a signed type to another signed type, we
10761 want to obtain -2^^(iprec-1). If we are keeping the
10762 precision or narrowing to a signed type, we want to obtain
10763 -2^(oprec-1). */
10764 unsigned prec = oprec > iprec ? iprec : oprec;
10765 return wide_int_to_tree (outer,
10766 wi::mask (prec - 1, true,
10767 TYPE_PRECISION (outer)));
10768 }
10769 }
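
/* For instance, with a 32-bit signed OUTER and an 8-bit unsigned INNER
   (det == 5 in upper_bound_in_type), the upper bound is 255, the largest
   unsigned char value that survives the cast, while the lower bound is 0
   because widening an unsigned type cannot produce a negative value.  */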
10770
10771 /* Return nonzero if two operands that are suitable for PHI nodes are
10772 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10773 SSA_NAME or invariant. Note that this is strictly an optimization.
10774 That is, callers of this function can directly call operand_equal_p
10775 and get the same result, only slower. */
10776
10777 int
10778 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10779 {
10780 if (arg0 == arg1)
10781 return 1;
10782 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10783 return 0;
10784 return operand_equal_p (arg0, arg1, 0);
10785 }
10786
10787 /* Returns the number of zeros at the end of the binary representation of X. */
10788
10789 tree
10790 num_ending_zeros (const_tree x)
10791 {
10792 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10793 }
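
/* For instance, for X equal to 40 (binary 101000) the result is a constant
   3 of the same type as X, since the three least significant bits are
   zero.  */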
10794
10795
10796 #define WALK_SUBTREE(NODE) \
10797 do \
10798 { \
10799 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10800 if (result) \
10801 return result; \
10802 } \
10803 while (0)
10804
10805 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10806 to be walked whenever a type is seen in the tree. The rest of the operands
10807 and the return value are as for walk_tree. */
10808
10809 static tree
10810 walk_type_fields (tree type, walk_tree_fn func, void *data,
10811 hash_set<tree> *pset, walk_tree_lh lh)
10812 {
10813 tree result = NULL_TREE;
10814
10815 switch (TREE_CODE (type))
10816 {
10817 case POINTER_TYPE:
10818 case REFERENCE_TYPE:
10819 case VECTOR_TYPE:
10820 /* We have to worry about mutually recursive pointers. These can't
10821 be written in C. They can in Ada. It's pathological, but
10822 there's an ACATS test (c38102a) that checks it. Deal with this
10823 by checking if we're pointing to another pointer, that one
10824 points to another pointer, that one does too, and we have no htab.
10825 If so, get a hash table. We check three levels deep to avoid
10826 the cost of the hash table if we don't need one. */
10827 if (POINTER_TYPE_P (TREE_TYPE (type))
10828 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10829 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10830 && !pset)
10831 {
10832 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10833 func, data);
10834 if (result)
10835 return result;
10836
10837 break;
10838 }
10839
10840 /* ... fall through ... */
10841
10842 case COMPLEX_TYPE:
10843 WALK_SUBTREE (TREE_TYPE (type));
10844 break;
10845
10846 case METHOD_TYPE:
10847 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10848
10849 /* Fall through. */
10850
10851 case FUNCTION_TYPE:
10852 WALK_SUBTREE (TREE_TYPE (type));
10853 {
10854 tree arg;
10855
10856 /* We never want to walk into default arguments. */
10857 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10858 WALK_SUBTREE (TREE_VALUE (arg));
10859 }
10860 break;
10861
10862 case ARRAY_TYPE:
10863 /* Don't follow this node's type if it is a pointer, for fear that
10864 we'll have infinite recursion. If we have a PSET, then we
10865 need not fear. */
10866 if (pset
10867 || (!POINTER_TYPE_P (TREE_TYPE (type))
10868 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10869 WALK_SUBTREE (TREE_TYPE (type));
10870 WALK_SUBTREE (TYPE_DOMAIN (type));
10871 break;
10872
10873 case OFFSET_TYPE:
10874 WALK_SUBTREE (TREE_TYPE (type));
10875 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10876 break;
10877
10878 default:
10879 break;
10880 }
10881
10882 return NULL_TREE;
10883 }
10884
10885 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10886 called with the DATA and the address of each sub-tree. If FUNC returns a
10887 non-NULL value, the traversal is stopped, and the value returned by FUNC
10888 is returned. If PSET is non-NULL it is used to record the nodes visited,
10889 and to avoid visiting a node more than once. */
10890
10891 tree
10892 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10893 hash_set<tree> *pset, walk_tree_lh lh)
10894 {
10895 enum tree_code code;
10896 int walk_subtrees;
10897 tree result;
10898
10899 #define WALK_SUBTREE_TAIL(NODE) \
10900 do \
10901 { \
10902 tp = & (NODE); \
10903 goto tail_recurse; \
10904 } \
10905 while (0)
10906
10907 tail_recurse:
10908 /* Skip empty subtrees. */
10909 if (!*tp)
10910 return NULL_TREE;
10911
10912 /* Don't walk the same tree twice, if the user has requested
10913 that we avoid doing so. */
10914 if (pset && pset->add (*tp))
10915 return NULL_TREE;
10916
10917 /* Call the function. */
10918 walk_subtrees = 1;
10919 result = (*func) (tp, &walk_subtrees, data);
10920
10921 /* If we found something, return it. */
10922 if (result)
10923 return result;
10924
10925 code = TREE_CODE (*tp);
10926
10927 /* Even if we didn't, FUNC may have decided that there was nothing
10928 interesting below this point in the tree. */
10929 if (!walk_subtrees)
10930 {
10931 /* But we still need to check our siblings. */
10932 if (code == TREE_LIST)
10933 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10934 else if (code == OMP_CLAUSE)
10935 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10936 else
10937 return NULL_TREE;
10938 }
10939
10940 if (lh)
10941 {
10942 result = (*lh) (tp, &walk_subtrees, func, data, pset);
10943 if (result || !walk_subtrees)
10944 return result;
10945 }
10946
10947 switch (code)
10948 {
10949 case ERROR_MARK:
10950 case IDENTIFIER_NODE:
10951 case INTEGER_CST:
10952 case REAL_CST:
10953 case FIXED_CST:
10954 case VECTOR_CST:
10955 case STRING_CST:
10956 case BLOCK:
10957 case PLACEHOLDER_EXPR:
10958 case SSA_NAME:
10959 case FIELD_DECL:
10960 case RESULT_DECL:
10961 /* None of these have subtrees other than those already walked
10962 above. */
10963 break;
10964
10965 case TREE_LIST:
10966 WALK_SUBTREE (TREE_VALUE (*tp));
10967 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10968 break;
10969
10970 case TREE_VEC:
10971 {
10972 int len = TREE_VEC_LENGTH (*tp);
10973
10974 if (len == 0)
10975 break;
10976
10977 /* Walk all elements but the first. */
10978 while (--len)
10979 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
10980
10981 /* Now walk the first one as a tail call. */
10982 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
10983 }
10984
10985 case COMPLEX_CST:
10986 WALK_SUBTREE (TREE_REALPART (*tp));
10987 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
10988
10989 case CONSTRUCTOR:
10990 {
10991 unsigned HOST_WIDE_INT idx;
10992 constructor_elt *ce;
10993
10994 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
10995 idx++)
10996 WALK_SUBTREE (ce->value);
10997 }
10998 break;
10999
11000 case SAVE_EXPR:
11001 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11002
11003 case BIND_EXPR:
11004 {
11005 tree decl;
11006 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11007 {
11008 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11009 into declarations that are just mentioned, rather than
11010 declared; they don't really belong to this part of the tree.
11011 And, we can see cycles: the initializer for a declaration
11012 can refer to the declaration itself. */
11013 WALK_SUBTREE (DECL_INITIAL (decl));
11014 WALK_SUBTREE (DECL_SIZE (decl));
11015 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11016 }
11017 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11018 }
11019
11020 case STATEMENT_LIST:
11021 {
11022 tree_stmt_iterator i;
11023 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11024 WALK_SUBTREE (*tsi_stmt_ptr (i));
11025 }
11026 break;
11027
11028 case OMP_CLAUSE:
11029 switch (OMP_CLAUSE_CODE (*tp))
11030 {
11031 case OMP_CLAUSE_PRIVATE:
11032 case OMP_CLAUSE_SHARED:
11033 case OMP_CLAUSE_FIRSTPRIVATE:
11034 case OMP_CLAUSE_COPYIN:
11035 case OMP_CLAUSE_COPYPRIVATE:
11036 case OMP_CLAUSE_FINAL:
11037 case OMP_CLAUSE_IF:
11038 case OMP_CLAUSE_NUM_THREADS:
11039 case OMP_CLAUSE_SCHEDULE:
11040 case OMP_CLAUSE_UNIFORM:
11041 case OMP_CLAUSE_DEPEND:
11042 case OMP_CLAUSE_NUM_TEAMS:
11043 case OMP_CLAUSE_THREAD_LIMIT:
11044 case OMP_CLAUSE_DEVICE:
11045 case OMP_CLAUSE_DIST_SCHEDULE:
11046 case OMP_CLAUSE_SAFELEN:
11047 case OMP_CLAUSE_SIMDLEN:
11048 case OMP_CLAUSE__LOOPTEMP_:
11049 case OMP_CLAUSE__SIMDUID_:
11050 case OMP_CLAUSE__CILK_FOR_COUNT_:
11051 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11052 /* FALLTHRU */
11053
11054 case OMP_CLAUSE_NOWAIT:
11055 case OMP_CLAUSE_ORDERED:
11056 case OMP_CLAUSE_DEFAULT:
11057 case OMP_CLAUSE_UNTIED:
11058 case OMP_CLAUSE_MERGEABLE:
11059 case OMP_CLAUSE_PROC_BIND:
11060 case OMP_CLAUSE_INBRANCH:
11061 case OMP_CLAUSE_NOTINBRANCH:
11062 case OMP_CLAUSE_FOR:
11063 case OMP_CLAUSE_PARALLEL:
11064 case OMP_CLAUSE_SECTIONS:
11065 case OMP_CLAUSE_TASKGROUP:
11066 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11067
11068 case OMP_CLAUSE_LASTPRIVATE:
11069 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11070 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11071 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11072
11073 case OMP_CLAUSE_COLLAPSE:
11074 {
11075 int i;
11076 for (i = 0; i < 3; i++)
11077 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11078 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11079 }
11080
11081 case OMP_CLAUSE_LINEAR:
11082 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11083 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11084 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11085 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11086
11087 case OMP_CLAUSE_ALIGNED:
11088 case OMP_CLAUSE_FROM:
11089 case OMP_CLAUSE_TO:
11090 case OMP_CLAUSE_MAP:
11091 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11092 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11093 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11094
11095 case OMP_CLAUSE_REDUCTION:
11096 {
11097 int i;
11098 for (i = 0; i < 4; i++)
11099 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11100 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11101 }
11102
11103 default:
11104 gcc_unreachable ();
11105 }
11106 break;
11107
11108 case TARGET_EXPR:
11109 {
11110 int i, len;
11111
11112 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11113 But, we only want to walk once. */
11114 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11115 for (i = 0; i < len; ++i)
11116 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11117 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11118 }
11119
11120 case DECL_EXPR:
11121 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11122 defining. We only want to walk into these fields of a type in this
11123 case and not in the general case of a mere reference to the type.
11124
11125 The criterion is as follows: if the field can be an expression, it
11126 must be walked only here. This should be in keeping with the fields
11127 that are directly gimplified in gimplify_type_sizes in order for the
11128 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11129 variable-sized types.
11130
11131 Note that DECLs get walked as part of processing the BIND_EXPR. */
11132 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11133 {
11134 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11135 if (TREE_CODE (*type_p) == ERROR_MARK)
11136 return NULL_TREE;
11137
11138 /* Call the function for the type. See if it returns anything or
11139 doesn't want us to continue. If we are to continue, walk both
11140 the normal fields and those for the declaration case. */
11141 result = (*func) (type_p, &walk_subtrees, data);
11142 if (result || !walk_subtrees)
11143 return result;
11144
11145 /* But do not walk a pointed-to type since it may itself need to
11146 be walked in the declaration case if it isn't anonymous. */
11147 if (!POINTER_TYPE_P (*type_p))
11148 {
11149 result = walk_type_fields (*type_p, func, data, pset, lh);
11150 if (result)
11151 return result;
11152 }
11153
11154 /* If this is a record type, also walk the fields. */
11155 if (RECORD_OR_UNION_TYPE_P (*type_p))
11156 {
11157 tree field;
11158
11159 for (field = TYPE_FIELDS (*type_p); field;
11160 field = DECL_CHAIN (field))
11161 {
11162 /* We'd like to look at the type of the field, but we can
11163 easily get infinite recursion. So assume it's pointed
11164 to elsewhere in the tree. Also, ignore things that
11165 aren't fields. */
11166 if (TREE_CODE (field) != FIELD_DECL)
11167 continue;
11168
11169 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11170 WALK_SUBTREE (DECL_SIZE (field));
11171 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11172 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11173 WALK_SUBTREE (DECL_QUALIFIER (field));
11174 }
11175 }
11176
11177 /* Same for scalar types. */
11178 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11179 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11180 || TREE_CODE (*type_p) == INTEGER_TYPE
11181 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11182 || TREE_CODE (*type_p) == REAL_TYPE)
11183 {
11184 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11185 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11186 }
11187
11188 WALK_SUBTREE (TYPE_SIZE (*type_p));
11189 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11190 }
11191 /* FALLTHRU */
11192
11193 default:
11194 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11195 {
11196 int i, len;
11197
11198 /* Walk over all the sub-trees of this operand. */
11199 len = TREE_OPERAND_LENGTH (*tp);
11200
11201 /* Go through the subtrees. We need to do this in forward order so
11202 that the scope of a FOR_EXPR is handled properly. */
11203 if (len)
11204 {
11205 for (i = 0; i < len - 1; ++i)
11206 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11207 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11208 }
11209 }
11210 /* If this is a type, walk the needed fields in the type. */
11211 else if (TYPE_P (*tp))
11212 return walk_type_fields (*tp, func, data, pset, lh);
11213 break;
11214 }
11215
11216 /* We didn't find what we were looking for. */
11217 return NULL_TREE;
11218
11219 #undef WALK_SUBTREE_TAIL
11220 }
11221 #undef WALK_SUBTREE
11222
11223 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11224
11225 tree
11226 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11227 walk_tree_lh lh)
11228 {
11229 tree result;
11230
11231 hash_set<tree> pset;
11232 result = walk_tree_1 (tp, func, data, &pset, lh);
11233 return result;
11234 }
11235
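/* Illustrative sketch (hypothetical helpers, not used by GCC itself):
   a walk_tree_fn callback that counts ADDR_EXPR nodes, driven by
   walk_tree_without_duplicates_1 above so that shared subtrees are
   only visited once.  */

static tree
count_addr_exprs_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
{
  if (TREE_CODE (*tp) == ADDR_EXPR)
    ++*(int *) data;
  /* Returning NULL_TREE keeps the walk going; a non-NULL return value
     would stop the walk and be propagated to the caller.  */
  return NULL_TREE;
}

static ATTRIBUTE_UNUSED int
count_addr_exprs (tree t)
{
  int count = 0;
  walk_tree_without_duplicates_1 (&t, count_addr_exprs_r, &count, NULL);
  return count;
}
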
11236
11237 tree
11238 tree_block (tree t)
11239 {
11240 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11241
11242 if (IS_EXPR_CODE_CLASS (c))
11243 return LOCATION_BLOCK (t->exp.locus);
11244 gcc_unreachable ();
11245 return NULL;
11246 }
11247
11248 void
11249 tree_set_block (tree t, tree b)
11250 {
11251 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11252
11253 if (IS_EXPR_CODE_CLASS (c))
11254 {
11255 if (b)
11256 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11257 else
11258 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11259 }
11260 else
11261 gcc_unreachable ();
11262 }
11263
11264 /* Create a nameless artificial label and put it in the current
11265 function context. The label has a location of LOC. Returns the
11266 newly created label. */
11267
11268 tree
11269 create_artificial_label (location_t loc)
11270 {
11271 tree lab = build_decl (loc,
11272 LABEL_DECL, NULL_TREE, void_type_node);
11273
11274 DECL_ARTIFICIAL (lab) = 1;
11275 DECL_IGNORED_P (lab) = 1;
11276 DECL_CONTEXT (lab) = current_function_decl;
11277 return lab;
11278 }
11279
11280 /* Given a tree, try to return a useful variable name that we can use
11281 to prefix a temporary that is being assigned the value of the tree.
11282 E.g., given <temp> = &A, return A. */
11283
11284 const char *
11285 get_name (tree t)
11286 {
11287 tree stripped_decl;
11288
11289 stripped_decl = t;
11290 STRIP_NOPS (stripped_decl);
11291 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11292 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11293 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11294 {
11295 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11296 if (!name)
11297 return NULL;
11298 return IDENTIFIER_POINTER (name);
11299 }
11300 else
11301 {
11302 switch (TREE_CODE (stripped_decl))
11303 {
11304 case ADDR_EXPR:
11305 return get_name (TREE_OPERAND (stripped_decl, 0));
11306 default:
11307 return NULL;
11308 }
11309 }
11310 }
11311
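/* Illustrative sketch (hypothetical helper, not part of GCC): get_name
   looks through no-op conversions and ADDR_EXPRs, so for a tree
   representing "&foo" it returns the string "foo"; for unnamed
   temporaries and other expressions it returns NULL, in which case a
   caller would fall back to a generic prefix.  */

static ATTRIBUTE_UNUSED const char *
get_name_or (tree t, const char *fallback)
{
  const char *name = get_name (t);
  return name != NULL ? name : fallback;
}
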
11312 /* Return true if function type FNTYPE has a variable argument list. */
11313
11314 bool
11315 stdarg_p (const_tree fntype)
11316 {
11317 function_args_iterator args_iter;
11318 tree n = NULL_TREE, t;
11319
11320 if (!fntype)
11321 return false;
11322
11323 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11324 {
11325 n = t;
11326 }
11327
11328 return n != NULL_TREE && n != void_type_node;
11329 }
11330
11331 /* Return true if function type FNTYPE has a prototype. */
11332
11333 bool
11334 prototype_p (tree fntype)
11335 {
11336 tree t;
11337
11338 gcc_assert (fntype != NULL_TREE);
11339
11340 t = TYPE_ARG_TYPES (fntype);
11341 return (t != NULL_TREE);
11342 }
11343
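/* Illustrative sketch (hypothetical helper): for the C declaration
   "int f (int, ...)" both prototype_p and stdarg_p are true; for the
   prototyped "int f (int)" only prototype_p is true; for the
   unprototyped "int f ()" both are false, since TYPE_ARG_TYPES is
   NULL.  */

static ATTRIBUTE_UNUSED bool
fixed_arity_prototype_p (tree fntype)
{
  /* A fixed-arity prototype has argument types and no trailing "...".  */
  return prototype_p (fntype) && !stdarg_p (fntype);
}
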
11344 /* If BLOCK is inlined from an __attribute__((__artificial__))
11345 routine, return a pointer to the location from which it has been
11346 called. */
11347 location_t *
11348 block_nonartificial_location (tree block)
11349 {
11350 location_t *ret = NULL;
11351
11352 while (block && TREE_CODE (block) == BLOCK
11353 && BLOCK_ABSTRACT_ORIGIN (block))
11354 {
11355 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11356
11357 while (TREE_CODE (ao) == BLOCK
11358 && BLOCK_ABSTRACT_ORIGIN (ao)
11359 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11360 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11361
11362 if (TREE_CODE (ao) == FUNCTION_DECL)
11363 {
11364 /* If AO is an artificial inline, point RET to the
11365 call site locus at which it has been inlined and continue
11366 the loop, in case AO's caller is also an artificial
11367 inline. */
11368 if (DECL_DECLARED_INLINE_P (ao)
11369 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11370 ret = &BLOCK_SOURCE_LOCATION (block);
11371 else
11372 break;
11373 }
11374 else if (TREE_CODE (ao) != BLOCK)
11375 break;
11376
11377 block = BLOCK_SUPERCONTEXT (block);
11378 }
11379 return ret;
11380 }
11381
11382
11383 /* If EXP is inlined from an __attribute__((__artificial__))
11384 function, return the location of the original call expression. */
11385
11386 location_t
11387 tree_nonartificial_location (tree exp)
11388 {
11389 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11390
11391 if (loc)
11392 return *loc;
11393 else
11394 return EXPR_LOCATION (exp);
11395 }
11396
11397
11398 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11399 and TARGET_OPTION_NODE nodes. */
11400
11401 /* Return the hash code for X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11402
11403 static hashval_t
11404 cl_option_hash_hash (const void *x)
11405 {
11406 const_tree const t = (const_tree) x;
11407 const char *p;
11408 size_t i;
11409 size_t len = 0;
11410 hashval_t hash = 0;
11411
11412 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11413 {
11414 p = (const char *)TREE_OPTIMIZATION (t);
11415 len = sizeof (struct cl_optimization);
11416 }
11417
11418 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11419 {
11420 p = (const char *)TREE_TARGET_OPTION (t);
11421 len = sizeof (struct cl_target_option);
11422 }
11423
11424 else
11425 gcc_unreachable ();
11426
11427 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11428 something else. */
11429 for (i = 0; i < len; i++)
11430 if (p[i])
11431 hash = (hash << 4) ^ ((i << 2) | p[i]);
11432
11433 return hash;
11434 }
11435
11436 /* Return nonzero if the value represented by *X (an OPTIMIZATION_NODE or
11437 TARGET_OPTION_NODE tree) is the same as that given by *Y, which is a
11438 node of the same kind. */
11439
11440 static int
11441 cl_option_hash_eq (const void *x, const void *y)
11442 {
11443 const_tree const xt = (const_tree) x;
11444 const_tree const yt = (const_tree) y;
11445 const char *xp;
11446 const char *yp;
11447 size_t len;
11448
11449 if (TREE_CODE (xt) != TREE_CODE (yt))
11450 return 0;
11451
11452 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11453 {
11454 xp = (const char *)TREE_OPTIMIZATION (xt);
11455 yp = (const char *)TREE_OPTIMIZATION (yt);
11456 len = sizeof (struct cl_optimization);
11457 }
11458
11459 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11460 {
11461 xp = (const char *)TREE_TARGET_OPTION (xt);
11462 yp = (const char *)TREE_TARGET_OPTION (yt);
11463 len = sizeof (struct cl_target_option);
11464 }
11465
11466 else
11467 gcc_unreachable ();
11468
11469 return (memcmp (xp, yp, len) == 0);
11470 }
11471
11472 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11473
11474 tree
11475 build_optimization_node (struct gcc_options *opts)
11476 {
11477 tree t;
11478 void **slot;
11479
11480 /* Use the cache of optimization nodes. */
11481
11482 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11483 opts);
11484
11485 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11486 t = (tree) *slot;
11487 if (!t)
11488 {
11489 /* Insert this one into the hash table. */
11490 t = cl_optimization_node;
11491 *slot = t;
11492
11493 /* Make a new node for next time round. */
11494 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11495 }
11496
11497 return t;
11498 }
11499
11500 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11501
11502 tree
11503 build_target_option_node (struct gcc_options *opts)
11504 {
11505 tree t;
11506 void **slot;
11507
11508 /* Use the cache of optimization nodes. */
11509
11510 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11511 opts);
11512
11513 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11514 t = (tree) *slot;
11515 if (!t)
11516 {
11517 /* Insert this one into the hash table. */
11518 t = cl_target_option_node;
11519 *slot = t;
11520
11521 /* Make a new node for next time round. */
11522 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11523 }
11524
11525 return t;
11526 }
11527
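/* Illustrative sketch (assuming the usual global_options variable as
   the source of the current option state; hypothetical helper, not
   used elsewhere): snapshot the current optimization and target
   options into tree nodes.  Because both builders go through the
   cl_option_hash_table cache above, identical option sets yield
   pointer-equal nodes.  */

static ATTRIBUTE_UNUSED void
snapshot_current_option_nodes (tree *opt_node, tree *target_node)
{
  *opt_node = build_optimization_node (&global_options);
  *target_node = build_target_option_node (&global_options);
}
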
11528 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11529 Called through htab_traverse. */
11530
11531 static int
11532 prepare_target_option_node_for_pch (void **slot, void *)
11533 {
11534 tree node = (tree) *slot;
11535 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11536 TREE_TARGET_GLOBALS (node) = NULL;
11537 return 1;
11538 }
11539
11540 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11541 so that they aren't saved during PCH writing. */
11542
11543 void
11544 prepare_target_option_nodes_for_pch (void)
11545 {
11546 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11547 NULL);
11548 }
11549
11550 /* Determine the "ultimate origin" of a block. The block may be an inlined
11551 instance of an inlined instance of a block which is local to an inline
11552 function, so we have to trace all of the way back through the origin chain
11553 to find out what sort of node actually served as the original seed for the
11554 given block. */
11555
11556 tree
11557 block_ultimate_origin (const_tree block)
11558 {
11559 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11560
11561 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11562 nodes in the function to point to themselves; ignore that if
11563 we're trying to output the abstract instance of this function. */
11564 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11565 return NULL_TREE;
11566
11567 if (immediate_origin == NULL_TREE)
11568 return NULL_TREE;
11569 else
11570 {
11571 tree ret_val;
11572 tree lookahead = immediate_origin;
11573
11574 do
11575 {
11576 ret_val = lookahead;
11577 lookahead = (TREE_CODE (ret_val) == BLOCK
11578 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11579 }
11580 while (lookahead != NULL && lookahead != ret_val);
11581
11582 /* The block's abstract origin chain may not be the *ultimate* origin of
11583 the block. It could lead to a DECL that has an abstract origin set.
11584 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11585 will give us if it has one). Note that DECL's abstract origins are
11586 supposed to be the most distant ancestor (or so decl_ultimate_origin
11587 claims), so we don't need to loop following the DECL origins. */
11588 if (DECL_P (ret_val))
11589 return DECL_ORIGIN (ret_val);
11590
11591 return ret_val;
11592 }
11593 }
11594
11595 /* Return true iff conversion in EXP generates no instruction. Mark
11596 it inline so that we fully inline into the stripping functions even
11597 though we have two uses of this function. */
11598
11599 static inline bool
11600 tree_nop_conversion (const_tree exp)
11601 {
11602 tree outer_type, inner_type;
11603
11604 if (!CONVERT_EXPR_P (exp)
11605 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11606 return false;
11607 if (TREE_OPERAND (exp, 0) == error_mark_node)
11608 return false;
11609
11610 outer_type = TREE_TYPE (exp);
11611 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11612
11613 if (!inner_type)
11614 return false;
11615
11616 /* Use precision rather than machine mode when we can, which gives
11617 the correct answer even for submode (bit-field) types. */
11618 if ((INTEGRAL_TYPE_P (outer_type)
11619 || POINTER_TYPE_P (outer_type)
11620 || TREE_CODE (outer_type) == OFFSET_TYPE)
11621 && (INTEGRAL_TYPE_P (inner_type)
11622 || POINTER_TYPE_P (inner_type)
11623 || TREE_CODE (inner_type) == OFFSET_TYPE))
11624 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11625
11626 /* Otherwise fall back on comparing machine modes (e.g. for
11627 aggregate types, floats). */
11628 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11629 }
11630
11631 /* Return true iff conversion in EXP generates no instruction. Don't
11632 consider conversions changing the signedness. */
11633
11634 static bool
11635 tree_sign_nop_conversion (const_tree exp)
11636 {
11637 tree outer_type, inner_type;
11638
11639 if (!tree_nop_conversion (exp))
11640 return false;
11641
11642 outer_type = TREE_TYPE (exp);
11643 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11644
11645 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11646 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11647 }
11648
11649 /* Strip conversions from EXP according to tree_nop_conversion and
11650 return the resulting expression. */
11651
11652 tree
11653 tree_strip_nop_conversions (tree exp)
11654 {
11655 while (tree_nop_conversion (exp))
11656 exp = TREE_OPERAND (exp, 0);
11657 return exp;
11658 }
11659
11660 /* Strip conversions from EXP according to tree_sign_nop_conversion
11661 and return the resulting expression. */
11662
11663 tree
11664 tree_strip_sign_nop_conversions (tree exp)
11665 {
11666 while (tree_sign_nop_conversion (exp))
11667 exp = TREE_OPERAND (exp, 0);
11668 return exp;
11669 }
11670
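/* Illustrative sketch (hypothetical helper): a cast between "int" and
   "unsigned int" has equal precision, so tree_strip_nop_conversions
   removes it, while tree_strip_sign_nop_conversions keeps it because
   the signedness differs.  The two results therefore differ exactly
   when some no-op conversion changes signedness (or pointerness).  */

static ATTRIBUTE_UNUSED bool
has_sign_changing_nop_conversion_p (tree exp)
{
  return (tree_strip_nop_conversions (exp)
	  != tree_strip_sign_nop_conversions (exp));
}
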
11671 /* Strip any floating-point extensions from EXP and return the result. */
11672 tree
11673 strip_float_extensions (tree exp)
11674 {
11675 tree sub, expt, subt;
11676
11677 /* For a floating point constant, look up the narrowest type that can hold
11678 it properly and handle it like (type)(narrowest_type)constant.
11679 This way we can optimize, for instance, a=a*2.0 where "a" is float
11680 but 2.0 is a double constant. */
11681 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11682 {
11683 REAL_VALUE_TYPE orig;
11684 tree type = NULL;
11685
11686 orig = TREE_REAL_CST (exp);
11687 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11688 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11689 type = float_type_node;
11690 else if (TYPE_PRECISION (TREE_TYPE (exp))
11691 > TYPE_PRECISION (double_type_node)
11692 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11693 type = double_type_node;
11694 if (type)
11695 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11696 }
11697
11698 if (!CONVERT_EXPR_P (exp))
11699 return exp;
11700
11701 sub = TREE_OPERAND (exp, 0);
11702 subt = TREE_TYPE (sub);
11703 expt = TREE_TYPE (exp);
11704
11705 if (!FLOAT_TYPE_P (subt))
11706 return exp;
11707
11708 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11709 return exp;
11710
11711 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11712 return exp;
11713
11714 return strip_float_extensions (sub);
11715 }
11716
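/* Illustrative sketch (hypothetical helper): for "a = a * 2.0" with
   "a" of type float, the constant 2.0 is a double REAL_CST that
   truncates exactly to float, so strip_float_extensions returns a
   float constant and the multiplication can be carried out in float.
   This helper asks whether EXP, once stripped, has float's machine
   mode.  */

static ATTRIBUTE_UNUSED bool
narrows_to_float_p (tree exp)
{
  tree stripped = strip_float_extensions (exp);
  return TYPE_MODE (TREE_TYPE (stripped)) == TYPE_MODE (float_type_node);
}
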
11717 /* Strip out all handled components that produce invariant
11718 offsets. */
11719
11720 const_tree
11721 strip_invariant_refs (const_tree op)
11722 {
11723 while (handled_component_p (op))
11724 {
11725 switch (TREE_CODE (op))
11726 {
11727 case ARRAY_REF:
11728 case ARRAY_RANGE_REF:
11729 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11730 || TREE_OPERAND (op, 2) != NULL_TREE
11731 || TREE_OPERAND (op, 3) != NULL_TREE)
11732 return NULL;
11733 break;
11734
11735 case COMPONENT_REF:
11736 if (TREE_OPERAND (op, 2) != NULL_TREE)
11737 return NULL;
11738 break;
11739
11740 default:;
11741 }
11742 op = TREE_OPERAND (op, 0);
11743 }
11744
11745 return op;
11746 }
11747
11748 static GTY(()) tree gcc_eh_personality_decl;
11749
11750 /* Return the GCC personality function decl. */
11751
11752 tree
11753 lhd_gcc_personality (void)
11754 {
11755 if (!gcc_eh_personality_decl)
11756 gcc_eh_personality_decl = build_personality_function ("gcc");
11757 return gcc_eh_personality_decl;
11758 }
11759
11760 /* TARGET is the call target of a GIMPLE call statement
11761 (obtained by gimple_call_fn). Return true if it is an
11762 OBJ_TYPE_REF representing a virtual call to a C++ method
11763 (as opposed to an OBJ_TYPE_REF representing Objective-C calls
11764 through a cast, where the middle-end devirtualization machinery
11765 can't apply). */
11766
11767 bool
11768 virtual_method_call_p (tree target)
11769 {
11770 if (TREE_CODE (target) != OBJ_TYPE_REF)
11771 return false;
11772 target = TREE_TYPE (target);
11773 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11774 target = TREE_TYPE (target);
11775 if (TREE_CODE (target) == FUNCTION_TYPE)
11776 return false;
11777 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11778 return true;
11779 }
11780
11781 /* REF is an OBJ_TYPE_REF; return the class the reference corresponds to. */
11782
11783 tree
11784 obj_type_ref_class (tree ref)
11785 {
11786 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11787 ref = TREE_TYPE (ref);
11788 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11789 ref = TREE_TYPE (ref);
11790 /* We look for the type THIS points to. ObjC also builds
11791 OBJ_TYPE_REF for non-method calls; their first parameter
11792 ID, however, also corresponds to the class type. */
11793 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11794 || TREE_CODE (ref) == FUNCTION_TYPE);
11795 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11796 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11797 return TREE_TYPE (ref);
11798 }
11799
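/* Illustrative sketch (hypothetical helper): combine the two routines
   above to obtain the class a virtual call dispatches on, or NULL_TREE
   when the call target is not a C++ virtual call.  */

static ATTRIBUTE_UNUSED tree
virtual_call_class (tree target)
{
  if (!virtual_method_call_p (target))
    return NULL_TREE;
  return obj_type_ref_class (target);
}
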
11800 /* Return true if T is in an anonymous namespace. */
11801
11802 bool
11803 type_in_anonymous_namespace_p (const_tree t)
11804 {
11805 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11806 builtin types; those have a NULL TYPE_CONTEXT. */
11807 if (!TYPE_CONTEXT (t))
11808 return false;
11809 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11810 }
11811
11812 /* Try to find a base info of BINFO that would have its field decl at offset
11813 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11814 found, return it; otherwise return NULL_TREE. */
11815
11816 tree
11817 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11818 {
11819 tree type = BINFO_TYPE (binfo);
11820
11821 while (true)
11822 {
11823 HOST_WIDE_INT pos, size;
11824 tree fld;
11825 int i;
11826
11827 if (types_same_for_odr (type, expected_type))
11828 return binfo;
11829 if (offset < 0)
11830 return NULL_TREE;
11831
11832 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11833 {
11834 if (TREE_CODE (fld) != FIELD_DECL)
11835 continue;
11836
11837 pos = int_bit_position (fld);
11838 size = tree_to_uhwi (DECL_SIZE (fld));
11839 if (pos <= offset && (pos + size) > offset)
11840 break;
11841 }
11842 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11843 return NULL_TREE;
11844
11845 if (!DECL_ARTIFICIAL (fld))
11846 {
11847 binfo = TYPE_BINFO (TREE_TYPE (fld));
11848 if (!binfo)
11849 return NULL_TREE;
11850 }
11851 /* Offset 0 indicates the primary base, whose vtable contents are
11852 represented in the binfo for the derived class. */
11853 else if (offset != 0)
11854 {
11855 tree base_binfo, binfo2 = binfo;
11856
11857 /* Find the BINFO corresponding to FLD. This is made a bit harder
11858 by the fact that with virtual inheritance we may need to walk down
11859 the non-virtual inheritance chain. */
11860 while (true)
11861 {
11862 tree containing_binfo = NULL, found_binfo = NULL;
11863 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11864 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11865 {
11866 found_binfo = base_binfo;
11867 break;
11868 }
11869 else
11870 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11871 - tree_to_shwi (BINFO_OFFSET (binfo)))
11872 * BITS_PER_UNIT < pos
11873 /* Rule out types with no virtual methods, or we can get confused
11874 here by zero-sized bases. */
11875 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11876 && (!containing_binfo
11877 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11878 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11879 containing_binfo = base_binfo;
11880 if (found_binfo)
11881 {
11882 binfo = found_binfo;
11883 break;
11884 }
11885 if (!containing_binfo)
11886 return NULL_TREE;
11887 binfo2 = containing_binfo;
11888 }
11889 }
11890
11891 type = TREE_TYPE (fld);
11892 offset -= pos;
11893 }
11894 }
11895
11896 /* Returns true if X is a typedef decl. */
11897
11898 bool
11899 is_typedef_decl (tree x)
11900 {
11901 return (x && TREE_CODE (x) == TYPE_DECL
11902 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11903 }
11904
11905 /* Returns true iff TYPE is a type variant created for a typedef. */
11906
11907 bool
11908 typedef_variant_p (tree type)
11909 {
11910 return is_typedef_decl (TYPE_NAME (type));
11911 }
11912
11913 /* Warn about a use of an identifier which was marked deprecated. */
11914 void
11915 warn_deprecated_use (tree node, tree attr)
11916 {
11917 const char *msg;
11918
11919 if (node == 0 || !warn_deprecated_decl)
11920 return;
11921
11922 if (!attr)
11923 {
11924 if (DECL_P (node))
11925 attr = DECL_ATTRIBUTES (node);
11926 else if (TYPE_P (node))
11927 {
11928 tree decl = TYPE_STUB_DECL (node);
11929 if (decl)
11930 attr = lookup_attribute ("deprecated",
11931 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11932 }
11933 }
11934
11935 if (attr)
11936 attr = lookup_attribute ("deprecated", attr);
11937
11938 if (attr)
11939 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
11940 else
11941 msg = NULL;
11942
11943 if (DECL_P (node))
11944 {
11945 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
11946 if (msg)
11947 warning (OPT_Wdeprecated_declarations,
11948 "%qD is deprecated (declared at %r%s:%d%R): %s",
11949 node, "locus", xloc.file, xloc.line, msg);
11950 else
11951 warning (OPT_Wdeprecated_declarations,
11952 "%qD is deprecated (declared at %r%s:%d%R)",
11953 node, "locus", xloc.file, xloc.line);
11954 }
11955 else if (TYPE_P (node))
11956 {
11957 tree what = NULL_TREE;
11958 tree decl = TYPE_STUB_DECL (node);
11959
11960 if (TYPE_NAME (node))
11961 {
11962 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
11963 what = TYPE_NAME (node);
11964 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
11965 && DECL_NAME (TYPE_NAME (node)))
11966 what = DECL_NAME (TYPE_NAME (node));
11967 }
11968
11969 if (decl)
11970 {
11971 expanded_location xloc
11972 = expand_location (DECL_SOURCE_LOCATION (decl));
11973 if (what)
11974 {
11975 if (msg)
11976 warning (OPT_Wdeprecated_declarations,
11977 "%qE is deprecated (declared at %r%s:%d%R): %s",
11978 what, "locus", xloc.file, xloc.line, msg);
11979 else
11980 warning (OPT_Wdeprecated_declarations,
11981 "%qE is deprecated (declared at %r%s:%d%R)",
11982 what, "locus", xloc.file, xloc.line);
11983 }
11984 else
11985 {
11986 if (msg)
11987 warning (OPT_Wdeprecated_declarations,
11988 "type is deprecated (declared at %r%s:%d%R): %s",
11989 "locus", xloc.file, xloc.line, msg);
11990 else
11991 warning (OPT_Wdeprecated_declarations,
11992 "type is deprecated (declared at %r%s:%d%R)",
11993 "locus", xloc.file, xloc.line);
11994 }
11995 }
11996 else
11997 {
11998 if (what)
11999 {
12000 if (msg)
12001 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12002 what, msg);
12003 else
12004 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12005 }
12006 else
12007 {
12008 if (msg)
12009 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12010 msg);
12011 else
12012 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12013 }
12014 }
12015 }
12016 }
12017
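/* Illustrative sketch (hypothetical helper, not part of any front
   end): warn about a use of DECL if it is marked deprecated, letting
   warn_deprecated_use locate the attribute and its message itself.  */

static ATTRIBUTE_UNUSED void
maybe_warn_deprecated_use (tree decl)
{
  if (DECL_P (decl) && TREE_DEPRECATED (decl))
    warn_deprecated_use (decl, NULL_TREE);
}
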
12018 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12019 somewhere in it. */
12020
12021 bool
12022 contains_bitfld_component_ref_p (const_tree ref)
12023 {
12024 while (handled_component_p (ref))
12025 {
12026 if (TREE_CODE (ref) == COMPONENT_REF
12027 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12028 return true;
12029 ref = TREE_OPERAND (ref, 0);
12030 }
12031
12032 return false;
12033 }
12034
12035 /* Try to determine whether a TRY_CATCH expression can fall through.
12036 This is a subroutine of block_may_fallthru. */
12037
12038 static bool
12039 try_catch_may_fallthru (const_tree stmt)
12040 {
12041 tree_stmt_iterator i;
12042
12043 /* If the TRY block can fall through, the whole TRY_CATCH can
12044 fall through. */
12045 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12046 return true;
12047
12048 i = tsi_start (TREE_OPERAND (stmt, 1));
12049 switch (TREE_CODE (tsi_stmt (i)))
12050 {
12051 case CATCH_EXPR:
12052 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12053 catch expression and a body. The whole TRY_CATCH may fall
12054 through iff any of the catch bodies falls through. */
12055 for (; !tsi_end_p (i); tsi_next (&i))
12056 {
12057 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12058 return true;
12059 }
12060 return false;
12061
12062 case EH_FILTER_EXPR:
12063 /* The exception filter expression only matters if there is an
12064 exception. If the exception does not match EH_FILTER_TYPES,
12065 we will execute EH_FILTER_FAILURE, and we will fall through
12066 if that falls through. If the exception does match
12067 EH_FILTER_TYPES, the stack unwinder will continue up the
12068 stack, so we will not fall through. We don't know whether we
12069 will throw an exception which matches EH_FILTER_TYPES or not,
12070 so we just ignore EH_FILTER_TYPES and assume that we might
12071 throw an exception which doesn't match. */
12072 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12073
12074 default:
12075 /* This case represents statements to be executed when an
12076 exception occurs. Those statements are implicitly followed
12077 by a RESX statement to resume execution after the exception.
12078 So in this case the TRY_CATCH never falls through. */
12079 return false;
12080 }
12081 }
12082
12083 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12084 need not be 100% accurate; simply be conservative and return true if we
12085 don't know. This is used only to avoid stupidly generating extra code.
12086 If we're wrong, we'll just delete the extra code later. */
12087
12088 bool
12089 block_may_fallthru (const_tree block)
12090 {
12091 /* This CONST_CAST is okay because expr_last returns its argument
12092 unmodified and we assign it to a const_tree. */
12093 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12094
12095 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12096 {
12097 case GOTO_EXPR:
12098 case RETURN_EXPR:
12099 /* Easy cases. If the last statement of the block implies
12100 control transfer, then we can't fall through. */
12101 return false;
12102
12103 case SWITCH_EXPR:
12104 /* If SWITCH_LABELS is set, this is lowered, and represents a
12105 branch to a selected label and hence cannot fall through.
12106 Otherwise SWITCH_BODY is set, and the switch can fall
12107 through. */
12108 return SWITCH_LABELS (stmt) == NULL_TREE;
12109
12110 case COND_EXPR:
12111 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12112 return true;
12113 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12114
12115 case BIND_EXPR:
12116 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12117
12118 case TRY_CATCH_EXPR:
12119 return try_catch_may_fallthru (stmt);
12120
12121 case TRY_FINALLY_EXPR:
12122 /* The finally clause is always executed after the try clause,
12123 so if it does not fall through, then the try-finally will not
12124 fall through. Otherwise, if the try clause does not fall
12125 through, then when the finally clause falls through it will
12126 resume execution wherever the try clause was going. So the
12127 whole try-finally will only fall through if both the try
12128 clause and the finally clause fall through. */
12129 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12130 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12131
12132 case MODIFY_EXPR:
12133 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12134 stmt = TREE_OPERAND (stmt, 1);
12135 else
12136 return true;
12137 /* FALLTHRU */
12138
12139 case CALL_EXPR:
12140 /* Functions that do not return do not fall through. */
12141 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12142
12143 case CLEANUP_POINT_EXPR:
12144 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12145
12146 case TARGET_EXPR:
12147 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12148
12149 case ERROR_MARK:
12150 return true;
12151
12152 default:
12153 return lang_hooks.block_may_fallthru (stmt);
12154 }
12155 }
12156
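/* Illustrative sketch (hypothetical helper): DECL_SAVED_TREE holds the
   GENERIC body of a function before gimplification, so this asks
   whether control might reach the end of FNDECL without hitting an
   explicit return or other control transfer.  */

static ATTRIBUTE_UNUSED bool
function_body_may_fallthru_p (tree fndecl)
{
  return block_may_fallthru (DECL_SAVED_TREE (fndecl));
}
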
12157 /* True if we are using EH to handle cleanups. */
12158 static bool using_eh_for_cleanups_flag = false;
12159
12160 /* This routine is called from front ends to indicate eh should be used for
12161 cleanups. */
12162 void
12163 using_eh_for_cleanups (void)
12164 {
12165 using_eh_for_cleanups_flag = true;
12166 }
12167
12168 /* Query whether EH is used for cleanups. */
12169 bool
12170 using_eh_for_cleanups_p (void)
12171 {
12172 return using_eh_for_cleanups_flag;
12173 }
12174
12175 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12176 const char *
12177 get_tree_code_name (enum tree_code code)
12178 {
12179 const char *invalid = "<invalid tree code>";
12180
12181 if (code >= MAX_TREE_CODES)
12182 return invalid;
12183
12184 return tree_code_name[code];
12185 }
12186
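/* Illustrative sketch (hypothetical debugging helper):
   get_tree_code_name guards against out-of-range codes, so it can be
   used directly when dumping a node.  */

static ATTRIBUTE_UNUSED void
dump_tree_code_of (const_tree t)
{
  fprintf (stderr, "%s\n", get_tree_code_name (TREE_CODE (t)));
}
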
12187 /* Drops the TREE_OVERFLOW flag from T. */
12188
12189 tree
12190 drop_tree_overflow (tree t)
12191 {
12192 gcc_checking_assert (TREE_OVERFLOW (t));
12193
12194 /* For tree codes with sharing machinery, re-build the result. */
12195 if (TREE_CODE (t) == INTEGER_CST)
12196 return wide_int_to_tree (TREE_TYPE (t), t);
12197
12198 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12199 and drop the flag. */
12200 t = copy_node (t);
12201 TREE_OVERFLOW (t) = 0;
12202 return t;
12203 }
12204
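/* Illustrative sketch (hypothetical helper): an INTEGER_CST that
   overflowed is a distinct node from the clean constant with the same
   value, so a caller comparing constants might first strip the
   flag.  */

static ATTRIBUTE_UNUSED tree
constant_without_overflow (tree cst)
{
  return TREE_OVERFLOW (cst) ? drop_tree_overflow (cst) : cst;
}
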
12205 /* Given a memory reference expression T, return its base address.
12206 The base address of a memory reference expression is the main
12207 object being referenced. For instance, the base address for
12208 'array[i].fld[j]' is 'array'. You can think of this as stripping
12209 away the offset part from a memory address.
12210
12211 This function calls handled_component_p to strip away all the inner
12212 parts of the memory reference until it reaches the base object. */
12213
12214 tree
12215 get_base_address (tree t)
12216 {
12217 while (handled_component_p (t))
12218 t = TREE_OPERAND (t, 0);
12219
12220 if ((TREE_CODE (t) == MEM_REF
12221 || TREE_CODE (t) == TARGET_MEM_REF)
12222 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12223 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12224
12225 /* ??? Either the alias oracle or all callers need to properly deal
12226 with WITH_SIZE_EXPRs before we can look through those. */
12227 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12228 return NULL_TREE;
12229
12230 return t;
12231 }
12232
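/* Illustrative sketch (hypothetical helper): for a reference such as
   array[i].fld[j], get_base_address strips the ARRAY_REFs and the
   COMPONENT_REF and returns the VAR_DECL for "array", so this tests
   whether REF ultimately refers to DECL.  */

static ATTRIBUTE_UNUSED bool
ref_has_base_p (tree ref, tree decl)
{
  return get_base_address (ref) == decl;
}
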
12233 #include "gt-tree.h"