re PR lto/63286 (FAIL: g++.dg/lto/20101014-2 cp_lto_20101014-2_0.o assemble, * after...
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables index by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "stor-layout.h"
37 #include "calls.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "tm_p.h"
41 #include "function.h"
42 #include "obstack.h"
43 #include "toplev.h" /* get_random_seed */
44 #include "hashtab.h"
45 #include "inchash.h"
46 #include "filenames.h"
47 #include "output.h"
48 #include "target.h"
49 #include "common/common-target.h"
50 #include "langhooks.h"
51 #include "tree-inline.h"
52 #include "tree-iterator.h"
53 #include "basic-block.h"
54 #include "bitmap.h"
55 #include "tree-ssa-alias.h"
56 #include "internal-fn.h"
57 #include "gimple-expr.h"
58 #include "is-a.h"
59 #include "gimple.h"
60 #include "gimple-iterator.h"
61 #include "gimplify.h"
62 #include "gimple-ssa.h"
63 #include "cgraph.h"
64 #include "tree-phinodes.h"
65 #include "stringpool.h"
66 #include "tree-ssanames.h"
67 #include "expr.h"
68 #include "tree-dfa.h"
69 #include "params.h"
70 #include "tree-pass.h"
71 #include "langhooks-def.h"
72 #include "diagnostic.h"
73 #include "tree-diagnostic.h"
74 #include "tree-pretty-print.h"
75 #include "except.h"
76 #include "debug.h"
77 #include "intl.h"
78 #include "wide-int.h"
79 #include "builtins.h"
80
81 /* Tree code classes. */
82
83 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
84 #define END_OF_BASE_TREE_CODES tcc_exceptional,
85
86 const enum tree_code_class tree_code_type[] = {
87 #include "all-tree.def"
88 };
89
90 #undef DEFTREECODE
91 #undef END_OF_BASE_TREE_CODES
92
93 /* Table indexed by tree code giving number of expression
94 operands beyond the fixed part of the node structure.
95 Not used for types or decls. */
96
97 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
98 #define END_OF_BASE_TREE_CODES 0,
99
100 const unsigned char tree_code_length[] = {
101 #include "all-tree.def"
102 };
103
104 #undef DEFTREECODE
105 #undef END_OF_BASE_TREE_CODES
106
107 /* Names of tree components.
108 Used for printing out the tree and error messages. */
109 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
110 #define END_OF_BASE_TREE_CODES "@dummy",
111
112 static const char *const tree_code_name[] = {
113 #include "all-tree.def"
114 };
115
116 #undef DEFTREECODE
117 #undef END_OF_BASE_TREE_CODES
118
119 /* Each tree code class has an associated string representation.
120 These must correspond to the tree_code_class entries. */
121
122 const char *const tree_code_class_strings[] =
123 {
124 "exceptional",
125 "constant",
126 "type",
127 "declaration",
128 "reference",
129 "comparison",
130 "unary",
131 "binary",
132 "statement",
133 "vl_exp",
134 "expression"
135 };
136
137 /* obstack.[ch] explicitly declined to prototype this. */
138 extern int _obstack_allocated_p (struct obstack *h, void *obj);
139
140 /* Statistics-gathering stuff. */
141
142 static int tree_code_counts[MAX_TREE_CODES];
143 int tree_node_counts[(int) all_kinds];
144 int tree_node_sizes[(int) all_kinds];
145
146 /* Keep in sync with tree.h:enum tree_node_kind. */
147 static const char * const tree_node_kind_names[] = {
148 "decls",
149 "types",
150 "blocks",
151 "stmts",
152 "refs",
153 "exprs",
154 "constants",
155 "identifiers",
156 "vecs",
157 "binfos",
158 "ssa names",
159 "constructors",
160 "random kinds",
161 "lang_decl kinds",
162 "lang_type kinds",
163 "omp clauses",
164 };
165
166 /* Unique id for next decl created. */
167 static GTY(()) int next_decl_uid;
168 /* Unique id for next type created. */
169 static GTY(()) int next_type_uid = 1;
170 /* Unique id for next debug decl created. Use negative numbers,
171 to catch erroneous uses. */
172 static GTY(()) int next_debug_decl_uid;
173
174 /* Since we cannot rehash a type after it is in the table, we have to
175 keep the hash code. */
176
177 struct GTY(()) type_hash {
178 unsigned long hash;
179 tree type;
180 };
181
182 /* Initial size of the hash table (rounded to next prime). */
183 #define TYPE_HASH_INITIAL_SIZE 1000
184
185 /* Now here is the hash table. When recording a type, it is added to
186 the slot whose index is the hash code. Note that the hash table is
187 used for several kinds of types (function types, array types and
188 array index range types, for now). While all these live in the
189 same table, they are completely independent, and the hash code is
190 computed differently for each of these. */
191
192 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
193 htab_t type_hash_table;
194
195 /* Hash table and temporary node for larger integer const values. */
196 static GTY (()) tree int_cst_node;
197 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
198 htab_t int_cst_hash_table;
199
200 /* Hash table for optimization flags and target option flags. Use the same
201 hash table for both sets of options. Nodes for building the current
202 optimization and target option nodes. The assumption is most of the time
203 the options created will already be in the hash table, so we avoid
204 allocating and freeing up a node repeatably. */
205 static GTY (()) tree cl_optimization_node;
206 static GTY (()) tree cl_target_option_node;
207 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
208 htab_t cl_option_hash_table;
209
210 /* General tree->tree mapping structure for use in hash tables. */
211
212
213 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
214 htab_t debug_expr_for_decl;
215
216 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
217 htab_t value_expr_for_decl;
218
219 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
220 htab_t debug_args_for_decl;
221
222 static void set_type_quals (tree, int);
223 static int type_hash_eq (const void *, const void *);
224 static hashval_t type_hash_hash (const void *);
225 static hashval_t int_cst_hash_hash (const void *);
226 static int int_cst_hash_eq (const void *, const void *);
227 static hashval_t cl_option_hash_hash (const void *);
228 static int cl_option_hash_eq (const void *, const void *);
229 static void print_type_hash_statistics (void);
230 static void print_debug_expr_statistics (void);
231 static void print_value_expr_statistics (void);
232 static int type_hash_marked_p (const void *);
233 static void type_hash_list (const_tree, inchash::hash &);
234 static void attribute_hash_list (const_tree, inchash::hash &);
235
236 tree global_trees[TI_MAX];
237 tree integer_types[itk_none];
238
239 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
240
241 /* Number of operands for each OpenMP clause. */
242 unsigned const char omp_clause_num_ops[] =
243 {
244 0, /* OMP_CLAUSE_ERROR */
245 1, /* OMP_CLAUSE_PRIVATE */
246 1, /* OMP_CLAUSE_SHARED */
247 1, /* OMP_CLAUSE_FIRSTPRIVATE */
248 2, /* OMP_CLAUSE_LASTPRIVATE */
249 4, /* OMP_CLAUSE_REDUCTION */
250 1, /* OMP_CLAUSE_COPYIN */
251 1, /* OMP_CLAUSE_COPYPRIVATE */
252 3, /* OMP_CLAUSE_LINEAR */
253 2, /* OMP_CLAUSE_ALIGNED */
254 1, /* OMP_CLAUSE_DEPEND */
255 1, /* OMP_CLAUSE_UNIFORM */
256 2, /* OMP_CLAUSE_FROM */
257 2, /* OMP_CLAUSE_TO */
258 2, /* OMP_CLAUSE_MAP */
259 1, /* OMP_CLAUSE__LOOPTEMP_ */
260 1, /* OMP_CLAUSE_IF */
261 1, /* OMP_CLAUSE_NUM_THREADS */
262 1, /* OMP_CLAUSE_SCHEDULE */
263 0, /* OMP_CLAUSE_NOWAIT */
264 0, /* OMP_CLAUSE_ORDERED */
265 0, /* OMP_CLAUSE_DEFAULT */
266 3, /* OMP_CLAUSE_COLLAPSE */
267 0, /* OMP_CLAUSE_UNTIED */
268 1, /* OMP_CLAUSE_FINAL */
269 0, /* OMP_CLAUSE_MERGEABLE */
270 1, /* OMP_CLAUSE_DEVICE */
271 1, /* OMP_CLAUSE_DIST_SCHEDULE */
272 0, /* OMP_CLAUSE_INBRANCH */
273 0, /* OMP_CLAUSE_NOTINBRANCH */
274 1, /* OMP_CLAUSE_NUM_TEAMS */
275 1, /* OMP_CLAUSE_THREAD_LIMIT */
276 0, /* OMP_CLAUSE_PROC_BIND */
277 1, /* OMP_CLAUSE_SAFELEN */
278 1, /* OMP_CLAUSE_SIMDLEN */
279 0, /* OMP_CLAUSE_FOR */
280 0, /* OMP_CLAUSE_PARALLEL */
281 0, /* OMP_CLAUSE_SECTIONS */
282 0, /* OMP_CLAUSE_TASKGROUP */
283 1, /* OMP_CLAUSE__SIMDUID_ */
284 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
285 };
286
287 const char * const omp_clause_code_name[] =
288 {
289 "error_clause",
290 "private",
291 "shared",
292 "firstprivate",
293 "lastprivate",
294 "reduction",
295 "copyin",
296 "copyprivate",
297 "linear",
298 "aligned",
299 "depend",
300 "uniform",
301 "from",
302 "to",
303 "map",
304 "_looptemp_",
305 "if",
306 "num_threads",
307 "schedule",
308 "nowait",
309 "ordered",
310 "default",
311 "collapse",
312 "untied",
313 "final",
314 "mergeable",
315 "device",
316 "dist_schedule",
317 "inbranch",
318 "notinbranch",
319 "num_teams",
320 "thread_limit",
321 "proc_bind",
322 "safelen",
323 "simdlen",
324 "for",
325 "parallel",
326 "sections",
327 "taskgroup",
328 "_simduid_",
329 "_Cilk_for_count_"
330 };
331
332
333 /* Return the tree node structure used by tree code CODE. */
334
335 static inline enum tree_node_structure_enum
336 tree_node_structure_for_code (enum tree_code code)
337 {
338 switch (TREE_CODE_CLASS (code))
339 {
340 case tcc_declaration:
341 {
342 switch (code)
343 {
344 case FIELD_DECL:
345 return TS_FIELD_DECL;
346 case PARM_DECL:
347 return TS_PARM_DECL;
348 case VAR_DECL:
349 return TS_VAR_DECL;
350 case LABEL_DECL:
351 return TS_LABEL_DECL;
352 case RESULT_DECL:
353 return TS_RESULT_DECL;
354 case DEBUG_EXPR_DECL:
355 return TS_DECL_WRTL;
356 case CONST_DECL:
357 return TS_CONST_DECL;
358 case TYPE_DECL:
359 return TS_TYPE_DECL;
360 case FUNCTION_DECL:
361 return TS_FUNCTION_DECL;
362 case TRANSLATION_UNIT_DECL:
363 return TS_TRANSLATION_UNIT_DECL;
364 default:
365 return TS_DECL_NON_COMMON;
366 }
367 }
368 case tcc_type:
369 return TS_TYPE_NON_COMMON;
370 case tcc_reference:
371 case tcc_comparison:
372 case tcc_unary:
373 case tcc_binary:
374 case tcc_expression:
375 case tcc_statement:
376 case tcc_vl_exp:
377 return TS_EXP;
378 default: /* tcc_constant and tcc_exceptional */
379 break;
380 }
381 switch (code)
382 {
383 /* tcc_constant cases. */
384 case VOID_CST: return TS_TYPED;
385 case INTEGER_CST: return TS_INT_CST;
386 case REAL_CST: return TS_REAL_CST;
387 case FIXED_CST: return TS_FIXED_CST;
388 case COMPLEX_CST: return TS_COMPLEX;
389 case VECTOR_CST: return TS_VECTOR;
390 case STRING_CST: return TS_STRING;
391 /* tcc_exceptional cases. */
392 case ERROR_MARK: return TS_COMMON;
393 case IDENTIFIER_NODE: return TS_IDENTIFIER;
394 case TREE_LIST: return TS_LIST;
395 case TREE_VEC: return TS_VEC;
396 case SSA_NAME: return TS_SSA_NAME;
397 case PLACEHOLDER_EXPR: return TS_COMMON;
398 case STATEMENT_LIST: return TS_STATEMENT_LIST;
399 case BLOCK: return TS_BLOCK;
400 case CONSTRUCTOR: return TS_CONSTRUCTOR;
401 case TREE_BINFO: return TS_BINFO;
402 case OMP_CLAUSE: return TS_OMP_CLAUSE;
403 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
404 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
405
406 default:
407 gcc_unreachable ();
408 }
409 }
410
411
412 /* Initialize tree_contains_struct to describe the hierarchy of tree
413 nodes. */
414
415 static void
416 initialize_tree_contains_struct (void)
417 {
418 unsigned i;
419
420 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
421 {
422 enum tree_code code;
423 enum tree_node_structure_enum ts_code;
424
425 code = (enum tree_code) i;
426 ts_code = tree_node_structure_for_code (code);
427
428 /* Mark the TS structure itself. */
429 tree_contains_struct[code][ts_code] = 1;
430
431 /* Mark all the structures that TS is derived from. */
432 switch (ts_code)
433 {
434 case TS_TYPED:
435 case TS_BLOCK:
436 MARK_TS_BASE (code);
437 break;
438
439 case TS_COMMON:
440 case TS_INT_CST:
441 case TS_REAL_CST:
442 case TS_FIXED_CST:
443 case TS_VECTOR:
444 case TS_STRING:
445 case TS_COMPLEX:
446 case TS_SSA_NAME:
447 case TS_CONSTRUCTOR:
448 case TS_EXP:
449 case TS_STATEMENT_LIST:
450 MARK_TS_TYPED (code);
451 break;
452
453 case TS_IDENTIFIER:
454 case TS_DECL_MINIMAL:
455 case TS_TYPE_COMMON:
456 case TS_LIST:
457 case TS_VEC:
458 case TS_BINFO:
459 case TS_OMP_CLAUSE:
460 case TS_OPTIMIZATION:
461 case TS_TARGET_OPTION:
462 MARK_TS_COMMON (code);
463 break;
464
465 case TS_TYPE_WITH_LANG_SPECIFIC:
466 MARK_TS_TYPE_COMMON (code);
467 break;
468
469 case TS_TYPE_NON_COMMON:
470 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
471 break;
472
473 case TS_DECL_COMMON:
474 MARK_TS_DECL_MINIMAL (code);
475 break;
476
477 case TS_DECL_WRTL:
478 case TS_CONST_DECL:
479 MARK_TS_DECL_COMMON (code);
480 break;
481
482 case TS_DECL_NON_COMMON:
483 MARK_TS_DECL_WITH_VIS (code);
484 break;
485
486 case TS_DECL_WITH_VIS:
487 case TS_PARM_DECL:
488 case TS_LABEL_DECL:
489 case TS_RESULT_DECL:
490 MARK_TS_DECL_WRTL (code);
491 break;
492
493 case TS_FIELD_DECL:
494 MARK_TS_DECL_COMMON (code);
495 break;
496
497 case TS_VAR_DECL:
498 MARK_TS_DECL_WITH_VIS (code);
499 break;
500
501 case TS_TYPE_DECL:
502 case TS_FUNCTION_DECL:
503 MARK_TS_DECL_NON_COMMON (code);
504 break;
505
506 case TS_TRANSLATION_UNIT_DECL:
507 MARK_TS_DECL_COMMON (code);
508 break;
509
510 default:
511 gcc_unreachable ();
512 }
513 }
514
515 /* Basic consistency checks for attributes used in fold. */
516 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
517 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
518 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
519 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
520 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
521 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
522 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
523 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
524 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
525 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
526 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
527 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
528 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
529 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
530 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
531 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
532 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
533 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
534 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
535 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
536 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
537 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
538 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
539 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
540 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
541 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
542 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
543 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
544 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
545 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
546 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
547 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
548 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
549 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
550 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
551 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
552 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
553 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
554 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
555 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
556 }
557
558
559 /* Init tree.c. */
560
561 void
562 init_ttree (void)
563 {
564 /* Initialize the hash table of types. */
565 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
566 type_hash_eq, 0);
567
568 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
569 tree_decl_map_eq, 0);
570
571 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
572 tree_decl_map_eq, 0);
573
574 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
575 int_cst_hash_eq, NULL);
576
577 int_cst_node = make_int_cst (1, 1);
578
579 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
580 cl_option_hash_eq, NULL);
581
582 cl_optimization_node = make_node (OPTIMIZATION_NODE);
583 cl_target_option_node = make_node (TARGET_OPTION_NODE);
584
585 /* Initialize the tree_contains_struct array. */
586 initialize_tree_contains_struct ();
587 lang_hooks.init_ts ();
588 }
589
590 \f
591 /* The name of the object as the assembler will see it (but before any
592 translations made by ASM_OUTPUT_LABELREF). Often this is the same
593 as DECL_NAME. It is an IDENTIFIER_NODE. */
594 tree
595 decl_assembler_name (tree decl)
596 {
597 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
598 lang_hooks.set_decl_assembler_name (decl);
599 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
600 }
601
602 /* When the target supports COMDAT groups, this indicates which group the
603 DECL is associated with. This can be either an IDENTIFIER_NODE or a
604 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
605 tree
606 decl_comdat_group (const_tree node)
607 {
608 struct symtab_node *snode = symtab_node::get (node);
609 if (!snode)
610 return NULL;
611 return snode->get_comdat_group ();
612 }
613
614 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
615 tree
616 decl_comdat_group_id (const_tree node)
617 {
618 struct symtab_node *snode = symtab_node::get (node);
619 if (!snode)
620 return NULL;
621 return snode->get_comdat_group_id ();
622 }
623
624 /* When the target supports named section, return its name as IDENTIFIER_NODE
625 or NULL if it is in no section. */
626 const char *
627 decl_section_name (const_tree node)
628 {
629 struct symtab_node *snode = symtab_node::get (node);
630 if (!snode)
631 return NULL;
632 return snode->get_section ();
633 }
634
635 /* Set section section name of NODE to VALUE (that is expected to
636 be identifier node) */
637 void
638 set_decl_section_name (tree node, const char *value)
639 {
640 struct symtab_node *snode;
641
642 if (value == NULL)
643 {
644 snode = symtab_node::get (node);
645 if (!snode)
646 return;
647 }
648 else if (TREE_CODE (node) == VAR_DECL)
649 snode = varpool_node::get_create (node);
650 else
651 snode = cgraph_node::get_create (node);
652 snode->set_section (value);
653 }
654
655 /* Return TLS model of a variable NODE. */
656 enum tls_model
657 decl_tls_model (const_tree node)
658 {
659 struct varpool_node *snode = varpool_node::get (node);
660 if (!snode)
661 return TLS_MODEL_NONE;
662 return snode->tls_model;
663 }
664
665 /* Set TLS model of variable NODE to MODEL. */
666 void
667 set_decl_tls_model (tree node, enum tls_model model)
668 {
669 struct varpool_node *vnode;
670
671 if (model == TLS_MODEL_NONE)
672 {
673 vnode = varpool_node::get (node);
674 if (!vnode)
675 return;
676 }
677 else
678 vnode = varpool_node::get_create (node);
679 vnode->tls_model = model;
680 }
681
682 /* Compute the number of bytes occupied by a tree with code CODE.
683 This function cannot be used for nodes that have variable sizes,
684 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
685 size_t
686 tree_code_size (enum tree_code code)
687 {
688 switch (TREE_CODE_CLASS (code))
689 {
690 case tcc_declaration: /* A decl node */
691 {
692 switch (code)
693 {
694 case FIELD_DECL:
695 return sizeof (struct tree_field_decl);
696 case PARM_DECL:
697 return sizeof (struct tree_parm_decl);
698 case VAR_DECL:
699 return sizeof (struct tree_var_decl);
700 case LABEL_DECL:
701 return sizeof (struct tree_label_decl);
702 case RESULT_DECL:
703 return sizeof (struct tree_result_decl);
704 case CONST_DECL:
705 return sizeof (struct tree_const_decl);
706 case TYPE_DECL:
707 return sizeof (struct tree_type_decl);
708 case FUNCTION_DECL:
709 return sizeof (struct tree_function_decl);
710 case DEBUG_EXPR_DECL:
711 return sizeof (struct tree_decl_with_rtl);
712 case TRANSLATION_UNIT_DECL:
713 return sizeof (struct tree_translation_unit_decl);
714 case NAMESPACE_DECL:
715 case IMPORTED_DECL:
716 case NAMELIST_DECL:
717 return sizeof (struct tree_decl_non_common);
718 default:
719 return lang_hooks.tree_size (code);
720 }
721 }
722
723 case tcc_type: /* a type node */
724 return sizeof (struct tree_type_non_common);
725
726 case tcc_reference: /* a reference */
727 case tcc_expression: /* an expression */
728 case tcc_statement: /* an expression with side effects */
729 case tcc_comparison: /* a comparison expression */
730 case tcc_unary: /* a unary arithmetic expression */
731 case tcc_binary: /* a binary arithmetic expression */
732 return (sizeof (struct tree_exp)
733 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
734
735 case tcc_constant: /* a constant */
736 switch (code)
737 {
738 case VOID_CST: return sizeof (struct tree_typed);
739 case INTEGER_CST: gcc_unreachable ();
740 case REAL_CST: return sizeof (struct tree_real_cst);
741 case FIXED_CST: return sizeof (struct tree_fixed_cst);
742 case COMPLEX_CST: return sizeof (struct tree_complex);
743 case VECTOR_CST: return sizeof (struct tree_vector);
744 case STRING_CST: gcc_unreachable ();
745 default:
746 return lang_hooks.tree_size (code);
747 }
748
749 case tcc_exceptional: /* something random, like an identifier. */
750 switch (code)
751 {
752 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
753 case TREE_LIST: return sizeof (struct tree_list);
754
755 case ERROR_MARK:
756 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
757
758 case TREE_VEC:
759 case OMP_CLAUSE: gcc_unreachable ();
760
761 case SSA_NAME: return sizeof (struct tree_ssa_name);
762
763 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
764 case BLOCK: return sizeof (struct tree_block);
765 case CONSTRUCTOR: return sizeof (struct tree_constructor);
766 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
767 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
768
769 default:
770 return lang_hooks.tree_size (code);
771 }
772
773 default:
774 gcc_unreachable ();
775 }
776 }
777
778 /* Compute the number of bytes occupied by NODE. This routine only
779 looks at TREE_CODE, except for those nodes that have variable sizes. */
780 size_t
781 tree_size (const_tree node)
782 {
783 const enum tree_code code = TREE_CODE (node);
784 switch (code)
785 {
786 case INTEGER_CST:
787 return (sizeof (struct tree_int_cst)
788 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
789
790 case TREE_BINFO:
791 return (offsetof (struct tree_binfo, base_binfos)
792 + vec<tree, va_gc>
793 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
794
795 case TREE_VEC:
796 return (sizeof (struct tree_vec)
797 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
798
799 case VECTOR_CST:
800 return (sizeof (struct tree_vector)
801 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
802
803 case STRING_CST:
804 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
805
806 case OMP_CLAUSE:
807 return (sizeof (struct tree_omp_clause)
808 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
809 * sizeof (tree));
810
811 default:
812 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
813 return (sizeof (struct tree_exp)
814 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
815 else
816 return tree_code_size (code);
817 }
818 }
819
820 /* Record interesting allocation statistics for a tree node with CODE
821 and LENGTH. */
822
823 static void
824 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
825 size_t length ATTRIBUTE_UNUSED)
826 {
827 enum tree_code_class type = TREE_CODE_CLASS (code);
828 tree_node_kind kind;
829
830 if (!GATHER_STATISTICS)
831 return;
832
833 switch (type)
834 {
835 case tcc_declaration: /* A decl node */
836 kind = d_kind;
837 break;
838
839 case tcc_type: /* a type node */
840 kind = t_kind;
841 break;
842
843 case tcc_statement: /* an expression with side effects */
844 kind = s_kind;
845 break;
846
847 case tcc_reference: /* a reference */
848 kind = r_kind;
849 break;
850
851 case tcc_expression: /* an expression */
852 case tcc_comparison: /* a comparison expression */
853 case tcc_unary: /* a unary arithmetic expression */
854 case tcc_binary: /* a binary arithmetic expression */
855 kind = e_kind;
856 break;
857
858 case tcc_constant: /* a constant */
859 kind = c_kind;
860 break;
861
862 case tcc_exceptional: /* something random, like an identifier. */
863 switch (code)
864 {
865 case IDENTIFIER_NODE:
866 kind = id_kind;
867 break;
868
869 case TREE_VEC:
870 kind = vec_kind;
871 break;
872
873 case TREE_BINFO:
874 kind = binfo_kind;
875 break;
876
877 case SSA_NAME:
878 kind = ssa_name_kind;
879 break;
880
881 case BLOCK:
882 kind = b_kind;
883 break;
884
885 case CONSTRUCTOR:
886 kind = constr_kind;
887 break;
888
889 case OMP_CLAUSE:
890 kind = omp_clause_kind;
891 break;
892
893 default:
894 kind = x_kind;
895 break;
896 }
897 break;
898
899 case tcc_vl_exp:
900 kind = e_kind;
901 break;
902
903 default:
904 gcc_unreachable ();
905 }
906
907 tree_code_counts[(int) code]++;
908 tree_node_counts[(int) kind]++;
909 tree_node_sizes[(int) kind] += length;
910 }
911
912 /* Allocate and return a new UID from the DECL_UID namespace. */
913
914 int
915 allocate_decl_uid (void)
916 {
917 return next_decl_uid++;
918 }
919
920 /* Return a newly allocated node of code CODE. For decl and type
921 nodes, some other fields are initialized. The rest of the node is
922 initialized to zero. This function cannot be used for TREE_VEC,
923 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
924 tree_code_size.
925
926 Achoo! I got a code in the node. */
927
928 tree
929 make_node_stat (enum tree_code code MEM_STAT_DECL)
930 {
931 tree t;
932 enum tree_code_class type = TREE_CODE_CLASS (code);
933 size_t length = tree_code_size (code);
934
935 record_node_allocation_statistics (code, length);
936
937 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
938 TREE_SET_CODE (t, code);
939
940 switch (type)
941 {
942 case tcc_statement:
943 TREE_SIDE_EFFECTS (t) = 1;
944 break;
945
946 case tcc_declaration:
947 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
948 {
949 if (code == FUNCTION_DECL)
950 {
951 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
952 DECL_MODE (t) = FUNCTION_MODE;
953 }
954 else
955 DECL_ALIGN (t) = 1;
956 }
957 DECL_SOURCE_LOCATION (t) = input_location;
958 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
959 DECL_UID (t) = --next_debug_decl_uid;
960 else
961 {
962 DECL_UID (t) = allocate_decl_uid ();
963 SET_DECL_PT_UID (t, -1);
964 }
965 if (TREE_CODE (t) == LABEL_DECL)
966 LABEL_DECL_UID (t) = -1;
967
968 break;
969
970 case tcc_type:
971 TYPE_UID (t) = next_type_uid++;
972 TYPE_ALIGN (t) = BITS_PER_UNIT;
973 TYPE_USER_ALIGN (t) = 0;
974 TYPE_MAIN_VARIANT (t) = t;
975 TYPE_CANONICAL (t) = t;
976
977 /* Default to no attributes for type, but let target change that. */
978 TYPE_ATTRIBUTES (t) = NULL_TREE;
979 targetm.set_default_type_attributes (t);
980
981 /* We have not yet computed the alias set for this type. */
982 TYPE_ALIAS_SET (t) = -1;
983 break;
984
985 case tcc_constant:
986 TREE_CONSTANT (t) = 1;
987 break;
988
989 case tcc_expression:
990 switch (code)
991 {
992 case INIT_EXPR:
993 case MODIFY_EXPR:
994 case VA_ARG_EXPR:
995 case PREDECREMENT_EXPR:
996 case PREINCREMENT_EXPR:
997 case POSTDECREMENT_EXPR:
998 case POSTINCREMENT_EXPR:
999 /* All of these have side-effects, no matter what their
1000 operands are. */
1001 TREE_SIDE_EFFECTS (t) = 1;
1002 break;
1003
1004 default:
1005 break;
1006 }
1007 break;
1008
1009 default:
1010 /* Other classes need no special treatment. */
1011 break;
1012 }
1013
1014 return t;
1015 }
1016 \f
1017 /* Return a new node with the same contents as NODE except that its
1018 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1019
1020 tree
1021 copy_node_stat (tree node MEM_STAT_DECL)
1022 {
1023 tree t;
1024 enum tree_code code = TREE_CODE (node);
1025 size_t length;
1026
1027 gcc_assert (code != STATEMENT_LIST);
1028
1029 length = tree_size (node);
1030 record_node_allocation_statistics (code, length);
1031 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1032 memcpy (t, node, length);
1033
1034 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1035 TREE_CHAIN (t) = 0;
1036 TREE_ASM_WRITTEN (t) = 0;
1037 TREE_VISITED (t) = 0;
1038
1039 if (TREE_CODE_CLASS (code) == tcc_declaration)
1040 {
1041 if (code == DEBUG_EXPR_DECL)
1042 DECL_UID (t) = --next_debug_decl_uid;
1043 else
1044 {
1045 DECL_UID (t) = allocate_decl_uid ();
1046 if (DECL_PT_UID_SET_P (node))
1047 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1048 }
1049 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1050 && DECL_HAS_VALUE_EXPR_P (node))
1051 {
1052 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1053 DECL_HAS_VALUE_EXPR_P (t) = 1;
1054 }
1055 /* DECL_DEBUG_EXPR is copied explicitely by callers. */
1056 if (TREE_CODE (node) == VAR_DECL)
1057 {
1058 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1059 t->decl_with_vis.symtab_node = NULL;
1060 }
1061 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1062 {
1063 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1064 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1065 }
1066 if (TREE_CODE (node) == FUNCTION_DECL)
1067 {
1068 DECL_STRUCT_FUNCTION (t) = NULL;
1069 t->decl_with_vis.symtab_node = NULL;
1070 }
1071 }
1072 else if (TREE_CODE_CLASS (code) == tcc_type)
1073 {
1074 TYPE_UID (t) = next_type_uid++;
1075 /* The following is so that the debug code for
1076 the copy is different from the original type.
1077 The two statements usually duplicate each other
1078 (because they clear fields of the same union),
1079 but the optimizer should catch that. */
1080 TYPE_SYMTAB_POINTER (t) = 0;
1081 TYPE_SYMTAB_ADDRESS (t) = 0;
1082
1083 /* Do not copy the values cache. */
1084 if (TYPE_CACHED_VALUES_P (t))
1085 {
1086 TYPE_CACHED_VALUES_P (t) = 0;
1087 TYPE_CACHED_VALUES (t) = NULL_TREE;
1088 }
1089 }
1090
1091 return t;
1092 }
1093
1094 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1095 For example, this can copy a list made of TREE_LIST nodes. */
1096
1097 tree
1098 copy_list (tree list)
1099 {
1100 tree head;
1101 tree prev, next;
1102
1103 if (list == 0)
1104 return 0;
1105
1106 head = prev = copy_node (list);
1107 next = TREE_CHAIN (list);
1108 while (next)
1109 {
1110 TREE_CHAIN (prev) = copy_node (next);
1111 prev = TREE_CHAIN (prev);
1112 next = TREE_CHAIN (next);
1113 }
1114 return head;
1115 }
1116
1117 \f
1118 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1119 INTEGER_CST with value CST and type TYPE. */
1120
1121 static unsigned int
1122 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1123 {
1124 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1125 /* We need an extra zero HWI if CST is an unsigned integer with its
1126 upper bit set, and if CST occupies a whole number of HWIs. */
1127 if (TYPE_UNSIGNED (type)
1128 && wi::neg_p (cst)
1129 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1130 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1131 return cst.get_len ();
1132 }
1133
1134 /* Return a new INTEGER_CST with value CST and type TYPE. */
1135
1136 static tree
1137 build_new_int_cst (tree type, const wide_int &cst)
1138 {
1139 unsigned int len = cst.get_len ();
1140 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1141 tree nt = make_int_cst (len, ext_len);
1142
1143 if (len < ext_len)
1144 {
1145 --ext_len;
1146 TREE_INT_CST_ELT (nt, ext_len) = 0;
1147 for (unsigned int i = len; i < ext_len; ++i)
1148 TREE_INT_CST_ELT (nt, i) = -1;
1149 }
1150 else if (TYPE_UNSIGNED (type)
1151 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1152 {
1153 len--;
1154 TREE_INT_CST_ELT (nt, len)
1155 = zext_hwi (cst.elt (len),
1156 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1157 }
1158
1159 for (unsigned int i = 0; i < len; i++)
1160 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1161 TREE_TYPE (nt) = type;
1162 return nt;
1163 }
1164
1165 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1166
1167 tree
1168 build_int_cst (tree type, HOST_WIDE_INT low)
1169 {
1170 /* Support legacy code. */
1171 if (!type)
1172 type = integer_type_node;
1173
1174 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1175 }
1176
1177 tree
1178 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1179 {
1180 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1181 }
1182
1183 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1184
1185 tree
1186 build_int_cst_type (tree type, HOST_WIDE_INT low)
1187 {
1188 gcc_assert (type);
1189 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1190 }
1191
1192 /* Constructs tree in type TYPE from with value given by CST. Signedness
1193 of CST is assumed to be the same as the signedness of TYPE. */
1194
1195 tree
1196 double_int_to_tree (tree type, double_int cst)
1197 {
1198 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1199 }
1200
1201 /* We force the wide_int CST to the range of the type TYPE by sign or
1202 zero extending it. OVERFLOWABLE indicates if we are interested in
1203 overflow of the value, when >0 we are only interested in signed
1204 overflow, for <0 we are interested in any overflow. OVERFLOWED
1205 indicates whether overflow has already occurred. CONST_OVERFLOWED
1206 indicates whether constant overflow has already occurred. We force
1207 T's value to be within range of T's type (by setting to 0 or 1 all
1208 the bits outside the type's range). We set TREE_OVERFLOWED if,
1209 OVERFLOWED is nonzero,
1210 or OVERFLOWABLE is >0 and signed overflow occurs
1211 or OVERFLOWABLE is <0 and any overflow occurs
1212 We return a new tree node for the extended wide_int. The node
1213 is shared if no overflow flags are set. */
1214
1215
1216 tree
1217 force_fit_type (tree type, const wide_int_ref &cst,
1218 int overflowable, bool overflowed)
1219 {
1220 signop sign = TYPE_SIGN (type);
1221
1222 /* If we need to set overflow flags, return a new unshared node. */
1223 if (overflowed || !wi::fits_to_tree_p (cst, type))
1224 {
1225 if (overflowed
1226 || overflowable < 0
1227 || (overflowable > 0 && sign == SIGNED))
1228 {
1229 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1230 tree t = build_new_int_cst (type, tmp);
1231 TREE_OVERFLOW (t) = 1;
1232 return t;
1233 }
1234 }
1235
1236 /* Else build a shared node. */
1237 return wide_int_to_tree (type, cst);
1238 }
1239
1240 /* These are the hash table functions for the hash table of INTEGER_CST
1241 nodes of a sizetype. */
1242
1243 /* Return the hash code code X, an INTEGER_CST. */
1244
1245 static hashval_t
1246 int_cst_hash_hash (const void *x)
1247 {
1248 const_tree const t = (const_tree) x;
1249 hashval_t code = htab_hash_pointer (TREE_TYPE (t));
1250 int i;
1251
1252 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1253 code ^= TREE_INT_CST_ELT (t, i);
1254
1255 return code;
1256 }
1257
1258 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1259 is the same as that given by *Y, which is the same. */
1260
1261 static int
1262 int_cst_hash_eq (const void *x, const void *y)
1263 {
1264 const_tree const xt = (const_tree) x;
1265 const_tree const yt = (const_tree) y;
1266
1267 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1268 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1269 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1270 return false;
1271
1272 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1273 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1274 return false;
1275
1276 return true;
1277 }
1278
1279 /* Create an INT_CST node of TYPE and value CST.
1280 The returned node is always shared. For small integers we use a
1281 per-type vector cache, for larger ones we use a single hash table.
1282 The value is extended from its precision according to the sign of
1283 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1284 the upper bits and ensures that hashing and value equality based
1285 upon the underlying HOST_WIDE_INTs works without masking. */
1286
1287 tree
1288 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1289 {
1290 tree t;
1291 int ix = -1;
1292 int limit = 0;
1293
1294 gcc_assert (type);
1295 unsigned int prec = TYPE_PRECISION (type);
1296 signop sgn = TYPE_SIGN (type);
1297
1298 /* Verify that everything is canonical. */
1299 int l = pcst.get_len ();
1300 if (l > 1)
1301 {
1302 if (pcst.elt (l - 1) == 0)
1303 gcc_checking_assert (pcst.elt (l - 2) < 0);
1304 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1305 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1306 }
1307
1308 wide_int cst = wide_int::from (pcst, prec, sgn);
1309 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1310
1311 if (ext_len == 1)
1312 {
1313 /* We just need to store a single HOST_WIDE_INT. */
1314 HOST_WIDE_INT hwi;
1315 if (TYPE_UNSIGNED (type))
1316 hwi = cst.to_uhwi ();
1317 else
1318 hwi = cst.to_shwi ();
1319
1320 switch (TREE_CODE (type))
1321 {
1322 case NULLPTR_TYPE:
1323 gcc_assert (hwi == 0);
1324 /* Fallthru. */
1325
1326 case POINTER_TYPE:
1327 case REFERENCE_TYPE:
1328 /* Cache NULL pointer. */
1329 if (hwi == 0)
1330 {
1331 limit = 1;
1332 ix = 0;
1333 }
1334 break;
1335
1336 case BOOLEAN_TYPE:
1337 /* Cache false or true. */
1338 limit = 2;
1339 if (hwi < 2)
1340 ix = hwi;
1341 break;
1342
1343 case INTEGER_TYPE:
1344 case OFFSET_TYPE:
1345 if (TYPE_SIGN (type) == UNSIGNED)
1346 {
1347 /* Cache [0, N). */
1348 limit = INTEGER_SHARE_LIMIT;
1349 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1350 ix = hwi;
1351 }
1352 else
1353 {
1354 /* Cache [-1, N). */
1355 limit = INTEGER_SHARE_LIMIT + 1;
1356 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1357 ix = hwi + 1;
1358 }
1359 break;
1360
1361 case ENUMERAL_TYPE:
1362 break;
1363
1364 default:
1365 gcc_unreachable ();
1366 }
1367
1368 if (ix >= 0)
1369 {
1370 /* Look for it in the type's vector of small shared ints. */
1371 if (!TYPE_CACHED_VALUES_P (type))
1372 {
1373 TYPE_CACHED_VALUES_P (type) = 1;
1374 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1375 }
1376
1377 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1378 if (t)
1379 /* Make sure no one is clobbering the shared constant. */
1380 gcc_checking_assert (TREE_TYPE (t) == type
1381 && TREE_INT_CST_NUNITS (t) == 1
1382 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1383 && TREE_INT_CST_EXT_NUNITS (t) == 1
1384 && TREE_INT_CST_ELT (t, 0) == hwi);
1385 else
1386 {
1387 /* Create a new shared int. */
1388 t = build_new_int_cst (type, cst);
1389 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1390 }
1391 }
1392 else
1393 {
1394 /* Use the cache of larger shared ints, using int_cst_node as
1395 a temporary. */
1396 void **slot;
1397
1398 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1399 TREE_TYPE (int_cst_node) = type;
1400
1401 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1402 t = (tree) *slot;
1403 if (!t)
1404 {
1405 /* Insert this one into the hash table. */
1406 t = int_cst_node;
1407 *slot = t;
1408 /* Make a new node for next time round. */
1409 int_cst_node = make_int_cst (1, 1);
1410 }
1411 }
1412 }
1413 else
1414 {
1415 /* The value either hashes properly or we drop it on the floor
1416 for the gc to take care of. There will not be enough of them
1417 to worry about. */
1418 void **slot;
1419
1420 tree nt = build_new_int_cst (type, cst);
1421 slot = htab_find_slot (int_cst_hash_table, nt, INSERT);
1422 t = (tree) *slot;
1423 if (!t)
1424 {
1425 /* Insert this one into the hash table. */
1426 t = nt;
1427 *slot = t;
1428 }
1429 }
1430
1431 return t;
1432 }
1433
1434 void
1435 cache_integer_cst (tree t)
1436 {
1437 tree type = TREE_TYPE (t);
1438 int ix = -1;
1439 int limit = 0;
1440 int prec = TYPE_PRECISION (type);
1441
1442 gcc_assert (!TREE_OVERFLOW (t));
1443
1444 switch (TREE_CODE (type))
1445 {
1446 case NULLPTR_TYPE:
1447 gcc_assert (integer_zerop (t));
1448 /* Fallthru. */
1449
1450 case POINTER_TYPE:
1451 case REFERENCE_TYPE:
1452 /* Cache NULL pointer. */
1453 if (integer_zerop (t))
1454 {
1455 limit = 1;
1456 ix = 0;
1457 }
1458 break;
1459
1460 case BOOLEAN_TYPE:
1461 /* Cache false or true. */
1462 limit = 2;
1463 if (wi::ltu_p (t, 2))
1464 ix = TREE_INT_CST_ELT (t, 0);
1465 break;
1466
1467 case INTEGER_TYPE:
1468 case OFFSET_TYPE:
1469 if (TYPE_UNSIGNED (type))
1470 {
1471 /* Cache 0..N */
1472 limit = INTEGER_SHARE_LIMIT;
1473
1474 /* This is a little hokie, but if the prec is smaller than
1475 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1476 obvious test will not get the correct answer. */
1477 if (prec < HOST_BITS_PER_WIDE_INT)
1478 {
1479 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1480 ix = tree_to_uhwi (t);
1481 }
1482 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1483 ix = tree_to_uhwi (t);
1484 }
1485 else
1486 {
1487 /* Cache -1..N */
1488 limit = INTEGER_SHARE_LIMIT + 1;
1489
1490 if (integer_minus_onep (t))
1491 ix = 0;
1492 else if (!wi::neg_p (t))
1493 {
1494 if (prec < HOST_BITS_PER_WIDE_INT)
1495 {
1496 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1497 ix = tree_to_shwi (t) + 1;
1498 }
1499 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1500 ix = tree_to_shwi (t) + 1;
1501 }
1502 }
1503 break;
1504
1505 case ENUMERAL_TYPE:
1506 break;
1507
1508 default:
1509 gcc_unreachable ();
1510 }
1511
1512 if (ix >= 0)
1513 {
1514 /* Look for it in the type's vector of small shared ints. */
1515 if (!TYPE_CACHED_VALUES_P (type))
1516 {
1517 TYPE_CACHED_VALUES_P (type) = 1;
1518 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1519 }
1520
1521 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1522 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1523 }
1524 else
1525 {
1526 /* Use the cache of larger shared ints. */
1527 void **slot;
1528
1529 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1530 /* If there is already an entry for the number verify it's the
1531 same. */
1532 if (*slot)
1533 gcc_assert (wi::eq_p (tree (*slot), t));
1534 else
1535 /* Otherwise insert this one into the hash table. */
1536 *slot = t;
1537 }
1538 }
1539
1540
1541 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1542 and the rest are zeros. */
1543
1544 tree
1545 build_low_bits_mask (tree type, unsigned bits)
1546 {
1547 gcc_assert (bits <= TYPE_PRECISION (type));
1548
1549 return wide_int_to_tree (type, wi::mask (bits, false,
1550 TYPE_PRECISION (type)));
1551 }
1552
1553 /* Checks that X is integer constant that can be expressed in (unsigned)
1554 HOST_WIDE_INT without loss of precision. */
1555
1556 bool
1557 cst_and_fits_in_hwi (const_tree x)
1558 {
1559 if (TREE_CODE (x) != INTEGER_CST)
1560 return false;
1561
1562 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1563 return false;
1564
1565 return TREE_INT_CST_NUNITS (x) == 1;
1566 }
1567
1568 /* Build a newly constructed TREE_VEC node of length LEN. */
1569
1570 tree
1571 make_vector_stat (unsigned len MEM_STAT_DECL)
1572 {
1573 tree t;
1574 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1575
1576 record_node_allocation_statistics (VECTOR_CST, length);
1577
1578 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1579
1580 TREE_SET_CODE (t, VECTOR_CST);
1581 TREE_CONSTANT (t) = 1;
1582
1583 return t;
1584 }
1585
1586 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1587 are in a list pointed to by VALS. */
1588
1589 tree
1590 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1591 {
1592 int over = 0;
1593 unsigned cnt = 0;
1594 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1595 TREE_TYPE (v) = type;
1596
1597 /* Iterate through elements and check for overflow. */
1598 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1599 {
1600 tree value = vals[cnt];
1601
1602 VECTOR_CST_ELT (v, cnt) = value;
1603
1604 /* Don't crash if we get an address constant. */
1605 if (!CONSTANT_CLASS_P (value))
1606 continue;
1607
1608 over |= TREE_OVERFLOW (value);
1609 }
1610
1611 TREE_OVERFLOW (v) = over;
1612 return v;
1613 }
1614
1615 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1616 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1617
1618 tree
1619 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1620 {
1621 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1622 unsigned HOST_WIDE_INT idx;
1623 tree value;
1624
1625 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1626 vec[idx] = value;
1627 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1628 vec[idx] = build_zero_cst (TREE_TYPE (type));
1629
1630 return build_vector (type, vec);
1631 }
1632
1633 /* Build a vector of type VECTYPE where all the elements are SCs. */
1634 tree
1635 build_vector_from_val (tree vectype, tree sc)
1636 {
1637 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1638
1639 if (sc == error_mark_node)
1640 return sc;
1641
1642 /* Verify that the vector type is suitable for SC. Note that there
1643 is some inconsistency in the type-system with respect to restrict
1644 qualifications of pointers. Vector types always have a main-variant
1645 element type and the qualification is applied to the vector-type.
1646 So TREE_TYPE (vector-type) does not return a properly qualified
1647 vector element-type. */
1648 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1649 TREE_TYPE (vectype)));
1650
1651 if (CONSTANT_CLASS_P (sc))
1652 {
1653 tree *v = XALLOCAVEC (tree, nunits);
1654 for (i = 0; i < nunits; ++i)
1655 v[i] = sc;
1656 return build_vector (vectype, v);
1657 }
1658 else
1659 {
1660 vec<constructor_elt, va_gc> *v;
1661 vec_alloc (v, nunits);
1662 for (i = 0; i < nunits; ++i)
1663 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1664 return build_constructor (vectype, v);
1665 }
1666 }
1667
1668 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1669 are in the vec pointed to by VALS. */
1670 tree
1671 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1672 {
1673 tree c = make_node (CONSTRUCTOR);
1674 unsigned int i;
1675 constructor_elt *elt;
1676 bool constant_p = true;
1677 bool side_effects_p = false;
1678
1679 TREE_TYPE (c) = type;
1680 CONSTRUCTOR_ELTS (c) = vals;
1681
1682 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1683 {
1684 /* Mostly ctors will have elts that don't have side-effects, so
1685 the usual case is to scan all the elements. Hence a single
1686 loop for both const and side effects, rather than one loop
1687 each (with early outs). */
1688 if (!TREE_CONSTANT (elt->value))
1689 constant_p = false;
1690 if (TREE_SIDE_EFFECTS (elt->value))
1691 side_effects_p = true;
1692 }
1693
1694 TREE_SIDE_EFFECTS (c) = side_effects_p;
1695 TREE_CONSTANT (c) = constant_p;
1696
1697 return c;
1698 }
1699
1700 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1701 INDEX and VALUE. */
1702 tree
1703 build_constructor_single (tree type, tree index, tree value)
1704 {
1705 vec<constructor_elt, va_gc> *v;
1706 constructor_elt elt = {index, value};
1707
1708 vec_alloc (v, 1);
1709 v->quick_push (elt);
1710
1711 return build_constructor (type, v);
1712 }
1713
1714
1715 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1716 are in a list pointed to by VALS. */
1717 tree
1718 build_constructor_from_list (tree type, tree vals)
1719 {
1720 tree t;
1721 vec<constructor_elt, va_gc> *v = NULL;
1722
1723 if (vals)
1724 {
1725 vec_alloc (v, list_length (vals));
1726 for (t = vals; t; t = TREE_CHAIN (t))
1727 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1728 }
1729
1730 return build_constructor (type, v);
1731 }
1732
1733 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1734 of elements, provided as index/value pairs. */
1735
1736 tree
1737 build_constructor_va (tree type, int nelts, ...)
1738 {
1739 vec<constructor_elt, va_gc> *v = NULL;
1740 va_list p;
1741
1742 va_start (p, nelts);
1743 vec_alloc (v, nelts);
1744 while (nelts--)
1745 {
1746 tree index = va_arg (p, tree);
1747 tree value = va_arg (p, tree);
1748 CONSTRUCTOR_APPEND_ELT (v, index, value);
1749 }
1750 va_end (p);
1751 return build_constructor (type, v);
1752 }
1753
1754 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1755
1756 tree
1757 build_fixed (tree type, FIXED_VALUE_TYPE f)
1758 {
1759 tree v;
1760 FIXED_VALUE_TYPE *fp;
1761
1762 v = make_node (FIXED_CST);
1763 fp = ggc_alloc<fixed_value> ();
1764 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1765
1766 TREE_TYPE (v) = type;
1767 TREE_FIXED_CST_PTR (v) = fp;
1768 return v;
1769 }
1770
1771 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1772
1773 tree
1774 build_real (tree type, REAL_VALUE_TYPE d)
1775 {
1776 tree v;
1777 REAL_VALUE_TYPE *dp;
1778 int overflow = 0;
1779
1780 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1781 Consider doing it via real_convert now. */
1782
1783 v = make_node (REAL_CST);
1784 dp = ggc_alloc<real_value> ();
1785 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1786
1787 TREE_TYPE (v) = type;
1788 TREE_REAL_CST_PTR (v) = dp;
1789 TREE_OVERFLOW (v) = overflow;
1790 return v;
1791 }
1792
1793 /* Return a new REAL_CST node whose type is TYPE
1794 and whose value is the integer value of the INTEGER_CST node I. */
1795
1796 REAL_VALUE_TYPE
1797 real_value_from_int_cst (const_tree type, const_tree i)
1798 {
1799 REAL_VALUE_TYPE d;
1800
1801 /* Clear all bits of the real value type so that we can later do
1802 bitwise comparisons to see if two values are the same. */
1803 memset (&d, 0, sizeof d);
1804
1805 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1806 TYPE_SIGN (TREE_TYPE (i)));
1807 return d;
1808 }
1809
1810 /* Given a tree representing an integer constant I, return a tree
1811 representing the same value as a floating-point constant of type TYPE. */
1812
1813 tree
1814 build_real_from_int_cst (tree type, const_tree i)
1815 {
1816 tree v;
1817 int overflow = TREE_OVERFLOW (i);
1818
1819 v = build_real (type, real_value_from_int_cst (type, i));
1820
1821 TREE_OVERFLOW (v) |= overflow;
1822 return v;
1823 }
1824
1825 /* Return a newly constructed STRING_CST node whose value is
1826 the LEN characters at STR.
1827 Note that for a C string literal, LEN should include the trailing NUL.
1828 The TREE_TYPE is not initialized. */
1829
1830 tree
1831 build_string (int len, const char *str)
1832 {
1833 tree s;
1834 size_t length;
1835
1836 /* Do not waste bytes provided by padding of struct tree_string. */
1837 length = len + offsetof (struct tree_string, str) + 1;
1838
1839 record_node_allocation_statistics (STRING_CST, length);
1840
1841 s = (tree) ggc_internal_alloc (length);
1842
1843 memset (s, 0, sizeof (struct tree_typed));
1844 TREE_SET_CODE (s, STRING_CST);
1845 TREE_CONSTANT (s) = 1;
1846 TREE_STRING_LENGTH (s) = len;
1847 memcpy (s->string.str, str, len);
1848 s->string.str[len] = '\0';
1849
1850 return s;
1851 }
1852
1853 /* Return a newly constructed COMPLEX_CST node whose value is
1854 specified by the real and imaginary parts REAL and IMAG.
1855 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1856 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1857
1858 tree
1859 build_complex (tree type, tree real, tree imag)
1860 {
1861 tree t = make_node (COMPLEX_CST);
1862
1863 TREE_REALPART (t) = real;
1864 TREE_IMAGPART (t) = imag;
1865 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1866 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1867 return t;
1868 }
1869
1870 /* Return a constant of arithmetic type TYPE which is the
1871 multiplicative identity of the set TYPE. */
1872
1873 tree
1874 build_one_cst (tree type)
1875 {
1876 switch (TREE_CODE (type))
1877 {
1878 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1879 case POINTER_TYPE: case REFERENCE_TYPE:
1880 case OFFSET_TYPE:
1881 return build_int_cst (type, 1);
1882
1883 case REAL_TYPE:
1884 return build_real (type, dconst1);
1885
1886 case FIXED_POINT_TYPE:
1887 /* We can only generate 1 for accum types. */
1888 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1889 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1890
1891 case VECTOR_TYPE:
1892 {
1893 tree scalar = build_one_cst (TREE_TYPE (type));
1894
1895 return build_vector_from_val (type, scalar);
1896 }
1897
1898 case COMPLEX_TYPE:
1899 return build_complex (type,
1900 build_one_cst (TREE_TYPE (type)),
1901 build_zero_cst (TREE_TYPE (type)));
1902
1903 default:
1904 gcc_unreachable ();
1905 }
1906 }
1907
1908 /* Return an integer of type TYPE containing all 1's in as much precision as
1909 it contains, or a complex or vector whose subparts are such integers. */
1910
1911 tree
1912 build_all_ones_cst (tree type)
1913 {
1914 if (TREE_CODE (type) == COMPLEX_TYPE)
1915 {
1916 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1917 return build_complex (type, scalar, scalar);
1918 }
1919 else
1920 return build_minus_one_cst (type);
1921 }
1922
1923 /* Return a constant of arithmetic type TYPE which is the
1924 opposite of the multiplicative identity of the set TYPE. */
1925
1926 tree
1927 build_minus_one_cst (tree type)
1928 {
1929 switch (TREE_CODE (type))
1930 {
1931 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1932 case POINTER_TYPE: case REFERENCE_TYPE:
1933 case OFFSET_TYPE:
1934 return build_int_cst (type, -1);
1935
1936 case REAL_TYPE:
1937 return build_real (type, dconstm1);
1938
1939 case FIXED_POINT_TYPE:
1940 /* We can only generate 1 for accum types. */
1941 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1942 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1943 TYPE_MODE (type)));
1944
1945 case VECTOR_TYPE:
1946 {
1947 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1948
1949 return build_vector_from_val (type, scalar);
1950 }
1951
1952 case COMPLEX_TYPE:
1953 return build_complex (type,
1954 build_minus_one_cst (TREE_TYPE (type)),
1955 build_zero_cst (TREE_TYPE (type)));
1956
1957 default:
1958 gcc_unreachable ();
1959 }
1960 }
1961
1962 /* Build 0 constant of type TYPE. This is used by constructor folding
1963 and thus the constant should be represented in memory by
1964 zero(es). */
1965
1966 tree
1967 build_zero_cst (tree type)
1968 {
1969 switch (TREE_CODE (type))
1970 {
1971 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1972 case POINTER_TYPE: case REFERENCE_TYPE:
1973 case OFFSET_TYPE: case NULLPTR_TYPE:
1974 return build_int_cst (type, 0);
1975
1976 case REAL_TYPE:
1977 return build_real (type, dconst0);
1978
1979 case FIXED_POINT_TYPE:
1980 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1981
1982 case VECTOR_TYPE:
1983 {
1984 tree scalar = build_zero_cst (TREE_TYPE (type));
1985
1986 return build_vector_from_val (type, scalar);
1987 }
1988
1989 case COMPLEX_TYPE:
1990 {
1991 tree zero = build_zero_cst (TREE_TYPE (type));
1992
1993 return build_complex (type, zero, zero);
1994 }
1995
1996 default:
1997 if (!AGGREGATE_TYPE_P (type))
1998 return fold_convert (type, integer_zero_node);
1999 return build_constructor (type, NULL);
2000 }
2001 }
2002
2003
2004 /* Build a BINFO with LEN language slots. */
2005
2006 tree
2007 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2008 {
2009 tree t;
2010 size_t length = (offsetof (struct tree_binfo, base_binfos)
2011 + vec<tree, va_gc>::embedded_size (base_binfos));
2012
2013 record_node_allocation_statistics (TREE_BINFO, length);
2014
2015 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2016
2017 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2018
2019 TREE_SET_CODE (t, TREE_BINFO);
2020
2021 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2022
2023 return t;
2024 }
2025
2026 /* Create a CASE_LABEL_EXPR tree node and return it. */
2027
2028 tree
2029 build_case_label (tree low_value, tree high_value, tree label_decl)
2030 {
2031 tree t = make_node (CASE_LABEL_EXPR);
2032
2033 TREE_TYPE (t) = void_type_node;
2034 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2035
2036 CASE_LOW (t) = low_value;
2037 CASE_HIGH (t) = high_value;
2038 CASE_LABEL (t) = label_decl;
2039 CASE_CHAIN (t) = NULL_TREE;
2040
2041 return t;
2042 }
2043
2044 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2045 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2046 The latter determines the length of the HOST_WIDE_INT vector. */
2047
2048 tree
2049 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2050 {
2051 tree t;
2052 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2053 + sizeof (struct tree_int_cst));
2054
2055 gcc_assert (len);
2056 record_node_allocation_statistics (INTEGER_CST, length);
2057
2058 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2059
2060 TREE_SET_CODE (t, INTEGER_CST);
2061 TREE_INT_CST_NUNITS (t) = len;
2062 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2063 /* to_offset can only be applied to trees that are offset_int-sized
2064 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2065 must be exactly the precision of offset_int and so LEN is correct. */
2066 if (ext_len <= OFFSET_INT_ELTS)
2067 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2068 else
2069 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2070
2071 TREE_CONSTANT (t) = 1;
2072
2073 return t;
2074 }
2075
2076 /* Build a newly constructed TREE_VEC node of length LEN. */
2077
2078 tree
2079 make_tree_vec_stat (int len MEM_STAT_DECL)
2080 {
2081 tree t;
2082 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2083
2084 record_node_allocation_statistics (TREE_VEC, length);
2085
2086 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2087
2088 TREE_SET_CODE (t, TREE_VEC);
2089 TREE_VEC_LENGTH (t) = len;
2090
2091 return t;
2092 }
2093
2094 /* Grow a TREE_VEC node to new length LEN. */
2095
2096 tree
2097 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2098 {
2099 gcc_assert (TREE_CODE (v) == TREE_VEC);
2100
2101 int oldlen = TREE_VEC_LENGTH (v);
2102 gcc_assert (len > oldlen);
2103
2104 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2105 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2106
2107 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2108
2109 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2110
2111 TREE_VEC_LENGTH (v) = len;
2112
2113 return v;
2114 }
2115 \f
2116 /* Return 1 if EXPR is the integer constant zero or a complex constant
2117 of zero. */
2118
2119 int
2120 integer_zerop (const_tree expr)
2121 {
2122 STRIP_NOPS (expr);
2123
2124 switch (TREE_CODE (expr))
2125 {
2126 case INTEGER_CST:
2127 return wi::eq_p (expr, 0);
2128 case COMPLEX_CST:
2129 return (integer_zerop (TREE_REALPART (expr))
2130 && integer_zerop (TREE_IMAGPART (expr)));
2131 case VECTOR_CST:
2132 {
2133 unsigned i;
2134 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2135 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2136 return false;
2137 return true;
2138 }
2139 default:
2140 return false;
2141 }
2142 }
2143
2144 /* Return 1 if EXPR is the integer constant one or the corresponding
2145 complex constant. */
2146
2147 int
2148 integer_onep (const_tree expr)
2149 {
2150 STRIP_NOPS (expr);
2151
2152 switch (TREE_CODE (expr))
2153 {
2154 case INTEGER_CST:
2155 return wi::eq_p (wi::to_widest (expr), 1);
2156 case COMPLEX_CST:
2157 return (integer_onep (TREE_REALPART (expr))
2158 && integer_zerop (TREE_IMAGPART (expr)));
2159 case VECTOR_CST:
2160 {
2161 unsigned i;
2162 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2163 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2164 return false;
2165 return true;
2166 }
2167 default:
2168 return false;
2169 }
2170 }
2171
2172 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2173 return 1 if every piece is the integer constant one. */
2174
2175 int
2176 integer_each_onep (const_tree expr)
2177 {
2178 STRIP_NOPS (expr);
2179
2180 if (TREE_CODE (expr) == COMPLEX_CST)
2181 return (integer_onep (TREE_REALPART (expr))
2182 && integer_onep (TREE_IMAGPART (expr)));
2183 else
2184 return integer_onep (expr);
2185 }
2186
2187 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2188 it contains, or a complex or vector whose subparts are such integers. */
2189
2190 int
2191 integer_all_onesp (const_tree expr)
2192 {
2193 STRIP_NOPS (expr);
2194
2195 if (TREE_CODE (expr) == COMPLEX_CST
2196 && integer_all_onesp (TREE_REALPART (expr))
2197 && integer_all_onesp (TREE_IMAGPART (expr)))
2198 return 1;
2199
2200 else if (TREE_CODE (expr) == VECTOR_CST)
2201 {
2202 unsigned i;
2203 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2204 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2205 return 0;
2206 return 1;
2207 }
2208
2209 else if (TREE_CODE (expr) != INTEGER_CST)
2210 return 0;
2211
2212 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2213 }
2214
2215 /* Return 1 if EXPR is the integer constant minus one. */
2216
2217 int
2218 integer_minus_onep (const_tree expr)
2219 {
2220 STRIP_NOPS (expr);
2221
2222 if (TREE_CODE (expr) == COMPLEX_CST)
2223 return (integer_all_onesp (TREE_REALPART (expr))
2224 && integer_zerop (TREE_IMAGPART (expr)));
2225 else
2226 return integer_all_onesp (expr);
2227 }
2228
2229 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2230 one bit on). */
2231
2232 int
2233 integer_pow2p (const_tree expr)
2234 {
2235 STRIP_NOPS (expr);
2236
2237 if (TREE_CODE (expr) == COMPLEX_CST
2238 && integer_pow2p (TREE_REALPART (expr))
2239 && integer_zerop (TREE_IMAGPART (expr)))
2240 return 1;
2241
2242 if (TREE_CODE (expr) != INTEGER_CST)
2243 return 0;
2244
2245 return wi::popcount (expr) == 1;
2246 }
2247
2248 /* Return 1 if EXPR is an integer constant other than zero or a
2249 complex constant other than zero. */
2250
2251 int
2252 integer_nonzerop (const_tree expr)
2253 {
2254 STRIP_NOPS (expr);
2255
2256 return ((TREE_CODE (expr) == INTEGER_CST
2257 && !wi::eq_p (expr, 0))
2258 || (TREE_CODE (expr) == COMPLEX_CST
2259 && (integer_nonzerop (TREE_REALPART (expr))
2260 || integer_nonzerop (TREE_IMAGPART (expr)))));
2261 }
2262
2263 /* Return 1 if EXPR is the fixed-point constant zero. */
2264
2265 int
2266 fixed_zerop (const_tree expr)
2267 {
2268 return (TREE_CODE (expr) == FIXED_CST
2269 && TREE_FIXED_CST (expr).data.is_zero ());
2270 }
2271
2272 /* Return the power of two represented by a tree node known to be a
2273 power of two. */
2274
2275 int
2276 tree_log2 (const_tree expr)
2277 {
2278 STRIP_NOPS (expr);
2279
2280 if (TREE_CODE (expr) == COMPLEX_CST)
2281 return tree_log2 (TREE_REALPART (expr));
2282
2283 return wi::exact_log2 (expr);
2284 }
2285
2286 /* Similar, but return the largest integer Y such that 2 ** Y is less
2287 than or equal to EXPR. */
2288
2289 int
2290 tree_floor_log2 (const_tree expr)
2291 {
2292 STRIP_NOPS (expr);
2293
2294 if (TREE_CODE (expr) == COMPLEX_CST)
2295 return tree_log2 (TREE_REALPART (expr));
2296
2297 return wi::floor_log2 (expr);
2298 }
2299
2300 /* Return number of known trailing zero bits in EXPR, or, if the value of
2301 EXPR is known to be zero, the precision of it's type. */
2302
2303 unsigned int
2304 tree_ctz (const_tree expr)
2305 {
2306 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2307 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2308 return 0;
2309
2310 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2311 switch (TREE_CODE (expr))
2312 {
2313 case INTEGER_CST:
2314 ret1 = wi::ctz (expr);
2315 return MIN (ret1, prec);
2316 case SSA_NAME:
2317 ret1 = wi::ctz (get_nonzero_bits (expr));
2318 return MIN (ret1, prec);
2319 case PLUS_EXPR:
2320 case MINUS_EXPR:
2321 case BIT_IOR_EXPR:
2322 case BIT_XOR_EXPR:
2323 case MIN_EXPR:
2324 case MAX_EXPR:
2325 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2326 if (ret1 == 0)
2327 return ret1;
2328 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2329 return MIN (ret1, ret2);
2330 case POINTER_PLUS_EXPR:
2331 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2332 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2333 /* Second operand is sizetype, which could be in theory
2334 wider than pointer's precision. Make sure we never
2335 return more than prec. */
2336 ret2 = MIN (ret2, prec);
2337 return MIN (ret1, ret2);
2338 case BIT_AND_EXPR:
2339 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2340 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2341 return MAX (ret1, ret2);
2342 case MULT_EXPR:
2343 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2344 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2345 return MIN (ret1 + ret2, prec);
2346 case LSHIFT_EXPR:
2347 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2348 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2349 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2350 {
2351 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2352 return MIN (ret1 + ret2, prec);
2353 }
2354 return ret1;
2355 case RSHIFT_EXPR:
2356 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2357 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2358 {
2359 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2360 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2361 if (ret1 > ret2)
2362 return ret1 - ret2;
2363 }
2364 return 0;
2365 case TRUNC_DIV_EXPR:
2366 case CEIL_DIV_EXPR:
2367 case FLOOR_DIV_EXPR:
2368 case ROUND_DIV_EXPR:
2369 case EXACT_DIV_EXPR:
2370 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2371 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2372 {
2373 int l = tree_log2 (TREE_OPERAND (expr, 1));
2374 if (l >= 0)
2375 {
2376 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2377 ret2 = l;
2378 if (ret1 > ret2)
2379 return ret1 - ret2;
2380 }
2381 }
2382 return 0;
2383 CASE_CONVERT:
2384 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2385 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2386 ret1 = prec;
2387 return MIN (ret1, prec);
2388 case SAVE_EXPR:
2389 return tree_ctz (TREE_OPERAND (expr, 0));
2390 case COND_EXPR:
2391 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2392 if (ret1 == 0)
2393 return 0;
2394 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2395 return MIN (ret1, ret2);
2396 case COMPOUND_EXPR:
2397 return tree_ctz (TREE_OPERAND (expr, 1));
2398 case ADDR_EXPR:
2399 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2400 if (ret1 > BITS_PER_UNIT)
2401 {
2402 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2403 return MIN (ret1, prec);
2404 }
2405 return 0;
2406 default:
2407 return 0;
2408 }
2409 }
2410
2411 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2412 decimal float constants, so don't return 1 for them. */
2413
2414 int
2415 real_zerop (const_tree expr)
2416 {
2417 STRIP_NOPS (expr);
2418
2419 switch (TREE_CODE (expr))
2420 {
2421 case REAL_CST:
2422 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2423 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2424 case COMPLEX_CST:
2425 return real_zerop (TREE_REALPART (expr))
2426 && real_zerop (TREE_IMAGPART (expr));
2427 case VECTOR_CST:
2428 {
2429 unsigned i;
2430 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2431 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2432 return false;
2433 return true;
2434 }
2435 default:
2436 return false;
2437 }
2438 }
2439
2440 /* Return 1 if EXPR is the real constant one in real or complex form.
2441 Trailing zeroes matter for decimal float constants, so don't return
2442 1 for them. */
2443
2444 int
2445 real_onep (const_tree expr)
2446 {
2447 STRIP_NOPS (expr);
2448
2449 switch (TREE_CODE (expr))
2450 {
2451 case REAL_CST:
2452 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2453 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2454 case COMPLEX_CST:
2455 return real_onep (TREE_REALPART (expr))
2456 && real_zerop (TREE_IMAGPART (expr));
2457 case VECTOR_CST:
2458 {
2459 unsigned i;
2460 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2461 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2462 return false;
2463 return true;
2464 }
2465 default:
2466 return false;
2467 }
2468 }
2469
2470 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2471 matter for decimal float constants, so don't return 1 for them. */
2472
2473 int
2474 real_minus_onep (const_tree expr)
2475 {
2476 STRIP_NOPS (expr);
2477
2478 switch (TREE_CODE (expr))
2479 {
2480 case REAL_CST:
2481 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2482 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2483 case COMPLEX_CST:
2484 return real_minus_onep (TREE_REALPART (expr))
2485 && real_zerop (TREE_IMAGPART (expr));
2486 case VECTOR_CST:
2487 {
2488 unsigned i;
2489 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2490 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2491 return false;
2492 return true;
2493 }
2494 default:
2495 return false;
2496 }
2497 }
2498
2499 /* Nonzero if EXP is a constant or a cast of a constant. */
2500
2501 int
2502 really_constant_p (const_tree exp)
2503 {
2504 /* This is not quite the same as STRIP_NOPS. It does more. */
2505 while (CONVERT_EXPR_P (exp)
2506 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2507 exp = TREE_OPERAND (exp, 0);
2508 return TREE_CONSTANT (exp);
2509 }
2510 \f
2511 /* Return first list element whose TREE_VALUE is ELEM.
2512 Return 0 if ELEM is not in LIST. */
2513
2514 tree
2515 value_member (tree elem, tree list)
2516 {
2517 while (list)
2518 {
2519 if (elem == TREE_VALUE (list))
2520 return list;
2521 list = TREE_CHAIN (list);
2522 }
2523 return NULL_TREE;
2524 }
2525
2526 /* Return first list element whose TREE_PURPOSE is ELEM.
2527 Return 0 if ELEM is not in LIST. */
2528
2529 tree
2530 purpose_member (const_tree elem, tree list)
2531 {
2532 while (list)
2533 {
2534 if (elem == TREE_PURPOSE (list))
2535 return list;
2536 list = TREE_CHAIN (list);
2537 }
2538 return NULL_TREE;
2539 }
2540
2541 /* Return true if ELEM is in V. */
2542
2543 bool
2544 vec_member (const_tree elem, vec<tree, va_gc> *v)
2545 {
2546 unsigned ix;
2547 tree t;
2548 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2549 if (elem == t)
2550 return true;
2551 return false;
2552 }
2553
2554 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2555 NULL_TREE. */
2556
2557 tree
2558 chain_index (int idx, tree chain)
2559 {
2560 for (; chain && idx > 0; --idx)
2561 chain = TREE_CHAIN (chain);
2562 return chain;
2563 }
2564
2565 /* Return nonzero if ELEM is part of the chain CHAIN. */
2566
2567 int
2568 chain_member (const_tree elem, const_tree chain)
2569 {
2570 while (chain)
2571 {
2572 if (elem == chain)
2573 return 1;
2574 chain = DECL_CHAIN (chain);
2575 }
2576
2577 return 0;
2578 }
2579
2580 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2581 We expect a null pointer to mark the end of the chain.
2582 This is the Lisp primitive `length'. */
2583
2584 int
2585 list_length (const_tree t)
2586 {
2587 const_tree p = t;
2588 #ifdef ENABLE_TREE_CHECKING
2589 const_tree q = t;
2590 #endif
2591 int len = 0;
2592
2593 while (p)
2594 {
2595 p = TREE_CHAIN (p);
2596 #ifdef ENABLE_TREE_CHECKING
2597 if (len % 2)
2598 q = TREE_CHAIN (q);
2599 gcc_assert (p != q);
2600 #endif
2601 len++;
2602 }
2603
2604 return len;
2605 }
2606
2607 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2608 UNION_TYPE TYPE, or NULL_TREE if none. */
2609
2610 tree
2611 first_field (const_tree type)
2612 {
2613 tree t = TYPE_FIELDS (type);
2614 while (t && TREE_CODE (t) != FIELD_DECL)
2615 t = TREE_CHAIN (t);
2616 return t;
2617 }
2618
2619 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2620 by modifying the last node in chain 1 to point to chain 2.
2621 This is the Lisp primitive `nconc'. */
2622
2623 tree
2624 chainon (tree op1, tree op2)
2625 {
2626 tree t1;
2627
2628 if (!op1)
2629 return op2;
2630 if (!op2)
2631 return op1;
2632
2633 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2634 continue;
2635 TREE_CHAIN (t1) = op2;
2636
2637 #ifdef ENABLE_TREE_CHECKING
2638 {
2639 tree t2;
2640 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2641 gcc_assert (t2 != t1);
2642 }
2643 #endif
2644
2645 return op1;
2646 }
2647
2648 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2649
2650 tree
2651 tree_last (tree chain)
2652 {
2653 tree next;
2654 if (chain)
2655 while ((next = TREE_CHAIN (chain)))
2656 chain = next;
2657 return chain;
2658 }
2659
2660 /* Reverse the order of elements in the chain T,
2661 and return the new head of the chain (old last element). */
2662
2663 tree
2664 nreverse (tree t)
2665 {
2666 tree prev = 0, decl, next;
2667 for (decl = t; decl; decl = next)
2668 {
2669 /* We shouldn't be using this function to reverse BLOCK chains; we
2670 have blocks_nreverse for that. */
2671 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2672 next = TREE_CHAIN (decl);
2673 TREE_CHAIN (decl) = prev;
2674 prev = decl;
2675 }
2676 return prev;
2677 }
2678 \f
2679 /* Return a newly created TREE_LIST node whose
2680 purpose and value fields are PARM and VALUE. */
2681
2682 tree
2683 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2684 {
2685 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2686 TREE_PURPOSE (t) = parm;
2687 TREE_VALUE (t) = value;
2688 return t;
2689 }
2690
2691 /* Build a chain of TREE_LIST nodes from a vector. */
2692
2693 tree
2694 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2695 {
2696 tree ret = NULL_TREE;
2697 tree *pp = &ret;
2698 unsigned int i;
2699 tree t;
2700 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2701 {
2702 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2703 pp = &TREE_CHAIN (*pp);
2704 }
2705 return ret;
2706 }
2707
2708 /* Return a newly created TREE_LIST node whose
2709 purpose and value fields are PURPOSE and VALUE
2710 and whose TREE_CHAIN is CHAIN. */
2711
2712 tree
2713 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2714 {
2715 tree node;
2716
2717 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2718 memset (node, 0, sizeof (struct tree_common));
2719
2720 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2721
2722 TREE_SET_CODE (node, TREE_LIST);
2723 TREE_CHAIN (node) = chain;
2724 TREE_PURPOSE (node) = purpose;
2725 TREE_VALUE (node) = value;
2726 return node;
2727 }
2728
2729 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2730 trees. */
2731
2732 vec<tree, va_gc> *
2733 ctor_to_vec (tree ctor)
2734 {
2735 vec<tree, va_gc> *vec;
2736 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2737 unsigned int ix;
2738 tree val;
2739
2740 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2741 vec->quick_push (val);
2742
2743 return vec;
2744 }
2745 \f
2746 /* Return the size nominally occupied by an object of type TYPE
2747 when it resides in memory. The value is measured in units of bytes,
2748 and its data type is that normally used for type sizes
2749 (which is the first type created by make_signed_type or
2750 make_unsigned_type). */
2751
2752 tree
2753 size_in_bytes (const_tree type)
2754 {
2755 tree t;
2756
2757 if (type == error_mark_node)
2758 return integer_zero_node;
2759
2760 type = TYPE_MAIN_VARIANT (type);
2761 t = TYPE_SIZE_UNIT (type);
2762
2763 if (t == 0)
2764 {
2765 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2766 return size_zero_node;
2767 }
2768
2769 return t;
2770 }
2771
2772 /* Return the size of TYPE (in bytes) as a wide integer
2773 or return -1 if the size can vary or is larger than an integer. */
2774
2775 HOST_WIDE_INT
2776 int_size_in_bytes (const_tree type)
2777 {
2778 tree t;
2779
2780 if (type == error_mark_node)
2781 return 0;
2782
2783 type = TYPE_MAIN_VARIANT (type);
2784 t = TYPE_SIZE_UNIT (type);
2785
2786 if (t && tree_fits_uhwi_p (t))
2787 return TREE_INT_CST_LOW (t);
2788 else
2789 return -1;
2790 }
2791
2792 /* Return the maximum size of TYPE (in bytes) as a wide integer
2793 or return -1 if the size can vary or is larger than an integer. */
2794
2795 HOST_WIDE_INT
2796 max_int_size_in_bytes (const_tree type)
2797 {
2798 HOST_WIDE_INT size = -1;
2799 tree size_tree;
2800
2801 /* If this is an array type, check for a possible MAX_SIZE attached. */
2802
2803 if (TREE_CODE (type) == ARRAY_TYPE)
2804 {
2805 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2806
2807 if (size_tree && tree_fits_uhwi_p (size_tree))
2808 size = tree_to_uhwi (size_tree);
2809 }
2810
2811 /* If we still haven't been able to get a size, see if the language
2812 can compute a maximum size. */
2813
2814 if (size == -1)
2815 {
2816 size_tree = lang_hooks.types.max_size (type);
2817
2818 if (size_tree && tree_fits_uhwi_p (size_tree))
2819 size = tree_to_uhwi (size_tree);
2820 }
2821
2822 return size;
2823 }
2824 \f
2825 /* Return the bit position of FIELD, in bits from the start of the record.
2826 This is a tree of type bitsizetype. */
2827
2828 tree
2829 bit_position (const_tree field)
2830 {
2831 return bit_from_pos (DECL_FIELD_OFFSET (field),
2832 DECL_FIELD_BIT_OFFSET (field));
2833 }
2834
2835 /* Likewise, but return as an integer. It must be representable in
2836 that way (since it could be a signed value, we don't have the
2837 option of returning -1 like int_size_in_byte can. */
2838
2839 HOST_WIDE_INT
2840 int_bit_position (const_tree field)
2841 {
2842 return tree_to_shwi (bit_position (field));
2843 }
2844 \f
2845 /* Return the byte position of FIELD, in bytes from the start of the record.
2846 This is a tree of type sizetype. */
2847
2848 tree
2849 byte_position (const_tree field)
2850 {
2851 return byte_from_pos (DECL_FIELD_OFFSET (field),
2852 DECL_FIELD_BIT_OFFSET (field));
2853 }
2854
2855 /* Likewise, but return as an integer. It must be representable in
2856 that way (since it could be a signed value, we don't have the
2857 option of returning -1 like int_size_in_byte can. */
2858
2859 HOST_WIDE_INT
2860 int_byte_position (const_tree field)
2861 {
2862 return tree_to_shwi (byte_position (field));
2863 }
2864 \f
2865 /* Return the strictest alignment, in bits, that T is known to have. */
2866
2867 unsigned int
2868 expr_align (const_tree t)
2869 {
2870 unsigned int align0, align1;
2871
2872 switch (TREE_CODE (t))
2873 {
2874 CASE_CONVERT: case NON_LVALUE_EXPR:
2875 /* If we have conversions, we know that the alignment of the
2876 object must meet each of the alignments of the types. */
2877 align0 = expr_align (TREE_OPERAND (t, 0));
2878 align1 = TYPE_ALIGN (TREE_TYPE (t));
2879 return MAX (align0, align1);
2880
2881 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2882 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2883 case CLEANUP_POINT_EXPR:
2884 /* These don't change the alignment of an object. */
2885 return expr_align (TREE_OPERAND (t, 0));
2886
2887 case COND_EXPR:
2888 /* The best we can do is say that the alignment is the least aligned
2889 of the two arms. */
2890 align0 = expr_align (TREE_OPERAND (t, 1));
2891 align1 = expr_align (TREE_OPERAND (t, 2));
2892 return MIN (align0, align1);
2893
2894 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2895 meaningfully, it's always 1. */
2896 case LABEL_DECL: case CONST_DECL:
2897 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2898 case FUNCTION_DECL:
2899 gcc_assert (DECL_ALIGN (t) != 0);
2900 return DECL_ALIGN (t);
2901
2902 default:
2903 break;
2904 }
2905
2906 /* Otherwise take the alignment from that of the type. */
2907 return TYPE_ALIGN (TREE_TYPE (t));
2908 }
2909 \f
2910 /* Return, as a tree node, the number of elements for TYPE (which is an
2911 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2912
2913 tree
2914 array_type_nelts (const_tree type)
2915 {
2916 tree index_type, min, max;
2917
2918 /* If they did it with unspecified bounds, then we should have already
2919 given an error about it before we got here. */
2920 if (! TYPE_DOMAIN (type))
2921 return error_mark_node;
2922
2923 index_type = TYPE_DOMAIN (type);
2924 min = TYPE_MIN_VALUE (index_type);
2925 max = TYPE_MAX_VALUE (index_type);
2926
2927 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2928 if (!max)
2929 return error_mark_node;
2930
2931 return (integer_zerop (min)
2932 ? max
2933 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2934 }
2935 \f
2936 /* If arg is static -- a reference to an object in static storage -- then
2937 return the object. This is not the same as the C meaning of `static'.
2938 If arg isn't static, return NULL. */
2939
2940 tree
2941 staticp (tree arg)
2942 {
2943 switch (TREE_CODE (arg))
2944 {
2945 case FUNCTION_DECL:
2946 /* Nested functions are static, even though taking their address will
2947 involve a trampoline as we unnest the nested function and create
2948 the trampoline on the tree level. */
2949 return arg;
2950
2951 case VAR_DECL:
2952 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2953 && ! DECL_THREAD_LOCAL_P (arg)
2954 && ! DECL_DLLIMPORT_P (arg)
2955 ? arg : NULL);
2956
2957 case CONST_DECL:
2958 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2959 ? arg : NULL);
2960
2961 case CONSTRUCTOR:
2962 return TREE_STATIC (arg) ? arg : NULL;
2963
2964 case LABEL_DECL:
2965 case STRING_CST:
2966 return arg;
2967
2968 case COMPONENT_REF:
2969 /* If the thing being referenced is not a field, then it is
2970 something language specific. */
2971 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2972
2973 /* If we are referencing a bitfield, we can't evaluate an
2974 ADDR_EXPR at compile time and so it isn't a constant. */
2975 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2976 return NULL;
2977
2978 return staticp (TREE_OPERAND (arg, 0));
2979
2980 case BIT_FIELD_REF:
2981 return NULL;
2982
2983 case INDIRECT_REF:
2984 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2985
2986 case ARRAY_REF:
2987 case ARRAY_RANGE_REF:
2988 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2989 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2990 return staticp (TREE_OPERAND (arg, 0));
2991 else
2992 return NULL;
2993
2994 case COMPOUND_LITERAL_EXPR:
2995 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2996
2997 default:
2998 return NULL;
2999 }
3000 }
3001
3002 \f
3003
3004
3005 /* Return whether OP is a DECL whose address is function-invariant. */
3006
3007 bool
3008 decl_address_invariant_p (const_tree op)
3009 {
3010 /* The conditions below are slightly less strict than the one in
3011 staticp. */
3012
3013 switch (TREE_CODE (op))
3014 {
3015 case PARM_DECL:
3016 case RESULT_DECL:
3017 case LABEL_DECL:
3018 case FUNCTION_DECL:
3019 return true;
3020
3021 case VAR_DECL:
3022 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3023 || DECL_THREAD_LOCAL_P (op)
3024 || DECL_CONTEXT (op) == current_function_decl
3025 || decl_function_context (op) == current_function_decl)
3026 return true;
3027 break;
3028
3029 case CONST_DECL:
3030 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3031 || decl_function_context (op) == current_function_decl)
3032 return true;
3033 break;
3034
3035 default:
3036 break;
3037 }
3038
3039 return false;
3040 }
3041
3042 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3043
3044 bool
3045 decl_address_ip_invariant_p (const_tree op)
3046 {
3047 /* The conditions below are slightly less strict than the one in
3048 staticp. */
3049
3050 switch (TREE_CODE (op))
3051 {
3052 case LABEL_DECL:
3053 case FUNCTION_DECL:
3054 case STRING_CST:
3055 return true;
3056
3057 case VAR_DECL:
3058 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3059 && !DECL_DLLIMPORT_P (op))
3060 || DECL_THREAD_LOCAL_P (op))
3061 return true;
3062 break;
3063
3064 case CONST_DECL:
3065 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3066 return true;
3067 break;
3068
3069 default:
3070 break;
3071 }
3072
3073 return false;
3074 }
3075
3076
3077 /* Return true if T is function-invariant (internal function, does
3078 not handle arithmetic; that's handled in skip_simple_arithmetic and
3079 tree_invariant_p). */
3080
3081 static bool tree_invariant_p (tree t);
3082
3083 static bool
3084 tree_invariant_p_1 (tree t)
3085 {
3086 tree op;
3087
3088 if (TREE_CONSTANT (t)
3089 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3090 return true;
3091
3092 switch (TREE_CODE (t))
3093 {
3094 case SAVE_EXPR:
3095 return true;
3096
3097 case ADDR_EXPR:
3098 op = TREE_OPERAND (t, 0);
3099 while (handled_component_p (op))
3100 {
3101 switch (TREE_CODE (op))
3102 {
3103 case ARRAY_REF:
3104 case ARRAY_RANGE_REF:
3105 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3106 || TREE_OPERAND (op, 2) != NULL_TREE
3107 || TREE_OPERAND (op, 3) != NULL_TREE)
3108 return false;
3109 break;
3110
3111 case COMPONENT_REF:
3112 if (TREE_OPERAND (op, 2) != NULL_TREE)
3113 return false;
3114 break;
3115
3116 default:;
3117 }
3118 op = TREE_OPERAND (op, 0);
3119 }
3120
3121 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3122
3123 default:
3124 break;
3125 }
3126
3127 return false;
3128 }
3129
3130 /* Return true if T is function-invariant. */
3131
3132 static bool
3133 tree_invariant_p (tree t)
3134 {
3135 tree inner = skip_simple_arithmetic (t);
3136 return tree_invariant_p_1 (inner);
3137 }
3138
3139 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3140 Do this to any expression which may be used in more than one place,
3141 but must be evaluated only once.
3142
3143 Normally, expand_expr would reevaluate the expression each time.
3144 Calling save_expr produces something that is evaluated and recorded
3145 the first time expand_expr is called on it. Subsequent calls to
3146 expand_expr just reuse the recorded value.
3147
3148 The call to expand_expr that generates code that actually computes
3149 the value is the first call *at compile time*. Subsequent calls
3150 *at compile time* generate code to use the saved value.
3151 This produces correct result provided that *at run time* control
3152 always flows through the insns made by the first expand_expr
3153 before reaching the other places where the save_expr was evaluated.
3154 You, the caller of save_expr, must make sure this is so.
3155
3156 Constants, and certain read-only nodes, are returned with no
3157 SAVE_EXPR because that is safe. Expressions containing placeholders
3158 are not touched; see tree.def for an explanation of what these
3159 are used for. */
3160
3161 tree
3162 save_expr (tree expr)
3163 {
3164 tree t = fold (expr);
3165 tree inner;
3166
3167 /* If the tree evaluates to a constant, then we don't want to hide that
3168 fact (i.e. this allows further folding, and direct checks for constants).
3169 However, a read-only object that has side effects cannot be bypassed.
3170 Since it is no problem to reevaluate literals, we just return the
3171 literal node. */
3172 inner = skip_simple_arithmetic (t);
3173 if (TREE_CODE (inner) == ERROR_MARK)
3174 return inner;
3175
3176 if (tree_invariant_p_1 (inner))
3177 return t;
3178
3179 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3180 it means that the size or offset of some field of an object depends on
3181 the value within another field.
3182
3183 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3184 and some variable since it would then need to be both evaluated once and
3185 evaluated more than once. Front-ends must assure this case cannot
3186 happen by surrounding any such subexpressions in their own SAVE_EXPR
3187 and forcing evaluation at the proper time. */
3188 if (contains_placeholder_p (inner))
3189 return t;
3190
3191 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3192 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3193
3194 /* This expression might be placed ahead of a jump to ensure that the
3195 value was computed on both sides of the jump. So make sure it isn't
3196 eliminated as dead. */
3197 TREE_SIDE_EFFECTS (t) = 1;
3198 return t;
3199 }
3200
3201 /* Look inside EXPR into any simple arithmetic operations. Return the
3202 outermost non-arithmetic or non-invariant node. */
3203
3204 tree
3205 skip_simple_arithmetic (tree expr)
3206 {
3207 /* We don't care about whether this can be used as an lvalue in this
3208 context. */
3209 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3210 expr = TREE_OPERAND (expr, 0);
3211
3212 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3213 a constant, it will be more efficient to not make another SAVE_EXPR since
3214 it will allow better simplification and GCSE will be able to merge the
3215 computations if they actually occur. */
3216 while (true)
3217 {
3218 if (UNARY_CLASS_P (expr))
3219 expr = TREE_OPERAND (expr, 0);
3220 else if (BINARY_CLASS_P (expr))
3221 {
3222 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3223 expr = TREE_OPERAND (expr, 0);
3224 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3225 expr = TREE_OPERAND (expr, 1);
3226 else
3227 break;
3228 }
3229 else
3230 break;
3231 }
3232
3233 return expr;
3234 }
3235
3236 /* Look inside EXPR into simple arithmetic operations involving constants.
3237 Return the outermost non-arithmetic or non-constant node. */
3238
3239 tree
3240 skip_simple_constant_arithmetic (tree expr)
3241 {
3242 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3243 expr = TREE_OPERAND (expr, 0);
3244
3245 while (true)
3246 {
3247 if (UNARY_CLASS_P (expr))
3248 expr = TREE_OPERAND (expr, 0);
3249 else if (BINARY_CLASS_P (expr))
3250 {
3251 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3252 expr = TREE_OPERAND (expr, 0);
3253 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3254 expr = TREE_OPERAND (expr, 1);
3255 else
3256 break;
3257 }
3258 else
3259 break;
3260 }
3261
3262 return expr;
3263 }
3264
3265 /* Return which tree structure is used by T. */
3266
3267 enum tree_node_structure_enum
3268 tree_node_structure (const_tree t)
3269 {
3270 const enum tree_code code = TREE_CODE (t);
3271 return tree_node_structure_for_code (code);
3272 }
3273
3274 /* Set various status flags when building a CALL_EXPR object T. */
3275
3276 static void
3277 process_call_operands (tree t)
3278 {
3279 bool side_effects = TREE_SIDE_EFFECTS (t);
3280 bool read_only = false;
3281 int i = call_expr_flags (t);
3282
3283 /* Calls have side-effects, except those to const or pure functions. */
3284 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3285 side_effects = true;
3286 /* Propagate TREE_READONLY of arguments for const functions. */
3287 if (i & ECF_CONST)
3288 read_only = true;
3289
3290 if (!side_effects || read_only)
3291 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3292 {
3293 tree op = TREE_OPERAND (t, i);
3294 if (op && TREE_SIDE_EFFECTS (op))
3295 side_effects = true;
3296 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3297 read_only = false;
3298 }
3299
3300 TREE_SIDE_EFFECTS (t) = side_effects;
3301 TREE_READONLY (t) = read_only;
3302 }
3303 \f
3304 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3305 size or offset that depends on a field within a record. */
3306
3307 bool
3308 contains_placeholder_p (const_tree exp)
3309 {
3310 enum tree_code code;
3311
3312 if (!exp)
3313 return 0;
3314
3315 code = TREE_CODE (exp);
3316 if (code == PLACEHOLDER_EXPR)
3317 return 1;
3318
3319 switch (TREE_CODE_CLASS (code))
3320 {
3321 case tcc_reference:
3322 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3323 position computations since they will be converted into a
3324 WITH_RECORD_EXPR involving the reference, which will assume
3325 here will be valid. */
3326 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3327
3328 case tcc_exceptional:
3329 if (code == TREE_LIST)
3330 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3331 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3332 break;
3333
3334 case tcc_unary:
3335 case tcc_binary:
3336 case tcc_comparison:
3337 case tcc_expression:
3338 switch (code)
3339 {
3340 case COMPOUND_EXPR:
3341 /* Ignoring the first operand isn't quite right, but works best. */
3342 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3343
3344 case COND_EXPR:
3345 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3346 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3347 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3348
3349 case SAVE_EXPR:
3350 /* The save_expr function never wraps anything containing
3351 a PLACEHOLDER_EXPR. */
3352 return 0;
3353
3354 default:
3355 break;
3356 }
3357
3358 switch (TREE_CODE_LENGTH (code))
3359 {
3360 case 1:
3361 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3362 case 2:
3363 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3364 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3365 default:
3366 return 0;
3367 }
3368
3369 case tcc_vl_exp:
3370 switch (code)
3371 {
3372 case CALL_EXPR:
3373 {
3374 const_tree arg;
3375 const_call_expr_arg_iterator iter;
3376 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3377 if (CONTAINS_PLACEHOLDER_P (arg))
3378 return 1;
3379 return 0;
3380 }
3381 default:
3382 return 0;
3383 }
3384
3385 default:
3386 return 0;
3387 }
3388 return 0;
3389 }
3390
3391 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3392 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3393 field positions. */
3394
3395 static bool
3396 type_contains_placeholder_1 (const_tree type)
3397 {
3398 /* If the size contains a placeholder or the parent type (component type in
3399 the case of arrays) type involves a placeholder, this type does. */
3400 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3401 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3402 || (!POINTER_TYPE_P (type)
3403 && TREE_TYPE (type)
3404 && type_contains_placeholder_p (TREE_TYPE (type))))
3405 return true;
3406
3407 /* Now do type-specific checks. Note that the last part of the check above
3408 greatly limits what we have to do below. */
3409 switch (TREE_CODE (type))
3410 {
3411 case VOID_TYPE:
3412 case COMPLEX_TYPE:
3413 case ENUMERAL_TYPE:
3414 case BOOLEAN_TYPE:
3415 case POINTER_TYPE:
3416 case OFFSET_TYPE:
3417 case REFERENCE_TYPE:
3418 case METHOD_TYPE:
3419 case FUNCTION_TYPE:
3420 case VECTOR_TYPE:
3421 case NULLPTR_TYPE:
3422 return false;
3423
3424 case INTEGER_TYPE:
3425 case REAL_TYPE:
3426 case FIXED_POINT_TYPE:
3427 /* Here we just check the bounds. */
3428 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3429 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3430
3431 case ARRAY_TYPE:
3432 /* We have already checked the component type above, so just check the
3433 domain type. */
3434 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3435
3436 case RECORD_TYPE:
3437 case UNION_TYPE:
3438 case QUAL_UNION_TYPE:
3439 {
3440 tree field;
3441
3442 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3443 if (TREE_CODE (field) == FIELD_DECL
3444 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3445 || (TREE_CODE (type) == QUAL_UNION_TYPE
3446 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3447 || type_contains_placeholder_p (TREE_TYPE (field))))
3448 return true;
3449
3450 return false;
3451 }
3452
3453 default:
3454 gcc_unreachable ();
3455 }
3456 }
3457
3458 /* Wrapper around above function used to cache its result. */
3459
3460 bool
3461 type_contains_placeholder_p (tree type)
3462 {
3463 bool result;
3464
3465 /* If the contains_placeholder_bits field has been initialized,
3466 then we know the answer. */
3467 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3468 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3469
3470 /* Indicate that we've seen this type node, and the answer is false.
3471 This is what we want to return if we run into recursion via fields. */
3472 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3473
3474 /* Compute the real value. */
3475 result = type_contains_placeholder_1 (type);
3476
3477 /* Store the real value. */
3478 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3479
3480 return result;
3481 }
3482 \f
3483 /* Push tree EXP onto vector QUEUE if it is not already present. */
3484
3485 static void
3486 push_without_duplicates (tree exp, vec<tree> *queue)
3487 {
3488 unsigned int i;
3489 tree iter;
3490
3491 FOR_EACH_VEC_ELT (*queue, i, iter)
3492 if (simple_cst_equal (iter, exp) == 1)
3493 break;
3494
3495 if (!iter)
3496 queue->safe_push (exp);
3497 }
3498
3499 /* Given a tree EXP, find all occurrences of references to fields
3500 in a PLACEHOLDER_EXPR and place them in vector REFS without
3501 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3502 we assume here that EXP contains only arithmetic expressions
3503 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3504 argument list. */
3505
3506 void
3507 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3508 {
3509 enum tree_code code = TREE_CODE (exp);
3510 tree inner;
3511 int i;
3512
3513 /* We handle TREE_LIST and COMPONENT_REF separately. */
3514 if (code == TREE_LIST)
3515 {
3516 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3517 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3518 }
3519 else if (code == COMPONENT_REF)
3520 {
3521 for (inner = TREE_OPERAND (exp, 0);
3522 REFERENCE_CLASS_P (inner);
3523 inner = TREE_OPERAND (inner, 0))
3524 ;
3525
3526 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3527 push_without_duplicates (exp, refs);
3528 else
3529 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3530 }
3531 else
3532 switch (TREE_CODE_CLASS (code))
3533 {
3534 case tcc_constant:
3535 break;
3536
3537 case tcc_declaration:
3538 /* Variables allocated to static storage can stay. */
3539 if (!TREE_STATIC (exp))
3540 push_without_duplicates (exp, refs);
3541 break;
3542
3543 case tcc_expression:
3544 /* This is the pattern built in ada/make_aligning_type. */
3545 if (code == ADDR_EXPR
3546 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3547 {
3548 push_without_duplicates (exp, refs);
3549 break;
3550 }
3551
3552 /* Fall through... */
3553
3554 case tcc_exceptional:
3555 case tcc_unary:
3556 case tcc_binary:
3557 case tcc_comparison:
3558 case tcc_reference:
3559 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3560 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3561 break;
3562
3563 case tcc_vl_exp:
3564 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3565 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3566 break;
3567
3568 default:
3569 gcc_unreachable ();
3570 }
3571 }
3572
3573 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3574 return a tree with all occurrences of references to F in a
3575 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3576 CONST_DECLs. Note that we assume here that EXP contains only
3577 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3578 occurring only in their argument list. */
3579
3580 tree
3581 substitute_in_expr (tree exp, tree f, tree r)
3582 {
3583 enum tree_code code = TREE_CODE (exp);
3584 tree op0, op1, op2, op3;
3585 tree new_tree;
3586
3587 /* We handle TREE_LIST and COMPONENT_REF separately. */
3588 if (code == TREE_LIST)
3589 {
3590 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3591 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3592 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3593 return exp;
3594
3595 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3596 }
3597 else if (code == COMPONENT_REF)
3598 {
3599 tree inner;
3600
3601 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3602 and it is the right field, replace it with R. */
3603 for (inner = TREE_OPERAND (exp, 0);
3604 REFERENCE_CLASS_P (inner);
3605 inner = TREE_OPERAND (inner, 0))
3606 ;
3607
3608 /* The field. */
3609 op1 = TREE_OPERAND (exp, 1);
3610
3611 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3612 return r;
3613
3614 /* If this expression hasn't been completed let, leave it alone. */
3615 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3616 return exp;
3617
3618 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3619 if (op0 == TREE_OPERAND (exp, 0))
3620 return exp;
3621
3622 new_tree
3623 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3624 }
3625 else
3626 switch (TREE_CODE_CLASS (code))
3627 {
3628 case tcc_constant:
3629 return exp;
3630
3631 case tcc_declaration:
3632 if (exp == f)
3633 return r;
3634 else
3635 return exp;
3636
3637 case tcc_expression:
3638 if (exp == f)
3639 return r;
3640
3641 /* Fall through... */
3642
3643 case tcc_exceptional:
3644 case tcc_unary:
3645 case tcc_binary:
3646 case tcc_comparison:
3647 case tcc_reference:
3648 switch (TREE_CODE_LENGTH (code))
3649 {
3650 case 0:
3651 return exp;
3652
3653 case 1:
3654 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3655 if (op0 == TREE_OPERAND (exp, 0))
3656 return exp;
3657
3658 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3659 break;
3660
3661 case 2:
3662 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3663 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3664
3665 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3666 return exp;
3667
3668 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3669 break;
3670
3671 case 3:
3672 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3673 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3674 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3675
3676 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3677 && op2 == TREE_OPERAND (exp, 2))
3678 return exp;
3679
3680 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3681 break;
3682
3683 case 4:
3684 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3685 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3686 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3687 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3688
3689 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3690 && op2 == TREE_OPERAND (exp, 2)
3691 && op3 == TREE_OPERAND (exp, 3))
3692 return exp;
3693
3694 new_tree
3695 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3696 break;
3697
3698 default:
3699 gcc_unreachable ();
3700 }
3701 break;
3702
3703 case tcc_vl_exp:
3704 {
3705 int i;
3706
3707 new_tree = NULL_TREE;
3708
3709 /* If we are trying to replace F with a constant, inline back
3710 functions which do nothing else than computing a value from
3711 the arguments they are passed. This makes it possible to
3712 fold partially or entirely the replacement expression. */
3713 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3714 {
3715 tree t = maybe_inline_call_in_expr (exp);
3716 if (t)
3717 return SUBSTITUTE_IN_EXPR (t, f, r);
3718 }
3719
3720 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3721 {
3722 tree op = TREE_OPERAND (exp, i);
3723 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3724 if (new_op != op)
3725 {
3726 if (!new_tree)
3727 new_tree = copy_node (exp);
3728 TREE_OPERAND (new_tree, i) = new_op;
3729 }
3730 }
3731
3732 if (new_tree)
3733 {
3734 new_tree = fold (new_tree);
3735 if (TREE_CODE (new_tree) == CALL_EXPR)
3736 process_call_operands (new_tree);
3737 }
3738 else
3739 return exp;
3740 }
3741 break;
3742
3743 default:
3744 gcc_unreachable ();
3745 }
3746
3747 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3748
3749 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3750 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3751
3752 return new_tree;
3753 }
3754
3755 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3756 for it within OBJ, a tree that is an object or a chain of references. */
3757
3758 tree
3759 substitute_placeholder_in_expr (tree exp, tree obj)
3760 {
3761 enum tree_code code = TREE_CODE (exp);
3762 tree op0, op1, op2, op3;
3763 tree new_tree;
3764
3765 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3766 in the chain of OBJ. */
3767 if (code == PLACEHOLDER_EXPR)
3768 {
3769 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3770 tree elt;
3771
3772 for (elt = obj; elt != 0;
3773 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3774 || TREE_CODE (elt) == COND_EXPR)
3775 ? TREE_OPERAND (elt, 1)
3776 : (REFERENCE_CLASS_P (elt)
3777 || UNARY_CLASS_P (elt)
3778 || BINARY_CLASS_P (elt)
3779 || VL_EXP_CLASS_P (elt)
3780 || EXPRESSION_CLASS_P (elt))
3781 ? TREE_OPERAND (elt, 0) : 0))
3782 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3783 return elt;
3784
3785 for (elt = obj; elt != 0;
3786 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3787 || TREE_CODE (elt) == COND_EXPR)
3788 ? TREE_OPERAND (elt, 1)
3789 : (REFERENCE_CLASS_P (elt)
3790 || UNARY_CLASS_P (elt)
3791 || BINARY_CLASS_P (elt)
3792 || VL_EXP_CLASS_P (elt)
3793 || EXPRESSION_CLASS_P (elt))
3794 ? TREE_OPERAND (elt, 0) : 0))
3795 if (POINTER_TYPE_P (TREE_TYPE (elt))
3796 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3797 == need_type))
3798 return fold_build1 (INDIRECT_REF, need_type, elt);
3799
3800 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3801 survives until RTL generation, there will be an error. */
3802 return exp;
3803 }
3804
3805 /* TREE_LIST is special because we need to look at TREE_VALUE
3806 and TREE_CHAIN, not TREE_OPERANDS. */
3807 else if (code == TREE_LIST)
3808 {
3809 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3810 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3811 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3812 return exp;
3813
3814 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3815 }
3816 else
3817 switch (TREE_CODE_CLASS (code))
3818 {
3819 case tcc_constant:
3820 case tcc_declaration:
3821 return exp;
3822
3823 case tcc_exceptional:
3824 case tcc_unary:
3825 case tcc_binary:
3826 case tcc_comparison:
3827 case tcc_expression:
3828 case tcc_reference:
3829 case tcc_statement:
3830 switch (TREE_CODE_LENGTH (code))
3831 {
3832 case 0:
3833 return exp;
3834
3835 case 1:
3836 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3837 if (op0 == TREE_OPERAND (exp, 0))
3838 return exp;
3839
3840 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3841 break;
3842
3843 case 2:
3844 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3845 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3846
3847 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3848 return exp;
3849
3850 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3851 break;
3852
3853 case 3:
3854 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3855 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3856 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3857
3858 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3859 && op2 == TREE_OPERAND (exp, 2))
3860 return exp;
3861
3862 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3863 break;
3864
3865 case 4:
3866 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3867 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3868 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3869 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3870
3871 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3872 && op2 == TREE_OPERAND (exp, 2)
3873 && op3 == TREE_OPERAND (exp, 3))
3874 return exp;
3875
3876 new_tree
3877 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3878 break;
3879
3880 default:
3881 gcc_unreachable ();
3882 }
3883 break;
3884
3885 case tcc_vl_exp:
3886 {
3887 int i;
3888
3889 new_tree = NULL_TREE;
3890
3891 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3892 {
3893 tree op = TREE_OPERAND (exp, i);
3894 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3895 if (new_op != op)
3896 {
3897 if (!new_tree)
3898 new_tree = copy_node (exp);
3899 TREE_OPERAND (new_tree, i) = new_op;
3900 }
3901 }
3902
3903 if (new_tree)
3904 {
3905 new_tree = fold (new_tree);
3906 if (TREE_CODE (new_tree) == CALL_EXPR)
3907 process_call_operands (new_tree);
3908 }
3909 else
3910 return exp;
3911 }
3912 break;
3913
3914 default:
3915 gcc_unreachable ();
3916 }
3917
3918 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3919
3920 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3921 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3922
3923 return new_tree;
3924 }
3925 \f
3926
3927 /* Subroutine of stabilize_reference; this is called for subtrees of
3928 references. Any expression with side-effects must be put in a SAVE_EXPR
3929 to ensure that it is only evaluated once.
3930
3931 We don't put SAVE_EXPR nodes around everything, because assigning very
3932 simple expressions to temporaries causes us to miss good opportunities
3933 for optimizations. Among other things, the opportunity to fold in the
3934 addition of a constant into an addressing mode often gets lost, e.g.
3935 "y[i+1] += x;". In general, we take the approach that we should not make
3936 an assignment unless we are forced into it - i.e., that any non-side effect
3937 operator should be allowed, and that cse should take care of coalescing
3938 multiple utterances of the same expression should that prove fruitful. */
3939
3940 static tree
3941 stabilize_reference_1 (tree e)
3942 {
3943 tree result;
3944 enum tree_code code = TREE_CODE (e);
3945
3946 /* We cannot ignore const expressions because it might be a reference
3947 to a const array but whose index contains side-effects. But we can
3948 ignore things that are actual constant or that already have been
3949 handled by this function. */
3950
3951 if (tree_invariant_p (e))
3952 return e;
3953
3954 switch (TREE_CODE_CLASS (code))
3955 {
3956 case tcc_exceptional:
3957 case tcc_type:
3958 case tcc_declaration:
3959 case tcc_comparison:
3960 case tcc_statement:
3961 case tcc_expression:
3962 case tcc_reference:
3963 case tcc_vl_exp:
3964 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3965 so that it will only be evaluated once. */
3966 /* The reference (r) and comparison (<) classes could be handled as
3967 below, but it is generally faster to only evaluate them once. */
3968 if (TREE_SIDE_EFFECTS (e))
3969 return save_expr (e);
3970 return e;
3971
3972 case tcc_constant:
3973 /* Constants need no processing. In fact, we should never reach
3974 here. */
3975 return e;
3976
3977 case tcc_binary:
3978 /* Division is slow and tends to be compiled with jumps,
3979 especially the division by powers of 2 that is often
3980 found inside of an array reference. So do it just once. */
3981 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3982 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3983 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3984 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3985 return save_expr (e);
3986 /* Recursively stabilize each operand. */
3987 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3988 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3989 break;
3990
3991 case tcc_unary:
3992 /* Recursively stabilize each operand. */
3993 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3994 break;
3995
3996 default:
3997 gcc_unreachable ();
3998 }
3999
4000 TREE_TYPE (result) = TREE_TYPE (e);
4001 TREE_READONLY (result) = TREE_READONLY (e);
4002 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4003 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4004
4005 return result;
4006 }
4007
4008 /* Stabilize a reference so that we can use it any number of times
4009 without causing its operands to be evaluated more than once.
4010 Returns the stabilized reference. This works by means of save_expr,
4011 so see the caveats in the comments about save_expr.
4012
4013 Also allows conversion expressions whose operands are references.
4014 Any other kind of expression is returned unchanged. */
4015
4016 tree
4017 stabilize_reference (tree ref)
4018 {
4019 tree result;
4020 enum tree_code code = TREE_CODE (ref);
4021
4022 switch (code)
4023 {
4024 case VAR_DECL:
4025 case PARM_DECL:
4026 case RESULT_DECL:
4027 /* No action is needed in this case. */
4028 return ref;
4029
4030 CASE_CONVERT:
4031 case FLOAT_EXPR:
4032 case FIX_TRUNC_EXPR:
4033 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4034 break;
4035
4036 case INDIRECT_REF:
4037 result = build_nt (INDIRECT_REF,
4038 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4039 break;
4040
4041 case COMPONENT_REF:
4042 result = build_nt (COMPONENT_REF,
4043 stabilize_reference (TREE_OPERAND (ref, 0)),
4044 TREE_OPERAND (ref, 1), NULL_TREE);
4045 break;
4046
4047 case BIT_FIELD_REF:
4048 result = build_nt (BIT_FIELD_REF,
4049 stabilize_reference (TREE_OPERAND (ref, 0)),
4050 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4051 break;
4052
4053 case ARRAY_REF:
4054 result = build_nt (ARRAY_REF,
4055 stabilize_reference (TREE_OPERAND (ref, 0)),
4056 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4057 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4058 break;
4059
4060 case ARRAY_RANGE_REF:
4061 result = build_nt (ARRAY_RANGE_REF,
4062 stabilize_reference (TREE_OPERAND (ref, 0)),
4063 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4064 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4065 break;
4066
4067 case COMPOUND_EXPR:
4068 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4069 it wouldn't be ignored. This matters when dealing with
4070 volatiles. */
4071 return stabilize_reference_1 (ref);
4072
4073 /* If arg isn't a kind of lvalue we recognize, make no change.
4074 Caller should recognize the error for an invalid lvalue. */
4075 default:
4076 return ref;
4077
4078 case ERROR_MARK:
4079 return error_mark_node;
4080 }
4081
4082 TREE_TYPE (result) = TREE_TYPE (ref);
4083 TREE_READONLY (result) = TREE_READONLY (ref);
4084 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4085 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4086
4087 return result;
4088 }
4089 \f
4090 /* Low-level constructors for expressions. */
4091
4092 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4093 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4094
4095 void
4096 recompute_tree_invariant_for_addr_expr (tree t)
4097 {
4098 tree node;
4099 bool tc = true, se = false;
4100
4101 /* We started out assuming this address is both invariant and constant, but
4102 does not have side effects. Now go down any handled components and see if
4103 any of them involve offsets that are either non-constant or non-invariant.
4104 Also check for side-effects.
4105
4106 ??? Note that this code makes no attempt to deal with the case where
4107 taking the address of something causes a copy due to misalignment. */
4108
4109 #define UPDATE_FLAGS(NODE) \
4110 do { tree _node = (NODE); \
4111 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4112 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4113
4114 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4115 node = TREE_OPERAND (node, 0))
4116 {
4117 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4118 array reference (probably made temporarily by the G++ front end),
4119 so ignore all the operands. */
4120 if ((TREE_CODE (node) == ARRAY_REF
4121 || TREE_CODE (node) == ARRAY_RANGE_REF)
4122 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4123 {
4124 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4125 if (TREE_OPERAND (node, 2))
4126 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4127 if (TREE_OPERAND (node, 3))
4128 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4129 }
4130 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4131 FIELD_DECL, apparently. The G++ front end can put something else
4132 there, at least temporarily. */
4133 else if (TREE_CODE (node) == COMPONENT_REF
4134 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4135 {
4136 if (TREE_OPERAND (node, 2))
4137 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4138 }
4139 }
4140
4141 node = lang_hooks.expr_to_decl (node, &tc, &se);
4142
4143 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4144 the address, since &(*a)->b is a form of addition. If it's a constant, the
4145 address is constant too. If it's a decl, its address is constant if the
4146 decl is static. Everything else is not constant and, furthermore,
4147 taking the address of a volatile variable is not volatile. */
4148 if (TREE_CODE (node) == INDIRECT_REF
4149 || TREE_CODE (node) == MEM_REF)
4150 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4151 else if (CONSTANT_CLASS_P (node))
4152 ;
4153 else if (DECL_P (node))
4154 tc &= (staticp (node) != NULL_TREE);
4155 else
4156 {
4157 tc = false;
4158 se |= TREE_SIDE_EFFECTS (node);
4159 }
4160
4161
4162 TREE_CONSTANT (t) = tc;
4163 TREE_SIDE_EFFECTS (t) = se;
4164 #undef UPDATE_FLAGS
4165 }
4166
4167 /* Build an expression of code CODE, data type TYPE, and operands as
4168 specified. Expressions and reference nodes can be created this way.
4169 Constants, decls, types and misc nodes cannot be.
4170
4171 We define 5 non-variadic functions, from 0 to 4 arguments. This is
4172 enough for all extant tree codes. */
4173
4174 tree
4175 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4176 {
4177 tree t;
4178
4179 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4180
4181 t = make_node_stat (code PASS_MEM_STAT);
4182 TREE_TYPE (t) = tt;
4183
4184 return t;
4185 }
4186
4187 tree
4188 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4189 {
4190 int length = sizeof (struct tree_exp);
4191 tree t;
4192
4193 record_node_allocation_statistics (code, length);
4194
4195 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4196
4197 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4198
4199 memset (t, 0, sizeof (struct tree_common));
4200
4201 TREE_SET_CODE (t, code);
4202
4203 TREE_TYPE (t) = type;
4204 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4205 TREE_OPERAND (t, 0) = node;
4206 if (node && !TYPE_P (node))
4207 {
4208 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4209 TREE_READONLY (t) = TREE_READONLY (node);
4210 }
4211
4212 if (TREE_CODE_CLASS (code) == tcc_statement)
4213 TREE_SIDE_EFFECTS (t) = 1;
4214 else switch (code)
4215 {
4216 case VA_ARG_EXPR:
4217 /* All of these have side-effects, no matter what their
4218 operands are. */
4219 TREE_SIDE_EFFECTS (t) = 1;
4220 TREE_READONLY (t) = 0;
4221 break;
4222
4223 case INDIRECT_REF:
4224 /* Whether a dereference is readonly has nothing to do with whether
4225 its operand is readonly. */
4226 TREE_READONLY (t) = 0;
4227 break;
4228
4229 case ADDR_EXPR:
4230 if (node)
4231 recompute_tree_invariant_for_addr_expr (t);
4232 break;
4233
4234 default:
4235 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4236 && node && !TYPE_P (node)
4237 && TREE_CONSTANT (node))
4238 TREE_CONSTANT (t) = 1;
4239 if (TREE_CODE_CLASS (code) == tcc_reference
4240 && node && TREE_THIS_VOLATILE (node))
4241 TREE_THIS_VOLATILE (t) = 1;
4242 break;
4243 }
4244
4245 return t;
4246 }
4247
4248 #define PROCESS_ARG(N) \
4249 do { \
4250 TREE_OPERAND (t, N) = arg##N; \
4251 if (arg##N &&!TYPE_P (arg##N)) \
4252 { \
4253 if (TREE_SIDE_EFFECTS (arg##N)) \
4254 side_effects = 1; \
4255 if (!TREE_READONLY (arg##N) \
4256 && !CONSTANT_CLASS_P (arg##N)) \
4257 (void) (read_only = 0); \
4258 if (!TREE_CONSTANT (arg##N)) \
4259 (void) (constant = 0); \
4260 } \
4261 } while (0)
4262
4263 tree
4264 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4265 {
4266 bool constant, read_only, side_effects;
4267 tree t;
4268
4269 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4270
4271 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4272 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4273 /* When sizetype precision doesn't match that of pointers
4274 we need to be able to build explicit extensions or truncations
4275 of the offset argument. */
4276 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4277 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4278 && TREE_CODE (arg1) == INTEGER_CST);
4279
4280 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4281 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4282 && ptrofftype_p (TREE_TYPE (arg1)));
4283
4284 t = make_node_stat (code PASS_MEM_STAT);
4285 TREE_TYPE (t) = tt;
4286
4287 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4288 result based on those same flags for the arguments. But if the
4289 arguments aren't really even `tree' expressions, we shouldn't be trying
4290 to do this. */
4291
4292 /* Expressions without side effects may be constant if their
4293 arguments are as well. */
4294 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4295 || TREE_CODE_CLASS (code) == tcc_binary);
4296 read_only = 1;
4297 side_effects = TREE_SIDE_EFFECTS (t);
4298
4299 PROCESS_ARG (0);
4300 PROCESS_ARG (1);
4301
4302 TREE_READONLY (t) = read_only;
4303 TREE_CONSTANT (t) = constant;
4304 TREE_SIDE_EFFECTS (t) = side_effects;
4305 TREE_THIS_VOLATILE (t)
4306 = (TREE_CODE_CLASS (code) == tcc_reference
4307 && arg0 && TREE_THIS_VOLATILE (arg0));
4308
4309 return t;
4310 }
4311
4312
4313 tree
4314 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4315 tree arg2 MEM_STAT_DECL)
4316 {
4317 bool constant, read_only, side_effects;
4318 tree t;
4319
4320 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4321 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4322
4323 t = make_node_stat (code PASS_MEM_STAT);
4324 TREE_TYPE (t) = tt;
4325
4326 read_only = 1;
4327
4328 /* As a special exception, if COND_EXPR has NULL branches, we
4329 assume that it is a gimple statement and always consider
4330 it to have side effects. */
4331 if (code == COND_EXPR
4332 && tt == void_type_node
4333 && arg1 == NULL_TREE
4334 && arg2 == NULL_TREE)
4335 side_effects = true;
4336 else
4337 side_effects = TREE_SIDE_EFFECTS (t);
4338
4339 PROCESS_ARG (0);
4340 PROCESS_ARG (1);
4341 PROCESS_ARG (2);
4342
4343 if (code == COND_EXPR)
4344 TREE_READONLY (t) = read_only;
4345
4346 TREE_SIDE_EFFECTS (t) = side_effects;
4347 TREE_THIS_VOLATILE (t)
4348 = (TREE_CODE_CLASS (code) == tcc_reference
4349 && arg0 && TREE_THIS_VOLATILE (arg0));
4350
4351 return t;
4352 }
4353
4354 tree
4355 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4356 tree arg2, tree arg3 MEM_STAT_DECL)
4357 {
4358 bool constant, read_only, side_effects;
4359 tree t;
4360
4361 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4362
4363 t = make_node_stat (code PASS_MEM_STAT);
4364 TREE_TYPE (t) = tt;
4365
4366 side_effects = TREE_SIDE_EFFECTS (t);
4367
4368 PROCESS_ARG (0);
4369 PROCESS_ARG (1);
4370 PROCESS_ARG (2);
4371 PROCESS_ARG (3);
4372
4373 TREE_SIDE_EFFECTS (t) = side_effects;
4374 TREE_THIS_VOLATILE (t)
4375 = (TREE_CODE_CLASS (code) == tcc_reference
4376 && arg0 && TREE_THIS_VOLATILE (arg0));
4377
4378 return t;
4379 }
4380
4381 tree
4382 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4383 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4384 {
4385 bool constant, read_only, side_effects;
4386 tree t;
4387
4388 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4389
4390 t = make_node_stat (code PASS_MEM_STAT);
4391 TREE_TYPE (t) = tt;
4392
4393 side_effects = TREE_SIDE_EFFECTS (t);
4394
4395 PROCESS_ARG (0);
4396 PROCESS_ARG (1);
4397 PROCESS_ARG (2);
4398 PROCESS_ARG (3);
4399 PROCESS_ARG (4);
4400
4401 TREE_SIDE_EFFECTS (t) = side_effects;
4402 TREE_THIS_VOLATILE (t)
4403 = (TREE_CODE_CLASS (code) == tcc_reference
4404 && arg0 && TREE_THIS_VOLATILE (arg0));
4405
4406 return t;
4407 }
4408
4409 /* Build a simple MEM_REF tree with the sematics of a plain INDIRECT_REF
4410 on the pointer PTR. */
4411
4412 tree
4413 build_simple_mem_ref_loc (location_t loc, tree ptr)
4414 {
4415 HOST_WIDE_INT offset = 0;
4416 tree ptype = TREE_TYPE (ptr);
4417 tree tem;
4418 /* For convenience allow addresses that collapse to a simple base
4419 and offset. */
4420 if (TREE_CODE (ptr) == ADDR_EXPR
4421 && (handled_component_p (TREE_OPERAND (ptr, 0))
4422 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4423 {
4424 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4425 gcc_assert (ptr);
4426 ptr = build_fold_addr_expr (ptr);
4427 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4428 }
4429 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4430 ptr, build_int_cst (ptype, offset));
4431 SET_EXPR_LOCATION (tem, loc);
4432 return tem;
4433 }
4434
4435 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4436
4437 offset_int
4438 mem_ref_offset (const_tree t)
4439 {
4440 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4441 }
4442
4443 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4444 offsetted by OFFSET units. */
4445
4446 tree
4447 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4448 {
4449 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4450 build_fold_addr_expr (base),
4451 build_int_cst (ptr_type_node, offset));
4452 tree addr = build1 (ADDR_EXPR, type, ref);
4453 recompute_tree_invariant_for_addr_expr (addr);
4454 return addr;
4455 }
4456
4457 /* Similar except don't specify the TREE_TYPE
4458 and leave the TREE_SIDE_EFFECTS as 0.
4459 It is permissible for arguments to be null,
4460 or even garbage if their values do not matter. */
4461
4462 tree
4463 build_nt (enum tree_code code, ...)
4464 {
4465 tree t;
4466 int length;
4467 int i;
4468 va_list p;
4469
4470 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4471
4472 va_start (p, code);
4473
4474 t = make_node (code);
4475 length = TREE_CODE_LENGTH (code);
4476
4477 for (i = 0; i < length; i++)
4478 TREE_OPERAND (t, i) = va_arg (p, tree);
4479
4480 va_end (p);
4481 return t;
4482 }
4483
4484 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4485 tree vec. */
4486
4487 tree
4488 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4489 {
4490 tree ret, t;
4491 unsigned int ix;
4492
4493 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4494 CALL_EXPR_FN (ret) = fn;
4495 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4496 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4497 CALL_EXPR_ARG (ret, ix) = t;
4498 return ret;
4499 }
4500 \f
4501 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4502 We do NOT enter this node in any sort of symbol table.
4503
4504 LOC is the location of the decl.
4505
4506 layout_decl is used to set up the decl's storage layout.
4507 Other slots are initialized to 0 or null pointers. */
4508
4509 tree
4510 build_decl_stat (location_t loc, enum tree_code code, tree name,
4511 tree type MEM_STAT_DECL)
4512 {
4513 tree t;
4514
4515 t = make_node_stat (code PASS_MEM_STAT);
4516 DECL_SOURCE_LOCATION (t) = loc;
4517
4518 /* if (type == error_mark_node)
4519 type = integer_type_node; */
4520 /* That is not done, deliberately, so that having error_mark_node
4521 as the type can suppress useless errors in the use of this variable. */
4522
4523 DECL_NAME (t) = name;
4524 TREE_TYPE (t) = type;
4525
4526 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4527 layout_decl (t, 0);
4528
4529 return t;
4530 }
4531
4532 /* Builds and returns function declaration with NAME and TYPE. */
4533
4534 tree
4535 build_fn_decl (const char *name, tree type)
4536 {
4537 tree id = get_identifier (name);
4538 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4539
4540 DECL_EXTERNAL (decl) = 1;
4541 TREE_PUBLIC (decl) = 1;
4542 DECL_ARTIFICIAL (decl) = 1;
4543 TREE_NOTHROW (decl) = 1;
4544
4545 return decl;
4546 }
4547
4548 vec<tree, va_gc> *all_translation_units;
4549
4550 /* Builds a new translation-unit decl with name NAME, queues it in the
4551 global list of translation-unit decls and returns it. */
4552
4553 tree
4554 build_translation_unit_decl (tree name)
4555 {
4556 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4557 name, NULL_TREE);
4558 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4559 vec_safe_push (all_translation_units, tu);
4560 return tu;
4561 }
4562
4563 \f
4564 /* BLOCK nodes are used to represent the structure of binding contours
4565 and declarations, once those contours have been exited and their contents
4566 compiled. This information is used for outputting debugging info. */
4567
4568 tree
4569 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4570 {
4571 tree block = make_node (BLOCK);
4572
4573 BLOCK_VARS (block) = vars;
4574 BLOCK_SUBBLOCKS (block) = subblocks;
4575 BLOCK_SUPERCONTEXT (block) = supercontext;
4576 BLOCK_CHAIN (block) = chain;
4577 return block;
4578 }
4579
4580 \f
4581 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4582
4583 LOC is the location to use in tree T. */
4584
4585 void
4586 protected_set_expr_location (tree t, location_t loc)
4587 {
4588 if (CAN_HAVE_LOCATION_P (t))
4589 SET_EXPR_LOCATION (t, loc);
4590 }
4591 \f
4592 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4593 is ATTRIBUTE. */
4594
4595 tree
4596 build_decl_attribute_variant (tree ddecl, tree attribute)
4597 {
4598 DECL_ATTRIBUTES (ddecl) = attribute;
4599 return ddecl;
4600 }
4601
4602 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4603 is ATTRIBUTE and its qualifiers are QUALS.
4604
4605 Record such modified types already made so we don't make duplicates. */
4606
4607 tree
4608 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4609 {
4610 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4611 {
4612 inchash::hash hstate;
4613 tree ntype;
4614 int i;
4615 tree t;
4616 enum tree_code code = TREE_CODE (ttype);
4617
4618 /* Building a distinct copy of a tagged type is inappropriate; it
4619 causes breakage in code that expects there to be a one-to-one
4620 relationship between a struct and its fields.
4621 build_duplicate_type is another solution (as used in
4622 handle_transparent_union_attribute), but that doesn't play well
4623 with the stronger C++ type identity model. */
4624 if (TREE_CODE (ttype) == RECORD_TYPE
4625 || TREE_CODE (ttype) == UNION_TYPE
4626 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4627 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4628 {
4629 warning (OPT_Wattributes,
4630 "ignoring attributes applied to %qT after definition",
4631 TYPE_MAIN_VARIANT (ttype));
4632 return build_qualified_type (ttype, quals);
4633 }
4634
4635 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4636 ntype = build_distinct_type_copy (ttype);
4637
4638 TYPE_ATTRIBUTES (ntype) = attribute;
4639
4640 hstate.add_int (code);
4641 if (TREE_TYPE (ntype))
4642 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4643 attribute_hash_list (attribute, hstate);
4644
4645 switch (TREE_CODE (ntype))
4646 {
4647 case FUNCTION_TYPE:
4648 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4649 break;
4650 case ARRAY_TYPE:
4651 if (TYPE_DOMAIN (ntype))
4652 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4653 break;
4654 case INTEGER_TYPE:
4655 t = TYPE_MAX_VALUE (ntype);
4656 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4657 hstate.add_object (TREE_INT_CST_ELT (t, i));
4658 break;
4659 case REAL_TYPE:
4660 case FIXED_POINT_TYPE:
4661 {
4662 unsigned int precision = TYPE_PRECISION (ntype);
4663 hstate.add_object (precision);
4664 }
4665 break;
4666 default:
4667 break;
4668 }
4669
4670 ntype = type_hash_canon (hstate.end(), ntype);
4671
4672 /* If the target-dependent attributes make NTYPE different from
4673 its canonical type, we will need to use structural equality
4674 checks for this type. */
4675 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4676 || !comp_type_attributes (ntype, ttype))
4677 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4678 else if (TYPE_CANONICAL (ntype) == ntype)
4679 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4680
4681 ttype = build_qualified_type (ntype, quals);
4682 }
4683 else if (TYPE_QUALS (ttype) != quals)
4684 ttype = build_qualified_type (ttype, quals);
4685
4686 return ttype;
4687 }
4688
4689 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4690 the same. */
4691
4692 static bool
4693 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4694 {
4695 tree cl1, cl2;
4696 for (cl1 = clauses1, cl2 = clauses2;
4697 cl1 && cl2;
4698 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4699 {
4700 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4701 return false;
4702 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4703 {
4704 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4705 OMP_CLAUSE_DECL (cl2)) != 1)
4706 return false;
4707 }
4708 switch (OMP_CLAUSE_CODE (cl1))
4709 {
4710 case OMP_CLAUSE_ALIGNED:
4711 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4712 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4713 return false;
4714 break;
4715 case OMP_CLAUSE_LINEAR:
4716 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4717 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4718 return false;
4719 break;
4720 case OMP_CLAUSE_SIMDLEN:
4721 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4722 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4723 return false;
4724 default:
4725 break;
4726 }
4727 }
4728 return true;
4729 }
4730
4731 /* Compare two constructor-element-type constants. Return 1 if the lists
4732 are known to be equal; otherwise return 0. */
4733
4734 static bool
4735 simple_cst_list_equal (const_tree l1, const_tree l2)
4736 {
4737 while (l1 != NULL_TREE && l2 != NULL_TREE)
4738 {
4739 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4740 return false;
4741
4742 l1 = TREE_CHAIN (l1);
4743 l2 = TREE_CHAIN (l2);
4744 }
4745
4746 return l1 == l2;
4747 }
4748
4749 /* Compare two attributes for their value identity. Return true if the
4750 attribute values are known to be equal; otherwise return false.
4751 */
4752
4753 static bool
4754 attribute_value_equal (const_tree attr1, const_tree attr2)
4755 {
4756 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4757 return true;
4758
4759 if (TREE_VALUE (attr1) != NULL_TREE
4760 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4761 && TREE_VALUE (attr2) != NULL
4762 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4763 return (simple_cst_list_equal (TREE_VALUE (attr1),
4764 TREE_VALUE (attr2)) == 1);
4765
4766 if ((flag_openmp || flag_openmp_simd)
4767 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4768 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4769 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4770 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4771 TREE_VALUE (attr2));
4772
4773 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4774 }
4775
4776 /* Return 0 if the attributes for two types are incompatible, 1 if they
4777 are compatible, and 2 if they are nearly compatible (which causes a
4778 warning to be generated). */
4779 int
4780 comp_type_attributes (const_tree type1, const_tree type2)
4781 {
4782 const_tree a1 = TYPE_ATTRIBUTES (type1);
4783 const_tree a2 = TYPE_ATTRIBUTES (type2);
4784 const_tree a;
4785
4786 if (a1 == a2)
4787 return 1;
4788 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4789 {
4790 const struct attribute_spec *as;
4791 const_tree attr;
4792
4793 as = lookup_attribute_spec (get_attribute_name (a));
4794 if (!as || as->affects_type_identity == false)
4795 continue;
4796
4797 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4798 if (!attr || !attribute_value_equal (a, attr))
4799 break;
4800 }
4801 if (!a)
4802 {
4803 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4804 {
4805 const struct attribute_spec *as;
4806
4807 as = lookup_attribute_spec (get_attribute_name (a));
4808 if (!as || as->affects_type_identity == false)
4809 continue;
4810
4811 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4812 break;
4813 /* We don't need to compare trees again, as we did this
4814 already in first loop. */
4815 }
4816 /* All types - affecting identity - are equal, so
4817 there is no need to call target hook for comparison. */
4818 if (!a)
4819 return 1;
4820 }
4821 /* As some type combinations - like default calling-convention - might
4822 be compatible, we have to call the target hook to get the final result. */
4823 return targetm.comp_type_attributes (type1, type2);
4824 }
4825
4826 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4827 is ATTRIBUTE.
4828
4829 Record such modified types already made so we don't make duplicates. */
4830
4831 tree
4832 build_type_attribute_variant (tree ttype, tree attribute)
4833 {
4834 return build_type_attribute_qual_variant (ttype, attribute,
4835 TYPE_QUALS (ttype));
4836 }
4837
4838
4839 /* Reset the expression *EXPR_P, a size or position.
4840
4841 ??? We could reset all non-constant sizes or positions. But it's cheap
4842 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4843
4844 We need to reset self-referential sizes or positions because they cannot
4845 be gimplified and thus can contain a CALL_EXPR after the gimplification
4846 is finished, which will run afoul of LTO streaming. And they need to be
4847 reset to something essentially dummy but not constant, so as to preserve
4848 the properties of the object they are attached to. */
4849
4850 static inline void
4851 free_lang_data_in_one_sizepos (tree *expr_p)
4852 {
4853 tree expr = *expr_p;
4854 if (CONTAINS_PLACEHOLDER_P (expr))
4855 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4856 }
4857
4858
4859 /* Reset all the fields in a binfo node BINFO. We only keep
4860 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4861
4862 static void
4863 free_lang_data_in_binfo (tree binfo)
4864 {
4865 unsigned i;
4866 tree t;
4867
4868 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4869
4870 BINFO_VIRTUALS (binfo) = NULL_TREE;
4871 BINFO_BASE_ACCESSES (binfo) = NULL;
4872 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4873 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4874
4875 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4876 free_lang_data_in_binfo (t);
4877 }
4878
4879
4880 /* Reset all language specific information still present in TYPE. */
4881
4882 static void
4883 free_lang_data_in_type (tree type)
4884 {
4885 gcc_assert (TYPE_P (type));
4886
4887 /* Give the FE a chance to remove its own data first. */
4888 lang_hooks.free_lang_data (type);
4889
4890 TREE_LANG_FLAG_0 (type) = 0;
4891 TREE_LANG_FLAG_1 (type) = 0;
4892 TREE_LANG_FLAG_2 (type) = 0;
4893 TREE_LANG_FLAG_3 (type) = 0;
4894 TREE_LANG_FLAG_4 (type) = 0;
4895 TREE_LANG_FLAG_5 (type) = 0;
4896 TREE_LANG_FLAG_6 (type) = 0;
4897
4898 if (TREE_CODE (type) == FUNCTION_TYPE)
4899 {
4900 /* Remove the const and volatile qualifiers from arguments. The
4901 C++ front end removes them, but the C front end does not,
4902 leading to false ODR violation errors when merging two
4903 instances of the same function signature compiled by
4904 different front ends. */
4905 tree p;
4906
4907 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4908 {
4909 tree arg_type = TREE_VALUE (p);
4910
4911 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4912 {
4913 int quals = TYPE_QUALS (arg_type)
4914 & ~TYPE_QUAL_CONST
4915 & ~TYPE_QUAL_VOLATILE;
4916 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4917 free_lang_data_in_type (TREE_VALUE (p));
4918 }
4919 }
4920 }
4921
4922 /* Remove members that are not actually FIELD_DECLs from the field
4923 list of an aggregate. These occur in C++. */
4924 if (RECORD_OR_UNION_TYPE_P (type))
4925 {
4926 tree prev, member;
4927
4928 /* Note that TYPE_FIELDS can be shared across distinct
4929 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4930 to be removed, we cannot set its TREE_CHAIN to NULL.
4931 Otherwise, we would not be able to find all the other fields
4932 in the other instances of this TREE_TYPE.
4933
4934 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4935 prev = NULL_TREE;
4936 member = TYPE_FIELDS (type);
4937 while (member)
4938 {
4939 if (TREE_CODE (member) == FIELD_DECL
4940 || TREE_CODE (member) == TYPE_DECL)
4941 {
4942 if (prev)
4943 TREE_CHAIN (prev) = member;
4944 else
4945 TYPE_FIELDS (type) = member;
4946 prev = member;
4947 }
4948
4949 member = TREE_CHAIN (member);
4950 }
4951
4952 if (prev)
4953 TREE_CHAIN (prev) = NULL_TREE;
4954 else
4955 TYPE_FIELDS (type) = NULL_TREE;
4956
4957 TYPE_METHODS (type) = NULL_TREE;
4958 if (TYPE_BINFO (type))
4959 free_lang_data_in_binfo (TYPE_BINFO (type));
4960 }
4961 else
4962 {
4963 /* For non-aggregate types, clear out the language slot (which
4964 overloads TYPE_BINFO). */
4965 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4966
4967 if (INTEGRAL_TYPE_P (type)
4968 || SCALAR_FLOAT_TYPE_P (type)
4969 || FIXED_POINT_TYPE_P (type))
4970 {
4971 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4972 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4973 }
4974 }
4975
4976 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4977 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4978
4979 if (TYPE_CONTEXT (type)
4980 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4981 {
4982 tree ctx = TYPE_CONTEXT (type);
4983 do
4984 {
4985 ctx = BLOCK_SUPERCONTEXT (ctx);
4986 }
4987 while (ctx && TREE_CODE (ctx) == BLOCK);
4988 TYPE_CONTEXT (type) = ctx;
4989 }
4990 }
4991
4992
4993 /* Return true if DECL may need an assembler name to be set. */
4994
4995 static inline bool
4996 need_assembler_name_p (tree decl)
4997 {
4998 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
4999 merging. */
5000 if (flag_lto_odr_type_mering
5001 && TREE_CODE (decl) == TYPE_DECL
5002 && DECL_NAME (decl)
5003 && decl == TYPE_NAME (TREE_TYPE (decl))
5004 && !is_lang_specific (TREE_TYPE (decl))
5005 && AGGREGATE_TYPE_P (TREE_TYPE (decl))
5006 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
5007 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
5008 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5009 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5010 if (TREE_CODE (decl) != FUNCTION_DECL
5011 && TREE_CODE (decl) != VAR_DECL)
5012 return false;
5013
5014 /* If DECL already has its assembler name set, it does not need a
5015 new one. */
5016 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5017 || DECL_ASSEMBLER_NAME_SET_P (decl))
5018 return false;
5019
5020 /* Abstract decls do not need an assembler name. */
5021 if (DECL_ABSTRACT (decl))
5022 return false;
5023
5024 /* For VAR_DECLs, only static, public and external symbols need an
5025 assembler name. */
5026 if (TREE_CODE (decl) == VAR_DECL
5027 && !TREE_STATIC (decl)
5028 && !TREE_PUBLIC (decl)
5029 && !DECL_EXTERNAL (decl))
5030 return false;
5031
5032 if (TREE_CODE (decl) == FUNCTION_DECL)
5033 {
5034 /* Do not set assembler name on builtins. Allow RTL expansion to
5035 decide whether to expand inline or via a regular call. */
5036 if (DECL_BUILT_IN (decl)
5037 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5038 return false;
5039
5040 /* Functions represented in the callgraph need an assembler name. */
5041 if (cgraph_node::get (decl) != NULL)
5042 return true;
5043
5044 /* Unused and not public functions don't need an assembler name. */
5045 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5046 return false;
5047 }
5048
5049 return true;
5050 }
5051
5052
5053 /* Reset all language specific information still present in symbol
5054 DECL. */
5055
5056 static void
5057 free_lang_data_in_decl (tree decl)
5058 {
5059 gcc_assert (DECL_P (decl));
5060
5061 /* Give the FE a chance to remove its own data first. */
5062 lang_hooks.free_lang_data (decl);
5063
5064 TREE_LANG_FLAG_0 (decl) = 0;
5065 TREE_LANG_FLAG_1 (decl) = 0;
5066 TREE_LANG_FLAG_2 (decl) = 0;
5067 TREE_LANG_FLAG_3 (decl) = 0;
5068 TREE_LANG_FLAG_4 (decl) = 0;
5069 TREE_LANG_FLAG_5 (decl) = 0;
5070 TREE_LANG_FLAG_6 (decl) = 0;
5071
5072 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5073 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5074 if (TREE_CODE (decl) == FIELD_DECL)
5075 {
5076 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5077 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5078 DECL_QUALIFIER (decl) = NULL_TREE;
5079 }
5080
5081 if (TREE_CODE (decl) == FUNCTION_DECL)
5082 {
5083 struct cgraph_node *node;
5084 if (!(node = cgraph_node::get (decl))
5085 || (!node->definition && !node->clones))
5086 {
5087 if (node)
5088 node->release_body ();
5089 else
5090 {
5091 release_function_body (decl);
5092 DECL_ARGUMENTS (decl) = NULL;
5093 DECL_RESULT (decl) = NULL;
5094 DECL_INITIAL (decl) = error_mark_node;
5095 }
5096 }
5097 if (gimple_has_body_p (decl))
5098 {
5099 tree t;
5100
5101 /* If DECL has a gimple body, then the context for its
5102 arguments must be DECL. Otherwise, it doesn't really
5103 matter, as we will not be emitting any code for DECL. In
5104 general, there may be other instances of DECL created by
5105 the front end and since PARM_DECLs are generally shared,
5106 their DECL_CONTEXT changes as the replicas of DECL are
5107 created. The only time where DECL_CONTEXT is important
5108 is for the FUNCTION_DECLs that have a gimple body (since
5109 the PARM_DECL will be used in the function's body). */
5110 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5111 DECL_CONTEXT (t) = decl;
5112 }
5113
5114 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5115 At this point, it is not needed anymore. */
5116 DECL_SAVED_TREE (decl) = NULL_TREE;
5117
5118 /* Clear the abstract origin if it refers to a method. Otherwise
5119 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5120 origin will not be output correctly. */
5121 if (DECL_ABSTRACT_ORIGIN (decl)
5122 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5123 && RECORD_OR_UNION_TYPE_P
5124 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5125 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5126
5127 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5128 DECL_VINDEX referring to itself into a vtable slot number as it
5129 should. Happens with functions that are copied and then forgotten
5130 about. Just clear it, it won't matter anymore. */
5131 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5132 DECL_VINDEX (decl) = NULL_TREE;
5133 }
5134 else if (TREE_CODE (decl) == VAR_DECL)
5135 {
5136 if ((DECL_EXTERNAL (decl)
5137 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5138 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5139 DECL_INITIAL (decl) = NULL_TREE;
5140 }
5141 else if (TREE_CODE (decl) == TYPE_DECL
5142 || TREE_CODE (decl) == FIELD_DECL)
5143 DECL_INITIAL (decl) = NULL_TREE;
5144 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5145 && DECL_INITIAL (decl)
5146 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5147 {
5148 /* Strip builtins from the translation-unit BLOCK. We still have targets
5149 without builtin_decl_explicit support and also builtins are shared
5150 nodes and thus we can't use TREE_CHAIN in multiple lists. */
5151 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5152 while (*nextp)
5153 {
5154 tree var = *nextp;
5155 if (TREE_CODE (var) == FUNCTION_DECL
5156 && DECL_BUILT_IN (var))
5157 *nextp = TREE_CHAIN (var);
5158 else
5159 nextp = &TREE_CHAIN (var);
5160 }
5161 }
5162 }
5163
5164
5165 /* Data used when collecting DECLs and TYPEs for language data removal. */
5166
5167 struct free_lang_data_d
5168 {
5169 /* Worklist to avoid excessive recursion. */
5170 vec<tree> worklist;
5171
5172 /* Set of traversed objects. Used to avoid duplicate visits. */
5173 hash_set<tree> *pset;
5174
5175 /* Array of symbols to process with free_lang_data_in_decl. */
5176 vec<tree> decls;
5177
5178 /* Array of types to process with free_lang_data_in_type. */
5179 vec<tree> types;
5180 };
5181
5182
5183 /* Save all language fields needed to generate proper debug information
5184 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5185
5186 static void
5187 save_debug_info_for_decl (tree t)
5188 {
5189 /*struct saved_debug_info_d *sdi;*/
5190
5191 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5192
5193 /* FIXME. Partial implementation for saving debug info removed. */
5194 }
5195
5196
5197 /* Save all language fields needed to generate proper debug information
5198 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5199
5200 static void
5201 save_debug_info_for_type (tree t)
5202 {
5203 /*struct saved_debug_info_d *sdi;*/
5204
5205 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5206
5207 /* FIXME. Partial implementation for saving debug info removed. */
5208 }
5209
5210
5211 /* Add type or decl T to one of the list of tree nodes that need their
5212 language data removed. The lists are held inside FLD. */
5213
5214 static void
5215 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5216 {
5217 if (DECL_P (t))
5218 {
5219 fld->decls.safe_push (t);
5220 if (debug_info_level > DINFO_LEVEL_TERSE)
5221 save_debug_info_for_decl (t);
5222 }
5223 else if (TYPE_P (t))
5224 {
5225 fld->types.safe_push (t);
5226 if (debug_info_level > DINFO_LEVEL_TERSE)
5227 save_debug_info_for_type (t);
5228 }
5229 else
5230 gcc_unreachable ();
5231 }
5232
5233 /* Push tree node T into FLD->WORKLIST. */
5234
5235 static inline void
5236 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5237 {
5238 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5239 fld->worklist.safe_push ((t));
5240 }
5241
5242
5243 /* Operand callback helper for free_lang_data_in_node. *TP is the
5244 subtree operand being considered. */
5245
5246 static tree
5247 find_decls_types_r (tree *tp, int *ws, void *data)
5248 {
5249 tree t = *tp;
5250 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5251
5252 if (TREE_CODE (t) == TREE_LIST)
5253 return NULL_TREE;
5254
5255 /* Language specific nodes will be removed, so there is no need
5256 to gather anything under them. */
5257 if (is_lang_specific (t))
5258 {
5259 *ws = 0;
5260 return NULL_TREE;
5261 }
5262
5263 if (DECL_P (t))
5264 {
5265 /* Note that walk_tree does not traverse every possible field in
5266 decls, so we have to do our own traversals here. */
5267 add_tree_to_fld_list (t, fld);
5268
5269 fld_worklist_push (DECL_NAME (t), fld);
5270 fld_worklist_push (DECL_CONTEXT (t), fld);
5271 fld_worklist_push (DECL_SIZE (t), fld);
5272 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5273
5274 /* We are going to remove everything under DECL_INITIAL for
5275 TYPE_DECLs. No point walking them. */
5276 if (TREE_CODE (t) != TYPE_DECL)
5277 fld_worklist_push (DECL_INITIAL (t), fld);
5278
5279 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5280 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5281
5282 if (TREE_CODE (t) == FUNCTION_DECL)
5283 {
5284 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5285 fld_worklist_push (DECL_RESULT (t), fld);
5286 }
5287 else if (TREE_CODE (t) == TYPE_DECL)
5288 {
5289 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5290 }
5291 else if (TREE_CODE (t) == FIELD_DECL)
5292 {
5293 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5294 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5295 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5296 fld_worklist_push (DECL_FCONTEXT (t), fld);
5297 }
5298
5299 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5300 && DECL_HAS_VALUE_EXPR_P (t))
5301 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5302
5303 if (TREE_CODE (t) != FIELD_DECL
5304 && TREE_CODE (t) != TYPE_DECL)
5305 fld_worklist_push (TREE_CHAIN (t), fld);
5306 *ws = 0;
5307 }
5308 else if (TYPE_P (t))
5309 {
5310 /* Note that walk_tree does not traverse every possible field in
5311 types, so we have to do our own traversals here. */
5312 add_tree_to_fld_list (t, fld);
5313
5314 if (!RECORD_OR_UNION_TYPE_P (t))
5315 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5316 fld_worklist_push (TYPE_SIZE (t), fld);
5317 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5318 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5319 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5320 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5321 fld_worklist_push (TYPE_NAME (t), fld);
5322 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5323 them and thus do not and want not to reach unused pointer types
5324 this way. */
5325 if (!POINTER_TYPE_P (t))
5326 fld_worklist_push (TYPE_MINVAL (t), fld);
5327 if (!RECORD_OR_UNION_TYPE_P (t))
5328 fld_worklist_push (TYPE_MAXVAL (t), fld);
5329 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5330 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5331 do not and want not to reach unused variants this way. */
5332 if (TYPE_CONTEXT (t))
5333 {
5334 tree ctx = TYPE_CONTEXT (t);
5335 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5336 So push that instead. */
5337 while (ctx && TREE_CODE (ctx) == BLOCK)
5338 ctx = BLOCK_SUPERCONTEXT (ctx);
5339 fld_worklist_push (ctx, fld);
5340 }
5341 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5342 and want not to reach unused types this way. */
5343
5344 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5345 {
5346 unsigned i;
5347 tree tem;
5348 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5349 fld_worklist_push (TREE_TYPE (tem), fld);
5350 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5351 if (tem
5352 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5353 && TREE_CODE (tem) == TREE_LIST)
5354 do
5355 {
5356 fld_worklist_push (TREE_VALUE (tem), fld);
5357 tem = TREE_CHAIN (tem);
5358 }
5359 while (tem);
5360 }
5361 if (RECORD_OR_UNION_TYPE_P (t))
5362 {
5363 tree tem;
5364 /* Push all TYPE_FIELDS - there can be interleaving interesting
5365 and non-interesting things. */
5366 tem = TYPE_FIELDS (t);
5367 while (tem)
5368 {
5369 if (TREE_CODE (tem) == FIELD_DECL
5370 || TREE_CODE (tem) == TYPE_DECL)
5371 fld_worklist_push (tem, fld);
5372 tem = TREE_CHAIN (tem);
5373 }
5374 }
5375
5376 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5377 *ws = 0;
5378 }
5379 else if (TREE_CODE (t) == BLOCK)
5380 {
5381 tree tem;
5382 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5383 fld_worklist_push (tem, fld);
5384 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5385 fld_worklist_push (tem, fld);
5386 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5387 }
5388
5389 if (TREE_CODE (t) != IDENTIFIER_NODE
5390 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5391 fld_worklist_push (TREE_TYPE (t), fld);
5392
5393 return NULL_TREE;
5394 }
5395
5396
5397 /* Find decls and types in T. */
5398
5399 static void
5400 find_decls_types (tree t, struct free_lang_data_d *fld)
5401 {
5402 while (1)
5403 {
5404 if (!fld->pset->contains (t))
5405 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5406 if (fld->worklist.is_empty ())
5407 break;
5408 t = fld->worklist.pop ();
5409 }
5410 }
5411
5412 /* Translate all the types in LIST with the corresponding runtime
5413 types. */
5414
5415 static tree
5416 get_eh_types_for_runtime (tree list)
5417 {
5418 tree head, prev;
5419
5420 if (list == NULL_TREE)
5421 return NULL_TREE;
5422
5423 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5424 prev = head;
5425 list = TREE_CHAIN (list);
5426 while (list)
5427 {
5428 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5429 TREE_CHAIN (prev) = n;
5430 prev = TREE_CHAIN (prev);
5431 list = TREE_CHAIN (list);
5432 }
5433
5434 return head;
5435 }
5436
5437
5438 /* Find decls and types referenced in EH region R and store them in
5439 FLD->DECLS and FLD->TYPES. */
5440
5441 static void
5442 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5443 {
5444 switch (r->type)
5445 {
5446 case ERT_CLEANUP:
5447 break;
5448
5449 case ERT_TRY:
5450 {
5451 eh_catch c;
5452
5453 /* The types referenced in each catch must first be changed to the
5454 EH types used at runtime. This removes references to FE types
5455 in the region. */
5456 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5457 {
5458 c->type_list = get_eh_types_for_runtime (c->type_list);
5459 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5460 }
5461 }
5462 break;
5463
5464 case ERT_ALLOWED_EXCEPTIONS:
5465 r->u.allowed.type_list
5466 = get_eh_types_for_runtime (r->u.allowed.type_list);
5467 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5468 break;
5469
5470 case ERT_MUST_NOT_THROW:
5471 walk_tree (&r->u.must_not_throw.failure_decl,
5472 find_decls_types_r, fld, fld->pset);
5473 break;
5474 }
5475 }
5476
5477
5478 /* Find decls and types referenced in cgraph node N and store them in
5479 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5480 look for *every* kind of DECL and TYPE node reachable from N,
5481 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5482 NAMESPACE_DECLs, etc). */
5483
5484 static void
5485 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5486 {
5487 basic_block bb;
5488 struct function *fn;
5489 unsigned ix;
5490 tree t;
5491
5492 find_decls_types (n->decl, fld);
5493
5494 if (!gimple_has_body_p (n->decl))
5495 return;
5496
5497 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5498
5499 fn = DECL_STRUCT_FUNCTION (n->decl);
5500
5501 /* Traverse locals. */
5502 FOR_EACH_LOCAL_DECL (fn, ix, t)
5503 find_decls_types (t, fld);
5504
5505 /* Traverse EH regions in FN. */
5506 {
5507 eh_region r;
5508 FOR_ALL_EH_REGION_FN (r, fn)
5509 find_decls_types_in_eh_region (r, fld);
5510 }
5511
5512 /* Traverse every statement in FN. */
5513 FOR_EACH_BB_FN (bb, fn)
5514 {
5515 gimple_stmt_iterator si;
5516 unsigned i;
5517
5518 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5519 {
5520 gimple phi = gsi_stmt (si);
5521
5522 for (i = 0; i < gimple_phi_num_args (phi); i++)
5523 {
5524 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5525 find_decls_types (*arg_p, fld);
5526 }
5527 }
5528
5529 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5530 {
5531 gimple stmt = gsi_stmt (si);
5532
5533 if (is_gimple_call (stmt))
5534 find_decls_types (gimple_call_fntype (stmt), fld);
5535
5536 for (i = 0; i < gimple_num_ops (stmt); i++)
5537 {
5538 tree arg = gimple_op (stmt, i);
5539 find_decls_types (arg, fld);
5540 }
5541 }
5542 }
5543 }
5544
5545
5546 /* Find decls and types referenced in varpool node N and store them in
5547 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5548 look for *every* kind of DECL and TYPE node reachable from N,
5549 including those embedded inside types and decls (i.e,, TYPE_DECLs,
5550 NAMESPACE_DECLs, etc). */
5551
5552 static void
5553 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5554 {
5555 find_decls_types (v->decl, fld);
5556 }
5557
5558 /* If T needs an assembler name, have one created for it. */
5559
5560 void
5561 assign_assembler_name_if_neeeded (tree t)
5562 {
5563 if (need_assembler_name_p (t))
5564 {
5565 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5566 diagnostics that use input_location to show locus
5567 information. The problem here is that, at this point,
5568 input_location is generally anchored to the end of the file
5569 (since the parser is long gone), so we don't have a good
5570 position to pin it to.
5571
5572 To alleviate this problem, this uses the location of T's
5573 declaration. Examples of this are
5574 testsuite/g++.dg/template/cond2.C and
5575 testsuite/g++.dg/template/pr35240.C. */
5576 location_t saved_location = input_location;
5577 input_location = DECL_SOURCE_LOCATION (t);
5578
5579 decl_assembler_name (t);
5580
5581 input_location = saved_location;
5582 }
5583 }
5584
5585
5586 /* Free language specific information for every operand and expression
5587 in every node of the call graph. This process operates in three stages:
5588
5589 1- Every callgraph node and varpool node is traversed looking for
5590 decls and types embedded in them. This is a more exhaustive
5591 search than that done by find_referenced_vars, because it will
5592 also collect individual fields, decls embedded in types, etc.
5593
5594 2- All the decls found are sent to free_lang_data_in_decl.
5595
5596 3- All the types found are sent to free_lang_data_in_type.
5597
5598 The ordering between decls and types is important because
5599 free_lang_data_in_decl sets assembler names, which includes
5600 mangling. So types cannot be freed up until assembler names have
5601 been set up. */
5602
5603 static void
5604 free_lang_data_in_cgraph (void)
5605 {
5606 struct cgraph_node *n;
5607 varpool_node *v;
5608 struct free_lang_data_d fld;
5609 tree t;
5610 unsigned i;
5611 alias_pair *p;
5612
5613 /* Initialize sets and arrays to store referenced decls and types. */
5614 fld.pset = new hash_set<tree>;
5615 fld.worklist.create (0);
5616 fld.decls.create (100);
5617 fld.types.create (100);
5618
5619 /* Find decls and types in the body of every function in the callgraph. */
5620 FOR_EACH_FUNCTION (n)
5621 find_decls_types_in_node (n, &fld);
5622
5623 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5624 find_decls_types (p->decl, &fld);
5625
5626 /* Find decls and types in every varpool symbol. */
5627 FOR_EACH_VARIABLE (v)
5628 find_decls_types_in_var (v, &fld);
5629
5630 /* Set the assembler name on every decl found. We need to do this
5631 now because free_lang_data_in_decl will invalidate data needed
5632 for mangling. This breaks mangling on interdependent decls. */
5633 FOR_EACH_VEC_ELT (fld.decls, i, t)
5634 assign_assembler_name_if_neeeded (t);
5635
5636 /* Traverse every decl found freeing its language data. */
5637 FOR_EACH_VEC_ELT (fld.decls, i, t)
5638 free_lang_data_in_decl (t);
5639
5640 /* Traverse every type found freeing its language data. */
5641 FOR_EACH_VEC_ELT (fld.types, i, t)
5642 free_lang_data_in_type (t);
5643
5644 delete fld.pset;
5645 fld.worklist.release ();
5646 fld.decls.release ();
5647 fld.types.release ();
5648 }
5649
5650
5651 /* Free resources that are used by FE but are not needed once they are done. */
5652
5653 static unsigned
5654 free_lang_data (void)
5655 {
5656 unsigned i;
5657
5658 /* If we are the LTO frontend we have freed lang-specific data already. */
5659 if (in_lto_p
5660 || !flag_generate_lto)
5661 return 0;
5662
5663 /* Allocate and assign alias sets to the standard integer types
5664 while the slots are still in the way the frontends generated them. */
5665 for (i = 0; i < itk_none; ++i)
5666 if (integer_types[i])
5667 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5668
5669 /* Traverse the IL resetting language specific information for
5670 operands, expressions, etc. */
5671 free_lang_data_in_cgraph ();
5672
5673 /* Create gimple variants for common types. */
5674 ptrdiff_type_node = integer_type_node;
5675 fileptr_type_node = ptr_type_node;
5676
5677 /* Reset some langhooks. Do not reset types_compatible_p, it may
5678 still be used indirectly via the get_alias_set langhook. */
5679 lang_hooks.dwarf_name = lhd_dwarf_name;
5680 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5681 /* We do not want the default decl_assembler_name implementation,
5682 rather if we have fixed everything we want a wrapper around it
5683 asserting that all non-local symbols already got their assembler
5684 name and only produce assembler names for local symbols. Or rather
5685 make sure we never call decl_assembler_name on local symbols and
5686 devise a separate, middle-end private scheme for it. */
5687
5688 /* Reset diagnostic machinery. */
5689 tree_diagnostics_defaults (global_dc);
5690
5691 return 0;
5692 }
5693
5694
5695 namespace {
5696
5697 const pass_data pass_data_ipa_free_lang_data =
5698 {
5699 SIMPLE_IPA_PASS, /* type */
5700 "*free_lang_data", /* name */
5701 OPTGROUP_NONE, /* optinfo_flags */
5702 TV_IPA_FREE_LANG_DATA, /* tv_id */
5703 0, /* properties_required */
5704 0, /* properties_provided */
5705 0, /* properties_destroyed */
5706 0, /* todo_flags_start */
5707 0, /* todo_flags_finish */
5708 };
5709
5710 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5711 {
5712 public:
5713 pass_ipa_free_lang_data (gcc::context *ctxt)
5714 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5715 {}
5716
5717 /* opt_pass methods: */
5718 virtual unsigned int execute (function *) { return free_lang_data (); }
5719
5720 }; // class pass_ipa_free_lang_data
5721
5722 } // anon namespace
5723
5724 simple_ipa_opt_pass *
5725 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5726 {
5727 return new pass_ipa_free_lang_data (ctxt);
5728 }
5729
5730 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5731 ATTR_NAME. Also used internally by remove_attribute(). */
5732 bool
5733 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5734 {
5735 size_t ident_len = IDENTIFIER_LENGTH (ident);
5736
5737 if (ident_len == attr_len)
5738 {
5739 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5740 return true;
5741 }
5742 else if (ident_len == attr_len + 4)
5743 {
5744 /* There is the possibility that ATTR is 'text' and IDENT is
5745 '__text__'. */
5746 const char *p = IDENTIFIER_POINTER (ident);
5747 if (p[0] == '_' && p[1] == '_'
5748 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5749 && strncmp (attr_name, p + 2, attr_len) == 0)
5750 return true;
5751 }
5752
5753 return false;
5754 }
5755
5756 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5757 of ATTR_NAME, and LIST is not NULL_TREE. */
5758 tree
5759 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5760 {
5761 while (list)
5762 {
5763 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5764
5765 if (ident_len == attr_len)
5766 {
5767 if (!strcmp (attr_name,
5768 IDENTIFIER_POINTER (get_attribute_name (list))))
5769 break;
5770 }
5771 /* TODO: If we made sure that attributes were stored in the
5772 canonical form without '__...__' (ie, as in 'text' as opposed
5773 to '__text__') then we could avoid the following case. */
5774 else if (ident_len == attr_len + 4)
5775 {
5776 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5777 if (p[0] == '_' && p[1] == '_'
5778 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5779 && strncmp (attr_name, p + 2, attr_len) == 0)
5780 break;
5781 }
5782 list = TREE_CHAIN (list);
5783 }
5784
5785 return list;
5786 }
5787
5788 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5789 return a pointer to the attribute's list first element if the attribute
5790 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5791 '__text__'). */
5792
5793 tree
5794 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5795 tree list)
5796 {
5797 while (list)
5798 {
5799 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5800
5801 if (attr_len > ident_len)
5802 {
5803 list = TREE_CHAIN (list);
5804 continue;
5805 }
5806
5807 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5808
5809 if (strncmp (attr_name, p, attr_len) == 0)
5810 break;
5811
5812 /* TODO: If we made sure that attributes were stored in the
5813 canonical form without '__...__' (ie, as in 'text' as opposed
5814 to '__text__') then we could avoid the following case. */
5815 if (p[0] == '_' && p[1] == '_' &&
5816 strncmp (attr_name, p + 2, attr_len) == 0)
5817 break;
5818
5819 list = TREE_CHAIN (list);
5820 }
5821
5822 return list;
5823 }
5824
5825
5826 /* A variant of lookup_attribute() that can be used with an identifier
5827 as the first argument, and where the identifier can be either
5828 'text' or '__text__'.
5829
5830 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5831 return a pointer to the attribute's list element if the attribute
5832 is part of the list, or NULL_TREE if not found. If the attribute
5833 appears more than once, this only returns the first occurrence; the
5834 TREE_CHAIN of the return value should be passed back in if further
5835 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5836 can be in the form 'text' or '__text__'. */
5837 static tree
5838 lookup_ident_attribute (tree attr_identifier, tree list)
5839 {
5840 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5841
5842 while (list)
5843 {
5844 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5845 == IDENTIFIER_NODE);
5846
5847 /* Identifiers can be compared directly for equality. */
5848 if (attr_identifier == get_attribute_name (list))
5849 break;
5850
5851 /* If they are not equal, they may still be one in the form
5852 'text' while the other one is in the form '__text__'. TODO:
5853 If we were storing attributes in normalized 'text' form, then
5854 this could all go away and we could take full advantage of
5855 the fact that we're comparing identifiers. :-) */
5856 {
5857 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5858 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5859
5860 if (ident_len == attr_len + 4)
5861 {
5862 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5863 const char *q = IDENTIFIER_POINTER (attr_identifier);
5864 if (p[0] == '_' && p[1] == '_'
5865 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5866 && strncmp (q, p + 2, attr_len) == 0)
5867 break;
5868 }
5869 else if (ident_len + 4 == attr_len)
5870 {
5871 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5872 const char *q = IDENTIFIER_POINTER (attr_identifier);
5873 if (q[0] == '_' && q[1] == '_'
5874 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5875 && strncmp (q + 2, p, ident_len) == 0)
5876 break;
5877 }
5878 }
5879 list = TREE_CHAIN (list);
5880 }
5881
5882 return list;
5883 }
5884
5885 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5886 modified list. */
5887
5888 tree
5889 remove_attribute (const char *attr_name, tree list)
5890 {
5891 tree *p;
5892 size_t attr_len = strlen (attr_name);
5893
5894 gcc_checking_assert (attr_name[0] != '_');
5895
5896 for (p = &list; *p; )
5897 {
5898 tree l = *p;
5899 /* TODO: If we were storing attributes in normalized form, here
5900 we could use a simple strcmp(). */
5901 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5902 *p = TREE_CHAIN (l);
5903 else
5904 p = &TREE_CHAIN (l);
5905 }
5906
5907 return list;
5908 }
5909
5910 /* Return an attribute list that is the union of a1 and a2. */
5911
5912 tree
5913 merge_attributes (tree a1, tree a2)
5914 {
5915 tree attributes;
5916
5917 /* Either one unset? Take the set one. */
5918
5919 if ((attributes = a1) == 0)
5920 attributes = a2;
5921
5922 /* One that completely contains the other? Take it. */
5923
5924 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5925 {
5926 if (attribute_list_contained (a2, a1))
5927 attributes = a2;
5928 else
5929 {
5930 /* Pick the longest list, and hang on the other list. */
5931
5932 if (list_length (a1) < list_length (a2))
5933 attributes = a2, a2 = a1;
5934
5935 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5936 {
5937 tree a;
5938 for (a = lookup_ident_attribute (get_attribute_name (a2),
5939 attributes);
5940 a != NULL_TREE && !attribute_value_equal (a, a2);
5941 a = lookup_ident_attribute (get_attribute_name (a2),
5942 TREE_CHAIN (a)))
5943 ;
5944 if (a == NULL_TREE)
5945 {
5946 a1 = copy_node (a2);
5947 TREE_CHAIN (a1) = attributes;
5948 attributes = a1;
5949 }
5950 }
5951 }
5952 }
5953 return attributes;
5954 }
5955
5956 /* Given types T1 and T2, merge their attributes and return
5957 the result. */
5958
5959 tree
5960 merge_type_attributes (tree t1, tree t2)
5961 {
5962 return merge_attributes (TYPE_ATTRIBUTES (t1),
5963 TYPE_ATTRIBUTES (t2));
5964 }
5965
5966 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5967 the result. */
5968
5969 tree
5970 merge_decl_attributes (tree olddecl, tree newdecl)
5971 {
5972 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5973 DECL_ATTRIBUTES (newdecl));
5974 }
5975
5976 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5977
5978 /* Specialization of merge_decl_attributes for various Windows targets.
5979
5980 This handles the following situation:
5981
5982 __declspec (dllimport) int foo;
5983 int foo;
5984
5985 The second instance of `foo' nullifies the dllimport. */
5986
5987 tree
5988 merge_dllimport_decl_attributes (tree old, tree new_tree)
5989 {
5990 tree a;
5991 int delete_dllimport_p = 1;
5992
5993 /* What we need to do here is remove from `old' dllimport if it doesn't
5994 appear in `new'. dllimport behaves like extern: if a declaration is
5995 marked dllimport and a definition appears later, then the object
5996 is not dllimport'd. We also remove a `new' dllimport if the old list
5997 contains dllexport: dllexport always overrides dllimport, regardless
5998 of the order of declaration. */
5999 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6000 delete_dllimport_p = 0;
6001 else if (DECL_DLLIMPORT_P (new_tree)
6002 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6003 {
6004 DECL_DLLIMPORT_P (new_tree) = 0;
6005 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6006 "dllimport ignored", new_tree);
6007 }
6008 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6009 {
6010 /* Warn about overriding a symbol that has already been used, e.g.:
6011 extern int __attribute__ ((dllimport)) foo;
6012 int* bar () {return &foo;}
6013 int foo;
6014 */
6015 if (TREE_USED (old))
6016 {
6017 warning (0, "%q+D redeclared without dllimport attribute "
6018 "after being referenced with dll linkage", new_tree);
6019 /* If we have used a variable's address with dllimport linkage,
6020 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6021 decl may already have had TREE_CONSTANT computed.
6022 We still remove the attribute so that assembler code refers
6023 to '&foo rather than '_imp__foo'. */
6024 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6025 DECL_DLLIMPORT_P (new_tree) = 1;
6026 }
6027
6028 /* Let an inline definition silently override the external reference,
6029 but otherwise warn about attribute inconsistency. */
6030 else if (TREE_CODE (new_tree) == VAR_DECL
6031 || !DECL_DECLARED_INLINE_P (new_tree))
6032 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6033 "previous dllimport ignored", new_tree);
6034 }
6035 else
6036 delete_dllimport_p = 0;
6037
6038 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6039
6040 if (delete_dllimport_p)
6041 a = remove_attribute ("dllimport", a);
6042
6043 return a;
6044 }
6045
6046 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6047 struct attribute_spec.handler. */
6048
6049 tree
6050 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6051 bool *no_add_attrs)
6052 {
6053 tree node = *pnode;
6054 bool is_dllimport;
6055
6056 /* These attributes may apply to structure and union types being created,
6057 but otherwise should pass to the declaration involved. */
6058 if (!DECL_P (node))
6059 {
6060 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6061 | (int) ATTR_FLAG_ARRAY_NEXT))
6062 {
6063 *no_add_attrs = true;
6064 return tree_cons (name, args, NULL_TREE);
6065 }
6066 if (TREE_CODE (node) == RECORD_TYPE
6067 || TREE_CODE (node) == UNION_TYPE)
6068 {
6069 node = TYPE_NAME (node);
6070 if (!node)
6071 return NULL_TREE;
6072 }
6073 else
6074 {
6075 warning (OPT_Wattributes, "%qE attribute ignored",
6076 name);
6077 *no_add_attrs = true;
6078 return NULL_TREE;
6079 }
6080 }
6081
6082 if (TREE_CODE (node) != FUNCTION_DECL
6083 && TREE_CODE (node) != VAR_DECL
6084 && TREE_CODE (node) != TYPE_DECL)
6085 {
6086 *no_add_attrs = true;
6087 warning (OPT_Wattributes, "%qE attribute ignored",
6088 name);
6089 return NULL_TREE;
6090 }
6091
6092 if (TREE_CODE (node) == TYPE_DECL
6093 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6094 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6095 {
6096 *no_add_attrs = true;
6097 warning (OPT_Wattributes, "%qE attribute ignored",
6098 name);
6099 return NULL_TREE;
6100 }
6101
6102 is_dllimport = is_attribute_p ("dllimport", name);
6103
6104 /* Report error on dllimport ambiguities seen now before they cause
6105 any damage. */
6106 if (is_dllimport)
6107 {
6108 /* Honor any target-specific overrides. */
6109 if (!targetm.valid_dllimport_attribute_p (node))
6110 *no_add_attrs = true;
6111
6112 else if (TREE_CODE (node) == FUNCTION_DECL
6113 && DECL_DECLARED_INLINE_P (node))
6114 {
6115 warning (OPT_Wattributes, "inline function %q+D declared as "
6116 " dllimport: attribute ignored", node);
6117 *no_add_attrs = true;
6118 }
6119 /* Like MS, treat definition of dllimported variables and
6120 non-inlined functions on declaration as syntax errors. */
6121 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6122 {
6123 error ("function %q+D definition is marked dllimport", node);
6124 *no_add_attrs = true;
6125 }
6126
6127 else if (TREE_CODE (node) == VAR_DECL)
6128 {
6129 if (DECL_INITIAL (node))
6130 {
6131 error ("variable %q+D definition is marked dllimport",
6132 node);
6133 *no_add_attrs = true;
6134 }
6135
6136 /* `extern' needn't be specified with dllimport.
6137 Specify `extern' now and hope for the best. Sigh. */
6138 DECL_EXTERNAL (node) = 1;
6139 /* Also, implicitly give dllimport'd variables declared within
6140 a function global scope, unless declared static. */
6141 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6142 TREE_PUBLIC (node) = 1;
6143 }
6144
6145 if (*no_add_attrs == false)
6146 DECL_DLLIMPORT_P (node) = 1;
6147 }
6148 else if (TREE_CODE (node) == FUNCTION_DECL
6149 && DECL_DECLARED_INLINE_P (node)
6150 && flag_keep_inline_dllexport)
6151 /* An exported function, even if inline, must be emitted. */
6152 DECL_EXTERNAL (node) = 0;
6153
6154 /* Report error if symbol is not accessible at global scope. */
6155 if (!TREE_PUBLIC (node)
6156 && (TREE_CODE (node) == VAR_DECL
6157 || TREE_CODE (node) == FUNCTION_DECL))
6158 {
6159 error ("external linkage required for symbol %q+D because of "
6160 "%qE attribute", node, name);
6161 *no_add_attrs = true;
6162 }
6163
6164 /* A dllexport'd entity must have default visibility so that other
6165 program units (shared libraries or the main executable) can see
6166 it. A dllimport'd entity must have default visibility so that
6167 the linker knows that undefined references within this program
6168 unit can be resolved by the dynamic linker. */
6169 if (!*no_add_attrs)
6170 {
6171 if (DECL_VISIBILITY_SPECIFIED (node)
6172 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6173 error ("%qE implies default visibility, but %qD has already "
6174 "been declared with a different visibility",
6175 name, node);
6176 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6177 DECL_VISIBILITY_SPECIFIED (node) = 1;
6178 }
6179
6180 return NULL_TREE;
6181 }
6182
6183 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6184 \f
6185 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6186 of the various TYPE_QUAL values. */
6187
6188 static void
6189 set_type_quals (tree type, int type_quals)
6190 {
6191 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6192 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6193 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6194 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6195 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6196 }
6197
6198 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6199
6200 bool
6201 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6202 {
6203 return (TYPE_QUALS (cand) == type_quals
6204 && TYPE_NAME (cand) == TYPE_NAME (base)
6205 /* Apparently this is needed for Objective-C. */
6206 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6207 /* Check alignment. */
6208 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6209 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6210 TYPE_ATTRIBUTES (base)));
6211 }
6212
6213 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6214
6215 static bool
6216 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6217 {
6218 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6219 && TYPE_NAME (cand) == TYPE_NAME (base)
6220 /* Apparently this is needed for Objective-C. */
6221 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6222 /* Check alignment. */
6223 && TYPE_ALIGN (cand) == align
6224 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6225 TYPE_ATTRIBUTES (base)));
6226 }
6227
6228 /* This function checks to see if TYPE matches the size one of the built-in
6229 atomic types, and returns that core atomic type. */
6230
6231 static tree
6232 find_atomic_core_type (tree type)
6233 {
6234 tree base_atomic_type;
6235
6236 /* Only handle complete types. */
6237 if (TYPE_SIZE (type) == NULL_TREE)
6238 return NULL_TREE;
6239
6240 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6241 switch (type_size)
6242 {
6243 case 8:
6244 base_atomic_type = atomicQI_type_node;
6245 break;
6246
6247 case 16:
6248 base_atomic_type = atomicHI_type_node;
6249 break;
6250
6251 case 32:
6252 base_atomic_type = atomicSI_type_node;
6253 break;
6254
6255 case 64:
6256 base_atomic_type = atomicDI_type_node;
6257 break;
6258
6259 case 128:
6260 base_atomic_type = atomicTI_type_node;
6261 break;
6262
6263 default:
6264 base_atomic_type = NULL_TREE;
6265 }
6266
6267 return base_atomic_type;
6268 }
6269
6270 /* Return a version of the TYPE, qualified as indicated by the
6271 TYPE_QUALS, if one exists. If no qualified version exists yet,
6272 return NULL_TREE. */
6273
6274 tree
6275 get_qualified_type (tree type, int type_quals)
6276 {
6277 tree t;
6278
6279 if (TYPE_QUALS (type) == type_quals)
6280 return type;
6281
6282 /* Search the chain of variants to see if there is already one there just
6283 like the one we need to have. If so, use that existing one. We must
6284 preserve the TYPE_NAME, since there is code that depends on this. */
6285 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6286 if (check_qualified_type (t, type, type_quals))
6287 return t;
6288
6289 return NULL_TREE;
6290 }
6291
6292 /* Like get_qualified_type, but creates the type if it does not
6293 exist. This function never returns NULL_TREE. */
6294
6295 tree
6296 build_qualified_type (tree type, int type_quals)
6297 {
6298 tree t;
6299
6300 /* See if we already have the appropriate qualified variant. */
6301 t = get_qualified_type (type, type_quals);
6302
6303 /* If not, build it. */
6304 if (!t)
6305 {
6306 t = build_variant_type_copy (type);
6307 set_type_quals (t, type_quals);
6308
6309 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6310 {
6311 /* See if this object can map to a basic atomic type. */
6312 tree atomic_type = find_atomic_core_type (type);
6313 if (atomic_type)
6314 {
6315 /* Ensure the alignment of this type is compatible with
6316 the required alignment of the atomic type. */
6317 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6318 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6319 }
6320 }
6321
6322 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6323 /* Propagate structural equality. */
6324 SET_TYPE_STRUCTURAL_EQUALITY (t);
6325 else if (TYPE_CANONICAL (type) != type)
6326 /* Build the underlying canonical type, since it is different
6327 from TYPE. */
6328 {
6329 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6330 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6331 }
6332 else
6333 /* T is its own canonical type. */
6334 TYPE_CANONICAL (t) = t;
6335
6336 }
6337
6338 return t;
6339 }
6340
6341 /* Create a variant of type T with alignment ALIGN. */
6342
6343 tree
6344 build_aligned_type (tree type, unsigned int align)
6345 {
6346 tree t;
6347
6348 if (TYPE_PACKED (type)
6349 || TYPE_ALIGN (type) == align)
6350 return type;
6351
6352 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6353 if (check_aligned_type (t, type, align))
6354 return t;
6355
6356 t = build_variant_type_copy (type);
6357 TYPE_ALIGN (t) = align;
6358
6359 return t;
6360 }
6361
6362 /* Create a new distinct copy of TYPE. The new type is made its own
6363 MAIN_VARIANT. If TYPE requires structural equality checks, the
6364 resulting type requires structural equality checks; otherwise, its
6365 TYPE_CANONICAL points to itself. */
6366
6367 tree
6368 build_distinct_type_copy (tree type)
6369 {
6370 tree t = copy_node (type);
6371
6372 TYPE_POINTER_TO (t) = 0;
6373 TYPE_REFERENCE_TO (t) = 0;
6374
6375 /* Set the canonical type either to a new equivalence class, or
6376 propagate the need for structural equality checks. */
6377 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6378 SET_TYPE_STRUCTURAL_EQUALITY (t);
6379 else
6380 TYPE_CANONICAL (t) = t;
6381
6382 /* Make it its own variant. */
6383 TYPE_MAIN_VARIANT (t) = t;
6384 TYPE_NEXT_VARIANT (t) = 0;
6385
6386 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6387 whose TREE_TYPE is not t. This can also happen in the Ada
6388 frontend when using subtypes. */
6389
6390 return t;
6391 }
6392
6393 /* Create a new variant of TYPE, equivalent but distinct. This is so
6394 the caller can modify it. TYPE_CANONICAL for the return type will
6395 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6396 are considered equal by the language itself (or that both types
6397 require structural equality checks). */
6398
6399 tree
6400 build_variant_type_copy (tree type)
6401 {
6402 tree t, m = TYPE_MAIN_VARIANT (type);
6403
6404 t = build_distinct_type_copy (type);
6405
6406 /* Since we're building a variant, assume that it is a non-semantic
6407 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6408 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6409
6410 /* Add the new type to the chain of variants of TYPE. */
6411 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6412 TYPE_NEXT_VARIANT (m) = t;
6413 TYPE_MAIN_VARIANT (t) = m;
6414
6415 return t;
6416 }
6417 \f
6418 /* Return true if the from tree in both tree maps are equal. */
6419
6420 int
6421 tree_map_base_eq (const void *va, const void *vb)
6422 {
6423 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6424 *const b = (const struct tree_map_base *) vb;
6425 return (a->from == b->from);
6426 }
6427
6428 /* Hash a from tree in a tree_base_map. */
6429
6430 unsigned int
6431 tree_map_base_hash (const void *item)
6432 {
6433 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6434 }
6435
6436 /* Return true if this tree map structure is marked for garbage collection
6437 purposes. We simply return true if the from tree is marked, so that this
6438 structure goes away when the from tree goes away. */
6439
6440 int
6441 tree_map_base_marked_p (const void *p)
6442 {
6443 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6444 }
6445
6446 /* Hash a from tree in a tree_map. */
6447
6448 unsigned int
6449 tree_map_hash (const void *item)
6450 {
6451 return (((const struct tree_map *) item)->hash);
6452 }
6453
6454 /* Hash a from tree in a tree_decl_map. */
6455
6456 unsigned int
6457 tree_decl_map_hash (const void *item)
6458 {
6459 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6460 }
6461
6462 /* Return the initialization priority for DECL. */
6463
6464 priority_type
6465 decl_init_priority_lookup (tree decl)
6466 {
6467 symtab_node *snode = symtab_node::get (decl);
6468
6469 if (!snode)
6470 return DEFAULT_INIT_PRIORITY;
6471 return
6472 snode->get_init_priority ();
6473 }
6474
6475 /* Return the finalization priority for DECL. */
6476
6477 priority_type
6478 decl_fini_priority_lookup (tree decl)
6479 {
6480 cgraph_node *node = cgraph_node::get (decl);
6481
6482 if (!node)
6483 return DEFAULT_INIT_PRIORITY;
6484 return
6485 node->get_fini_priority ();
6486 }
6487
6488 /* Set the initialization priority for DECL to PRIORITY. */
6489
6490 void
6491 decl_init_priority_insert (tree decl, priority_type priority)
6492 {
6493 struct symtab_node *snode;
6494
6495 if (priority == DEFAULT_INIT_PRIORITY)
6496 {
6497 snode = symtab_node::get (decl);
6498 if (!snode)
6499 return;
6500 }
6501 else if (TREE_CODE (decl) == VAR_DECL)
6502 snode = varpool_node::get_create (decl);
6503 else
6504 snode = cgraph_node::get_create (decl);
6505 snode->set_init_priority (priority);
6506 }
6507
6508 /* Set the finalization priority for DECL to PRIORITY. */
6509
6510 void
6511 decl_fini_priority_insert (tree decl, priority_type priority)
6512 {
6513 struct cgraph_node *node;
6514
6515 if (priority == DEFAULT_INIT_PRIORITY)
6516 {
6517 node = cgraph_node::get (decl);
6518 if (!node)
6519 return;
6520 }
6521 else
6522 node = cgraph_node::get_create (decl);
6523 node->set_fini_priority (priority);
6524 }
6525
6526 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6527
6528 static void
6529 print_debug_expr_statistics (void)
6530 {
6531 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6532 (long) htab_size (debug_expr_for_decl),
6533 (long) htab_elements (debug_expr_for_decl),
6534 htab_collisions (debug_expr_for_decl));
6535 }
6536
6537 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6538
6539 static void
6540 print_value_expr_statistics (void)
6541 {
6542 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6543 (long) htab_size (value_expr_for_decl),
6544 (long) htab_elements (value_expr_for_decl),
6545 htab_collisions (value_expr_for_decl));
6546 }
6547
6548 /* Lookup a debug expression for FROM, and return it if we find one. */
6549
6550 tree
6551 decl_debug_expr_lookup (tree from)
6552 {
6553 struct tree_decl_map *h, in;
6554 in.base.from = from;
6555
6556 h = (struct tree_decl_map *)
6557 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6558 if (h)
6559 return h->to;
6560 return NULL_TREE;
6561 }
6562
6563 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6564
6565 void
6566 decl_debug_expr_insert (tree from, tree to)
6567 {
6568 struct tree_decl_map *h;
6569 void **loc;
6570
6571 h = ggc_alloc<tree_decl_map> ();
6572 h->base.from = from;
6573 h->to = to;
6574 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6575 INSERT);
6576 *(struct tree_decl_map **) loc = h;
6577 }
6578
6579 /* Lookup a value expression for FROM, and return it if we find one. */
6580
6581 tree
6582 decl_value_expr_lookup (tree from)
6583 {
6584 struct tree_decl_map *h, in;
6585 in.base.from = from;
6586
6587 h = (struct tree_decl_map *)
6588 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6589 if (h)
6590 return h->to;
6591 return NULL_TREE;
6592 }
6593
6594 /* Insert a mapping FROM->TO in the value expression hashtable. */
6595
6596 void
6597 decl_value_expr_insert (tree from, tree to)
6598 {
6599 struct tree_decl_map *h;
6600 void **loc;
6601
6602 h = ggc_alloc<tree_decl_map> ();
6603 h->base.from = from;
6604 h->to = to;
6605 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6606 INSERT);
6607 *(struct tree_decl_map **) loc = h;
6608 }
6609
6610 /* Lookup a vector of debug arguments for FROM, and return it if we
6611 find one. */
6612
6613 vec<tree, va_gc> **
6614 decl_debug_args_lookup (tree from)
6615 {
6616 struct tree_vec_map *h, in;
6617
6618 if (!DECL_HAS_DEBUG_ARGS_P (from))
6619 return NULL;
6620 gcc_checking_assert (debug_args_for_decl != NULL);
6621 in.base.from = from;
6622 h = (struct tree_vec_map *)
6623 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6624 if (h)
6625 return &h->to;
6626 return NULL;
6627 }
6628
6629 /* Insert a mapping FROM->empty vector of debug arguments in the value
6630 expression hashtable. */
6631
6632 vec<tree, va_gc> **
6633 decl_debug_args_insert (tree from)
6634 {
6635 struct tree_vec_map *h;
6636 void **loc;
6637
6638 if (DECL_HAS_DEBUG_ARGS_P (from))
6639 return decl_debug_args_lookup (from);
6640 if (debug_args_for_decl == NULL)
6641 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6642 tree_vec_map_eq, 0);
6643 h = ggc_alloc<tree_vec_map> ();
6644 h->base.from = from;
6645 h->to = NULL;
6646 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6647 INSERT);
6648 *(struct tree_vec_map **) loc = h;
6649 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6650 return &h->to;
6651 }
6652
6653 /* Hashing of types so that we don't make duplicates.
6654 The entry point is `type_hash_canon'. */
6655
6656 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6657 with types in the TREE_VALUE slots), by adding the hash codes
6658 of the individual types. */
6659
6660 static void
6661 type_hash_list (const_tree list, inchash::hash &hstate)
6662 {
6663 const_tree tail;
6664
6665 for (tail = list; tail; tail = TREE_CHAIN (tail))
6666 if (TREE_VALUE (tail) != error_mark_node)
6667 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6668 }
6669
6670 /* These are the Hashtable callback functions. */
6671
6672 /* Returns true iff the types are equivalent. */
6673
6674 static int
6675 type_hash_eq (const void *va, const void *vb)
6676 {
6677 const struct type_hash *const a = (const struct type_hash *) va,
6678 *const b = (const struct type_hash *) vb;
6679
6680 /* First test the things that are the same for all types. */
6681 if (a->hash != b->hash
6682 || TREE_CODE (a->type) != TREE_CODE (b->type)
6683 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6684 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6685 TYPE_ATTRIBUTES (b->type))
6686 || (TREE_CODE (a->type) != COMPLEX_TYPE
6687 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6688 return 0;
6689
6690 /* Be careful about comparing arrays before and after the element type
6691 has been completed; don't compare TYPE_ALIGN unless both types are
6692 complete. */
6693 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6694 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6695 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6696 return 0;
6697
6698 switch (TREE_CODE (a->type))
6699 {
6700 case VOID_TYPE:
6701 case COMPLEX_TYPE:
6702 case POINTER_TYPE:
6703 case REFERENCE_TYPE:
6704 case NULLPTR_TYPE:
6705 return 1;
6706
6707 case VECTOR_TYPE:
6708 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6709
6710 case ENUMERAL_TYPE:
6711 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6712 && !(TYPE_VALUES (a->type)
6713 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6714 && TYPE_VALUES (b->type)
6715 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6716 && type_list_equal (TYPE_VALUES (a->type),
6717 TYPE_VALUES (b->type))))
6718 return 0;
6719
6720 /* ... fall through ... */
6721
6722 case INTEGER_TYPE:
6723 case REAL_TYPE:
6724 case BOOLEAN_TYPE:
6725 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6726 return false;
6727 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6728 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6729 TYPE_MAX_VALUE (b->type)))
6730 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6731 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6732 TYPE_MIN_VALUE (b->type))));
6733
6734 case FIXED_POINT_TYPE:
6735 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6736
6737 case OFFSET_TYPE:
6738 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6739
6740 case METHOD_TYPE:
6741 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6742 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6743 || (TYPE_ARG_TYPES (a->type)
6744 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6745 && TYPE_ARG_TYPES (b->type)
6746 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6747 && type_list_equal (TYPE_ARG_TYPES (a->type),
6748 TYPE_ARG_TYPES (b->type)))))
6749 break;
6750 return 0;
6751 case ARRAY_TYPE:
6752 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6753
6754 case RECORD_TYPE:
6755 case UNION_TYPE:
6756 case QUAL_UNION_TYPE:
6757 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6758 || (TYPE_FIELDS (a->type)
6759 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6760 && TYPE_FIELDS (b->type)
6761 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6762 && type_list_equal (TYPE_FIELDS (a->type),
6763 TYPE_FIELDS (b->type))));
6764
6765 case FUNCTION_TYPE:
6766 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6767 || (TYPE_ARG_TYPES (a->type)
6768 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6769 && TYPE_ARG_TYPES (b->type)
6770 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6771 && type_list_equal (TYPE_ARG_TYPES (a->type),
6772 TYPE_ARG_TYPES (b->type))))
6773 break;
6774 return 0;
6775
6776 default:
6777 return 0;
6778 }
6779
6780 if (lang_hooks.types.type_hash_eq != NULL)
6781 return lang_hooks.types.type_hash_eq (a->type, b->type);
6782
6783 return 1;
6784 }
6785
6786 /* Return the cached hash value. */
6787
6788 static hashval_t
6789 type_hash_hash (const void *item)
6790 {
6791 return ((const struct type_hash *) item)->hash;
6792 }
6793
6794 /* Given TYPE, and HASHCODE its hash code, return the canonical
6795 object for an identical type if one already exists.
6796 Otherwise, return TYPE, and record it as the canonical object.
6797
6798 To use this function, first create a type of the sort you want.
6799 Then compute its hash code from the fields of the type that
6800 make it different from other similar types.
6801 Then call this function and use the value. */
6802
6803 tree
6804 type_hash_canon (unsigned int hashcode, tree type)
6805 {
6806 type_hash in;
6807 void **loc;
6808
6809 /* The hash table only contains main variants, so ensure that's what we're
6810 being passed. */
6811 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6812
6813 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6814 must call that routine before comparing TYPE_ALIGNs. */
6815 layout_type (type);
6816
6817 in.hash = hashcode;
6818 in.type = type;
6819
6820 loc = htab_find_slot_with_hash (type_hash_table, &in, hashcode, INSERT);
6821 if (*loc)
6822 {
6823 tree t1 = ((type_hash *) *loc)->type;
6824 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6825 if (GATHER_STATISTICS)
6826 {
6827 tree_code_counts[(int) TREE_CODE (type)]--;
6828 tree_node_counts[(int) t_kind]--;
6829 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6830 }
6831 return t1;
6832 }
6833 else
6834 {
6835 struct type_hash *h;
6836
6837 h = ggc_alloc<type_hash> ();
6838 h->hash = hashcode;
6839 h->type = type;
6840 *loc = (void *)h;
6841
6842 return type;
6843 }
6844 }
6845
6846 /* See if the data pointed to by the type hash table is marked. We consider
6847 it marked if the type is marked or if a debug type number or symbol
6848 table entry has been made for the type. */
6849
6850 static int
6851 type_hash_marked_p (const void *p)
6852 {
6853 const_tree const type = ((const struct type_hash *) p)->type;
6854
6855 return ggc_marked_p (type);
6856 }
6857
6858 static void
6859 print_type_hash_statistics (void)
6860 {
6861 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6862 (long) htab_size (type_hash_table),
6863 (long) htab_elements (type_hash_table),
6864 htab_collisions (type_hash_table));
6865 }
6866
6867 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6868 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6869 by adding the hash codes of the individual attributes. */
6870
6871 static void
6872 attribute_hash_list (const_tree list, inchash::hash &hstate)
6873 {
6874 const_tree tail;
6875
6876 for (tail = list; tail; tail = TREE_CHAIN (tail))
6877 /* ??? Do we want to add in TREE_VALUE too? */
6878 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6879 }
6880
6881 /* Given two lists of attributes, return true if list l2 is
6882 equivalent to l1. */
6883
6884 int
6885 attribute_list_equal (const_tree l1, const_tree l2)
6886 {
6887 if (l1 == l2)
6888 return 1;
6889
6890 return attribute_list_contained (l1, l2)
6891 && attribute_list_contained (l2, l1);
6892 }
6893
6894 /* Given two lists of attributes, return true if list L2 is
6895 completely contained within L1. */
6896 /* ??? This would be faster if attribute names were stored in a canonicalized
6897 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6898 must be used to show these elements are equivalent (which they are). */
6899 /* ??? It's not clear that attributes with arguments will always be handled
6900 correctly. */
6901
6902 int
6903 attribute_list_contained (const_tree l1, const_tree l2)
6904 {
6905 const_tree t1, t2;
6906
6907 /* First check the obvious, maybe the lists are identical. */
6908 if (l1 == l2)
6909 return 1;
6910
6911 /* Maybe the lists are similar. */
6912 for (t1 = l1, t2 = l2;
6913 t1 != 0 && t2 != 0
6914 && get_attribute_name (t1) == get_attribute_name (t2)
6915 && TREE_VALUE (t1) == TREE_VALUE (t2);
6916 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6917 ;
6918
6919 /* Maybe the lists are equal. */
6920 if (t1 == 0 && t2 == 0)
6921 return 1;
6922
6923 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6924 {
6925 const_tree attr;
6926 /* This CONST_CAST is okay because lookup_attribute does not
6927 modify its argument and the return value is assigned to a
6928 const_tree. */
6929 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6930 CONST_CAST_TREE (l1));
6931 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6932 attr = lookup_ident_attribute (get_attribute_name (t2),
6933 TREE_CHAIN (attr)))
6934 ;
6935
6936 if (attr == NULL_TREE)
6937 return 0;
6938 }
6939
6940 return 1;
6941 }
6942
6943 /* Given two lists of types
6944 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6945 return 1 if the lists contain the same types in the same order.
6946 Also, the TREE_PURPOSEs must match. */
6947
6948 int
6949 type_list_equal (const_tree l1, const_tree l2)
6950 {
6951 const_tree t1, t2;
6952
6953 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6954 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6955 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6956 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6957 && (TREE_TYPE (TREE_PURPOSE (t1))
6958 == TREE_TYPE (TREE_PURPOSE (t2))))))
6959 return 0;
6960
6961 return t1 == t2;
6962 }
6963
6964 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6965 given by TYPE. If the argument list accepts variable arguments,
6966 then this function counts only the ordinary arguments. */
6967
6968 int
6969 type_num_arguments (const_tree type)
6970 {
6971 int i = 0;
6972 tree t;
6973
6974 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6975 /* If the function does not take a variable number of arguments,
6976 the last element in the list will have type `void'. */
6977 if (VOID_TYPE_P (TREE_VALUE (t)))
6978 break;
6979 else
6980 ++i;
6981
6982 return i;
6983 }
6984
6985 /* Nonzero if integer constants T1 and T2
6986 represent the same constant value. */
6987
6988 int
6989 tree_int_cst_equal (const_tree t1, const_tree t2)
6990 {
6991 if (t1 == t2)
6992 return 1;
6993
6994 if (t1 == 0 || t2 == 0)
6995 return 0;
6996
6997 if (TREE_CODE (t1) == INTEGER_CST
6998 && TREE_CODE (t2) == INTEGER_CST
6999 && wi::to_widest (t1) == wi::to_widest (t2))
7000 return 1;
7001
7002 return 0;
7003 }
7004
7005 /* Return true if T is an INTEGER_CST whose numerical value (extended
7006 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7007
7008 bool
7009 tree_fits_shwi_p (const_tree t)
7010 {
7011 return (t != NULL_TREE
7012 && TREE_CODE (t) == INTEGER_CST
7013 && wi::fits_shwi_p (wi::to_widest (t)));
7014 }
7015
7016 /* Return true if T is an INTEGER_CST whose numerical value (extended
7017 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7018
7019 bool
7020 tree_fits_uhwi_p (const_tree t)
7021 {
7022 return (t != NULL_TREE
7023 && TREE_CODE (t) == INTEGER_CST
7024 && wi::fits_uhwi_p (wi::to_widest (t)));
7025 }
7026
7027 /* T is an INTEGER_CST whose numerical value (extended according to
7028 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7029 HOST_WIDE_INT. */
7030
7031 HOST_WIDE_INT
7032 tree_to_shwi (const_tree t)
7033 {
7034 gcc_assert (tree_fits_shwi_p (t));
7035 return TREE_INT_CST_LOW (t);
7036 }
7037
7038 /* T is an INTEGER_CST whose numerical value (extended according to
7039 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7040 HOST_WIDE_INT. */
7041
7042 unsigned HOST_WIDE_INT
7043 tree_to_uhwi (const_tree t)
7044 {
7045 gcc_assert (tree_fits_uhwi_p (t));
7046 return TREE_INT_CST_LOW (t);
7047 }
7048
7049 /* Return the most significant (sign) bit of T. */
7050
7051 int
7052 tree_int_cst_sign_bit (const_tree t)
7053 {
7054 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7055
7056 return wi::extract_uhwi (t, bitno, 1);
7057 }
7058
7059 /* Return an indication of the sign of the integer constant T.
7060 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7061 Note that -1 will never be returned if T's type is unsigned. */
7062
7063 int
7064 tree_int_cst_sgn (const_tree t)
7065 {
7066 if (wi::eq_p (t, 0))
7067 return 0;
7068 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7069 return 1;
7070 else if (wi::neg_p (t))
7071 return -1;
7072 else
7073 return 1;
7074 }
7075
7076 /* Return the minimum number of bits needed to represent VALUE in a
7077 signed or unsigned type, UNSIGNEDP says which. */
7078
7079 unsigned int
7080 tree_int_cst_min_precision (tree value, signop sgn)
7081 {
7082 /* If the value is negative, compute its negative minus 1. The latter
7083 adjustment is because the absolute value of the largest negative value
7084 is one larger than the largest positive value. This is equivalent to
7085 a bit-wise negation, so use that operation instead. */
7086
7087 if (tree_int_cst_sgn (value) < 0)
7088 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7089
7090 /* Return the number of bits needed, taking into account the fact
7091 that we need one more bit for a signed than unsigned type.
7092 If value is 0 or -1, the minimum precision is 1 no matter
7093 whether unsignedp is true or false. */
7094
7095 if (integer_zerop (value))
7096 return 1;
7097 else
7098 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7099 }
7100
7101 /* Return truthvalue of whether T1 is the same tree structure as T2.
7102 Return 1 if they are the same.
7103 Return 0 if they are understandably different.
7104 Return -1 if either contains tree structure not understood by
7105 this function. */
7106
7107 int
7108 simple_cst_equal (const_tree t1, const_tree t2)
7109 {
7110 enum tree_code code1, code2;
7111 int cmp;
7112 int i;
7113
7114 if (t1 == t2)
7115 return 1;
7116 if (t1 == 0 || t2 == 0)
7117 return 0;
7118
7119 code1 = TREE_CODE (t1);
7120 code2 = TREE_CODE (t2);
7121
7122 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7123 {
7124 if (CONVERT_EXPR_CODE_P (code2)
7125 || code2 == NON_LVALUE_EXPR)
7126 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7127 else
7128 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7129 }
7130
7131 else if (CONVERT_EXPR_CODE_P (code2)
7132 || code2 == NON_LVALUE_EXPR)
7133 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7134
7135 if (code1 != code2)
7136 return 0;
7137
7138 switch (code1)
7139 {
7140 case INTEGER_CST:
7141 return wi::to_widest (t1) == wi::to_widest (t2);
7142
7143 case REAL_CST:
7144 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7145
7146 case FIXED_CST:
7147 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7148
7149 case STRING_CST:
7150 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7151 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7152 TREE_STRING_LENGTH (t1)));
7153
7154 case CONSTRUCTOR:
7155 {
7156 unsigned HOST_WIDE_INT idx;
7157 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7158 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7159
7160 if (vec_safe_length (v1) != vec_safe_length (v2))
7161 return false;
7162
7163 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7164 /* ??? Should we handle also fields here? */
7165 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7166 return false;
7167 return true;
7168 }
7169
7170 case SAVE_EXPR:
7171 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7172
7173 case CALL_EXPR:
7174 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7175 if (cmp <= 0)
7176 return cmp;
7177 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7178 return 0;
7179 {
7180 const_tree arg1, arg2;
7181 const_call_expr_arg_iterator iter1, iter2;
7182 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7183 arg2 = first_const_call_expr_arg (t2, &iter2);
7184 arg1 && arg2;
7185 arg1 = next_const_call_expr_arg (&iter1),
7186 arg2 = next_const_call_expr_arg (&iter2))
7187 {
7188 cmp = simple_cst_equal (arg1, arg2);
7189 if (cmp <= 0)
7190 return cmp;
7191 }
7192 return arg1 == arg2;
7193 }
7194
7195 case TARGET_EXPR:
7196 /* Special case: if either target is an unallocated VAR_DECL,
7197 it means that it's going to be unified with whatever the
7198 TARGET_EXPR is really supposed to initialize, so treat it
7199 as being equivalent to anything. */
7200 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7201 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7202 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7203 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7204 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7205 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7206 cmp = 1;
7207 else
7208 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7209
7210 if (cmp <= 0)
7211 return cmp;
7212
7213 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7214
7215 case WITH_CLEANUP_EXPR:
7216 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7217 if (cmp <= 0)
7218 return cmp;
7219
7220 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1));
7221
7222 case COMPONENT_REF:
7223 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7224 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7225
7226 return 0;
7227
7228 case VAR_DECL:
7229 case PARM_DECL:
7230 case CONST_DECL:
7231 case FUNCTION_DECL:
7232 return 0;
7233
7234 default:
7235 break;
7236 }
7237
7238 /* This general rule works for most tree codes. All exceptions should be
7239 handled above. If this is a language-specific tree code, we can't
7240 trust what might be in the operand, so say we don't know
7241 the situation. */
7242 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7243 return -1;
7244
7245 switch (TREE_CODE_CLASS (code1))
7246 {
7247 case tcc_unary:
7248 case tcc_binary:
7249 case tcc_comparison:
7250 case tcc_expression:
7251 case tcc_reference:
7252 case tcc_statement:
7253 cmp = 1;
7254 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7255 {
7256 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7257 if (cmp <= 0)
7258 return cmp;
7259 }
7260
7261 return cmp;
7262
7263 default:
7264 return -1;
7265 }
7266 }
7267
7268 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7269 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7270 than U, respectively. */
7271
7272 int
7273 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7274 {
7275 if (tree_int_cst_sgn (t) < 0)
7276 return -1;
7277 else if (!tree_fits_uhwi_p (t))
7278 return 1;
7279 else if (TREE_INT_CST_LOW (t) == u)
7280 return 0;
7281 else if (TREE_INT_CST_LOW (t) < u)
7282 return -1;
7283 else
7284 return 1;
7285 }
7286
7287 /* Return true if SIZE represents a constant size that is in bounds of
7288 what the middle-end and the backend accepts (covering not more than
7289 half of the address-space). */
7290
7291 bool
7292 valid_constant_size_p (const_tree size)
7293 {
7294 if (! tree_fits_uhwi_p (size)
7295 || TREE_OVERFLOW (size)
7296 || tree_int_cst_sign_bit (size) != 0)
7297 return false;
7298 return true;
7299 }
7300
7301 /* Return the precision of the type, or for a complex or vector type the
7302 precision of the type of its elements. */
7303
7304 unsigned int
7305 element_precision (const_tree type)
7306 {
7307 enum tree_code code = TREE_CODE (type);
7308 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7309 type = TREE_TYPE (type);
7310
7311 return TYPE_PRECISION (type);
7312 }
7313
7314 /* Return true if CODE represents an associative tree code. Otherwise
7315 return false. */
7316 bool
7317 associative_tree_code (enum tree_code code)
7318 {
7319 switch (code)
7320 {
7321 case BIT_IOR_EXPR:
7322 case BIT_AND_EXPR:
7323 case BIT_XOR_EXPR:
7324 case PLUS_EXPR:
7325 case MULT_EXPR:
7326 case MIN_EXPR:
7327 case MAX_EXPR:
7328 return true;
7329
7330 default:
7331 break;
7332 }
7333 return false;
7334 }
7335
7336 /* Return true if CODE represents a commutative tree code. Otherwise
7337 return false. */
7338 bool
7339 commutative_tree_code (enum tree_code code)
7340 {
7341 switch (code)
7342 {
7343 case PLUS_EXPR:
7344 case MULT_EXPR:
7345 case MULT_HIGHPART_EXPR:
7346 case MIN_EXPR:
7347 case MAX_EXPR:
7348 case BIT_IOR_EXPR:
7349 case BIT_XOR_EXPR:
7350 case BIT_AND_EXPR:
7351 case NE_EXPR:
7352 case EQ_EXPR:
7353 case UNORDERED_EXPR:
7354 case ORDERED_EXPR:
7355 case UNEQ_EXPR:
7356 case LTGT_EXPR:
7357 case TRUTH_AND_EXPR:
7358 case TRUTH_XOR_EXPR:
7359 case TRUTH_OR_EXPR:
7360 case WIDEN_MULT_EXPR:
7361 case VEC_WIDEN_MULT_HI_EXPR:
7362 case VEC_WIDEN_MULT_LO_EXPR:
7363 case VEC_WIDEN_MULT_EVEN_EXPR:
7364 case VEC_WIDEN_MULT_ODD_EXPR:
7365 return true;
7366
7367 default:
7368 break;
7369 }
7370 return false;
7371 }
7372
7373 /* Return true if CODE represents a ternary tree code for which the
7374 first two operands are commutative. Otherwise return false. */
7375 bool
7376 commutative_ternary_tree_code (enum tree_code code)
7377 {
7378 switch (code)
7379 {
7380 case WIDEN_MULT_PLUS_EXPR:
7381 case WIDEN_MULT_MINUS_EXPR:
7382 return true;
7383
7384 default:
7385 break;
7386 }
7387 return false;
7388 }
7389
7390 namespace inchash
7391 {
7392
7393 /* Generate a hash value for an expression. This can be used iteratively
7394 by passing a previous result as the HSTATE argument.
7395
7396 This function is intended to produce the same hash for expressions which
7397 would compare equal using operand_equal_p. */
7398 void
7399 add_expr (const_tree t, inchash::hash &hstate)
7400 {
7401 int i;
7402 enum tree_code code;
7403 enum tree_code_class tclass;
7404
7405 if (t == NULL_TREE)
7406 {
7407 hstate.merge_hash (0);
7408 return;
7409 }
7410
7411 code = TREE_CODE (t);
7412
7413 switch (code)
7414 {
7415 /* Alas, constants aren't shared, so we can't rely on pointer
7416 identity. */
7417 case VOID_CST:
7418 hstate.merge_hash (0);
7419 return;
7420 case INTEGER_CST:
7421 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7422 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7423 return;
7424 case REAL_CST:
7425 {
7426 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7427 hstate.merge_hash (val2);
7428 return;
7429 }
7430 case FIXED_CST:
7431 {
7432 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7433 hstate.merge_hash (val2);
7434 return;
7435 }
7436 case STRING_CST:
7437 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7438 return;
7439 case COMPLEX_CST:
7440 inchash::add_expr (TREE_REALPART (t), hstate);
7441 inchash::add_expr (TREE_IMAGPART (t), hstate);
7442 return;
7443 case VECTOR_CST:
7444 {
7445 unsigned i;
7446 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7447 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7448 return;
7449 }
7450 case SSA_NAME:
7451 /* We can just compare by pointer. */
7452 hstate.add_wide_int (SSA_NAME_VERSION (t));
7453 return;
7454 case PLACEHOLDER_EXPR:
7455 /* The node itself doesn't matter. */
7456 return;
7457 case TREE_LIST:
7458 /* A list of expressions, for a CALL_EXPR or as the elements of a
7459 VECTOR_CST. */
7460 for (; t; t = TREE_CHAIN (t))
7461 inchash::add_expr (TREE_VALUE (t), hstate);
7462 return;
7463 case CONSTRUCTOR:
7464 {
7465 unsigned HOST_WIDE_INT idx;
7466 tree field, value;
7467 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7468 {
7469 inchash::add_expr (field, hstate);
7470 inchash::add_expr (value, hstate);
7471 }
7472 return;
7473 }
7474 case FUNCTION_DECL:
7475 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7476 Otherwise nodes that compare equal according to operand_equal_p might
7477 get different hash codes. However, don't do this for machine specific
7478 or front end builtins, since the function code is overloaded in those
7479 cases. */
7480 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7481 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7482 {
7483 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7484 code = TREE_CODE (t);
7485 }
7486 /* FALL THROUGH */
7487 default:
7488 tclass = TREE_CODE_CLASS (code);
7489
7490 if (tclass == tcc_declaration)
7491 {
7492 /* DECL's have a unique ID */
7493 hstate.add_wide_int (DECL_UID (t));
7494 }
7495 else
7496 {
7497 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7498
7499 hstate.add_object (code);
7500
7501 /* Don't hash the type, that can lead to having nodes which
7502 compare equal according to operand_equal_p, but which
7503 have different hash codes. */
7504 if (CONVERT_EXPR_CODE_P (code)
7505 || code == NON_LVALUE_EXPR)
7506 {
7507 /* Make sure to include signness in the hash computation. */
7508 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7509 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7510 }
7511
7512 else if (commutative_tree_code (code))
7513 {
7514 /* It's a commutative expression. We want to hash it the same
7515 however it appears. We do this by first hashing both operands
7516 and then rehashing based on the order of their independent
7517 hashes. */
7518 inchash::hash one, two;
7519 inchash::add_expr (TREE_OPERAND (t, 0), one);
7520 inchash::add_expr (TREE_OPERAND (t, 1), two);
7521 hstate.add_commutative (one, two);
7522 }
7523 else
7524 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7525 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7526 }
7527 return;
7528 }
7529 }
7530
7531 }
7532
7533 /* Constructors for pointer, array and function types.
7534 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7535 constructed by language-dependent code, not here.) */
7536
7537 /* Construct, lay out and return the type of pointers to TO_TYPE with
7538 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7539 reference all of memory. If such a type has already been
7540 constructed, reuse it. */
7541
7542 tree
7543 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7544 bool can_alias_all)
7545 {
7546 tree t;
7547
7548 if (to_type == error_mark_node)
7549 return error_mark_node;
7550
7551 /* If the pointed-to type has the may_alias attribute set, force
7552 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7553 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7554 can_alias_all = true;
7555
7556 /* In some cases, languages will have things that aren't a POINTER_TYPE
7557 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7558 In that case, return that type without regard to the rest of our
7559 operands.
7560
7561 ??? This is a kludge, but consistent with the way this function has
7562 always operated and there doesn't seem to be a good way to avoid this
7563 at the moment. */
7564 if (TYPE_POINTER_TO (to_type) != 0
7565 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7566 return TYPE_POINTER_TO (to_type);
7567
7568 /* First, if we already have a type for pointers to TO_TYPE and it's
7569 the proper mode, use it. */
7570 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7571 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7572 return t;
7573
7574 t = make_node (POINTER_TYPE);
7575
7576 TREE_TYPE (t) = to_type;
7577 SET_TYPE_MODE (t, mode);
7578 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7579 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7580 TYPE_POINTER_TO (to_type) = t;
7581
7582 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7583 SET_TYPE_STRUCTURAL_EQUALITY (t);
7584 else if (TYPE_CANONICAL (to_type) != to_type)
7585 TYPE_CANONICAL (t)
7586 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7587 mode, can_alias_all);
7588
7589 /* Lay out the type. This function has many callers that are concerned
7590 with expression-construction, and this simplifies them all. */
7591 layout_type (t);
7592
7593 return t;
7594 }
7595
7596 /* By default build pointers in ptr_mode. */
7597
7598 tree
7599 build_pointer_type (tree to_type)
7600 {
7601 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7602 : TYPE_ADDR_SPACE (to_type);
7603 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7604 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7605 }
7606
7607 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7608
7609 tree
7610 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7611 bool can_alias_all)
7612 {
7613 tree t;
7614
7615 if (to_type == error_mark_node)
7616 return error_mark_node;
7617
7618 /* If the pointed-to type has the may_alias attribute set, force
7619 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7620 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7621 can_alias_all = true;
7622
7623 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7624 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7625 In that case, return that type without regard to the rest of our
7626 operands.
7627
7628 ??? This is a kludge, but consistent with the way this function has
7629 always operated and there doesn't seem to be a good way to avoid this
7630 at the moment. */
7631 if (TYPE_REFERENCE_TO (to_type) != 0
7632 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7633 return TYPE_REFERENCE_TO (to_type);
7634
7635 /* First, if we already have a type for pointers to TO_TYPE and it's
7636 the proper mode, use it. */
7637 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7638 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7639 return t;
7640
7641 t = make_node (REFERENCE_TYPE);
7642
7643 TREE_TYPE (t) = to_type;
7644 SET_TYPE_MODE (t, mode);
7645 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7646 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7647 TYPE_REFERENCE_TO (to_type) = t;
7648
7649 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7650 SET_TYPE_STRUCTURAL_EQUALITY (t);
7651 else if (TYPE_CANONICAL (to_type) != to_type)
7652 TYPE_CANONICAL (t)
7653 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7654 mode, can_alias_all);
7655
7656 layout_type (t);
7657
7658 return t;
7659 }
7660
7661
7662 /* Build the node for the type of references-to-TO_TYPE by default
7663 in ptr_mode. */
7664
7665 tree
7666 build_reference_type (tree to_type)
7667 {
7668 addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
7669 : TYPE_ADDR_SPACE (to_type);
7670 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7671 return build_reference_type_for_mode (to_type, pointer_mode, false);
7672 }
7673
7674 #define MAX_INT_CACHED_PREC \
7675 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7676 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7677
7678 /* Builds a signed or unsigned integer type of precision PRECISION.
7679 Used for C bitfields whose precision does not match that of
7680 built-in target types. */
7681 tree
7682 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7683 int unsignedp)
7684 {
7685 tree itype, ret;
7686
7687 if (unsignedp)
7688 unsignedp = MAX_INT_CACHED_PREC + 1;
7689
7690 if (precision <= MAX_INT_CACHED_PREC)
7691 {
7692 itype = nonstandard_integer_type_cache[precision + unsignedp];
7693 if (itype)
7694 return itype;
7695 }
7696
7697 itype = make_node (INTEGER_TYPE);
7698 TYPE_PRECISION (itype) = precision;
7699
7700 if (unsignedp)
7701 fixup_unsigned_type (itype);
7702 else
7703 fixup_signed_type (itype);
7704
7705 ret = itype;
7706 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7707 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7708 if (precision <= MAX_INT_CACHED_PREC)
7709 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7710
7711 return ret;
7712 }
7713
7714 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7715 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7716 is true, reuse such a type that has already been constructed. */
7717
7718 static tree
7719 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7720 {
7721 tree itype = make_node (INTEGER_TYPE);
7722 inchash::hash hstate;
7723
7724 TREE_TYPE (itype) = type;
7725
7726 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7727 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7728
7729 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7730 SET_TYPE_MODE (itype, TYPE_MODE (type));
7731 TYPE_SIZE (itype) = TYPE_SIZE (type);
7732 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7733 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7734 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7735
7736 if (!shared)
7737 return itype;
7738
7739 if ((TYPE_MIN_VALUE (itype)
7740 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7741 || (TYPE_MAX_VALUE (itype)
7742 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7743 {
7744 /* Since we cannot reliably merge this type, we need to compare it using
7745 structural equality checks. */
7746 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7747 return itype;
7748 }
7749
7750 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7751 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7752 hstate.merge_hash (TYPE_HASH (type));
7753 itype = type_hash_canon (hstate.end (), itype);
7754
7755 return itype;
7756 }
7757
7758 /* Wrapper around build_range_type_1 with SHARED set to true. */
7759
7760 tree
7761 build_range_type (tree type, tree lowval, tree highval)
7762 {
7763 return build_range_type_1 (type, lowval, highval, true);
7764 }
7765
7766 /* Wrapper around build_range_type_1 with SHARED set to false. */
7767
7768 tree
7769 build_nonshared_range_type (tree type, tree lowval, tree highval)
7770 {
7771 return build_range_type_1 (type, lowval, highval, false);
7772 }
7773
7774 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7775 MAXVAL should be the maximum value in the domain
7776 (one less than the length of the array).
7777
7778 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7779 We don't enforce this limit, that is up to caller (e.g. language front end).
7780 The limit exists because the result is a signed type and we don't handle
7781 sizes that use more than one HOST_WIDE_INT. */
7782
7783 tree
7784 build_index_type (tree maxval)
7785 {
7786 return build_range_type (sizetype, size_zero_node, maxval);
7787 }
7788
7789 /* Return true if the debug information for TYPE, a subtype, should be emitted
7790 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7791 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7792 debug info and doesn't reflect the source code. */
7793
7794 bool
7795 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7796 {
7797 tree base_type = TREE_TYPE (type), low, high;
7798
7799 /* Subrange types have a base type which is an integral type. */
7800 if (!INTEGRAL_TYPE_P (base_type))
7801 return false;
7802
7803 /* Get the real bounds of the subtype. */
7804 if (lang_hooks.types.get_subrange_bounds)
7805 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7806 else
7807 {
7808 low = TYPE_MIN_VALUE (type);
7809 high = TYPE_MAX_VALUE (type);
7810 }
7811
7812 /* If the type and its base type have the same representation and the same
7813 name, then the type is not a subrange but a copy of the base type. */
7814 if ((TREE_CODE (base_type) == INTEGER_TYPE
7815 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7816 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7817 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7818 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7819 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7820 return false;
7821
7822 if (lowval)
7823 *lowval = low;
7824 if (highval)
7825 *highval = high;
7826 return true;
7827 }
7828
7829 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7830 and number of elements specified by the range of values of INDEX_TYPE.
7831 If SHARED is true, reuse such a type that has already been constructed. */
7832
7833 static tree
7834 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7835 {
7836 tree t;
7837
7838 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7839 {
7840 error ("arrays of functions are not meaningful");
7841 elt_type = integer_type_node;
7842 }
7843
7844 t = make_node (ARRAY_TYPE);
7845 TREE_TYPE (t) = elt_type;
7846 TYPE_DOMAIN (t) = index_type;
7847 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7848 layout_type (t);
7849
7850 /* If the element type is incomplete at this point we get marked for
7851 structural equality. Do not record these types in the canonical
7852 type hashtable. */
7853 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7854 return t;
7855
7856 if (shared)
7857 {
7858 inchash::hash hstate;
7859 hstate.add_object (TYPE_HASH (elt_type));
7860 if (index_type)
7861 hstate.add_object (TYPE_HASH (index_type));
7862 t = type_hash_canon (hstate.end (), t);
7863 }
7864
7865 if (TYPE_CANONICAL (t) == t)
7866 {
7867 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7868 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7869 SET_TYPE_STRUCTURAL_EQUALITY (t);
7870 else if (TYPE_CANONICAL (elt_type) != elt_type
7871 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7872 TYPE_CANONICAL (t)
7873 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7874 index_type
7875 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7876 shared);
7877 }
7878
7879 return t;
7880 }
7881
7882 /* Wrapper around build_array_type_1 with SHARED set to true. */
7883
7884 tree
7885 build_array_type (tree elt_type, tree index_type)
7886 {
7887 return build_array_type_1 (elt_type, index_type, true);
7888 }
7889
7890 /* Wrapper around build_array_type_1 with SHARED set to false. */
7891
7892 tree
7893 build_nonshared_array_type (tree elt_type, tree index_type)
7894 {
7895 return build_array_type_1 (elt_type, index_type, false);
7896 }
7897
7898 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7899 sizetype. */
7900
7901 tree
7902 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7903 {
7904 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7905 }
7906
7907 /* Recursively examines the array elements of TYPE, until a non-array
7908 element type is found. */
7909
7910 tree
7911 strip_array_types (tree type)
7912 {
7913 while (TREE_CODE (type) == ARRAY_TYPE)
7914 type = TREE_TYPE (type);
7915
7916 return type;
7917 }
7918
7919 /* Computes the canonical argument types from the argument type list
7920 ARGTYPES.
7921
7922 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7923 on entry to this function, or if any of the ARGTYPES are
7924 structural.
7925
7926 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7927 true on entry to this function, or if any of the ARGTYPES are
7928 non-canonical.
7929
7930 Returns a canonical argument list, which may be ARGTYPES when the
7931 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7932 true) or would not differ from ARGTYPES. */
7933
7934 static tree
7935 maybe_canonicalize_argtypes (tree argtypes,
7936 bool *any_structural_p,
7937 bool *any_noncanonical_p)
7938 {
7939 tree arg;
7940 bool any_noncanonical_argtypes_p = false;
7941
7942 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7943 {
7944 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7945 /* Fail gracefully by stating that the type is structural. */
7946 *any_structural_p = true;
7947 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7948 *any_structural_p = true;
7949 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7950 || TREE_PURPOSE (arg))
7951 /* If the argument has a default argument, we consider it
7952 non-canonical even though the type itself is canonical.
7953 That way, different variants of function and method types
7954 with default arguments will all point to the variant with
7955 no defaults as their canonical type. */
7956 any_noncanonical_argtypes_p = true;
7957 }
7958
7959 if (*any_structural_p)
7960 return argtypes;
7961
7962 if (any_noncanonical_argtypes_p)
7963 {
7964 /* Build the canonical list of argument types. */
7965 tree canon_argtypes = NULL_TREE;
7966 bool is_void = false;
7967
7968 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7969 {
7970 if (arg == void_list_node)
7971 is_void = true;
7972 else
7973 canon_argtypes = tree_cons (NULL_TREE,
7974 TYPE_CANONICAL (TREE_VALUE (arg)),
7975 canon_argtypes);
7976 }
7977
7978 canon_argtypes = nreverse (canon_argtypes);
7979 if (is_void)
7980 canon_argtypes = chainon (canon_argtypes, void_list_node);
7981
7982 /* There is a non-canonical type. */
7983 *any_noncanonical_p = true;
7984 return canon_argtypes;
7985 }
7986
7987 /* The canonical argument types are the same as ARGTYPES. */
7988 return argtypes;
7989 }
7990
7991 /* Construct, lay out and return
7992 the type of functions returning type VALUE_TYPE
7993 given arguments of types ARG_TYPES.
7994 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7995 are data type nodes for the arguments of the function.
7996 If such a type has already been constructed, reuse it. */
7997
7998 tree
7999 build_function_type (tree value_type, tree arg_types)
8000 {
8001 tree t;
8002 inchash::hash hstate;
8003 bool any_structural_p, any_noncanonical_p;
8004 tree canon_argtypes;
8005
8006 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8007 {
8008 error ("function return type cannot be function");
8009 value_type = integer_type_node;
8010 }
8011
8012 /* Make a node of the sort we want. */
8013 t = make_node (FUNCTION_TYPE);
8014 TREE_TYPE (t) = value_type;
8015 TYPE_ARG_TYPES (t) = arg_types;
8016
8017 /* If we already have such a type, use the old one. */
8018 hstate.add_object (TYPE_HASH (value_type));
8019 type_hash_list (arg_types, hstate);
8020 t = type_hash_canon (hstate.end (), t);
8021
8022 /* Set up the canonical type. */
8023 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8024 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8025 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8026 &any_structural_p,
8027 &any_noncanonical_p);
8028 if (any_structural_p)
8029 SET_TYPE_STRUCTURAL_EQUALITY (t);
8030 else if (any_noncanonical_p)
8031 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8032 canon_argtypes);
8033
8034 if (!COMPLETE_TYPE_P (t))
8035 layout_type (t);
8036 return t;
8037 }
8038
8039 /* Build a function type. The RETURN_TYPE is the type returned by the
8040 function. If VAARGS is set, no void_type_node is appended to the
8041 the list. ARGP must be always be terminated be a NULL_TREE. */
8042
8043 static tree
8044 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8045 {
8046 tree t, args, last;
8047
8048 t = va_arg (argp, tree);
8049 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8050 args = tree_cons (NULL_TREE, t, args);
8051
8052 if (vaargs)
8053 {
8054 last = args;
8055 if (args != NULL_TREE)
8056 args = nreverse (args);
8057 gcc_assert (last != void_list_node);
8058 }
8059 else if (args == NULL_TREE)
8060 args = void_list_node;
8061 else
8062 {
8063 last = args;
8064 args = nreverse (args);
8065 TREE_CHAIN (last) = void_list_node;
8066 }
8067 args = build_function_type (return_type, args);
8068
8069 return args;
8070 }
8071
8072 /* Build a function type. The RETURN_TYPE is the type returned by the
8073 function. If additional arguments are provided, they are
8074 additional argument types. The list of argument types must always
8075 be terminated by NULL_TREE. */
8076
8077 tree
8078 build_function_type_list (tree return_type, ...)
8079 {
8080 tree args;
8081 va_list p;
8082
8083 va_start (p, return_type);
8084 args = build_function_type_list_1 (false, return_type, p);
8085 va_end (p);
8086 return args;
8087 }
8088
8089 /* Build a variable argument function type. The RETURN_TYPE is the
8090 type returned by the function. If additional arguments are provided,
8091 they are additional argument types. The list of argument types must
8092 always be terminated by NULL_TREE. */
8093
8094 tree
8095 build_varargs_function_type_list (tree return_type, ...)
8096 {
8097 tree args;
8098 va_list p;
8099
8100 va_start (p, return_type);
8101 args = build_function_type_list_1 (true, return_type, p);
8102 va_end (p);
8103
8104 return args;
8105 }
8106
8107 /* Build a function type. RETURN_TYPE is the type returned by the
8108 function; VAARGS indicates whether the function takes varargs. The
8109 function takes N named arguments, the types of which are provided in
8110 ARG_TYPES. */
8111
8112 static tree
8113 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8114 tree *arg_types)
8115 {
8116 int i;
8117 tree t = vaargs ? NULL_TREE : void_list_node;
8118
8119 for (i = n - 1; i >= 0; i--)
8120 t = tree_cons (NULL_TREE, arg_types[i], t);
8121
8122 return build_function_type (return_type, t);
8123 }
8124
8125 /* Build a function type. RETURN_TYPE is the type returned by the
8126 function. The function takes N named arguments, the types of which
8127 are provided in ARG_TYPES. */
8128
8129 tree
8130 build_function_type_array (tree return_type, int n, tree *arg_types)
8131 {
8132 return build_function_type_array_1 (false, return_type, n, arg_types);
8133 }
8134
8135 /* Build a variable argument function type. RETURN_TYPE is the type
8136 returned by the function. The function takes N named arguments, the
8137 types of which are provided in ARG_TYPES. */
8138
8139 tree
8140 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8141 {
8142 return build_function_type_array_1 (true, return_type, n, arg_types);
8143 }
8144
8145 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8146 and ARGTYPES (a TREE_LIST) are the return type and arguments types
8147 for the method. An implicit additional parameter (of type
8148 pointer-to-BASETYPE) is added to the ARGTYPES. */
8149
8150 tree
8151 build_method_type_directly (tree basetype,
8152 tree rettype,
8153 tree argtypes)
8154 {
8155 tree t;
8156 tree ptype;
8157 inchash::hash hstate;
8158 bool any_structural_p, any_noncanonical_p;
8159 tree canon_argtypes;
8160
8161 /* Make a node of the sort we want. */
8162 t = make_node (METHOD_TYPE);
8163
8164 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8165 TREE_TYPE (t) = rettype;
8166 ptype = build_pointer_type (basetype);
8167
8168 /* The actual arglist for this function includes a "hidden" argument
8169 which is "this". Put it into the list of argument types. */
8170 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8171 TYPE_ARG_TYPES (t) = argtypes;
8172
8173 /* If we already have such a type, use the old one. */
8174 hstate.add_object (TYPE_HASH (basetype));
8175 hstate.add_object (TYPE_HASH (rettype));
8176 type_hash_list (argtypes, hstate);
8177 t = type_hash_canon (hstate.end (), t);
8178
8179 /* Set up the canonical type. */
8180 any_structural_p
8181 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8182 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8183 any_noncanonical_p
8184 = (TYPE_CANONICAL (basetype) != basetype
8185 || TYPE_CANONICAL (rettype) != rettype);
8186 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8187 &any_structural_p,
8188 &any_noncanonical_p);
8189 if (any_structural_p)
8190 SET_TYPE_STRUCTURAL_EQUALITY (t);
8191 else if (any_noncanonical_p)
8192 TYPE_CANONICAL (t)
8193 = build_method_type_directly (TYPE_CANONICAL (basetype),
8194 TYPE_CANONICAL (rettype),
8195 canon_argtypes);
8196 if (!COMPLETE_TYPE_P (t))
8197 layout_type (t);
8198
8199 return t;
8200 }
8201
8202 /* Construct, lay out and return the type of methods belonging to class
8203 BASETYPE and whose arguments and values are described by TYPE.
8204 If that type exists already, reuse it.
8205 TYPE must be a FUNCTION_TYPE node. */
8206
8207 tree
8208 build_method_type (tree basetype, tree type)
8209 {
8210 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8211
8212 return build_method_type_directly (basetype,
8213 TREE_TYPE (type),
8214 TYPE_ARG_TYPES (type));
8215 }
8216
8217 /* Construct, lay out and return the type of offsets to a value
8218 of type TYPE, within an object of type BASETYPE.
8219 If a suitable offset type exists already, reuse it. */
8220
8221 tree
8222 build_offset_type (tree basetype, tree type)
8223 {
8224 tree t;
8225 inchash::hash hstate;
8226
8227 /* Make a node of the sort we want. */
8228 t = make_node (OFFSET_TYPE);
8229
8230 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8231 TREE_TYPE (t) = type;
8232
8233 /* If we already have such a type, use the old one. */
8234 hstate.add_object (TYPE_HASH (basetype));
8235 hstate.add_object (TYPE_HASH (type));
8236 t = type_hash_canon (hstate.end (), t);
8237
8238 if (!COMPLETE_TYPE_P (t))
8239 layout_type (t);
8240
8241 if (TYPE_CANONICAL (t) == t)
8242 {
8243 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8244 || TYPE_STRUCTURAL_EQUALITY_P (type))
8245 SET_TYPE_STRUCTURAL_EQUALITY (t);
8246 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8247 || TYPE_CANONICAL (type) != type)
8248 TYPE_CANONICAL (t)
8249 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8250 TYPE_CANONICAL (type));
8251 }
8252
8253 return t;
8254 }
8255
8256 /* Create a complex type whose components are COMPONENT_TYPE. */
8257
8258 tree
8259 build_complex_type (tree component_type)
8260 {
8261 tree t;
8262 inchash::hash hstate;
8263
8264 gcc_assert (INTEGRAL_TYPE_P (component_type)
8265 || SCALAR_FLOAT_TYPE_P (component_type)
8266 || FIXED_POINT_TYPE_P (component_type));
8267
8268 /* Make a node of the sort we want. */
8269 t = make_node (COMPLEX_TYPE);
8270
8271 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8272
8273 /* If we already have such a type, use the old one. */
8274 hstate.add_object (TYPE_HASH (component_type));
8275 t = type_hash_canon (hstate.end (), t);
8276
8277 if (!COMPLETE_TYPE_P (t))
8278 layout_type (t);
8279
8280 if (TYPE_CANONICAL (t) == t)
8281 {
8282 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8283 SET_TYPE_STRUCTURAL_EQUALITY (t);
8284 else if (TYPE_CANONICAL (component_type) != component_type)
8285 TYPE_CANONICAL (t)
8286 = build_complex_type (TYPE_CANONICAL (component_type));
8287 }
8288
8289 /* We need to create a name, since complex is a fundamental type. */
8290 if (! TYPE_NAME (t))
8291 {
8292 const char *name;
8293 if (component_type == char_type_node)
8294 name = "complex char";
8295 else if (component_type == signed_char_type_node)
8296 name = "complex signed char";
8297 else if (component_type == unsigned_char_type_node)
8298 name = "complex unsigned char";
8299 else if (component_type == short_integer_type_node)
8300 name = "complex short int";
8301 else if (component_type == short_unsigned_type_node)
8302 name = "complex short unsigned int";
8303 else if (component_type == integer_type_node)
8304 name = "complex int";
8305 else if (component_type == unsigned_type_node)
8306 name = "complex unsigned int";
8307 else if (component_type == long_integer_type_node)
8308 name = "complex long int";
8309 else if (component_type == long_unsigned_type_node)
8310 name = "complex long unsigned int";
8311 else if (component_type == long_long_integer_type_node)
8312 name = "complex long long int";
8313 else if (component_type == long_long_unsigned_type_node)
8314 name = "complex long long unsigned int";
8315 else
8316 name = 0;
8317
8318 if (name != 0)
8319 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8320 get_identifier (name), t);
8321 }
8322
8323 return build_qualified_type (t, TYPE_QUALS (component_type));
8324 }
8325
8326 /* If TYPE is a real or complex floating-point type and the target
8327 does not directly support arithmetic on TYPE then return the wider
8328 type to be used for arithmetic on TYPE. Otherwise, return
8329 NULL_TREE. */
8330
8331 tree
8332 excess_precision_type (tree type)
8333 {
8334 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8335 {
8336 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8337 switch (TREE_CODE (type))
8338 {
8339 case REAL_TYPE:
8340 switch (flt_eval_method)
8341 {
8342 case 1:
8343 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8344 return double_type_node;
8345 break;
8346 case 2:
8347 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8348 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8349 return long_double_type_node;
8350 break;
8351 default:
8352 gcc_unreachable ();
8353 }
8354 break;
8355 case COMPLEX_TYPE:
8356 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8357 return NULL_TREE;
8358 switch (flt_eval_method)
8359 {
8360 case 1:
8361 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8362 return complex_double_type_node;
8363 break;
8364 case 2:
8365 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8366 || (TYPE_MODE (TREE_TYPE (type))
8367 == TYPE_MODE (double_type_node)))
8368 return complex_long_double_type_node;
8369 break;
8370 default:
8371 gcc_unreachable ();
8372 }
8373 break;
8374 default:
8375 break;
8376 }
8377 }
8378 return NULL_TREE;
8379 }
8380 \f
8381 /* Return OP, stripped of any conversions to wider types as much as is safe.
8382 Converting the value back to OP's type makes a value equivalent to OP.
8383
8384 If FOR_TYPE is nonzero, we return a value which, if converted to
8385 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8386
8387 OP must have integer, real or enumeral type. Pointers are not allowed!
8388
8389 There are some cases where the obvious value we could return
8390 would regenerate to OP if converted to OP's type,
8391 but would not extend like OP to wider types.
8392 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8393 For example, if OP is (unsigned short)(signed char)-1,
8394 we avoid returning (signed char)-1 if FOR_TYPE is int,
8395 even though extending that to an unsigned short would regenerate OP,
8396 since the result of extending (signed char)-1 to (int)
8397 is different from (int) OP. */
8398
8399 tree
8400 get_unwidened (tree op, tree for_type)
8401 {
8402 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8403 tree type = TREE_TYPE (op);
8404 unsigned final_prec
8405 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8406 int uns
8407 = (for_type != 0 && for_type != type
8408 && final_prec > TYPE_PRECISION (type)
8409 && TYPE_UNSIGNED (type));
8410 tree win = op;
8411
8412 while (CONVERT_EXPR_P (op))
8413 {
8414 int bitschange;
8415
8416 /* TYPE_PRECISION on vector types has different meaning
8417 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8418 so avoid them here. */
8419 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8420 break;
8421
8422 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8423 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8424
8425 /* Truncations are many-one so cannot be removed.
8426 Unless we are later going to truncate down even farther. */
8427 if (bitschange < 0
8428 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8429 break;
8430
8431 /* See what's inside this conversion. If we decide to strip it,
8432 we will set WIN. */
8433 op = TREE_OPERAND (op, 0);
8434
8435 /* If we have not stripped any zero-extensions (uns is 0),
8436 we can strip any kind of extension.
8437 If we have previously stripped a zero-extension,
8438 only zero-extensions can safely be stripped.
8439 Any extension can be stripped if the bits it would produce
8440 are all going to be discarded later by truncating to FOR_TYPE. */
8441
8442 if (bitschange > 0)
8443 {
8444 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8445 win = op;
8446 /* TYPE_UNSIGNED says whether this is a zero-extension.
8447 Let's avoid computing it if it does not affect WIN
8448 and if UNS will not be needed again. */
8449 if ((uns
8450 || CONVERT_EXPR_P (op))
8451 && TYPE_UNSIGNED (TREE_TYPE (op)))
8452 {
8453 uns = 1;
8454 win = op;
8455 }
8456 }
8457 }
8458
8459 /* If we finally reach a constant see if it fits in for_type and
8460 in that case convert it. */
8461 if (for_type
8462 && TREE_CODE (win) == INTEGER_CST
8463 && TREE_TYPE (win) != for_type
8464 && int_fits_type_p (win, for_type))
8465 win = fold_convert (for_type, win);
8466
8467 return win;
8468 }
8469 \f
8470 /* Return OP or a simpler expression for a narrower value
8471 which can be sign-extended or zero-extended to give back OP.
8472 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8473 or 0 if the value should be sign-extended. */
8474
8475 tree
8476 get_narrower (tree op, int *unsignedp_ptr)
8477 {
8478 int uns = 0;
8479 int first = 1;
8480 tree win = op;
8481 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8482
8483 while (TREE_CODE (op) == NOP_EXPR)
8484 {
8485 int bitschange
8486 = (TYPE_PRECISION (TREE_TYPE (op))
8487 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8488
8489 /* Truncations are many-one so cannot be removed. */
8490 if (bitschange < 0)
8491 break;
8492
8493 /* See what's inside this conversion. If we decide to strip it,
8494 we will set WIN. */
8495
8496 if (bitschange > 0)
8497 {
8498 op = TREE_OPERAND (op, 0);
8499 /* An extension: the outermost one can be stripped,
8500 but remember whether it is zero or sign extension. */
8501 if (first)
8502 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8503 /* Otherwise, if a sign extension has been stripped,
8504 only sign extensions can now be stripped;
8505 if a zero extension has been stripped, only zero-extensions. */
8506 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8507 break;
8508 first = 0;
8509 }
8510 else /* bitschange == 0 */
8511 {
8512 /* A change in nominal type can always be stripped, but we must
8513 preserve the unsignedness. */
8514 if (first)
8515 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8516 first = 0;
8517 op = TREE_OPERAND (op, 0);
8518 /* Keep trying to narrow, but don't assign op to win if it
8519 would turn an integral type into something else. */
8520 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8521 continue;
8522 }
8523
8524 win = op;
8525 }
8526
8527 if (TREE_CODE (op) == COMPONENT_REF
8528 /* Since type_for_size always gives an integer type. */
8529 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8530 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8531 /* Ensure field is laid out already. */
8532 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8533 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8534 {
8535 unsigned HOST_WIDE_INT innerprec
8536 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8537 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8538 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8539 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8540
8541 /* We can get this structure field in a narrower type that fits it,
8542 but the resulting extension to its nominal type (a fullword type)
8543 must satisfy the same conditions as for other extensions.
8544
8545 Do this only for fields that are aligned (not bit-fields),
8546 because when bit-field insns will be used there is no
8547 advantage in doing this. */
8548
8549 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8550 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8551 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8552 && type != 0)
8553 {
8554 if (first)
8555 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8556 win = fold_convert (type, op);
8557 }
8558 }
8559
8560 *unsignedp_ptr = uns;
8561 return win;
8562 }
8563 \f
8564 /* Returns true if integer constant C has a value that is permissible
8565 for type TYPE (an INTEGER_TYPE). */
8566
8567 bool
8568 int_fits_type_p (const_tree c, const_tree type)
8569 {
8570 tree type_low_bound, type_high_bound;
8571 bool ok_for_low_bound, ok_for_high_bound;
8572 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8573
8574 retry:
8575 type_low_bound = TYPE_MIN_VALUE (type);
8576 type_high_bound = TYPE_MAX_VALUE (type);
8577
8578 /* If at least one bound of the type is a constant integer, we can check
8579 ourselves and maybe make a decision. If no such decision is possible, but
8580 this type is a subtype, try checking against that. Otherwise, use
8581 fits_to_tree_p, which checks against the precision.
8582
8583 Compute the status for each possibly constant bound, and return if we see
8584 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8585 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8586 for "constant known to fit". */
8587
8588 /* Check if c >= type_low_bound. */
8589 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8590 {
8591 if (tree_int_cst_lt (c, type_low_bound))
8592 return false;
8593 ok_for_low_bound = true;
8594 }
8595 else
8596 ok_for_low_bound = false;
8597
8598 /* Check if c <= type_high_bound. */
8599 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8600 {
8601 if (tree_int_cst_lt (type_high_bound, c))
8602 return false;
8603 ok_for_high_bound = true;
8604 }
8605 else
8606 ok_for_high_bound = false;
8607
8608 /* If the constant fits both bounds, the result is known. */
8609 if (ok_for_low_bound && ok_for_high_bound)
8610 return true;
8611
8612 /* Perform some generic filtering which may allow making a decision
8613 even if the bounds are not constant. First, negative integers
8614 never fit in unsigned types, */
8615 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8616 return false;
8617
8618 /* Second, narrower types always fit in wider ones. */
8619 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8620 return true;
8621
8622 /* Third, unsigned integers with top bit set never fit signed types. */
8623 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8624 {
8625 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8626 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8627 {
8628 /* When a tree_cst is converted to a wide-int, the precision
8629 is taken from the type. However, if the precision of the
8630 mode underneath the type is smaller than that, it is
8631 possible that the value will not fit. The test below
8632 fails if any bit is set between the sign bit of the
8633 underlying mode and the top bit of the type. */
8634 if (wi::ne_p (wi::zext (c, prec - 1), c))
8635 return false;
8636 }
8637 else if (wi::neg_p (c))
8638 return false;
8639 }
8640
8641 /* If we haven't been able to decide at this point, there nothing more we
8642 can check ourselves here. Look at the base type if we have one and it
8643 has the same precision. */
8644 if (TREE_CODE (type) == INTEGER_TYPE
8645 && TREE_TYPE (type) != 0
8646 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8647 {
8648 type = TREE_TYPE (type);
8649 goto retry;
8650 }
8651
8652 /* Or to fits_to_tree_p, if nothing else. */
8653 return wi::fits_to_tree_p (c, type);
8654 }
8655
8656 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8657 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8658 represented (assuming two's-complement arithmetic) within the bit
8659 precision of the type are returned instead. */
8660
8661 void
8662 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8663 {
8664 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8665 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8666 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8667 else
8668 {
8669 if (TYPE_UNSIGNED (type))
8670 mpz_set_ui (min, 0);
8671 else
8672 {
8673 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8674 wi::to_mpz (mn, min, SIGNED);
8675 }
8676 }
8677
8678 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8679 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8680 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8681 else
8682 {
8683 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8684 wi::to_mpz (mn, max, TYPE_SIGN (type));
8685 }
8686 }
8687
8688 /* Return true if VAR is an automatic variable defined in function FN. */
8689
8690 bool
8691 auto_var_in_fn_p (const_tree var, const_tree fn)
8692 {
8693 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8694 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8695 || TREE_CODE (var) == PARM_DECL)
8696 && ! TREE_STATIC (var))
8697 || TREE_CODE (var) == LABEL_DECL
8698 || TREE_CODE (var) == RESULT_DECL));
8699 }
8700
8701 /* Subprogram of following function. Called by walk_tree.
8702
8703 Return *TP if it is an automatic variable or parameter of the
8704 function passed in as DATA. */
8705
8706 static tree
8707 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8708 {
8709 tree fn = (tree) data;
8710
8711 if (TYPE_P (*tp))
8712 *walk_subtrees = 0;
8713
8714 else if (DECL_P (*tp)
8715 && auto_var_in_fn_p (*tp, fn))
8716 return *tp;
8717
8718 return NULL_TREE;
8719 }
8720
8721 /* Returns true if T is, contains, or refers to a type with variable
8722 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8723 arguments, but not the return type. If FN is nonzero, only return
8724 true if a modifier of the type or position of FN is a variable or
8725 parameter inside FN.
8726
8727 This concept is more general than that of C99 'variably modified types':
8728 in C99, a struct type is never variably modified because a VLA may not
8729 appear as a structure member. However, in GNU C code like:
8730
8731 struct S { int i[f()]; };
8732
8733 is valid, and other languages may define similar constructs. */
8734
8735 bool
8736 variably_modified_type_p (tree type, tree fn)
8737 {
8738 tree t;
8739
8740 /* Test if T is either variable (if FN is zero) or an expression containing
8741 a variable in FN. If TYPE isn't gimplified, return true also if
8742 gimplify_one_sizepos would gimplify the expression into a local
8743 variable. */
8744 #define RETURN_TRUE_IF_VAR(T) \
8745 do { tree _t = (T); \
8746 if (_t != NULL_TREE \
8747 && _t != error_mark_node \
8748 && TREE_CODE (_t) != INTEGER_CST \
8749 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8750 && (!fn \
8751 || (!TYPE_SIZES_GIMPLIFIED (type) \
8752 && !is_gimple_sizepos (_t)) \
8753 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8754 return true; } while (0)
8755
8756 if (type == error_mark_node)
8757 return false;
8758
8759 /* If TYPE itself has variable size, it is variably modified. */
8760 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8761 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8762
8763 switch (TREE_CODE (type))
8764 {
8765 case POINTER_TYPE:
8766 case REFERENCE_TYPE:
8767 case VECTOR_TYPE:
8768 if (variably_modified_type_p (TREE_TYPE (type), fn))
8769 return true;
8770 break;
8771
8772 case FUNCTION_TYPE:
8773 case METHOD_TYPE:
8774 /* If TYPE is a function type, it is variably modified if the
8775 return type is variably modified. */
8776 if (variably_modified_type_p (TREE_TYPE (type), fn))
8777 return true;
8778 break;
8779
8780 case INTEGER_TYPE:
8781 case REAL_TYPE:
8782 case FIXED_POINT_TYPE:
8783 case ENUMERAL_TYPE:
8784 case BOOLEAN_TYPE:
8785 /* Scalar types are variably modified if their end points
8786 aren't constant. */
8787 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8788 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8789 break;
8790
8791 case RECORD_TYPE:
8792 case UNION_TYPE:
8793 case QUAL_UNION_TYPE:
8794 /* We can't see if any of the fields are variably-modified by the
8795 definition we normally use, since that would produce infinite
8796 recursion via pointers. */
8797 /* This is variably modified if some field's type is. */
8798 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8799 if (TREE_CODE (t) == FIELD_DECL)
8800 {
8801 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8802 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8803 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8804
8805 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8806 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8807 }
8808 break;
8809
8810 case ARRAY_TYPE:
8811 /* Do not call ourselves to avoid infinite recursion. This is
8812 variably modified if the element type is. */
8813 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8814 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8815 break;
8816
8817 default:
8818 break;
8819 }
8820
8821 /* The current language may have other cases to check, but in general,
8822 all other types are not variably modified. */
8823 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8824
8825 #undef RETURN_TRUE_IF_VAR
8826 }
8827
8828 /* Given a DECL or TYPE, return the scope in which it was declared, or
8829 NULL_TREE if there is no containing scope. */
8830
8831 tree
8832 get_containing_scope (const_tree t)
8833 {
8834 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8835 }
8836
8837 /* Return the innermost context enclosing DECL that is
8838 a FUNCTION_DECL, or zero if none. */
8839
8840 tree
8841 decl_function_context (const_tree decl)
8842 {
8843 tree context;
8844
8845 if (TREE_CODE (decl) == ERROR_MARK)
8846 return 0;
8847
8848 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8849 where we look up the function at runtime. Such functions always take
8850 a first argument of type 'pointer to real context'.
8851
8852 C++ should really be fixed to use DECL_CONTEXT for the real context,
8853 and use something else for the "virtual context". */
8854 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8855 context
8856 = TYPE_MAIN_VARIANT
8857 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8858 else
8859 context = DECL_CONTEXT (decl);
8860
8861 while (context && TREE_CODE (context) != FUNCTION_DECL)
8862 {
8863 if (TREE_CODE (context) == BLOCK)
8864 context = BLOCK_SUPERCONTEXT (context);
8865 else
8866 context = get_containing_scope (context);
8867 }
8868
8869 return context;
8870 }
8871
8872 /* Return the innermost context enclosing DECL that is
8873 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8874 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8875
8876 tree
8877 decl_type_context (const_tree decl)
8878 {
8879 tree context = DECL_CONTEXT (decl);
8880
8881 while (context)
8882 switch (TREE_CODE (context))
8883 {
8884 case NAMESPACE_DECL:
8885 case TRANSLATION_UNIT_DECL:
8886 return NULL_TREE;
8887
8888 case RECORD_TYPE:
8889 case UNION_TYPE:
8890 case QUAL_UNION_TYPE:
8891 return context;
8892
8893 case TYPE_DECL:
8894 case FUNCTION_DECL:
8895 context = DECL_CONTEXT (context);
8896 break;
8897
8898 case BLOCK:
8899 context = BLOCK_SUPERCONTEXT (context);
8900 break;
8901
8902 default:
8903 gcc_unreachable ();
8904 }
8905
8906 return NULL_TREE;
8907 }
8908
8909 /* CALL is a CALL_EXPR. Return the declaration for the function
8910 called, or NULL_TREE if the called function cannot be
8911 determined. */
8912
8913 tree
8914 get_callee_fndecl (const_tree call)
8915 {
8916 tree addr;
8917
8918 if (call == error_mark_node)
8919 return error_mark_node;
8920
8921 /* It's invalid to call this function with anything but a
8922 CALL_EXPR. */
8923 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8924
8925 /* The first operand to the CALL is the address of the function
8926 called. */
8927 addr = CALL_EXPR_FN (call);
8928
8929 /* If there is no function, return early. */
8930 if (addr == NULL_TREE)
8931 return NULL_TREE;
8932
8933 STRIP_NOPS (addr);
8934
8935 /* If this is a readonly function pointer, extract its initial value. */
8936 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8937 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8938 && DECL_INITIAL (addr))
8939 addr = DECL_INITIAL (addr);
8940
8941 /* If the address is just `&f' for some function `f', then we know
8942 that `f' is being called. */
8943 if (TREE_CODE (addr) == ADDR_EXPR
8944 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8945 return TREE_OPERAND (addr, 0);
8946
8947 /* We couldn't figure out what was being called. */
8948 return NULL_TREE;
8949 }
8950
8951 /* Print debugging information about tree nodes generated during the compile,
8952 and any language-specific information. */
8953
8954 void
8955 dump_tree_statistics (void)
8956 {
8957 if (GATHER_STATISTICS)
8958 {
8959 int i;
8960 int total_nodes, total_bytes;
8961 fprintf (stderr, "Kind Nodes Bytes\n");
8962 fprintf (stderr, "---------------------------------------\n");
8963 total_nodes = total_bytes = 0;
8964 for (i = 0; i < (int) all_kinds; i++)
8965 {
8966 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8967 tree_node_counts[i], tree_node_sizes[i]);
8968 total_nodes += tree_node_counts[i];
8969 total_bytes += tree_node_sizes[i];
8970 }
8971 fprintf (stderr, "---------------------------------------\n");
8972 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8973 fprintf (stderr, "---------------------------------------\n");
8974 fprintf (stderr, "Code Nodes\n");
8975 fprintf (stderr, "----------------------------\n");
8976 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8977 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8978 tree_code_counts[i]);
8979 fprintf (stderr, "----------------------------\n");
8980 ssanames_print_statistics ();
8981 phinodes_print_statistics ();
8982 }
8983 else
8984 fprintf (stderr, "(No per-node statistics)\n");
8985
8986 print_type_hash_statistics ();
8987 print_debug_expr_statistics ();
8988 print_value_expr_statistics ();
8989 lang_hooks.print_statistics ();
8990 }
8991 \f
8992 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8993
8994 /* Generate a crc32 of a byte. */
8995
8996 static unsigned
8997 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
8998 {
8999 unsigned ix;
9000
9001 for (ix = bits; ix--; value <<= 1)
9002 {
9003 unsigned feedback;
9004
9005 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9006 chksum <<= 1;
9007 chksum ^= feedback;
9008 }
9009 return chksum;
9010 }
9011
9012 /* Generate a crc32 of a 32-bit unsigned. */
9013
9014 unsigned
9015 crc32_unsigned (unsigned chksum, unsigned value)
9016 {
9017 return crc32_unsigned_bits (chksum, value, 32);
9018 }
9019
9020 /* Generate a crc32 of a byte. */
9021
9022 unsigned
9023 crc32_byte (unsigned chksum, char byte)
9024 {
9025 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9026 }
9027
9028 /* Generate a crc32 of a string. */
9029
9030 unsigned
9031 crc32_string (unsigned chksum, const char *string)
9032 {
9033 do
9034 {
9035 chksum = crc32_byte (chksum, *string);
9036 }
9037 while (*string++);
9038 return chksum;
9039 }
9040
9041 /* P is a string that will be used in a symbol. Mask out any characters
9042 that are not valid in that context. */
9043
9044 void
9045 clean_symbol_name (char *p)
9046 {
9047 for (; *p; p++)
9048 if (! (ISALNUM (*p)
9049 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9050 || *p == '$'
9051 #endif
9052 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9053 || *p == '.'
9054 #endif
9055 ))
9056 *p = '_';
9057 }
9058
9059 /* Generate a name for a special-purpose function.
9060 The generated name may need to be unique across the whole link.
9061 Changes to this function may also require corresponding changes to
9062 xstrdup_mask_random.
9063 TYPE is some string to identify the purpose of this function to the
9064 linker or collect2; it must start with an uppercase letter,
9065 one of:
9066 I - for constructors
9067 D - for destructors
9068 N - for C++ anonymous namespaces
9069 F - for DWARF unwind frame information. */
9070
9071 tree
9072 get_file_function_name (const char *type)
9073 {
9074 char *buf;
9075 const char *p;
9076 char *q;
9077
9078 /* If we already have a name we know to be unique, just use that. */
9079 if (first_global_object_name)
9080 p = q = ASTRDUP (first_global_object_name);
9081 /* If the target is handling the constructors/destructors, they
9082 will be local to this file and the name is only necessary for
9083 debugging purposes.
9084 We also assign sub_I and sub_D sufixes to constructors called from
9085 the global static constructors. These are always local. */
9086 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9087 || (strncmp (type, "sub_", 4) == 0
9088 && (type[4] == 'I' || type[4] == 'D')))
9089 {
9090 const char *file = main_input_filename;
9091 if (! file)
9092 file = LOCATION_FILE (input_location);
9093 /* Just use the file's basename, because the full pathname
9094 might be quite long. */
9095 p = q = ASTRDUP (lbasename (file));
9096 }
9097 else
9098 {
9099 /* Otherwise, the name must be unique across the entire link.
9100 We don't have anything that we know to be unique to this translation
9101 unit, so use what we do have and throw in some randomness. */
9102 unsigned len;
9103 const char *name = weak_global_object_name;
9104 const char *file = main_input_filename;
9105
9106 if (! name)
9107 name = "";
9108 if (! file)
9109 file = LOCATION_FILE (input_location);
9110
9111 len = strlen (file);
9112 q = (char *) alloca (9 + 17 + len + 1);
9113 memcpy (q, file, len + 1);
9114
9115 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9116 crc32_string (0, name), get_random_seed (false));
9117
9118 p = q;
9119 }
9120
9121 clean_symbol_name (q);
9122 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9123 + strlen (type));
9124
9125 /* Set up the name of the file-level functions we may need.
9126 Use a global object (which is already required to be unique over
9127 the program) rather than the file name (which imposes extra
9128 constraints). */
9129 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9130
9131 return get_identifier (buf);
9132 }
9133 \f
9134 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9135
9136 /* Complain that the tree code of NODE does not match the expected 0
9137 terminated list of trailing codes. The trailing code list can be
9138 empty, for a more vague error message. FILE, LINE, and FUNCTION
9139 are of the caller. */
9140
9141 void
9142 tree_check_failed (const_tree node, const char *file,
9143 int line, const char *function, ...)
9144 {
9145 va_list args;
9146 const char *buffer;
9147 unsigned length = 0;
9148 enum tree_code code;
9149
9150 va_start (args, function);
9151 while ((code = (enum tree_code) va_arg (args, int)))
9152 length += 4 + strlen (get_tree_code_name (code));
9153 va_end (args);
9154 if (length)
9155 {
9156 char *tmp;
9157 va_start (args, function);
9158 length += strlen ("expected ");
9159 buffer = tmp = (char *) alloca (length);
9160 length = 0;
9161 while ((code = (enum tree_code) va_arg (args, int)))
9162 {
9163 const char *prefix = length ? " or " : "expected ";
9164
9165 strcpy (tmp + length, prefix);
9166 length += strlen (prefix);
9167 strcpy (tmp + length, get_tree_code_name (code));
9168 length += strlen (get_tree_code_name (code));
9169 }
9170 va_end (args);
9171 }
9172 else
9173 buffer = "unexpected node";
9174
9175 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9176 buffer, get_tree_code_name (TREE_CODE (node)),
9177 function, trim_filename (file), line);
9178 }
9179
9180 /* Complain that the tree code of NODE does match the expected 0
9181 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9182 the caller. */
9183
9184 void
9185 tree_not_check_failed (const_tree node, const char *file,
9186 int line, const char *function, ...)
9187 {
9188 va_list args;
9189 char *buffer;
9190 unsigned length = 0;
9191 enum tree_code code;
9192
9193 va_start (args, function);
9194 while ((code = (enum tree_code) va_arg (args, int)))
9195 length += 4 + strlen (get_tree_code_name (code));
9196 va_end (args);
9197 va_start (args, function);
9198 buffer = (char *) alloca (length);
9199 length = 0;
9200 while ((code = (enum tree_code) va_arg (args, int)))
9201 {
9202 if (length)
9203 {
9204 strcpy (buffer + length, " or ");
9205 length += 4;
9206 }
9207 strcpy (buffer + length, get_tree_code_name (code));
9208 length += strlen (get_tree_code_name (code));
9209 }
9210 va_end (args);
9211
9212 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9213 buffer, get_tree_code_name (TREE_CODE (node)),
9214 function, trim_filename (file), line);
9215 }
9216
9217 /* Similar to tree_check_failed, except that we check for a class of tree
9218 code, given in CL. */
9219
9220 void
9221 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9222 const char *file, int line, const char *function)
9223 {
9224 internal_error
9225 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9226 TREE_CODE_CLASS_STRING (cl),
9227 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9228 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9229 }
9230
9231 /* Similar to tree_check_failed, except that instead of specifying a
9232 dozen codes, use the knowledge that they're all sequential. */
9233
9234 void
9235 tree_range_check_failed (const_tree node, const char *file, int line,
9236 const char *function, enum tree_code c1,
9237 enum tree_code c2)
9238 {
9239 char *buffer;
9240 unsigned length = 0;
9241 unsigned int c;
9242
9243 for (c = c1; c <= c2; ++c)
9244 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9245
9246 length += strlen ("expected ");
9247 buffer = (char *) alloca (length);
9248 length = 0;
9249
9250 for (c = c1; c <= c2; ++c)
9251 {
9252 const char *prefix = length ? " or " : "expected ";
9253
9254 strcpy (buffer + length, prefix);
9255 length += strlen (prefix);
9256 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9257 length += strlen (get_tree_code_name ((enum tree_code) c));
9258 }
9259
9260 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9261 buffer, get_tree_code_name (TREE_CODE (node)),
9262 function, trim_filename (file), line);
9263 }
9264
9265
9266 /* Similar to tree_check_failed, except that we check that a tree does
9267 not have the specified code, given in CL. */
9268
9269 void
9270 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9271 const char *file, int line, const char *function)
9272 {
9273 internal_error
9274 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9275 TREE_CODE_CLASS_STRING (cl),
9276 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9277 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9278 }
9279
9280
9281 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9282
9283 void
9284 omp_clause_check_failed (const_tree node, const char *file, int line,
9285 const char *function, enum omp_clause_code code)
9286 {
9287 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9288 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9289 function, trim_filename (file), line);
9290 }
9291
9292
9293 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9294
9295 void
9296 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9297 const char *function, enum omp_clause_code c1,
9298 enum omp_clause_code c2)
9299 {
9300 char *buffer;
9301 unsigned length = 0;
9302 unsigned int c;
9303
9304 for (c = c1; c <= c2; ++c)
9305 length += 4 + strlen (omp_clause_code_name[c]);
9306
9307 length += strlen ("expected ");
9308 buffer = (char *) alloca (length);
9309 length = 0;
9310
9311 for (c = c1; c <= c2; ++c)
9312 {
9313 const char *prefix = length ? " or " : "expected ";
9314
9315 strcpy (buffer + length, prefix);
9316 length += strlen (prefix);
9317 strcpy (buffer + length, omp_clause_code_name[c]);
9318 length += strlen (omp_clause_code_name[c]);
9319 }
9320
9321 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9322 buffer, omp_clause_code_name[TREE_CODE (node)],
9323 function, trim_filename (file), line);
9324 }
9325
9326
9327 #undef DEFTREESTRUCT
9328 #define DEFTREESTRUCT(VAL, NAME) NAME,
9329
9330 static const char *ts_enum_names[] = {
9331 #include "treestruct.def"
9332 };
9333 #undef DEFTREESTRUCT
9334
9335 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9336
9337 /* Similar to tree_class_check_failed, except that we check for
9338 whether CODE contains the tree structure identified by EN. */
9339
9340 void
9341 tree_contains_struct_check_failed (const_tree node,
9342 const enum tree_node_structure_enum en,
9343 const char *file, int line,
9344 const char *function)
9345 {
9346 internal_error
9347 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9348 TS_ENUM_NAME (en),
9349 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9350 }
9351
9352
9353 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9354 (dynamically sized) vector. */
9355
9356 void
9357 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9358 const char *function)
9359 {
9360 internal_error
9361 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9362 idx + 1, len, function, trim_filename (file), line);
9363 }
9364
9365 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9366 (dynamically sized) vector. */
9367
9368 void
9369 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9370 const char *function)
9371 {
9372 internal_error
9373 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9374 idx + 1, len, function, trim_filename (file), line);
9375 }
9376
9377 /* Similar to above, except that the check is for the bounds of the operand
9378 vector of an expression node EXP. */
9379
9380 void
9381 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9382 int line, const char *function)
9383 {
9384 enum tree_code code = TREE_CODE (exp);
9385 internal_error
9386 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9387 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9388 function, trim_filename (file), line);
9389 }
9390
9391 /* Similar to above, except that the check is for the number of
9392 operands of an OMP_CLAUSE node. */
9393
9394 void
9395 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9396 int line, const char *function)
9397 {
9398 internal_error
9399 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9400 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9401 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9402 trim_filename (file), line);
9403 }
9404 #endif /* ENABLE_TREE_CHECKING */
9405 \f
9406 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9407 and mapped to the machine mode MODE. Initialize its fields and build
9408 the information necessary for debugging output. */
9409
9410 static tree
9411 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9412 {
9413 tree t;
9414 inchash::hash hstate;
9415
9416 t = make_node (VECTOR_TYPE);
9417 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9418 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9419 SET_TYPE_MODE (t, mode);
9420
9421 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9422 SET_TYPE_STRUCTURAL_EQUALITY (t);
9423 else if (TYPE_CANONICAL (innertype) != innertype
9424 || mode != VOIDmode)
9425 TYPE_CANONICAL (t)
9426 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9427
9428 layout_type (t);
9429
9430 hstate.add_wide_int (VECTOR_TYPE);
9431 hstate.add_wide_int (nunits);
9432 hstate.add_wide_int (mode);
9433 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9434 t = type_hash_canon (hstate.end (), t);
9435
9436 /* We have built a main variant, based on the main variant of the
9437 inner type. Use it to build the variant we return. */
9438 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9439 && TREE_TYPE (t) != innertype)
9440 return build_type_attribute_qual_variant (t,
9441 TYPE_ATTRIBUTES (innertype),
9442 TYPE_QUALS (innertype));
9443
9444 return t;
9445 }
9446
9447 static tree
9448 make_or_reuse_type (unsigned size, int unsignedp)
9449 {
9450 if (size == INT_TYPE_SIZE)
9451 return unsignedp ? unsigned_type_node : integer_type_node;
9452 if (size == CHAR_TYPE_SIZE)
9453 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9454 if (size == SHORT_TYPE_SIZE)
9455 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9456 if (size == LONG_TYPE_SIZE)
9457 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9458 if (size == LONG_LONG_TYPE_SIZE)
9459 return (unsignedp ? long_long_unsigned_type_node
9460 : long_long_integer_type_node);
9461 if (size == 128 && int128_integer_type_node)
9462 return (unsignedp ? int128_unsigned_type_node
9463 : int128_integer_type_node);
9464
9465 if (unsignedp)
9466 return make_unsigned_type (size);
9467 else
9468 return make_signed_type (size);
9469 }
9470
9471 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9472
9473 static tree
9474 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9475 {
9476 if (satp)
9477 {
9478 if (size == SHORT_FRACT_TYPE_SIZE)
9479 return unsignedp ? sat_unsigned_short_fract_type_node
9480 : sat_short_fract_type_node;
9481 if (size == FRACT_TYPE_SIZE)
9482 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9483 if (size == LONG_FRACT_TYPE_SIZE)
9484 return unsignedp ? sat_unsigned_long_fract_type_node
9485 : sat_long_fract_type_node;
9486 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9487 return unsignedp ? sat_unsigned_long_long_fract_type_node
9488 : sat_long_long_fract_type_node;
9489 }
9490 else
9491 {
9492 if (size == SHORT_FRACT_TYPE_SIZE)
9493 return unsignedp ? unsigned_short_fract_type_node
9494 : short_fract_type_node;
9495 if (size == FRACT_TYPE_SIZE)
9496 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9497 if (size == LONG_FRACT_TYPE_SIZE)
9498 return unsignedp ? unsigned_long_fract_type_node
9499 : long_fract_type_node;
9500 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9501 return unsignedp ? unsigned_long_long_fract_type_node
9502 : long_long_fract_type_node;
9503 }
9504
9505 return make_fract_type (size, unsignedp, satp);
9506 }
9507
9508 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9509
9510 static tree
9511 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9512 {
9513 if (satp)
9514 {
9515 if (size == SHORT_ACCUM_TYPE_SIZE)
9516 return unsignedp ? sat_unsigned_short_accum_type_node
9517 : sat_short_accum_type_node;
9518 if (size == ACCUM_TYPE_SIZE)
9519 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9520 if (size == LONG_ACCUM_TYPE_SIZE)
9521 return unsignedp ? sat_unsigned_long_accum_type_node
9522 : sat_long_accum_type_node;
9523 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9524 return unsignedp ? sat_unsigned_long_long_accum_type_node
9525 : sat_long_long_accum_type_node;
9526 }
9527 else
9528 {
9529 if (size == SHORT_ACCUM_TYPE_SIZE)
9530 return unsignedp ? unsigned_short_accum_type_node
9531 : short_accum_type_node;
9532 if (size == ACCUM_TYPE_SIZE)
9533 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9534 if (size == LONG_ACCUM_TYPE_SIZE)
9535 return unsignedp ? unsigned_long_accum_type_node
9536 : long_accum_type_node;
9537 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9538 return unsignedp ? unsigned_long_long_accum_type_node
9539 : long_long_accum_type_node;
9540 }
9541
9542 return make_accum_type (size, unsignedp, satp);
9543 }
9544
9545
9546 /* Create an atomic variant node for TYPE. This routine is called
9547 during initialization of data types to create the 5 basic atomic
9548 types. The generic build_variant_type function requires these to
9549 already be set up in order to function properly, so cannot be
9550 called from there. If ALIGN is non-zero, then ensure alignment is
9551 overridden to this value. */
9552
9553 static tree
9554 build_atomic_base (tree type, unsigned int align)
9555 {
9556 tree t;
9557
9558 /* Make sure its not already registered. */
9559 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9560 return t;
9561
9562 t = build_variant_type_copy (type);
9563 set_type_quals (t, TYPE_QUAL_ATOMIC);
9564
9565 if (align)
9566 TYPE_ALIGN (t) = align;
9567
9568 return t;
9569 }
9570
9571 /* Create nodes for all integer types (and error_mark_node) using the sizes
9572 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9573 SHORT_DOUBLE specifies whether double should be of the same precision
9574 as float. */
9575
9576 void
9577 build_common_tree_nodes (bool signed_char, bool short_double)
9578 {
9579 error_mark_node = make_node (ERROR_MARK);
9580 TREE_TYPE (error_mark_node) = error_mark_node;
9581
9582 initialize_sizetypes ();
9583
9584 /* Define both `signed char' and `unsigned char'. */
9585 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9586 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9587 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9588 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9589
9590 /* Define `char', which is like either `signed char' or `unsigned char'
9591 but not the same as either. */
9592 char_type_node
9593 = (signed_char
9594 ? make_signed_type (CHAR_TYPE_SIZE)
9595 : make_unsigned_type (CHAR_TYPE_SIZE));
9596 TYPE_STRING_FLAG (char_type_node) = 1;
9597
9598 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9599 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9600 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9601 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9602 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9603 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9604 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9605 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9606 #if HOST_BITS_PER_WIDE_INT >= 64
9607 /* TODO: This isn't correct, but as logic depends at the moment on
9608 host's instead of target's wide-integer.
9609 If there is a target not supporting TImode, but has an 128-bit
9610 integer-scalar register, this target check needs to be adjusted. */
9611 if (targetm.scalar_mode_supported_p (TImode))
9612 {
9613 int128_integer_type_node = make_signed_type (128);
9614 int128_unsigned_type_node = make_unsigned_type (128);
9615 }
9616 #endif
9617
9618 /* Define a boolean type. This type only represents boolean values but
9619 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9620 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9621 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9622 TYPE_PRECISION (boolean_type_node) = 1;
9623 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9624
9625 /* Define what type to use for size_t. */
9626 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9627 size_type_node = unsigned_type_node;
9628 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9629 size_type_node = long_unsigned_type_node;
9630 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9631 size_type_node = long_long_unsigned_type_node;
9632 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9633 size_type_node = short_unsigned_type_node;
9634 else
9635 gcc_unreachable ();
9636
9637 /* Fill in the rest of the sized types. Reuse existing type nodes
9638 when possible. */
9639 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9640 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9641 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9642 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9643 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9644
9645 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9646 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9647 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9648 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9649 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9650
9651 /* Don't call build_qualified type for atomics. That routine does
9652 special processing for atomics, and until they are initialized
9653 it's better not to make that call.
9654
9655 Check to see if there is a target override for atomic types. */
9656
9657 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9658 targetm.atomic_align_for_mode (QImode));
9659 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9660 targetm.atomic_align_for_mode (HImode));
9661 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9662 targetm.atomic_align_for_mode (SImode));
9663 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9664 targetm.atomic_align_for_mode (DImode));
9665 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9666 targetm.atomic_align_for_mode (TImode));
9667
9668 access_public_node = get_identifier ("public");
9669 access_protected_node = get_identifier ("protected");
9670 access_private_node = get_identifier ("private");
9671
9672 /* Define these next since types below may used them. */
9673 integer_zero_node = build_int_cst (integer_type_node, 0);
9674 integer_one_node = build_int_cst (integer_type_node, 1);
9675 integer_three_node = build_int_cst (integer_type_node, 3);
9676 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9677
9678 size_zero_node = size_int (0);
9679 size_one_node = size_int (1);
9680 bitsize_zero_node = bitsize_int (0);
9681 bitsize_one_node = bitsize_int (1);
9682 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9683
9684 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9685 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9686
9687 void_type_node = make_node (VOID_TYPE);
9688 layout_type (void_type_node);
9689
9690 /* We are not going to have real types in C with less than byte alignment,
9691 so we might as well not have any types that claim to have it. */
9692 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9693 TYPE_USER_ALIGN (void_type_node) = 0;
9694
9695 void_node = make_node (VOID_CST);
9696 TREE_TYPE (void_node) = void_type_node;
9697
9698 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9699 layout_type (TREE_TYPE (null_pointer_node));
9700
9701 ptr_type_node = build_pointer_type (void_type_node);
9702 const_ptr_type_node
9703 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9704 fileptr_type_node = ptr_type_node;
9705
9706 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9707
9708 float_type_node = make_node (REAL_TYPE);
9709 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9710 layout_type (float_type_node);
9711
9712 double_type_node = make_node (REAL_TYPE);
9713 if (short_double)
9714 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9715 else
9716 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9717 layout_type (double_type_node);
9718
9719 long_double_type_node = make_node (REAL_TYPE);
9720 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9721 layout_type (long_double_type_node);
9722
9723 float_ptr_type_node = build_pointer_type (float_type_node);
9724 double_ptr_type_node = build_pointer_type (double_type_node);
9725 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9726 integer_ptr_type_node = build_pointer_type (integer_type_node);
9727
9728 /* Fixed size integer types. */
9729 uint16_type_node = make_or_reuse_type (16, 1);
9730 uint32_type_node = make_or_reuse_type (32, 1);
9731 uint64_type_node = make_or_reuse_type (64, 1);
9732
9733 /* Decimal float types. */
9734 dfloat32_type_node = make_node (REAL_TYPE);
9735 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9736 layout_type (dfloat32_type_node);
9737 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9738 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9739
9740 dfloat64_type_node = make_node (REAL_TYPE);
9741 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9742 layout_type (dfloat64_type_node);
9743 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9744 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9745
9746 dfloat128_type_node = make_node (REAL_TYPE);
9747 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9748 layout_type (dfloat128_type_node);
9749 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9750 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9751
9752 complex_integer_type_node = build_complex_type (integer_type_node);
9753 complex_float_type_node = build_complex_type (float_type_node);
9754 complex_double_type_node = build_complex_type (double_type_node);
9755 complex_long_double_type_node = build_complex_type (long_double_type_node);
9756
9757 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9758 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9759 sat_ ## KIND ## _type_node = \
9760 make_sat_signed_ ## KIND ## _type (SIZE); \
9761 sat_unsigned_ ## KIND ## _type_node = \
9762 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9763 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9764 unsigned_ ## KIND ## _type_node = \
9765 make_unsigned_ ## KIND ## _type (SIZE);
9766
9767 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9768 sat_ ## WIDTH ## KIND ## _type_node = \
9769 make_sat_signed_ ## KIND ## _type (SIZE); \
9770 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9771 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9772 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9773 unsigned_ ## WIDTH ## KIND ## _type_node = \
9774 make_unsigned_ ## KIND ## _type (SIZE);
9775
9776 /* Make fixed-point type nodes based on four different widths. */
9777 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9778 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9779 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9780 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9781 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9782
9783 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9784 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9785 NAME ## _type_node = \
9786 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9787 u ## NAME ## _type_node = \
9788 make_or_reuse_unsigned_ ## KIND ## _type \
9789 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9790 sat_ ## NAME ## _type_node = \
9791 make_or_reuse_sat_signed_ ## KIND ## _type \
9792 (GET_MODE_BITSIZE (MODE ## mode)); \
9793 sat_u ## NAME ## _type_node = \
9794 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9795 (GET_MODE_BITSIZE (U ## MODE ## mode));
9796
9797 /* Fixed-point type and mode nodes. */
9798 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9799 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9800 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9801 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9802 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9803 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9804 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9805 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9806 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9807 MAKE_FIXED_MODE_NODE (accum, da, DA)
9808 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9809
9810 {
9811 tree t = targetm.build_builtin_va_list ();
9812
9813 /* Many back-ends define record types without setting TYPE_NAME.
9814 If we copied the record type here, we'd keep the original
9815 record type without a name. This breaks name mangling. So,
9816 don't copy record types and let c_common_nodes_and_builtins()
9817 declare the type to be __builtin_va_list. */
9818 if (TREE_CODE (t) != RECORD_TYPE)
9819 t = build_variant_type_copy (t);
9820
9821 va_list_type_node = t;
9822 }
9823 }
9824
9825 /* Modify DECL for given flags.
9826 TM_PURE attribute is set only on types, so the function will modify
9827 DECL's type when ECF_TM_PURE is used. */
9828
9829 void
9830 set_call_expr_flags (tree decl, int flags)
9831 {
9832 if (flags & ECF_NOTHROW)
9833 TREE_NOTHROW (decl) = 1;
9834 if (flags & ECF_CONST)
9835 TREE_READONLY (decl) = 1;
9836 if (flags & ECF_PURE)
9837 DECL_PURE_P (decl) = 1;
9838 if (flags & ECF_LOOPING_CONST_OR_PURE)
9839 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9840 if (flags & ECF_NOVOPS)
9841 DECL_IS_NOVOPS (decl) = 1;
9842 if (flags & ECF_NORETURN)
9843 TREE_THIS_VOLATILE (decl) = 1;
9844 if (flags & ECF_MALLOC)
9845 DECL_IS_MALLOC (decl) = 1;
9846 if (flags & ECF_RETURNS_TWICE)
9847 DECL_IS_RETURNS_TWICE (decl) = 1;
9848 if (flags & ECF_LEAF)
9849 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9850 NULL, DECL_ATTRIBUTES (decl));
9851 if ((flags & ECF_TM_PURE) && flag_tm)
9852 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9853 /* Looping const or pure is implied by noreturn.
9854 There is currently no way to declare looping const or looping pure alone. */
9855 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9856 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9857 }
9858
9859
9860 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9861
9862 static void
9863 local_define_builtin (const char *name, tree type, enum built_in_function code,
9864 const char *library_name, int ecf_flags)
9865 {
9866 tree decl;
9867
9868 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9869 library_name, NULL_TREE);
9870 set_call_expr_flags (decl, ecf_flags);
9871
9872 set_builtin_decl (code, decl, true);
9873 }
9874
9875 /* Call this function after instantiating all builtins that the language
9876 front end cares about. This will build the rest of the builtins
9877 and internal functions that are relied upon by the tree optimizers and
9878 the middle-end. */
9879
9880 void
9881 build_common_builtin_nodes (void)
9882 {
9883 tree tmp, ftype;
9884 int ecf_flags;
9885
9886 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9887 {
9888 ftype = build_function_type (void_type_node, void_list_node);
9889 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9890 "__builtin_unreachable",
9891 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9892 | ECF_CONST | ECF_LEAF);
9893 }
9894
9895 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9896 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9897 {
9898 ftype = build_function_type_list (ptr_type_node,
9899 ptr_type_node, const_ptr_type_node,
9900 size_type_node, NULL_TREE);
9901
9902 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9903 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9904 "memcpy", ECF_NOTHROW | ECF_LEAF);
9905 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9906 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9907 "memmove", ECF_NOTHROW | ECF_LEAF);
9908 }
9909
9910 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9911 {
9912 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9913 const_ptr_type_node, size_type_node,
9914 NULL_TREE);
9915 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9916 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9917 }
9918
9919 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9920 {
9921 ftype = build_function_type_list (ptr_type_node,
9922 ptr_type_node, integer_type_node,
9923 size_type_node, NULL_TREE);
9924 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9925 "memset", ECF_NOTHROW | ECF_LEAF);
9926 }
9927
9928 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9929 {
9930 ftype = build_function_type_list (ptr_type_node,
9931 size_type_node, NULL_TREE);
9932 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9933 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9934 }
9935
9936 ftype = build_function_type_list (ptr_type_node, size_type_node,
9937 size_type_node, NULL_TREE);
9938 local_define_builtin ("__builtin_alloca_with_align", ftype,
9939 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9940 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9941
9942 /* If we're checking the stack, `alloca' can throw. */
9943 if (flag_stack_check)
9944 {
9945 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9946 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9947 }
9948
9949 ftype = build_function_type_list (void_type_node,
9950 ptr_type_node, ptr_type_node,
9951 ptr_type_node, NULL_TREE);
9952 local_define_builtin ("__builtin_init_trampoline", ftype,
9953 BUILT_IN_INIT_TRAMPOLINE,
9954 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9955 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9956 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9957 "__builtin_init_heap_trampoline",
9958 ECF_NOTHROW | ECF_LEAF);
9959
9960 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9961 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9962 BUILT_IN_ADJUST_TRAMPOLINE,
9963 "__builtin_adjust_trampoline",
9964 ECF_CONST | ECF_NOTHROW);
9965
9966 ftype = build_function_type_list (void_type_node,
9967 ptr_type_node, ptr_type_node, NULL_TREE);
9968 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9969 BUILT_IN_NONLOCAL_GOTO,
9970 "__builtin_nonlocal_goto",
9971 ECF_NORETURN | ECF_NOTHROW);
9972
9973 ftype = build_function_type_list (void_type_node,
9974 ptr_type_node, ptr_type_node, NULL_TREE);
9975 local_define_builtin ("__builtin_setjmp_setup", ftype,
9976 BUILT_IN_SETJMP_SETUP,
9977 "__builtin_setjmp_setup", ECF_NOTHROW);
9978
9979 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9980 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9981 BUILT_IN_SETJMP_RECEIVER,
9982 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
9983
9984 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9985 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9986 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9987
9988 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9989 local_define_builtin ("__builtin_stack_restore", ftype,
9990 BUILT_IN_STACK_RESTORE,
9991 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9992
9993 /* If there's a possibility that we might use the ARM EABI, build the
9994 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
9995 if (targetm.arm_eabi_unwinder)
9996 {
9997 ftype = build_function_type_list (void_type_node, NULL_TREE);
9998 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9999 BUILT_IN_CXA_END_CLEANUP,
10000 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10001 }
10002
10003 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10004 local_define_builtin ("__builtin_unwind_resume", ftype,
10005 BUILT_IN_UNWIND_RESUME,
10006 ((targetm_common.except_unwind_info (&global_options)
10007 == UI_SJLJ)
10008 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10009 ECF_NORETURN);
10010
10011 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10012 {
10013 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10014 NULL_TREE);
10015 local_define_builtin ("__builtin_return_address", ftype,
10016 BUILT_IN_RETURN_ADDRESS,
10017 "__builtin_return_address",
10018 ECF_NOTHROW);
10019 }
10020
10021 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10022 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10023 {
10024 ftype = build_function_type_list (void_type_node, ptr_type_node,
10025 ptr_type_node, NULL_TREE);
10026 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10027 local_define_builtin ("__cyg_profile_func_enter", ftype,
10028 BUILT_IN_PROFILE_FUNC_ENTER,
10029 "__cyg_profile_func_enter", 0);
10030 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10031 local_define_builtin ("__cyg_profile_func_exit", ftype,
10032 BUILT_IN_PROFILE_FUNC_EXIT,
10033 "__cyg_profile_func_exit", 0);
10034 }
10035
10036 /* The exception object and filter values from the runtime. The argument
10037 must be zero before exception lowering, i.e. from the front end. After
10038 exception lowering, it will be the region number for the exception
10039 landing pad. These functions are PURE instead of CONST to prevent
10040 them from being hoisted past the exception edge that will initialize
10041 its value in the landing pad. */
10042 ftype = build_function_type_list (ptr_type_node,
10043 integer_type_node, NULL_TREE);
10044 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10045 /* Only use TM_PURE if we we have TM language support. */
10046 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10047 ecf_flags |= ECF_TM_PURE;
10048 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10049 "__builtin_eh_pointer", ecf_flags);
10050
10051 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10052 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10053 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10054 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10055
10056 ftype = build_function_type_list (void_type_node,
10057 integer_type_node, integer_type_node,
10058 NULL_TREE);
10059 local_define_builtin ("__builtin_eh_copy_values", ftype,
10060 BUILT_IN_EH_COPY_VALUES,
10061 "__builtin_eh_copy_values", ECF_NOTHROW);
10062
10063 /* Complex multiplication and division. These are handled as builtins
10064 rather than optabs because emit_library_call_value doesn't support
10065 complex. Further, we can do slightly better with folding these
10066 beasties if the real and complex parts of the arguments are separate. */
10067 {
10068 int mode;
10069
10070 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10071 {
10072 char mode_name_buf[4], *q;
10073 const char *p;
10074 enum built_in_function mcode, dcode;
10075 tree type, inner_type;
10076 const char *prefix = "__";
10077
10078 if (targetm.libfunc_gnu_prefix)
10079 prefix = "__gnu_";
10080
10081 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10082 if (type == NULL)
10083 continue;
10084 inner_type = TREE_TYPE (type);
10085
10086 ftype = build_function_type_list (type, inner_type, inner_type,
10087 inner_type, inner_type, NULL_TREE);
10088
10089 mcode = ((enum built_in_function)
10090 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10091 dcode = ((enum built_in_function)
10092 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10093
10094 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10095 *q = TOLOWER (*p);
10096 *q = '\0';
10097
10098 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10099 NULL);
10100 local_define_builtin (built_in_names[mcode], ftype, mcode,
10101 built_in_names[mcode],
10102 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10103
10104 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10105 NULL);
10106 local_define_builtin (built_in_names[dcode], ftype, dcode,
10107 built_in_names[dcode],
10108 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10109 }
10110 }
10111
10112 init_internal_fns ();
10113 }
10114
10115 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10116 better way.
10117
10118 If we requested a pointer to a vector, build up the pointers that
10119 we stripped off while looking for the inner type. Similarly for
10120 return values from functions.
10121
10122 The argument TYPE is the top of the chain, and BOTTOM is the
10123 new type which we will point to. */
10124
10125 tree
10126 reconstruct_complex_type (tree type, tree bottom)
10127 {
10128 tree inner, outer;
10129
10130 if (TREE_CODE (type) == POINTER_TYPE)
10131 {
10132 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10133 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10134 TYPE_REF_CAN_ALIAS_ALL (type));
10135 }
10136 else if (TREE_CODE (type) == REFERENCE_TYPE)
10137 {
10138 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10139 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10140 TYPE_REF_CAN_ALIAS_ALL (type));
10141 }
10142 else if (TREE_CODE (type) == ARRAY_TYPE)
10143 {
10144 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10145 outer = build_array_type (inner, TYPE_DOMAIN (type));
10146 }
10147 else if (TREE_CODE (type) == FUNCTION_TYPE)
10148 {
10149 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10150 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10151 }
10152 else if (TREE_CODE (type) == METHOD_TYPE)
10153 {
10154 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10155 /* The build_method_type_directly() routine prepends 'this' to argument list,
10156 so we must compensate by getting rid of it. */
10157 outer
10158 = build_method_type_directly
10159 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10160 inner,
10161 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10162 }
10163 else if (TREE_CODE (type) == OFFSET_TYPE)
10164 {
10165 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10166 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10167 }
10168 else
10169 return bottom;
10170
10171 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10172 TYPE_QUALS (type));
10173 }
10174
10175 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10176 the inner type. */
10177 tree
10178 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10179 {
10180 int nunits;
10181
10182 switch (GET_MODE_CLASS (mode))
10183 {
10184 case MODE_VECTOR_INT:
10185 case MODE_VECTOR_FLOAT:
10186 case MODE_VECTOR_FRACT:
10187 case MODE_VECTOR_UFRACT:
10188 case MODE_VECTOR_ACCUM:
10189 case MODE_VECTOR_UACCUM:
10190 nunits = GET_MODE_NUNITS (mode);
10191 break;
10192
10193 case MODE_INT:
10194 /* Check that there are no leftover bits. */
10195 gcc_assert (GET_MODE_BITSIZE (mode)
10196 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10197
10198 nunits = GET_MODE_BITSIZE (mode)
10199 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10200 break;
10201
10202 default:
10203 gcc_unreachable ();
10204 }
10205
10206 return make_vector_type (innertype, nunits, mode);
10207 }
10208
10209 /* Similarly, but takes the inner type and number of units, which must be
10210 a power of two. */
10211
10212 tree
10213 build_vector_type (tree innertype, int nunits)
10214 {
10215 return make_vector_type (innertype, nunits, VOIDmode);
10216 }
10217
10218 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10219
10220 tree
10221 build_opaque_vector_type (tree innertype, int nunits)
10222 {
10223 tree t = make_vector_type (innertype, nunits, VOIDmode);
10224 tree cand;
10225 /* We always build the non-opaque variant before the opaque one,
10226 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10227 cand = TYPE_NEXT_VARIANT (t);
10228 if (cand
10229 && TYPE_VECTOR_OPAQUE (cand)
10230 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10231 return cand;
10232 /* Othewise build a variant type and make sure to queue it after
10233 the non-opaque type. */
10234 cand = build_distinct_type_copy (t);
10235 TYPE_VECTOR_OPAQUE (cand) = true;
10236 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10237 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10238 TYPE_NEXT_VARIANT (t) = cand;
10239 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10240 return cand;
10241 }
10242
10243
10244 /* Given an initializer INIT, return TRUE if INIT is zero or some
10245 aggregate of zeros. Otherwise return FALSE. */
10246 bool
10247 initializer_zerop (const_tree init)
10248 {
10249 tree elt;
10250
10251 STRIP_NOPS (init);
10252
10253 switch (TREE_CODE (init))
10254 {
10255 case INTEGER_CST:
10256 return integer_zerop (init);
10257
10258 case REAL_CST:
10259 /* ??? Note that this is not correct for C4X float formats. There,
10260 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10261 negative exponent. */
10262 return real_zerop (init)
10263 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10264
10265 case FIXED_CST:
10266 return fixed_zerop (init);
10267
10268 case COMPLEX_CST:
10269 return integer_zerop (init)
10270 || (real_zerop (init)
10271 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10272 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10273
10274 case VECTOR_CST:
10275 {
10276 unsigned i;
10277 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10278 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10279 return false;
10280 return true;
10281 }
10282
10283 case CONSTRUCTOR:
10284 {
10285 unsigned HOST_WIDE_INT idx;
10286
10287 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10288 if (!initializer_zerop (elt))
10289 return false;
10290 return true;
10291 }
10292
10293 case STRING_CST:
10294 {
10295 int i;
10296
10297 /* We need to loop through all elements to handle cases like
10298 "\0" and "\0foobar". */
10299 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10300 if (TREE_STRING_POINTER (init)[i] != '\0')
10301 return false;
10302
10303 return true;
10304 }
10305
10306 default:
10307 return false;
10308 }
10309 }
10310
10311 /* Check if vector VEC consists of all the equal elements and
10312 that the number of elements corresponds to the type of VEC.
10313 The function returns first element of the vector
10314 or NULL_TREE if the vector is not uniform. */
10315 tree
10316 uniform_vector_p (const_tree vec)
10317 {
10318 tree first, t;
10319 unsigned i;
10320
10321 if (vec == NULL_TREE)
10322 return NULL_TREE;
10323
10324 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10325
10326 if (TREE_CODE (vec) == VECTOR_CST)
10327 {
10328 first = VECTOR_CST_ELT (vec, 0);
10329 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10330 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10331 return NULL_TREE;
10332
10333 return first;
10334 }
10335
10336 else if (TREE_CODE (vec) == CONSTRUCTOR)
10337 {
10338 first = error_mark_node;
10339
10340 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10341 {
10342 if (i == 0)
10343 {
10344 first = t;
10345 continue;
10346 }
10347 if (!operand_equal_p (first, t, 0))
10348 return NULL_TREE;
10349 }
10350 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10351 return NULL_TREE;
10352
10353 return first;
10354 }
10355
10356 return NULL_TREE;
10357 }
10358
10359 /* Build an empty statement at location LOC. */
10360
10361 tree
10362 build_empty_stmt (location_t loc)
10363 {
10364 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10365 SET_EXPR_LOCATION (t, loc);
10366 return t;
10367 }
10368
10369
10370 /* Build an OpenMP clause with code CODE. LOC is the location of the
10371 clause. */
10372
10373 tree
10374 build_omp_clause (location_t loc, enum omp_clause_code code)
10375 {
10376 tree t;
10377 int size, length;
10378
10379 length = omp_clause_num_ops[code];
10380 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10381
10382 record_node_allocation_statistics (OMP_CLAUSE, size);
10383
10384 t = (tree) ggc_internal_alloc (size);
10385 memset (t, 0, size);
10386 TREE_SET_CODE (t, OMP_CLAUSE);
10387 OMP_CLAUSE_SET_CODE (t, code);
10388 OMP_CLAUSE_LOCATION (t) = loc;
10389
10390 return t;
10391 }
10392
10393 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10394 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10395 Except for the CODE and operand count field, other storage for the
10396 object is initialized to zeros. */
10397
10398 tree
10399 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10400 {
10401 tree t;
10402 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10403
10404 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10405 gcc_assert (len >= 1);
10406
10407 record_node_allocation_statistics (code, length);
10408
10409 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10410
10411 TREE_SET_CODE (t, code);
10412
10413 /* Can't use TREE_OPERAND to store the length because if checking is
10414 enabled, it will try to check the length before we store it. :-P */
10415 t->exp.operands[0] = build_int_cst (sizetype, len);
10416
10417 return t;
10418 }
10419
10420 /* Helper function for build_call_* functions; build a CALL_EXPR with
10421 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10422 the argument slots. */
10423
10424 static tree
10425 build_call_1 (tree return_type, tree fn, int nargs)
10426 {
10427 tree t;
10428
10429 t = build_vl_exp (CALL_EXPR, nargs + 3);
10430 TREE_TYPE (t) = return_type;
10431 CALL_EXPR_FN (t) = fn;
10432 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10433
10434 return t;
10435 }
10436
10437 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10438 FN and a null static chain slot. NARGS is the number of call arguments
10439 which are specified as "..." arguments. */
10440
10441 tree
10442 build_call_nary (tree return_type, tree fn, int nargs, ...)
10443 {
10444 tree ret;
10445 va_list args;
10446 va_start (args, nargs);
10447 ret = build_call_valist (return_type, fn, nargs, args);
10448 va_end (args);
10449 return ret;
10450 }
10451
10452 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10453 FN and a null static chain slot. NARGS is the number of call arguments
10454 which are specified as a va_list ARGS. */
10455
10456 tree
10457 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10458 {
10459 tree t;
10460 int i;
10461
10462 t = build_call_1 (return_type, fn, nargs);
10463 for (i = 0; i < nargs; i++)
10464 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10465 process_call_operands (t);
10466 return t;
10467 }
10468
10469 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10470 FN and a null static chain slot. NARGS is the number of call arguments
10471 which are specified as a tree array ARGS. */
10472
10473 tree
10474 build_call_array_loc (location_t loc, tree return_type, tree fn,
10475 int nargs, const tree *args)
10476 {
10477 tree t;
10478 int i;
10479
10480 t = build_call_1 (return_type, fn, nargs);
10481 for (i = 0; i < nargs; i++)
10482 CALL_EXPR_ARG (t, i) = args[i];
10483 process_call_operands (t);
10484 SET_EXPR_LOCATION (t, loc);
10485 return t;
10486 }
10487
10488 /* Like build_call_array, but takes a vec. */
10489
10490 tree
10491 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10492 {
10493 tree ret, t;
10494 unsigned int ix;
10495
10496 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10497 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10498 CALL_EXPR_ARG (ret, ix) = t;
10499 process_call_operands (ret);
10500 return ret;
10501 }
10502
10503 /* Conveniently construct a function call expression. FNDECL names the
10504 function to be called and N arguments are passed in the array
10505 ARGARRAY. */
10506
10507 tree
10508 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10509 {
10510 tree fntype = TREE_TYPE (fndecl);
10511 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10512
10513 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
10514 }
10515
10516 /* Conveniently construct a function call expression. FNDECL names the
10517 function to be called and the arguments are passed in the vector
10518 VEC. */
10519
10520 tree
10521 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10522 {
10523 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10524 vec_safe_address (vec));
10525 }
10526
10527
10528 /* Conveniently construct a function call expression. FNDECL names the
10529 function to be called, N is the number of arguments, and the "..."
10530 parameters are the argument expressions. */
10531
10532 tree
10533 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10534 {
10535 va_list ap;
10536 tree *argarray = XALLOCAVEC (tree, n);
10537 int i;
10538
10539 va_start (ap, n);
10540 for (i = 0; i < n; i++)
10541 argarray[i] = va_arg (ap, tree);
10542 va_end (ap);
10543 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10544 }
10545
10546 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10547 varargs macros aren't supported by all bootstrap compilers. */
10548
10549 tree
10550 build_call_expr (tree fndecl, int n, ...)
10551 {
10552 va_list ap;
10553 tree *argarray = XALLOCAVEC (tree, n);
10554 int i;
10555
10556 va_start (ap, n);
10557 for (i = 0; i < n; i++)
10558 argarray[i] = va_arg (ap, tree);
10559 va_end (ap);
10560 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10561 }
10562
10563 /* Build internal call expression. This is just like CALL_EXPR, except
10564 its CALL_EXPR_FN is NULL. It will get gimplified later into ordinary
10565 internal function. */
10566
10567 tree
10568 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10569 tree type, int n, ...)
10570 {
10571 va_list ap;
10572 int i;
10573
10574 tree fn = build_call_1 (type, NULL_TREE, n);
10575 va_start (ap, n);
10576 for (i = 0; i < n; i++)
10577 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10578 va_end (ap);
10579 SET_EXPR_LOCATION (fn, loc);
10580 CALL_EXPR_IFN (fn) = ifn;
10581 return fn;
10582 }
10583
10584 /* Create a new constant string literal and return a char* pointer to it.
10585 The STRING_CST value is the LEN characters at STR. */
10586 tree
10587 build_string_literal (int len, const char *str)
10588 {
10589 tree t, elem, index, type;
10590
10591 t = build_string (len, str);
10592 elem = build_type_variant (char_type_node, 1, 0);
10593 index = build_index_type (size_int (len - 1));
10594 type = build_array_type (elem, index);
10595 TREE_TYPE (t) = type;
10596 TREE_CONSTANT (t) = 1;
10597 TREE_READONLY (t) = 1;
10598 TREE_STATIC (t) = 1;
10599
10600 type = build_pointer_type (elem);
10601 t = build1 (ADDR_EXPR, type,
10602 build4 (ARRAY_REF, elem,
10603 t, integer_zero_node, NULL_TREE, NULL_TREE));
10604 return t;
10605 }
10606
10607
10608
10609 /* Return true if T (assumed to be a DECL) must be assigned a memory
10610 location. */
10611
10612 bool
10613 needs_to_live_in_memory (const_tree t)
10614 {
10615 return (TREE_ADDRESSABLE (t)
10616 || is_global_var (t)
10617 || (TREE_CODE (t) == RESULT_DECL
10618 && !DECL_BY_REFERENCE (t)
10619 && aggregate_value_p (t, current_function_decl)));
10620 }
10621
10622 /* Return value of a constant X and sign-extend it. */
10623
10624 HOST_WIDE_INT
10625 int_cst_value (const_tree x)
10626 {
10627 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10628 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10629
10630 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10631 gcc_assert (cst_and_fits_in_hwi (x));
10632
10633 if (bits < HOST_BITS_PER_WIDE_INT)
10634 {
10635 bool negative = ((val >> (bits - 1)) & 1) != 0;
10636 if (negative)
10637 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10638 else
10639 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10640 }
10641
10642 return val;
10643 }
10644
10645 /* If TYPE is an integral or pointer type, return an integer type with
10646 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10647 if TYPE is already an integer type of signedness UNSIGNEDP. */
10648
10649 tree
10650 signed_or_unsigned_type_for (int unsignedp, tree type)
10651 {
10652 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10653 return type;
10654
10655 if (TREE_CODE (type) == VECTOR_TYPE)
10656 {
10657 tree inner = TREE_TYPE (type);
10658 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10659 if (!inner2)
10660 return NULL_TREE;
10661 if (inner == inner2)
10662 return type;
10663 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10664 }
10665
10666 if (!INTEGRAL_TYPE_P (type)
10667 && !POINTER_TYPE_P (type)
10668 && TREE_CODE (type) != OFFSET_TYPE)
10669 return NULL_TREE;
10670
10671 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10672 }
10673
10674 /* If TYPE is an integral or pointer type, return an integer type with
10675 the same precision which is unsigned, or itself if TYPE is already an
10676 unsigned integer type. */
10677
10678 tree
10679 unsigned_type_for (tree type)
10680 {
10681 return signed_or_unsigned_type_for (1, type);
10682 }
10683
10684 /* If TYPE is an integral or pointer type, return an integer type with
10685 the same precision which is signed, or itself if TYPE is already a
10686 signed integer type. */
10687
10688 tree
10689 signed_type_for (tree type)
10690 {
10691 return signed_or_unsigned_type_for (0, type);
10692 }
10693
10694 /* If TYPE is a vector type, return a signed integer vector type with the
10695 same width and number of subparts. Otherwise return boolean_type_node. */
10696
10697 tree
10698 truth_type_for (tree type)
10699 {
10700 if (TREE_CODE (type) == VECTOR_TYPE)
10701 {
10702 tree elem = lang_hooks.types.type_for_size
10703 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10704 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10705 }
10706 else
10707 return boolean_type_node;
10708 }
10709
10710 /* Returns the largest value obtainable by casting something in INNER type to
10711 OUTER type. */
10712
10713 tree
10714 upper_bound_in_type (tree outer, tree inner)
10715 {
10716 unsigned int det = 0;
10717 unsigned oprec = TYPE_PRECISION (outer);
10718 unsigned iprec = TYPE_PRECISION (inner);
10719 unsigned prec;
10720
10721 /* Compute a unique number for every combination. */
10722 det |= (oprec > iprec) ? 4 : 0;
10723 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10724 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10725
10726 /* Determine the exponent to use. */
10727 switch (det)
10728 {
10729 case 0:
10730 case 1:
10731 /* oprec <= iprec, outer: signed, inner: don't care. */
10732 prec = oprec - 1;
10733 break;
10734 case 2:
10735 case 3:
10736 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10737 prec = oprec;
10738 break;
10739 case 4:
10740 /* oprec > iprec, outer: signed, inner: signed. */
10741 prec = iprec - 1;
10742 break;
10743 case 5:
10744 /* oprec > iprec, outer: signed, inner: unsigned. */
10745 prec = iprec;
10746 break;
10747 case 6:
10748 /* oprec > iprec, outer: unsigned, inner: signed. */
10749 prec = oprec;
10750 break;
10751 case 7:
10752 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10753 prec = iprec;
10754 break;
10755 default:
10756 gcc_unreachable ();
10757 }
10758
10759 return wide_int_to_tree (outer,
10760 wi::mask (prec, false, TYPE_PRECISION (outer)));
10761 }
10762
10763 /* Returns the smallest value obtainable by casting something in INNER type to
10764 OUTER type. */
10765
10766 tree
10767 lower_bound_in_type (tree outer, tree inner)
10768 {
10769 unsigned oprec = TYPE_PRECISION (outer);
10770 unsigned iprec = TYPE_PRECISION (inner);
10771
10772 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10773 and obtain 0. */
10774 if (TYPE_UNSIGNED (outer)
10775 /* If we are widening something of an unsigned type, OUTER type
10776 contains all values of INNER type. In particular, both INNER
10777 and OUTER types have zero in common. */
10778 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10779 return build_int_cst (outer, 0);
10780 else
10781 {
10782 /* If we are widening a signed type to another signed type, we
10783 want to obtain -2^^(iprec-1). If we are keeping the
10784 precision or narrowing to a signed type, we want to obtain
10785 -2^(oprec-1). */
10786 unsigned prec = oprec > iprec ? iprec : oprec;
10787 return wide_int_to_tree (outer,
10788 wi::mask (prec - 1, true,
10789 TYPE_PRECISION (outer)));
10790 }
10791 }
10792
10793 /* Return nonzero if two operands that are suitable for PHI nodes are
10794 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10795 SSA_NAME or invariant. Note that this is strictly an optimization.
10796 That is, callers of this function can directly call operand_equal_p
10797 and get the same result, only slower. */
10798
10799 int
10800 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10801 {
10802 if (arg0 == arg1)
10803 return 1;
10804 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10805 return 0;
10806 return operand_equal_p (arg0, arg1, 0);
10807 }
10808
10809 /* Returns number of zeros at the end of binary representation of X. */
10810
10811 tree
10812 num_ending_zeros (const_tree x)
10813 {
10814 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10815 }
10816
10817
10818 #define WALK_SUBTREE(NODE) \
10819 do \
10820 { \
10821 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10822 if (result) \
10823 return result; \
10824 } \
10825 while (0)
10826
10827 /* This is a subroutine of walk_tree that walks field of TYPE that are to
10828 be walked whenever a type is seen in the tree. Rest of operands and return
10829 value are as for walk_tree. */
10830
10831 static tree
10832 walk_type_fields (tree type, walk_tree_fn func, void *data,
10833 hash_set<tree> *pset, walk_tree_lh lh)
10834 {
10835 tree result = NULL_TREE;
10836
10837 switch (TREE_CODE (type))
10838 {
10839 case POINTER_TYPE:
10840 case REFERENCE_TYPE:
10841 case VECTOR_TYPE:
10842 /* We have to worry about mutually recursive pointers. These can't
10843 be written in C. They can in Ada. It's pathological, but
10844 there's an ACATS test (c38102a) that checks it. Deal with this
10845 by checking if we're pointing to another pointer, that one
10846 points to another pointer, that one does too, and we have no htab.
10847 If so, get a hash table. We check three levels deep to avoid
10848 the cost of the hash table if we don't need one. */
10849 if (POINTER_TYPE_P (TREE_TYPE (type))
10850 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10851 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10852 && !pset)
10853 {
10854 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10855 func, data);
10856 if (result)
10857 return result;
10858
10859 break;
10860 }
10861
10862 /* ... fall through ... */
10863
10864 case COMPLEX_TYPE:
10865 WALK_SUBTREE (TREE_TYPE (type));
10866 break;
10867
10868 case METHOD_TYPE:
10869 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10870
10871 /* Fall through. */
10872
10873 case FUNCTION_TYPE:
10874 WALK_SUBTREE (TREE_TYPE (type));
10875 {
10876 tree arg;
10877
10878 /* We never want to walk into default arguments. */
10879 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10880 WALK_SUBTREE (TREE_VALUE (arg));
10881 }
10882 break;
10883
10884 case ARRAY_TYPE:
10885 /* Don't follow this nodes's type if a pointer for fear that
10886 we'll have infinite recursion. If we have a PSET, then we
10887 need not fear. */
10888 if (pset
10889 || (!POINTER_TYPE_P (TREE_TYPE (type))
10890 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10891 WALK_SUBTREE (TREE_TYPE (type));
10892 WALK_SUBTREE (TYPE_DOMAIN (type));
10893 break;
10894
10895 case OFFSET_TYPE:
10896 WALK_SUBTREE (TREE_TYPE (type));
10897 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10898 break;
10899
10900 default:
10901 break;
10902 }
10903
10904 return NULL_TREE;
10905 }
10906
10907 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10908 called with the DATA and the address of each sub-tree. If FUNC returns a
10909 non-NULL value, the traversal is stopped, and the value returned by FUNC
10910 is returned. If PSET is non-NULL it is used to record the nodes visited,
10911 and to avoid visiting a node more than once. */
10912
10913 tree
10914 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10915 hash_set<tree> *pset, walk_tree_lh lh)
10916 {
10917 enum tree_code code;
10918 int walk_subtrees;
10919 tree result;
10920
10921 #define WALK_SUBTREE_TAIL(NODE) \
10922 do \
10923 { \
10924 tp = & (NODE); \
10925 goto tail_recurse; \
10926 } \
10927 while (0)
10928
10929 tail_recurse:
10930 /* Skip empty subtrees. */
10931 if (!*tp)
10932 return NULL_TREE;
10933
10934 /* Don't walk the same tree twice, if the user has requested
10935 that we avoid doing so. */
10936 if (pset && pset->add (*tp))
10937 return NULL_TREE;
10938
10939 /* Call the function. */
10940 walk_subtrees = 1;
10941 result = (*func) (tp, &walk_subtrees, data);
10942
10943 /* If we found something, return it. */
10944 if (result)
10945 return result;
10946
10947 code = TREE_CODE (*tp);
10948
10949 /* Even if we didn't, FUNC may have decided that there was nothing
10950 interesting below this point in the tree. */
10951 if (!walk_subtrees)
10952 {
10953 /* But we still need to check our siblings. */
10954 if (code == TREE_LIST)
10955 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10956 else if (code == OMP_CLAUSE)
10957 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10958 else
10959 return NULL_TREE;
10960 }
10961
10962 if (lh)
10963 {
10964 result = (*lh) (tp, &walk_subtrees, func, data, pset);
10965 if (result || !walk_subtrees)
10966 return result;
10967 }
10968
10969 switch (code)
10970 {
10971 case ERROR_MARK:
10972 case IDENTIFIER_NODE:
10973 case INTEGER_CST:
10974 case REAL_CST:
10975 case FIXED_CST:
10976 case VECTOR_CST:
10977 case STRING_CST:
10978 case BLOCK:
10979 case PLACEHOLDER_EXPR:
10980 case SSA_NAME:
10981 case FIELD_DECL:
10982 case RESULT_DECL:
10983 /* None of these have subtrees other than those already walked
10984 above. */
10985 break;
10986
10987 case TREE_LIST:
10988 WALK_SUBTREE (TREE_VALUE (*tp));
10989 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10990 break;
10991
10992 case TREE_VEC:
10993 {
10994 int len = TREE_VEC_LENGTH (*tp);
10995
10996 if (len == 0)
10997 break;
10998
10999 /* Walk all elements but the first. */
11000 while (--len)
11001 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11002
11003 /* Now walk the first one as a tail call. */
11004 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11005 }
11006
11007 case COMPLEX_CST:
11008 WALK_SUBTREE (TREE_REALPART (*tp));
11009 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11010
11011 case CONSTRUCTOR:
11012 {
11013 unsigned HOST_WIDE_INT idx;
11014 constructor_elt *ce;
11015
11016 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11017 idx++)
11018 WALK_SUBTREE (ce->value);
11019 }
11020 break;
11021
11022 case SAVE_EXPR:
11023 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11024
11025 case BIND_EXPR:
11026 {
11027 tree decl;
11028 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11029 {
11030 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11031 into declarations that are just mentioned, rather than
11032 declared; they don't really belong to this part of the tree.
11033 And, we can see cycles: the initializer for a declaration
11034 can refer to the declaration itself. */
11035 WALK_SUBTREE (DECL_INITIAL (decl));
11036 WALK_SUBTREE (DECL_SIZE (decl));
11037 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11038 }
11039 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11040 }
11041
11042 case STATEMENT_LIST:
11043 {
11044 tree_stmt_iterator i;
11045 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11046 WALK_SUBTREE (*tsi_stmt_ptr (i));
11047 }
11048 break;
11049
11050 case OMP_CLAUSE:
11051 switch (OMP_CLAUSE_CODE (*tp))
11052 {
11053 case OMP_CLAUSE_PRIVATE:
11054 case OMP_CLAUSE_SHARED:
11055 case OMP_CLAUSE_FIRSTPRIVATE:
11056 case OMP_CLAUSE_COPYIN:
11057 case OMP_CLAUSE_COPYPRIVATE:
11058 case OMP_CLAUSE_FINAL:
11059 case OMP_CLAUSE_IF:
11060 case OMP_CLAUSE_NUM_THREADS:
11061 case OMP_CLAUSE_SCHEDULE:
11062 case OMP_CLAUSE_UNIFORM:
11063 case OMP_CLAUSE_DEPEND:
11064 case OMP_CLAUSE_NUM_TEAMS:
11065 case OMP_CLAUSE_THREAD_LIMIT:
11066 case OMP_CLAUSE_DEVICE:
11067 case OMP_CLAUSE_DIST_SCHEDULE:
11068 case OMP_CLAUSE_SAFELEN:
11069 case OMP_CLAUSE_SIMDLEN:
11070 case OMP_CLAUSE__LOOPTEMP_:
11071 case OMP_CLAUSE__SIMDUID_:
11072 case OMP_CLAUSE__CILK_FOR_COUNT_:
11073 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11074 /* FALLTHRU */
11075
11076 case OMP_CLAUSE_NOWAIT:
11077 case OMP_CLAUSE_ORDERED:
11078 case OMP_CLAUSE_DEFAULT:
11079 case OMP_CLAUSE_UNTIED:
11080 case OMP_CLAUSE_MERGEABLE:
11081 case OMP_CLAUSE_PROC_BIND:
11082 case OMP_CLAUSE_INBRANCH:
11083 case OMP_CLAUSE_NOTINBRANCH:
11084 case OMP_CLAUSE_FOR:
11085 case OMP_CLAUSE_PARALLEL:
11086 case OMP_CLAUSE_SECTIONS:
11087 case OMP_CLAUSE_TASKGROUP:
11088 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11089
11090 case OMP_CLAUSE_LASTPRIVATE:
11091 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11092 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11093 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11094
11095 case OMP_CLAUSE_COLLAPSE:
11096 {
11097 int i;
11098 for (i = 0; i < 3; i++)
11099 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11100 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11101 }
11102
11103 case OMP_CLAUSE_LINEAR:
11104 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11105 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11106 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11107 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11108
11109 case OMP_CLAUSE_ALIGNED:
11110 case OMP_CLAUSE_FROM:
11111 case OMP_CLAUSE_TO:
11112 case OMP_CLAUSE_MAP:
11113 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11114 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11115 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11116
11117 case OMP_CLAUSE_REDUCTION:
11118 {
11119 int i;
11120 for (i = 0; i < 4; i++)
11121 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11122 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11123 }
11124
11125 default:
11126 gcc_unreachable ();
11127 }
11128 break;
11129
11130 case TARGET_EXPR:
11131 {
11132 int i, len;
11133
11134 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11135 But, we only want to walk once. */
11136 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11137 for (i = 0; i < len; ++i)
11138 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11139 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11140 }
11141
11142 case DECL_EXPR:
11143 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11144 defining. We only want to walk into these fields of a type in this
11145 case and not in the general case of a mere reference to the type.
11146
11147 The criterion is as follows: if the field can be an expression, it
11148 must be walked only here. This should be in keeping with the fields
11149 that are directly gimplified in gimplify_type_sizes in order for the
11150 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11151 variable-sized types.
11152
11153 Note that DECLs get walked as part of processing the BIND_EXPR. */
11154 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11155 {
11156 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11157 if (TREE_CODE (*type_p) == ERROR_MARK)
11158 return NULL_TREE;
11159
11160 /* Call the function for the type. See if it returns anything or
11161 doesn't want us to continue. If we are to continue, walk both
11162 the normal fields and those for the declaration case. */
11163 result = (*func) (type_p, &walk_subtrees, data);
11164 if (result || !walk_subtrees)
11165 return result;
11166
11167 /* But do not walk a pointed-to type since it may itself need to
11168 be walked in the declaration case if it isn't anonymous. */
11169 if (!POINTER_TYPE_P (*type_p))
11170 {
11171 result = walk_type_fields (*type_p, func, data, pset, lh);
11172 if (result)
11173 return result;
11174 }
11175
11176 /* If this is a record type, also walk the fields. */
11177 if (RECORD_OR_UNION_TYPE_P (*type_p))
11178 {
11179 tree field;
11180
11181 for (field = TYPE_FIELDS (*type_p); field;
11182 field = DECL_CHAIN (field))
11183 {
11184 /* We'd like to look at the type of the field, but we can
11185 easily get infinite recursion. So assume it's pointed
11186 to elsewhere in the tree. Also, ignore things that
11187 aren't fields. */
11188 if (TREE_CODE (field) != FIELD_DECL)
11189 continue;
11190
11191 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11192 WALK_SUBTREE (DECL_SIZE (field));
11193 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11194 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11195 WALK_SUBTREE (DECL_QUALIFIER (field));
11196 }
11197 }
11198
11199 /* Same for scalar types. */
11200 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11201 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11202 || TREE_CODE (*type_p) == INTEGER_TYPE
11203 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11204 || TREE_CODE (*type_p) == REAL_TYPE)
11205 {
11206 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11207 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11208 }
11209
11210 WALK_SUBTREE (TYPE_SIZE (*type_p));
11211 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11212 }
11213 /* FALLTHRU */
11214
11215 default:
11216 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11217 {
11218 int i, len;
11219
11220 /* Walk over all the sub-trees of this operand. */
11221 len = TREE_OPERAND_LENGTH (*tp);
11222
11223 /* Go through the subtrees. We need to do this in forward order so
11224 that the scope of a FOR_EXPR is handled properly. */
11225 if (len)
11226 {
11227 for (i = 0; i < len - 1; ++i)
11228 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11229 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11230 }
11231 }
11232 /* If this is a type, walk the needed fields in the type. */
11233 else if (TYPE_P (*tp))
11234 return walk_type_fields (*tp, func, data, pset, lh);
11235 break;
11236 }
11237
11238 /* We didn't find what we were looking for. */
11239 return NULL_TREE;
11240
11241 #undef WALK_SUBTREE_TAIL
11242 }
11243 #undef WALK_SUBTREE
11244
11245 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11246
11247 tree
11248 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11249 walk_tree_lh lh)
11250 {
11251 tree result;
11252
11253 hash_set<tree> pset;
11254 result = walk_tree_1 (tp, func, data, &pset, lh);
11255 return result;
11256 }
11257
11258
11259 tree
11260 tree_block (tree t)
11261 {
11262 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11263
11264 if (IS_EXPR_CODE_CLASS (c))
11265 return LOCATION_BLOCK (t->exp.locus);
11266 gcc_unreachable ();
11267 return NULL;
11268 }
11269
11270 void
11271 tree_set_block (tree t, tree b)
11272 {
11273 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11274
11275 if (IS_EXPR_CODE_CLASS (c))
11276 {
11277 if (b)
11278 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11279 else
11280 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11281 }
11282 else
11283 gcc_unreachable ();
11284 }
11285
11286 /* Create a nameless artificial label and put it in the current
11287 function context. The label has a location of LOC. Returns the
11288 newly created label. */
11289
11290 tree
11291 create_artificial_label (location_t loc)
11292 {
11293 tree lab = build_decl (loc,
11294 LABEL_DECL, NULL_TREE, void_type_node);
11295
11296 DECL_ARTIFICIAL (lab) = 1;
11297 DECL_IGNORED_P (lab) = 1;
11298 DECL_CONTEXT (lab) = current_function_decl;
11299 return lab;
11300 }
11301
11302 /* Given a tree, try to return a useful variable name that we can use
11303 to prefix a temporary that is being assigned the value of the tree.
11304 I.E. given <temp> = &A, return A. */
11305
11306 const char *
11307 get_name (tree t)
11308 {
11309 tree stripped_decl;
11310
11311 stripped_decl = t;
11312 STRIP_NOPS (stripped_decl);
11313 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11314 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11315 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11316 {
11317 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11318 if (!name)
11319 return NULL;
11320 return IDENTIFIER_POINTER (name);
11321 }
11322 else
11323 {
11324 switch (TREE_CODE (stripped_decl))
11325 {
11326 case ADDR_EXPR:
11327 return get_name (TREE_OPERAND (stripped_decl, 0));
11328 default:
11329 return NULL;
11330 }
11331 }
11332 }
11333
11334 /* Return true if TYPE has a variable argument list. */
11335
11336 bool
11337 stdarg_p (const_tree fntype)
11338 {
11339 function_args_iterator args_iter;
11340 tree n = NULL_TREE, t;
11341
11342 if (!fntype)
11343 return false;
11344
11345 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11346 {
11347 n = t;
11348 }
11349
11350 return n != NULL_TREE && n != void_type_node;
11351 }
11352
11353 /* Return true if TYPE has a prototype. */
11354
11355 bool
11356 prototype_p (tree fntype)
11357 {
11358 tree t;
11359
11360 gcc_assert (fntype != NULL_TREE);
11361
11362 t = TYPE_ARG_TYPES (fntype);
11363 return (t != NULL_TREE);
11364 }
11365
11366 /* If BLOCK is inlined from an __attribute__((__artificial__))
11367 routine, return pointer to location from where it has been
11368 called. */
11369 location_t *
11370 block_nonartificial_location (tree block)
11371 {
11372 location_t *ret = NULL;
11373
11374 while (block && TREE_CODE (block) == BLOCK
11375 && BLOCK_ABSTRACT_ORIGIN (block))
11376 {
11377 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11378
11379 while (TREE_CODE (ao) == BLOCK
11380 && BLOCK_ABSTRACT_ORIGIN (ao)
11381 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11382 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11383
11384 if (TREE_CODE (ao) == FUNCTION_DECL)
11385 {
11386 /* If AO is an artificial inline, point RET to the
11387 call site locus at which it has been inlined and continue
11388 the loop, in case AO's caller is also an artificial
11389 inline. */
11390 if (DECL_DECLARED_INLINE_P (ao)
11391 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11392 ret = &BLOCK_SOURCE_LOCATION (block);
11393 else
11394 break;
11395 }
11396 else if (TREE_CODE (ao) != BLOCK)
11397 break;
11398
11399 block = BLOCK_SUPERCONTEXT (block);
11400 }
11401 return ret;
11402 }
11403
11404
11405 /* If EXP is inlined from an __attribute__((__artificial__))
11406 function, return the location of the original call expression. */
11407
11408 location_t
11409 tree_nonartificial_location (tree exp)
11410 {
11411 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11412
11413 if (loc)
11414 return *loc;
11415 else
11416 return EXPR_LOCATION (exp);
11417 }
11418
11419
11420 /* These are the hash table functions for the hash table of OPTIMIZATION_NODEq
11421 nodes. */
11422
11423 /* Return the hash code code X, an OPTIMIZATION_NODE or TARGET_OPTION code. */
11424
11425 static hashval_t
11426 cl_option_hash_hash (const void *x)
11427 {
11428 const_tree const t = (const_tree) x;
11429 const char *p;
11430 size_t i;
11431 size_t len = 0;
11432 hashval_t hash = 0;
11433
11434 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11435 {
11436 p = (const char *)TREE_OPTIMIZATION (t);
11437 len = sizeof (struct cl_optimization);
11438 }
11439
11440 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11441 {
11442 p = (const char *)TREE_TARGET_OPTION (t);
11443 len = sizeof (struct cl_target_option);
11444 }
11445
11446 else
11447 gcc_unreachable ();
11448
11449 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11450 something else. */
11451 for (i = 0; i < len; i++)
11452 if (p[i])
11453 hash = (hash << 4) ^ ((i << 2) | p[i]);
11454
11455 return hash;
11456 }
11457
11458 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11459 TARGET_OPTION tree node) is the same as that given by *Y, which is the
11460 same. */
11461
11462 static int
11463 cl_option_hash_eq (const void *x, const void *y)
11464 {
11465 const_tree const xt = (const_tree) x;
11466 const_tree const yt = (const_tree) y;
11467 const char *xp;
11468 const char *yp;
11469 size_t len;
11470
11471 if (TREE_CODE (xt) != TREE_CODE (yt))
11472 return 0;
11473
11474 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11475 {
11476 xp = (const char *)TREE_OPTIMIZATION (xt);
11477 yp = (const char *)TREE_OPTIMIZATION (yt);
11478 len = sizeof (struct cl_optimization);
11479 }
11480
11481 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11482 {
11483 xp = (const char *)TREE_TARGET_OPTION (xt);
11484 yp = (const char *)TREE_TARGET_OPTION (yt);
11485 len = sizeof (struct cl_target_option);
11486 }
11487
11488 else
11489 gcc_unreachable ();
11490
11491 return (memcmp (xp, yp, len) == 0);
11492 }
11493
11494 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11495
11496 tree
11497 build_optimization_node (struct gcc_options *opts)
11498 {
11499 tree t;
11500 void **slot;
11501
11502 /* Use the cache of optimization nodes. */
11503
11504 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11505 opts);
11506
11507 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11508 t = (tree) *slot;
11509 if (!t)
11510 {
11511 /* Insert this one into the hash table. */
11512 t = cl_optimization_node;
11513 *slot = t;
11514
11515 /* Make a new node for next time round. */
11516 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11517 }
11518
11519 return t;
11520 }
11521
11522 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11523
11524 tree
11525 build_target_option_node (struct gcc_options *opts)
11526 {
11527 tree t;
11528 void **slot;
11529
11530 /* Use the cache of optimization nodes. */
11531
11532 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11533 opts);
11534
11535 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11536 t = (tree) *slot;
11537 if (!t)
11538 {
11539 /* Insert this one into the hash table. */
11540 t = cl_target_option_node;
11541 *slot = t;
11542
11543 /* Make a new node for next time round. */
11544 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11545 }
11546
11547 return t;
11548 }
11549
11550 /* Reset TREE_TARGET_GLOBALS cache for TARGET_OPTION_NODE.
11551 Called through htab_traverse. */
11552
11553 static int
11554 prepare_target_option_node_for_pch (void **slot, void *)
11555 {
11556 tree node = (tree) *slot;
11557 if (TREE_CODE (node) == TARGET_OPTION_NODE)
11558 TREE_TARGET_GLOBALS (node) = NULL;
11559 return 1;
11560 }
11561
11562 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11563 so that they aren't saved during PCH writing. */
11564
11565 void
11566 prepare_target_option_nodes_for_pch (void)
11567 {
11568 htab_traverse (cl_option_hash_table, prepare_target_option_node_for_pch,
11569 NULL);
11570 }
11571
11572 /* Determine the "ultimate origin" of a block. The block may be an inlined
11573 instance of an inlined instance of a block which is local to an inline
11574 function, so we have to trace all of the way back through the origin chain
11575 to find out what sort of node actually served as the original seed for the
11576 given block. */
11577
11578 tree
11579 block_ultimate_origin (const_tree block)
11580 {
11581 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11582
11583 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11584 we're trying to output the abstract instance of this function. */
11585 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11586 return NULL_TREE;
11587
11588 if (immediate_origin == NULL_TREE)
11589 return NULL_TREE;
11590 else
11591 {
11592 tree ret_val;
11593 tree lookahead = immediate_origin;
11594
11595 do
11596 {
11597 ret_val = lookahead;
11598 lookahead = (TREE_CODE (ret_val) == BLOCK
11599 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11600 }
11601 while (lookahead != NULL && lookahead != ret_val);
11602
11603 /* The block's abstract origin chain may not be the *ultimate* origin of
11604 the block. It could lead to a DECL that has an abstract origin set.
11605 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11606 will give us if it has one). Note that DECL's abstract origins are
11607 supposed to be the most distant ancestor (or so decl_ultimate_origin
11608 claims), so we don't need to loop following the DECL origins. */
11609 if (DECL_P (ret_val))
11610 return DECL_ORIGIN (ret_val);
11611
11612 return ret_val;
11613 }
11614 }
11615
11616 /* Return true iff conversion in EXP generates no instruction. Mark
11617 it inline so that we fully inline into the stripping functions even
11618 though we have two uses of this function. */
11619
11620 static inline bool
11621 tree_nop_conversion (const_tree exp)
11622 {
11623 tree outer_type, inner_type;
11624
11625 if (!CONVERT_EXPR_P (exp)
11626 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11627 return false;
11628 if (TREE_OPERAND (exp, 0) == error_mark_node)
11629 return false;
11630
11631 outer_type = TREE_TYPE (exp);
11632 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11633
11634 if (!inner_type)
11635 return false;
11636
11637 /* Use precision rather then machine mode when we can, which gives
11638 the correct answer even for submode (bit-field) types. */
11639 if ((INTEGRAL_TYPE_P (outer_type)
11640 || POINTER_TYPE_P (outer_type)
11641 || TREE_CODE (outer_type) == OFFSET_TYPE)
11642 && (INTEGRAL_TYPE_P (inner_type)
11643 || POINTER_TYPE_P (inner_type)
11644 || TREE_CODE (inner_type) == OFFSET_TYPE))
11645 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11646
11647 /* Otherwise fall back on comparing machine modes (e.g. for
11648 aggregate types, floats). */
11649 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11650 }
11651
11652 /* Return true iff conversion in EXP generates no instruction. Don't
11653 consider conversions changing the signedness. */
11654
11655 static bool
11656 tree_sign_nop_conversion (const_tree exp)
11657 {
11658 tree outer_type, inner_type;
11659
11660 if (!tree_nop_conversion (exp))
11661 return false;
11662
11663 outer_type = TREE_TYPE (exp);
11664 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11665
11666 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11667 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11668 }
11669
11670 /* Strip conversions from EXP according to tree_nop_conversion and
11671 return the resulting expression. */
11672
11673 tree
11674 tree_strip_nop_conversions (tree exp)
11675 {
11676 while (tree_nop_conversion (exp))
11677 exp = TREE_OPERAND (exp, 0);
11678 return exp;
11679 }
11680
11681 /* Strip conversions from EXP according to tree_sign_nop_conversion
11682 and return the resulting expression. */
11683
11684 tree
11685 tree_strip_sign_nop_conversions (tree exp)
11686 {
11687 while (tree_sign_nop_conversion (exp))
11688 exp = TREE_OPERAND (exp, 0);
11689 return exp;
11690 }
11691
11692 /* Avoid any floating point extensions from EXP. */
11693 tree
11694 strip_float_extensions (tree exp)
11695 {
11696 tree sub, expt, subt;
11697
11698 /* For floating point constant look up the narrowest type that can hold
11699 it properly and handle it like (type)(narrowest_type)constant.
11700 This way we can optimize for instance a=a*2.0 where "a" is float
11701 but 2.0 is double constant. */
11702 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11703 {
11704 REAL_VALUE_TYPE orig;
11705 tree type = NULL;
11706
11707 orig = TREE_REAL_CST (exp);
11708 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11709 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11710 type = float_type_node;
11711 else if (TYPE_PRECISION (TREE_TYPE (exp))
11712 > TYPE_PRECISION (double_type_node)
11713 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11714 type = double_type_node;
11715 if (type)
11716 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11717 }
11718
11719 if (!CONVERT_EXPR_P (exp))
11720 return exp;
11721
11722 sub = TREE_OPERAND (exp, 0);
11723 subt = TREE_TYPE (sub);
11724 expt = TREE_TYPE (exp);
11725
11726 if (!FLOAT_TYPE_P (subt))
11727 return exp;
11728
11729 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11730 return exp;
11731
11732 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11733 return exp;
11734
11735 return strip_float_extensions (sub);
11736 }
11737
11738 /* Strip out all handled components that produce invariant
11739 offsets. */
11740
11741 const_tree
11742 strip_invariant_refs (const_tree op)
11743 {
11744 while (handled_component_p (op))
11745 {
11746 switch (TREE_CODE (op))
11747 {
11748 case ARRAY_REF:
11749 case ARRAY_RANGE_REF:
11750 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11751 || TREE_OPERAND (op, 2) != NULL_TREE
11752 || TREE_OPERAND (op, 3) != NULL_TREE)
11753 return NULL;
11754 break;
11755
11756 case COMPONENT_REF:
11757 if (TREE_OPERAND (op, 2) != NULL_TREE)
11758 return NULL;
11759 break;
11760
11761 default:;
11762 }
11763 op = TREE_OPERAND (op, 0);
11764 }
11765
11766 return op;
11767 }
11768
11769 static GTY(()) tree gcc_eh_personality_decl;
11770
11771 /* Return the GCC personality function decl. */
11772
11773 tree
11774 lhd_gcc_personality (void)
11775 {
11776 if (!gcc_eh_personality_decl)
11777 gcc_eh_personality_decl = build_personality_function ("gcc");
11778 return gcc_eh_personality_decl;
11779 }
11780
11781 /* TARGET is a call target of GIMPLE call statement
11782 (obtained by gimple_call_fn). Return true if it is
11783 OBJ_TYPE_REF representing an virtual call of C++ method.
11784 (As opposed to OBJ_TYPE_REF representing objc calls
11785 through a cast where middle-end devirtualization machinery
11786 can't apply.) */
11787
11788 bool
11789 virtual_method_call_p (tree target)
11790 {
11791 if (TREE_CODE (target) != OBJ_TYPE_REF)
11792 return false;
11793 target = TREE_TYPE (target);
11794 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11795 target = TREE_TYPE (target);
11796 if (TREE_CODE (target) == FUNCTION_TYPE)
11797 return false;
11798 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11799 return true;
11800 }
11801
11802 /* REF is OBJ_TYPE_REF, return the class the ref corresponds to. */
11803
11804 tree
11805 obj_type_ref_class (tree ref)
11806 {
11807 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11808 ref = TREE_TYPE (ref);
11809 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11810 ref = TREE_TYPE (ref);
11811 /* We look for type THIS points to. ObjC also builds
11812 OBJ_TYPE_REF with non-method calls, Their first parameter
11813 ID however also corresponds to class type. */
11814 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11815 || TREE_CODE (ref) == FUNCTION_TYPE);
11816 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11817 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11818 return TREE_TYPE (ref);
11819 }
11820
11821 /* Return true if T is in anonymous namespace. */
11822
11823 bool
11824 type_in_anonymous_namespace_p (const_tree t)
11825 {
11826 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11827 bulitin types; those have CONTEXT NULL. */
11828 if (!TYPE_CONTEXT (t))
11829 return false;
11830 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11831 }
11832
11833 /* Try to find a base info of BINFO that would have its field decl at offset
11834 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11835 found, return, otherwise return NULL_TREE. */
11836
11837 tree
11838 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11839 {
11840 tree type = BINFO_TYPE (binfo);
11841
11842 while (true)
11843 {
11844 HOST_WIDE_INT pos, size;
11845 tree fld;
11846 int i;
11847
11848 if (types_same_for_odr (type, expected_type))
11849 return binfo;
11850 if (offset < 0)
11851 return NULL_TREE;
11852
11853 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11854 {
11855 if (TREE_CODE (fld) != FIELD_DECL)
11856 continue;
11857
11858 pos = int_bit_position (fld);
11859 size = tree_to_uhwi (DECL_SIZE (fld));
11860 if (pos <= offset && (pos + size) > offset)
11861 break;
11862 }
11863 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11864 return NULL_TREE;
11865
11866 if (!DECL_ARTIFICIAL (fld))
11867 {
11868 binfo = TYPE_BINFO (TREE_TYPE (fld));
11869 if (!binfo)
11870 return NULL_TREE;
11871 }
11872 /* Offset 0 indicates the primary base, whose vtable contents are
11873 represented in the binfo for the derived class. */
11874 else if (offset != 0)
11875 {
11876 tree base_binfo, binfo2 = binfo;
11877
11878 /* Find BINFO corresponding to FLD. This is bit harder
11879 by a fact that in virtual inheritance we may need to walk down
11880 the non-virtual inheritance chain. */
11881 while (true)
11882 {
11883 tree containing_binfo = NULL, found_binfo = NULL;
11884 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
11885 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11886 {
11887 found_binfo = base_binfo;
11888 break;
11889 }
11890 else
11891 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
11892 - tree_to_shwi (BINFO_OFFSET (binfo)))
11893 * BITS_PER_UNIT < pos
11894 /* Rule out types with no virtual methods or we can get confused
11895 here by zero sized bases. */
11896 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
11897 && (!containing_binfo
11898 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
11899 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
11900 containing_binfo = base_binfo;
11901 if (found_binfo)
11902 {
11903 binfo = found_binfo;
11904 break;
11905 }
11906 if (!containing_binfo)
11907 return NULL_TREE;
11908 binfo2 = containing_binfo;
11909 }
11910 }
11911
11912 type = TREE_TYPE (fld);
11913 offset -= pos;
11914 }
11915 }
11916
11917 /* Returns true if X is a typedef decl. */
11918
11919 bool
11920 is_typedef_decl (tree x)
11921 {
11922 return (x && TREE_CODE (x) == TYPE_DECL
11923 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11924 }
11925
11926 /* Returns true iff TYPE is a type variant created for a typedef. */
11927
11928 bool
11929 typedef_variant_p (tree type)
11930 {
11931 return is_typedef_decl (TYPE_NAME (type));
11932 }
11933
11934 /* Warn about a use of an identifier which was marked deprecated. */
11935 void
11936 warn_deprecated_use (tree node, tree attr)
11937 {
11938 const char *msg;
11939
11940 if (node == 0 || !warn_deprecated_decl)
11941 return;
11942
11943 if (!attr)
11944 {
11945 if (DECL_P (node))
11946 attr = DECL_ATTRIBUTES (node);
11947 else if (TYPE_P (node))
11948 {
11949 tree decl = TYPE_STUB_DECL (node);
11950 if (decl)
11951 attr = lookup_attribute ("deprecated",
11952 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11953 }
11954 }
11955
11956 if (attr)
11957 attr = lookup_attribute ("deprecated", attr);
11958
11959 if (attr)
11960 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
11961 else
11962 msg = NULL;
11963
11964 if (DECL_P (node))
11965 {
11966 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
11967 if (msg)
11968 warning (OPT_Wdeprecated_declarations,
11969 "%qD is deprecated (declared at %r%s:%d%R): %s",
11970 node, "locus", xloc.file, xloc.line, msg);
11971 else
11972 warning (OPT_Wdeprecated_declarations,
11973 "%qD is deprecated (declared at %r%s:%d%R)",
11974 node, "locus", xloc.file, xloc.line);
11975 }
11976 else if (TYPE_P (node))
11977 {
11978 tree what = NULL_TREE;
11979 tree decl = TYPE_STUB_DECL (node);
11980
11981 if (TYPE_NAME (node))
11982 {
11983 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
11984 what = TYPE_NAME (node);
11985 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
11986 && DECL_NAME (TYPE_NAME (node)))
11987 what = DECL_NAME (TYPE_NAME (node));
11988 }
11989
11990 if (decl)
11991 {
11992 expanded_location xloc
11993 = expand_location (DECL_SOURCE_LOCATION (decl));
11994 if (what)
11995 {
11996 if (msg)
11997 warning (OPT_Wdeprecated_declarations,
11998 "%qE is deprecated (declared at %r%s:%d%R): %s",
11999 what, "locus", xloc.file, xloc.line, msg);
12000 else
12001 warning (OPT_Wdeprecated_declarations,
12002 "%qE is deprecated (declared at %r%s:%d%R)",
12003 what, "locus", xloc.file, xloc.line);
12004 }
12005 else
12006 {
12007 if (msg)
12008 warning (OPT_Wdeprecated_declarations,
12009 "type is deprecated (declared at %r%s:%d%R): %s",
12010 "locus", xloc.file, xloc.line, msg);
12011 else
12012 warning (OPT_Wdeprecated_declarations,
12013 "type is deprecated (declared at %r%s:%d%R)",
12014 "locus", xloc.file, xloc.line);
12015 }
12016 }
12017 else
12018 {
12019 if (what)
12020 {
12021 if (msg)
12022 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12023 what, msg);
12024 else
12025 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12026 }
12027 else
12028 {
12029 if (msg)
12030 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12031 msg);
12032 else
12033 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12034 }
12035 }
12036 }
12037 }
12038
12039 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12040 somewhere in it. */
12041
12042 bool
12043 contains_bitfld_component_ref_p (const_tree ref)
12044 {
12045 while (handled_component_p (ref))
12046 {
12047 if (TREE_CODE (ref) == COMPONENT_REF
12048 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12049 return true;
12050 ref = TREE_OPERAND (ref, 0);
12051 }
12052
12053 return false;
12054 }
12055
12056 /* Try to determine whether a TRY_CATCH expression can fall through.
12057 This is a subroutine of block_may_fallthru. */
12058
12059 static bool
12060 try_catch_may_fallthru (const_tree stmt)
12061 {
12062 tree_stmt_iterator i;
12063
12064 /* If the TRY block can fall through, the whole TRY_CATCH can
12065 fall through. */
12066 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12067 return true;
12068
12069 i = tsi_start (TREE_OPERAND (stmt, 1));
12070 switch (TREE_CODE (tsi_stmt (i)))
12071 {
12072 case CATCH_EXPR:
12073 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12074 catch expression and a body. The whole TRY_CATCH may fall
12075 through iff any of the catch bodies falls through. */
12076 for (; !tsi_end_p (i); tsi_next (&i))
12077 {
12078 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12079 return true;
12080 }
12081 return false;
12082
12083 case EH_FILTER_EXPR:
12084 /* The exception filter expression only matters if there is an
12085 exception. If the exception does not match EH_FILTER_TYPES,
12086 we will execute EH_FILTER_FAILURE, and we will fall through
12087 if that falls through. If the exception does match
12088 EH_FILTER_TYPES, the stack unwinder will continue up the
12089 stack, so we will not fall through. We don't know whether we
12090 will throw an exception which matches EH_FILTER_TYPES or not,
12091 so we just ignore EH_FILTER_TYPES and assume that we might
12092 throw an exception which doesn't match. */
12093 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12094
12095 default:
12096 /* This case represents statements to be executed when an
12097 exception occurs. Those statements are implicitly followed
12098 by a RESX statement to resume execution after the exception.
12099 So in this case the TRY_CATCH never falls through. */
12100 return false;
12101 }
12102 }
12103
12104 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12105 need not be 100% accurate; simply be conservative and return true if we
12106 don't know. This is used only to avoid stupidly generating extra code.
12107 If we're wrong, we'll just delete the extra code later. */
12108
12109 bool
12110 block_may_fallthru (const_tree block)
12111 {
12112 /* This CONST_CAST is okay because expr_last returns its argument
12113 unmodified and we assign it to a const_tree. */
12114 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12115
12116 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12117 {
12118 case GOTO_EXPR:
12119 case RETURN_EXPR:
12120 /* Easy cases. If the last statement of the block implies
12121 control transfer, then we can't fall through. */
12122 return false;
12123
12124 case SWITCH_EXPR:
12125 /* If SWITCH_LABELS is set, this is lowered, and represents a
12126 branch to a selected label and hence can not fall through.
12127 Otherwise SWITCH_BODY is set, and the switch can fall
12128 through. */
12129 return SWITCH_LABELS (stmt) == NULL_TREE;
12130
12131 case COND_EXPR:
12132 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12133 return true;
12134 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12135
12136 case BIND_EXPR:
12137 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12138
12139 case TRY_CATCH_EXPR:
12140 return try_catch_may_fallthru (stmt);
12141
12142 case TRY_FINALLY_EXPR:
12143 /* The finally clause is always executed after the try clause,
12144 so if it does not fall through, then the try-finally will not
12145 fall through. Otherwise, if the try clause does not fall
12146 through, then when the finally clause falls through it will
12147 resume execution wherever the try clause was going. So the
12148 whole try-finally will only fall through if both the try
12149 clause and the finally clause fall through. */
12150 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12151 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12152
12153 case MODIFY_EXPR:
12154 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12155 stmt = TREE_OPERAND (stmt, 1);
12156 else
12157 return true;
12158 /* FALLTHRU */
12159
12160 case CALL_EXPR:
12161 /* Functions that do not return do not fall through. */
12162 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12163
12164 case CLEANUP_POINT_EXPR:
12165 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12166
12167 case TARGET_EXPR:
12168 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12169
12170 case ERROR_MARK:
12171 return true;
12172
12173 default:
12174 return lang_hooks.block_may_fallthru (stmt);
12175 }
12176 }
12177
12178 /* True if we are using EH to handle cleanups. */
12179 static bool using_eh_for_cleanups_flag = false;
12180
12181 /* This routine is called from front ends to indicate eh should be used for
12182 cleanups. */
12183 void
12184 using_eh_for_cleanups (void)
12185 {
12186 using_eh_for_cleanups_flag = true;
12187 }
12188
12189 /* Query whether EH is used for cleanups. */
12190 bool
12191 using_eh_for_cleanups_p (void)
12192 {
12193 return using_eh_for_cleanups_flag;
12194 }
12195
12196 /* Wrapper for tree_code_name to ensure that tree code is valid */
12197 const char *
12198 get_tree_code_name (enum tree_code code)
12199 {
12200 const char *invalid = "<invalid tree code>";
12201
12202 if (code >= MAX_TREE_CODES)
12203 return invalid;
12204
12205 return tree_code_name[code];
12206 }
12207
12208 /* Drops the TREE_OVERFLOW flag from T. */
12209
12210 tree
12211 drop_tree_overflow (tree t)
12212 {
12213 gcc_checking_assert (TREE_OVERFLOW (t));
12214
12215 /* For tree codes with a sharing machinery re-build the result. */
12216 if (TREE_CODE (t) == INTEGER_CST)
12217 return wide_int_to_tree (TREE_TYPE (t), t);
12218
12219 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12220 and drop the flag. */
12221 t = copy_node (t);
12222 TREE_OVERFLOW (t) = 0;
12223 return t;
12224 }
12225
12226 /* Given a memory reference expression T, return its base address.
12227 The base address of a memory reference expression is the main
12228 object being referenced. For instance, the base address for
12229 'array[i].fld[j]' is 'array'. You can think of this as stripping
12230 away the offset part from a memory address.
12231
12232 This function calls handled_component_p to strip away all the inner
12233 parts of the memory reference until it reaches the base object. */
12234
12235 tree
12236 get_base_address (tree t)
12237 {
12238 while (handled_component_p (t))
12239 t = TREE_OPERAND (t, 0);
12240
12241 if ((TREE_CODE (t) == MEM_REF
12242 || TREE_CODE (t) == TARGET_MEM_REF)
12243 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12244 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12245
12246 /* ??? Either the alias oracle or all callers need to properly deal
12247 with WITH_SIZE_EXPRs before we can look through those. */
12248 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12249 return NULL_TREE;
12250
12251 return t;
12252 }
12253
12254 #include "gt-tree.h"