1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24	tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "tree.h"
36 #include "tm_p.h"
37 #include "function.h"
38 #include "obstack.h"
39 #include "toplev.h" /* get_random_seed */
40 #include "ggc.h"
41 #include "hashtab.h"
42 #include "filenames.h"
43 #include "output.h"
44 #include "target.h"
45 #include "common/common-target.h"
46 #include "langhooks.h"
47 #include "tree-inline.h"
48 #include "tree-iterator.h"
49 #include "basic-block.h"
50 #include "bitmap.h"
51 #include "gimple.h"
52 #include "gimple-iterator.h"
53 #include "gimplify.h"
54 #include "gimple-ssa.h"
55 #include "cgraph.h"
56 #include "tree-phinodes.h"
57 #include "tree-ssanames.h"
58 #include "tree-dfa.h"
59 #include "params.h"
60 #include "pointer-set.h"
61 #include "tree-pass.h"
62 #include "langhooks-def.h"
63 #include "diagnostic.h"
64 #include "tree-diagnostic.h"
65 #include "tree-pretty-print.h"
66 #include "except.h"
67 #include "debug.h"
68 #include "intl.h"
69
70 /* Tree code classes. */
71
72 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
73 #define END_OF_BASE_TREE_CODES tcc_exceptional,
74
75 const enum tree_code_class tree_code_type[] = {
76 #include "all-tree.def"
77 };
78
79 #undef DEFTREECODE
80 #undef END_OF_BASE_TREE_CODES
81
82 /* Table indexed by tree code giving number of expression
83 operands beyond the fixed part of the node structure.
84 Not used for types or decls. */
85
86 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
87 #define END_OF_BASE_TREE_CODES 0,
88
89 const unsigned char tree_code_length[] = {
90 #include "all-tree.def"
91 };
92
93 #undef DEFTREECODE
94 #undef END_OF_BASE_TREE_CODES
95
96 /* Names of tree components.
97 Used for printing out the tree and error messages. */
98 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
99 #define END_OF_BASE_TREE_CODES "@dummy",
100
101 static const char *const tree_code_name[] = {
102 #include "all-tree.def"
103 };
104
105 #undef DEFTREECODE
106 #undef END_OF_BASE_TREE_CODES
107
108 /* Each tree code class has an associated string representation.
109 These must correspond to the tree_code_class entries. */
110
111 const char *const tree_code_class_strings[] =
112 {
113 "exceptional",
114 "constant",
115 "type",
116 "declaration",
117 "reference",
118 "comparison",
119 "unary",
120 "binary",
121 "statement",
122 "vl_exp",
123 "expression"
124 };
125
126 /* obstack.[ch] explicitly declined to prototype this. */
127 extern int _obstack_allocated_p (struct obstack *h, void *obj);
128
129 /* Statistics-gathering stuff. */
130
131 static int tree_code_counts[MAX_TREE_CODES];
132 int tree_node_counts[(int) all_kinds];
133 int tree_node_sizes[(int) all_kinds];
134
135 /* Keep in sync with tree.h:enum tree_node_kind. */
136 static const char * const tree_node_kind_names[] = {
137 "decls",
138 "types",
139 "blocks",
140 "stmts",
141 "refs",
142 "exprs",
143 "constants",
144 "identifiers",
145 "vecs",
146 "binfos",
147 "ssa names",
148 "constructors",
149 "random kinds",
150 "lang_decl kinds",
151 "lang_type kinds",
152 "omp clauses",
153 };
154
155 /* Unique id for next decl created. */
156 static GTY(()) int next_decl_uid;
157 /* Unique id for next type created. */
158 static GTY(()) int next_type_uid = 1;
159 /* Unique id for next debug decl created. Use negative numbers,
160 to catch erroneous uses. */
161 static GTY(()) int next_debug_decl_uid;
162
163 /* Since we cannot rehash a type after it is in the table, we have to
164 keep the hash code. */
165
166 struct GTY(()) type_hash {
167 unsigned long hash;
168 tree type;
169 };
170
171 /* Initial size of the hash table (rounded to next prime). */
172 #define TYPE_HASH_INITIAL_SIZE 1000
173
174 /* Now here is the hash table. When recording a type, it is added to
175 the slot whose index is the hash code. Note that the hash table is
176 used for several kinds of types (function types, array types and
177 array index range types, for now). While all these live in the
178 same table, they are completely independent, and the hash code is
179 computed differently for each of these. */
180
181 static GTY ((if_marked ("type_hash_marked_p"), param_is (struct type_hash)))
182 htab_t type_hash_table;
183
184 /* Hash table and temporary node for larger integer const values. */
185 static GTY (()) tree int_cst_node;
186 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
187 htab_t int_cst_hash_table;
188
189 /* Hash table for optimization flags and target option flags. Use the same
190 hash table for both sets of options. Nodes for building the current
191 optimization and target option nodes. The assumption is most of the time
192 the options created will already be in the hash table, so we avoid
193	allocating and freeing up a node repeatedly. */
194 static GTY (()) tree cl_optimization_node;
195 static GTY (()) tree cl_target_option_node;
196 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
197 htab_t cl_option_hash_table;
198
199 /* General tree->tree mapping structure for use in hash tables. */
200
201
202 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
203 htab_t debug_expr_for_decl;
204
205 static GTY ((if_marked ("tree_decl_map_marked_p"), param_is (struct tree_decl_map)))
206 htab_t value_expr_for_decl;
207
208 static GTY ((if_marked ("tree_vec_map_marked_p"), param_is (struct tree_vec_map)))
209 htab_t debug_args_for_decl;
210
211 static GTY ((if_marked ("tree_priority_map_marked_p"),
212 param_is (struct tree_priority_map)))
213 htab_t init_priority_for_decl;
214
215 static void set_type_quals (tree, int);
216 static int type_hash_eq (const void *, const void *);
217 static hashval_t type_hash_hash (const void *);
218 static hashval_t int_cst_hash_hash (const void *);
219 static int int_cst_hash_eq (const void *, const void *);
220 static hashval_t cl_option_hash_hash (const void *);
221 static int cl_option_hash_eq (const void *, const void *);
222 static void print_type_hash_statistics (void);
223 static void print_debug_expr_statistics (void);
224 static void print_value_expr_statistics (void);
225 static int type_hash_marked_p (const void *);
226 static unsigned int type_hash_list (const_tree, hashval_t);
227 static unsigned int attribute_hash_list (const_tree, hashval_t);
228 static bool decls_same_for_odr (tree decl1, tree decl2);
229
230 tree global_trees[TI_MAX];
231 tree integer_types[itk_none];
232
233 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
234
235 /* Number of operands for each OpenMP clause. */
236 unsigned const char omp_clause_num_ops[] =
237 {
238 0, /* OMP_CLAUSE_ERROR */
239 1, /* OMP_CLAUSE_PRIVATE */
240 1, /* OMP_CLAUSE_SHARED */
241 1, /* OMP_CLAUSE_FIRSTPRIVATE */
242 2, /* OMP_CLAUSE_LASTPRIVATE */
243 4, /* OMP_CLAUSE_REDUCTION */
244 1, /* OMP_CLAUSE_COPYIN */
245 1, /* OMP_CLAUSE_COPYPRIVATE */
246 2, /* OMP_CLAUSE_LINEAR */
247 2, /* OMP_CLAUSE_ALIGNED */
248 1, /* OMP_CLAUSE_DEPEND */
249 1, /* OMP_CLAUSE_UNIFORM */
250 2, /* OMP_CLAUSE_FROM */
251 2, /* OMP_CLAUSE_TO */
252 2, /* OMP_CLAUSE_MAP */
253 1, /* OMP_CLAUSE__LOOPTEMP_ */
254 1, /* OMP_CLAUSE_IF */
255 1, /* OMP_CLAUSE_NUM_THREADS */
256 1, /* OMP_CLAUSE_SCHEDULE */
257 0, /* OMP_CLAUSE_NOWAIT */
258 0, /* OMP_CLAUSE_ORDERED */
259 0, /* OMP_CLAUSE_DEFAULT */
260 3, /* OMP_CLAUSE_COLLAPSE */
261 0, /* OMP_CLAUSE_UNTIED */
262 1, /* OMP_CLAUSE_FINAL */
263 0, /* OMP_CLAUSE_MERGEABLE */
264 1, /* OMP_CLAUSE_DEVICE */
265 1, /* OMP_CLAUSE_DIST_SCHEDULE */
266 0, /* OMP_CLAUSE_INBRANCH */
267 0, /* OMP_CLAUSE_NOTINBRANCH */
268 1, /* OMP_CLAUSE_NUM_TEAMS */
269 1, /* OMP_CLAUSE_THREAD_LIMIT */
270 0, /* OMP_CLAUSE_PROC_BIND */
271 1, /* OMP_CLAUSE_SAFELEN */
272 1, /* OMP_CLAUSE_SIMDLEN */
273 0, /* OMP_CLAUSE_FOR */
274 0, /* OMP_CLAUSE_PARALLEL */
275 0, /* OMP_CLAUSE_SECTIONS */
276 0, /* OMP_CLAUSE_TASKGROUP */
277 1, /* OMP_CLAUSE__SIMDUID_ */
278 };
279
280 const char * const omp_clause_code_name[] =
281 {
282 "error_clause",
283 "private",
284 "shared",
285 "firstprivate",
286 "lastprivate",
287 "reduction",
288 "copyin",
289 "copyprivate",
290 "linear",
291 "aligned",
292 "depend",
293 "uniform",
294 "from",
295 "to",
296 "map",
297 "_looptemp_",
298 "if",
299 "num_threads",
300 "schedule",
301 "nowait",
302 "ordered",
303 "default",
304 "collapse",
305 "untied",
306 "final",
307 "mergeable",
308 "device",
309 "dist_schedule",
310 "inbranch",
311 "notinbranch",
312 "num_teams",
313 "thread_limit",
314 "proc_bind",
315 "safelen",
316 "simdlen",
317 "for",
318 "parallel",
319 "sections",
320 "taskgroup",
321 "_simduid_"
322 };
323
324
325 /* Return the tree node structure used by tree code CODE. */
326
327 static inline enum tree_node_structure_enum
328 tree_node_structure_for_code (enum tree_code code)
329 {
330 switch (TREE_CODE_CLASS (code))
331 {
332 case tcc_declaration:
333 {
334 switch (code)
335 {
336 case FIELD_DECL:
337 return TS_FIELD_DECL;
338 case PARM_DECL:
339 return TS_PARM_DECL;
340 case VAR_DECL:
341 return TS_VAR_DECL;
342 case LABEL_DECL:
343 return TS_LABEL_DECL;
344 case RESULT_DECL:
345 return TS_RESULT_DECL;
346 case DEBUG_EXPR_DECL:
347 return TS_DECL_WRTL;
348 case CONST_DECL:
349 return TS_CONST_DECL;
350 case TYPE_DECL:
351 return TS_TYPE_DECL;
352 case FUNCTION_DECL:
353 return TS_FUNCTION_DECL;
354 case TRANSLATION_UNIT_DECL:
355 return TS_TRANSLATION_UNIT_DECL;
356 default:
357 return TS_DECL_NON_COMMON;
358 }
359 }
360 case tcc_type:
361 return TS_TYPE_NON_COMMON;
362 case tcc_reference:
363 case tcc_comparison:
364 case tcc_unary:
365 case tcc_binary:
366 case tcc_expression:
367 case tcc_statement:
368 case tcc_vl_exp:
369 return TS_EXP;
370 default: /* tcc_constant and tcc_exceptional */
371 break;
372 }
373 switch (code)
374 {
375 /* tcc_constant cases. */
376 case INTEGER_CST: return TS_INT_CST;
377 case REAL_CST: return TS_REAL_CST;
378 case FIXED_CST: return TS_FIXED_CST;
379 case COMPLEX_CST: return TS_COMPLEX;
380 case VECTOR_CST: return TS_VECTOR;
381 case STRING_CST: return TS_STRING;
382 /* tcc_exceptional cases. */
383 case ERROR_MARK: return TS_COMMON;
384 case IDENTIFIER_NODE: return TS_IDENTIFIER;
385 case TREE_LIST: return TS_LIST;
386 case TREE_VEC: return TS_VEC;
387 case SSA_NAME: return TS_SSA_NAME;
388 case PLACEHOLDER_EXPR: return TS_COMMON;
389 case STATEMENT_LIST: return TS_STATEMENT_LIST;
390 case BLOCK: return TS_BLOCK;
391 case CONSTRUCTOR: return TS_CONSTRUCTOR;
392 case TREE_BINFO: return TS_BINFO;
393 case OMP_CLAUSE: return TS_OMP_CLAUSE;
394 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
395 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
396
397 default:
398 gcc_unreachable ();
399 }
400 }
401
402
403 /* Initialize tree_contains_struct to describe the hierarchy of tree
404 nodes. */
405
406 static void
407 initialize_tree_contains_struct (void)
408 {
409 unsigned i;
410
411 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
412 {
413 enum tree_code code;
414 enum tree_node_structure_enum ts_code;
415
416 code = (enum tree_code) i;
417 ts_code = tree_node_structure_for_code (code);
418
419 /* Mark the TS structure itself. */
420 tree_contains_struct[code][ts_code] = 1;
421
422 /* Mark all the structures that TS is derived from. */
423 switch (ts_code)
424 {
425 case TS_TYPED:
426 case TS_BLOCK:
427 MARK_TS_BASE (code);
428 break;
429
430 case TS_COMMON:
431 case TS_INT_CST:
432 case TS_REAL_CST:
433 case TS_FIXED_CST:
434 case TS_VECTOR:
435 case TS_STRING:
436 case TS_COMPLEX:
437 case TS_SSA_NAME:
438 case TS_CONSTRUCTOR:
439 case TS_EXP:
440 case TS_STATEMENT_LIST:
441 MARK_TS_TYPED (code);
442 break;
443
444 case TS_IDENTIFIER:
445 case TS_DECL_MINIMAL:
446 case TS_TYPE_COMMON:
447 case TS_LIST:
448 case TS_VEC:
449 case TS_BINFO:
450 case TS_OMP_CLAUSE:
451 case TS_OPTIMIZATION:
452 case TS_TARGET_OPTION:
453 MARK_TS_COMMON (code);
454 break;
455
456 case TS_TYPE_WITH_LANG_SPECIFIC:
457 MARK_TS_TYPE_COMMON (code);
458 break;
459
460 case TS_TYPE_NON_COMMON:
461 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
462 break;
463
464 case TS_DECL_COMMON:
465 MARK_TS_DECL_MINIMAL (code);
466 break;
467
468 case TS_DECL_WRTL:
469 case TS_CONST_DECL:
470 MARK_TS_DECL_COMMON (code);
471 break;
472
473 case TS_DECL_NON_COMMON:
474 MARK_TS_DECL_WITH_VIS (code);
475 break;
476
477 case TS_DECL_WITH_VIS:
478 case TS_PARM_DECL:
479 case TS_LABEL_DECL:
480 case TS_RESULT_DECL:
481 MARK_TS_DECL_WRTL (code);
482 break;
483
484 case TS_FIELD_DECL:
485 MARK_TS_DECL_COMMON (code);
486 break;
487
488 case TS_VAR_DECL:
489 MARK_TS_DECL_WITH_VIS (code);
490 break;
491
492 case TS_TYPE_DECL:
493 case TS_FUNCTION_DECL:
494 MARK_TS_DECL_NON_COMMON (code);
495 break;
496
497 case TS_TRANSLATION_UNIT_DECL:
498 MARK_TS_DECL_COMMON (code);
499 break;
500
501 default:
502 gcc_unreachable ();
503 }
504 }
505
506 /* Basic consistency checks for attributes used in fold. */
507 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
508 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
509 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
510 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
511 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
512 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
513 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
514 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
515 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
516 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
517 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
518 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
519 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
520 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
521 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
522 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
523 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
524 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
525 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
526 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
527 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
528 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
529 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
530 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
531 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
532 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
533 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
534 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
535 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
536 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
537 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
538 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
539 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
540 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
541 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
542 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
543 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
544 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
545 }
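/* Editor's sketch (not part of the original source): the table built
   above is what CODE_CONTAINS_STRUCT consults, so after initialization
   a caller can ask which substructures a given code inherits, e.g.

     gcc_assert (CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_WITH_VIS));
     gcc_assert (!CODE_CONTAINS_STRUCT (FIELD_DECL, TS_DECL_WITH_VIS));

   FIELD_DECL only derives from TS_DECL_COMMON (see the TS_FIELD_DECL
   case above), which is why the second assertion is expected to hold.  */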
546
547
548 /* Init tree.c. */
549
550 void
551 init_ttree (void)
552 {
553 /* Initialize the hash table of types. */
554 type_hash_table = htab_create_ggc (TYPE_HASH_INITIAL_SIZE, type_hash_hash,
555 type_hash_eq, 0);
556
557 debug_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
558 tree_decl_map_eq, 0);
559
560 value_expr_for_decl = htab_create_ggc (512, tree_decl_map_hash,
561 tree_decl_map_eq, 0);
562 init_priority_for_decl = htab_create_ggc (512, tree_priority_map_hash,
563 tree_priority_map_eq, 0);
564
565 int_cst_hash_table = htab_create_ggc (1024, int_cst_hash_hash,
566 int_cst_hash_eq, NULL);
567
568 int_cst_node = make_node (INTEGER_CST);
569
570 cl_option_hash_table = htab_create_ggc (64, cl_option_hash_hash,
571 cl_option_hash_eq, NULL);
572
573 cl_optimization_node = make_node (OPTIMIZATION_NODE);
574 cl_target_option_node = make_node (TARGET_OPTION_NODE);
575
576 /* Initialize the tree_contains_struct array. */
577 initialize_tree_contains_struct ();
578 lang_hooks.init_ts ();
579 }
580
581 \f
582 /* The name of the object as the assembler will see it (but before any
583 translations made by ASM_OUTPUT_LABELREF). Often this is the same
584 as DECL_NAME. It is an IDENTIFIER_NODE. */
585 tree
586 decl_assembler_name (tree decl)
587 {
588 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
589 lang_hooks.set_decl_assembler_name (decl);
590 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
591 }
592
593 /* Compute the number of bytes occupied by a tree with code CODE.
594 This function cannot be used for nodes that have variable sizes,
595 including TREE_VEC, STRING_CST, and CALL_EXPR. */
596 size_t
597 tree_code_size (enum tree_code code)
598 {
599 switch (TREE_CODE_CLASS (code))
600 {
601 case tcc_declaration: /* A decl node */
602 {
603 switch (code)
604 {
605 case FIELD_DECL:
606 return sizeof (struct tree_field_decl);
607 case PARM_DECL:
608 return sizeof (struct tree_parm_decl);
609 case VAR_DECL:
610 return sizeof (struct tree_var_decl);
611 case LABEL_DECL:
612 return sizeof (struct tree_label_decl);
613 case RESULT_DECL:
614 return sizeof (struct tree_result_decl);
615 case CONST_DECL:
616 return sizeof (struct tree_const_decl);
617 case TYPE_DECL:
618 return sizeof (struct tree_type_decl);
619 case FUNCTION_DECL:
620 return sizeof (struct tree_function_decl);
621 case DEBUG_EXPR_DECL:
622 return sizeof (struct tree_decl_with_rtl);
623 default:
624 return sizeof (struct tree_decl_non_common);
625 }
626 }
627
628 case tcc_type: /* a type node */
629 return sizeof (struct tree_type_non_common);
630
631 case tcc_reference: /* a reference */
632 case tcc_expression: /* an expression */
633 case tcc_statement: /* an expression with side effects */
634 case tcc_comparison: /* a comparison expression */
635 case tcc_unary: /* a unary arithmetic expression */
636 case tcc_binary: /* a binary arithmetic expression */
637 return (sizeof (struct tree_exp)
638 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
639
640 case tcc_constant: /* a constant */
641 switch (code)
642 {
643 case INTEGER_CST: return sizeof (struct tree_int_cst);
644 case REAL_CST: return sizeof (struct tree_real_cst);
645 case FIXED_CST: return sizeof (struct tree_fixed_cst);
646 case COMPLEX_CST: return sizeof (struct tree_complex);
647 case VECTOR_CST: return sizeof (struct tree_vector);
648 case STRING_CST: gcc_unreachable ();
649 default:
650 return lang_hooks.tree_size (code);
651 }
652
653 case tcc_exceptional: /* something random, like an identifier. */
654 switch (code)
655 {
656 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
657 case TREE_LIST: return sizeof (struct tree_list);
658
659 case ERROR_MARK:
660 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
661
662 case TREE_VEC:
663 case OMP_CLAUSE: gcc_unreachable ();
664
665 case SSA_NAME: return sizeof (struct tree_ssa_name);
666
667 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
668 case BLOCK: return sizeof (struct tree_block);
669 case CONSTRUCTOR: return sizeof (struct tree_constructor);
670 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
671 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
672
673 default:
674 return lang_hooks.tree_size (code);
675 }
676
677 default:
678 gcc_unreachable ();
679 }
680 }
681
682 /* Compute the number of bytes occupied by NODE. This routine only
683 looks at TREE_CODE, except for those nodes that have variable sizes. */
684 size_t
685 tree_size (const_tree node)
686 {
687 const enum tree_code code = TREE_CODE (node);
688 switch (code)
689 {
690 case TREE_BINFO:
691 return (offsetof (struct tree_binfo, base_binfos)
692 + vec<tree, va_gc>
693 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
694
695 case TREE_VEC:
696 return (sizeof (struct tree_vec)
697 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
698
699 case VECTOR_CST:
700 return (sizeof (struct tree_vector)
701 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
702
703 case STRING_CST:
704 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
705
706 case OMP_CLAUSE:
707 return (sizeof (struct tree_omp_clause)
708 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
709 * sizeof (tree));
710
711 default:
712 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
713 return (sizeof (struct tree_exp)
714 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
715 else
716 return tree_code_size (code);
717 }
718 }
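/* Editor's sketch (not part of the original source): for fixed-size
   codes the two size routines agree, while variable-sized nodes must
   go through tree_size on an existing node, e.g.

     size_t a = tree_code_size (INTEGER_CST);
     size_t b = tree_size (some_tree_vec);

   where some_tree_vec is a hypothetical local holding an existing
   TREE_VEC; calling tree_code_size (TREE_VEC) instead would trip the
   gcc_unreachable above, since a TREE_VEC's size depends on its
   length.  */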
719
720 /* Record interesting allocation statistics for a tree node with CODE
721 and LENGTH. */
722
723 static void
724 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
725 size_t length ATTRIBUTE_UNUSED)
726 {
727 enum tree_code_class type = TREE_CODE_CLASS (code);
728 tree_node_kind kind;
729
730 if (!GATHER_STATISTICS)
731 return;
732
733 switch (type)
734 {
735 case tcc_declaration: /* A decl node */
736 kind = d_kind;
737 break;
738
739 case tcc_type: /* a type node */
740 kind = t_kind;
741 break;
742
743 case tcc_statement: /* an expression with side effects */
744 kind = s_kind;
745 break;
746
747 case tcc_reference: /* a reference */
748 kind = r_kind;
749 break;
750
751 case tcc_expression: /* an expression */
752 case tcc_comparison: /* a comparison expression */
753 case tcc_unary: /* a unary arithmetic expression */
754 case tcc_binary: /* a binary arithmetic expression */
755 kind = e_kind;
756 break;
757
758 case tcc_constant: /* a constant */
759 kind = c_kind;
760 break;
761
762 case tcc_exceptional: /* something random, like an identifier. */
763 switch (code)
764 {
765 case IDENTIFIER_NODE:
766 kind = id_kind;
767 break;
768
769 case TREE_VEC:
770 kind = vec_kind;
771 break;
772
773 case TREE_BINFO:
774 kind = binfo_kind;
775 break;
776
777 case SSA_NAME:
778 kind = ssa_name_kind;
779 break;
780
781 case BLOCK:
782 kind = b_kind;
783 break;
784
785 case CONSTRUCTOR:
786 kind = constr_kind;
787 break;
788
789 case OMP_CLAUSE:
790 kind = omp_clause_kind;
791 break;
792
793 default:
794 kind = x_kind;
795 break;
796 }
797 break;
798
799 case tcc_vl_exp:
800 kind = e_kind;
801 break;
802
803 default:
804 gcc_unreachable ();
805 }
806
807 tree_code_counts[(int) code]++;
808 tree_node_counts[(int) kind]++;
809 tree_node_sizes[(int) kind] += length;
810 }
811
812 /* Allocate and return a new UID from the DECL_UID namespace. */
813
814 int
815 allocate_decl_uid (void)
816 {
817 return next_decl_uid++;
818 }
819
820 /* Return a newly allocated node of code CODE. For decl and type
821 nodes, some other fields are initialized. The rest of the node is
822 initialized to zero. This function cannot be used for TREE_VEC or
823 OMP_CLAUSE nodes, which is enforced by asserts in tree_code_size.
824
825 Achoo! I got a code in the node. */
826
827 tree
828 make_node_stat (enum tree_code code MEM_STAT_DECL)
829 {
830 tree t;
831 enum tree_code_class type = TREE_CODE_CLASS (code);
832 size_t length = tree_code_size (code);
833
834 record_node_allocation_statistics (code, length);
835
836 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
837 TREE_SET_CODE (t, code);
838
839 switch (type)
840 {
841 case tcc_statement:
842 TREE_SIDE_EFFECTS (t) = 1;
843 break;
844
845 case tcc_declaration:
846 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
847 {
848 if (code == FUNCTION_DECL)
849 {
850 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
851 DECL_MODE (t) = FUNCTION_MODE;
852 }
853 else
854 DECL_ALIGN (t) = 1;
855 }
856 DECL_SOURCE_LOCATION (t) = input_location;
857 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
858 DECL_UID (t) = --next_debug_decl_uid;
859 else
860 {
861 DECL_UID (t) = allocate_decl_uid ();
862 SET_DECL_PT_UID (t, -1);
863 }
864 if (TREE_CODE (t) == LABEL_DECL)
865 LABEL_DECL_UID (t) = -1;
866
867 break;
868
869 case tcc_type:
870 TYPE_UID (t) = next_type_uid++;
871 TYPE_ALIGN (t) = BITS_PER_UNIT;
872 TYPE_USER_ALIGN (t) = 0;
873 TYPE_MAIN_VARIANT (t) = t;
874 TYPE_CANONICAL (t) = t;
875
876 /* Default to no attributes for type, but let target change that. */
877 TYPE_ATTRIBUTES (t) = NULL_TREE;
878 targetm.set_default_type_attributes (t);
879
880 /* We have not yet computed the alias set for this type. */
881 TYPE_ALIAS_SET (t) = -1;
882 break;
883
884 case tcc_constant:
885 TREE_CONSTANT (t) = 1;
886 break;
887
888 case tcc_expression:
889 switch (code)
890 {
891 case INIT_EXPR:
892 case MODIFY_EXPR:
893 case VA_ARG_EXPR:
894 case PREDECREMENT_EXPR:
895 case PREINCREMENT_EXPR:
896 case POSTDECREMENT_EXPR:
897 case POSTINCREMENT_EXPR:
898 /* All of these have side-effects, no matter what their
899 operands are. */
900 TREE_SIDE_EFFECTS (t) = 1;
901 break;
902
903 default:
904 break;
905 }
906 break;
907
908 default:
909 /* Other classes need no special treatment. */
910 break;
911 }
912
913 return t;
914 }
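/* Editor's sketch (not part of the original source): a minimal use of
   make_node in the style of the constructors further below, with the
   caller filling in the type afterwards:

     tree t = make_node (REAL_CST);
     TREE_TYPE (t) = double_type_node;

   Being a tcc_constant code, the node comes back with TREE_CONSTANT
   already set, per the switch above; build_real below follows the
   same pattern and then attaches the value.  */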
915 \f
916 /* Return a new node with the same contents as NODE except that its
917 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
918
919 tree
920 copy_node_stat (tree node MEM_STAT_DECL)
921 {
922 tree t;
923 enum tree_code code = TREE_CODE (node);
924 size_t length;
925
926 gcc_assert (code != STATEMENT_LIST);
927
928 length = tree_size (node);
929 record_node_allocation_statistics (code, length);
930 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
931 memcpy (t, node, length);
932
933 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
934 TREE_CHAIN (t) = 0;
935 TREE_ASM_WRITTEN (t) = 0;
936 TREE_VISITED (t) = 0;
937
938 if (TREE_CODE_CLASS (code) == tcc_declaration)
939 {
940 if (code == DEBUG_EXPR_DECL)
941 DECL_UID (t) = --next_debug_decl_uid;
942 else
943 {
944 DECL_UID (t) = allocate_decl_uid ();
945 if (DECL_PT_UID_SET_P (node))
946 SET_DECL_PT_UID (t, DECL_PT_UID (node));
947 }
948 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
949 && DECL_HAS_VALUE_EXPR_P (node))
950 {
951 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
952 DECL_HAS_VALUE_EXPR_P (t) = 1;
953 }
954 /* DECL_DEBUG_EXPR is copied explicitely by callers. */
955 if (TREE_CODE (node) == VAR_DECL)
956 DECL_HAS_DEBUG_EXPR_P (t) = 0;
957 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
958 {
959 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
960 DECL_HAS_INIT_PRIORITY_P (t) = 1;
961 }
962 if (TREE_CODE (node) == FUNCTION_DECL)
963 DECL_STRUCT_FUNCTION (t) = NULL;
964 }
965 else if (TREE_CODE_CLASS (code) == tcc_type)
966 {
967 TYPE_UID (t) = next_type_uid++;
968 /* The following is so that the debug code for
969 the copy is different from the original type.
970 The two statements usually duplicate each other
971 (because they clear fields of the same union),
972 but the optimizer should catch that. */
973 TYPE_SYMTAB_POINTER (t) = 0;
974 TYPE_SYMTAB_ADDRESS (t) = 0;
975
976 /* Do not copy the values cache. */
977 if (TYPE_CACHED_VALUES_P (t))
978 {
979 TYPE_CACHED_VALUES_P (t) = 0;
980 TYPE_CACHED_VALUES (t) = NULL_TREE;
981 }
982 }
983
984 return t;
985 }
986
987 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
988 For example, this can copy a list made of TREE_LIST nodes. */
989
990 tree
991 copy_list (tree list)
992 {
993 tree head;
994 tree prev, next;
995
996 if (list == 0)
997 return 0;
998
999 head = prev = copy_node (list);
1000 next = TREE_CHAIN (list);
1001 while (next)
1002 {
1003 TREE_CHAIN (prev) = copy_node (next);
1004 prev = TREE_CHAIN (prev);
1005 next = TREE_CHAIN (next);
1006 }
1007 return head;
1008 }
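/* Editor's sketch (not part of the original source): copy_list leaves
   the original chain untouched, so the copy can be reused or spliced
   freely, e.g.

     tree orig = tree_cons (NULL_TREE, integer_zero_node,
                            tree_cons (NULL_TREE, integer_one_node,
                                       NULL_TREE));
     tree dup  = copy_list (orig);

   dup is a fresh two-element TREE_LIST chain whose TREE_VALUEs still
   point at the same shared integer constants.  */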
1009
1010 \f
1011 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1012
1013 tree
1014 build_int_cst (tree type, HOST_WIDE_INT low)
1015 {
1016 /* Support legacy code. */
1017 if (!type)
1018 type = integer_type_node;
1019
1020 return double_int_to_tree (type, double_int::from_shwi (low));
1021 }
1022
1023 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1024
1025 tree
1026 build_int_cst_type (tree type, HOST_WIDE_INT low)
1027 {
1028 gcc_assert (type);
1029
1030 return double_int_to_tree (type, double_int::from_shwi (low));
1031 }
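/* Editor's sketch (not part of the original source): typical calls to
   the two entry points above; the NULL-type legacy form defaults to
   integer_type_node as documented:

     tree a = build_int_cst (integer_type_node, 42);
     tree b = build_int_cst (NULL_TREE, 42);
     tree c = build_int_cst_type (size_type_node, 0);

   Assuming 42 is below the small-constant sharing limit, a and b come
   from the same per-type cache and are pointer-identical.  */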
1032
1033	/* Constructs a tree of type TYPE with the value given by CST. Signedness
1034 of CST is assumed to be the same as the signedness of TYPE. */
1035
1036 tree
1037 double_int_to_tree (tree type, double_int cst)
1038 {
1039 bool sign_extended_type = !TYPE_UNSIGNED (type);
1040
1041 cst = cst.ext (TYPE_PRECISION (type), !sign_extended_type);
1042
1043 return build_int_cst_wide (type, cst.low, cst.high);
1044 }
1045
1046 /* Returns true if CST fits into range of TYPE. Signedness of CST is assumed
1047 to be the same as the signedness of TYPE. */
1048
1049 bool
1050 double_int_fits_to_tree_p (const_tree type, double_int cst)
1051 {
1052 bool sign_extended_type = !TYPE_UNSIGNED (type);
1053
1054 double_int ext
1055 = cst.ext (TYPE_PRECISION (type), !sign_extended_type);
1056
1057 return cst == ext;
1058 }
1059
1060 /* We force the double_int CST to the range of the type TYPE by sign or
1061 zero extending it. OVERFLOWABLE indicates if we are interested in
1062 overflow of the value, when >0 we are only interested in signed
1063 overflow, for <0 we are interested in any overflow. OVERFLOWED
1064	   indicates whether overflow has already occurred. We force
1065	   CST's value to be within the range of TYPE (by setting to 0 or 1 all
1066	   the bits outside the type's range). We set TREE_OVERFLOW if
1067	   OVERFLOWED is nonzero,
1068	   or OVERFLOWABLE is >0 and signed overflow occurs,
1069	   or OVERFLOWABLE is <0 and any overflow occurs.
1071 We return a new tree node for the extended double_int. The node
1072 is shared if no overflow flags are set. */
1073
1074
1075 tree
1076 force_fit_type_double (tree type, double_int cst, int overflowable,
1077 bool overflowed)
1078 {
1079 bool sign_extended_type = !TYPE_UNSIGNED (type);
1080
1081 /* If we need to set overflow flags, return a new unshared node. */
1082 if (overflowed || !double_int_fits_to_tree_p (type, cst))
1083 {
1084 if (overflowed
1085 || overflowable < 0
1086 || (overflowable > 0 && sign_extended_type))
1087 {
1088 tree t = make_node (INTEGER_CST);
1089 TREE_INT_CST (t)
1090 = cst.ext (TYPE_PRECISION (type), !sign_extended_type);
1091 TREE_TYPE (t) = type;
1092 TREE_OVERFLOW (t) = 1;
1093 return t;
1094 }
1095 }
1096
1097 /* Else build a shared node. */
1098 return double_int_to_tree (type, cst);
1099 }
1100
1101 /* These are the hash table functions for the hash table of INTEGER_CST
1102 nodes of a sizetype. */
1103
1104	/* Return the hash code of X, an INTEGER_CST. */
1105
1106 static hashval_t
1107 int_cst_hash_hash (const void *x)
1108 {
1109 const_tree const t = (const_tree) x;
1110
1111 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1112 ^ htab_hash_pointer (TREE_TYPE (t)));
1113 }
1114
1115 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1116	   is the same as that given by *Y, also an INTEGER_CST tree node. */
1117
1118 static int
1119 int_cst_hash_eq (const void *x, const void *y)
1120 {
1121 const_tree const xt = (const_tree) x;
1122 const_tree const yt = (const_tree) y;
1123
1124 return (TREE_TYPE (xt) == TREE_TYPE (yt)
1125 && TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1126 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt));
1127 }
1128
1129 /* Create an INT_CST node of TYPE and value HI:LOW.
1130 The returned node is always shared. For small integers we use a
1131 per-type vector cache, for larger ones we use a single hash table. */
1132
1133 tree
1134 build_int_cst_wide (tree type, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
1135 {
1136 tree t;
1137 int ix = -1;
1138 int limit = 0;
1139
1140 gcc_assert (type);
1141
1142 switch (TREE_CODE (type))
1143 {
1144 case NULLPTR_TYPE:
1145 gcc_assert (hi == 0 && low == 0);
1146 /* Fallthru. */
1147
1148 case POINTER_TYPE:
1149 case REFERENCE_TYPE:
1150 case POINTER_BOUNDS_TYPE:
1151 /* Cache NULL pointer and zero bounds. */
1152 if (!hi && !low)
1153 {
1154 limit = 1;
1155 ix = 0;
1156 }
1157 break;
1158
1159 case BOOLEAN_TYPE:
1160 /* Cache false or true. */
1161 limit = 2;
1162 if (!hi && low < 2)
1163 ix = low;
1164 break;
1165
1166 case INTEGER_TYPE:
1167 case OFFSET_TYPE:
1168 if (TYPE_UNSIGNED (type))
1169 {
1170 /* Cache 0..N */
1171 limit = INTEGER_SHARE_LIMIT;
1172 if (!hi && low < (unsigned HOST_WIDE_INT)INTEGER_SHARE_LIMIT)
1173 ix = low;
1174 }
1175 else
1176 {
1177 /* Cache -1..N */
1178 limit = INTEGER_SHARE_LIMIT + 1;
1179 if (!hi && low < (unsigned HOST_WIDE_INT)INTEGER_SHARE_LIMIT)
1180 ix = low + 1;
1181 else if (hi == -1 && low == -(unsigned HOST_WIDE_INT)1)
1182 ix = 0;
1183 }
1184 break;
1185
1186 case ENUMERAL_TYPE:
1187 break;
1188
1189 default:
1190 gcc_unreachable ();
1191 }
1192
1193 if (ix >= 0)
1194 {
1195 /* Look for it in the type's vector of small shared ints. */
1196 if (!TYPE_CACHED_VALUES_P (type))
1197 {
1198 TYPE_CACHED_VALUES_P (type) = 1;
1199 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1200 }
1201
1202 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1203 if (t)
1204 {
1205 /* Make sure no one is clobbering the shared constant. */
1206 gcc_assert (TREE_TYPE (t) == type);
1207 gcc_assert (TREE_INT_CST_LOW (t) == low);
1208 gcc_assert (TREE_INT_CST_HIGH (t) == hi);
1209 }
1210 else
1211 {
1212 /* Create a new shared int. */
1213 t = make_node (INTEGER_CST);
1214
1215 TREE_INT_CST_LOW (t) = low;
1216 TREE_INT_CST_HIGH (t) = hi;
1217 TREE_TYPE (t) = type;
1218
1219 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1220 }
1221 }
1222 else
1223 {
1224 /* Use the cache of larger shared ints. */
1225 void **slot;
1226
1227 TREE_INT_CST_LOW (int_cst_node) = low;
1228 TREE_INT_CST_HIGH (int_cst_node) = hi;
1229 TREE_TYPE (int_cst_node) = type;
1230
1231 slot = htab_find_slot (int_cst_hash_table, int_cst_node, INSERT);
1232 t = (tree) *slot;
1233 if (!t)
1234 {
1235 /* Insert this one into the hash table. */
1236 t = int_cst_node;
1237 *slot = t;
1238 /* Make a new node for next time round. */
1239 int_cst_node = make_node (INTEGER_CST);
1240 }
1241 }
1242
1243 return t;
1244 }
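/* Editor's sketch (not part of the original source): the sharing
   scheme above makes small constants of a given type pointer unique,
   which is why code elsewhere can compare them with ==, e.g.

     tree x = build_int_cst (integer_type_node, 1);

   should return the very node already cached as integer_one_node.
   Larger values instead go through int_cst_hash_table, which still
   yields a single node per (type, value) pair.  */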
1245
1246 void
1247 cache_integer_cst (tree t)
1248 {
1249 tree type = TREE_TYPE (t);
1250 HOST_WIDE_INT hi = TREE_INT_CST_HIGH (t);
1251 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (t);
1252 int ix = -1;
1253 int limit = 0;
1254
1255 gcc_assert (!TREE_OVERFLOW (t));
1256
1257 switch (TREE_CODE (type))
1258 {
1259 case NULLPTR_TYPE:
1260 gcc_assert (hi == 0 && low == 0);
1261 /* Fallthru. */
1262
1263 case POINTER_TYPE:
1264 case REFERENCE_TYPE:
1265 /* Cache NULL pointer. */
1266 if (!hi && !low)
1267 {
1268 limit = 1;
1269 ix = 0;
1270 }
1271 break;
1272
1273 case BOOLEAN_TYPE:
1274 /* Cache false or true. */
1275 limit = 2;
1276 if (!hi && low < 2)
1277 ix = low;
1278 break;
1279
1280 case INTEGER_TYPE:
1281 case OFFSET_TYPE:
1282 if (TYPE_UNSIGNED (type))
1283 {
1284 /* Cache 0..N */
1285 limit = INTEGER_SHARE_LIMIT;
1286 if (!hi && low < (unsigned HOST_WIDE_INT)INTEGER_SHARE_LIMIT)
1287 ix = low;
1288 }
1289 else
1290 {
1291 /* Cache -1..N */
1292 limit = INTEGER_SHARE_LIMIT + 1;
1293 if (!hi && low < (unsigned HOST_WIDE_INT)INTEGER_SHARE_LIMIT)
1294 ix = low + 1;
1295 else if (hi == -1 && low == -(unsigned HOST_WIDE_INT)1)
1296 ix = 0;
1297 }
1298 break;
1299
1300 case ENUMERAL_TYPE:
1301 break;
1302
1303 default:
1304 gcc_unreachable ();
1305 }
1306
1307 if (ix >= 0)
1308 {
1309 /* Look for it in the type's vector of small shared ints. */
1310 if (!TYPE_CACHED_VALUES_P (type))
1311 {
1312 TYPE_CACHED_VALUES_P (type) = 1;
1313 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1314 }
1315
1316 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1317 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1318 }
1319 else
1320 {
1321 /* Use the cache of larger shared ints. */
1322 void **slot;
1323
1324 slot = htab_find_slot (int_cst_hash_table, t, INSERT);
1325 /* If there is already an entry for the number verify it's the
1326 same. */
1327 if (*slot)
1328 {
1329 gcc_assert (TREE_INT_CST_LOW ((tree)*slot) == low
1330 && TREE_INT_CST_HIGH ((tree)*slot) == hi);
1331 return;
1332 }
1333 /* Otherwise insert this one into the hash table. */
1334 *slot = t;
1335 }
1336 }
1337
1338
1339 /* Builds an integer constant in TYPE such that lowest BITS bits are ones
1340 and the rest are zeros. */
1341
1342 tree
1343 build_low_bits_mask (tree type, unsigned bits)
1344 {
1345 double_int mask;
1346
1347 gcc_assert (bits <= TYPE_PRECISION (type));
1348
1349 if (bits == TYPE_PRECISION (type)
1350 && !TYPE_UNSIGNED (type))
1351 /* Sign extended all-ones mask. */
1352 mask = double_int_minus_one;
1353 else
1354 mask = double_int::mask (bits);
1355
1356 return build_int_cst_wide (type, mask.low, mask.high);
1357 }
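/* Editor's sketch (not part of the original source): a byte mask in an
   unsigned type,

     tree mask = build_low_bits_mask (unsigned_type_node, 8);

   yields the INTEGER_CST 0xff, while asking for the full precision of
   a signed type returns the sign-extended all-ones constant as noted
   above.  */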
1358
1359	/* Checks that X is an integer constant that can be expressed in (unsigned)
1360 HOST_WIDE_INT without loss of precision. */
1361
1362 bool
1363 cst_and_fits_in_hwi (const_tree x)
1364 {
1365 if (TREE_CODE (x) != INTEGER_CST)
1366 return false;
1367
1368 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1369 return false;
1370
1371 return (TREE_INT_CST_HIGH (x) == 0
1372 || TREE_INT_CST_HIGH (x) == -1);
1373 }
1374
1375	/* Build a newly constructed VECTOR_CST node of length LEN. */
1376
1377 tree
1378 make_vector_stat (unsigned len MEM_STAT_DECL)
1379 {
1380 tree t;
1381 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1382
1383 record_node_allocation_statistics (VECTOR_CST, length);
1384
1385 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1386
1387 TREE_SET_CODE (t, VECTOR_CST);
1388 TREE_CONSTANT (t) = 1;
1389
1390 return t;
1391 }
1392
1393 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1394 are in a list pointed to by VALS. */
1395
1396 tree
1397 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1398 {
1399 int over = 0;
1400 unsigned cnt = 0;
1401 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1402 TREE_TYPE (v) = type;
1403
1404 /* Iterate through elements and check for overflow. */
1405 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1406 {
1407 tree value = vals[cnt];
1408
1409 VECTOR_CST_ELT (v, cnt) = value;
1410
1411 /* Don't crash if we get an address constant. */
1412 if (!CONSTANT_CLASS_P (value))
1413 continue;
1414
1415 over |= TREE_OVERFLOW (value);
1416 }
1417
1418 TREE_OVERFLOW (v) = over;
1419 return v;
1420 }
1421
1422 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1423 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1424
1425 tree
1426 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1427 {
1428 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1429 unsigned HOST_WIDE_INT idx;
1430 tree value;
1431
1432 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1433 vec[idx] = value;
1434 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1435 vec[idx] = build_zero_cst (TREE_TYPE (type));
1436
1437 return build_vector (type, vec);
1438 }
1439
1440 /* Build a vector of type VECTYPE where all the elements are SCs. */
1441 tree
1442 build_vector_from_val (tree vectype, tree sc)
1443 {
1444 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1445
1446 if (sc == error_mark_node)
1447 return sc;
1448
1449 /* Verify that the vector type is suitable for SC. Note that there
1450 is some inconsistency in the type-system with respect to restrict
1451 qualifications of pointers. Vector types always have a main-variant
1452 element type and the qualification is applied to the vector-type.
1453 So TREE_TYPE (vector-type) does not return a properly qualified
1454 vector element-type. */
1455 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1456 TREE_TYPE (vectype)));
1457
1458 if (CONSTANT_CLASS_P (sc))
1459 {
1460 tree *v = XALLOCAVEC (tree, nunits);
1461 for (i = 0; i < nunits; ++i)
1462 v[i] = sc;
1463 return build_vector (vectype, v);
1464 }
1465 else
1466 {
1467 vec<constructor_elt, va_gc> *v;
1468 vec_alloc (v, nunits);
1469 for (i = 0; i < nunits; ++i)
1470 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1471 return build_constructor (vectype, v);
1472 }
1473 }
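/* Editor's sketch (not part of the original source): splatting a
   constant across a four-lane integer vector, assuming such a vector
   type is meaningful for the target:

     tree v4si = build_vector_type (integer_type_node, 4);
     tree ones = build_vector_from_val (v4si, integer_one_node);

   Since the scalar is a constant, this takes the build_vector path
   above and returns a VECTOR_CST rather than a CONSTRUCTOR.  */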
1474
1475 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1476 are in the vec pointed to by VALS. */
1477 tree
1478 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1479 {
1480 tree c = make_node (CONSTRUCTOR);
1481 unsigned int i;
1482 constructor_elt *elt;
1483 bool constant_p = true;
1484 bool side_effects_p = false;
1485
1486 TREE_TYPE (c) = type;
1487 CONSTRUCTOR_ELTS (c) = vals;
1488
1489 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1490 {
1491 /* Mostly ctors will have elts that don't have side-effects, so
1492 the usual case is to scan all the elements. Hence a single
1493 loop for both const and side effects, rather than one loop
1494 each (with early outs). */
1495 if (!TREE_CONSTANT (elt->value))
1496 constant_p = false;
1497 if (TREE_SIDE_EFFECTS (elt->value))
1498 side_effects_p = true;
1499 }
1500
1501 TREE_SIDE_EFFECTS (c) = side_effects_p;
1502 TREE_CONSTANT (c) = constant_p;
1503
1504 return c;
1505 }
1506
1507 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1508 INDEX and VALUE. */
1509 tree
1510 build_constructor_single (tree type, tree index, tree value)
1511 {
1512 vec<constructor_elt, va_gc> *v;
1513 constructor_elt elt = {index, value};
1514
1515 vec_alloc (v, 1);
1516 v->quick_push (elt);
1517
1518 return build_constructor (type, v);
1519 }
1520
1521
1522 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1523 are in a list pointed to by VALS. */
1524 tree
1525 build_constructor_from_list (tree type, tree vals)
1526 {
1527 tree t;
1528 vec<constructor_elt, va_gc> *v = NULL;
1529
1530 if (vals)
1531 {
1532 vec_alloc (v, list_length (vals));
1533 for (t = vals; t; t = TREE_CHAIN (t))
1534 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1535 }
1536
1537 return build_constructor (type, v);
1538 }
1539
1540 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1541 of elements, provided as index/value pairs. */
1542
1543 tree
1544 build_constructor_va (tree type, int nelts, ...)
1545 {
1546 vec<constructor_elt, va_gc> *v = NULL;
1547 va_list p;
1548
1549 va_start (p, nelts);
1550 vec_alloc (v, nelts);
1551 while (nelts--)
1552 {
1553 tree index = va_arg (p, tree);
1554 tree value = va_arg (p, tree);
1555 CONSTRUCTOR_APPEND_ELT (v, index, value);
1556 }
1557 va_end (p);
1558 return build_constructor (type, v);
1559 }
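/* Editor's sketch (not part of the original source): the varargs
   helper above keeps small initializers compact; with f0 and f1
   standing for two hypothetical FIELD_DECLs of a record type,

     tree ctor = build_constructor_va (type, 2,
                                       f0, integer_zero_node,
                                       f1, integer_one_node);

   each index/value pair becomes one constructor_elt, exactly as if
   CONSTRUCTOR_APPEND_ELT had been called twice.  */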
1560
1561 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1562
1563 tree
1564 build_fixed (tree type, FIXED_VALUE_TYPE f)
1565 {
1566 tree v;
1567 FIXED_VALUE_TYPE *fp;
1568
1569 v = make_node (FIXED_CST);
1570 fp = ggc_alloc_fixed_value ();
1571 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1572
1573 TREE_TYPE (v) = type;
1574 TREE_FIXED_CST_PTR (v) = fp;
1575 return v;
1576 }
1577
1578 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1579
1580 tree
1581 build_real (tree type, REAL_VALUE_TYPE d)
1582 {
1583 tree v;
1584 REAL_VALUE_TYPE *dp;
1585 int overflow = 0;
1586
1587 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1588 Consider doing it via real_convert now. */
1589
1590 v = make_node (REAL_CST);
1591 dp = ggc_alloc_real_value ();
1592 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1593
1594 TREE_TYPE (v) = type;
1595 TREE_REAL_CST_PTR (v) = dp;
1596 TREE_OVERFLOW (v) = overflow;
1597 return v;
1598 }
1599
1600	/* Return a REAL_VALUE_TYPE holding the integer value of the INTEGER_CST
1601	   node I, in the format of the floating-point type TYPE (if given). */
1602
1603 REAL_VALUE_TYPE
1604 real_value_from_int_cst (const_tree type, const_tree i)
1605 {
1606 REAL_VALUE_TYPE d;
1607
1608 /* Clear all bits of the real value type so that we can later do
1609 bitwise comparisons to see if two values are the same. */
1610 memset (&d, 0, sizeof d);
1611
1612 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode,
1613 TREE_INT_CST_LOW (i), TREE_INT_CST_HIGH (i),
1614 TYPE_UNSIGNED (TREE_TYPE (i)));
1615 return d;
1616 }
1617
1618 /* Given a tree representing an integer constant I, return a tree
1619 representing the same value as a floating-point constant of type TYPE. */
1620
1621 tree
1622 build_real_from_int_cst (tree type, const_tree i)
1623 {
1624 tree v;
1625 int overflow = TREE_OVERFLOW (i);
1626
1627 v = build_real (type, real_value_from_int_cst (type, i));
1628
1629 TREE_OVERFLOW (v) |= overflow;
1630 return v;
1631 }
1632
1633 /* Return a newly constructed STRING_CST node whose value is
1634 the LEN characters at STR.
1635 Note that for a C string literal, LEN should include the trailing NUL.
1636 The TREE_TYPE is not initialized. */
1637
1638 tree
1639 build_string (int len, const char *str)
1640 {
1641 tree s;
1642 size_t length;
1643
1644 /* Do not waste bytes provided by padding of struct tree_string. */
1645 length = len + offsetof (struct tree_string, str) + 1;
1646
1647 record_node_allocation_statistics (STRING_CST, length);
1648
1649 s = ggc_alloc_tree_node (length);
1650
1651 memset (s, 0, sizeof (struct tree_typed));
1652 TREE_SET_CODE (s, STRING_CST);
1653 TREE_CONSTANT (s) = 1;
1654 TREE_STRING_LENGTH (s) = len;
1655 memcpy (s->string.str, str, len);
1656 s->string.str[len] = '\0';
1657
1658 return s;
1659 }
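/* Editor's sketch (not part of the original source): note that LEN
   counts the trailing NUL for C literals and that the caller must
   still set the type, e.g.

     tree s = build_string (6, "hello");
     TREE_TYPE (s) = build_array_type (char_type_node,
                                       build_index_type (size_int (5)));

   roughly what the front ends do when they build string literals.  */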
1660
1661 /* Return a newly constructed COMPLEX_CST node whose value is
1662 specified by the real and imaginary parts REAL and IMAG.
1663 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1664 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1665
1666 tree
1667 build_complex (tree type, tree real, tree imag)
1668 {
1669 tree t = make_node (COMPLEX_CST);
1670
1671 TREE_REALPART (t) = real;
1672 TREE_IMAGPART (t) = imag;
1673 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1674 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1675 return t;
1676 }
1677
1678 /* Return a constant of arithmetic type TYPE which is the
1679 multiplicative identity of the set TYPE. */
1680
1681 tree
1682 build_one_cst (tree type)
1683 {
1684 switch (TREE_CODE (type))
1685 {
1686 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1687 case POINTER_TYPE: case REFERENCE_TYPE:
1688 case OFFSET_TYPE:
1689 return build_int_cst (type, 1);
1690
1691 case REAL_TYPE:
1692 return build_real (type, dconst1);
1693
1694 case FIXED_POINT_TYPE:
1695 /* We can only generate 1 for accum types. */
1696 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1697 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1698
1699 case VECTOR_TYPE:
1700 {
1701 tree scalar = build_one_cst (TREE_TYPE (type));
1702
1703 return build_vector_from_val (type, scalar);
1704 }
1705
1706 case COMPLEX_TYPE:
1707 return build_complex (type,
1708 build_one_cst (TREE_TYPE (type)),
1709 build_zero_cst (TREE_TYPE (type)));
1710
1711 default:
1712 gcc_unreachable ();
1713 }
1714 }
1715
1716 /* Return an integer of type TYPE containing all 1's in as much precision as
1717 it contains, or a complex or vector whose subparts are such integers. */
1718
1719 tree
1720 build_all_ones_cst (tree type)
1721 {
1722 if (TREE_CODE (type) == COMPLEX_TYPE)
1723 {
1724 tree scalar = build_all_ones_cst (TREE_TYPE (type));
1725 return build_complex (type, scalar, scalar);
1726 }
1727 else
1728 return build_minus_one_cst (type);
1729 }
1730
1731 /* Return a constant of arithmetic type TYPE which is the
1732 opposite of the multiplicative identity of the set TYPE. */
1733
1734 tree
1735 build_minus_one_cst (tree type)
1736 {
1737 switch (TREE_CODE (type))
1738 {
1739 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1740 case POINTER_TYPE: case REFERENCE_TYPE:
1741 case OFFSET_TYPE:
1742 return build_int_cst (type, -1);
1743
1744 case REAL_TYPE:
1745 return build_real (type, dconstm1);
1746
1747 case FIXED_POINT_TYPE:
1748 /* We can only generate 1 for accum types. */
1749 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1750 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
1751 TYPE_MODE (type)));
1752
1753 case VECTOR_TYPE:
1754 {
1755 tree scalar = build_minus_one_cst (TREE_TYPE (type));
1756
1757 return build_vector_from_val (type, scalar);
1758 }
1759
1760 case COMPLEX_TYPE:
1761 return build_complex (type,
1762 build_minus_one_cst (TREE_TYPE (type)),
1763 build_zero_cst (TREE_TYPE (type)));
1764
1765 default:
1766 gcc_unreachable ();
1767 }
1768 }
1769
1770 /* Build 0 constant of type TYPE. This is used by constructor folding
1771 and thus the constant should be represented in memory by
1772 zero(es). */
1773
1774 tree
1775 build_zero_cst (tree type)
1776 {
1777 switch (TREE_CODE (type))
1778 {
1779 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1780 case POINTER_TYPE: case REFERENCE_TYPE:
1781 case OFFSET_TYPE: case NULLPTR_TYPE:
1782 return build_int_cst (type, 0);
1783
1784 case REAL_TYPE:
1785 return build_real (type, dconst0);
1786
1787 case FIXED_POINT_TYPE:
1788 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
1789
1790 case VECTOR_TYPE:
1791 {
1792 tree scalar = build_zero_cst (TREE_TYPE (type));
1793
1794 return build_vector_from_val (type, scalar);
1795 }
1796
1797 case COMPLEX_TYPE:
1798 {
1799 tree zero = build_zero_cst (TREE_TYPE (type));
1800
1801 return build_complex (type, zero, zero);
1802 }
1803
1804 default:
1805 if (!AGGREGATE_TYPE_P (type))
1806 return fold_convert (type, integer_zero_node);
1807 return build_constructor (type, NULL);
1808 }
1809 }
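/* Editor's sketch (not part of the original source): the constant
   builders above compose over component types, e.g.

     tree z = build_zero_cst (complex_double_type_node);
     tree o = build_one_cst (double_type_node);

   z is a COMPLEX_CST whose real and imaginary parts are both the
   REAL_CST 0.0, and o is the REAL_CST 1.0.  */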
1810
1811
1812	/* Build a BINFO with room for BASE_BINFOS base binfos. */
1813
1814 tree
1815 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
1816 {
1817 tree t;
1818 size_t length = (offsetof (struct tree_binfo, base_binfos)
1819 + vec<tree, va_gc>::embedded_size (base_binfos));
1820
1821 record_node_allocation_statistics (TREE_BINFO, length);
1822
1823 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1824
1825 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
1826
1827 TREE_SET_CODE (t, TREE_BINFO);
1828
1829 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
1830
1831 return t;
1832 }
1833
1834 /* Create a CASE_LABEL_EXPR tree node and return it. */
1835
1836 tree
1837 build_case_label (tree low_value, tree high_value, tree label_decl)
1838 {
1839 tree t = make_node (CASE_LABEL_EXPR);
1840
1841 TREE_TYPE (t) = void_type_node;
1842 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
1843
1844 CASE_LOW (t) = low_value;
1845 CASE_HIGH (t) = high_value;
1846 CASE_LABEL (t) = label_decl;
1847 CASE_CHAIN (t) = NULL_TREE;
1848
1849 return t;
1850 }
1851
1852 /* Build a newly constructed TREE_VEC node of length LEN. */
1853
1854 tree
1855 make_tree_vec_stat (int len MEM_STAT_DECL)
1856 {
1857 tree t;
1858 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
1859
1860 record_node_allocation_statistics (TREE_VEC, length);
1861
1862 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1863
1864 TREE_SET_CODE (t, TREE_VEC);
1865 TREE_VEC_LENGTH (t) = len;
1866
1867 return t;
1868 }
1869
1870 /* Grow a TREE_VEC node to new length LEN. */
1871
1872 tree
1873 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
1874 {
1875 gcc_assert (TREE_CODE (v) == TREE_VEC);
1876
1877 int oldlen = TREE_VEC_LENGTH (v);
1878 gcc_assert (len > oldlen);
1879
1880 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
1881 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
1882
1883 record_node_allocation_statistics (TREE_VEC, length - oldlength);
1884
1885 v = (tree) ggc_realloc_stat (v, length PASS_MEM_STAT);
1886
1887 TREE_VEC_LENGTH (v) = len;
1888
1889 return v;
1890 }
1891 \f
1892 /* Return 1 if EXPR is the integer constant zero or a complex constant
1893 of zero. */
1894
1895 int
1896 integer_zerop (const_tree expr)
1897 {
1898 STRIP_NOPS (expr);
1899
1900 switch (TREE_CODE (expr))
1901 {
1902 case INTEGER_CST:
1903 return (TREE_INT_CST_LOW (expr) == 0
1904 && TREE_INT_CST_HIGH (expr) == 0);
1905 case COMPLEX_CST:
1906 return (integer_zerop (TREE_REALPART (expr))
1907 && integer_zerop (TREE_IMAGPART (expr)));
1908 case VECTOR_CST:
1909 {
1910 unsigned i;
1911 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
1912 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
1913 return false;
1914 return true;
1915 }
1916 default:
1917 return false;
1918 }
1919 }
1920
1921 /* Return 1 if EXPR is the integer constant one or the corresponding
1922 complex constant. */
1923
1924 int
1925 integer_onep (const_tree expr)
1926 {
1927 STRIP_NOPS (expr);
1928
1929 switch (TREE_CODE (expr))
1930 {
1931 case INTEGER_CST:
1932 return (TREE_INT_CST_LOW (expr) == 1
1933 && TREE_INT_CST_HIGH (expr) == 0);
1934 case COMPLEX_CST:
1935 return (integer_onep (TREE_REALPART (expr))
1936 && integer_zerop (TREE_IMAGPART (expr)));
1937 case VECTOR_CST:
1938 {
1939 unsigned i;
1940 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
1941 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
1942 return false;
1943 return true;
1944 }
1945 default:
1946 return false;
1947 }
1948 }
1949
1950 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
1951 it contains, or a complex or vector whose subparts are such integers. */
1952
1953 int
1954 integer_all_onesp (const_tree expr)
1955 {
1956 int prec;
1957 int uns;
1958
1959 STRIP_NOPS (expr);
1960
1961 if (TREE_CODE (expr) == COMPLEX_CST
1962 && integer_all_onesp (TREE_REALPART (expr))
1963 && integer_all_onesp (TREE_IMAGPART (expr)))
1964 return 1;
1965
1966 else if (TREE_CODE (expr) == VECTOR_CST)
1967 {
1968 unsigned i;
1969 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
1970 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
1971 return 0;
1972 return 1;
1973 }
1974
1975 else if (TREE_CODE (expr) != INTEGER_CST)
1976 return 0;
1977
1978 uns = TYPE_UNSIGNED (TREE_TYPE (expr));
1979 if (TREE_INT_CST_LOW (expr) == ~(unsigned HOST_WIDE_INT) 0
1980 && TREE_INT_CST_HIGH (expr) == -1)
1981 return 1;
1982 if (!uns)
1983 return 0;
1984
1985 prec = TYPE_PRECISION (TREE_TYPE (expr));
1986 if (prec >= HOST_BITS_PER_WIDE_INT)
1987 {
1988 HOST_WIDE_INT high_value;
1989 int shift_amount;
1990
1991 shift_amount = prec - HOST_BITS_PER_WIDE_INT;
1992
1993 /* Can not handle precisions greater than twice the host int size. */
1994 gcc_assert (shift_amount <= HOST_BITS_PER_WIDE_INT);
1995 if (shift_amount == HOST_BITS_PER_WIDE_INT)
1996 /* Shifting by the host word size is undefined according to the ANSI
1997 standard, so we must handle this as a special case. */
1998 high_value = -1;
1999 else
2000 high_value = ((HOST_WIDE_INT) 1 << shift_amount) - 1;
2001
2002 return (TREE_INT_CST_LOW (expr) == ~(unsigned HOST_WIDE_INT) 0
2003 && TREE_INT_CST_HIGH (expr) == high_value);
2004 }
2005 else
2006 return TREE_INT_CST_LOW (expr) == ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
2007 }
2008
2009 /* Return 1 if EXPR is the integer constant minus one. */
2010
2011 int
2012 integer_minus_onep (const_tree expr)
2013 {
2014 STRIP_NOPS (expr);
2015
2016 if (TREE_CODE (expr) == COMPLEX_CST)
2017 return (integer_all_onesp (TREE_REALPART (expr))
2018 && integer_zerop (TREE_IMAGPART (expr)));
2019 else
2020 return integer_all_onesp (expr);
2021 }
2022
2023 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2024 one bit on). */
2025
2026 int
2027 integer_pow2p (const_tree expr)
2028 {
2029 int prec;
2030 unsigned HOST_WIDE_INT high, low;
2031
2032 STRIP_NOPS (expr);
2033
2034 if (TREE_CODE (expr) == COMPLEX_CST
2035 && integer_pow2p (TREE_REALPART (expr))
2036 && integer_zerop (TREE_IMAGPART (expr)))
2037 return 1;
2038
2039 if (TREE_CODE (expr) != INTEGER_CST)
2040 return 0;
2041
2042 prec = TYPE_PRECISION (TREE_TYPE (expr));
2043 high = TREE_INT_CST_HIGH (expr);
2044 low = TREE_INT_CST_LOW (expr);
2045
2046 /* First clear all bits that are beyond the type's precision in case
2047 we've been sign extended. */
2048
2049 if (prec == HOST_BITS_PER_DOUBLE_INT)
2050 ;
2051 else if (prec > HOST_BITS_PER_WIDE_INT)
2052 high &= ~(HOST_WIDE_INT_M1U << (prec - HOST_BITS_PER_WIDE_INT));
2053 else
2054 {
2055 high = 0;
2056 if (prec < HOST_BITS_PER_WIDE_INT)
2057 low &= ~(HOST_WIDE_INT_M1U << prec);
2058 }
2059
2060 if (high == 0 && low == 0)
2061 return 0;
2062
2063 return ((high == 0 && (low & (low - 1)) == 0)
2064 || (low == 0 && (high & (high - 1)) == 0));
2065 }
2066
2067 /* Return 1 if EXPR is an integer constant other than zero or a
2068 complex constant other than zero. */
2069
2070 int
2071 integer_nonzerop (const_tree expr)
2072 {
2073 STRIP_NOPS (expr);
2074
2075 return ((TREE_CODE (expr) == INTEGER_CST
2076 && (TREE_INT_CST_LOW (expr) != 0
2077 || TREE_INT_CST_HIGH (expr) != 0))
2078 || (TREE_CODE (expr) == COMPLEX_CST
2079 && (integer_nonzerop (TREE_REALPART (expr))
2080 || integer_nonzerop (TREE_IMAGPART (expr)))));
2081 }
2082
2083 /* Return 1 if EXPR is the fixed-point constant zero. */
2084
2085 int
2086 fixed_zerop (const_tree expr)
2087 {
2088 return (TREE_CODE (expr) == FIXED_CST
2089 && TREE_FIXED_CST (expr).data.is_zero ());
2090 }
2091
2092 /* Return the base-2 logarithm of a tree node known to be a
2093 power of two. */
2094
2095 int
2096 tree_log2 (const_tree expr)
2097 {
2098 int prec;
2099 HOST_WIDE_INT high, low;
2100
2101 STRIP_NOPS (expr);
2102
2103 if (TREE_CODE (expr) == COMPLEX_CST)
2104 return tree_log2 (TREE_REALPART (expr));
2105
2106 prec = TYPE_PRECISION (TREE_TYPE (expr));
2107 high = TREE_INT_CST_HIGH (expr);
2108 low = TREE_INT_CST_LOW (expr);
2109
2110 /* First clear all bits that are beyond the type's precision in case
2111 we've been sign extended. */
2112
2113 if (prec == HOST_BITS_PER_DOUBLE_INT)
2114 ;
2115 else if (prec > HOST_BITS_PER_WIDE_INT)
2116 high &= ~(HOST_WIDE_INT_M1U << (prec - HOST_BITS_PER_WIDE_INT));
2117 else
2118 {
2119 high = 0;
2120 if (prec < HOST_BITS_PER_WIDE_INT)
2121 low &= ~(HOST_WIDE_INT_M1U << prec);
2122 }
2123
2124 return (high != 0 ? HOST_BITS_PER_WIDE_INT + exact_log2 (high)
2125 : exact_log2 (low));
2126 }
2127
2128 /* Similar, but return the largest integer Y such that 2 ** Y is less
2129 than or equal to EXPR. */
2130
2131 int
2132 tree_floor_log2 (const_tree expr)
2133 {
2134 int prec;
2135 HOST_WIDE_INT high, low;
2136
2137 STRIP_NOPS (expr);
2138
2139 if (TREE_CODE (expr) == COMPLEX_CST)
2140 return tree_log2 (TREE_REALPART (expr));
2141
2142 prec = TYPE_PRECISION (TREE_TYPE (expr));
2143 high = TREE_INT_CST_HIGH (expr);
2144 low = TREE_INT_CST_LOW (expr);
2145
2146 /* First clear all bits that are beyond the type's precision in case
2147 we've been sign extended. Ignore if type's precision hasn't been set
2148 since what we are doing is setting it. */
2149
2150 if (prec == HOST_BITS_PER_DOUBLE_INT || prec == 0)
2151 ;
2152 else if (prec > HOST_BITS_PER_WIDE_INT)
2153 high &= ~(HOST_WIDE_INT_M1U << (prec - HOST_BITS_PER_WIDE_INT));
2154 else
2155 {
2156 high = 0;
2157 if (prec < HOST_BITS_PER_WIDE_INT)
2158 low &= ~(HOST_WIDE_INT_M1U << prec);
2159 }
2160
2161 return (high != 0 ? HOST_BITS_PER_WIDE_INT + floor_log2 (high)
2162 : floor_log2 (low));
2163 }
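
/* A short sketch relating integer_pow2p, tree_log2 and tree_floor_log2;
   the helper exists only for illustration.  */

static int ATTRIBUTE_UNUSED
example_tree_log2_sketch (void)
{
  tree c64 = build_int_cst (integer_type_node, 64);
  tree c100 = build_int_cst (integer_type_node, 100);
  /* 64 = 2**6 is a power of two, so tree_log2 recovers the exponent.  */
  gcc_assert (integer_pow2p (c64) && tree_log2 (c64) == 6);
  /* 100 is not, but 2**6 <= 100 < 2**7, so the floor variant yields 6.  */
  return tree_floor_log2 (c100);
}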
2164
2165 /* Return number of known trailing zero bits in EXPR, or, if the value of
2166 EXPR is known to be zero, the precision of its type. */
2167
2168 unsigned int
2169 tree_ctz (const_tree expr)
2170 {
2171 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2172 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2173 return 0;
2174
2175 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2176 switch (TREE_CODE (expr))
2177 {
2178 case INTEGER_CST:
2179 ret1 = tree_to_double_int (expr).trailing_zeros ();
2180 return MIN (ret1, prec);
2181 case SSA_NAME:
2182 ret1 = get_nonzero_bits (expr).trailing_zeros ();
2183 return MIN (ret1, prec);
2184 case PLUS_EXPR:
2185 case MINUS_EXPR:
2186 case BIT_IOR_EXPR:
2187 case BIT_XOR_EXPR:
2188 case MIN_EXPR:
2189 case MAX_EXPR:
2190 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2191 if (ret1 == 0)
2192 return ret1;
2193 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2194 return MIN (ret1, ret2);
2195 case POINTER_PLUS_EXPR:
2196 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2197 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2198 /* Second operand is sizetype, which could in theory be
2199 wider than the pointer's precision. Make sure we never
2200 return more than prec. */
2201 ret2 = MIN (ret2, prec);
2202 return MIN (ret1, ret2);
2203 case BIT_AND_EXPR:
2204 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2205 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2206 return MAX (ret1, ret2);
2207 case MULT_EXPR:
2208 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2209 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2210 return MIN (ret1 + ret2, prec);
2211 case LSHIFT_EXPR:
2212 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2213 if (host_integerp (TREE_OPERAND (expr, 1), 1)
2214 && ((unsigned HOST_WIDE_INT) tree_low_cst (TREE_OPERAND (expr, 1), 1)
2215 < (unsigned HOST_WIDE_INT) prec))
2216 {
2217 ret2 = tree_low_cst (TREE_OPERAND (expr, 1), 1);
2218 return MIN (ret1 + ret2, prec);
2219 }
2220 return ret1;
2221 case RSHIFT_EXPR:
2222 if (host_integerp (TREE_OPERAND (expr, 1), 1)
2223 && ((unsigned HOST_WIDE_INT) tree_low_cst (TREE_OPERAND (expr, 1), 1)
2224 < (unsigned HOST_WIDE_INT) prec))
2225 {
2226 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2227 ret2 = tree_low_cst (TREE_OPERAND (expr, 1), 1);
2228 if (ret1 > ret2)
2229 return ret1 - ret2;
2230 }
2231 return 0;
2232 case TRUNC_DIV_EXPR:
2233 case CEIL_DIV_EXPR:
2234 case FLOOR_DIV_EXPR:
2235 case ROUND_DIV_EXPR:
2236 case EXACT_DIV_EXPR:
2237 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2238 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2239 {
2240 int l = tree_log2 (TREE_OPERAND (expr, 1));
2241 if (l >= 0)
2242 {
2243 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2244 ret2 = l;
2245 if (ret1 > ret2)
2246 return ret1 - ret2;
2247 }
2248 }
2249 return 0;
2250 CASE_CONVERT:
2251 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2252 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2253 ret1 = prec;
2254 return MIN (ret1, prec);
2255 case SAVE_EXPR:
2256 return tree_ctz (TREE_OPERAND (expr, 0));
2257 case COND_EXPR:
2258 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2259 if (ret1 == 0)
2260 return 0;
2261 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2262 return MIN (ret1, ret2);
2263 case COMPOUND_EXPR:
2264 return tree_ctz (TREE_OPERAND (expr, 1));
2265 case ADDR_EXPR:
2266 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2267 if (ret1 > BITS_PER_UNIT)
2268 {
2269 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2270 return MIN (ret1, prec);
2271 }
2272 return 0;
2273 default:
2274 return 0;
2275 }
2276 }
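
/* Worked example for the trailing-zero bound computed above; the helper
   is purely illustrative.  48 = 0b110000 has four trailing zero bits and
   20 = 0b10100 has two, so by the PLUS_EXPR rule their sum is known to
   have at least MIN (4, 2) = 2 of them.  */

static unsigned int ATTRIBUTE_UNUSED
example_tree_ctz_sketch (void)
{
  tree a = build_int_cst (integer_type_node, 48);
  tree b = build_int_cst (integer_type_node, 20);
  tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
  return tree_ctz (sum);	/* 2.  */
}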
2277
2278 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2279 decimal float constants, so don't return 1 for them. */
2280
2281 int
2282 real_zerop (const_tree expr)
2283 {
2284 STRIP_NOPS (expr);
2285
2286 switch (TREE_CODE (expr))
2287 {
2288 case REAL_CST:
2289 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2290 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2291 case COMPLEX_CST:
2292 return real_zerop (TREE_REALPART (expr))
2293 && real_zerop (TREE_IMAGPART (expr));
2294 case VECTOR_CST:
2295 {
2296 unsigned i;
2297 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2298 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2299 return false;
2300 return true;
2301 }
2302 default:
2303 return false;
2304 }
2305 }
2306
2307 /* Return 1 if EXPR is the real constant one in real or complex form.
2308 Trailing zeroes matter for decimal float constants, so don't return
2309 1 for them. */
2310
2311 int
2312 real_onep (const_tree expr)
2313 {
2314 STRIP_NOPS (expr);
2315
2316 switch (TREE_CODE (expr))
2317 {
2318 case REAL_CST:
2319 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2320 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2321 case COMPLEX_CST:
2322 return real_onep (TREE_REALPART (expr))
2323 && real_zerop (TREE_IMAGPART (expr));
2324 case VECTOR_CST:
2325 {
2326 unsigned i;
2327 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2328 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2329 return false;
2330 return true;
2331 }
2332 default:
2333 return false;
2334 }
2335 }
2336
2337 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2338 matter for decimal float constants, so don't return 1 for them. */
2339
2340 int
2341 real_minus_onep (const_tree expr)
2342 {
2343 STRIP_NOPS (expr);
2344
2345 switch (TREE_CODE (expr))
2346 {
2347 case REAL_CST:
2348 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2349 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2350 case COMPLEX_CST:
2351 return real_minus_onep (TREE_REALPART (expr))
2352 && real_zerop (TREE_IMAGPART (expr));
2353 case VECTOR_CST:
2354 {
2355 unsigned i;
2356 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2357 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2358 return false;
2359 return true;
2360 }
2361 default:
2362 return false;
2363 }
2364 }
2365
2366 /* Nonzero if EXP is a constant or a cast of a constant. */
2367
2368 int
2369 really_constant_p (const_tree exp)
2370 {
2371 /* This is not quite the same as STRIP_NOPS. It does more. */
2372 while (CONVERT_EXPR_P (exp)
2373 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2374 exp = TREE_OPERAND (exp, 0);
2375 return TREE_CONSTANT (exp);
2376 }
2377 \f
2378 /* Return first list element whose TREE_VALUE is ELEM.
2379 Return 0 if ELEM is not in LIST. */
2380
2381 tree
2382 value_member (tree elem, tree list)
2383 {
2384 while (list)
2385 {
2386 if (elem == TREE_VALUE (list))
2387 return list;
2388 list = TREE_CHAIN (list);
2389 }
2390 return NULL_TREE;
2391 }
2392
2393 /* Return first list element whose TREE_PURPOSE is ELEM.
2394 Return 0 if ELEM is not in LIST. */
2395
2396 tree
2397 purpose_member (const_tree elem, tree list)
2398 {
2399 while (list)
2400 {
2401 if (elem == TREE_PURPOSE (list))
2402 return list;
2403 list = TREE_CHAIN (list);
2404 }
2405 return NULL_TREE;
2406 }
2407
2408 /* Return true if ELEM is in V. */
2409
2410 bool
2411 vec_member (const_tree elem, vec<tree, va_gc> *v)
2412 {
2413 unsigned ix;
2414 tree t;
2415 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2416 if (elem == t)
2417 return true;
2418 return false;
2419 }
2420
2421 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2422 NULL_TREE. */
2423
2424 tree
2425 chain_index (int idx, tree chain)
2426 {
2427 for (; chain && idx > 0; --idx)
2428 chain = TREE_CHAIN (chain);
2429 return chain;
2430 }
2431
2432 /* Return nonzero if ELEM is part of the chain CHAIN. */
2433
2434 int
2435 chain_member (const_tree elem, const_tree chain)
2436 {
2437 while (chain)
2438 {
2439 if (elem == chain)
2440 return 1;
2441 chain = DECL_CHAIN (chain);
2442 }
2443
2444 return 0;
2445 }
2446
2447 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2448 We expect a null pointer to mark the end of the chain.
2449 This is the Lisp primitive `length'. */
2450
2451 int
2452 list_length (const_tree t)
2453 {
2454 const_tree p = t;
2455 #ifdef ENABLE_TREE_CHECKING
2456 const_tree q = t;
2457 #endif
2458 int len = 0;
2459
2460 while (p)
2461 {
2462 p = TREE_CHAIN (p);
2463 #ifdef ENABLE_TREE_CHECKING
2464 if (len % 2)
2465 q = TREE_CHAIN (q);
2466 gcc_assert (p != q);
2467 #endif
2468 len++;
2469 }
2470
2471 return len;
2472 }
2473
2474 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2475 UNION_TYPE TYPE, or NULL_TREE if none. */
2476
2477 tree
2478 first_field (const_tree type)
2479 {
2480 tree t = TYPE_FIELDS (type);
2481 while (t && TREE_CODE (t) != FIELD_DECL)
2482 t = TREE_CHAIN (t);
2483 return t;
2484 }
2485
2486 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2487 by modifying the last node in chain 1 to point to chain 2.
2488 This is the Lisp primitive `nconc'. */
2489
2490 tree
2491 chainon (tree op1, tree op2)
2492 {
2493 tree t1;
2494
2495 if (!op1)
2496 return op2;
2497 if (!op2)
2498 return op1;
2499
2500 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2501 continue;
2502 TREE_CHAIN (t1) = op2;
2503
2504 #ifdef ENABLE_TREE_CHECKING
2505 {
2506 tree t2;
2507 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2508 gcc_assert (t2 != t1);
2509 }
2510 #endif
2511
2512 return op1;
2513 }
2514
2515 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2516
2517 tree
2518 tree_last (tree chain)
2519 {
2520 tree next;
2521 if (chain)
2522 while ((next = TREE_CHAIN (chain)))
2523 chain = next;
2524 return chain;
2525 }
2526
2527 /* Reverse the order of elements in the chain T,
2528 and return the new head of the chain (old last element). */
2529
2530 tree
2531 nreverse (tree t)
2532 {
2533 tree prev = 0, decl, next;
2534 for (decl = t; decl; decl = next)
2535 {
2536 /* We shouldn't be using this function to reverse BLOCK chains; we
2537 have blocks_nreverse for that. */
2538 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2539 next = TREE_CHAIN (decl);
2540 TREE_CHAIN (decl) = prev;
2541 prev = decl;
2542 }
2543 return prev;
2544 }
2545 \f
2546 /* Return a newly created TREE_LIST node whose
2547 purpose and value fields are PARM and VALUE. */
2548
2549 tree
2550 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2551 {
2552 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2553 TREE_PURPOSE (t) = parm;
2554 TREE_VALUE (t) = value;
2555 return t;
2556 }
2557
2558 /* Build a chain of TREE_LIST nodes from a vector. */
2559
2560 tree
2561 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2562 {
2563 tree ret = NULL_TREE;
2564 tree *pp = &ret;
2565 unsigned int i;
2566 tree t;
2567 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2568 {
2569 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2570 pp = &TREE_CHAIN (*pp);
2571 }
2572 return ret;
2573 }
2574
2575 /* Return a newly created TREE_LIST node whose
2576 purpose and value fields are PURPOSE and VALUE
2577 and whose TREE_CHAIN is CHAIN. */
2578
2579 tree
2580 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2581 {
2582 tree node;
2583
2584 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2585 memset (node, 0, sizeof (struct tree_common));
2586
2587 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2588
2589 TREE_SET_CODE (node, TREE_LIST);
2590 TREE_CHAIN (node) = chain;
2591 TREE_PURPOSE (node) = purpose;
2592 TREE_VALUE (node) = value;
2593 return node;
2594 }
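
/* A small sketch tying the chain primitives above together; the helper
   name and the list contents are arbitrary and purely illustrative.  */

static int ATTRIBUTE_UNUSED
example_tree_list_sketch (void)
{
  /* Build (zero) -> (one), append (two) with chainon, then reverse.  */
  tree l = chainon (build_tree_list (NULL_TREE, integer_zero_node),
                    build_tree_list (NULL_TREE, integer_one_node));
  l = chainon (l, build_tree_list (NULL_TREE,
                                   build_int_cst (integer_type_node, 2)));
  l = nreverse (l);		/* Now (two) -> (one) -> (zero).  */
  gcc_assert (integer_zerop (TREE_VALUE (tree_last (l))));
  return list_length (l);	/* 3.  */
}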
2595
2596 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2597 trees. */
2598
2599 vec<tree, va_gc> *
2600 ctor_to_vec (tree ctor)
2601 {
2602 vec<tree, va_gc> *vec;
2603 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2604 unsigned int ix;
2605 tree val;
2606
2607 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2608 vec->quick_push (val);
2609
2610 return vec;
2611 }
2612 \f
2613 /* Return the size nominally occupied by an object of type TYPE
2614 when it resides in memory. The value is measured in units of bytes,
2615 and its data type is that normally used for type sizes
2616 (which is the first type created by make_signed_type or
2617 make_unsigned_type). */
2618
2619 tree
2620 size_in_bytes (const_tree type)
2621 {
2622 tree t;
2623
2624 if (type == error_mark_node)
2625 return integer_zero_node;
2626
2627 type = TYPE_MAIN_VARIANT (type);
2628 t = TYPE_SIZE_UNIT (type);
2629
2630 if (t == 0)
2631 {
2632 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2633 return size_zero_node;
2634 }
2635
2636 return t;
2637 }
2638
2639 /* Return the size of TYPE (in bytes) as a wide integer
2640 or return -1 if the size can vary or is larger than an integer. */
2641
2642 HOST_WIDE_INT
2643 int_size_in_bytes (const_tree type)
2644 {
2645 tree t;
2646
2647 if (type == error_mark_node)
2648 return 0;
2649
2650 type = TYPE_MAIN_VARIANT (type);
2651 t = TYPE_SIZE_UNIT (type);
2652 if (t == 0
2653 || TREE_CODE (t) != INTEGER_CST
2654 || TREE_INT_CST_HIGH (t) != 0
2655 /* If the result would appear negative, it's too big to represent. */
2656 || (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0)
2657 return -1;
2658
2659 return TREE_INT_CST_LOW (t);
2660 }
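
/* Minimal sketch of the size query above; it assumes the basic types have
   been laid out, and the helper name is illustrative.  */

static HOST_WIDE_INT ATTRIBUTE_UNUSED
example_int_size_in_bytes_sketch (void)
{
  /* char always occupies exactly one unit, so this returns 1; an
     incomplete or variable-sized type would instead yield -1.  */
  return int_size_in_bytes (char_type_node);
}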
2661
2662 /* Return the maximum size of TYPE (in bytes) as a wide integer
2663 or return -1 if the size can vary or is larger than an integer. */
2664
2665 HOST_WIDE_INT
2666 max_int_size_in_bytes (const_tree type)
2667 {
2668 HOST_WIDE_INT size = -1;
2669 tree size_tree;
2670
2671 /* If this is an array type, check for a possible MAX_SIZE attached. */
2672
2673 if (TREE_CODE (type) == ARRAY_TYPE)
2674 {
2675 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2676
2677 if (size_tree && host_integerp (size_tree, 1))
2678 size = tree_low_cst (size_tree, 1);
2679 }
2680
2681 /* If we still haven't been able to get a size, see if the language
2682 can compute a maximum size. */
2683
2684 if (size == -1)
2685 {
2686 size_tree = lang_hooks.types.max_size (type);
2687
2688 if (size_tree && host_integerp (size_tree, 1))
2689 size = tree_low_cst (size_tree, 1);
2690 }
2691
2692 return size;
2693 }
2694 \f
2695 /* Return the bit position of FIELD, in bits from the start of the record.
2696 This is a tree of type bitsizetype. */
2697
2698 tree
2699 bit_position (const_tree field)
2700 {
2701 return bit_from_pos (DECL_FIELD_OFFSET (field),
2702 DECL_FIELD_BIT_OFFSET (field));
2703 }
2704
2705 /* Likewise, but return as an integer. It must be representable in
2706 that way (since it could be a signed value, we don't have the
2707 option of returning -1 like int_size_in_bytes can). */
2708
2709 HOST_WIDE_INT
2710 int_bit_position (const_tree field)
2711 {
2712 return tree_low_cst (bit_position (field), 0);
2713 }
2714 \f
2715 /* Return the byte position of FIELD, in bytes from the start of the record.
2716 This is a tree of type sizetype. */
2717
2718 tree
2719 byte_position (const_tree field)
2720 {
2721 return byte_from_pos (DECL_FIELD_OFFSET (field),
2722 DECL_FIELD_BIT_OFFSET (field));
2723 }
2724
2725 /* Likewise, but return as an integer. It must be representable in
2726 that way (since it could be a signed value, we don't have the
2727 option of returning -1 like int_size_in_bytes can). */
2728
2729 HOST_WIDE_INT
2730 int_byte_position (const_tree field)
2731 {
2732 return tree_low_cst (byte_position (field), 0);
2733 }
2734 \f
2735 /* Return the strictest alignment, in bits, that T is known to have. */
2736
2737 unsigned int
2738 expr_align (const_tree t)
2739 {
2740 unsigned int align0, align1;
2741
2742 switch (TREE_CODE (t))
2743 {
2744 CASE_CONVERT: case NON_LVALUE_EXPR:
2745 /* If we have conversions, we know that the alignment of the
2746 object must meet each of the alignments of the types. */
2747 align0 = expr_align (TREE_OPERAND (t, 0));
2748 align1 = TYPE_ALIGN (TREE_TYPE (t));
2749 return MAX (align0, align1);
2750
2751 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2752 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2753 case CLEANUP_POINT_EXPR:
2754 /* These don't change the alignment of an object. */
2755 return expr_align (TREE_OPERAND (t, 0));
2756
2757 case COND_EXPR:
2758 /* The best we can do is say that the alignment is the least aligned
2759 of the two arms. */
2760 align0 = expr_align (TREE_OPERAND (t, 1));
2761 align1 = expr_align (TREE_OPERAND (t, 2));
2762 return MIN (align0, align1);
2763
2764 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2765 meaningfully; it's always 1. */
2766 case LABEL_DECL: case CONST_DECL:
2767 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2768 case FUNCTION_DECL:
2769 gcc_assert (DECL_ALIGN (t) != 0);
2770 return DECL_ALIGN (t);
2771
2772 default:
2773 break;
2774 }
2775
2776 /* Otherwise take the alignment from that of the type. */
2777 return TYPE_ALIGN (TREE_TYPE (t));
2778 }
2779 \f
2780 /* Return, as a tree node, the number of elements for TYPE (which is an
2781 ARRAY_TYPE) minus one. This counts only elements of the top array. */
2782
2783 tree
2784 array_type_nelts (const_tree type)
2785 {
2786 tree index_type, min, max;
2787
2788 /* If they did it with unspecified bounds, then we should have already
2789 given an error about it before we got here. */
2790 if (! TYPE_DOMAIN (type))
2791 return error_mark_node;
2792
2793 index_type = TYPE_DOMAIN (type);
2794 min = TYPE_MIN_VALUE (index_type);
2795 max = TYPE_MAX_VALUE (index_type);
2796
2797 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
2798 if (!max)
2799 return error_mark_node;
2800
2801 return (integer_zerop (min)
2802 ? max
2803 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
2804 }
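
/* Sketch of the above for a fixed-bound array type; the helper exists
   only for illustration.  */

static tree ATTRIBUTE_UNUSED
example_array_type_nelts_sketch (void)
{
  /* int[10] has domain [0, 9], so array_type_nelts returns the node for 9
     (the minimum is zero, so no subtraction is needed).  */
  tree domain = build_index_type (size_int (9));
  tree arr = build_array_type (integer_type_node, domain);
  return array_type_nelts (arr);
}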
2805 \f
2806 /* If arg is static -- a reference to an object in static storage -- then
2807 return the object. This is not the same as the C meaning of `static'.
2808 If arg isn't static, return NULL. */
2809
2810 tree
2811 staticp (tree arg)
2812 {
2813 switch (TREE_CODE (arg))
2814 {
2815 case FUNCTION_DECL:
2816 /* Nested functions are static, even though taking their address will
2817 involve a trampoline as we unnest the nested function and create
2818 the trampoline on the tree level. */
2819 return arg;
2820
2821 case VAR_DECL:
2822 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2823 && ! DECL_THREAD_LOCAL_P (arg)
2824 && ! DECL_DLLIMPORT_P (arg)
2825 ? arg : NULL);
2826
2827 case CONST_DECL:
2828 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
2829 ? arg : NULL);
2830
2831 case CONSTRUCTOR:
2832 return TREE_STATIC (arg) ? arg : NULL;
2833
2834 case LABEL_DECL:
2835 case STRING_CST:
2836 return arg;
2837
2838 case COMPONENT_REF:
2839 /* If the thing being referenced is not a field, then it is
2840 something language specific. */
2841 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
2842
2843 /* If we are referencing a bitfield, we can't evaluate an
2844 ADDR_EXPR at compile time and so it isn't a constant. */
2845 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
2846 return NULL;
2847
2848 return staticp (TREE_OPERAND (arg, 0));
2849
2850 case BIT_FIELD_REF:
2851 return NULL;
2852
2853 case INDIRECT_REF:
2854 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
2855
2856 case ARRAY_REF:
2857 case ARRAY_RANGE_REF:
2858 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
2859 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
2860 return staticp (TREE_OPERAND (arg, 0));
2861 else
2862 return NULL;
2863
2864 case COMPOUND_LITERAL_EXPR:
2865 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
2866
2867 default:
2868 return NULL;
2869 }
2870 }
2871
2872 \f
2873
2874
2875 /* Return whether OP is a DECL whose address is function-invariant. */
2876
2877 bool
2878 decl_address_invariant_p (const_tree op)
2879 {
2880 /* The conditions below are slightly less strict than the one in
2881 staticp. */
2882
2883 switch (TREE_CODE (op))
2884 {
2885 case PARM_DECL:
2886 case RESULT_DECL:
2887 case LABEL_DECL:
2888 case FUNCTION_DECL:
2889 return true;
2890
2891 case VAR_DECL:
2892 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
2893 || DECL_THREAD_LOCAL_P (op)
2894 || DECL_CONTEXT (op) == current_function_decl
2895 || decl_function_context (op) == current_function_decl)
2896 return true;
2897 break;
2898
2899 case CONST_DECL:
2900 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
2901 || decl_function_context (op) == current_function_decl)
2902 return true;
2903 break;
2904
2905 default:
2906 break;
2907 }
2908
2909 return false;
2910 }
2911
2912 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
2913
2914 bool
2915 decl_address_ip_invariant_p (const_tree op)
2916 {
2917 /* The conditions below are slightly less strict than the one in
2918 staticp. */
2919
2920 switch (TREE_CODE (op))
2921 {
2922 case LABEL_DECL:
2923 case FUNCTION_DECL:
2924 case STRING_CST:
2925 return true;
2926
2927 case VAR_DECL:
2928 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
2929 && !DECL_DLLIMPORT_P (op))
2930 || DECL_THREAD_LOCAL_P (op))
2931 return true;
2932 break;
2933
2934 case CONST_DECL:
2935 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
2936 return true;
2937 break;
2938
2939 default:
2940 break;
2941 }
2942
2943 return false;
2944 }
2945
2946
2947 /* Return true if T is function-invariant (internal function, does
2948 not handle arithmetic; that's handled in skip_simple_arithmetic and
2949 tree_invariant_p). */
2950
2951 static bool tree_invariant_p (tree t);
2952
2953 static bool
2954 tree_invariant_p_1 (tree t)
2955 {
2956 tree op;
2957
2958 if (TREE_CONSTANT (t)
2959 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
2960 return true;
2961
2962 switch (TREE_CODE (t))
2963 {
2964 case SAVE_EXPR:
2965 return true;
2966
2967 case ADDR_EXPR:
2968 op = TREE_OPERAND (t, 0);
2969 while (handled_component_p (op))
2970 {
2971 switch (TREE_CODE (op))
2972 {
2973 case ARRAY_REF:
2974 case ARRAY_RANGE_REF:
2975 if (!tree_invariant_p (TREE_OPERAND (op, 1))
2976 || TREE_OPERAND (op, 2) != NULL_TREE
2977 || TREE_OPERAND (op, 3) != NULL_TREE)
2978 return false;
2979 break;
2980
2981 case COMPONENT_REF:
2982 if (TREE_OPERAND (op, 2) != NULL_TREE)
2983 return false;
2984 break;
2985
2986 default:;
2987 }
2988 op = TREE_OPERAND (op, 0);
2989 }
2990
2991 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2992
2993 default:
2994 break;
2995 }
2996
2997 return false;
2998 }
2999
3000 /* Return true if T is function-invariant. */
3001
3002 static bool
3003 tree_invariant_p (tree t)
3004 {
3005 tree inner = skip_simple_arithmetic (t);
3006 return tree_invariant_p_1 (inner);
3007 }
3008
3009 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3010 Do this to any expression which may be used in more than one place,
3011 but must be evaluated only once.
3012
3013 Normally, expand_expr would reevaluate the expression each time.
3014 Calling save_expr produces something that is evaluated and recorded
3015 the first time expand_expr is called on it. Subsequent calls to
3016 expand_expr just reuse the recorded value.
3017
3018 The call to expand_expr that generates code that actually computes
3019 the value is the first call *at compile time*. Subsequent calls
3020 *at compile time* generate code to use the saved value.
3021 This produces correct result provided that *at run time* control
3022 always flows through the insns made by the first expand_expr
3023 before reaching the other places where the save_expr was evaluated.
3024 You, the caller of save_expr, must make sure this is so.
3025
3026 Constants, and certain read-only nodes, are returned with no
3027 SAVE_EXPR because that is safe. Expressions containing placeholders
3028 are not touched; see tree.def for an explanation of what these
3029 are used for. */
3030
3031 tree
3032 save_expr (tree expr)
3033 {
3034 tree t = fold (expr);
3035 tree inner;
3036
3037 /* If the tree evaluates to a constant, then we don't want to hide that
3038 fact (i.e. this allows further folding, and direct checks for constants).
3039 However, a read-only object that has side effects cannot be bypassed.
3040 Since it is no problem to reevaluate literals, we just return the
3041 literal node. */
3042 inner = skip_simple_arithmetic (t);
3043 if (TREE_CODE (inner) == ERROR_MARK)
3044 return inner;
3045
3046 if (tree_invariant_p_1 (inner))
3047 return t;
3048
3049 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3050 it means that the size or offset of some field of an object depends on
3051 the value within another field.
3052
3053 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3054 and some variable since it would then need to be both evaluated once and
3055 evaluated more than once. Front-ends must assure this case cannot
3056 happen by surrounding any such subexpressions in their own SAVE_EXPR
3057 and forcing evaluation at the proper time. */
3058 if (contains_placeholder_p (inner))
3059 return t;
3060
3061 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3062 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3063
3064 /* This expression might be placed ahead of a jump to ensure that the
3065 value was computed on both sides of the jump. So make sure it isn't
3066 eliminated as dead. */
3067 TREE_SIDE_EFFECTS (t) = 1;
3068 return t;
3069 }
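
/* A usage sketch for save_expr; X stands for any caller-supplied
   expression, possibly with side effects, that is needed twice, and the
   helper name is illustrative.  */

static tree ATTRIBUTE_UNUSED
example_save_expr_sketch (tree x)
{
  /* Wrapping X once means both operands of the multiplication share a
     single evaluation of X at run time.  */
  tree saved = save_expr (x);
  return build2 (MULT_EXPR, TREE_TYPE (saved), saved, saved);
}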
3070
3071 /* Look inside EXPR into any simple arithmetic operations. Return the
3072 outermost non-arithmetic or non-invariant node. */
3073
3074 tree
3075 skip_simple_arithmetic (tree expr)
3076 {
3077 /* We don't care about whether this can be used as an lvalue in this
3078 context. */
3079 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3080 expr = TREE_OPERAND (expr, 0);
3081
3082 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3083 a constant, it will be more efficient to not make another SAVE_EXPR since
3084 it will allow better simplification and GCSE will be able to merge the
3085 computations if they actually occur. */
3086 while (true)
3087 {
3088 if (UNARY_CLASS_P (expr))
3089 expr = TREE_OPERAND (expr, 0);
3090 else if (BINARY_CLASS_P (expr))
3091 {
3092 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3093 expr = TREE_OPERAND (expr, 0);
3094 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3095 expr = TREE_OPERAND (expr, 1);
3096 else
3097 break;
3098 }
3099 else
3100 break;
3101 }
3102
3103 return expr;
3104 }
3105
3106 /* Look inside EXPR into simple arithmetic operations involving constants.
3107 Return the outermost non-arithmetic or non-constant node. */
3108
3109 tree
3110 skip_simple_constant_arithmetic (tree expr)
3111 {
3112 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3113 expr = TREE_OPERAND (expr, 0);
3114
3115 while (true)
3116 {
3117 if (UNARY_CLASS_P (expr))
3118 expr = TREE_OPERAND (expr, 0);
3119 else if (BINARY_CLASS_P (expr))
3120 {
3121 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3122 expr = TREE_OPERAND (expr, 0);
3123 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3124 expr = TREE_OPERAND (expr, 1);
3125 else
3126 break;
3127 }
3128 else
3129 break;
3130 }
3131
3132 return expr;
3133 }
3134
3135 /* Return which tree structure is used by T. */
3136
3137 enum tree_node_structure_enum
3138 tree_node_structure (const_tree t)
3139 {
3140 const enum tree_code code = TREE_CODE (t);
3141 return tree_node_structure_for_code (code);
3142 }
3143
3144 /* Set various status flags when building a CALL_EXPR object T. */
3145
3146 static void
3147 process_call_operands (tree t)
3148 {
3149 bool side_effects = TREE_SIDE_EFFECTS (t);
3150 bool read_only = false;
3151 int i = call_expr_flags (t);
3152
3153 /* Calls have side-effects, except those to const or pure functions. */
3154 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3155 side_effects = true;
3156 /* Propagate TREE_READONLY of arguments for const functions. */
3157 if (i & ECF_CONST)
3158 read_only = true;
3159
3160 if (!side_effects || read_only)
3161 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3162 {
3163 tree op = TREE_OPERAND (t, i);
3164 if (op && TREE_SIDE_EFFECTS (op))
3165 side_effects = true;
3166 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3167 read_only = false;
3168 }
3169
3170 TREE_SIDE_EFFECTS (t) = side_effects;
3171 TREE_READONLY (t) = read_only;
3172 }
3173 \f
3174 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3175 size or offset that depends on a field within a record. */
3176
3177 bool
3178 contains_placeholder_p (const_tree exp)
3179 {
3180 enum tree_code code;
3181
3182 if (!exp)
3183 return 0;
3184
3185 code = TREE_CODE (exp);
3186 if (code == PLACEHOLDER_EXPR)
3187 return 1;
3188
3189 switch (TREE_CODE_CLASS (code))
3190 {
3191 case tcc_reference:
3192 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3193 position computations since they will be converted into a
3194 WITH_RECORD_EXPR involving the reference, which we assume here
3195 will be valid. */
3196 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3197
3198 case tcc_exceptional:
3199 if (code == TREE_LIST)
3200 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3201 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3202 break;
3203
3204 case tcc_unary:
3205 case tcc_binary:
3206 case tcc_comparison:
3207 case tcc_expression:
3208 switch (code)
3209 {
3210 case COMPOUND_EXPR:
3211 /* Ignoring the first operand isn't quite right, but works best. */
3212 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3213
3214 case COND_EXPR:
3215 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3216 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3217 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3218
3219 case SAVE_EXPR:
3220 /* The save_expr function never wraps anything containing
3221 a PLACEHOLDER_EXPR. */
3222 return 0;
3223
3224 default:
3225 break;
3226 }
3227
3228 switch (TREE_CODE_LENGTH (code))
3229 {
3230 case 1:
3231 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3232 case 2:
3233 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3234 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3235 default:
3236 return 0;
3237 }
3238
3239 case tcc_vl_exp:
3240 switch (code)
3241 {
3242 case CALL_EXPR:
3243 {
3244 const_tree arg;
3245 const_call_expr_arg_iterator iter;
3246 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3247 if (CONTAINS_PLACEHOLDER_P (arg))
3248 return 1;
3249 return 0;
3250 }
3251 default:
3252 return 0;
3253 }
3254
3255 default:
3256 return 0;
3257 }
3258 return 0;
3259 }
3260
3261 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3262 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3263 field positions. */
3264
3265 static bool
3266 type_contains_placeholder_1 (const_tree type)
3267 {
3268 /* If the size contains a placeholder or the parent type (component type in
3269 the case of arrays) type involves a placeholder, this type does. */
3270 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3271 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3272 || (!POINTER_TYPE_P (type)
3273 && TREE_TYPE (type)
3274 && type_contains_placeholder_p (TREE_TYPE (type))))
3275 return true;
3276
3277 /* Now do type-specific checks. Note that the last part of the check above
3278 greatly limits what we have to do below. */
3279 switch (TREE_CODE (type))
3280 {
3281 case VOID_TYPE:
3282 case POINTER_BOUNDS_TYPE:
3283 case COMPLEX_TYPE:
3284 case ENUMERAL_TYPE:
3285 case BOOLEAN_TYPE:
3286 case POINTER_TYPE:
3287 case OFFSET_TYPE:
3288 case REFERENCE_TYPE:
3289 case METHOD_TYPE:
3290 case FUNCTION_TYPE:
3291 case VECTOR_TYPE:
3292 case NULLPTR_TYPE:
3293 return false;
3294
3295 case INTEGER_TYPE:
3296 case REAL_TYPE:
3297 case FIXED_POINT_TYPE:
3298 /* Here we just check the bounds. */
3299 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3300 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3301
3302 case ARRAY_TYPE:
3303 /* We have already checked the component type above, so just check the
3304 domain type. */
3305 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3306
3307 case RECORD_TYPE:
3308 case UNION_TYPE:
3309 case QUAL_UNION_TYPE:
3310 {
3311 tree field;
3312
3313 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3314 if (TREE_CODE (field) == FIELD_DECL
3315 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3316 || (TREE_CODE (type) == QUAL_UNION_TYPE
3317 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3318 || type_contains_placeholder_p (TREE_TYPE (field))))
3319 return true;
3320
3321 return false;
3322 }
3323
3324 default:
3325 gcc_unreachable ();
3326 }
3327 }
3328
3329 /* Wrapper around above function used to cache its result. */
3330
3331 bool
3332 type_contains_placeholder_p (tree type)
3333 {
3334 bool result;
3335
3336 /* If the contains_placeholder_bits field has been initialized,
3337 then we know the answer. */
3338 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3339 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3340
3341 /* Indicate that we've seen this type node, and the answer is false.
3342 This is what we want to return if we run into recursion via fields. */
3343 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3344
3345 /* Compute the real value. */
3346 result = type_contains_placeholder_1 (type);
3347
3348 /* Store the real value. */
3349 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3350
3351 return result;
3352 }
3353 \f
3354 /* Push tree EXP onto vector QUEUE if it is not already present. */
3355
3356 static void
3357 push_without_duplicates (tree exp, vec<tree> *queue)
3358 {
3359 unsigned int i;
3360 tree iter;
3361
3362 FOR_EACH_VEC_ELT (*queue, i, iter)
3363 if (simple_cst_equal (iter, exp) == 1)
3364 break;
3365
3366 if (!iter)
3367 queue->safe_push (exp);
3368 }
3369
3370 /* Given a tree EXP, find all occurrences of references to fields
3371 in a PLACEHOLDER_EXPR and place them in vector REFS without
3372 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3373 we assume here that EXP contains only arithmetic expressions
3374 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3375 argument list. */
3376
3377 void
3378 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3379 {
3380 enum tree_code code = TREE_CODE (exp);
3381 tree inner;
3382 int i;
3383
3384 /* We handle TREE_LIST and COMPONENT_REF separately. */
3385 if (code == TREE_LIST)
3386 {
3387 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3388 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3389 }
3390 else if (code == COMPONENT_REF)
3391 {
3392 for (inner = TREE_OPERAND (exp, 0);
3393 REFERENCE_CLASS_P (inner);
3394 inner = TREE_OPERAND (inner, 0))
3395 ;
3396
3397 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3398 push_without_duplicates (exp, refs);
3399 else
3400 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3401 }
3402 else
3403 switch (TREE_CODE_CLASS (code))
3404 {
3405 case tcc_constant:
3406 break;
3407
3408 case tcc_declaration:
3409 /* Variables allocated to static storage can stay. */
3410 if (!TREE_STATIC (exp))
3411 push_without_duplicates (exp, refs);
3412 break;
3413
3414 case tcc_expression:
3415 /* This is the pattern built in ada/make_aligning_type. */
3416 if (code == ADDR_EXPR
3417 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3418 {
3419 push_without_duplicates (exp, refs);
3420 break;
3421 }
3422
3423 /* Fall through... */
3424
3425 case tcc_exceptional:
3426 case tcc_unary:
3427 case tcc_binary:
3428 case tcc_comparison:
3429 case tcc_reference:
3430 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3431 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3432 break;
3433
3434 case tcc_vl_exp:
3435 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3436 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3437 break;
3438
3439 default:
3440 gcc_unreachable ();
3441 }
3442 }
3443
3444 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3445 return a tree with all occurrences of references to F in a
3446 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3447 CONST_DECLs. Note that we assume here that EXP contains only
3448 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3449 occurring only in their argument list. */
3450
3451 tree
3452 substitute_in_expr (tree exp, tree f, tree r)
3453 {
3454 enum tree_code code = TREE_CODE (exp);
3455 tree op0, op1, op2, op3;
3456 tree new_tree;
3457
3458 /* We handle TREE_LIST and COMPONENT_REF separately. */
3459 if (code == TREE_LIST)
3460 {
3461 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3462 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3463 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3464 return exp;
3465
3466 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3467 }
3468 else if (code == COMPONENT_REF)
3469 {
3470 tree inner;
3471
3472 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3473 and it is the right field, replace it with R. */
3474 for (inner = TREE_OPERAND (exp, 0);
3475 REFERENCE_CLASS_P (inner);
3476 inner = TREE_OPERAND (inner, 0))
3477 ;
3478
3479 /* The field. */
3480 op1 = TREE_OPERAND (exp, 1);
3481
3482 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3483 return r;
3484
3485 /* If this expression hasn't been completed yet, leave it alone. */
3486 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3487 return exp;
3488
3489 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3490 if (op0 == TREE_OPERAND (exp, 0))
3491 return exp;
3492
3493 new_tree
3494 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3495 }
3496 else
3497 switch (TREE_CODE_CLASS (code))
3498 {
3499 case tcc_constant:
3500 return exp;
3501
3502 case tcc_declaration:
3503 if (exp == f)
3504 return r;
3505 else
3506 return exp;
3507
3508 case tcc_expression:
3509 if (exp == f)
3510 return r;
3511
3512 /* Fall through... */
3513
3514 case tcc_exceptional:
3515 case tcc_unary:
3516 case tcc_binary:
3517 case tcc_comparison:
3518 case tcc_reference:
3519 switch (TREE_CODE_LENGTH (code))
3520 {
3521 case 0:
3522 return exp;
3523
3524 case 1:
3525 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3526 if (op0 == TREE_OPERAND (exp, 0))
3527 return exp;
3528
3529 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3530 break;
3531
3532 case 2:
3533 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3534 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3535
3536 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3537 return exp;
3538
3539 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3540 break;
3541
3542 case 3:
3543 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3544 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3545 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3546
3547 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3548 && op2 == TREE_OPERAND (exp, 2))
3549 return exp;
3550
3551 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3552 break;
3553
3554 case 4:
3555 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3556 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3557 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3558 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3559
3560 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3561 && op2 == TREE_OPERAND (exp, 2)
3562 && op3 == TREE_OPERAND (exp, 3))
3563 return exp;
3564
3565 new_tree
3566 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3567 break;
3568
3569 default:
3570 gcc_unreachable ();
3571 }
3572 break;
3573
3574 case tcc_vl_exp:
3575 {
3576 int i;
3577
3578 new_tree = NULL_TREE;
3579
3580 /* If we are trying to replace F with a constant, inline back
3581 functions which do nothing else than computing a value from
3582 the arguments they are passed. This makes it possible to
3583 fold partially or entirely the replacement expression. */
3584 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3585 {
3586 tree t = maybe_inline_call_in_expr (exp);
3587 if (t)
3588 return SUBSTITUTE_IN_EXPR (t, f, r);
3589 }
3590
3591 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3592 {
3593 tree op = TREE_OPERAND (exp, i);
3594 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3595 if (new_op != op)
3596 {
3597 if (!new_tree)
3598 new_tree = copy_node (exp);
3599 TREE_OPERAND (new_tree, i) = new_op;
3600 }
3601 }
3602
3603 if (new_tree)
3604 {
3605 new_tree = fold (new_tree);
3606 if (TREE_CODE (new_tree) == CALL_EXPR)
3607 process_call_operands (new_tree);
3608 }
3609 else
3610 return exp;
3611 }
3612 break;
3613
3614 default:
3615 gcc_unreachable ();
3616 }
3617
3618 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3619
3620 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3621 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3622
3623 return new_tree;
3624 }
3625
3626 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3627 for it within OBJ, a tree that is an object or a chain of references. */
3628
3629 tree
3630 substitute_placeholder_in_expr (tree exp, tree obj)
3631 {
3632 enum tree_code code = TREE_CODE (exp);
3633 tree op0, op1, op2, op3;
3634 tree new_tree;
3635
3636 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3637 in the chain of OBJ. */
3638 if (code == PLACEHOLDER_EXPR)
3639 {
3640 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3641 tree elt;
3642
3643 for (elt = obj; elt != 0;
3644 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3645 || TREE_CODE (elt) == COND_EXPR)
3646 ? TREE_OPERAND (elt, 1)
3647 : (REFERENCE_CLASS_P (elt)
3648 || UNARY_CLASS_P (elt)
3649 || BINARY_CLASS_P (elt)
3650 || VL_EXP_CLASS_P (elt)
3651 || EXPRESSION_CLASS_P (elt))
3652 ? TREE_OPERAND (elt, 0) : 0))
3653 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3654 return elt;
3655
3656 for (elt = obj; elt != 0;
3657 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3658 || TREE_CODE (elt) == COND_EXPR)
3659 ? TREE_OPERAND (elt, 1)
3660 : (REFERENCE_CLASS_P (elt)
3661 || UNARY_CLASS_P (elt)
3662 || BINARY_CLASS_P (elt)
3663 || VL_EXP_CLASS_P (elt)
3664 || EXPRESSION_CLASS_P (elt))
3665 ? TREE_OPERAND (elt, 0) : 0))
3666 if (POINTER_TYPE_P (TREE_TYPE (elt))
3667 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3668 == need_type))
3669 return fold_build1 (INDIRECT_REF, need_type, elt);
3670
3671 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3672 survives until RTL generation, there will be an error. */
3673 return exp;
3674 }
3675
3676 /* TREE_LIST is special because we need to look at TREE_VALUE
3677 and TREE_CHAIN, not TREE_OPERANDS. */
3678 else if (code == TREE_LIST)
3679 {
3680 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3681 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3682 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3683 return exp;
3684
3685 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3686 }
3687 else
3688 switch (TREE_CODE_CLASS (code))
3689 {
3690 case tcc_constant:
3691 case tcc_declaration:
3692 return exp;
3693
3694 case tcc_exceptional:
3695 case tcc_unary:
3696 case tcc_binary:
3697 case tcc_comparison:
3698 case tcc_expression:
3699 case tcc_reference:
3700 case tcc_statement:
3701 switch (TREE_CODE_LENGTH (code))
3702 {
3703 case 0:
3704 return exp;
3705
3706 case 1:
3707 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3708 if (op0 == TREE_OPERAND (exp, 0))
3709 return exp;
3710
3711 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3712 break;
3713
3714 case 2:
3715 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3716 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3717
3718 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3719 return exp;
3720
3721 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3722 break;
3723
3724 case 3:
3725 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3726 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3727 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3728
3729 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3730 && op2 == TREE_OPERAND (exp, 2))
3731 return exp;
3732
3733 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3734 break;
3735
3736 case 4:
3737 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3738 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3739 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3740 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3741
3742 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3743 && op2 == TREE_OPERAND (exp, 2)
3744 && op3 == TREE_OPERAND (exp, 3))
3745 return exp;
3746
3747 new_tree
3748 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3749 break;
3750
3751 default:
3752 gcc_unreachable ();
3753 }
3754 break;
3755
3756 case tcc_vl_exp:
3757 {
3758 int i;
3759
3760 new_tree = NULL_TREE;
3761
3762 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3763 {
3764 tree op = TREE_OPERAND (exp, i);
3765 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3766 if (new_op != op)
3767 {
3768 if (!new_tree)
3769 new_tree = copy_node (exp);
3770 TREE_OPERAND (new_tree, i) = new_op;
3771 }
3772 }
3773
3774 if (new_tree)
3775 {
3776 new_tree = fold (new_tree);
3777 if (TREE_CODE (new_tree) == CALL_EXPR)
3778 process_call_operands (new_tree);
3779 }
3780 else
3781 return exp;
3782 }
3783 break;
3784
3785 default:
3786 gcc_unreachable ();
3787 }
3788
3789 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3790
3791 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3792 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3793
3794 return new_tree;
3795 }
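
/* Sketch of the typical use of the machinery above.  SELF_REF_TYPE is
   assumed to be a self-referential type (e.g. an Ada record whose size
   depends on one of its own fields) and OBJ an object of that type; the
   helper name is illustrative.  */

static tree ATTRIBUTE_UNUSED
example_placeholder_sketch (tree self_ref_type, tree obj)
{
  /* TYPE_SIZE of such a type contains a PLACEHOLDER_EXPR; resolving it
     against OBJ yields the size of that particular object.  */
  return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE (self_ref_type), obj);
}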
3796 \f
3797
3798 /* Subroutine of stabilize_reference; this is called for subtrees of
3799 references. Any expression with side-effects must be put in a SAVE_EXPR
3800 to ensure that it is only evaluated once.
3801
3802 We don't put SAVE_EXPR nodes around everything, because assigning very
3803 simple expressions to temporaries causes us to miss good opportunities
3804 for optimizations. Among other things, the opportunity to fold in the
3805 addition of a constant into an addressing mode often gets lost, e.g.
3806 "y[i+1] += x;". In general, we take the approach that we should not make
3807 an assignment unless we are forced into it - i.e., that any non-side effect
3808 operator should be allowed, and that cse should take care of coalescing
3809 multiple utterances of the same expression should that prove fruitful. */
3810
3811 static tree
3812 stabilize_reference_1 (tree e)
3813 {
3814 tree result;
3815 enum tree_code code = TREE_CODE (e);
3816
3817 /* We cannot ignore const expressions because it might be a reference
3818 to a const array whose index contains side-effects. But we can
3819 ignore things that are actually constant or that have already been
3820 handled by this function. */
3821
3822 if (tree_invariant_p (e))
3823 return e;
3824
3825 switch (TREE_CODE_CLASS (code))
3826 {
3827 case tcc_exceptional:
3828 case tcc_type:
3829 case tcc_declaration:
3830 case tcc_comparison:
3831 case tcc_statement:
3832 case tcc_expression:
3833 case tcc_reference:
3834 case tcc_vl_exp:
3835 /* If the expression has side-effects, then encase it in a SAVE_EXPR
3836 so that it will only be evaluated once. */
3837 /* The reference (r) and comparison (<) classes could be handled as
3838 below, but it is generally faster to only evaluate them once. */
3839 if (TREE_SIDE_EFFECTS (e))
3840 return save_expr (e);
3841 return e;
3842
3843 case tcc_constant:
3844 /* Constants need no processing. In fact, we should never reach
3845 here. */
3846 return e;
3847
3848 case tcc_binary:
3849 /* Division is slow and tends to be compiled with jumps,
3850 especially the division by powers of 2 that is often
3851 found inside of an array reference. So do it just once. */
3852 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
3853 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
3854 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
3855 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
3856 return save_expr (e);
3857 /* Recursively stabilize each operand. */
3858 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
3859 stabilize_reference_1 (TREE_OPERAND (e, 1)));
3860 break;
3861
3862 case tcc_unary:
3863 /* Recursively stabilize each operand. */
3864 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
3865 break;
3866
3867 default:
3868 gcc_unreachable ();
3869 }
3870
3871 TREE_TYPE (result) = TREE_TYPE (e);
3872 TREE_READONLY (result) = TREE_READONLY (e);
3873 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
3874 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
3875
3876 return result;
3877 }
3878
3879 /* Stabilize a reference so that we can use it any number of times
3880 without causing its operands to be evaluated more than once.
3881 Returns the stabilized reference. This works by means of save_expr,
3882 so see the caveats in the comments about save_expr.
3883
3884 Also allows conversion expressions whose operands are references.
3885 Any other kind of expression is returned unchanged. */
3886
3887 tree
3888 stabilize_reference (tree ref)
3889 {
3890 tree result;
3891 enum tree_code code = TREE_CODE (ref);
3892
3893 switch (code)
3894 {
3895 case VAR_DECL:
3896 case PARM_DECL:
3897 case RESULT_DECL:
3898 /* No action is needed in this case. */
3899 return ref;
3900
3901 CASE_CONVERT:
3902 case FLOAT_EXPR:
3903 case FIX_TRUNC_EXPR:
3904 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
3905 break;
3906
3907 case INDIRECT_REF:
3908 result = build_nt (INDIRECT_REF,
3909 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
3910 break;
3911
3912 case COMPONENT_REF:
3913 result = build_nt (COMPONENT_REF,
3914 stabilize_reference (TREE_OPERAND (ref, 0)),
3915 TREE_OPERAND (ref, 1), NULL_TREE);
3916 break;
3917
3918 case BIT_FIELD_REF:
3919 result = build_nt (BIT_FIELD_REF,
3920 stabilize_reference (TREE_OPERAND (ref, 0)),
3921 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
3922 break;
3923
3924 case ARRAY_REF:
3925 result = build_nt (ARRAY_REF,
3926 stabilize_reference (TREE_OPERAND (ref, 0)),
3927 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
3928 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
3929 break;
3930
3931 case ARRAY_RANGE_REF:
3932 result = build_nt (ARRAY_RANGE_REF,
3933 stabilize_reference (TREE_OPERAND (ref, 0)),
3934 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
3935 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
3936 break;
3937
3938 case COMPOUND_EXPR:
3939 /* We cannot wrap the first expression in a SAVE_EXPR, as then
3940 it wouldn't be ignored. This matters when dealing with
3941 volatiles. */
3942 return stabilize_reference_1 (ref);
3943
3944 /* If arg isn't a kind of lvalue we recognize, make no change.
3945 Caller should recognize the error for an invalid lvalue. */
3946 default:
3947 return ref;
3948
3949 case ERROR_MARK:
3950 return error_mark_node;
3951 }
3952
3953 TREE_TYPE (result) = TREE_TYPE (ref);
3954 TREE_READONLY (result) = TREE_READONLY (ref);
3955 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
3956 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
3957
3958 return result;
3959 }
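
/* Sketch of stabilize_reference for an lvalue used on both sides of an
   assignment.  REF is assumed to be an integral-typed reference such as
   a[i++]; the helper name is illustrative.  */

static tree ATTRIBUTE_UNUSED
example_stabilize_reference_sketch (tree ref)
{
  /* Without stabilization, building REF = REF + 1 would evaluate the side
     effects inside REF twice; after it, both uses share the SAVE_EXPRs
     created for the side-effecting parts.  */
  tree stable = stabilize_reference (ref);
  return build2 (MODIFY_EXPR, TREE_TYPE (stable), stable,
                 build2 (PLUS_EXPR, TREE_TYPE (stable), stable,
                         build_int_cst (TREE_TYPE (stable), 1)));
}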
3960 \f
3961 /* Low-level constructors for expressions. */
3962
3963 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
3964 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
3965
3966 void
3967 recompute_tree_invariant_for_addr_expr (tree t)
3968 {
3969 tree node;
3970 bool tc = true, se = false;
3971
3972 /* We started out assuming this address is both invariant and constant, and
3973 that it does not have side effects. Now go down any handled components and see if
3974 any of them involve offsets that are either non-constant or non-invariant.
3975 Also check for side-effects.
3976
3977 ??? Note that this code makes no attempt to deal with the case where
3978 taking the address of something causes a copy due to misalignment. */
3979
3980 #define UPDATE_FLAGS(NODE) \
3981 do { tree _node = (NODE); \
3982 if (_node && !TREE_CONSTANT (_node)) tc = false; \
3983 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
3984
3985 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
3986 node = TREE_OPERAND (node, 0))
3987 {
3988 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
3989 array reference (probably made temporarily by the G++ front end),
3990 so ignore all the operands. */
3991 if ((TREE_CODE (node) == ARRAY_REF
3992 || TREE_CODE (node) == ARRAY_RANGE_REF)
3993 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
3994 {
3995 UPDATE_FLAGS (TREE_OPERAND (node, 1));
3996 if (TREE_OPERAND (node, 2))
3997 UPDATE_FLAGS (TREE_OPERAND (node, 2));
3998 if (TREE_OPERAND (node, 3))
3999 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4000 }
4001 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4002 FIELD_DECL, apparently. The G++ front end can put something else
4003 there, at least temporarily. */
4004 else if (TREE_CODE (node) == COMPONENT_REF
4005 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4006 {
4007 if (TREE_OPERAND (node, 2))
4008 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4009 }
4010 }
4011
4012 node = lang_hooks.expr_to_decl (node, &tc, &se);
4013
4014 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4015 the address, since &(*a)->b is a form of addition. If it's a constant, the
4016 address is constant too. If it's a decl, its address is constant if the
4017 decl is static. Everything else is not constant and, furthermore,
4018 taking the address of a volatile variable is not volatile. */
4019 if (TREE_CODE (node) == INDIRECT_REF
4020 || TREE_CODE (node) == MEM_REF)
4021 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4022 else if (CONSTANT_CLASS_P (node))
4023 ;
4024 else if (DECL_P (node))
4025 tc &= (staticp (node) != NULL_TREE);
4026 else
4027 {
4028 tc = false;
4029 se |= TREE_SIDE_EFFECTS (node);
4030 }
4031
4032
4033 TREE_CONSTANT (t) = tc;
4034 TREE_SIDE_EFFECTS (t) = se;
4035 #undef UPDATE_FLAGS
4036 }
4037
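/* Usage sketch (illustrative; ADDR and NEW_OPERAND are hypothetical trees):
   after manually replacing the operand of an existing ADDR_EXPR, the flags
   can be brought back in sync with

     TREE_OPERAND (addr, 0) = new_operand;
     recompute_tree_invariant_for_addr_expr (addr);

   build1 already does this automatically when it creates an ADDR_EXPR,
   as shown below.  */
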
4038 /* Build an expression of code CODE, data type TYPE, and operands as
4039 specified. Expressions and reference nodes can be created this way.
4040 Constants, decls, types and misc nodes cannot be.
4041
4042 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4043 enough for all extant tree codes; see the usage sketch after build2_stat. */
4044
4045 tree
4046 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4047 {
4048 tree t;
4049
4050 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4051
4052 t = make_node_stat (code PASS_MEM_STAT);
4053 TREE_TYPE (t) = tt;
4054
4055 return t;
4056 }
4057
4058 tree
4059 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4060 {
4061 int length = sizeof (struct tree_exp);
4062 tree t;
4063
4064 record_node_allocation_statistics (code, length);
4065
4066 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4067
4068 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4069
4070 memset (t, 0, sizeof (struct tree_common));
4071
4072 TREE_SET_CODE (t, code);
4073
4074 TREE_TYPE (t) = type;
4075 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4076 TREE_OPERAND (t, 0) = node;
4077 if (node && !TYPE_P (node))
4078 {
4079 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4080 TREE_READONLY (t) = TREE_READONLY (node);
4081 }
4082
4083 if (TREE_CODE_CLASS (code) == tcc_statement)
4084 TREE_SIDE_EFFECTS (t) = 1;
4085 else switch (code)
4086 {
4087 case VA_ARG_EXPR:
4088 /* All of these have side-effects, no matter what their
4089 operands are. */
4090 TREE_SIDE_EFFECTS (t) = 1;
4091 TREE_READONLY (t) = 0;
4092 break;
4093
4094 case INDIRECT_REF:
4095 /* Whether a dereference is readonly has nothing to do with whether
4096 its operand is readonly. */
4097 TREE_READONLY (t) = 0;
4098 break;
4099
4100 case ADDR_EXPR:
4101 if (node)
4102 recompute_tree_invariant_for_addr_expr (t);
4103 break;
4104
4105 default:
4106 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4107 && node && !TYPE_P (node)
4108 && TREE_CONSTANT (node))
4109 TREE_CONSTANT (t) = 1;
4110 if (TREE_CODE_CLASS (code) == tcc_reference
4111 && node && TREE_THIS_VOLATILE (node))
4112 TREE_THIS_VOLATILE (t) = 1;
4113 break;
4114 }
4115
4116 return t;
4117 }
4118
4119 #define PROCESS_ARG(N) \
4120 do { \
4121 TREE_OPERAND (t, N) = arg##N; \
4122 if (arg##N &&!TYPE_P (arg##N)) \
4123 { \
4124 if (TREE_SIDE_EFFECTS (arg##N)) \
4125 side_effects = 1; \
4126 if (!TREE_READONLY (arg##N) \
4127 && !CONSTANT_CLASS_P (arg##N)) \
4128 (void) (read_only = 0); \
4129 if (!TREE_CONSTANT (arg##N)) \
4130 (void) (constant = 0); \
4131 } \
4132 } while (0)
4133
4134 tree
4135 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4136 {
4137 bool constant, read_only, side_effects;
4138 tree t;
4139
4140 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4141
4142 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4143 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4144 /* When sizetype precision doesn't match that of pointers
4145 we need to be able to build explicit extensions or truncations
4146 of the offset argument. */
4147 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4148 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4149 && TREE_CODE (arg1) == INTEGER_CST);
4150
4151 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4152 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4153 && ptrofftype_p (TREE_TYPE (arg1)));
4154
4155 t = make_node_stat (code PASS_MEM_STAT);
4156 TREE_TYPE (t) = tt;
4157
4158 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4159 result based on those same flags for the arguments. But if the
4160 arguments aren't really even `tree' expressions, we shouldn't be trying
4161 to do this. */
4162
4163 /* Expressions without side effects may be constant if their
4164 arguments are as well. */
4165 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4166 || TREE_CODE_CLASS (code) == tcc_binary);
4167 read_only = 1;
4168 side_effects = TREE_SIDE_EFFECTS (t);
4169
4170 PROCESS_ARG (0);
4171 PROCESS_ARG (1);
4172
4173 TREE_READONLY (t) = read_only;
4174 TREE_CONSTANT (t) = constant;
4175 TREE_SIDE_EFFECTS (t) = side_effects;
4176 TREE_THIS_VOLATILE (t)
4177 = (TREE_CODE_CLASS (code) == tcc_reference
4178 && arg0 && TREE_THIS_VOLATILE (arg0));
4179
4180 return t;
4181 }
4182
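/* Usage sketch for the fixed-arity builders (illustrative; VAR is a
   hypothetical VAR_DECL of integer_type_node).  This builds the GENERIC
   expression VAR * 3 + 1:

     tree three = build_int_cst (integer_type_node, 3);
     tree one   = build_int_cst (integer_type_node, 1);
     tree mul   = build2 (MULT_EXPR, integer_type_node, var, three);
     tree sum   = build2 (PLUS_EXPR, integer_type_node, mul, one);

   TREE_CONSTANT, TREE_READONLY and TREE_SIDE_EFFECTS of the result are
   derived from the operands via PROCESS_ARG, as above.  */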
4183
4184 tree
4185 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4186 tree arg2 MEM_STAT_DECL)
4187 {
4188 bool constant, read_only, side_effects;
4189 tree t;
4190
4191 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4192 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4193
4194 t = make_node_stat (code PASS_MEM_STAT);
4195 TREE_TYPE (t) = tt;
4196
4197 read_only = 1;
4198
4199 /* As a special exception, if COND_EXPR has NULL branches, we
4200 assume that it is a gimple statement and always consider
4201 it to have side effects. */
4202 if (code == COND_EXPR
4203 && tt == void_type_node
4204 && arg1 == NULL_TREE
4205 && arg2 == NULL_TREE)
4206 side_effects = true;
4207 else
4208 side_effects = TREE_SIDE_EFFECTS (t);
4209
4210 PROCESS_ARG (0);
4211 PROCESS_ARG (1);
4212 PROCESS_ARG (2);
4213
4214 if (code == COND_EXPR)
4215 TREE_READONLY (t) = read_only;
4216
4217 TREE_SIDE_EFFECTS (t) = side_effects;
4218 TREE_THIS_VOLATILE (t)
4219 = (TREE_CODE_CLASS (code) == tcc_reference
4220 && arg0 && TREE_THIS_VOLATILE (arg0));
4221
4222 return t;
4223 }
4224
4225 tree
4226 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4227 tree arg2, tree arg3 MEM_STAT_DECL)
4228 {
4229 bool constant, read_only, side_effects;
4230 tree t;
4231
4232 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4233
4234 t = make_node_stat (code PASS_MEM_STAT);
4235 TREE_TYPE (t) = tt;
4236
4237 side_effects = TREE_SIDE_EFFECTS (t);
4238
4239 PROCESS_ARG (0);
4240 PROCESS_ARG (1);
4241 PROCESS_ARG (2);
4242 PROCESS_ARG (3);
4243
4244 TREE_SIDE_EFFECTS (t) = side_effects;
4245 TREE_THIS_VOLATILE (t)
4246 = (TREE_CODE_CLASS (code) == tcc_reference
4247 && arg0 && TREE_THIS_VOLATILE (arg0));
4248
4249 return t;
4250 }
4251
4252 tree
4253 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4254 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4255 {
4256 bool constant, read_only, side_effects;
4257 tree t;
4258
4259 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4260
4261 t = make_node_stat (code PASS_MEM_STAT);
4262 TREE_TYPE (t) = tt;
4263
4264 side_effects = TREE_SIDE_EFFECTS (t);
4265
4266 PROCESS_ARG (0);
4267 PROCESS_ARG (1);
4268 PROCESS_ARG (2);
4269 PROCESS_ARG (3);
4270 PROCESS_ARG (4);
4271
4272 TREE_SIDE_EFFECTS (t) = side_effects;
4273 TREE_THIS_VOLATILE (t)
4274 = (TREE_CODE_CLASS (code) == tcc_reference
4275 && arg0 && TREE_THIS_VOLATILE (arg0));
4276
4277 return t;
4278 }
4279
4280 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4281 on the pointer PTR. */
4282
4283 tree
4284 build_simple_mem_ref_loc (location_t loc, tree ptr)
4285 {
4286 HOST_WIDE_INT offset = 0;
4287 tree ptype = TREE_TYPE (ptr);
4288 tree tem;
4289 /* For convenience allow addresses that collapse to a simple base
4290 and offset. */
4291 if (TREE_CODE (ptr) == ADDR_EXPR
4292 && (handled_component_p (TREE_OPERAND (ptr, 0))
4293 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4294 {
4295 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4296 gcc_assert (ptr);
4297 ptr = build_fold_addr_expr (ptr);
4298 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4299 }
4300 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4301 ptr, build_int_cst (ptype, offset));
4302 SET_EXPR_LOCATION (tem, loc);
4303 return tem;
4304 }
4305
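/* Usage sketch (illustrative; PTR is a hypothetical decl or SSA name of
   type int *).  This builds the equivalent of the dereference *PTR as a
   MEM_REF with a zero offset:

     tree deref = build_simple_mem_ref_loc (input_location, ptr);

   If PTR is instead an ADDR_EXPR such as &a.b, the address is first
   collapsed to a base and constant offset, as the code above shows.  */
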
4306 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4307
4308 double_int
4309 mem_ref_offset (const_tree t)
4310 {
4311 tree toff = TREE_OPERAND (t, 1);
4312 return tree_to_double_int (toff).sext (TYPE_PRECISION (TREE_TYPE (toff)));
4313 }
4314
4315 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4316 offsetted by OFFSET units. */
4317
4318 tree
4319 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4320 {
4321 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4322 build_fold_addr_expr (base),
4323 build_int_cst (ptr_type_node, offset));
4324 tree addr = build1 (ADDR_EXPR, type, ref);
4325 recompute_tree_invariant_for_addr_expr (addr);
4326 return addr;
4327 }
4328
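/* Usage sketch (illustrative; BASE is a hypothetical static VAR_DECL of
   type char[16]).  This builds an invariant char * address pointing
   4 units past the start of BASE:

     tree addr = build_invariant_address
                   (build_pointer_type (char_type_node), base, 4);

   TREE_CONSTANT of the result is set by
   recompute_tree_invariant_for_addr_expr, as above.  */
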
4329 /* Similar except don't specify the TREE_TYPE
4330 and leave the TREE_SIDE_EFFECTS as 0.
4331 It is permissible for arguments to be null,
4332 or even garbage if their values do not matter. */
4333
4334 tree
4335 build_nt (enum tree_code code, ...)
4336 {
4337 tree t;
4338 int length;
4339 int i;
4340 va_list p;
4341
4342 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4343
4344 va_start (p, code);
4345
4346 t = make_node (code);
4347 length = TREE_CODE_LENGTH (code);
4348
4349 for (i = 0; i < length; i++)
4350 TREE_OPERAND (t, i) = va_arg (p, tree);
4351
4352 va_end (p);
4353 return t;
4354 }
4355
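/* Usage sketch (illustrative; A and I are hypothetical trees for an
   array and an index).  Front ends can build a typeless A[I] node
   before types are resolved with

     tree ref = build_nt (ARRAY_REF, a, i, NULL_TREE, NULL_TREE);

   Unlike build4, no TREE_TYPE is set and no flags are propagated.  */
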
4356 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4357 tree vec. */
4358
4359 tree
4360 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4361 {
4362 tree ret, t;
4363 unsigned int ix;
4364
4365 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4366 CALL_EXPR_FN (ret) = fn;
4367 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4368 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4369 CALL_EXPR_ARG (ret, ix) = t;
4370 return ret;
4371 }
4372 \f
4373 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4374 We do NOT enter this node in any sort of symbol table.
4375
4376 LOC is the location of the decl.
4377
4378 layout_decl is used to set up the decl's storage layout.
4379 Other slots are initialized to 0 or null pointers. */
4380
4381 tree
4382 build_decl_stat (location_t loc, enum tree_code code, tree name,
4383 tree type MEM_STAT_DECL)
4384 {
4385 tree t;
4386
4387 t = make_node_stat (code PASS_MEM_STAT);
4388 DECL_SOURCE_LOCATION (t) = loc;
4389
4390 /* if (type == error_mark_node)
4391 type = integer_type_node; */
4392 /* That is not done, deliberately, so that having error_mark_node
4393 as the type can suppress useless errors in the use of this variable. */
4394
4395 DECL_NAME (t) = name;
4396 TREE_TYPE (t) = type;
4397
4398 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4399 layout_decl (t, 0);
4400
4401 return t;
4402 }
4403
4404 /* Builds and returns function declaration with NAME and TYPE. */
4405
4406 tree
4407 build_fn_decl (const char *name, tree type)
4408 {
4409 tree id = get_identifier (name);
4410 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4411
4412 DECL_EXTERNAL (decl) = 1;
4413 TREE_PUBLIC (decl) = 1;
4414 DECL_ARTIFICIAL (decl) = 1;
4415 TREE_NOTHROW (decl) = 1;
4416
4417 return decl;
4418 }
4419
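/* Usage sketch (illustrative; "my_helper" is a made-up name).  Declaring
   an external helper with signature int my_helper (void):

     tree fntype = build_function_type_list (integer_type_node, NULL_TREE);
     tree decl = build_fn_decl ("my_helper", fntype);

   The resulting decl is marked external, public, artificial and nothrow,
   as set above.  */
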
4420 vec<tree, va_gc> *all_translation_units;
4421
4422 /* Builds a new translation-unit decl with name NAME, queues it in the
4423 global list of translation-unit decls and returns it. */
4424
4425 tree
4426 build_translation_unit_decl (tree name)
4427 {
4428 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4429 name, NULL_TREE);
4430 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4431 vec_safe_push (all_translation_units, tu);
4432 return tu;
4433 }
4434
4435 \f
4436 /* BLOCK nodes are used to represent the structure of binding contours
4437 and declarations, once those contours have been exited and their contents
4438 compiled. This information is used for outputting debugging info. */
4439
4440 tree
4441 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4442 {
4443 tree block = make_node (BLOCK);
4444
4445 BLOCK_VARS (block) = vars;
4446 BLOCK_SUBBLOCKS (block) = subblocks;
4447 BLOCK_SUPERCONTEXT (block) = supercontext;
4448 BLOCK_CHAIN (block) = chain;
4449 return block;
4450 }
4451
4452 \f
4453 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4454
4455 LOC is the location to use in tree T. */
4456
4457 void
4458 protected_set_expr_location (tree t, location_t loc)
4459 {
4460 if (t && CAN_HAVE_LOCATION_P (t))
4461 SET_EXPR_LOCATION (t, loc);
4462 }
4463 \f
4464 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4465 is ATTRIBUTE. */
4466
4467 tree
4468 build_decl_attribute_variant (tree ddecl, tree attribute)
4469 {
4470 DECL_ATTRIBUTES (ddecl) = attribute;
4471 return ddecl;
4472 }
4473
4474 /* Borrowed from hashtab.c iterative_hash implementation. */
4475 #define mix(a,b,c) \
4476 { \
4477 a -= b; a -= c; a ^= (c>>13); \
4478 b -= c; b -= a; b ^= (a<< 8); \
4479 c -= a; c -= b; c ^= ((b&0xffffffff)>>13); \
4480 a -= b; a -= c; a ^= ((c&0xffffffff)>>12); \
4481 b -= c; b -= a; b = (b ^ (a<<16)) & 0xffffffff; \
4482 c -= a; c -= b; c = (c ^ (b>> 5)) & 0xffffffff; \
4483 a -= b; a -= c; a = (a ^ (c>> 3)) & 0xffffffff; \
4484 b -= c; b -= a; b = (b ^ (a<<10)) & 0xffffffff; \
4485 c -= a; c -= b; c = (c ^ (b>>15)) & 0xffffffff; \
4486 }
4487
4488
4489 /* Produce good hash value combining VAL and VAL2. */
4490 hashval_t
4491 iterative_hash_hashval_t (hashval_t val, hashval_t val2)
4492 {
4493 /* the golden ratio; an arbitrary value. */
4494 hashval_t a = 0x9e3779b9;
4495
4496 mix (a, val, val2);
4497 return val2;
4498 }
4499
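/* Usage sketch (illustrative; HASH1 and HASH2 are hypothetical per-field
   hash values).  Combining them into a single hash:

     hashval_t h = 0;
     h = iterative_hash_hashval_t (hash1, h);
     h = iterative_hash_hashval_t (hash2, h);

   The combination is order-dependent, so swapping the two calls
   generally yields a different result.  */
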
4500 /* Produce good hash value combining VAL and VAL2. */
4501 hashval_t
4502 iterative_hash_host_wide_int (HOST_WIDE_INT val, hashval_t val2)
4503 {
4504 if (sizeof (HOST_WIDE_INT) == sizeof (hashval_t))
4505 return iterative_hash_hashval_t (val, val2);
4506 else
4507 {
4508 hashval_t a = (hashval_t) val;
4509 /* Avoid warnings about shifting of more than the width of the type on
4510 hosts that won't execute this path. */
4511 int zero = 0;
4512 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 8 + zero));
4513 mix (a, b, val2);
4514 if (sizeof (HOST_WIDE_INT) > 2 * sizeof (hashval_t))
4515 {
4516 hashval_t a = (hashval_t) (val >> (sizeof (hashval_t) * 16 + zero));
4517 hashval_t b = (hashval_t) (val >> (sizeof (hashval_t) * 24 + zero));
4518 mix (a, b, val2);
4519 }
4520 return val2;
4521 }
4522 }
4523
4524 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4525 is ATTRIBUTE and its qualifiers are QUALS.
4526
4527 Record such modified types already made so we don't make duplicates. */
4528
4529 tree
4530 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4531 {
4532 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4533 {
4534 hashval_t hashcode = 0;
4535 tree ntype;
4536 enum tree_code code = TREE_CODE (ttype);
4537
4538 /* Building a distinct copy of a tagged type is inappropriate; it
4539 causes breakage in code that expects there to be a one-to-one
4540 relationship between a struct and its fields.
4541 build_duplicate_type is another solution (as used in
4542 handle_transparent_union_attribute), but that doesn't play well
4543 with the stronger C++ type identity model. */
4544 if (TREE_CODE (ttype) == RECORD_TYPE
4545 || TREE_CODE (ttype) == UNION_TYPE
4546 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4547 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4548 {
4549 warning (OPT_Wattributes,
4550 "ignoring attributes applied to %qT after definition",
4551 TYPE_MAIN_VARIANT (ttype));
4552 return build_qualified_type (ttype, quals);
4553 }
4554
4555 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4556 ntype = build_distinct_type_copy (ttype);
4557
4558 TYPE_ATTRIBUTES (ntype) = attribute;
4559
4560 hashcode = iterative_hash_object (code, hashcode);
4561 if (TREE_TYPE (ntype))
4562 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (ntype)),
4563 hashcode);
4564 hashcode = attribute_hash_list (attribute, hashcode);
4565
4566 switch (TREE_CODE (ntype))
4567 {
4568 case FUNCTION_TYPE:
4569 hashcode = type_hash_list (TYPE_ARG_TYPES (ntype), hashcode);
4570 break;
4571 case ARRAY_TYPE:
4572 if (TYPE_DOMAIN (ntype))
4573 hashcode = iterative_hash_object (TYPE_HASH (TYPE_DOMAIN (ntype)),
4574 hashcode);
4575 break;
4576 case INTEGER_TYPE:
4577 hashcode = iterative_hash_object
4578 (TREE_INT_CST_LOW (TYPE_MAX_VALUE (ntype)), hashcode);
4579 hashcode = iterative_hash_object
4580 (TREE_INT_CST_HIGH (TYPE_MAX_VALUE (ntype)), hashcode);
4581 break;
4582 case REAL_TYPE:
4583 case FIXED_POINT_TYPE:
4584 {
4585 unsigned int precision = TYPE_PRECISION (ntype);
4586 hashcode = iterative_hash_object (precision, hashcode);
4587 }
4588 break;
4589 default:
4590 break;
4591 }
4592
4593 ntype = type_hash_canon (hashcode, ntype);
4594
4595 /* If the target-dependent attributes make NTYPE different from
4596 its canonical type, we will need to use structural equality
4597 checks for this type. */
4598 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4599 || !comp_type_attributes (ntype, ttype))
4600 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4601 else if (TYPE_CANONICAL (ntype) == ntype)
4602 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4603
4604 ttype = build_qualified_type (ntype, quals);
4605 }
4606 else if (TYPE_QUALS (ttype) != quals)
4607 ttype = build_qualified_type (ttype, quals);
4608
4609 return ttype;
4610 }
4611
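/* Usage sketch (illustrative; "my_attr" is a made-up attribute name and
   TYPE a hypothetical non-tagged type).  Adding an attribute while
   keeping the existing qualifiers:

     tree attrs = tree_cons (get_identifier ("my_attr"), NULL_TREE,
                             TYPE_ATTRIBUTES (type));
     tree newtype = build_type_attribute_qual_variant (type, attrs,
                                                       TYPE_QUALS (type));

   For RECORD, UNION and ENUMERAL types the function instead warns and
   only applies the qualifiers, as shown above.  */
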
4612 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4613 the same. */
4614
4615 static bool
4616 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4617 {
4618 tree cl1, cl2;
4619 for (cl1 = clauses1, cl2 = clauses2;
4620 cl1 && cl2;
4621 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4622 {
4623 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4624 return false;
4625 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4626 {
4627 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4628 OMP_CLAUSE_DECL (cl2)) != 1)
4629 return false;
4630 }
4631 switch (OMP_CLAUSE_CODE (cl1))
4632 {
4633 case OMP_CLAUSE_ALIGNED:
4634 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4635 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4636 return false;
4637 break;
4638 case OMP_CLAUSE_LINEAR:
4639 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4640 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4641 return false;
4642 break;
4643 case OMP_CLAUSE_SIMDLEN:
4644 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4645 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4646 return false;
4647 default:
4648 break;
4649 }
4650 }
4651 return true;
4652 }
4653
4654 /* Compare two constructor-element-type constants. Return true if the lists
4655 are known to be equal; otherwise return false. */
4656
4657 static bool
4658 simple_cst_list_equal (const_tree l1, const_tree l2)
4659 {
4660 while (l1 != NULL_TREE && l2 != NULL_TREE)
4661 {
4662 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4663 return false;
4664
4665 l1 = TREE_CHAIN (l1);
4666 l2 = TREE_CHAIN (l2);
4667 }
4668
4669 return l1 == l2;
4670 }
4671
4672 /* Compare two attributes for their value identity. Return true if the
4673 attribute values are known to be equal; otherwise return false.
4674 */
4675
4676 static bool
4677 attribute_value_equal (const_tree attr1, const_tree attr2)
4678 {
4679 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4680 return true;
4681
4682 if (TREE_VALUE (attr1) != NULL_TREE
4683 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4684 && TREE_VALUE (attr2) != NULL
4685 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4686 return (simple_cst_list_equal (TREE_VALUE (attr1),
4687 TREE_VALUE (attr2)) == 1);
4688
4689 if ((flag_openmp || flag_openmp_simd)
4690 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4691 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4692 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4693 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4694 TREE_VALUE (attr2));
4695
4696 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4697 }
4698
4699 /* Return 0 if the attributes for two types are incompatible, 1 if they
4700 are compatible, and 2 if they are nearly compatible (which causes a
4701 warning to be generated). */
4702 int
4703 comp_type_attributes (const_tree type1, const_tree type2)
4704 {
4705 const_tree a1 = TYPE_ATTRIBUTES (type1);
4706 const_tree a2 = TYPE_ATTRIBUTES (type2);
4707 const_tree a;
4708
4709 if (a1 == a2)
4710 return 1;
4711 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4712 {
4713 const struct attribute_spec *as;
4714 const_tree attr;
4715
4716 as = lookup_attribute_spec (get_attribute_name (a));
4717 if (!as || as->affects_type_identity == false)
4718 continue;
4719
4720 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4721 if (!attr || !attribute_value_equal (a, attr))
4722 break;
4723 }
4724 if (!a)
4725 {
4726 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4727 {
4728 const struct attribute_spec *as;
4729
4730 as = lookup_attribute_spec (get_attribute_name (a));
4731 if (!as || as->affects_type_identity == false)
4732 continue;
4733
4734 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4735 break;
4736 /* We don't need to compare trees again, as we did this
4737 already in the first loop. */
4738 }
4739 /* All attributes affecting type identity are equal, so
4740 there is no need to call the target hook for comparison. */
4741 if (!a)
4742 return 1;
4743 }
4744 /* As some type combinations - like the default calling convention - might
4745 still be compatible, we have to call the target hook to get the final result. */
4746 return targetm.comp_type_attributes (type1, type2);
4747 }
4748
4749 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4750 is ATTRIBUTE.
4751
4752 Record such modified types already made so we don't make duplicates. */
4753
4754 tree
4755 build_type_attribute_variant (tree ttype, tree attribute)
4756 {
4757 return build_type_attribute_qual_variant (ttype, attribute,
4758 TYPE_QUALS (ttype));
4759 }
4760
4761
4762 /* Reset the expression *EXPR_P, a size or position.
4763
4764 ??? We could reset all non-constant sizes or positions. But it's cheap
4765 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4766
4767 We need to reset self-referential sizes or positions because they cannot
4768 be gimplified and thus can contain a CALL_EXPR after the gimplification
4769 is finished, which will run afoul of LTO streaming. And they need to be
4770 reset to something essentially dummy but not constant, so as to preserve
4771 the properties of the object they are attached to. */
4772
4773 static inline void
4774 free_lang_data_in_one_sizepos (tree *expr_p)
4775 {
4776 tree expr = *expr_p;
4777 if (CONTAINS_PLACEHOLDER_P (expr))
4778 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4779 }
4780
4781
4782 /* Reset all the fields in a binfo node BINFO. We only keep
4783 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4784
4785 static void
4786 free_lang_data_in_binfo (tree binfo)
4787 {
4788 unsigned i;
4789 tree t;
4790
4791 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4792
4793 BINFO_VIRTUALS (binfo) = NULL_TREE;
4794 BINFO_BASE_ACCESSES (binfo) = NULL;
4795 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4796 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4797
4798 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4799 free_lang_data_in_binfo (t);
4800 }
4801
4802
4803 /* Reset all language specific information still present in TYPE. */
4804
4805 static void
4806 free_lang_data_in_type (tree type)
4807 {
4808 gcc_assert (TYPE_P (type));
4809
4810 /* Give the FE a chance to remove its own data first. */
4811 lang_hooks.free_lang_data (type);
4812
4813 TREE_LANG_FLAG_0 (type) = 0;
4814 TREE_LANG_FLAG_1 (type) = 0;
4815 TREE_LANG_FLAG_2 (type) = 0;
4816 TREE_LANG_FLAG_3 (type) = 0;
4817 TREE_LANG_FLAG_4 (type) = 0;
4818 TREE_LANG_FLAG_5 (type) = 0;
4819 TREE_LANG_FLAG_6 (type) = 0;
4820
4821 if (TREE_CODE (type) == FUNCTION_TYPE)
4822 {
4823 /* Remove the const and volatile qualifiers from arguments. The
4824 C++ front end removes them, but the C front end does not,
4825 leading to false ODR violation errors when merging two
4826 instances of the same function signature compiled by
4827 different front ends. */
4828 tree p;
4829
4830 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
4831 {
4832 tree arg_type = TREE_VALUE (p);
4833
4834 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
4835 {
4836 int quals = TYPE_QUALS (arg_type)
4837 & ~TYPE_QUAL_CONST
4838 & ~TYPE_QUAL_VOLATILE;
4839 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
4840 free_lang_data_in_type (TREE_VALUE (p));
4841 }
4842 }
4843 }
4844
4845 /* Remove members that are not actually FIELD_DECLs from the field
4846 list of an aggregate. These occur in C++. */
4847 if (RECORD_OR_UNION_TYPE_P (type))
4848 {
4849 tree prev, member;
4850
4851 /* Note that TYPE_FIELDS can be shared across distinct
4852 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
4853 to be removed, we cannot set its TREE_CHAIN to NULL.
4854 Otherwise, we would not be able to find all the other fields
4855 in the other instances of this TREE_TYPE.
4856
4857 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
4858 prev = NULL_TREE;
4859 member = TYPE_FIELDS (type);
4860 while (member)
4861 {
4862 if (TREE_CODE (member) == FIELD_DECL
4863 || TREE_CODE (member) == TYPE_DECL)
4864 {
4865 if (prev)
4866 TREE_CHAIN (prev) = member;
4867 else
4868 TYPE_FIELDS (type) = member;
4869 prev = member;
4870 }
4871
4872 member = TREE_CHAIN (member);
4873 }
4874
4875 if (prev)
4876 TREE_CHAIN (prev) = NULL_TREE;
4877 else
4878 TYPE_FIELDS (type) = NULL_TREE;
4879
4880 TYPE_METHODS (type) = NULL_TREE;
4881 if (TYPE_BINFO (type))
4882 free_lang_data_in_binfo (TYPE_BINFO (type));
4883 }
4884 else
4885 {
4886 /* For non-aggregate types, clear out the language slot (which
4887 overloads TYPE_BINFO). */
4888 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
4889
4890 if (INTEGRAL_TYPE_P (type)
4891 || SCALAR_FLOAT_TYPE_P (type)
4892 || FIXED_POINT_TYPE_P (type))
4893 {
4894 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
4895 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
4896 }
4897 }
4898
4899 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
4900 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
4901
4902 if (TYPE_CONTEXT (type)
4903 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
4904 {
4905 tree ctx = TYPE_CONTEXT (type);
4906 do
4907 {
4908 ctx = BLOCK_SUPERCONTEXT (ctx);
4909 }
4910 while (ctx && TREE_CODE (ctx) == BLOCK);
4911 TYPE_CONTEXT (type) = ctx;
4912 }
4913 }
4914
4915
4916 /* Return true if DECL may need an assembler name to be set. */
4917
4918 static inline bool
4919 need_assembler_name_p (tree decl)
4920 {
4921 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
4922 if (TREE_CODE (decl) != FUNCTION_DECL
4923 && TREE_CODE (decl) != VAR_DECL)
4924 return false;
4925
4926 /* If DECL already has its assembler name set, it does not need a
4927 new one. */
4928 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
4929 || DECL_ASSEMBLER_NAME_SET_P (decl))
4930 return false;
4931
4932 /* Abstract decls do not need an assembler name. */
4933 if (DECL_ABSTRACT (decl))
4934 return false;
4935
4936 /* For VAR_DECLs, only static, public and external symbols need an
4937 assembler name. */
4938 if (TREE_CODE (decl) == VAR_DECL
4939 && !TREE_STATIC (decl)
4940 && !TREE_PUBLIC (decl)
4941 && !DECL_EXTERNAL (decl))
4942 return false;
4943
4944 if (TREE_CODE (decl) == FUNCTION_DECL)
4945 {
4946 /* Do not set assembler name on builtins. Allow RTL expansion to
4947 decide whether to expand inline or via a regular call. */
4948 if (DECL_BUILT_IN (decl)
4949 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
4950 return false;
4951
4952 /* Functions represented in the callgraph need an assembler name. */
4953 if (cgraph_get_node (decl) != NULL)
4954 return true;
4955
4956 /* Unused and not public functions don't need an assembler name. */
4957 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
4958 return false;
4959 }
4960
4961 return true;
4962 }
4963
4964
4965 /* Reset all language specific information still present in symbol
4966 DECL. */
4967
4968 static void
4969 free_lang_data_in_decl (tree decl)
4970 {
4971 gcc_assert (DECL_P (decl));
4972
4973 /* Give the FE a chance to remove its own data first. */
4974 lang_hooks.free_lang_data (decl);
4975
4976 TREE_LANG_FLAG_0 (decl) = 0;
4977 TREE_LANG_FLAG_1 (decl) = 0;
4978 TREE_LANG_FLAG_2 (decl) = 0;
4979 TREE_LANG_FLAG_3 (decl) = 0;
4980 TREE_LANG_FLAG_4 (decl) = 0;
4981 TREE_LANG_FLAG_5 (decl) = 0;
4982 TREE_LANG_FLAG_6 (decl) = 0;
4983
4984 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
4985 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
4986 if (TREE_CODE (decl) == FIELD_DECL)
4987 {
4988 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
4989 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
4990 DECL_QUALIFIER (decl) = NULL_TREE;
4991 }
4992
4993 if (TREE_CODE (decl) == FUNCTION_DECL)
4994 {
4995 struct cgraph_node *node;
4996 if (!(node = cgraph_get_node (decl))
4997 || (!node->definition && !node->clones))
4998 {
4999 if (node)
5000 cgraph_release_function_body (node);
5001 else
5002 {
5003 release_function_body (decl);
5004 DECL_ARGUMENTS (decl) = NULL;
5005 DECL_RESULT (decl) = NULL;
5006 DECL_INITIAL (decl) = error_mark_node;
5007 }
5008 }
5009 if (gimple_has_body_p (decl))
5010 {
5011 tree t;
5012
5013 /* If DECL has a gimple body, then the context for its
5014 arguments must be DECL. Otherwise, it doesn't really
5015 matter, as we will not be emitting any code for DECL. In
5016 general, there may be other instances of DECL created by
5017 the front end and since PARM_DECLs are generally shared,
5018 their DECL_CONTEXT changes as the replicas of DECL are
5019 created. The only time where DECL_CONTEXT is important
5020 is for the FUNCTION_DECLs that have a gimple body (since
5021 the PARM_DECL will be used in the function's body). */
5022 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5023 DECL_CONTEXT (t) = decl;
5024 }
5025
5026 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5027 At this point, it is not needed anymore. */
5028 DECL_SAVED_TREE (decl) = NULL_TREE;
5029
5030 /* Clear the abstract origin if it refers to a method. Otherwise
5031 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5032 origin will not be output correctly. */
5033 if (DECL_ABSTRACT_ORIGIN (decl)
5034 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5035 && RECORD_OR_UNION_TYPE_P
5036 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5037 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5038
5039 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5040 DECL_VINDEX referring to itself into a vtable slot number as it
5041 should. Happens with functions that are copied and then forgotten
5042 about. Just clear it, it won't matter anymore. */
5043 if (DECL_VINDEX (decl) && !host_integerp (DECL_VINDEX (decl), 0))
5044 DECL_VINDEX (decl) = NULL_TREE;
5045 }
5046 else if (TREE_CODE (decl) == VAR_DECL)
5047 {
5048 if ((DECL_EXTERNAL (decl)
5049 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5050 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5051 DECL_INITIAL (decl) = NULL_TREE;
5052 }
5053 else if (TREE_CODE (decl) == TYPE_DECL
5054 || TREE_CODE (decl) == FIELD_DECL)
5055 DECL_INITIAL (decl) = NULL_TREE;
5056 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5057 && DECL_INITIAL (decl)
5058 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5059 {
5060 /* Strip builtins from the translation-unit BLOCK. We still have targets
5061 without builtin_decl_explicit support, and builtins are shared nodes,
5062 so we cannot use their TREE_CHAIN in multiple lists. */
5063 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5064 while (*nextp)
5065 {
5066 tree var = *nextp;
5067 if (TREE_CODE (var) == FUNCTION_DECL
5068 && DECL_BUILT_IN (var))
5069 *nextp = TREE_CHAIN (var);
5070 else
5071 nextp = &TREE_CHAIN (var);
5072 }
5073 }
5074 }
5075
5076
5077 /* Data used when collecting DECLs and TYPEs for language data removal. */
5078
5079 struct free_lang_data_d
5080 {
5081 /* Worklist to avoid excessive recursion. */
5082 vec<tree> worklist;
5083
5084 /* Set of traversed objects. Used to avoid duplicate visits. */
5085 struct pointer_set_t *pset;
5086
5087 /* Array of symbols to process with free_lang_data_in_decl. */
5088 vec<tree> decls;
5089
5090 /* Array of types to process with free_lang_data_in_type. */
5091 vec<tree> types;
5092 };
5093
5094
5095 /* Save all language fields needed to generate proper debug information
5096 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5097
5098 static void
5099 save_debug_info_for_decl (tree t)
5100 {
5101 /*struct saved_debug_info_d *sdi;*/
5102
5103 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5104
5105 /* FIXME. Partial implementation for saving debug info removed. */
5106 }
5107
5108
5109 /* Save all language fields needed to generate proper debug information
5110 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5111
5112 static void
5113 save_debug_info_for_type (tree t)
5114 {
5115 /*struct saved_debug_info_d *sdi;*/
5116
5117 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5118
5119 /* FIXME. Partial implementation for saving debug info removed. */
5120 }
5121
5122
5123 /* Add type or decl T to one of the list of tree nodes that need their
5124 language data removed. The lists are held inside FLD. */
5125
5126 static void
5127 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5128 {
5129 if (DECL_P (t))
5130 {
5131 fld->decls.safe_push (t);
5132 if (debug_info_level > DINFO_LEVEL_TERSE)
5133 save_debug_info_for_decl (t);
5134 }
5135 else if (TYPE_P (t))
5136 {
5137 fld->types.safe_push (t);
5138 if (debug_info_level > DINFO_LEVEL_TERSE)
5139 save_debug_info_for_type (t);
5140 }
5141 else
5142 gcc_unreachable ();
5143 }
5144
5145 /* Push tree node T into FLD->WORKLIST. */
5146
5147 static inline void
5148 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5149 {
5150 if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
5151 fld->worklist.safe_push ((t));
5152 }
5153
5154
5155 /* Operand callback helper for free_lang_data_in_node. *TP is the
5156 subtree operand being considered. */
5157
5158 static tree
5159 find_decls_types_r (tree *tp, int *ws, void *data)
5160 {
5161 tree t = *tp;
5162 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5163
5164 if (TREE_CODE (t) == TREE_LIST)
5165 return NULL_TREE;
5166
5167 /* Language specific nodes will be removed, so there is no need
5168 to gather anything under them. */
5169 if (is_lang_specific (t))
5170 {
5171 *ws = 0;
5172 return NULL_TREE;
5173 }
5174
5175 if (DECL_P (t))
5176 {
5177 /* Note that walk_tree does not traverse every possible field in
5178 decls, so we have to do our own traversals here. */
5179 add_tree_to_fld_list (t, fld);
5180
5181 fld_worklist_push (DECL_NAME (t), fld);
5182 fld_worklist_push (DECL_CONTEXT (t), fld);
5183 fld_worklist_push (DECL_SIZE (t), fld);
5184 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5185
5186 /* We are going to remove everything under DECL_INITIAL for
5187 TYPE_DECLs. No point walking them. */
5188 if (TREE_CODE (t) != TYPE_DECL)
5189 fld_worklist_push (DECL_INITIAL (t), fld);
5190
5191 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5192 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5193
5194 if (TREE_CODE (t) == FUNCTION_DECL)
5195 {
5196 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5197 fld_worklist_push (DECL_RESULT (t), fld);
5198 }
5199 else if (TREE_CODE (t) == TYPE_DECL)
5200 {
5201 fld_worklist_push (DECL_ARGUMENT_FLD (t), fld);
5202 fld_worklist_push (DECL_VINDEX (t), fld);
5203 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5204 }
5205 else if (TREE_CODE (t) == FIELD_DECL)
5206 {
5207 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5208 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5209 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5210 fld_worklist_push (DECL_FCONTEXT (t), fld);
5211 }
5212 else if (TREE_CODE (t) == VAR_DECL)
5213 {
5214 fld_worklist_push (DECL_SECTION_NAME (t), fld);
5215 fld_worklist_push (DECL_COMDAT_GROUP (t), fld);
5216 }
5217
5218 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5219 && DECL_HAS_VALUE_EXPR_P (t))
5220 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5221
5222 if (TREE_CODE (t) != FIELD_DECL
5223 && TREE_CODE (t) != TYPE_DECL)
5224 fld_worklist_push (TREE_CHAIN (t), fld);
5225 *ws = 0;
5226 }
5227 else if (TYPE_P (t))
5228 {
5229 /* Note that walk_tree does not traverse every possible field in
5230 types, so we have to do our own traversals here. */
5231 add_tree_to_fld_list (t, fld);
5232
5233 if (!RECORD_OR_UNION_TYPE_P (t))
5234 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5235 fld_worklist_push (TYPE_SIZE (t), fld);
5236 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5237 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5238 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5239 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5240 fld_worklist_push (TYPE_NAME (t), fld);
5241 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5242 them and thus do not want to reach unused pointer types
5243 this way. */
5244 if (!POINTER_TYPE_P (t))
5245 fld_worklist_push (TYPE_MINVAL (t), fld);
5246 if (!RECORD_OR_UNION_TYPE_P (t))
5247 fld_worklist_push (TYPE_MAXVAL (t), fld);
5248 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5249 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5250 do not want to reach unused variants this way. */
5251 if (TYPE_CONTEXT (t))
5252 {
5253 tree ctx = TYPE_CONTEXT (t);
5254 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5255 So push that instead. */
5256 while (ctx && TREE_CODE (ctx) == BLOCK)
5257 ctx = BLOCK_SUPERCONTEXT (ctx);
5258 fld_worklist_push (ctx, fld);
5259 }
5260 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5261 want to reach unused types this way. */
5262
5263 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5264 {
5265 unsigned i;
5266 tree tem;
5267 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5268 fld_worklist_push (TREE_TYPE (tem), fld);
5269 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5270 if (tem
5271 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5272 && TREE_CODE (tem) == TREE_LIST)
5273 do
5274 {
5275 fld_worklist_push (TREE_VALUE (tem), fld);
5276 tem = TREE_CHAIN (tem);
5277 }
5278 while (tem);
5279 }
5280 if (RECORD_OR_UNION_TYPE_P (t))
5281 {
5282 tree tem;
5283 /* Push all TYPE_FIELDS - there can be interleaving interesting
5284 and non-interesting things. */
5285 tem = TYPE_FIELDS (t);
5286 while (tem)
5287 {
5288 if (TREE_CODE (tem) == FIELD_DECL
5289 || TREE_CODE (tem) == TYPE_DECL)
5290 fld_worklist_push (tem, fld);
5291 tem = TREE_CHAIN (tem);
5292 }
5293 }
5294
5295 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5296 *ws = 0;
5297 }
5298 else if (TREE_CODE (t) == BLOCK)
5299 {
5300 tree tem;
5301 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5302 fld_worklist_push (tem, fld);
5303 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5304 fld_worklist_push (tem, fld);
5305 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5306 }
5307
5308 if (TREE_CODE (t) != IDENTIFIER_NODE
5309 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5310 fld_worklist_push (TREE_TYPE (t), fld);
5311
5312 return NULL_TREE;
5313 }
5314
5315
5316 /* Find decls and types in T. */
5317
5318 static void
5319 find_decls_types (tree t, struct free_lang_data_d *fld)
5320 {
5321 while (1)
5322 {
5323 if (!pointer_set_contains (fld->pset, t))
5324 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5325 if (fld->worklist.is_empty ())
5326 break;
5327 t = fld->worklist.pop ();
5328 }
5329 }
5330
5331 /* Translate all the types in LIST with the corresponding runtime
5332 types. */
5333
5334 static tree
5335 get_eh_types_for_runtime (tree list)
5336 {
5337 tree head, prev;
5338
5339 if (list == NULL_TREE)
5340 return NULL_TREE;
5341
5342 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5343 prev = head;
5344 list = TREE_CHAIN (list);
5345 while (list)
5346 {
5347 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5348 TREE_CHAIN (prev) = n;
5349 prev = TREE_CHAIN (prev);
5350 list = TREE_CHAIN (list);
5351 }
5352
5353 return head;
5354 }
5355
5356
5357 /* Find decls and types referenced in EH region R and store them in
5358 FLD->DECLS and FLD->TYPES. */
5359
5360 static void
5361 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5362 {
5363 switch (r->type)
5364 {
5365 case ERT_CLEANUP:
5366 break;
5367
5368 case ERT_TRY:
5369 {
5370 eh_catch c;
5371
5372 /* The types referenced in each catch must first be changed to the
5373 EH types used at runtime. This removes references to FE types
5374 in the region. */
5375 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5376 {
5377 c->type_list = get_eh_types_for_runtime (c->type_list);
5378 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5379 }
5380 }
5381 break;
5382
5383 case ERT_ALLOWED_EXCEPTIONS:
5384 r->u.allowed.type_list
5385 = get_eh_types_for_runtime (r->u.allowed.type_list);
5386 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5387 break;
5388
5389 case ERT_MUST_NOT_THROW:
5390 walk_tree (&r->u.must_not_throw.failure_decl,
5391 find_decls_types_r, fld, fld->pset);
5392 break;
5393 }
5394 }
5395
5396
5397 /* Find decls and types referenced in cgraph node N and store them in
5398 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5399 look for *every* kind of DECL and TYPE node reachable from N,
5400 including those embedded inside types and decls (i.e., TYPE_DECLs,
5401 NAMESPACE_DECLs, etc). */
5402
5403 static void
5404 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5405 {
5406 basic_block bb;
5407 struct function *fn;
5408 unsigned ix;
5409 tree t;
5410
5411 find_decls_types (n->decl, fld);
5412
5413 if (!gimple_has_body_p (n->decl))
5414 return;
5415
5416 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5417
5418 fn = DECL_STRUCT_FUNCTION (n->decl);
5419
5420 /* Traverse locals. */
5421 FOR_EACH_LOCAL_DECL (fn, ix, t)
5422 find_decls_types (t, fld);
5423
5424 /* Traverse EH regions in FN. */
5425 {
5426 eh_region r;
5427 FOR_ALL_EH_REGION_FN (r, fn)
5428 find_decls_types_in_eh_region (r, fld);
5429 }
5430
5431 /* Traverse every statement in FN. */
5432 FOR_EACH_BB_FN (bb, fn)
5433 {
5434 gimple_stmt_iterator si;
5435 unsigned i;
5436
5437 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
5438 {
5439 gimple phi = gsi_stmt (si);
5440
5441 for (i = 0; i < gimple_phi_num_args (phi); i++)
5442 {
5443 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5444 find_decls_types (*arg_p, fld);
5445 }
5446 }
5447
5448 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5449 {
5450 gimple stmt = gsi_stmt (si);
5451
5452 if (is_gimple_call (stmt))
5453 find_decls_types (gimple_call_fntype (stmt), fld);
5454
5455 for (i = 0; i < gimple_num_ops (stmt); i++)
5456 {
5457 tree arg = gimple_op (stmt, i);
5458 find_decls_types (arg, fld);
5459 }
5460 }
5461 }
5462 }
5463
5464
5465 /* Find decls and types referenced in varpool node N and store them in
5466 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5467 look for *every* kind of DECL and TYPE node reachable from N,
5468 including those embedded inside types and decls (i.e., TYPE_DECLs,
5469 NAMESPACE_DECLs, etc). */
5470
5471 static void
5472 find_decls_types_in_var (struct varpool_node *v, struct free_lang_data_d *fld)
5473 {
5474 find_decls_types (v->decl, fld);
5475 }
5476
5477 /* If T needs an assembler name, have one created for it. */
5478
5479 void
5480 assign_assembler_name_if_neeeded (tree t)
5481 {
5482 if (need_assembler_name_p (t))
5483 {
5484 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5485 diagnostics that use input_location to show locus
5486 information. The problem here is that, at this point,
5487 input_location is generally anchored to the end of the file
5488 (since the parser is long gone), so we don't have a good
5489 position to pin it to.
5490
5491 To alleviate this problem, this uses the location of T's
5492 declaration. Examples of this are
5493 testsuite/g++.dg/template/cond2.C and
5494 testsuite/g++.dg/template/pr35240.C. */
5495 location_t saved_location = input_location;
5496 input_location = DECL_SOURCE_LOCATION (t);
5497
5498 decl_assembler_name (t);
5499
5500 input_location = saved_location;
5501 }
5502 }
5503
5504
5505 /* Free language specific information for every operand and expression
5506 in every node of the call graph. This process operates in three stages:
5507
5508 1- Every callgraph node and varpool node is traversed looking for
5509 decls and types embedded in them. This is a more exhaustive
5510 search than that done by find_referenced_vars, because it will
5511 also collect individual fields, decls embedded in types, etc.
5512
5513 2- All the decls found are sent to free_lang_data_in_decl.
5514
5515 3- All the types found are sent to free_lang_data_in_type.
5516
5517 The ordering between decls and types is important because
5518 free_lang_data_in_decl sets assembler names, which includes
5519 mangling. So types cannot be freed up until assembler names have
5520 been set up. */
5521
5522 static void
5523 free_lang_data_in_cgraph (void)
5524 {
5525 struct cgraph_node *n;
5526 struct varpool_node *v;
5527 struct free_lang_data_d fld;
5528 tree t;
5529 unsigned i;
5530 alias_pair *p;
5531
5532 /* Initialize sets and arrays to store referenced decls and types. */
5533 fld.pset = pointer_set_create ();
5534 fld.worklist.create (0);
5535 fld.decls.create (100);
5536 fld.types.create (100);
5537
5538 /* Find decls and types in the body of every function in the callgraph. */
5539 FOR_EACH_FUNCTION (n)
5540 find_decls_types_in_node (n, &fld);
5541
5542 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5543 find_decls_types (p->decl, &fld);
5544
5545 /* Find decls and types in every varpool symbol. */
5546 FOR_EACH_VARIABLE (v)
5547 find_decls_types_in_var (v, &fld);
5548
5549 /* Set the assembler name on every decl found. We need to do this
5550 now because free_lang_data_in_decl will invalidate data needed
5551 for mangling. This breaks mangling on interdependent decls. */
5552 FOR_EACH_VEC_ELT (fld.decls, i, t)
5553 assign_assembler_name_if_neeeded (t);
5554
5555 /* Traverse every decl found freeing its language data. */
5556 FOR_EACH_VEC_ELT (fld.decls, i, t)
5557 free_lang_data_in_decl (t);
5558
5559 /* Traverse every type found freeing its language data. */
5560 FOR_EACH_VEC_ELT (fld.types, i, t)
5561 free_lang_data_in_type (t);
5562
5563 pointer_set_destroy (fld.pset);
5564 fld.worklist.release ();
5565 fld.decls.release ();
5566 fld.types.release ();
5567 }
5568
5569
5570 /* Free resources that are used by the FE but are not needed once it is done. */
5571
5572 static unsigned
5573 free_lang_data (void)
5574 {
5575 unsigned i;
5576
5577 /* If we are the LTO frontend we have freed lang-specific data already. */
5578 if (in_lto_p
5579 || !flag_generate_lto)
5580 return 0;
5581
5582 /* Allocate and assign alias sets to the standard integer types
5583 while the slots are still in the way the frontends generated them. */
5584 for (i = 0; i < itk_none; ++i)
5585 if (integer_types[i])
5586 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5587
5588 /* Traverse the IL resetting language specific information for
5589 operands, expressions, etc. */
5590 free_lang_data_in_cgraph ();
5591
5592 /* Create gimple variants for common types. */
5593 ptrdiff_type_node = integer_type_node;
5594 fileptr_type_node = ptr_type_node;
5595
5596 /* Reset some langhooks. Do not reset types_compatible_p, it may
5597 still be used indirectly via the get_alias_set langhook. */
5598 lang_hooks.dwarf_name = lhd_dwarf_name;
5599 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5600 /* We do not want the default decl_assembler_name implementation,
5601 rather if we have fixed everything we want a wrapper around it
5602 asserting that all non-local symbols already got their assembler
5603 name and only produce assembler names for local symbols. Or rather
5604 make sure we never call decl_assembler_name on local symbols and
5605 devise a separate, middle-end private scheme for it. */
5606
5607 /* Reset diagnostic machinery. */
5608 tree_diagnostics_defaults (global_dc);
5609
5610 return 0;
5611 }
5612
5613
5614 namespace {
5615
5616 const pass_data pass_data_ipa_free_lang_data =
5617 {
5618 SIMPLE_IPA_PASS, /* type */
5619 "*free_lang_data", /* name */
5620 OPTGROUP_NONE, /* optinfo_flags */
5621 false, /* has_gate */
5622 true, /* has_execute */
5623 TV_IPA_FREE_LANG_DATA, /* tv_id */
5624 0, /* properties_required */
5625 0, /* properties_provided */
5626 0, /* properties_destroyed */
5627 0, /* todo_flags_start */
5628 0, /* todo_flags_finish */
5629 };
5630
5631 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5632 {
5633 public:
5634 pass_ipa_free_lang_data (gcc::context *ctxt)
5635 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5636 {}
5637
5638 /* opt_pass methods: */
5639 unsigned int execute () { return free_lang_data (); }
5640
5641 }; // class pass_ipa_free_lang_data
5642
5643 } // anon namespace
5644
5645 simple_ipa_opt_pass *
5646 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5647 {
5648 return new pass_ipa_free_lang_data (ctxt);
5649 }
5650
5651 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5652 ATTR_NAME. Also used internally by remove_attribute(). */
5653 bool
5654 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5655 {
5656 size_t ident_len = IDENTIFIER_LENGTH (ident);
5657
5658 if (ident_len == attr_len)
5659 {
5660 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5661 return true;
5662 }
5663 else if (ident_len == attr_len + 4)
5664 {
5665 /* There is the possibility that ATTR is 'text' and IDENT is
5666 '__text__'. */
5667 const char *p = IDENTIFIER_POINTER (ident);
5668 if (p[0] == '_' && p[1] == '_'
5669 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5670 && strncmp (attr_name, p + 2, attr_len) == 0)
5671 return true;
5672 }
5673
5674 return false;
5675 }
5676
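/* Usage sketch (illustrative): both spellings of an attribute name are
   accepted, so

     private_is_attribute_p ("noreturn", strlen ("noreturn"),
                             get_identifier ("__noreturn__"))

   returns true, as does the same call with get_identifier ("noreturn").
   The is_attribute_p wrapper supplies the strlen for you.  */
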
5677 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5678 of ATTR_NAME, and LIST is not NULL_TREE. */
5679 tree
5680 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5681 {
5682 while (list)
5683 {
5684 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5685
5686 if (ident_len == attr_len)
5687 {
5688 if (!strcmp (attr_name,
5689 IDENTIFIER_POINTER (get_attribute_name (list))))
5690 break;
5691 }
5692 /* TODO: If we made sure that attributes were stored in the
5693 canonical form without '__...__' (ie, as in 'text' as opposed
5694 to '__text__') then we could avoid the following case. */
5695 else if (ident_len == attr_len + 4)
5696 {
5697 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5698 if (p[0] == '_' && p[1] == '_'
5699 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5700 && strncmp (attr_name, p + 2, attr_len) == 0)
5701 break;
5702 }
5703 list = TREE_CHAIN (list);
5704 }
5705
5706 return list;
5707 }
5708
5709 /* A variant of lookup_attribute() that can be used with an identifier
5710 as the first argument, and where the identifier can be either
5711 'text' or '__text__'.
5712
5713 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5714 return a pointer to the attribute's list element if the attribute
5715 is part of the list, or NULL_TREE if not found. If the attribute
5716 appears more than once, this only returns the first occurrence; the
5717 TREE_CHAIN of the return value should be passed back in if further
5718 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5719 can be in the form 'text' or '__text__'. */
5720 static tree
5721 lookup_ident_attribute (tree attr_identifier, tree list)
5722 {
5723 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5724
5725 while (list)
5726 {
5727 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5728 == IDENTIFIER_NODE);
5729
5730 /* Identifiers can be compared directly for equality. */
5731 if (attr_identifier == get_attribute_name (list))
5732 break;
5733
5734 /* If they are not equal, they may still be one in the form
5735 'text' while the other one is in the form '__text__'. TODO:
5736 If we were storing attributes in normalized 'text' form, then
5737 this could all go away and we could take full advantage of
5738 the fact that we're comparing identifiers. :-) */
5739 {
5740 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5741 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5742
5743 if (ident_len == attr_len + 4)
5744 {
5745 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5746 const char *q = IDENTIFIER_POINTER (attr_identifier);
5747 if (p[0] == '_' && p[1] == '_'
5748 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5749 && strncmp (q, p + 2, attr_len) == 0)
5750 break;
5751 }
5752 else if (ident_len + 4 == attr_len)
5753 {
5754 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5755 const char *q = IDENTIFIER_POINTER (attr_identifier);
5756 if (q[0] == '_' && q[1] == '_'
5757 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
5758 && strncmp (q + 2, p, ident_len) == 0)
5759 break;
5760 }
5761 }
5762 list = TREE_CHAIN (list);
5763 }
5764
5765 return list;
5766 }
5767
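/* Usage sketch (illustrative; "my_attr" and process () are hypothetical,
   DECL is an existing decl).  Walking every occurrence of a possibly
   repeated attribute uses the "pass the TREE_CHAIN back in" idiom
   described above:

     tree attr;
     for (attr = lookup_attribute ("my_attr", DECL_ATTRIBUTES (decl));
          attr;
          attr = lookup_attribute ("my_attr", TREE_CHAIN (attr)))
       process (TREE_VALUE (attr));  */
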
5768 /* Remove any instances of attribute ATTR_NAME in LIST and return the
5769 modified list. */
5770
5771 tree
5772 remove_attribute (const char *attr_name, tree list)
5773 {
5774 tree *p;
5775 size_t attr_len = strlen (attr_name);
5776
5777 gcc_checking_assert (attr_name[0] != '_');
5778
5779 for (p = &list; *p; )
5780 {
5781 tree l = *p;
5782 /* TODO: If we were storing attributes in normalized form, here
5783 we could use a simple strcmp(). */
5784 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
5785 *p = TREE_CHAIN (l);
5786 else
5787 p = &TREE_CHAIN (l);
5788 }
5789
5790 return list;
5791 }
5792
5793 /* Return an attribute list that is the union of a1 and a2. */
5794
5795 tree
5796 merge_attributes (tree a1, tree a2)
5797 {
5798 tree attributes;
5799
5800 /* Either one unset? Take the set one. */
5801
5802 if ((attributes = a1) == 0)
5803 attributes = a2;
5804
5805 /* One that completely contains the other? Take it. */
5806
5807 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
5808 {
5809 if (attribute_list_contained (a2, a1))
5810 attributes = a2;
5811 else
5812 {
5813 /* Pick the longest list, and hang on the other list. */
5814
5815 if (list_length (a1) < list_length (a2))
5816 attributes = a2, a2 = a1;
5817
5818 for (; a2 != 0; a2 = TREE_CHAIN (a2))
5819 {
5820 tree a;
5821 for (a = lookup_ident_attribute (get_attribute_name (a2),
5822 attributes);
5823 a != NULL_TREE && !attribute_value_equal (a, a2);
5824 a = lookup_ident_attribute (get_attribute_name (a2),
5825 TREE_CHAIN (a)))
5826 ;
5827 if (a == NULL_TREE)
5828 {
5829 a1 = copy_node (a2);
5830 TREE_CHAIN (a1) = attributes;
5831 attributes = a1;
5832 }
5833 }
5834 }
5835 }
5836 return attributes;
5837 }
5838
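/* Usage sketch (illustrative; "my_a" and "my_b" are made-up attribute
   names):

     tree a1 = tree_cons (get_identifier ("my_a"), NULL_TREE, NULL_TREE);
     tree a2 = tree_cons (get_identifier ("my_b"), NULL_TREE,
                          tree_cons (get_identifier ("my_a"), NULL_TREE,
                                     NULL_TREE));
     tree merged = merge_attributes (a1, a2);

   MERGED contains one "my_a" and one "my_b" entry; an attribute already
   present with an equal value is not added twice.  */
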
5839 /* Given types T1 and T2, merge their attributes and return
5840 the result. */
5841
5842 tree
5843 merge_type_attributes (tree t1, tree t2)
5844 {
5845 return merge_attributes (TYPE_ATTRIBUTES (t1),
5846 TYPE_ATTRIBUTES (t2));
5847 }
5848
5849 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
5850 the result. */
5851
5852 tree
5853 merge_decl_attributes (tree olddecl, tree newdecl)
5854 {
5855 return merge_attributes (DECL_ATTRIBUTES (olddecl),
5856 DECL_ATTRIBUTES (newdecl));
5857 }
5858
5859 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
5860
5861 /* Specialization of merge_decl_attributes for various Windows targets.
5862
5863 This handles the following situation:
5864
5865 __declspec (dllimport) int foo;
5866 int foo;
5867
5868 The second instance of `foo' nullifies the dllimport. */
5869
5870 tree
5871 merge_dllimport_decl_attributes (tree old, tree new_tree)
5872 {
5873 tree a;
5874 int delete_dllimport_p = 1;
5875
5876 /* What we need to do here is remove from `old' dllimport if it doesn't
5877 appear in `new'. dllimport behaves like extern: if a declaration is
5878 marked dllimport and a definition appears later, then the object
5879 is not dllimport'd. We also remove a `new' dllimport if the old list
5880 contains dllexport: dllexport always overrides dllimport, regardless
5881 of the order of declaration. */
5882 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
5883 delete_dllimport_p = 0;
5884 else if (DECL_DLLIMPORT_P (new_tree)
5885 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
5886 {
5887 DECL_DLLIMPORT_P (new_tree) = 0;
5888 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
5889 "dllimport ignored", new_tree);
5890 }
5891 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
5892 {
5893 /* Warn about overriding a symbol that has already been used, e.g.:
5894 extern int __attribute__ ((dllimport)) foo;
5895 int* bar () {return &foo;}
5896 int foo;
5897 */
5898 if (TREE_USED (old))
5899 {
5900 warning (0, "%q+D redeclared without dllimport attribute "
5901 "after being referenced with dll linkage", new_tree);
5902 /* If we have used a variable's address with dllimport linkage,
5903 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
5904 decl may already have had TREE_CONSTANT computed.
5905 We still remove the attribute so that assembler code refers
5906 		 to '&foo' rather than '_imp__foo'.  */
5907 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
5908 DECL_DLLIMPORT_P (new_tree) = 1;
5909 }
5910
5911 /* Let an inline definition silently override the external reference,
5912 but otherwise warn about attribute inconsistency. */
5913 else if (TREE_CODE (new_tree) == VAR_DECL
5914 || !DECL_DECLARED_INLINE_P (new_tree))
5915 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
5916 "previous dllimport ignored", new_tree);
5917 }
5918 else
5919 delete_dllimport_p = 0;
5920
5921 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
5922
5923 if (delete_dllimport_p)
5924 a = remove_attribute ("dllimport", a);
5925
5926 return a;
5927 }
5928
5929 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
5930 struct attribute_spec.handler. */
5931
5932 tree
5933 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
5934 bool *no_add_attrs)
5935 {
5936 tree node = *pnode;
5937 bool is_dllimport;
5938
5939 /* These attributes may apply to structure and union types being created,
5940 but otherwise should pass to the declaration involved. */
5941 if (!DECL_P (node))
5942 {
5943 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
5944 | (int) ATTR_FLAG_ARRAY_NEXT))
5945 {
5946 *no_add_attrs = true;
5947 return tree_cons (name, args, NULL_TREE);
5948 }
5949 if (TREE_CODE (node) == RECORD_TYPE
5950 || TREE_CODE (node) == UNION_TYPE)
5951 {
5952 node = TYPE_NAME (node);
5953 if (!node)
5954 return NULL_TREE;
5955 }
5956 else
5957 {
5958 warning (OPT_Wattributes, "%qE attribute ignored",
5959 name);
5960 *no_add_attrs = true;
5961 return NULL_TREE;
5962 }
5963 }
5964
5965 if (TREE_CODE (node) != FUNCTION_DECL
5966 && TREE_CODE (node) != VAR_DECL
5967 && TREE_CODE (node) != TYPE_DECL)
5968 {
5969 *no_add_attrs = true;
5970 warning (OPT_Wattributes, "%qE attribute ignored",
5971 name);
5972 return NULL_TREE;
5973 }
5974
5975 if (TREE_CODE (node) == TYPE_DECL
5976 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
5977 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
5978 {
5979 *no_add_attrs = true;
5980 warning (OPT_Wattributes, "%qE attribute ignored",
5981 name);
5982 return NULL_TREE;
5983 }
5984
5985 is_dllimport = is_attribute_p ("dllimport", name);
5986
5987 /* Report error on dllimport ambiguities seen now before they cause
5988 any damage. */
5989 if (is_dllimport)
5990 {
5991 /* Honor any target-specific overrides. */
5992 if (!targetm.valid_dllimport_attribute_p (node))
5993 *no_add_attrs = true;
5994
5995 else if (TREE_CODE (node) == FUNCTION_DECL
5996 && DECL_DECLARED_INLINE_P (node))
5997 {
5998 warning (OPT_Wattributes, "inline function %q+D declared as "
5999 		   "dllimport: attribute ignored", node);
6000 *no_add_attrs = true;
6001 }
6002       /* Like MS, treat the definition of a dllimport'd variable or
6003 	 non-inlined function as a syntax error.  */
6004 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6005 {
6006 error ("function %q+D definition is marked dllimport", node);
6007 *no_add_attrs = true;
6008 }
6009
6010 else if (TREE_CODE (node) == VAR_DECL)
6011 {
6012 if (DECL_INITIAL (node))
6013 {
6014 error ("variable %q+D definition is marked dllimport",
6015 node);
6016 *no_add_attrs = true;
6017 }
6018
6019 /* `extern' needn't be specified with dllimport.
6020 Specify `extern' now and hope for the best. Sigh. */
6021 DECL_EXTERNAL (node) = 1;
6022 /* Also, implicitly give dllimport'd variables declared within
6023 a function global scope, unless declared static. */
6024 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6025 TREE_PUBLIC (node) = 1;
6026 }
6027
6028 if (*no_add_attrs == false)
6029 DECL_DLLIMPORT_P (node) = 1;
6030 }
6031 else if (TREE_CODE (node) == FUNCTION_DECL
6032 && DECL_DECLARED_INLINE_P (node)
6033 && flag_keep_inline_dllexport)
6034 /* An exported function, even if inline, must be emitted. */
6035 DECL_EXTERNAL (node) = 0;
6036
6037 /* Report error if symbol is not accessible at global scope. */
6038 if (!TREE_PUBLIC (node)
6039 && (TREE_CODE (node) == VAR_DECL
6040 || TREE_CODE (node) == FUNCTION_DECL))
6041 {
6042 error ("external linkage required for symbol %q+D because of "
6043 "%qE attribute", node, name);
6044 *no_add_attrs = true;
6045 }
6046
6047 /* A dllexport'd entity must have default visibility so that other
6048 program units (shared libraries or the main executable) can see
6049 it. A dllimport'd entity must have default visibility so that
6050 the linker knows that undefined references within this program
6051 unit can be resolved by the dynamic linker. */
6052 if (!*no_add_attrs)
6053 {
6054 if (DECL_VISIBILITY_SPECIFIED (node)
6055 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6056 error ("%qE implies default visibility, but %qD has already "
6057 "been declared with a different visibility",
6058 name, node);
6059 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6060 DECL_VISIBILITY_SPECIFIED (node) = 1;
6061 }
6062
6063 return NULL_TREE;
6064 }
6065
6066 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6067 \f
6068 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6069 of the various TYPE_QUAL values. */
6070
6071 static void
6072 set_type_quals (tree type, int type_quals)
6073 {
6074 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6075 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6076 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6077 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6078 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6079 }
6080
6081 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6082
6083 bool
6084 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6085 {
6086 return (TYPE_QUALS (cand) == type_quals
6087 && TYPE_NAME (cand) == TYPE_NAME (base)
6088 /* Apparently this is needed for Objective-C. */
6089 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6090 /* Check alignment. */
6091 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6092 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6093 TYPE_ATTRIBUTES (base)));
6094 }
6095
6096 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6097
6098 static bool
6099 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6100 {
6101 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6102 && TYPE_NAME (cand) == TYPE_NAME (base)
6103 /* Apparently this is needed for Objective-C. */
6104 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6105 /* Check alignment. */
6106 && TYPE_ALIGN (cand) == align
6107 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6108 TYPE_ATTRIBUTES (base)));
6109 }
6110
6111 /* This function checks to see if TYPE matches the size of one of the built-in
6112 atomic types, and returns that core atomic type. */
6113
6114 static tree
6115 find_atomic_core_type (tree type)
6116 {
6117 tree base_atomic_type;
6118
6119 /* Only handle complete types. */
6120 if (TYPE_SIZE (type) == NULL_TREE)
6121 return NULL_TREE;
6122
6123 HOST_WIDE_INT type_size = tree_low_cst (TYPE_SIZE (type), 1);
6124 switch (type_size)
6125 {
6126 case 8:
6127 base_atomic_type = atomicQI_type_node;
6128 break;
6129
6130 case 16:
6131 base_atomic_type = atomicHI_type_node;
6132 break;
6133
6134 case 32:
6135 base_atomic_type = atomicSI_type_node;
6136 break;
6137
6138 case 64:
6139 base_atomic_type = atomicDI_type_node;
6140 break;
6141
6142 case 128:
6143 base_atomic_type = atomicTI_type_node;
6144 break;
6145
6146 default:
6147 base_atomic_type = NULL_TREE;
6148 }
6149
6150 return base_atomic_type;
6151 }
6152
6153 /* Return a version of the TYPE, qualified as indicated by the
6154 TYPE_QUALS, if one exists. If no qualified version exists yet,
6155 return NULL_TREE. */
6156
6157 tree
6158 get_qualified_type (tree type, int type_quals)
6159 {
6160 tree t;
6161
6162 if (TYPE_QUALS (type) == type_quals)
6163 return type;
6164
6165 /* Search the chain of variants to see if there is already one there just
6166 like the one we need to have. If so, use that existing one. We must
6167 preserve the TYPE_NAME, since there is code that depends on this. */
6168 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6169 if (check_qualified_type (t, type, type_quals))
6170 return t;
6171
6172 return NULL_TREE;
6173 }
6174
6175 /* Like get_qualified_type, but creates the type if it does not
6176 exist. This function never returns NULL_TREE. */
6177
6178 tree
6179 build_qualified_type (tree type, int type_quals)
6180 {
6181 tree t;
6182
6183 /* See if we already have the appropriate qualified variant. */
6184 t = get_qualified_type (type, type_quals);
6185
6186 /* If not, build it. */
6187 if (!t)
6188 {
6189 t = build_variant_type_copy (type);
6190 set_type_quals (t, type_quals);
6191
6192       if ((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC)
6193 {
6194 /* See if this object can map to a basic atomic type. */
6195 tree atomic_type = find_atomic_core_type (type);
6196 if (atomic_type)
6197 {
6198 /* Ensure the alignment of this type is compatible with
6199 the required alignment of the atomic type. */
6200 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6201 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6202 }
6203 }
6204
6205 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6206 /* Propagate structural equality. */
6207 SET_TYPE_STRUCTURAL_EQUALITY (t);
6208 else if (TYPE_CANONICAL (type) != type)
6209 /* Build the underlying canonical type, since it is different
6210 from TYPE. */
6211 TYPE_CANONICAL (t) = build_qualified_type (TYPE_CANONICAL (type),
6212 type_quals);
6213 else
6214 /* T is its own canonical type. */
6215 TYPE_CANONICAL (t) = t;
6216
6217 }
6218
6219 return t;
6220 }
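
/* An illustrative sketch (not used anywhere in this file): qualified
   variants live on the main variant's chain, so a second request for
   the same qualifiers returns the node created (or found) by the first.

     tree ct = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     gcc_assert (TYPE_READONLY (ct)
		 && TYPE_MAIN_VARIANT (ct) == integer_type_node
		 && build_qualified_type (integer_type_node,
					  TYPE_QUAL_CONST) == ct);  */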
6221
6222 /* Create a variant of type T with alignment ALIGN. */
6223
6224 tree
6225 build_aligned_type (tree type, unsigned int align)
6226 {
6227 tree t;
6228
6229 if (TYPE_PACKED (type)
6230 || TYPE_ALIGN (type) == align)
6231 return type;
6232
6233 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6234 if (check_aligned_type (t, type, align))
6235 return t;
6236
6237 t = build_variant_type_copy (type);
6238 TYPE_ALIGN (t) = align;
6239
6240 return t;
6241 }
6242
6243 /* Create a new distinct copy of TYPE. The new type is made its own
6244 MAIN_VARIANT. If TYPE requires structural equality checks, the
6245 resulting type requires structural equality checks; otherwise, its
6246 TYPE_CANONICAL points to itself. */
6247
6248 tree
6249 build_distinct_type_copy (tree type)
6250 {
6251 tree t = copy_node (type);
6252
6253 TYPE_POINTER_TO (t) = 0;
6254 TYPE_REFERENCE_TO (t) = 0;
6255
6256 /* Set the canonical type either to a new equivalence class, or
6257 propagate the need for structural equality checks. */
6258 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6259 SET_TYPE_STRUCTURAL_EQUALITY (t);
6260 else
6261 TYPE_CANONICAL (t) = t;
6262
6263 /* Make it its own variant. */
6264 TYPE_MAIN_VARIANT (t) = t;
6265 TYPE_NEXT_VARIANT (t) = 0;
6266
6267 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6268 whose TREE_TYPE is not t. This can also happen in the Ada
6269 frontend when using subtypes. */
6270
6271 return t;
6272 }
6273
6274 /* Create a new variant of TYPE, equivalent but distinct. This is so
6275 the caller can modify it. TYPE_CANONICAL for the return type will
6276 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6277 are considered equal by the language itself (or that both types
6278 require structural equality checks). */
6279
6280 tree
6281 build_variant_type_copy (tree type)
6282 {
6283 tree t, m = TYPE_MAIN_VARIANT (type);
6284
6285 t = build_distinct_type_copy (type);
6286
6287 /* Since we're building a variant, assume that it is a non-semantic
6288 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6289 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6290
6291 /* Add the new type to the chain of variants of TYPE. */
6292 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6293 TYPE_NEXT_VARIANT (m) = t;
6294 TYPE_MAIN_VARIANT (t) = m;
6295
6296 return t;
6297 }
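
/* An illustrative sketch (not used anywhere in this file) contrasting
   the two copy routines: a variant copy stays on the original's variant
   chain and shares its canonical type, while a distinct copy starts a
   new chain and a new equivalence class.

     tree v = build_variant_type_copy (integer_type_node);
     gcc_assert (TYPE_MAIN_VARIANT (v) == integer_type_node
		 && TYPE_CANONICAL (v) == integer_type_node);

     tree d = build_distinct_type_copy (integer_type_node);
     gcc_assert (TYPE_MAIN_VARIANT (d) == d
		 && TYPE_CANONICAL (d) == d);  */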
6298 \f
6299 /* Return true if the from trees in both tree maps are equal.  */
6300
6301 int
6302 tree_map_base_eq (const void *va, const void *vb)
6303 {
6304 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6305 *const b = (const struct tree_map_base *) vb;
6306 return (a->from == b->from);
6307 }
6308
6309 /* Hash a from tree in a tree_map_base.  */
6310
6311 unsigned int
6312 tree_map_base_hash (const void *item)
6313 {
6314 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6315 }
6316
6317 /* Return true if this tree map structure is marked for garbage collection
6318 purposes. We simply return true if the from tree is marked, so that this
6319 structure goes away when the from tree goes away. */
6320
6321 int
6322 tree_map_base_marked_p (const void *p)
6323 {
6324 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6325 }
6326
6327 /* Hash a from tree in a tree_map. */
6328
6329 unsigned int
6330 tree_map_hash (const void *item)
6331 {
6332 return (((const struct tree_map *) item)->hash);
6333 }
6334
6335 /* Hash a from tree in a tree_decl_map. */
6336
6337 unsigned int
6338 tree_decl_map_hash (const void *item)
6339 {
6340 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6341 }
6342
6343 /* Return the initialization priority for DECL. */
6344
6345 priority_type
6346 decl_init_priority_lookup (tree decl)
6347 {
6348 struct tree_priority_map *h;
6349 struct tree_map_base in;
6350
6351 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6352 in.from = decl;
6353 h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
6354 return h ? h->init : DEFAULT_INIT_PRIORITY;
6355 }
6356
6357 /* Return the finalization priority for DECL. */
6358
6359 priority_type
6360 decl_fini_priority_lookup (tree decl)
6361 {
6362 struct tree_priority_map *h;
6363 struct tree_map_base in;
6364
6365 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
6366 in.from = decl;
6367 h = (struct tree_priority_map *) htab_find (init_priority_for_decl, &in);
6368 return h ? h->fini : DEFAULT_INIT_PRIORITY;
6369 }
6370
6371 /* Return the initialization and finalization priority information for
6372 DECL. If there is no previous priority information, a freshly
6373 allocated structure is returned. */
6374
6375 static struct tree_priority_map *
6376 decl_priority_info (tree decl)
6377 {
6378 struct tree_priority_map in;
6379 struct tree_priority_map *h;
6380 void **loc;
6381
6382 in.base.from = decl;
6383 loc = htab_find_slot (init_priority_for_decl, &in, INSERT);
6384 h = (struct tree_priority_map *) *loc;
6385 if (!h)
6386 {
6387 h = ggc_alloc_cleared_tree_priority_map ();
6388 *loc = h;
6389 h->base.from = decl;
6390 h->init = DEFAULT_INIT_PRIORITY;
6391 h->fini = DEFAULT_INIT_PRIORITY;
6392 }
6393
6394 return h;
6395 }
6396
6397 /* Set the initialization priority for DECL to PRIORITY. */
6398
6399 void
6400 decl_init_priority_insert (tree decl, priority_type priority)
6401 {
6402 struct tree_priority_map *h;
6403
6404 gcc_assert (VAR_OR_FUNCTION_DECL_P (decl));
6405 if (priority == DEFAULT_INIT_PRIORITY)
6406 return;
6407 h = decl_priority_info (decl);
6408 h->init = priority;
6409 }
6410
6411 /* Set the finalization priority for DECL to PRIORITY. */
6412
6413 void
6414 decl_fini_priority_insert (tree decl, priority_type priority)
6415 {
6416 struct tree_priority_map *h;
6417
6418 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
6419 if (priority == DEFAULT_INIT_PRIORITY)
6420 return;
6421 h = decl_priority_info (decl);
6422 h->fini = priority;
6423 }
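
/* An illustrative sketch, assuming FNDECL is some FUNCTION_DECL with no
   priority recorded yet (FNDECL is hypothetical, not a symbol in this
   file): lookups fall back to DEFAULT_INIT_PRIORITY until a non-default
   priority is inserted.

     gcc_assert (decl_init_priority_lookup (fndecl)
		 == DEFAULT_INIT_PRIORITY);
     decl_init_priority_insert (fndecl, 200);
     gcc_assert (decl_init_priority_lookup (fndecl) == 200);  */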
6424
6425 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6426
6427 static void
6428 print_debug_expr_statistics (void)
6429 {
6430 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6431 (long) htab_size (debug_expr_for_decl),
6432 (long) htab_elements (debug_expr_for_decl),
6433 htab_collisions (debug_expr_for_decl));
6434 }
6435
6436 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6437
6438 static void
6439 print_value_expr_statistics (void)
6440 {
6441 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6442 (long) htab_size (value_expr_for_decl),
6443 (long) htab_elements (value_expr_for_decl),
6444 htab_collisions (value_expr_for_decl));
6445 }
6446
6447 /* Lookup a debug expression for FROM, and return it if we find one. */
6448
6449 tree
6450 decl_debug_expr_lookup (tree from)
6451 {
6452 struct tree_decl_map *h, in;
6453 in.base.from = from;
6454
6455 h = (struct tree_decl_map *)
6456 htab_find_with_hash (debug_expr_for_decl, &in, DECL_UID (from));
6457 if (h)
6458 return h->to;
6459 return NULL_TREE;
6460 }
6461
6462 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6463
6464 void
6465 decl_debug_expr_insert (tree from, tree to)
6466 {
6467 struct tree_decl_map *h;
6468 void **loc;
6469
6470 h = ggc_alloc_tree_decl_map ();
6471 h->base.from = from;
6472 h->to = to;
6473 loc = htab_find_slot_with_hash (debug_expr_for_decl, h, DECL_UID (from),
6474 INSERT);
6475 *(struct tree_decl_map **) loc = h;
6476 }
6477
6478 /* Lookup a value expression for FROM, and return it if we find one. */
6479
6480 tree
6481 decl_value_expr_lookup (tree from)
6482 {
6483 struct tree_decl_map *h, in;
6484 in.base.from = from;
6485
6486 h = (struct tree_decl_map *)
6487 htab_find_with_hash (value_expr_for_decl, &in, DECL_UID (from));
6488 if (h)
6489 return h->to;
6490 return NULL_TREE;
6491 }
6492
6493 /* Insert a mapping FROM->TO in the value expression hashtable. */
6494
6495 void
6496 decl_value_expr_insert (tree from, tree to)
6497 {
6498 struct tree_decl_map *h;
6499 void **loc;
6500
6501 h = ggc_alloc_tree_decl_map ();
6502 h->base.from = from;
6503 h->to = to;
6504 loc = htab_find_slot_with_hash (value_expr_for_decl, h, DECL_UID (from),
6505 INSERT);
6506 *(struct tree_decl_map **) loc = h;
6507 }
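
/* An illustrative sketch, where DECL and EXPR stand for a suitable
   VAR_DECL and replacement expression (both hypothetical): insertion
   and lookup form a simple round trip keyed on DECL_UID.

     decl_value_expr_insert (decl, expr);
     gcc_assert (decl_value_expr_lookup (decl) == expr);  */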
6508
6509 /* Lookup a vector of debug arguments for FROM, and return it if we
6510 find one. */
6511
6512 vec<tree, va_gc> **
6513 decl_debug_args_lookup (tree from)
6514 {
6515 struct tree_vec_map *h, in;
6516
6517 if (!DECL_HAS_DEBUG_ARGS_P (from))
6518 return NULL;
6519 gcc_checking_assert (debug_args_for_decl != NULL);
6520 in.base.from = from;
6521 h = (struct tree_vec_map *)
6522 htab_find_with_hash (debug_args_for_decl, &in, DECL_UID (from));
6523 if (h)
6524 return &h->to;
6525 return NULL;
6526 }
6527
6528 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6529    arguments hashtable.  */
6530
6531 vec<tree, va_gc> **
6532 decl_debug_args_insert (tree from)
6533 {
6534 struct tree_vec_map *h;
6535 void **loc;
6536
6537 if (DECL_HAS_DEBUG_ARGS_P (from))
6538 return decl_debug_args_lookup (from);
6539 if (debug_args_for_decl == NULL)
6540 debug_args_for_decl = htab_create_ggc (64, tree_vec_map_hash,
6541 tree_vec_map_eq, 0);
6542 h = ggc_alloc_tree_vec_map ();
6543 h->base.from = from;
6544 h->to = NULL;
6545 loc = htab_find_slot_with_hash (debug_args_for_decl, h, DECL_UID (from),
6546 INSERT);
6547 *(struct tree_vec_map **) loc = h;
6548 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6549 return &h->to;
6550 }
6551
6552 /* Hashing of types so that we don't make duplicates.
6553 The entry point is `type_hash_canon'. */
6554
6555 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6556 with types in the TREE_VALUE slots), by adding the hash codes
6557 of the individual types. */
6558
6559 static unsigned int
6560 type_hash_list (const_tree list, hashval_t hashcode)
6561 {
6562 const_tree tail;
6563
6564 for (tail = list; tail; tail = TREE_CHAIN (tail))
6565 if (TREE_VALUE (tail) != error_mark_node)
6566 hashcode = iterative_hash_object (TYPE_HASH (TREE_VALUE (tail)),
6567 hashcode);
6568
6569 return hashcode;
6570 }
6571
6572 /* These are the Hashtable callback functions. */
6573
6574 /* Returns true iff the types are equivalent. */
6575
6576 static int
6577 type_hash_eq (const void *va, const void *vb)
6578 {
6579 const struct type_hash *const a = (const struct type_hash *) va,
6580 *const b = (const struct type_hash *) vb;
6581
6582 /* First test the things that are the same for all types. */
6583 if (a->hash != b->hash
6584 || TREE_CODE (a->type) != TREE_CODE (b->type)
6585 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6586 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6587 TYPE_ATTRIBUTES (b->type))
6588 || (TREE_CODE (a->type) != COMPLEX_TYPE
6589 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6590 return 0;
6591
6592 /* Be careful about comparing arrays before and after the element type
6593 has been completed; don't compare TYPE_ALIGN unless both types are
6594 complete. */
6595 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6596 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6597 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6598 return 0;
6599
6600 switch (TREE_CODE (a->type))
6601 {
6602 case VOID_TYPE:
6603 case COMPLEX_TYPE:
6604 case POINTER_TYPE:
6605 case REFERENCE_TYPE:
6606 case NULLPTR_TYPE:
6607 return 1;
6608
6609 case VECTOR_TYPE:
6610 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6611
6612 case ENUMERAL_TYPE:
6613 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6614 && !(TYPE_VALUES (a->type)
6615 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6616 && TYPE_VALUES (b->type)
6617 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6618 && type_list_equal (TYPE_VALUES (a->type),
6619 TYPE_VALUES (b->type))))
6620 return 0;
6621
6622 /* ... fall through ... */
6623
6624 case INTEGER_TYPE:
6625 case REAL_TYPE:
6626 case BOOLEAN_TYPE:
6627 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6628 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6629 TYPE_MAX_VALUE (b->type)))
6630 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6631 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6632 TYPE_MIN_VALUE (b->type))));
6633
6634 case FIXED_POINT_TYPE:
6635 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6636
6637 case OFFSET_TYPE:
6638 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6639
6640 case METHOD_TYPE:
6641 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6642 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6643 || (TYPE_ARG_TYPES (a->type)
6644 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6645 && TYPE_ARG_TYPES (b->type)
6646 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6647 && type_list_equal (TYPE_ARG_TYPES (a->type),
6648 TYPE_ARG_TYPES (b->type)))))
6649 break;
6650 return 0;
6651 case ARRAY_TYPE:
6652 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6653
6654 case RECORD_TYPE:
6655 case UNION_TYPE:
6656 case QUAL_UNION_TYPE:
6657 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6658 || (TYPE_FIELDS (a->type)
6659 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6660 && TYPE_FIELDS (b->type)
6661 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6662 && type_list_equal (TYPE_FIELDS (a->type),
6663 TYPE_FIELDS (b->type))));
6664
6665 case FUNCTION_TYPE:
6666 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6667 || (TYPE_ARG_TYPES (a->type)
6668 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6669 && TYPE_ARG_TYPES (b->type)
6670 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6671 && type_list_equal (TYPE_ARG_TYPES (a->type),
6672 TYPE_ARG_TYPES (b->type))))
6673 break;
6674 return 0;
6675
6676 default:
6677 return 0;
6678 }
6679
6680 if (lang_hooks.types.type_hash_eq != NULL)
6681 return lang_hooks.types.type_hash_eq (a->type, b->type);
6682
6683 return 1;
6684 }
6685
6686 /* Return the cached hash value. */
6687
6688 static hashval_t
6689 type_hash_hash (const void *item)
6690 {
6691 return ((const struct type_hash *) item)->hash;
6692 }
6693
6694 /* Look in the type hash table for a type isomorphic to TYPE.
6695 If one is found, return it. Otherwise return 0. */
6696
6697 static tree
6698 type_hash_lookup (hashval_t hashcode, tree type)
6699 {
6700 struct type_hash *h, in;
6701
6702 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6703 must call that routine before comparing TYPE_ALIGNs. */
6704 layout_type (type);
6705
6706 in.hash = hashcode;
6707 in.type = type;
6708
6709 h = (struct type_hash *) htab_find_with_hash (type_hash_table, &in,
6710 hashcode);
6711 if (h)
6712 return h->type;
6713 return NULL_TREE;
6714 }
6715
6716 /* Add an entry to the type-hash-table
6717 for a type TYPE whose hash code is HASHCODE. */
6718
6719 static void
6720 type_hash_add (hashval_t hashcode, tree type)
6721 {
6722 struct type_hash *h;
6723 void **loc;
6724
6725 h = ggc_alloc_type_hash ();
6726 h->hash = hashcode;
6727 h->type = type;
6728 loc = htab_find_slot_with_hash (type_hash_table, h, hashcode, INSERT);
6729 *loc = (void *)h;
6730 }
6731
6732 /* Given TYPE, and HASHCODE its hash code, return the canonical
6733 object for an identical type if one already exists.
6734 Otherwise, return TYPE, and record it as the canonical object.
6735
6736 To use this function, first create a type of the sort you want.
6737 Then compute its hash code from the fields of the type that
6738 make it different from other similar types.
6739 Then call this function and use the value. */
6740
6741 tree
6742 type_hash_canon (unsigned int hashcode, tree type)
6743 {
6744 tree t1;
6745
6746 /* The hash table only contains main variants, so ensure that's what we're
6747 being passed. */
6748 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6749
6750 /* See if the type is in the hash table already. If so, return it.
6751 Otherwise, add the type. */
6752 t1 = type_hash_lookup (hashcode, type);
6753 if (t1 != 0)
6754 {
6755 if (GATHER_STATISTICS)
6756 {
6757 tree_code_counts[(int) TREE_CODE (type)]--;
6758 tree_node_counts[(int) t_kind]--;
6759 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6760 }
6761 return t1;
6762 }
6763 else
6764 {
6765 type_hash_add (hashcode, type);
6766 return type;
6767 }
6768 }
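
/* An illustrative sketch of the calling convention described above
   (it mirrors build_nonstandard_integer_type later in this file; it is
   not new GCC API): build a candidate, hash its distinguishing fields,
   then canonicalize.

     tree t = make_node (INTEGER_TYPE);
     TYPE_PRECISION (t) = 9;
     fixup_unsigned_type (t);
     t = type_hash_canon (tree_low_cst (TYPE_MAX_VALUE (t), 1), t);  */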
6769
6770 /* See if the data pointed to by the type hash table is marked.  We consider
6771    it marked if the type it points to is marked.  */
6773
6774 static int
6775 type_hash_marked_p (const void *p)
6776 {
6777 const_tree const type = ((const struct type_hash *) p)->type;
6778
6779 return ggc_marked_p (type);
6780 }
6781
6782 static void
6783 print_type_hash_statistics (void)
6784 {
6785 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6786 (long) htab_size (type_hash_table),
6787 (long) htab_elements (type_hash_table),
6788 htab_collisions (type_hash_table));
6789 }
6790
6791 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6792 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6793 by adding the hash codes of the individual attributes. */
6794
6795 static unsigned int
6796 attribute_hash_list (const_tree list, hashval_t hashcode)
6797 {
6798 const_tree tail;
6799
6800 for (tail = list; tail; tail = TREE_CHAIN (tail))
6801 /* ??? Do we want to add in TREE_VALUE too? */
6802 hashcode = iterative_hash_object
6803 (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)), hashcode);
6804 return hashcode;
6805 }
6806
6807 /* Given two lists of attributes, return true if list L2 is
6808    equivalent to L1.  */
6809
6810 int
6811 attribute_list_equal (const_tree l1, const_tree l2)
6812 {
6813 if (l1 == l2)
6814 return 1;
6815
6816 return attribute_list_contained (l1, l2)
6817 && attribute_list_contained (l2, l1);
6818 }
6819
6820 /* Given two lists of attributes, return true if list L2 is
6821 completely contained within L1. */
6822 /* ??? This would be faster if attribute names were stored in a canonicalized
6823 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
6824 must be used to show these elements are equivalent (which they are). */
6825 /* ??? It's not clear that attributes with arguments will always be handled
6826 correctly. */
6827
6828 int
6829 attribute_list_contained (const_tree l1, const_tree l2)
6830 {
6831 const_tree t1, t2;
6832
6833 /* First check the obvious, maybe the lists are identical. */
6834 if (l1 == l2)
6835 return 1;
6836
6837 /* Maybe the lists are similar. */
6838 for (t1 = l1, t2 = l2;
6839 t1 != 0 && t2 != 0
6840 && get_attribute_name (t1) == get_attribute_name (t2)
6841 && TREE_VALUE (t1) == TREE_VALUE (t2);
6842 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6843 ;
6844
6845 /* Maybe the lists are equal. */
6846 if (t1 == 0 && t2 == 0)
6847 return 1;
6848
6849 for (; t2 != 0; t2 = TREE_CHAIN (t2))
6850 {
6851 const_tree attr;
6852 /* This CONST_CAST is okay because lookup_attribute does not
6853 modify its argument and the return value is assigned to a
6854 const_tree. */
6855 for (attr = lookup_ident_attribute (get_attribute_name (t2),
6856 CONST_CAST_TREE (l1));
6857 attr != NULL_TREE && !attribute_value_equal (t2, attr);
6858 attr = lookup_ident_attribute (get_attribute_name (t2),
6859 TREE_CHAIN (attr)))
6860 ;
6861
6862 if (attr == NULL_TREE)
6863 return 0;
6864 }
6865
6866 return 1;
6867 }
6868
6869 /* Given two lists of types
6870 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
6871 return 1 if the lists contain the same types in the same order.
6872 Also, the TREE_PURPOSEs must match. */
6873
6874 int
6875 type_list_equal (const_tree l1, const_tree l2)
6876 {
6877 const_tree t1, t2;
6878
6879 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
6880 if (TREE_VALUE (t1) != TREE_VALUE (t2)
6881 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
6882 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
6883 && (TREE_TYPE (TREE_PURPOSE (t1))
6884 == TREE_TYPE (TREE_PURPOSE (t2))))))
6885 return 0;
6886
6887 return t1 == t2;
6888 }
6889
6890 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
6891 given by TYPE. If the argument list accepts variable arguments,
6892 then this function counts only the ordinary arguments. */
6893
6894 int
6895 type_num_arguments (const_tree type)
6896 {
6897 int i = 0;
6898 tree t;
6899
6900 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
6901 /* If the function does not take a variable number of arguments,
6902 the last element in the list will have type `void'. */
6903 if (VOID_TYPE_P (TREE_VALUE (t)))
6904 break;
6905 else
6906 ++i;
6907
6908 return i;
6909 }
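
/* An illustrative sketch (not used anywhere in this file): for a
   prototype such as int (int, void *), the terminating void entry in
   TYPE_ARG_TYPES is not counted.

     tree fntype = build_function_type_list (integer_type_node,
					     integer_type_node,
					     ptr_type_node, NULL_TREE);
     gcc_assert (type_num_arguments (fntype) == 2);  */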
6910
6911 /* Nonzero if integer constants T1 and T2
6912 represent the same constant value. */
6913
6914 int
6915 tree_int_cst_equal (const_tree t1, const_tree t2)
6916 {
6917 if (t1 == t2)
6918 return 1;
6919
6920 if (t1 == 0 || t2 == 0)
6921 return 0;
6922
6923 if (TREE_CODE (t1) == INTEGER_CST
6924 && TREE_CODE (t2) == INTEGER_CST
6925 && TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
6926 && TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2))
6927 return 1;
6928
6929 return 0;
6930 }
6931
6932 /* Nonzero if integer constants T1 and T2 represent values that satisfy <.
6933 The precise way of comparison depends on their data type. */
6934
6935 int
6936 tree_int_cst_lt (const_tree t1, const_tree t2)
6937 {
6938 if (t1 == t2)
6939 return 0;
6940
6941 if (TYPE_UNSIGNED (TREE_TYPE (t1)) != TYPE_UNSIGNED (TREE_TYPE (t2)))
6942 {
6943 int t1_sgn = tree_int_cst_sgn (t1);
6944 int t2_sgn = tree_int_cst_sgn (t2);
6945
6946 if (t1_sgn < t2_sgn)
6947 return 1;
6948 else if (t1_sgn > t2_sgn)
6949 return 0;
6950 /* Otherwise, both are non-negative, so we compare them as
6951 unsigned just in case one of them would overflow a signed
6952 type. */
6953 }
6954 else if (!TYPE_UNSIGNED (TREE_TYPE (t1)))
6955 return INT_CST_LT (t1, t2);
6956
6957 return INT_CST_LT_UNSIGNED (t1, t2);
6958 }
6959
6960 /* Returns -1 if T1 < T2, 0 if T1 == T2, and 1 if T1 > T2. */
6961
6962 int
6963 tree_int_cst_compare (const_tree t1, const_tree t2)
6964 {
6965 if (tree_int_cst_lt (t1, t2))
6966 return -1;
6967 else if (tree_int_cst_lt (t2, t1))
6968 return 1;
6969 else
6970 return 0;
6971 }
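
/* An illustrative sketch (not used anywhere in this file): operands of
   different signedness are ordered by their signs first, so a negative
   signed constant compares below a positive unsigned one.

     tree neg = build_int_cst (integer_type_node, -1);
     tree pos = build_int_cst (unsigned_type_node, 1);
     gcc_assert (tree_int_cst_lt (neg, pos)
		 && tree_int_cst_compare (neg, pos) == -1);  */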
6972
6973 /* Return 1 if T is an INTEGER_CST that can be manipulated efficiently on
6974 the host. If POS is zero, the value can be represented in a single
6975 HOST_WIDE_INT. If POS is nonzero, the value must be non-negative and can
6976 be represented in a single unsigned HOST_WIDE_INT. */
6977
6978 int
6979 host_integerp (const_tree t, int pos)
6980 {
6981 if (t == NULL_TREE)
6982 return 0;
6983
6984 return (TREE_CODE (t) == INTEGER_CST
6985 && ((TREE_INT_CST_HIGH (t) == 0
6986 && (HOST_WIDE_INT) TREE_INT_CST_LOW (t) >= 0)
6987 || (! pos && TREE_INT_CST_HIGH (t) == -1
6988 && (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0
6989 && !TYPE_UNSIGNED (TREE_TYPE (t)))
6990 || (pos && TREE_INT_CST_HIGH (t) == 0)));
6991 }
6992
6993 /* Return the HOST_WIDE_INT least significant bits of T if it is an
6994 INTEGER_CST and there is no overflow. POS is nonzero if the result must
6995 be non-negative. We must be able to satisfy the above conditions. */
6996
6997 HOST_WIDE_INT
6998 tree_low_cst (const_tree t, int pos)
6999 {
7000 gcc_assert (host_integerp (t, pos));
7001 return TREE_INT_CST_LOW (t);
7002 }
7003
7004 /* Return the most significant (sign) bit of T. */
7005
7006 int
7007 tree_int_cst_sign_bit (const_tree t)
7008 {
7009 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7010 unsigned HOST_WIDE_INT w;
7011
7012 if (bitno < HOST_BITS_PER_WIDE_INT)
7013 w = TREE_INT_CST_LOW (t);
7014 else
7015 {
7016 w = TREE_INT_CST_HIGH (t);
7017 bitno -= HOST_BITS_PER_WIDE_INT;
7018 }
7019
7020 return (w >> bitno) & 1;
7021 }
7022
7023 /* Return an indication of the sign of the integer constant T.
7024 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7025 Note that -1 will never be returned if T's type is unsigned. */
7026
7027 int
7028 tree_int_cst_sgn (const_tree t)
7029 {
7030 if (TREE_INT_CST_LOW (t) == 0 && TREE_INT_CST_HIGH (t) == 0)
7031 return 0;
7032 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7033 return 1;
7034 else if (TREE_INT_CST_HIGH (t) < 0)
7035 return -1;
7036 else
7037 return 1;
7038 }
7039
7040 /* Return the minimum number of bits needed to represent VALUE in a
7041    signed or unsigned type; UNSIGNEDP says which.  */
7042
7043 unsigned int
7044 tree_int_cst_min_precision (tree value, bool unsignedp)
7045 {
7046 /* If the value is negative, compute its negative minus 1. The latter
7047 adjustment is because the absolute value of the largest negative value
7048 is one larger than the largest positive value. This is equivalent to
7049 a bit-wise negation, so use that operation instead. */
7050
7051 if (tree_int_cst_sgn (value) < 0)
7052 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7053
7054 /* Return the number of bits needed, taking into account the fact
7055 that we need one more bit for a signed than unsigned type.
7056 If value is 0 or -1, the minimum precision is 1 no matter
7057 whether unsignedp is true or false. */
7058
7059 if (integer_zerop (value))
7060 return 1;
7061 else
7062 return tree_floor_log2 (value) + 1 + !unsignedp;
7063 }
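
/* An illustrative sketch (not used anywhere in this file): the value 5
   needs three bits on its own, plus one more when a sign bit must be
   representable.

     tree five = build_int_cst (integer_type_node, 5);
     gcc_assert (tree_int_cst_min_precision (five, true) == 3
		 && tree_int_cst_min_precision (five, false) == 4);  */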
7064
7065 /* Return truthvalue of whether T1 is the same tree structure as T2.
7066 Return 1 if they are the same.
7067 Return 0 if they are understandably different.
7068 Return -1 if either contains tree structure not understood by
7069 this function. */
7070
7071 int
7072 simple_cst_equal (const_tree t1, const_tree t2)
7073 {
7074 enum tree_code code1, code2;
7075 int cmp;
7076 int i;
7077
7078 if (t1 == t2)
7079 return 1;
7080 if (t1 == 0 || t2 == 0)
7081 return 0;
7082
7083 code1 = TREE_CODE (t1);
7084 code2 = TREE_CODE (t2);
7085
7086 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7087 {
7088 if (CONVERT_EXPR_CODE_P (code2)
7089 || code2 == NON_LVALUE_EXPR)
7090 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7091 else
7092 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7093 }
7094
7095 else if (CONVERT_EXPR_CODE_P (code2)
7096 || code2 == NON_LVALUE_EXPR)
7097 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7098
7099 if (code1 != code2)
7100 return 0;
7101
7102 switch (code1)
7103 {
7104 case INTEGER_CST:
7105 return (TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
7106 && TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2));
7107
7108 case REAL_CST:
7109 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7110
7111 case FIXED_CST:
7112 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7113
7114 case STRING_CST:
7115 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7116 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7117 TREE_STRING_LENGTH (t1)));
7118
7119 case CONSTRUCTOR:
7120 {
7121 unsigned HOST_WIDE_INT idx;
7122 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7123 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7124
7125 if (vec_safe_length (v1) != vec_safe_length (v2))
7126 return false;
7127
7128 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7129 	  /* ??? Should we also handle fields here?  */
7130 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7131 return false;
7132 return true;
7133 }
7134
7135 case SAVE_EXPR:
7136 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7137
7138 case CALL_EXPR:
7139 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7140 if (cmp <= 0)
7141 return cmp;
7142 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7143 return 0;
7144 {
7145 const_tree arg1, arg2;
7146 const_call_expr_arg_iterator iter1, iter2;
7147 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7148 arg2 = first_const_call_expr_arg (t2, &iter2);
7149 arg1 && arg2;
7150 arg1 = next_const_call_expr_arg (&iter1),
7151 arg2 = next_const_call_expr_arg (&iter2))
7152 {
7153 cmp = simple_cst_equal (arg1, arg2);
7154 if (cmp <= 0)
7155 return cmp;
7156 }
7157 return arg1 == arg2;
7158 }
7159
7160 case TARGET_EXPR:
7161 /* Special case: if either target is an unallocated VAR_DECL,
7162 it means that it's going to be unified with whatever the
7163 TARGET_EXPR is really supposed to initialize, so treat it
7164 as being equivalent to anything. */
7165 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7166 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7167 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7168 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7169 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7170 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7171 cmp = 1;
7172 else
7173 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7174
7175 if (cmp <= 0)
7176 return cmp;
7177
7178 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7179
7180 case WITH_CLEANUP_EXPR:
7181 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7182 if (cmp <= 0)
7183 return cmp;
7184
7185       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7186
7187 case COMPONENT_REF:
7188 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7189 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7190
7191 return 0;
7192
7193 case VAR_DECL:
7194 case PARM_DECL:
7195 case CONST_DECL:
7196 case FUNCTION_DECL:
7197 return 0;
7198
7199 default:
7200 break;
7201 }
7202
7203 /* This general rule works for most tree codes. All exceptions should be
7204 handled above. If this is a language-specific tree code, we can't
7205 trust what might be in the operand, so say we don't know
7206 the situation. */
7207 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7208 return -1;
7209
7210 switch (TREE_CODE_CLASS (code1))
7211 {
7212 case tcc_unary:
7213 case tcc_binary:
7214 case tcc_comparison:
7215 case tcc_expression:
7216 case tcc_reference:
7217 case tcc_statement:
7218 cmp = 1;
7219 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7220 {
7221 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7222 if (cmp <= 0)
7223 return cmp;
7224 }
7225
7226 return cmp;
7227
7228 default:
7229 return -1;
7230 }
7231 }
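
/* An illustrative sketch (not used anywhere in this file): integer
   constants compare by value here, not by pointer identity or type.

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (long_integer_type_node, 7);
     gcc_assert (simple_cst_equal (a, b) == 1);  */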
7232
7233 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7234 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7235 than U, respectively. */
7236
7237 int
7238 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7239 {
7240 if (tree_int_cst_sgn (t) < 0)
7241 return -1;
7242 else if (TREE_INT_CST_HIGH (t) != 0)
7243 return 1;
7244 else if (TREE_INT_CST_LOW (t) == u)
7245 return 0;
7246 else if (TREE_INT_CST_LOW (t) < u)
7247 return -1;
7248 else
7249 return 1;
7250 }
7251
7252 /* Return true if SIZE represents a constant size that is in bounds of
7253 what the middle-end and the backend accepts (covering not more than
7254 half of the address-space). */
7255
7256 bool
7257 valid_constant_size_p (const_tree size)
7258 {
7259 if (! host_integerp (size, 1)
7260 || TREE_OVERFLOW (size)
7261 || tree_int_cst_sign_bit (size) != 0)
7262 return false;
7263 return true;
7264 }
7265
7266 /* Return the precision of the type, or for a complex or vector type the
7267 precision of the type of its elements. */
7268
7269 unsigned int
7270 element_precision (const_tree type)
7271 {
7272 enum tree_code code = TREE_CODE (type);
7273 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7274 type = TREE_TYPE (type);
7275
7276 return TYPE_PRECISION (type);
7277 }
7278
7279 /* Return true if CODE represents an associative tree code. Otherwise
7280 return false. */
7281 bool
7282 associative_tree_code (enum tree_code code)
7283 {
7284 switch (code)
7285 {
7286 case BIT_IOR_EXPR:
7287 case BIT_AND_EXPR:
7288 case BIT_XOR_EXPR:
7289 case PLUS_EXPR:
7290 case MULT_EXPR:
7291 case MIN_EXPR:
7292 case MAX_EXPR:
7293 return true;
7294
7295 default:
7296 break;
7297 }
7298 return false;
7299 }
7300
7301 /* Return true if CODE represents a commutative tree code. Otherwise
7302 return false. */
7303 bool
7304 commutative_tree_code (enum tree_code code)
7305 {
7306 switch (code)
7307 {
7308 case PLUS_EXPR:
7309 case MULT_EXPR:
7310 case MULT_HIGHPART_EXPR:
7311 case MIN_EXPR:
7312 case MAX_EXPR:
7313 case BIT_IOR_EXPR:
7314 case BIT_XOR_EXPR:
7315 case BIT_AND_EXPR:
7316 case NE_EXPR:
7317 case EQ_EXPR:
7318 case UNORDERED_EXPR:
7319 case ORDERED_EXPR:
7320 case UNEQ_EXPR:
7321 case LTGT_EXPR:
7322 case TRUTH_AND_EXPR:
7323 case TRUTH_XOR_EXPR:
7324 case TRUTH_OR_EXPR:
7325 case WIDEN_MULT_EXPR:
7326 case VEC_WIDEN_MULT_HI_EXPR:
7327 case VEC_WIDEN_MULT_LO_EXPR:
7328 case VEC_WIDEN_MULT_EVEN_EXPR:
7329 case VEC_WIDEN_MULT_ODD_EXPR:
7330 return true;
7331
7332 default:
7333 break;
7334 }
7335 return false;
7336 }
7337
7338 /* Return true if CODE represents a ternary tree code for which the
7339 first two operands are commutative. Otherwise return false. */
7340 bool
7341 commutative_ternary_tree_code (enum tree_code code)
7342 {
7343 switch (code)
7344 {
7345 case WIDEN_MULT_PLUS_EXPR:
7346 case WIDEN_MULT_MINUS_EXPR:
7347 return true;
7348
7349 default:
7350 break;
7351 }
7352 return false;
7353 }
7354
7355 /* Generate a hash value for an expression. This can be used iteratively
7356 by passing a previous result as the VAL argument.
7357
7358 This function is intended to produce the same hash for expressions which
7359 would compare equal using operand_equal_p. */
7360
7361 hashval_t
7362 iterative_hash_expr (const_tree t, hashval_t val)
7363 {
7364 int i;
7365 enum tree_code code;
7366 char tclass;
7367
7368 if (t == NULL_TREE)
7369 return iterative_hash_hashval_t (0, val);
7370
7371 code = TREE_CODE (t);
7372
7373 switch (code)
7374 {
7375 /* Alas, constants aren't shared, so we can't rely on pointer
7376 identity. */
7377 case INTEGER_CST:
7378 val = iterative_hash_host_wide_int (TREE_INT_CST_LOW (t), val);
7379 return iterative_hash_host_wide_int (TREE_INT_CST_HIGH (t), val);
7380 case REAL_CST:
7381 {
7382 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7383
7384 return iterative_hash_hashval_t (val2, val);
7385 }
7386 case FIXED_CST:
7387 {
7388 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7389
7390 return iterative_hash_hashval_t (val2, val);
7391 }
7392 case STRING_CST:
7393 return iterative_hash (TREE_STRING_POINTER (t),
7394 TREE_STRING_LENGTH (t), val);
7395 case COMPLEX_CST:
7396 val = iterative_hash_expr (TREE_REALPART (t), val);
7397 return iterative_hash_expr (TREE_IMAGPART (t), val);
7398 case VECTOR_CST:
7399 {
7400 unsigned i;
7401 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7402 val = iterative_hash_expr (VECTOR_CST_ELT (t, i), val);
7403 return val;
7404 }
7405 case SSA_NAME:
7406 /* We can just compare by pointer. */
7407 return iterative_hash_host_wide_int (SSA_NAME_VERSION (t), val);
7408 case PLACEHOLDER_EXPR:
7409 /* The node itself doesn't matter. */
7410 return val;
7411 case TREE_LIST:
7412 /* A list of expressions, for a CALL_EXPR or as the elements of a
7413 VECTOR_CST. */
7414 for (; t; t = TREE_CHAIN (t))
7415 val = iterative_hash_expr (TREE_VALUE (t), val);
7416 return val;
7417 case CONSTRUCTOR:
7418 {
7419 unsigned HOST_WIDE_INT idx;
7420 tree field, value;
7421 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7422 {
7423 val = iterative_hash_expr (field, val);
7424 val = iterative_hash_expr (value, val);
7425 }
7426 return val;
7427 }
7428 case FUNCTION_DECL:
7429 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7430 Otherwise nodes that compare equal according to operand_equal_p might
7431 get different hash codes. However, don't do this for machine specific
7432 or front end builtins, since the function code is overloaded in those
7433 cases. */
7434 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7435 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7436 {
7437 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7438 code = TREE_CODE (t);
7439 }
7440 /* FALL THROUGH */
7441 default:
7442 tclass = TREE_CODE_CLASS (code);
7443
7444 if (tclass == tcc_declaration)
7445 {
7446 /* DECL's have a unique ID */
7447 val = iterative_hash_host_wide_int (DECL_UID (t), val);
7448 }
7449 else
7450 {
7451 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7452
7453 val = iterative_hash_object (code, val);
7454
7455 /* Don't hash the type, that can lead to having nodes which
7456 compare equal according to operand_equal_p, but which
7457 have different hash codes. */
7458 if (CONVERT_EXPR_CODE_P (code)
7459 || code == NON_LVALUE_EXPR)
7460 {
7461 	      /* Make sure to include signedness in the hash computation.  */
7462 val += TYPE_UNSIGNED (TREE_TYPE (t));
7463 val = iterative_hash_expr (TREE_OPERAND (t, 0), val);
7464 }
7465
7466 else if (commutative_tree_code (code))
7467 {
7468 /* It's a commutative expression. We want to hash it the same
7469 however it appears. We do this by first hashing both operands
7470 and then rehashing based on the order of their independent
7471 hashes. */
7472 hashval_t one = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
7473 hashval_t two = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
7474 hashval_t t;
7475
7476 if (one > two)
7477 t = one, one = two, two = t;
7478
7479 val = iterative_hash_hashval_t (one, val);
7480 val = iterative_hash_hashval_t (two, val);
7481 }
7482 else
7483 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7484 val = iterative_hash_expr (TREE_OPERAND (t, i), val);
7485 }
7486 return val;
7487 }
7488 }
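
/* An illustrative sketch (not used anywhere in this file): thanks to
   the commutativity handling above, both orderings of a PLUS_EXPR hash
   to the same value.

     tree one = build_int_cst (integer_type_node, 1);
     tree two = build_int_cst (integer_type_node, 2);
     tree s1 = build2 (PLUS_EXPR, integer_type_node, one, two);
     tree s2 = build2 (PLUS_EXPR, integer_type_node, two, one);
     gcc_assert (iterative_hash_expr (s1, 0)
		 == iterative_hash_expr (s2, 0));  */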
7489
7490 /* Constructors for pointer, array and function types.
7491 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7492 constructed by language-dependent code, not here.) */
7493
7494 /* Construct, lay out and return the type of pointers to TO_TYPE with
7495 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7496 reference all of memory. If such a type has already been
7497 constructed, reuse it. */
7498
7499 tree
7500 build_pointer_type_for_mode (tree to_type, enum machine_mode mode,
7501 bool can_alias_all)
7502 {
7503 tree t;
7504
7505 if (to_type == error_mark_node)
7506 return error_mark_node;
7507
7508 /* If the pointed-to type has the may_alias attribute set, force
7509 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7510 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7511 can_alias_all = true;
7512
7513 /* In some cases, languages will have things that aren't a POINTER_TYPE
7514 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7515 In that case, return that type without regard to the rest of our
7516 operands.
7517
7518 ??? This is a kludge, but consistent with the way this function has
7519 always operated and there doesn't seem to be a good way to avoid this
7520 at the moment. */
7521 if (TYPE_POINTER_TO (to_type) != 0
7522 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7523 return TYPE_POINTER_TO (to_type);
7524
7525 /* First, if we already have a type for pointers to TO_TYPE and it's
7526 the proper mode, use it. */
7527 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7528 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7529 return t;
7530
7531 t = make_node (POINTER_TYPE);
7532
7533 TREE_TYPE (t) = to_type;
7534 SET_TYPE_MODE (t, mode);
7535 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7536 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7537 TYPE_POINTER_TO (to_type) = t;
7538
7539 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7540 SET_TYPE_STRUCTURAL_EQUALITY (t);
7541 else if (TYPE_CANONICAL (to_type) != to_type)
7542 TYPE_CANONICAL (t)
7543 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7544 mode, can_alias_all);
7545
7546 /* Lay out the type. This function has many callers that are concerned
7547 with expression-construction, and this simplifies them all. */
7548 layout_type (t);
7549
7550 return t;
7551 }
7552
7553 /* By default build pointers in ptr_mode. */
7554
7555 tree
7556 build_pointer_type (tree to_type)
7557 {
7558   addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7559 : TYPE_ADDR_SPACE (to_type);
7560 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7561 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7562 }
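
/* An illustrative sketch (not used anywhere in this file): pointer
   types are cached on TYPE_POINTER_TO, so repeated requests for the
   same pointee and mode yield a single node.

     tree p1 = build_pointer_type (integer_type_node);
     tree p2 = build_pointer_type (integer_type_node);
     gcc_assert (p1 == p2 && TREE_TYPE (p1) == integer_type_node);  */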
7563
7564 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7565
7566 tree
7567 build_reference_type_for_mode (tree to_type, enum machine_mode mode,
7568 bool can_alias_all)
7569 {
7570 tree t;
7571
7572 if (to_type == error_mark_node)
7573 return error_mark_node;
7574
7575 /* If the pointed-to type has the may_alias attribute set, force
7576 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7577 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7578 can_alias_all = true;
7579
7580 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7581 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7582 In that case, return that type without regard to the rest of our
7583 operands.
7584
7585 ??? This is a kludge, but consistent with the way this function has
7586 always operated and there doesn't seem to be a good way to avoid this
7587 at the moment. */
7588 if (TYPE_REFERENCE_TO (to_type) != 0
7589 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7590 return TYPE_REFERENCE_TO (to_type);
7591
7592   /* First, if we already have a type for references to TO_TYPE and it's
7593 the proper mode, use it. */
7594 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7595 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7596 return t;
7597
7598 t = make_node (REFERENCE_TYPE);
7599
7600 TREE_TYPE (t) = to_type;
7601 SET_TYPE_MODE (t, mode);
7602 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7603 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7604 TYPE_REFERENCE_TO (to_type) = t;
7605
7606 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7607 SET_TYPE_STRUCTURAL_EQUALITY (t);
7608 else if (TYPE_CANONICAL (to_type) != to_type)
7609 TYPE_CANONICAL (t)
7610 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7611 mode, can_alias_all);
7612
7613 layout_type (t);
7614
7615 return t;
7616 }
7617
7618
7619 /* Build the node for the type of references-to-TO_TYPE by default
7620 in ptr_mode. */
7621
7622 tree
7623 build_reference_type (tree to_type)
7624 {
7625   addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7626 : TYPE_ADDR_SPACE (to_type);
7627 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7628 return build_reference_type_for_mode (to_type, pointer_mode, false);
7629 }
7630
7631 #define MAX_INT_CACHED_PREC \
7632 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7633 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7634
7635 /* Builds a signed or unsigned integer type of precision PRECISION.
7636 Used for C bitfields whose precision does not match that of
7637 built-in target types. */
7638 tree
7639 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7640 int unsignedp)
7641 {
7642 tree itype, ret;
7643
7644 if (unsignedp)
7645 unsignedp = MAX_INT_CACHED_PREC + 1;
7646
7647 if (precision <= MAX_INT_CACHED_PREC)
7648 {
7649 itype = nonstandard_integer_type_cache[precision + unsignedp];
7650 if (itype)
7651 return itype;
7652 }
7653
7654 itype = make_node (INTEGER_TYPE);
7655 TYPE_PRECISION (itype) = precision;
7656
7657 if (unsignedp)
7658 fixup_unsigned_type (itype);
7659 else
7660 fixup_signed_type (itype);
7661
7662 ret = itype;
7663 if (host_integerp (TYPE_MAX_VALUE (itype), 1))
7664 ret = type_hash_canon (tree_low_cst (TYPE_MAX_VALUE (itype), 1), itype);
7665 if (precision <= MAX_INT_CACHED_PREC)
7666 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7667
7668 return ret;
7669 }
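
/* An illustrative sketch (not used anywhere in this file): a 24-bit
   unsigned bit-field type, cached so that the second request returns
   the node built by the first.

     tree u24 = build_nonstandard_integer_type (24, 1);
     gcc_assert (TYPE_PRECISION (u24) == 24
		 && TYPE_UNSIGNED (u24)
		 && build_nonstandard_integer_type (24, 1) == u24);  */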
7670
7671 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7672 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7673 is true, reuse such a type that has already been constructed. */
7674
7675 static tree
7676 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7677 {
7678 tree itype = make_node (INTEGER_TYPE);
7679 hashval_t hashcode = 0;
7680
7681 TREE_TYPE (itype) = type;
7682
7683 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7684 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7685
7686 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7687 SET_TYPE_MODE (itype, TYPE_MODE (type));
7688 TYPE_SIZE (itype) = TYPE_SIZE (type);
7689 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7690 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7691 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7692
7693 if (!shared)
7694 return itype;
7695
7696 if ((TYPE_MIN_VALUE (itype)
7697 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7698 || (TYPE_MAX_VALUE (itype)
7699 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7700 {
7701 /* Since we cannot reliably merge this type, we need to compare it using
7702 structural equality checks. */
7703 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7704 return itype;
7705 }
7706
7707 hashcode = iterative_hash_expr (TYPE_MIN_VALUE (itype), hashcode);
7708 hashcode = iterative_hash_expr (TYPE_MAX_VALUE (itype), hashcode);
7709 hashcode = iterative_hash_hashval_t (TYPE_HASH (type), hashcode);
7710 itype = type_hash_canon (hashcode, itype);
7711
7712 return itype;
7713 }
7714
7715 /* Wrapper around build_range_type_1 with SHARED set to true. */
7716
7717 tree
7718 build_range_type (tree type, tree lowval, tree highval)
7719 {
7720 return build_range_type_1 (type, lowval, highval, true);
7721 }
7722
7723 /* Wrapper around build_range_type_1 with SHARED set to false. */
7724
7725 tree
7726 build_nonshared_range_type (tree type, tree lowval, tree highval)
7727 {
7728 return build_range_type_1 (type, lowval, highval, false);
7729 }
7730
7731 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7732 MAXVAL should be the maximum value in the domain
7733 (one less than the length of the array).
7734
7735 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7736    We don't enforce this limit; that is up to the caller (e.g. the language front end).
7737 The limit exists because the result is a signed type and we don't handle
7738 sizes that use more than one HOST_WIDE_INT. */
7739
7740 tree
7741 build_index_type (tree maxval)
7742 {
7743 return build_range_type (sizetype, size_zero_node, maxval);
7744 }
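
/* For example (illustrative), the TYPE_DOMAIN of a 10-element array is

     tree domain = build_index_type (size_int (9));

   i.e. the range [0, 9] over sizetype; build_array_type_nelts below wraps
   exactly this idiom.  */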
7745
7746 /* Return true if the debug information for TYPE, a subtype, should be emitted
7747 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7748 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7749 debug info and doesn't reflect the source code. */
7750
7751 bool
7752 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7753 {
7754 tree base_type = TREE_TYPE (type), low, high;
7755
7756 /* Subrange types have a base type which is an integral type. */
7757 if (!INTEGRAL_TYPE_P (base_type))
7758 return false;
7759
7760 /* Get the real bounds of the subtype. */
7761 if (lang_hooks.types.get_subrange_bounds)
7762 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7763 else
7764 {
7765 low = TYPE_MIN_VALUE (type);
7766 high = TYPE_MAX_VALUE (type);
7767 }
7768
7769 /* If the type and its base type have the same representation and the same
7770 name, then the type is not a subrange but a copy of the base type. */
7771 if ((TREE_CODE (base_type) == INTEGER_TYPE
7772 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7773 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7774 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7775 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type)))
7776 {
7777 tree type_name = TYPE_NAME (type);
7778 tree base_type_name = TYPE_NAME (base_type);
7779
7780 if (type_name && TREE_CODE (type_name) == TYPE_DECL)
7781 type_name = DECL_NAME (type_name);
7782
7783 if (base_type_name && TREE_CODE (base_type_name) == TYPE_DECL)
7784 base_type_name = DECL_NAME (base_type_name);
7785
7786 if (type_name == base_type_name)
7787 return false;
7788 }
7789
7790 if (lowval)
7791 *lowval = low;
7792 if (highval)
7793 *highval = high;
7794 return true;
7795 }
7796
7797 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7798 and number of elements specified by the range of values of INDEX_TYPE.
7799 If SHARED is true, reuse such a type that has already been constructed. */
7800
7801 static tree
7802 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7803 {
7804 tree t;
7805
7806 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7807 {
7808 error ("arrays of functions are not meaningful");
7809 elt_type = integer_type_node;
7810 }
7811
7812 t = make_node (ARRAY_TYPE);
7813 TREE_TYPE (t) = elt_type;
7814 TYPE_DOMAIN (t) = index_type;
7815 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7816 layout_type (t);
7817
7818 /* If the element type is incomplete at this point we get marked for
7819 structural equality. Do not record these types in the canonical
7820 type hashtable. */
7821 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7822 return t;
7823
7824 if (shared)
7825 {
7826 hashval_t hashcode = iterative_hash_object (TYPE_HASH (elt_type), 0);
7827 if (index_type)
7828 hashcode = iterative_hash_object (TYPE_HASH (index_type), hashcode);
7829 t = type_hash_canon (hashcode, t);
7830 }
7831
7832 if (TYPE_CANONICAL (t) == t)
7833 {
7834 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7835 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7836 SET_TYPE_STRUCTURAL_EQUALITY (t);
7837 else if (TYPE_CANONICAL (elt_type) != elt_type
7838 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7839 TYPE_CANONICAL (t)
7840 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7841 index_type
7842 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7843 shared);
7844 }
7845
7846 return t;
7847 }
7848
7849 /* Wrapper around build_array_type_1 with SHARED set to true. */
7850
7851 tree
7852 build_array_type (tree elt_type, tree index_type)
7853 {
7854 return build_array_type_1 (elt_type, index_type, true);
7855 }
7856
7857 /* Wrapper around build_array_type_1 with SHARED set to false. */
7858
7859 tree
7860 build_nonshared_array_type (tree elt_type, tree index_type)
7861 {
7862 return build_array_type_1 (elt_type, index_type, false);
7863 }
7864
7865 /* Return a representation of ELT_TYPE[NELTS], using indices of type
7866 sizetype. */
7867
7868 tree
7869 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
7870 {
7871 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
7872 }
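
/* Illustrative sketch (int16_arr is a placeholder name): the type of
   "int a[16]" can be built as

     tree int16_arr = build_array_type_nelts (integer_type_node, 16);

   which is shorthand for
   build_array_type (integer_type_node, build_index_type (size_int (15))).  */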
7873
7874 /* Recursively examines the array elements of TYPE, until a non-array
7875 element type is found. */
7876
7877 tree
7878 strip_array_types (tree type)
7879 {
7880 while (TREE_CODE (type) == ARRAY_TYPE)
7881 type = TREE_TYPE (type);
7882
7883 return type;
7884 }
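
/* Illustrative example: for the type of "int a[3][4]",

     strip_array_types (build_array_type_nelts
                        (build_array_type_nelts (integer_type_node, 4), 3))

   returns integer_type_node, since both ARRAY_TYPE levels are peeled.  */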
7885
7886 /* Computes the canonical argument types from the argument type list
7887 ARGTYPES.
7888
7889 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
7890 on entry to this function, or if any of the ARGTYPES are
7891 structural.
7892
7893 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
7894 true on entry to this function, or if any of the ARGTYPES are
7895 non-canonical.
7896
7897 Returns a canonical argument list, which may be ARGTYPES when the
7898 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
7899 true) or would not differ from ARGTYPES. */
7900
7901 static tree
7902 maybe_canonicalize_argtypes (tree argtypes,
7903 bool *any_structural_p,
7904 bool *any_noncanonical_p)
7905 {
7906 tree arg;
7907 bool any_noncanonical_argtypes_p = false;
7908
7909 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
7910 {
7911 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
7912 /* Fail gracefully by stating that the type is structural. */
7913 *any_structural_p = true;
7914 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
7915 *any_structural_p = true;
7916 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
7917 || TREE_PURPOSE (arg))
7918 /* If the argument has a default argument, we consider it
7919 non-canonical even though the type itself is canonical.
7920 That way, different variants of function and method types
7921 with default arguments will all point to the variant with
7922 no defaults as their canonical type. */
7923 any_noncanonical_argtypes_p = true;
7924 }
7925
7926 if (*any_structural_p)
7927 return argtypes;
7928
7929 if (any_noncanonical_argtypes_p)
7930 {
7931 /* Build the canonical list of argument types. */
7932 tree canon_argtypes = NULL_TREE;
7933 bool is_void = false;
7934
7935 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
7936 {
7937 if (arg == void_list_node)
7938 is_void = true;
7939 else
7940 canon_argtypes = tree_cons (NULL_TREE,
7941 TYPE_CANONICAL (TREE_VALUE (arg)),
7942 canon_argtypes);
7943 }
7944
7945 canon_argtypes = nreverse (canon_argtypes);
7946 if (is_void)
7947 canon_argtypes = chainon (canon_argtypes, void_list_node);
7948
7949 /* There is a non-canonical type. */
7950 *any_noncanonical_p = true;
7951 return canon_argtypes;
7952 }
7953
7954 /* The canonical argument types are the same as ARGTYPES. */
7955 return argtypes;
7956 }
7957
7958 /* Construct, lay out and return
7959 the type of functions returning type VALUE_TYPE
7960 given arguments of types ARG_TYPES.
7961 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
7962 are data type nodes for the arguments of the function.
7963 If such a type has already been constructed, reuse it. */
7964
7965 tree
7966 build_function_type (tree value_type, tree arg_types)
7967 {
7968 tree t;
7969 hashval_t hashcode = 0;
7970 bool any_structural_p, any_noncanonical_p;
7971 tree canon_argtypes;
7972
7973 if (TREE_CODE (value_type) == FUNCTION_TYPE)
7974 {
7975 error ("function return type cannot be function");
7976 value_type = integer_type_node;
7977 }
7978
7979 /* Make a node of the sort we want. */
7980 t = make_node (FUNCTION_TYPE);
7981 TREE_TYPE (t) = value_type;
7982 TYPE_ARG_TYPES (t) = arg_types;
7983
7984 /* If we already have such a type, use the old one. */
7985 hashcode = iterative_hash_object (TYPE_HASH (value_type), hashcode);
7986 hashcode = type_hash_list (arg_types, hashcode);
7987 t = type_hash_canon (hashcode, t);
7988
7989 /* Set up the canonical type. */
7990 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
7991 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
7992 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
7993 &any_structural_p,
7994 &any_noncanonical_p);
7995 if (any_structural_p)
7996 SET_TYPE_STRUCTURAL_EQUALITY (t);
7997 else if (any_noncanonical_p)
7998 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
7999 canon_argtypes);
8000
8001 if (!COMPLETE_TYPE_P (t))
8002 layout_type (t);
8003 return t;
8004 }
8005
8006 /* Build a function type. The RETURN_TYPE is the type returned by the
8007    function.  If VAARGS is set, no void_type_node is appended to the
8008    list.  ARGP must always be terminated by NULL_TREE.  */
8009
8010 static tree
8011 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8012 {
8013 tree t, args, last;
8014
8015 t = va_arg (argp, tree);
8016 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8017 args = tree_cons (NULL_TREE, t, args);
8018
8019 if (vaargs)
8020 {
8021 last = args;
8022 if (args != NULL_TREE)
8023 args = nreverse (args);
8024 gcc_assert (last != void_list_node);
8025 }
8026 else if (args == NULL_TREE)
8027 args = void_list_node;
8028 else
8029 {
8030 last = args;
8031 args = nreverse (args);
8032 TREE_CHAIN (last) = void_list_node;
8033 }
8034 args = build_function_type (return_type, args);
8035
8036 return args;
8037 }
8038
8039 /* Build a function type. The RETURN_TYPE is the type returned by the
8040 function. If additional arguments are provided, they are
8041 additional argument types. The list of argument types must always
8042 be terminated by NULL_TREE. */
8043
8044 tree
8045 build_function_type_list (tree return_type, ...)
8046 {
8047 tree args;
8048 va_list p;
8049
8050 va_start (p, return_type);
8051 args = build_function_type_list_1 (false, return_type, p);
8052 va_end (p);
8053 return args;
8054 }
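
/* For example (illustrative; fntype is a placeholder name), the type of
   "int f (double, char *)" can be built with

     tree fntype
       = build_function_type_list (integer_type_node, double_type_node,
                                   build_pointer_type (char_type_node),
                                   NULL_TREE);

   The trailing NULL_TREE terminates the variadic call itself, while
   void_list_node is appended internally to mark the argument list as a
   fixed-arity prototype.  */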
8055
8056 /* Build a variable argument function type. The RETURN_TYPE is the
8057 type returned by the function. If additional arguments are provided,
8058 they are additional argument types. The list of argument types must
8059 always be terminated by NULL_TREE. */
8060
8061 tree
8062 build_varargs_function_type_list (tree return_type, ...)
8063 {
8064 tree args;
8065 va_list p;
8066
8067 va_start (p, return_type);
8068 args = build_function_type_list_1 (true, return_type, p);
8069 va_end (p);
8070
8071 return args;
8072 }
8073
8074 /* Build a function type. RETURN_TYPE is the type returned by the
8075 function; VAARGS indicates whether the function takes varargs. The
8076 function takes N named arguments, the types of which are provided in
8077 ARG_TYPES. */
8078
8079 static tree
8080 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8081 tree *arg_types)
8082 {
8083 int i;
8084 tree t = vaargs ? NULL_TREE : void_list_node;
8085
8086 for (i = n - 1; i >= 0; i--)
8087 t = tree_cons (NULL_TREE, arg_types[i], t);
8088
8089 return build_function_type (return_type, t);
8090 }
8091
8092 /* Build a function type. RETURN_TYPE is the type returned by the
8093 function. The function takes N named arguments, the types of which
8094 are provided in ARG_TYPES. */
8095
8096 tree
8097 build_function_type_array (tree return_type, int n, tree *arg_types)
8098 {
8099 return build_function_type_array_1 (false, return_type, n, arg_types);
8100 }
8101
8102 /* Build a variable argument function type. RETURN_TYPE is the type
8103 returned by the function. The function takes N named arguments, the
8104 types of which are provided in ARG_TYPES. */
8105
8106 tree
8107 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8108 {
8109 return build_function_type_array_1 (true, return_type, n, arg_types);
8110 }
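
/* Illustrative sketch of the array-based variant (argtv and fntype are
   placeholder names):

     tree argtv[2] = { double_type_node, integer_type_node };
     tree fntype = build_function_type_array (integer_type_node, 2, argtv);

   This yields the same hash-consed node as the equivalent
   build_function_type_list call; build_varargs_function_type_array omits
   the trailing void_list_node so the resulting type also accepts "...".  */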
8111
8112 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8113    and ARGTYPES (a TREE_LIST) are the return type and argument types
8114 for the method. An implicit additional parameter (of type
8115 pointer-to-BASETYPE) is added to the ARGTYPES. */
8116
8117 tree
8118 build_method_type_directly (tree basetype,
8119 tree rettype,
8120 tree argtypes)
8121 {
8122 tree t;
8123 tree ptype;
8124 int hashcode = 0;
8125 bool any_structural_p, any_noncanonical_p;
8126 tree canon_argtypes;
8127
8128 /* Make a node of the sort we want. */
8129 t = make_node (METHOD_TYPE);
8130
8131 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8132 TREE_TYPE (t) = rettype;
8133 ptype = build_pointer_type (basetype);
8134
8135 /* The actual arglist for this function includes a "hidden" argument
8136 which is "this". Put it into the list of argument types. */
8137 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8138 TYPE_ARG_TYPES (t) = argtypes;
8139
8140 /* If we already have such a type, use the old one. */
8141 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8142 hashcode = iterative_hash_object (TYPE_HASH (rettype), hashcode);
8143 hashcode = type_hash_list (argtypes, hashcode);
8144 t = type_hash_canon (hashcode, t);
8145
8146 /* Set up the canonical type. */
8147 any_structural_p
8148 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8149 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8150 any_noncanonical_p
8151 = (TYPE_CANONICAL (basetype) != basetype
8152 || TYPE_CANONICAL (rettype) != rettype);
8153 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8154 &any_structural_p,
8155 &any_noncanonical_p);
8156 if (any_structural_p)
8157 SET_TYPE_STRUCTURAL_EQUALITY (t);
8158 else if (any_noncanonical_p)
8159 TYPE_CANONICAL (t)
8160 = build_method_type_directly (TYPE_CANONICAL (basetype),
8161 TYPE_CANONICAL (rettype),
8162 canon_argtypes);
8163 if (!COMPLETE_TYPE_P (t))
8164 layout_type (t);
8165
8166 return t;
8167 }
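
/* Illustrative sketch: for a member function "int C::f (double)", where
   class_type is some RECORD_TYPE standing in for C, a front end could
   build

     tree argtypes = tree_cons (NULL_TREE, double_type_node, void_list_node);
     tree mtype = build_method_type_directly (class_type, integer_type_node,
                                              argtypes);

   The resulting METHOD_TYPE gains the implicit leading "this" argument of
   type pointer-to-class_type, as described above.  */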
8168
8169 /* Construct, lay out and return the type of methods belonging to class
8170 BASETYPE and whose arguments and values are described by TYPE.
8171 If that type exists already, reuse it.
8172 TYPE must be a FUNCTION_TYPE node. */
8173
8174 tree
8175 build_method_type (tree basetype, tree type)
8176 {
8177 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8178
8179 return build_method_type_directly (basetype,
8180 TREE_TYPE (type),
8181 TYPE_ARG_TYPES (type));
8182 }
8183
8184 /* Construct, lay out and return the type of offsets to a value
8185 of type TYPE, within an object of type BASETYPE.
8186 If a suitable offset type exists already, reuse it. */
8187
8188 tree
8189 build_offset_type (tree basetype, tree type)
8190 {
8191 tree t;
8192 hashval_t hashcode = 0;
8193
8194 /* Make a node of the sort we want. */
8195 t = make_node (OFFSET_TYPE);
8196
8197 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8198 TREE_TYPE (t) = type;
8199
8200 /* If we already have such a type, use the old one. */
8201 hashcode = iterative_hash_object (TYPE_HASH (basetype), hashcode);
8202 hashcode = iterative_hash_object (TYPE_HASH (type), hashcode);
8203 t = type_hash_canon (hashcode, t);
8204
8205 if (!COMPLETE_TYPE_P (t))
8206 layout_type (t);
8207
8208 if (TYPE_CANONICAL (t) == t)
8209 {
8210 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8211 || TYPE_STRUCTURAL_EQUALITY_P (type))
8212 SET_TYPE_STRUCTURAL_EQUALITY (t);
8213 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8214 || TYPE_CANONICAL (type) != type)
8215 TYPE_CANONICAL (t)
8216 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8217 TYPE_CANONICAL (type));
8218 }
8219
8220 return t;
8221 }
8222
8223 /* Create a complex type whose components are COMPONENT_TYPE. */
8224
8225 tree
8226 build_complex_type (tree component_type)
8227 {
8228 tree t;
8229 hashval_t hashcode;
8230
8231 gcc_assert (INTEGRAL_TYPE_P (component_type)
8232 || SCALAR_FLOAT_TYPE_P (component_type)
8233 || FIXED_POINT_TYPE_P (component_type));
8234
8235 /* Make a node of the sort we want. */
8236 t = make_node (COMPLEX_TYPE);
8237
8238 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8239
8240 /* If we already have such a type, use the old one. */
8241 hashcode = iterative_hash_object (TYPE_HASH (component_type), 0);
8242 t = type_hash_canon (hashcode, t);
8243
8244 if (!COMPLETE_TYPE_P (t))
8245 layout_type (t);
8246
8247 if (TYPE_CANONICAL (t) == t)
8248 {
8249 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8250 SET_TYPE_STRUCTURAL_EQUALITY (t);
8251 else if (TYPE_CANONICAL (component_type) != component_type)
8252 TYPE_CANONICAL (t)
8253 = build_complex_type (TYPE_CANONICAL (component_type));
8254 }
8255
8256 /* We need to create a name, since complex is a fundamental type. */
8257 if (! TYPE_NAME (t))
8258 {
8259 const char *name;
8260 if (component_type == char_type_node)
8261 name = "complex char";
8262 else if (component_type == signed_char_type_node)
8263 name = "complex signed char";
8264 else if (component_type == unsigned_char_type_node)
8265 name = "complex unsigned char";
8266 else if (component_type == short_integer_type_node)
8267 name = "complex short int";
8268 else if (component_type == short_unsigned_type_node)
8269 name = "complex short unsigned int";
8270 else if (component_type == integer_type_node)
8271 name = "complex int";
8272 else if (component_type == unsigned_type_node)
8273 name = "complex unsigned int";
8274 else if (component_type == long_integer_type_node)
8275 name = "complex long int";
8276 else if (component_type == long_unsigned_type_node)
8277 name = "complex long unsigned int";
8278 else if (component_type == long_long_integer_type_node)
8279 name = "complex long long int";
8280 else if (component_type == long_long_unsigned_type_node)
8281 name = "complex long long unsigned int";
8282 else
8283 name = 0;
8284
8285 if (name != 0)
8286 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8287 get_identifier (name), t);
8288 }
8289
8290 return build_qualified_type (t, TYPE_QUALS (component_type));
8291 }
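
/* For example (illustrative),

     tree cdouble = build_complex_type (double_type_node);

   yields the type "complex double"; since the node is hash-consed through
   type_hash_canon, repeated calls return the same shared node.  */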
8292
8293 /* If TYPE is a real or complex floating-point type and the target
8294 does not directly support arithmetic on TYPE then return the wider
8295 type to be used for arithmetic on TYPE. Otherwise, return
8296 NULL_TREE. */
8297
8298 tree
8299 excess_precision_type (tree type)
8300 {
8301 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8302 {
8303 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8304 switch (TREE_CODE (type))
8305 {
8306 case REAL_TYPE:
8307 switch (flt_eval_method)
8308 {
8309 case 1:
8310 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8311 return double_type_node;
8312 break;
8313 case 2:
8314 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8315 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8316 return long_double_type_node;
8317 break;
8318 default:
8319 gcc_unreachable ();
8320 }
8321 break;
8322 case COMPLEX_TYPE:
8323 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8324 return NULL_TREE;
8325 switch (flt_eval_method)
8326 {
8327 case 1:
8328 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8329 return complex_double_type_node;
8330 break;
8331 case 2:
8332 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8333 || (TYPE_MODE (TREE_TYPE (type))
8334 == TYPE_MODE (double_type_node)))
8335 return complex_long_double_type_node;
8336 break;
8337 default:
8338 gcc_unreachable ();
8339 }
8340 break;
8341 default:
8342 break;
8343 }
8344 }
8345 return NULL_TREE;
8346 }
8347 \f
8348 /* Return OP, stripped of any conversions to wider types as much as is safe.
8349 Converting the value back to OP's type makes a value equivalent to OP.
8350
8351 If FOR_TYPE is nonzero, we return a value which, if converted to
8352 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8353
8354 OP must have integer, real or enumeral type. Pointers are not allowed!
8355
8356 There are some cases where the obvious value we could return
8357 would regenerate to OP if converted to OP's type,
8358 but would not extend like OP to wider types.
8359 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8360 For example, if OP is (unsigned short)(signed char)-1,
8361 we avoid returning (signed char)-1 if FOR_TYPE is int,
8362 even though extending that to an unsigned short would regenerate OP,
8363 since the result of extending (signed char)-1 to (int)
8364 is different from (int) OP. */
8365
8366 tree
8367 get_unwidened (tree op, tree for_type)
8368 {
8369 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8370 tree type = TREE_TYPE (op);
8371 unsigned final_prec
8372 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8373 int uns
8374 = (for_type != 0 && for_type != type
8375 && final_prec > TYPE_PRECISION (type)
8376 && TYPE_UNSIGNED (type));
8377 tree win = op;
8378
8379 while (CONVERT_EXPR_P (op))
8380 {
8381 int bitschange;
8382
8383 /* TYPE_PRECISION on vector types has different meaning
8384 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8385 so avoid them here. */
8386 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8387 break;
8388
8389 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8390 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8391
8392 /* Truncations are many-one so cannot be removed.
8393 Unless we are later going to truncate down even farther. */
8394 if (bitschange < 0
8395 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8396 break;
8397
8398 /* See what's inside this conversion. If we decide to strip it,
8399 we will set WIN. */
8400 op = TREE_OPERAND (op, 0);
8401
8402 /* If we have not stripped any zero-extensions (uns is 0),
8403 we can strip any kind of extension.
8404 If we have previously stripped a zero-extension,
8405 only zero-extensions can safely be stripped.
8406 Any extension can be stripped if the bits it would produce
8407 are all going to be discarded later by truncating to FOR_TYPE. */
8408
8409 if (bitschange > 0)
8410 {
8411 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8412 win = op;
8413 /* TYPE_UNSIGNED says whether this is a zero-extension.
8414 Let's avoid computing it if it does not affect WIN
8415 and if UNS will not be needed again. */
8416 if ((uns
8417 || CONVERT_EXPR_P (op))
8418 && TYPE_UNSIGNED (TREE_TYPE (op)))
8419 {
8420 uns = 1;
8421 win = op;
8422 }
8423 }
8424 }
8425
8426 /* If we finally reach a constant see if it fits in for_type and
8427 in that case convert it. */
8428 if (for_type
8429 && TREE_CODE (win) == INTEGER_CST
8430 && TREE_TYPE (win) != for_type
8431 && int_fits_type_p (win, for_type))
8432 win = fold_convert (for_type, win);
8433
8434 return win;
8435 }
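
/* Illustrative use (op and c are placeholders): if OP is the tree for
   (unsigned short) c, where c has type signed char, then

     tree narrowed = get_unwidened (op, NULL_TREE);

   strips the widening conversion and returns c itself.  Passing a wider
   FOR_TYPE instead lets a caller that intends to widen the result further
   refuse strips that would change the extension, as explained above.  */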
8436 \f
8437 /* Return OP or a simpler expression for a narrower value
8438 which can be sign-extended or zero-extended to give back OP.
8439 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8440 or 0 if the value should be sign-extended. */
8441
8442 tree
8443 get_narrower (tree op, int *unsignedp_ptr)
8444 {
8445 int uns = 0;
8446 int first = 1;
8447 tree win = op;
8448 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8449
8450 while (TREE_CODE (op) == NOP_EXPR)
8451 {
8452 int bitschange
8453 = (TYPE_PRECISION (TREE_TYPE (op))
8454 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8455
8456 /* Truncations are many-one so cannot be removed. */
8457 if (bitschange < 0)
8458 break;
8459
8460 /* See what's inside this conversion. If we decide to strip it,
8461 we will set WIN. */
8462
8463 if (bitschange > 0)
8464 {
8465 op = TREE_OPERAND (op, 0);
8466 /* An extension: the outermost one can be stripped,
8467 but remember whether it is zero or sign extension. */
8468 if (first)
8469 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8470 /* Otherwise, if a sign extension has been stripped,
8471 only sign extensions can now be stripped;
8472 if a zero extension has been stripped, only zero-extensions. */
8473 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8474 break;
8475 first = 0;
8476 }
8477 else /* bitschange == 0 */
8478 {
8479 /* A change in nominal type can always be stripped, but we must
8480 preserve the unsignedness. */
8481 if (first)
8482 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8483 first = 0;
8484 op = TREE_OPERAND (op, 0);
8485 /* Keep trying to narrow, but don't assign op to win if it
8486 would turn an integral type into something else. */
8487 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8488 continue;
8489 }
8490
8491 win = op;
8492 }
8493
8494 if (TREE_CODE (op) == COMPONENT_REF
8495 /* Since type_for_size always gives an integer type. */
8496 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8497 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8498 /* Ensure field is laid out already. */
8499 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8500 && host_integerp (DECL_SIZE (TREE_OPERAND (op, 1)), 1))
8501 {
8502 unsigned HOST_WIDE_INT innerprec
8503 = tree_low_cst (DECL_SIZE (TREE_OPERAND (op, 1)), 1);
8504 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8505 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8506 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8507
8508 /* We can get this structure field in a narrower type that fits it,
8509 but the resulting extension to its nominal type (a fullword type)
8510 must satisfy the same conditions as for other extensions.
8511
8512 Do this only for fields that are aligned (not bit-fields),
8513 because when bit-field insns will be used there is no
8514 advantage in doing this. */
8515
8516 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8517 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8518 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8519 && type != 0)
8520 {
8521 if (first)
8522 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8523 win = fold_convert (type, op);
8524 }
8525 }
8526
8527 *unsignedp_ptr = uns;
8528 return win;
8529 }
8530 \f
8531 /* Returns true if integer constant C has a value that is permissible
8532 for type TYPE (an INTEGER_TYPE). */
8533
8534 bool
8535 int_fits_type_p (const_tree c, const_tree type)
8536 {
8537 tree type_low_bound, type_high_bound;
8538 bool ok_for_low_bound, ok_for_high_bound, unsc;
8539 double_int dc, dd;
8540
8541 dc = tree_to_double_int (c);
8542 unsc = TYPE_UNSIGNED (TREE_TYPE (c));
8543
8544 retry:
8545 type_low_bound = TYPE_MIN_VALUE (type);
8546 type_high_bound = TYPE_MAX_VALUE (type);
8547
8548 /* If at least one bound of the type is a constant integer, we can check
8549 ourselves and maybe make a decision. If no such decision is possible, but
8550 this type is a subtype, try checking against that. Otherwise, use
8551 double_int_fits_to_tree_p, which checks against the precision.
8552
8553      Compute the status for each possibly constant bound, returning false as
8554      soon as a constant bound is known not to be satisfied.  The
8555      ok_for_xxx_bound flags record whether the corresponding bound was a
8556      constant and the constant was seen to fit within it.  */
8557
8558 /* Check if c >= type_low_bound. */
8559 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8560 {
8561 dd = tree_to_double_int (type_low_bound);
8562 if (unsc != TYPE_UNSIGNED (TREE_TYPE (type_low_bound)))
8563 {
8564 int c_neg = (!unsc && dc.is_negative ());
8565 int t_neg = (unsc && dd.is_negative ());
8566
8567 if (c_neg && !t_neg)
8568 return false;
8569 if ((c_neg || !t_neg) && dc.ult (dd))
8570 return false;
8571 }
8572 else if (dc.cmp (dd, unsc) < 0)
8573 return false;
8574 ok_for_low_bound = true;
8575 }
8576 else
8577 ok_for_low_bound = false;
8578
8579 /* Check if c <= type_high_bound. */
8580 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8581 {
8582 dd = tree_to_double_int (type_high_bound);
8583 if (unsc != TYPE_UNSIGNED (TREE_TYPE (type_high_bound)))
8584 {
8585 int c_neg = (!unsc && dc.is_negative ());
8586 int t_neg = (unsc && dd.is_negative ());
8587
8588 if (t_neg && !c_neg)
8589 return false;
8590 if ((t_neg || !c_neg) && dc.ugt (dd))
8591 return false;
8592 }
8593 else if (dc.cmp (dd, unsc) > 0)
8594 return false;
8595 ok_for_high_bound = true;
8596 }
8597 else
8598 ok_for_high_bound = false;
8599
8600 /* If the constant fits both bounds, the result is known. */
8601 if (ok_for_low_bound && ok_for_high_bound)
8602 return true;
8603
8604 /* Perform some generic filtering which may allow making a decision
8605 even if the bounds are not constant. First, negative integers
8606      never fit in unsigned types.  */
8607 if (TYPE_UNSIGNED (type) && !unsc && dc.is_negative ())
8608 return false;
8609
8610 /* Second, narrower types always fit in wider ones. */
8611 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8612 return true;
8613
8614 /* Third, unsigned integers with top bit set never fit signed types. */
8615 if (! TYPE_UNSIGNED (type) && unsc)
8616 {
8617 int prec = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (c))) - 1;
8618 if (prec < HOST_BITS_PER_WIDE_INT)
8619 {
8620 if (((((unsigned HOST_WIDE_INT) 1) << prec) & dc.low) != 0)
8621 return false;
8622 }
8623 else if (((((unsigned HOST_WIDE_INT) 1)
8624 << (prec - HOST_BITS_PER_WIDE_INT)) & dc.high) != 0)
8625 return false;
8626 }
8627
8628   /* If we haven't been able to decide at this point, there is nothing more we
8629 can check ourselves here. Look at the base type if we have one and it
8630 has the same precision. */
8631 if (TREE_CODE (type) == INTEGER_TYPE
8632 && TREE_TYPE (type) != 0
8633 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8634 {
8635 type = TREE_TYPE (type);
8636 goto retry;
8637 }
8638
8639 /* Or to double_int_fits_to_tree_p, if nothing else. */
8640 return double_int_fits_to_tree_p (type, dc);
8641 }
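
/* For example (illustrative),

     int_fits_type_p (build_int_cst (integer_type_node, 300),
                      unsigned_char_type_node)

   returns false because 300 exceeds the constant high bound 255, while the
   same call with the value 200 returns true.  */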
8642
8643 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8644 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8645 represented (assuming two's-complement arithmetic) within the bit
8646 precision of the type are returned instead. */
8647
8648 void
8649 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8650 {
8651 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8652 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8653 mpz_set_double_int (min, tree_to_double_int (TYPE_MIN_VALUE (type)),
8654 TYPE_UNSIGNED (type));
8655 else
8656 {
8657 if (TYPE_UNSIGNED (type))
8658 mpz_set_ui (min, 0);
8659 else
8660 {
8661 double_int mn;
8662 mn = double_int::mask (TYPE_PRECISION (type) - 1);
8663 mn = (mn + double_int_one).sext (TYPE_PRECISION (type));
8664 mpz_set_double_int (min, mn, false);
8665 }
8666 }
8667
8668 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8669 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8670 mpz_set_double_int (max, tree_to_double_int (TYPE_MAX_VALUE (type)),
8671 TYPE_UNSIGNED (type));
8672 else
8673 {
8674 if (TYPE_UNSIGNED (type))
8675 mpz_set_double_int (max, double_int::mask (TYPE_PRECISION (type)),
8676 true);
8677 else
8678 mpz_set_double_int (max, double_int::mask (TYPE_PRECISION (type) - 1),
8679 true);
8680 }
8681 }
8682
8683 /* Return true if VAR is an automatic variable defined in function FN. */
8684
8685 bool
8686 auto_var_in_fn_p (const_tree var, const_tree fn)
8687 {
8688 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8689 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8690 || TREE_CODE (var) == PARM_DECL)
8691 && ! TREE_STATIC (var))
8692 || TREE_CODE (var) == LABEL_DECL
8693 || TREE_CODE (var) == RESULT_DECL));
8694 }
8695
8696 /* Subprogram of following function. Called by walk_tree.
8697
8698 Return *TP if it is an automatic variable or parameter of the
8699 function passed in as DATA. */
8700
8701 static tree
8702 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8703 {
8704 tree fn = (tree) data;
8705
8706 if (TYPE_P (*tp))
8707 *walk_subtrees = 0;
8708
8709 else if (DECL_P (*tp)
8710 && auto_var_in_fn_p (*tp, fn))
8711 return *tp;
8712
8713 return NULL_TREE;
8714 }
8715
8716 /* Returns true if T is, contains, or refers to a type with variable
8717 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8718 arguments, but not the return type. If FN is nonzero, only return
8719 true if a modifier of the type or position of FN is a variable or
8720 parameter inside FN.
8721
8722 This concept is more general than that of C99 'variably modified types':
8723 in C99, a struct type is never variably modified because a VLA may not
8724 appear as a structure member. However, in GNU C code like:
8725
8726 struct S { int i[f()]; };
8727
8728 is valid, and other languages may define similar constructs. */
8729
8730 bool
8731 variably_modified_type_p (tree type, tree fn)
8732 {
8733 tree t;
8734
8735 /* Test if T is either variable (if FN is zero) or an expression containing
8736 a variable in FN. If TYPE isn't gimplified, return true also if
8737 gimplify_one_sizepos would gimplify the expression into a local
8738 variable. */
8739 #define RETURN_TRUE_IF_VAR(T) \
8740 do { tree _t = (T); \
8741 if (_t != NULL_TREE \
8742 && _t != error_mark_node \
8743 && TREE_CODE (_t) != INTEGER_CST \
8744 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8745 && (!fn \
8746 || (!TYPE_SIZES_GIMPLIFIED (type) \
8747 && !is_gimple_sizepos (_t)) \
8748 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8749 return true; } while (0)
8750
8751 if (type == error_mark_node)
8752 return false;
8753
8754 /* If TYPE itself has variable size, it is variably modified. */
8755 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8756 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8757
8758 switch (TREE_CODE (type))
8759 {
8760 case POINTER_TYPE:
8761 case REFERENCE_TYPE:
8762 case VECTOR_TYPE:
8763 if (variably_modified_type_p (TREE_TYPE (type), fn))
8764 return true;
8765 break;
8766
8767 case FUNCTION_TYPE:
8768 case METHOD_TYPE:
8769 /* If TYPE is a function type, it is variably modified if the
8770 return type is variably modified. */
8771 if (variably_modified_type_p (TREE_TYPE (type), fn))
8772 return true;
8773 break;
8774
8775 case INTEGER_TYPE:
8776 case REAL_TYPE:
8777 case FIXED_POINT_TYPE:
8778 case ENUMERAL_TYPE:
8779 case BOOLEAN_TYPE:
8780 /* Scalar types are variably modified if their end points
8781 aren't constant. */
8782 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8783 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8784 break;
8785
8786 case RECORD_TYPE:
8787 case UNION_TYPE:
8788 case QUAL_UNION_TYPE:
8789 /* We can't see if any of the fields are variably-modified by the
8790 definition we normally use, since that would produce infinite
8791 recursion via pointers. */
8792 /* This is variably modified if some field's type is. */
8793 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8794 if (TREE_CODE (t) == FIELD_DECL)
8795 {
8796 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8797 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8798 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8799
8800 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8801 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8802 }
8803 break;
8804
8805 case ARRAY_TYPE:
8806 /* Do not call ourselves to avoid infinite recursion. This is
8807 variably modified if the element type is. */
8808 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8809 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8810 break;
8811
8812 default:
8813 break;
8814 }
8815
8816 /* The current language may have other cases to check, but in general,
8817 all other types are not variably modified. */
8818 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8819
8820 #undef RETURN_TRUE_IF_VAR
8821 }
8822
8823 /* Given a DECL or TYPE, return the scope in which it was declared, or
8824 NULL_TREE if there is no containing scope. */
8825
8826 tree
8827 get_containing_scope (const_tree t)
8828 {
8829 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8830 }
8831
8832 /* Return the innermost context enclosing DECL that is
8833 a FUNCTION_DECL, or zero if none. */
8834
8835 tree
8836 decl_function_context (const_tree decl)
8837 {
8838 tree context;
8839
8840 if (TREE_CODE (decl) == ERROR_MARK)
8841 return 0;
8842
8843 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8844 where we look up the function at runtime. Such functions always take
8845 a first argument of type 'pointer to real context'.
8846
8847 C++ should really be fixed to use DECL_CONTEXT for the real context,
8848 and use something else for the "virtual context". */
8849 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8850 context
8851 = TYPE_MAIN_VARIANT
8852 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8853 else
8854 context = DECL_CONTEXT (decl);
8855
8856 while (context && TREE_CODE (context) != FUNCTION_DECL)
8857 {
8858 if (TREE_CODE (context) == BLOCK)
8859 context = BLOCK_SUPERCONTEXT (context);
8860 else
8861 context = get_containing_scope (context);
8862 }
8863
8864 return context;
8865 }
8866
8867 /* Return the innermost context enclosing DECL that is
8868 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8869 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8870
8871 tree
8872 decl_type_context (const_tree decl)
8873 {
8874 tree context = DECL_CONTEXT (decl);
8875
8876 while (context)
8877 switch (TREE_CODE (context))
8878 {
8879 case NAMESPACE_DECL:
8880 case TRANSLATION_UNIT_DECL:
8881 return NULL_TREE;
8882
8883 case RECORD_TYPE:
8884 case UNION_TYPE:
8885 case QUAL_UNION_TYPE:
8886 return context;
8887
8888 case TYPE_DECL:
8889 case FUNCTION_DECL:
8890 context = DECL_CONTEXT (context);
8891 break;
8892
8893 case BLOCK:
8894 context = BLOCK_SUPERCONTEXT (context);
8895 break;
8896
8897 default:
8898 gcc_unreachable ();
8899 }
8900
8901 return NULL_TREE;
8902 }
8903
8904 /* CALL is a CALL_EXPR. Return the declaration for the function
8905 called, or NULL_TREE if the called function cannot be
8906 determined. */
8907
8908 tree
8909 get_callee_fndecl (const_tree call)
8910 {
8911 tree addr;
8912
8913 if (call == error_mark_node)
8914 return error_mark_node;
8915
8916 /* It's invalid to call this function with anything but a
8917 CALL_EXPR. */
8918 gcc_assert (TREE_CODE (call) == CALL_EXPR);
8919
8920 /* The first operand to the CALL is the address of the function
8921 called. */
8922 addr = CALL_EXPR_FN (call);
8923
8924 STRIP_NOPS (addr);
8925
8926 /* If this is a readonly function pointer, extract its initial value. */
8927 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
8928 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
8929 && DECL_INITIAL (addr))
8930 addr = DECL_INITIAL (addr);
8931
8932 /* If the address is just `&f' for some function `f', then we know
8933 that `f' is being called. */
8934 if (TREE_CODE (addr) == ADDR_EXPR
8935 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
8936 return TREE_OPERAND (addr, 0);
8937
8938 /* We couldn't figure out what was being called. */
8939 return NULL_TREE;
8940 }
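
/* Illustrative use, where call_expr is some CALL_EXPR being examined:

     tree fndecl = get_callee_fndecl (call_expr);
     bool known_callee = (fndecl != NULL_TREE
                          && TREE_CODE (fndecl) == FUNCTION_DECL);

   A NULL_TREE result means the call goes through a pointer whose target
   could not be determined statically.  */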
8941
8942 /* Print debugging information about tree nodes generated during the compile,
8943 and any language-specific information. */
8944
8945 void
8946 dump_tree_statistics (void)
8947 {
8948 if (GATHER_STATISTICS)
8949 {
8950 int i;
8951 int total_nodes, total_bytes;
8952 fprintf (stderr, "Kind Nodes Bytes\n");
8953 fprintf (stderr, "---------------------------------------\n");
8954 total_nodes = total_bytes = 0;
8955 for (i = 0; i < (int) all_kinds; i++)
8956 {
8957 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
8958 tree_node_counts[i], tree_node_sizes[i]);
8959 total_nodes += tree_node_counts[i];
8960 total_bytes += tree_node_sizes[i];
8961 }
8962 fprintf (stderr, "---------------------------------------\n");
8963 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
8964 fprintf (stderr, "---------------------------------------\n");
8965 fprintf (stderr, "Code Nodes\n");
8966 fprintf (stderr, "----------------------------\n");
8967 for (i = 0; i < (int) MAX_TREE_CODES; i++)
8968 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
8969 tree_code_counts[i]);
8970 fprintf (stderr, "----------------------------\n");
8971 ssanames_print_statistics ();
8972 phinodes_print_statistics ();
8973 }
8974 else
8975 fprintf (stderr, "(No per-node statistics)\n");
8976
8977 print_type_hash_statistics ();
8978 print_debug_expr_statistics ();
8979 print_value_expr_statistics ();
8980 lang_hooks.print_statistics ();
8981 }
8982 \f
8983 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
8984
8985 /* Generate a crc32 of a byte. */
8986
8987 static unsigned
8988 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
8989 {
8990 unsigned ix;
8991
8992 for (ix = bits; ix--; value <<= 1)
8993 {
8994 unsigned feedback;
8995
8996 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
8997 chksum <<= 1;
8998 chksum ^= feedback;
8999 }
9000 return chksum;
9001 }
9002
9003 /* Generate a crc32 of a 32-bit unsigned. */
9004
9005 unsigned
9006 crc32_unsigned (unsigned chksum, unsigned value)
9007 {
9008 return crc32_unsigned_bits (chksum, value, 32);
9009 }
9010
9011 /* Generate a crc32 of a byte. */
9012
9013 unsigned
9014 crc32_byte (unsigned chksum, char byte)
9015 {
9016 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9017 }
9018
9019 /* Generate a crc32 of a string. */
9020
9021 unsigned
9022 crc32_string (unsigned chksum, const char *string)
9023 {
9024 do
9025 {
9026 chksum = crc32_byte (chksum, *string);
9027 }
9028 while (*string++);
9029 return chksum;
9030 }
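
/* For example (illustrative), checksums can be chained across strings:

     unsigned chk = crc32_string (0, "foo");
     chk = crc32_string (chk, "bar");

   Note that the terminating NUL of each string is folded into the
   checksum too, because the loop hashes *string before testing it.  */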
9031
9032 /* P is a string that will be used in a symbol. Mask out any characters
9033 that are not valid in that context. */
9034
9035 void
9036 clean_symbol_name (char *p)
9037 {
9038 for (; *p; p++)
9039 if (! (ISALNUM (*p)
9040 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9041 || *p == '$'
9042 #endif
9043 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9044 || *p == '.'
9045 #endif
9046 ))
9047 *p = '_';
9048 }
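
/* Illustrative example (buf is a placeholder):

     char buf[] = "my-file.c";
     clean_symbol_name (buf);

   leaves "my_file_c" on targets where NO_DOT_IN_LABEL is defined; when
   '.' is permitted in labels the dot is preserved and only the '-'
   becomes '_'.  */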
9049
9050 /* Generate a name for a special-purpose function.
9051 The generated name may need to be unique across the whole link.
9052 Changes to this function may also require corresponding changes to
9053 xstrdup_mask_random.
9054 TYPE is some string to identify the purpose of this function to the
9055 linker or collect2; it must start with an uppercase letter,
9056 one of:
9057 I - for constructors
9058 D - for destructors
9059 N - for C++ anonymous namespaces
9060 F - for DWARF unwind frame information. */
9061
9062 tree
9063 get_file_function_name (const char *type)
9064 {
9065 char *buf;
9066 const char *p;
9067 char *q;
9068
9069 /* If we already have a name we know to be unique, just use that. */
9070 if (first_global_object_name)
9071 p = q = ASTRDUP (first_global_object_name);
9072 /* If the target is handling the constructors/destructors, they
9073 will be local to this file and the name is only necessary for
9074 debugging purposes.
9075      We also assign sub_I and sub_D suffixes to constructors called from
9076 the global static constructors. These are always local. */
9077 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9078 || (strncmp (type, "sub_", 4) == 0
9079 && (type[4] == 'I' || type[4] == 'D')))
9080 {
9081 const char *file = main_input_filename;
9082 if (! file)
9083 file = input_filename;
9084 /* Just use the file's basename, because the full pathname
9085 might be quite long. */
9086 p = q = ASTRDUP (lbasename (file));
9087 }
9088 else
9089 {
9090 /* Otherwise, the name must be unique across the entire link.
9091 We don't have anything that we know to be unique to this translation
9092 unit, so use what we do have and throw in some randomness. */
9093 unsigned len;
9094 const char *name = weak_global_object_name;
9095 const char *file = main_input_filename;
9096
9097 if (! name)
9098 name = "";
9099 if (! file)
9100 file = input_filename;
9101
9102 len = strlen (file);
9103 q = (char *) alloca (9 + 17 + len + 1);
9104 memcpy (q, file, len + 1);
9105
9106 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9107 crc32_string (0, name), get_random_seed (false));
9108
9109 p = q;
9110 }
9111
9112 clean_symbol_name (q);
9113 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9114 + strlen (type));
9115
9116 /* Set up the name of the file-level functions we may need.
9117 Use a global object (which is already required to be unique over
9118 the program) rather than the file name (which imposes extra
9119 constraints). */
9120 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9121
9122 return get_identifier (buf);
9123 }
9124 \f
9125 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9126
9127 /* Complain that the tree code of NODE does not match the expected 0
9128 terminated list of trailing codes. The trailing code list can be
9129 empty, for a more vague error message. FILE, LINE, and FUNCTION
9130 are of the caller. */
9131
9132 void
9133 tree_check_failed (const_tree node, const char *file,
9134 int line, const char *function, ...)
9135 {
9136 va_list args;
9137 const char *buffer;
9138 unsigned length = 0;
9139 enum tree_code code;
9140
9141 va_start (args, function);
9142 while ((code = (enum tree_code) va_arg (args, int)))
9143 length += 4 + strlen (get_tree_code_name (code));
9144 va_end (args);
9145 if (length)
9146 {
9147 char *tmp;
9148 va_start (args, function);
9149 length += strlen ("expected ");
9150 buffer = tmp = (char *) alloca (length);
9151 length = 0;
9152 while ((code = (enum tree_code) va_arg (args, int)))
9153 {
9154 const char *prefix = length ? " or " : "expected ";
9155
9156 strcpy (tmp + length, prefix);
9157 length += strlen (prefix);
9158 strcpy (tmp + length, get_tree_code_name (code));
9159 length += strlen (get_tree_code_name (code));
9160 }
9161 va_end (args);
9162 }
9163 else
9164 buffer = "unexpected node";
9165
9166 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9167 buffer, get_tree_code_name (TREE_CODE (node)),
9168 function, trim_filename (file), line);
9169 }
9170
9171 /* Complain that the tree code of NODE does match the expected 0
9172 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9173 the caller. */
9174
9175 void
9176 tree_not_check_failed (const_tree node, const char *file,
9177 int line, const char *function, ...)
9178 {
9179 va_list args;
9180 char *buffer;
9181 unsigned length = 0;
9182 enum tree_code code;
9183
9184 va_start (args, function);
9185 while ((code = (enum tree_code) va_arg (args, int)))
9186 length += 4 + strlen (get_tree_code_name (code));
9187 va_end (args);
9188 va_start (args, function);
9189 buffer = (char *) alloca (length);
9190 length = 0;
9191 while ((code = (enum tree_code) va_arg (args, int)))
9192 {
9193 if (length)
9194 {
9195 strcpy (buffer + length, " or ");
9196 length += 4;
9197 }
9198 strcpy (buffer + length, get_tree_code_name (code));
9199 length += strlen (get_tree_code_name (code));
9200 }
9201 va_end (args);
9202
9203 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9204 buffer, get_tree_code_name (TREE_CODE (node)),
9205 function, trim_filename (file), line);
9206 }
9207
9208 /* Similar to tree_check_failed, except that we check for a class of tree
9209 code, given in CL. */
9210
9211 void
9212 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9213 const char *file, int line, const char *function)
9214 {
9215 internal_error
9216 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9217 TREE_CODE_CLASS_STRING (cl),
9218 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9219 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9220 }
9221
9222 /* Similar to tree_check_failed, except that instead of specifying a
9223 dozen codes, use the knowledge that they're all sequential. */
9224
9225 void
9226 tree_range_check_failed (const_tree node, const char *file, int line,
9227 const char *function, enum tree_code c1,
9228 enum tree_code c2)
9229 {
9230 char *buffer;
9231 unsigned length = 0;
9232 unsigned int c;
9233
9234 for (c = c1; c <= c2; ++c)
9235 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9236
9237 length += strlen ("expected ");
9238 buffer = (char *) alloca (length);
9239 length = 0;
9240
9241 for (c = c1; c <= c2; ++c)
9242 {
9243 const char *prefix = length ? " or " : "expected ";
9244
9245 strcpy (buffer + length, prefix);
9246 length += strlen (prefix);
9247 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9248 length += strlen (get_tree_code_name ((enum tree_code) c));
9249 }
9250
9251 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9252 buffer, get_tree_code_name (TREE_CODE (node)),
9253 function, trim_filename (file), line);
9254 }
9255
9256
9257 /* Similar to tree_check_failed, except that we check that a tree does
9258 not have the specified code, given in CL. */
9259
9260 void
9261 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9262 const char *file, int line, const char *function)
9263 {
9264 internal_error
9265 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9266 TREE_CODE_CLASS_STRING (cl),
9267 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9268 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9269 }
9270
9271
9272 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9273
9274 void
9275 omp_clause_check_failed (const_tree node, const char *file, int line,
9276 const char *function, enum omp_clause_code code)
9277 {
9278 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9279 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9280 function, trim_filename (file), line);
9281 }
9282
9283
9284 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9285
9286 void
9287 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9288 const char *function, enum omp_clause_code c1,
9289 enum omp_clause_code c2)
9290 {
9291 char *buffer;
9292 unsigned length = 0;
9293 unsigned int c;
9294
9295 for (c = c1; c <= c2; ++c)
9296 length += 4 + strlen (omp_clause_code_name[c]);
9297
9298 length += strlen ("expected ");
9299 buffer = (char *) alloca (length);
9300 length = 0;
9301
9302 for (c = c1; c <= c2; ++c)
9303 {
9304 const char *prefix = length ? " or " : "expected ";
9305
9306 strcpy (buffer + length, prefix);
9307 length += strlen (prefix);
9308 strcpy (buffer + length, omp_clause_code_name[c]);
9309 length += strlen (omp_clause_code_name[c]);
9310 }
9311
9312 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9313 buffer, omp_clause_code_name[TREE_CODE (node)],
9314 function, trim_filename (file), line);
9315 }
9316
9317
9318 #undef DEFTREESTRUCT
9319 #define DEFTREESTRUCT(VAL, NAME) NAME,
9320
9321 static const char *ts_enum_names[] = {
9322 #include "treestruct.def"
9323 };
9324 #undef DEFTREESTRUCT
9325
9326 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9327
9328 /* Similar to tree_class_check_failed, except that we check for
9329 whether CODE contains the tree structure identified by EN. */
9330
9331 void
9332 tree_contains_struct_check_failed (const_tree node,
9333 const enum tree_node_structure_enum en,
9334 const char *file, int line,
9335 const char *function)
9336 {
9337 internal_error
9338 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9339 TS_ENUM_NAME (en),
9340 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9341 }
9342
9343
9344 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9345 (dynamically sized) vector. */
9346
9347 void
9348 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9349 const char *function)
9350 {
9351 internal_error
9352 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9353 idx + 1, len, function, trim_filename (file), line);
9354 }
9355
9356 /* Similar to above, except that the check is for the bounds of the operand
9357 vector of an expression node EXP. */
9358
9359 void
9360 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9361 int line, const char *function)
9362 {
9363 enum tree_code code = TREE_CODE (exp);
9364 internal_error
9365 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9366 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9367 function, trim_filename (file), line);
9368 }
9369
9370 /* Similar to above, except that the check is for the number of
9371 operands of an OMP_CLAUSE node. */
9372
9373 void
9374 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9375 int line, const char *function)
9376 {
9377 internal_error
9378 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9379 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9380 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9381 trim_filename (file), line);
9382 }
9383 #endif /* ENABLE_TREE_CHECKING */
9384 \f
9385 /* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
9386 and mapped to the machine mode MODE. Initialize its fields and build
9387 the information necessary for debugging output. */
9388
9389 static tree
9390 make_vector_type (tree innertype, int nunits, enum machine_mode mode)
9391 {
9392 tree t;
9393 hashval_t hashcode = 0;
9394
9395 t = make_node (VECTOR_TYPE);
9396 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9397 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9398 SET_TYPE_MODE (t, mode);
9399
9400 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9401 SET_TYPE_STRUCTURAL_EQUALITY (t);
9402 else if (TYPE_CANONICAL (innertype) != innertype
9403 || mode != VOIDmode)
9404 TYPE_CANONICAL (t)
9405 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9406
9407 layout_type (t);
9408
9409 hashcode = iterative_hash_host_wide_int (VECTOR_TYPE, hashcode);
9410 hashcode = iterative_hash_host_wide_int (nunits, hashcode);
9411 hashcode = iterative_hash_host_wide_int (mode, hashcode);
9412 hashcode = iterative_hash_object (TYPE_HASH (TREE_TYPE (t)), hashcode);
9413 t = type_hash_canon (hashcode, t);
9414
9415 /* We have built a main variant, based on the main variant of the
9416 inner type. Use it to build the variant we return. */
9417 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9418 && TREE_TYPE (t) != innertype)
9419 return build_type_attribute_qual_variant (t,
9420 TYPE_ATTRIBUTES (innertype),
9421 TYPE_QUALS (innertype));
9422
9423 return t;
9424 }
9425
9426 static tree
9427 make_or_reuse_type (unsigned size, int unsignedp)
9428 {
9429 if (size == INT_TYPE_SIZE)
9430 return unsignedp ? unsigned_type_node : integer_type_node;
9431 if (size == CHAR_TYPE_SIZE)
9432 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9433 if (size == SHORT_TYPE_SIZE)
9434 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9435 if (size == LONG_TYPE_SIZE)
9436 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9437 if (size == LONG_LONG_TYPE_SIZE)
9438 return (unsignedp ? long_long_unsigned_type_node
9439 : long_long_integer_type_node);
9440 if (size == 128 && int128_integer_type_node)
9441 return (unsignedp ? int128_unsigned_type_node
9442 : int128_integer_type_node);
9443
9444 if (unsignedp)
9445 return make_unsigned_type (size);
9446 else
9447 return make_signed_type (size);
9448 }
9449
9450 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9451
9452 static tree
9453 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9454 {
9455 if (satp)
9456 {
9457 if (size == SHORT_FRACT_TYPE_SIZE)
9458 return unsignedp ? sat_unsigned_short_fract_type_node
9459 : sat_short_fract_type_node;
9460 if (size == FRACT_TYPE_SIZE)
9461 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9462 if (size == LONG_FRACT_TYPE_SIZE)
9463 return unsignedp ? sat_unsigned_long_fract_type_node
9464 : sat_long_fract_type_node;
9465 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9466 return unsignedp ? sat_unsigned_long_long_fract_type_node
9467 : sat_long_long_fract_type_node;
9468 }
9469 else
9470 {
9471 if (size == SHORT_FRACT_TYPE_SIZE)
9472 return unsignedp ? unsigned_short_fract_type_node
9473 : short_fract_type_node;
9474 if (size == FRACT_TYPE_SIZE)
9475 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9476 if (size == LONG_FRACT_TYPE_SIZE)
9477 return unsignedp ? unsigned_long_fract_type_node
9478 : long_fract_type_node;
9479 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9480 return unsignedp ? unsigned_long_long_fract_type_node
9481 : long_long_fract_type_node;
9482 }
9483
9484 return make_fract_type (size, unsignedp, satp);
9485 }
9486
9487 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9488
9489 static tree
9490 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9491 {
9492 if (satp)
9493 {
9494 if (size == SHORT_ACCUM_TYPE_SIZE)
9495 return unsignedp ? sat_unsigned_short_accum_type_node
9496 : sat_short_accum_type_node;
9497 if (size == ACCUM_TYPE_SIZE)
9498 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9499 if (size == LONG_ACCUM_TYPE_SIZE)
9500 return unsignedp ? sat_unsigned_long_accum_type_node
9501 : sat_long_accum_type_node;
9502 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9503 return unsignedp ? sat_unsigned_long_long_accum_type_node
9504 : sat_long_long_accum_type_node;
9505 }
9506 else
9507 {
9508 if (size == SHORT_ACCUM_TYPE_SIZE)
9509 return unsignedp ? unsigned_short_accum_type_node
9510 : short_accum_type_node;
9511 if (size == ACCUM_TYPE_SIZE)
9512 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9513 if (size == LONG_ACCUM_TYPE_SIZE)
9514 return unsignedp ? unsigned_long_accum_type_node
9515 : long_accum_type_node;
9516 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9517 return unsignedp ? unsigned_long_long_accum_type_node
9518 : long_long_accum_type_node;
9519 }
9520
9521 return make_accum_type (size, unsignedp, satp);
9522 }
9523
9524
9525 /* Create an atomic variant node for TYPE. This routine is called
9526 during initialization of data types to create the 5 basic atomic
9527 types. The generic build_variant_type function requires these to
9528 already be set up in order to function properly, so cannot be
9529 called from there. */
9530
9531 static tree
9532 build_atomic_base (tree type)
9533 {
9534 tree t;
9535
9536 /* Make sure it's not already registered. */
9537 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9538 return t;
9539
9540 t = build_variant_type_copy (type);
9541 set_type_quals (t, TYPE_QUAL_ATOMIC);
9542
9543 return t;
9544 }
9545
9546 /* Create nodes for all integer types (and error_mark_node) using the sizes
9547 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9548 SHORT_DOUBLE specifies whether double should be of the same precision
9549 as float. */
9550
9551 void
9552 build_common_tree_nodes (bool signed_char, bool short_double)
9553 {
9554 error_mark_node = make_node (ERROR_MARK);
9555 TREE_TYPE (error_mark_node) = error_mark_node;
9556
9557 initialize_sizetypes ();
9558
9559 /* Define both `signed char' and `unsigned char'. */
9560 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9561 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9562 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9563 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9564
9565 /* Define `char', which is like either `signed char' or `unsigned char'
9566 but not the same as either. */
9567 char_type_node
9568 = (signed_char
9569 ? make_signed_type (CHAR_TYPE_SIZE)
9570 : make_unsigned_type (CHAR_TYPE_SIZE));
9571 TYPE_STRING_FLAG (char_type_node) = 1;
9572
9573 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9574 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9575 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9576 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9577 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9578 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9579 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9580 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9581 #if HOST_BITS_PER_WIDE_INT >= 64
9582 /* TODO: This isn't correct, but for now the logic depends on the
9583 host's wide integer rather than the target's.
9584 If there is a target that does not support TImode but has a 128-bit
9585 integer-scalar register, this target check needs to be adjusted. */
9586 if (targetm.scalar_mode_supported_p (TImode))
9587 {
9588 int128_integer_type_node = make_signed_type (128);
9589 int128_unsigned_type_node = make_unsigned_type (128);
9590 }
9591 #endif
9592
9593 /* Define a boolean type. This type only represents boolean values but
9594 may be larger than char depending on the value of BOOL_TYPE_SIZE.
9595 Front ends that want to override this size (e.g. Java) can redefine
9596 boolean_type_node before calling build_common_tree_nodes_2. */
9597 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9598 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9599 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9600 TYPE_PRECISION (boolean_type_node) = 1;
9601
9602 /* Define what type to use for size_t. */
9603 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9604 size_type_node = unsigned_type_node;
9605 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9606 size_type_node = long_unsigned_type_node;
9607 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9608 size_type_node = long_long_unsigned_type_node;
9609 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9610 size_type_node = short_unsigned_type_node;
9611 else
9612 gcc_unreachable ();
9613
9614 /* Fill in the rest of the sized types. Reuse existing type nodes
9615 when possible. */
9616 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9617 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9618 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9619 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9620 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9621
9622 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9623 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9624 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9625 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9626 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9627
9628 /* Don't call build_qualified_type for atomics. That routine does
9629 special processing for atomics, and until they are initialized
9630 it's better not to make that call. */
9631
9632 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node);
9633 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node);
9634 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node);
9635 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node);
9636 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node);
9637
9638 access_public_node = get_identifier ("public");
9639 access_protected_node = get_identifier ("protected");
9640 access_private_node = get_identifier ("private");
9641
9642 /* Define these next since types below may use them. */
9643 integer_zero_node = build_int_cst (integer_type_node, 0);
9644 integer_one_node = build_int_cst (integer_type_node, 1);
9645 integer_three_node = build_int_cst (integer_type_node, 3);
9646 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9647
9648 size_zero_node = size_int (0);
9649 size_one_node = size_int (1);
9650 bitsize_zero_node = bitsize_int (0);
9651 bitsize_one_node = bitsize_int (1);
9652 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9653
9654 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9655 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9656
9657 void_type_node = make_node (VOID_TYPE);
9658 layout_type (void_type_node);
9659
9660 pointer_bounds_type_node = targetm.chkp_bound_type ();
9661
9662 /* We are not going to have real types in C with less than byte alignment,
9663 so we might as well not have any types that claim to have it. */
9664 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9665 TYPE_USER_ALIGN (void_type_node) = 0;
9666
9667 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9668 layout_type (TREE_TYPE (null_pointer_node));
9669
9670 ptr_type_node = build_pointer_type (void_type_node);
9671 const_ptr_type_node
9672 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9673 fileptr_type_node = ptr_type_node;
9674
9675 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9676
9677 float_type_node = make_node (REAL_TYPE);
9678 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9679 layout_type (float_type_node);
9680
9681 double_type_node = make_node (REAL_TYPE);
9682 if (short_double)
9683 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9684 else
9685 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9686 layout_type (double_type_node);
9687
9688 long_double_type_node = make_node (REAL_TYPE);
9689 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9690 layout_type (long_double_type_node);
9691
9692 float_ptr_type_node = build_pointer_type (float_type_node);
9693 double_ptr_type_node = build_pointer_type (double_type_node);
9694 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9695 integer_ptr_type_node = build_pointer_type (integer_type_node);
9696
9697 /* Fixed size integer types. */
9698 uint16_type_node = build_nonstandard_integer_type (16, true);
9699 uint32_type_node = build_nonstandard_integer_type (32, true);
9700 uint64_type_node = build_nonstandard_integer_type (64, true);
9701
9702 /* Decimal float types. */
9703 dfloat32_type_node = make_node (REAL_TYPE);
9704 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9705 layout_type (dfloat32_type_node);
9706 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9707 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9708
9709 dfloat64_type_node = make_node (REAL_TYPE);
9710 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9711 layout_type (dfloat64_type_node);
9712 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9713 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9714
9715 dfloat128_type_node = make_node (REAL_TYPE);
9716 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9717 layout_type (dfloat128_type_node);
9718 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9719 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9720
9721 complex_integer_type_node = build_complex_type (integer_type_node);
9722 complex_float_type_node = build_complex_type (float_type_node);
9723 complex_double_type_node = build_complex_type (double_type_node);
9724 complex_long_double_type_node = build_complex_type (long_double_type_node);
9725
9726 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9727 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9728 sat_ ## KIND ## _type_node = \
9729 make_sat_signed_ ## KIND ## _type (SIZE); \
9730 sat_unsigned_ ## KIND ## _type_node = \
9731 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9732 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9733 unsigned_ ## KIND ## _type_node = \
9734 make_unsigned_ ## KIND ## _type (SIZE);
9735
9736 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9737 sat_ ## WIDTH ## KIND ## _type_node = \
9738 make_sat_signed_ ## KIND ## _type (SIZE); \
9739 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9740 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9741 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9742 unsigned_ ## WIDTH ## KIND ## _type_node = \
9743 make_unsigned_ ## KIND ## _type (SIZE);
9744
9745 /* Make fixed-point type nodes based on four different widths. */
9746 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9747 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9748 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9749 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9750 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9751
9752 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9753 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9754 NAME ## _type_node = \
9755 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9756 u ## NAME ## _type_node = \
9757 make_or_reuse_unsigned_ ## KIND ## _type \
9758 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9759 sat_ ## NAME ## _type_node = \
9760 make_or_reuse_sat_signed_ ## KIND ## _type \
9761 (GET_MODE_BITSIZE (MODE ## mode)); \
9762 sat_u ## NAME ## _type_node = \
9763 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9764 (GET_MODE_BITSIZE (U ## MODE ## mode));
9765
9766 /* Fixed-point type and mode nodes. */
9767 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9768 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9769 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9770 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9771 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9772 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9773 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9774 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9775 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9776 MAKE_FIXED_MODE_NODE (accum, da, DA)
9777 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9778
9779 {
9780 tree t = targetm.build_builtin_va_list ();
9781
9782 /* Many back-ends define record types without setting TYPE_NAME.
9783 If we copied the record type here, we'd keep the original
9784 record type without a name. This breaks name mangling. So,
9785 don't copy record types and let c_common_nodes_and_builtins()
9786 declare the type to be __builtin_va_list. */
9787 if (TREE_CODE (t) != RECORD_TYPE)
9788 t = build_variant_type_copy (t);
9789
9790 va_list_type_node = t;
9791 }
9792 }
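/* A minimal usage sketch (the flag names below are the usual GCC option
   flags, shown only for illustration): a C-family front end would call

     build_common_tree_nodes (flag_signed_char, flag_short_double);

   once during initialization, before creating any language-specific
   type nodes that depend on the common nodes built above.  */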
9793
9794 /* Modify DECL for given flags.
9795 TM_PURE attribute is set only on types, so the function will modify
9796 DECL's type when ECF_TM_PURE is used. */
9797
9798 void
9799 set_call_expr_flags (tree decl, int flags)
9800 {
9801 if (flags & ECF_NOTHROW)
9802 TREE_NOTHROW (decl) = 1;
9803 if (flags & ECF_CONST)
9804 TREE_READONLY (decl) = 1;
9805 if (flags & ECF_PURE)
9806 DECL_PURE_P (decl) = 1;
9807 if (flags & ECF_LOOPING_CONST_OR_PURE)
9808 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9809 if (flags & ECF_NOVOPS)
9810 DECL_IS_NOVOPS (decl) = 1;
9811 if (flags & ECF_NORETURN)
9812 TREE_THIS_VOLATILE (decl) = 1;
9813 if (flags & ECF_MALLOC)
9814 DECL_IS_MALLOC (decl) = 1;
9815 if (flags & ECF_RETURNS_TWICE)
9816 DECL_IS_RETURNS_TWICE (decl) = 1;
9817 if (flags & ECF_LEAF)
9818 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9819 NULL, DECL_ATTRIBUTES (decl));
9820 if ((flags & ECF_TM_PURE) && flag_tm)
9821 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9822 /* Looping const or pure is implied by noreturn.
9823 There is currently no way to declare looping const or looping pure alone. */
9824 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9825 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
9826 }
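/* A minimal usage sketch, with DECL a hypothetical FUNCTION_DECL:

     set_call_expr_flags (decl, ECF_CONST | ECF_NOTHROW | ECF_LEAF);

   marks the function as const, non-throwing and leaf.  Note the assert
   above: ECF_LOOPING_CONST_OR_PURE is only accepted together with
   ECF_NORETURN and one of ECF_CONST or ECF_PURE.  */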
9827
9828
9829 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
9830
9831 static void
9832 local_define_builtin (const char *name, tree type, enum built_in_function code,
9833 const char *library_name, int ecf_flags)
9834 {
9835 tree decl;
9836
9837 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
9838 library_name, NULL_TREE);
9839 set_call_expr_flags (decl, ecf_flags);
9840
9841 set_builtin_decl (code, decl, true);
9842 }
9843
9844 /* Call this function after instantiating all builtins that the language
9845 front end cares about. This will build the rest of the builtins that
9846 are relied upon by the tree optimizers and the middle-end. */
9847
9848 void
9849 build_common_builtin_nodes (void)
9850 {
9851 tree tmp, ftype;
9852 int ecf_flags;
9853
9854 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
9855 {
9856 ftype = build_function_type (void_type_node, void_list_node);
9857 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
9858 "__builtin_unreachable",
9859 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
9860 | ECF_CONST);
9861 }
9862
9863 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
9864 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9865 {
9866 ftype = build_function_type_list (ptr_type_node,
9867 ptr_type_node, const_ptr_type_node,
9868 size_type_node, NULL_TREE);
9869
9870 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
9871 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
9872 "memcpy", ECF_NOTHROW | ECF_LEAF);
9873 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
9874 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
9875 "memmove", ECF_NOTHROW | ECF_LEAF);
9876 }
9877
9878 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
9879 {
9880 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
9881 const_ptr_type_node, size_type_node,
9882 NULL_TREE);
9883 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
9884 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
9885 }
9886
9887 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
9888 {
9889 ftype = build_function_type_list (ptr_type_node,
9890 ptr_type_node, integer_type_node,
9891 size_type_node, NULL_TREE);
9892 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
9893 "memset", ECF_NOTHROW | ECF_LEAF);
9894 }
9895
9896 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
9897 {
9898 ftype = build_function_type_list (ptr_type_node,
9899 size_type_node, NULL_TREE);
9900 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
9901 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9902 }
9903
9904 ftype = build_function_type_list (ptr_type_node, size_type_node,
9905 size_type_node, NULL_TREE);
9906 local_define_builtin ("__builtin_alloca_with_align", ftype,
9907 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
9908 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
9909
9910 /* If we're checking the stack, `alloca' can throw. */
9911 if (flag_stack_check)
9912 {
9913 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
9914 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
9915 }
9916
9917 ftype = build_function_type_list (void_type_node,
9918 ptr_type_node, ptr_type_node,
9919 ptr_type_node, NULL_TREE);
9920 local_define_builtin ("__builtin_init_trampoline", ftype,
9921 BUILT_IN_INIT_TRAMPOLINE,
9922 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
9923 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
9924 BUILT_IN_INIT_HEAP_TRAMPOLINE,
9925 "__builtin_init_heap_trampoline",
9926 ECF_NOTHROW | ECF_LEAF);
9927
9928 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9929 local_define_builtin ("__builtin_adjust_trampoline", ftype,
9930 BUILT_IN_ADJUST_TRAMPOLINE,
9931 "__builtin_adjust_trampoline",
9932 ECF_CONST | ECF_NOTHROW);
9933
9934 ftype = build_function_type_list (void_type_node,
9935 ptr_type_node, ptr_type_node, NULL_TREE);
9936 local_define_builtin ("__builtin_nonlocal_goto", ftype,
9937 BUILT_IN_NONLOCAL_GOTO,
9938 "__builtin_nonlocal_goto",
9939 ECF_NORETURN | ECF_NOTHROW);
9940
9941 ftype = build_function_type_list (void_type_node,
9942 ptr_type_node, ptr_type_node, NULL_TREE);
9943 local_define_builtin ("__builtin_setjmp_setup", ftype,
9944 BUILT_IN_SETJMP_SETUP,
9945 "__builtin_setjmp_setup", ECF_NOTHROW);
9946
9947 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
9948 local_define_builtin ("__builtin_setjmp_dispatcher", ftype,
9949 BUILT_IN_SETJMP_DISPATCHER,
9950 "__builtin_setjmp_dispatcher",
9951 ECF_PURE | ECF_NOTHROW);
9952
9953 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9954 local_define_builtin ("__builtin_setjmp_receiver", ftype,
9955 BUILT_IN_SETJMP_RECEIVER,
9956 "__builtin_setjmp_receiver", ECF_NOTHROW);
9957
9958 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
9959 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
9960 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
9961
9962 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9963 local_define_builtin ("__builtin_stack_restore", ftype,
9964 BUILT_IN_STACK_RESTORE,
9965 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
9966
9967 /* If there's a possibility that we might use the ARM EABI, build the
9968 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
9969 if (targetm.arm_eabi_unwinder)
9970 {
9971 ftype = build_function_type_list (void_type_node, NULL_TREE);
9972 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
9973 BUILT_IN_CXA_END_CLEANUP,
9974 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
9975 }
9976
9977 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
9978 local_define_builtin ("__builtin_unwind_resume", ftype,
9979 BUILT_IN_UNWIND_RESUME,
9980 ((targetm_common.except_unwind_info (&global_options)
9981 == UI_SJLJ)
9982 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
9983 ECF_NORETURN);
9984
9985 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
9986 {
9987 ftype = build_function_type_list (ptr_type_node, integer_type_node,
9988 NULL_TREE);
9989 local_define_builtin ("__builtin_return_address", ftype,
9990 BUILT_IN_RETURN_ADDRESS,
9991 "__builtin_return_address",
9992 ECF_NOTHROW);
9993 }
9994
9995 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
9996 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
9997 {
9998 ftype = build_function_type_list (void_type_node, ptr_type_node,
9999 ptr_type_node, NULL_TREE);
10000 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10001 local_define_builtin ("__cyg_profile_func_enter", ftype,
10002 BUILT_IN_PROFILE_FUNC_ENTER,
10003 "__cyg_profile_func_enter", 0);
10004 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10005 local_define_builtin ("__cyg_profile_func_exit", ftype,
10006 BUILT_IN_PROFILE_FUNC_EXIT,
10007 "__cyg_profile_func_exit", 0);
10008 }
10009
10010 /* The exception object and filter values from the runtime. The argument
10011 must be zero before exception lowering, i.e. from the front end. After
10012 exception lowering, it will be the region number for the exception
10013 landing pad. These functions are PURE instead of CONST to prevent
10014 them from being hoisted past the exception edge that will initialize
10015 its value in the landing pad. */
10016 ftype = build_function_type_list (ptr_type_node,
10017 integer_type_node, NULL_TREE);
10018 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10019 /* Only use TM_PURE if we have TM language support. */
10020 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10021 ecf_flags |= ECF_TM_PURE;
10022 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10023 "__builtin_eh_pointer", ecf_flags);
10024
10025 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10026 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10027 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10028 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10029
10030 ftype = build_function_type_list (void_type_node,
10031 integer_type_node, integer_type_node,
10032 NULL_TREE);
10033 local_define_builtin ("__builtin_eh_copy_values", ftype,
10034 BUILT_IN_EH_COPY_VALUES,
10035 "__builtin_eh_copy_values", ECF_NOTHROW);
10036
10037 /* Complex multiplication and division. These are handled as builtins
10038 rather than optabs because emit_library_call_value doesn't support
10039 complex. Further, we can do slightly better with folding these
10040 beasties if the real and imaginary parts of the arguments are separate. */
10041 {
10042 int mode;
10043
10044 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10045 {
10046 char mode_name_buf[4], *q;
10047 const char *p;
10048 enum built_in_function mcode, dcode;
10049 tree type, inner_type;
10050 const char *prefix = "__";
10051
10052 if (targetm.libfunc_gnu_prefix)
10053 prefix = "__gnu_";
10054
10055 type = lang_hooks.types.type_for_mode ((enum machine_mode) mode, 0);
10056 if (type == NULL)
10057 continue;
10058 inner_type = TREE_TYPE (type);
10059
10060 ftype = build_function_type_list (type, inner_type, inner_type,
10061 inner_type, inner_type, NULL_TREE);
10062
10063 mcode = ((enum built_in_function)
10064 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10065 dcode = ((enum built_in_function)
10066 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10067
10068 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10069 *q = TOLOWER (*p);
10070 *q = '\0';
10071
10072 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10073 NULL);
10074 local_define_builtin (built_in_names[mcode], ftype, mcode,
10075 built_in_names[mcode],
10076 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10077
10078 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10079 NULL);
10080 local_define_builtin (built_in_names[dcode], ftype, dcode,
10081 built_in_names[dcode],
10082 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10083 }
10084 }
10085 }
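/* For illustration, on a target supporting SCmode complex floats the
   loop above registers builtins named "__mulsc3" and "__divsc3" (or
   "__gnu_mulsc3" and "__gnu_divsc3" when targetm.libfunc_gnu_prefix is
   set), which correspond to the complex-arithmetic helper routines
   provided by libgcc.  */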
10086
10087 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10088 better way.
10089
10090 If we requested a pointer to a vector, build up the pointers that
10091 we stripped off while looking for the inner type. Similarly for
10092 return values from functions.
10093
10094 The argument TYPE is the top of the chain, and BOTTOM is the
10095 new type which we will point to. */
10096
10097 tree
10098 reconstruct_complex_type (tree type, tree bottom)
10099 {
10100 tree inner, outer;
10101
10102 if (TREE_CODE (type) == POINTER_TYPE)
10103 {
10104 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10105 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10106 TYPE_REF_CAN_ALIAS_ALL (type));
10107 }
10108 else if (TREE_CODE (type) == REFERENCE_TYPE)
10109 {
10110 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10111 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10112 TYPE_REF_CAN_ALIAS_ALL (type));
10113 }
10114 else if (TREE_CODE (type) == ARRAY_TYPE)
10115 {
10116 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10117 outer = build_array_type (inner, TYPE_DOMAIN (type));
10118 }
10119 else if (TREE_CODE (type) == FUNCTION_TYPE)
10120 {
10121 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10122 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10123 }
10124 else if (TREE_CODE (type) == METHOD_TYPE)
10125 {
10126 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10127 /* The build_method_type_directly() routine prepends 'this' to the argument list,
10128 so we must compensate by getting rid of it. */
10129 outer
10130 = build_method_type_directly
10131 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10132 inner,
10133 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10134 }
10135 else if (TREE_CODE (type) == OFFSET_TYPE)
10136 {
10137 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10138 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10139 }
10140 else
10141 return bottom;
10142
10143 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10144 TYPE_QUALS (type));
10145 }
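/* A sketch of the intent: if TYPE is "float *" and BOTTOM is a vector
   type such as one built by build_vector_type (float_type_node, 4),
   the result is a pointer to that vector type, rebuilt with the
   attributes and qualifiers of the original pointer type.  */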
10146
10147 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10148 the inner type. */
10149 tree
10150 build_vector_type_for_mode (tree innertype, enum machine_mode mode)
10151 {
10152 int nunits;
10153
10154 switch (GET_MODE_CLASS (mode))
10155 {
10156 case MODE_VECTOR_INT:
10157 case MODE_VECTOR_FLOAT:
10158 case MODE_VECTOR_FRACT:
10159 case MODE_VECTOR_UFRACT:
10160 case MODE_VECTOR_ACCUM:
10161 case MODE_VECTOR_UACCUM:
10162 nunits = GET_MODE_NUNITS (mode);
10163 break;
10164
10165 case MODE_INT:
10166 /* Check that there are no leftover bits. */
10167 gcc_assert (GET_MODE_BITSIZE (mode)
10168 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10169
10170 nunits = GET_MODE_BITSIZE (mode)
10171 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10172 break;
10173
10174 default:
10175 gcc_unreachable ();
10176 }
10177
10178 return make_vector_type (innertype, nunits, mode);
10179 }
10180
10181 /* Similarly, but takes the inner type and number of units, which must be
10182 a power of two. */
10183
10184 tree
10185 build_vector_type (tree innertype, int nunits)
10186 {
10187 return make_vector_type (innertype, nunits, VOIDmode);
10188 }
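/* A minimal usage sketch: a four-unit vector of 32-bit integers (what
   many targets expose as V4SImode) can be requested with

     tree v4si = build_vector_type (intSI_type_node, 4);

   leaving it to layout_type to choose a suitable machine mode (or
   BLKmode if no vector mode exists).  */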
10189
10190 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10191
10192 tree
10193 build_opaque_vector_type (tree innertype, int nunits)
10194 {
10195 tree t = make_vector_type (innertype, nunits, VOIDmode);
10196 tree cand;
10197 /* We always build the non-opaque variant before the opaque one,
10198 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10199 cand = TYPE_NEXT_VARIANT (t);
10200 if (cand
10201 && TYPE_VECTOR_OPAQUE (cand)
10202 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10203 return cand;
10204 /* Otherwise build a variant type and make sure to queue it after
10205 the non-opaque type. */
10206 cand = build_distinct_type_copy (t);
10207 TYPE_VECTOR_OPAQUE (cand) = true;
10208 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10209 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10210 TYPE_NEXT_VARIANT (t) = cand;
10211 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10212 return cand;
10213 }
10214
10215
10216 /* Given an initializer INIT, return TRUE if INIT is zero or some
10217 aggregate of zeros. Otherwise return FALSE. */
10218 bool
10219 initializer_zerop (const_tree init)
10220 {
10221 tree elt;
10222
10223 STRIP_NOPS (init);
10224
10225 switch (TREE_CODE (init))
10226 {
10227 case INTEGER_CST:
10228 return integer_zerop (init);
10229
10230 case REAL_CST:
10231 /* ??? Note that this is not correct for C4X float formats. There,
10232 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10233 negative exponent. */
10234 return real_zerop (init)
10235 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10236
10237 case FIXED_CST:
10238 return fixed_zerop (init);
10239
10240 case COMPLEX_CST:
10241 return integer_zerop (init)
10242 || (real_zerop (init)
10243 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10244 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10245
10246 case VECTOR_CST:
10247 {
10248 unsigned i;
10249 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10250 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10251 return false;
10252 return true;
10253 }
10254
10255 case CONSTRUCTOR:
10256 {
10257 unsigned HOST_WIDE_INT idx;
10258
10259 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10260 if (!initializer_zerop (elt))
10261 return false;
10262 return true;
10263 }
10264
10265 case STRING_CST:
10266 {
10267 int i;
10268
10269 /* We need to loop through all elements to handle cases like
10270 "\0" and "\0foobar". */
10271 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10272 if (TREE_STRING_POINTER (init)[i] != '\0')
10273 return false;
10274
10275 return true;
10276 }
10277
10278 default:
10279 return false;
10280 }
10281 }
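/* Worked examples for the predicate above: build_int_cst
   (integer_type_node, 0) and a CONSTRUCTOR whose elements are all such
   zeros both return true, whereas a REAL_CST holding -0.0 returns
   false because of the explicit REAL_VALUE_MINUS_ZERO check.  */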
10282
10283 /* Check whether vector VEC consists of all equal elements and whether
10284 the number of elements corresponds to the type of VEC.
10285 The function returns the first element of the vector,
10286 or NULL_TREE if the vector is not uniform. */
10287 tree
10288 uniform_vector_p (const_tree vec)
10289 {
10290 tree first, t;
10291 unsigned i;
10292
10293 if (vec == NULL_TREE)
10294 return NULL_TREE;
10295
10296 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10297
10298 if (TREE_CODE (vec) == VECTOR_CST)
10299 {
10300 first = VECTOR_CST_ELT (vec, 0);
10301 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10302 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10303 return NULL_TREE;
10304
10305 return first;
10306 }
10307
10308 else if (TREE_CODE (vec) == CONSTRUCTOR)
10309 {
10310 first = error_mark_node;
10311
10312 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10313 {
10314 if (i == 0)
10315 {
10316 first = t;
10317 continue;
10318 }
10319 if (!operand_equal_p (first, t, 0))
10320 return NULL_TREE;
10321 }
10322 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10323 return NULL_TREE;
10324
10325 return first;
10326 }
10327
10328 return NULL_TREE;
10329 }
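/* For illustration: a VECTOR_CST such as { 7, 7, 7, 7 } yields its
   first element (the INTEGER_CST 7), while { 7, 7, 7, 0 } or a
   CONSTRUCTOR supplying fewer elements than TYPE_VECTOR_SUBPARTS
   yields NULL_TREE.  */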
10330
10331 /* Build an empty statement at location LOC. */
10332
10333 tree
10334 build_empty_stmt (location_t loc)
10335 {
10336 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10337 SET_EXPR_LOCATION (t, loc);
10338 return t;
10339 }
10340
10341
10342 /* Build an OpenMP clause with code CODE. LOC is the location of the
10343 clause. */
10344
10345 tree
10346 build_omp_clause (location_t loc, enum omp_clause_code code)
10347 {
10348 tree t;
10349 int size, length;
10350
10351 length = omp_clause_num_ops[code];
10352 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10353
10354 record_node_allocation_statistics (OMP_CLAUSE, size);
10355
10356 t = ggc_alloc_tree_node (size);
10357 memset (t, 0, size);
10358 TREE_SET_CODE (t, OMP_CLAUSE);
10359 OMP_CLAUSE_SET_CODE (t, code);
10360 OMP_CLAUSE_LOCATION (t) = loc;
10361
10362 return t;
10363 }
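/* A minimal usage sketch, with VAR a hypothetical VAR_DECL and CLAUSES
   an existing clause chain:

     tree c = build_omp_clause (loc, OMP_CLAUSE_PRIVATE);
     OMP_CLAUSE_DECL (c) = var;
     OMP_CLAUSE_CHAIN (c) = clauses;

   i.e. the caller fills in the operands and threads the new clause
   onto the chain.  */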
10364
10365 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10366 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10367 Except for the CODE and operand count field, other storage for the
10368 object is initialized to zeros. */
10369
10370 tree
10371 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10372 {
10373 tree t;
10374 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10375
10376 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10377 gcc_assert (len >= 1);
10378
10379 record_node_allocation_statistics (code, length);
10380
10381 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10382
10383 TREE_SET_CODE (t, code);
10384
10385 /* Can't use TREE_OPERAND to store the length because if checking is
10386 enabled, it will try to check the length before we store it. :-P */
10387 t->exp.operands[0] = build_int_cst (sizetype, len);
10388
10389 return t;
10390 }
10391
10392 /* Helper function for build_call_* functions; build a CALL_EXPR with
10393 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10394 the argument slots. */
10395
10396 static tree
10397 build_call_1 (tree return_type, tree fn, int nargs)
10398 {
10399 tree t;
10400
10401 t = build_vl_exp (CALL_EXPR, nargs + 3);
10402 TREE_TYPE (t) = return_type;
10403 CALL_EXPR_FN (t) = fn;
10404 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10405
10406 return t;
10407 }
10408
10409 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10410 FN and a null static chain slot. NARGS is the number of call arguments
10411 which are specified as "..." arguments. */
10412
10413 tree
10414 build_call_nary (tree return_type, tree fn, int nargs, ...)
10415 {
10416 tree ret;
10417 va_list args;
10418 va_start (args, nargs);
10419 ret = build_call_valist (return_type, fn, nargs, args);
10420 va_end (args);
10421 return ret;
10422 }
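/* A minimal usage sketch, with FNDECL a hypothetical FUNCTION_DECL of
   type int (*) (int, int) and ARG1/ARG2 the two argument trees:

     tree fn = build_fold_addr_expr (fndecl);
     tree call = build_call_nary (integer_type_node, fn, 2, arg1, arg2);

   The FN operand is the address of the callee, not the FUNCTION_DECL
   itself.  */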
10423
10424 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10425 FN and a null static chain slot. NARGS is the number of call arguments
10426 which are specified as a va_list ARGS. */
10427
10428 tree
10429 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10430 {
10431 tree t;
10432 int i;
10433
10434 t = build_call_1 (return_type, fn, nargs);
10435 for (i = 0; i < nargs; i++)
10436 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10437 process_call_operands (t);
10438 return t;
10439 }
10440
10441 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10442 FN and a null static chain slot. NARGS is the number of call arguments
10443 which are specified as a tree array ARGS. */
10444
10445 tree
10446 build_call_array_loc (location_t loc, tree return_type, tree fn,
10447 int nargs, const tree *args)
10448 {
10449 tree t;
10450 int i;
10451
10452 t = build_call_1 (return_type, fn, nargs);
10453 for (i = 0; i < nargs; i++)
10454 CALL_EXPR_ARG (t, i) = args[i];
10455 process_call_operands (t);
10456 SET_EXPR_LOCATION (t, loc);
10457 return t;
10458 }
10459
10460 /* Like build_call_array, but takes a vec. */
10461
10462 tree
10463 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10464 {
10465 tree ret, t;
10466 unsigned int ix;
10467
10468 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10469 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10470 CALL_EXPR_ARG (ret, ix) = t;
10471 process_call_operands (ret);
10472 return ret;
10473 }
10474
10475 /* Return true if T (assumed to be a DECL) must be assigned a memory
10476 location. */
10477
10478 bool
10479 needs_to_live_in_memory (const_tree t)
10480 {
10481 return (TREE_ADDRESSABLE (t)
10482 || is_global_var (t)
10483 || (TREE_CODE (t) == RESULT_DECL
10484 && !DECL_BY_REFERENCE (t)
10485 && aggregate_value_p (t, current_function_decl)));
10486 }
10487
10488 /* Return the value of the constant X, sign-extended. */
10489
10490 HOST_WIDE_INT
10491 int_cst_value (const_tree x)
10492 {
10493 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10494 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10495
10496 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10497 gcc_assert (TREE_INT_CST_HIGH (x) == 0
10498 || TREE_INT_CST_HIGH (x) == -1);
10499
10500 if (bits < HOST_BITS_PER_WIDE_INT)
10501 {
10502 bool negative = ((val >> (bits - 1)) & 1) != 0;
10503 if (negative)
10504 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10505 else
10506 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10507 }
10508
10509 return val;
10510 }
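/* Worked example of the sign extension above: an INTEGER_CST of 8-bit
   precision whose low bits are 0xff is returned as -1, since the value
   is extended from bit 7 regardless of the type's signedness, while
   0x7f is returned as 127.  */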
10511
10512 /* Return the value of the constant X, sign-extended. */
10513
10514 HOST_WIDEST_INT
10515 widest_int_cst_value (const_tree x)
10516 {
10517 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10518 unsigned HOST_WIDEST_INT val = TREE_INT_CST_LOW (x);
10519
10520 #if HOST_BITS_PER_WIDEST_INT > HOST_BITS_PER_WIDE_INT
10521 gcc_assert (HOST_BITS_PER_WIDEST_INT >= HOST_BITS_PER_DOUBLE_INT);
10522 val |= (((unsigned HOST_WIDEST_INT) TREE_INT_CST_HIGH (x))
10523 << HOST_BITS_PER_WIDE_INT);
10524 #else
10525 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10526 gcc_assert (TREE_INT_CST_HIGH (x) == 0
10527 || TREE_INT_CST_HIGH (x) == -1);
10528 #endif
10529
10530 if (bits < HOST_BITS_PER_WIDEST_INT)
10531 {
10532 bool negative = ((val >> (bits - 1)) & 1) != 0;
10533 if (negative)
10534 val |= (~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1;
10535 else
10536 val &= ~((~(unsigned HOST_WIDEST_INT) 0) << (bits - 1) << 1);
10537 }
10538
10539 return val;
10540 }
10541
10542 /* If TYPE is an integral or pointer type, return an integer type with
10543 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10544 if TYPE is already an integer type of signedness UNSIGNEDP. */
10545
10546 tree
10547 signed_or_unsigned_type_for (int unsignedp, tree type)
10548 {
10549 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10550 return type;
10551
10552 if (TREE_CODE (type) == VECTOR_TYPE)
10553 {
10554 tree inner = TREE_TYPE (type);
10555 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10556 if (!inner2)
10557 return NULL_TREE;
10558 if (inner == inner2)
10559 return type;
10560 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10561 }
10562
10563 if (!INTEGRAL_TYPE_P (type)
10564 && !POINTER_TYPE_P (type))
10565 return NULL_TREE;
10566
10567 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10568 }
10569
10570 /* If TYPE is an integral or pointer type, return an integer type with
10571 the same precision which is unsigned, or itself if TYPE is already an
10572 unsigned integer type. */
10573
10574 tree
10575 unsigned_type_for (tree type)
10576 {
10577 return signed_or_unsigned_type_for (1, type);
10578 }
10579
10580 /* If TYPE is an integral or pointer type, return an integer type with
10581 the same precision which is signed, or itself if TYPE is already a
10582 signed integer type. */
10583
10584 tree
10585 signed_type_for (tree type)
10586 {
10587 return signed_or_unsigned_type_for (0, type);
10588 }
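/* For illustration: unsigned_type_for applied to a 32-bit signed
   INTEGER_TYPE yields an unsigned integer type of the same precision
   (via build_nonstandard_integer_type), applied to a pointer type it
   yields an unsigned integer type of the pointer's precision, and for
   a VECTOR_TYPE the transformation is applied to the element type.  */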
10589
10590 /* If TYPE is a vector type, return a signed integer vector type with the
10591 same width and number of subparts. Otherwise return boolean_type_node. */
10592
10593 tree
10594 truth_type_for (tree type)
10595 {
10596 if (TREE_CODE (type) == VECTOR_TYPE)
10597 {
10598 tree elem = lang_hooks.types.type_for_size
10599 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10600 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10601 }
10602 else
10603 return boolean_type_node;
10604 }
10605
10606 /* Returns the largest value obtainable by casting something in INNER type to
10607 OUTER type. */
10608
10609 tree
10610 upper_bound_in_type (tree outer, tree inner)
10611 {
10612 double_int high;
10613 unsigned int det = 0;
10614 unsigned oprec = TYPE_PRECISION (outer);
10615 unsigned iprec = TYPE_PRECISION (inner);
10616 unsigned prec;
10617
10618 /* Compute a unique number for every combination. */
10619 det |= (oprec > iprec) ? 4 : 0;
10620 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10621 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10622
10623 /* Determine the exponent to use. */
10624 switch (det)
10625 {
10626 case 0:
10627 case 1:
10628 /* oprec <= iprec, outer: signed, inner: don't care. */
10629 prec = oprec - 1;
10630 break;
10631 case 2:
10632 case 3:
10633 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10634 prec = oprec;
10635 break;
10636 case 4:
10637 /* oprec > iprec, outer: signed, inner: signed. */
10638 prec = iprec - 1;
10639 break;
10640 case 5:
10641 /* oprec > iprec, outer: signed, inner: unsigned. */
10642 prec = iprec;
10643 break;
10644 case 6:
10645 /* oprec > iprec, outer: unsigned, inner: signed. */
10646 prec = oprec;
10647 break;
10648 case 7:
10649 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10650 prec = iprec;
10651 break;
10652 default:
10653 gcc_unreachable ();
10654 }
10655
10656 /* Compute 2^prec - 1. */
10657 if (prec <= HOST_BITS_PER_WIDE_INT)
10658 {
10659 high.high = 0;
10660 high.low = ((~(unsigned HOST_WIDE_INT) 0)
10661 >> (HOST_BITS_PER_WIDE_INT - prec));
10662 }
10663 else
10664 {
10665 high.high = ((~(unsigned HOST_WIDE_INT) 0)
10666 >> (HOST_BITS_PER_DOUBLE_INT - prec));
10667 high.low = ~(unsigned HOST_WIDE_INT) 0;
10668 }
10669
10670 return double_int_to_tree (outer, high);
10671 }
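/* Worked example of the DET encoding above: casting from a 32-bit
   signed INNER to a 16-bit unsigned OUTER gives oprec <= iprec with an
   unsigned outer, so prec = oprec = 16 and the result is
   2^16 - 1 = 65535, expressed in OUTER's type.  */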
10672
10673 /* Returns the smallest value obtainable by casting something in INNER type to
10674 OUTER type. */
10675
10676 tree
10677 lower_bound_in_type (tree outer, tree inner)
10678 {
10679 double_int low;
10680 unsigned oprec = TYPE_PRECISION (outer);
10681 unsigned iprec = TYPE_PRECISION (inner);
10682
10683 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10684 and obtain 0. */
10685 if (TYPE_UNSIGNED (outer)
10686 /* If we are widening something of an unsigned type, OUTER type
10687 contains all values of INNER type. In particular, both INNER
10688 and OUTER types have zero in common. */
10689 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10690 low.low = low.high = 0;
10691 else
10692 {
10693 /* If we are widening a signed type to another signed type, we
10694 want to obtain -2^(iprec-1). If we are keeping the
10695 precision or narrowing to a signed type, we want to obtain
10696 -2^(oprec-1). */
10697 unsigned prec = oprec > iprec ? iprec : oprec;
10698
10699 if (prec <= HOST_BITS_PER_WIDE_INT)
10700 {
10701 low.high = ~(unsigned HOST_WIDE_INT) 0;
10702 low.low = (~(unsigned HOST_WIDE_INT) 0) << (prec - 1);
10703 }
10704 else
10705 {
10706 low.high = ((~(unsigned HOST_WIDE_INT) 0)
10707 << (prec - HOST_BITS_PER_WIDE_INT - 1));
10708 low.low = 0;
10709 }
10710 }
10711
10712 return double_int_to_tree (outer, low);
10713 }
10714
10715 /* Return nonzero if two operands that are suitable for PHI nodes are
10716 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10717 SSA_NAME or invariant. Note that this is strictly an optimization.
10718 That is, callers of this function can directly call operand_equal_p
10719 and get the same result, only slower. */
10720
10721 int
10722 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10723 {
10724 if (arg0 == arg1)
10725 return 1;
10726 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10727 return 0;
10728 return operand_equal_p (arg0, arg1, 0);
10729 }
10730
10731 /* Returns the number of zeros at the end of the binary representation of X.
10732
10733 ??? Use ffs if available? */
10734
10735 tree
10736 num_ending_zeros (const_tree x)
10737 {
10738 unsigned HOST_WIDE_INT fr, nfr;
10739 unsigned num, abits;
10740 tree type = TREE_TYPE (x);
10741
10742 if (TREE_INT_CST_LOW (x) == 0)
10743 {
10744 num = HOST_BITS_PER_WIDE_INT;
10745 fr = TREE_INT_CST_HIGH (x);
10746 }
10747 else
10748 {
10749 num = 0;
10750 fr = TREE_INT_CST_LOW (x);
10751 }
10752
10753 for (abits = HOST_BITS_PER_WIDE_INT / 2; abits; abits /= 2)
10754 {
10755 nfr = fr >> abits;
10756 if (nfr << abits == fr)
10757 {
10758 num += abits;
10759 fr = nfr;
10760 }
10761 }
10762
10763 if (num > TYPE_PRECISION (type))
10764 num = TYPE_PRECISION (type);
10765
10766 return build_int_cst_type (type, num);
10767 }
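/* Worked example: for X = 40 (binary 101000) the loop accumulates
   NUM = 3, so the result is the INTEGER_CST 3 in X's type.  */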
10768
10769
10770 #define WALK_SUBTREE(NODE) \
10771 do \
10772 { \
10773 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10774 if (result) \
10775 return result; \
10776 } \
10777 while (0)
10778
10779 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10780 to be walked whenever a type is seen in the tree. The rest of the operands
10781 and the return value are as for walk_tree. */
10782
10783 static tree
10784 walk_type_fields (tree type, walk_tree_fn func, void *data,
10785 struct pointer_set_t *pset, walk_tree_lh lh)
10786 {
10787 tree result = NULL_TREE;
10788
10789 switch (TREE_CODE (type))
10790 {
10791 case POINTER_TYPE:
10792 case REFERENCE_TYPE:
10793 /* We have to worry about mutually recursive pointers. These can't
10794 be written in C. They can in Ada. It's pathological, but
10795 there's an ACATS test (c38102a) that checks it. Deal with this
10796 by checking if we're pointing to another pointer, that one
10797 points to another pointer, that one does too, and we have no htab.
10798 If so, get a hash table. We check three levels deep to avoid
10799 the cost of the hash table if we don't need one. */
10800 if (POINTER_TYPE_P (TREE_TYPE (type))
10801 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10802 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10803 && !pset)
10804 {
10805 result = walk_tree_without_duplicates (&TREE_TYPE (type),
10806 func, data);
10807 if (result)
10808 return result;
10809
10810 break;
10811 }
10812
10813 /* ... fall through ... */
10814
10815 case COMPLEX_TYPE:
10816 WALK_SUBTREE (TREE_TYPE (type));
10817 break;
10818
10819 case METHOD_TYPE:
10820 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
10821
10822 /* Fall through. */
10823
10824 case FUNCTION_TYPE:
10825 WALK_SUBTREE (TREE_TYPE (type));
10826 {
10827 tree arg;
10828
10829 /* We never want to walk into default arguments. */
10830 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
10831 WALK_SUBTREE (TREE_VALUE (arg));
10832 }
10833 break;
10834
10835 case ARRAY_TYPE:
10836 /* Don't follow this node's type if it is a pointer, for fear that
10837 we'll have infinite recursion. If we have a PSET, then we
10838 need not fear. */
10839 if (pset
10840 || (!POINTER_TYPE_P (TREE_TYPE (type))
10841 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
10842 WALK_SUBTREE (TREE_TYPE (type));
10843 WALK_SUBTREE (TYPE_DOMAIN (type));
10844 break;
10845
10846 case OFFSET_TYPE:
10847 WALK_SUBTREE (TREE_TYPE (type));
10848 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
10849 break;
10850
10851 default:
10852 break;
10853 }
10854
10855 return NULL_TREE;
10856 }
10857
10858 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
10859 called with the DATA and the address of each sub-tree. If FUNC returns a
10860 non-NULL value, the traversal is stopped, and the value returned by FUNC
10861 is returned. If PSET is non-NULL it is used to record the nodes visited,
10862 and to avoid visiting a node more than once. */
10863
10864 tree
10865 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
10866 struct pointer_set_t *pset, walk_tree_lh lh)
10867 {
10868 enum tree_code code;
10869 int walk_subtrees;
10870 tree result;
10871
10872 #define WALK_SUBTREE_TAIL(NODE) \
10873 do \
10874 { \
10875 tp = & (NODE); \
10876 goto tail_recurse; \
10877 } \
10878 while (0)
10879
10880 tail_recurse:
10881 /* Skip empty subtrees. */
10882 if (!*tp)
10883 return NULL_TREE;
10884
10885 /* Don't walk the same tree twice, if the user has requested
10886 that we avoid doing so. */
10887 if (pset && pointer_set_insert (pset, *tp))
10888 return NULL_TREE;
10889
10890 /* Call the function. */
10891 walk_subtrees = 1;
10892 result = (*func) (tp, &walk_subtrees, data);
10893
10894 /* If we found something, return it. */
10895 if (result)
10896 return result;
10897
10898 code = TREE_CODE (*tp);
10899
10900 /* Even if we didn't, FUNC may have decided that there was nothing
10901 interesting below this point in the tree. */
10902 if (!walk_subtrees)
10903 {
10904 /* But we still need to check our siblings. */
10905 if (code == TREE_LIST)
10906 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10907 else if (code == OMP_CLAUSE)
10908 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
10909 else
10910 return NULL_TREE;
10911 }
10912
10913 if (lh)
10914 {
10915 result = (*lh) (tp, &walk_subtrees, func, data, pset);
10916 if (result || !walk_subtrees)
10917 return result;
10918 }
10919
10920 switch (code)
10921 {
10922 case ERROR_MARK:
10923 case IDENTIFIER_NODE:
10924 case INTEGER_CST:
10925 case REAL_CST:
10926 case FIXED_CST:
10927 case VECTOR_CST:
10928 case STRING_CST:
10929 case BLOCK:
10930 case PLACEHOLDER_EXPR:
10931 case SSA_NAME:
10932 case FIELD_DECL:
10933 case RESULT_DECL:
10934 /* None of these have subtrees other than those already walked
10935 above. */
10936 break;
10937
10938 case TREE_LIST:
10939 WALK_SUBTREE (TREE_VALUE (*tp));
10940 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
10941 break;
10942
10943 case TREE_VEC:
10944 {
10945 int len = TREE_VEC_LENGTH (*tp);
10946
10947 if (len == 0)
10948 break;
10949
10950 /* Walk all elements but the first. */
10951 while (--len)
10952 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
10953
10954 /* Now walk the first one as a tail call. */
10955 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
10956 }
10957
10958 case COMPLEX_CST:
10959 WALK_SUBTREE (TREE_REALPART (*tp));
10960 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
10961
10962 case CONSTRUCTOR:
10963 {
10964 unsigned HOST_WIDE_INT idx;
10965 constructor_elt *ce;
10966
10967 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
10968 idx++)
10969 WALK_SUBTREE (ce->value);
10970 }
10971 break;
10972
10973 case SAVE_EXPR:
10974 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
10975
10976 case BIND_EXPR:
10977 {
10978 tree decl;
10979 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
10980 {
10981 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
10982 into declarations that are just mentioned, rather than
10983 declared; they don't really belong to this part of the tree.
10984 And, we can see cycles: the initializer for a declaration
10985 can refer to the declaration itself. */
10986 WALK_SUBTREE (DECL_INITIAL (decl));
10987 WALK_SUBTREE (DECL_SIZE (decl));
10988 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
10989 }
10990 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
10991 }
10992
10993 case STATEMENT_LIST:
10994 {
10995 tree_stmt_iterator i;
10996 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
10997 WALK_SUBTREE (*tsi_stmt_ptr (i));
10998 }
10999 break;
11000
11001 case OMP_CLAUSE:
11002 switch (OMP_CLAUSE_CODE (*tp))
11003 {
11004 case OMP_CLAUSE_PRIVATE:
11005 case OMP_CLAUSE_SHARED:
11006 case OMP_CLAUSE_FIRSTPRIVATE:
11007 case OMP_CLAUSE_COPYIN:
11008 case OMP_CLAUSE_COPYPRIVATE:
11009 case OMP_CLAUSE_FINAL:
11010 case OMP_CLAUSE_IF:
11011 case OMP_CLAUSE_NUM_THREADS:
11012 case OMP_CLAUSE_SCHEDULE:
11013 case OMP_CLAUSE_UNIFORM:
11014 case OMP_CLAUSE_DEPEND:
11015 case OMP_CLAUSE_NUM_TEAMS:
11016 case OMP_CLAUSE_THREAD_LIMIT:
11017 case OMP_CLAUSE_DEVICE:
11018 case OMP_CLAUSE_DIST_SCHEDULE:
11019 case OMP_CLAUSE_SAFELEN:
11020 case OMP_CLAUSE_SIMDLEN:
11021 case OMP_CLAUSE__LOOPTEMP_:
11022 case OMP_CLAUSE__SIMDUID_:
11023 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11024 /* FALLTHRU */
11025
11026 case OMP_CLAUSE_NOWAIT:
11027 case OMP_CLAUSE_ORDERED:
11028 case OMP_CLAUSE_DEFAULT:
11029 case OMP_CLAUSE_UNTIED:
11030 case OMP_CLAUSE_MERGEABLE:
11031 case OMP_CLAUSE_PROC_BIND:
11032 case OMP_CLAUSE_INBRANCH:
11033 case OMP_CLAUSE_NOTINBRANCH:
11034 case OMP_CLAUSE_FOR:
11035 case OMP_CLAUSE_PARALLEL:
11036 case OMP_CLAUSE_SECTIONS:
11037 case OMP_CLAUSE_TASKGROUP:
11038 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11039
11040 case OMP_CLAUSE_LASTPRIVATE:
11041 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11042 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11043 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11044
11045 case OMP_CLAUSE_COLLAPSE:
11046 {
11047 int i;
11048 for (i = 0; i < 3; i++)
11049 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11050 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11051 }
11052
11053 case OMP_CLAUSE_ALIGNED:
11054 case OMP_CLAUSE_LINEAR:
11055 case OMP_CLAUSE_FROM:
11056 case OMP_CLAUSE_TO:
11057 case OMP_CLAUSE_MAP:
11058 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11059 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11060 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11061
11062 case OMP_CLAUSE_REDUCTION:
11063 {
11064 int i;
11065 for (i = 0; i < 4; i++)
11066 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11067 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11068 }
11069
11070 default:
11071 gcc_unreachable ();
11072 }
11073 break;
11074
11075 case TARGET_EXPR:
11076 {
11077 int i, len;
11078
11079 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11080 But, we only want to walk them once. */
11081 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11082 for (i = 0; i < len; ++i)
11083 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11084 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11085 }
11086
11087 case DECL_EXPR:
11088 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11089 defining. We only want to walk into these fields of a type in this
11090 case and not in the general case of a mere reference to the type.
11091
11092 The criterion is as follows: if the field can be an expression, it
11093 must be walked only here. This should be in keeping with the fields
11094 that are directly gimplified in gimplify_type_sizes in order for the
11095 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11096 variable-sized types.
11097
11098 Note that DECLs get walked as part of processing the BIND_EXPR. */
11099 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11100 {
11101 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11102 if (TREE_CODE (*type_p) == ERROR_MARK)
11103 return NULL_TREE;
11104
11105 /* Call the function for the type. See if it returns anything or
11106 doesn't want us to continue. If we are to continue, walk both
11107 the normal fields and those for the declaration case. */
11108 result = (*func) (type_p, &walk_subtrees, data);
11109 if (result || !walk_subtrees)
11110 return result;
11111
11112 /* But do not walk a pointed-to type since it may itself need to
11113 be walked in the declaration case if it isn't anonymous. */
11114 if (!POINTER_TYPE_P (*type_p))
11115 {
11116 result = walk_type_fields (*type_p, func, data, pset, lh);
11117 if (result)
11118 return result;
11119 }
11120
11121 /* If this is a record type, also walk the fields. */
11122 if (RECORD_OR_UNION_TYPE_P (*type_p))
11123 {
11124 tree field;
11125
11126 for (field = TYPE_FIELDS (*type_p); field;
11127 field = DECL_CHAIN (field))
11128 {
11129 /* We'd like to look at the type of the field, but we can
11130 easily get infinite recursion. So assume it's pointed
11131 to elsewhere in the tree. Also, ignore things that
11132 aren't fields. */
11133 if (TREE_CODE (field) != FIELD_DECL)
11134 continue;
11135
11136 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11137 WALK_SUBTREE (DECL_SIZE (field));
11138 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11139 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11140 WALK_SUBTREE (DECL_QUALIFIER (field));
11141 }
11142 }
11143
11144 /* Same for scalar types. */
11145 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11146 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11147 || TREE_CODE (*type_p) == INTEGER_TYPE
11148 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11149 || TREE_CODE (*type_p) == REAL_TYPE)
11150 {
11151 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11152 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11153 }
11154
11155 WALK_SUBTREE (TYPE_SIZE (*type_p));
11156 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11157 }
11158 /* FALLTHRU */
11159
11160 default:
11161 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11162 {
11163 int i, len;
11164
11165 /* Walk over all the sub-trees of this operand. */
11166 len = TREE_OPERAND_LENGTH (*tp);
11167
11168 /* Go through the subtrees. We need to do this in forward order so
11169 that the scope of a FOR_EXPR is handled properly. */
11170 if (len)
11171 {
11172 for (i = 0; i < len - 1; ++i)
11173 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11174 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11175 }
11176 }
11177 /* If this is a type, walk the needed fields in the type. */
11178 else if (TYPE_P (*tp))
11179 return walk_type_fields (*tp, func, data, pset, lh);
11180 break;
11181 }
11182
11183 /* We didn't find what we were looking for. */
11184 return NULL_TREE;
11185
11186 #undef WALK_SUBTREE_TAIL
11187 }
11188 #undef WALK_SUBTREE
11189
11190 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11191
11192 tree
11193 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11194 walk_tree_lh lh)
11195 {
11196 tree result;
11197 struct pointer_set_t *pset;
11198
11199 pset = pointer_set_create ();
11200 result = walk_tree_1 (tp, func, data, pset, lh);
11201 pointer_set_destroy (pset);
11202 return result;
11203 }
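
/* For illustration, a minimal sketch of a walk_tree_fn callback used with the
   walk_tree_without_duplicates wrapper macro.  The callback name
   count_calls_r and the walked expression EXPR are hypothetical.  Returning
   a non-NULL tree from the callback stops the walk and is propagated to the
   caller; clearing *WALK_SUBTREES skips the operands of the current node.

     static tree
     count_calls_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         ++*(int *) data;
       return NULL_TREE;
     }

     int n = 0;
     walk_tree_without_duplicates (&expr, count_calls_r, &n);  */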
11204
11205
11206 tree
11207 tree_block (tree t)
11208 {
11209 char const c = TREE_CODE_CLASS (TREE_CODE (t));
11210
11211 if (IS_EXPR_CODE_CLASS (c))
11212 return LOCATION_BLOCK (t->exp.locus);
11213 gcc_unreachable ();
11214 return NULL;
11215 }
11216
11217 void
11218 tree_set_block (tree t, tree b)
11219 {
11220 char const c = TREE_CODE_CLASS (TREE_CODE (t));
11221
11222 if (IS_EXPR_CODE_CLASS (c))
11223 {
11224 if (b)
11225 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11226 else
11227 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11228 }
11229 else
11230 gcc_unreachable ();
11231 }
11232
11233 /* Create a nameless artificial label and put it in the current
11234 function context. The label has a location of LOC. Returns the
11235 newly created label. */
11236
11237 tree
11238 create_artificial_label (location_t loc)
11239 {
11240 tree lab = build_decl (loc,
11241 LABEL_DECL, NULL_TREE, void_type_node);
11242
11243 DECL_ARTIFICIAL (lab) = 1;
11244 DECL_IGNORED_P (lab) = 1;
11245 DECL_CONTEXT (lab) = current_function_decl;
11246 return lab;
11247 }
11248
11249 /* Given a tree, try to return a useful variable name that we can use
11250 to prefix a temporary that is being assigned the value of the tree.
11251 I.e. given <temp> = &A, return A. */
11252
11253 const char *
11254 get_name (tree t)
11255 {
11256 tree stripped_decl;
11257
11258 stripped_decl = t;
11259 STRIP_NOPS (stripped_decl);
11260 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11261 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11262 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11263 {
11264 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11265 if (!name)
11266 return NULL;
11267 return IDENTIFIER_POINTER (name);
11268 }
11269 else
11270 {
11271 switch (TREE_CODE (stripped_decl))
11272 {
11273 case ADDR_EXPR:
11274 return get_name (TREE_OPERAND (stripped_decl, 0));
11275 default:
11276 return NULL;
11277 }
11278 }
11279 }
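
/* For illustration (hypothetical GENERIC fragments): for the right-hand side
   of

     tmp = &foo;

   get_name looks through the ADDR_EXPR and returns "foo"; for an SSA name
   such as foo_3 it returns "foo"; for anything else without a usable
   DECL_NAME or identifier it returns NULL.  */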
11280
11281 /* Return true if the function type FNTYPE has a variable argument list. */
11282
11283 bool
11284 stdarg_p (const_tree fntype)
11285 {
11286 function_args_iterator args_iter;
11287 tree n = NULL_TREE, t;
11288
11289 if (!fntype)
11290 return false;
11291
11292 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11293 {
11294 n = t;
11295 }
11296
11297 return n != NULL_TREE && n != void_type_node;
11298 }
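
/* Illustrative answers, assuming the usual C front-end representation of
   argument lists:

     int f (int, ...);   -- stdarg_p is true: the argument chain does not
        end in void_type_node.
     int g (int);        -- false: the chain is terminated by void_type_node.
     int h ();           -- false: an unprototyped declaration has no
        argument chain at all (see prototype_p below).  */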
11299
11300 /* Return true if the function type FNTYPE has a prototype. */
11301
11302 bool
11303 prototype_p (tree fntype)
11304 {
11305 tree t;
11306
11307 gcc_assert (fntype != NULL_TREE);
11308
11309 t = TYPE_ARG_TYPES (fntype);
11310 return (t != NULL_TREE);
11311 }
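
/* For example (again assuming C front-end conventions): for "int f (void);"
   and "int g (int);" TYPE_ARG_TYPES is non-NULL and prototype_p is true,
   while for the old-style declaration "int h ();" TYPE_ARG_TYPES is
   NULL_TREE and prototype_p is false.  */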
11312
11313 /* If BLOCK is inlined from an __attribute__((__artificial__))
11314 routine, return a pointer to the location from which it was
11315 called. */
11316 location_t *
11317 block_nonartificial_location (tree block)
11318 {
11319 location_t *ret = NULL;
11320
11321 while (block && TREE_CODE (block) == BLOCK
11322 && BLOCK_ABSTRACT_ORIGIN (block))
11323 {
11324 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11325
11326 while (TREE_CODE (ao) == BLOCK
11327 && BLOCK_ABSTRACT_ORIGIN (ao)
11328 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11329 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11330
11331 if (TREE_CODE (ao) == FUNCTION_DECL)
11332 {
11333 /* If AO is an artificial inline, point RET to the
11334 call site locus at which it has been inlined and continue
11335 the loop, in case AO's caller is also an artificial
11336 inline. */
11337 if (DECL_DECLARED_INLINE_P (ao)
11338 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11339 ret = &BLOCK_SOURCE_LOCATION (block);
11340 else
11341 break;
11342 }
11343 else if (TREE_CODE (ao) != BLOCK)
11344 break;
11345
11346 block = BLOCK_SUPERCONTEXT (block);
11347 }
11348 return ret;
11349 }
11350
11351
11352 /* If EXP is inlined from an __attribute__((__artificial__))
11353 function, return the location of the original call expression. */
11354
11355 location_t
11356 tree_nonartificial_location (tree exp)
11357 {
11358 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11359
11360 if (loc)
11361 return *loc;
11362 else
11363 return EXPR_LOCATION (exp);
11364 }
11365
11366
11367 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11368 nodes. */
11369
11370 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11371
11372 static hashval_t
11373 cl_option_hash_hash (const void *x)
11374 {
11375 const_tree const t = (const_tree) x;
11376 const char *p;
11377 size_t i;
11378 size_t len = 0;
11379 hashval_t hash = 0;
11380
11381 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11382 {
11383 p = (const char *)TREE_OPTIMIZATION (t);
11384 len = sizeof (struct cl_optimization);
11385 }
11386
11387 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11388 {
11389 p = (const char *)TREE_TARGET_OPTION (t);
11390 len = sizeof (struct cl_target_option);
11391 }
11392
11393 else
11394 gcc_unreachable ();
11395
11396 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11397 something else. */
11398 for (i = 0; i < len; i++)
11399 if (p[i])
11400 hash = (hash << 4) ^ ((i << 2) | p[i]);
11401
11402 return hash;
11403 }
11404
11405 /* Return nonzero if the value represented by *X (an OPTIMIZATION or
11406 TARGET_OPTION tree node) is the same as that represented by *Y (a node
11407 of either kind). */
11408
11409 static int
11410 cl_option_hash_eq (const void *x, const void *y)
11411 {
11412 const_tree const xt = (const_tree) x;
11413 const_tree const yt = (const_tree) y;
11414 const char *xp;
11415 const char *yp;
11416 size_t len;
11417
11418 if (TREE_CODE (xt) != TREE_CODE (yt))
11419 return 0;
11420
11421 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11422 {
11423 xp = (const char *)TREE_OPTIMIZATION (xt);
11424 yp = (const char *)TREE_OPTIMIZATION (yt);
11425 len = sizeof (struct cl_optimization);
11426 }
11427
11428 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11429 {
11430 xp = (const char *)TREE_TARGET_OPTION (xt);
11431 yp = (const char *)TREE_TARGET_OPTION (yt);
11432 len = sizeof (struct cl_target_option);
11433 }
11434
11435 else
11436 gcc_unreachable ();
11437
11438 return (memcmp (xp, yp, len) == 0);
11439 }
11440
11441 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11442
11443 tree
11444 build_optimization_node (struct gcc_options *opts)
11445 {
11446 tree t;
11447 void **slot;
11448
11449 /* Use the cache of optimization nodes. */
11450
11451 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11452 opts);
11453
11454 slot = htab_find_slot (cl_option_hash_table, cl_optimization_node, INSERT);
11455 t = (tree) *slot;
11456 if (!t)
11457 {
11458 /* Insert this one into the hash table. */
11459 t = cl_optimization_node;
11460 *slot = t;
11461
11462 /* Make a new node for next time round. */
11463 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11464 }
11465
11466 return t;
11467 }
11468
11469 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11470
11471 tree
11472 build_target_option_node (struct gcc_options *opts)
11473 {
11474 tree t;
11475 void **slot;
11476
11477 /* Use the cache of optimization nodes. */
11478
11479 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11480 opts);
11481
11482 slot = htab_find_slot (cl_option_hash_table, cl_target_option_node, INSERT);
11483 t = (tree) *slot;
11484 if (!t)
11485 {
11486 /* Insert this one into the hash table. */
11487 t = cl_target_option_node;
11488 *slot = t;
11489
11490 /* Make a new node for next time round. */
11491 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11492 }
11493
11494 return t;
11495 }
11496
11497 /* Determine the "ultimate origin" of a block. The block may be an inlined
11498 instance of an inlined instance of a block which is local to an inline
11499 function, so we have to trace all of the way back through the origin chain
11500 to find out what sort of node actually served as the original seed for the
11501 given block. */
11502
11503 tree
11504 block_ultimate_origin (const_tree block)
11505 {
11506 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11507
11508 /* output_inline_function sets BLOCK_ABSTRACT_ORIGIN for all the
11509 nodes in the function to point to themselves; ignore that if
11510 we're trying to output the abstract instance of this function. */
11511 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11512 return NULL_TREE;
11513
11514 if (immediate_origin == NULL_TREE)
11515 return NULL_TREE;
11516 else
11517 {
11518 tree ret_val;
11519 tree lookahead = immediate_origin;
11520
11521 do
11522 {
11523 ret_val = lookahead;
11524 lookahead = (TREE_CODE (ret_val) == BLOCK
11525 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11526 }
11527 while (lookahead != NULL && lookahead != ret_val);
11528
11529 /* The block's abstract origin chain may not be the *ultimate* origin of
11530 the block. It could lead to a DECL that has an abstract origin set.
11531 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11532 will give us if it has one). Note that DECL's abstract origins are
11533 supposed to be the most distant ancestor (or so decl_ultimate_origin
11534 claims), so we don't need to loop following the DECL origins. */
11535 if (DECL_P (ret_val))
11536 return DECL_ORIGIN (ret_val);
11537
11538 return ret_val;
11539 }
11540 }
11541
11542 /* Return true iff conversion in EXP generates no instruction. Mark
11543 it inline so that we fully inline into the stripping functions even
11544 though we have two uses of this function. */
11545
11546 static inline bool
11547 tree_nop_conversion (const_tree exp)
11548 {
11549 tree outer_type, inner_type;
11550
11551 if (!CONVERT_EXPR_P (exp)
11552 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11553 return false;
11554 if (TREE_OPERAND (exp, 0) == error_mark_node)
11555 return false;
11556
11557 outer_type = TREE_TYPE (exp);
11558 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11559
11560 if (!inner_type)
11561 return false;
11562
11563 /* Use precision rather than machine mode when we can, which gives
11564 the correct answer even for submode (bit-field) types. */
11565 if ((INTEGRAL_TYPE_P (outer_type)
11566 || POINTER_TYPE_P (outer_type)
11567 || TREE_CODE (outer_type) == OFFSET_TYPE)
11568 && (INTEGRAL_TYPE_P (inner_type)
11569 || POINTER_TYPE_P (inner_type)
11570 || TREE_CODE (inner_type) == OFFSET_TYPE))
11571 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11572
11573 /* Otherwise fall back on comparing machine modes (e.g. for
11574 aggregate types, floats). */
11575 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11576 }
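
/* A few illustrative cases (the answers depend on the target's type
   precisions; assume an example layout with 32-bit int and long and
   16-bit short):

     (long) i        -- precision 32 -> 32: a nop conversion.
     (short) i       -- precision 32 -> 16: not a nop.
     (void *) &x     -- pointer-to-pointer of equal precision: a nop.
     (double) f      -- handled by the TYPE_MODE fallback; SFmode != DFmode,
        so not a nop.  */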
11577
11578 /* Return true iff conversion in EXP generates no instruction. Don't
11579 consider conversions changing the signedness. */
11580
11581 static bool
11582 tree_sign_nop_conversion (const_tree exp)
11583 {
11584 tree outer_type, inner_type;
11585
11586 if (!tree_nop_conversion (exp))
11587 return false;
11588
11589 outer_type = TREE_TYPE (exp);
11590 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11591
11592 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11593 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11594 }
11595
11596 /* Strip conversions from EXP according to tree_nop_conversion and
11597 return the resulting expression. */
11598
11599 tree
11600 tree_strip_nop_conversions (tree exp)
11601 {
11602 while (tree_nop_conversion (exp))
11603 exp = TREE_OPERAND (exp, 0);
11604 return exp;
11605 }
11606
11607 /* Strip conversions from EXP according to tree_sign_nop_conversion
11608 and return the resulting expression. */
11609
11610 tree
11611 tree_strip_sign_nop_conversions (tree exp)
11612 {
11613 while (tree_sign_nop_conversion (exp))
11614 exp = TREE_OPERAND (exp, 0);
11615 return exp;
11616 }
11617
11618 /* Strip away any floating point extensions from EXP and return the result. */
11619 tree
11620 strip_float_extensions (tree exp)
11621 {
11622 tree sub, expt, subt;
11623
11624 /* For a floating point constant, look up the narrowest type that can hold
11625 it properly and handle it like (type)(narrowest_type)constant.
11626 This way we can optimize for instance a=a*2.0 where "a" is a float
11627 but 2.0 is a double constant. */
11628 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11629 {
11630 REAL_VALUE_TYPE orig;
11631 tree type = NULL;
11632
11633 orig = TREE_REAL_CST (exp);
11634 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11635 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11636 type = float_type_node;
11637 else if (TYPE_PRECISION (TREE_TYPE (exp))
11638 > TYPE_PRECISION (double_type_node)
11639 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11640 type = double_type_node;
11641 if (type)
11642 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11643 }
11644
11645 if (!CONVERT_EXPR_P (exp))
11646 return exp;
11647
11648 sub = TREE_OPERAND (exp, 0);
11649 subt = TREE_TYPE (sub);
11650 expt = TREE_TYPE (exp);
11651
11652 if (!FLOAT_TYPE_P (subt))
11653 return exp;
11654
11655 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11656 return exp;
11657
11658 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11659 return exp;
11660
11661 return strip_float_extensions (sub);
11662 }
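
/* Worked example (illustrative): with "float f", the expression f * 2.0 is
   represented roughly as

     (double) f * 2.0;

   Applying strip_float_extensions to the operands yields "f" for the
   converted operand and, because 2.0 truncates exactly, a float-typed
   constant for the REAL_CST, which lets callers arrange for the
   multiplication to be done in single precision.  */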
11663
11664 /* Strip out all handled components that produce invariant
11665 offsets. */
11666
11667 const_tree
11668 strip_invariant_refs (const_tree op)
11669 {
11670 while (handled_component_p (op))
11671 {
11672 switch (TREE_CODE (op))
11673 {
11674 case ARRAY_REF:
11675 case ARRAY_RANGE_REF:
11676 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11677 || TREE_OPERAND (op, 2) != NULL_TREE
11678 || TREE_OPERAND (op, 3) != NULL_TREE)
11679 return NULL;
11680 break;
11681
11682 case COMPONENT_REF:
11683 if (TREE_OPERAND (op, 2) != NULL_TREE)
11684 return NULL;
11685 break;
11686
11687 default:;
11688 }
11689 op = TREE_OPERAND (op, 0);
11690 }
11691
11692 return op;
11693 }
11694
11695 static GTY(()) tree gcc_eh_personality_decl;
11696
11697 /* Return the GCC personality function decl. */
11698
11699 tree
11700 lhd_gcc_personality (void)
11701 {
11702 if (!gcc_eh_personality_decl)
11703 gcc_eh_personality_decl = build_personality_function ("gcc");
11704 return gcc_eh_personality_decl;
11705 }
11706
11707 /* For languages with the One Definition Rule, work out if
11708 trees are actually the same even if the tree representation
11709 differs. This handles only decls appearing in TYPE_NAME
11710 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11711 RECORD_TYPE and IDENTIFIER_NODE. */
11712
11713 static bool
11714 same_for_odr (tree t1, tree t2)
11715 {
11716 if (t1 == t2)
11717 return true;
11718 if (!t1 || !t2)
11719 return false;
11720 /* C and C++ FEs differ by using IDENTIFIER_NODE and TYPE_DECL. */
11721 if (TREE_CODE (t1) == IDENTIFIER_NODE
11722 && TREE_CODE (t2) == TYPE_DECL
11723 && DECL_FILE_SCOPE_P (t1))
11724 {
11725 t2 = DECL_NAME (t2);
11726 gcc_assert (TREE_CODE (t2) == IDENTIFIER_NODE);
11727 }
11728 if (TREE_CODE (t2) == IDENTIFIER_NODE
11729 && TREE_CODE (t1) == TYPE_DECL
11730 && DECL_FILE_SCOPE_P (t2))
11731 {
11732 t1 = DECL_NAME (t1);
11733 gcc_assert (TREE_CODE (t1) == IDENTIFIER_NODE);
11734 }
11735 if (TREE_CODE (t1) != TREE_CODE (t2))
11736 return false;
11737 if (TYPE_P (t1))
11738 return types_same_for_odr (t1, t2);
11739 if (DECL_P (t1))
11740 return decls_same_for_odr (t1, t2);
11741 return false;
11742 }
11743
11744 /* For languages with the One Definition Rule, work out if
11745 decls are actually the same even if the tree representation
11746 differs. This handles only decls appearing in TYPE_NAME
11747 and TYPE_CONTEXT. That is NAMESPACE_DECL, TYPE_DECL,
11748 RECORD_TYPE and IDENTIFIER_NODE. */
11749
11750 static bool
11751 decls_same_for_odr (tree decl1, tree decl2)
11752 {
11753 if (decl1 && TREE_CODE (decl1) == TYPE_DECL
11754 && DECL_ORIGINAL_TYPE (decl1))
11755 decl1 = DECL_ORIGINAL_TYPE (decl1);
11756 if (decl2 && TREE_CODE (decl2) == TYPE_DECL
11757 && DECL_ORIGINAL_TYPE (decl2))
11758 decl2 = DECL_ORIGINAL_TYPE (decl2);
11759 if (decl1 == decl2)
11760 return true;
11761 if (!decl1 || !decl2)
11762 return false;
11763 gcc_checking_assert (DECL_P (decl1) && DECL_P (decl2));
11764 if (TREE_CODE (decl1) != TREE_CODE (decl2))
11765 return false;
11766 if (TREE_CODE (decl1) == TRANSLATION_UNIT_DECL)
11767 return true;
11768 if (TREE_CODE (decl1) != NAMESPACE_DECL
11769 && TREE_CODE (decl1) != TYPE_DECL)
11770 return false;
11771 if (!DECL_NAME (decl1))
11772 return false;
11773 gcc_checking_assert (TREE_CODE (DECL_NAME (decl1)) == IDENTIFIER_NODE);
11774 gcc_checking_assert (!DECL_NAME (decl2)
11775 || TREE_CODE (DECL_NAME (decl2)) == IDENTIFIER_NODE);
11776 if (DECL_NAME (decl1) != DECL_NAME (decl2))
11777 return false;
11778 return same_for_odr (DECL_CONTEXT (decl1),
11779 DECL_CONTEXT (decl2));
11780 }
11781
11782 /* For languages with the One Definition Rule, work out if
11783 types are the same even if the tree representation differs.
11784 This is non-trivial for LTO, where minor differences in
11785 the type representation may have prevented type merging
11786 from merging two copies of an otherwise equivalent type. */
11787
11788 bool
11789 types_same_for_odr (tree type1, tree type2)
11790 {
11791 gcc_checking_assert (TYPE_P (type1) && TYPE_P (type2));
11792 type1 = TYPE_MAIN_VARIANT (type1);
11793 type2 = TYPE_MAIN_VARIANT (type2);
11794 if (type1 == type2)
11795 return true;
11796
11797 #ifndef ENABLE_CHECKING
11798 if (!in_lto_p)
11799 return false;
11800 #endif
11801
11802 /* Check for anonymous namespaces. Those have !TREE_PUBLIC
11803 on the corresponding TYPE_STUB_DECL. */
11804 if (type_in_anonymous_namespace_p (type1)
11805 || type_in_anonymous_namespace_p (type2))
11806 return false;
11807 /* When the assembler name of the virtual table is available, it is
11808 easy to compare types for equivalence. */
11809 if (TYPE_BINFO (type1) && TYPE_BINFO (type2)
11810 && BINFO_VTABLE (TYPE_BINFO (type1))
11811 && BINFO_VTABLE (TYPE_BINFO (type2)))
11812 {
11813 tree v1 = BINFO_VTABLE (TYPE_BINFO (type1));
11814 tree v2 = BINFO_VTABLE (TYPE_BINFO (type2));
11815
11816 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
11817 {
11818 if (TREE_CODE (v2) != POINTER_PLUS_EXPR
11819 || !operand_equal_p (TREE_OPERAND (v1, 1),
11820 TREE_OPERAND (v2, 1), 0))
11821 return false;
11822 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
11823 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
11824 }
11825 v1 = DECL_ASSEMBLER_NAME (v1);
11826 v2 = DECL_ASSEMBLER_NAME (v2);
11827 return (v1 == v2);
11828 }
11829
11830 /* FIXME: the code comparing type names considers all instantiations of the
11831 same template to have the same name. This is because we have no access
11832 to template parameters. For types with no virtual method tables
11833 we can thus return false positives. At the moment we do not need
11834 to compare types in scenarios other than devirtualization. */
11835
11836 /* If the types are not structurally the same, do not bother to continue.
11837 A match in the remainder of the code would mean an ODR violation. */
11838 if (!types_compatible_p (type1, type2))
11839 return false;
11840 if (!TYPE_NAME (type1))
11841 return false;
11842 if (!decls_same_for_odr (TYPE_NAME (type1), TYPE_NAME (type2)))
11843 return false;
11844 if (!same_for_odr (TYPE_CONTEXT (type1), TYPE_CONTEXT (type2)))
11845 return false;
11846 /* When not in LTO, the TYPE_MAIN_VARIANT check above should have been sufficient. */
11847 gcc_assert (in_lto_p);
11848
11849 return true;
11850 }
11851
11852 /* TARGET is a call target of a GIMPLE call statement
11853 (obtained by gimple_call_fn). Return true if it is an
11854 OBJ_TYPE_REF representing a virtual call of a C++ method.
11855 (As opposed to an OBJ_TYPE_REF representing ObjC calls
11856 through a cast, where the middle-end devirtualization machinery
11857 can't apply.) */
11858
11859 bool
11860 virtual_method_call_p (tree target)
11861 {
11862 if (TREE_CODE (target) != OBJ_TYPE_REF)
11863 return false;
11864 target = TREE_TYPE (target);
11865 gcc_checking_assert (TREE_CODE (target) == POINTER_TYPE);
11866 target = TREE_TYPE (target);
11867 if (TREE_CODE (target) == FUNCTION_TYPE)
11868 return false;
11869 gcc_checking_assert (TREE_CODE (target) == METHOD_TYPE);
11870 return true;
11871 }
11872
11873 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
11874
11875 tree
11876 obj_type_ref_class (tree ref)
11877 {
11878 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11879 ref = TREE_TYPE (ref);
11880 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11881 ref = TREE_TYPE (ref);
11882 /* We look for the type THIS points to. ObjC also builds
11883 OBJ_TYPE_REF with non-method calls; their first parameter
11884 ID, however, also corresponds to the class type. */
11885 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11886 || TREE_CODE (ref) == FUNCTION_TYPE);
11887 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11888 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11889 return TREE_TYPE (ref);
11890 }
11891
11892 /* Return true if T is in an anonymous namespace. */
11893
11894 bool
11895 type_in_anonymous_namespace_p (tree t)
11896 {
11897 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11898 }
11899
11900 /* Try to find a base info of BINFO that would have its field decl at offset
11901 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11902 found, return it, otherwise return NULL_TREE. */
11903
11904 tree
11905 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11906 {
11907 tree type = BINFO_TYPE (binfo);
11908
11909 while (true)
11910 {
11911 HOST_WIDE_INT pos, size;
11912 tree fld;
11913 int i;
11914
11915 if (types_same_for_odr (type, expected_type))
11916 return binfo;
11917 if (offset < 0)
11918 return NULL_TREE;
11919
11920 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
11921 {
11922 if (TREE_CODE (fld) != FIELD_DECL)
11923 continue;
11924
11925 pos = int_bit_position (fld);
11926 size = tree_low_cst (DECL_SIZE (fld), 1);
11927 if (pos <= offset && (pos + size) > offset)
11928 break;
11929 }
11930 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
11931 return NULL_TREE;
11932
11933 if (!DECL_ARTIFICIAL (fld))
11934 {
11935 binfo = TYPE_BINFO (TREE_TYPE (fld));
11936 if (!binfo)
11937 return NULL_TREE;
11938 }
11939 /* Offset 0 indicates the primary base, whose vtable contents are
11940 represented in the binfo for the derived class. */
11941 else if (offset != 0)
11942 {
11943 tree base_binfo, found_binfo = NULL_TREE;
11944 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
11945 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
11946 {
11947 found_binfo = base_binfo;
11948 break;
11949 }
11950 if (!found_binfo)
11951 return NULL_TREE;
11952 binfo = found_binfo;
11953 }
11954
11955 type = TREE_TYPE (fld);
11956 offset -= pos;
11957 }
11958 }
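
/* Sketch of the intended use, with an assumed (target-dependent) layout:

     struct A { virtual void f (); };
     struct B { virtual void g (); };
     struct C : A, B { };

   Calling this with the binfo of C, EXPECTED_TYPE B and OFFSET equal to the
   bit position of the (artificial) B base field selects that field, finds
   the matching base binfo via BINFO_BASE_ITERATE and returns the binfo
   describing B within C.  An OFFSET not covered by any field yields
   NULL_TREE.  */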
11959
11960 /* Returns true if X is a typedef decl. */
11961
11962 bool
11963 is_typedef_decl (tree x)
11964 {
11965 return (x && TREE_CODE (x) == TYPE_DECL
11966 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
11967 }
11968
11969 /* Returns true iff TYPE is a type variant created for a typedef. */
11970
11971 bool
11972 typedef_variant_p (tree type)
11973 {
11974 return is_typedef_decl (TYPE_NAME (type));
11975 }
11976
11977 /* Warn about a use of an identifier which was marked deprecated. */
11978 void
11979 warn_deprecated_use (tree node, tree attr)
11980 {
11981 const char *msg;
11982
11983 if (node == 0 || !warn_deprecated_decl)
11984 return;
11985
11986 if (!attr)
11987 {
11988 if (DECL_P (node))
11989 attr = DECL_ATTRIBUTES (node);
11990 else if (TYPE_P (node))
11991 {
11992 tree decl = TYPE_STUB_DECL (node);
11993 if (decl)
11994 attr = lookup_attribute ("deprecated",
11995 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
11996 }
11997 }
11998
11999 if (attr)
12000 attr = lookup_attribute ("deprecated", attr);
12001
12002 if (attr)
12003 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12004 else
12005 msg = NULL;
12006
12007 if (DECL_P (node))
12008 {
12009 expanded_location xloc = expand_location (DECL_SOURCE_LOCATION (node));
12010 if (msg)
12011 warning (OPT_Wdeprecated_declarations,
12012 "%qD is deprecated (declared at %r%s:%d%R): %s",
12013 node, "locus", xloc.file, xloc.line, msg);
12014 else
12015 warning (OPT_Wdeprecated_declarations,
12016 "%qD is deprecated (declared at %r%s:%d%R)",
12017 node, "locus", xloc.file, xloc.line);
12018 }
12019 else if (TYPE_P (node))
12020 {
12021 tree what = NULL_TREE;
12022 tree decl = TYPE_STUB_DECL (node);
12023
12024 if (TYPE_NAME (node))
12025 {
12026 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12027 what = TYPE_NAME (node);
12028 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12029 && DECL_NAME (TYPE_NAME (node)))
12030 what = DECL_NAME (TYPE_NAME (node));
12031 }
12032
12033 if (decl)
12034 {
12035 expanded_location xloc
12036 = expand_location (DECL_SOURCE_LOCATION (decl));
12037 if (what)
12038 {
12039 if (msg)
12040 warning (OPT_Wdeprecated_declarations,
12041 "%qE is deprecated (declared at %r%s:%d%R): %s",
12042 what, "locus", xloc.file, xloc.line, msg);
12043 else
12044 warning (OPT_Wdeprecated_declarations,
12045 "%qE is deprecated (declared at %r%s:%d%R)",
12046 what, "locus", xloc.file, xloc.line);
12047 }
12048 else
12049 {
12050 if (msg)
12051 warning (OPT_Wdeprecated_declarations,
12052 "type is deprecated (declared at %r%s:%d%R): %s",
12053 "locus", xloc.file, xloc.line, msg);
12054 else
12055 warning (OPT_Wdeprecated_declarations,
12056 "type is deprecated (declared at %r%s:%d%R)",
12057 "locus", xloc.file, xloc.line);
12058 }
12059 }
12060 else
12061 {
12062 if (what)
12063 {
12064 if (msg)
12065 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12066 what, msg);
12067 else
12068 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12069 }
12070 else
12071 {
12072 if (msg)
12073 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12074 msg);
12075 else
12076 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12077 }
12078 }
12079 }
12080 }
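
/* Example of user code that reaches this function (illustrative; the exact
   diagnostic depends on whether a message string and a source location are
   available):

     int old_api (void) __attribute__ ((deprecated ("use new_api")));
     int f (void) { return old_api (); }

   produces a -Wdeprecated-declarations warning along the lines of
   "'old_api' is deprecated (declared at t.c:1): use new_api".  */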
12081
12082 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12083 somewhere in it. */
12084
12085 bool
12086 contains_bitfld_component_ref_p (const_tree ref)
12087 {
12088 while (handled_component_p (ref))
12089 {
12090 if (TREE_CODE (ref) == COMPONENT_REF
12091 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12092 return true;
12093 ref = TREE_OPERAND (ref, 0);
12094 }
12095
12096 return false;
12097 }
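
/* For instance (illustrative):

     struct S { int a : 3; int b; } s;

   a reference to s.a is a COMPONENT_REF whose FIELD_DECL is a bit-field, so
   the predicate is true; references such as s.b or v[i] leave it false.  */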
12098
12099 /* Try to determine whether a TRY_CATCH expression can fall through.
12100 This is a subroutine of block_may_fallthru. */
12101
12102 static bool
12103 try_catch_may_fallthru (const_tree stmt)
12104 {
12105 tree_stmt_iterator i;
12106
12107 /* If the TRY block can fall through, the whole TRY_CATCH can
12108 fall through. */
12109 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12110 return true;
12111
12112 i = tsi_start (TREE_OPERAND (stmt, 1));
12113 switch (TREE_CODE (tsi_stmt (i)))
12114 {
12115 case CATCH_EXPR:
12116 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12117 catch expression and a body. The whole TRY_CATCH may fall
12118 through iff any of the catch bodies falls through. */
12119 for (; !tsi_end_p (i); tsi_next (&i))
12120 {
12121 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12122 return true;
12123 }
12124 return false;
12125
12126 case EH_FILTER_EXPR:
12127 /* The exception filter expression only matters if there is an
12128 exception. If the exception does not match EH_FILTER_TYPES,
12129 we will execute EH_FILTER_FAILURE, and we will fall through
12130 if that falls through. If the exception does match
12131 EH_FILTER_TYPES, the stack unwinder will continue up the
12132 stack, so we will not fall through. We don't know whether we
12133 will throw an exception which matches EH_FILTER_TYPES or not,
12134 so we just ignore EH_FILTER_TYPES and assume that we might
12135 throw an exception which doesn't match. */
12136 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12137
12138 default:
12139 /* This case represents statements to be executed when an
12140 exception occurs. Those statements are implicitly followed
12141 by a RESX statement to resume execution after the exception.
12142 So in this case the TRY_CATCH never falls through. */
12143 return false;
12144 }
12145 }
12146
12147 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12148 need not be 100% accurate; simply be conservative and return true if we
12149 don't know. This is used only to avoid stupidly generating extra code.
12150 If we're wrong, we'll just delete the extra code later. */
12151
12152 bool
12153 block_may_fallthru (const_tree block)
12154 {
12155 /* This CONST_CAST is okay because expr_last returns its argument
12156 unmodified and we assign it to a const_tree. */
12157 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12158
12159 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12160 {
12161 case GOTO_EXPR:
12162 case RETURN_EXPR:
12163 /* Easy cases. If the last statement of the block implies
12164 control transfer, then we can't fall through. */
12165 return false;
12166
12167 case SWITCH_EXPR:
12168 /* If SWITCH_LABELS is set, this is lowered, and represents a
12169 branch to a selected label and hence cannot fall through.
12170 Otherwise SWITCH_BODY is set, and the switch can fall
12171 through. */
12172 return SWITCH_LABELS (stmt) == NULL_TREE;
12173
12174 case COND_EXPR:
12175 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12176 return true;
12177 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12178
12179 case BIND_EXPR:
12180 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12181
12182 case TRY_CATCH_EXPR:
12183 return try_catch_may_fallthru (stmt);
12184
12185 case TRY_FINALLY_EXPR:
12186 /* The finally clause is always executed after the try clause,
12187 so if it does not fall through, then the try-finally will not
12188 fall through. Otherwise, if the try clause does not fall
12189 through, then when the finally clause falls through it will
12190 resume execution wherever the try clause was going. So the
12191 whole try-finally will only fall through if both the try
12192 clause and the finally clause fall through. */
12193 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12194 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12195
12196 case MODIFY_EXPR:
12197 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12198 stmt = TREE_OPERAND (stmt, 1);
12199 else
12200 return true;
12201 /* FALLTHRU */
12202
12203 case CALL_EXPR:
12204 /* Functions that do not return do not fall through. */
12205 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12206
12207 case CLEANUP_POINT_EXPR:
12208 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12209
12210 case TARGET_EXPR:
12211 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12212
12213 case ERROR_MARK:
12214 return true;
12215
12216 default:
12217 return lang_hooks.block_may_fallthru (stmt);
12218 }
12219 }
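
/* Two illustrative answers (the statement forms are approximate GENERIC):

     { foo (); return x; }       -- the last statement is a RETURN_EXPR, so
        the block cannot fall through.
     { if (p) g (); bar (); }    -- the last statement is a call that is not
        ECF_NORETURN, so the block may fall through.  */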
12220
12221 /* True if we are using EH to handle cleanups. */
12222 static bool using_eh_for_cleanups_flag = false;
12223
12224 /* This routine is called from front ends to indicate that EH should be used
12225 for cleanups. */
12226 void
12227 using_eh_for_cleanups (void)
12228 {
12229 using_eh_for_cleanups_flag = true;
12230 }
12231
12232 /* Query whether EH is used for cleanups. */
12233 bool
12234 using_eh_for_cleanups_p (void)
12235 {
12236 return using_eh_for_cleanups_flag;
12237 }
12238
12239 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12240 const char *
12241 get_tree_code_name (enum tree_code code)
12242 {
12243 const char *invalid = "<invalid tree code>";
12244
12245 if (code >= MAX_TREE_CODES)
12246 return invalid;
12247
12248 return tree_code_name[code];
12249 }
12250
12251 /* Drops the TREE_OVERFLOW flag from T. */
12252
12253 tree
12254 drop_tree_overflow (tree t)
12255 {
12256 gcc_checking_assert (TREE_OVERFLOW (t));
12257
12258 /* For tree codes with a sharing machinery re-build the result. */
12259 if (TREE_CODE (t) == INTEGER_CST)
12260 return build_int_cst_wide (TREE_TYPE (t),
12261 TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t));
12262
12263 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12264 and drop the flag. */
12265 t = copy_node (t);
12266 TREE_OVERFLOW (t) = 0;
12267 return t;
12268 }
12269
12270 #include "gt-tree.h"