re PR lto/65316 (LTO: Uninitialized memory / ICE with -g -fno-lto-odr-type-merging...
[gcc.git] / gcc / tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
 24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "vec.h"
38 #include "double-int.h"
39 #include "input.h"
40 #include "alias.h"
41 #include "symtab.h"
42 #include "wide-int.h"
43 #include "inchash.h"
44 #include "tree.h"
45 #include "fold-const.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "attribs.h"
49 #include "varasm.h"
50 #include "tm_p.h"
51 #include "hashtab.h"
52 #include "hard-reg-set.h"
53 #include "function.h"
54 #include "obstack.h"
55 #include "toplev.h" /* get_random_seed */
56 #include "filenames.h"
57 #include "output.h"
58 #include "target.h"
59 #include "common/common-target.h"
60 #include "langhooks.h"
61 #include "tree-inline.h"
62 #include "tree-iterator.h"
63 #include "predict.h"
64 #include "dominance.h"
65 #include "cfg.h"
66 #include "basic-block.h"
67 #include "bitmap.h"
68 #include "tree-ssa-alias.h"
69 #include "internal-fn.h"
70 #include "gimple-expr.h"
71 #include "is-a.h"
72 #include "gimple.h"
73 #include "gimple-iterator.h"
74 #include "gimplify.h"
75 #include "gimple-ssa.h"
76 #include "hash-map.h"
77 #include "plugin-api.h"
78 #include "ipa-ref.h"
79 #include "cgraph.h"
80 #include "tree-phinodes.h"
81 #include "stringpool.h"
82 #include "tree-ssanames.h"
83 #include "rtl.h"
84 #include "statistics.h"
85 #include "real.h"
86 #include "fixed-value.h"
87 #include "insn-config.h"
88 #include "expmed.h"
89 #include "dojump.h"
90 #include "explow.h"
91 #include "emit-rtl.h"
92 #include "stmt.h"
93 #include "expr.h"
94 #include "tree-dfa.h"
95 #include "params.h"
96 #include "tree-pass.h"
97 #include "langhooks-def.h"
98 #include "diagnostic.h"
99 #include "tree-diagnostic.h"
100 #include "tree-pretty-print.h"
101 #include "except.h"
102 #include "debug.h"
103 #include "intl.h"
104 #include "builtins.h"
105
106 /* Tree code classes. */
107
108 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
109 #define END_OF_BASE_TREE_CODES tcc_exceptional,
110
111 const enum tree_code_class tree_code_type[] = {
112 #include "all-tree.def"
113 };
114
115 #undef DEFTREECODE
116 #undef END_OF_BASE_TREE_CODES
117
118 /* Table indexed by tree code giving number of expression
119 operands beyond the fixed part of the node structure.
120 Not used for types or decls. */
121
122 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
123 #define END_OF_BASE_TREE_CODES 0,
124
125 const unsigned char tree_code_length[] = {
126 #include "all-tree.def"
127 };
128
129 #undef DEFTREECODE
130 #undef END_OF_BASE_TREE_CODES
131
132 /* Names of tree components.
133 Used for printing out the tree and error messages. */
134 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
135 #define END_OF_BASE_TREE_CODES "@dummy",
136
137 static const char *const tree_code_name[] = {
138 #include "all-tree.def"
139 };
140
141 #undef DEFTREECODE
142 #undef END_OF_BASE_TREE_CODES
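/* For illustration: a single DEFTREECODE entry in tree.def, e.g.

       DEFTREECODE (PLUS_EXPR, "plus_expr", tcc_binary, 2)

   expands to tcc_binary in tree_code_type, to 2 in tree_code_length and
   to the string "plus_expr" in tree_code_name above, so the three tables
   stay in sync by construction.  */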
143
144 /* Each tree code class has an associated string representation.
145 These must correspond to the tree_code_class entries. */
146
147 const char *const tree_code_class_strings[] =
148 {
149 "exceptional",
150 "constant",
151 "type",
152 "declaration",
153 "reference",
154 "comparison",
155 "unary",
156 "binary",
157 "statement",
158 "vl_exp",
159 "expression"
160 };
161
162 /* obstack.[ch] explicitly declined to prototype this. */
163 extern int _obstack_allocated_p (struct obstack *h, void *obj);
164
165 /* Statistics-gathering stuff. */
166
167 static int tree_code_counts[MAX_TREE_CODES];
168 int tree_node_counts[(int) all_kinds];
169 int tree_node_sizes[(int) all_kinds];
170
171 /* Keep in sync with tree.h:enum tree_node_kind. */
172 static const char * const tree_node_kind_names[] = {
173 "decls",
174 "types",
175 "blocks",
176 "stmts",
177 "refs",
178 "exprs",
179 "constants",
180 "identifiers",
181 "vecs",
182 "binfos",
183 "ssa names",
184 "constructors",
185 "random kinds",
186 "lang_decl kinds",
187 "lang_type kinds",
188 "omp clauses",
189 };
190
191 /* Unique id for next decl created. */
192 static GTY(()) int next_decl_uid;
193 /* Unique id for next type created. */
194 static GTY(()) int next_type_uid = 1;
195 /* Unique id for next debug decl created. Use negative numbers,
196 to catch erroneous uses. */
197 static GTY(()) int next_debug_decl_uid;
198
199 /* Since we cannot rehash a type after it is in the table, we have to
200 keep the hash code. */
201
202 struct GTY((for_user)) type_hash {
203 unsigned long hash;
204 tree type;
205 };
206
207 /* Initial size of the hash table (rounded to next prime). */
208 #define TYPE_HASH_INITIAL_SIZE 1000
209
210 struct type_cache_hasher : ggc_cache_hasher<type_hash *>
211 {
212 static hashval_t hash (type_hash *t) { return t->hash; }
213 static bool equal (type_hash *a, type_hash *b);
214
215 static void
216 handle_cache_entry (type_hash *&t)
217 {
218 extern void gt_ggc_mx (type_hash *&);
219 if (t == HTAB_DELETED_ENTRY || t == HTAB_EMPTY_ENTRY)
220 return;
221 else if (ggc_marked_p (t->type))
222 gt_ggc_mx (t);
223 else
224 t = static_cast<type_hash *> (HTAB_DELETED_ENTRY);
225 }
226 };
227
228 /* Now here is the hash table. When recording a type, it is added to
229 the slot whose index is the hash code. Note that the hash table is
230 used for several kinds of types (function types, array types and
231 array index range types, for now). While all these live in the
232 same table, they are completely independent, and the hash code is
233 computed differently for each of these. */
234
235 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
236
237 /* Hash table and temporary node for larger integer const values. */
238 static GTY (()) tree int_cst_node;
239
240 struct int_cst_hasher : ggc_cache_hasher<tree>
241 {
242 static hashval_t hash (tree t);
243 static bool equal (tree x, tree y);
244 };
245
246 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
247
248 /* Hash table for optimization flags and target option flags. Use the same
249 hash table for both sets of options. Nodes for building the current
250 optimization and target option nodes. The assumption is most of the time
251 the options created will already be in the hash table, so we avoid
 252 allocating and freeing up a node repeatedly.  */
253 static GTY (()) tree cl_optimization_node;
254 static GTY (()) tree cl_target_option_node;
255
256 struct cl_option_hasher : ggc_cache_hasher<tree>
257 {
258 static hashval_t hash (tree t);
259 static bool equal (tree x, tree y);
260 };
261
262 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
263
264 /* General tree->tree mapping structure for use in hash tables. */
265
266
267 static GTY ((cache))
268 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
269
270 static GTY ((cache))
271 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
272
273 struct tree_vec_map_cache_hasher : ggc_cache_hasher<tree_vec_map *>
274 {
275 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
276
277 static bool
278 equal (tree_vec_map *a, tree_vec_map *b)
279 {
280 return a->base.from == b->base.from;
281 }
282
283 static void
284 handle_cache_entry (tree_vec_map *&m)
285 {
286 extern void gt_ggc_mx (tree_vec_map *&);
287 if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
288 return;
289 else if (ggc_marked_p (m->base.from))
290 gt_ggc_mx (m);
291 else
292 m = static_cast<tree_vec_map *> (HTAB_DELETED_ENTRY);
293 }
294 };
295
296 static GTY ((cache))
297 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
298
299 static void set_type_quals (tree, int);
300 static void print_type_hash_statistics (void);
301 static void print_debug_expr_statistics (void);
302 static void print_value_expr_statistics (void);
303 static void type_hash_list (const_tree, inchash::hash &);
304 static void attribute_hash_list (const_tree, inchash::hash &);
305
306 tree global_trees[TI_MAX];
307 tree integer_types[itk_none];
308
309 bool int_n_enabled_p[NUM_INT_N_ENTS];
310 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
311
312 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
313
314 /* Number of operands for each OpenMP clause. */
315 unsigned const char omp_clause_num_ops[] =
316 {
317 0, /* OMP_CLAUSE_ERROR */
318 1, /* OMP_CLAUSE_PRIVATE */
319 1, /* OMP_CLAUSE_SHARED */
320 1, /* OMP_CLAUSE_FIRSTPRIVATE */
321 2, /* OMP_CLAUSE_LASTPRIVATE */
322 4, /* OMP_CLAUSE_REDUCTION */
323 1, /* OMP_CLAUSE_COPYIN */
324 1, /* OMP_CLAUSE_COPYPRIVATE */
325 3, /* OMP_CLAUSE_LINEAR */
326 2, /* OMP_CLAUSE_ALIGNED */
327 1, /* OMP_CLAUSE_DEPEND */
328 1, /* OMP_CLAUSE_UNIFORM */
329 2, /* OMP_CLAUSE_FROM */
330 2, /* OMP_CLAUSE_TO */
331 2, /* OMP_CLAUSE_MAP */
332 2, /* OMP_CLAUSE__CACHE_ */
333 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
334 1, /* OMP_CLAUSE_USE_DEVICE */
335 2, /* OMP_CLAUSE_GANG */
336 1, /* OMP_CLAUSE_ASYNC */
337 1, /* OMP_CLAUSE_WAIT */
338 0, /* OMP_CLAUSE_AUTO */
339 0, /* OMP_CLAUSE_SEQ */
340 1, /* OMP_CLAUSE__LOOPTEMP_ */
341 1, /* OMP_CLAUSE_IF */
342 1, /* OMP_CLAUSE_NUM_THREADS */
343 1, /* OMP_CLAUSE_SCHEDULE */
344 0, /* OMP_CLAUSE_NOWAIT */
345 0, /* OMP_CLAUSE_ORDERED */
346 0, /* OMP_CLAUSE_DEFAULT */
347 3, /* OMP_CLAUSE_COLLAPSE */
348 0, /* OMP_CLAUSE_UNTIED */
349 1, /* OMP_CLAUSE_FINAL */
350 0, /* OMP_CLAUSE_MERGEABLE */
351 1, /* OMP_CLAUSE_DEVICE */
352 1, /* OMP_CLAUSE_DIST_SCHEDULE */
353 0, /* OMP_CLAUSE_INBRANCH */
354 0, /* OMP_CLAUSE_NOTINBRANCH */
355 1, /* OMP_CLAUSE_NUM_TEAMS */
356 1, /* OMP_CLAUSE_THREAD_LIMIT */
357 0, /* OMP_CLAUSE_PROC_BIND */
358 1, /* OMP_CLAUSE_SAFELEN */
359 1, /* OMP_CLAUSE_SIMDLEN */
360 0, /* OMP_CLAUSE_FOR */
361 0, /* OMP_CLAUSE_PARALLEL */
362 0, /* OMP_CLAUSE_SECTIONS */
363 0, /* OMP_CLAUSE_TASKGROUP */
364 1, /* OMP_CLAUSE__SIMDUID_ */
365 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
366 0, /* OMP_CLAUSE_INDEPENDENT */
367 1, /* OMP_CLAUSE_WORKER */
368 1, /* OMP_CLAUSE_VECTOR */
369 1, /* OMP_CLAUSE_NUM_GANGS */
370 1, /* OMP_CLAUSE_NUM_WORKERS */
371 1, /* OMP_CLAUSE_VECTOR_LENGTH */
372 };
373
374 const char * const omp_clause_code_name[] =
375 {
376 "error_clause",
377 "private",
378 "shared",
379 "firstprivate",
380 "lastprivate",
381 "reduction",
382 "copyin",
383 "copyprivate",
384 "linear",
385 "aligned",
386 "depend",
387 "uniform",
388 "from",
389 "to",
390 "map",
391 "_cache_",
392 "device_resident",
393 "use_device",
394 "gang",
395 "async",
396 "wait",
397 "auto",
398 "seq",
399 "_looptemp_",
400 "if",
401 "num_threads",
402 "schedule",
403 "nowait",
404 "ordered",
405 "default",
406 "collapse",
407 "untied",
408 "final",
409 "mergeable",
410 "device",
411 "dist_schedule",
412 "inbranch",
413 "notinbranch",
414 "num_teams",
415 "thread_limit",
416 "proc_bind",
417 "safelen",
418 "simdlen",
419 "for",
420 "parallel",
421 "sections",
422 "taskgroup",
423 "_simduid_",
424 "_Cilk_for_count_",
425 "independent",
426 "worker",
427 "vector",
428 "num_gangs",
429 "num_workers",
430 "vector_length"
431 };
432
433
434 /* Return the tree node structure used by tree code CODE. */
435
436 static inline enum tree_node_structure_enum
437 tree_node_structure_for_code (enum tree_code code)
438 {
439 switch (TREE_CODE_CLASS (code))
440 {
441 case tcc_declaration:
442 {
443 switch (code)
444 {
445 case FIELD_DECL:
446 return TS_FIELD_DECL;
447 case PARM_DECL:
448 return TS_PARM_DECL;
449 case VAR_DECL:
450 return TS_VAR_DECL;
451 case LABEL_DECL:
452 return TS_LABEL_DECL;
453 case RESULT_DECL:
454 return TS_RESULT_DECL;
455 case DEBUG_EXPR_DECL:
456 return TS_DECL_WRTL;
457 case CONST_DECL:
458 return TS_CONST_DECL;
459 case TYPE_DECL:
460 return TS_TYPE_DECL;
461 case FUNCTION_DECL:
462 return TS_FUNCTION_DECL;
463 case TRANSLATION_UNIT_DECL:
464 return TS_TRANSLATION_UNIT_DECL;
465 default:
466 return TS_DECL_NON_COMMON;
467 }
468 }
469 case tcc_type:
470 return TS_TYPE_NON_COMMON;
471 case tcc_reference:
472 case tcc_comparison:
473 case tcc_unary:
474 case tcc_binary:
475 case tcc_expression:
476 case tcc_statement:
477 case tcc_vl_exp:
478 return TS_EXP;
479 default: /* tcc_constant and tcc_exceptional */
480 break;
481 }
482 switch (code)
483 {
484 /* tcc_constant cases. */
485 case VOID_CST: return TS_TYPED;
486 case INTEGER_CST: return TS_INT_CST;
487 case REAL_CST: return TS_REAL_CST;
488 case FIXED_CST: return TS_FIXED_CST;
489 case COMPLEX_CST: return TS_COMPLEX;
490 case VECTOR_CST: return TS_VECTOR;
491 case STRING_CST: return TS_STRING;
492 /* tcc_exceptional cases. */
493 case ERROR_MARK: return TS_COMMON;
494 case IDENTIFIER_NODE: return TS_IDENTIFIER;
495 case TREE_LIST: return TS_LIST;
496 case TREE_VEC: return TS_VEC;
497 case SSA_NAME: return TS_SSA_NAME;
498 case PLACEHOLDER_EXPR: return TS_COMMON;
499 case STATEMENT_LIST: return TS_STATEMENT_LIST;
500 case BLOCK: return TS_BLOCK;
501 case CONSTRUCTOR: return TS_CONSTRUCTOR;
502 case TREE_BINFO: return TS_BINFO;
503 case OMP_CLAUSE: return TS_OMP_CLAUSE;
504 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
505 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
506
507 default:
508 gcc_unreachable ();
509 }
510 }
511
512
513 /* Initialize tree_contains_struct to describe the hierarchy of tree
514 nodes. */
515
516 static void
517 initialize_tree_contains_struct (void)
518 {
519 unsigned i;
520
521 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
522 {
523 enum tree_code code;
524 enum tree_node_structure_enum ts_code;
525
526 code = (enum tree_code) i;
527 ts_code = tree_node_structure_for_code (code);
528
529 /* Mark the TS structure itself. */
530 tree_contains_struct[code][ts_code] = 1;
531
532 /* Mark all the structures that TS is derived from. */
533 switch (ts_code)
534 {
535 case TS_TYPED:
536 case TS_BLOCK:
537 MARK_TS_BASE (code);
538 break;
539
540 case TS_COMMON:
541 case TS_INT_CST:
542 case TS_REAL_CST:
543 case TS_FIXED_CST:
544 case TS_VECTOR:
545 case TS_STRING:
546 case TS_COMPLEX:
547 case TS_SSA_NAME:
548 case TS_CONSTRUCTOR:
549 case TS_EXP:
550 case TS_STATEMENT_LIST:
551 MARK_TS_TYPED (code);
552 break;
553
554 case TS_IDENTIFIER:
555 case TS_DECL_MINIMAL:
556 case TS_TYPE_COMMON:
557 case TS_LIST:
558 case TS_VEC:
559 case TS_BINFO:
560 case TS_OMP_CLAUSE:
561 case TS_OPTIMIZATION:
562 case TS_TARGET_OPTION:
563 MARK_TS_COMMON (code);
564 break;
565
566 case TS_TYPE_WITH_LANG_SPECIFIC:
567 MARK_TS_TYPE_COMMON (code);
568 break;
569
570 case TS_TYPE_NON_COMMON:
571 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
572 break;
573
574 case TS_DECL_COMMON:
575 MARK_TS_DECL_MINIMAL (code);
576 break;
577
578 case TS_DECL_WRTL:
579 case TS_CONST_DECL:
580 MARK_TS_DECL_COMMON (code);
581 break;
582
583 case TS_DECL_NON_COMMON:
584 MARK_TS_DECL_WITH_VIS (code);
585 break;
586
587 case TS_DECL_WITH_VIS:
588 case TS_PARM_DECL:
589 case TS_LABEL_DECL:
590 case TS_RESULT_DECL:
591 MARK_TS_DECL_WRTL (code);
592 break;
593
594 case TS_FIELD_DECL:
595 MARK_TS_DECL_COMMON (code);
596 break;
597
598 case TS_VAR_DECL:
599 MARK_TS_DECL_WITH_VIS (code);
600 break;
601
602 case TS_TYPE_DECL:
603 case TS_FUNCTION_DECL:
604 MARK_TS_DECL_NON_COMMON (code);
605 break;
606
607 case TS_TRANSLATION_UNIT_DECL:
608 MARK_TS_DECL_COMMON (code);
609 break;
610
611 default:
612 gcc_unreachable ();
613 }
614 }
615
616 /* Basic consistency checks for attributes used in fold. */
617 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
618 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
619 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
620 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
621 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
622 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
623 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
624 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
625 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
626 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
627 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
628 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
629 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
630 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
631 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
632 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
633 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
634 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
635 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
636 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
637 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
638 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
639 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
640 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
641 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
642 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
643 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
644 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
645 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
646 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
647 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
648 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
649 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
650 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
651 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
652 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
653 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
655 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
657 }
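/* For illustration: once the table is initialized, the derivation chain
   can be queried with CODE_CONTAINS_STRUCT, e.g.
   CODE_CONTAINS_STRUCT (VAR_DECL, TS_DECL_WITH_VIS) is true while
   CODE_CONTAINS_STRUCT (FIELD_DECL, TS_DECL_WITH_VIS) is false, which is
   exactly what the gcc_asserts above rely on.  */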
658
659
660 /* Init tree.c. */
661
662 void
663 init_ttree (void)
664 {
665 /* Initialize the hash table of types. */
666 type_hash_table
667 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
668
669 debug_expr_for_decl
670 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
671
672 value_expr_for_decl
673 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
674
675 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
676
677 int_cst_node = make_int_cst (1, 1);
678
679 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
680
681 cl_optimization_node = make_node (OPTIMIZATION_NODE);
682 cl_target_option_node = make_node (TARGET_OPTION_NODE);
683
684 /* Initialize the tree_contains_struct array. */
685 initialize_tree_contains_struct ();
686 lang_hooks.init_ts ();
687 }
688
689 \f
690 /* The name of the object as the assembler will see it (but before any
691 translations made by ASM_OUTPUT_LABELREF). Often this is the same
692 as DECL_NAME. It is an IDENTIFIER_NODE. */
693 tree
694 decl_assembler_name (tree decl)
695 {
696 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
697 lang_hooks.set_decl_assembler_name (decl);
698 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
699 }
700
701 /* When the target supports COMDAT groups, this indicates which group the
702 DECL is associated with. This can be either an IDENTIFIER_NODE or a
703 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
704 tree
705 decl_comdat_group (const_tree node)
706 {
707 struct symtab_node *snode = symtab_node::get (node);
708 if (!snode)
709 return NULL;
710 return snode->get_comdat_group ();
711 }
712
713 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
714 tree
715 decl_comdat_group_id (const_tree node)
716 {
717 struct symtab_node *snode = symtab_node::get (node);
718 if (!snode)
719 return NULL;
720 return snode->get_comdat_group_id ();
721 }
722
 723 /* When the target supports named sections, return the name of the section
 724 NODE is placed in, or NULL if it is in no section.  */
725 const char *
726 decl_section_name (const_tree node)
727 {
728 struct symtab_node *snode = symtab_node::get (node);
729 if (!snode)
730 return NULL;
731 return snode->get_section ();
732 }
733
 734 /* Set the section name of NODE to VALUE (a string), or clear it
 735 when VALUE is NULL.  */
736 void
737 set_decl_section_name (tree node, const char *value)
738 {
739 struct symtab_node *snode;
740
741 if (value == NULL)
742 {
743 snode = symtab_node::get (node);
744 if (!snode)
745 return;
746 }
747 else if (TREE_CODE (node) == VAR_DECL)
748 snode = varpool_node::get_create (node);
749 else
750 snode = cgraph_node::get_create (node);
751 snode->set_section (value);
752 }
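/* For illustration (the section name below is hypothetical):

       set_decl_section_name (decl, ".my_hot_data");

   passing NULL instead clears the section, but only if a symtab node for
   DECL already exists; a non-NULL name creates the varpool or cgraph node
   on demand.  */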
753
754 /* Return TLS model of a variable NODE. */
755 enum tls_model
756 decl_tls_model (const_tree node)
757 {
758 struct varpool_node *snode = varpool_node::get (node);
759 if (!snode)
760 return TLS_MODEL_NONE;
761 return snode->tls_model;
762 }
763
764 /* Set TLS model of variable NODE to MODEL. */
765 void
766 set_decl_tls_model (tree node, enum tls_model model)
767 {
768 struct varpool_node *vnode;
769
770 if (model == TLS_MODEL_NONE)
771 {
772 vnode = varpool_node::get (node);
773 if (!vnode)
774 return;
775 }
776 else
777 vnode = varpool_node::get_create (node);
778 vnode->tls_model = model;
779 }
780
781 /* Compute the number of bytes occupied by a tree with code CODE.
782 This function cannot be used for nodes that have variable sizes,
783 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
784 size_t
785 tree_code_size (enum tree_code code)
786 {
787 switch (TREE_CODE_CLASS (code))
788 {
789 case tcc_declaration: /* A decl node */
790 {
791 switch (code)
792 {
793 case FIELD_DECL:
794 return sizeof (struct tree_field_decl);
795 case PARM_DECL:
796 return sizeof (struct tree_parm_decl);
797 case VAR_DECL:
798 return sizeof (struct tree_var_decl);
799 case LABEL_DECL:
800 return sizeof (struct tree_label_decl);
801 case RESULT_DECL:
802 return sizeof (struct tree_result_decl);
803 case CONST_DECL:
804 return sizeof (struct tree_const_decl);
805 case TYPE_DECL:
806 return sizeof (struct tree_type_decl);
807 case FUNCTION_DECL:
808 return sizeof (struct tree_function_decl);
809 case DEBUG_EXPR_DECL:
810 return sizeof (struct tree_decl_with_rtl);
811 case TRANSLATION_UNIT_DECL:
812 return sizeof (struct tree_translation_unit_decl);
813 case NAMESPACE_DECL:
814 case IMPORTED_DECL:
815 case NAMELIST_DECL:
816 return sizeof (struct tree_decl_non_common);
817 default:
818 return lang_hooks.tree_size (code);
819 }
820 }
821
822 case tcc_type: /* a type node */
823 return sizeof (struct tree_type_non_common);
824
825 case tcc_reference: /* a reference */
826 case tcc_expression: /* an expression */
827 case tcc_statement: /* an expression with side effects */
828 case tcc_comparison: /* a comparison expression */
829 case tcc_unary: /* a unary arithmetic expression */
830 case tcc_binary: /* a binary arithmetic expression */
831 return (sizeof (struct tree_exp)
832 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
833
834 case tcc_constant: /* a constant */
835 switch (code)
836 {
837 case VOID_CST: return sizeof (struct tree_typed);
838 case INTEGER_CST: gcc_unreachable ();
839 case REAL_CST: return sizeof (struct tree_real_cst);
840 case FIXED_CST: return sizeof (struct tree_fixed_cst);
841 case COMPLEX_CST: return sizeof (struct tree_complex);
842 case VECTOR_CST: return sizeof (struct tree_vector);
843 case STRING_CST: gcc_unreachable ();
844 default:
845 return lang_hooks.tree_size (code);
846 }
847
848 case tcc_exceptional: /* something random, like an identifier. */
849 switch (code)
850 {
851 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
852 case TREE_LIST: return sizeof (struct tree_list);
853
854 case ERROR_MARK:
855 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
856
857 case TREE_VEC:
858 case OMP_CLAUSE: gcc_unreachable ();
859
860 case SSA_NAME: return sizeof (struct tree_ssa_name);
861
862 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
863 case BLOCK: return sizeof (struct tree_block);
864 case CONSTRUCTOR: return sizeof (struct tree_constructor);
865 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
866 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
867
868 default:
869 return lang_hooks.tree_size (code);
870 }
871
872 default:
873 gcc_unreachable ();
874 }
875 }
876
877 /* Compute the number of bytes occupied by NODE. This routine only
878 looks at TREE_CODE, except for those nodes that have variable sizes. */
879 size_t
880 tree_size (const_tree node)
881 {
882 const enum tree_code code = TREE_CODE (node);
883 switch (code)
884 {
885 case INTEGER_CST:
886 return (sizeof (struct tree_int_cst)
887 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
888
889 case TREE_BINFO:
890 return (offsetof (struct tree_binfo, base_binfos)
891 + vec<tree, va_gc>
892 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
893
894 case TREE_VEC:
895 return (sizeof (struct tree_vec)
896 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
897
898 case VECTOR_CST:
899 return (sizeof (struct tree_vector)
900 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
901
902 case STRING_CST:
903 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
904
905 case OMP_CLAUSE:
906 return (sizeof (struct tree_omp_clause)
907 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
908 * sizeof (tree));
909
910 default:
911 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
912 return (sizeof (struct tree_exp)
913 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
914 else
915 return tree_code_size (code);
916 }
917 }
918
919 /* Record interesting allocation statistics for a tree node with CODE
920 and LENGTH. */
921
922 static void
923 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
924 size_t length ATTRIBUTE_UNUSED)
925 {
926 enum tree_code_class type = TREE_CODE_CLASS (code);
927 tree_node_kind kind;
928
929 if (!GATHER_STATISTICS)
930 return;
931
932 switch (type)
933 {
934 case tcc_declaration: /* A decl node */
935 kind = d_kind;
936 break;
937
938 case tcc_type: /* a type node */
939 kind = t_kind;
940 break;
941
942 case tcc_statement: /* an expression with side effects */
943 kind = s_kind;
944 break;
945
946 case tcc_reference: /* a reference */
947 kind = r_kind;
948 break;
949
950 case tcc_expression: /* an expression */
951 case tcc_comparison: /* a comparison expression */
952 case tcc_unary: /* a unary arithmetic expression */
953 case tcc_binary: /* a binary arithmetic expression */
954 kind = e_kind;
955 break;
956
957 case tcc_constant: /* a constant */
958 kind = c_kind;
959 break;
960
961 case tcc_exceptional: /* something random, like an identifier. */
962 switch (code)
963 {
964 case IDENTIFIER_NODE:
965 kind = id_kind;
966 break;
967
968 case TREE_VEC:
969 kind = vec_kind;
970 break;
971
972 case TREE_BINFO:
973 kind = binfo_kind;
974 break;
975
976 case SSA_NAME:
977 kind = ssa_name_kind;
978 break;
979
980 case BLOCK:
981 kind = b_kind;
982 break;
983
984 case CONSTRUCTOR:
985 kind = constr_kind;
986 break;
987
988 case OMP_CLAUSE:
989 kind = omp_clause_kind;
990 break;
991
992 default:
993 kind = x_kind;
994 break;
995 }
996 break;
997
998 case tcc_vl_exp:
999 kind = e_kind;
1000 break;
1001
1002 default:
1003 gcc_unreachable ();
1004 }
1005
1006 tree_code_counts[(int) code]++;
1007 tree_node_counts[(int) kind]++;
1008 tree_node_sizes[(int) kind] += length;
1009 }
1010
1011 /* Allocate and return a new UID from the DECL_UID namespace. */
1012
1013 int
1014 allocate_decl_uid (void)
1015 {
1016 return next_decl_uid++;
1017 }
1018
1019 /* Return a newly allocated node of code CODE. For decl and type
1020 nodes, some other fields are initialized. The rest of the node is
1021 initialized to zero. This function cannot be used for TREE_VEC,
1022 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1023 tree_code_size.
1024
1025 Achoo! I got a code in the node. */
1026
1027 tree
1028 make_node_stat (enum tree_code code MEM_STAT_DECL)
1029 {
1030 tree t;
1031 enum tree_code_class type = TREE_CODE_CLASS (code);
1032 size_t length = tree_code_size (code);
1033
1034 record_node_allocation_statistics (code, length);
1035
1036 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1037 TREE_SET_CODE (t, code);
1038
1039 switch (type)
1040 {
1041 case tcc_statement:
1042 TREE_SIDE_EFFECTS (t) = 1;
1043 break;
1044
1045 case tcc_declaration:
1046 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1047 {
1048 if (code == FUNCTION_DECL)
1049 {
1050 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1051 DECL_MODE (t) = FUNCTION_MODE;
1052 }
1053 else
1054 DECL_ALIGN (t) = 1;
1055 }
1056 DECL_SOURCE_LOCATION (t) = input_location;
1057 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1058 DECL_UID (t) = --next_debug_decl_uid;
1059 else
1060 {
1061 DECL_UID (t) = allocate_decl_uid ();
1062 SET_DECL_PT_UID (t, -1);
1063 }
1064 if (TREE_CODE (t) == LABEL_DECL)
1065 LABEL_DECL_UID (t) = -1;
1066
1067 break;
1068
1069 case tcc_type:
1070 TYPE_UID (t) = next_type_uid++;
1071 TYPE_ALIGN (t) = BITS_PER_UNIT;
1072 TYPE_USER_ALIGN (t) = 0;
1073 TYPE_MAIN_VARIANT (t) = t;
1074 TYPE_CANONICAL (t) = t;
1075
1076 /* Default to no attributes for type, but let target change that. */
1077 TYPE_ATTRIBUTES (t) = NULL_TREE;
1078 targetm.set_default_type_attributes (t);
1079
1080 /* We have not yet computed the alias set for this type. */
1081 TYPE_ALIAS_SET (t) = -1;
1082 break;
1083
1084 case tcc_constant:
1085 TREE_CONSTANT (t) = 1;
1086 break;
1087
1088 case tcc_expression:
1089 switch (code)
1090 {
1091 case INIT_EXPR:
1092 case MODIFY_EXPR:
1093 case VA_ARG_EXPR:
1094 case PREDECREMENT_EXPR:
1095 case PREINCREMENT_EXPR:
1096 case POSTDECREMENT_EXPR:
1097 case POSTINCREMENT_EXPR:
1098 /* All of these have side-effects, no matter what their
1099 operands are. */
1100 TREE_SIDE_EFFECTS (t) = 1;
1101 break;
1102
1103 default:
1104 break;
1105 }
1106 break;
1107
1108 default:
1109 /* Other classes need no special treatment. */
1110 break;
1111 }
1112
1113 return t;
1114 }
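/* For illustration: a typical caller goes through the make_node macro, e.g.

       tree d = make_node (VAR_DECL);
       DECL_NAME (d) = get_identifier ("tmp");

   which yields a cleared node with a fresh DECL_UID, DECL_ALIGN of 1 and
   DECL_SOURCE_LOCATION set to input_location, as initialized above.
   (The name "tmp" is of course arbitrary.)  */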
1115 \f
1116 /* Return a new node with the same contents as NODE except that its
1117 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1118
1119 tree
1120 copy_node_stat (tree node MEM_STAT_DECL)
1121 {
1122 tree t;
1123 enum tree_code code = TREE_CODE (node);
1124 size_t length;
1125
1126 gcc_assert (code != STATEMENT_LIST);
1127
1128 length = tree_size (node);
1129 record_node_allocation_statistics (code, length);
1130 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1131 memcpy (t, node, length);
1132
1133 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1134 TREE_CHAIN (t) = 0;
1135 TREE_ASM_WRITTEN (t) = 0;
1136 TREE_VISITED (t) = 0;
1137
1138 if (TREE_CODE_CLASS (code) == tcc_declaration)
1139 {
1140 if (code == DEBUG_EXPR_DECL)
1141 DECL_UID (t) = --next_debug_decl_uid;
1142 else
1143 {
1144 DECL_UID (t) = allocate_decl_uid ();
1145 if (DECL_PT_UID_SET_P (node))
1146 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1147 }
1148 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1149 && DECL_HAS_VALUE_EXPR_P (node))
1150 {
1151 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1152 DECL_HAS_VALUE_EXPR_P (t) = 1;
1153 }
 1154 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1155 if (TREE_CODE (node) == VAR_DECL)
1156 {
1157 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1158 t->decl_with_vis.symtab_node = NULL;
1159 }
1160 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1161 {
1162 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1163 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1164 }
1165 if (TREE_CODE (node) == FUNCTION_DECL)
1166 {
1167 DECL_STRUCT_FUNCTION (t) = NULL;
1168 t->decl_with_vis.symtab_node = NULL;
1169 }
1170 }
1171 else if (TREE_CODE_CLASS (code) == tcc_type)
1172 {
1173 TYPE_UID (t) = next_type_uid++;
1174 /* The following is so that the debug code for
1175 the copy is different from the original type.
1176 The two statements usually duplicate each other
1177 (because they clear fields of the same union),
1178 but the optimizer should catch that. */
1179 TYPE_SYMTAB_POINTER (t) = 0;
1180 TYPE_SYMTAB_ADDRESS (t) = 0;
1181
1182 /* Do not copy the values cache. */
1183 if (TYPE_CACHED_VALUES_P (t))
1184 {
1185 TYPE_CACHED_VALUES_P (t) = 0;
1186 TYPE_CACHED_VALUES (t) = NULL_TREE;
1187 }
1188 }
1189
1190 return t;
1191 }
1192
1193 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1194 For example, this can copy a list made of TREE_LIST nodes. */
1195
1196 tree
1197 copy_list (tree list)
1198 {
1199 tree head;
1200 tree prev, next;
1201
1202 if (list == 0)
1203 return 0;
1204
1205 head = prev = copy_node (list);
1206 next = TREE_CHAIN (list);
1207 while (next)
1208 {
1209 TREE_CHAIN (prev) = copy_node (next);
1210 prev = TREE_CHAIN (prev);
1211 next = TREE_CHAIN (next);
1212 }
1213 return head;
1214 }
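/* For illustration: copy_list duplicates every TREE_LIST cell but not the
   values hanging off them, so a hypothetical

       tree orig = tree_cons (NULL_TREE, val1,
			      tree_cons (NULL_TREE, val2, NULL_TREE));
       tree dup  = copy_list (orig);

   leaves dup and orig as distinct two-element chains sharing val1 and val2.  */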
1215
1216 \f
1217 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1218 INTEGER_CST with value CST and type TYPE. */
1219
1220 static unsigned int
1221 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1222 {
1223 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1224 /* We need an extra zero HWI if CST is an unsigned integer with its
1225 upper bit set, and if CST occupies a whole number of HWIs. */
1226 if (TYPE_UNSIGNED (type)
1227 && wi::neg_p (cst)
1228 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1229 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1230 return cst.get_len ();
1231 }
1232
1233 /* Return a new INTEGER_CST with value CST and type TYPE. */
1234
1235 static tree
1236 build_new_int_cst (tree type, const wide_int &cst)
1237 {
1238 unsigned int len = cst.get_len ();
1239 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1240 tree nt = make_int_cst (len, ext_len);
1241
1242 if (len < ext_len)
1243 {
1244 --ext_len;
1245 TREE_INT_CST_ELT (nt, ext_len) = 0;
1246 for (unsigned int i = len; i < ext_len; ++i)
1247 TREE_INT_CST_ELT (nt, i) = -1;
1248 }
1249 else if (TYPE_UNSIGNED (type)
1250 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1251 {
1252 len--;
1253 TREE_INT_CST_ELT (nt, len)
1254 = zext_hwi (cst.elt (len),
1255 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1256 }
1257
1258 for (unsigned int i = 0; i < len; i++)
1259 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1260 TREE_TYPE (nt) = type;
1261 return nt;
1262 }
1263
1264 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1265
1266 tree
1267 build_int_cst (tree type, HOST_WIDE_INT low)
1268 {
1269 /* Support legacy code. */
1270 if (!type)
1271 type = integer_type_node;
1272
1273 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1274 }
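/* For illustration: the value is sign extended from HOST_WIDE_INT and then
   forced to TYPE's precision, so e.g.

       build_int_cst (unsigned_char_type_node, -1)

   yields the (shared) constant 255 of that type.  */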
1275
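/* Create an INT_CST node with a LOW value zero extended to TYPE.  */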
1276 tree
1277 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1278 {
1279 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1280 }
1281
1282 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1283
1284 tree
1285 build_int_cst_type (tree type, HOST_WIDE_INT low)
1286 {
1287 gcc_assert (type);
1288 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1289 }
1290
 1291 /* Constructs a tree of type TYPE with the value given by CST.  The signedness
1292 of CST is assumed to be the same as the signedness of TYPE. */
1293
1294 tree
1295 double_int_to_tree (tree type, double_int cst)
1296 {
1297 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1298 }
1299
 1300 /* We force the wide_int CST to the range of the type TYPE by sign or
 1301 zero extending it.  OVERFLOWABLE indicates whether we are interested
 1302 in overflow of the value: when > 0 we are only interested in signed
 1303 overflow, when < 0 we are interested in any overflow.  OVERFLOWED
 1304 indicates whether overflow has already occurred.
 1305 We force the value of CST to be within the range of TYPE (by setting
 1306 to 0 or 1 all the bits outside the type's range).
 1307 We set TREE_OVERFLOW if
 1308 OVERFLOWED is nonzero,
 1309 or OVERFLOWABLE is > 0 and signed overflow occurs,
 1310 or OVERFLOWABLE is < 0 and any overflow occurs.
 1311 We return a new tree node for the extended wide_int.  The node
 1312 is shared if no overflow flags are set.  */
1313
1314
1315 tree
1316 force_fit_type (tree type, const wide_int_ref &cst,
1317 int overflowable, bool overflowed)
1318 {
1319 signop sign = TYPE_SIGN (type);
1320
1321 /* If we need to set overflow flags, return a new unshared node. */
1322 if (overflowed || !wi::fits_to_tree_p (cst, type))
1323 {
1324 if (overflowed
1325 || overflowable < 0
1326 || (overflowable > 0 && sign == SIGNED))
1327 {
1328 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1329 tree t = build_new_int_cst (type, tmp);
1330 TREE_OVERFLOW (t) = 1;
1331 return t;
1332 }
1333 }
1334
1335 /* Else build a shared node. */
1336 return wide_int_to_tree (type, cst);
1337 }
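/* For illustration: when overflow checking is requested, a value that does
   not fit produces an unshared node with TREE_OVERFLOW set, so roughly

       force_fit_type (signed_char_type_node,
		       wi::shwi (200, TYPE_PRECISION (integer_type_node)),
		       1, false)

   returns an INTEGER_CST of value -56 whose TREE_OVERFLOW is 1.  */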
1338
1339 /* These are the hash table functions for the hash table of INTEGER_CST
1340 nodes of a sizetype. */
1341
 1342 /* Return the hash code of X, an INTEGER_CST.  */
1343
1344 hashval_t
1345 int_cst_hasher::hash (tree x)
1346 {
1347 const_tree const t = x;
1348 hashval_t code = TYPE_UID (TREE_TYPE (t));
1349 int i;
1350
1351 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1352 code ^= TREE_INT_CST_ELT (t, i);
1353
1354 return code;
1355 }
1356
1357 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
 1358 is the same as that given by *Y, which is also an INTEGER_CST tree node.  */
1359
1360 bool
1361 int_cst_hasher::equal (tree x, tree y)
1362 {
1363 const_tree const xt = x;
1364 const_tree const yt = y;
1365
1366 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1367 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1368 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1369 return false;
1370
1371 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1372 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1373 return false;
1374
1375 return true;
1376 }
1377
1378 /* Create an INT_CST node of TYPE and value CST.
1379 The returned node is always shared. For small integers we use a
1380 per-type vector cache, for larger ones we use a single hash table.
1381 The value is extended from its precision according to the sign of
1382 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1383 the upper bits and ensures that hashing and value equality based
1384 upon the underlying HOST_WIDE_INTs works without masking. */
1385
1386 tree
1387 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1388 {
1389 tree t;
1390 int ix = -1;
1391 int limit = 0;
1392
1393 gcc_assert (type);
1394 unsigned int prec = TYPE_PRECISION (type);
1395 signop sgn = TYPE_SIGN (type);
1396
1397 /* Verify that everything is canonical. */
1398 int l = pcst.get_len ();
1399 if (l > 1)
1400 {
1401 if (pcst.elt (l - 1) == 0)
1402 gcc_checking_assert (pcst.elt (l - 2) < 0);
1403 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1404 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1405 }
1406
1407 wide_int cst = wide_int::from (pcst, prec, sgn);
1408 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1409
1410 if (ext_len == 1)
1411 {
1412 /* We just need to store a single HOST_WIDE_INT. */
1413 HOST_WIDE_INT hwi;
1414 if (TYPE_UNSIGNED (type))
1415 hwi = cst.to_uhwi ();
1416 else
1417 hwi = cst.to_shwi ();
1418
1419 switch (TREE_CODE (type))
1420 {
1421 case NULLPTR_TYPE:
1422 gcc_assert (hwi == 0);
1423 /* Fallthru. */
1424
1425 case POINTER_TYPE:
1426 case REFERENCE_TYPE:
1427 case POINTER_BOUNDS_TYPE:
1428 /* Cache NULL pointer and zero bounds. */
1429 if (hwi == 0)
1430 {
1431 limit = 1;
1432 ix = 0;
1433 }
1434 break;
1435
1436 case BOOLEAN_TYPE:
1437 /* Cache false or true. */
1438 limit = 2;
1439 if (hwi < 2)
1440 ix = hwi;
1441 break;
1442
1443 case INTEGER_TYPE:
1444 case OFFSET_TYPE:
1445 if (TYPE_SIGN (type) == UNSIGNED)
1446 {
1447 /* Cache [0, N). */
1448 limit = INTEGER_SHARE_LIMIT;
1449 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1450 ix = hwi;
1451 }
1452 else
1453 {
1454 /* Cache [-1, N). */
1455 limit = INTEGER_SHARE_LIMIT + 1;
1456 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1457 ix = hwi + 1;
1458 }
1459 break;
1460
1461 case ENUMERAL_TYPE:
1462 break;
1463
1464 default:
1465 gcc_unreachable ();
1466 }
1467
1468 if (ix >= 0)
1469 {
1470 /* Look for it in the type's vector of small shared ints. */
1471 if (!TYPE_CACHED_VALUES_P (type))
1472 {
1473 TYPE_CACHED_VALUES_P (type) = 1;
1474 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1475 }
1476
1477 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1478 if (t)
1479 /* Make sure no one is clobbering the shared constant. */
1480 gcc_checking_assert (TREE_TYPE (t) == type
1481 && TREE_INT_CST_NUNITS (t) == 1
1482 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1483 && TREE_INT_CST_EXT_NUNITS (t) == 1
1484 && TREE_INT_CST_ELT (t, 0) == hwi);
1485 else
1486 {
1487 /* Create a new shared int. */
1488 t = build_new_int_cst (type, cst);
1489 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1490 }
1491 }
1492 else
1493 {
1494 /* Use the cache of larger shared ints, using int_cst_node as
1495 a temporary. */
1496
1497 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1498 TREE_TYPE (int_cst_node) = type;
1499
1500 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1501 t = *slot;
1502 if (!t)
1503 {
1504 /* Insert this one into the hash table. */
1505 t = int_cst_node;
1506 *slot = t;
1507 /* Make a new node for next time round. */
1508 int_cst_node = make_int_cst (1, 1);
1509 }
1510 }
1511 }
1512 else
1513 {
1514 /* The value either hashes properly or we drop it on the floor
1515 for the gc to take care of. There will not be enough of them
1516 to worry about. */
1517
1518 tree nt = build_new_int_cst (type, cst);
1519 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1520 t = *slot;
1521 if (!t)
1522 {
1523 /* Insert this one into the hash table. */
1524 t = nt;
1525 *slot = t;
1526 }
1527 }
1528
1529 return t;
1530 }
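/* For illustration: small values are shared through TYPE_CACHED_VALUES, so
   two independent calls such as

       build_int_cst (integer_type_node, 0)
       build_int_cst (integer_type_node, 0)

   return the very same node (integer_zero_node); larger values are instead
   unified through int_cst_hash_table above.  */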
1531
1532 void
1533 cache_integer_cst (tree t)
1534 {
1535 tree type = TREE_TYPE (t);
1536 int ix = -1;
1537 int limit = 0;
1538 int prec = TYPE_PRECISION (type);
1539
1540 gcc_assert (!TREE_OVERFLOW (t));
1541
1542 switch (TREE_CODE (type))
1543 {
1544 case NULLPTR_TYPE:
1545 gcc_assert (integer_zerop (t));
1546 /* Fallthru. */
1547
1548 case POINTER_TYPE:
1549 case REFERENCE_TYPE:
1550 /* Cache NULL pointer. */
1551 if (integer_zerop (t))
1552 {
1553 limit = 1;
1554 ix = 0;
1555 }
1556 break;
1557
1558 case BOOLEAN_TYPE:
1559 /* Cache false or true. */
1560 limit = 2;
1561 if (wi::ltu_p (t, 2))
1562 ix = TREE_INT_CST_ELT (t, 0);
1563 break;
1564
1565 case INTEGER_TYPE:
1566 case OFFSET_TYPE:
1567 if (TYPE_UNSIGNED (type))
1568 {
1569 /* Cache 0..N */
1570 limit = INTEGER_SHARE_LIMIT;
1571
 1572 /* This is a little hokey, but if the prec is smaller than
1573 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1574 obvious test will not get the correct answer. */
1575 if (prec < HOST_BITS_PER_WIDE_INT)
1576 {
1577 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1578 ix = tree_to_uhwi (t);
1579 }
1580 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1581 ix = tree_to_uhwi (t);
1582 }
1583 else
1584 {
1585 /* Cache -1..N */
1586 limit = INTEGER_SHARE_LIMIT + 1;
1587
1588 if (integer_minus_onep (t))
1589 ix = 0;
1590 else if (!wi::neg_p (t))
1591 {
1592 if (prec < HOST_BITS_PER_WIDE_INT)
1593 {
1594 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1595 ix = tree_to_shwi (t) + 1;
1596 }
1597 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1598 ix = tree_to_shwi (t) + 1;
1599 }
1600 }
1601 break;
1602
1603 case ENUMERAL_TYPE:
1604 break;
1605
1606 default:
1607 gcc_unreachable ();
1608 }
1609
1610 if (ix >= 0)
1611 {
1612 /* Look for it in the type's vector of small shared ints. */
1613 if (!TYPE_CACHED_VALUES_P (type))
1614 {
1615 TYPE_CACHED_VALUES_P (type) = 1;
1616 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1617 }
1618
1619 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1620 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1621 }
1622 else
1623 {
1624 /* Use the cache of larger shared ints. */
1625 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1626 /* If there is already an entry for the number verify it's the
1627 same. */
1628 if (*slot)
1629 gcc_assert (wi::eq_p (tree (*slot), t));
1630 else
1631 /* Otherwise insert this one into the hash table. */
1632 *slot = t;
1633 }
1634 }
1635
1636
 1637 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1638 and the rest are zeros. */
1639
1640 tree
1641 build_low_bits_mask (tree type, unsigned bits)
1642 {
1643 gcc_assert (bits <= TYPE_PRECISION (type));
1644
1645 return wide_int_to_tree (type, wi::mask (bits, false,
1646 TYPE_PRECISION (type)));
1647 }
1648
 1649 /* Checks that X is an integer constant that can be expressed in (unsigned)
1650 HOST_WIDE_INT without loss of precision. */
1651
1652 bool
1653 cst_and_fits_in_hwi (const_tree x)
1654 {
1655 if (TREE_CODE (x) != INTEGER_CST)
1656 return false;
1657
1658 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1659 return false;
1660
1661 return TREE_INT_CST_NUNITS (x) == 1;
1662 }
1663
1664 /* Build a newly constructed TREE_VEC node of length LEN. */
1665
1666 tree
1667 make_vector_stat (unsigned len MEM_STAT_DECL)
1668 {
1669 tree t;
1670 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1671
1672 record_node_allocation_statistics (VECTOR_CST, length);
1673
1674 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1675
1676 TREE_SET_CODE (t, VECTOR_CST);
1677 TREE_CONSTANT (t) = 1;
1678
1679 return t;
1680 }
1681
1682 /* Return a new VECTOR_CST node whose type is TYPE and whose values
 1683 are in the array pointed to by VALS.  */
1684
1685 tree
1686 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1687 {
1688 int over = 0;
1689 unsigned cnt = 0;
1690 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1691 TREE_TYPE (v) = type;
1692
1693 /* Iterate through elements and check for overflow. */
1694 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1695 {
1696 tree value = vals[cnt];
1697
1698 VECTOR_CST_ELT (v, cnt) = value;
1699
1700 /* Don't crash if we get an address constant. */
1701 if (!CONSTANT_CLASS_P (value))
1702 continue;
1703
1704 over |= TREE_OVERFLOW (value);
1705 }
1706
1707 TREE_OVERFLOW (v) = over;
1708 return v;
1709 }
1710
1711 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1712 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1713
1714 tree
1715 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1716 {
1717 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1718 unsigned HOST_WIDE_INT idx;
1719 tree value;
1720
1721 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1722 vec[idx] = value;
1723 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1724 vec[idx] = build_zero_cst (TREE_TYPE (type));
1725
1726 return build_vector (type, vec);
1727 }
1728
1729 /* Build a vector of type VECTYPE where all the elements are SCs. */
1730 tree
1731 build_vector_from_val (tree vectype, tree sc)
1732 {
1733 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1734
1735 if (sc == error_mark_node)
1736 return sc;
1737
1738 /* Verify that the vector type is suitable for SC. Note that there
1739 is some inconsistency in the type-system with respect to restrict
1740 qualifications of pointers. Vector types always have a main-variant
1741 element type and the qualification is applied to the vector-type.
1742 So TREE_TYPE (vector-type) does not return a properly qualified
1743 vector element-type. */
1744 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1745 TREE_TYPE (vectype)));
1746
1747 if (CONSTANT_CLASS_P (sc))
1748 {
1749 tree *v = XALLOCAVEC (tree, nunits);
1750 for (i = 0; i < nunits; ++i)
1751 v[i] = sc;
1752 return build_vector (vectype, v);
1753 }
1754 else
1755 {
1756 vec<constructor_elt, va_gc> *v;
1757 vec_alloc (v, nunits);
1758 for (i = 0; i < nunits; ++i)
1759 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1760 return build_constructor (vectype, v);
1761 }
1762 }
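/* For illustration (assuming a V4SI-like vector type named v4si_type is
   available in the caller):

       build_vector_from_val (v4si_type,
			      build_int_cst (integer_type_node, 7))

   gives a VECTOR_CST of four 7s because the element is constant, and a
   CONSTRUCTOR otherwise.  */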
1763
1764 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1765 are in the vec pointed to by VALS. */
1766 tree
1767 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1768 {
1769 tree c = make_node (CONSTRUCTOR);
1770 unsigned int i;
1771 constructor_elt *elt;
1772 bool constant_p = true;
1773 bool side_effects_p = false;
1774
1775 TREE_TYPE (c) = type;
1776 CONSTRUCTOR_ELTS (c) = vals;
1777
1778 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1779 {
1780 /* Mostly ctors will have elts that don't have side-effects, so
1781 the usual case is to scan all the elements. Hence a single
1782 loop for both const and side effects, rather than one loop
1783 each (with early outs). */
1784 if (!TREE_CONSTANT (elt->value))
1785 constant_p = false;
1786 if (TREE_SIDE_EFFECTS (elt->value))
1787 side_effects_p = true;
1788 }
1789
1790 TREE_SIDE_EFFECTS (c) = side_effects_p;
1791 TREE_CONSTANT (c) = constant_p;
1792
1793 return c;
1794 }
1795
1796 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1797 INDEX and VALUE. */
1798 tree
1799 build_constructor_single (tree type, tree index, tree value)
1800 {
1801 vec<constructor_elt, va_gc> *v;
1802 constructor_elt elt = {index, value};
1803
1804 vec_alloc (v, 1);
1805 v->quick_push (elt);
1806
1807 return build_constructor (type, v);
1808 }
1809
1810
1811 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1812 are in a list pointed to by VALS. */
1813 tree
1814 build_constructor_from_list (tree type, tree vals)
1815 {
1816 tree t;
1817 vec<constructor_elt, va_gc> *v = NULL;
1818
1819 if (vals)
1820 {
1821 vec_alloc (v, list_length (vals));
1822 for (t = vals; t; t = TREE_CHAIN (t))
1823 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1824 }
1825
1826 return build_constructor (type, v);
1827 }
1828
1829 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1830 of elements, provided as index/value pairs. */
1831
1832 tree
1833 build_constructor_va (tree type, int nelts, ...)
1834 {
1835 vec<constructor_elt, va_gc> *v = NULL;
1836 va_list p;
1837
1838 va_start (p, nelts);
1839 vec_alloc (v, nelts);
1840 while (nelts--)
1841 {
1842 tree index = va_arg (p, tree);
1843 tree value = va_arg (p, tree);
1844 CONSTRUCTOR_APPEND_ELT (v, index, value);
1845 }
1846 va_end (p);
1847 return build_constructor (type, v);
1848 }
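/* For illustration: the variadic arguments come in index/value pairs, e.g.
   for a two-element array initializer

       build_constructor_va (array_type, 2,
			     size_int (0), elt0,
			     size_int (1), elt1);

   where array_type, elt0 and elt1 are supplied by the caller.  */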
1849
1850 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1851
1852 tree
1853 build_fixed (tree type, FIXED_VALUE_TYPE f)
1854 {
1855 tree v;
1856 FIXED_VALUE_TYPE *fp;
1857
1858 v = make_node (FIXED_CST);
1859 fp = ggc_alloc<fixed_value> ();
1860 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1861
1862 TREE_TYPE (v) = type;
1863 TREE_FIXED_CST_PTR (v) = fp;
1864 return v;
1865 }
1866
1867 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1868
1869 tree
1870 build_real (tree type, REAL_VALUE_TYPE d)
1871 {
1872 tree v;
1873 REAL_VALUE_TYPE *dp;
1874 int overflow = 0;
1875
1876 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1877 Consider doing it via real_convert now. */
1878
1879 v = make_node (REAL_CST);
1880 dp = ggc_alloc<real_value> ();
1881 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1882
1883 TREE_TYPE (v) = type;
1884 TREE_REAL_CST_PTR (v) = dp;
1885 TREE_OVERFLOW (v) = overflow;
1886 return v;
1887 }
1888
 1889 /* Return a REAL_VALUE_TYPE holding the value of the INTEGER_CST node I,
 1890 converted as a floating-point value of type TYPE.  */
1891
1892 REAL_VALUE_TYPE
1893 real_value_from_int_cst (const_tree type, const_tree i)
1894 {
1895 REAL_VALUE_TYPE d;
1896
1897 /* Clear all bits of the real value type so that we can later do
1898 bitwise comparisons to see if two values are the same. */
1899 memset (&d, 0, sizeof d);
1900
1901 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1902 TYPE_SIGN (TREE_TYPE (i)));
1903 return d;
1904 }
1905
1906 /* Given a tree representing an integer constant I, return a tree
1907 representing the same value as a floating-point constant of type TYPE. */
1908
1909 tree
1910 build_real_from_int_cst (tree type, const_tree i)
1911 {
1912 tree v;
1913 int overflow = TREE_OVERFLOW (i);
1914
1915 v = build_real (type, real_value_from_int_cst (type, i));
1916
1917 TREE_OVERFLOW (v) |= overflow;
1918 return v;
1919 }
1920
1921 /* Return a newly constructed STRING_CST node whose value is
1922 the LEN characters at STR.
1923 Note that for a C string literal, LEN should include the trailing NUL.
1924 The TREE_TYPE is not initialized. */
1925
1926 tree
1927 build_string (int len, const char *str)
1928 {
1929 tree s;
1930 size_t length;
1931
1932 /* Do not waste bytes provided by padding of struct tree_string. */
1933 length = len + offsetof (struct tree_string, str) + 1;
1934
1935 record_node_allocation_statistics (STRING_CST, length);
1936
1937 s = (tree) ggc_internal_alloc (length);
1938
1939 memset (s, 0, sizeof (struct tree_typed));
1940 TREE_SET_CODE (s, STRING_CST);
1941 TREE_CONSTANT (s) = 1;
1942 TREE_STRING_LENGTH (s) = len;
1943 memcpy (s->string.str, str, len);
1944 s->string.str[len] = '\0';
1945
1946 return s;
1947 }
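/* For illustration: for a C string literal the length includes the trailing
   NUL and the type is filled in by the caller, roughly

       tree s = build_string (strlen ("abc") + 1, "abc");
       TREE_TYPE (s) = build_array_type (char_type_node,
					 build_index_type (size_int (3)));

   build_string_literal performs a similar dance.  */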
1948
1949 /* Return a newly constructed COMPLEX_CST node whose value is
1950 specified by the real and imaginary parts REAL and IMAG.
1951 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1952 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1953
1954 tree
1955 build_complex (tree type, tree real, tree imag)
1956 {
1957 tree t = make_node (COMPLEX_CST);
1958
1959 TREE_REALPART (t) = real;
1960 TREE_IMAGPART (t) = imag;
1961 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1962 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1963 return t;
1964 }
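/* For illustration:

       build_complex (NULL_TREE,
		      build_real (double_type_node, dconst1),
		      build_real (double_type_node, dconst0))

   yields the complex constant 1.0 + 0.0i, with the complex type derived
   from the type of the real part.  */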
1965
1966 /* Return a constant of arithmetic type TYPE which is the
1967 multiplicative identity of the set TYPE. */
1968
1969 tree
1970 build_one_cst (tree type)
1971 {
1972 switch (TREE_CODE (type))
1973 {
1974 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1975 case POINTER_TYPE: case REFERENCE_TYPE:
1976 case OFFSET_TYPE:
1977 return build_int_cst (type, 1);
1978
1979 case REAL_TYPE:
1980 return build_real (type, dconst1);
1981
1982 case FIXED_POINT_TYPE:
1983 /* We can only generate 1 for accum types. */
1984 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1985 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1986
1987 case VECTOR_TYPE:
1988 {
1989 tree scalar = build_one_cst (TREE_TYPE (type));
1990
1991 return build_vector_from_val (type, scalar);
1992 }
1993
1994 case COMPLEX_TYPE:
1995 return build_complex (type,
1996 build_one_cst (TREE_TYPE (type)),
1997 build_zero_cst (TREE_TYPE (type)));
1998
1999 default:
2000 gcc_unreachable ();
2001 }
2002 }
2003
2004 /* Return an integer of type TYPE containing all 1's in as much precision as
2005 it contains, or a complex or vector whose subparts are such integers. */
2006
2007 tree
2008 build_all_ones_cst (tree type)
2009 {
2010 if (TREE_CODE (type) == COMPLEX_TYPE)
2011 {
2012 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2013 return build_complex (type, scalar, scalar);
2014 }
2015 else
2016 return build_minus_one_cst (type);
2017 }
2018
2019 /* Return a constant of arithmetic type TYPE which is the
2020 opposite of the multiplicative identity of the set TYPE. */
2021
2022 tree
2023 build_minus_one_cst (tree type)
2024 {
2025 switch (TREE_CODE (type))
2026 {
2027 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2028 case POINTER_TYPE: case REFERENCE_TYPE:
2029 case OFFSET_TYPE:
2030 return build_int_cst (type, -1);
2031
2032 case REAL_TYPE:
2033 return build_real (type, dconstm1);
2034
2035 case FIXED_POINT_TYPE:
2036 /* We can only generate -1 for accum types. */
2037 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2038 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2039 TYPE_MODE (type)));
2040
2041 case VECTOR_TYPE:
2042 {
2043 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2044
2045 return build_vector_from_val (type, scalar);
2046 }
2047
2048 case COMPLEX_TYPE:
2049 return build_complex (type,
2050 build_minus_one_cst (TREE_TYPE (type)),
2051 build_zero_cst (TREE_TYPE (type)));
2052
2053 default:
2054 gcc_unreachable ();
2055 }
2056 }
2057
2058 /* Build 0 constant of type TYPE. This is used by constructor folding
2059 and thus the constant should be represented in memory by
2060 zero(es). */
2061
2062 tree
2063 build_zero_cst (tree type)
2064 {
2065 switch (TREE_CODE (type))
2066 {
2067 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2068 case POINTER_TYPE: case REFERENCE_TYPE:
2069 case OFFSET_TYPE: case NULLPTR_TYPE:
2070 return build_int_cst (type, 0);
2071
2072 case REAL_TYPE:
2073 return build_real (type, dconst0);
2074
2075 case FIXED_POINT_TYPE:
2076 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2077
2078 case VECTOR_TYPE:
2079 {
2080 tree scalar = build_zero_cst (TREE_TYPE (type));
2081
2082 return build_vector_from_val (type, scalar);
2083 }
2084
2085 case COMPLEX_TYPE:
2086 {
2087 tree zero = build_zero_cst (TREE_TYPE (type));
2088
2089 return build_complex (type, zero, zero);
2090 }
2091
2092 default:
2093 if (!AGGREGATE_TYPE_P (type))
2094 return fold_convert (type, integer_zero_node);
2095 return build_constructor (type, NULL);
2096 }
2097 }
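/* An illustrative note (not part of GCC): for scalar types build_zero_cst
   simply yields the zero of that type, while for an aggregate it returns an
   empty CONSTRUCTOR, which the middle end treats as an all-zero initializer.
   With some_struct_type standing in for any complete RECORD_TYPE:

     tree zero = build_zero_cst (some_struct_type);
     gcc_assert (TREE_CODE (zero) == CONSTRUCTOR
                 && CONSTRUCTOR_NELTS (zero) == 0);  */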
2098
2099
2100 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2101
2102 tree
2103 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2104 {
2105 tree t;
2106 size_t length = (offsetof (struct tree_binfo, base_binfos)
2107 + vec<tree, va_gc>::embedded_size (base_binfos));
2108
2109 record_node_allocation_statistics (TREE_BINFO, length);
2110
2111 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2112
2113 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2114
2115 TREE_SET_CODE (t, TREE_BINFO);
2116
2117 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2118
2119 return t;
2120 }
2121
2122 /* Create a CASE_LABEL_EXPR tree node and return it. */
2123
2124 tree
2125 build_case_label (tree low_value, tree high_value, tree label_decl)
2126 {
2127 tree t = make_node (CASE_LABEL_EXPR);
2128
2129 TREE_TYPE (t) = void_type_node;
2130 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2131
2132 CASE_LOW (t) = low_value;
2133 CASE_HIGH (t) = high_value;
2134 CASE_LABEL (t) = label_decl;
2135 CASE_CHAIN (t) = NULL_TREE;
2136
2137 return t;
2138 }
2139
2140 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2141 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2142 The latter determines the length of the HOST_WIDE_INT vector. */
2143
2144 tree
2145 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2146 {
2147 tree t;
2148 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2149 + sizeof (struct tree_int_cst));
2150
2151 gcc_assert (len);
2152 record_node_allocation_statistics (INTEGER_CST, length);
2153
2154 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2155
2156 TREE_SET_CODE (t, INTEGER_CST);
2157 TREE_INT_CST_NUNITS (t) = len;
2158 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2159 /* to_offset can only be applied to trees that are offset_int-sized
2160 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2161 must be exactly the precision of offset_int and so LEN is correct. */
2162 if (ext_len <= OFFSET_INT_ELTS)
2163 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2164 else
2165 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2166
2167 TREE_CONSTANT (t) = 1;
2168
2169 return t;
2170 }
2171
2172 /* Build a newly constructed TREE_VEC node of length LEN. */
2173
2174 tree
2175 make_tree_vec_stat (int len MEM_STAT_DECL)
2176 {
2177 tree t;
2178 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2179
2180 record_node_allocation_statistics (TREE_VEC, length);
2181
2182 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2183
2184 TREE_SET_CODE (t, TREE_VEC);
2185 TREE_VEC_LENGTH (t) = len;
2186
2187 return t;
2188 }
2189
2190 /* Grow a TREE_VEC node to new length LEN. */
2191
2192 tree
2193 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2194 {
2195 gcc_assert (TREE_CODE (v) == TREE_VEC);
2196
2197 int oldlen = TREE_VEC_LENGTH (v);
2198 gcc_assert (len > oldlen);
2199
2200 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2201 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2202
2203 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2204
2205 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2206
2207 TREE_VEC_LENGTH (v) = len;
2208
2209 return v;
2210 }
2211 \f
2212 /* Return 1 if EXPR is the integer constant zero, or a complex or vector
2213 constant all of whose elements are zero. */
2214
2215 int
2216 integer_zerop (const_tree expr)
2217 {
2218 STRIP_NOPS (expr);
2219
2220 switch (TREE_CODE (expr))
2221 {
2222 case INTEGER_CST:
2223 return wi::eq_p (expr, 0);
2224 case COMPLEX_CST:
2225 return (integer_zerop (TREE_REALPART (expr))
2226 && integer_zerop (TREE_IMAGPART (expr)));
2227 case VECTOR_CST:
2228 {
2229 unsigned i;
2230 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2231 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2232 return false;
2233 return true;
2234 }
2235 default:
2236 return false;
2237 }
2238 }
2239
2240 /* Return 1 if EXPR is the integer constant one or the corresponding
2241 complex constant. */
2242
2243 int
2244 integer_onep (const_tree expr)
2245 {
2246 STRIP_NOPS (expr);
2247
2248 switch (TREE_CODE (expr))
2249 {
2250 case INTEGER_CST:
2251 return wi::eq_p (wi::to_widest (expr), 1);
2252 case COMPLEX_CST:
2253 return (integer_onep (TREE_REALPART (expr))
2254 && integer_zerop (TREE_IMAGPART (expr)));
2255 case VECTOR_CST:
2256 {
2257 unsigned i;
2258 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2259 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2260 return false;
2261 return true;
2262 }
2263 default:
2264 return false;
2265 }
2266 }
2267
2268 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2269 return 1 if every piece is the integer constant one. */
2270
2271 int
2272 integer_each_onep (const_tree expr)
2273 {
2274 STRIP_NOPS (expr);
2275
2276 if (TREE_CODE (expr) == COMPLEX_CST)
2277 return (integer_onep (TREE_REALPART (expr))
2278 && integer_onep (TREE_IMAGPART (expr)));
2279 else
2280 return integer_onep (expr);
2281 }
2282
2283 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2284 it contains, or a complex or vector whose subparts are such integers. */
2285
2286 int
2287 integer_all_onesp (const_tree expr)
2288 {
2289 STRIP_NOPS (expr);
2290
2291 if (TREE_CODE (expr) == COMPLEX_CST
2292 && integer_all_onesp (TREE_REALPART (expr))
2293 && integer_all_onesp (TREE_IMAGPART (expr)))
2294 return 1;
2295
2296 else if (TREE_CODE (expr) == VECTOR_CST)
2297 {
2298 unsigned i;
2299 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2300 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2301 return 0;
2302 return 1;
2303 }
2304
2305 else if (TREE_CODE (expr) != INTEGER_CST)
2306 return 0;
2307
2308 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2309 }
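/* For illustration (not part of the sources): on an 8-bit integer type,
   integer_all_onesp holds for the constant whose bit pattern is 0xff, i.e.
   255 when the type is unsigned or -1 when it is signed, because the
   comparison above is against the unsigned maximum of the type's precision.  */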
2310
2311 /* Return 1 if EXPR is the integer constant minus one. */
2312
2313 int
2314 integer_minus_onep (const_tree expr)
2315 {
2316 STRIP_NOPS (expr);
2317
2318 if (TREE_CODE (expr) == COMPLEX_CST)
2319 return (integer_all_onesp (TREE_REALPART (expr))
2320 && integer_zerop (TREE_IMAGPART (expr)));
2321 else
2322 return integer_all_onesp (expr);
2323 }
2324
2325 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2326 one bit on). */
2327
2328 int
2329 integer_pow2p (const_tree expr)
2330 {
2331 STRIP_NOPS (expr);
2332
2333 if (TREE_CODE (expr) == COMPLEX_CST
2334 && integer_pow2p (TREE_REALPART (expr))
2335 && integer_zerop (TREE_IMAGPART (expr)))
2336 return 1;
2337
2338 if (TREE_CODE (expr) != INTEGER_CST)
2339 return 0;
2340
2341 return wi::popcount (expr) == 1;
2342 }
2343
2344 /* Return 1 if EXPR is an integer constant other than zero or a
2345 complex constant other than zero. */
2346
2347 int
2348 integer_nonzerop (const_tree expr)
2349 {
2350 STRIP_NOPS (expr);
2351
2352 return ((TREE_CODE (expr) == INTEGER_CST
2353 && !wi::eq_p (expr, 0))
2354 || (TREE_CODE (expr) == COMPLEX_CST
2355 && (integer_nonzerop (TREE_REALPART (expr))
2356 || integer_nonzerop (TREE_IMAGPART (expr)))));
2357 }
2358
2359 /* Return 1 if EXPR is the integer constant one. For vector,
2360 return 1 if every piece is the integer constant minus one
2361 (representing the value TRUE). */
2362
2363 int
2364 integer_truep (const_tree expr)
2365 {
2366 STRIP_NOPS (expr);
2367
2368 if (TREE_CODE (expr) == VECTOR_CST)
2369 return integer_all_onesp (expr);
2370 return integer_onep (expr);
2371 }
2372
2373 /* Return 1 if EXPR is the fixed-point constant zero. */
2374
2375 int
2376 fixed_zerop (const_tree expr)
2377 {
2378 return (TREE_CODE (expr) == FIXED_CST
2379 && TREE_FIXED_CST (expr).data.is_zero ());
2380 }
2381
2382 /* Return the power of two represented by a tree node known to be a
2383 power of two. */
2384
2385 int
2386 tree_log2 (const_tree expr)
2387 {
2388 STRIP_NOPS (expr);
2389
2390 if (TREE_CODE (expr) == COMPLEX_CST)
2391 return tree_log2 (TREE_REALPART (expr));
2392
2393 return wi::exact_log2 (expr);
2394 }
2395
2396 /* Similar, but return the largest integer Y such that 2 ** Y is less
2397 than or equal to EXPR. */
2398
2399 int
2400 tree_floor_log2 (const_tree expr)
2401 {
2402 STRIP_NOPS (expr);
2403
2404 if (TREE_CODE (expr) == COMPLEX_CST)
2405 return tree_log2 (TREE_REALPART (expr));
2406
2407 return wi::floor_log2 (expr);
2408 }
2409
2410 /* Return number of known trailing zero bits in EXPR, or, if the value of
2411 EXPR is known to be zero, the precision of its type. */
2412
2413 unsigned int
2414 tree_ctz (const_tree expr)
2415 {
2416 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2417 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2418 return 0;
2419
2420 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2421 switch (TREE_CODE (expr))
2422 {
2423 case INTEGER_CST:
2424 ret1 = wi::ctz (expr);
2425 return MIN (ret1, prec);
2426 case SSA_NAME:
2427 ret1 = wi::ctz (get_nonzero_bits (expr));
2428 return MIN (ret1, prec);
2429 case PLUS_EXPR:
2430 case MINUS_EXPR:
2431 case BIT_IOR_EXPR:
2432 case BIT_XOR_EXPR:
2433 case MIN_EXPR:
2434 case MAX_EXPR:
2435 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2436 if (ret1 == 0)
2437 return ret1;
2438 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2439 return MIN (ret1, ret2);
2440 case POINTER_PLUS_EXPR:
2441 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2442 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2443 /* The second operand is sizetype, which could in theory be
2444 wider than the pointer's precision. Make sure we never
2445 return more than prec. */
2446 ret2 = MIN (ret2, prec);
2447 return MIN (ret1, ret2);
2448 case BIT_AND_EXPR:
2449 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2450 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2451 return MAX (ret1, ret2);
2452 case MULT_EXPR:
2453 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2454 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2455 return MIN (ret1 + ret2, prec);
2456 case LSHIFT_EXPR:
2457 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2458 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2459 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2460 {
2461 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2462 return MIN (ret1 + ret2, prec);
2463 }
2464 return ret1;
2465 case RSHIFT_EXPR:
2466 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2467 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2468 {
2469 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2470 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2471 if (ret1 > ret2)
2472 return ret1 - ret2;
2473 }
2474 return 0;
2475 case TRUNC_DIV_EXPR:
2476 case CEIL_DIV_EXPR:
2477 case FLOOR_DIV_EXPR:
2478 case ROUND_DIV_EXPR:
2479 case EXACT_DIV_EXPR:
2480 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2481 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2482 {
2483 int l = tree_log2 (TREE_OPERAND (expr, 1));
2484 if (l >= 0)
2485 {
2486 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2487 ret2 = l;
2488 if (ret1 > ret2)
2489 return ret1 - ret2;
2490 }
2491 }
2492 return 0;
2493 CASE_CONVERT:
2494 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2495 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2496 ret1 = prec;
2497 return MIN (ret1, prec);
2498 case SAVE_EXPR:
2499 return tree_ctz (TREE_OPERAND (expr, 0));
2500 case COND_EXPR:
2501 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2502 if (ret1 == 0)
2503 return 0;
2504 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2505 return MIN (ret1, ret2);
2506 case COMPOUND_EXPR:
2507 return tree_ctz (TREE_OPERAND (expr, 1));
2508 case ADDR_EXPR:
2509 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2510 if (ret1 > BITS_PER_UNIT)
2511 {
2512 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2513 return MIN (ret1, prec);
2514 }
2515 return 0;
2516 default:
2517 return 0;
2518 }
2519 }
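/* Illustrative examples (not taken from the sources) of how tree_ctz combines
   its operands: if A is known to have 3 trailing zero bits and B is known to
   have 2, then

     A + B    has at least MIN (3, 2) = 2 trailing zero bits,
     A & B    has at least MAX (3, 2) = 3,
     A * B    has at least 3 + 2 = 5, and
     A << 4   has at least 3 + 4 = 7,

   with every result capped at the precision of the type.  */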
2520
2521 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2522 decimal float constants, so don't return 1 for them. */
2523
2524 int
2525 real_zerop (const_tree expr)
2526 {
2527 STRIP_NOPS (expr);
2528
2529 switch (TREE_CODE (expr))
2530 {
2531 case REAL_CST:
2532 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2533 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2534 case COMPLEX_CST:
2535 return real_zerop (TREE_REALPART (expr))
2536 && real_zerop (TREE_IMAGPART (expr));
2537 case VECTOR_CST:
2538 {
2539 unsigned i;
2540 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2541 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2542 return false;
2543 return true;
2544 }
2545 default:
2546 return false;
2547 }
2548 }
2549
2550 /* Return 1 if EXPR is the real constant one in real or complex form.
2551 Trailing zeroes matter for decimal float constants, so don't return
2552 1 for them. */
2553
2554 int
2555 real_onep (const_tree expr)
2556 {
2557 STRIP_NOPS (expr);
2558
2559 switch (TREE_CODE (expr))
2560 {
2561 case REAL_CST:
2562 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2563 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2564 case COMPLEX_CST:
2565 return real_onep (TREE_REALPART (expr))
2566 && real_zerop (TREE_IMAGPART (expr));
2567 case VECTOR_CST:
2568 {
2569 unsigned i;
2570 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2571 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2572 return false;
2573 return true;
2574 }
2575 default:
2576 return false;
2577 }
2578 }
2579
2580 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2581 matter for decimal float constants, so don't return 1 for them. */
2582
2583 int
2584 real_minus_onep (const_tree expr)
2585 {
2586 STRIP_NOPS (expr);
2587
2588 switch (TREE_CODE (expr))
2589 {
2590 case REAL_CST:
2591 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2592 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2593 case COMPLEX_CST:
2594 return real_minus_onep (TREE_REALPART (expr))
2595 && real_zerop (TREE_IMAGPART (expr));
2596 case VECTOR_CST:
2597 {
2598 unsigned i;
2599 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2600 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2601 return false;
2602 return true;
2603 }
2604 default:
2605 return false;
2606 }
2607 }
2608
2609 /* Nonzero if EXP is a constant or a cast of a constant. */
2610
2611 int
2612 really_constant_p (const_tree exp)
2613 {
2614 /* This is not quite the same as STRIP_NOPS. It does more. */
2615 while (CONVERT_EXPR_P (exp)
2616 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2617 exp = TREE_OPERAND (exp, 0);
2618 return TREE_CONSTANT (exp);
2619 }
2620 \f
2621 /* Return first list element whose TREE_VALUE is ELEM.
2622 Return 0 if ELEM is not in LIST. */
2623
2624 tree
2625 value_member (tree elem, tree list)
2626 {
2627 while (list)
2628 {
2629 if (elem == TREE_VALUE (list))
2630 return list;
2631 list = TREE_CHAIN (list);
2632 }
2633 return NULL_TREE;
2634 }
2635
2636 /* Return first list element whose TREE_PURPOSE is ELEM.
2637 Return 0 if ELEM is not in LIST. */
2638
2639 tree
2640 purpose_member (const_tree elem, tree list)
2641 {
2642 while (list)
2643 {
2644 if (elem == TREE_PURPOSE (list))
2645 return list;
2646 list = TREE_CHAIN (list);
2647 }
2648 return NULL_TREE;
2649 }
2650
2651 /* Return true if ELEM is in V. */
2652
2653 bool
2654 vec_member (const_tree elem, vec<tree, va_gc> *v)
2655 {
2656 unsigned ix;
2657 tree t;
2658 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2659 if (elem == t)
2660 return true;
2661 return false;
2662 }
2663
2664 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2665 NULL_TREE. */
2666
2667 tree
2668 chain_index (int idx, tree chain)
2669 {
2670 for (; chain && idx > 0; --idx)
2671 chain = TREE_CHAIN (chain);
2672 return chain;
2673 }
2674
2675 /* Return nonzero if ELEM is part of the chain CHAIN. */
2676
2677 int
2678 chain_member (const_tree elem, const_tree chain)
2679 {
2680 while (chain)
2681 {
2682 if (elem == chain)
2683 return 1;
2684 chain = DECL_CHAIN (chain);
2685 }
2686
2687 return 0;
2688 }
2689
2690 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2691 We expect a null pointer to mark the end of the chain.
2692 This is the Lisp primitive `length'. */
2693
2694 int
2695 list_length (const_tree t)
2696 {
2697 const_tree p = t;
2698 #ifdef ENABLE_TREE_CHECKING
2699 const_tree q = t;
2700 #endif
2701 int len = 0;
2702
2703 while (p)
2704 {
2705 p = TREE_CHAIN (p);
2706 #ifdef ENABLE_TREE_CHECKING
2707 if (len % 2)
2708 q = TREE_CHAIN (q);
2709 gcc_assert (p != q);
2710 #endif
2711 len++;
2712 }
2713
2714 return len;
2715 }
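/* A descriptive note on the checking code above (not in the original
   comments): with ENABLE_TREE_CHECKING, Q advances one link for every two
   links P advances, so on a circular chain P eventually catches up with Q and
   the assertion fires instead of the loop running forever.  */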
2716
2717 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2718 UNION_TYPE TYPE, or NULL_TREE if none. */
2719
2720 tree
2721 first_field (const_tree type)
2722 {
2723 tree t = TYPE_FIELDS (type);
2724 while (t && TREE_CODE (t) != FIELD_DECL)
2725 t = TREE_CHAIN (t);
2726 return t;
2727 }
2728
2729 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2730 by modifying the last node in chain 1 to point to chain 2.
2731 This is the Lisp primitive `nconc'. */
2732
2733 tree
2734 chainon (tree op1, tree op2)
2735 {
2736 tree t1;
2737
2738 if (!op1)
2739 return op2;
2740 if (!op2)
2741 return op1;
2742
2743 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2744 continue;
2745 TREE_CHAIN (t1) = op2;
2746
2747 #ifdef ENABLE_TREE_CHECKING
2748 {
2749 tree t2;
2750 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2751 gcc_assert (t2 != t1);
2752 }
2753 #endif
2754
2755 return op1;
2756 }
2757
2758 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2759
2760 tree
2761 tree_last (tree chain)
2762 {
2763 tree next;
2764 if (chain)
2765 while ((next = TREE_CHAIN (chain)))
2766 chain = next;
2767 return chain;
2768 }
2769
2770 /* Reverse the order of elements in the chain T,
2771 and return the new head of the chain (old last element). */
2772
2773 tree
2774 nreverse (tree t)
2775 {
2776 tree prev = 0, decl, next;
2777 for (decl = t; decl; decl = next)
2778 {
2779 /* We shouldn't be using this function to reverse BLOCK chains; we
2780 have blocks_nreverse for that. */
2781 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2782 next = TREE_CHAIN (decl);
2783 TREE_CHAIN (decl) = prev;
2784 prev = decl;
2785 }
2786 return prev;
2787 }
2788 \f
2789 /* Return a newly created TREE_LIST node whose
2790 purpose and value fields are PARM and VALUE. */
2791
2792 tree
2793 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2794 {
2795 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2796 TREE_PURPOSE (t) = parm;
2797 TREE_VALUE (t) = value;
2798 return t;
2799 }
2800
2801 /* Build a chain of TREE_LIST nodes from a vector. */
2802
2803 tree
2804 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2805 {
2806 tree ret = NULL_TREE;
2807 tree *pp = &ret;
2808 unsigned int i;
2809 tree t;
2810 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2811 {
2812 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2813 pp = &TREE_CHAIN (*pp);
2814 }
2815 return ret;
2816 }
2817
2818 /* Return a newly created TREE_LIST node whose
2819 purpose and value fields are PURPOSE and VALUE
2820 and whose TREE_CHAIN is CHAIN. */
2821
2822 tree
2823 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2824 {
2825 tree node;
2826
2827 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2828 memset (node, 0, sizeof (struct tree_common));
2829
2830 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2831
2832 TREE_SET_CODE (node, TREE_LIST);
2833 TREE_CHAIN (node) = chain;
2834 TREE_PURPOSE (node) = purpose;
2835 TREE_VALUE (node) = value;
2836 return node;
2837 }
2838
2839 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2840 trees. */
2841
2842 vec<tree, va_gc> *
2843 ctor_to_vec (tree ctor)
2844 {
2845 vec<tree, va_gc> *vec;
2846 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2847 unsigned int ix;
2848 tree val;
2849
2850 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2851 vec->quick_push (val);
2852
2853 return vec;
2854 }
2855 \f
2856 /* Return the size nominally occupied by an object of type TYPE
2857 when it resides in memory. The value is measured in units of bytes,
2858 and its data type is that normally used for type sizes
2859 (which is the first type created by make_signed_type or
2860 make_unsigned_type). */
2861
2862 tree
2863 size_in_bytes (const_tree type)
2864 {
2865 tree t;
2866
2867 if (type == error_mark_node)
2868 return integer_zero_node;
2869
2870 type = TYPE_MAIN_VARIANT (type);
2871 t = TYPE_SIZE_UNIT (type);
2872
2873 if (t == 0)
2874 {
2875 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2876 return size_zero_node;
2877 }
2878
2879 return t;
2880 }
2881
2882 /* Return the size of TYPE (in bytes) as a wide integer
2883 or return -1 if the size can vary or is larger than an integer. */
2884
2885 HOST_WIDE_INT
2886 int_size_in_bytes (const_tree type)
2887 {
2888 tree t;
2889
2890 if (type == error_mark_node)
2891 return 0;
2892
2893 type = TYPE_MAIN_VARIANT (type);
2894 t = TYPE_SIZE_UNIT (type);
2895
2896 if (t && tree_fits_uhwi_p (t))
2897 return TREE_INT_CST_LOW (t);
2898 else
2899 return -1;
2900 }
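/* For example (illustrative only): on a target where int is 32 bits wide,
   int_size_in_bytes (integer_type_node) is 4, whereas for an incomplete type
   or a variable-length array type, whose TYPE_SIZE_UNIT is absent or not a
   compile-time constant, the function returns -1.  */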
2901
2902 /* Return the maximum size of TYPE (in bytes) as a wide integer
2903 or return -1 if the size can vary or is larger than an integer. */
2904
2905 HOST_WIDE_INT
2906 max_int_size_in_bytes (const_tree type)
2907 {
2908 HOST_WIDE_INT size = -1;
2909 tree size_tree;
2910
2911 /* If this is an array type, check for a possible MAX_SIZE attached. */
2912
2913 if (TREE_CODE (type) == ARRAY_TYPE)
2914 {
2915 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2916
2917 if (size_tree && tree_fits_uhwi_p (size_tree))
2918 size = tree_to_uhwi (size_tree);
2919 }
2920
2921 /* If we still haven't been able to get a size, see if the language
2922 can compute a maximum size. */
2923
2924 if (size == -1)
2925 {
2926 size_tree = lang_hooks.types.max_size (type);
2927
2928 if (size_tree && tree_fits_uhwi_p (size_tree))
2929 size = tree_to_uhwi (size_tree);
2930 }
2931
2932 return size;
2933 }
2934 \f
2935 /* Return the bit position of FIELD, in bits from the start of the record.
2936 This is a tree of type bitsizetype. */
2937
2938 tree
2939 bit_position (const_tree field)
2940 {
2941 return bit_from_pos (DECL_FIELD_OFFSET (field),
2942 DECL_FIELD_BIT_OFFSET (field));
2943 }
2944 \f
2945 /* Return the byte position of FIELD, in bytes from the start of the record.
2946 This is a tree of type sizetype. */
2947
2948 tree
2949 byte_position (const_tree field)
2950 {
2951 return byte_from_pos (DECL_FIELD_OFFSET (field),
2952 DECL_FIELD_BIT_OFFSET (field));
2953 }
2954
2955 /* Likewise, but return it as an integer. It must be representable in
2956 that way (since it could be a signed value, we don't have the
2957 option of returning -1 like int_size_in_bytes can). */
2958
2959 HOST_WIDE_INT
2960 int_byte_position (const_tree field)
2961 {
2962 return tree_to_shwi (byte_position (field));
2963 }
2964 \f
2965 /* Return the strictest alignment, in bits, that T is known to have. */
2966
2967 unsigned int
2968 expr_align (const_tree t)
2969 {
2970 unsigned int align0, align1;
2971
2972 switch (TREE_CODE (t))
2973 {
2974 CASE_CONVERT: case NON_LVALUE_EXPR:
2975 /* If we have conversions, we know that the alignment of the
2976 object must meet each of the alignments of the types. */
2977 align0 = expr_align (TREE_OPERAND (t, 0));
2978 align1 = TYPE_ALIGN (TREE_TYPE (t));
2979 return MAX (align0, align1);
2980
2981 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2982 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2983 case CLEANUP_POINT_EXPR:
2984 /* These don't change the alignment of an object. */
2985 return expr_align (TREE_OPERAND (t, 0));
2986
2987 case COND_EXPR:
2988 /* The best we can do is say that the alignment is the least aligned
2989 of the two arms. */
2990 align0 = expr_align (TREE_OPERAND (t, 1));
2991 align1 = expr_align (TREE_OPERAND (t, 2));
2992 return MIN (align0, align1);
2993
2994 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2995 meaningfully; it's always 1. */
2996 case LABEL_DECL: case CONST_DECL:
2997 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2998 case FUNCTION_DECL:
2999 gcc_assert (DECL_ALIGN (t) != 0);
3000 return DECL_ALIGN (t);
3001
3002 default:
3003 break;
3004 }
3005
3006 /* Otherwise take the alignment from that of the type. */
3007 return TYPE_ALIGN (TREE_TYPE (t));
3008 }
3009 \f
3010 /* Return, as a tree node, the number of elements for TYPE (which is an
3011 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3012
3013 tree
3014 array_type_nelts (const_tree type)
3015 {
3016 tree index_type, min, max;
3017
3018 /* If they did it with unspecified bounds, then we should have already
3019 given an error about it before we got here. */
3020 if (! TYPE_DOMAIN (type))
3021 return error_mark_node;
3022
3023 index_type = TYPE_DOMAIN (type);
3024 min = TYPE_MIN_VALUE (index_type);
3025 max = TYPE_MAX_VALUE (index_type);
3026
3027 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3028 if (!max)
3029 return error_mark_node;
3030
3031 return (integer_zerop (min)
3032 ? max
3033 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3034 }
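/* An illustrative example (not from the sources): for the C array type
   int[10], whose domain is [0, 9], array_type_nelts returns the INTEGER_CST
   9, i.e. the number of elements minus one; callers wanting the element count
   itself must add one back.  */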
3035 \f
3036 /* If arg is static -- a reference to an object in static storage -- then
3037 return the object. This is not the same as the C meaning of `static'.
3038 If arg isn't static, return NULL. */
3039
3040 tree
3041 staticp (tree arg)
3042 {
3043 switch (TREE_CODE (arg))
3044 {
3045 case FUNCTION_DECL:
3046 /* Nested functions are static, even though taking their address will
3047 involve a trampoline as we unnest the nested function and create
3048 the trampoline on the tree level. */
3049 return arg;
3050
3051 case VAR_DECL:
3052 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3053 && ! DECL_THREAD_LOCAL_P (arg)
3054 && ! DECL_DLLIMPORT_P (arg)
3055 ? arg : NULL);
3056
3057 case CONST_DECL:
3058 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3059 ? arg : NULL);
3060
3061 case CONSTRUCTOR:
3062 return TREE_STATIC (arg) ? arg : NULL;
3063
3064 case LABEL_DECL:
3065 case STRING_CST:
3066 return arg;
3067
3068 case COMPONENT_REF:
3069 /* If the thing being referenced is not a field, then it is
3070 something language specific. */
3071 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3072
3073 /* If we are referencing a bitfield, we can't evaluate an
3074 ADDR_EXPR at compile time and so it isn't a constant. */
3075 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3076 return NULL;
3077
3078 return staticp (TREE_OPERAND (arg, 0));
3079
3080 case BIT_FIELD_REF:
3081 return NULL;
3082
3083 case INDIRECT_REF:
3084 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3085
3086 case ARRAY_REF:
3087 case ARRAY_RANGE_REF:
3088 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3089 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3090 return staticp (TREE_OPERAND (arg, 0));
3091 else
3092 return NULL;
3093
3094 case COMPOUND_LITERAL_EXPR:
3095 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3096
3097 default:
3098 return NULL;
3099 }
3100 }
3101
3102 \f
3103
3104
3105 /* Return whether OP is a DECL whose address is function-invariant. */
3106
3107 bool
3108 decl_address_invariant_p (const_tree op)
3109 {
3110 /* The conditions below are slightly less strict than the one in
3111 staticp. */
3112
3113 switch (TREE_CODE (op))
3114 {
3115 case PARM_DECL:
3116 case RESULT_DECL:
3117 case LABEL_DECL:
3118 case FUNCTION_DECL:
3119 return true;
3120
3121 case VAR_DECL:
3122 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3123 || DECL_THREAD_LOCAL_P (op)
3124 || DECL_CONTEXT (op) == current_function_decl
3125 || decl_function_context (op) == current_function_decl)
3126 return true;
3127 break;
3128
3129 case CONST_DECL:
3130 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3131 || decl_function_context (op) == current_function_decl)
3132 return true;
3133 break;
3134
3135 default:
3136 break;
3137 }
3138
3139 return false;
3140 }
3141
3142 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3143
3144 bool
3145 decl_address_ip_invariant_p (const_tree op)
3146 {
3147 /* The conditions below are slightly less strict than the one in
3148 staticp. */
3149
3150 switch (TREE_CODE (op))
3151 {
3152 case LABEL_DECL:
3153 case FUNCTION_DECL:
3154 case STRING_CST:
3155 return true;
3156
3157 case VAR_DECL:
3158 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3159 && !DECL_DLLIMPORT_P (op))
3160 || DECL_THREAD_LOCAL_P (op))
3161 return true;
3162 break;
3163
3164 case CONST_DECL:
3165 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3166 return true;
3167 break;
3168
3169 default:
3170 break;
3171 }
3172
3173 return false;
3174 }
3175
3176
3177 /* Return true if T is function-invariant (internal function, does
3178 not handle arithmetic; that's handled in skip_simple_arithmetic and
3179 tree_invariant_p). */
3180
3181 static bool tree_invariant_p (tree t);
3182
3183 static bool
3184 tree_invariant_p_1 (tree t)
3185 {
3186 tree op;
3187
3188 if (TREE_CONSTANT (t)
3189 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3190 return true;
3191
3192 switch (TREE_CODE (t))
3193 {
3194 case SAVE_EXPR:
3195 return true;
3196
3197 case ADDR_EXPR:
3198 op = TREE_OPERAND (t, 0);
3199 while (handled_component_p (op))
3200 {
3201 switch (TREE_CODE (op))
3202 {
3203 case ARRAY_REF:
3204 case ARRAY_RANGE_REF:
3205 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3206 || TREE_OPERAND (op, 2) != NULL_TREE
3207 || TREE_OPERAND (op, 3) != NULL_TREE)
3208 return false;
3209 break;
3210
3211 case COMPONENT_REF:
3212 if (TREE_OPERAND (op, 2) != NULL_TREE)
3213 return false;
3214 break;
3215
3216 default:;
3217 }
3218 op = TREE_OPERAND (op, 0);
3219 }
3220
3221 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3222
3223 default:
3224 break;
3225 }
3226
3227 return false;
3228 }
3229
3230 /* Return true if T is function-invariant. */
3231
3232 static bool
3233 tree_invariant_p (tree t)
3234 {
3235 tree inner = skip_simple_arithmetic (t);
3236 return tree_invariant_p_1 (inner);
3237 }
3238
3239 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3240 Do this to any expression which may be used in more than one place,
3241 but must be evaluated only once.
3242
3243 Normally, expand_expr would reevaluate the expression each time.
3244 Calling save_expr produces something that is evaluated and recorded
3245 the first time expand_expr is called on it. Subsequent calls to
3246 expand_expr just reuse the recorded value.
3247
3248 The call to expand_expr that generates code that actually computes
3249 the value is the first call *at compile time*. Subsequent calls
3250 *at compile time* generate code to use the saved value.
3251 This produces correct result provided that *at run time* control
3252 always flows through the insns made by the first expand_expr
3253 before reaching the other places where the save_expr was evaluated.
3254 You, the caller of save_expr, must make sure this is so.
3255
3256 Constants, and certain read-only nodes, are returned with no
3257 SAVE_EXPR because that is safe. Expressions containing placeholders
3258 are not touched; see tree.def for an explanation of what these
3259 are used for. */
3260
3261 tree
3262 save_expr (tree expr)
3263 {
3264 tree t = fold (expr);
3265 tree inner;
3266
3267 /* If the tree evaluates to a constant, then we don't want to hide that
3268 fact (i.e. this allows further folding, and direct checks for constants).
3269 However, a read-only object that has side effects cannot be bypassed.
3270 Since it is no problem to reevaluate literals, we just return the
3271 literal node. */
3272 inner = skip_simple_arithmetic (t);
3273 if (TREE_CODE (inner) == ERROR_MARK)
3274 return inner;
3275
3276 if (tree_invariant_p_1 (inner))
3277 return t;
3278
3279 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3280 it means that the size or offset of some field of an object depends on
3281 the value within another field.
3282
3283 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3284 and some variable since it would then need to be both evaluated once and
3285 evaluated more than once. Front-ends must assure this case cannot
3286 happen by surrounding any such subexpressions in their own SAVE_EXPR
3287 and forcing evaluation at the proper time. */
3288 if (contains_placeholder_p (inner))
3289 return t;
3290
3291 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3292 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3293
3294 /* This expression might be placed ahead of a jump to ensure that the
3295 value was computed on both sides of the jump. So make sure it isn't
3296 eliminated as dead. */
3297 TREE_SIDE_EFFECTS (t) = 1;
3298 return t;
3299 }
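/* A classic illustrative use (a sketch, not code from this file): a front end
   expanding MIN (A, B) as A < B ? A : B wraps A and B in save_expr first, so
   that each is evaluated only once at run time even though each appears twice
   in the expansion.  Here TYPE stands for the common arithmetic type of A
   and B:

     a = save_expr (a);
     b = save_expr (b);
     t = fold_build3 (COND_EXPR, type,
                      fold_build2 (LT_EXPR, boolean_type_node, a, b),
                      a, b);  */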
3300
3301 /* Look inside EXPR into any simple arithmetic operations. Return the
3302 outermost non-arithmetic or non-invariant node. */
3303
3304 tree
3305 skip_simple_arithmetic (tree expr)
3306 {
3307 /* We don't care about whether this can be used as an lvalue in this
3308 context. */
3309 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3310 expr = TREE_OPERAND (expr, 0);
3311
3312 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3313 a constant, it will be more efficient to not make another SAVE_EXPR since
3314 it will allow better simplification and GCSE will be able to merge the
3315 computations if they actually occur. */
3316 while (true)
3317 {
3318 if (UNARY_CLASS_P (expr))
3319 expr = TREE_OPERAND (expr, 0);
3320 else if (BINARY_CLASS_P (expr))
3321 {
3322 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3323 expr = TREE_OPERAND (expr, 0);
3324 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3325 expr = TREE_OPERAND (expr, 1);
3326 else
3327 break;
3328 }
3329 else
3330 break;
3331 }
3332
3333 return expr;
3334 }
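/* For illustration (not part of the sources): given the tree
   SAVE_EXPR <a> + 4, the constant operand is invariant, so the loop above
   peels the PLUS_EXPR and returns SAVE_EXPR <a>; save_expr then sees an
   already-wrapped operand and does not create a second SAVE_EXPR.  */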
3335
3336 /* Look inside EXPR into simple arithmetic operations involving constants.
3337 Return the outermost non-arithmetic or non-constant node. */
3338
3339 tree
3340 skip_simple_constant_arithmetic (tree expr)
3341 {
3342 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3343 expr = TREE_OPERAND (expr, 0);
3344
3345 while (true)
3346 {
3347 if (UNARY_CLASS_P (expr))
3348 expr = TREE_OPERAND (expr, 0);
3349 else if (BINARY_CLASS_P (expr))
3350 {
3351 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3352 expr = TREE_OPERAND (expr, 0);
3353 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3354 expr = TREE_OPERAND (expr, 1);
3355 else
3356 break;
3357 }
3358 else
3359 break;
3360 }
3361
3362 return expr;
3363 }
3364
3365 /* Return which tree structure is used by T. */
3366
3367 enum tree_node_structure_enum
3368 tree_node_structure (const_tree t)
3369 {
3370 const enum tree_code code = TREE_CODE (t);
3371 return tree_node_structure_for_code (code);
3372 }
3373
3374 /* Set various status flags when building a CALL_EXPR object T. */
3375
3376 static void
3377 process_call_operands (tree t)
3378 {
3379 bool side_effects = TREE_SIDE_EFFECTS (t);
3380 bool read_only = false;
3381 int i = call_expr_flags (t);
3382
3383 /* Calls have side-effects, except those to const or pure functions. */
3384 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3385 side_effects = true;
3386 /* Propagate TREE_READONLY of arguments for const functions. */
3387 if (i & ECF_CONST)
3388 read_only = true;
3389
3390 if (!side_effects || read_only)
3391 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3392 {
3393 tree op = TREE_OPERAND (t, i);
3394 if (op && TREE_SIDE_EFFECTS (op))
3395 side_effects = true;
3396 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3397 read_only = false;
3398 }
3399
3400 TREE_SIDE_EFFECTS (t) = side_effects;
3401 TREE_READONLY (t) = read_only;
3402 }
3403 \f
3404 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3405 size or offset that depends on a field within a record. */
3406
3407 bool
3408 contains_placeholder_p (const_tree exp)
3409 {
3410 enum tree_code code;
3411
3412 if (!exp)
3413 return 0;
3414
3415 code = TREE_CODE (exp);
3416 if (code == PLACEHOLDER_EXPR)
3417 return 1;
3418
3419 switch (TREE_CODE_CLASS (code))
3420 {
3421 case tcc_reference:
3422 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3423 position computations since they will be converted into a
3424 WITH_RECORD_EXPR involving the reference, which we assume
3425 here will be valid. */
3426 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3427
3428 case tcc_exceptional:
3429 if (code == TREE_LIST)
3430 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3431 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3432 break;
3433
3434 case tcc_unary:
3435 case tcc_binary:
3436 case tcc_comparison:
3437 case tcc_expression:
3438 switch (code)
3439 {
3440 case COMPOUND_EXPR:
3441 /* Ignoring the first operand isn't quite right, but works best. */
3442 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3443
3444 case COND_EXPR:
3445 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3446 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3447 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3448
3449 case SAVE_EXPR:
3450 /* The save_expr function never wraps anything containing
3451 a PLACEHOLDER_EXPR. */
3452 return 0;
3453
3454 default:
3455 break;
3456 }
3457
3458 switch (TREE_CODE_LENGTH (code))
3459 {
3460 case 1:
3461 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3462 case 2:
3463 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3464 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3465 default:
3466 return 0;
3467 }
3468
3469 case tcc_vl_exp:
3470 switch (code)
3471 {
3472 case CALL_EXPR:
3473 {
3474 const_tree arg;
3475 const_call_expr_arg_iterator iter;
3476 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3477 if (CONTAINS_PLACEHOLDER_P (arg))
3478 return 1;
3479 return 0;
3480 }
3481 default:
3482 return 0;
3483 }
3484
3485 default:
3486 return 0;
3487 }
3488 return 0;
3489 }
3490
3491 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3492 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3493 field positions. */
3494
3495 static bool
3496 type_contains_placeholder_1 (const_tree type)
3497 {
3498 /* If the size contains a placeholder or the parent type (component type in
3499 the case of arrays) type involves a placeholder, this type does. */
3500 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3501 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3502 || (!POINTER_TYPE_P (type)
3503 && TREE_TYPE (type)
3504 && type_contains_placeholder_p (TREE_TYPE (type))))
3505 return true;
3506
3507 /* Now do type-specific checks. Note that the last part of the check above
3508 greatly limits what we have to do below. */
3509 switch (TREE_CODE (type))
3510 {
3511 case VOID_TYPE:
3512 case POINTER_BOUNDS_TYPE:
3513 case COMPLEX_TYPE:
3514 case ENUMERAL_TYPE:
3515 case BOOLEAN_TYPE:
3516 case POINTER_TYPE:
3517 case OFFSET_TYPE:
3518 case REFERENCE_TYPE:
3519 case METHOD_TYPE:
3520 case FUNCTION_TYPE:
3521 case VECTOR_TYPE:
3522 case NULLPTR_TYPE:
3523 return false;
3524
3525 case INTEGER_TYPE:
3526 case REAL_TYPE:
3527 case FIXED_POINT_TYPE:
3528 /* Here we just check the bounds. */
3529 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3530 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3531
3532 case ARRAY_TYPE:
3533 /* We have already checked the component type above, so just check the
3534 domain type. */
3535 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3536
3537 case RECORD_TYPE:
3538 case UNION_TYPE:
3539 case QUAL_UNION_TYPE:
3540 {
3541 tree field;
3542
3543 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3544 if (TREE_CODE (field) == FIELD_DECL
3545 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3546 || (TREE_CODE (type) == QUAL_UNION_TYPE
3547 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3548 || type_contains_placeholder_p (TREE_TYPE (field))))
3549 return true;
3550
3551 return false;
3552 }
3553
3554 default:
3555 gcc_unreachable ();
3556 }
3557 }
3558
3559 /* Wrapper around above function used to cache its result. */
3560
3561 bool
3562 type_contains_placeholder_p (tree type)
3563 {
3564 bool result;
3565
3566 /* If the contains_placeholder_bits field has been initialized,
3567 then we know the answer. */
3568 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3569 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3570
3571 /* Indicate that we've seen this type node, and the answer is false.
3572 This is what we want to return if we run into recursion via fields. */
3573 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3574
3575 /* Compute the real value. */
3576 result = type_contains_placeholder_1 (type);
3577
3578 /* Store the real value. */
3579 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3580
3581 return result;
3582 }
3583 \f
3584 /* Push tree EXP onto vector QUEUE if it is not already present. */
3585
3586 static void
3587 push_without_duplicates (tree exp, vec<tree> *queue)
3588 {
3589 unsigned int i;
3590 tree iter;
3591
3592 FOR_EACH_VEC_ELT (*queue, i, iter)
3593 if (simple_cst_equal (iter, exp) == 1)
3594 break;
3595
3596 if (!iter)
3597 queue->safe_push (exp);
3598 }
3599
3600 /* Given a tree EXP, find all occurrences of references to fields
3601 in a PLACEHOLDER_EXPR and place them in vector REFS without
3602 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3603 we assume here that EXP contains only arithmetic expressions
3604 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3605 argument list. */
3606
3607 void
3608 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3609 {
3610 enum tree_code code = TREE_CODE (exp);
3611 tree inner;
3612 int i;
3613
3614 /* We handle TREE_LIST and COMPONENT_REF separately. */
3615 if (code == TREE_LIST)
3616 {
3617 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3618 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3619 }
3620 else if (code == COMPONENT_REF)
3621 {
3622 for (inner = TREE_OPERAND (exp, 0);
3623 REFERENCE_CLASS_P (inner);
3624 inner = TREE_OPERAND (inner, 0))
3625 ;
3626
3627 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3628 push_without_duplicates (exp, refs);
3629 else
3630 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3631 }
3632 else
3633 switch (TREE_CODE_CLASS (code))
3634 {
3635 case tcc_constant:
3636 break;
3637
3638 case tcc_declaration:
3639 /* Variables allocated to static storage can stay. */
3640 if (!TREE_STATIC (exp))
3641 push_without_duplicates (exp, refs);
3642 break;
3643
3644 case tcc_expression:
3645 /* This is the pattern built in ada/make_aligning_type. */
3646 if (code == ADDR_EXPR
3647 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3648 {
3649 push_without_duplicates (exp, refs);
3650 break;
3651 }
3652
3653 /* Fall through... */
3654
3655 case tcc_exceptional:
3656 case tcc_unary:
3657 case tcc_binary:
3658 case tcc_comparison:
3659 case tcc_reference:
3660 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3661 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3662 break;
3663
3664 case tcc_vl_exp:
3665 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3666 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3667 break;
3668
3669 default:
3670 gcc_unreachable ();
3671 }
3672 }
3673
3674 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3675 return a tree with all occurrences of references to F in a
3676 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3677 CONST_DECLs. Note that we assume here that EXP contains only
3678 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3679 occurring only in their argument list. */
3680
3681 tree
3682 substitute_in_expr (tree exp, tree f, tree r)
3683 {
3684 enum tree_code code = TREE_CODE (exp);
3685 tree op0, op1, op2, op3;
3686 tree new_tree;
3687
3688 /* We handle TREE_LIST and COMPONENT_REF separately. */
3689 if (code == TREE_LIST)
3690 {
3691 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3692 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3693 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3694 return exp;
3695
3696 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3697 }
3698 else if (code == COMPONENT_REF)
3699 {
3700 tree inner;
3701
3702 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3703 and it is the right field, replace it with R. */
3704 for (inner = TREE_OPERAND (exp, 0);
3705 REFERENCE_CLASS_P (inner);
3706 inner = TREE_OPERAND (inner, 0))
3707 ;
3708
3709 /* The field. */
3710 op1 = TREE_OPERAND (exp, 1);
3711
3712 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3713 return r;
3714
3715 /* If this expression hasn't been completed yet, leave it alone. */
3716 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3717 return exp;
3718
3719 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3720 if (op0 == TREE_OPERAND (exp, 0))
3721 return exp;
3722
3723 new_tree
3724 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3725 }
3726 else
3727 switch (TREE_CODE_CLASS (code))
3728 {
3729 case tcc_constant:
3730 return exp;
3731
3732 case tcc_declaration:
3733 if (exp == f)
3734 return r;
3735 else
3736 return exp;
3737
3738 case tcc_expression:
3739 if (exp == f)
3740 return r;
3741
3742 /* Fall through... */
3743
3744 case tcc_exceptional:
3745 case tcc_unary:
3746 case tcc_binary:
3747 case tcc_comparison:
3748 case tcc_reference:
3749 switch (TREE_CODE_LENGTH (code))
3750 {
3751 case 0:
3752 return exp;
3753
3754 case 1:
3755 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3756 if (op0 == TREE_OPERAND (exp, 0))
3757 return exp;
3758
3759 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3760 break;
3761
3762 case 2:
3763 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3764 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3765
3766 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3767 return exp;
3768
3769 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3770 break;
3771
3772 case 3:
3773 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3774 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3775 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3776
3777 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3778 && op2 == TREE_OPERAND (exp, 2))
3779 return exp;
3780
3781 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3782 break;
3783
3784 case 4:
3785 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3786 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3787 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3788 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3789
3790 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3791 && op2 == TREE_OPERAND (exp, 2)
3792 && op3 == TREE_OPERAND (exp, 3))
3793 return exp;
3794
3795 new_tree
3796 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3797 break;
3798
3799 default:
3800 gcc_unreachable ();
3801 }
3802 break;
3803
3804 case tcc_vl_exp:
3805 {
3806 int i;
3807
3808 new_tree = NULL_TREE;
3809
3810 /* If we are trying to replace F with a constant, inline back
3811 functions which do nothing else than computing a value from
3812 the arguments they are passed. This makes it possible to
3813 fold partially or entirely the replacement expression. */
3814 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3815 {
3816 tree t = maybe_inline_call_in_expr (exp);
3817 if (t)
3818 return SUBSTITUTE_IN_EXPR (t, f, r);
3819 }
3820
3821 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3822 {
3823 tree op = TREE_OPERAND (exp, i);
3824 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3825 if (new_op != op)
3826 {
3827 if (!new_tree)
3828 new_tree = copy_node (exp);
3829 TREE_OPERAND (new_tree, i) = new_op;
3830 }
3831 }
3832
3833 if (new_tree)
3834 {
3835 new_tree = fold (new_tree);
3836 if (TREE_CODE (new_tree) == CALL_EXPR)
3837 process_call_operands (new_tree);
3838 }
3839 else
3840 return exp;
3841 }
3842 break;
3843
3844 default:
3845 gcc_unreachable ();
3846 }
3847
3848 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3849
3850 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3851 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3852
3853 return new_tree;
3854 }
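/* An illustrative example (not from the sources, in the spirit of the Ada
   front end): if a record field's size is the expression
   COMPONENT_REF <PLACEHOLDER_EXPR, N> * 4, then SUBSTITUTE_IN_EXPR with F
   being the FIELD_DECL for N and R the constant 10 rewrites it to 10 * 4,
   which the fold_build2 call above reduces to 40.  */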
3855
3856 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3857 for it within OBJ, a tree that is an object or a chain of references. */
3858
3859 tree
3860 substitute_placeholder_in_expr (tree exp, tree obj)
3861 {
3862 enum tree_code code = TREE_CODE (exp);
3863 tree op0, op1, op2, op3;
3864 tree new_tree;
3865
3866 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3867 in the chain of OBJ. */
3868 if (code == PLACEHOLDER_EXPR)
3869 {
3870 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3871 tree elt;
3872
3873 for (elt = obj; elt != 0;
3874 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3875 || TREE_CODE (elt) == COND_EXPR)
3876 ? TREE_OPERAND (elt, 1)
3877 : (REFERENCE_CLASS_P (elt)
3878 || UNARY_CLASS_P (elt)
3879 || BINARY_CLASS_P (elt)
3880 || VL_EXP_CLASS_P (elt)
3881 || EXPRESSION_CLASS_P (elt))
3882 ? TREE_OPERAND (elt, 0) : 0))
3883 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3884 return elt;
3885
3886 for (elt = obj; elt != 0;
3887 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3888 || TREE_CODE (elt) == COND_EXPR)
3889 ? TREE_OPERAND (elt, 1)
3890 : (REFERENCE_CLASS_P (elt)
3891 || UNARY_CLASS_P (elt)
3892 || BINARY_CLASS_P (elt)
3893 || VL_EXP_CLASS_P (elt)
3894 || EXPRESSION_CLASS_P (elt))
3895 ? TREE_OPERAND (elt, 0) : 0))
3896 if (POINTER_TYPE_P (TREE_TYPE (elt))
3897 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3898 == need_type))
3899 return fold_build1 (INDIRECT_REF, need_type, elt);
3900
3901 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3902 survives until RTL generation, there will be an error. */
3903 return exp;
3904 }
3905
3906 /* TREE_LIST is special because we need to look at TREE_VALUE
3907 and TREE_CHAIN, not TREE_OPERANDS. */
3908 else if (code == TREE_LIST)
3909 {
3910 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3911 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3912 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3913 return exp;
3914
3915 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3916 }
3917 else
3918 switch (TREE_CODE_CLASS (code))
3919 {
3920 case tcc_constant:
3921 case tcc_declaration:
3922 return exp;
3923
3924 case tcc_exceptional:
3925 case tcc_unary:
3926 case tcc_binary:
3927 case tcc_comparison:
3928 case tcc_expression:
3929 case tcc_reference:
3930 case tcc_statement:
3931 switch (TREE_CODE_LENGTH (code))
3932 {
3933 case 0:
3934 return exp;
3935
3936 case 1:
3937 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3938 if (op0 == TREE_OPERAND (exp, 0))
3939 return exp;
3940
3941 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3942 break;
3943
3944 case 2:
3945 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3946 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3947
3948 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3949 return exp;
3950
3951 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3952 break;
3953
3954 case 3:
3955 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3956 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3957 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3958
3959 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3960 && op2 == TREE_OPERAND (exp, 2))
3961 return exp;
3962
3963 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3964 break;
3965
3966 case 4:
3967 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3968 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3969 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3970 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3971
3972 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3973 && op2 == TREE_OPERAND (exp, 2)
3974 && op3 == TREE_OPERAND (exp, 3))
3975 return exp;
3976
3977 new_tree
3978 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3979 break;
3980
3981 default:
3982 gcc_unreachable ();
3983 }
3984 break;
3985
3986 case tcc_vl_exp:
3987 {
3988 int i;
3989
3990 new_tree = NULL_TREE;
3991
3992 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3993 {
3994 tree op = TREE_OPERAND (exp, i);
3995 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3996 if (new_op != op)
3997 {
3998 if (!new_tree)
3999 new_tree = copy_node (exp);
4000 TREE_OPERAND (new_tree, i) = new_op;
4001 }
4002 }
4003
4004 if (new_tree)
4005 {
4006 new_tree = fold (new_tree);
4007 if (TREE_CODE (new_tree) == CALL_EXPR)
4008 process_call_operands (new_tree);
4009 }
4010 else
4011 return exp;
4012 }
4013 break;
4014
4015 default:
4016 gcc_unreachable ();
4017 }
4018
4019 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4020
4021 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4022 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4023
4024 return new_tree;
4025 }
4026 \f
4027
4028 /* Subroutine of stabilize_reference; this is called for subtrees of
4029 references. Any expression with side-effects must be put in a SAVE_EXPR
4030 to ensure that it is only evaluated once.
4031
4032 We don't put SAVE_EXPR nodes around everything, because assigning very
4033 simple expressions to temporaries causes us to miss good opportunities
4034 for optimizations. Among other things, the opportunity to fold in the
4035 addition of a constant into an addressing mode often gets lost, e.g.
4036 "y[i+1] += x;". In general, we take the approach that we should not make
4037 an assignment unless we are forced into it - i.e., that any non-side effect
4038 operator should be allowed, and that cse should take care of coalescing
4039 multiple utterances of the same expression should that prove fruitful. */
4040
4041 static tree
4042 stabilize_reference_1 (tree e)
4043 {
4044 tree result;
4045 enum tree_code code = TREE_CODE (e);
4046
4047 /* We cannot ignore const expressions because it might be a reference
4048 to a const array whose index contains side-effects. But we can
4049 ignore things that are actually constant or that have already been
4050 handled by this function. */
4051
4052 if (tree_invariant_p (e))
4053 return e;
4054
4055 switch (TREE_CODE_CLASS (code))
4056 {
4057 case tcc_exceptional:
4058 case tcc_type:
4059 case tcc_declaration:
4060 case tcc_comparison:
4061 case tcc_statement:
4062 case tcc_expression:
4063 case tcc_reference:
4064 case tcc_vl_exp:
4065 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4066 so that it will only be evaluated once. */
4067 /* The reference (r) and comparison (<) classes could be handled as
4068 below, but it is generally faster to only evaluate them once. */
4069 if (TREE_SIDE_EFFECTS (e))
4070 return save_expr (e);
4071 return e;
4072
4073 case tcc_constant:
4074 /* Constants need no processing. In fact, we should never reach
4075 here. */
4076 return e;
4077
4078 case tcc_binary:
4079 /* Division is slow and tends to be compiled with jumps,
4080 especially the division by powers of 2 that is often
4081 found inside of an array reference. So do it just once. */
4082 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4083 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4084 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4085 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4086 return save_expr (e);
4087 /* Recursively stabilize each operand. */
4088 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4089 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4090 break;
4091
4092 case tcc_unary:
4093 /* Recursively stabilize each operand. */
4094 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4095 break;
4096
4097 default:
4098 gcc_unreachable ();
4099 }
4100
4101 TREE_TYPE (result) = TREE_TYPE (e);
4102 TREE_READONLY (result) = TREE_READONLY (e);
4103 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4104 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4105
4106 return result;
4107 }
4108
4109 /* Stabilize a reference so that we can use it any number of times
4110 without causing its operands to be evaluated more than once.
4111 Returns the stabilized reference. This works by means of save_expr,
4112 so see the caveats in the comments about save_expr.
4113
4114 Also allows conversion expressions whose operands are references.
4115 Any other kind of expression is returned unchanged. */
4116
4117 tree
4118 stabilize_reference (tree ref)
4119 {
4120 tree result;
4121 enum tree_code code = TREE_CODE (ref);
4122
4123 switch (code)
4124 {
4125 case VAR_DECL:
4126 case PARM_DECL:
4127 case RESULT_DECL:
4128 /* No action is needed in this case. */
4129 return ref;
4130
4131 CASE_CONVERT:
4132 case FLOAT_EXPR:
4133 case FIX_TRUNC_EXPR:
4134 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4135 break;
4136
4137 case INDIRECT_REF:
4138 result = build_nt (INDIRECT_REF,
4139 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4140 break;
4141
4142 case COMPONENT_REF:
4143 result = build_nt (COMPONENT_REF,
4144 stabilize_reference (TREE_OPERAND (ref, 0)),
4145 TREE_OPERAND (ref, 1), NULL_TREE);
4146 break;
4147
4148 case BIT_FIELD_REF:
4149 result = build_nt (BIT_FIELD_REF,
4150 stabilize_reference (TREE_OPERAND (ref, 0)),
4151 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4152 break;
4153
4154 case ARRAY_REF:
4155 result = build_nt (ARRAY_REF,
4156 stabilize_reference (TREE_OPERAND (ref, 0)),
4157 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4158 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4159 break;
4160
4161 case ARRAY_RANGE_REF:
4162 result = build_nt (ARRAY_RANGE_REF,
4163 stabilize_reference (TREE_OPERAND (ref, 0)),
4164 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4165 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4166 break;
4167
4168 case COMPOUND_EXPR:
4169 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4170 it wouldn't be ignored. This matters when dealing with
4171 volatiles. */
4172 return stabilize_reference_1 (ref);
4173
4174 /* If arg isn't a kind of lvalue we recognize, make no change.
4175 Caller should recognize the error for an invalid lvalue. */
4176 default:
4177 return ref;
4178
4179 case ERROR_MARK:
4180 return error_mark_node;
4181 }
4182
4183 TREE_TYPE (result) = TREE_TYPE (ref);
4184 TREE_READONLY (result) = TREE_READONLY (ref);
4185 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4186 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4187
4188 return result;
4189 }
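
/* Editorial sketch, not part of the original sources: how a front end might
   use stabilize_reference when expanding "ref += val", so that the operands
   of REF are evaluated only once even though REF appears on both sides of
   the assignment.  The helper name is hypothetical and assumes REF has an
   arithmetic (non-pointer) type.  */

static tree ATTRIBUTE_UNUSED
example_build_compound_assignment (tree ref, tree val)
{
  tree lhs = stabilize_reference (ref);
  tree sum = build2 (PLUS_EXPR, TREE_TYPE (lhs), lhs, val);
  return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, sum);
}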
4190 \f
4191 /* Low-level constructors for expressions. */
4192
4193 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4194 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4195
4196 void
4197 recompute_tree_invariant_for_addr_expr (tree t)
4198 {
4199 tree node;
4200 bool tc = true, se = false;
4201
4202 /* We start out assuming this address is both invariant and constant and
4203 that it has no side effects. Now go down any handled components and see if
4204 any of them involve offsets that are either non-constant or non-invariant.
4205 Also check for side-effects.
4206
4207 ??? Note that this code makes no attempt to deal with the case where
4208 taking the address of something causes a copy due to misalignment. */
4209
4210 #define UPDATE_FLAGS(NODE) \
4211 do { tree _node = (NODE); \
4212 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4213 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4214
4215 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4216 node = TREE_OPERAND (node, 0))
4217 {
4218 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4219 array reference (probably made temporarily by the G++ front end),
4220 so ignore all the operands. */
4221 if ((TREE_CODE (node) == ARRAY_REF
4222 || TREE_CODE (node) == ARRAY_RANGE_REF)
4223 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4224 {
4225 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4226 if (TREE_OPERAND (node, 2))
4227 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4228 if (TREE_OPERAND (node, 3))
4229 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4230 }
4231 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4232 FIELD_DECL, apparently. The G++ front end can put something else
4233 there, at least temporarily. */
4234 else if (TREE_CODE (node) == COMPONENT_REF
4235 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4236 {
4237 if (TREE_OPERAND (node, 2))
4238 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4239 }
4240 }
4241
4242 node = lang_hooks.expr_to_decl (node, &tc, &se);
4243
4244 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4245 the address, since &(*a)->b is a form of addition. If it's a constant, the
4246 address is constant too. If it's a decl, its address is constant if the
4247 decl is static. Everything else is not constant and, furthermore,
4248 taking the address of a volatile variable is not volatile. */
4249 if (TREE_CODE (node) == INDIRECT_REF
4250 || TREE_CODE (node) == MEM_REF)
4251 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4252 else if (CONSTANT_CLASS_P (node))
4253 ;
4254 else if (DECL_P (node))
4255 tc &= (staticp (node) != NULL_TREE);
4256 else
4257 {
4258 tc = false;
4259 se |= TREE_SIDE_EFFECTS (node);
4260 }
4261
4262
4263 TREE_CONSTANT (t) = tc;
4264 TREE_SIDE_EFFECTS (t) = se;
4265 #undef UPDATE_FLAGS
4266 }
4267
4268 /* Build an expression of code CODE, data type TYPE, and operands as
4269 specified. Expressions and reference nodes can be created this way.
4270 Constants, decls, types and misc nodes cannot be.
4271
4272 We define 6 non-variadic functions, from 0 to 5 arguments. This is
4273 enough for all extant tree codes. */
4274
4275 tree
4276 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4277 {
4278 tree t;
4279
4280 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4281
4282 t = make_node_stat (code PASS_MEM_STAT);
4283 TREE_TYPE (t) = tt;
4284
4285 return t;
4286 }
4287
4288 tree
4289 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4290 {
4291 int length = sizeof (struct tree_exp);
4292 tree t;
4293
4294 record_node_allocation_statistics (code, length);
4295
4296 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4297
4298 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4299
4300 memset (t, 0, sizeof (struct tree_common));
4301
4302 TREE_SET_CODE (t, code);
4303
4304 TREE_TYPE (t) = type;
4305 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4306 TREE_OPERAND (t, 0) = node;
4307 if (node && !TYPE_P (node))
4308 {
4309 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4310 TREE_READONLY (t) = TREE_READONLY (node);
4311 }
4312
4313 if (TREE_CODE_CLASS (code) == tcc_statement)
4314 TREE_SIDE_EFFECTS (t) = 1;
4315 else switch (code)
4316 {
4317 case VA_ARG_EXPR:
4318 /* All of these have side-effects, no matter what their
4319 operands are. */
4320 TREE_SIDE_EFFECTS (t) = 1;
4321 TREE_READONLY (t) = 0;
4322 break;
4323
4324 case INDIRECT_REF:
4325 /* Whether a dereference is readonly has nothing to do with whether
4326 its operand is readonly. */
4327 TREE_READONLY (t) = 0;
4328 break;
4329
4330 case ADDR_EXPR:
4331 if (node)
4332 recompute_tree_invariant_for_addr_expr (t);
4333 break;
4334
4335 default:
4336 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4337 && node && !TYPE_P (node)
4338 && TREE_CONSTANT (node))
4339 TREE_CONSTANT (t) = 1;
4340 if (TREE_CODE_CLASS (code) == tcc_reference
4341 && node && TREE_THIS_VOLATILE (node))
4342 TREE_THIS_VOLATILE (t) = 1;
4343 break;
4344 }
4345
4346 return t;
4347 }
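
/* Editorial sketch, not part of the original sources: taking the address of
   a declaration with build1.  Because build1_stat calls
   recompute_tree_invariant_for_addr_expr for ADDR_EXPR, the result has
   TREE_CONSTANT set when DECL is a static decl.  The helper name is
   hypothetical; a real caller would also make sure DECL is marked
   addressable.  */

static tree ATTRIBUTE_UNUSED
example_take_address (tree decl)
{
  tree ptr_type = build_pointer_type (TREE_TYPE (decl));
  return build1 (ADDR_EXPR, ptr_type, decl);
}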
4348
4349 #define PROCESS_ARG(N) \
4350 do { \
4351 TREE_OPERAND (t, N) = arg##N; \
4352 if (arg##N && !TYPE_P (arg##N)) \
4353 { \
4354 if (TREE_SIDE_EFFECTS (arg##N)) \
4355 side_effects = 1; \
4356 if (!TREE_READONLY (arg##N) \
4357 && !CONSTANT_CLASS_P (arg##N)) \
4358 (void) (read_only = 0); \
4359 if (!TREE_CONSTANT (arg##N)) \
4360 (void) (constant = 0); \
4361 } \
4362 } while (0)
4363
4364 tree
4365 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4366 {
4367 bool constant, read_only, side_effects;
4368 tree t;
4369
4370 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4371
4372 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4373 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4374 /* When sizetype precision doesn't match that of pointers
4375 we need to be able to build explicit extensions or truncations
4376 of the offset argument. */
4377 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4378 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4379 && TREE_CODE (arg1) == INTEGER_CST);
4380
4381 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4382 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4383 && ptrofftype_p (TREE_TYPE (arg1)));
4384
4385 t = make_node_stat (code PASS_MEM_STAT);
4386 TREE_TYPE (t) = tt;
4387
4388 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4389 result based on those same flags for the arguments. But if the
4390 arguments aren't really even `tree' expressions, we shouldn't be trying
4391 to do this. */
4392
4393 /* Expressions without side effects may be constant if their
4394 arguments are as well. */
4395 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4396 || TREE_CODE_CLASS (code) == tcc_binary);
4397 read_only = 1;
4398 side_effects = TREE_SIDE_EFFECTS (t);
4399
4400 PROCESS_ARG (0);
4401 PROCESS_ARG (1);
4402
4403 TREE_SIDE_EFFECTS (t) = side_effects;
4404 if (code == MEM_REF)
4405 {
4406 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4407 {
4408 tree o = TREE_OPERAND (arg0, 0);
4409 TREE_READONLY (t) = TREE_READONLY (o);
4410 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4411 }
4412 }
4413 else
4414 {
4415 TREE_READONLY (t) = read_only;
4416 TREE_CONSTANT (t) = constant;
4417 TREE_THIS_VOLATILE (t)
4418 = (TREE_CODE_CLASS (code) == tcc_reference
4419 && arg0 && TREE_THIS_VOLATILE (arg0));
4420 }
4421
4422 return t;
4423 }
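
/* Editorial sketch, not part of the original sources: building a pointer
   increment with build2.  POINTER_PLUS_EXPR takes a pointer-typed first
   operand and a sizetype-compatible byte offset, which is what the
   assertions above enforce.  The helper name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_pointer_increment (tree ptr, HOST_WIDE_INT nbytes)
{
  tree offset = size_int (nbytes);
  return build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr, offset);
}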
4424
4425
4426 tree
4427 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4428 tree arg2 MEM_STAT_DECL)
4429 {
4430 bool constant, read_only, side_effects;
4431 tree t;
4432
4433 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4434 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4435
4436 t = make_node_stat (code PASS_MEM_STAT);
4437 TREE_TYPE (t) = tt;
4438
4439 read_only = 1;
4440
4441 /* As a special exception, if COND_EXPR has NULL branches, we
4442 assume that it is a gimple statement and always consider
4443 it to have side effects. */
4444 if (code == COND_EXPR
4445 && tt == void_type_node
4446 && arg1 == NULL_TREE
4447 && arg2 == NULL_TREE)
4448 side_effects = true;
4449 else
4450 side_effects = TREE_SIDE_EFFECTS (t);
4451
4452 PROCESS_ARG (0);
4453 PROCESS_ARG (1);
4454 PROCESS_ARG (2);
4455
4456 if (code == COND_EXPR)
4457 TREE_READONLY (t) = read_only;
4458
4459 TREE_SIDE_EFFECTS (t) = side_effects;
4460 TREE_THIS_VOLATILE (t)
4461 = (TREE_CODE_CLASS (code) == tcc_reference
4462 && arg0 && TREE_THIS_VOLATILE (arg0));
4463
4464 return t;
4465 }
4466
4467 tree
4468 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4469 tree arg2, tree arg3 MEM_STAT_DECL)
4470 {
4471 bool constant, read_only, side_effects;
4472 tree t;
4473
4474 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4475
4476 t = make_node_stat (code PASS_MEM_STAT);
4477 TREE_TYPE (t) = tt;
4478
4479 side_effects = TREE_SIDE_EFFECTS (t);
4480
4481 PROCESS_ARG (0);
4482 PROCESS_ARG (1);
4483 PROCESS_ARG (2);
4484 PROCESS_ARG (3);
4485
4486 TREE_SIDE_EFFECTS (t) = side_effects;
4487 TREE_THIS_VOLATILE (t)
4488 = (TREE_CODE_CLASS (code) == tcc_reference
4489 && arg0 && TREE_THIS_VOLATILE (arg0));
4490
4491 return t;
4492 }
4493
4494 tree
4495 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4496 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4497 {
4498 bool constant, read_only, side_effects;
4499 tree t;
4500
4501 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4502
4503 t = make_node_stat (code PASS_MEM_STAT);
4504 TREE_TYPE (t) = tt;
4505
4506 side_effects = TREE_SIDE_EFFECTS (t);
4507
4508 PROCESS_ARG (0);
4509 PROCESS_ARG (1);
4510 PROCESS_ARG (2);
4511 PROCESS_ARG (3);
4512 PROCESS_ARG (4);
4513
4514 TREE_SIDE_EFFECTS (t) = side_effects;
4515 if (code == TARGET_MEM_REF)
4516 {
4517 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4518 {
4519 tree o = TREE_OPERAND (arg0, 0);
4520 TREE_READONLY (t) = TREE_READONLY (o);
4521 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4522 }
4523 }
4524 else
4525 TREE_THIS_VOLATILE (t)
4526 = (TREE_CODE_CLASS (code) == tcc_reference
4527 && arg0 && TREE_THIS_VOLATILE (arg0));
4528
4529 return t;
4530 }
4531
4532 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4533 on the pointer PTR. */
4534
4535 tree
4536 build_simple_mem_ref_loc (location_t loc, tree ptr)
4537 {
4538 HOST_WIDE_INT offset = 0;
4539 tree ptype = TREE_TYPE (ptr);
4540 tree tem;
4541 /* For convenience allow addresses that collapse to a simple base
4542 and offset. */
4543 if (TREE_CODE (ptr) == ADDR_EXPR
4544 && (handled_component_p (TREE_OPERAND (ptr, 0))
4545 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4546 {
4547 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4548 gcc_assert (ptr);
4549 ptr = build_fold_addr_expr (ptr);
4550 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4551 }
4552 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4553 ptr, build_int_cst (ptype, offset));
4554 SET_EXPR_LOCATION (tem, loc);
4555 return tem;
4556 }
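
/* Editorial sketch, not part of the original sources: dereferencing a
   pointer with the semantics of a plain *PTR.  The build_simple_mem_ref
   macro in tree.h wraps this function with UNKNOWN_LOCATION.  The helper
   name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_load_through_pointer (location_t loc, tree ptr)
{
  /* The resulting MEM_REF carries a zero offset of pointer type, as
     constructed by build_simple_mem_ref_loc above.  */
  return build_simple_mem_ref_loc (loc, ptr);
}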
4557
4558 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4559
4560 offset_int
4561 mem_ref_offset (const_tree t)
4562 {
4563 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4564 }
4565
4566 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4567 offsetted by OFFSET units. */
4568
4569 tree
4570 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4571 {
4572 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4573 build_fold_addr_expr (base),
4574 build_int_cst (ptr_type_node, offset));
4575 tree addr = build1 (ADDR_EXPR, type, ref);
4576 recompute_tree_invariant_for_addr_expr (addr);
4577 return addr;
4578 }
4579
4580 /* Similar to the build<N> functions above, except don't specify the
4581 TREE_TYPE and leave the TREE_SIDE_EFFECTS as 0.
4582 It is permissible for arguments to be null,
4583 or even garbage if their values do not matter. */
4584
4585 tree
4586 build_nt (enum tree_code code, ...)
4587 {
4588 tree t;
4589 int length;
4590 int i;
4591 va_list p;
4592
4593 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4594
4595 va_start (p, code);
4596
4597 t = make_node (code);
4598 length = TREE_CODE_LENGTH (code);
4599
4600 for (i = 0; i < length; i++)
4601 TREE_OPERAND (t, i) = va_arg (p, tree);
4602
4603 va_end (p);
4604 return t;
4605 }
4606
4607 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4608 tree vec. */
4609
4610 tree
4611 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4612 {
4613 tree ret, t;
4614 unsigned int ix;
4615
4616 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4617 CALL_EXPR_FN (ret) = fn;
4618 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4619 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4620 CALL_EXPR_ARG (ret, ix) = t;
4621 return ret;
4622 }
4623 \f
4624 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4625 We do NOT enter this node in any sort of symbol table.
4626
4627 LOC is the location of the decl.
4628
4629 layout_decl is used to set up the decl's storage layout.
4630 Other slots are initialized to 0 or null pointers. */
4631
4632 tree
4633 build_decl_stat (location_t loc, enum tree_code code, tree name,
4634 tree type MEM_STAT_DECL)
4635 {
4636 tree t;
4637
4638 t = make_node_stat (code PASS_MEM_STAT);
4639 DECL_SOURCE_LOCATION (t) = loc;
4640
4641 /* if (type == error_mark_node)
4642 type = integer_type_node; */
4643 /* That is not done, deliberately, so that having error_mark_node
4644 as the type can suppress useless errors in the use of this variable. */
4645
4646 DECL_NAME (t) = name;
4647 TREE_TYPE (t) = type;
4648
4649 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4650 layout_decl (t, 0);
4651
4652 return t;
4653 }
4654
4655 /* Builds and returns function declaration with NAME and TYPE. */
4656
4657 tree
4658 build_fn_decl (const char *name, tree type)
4659 {
4660 tree id = get_identifier (name);
4661 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4662
4663 DECL_EXTERNAL (decl) = 1;
4664 TREE_PUBLIC (decl) = 1;
4665 DECL_ARTIFICIAL (decl) = 1;
4666 TREE_NOTHROW (decl) = 1;
4667
4668 return decl;
4669 }
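
/* Editorial sketch, not part of the original sources: declaring an external
   "void my_helper (void)" function with build_fn_decl.  The name
   "my_helper" is made up; build_function_type_list is the usual way to
   construct the FUNCTION_TYPE argument.  */

static tree ATTRIBUTE_UNUSED
example_declare_runtime_helper (void)
{
  tree fntype = build_function_type_list (void_type_node, NULL_TREE);
  return build_fn_decl ("my_helper", fntype);
}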
4670
4671 vec<tree, va_gc> *all_translation_units;
4672
4673 /* Builds a new translation-unit decl with name NAME, queues it in the
4674 global list of translation-unit decls and returns it. */
4675
4676 tree
4677 build_translation_unit_decl (tree name)
4678 {
4679 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4680 name, NULL_TREE);
4681 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4682 vec_safe_push (all_translation_units, tu);
4683 return tu;
4684 }
4685
4686 \f
4687 /* BLOCK nodes are used to represent the structure of binding contours
4688 and declarations, once those contours have been exited and their contents
4689 compiled. This information is used for outputting debugging info. */
4690
4691 tree
4692 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4693 {
4694 tree block = make_node (BLOCK);
4695
4696 BLOCK_VARS (block) = vars;
4697 BLOCK_SUBBLOCKS (block) = subblocks;
4698 BLOCK_SUPERCONTEXT (block) = supercontext;
4699 BLOCK_CHAIN (block) = chain;
4700 return block;
4701 }
4702
4703 \f
4704 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4705
4706 LOC is the location to use in tree T. */
4707
4708 void
4709 protected_set_expr_location (tree t, location_t loc)
4710 {
4711 if (CAN_HAVE_LOCATION_P (t))
4712 SET_EXPR_LOCATION (t, loc);
4713 }
4714 \f
4715 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4716 is ATTRIBUTE. */
4717
4718 tree
4719 build_decl_attribute_variant (tree ddecl, tree attribute)
4720 {
4721 DECL_ATTRIBUTES (ddecl) = attribute;
4722 return ddecl;
4723 }
4724
4725 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4726 is ATTRIBUTE and its qualifiers are QUALS.
4727
4728 Record such modified types already made so we don't make duplicates. */
4729
4730 tree
4731 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4732 {
4733 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4734 {
4735 inchash::hash hstate;
4736 tree ntype;
4737 int i;
4738 tree t;
4739 enum tree_code code = TREE_CODE (ttype);
4740
4741 /* Building a distinct copy of a tagged type is inappropriate; it
4742 causes breakage in code that expects there to be a one-to-one
4743 relationship between a struct and its fields.
4744 build_duplicate_type is another solution (as used in
4745 handle_transparent_union_attribute), but that doesn't play well
4746 with the stronger C++ type identity model. */
4747 if (TREE_CODE (ttype) == RECORD_TYPE
4748 || TREE_CODE (ttype) == UNION_TYPE
4749 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4750 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4751 {
4752 warning (OPT_Wattributes,
4753 "ignoring attributes applied to %qT after definition",
4754 TYPE_MAIN_VARIANT (ttype));
4755 return build_qualified_type (ttype, quals);
4756 }
4757
4758 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4759 ntype = build_distinct_type_copy (ttype);
4760
4761 TYPE_ATTRIBUTES (ntype) = attribute;
4762
4763 hstate.add_int (code);
4764 if (TREE_TYPE (ntype))
4765 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4766 attribute_hash_list (attribute, hstate);
4767
4768 switch (TREE_CODE (ntype))
4769 {
4770 case FUNCTION_TYPE:
4771 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4772 break;
4773 case ARRAY_TYPE:
4774 if (TYPE_DOMAIN (ntype))
4775 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4776 break;
4777 case INTEGER_TYPE:
4778 t = TYPE_MAX_VALUE (ntype);
4779 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4780 hstate.add_object (TREE_INT_CST_ELT (t, i));
4781 break;
4782 case REAL_TYPE:
4783 case FIXED_POINT_TYPE:
4784 {
4785 unsigned int precision = TYPE_PRECISION (ntype);
4786 hstate.add_object (precision);
4787 }
4788 break;
4789 default:
4790 break;
4791 }
4792
4793 ntype = type_hash_canon (hstate.end(), ntype);
4794
4795 /* If the target-dependent attributes make NTYPE different from
4796 its canonical type, we will need to use structural equality
4797 checks for this type. */
4798 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4799 || !comp_type_attributes (ntype, ttype))
4800 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4801 else if (TYPE_CANONICAL (ntype) == ntype)
4802 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4803
4804 ttype = build_qualified_type (ntype, quals);
4805 }
4806 else if (TYPE_QUALS (ttype) != quals)
4807 ttype = build_qualified_type (ttype, quals);
4808
4809 return ttype;
4810 }
4811
4812 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4813 the same. */
4814
4815 static bool
4816 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4817 {
4818 tree cl1, cl2;
4819 for (cl1 = clauses1, cl2 = clauses2;
4820 cl1 && cl2;
4821 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4822 {
4823 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4824 return false;
4825 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4826 {
4827 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4828 OMP_CLAUSE_DECL (cl2)) != 1)
4829 return false;
4830 }
4831 switch (OMP_CLAUSE_CODE (cl1))
4832 {
4833 case OMP_CLAUSE_ALIGNED:
4834 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4835 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4836 return false;
4837 break;
4838 case OMP_CLAUSE_LINEAR:
4839 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4840 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4841 return false;
4842 break;
4843 case OMP_CLAUSE_SIMDLEN:
4844 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4845 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4846 return false;
4847 default:
4848 break;
4849 }
4850 }
4851 return true;
4852 }
4853
4854 /* Compare two constructor-element-type constants. Return true if the lists
4855 are known to be equal; otherwise return false. */
4856
4857 static bool
4858 simple_cst_list_equal (const_tree l1, const_tree l2)
4859 {
4860 while (l1 != NULL_TREE && l2 != NULL_TREE)
4861 {
4862 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4863 return false;
4864
4865 l1 = TREE_CHAIN (l1);
4866 l2 = TREE_CHAIN (l2);
4867 }
4868
4869 return l1 == l2;
4870 }
4871
4872 /* Compare two attributes for their value identity. Return true if the
4873 attribute values are known to be equal; otherwise return false.
4874 */
4875
4876 static bool
4877 attribute_value_equal (const_tree attr1, const_tree attr2)
4878 {
4879 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4880 return true;
4881
4882 if (TREE_VALUE (attr1) != NULL_TREE
4883 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4884 && TREE_VALUE (attr2) != NULL
4885 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4886 return (simple_cst_list_equal (TREE_VALUE (attr1),
4887 TREE_VALUE (attr2)) == 1);
4888
4889 if ((flag_openmp || flag_openmp_simd)
4890 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4891 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4892 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4893 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4894 TREE_VALUE (attr2));
4895
4896 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4897 }
4898
4899 /* Return 0 if the attributes for two types are incompatible, 1 if they
4900 are compatible, and 2 if they are nearly compatible (which causes a
4901 warning to be generated). */
4902 int
4903 comp_type_attributes (const_tree type1, const_tree type2)
4904 {
4905 const_tree a1 = TYPE_ATTRIBUTES (type1);
4906 const_tree a2 = TYPE_ATTRIBUTES (type2);
4907 const_tree a;
4908
4909 if (a1 == a2)
4910 return 1;
4911 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4912 {
4913 const struct attribute_spec *as;
4914 const_tree attr;
4915
4916 as = lookup_attribute_spec (get_attribute_name (a));
4917 if (!as || as->affects_type_identity == false)
4918 continue;
4919
4920 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4921 if (!attr || !attribute_value_equal (a, attr))
4922 break;
4923 }
4924 if (!a)
4925 {
4926 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4927 {
4928 const struct attribute_spec *as;
4929
4930 as = lookup_attribute_spec (get_attribute_name (a));
4931 if (!as || as->affects_type_identity == false)
4932 continue;
4933
4934 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4935 break;
4936 /* We don't need to compare trees again, as we did this
4937 already in the first loop. */
4938 }
4939 /* All attributes that affect type identity are equal, so
4940 there is no need to call the target hook for comparison. */
4941 if (!a)
4942 return 1;
4943 }
4944 /* As some type combinations - like default calling-convention - might
4945 be compatible, we have to call the target hook to get the final result. */
4946 return targetm.comp_type_attributes (type1, type2);
4947 }
4948
4949 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4950 is ATTRIBUTE.
4951
4952 Record such modified types already made so we don't make duplicates. */
4953
4954 tree
4955 build_type_attribute_variant (tree ttype, tree attribute)
4956 {
4957 return build_type_attribute_qual_variant (ttype, attribute,
4958 TYPE_QUALS (ttype));
4959 }
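
/* Editorial sketch, not part of the original sources: attaching a "packed"
   attribute with no arguments to TYPE.  Attribute lists are TREE_LISTs
   whose TREE_PURPOSE is the attribute name, so tree_cons prepends the new
   attribute to the existing ones.  The helper name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_add_packed_attribute (tree type)
{
  tree attrs = tree_cons (get_identifier ("packed"), NULL_TREE,
                          TYPE_ATTRIBUTES (type));
  return build_type_attribute_variant (type, attrs);
}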
4960
4961
4962 /* Reset the expression *EXPR_P, a size or position.
4963
4964 ??? We could reset all non-constant sizes or positions. But it's cheap
4965 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4966
4967 We need to reset self-referential sizes or positions because they cannot
4968 be gimplified and thus can contain a CALL_EXPR after the gimplification
4969 is finished, which will run afoul of LTO streaming. And they need to be
4970 reset to something essentially dummy but not constant, so as to preserve
4971 the properties of the object they are attached to. */
4972
4973 static inline void
4974 free_lang_data_in_one_sizepos (tree *expr_p)
4975 {
4976 tree expr = *expr_p;
4977 if (CONTAINS_PLACEHOLDER_P (expr))
4978 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4979 }
4980
4981
4982 /* Reset all the fields in a binfo node BINFO. We only keep
4983 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4984
4985 static void
4986 free_lang_data_in_binfo (tree binfo)
4987 {
4988 unsigned i;
4989 tree t;
4990
4991 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4992
4993 BINFO_VIRTUALS (binfo) = NULL_TREE;
4994 BINFO_BASE_ACCESSES (binfo) = NULL;
4995 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4996 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4997
4998 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4999 free_lang_data_in_binfo (t);
5000 }
5001
5002
5003 /* Reset all language specific information still present in TYPE. */
5004
5005 static void
5006 free_lang_data_in_type (tree type)
5007 {
5008 gcc_assert (TYPE_P (type));
5009
5010 /* Give the FE a chance to remove its own data first. */
5011 lang_hooks.free_lang_data (type);
5012
5013 TREE_LANG_FLAG_0 (type) = 0;
5014 TREE_LANG_FLAG_1 (type) = 0;
5015 TREE_LANG_FLAG_2 (type) = 0;
5016 TREE_LANG_FLAG_3 (type) = 0;
5017 TREE_LANG_FLAG_4 (type) = 0;
5018 TREE_LANG_FLAG_5 (type) = 0;
5019 TREE_LANG_FLAG_6 (type) = 0;
5020
5021 if (TREE_CODE (type) == FUNCTION_TYPE)
5022 {
5023 /* Remove the const and volatile qualifiers from arguments. The
5024 C++ front end removes them, but the C front end does not,
5025 leading to false ODR violation errors when merging two
5026 instances of the same function signature compiled by
5027 different front ends. */
5028 tree p;
5029
5030 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5031 {
5032 tree arg_type = TREE_VALUE (p);
5033
5034 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5035 {
5036 int quals = TYPE_QUALS (arg_type)
5037 & ~TYPE_QUAL_CONST
5038 & ~TYPE_QUAL_VOLATILE;
5039 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5040 free_lang_data_in_type (TREE_VALUE (p));
5041 }
5042 }
5043 }
5044
5045 /* Remove members that are neither FIELD_DECLs nor TYPE_DECLs from the
5046 field list of an aggregate. These occur in C++. */
5047 if (RECORD_OR_UNION_TYPE_P (type))
5048 {
5049 tree prev, member;
5050
5051 /* Note that TYPE_FIELDS can be shared across distinct
5052 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5053 to be removed, we cannot set its TREE_CHAIN to NULL.
5054 Otherwise, we would not be able to find all the other fields
5055 in the other instances of this TREE_TYPE.
5056
5057 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5058 prev = NULL_TREE;
5059 member = TYPE_FIELDS (type);
5060 while (member)
5061 {
5062 if (TREE_CODE (member) == FIELD_DECL
5063 || TREE_CODE (member) == TYPE_DECL)
5064 {
5065 if (prev)
5066 TREE_CHAIN (prev) = member;
5067 else
5068 TYPE_FIELDS (type) = member;
5069 prev = member;
5070 }
5071
5072 member = TREE_CHAIN (member);
5073 }
5074
5075 if (prev)
5076 TREE_CHAIN (prev) = NULL_TREE;
5077 else
5078 TYPE_FIELDS (type) = NULL_TREE;
5079
5080 TYPE_METHODS (type) = NULL_TREE;
5081 if (TYPE_BINFO (type))
5082 {
5083 free_lang_data_in_binfo (TYPE_BINFO (type));
5084 /* We need to preserve link to bases and virtual table for all
5085 polymorphic types to make devirtualization machinery working.
5086 Debug output cares only about bases, but output also
5087 virtual table pointers so merging of -fdevirtualize and
5088 -fno-devirtualize units is easier. */
5089 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5090 || !flag_devirtualize)
5091 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5092 && !BINFO_VTABLE (TYPE_BINFO (type)))
5093 || debug_info_level != DINFO_LEVEL_NONE))
5094 TYPE_BINFO (type) = NULL;
5095 }
5096 }
5097 else
5098 {
5099 /* For non-aggregate types, clear out the language slot (which
5100 overloads TYPE_BINFO). */
5101 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5102
5103 if (INTEGRAL_TYPE_P (type)
5104 || SCALAR_FLOAT_TYPE_P (type)
5105 || FIXED_POINT_TYPE_P (type))
5106 {
5107 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5108 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5109 }
5110 }
5111
5112 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5113 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5114
5115 if (TYPE_CONTEXT (type)
5116 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5117 {
5118 tree ctx = TYPE_CONTEXT (type);
5119 do
5120 {
5121 ctx = BLOCK_SUPERCONTEXT (ctx);
5122 }
5123 while (ctx && TREE_CODE (ctx) == BLOCK);
5124 TYPE_CONTEXT (type) = ctx;
5125 }
5126 }
5127
5128
5129 /* Return true if DECL may need an assembler name to be set. */
5130
5131 static inline bool
5132 need_assembler_name_p (tree decl)
5133 {
5134 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
5135 merging. */
5136 if (flag_lto_odr_type_mering
5137 && TREE_CODE (decl) == TYPE_DECL
5138 && DECL_NAME (decl)
5139 && decl == TYPE_NAME (TREE_TYPE (decl))
5140 && !is_lang_specific (TREE_TYPE (decl))
5141 && AGGREGATE_TYPE_P (TREE_TYPE (decl))
5142 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
5143 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
5144 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5145 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5146 if (TREE_CODE (decl) != FUNCTION_DECL
5147 && TREE_CODE (decl) != VAR_DECL)
5148 return false;
5149
5150 /* If DECL already has its assembler name set, it does not need a
5151 new one. */
5152 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5153 || DECL_ASSEMBLER_NAME_SET_P (decl))
5154 return false;
5155
5156 /* Abstract decls do not need an assembler name. */
5157 if (DECL_ABSTRACT_P (decl))
5158 return false;
5159
5160 /* For VAR_DECLs, only static, public and external symbols need an
5161 assembler name. */
5162 if (TREE_CODE (decl) == VAR_DECL
5163 && !TREE_STATIC (decl)
5164 && !TREE_PUBLIC (decl)
5165 && !DECL_EXTERNAL (decl))
5166 return false;
5167
5168 if (TREE_CODE (decl) == FUNCTION_DECL)
5169 {
5170 /* Do not set assembler name on builtins. Allow RTL expansion to
5171 decide whether to expand inline or via a regular call. */
5172 if (DECL_BUILT_IN (decl)
5173 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5174 return false;
5175
5176 /* Functions represented in the callgraph need an assembler name. */
5177 if (cgraph_node::get (decl) != NULL)
5178 return true;
5179
5180 /* Unused and not public functions don't need an assembler name. */
5181 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5182 return false;
5183 }
5184
5185 return true;
5186 }
5187
5188
5189 /* Reset all language specific information still present in symbol
5190 DECL. */
5191
5192 static void
5193 free_lang_data_in_decl (tree decl)
5194 {
5195 gcc_assert (DECL_P (decl));
5196
5197 /* Give the FE a chance to remove its own data first. */
5198 lang_hooks.free_lang_data (decl);
5199
5200 TREE_LANG_FLAG_0 (decl) = 0;
5201 TREE_LANG_FLAG_1 (decl) = 0;
5202 TREE_LANG_FLAG_2 (decl) = 0;
5203 TREE_LANG_FLAG_3 (decl) = 0;
5204 TREE_LANG_FLAG_4 (decl) = 0;
5205 TREE_LANG_FLAG_5 (decl) = 0;
5206 TREE_LANG_FLAG_6 (decl) = 0;
5207
5208 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5209 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5210 if (TREE_CODE (decl) == FIELD_DECL)
5211 {
5212 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5213 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5214 DECL_QUALIFIER (decl) = NULL_TREE;
5215 }
5216
5217 if (TREE_CODE (decl) == FUNCTION_DECL)
5218 {
5219 struct cgraph_node *node;
5220 if (!(node = cgraph_node::get (decl))
5221 || (!node->definition && !node->clones))
5222 {
5223 if (node)
5224 node->release_body ();
5225 else
5226 {
5227 release_function_body (decl);
5228 DECL_ARGUMENTS (decl) = NULL;
5229 DECL_RESULT (decl) = NULL;
5230 DECL_INITIAL (decl) = error_mark_node;
5231 }
5232 }
5233 if (gimple_has_body_p (decl))
5234 {
5235 tree t;
5236
5237 /* If DECL has a gimple body, then the context for its
5238 arguments must be DECL. Otherwise, it doesn't really
5239 matter, as we will not be emitting any code for DECL. In
5240 general, there may be other instances of DECL created by
5241 the front end and since PARM_DECLs are generally shared,
5242 their DECL_CONTEXT changes as the replicas of DECL are
5243 created. The only time where DECL_CONTEXT is important
5244 is for the FUNCTION_DECLs that have a gimple body (since
5245 the PARM_DECL will be used in the function's body). */
5246 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5247 DECL_CONTEXT (t) = decl;
5248 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5249 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5250 = target_option_default_node;
5251 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5252 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5253 = optimization_default_node;
5254 }
5255
5256 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5257 At this point, it is not needed anymore. */
5258 DECL_SAVED_TREE (decl) = NULL_TREE;
5259
5260 /* Clear the abstract origin if it refers to a method. Otherwise
5261 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5262 origin will not be output correctly. */
5263 if (DECL_ABSTRACT_ORIGIN (decl)
5264 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5265 && RECORD_OR_UNION_TYPE_P
5266 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5267 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5268
5269 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5270 DECL_VINDEX referring to itself into a vtable slot number as it
5271 should. Happens with functions that are copied and then forgotten
5272 about. Just clear it, it won't matter anymore. */
5273 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5274 DECL_VINDEX (decl) = NULL_TREE;
5275 }
5276 else if (TREE_CODE (decl) == VAR_DECL)
5277 {
5278 if ((DECL_EXTERNAL (decl)
5279 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5280 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5281 DECL_INITIAL (decl) = NULL_TREE;
5282 }
5283 else if (TREE_CODE (decl) == TYPE_DECL
5284 || TREE_CODE (decl) == FIELD_DECL)
5285 DECL_INITIAL (decl) = NULL_TREE;
5286 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5287 && DECL_INITIAL (decl)
5288 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5289 {
5290 /* Strip builtins from the translation-unit BLOCK. We still have targets
5291 without builtin_decl_explicit support; also, builtins are shared
5292 nodes, so we can't use TREE_CHAIN in multiple lists. */
5293 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5294 while (*nextp)
5295 {
5296 tree var = *nextp;
5297 if (TREE_CODE (var) == FUNCTION_DECL
5298 && DECL_BUILT_IN (var))
5299 *nextp = TREE_CHAIN (var);
5300 else
5301 nextp = &TREE_CHAIN (var);
5302 }
5303 }
5304 }
5305
5306
5307 /* Data used when collecting DECLs and TYPEs for language data removal. */
5308
5309 struct free_lang_data_d
5310 {
5311 /* Worklist to avoid excessive recursion. */
5312 vec<tree> worklist;
5313
5314 /* Set of traversed objects. Used to avoid duplicate visits. */
5315 hash_set<tree> *pset;
5316
5317 /* Array of symbols to process with free_lang_data_in_decl. */
5318 vec<tree> decls;
5319
5320 /* Array of types to process with free_lang_data_in_type. */
5321 vec<tree> types;
5322 };
5323
5324
5325 /* Save all language fields needed to generate proper debug information
5326 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5327
5328 static void
5329 save_debug_info_for_decl (tree t)
5330 {
5331 /*struct saved_debug_info_d *sdi;*/
5332
5333 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5334
5335 /* FIXME. Partial implementation for saving debug info removed. */
5336 }
5337
5338
5339 /* Save all language fields needed to generate proper debug information
5340 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5341
5342 static void
5343 save_debug_info_for_type (tree t)
5344 {
5345 /*struct saved_debug_info_d *sdi;*/
5346
5347 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5348
5349 /* FIXME. Partial implementation for saving debug info removed. */
5350 }
5351
5352
5353 /* Add type or decl T to one of the list of tree nodes that need their
5354 language data removed. The lists are held inside FLD. */
5355
5356 static void
5357 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5358 {
5359 if (DECL_P (t))
5360 {
5361 fld->decls.safe_push (t);
5362 if (debug_info_level > DINFO_LEVEL_TERSE)
5363 save_debug_info_for_decl (t);
5364 }
5365 else if (TYPE_P (t))
5366 {
5367 fld->types.safe_push (t);
5368 if (debug_info_level > DINFO_LEVEL_TERSE)
5369 save_debug_info_for_type (t);
5370 }
5371 else
5372 gcc_unreachable ();
5373 }
5374
5375 /* Push tree node T into FLD->WORKLIST. */
5376
5377 static inline void
5378 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5379 {
5380 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5381 fld->worklist.safe_push ((t));
5382 }
5383
5384
5385 /* Operand callback helper for free_lang_data_in_node. *TP is the
5386 subtree operand being considered. */
5387
5388 static tree
5389 find_decls_types_r (tree *tp, int *ws, void *data)
5390 {
5391 tree t = *tp;
5392 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5393
5394 if (TREE_CODE (t) == TREE_LIST)
5395 return NULL_TREE;
5396
5397 /* Language specific nodes will be removed, so there is no need
5398 to gather anything under them. */
5399 if (is_lang_specific (t))
5400 {
5401 *ws = 0;
5402 return NULL_TREE;
5403 }
5404
5405 if (DECL_P (t))
5406 {
5407 /* Note that walk_tree does not traverse every possible field in
5408 decls, so we have to do our own traversals here. */
5409 add_tree_to_fld_list (t, fld);
5410
5411 fld_worklist_push (DECL_NAME (t), fld);
5412 fld_worklist_push (DECL_CONTEXT (t), fld);
5413 fld_worklist_push (DECL_SIZE (t), fld);
5414 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5415
5416 /* We are going to remove everything under DECL_INITIAL for
5417 TYPE_DECLs. No point walking them. */
5418 if (TREE_CODE (t) != TYPE_DECL)
5419 fld_worklist_push (DECL_INITIAL (t), fld);
5420
5421 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5422 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5423
5424 if (TREE_CODE (t) == FUNCTION_DECL)
5425 {
5426 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5427 fld_worklist_push (DECL_RESULT (t), fld);
5428 }
5429 else if (TREE_CODE (t) == TYPE_DECL)
5430 {
5431 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5432 }
5433 else if (TREE_CODE (t) == FIELD_DECL)
5434 {
5435 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5436 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5437 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5438 fld_worklist_push (DECL_FCONTEXT (t), fld);
5439 }
5440
5441 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5442 && DECL_HAS_VALUE_EXPR_P (t))
5443 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5444
5445 if (TREE_CODE (t) != FIELD_DECL
5446 && TREE_CODE (t) != TYPE_DECL)
5447 fld_worklist_push (TREE_CHAIN (t), fld);
5448 *ws = 0;
5449 }
5450 else if (TYPE_P (t))
5451 {
5452 /* Note that walk_tree does not traverse every possible field in
5453 types, so we have to do our own traversals here. */
5454 add_tree_to_fld_list (t, fld);
5455
5456 if (!RECORD_OR_UNION_TYPE_P (t))
5457 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5458 fld_worklist_push (TYPE_SIZE (t), fld);
5459 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5460 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5461 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5462 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5463 fld_worklist_push (TYPE_NAME (t), fld);
5464 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5465 them and thus do not want to reach unused pointer types
5466 this way. */
5467 if (!POINTER_TYPE_P (t))
5468 fld_worklist_push (TYPE_MINVAL (t), fld);
5469 if (!RECORD_OR_UNION_TYPE_P (t))
5470 fld_worklist_push (TYPE_MAXVAL (t), fld);
5471 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5472 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5473 do not want to reach unused variants this way. */
5474 if (TYPE_CONTEXT (t))
5475 {
5476 tree ctx = TYPE_CONTEXT (t);
5477 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5478 So push that instead. */
5479 while (ctx && TREE_CODE (ctx) == BLOCK)
5480 ctx = BLOCK_SUPERCONTEXT (ctx);
5481 fld_worklist_push (ctx, fld);
5482 }
5483 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5484 want to reach unused types this way. */
5485
5486 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5487 {
5488 unsigned i;
5489 tree tem;
5490 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5491 fld_worklist_push (TREE_TYPE (tem), fld);
5492 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5493 if (tem
5494 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5495 && TREE_CODE (tem) == TREE_LIST)
5496 do
5497 {
5498 fld_worklist_push (TREE_VALUE (tem), fld);
5499 tem = TREE_CHAIN (tem);
5500 }
5501 while (tem);
5502 }
5503 if (RECORD_OR_UNION_TYPE_P (t))
5504 {
5505 tree tem;
5506 /* Push all TYPE_FIELDS - there can be interleaving interesting
5507 and non-interesting things. */
5508 tem = TYPE_FIELDS (t);
5509 while (tem)
5510 {
5511 if (TREE_CODE (tem) == FIELD_DECL
5512 || TREE_CODE (tem) == TYPE_DECL)
5513 fld_worklist_push (tem, fld);
5514 tem = TREE_CHAIN (tem);
5515 }
5516 }
5517
5518 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5519 *ws = 0;
5520 }
5521 else if (TREE_CODE (t) == BLOCK)
5522 {
5523 tree tem;
5524 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5525 fld_worklist_push (tem, fld);
5526 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5527 fld_worklist_push (tem, fld);
5528 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5529 }
5530
5531 if (TREE_CODE (t) != IDENTIFIER_NODE
5532 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5533 fld_worklist_push (TREE_TYPE (t), fld);
5534
5535 return NULL_TREE;
5536 }
5537
5538
5539 /* Find decls and types in T. */
5540
5541 static void
5542 find_decls_types (tree t, struct free_lang_data_d *fld)
5543 {
5544 while (1)
5545 {
5546 if (!fld->pset->contains (t))
5547 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5548 if (fld->worklist.is_empty ())
5549 break;
5550 t = fld->worklist.pop ();
5551 }
5552 }
5553
5554 /* Translate all the types in LIST with the corresponding runtime
5555 types. */
5556
5557 static tree
5558 get_eh_types_for_runtime (tree list)
5559 {
5560 tree head, prev;
5561
5562 if (list == NULL_TREE)
5563 return NULL_TREE;
5564
5565 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5566 prev = head;
5567 list = TREE_CHAIN (list);
5568 while (list)
5569 {
5570 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5571 TREE_CHAIN (prev) = n;
5572 prev = TREE_CHAIN (prev);
5573 list = TREE_CHAIN (list);
5574 }
5575
5576 return head;
5577 }
5578
5579
5580 /* Find decls and types referenced in EH region R and store them in
5581 FLD->DECLS and FLD->TYPES. */
5582
5583 static void
5584 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5585 {
5586 switch (r->type)
5587 {
5588 case ERT_CLEANUP:
5589 break;
5590
5591 case ERT_TRY:
5592 {
5593 eh_catch c;
5594
5595 /* The types referenced in each catch must first be changed to the
5596 EH types used at runtime. This removes references to FE types
5597 in the region. */
5598 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5599 {
5600 c->type_list = get_eh_types_for_runtime (c->type_list);
5601 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5602 }
5603 }
5604 break;
5605
5606 case ERT_ALLOWED_EXCEPTIONS:
5607 r->u.allowed.type_list
5608 = get_eh_types_for_runtime (r->u.allowed.type_list);
5609 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5610 break;
5611
5612 case ERT_MUST_NOT_THROW:
5613 walk_tree (&r->u.must_not_throw.failure_decl,
5614 find_decls_types_r, fld, fld->pset);
5615 break;
5616 }
5617 }
5618
5619
5620 /* Find decls and types referenced in cgraph node N and store them in
5621 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5622 look for *every* kind of DECL and TYPE node reachable from N,
5623 including those embedded inside types and decls (i.e., TYPE_DECLs,
5624 NAMESPACE_DECLs, etc). */
5625
5626 static void
5627 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5628 {
5629 basic_block bb;
5630 struct function *fn;
5631 unsigned ix;
5632 tree t;
5633
5634 find_decls_types (n->decl, fld);
5635
5636 if (!gimple_has_body_p (n->decl))
5637 return;
5638
5639 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5640
5641 fn = DECL_STRUCT_FUNCTION (n->decl);
5642
5643 /* Traverse locals. */
5644 FOR_EACH_LOCAL_DECL (fn, ix, t)
5645 find_decls_types (t, fld);
5646
5647 /* Traverse EH regions in FN. */
5648 {
5649 eh_region r;
5650 FOR_ALL_EH_REGION_FN (r, fn)
5651 find_decls_types_in_eh_region (r, fld);
5652 }
5653
5654 /* Traverse every statement in FN. */
5655 FOR_EACH_BB_FN (bb, fn)
5656 {
5657 gphi_iterator psi;
5658 gimple_stmt_iterator si;
5659 unsigned i;
5660
5661 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5662 {
5663 gphi *phi = psi.phi ();
5664
5665 for (i = 0; i < gimple_phi_num_args (phi); i++)
5666 {
5667 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5668 find_decls_types (*arg_p, fld);
5669 }
5670 }
5671
5672 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5673 {
5674 gimple stmt = gsi_stmt (si);
5675
5676 if (is_gimple_call (stmt))
5677 find_decls_types (gimple_call_fntype (stmt), fld);
5678
5679 for (i = 0; i < gimple_num_ops (stmt); i++)
5680 {
5681 tree arg = gimple_op (stmt, i);
5682 find_decls_types (arg, fld);
5683 }
5684 }
5685 }
5686 }
5687
5688
5689 /* Find decls and types referenced in varpool node N and store them in
5690 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5691 look for *every* kind of DECL and TYPE node reachable from N,
5692 including those embedded inside types and decls (i.e., TYPE_DECLs,
5693 NAMESPACE_DECLs, etc). */
5694
5695 static void
5696 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5697 {
5698 find_decls_types (v->decl, fld);
5699 }
5700
5701 /* If T needs an assembler name, have one created for it. */
5702
5703 void
5704 assign_assembler_name_if_neeeded (tree t)
5705 {
5706 if (need_assembler_name_p (t))
5707 {
5708 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5709 diagnostics that use input_location to show locus
5710 information. The problem here is that, at this point,
5711 input_location is generally anchored to the end of the file
5712 (since the parser is long gone), so we don't have a good
5713 position to pin it to.
5714
5715 To alleviate this problem, this uses the location of T's
5716 declaration. Examples of this are
5717 testsuite/g++.dg/template/cond2.C and
5718 testsuite/g++.dg/template/pr35240.C. */
5719 location_t saved_location = input_location;
5720 input_location = DECL_SOURCE_LOCATION (t);
5721
5722 decl_assembler_name (t);
5723
5724 input_location = saved_location;
5725 }
5726 }
5727
5728
5729 /* Free language specific information for every operand and expression
5730 in every node of the call graph. This process operates in three stages:
5731
5732 1- Every callgraph node and varpool node is traversed looking for
5733 decls and types embedded in them. This is a more exhaustive
5734 search than that done by find_referenced_vars, because it will
5735 also collect individual fields, decls embedded in types, etc.
5736
5737 2- All the decls found are sent to free_lang_data_in_decl.
5738
5739 3- All the types found are sent to free_lang_data_in_type.
5740
5741 The ordering between decls and types is important because
5742 free_lang_data_in_decl sets assembler names, which includes
5743 mangling. So types cannot be freed up until assembler names have
5744 been set up. */
5745
5746 static void
5747 free_lang_data_in_cgraph (void)
5748 {
5749 struct cgraph_node *n;
5750 varpool_node *v;
5751 struct free_lang_data_d fld;
5752 tree t;
5753 unsigned i;
5754 alias_pair *p;
5755
5756 /* Initialize sets and arrays to store referenced decls and types. */
5757 fld.pset = new hash_set<tree>;
5758 fld.worklist.create (0);
5759 fld.decls.create (100);
5760 fld.types.create (100);
5761
5762 /* Find decls and types in the body of every function in the callgraph. */
5763 FOR_EACH_FUNCTION (n)
5764 find_decls_types_in_node (n, &fld);
5765
5766 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5767 find_decls_types (p->decl, &fld);
5768
5769 /* Find decls and types in every varpool symbol. */
5770 FOR_EACH_VARIABLE (v)
5771 find_decls_types_in_var (v, &fld);
5772
5773 /* Set the assembler name on every decl found. We need to do this
5774 now because free_lang_data_in_decl will invalidate data needed
5775 for mangling. This breaks mangling on interdependent decls. */
5776 FOR_EACH_VEC_ELT (fld.decls, i, t)
5777 assign_assembler_name_if_neeeded (t);
5778
5779 /* Traverse every decl found freeing its language data. */
5780 FOR_EACH_VEC_ELT (fld.decls, i, t)
5781 free_lang_data_in_decl (t);
5782
5783 /* Traverse every type found freeing its language data. */
5784 FOR_EACH_VEC_ELT (fld.types, i, t)
5785 free_lang_data_in_type (t);
5786
5787 delete fld.pset;
5788 fld.worklist.release ();
5789 fld.decls.release ();
5790 fld.types.release ();
5791 }
5792
5793
5794 /* Free resources that are used by the FE but are not needed once it is done. */
5795
5796 static unsigned
5797 free_lang_data (void)
5798 {
5799 unsigned i;
5800
5801 /* If we are the LTO frontend we have freed lang-specific data already. */
5802 if (in_lto_p
5803 || (!flag_generate_lto && !flag_generate_offload))
5804 return 0;
5805
5806 /* Allocate and assign alias sets to the standard integer types
5807 while the slots are still set up the way the frontends generated them. */
5808 for (i = 0; i < itk_none; ++i)
5809 if (integer_types[i])
5810 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5811
5812 /* Traverse the IL resetting language specific information for
5813 operands, expressions, etc. */
5814 free_lang_data_in_cgraph ();
5815
5816 /* Create gimple variants for common types. */
5817 ptrdiff_type_node = integer_type_node;
5818 fileptr_type_node = ptr_type_node;
5819
5820 /* Reset some langhooks. Do not reset types_compatible_p, it may
5821 still be used indirectly via the get_alias_set langhook. */
5822 lang_hooks.dwarf_name = lhd_dwarf_name;
5823 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5824 /* We do not want the default decl_assembler_name implementation;
5825 rather, once we have fixed everything, we want a wrapper around it
5826 that asserts all non-local symbols already got their assembler name
5827 and produces assembler names only for local symbols. Or rather,
5828 make sure we never call decl_assembler_name on local symbols and
5829 devise a separate, middle-end private scheme for it. */
5830
5831 /* Reset diagnostic machinery. */
5832 tree_diagnostics_defaults (global_dc);
5833
5834 return 0;
5835 }
5836
5837
5838 namespace {
5839
5840 const pass_data pass_data_ipa_free_lang_data =
5841 {
5842 SIMPLE_IPA_PASS, /* type */
5843 "*free_lang_data", /* name */
5844 OPTGROUP_NONE, /* optinfo_flags */
5845 TV_IPA_FREE_LANG_DATA, /* tv_id */
5846 0, /* properties_required */
5847 0, /* properties_provided */
5848 0, /* properties_destroyed */
5849 0, /* todo_flags_start */
5850 0, /* todo_flags_finish */
5851 };
5852
5853 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5854 {
5855 public:
5856 pass_ipa_free_lang_data (gcc::context *ctxt)
5857 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5858 {}
5859
5860 /* opt_pass methods: */
5861 virtual unsigned int execute (function *) { return free_lang_data (); }
5862
5863 }; // class pass_ipa_free_lang_data
5864
5865 } // anon namespace
5866
5867 simple_ipa_opt_pass *
5868 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5869 {
5870 return new pass_ipa_free_lang_data (ctxt);
5871 }
5872
5873 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5874 ATTR_NAME. Also used internally by remove_attribute(). */
5875 bool
5876 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5877 {
5878 size_t ident_len = IDENTIFIER_LENGTH (ident);
5879
5880 if (ident_len == attr_len)
5881 {
5882 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5883 return true;
5884 }
5885 else if (ident_len == attr_len + 4)
5886 {
5887 /* There is the possibility that ATTR is 'text' and IDENT is
5888 '__text__'. */
5889 const char *p = IDENTIFIER_POINTER (ident);
5890 if (p[0] == '_' && p[1] == '_'
5891 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5892 && strncmp (attr_name, p + 2, attr_len) == 0)
5893 return true;
5894 }
5895
5896 return false;
5897 }
5898
5899 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5900 of ATTR_NAME, and LIST is not NULL_TREE. */
5901 tree
5902 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5903 {
5904 while (list)
5905 {
5906 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5907
5908 if (ident_len == attr_len)
5909 {
5910 if (!strcmp (attr_name,
5911 IDENTIFIER_POINTER (get_attribute_name (list))))
5912 break;
5913 }
5914 /* TODO: If we made sure that attributes were stored in the
5915 	 canonical form without '__...__' (i.e., as in 'text' as opposed
5916 to '__text__') then we could avoid the following case. */
5917 else if (ident_len == attr_len + 4)
5918 {
5919 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5920 if (p[0] == '_' && p[1] == '_'
5921 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5922 && strncmp (attr_name, p + 2, attr_len) == 0)
5923 break;
5924 }
5925 list = TREE_CHAIN (list);
5926 }
5927
5928 return list;
5929 }
5930
5931 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5932    return a pointer to the first list element whose attribute name
5933    starts with ATTR_NAME.  ATTR_NAME must be in the form 'text' (not
5934 '__text__'). */
5935
5936 tree
5937 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5938 tree list)
5939 {
5940 while (list)
5941 {
5942 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5943
5944 if (attr_len > ident_len)
5945 {
5946 list = TREE_CHAIN (list);
5947 continue;
5948 }
5949
5950 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5951
5952 if (strncmp (attr_name, p, attr_len) == 0)
5953 break;
5954
5955 /* TODO: If we made sure that attributes were stored in the
5956 	 canonical form without '__...__' (i.e., as in 'text' as opposed
5957 to '__text__') then we could avoid the following case. */
5958       if (p[0] == '_' && p[1] == '_'
5959 	  && strncmp (attr_name, p + 2, attr_len) == 0)
5960 break;
5961
5962 list = TREE_CHAIN (list);
5963 }
5964
5965 return list;
5966 }
5967
5968
5969 /* A variant of lookup_attribute() that can be used with an identifier
5970 as the first argument, and where the identifier can be either
5971 'text' or '__text__'.
5972
5973 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5974 return a pointer to the attribute's list element if the attribute
5975 is part of the list, or NULL_TREE if not found. If the attribute
5976 appears more than once, this only returns the first occurrence; the
5977 TREE_CHAIN of the return value should be passed back in if further
5978 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5979 can be in the form 'text' or '__text__'. */
5980 static tree
5981 lookup_ident_attribute (tree attr_identifier, tree list)
5982 {
5983 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5984
5985 while (list)
5986 {
5987 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5988 == IDENTIFIER_NODE);
5989
5990 /* Identifiers can be compared directly for equality. */
5991 if (attr_identifier == get_attribute_name (list))
5992 break;
5993
5994 /* If they are not equal, they may still be one in the form
5995 'text' while the other one is in the form '__text__'. TODO:
5996 If we were storing attributes in normalized 'text' form, then
5997 this could all go away and we could take full advantage of
5998 the fact that we're comparing identifiers. :-) */
5999 {
6000 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
6001 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
6002
6003 if (ident_len == attr_len + 4)
6004 {
6005 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6006 const char *q = IDENTIFIER_POINTER (attr_identifier);
6007 if (p[0] == '_' && p[1] == '_'
6008 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6009 && strncmp (q, p + 2, attr_len) == 0)
6010 break;
6011 }
6012 else if (ident_len + 4 == attr_len)
6013 {
6014 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6015 const char *q = IDENTIFIER_POINTER (attr_identifier);
6016 if (q[0] == '_' && q[1] == '_'
6017 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
6018 && strncmp (q + 2, p, ident_len) == 0)
6019 break;
6020 }
6021 }
6022 list = TREE_CHAIN (list);
6023 }
6024
6025 return list;
6026 }
6027
6028 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6029 modified list. */
6030
6031 tree
6032 remove_attribute (const char *attr_name, tree list)
6033 {
6034 tree *p;
6035 size_t attr_len = strlen (attr_name);
6036
6037 gcc_checking_assert (attr_name[0] != '_');
6038
6039 for (p = &list; *p; )
6040 {
6041 tree l = *p;
6042 /* TODO: If we were storing attributes in normalized form, here
6043 we could use a simple strcmp(). */
6044 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6045 *p = TREE_CHAIN (l);
6046 else
6047 p = &TREE_CHAIN (l);
6048 }
6049
6050 return list;
6051 }
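
/* A minimal usage sketch, assuming a hypothetical declaration DECL whose
   attribute list should lose its "deprecated" entries:

     DECL_ATTRIBUTES (decl)
       = remove_attribute ("deprecated", DECL_ATTRIBUTES (decl));

   The name must be given in canonical form without the '__' wrapping,
   as enforced by the assert above.  */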
6052
6053 /* Return an attribute list that is the union of A1 and A2.  */
6054
6055 tree
6056 merge_attributes (tree a1, tree a2)
6057 {
6058 tree attributes;
6059
6060 /* Either one unset? Take the set one. */
6061
6062 if ((attributes = a1) == 0)
6063 attributes = a2;
6064
6065 /* One that completely contains the other? Take it. */
6066
6067 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6068 {
6069 if (attribute_list_contained (a2, a1))
6070 attributes = a2;
6071 else
6072 {
6073 /* Pick the longest list, and hang on the other list. */
6074
6075 if (list_length (a1) < list_length (a2))
6076 attributes = a2, a2 = a1;
6077
6078 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6079 {
6080 tree a;
6081 for (a = lookup_ident_attribute (get_attribute_name (a2),
6082 attributes);
6083 a != NULL_TREE && !attribute_value_equal (a, a2);
6084 a = lookup_ident_attribute (get_attribute_name (a2),
6085 TREE_CHAIN (a)))
6086 ;
6087 if (a == NULL_TREE)
6088 {
6089 a1 = copy_node (a2);
6090 TREE_CHAIN (a1) = attributes;
6091 attributes = a1;
6092 }
6093 }
6094 }
6095 }
6096 return attributes;
6097 }
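
/* A small worked example of the union semantics above, with hypothetical
   lists:

     a1 = ((aligned 8), (packed))
     a2 = ((packed), (deprecated))

   merge_attributes (a1, a2) contains (aligned 8), (packed) and (deprecated);
   the (packed) entry common to both lists appears only once, because
   lookup_ident_attribute finds an equal-valued copy already present.  */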
6098
6099 /* Given types T1 and T2, merge their attributes and return
6100 the result. */
6101
6102 tree
6103 merge_type_attributes (tree t1, tree t2)
6104 {
6105 return merge_attributes (TYPE_ATTRIBUTES (t1),
6106 TYPE_ATTRIBUTES (t2));
6107 }
6108
6109 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6110 the result. */
6111
6112 tree
6113 merge_decl_attributes (tree olddecl, tree newdecl)
6114 {
6115 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6116 DECL_ATTRIBUTES (newdecl));
6117 }
6118
6119 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6120
6121 /* Specialization of merge_decl_attributes for various Windows targets.
6122
6123 This handles the following situation:
6124
6125 __declspec (dllimport) int foo;
6126 int foo;
6127
6128 The second instance of `foo' nullifies the dllimport. */
6129
6130 tree
6131 merge_dllimport_decl_attributes (tree old, tree new_tree)
6132 {
6133 tree a;
6134 int delete_dllimport_p = 1;
6135
6136 /* What we need to do here is remove from `old' dllimport if it doesn't
6137 appear in `new'. dllimport behaves like extern: if a declaration is
6138 marked dllimport and a definition appears later, then the object
6139 is not dllimport'd. We also remove a `new' dllimport if the old list
6140 contains dllexport: dllexport always overrides dllimport, regardless
6141 of the order of declaration. */
6142 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6143 delete_dllimport_p = 0;
6144 else if (DECL_DLLIMPORT_P (new_tree)
6145 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6146 {
6147 DECL_DLLIMPORT_P (new_tree) = 0;
6148 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6149 "dllimport ignored", new_tree);
6150 }
6151 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6152 {
6153 /* Warn about overriding a symbol that has already been used, e.g.:
6154 extern int __attribute__ ((dllimport)) foo;
6155 int* bar () {return &foo;}
6156 int foo;
6157 */
6158 if (TREE_USED (old))
6159 {
6160 warning (0, "%q+D redeclared without dllimport attribute "
6161 "after being referenced with dll linkage", new_tree);
6162 /* If we have used a variable's address with dllimport linkage,
6163 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6164 decl may already have had TREE_CONSTANT computed.
6165 We still remove the attribute so that assembler code refers
6166 	     to '&foo' rather than '_imp__foo'.  */
6167 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6168 DECL_DLLIMPORT_P (new_tree) = 1;
6169 }
6170
6171 /* Let an inline definition silently override the external reference,
6172 but otherwise warn about attribute inconsistency. */
6173 else if (TREE_CODE (new_tree) == VAR_DECL
6174 || !DECL_DECLARED_INLINE_P (new_tree))
6175 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6176 "previous dllimport ignored", new_tree);
6177 }
6178 else
6179 delete_dllimport_p = 0;
6180
6181 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6182
6183 if (delete_dllimport_p)
6184 a = remove_attribute ("dllimport", a);
6185
6186 return a;
6187 }
6188
6189 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6190 struct attribute_spec.handler. */
6191
6192 tree
6193 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6194 bool *no_add_attrs)
6195 {
6196 tree node = *pnode;
6197 bool is_dllimport;
6198
6199 /* These attributes may apply to structure and union types being created,
6200 but otherwise should pass to the declaration involved. */
6201 if (!DECL_P (node))
6202 {
6203 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6204 | (int) ATTR_FLAG_ARRAY_NEXT))
6205 {
6206 *no_add_attrs = true;
6207 return tree_cons (name, args, NULL_TREE);
6208 }
6209 if (TREE_CODE (node) == RECORD_TYPE
6210 || TREE_CODE (node) == UNION_TYPE)
6211 {
6212 node = TYPE_NAME (node);
6213 if (!node)
6214 return NULL_TREE;
6215 }
6216 else
6217 {
6218 warning (OPT_Wattributes, "%qE attribute ignored",
6219 name);
6220 *no_add_attrs = true;
6221 return NULL_TREE;
6222 }
6223 }
6224
6225 if (TREE_CODE (node) != FUNCTION_DECL
6226 && TREE_CODE (node) != VAR_DECL
6227 && TREE_CODE (node) != TYPE_DECL)
6228 {
6229 *no_add_attrs = true;
6230 warning (OPT_Wattributes, "%qE attribute ignored",
6231 name);
6232 return NULL_TREE;
6233 }
6234
6235 if (TREE_CODE (node) == TYPE_DECL
6236 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6237 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6238 {
6239 *no_add_attrs = true;
6240 warning (OPT_Wattributes, "%qE attribute ignored",
6241 name);
6242 return NULL_TREE;
6243 }
6244
6245 is_dllimport = is_attribute_p ("dllimport", name);
6246
6247 /* Report error on dllimport ambiguities seen now before they cause
6248 any damage. */
6249 if (is_dllimport)
6250 {
6251 /* Honor any target-specific overrides. */
6252 if (!targetm.valid_dllimport_attribute_p (node))
6253 *no_add_attrs = true;
6254
6255 else if (TREE_CODE (node) == FUNCTION_DECL
6256 && DECL_DECLARED_INLINE_P (node))
6257 {
6258 warning (OPT_Wattributes, "inline function %q+D declared as "
6259 " dllimport: attribute ignored", node);
6260 *no_add_attrs = true;
6261 }
6262 /* Like MS, treat definition of dllimported variables and
6263 non-inlined functions on declaration as syntax errors. */
6264 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6265 {
6266 error ("function %q+D definition is marked dllimport", node);
6267 *no_add_attrs = true;
6268 }
6269
6270 else if (TREE_CODE (node) == VAR_DECL)
6271 {
6272 if (DECL_INITIAL (node))
6273 {
6274 error ("variable %q+D definition is marked dllimport",
6275 node);
6276 *no_add_attrs = true;
6277 }
6278
6279 /* `extern' needn't be specified with dllimport.
6280 Specify `extern' now and hope for the best. Sigh. */
6281 DECL_EXTERNAL (node) = 1;
6282 	  /* Also, implicitly give global scope to dllimport'd variables
6283 	     declared within a function, unless declared static.  */
6284 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6285 TREE_PUBLIC (node) = 1;
6286 }
6287
6288 if (*no_add_attrs == false)
6289 DECL_DLLIMPORT_P (node) = 1;
6290 }
6291 else if (TREE_CODE (node) == FUNCTION_DECL
6292 && DECL_DECLARED_INLINE_P (node)
6293 && flag_keep_inline_dllexport)
6294 /* An exported function, even if inline, must be emitted. */
6295 DECL_EXTERNAL (node) = 0;
6296
6297 /* Report error if symbol is not accessible at global scope. */
6298 if (!TREE_PUBLIC (node)
6299 && (TREE_CODE (node) == VAR_DECL
6300 || TREE_CODE (node) == FUNCTION_DECL))
6301 {
6302 error ("external linkage required for symbol %q+D because of "
6303 "%qE attribute", node, name);
6304 *no_add_attrs = true;
6305 }
6306
6307 /* A dllexport'd entity must have default visibility so that other
6308 program units (shared libraries or the main executable) can see
6309 it. A dllimport'd entity must have default visibility so that
6310 the linker knows that undefined references within this program
6311 unit can be resolved by the dynamic linker. */
6312 if (!*no_add_attrs)
6313 {
6314 if (DECL_VISIBILITY_SPECIFIED (node)
6315 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6316 error ("%qE implies default visibility, but %qD has already "
6317 "been declared with a different visibility",
6318 name, node);
6319 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6320 DECL_VISIBILITY_SPECIFIED (node) = 1;
6321 }
6322
6323 return NULL_TREE;
6324 }
6325
6326 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6327 \f
6328 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6329 of the various TYPE_QUAL values. */
6330
6331 static void
6332 set_type_quals (tree type, int type_quals)
6333 {
6334 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6335 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6336 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6337 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6338 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6339 }
6340
6341 /* Returns true iff unqualified CAND and BASE are equivalent. */
6342
6343 bool
6344 check_base_type (const_tree cand, const_tree base)
6345 {
6346 return (TYPE_NAME (cand) == TYPE_NAME (base)
6347 /* Apparently this is needed for Objective-C. */
6348 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6349 /* Check alignment. */
6350 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6351 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6352 TYPE_ATTRIBUTES (base)));
6353 }
6354
6355 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6356
6357 bool
6358 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6359 {
6360 return (TYPE_QUALS (cand) == type_quals
6361 && check_base_type (cand, base));
6362 }
6363
6364 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6365
6366 static bool
6367 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6368 {
6369 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6370 && TYPE_NAME (cand) == TYPE_NAME (base)
6371 /* Apparently this is needed for Objective-C. */
6372 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6373 /* Check alignment. */
6374 && TYPE_ALIGN (cand) == align
6375 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6376 TYPE_ATTRIBUTES (base)));
6377 }
6378
6379 /* This function checks to see if TYPE matches the size of one of the
6380    built-in atomic types, and returns that core atomic type.  */
6381
6382 static tree
6383 find_atomic_core_type (tree type)
6384 {
6385 tree base_atomic_type;
6386
6387 /* Only handle complete types. */
6388 if (TYPE_SIZE (type) == NULL_TREE)
6389 return NULL_TREE;
6390
6391 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6392 switch (type_size)
6393 {
6394 case 8:
6395 base_atomic_type = atomicQI_type_node;
6396 break;
6397
6398 case 16:
6399 base_atomic_type = atomicHI_type_node;
6400 break;
6401
6402 case 32:
6403 base_atomic_type = atomicSI_type_node;
6404 break;
6405
6406 case 64:
6407 base_atomic_type = atomicDI_type_node;
6408 break;
6409
6410 case 128:
6411 base_atomic_type = atomicTI_type_node;
6412 break;
6413
6414 default:
6415 base_atomic_type = NULL_TREE;
6416 }
6417
6418 return base_atomic_type;
6419 }
6420
6421 /* Return a version of the TYPE, qualified as indicated by the
6422 TYPE_QUALS, if one exists. If no qualified version exists yet,
6423 return NULL_TREE. */
6424
6425 tree
6426 get_qualified_type (tree type, int type_quals)
6427 {
6428 tree t;
6429
6430 if (TYPE_QUALS (type) == type_quals)
6431 return type;
6432
6433 /* Search the chain of variants to see if there is already one there just
6434 like the one we need to have. If so, use that existing one. We must
6435 preserve the TYPE_NAME, since there is code that depends on this. */
6436 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6437 if (check_qualified_type (t, type, type_quals))
6438 return t;
6439
6440 return NULL_TREE;
6441 }
6442
6443 /* Like get_qualified_type, but creates the type if it does not
6444 exist. This function never returns NULL_TREE. */
6445
6446 tree
6447 build_qualified_type (tree type, int type_quals)
6448 {
6449 tree t;
6450
6451 /* See if we already have the appropriate qualified variant. */
6452 t = get_qualified_type (type, type_quals);
6453
6454 /* If not, build it. */
6455 if (!t)
6456 {
6457 t = build_variant_type_copy (type);
6458 set_type_quals (t, type_quals);
6459
6460 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6461 {
6462 /* See if this object can map to a basic atomic type. */
6463 tree atomic_type = find_atomic_core_type (type);
6464 if (atomic_type)
6465 {
6466 /* Ensure the alignment of this type is compatible with
6467 the required alignment of the atomic type. */
6468 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6469 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6470 }
6471 }
6472
6473 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6474 /* Propagate structural equality. */
6475 SET_TYPE_STRUCTURAL_EQUALITY (t);
6476 else if (TYPE_CANONICAL (type) != type)
6477 /* Build the underlying canonical type, since it is different
6478 from TYPE. */
6479 {
6480 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6481 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6482 }
6483 else
6484 /* T is its own canonical type. */
6485 TYPE_CANONICAL (t) = t;
6486
6487 }
6488
6489 return t;
6490 }
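
/* A minimal usage sketch: asking twice for the same qualified variant
   returns the same node, e.g.

     tree c1 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     tree c2 = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);

   yields c1 == c2, since the second call finds the variant created by the
   first one on the TYPE_NEXT_VARIANT chain via get_qualified_type.  */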
6491
6492 /* Create a variant of type T with alignment ALIGN. */
6493
6494 tree
6495 build_aligned_type (tree type, unsigned int align)
6496 {
6497 tree t;
6498
6499 if (TYPE_PACKED (type)
6500 || TYPE_ALIGN (type) == align)
6501 return type;
6502
6503 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6504 if (check_aligned_type (t, type, align))
6505 return t;
6506
6507 t = build_variant_type_copy (type);
6508 TYPE_ALIGN (t) = align;
6509
6510 return t;
6511 }
6512
6513 /* Create a new distinct copy of TYPE. The new type is made its own
6514 MAIN_VARIANT. If TYPE requires structural equality checks, the
6515 resulting type requires structural equality checks; otherwise, its
6516 TYPE_CANONICAL points to itself. */
6517
6518 tree
6519 build_distinct_type_copy (tree type)
6520 {
6521 tree t = copy_node (type);
6522
6523 TYPE_POINTER_TO (t) = 0;
6524 TYPE_REFERENCE_TO (t) = 0;
6525
6526 /* Set the canonical type either to a new equivalence class, or
6527 propagate the need for structural equality checks. */
6528 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6529 SET_TYPE_STRUCTURAL_EQUALITY (t);
6530 else
6531 TYPE_CANONICAL (t) = t;
6532
6533 /* Make it its own variant. */
6534 TYPE_MAIN_VARIANT (t) = t;
6535 TYPE_NEXT_VARIANT (t) = 0;
6536
6537 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6538 whose TREE_TYPE is not t. This can also happen in the Ada
6539 frontend when using subtypes. */
6540
6541 return t;
6542 }
6543
6544 /* Create a new variant of TYPE, equivalent but distinct. This is so
6545 the caller can modify it. TYPE_CANONICAL for the return type will
6546 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6547 are considered equal by the language itself (or that both types
6548 require structural equality checks). */
6549
6550 tree
6551 build_variant_type_copy (tree type)
6552 {
6553 tree t, m = TYPE_MAIN_VARIANT (type);
6554
6555 t = build_distinct_type_copy (type);
6556
6557 /* Since we're building a variant, assume that it is a non-semantic
6558 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6559 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6560
6561 /* Add the new type to the chain of variants of TYPE. */
6562 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6563 TYPE_NEXT_VARIANT (m) = t;
6564 TYPE_MAIN_VARIANT (t) = m;
6565
6566 return t;
6567 }
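
/* A sketch of the difference between the two copy routines above, for a
   hypothetical type T:

     tree d = build_distinct_type_copy (T);
       D is its own TYPE_MAIN_VARIANT and (unless structural equality is
       required) its own TYPE_CANONICAL, i.e. a new type.

     tree v = build_variant_type_copy (T);
       V is chained onto the variant list of TYPE_MAIN_VARIANT (T) and
       shares TYPE_CANONICAL with T, i.e. a non-semantic variant.  */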
6568 \f
6569 /* Return true if the "from" trees in both tree maps are equal.  */
6570
6571 int
6572 tree_map_base_eq (const void *va, const void *vb)
6573 {
6574 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6575 *const b = (const struct tree_map_base *) vb;
6576 return (a->from == b->from);
6577 }
6578
6579 /* Hash a from tree in a tree_map_base.  */
6580
6581 unsigned int
6582 tree_map_base_hash (const void *item)
6583 {
6584 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6585 }
6586
6587 /* Return true if this tree map structure is marked for garbage collection
6588 purposes. We simply return true if the from tree is marked, so that this
6589 structure goes away when the from tree goes away. */
6590
6591 int
6592 tree_map_base_marked_p (const void *p)
6593 {
6594 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6595 }
6596
6597 /* Hash a from tree in a tree_map. */
6598
6599 unsigned int
6600 tree_map_hash (const void *item)
6601 {
6602 return (((const struct tree_map *) item)->hash);
6603 }
6604
6605 /* Hash a from tree in a tree_decl_map. */
6606
6607 unsigned int
6608 tree_decl_map_hash (const void *item)
6609 {
6610 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6611 }
6612
6613 /* Return the initialization priority for DECL. */
6614
6615 priority_type
6616 decl_init_priority_lookup (tree decl)
6617 {
6618 symtab_node *snode = symtab_node::get (decl);
6619
6620 if (!snode)
6621 return DEFAULT_INIT_PRIORITY;
6622   return snode->get_init_priority ();
6624 }
6625
6626 /* Return the finalization priority for DECL. */
6627
6628 priority_type
6629 decl_fini_priority_lookup (tree decl)
6630 {
6631 cgraph_node *node = cgraph_node::get (decl);
6632
6633 if (!node)
6634 return DEFAULT_INIT_PRIORITY;
6635   return node->get_fini_priority ();
6637 }
6638
6639 /* Set the initialization priority for DECL to PRIORITY. */
6640
6641 void
6642 decl_init_priority_insert (tree decl, priority_type priority)
6643 {
6644 struct symtab_node *snode;
6645
6646 if (priority == DEFAULT_INIT_PRIORITY)
6647 {
6648 snode = symtab_node::get (decl);
6649 if (!snode)
6650 return;
6651 }
6652 else if (TREE_CODE (decl) == VAR_DECL)
6653 snode = varpool_node::get_create (decl);
6654 else
6655 snode = cgraph_node::get_create (decl);
6656 snode->set_init_priority (priority);
6657 }
6658
6659 /* Set the finalization priority for DECL to PRIORITY. */
6660
6661 void
6662 decl_fini_priority_insert (tree decl, priority_type priority)
6663 {
6664 struct cgraph_node *node;
6665
6666 if (priority == DEFAULT_INIT_PRIORITY)
6667 {
6668 node = cgraph_node::get (decl);
6669 if (!node)
6670 return;
6671 }
6672 else
6673 node = cgraph_node::get_create (decl);
6674 node->set_fini_priority (priority);
6675 }
6676
6677 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6678
6679 static void
6680 print_debug_expr_statistics (void)
6681 {
6682 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6683 (long) debug_expr_for_decl->size (),
6684 (long) debug_expr_for_decl->elements (),
6685 debug_expr_for_decl->collisions ());
6686 }
6687
6688 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6689
6690 static void
6691 print_value_expr_statistics (void)
6692 {
6693 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6694 (long) value_expr_for_decl->size (),
6695 (long) value_expr_for_decl->elements (),
6696 value_expr_for_decl->collisions ());
6697 }
6698
6699 /* Lookup a debug expression for FROM, and return it if we find one. */
6700
6701 tree
6702 decl_debug_expr_lookup (tree from)
6703 {
6704 struct tree_decl_map *h, in;
6705 in.base.from = from;
6706
6707 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6708 if (h)
6709 return h->to;
6710 return NULL_TREE;
6711 }
6712
6713 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6714
6715 void
6716 decl_debug_expr_insert (tree from, tree to)
6717 {
6718 struct tree_decl_map *h;
6719
6720 h = ggc_alloc<tree_decl_map> ();
6721 h->base.from = from;
6722 h->to = to;
6723 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6724 }
6725
6726 /* Lookup a value expression for FROM, and return it if we find one. */
6727
6728 tree
6729 decl_value_expr_lookup (tree from)
6730 {
6731 struct tree_decl_map *h, in;
6732 in.base.from = from;
6733
6734 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6735 if (h)
6736 return h->to;
6737 return NULL_TREE;
6738 }
6739
6740 /* Insert a mapping FROM->TO in the value expression hashtable. */
6741
6742 void
6743 decl_value_expr_insert (tree from, tree to)
6744 {
6745 struct tree_decl_map *h;
6746
6747 h = ggc_alloc<tree_decl_map> ();
6748 h->base.from = from;
6749 h->to = to;
6750 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6751 }
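
/* A minimal usage sketch, assuming a hypothetical DECL and replacement
   expression EXPR.  Callers pair the insertion with the corresponding
   DECL flag, so that lookups are only attempted when a mapping exists:

     SET_DECL_VALUE_EXPR (decl, expr);
     DECL_HAS_VALUE_EXPR_P (decl) = 1;

   SET_DECL_VALUE_EXPR is the tree.h wrapper around decl_value_expr_insert
   above.  */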
6752
6753 /* Lookup a vector of debug arguments for FROM, and return it if we
6754 find one. */
6755
6756 vec<tree, va_gc> **
6757 decl_debug_args_lookup (tree from)
6758 {
6759 struct tree_vec_map *h, in;
6760
6761 if (!DECL_HAS_DEBUG_ARGS_P (from))
6762 return NULL;
6763 gcc_checking_assert (debug_args_for_decl != NULL);
6764 in.base.from = from;
6765 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6766 if (h)
6767 return &h->to;
6768 return NULL;
6769 }
6770
6771 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6772    arguments hashtable.  */
6773
6774 vec<tree, va_gc> **
6775 decl_debug_args_insert (tree from)
6776 {
6777 struct tree_vec_map *h;
6778 tree_vec_map **loc;
6779
6780 if (DECL_HAS_DEBUG_ARGS_P (from))
6781 return decl_debug_args_lookup (from);
6782 if (debug_args_for_decl == NULL)
6783 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6784 h = ggc_alloc<tree_vec_map> ();
6785 h->base.from = from;
6786 h->to = NULL;
6787 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6788 *loc = h;
6789 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6790 return &h->to;
6791 }
6792
6793 /* Hashing of types so that we don't make duplicates.
6794 The entry point is `type_hash_canon'. */
6795
6796 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6797 with types in the TREE_VALUE slots), by adding the hash codes
6798 of the individual types. */
6799
6800 static void
6801 type_hash_list (const_tree list, inchash::hash &hstate)
6802 {
6803 const_tree tail;
6804
6805 for (tail = list; tail; tail = TREE_CHAIN (tail))
6806 if (TREE_VALUE (tail) != error_mark_node)
6807 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6808 }
6809
6810 /* These are the Hashtable callback functions. */
6811
6812 /* Returns true iff the types are equivalent. */
6813
6814 bool
6815 type_cache_hasher::equal (type_hash *a, type_hash *b)
6816 {
6817 /* First test the things that are the same for all types. */
6818 if (a->hash != b->hash
6819 || TREE_CODE (a->type) != TREE_CODE (b->type)
6820 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6821 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6822 TYPE_ATTRIBUTES (b->type))
6823 || (TREE_CODE (a->type) != COMPLEX_TYPE
6824 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6825 return 0;
6826
6827 /* Be careful about comparing arrays before and after the element type
6828 has been completed; don't compare TYPE_ALIGN unless both types are
6829 complete. */
6830 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6831 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6832 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6833 return 0;
6834
6835 switch (TREE_CODE (a->type))
6836 {
6837 case VOID_TYPE:
6838 case COMPLEX_TYPE:
6839 case POINTER_TYPE:
6840 case REFERENCE_TYPE:
6841 case NULLPTR_TYPE:
6842 return 1;
6843
6844 case VECTOR_TYPE:
6845 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6846
6847 case ENUMERAL_TYPE:
6848 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6849 && !(TYPE_VALUES (a->type)
6850 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6851 && TYPE_VALUES (b->type)
6852 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6853 && type_list_equal (TYPE_VALUES (a->type),
6854 TYPE_VALUES (b->type))))
6855 return 0;
6856
6857 /* ... fall through ... */
6858
6859 case INTEGER_TYPE:
6860 case REAL_TYPE:
6861 case BOOLEAN_TYPE:
6862 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6863 return false;
6864 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6865 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6866 TYPE_MAX_VALUE (b->type)))
6867 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6868 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6869 TYPE_MIN_VALUE (b->type))));
6870
6871 case FIXED_POINT_TYPE:
6872 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6873
6874 case OFFSET_TYPE:
6875 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6876
6877 case METHOD_TYPE:
6878 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6879 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6880 || (TYPE_ARG_TYPES (a->type)
6881 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6882 && TYPE_ARG_TYPES (b->type)
6883 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6884 && type_list_equal (TYPE_ARG_TYPES (a->type),
6885 TYPE_ARG_TYPES (b->type)))))
6886 break;
6887 return 0;
6888 case ARRAY_TYPE:
6889 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6890
6891 case RECORD_TYPE:
6892 case UNION_TYPE:
6893 case QUAL_UNION_TYPE:
6894 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6895 || (TYPE_FIELDS (a->type)
6896 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6897 && TYPE_FIELDS (b->type)
6898 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6899 && type_list_equal (TYPE_FIELDS (a->type),
6900 TYPE_FIELDS (b->type))));
6901
6902 case FUNCTION_TYPE:
6903 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6904 || (TYPE_ARG_TYPES (a->type)
6905 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6906 && TYPE_ARG_TYPES (b->type)
6907 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6908 && type_list_equal (TYPE_ARG_TYPES (a->type),
6909 TYPE_ARG_TYPES (b->type))))
6910 break;
6911 return 0;
6912
6913 default:
6914 return 0;
6915 }
6916
6917 if (lang_hooks.types.type_hash_eq != NULL)
6918 return lang_hooks.types.type_hash_eq (a->type, b->type);
6919
6920 return 1;
6921 }
6922
6923 /* Given TYPE, and HASHCODE its hash code, return the canonical
6924 object for an identical type if one already exists.
6925 Otherwise, return TYPE, and record it as the canonical object.
6926
6927 To use this function, first create a type of the sort you want.
6928 Then compute its hash code from the fields of the type that
6929 make it different from other similar types.
6930 Then call this function and use the value. */
6931
6932 tree
6933 type_hash_canon (unsigned int hashcode, tree type)
6934 {
6935 type_hash in;
6936 type_hash **loc;
6937
6938 /* The hash table only contains main variants, so ensure that's what we're
6939 being passed. */
6940 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6941
6942 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6943 must call that routine before comparing TYPE_ALIGNs. */
6944 layout_type (type);
6945
6946 in.hash = hashcode;
6947 in.type = type;
6948
6949 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6950 if (*loc)
6951 {
6952 tree t1 = ((type_hash *) *loc)->type;
6953 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6954 if (GATHER_STATISTICS)
6955 {
6956 tree_code_counts[(int) TREE_CODE (type)]--;
6957 tree_node_counts[(int) t_kind]--;
6958 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6959 }
6960 return t1;
6961 }
6962 else
6963 {
6964 struct type_hash *h;
6965
6966 h = ggc_alloc<type_hash> ();
6967 h->hash = hashcode;
6968 h->type = type;
6969 *loc = h;
6970
6971 return type;
6972 }
6973 }
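
/* A rough sketch of the usage pattern described above, as followed by
   build_range_type_1 and build_nonstandard_integer_type later in this
   file (the hashed field here is only an example):

     tree t = make_node (INTEGER_TYPE);
     ... fill in the distinguishing fields of T ...
     inchash::hash hstate;
     hstate.add_int (TYPE_PRECISION (t));
     t = type_hash_canon (hstate.end (), t);

   Afterwards T is either the freshly recorded type or an identical type
   interned earlier.  */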
6974
6975 static void
6976 print_type_hash_statistics (void)
6977 {
6978 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6979 (long) type_hash_table->size (),
6980 (long) type_hash_table->elements (),
6981 type_hash_table->collisions ());
6982 }
6983
6984 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6985 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6986 by adding the hash codes of the individual attributes. */
6987
6988 static void
6989 attribute_hash_list (const_tree list, inchash::hash &hstate)
6990 {
6991 const_tree tail;
6992
6993 for (tail = list; tail; tail = TREE_CHAIN (tail))
6994 /* ??? Do we want to add in TREE_VALUE too? */
6995 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6996 }
6997
6998 /* Given two lists of attributes, return true if list L2 is
6999    equivalent to L1.  */
7000
7001 int
7002 attribute_list_equal (const_tree l1, const_tree l2)
7003 {
7004 if (l1 == l2)
7005 return 1;
7006
7007 return attribute_list_contained (l1, l2)
7008 && attribute_list_contained (l2, l1);
7009 }
7010
7011 /* Given two lists of attributes, return true if list L2 is
7012 completely contained within L1. */
7013 /* ??? This would be faster if attribute names were stored in a canonicalized
7014 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7015 must be used to show these elements are equivalent (which they are). */
7016 /* ??? It's not clear that attributes with arguments will always be handled
7017 correctly. */
7018
7019 int
7020 attribute_list_contained (const_tree l1, const_tree l2)
7021 {
7022 const_tree t1, t2;
7023
7024 /* First check the obvious, maybe the lists are identical. */
7025 if (l1 == l2)
7026 return 1;
7027
7028 /* Maybe the lists are similar. */
7029 for (t1 = l1, t2 = l2;
7030 t1 != 0 && t2 != 0
7031 && get_attribute_name (t1) == get_attribute_name (t2)
7032 && TREE_VALUE (t1) == TREE_VALUE (t2);
7033 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7034 ;
7035
7036 /* Maybe the lists are equal. */
7037 if (t1 == 0 && t2 == 0)
7038 return 1;
7039
7040 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7041 {
7042 const_tree attr;
7043 /* This CONST_CAST is okay because lookup_attribute does not
7044 modify its argument and the return value is assigned to a
7045 const_tree. */
7046 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7047 CONST_CAST_TREE (l1));
7048 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7049 attr = lookup_ident_attribute (get_attribute_name (t2),
7050 TREE_CHAIN (attr)))
7051 ;
7052
7053 if (attr == NULL_TREE)
7054 return 0;
7055 }
7056
7057 return 1;
7058 }
7059
7060 /* Given two lists of types
7061 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7062 return 1 if the lists contain the same types in the same order.
7063 Also, the TREE_PURPOSEs must match. */
7064
7065 int
7066 type_list_equal (const_tree l1, const_tree l2)
7067 {
7068 const_tree t1, t2;
7069
7070 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7071 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7072 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7073 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7074 && (TREE_TYPE (TREE_PURPOSE (t1))
7075 == TREE_TYPE (TREE_PURPOSE (t2))))))
7076 return 0;
7077
7078 return t1 == t2;
7079 }
7080
7081 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7082 given by TYPE. If the argument list accepts variable arguments,
7083 then this function counts only the ordinary arguments. */
7084
7085 int
7086 type_num_arguments (const_tree type)
7087 {
7088 int i = 0;
7089 tree t;
7090
7091 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7092 /* If the function does not take a variable number of arguments,
7093 the last element in the list will have type `void'. */
7094 if (VOID_TYPE_P (TREE_VALUE (t)))
7095 break;
7096 else
7097 ++i;
7098
7099 return i;
7100 }
7101
7102 /* Nonzero if integer constants T1 and T2
7103 represent the same constant value. */
7104
7105 int
7106 tree_int_cst_equal (const_tree t1, const_tree t2)
7107 {
7108 if (t1 == t2)
7109 return 1;
7110
7111 if (t1 == 0 || t2 == 0)
7112 return 0;
7113
7114 if (TREE_CODE (t1) == INTEGER_CST
7115 && TREE_CODE (t2) == INTEGER_CST
7116 && wi::to_widest (t1) == wi::to_widest (t2))
7117 return 1;
7118
7119 return 0;
7120 }
7121
7122 /* Return true if T is an INTEGER_CST whose numerical value (extended
7123 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7124
7125 bool
7126 tree_fits_shwi_p (const_tree t)
7127 {
7128 return (t != NULL_TREE
7129 && TREE_CODE (t) == INTEGER_CST
7130 && wi::fits_shwi_p (wi::to_widest (t)));
7131 }
7132
7133 /* Return true if T is an INTEGER_CST whose numerical value (extended
7134 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7135
7136 bool
7137 tree_fits_uhwi_p (const_tree t)
7138 {
7139 return (t != NULL_TREE
7140 && TREE_CODE (t) == INTEGER_CST
7141 && wi::fits_uhwi_p (wi::to_widest (t)));
7142 }
7143
7144 /* T is an INTEGER_CST whose numerical value (extended according to
7145 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7146 HOST_WIDE_INT. */
7147
7148 HOST_WIDE_INT
7149 tree_to_shwi (const_tree t)
7150 {
7151 gcc_assert (tree_fits_shwi_p (t));
7152 return TREE_INT_CST_LOW (t);
7153 }
7154
7155 /* T is an INTEGER_CST whose numerical value (extended according to
7156 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7157 HOST_WIDE_INT. */
7158
7159 unsigned HOST_WIDE_INT
7160 tree_to_uhwi (const_tree t)
7161 {
7162 gcc_assert (tree_fits_uhwi_p (t));
7163 return TREE_INT_CST_LOW (t);
7164 }
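
/* A minimal sketch of the intended idiom: guard each conversion with the
   matching predicate, e.g. for some type's byte size:

     if (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
       {
	 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
	 ...
       }

   Calling tree_to_uhwi on a constant that does not fit trips the assert.  */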
7165
7166 /* Return the most significant (sign) bit of T. */
7167
7168 int
7169 tree_int_cst_sign_bit (const_tree t)
7170 {
7171 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7172
7173 return wi::extract_uhwi (t, bitno, 1);
7174 }
7175
7176 /* Return an indication of the sign of the integer constant T.
7177 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7178 Note that -1 will never be returned if T's type is unsigned. */
7179
7180 int
7181 tree_int_cst_sgn (const_tree t)
7182 {
7183 if (wi::eq_p (t, 0))
7184 return 0;
7185 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7186 return 1;
7187 else if (wi::neg_p (t))
7188 return -1;
7189 else
7190 return 1;
7191 }
7192
7193 /* Return the minimum number of bits needed to represent VALUE in a
7194    signed or unsigned type; SGN says which.  */
7195
7196 unsigned int
7197 tree_int_cst_min_precision (tree value, signop sgn)
7198 {
7199 /* If the value is negative, compute its negative minus 1. The latter
7200 adjustment is because the absolute value of the largest negative value
7201 is one larger than the largest positive value. This is equivalent to
7202 a bit-wise negation, so use that operation instead. */
7203
7204 if (tree_int_cst_sgn (value) < 0)
7205 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7206
7207 /* Return the number of bits needed, taking into account the fact
7208 that we need one more bit for a signed than unsigned type.
7209    If VALUE is 0 or -1, the minimum precision is 1 no matter
7210    whether SGN is SIGNED or UNSIGNED.  */
7211
7212 if (integer_zerop (value))
7213 return 1;
7214 else
7215 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7216 }
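
/* A small worked example: for VALUE == 5, tree_floor_log2 (5) is 2, so the
   result is 3 bits for UNSIGNED and 4 bits for SIGNED; for VALUE == -1 in a
   signed type, ~(-1) is 0, so the integer_zerop branch returns 1.  */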
7217
7218 /* Return truthvalue of whether T1 is the same tree structure as T2.
7219 Return 1 if they are the same.
7220 Return 0 if they are understandably different.
7221 Return -1 if either contains tree structure not understood by
7222 this function. */
7223
7224 int
7225 simple_cst_equal (const_tree t1, const_tree t2)
7226 {
7227 enum tree_code code1, code2;
7228 int cmp;
7229 int i;
7230
7231 if (t1 == t2)
7232 return 1;
7233 if (t1 == 0 || t2 == 0)
7234 return 0;
7235
7236 code1 = TREE_CODE (t1);
7237 code2 = TREE_CODE (t2);
7238
7239 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7240 {
7241 if (CONVERT_EXPR_CODE_P (code2)
7242 || code2 == NON_LVALUE_EXPR)
7243 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7244 else
7245 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7246 }
7247
7248 else if (CONVERT_EXPR_CODE_P (code2)
7249 || code2 == NON_LVALUE_EXPR)
7250 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7251
7252 if (code1 != code2)
7253 return 0;
7254
7255 switch (code1)
7256 {
7257 case INTEGER_CST:
7258 return wi::to_widest (t1) == wi::to_widest (t2);
7259
7260 case REAL_CST:
7261 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7262
7263 case FIXED_CST:
7264 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7265
7266 case STRING_CST:
7267 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7268 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7269 TREE_STRING_LENGTH (t1)));
7270
7271 case CONSTRUCTOR:
7272 {
7273 unsigned HOST_WIDE_INT idx;
7274 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7275 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7276
7277 if (vec_safe_length (v1) != vec_safe_length (v2))
7278 return false;
7279
7280 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7281 	  /* ??? Should we also handle fields here?  */
7282 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7283 return false;
7284 return true;
7285 }
7286
7287 case SAVE_EXPR:
7288 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7289
7290 case CALL_EXPR:
7291 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7292 if (cmp <= 0)
7293 return cmp;
7294 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7295 return 0;
7296 {
7297 const_tree arg1, arg2;
7298 const_call_expr_arg_iterator iter1, iter2;
7299 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7300 arg2 = first_const_call_expr_arg (t2, &iter2);
7301 arg1 && arg2;
7302 arg1 = next_const_call_expr_arg (&iter1),
7303 arg2 = next_const_call_expr_arg (&iter2))
7304 {
7305 cmp = simple_cst_equal (arg1, arg2);
7306 if (cmp <= 0)
7307 return cmp;
7308 }
7309 return arg1 == arg2;
7310 }
7311
7312 case TARGET_EXPR:
7313 /* Special case: if either target is an unallocated VAR_DECL,
7314 it means that it's going to be unified with whatever the
7315 TARGET_EXPR is really supposed to initialize, so treat it
7316 as being equivalent to anything. */
7317 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7318 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7319 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7320 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7321 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7322 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7323 cmp = 1;
7324 else
7325 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7326
7327 if (cmp <= 0)
7328 return cmp;
7329
7330 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7331
7332 case WITH_CLEANUP_EXPR:
7333 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7334 if (cmp <= 0)
7335 return cmp;
7336
7337       return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7338
7339 case COMPONENT_REF:
7340 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7341 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7342
7343 return 0;
7344
7345 case VAR_DECL:
7346 case PARM_DECL:
7347 case CONST_DECL:
7348 case FUNCTION_DECL:
7349 return 0;
7350
7351 default:
7352 break;
7353 }
7354
7355 /* This general rule works for most tree codes. All exceptions should be
7356 handled above. If this is a language-specific tree code, we can't
7357 trust what might be in the operand, so say we don't know
7358 the situation. */
7359 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7360 return -1;
7361
7362 switch (TREE_CODE_CLASS (code1))
7363 {
7364 case tcc_unary:
7365 case tcc_binary:
7366 case tcc_comparison:
7367 case tcc_expression:
7368 case tcc_reference:
7369 case tcc_statement:
7370 cmp = 1;
7371 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7372 {
7373 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7374 if (cmp <= 0)
7375 return cmp;
7376 }
7377
7378 return cmp;
7379
7380 default:
7381 return -1;
7382 }
7383 }
7384
7385 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7386 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7387 than U, respectively. */
7388
7389 int
7390 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7391 {
7392 if (tree_int_cst_sgn (t) < 0)
7393 return -1;
7394 else if (!tree_fits_uhwi_p (t))
7395 return 1;
7396 else if (TREE_INT_CST_LOW (t) == u)
7397 return 0;
7398 else if (TREE_INT_CST_LOW (t) < u)
7399 return -1;
7400 else
7401 return 1;
7402 }
7403
7404 /* Return true if SIZE represents a constant size that is in bounds of
7405    what the middle-end and the backend accept (covering not more than
7406 half of the address-space). */
7407
7408 bool
7409 valid_constant_size_p (const_tree size)
7410 {
7411 if (! tree_fits_uhwi_p (size)
7412 || TREE_OVERFLOW (size)
7413 || tree_int_cst_sign_bit (size) != 0)
7414 return false;
7415 return true;
7416 }
7417
7418 /* Return the precision of the type, or for a complex or vector type the
7419 precision of the type of its elements. */
7420
7421 unsigned int
7422 element_precision (const_tree type)
7423 {
7424 enum tree_code code = TREE_CODE (type);
7425 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7426 type = TREE_TYPE (type);
7427
7428 return TYPE_PRECISION (type);
7429 }
7430
7431 /* Return true if CODE represents an associative tree code. Otherwise
7432 return false. */
7433 bool
7434 associative_tree_code (enum tree_code code)
7435 {
7436 switch (code)
7437 {
7438 case BIT_IOR_EXPR:
7439 case BIT_AND_EXPR:
7440 case BIT_XOR_EXPR:
7441 case PLUS_EXPR:
7442 case MULT_EXPR:
7443 case MIN_EXPR:
7444 case MAX_EXPR:
7445 return true;
7446
7447 default:
7448 break;
7449 }
7450 return false;
7451 }
7452
7453 /* Return true if CODE represents a commutative tree code. Otherwise
7454 return false. */
7455 bool
7456 commutative_tree_code (enum tree_code code)
7457 {
7458 switch (code)
7459 {
7460 case PLUS_EXPR:
7461 case MULT_EXPR:
7462 case MULT_HIGHPART_EXPR:
7463 case MIN_EXPR:
7464 case MAX_EXPR:
7465 case BIT_IOR_EXPR:
7466 case BIT_XOR_EXPR:
7467 case BIT_AND_EXPR:
7468 case NE_EXPR:
7469 case EQ_EXPR:
7470 case UNORDERED_EXPR:
7471 case ORDERED_EXPR:
7472 case UNEQ_EXPR:
7473 case LTGT_EXPR:
7474 case TRUTH_AND_EXPR:
7475 case TRUTH_XOR_EXPR:
7476 case TRUTH_OR_EXPR:
7477 case WIDEN_MULT_EXPR:
7478 case VEC_WIDEN_MULT_HI_EXPR:
7479 case VEC_WIDEN_MULT_LO_EXPR:
7480 case VEC_WIDEN_MULT_EVEN_EXPR:
7481 case VEC_WIDEN_MULT_ODD_EXPR:
7482 return true;
7483
7484 default:
7485 break;
7486 }
7487 return false;
7488 }
7489
7490 /* Return true if CODE represents a ternary tree code for which the
7491 first two operands are commutative. Otherwise return false. */
7492 bool
7493 commutative_ternary_tree_code (enum tree_code code)
7494 {
7495 switch (code)
7496 {
7497 case WIDEN_MULT_PLUS_EXPR:
7498 case WIDEN_MULT_MINUS_EXPR:
7499 case DOT_PROD_EXPR:
7500 case FMA_EXPR:
7501 return true;
7502
7503 default:
7504 break;
7505 }
7506 return false;
7507 }
7508
7509 namespace inchash
7510 {
7511
7512 /* Generate a hash value for an expression. This can be used iteratively
7513 by passing a previous result as the HSTATE argument.
7514
7515 This function is intended to produce the same hash for expressions which
7516 would compare equal using operand_equal_p. */
7517 void
7518 add_expr (const_tree t, inchash::hash &hstate)
7519 {
7520 int i;
7521 enum tree_code code;
7522 enum tree_code_class tclass;
7523
7524 if (t == NULL_TREE)
7525 {
7526 hstate.merge_hash (0);
7527 return;
7528 }
7529
7530 code = TREE_CODE (t);
7531
7532 switch (code)
7533 {
7534 /* Alas, constants aren't shared, so we can't rely on pointer
7535 identity. */
7536 case VOID_CST:
7537 hstate.merge_hash (0);
7538 return;
7539 case INTEGER_CST:
7540 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7541 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7542 return;
7543 case REAL_CST:
7544 {
7545 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7546 hstate.merge_hash (val2);
7547 return;
7548 }
7549 case FIXED_CST:
7550 {
7551 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7552 hstate.merge_hash (val2);
7553 return;
7554 }
7555 case STRING_CST:
7556 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7557 return;
7558 case COMPLEX_CST:
7559 inchash::add_expr (TREE_REALPART (t), hstate);
7560 inchash::add_expr (TREE_IMAGPART (t), hstate);
7561 return;
7562 case VECTOR_CST:
7563 {
7564 unsigned i;
7565 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7566 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7567 return;
7568 }
7569 case SSA_NAME:
7570       /* SSA names are unique, so hashing the version number suffices.  */
7571 hstate.add_wide_int (SSA_NAME_VERSION (t));
7572 return;
7573 case PLACEHOLDER_EXPR:
7574 /* The node itself doesn't matter. */
7575 return;
7576 case TREE_LIST:
7577 /* A list of expressions, for a CALL_EXPR or as the elements of a
7578 VECTOR_CST. */
7579 for (; t; t = TREE_CHAIN (t))
7580 inchash::add_expr (TREE_VALUE (t), hstate);
7581 return;
7582 case CONSTRUCTOR:
7583 {
7584 unsigned HOST_WIDE_INT idx;
7585 tree field, value;
7586 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7587 {
7588 inchash::add_expr (field, hstate);
7589 inchash::add_expr (value, hstate);
7590 }
7591 return;
7592 }
7593 case FUNCTION_DECL:
7594 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7595 Otherwise nodes that compare equal according to operand_equal_p might
7596 get different hash codes. However, don't do this for machine specific
7597 or front end builtins, since the function code is overloaded in those
7598 cases. */
7599 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7600 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7601 {
7602 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7603 code = TREE_CODE (t);
7604 }
7605 /* FALL THROUGH */
7606 default:
7607 tclass = TREE_CODE_CLASS (code);
7608
7609 if (tclass == tcc_declaration)
7610 {
7611 	  /* DECLs have a unique ID.  */
7612 hstate.add_wide_int (DECL_UID (t));
7613 }
7614 else
7615 {
7616 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7617
7618 hstate.add_object (code);
7619
7620 /* Don't hash the type, that can lead to having nodes which
7621 compare equal according to operand_equal_p, but which
7622 have different hash codes. */
7623 if (CONVERT_EXPR_CODE_P (code)
7624 || code == NON_LVALUE_EXPR)
7625 {
7626 	      /* Make sure to include signedness in the hash computation.  */
7627 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7628 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7629 }
7630
7631 else if (commutative_tree_code (code))
7632 {
7633 /* It's a commutative expression. We want to hash it the same
7634 however it appears. We do this by first hashing both operands
7635 and then rehashing based on the order of their independent
7636 hashes. */
7637 inchash::hash one, two;
7638 inchash::add_expr (TREE_OPERAND (t, 0), one);
7639 inchash::add_expr (TREE_OPERAND (t, 1), two);
7640 hstate.add_commutative (one, two);
7641 }
7642 else
7643 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7644 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7645 }
7646 return;
7647 }
7648 }
7649
7650 }
7651
7652 /* Constructors for pointer, array and function types.
7653 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7654 constructed by language-dependent code, not here.) */
7655
7656 /* Construct, lay out and return the type of pointers to TO_TYPE with
7657 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7658 reference all of memory. If such a type has already been
7659 constructed, reuse it. */
7660
7661 tree
7662 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7663 bool can_alias_all)
7664 {
7665 tree t;
7666
7667 if (to_type == error_mark_node)
7668 return error_mark_node;
7669
7670 /* If the pointed-to type has the may_alias attribute set, force
7671 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7672 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7673 can_alias_all = true;
7674
7675 /* In some cases, languages will have things that aren't a POINTER_TYPE
7676 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7677 In that case, return that type without regard to the rest of our
7678 operands.
7679
7680 ??? This is a kludge, but consistent with the way this function has
7681 always operated and there doesn't seem to be a good way to avoid this
7682 at the moment. */
7683 if (TYPE_POINTER_TO (to_type) != 0
7684 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7685 return TYPE_POINTER_TO (to_type);
7686
7687 /* First, if we already have a type for pointers to TO_TYPE and it's
7688 the proper mode, use it. */
7689 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7690 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7691 return t;
7692
7693 t = make_node (POINTER_TYPE);
7694
7695 TREE_TYPE (t) = to_type;
7696 SET_TYPE_MODE (t, mode);
7697 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7698 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7699 TYPE_POINTER_TO (to_type) = t;
7700
7701 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7702 SET_TYPE_STRUCTURAL_EQUALITY (t);
7703 else if (TYPE_CANONICAL (to_type) != to_type)
7704 TYPE_CANONICAL (t)
7705 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7706 mode, can_alias_all);
7707
7708 /* Lay out the type. This function has many callers that are concerned
7709 with expression-construction, and this simplifies them all. */
7710 layout_type (t);
7711
7712 return t;
7713 }
7714
7715 /* By default build pointers in ptr_mode. */
7716
7717 tree
7718 build_pointer_type (tree to_type)
7719 {
7720   addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7721 : TYPE_ADDR_SPACE (to_type);
7722 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7723 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7724 }
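
/* A minimal usage sketch: repeated requests for the same pointer type are
   served from the TYPE_POINTER_TO chain, e.g.

     tree p1 = build_pointer_type (char_type_node);
     tree p2 = build_pointer_type (char_type_node);

   yields p1 == p2; the second call finds the node built by the first one
   before making a new POINTER_TYPE.  */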
7725
7726 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7727
7728 tree
7729 build_reference_type_for_mode (tree to_type, machine_mode mode,
7730 bool can_alias_all)
7731 {
7732 tree t;
7733
7734 if (to_type == error_mark_node)
7735 return error_mark_node;
7736
7737 /* If the pointed-to type has the may_alias attribute set, force
7738 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7739 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7740 can_alias_all = true;
7741
7742 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7743 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7744 In that case, return that type without regard to the rest of our
7745 operands.
7746
7747 ??? This is a kludge, but consistent with the way this function has
7748 always operated and there doesn't seem to be a good way to avoid this
7749 at the moment. */
7750 if (TYPE_REFERENCE_TO (to_type) != 0
7751 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7752 return TYPE_REFERENCE_TO (to_type);
7753
7754   /* First, if we already have a type for references to TO_TYPE and it's
7755 the proper mode, use it. */
7756 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7757 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7758 return t;
7759
7760 t = make_node (REFERENCE_TYPE);
7761
7762 TREE_TYPE (t) = to_type;
7763 SET_TYPE_MODE (t, mode);
7764 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7765 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7766 TYPE_REFERENCE_TO (to_type) = t;
7767
7768 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7769 SET_TYPE_STRUCTURAL_EQUALITY (t);
7770 else if (TYPE_CANONICAL (to_type) != to_type)
7771 TYPE_CANONICAL (t)
7772 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7773 mode, can_alias_all);
7774
7775 layout_type (t);
7776
7777 return t;
7778 }
7779
7780
7781 /* Build the node for the type of references-to-TO_TYPE by default
7782 in ptr_mode. */
7783
7784 tree
7785 build_reference_type (tree to_type)
7786 {
7787 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7788 : TYPE_ADDR_SPACE (to_type);
7789 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7790 return build_reference_type_for_mode (to_type, pointer_mode, false);
7791 }
7792
7793 #define MAX_INT_CACHED_PREC \
7794 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7795 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7796
7797 /* Builds a signed or unsigned integer type of precision PRECISION.
7798 Used for C bitfields whose precision does not match that of
7799 built-in target types. */
7800 tree
7801 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7802 int unsignedp)
7803 {
7804 tree itype, ret;
7805
7806 if (unsignedp)
7807 unsignedp = MAX_INT_CACHED_PREC + 1;
7808
7809 if (precision <= MAX_INT_CACHED_PREC)
7810 {
7811 itype = nonstandard_integer_type_cache[precision + unsignedp];
7812 if (itype)
7813 return itype;
7814 }
7815
7816 itype = make_node (INTEGER_TYPE);
7817 TYPE_PRECISION (itype) = precision;
7818
7819 if (unsignedp)
7820 fixup_unsigned_type (itype);
7821 else
7822 fixup_signed_type (itype);
7823
7824 ret = itype;
7825 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7826 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7827 if (precision <= MAX_INT_CACHED_PREC)
7828 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7829
7830 return ret;
7831 }
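
/* For example (an illustrative sketch), a C front end laying out the
   bit-field "unsigned int x : 24;" could obtain its type with

     tree t24 = build_nonstandard_integer_type (24, /*unsignedp=*/1);

   Repeated calls with the same arguments return the cached node as long
   as the precision is at most MAX_INT_CACHED_PREC.  */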
7832
7833 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7834 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7835 is true, reuse such a type that has already been constructed. */
7836
7837 static tree
7838 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7839 {
7840 tree itype = make_node (INTEGER_TYPE);
7841 inchash::hash hstate;
7842
7843 TREE_TYPE (itype) = type;
7844
7845 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7846 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7847
7848 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7849 SET_TYPE_MODE (itype, TYPE_MODE (type));
7850 TYPE_SIZE (itype) = TYPE_SIZE (type);
7851 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7852 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7853 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7854
7855 if (!shared)
7856 return itype;
7857
7858 if ((TYPE_MIN_VALUE (itype)
7859 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7860 || (TYPE_MAX_VALUE (itype)
7861 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7862 {
7863 /* Since we cannot reliably merge this type, we need to compare it using
7864 structural equality checks. */
7865 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7866 return itype;
7867 }
7868
7869 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7870 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7871 hstate.merge_hash (TYPE_HASH (type));
7872 itype = type_hash_canon (hstate.end (), itype);
7873
7874 return itype;
7875 }
7876
7877 /* Wrapper around build_range_type_1 with SHARED set to true. */
7878
7879 tree
7880 build_range_type (tree type, tree lowval, tree highval)
7881 {
7882 return build_range_type_1 (type, lowval, highval, true);
7883 }
7884
7885 /* Wrapper around build_range_type_1 with SHARED set to false. */
7886
7887 tree
7888 build_nonshared_range_type (tree type, tree lowval, tree highval)
7889 {
7890 return build_range_type_1 (type, lowval, highval, false);
7891 }
7892
7893 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7894 MAXVAL should be the maximum value in the domain
7895 (one less than the length of the array).
7896
7897 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7898 We don't enforce this limit; that is up to the caller (e.g. the language front end).
7899 The limit exists because the result is a signed type and we don't handle
7900 sizes that use more than one HOST_WIDE_INT. */
7901
7902 tree
7903 build_index_type (tree maxval)
7904 {
7905 return build_range_type (sizetype, size_zero_node, maxval);
7906 }
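
/* Illustrative sketch: the TYPE_DOMAIN of a 10-element array is the
   range [0, 9] in sizetype, i.e.

     tree domain = build_index_type (size_int (9));

   which is equivalent to
   build_range_type (sizetype, size_zero_node, size_int (9)).  */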
7907
7908 /* Return true if the debug information for TYPE, a subtype, should be emitted
7909 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7910 high bound, respectively. Sometimes doing so unnecessarily obfuscates the
7911 debug info and doesn't reflect the source code. */
7912
7913 bool
7914 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7915 {
7916 tree base_type = TREE_TYPE (type), low, high;
7917
7918 /* Subrange types have a base type which is an integral type. */
7919 if (!INTEGRAL_TYPE_P (base_type))
7920 return false;
7921
7922 /* Get the real bounds of the subtype. */
7923 if (lang_hooks.types.get_subrange_bounds)
7924 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7925 else
7926 {
7927 low = TYPE_MIN_VALUE (type);
7928 high = TYPE_MAX_VALUE (type);
7929 }
7930
7931 /* If the type and its base type have the same representation and the same
7932 name, then the type is not a subrange but a copy of the base type. */
7933 if ((TREE_CODE (base_type) == INTEGER_TYPE
7934 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7935 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7936 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7937 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7938 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7939 return false;
7940
7941 if (lowval)
7942 *lowval = low;
7943 if (highval)
7944 *highval = high;
7945 return true;
7946 }
7947
7948 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7949 and number of elements specified by the range of values of INDEX_TYPE.
7950 If SHARED is true, reuse such a type that has already been constructed. */
7951
7952 static tree
7953 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7954 {
7955 tree t;
7956
7957 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7958 {
7959 error ("arrays of functions are not meaningful");
7960 elt_type = integer_type_node;
7961 }
7962
7963 t = make_node (ARRAY_TYPE);
7964 TREE_TYPE (t) = elt_type;
7965 TYPE_DOMAIN (t) = index_type;
7966 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7967 layout_type (t);
7968
7969 /* If the element type is incomplete at this point we get marked for
7970 structural equality. Do not record these types in the canonical
7971 type hashtable. */
7972 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7973 return t;
7974
7975 if (shared)
7976 {
7977 inchash::hash hstate;
7978 hstate.add_object (TYPE_HASH (elt_type));
7979 if (index_type)
7980 hstate.add_object (TYPE_HASH (index_type));
7981 t = type_hash_canon (hstate.end (), t);
7982 }
7983
7984 if (TYPE_CANONICAL (t) == t)
7985 {
7986 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7987 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7988 SET_TYPE_STRUCTURAL_EQUALITY (t);
7989 else if (TYPE_CANONICAL (elt_type) != elt_type
7990 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7991 TYPE_CANONICAL (t)
7992 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7993 index_type
7994 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7995 shared);
7996 }
7997
7998 return t;
7999 }
8000
8001 /* Wrapper around build_array_type_1 with SHARED set to true. */
8002
8003 tree
8004 build_array_type (tree elt_type, tree index_type)
8005 {
8006 return build_array_type_1 (elt_type, index_type, true);
8007 }
8008
8009 /* Wrapper around build_array_type_1 with SHARED set to false. */
8010
8011 tree
8012 build_nonshared_array_type (tree elt_type, tree index_type)
8013 {
8014 return build_array_type_1 (elt_type, index_type, false);
8015 }
8016
8017 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8018 sizetype. */
8019
8020 tree
8021 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8022 {
8023 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8024 }
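
/* Illustrative sketch: the type "int[4]" can be built either as

     tree a4 = build_array_type_nelts (integer_type_node, 4);

   or, equivalently, by constructing the index type by hand:

     tree a4 = build_array_type (integer_type_node,
                                 build_index_type (size_int (3)));  */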
8025
8026 /* Recursively examines the element type of TYPE until a non-array
8027 element type is found, and returns it. */
8028
8029 tree
8030 strip_array_types (tree type)
8031 {
8032 while (TREE_CODE (type) == ARRAY_TYPE)
8033 type = TREE_TYPE (type);
8034
8035 return type;
8036 }
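
/* Illustrative sketch: given a hypothetical tree A35_TYPE representing
   the multi-dimensional array type "int[3][5]",

     tree elt = strip_array_types (a35_type);

   returns integer_type_node, the innermost non-array element type.  */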
8037
8038 /* Computes the canonical argument types from the argument type list
8039 ARGTYPES.
8040
8041 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8042 on entry to this function, or if any of the ARGTYPES are
8043 structural.
8044
8045 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8046 true on entry to this function, or if any of the ARGTYPES are
8047 non-canonical.
8048
8049 Returns a canonical argument list, which may be ARGTYPES when the
8050 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8051 true) or would not differ from ARGTYPES. */
8052
8053 static tree
8054 maybe_canonicalize_argtypes (tree argtypes,
8055 bool *any_structural_p,
8056 bool *any_noncanonical_p)
8057 {
8058 tree arg;
8059 bool any_noncanonical_argtypes_p = false;
8060
8061 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8062 {
8063 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8064 /* Fail gracefully by stating that the type is structural. */
8065 *any_structural_p = true;
8066 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8067 *any_structural_p = true;
8068 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8069 || TREE_PURPOSE (arg))
8070 /* If the argument has a default argument, we consider it
8071 non-canonical even though the type itself is canonical.
8072 That way, different variants of function and method types
8073 with default arguments will all point to the variant with
8074 no defaults as their canonical type. */
8075 any_noncanonical_argtypes_p = true;
8076 }
8077
8078 if (*any_structural_p)
8079 return argtypes;
8080
8081 if (any_noncanonical_argtypes_p)
8082 {
8083 /* Build the canonical list of argument types. */
8084 tree canon_argtypes = NULL_TREE;
8085 bool is_void = false;
8086
8087 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8088 {
8089 if (arg == void_list_node)
8090 is_void = true;
8091 else
8092 canon_argtypes = tree_cons (NULL_TREE,
8093 TYPE_CANONICAL (TREE_VALUE (arg)),
8094 canon_argtypes);
8095 }
8096
8097 canon_argtypes = nreverse (canon_argtypes);
8098 if (is_void)
8099 canon_argtypes = chainon (canon_argtypes, void_list_node);
8100
8101 /* There is a non-canonical type. */
8102 *any_noncanonical_p = true;
8103 return canon_argtypes;
8104 }
8105
8106 /* The canonical argument types are the same as ARGTYPES. */
8107 return argtypes;
8108 }
8109
8110 /* Construct, lay out and return
8111 the type of functions returning type VALUE_TYPE
8112 given arguments of types ARG_TYPES.
8113 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8114 are data type nodes for the arguments of the function.
8115 If such a type has already been constructed, reuse it. */
8116
8117 tree
8118 build_function_type (tree value_type, tree arg_types)
8119 {
8120 tree t;
8121 inchash::hash hstate;
8122 bool any_structural_p, any_noncanonical_p;
8123 tree canon_argtypes;
8124
8125 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8126 {
8127 error ("function return type cannot be function");
8128 value_type = integer_type_node;
8129 }
8130
8131 /* Make a node of the sort we want. */
8132 t = make_node (FUNCTION_TYPE);
8133 TREE_TYPE (t) = value_type;
8134 TYPE_ARG_TYPES (t) = arg_types;
8135
8136 /* If we already have such a type, use the old one. */
8137 hstate.add_object (TYPE_HASH (value_type));
8138 type_hash_list (arg_types, hstate);
8139 t = type_hash_canon (hstate.end (), t);
8140
8141 /* Set up the canonical type. */
8142 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8143 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8144 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8145 &any_structural_p,
8146 &any_noncanonical_p);
8147 if (any_structural_p)
8148 SET_TYPE_STRUCTURAL_EQUALITY (t);
8149 else if (any_noncanonical_p)
8150 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8151 canon_argtypes);
8152
8153 if (!COMPLETE_TYPE_P (t))
8154 layout_type (t);
8155 return t;
8156 }
8157
8158 /* Build a function type. The RETURN_TYPE is the type returned by the
8159 function. If VAARGS is set, no void_type_node is appended to
8160 the list. ARGP must always be terminated by a NULL_TREE. */
8161
8162 static tree
8163 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8164 {
8165 tree t, args, last;
8166
8167 t = va_arg (argp, tree);
8168 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8169 args = tree_cons (NULL_TREE, t, args);
8170
8171 if (vaargs)
8172 {
8173 last = args;
8174 if (args != NULL_TREE)
8175 args = nreverse (args);
8176 gcc_assert (last != void_list_node);
8177 }
8178 else if (args == NULL_TREE)
8179 args = void_list_node;
8180 else
8181 {
8182 last = args;
8183 args = nreverse (args);
8184 TREE_CHAIN (last) = void_list_node;
8185 }
8186 args = build_function_type (return_type, args);
8187
8188 return args;
8189 }
8190
8191 /* Build a function type. The RETURN_TYPE is the type returned by the
8192 function. If additional arguments are provided, they are
8193 additional argument types. The list of argument types must always
8194 be terminated by NULL_TREE. */
8195
8196 tree
8197 build_function_type_list (tree return_type, ...)
8198 {
8199 tree args;
8200 va_list p;
8201
8202 va_start (p, return_type);
8203 args = build_function_type_list_1 (false, return_type, p);
8204 va_end (p);
8205 return args;
8206 }
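
/* Usage sketch (illustrative only): the type of "int f (double, char *)"
   can be built with

     tree fntype
       = build_function_type_list (integer_type_node,
                                   double_type_node,
                                   build_pointer_type (char_type_node),
                                   NULL_TREE);

   The trailing NULL_TREE terminates the argument list as required.  */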
8207
8208 /* Build a variable argument function type. The RETURN_TYPE is the
8209 type returned by the function. If additional arguments are provided,
8210 they are additional argument types. The list of argument types must
8211 always be terminated by NULL_TREE. */
8212
8213 tree
8214 build_varargs_function_type_list (tree return_type, ...)
8215 {
8216 tree args;
8217 va_list p;
8218
8219 va_start (p, return_type);
8220 args = build_function_type_list_1 (true, return_type, p);
8221 va_end (p);
8222
8223 return args;
8224 }
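
/* Usage sketch (illustrative only): a printf-like prototype
   "int f (char *, ...)" can be built with

     tree fntype
       = build_varargs_function_type_list (integer_type_node,
                                           build_pointer_type (char_type_node),
                                           NULL_TREE);

   Because this is a varargs type, no terminating void_list_node is
   appended to the argument list.  */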
8225
8226 /* Build a function type. RETURN_TYPE is the type returned by the
8227 function; VAARGS indicates whether the function takes varargs. The
8228 function takes N named arguments, the types of which are provided in
8229 ARG_TYPES. */
8230
8231 static tree
8232 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8233 tree *arg_types)
8234 {
8235 int i;
8236 tree t = vaargs ? NULL_TREE : void_list_node;
8237
8238 for (i = n - 1; i >= 0; i--)
8239 t = tree_cons (NULL_TREE, arg_types[i], t);
8240
8241 return build_function_type (return_type, t);
8242 }
8243
8244 /* Build a function type. RETURN_TYPE is the type returned by the
8245 function. The function takes N named arguments, the types of which
8246 are provided in ARG_TYPES. */
8247
8248 tree
8249 build_function_type_array (tree return_type, int n, tree *arg_types)
8250 {
8251 return build_function_type_array_1 (false, return_type, n, arg_types);
8252 }
8253
8254 /* Build a variable argument function type. RETURN_TYPE is the type
8255 returned by the function. The function takes N named arguments, the
8256 types of which are provided in ARG_TYPES. */
8257
8258 tree
8259 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8260 {
8261 return build_function_type_array_1 (true, return_type, n, arg_types);
8262 }
8263
8264 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8265 and ARGTYPES (a TREE_LIST) are the return type and argument types
8266 for the method. An implicit additional parameter (of type
8267 pointer-to-BASETYPE) is added to the ARGTYPES. */
8268
8269 tree
8270 build_method_type_directly (tree basetype,
8271 tree rettype,
8272 tree argtypes)
8273 {
8274 tree t;
8275 tree ptype;
8276 inchash::hash hstate;
8277 bool any_structural_p, any_noncanonical_p;
8278 tree canon_argtypes;
8279
8280 /* Make a node of the sort we want. */
8281 t = make_node (METHOD_TYPE);
8282
8283 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8284 TREE_TYPE (t) = rettype;
8285 ptype = build_pointer_type (basetype);
8286
8287 /* The actual arglist for this function includes a "hidden" argument
8288 which is "this". Put it into the list of argument types. */
8289 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8290 TYPE_ARG_TYPES (t) = argtypes;
8291
8292 /* If we already have such a type, use the old one. */
8293 hstate.add_object (TYPE_HASH (basetype));
8294 hstate.add_object (TYPE_HASH (rettype));
8295 type_hash_list (argtypes, hstate);
8296 t = type_hash_canon (hstate.end (), t);
8297
8298 /* Set up the canonical type. */
8299 any_structural_p
8300 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8301 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8302 any_noncanonical_p
8303 = (TYPE_CANONICAL (basetype) != basetype
8304 || TYPE_CANONICAL (rettype) != rettype);
8305 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8306 &any_structural_p,
8307 &any_noncanonical_p);
8308 if (any_structural_p)
8309 SET_TYPE_STRUCTURAL_EQUALITY (t);
8310 else if (any_noncanonical_p)
8311 TYPE_CANONICAL (t)
8312 = build_method_type_directly (TYPE_CANONICAL (basetype),
8313 TYPE_CANONICAL (rettype),
8314 canon_argtypes);
8315 if (!COMPLETE_TYPE_P (t))
8316 layout_type (t);
8317
8318 return t;
8319 }
8320
8321 /* Construct, lay out and return the type of methods belonging to class
8322 BASETYPE and whose arguments and values are described by TYPE.
8323 If that type exists already, reuse it.
8324 TYPE must be a FUNCTION_TYPE node. */
8325
8326 tree
8327 build_method_type (tree basetype, tree type)
8328 {
8329 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8330
8331 return build_method_type_directly (basetype,
8332 TREE_TYPE (type),
8333 TYPE_ARG_TYPES (type));
8334 }
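
/* Illustrative sketch: for a C++ member function "int C::f (double)",
   where C_TYPE is a hypothetical RECORD_TYPE for C, a front end could
   build the METHOD_TYPE as

     tree mtype
       = build_method_type_directly (c_type, integer_type_node,
                                     tree_cons (NULL_TREE,
                                                double_type_node,
                                                void_list_node));

   The implicit "this" argument (of type pointer-to-C) is prepended to
   the argument list automatically.  */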
8335
8336 /* Construct, lay out and return the type of offsets to a value
8337 of type TYPE, within an object of type BASETYPE.
8338 If a suitable offset type exists already, reuse it. */
8339
8340 tree
8341 build_offset_type (tree basetype, tree type)
8342 {
8343 tree t;
8344 inchash::hash hstate;
8345
8346 /* Make a node of the sort we want. */
8347 t = make_node (OFFSET_TYPE);
8348
8349 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8350 TREE_TYPE (t) = type;
8351
8352 /* If we already have such a type, use the old one. */
8353 hstate.add_object (TYPE_HASH (basetype));
8354 hstate.add_object (TYPE_HASH (type));
8355 t = type_hash_canon (hstate.end (), t);
8356
8357 if (!COMPLETE_TYPE_P (t))
8358 layout_type (t);
8359
8360 if (TYPE_CANONICAL (t) == t)
8361 {
8362 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8363 || TYPE_STRUCTURAL_EQUALITY_P (type))
8364 SET_TYPE_STRUCTURAL_EQUALITY (t);
8365 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8366 || TYPE_CANONICAL (type) != type)
8367 TYPE_CANONICAL (t)
8368 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8369 TYPE_CANONICAL (type));
8370 }
8371
8372 return t;
8373 }
8374
8375 /* Create a complex type whose components are COMPONENT_TYPE. */
8376
8377 tree
8378 build_complex_type (tree component_type)
8379 {
8380 tree t;
8381 inchash::hash hstate;
8382
8383 gcc_assert (INTEGRAL_TYPE_P (component_type)
8384 || SCALAR_FLOAT_TYPE_P (component_type)
8385 || FIXED_POINT_TYPE_P (component_type));
8386
8387 /* Make a node of the sort we want. */
8388 t = make_node (COMPLEX_TYPE);
8389
8390 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8391
8392 /* If we already have such a type, use the old one. */
8393 hstate.add_object (TYPE_HASH (component_type));
8394 t = type_hash_canon (hstate.end (), t);
8395
8396 if (!COMPLETE_TYPE_P (t))
8397 layout_type (t);
8398
8399 if (TYPE_CANONICAL (t) == t)
8400 {
8401 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8402 SET_TYPE_STRUCTURAL_EQUALITY (t);
8403 else if (TYPE_CANONICAL (component_type) != component_type)
8404 TYPE_CANONICAL (t)
8405 = build_complex_type (TYPE_CANONICAL (component_type));
8406 }
8407
8408 /* We need to create a name, since complex is a fundamental type. */
8409 if (! TYPE_NAME (t))
8410 {
8411 const char *name;
8412 if (component_type == char_type_node)
8413 name = "complex char";
8414 else if (component_type == signed_char_type_node)
8415 name = "complex signed char";
8416 else if (component_type == unsigned_char_type_node)
8417 name = "complex unsigned char";
8418 else if (component_type == short_integer_type_node)
8419 name = "complex short int";
8420 else if (component_type == short_unsigned_type_node)
8421 name = "complex short unsigned int";
8422 else if (component_type == integer_type_node)
8423 name = "complex int";
8424 else if (component_type == unsigned_type_node)
8425 name = "complex unsigned int";
8426 else if (component_type == long_integer_type_node)
8427 name = "complex long int";
8428 else if (component_type == long_unsigned_type_node)
8429 name = "complex long unsigned int";
8430 else if (component_type == long_long_integer_type_node)
8431 name = "complex long long int";
8432 else if (component_type == long_long_unsigned_type_node)
8433 name = "complex long long unsigned int";
8434 else
8435 name = 0;
8436
8437 if (name != 0)
8438 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8439 get_identifier (name), t);
8440 }
8441
8442 return build_qualified_type (t, TYPE_QUALS (component_type));
8443 }
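
/* Illustrative sketch: the C99 type "_Complex double" can be obtained as

     tree cdbl = build_complex_type (double_type_node);

   while an integral example such as

     tree cint = build_complex_type (integer_type_node);

   additionally receives the name "complex int" from the table above.  */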
8444
8445 /* If TYPE is a real or complex floating-point type and the target
8446 does not directly support arithmetic on TYPE then return the wider
8447 type to be used for arithmetic on TYPE. Otherwise, return
8448 NULL_TREE. */
8449
8450 tree
8451 excess_precision_type (tree type)
8452 {
8453 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8454 {
8455 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8456 switch (TREE_CODE (type))
8457 {
8458 case REAL_TYPE:
8459 switch (flt_eval_method)
8460 {
8461 case 1:
8462 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8463 return double_type_node;
8464 break;
8465 case 2:
8466 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8467 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8468 return long_double_type_node;
8469 break;
8470 default:
8471 gcc_unreachable ();
8472 }
8473 break;
8474 case COMPLEX_TYPE:
8475 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8476 return NULL_TREE;
8477 switch (flt_eval_method)
8478 {
8479 case 1:
8480 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8481 return complex_double_type_node;
8482 break;
8483 case 2:
8484 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8485 || (TYPE_MODE (TREE_TYPE (type))
8486 == TYPE_MODE (double_type_node)))
8487 return complex_long_double_type_node;
8488 break;
8489 default:
8490 gcc_unreachable ();
8491 }
8492 break;
8493 default:
8494 break;
8495 }
8496 }
8497 return NULL_TREE;
8498 }
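
/* Illustrative sketch: with standard excess precision in effect on a
   target whose FLT_EVAL_METHOD is 2 (classic x87 math, for instance),

     excess_precision_type (float_type_node)

   returns long_double_type_node, directing arithmetic on float values
   to be carried out in long double.  */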
8499 \f
8500 /* Return OP, stripped of any conversions to wider types as much as is safe.
8501 Converting the value back to OP's type makes a value equivalent to OP.
8502
8503 If FOR_TYPE is nonzero, we return a value which, if converted to
8504 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8505
8506 OP must have integer, real or enumeral type. Pointers are not allowed!
8507
8508 There are some cases where the obvious value we could return
8509 would regenerate to OP if converted to OP's type,
8510 but would not extend like OP to wider types.
8511 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8512 For example, if OP is (unsigned short)(signed char)-1,
8513 we avoid returning (signed char)-1 if FOR_TYPE is int,
8514 even though extending that to an unsigned short would regenerate OP,
8515 since the result of extending (signed char)-1 to (int)
8516 is different from (int) OP. */
8517
8518 tree
8519 get_unwidened (tree op, tree for_type)
8520 {
8521 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8522 tree type = TREE_TYPE (op);
8523 unsigned final_prec
8524 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8525 int uns
8526 = (for_type != 0 && for_type != type
8527 && final_prec > TYPE_PRECISION (type)
8528 && TYPE_UNSIGNED (type));
8529 tree win = op;
8530
8531 while (CONVERT_EXPR_P (op))
8532 {
8533 int bitschange;
8534
8535 /* TYPE_PRECISION on vector types has different meaning
8536 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8537 so avoid them here. */
8538 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8539 break;
8540
8541 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8542 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8543
8544 /* Truncations are many-one so cannot be removed.
8545 Unless we are later going to truncate down even farther. */
8546 if (bitschange < 0
8547 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8548 break;
8549
8550 /* See what's inside this conversion. If we decide to strip it,
8551 we will set WIN. */
8552 op = TREE_OPERAND (op, 0);
8553
8554 /* If we have not stripped any zero-extensions (uns is 0),
8555 we can strip any kind of extension.
8556 If we have previously stripped a zero-extension,
8557 only zero-extensions can safely be stripped.
8558 Any extension can be stripped if the bits it would produce
8559 are all going to be discarded later by truncating to FOR_TYPE. */
8560
8561 if (bitschange > 0)
8562 {
8563 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8564 win = op;
8565 /* TYPE_UNSIGNED says whether this is a zero-extension.
8566 Let's avoid computing it if it does not affect WIN
8567 and if UNS will not be needed again. */
8568 if ((uns
8569 || CONVERT_EXPR_P (op))
8570 && TYPE_UNSIGNED (TREE_TYPE (op)))
8571 {
8572 uns = 1;
8573 win = op;
8574 }
8575 }
8576 }
8577
8578 /* If we finally reach a constant see if it fits in for_type and
8579 in that case convert it. */
8580 if (for_type
8581 && TREE_CODE (win) == INTEGER_CST
8582 && TREE_TYPE (win) != for_type
8583 && int_fits_type_p (win, for_type))
8584 win = fold_convert (for_type, win);
8585
8586 return win;
8587 }
8588 \f
8589 /* Return OP or a simpler expression for a narrower value
8590 which can be sign-extended or zero-extended to give back OP.
8591 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8592 or 0 if the value should be sign-extended. */
8593
8594 tree
8595 get_narrower (tree op, int *unsignedp_ptr)
8596 {
8597 int uns = 0;
8598 int first = 1;
8599 tree win = op;
8600 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8601
8602 while (TREE_CODE (op) == NOP_EXPR)
8603 {
8604 int bitschange
8605 = (TYPE_PRECISION (TREE_TYPE (op))
8606 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8607
8608 /* Truncations are many-one so cannot be removed. */
8609 if (bitschange < 0)
8610 break;
8611
8612 /* See what's inside this conversion. If we decide to strip it,
8613 we will set WIN. */
8614
8615 if (bitschange > 0)
8616 {
8617 op = TREE_OPERAND (op, 0);
8618 /* An extension: the outermost one can be stripped,
8619 but remember whether it is zero or sign extension. */
8620 if (first)
8621 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8622 /* Otherwise, if a sign extension has been stripped,
8623 only sign extensions can now be stripped;
8624 if a zero extension has been stripped, only zero-extensions. */
8625 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8626 break;
8627 first = 0;
8628 }
8629 else /* bitschange == 0 */
8630 {
8631 /* A change in nominal type can always be stripped, but we must
8632 preserve the unsignedness. */
8633 if (first)
8634 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8635 first = 0;
8636 op = TREE_OPERAND (op, 0);
8637 /* Keep trying to narrow, but don't assign op to win if it
8638 would turn an integral type into something else. */
8639 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8640 continue;
8641 }
8642
8643 win = op;
8644 }
8645
8646 if (TREE_CODE (op) == COMPONENT_REF
8647 /* Since type_for_size always gives an integer type. */
8648 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8649 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8650 /* Ensure field is laid out already. */
8651 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8652 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8653 {
8654 unsigned HOST_WIDE_INT innerprec
8655 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8656 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8657 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8658 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8659
8660 /* We can get this structure field in a narrower type that fits it,
8661 but the resulting extension to its nominal type (a fullword type)
8662 must satisfy the same conditions as for other extensions.
8663
8664 Do this only for fields that are aligned (not bit-fields),
8665 because when bit-field insns will be used there is no
8666 advantage in doing this. */
8667
8668 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8669 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8670 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8671 && type != 0)
8672 {
8673 if (first)
8674 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8675 win = fold_convert (type, op);
8676 }
8677 }
8678
8679 *unsignedp_ptr = uns;
8680 return win;
8681 }
8682 \f
8683 /* Returns true if integer constant C has a value that is permissible
8684 for type TYPE (an INTEGER_TYPE). */
8685
8686 bool
8687 int_fits_type_p (const_tree c, const_tree type)
8688 {
8689 tree type_low_bound, type_high_bound;
8690 bool ok_for_low_bound, ok_for_high_bound;
8691 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8692
8693 retry:
8694 type_low_bound = TYPE_MIN_VALUE (type);
8695 type_high_bound = TYPE_MAX_VALUE (type);
8696
8697 /* If at least one bound of the type is a constant integer, we can check
8698 ourselves and maybe make a decision. If no such decision is possible, but
8699 this type is a subtype, try checking against that. Otherwise, use
8700 fits_to_tree_p, which checks against the precision.
8701
8702 Compute the status for each possibly constant bound, and return if we see
8703 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1
8704 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1
8705 for "constant known to fit". */
8706
8707 /* Check if c >= type_low_bound. */
8708 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8709 {
8710 if (tree_int_cst_lt (c, type_low_bound))
8711 return false;
8712 ok_for_low_bound = true;
8713 }
8714 else
8715 ok_for_low_bound = false;
8716
8717 /* Check if c <= type_high_bound. */
8718 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8719 {
8720 if (tree_int_cst_lt (type_high_bound, c))
8721 return false;
8722 ok_for_high_bound = true;
8723 }
8724 else
8725 ok_for_high_bound = false;
8726
8727 /* If the constant fits both bounds, the result is known. */
8728 if (ok_for_low_bound && ok_for_high_bound)
8729 return true;
8730
8731 /* Perform some generic filtering which may allow making a decision
8732 even if the bounds are not constant. First, negative integers
8733 never fit in unsigned types. */
8734 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8735 return false;
8736
8737 /* Second, narrower types always fit in wider ones. */
8738 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8739 return true;
8740
8741 /* Third, unsigned integers with top bit set never fit signed types. */
8742 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8743 {
8744 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8745 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8746 {
8747 /* When a tree_cst is converted to a wide-int, the precision
8748 is taken from the type. However, if the precision of the
8749 mode underneath the type is smaller than that, it is
8750 possible that the value will not fit. The test below
8751 fails if any bit is set between the sign bit of the
8752 underlying mode and the top bit of the type. */
8753 if (wi::ne_p (wi::zext (c, prec - 1), c))
8754 return false;
8755 }
8756 else if (wi::neg_p (c))
8757 return false;
8758 }
8759
8760 /* If we haven't been able to decide at this point, there is nothing more we
8761 can check ourselves here. Look at the base type if we have one and it
8762 has the same precision. */
8763 if (TREE_CODE (type) == INTEGER_TYPE
8764 && TREE_TYPE (type) != 0
8765 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8766 {
8767 type = TREE_TYPE (type);
8768 goto retry;
8769 }
8770
8771 /* Or to fits_to_tree_p, if nothing else. */
8772 return wi::fits_to_tree_p (c, type);
8773 }
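
/* Illustrative sketch: on a typical target with an 8-bit signed char
   and a 16-bit (or wider) short, given

     tree c = build_int_cst (integer_type_node, 300);

   int_fits_type_p (c, signed_char_type_node) is false while
   int_fits_type_p (c, short_integer_type_node) is true, since 300
   exceeds SCHAR_MAX but fits in a short.  */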
8774
8775 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8776 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8777 represented (assuming two's-complement arithmetic) within the bit
8778 precision of the type are returned instead. */
8779
8780 void
8781 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8782 {
8783 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8784 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8785 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8786 else
8787 {
8788 if (TYPE_UNSIGNED (type))
8789 mpz_set_ui (min, 0);
8790 else
8791 {
8792 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8793 wi::to_mpz (mn, min, SIGNED);
8794 }
8795 }
8796
8797 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8798 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8799 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8800 else
8801 {
8802 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8803 wi::to_mpz (mn, max, TYPE_SIGN (type));
8804 }
8805 }
8806
8807 /* Return true if VAR is an automatic variable defined in function FN. */
8808
8809 bool
8810 auto_var_in_fn_p (const_tree var, const_tree fn)
8811 {
8812 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8813 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8814 || TREE_CODE (var) == PARM_DECL)
8815 && ! TREE_STATIC (var))
8816 || TREE_CODE (var) == LABEL_DECL
8817 || TREE_CODE (var) == RESULT_DECL));
8818 }
8819
8820 /* Subprogram of following function. Called by walk_tree.
8821
8822 Return *TP if it is an automatic variable or parameter of the
8823 function passed in as DATA. */
8824
8825 static tree
8826 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8827 {
8828 tree fn = (tree) data;
8829
8830 if (TYPE_P (*tp))
8831 *walk_subtrees = 0;
8832
8833 else if (DECL_P (*tp)
8834 && auto_var_in_fn_p (*tp, fn))
8835 return *tp;
8836
8837 return NULL_TREE;
8838 }
8839
8840 /* Returns true if T is, contains, or refers to a type with variable
8841 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8842 arguments, but not the return type. If FN is nonzero, only return
8843 true if a modifier of the type or position of FN is a variable or
8844 parameter inside FN.
8845
8846 This concept is more general than that of C99 'variably modified types':
8847 in C99, a struct type is never variably modified because a VLA may not
8848 appear as a structure member. However, in GNU C, code like:
8849
8850 struct S { int i[f()]; };
8851
8852 is valid, and other languages may define similar constructs. */
8853
8854 bool
8855 variably_modified_type_p (tree type, tree fn)
8856 {
8857 tree t;
8858
8859 /* Test if T is either variable (if FN is zero) or an expression containing
8860 a variable in FN. If TYPE isn't gimplified, return true also if
8861 gimplify_one_sizepos would gimplify the expression into a local
8862 variable. */
8863 #define RETURN_TRUE_IF_VAR(T) \
8864 do { tree _t = (T); \
8865 if (_t != NULL_TREE \
8866 && _t != error_mark_node \
8867 && TREE_CODE (_t) != INTEGER_CST \
8868 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8869 && (!fn \
8870 || (!TYPE_SIZES_GIMPLIFIED (type) \
8871 && !is_gimple_sizepos (_t)) \
8872 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8873 return true; } while (0)
8874
8875 if (type == error_mark_node)
8876 return false;
8877
8878 /* If TYPE itself has variable size, it is variably modified. */
8879 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8880 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8881
8882 switch (TREE_CODE (type))
8883 {
8884 case POINTER_TYPE:
8885 case REFERENCE_TYPE:
8886 case VECTOR_TYPE:
8887 if (variably_modified_type_p (TREE_TYPE (type), fn))
8888 return true;
8889 break;
8890
8891 case FUNCTION_TYPE:
8892 case METHOD_TYPE:
8893 /* If TYPE is a function type, it is variably modified if the
8894 return type is variably modified. */
8895 if (variably_modified_type_p (TREE_TYPE (type), fn))
8896 return true;
8897 break;
8898
8899 case INTEGER_TYPE:
8900 case REAL_TYPE:
8901 case FIXED_POINT_TYPE:
8902 case ENUMERAL_TYPE:
8903 case BOOLEAN_TYPE:
8904 /* Scalar types are variably modified if their end points
8905 aren't constant. */
8906 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8907 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8908 break;
8909
8910 case RECORD_TYPE:
8911 case UNION_TYPE:
8912 case QUAL_UNION_TYPE:
8913 /* We can't see if any of the fields are variably-modified by the
8914 definition we normally use, since that would produce infinite
8915 recursion via pointers. */
8916 /* This is variably modified if some field's type is. */
8917 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8918 if (TREE_CODE (t) == FIELD_DECL)
8919 {
8920 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8921 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8922 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8923
8924 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8925 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8926 }
8927 break;
8928
8929 case ARRAY_TYPE:
8930 /* Do not call ourselves to avoid infinite recursion. This is
8931 variably modified if the element type is. */
8932 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8933 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8934 break;
8935
8936 default:
8937 break;
8938 }
8939
8940 /* The current language may have other cases to check, but in general,
8941 all other types are not variably modified. */
8942 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8943
8944 #undef RETURN_TRUE_IF_VAR
8945 }
8946
8947 /* Given a DECL or TYPE, return the scope in which it was declared, or
8948 NULL_TREE if there is no containing scope. */
8949
8950 tree
8951 get_containing_scope (const_tree t)
8952 {
8953 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8954 }
8955
8956 /* Return the innermost context enclosing DECL that is
8957 a FUNCTION_DECL, or zero if none. */
8958
8959 tree
8960 decl_function_context (const_tree decl)
8961 {
8962 tree context;
8963
8964 if (TREE_CODE (decl) == ERROR_MARK)
8965 return 0;
8966
8967 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8968 where we look up the function at runtime. Such functions always take
8969 a first argument of type 'pointer to real context'.
8970
8971 C++ should really be fixed to use DECL_CONTEXT for the real context,
8972 and use something else for the "virtual context". */
8973 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8974 context
8975 = TYPE_MAIN_VARIANT
8976 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8977 else
8978 context = DECL_CONTEXT (decl);
8979
8980 while (context && TREE_CODE (context) != FUNCTION_DECL)
8981 {
8982 if (TREE_CODE (context) == BLOCK)
8983 context = BLOCK_SUPERCONTEXT (context);
8984 else
8985 context = get_containing_scope (context);
8986 }
8987
8988 return context;
8989 }
8990
8991 /* Return the innermost context enclosing DECL that is
8992 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8993 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8994
8995 tree
8996 decl_type_context (const_tree decl)
8997 {
8998 tree context = DECL_CONTEXT (decl);
8999
9000 while (context)
9001 switch (TREE_CODE (context))
9002 {
9003 case NAMESPACE_DECL:
9004 case TRANSLATION_UNIT_DECL:
9005 return NULL_TREE;
9006
9007 case RECORD_TYPE:
9008 case UNION_TYPE:
9009 case QUAL_UNION_TYPE:
9010 return context;
9011
9012 case TYPE_DECL:
9013 case FUNCTION_DECL:
9014 context = DECL_CONTEXT (context);
9015 break;
9016
9017 case BLOCK:
9018 context = BLOCK_SUPERCONTEXT (context);
9019 break;
9020
9021 default:
9022 gcc_unreachable ();
9023 }
9024
9025 return NULL_TREE;
9026 }
9027
9028 /* CALL is a CALL_EXPR. Return the declaration for the function
9029 called, or NULL_TREE if the called function cannot be
9030 determined. */
9031
9032 tree
9033 get_callee_fndecl (const_tree call)
9034 {
9035 tree addr;
9036
9037 if (call == error_mark_node)
9038 return error_mark_node;
9039
9040 /* It's invalid to call this function with anything but a
9041 CALL_EXPR. */
9042 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9043
9044 /* The first operand to the CALL is the address of the function
9045 called. */
9046 addr = CALL_EXPR_FN (call);
9047
9048 /* If there is no function, return early. */
9049 if (addr == NULL_TREE)
9050 return NULL_TREE;
9051
9052 STRIP_NOPS (addr);
9053
9054 /* If this is a readonly function pointer, extract its initial value. */
9055 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9056 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9057 && DECL_INITIAL (addr))
9058 addr = DECL_INITIAL (addr);
9059
9060 /* If the address is just `&f' for some function `f', then we know
9061 that `f' is being called. */
9062 if (TREE_CODE (addr) == ADDR_EXPR
9063 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9064 return TREE_OPERAND (addr, 0);
9065
9066 /* We couldn't figure out what was being called. */
9067 return NULL_TREE;
9068 }
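
/* Illustrative sketch: for an ordinary (non-built-in) FUNCTION_DECL
   SOME_FNDECL (hypothetical), a direct call built as

     tree call = build_call_expr (some_fndecl, 0);

   yields a CALL_EXPR whose callee address is the ADDR_EXPR
   &some_fndecl, so get_callee_fndecl (call) returns some_fndecl.  */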
9069
9070 /* Print debugging information about tree nodes generated during the compile,
9071 and any language-specific information. */
9072
9073 void
9074 dump_tree_statistics (void)
9075 {
9076 if (GATHER_STATISTICS)
9077 {
9078 int i;
9079 int total_nodes, total_bytes;
9080 fprintf (stderr, "Kind Nodes Bytes\n");
9081 fprintf (stderr, "---------------------------------------\n");
9082 total_nodes = total_bytes = 0;
9083 for (i = 0; i < (int) all_kinds; i++)
9084 {
9085 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9086 tree_node_counts[i], tree_node_sizes[i]);
9087 total_nodes += tree_node_counts[i];
9088 total_bytes += tree_node_sizes[i];
9089 }
9090 fprintf (stderr, "---------------------------------------\n");
9091 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9092 fprintf (stderr, "---------------------------------------\n");
9093 fprintf (stderr, "Code Nodes\n");
9094 fprintf (stderr, "----------------------------\n");
9095 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9096 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9097 tree_code_counts[i]);
9098 fprintf (stderr, "----------------------------\n");
9099 ssanames_print_statistics ();
9100 phinodes_print_statistics ();
9101 }
9102 else
9103 fprintf (stderr, "(No per-node statistics)\n");
9104
9105 print_type_hash_statistics ();
9106 print_debug_expr_statistics ();
9107 print_value_expr_statistics ();
9108 lang_hooks.print_statistics ();
9109 }
9110 \f
9111 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9112
9113 /* Generate a crc32 of the most-significant BITS bits of VALUE. */
9114
9115 static unsigned
9116 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9117 {
9118 unsigned ix;
9119
9120 for (ix = bits; ix--; value <<= 1)
9121 {
9122 unsigned feedback;
9123
9124 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9125 chksum <<= 1;
9126 chksum ^= feedback;
9127 }
9128 return chksum;
9129 }
9130
9131 /* Generate a crc32 of a 32-bit unsigned. */
9132
9133 unsigned
9134 crc32_unsigned (unsigned chksum, unsigned value)
9135 {
9136 return crc32_unsigned_bits (chksum, value, 32);
9137 }
9138
9139 /* Generate a crc32 of a byte. */
9140
9141 unsigned
9142 crc32_byte (unsigned chksum, char byte)
9143 {
9144 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9145 }
9146
9147 /* Generate a crc32 of a string. */
9148
9149 unsigned
9150 crc32_string (unsigned chksum, const char *string)
9151 {
9152 do
9153 {
9154 chksum = crc32_byte (chksum, *string);
9155 }
9156 while (*string++);
9157 return chksum;
9158 }
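
/* Illustrative sketch: the checksum used below for randomized symbol
   names can be computed as

     unsigned chk = crc32_string (0, "foo.c");

   Note that the terminating NUL byte is folded into the checksum too,
   because the loop above tests *string only after hashing it.  */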
9159
9160 /* P is a string that will be used in a symbol. Mask out any characters
9161 that are not valid in that context. */
9162
9163 void
9164 clean_symbol_name (char *p)
9165 {
9166 for (; *p; p++)
9167 if (! (ISALNUM (*p)
9168 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9169 || *p == '$'
9170 #endif
9171 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9172 || *p == '.'
9173 #endif
9174 ))
9175 *p = '_';
9176 }
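
/* Illustrative sketch: on a target where neither '$' nor '.' is valid
   in labels (both NO_DOLLAR_IN_LABEL and NO_DOT_IN_LABEL defined),

     char buf[] = "foo-bar.c";
     clean_symbol_name (buf);

   leaves buf containing "foo_bar_c".  */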
9177
9178 /* Generate a name for a special-purpose function.
9179 The generated name may need to be unique across the whole link.
9180 Changes to this function may also require corresponding changes to
9181 xstrdup_mask_random.
9182 TYPE is some string to identify the purpose of this function to the
9183 linker or collect2; it must start with an uppercase letter,
9184 one of:
9185 I - for constructors
9186 D - for destructors
9187 N - for C++ anonymous namespaces
9188 F - for DWARF unwind frame information. */
9189
9190 tree
9191 get_file_function_name (const char *type)
9192 {
9193 char *buf;
9194 const char *p;
9195 char *q;
9196
9197 /* If we already have a name we know to be unique, just use that. */
9198 if (first_global_object_name)
9199 p = q = ASTRDUP (first_global_object_name);
9200 /* If the target is handling the constructors/destructors, they
9201 will be local to this file and the name is only necessary for
9202 debugging purposes.
9203 We also assign sub_I and sub_D suffixes to constructors called from
9204 the global static constructors. These are always local. */
9205 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9206 || (strncmp (type, "sub_", 4) == 0
9207 && (type[4] == 'I' || type[4] == 'D')))
9208 {
9209 const char *file = main_input_filename;
9210 if (! file)
9211 file = LOCATION_FILE (input_location);
9212 /* Just use the file's basename, because the full pathname
9213 might be quite long. */
9214 p = q = ASTRDUP (lbasename (file));
9215 }
9216 else
9217 {
9218 /* Otherwise, the name must be unique across the entire link.
9219 We don't have anything that we know to be unique to this translation
9220 unit, so use what we do have and throw in some randomness. */
9221 unsigned len;
9222 const char *name = weak_global_object_name;
9223 const char *file = main_input_filename;
9224
9225 if (! name)
9226 name = "";
9227 if (! file)
9228 file = LOCATION_FILE (input_location);
9229
9230 len = strlen (file);
9231 q = (char *) alloca (9 + 17 + len + 1);
9232 memcpy (q, file, len + 1);
9233
9234 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9235 crc32_string (0, name), get_random_seed (false));
9236
9237 p = q;
9238 }
9239
9240 clean_symbol_name (q);
9241 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9242 + strlen (type));
9243
9244 /* Set up the name of the file-level functions we may need.
9245 Use a global object (which is already required to be unique over
9246 the program) rather than the file name (which imposes extra
9247 constraints). */
9248 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9249
9250 return get_identifier (buf);
9251 }
9252 \f
9253 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9254
9255 /* Complain that the tree code of NODE does not match the expected 0
9256 terminated list of trailing codes. The trailing code list can be
9257 empty, for a more vague error message. FILE, LINE, and FUNCTION
9258 are of the caller. */
9259
9260 void
9261 tree_check_failed (const_tree node, const char *file,
9262 int line, const char *function, ...)
9263 {
9264 va_list args;
9265 const char *buffer;
9266 unsigned length = 0;
9267 enum tree_code code;
9268
9269 va_start (args, function);
9270 while ((code = (enum tree_code) va_arg (args, int)))
9271 length += 4 + strlen (get_tree_code_name (code));
9272 va_end (args);
9273 if (length)
9274 {
9275 char *tmp;
9276 va_start (args, function);
9277 length += strlen ("expected ");
9278 buffer = tmp = (char *) alloca (length);
9279 length = 0;
9280 while ((code = (enum tree_code) va_arg (args, int)))
9281 {
9282 const char *prefix = length ? " or " : "expected ";
9283
9284 strcpy (tmp + length, prefix);
9285 length += strlen (prefix);
9286 strcpy (tmp + length, get_tree_code_name (code));
9287 length += strlen (get_tree_code_name (code));
9288 }
9289 va_end (args);
9290 }
9291 else
9292 buffer = "unexpected node";
9293
9294 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9295 buffer, get_tree_code_name (TREE_CODE (node)),
9296 function, trim_filename (file), line);
9297 }
9298
9299 /* Complain that the tree code of NODE does match the expected 0
9300 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9301 the caller. */
9302
9303 void
9304 tree_not_check_failed (const_tree node, const char *file,
9305 int line, const char *function, ...)
9306 {
9307 va_list args;
9308 char *buffer;
9309 unsigned length = 0;
9310 enum tree_code code;
9311
9312 va_start (args, function);
9313 while ((code = (enum tree_code) va_arg (args, int)))
9314 length += 4 + strlen (get_tree_code_name (code));
9315 va_end (args);
9316 va_start (args, function);
9317 buffer = (char *) alloca (length);
9318 length = 0;
9319 while ((code = (enum tree_code) va_arg (args, int)))
9320 {
9321 if (length)
9322 {
9323 strcpy (buffer + length, " or ");
9324 length += 4;
9325 }
9326 strcpy (buffer + length, get_tree_code_name (code));
9327 length += strlen (get_tree_code_name (code));
9328 }
9329 va_end (args);
9330
9331 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9332 buffer, get_tree_code_name (TREE_CODE (node)),
9333 function, trim_filename (file), line);
9334 }
9335
9336 /* Similar to tree_check_failed, except that we check for a class of tree
9337 code, given in CL. */
9338
9339 void
9340 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9341 const char *file, int line, const char *function)
9342 {
9343 internal_error
9344 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9345 TREE_CODE_CLASS_STRING (cl),
9346 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9347 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9348 }
9349
9350 /* Similar to tree_check_failed, except that instead of specifying a
9351 dozen codes, use the knowledge that they're all sequential. */
9352
9353 void
9354 tree_range_check_failed (const_tree node, const char *file, int line,
9355 const char *function, enum tree_code c1,
9356 enum tree_code c2)
9357 {
9358 char *buffer;
9359 unsigned length = 0;
9360 unsigned int c;
9361
9362 for (c = c1; c <= c2; ++c)
9363 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9364
9365 length += strlen ("expected ");
9366 buffer = (char *) alloca (length);
9367 length = 0;
9368
9369 for (c = c1; c <= c2; ++c)
9370 {
9371 const char *prefix = length ? " or " : "expected ";
9372
9373 strcpy (buffer + length, prefix);
9374 length += strlen (prefix);
9375 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9376 length += strlen (get_tree_code_name ((enum tree_code) c));
9377 }
9378
9379 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9380 buffer, get_tree_code_name (TREE_CODE (node)),
9381 function, trim_filename (file), line);
9382 }
9383
9384
9385 /* Similar to tree_check_failed, except that the check is that the tree does
9386 not belong to the class of tree codes given in CL. */
9387
9388 void
9389 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9390 const char *file, int line, const char *function)
9391 {
9392 internal_error
9393 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9394 TREE_CODE_CLASS_STRING (cl),
9395 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9396 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9397 }
9398
9399
9400 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9401
9402 void
9403 omp_clause_check_failed (const_tree node, const char *file, int line,
9404 const char *function, enum omp_clause_code code)
9405 {
9406 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9407 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9408 function, trim_filename (file), line);
9409 }
9410
9411
9412 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9413
9414 void
9415 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9416 const char *function, enum omp_clause_code c1,
9417 enum omp_clause_code c2)
9418 {
9419 char *buffer;
9420 unsigned length = 0;
9421 unsigned int c;
9422
9423 for (c = c1; c <= c2; ++c)
9424 length += 4 + strlen (omp_clause_code_name[c]);
9425
9426 length += strlen ("expected ");
9427 buffer = (char *) alloca (length);
9428 length = 0;
9429
9430 for (c = c1; c <= c2; ++c)
9431 {
9432 const char *prefix = length ? " or " : "expected ";
9433
9434 strcpy (buffer + length, prefix);
9435 length += strlen (prefix);
9436 strcpy (buffer + length, omp_clause_code_name[c]);
9437 length += strlen (omp_clause_code_name[c]);
9438 }
9439
9440 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9441 buffer, omp_clause_code_name[TREE_CODE (node)],
9442 function, trim_filename (file), line);
9443 }
9444
9445
9446 #undef DEFTREESTRUCT
9447 #define DEFTREESTRUCT(VAL, NAME) NAME,
9448
9449 static const char *ts_enum_names[] = {
9450 #include "treestruct.def"
9451 };
9452 #undef DEFTREESTRUCT
9453
9454 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9455
9456 /* Similar to tree_class_check_failed, except that we check for
9457 whether CODE contains the tree structure identified by EN. */
9458
9459 void
9460 tree_contains_struct_check_failed (const_tree node,
9461 const enum tree_node_structure_enum en,
9462 const char *file, int line,
9463 const char *function)
9464 {
9465 internal_error
9466 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9467 TS_ENUM_NAME (en),
9468 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9469 }
9470
9471
9472 /* Similar to above, except that the check is for the bounds of a
9473 TREE_INT_CST's (dynamically sized) element array. */
9474
9475 void
9476 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9477 const char *function)
9478 {
9479 internal_error
9480 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9481 idx + 1, len, function, trim_filename (file), line);
9482 }
9483
9484 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9485 (dynamically sized) vector. */
9486
9487 void
9488 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9489 const char *function)
9490 {
9491 internal_error
9492 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9493 idx + 1, len, function, trim_filename (file), line);
9494 }
9495
9496 /* Similar to above, except that the check is for the bounds of the operand
9497 vector of an expression node EXP. */
9498
9499 void
9500 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9501 int line, const char *function)
9502 {
9503 enum tree_code code = TREE_CODE (exp);
9504 internal_error
9505 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9506 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9507 function, trim_filename (file), line);
9508 }
9509
9510 /* Similar to above, except that the check is for the number of
9511 operands of an OMP_CLAUSE node. */
9512
9513 void
9514 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9515 int line, const char *function)
9516 {
9517 internal_error
9518 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9519 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9520 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9521 trim_filename (file), line);
9522 }
9523 #endif /* ENABLE_TREE_CHECKING */
9524 \f
9525 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9526 and mapped to the machine mode MODE. Initialize its fields and build
9527 the information necessary for debugging output. */
9528
9529 static tree
9530 make_vector_type (tree innertype, int nunits, machine_mode mode)
9531 {
9532 tree t;
9533 inchash::hash hstate;
9534
9535 t = make_node (VECTOR_TYPE);
9536 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9537 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9538 SET_TYPE_MODE (t, mode);
9539
9540 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9541 SET_TYPE_STRUCTURAL_EQUALITY (t);
9542 else if (TYPE_CANONICAL (innertype) != innertype
9543 || mode != VOIDmode)
9544 TYPE_CANONICAL (t)
9545 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9546
9547 layout_type (t);
9548
9549 hstate.add_wide_int (VECTOR_TYPE);
9550 hstate.add_wide_int (nunits);
9551 hstate.add_wide_int (mode);
9552 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9553 t = type_hash_canon (hstate.end (), t);
9554
9555 /* We have built a main variant, based on the main variant of the
9556 inner type. Use it to build the variant we return. */
9557 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9558 && TREE_TYPE (t) != innertype)
9559 return build_type_attribute_qual_variant (t,
9560 TYPE_ATTRIBUTES (innertype),
9561 TYPE_QUALS (innertype));
9562
9563 return t;
9564 }
9565
9566 static tree
9567 make_or_reuse_type (unsigned size, int unsignedp)
9568 {
9569 int i;
9570
9571 if (size == INT_TYPE_SIZE)
9572 return unsignedp ? unsigned_type_node : integer_type_node;
9573 if (size == CHAR_TYPE_SIZE)
9574 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9575 if (size == SHORT_TYPE_SIZE)
9576 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9577 if (size == LONG_TYPE_SIZE)
9578 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9579 if (size == LONG_LONG_TYPE_SIZE)
9580 return (unsignedp ? long_long_unsigned_type_node
9581 : long_long_integer_type_node);
9582
9583 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9584 if (size == int_n_data[i].bitsize
9585 && int_n_enabled_p[i])
9586 return (unsignedp ? int_n_trees[i].unsigned_type
9587 : int_n_trees[i].signed_type);
9588
9589 if (unsignedp)
9590 return make_unsigned_type (size);
9591 else
9592 return make_signed_type (size);
9593 }
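
/* Illustrative only: on a target where INT_TYPE_SIZE is 32, a request such
   as make_or_reuse_type (32, 1) resolves to the existing unsigned_type_node
   rather than creating a fresh 32-bit unsigned type; sizes with no standard
   or enabled __intN match fall through to make_signed_type or
   make_unsigned_type.  */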
9594
9595 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9596
9597 static tree
9598 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9599 {
9600 if (satp)
9601 {
9602 if (size == SHORT_FRACT_TYPE_SIZE)
9603 return unsignedp ? sat_unsigned_short_fract_type_node
9604 : sat_short_fract_type_node;
9605 if (size == FRACT_TYPE_SIZE)
9606 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9607 if (size == LONG_FRACT_TYPE_SIZE)
9608 return unsignedp ? sat_unsigned_long_fract_type_node
9609 : sat_long_fract_type_node;
9610 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9611 return unsignedp ? sat_unsigned_long_long_fract_type_node
9612 : sat_long_long_fract_type_node;
9613 }
9614 else
9615 {
9616 if (size == SHORT_FRACT_TYPE_SIZE)
9617 return unsignedp ? unsigned_short_fract_type_node
9618 : short_fract_type_node;
9619 if (size == FRACT_TYPE_SIZE)
9620 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9621 if (size == LONG_FRACT_TYPE_SIZE)
9622 return unsignedp ? unsigned_long_fract_type_node
9623 : long_fract_type_node;
9624 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9625 return unsignedp ? unsigned_long_long_fract_type_node
9626 : long_long_fract_type_node;
9627 }
9628
9629 return make_fract_type (size, unsignedp, satp);
9630 }
9631
9632 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9633
9634 static tree
9635 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9636 {
9637 if (satp)
9638 {
9639 if (size == SHORT_ACCUM_TYPE_SIZE)
9640 return unsignedp ? sat_unsigned_short_accum_type_node
9641 : sat_short_accum_type_node;
9642 if (size == ACCUM_TYPE_SIZE)
9643 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9644 if (size == LONG_ACCUM_TYPE_SIZE)
9645 return unsignedp ? sat_unsigned_long_accum_type_node
9646 : sat_long_accum_type_node;
9647 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9648 return unsignedp ? sat_unsigned_long_long_accum_type_node
9649 : sat_long_long_accum_type_node;
9650 }
9651 else
9652 {
9653 if (size == SHORT_ACCUM_TYPE_SIZE)
9654 return unsignedp ? unsigned_short_accum_type_node
9655 : short_accum_type_node;
9656 if (size == ACCUM_TYPE_SIZE)
9657 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9658 if (size == LONG_ACCUM_TYPE_SIZE)
9659 return unsignedp ? unsigned_long_accum_type_node
9660 : long_accum_type_node;
9661 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9662 return unsignedp ? unsigned_long_long_accum_type_node
9663 : long_long_accum_type_node;
9664 }
9665
9666 return make_accum_type (size, unsignedp, satp);
9667 }
9668
9669
9670 /* Create an atomic variant node for TYPE. This routine is called
9671 during initialization of data types to create the 5 basic atomic
9672 types. The generic build_variant_type function requires these to
9673 already be set up in order to function properly, so cannot be
9674 called from there. If ALIGN is non-zero, then ensure alignment is
9675 overridden to this value. */
9676
9677 static tree
9678 build_atomic_base (tree type, unsigned int align)
9679 {
9680 tree t;
9681
9682   /* Make sure it's not already registered.  */
9683 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9684 return t;
9685
9686 t = build_variant_type_copy (type);
9687 set_type_quals (t, TYPE_QUAL_ATOMIC);
9688
9689 if (align)
9690 TYPE_ALIGN (t) = align;
9691
9692 return t;
9693 }
9694
9695 /* Create nodes for all integer types (and error_mark_node) using the sizes
9696 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9697 SHORT_DOUBLE specifies whether double should be of the same precision
9698 as float. */
9699
9700 void
9701 build_common_tree_nodes (bool signed_char, bool short_double)
9702 {
9703 int i;
9704
9705 error_mark_node = make_node (ERROR_MARK);
9706 TREE_TYPE (error_mark_node) = error_mark_node;
9707
9708 initialize_sizetypes ();
9709
9710 /* Define both `signed char' and `unsigned char'. */
9711 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9712 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9713 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9714 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9715
9716 /* Define `char', which is like either `signed char' or `unsigned char'
9717 but not the same as either. */
9718 char_type_node
9719 = (signed_char
9720 ? make_signed_type (CHAR_TYPE_SIZE)
9721 : make_unsigned_type (CHAR_TYPE_SIZE));
9722 TYPE_STRING_FLAG (char_type_node) = 1;
9723
9724 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9725 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9726 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9727 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9728 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9729 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9730 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9731 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9732
9733 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9734 {
9735 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9736 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9737 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9738 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9739
9740 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9741 && int_n_enabled_p[i])
9742 {
9743 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9744 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9745 }
9746 }
9747
9748 /* Define a boolean type. This type only represents boolean values but
9749 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9750 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9751 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9752 TYPE_PRECISION (boolean_type_node) = 1;
9753 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9754
9755 /* Define what type to use for size_t. */
9756 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9757 size_type_node = unsigned_type_node;
9758 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9759 size_type_node = long_unsigned_type_node;
9760 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9761 size_type_node = long_long_unsigned_type_node;
9762 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9763 size_type_node = short_unsigned_type_node;
9764 else
9765 {
9766 int i;
9767
9768 size_type_node = NULL_TREE;
9769 for (i = 0; i < NUM_INT_N_ENTS; i++)
9770 if (int_n_enabled_p[i])
9771 {
9772 char name[50];
9773 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9774
9775 if (strcmp (name, SIZE_TYPE) == 0)
9776 {
9777 size_type_node = int_n_trees[i].unsigned_type;
9778 }
9779 }
9780 if (size_type_node == NULL_TREE)
9781 gcc_unreachable ();
9782 }
9783
9784 /* Fill in the rest of the sized types. Reuse existing type nodes
9785 when possible. */
9786 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9787 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9788 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9789 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9790 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9791
9792 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9793 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9794 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9795 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9796 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9797
9798   /* Don't call build_qualified_type for atomics.  That routine does
9799 special processing for atomics, and until they are initialized
9800 it's better not to make that call.
9801
9802 Check to see if there is a target override for atomic types. */
9803
9804 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9805 targetm.atomic_align_for_mode (QImode));
9806 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9807 targetm.atomic_align_for_mode (HImode));
9808 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9809 targetm.atomic_align_for_mode (SImode));
9810 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9811 targetm.atomic_align_for_mode (DImode));
9812 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9813 targetm.atomic_align_for_mode (TImode));
9814
9815 access_public_node = get_identifier ("public");
9816 access_protected_node = get_identifier ("protected");
9817 access_private_node = get_identifier ("private");
9818
9819   /* Define these next since types below may use them.  */
9820 integer_zero_node = build_int_cst (integer_type_node, 0);
9821 integer_one_node = build_int_cst (integer_type_node, 1);
9822 integer_three_node = build_int_cst (integer_type_node, 3);
9823 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9824
9825 size_zero_node = size_int (0);
9826 size_one_node = size_int (1);
9827 bitsize_zero_node = bitsize_int (0);
9828 bitsize_one_node = bitsize_int (1);
9829 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9830
9831 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9832 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9833
9834 void_type_node = make_node (VOID_TYPE);
9835 layout_type (void_type_node);
9836
9837 pointer_bounds_type_node = targetm.chkp_bound_type ();
9838
9839 /* We are not going to have real types in C with less than byte alignment,
9840 so we might as well not have any types that claim to have it. */
9841 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9842 TYPE_USER_ALIGN (void_type_node) = 0;
9843
9844 void_node = make_node (VOID_CST);
9845 TREE_TYPE (void_node) = void_type_node;
9846
9847 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9848 layout_type (TREE_TYPE (null_pointer_node));
9849
9850 ptr_type_node = build_pointer_type (void_type_node);
9851 const_ptr_type_node
9852 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9853 fileptr_type_node = ptr_type_node;
9854
9855 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9856
9857 float_type_node = make_node (REAL_TYPE);
9858 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9859 layout_type (float_type_node);
9860
9861 double_type_node = make_node (REAL_TYPE);
9862 if (short_double)
9863 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9864 else
9865 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9866 layout_type (double_type_node);
9867
9868 long_double_type_node = make_node (REAL_TYPE);
9869 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9870 layout_type (long_double_type_node);
9871
9872 float_ptr_type_node = build_pointer_type (float_type_node);
9873 double_ptr_type_node = build_pointer_type (double_type_node);
9874 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9875 integer_ptr_type_node = build_pointer_type (integer_type_node);
9876
9877 /* Fixed size integer types. */
9878 uint16_type_node = make_or_reuse_type (16, 1);
9879 uint32_type_node = make_or_reuse_type (32, 1);
9880 uint64_type_node = make_or_reuse_type (64, 1);
9881
9882 /* Decimal float types. */
9883 dfloat32_type_node = make_node (REAL_TYPE);
9884 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9885 layout_type (dfloat32_type_node);
9886 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9887 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9888
9889 dfloat64_type_node = make_node (REAL_TYPE);
9890 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9891 layout_type (dfloat64_type_node);
9892 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9893 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9894
9895 dfloat128_type_node = make_node (REAL_TYPE);
9896 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9897 layout_type (dfloat128_type_node);
9898 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9899 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9900
9901 complex_integer_type_node = build_complex_type (integer_type_node);
9902 complex_float_type_node = build_complex_type (float_type_node);
9903 complex_double_type_node = build_complex_type (double_type_node);
9904 complex_long_double_type_node = build_complex_type (long_double_type_node);
9905
9906 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9907 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9908 sat_ ## KIND ## _type_node = \
9909 make_sat_signed_ ## KIND ## _type (SIZE); \
9910 sat_unsigned_ ## KIND ## _type_node = \
9911 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9912 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9913 unsigned_ ## KIND ## _type_node = \
9914 make_unsigned_ ## KIND ## _type (SIZE);
9915
9916 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9917 sat_ ## WIDTH ## KIND ## _type_node = \
9918 make_sat_signed_ ## KIND ## _type (SIZE); \
9919 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9920 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9921 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9922 unsigned_ ## WIDTH ## KIND ## _type_node = \
9923 make_unsigned_ ## KIND ## _type (SIZE);
9924
9925 /* Make fixed-point type nodes based on four different widths. */
9926 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9927 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9928 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9929 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9930 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9931
9932 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9933 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9934 NAME ## _type_node = \
9935 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9936 u ## NAME ## _type_node = \
9937 make_or_reuse_unsigned_ ## KIND ## _type \
9938 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9939 sat_ ## NAME ## _type_node = \
9940 make_or_reuse_sat_signed_ ## KIND ## _type \
9941 (GET_MODE_BITSIZE (MODE ## mode)); \
9942 sat_u ## NAME ## _type_node = \
9943 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9944 (GET_MODE_BITSIZE (U ## MODE ## mode));
9945
9946 /* Fixed-point type and mode nodes. */
9947 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9948 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9949 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9950 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9951 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9952 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9953 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9954 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9955 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9956 MAKE_FIXED_MODE_NODE (accum, da, DA)
9957 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9958
9959 {
9960 tree t = targetm.build_builtin_va_list ();
9961
9962 /* Many back-ends define record types without setting TYPE_NAME.
9963 If we copied the record type here, we'd keep the original
9964 record type without a name. This breaks name mangling. So,
9965 don't copy record types and let c_common_nodes_and_builtins()
9966 declare the type to be __builtin_va_list. */
9967 if (TREE_CODE (t) != RECORD_TYPE)
9968 t = build_variant_type_copy (t);
9969
9970 va_list_type_node = t;
9971 }
9972 }
9973
9974 /* Modify DECL for given flags.
9975 TM_PURE attribute is set only on types, so the function will modify
9976 DECL's type when ECF_TM_PURE is used. */
9977
9978 void
9979 set_call_expr_flags (tree decl, int flags)
9980 {
9981 if (flags & ECF_NOTHROW)
9982 TREE_NOTHROW (decl) = 1;
9983 if (flags & ECF_CONST)
9984 TREE_READONLY (decl) = 1;
9985 if (flags & ECF_PURE)
9986 DECL_PURE_P (decl) = 1;
9987 if (flags & ECF_LOOPING_CONST_OR_PURE)
9988 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9989 if (flags & ECF_NOVOPS)
9990 DECL_IS_NOVOPS (decl) = 1;
9991 if (flags & ECF_NORETURN)
9992 TREE_THIS_VOLATILE (decl) = 1;
9993 if (flags & ECF_MALLOC)
9994 DECL_IS_MALLOC (decl) = 1;
9995 if (flags & ECF_RETURNS_TWICE)
9996 DECL_IS_RETURNS_TWICE (decl) = 1;
9997 if (flags & ECF_LEAF)
9998 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9999 NULL, DECL_ATTRIBUTES (decl));
10000 if ((flags & ECF_TM_PURE) && flag_tm)
10001 apply_tm_attr (decl, get_identifier ("transaction_pure"));
10002 /* Looping const or pure is implied by noreturn.
10003 There is currently no way to declare looping const or looping pure alone. */
10004 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
10005 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10006 }
10007
10008
10009 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10010
10011 static void
10012 local_define_builtin (const char *name, tree type, enum built_in_function code,
10013 const char *library_name, int ecf_flags)
10014 {
10015 tree decl;
10016
10017 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10018 library_name, NULL_TREE);
10019 set_call_expr_flags (decl, ecf_flags);
10020
10021 set_builtin_decl (code, decl, true);
10022 }
10023
10024 /* Call this function after instantiating all builtins that the language
10025 front end cares about. This will build the rest of the builtins
10026 and internal functions that are relied upon by the tree optimizers and
10027 the middle-end. */
10028
10029 void
10030 build_common_builtin_nodes (void)
10031 {
10032 tree tmp, ftype;
10033 int ecf_flags;
10034
10035 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10036 {
10037 ftype = build_function_type (void_type_node, void_list_node);
10038 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10039 "__builtin_unreachable",
10040 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10041 | ECF_CONST);
10042 }
10043
10044 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10045 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10046 {
10047 ftype = build_function_type_list (ptr_type_node,
10048 ptr_type_node, const_ptr_type_node,
10049 size_type_node, NULL_TREE);
10050
10051 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10052 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10053 "memcpy", ECF_NOTHROW | ECF_LEAF);
10054 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10055 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10056 "memmove", ECF_NOTHROW | ECF_LEAF);
10057 }
10058
10059 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10060 {
10061 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10062 const_ptr_type_node, size_type_node,
10063 NULL_TREE);
10064 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10065 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10066 }
10067
10068 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10069 {
10070 ftype = build_function_type_list (ptr_type_node,
10071 ptr_type_node, integer_type_node,
10072 size_type_node, NULL_TREE);
10073 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10074 "memset", ECF_NOTHROW | ECF_LEAF);
10075 }
10076
10077 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10078 {
10079 ftype = build_function_type_list (ptr_type_node,
10080 size_type_node, NULL_TREE);
10081 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10082 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10083 }
10084
10085 ftype = build_function_type_list (ptr_type_node, size_type_node,
10086 size_type_node, NULL_TREE);
10087 local_define_builtin ("__builtin_alloca_with_align", ftype,
10088 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
10089 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10090
10091 /* If we're checking the stack, `alloca' can throw. */
10092 if (flag_stack_check)
10093 {
10094 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10095 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10096 }
10097
10098 ftype = build_function_type_list (void_type_node,
10099 ptr_type_node, ptr_type_node,
10100 ptr_type_node, NULL_TREE);
10101 local_define_builtin ("__builtin_init_trampoline", ftype,
10102 BUILT_IN_INIT_TRAMPOLINE,
10103 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10104 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10105 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10106 "__builtin_init_heap_trampoline",
10107 ECF_NOTHROW | ECF_LEAF);
10108
10109 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10110 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10111 BUILT_IN_ADJUST_TRAMPOLINE,
10112 "__builtin_adjust_trampoline",
10113 ECF_CONST | ECF_NOTHROW);
10114
10115 ftype = build_function_type_list (void_type_node,
10116 ptr_type_node, ptr_type_node, NULL_TREE);
10117 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10118 BUILT_IN_NONLOCAL_GOTO,
10119 "__builtin_nonlocal_goto",
10120 ECF_NORETURN | ECF_NOTHROW);
10121
10122 ftype = build_function_type_list (void_type_node,
10123 ptr_type_node, ptr_type_node, NULL_TREE);
10124 local_define_builtin ("__builtin_setjmp_setup", ftype,
10125 BUILT_IN_SETJMP_SETUP,
10126 "__builtin_setjmp_setup", ECF_NOTHROW);
10127
10128 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10129 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10130 BUILT_IN_SETJMP_RECEIVER,
10131 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10132
10133 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10134 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10135 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10136
10137 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10138 local_define_builtin ("__builtin_stack_restore", ftype,
10139 BUILT_IN_STACK_RESTORE,
10140 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10141
10142 /* If there's a possibility that we might use the ARM EABI, build the
10143 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10144 if (targetm.arm_eabi_unwinder)
10145 {
10146 ftype = build_function_type_list (void_type_node, NULL_TREE);
10147 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10148 BUILT_IN_CXA_END_CLEANUP,
10149 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10150 }
10151
10152 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10153 local_define_builtin ("__builtin_unwind_resume", ftype,
10154 BUILT_IN_UNWIND_RESUME,
10155 ((targetm_common.except_unwind_info (&global_options)
10156 == UI_SJLJ)
10157 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10158 ECF_NORETURN);
10159
10160 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10161 {
10162 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10163 NULL_TREE);
10164 local_define_builtin ("__builtin_return_address", ftype,
10165 BUILT_IN_RETURN_ADDRESS,
10166 "__builtin_return_address",
10167 ECF_NOTHROW);
10168 }
10169
10170 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10171 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10172 {
10173 ftype = build_function_type_list (void_type_node, ptr_type_node,
10174 ptr_type_node, NULL_TREE);
10175 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10176 local_define_builtin ("__cyg_profile_func_enter", ftype,
10177 BUILT_IN_PROFILE_FUNC_ENTER,
10178 "__cyg_profile_func_enter", 0);
10179 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10180 local_define_builtin ("__cyg_profile_func_exit", ftype,
10181 BUILT_IN_PROFILE_FUNC_EXIT,
10182 "__cyg_profile_func_exit", 0);
10183 }
10184
10185 /* The exception object and filter values from the runtime. The argument
10186 must be zero before exception lowering, i.e. from the front end. After
10187 exception lowering, it will be the region number for the exception
10188 landing pad. These functions are PURE instead of CONST to prevent
10189 them from being hoisted past the exception edge that will initialize
10190 its value in the landing pad. */
10191 ftype = build_function_type_list (ptr_type_node,
10192 integer_type_node, NULL_TREE);
10193 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10194   /* Only use TM_PURE if we have TM language support.  */
10195 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10196 ecf_flags |= ECF_TM_PURE;
10197 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10198 "__builtin_eh_pointer", ecf_flags);
10199
10200 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10201 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10202 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10203 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10204
10205 ftype = build_function_type_list (void_type_node,
10206 integer_type_node, integer_type_node,
10207 NULL_TREE);
10208 local_define_builtin ("__builtin_eh_copy_values", ftype,
10209 BUILT_IN_EH_COPY_VALUES,
10210 "__builtin_eh_copy_values", ECF_NOTHROW);
10211
10212 /* Complex multiplication and division. These are handled as builtins
10213 rather than optabs because emit_library_call_value doesn't support
10214 complex. Further, we can do slightly better with folding these
10215      beasties if the real and imaginary parts of the arguments are separate.  */
10216 {
10217 int mode;
10218
10219 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10220 {
10221 char mode_name_buf[4], *q;
10222 const char *p;
10223 enum built_in_function mcode, dcode;
10224 tree type, inner_type;
10225 const char *prefix = "__";
10226
10227 if (targetm.libfunc_gnu_prefix)
10228 prefix = "__gnu_";
10229
10230 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10231 if (type == NULL)
10232 continue;
10233 inner_type = TREE_TYPE (type);
10234
10235 ftype = build_function_type_list (type, inner_type, inner_type,
10236 inner_type, inner_type, NULL_TREE);
10237
10238 mcode = ((enum built_in_function)
10239 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10240 dcode = ((enum built_in_function)
10241 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10242
10243 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10244 *q = TOLOWER (*p);
10245 *q = '\0';
10246
10247 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10248 NULL);
10249 local_define_builtin (built_in_names[mcode], ftype, mcode,
10250 built_in_names[mcode],
10251 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10252
10253 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10254 NULL);
10255 local_define_builtin (built_in_names[dcode], ftype, dcode,
10256 built_in_names[dcode],
10257 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10258 }
10259 }
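
  /* For instance (illustrative), for SCmode the loop above registers
     "__mulsc3" and "__divsc3" (or "__gnu_mulsc3" / "__gnu_divsc3" when the
     target requests the GNU prefix), matching the complex multiply and
     divide routines provided by libgcc.  */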
10260
10261 init_internal_fns ();
10262 }
10263
10264 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10265 better way.
10266
10267 If we requested a pointer to a vector, build up the pointers that
10268 we stripped off while looking for the inner type. Similarly for
10269 return values from functions.
10270
10271 The argument TYPE is the top of the chain, and BOTTOM is the
10272 new type which we will point to. */
10273
10274 tree
10275 reconstruct_complex_type (tree type, tree bottom)
10276 {
10277 tree inner, outer;
10278
10279 if (TREE_CODE (type) == POINTER_TYPE)
10280 {
10281 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10282 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10283 TYPE_REF_CAN_ALIAS_ALL (type));
10284 }
10285 else if (TREE_CODE (type) == REFERENCE_TYPE)
10286 {
10287 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10288 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10289 TYPE_REF_CAN_ALIAS_ALL (type));
10290 }
10291 else if (TREE_CODE (type) == ARRAY_TYPE)
10292 {
10293 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10294 outer = build_array_type (inner, TYPE_DOMAIN (type));
10295 }
10296 else if (TREE_CODE (type) == FUNCTION_TYPE)
10297 {
10298 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10299 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10300 }
10301 else if (TREE_CODE (type) == METHOD_TYPE)
10302 {
10303 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10304       /* The build_method_type_directly() routine prepends 'this' to the
10305 	 argument list, so we must compensate by getting rid of it here.  */
10306 outer
10307 = build_method_type_directly
10308 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10309 inner,
10310 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10311 }
10312 else if (TREE_CODE (type) == OFFSET_TYPE)
10313 {
10314 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10315 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10316 }
10317 else
10318 return bottom;
10319
10320 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10321 TYPE_QUALS (type));
10322 }
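
/* An illustrative case: if TYPE is "int *" and BOTTOM is a vector type such
   as V4SI, reconstruct_complex_type returns "V4SI *", rebuilt with the
   original pointer's mode, can-alias-all flag, attributes and qualifiers.  */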
10323
10324 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10325 the inner type. */
10326 tree
10327 build_vector_type_for_mode (tree innertype, machine_mode mode)
10328 {
10329 int nunits;
10330
10331 switch (GET_MODE_CLASS (mode))
10332 {
10333 case MODE_VECTOR_INT:
10334 case MODE_VECTOR_FLOAT:
10335 case MODE_VECTOR_FRACT:
10336 case MODE_VECTOR_UFRACT:
10337 case MODE_VECTOR_ACCUM:
10338 case MODE_VECTOR_UACCUM:
10339 nunits = GET_MODE_NUNITS (mode);
10340 break;
10341
10342 case MODE_INT:
10343 /* Check that there are no leftover bits. */
10344 gcc_assert (GET_MODE_BITSIZE (mode)
10345 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10346
10347 nunits = GET_MODE_BITSIZE (mode)
10348 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10349 break;
10350
10351 default:
10352 gcc_unreachable ();
10353 }
10354
10355 return make_vector_type (innertype, nunits, mode);
10356 }
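
/* Illustrative cases: for V4SImode the vector gets GET_MODE_NUNITS == 4
   units of INNERTYPE, while for an integer mode such as SImode with an
   8-bit INNERTYPE the result is a 4-element vector carried in that scalar
   mode.  */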
10357
10358 /* Similarly, but takes the inner type and number of units, which must be
10359 a power of two. */
10360
10361 tree
10362 build_vector_type (tree innertype, int nunits)
10363 {
10364 return make_vector_type (innertype, nunits, VOIDmode);
10365 }
10366
10367 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10368
10369 tree
10370 build_opaque_vector_type (tree innertype, int nunits)
10371 {
10372 tree t = make_vector_type (innertype, nunits, VOIDmode);
10373 tree cand;
10374 /* We always build the non-opaque variant before the opaque one,
10375 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10376 cand = TYPE_NEXT_VARIANT (t);
10377 if (cand
10378 && TYPE_VECTOR_OPAQUE (cand)
10379 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10380 return cand;
10381   /* Otherwise build a variant type and make sure to queue it after
10382 the non-opaque type. */
10383 cand = build_distinct_type_copy (t);
10384 TYPE_VECTOR_OPAQUE (cand) = true;
10385 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10386 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10387 TYPE_NEXT_VARIANT (t) = cand;
10388 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10389 return cand;
10390 }
10391
10392
10393 /* Given an initializer INIT, return TRUE if INIT is zero or some
10394 aggregate of zeros. Otherwise return FALSE. */
10395 bool
10396 initializer_zerop (const_tree init)
10397 {
10398 tree elt;
10399
10400 STRIP_NOPS (init);
10401
10402 switch (TREE_CODE (init))
10403 {
10404 case INTEGER_CST:
10405 return integer_zerop (init);
10406
10407 case REAL_CST:
10408 /* ??? Note that this is not correct for C4X float formats. There,
10409 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10410 negative exponent. */
10411 return real_zerop (init)
10412 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10413
10414 case FIXED_CST:
10415 return fixed_zerop (init);
10416
10417 case COMPLEX_CST:
10418 return integer_zerop (init)
10419 || (real_zerop (init)
10420 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10421 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10422
10423 case VECTOR_CST:
10424 {
10425 unsigned i;
10426 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10427 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10428 return false;
10429 return true;
10430 }
10431
10432 case CONSTRUCTOR:
10433 {
10434 unsigned HOST_WIDE_INT idx;
10435
10436 if (TREE_CLOBBER_P (init))
10437 return false;
10438 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10439 if (!initializer_zerop (elt))
10440 return false;
10441 return true;
10442 }
10443
10444 case STRING_CST:
10445 {
10446 int i;
10447
10448 /* We need to loop through all elements to handle cases like
10449 "\0" and "\0foobar". */
10450 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10451 if (TREE_STRING_POINTER (init)[i] != '\0')
10452 return false;
10453
10454 return true;
10455 }
10456
10457 default:
10458 return false;
10459 }
10460 }
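
/* A few illustrative answers: 0, 0.0 and "\0\0\0" are zero initializers;
   -0.0 is not (the sign bit matters); a CONSTRUCTOR qualifies only when it
   is not a clobber and every recorded element is itself a zero
   initializer.  */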
10461
10462 /* Check if vector VEC consists of all equal elements and that the
10463    number of elements corresponds to the type of VEC.  The function
10464    returns the first element of the vector, or NULL_TREE if the
10465    vector is not uniform.  */
10466 tree
10467 uniform_vector_p (const_tree vec)
10468 {
10469 tree first, t;
10470 unsigned i;
10471
10472 if (vec == NULL_TREE)
10473 return NULL_TREE;
10474
10475 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10476
10477 if (TREE_CODE (vec) == VECTOR_CST)
10478 {
10479 first = VECTOR_CST_ELT (vec, 0);
10480 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10481 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10482 return NULL_TREE;
10483
10484 return first;
10485 }
10486
10487 else if (TREE_CODE (vec) == CONSTRUCTOR)
10488 {
10489 first = error_mark_node;
10490
10491 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10492 {
10493 if (i == 0)
10494 {
10495 first = t;
10496 continue;
10497 }
10498 if (!operand_equal_p (first, t, 0))
10499 return NULL_TREE;
10500 }
10501 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10502 return NULL_TREE;
10503
10504 return first;
10505 }
10506
10507 return NULL_TREE;
10508 }
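
/* For example (illustrative), { 7, 7, 7, 7 } yields the element 7, while
   { 1, 2, 3, 4 } or a CONSTRUCTOR with fewer written elements than
   TYPE_VECTOR_SUBPARTS yields NULL_TREE.  */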
10509
10510 /* Build an empty statement at location LOC. */
10511
10512 tree
10513 build_empty_stmt (location_t loc)
10514 {
10515 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10516 SET_EXPR_LOCATION (t, loc);
10517 return t;
10518 }
10519
10520
10521 /* Build an OpenMP clause with code CODE. LOC is the location of the
10522 clause. */
10523
10524 tree
10525 build_omp_clause (location_t loc, enum omp_clause_code code)
10526 {
10527 tree t;
10528 int size, length;
10529
10530 length = omp_clause_num_ops[code];
10531 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10532
10533 record_node_allocation_statistics (OMP_CLAUSE, size);
10534
10535 t = (tree) ggc_internal_alloc (size);
10536 memset (t, 0, size);
10537 TREE_SET_CODE (t, OMP_CLAUSE);
10538 OMP_CLAUSE_SET_CODE (t, code);
10539 OMP_CLAUSE_LOCATION (t) = loc;
10540
10541 return t;
10542 }
10543
10544 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10545 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10546 Except for the CODE and operand count field, other storage for the
10547 object is initialized to zeros. */
10548
10549 tree
10550 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10551 {
10552 tree t;
10553 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10554
10555 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10556 gcc_assert (len >= 1);
10557
10558 record_node_allocation_statistics (code, length);
10559
10560 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10561
10562 TREE_SET_CODE (t, code);
10563
10564 /* Can't use TREE_OPERAND to store the length because if checking is
10565 enabled, it will try to check the length before we store it. :-P */
10566 t->exp.operands[0] = build_int_cst (sizetype, len);
10567
10568 return t;
10569 }
10570
10571 /* Helper function for build_call_* functions; build a CALL_EXPR with
10572 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10573 the argument slots. */
10574
10575 static tree
10576 build_call_1 (tree return_type, tree fn, int nargs)
10577 {
10578 tree t;
10579
10580 t = build_vl_exp (CALL_EXPR, nargs + 3);
10581 TREE_TYPE (t) = return_type;
10582 CALL_EXPR_FN (t) = fn;
10583 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10584
10585 return t;
10586 }
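
/* Layout note: the CALL_EXPR built above uses operand 0 for the operand
   count stored by build_vl_exp, operand 1 for CALL_EXPR_FN, operand 2 for
   the static chain, and the call arguments from operand 3 onwards, which is
   why build_call_1 asks for NARGS + 3 operands.  */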
10587
10588 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10589 FN and a null static chain slot. NARGS is the number of call arguments
10590 which are specified as "..." arguments. */
10591
10592 tree
10593 build_call_nary (tree return_type, tree fn, int nargs, ...)
10594 {
10595 tree ret;
10596 va_list args;
10597 va_start (args, nargs);
10598 ret = build_call_valist (return_type, fn, nargs, args);
10599 va_end (args);
10600 return ret;
10601 }
10602
10603 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10604 FN and a null static chain slot. NARGS is the number of call arguments
10605 which are specified as a va_list ARGS. */
10606
10607 tree
10608 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10609 {
10610 tree t;
10611 int i;
10612
10613 t = build_call_1 (return_type, fn, nargs);
10614 for (i = 0; i < nargs; i++)
10615 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10616 process_call_operands (t);
10617 return t;
10618 }
10619
10620 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10621 FN and a null static chain slot. NARGS is the number of call arguments
10622 which are specified as a tree array ARGS. */
10623
10624 tree
10625 build_call_array_loc (location_t loc, tree return_type, tree fn,
10626 int nargs, const tree *args)
10627 {
10628 tree t;
10629 int i;
10630
10631 t = build_call_1 (return_type, fn, nargs);
10632 for (i = 0; i < nargs; i++)
10633 CALL_EXPR_ARG (t, i) = args[i];
10634 process_call_operands (t);
10635 SET_EXPR_LOCATION (t, loc);
10636 return t;
10637 }
10638
10639 /* Like build_call_array, but takes a vec. */
10640
10641 tree
10642 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10643 {
10644 tree ret, t;
10645 unsigned int ix;
10646
10647 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10648 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10649 CALL_EXPR_ARG (ret, ix) = t;
10650 process_call_operands (ret);
10651 return ret;
10652 }
10653
10654 /* Conveniently construct a function call expression. FNDECL names the
10655 function to be called and N arguments are passed in the array
10656 ARGARRAY. */
10657
10658 tree
10659 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10660 {
10661 tree fntype = TREE_TYPE (fndecl);
10662 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10663
10664 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10665 }
10666
10667 /* Conveniently construct a function call expression. FNDECL names the
10668 function to be called and the arguments are passed in the vector
10669 VEC. */
10670
10671 tree
10672 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10673 {
10674 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10675 vec_safe_address (vec));
10676 }
10677
10678
10679 /* Conveniently construct a function call expression. FNDECL names the
10680 function to be called, N is the number of arguments, and the "..."
10681 parameters are the argument expressions. */
10682
10683 tree
10684 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10685 {
10686 va_list ap;
10687 tree *argarray = XALLOCAVEC (tree, n);
10688 int i;
10689
10690 va_start (ap, n);
10691 for (i = 0; i < n; i++)
10692 argarray[i] = va_arg (ap, tree);
10693 va_end (ap);
10694 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10695 }
10696
10697 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10698 varargs macros aren't supported by all bootstrap compilers. */
10699
10700 tree
10701 build_call_expr (tree fndecl, int n, ...)
10702 {
10703 va_list ap;
10704 tree *argarray = XALLOCAVEC (tree, n);
10705 int i;
10706
10707 va_start (ap, n);
10708 for (i = 0; i < n; i++)
10709 argarray[i] = va_arg (ap, tree);
10710 va_end (ap);
10711 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10712 }
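
/* A typical (illustrative) use: code that needs a call to memcpy can write

       tree call = build_call_expr (builtin_decl_explicit (BUILT_IN_MEMCPY),
				    3, dest, src, len);

   where DEST, SRC and LEN are hypothetical trees of the appropriate pointer
   and size types.  */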
10713
10714 /* Build an internal call expression.  This is just like CALL_EXPR, except
10715    its CALL_EXPR_FN is NULL.  It will get gimplified later into an ordinary
10716    internal function call.  */
10717
10718 tree
10719 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10720 tree type, int n, ...)
10721 {
10722 va_list ap;
10723 int i;
10724
10725 tree fn = build_call_1 (type, NULL_TREE, n);
10726 va_start (ap, n);
10727 for (i = 0; i < n; i++)
10728 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10729 va_end (ap);
10730 SET_EXPR_LOCATION (fn, loc);
10731 CALL_EXPR_IFN (fn) = ifn;
10732 return fn;
10733 }
10734
10735 /* Create a new constant string literal and return a char* pointer to it.
10736 The STRING_CST value is the LEN characters at STR. */
10737 tree
10738 build_string_literal (int len, const char *str)
10739 {
10740 tree t, elem, index, type;
10741
10742 t = build_string (len, str);
10743 elem = build_type_variant (char_type_node, 1, 0);
10744 index = build_index_type (size_int (len - 1));
10745 type = build_array_type (elem, index);
10746 TREE_TYPE (t) = type;
10747 TREE_CONSTANT (t) = 1;
10748 TREE_READONLY (t) = 1;
10749 TREE_STATIC (t) = 1;
10750
10751 type = build_pointer_type (elem);
10752 t = build1 (ADDR_EXPR, type,
10753 build4 (ARRAY_REF, elem,
10754 t, integer_zero_node, NULL_TREE, NULL_TREE));
10755 return t;
10756 }
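
/* Illustrative use: build_string_literal (strlen ("%d\n") + 1, "%d\n")
   yields an ADDR_EXPR of the first character of a static, read-only
   "%d\n" array, suitable for instance as a printf format argument.  */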
10757
10758
10759
10760 /* Return true if T (assumed to be a DECL) must be assigned a memory
10761 location. */
10762
10763 bool
10764 needs_to_live_in_memory (const_tree t)
10765 {
10766 return (TREE_ADDRESSABLE (t)
10767 || is_global_var (t)
10768 || (TREE_CODE (t) == RESULT_DECL
10769 && !DECL_BY_REFERENCE (t)
10770 && aggregate_value_p (t, current_function_decl)));
10771 }
10772
10773 /* Return the value of the constant X, sign-extended.  */
10774
10775 HOST_WIDE_INT
10776 int_cst_value (const_tree x)
10777 {
10778 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10779 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10780
10781 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10782 gcc_assert (cst_and_fits_in_hwi (x));
10783
10784 if (bits < HOST_BITS_PER_WIDE_INT)
10785 {
10786 bool negative = ((val >> (bits - 1)) & 1) != 0;
10787 if (negative)
10788 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10789 else
10790 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10791 }
10792
10793 return val;
10794 }
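
/* Worked example: for an 8-bit type, a constant whose low bits are 0xff has
   the sign bit set, so the value sign-extends to (HOST_WIDE_INT) -1, whereas
   0x7f stays 127.  */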
10795
10796 /* If TYPE is an integral or pointer type, return an integer type with
10797 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10798 if TYPE is already an integer type of signedness UNSIGNEDP. */
10799
10800 tree
10801 signed_or_unsigned_type_for (int unsignedp, tree type)
10802 {
10803 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10804 return type;
10805
10806 if (TREE_CODE (type) == VECTOR_TYPE)
10807 {
10808 tree inner = TREE_TYPE (type);
10809 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10810 if (!inner2)
10811 return NULL_TREE;
10812 if (inner == inner2)
10813 return type;
10814 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10815 }
10816
10817 if (!INTEGRAL_TYPE_P (type)
10818 && !POINTER_TYPE_P (type)
10819 && TREE_CODE (type) != OFFSET_TYPE)
10820 return NULL_TREE;
10821
10822 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10823 }
10824
10825 /* If TYPE is an integral or pointer type, return an integer type with
10826 the same precision which is unsigned, or itself if TYPE is already an
10827 unsigned integer type. */
10828
10829 tree
10830 unsigned_type_for (tree type)
10831 {
10832 return signed_or_unsigned_type_for (1, type);
10833 }
10834
10835 /* If TYPE is an integral or pointer type, return an integer type with
10836 the same precision which is signed, or itself if TYPE is already a
10837 signed integer type. */
10838
10839 tree
10840 signed_type_for (tree type)
10841 {
10842 return signed_or_unsigned_type_for (0, type);
10843 }
10844
10845 /* If TYPE is a vector type, return a signed integer vector type with the
10846 same width and number of subparts. Otherwise return boolean_type_node. */
10847
10848 tree
10849 truth_type_for (tree type)
10850 {
10851 if (TREE_CODE (type) == VECTOR_TYPE)
10852 {
10853 tree elem = lang_hooks.types.type_for_size
10854 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10855 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10856 }
10857 else
10858 return boolean_type_node;
10859 }
10860
10861 /* Returns the largest value obtainable by casting something in INNER type to
10862 OUTER type. */
10863
10864 tree
10865 upper_bound_in_type (tree outer, tree inner)
10866 {
10867 unsigned int det = 0;
10868 unsigned oprec = TYPE_PRECISION (outer);
10869 unsigned iprec = TYPE_PRECISION (inner);
10870 unsigned prec;
10871
10872 /* Compute a unique number for every combination. */
10873 det |= (oprec > iprec) ? 4 : 0;
10874 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10875 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10876
10877 /* Determine the exponent to use. */
10878 switch (det)
10879 {
10880 case 0:
10881 case 1:
10882 /* oprec <= iprec, outer: signed, inner: don't care. */
10883 prec = oprec - 1;
10884 break;
10885 case 2:
10886 case 3:
10887 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10888 prec = oprec;
10889 break;
10890 case 4:
10891 /* oprec > iprec, outer: signed, inner: signed. */
10892 prec = iprec - 1;
10893 break;
10894 case 5:
10895 /* oprec > iprec, outer: signed, inner: unsigned. */
10896 prec = iprec;
10897 break;
10898 case 6:
10899 /* oprec > iprec, outer: unsigned, inner: signed. */
10900 prec = oprec;
10901 break;
10902 case 7:
10903 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10904 prec = iprec;
10905 break;
10906 default:
10907 gcc_unreachable ();
10908 }
10909
10910 return wide_int_to_tree (outer,
10911 wi::mask (prec, false, TYPE_PRECISION (outer)));
10912 }
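
/* Worked example: for a 32-bit signed INNER and an 8-bit unsigned OUTER,
   det == 2 (oprec <= iprec, outer unsigned), so prec == 8 and the bound is
   255; with an 8-bit signed OUTER, det == 0 and the bound is 127.  */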
10913
10914 /* Returns the smallest value obtainable by casting something in INNER type to
10915 OUTER type. */
10916
10917 tree
10918 lower_bound_in_type (tree outer, tree inner)
10919 {
10920 unsigned oprec = TYPE_PRECISION (outer);
10921 unsigned iprec = TYPE_PRECISION (inner);
10922
10923 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10924 and obtain 0. */
10925 if (TYPE_UNSIGNED (outer)
10926 /* If we are widening something of an unsigned type, OUTER type
10927 contains all values of INNER type. In particular, both INNER
10928 and OUTER types have zero in common. */
10929 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10930 return build_int_cst (outer, 0);
10931 else
10932 {
10933 /* If we are widening a signed type to another signed type, we
10934 	 want to obtain -2^(iprec-1).  If we are keeping the
10935 precision or narrowing to a signed type, we want to obtain
10936 -2^(oprec-1). */
10937 unsigned prec = oprec > iprec ? iprec : oprec;
10938 return wide_int_to_tree (outer,
10939 wi::mask (prec - 1, true,
10940 TYPE_PRECISION (outer)));
10941 }
10942 }
10943
10944 /* Return nonzero if two operands that are suitable for PHI nodes are
10945 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10946 SSA_NAME or invariant. Note that this is strictly an optimization.
10947 That is, callers of this function can directly call operand_equal_p
10948 and get the same result, only slower. */
10949
10950 int
10951 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10952 {
10953 if (arg0 == arg1)
10954 return 1;
10955 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10956 return 0;
10957 return operand_equal_p (arg0, arg1, 0);
10958 }
10959
10960 /* Returns the number of trailing zeros in the binary representation of X.  */
10961
10962 tree
10963 num_ending_zeros (const_tree x)
10964 {
10965 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10966 }
10967
10968
10969 #define WALK_SUBTREE(NODE) \
10970 do \
10971 { \
10972 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10973 if (result) \
10974 return result; \
10975 } \
10976 while (0)
10977
10978 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10979    to be walked whenever a type is seen in the tree.  The rest of the
10980    operands and the return value are as for walk_tree.  */
10981
10982 static tree
10983 walk_type_fields (tree type, walk_tree_fn func, void *data,
10984 hash_set<tree> *pset, walk_tree_lh lh)
10985 {
10986 tree result = NULL_TREE;
10987
10988 switch (TREE_CODE (type))
10989 {
10990 case POINTER_TYPE:
10991 case REFERENCE_TYPE:
10992 case VECTOR_TYPE:
10993 /* We have to worry about mutually recursive pointers. These can't
10994 be written in C. They can in Ada. It's pathological, but
10995 there's an ACATS test (c38102a) that checks it. Deal with this
10996 by checking if we're pointing to another pointer, that one
10997 	 points to another pointer, that one does too, and we have no PSET.
10998 	 If so, recurse via walk_tree_without_duplicates, which uses its own
10999 	 pointer set.  We check three levels deep to avoid that cost if we
	 don't need it.  */
11000 if (POINTER_TYPE_P (TREE_TYPE (type))
11001 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
11002 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
11003 && !pset)
11004 {
11005 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11006 func, data);
11007 if (result)
11008 return result;
11009
11010 break;
11011 }
11012
11013 /* ... fall through ... */
11014
11015 case COMPLEX_TYPE:
11016 WALK_SUBTREE (TREE_TYPE (type));
11017 break;
11018
11019 case METHOD_TYPE:
11020 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11021
11022 /* Fall through. */
11023
11024 case FUNCTION_TYPE:
11025 WALK_SUBTREE (TREE_TYPE (type));
11026 {
11027 tree arg;
11028
11029 /* We never want to walk into default arguments. */
11030 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11031 WALK_SUBTREE (TREE_VALUE (arg));
11032 }
11033 break;
11034
11035 case ARRAY_TYPE:
11036       /* Don't follow this node's type if it is a pointer, for fear that
11037 	 we'll have infinite recursion.  If we have a PSET, then we
11038 	 need not fear.  */
11039 if (pset
11040 || (!POINTER_TYPE_P (TREE_TYPE (type))
11041 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11042 WALK_SUBTREE (TREE_TYPE (type));
11043 WALK_SUBTREE (TYPE_DOMAIN (type));
11044 break;
11045
11046 case OFFSET_TYPE:
11047 WALK_SUBTREE (TREE_TYPE (type));
11048 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11049 break;
11050
11051 default:
11052 break;
11053 }
11054
11055 return NULL_TREE;
11056 }
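
/* A minimal sketch of a walk_tree_fn callback (hypothetical, not part of
   this file): a callback that counts INTEGER_CST nodes could be written as

       static tree
       count_int_csts (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
		       void *data)
       {
	 if (TREE_CODE (*tp) == INTEGER_CST)
	   ++*(int *) data;
	 return NULL_TREE;
       }

   and invoked as walk_tree (&expr, count_int_csts, &count, NULL).  */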
11057
11058 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11059 called with the DATA and the address of each sub-tree. If FUNC returns a
11060 non-NULL value, the traversal is stopped, and the value returned by FUNC
11061 is returned. If PSET is non-NULL it is used to record the nodes visited,
11062 and to avoid visiting a node more than once. */
11063
11064 tree
11065 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11066 hash_set<tree> *pset, walk_tree_lh lh)
11067 {
11068 enum tree_code code;
11069 int walk_subtrees;
11070 tree result;
11071
11072 #define WALK_SUBTREE_TAIL(NODE) \
11073 do \
11074 { \
11075 tp = & (NODE); \
11076 goto tail_recurse; \
11077 } \
11078 while (0)
11079
11080 tail_recurse:
11081 /* Skip empty subtrees. */
11082 if (!*tp)
11083 return NULL_TREE;
11084
11085 /* Don't walk the same tree twice, if the user has requested
11086 that we avoid doing so. */
11087 if (pset && pset->add (*tp))
11088 return NULL_TREE;
11089
11090 /* Call the function. */
11091 walk_subtrees = 1;
11092 result = (*func) (tp, &walk_subtrees, data);
11093
11094 /* If we found something, return it. */
11095 if (result)
11096 return result;
11097
11098 code = TREE_CODE (*tp);
11099
11100 /* Even if we didn't, FUNC may have decided that there was nothing
11101 interesting below this point in the tree. */
11102 if (!walk_subtrees)
11103 {
11104 /* But we still need to check our siblings. */
11105 if (code == TREE_LIST)
11106 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11107 else if (code == OMP_CLAUSE)
11108 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11109 else
11110 return NULL_TREE;
11111 }
11112
11113 if (lh)
11114 {
11115 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11116 if (result || !walk_subtrees)
11117 return result;
11118 }
11119
11120 switch (code)
11121 {
11122 case ERROR_MARK:
11123 case IDENTIFIER_NODE:
11124 case INTEGER_CST:
11125 case REAL_CST:
11126 case FIXED_CST:
11127 case VECTOR_CST:
11128 case STRING_CST:
11129 case BLOCK:
11130 case PLACEHOLDER_EXPR:
11131 case SSA_NAME:
11132 case FIELD_DECL:
11133 case RESULT_DECL:
11134 /* None of these have subtrees other than those already walked
11135 above. */
11136 break;
11137
11138 case TREE_LIST:
11139 WALK_SUBTREE (TREE_VALUE (*tp));
11140 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11141 break;
11142
11143 case TREE_VEC:
11144 {
11145 int len = TREE_VEC_LENGTH (*tp);
11146
11147 if (len == 0)
11148 break;
11149
11150 /* Walk all elements but the first. */
11151 while (--len)
11152 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11153
11154 /* Now walk the first one as a tail call. */
11155 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11156 }
11157
11158 case COMPLEX_CST:
11159 WALK_SUBTREE (TREE_REALPART (*tp));
11160 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11161
11162 case CONSTRUCTOR:
11163 {
11164 unsigned HOST_WIDE_INT idx;
11165 constructor_elt *ce;
11166
11167 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11168 idx++)
11169 WALK_SUBTREE (ce->value);
11170 }
11171 break;
11172
11173 case SAVE_EXPR:
11174 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11175
11176 case BIND_EXPR:
11177 {
11178 tree decl;
11179 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11180 {
11181 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11182 into declarations that are just mentioned, rather than
11183 declared; they don't really belong to this part of the tree.
11184 And, we can see cycles: the initializer for a declaration
11185 can refer to the declaration itself. */
11186 WALK_SUBTREE (DECL_INITIAL (decl));
11187 WALK_SUBTREE (DECL_SIZE (decl));
11188 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11189 }
11190 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11191 }
11192
11193 case STATEMENT_LIST:
11194 {
11195 tree_stmt_iterator i;
11196 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11197 WALK_SUBTREE (*tsi_stmt_ptr (i));
11198 }
11199 break;
11200
11201 case OMP_CLAUSE:
11202 switch (OMP_CLAUSE_CODE (*tp))
11203 {
11204 case OMP_CLAUSE_GANG:
11205 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11206 /* FALLTHRU */
11207
11208 case OMP_CLAUSE_DEVICE_RESIDENT:
11209 case OMP_CLAUSE_USE_DEVICE:
11210 case OMP_CLAUSE_ASYNC:
11211 case OMP_CLAUSE_WAIT:
11212 case OMP_CLAUSE_WORKER:
11213 case OMP_CLAUSE_VECTOR:
11214 case OMP_CLAUSE_NUM_GANGS:
11215 case OMP_CLAUSE_NUM_WORKERS:
11216 case OMP_CLAUSE_VECTOR_LENGTH:
11217 case OMP_CLAUSE_PRIVATE:
11218 case OMP_CLAUSE_SHARED:
11219 case OMP_CLAUSE_FIRSTPRIVATE:
11220 case OMP_CLAUSE_COPYIN:
11221 case OMP_CLAUSE_COPYPRIVATE:
11222 case OMP_CLAUSE_FINAL:
11223 case OMP_CLAUSE_IF:
11224 case OMP_CLAUSE_NUM_THREADS:
11225 case OMP_CLAUSE_SCHEDULE:
11226 case OMP_CLAUSE_UNIFORM:
11227 case OMP_CLAUSE_DEPEND:
11228 case OMP_CLAUSE_NUM_TEAMS:
11229 case OMP_CLAUSE_THREAD_LIMIT:
11230 case OMP_CLAUSE_DEVICE:
11231 case OMP_CLAUSE_DIST_SCHEDULE:
11232 case OMP_CLAUSE_SAFELEN:
11233 case OMP_CLAUSE_SIMDLEN:
11234 case OMP_CLAUSE__LOOPTEMP_:
11235 case OMP_CLAUSE__SIMDUID_:
11236 case OMP_CLAUSE__CILK_FOR_COUNT_:
11237 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11238 /* FALLTHRU */
11239
11240 case OMP_CLAUSE_INDEPENDENT:
11241 case OMP_CLAUSE_NOWAIT:
11242 case OMP_CLAUSE_ORDERED:
11243 case OMP_CLAUSE_DEFAULT:
11244 case OMP_CLAUSE_UNTIED:
11245 case OMP_CLAUSE_MERGEABLE:
11246 case OMP_CLAUSE_PROC_BIND:
11247 case OMP_CLAUSE_INBRANCH:
11248 case OMP_CLAUSE_NOTINBRANCH:
11249 case OMP_CLAUSE_FOR:
11250 case OMP_CLAUSE_PARALLEL:
11251 case OMP_CLAUSE_SECTIONS:
11252 case OMP_CLAUSE_TASKGROUP:
11253 case OMP_CLAUSE_AUTO:
11254 case OMP_CLAUSE_SEQ:
11255 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11256
11257 case OMP_CLAUSE_LASTPRIVATE:
11258 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11259 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11260 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11261
11262 case OMP_CLAUSE_COLLAPSE:
11263 {
11264 int i;
11265 for (i = 0; i < 3; i++)
11266 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11267 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11268 }
11269
11270 case OMP_CLAUSE_LINEAR:
11271 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11272 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11273 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11274 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11275
11276 case OMP_CLAUSE_ALIGNED:
11277 case OMP_CLAUSE_FROM:
11278 case OMP_CLAUSE_TO:
11279 case OMP_CLAUSE_MAP:
11280 case OMP_CLAUSE__CACHE_:
11281 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11282 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11283 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11284
11285 case OMP_CLAUSE_REDUCTION:
11286 {
11287 int i;
11288 for (i = 0; i < 4; i++)
11289 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11290 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11291 }
11292
11293 default:
11294 gcc_unreachable ();
11295 }
11296 break;
11297
11298 case TARGET_EXPR:
11299 {
11300 int i, len;
11301
11302 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11303 But, we only want to walk once. */
11304 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11305 for (i = 0; i < len; ++i)
11306 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11307 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11308 }
11309
11310 case DECL_EXPR:
11311 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11312 defining. We only want to walk into these fields of a type in this
11313 case and not in the general case of a mere reference to the type.
11314
11315 The criterion is as follows: if the field can be an expression, it
11316 must be walked only here. This should be in keeping with the fields
11317 that are directly gimplified in gimplify_type_sizes in order for the
11318 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11319 variable-sized types.
11320
11321 Note that DECLs get walked as part of processing the BIND_EXPR. */
11322 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11323 {
11324 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11325 if (TREE_CODE (*type_p) == ERROR_MARK)
11326 return NULL_TREE;
11327
11328 /* Call the function for the type. See if it returns anything or
11329 doesn't want us to continue. If we are to continue, walk both
11330 the normal fields and those for the declaration case. */
11331 result = (*func) (type_p, &walk_subtrees, data);
11332 if (result || !walk_subtrees)
11333 return result;
11334
11335 /* But do not walk a pointed-to type since it may itself need to
11336 be walked in the declaration case if it isn't anonymous. */
11337 if (!POINTER_TYPE_P (*type_p))
11338 {
11339 result = walk_type_fields (*type_p, func, data, pset, lh);
11340 if (result)
11341 return result;
11342 }
11343
11344 /* If this is a record type, also walk the fields. */
11345 if (RECORD_OR_UNION_TYPE_P (*type_p))
11346 {
11347 tree field;
11348
11349 for (field = TYPE_FIELDS (*type_p); field;
11350 field = DECL_CHAIN (field))
11351 {
11352 /* We'd like to look at the type of the field, but we can
11353 easily get infinite recursion. So assume it's pointed
11354 to elsewhere in the tree. Also, ignore things that
11355 aren't fields. */
11356 if (TREE_CODE (field) != FIELD_DECL)
11357 continue;
11358
11359 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11360 WALK_SUBTREE (DECL_SIZE (field));
11361 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11362 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11363 WALK_SUBTREE (DECL_QUALIFIER (field));
11364 }
11365 }
11366
11367 /* Same for scalar types. */
11368 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11369 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11370 || TREE_CODE (*type_p) == INTEGER_TYPE
11371 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11372 || TREE_CODE (*type_p) == REAL_TYPE)
11373 {
11374 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11375 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11376 }
11377
11378 WALK_SUBTREE (TYPE_SIZE (*type_p));
11379 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11380 }
11381 /* FALLTHRU */
11382
11383 default:
11384 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11385 {
11386 int i, len;
11387
11388 /* Walk over all the sub-trees of this operand. */
11389 len = TREE_OPERAND_LENGTH (*tp);
11390
11391 /* Go through the subtrees. We need to do this in forward order so
11392 that the scope of a FOR_EXPR is handled properly. */
11393 if (len)
11394 {
11395 for (i = 0; i < len - 1; ++i)
11396 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11397 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11398 }
11399 }
11400 /* If this is a type, walk the needed fields in the type. */
11401 else if (TYPE_P (*tp))
11402 return walk_type_fields (*tp, func, data, pset, lh);
11403 break;
11404 }
11405
11406 /* We didn't find what we were looking for. */
11407 return NULL_TREE;
11408
11409 #undef WALK_SUBTREE_TAIL
11410 }
11411 #undef WALK_SUBTREE
11412
11413 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11414
11415 tree
11416 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11417 walk_tree_lh lh)
11418 {
11419 tree result;
11420
11421 hash_set<tree> pset;
11422 result = walk_tree_1 (tp, func, data, &pset, lh);
11423 return result;
11424 }
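/* A hedged usage sketch (hypothetical helper, not part of this file): a
   walk_tree_fn callback that counts ADDR_EXPR nodes reachable from an
   expression, driven through the wrappers above.

     static tree
     count_addr_exprs_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                         void *data)
     {
       if (TREE_CODE (*tp) == ADDR_EXPR)
         ++*(int *) data;
       return NULL_TREE;
     }

     int count = 0;
     walk_tree_without_duplicates (&expr, count_addr_exprs_r, &count);

   Returning a non-NULL tree from the callback terminates the walk and becomes
   the result of walk_tree_1; NULL_TREE means "keep walking".  Clearing
   *walk_subtrees skips the operands of the current node while still visiting
   its siblings (see the TREE_LIST and OMP_CLAUSE handling above).  */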
11425
11426
11427 tree
11428 tree_block (tree t)
11429 {
11430 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11431
11432 if (IS_EXPR_CODE_CLASS (c))
11433 return LOCATION_BLOCK (t->exp.locus);
11434 gcc_unreachable ();
11435 return NULL;
11436 }
11437
11438 void
11439 tree_set_block (tree t, tree b)
11440 {
11441 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11442
11443 if (IS_EXPR_CODE_CLASS (c))
11444 {
11445 if (b)
11446 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11447 else
11448 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11449 }
11450 else
11451 gcc_unreachable ();
11452 }
11453
11454 /* Create a nameless artificial label and put it in the current
11455 function context. The label has a location of LOC. Returns the
11456 newly created label. */
11457
11458 tree
11459 create_artificial_label (location_t loc)
11460 {
11461 tree lab = build_decl (loc,
11462 LABEL_DECL, NULL_TREE, void_type_node);
11463
11464 DECL_ARTIFICIAL (lab) = 1;
11465 DECL_IGNORED_P (lab) = 1;
11466 DECL_CONTEXT (lab) = current_function_decl;
11467 return lab;
11468 }
11469
11470 /* Given a tree, try to return a useful variable name that we can use
11471 to prefix a temporary that is being assigned the value of the tree.
11472 I.e. given <temp> = &A, return A. */
11473
11474 const char *
11475 get_name (tree t)
11476 {
11477 tree stripped_decl;
11478
11479 stripped_decl = t;
11480 STRIP_NOPS (stripped_decl);
11481 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11482 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11483 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11484 {
11485 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11486 if (!name)
11487 return NULL;
11488 return IDENTIFIER_POINTER (name);
11489 }
11490 else
11491 {
11492 switch (TREE_CODE (stripped_decl))
11493 {
11494 case ADDR_EXPR:
11495 return get_name (TREE_OPERAND (stripped_decl, 0));
11496 default:
11497 return NULL;
11498 }
11499 }
11500 }
11501
11502 /* Return true if FNTYPE has a variable argument list. */
11503
11504 bool
11505 stdarg_p (const_tree fntype)
11506 {
11507 function_args_iterator args_iter;
11508 tree n = NULL_TREE, t;
11509
11510 if (!fntype)
11511 return false;
11512
11513 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11514 {
11515 n = t;
11516 }
11517
11518 return n != NULL_TREE && n != void_type_node;
11519 }
11520
11521 /* Return true if FNTYPE has a prototype. */
11522
11523 bool
11524 prototype_p (tree fntype)
11525 {
11526 tree t;
11527
11528 gcc_assert (fntype != NULL_TREE);
11529
11530 t = TYPE_ARG_TYPES (fntype);
11531 return (t != NULL_TREE);
11532 }
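/* For illustration (a hedged sketch, assuming the usual C front-end types):

     int f (int, ...);      stdarg_p -> true,  prototype_p -> true
     int g (void);          stdarg_p -> false, prototype_p -> true
     int h ();              stdarg_p -> false, prototype_p -> false
                            (unprototyped: TYPE_ARG_TYPES is NULL)

   stdarg_p is true exactly when the argument type list is non-empty and does
   not terminate in void_type_node.  */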
11533
11534 /* If BLOCK is inlined from an __attribute__((__artificial__))
11535 routine, return a pointer to the location from which it has
11536 been called. */
11537 location_t *
11538 block_nonartificial_location (tree block)
11539 {
11540 location_t *ret = NULL;
11541
11542 while (block && TREE_CODE (block) == BLOCK
11543 && BLOCK_ABSTRACT_ORIGIN (block))
11544 {
11545 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11546
11547 while (TREE_CODE (ao) == BLOCK
11548 && BLOCK_ABSTRACT_ORIGIN (ao)
11549 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11550 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11551
11552 if (TREE_CODE (ao) == FUNCTION_DECL)
11553 {
11554 /* If AO is an artificial inline, point RET to the
11555 call site locus at which it has been inlined and continue
11556 the loop, in case AO's caller is also an artificial
11557 inline. */
11558 if (DECL_DECLARED_INLINE_P (ao)
11559 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11560 ret = &BLOCK_SOURCE_LOCATION (block);
11561 else
11562 break;
11563 }
11564 else if (TREE_CODE (ao) != BLOCK)
11565 break;
11566
11567 block = BLOCK_SUPERCONTEXT (block);
11568 }
11569 return ret;
11570 }
11571
11572
11573 /* If EXP is inlined from an __attribute__((__artificial__))
11574 function, return the location of the original call expression. */
11575
11576 location_t
11577 tree_nonartificial_location (tree exp)
11578 {
11579 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11580
11581 if (loc)
11582 return *loc;
11583 else
11584 return EXPR_LOCATION (exp);
11585 }
11586
11587
11588 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11589 and TARGET_OPTION_NODE nodes. */
11590
11591 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11592
11593 hashval_t
11594 cl_option_hasher::hash (tree x)
11595 {
11596 const_tree const t = x;
11597 const char *p;
11598 size_t i;
11599 size_t len = 0;
11600 hashval_t hash = 0;
11601
11602 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11603 {
11604 p = (const char *)TREE_OPTIMIZATION (t);
11605 len = sizeof (struct cl_optimization);
11606 }
11607
11608 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11609 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11610
11611 else
11612 gcc_unreachable ();
11613
11614 /* Assume most opt flags are just 0/1, some are 2-3, and a few might be
11615 something else. */
11616 for (i = 0; i < len; i++)
11617 if (p[i])
11618 hash = (hash << 4) ^ ((i << 2) | p[i]);
11619
11620 return hash;
11621 }
11622
11623 /* Return true if the value represented by X (an OPTIMIZATION_NODE or
11624 TARGET_OPTION_NODE) is the same as that represented by Y, a node of
11625 the same kind. */
11626
11627 bool
11628 cl_option_hasher::equal (tree x, tree y)
11629 {
11630 const_tree const xt = x;
11631 const_tree const yt = y;
11632 const char *xp;
11633 const char *yp;
11634 size_t len;
11635
11636 if (TREE_CODE (xt) != TREE_CODE (yt))
11637 return 0;
11638
11639 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11640 {
11641 xp = (const char *)TREE_OPTIMIZATION (xt);
11642 yp = (const char *)TREE_OPTIMIZATION (yt);
11643 len = sizeof (struct cl_optimization);
11644 }
11645
11646 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11647 {
11648 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11649 TREE_TARGET_OPTION (yt));
11650 }
11651
11652 else
11653 gcc_unreachable ();
11654
11655 return (memcmp (xp, yp, len) == 0);
11656 }
11657
11658 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11659
11660 tree
11661 build_optimization_node (struct gcc_options *opts)
11662 {
11663 tree t;
11664
11665 /* Use the cache of optimization nodes. */
11666
11667 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11668 opts);
11669
11670 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11671 t = *slot;
11672 if (!t)
11673 {
11674 /* Insert this one into the hash table. */
11675 t = cl_optimization_node;
11676 *slot = t;
11677
11678 /* Make a new node for next time round. */
11679 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11680 }
11681
11682 return t;
11683 }
11684
11685 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11686
11687 tree
11688 build_target_option_node (struct gcc_options *opts)
11689 {
11690 tree t;
11691
11692 /* Use the cache of target option nodes. */
11693
11694 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11695 opts);
11696
11697 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11698 t = *slot;
11699 if (!t)
11700 {
11701 /* Insert this one into the hash table. */
11702 t = cl_target_option_node;
11703 *slot = t;
11704
11705 /* Make a new node for next time round. */
11706 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11707 }
11708
11709 return t;
11710 }
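/* A hedged usage sketch (not a prescription): attribute and pragma handling
   typically snapshots the current options into one of these shared nodes and
   hangs it off a declaration, e.g.

     tree opt = build_optimization_node (&global_options);
     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = opt;

   where fndecl stands for some FUNCTION_DECL.  Because identical option sets
   are interned in cl_option_hash_table, pointer equality of the returned
   trees is enough to tell whether two declarations carry the same settings.  */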
11711
11712 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11713 so that they aren't saved during PCH writing. */
11714
11715 void
11716 prepare_target_option_nodes_for_pch (void)
11717 {
11718 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11719 for (; iter != cl_option_hash_table->end (); ++iter)
11720 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11721 TREE_TARGET_GLOBALS (*iter) = NULL;
11722 }
11723
11724 /* Determine the "ultimate origin" of a block. The block may be an inlined
11725 instance of an inlined instance of a block which is local to an inline
11726 function, so we have to trace all of the way back through the origin chain
11727 to find out what sort of node actually served as the original seed for the
11728 given block. */
11729
11730 tree
11731 block_ultimate_origin (const_tree block)
11732 {
11733 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11734
11735 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11736 we're trying to output the abstract instance of this function. */
11737 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11738 return NULL_TREE;
11739
11740 if (immediate_origin == NULL_TREE)
11741 return NULL_TREE;
11742 else
11743 {
11744 tree ret_val;
11745 tree lookahead = immediate_origin;
11746
11747 do
11748 {
11749 ret_val = lookahead;
11750 lookahead = (TREE_CODE (ret_val) == BLOCK
11751 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11752 }
11753 while (lookahead != NULL && lookahead != ret_val);
11754
11755 /* The block's abstract origin chain may not be the *ultimate* origin of
11756 the block. It could lead to a DECL that has an abstract origin set.
11757 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11758 will give us if it has one). Note that DECL's abstract origins are
11759 supposed to be the most distant ancestor (or so decl_ultimate_origin
11760 claims), so we don't need to loop following the DECL origins. */
11761 if (DECL_P (ret_val))
11762 return DECL_ORIGIN (ret_val);
11763
11764 return ret_val;
11765 }
11766 }
11767
11768 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11769 no instruction. */
11770
11771 bool
11772 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11773 {
11774 /* Use precision rather than machine mode when we can, which gives
11775 the correct answer even for submode (bit-field) types. */
11776 if ((INTEGRAL_TYPE_P (outer_type)
11777 || POINTER_TYPE_P (outer_type)
11778 || TREE_CODE (outer_type) == OFFSET_TYPE)
11779 && (INTEGRAL_TYPE_P (inner_type)
11780 || POINTER_TYPE_P (inner_type)
11781 || TREE_CODE (inner_type) == OFFSET_TYPE))
11782 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11783
11784 /* Otherwise fall back on comparing machine modes (e.g. for
11785 aggregate types, floats). */
11786 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11787 }
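/* A minimal illustration (assuming a typical target where int is 32 bits):

     (unsigned int) i    same TYPE_PRECISION as int  -> true (no instruction)
     (short) i           precision changes           -> false
     (void *) p          pointers of equal precision -> true

   Signedness is deliberately ignored here; tree_sign_nop_conversion below
   additionally requires matching TYPE_UNSIGNED and pointer-ness.  */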
11788
11789 /* Return true iff conversion in EXP generates no instruction. Mark
11790 it inline so that we fully inline into the stripping functions even
11791 though we have two uses of this function. */
11792
11793 static inline bool
11794 tree_nop_conversion (const_tree exp)
11795 {
11796 tree outer_type, inner_type;
11797
11798 if (!CONVERT_EXPR_P (exp)
11799 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11800 return false;
11801 if (TREE_OPERAND (exp, 0) == error_mark_node)
11802 return false;
11803
11804 outer_type = TREE_TYPE (exp);
11805 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11806
11807 if (!inner_type)
11808 return false;
11809
11810 return tree_nop_conversion_p (outer_type, inner_type);
11811 }
11812
11813 /* Return true iff conversion in EXP generates no instruction. Don't
11814 consider conversions changing the signedness. */
11815
11816 static bool
11817 tree_sign_nop_conversion (const_tree exp)
11818 {
11819 tree outer_type, inner_type;
11820
11821 if (!tree_nop_conversion (exp))
11822 return false;
11823
11824 outer_type = TREE_TYPE (exp);
11825 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11826
11827 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11828 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11829 }
11830
11831 /* Strip conversions from EXP according to tree_nop_conversion and
11832 return the resulting expression. */
11833
11834 tree
11835 tree_strip_nop_conversions (tree exp)
11836 {
11837 while (tree_nop_conversion (exp))
11838 exp = TREE_OPERAND (exp, 0);
11839 return exp;
11840 }
11841
11842 /* Strip conversions from EXP according to tree_sign_nop_conversion
11843 and return the resulting expression. */
11844
11845 tree
11846 tree_strip_sign_nop_conversions (tree exp)
11847 {
11848 while (tree_sign_nop_conversion (exp))
11849 exp = TREE_OPERAND (exp, 0);
11850 return exp;
11851 }
11852
11853 /* Strip any floating-point extensions from EXP and return the result. */
11854 tree
11855 strip_float_extensions (tree exp)
11856 {
11857 tree sub, expt, subt;
11858
11859 /* For a floating-point constant, look up the narrowest type that can hold
11860 it properly and handle it like (type)(narrowest_type)constant.
11861 This way we can optimize, for instance, a = a*2.0 where "a" is float
11862 but 2.0 is a double constant. */
11863 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11864 {
11865 REAL_VALUE_TYPE orig;
11866 tree type = NULL;
11867
11868 orig = TREE_REAL_CST (exp);
11869 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11870 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11871 type = float_type_node;
11872 else if (TYPE_PRECISION (TREE_TYPE (exp))
11873 > TYPE_PRECISION (double_type_node)
11874 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11875 type = double_type_node;
11876 if (type)
11877 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11878 }
11879
11880 if (!CONVERT_EXPR_P (exp))
11881 return exp;
11882
11883 sub = TREE_OPERAND (exp, 0);
11884 subt = TREE_TYPE (sub);
11885 expt = TREE_TYPE (exp);
11886
11887 if (!FLOAT_TYPE_P (subt))
11888 return exp;
11889
11890 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11891 return exp;
11892
11893 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11894 return exp;
11895
11896 return strip_float_extensions (sub);
11897 }
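/* Worked example (hedged; the usual float/double formats assumed): in
   'f = f * 2.0' with float 'f', the front end computes in double, i.e.
   '(double) f * 2.0'.  strip_float_extensions on '(double) f' returns 'f',
   and on the REAL_CST 2.0 it notices that the value truncates exactly to
   float and returns it rebuilt as a float constant, so the caller can do
   the whole multiplication in float.  */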
11898
11899 /* Strip out all handled components that produce invariant
11900 offsets. */
11901
11902 const_tree
11903 strip_invariant_refs (const_tree op)
11904 {
11905 while (handled_component_p (op))
11906 {
11907 switch (TREE_CODE (op))
11908 {
11909 case ARRAY_REF:
11910 case ARRAY_RANGE_REF:
11911 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11912 || TREE_OPERAND (op, 2) != NULL_TREE
11913 || TREE_OPERAND (op, 3) != NULL_TREE)
11914 return NULL;
11915 break;
11916
11917 case COMPONENT_REF:
11918 if (TREE_OPERAND (op, 2) != NULL_TREE)
11919 return NULL;
11920 break;
11921
11922 default:;
11923 }
11924 op = TREE_OPERAND (op, 0);
11925 }
11926
11927 return op;
11928 }
11929
11930 static GTY(()) tree gcc_eh_personality_decl;
11931
11932 /* Return the GCC personality function decl. */
11933
11934 tree
11935 lhd_gcc_personality (void)
11936 {
11937 if (!gcc_eh_personality_decl)
11938 gcc_eh_personality_decl = build_personality_function ("gcc");
11939 return gcc_eh_personality_decl;
11940 }
11941
11942 /* TARGET is the call target of a GIMPLE call statement
11943 (obtained by gimple_call_fn). Return true if it is an
11944 OBJ_TYPE_REF representing a virtual call of a C++ method.
11945 (As opposed to an OBJ_TYPE_REF representing ObjC calls
11946 through a cast, where the middle-end devirtualization
11947 machinery can't apply.) */
11948
11949 bool
11950 virtual_method_call_p (tree target)
11951 {
11952 if (TREE_CODE (target) != OBJ_TYPE_REF)
11953 return false;
11954 tree t = TREE_TYPE (target);
11955 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
11956 t = TREE_TYPE (t);
11957 if (TREE_CODE (t) == FUNCTION_TYPE)
11958 return false;
11959 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
11960 /* If we do not have a BINFO associated, it means that the type was built
11961 without devirtualization enabled. Do not consider this a virtual
11962 call. */
11963 if (!TYPE_BINFO (obj_type_ref_class (target)))
11964 return false;
11965 return true;
11966 }
11967
11968 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
11969
11970 tree
11971 obj_type_ref_class (tree ref)
11972 {
11973 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11974 ref = TREE_TYPE (ref);
11975 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11976 ref = TREE_TYPE (ref);
11977 /* We look for the type THIS points to. ObjC also builds
11978 OBJ_TYPE_REF with non-method calls; their first parameter
11979 ID, however, also corresponds to the class type. */
11980 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11981 || TREE_CODE (ref) == FUNCTION_TYPE);
11982 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11983 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11984 return TREE_TYPE (ref);
11985 }
11986
11987 /* Return true if T is in an anonymous namespace. */
11988
11989 bool
11990 type_in_anonymous_namespace_p (const_tree t)
11991 {
11992 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11993 builtin types; those have CONTEXT NULL. */
11994 if (!TYPE_CONTEXT (t))
11995 return false;
11996 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11997 }
11998
11999 /* Look up the sub-BINFO of BINFO whose type is TYPE at offset POS. */
12000
12001 static tree
12002 lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos)
12003 {
12004 unsigned int i;
12005 tree base_binfo, b;
12006
12007 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12008 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo))
12009 && types_same_for_odr (TREE_TYPE (base_binfo), type))
12010 return base_binfo;
12011 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL)
12012 return b;
12013 return NULL;
12014 }
12015
12016 /* Try to find a base info of BINFO that would have its field decl at offset
12017 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
12018 found, return it; otherwise return NULL_TREE. */
12019
12020 tree
12021 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
12022 {
12023 tree type = BINFO_TYPE (binfo);
12024
12025 while (true)
12026 {
12027 HOST_WIDE_INT pos, size;
12028 tree fld;
12029 int i;
12030
12031 if (types_same_for_odr (type, expected_type))
12032 return binfo;
12033 if (offset < 0)
12034 return NULL_TREE;
12035
12036 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12037 {
12038 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12039 continue;
12040
12041 pos = int_bit_position (fld);
12042 size = tree_to_uhwi (DECL_SIZE (fld));
12043 if (pos <= offset && (pos + size) > offset)
12044 break;
12045 }
12046 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12047 return NULL_TREE;
12048
12049 /* Offset 0 indicates the primary base, whose vtable contents are
12050 represented in the binfo for the derived class. */
12051 else if (offset != 0)
12052 {
12053 tree found_binfo = NULL, base_binfo;
12054 /* Offsets in BINFO are in bytes relative to the whole structure
12055 while POS is in bits relative to the containing field. */
12056 int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
12057 / BITS_PER_UNIT);
12058
12059 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
12060 if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
12061 && types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12062 {
12063 found_binfo = base_binfo;
12064 break;
12065 }
12066 if (found_binfo)
12067 binfo = found_binfo;
12068 else
12069 binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
12070 binfo_offset);
12071 }
12072
12073 type = TREE_TYPE (fld);
12074 offset -= pos;
12075 }
12076 }
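/* A hedged illustration: for

     struct A { virtual void f (); };
     struct B { virtual void g (); };
     struct C : A, B { };

   the binfo of C has base binfos for A (offset 0, the primary base) and for
   B at a non-zero offset.  Devirtualization code that knows a vtable load
   happens at that offset within a C object can call
   get_binfo_at_offset (TYPE_BINFO (C_type), offset_in_bits, B_type) to
   recover B's binfo; C_type, B_type and offset_in_bits are illustrative
   names only.  */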
12077
12078 /* Returns true if X is a typedef decl. */
12079
12080 bool
12081 is_typedef_decl (tree x)
12082 {
12083 return (x && TREE_CODE (x) == TYPE_DECL
12084 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12085 }
12086
12087 /* Returns true iff TYPE is a type variant created for a typedef. */
12088
12089 bool
12090 typedef_variant_p (tree type)
12091 {
12092 return is_typedef_decl (TYPE_NAME (type));
12093 }
12094
12095 /* Warn about a use of an identifier which was marked deprecated. */
12096 void
12097 warn_deprecated_use (tree node, tree attr)
12098 {
12099 const char *msg;
12100
12101 if (node == 0 || !warn_deprecated_decl)
12102 return;
12103
12104 if (!attr)
12105 {
12106 if (DECL_P (node))
12107 attr = DECL_ATTRIBUTES (node);
12108 else if (TYPE_P (node))
12109 {
12110 tree decl = TYPE_STUB_DECL (node);
12111 if (decl)
12112 attr = lookup_attribute ("deprecated",
12113 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12114 }
12115 }
12116
12117 if (attr)
12118 attr = lookup_attribute ("deprecated", attr);
12119
12120 if (attr)
12121 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12122 else
12123 msg = NULL;
12124
12125 bool w;
12126 if (DECL_P (node))
12127 {
12128 if (msg)
12129 w = warning (OPT_Wdeprecated_declarations,
12130 "%qD is deprecated: %s", node, msg);
12131 else
12132 w = warning (OPT_Wdeprecated_declarations,
12133 "%qD is deprecated", node);
12134 if (w)
12135 inform (DECL_SOURCE_LOCATION (node), "declared here");
12136 }
12137 else if (TYPE_P (node))
12138 {
12139 tree what = NULL_TREE;
12140 tree decl = TYPE_STUB_DECL (node);
12141
12142 if (TYPE_NAME (node))
12143 {
12144 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12145 what = TYPE_NAME (node);
12146 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12147 && DECL_NAME (TYPE_NAME (node)))
12148 what = DECL_NAME (TYPE_NAME (node));
12149 }
12150
12151 if (decl)
12152 {
12153 if (what)
12154 {
12155 if (msg)
12156 w = warning (OPT_Wdeprecated_declarations,
12157 "%qE is deprecated: %s", what, msg);
12158 else
12159 w = warning (OPT_Wdeprecated_declarations,
12160 "%qE is deprecated", what);
12161 }
12162 else
12163 {
12164 if (msg)
12165 w = warning (OPT_Wdeprecated_declarations,
12166 "type is deprecated: %s", msg);
12167 else
12168 w = warning (OPT_Wdeprecated_declarations,
12169 "type is deprecated");
12170 }
12171 if (w)
12172 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12173 }
12174 else
12175 {
12176 if (what)
12177 {
12178 if (msg)
12179 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12180 what, msg);
12181 else
12182 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12183 }
12184 else
12185 {
12186 if (msg)
12187 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12188 msg);
12189 else
12190 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12191 }
12192 }
12193 }
12194 }
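/* For illustration: a declaration such as

     int old_api (void) __attribute__ ((deprecated ("use new_api instead")));

   routes each use of old_api through this function, yielding
   "'old_api' is deprecated: use new_api instead" under
   -Wdeprecated-declarations; without an attribute argument MSG stays NULL
   and the short "%qD is deprecated" form is used.  */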
12195
12196 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12197 somewhere in it. */
12198
12199 bool
12200 contains_bitfld_component_ref_p (const_tree ref)
12201 {
12202 while (handled_component_p (ref))
12203 {
12204 if (TREE_CODE (ref) == COMPONENT_REF
12205 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12206 return true;
12207 ref = TREE_OPERAND (ref, 0);
12208 }
12209
12210 return false;
12211 }
12212
12213 /* Try to determine whether a TRY_CATCH expression can fall through.
12214 This is a subroutine of block_may_fallthru. */
12215
12216 static bool
12217 try_catch_may_fallthru (const_tree stmt)
12218 {
12219 tree_stmt_iterator i;
12220
12221 /* If the TRY block can fall through, the whole TRY_CATCH can
12222 fall through. */
12223 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12224 return true;
12225
12226 i = tsi_start (TREE_OPERAND (stmt, 1));
12227 switch (TREE_CODE (tsi_stmt (i)))
12228 {
12229 case CATCH_EXPR:
12230 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12231 catch expression and a body. The whole TRY_CATCH may fall
12232 through iff any of the catch bodies falls through. */
12233 for (; !tsi_end_p (i); tsi_next (&i))
12234 {
12235 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12236 return true;
12237 }
12238 return false;
12239
12240 case EH_FILTER_EXPR:
12241 /* The exception filter expression only matters if there is an
12242 exception. If the exception does not match EH_FILTER_TYPES,
12243 we will execute EH_FILTER_FAILURE, and we will fall through
12244 if that falls through. If the exception does match
12245 EH_FILTER_TYPES, the stack unwinder will continue up the
12246 stack, so we will not fall through. We don't know whether we
12247 will throw an exception which matches EH_FILTER_TYPES or not,
12248 so we just ignore EH_FILTER_TYPES and assume that we might
12249 throw an exception which doesn't match. */
12250 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12251
12252 default:
12253 /* This case represents statements to be executed when an
12254 exception occurs. Those statements are implicitly followed
12255 by a RESX statement to resume execution after the exception.
12256 So in this case the TRY_CATCH never falls through. */
12257 return false;
12258 }
12259 }
12260
12261 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12262 need not be 100% accurate; simply be conservative and return true if we
12263 don't know. This is used only to avoid stupidly generating extra code.
12264 If we're wrong, we'll just delete the extra code later. */
12265
12266 bool
12267 block_may_fallthru (const_tree block)
12268 {
12269 /* This CONST_CAST is okay because expr_last returns its argument
12270 unmodified and we assign it to a const_tree. */
12271 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12272
12273 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12274 {
12275 case GOTO_EXPR:
12276 case RETURN_EXPR:
12277 /* Easy cases. If the last statement of the block implies
12278 control transfer, then we can't fall through. */
12279 return false;
12280
12281 case SWITCH_EXPR:
12282 /* If SWITCH_LABELS is set, this is lowered, and represents a
12283 branch to a selected label and hence cannot fall through.
12284 Otherwise SWITCH_BODY is set, and the switch can fall
12285 through. */
12286 return SWITCH_LABELS (stmt) == NULL_TREE;
12287
12288 case COND_EXPR:
12289 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12290 return true;
12291 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12292
12293 case BIND_EXPR:
12294 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12295
12296 case TRY_CATCH_EXPR:
12297 return try_catch_may_fallthru (stmt);
12298
12299 case TRY_FINALLY_EXPR:
12300 /* The finally clause is always executed after the try clause,
12301 so if it does not fall through, then the try-finally will not
12302 fall through. Otherwise, if the try clause does not fall
12303 through, then when the finally clause falls through it will
12304 resume execution wherever the try clause was going. So the
12305 whole try-finally will only fall through if both the try
12306 clause and the finally clause fall through. */
12307 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12308 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12309
12310 case MODIFY_EXPR:
12311 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12312 stmt = TREE_OPERAND (stmt, 1);
12313 else
12314 return true;
12315 /* FALLTHRU */
12316
12317 case CALL_EXPR:
12318 /* Functions that do not return do not fall through. */
12319 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12320
12321 case CLEANUP_POINT_EXPR:
12322 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12323
12324 case TARGET_EXPR:
12325 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12326
12327 case ERROR_MARK:
12328 return true;
12329
12330 default:
12331 return lang_hooks.block_may_fallthru (stmt);
12332 }
12333 }
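/* A small illustration of the TRY_FINALLY_EXPR rule above: the construct
   falls through only when both parts do.

     try clause falls through?   finally clause falls through?   whole construct?
     yes                         yes                             yes
     yes                         no (e.g. noreturn call)         no
     no (e.g. ends in return)    yes                             no

   By contrast, a COND_EXPR falls through if either arm does.  */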
12334
12335 /* True if we are using EH to handle cleanups. */
12336 static bool using_eh_for_cleanups_flag = false;
12337
12338 /* This routine is called from front ends to indicate that EH should be
12339 used for cleanups. */
12340 void
12341 using_eh_for_cleanups (void)
12342 {
12343 using_eh_for_cleanups_flag = true;
12344 }
12345
12346 /* Query whether EH is used for cleanups. */
12347 bool
12348 using_eh_for_cleanups_p (void)
12349 {
12350 return using_eh_for_cleanups_flag;
12351 }
12352
12353 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12354 const char *
12355 get_tree_code_name (enum tree_code code)
12356 {
12357 const char *invalid = "<invalid tree code>";
12358
12359 if (code >= MAX_TREE_CODES)
12360 return invalid;
12361
12362 return tree_code_name[code];
12363 }
12364
12365 /* Drops the TREE_OVERFLOW flag from T. */
12366
12367 tree
12368 drop_tree_overflow (tree t)
12369 {
12370 gcc_checking_assert (TREE_OVERFLOW (t));
12371
12372 /* For tree codes with a sharing machinery re-build the result. */
12373 if (TREE_CODE (t) == INTEGER_CST)
12374 return wide_int_to_tree (TREE_TYPE (t), t);
12375
12376 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12377 and drop the flag. */
12378 t = copy_node (t);
12379 TREE_OVERFLOW (t) = 0;
12380 return t;
12381 }
12382
12383 /* Given a memory reference expression T, return its base address.
12384 The base address of a memory reference expression is the main
12385 object being referenced. For instance, the base address for
12386 'array[i].fld[j]' is 'array'. You can think of this as stripping
12387 away the offset part from a memory address.
12388
12389 This function calls handled_component_p to strip away all the inner
12390 parts of the memory reference until it reaches the base object. */
12391
12392 tree
12393 get_base_address (tree t)
12394 {
12395 while (handled_component_p (t))
12396 t = TREE_OPERAND (t, 0);
12397
12398 if ((TREE_CODE (t) == MEM_REF
12399 || TREE_CODE (t) == TARGET_MEM_REF)
12400 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12401 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12402
12403 /* ??? Either the alias oracle or all callers need to properly deal
12404 with WITH_SIZE_EXPRs before we can look through those. */
12405 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12406 return NULL_TREE;
12407
12408 return t;
12409 }
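/* Further illustration (hedged): besides stripping handled components, a
   MEM_REF or TARGET_MEM_REF whose address operand is an ADDR_EXPR is looked
   through, so for MEM_REF (&s, off) the base is the declaration 's' itself;
   a WITH_SIZE_EXPR, by contrast, currently yields NULL_TREE as the ???
   comment above explains.  */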
12410
12411 /* Return the machine mode of T. For vectors, returns the mode of the
12412 inner type. The main use case is to feed the result to HONOR_NANS,
12413 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12414
12415 machine_mode
12416 element_mode (const_tree t)
12417 {
12418 if (!TYPE_P (t))
12419 t = TREE_TYPE (t);
12420 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12421 t = TREE_TYPE (t);
12422 return TYPE_MODE (t);
12423 }
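/* For example (usual modes assumed): element_mode of double is DFmode, and
   element_mode of _Complex double or of a vector of doubles is also DFmode,
   whereas TYPE_MODE would give the complex or vector mode (possibly BLKmode
   for an unsupported vector), which is exactly what HONOR_NANS and friends
   want to avoid.  */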
12424
12425 #include "gt-tree.h"