gcc/tree.c
1 /* Language-independent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* This file contains the low level primitives for operating on tree nodes,
21 including allocation, list operations, interning of identifiers,
22 construction of data type nodes and statement nodes,
23 and construction of type conversion nodes. It also contains
24 tables indexed by tree code that describe how to take apart
25 nodes of that code.
26
27 It is intended to be language-independent, but occasionally
28 calls language-dependent routines defined (for C) in typecheck.c. */
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "flags.h"
35 #include "hash-set.h"
36 #include "machmode.h"
37 #include "vec.h"
38 #include "double-int.h"
39 #include "input.h"
40 #include "alias.h"
41 #include "symtab.h"
42 #include "wide-int.h"
43 #include "inchash.h"
44 #include "tree.h"
45 #include "fold-const.h"
46 #include "stor-layout.h"
47 #include "calls.h"
48 #include "attribs.h"
49 #include "varasm.h"
50 #include "tm_p.h"
51 #include "hashtab.h"
52 #include "hard-reg-set.h"
53 #include "function.h"
54 #include "obstack.h"
55 #include "toplev.h" /* get_random_seed */
56 #include "filenames.h"
57 #include "output.h"
58 #include "target.h"
59 #include "common/common-target.h"
60 #include "langhooks.h"
61 #include "tree-inline.h"
62 #include "tree-iterator.h"
63 #include "predict.h"
64 #include "dominance.h"
65 #include "cfg.h"
66 #include "basic-block.h"
67 #include "bitmap.h"
68 #include "tree-ssa-alias.h"
69 #include "internal-fn.h"
70 #include "gimple-expr.h"
71 #include "is-a.h"
72 #include "gimple.h"
73 #include "gimple-iterator.h"
74 #include "gimplify.h"
75 #include "gimple-ssa.h"
76 #include "hash-map.h"
77 #include "plugin-api.h"
78 #include "ipa-ref.h"
79 #include "cgraph.h"
80 #include "tree-phinodes.h"
81 #include "stringpool.h"
82 #include "tree-ssanames.h"
83 #include "rtl.h"
84 #include "statistics.h"
85 #include "real.h"
86 #include "fixed-value.h"
87 #include "insn-config.h"
88 #include "expmed.h"
89 #include "dojump.h"
90 #include "explow.h"
91 #include "emit-rtl.h"
92 #include "stmt.h"
93 #include "expr.h"
94 #include "tree-dfa.h"
95 #include "params.h"
96 #include "tree-pass.h"
97 #include "langhooks-def.h"
98 #include "diagnostic.h"
99 #include "tree-diagnostic.h"
100 #include "tree-pretty-print.h"
101 #include "except.h"
102 #include "debug.h"
103 #include "intl.h"
104 #include "builtins.h"
105
106 /* Tree code classes. */
107
108 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
109 #define END_OF_BASE_TREE_CODES tcc_exceptional,
110
111 const enum tree_code_class tree_code_type[] = {
112 #include "all-tree.def"
113 };
114
115 #undef DEFTREECODE
116 #undef END_OF_BASE_TREE_CODES
117
118 /* Table indexed by tree code giving number of expression
119 operands beyond the fixed part of the node structure.
120 Not used for types or decls. */
121
122 #define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
123 #define END_OF_BASE_TREE_CODES 0,
124
125 const unsigned char tree_code_length[] = {
126 #include "all-tree.def"
127 };
128
129 #undef DEFTREECODE
130 #undef END_OF_BASE_TREE_CODES
131
132 /* Names of tree components.
133 Used for printing out the tree and error messages. */
134 #define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
135 #define END_OF_BASE_TREE_CODES "@dummy",
136
137 static const char *const tree_code_name[] = {
138 #include "all-tree.def"
139 };
140
141 #undef DEFTREECODE
142 #undef END_OF_BASE_TREE_CODES
143
144 /* Each tree code class has an associated string representation.
145 These must correspond to the tree_code_class entries. */
146
147 const char *const tree_code_class_strings[] =
148 {
149 "exceptional",
150 "constant",
151 "type",
152 "declaration",
153 "reference",
154 "comparison",
155 "unary",
156 "binary",
157 "statement",
158 "vl_exp",
159 "expression"
160 };
161
162 /* obstack.[ch] explicitly declined to prototype this. */
163 extern int _obstack_allocated_p (struct obstack *h, void *obj);
164
165 /* Statistics-gathering stuff. */
166
167 static int tree_code_counts[MAX_TREE_CODES];
168 int tree_node_counts[(int) all_kinds];
169 int tree_node_sizes[(int) all_kinds];
170
171 /* Keep in sync with tree.h:enum tree_node_kind. */
172 static const char * const tree_node_kind_names[] = {
173 "decls",
174 "types",
175 "blocks",
176 "stmts",
177 "refs",
178 "exprs",
179 "constants",
180 "identifiers",
181 "vecs",
182 "binfos",
183 "ssa names",
184 "constructors",
185 "random kinds",
186 "lang_decl kinds",
187 "lang_type kinds",
188 "omp clauses",
189 };
190
191 /* Unique id for next decl created. */
192 static GTY(()) int next_decl_uid;
193 /* Unique id for next type created. */
194 static GTY(()) int next_type_uid = 1;
195 /* Unique id for next debug decl created. Use negative numbers,
196 to catch erroneous uses. */
197 static GTY(()) int next_debug_decl_uid;
198
199 /* Since we cannot rehash a type after it is in the table, we have to
200 keep the hash code. */
201
202 struct GTY((for_user)) type_hash {
203 unsigned long hash;
204 tree type;
205 };
206
207 /* Initial size of the hash table (rounded to next prime). */
208 #define TYPE_HASH_INITIAL_SIZE 1000
209
210 struct type_cache_hasher : ggc_cache_hasher<type_hash *>
211 {
212 static hashval_t hash (type_hash *t) { return t->hash; }
213 static bool equal (type_hash *a, type_hash *b);
214
215 static void
216 handle_cache_entry (type_hash *&t)
217 {
218 extern void gt_ggc_mx (type_hash *&);
219 if (t == HTAB_DELETED_ENTRY || t == HTAB_EMPTY_ENTRY)
220 return;
221 else if (ggc_marked_p (t->type))
222 gt_ggc_mx (t);
223 else
224 t = static_cast<type_hash *> (HTAB_DELETED_ENTRY);
225 }
226 };
227
228 /* Now here is the hash table. When recording a type, it is added to
229 the slot whose index is the hash code. Note that the hash table is
230 used for several kinds of types (function types, array types and
231 array index range types, for now). While all these live in the
232 same table, they are completely independent, and the hash code is
233 computed differently for each of these. */
234
235 static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;
236
237 /* Hash table and temporary node for larger integer const values. */
238 static GTY (()) tree int_cst_node;
239
240 struct int_cst_hasher : ggc_cache_hasher<tree>
241 {
242 static hashval_t hash (tree t);
243 static bool equal (tree x, tree y);
244 };
245
246 static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
247
248 /* Hash table for optimization flags and target option flags. Use the same
249 hash table for both sets of options. Nodes for building the current
250 optimization and target option nodes. The assumption is most of the time
251 the options created will already be in the hash table, so we avoid
252 allocating and freeing up a node repeatedly. */
253 static GTY (()) tree cl_optimization_node;
254 static GTY (()) tree cl_target_option_node;
255
256 struct cl_option_hasher : ggc_cache_hasher<tree>
257 {
258 static hashval_t hash (tree t);
259 static bool equal (tree x, tree y);
260 };
261
262 static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;
263
264 /* General tree->tree mapping structure for use in hash tables. */
265
266
267 static GTY ((cache))
268 hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;
269
270 static GTY ((cache))
271 hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;
272
273 struct tree_vec_map_cache_hasher : ggc_cache_hasher<tree_vec_map *>
274 {
275 static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }
276
277 static bool
278 equal (tree_vec_map *a, tree_vec_map *b)
279 {
280 return a->base.from == b->base.from;
281 }
282
283 static void
284 handle_cache_entry (tree_vec_map *&m)
285 {
286 extern void gt_ggc_mx (tree_vec_map *&);
287 if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
288 return;
289 else if (ggc_marked_p (m->base.from))
290 gt_ggc_mx (m);
291 else
292 m = static_cast<tree_vec_map *> (HTAB_DELETED_ENTRY);
293 }
294 };
295
296 static GTY ((cache))
297 hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;
298
299 static void set_type_quals (tree, int);
300 static void print_type_hash_statistics (void);
301 static void print_debug_expr_statistics (void);
302 static void print_value_expr_statistics (void);
303 static void type_hash_list (const_tree, inchash::hash &);
304 static void attribute_hash_list (const_tree, inchash::hash &);
305
306 tree global_trees[TI_MAX];
307 tree integer_types[itk_none];
308
309 bool int_n_enabled_p[NUM_INT_N_ENTS];
310 struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];
311
312 unsigned char tree_contains_struct[MAX_TREE_CODES][64];
313
314 /* Number of operands for each OpenMP clause. */
315 unsigned const char omp_clause_num_ops[] =
316 {
317 0, /* OMP_CLAUSE_ERROR */
318 1, /* OMP_CLAUSE_PRIVATE */
319 1, /* OMP_CLAUSE_SHARED */
320 1, /* OMP_CLAUSE_FIRSTPRIVATE */
321 2, /* OMP_CLAUSE_LASTPRIVATE */
322 4, /* OMP_CLAUSE_REDUCTION */
323 1, /* OMP_CLAUSE_COPYIN */
324 1, /* OMP_CLAUSE_COPYPRIVATE */
325 3, /* OMP_CLAUSE_LINEAR */
326 2, /* OMP_CLAUSE_ALIGNED */
327 1, /* OMP_CLAUSE_DEPEND */
328 1, /* OMP_CLAUSE_UNIFORM */
329 2, /* OMP_CLAUSE_FROM */
330 2, /* OMP_CLAUSE_TO */
331 2, /* OMP_CLAUSE_MAP */
332 2, /* OMP_CLAUSE__CACHE_ */
333 1, /* OMP_CLAUSE_DEVICE_RESIDENT */
334 1, /* OMP_CLAUSE_USE_DEVICE */
335 2, /* OMP_CLAUSE_GANG */
336 1, /* OMP_CLAUSE_ASYNC */
337 1, /* OMP_CLAUSE_WAIT */
338 0, /* OMP_CLAUSE_AUTO */
339 0, /* OMP_CLAUSE_SEQ */
340 1, /* OMP_CLAUSE__LOOPTEMP_ */
341 1, /* OMP_CLAUSE_IF */
342 1, /* OMP_CLAUSE_NUM_THREADS */
343 1, /* OMP_CLAUSE_SCHEDULE */
344 0, /* OMP_CLAUSE_NOWAIT */
345 0, /* OMP_CLAUSE_ORDERED */
346 0, /* OMP_CLAUSE_DEFAULT */
347 3, /* OMP_CLAUSE_COLLAPSE */
348 0, /* OMP_CLAUSE_UNTIED */
349 1, /* OMP_CLAUSE_FINAL */
350 0, /* OMP_CLAUSE_MERGEABLE */
351 1, /* OMP_CLAUSE_DEVICE */
352 1, /* OMP_CLAUSE_DIST_SCHEDULE */
353 0, /* OMP_CLAUSE_INBRANCH */
354 0, /* OMP_CLAUSE_NOTINBRANCH */
355 1, /* OMP_CLAUSE_NUM_TEAMS */
356 1, /* OMP_CLAUSE_THREAD_LIMIT */
357 0, /* OMP_CLAUSE_PROC_BIND */
358 1, /* OMP_CLAUSE_SAFELEN */
359 1, /* OMP_CLAUSE_SIMDLEN */
360 0, /* OMP_CLAUSE_FOR */
361 0, /* OMP_CLAUSE_PARALLEL */
362 0, /* OMP_CLAUSE_SECTIONS */
363 0, /* OMP_CLAUSE_TASKGROUP */
364 1, /* OMP_CLAUSE__SIMDUID_ */
365 1, /* OMP_CLAUSE__CILK_FOR_COUNT_ */
366 0, /* OMP_CLAUSE_INDEPENDENT */
367 1, /* OMP_CLAUSE_WORKER */
368 1, /* OMP_CLAUSE_VECTOR */
369 1, /* OMP_CLAUSE_NUM_GANGS */
370 1, /* OMP_CLAUSE_NUM_WORKERS */
371 1, /* OMP_CLAUSE_VECTOR_LENGTH */
372 };
373
374 const char * const omp_clause_code_name[] =
375 {
376 "error_clause",
377 "private",
378 "shared",
379 "firstprivate",
380 "lastprivate",
381 "reduction",
382 "copyin",
383 "copyprivate",
384 "linear",
385 "aligned",
386 "depend",
387 "uniform",
388 "from",
389 "to",
390 "map",
391 "_cache_",
392 "device_resident",
393 "use_device",
394 "gang",
395 "async",
396 "wait",
397 "auto",
398 "seq",
399 "_looptemp_",
400 "if",
401 "num_threads",
402 "schedule",
403 "nowait",
404 "ordered",
405 "default",
406 "collapse",
407 "untied",
408 "final",
409 "mergeable",
410 "device",
411 "dist_schedule",
412 "inbranch",
413 "notinbranch",
414 "num_teams",
415 "thread_limit",
416 "proc_bind",
417 "safelen",
418 "simdlen",
419 "for",
420 "parallel",
421 "sections",
422 "taskgroup",
423 "_simduid_",
424 "_Cilk_for_count_",
425 "independent",
426 "worker",
427 "vector",
428 "num_gangs",
429 "num_workers",
430 "vector_length"
431 };
432
433
434 /* Return the tree node structure used by tree code CODE. */
435
436 static inline enum tree_node_structure_enum
437 tree_node_structure_for_code (enum tree_code code)
438 {
439 switch (TREE_CODE_CLASS (code))
440 {
441 case tcc_declaration:
442 {
443 switch (code)
444 {
445 case FIELD_DECL:
446 return TS_FIELD_DECL;
447 case PARM_DECL:
448 return TS_PARM_DECL;
449 case VAR_DECL:
450 return TS_VAR_DECL;
451 case LABEL_DECL:
452 return TS_LABEL_DECL;
453 case RESULT_DECL:
454 return TS_RESULT_DECL;
455 case DEBUG_EXPR_DECL:
456 return TS_DECL_WRTL;
457 case CONST_DECL:
458 return TS_CONST_DECL;
459 case TYPE_DECL:
460 return TS_TYPE_DECL;
461 case FUNCTION_DECL:
462 return TS_FUNCTION_DECL;
463 case TRANSLATION_UNIT_DECL:
464 return TS_TRANSLATION_UNIT_DECL;
465 default:
466 return TS_DECL_NON_COMMON;
467 }
468 }
469 case tcc_type:
470 return TS_TYPE_NON_COMMON;
471 case tcc_reference:
472 case tcc_comparison:
473 case tcc_unary:
474 case tcc_binary:
475 case tcc_expression:
476 case tcc_statement:
477 case tcc_vl_exp:
478 return TS_EXP;
479 default: /* tcc_constant and tcc_exceptional */
480 break;
481 }
482 switch (code)
483 {
484 /* tcc_constant cases. */
485 case VOID_CST: return TS_TYPED;
486 case INTEGER_CST: return TS_INT_CST;
487 case REAL_CST: return TS_REAL_CST;
488 case FIXED_CST: return TS_FIXED_CST;
489 case COMPLEX_CST: return TS_COMPLEX;
490 case VECTOR_CST: return TS_VECTOR;
491 case STRING_CST: return TS_STRING;
492 /* tcc_exceptional cases. */
493 case ERROR_MARK: return TS_COMMON;
494 case IDENTIFIER_NODE: return TS_IDENTIFIER;
495 case TREE_LIST: return TS_LIST;
496 case TREE_VEC: return TS_VEC;
497 case SSA_NAME: return TS_SSA_NAME;
498 case PLACEHOLDER_EXPR: return TS_COMMON;
499 case STATEMENT_LIST: return TS_STATEMENT_LIST;
500 case BLOCK: return TS_BLOCK;
501 case CONSTRUCTOR: return TS_CONSTRUCTOR;
502 case TREE_BINFO: return TS_BINFO;
503 case OMP_CLAUSE: return TS_OMP_CLAUSE;
504 case OPTIMIZATION_NODE: return TS_OPTIMIZATION;
505 case TARGET_OPTION_NODE: return TS_TARGET_OPTION;
506
507 default:
508 gcc_unreachable ();
509 }
510 }
511
512
513 /* Initialize tree_contains_struct to describe the hierarchy of tree
514 nodes. */
515
516 static void
517 initialize_tree_contains_struct (void)
518 {
519 unsigned i;
520
521 for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
522 {
523 enum tree_code code;
524 enum tree_node_structure_enum ts_code;
525
526 code = (enum tree_code) i;
527 ts_code = tree_node_structure_for_code (code);
528
529 /* Mark the TS structure itself. */
530 tree_contains_struct[code][ts_code] = 1;
531
532 /* Mark all the structures that TS is derived from. */
533 switch (ts_code)
534 {
535 case TS_TYPED:
536 case TS_BLOCK:
537 MARK_TS_BASE (code);
538 break;
539
540 case TS_COMMON:
541 case TS_INT_CST:
542 case TS_REAL_CST:
543 case TS_FIXED_CST:
544 case TS_VECTOR:
545 case TS_STRING:
546 case TS_COMPLEX:
547 case TS_SSA_NAME:
548 case TS_CONSTRUCTOR:
549 case TS_EXP:
550 case TS_STATEMENT_LIST:
551 MARK_TS_TYPED (code);
552 break;
553
554 case TS_IDENTIFIER:
555 case TS_DECL_MINIMAL:
556 case TS_TYPE_COMMON:
557 case TS_LIST:
558 case TS_VEC:
559 case TS_BINFO:
560 case TS_OMP_CLAUSE:
561 case TS_OPTIMIZATION:
562 case TS_TARGET_OPTION:
563 MARK_TS_COMMON (code);
564 break;
565
566 case TS_TYPE_WITH_LANG_SPECIFIC:
567 MARK_TS_TYPE_COMMON (code);
568 break;
569
570 case TS_TYPE_NON_COMMON:
571 MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
572 break;
573
574 case TS_DECL_COMMON:
575 MARK_TS_DECL_MINIMAL (code);
576 break;
577
578 case TS_DECL_WRTL:
579 case TS_CONST_DECL:
580 MARK_TS_DECL_COMMON (code);
581 break;
582
583 case TS_DECL_NON_COMMON:
584 MARK_TS_DECL_WITH_VIS (code);
585 break;
586
587 case TS_DECL_WITH_VIS:
588 case TS_PARM_DECL:
589 case TS_LABEL_DECL:
590 case TS_RESULT_DECL:
591 MARK_TS_DECL_WRTL (code);
592 break;
593
594 case TS_FIELD_DECL:
595 MARK_TS_DECL_COMMON (code);
596 break;
597
598 case TS_VAR_DECL:
599 MARK_TS_DECL_WITH_VIS (code);
600 break;
601
602 case TS_TYPE_DECL:
603 case TS_FUNCTION_DECL:
604 MARK_TS_DECL_NON_COMMON (code);
605 break;
606
607 case TS_TRANSLATION_UNIT_DECL:
608 MARK_TS_DECL_COMMON (code);
609 break;
610
611 default:
612 gcc_unreachable ();
613 }
614 }
615
616 /* Basic consistency checks for attributes used in fold. */
617 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
618 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
619 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
620 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
621 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
622 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
623 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
624 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
625 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
626 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
627 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
628 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
629 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
630 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
631 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
632 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
633 gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
634 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
635 gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
636 gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
637 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
638 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
639 gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
640 gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
641 gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
642 gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
643 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
644 gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
645 gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
646 gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
647 gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
648 gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
649 gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
650 gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
651 gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
652 gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
653 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
654 gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
655 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
656 gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
657 }
658
659
660 /* Init tree.c. */
661
662 void
663 init_ttree (void)
664 {
665 /* Initialize the hash table of types. */
666 type_hash_table
667 = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);
668
669 debug_expr_for_decl
670 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
671
672 value_expr_for_decl
673 = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);
674
675 int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
676
677 int_cst_node = make_int_cst (1, 1);
678
679 cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
680
681 cl_optimization_node = make_node (OPTIMIZATION_NODE);
682 cl_target_option_node = make_node (TARGET_OPTION_NODE);
683
684 /* Initialize the tree_contains_struct array. */
685 initialize_tree_contains_struct ();
686 lang_hooks.init_ts ();
687 }
688
689 \f
690 /* The name of the object as the assembler will see it (but before any
691 translations made by ASM_OUTPUT_LABELREF). Often this is the same
692 as DECL_NAME. It is an IDENTIFIER_NODE. */
693 tree
694 decl_assembler_name (tree decl)
695 {
696 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
697 lang_hooks.set_decl_assembler_name (decl);
698 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name;
699 }
700
701 /* When the target supports COMDAT groups, this indicates which group the
702 DECL is associated with. This can be either an IDENTIFIER_NODE or a
703 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */
704 tree
705 decl_comdat_group (const_tree node)
706 {
707 struct symtab_node *snode = symtab_node::get (node);
708 if (!snode)
709 return NULL;
710 return snode->get_comdat_group ();
711 }
712
713 /* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */
714 tree
715 decl_comdat_group_id (const_tree node)
716 {
717 struct symtab_node *snode = symtab_node::get (node);
718 if (!snode)
719 return NULL;
720 return snode->get_comdat_group_id ();
721 }
722
723 /* When the target supports named sections, return the section name of
724 NODE as a string, or NULL if it is in no section. */
725 const char *
726 decl_section_name (const_tree node)
727 {
728 struct symtab_node *snode = symtab_node::get (node);
729 if (!snode)
730 return NULL;
731 return snode->get_section ();
732 }
733
734 /* Set the section name of NODE to the string VALUE, or clear it
735 when VALUE is NULL. */
736 void
737 set_decl_section_name (tree node, const char *value)
738 {
739 struct symtab_node *snode;
740
741 if (value == NULL)
742 {
743 snode = symtab_node::get (node);
744 if (!snode)
745 return;
746 }
747 else if (TREE_CODE (node) == VAR_DECL)
748 snode = varpool_node::get_create (node);
749 else
750 snode = cgraph_node::get_create (node);
751 snode->set_section (value);
752 }
753
754 /* Return TLS model of a variable NODE. */
755 enum tls_model
756 decl_tls_model (const_tree node)
757 {
758 struct varpool_node *snode = varpool_node::get (node);
759 if (!snode)
760 return TLS_MODEL_NONE;
761 return snode->tls_model;
762 }
763
764 /* Set TLS model of variable NODE to MODEL. */
765 void
766 set_decl_tls_model (tree node, enum tls_model model)
767 {
768 struct varpool_node *vnode;
769
770 if (model == TLS_MODEL_NONE)
771 {
772 vnode = varpool_node::get (node);
773 if (!vnode)
774 return;
775 }
776 else
777 vnode = varpool_node::get_create (node);
778 vnode->tls_model = model;
779 }
780
781 /* Compute the number of bytes occupied by a tree with code CODE.
782 This function cannot be used for nodes that have variable sizes,
783 including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR. */
784 size_t
785 tree_code_size (enum tree_code code)
786 {
787 switch (TREE_CODE_CLASS (code))
788 {
789 case tcc_declaration: /* A decl node */
790 {
791 switch (code)
792 {
793 case FIELD_DECL:
794 return sizeof (struct tree_field_decl);
795 case PARM_DECL:
796 return sizeof (struct tree_parm_decl);
797 case VAR_DECL:
798 return sizeof (struct tree_var_decl);
799 case LABEL_DECL:
800 return sizeof (struct tree_label_decl);
801 case RESULT_DECL:
802 return sizeof (struct tree_result_decl);
803 case CONST_DECL:
804 return sizeof (struct tree_const_decl);
805 case TYPE_DECL:
806 return sizeof (struct tree_type_decl);
807 case FUNCTION_DECL:
808 return sizeof (struct tree_function_decl);
809 case DEBUG_EXPR_DECL:
810 return sizeof (struct tree_decl_with_rtl);
811 case TRANSLATION_UNIT_DECL:
812 return sizeof (struct tree_translation_unit_decl);
813 case NAMESPACE_DECL:
814 case IMPORTED_DECL:
815 case NAMELIST_DECL:
816 return sizeof (struct tree_decl_non_common);
817 default:
818 return lang_hooks.tree_size (code);
819 }
820 }
821
822 case tcc_type: /* a type node */
823 return sizeof (struct tree_type_non_common);
824
825 case tcc_reference: /* a reference */
826 case tcc_expression: /* an expression */
827 case tcc_statement: /* an expression with side effects */
828 case tcc_comparison: /* a comparison expression */
829 case tcc_unary: /* a unary arithmetic expression */
830 case tcc_binary: /* a binary arithmetic expression */
831 return (sizeof (struct tree_exp)
832 + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));
833
834 case tcc_constant: /* a constant */
835 switch (code)
836 {
837 case VOID_CST: return sizeof (struct tree_typed);
838 case INTEGER_CST: gcc_unreachable ();
839 case REAL_CST: return sizeof (struct tree_real_cst);
840 case FIXED_CST: return sizeof (struct tree_fixed_cst);
841 case COMPLEX_CST: return sizeof (struct tree_complex);
842 case VECTOR_CST: return sizeof (struct tree_vector);
843 case STRING_CST: gcc_unreachable ();
844 default:
845 return lang_hooks.tree_size (code);
846 }
847
848 case tcc_exceptional: /* something random, like an identifier. */
849 switch (code)
850 {
851 case IDENTIFIER_NODE: return lang_hooks.identifier_size;
852 case TREE_LIST: return sizeof (struct tree_list);
853
854 case ERROR_MARK:
855 case PLACEHOLDER_EXPR: return sizeof (struct tree_common);
856
857 case TREE_VEC:
858 case OMP_CLAUSE: gcc_unreachable ();
859
860 case SSA_NAME: return sizeof (struct tree_ssa_name);
861
862 case STATEMENT_LIST: return sizeof (struct tree_statement_list);
863 case BLOCK: return sizeof (struct tree_block);
864 case CONSTRUCTOR: return sizeof (struct tree_constructor);
865 case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
866 case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);
867
868 default:
869 return lang_hooks.tree_size (code);
870 }
871
872 default:
873 gcc_unreachable ();
874 }
875 }
876
877 /* Compute the number of bytes occupied by NODE. This routine only
878 looks at TREE_CODE, except for those nodes that have variable sizes. */
879 size_t
880 tree_size (const_tree node)
881 {
882 const enum tree_code code = TREE_CODE (node);
883 switch (code)
884 {
885 case INTEGER_CST:
886 return (sizeof (struct tree_int_cst)
887 + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));
888
889 case TREE_BINFO:
890 return (offsetof (struct tree_binfo, base_binfos)
891 + vec<tree, va_gc>
892 ::embedded_size (BINFO_N_BASE_BINFOS (node)));
893
894 case TREE_VEC:
895 return (sizeof (struct tree_vec)
896 + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));
897
898 case VECTOR_CST:
899 return (sizeof (struct tree_vector)
900 + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));
901
902 case STRING_CST:
903 return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;
904
905 case OMP_CLAUSE:
906 return (sizeof (struct tree_omp_clause)
907 + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
908 * sizeof (tree));
909
910 default:
911 if (TREE_CODE_CLASS (code) == tcc_vl_exp)
912 return (sizeof (struct tree_exp)
913 + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
914 else
915 return tree_code_size (code);
916 }
917 }
918
919 /* Record interesting allocation statistics for a tree node with CODE
920 and LENGTH. */
921
922 static void
923 record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
924 size_t length ATTRIBUTE_UNUSED)
925 {
926 enum tree_code_class type = TREE_CODE_CLASS (code);
927 tree_node_kind kind;
928
929 if (!GATHER_STATISTICS)
930 return;
931
932 switch (type)
933 {
934 case tcc_declaration: /* A decl node */
935 kind = d_kind;
936 break;
937
938 case tcc_type: /* a type node */
939 kind = t_kind;
940 break;
941
942 case tcc_statement: /* an expression with side effects */
943 kind = s_kind;
944 break;
945
946 case tcc_reference: /* a reference */
947 kind = r_kind;
948 break;
949
950 case tcc_expression: /* an expression */
951 case tcc_comparison: /* a comparison expression */
952 case tcc_unary: /* a unary arithmetic expression */
953 case tcc_binary: /* a binary arithmetic expression */
954 kind = e_kind;
955 break;
956
957 case tcc_constant: /* a constant */
958 kind = c_kind;
959 break;
960
961 case tcc_exceptional: /* something random, like an identifier. */
962 switch (code)
963 {
964 case IDENTIFIER_NODE:
965 kind = id_kind;
966 break;
967
968 case TREE_VEC:
969 kind = vec_kind;
970 break;
971
972 case TREE_BINFO:
973 kind = binfo_kind;
974 break;
975
976 case SSA_NAME:
977 kind = ssa_name_kind;
978 break;
979
980 case BLOCK:
981 kind = b_kind;
982 break;
983
984 case CONSTRUCTOR:
985 kind = constr_kind;
986 break;
987
988 case OMP_CLAUSE:
989 kind = omp_clause_kind;
990 break;
991
992 default:
993 kind = x_kind;
994 break;
995 }
996 break;
997
998 case tcc_vl_exp:
999 kind = e_kind;
1000 break;
1001
1002 default:
1003 gcc_unreachable ();
1004 }
1005
1006 tree_code_counts[(int) code]++;
1007 tree_node_counts[(int) kind]++;
1008 tree_node_sizes[(int) kind] += length;
1009 }
1010
1011 /* Allocate and return a new UID from the DECL_UID namespace. */
1012
1013 int
1014 allocate_decl_uid (void)
1015 {
1016 return next_decl_uid++;
1017 }
1018
1019 /* Return a newly allocated node of code CODE. For decl and type
1020 nodes, some other fields are initialized. The rest of the node is
1021 initialized to zero. This function cannot be used for TREE_VEC,
1022 INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
1023 tree_code_size.
1024
1025 Achoo! I got a code in the node. */
1026
1027 tree
1028 make_node_stat (enum tree_code code MEM_STAT_DECL)
1029 {
1030 tree t;
1031 enum tree_code_class type = TREE_CODE_CLASS (code);
1032 size_t length = tree_code_size (code);
1033
1034 record_node_allocation_statistics (code, length);
1035
1036 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1037 TREE_SET_CODE (t, code);
1038
1039 switch (type)
1040 {
1041 case tcc_statement:
1042 TREE_SIDE_EFFECTS (t) = 1;
1043 break;
1044
1045 case tcc_declaration:
1046 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1047 {
1048 if (code == FUNCTION_DECL)
1049 {
1050 DECL_ALIGN (t) = FUNCTION_BOUNDARY;
1051 DECL_MODE (t) = FUNCTION_MODE;
1052 }
1053 else
1054 DECL_ALIGN (t) = 1;
1055 }
1056 DECL_SOURCE_LOCATION (t) = input_location;
1057 if (TREE_CODE (t) == DEBUG_EXPR_DECL)
1058 DECL_UID (t) = --next_debug_decl_uid;
1059 else
1060 {
1061 DECL_UID (t) = allocate_decl_uid ();
1062 SET_DECL_PT_UID (t, -1);
1063 }
1064 if (TREE_CODE (t) == LABEL_DECL)
1065 LABEL_DECL_UID (t) = -1;
1066
1067 break;
1068
1069 case tcc_type:
1070 TYPE_UID (t) = next_type_uid++;
1071 TYPE_ALIGN (t) = BITS_PER_UNIT;
1072 TYPE_USER_ALIGN (t) = 0;
1073 TYPE_MAIN_VARIANT (t) = t;
1074 TYPE_CANONICAL (t) = t;
1075
1076 /* Default to no attributes for type, but let target change that. */
1077 TYPE_ATTRIBUTES (t) = NULL_TREE;
1078 targetm.set_default_type_attributes (t);
1079
1080 /* We have not yet computed the alias set for this type. */
1081 TYPE_ALIAS_SET (t) = -1;
1082 break;
1083
1084 case tcc_constant:
1085 TREE_CONSTANT (t) = 1;
1086 break;
1087
1088 case tcc_expression:
1089 switch (code)
1090 {
1091 case INIT_EXPR:
1092 case MODIFY_EXPR:
1093 case VA_ARG_EXPR:
1094 case PREDECREMENT_EXPR:
1095 case PREINCREMENT_EXPR:
1096 case POSTDECREMENT_EXPR:
1097 case POSTINCREMENT_EXPR:
1098 /* All of these have side-effects, no matter what their
1099 operands are. */
1100 TREE_SIDE_EFFECTS (t) = 1;
1101 break;
1102
1103 default:
1104 break;
1105 }
1106 break;
1107
1108 default:
1109 /* Other classes need no special treatment. */
1110 break;
1111 }
1112
1113 return t;
1114 }
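
/* Illustrative sketch (not part of GCC): a minimal use of make_node for a
   declaration node.  The wrapper name "example_make_parm" is hypothetical;
   front ends normally go through build_decl, which calls make_node
   internally.  */

static tree ATTRIBUTE_UNUSED
example_make_parm (void)
{
  /* A fresh PARM_DECL: DECL_UID is allocated, DECL_ALIGN is 1 and
     DECL_SOURCE_LOCATION is the current input_location.  */
  tree parm = make_node (PARM_DECL);
  DECL_NAME (parm) = get_identifier ("x");
  TREE_TYPE (parm) = integer_type_node;
  return parm;
}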
1115 \f
1116 /* Return a new node with the same contents as NODE except that its
1117 TREE_CHAIN, if it has one, is zero and it has a fresh uid. */
1118
1119 tree
1120 copy_node_stat (tree node MEM_STAT_DECL)
1121 {
1122 tree t;
1123 enum tree_code code = TREE_CODE (node);
1124 size_t length;
1125
1126 gcc_assert (code != STATEMENT_LIST);
1127
1128 length = tree_size (node);
1129 record_node_allocation_statistics (code, length);
1130 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
1131 memcpy (t, node, length);
1132
1133 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
1134 TREE_CHAIN (t) = 0;
1135 TREE_ASM_WRITTEN (t) = 0;
1136 TREE_VISITED (t) = 0;
1137
1138 if (TREE_CODE_CLASS (code) == tcc_declaration)
1139 {
1140 if (code == DEBUG_EXPR_DECL)
1141 DECL_UID (t) = --next_debug_decl_uid;
1142 else
1143 {
1144 DECL_UID (t) = allocate_decl_uid ();
1145 if (DECL_PT_UID_SET_P (node))
1146 SET_DECL_PT_UID (t, DECL_PT_UID (node));
1147 }
1148 if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
1149 && DECL_HAS_VALUE_EXPR_P (node))
1150 {
1151 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
1152 DECL_HAS_VALUE_EXPR_P (t) = 1;
1153 }
1154 /* DECL_DEBUG_EXPR is copied explicitly by callers. */
1155 if (TREE_CODE (node) == VAR_DECL)
1156 {
1157 DECL_HAS_DEBUG_EXPR_P (t) = 0;
1158 t->decl_with_vis.symtab_node = NULL;
1159 }
1160 if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
1161 {
1162 SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
1163 DECL_HAS_INIT_PRIORITY_P (t) = 1;
1164 }
1165 if (TREE_CODE (node) == FUNCTION_DECL)
1166 {
1167 DECL_STRUCT_FUNCTION (t) = NULL;
1168 t->decl_with_vis.symtab_node = NULL;
1169 }
1170 }
1171 else if (TREE_CODE_CLASS (code) == tcc_type)
1172 {
1173 TYPE_UID (t) = next_type_uid++;
1174 /* The following is so that the debug code for
1175 the copy is different from the original type.
1176 The two statements usually duplicate each other
1177 (because they clear fields of the same union),
1178 but the optimizer should catch that. */
1179 TYPE_SYMTAB_POINTER (t) = 0;
1180 TYPE_SYMTAB_ADDRESS (t) = 0;
1181
1182 /* Do not copy the values cache. */
1183 if (TYPE_CACHED_VALUES_P (t))
1184 {
1185 TYPE_CACHED_VALUES_P (t) = 0;
1186 TYPE_CACHED_VALUES (t) = NULL_TREE;
1187 }
1188 }
1189
1190 return t;
1191 }
1192
1193 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
1194 For example, this can copy a list made of TREE_LIST nodes. */
1195
1196 tree
1197 copy_list (tree list)
1198 {
1199 tree head;
1200 tree prev, next;
1201
1202 if (list == 0)
1203 return 0;
1204
1205 head = prev = copy_node (list);
1206 next = TREE_CHAIN (list);
1207 while (next)
1208 {
1209 TREE_CHAIN (prev) = copy_node (next);
1210 prev = TREE_CHAIN (prev);
1211 next = TREE_CHAIN (next);
1212 }
1213 return head;
1214 }
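
/* Illustrative sketch (not part of GCC): copying a TREE_LIST chain.  The
   wrapper name "example_copy_list" is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_copy_list (void)
{
  /* A two-element list (0, 1) built with tree_cons.  */
  tree orig = tree_cons (NULL_TREE, integer_zero_node,
                         tree_cons (NULL_TREE, integer_one_node, NULL_TREE));
  /* The copy has fresh TREE_LIST nodes, but the TREE_VALUEs themselves
     are shared, not duplicated.  */
  tree dup = copy_list (orig);
  gcc_checking_assert (dup != orig
                       && TREE_VALUE (dup) == TREE_VALUE (orig));
  return dup;
}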
1215
1216 \f
1217 /* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
1218 INTEGER_CST with value CST and type TYPE. */
1219
1220 static unsigned int
1221 get_int_cst_ext_nunits (tree type, const wide_int &cst)
1222 {
1223 gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
1224 /* We need an extra zero HWI if CST is an unsigned integer with its
1225 upper bit set, and if CST occupies a whole number of HWIs. */
1226 if (TYPE_UNSIGNED (type)
1227 && wi::neg_p (cst)
1228 && (cst.get_precision () % HOST_BITS_PER_WIDE_INT) == 0)
1229 return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
1230 return cst.get_len ();
1231 }
1232
1233 /* Return a new INTEGER_CST with value CST and type TYPE. */
1234
1235 static tree
1236 build_new_int_cst (tree type, const wide_int &cst)
1237 {
1238 unsigned int len = cst.get_len ();
1239 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1240 tree nt = make_int_cst (len, ext_len);
1241
1242 if (len < ext_len)
1243 {
1244 --ext_len;
1245 TREE_INT_CST_ELT (nt, ext_len) = 0;
1246 for (unsigned int i = len; i < ext_len; ++i)
1247 TREE_INT_CST_ELT (nt, i) = -1;
1248 }
1249 else if (TYPE_UNSIGNED (type)
1250 && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
1251 {
1252 len--;
1253 TREE_INT_CST_ELT (nt, len)
1254 = zext_hwi (cst.elt (len),
1255 cst.get_precision () % HOST_BITS_PER_WIDE_INT);
1256 }
1257
1258 for (unsigned int i = 0; i < len; i++)
1259 TREE_INT_CST_ELT (nt, i) = cst.elt (i);
1260 TREE_TYPE (nt) = type;
1261 return nt;
1262 }
1263
1264 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1265
1266 tree
1267 build_int_cst (tree type, HOST_WIDE_INT low)
1268 {
1269 /* Support legacy code. */
1270 if (!type)
1271 type = integer_type_node;
1272
1273 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1274 }
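
/* Illustrative sketch (not part of GCC): building integer constants.  The
   wrapper name "example_int_csts" is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_int_csts (void)
{
  /* 42 as a signed int constant.  */
  tree a = build_int_cst (integer_type_node, 42);
  /* The same value as an unsigned size_t constant.  */
  tree b = build_int_cstu (size_type_node, 42);
  gcc_checking_assert (tree_to_shwi (a) == 42 && tree_to_uhwi (b) == 42);
}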
1275
1276 tree
1277 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
1278 {
1279 return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
1280 }
1281
1282 /* Create an INT_CST node with a LOW value sign extended to TYPE. */
1283
1284 tree
1285 build_int_cst_type (tree type, HOST_WIDE_INT low)
1286 {
1287 gcc_assert (type);
1288 return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
1289 }
1290
1291 /* Constructs a tree of type TYPE with the value given by CST. Signedness
1292 of CST is assumed to be the same as the signedness of TYPE. */
1293
1294 tree
1295 double_int_to_tree (tree type, double_int cst)
1296 {
1297 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type)));
1298 }
1299
1300 /* We force the wide_int CST to the range of the type TYPE by sign or
1301 zero extending it. OVERFLOWABLE indicates if we are interested in
1302 overflow of the value, when >0 we are only interested in signed
1303 overflow, for <0 we are interested in any overflow. OVERFLOWED
1304 indicates whether overflow has already occurred. We force the
1305 result's value to be within the range of TYPE (by setting to 0 or 1 all
1306 the bits outside the type's range). We set TREE_OVERFLOW if
1308 OVERFLOWED is nonzero,
1309 or OVERFLOWABLE is >0 and signed overflow occurs
1310 or OVERFLOWABLE is <0 and any overflow occurs
1311 We return a new tree node for the extended wide_int. The node
1312 is shared if no overflow flags are set. */
1313
1314
1315 tree
1316 force_fit_type (tree type, const wide_int_ref &cst,
1317 int overflowable, bool overflowed)
1318 {
1319 signop sign = TYPE_SIGN (type);
1320
1321 /* If we need to set overflow flags, return a new unshared node. */
1322 if (overflowed || !wi::fits_to_tree_p (cst, type))
1323 {
1324 if (overflowed
1325 || overflowable < 0
1326 || (overflowable > 0 && sign == SIGNED))
1327 {
1328 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
1329 tree t = build_new_int_cst (type, tmp);
1330 TREE_OVERFLOW (t) = 1;
1331 return t;
1332 }
1333 }
1334
1335 /* Else build a shared node. */
1336 return wide_int_to_tree (type, cst);
1337 }
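
/* Illustrative sketch (not part of GCC): the usual fold-style pattern of
   doing arithmetic in the arbitrary-precision domain and then forcing the
   result back into the constant's type.  The wrapper name
   "example_const_increment" is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_const_increment (tree arg)
{
  /* ARG is assumed to be an INTEGER_CST.  Signed overflow of the addition,
     or pre-existing overflow on ARG, ends up as TREE_OVERFLOW on the
     result; in-range results come back as shared nodes.  */
  widest_int w = wi::to_widest (arg) + 1;
  return force_fit_type (TREE_TYPE (arg), w, 1, TREE_OVERFLOW (arg));
}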
1338
1339 /* These are the hash table functions for the hash table of INTEGER_CST
1340 nodes. */
1341
1342 /* Return the hash code of X, an INTEGER_CST. */
1343
1344 hashval_t
1345 int_cst_hasher::hash (tree x)
1346 {
1347 const_tree const t = x;
1348 hashval_t code = TYPE_UID (TREE_TYPE (t));
1349 int i;
1350
1351 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1352 code ^= TREE_INT_CST_ELT (t, i);
1353
1354 return code;
1355 }
1356
1357 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1358 is the same as the value represented by *Y, also an INTEGER_CST. */
1359
1360 bool
1361 int_cst_hasher::equal (tree x, tree y)
1362 {
1363 const_tree const xt = x;
1364 const_tree const yt = y;
1365
1366 if (TREE_TYPE (xt) != TREE_TYPE (yt)
1367 || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
1368 || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
1369 return false;
1370
1371 for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
1372 if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
1373 return false;
1374
1375 return true;
1376 }
1377
1378 /* Create an INT_CST node of TYPE and value CST.
1379 The returned node is always shared. For small integers we use a
1380 per-type vector cache, for larger ones we use a single hash table.
1381 The value is extended from its precision according to the sign of
1382 the type to be a multiple of HOST_BITS_PER_WIDE_INT. This defines
1383 the upper bits and ensures that hashing and value equality based
1384 upon the underlying HOST_WIDE_INTs works without masking. */
1385
1386 tree
1387 wide_int_to_tree (tree type, const wide_int_ref &pcst)
1388 {
1389 tree t;
1390 int ix = -1;
1391 int limit = 0;
1392
1393 gcc_assert (type);
1394 unsigned int prec = TYPE_PRECISION (type);
1395 signop sgn = TYPE_SIGN (type);
1396
1397 /* Verify that everything is canonical. */
1398 int l = pcst.get_len ();
1399 if (l > 1)
1400 {
1401 if (pcst.elt (l - 1) == 0)
1402 gcc_checking_assert (pcst.elt (l - 2) < 0);
1403 if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
1404 gcc_checking_assert (pcst.elt (l - 2) >= 0);
1405 }
1406
1407 wide_int cst = wide_int::from (pcst, prec, sgn);
1408 unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
1409
1410 if (ext_len == 1)
1411 {
1412 /* We just need to store a single HOST_WIDE_INT. */
1413 HOST_WIDE_INT hwi;
1414 if (TYPE_UNSIGNED (type))
1415 hwi = cst.to_uhwi ();
1416 else
1417 hwi = cst.to_shwi ();
1418
1419 switch (TREE_CODE (type))
1420 {
1421 case NULLPTR_TYPE:
1422 gcc_assert (hwi == 0);
1423 /* Fallthru. */
1424
1425 case POINTER_TYPE:
1426 case REFERENCE_TYPE:
1427 case POINTER_BOUNDS_TYPE:
1428 /* Cache NULL pointer and zero bounds. */
1429 if (hwi == 0)
1430 {
1431 limit = 1;
1432 ix = 0;
1433 }
1434 break;
1435
1436 case BOOLEAN_TYPE:
1437 /* Cache false or true. */
1438 limit = 2;
1439 if (hwi < 2)
1440 ix = hwi;
1441 break;
1442
1443 case INTEGER_TYPE:
1444 case OFFSET_TYPE:
1445 if (TYPE_SIGN (type) == UNSIGNED)
1446 {
1447 /* Cache [0, N). */
1448 limit = INTEGER_SHARE_LIMIT;
1449 if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
1450 ix = hwi;
1451 }
1452 else
1453 {
1454 /* Cache [-1, N). */
1455 limit = INTEGER_SHARE_LIMIT + 1;
1456 if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
1457 ix = hwi + 1;
1458 }
1459 break;
1460
1461 case ENUMERAL_TYPE:
1462 break;
1463
1464 default:
1465 gcc_unreachable ();
1466 }
1467
1468 if (ix >= 0)
1469 {
1470 /* Look for it in the type's vector of small shared ints. */
1471 if (!TYPE_CACHED_VALUES_P (type))
1472 {
1473 TYPE_CACHED_VALUES_P (type) = 1;
1474 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1475 }
1476
1477 t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
1478 if (t)
1479 /* Make sure no one is clobbering the shared constant. */
1480 gcc_checking_assert (TREE_TYPE (t) == type
1481 && TREE_INT_CST_NUNITS (t) == 1
1482 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
1483 && TREE_INT_CST_EXT_NUNITS (t) == 1
1484 && TREE_INT_CST_ELT (t, 0) == hwi);
1485 else
1486 {
1487 /* Create a new shared int. */
1488 t = build_new_int_cst (type, cst);
1489 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1490 }
1491 }
1492 else
1493 {
1494 /* Use the cache of larger shared ints, using int_cst_node as
1495 a temporary. */
1496
1497 TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
1498 TREE_TYPE (int_cst_node) = type;
1499
1500 tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
1501 t = *slot;
1502 if (!t)
1503 {
1504 /* Insert this one into the hash table. */
1505 t = int_cst_node;
1506 *slot = t;
1507 /* Make a new node for next time round. */
1508 int_cst_node = make_int_cst (1, 1);
1509 }
1510 }
1511 }
1512 else
1513 {
1514 /* The value either hashes properly or we drop it on the floor
1515 for the gc to take care of. There will not be enough of them
1516 to worry about. */
1517
1518 tree nt = build_new_int_cst (type, cst);
1519 tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
1520 t = *slot;
1521 if (!t)
1522 {
1523 /* Insert this one into the hash table. */
1524 t = nt;
1525 *slot = t;
1526 }
1527 }
1528
1529 return t;
1530 }
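
/* Illustrative sketch (not part of GCC): small constants are shared, so
   building the same value twice yields the same node.  The wrapper name
   "example_shared_small_int" is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_shared_small_int (void)
{
  tree a = build_int_cst (integer_type_node, 1);
  tree b = build_int_cst (integer_type_node, 1);
  /* Both calls hit the per-type TYPE_CACHED_VALUES vector (signed types
     cache the range [-1, INTEGER_SHARE_LIMIT)), so the pointers compare
     equal.  */
  gcc_checking_assert (a == b);
}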
1531
1532 void
1533 cache_integer_cst (tree t)
1534 {
1535 tree type = TREE_TYPE (t);
1536 int ix = -1;
1537 int limit = 0;
1538 int prec = TYPE_PRECISION (type);
1539
1540 gcc_assert (!TREE_OVERFLOW (t));
1541
1542 switch (TREE_CODE (type))
1543 {
1544 case NULLPTR_TYPE:
1545 gcc_assert (integer_zerop (t));
1546 /* Fallthru. */
1547
1548 case POINTER_TYPE:
1549 case REFERENCE_TYPE:
1550 /* Cache NULL pointer. */
1551 if (integer_zerop (t))
1552 {
1553 limit = 1;
1554 ix = 0;
1555 }
1556 break;
1557
1558 case BOOLEAN_TYPE:
1559 /* Cache false or true. */
1560 limit = 2;
1561 if (wi::ltu_p (t, 2))
1562 ix = TREE_INT_CST_ELT (t, 0);
1563 break;
1564
1565 case INTEGER_TYPE:
1566 case OFFSET_TYPE:
1567 if (TYPE_UNSIGNED (type))
1568 {
1569 /* Cache 0..N */
1570 limit = INTEGER_SHARE_LIMIT;
1571
1572 /* This is a little hokey, but if the prec is smaller than
1573 what is necessary to hold INTEGER_SHARE_LIMIT, then the
1574 obvious test will not get the correct answer. */
1575 if (prec < HOST_BITS_PER_WIDE_INT)
1576 {
1577 if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
1578 ix = tree_to_uhwi (t);
1579 }
1580 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1581 ix = tree_to_uhwi (t);
1582 }
1583 else
1584 {
1585 /* Cache -1..N */
1586 limit = INTEGER_SHARE_LIMIT + 1;
1587
1588 if (integer_minus_onep (t))
1589 ix = 0;
1590 else if (!wi::neg_p (t))
1591 {
1592 if (prec < HOST_BITS_PER_WIDE_INT)
1593 {
1594 if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
1595 ix = tree_to_shwi (t) + 1;
1596 }
1597 else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
1598 ix = tree_to_shwi (t) + 1;
1599 }
1600 }
1601 break;
1602
1603 case ENUMERAL_TYPE:
1604 break;
1605
1606 default:
1607 gcc_unreachable ();
1608 }
1609
1610 if (ix >= 0)
1611 {
1612 /* Look for it in the type's vector of small shared ints. */
1613 if (!TYPE_CACHED_VALUES_P (type))
1614 {
1615 TYPE_CACHED_VALUES_P (type) = 1;
1616 TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
1617 }
1618
1619 gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
1620 TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
1621 }
1622 else
1623 {
1624 /* Use the cache of larger shared ints. */
1625 tree *slot = int_cst_hash_table->find_slot (t, INSERT);
1626 /* If there is already an entry for the number verify it's the
1627 same. */
1628 if (*slot)
1629 gcc_assert (wi::eq_p (tree (*slot), t));
1630 else
1631 /* Otherwise insert this one into the hash table. */
1632 *slot = t;
1633 }
1634 }
1635
1636
1637 /* Builds an integer constant in TYPE such that the lowest BITS bits are ones
1638 and the rest are zeros. */
1639
1640 tree
1641 build_low_bits_mask (tree type, unsigned bits)
1642 {
1643 gcc_assert (bits <= TYPE_PRECISION (type));
1644
1645 return wide_int_to_tree (type, wi::mask (bits, false,
1646 TYPE_PRECISION (type)));
1647 }
1648
1649 /* Checks that X is an integer constant that can be expressed in (unsigned)
1650 HOST_WIDE_INT without loss of precision. */
1651
1652 bool
1653 cst_and_fits_in_hwi (const_tree x)
1654 {
1655 if (TREE_CODE (x) != INTEGER_CST)
1656 return false;
1657
1658 if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
1659 return false;
1660
1661 return TREE_INT_CST_NUNITS (x) == 1;
1662 }
1663
1664 /* Build a newly constructed TREE_VEC node of length LEN. */
1665
1666 tree
1667 make_vector_stat (unsigned len MEM_STAT_DECL)
1668 {
1669 tree t;
1670 unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);
1671
1672 record_node_allocation_statistics (VECTOR_CST, length);
1673
1674 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
1675
1676 TREE_SET_CODE (t, VECTOR_CST);
1677 TREE_CONSTANT (t) = 1;
1678
1679 return t;
1680 }
1681
1682 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1683 are in a list pointed to by VALS. */
1684
1685 tree
1686 build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
1687 {
1688 int over = 0;
1689 unsigned cnt = 0;
1690 tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
1691 TREE_TYPE (v) = type;
1692
1693 /* Iterate through elements and check for overflow. */
1694 for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
1695 {
1696 tree value = vals[cnt];
1697
1698 VECTOR_CST_ELT (v, cnt) = value;
1699
1700 /* Don't crash if we get an address constant. */
1701 if (!CONSTANT_CLASS_P (value))
1702 continue;
1703
1704 over |= TREE_OVERFLOW (value);
1705 }
1706
1707 TREE_OVERFLOW (v) = over;
1708 return v;
1709 }
1710
1711 /* Return a new VECTOR_CST node whose type is TYPE and whose values
1712 are extracted from V, a vector of CONSTRUCTOR_ELT. */
1713
1714 tree
1715 build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
1716 {
1717 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
1718 unsigned HOST_WIDE_INT idx;
1719 tree value;
1720
1721 FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
1722 vec[idx] = value;
1723 for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
1724 vec[idx] = build_zero_cst (TREE_TYPE (type));
1725
1726 return build_vector (type, vec);
1727 }
1728
1729 /* Build a vector of type VECTYPE where every element is SC. */
1730 tree
1731 build_vector_from_val (tree vectype, tree sc)
1732 {
1733 int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);
1734
1735 if (sc == error_mark_node)
1736 return sc;
1737
1738 /* Verify that the vector type is suitable for SC. Note that there
1739 is some inconsistency in the type-system with respect to restrict
1740 qualifications of pointers. Vector types always have a main-variant
1741 element type and the qualification is applied to the vector-type.
1742 So TREE_TYPE (vector-type) does not return a properly qualified
1743 vector element-type. */
1744 gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
1745 TREE_TYPE (vectype)));
1746
1747 if (CONSTANT_CLASS_P (sc))
1748 {
1749 tree *v = XALLOCAVEC (tree, nunits);
1750 for (i = 0; i < nunits; ++i)
1751 v[i] = sc;
1752 return build_vector (vectype, v);
1753 }
1754 else
1755 {
1756 vec<constructor_elt, va_gc> *v;
1757 vec_alloc (v, nunits);
1758 for (i = 0; i < nunits; ++i)
1759 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
1760 return build_constructor (vectype, v);
1761 }
1762 }
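
/* Illustrative sketch (not part of GCC): splatting a scalar constant into
   a vector.  The wrapper name "example_splat_vector" is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_splat_vector (void)
{
  /* A V4SI-like vector type with four int elements.  */
  tree v4si = build_vector_type (integer_type_node, 4);
  /* Because the element is a constant, the result is a VECTOR_CST
     rather than a CONSTRUCTOR.  */
  tree splat = build_vector_from_val (v4si,
                                      build_int_cst (integer_type_node, 7));
  gcc_checking_assert (TREE_CODE (splat) == VECTOR_CST);
  return splat;
}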
1763
1764 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1765 are in the vec pointed to by VALS. */
1766 tree
1767 build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
1768 {
1769 tree c = make_node (CONSTRUCTOR);
1770 unsigned int i;
1771 constructor_elt *elt;
1772 bool constant_p = true;
1773 bool side_effects_p = false;
1774
1775 TREE_TYPE (c) = type;
1776 CONSTRUCTOR_ELTS (c) = vals;
1777
1778 FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
1779 {
1780 /* Mostly ctors will have elts that don't have side-effects, so
1781 the usual case is to scan all the elements. Hence a single
1782 loop for both const and side effects, rather than one loop
1783 each (with early outs). */
1784 if (!TREE_CONSTANT (elt->value))
1785 constant_p = false;
1786 if (TREE_SIDE_EFFECTS (elt->value))
1787 side_effects_p = true;
1788 }
1789
1790 TREE_SIDE_EFFECTS (c) = side_effects_p;
1791 TREE_CONSTANT (c) = constant_p;
1792
1793 return c;
1794 }
1795
1796 /* Build a CONSTRUCTOR node made of a single initializer, with the specified
1797 INDEX and VALUE. */
1798 tree
1799 build_constructor_single (tree type, tree index, tree value)
1800 {
1801 vec<constructor_elt, va_gc> *v;
1802 constructor_elt elt = {index, value};
1803
1804 vec_alloc (v, 1);
1805 v->quick_push (elt);
1806
1807 return build_constructor (type, v);
1808 }
1809
1810
1811 /* Return a new CONSTRUCTOR node whose type is TYPE and whose values
1812 are in a list pointed to by VALS. */
1813 tree
1814 build_constructor_from_list (tree type, tree vals)
1815 {
1816 tree t;
1817 vec<constructor_elt, va_gc> *v = NULL;
1818
1819 if (vals)
1820 {
1821 vec_alloc (v, list_length (vals));
1822 for (t = vals; t; t = TREE_CHAIN (t))
1823 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
1824 }
1825
1826 return build_constructor (type, v);
1827 }
1828
1829 /* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number
1830 of elements, provided as index/value pairs. */
1831
1832 tree
1833 build_constructor_va (tree type, int nelts, ...)
1834 {
1835 vec<constructor_elt, va_gc> *v = NULL;
1836 va_list p;
1837
1838 va_start (p, nelts);
1839 vec_alloc (v, nelts);
1840 while (nelts--)
1841 {
1842 tree index = va_arg (p, tree);
1843 tree value = va_arg (p, tree);
1844 CONSTRUCTOR_APPEND_ELT (v, index, value);
1845 }
1846 va_end (p);
1847 return build_constructor (type, v);
1848 }
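
/* Illustrative sketch (not part of GCC): building an array initializer
   from index/value pairs.  The wrapper name "example_array_ctor" is
   hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_array_ctor (void)
{
  /* int[2] = { 10, 20 }; the indexes are the array positions.  */
  tree atype = build_array_type (integer_type_node,
                                 build_index_type (size_int (1)));
  return build_constructor_va (atype, 2,
                               size_int (0),
                               build_int_cst (integer_type_node, 10),
                               size_int (1),
                               build_int_cst (integer_type_node, 20));
}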
1849
1850 /* Return a new FIXED_CST node whose type is TYPE and value is F. */
1851
1852 tree
1853 build_fixed (tree type, FIXED_VALUE_TYPE f)
1854 {
1855 tree v;
1856 FIXED_VALUE_TYPE *fp;
1857
1858 v = make_node (FIXED_CST);
1859 fp = ggc_alloc<fixed_value> ();
1860 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE));
1861
1862 TREE_TYPE (v) = type;
1863 TREE_FIXED_CST_PTR (v) = fp;
1864 return v;
1865 }
1866
1867 /* Return a new REAL_CST node whose type is TYPE and value is D. */
1868
1869 tree
1870 build_real (tree type, REAL_VALUE_TYPE d)
1871 {
1872 tree v;
1873 REAL_VALUE_TYPE *dp;
1874 int overflow = 0;
1875
1876 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
1877 Consider doing it via real_convert now. */
1878
1879 v = make_node (REAL_CST);
1880 dp = ggc_alloc<real_value> ();
1881 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE));
1882
1883 TREE_TYPE (v) = type;
1884 TREE_REAL_CST_PTR (v) = dp;
1885 TREE_OVERFLOW (v) = overflow;
1886 return v;
1887 }
1888
1889 /* Return a REAL_VALUE_TYPE holding the value of the INTEGER_CST node I,
1890 converted as for a floating-point constant of type TYPE. */
1891
1892 REAL_VALUE_TYPE
1893 real_value_from_int_cst (const_tree type, const_tree i)
1894 {
1895 REAL_VALUE_TYPE d;
1896
1897 /* Clear all bits of the real value type so that we can later do
1898 bitwise comparisons to see if two values are the same. */
1899 memset (&d, 0, sizeof d);
1900
1901 real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
1902 TYPE_SIGN (TREE_TYPE (i)));
1903 return d;
1904 }
1905
1906 /* Given a tree representing an integer constant I, return a tree
1907 representing the same value as a floating-point constant of type TYPE. */
1908
1909 tree
1910 build_real_from_int_cst (tree type, const_tree i)
1911 {
1912 tree v;
1913 int overflow = TREE_OVERFLOW (i);
1914
1915 v = build_real (type, real_value_from_int_cst (type, i));
1916
1917 TREE_OVERFLOW (v) |= overflow;
1918 return v;
1919 }
1920
1921 /* Return a newly constructed STRING_CST node whose value is
1922 the LEN characters at STR.
1923 Note that for a C string literal, LEN should include the trailing NUL.
1924 The TREE_TYPE is not initialized. */
1925
1926 tree
1927 build_string (int len, const char *str)
1928 {
1929 tree s;
1930 size_t length;
1931
1932 /* Do not waste bytes provided by padding of struct tree_string. */
1933 length = len + offsetof (struct tree_string, str) + 1;
1934
1935 record_node_allocation_statistics (STRING_CST, length);
1936
1937 s = (tree) ggc_internal_alloc (length);
1938
1939 memset (s, 0, sizeof (struct tree_typed));
1940 TREE_SET_CODE (s, STRING_CST);
1941 TREE_CONSTANT (s) = 1;
1942 TREE_STRING_LENGTH (s) = len;
1943 memcpy (s->string.str, str, len);
1944 s->string.str[len] = '\0';
1945
1946 return s;
1947 }
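
/* Illustrative sketch (not part of GCC): building a C string literal node.
   The wrapper name "example_string_cst" is hypothetical; real callers
   typically use a higher-level helper that also supplies the type.  */

static tree ATTRIBUTE_UNUSED
example_string_cst (void)
{
  /* LEN counts the trailing NUL, so "hi" needs 3.  */
  tree s = build_string (3, "hi");
  /* build_string leaves TREE_TYPE uninitialized; give it a char[3] type.  */
  TREE_TYPE (s) = build_array_type (char_type_node,
                                    build_index_type (size_int (2)));
  return s;
}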
1948
1949 /* Return a newly constructed COMPLEX_CST node whose value is
1950 specified by the real and imaginary parts REAL and IMAG.
1951 Both REAL and IMAG should be constant nodes. TYPE, if specified,
1952 will be the type of the COMPLEX_CST; otherwise a new type will be made. */
1953
1954 tree
1955 build_complex (tree type, tree real, tree imag)
1956 {
1957 tree t = make_node (COMPLEX_CST);
1958
1959 TREE_REALPART (t) = real;
1960 TREE_IMAGPART (t) = imag;
1961 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real));
1962 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag);
1963 return t;
1964 }
1965
1966 /* Return a constant of arithmetic type TYPE which is the
1967 multiplicative identity of the set TYPE. */
1968
1969 tree
1970 build_one_cst (tree type)
1971 {
1972 switch (TREE_CODE (type))
1973 {
1974 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1975 case POINTER_TYPE: case REFERENCE_TYPE:
1976 case OFFSET_TYPE:
1977 return build_int_cst (type, 1);
1978
1979 case REAL_TYPE:
1980 return build_real (type, dconst1);
1981
1982 case FIXED_POINT_TYPE:
1983 /* We can only generate 1 for accum types. */
1984 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
1985 return build_fixed (type, FCONST1 (TYPE_MODE (type)));
1986
1987 case VECTOR_TYPE:
1988 {
1989 tree scalar = build_one_cst (TREE_TYPE (type));
1990
1991 return build_vector_from_val (type, scalar);
1992 }
1993
1994 case COMPLEX_TYPE:
1995 return build_complex (type,
1996 build_one_cst (TREE_TYPE (type)),
1997 build_zero_cst (TREE_TYPE (type)));
1998
1999 default:
2000 gcc_unreachable ();
2001 }
2002 }
2003
2004 /* Return an integer constant of type TYPE with all bits of its precision
2005 set, or a complex or vector whose subparts are such integers. */
2006
2007 tree
2008 build_all_ones_cst (tree type)
2009 {
2010 if (TREE_CODE (type) == COMPLEX_TYPE)
2011 {
2012 tree scalar = build_all_ones_cst (TREE_TYPE (type));
2013 return build_complex (type, scalar, scalar);
2014 }
2015 else
2016 return build_minus_one_cst (type);
2017 }
2018
2019 /* Return a constant of arithmetic type TYPE which is the
2020 opposite of the multiplicative identity of the set TYPE. */
2021
2022 tree
2023 build_minus_one_cst (tree type)
2024 {
2025 switch (TREE_CODE (type))
2026 {
2027 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2028 case POINTER_TYPE: case REFERENCE_TYPE:
2029 case OFFSET_TYPE:
2030 return build_int_cst (type, -1);
2031
2032 case REAL_TYPE:
2033 return build_real (type, dconstm1);
2034
2035 case FIXED_POINT_TYPE:
2036 /* We can only generate -1 for accum types. */
2037 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)));
2038 return build_fixed (type, fixed_from_double_int (double_int_minus_one,
2039 TYPE_MODE (type)));
2040
2041 case VECTOR_TYPE:
2042 {
2043 tree scalar = build_minus_one_cst (TREE_TYPE (type));
2044
2045 return build_vector_from_val (type, scalar);
2046 }
2047
2048 case COMPLEX_TYPE:
2049 return build_complex (type,
2050 build_minus_one_cst (TREE_TYPE (type)),
2051 build_zero_cst (TREE_TYPE (type)));
2052
2053 default:
2054 gcc_unreachable ();
2055 }
2056 }
2057
2058 /* Build 0 constant of type TYPE. This is used by constructor folding
2059 and thus the constant should be represented in memory by
2060 zero(es). */
2061
2062 tree
2063 build_zero_cst (tree type)
2064 {
2065 switch (TREE_CODE (type))
2066 {
2067 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2068 case POINTER_TYPE: case REFERENCE_TYPE:
2069 case OFFSET_TYPE: case NULLPTR_TYPE:
2070 return build_int_cst (type, 0);
2071
2072 case REAL_TYPE:
2073 return build_real (type, dconst0);
2074
2075 case FIXED_POINT_TYPE:
2076 return build_fixed (type, FCONST0 (TYPE_MODE (type)));
2077
2078 case VECTOR_TYPE:
2079 {
2080 tree scalar = build_zero_cst (TREE_TYPE (type));
2081
2082 return build_vector_from_val (type, scalar);
2083 }
2084
2085 case COMPLEX_TYPE:
2086 {
2087 tree zero = build_zero_cst (TREE_TYPE (type));
2088
2089 return build_complex (type, zero, zero);
2090 }
2091
2092 default:
2093 if (!AGGREGATE_TYPE_P (type))
2094 return fold_convert (type, integer_zero_node);
2095 return build_constructor (type, NULL);
2096 }
2097 }
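
/* Illustrative sketch (editorial example, not part of the original file)
   of how the constant builders above dispatch on the type:

     build_zero_cst (double_type_node)    -- the REAL_CST 0.0
     build_one_cst (integer_type_node)    -- the INTEGER_CST 1
     build_minus_one_cst (v4si_type)      -- a VECTOR_CST whose four
                                             elements are all -1

   where v4si_type stands for any four-element signed integer vector
   type, e.g. one obtained from build_vector_type (intSI_type_node, 4).  */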
2098
2099
2100 /* Build a BINFO with BASE_BINFOS base binfo slots. */
2101
2102 tree
2103 make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
2104 {
2105 tree t;
2106 size_t length = (offsetof (struct tree_binfo, base_binfos)
2107 + vec<tree, va_gc>::embedded_size (base_binfos));
2108
2109 record_node_allocation_statistics (TREE_BINFO, length);
2110
2111 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
2112
2113 memset (t, 0, offsetof (struct tree_binfo, base_binfos));
2114
2115 TREE_SET_CODE (t, TREE_BINFO);
2116
2117 BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
2118
2119 return t;
2120 }
2121
2122 /* Create a CASE_LABEL_EXPR tree node and return it. */
2123
2124 tree
2125 build_case_label (tree low_value, tree high_value, tree label_decl)
2126 {
2127 tree t = make_node (CASE_LABEL_EXPR);
2128
2129 TREE_TYPE (t) = void_type_node;
2130 SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));
2131
2132 CASE_LOW (t) = low_value;
2133 CASE_HIGH (t) = high_value;
2134 CASE_LABEL (t) = label_decl;
2135 CASE_CHAIN (t) = NULL_TREE;
2136
2137 return t;
2138 }
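
/* Illustrative sketch (editorial example, not part of the original file):
   the GNU C range extension "case 1 ... 3:" would be represented
   roughly as

     tree lab = build_decl (UNKNOWN_LOCATION, LABEL_DECL,
                            NULL_TREE, void_type_node);
     tree c = build_case_label (build_int_cst (integer_type_node, 1),
                                build_int_cst (integer_type_node, 3),
                                lab);

   while a plain "case 1:" passes NULL_TREE for HIGH_VALUE and a
   "default:" passes NULL_TREE for both bounds.  */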
2139
2140 /* Build a newly constructed INTEGER_CST node. LEN and EXT_LEN are the
2141 values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
2142 The latter determines the length of the HOST_WIDE_INT vector. */
2143
2144 tree
2145 make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
2146 {
2147 tree t;
2148 int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
2149 + sizeof (struct tree_int_cst));
2150
2151 gcc_assert (len);
2152 record_node_allocation_statistics (INTEGER_CST, length);
2153
2154 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2155
2156 TREE_SET_CODE (t, INTEGER_CST);
2157 TREE_INT_CST_NUNITS (t) = len;
2158 TREE_INT_CST_EXT_NUNITS (t) = ext_len;
2159 /* to_offset can only be applied to trees that are offset_int-sized
2160 or smaller. EXT_LEN is correct if it fits, otherwise the constant
2161 must be exactly the precision of offset_int and so LEN is correct. */
2162 if (ext_len <= OFFSET_INT_ELTS)
2163 TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
2164 else
2165 TREE_INT_CST_OFFSET_NUNITS (t) = len;
2166
2167 TREE_CONSTANT (t) = 1;
2168
2169 return t;
2170 }
2171
2172 /* Build a newly constructed TREE_VEC node of length LEN. */
2173
2174 tree
2175 make_tree_vec_stat (int len MEM_STAT_DECL)
2176 {
2177 tree t;
2178 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2179
2180 record_node_allocation_statistics (TREE_VEC, length);
2181
2182 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
2183
2184 TREE_SET_CODE (t, TREE_VEC);
2185 TREE_VEC_LENGTH (t) = len;
2186
2187 return t;
2188 }
2189
2190 /* Grow a TREE_VEC node to new length LEN. */
2191
2192 tree
2193 grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
2194 {
2195 gcc_assert (TREE_CODE (v) == TREE_VEC);
2196
2197 int oldlen = TREE_VEC_LENGTH (v);
2198 gcc_assert (len > oldlen);
2199
2200 int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
2201 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);
2202
2203 record_node_allocation_statistics (TREE_VEC, length - oldlength);
2204
2205 v = (tree) ggc_realloc (v, length PASS_MEM_STAT);
2206
2207 TREE_VEC_LENGTH (v) = len;
2208
2209 return v;
2210 }
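
/* Illustrative sketch (editorial example, not part of the original file):
   building a TREE_VEC of three elements and later growing it to five:

     tree v = make_tree_vec (3);
     TREE_VEC_ELT (v, 0) = integer_zero_node;
     v = grow_tree_vec (v, 5);

   grow_tree_vec may return a different pointer because the node is
   reallocated, so callers must use the returned value.  */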
2211 \f
2212 /* Return 1 if EXPR is the integer constant zero or a complex constant
2213 of zero. */
2214
2215 int
2216 integer_zerop (const_tree expr)
2217 {
2218 STRIP_NOPS (expr);
2219
2220 switch (TREE_CODE (expr))
2221 {
2222 case INTEGER_CST:
2223 return wi::eq_p (expr, 0);
2224 case COMPLEX_CST:
2225 return (integer_zerop (TREE_REALPART (expr))
2226 && integer_zerop (TREE_IMAGPART (expr)));
2227 case VECTOR_CST:
2228 {
2229 unsigned i;
2230 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2231 if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
2232 return false;
2233 return true;
2234 }
2235 default:
2236 return false;
2237 }
2238 }
2239
2240 /* Return 1 if EXPR is the integer constant one or the corresponding
2241 complex constant. */
2242
2243 int
2244 integer_onep (const_tree expr)
2245 {
2246 STRIP_NOPS (expr);
2247
2248 switch (TREE_CODE (expr))
2249 {
2250 case INTEGER_CST:
2251 return wi::eq_p (wi::to_widest (expr), 1);
2252 case COMPLEX_CST:
2253 return (integer_onep (TREE_REALPART (expr))
2254 && integer_zerop (TREE_IMAGPART (expr)));
2255 case VECTOR_CST:
2256 {
2257 unsigned i;
2258 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2259 if (!integer_onep (VECTOR_CST_ELT (expr, i)))
2260 return false;
2261 return true;
2262 }
2263 default:
2264 return false;
2265 }
2266 }
2267
2268 /* Return 1 if EXPR is the integer constant one. For complex and vector,
2269 return 1 if every piece is the integer constant one. */
2270
2271 int
2272 integer_each_onep (const_tree expr)
2273 {
2274 STRIP_NOPS (expr);
2275
2276 if (TREE_CODE (expr) == COMPLEX_CST)
2277 return (integer_onep (TREE_REALPART (expr))
2278 && integer_onep (TREE_IMAGPART (expr)));
2279 else
2280 return integer_onep (expr);
2281 }
2282
2283 /* Return 1 if EXPR is an integer containing all 1's in as much precision as
2284 it contains, or a complex or vector whose subparts are such integers. */
2285
2286 int
2287 integer_all_onesp (const_tree expr)
2288 {
2289 STRIP_NOPS (expr);
2290
2291 if (TREE_CODE (expr) == COMPLEX_CST
2292 && integer_all_onesp (TREE_REALPART (expr))
2293 && integer_all_onesp (TREE_IMAGPART (expr)))
2294 return 1;
2295
2296 else if (TREE_CODE (expr) == VECTOR_CST)
2297 {
2298 unsigned i;
2299 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2300 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i)))
2301 return 0;
2302 return 1;
2303 }
2304
2305 else if (TREE_CODE (expr) != INTEGER_CST)
2306 return 0;
2307
2308 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr;
2309 }
2310
2311 /* Return 1 if EXPR is the integer constant minus one. */
2312
2313 int
2314 integer_minus_onep (const_tree expr)
2315 {
2316 STRIP_NOPS (expr);
2317
2318 if (TREE_CODE (expr) == COMPLEX_CST)
2319 return (integer_all_onesp (TREE_REALPART (expr))
2320 && integer_zerop (TREE_IMAGPART (expr)));
2321 else
2322 return integer_all_onesp (expr);
2323 }
2324
2325 /* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only
2326 one bit on). */
2327
2328 int
2329 integer_pow2p (const_tree expr)
2330 {
2331 STRIP_NOPS (expr);
2332
2333 if (TREE_CODE (expr) == COMPLEX_CST
2334 && integer_pow2p (TREE_REALPART (expr))
2335 && integer_zerop (TREE_IMAGPART (expr)))
2336 return 1;
2337
2338 if (TREE_CODE (expr) != INTEGER_CST)
2339 return 0;
2340
2341 return wi::popcount (expr) == 1;
2342 }
2343
2344 /* Return 1 if EXPR is an integer constant other than zero or a
2345 complex constant other than zero. */
2346
2347 int
2348 integer_nonzerop (const_tree expr)
2349 {
2350 STRIP_NOPS (expr);
2351
2352 return ((TREE_CODE (expr) == INTEGER_CST
2353 && !wi::eq_p (expr, 0))
2354 || (TREE_CODE (expr) == COMPLEX_CST
2355 && (integer_nonzerop (TREE_REALPART (expr))
2356 || integer_nonzerop (TREE_IMAGPART (expr)))));
2357 }
2358
2359 /* Return 1 if EXPR is the integer constant one. For vector,
2360 return 1 if every piece is the integer constant minus one
2361 (representing the value TRUE). */
2362
2363 int
2364 integer_truep (const_tree expr)
2365 {
2366 STRIP_NOPS (expr);
2367
2368 if (TREE_CODE (expr) == VECTOR_CST)
2369 return integer_all_onesp (expr);
2370 return integer_onep (expr);
2371 }
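
/* Illustrative sketch (editorial example, not part of the original file)
   of the predicates above on a few constants:

     integer_zerop (build_int_cst (integer_type_node, 0))        -- 1
     integer_onep (build_int_cst (integer_type_node, 1))         -- 1
     integer_pow2p (build_int_cst (integer_type_node, 8))        -- 1
     integer_all_onesp (build_all_ones_cst (unsigned_type_node)) -- 1

   Each predicate looks through no-op conversions first via STRIP_NOPS.  */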
2372
2373 /* Return 1 if EXPR is the fixed-point constant zero. */
2374
2375 int
2376 fixed_zerop (const_tree expr)
2377 {
2378 return (TREE_CODE (expr) == FIXED_CST
2379 && TREE_FIXED_CST (expr).data.is_zero ());
2380 }
2381
2382 /* Return the base-2 logarithm of a tree node known to be a constant
2383 power of two. */
2384
2385 int
2386 tree_log2 (const_tree expr)
2387 {
2388 STRIP_NOPS (expr);
2389
2390 if (TREE_CODE (expr) == COMPLEX_CST)
2391 return tree_log2 (TREE_REALPART (expr));
2392
2393 return wi::exact_log2 (expr);
2394 }
2395
2396 /* Similar, but return the largest integer Y such that 2 ** Y is less
2397 than or equal to EXPR. */
2398
2399 int
2400 tree_floor_log2 (const_tree expr)
2401 {
2402 STRIP_NOPS (expr);
2403
2404 if (TREE_CODE (expr) == COMPLEX_CST)
2405 return tree_log2 (TREE_REALPART (expr));
2406
2407 return wi::floor_log2 (expr);
2408 }
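
/* Illustrative sketch (editorial example, not part of the original file):

     tree_log2 (build_int_cst (integer_type_node, 8))         -- 3
     tree_floor_log2 (build_int_cst (integer_type_node, 10))  -- 3

   tree_log2 should only be applied to a constant already known to be
   a power of two; wi::exact_log2 returns -1 otherwise.  */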
2409
2410 /* Return number of known trailing zero bits in EXPR, or, if the value of
2411 EXPR is known to be zero, the precision of it's type. */
2412
2413 unsigned int
2414 tree_ctz (const_tree expr)
2415 {
2416 if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
2417 && !POINTER_TYPE_P (TREE_TYPE (expr)))
2418 return 0;
2419
2420 unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
2421 switch (TREE_CODE (expr))
2422 {
2423 case INTEGER_CST:
2424 ret1 = wi::ctz (expr);
2425 return MIN (ret1, prec);
2426 case SSA_NAME:
2427 ret1 = wi::ctz (get_nonzero_bits (expr));
2428 return MIN (ret1, prec);
2429 case PLUS_EXPR:
2430 case MINUS_EXPR:
2431 case BIT_IOR_EXPR:
2432 case BIT_XOR_EXPR:
2433 case MIN_EXPR:
2434 case MAX_EXPR:
2435 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2436 if (ret1 == 0)
2437 return ret1;
2438 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2439 return MIN (ret1, ret2);
2440 case POINTER_PLUS_EXPR:
2441 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2442 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2443 /* The second operand is sizetype, which could in theory be
2444 wider than the pointer's precision. Make sure we never
2445 return more than prec. */
2446 ret2 = MIN (ret2, prec);
2447 return MIN (ret1, ret2);
2448 case BIT_AND_EXPR:
2449 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2450 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2451 return MAX (ret1, ret2);
2452 case MULT_EXPR:
2453 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2454 ret2 = tree_ctz (TREE_OPERAND (expr, 1));
2455 return MIN (ret1 + ret2, prec);
2456 case LSHIFT_EXPR:
2457 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2458 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2459 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2460 {
2461 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2462 return MIN (ret1 + ret2, prec);
2463 }
2464 return ret1;
2465 case RSHIFT_EXPR:
2466 if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
2467 && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
2468 {
2469 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2470 ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
2471 if (ret1 > ret2)
2472 return ret1 - ret2;
2473 }
2474 return 0;
2475 case TRUNC_DIV_EXPR:
2476 case CEIL_DIV_EXPR:
2477 case FLOOR_DIV_EXPR:
2478 case ROUND_DIV_EXPR:
2479 case EXACT_DIV_EXPR:
2480 if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2481 && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
2482 {
2483 int l = tree_log2 (TREE_OPERAND (expr, 1));
2484 if (l >= 0)
2485 {
2486 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2487 ret2 = l;
2488 if (ret1 > ret2)
2489 return ret1 - ret2;
2490 }
2491 }
2492 return 0;
2493 CASE_CONVERT:
2494 ret1 = tree_ctz (TREE_OPERAND (expr, 0));
2495 if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
2496 ret1 = prec;
2497 return MIN (ret1, prec);
2498 case SAVE_EXPR:
2499 return tree_ctz (TREE_OPERAND (expr, 0));
2500 case COND_EXPR:
2501 ret1 = tree_ctz (TREE_OPERAND (expr, 1));
2502 if (ret1 == 0)
2503 return 0;
2504 ret2 = tree_ctz (TREE_OPERAND (expr, 2));
2505 return MIN (ret1, ret2);
2506 case COMPOUND_EXPR:
2507 return tree_ctz (TREE_OPERAND (expr, 1));
2508 case ADDR_EXPR:
2509 ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
2510 if (ret1 > BITS_PER_UNIT)
2511 {
2512 ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
2513 return MIN (ret1, prec);
2514 }
2515 return 0;
2516 default:
2517 return 0;
2518 }
2519 }
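
/* Illustrative sketch (editorial example, not part of the original file):
   for an expression X * 4 + 8 where nothing is known about the
   SSA_NAME X, the rules above combine as

     tree_ctz (X)          = 0
     tree_ctz (X * 4)      = 0 + 2 = 2
     tree_ctz (X * 4 + 8)  = MIN (2, ctz (8)) = MIN (2, 3) = 2

   so the whole expression is known to be a multiple of 4.  */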
2520
2521 /* Return 1 if EXPR is the real constant zero. Trailing zeroes matter for
2522 decimal float constants, so don't return 1 for them. */
2523
2524 int
2525 real_zerop (const_tree expr)
2526 {
2527 STRIP_NOPS (expr);
2528
2529 switch (TREE_CODE (expr))
2530 {
2531 case REAL_CST:
2532 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
2533 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2534 case COMPLEX_CST:
2535 return real_zerop (TREE_REALPART (expr))
2536 && real_zerop (TREE_IMAGPART (expr));
2537 case VECTOR_CST:
2538 {
2539 unsigned i;
2540 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2541 if (!real_zerop (VECTOR_CST_ELT (expr, i)))
2542 return false;
2543 return true;
2544 }
2545 default:
2546 return false;
2547 }
2548 }
2549
2550 /* Return 1 if EXPR is the real constant one in real or complex form.
2551 Trailing zeroes matter for decimal float constants, so don't return
2552 1 for them. */
2553
2554 int
2555 real_onep (const_tree expr)
2556 {
2557 STRIP_NOPS (expr);
2558
2559 switch (TREE_CODE (expr))
2560 {
2561 case REAL_CST:
2562 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
2563 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2564 case COMPLEX_CST:
2565 return real_onep (TREE_REALPART (expr))
2566 && real_zerop (TREE_IMAGPART (expr));
2567 case VECTOR_CST:
2568 {
2569 unsigned i;
2570 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2571 if (!real_onep (VECTOR_CST_ELT (expr, i)))
2572 return false;
2573 return true;
2574 }
2575 default:
2576 return false;
2577 }
2578 }
2579
2580 /* Return 1 if EXPR is the real constant minus one. Trailing zeroes
2581 matter for decimal float constants, so don't return 1 for them. */
2582
2583 int
2584 real_minus_onep (const_tree expr)
2585 {
2586 STRIP_NOPS (expr);
2587
2588 switch (TREE_CODE (expr))
2589 {
2590 case REAL_CST:
2591 return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
2592 && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
2593 case COMPLEX_CST:
2594 return real_minus_onep (TREE_REALPART (expr))
2595 && real_zerop (TREE_IMAGPART (expr));
2596 case VECTOR_CST:
2597 {
2598 unsigned i;
2599 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
2600 if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
2601 return false;
2602 return true;
2603 }
2604 default:
2605 return false;
2606 }
2607 }
2608
2609 /* Nonzero if EXP is a constant or a cast of a constant. */
2610
2611 int
2612 really_constant_p (const_tree exp)
2613 {
2614 /* This is not quite the same as STRIP_NOPS. It does more. */
2615 while (CONVERT_EXPR_P (exp)
2616 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2617 exp = TREE_OPERAND (exp, 0);
2618 return TREE_CONSTANT (exp);
2619 }
2620 \f
2621 /* Return first list element whose TREE_VALUE is ELEM.
2622 Return 0 if ELEM is not in LIST. */
2623
2624 tree
2625 value_member (tree elem, tree list)
2626 {
2627 while (list)
2628 {
2629 if (elem == TREE_VALUE (list))
2630 return list;
2631 list = TREE_CHAIN (list);
2632 }
2633 return NULL_TREE;
2634 }
2635
2636 /* Return first list element whose TREE_PURPOSE is ELEM.
2637 Return 0 if ELEM is not in LIST. */
2638
2639 tree
2640 purpose_member (const_tree elem, tree list)
2641 {
2642 while (list)
2643 {
2644 if (elem == TREE_PURPOSE (list))
2645 return list;
2646 list = TREE_CHAIN (list);
2647 }
2648 return NULL_TREE;
2649 }
2650
2651 /* Return true if ELEM is in V. */
2652
2653 bool
2654 vec_member (const_tree elem, vec<tree, va_gc> *v)
2655 {
2656 unsigned ix;
2657 tree t;
2658 FOR_EACH_VEC_SAFE_ELT (v, ix, t)
2659 if (elem == t)
2660 return true;
2661 return false;
2662 }
2663
2664 /* Returns element number IDX (zero-origin) of chain CHAIN, or
2665 NULL_TREE. */
2666
2667 tree
2668 chain_index (int idx, tree chain)
2669 {
2670 for (; chain && idx > 0; --idx)
2671 chain = TREE_CHAIN (chain);
2672 return chain;
2673 }
2674
2675 /* Return nonzero if ELEM is part of the chain CHAIN. */
2676
2677 int
2678 chain_member (const_tree elem, const_tree chain)
2679 {
2680 while (chain)
2681 {
2682 if (elem == chain)
2683 return 1;
2684 chain = DECL_CHAIN (chain);
2685 }
2686
2687 return 0;
2688 }
2689
2690 /* Return the length of a chain of nodes chained through TREE_CHAIN.
2691 We expect a null pointer to mark the end of the chain.
2692 This is the Lisp primitive `length'. */
2693
2694 int
2695 list_length (const_tree t)
2696 {
2697 const_tree p = t;
2698 #ifdef ENABLE_TREE_CHECKING
2699 const_tree q = t;
2700 #endif
2701 int len = 0;
2702
2703 while (p)
2704 {
2705 p = TREE_CHAIN (p);
2706 #ifdef ENABLE_TREE_CHECKING
2707 if (len % 2)
2708 q = TREE_CHAIN (q);
2709 gcc_assert (p != q);
2710 #endif
2711 len++;
2712 }
2713
2714 return len;
2715 }
2716
2717 /* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
2718 UNION_TYPE TYPE, or NULL_TREE if none. */
2719
2720 tree
2721 first_field (const_tree type)
2722 {
2723 tree t = TYPE_FIELDS (type);
2724 while (t && TREE_CODE (t) != FIELD_DECL)
2725 t = TREE_CHAIN (t);
2726 return t;
2727 }
2728
2729 /* Concatenate two chains of nodes (chained through TREE_CHAIN)
2730 by modifying the last node in chain 1 to point to chain 2.
2731 This is the Lisp primitive `nconc'. */
2732
2733 tree
2734 chainon (tree op1, tree op2)
2735 {
2736 tree t1;
2737
2738 if (!op1)
2739 return op2;
2740 if (!op2)
2741 return op1;
2742
2743 for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
2744 continue;
2745 TREE_CHAIN (t1) = op2;
2746
2747 #ifdef ENABLE_TREE_CHECKING
2748 {
2749 tree t2;
2750 for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
2751 gcc_assert (t2 != t1);
2752 }
2753 #endif
2754
2755 return op1;
2756 }
2757
2758 /* Return the last node in a chain of nodes (chained through TREE_CHAIN). */
2759
2760 tree
2761 tree_last (tree chain)
2762 {
2763 tree next;
2764 if (chain)
2765 while ((next = TREE_CHAIN (chain)))
2766 chain = next;
2767 return chain;
2768 }
2769
2770 /* Reverse the order of elements in the chain T,
2771 and return the new head of the chain (old last element). */
2772
2773 tree
2774 nreverse (tree t)
2775 {
2776 tree prev = 0, decl, next;
2777 for (decl = t; decl; decl = next)
2778 {
2779 /* We shouldn't be using this function to reverse BLOCK chains; we
2780 have blocks_nreverse for that. */
2781 gcc_checking_assert (TREE_CODE (decl) != BLOCK);
2782 next = TREE_CHAIN (decl);
2783 TREE_CHAIN (decl) = prev;
2784 prev = decl;
2785 }
2786 return prev;
2787 }
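
/* Illustrative sketch (editorial example, not part of the original file):
   the chain helpers above compose in the usual Lisp-like way:

     tree l1 = build_tree_list (NULL_TREE, integer_zero_node);
     tree l2 = build_tree_list (NULL_TREE, integer_one_node);
     tree l = chainon (l1, l2);        -- a chain of length 2
     l = nreverse (l);                 -- l2's node now comes first
     gcc_assert (list_length (l) == 2);

   build_tree_list, defined just below, is the usual way to create a
   one-element TREE_LIST.  */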
2788 \f
2789 /* Return a newly created TREE_LIST node whose
2790 purpose and value fields are PARM and VALUE. */
2791
2792 tree
2793 build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
2794 {
2795 tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
2796 TREE_PURPOSE (t) = parm;
2797 TREE_VALUE (t) = value;
2798 return t;
2799 }
2800
2801 /* Build a chain of TREE_LIST nodes from a vector. */
2802
2803 tree
2804 build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
2805 {
2806 tree ret = NULL_TREE;
2807 tree *pp = &ret;
2808 unsigned int i;
2809 tree t;
2810 FOR_EACH_VEC_SAFE_ELT (vec, i, t)
2811 {
2812 *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
2813 pp = &TREE_CHAIN (*pp);
2814 }
2815 return ret;
2816 }
2817
2818 /* Return a newly created TREE_LIST node whose
2819 purpose and value fields are PURPOSE and VALUE
2820 and whose TREE_CHAIN is CHAIN. */
2821
2822 tree
2823 tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
2824 {
2825 tree node;
2826
2827 node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
2828 memset (node, 0, sizeof (struct tree_common));
2829
2830 record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));
2831
2832 TREE_SET_CODE (node, TREE_LIST);
2833 TREE_CHAIN (node) = chain;
2834 TREE_PURPOSE (node) = purpose;
2835 TREE_VALUE (node) = value;
2836 return node;
2837 }
2838
2839 /* Return the values of the elements of a CONSTRUCTOR as a vector of
2840 trees. */
2841
2842 vec<tree, va_gc> *
2843 ctor_to_vec (tree ctor)
2844 {
2845 vec<tree, va_gc> *vec;
2846 vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
2847 unsigned int ix;
2848 tree val;
2849
2850 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
2851 vec->quick_push (val);
2852
2853 return vec;
2854 }
2855 \f
2856 /* Return the size nominally occupied by an object of type TYPE
2857 when it resides in memory. The value is measured in units of bytes,
2858 and its data type is that normally used for type sizes
2859 (which is the first type created by make_signed_type or
2860 make_unsigned_type). */
2861
2862 tree
2863 size_in_bytes (const_tree type)
2864 {
2865 tree t;
2866
2867 if (type == error_mark_node)
2868 return integer_zero_node;
2869
2870 type = TYPE_MAIN_VARIANT (type);
2871 t = TYPE_SIZE_UNIT (type);
2872
2873 if (t == 0)
2874 {
2875 lang_hooks.types.incomplete_type_error (NULL_TREE, type);
2876 return size_zero_node;
2877 }
2878
2879 return t;
2880 }
2881
2882 /* Return the size of TYPE (in bytes) as a wide integer
2883 or return -1 if the size can vary or is larger than an integer. */
2884
2885 HOST_WIDE_INT
2886 int_size_in_bytes (const_tree type)
2887 {
2888 tree t;
2889
2890 if (type == error_mark_node)
2891 return 0;
2892
2893 type = TYPE_MAIN_VARIANT (type);
2894 t = TYPE_SIZE_UNIT (type);
2895
2896 if (t && tree_fits_uhwi_p (t))
2897 return TREE_INT_CST_LOW (t);
2898 else
2899 return -1;
2900 }
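
/* Illustrative sketch (editorial example, not part of the original file):

     int_size_in_bytes (integer_type_node)                     -- typically 4
     int_size_in_bytes (ptr_type_node)                         -- 4 or 8, per target
     int_size_in_bytes (an incomplete or variable-length type) -- -1

   Only the -1 convention for variable or unknown sizes is part of the
   interface; the concrete sizes depend on the target.  */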
2901
2902 /* Return the maximum size of TYPE (in bytes) as a wide integer
2903 or return -1 if the size can vary or is larger than an integer. */
2904
2905 HOST_WIDE_INT
2906 max_int_size_in_bytes (const_tree type)
2907 {
2908 HOST_WIDE_INT size = -1;
2909 tree size_tree;
2910
2911 /* If this is an array type, check for a possible MAX_SIZE attached. */
2912
2913 if (TREE_CODE (type) == ARRAY_TYPE)
2914 {
2915 size_tree = TYPE_ARRAY_MAX_SIZE (type);
2916
2917 if (size_tree && tree_fits_uhwi_p (size_tree))
2918 size = tree_to_uhwi (size_tree);
2919 }
2920
2921 /* If we still haven't been able to get a size, see if the language
2922 can compute a maximum size. */
2923
2924 if (size == -1)
2925 {
2926 size_tree = lang_hooks.types.max_size (type);
2927
2928 if (size_tree && tree_fits_uhwi_p (size_tree))
2929 size = tree_to_uhwi (size_tree);
2930 }
2931
2932 return size;
2933 }
2934 \f
2935 /* Return the bit position of FIELD, in bits from the start of the record.
2936 This is a tree of type bitsizetype. */
2937
2938 tree
2939 bit_position (const_tree field)
2940 {
2941 return bit_from_pos (DECL_FIELD_OFFSET (field),
2942 DECL_FIELD_BIT_OFFSET (field));
2943 }
2944 \f
2945 /* Return the byte position of FIELD, in bytes from the start of the record.
2946 This is a tree of type sizetype. */
2947
2948 tree
2949 byte_position (const_tree field)
2950 {
2951 return byte_from_pos (DECL_FIELD_OFFSET (field),
2952 DECL_FIELD_BIT_OFFSET (field));
2953 }
2954
2955 /* Likewise, but return as an integer. It must be representable in
2956 that way (since it could be a signed value, we don't have the
2957 option of returning -1 like int_size_in_bytes can). */
2958
2959 HOST_WIDE_INT
2960 int_byte_position (const_tree field)
2961 {
2962 return tree_to_shwi (byte_position (field));
2963 }
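
/* Illustrative sketch (editorial example, not part of the original file):
   for a FIELD_DECL laid out at byte 4 of its record, on a target with
   BITS_PER_UNIT == 8,

     bit_position (field)       -- the bitsizetype constant 32
     byte_position (field)      -- the sizetype constant 4
     int_byte_position (field)  -- the HOST_WIDE_INT 4.  */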
2964 \f
2965 /* Return the strictest alignment, in bits, that T is known to have. */
2966
2967 unsigned int
2968 expr_align (const_tree t)
2969 {
2970 unsigned int align0, align1;
2971
2972 switch (TREE_CODE (t))
2973 {
2974 CASE_CONVERT: case NON_LVALUE_EXPR:
2975 /* If we have conversions, we know that the alignment of the
2976 object must meet each of the alignments of the types. */
2977 align0 = expr_align (TREE_OPERAND (t, 0));
2978 align1 = TYPE_ALIGN (TREE_TYPE (t));
2979 return MAX (align0, align1);
2980
2981 case SAVE_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR:
2982 case INIT_EXPR: case TARGET_EXPR: case WITH_CLEANUP_EXPR:
2983 case CLEANUP_POINT_EXPR:
2984 /* These don't change the alignment of an object. */
2985 return expr_align (TREE_OPERAND (t, 0));
2986
2987 case COND_EXPR:
2988 /* The best we can do is say that the alignment is the least aligned
2989 of the two arms. */
2990 align0 = expr_align (TREE_OPERAND (t, 1));
2991 align1 = expr_align (TREE_OPERAND (t, 2));
2992 return MIN (align0, align1);
2993
2994 /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
2995 meaningfully; it is always 1. */
2996 case LABEL_DECL: case CONST_DECL:
2997 case VAR_DECL: case PARM_DECL: case RESULT_DECL:
2998 case FUNCTION_DECL:
2999 gcc_assert (DECL_ALIGN (t) != 0);
3000 return DECL_ALIGN (t);
3001
3002 default:
3003 break;
3004 }
3005
3006 /* Otherwise take the alignment from that of the type. */
3007 return TYPE_ALIGN (TREE_TYPE (t));
3008 }
3009 \f
3010 /* Return, as a tree node, the number of elements for TYPE (which is an
3011 ARRAY_TYPE) minus one. This counts only elements of the top array. */
3012
3013 tree
3014 array_type_nelts (const_tree type)
3015 {
3016 tree index_type, min, max;
3017
3018 /* If they did it with unspecified bounds, then we should have already
3019 given an error about it before we got here. */
3020 if (! TYPE_DOMAIN (type))
3021 return error_mark_node;
3022
3023 index_type = TYPE_DOMAIN (type);
3024 min = TYPE_MIN_VALUE (index_type);
3025 max = TYPE_MAX_VALUE (index_type);
3026
3027 /* TYPE_MAX_VALUE may not be set if the array has unknown length. */
3028 if (!max)
3029 return error_mark_node;
3030
3031 return (integer_zerop (min)
3032 ? max
3033 : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
3034 }
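
/* Illustrative sketch (editorial example, not part of the original file):
   for the C declaration "int a[10]" the domain is [0, 9], so
   array_type_nelts returns the INTEGER_CST 9; the number of elements
   is that value plus one.  */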
3035 \f
3036 /* If arg is static -- a reference to an object in static storage -- then
3037 return the object. This is not the same as the C meaning of `static'.
3038 If arg isn't static, return NULL. */
3039
3040 tree
3041 staticp (tree arg)
3042 {
3043 switch (TREE_CODE (arg))
3044 {
3045 case FUNCTION_DECL:
3046 /* Nested functions are static, even though taking their address will
3047 involve a trampoline as we unnest the nested function and create
3048 the trampoline on the tree level. */
3049 return arg;
3050
3051 case VAR_DECL:
3052 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3053 && ! DECL_THREAD_LOCAL_P (arg)
3054 && ! DECL_DLLIMPORT_P (arg)
3055 ? arg : NULL);
3056
3057 case CONST_DECL:
3058 return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
3059 ? arg : NULL);
3060
3061 case CONSTRUCTOR:
3062 return TREE_STATIC (arg) ? arg : NULL;
3063
3064 case LABEL_DECL:
3065 case STRING_CST:
3066 return arg;
3067
3068 case COMPONENT_REF:
3069 /* If the thing being referenced is not a field, then it is
3070 something language specific. */
3071 gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);
3072
3073 /* If we are referencing a bitfield, we can't evaluate an
3074 ADDR_EXPR at compile time and so it isn't a constant. */
3075 if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
3076 return NULL;
3077
3078 return staticp (TREE_OPERAND (arg, 0));
3079
3080 case BIT_FIELD_REF:
3081 return NULL;
3082
3083 case INDIRECT_REF:
3084 return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;
3085
3086 case ARRAY_REF:
3087 case ARRAY_RANGE_REF:
3088 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
3089 && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
3090 return staticp (TREE_OPERAND (arg, 0));
3091 else
3092 return NULL;
3093
3094 case COMPOUND_LITERAL_EXPR:
3095 return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;
3096
3097 default:
3098 return NULL;
3099 }
3100 }
3101
3102 \f
3103
3104
3105 /* Return whether OP is a DECL whose address is function-invariant. */
3106
3107 bool
3108 decl_address_invariant_p (const_tree op)
3109 {
3110 /* The conditions below are slightly less strict than the ones in
3111 staticp. */
3112
3113 switch (TREE_CODE (op))
3114 {
3115 case PARM_DECL:
3116 case RESULT_DECL:
3117 case LABEL_DECL:
3118 case FUNCTION_DECL:
3119 return true;
3120
3121 case VAR_DECL:
3122 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3123 || DECL_THREAD_LOCAL_P (op)
3124 || DECL_CONTEXT (op) == current_function_decl
3125 || decl_function_context (op) == current_function_decl)
3126 return true;
3127 break;
3128
3129 case CONST_DECL:
3130 if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
3131 || decl_function_context (op) == current_function_decl)
3132 return true;
3133 break;
3134
3135 default:
3136 break;
3137 }
3138
3139 return false;
3140 }
3141
3142 /* Return whether OP is a DECL whose address is interprocedural-invariant. */
3143
3144 bool
3145 decl_address_ip_invariant_p (const_tree op)
3146 {
3147 /* The conditions below are slightly less strict than the ones in
3148 staticp. */
3149
3150 switch (TREE_CODE (op))
3151 {
3152 case LABEL_DECL:
3153 case FUNCTION_DECL:
3154 case STRING_CST:
3155 return true;
3156
3157 case VAR_DECL:
3158 if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
3159 && !DECL_DLLIMPORT_P (op))
3160 || DECL_THREAD_LOCAL_P (op))
3161 return true;
3162 break;
3163
3164 case CONST_DECL:
3165 if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
3166 return true;
3167 break;
3168
3169 default:
3170 break;
3171 }
3172
3173 return false;
3174 }
3175
3176
3177 /* Return true if T is function-invariant (internal function, does
3178 not handle arithmetic; that's handled in skip_simple_arithmetic and
3179 tree_invariant_p). */
3180
3181 static bool tree_invariant_p (tree t);
3182
3183 static bool
3184 tree_invariant_p_1 (tree t)
3185 {
3186 tree op;
3187
3188 if (TREE_CONSTANT (t)
3189 || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
3190 return true;
3191
3192 switch (TREE_CODE (t))
3193 {
3194 case SAVE_EXPR:
3195 return true;
3196
3197 case ADDR_EXPR:
3198 op = TREE_OPERAND (t, 0);
3199 while (handled_component_p (op))
3200 {
3201 switch (TREE_CODE (op))
3202 {
3203 case ARRAY_REF:
3204 case ARRAY_RANGE_REF:
3205 if (!tree_invariant_p (TREE_OPERAND (op, 1))
3206 || TREE_OPERAND (op, 2) != NULL_TREE
3207 || TREE_OPERAND (op, 3) != NULL_TREE)
3208 return false;
3209 break;
3210
3211 case COMPONENT_REF:
3212 if (TREE_OPERAND (op, 2) != NULL_TREE)
3213 return false;
3214 break;
3215
3216 default:;
3217 }
3218 op = TREE_OPERAND (op, 0);
3219 }
3220
3221 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
3222
3223 default:
3224 break;
3225 }
3226
3227 return false;
3228 }
3229
3230 /* Return true if T is function-invariant. */
3231
3232 static bool
3233 tree_invariant_p (tree t)
3234 {
3235 tree inner = skip_simple_arithmetic (t);
3236 return tree_invariant_p_1 (inner);
3237 }
3238
3239 /* Wrap a SAVE_EXPR around EXPR, if appropriate.
3240 Do this to any expression which may be used in more than one place,
3241 but must be evaluated only once.
3242
3243 Normally, expand_expr would reevaluate the expression each time.
3244 Calling save_expr produces something that is evaluated and recorded
3245 the first time expand_expr is called on it. Subsequent calls to
3246 expand_expr just reuse the recorded value.
3247
3248 The call to expand_expr that generates code that actually computes
3249 the value is the first call *at compile time*. Subsequent calls
3250 *at compile time* generate code to use the saved value.
3251 This produces correct result provided that *at run time* control
3252 always flows through the insns made by the first expand_expr
3253 before reaching the other places where the save_expr was evaluated.
3254 You, the caller of save_expr, must make sure this is so.
3255
3256 Constants, and certain read-only nodes, are returned with no
3257 SAVE_EXPR because that is safe. Expressions containing placeholders
3258 are not touched; see tree.def for an explanation of what these
3259 are used for. */
3260
3261 tree
3262 save_expr (tree expr)
3263 {
3264 tree t = fold (expr);
3265 tree inner;
3266
3267 /* If the tree evaluates to a constant, then we don't want to hide that
3268 fact (i.e. this allows further folding, and direct checks for constants).
3269 However, a read-only object that has side effects cannot be bypassed.
3270 Since it is no problem to reevaluate literals, we just return the
3271 literal node. */
3272 inner = skip_simple_arithmetic (t);
3273 if (TREE_CODE (inner) == ERROR_MARK)
3274 return inner;
3275
3276 if (tree_invariant_p_1 (inner))
3277 return t;
3278
3279 /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
3280 it means that the size or offset of some field of an object depends on
3281 the value within another field.
3282
3283 Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
3284 and some variable since it would then need to be both evaluated once and
3285 evaluated more than once. Front-ends must assure this case cannot
3286 happen by surrounding any such subexpressions in their own SAVE_EXPR
3287 and forcing evaluation at the proper time. */
3288 if (contains_placeholder_p (inner))
3289 return t;
3290
3291 t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
3292 SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));
3293
3294 /* This expression might be placed ahead of a jump to ensure that the
3295 value was computed on both sides of the jump. So make sure it isn't
3296 eliminated as dead. */
3297 TREE_SIDE_EFFECTS (t) = 1;
3298 return t;
3299 }
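
/* Illustrative sketch (editorial example, not part of the original file):
   a front end lowering a fictitious ABS (e) into e < 0 ? -e : e would
   wrap the operand first,

     tree t = save_expr (e);
     tree result = build3 (COND_EXPR, type,
                           build2 (LT_EXPR, boolean_type_node, t,
                                   build_zero_cst (type)),
                           build1 (NEGATE_EXPR, type, t),
                           t);

   so that E is evaluated only once even though T is used three times.
   Here E and TYPE stand for the operand and its type.  */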
3300
3301 /* Look inside EXPR into any simple arithmetic operations. Return the
3302 outermost non-arithmetic or non-invariant node. */
3303
3304 tree
3305 skip_simple_arithmetic (tree expr)
3306 {
3307 /* We don't care about whether this can be used as an lvalue in this
3308 context. */
3309 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3310 expr = TREE_OPERAND (expr, 0);
3311
3312 /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
3313 a constant, it will be more efficient to not make another SAVE_EXPR since
3314 it will allow better simplification and GCSE will be able to merge the
3315 computations if they actually occur. */
3316 while (true)
3317 {
3318 if (UNARY_CLASS_P (expr))
3319 expr = TREE_OPERAND (expr, 0);
3320 else if (BINARY_CLASS_P (expr))
3321 {
3322 if (tree_invariant_p (TREE_OPERAND (expr, 1)))
3323 expr = TREE_OPERAND (expr, 0);
3324 else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
3325 expr = TREE_OPERAND (expr, 1);
3326 else
3327 break;
3328 }
3329 else
3330 break;
3331 }
3332
3333 return expr;
3334 }
3335
3336 /* Look inside EXPR into simple arithmetic operations involving constants.
3337 Return the outermost non-arithmetic or non-constant node. */
3338
3339 tree
3340 skip_simple_constant_arithmetic (tree expr)
3341 {
3342 while (TREE_CODE (expr) == NON_LVALUE_EXPR)
3343 expr = TREE_OPERAND (expr, 0);
3344
3345 while (true)
3346 {
3347 if (UNARY_CLASS_P (expr))
3348 expr = TREE_OPERAND (expr, 0);
3349 else if (BINARY_CLASS_P (expr))
3350 {
3351 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
3352 expr = TREE_OPERAND (expr, 0);
3353 else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
3354 expr = TREE_OPERAND (expr, 1);
3355 else
3356 break;
3357 }
3358 else
3359 break;
3360 }
3361
3362 return expr;
3363 }
3364
3365 /* Return which tree structure is used by T. */
3366
3367 enum tree_node_structure_enum
3368 tree_node_structure (const_tree t)
3369 {
3370 const enum tree_code code = TREE_CODE (t);
3371 return tree_node_structure_for_code (code);
3372 }
3373
3374 /* Set various status flags when building a CALL_EXPR object T. */
3375
3376 static void
3377 process_call_operands (tree t)
3378 {
3379 bool side_effects = TREE_SIDE_EFFECTS (t);
3380 bool read_only = false;
3381 int i = call_expr_flags (t);
3382
3383 /* Calls have side-effects, except those to const or pure functions. */
3384 if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
3385 side_effects = true;
3386 /* Propagate TREE_READONLY of arguments for const functions. */
3387 if (i & ECF_CONST)
3388 read_only = true;
3389
3390 if (!side_effects || read_only)
3391 for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
3392 {
3393 tree op = TREE_OPERAND (t, i);
3394 if (op && TREE_SIDE_EFFECTS (op))
3395 side_effects = true;
3396 if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
3397 read_only = false;
3398 }
3399
3400 TREE_SIDE_EFFECTS (t) = side_effects;
3401 TREE_READONLY (t) = read_only;
3402 }
3403 \f
3404 /* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
3405 size or offset that depends on a field within a record. */
3406
3407 bool
3408 contains_placeholder_p (const_tree exp)
3409 {
3410 enum tree_code code;
3411
3412 if (!exp)
3413 return 0;
3414
3415 code = TREE_CODE (exp);
3416 if (code == PLACEHOLDER_EXPR)
3417 return 1;
3418
3419 switch (TREE_CODE_CLASS (code))
3420 {
3421 case tcc_reference:
3422 /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
3423 position computations since they will be converted into a
3424 WITH_RECORD_EXPR involving the reference, so what we assume
3425 here will be valid. */
3426 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3427
3428 case tcc_exceptional:
3429 if (code == TREE_LIST)
3430 return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
3431 || CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
3432 break;
3433
3434 case tcc_unary:
3435 case tcc_binary:
3436 case tcc_comparison:
3437 case tcc_expression:
3438 switch (code)
3439 {
3440 case COMPOUND_EXPR:
3441 /* Ignoring the first operand isn't quite right, but works best. */
3442 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));
3443
3444 case COND_EXPR:
3445 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3446 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
3447 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));
3448
3449 case SAVE_EXPR:
3450 /* The save_expr function never wraps anything containing
3451 a PLACEHOLDER_EXPR. */
3452 return 0;
3453
3454 default:
3455 break;
3456 }
3457
3458 switch (TREE_CODE_LENGTH (code))
3459 {
3460 case 1:
3461 return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
3462 case 2:
3463 return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
3464 || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
3465 default:
3466 return 0;
3467 }
3468
3469 case tcc_vl_exp:
3470 switch (code)
3471 {
3472 case CALL_EXPR:
3473 {
3474 const_tree arg;
3475 const_call_expr_arg_iterator iter;
3476 FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
3477 if (CONTAINS_PLACEHOLDER_P (arg))
3478 return 1;
3479 return 0;
3480 }
3481 default:
3482 return 0;
3483 }
3484
3485 default:
3486 return 0;
3487 }
3488 return 0;
3489 }
3490
3491 /* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
3492 directly. This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
3493 field positions. */
3494
3495 static bool
3496 type_contains_placeholder_1 (const_tree type)
3497 {
3498 /* If the size contains a placeholder or the parent type (component type in
3499 the case of arrays) involves a placeholder, this type does. */
3500 if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
3501 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
3502 || (!POINTER_TYPE_P (type)
3503 && TREE_TYPE (type)
3504 && type_contains_placeholder_p (TREE_TYPE (type))))
3505 return true;
3506
3507 /* Now do type-specific checks. Note that the last part of the check above
3508 greatly limits what we have to do below. */
3509 switch (TREE_CODE (type))
3510 {
3511 case VOID_TYPE:
3512 case POINTER_BOUNDS_TYPE:
3513 case COMPLEX_TYPE:
3514 case ENUMERAL_TYPE:
3515 case BOOLEAN_TYPE:
3516 case POINTER_TYPE:
3517 case OFFSET_TYPE:
3518 case REFERENCE_TYPE:
3519 case METHOD_TYPE:
3520 case FUNCTION_TYPE:
3521 case VECTOR_TYPE:
3522 case NULLPTR_TYPE:
3523 return false;
3524
3525 case INTEGER_TYPE:
3526 case REAL_TYPE:
3527 case FIXED_POINT_TYPE:
3528 /* Here we just check the bounds. */
3529 return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
3530 || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));
3531
3532 case ARRAY_TYPE:
3533 /* We have already checked the component type above, so just check the
3534 domain type. */
3535 return type_contains_placeholder_p (TYPE_DOMAIN (type));
3536
3537 case RECORD_TYPE:
3538 case UNION_TYPE:
3539 case QUAL_UNION_TYPE:
3540 {
3541 tree field;
3542
3543 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
3544 if (TREE_CODE (field) == FIELD_DECL
3545 && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
3546 || (TREE_CODE (type) == QUAL_UNION_TYPE
3547 && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
3548 || type_contains_placeholder_p (TREE_TYPE (field))))
3549 return true;
3550
3551 return false;
3552 }
3553
3554 default:
3555 gcc_unreachable ();
3556 }
3557 }
3558
3559 /* Wrapper around above function used to cache its result. */
3560
3561 bool
3562 type_contains_placeholder_p (tree type)
3563 {
3564 bool result;
3565
3566 /* If the contains_placeholder_bits field has been initialized,
3567 then we know the answer. */
3568 if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
3569 return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;
3570
3571 /* Indicate that we've seen this type node, and the answer is false.
3572 This is what we want to return if we run into recursion via fields. */
3573 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;
3574
3575 /* Compute the real value. */
3576 result = type_contains_placeholder_1 (type);
3577
3578 /* Store the real value. */
3579 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;
3580
3581 return result;
3582 }
3583 \f
3584 /* Push tree EXP onto vector QUEUE if it is not already present. */
3585
3586 static void
3587 push_without_duplicates (tree exp, vec<tree> *queue)
3588 {
3589 unsigned int i;
3590 tree iter;
3591
3592 FOR_EACH_VEC_ELT (*queue, i, iter)
3593 if (simple_cst_equal (iter, exp) == 1)
3594 break;
3595
3596 if (!iter)
3597 queue->safe_push (exp);
3598 }
3599
3600 /* Given a tree EXP, find all occurrences of references to fields
3601 in a PLACEHOLDER_EXPR and place them in vector REFS without
3602 duplicates. Also record VAR_DECLs and CONST_DECLs. Note that
3603 we assume here that EXP contains only arithmetic expressions
3604 or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
3605 argument list. */
3606
3607 void
3608 find_placeholder_in_expr (tree exp, vec<tree> *refs)
3609 {
3610 enum tree_code code = TREE_CODE (exp);
3611 tree inner;
3612 int i;
3613
3614 /* We handle TREE_LIST and COMPONENT_REF separately. */
3615 if (code == TREE_LIST)
3616 {
3617 FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
3618 FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
3619 }
3620 else if (code == COMPONENT_REF)
3621 {
3622 for (inner = TREE_OPERAND (exp, 0);
3623 REFERENCE_CLASS_P (inner);
3624 inner = TREE_OPERAND (inner, 0))
3625 ;
3626
3627 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
3628 push_without_duplicates (exp, refs);
3629 else
3630 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
3631 }
3632 else
3633 switch (TREE_CODE_CLASS (code))
3634 {
3635 case tcc_constant:
3636 break;
3637
3638 case tcc_declaration:
3639 /* Variables allocated to static storage can stay. */
3640 if (!TREE_STATIC (exp))
3641 push_without_duplicates (exp, refs);
3642 break;
3643
3644 case tcc_expression:
3645 /* This is the pattern built in ada/make_aligning_type. */
3646 if (code == ADDR_EXPR
3647 && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
3648 {
3649 push_without_duplicates (exp, refs);
3650 break;
3651 }
3652
3653 /* Fall through... */
3654
3655 case tcc_exceptional:
3656 case tcc_unary:
3657 case tcc_binary:
3658 case tcc_comparison:
3659 case tcc_reference:
3660 for (i = 0; i < TREE_CODE_LENGTH (code); i++)
3661 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3662 break;
3663
3664 case tcc_vl_exp:
3665 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3666 FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
3667 break;
3668
3669 default:
3670 gcc_unreachable ();
3671 }
3672 }
3673
3674 /* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
3675 return a tree with all occurrences of references to F in a
3676 PLACEHOLDER_EXPR replaced by R. Also handle VAR_DECLs and
3677 CONST_DECLs. Note that we assume here that EXP contains only
3678 arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
3679 occurring only in their argument list. */
3680
3681 tree
3682 substitute_in_expr (tree exp, tree f, tree r)
3683 {
3684 enum tree_code code = TREE_CODE (exp);
3685 tree op0, op1, op2, op3;
3686 tree new_tree;
3687
3688 /* We handle TREE_LIST and COMPONENT_REF separately. */
3689 if (code == TREE_LIST)
3690 {
3691 op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
3692 op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
3693 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3694 return exp;
3695
3696 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3697 }
3698 else if (code == COMPONENT_REF)
3699 {
3700 tree inner;
3701
3702 /* If this expression is getting a value from a PLACEHOLDER_EXPR
3703 and it is the right field, replace it with R. */
3704 for (inner = TREE_OPERAND (exp, 0);
3705 REFERENCE_CLASS_P (inner);
3706 inner = TREE_OPERAND (inner, 0))
3707 ;
3708
3709 /* The field. */
3710 op1 = TREE_OPERAND (exp, 1);
3711
3712 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
3713 return r;
3714
3715 /* If this expression hasn't been completed yet, leave it alone. */
3716 if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
3717 return exp;
3718
3719 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3720 if (op0 == TREE_OPERAND (exp, 0))
3721 return exp;
3722
3723 new_tree
3724 = fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
3725 }
3726 else
3727 switch (TREE_CODE_CLASS (code))
3728 {
3729 case tcc_constant:
3730 return exp;
3731
3732 case tcc_declaration:
3733 if (exp == f)
3734 return r;
3735 else
3736 return exp;
3737
3738 case tcc_expression:
3739 if (exp == f)
3740 return r;
3741
3742 /* Fall through... */
3743
3744 case tcc_exceptional:
3745 case tcc_unary:
3746 case tcc_binary:
3747 case tcc_comparison:
3748 case tcc_reference:
3749 switch (TREE_CODE_LENGTH (code))
3750 {
3751 case 0:
3752 return exp;
3753
3754 case 1:
3755 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3756 if (op0 == TREE_OPERAND (exp, 0))
3757 return exp;
3758
3759 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3760 break;
3761
3762 case 2:
3763 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3764 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3765
3766 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3767 return exp;
3768
3769 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3770 break;
3771
3772 case 3:
3773 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3774 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3775 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3776
3777 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3778 && op2 == TREE_OPERAND (exp, 2))
3779 return exp;
3780
3781 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3782 break;
3783
3784 case 4:
3785 op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
3786 op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
3787 op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
3788 op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);
3789
3790 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3791 && op2 == TREE_OPERAND (exp, 2)
3792 && op3 == TREE_OPERAND (exp, 3))
3793 return exp;
3794
3795 new_tree
3796 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3797 break;
3798
3799 default:
3800 gcc_unreachable ();
3801 }
3802 break;
3803
3804 case tcc_vl_exp:
3805 {
3806 int i;
3807
3808 new_tree = NULL_TREE;
3809
3810 /* If we are trying to replace F with a constant, inline back
3811 functions which do nothing else than computing a value from
3812 the arguments they are passed. This makes it possible to
3813 fold partially or entirely the replacement expression. */
3814 if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
3815 {
3816 tree t = maybe_inline_call_in_expr (exp);
3817 if (t)
3818 return SUBSTITUTE_IN_EXPR (t, f, r);
3819 }
3820
3821 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3822 {
3823 tree op = TREE_OPERAND (exp, i);
3824 tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
3825 if (new_op != op)
3826 {
3827 if (!new_tree)
3828 new_tree = copy_node (exp);
3829 TREE_OPERAND (new_tree, i) = new_op;
3830 }
3831 }
3832
3833 if (new_tree)
3834 {
3835 new_tree = fold (new_tree);
3836 if (TREE_CODE (new_tree) == CALL_EXPR)
3837 process_call_operands (new_tree);
3838 }
3839 else
3840 return exp;
3841 }
3842 break;
3843
3844 default:
3845 gcc_unreachable ();
3846 }
3847
3848 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
3849
3850 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
3851 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
3852
3853 return new_tree;
3854 }
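
/* Illustrative sketch (editorial example, not part of the original file):
   given a size expression SIZE that refers, through a PLACEHOLDER_EXPR,
   to a FIELD_DECL LEN_FIELD of some record type,

     tree fixed = SUBSTITUTE_IN_EXPR (size, len_field,
                                      build_int_cst (sizetype, 16));

   yields a folded copy of SIZE with every such reference replaced by
   the constant 16; SIZE itself is returned unchanged when nothing was
   substituted.  SIZE and LEN_FIELD are assumed names.  */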
3855
3856 /* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
3857 for it within OBJ, a tree that is an object or a chain of references. */
3858
3859 tree
3860 substitute_placeholder_in_expr (tree exp, tree obj)
3861 {
3862 enum tree_code code = TREE_CODE (exp);
3863 tree op0, op1, op2, op3;
3864 tree new_tree;
3865
3866 /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
3867 in the chain of OBJ. */
3868 if (code == PLACEHOLDER_EXPR)
3869 {
3870 tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
3871 tree elt;
3872
3873 for (elt = obj; elt != 0;
3874 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3875 || TREE_CODE (elt) == COND_EXPR)
3876 ? TREE_OPERAND (elt, 1)
3877 : (REFERENCE_CLASS_P (elt)
3878 || UNARY_CLASS_P (elt)
3879 || BINARY_CLASS_P (elt)
3880 || VL_EXP_CLASS_P (elt)
3881 || EXPRESSION_CLASS_P (elt))
3882 ? TREE_OPERAND (elt, 0) : 0))
3883 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
3884 return elt;
3885
3886 for (elt = obj; elt != 0;
3887 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
3888 || TREE_CODE (elt) == COND_EXPR)
3889 ? TREE_OPERAND (elt, 1)
3890 : (REFERENCE_CLASS_P (elt)
3891 || UNARY_CLASS_P (elt)
3892 || BINARY_CLASS_P (elt)
3893 || VL_EXP_CLASS_P (elt)
3894 || EXPRESSION_CLASS_P (elt))
3895 ? TREE_OPERAND (elt, 0) : 0))
3896 if (POINTER_TYPE_P (TREE_TYPE (elt))
3897 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
3898 == need_type))
3899 return fold_build1 (INDIRECT_REF, need_type, elt);
3900
3901 /* If we didn't find it, return the original PLACEHOLDER_EXPR. If it
3902 survives until RTL generation, there will be an error. */
3903 return exp;
3904 }
3905
3906 /* TREE_LIST is special because we need to look at TREE_VALUE
3907 and TREE_CHAIN, not TREE_OPERANDS. */
3908 else if (code == TREE_LIST)
3909 {
3910 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
3911 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
3912 if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
3913 return exp;
3914
3915 return tree_cons (TREE_PURPOSE (exp), op1, op0);
3916 }
3917 else
3918 switch (TREE_CODE_CLASS (code))
3919 {
3920 case tcc_constant:
3921 case tcc_declaration:
3922 return exp;
3923
3924 case tcc_exceptional:
3925 case tcc_unary:
3926 case tcc_binary:
3927 case tcc_comparison:
3928 case tcc_expression:
3929 case tcc_reference:
3930 case tcc_statement:
3931 switch (TREE_CODE_LENGTH (code))
3932 {
3933 case 0:
3934 return exp;
3935
3936 case 1:
3937 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3938 if (op0 == TREE_OPERAND (exp, 0))
3939 return exp;
3940
3941 new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
3942 break;
3943
3944 case 2:
3945 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3946 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3947
3948 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3949 return exp;
3950
3951 new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
3952 break;
3953
3954 case 3:
3955 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3956 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3957 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3958
3959 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3960 && op2 == TREE_OPERAND (exp, 2))
3961 return exp;
3962
3963 new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
3964 break;
3965
3966 case 4:
3967 op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
3968 op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
3969 op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
3970 op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3), obj);
3971
3972 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
3973 && op2 == TREE_OPERAND (exp, 2)
3974 && op3 == TREE_OPERAND (exp, 3))
3975 return exp;
3976
3977 new_tree
3978 = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
3979 break;
3980
3981 default:
3982 gcc_unreachable ();
3983 }
3984 break;
3985
3986 case tcc_vl_exp:
3987 {
3988 int i;
3989
3990 new_tree = NULL_TREE;
3991
3992 for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
3993 {
3994 tree op = TREE_OPERAND (exp, i);
3995 tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
3996 if (new_op != op)
3997 {
3998 if (!new_tree)
3999 new_tree = copy_node (exp);
4000 TREE_OPERAND (new_tree, i) = new_op;
4001 }
4002 }
4003
4004 if (new_tree)
4005 {
4006 new_tree = fold (new_tree);
4007 if (TREE_CODE (new_tree) == CALL_EXPR)
4008 process_call_operands (new_tree);
4009 }
4010 else
4011 return exp;
4012 }
4013 break;
4014
4015 default:
4016 gcc_unreachable ();
4017 }
4018
4019 TREE_READONLY (new_tree) |= TREE_READONLY (exp);
4020
4021 if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
4022 TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);
4023
4024 return new_tree;
4025 }
4026 \f
4027
4028 /* Subroutine of stabilize_reference; this is called for subtrees of
4029 references. Any expression with side-effects must be put in a SAVE_EXPR
4030 to ensure that it is only evaluated once.
4031
4032 We don't put SAVE_EXPR nodes around everything, because assigning very
4033 simple expressions to temporaries causes us to miss good opportunities
4034 for optimizations. Among other things, the opportunity to fold in the
4035 addition of a constant into an addressing mode often gets lost, e.g.
4036 "y[i+1] += x;". In general, we take the approach that we should not make
4037 an assignment unless we are forced into it - i.e., that any non-side effect
4038 operator should be allowed, and that cse should take care of coalescing
4039 multiple utterances of the same expression should that prove fruitful. */
4040
4041 static tree
4042 stabilize_reference_1 (tree e)
4043 {
4044 tree result;
4045 enum tree_code code = TREE_CODE (e);
4046
4047 /* We cannot ignore const expressions, because such an expression might
4048 be a reference to a const array whose index contains side effects. But
4049 we can ignore things that are actually constant or that have already
4050 been handled by this function. */
4051
4052 if (tree_invariant_p (e))
4053 return e;
4054
4055 switch (TREE_CODE_CLASS (code))
4056 {
4057 case tcc_exceptional:
4058 case tcc_type:
4059 case tcc_declaration:
4060 case tcc_comparison:
4061 case tcc_statement:
4062 case tcc_expression:
4063 case tcc_reference:
4064 case tcc_vl_exp:
4065 /* If the expression has side-effects, then encase it in a SAVE_EXPR
4066 so that it will only be evaluated once. */
4067 /* The reference (r) and comparison (<) classes could be handled as
4068 below, but it is generally faster to only evaluate them once. */
4069 if (TREE_SIDE_EFFECTS (e))
4070 return save_expr (e);
4071 return e;
4072
4073 case tcc_constant:
4074 /* Constants need no processing. In fact, we should never reach
4075 here. */
4076 return e;
4077
4078 case tcc_binary:
4079 /* Division is slow and tends to be compiled with jumps,
4080 especially the division by powers of 2 that is often
4081 found inside of an array reference. So do it just once. */
4082 if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
4083 || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
4084 || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
4085 || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
4086 return save_expr (e);
4087 /* Recursively stabilize each operand. */
4088 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
4089 stabilize_reference_1 (TREE_OPERAND (e, 1)));
4090 break;
4091
4092 case tcc_unary:
4093 /* Recursively stabilize each operand. */
4094 result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
4095 break;
4096
4097 default:
4098 gcc_unreachable ();
4099 }
4100
4101 TREE_TYPE (result) = TREE_TYPE (e);
4102 TREE_READONLY (result) = TREE_READONLY (e);
4103 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
4104 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
4105
4106 return result;
4107 }
4108
4109 /* Stabilize a reference so that we can use it any number of times
4110 without causing its operands to be evaluated more than once.
4111 Returns the stabilized reference. This works by means of save_expr,
4112 so see the caveats in the comments about save_expr.
4113
4114 Also allows conversion expressions whose operands are references.
4115 Any other kind of expression is returned unchanged. */
4116
4117 tree
4118 stabilize_reference (tree ref)
4119 {
4120 tree result;
4121 enum tree_code code = TREE_CODE (ref);
4122
4123 switch (code)
4124 {
4125 case VAR_DECL:
4126 case PARM_DECL:
4127 case RESULT_DECL:
4128 /* No action is needed in this case. */
4129 return ref;
4130
4131 CASE_CONVERT:
4132 case FLOAT_EXPR:
4133 case FIX_TRUNC_EXPR:
4134 result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
4135 break;
4136
4137 case INDIRECT_REF:
4138 result = build_nt (INDIRECT_REF,
4139 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
4140 break;
4141
4142 case COMPONENT_REF:
4143 result = build_nt (COMPONENT_REF,
4144 stabilize_reference (TREE_OPERAND (ref, 0)),
4145 TREE_OPERAND (ref, 1), NULL_TREE);
4146 break;
4147
4148 case BIT_FIELD_REF:
4149 result = build_nt (BIT_FIELD_REF,
4150 stabilize_reference (TREE_OPERAND (ref, 0)),
4151 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
4152 break;
4153
4154 case ARRAY_REF:
4155 result = build_nt (ARRAY_REF,
4156 stabilize_reference (TREE_OPERAND (ref, 0)),
4157 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4158 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4159 break;
4160
4161 case ARRAY_RANGE_REF:
4162 result = build_nt (ARRAY_RANGE_REF,
4163 stabilize_reference (TREE_OPERAND (ref, 0)),
4164 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
4165 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
4166 break;
4167
4168 case COMPOUND_EXPR:
4169 /* We cannot wrap the first expression in a SAVE_EXPR, as then
4170 it wouldn't be ignored. This matters when dealing with
4171 volatiles. */
4172 return stabilize_reference_1 (ref);
4173
4174 /* If arg isn't a kind of lvalue we recognize, make no change.
4175 Caller should recognize the error for an invalid lvalue. */
4176 default:
4177 return ref;
4178
4179 case ERROR_MARK:
4180 return error_mark_node;
4181 }
4182
4183 TREE_TYPE (result) = TREE_TYPE (ref);
4184 TREE_READONLY (result) = TREE_READONLY (ref);
4185 TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
4186 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
4187
4188 return result;
4189 }
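
/* Usage sketch (illustrative only; REF and RHS are hypothetical trees):
   when the same reference must appear on both sides of an assignment,
   stabilize it first so its operands are evaluated only once, e.g.

     tree safe = stabilize_reference (ref);
     tree stmt = build2 (MODIFY_EXPR, TREE_TYPE (safe), safe,
                         build2 (PLUS_EXPR, TREE_TYPE (safe), safe, rhs));

   Any side-effecting subexpression of REF, such as an index with a
   post-increment, ends up wrapped in a SAVE_EXPR by the helpers above.  */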
4190 \f
4191 /* Low-level constructors for expressions. */
4192
4193 /* A helper function for build1 and constant folders. Set TREE_CONSTANT,
4194 and TREE_SIDE_EFFECTS for an ADDR_EXPR. */
4195
4196 void
4197 recompute_tree_invariant_for_addr_expr (tree t)
4198 {
4199 tree node;
4200 bool tc = true, se = false;
4201
4202 /* We started out assuming this address is both invariant and constant and
4203 that it has no side effects. Now go down any handled components and see if
4204 any of them involve offsets that are either non-constant or non-invariant.
4205 Also check for side-effects.
4206
4207 ??? Note that this code makes no attempt to deal with the case where
4208 taking the address of something causes a copy due to misalignment. */
4209
4210 #define UPDATE_FLAGS(NODE) \
4211 do { tree _node = (NODE); \
4212 if (_node && !TREE_CONSTANT (_node)) tc = false; \
4213 if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)
4214
4215 for (node = TREE_OPERAND (t, 0); handled_component_p (node);
4216 node = TREE_OPERAND (node, 0))
4217 {
4218 /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
4219 array reference (probably made temporarily by the G++ front end),
4220 so ignore all the operands. */
4221 if ((TREE_CODE (node) == ARRAY_REF
4222 || TREE_CODE (node) == ARRAY_RANGE_REF)
4223 && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
4224 {
4225 UPDATE_FLAGS (TREE_OPERAND (node, 1));
4226 if (TREE_OPERAND (node, 2))
4227 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4228 if (TREE_OPERAND (node, 3))
4229 UPDATE_FLAGS (TREE_OPERAND (node, 3));
4230 }
4231 /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
4232 FIELD_DECL, apparently. The G++ front end can put something else
4233 there, at least temporarily. */
4234 else if (TREE_CODE (node) == COMPONENT_REF
4235 && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
4236 {
4237 if (TREE_OPERAND (node, 2))
4238 UPDATE_FLAGS (TREE_OPERAND (node, 2));
4239 }
4240 }
4241
4242 node = lang_hooks.expr_to_decl (node, &tc, &se);
4243
4244 /* Now see what's inside. If it's an INDIRECT_REF, copy our properties from
4245 the address, since &(*a)->b is a form of addition. If it's a constant, the
4246 address is constant too. If it's a decl, its address is constant if the
4247 decl is static. Everything else is not constant and, furthermore,
4248 taking the address of a volatile variable is not volatile. */
4249 if (TREE_CODE (node) == INDIRECT_REF
4250 || TREE_CODE (node) == MEM_REF)
4251 UPDATE_FLAGS (TREE_OPERAND (node, 0));
4252 else if (CONSTANT_CLASS_P (node))
4253 ;
4254 else if (DECL_P (node))
4255 tc &= (staticp (node) != NULL_TREE);
4256 else
4257 {
4258 tc = false;
4259 se |= TREE_SIDE_EFFECTS (node);
4260 }
4261
4262
4263 TREE_CONSTANT (t) = tc;
4264 TREE_SIDE_EFFECTS (t) = se;
4265 #undef UPDATE_FLAGS
4266 }
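
/* Usage sketch (illustrative only; ADDR and NEW_BASE are hypothetical):
   callers that rewrite the operand of an existing ADDR_EXPR in place are
   expected to refresh its flags afterwards, e.g.

     TREE_OPERAND (addr, 0) = new_base;
     recompute_tree_invariant_for_addr_expr (addr);

   After the call, TREE_CONSTANT (addr) is set only if NEW_BASE is a
   constant or a static decl, per the rules above.  */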
4267
4268 /* Build an expression of code CODE, data type TYPE, and operands as
4269 specified. Expressions and reference nodes can be created this way.
4270 Constants, decls, types and misc nodes cannot be.
4271
4272 We define six non-variadic functions, from 0 to 5 arguments. This is
4273 enough for all extant tree codes. */
4274
4275 tree
4276 build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
4277 {
4278 tree t;
4279
4280 gcc_assert (TREE_CODE_LENGTH (code) == 0);
4281
4282 t = make_node_stat (code PASS_MEM_STAT);
4283 TREE_TYPE (t) = tt;
4284
4285 return t;
4286 }
4287
4288 tree
4289 build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
4290 {
4291 int length = sizeof (struct tree_exp);
4292 tree t;
4293
4294 record_node_allocation_statistics (code, length);
4295
4296 gcc_assert (TREE_CODE_LENGTH (code) == 1);
4297
4298 t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
4299
4300 memset (t, 0, sizeof (struct tree_common));
4301
4302 TREE_SET_CODE (t, code);
4303
4304 TREE_TYPE (t) = type;
4305 SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
4306 TREE_OPERAND (t, 0) = node;
4307 if (node && !TYPE_P (node))
4308 {
4309 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
4310 TREE_READONLY (t) = TREE_READONLY (node);
4311 }
4312
4313 if (TREE_CODE_CLASS (code) == tcc_statement)
4314 TREE_SIDE_EFFECTS (t) = 1;
4315 else switch (code)
4316 {
4317 case VA_ARG_EXPR:
4318 /* All of these have side-effects, no matter what their
4319 operands are. */
4320 TREE_SIDE_EFFECTS (t) = 1;
4321 TREE_READONLY (t) = 0;
4322 break;
4323
4324 case INDIRECT_REF:
4325 /* Whether a dereference is readonly has nothing to do with whether
4326 its operand is readonly. */
4327 TREE_READONLY (t) = 0;
4328 break;
4329
4330 case ADDR_EXPR:
4331 if (node)
4332 recompute_tree_invariant_for_addr_expr (t);
4333 break;
4334
4335 default:
4336 if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
4337 && node && !TYPE_P (node)
4338 && TREE_CONSTANT (node))
4339 TREE_CONSTANT (t) = 1;
4340 if (TREE_CODE_CLASS (code) == tcc_reference
4341 && node && TREE_THIS_VOLATILE (node))
4342 TREE_THIS_VOLATILE (t) = 1;
4343 break;
4344 }
4345
4346 return t;
4347 }
4348
4349 #define PROCESS_ARG(N) \
4350 do { \
4351 TREE_OPERAND (t, N) = arg##N; \
4352 if (arg##N && !TYPE_P (arg##N)) \
4353 { \
4354 if (TREE_SIDE_EFFECTS (arg##N)) \
4355 side_effects = 1; \
4356 if (!TREE_READONLY (arg##N) \
4357 && !CONSTANT_CLASS_P (arg##N)) \
4358 (void) (read_only = 0); \
4359 if (!TREE_CONSTANT (arg##N)) \
4360 (void) (constant = 0); \
4361 } \
4362 } while (0)
4363
4364 tree
4365 build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
4366 {
4367 bool constant, read_only, side_effects;
4368 tree t;
4369
4370 gcc_assert (TREE_CODE_LENGTH (code) == 2);
4371
4372 if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
4373 && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
4374 /* When sizetype precision doesn't match that of pointers
4375 we need to be able to build explicit extensions or truncations
4376 of the offset argument. */
4377 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
4378 gcc_assert (TREE_CODE (arg0) == INTEGER_CST
4379 && TREE_CODE (arg1) == INTEGER_CST);
4380
4381 if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
4382 gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
4383 && ptrofftype_p (TREE_TYPE (arg1)));
4384
4385 t = make_node_stat (code PASS_MEM_STAT);
4386 TREE_TYPE (t) = tt;
4387
4388 /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
4389 result based on those same flags for the arguments. But if the
4390 arguments aren't really even `tree' expressions, we shouldn't be trying
4391 to do this. */
4392
4393 /* Expressions without side effects may be constant if their
4394 arguments are as well. */
4395 constant = (TREE_CODE_CLASS (code) == tcc_comparison
4396 || TREE_CODE_CLASS (code) == tcc_binary);
4397 read_only = 1;
4398 side_effects = TREE_SIDE_EFFECTS (t);
4399
4400 PROCESS_ARG (0);
4401 PROCESS_ARG (1);
4402
4403 TREE_SIDE_EFFECTS (t) = side_effects;
4404 if (code == MEM_REF)
4405 {
4406 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4407 {
4408 tree o = TREE_OPERAND (arg0, 0);
4409 TREE_READONLY (t) = TREE_READONLY (o);
4410 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4411 }
4412 }
4413 else
4414 {
4415 TREE_READONLY (t) = read_only;
4416 TREE_CONSTANT (t) = constant;
4417 TREE_THIS_VOLATILE (t)
4418 = (TREE_CODE_CLASS (code) == tcc_reference
4419 && arg0 && TREE_THIS_VOLATILE (arg0));
4420 }
4421
4422 return t;
4423 }
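
/* Usage sketch (illustrative only; A, B, PTR and IDX are hypothetical):
   these entry points are normally reached through the build2/build3/...
   macros, e.g.

     tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
     tree adj = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
                        fold_convert (sizetype, idx));

   Note the POINTER_PLUS_EXPR offset must have a type accepted by
   ptrofftype_p, which the assertion above enforces.  */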
4424
4425
4426 tree
4427 build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4428 tree arg2 MEM_STAT_DECL)
4429 {
4430 bool constant, read_only, side_effects;
4431 tree t;
4432
4433 gcc_assert (TREE_CODE_LENGTH (code) == 3);
4434 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4435
4436 t = make_node_stat (code PASS_MEM_STAT);
4437 TREE_TYPE (t) = tt;
4438
4439 read_only = 1;
4440
4441 /* As a special exception, if COND_EXPR has NULL branches, we
4442 assume that it is a gimple statement and always consider
4443 it to have side effects. */
4444 if (code == COND_EXPR
4445 && tt == void_type_node
4446 && arg1 == NULL_TREE
4447 && arg2 == NULL_TREE)
4448 side_effects = true;
4449 else
4450 side_effects = TREE_SIDE_EFFECTS (t);
4451
4452 PROCESS_ARG (0);
4453 PROCESS_ARG (1);
4454 PROCESS_ARG (2);
4455
4456 if (code == COND_EXPR)
4457 TREE_READONLY (t) = read_only;
4458
4459 TREE_SIDE_EFFECTS (t) = side_effects;
4460 TREE_THIS_VOLATILE (t)
4461 = (TREE_CODE_CLASS (code) == tcc_reference
4462 && arg0 && TREE_THIS_VOLATILE (arg0));
4463
4464 return t;
4465 }
4466
4467 tree
4468 build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4469 tree arg2, tree arg3 MEM_STAT_DECL)
4470 {
4471 bool constant, read_only, side_effects;
4472 tree t;
4473
4474 gcc_assert (TREE_CODE_LENGTH (code) == 4);
4475
4476 t = make_node_stat (code PASS_MEM_STAT);
4477 TREE_TYPE (t) = tt;
4478
4479 side_effects = TREE_SIDE_EFFECTS (t);
4480
4481 PROCESS_ARG (0);
4482 PROCESS_ARG (1);
4483 PROCESS_ARG (2);
4484 PROCESS_ARG (3);
4485
4486 TREE_SIDE_EFFECTS (t) = side_effects;
4487 TREE_THIS_VOLATILE (t)
4488 = (TREE_CODE_CLASS (code) == tcc_reference
4489 && arg0 && TREE_THIS_VOLATILE (arg0));
4490
4491 return t;
4492 }
4493
4494 tree
4495 build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
4496 tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
4497 {
4498 bool constant, read_only, side_effects;
4499 tree t;
4500
4501 gcc_assert (TREE_CODE_LENGTH (code) == 5);
4502
4503 t = make_node_stat (code PASS_MEM_STAT);
4504 TREE_TYPE (t) = tt;
4505
4506 side_effects = TREE_SIDE_EFFECTS (t);
4507
4508 PROCESS_ARG (0);
4509 PROCESS_ARG (1);
4510 PROCESS_ARG (2);
4511 PROCESS_ARG (3);
4512 PROCESS_ARG (4);
4513
4514 TREE_SIDE_EFFECTS (t) = side_effects;
4515 if (code == TARGET_MEM_REF)
4516 {
4517 if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
4518 {
4519 tree o = TREE_OPERAND (arg0, 0);
4520 TREE_READONLY (t) = TREE_READONLY (o);
4521 TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
4522 }
4523 }
4524 else
4525 TREE_THIS_VOLATILE (t)
4526 = (TREE_CODE_CLASS (code) == tcc_reference
4527 && arg0 && TREE_THIS_VOLATILE (arg0));
4528
4529 return t;
4530 }
4531
4532 /* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
4533 on the pointer PTR. */
4534
4535 tree
4536 build_simple_mem_ref_loc (location_t loc, tree ptr)
4537 {
4538 HOST_WIDE_INT offset = 0;
4539 tree ptype = TREE_TYPE (ptr);
4540 tree tem;
4541 /* For convenience allow addresses that collapse to a simple base
4542 and offset. */
4543 if (TREE_CODE (ptr) == ADDR_EXPR
4544 && (handled_component_p (TREE_OPERAND (ptr, 0))
4545 || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
4546 {
4547 ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
4548 gcc_assert (ptr);
4549 ptr = build_fold_addr_expr (ptr);
4550 gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
4551 }
4552 tem = build2 (MEM_REF, TREE_TYPE (ptype),
4553 ptr, build_int_cst (ptype, offset));
4554 SET_EXPR_LOCATION (tem, loc);
4555 return tem;
4556 }
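
/* Usage sketch (illustrative only; PTR is a hypothetical pointer tree):

     tree deref = build_simple_mem_ref (ptr);

   via the build_simple_mem_ref macro this yields a MEM_REF with a zero
   offset, i.e. roughly the middle-end spelling of *ptr.  */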
4557
4558 /* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T. */
4559
4560 offset_int
4561 mem_ref_offset (const_tree t)
4562 {
4563 return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
4564 }
4565
4566 /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
4567 offsetted by OFFSET units. */
4568
4569 tree
4570 build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
4571 {
4572 tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
4573 build_fold_addr_expr (base),
4574 build_int_cst (ptr_type_node, offset));
4575 tree addr = build1 (ADDR_EXPR, type, ref);
4576 recompute_tree_invariant_for_addr_expr (addr);
4577 return addr;
4578 }
4579
4580 /* Similar except don't specify the TREE_TYPE
4581 and leave the TREE_SIDE_EFFECTS as 0.
4582 It is permissible for arguments to be null,
4583 or even garbage if their values do not matter. */
4584
4585 tree
4586 build_nt (enum tree_code code, ...)
4587 {
4588 tree t;
4589 int length;
4590 int i;
4591 va_list p;
4592
4593 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
4594
4595 va_start (p, code);
4596
4597 t = make_node (code);
4598 length = TREE_CODE_LENGTH (code);
4599
4600 for (i = 0; i < length; i++)
4601 TREE_OPERAND (t, i) = va_arg (p, tree);
4602
4603 va_end (p);
4604 return t;
4605 }
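
/* Usage sketch (illustrative only; OBJ and FIELD are hypothetical):
   build_nt leaves TREE_TYPE and the flag bits alone, so callers such as
   stabilize_reference above fill them in afterwards, e.g.

     tree t = build_nt (COMPONENT_REF, obj, field, NULL_TREE);
     TREE_TYPE (t) = TREE_TYPE (field);  */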
4606
4607 /* Similar to build_nt, but for creating a CALL_EXPR object with a
4608 tree vec. */
4609
4610 tree
4611 build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
4612 {
4613 tree ret, t;
4614 unsigned int ix;
4615
4616 ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
4617 CALL_EXPR_FN (ret) = fn;
4618 CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
4619 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
4620 CALL_EXPR_ARG (ret, ix) = t;
4621 return ret;
4622 }
4623 \f
4624 /* Create a DECL_... node of code CODE, name NAME and data type TYPE.
4625 We do NOT enter this node in any sort of symbol table.
4626
4627 LOC is the location of the decl.
4628
4629 layout_decl is used to set up the decl's storage layout.
4630 Other slots are initialized to 0 or null pointers. */
4631
4632 tree
4633 build_decl_stat (location_t loc, enum tree_code code, tree name,
4634 tree type MEM_STAT_DECL)
4635 {
4636 tree t;
4637
4638 t = make_node_stat (code PASS_MEM_STAT);
4639 DECL_SOURCE_LOCATION (t) = loc;
4640
4641 /* if (type == error_mark_node)
4642 type = integer_type_node; */
4643 /* That is not done, deliberately, so that having error_mark_node
4644 as the type can suppress useless errors in the use of this variable. */
4645
4646 DECL_NAME (t) = name;
4647 TREE_TYPE (t) = type;
4648
4649 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
4650 layout_decl (t, 0);
4651
4652 return t;
4653 }
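
/* Usage sketch (illustrative only; the decl name is hypothetical):

     tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
                            get_identifier ("tmp"), integer_type_node);

   For VAR_DECL, PARM_DECL and RESULT_DECL the storage layout is computed
   immediately, as described above.  */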
4654
4655 /* Builds and returns function declaration with NAME and TYPE. */
4656
4657 tree
4658 build_fn_decl (const char *name, tree type)
4659 {
4660 tree id = get_identifier (name);
4661 tree decl = build_decl (input_location, FUNCTION_DECL, id, type);
4662
4663 DECL_EXTERNAL (decl) = 1;
4664 TREE_PUBLIC (decl) = 1;
4665 DECL_ARTIFICIAL (decl) = 1;
4666 TREE_NOTHROW (decl) = 1;
4667
4668 return decl;
4669 }
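
/* Usage sketch (illustrative only; the helper name is made up):

     tree fntype = build_function_type_list (void_type_node, ptr_type_node,
                                             NULL_TREE);
     tree decl = build_fn_decl ("__example_runtime_helper", fntype);

   The result is an external, public, artificial, nothrow FUNCTION_DECL
   suitable for emitting calls to a runtime routine.  */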
4670
4671 vec<tree, va_gc> *all_translation_units;
4672
4673 /* Builds a new translation-unit decl with name NAME, queues it in the
4674 global list of translation-unit decls and returns it. */
4675
4676 tree
4677 build_translation_unit_decl (tree name)
4678 {
4679 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
4680 name, NULL_TREE);
4681 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
4682 vec_safe_push (all_translation_units, tu);
4683 return tu;
4684 }
4685
4686 \f
4687 /* BLOCK nodes are used to represent the structure of binding contours
4688 and declarations, once those contours have been exited and their contents
4689 compiled. This information is used for outputting debugging info. */
4690
4691 tree
4692 build_block (tree vars, tree subblocks, tree supercontext, tree chain)
4693 {
4694 tree block = make_node (BLOCK);
4695
4696 BLOCK_VARS (block) = vars;
4697 BLOCK_SUBBLOCKS (block) = subblocks;
4698 BLOCK_SUPERCONTEXT (block) = supercontext;
4699 BLOCK_CHAIN (block) = chain;
4700 return block;
4701 }
4702
4703 \f
4704 /* Like SET_EXPR_LOCATION, but make sure the tree can have a location.
4705
4706 LOC is the location to use in tree T. */
4707
4708 void
4709 protected_set_expr_location (tree t, location_t loc)
4710 {
4711 if (CAN_HAVE_LOCATION_P (t))
4712 SET_EXPR_LOCATION (t, loc);
4713 }
4714 \f
4715 /* Return a declaration like DDECL except that its DECL_ATTRIBUTES
4716 is ATTRIBUTE. */
4717
4718 tree
4719 build_decl_attribute_variant (tree ddecl, tree attribute)
4720 {
4721 DECL_ATTRIBUTES (ddecl) = attribute;
4722 return ddecl;
4723 }
4724
4725 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4726 is ATTRIBUTE and its qualifiers are QUALS.
4727
4728 Record such modified types already made so we don't make duplicates. */
4729
4730 tree
4731 build_type_attribute_qual_variant (tree ttype, tree attribute, int quals)
4732 {
4733 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute))
4734 {
4735 inchash::hash hstate;
4736 tree ntype;
4737 int i;
4738 tree t;
4739 enum tree_code code = TREE_CODE (ttype);
4740
4741 /* Building a distinct copy of a tagged type is inappropriate; it
4742 causes breakage in code that expects there to be a one-to-one
4743 relationship between a struct and its fields.
4744 build_duplicate_type is another solution (as used in
4745 handle_transparent_union_attribute), but that doesn't play well
4746 with the stronger C++ type identity model. */
4747 if (TREE_CODE (ttype) == RECORD_TYPE
4748 || TREE_CODE (ttype) == UNION_TYPE
4749 || TREE_CODE (ttype) == QUAL_UNION_TYPE
4750 || TREE_CODE (ttype) == ENUMERAL_TYPE)
4751 {
4752 warning (OPT_Wattributes,
4753 "ignoring attributes applied to %qT after definition",
4754 TYPE_MAIN_VARIANT (ttype));
4755 return build_qualified_type (ttype, quals);
4756 }
4757
4758 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED);
4759 ntype = build_distinct_type_copy (ttype);
4760
4761 TYPE_ATTRIBUTES (ntype) = attribute;
4762
4763 hstate.add_int (code);
4764 if (TREE_TYPE (ntype))
4765 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype)));
4766 attribute_hash_list (attribute, hstate);
4767
4768 switch (TREE_CODE (ntype))
4769 {
4770 case FUNCTION_TYPE:
4771 type_hash_list (TYPE_ARG_TYPES (ntype), hstate);
4772 break;
4773 case ARRAY_TYPE:
4774 if (TYPE_DOMAIN (ntype))
4775 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype)));
4776 break;
4777 case INTEGER_TYPE:
4778 t = TYPE_MAX_VALUE (ntype);
4779 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
4780 hstate.add_object (TREE_INT_CST_ELT (t, i));
4781 break;
4782 case REAL_TYPE:
4783 case FIXED_POINT_TYPE:
4784 {
4785 unsigned int precision = TYPE_PRECISION (ntype);
4786 hstate.add_object (precision);
4787 }
4788 break;
4789 default:
4790 break;
4791 }
4792
4793 ntype = type_hash_canon (hstate.end(), ntype);
4794
4795 /* If the target-dependent attributes make NTYPE different from
4796 its canonical type, we will need to use structural equality
4797 checks for this type. */
4798 if (TYPE_STRUCTURAL_EQUALITY_P (ttype)
4799 || !comp_type_attributes (ntype, ttype))
4800 SET_TYPE_STRUCTURAL_EQUALITY (ntype);
4801 else if (TYPE_CANONICAL (ntype) == ntype)
4802 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype);
4803
4804 ttype = build_qualified_type (ntype, quals);
4805 }
4806 else if (TYPE_QUALS (ttype) != quals)
4807 ttype = build_qualified_type (ttype, quals);
4808
4809 return ttype;
4810 }
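
/* Usage sketch (illustrative only; TTYPE is a hypothetical non-tagged type
   and the attribute chosen is arbitrary):

     tree attrs = tree_cons (get_identifier ("may_alias"), NULL_TREE,
                             TYPE_ATTRIBUTES (ttype));
     tree variant = build_type_attribute_qual_variant (ttype, attrs,
                                                       TYPE_QUALS (ttype));

   For RECORD_TYPE and friends the new attribute is ignored with a warning,
   as handled above.  */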
4811
4812 /* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are
4813 the same. */
4814
4815 static bool
4816 omp_declare_simd_clauses_equal (tree clauses1, tree clauses2)
4817 {
4818 tree cl1, cl2;
4819 for (cl1 = clauses1, cl2 = clauses2;
4820 cl1 && cl2;
4821 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2))
4822 {
4823 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2))
4824 return false;
4825 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN)
4826 {
4827 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1),
4828 OMP_CLAUSE_DECL (cl2)) != 1)
4829 return false;
4830 }
4831 switch (OMP_CLAUSE_CODE (cl1))
4832 {
4833 case OMP_CLAUSE_ALIGNED:
4834 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1),
4835 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1)
4836 return false;
4837 break;
4838 case OMP_CLAUSE_LINEAR:
4839 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1),
4840 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1)
4841 return false;
4842 break;
4843 case OMP_CLAUSE_SIMDLEN:
4844 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1),
4845 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1)
4846 return false;
4847 default:
4848 break;
4849 }
4850 }
4851 return true;
4852 }
4853
4854 /* Compare two constructor-element-type constants. Return true if the
4855 lists are known to be equal; otherwise return false. */
4856
4857 static bool
4858 simple_cst_list_equal (const_tree l1, const_tree l2)
4859 {
4860 while (l1 != NULL_TREE && l2 != NULL_TREE)
4861 {
4862 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1)
4863 return false;
4864
4865 l1 = TREE_CHAIN (l1);
4866 l2 = TREE_CHAIN (l2);
4867 }
4868
4869 return l1 == l2;
4870 }
4871
4872 /* Compare two attributes for their value identity. Return true if the
4873 attribute values are known to be equal; otherwise return false.
4874 */
4875
4876 static bool
4877 attribute_value_equal (const_tree attr1, const_tree attr2)
4878 {
4879 if (TREE_VALUE (attr1) == TREE_VALUE (attr2))
4880 return true;
4881
4882 if (TREE_VALUE (attr1) != NULL_TREE
4883 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST
4884 && TREE_VALUE (attr2) != NULL
4885 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST)
4886 return (simple_cst_list_equal (TREE_VALUE (attr1),
4887 TREE_VALUE (attr2)) == 1);
4888
4889 if ((flag_openmp || flag_openmp_simd)
4890 && TREE_VALUE (attr1) && TREE_VALUE (attr2)
4891 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE
4892 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE)
4893 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1),
4894 TREE_VALUE (attr2));
4895
4896 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1);
4897 }
4898
4899 /* Return 0 if the attributes for two types are incompatible, 1 if they
4900 are compatible, and 2 if they are nearly compatible (which causes a
4901 warning to be generated). */
4902 int
4903 comp_type_attributes (const_tree type1, const_tree type2)
4904 {
4905 const_tree a1 = TYPE_ATTRIBUTES (type1);
4906 const_tree a2 = TYPE_ATTRIBUTES (type2);
4907 const_tree a;
4908
4909 if (a1 == a2)
4910 return 1;
4911 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a))
4912 {
4913 const struct attribute_spec *as;
4914 const_tree attr;
4915
4916 as = lookup_attribute_spec (get_attribute_name (a));
4917 if (!as || as->affects_type_identity == false)
4918 continue;
4919
4920 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2));
4921 if (!attr || !attribute_value_equal (a, attr))
4922 break;
4923 }
4924 if (!a)
4925 {
4926 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a))
4927 {
4928 const struct attribute_spec *as;
4929
4930 as = lookup_attribute_spec (get_attribute_name (a));
4931 if (!as || as->affects_type_identity == false)
4932 continue;
4933
4934 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1)))
4935 break;
4936 /* We don't need to compare trees again, as we did this
4937 already in the first loop. */
4938 }
4939 /* All attributes affecting type identity are equal, so
4940 there is no need to call the target hook for comparison. */
4941 if (!a)
4942 return 1;
4943 }
4944 /* As some type combinations (such as the default calling convention) might
4945 still be compatible, we have to call the target hook to get the final result. */
4946 return targetm.comp_type_attributes (type1, type2);
4947 }
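
/* Usage sketch (illustrative only; the diagnostic call is hypothetical):

     if (comp_type_attributes (type1, type2) == 0)
       reject_incompatible_types (type1, type2);

   The result is the tri-state value documented above, so callers should
   not treat it as a plain boolean.  */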
4948
4949 /* Return a type like TTYPE except that its TYPE_ATTRIBUTE
4950 is ATTRIBUTE.
4951
4952 Record such modified types already made so we don't make duplicates. */
4953
4954 tree
4955 build_type_attribute_variant (tree ttype, tree attribute)
4956 {
4957 return build_type_attribute_qual_variant (ttype, attribute,
4958 TYPE_QUALS (ttype));
4959 }
4960
4961
4962 /* Reset the expression *EXPR_P, a size or position.
4963
4964 ??? We could reset all non-constant sizes or positions. But it's cheap
4965 enough to not do so and refrain from adding workarounds to dwarf2out.c.
4966
4967 We need to reset self-referential sizes or positions because they cannot
4968 be gimplified and thus can contain a CALL_EXPR after the gimplification
4969 is finished, which will run afoul of LTO streaming. And they need to be
4970 reset to something essentially dummy but not constant, so as to preserve
4971 the properties of the object they are attached to. */
4972
4973 static inline void
4974 free_lang_data_in_one_sizepos (tree *expr_p)
4975 {
4976 tree expr = *expr_p;
4977 if (CONTAINS_PLACEHOLDER_P (expr))
4978 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr));
4979 }
4980
4981
4982 /* Reset all the fields in a binfo node BINFO. We only keep
4983 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */
4984
4985 static void
4986 free_lang_data_in_binfo (tree binfo)
4987 {
4988 unsigned i;
4989 tree t;
4990
4991 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
4992
4993 BINFO_VIRTUALS (binfo) = NULL_TREE;
4994 BINFO_BASE_ACCESSES (binfo) = NULL;
4995 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
4996 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
4997
4998 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
4999 free_lang_data_in_binfo (t);
5000 }
5001
5002
5003 /* Reset all language specific information still present in TYPE. */
5004
5005 static void
5006 free_lang_data_in_type (tree type)
5007 {
5008 gcc_assert (TYPE_P (type));
5009
5010 /* Give the FE a chance to remove its own data first. */
5011 lang_hooks.free_lang_data (type);
5012
5013 TREE_LANG_FLAG_0 (type) = 0;
5014 TREE_LANG_FLAG_1 (type) = 0;
5015 TREE_LANG_FLAG_2 (type) = 0;
5016 TREE_LANG_FLAG_3 (type) = 0;
5017 TREE_LANG_FLAG_4 (type) = 0;
5018 TREE_LANG_FLAG_5 (type) = 0;
5019 TREE_LANG_FLAG_6 (type) = 0;
5020
5021 if (TREE_CODE (type) == FUNCTION_TYPE)
5022 {
5023 /* Remove the const and volatile qualifiers from arguments. The
5024 C++ front end removes them, but the C front end does not,
5025 leading to false ODR violation errors when merging two
5026 instances of the same function signature compiled by
5027 different front ends. */
5028 tree p;
5029
5030 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p))
5031 {
5032 tree arg_type = TREE_VALUE (p);
5033
5034 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type))
5035 {
5036 int quals = TYPE_QUALS (arg_type)
5037 & ~TYPE_QUAL_CONST
5038 & ~TYPE_QUAL_VOLATILE;
5039 TREE_VALUE (p) = build_qualified_type (arg_type, quals);
5040 free_lang_data_in_type (TREE_VALUE (p));
5041 }
5042 }
5043 }
5044
5045 /* Remove members that are not actually FIELD_DECLs from the field
5046 list of an aggregate. These occur in C++. */
5047 if (RECORD_OR_UNION_TYPE_P (type))
5048 {
5049 tree prev, member;
5050
5051 /* Note that TYPE_FIELDS can be shared across distinct
5052 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is
5053 to be removed, we cannot set its TREE_CHAIN to NULL.
5054 Otherwise, we would not be able to find all the other fields
5055 in the other instances of this TREE_TYPE.
5056
5057 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */
5058 prev = NULL_TREE;
5059 member = TYPE_FIELDS (type);
5060 while (member)
5061 {
5062 if (TREE_CODE (member) == FIELD_DECL
5063 || TREE_CODE (member) == TYPE_DECL)
5064 {
5065 if (prev)
5066 TREE_CHAIN (prev) = member;
5067 else
5068 TYPE_FIELDS (type) = member;
5069 prev = member;
5070 }
5071
5072 member = TREE_CHAIN (member);
5073 }
5074
5075 if (prev)
5076 TREE_CHAIN (prev) = NULL_TREE;
5077 else
5078 TYPE_FIELDS (type) = NULL_TREE;
5079
5080 TYPE_METHODS (type) = NULL_TREE;
5081 if (TYPE_BINFO (type))
5082 {
5083 free_lang_data_in_binfo (TYPE_BINFO (type));
5084 if ((!BINFO_VTABLE (TYPE_BINFO (type))
5085 || !flag_devirtualize)
5086 && (!BINFO_N_BASE_BINFOS (TYPE_BINFO (type))
5087 || debug_info_level != DINFO_LEVEL_NONE))
5088 TYPE_BINFO (type) = NULL;
5089 }
5090 }
5091 else
5092 {
5093 /* For non-aggregate types, clear out the language slot (which
5094 overloads TYPE_BINFO). */
5095 TYPE_LANG_SLOT_1 (type) = NULL_TREE;
5096
5097 if (INTEGRAL_TYPE_P (type)
5098 || SCALAR_FLOAT_TYPE_P (type)
5099 || FIXED_POINT_TYPE_P (type))
5100 {
5101 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type));
5102 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type));
5103 }
5104 }
5105
5106 free_lang_data_in_one_sizepos (&TYPE_SIZE (type));
5107 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type));
5108
5109 if (TYPE_CONTEXT (type)
5110 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK)
5111 {
5112 tree ctx = TYPE_CONTEXT (type);
5113 do
5114 {
5115 ctx = BLOCK_SUPERCONTEXT (ctx);
5116 }
5117 while (ctx && TREE_CODE (ctx) == BLOCK);
5118 TYPE_CONTEXT (type) = ctx;
5119 }
5120 }
5121
5122
5123 /* Return true if DECL may need an assembler name to be set. */
5124
5125 static inline bool
5126 need_assembler_name_p (tree decl)
5127 {
5128 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule
5129 merging. */
5130 if (flag_lto_odr_type_mering
5131 && TREE_CODE (decl) == TYPE_DECL
5132 && DECL_NAME (decl)
5133 && decl == TYPE_NAME (TREE_TYPE (decl))
5134 && !is_lang_specific (TREE_TYPE (decl))
5135 && AGGREGATE_TYPE_P (TREE_TYPE (decl))
5136 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE)
5137 && !type_in_anonymous_namespace_p (TREE_TYPE (decl)))
5138 return !DECL_ASSEMBLER_NAME_SET_P (decl);
5139 /* Only FUNCTION_DECLs and VAR_DECLs are considered. */
5140 if (TREE_CODE (decl) != FUNCTION_DECL
5141 && TREE_CODE (decl) != VAR_DECL)
5142 return false;
5143
5144 /* If DECL already has its assembler name set, it does not need a
5145 new one. */
5146 if (!HAS_DECL_ASSEMBLER_NAME_P (decl)
5147 || DECL_ASSEMBLER_NAME_SET_P (decl))
5148 return false;
5149
5150 /* Abstract decls do not need an assembler name. */
5151 if (DECL_ABSTRACT_P (decl))
5152 return false;
5153
5154 /* For VAR_DECLs, only static, public and external symbols need an
5155 assembler name. */
5156 if (TREE_CODE (decl) == VAR_DECL
5157 && !TREE_STATIC (decl)
5158 && !TREE_PUBLIC (decl)
5159 && !DECL_EXTERNAL (decl))
5160 return false;
5161
5162 if (TREE_CODE (decl) == FUNCTION_DECL)
5163 {
5164 /* Do not set assembler name on builtins. Allow RTL expansion to
5165 decide whether to expand inline or via a regular call. */
5166 if (DECL_BUILT_IN (decl)
5167 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND)
5168 return false;
5169
5170 /* Functions represented in the callgraph need an assembler name. */
5171 if (cgraph_node::get (decl) != NULL)
5172 return true;
5173
5174 /* Unused and not public functions don't need an assembler name. */
5175 if (!TREE_USED (decl) && !TREE_PUBLIC (decl))
5176 return false;
5177 }
5178
5179 return true;
5180 }
5181
5182
5183 /* Reset all language specific information still present in symbol
5184 DECL. */
5185
5186 static void
5187 free_lang_data_in_decl (tree decl)
5188 {
5189 gcc_assert (DECL_P (decl));
5190
5191 /* Give the FE a chance to remove its own data first. */
5192 lang_hooks.free_lang_data (decl);
5193
5194 TREE_LANG_FLAG_0 (decl) = 0;
5195 TREE_LANG_FLAG_1 (decl) = 0;
5196 TREE_LANG_FLAG_2 (decl) = 0;
5197 TREE_LANG_FLAG_3 (decl) = 0;
5198 TREE_LANG_FLAG_4 (decl) = 0;
5199 TREE_LANG_FLAG_5 (decl) = 0;
5200 TREE_LANG_FLAG_6 (decl) = 0;
5201
5202 free_lang_data_in_one_sizepos (&DECL_SIZE (decl));
5203 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl));
5204 if (TREE_CODE (decl) == FIELD_DECL)
5205 {
5206 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl));
5207 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE)
5208 DECL_QUALIFIER (decl) = NULL_TREE;
5209 }
5210
5211 if (TREE_CODE (decl) == FUNCTION_DECL)
5212 {
5213 struct cgraph_node *node;
5214 if (!(node = cgraph_node::get (decl))
5215 || (!node->definition && !node->clones))
5216 {
5217 if (node)
5218 node->release_body ();
5219 else
5220 {
5221 release_function_body (decl);
5222 DECL_ARGUMENTS (decl) = NULL;
5223 DECL_RESULT (decl) = NULL;
5224 DECL_INITIAL (decl) = error_mark_node;
5225 }
5226 }
5227 if (gimple_has_body_p (decl))
5228 {
5229 tree t;
5230
5231 /* If DECL has a gimple body, then the context for its
5232 arguments must be DECL. Otherwise, it doesn't really
5233 matter, as we will not be emitting any code for DECL. In
5234 general, there may be other instances of DECL created by
5235 the front end and since PARM_DECLs are generally shared,
5236 their DECL_CONTEXT changes as the replicas of DECL are
5237 created. The only time where DECL_CONTEXT is important
5238 is for the FUNCTION_DECLs that have a gimple body (since
5239 the PARM_DECL will be used in the function's body). */
5240 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t))
5241 DECL_CONTEXT (t) = decl;
5242 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl))
5243 DECL_FUNCTION_SPECIFIC_TARGET (decl)
5244 = target_option_default_node;
5245 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl))
5246 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
5247 = optimization_default_node;
5248 }
5249
5250 /* DECL_SAVED_TREE holds the GENERIC representation for DECL.
5251 At this point, it is not needed anymore. */
5252 DECL_SAVED_TREE (decl) = NULL_TREE;
5253
5254 /* Clear the abstract origin if it refers to a method. Otherwise
5255 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the
5256 origin will not be output correctly. */
5257 if (DECL_ABSTRACT_ORIGIN (decl)
5258 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))
5259 && RECORD_OR_UNION_TYPE_P
5260 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl))))
5261 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE;
5262
5263 /* Sometimes the C++ frontend doesn't manage to transform a temporary
5264 DECL_VINDEX referring to itself into a vtable slot number as it
5265 should. Happens with functions that are copied and then forgotten
5266 about. Just clear it, it won't matter anymore. */
5267 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl)))
5268 DECL_VINDEX (decl) = NULL_TREE;
5269 }
5270 else if (TREE_CODE (decl) == VAR_DECL)
5271 {
5272 if ((DECL_EXTERNAL (decl)
5273 && (!TREE_STATIC (decl) || !TREE_READONLY (decl)))
5274 || (decl_function_context (decl) && !TREE_STATIC (decl)))
5275 DECL_INITIAL (decl) = NULL_TREE;
5276 }
5277 else if (TREE_CODE (decl) == TYPE_DECL
5278 || TREE_CODE (decl) == FIELD_DECL)
5279 DECL_INITIAL (decl) = NULL_TREE;
5280 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL
5281 && DECL_INITIAL (decl)
5282 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK)
5283 {
5284 /* Strip builtins from the translation-unit BLOCK. We still have targets
5285 without builtin_decl_explicit support, and builtins are shared
5286 nodes, so we can't use TREE_CHAIN in multiple lists. */
5287 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl));
5288 while (*nextp)
5289 {
5290 tree var = *nextp;
5291 if (TREE_CODE (var) == FUNCTION_DECL
5292 && DECL_BUILT_IN (var))
5293 *nextp = TREE_CHAIN (var);
5294 else
5295 nextp = &TREE_CHAIN (var);
5296 }
5297 }
5298 }
5299
5300
5301 /* Data used when collecting DECLs and TYPEs for language data removal. */
5302
5303 struct free_lang_data_d
5304 {
5305 /* Worklist to avoid excessive recursion. */
5306 vec<tree> worklist;
5307
5308 /* Set of traversed objects. Used to avoid duplicate visits. */
5309 hash_set<tree> *pset;
5310
5311 /* Array of symbols to process with free_lang_data_in_decl. */
5312 vec<tree> decls;
5313
5314 /* Array of types to process with free_lang_data_in_type. */
5315 vec<tree> types;
5316 };
5317
5318
5319 /* Save all language fields needed to generate proper debug information
5320 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */
5321
5322 static void
5323 save_debug_info_for_decl (tree t)
5324 {
5325 /*struct saved_debug_info_d *sdi;*/
5326
5327 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t));
5328
5329 /* FIXME. Partial implementation for saving debug info removed. */
5330 }
5331
5332
5333 /* Save all language fields needed to generate proper debug information
5334 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */
5335
5336 static void
5337 save_debug_info_for_type (tree t)
5338 {
5339 /*struct saved_debug_info_d *sdi;*/
5340
5341 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t));
5342
5343 /* FIXME. Partial implementation for saving debug info removed. */
5344 }
5345
5346
5347 /* Add type or decl T to one of the list of tree nodes that need their
5348 language data removed. The lists are held inside FLD. */
5349
5350 static void
5351 add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
5352 {
5353 if (DECL_P (t))
5354 {
5355 fld->decls.safe_push (t);
5356 if (debug_info_level > DINFO_LEVEL_TERSE)
5357 save_debug_info_for_decl (t);
5358 }
5359 else if (TYPE_P (t))
5360 {
5361 fld->types.safe_push (t);
5362 if (debug_info_level > DINFO_LEVEL_TERSE)
5363 save_debug_info_for_type (t);
5364 }
5365 else
5366 gcc_unreachable ();
5367 }
5368
5369 /* Push tree node T into FLD->WORKLIST. */
5370
5371 static inline void
5372 fld_worklist_push (tree t, struct free_lang_data_d *fld)
5373 {
5374 if (t && !is_lang_specific (t) && !fld->pset->contains (t))
5375 fld->worklist.safe_push ((t));
5376 }
5377
5378
5379 /* Operand callback helper for free_lang_data_in_node. *TP is the
5380 subtree operand being considered. */
5381
5382 static tree
5383 find_decls_types_r (tree *tp, int *ws, void *data)
5384 {
5385 tree t = *tp;
5386 struct free_lang_data_d *fld = (struct free_lang_data_d *) data;
5387
5388 if (TREE_CODE (t) == TREE_LIST)
5389 return NULL_TREE;
5390
5391 /* Language specific nodes will be removed, so there is no need
5392 to gather anything under them. */
5393 if (is_lang_specific (t))
5394 {
5395 *ws = 0;
5396 return NULL_TREE;
5397 }
5398
5399 if (DECL_P (t))
5400 {
5401 /* Note that walk_tree does not traverse every possible field in
5402 decls, so we have to do our own traversals here. */
5403 add_tree_to_fld_list (t, fld);
5404
5405 fld_worklist_push (DECL_NAME (t), fld);
5406 fld_worklist_push (DECL_CONTEXT (t), fld);
5407 fld_worklist_push (DECL_SIZE (t), fld);
5408 fld_worklist_push (DECL_SIZE_UNIT (t), fld);
5409
5410 /* We are going to remove everything under DECL_INITIAL for
5411 TYPE_DECLs. No point walking them. */
5412 if (TREE_CODE (t) != TYPE_DECL)
5413 fld_worklist_push (DECL_INITIAL (t), fld);
5414
5415 fld_worklist_push (DECL_ATTRIBUTES (t), fld);
5416 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld);
5417
5418 if (TREE_CODE (t) == FUNCTION_DECL)
5419 {
5420 fld_worklist_push (DECL_ARGUMENTS (t), fld);
5421 fld_worklist_push (DECL_RESULT (t), fld);
5422 }
5423 else if (TREE_CODE (t) == TYPE_DECL)
5424 {
5425 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld);
5426 }
5427 else if (TREE_CODE (t) == FIELD_DECL)
5428 {
5429 fld_worklist_push (DECL_FIELD_OFFSET (t), fld);
5430 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld);
5431 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld);
5432 fld_worklist_push (DECL_FCONTEXT (t), fld);
5433 }
5434
5435 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL)
5436 && DECL_HAS_VALUE_EXPR_P (t))
5437 fld_worklist_push (DECL_VALUE_EXPR (t), fld);
5438
5439 if (TREE_CODE (t) != FIELD_DECL
5440 && TREE_CODE (t) != TYPE_DECL)
5441 fld_worklist_push (TREE_CHAIN (t), fld);
5442 *ws = 0;
5443 }
5444 else if (TYPE_P (t))
5445 {
5446 /* Note that walk_tree does not traverse every possible field in
5447 types, so we have to do our own traversals here. */
5448 add_tree_to_fld_list (t, fld);
5449
5450 if (!RECORD_OR_UNION_TYPE_P (t))
5451 fld_worklist_push (TYPE_CACHED_VALUES (t), fld);
5452 fld_worklist_push (TYPE_SIZE (t), fld);
5453 fld_worklist_push (TYPE_SIZE_UNIT (t), fld);
5454 fld_worklist_push (TYPE_ATTRIBUTES (t), fld);
5455 fld_worklist_push (TYPE_POINTER_TO (t), fld);
5456 fld_worklist_push (TYPE_REFERENCE_TO (t), fld);
5457 fld_worklist_push (TYPE_NAME (t), fld);
5458 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream
5459 them and thus do not want to reach unused pointer types
5460 this way. */
5461 if (!POINTER_TYPE_P (t))
5462 fld_worklist_push (TYPE_MINVAL (t), fld);
5463 if (!RECORD_OR_UNION_TYPE_P (t))
5464 fld_worklist_push (TYPE_MAXVAL (t), fld);
5465 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld);
5466 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus
5467 do not want to reach unused variants this way. */
5468 if (TYPE_CONTEXT (t))
5469 {
5470 tree ctx = TYPE_CONTEXT (t);
5471 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one.
5472 So push that instead. */
5473 while (ctx && TREE_CODE (ctx) == BLOCK)
5474 ctx = BLOCK_SUPERCONTEXT (ctx);
5475 fld_worklist_push (ctx, fld);
5476 }
5477 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not
5478 want to reach unused types this way. */
5479
5480 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t))
5481 {
5482 unsigned i;
5483 tree tem;
5484 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
5485 fld_worklist_push (TREE_TYPE (tem), fld);
5486 tem = BINFO_VIRTUALS (TYPE_BINFO (t));
5487 if (tem
5488 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */
5489 && TREE_CODE (tem) == TREE_LIST)
5490 do
5491 {
5492 fld_worklist_push (TREE_VALUE (tem), fld);
5493 tem = TREE_CHAIN (tem);
5494 }
5495 while (tem);
5496 }
5497 if (RECORD_OR_UNION_TYPE_P (t))
5498 {
5499 tree tem;
5500 /* Push all TYPE_FIELDS; interesting and non-interesting
5501 members can be interleaved. */
5502 tem = TYPE_FIELDS (t);
5503 while (tem)
5504 {
5505 if (TREE_CODE (tem) == FIELD_DECL
5506 || TREE_CODE (tem) == TYPE_DECL)
5507 fld_worklist_push (tem, fld);
5508 tem = TREE_CHAIN (tem);
5509 }
5510 }
5511
5512 fld_worklist_push (TYPE_STUB_DECL (t), fld);
5513 *ws = 0;
5514 }
5515 else if (TREE_CODE (t) == BLOCK)
5516 {
5517 tree tem;
5518 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem))
5519 fld_worklist_push (tem, fld);
5520 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem))
5521 fld_worklist_push (tem, fld);
5522 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld);
5523 }
5524
5525 if (TREE_CODE (t) != IDENTIFIER_NODE
5526 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED))
5527 fld_worklist_push (TREE_TYPE (t), fld);
5528
5529 return NULL_TREE;
5530 }
5531
5532
5533 /* Find decls and types in T. */
5534
5535 static void
5536 find_decls_types (tree t, struct free_lang_data_d *fld)
5537 {
5538 while (1)
5539 {
5540 if (!fld->pset->contains (t))
5541 walk_tree (&t, find_decls_types_r, fld, fld->pset);
5542 if (fld->worklist.is_empty ())
5543 break;
5544 t = fld->worklist.pop ();
5545 }
5546 }
5547
5548 /* Translate all the types in LIST with the corresponding runtime
5549 types. */
5550
5551 static tree
5552 get_eh_types_for_runtime (tree list)
5553 {
5554 tree head, prev;
5555
5556 if (list == NULL_TREE)
5557 return NULL_TREE;
5558
5559 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5560 prev = head;
5561 list = TREE_CHAIN (list);
5562 while (list)
5563 {
5564 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list)));
5565 TREE_CHAIN (prev) = n;
5566 prev = TREE_CHAIN (prev);
5567 list = TREE_CHAIN (list);
5568 }
5569
5570 return head;
5571 }
5572
5573
5574 /* Find decls and types referenced in EH region R and store them in
5575 FLD->DECLS and FLD->TYPES. */
5576
5577 static void
5578 find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld)
5579 {
5580 switch (r->type)
5581 {
5582 case ERT_CLEANUP:
5583 break;
5584
5585 case ERT_TRY:
5586 {
5587 eh_catch c;
5588
5589 /* The types referenced in each catch must first be changed to the
5590 EH types used at runtime. This removes references to FE types
5591 in the region. */
5592 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
5593 {
5594 c->type_list = get_eh_types_for_runtime (c->type_list);
5595 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset);
5596 }
5597 }
5598 break;
5599
5600 case ERT_ALLOWED_EXCEPTIONS:
5601 r->u.allowed.type_list
5602 = get_eh_types_for_runtime (r->u.allowed.type_list);
5603 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset);
5604 break;
5605
5606 case ERT_MUST_NOT_THROW:
5607 walk_tree (&r->u.must_not_throw.failure_decl,
5608 find_decls_types_r, fld, fld->pset);
5609 break;
5610 }
5611 }
5612
5613
5614 /* Find decls and types referenced in cgraph node N and store them in
5615 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5616 look for *every* kind of DECL and TYPE node reachable from N,
5617 including those embedded inside types and decls (i.e., TYPE_DECLs,
5618 NAMESPACE_DECLs, etc.). */
5619
5620 static void
5621 find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld)
5622 {
5623 basic_block bb;
5624 struct function *fn;
5625 unsigned ix;
5626 tree t;
5627
5628 find_decls_types (n->decl, fld);
5629
5630 if (!gimple_has_body_p (n->decl))
5631 return;
5632
5633 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
5634
5635 fn = DECL_STRUCT_FUNCTION (n->decl);
5636
5637 /* Traverse locals. */
5638 FOR_EACH_LOCAL_DECL (fn, ix, t)
5639 find_decls_types (t, fld);
5640
5641 /* Traverse EH regions in FN. */
5642 {
5643 eh_region r;
5644 FOR_ALL_EH_REGION_FN (r, fn)
5645 find_decls_types_in_eh_region (r, fld);
5646 }
5647
5648 /* Traverse every statement in FN. */
5649 FOR_EACH_BB_FN (bb, fn)
5650 {
5651 gphi_iterator psi;
5652 gimple_stmt_iterator si;
5653 unsigned i;
5654
5655 for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
5656 {
5657 gphi *phi = psi.phi ();
5658
5659 for (i = 0; i < gimple_phi_num_args (phi); i++)
5660 {
5661 tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
5662 find_decls_types (*arg_p, fld);
5663 }
5664 }
5665
5666 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
5667 {
5668 gimple stmt = gsi_stmt (si);
5669
5670 if (is_gimple_call (stmt))
5671 find_decls_types (gimple_call_fntype (stmt), fld);
5672
5673 for (i = 0; i < gimple_num_ops (stmt); i++)
5674 {
5675 tree arg = gimple_op (stmt, i);
5676 find_decls_types (arg, fld);
5677 }
5678 }
5679 }
5680 }
5681
5682
5683 /* Find decls and types referenced in varpool node N and store them in
5684 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will
5685 look for *every* kind of DECL and TYPE node reachable from N,
5686 including those embedded inside types and decls (i.e., TYPE_DECLs,
5687 NAMESPACE_DECLs, etc.). */
5688
5689 static void
5690 find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
5691 {
5692 find_decls_types (v->decl, fld);
5693 }
5694
5695 /* If T needs an assembler name, have one created for it. */
5696
5697 void
5698 assign_assembler_name_if_neeeded (tree t)
5699 {
5700 if (need_assembler_name_p (t))
5701 {
5702 /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
5703 diagnostics that use input_location to show locus
5704 information. The problem here is that, at this point,
5705 input_location is generally anchored to the end of the file
5706 (since the parser is long gone), so we don't have a good
5707 position to pin it to.
5708
5709 To alleviate this problem, this uses the location of T's
5710 declaration. Examples of this are
5711 testsuite/g++.dg/template/cond2.C and
5712 testsuite/g++.dg/template/pr35240.C. */
5713 location_t saved_location = input_location;
5714 input_location = DECL_SOURCE_LOCATION (t);
5715
5716 decl_assembler_name (t);
5717
5718 input_location = saved_location;
5719 }
5720 }
5721
5722
5723 /* Free language specific information for every operand and expression
5724 in every node of the call graph. This process operates in three stages:
5725
5726 1- Every callgraph node and varpool node is traversed looking for
5727 decls and types embedded in them. This is a more exhaustive
5728 search than that done by find_referenced_vars, because it will
5729 also collect individual fields, decls embedded in types, etc.
5730
5731 2- All the decls found are sent to free_lang_data_in_decl.
5732
5733 3- All the types found are sent to free_lang_data_in_type.
5734
5735 The ordering between decls and types is important because
5736 free_lang_data_in_decl sets assembler names, which includes
5737 mangling. So types cannot be freed up until assembler names have
5738 been set up. */
5739
5740 static void
5741 free_lang_data_in_cgraph (void)
5742 {
5743 struct cgraph_node *n;
5744 varpool_node *v;
5745 struct free_lang_data_d fld;
5746 tree t;
5747 unsigned i;
5748 alias_pair *p;
5749
5750 /* Initialize sets and arrays to store referenced decls and types. */
5751 fld.pset = new hash_set<tree>;
5752 fld.worklist.create (0);
5753 fld.decls.create (100);
5754 fld.types.create (100);
5755
5756 /* Find decls and types in the body of every function in the callgraph. */
5757 FOR_EACH_FUNCTION (n)
5758 find_decls_types_in_node (n, &fld);
5759
5760 FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
5761 find_decls_types (p->decl, &fld);
5762
5763 /* Find decls and types in every varpool symbol. */
5764 FOR_EACH_VARIABLE (v)
5765 find_decls_types_in_var (v, &fld);
5766
5767 /* Set the assembler name on every decl found. We need to do this
5768 now because free_lang_data_in_decl will invalidate data needed
5769 for mangling. This breaks mangling on interdependent decls. */
5770 FOR_EACH_VEC_ELT (fld.decls, i, t)
5771 assign_assembler_name_if_neeeded (t);
5772
5773 /* Traverse every decl found freeing its language data. */
5774 FOR_EACH_VEC_ELT (fld.decls, i, t)
5775 free_lang_data_in_decl (t);
5776
5777 /* Traverse every type found freeing its language data. */
5778 FOR_EACH_VEC_ELT (fld.types, i, t)
5779 free_lang_data_in_type (t);
5780
5781 delete fld.pset;
5782 fld.worklist.release ();
5783 fld.decls.release ();
5784 fld.types.release ();
5785 }
5786
5787
5788 /* Free resources that are used by the FE but are not needed once it is done. */
5789
5790 static unsigned
5791 free_lang_data (void)
5792 {
5793 unsigned i;
5794
5795 /* If we are the LTO frontend we have freed lang-specific data already. */
5796 if (in_lto_p
5797 || (!flag_generate_lto && !flag_generate_offload))
5798 return 0;
5799
5800 /* Allocate and assign alias sets to the standard integer types
5801 while the slots are still set up the way the frontends generated them. */
5802 for (i = 0; i < itk_none; ++i)
5803 if (integer_types[i])
5804 TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);
5805
5806 /* Traverse the IL resetting language specific information for
5807 operands, expressions, etc. */
5808 free_lang_data_in_cgraph ();
5809
5810 /* Create gimple variants for common types. */
5811 ptrdiff_type_node = integer_type_node;
5812 fileptr_type_node = ptr_type_node;
5813
5814 /* Reset some langhooks. Do not reset types_compatible_p, it may
5815 still be used indirectly via the get_alias_set langhook. */
5816 lang_hooks.dwarf_name = lhd_dwarf_name;
5817 lang_hooks.decl_printable_name = gimple_decl_printable_name;
5818 /* We do not want the default decl_assembler_name implementation here;
5819 rather, once everything has been fixed up, we want a wrapper around it
5820 that asserts that all non-local symbols already got their assembler
5821 name and only produces assembler names for local symbols. Or, better,
5822 make sure we never call decl_assembler_name on local symbols and
5823 devise a separate, middle-end private scheme for it. */
5824
5825 /* Reset diagnostic machinery. */
5826 tree_diagnostics_defaults (global_dc);
5827
5828 return 0;
5829 }
5830
5831
5832 namespace {
5833
5834 const pass_data pass_data_ipa_free_lang_data =
5835 {
5836 SIMPLE_IPA_PASS, /* type */
5837 "*free_lang_data", /* name */
5838 OPTGROUP_NONE, /* optinfo_flags */
5839 TV_IPA_FREE_LANG_DATA, /* tv_id */
5840 0, /* properties_required */
5841 0, /* properties_provided */
5842 0, /* properties_destroyed */
5843 0, /* todo_flags_start */
5844 0, /* todo_flags_finish */
5845 };
5846
5847 class pass_ipa_free_lang_data : public simple_ipa_opt_pass
5848 {
5849 public:
5850 pass_ipa_free_lang_data (gcc::context *ctxt)
5851 : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
5852 {}
5853
5854 /* opt_pass methods: */
5855 virtual unsigned int execute (function *) { return free_lang_data (); }
5856
5857 }; // class pass_ipa_free_lang_data
5858
5859 } // anon namespace
5860
5861 simple_ipa_opt_pass *
5862 make_pass_ipa_free_lang_data (gcc::context *ctxt)
5863 {
5864 return new pass_ipa_free_lang_data (ctxt);
5865 }
5866
5867 /* The backbone of is_attribute_p(). ATTR_LEN is the string length of
5868 ATTR_NAME. Also used internally by remove_attribute(). */
5869 bool
5870 private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
5871 {
5872 size_t ident_len = IDENTIFIER_LENGTH (ident);
5873
5874 if (ident_len == attr_len)
5875 {
5876 if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
5877 return true;
5878 }
5879 else if (ident_len == attr_len + 4)
5880 {
5881 /* There is the possibility that ATTR is 'text' and IDENT is
5882 '__text__'. */
5883 const char *p = IDENTIFIER_POINTER (ident);
5884 if (p[0] == '_' && p[1] == '_'
5885 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5886 && strncmp (attr_name, p + 2, attr_len) == 0)
5887 return true;
5888 }
5889
5890 return false;
5891 }
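
/* Usage sketch (illustrative only; IDENT is a hypothetical identifier):
   this routine is normally reached through the is_attribute_p macro, e.g.

     bool found = is_attribute_p ("packed", ident);

   which matches both the identifier "packed" and its "__packed__"
   spelling.  */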
5892
5893 /* The backbone of lookup_attribute(). ATTR_LEN is the string length
5894 of ATTR_NAME, and LIST is not NULL_TREE. */
5895 tree
5896 private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
5897 {
5898 while (list)
5899 {
5900 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5901
5902 if (ident_len == attr_len)
5903 {
5904 if (!strcmp (attr_name,
5905 IDENTIFIER_POINTER (get_attribute_name (list))))
5906 break;
5907 }
5908 /* TODO: If we made sure that attributes were stored in the
5909 canonical form without '__...__' (i.e., as in 'text' as opposed
5910 to '__text__') then we could avoid the following case. */
5911 else if (ident_len == attr_len + 4)
5912 {
5913 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5914 if (p[0] == '_' && p[1] == '_'
5915 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
5916 && strncmp (attr_name, p + 2, attr_len) == 0)
5917 break;
5918 }
5919 list = TREE_CHAIN (list);
5920 }
5921
5922 return list;
5923 }
5924
5925 /* Given an attribute name ATTR_NAME and a list of attributes LIST,
5926 return a pointer to the first list element whose attribute name
5927 starts with ATTR_NAME. ATTR_NAME must be in the form 'text' (not
5928 '__text__'). */
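/* For example, a lookup with ATTR_NAME 'vector' and ATTR_LEN 6 matches
list elements named 'vector', 'vector_size' or '__vector_size__', since
only the leading ATTR_LEN characters are compared. */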
5929
5930 tree
5931 private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
5932 tree list)
5933 {
5934 while (list)
5935 {
5936 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5937
5938 if (attr_len > ident_len)
5939 {
5940 list = TREE_CHAIN (list);
5941 continue;
5942 }
5943
5944 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
5945
5946 if (strncmp (attr_name, p, attr_len) == 0)
5947 break;
5948
5949 /* TODO: If we made sure that attributes were stored in the
5950 canonical form without '__...__' (i.e., as in 'text' as opposed
5951 to '__text__') then we could avoid the following case. */
5952 if (p[0] == '_' && p[1] == '_'
5953 && strncmp (attr_name, p + 2, attr_len) == 0)
5954 break;
5955
5956 list = TREE_CHAIN (list);
5957 }
5958
5959 return list;
5960 }
5961
5962
5963 /* A variant of lookup_attribute() that can be used with an identifier
5964 as the first argument, and where the identifier can be either
5965 'text' or '__text__'.
5966
5967 Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
5968 return a pointer to the attribute's list element if the attribute
5969 is part of the list, or NULL_TREE if not found. If the attribute
5970 appears more than once, this only returns the first occurrence; the
5971 TREE_CHAIN of the return value should be passed back in if further
5972 occurrences are wanted. ATTR_IDENTIFIER must be an identifier but
5973 can be in the form 'text' or '__text__'. */
5974 static tree
5975 lookup_ident_attribute (tree attr_identifier, tree list)
5976 {
5977 gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);
5978
5979 while (list)
5980 {
5981 gcc_checking_assert (TREE_CODE (get_attribute_name (list))
5982 == IDENTIFIER_NODE);
5983
5984 /* Identifiers can be compared directly for equality. */
5985 if (attr_identifier == get_attribute_name (list))
5986 break;
5987
5988 /* If they are not equal, they may still be one in the form
5989 'text' while the other one is in the form '__text__'. TODO:
5990 If we were storing attributes in normalized 'text' form, then
5991 this could all go away and we could take full advantage of
5992 the fact that we're comparing identifiers. :-) */
5993 {
5994 size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
5995 size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));
5996
5997 if (ident_len == attr_len + 4)
5998 {
5999 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6000 const char *q = IDENTIFIER_POINTER (attr_identifier);
6001 if (p[0] == '_' && p[1] == '_'
6002 && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
6003 && strncmp (q, p + 2, attr_len) == 0)
6004 break;
6005 }
6006 else if (ident_len + 4 == attr_len)
6007 {
6008 const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
6009 const char *q = IDENTIFIER_POINTER (attr_identifier);
6010 if (q[0] == '_' && q[1] == '_'
6011 && q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
6012 && strncmp (q + 2, p, ident_len) == 0)
6013 break;
6014 }
6015 }
6016 list = TREE_CHAIN (list);
6017 }
6018
6019 return list;
6020 }
6021
6022 /* Remove any instances of attribute ATTR_NAME in LIST and return the
6023 modified list. */
6024
6025 tree
6026 remove_attribute (const char *attr_name, tree list)
6027 {
6028 tree *p;
6029 size_t attr_len = strlen (attr_name);
6030
6031 gcc_checking_assert (attr_name[0] != '_');
6032
6033 for (p = &list; *p; )
6034 {
6035 tree l = *p;
6036 /* TODO: If we were storing attributes in normalized form, here
6037 we could use a simple strcmp(). */
6038 if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
6039 *p = TREE_CHAIN (l);
6040 else
6041 p = &TREE_CHAIN (l);
6042 }
6043
6044 return list;
6045 }
6046
6047 /* Return an attribute list that is the union of a1 and a2. */
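/* An attribute from A2 is copied into the result only when no attribute
with the same name and an equal value is already present, so same-named
attributes with different values are all kept. */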
6048
6049 tree
6050 merge_attributes (tree a1, tree a2)
6051 {
6052 tree attributes;
6053
6054 /* Either one unset? Take the set one. */
6055
6056 if ((attributes = a1) == 0)
6057 attributes = a2;
6058
6059 /* One that completely contains the other? Take it. */
6060
6061 else if (a2 != 0 && ! attribute_list_contained (a1, a2))
6062 {
6063 if (attribute_list_contained (a2, a1))
6064 attributes = a2;
6065 else
6066 {
6067 /* Pick the longest list, and hang the other list on it. */
6068
6069 if (list_length (a1) < list_length (a2))
6070 attributes = a2, a2 = a1;
6071
6072 for (; a2 != 0; a2 = TREE_CHAIN (a2))
6073 {
6074 tree a;
6075 for (a = lookup_ident_attribute (get_attribute_name (a2),
6076 attributes);
6077 a != NULL_TREE && !attribute_value_equal (a, a2);
6078 a = lookup_ident_attribute (get_attribute_name (a2),
6079 TREE_CHAIN (a)))
6080 ;
6081 if (a == NULL_TREE)
6082 {
6083 a1 = copy_node (a2);
6084 TREE_CHAIN (a1) = attributes;
6085 attributes = a1;
6086 }
6087 }
6088 }
6089 }
6090 return attributes;
6091 }
6092
6093 /* Given types T1 and T2, merge their attributes and return
6094 the result. */
6095
6096 tree
6097 merge_type_attributes (tree t1, tree t2)
6098 {
6099 return merge_attributes (TYPE_ATTRIBUTES (t1),
6100 TYPE_ATTRIBUTES (t2));
6101 }
6102
6103 /* Given decls OLDDECL and NEWDECL, merge their attributes and return
6104 the result. */
6105
6106 tree
6107 merge_decl_attributes (tree olddecl, tree newdecl)
6108 {
6109 return merge_attributes (DECL_ATTRIBUTES (olddecl),
6110 DECL_ATTRIBUTES (newdecl));
6111 }
6112
6113 #if TARGET_DLLIMPORT_DECL_ATTRIBUTES
6114
6115 /* Specialization of merge_decl_attributes for various Windows targets.
6116
6117 This handles the following situation:
6118
6119 __declspec (dllimport) int foo;
6120 int foo;
6121
6122 The second instance of `foo' nullifies the dllimport. */
6123
6124 tree
6125 merge_dllimport_decl_attributes (tree old, tree new_tree)
6126 {
6127 tree a;
6128 int delete_dllimport_p = 1;
6129
6130 /* What we need to do here is remove dllimport from `old' if it doesn't
6131 appear in `new'. dllimport behaves like extern: if a declaration is
6132 marked dllimport and a definition appears later, then the object
6133 is not dllimport'd. We also remove a `new' dllimport if the old list
6134 contains dllexport: dllexport always overrides dllimport, regardless
6135 of the order of declaration. */
6136 if (!VAR_OR_FUNCTION_DECL_P (new_tree))
6137 delete_dllimport_p = 0;
6138 else if (DECL_DLLIMPORT_P (new_tree)
6139 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
6140 {
6141 DECL_DLLIMPORT_P (new_tree) = 0;
6142 warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
6143 "dllimport ignored", new_tree);
6144 }
6145 else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
6146 {
6147 /* Warn about overriding a symbol that has already been used, e.g.:
6148 extern int __attribute__ ((dllimport)) foo;
6149 int* bar () {return &foo;}
6150 int foo;
6151 */
6152 if (TREE_USED (old))
6153 {
6154 warning (0, "%q+D redeclared without dllimport attribute "
6155 "after being referenced with dll linkage", new_tree);
6156 /* If we have used a variable's address with dllimport linkage,
6157 keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
6158 decl may already have had TREE_CONSTANT computed.
6159 We still remove the attribute so that assembler code refers
6160 to '&foo' rather than '_imp__foo'. */
6161 if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
6162 DECL_DLLIMPORT_P (new_tree) = 1;
6163 }
6164
6165 /* Let an inline definition silently override the external reference,
6166 but otherwise warn about attribute inconsistency. */
6167 else if (TREE_CODE (new_tree) == VAR_DECL
6168 || !DECL_DECLARED_INLINE_P (new_tree))
6169 warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
6170 "previous dllimport ignored", new_tree);
6171 }
6172 else
6173 delete_dllimport_p = 0;
6174
6175 a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));
6176
6177 if (delete_dllimport_p)
6178 a = remove_attribute ("dllimport", a);
6179
6180 return a;
6181 }
6182
6183 /* Handle a "dllimport" or "dllexport" attribute; arguments as in
6184 struct attribute_spec.handler. */
6185
6186 tree
6187 handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
6188 bool *no_add_attrs)
6189 {
6190 tree node = *pnode;
6191 bool is_dllimport;
6192
6193 /* These attributes may apply to structure and union types being created,
6194 but otherwise should pass to the declaration involved. */
6195 if (!DECL_P (node))
6196 {
6197 if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
6198 | (int) ATTR_FLAG_ARRAY_NEXT))
6199 {
6200 *no_add_attrs = true;
6201 return tree_cons (name, args, NULL_TREE);
6202 }
6203 if (TREE_CODE (node) == RECORD_TYPE
6204 || TREE_CODE (node) == UNION_TYPE)
6205 {
6206 node = TYPE_NAME (node);
6207 if (!node)
6208 return NULL_TREE;
6209 }
6210 else
6211 {
6212 warning (OPT_Wattributes, "%qE attribute ignored",
6213 name);
6214 *no_add_attrs = true;
6215 return NULL_TREE;
6216 }
6217 }
6218
6219 if (TREE_CODE (node) != FUNCTION_DECL
6220 && TREE_CODE (node) != VAR_DECL
6221 && TREE_CODE (node) != TYPE_DECL)
6222 {
6223 *no_add_attrs = true;
6224 warning (OPT_Wattributes, "%qE attribute ignored",
6225 name);
6226 return NULL_TREE;
6227 }
6228
6229 if (TREE_CODE (node) == TYPE_DECL
6230 && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
6231 && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
6232 {
6233 *no_add_attrs = true;
6234 warning (OPT_Wattributes, "%qE attribute ignored",
6235 name);
6236 return NULL_TREE;
6237 }
6238
6239 is_dllimport = is_attribute_p ("dllimport", name);
6240
6241 /* Report error on dllimport ambiguities seen now before they cause
6242 any damage. */
6243 if (is_dllimport)
6244 {
6245 /* Honor any target-specific overrides. */
6246 if (!targetm.valid_dllimport_attribute_p (node))
6247 *no_add_attrs = true;
6248
6249 else if (TREE_CODE (node) == FUNCTION_DECL
6250 && DECL_DECLARED_INLINE_P (node))
6251 {
6252 warning (OPT_Wattributes, "inline function %q+D declared as "
6253 "dllimport: attribute ignored", node);
6254 *no_add_attrs = true;
6255 }
6256 /* Like MS, treat definition of dllimported variables and
6257 non-inlined functions on declaration as syntax errors. */
6258 else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
6259 {
6260 error ("function %q+D definition is marked dllimport", node);
6261 *no_add_attrs = true;
6262 }
6263
6264 else if (TREE_CODE (node) == VAR_DECL)
6265 {
6266 if (DECL_INITIAL (node))
6267 {
6268 error ("variable %q+D definition is marked dllimport",
6269 node);
6270 *no_add_attrs = true;
6271 }
6272
6273 /* `extern' needn't be specified with dllimport.
6274 Specify `extern' now and hope for the best. Sigh. */
6275 DECL_EXTERNAL (node) = 1;
6276 /* Also, implicitly give dllimport'd variables declared within
6277 a function global scope, unless declared static. */
6278 if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
6279 TREE_PUBLIC (node) = 1;
6280 }
6281
6282 if (*no_add_attrs == false)
6283 DECL_DLLIMPORT_P (node) = 1;
6284 }
6285 else if (TREE_CODE (node) == FUNCTION_DECL
6286 && DECL_DECLARED_INLINE_P (node)
6287 && flag_keep_inline_dllexport)
6288 /* An exported function, even if inline, must be emitted. */
6289 DECL_EXTERNAL (node) = 0;
6290
6291 /* Report error if symbol is not accessible at global scope. */
6292 if (!TREE_PUBLIC (node)
6293 && (TREE_CODE (node) == VAR_DECL
6294 || TREE_CODE (node) == FUNCTION_DECL))
6295 {
6296 error ("external linkage required for symbol %q+D because of "
6297 "%qE attribute", node, name);
6298 *no_add_attrs = true;
6299 }
6300
6301 /* A dllexport'd entity must have default visibility so that other
6302 program units (shared libraries or the main executable) can see
6303 it. A dllimport'd entity must have default visibility so that
6304 the linker knows that undefined references within this program
6305 unit can be resolved by the dynamic linker. */
6306 if (!*no_add_attrs)
6307 {
6308 if (DECL_VISIBILITY_SPECIFIED (node)
6309 && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
6310 error ("%qE implies default visibility, but %qD has already "
6311 "been declared with a different visibility",
6312 name, node);
6313 DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
6314 DECL_VISIBILITY_SPECIFIED (node) = 1;
6315 }
6316
6317 return NULL_TREE;
6318 }
6319
6320 #endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES */
6321 \f
6322 /* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
6323 of the various TYPE_QUAL values. */
6324
6325 static void
6326 set_type_quals (tree type, int type_quals)
6327 {
6328 TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
6329 TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
6330 TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
6331 TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
6332 TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
6333 }
6334
6335 /* Returns true iff unqualified CAND and BASE are equivalent. */
6336
6337 bool
6338 check_base_type (const_tree cand, const_tree base)
6339 {
6340 return (TYPE_NAME (cand) == TYPE_NAME (base)
6341 /* Apparently this is needed for Objective-C. */
6342 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6343 /* Check alignment. */
6344 && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
6345 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6346 TYPE_ATTRIBUTES (base)));
6347 }
6348
6349 /* Returns true iff CAND is equivalent to BASE with TYPE_QUALS. */
6350
6351 bool
6352 check_qualified_type (const_tree cand, const_tree base, int type_quals)
6353 {
6354 return (TYPE_QUALS (cand) == type_quals
6355 && check_base_type (cand, base));
6356 }
6357
6358 /* Returns true iff CAND is equivalent to BASE with ALIGN. */
6359
6360 static bool
6361 check_aligned_type (const_tree cand, const_tree base, unsigned int align)
6362 {
6363 return (TYPE_QUALS (cand) == TYPE_QUALS (base)
6364 && TYPE_NAME (cand) == TYPE_NAME (base)
6365 /* Apparently this is needed for Objective-C. */
6366 && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
6367 /* Check alignment. */
6368 && TYPE_ALIGN (cand) == align
6369 && attribute_list_equal (TYPE_ATTRIBUTES (cand),
6370 TYPE_ATTRIBUTES (base)));
6371 }
6372
6373 /* This function checks to see if TYPE matches the size of one of the
6374 built-in atomic types, and returns that core atomic type. */
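/* For example, a complete 32-bit TYPE maps to atomicSI_type_node, while an
incomplete type or an unusual size yields NULL_TREE. */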
6375
6376 static tree
6377 find_atomic_core_type (tree type)
6378 {
6379 tree base_atomic_type;
6380
6381 /* Only handle complete types. */
6382 if (TYPE_SIZE (type) == NULL_TREE)
6383 return NULL_TREE;
6384
6385 HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
6386 switch (type_size)
6387 {
6388 case 8:
6389 base_atomic_type = atomicQI_type_node;
6390 break;
6391
6392 case 16:
6393 base_atomic_type = atomicHI_type_node;
6394 break;
6395
6396 case 32:
6397 base_atomic_type = atomicSI_type_node;
6398 break;
6399
6400 case 64:
6401 base_atomic_type = atomicDI_type_node;
6402 break;
6403
6404 case 128:
6405 base_atomic_type = atomicTI_type_node;
6406 break;
6407
6408 default:
6409 base_atomic_type = NULL_TREE;
6410 }
6411
6412 return base_atomic_type;
6413 }
6414
6415 /* Return a version of the TYPE, qualified as indicated by the
6416 TYPE_QUALS, if one exists. If no qualified version exists yet,
6417 return NULL_TREE. */
6418
6419 tree
6420 get_qualified_type (tree type, int type_quals)
6421 {
6422 tree t;
6423
6424 if (TYPE_QUALS (type) == type_quals)
6425 return type;
6426
6427 /* Search the chain of variants to see if there is already one there just
6428 like the one we need to have. If so, use that existing one. We must
6429 preserve the TYPE_NAME, since there is code that depends on this. */
6430 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6431 if (check_qualified_type (t, type, type_quals))
6432 return t;
6433
6434 return NULL_TREE;
6435 }
6436
6437 /* Like get_qualified_type, but creates the type if it does not
6438 exist. This function never returns NULL_TREE. */
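/* A typical use is building a qualified variant of an existing type, e.g.
build_qualified_type (double_type_node, TYPE_QUAL_CONST) returns a
'const double' variant chained onto double_type_node's variant list. */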
6439
6440 tree
6441 build_qualified_type (tree type, int type_quals)
6442 {
6443 tree t;
6444
6445 /* See if we already have the appropriate qualified variant. */
6446 t = get_qualified_type (type, type_quals);
6447
6448 /* If not, build it. */
6449 if (!t)
6450 {
6451 t = build_variant_type_copy (type);
6452 set_type_quals (t, type_quals);
6453
6454 if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
6455 {
6456 /* See if this object can map to a basic atomic type. */
6457 tree atomic_type = find_atomic_core_type (type);
6458 if (atomic_type)
6459 {
6460 /* Ensure the alignment of this type is compatible with
6461 the required alignment of the atomic type. */
6462 if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
6463 TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
6464 }
6465 }
6466
6467 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6468 /* Propagate structural equality. */
6469 SET_TYPE_STRUCTURAL_EQUALITY (t);
6470 else if (TYPE_CANONICAL (type) != type)
6471 /* Build the underlying canonical type, since it is different
6472 from TYPE. */
6473 {
6474 tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
6475 TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
6476 }
6477 else
6478 /* T is its own canonical type. */
6479 TYPE_CANONICAL (t) = t;
6480
6481 }
6482
6483 return t;
6484 }
6485
6486 /* Create a variant of type TYPE with alignment ALIGN. */
6487
6488 tree
6489 build_aligned_type (tree type, unsigned int align)
6490 {
6491 tree t;
6492
6493 if (TYPE_PACKED (type)
6494 || TYPE_ALIGN (type) == align)
6495 return type;
6496
6497 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6498 if (check_aligned_type (t, type, align))
6499 return t;
6500
6501 t = build_variant_type_copy (type);
6502 TYPE_ALIGN (t) = align;
6503
6504 return t;
6505 }
6506
6507 /* Create a new distinct copy of TYPE. The new type is made its own
6508 MAIN_VARIANT. If TYPE requires structural equality checks, the
6509 resulting type requires structural equality checks; otherwise, its
6510 TYPE_CANONICAL points to itself. */
6511
6512 tree
6513 build_distinct_type_copy (tree type)
6514 {
6515 tree t = copy_node (type);
6516
6517 TYPE_POINTER_TO (t) = 0;
6518 TYPE_REFERENCE_TO (t) = 0;
6519
6520 /* Set the canonical type either to a new equivalence class, or
6521 propagate the need for structural equality checks. */
6522 if (TYPE_STRUCTURAL_EQUALITY_P (type))
6523 SET_TYPE_STRUCTURAL_EQUALITY (t);
6524 else
6525 TYPE_CANONICAL (t) = t;
6526
6527 /* Make it its own variant. */
6528 TYPE_MAIN_VARIANT (t) = t;
6529 TYPE_NEXT_VARIANT (t) = 0;
6530
6531 /* Note that it is now possible for TYPE_MIN_VALUE to be a value
6532 whose TREE_TYPE is not t. This can also happen in the Ada
6533 frontend when using subtypes. */
6534
6535 return t;
6536 }
6537
6538 /* Create a new variant of TYPE, equivalent but distinct. This is so
6539 the caller can modify it. TYPE_CANONICAL for the return type will
6540 be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
6541 are considered equal by the language itself (or that both types
6542 require structural equality checks). */
6543
6544 tree
6545 build_variant_type_copy (tree type)
6546 {
6547 tree t, m = TYPE_MAIN_VARIANT (type);
6548
6549 t = build_distinct_type_copy (type);
6550
6551 /* Since we're building a variant, assume that it is a non-semantic
6552 variant. This also propagates TYPE_STRUCTURAL_EQUALITY_P. */
6553 TYPE_CANONICAL (t) = TYPE_CANONICAL (type);
6554
6555 /* Add the new type to the chain of variants of TYPE. */
6556 TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
6557 TYPE_NEXT_VARIANT (m) = t;
6558 TYPE_MAIN_VARIANT (t) = m;
6559
6560 return t;
6561 }
6562 \f
6563 /* Return true if the from trees in both tree maps are equal. */
6564
6565 int
6566 tree_map_base_eq (const void *va, const void *vb)
6567 {
6568 const struct tree_map_base *const a = (const struct tree_map_base *) va,
6569 *const b = (const struct tree_map_base *) vb;
6570 return (a->from == b->from);
6571 }
6572
6573 /* Hash a from tree in a tree_map_base. */
6574
6575 unsigned int
6576 tree_map_base_hash (const void *item)
6577 {
6578 return htab_hash_pointer (((const struct tree_map_base *)item)->from);
6579 }
6580
6581 /* Return true if this tree map structure is marked for garbage collection
6582 purposes. We simply return true if the from tree is marked, so that this
6583 structure goes away when the from tree goes away. */
6584
6585 int
6586 tree_map_base_marked_p (const void *p)
6587 {
6588 return ggc_marked_p (((const struct tree_map_base *) p)->from);
6589 }
6590
6591 /* Hash a from tree in a tree_map. */
6592
6593 unsigned int
6594 tree_map_hash (const void *item)
6595 {
6596 return (((const struct tree_map *) item)->hash);
6597 }
6598
6599 /* Hash a from tree in a tree_decl_map. */
6600
6601 unsigned int
6602 tree_decl_map_hash (const void *item)
6603 {
6604 return DECL_UID (((const struct tree_decl_map *) item)->base.from);
6605 }
6606
6607 /* Return the initialization priority for DECL. */
6608
6609 priority_type
6610 decl_init_priority_lookup (tree decl)
6611 {
6612 symtab_node *snode = symtab_node::get (decl);
6613
6614 if (!snode)
6615 return DEFAULT_INIT_PRIORITY;
6616 return
6617 snode->get_init_priority ();
6618 }
6619
6620 /* Return the finalization priority for DECL. */
6621
6622 priority_type
6623 decl_fini_priority_lookup (tree decl)
6624 {
6625 cgraph_node *node = cgraph_node::get (decl);
6626
6627 if (!node)
6628 return DEFAULT_INIT_PRIORITY;
6629 return
6630 node->get_fini_priority ();
6631 }
6632
6633 /* Set the initialization priority for DECL to PRIORITY. */
6634
6635 void
6636 decl_init_priority_insert (tree decl, priority_type priority)
6637 {
6638 struct symtab_node *snode;
6639
6640 if (priority == DEFAULT_INIT_PRIORITY)
6641 {
6642 snode = symtab_node::get (decl);
6643 if (!snode)
6644 return;
6645 }
6646 else if (TREE_CODE (decl) == VAR_DECL)
6647 snode = varpool_node::get_create (decl);
6648 else
6649 snode = cgraph_node::get_create (decl);
6650 snode->set_init_priority (priority);
6651 }
6652
6653 /* Set the finalization priority for DECL to PRIORITY. */
6654
6655 void
6656 decl_fini_priority_insert (tree decl, priority_type priority)
6657 {
6658 struct cgraph_node *node;
6659
6660 if (priority == DEFAULT_INIT_PRIORITY)
6661 {
6662 node = cgraph_node::get (decl);
6663 if (!node)
6664 return;
6665 }
6666 else
6667 node = cgraph_node::get_create (decl);
6668 node->set_fini_priority (priority);
6669 }
6670
6671 /* Print out the statistics for the DECL_DEBUG_EXPR hash table. */
6672
6673 static void
6674 print_debug_expr_statistics (void)
6675 {
6676 fprintf (stderr, "DECL_DEBUG_EXPR hash: size %ld, %ld elements, %f collisions\n",
6677 (long) debug_expr_for_decl->size (),
6678 (long) debug_expr_for_decl->elements (),
6679 debug_expr_for_decl->collisions ());
6680 }
6681
6682 /* Print out the statistics for the DECL_VALUE_EXPR hash table. */
6683
6684 static void
6685 print_value_expr_statistics (void)
6686 {
6687 fprintf (stderr, "DECL_VALUE_EXPR hash: size %ld, %ld elements, %f collisions\n",
6688 (long) value_expr_for_decl->size (),
6689 (long) value_expr_for_decl->elements (),
6690 value_expr_for_decl->collisions ());
6691 }
6692
6693 /* Lookup a debug expression for FROM, and return it if we find one. */
6694
6695 tree
6696 decl_debug_expr_lookup (tree from)
6697 {
6698 struct tree_decl_map *h, in;
6699 in.base.from = from;
6700
6701 h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6702 if (h)
6703 return h->to;
6704 return NULL_TREE;
6705 }
6706
6707 /* Insert a mapping FROM->TO in the debug expression hashtable. */
6708
6709 void
6710 decl_debug_expr_insert (tree from, tree to)
6711 {
6712 struct tree_decl_map *h;
6713
6714 h = ggc_alloc<tree_decl_map> ();
6715 h->base.from = from;
6716 h->to = to;
6717 *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6718 }
6719
6720 /* Lookup a value expression for FROM, and return it if we find one. */
6721
6722 tree
6723 decl_value_expr_lookup (tree from)
6724 {
6725 struct tree_decl_map *h, in;
6726 in.base.from = from;
6727
6728 h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
6729 if (h)
6730 return h->to;
6731 return NULL_TREE;
6732 }
6733
6734 /* Insert a mapping FROM->TO in the value expression hashtable. */
6735
6736 void
6737 decl_value_expr_insert (tree from, tree to)
6738 {
6739 struct tree_decl_map *h;
6740
6741 h = ggc_alloc<tree_decl_map> ();
6742 h->base.from = from;
6743 h->to = to;
6744 *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
6745 }
6746
6747 /* Lookup a vector of debug arguments for FROM, and return it if we
6748 find one. */
6749
6750 vec<tree, va_gc> **
6751 decl_debug_args_lookup (tree from)
6752 {
6753 struct tree_vec_map *h, in;
6754
6755 if (!DECL_HAS_DEBUG_ARGS_P (from))
6756 return NULL;
6757 gcc_checking_assert (debug_args_for_decl != NULL);
6758 in.base.from = from;
6759 h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
6760 if (h)
6761 return &h->to;
6762 return NULL;
6763 }
6764
6765 /* Insert a mapping FROM->empty vector of debug arguments in the debug
6766 arguments hashtable. */
6767
6768 vec<tree, va_gc> **
6769 decl_debug_args_insert (tree from)
6770 {
6771 struct tree_vec_map *h;
6772 tree_vec_map **loc;
6773
6774 if (DECL_HAS_DEBUG_ARGS_P (from))
6775 return decl_debug_args_lookup (from);
6776 if (debug_args_for_decl == NULL)
6777 debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
6778 h = ggc_alloc<tree_vec_map> ();
6779 h->base.from = from;
6780 h->to = NULL;
6781 loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
6782 *loc = h;
6783 DECL_HAS_DEBUG_ARGS_P (from) = 1;
6784 return &h->to;
6785 }
6786
6787 /* Hashing of types so that we don't make duplicates.
6788 The entry point is `type_hash_canon'. */
6789
6790 /* Compute a hash code for a list of types (chain of TREE_LIST nodes
6791 with types in the TREE_VALUE slots), by adding the hash codes
6792 of the individual types. */
6793
6794 static void
6795 type_hash_list (const_tree list, inchash::hash &hstate)
6796 {
6797 const_tree tail;
6798
6799 for (tail = list; tail; tail = TREE_CHAIN (tail))
6800 if (TREE_VALUE (tail) != error_mark_node)
6801 hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
6802 }
6803
6804 /* These are the Hashtable callback functions. */
6805
6806 /* Returns true iff the types are equivalent. */
6807
6808 bool
6809 type_cache_hasher::equal (type_hash *a, type_hash *b)
6810 {
6811 /* First test the things that are the same for all types. */
6812 if (a->hash != b->hash
6813 || TREE_CODE (a->type) != TREE_CODE (b->type)
6814 || TREE_TYPE (a->type) != TREE_TYPE (b->type)
6815 || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
6816 TYPE_ATTRIBUTES (b->type))
6817 || (TREE_CODE (a->type) != COMPLEX_TYPE
6818 && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
6819 return 0;
6820
6821 /* Be careful about comparing arrays before and after the element type
6822 has been completed; don't compare TYPE_ALIGN unless both types are
6823 complete. */
6824 if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
6825 && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
6826 || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
6827 return 0;
6828
6829 switch (TREE_CODE (a->type))
6830 {
6831 case VOID_TYPE:
6832 case COMPLEX_TYPE:
6833 case POINTER_TYPE:
6834 case REFERENCE_TYPE:
6835 case NULLPTR_TYPE:
6836 return 1;
6837
6838 case VECTOR_TYPE:
6839 return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);
6840
6841 case ENUMERAL_TYPE:
6842 if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
6843 && !(TYPE_VALUES (a->type)
6844 && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
6845 && TYPE_VALUES (b->type)
6846 && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
6847 && type_list_equal (TYPE_VALUES (a->type),
6848 TYPE_VALUES (b->type))))
6849 return 0;
6850
6851 /* ... fall through ... */
6852
6853 case INTEGER_TYPE:
6854 case REAL_TYPE:
6855 case BOOLEAN_TYPE:
6856 if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
6857 return false;
6858 return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
6859 || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
6860 TYPE_MAX_VALUE (b->type)))
6861 && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
6862 || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
6863 TYPE_MIN_VALUE (b->type))));
6864
6865 case FIXED_POINT_TYPE:
6866 return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);
6867
6868 case OFFSET_TYPE:
6869 return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);
6870
6871 case METHOD_TYPE:
6872 if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
6873 && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6874 || (TYPE_ARG_TYPES (a->type)
6875 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6876 && TYPE_ARG_TYPES (b->type)
6877 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6878 && type_list_equal (TYPE_ARG_TYPES (a->type),
6879 TYPE_ARG_TYPES (b->type)))))
6880 break;
6881 return 0;
6882 case ARRAY_TYPE:
6883 return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);
6884
6885 case RECORD_TYPE:
6886 case UNION_TYPE:
6887 case QUAL_UNION_TYPE:
6888 return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
6889 || (TYPE_FIELDS (a->type)
6890 && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
6891 && TYPE_FIELDS (b->type)
6892 && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
6893 && type_list_equal (TYPE_FIELDS (a->type),
6894 TYPE_FIELDS (b->type))));
6895
6896 case FUNCTION_TYPE:
6897 if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
6898 || (TYPE_ARG_TYPES (a->type)
6899 && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
6900 && TYPE_ARG_TYPES (b->type)
6901 && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
6902 && type_list_equal (TYPE_ARG_TYPES (a->type),
6903 TYPE_ARG_TYPES (b->type))))
6904 break;
6905 return 0;
6906
6907 default:
6908 return 0;
6909 }
6910
6911 if (lang_hooks.types.type_hash_eq != NULL)
6912 return lang_hooks.types.type_hash_eq (a->type, b->type);
6913
6914 return 1;
6915 }
6916
6917 /* Given TYPE, and HASHCODE its hash code, return the canonical
6918 object for an identical type if one already exists.
6919 Otherwise, return TYPE, and record it as the canonical object.
6920
6921 To use this function, first create a type of the sort you want.
6922 Then compute its hash code from the fields of the type that
6923 make it different from other similar types.
6924 Then call this function and use the value. */
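/* build_nonstandard_integer_type below is one such caller: it creates a
fresh INTEGER_TYPE, uses the type's maximum value as the hash code and
lets type_hash_canon either return an existing identical type or record
the new one. */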
6925
6926 tree
6927 type_hash_canon (unsigned int hashcode, tree type)
6928 {
6929 type_hash in;
6930 type_hash **loc;
6931
6932 /* The hash table only contains main variants, so ensure that's what we're
6933 being passed. */
6934 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
6935
6936 /* The TYPE_ALIGN field of a type is set by layout_type(), so we
6937 must call that routine before comparing TYPE_ALIGNs. */
6938 layout_type (type);
6939
6940 in.hash = hashcode;
6941 in.type = type;
6942
6943 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
6944 if (*loc)
6945 {
6946 tree t1 = ((type_hash *) *loc)->type;
6947 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1);
6948 if (GATHER_STATISTICS)
6949 {
6950 tree_code_counts[(int) TREE_CODE (type)]--;
6951 tree_node_counts[(int) t_kind]--;
6952 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common);
6953 }
6954 return t1;
6955 }
6956 else
6957 {
6958 struct type_hash *h;
6959
6960 h = ggc_alloc<type_hash> ();
6961 h->hash = hashcode;
6962 h->type = type;
6963 *loc = h;
6964
6965 return type;
6966 }
6967 }
6968
6969 static void
6970 print_type_hash_statistics (void)
6971 {
6972 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n",
6973 (long) type_hash_table->size (),
6974 (long) type_hash_table->elements (),
6975 type_hash_table->collisions ());
6976 }
6977
6978 /* Compute a hash code for a list of attributes (chain of TREE_LIST nodes
6979 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots),
6980 by adding the hash codes of the individual attributes. */
6981
6982 static void
6983 attribute_hash_list (const_tree list, inchash::hash &hstate)
6984 {
6985 const_tree tail;
6986
6987 for (tail = list; tail; tail = TREE_CHAIN (tail))
6988 /* ??? Do we want to add in TREE_VALUE too? */
6989 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail)));
6990 }
6991
6992 /* Given two lists of attributes, return true if list L2 is
6993 equivalent to L1. */
6994
6995 int
6996 attribute_list_equal (const_tree l1, const_tree l2)
6997 {
6998 if (l1 == l2)
6999 return 1;
7000
7001 return attribute_list_contained (l1, l2)
7002 && attribute_list_contained (l2, l1);
7003 }
7004
7005 /* Given two lists of attributes, return true if list L2 is
7006 completely contained within L1. */
7007 /* ??? This would be faster if attribute names were stored in a canonicalized
7008 form. Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
7009 must be used to show these elements are equivalent (which they are). */
7010 /* ??? It's not clear that attributes with arguments will always be handled
7011 correctly. */
7012
7013 int
7014 attribute_list_contained (const_tree l1, const_tree l2)
7015 {
7016 const_tree t1, t2;
7017
7018 /* First check the obvious, maybe the lists are identical. */
7019 if (l1 == l2)
7020 return 1;
7021
7022 /* Maybe the lists are similar. */
7023 for (t1 = l1, t2 = l2;
7024 t1 != 0 && t2 != 0
7025 && get_attribute_name (t1) == get_attribute_name (t2)
7026 && TREE_VALUE (t1) == TREE_VALUE (t2);
7027 t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7028 ;
7029
7030 /* Maybe the lists are equal. */
7031 if (t1 == 0 && t2 == 0)
7032 return 1;
7033
7034 for (; t2 != 0; t2 = TREE_CHAIN (t2))
7035 {
7036 const_tree attr;
7037 /* This CONST_CAST is okay because lookup_attribute does not
7038 modify its argument and the return value is assigned to a
7039 const_tree. */
7040 for (attr = lookup_ident_attribute (get_attribute_name (t2),
7041 CONST_CAST_TREE (l1));
7042 attr != NULL_TREE && !attribute_value_equal (t2, attr);
7043 attr = lookup_ident_attribute (get_attribute_name (t2),
7044 TREE_CHAIN (attr)))
7045 ;
7046
7047 if (attr == NULL_TREE)
7048 return 0;
7049 }
7050
7051 return 1;
7052 }
7053
7054 /* Given two lists of types
7055 (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
7056 return 1 if the lists contain the same types in the same order.
7057 Also, the TREE_PURPOSEs must match. */
7058
7059 int
7060 type_list_equal (const_tree l1, const_tree l2)
7061 {
7062 const_tree t1, t2;
7063
7064 for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
7065 if (TREE_VALUE (t1) != TREE_VALUE (t2)
7066 || (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
7067 && ! (1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
7068 && (TREE_TYPE (TREE_PURPOSE (t1))
7069 == TREE_TYPE (TREE_PURPOSE (t2))))))
7070 return 0;
7071
7072 return t1 == t2;
7073 }
7074
7075 /* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
7076 given by TYPE. If the argument list accepts variable arguments,
7077 then this function counts only the ordinary arguments. */
7078
7079 int
7080 type_num_arguments (const_tree type)
7081 {
7082 int i = 0;
7083 tree t;
7084
7085 for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
7086 /* If the function does not take a variable number of arguments,
7087 the last element in the list will have type `void'. */
7088 if (VOID_TYPE_P (TREE_VALUE (t)))
7089 break;
7090 else
7091 ++i;
7092
7093 return i;
7094 }
7095
7096 /* Nonzero if integer constants T1 and T2
7097 represent the same constant value. */
7098
7099 int
7100 tree_int_cst_equal (const_tree t1, const_tree t2)
7101 {
7102 if (t1 == t2)
7103 return 1;
7104
7105 if (t1 == 0 || t2 == 0)
7106 return 0;
7107
7108 if (TREE_CODE (t1) == INTEGER_CST
7109 && TREE_CODE (t2) == INTEGER_CST
7110 && wi::to_widest (t1) == wi::to_widest (t2))
7111 return 1;
7112
7113 return 0;
7114 }
7115
7116 /* Return true if T is an INTEGER_CST whose numerical value (extended
7117 according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. */
7118
7119 bool
7120 tree_fits_shwi_p (const_tree t)
7121 {
7122 return (t != NULL_TREE
7123 && TREE_CODE (t) == INTEGER_CST
7124 && wi::fits_shwi_p (wi::to_widest (t)));
7125 }
7126
7127 /* Return true if T is an INTEGER_CST whose numerical value (extended
7128 according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
7129
7130 bool
7131 tree_fits_uhwi_p (const_tree t)
7132 {
7133 return (t != NULL_TREE
7134 && TREE_CODE (t) == INTEGER_CST
7135 && wi::fits_uhwi_p (wi::to_widest (t)));
7136 }
7137
7138 /* T is an INTEGER_CST whose numerical value (extended according to
7139 TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
7140 HOST_WIDE_INT. */
7141
7142 HOST_WIDE_INT
7143 tree_to_shwi (const_tree t)
7144 {
7145 gcc_assert (tree_fits_shwi_p (t));
7146 return TREE_INT_CST_LOW (t);
7147 }
7148
7149 /* T is an INTEGER_CST whose numerical value (extended according to
7150 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that
7151 HOST_WIDE_INT. */
7152
7153 unsigned HOST_WIDE_INT
7154 tree_to_uhwi (const_tree t)
7155 {
7156 gcc_assert (tree_fits_uhwi_p (t));
7157 return TREE_INT_CST_LOW (t);
7158 }
7159
7160 /* Return the most significant (sign) bit of T. */
7161
7162 int
7163 tree_int_cst_sign_bit (const_tree t)
7164 {
7165 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1;
7166
7167 return wi::extract_uhwi (t, bitno, 1);
7168 }
7169
7170 /* Return an indication of the sign of the integer constant T.
7171 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0.
7172 Note that -1 will never be returned if T's type is unsigned. */
7173
7174 int
7175 tree_int_cst_sgn (const_tree t)
7176 {
7177 if (wi::eq_p (t, 0))
7178 return 0;
7179 else if (TYPE_UNSIGNED (TREE_TYPE (t)))
7180 return 1;
7181 else if (wi::neg_p (t))
7182 return -1;
7183 else
7184 return 1;
7185 }
7186
7187 /* Return the minimum number of bits needed to represent VALUE in a
7188 signed or unsigned type; SGN says which. */
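/* For example, VALUE 4 needs tree_floor_log2 (4) + 1 = 3 bits when SGN is
UNSIGNED and one extra bit, 4, when SGN is SIGNED. */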
7189
7190 unsigned int
7191 tree_int_cst_min_precision (tree value, signop sgn)
7192 {
7193 /* If the value is negative, compute its negative minus 1. The latter
7194 adjustment is because the absolute value of the largest negative value
7195 is one larger than the largest positive value. This is equivalent to
7196 a bit-wise negation, so use that operation instead. */
7197
7198 if (tree_int_cst_sgn (value) < 0)
7199 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value);
7200
7201 /* Return the number of bits needed, taking into account the fact
7202 that we need one more bit for a signed than unsigned type.
7203 If value is 0 or -1, the minimum precision is 1 no matter
7204 whether SGN is SIGNED or UNSIGNED. */
7205
7206 if (integer_zerop (value))
7207 return 1;
7208 else
7209 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ;
7210 }
7211
7212 /* Return truthvalue of whether T1 is the same tree structure as T2.
7213 Return 1 if they are the same.
7214 Return 0 if they are understandably different.
7215 Return -1 if either contains tree structure not understood by
7216 this function. */
7217
7218 int
7219 simple_cst_equal (const_tree t1, const_tree t2)
7220 {
7221 enum tree_code code1, code2;
7222 int cmp;
7223 int i;
7224
7225 if (t1 == t2)
7226 return 1;
7227 if (t1 == 0 || t2 == 0)
7228 return 0;
7229
7230 code1 = TREE_CODE (t1);
7231 code2 = TREE_CODE (t2);
7232
7233 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR)
7234 {
7235 if (CONVERT_EXPR_CODE_P (code2)
7236 || code2 == NON_LVALUE_EXPR)
7237 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7238 else
7239 return simple_cst_equal (TREE_OPERAND (t1, 0), t2);
7240 }
7241
7242 else if (CONVERT_EXPR_CODE_P (code2)
7243 || code2 == NON_LVALUE_EXPR)
7244 return simple_cst_equal (t1, TREE_OPERAND (t2, 0));
7245
7246 if (code1 != code2)
7247 return 0;
7248
7249 switch (code1)
7250 {
7251 case INTEGER_CST:
7252 return wi::to_widest (t1) == wi::to_widest (t2);
7253
7254 case REAL_CST:
7255 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
7256
7257 case FIXED_CST:
7258 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2));
7259
7260 case STRING_CST:
7261 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
7262 && ! memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
7263 TREE_STRING_LENGTH (t1)));
7264
7265 case CONSTRUCTOR:
7266 {
7267 unsigned HOST_WIDE_INT idx;
7268 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
7269 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
7270
7271 if (vec_safe_length (v1) != vec_safe_length (v2))
7272 return false;
7273
7274 for (idx = 0; idx < vec_safe_length (v1); ++idx)
7275 /* ??? Should we also handle fields here? */
7276 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
7277 return false;
7278 return true;
7279 }
7280
7281 case SAVE_EXPR:
7282 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7283
7284 case CALL_EXPR:
7285 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2));
7286 if (cmp <= 0)
7287 return cmp;
7288 if (call_expr_nargs (t1) != call_expr_nargs (t2))
7289 return 0;
7290 {
7291 const_tree arg1, arg2;
7292 const_call_expr_arg_iterator iter1, iter2;
7293 for (arg1 = first_const_call_expr_arg (t1, &iter1),
7294 arg2 = first_const_call_expr_arg (t2, &iter2);
7295 arg1 && arg2;
7296 arg1 = next_const_call_expr_arg (&iter1),
7297 arg2 = next_const_call_expr_arg (&iter2))
7298 {
7299 cmp = simple_cst_equal (arg1, arg2);
7300 if (cmp <= 0)
7301 return cmp;
7302 }
7303 return arg1 == arg2;
7304 }
7305
7306 case TARGET_EXPR:
7307 /* Special case: if either target is an unallocated VAR_DECL,
7308 it means that it's going to be unified with whatever the
7309 TARGET_EXPR is really supposed to initialize, so treat it
7310 as being equivalent to anything. */
7311 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
7312 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
7313 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0)))
7314 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
7315 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
7316 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0))))
7317 cmp = 1;
7318 else
7319 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7320
7321 if (cmp <= 0)
7322 return cmp;
7323
7324 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7325
7326 case WITH_CLEANUP_EXPR:
7327 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7328 if (cmp <= 0)
7329 return cmp;
7330
7331 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
7332
7333 case COMPONENT_REF:
7334 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
7335 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
7336
7337 return 0;
7338
7339 case VAR_DECL:
7340 case PARM_DECL:
7341 case CONST_DECL:
7342 case FUNCTION_DECL:
7343 return 0;
7344
7345 default:
7346 break;
7347 }
7348
7349 /* This general rule works for most tree codes. All exceptions should be
7350 handled above. If this is a language-specific tree code, we can't
7351 trust what might be in the operand, so say we don't know
7352 the situation. */
7353 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE)
7354 return -1;
7355
7356 switch (TREE_CODE_CLASS (code1))
7357 {
7358 case tcc_unary:
7359 case tcc_binary:
7360 case tcc_comparison:
7361 case tcc_expression:
7362 case tcc_reference:
7363 case tcc_statement:
7364 cmp = 1;
7365 for (i = 0; i < TREE_CODE_LENGTH (code1); i++)
7366 {
7367 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
7368 if (cmp <= 0)
7369 return cmp;
7370 }
7371
7372 return cmp;
7373
7374 default:
7375 return -1;
7376 }
7377 }
7378
7379 /* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value.
7380 Return -1, 0, or 1 if the value of T is less than, equal to, or greater
7381 than U, respectively. */
7382
7383 int
7384 compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
7385 {
7386 if (tree_int_cst_sgn (t) < 0)
7387 return -1;
7388 else if (!tree_fits_uhwi_p (t))
7389 return 1;
7390 else if (TREE_INT_CST_LOW (t) == u)
7391 return 0;
7392 else if (TREE_INT_CST_LOW (t) < u)
7393 return -1;
7394 else
7395 return 1;
7396 }
7397
7398 /* Return true if SIZE represents a constant size that is in bounds of
7399 what the middle-end and the backend accept (covering not more than
7400 half of the address space). */
7401
7402 bool
7403 valid_constant_size_p (const_tree size)
7404 {
7405 if (! tree_fits_uhwi_p (size)
7406 || TREE_OVERFLOW (size)
7407 || tree_int_cst_sign_bit (size) != 0)
7408 return false;
7409 return true;
7410 }
7411
7412 /* Return the precision of the type, or for a complex or vector type the
7413 precision of the type of its elements. */
7414
7415 unsigned int
7416 element_precision (const_tree type)
7417 {
7418 enum tree_code code = TREE_CODE (type);
7419 if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
7420 type = TREE_TYPE (type);
7421
7422 return TYPE_PRECISION (type);
7423 }
7424
7425 /* Return true if CODE represents an associative tree code. Otherwise
7426 return false. */
7427 bool
7428 associative_tree_code (enum tree_code code)
7429 {
7430 switch (code)
7431 {
7432 case BIT_IOR_EXPR:
7433 case BIT_AND_EXPR:
7434 case BIT_XOR_EXPR:
7435 case PLUS_EXPR:
7436 case MULT_EXPR:
7437 case MIN_EXPR:
7438 case MAX_EXPR:
7439 return true;
7440
7441 default:
7442 break;
7443 }
7444 return false;
7445 }
7446
7447 /* Return true if CODE represents a commutative tree code. Otherwise
7448 return false. */
7449 bool
7450 commutative_tree_code (enum tree_code code)
7451 {
7452 switch (code)
7453 {
7454 case PLUS_EXPR:
7455 case MULT_EXPR:
7456 case MULT_HIGHPART_EXPR:
7457 case MIN_EXPR:
7458 case MAX_EXPR:
7459 case BIT_IOR_EXPR:
7460 case BIT_XOR_EXPR:
7461 case BIT_AND_EXPR:
7462 case NE_EXPR:
7463 case EQ_EXPR:
7464 case UNORDERED_EXPR:
7465 case ORDERED_EXPR:
7466 case UNEQ_EXPR:
7467 case LTGT_EXPR:
7468 case TRUTH_AND_EXPR:
7469 case TRUTH_XOR_EXPR:
7470 case TRUTH_OR_EXPR:
7471 case WIDEN_MULT_EXPR:
7472 case VEC_WIDEN_MULT_HI_EXPR:
7473 case VEC_WIDEN_MULT_LO_EXPR:
7474 case VEC_WIDEN_MULT_EVEN_EXPR:
7475 case VEC_WIDEN_MULT_ODD_EXPR:
7476 return true;
7477
7478 default:
7479 break;
7480 }
7481 return false;
7482 }
7483
7484 /* Return true if CODE represents a ternary tree code for which the
7485 first two operands are commutative. Otherwise return false. */
7486 bool
7487 commutative_ternary_tree_code (enum tree_code code)
7488 {
7489 switch (code)
7490 {
7491 case WIDEN_MULT_PLUS_EXPR:
7492 case WIDEN_MULT_MINUS_EXPR:
7493 case DOT_PROD_EXPR:
7494 case FMA_EXPR:
7495 return true;
7496
7497 default:
7498 break;
7499 }
7500 return false;
7501 }
7502
7503 namespace inchash
7504 {
7505
7506 /* Generate a hash value for an expression. This can be used iteratively
7507 by passing a previous result as the HSTATE argument.
7508
7509 This function is intended to produce the same hash for expressions which
7510 would compare equal using operand_equal_p. */
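/* For commutative codes the two operand hashes are combined in an
order-independent way (see the commutative_tree_code branch below), so
for instance a + b and b + a receive the same hash. */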
7511 void
7512 add_expr (const_tree t, inchash::hash &hstate)
7513 {
7514 int i;
7515 enum tree_code code;
7516 enum tree_code_class tclass;
7517
7518 if (t == NULL_TREE)
7519 {
7520 hstate.merge_hash (0);
7521 return;
7522 }
7523
7524 code = TREE_CODE (t);
7525
7526 switch (code)
7527 {
7528 /* Alas, constants aren't shared, so we can't rely on pointer
7529 identity. */
7530 case VOID_CST:
7531 hstate.merge_hash (0);
7532 return;
7533 case INTEGER_CST:
7534 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
7535 hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
7536 return;
7537 case REAL_CST:
7538 {
7539 unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
7540 hstate.merge_hash (val2);
7541 return;
7542 }
7543 case FIXED_CST:
7544 {
7545 unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
7546 hstate.merge_hash (val2);
7547 return;
7548 }
7549 case STRING_CST:
7550 hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
7551 return;
7552 case COMPLEX_CST:
7553 inchash::add_expr (TREE_REALPART (t), hstate);
7554 inchash::add_expr (TREE_IMAGPART (t), hstate);
7555 return;
7556 case VECTOR_CST:
7557 {
7558 unsigned i;
7559 for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
7560 inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
7561 return;
7562 }
7563 case SSA_NAME:
7564 /* SSA names are unique, so hashing the version number suffices. */
7565 hstate.add_wide_int (SSA_NAME_VERSION (t));
7566 return;
7567 case PLACEHOLDER_EXPR:
7568 /* The node itself doesn't matter. */
7569 return;
7570 case TREE_LIST:
7571 /* A list of expressions, for a CALL_EXPR or as the elements of a
7572 VECTOR_CST. */
7573 for (; t; t = TREE_CHAIN (t))
7574 inchash::add_expr (TREE_VALUE (t), hstate);
7575 return;
7576 case CONSTRUCTOR:
7577 {
7578 unsigned HOST_WIDE_INT idx;
7579 tree field, value;
7580 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
7581 {
7582 inchash::add_expr (field, hstate);
7583 inchash::add_expr (value, hstate);
7584 }
7585 return;
7586 }
7587 case FUNCTION_DECL:
7588 /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
7589 Otherwise nodes that compare equal according to operand_equal_p might
7590 get different hash codes. However, don't do this for machine specific
7591 or front end builtins, since the function code is overloaded in those
7592 cases. */
7593 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
7594 && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
7595 {
7596 t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
7597 code = TREE_CODE (t);
7598 }
7599 /* FALL THROUGH */
7600 default:
7601 tclass = TREE_CODE_CLASS (code);
7602
7603 if (tclass == tcc_declaration)
7604 {
7605 /* DECL's have a unique ID */
7606 hstate.add_wide_int (DECL_UID (t));
7607 }
7608 else
7609 {
7610 gcc_assert (IS_EXPR_CODE_CLASS (tclass));
7611
7612 hstate.add_object (code);
7613
7614 /* Don't hash the type, that can lead to having nodes which
7615 compare equal according to operand_equal_p, but which
7616 have different hash codes. */
7617 if (CONVERT_EXPR_CODE_P (code)
7618 || code == NON_LVALUE_EXPR)
7619 {
7620 /* Make sure to include signedness in the hash computation. */
7621 hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
7622 inchash::add_expr (TREE_OPERAND (t, 0), hstate);
7623 }
7624
7625 else if (commutative_tree_code (code))
7626 {
7627 /* It's a commutative expression. We want to hash it the same
7628 however it appears. We do this by first hashing both operands
7629 and then rehashing based on the order of their independent
7630 hashes. */
7631 inchash::hash one, two;
7632 inchash::add_expr (TREE_OPERAND (t, 0), one);
7633 inchash::add_expr (TREE_OPERAND (t, 1), two);
7634 hstate.add_commutative (one, two);
7635 }
7636 else
7637 for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
7638 inchash::add_expr (TREE_OPERAND (t, i), hstate);
7639 }
7640 return;
7641 }
7642 }
7643
7644 }
7645
7646 /* Constructors for pointer, array and function types.
7647 (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
7648 constructed by language-dependent code, not here.) */
7649
7650 /* Construct, lay out and return the type of pointers to TO_TYPE with
7651 mode MODE. If CAN_ALIAS_ALL is TRUE, indicate this type can
7652 reference all of memory. If such a type has already been
7653 constructed, reuse it. */
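/* build_pointer_type below is the common entry point; it simply calls this
function with the pointer mode of TO_TYPE's address space. */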
7654
7655 tree
7656 build_pointer_type_for_mode (tree to_type, machine_mode mode,
7657 bool can_alias_all)
7658 {
7659 tree t;
7660
7661 if (to_type == error_mark_node)
7662 return error_mark_node;
7663
7664 /* If the pointed-to type has the may_alias attribute set, force
7665 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7666 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7667 can_alias_all = true;
7668
7669 /* In some cases, languages will have things that aren't a POINTER_TYPE
7670 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
7671 In that case, return that type without regard to the rest of our
7672 operands.
7673
7674 ??? This is a kludge, but consistent with the way this function has
7675 always operated and there doesn't seem to be a good way to avoid this
7676 at the moment. */
7677 if (TYPE_POINTER_TO (to_type) != 0
7678 && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
7679 return TYPE_POINTER_TO (to_type);
7680
7681 /* First, if we already have a type for pointers to TO_TYPE and it's
7682 the proper mode, use it. */
7683 for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
7684 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7685 return t;
7686
7687 t = make_node (POINTER_TYPE);
7688
7689 TREE_TYPE (t) = to_type;
7690 SET_TYPE_MODE (t, mode);
7691 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7692 TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
7693 TYPE_POINTER_TO (to_type) = t;
7694
7695 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7696 SET_TYPE_STRUCTURAL_EQUALITY (t);
7697 else if (TYPE_CANONICAL (to_type) != to_type)
7698 TYPE_CANONICAL (t)
7699 = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
7700 mode, can_alias_all);
7701
7702 /* Lay out the type. This function has many callers that are concerned
7703 with expression-construction, and this simplifies them all. */
7704 layout_type (t);
7705
7706 return t;
7707 }
7708
7709 /* By default build pointers in ptr_mode. */
7710
7711 tree
7712 build_pointer_type (tree to_type)
7713 {
7714 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7715 : TYPE_ADDR_SPACE (to_type);
7716 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7717 return build_pointer_type_for_mode (to_type, pointer_mode, false);
7718 }
7719
7720 /* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE. */
7721
7722 tree
7723 build_reference_type_for_mode (tree to_type, machine_mode mode,
7724 bool can_alias_all)
7725 {
7726 tree t;
7727
7728 if (to_type == error_mark_node)
7729 return error_mark_node;
7730
7731 /* If the pointed-to type has the may_alias attribute set, force
7732 a TYPE_REF_CAN_ALIAS_ALL pointer to be generated. */
7733 if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
7734 can_alias_all = true;
7735
7736 /* In some cases, languages will have things that aren't a REFERENCE_TYPE
7737 (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
7738 In that case, return that type without regard to the rest of our
7739 operands.
7740
7741 ??? This is a kludge, but consistent with the way this function has
7742 always operated and there doesn't seem to be a good way to avoid this
7743 at the moment. */
7744 if (TYPE_REFERENCE_TO (to_type) != 0
7745 && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
7746 return TYPE_REFERENCE_TO (to_type);
7747
7748 /* First, if we already have a type for pointers to TO_TYPE and it's
7749 the proper mode, use it. */
7750 for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
7751 if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
7752 return t;
7753
7754 t = make_node (REFERENCE_TYPE);
7755
7756 TREE_TYPE (t) = to_type;
7757 SET_TYPE_MODE (t, mode);
7758 TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
7759 TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
7760 TYPE_REFERENCE_TO (to_type) = t;
7761
7762 if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
7763 SET_TYPE_STRUCTURAL_EQUALITY (t);
7764 else if (TYPE_CANONICAL (to_type) != to_type)
7765 TYPE_CANONICAL (t)
7766 = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
7767 mode, can_alias_all);
7768
7769 layout_type (t);
7770
7771 return t;
7772 }
7773
7774
7775 /* Build the node for the type of references-to-TO_TYPE by default
7776 in ptr_mode. */
7777
7778 tree
7779 build_reference_type (tree to_type)
7780 {
7781 addr_space_t as = to_type == error_mark_node ? ADDR_SPACE_GENERIC
7782 : TYPE_ADDR_SPACE (to_type);
7783 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
7784 return build_reference_type_for_mode (to_type, pointer_mode, false);
7785 }
7786
7787 #define MAX_INT_CACHED_PREC \
7788 (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
7789 static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];
7790
7791 /* Builds a signed or unsigned integer type of precision PRECISION.
7792 Used for C bitfields whose precision does not match that of
7793 built-in target types. */
7794 tree
7795 build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
7796 int unsignedp)
7797 {
7798 tree itype, ret;
7799
7800 if (unsignedp)
7801 unsignedp = MAX_INT_CACHED_PREC + 1;
7802
7803 if (precision <= MAX_INT_CACHED_PREC)
7804 {
7805 itype = nonstandard_integer_type_cache[precision + unsignedp];
7806 if (itype)
7807 return itype;
7808 }
7809
7810 itype = make_node (INTEGER_TYPE);
7811 TYPE_PRECISION (itype) = precision;
7812
7813 if (unsignedp)
7814 fixup_unsigned_type (itype);
7815 else
7816 fixup_signed_type (itype);
7817
7818 ret = itype;
7819 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
7820 ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
7821 if (precision <= MAX_INT_CACHED_PREC)
7822 nonstandard_integer_type_cache[precision + unsignedp] = ret;
7823
7824 return ret;
7825 }
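
/* Illustrative sketch, not part of the original sources: building the type of
   a 24-bit unsigned bit-field, which matches no built-in target type.  The
   example_* name is made up; kept under #if 0.  */
#if 0
static void
example_build_nonstandard_integer_type (void)
{
  tree u24 = build_nonstandard_integer_type (24, /*unsignedp=*/1);
  gcc_assert (TREE_CODE (u24) == INTEGER_TYPE
              && TYPE_PRECISION (u24) == 24
              && TYPE_UNSIGNED (u24));
  /* Precisions up to MAX_INT_CACHED_PREC are cached, so asking again
     returns the same node.  */
  gcc_assert (build_nonstandard_integer_type (24, 1) == u24);
}
#endif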
7826
7827 /* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
7828 or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL. If SHARED
7829 is true, reuse such a type that has already been constructed. */
7830
7831 static tree
7832 build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
7833 {
7834 tree itype = make_node (INTEGER_TYPE);
7835 inchash::hash hstate;
7836
7837 TREE_TYPE (itype) = type;
7838
7839 TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
7840 TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;
7841
7842 TYPE_PRECISION (itype) = TYPE_PRECISION (type);
7843 SET_TYPE_MODE (itype, TYPE_MODE (type));
7844 TYPE_SIZE (itype) = TYPE_SIZE (type);
7845 TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
7846 TYPE_ALIGN (itype) = TYPE_ALIGN (type);
7847 TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);
7848
7849 if (!shared)
7850 return itype;
7851
7852 if ((TYPE_MIN_VALUE (itype)
7853 && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
7854 || (TYPE_MAX_VALUE (itype)
7855 && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
7856 {
7857 /* Since we cannot reliably merge this type, we need to compare it using
7858 structural equality checks. */
7859 SET_TYPE_STRUCTURAL_EQUALITY (itype);
7860 return itype;
7861 }
7862
7863 inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
7864 inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
7865 hstate.merge_hash (TYPE_HASH (type));
7866 itype = type_hash_canon (hstate.end (), itype);
7867
7868 return itype;
7869 }
7870
7871 /* Wrapper around build_range_type_1 with SHARED set to true. */
7872
7873 tree
7874 build_range_type (tree type, tree lowval, tree highval)
7875 {
7876 return build_range_type_1 (type, lowval, highval, true);
7877 }
7878
7879 /* Wrapper around build_range_type_1 with SHARED set to false. */
7880
7881 tree
7882 build_nonshared_range_type (tree type, tree lowval, tree highval)
7883 {
7884 return build_range_type_1 (type, lowval, highval, false);
7885 }
7886
7887 /* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
7888 MAXVAL should be the maximum value in the domain
7889 (one less than the length of the array).
7890
7891 The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
7892 We don't enforce this limit; that is up to the caller (e.g. language front end).
7893 The limit exists because the result is a signed type and we don't handle
7894 sizes that use more than one HOST_WIDE_INT. */
7895
7896 tree
7897 build_index_type (tree maxval)
7898 {
7899 return build_range_type (sizetype, size_zero_node, maxval);
7900 }
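
/* Illustrative sketch, not part of the original sources: the index type of a
   10-element array is the sizetype range 0 .. 9, and an explicit subrange
   (e.g. a Pascal-style 1 .. 10 over int) is built the same way through
   build_range_type.  The example_* name is made up; kept under #if 0.  */
#if 0
static void
example_build_index_and_range_types (void)
{
  tree idx = build_index_type (size_int (9));
  gcc_assert (TREE_CODE (idx) == INTEGER_TYPE
              && integer_zerop (TYPE_MIN_VALUE (idx))
              && tree_to_uhwi (TYPE_MAX_VALUE (idx)) == 9);

  tree rng = build_range_type (integer_type_node,
                               build_int_cst (integer_type_node, 1),
                               build_int_cst (integer_type_node, 10));
  gcc_assert (TREE_TYPE (rng) == integer_type_node
              && TYPE_PRECISION (rng) == TYPE_PRECISION (integer_type_node));
}
#endif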
7901
7902 /* Return true if the debug information for TYPE, a subtype, should be emitted
7903 as a subrange type. If so, set LOWVAL to the low bound and HIGHVAL to the
7904 high bound, respectively. Return false when TYPE is merely a copy of its base
7905 type, since a subrange type would then needlessly obfuscate the debug info. */
7906
7907 bool
7908 subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
7909 {
7910 tree base_type = TREE_TYPE (type), low, high;
7911
7912 /* Subrange types have a base type which is an integral type. */
7913 if (!INTEGRAL_TYPE_P (base_type))
7914 return false;
7915
7916 /* Get the real bounds of the subtype. */
7917 if (lang_hooks.types.get_subrange_bounds)
7918 lang_hooks.types.get_subrange_bounds (type, &low, &high);
7919 else
7920 {
7921 low = TYPE_MIN_VALUE (type);
7922 high = TYPE_MAX_VALUE (type);
7923 }
7924
7925 /* If the type and its base type have the same representation and the same
7926 name, then the type is not a subrange but a copy of the base type. */
7927 if ((TREE_CODE (base_type) == INTEGER_TYPE
7928 || TREE_CODE (base_type) == BOOLEAN_TYPE)
7929 && int_size_in_bytes (type) == int_size_in_bytes (base_type)
7930 && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
7931 && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
7932 && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
7933 return false;
7934
7935 if (lowval)
7936 *lowval = low;
7937 if (highval)
7938 *highval = high;
7939 return true;
7940 }
7941
7942 /* Construct, lay out and return the type of arrays of elements with ELT_TYPE
7943 and number of elements specified by the range of values of INDEX_TYPE.
7944 If SHARED is true, reuse such a type that has already been constructed. */
7945
7946 static tree
7947 build_array_type_1 (tree elt_type, tree index_type, bool shared)
7948 {
7949 tree t;
7950
7951 if (TREE_CODE (elt_type) == FUNCTION_TYPE)
7952 {
7953 error ("arrays of functions are not meaningful");
7954 elt_type = integer_type_node;
7955 }
7956
7957 t = make_node (ARRAY_TYPE);
7958 TREE_TYPE (t) = elt_type;
7959 TYPE_DOMAIN (t) = index_type;
7960 TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
7961 layout_type (t);
7962
7963 /* If the element type is incomplete at this point we get marked for
7964 structural equality. Do not record these types in the canonical
7965 type hashtable. */
7966 if (TYPE_STRUCTURAL_EQUALITY_P (t))
7967 return t;
7968
7969 if (shared)
7970 {
7971 inchash::hash hstate;
7972 hstate.add_object (TYPE_HASH (elt_type));
7973 if (index_type)
7974 hstate.add_object (TYPE_HASH (index_type));
7975 t = type_hash_canon (hstate.end (), t);
7976 }
7977
7978 if (TYPE_CANONICAL (t) == t)
7979 {
7980 if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
7981 || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
7982 SET_TYPE_STRUCTURAL_EQUALITY (t);
7983 else if (TYPE_CANONICAL (elt_type) != elt_type
7984 || (index_type && TYPE_CANONICAL (index_type) != index_type))
7985 TYPE_CANONICAL (t)
7986 = build_array_type_1 (TYPE_CANONICAL (elt_type),
7987 index_type
7988 ? TYPE_CANONICAL (index_type) : NULL_TREE,
7989 shared);
7990 }
7991
7992 return t;
7993 }
7994
7995 /* Wrapper around build_array_type_1 with SHARED set to true. */
7996
7997 tree
7998 build_array_type (tree elt_type, tree index_type)
7999 {
8000 return build_array_type_1 (elt_type, index_type, true);
8001 }
8002
8003 /* Wrapper around build_array_type_1 with SHARED set to false. */
8004
8005 tree
8006 build_nonshared_array_type (tree elt_type, tree index_type)
8007 {
8008 return build_array_type_1 (elt_type, index_type, false);
8009 }
8010
8011 /* Return a representation of ELT_TYPE[NELTS], using indices of type
8012 sizetype. */
8013
8014 tree
8015 build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
8016 {
8017 return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
8018 }
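
/* Illustrative sketch, not part of the original sources: two equivalent ways
   of building "int[10]"; the type hash table should hand back the same node
   for both.  The example_* name is made up; kept under #if 0.  */
#if 0
static void
example_build_array_types (void)
{
  tree a = build_array_type_nelts (integer_type_node, 10);
  gcc_assert (TREE_CODE (a) == ARRAY_TYPE
              && TREE_TYPE (a) == integer_type_node
              && tree_to_uhwi (TYPE_MAX_VALUE (TYPE_DOMAIN (a))) == 9);

  /* The same type, spelled with an explicit index type.  */
  tree b = build_array_type (integer_type_node,
                             build_index_type (size_int (9)));
  gcc_assert (a == b);
}
#endif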
8019
8020 /* Recursively examines the array elements of TYPE, until a non-array
8021 element type is found. */
8022
8023 tree
8024 strip_array_types (tree type)
8025 {
8026 while (TREE_CODE (type) == ARRAY_TYPE)
8027 type = TREE_TYPE (type);
8028
8029 return type;
8030 }
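
/* Illustrative sketch, not part of the original sources: strip_array_types
   peels any number of array layers, e.g. "int[3][4]" down to "int".  The
   example_* name is made up; kept under #if 0.  */
#if 0
static void
example_strip_array_types (void)
{
  tree inner = build_array_type_nelts (integer_type_node, 4); /* int[4] */
  tree outer = build_array_type_nelts (inner, 3);             /* int[3][4] */
  gcc_assert (strip_array_types (outer) == integer_type_node);
}
#endif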
8031
8032 /* Computes the canonical argument types from the argument type list
8033 ARGTYPES.
8034
8035 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true
8036 on entry to this function or any of the ARGTYPES are
8037 structural.
8038
8039 Upon return, *ANY_NONCANONICAL_P will be true iff either it was
8040 true on entry to this function or any of the ARGTYPES are
8041 non-canonical.
8042
8043 Returns a canonical argument list, which may be ARGTYPES when the
8044 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is
8045 true) or would not differ from ARGTYPES. */
8046
8047 static tree
8048 maybe_canonicalize_argtypes (tree argtypes,
8049 bool *any_structural_p,
8050 bool *any_noncanonical_p)
8051 {
8052 tree arg;
8053 bool any_noncanonical_argtypes_p = false;
8054
8055 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg))
8056 {
8057 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node)
8058 /* Fail gracefully by stating that the type is structural. */
8059 *any_structural_p = true;
8060 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg)))
8061 *any_structural_p = true;
8062 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg)
8063 || TREE_PURPOSE (arg))
8064 /* If the argument has a default argument, we consider it
8065 non-canonical even though the type itself is canonical.
8066 That way, different variants of function and method types
8067 with default arguments will all point to the variant with
8068 no defaults as their canonical type. */
8069 any_noncanonical_argtypes_p = true;
8070 }
8071
8072 if (*any_structural_p)
8073 return argtypes;
8074
8075 if (any_noncanonical_argtypes_p)
8076 {
8077 /* Build the canonical list of argument types. */
8078 tree canon_argtypes = NULL_TREE;
8079 bool is_void = false;
8080
8081 for (arg = argtypes; arg; arg = TREE_CHAIN (arg))
8082 {
8083 if (arg == void_list_node)
8084 is_void = true;
8085 else
8086 canon_argtypes = tree_cons (NULL_TREE,
8087 TYPE_CANONICAL (TREE_VALUE (arg)),
8088 canon_argtypes);
8089 }
8090
8091 canon_argtypes = nreverse (canon_argtypes);
8092 if (is_void)
8093 canon_argtypes = chainon (canon_argtypes, void_list_node);
8094
8095 /* There is a non-canonical type. */
8096 *any_noncanonical_p = true;
8097 return canon_argtypes;
8098 }
8099
8100 /* The canonical argument types are the same as ARGTYPES. */
8101 return argtypes;
8102 }
8103
8104 /* Construct, lay out and return
8105 the type of functions returning type VALUE_TYPE
8106 given arguments of types ARG_TYPES.
8107 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs
8108 are data type nodes for the arguments of the function.
8109 If such a type has already been constructed, reuse it. */
8110
8111 tree
8112 build_function_type (tree value_type, tree arg_types)
8113 {
8114 tree t;
8115 inchash::hash hstate;
8116 bool any_structural_p, any_noncanonical_p;
8117 tree canon_argtypes;
8118
8119 if (TREE_CODE (value_type) == FUNCTION_TYPE)
8120 {
8121 error ("function return type cannot be function");
8122 value_type = integer_type_node;
8123 }
8124
8125 /* Make a node of the sort we want. */
8126 t = make_node (FUNCTION_TYPE);
8127 TREE_TYPE (t) = value_type;
8128 TYPE_ARG_TYPES (t) = arg_types;
8129
8130 /* If we already have such a type, use the old one. */
8131 hstate.add_object (TYPE_HASH (value_type));
8132 type_hash_list (arg_types, hstate);
8133 t = type_hash_canon (hstate.end (), t);
8134
8135 /* Set up the canonical type. */
8136 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type);
8137 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type;
8138 canon_argtypes = maybe_canonicalize_argtypes (arg_types,
8139 &any_structural_p,
8140 &any_noncanonical_p);
8141 if (any_structural_p)
8142 SET_TYPE_STRUCTURAL_EQUALITY (t);
8143 else if (any_noncanonical_p)
8144 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type),
8145 canon_argtypes);
8146
8147 if (!COMPLETE_TYPE_P (t))
8148 layout_type (t);
8149 return t;
8150 }
8151
8152 /* Build a function type. The RETURN_TYPE is the type returned by the
8153 function. If VAARGS is set, no void_type_node is appended to the
8154 list. ARGP must always be terminated by a NULL_TREE. */
8155
8156 static tree
8157 build_function_type_list_1 (bool vaargs, tree return_type, va_list argp)
8158 {
8159 tree t, args, last;
8160
8161 t = va_arg (argp, tree);
8162 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree))
8163 args = tree_cons (NULL_TREE, t, args);
8164
8165 if (vaargs)
8166 {
8167 last = args;
8168 if (args != NULL_TREE)
8169 args = nreverse (args);
8170 gcc_assert (last != void_list_node);
8171 }
8172 else if (args == NULL_TREE)
8173 args = void_list_node;
8174 else
8175 {
8176 last = args;
8177 args = nreverse (args);
8178 TREE_CHAIN (last) = void_list_node;
8179 }
8180 args = build_function_type (return_type, args);
8181
8182 return args;
8183 }
8184
8185 /* Build a function type. The RETURN_TYPE is the type returned by the
8186 function. If additional arguments are provided, they are
8187 additional argument types. The list of argument types must always
8188 be terminated by NULL_TREE. */
8189
8190 tree
8191 build_function_type_list (tree return_type, ...)
8192 {
8193 tree args;
8194 va_list p;
8195
8196 va_start (p, return_type);
8197 args = build_function_type_list_1 (false, return_type, p);
8198 va_end (p);
8199 return args;
8200 }
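
/* Illustrative sketch, not part of the original sources: the type of a
   function "double f (int, double)" built with the NULL_TREE-terminated
   convenience interface.  The example_* name is made up; kept under #if 0.  */
#if 0
static void
example_build_function_type_list (void)
{
  tree fntype = build_function_type_list (double_type_node,
                                          integer_type_node,
                                          double_type_node,
                                          NULL_TREE);
  gcc_assert (TREE_CODE (fntype) == FUNCTION_TYPE
              && TREE_TYPE (fntype) == double_type_node
              && TREE_VALUE (TYPE_ARG_TYPES (fntype)) == integer_type_node);
}
#endif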
8201
8202 /* Build a variable argument function type. The RETURN_TYPE is the
8203 type returned by the function. If additional arguments are provided,
8204 they are additional argument types. The list of argument types must
8205 always be terminated by NULL_TREE. */
8206
8207 tree
8208 build_varargs_function_type_list (tree return_type, ...)
8209 {
8210 tree args;
8211 va_list p;
8212
8213 va_start (p, return_type);
8214 args = build_function_type_list_1 (true, return_type, p);
8215 va_end (p);
8216
8217 return args;
8218 }
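
/* Illustrative sketch, not part of the original sources: a printf-like type
   "int (const char *, ...)".  Because the type takes varargs, its
   TYPE_ARG_TYPES list is not terminated by void_list_node.  The example_*
   name is made up; kept under #if 0.  */
#if 0
static void
example_build_varargs_function_type (void)
{
  tree cc = build_qualified_type (char_type_node, TYPE_QUAL_CONST);
  tree fntype = build_varargs_function_type_list (integer_type_node,
                                                  build_pointer_type (cc),
                                                  NULL_TREE);
  /* Exactly one named argument and no trailing void_list_node.  */
  gcc_assert (TREE_CHAIN (TYPE_ARG_TYPES (fntype)) == NULL_TREE);
}
#endif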
8219
8220 /* Build a function type. RETURN_TYPE is the type returned by the
8221 function; VAARGS indicates whether the function takes varargs. The
8222 function takes N named arguments, the types of which are provided in
8223 ARG_TYPES. */
8224
8225 static tree
8226 build_function_type_array_1 (bool vaargs, tree return_type, int n,
8227 tree *arg_types)
8228 {
8229 int i;
8230 tree t = vaargs ? NULL_TREE : void_list_node;
8231
8232 for (i = n - 1; i >= 0; i--)
8233 t = tree_cons (NULL_TREE, arg_types[i], t);
8234
8235 return build_function_type (return_type, t);
8236 }
8237
8238 /* Build a function type. RETURN_TYPE is the type returned by the
8239 function. The function takes N named arguments, the types of which
8240 are provided in ARG_TYPES. */
8241
8242 tree
8243 build_function_type_array (tree return_type, int n, tree *arg_types)
8244 {
8245 return build_function_type_array_1 (false, return_type, n, arg_types);
8246 }
8247
8248 /* Build a variable argument function type. RETURN_TYPE is the type
8249 returned by the function. The function takes N named arguments, the
8250 types of which are provided in ARG_TYPES. */
8251
8252 tree
8253 build_varargs_function_type_array (tree return_type, int n, tree *arg_types)
8254 {
8255 return build_function_type_array_1 (true, return_type, n, arg_types);
8256 }
8257
8258 /* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE)
8259 and ARGTYPES (a TREE_LIST) are the return type and argument types
8260 for the method. An implicit additional parameter (of type
8261 pointer-to-BASETYPE) is added to the ARGTYPES. */
8262
8263 tree
8264 build_method_type_directly (tree basetype,
8265 tree rettype,
8266 tree argtypes)
8267 {
8268 tree t;
8269 tree ptype;
8270 inchash::hash hstate;
8271 bool any_structural_p, any_noncanonical_p;
8272 tree canon_argtypes;
8273
8274 /* Make a node of the sort we want. */
8275 t = make_node (METHOD_TYPE);
8276
8277 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8278 TREE_TYPE (t) = rettype;
8279 ptype = build_pointer_type (basetype);
8280
8281 /* The actual arglist for this function includes a "hidden" argument
8282 which is "this". Put it into the list of argument types. */
8283 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
8284 TYPE_ARG_TYPES (t) = argtypes;
8285
8286 /* If we already have such a type, use the old one. */
8287 hstate.add_object (TYPE_HASH (basetype));
8288 hstate.add_object (TYPE_HASH (rettype));
8289 type_hash_list (argtypes, hstate);
8290 t = type_hash_canon (hstate.end (), t);
8291
8292 /* Set up the canonical type. */
8293 any_structural_p
8294 = (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8295 || TYPE_STRUCTURAL_EQUALITY_P (rettype));
8296 any_noncanonical_p
8297 = (TYPE_CANONICAL (basetype) != basetype
8298 || TYPE_CANONICAL (rettype) != rettype);
8299 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes),
8300 &any_structural_p,
8301 &any_noncanonical_p);
8302 if (any_structural_p)
8303 SET_TYPE_STRUCTURAL_EQUALITY (t);
8304 else if (any_noncanonical_p)
8305 TYPE_CANONICAL (t)
8306 = build_method_type_directly (TYPE_CANONICAL (basetype),
8307 TYPE_CANONICAL (rettype),
8308 canon_argtypes);
8309 if (!COMPLETE_TYPE_P (t))
8310 layout_type (t);
8311
8312 return t;
8313 }
8314
8315 /* Construct, lay out and return the type of methods belonging to class
8316 BASETYPE and whose arguments and values are described by TYPE.
8317 If that type exists already, reuse it.
8318 TYPE must be a FUNCTION_TYPE node. */
8319
8320 tree
8321 build_method_type (tree basetype, tree type)
8322 {
8323 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE);
8324
8325 return build_method_type_directly (basetype,
8326 TREE_TYPE (type),
8327 TYPE_ARG_TYPES (type));
8328 }
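
/* Illustrative sketch, not part of the original sources: given some class
   type KLASS (a RECORD_TYPE assumed to exist already), wrapping a plain
   FUNCTION_TYPE into a METHOD_TYPE prepends the implicit "this" argument.
   The example_* name is made up; kept under #if 0.  */
#if 0
static void
example_build_method_type (tree klass)
{
  tree fntype = build_function_type_list (void_type_node,
                                          integer_type_node, NULL_TREE);
  tree mtype = build_method_type (klass, fntype);
  gcc_assert (TREE_CODE (mtype) == METHOD_TYPE
              && TYPE_METHOD_BASETYPE (mtype) == TYPE_MAIN_VARIANT (klass)
              /* The first argument is the hidden pointer-to-KLASS.  */
              && TREE_CODE (TREE_VALUE (TYPE_ARG_TYPES (mtype)))
                 == POINTER_TYPE);
}
#endif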
8329
8330 /* Construct, lay out and return the type of offsets to a value
8331 of type TYPE, within an object of type BASETYPE.
8332 If a suitable offset type exists already, reuse it. */
8333
8334 tree
8335 build_offset_type (tree basetype, tree type)
8336 {
8337 tree t;
8338 inchash::hash hstate;
8339
8340 /* Make a node of the sort we want. */
8341 t = make_node (OFFSET_TYPE);
8342
8343 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
8344 TREE_TYPE (t) = type;
8345
8346 /* If we already have such a type, use the old one. */
8347 hstate.add_object (TYPE_HASH (basetype));
8348 hstate.add_object (TYPE_HASH (type));
8349 t = type_hash_canon (hstate.end (), t);
8350
8351 if (!COMPLETE_TYPE_P (t))
8352 layout_type (t);
8353
8354 if (TYPE_CANONICAL (t) == t)
8355 {
8356 if (TYPE_STRUCTURAL_EQUALITY_P (basetype)
8357 || TYPE_STRUCTURAL_EQUALITY_P (type))
8358 SET_TYPE_STRUCTURAL_EQUALITY (t);
8359 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype
8360 || TYPE_CANONICAL (type) != type)
8361 TYPE_CANONICAL (t)
8362 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)),
8363 TYPE_CANONICAL (type));
8364 }
8365
8366 return t;
8367 }
8368
8369 /* Create a complex type whose components are COMPONENT_TYPE. */
8370
8371 tree
8372 build_complex_type (tree component_type)
8373 {
8374 tree t;
8375 inchash::hash hstate;
8376
8377 gcc_assert (INTEGRAL_TYPE_P (component_type)
8378 || SCALAR_FLOAT_TYPE_P (component_type)
8379 || FIXED_POINT_TYPE_P (component_type));
8380
8381 /* Make a node of the sort we want. */
8382 t = make_node (COMPLEX_TYPE);
8383
8384 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type);
8385
8386 /* If we already have such a type, use the old one. */
8387 hstate.add_object (TYPE_HASH (component_type));
8388 t = type_hash_canon (hstate.end (), t);
8389
8390 if (!COMPLETE_TYPE_P (t))
8391 layout_type (t);
8392
8393 if (TYPE_CANONICAL (t) == t)
8394 {
8395 if (TYPE_STRUCTURAL_EQUALITY_P (component_type))
8396 SET_TYPE_STRUCTURAL_EQUALITY (t);
8397 else if (TYPE_CANONICAL (component_type) != component_type)
8398 TYPE_CANONICAL (t)
8399 = build_complex_type (TYPE_CANONICAL (component_type));
8400 }
8401
8402 /* We need to create a name, since complex is a fundamental type. */
8403 if (! TYPE_NAME (t))
8404 {
8405 const char *name;
8406 if (component_type == char_type_node)
8407 name = "complex char";
8408 else if (component_type == signed_char_type_node)
8409 name = "complex signed char";
8410 else if (component_type == unsigned_char_type_node)
8411 name = "complex unsigned char";
8412 else if (component_type == short_integer_type_node)
8413 name = "complex short int";
8414 else if (component_type == short_unsigned_type_node)
8415 name = "complex short unsigned int";
8416 else if (component_type == integer_type_node)
8417 name = "complex int";
8418 else if (component_type == unsigned_type_node)
8419 name = "complex unsigned int";
8420 else if (component_type == long_integer_type_node)
8421 name = "complex long int";
8422 else if (component_type == long_unsigned_type_node)
8423 name = "complex long unsigned int";
8424 else if (component_type == long_long_integer_type_node)
8425 name = "complex long long int";
8426 else if (component_type == long_long_unsigned_type_node)
8427 name = "complex long long unsigned int";
8428 else
8429 name = 0;
8430
8431 if (name != 0)
8432 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL,
8433 get_identifier (name), t);
8434 }
8435
8436 return build_qualified_type (t, TYPE_QUALS (component_type));
8437 }
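
/* Illustrative sketch, not part of the original sources: requesting the
   complex type over double normally hands back the shared
   complex_double_type_node via the type hash table.  The example_* name is
   made up; kept under #if 0.  */
#if 0
static void
example_build_complex_type (void)
{
  tree c = build_complex_type (double_type_node);
  gcc_assert (TREE_CODE (c) == COMPLEX_TYPE
              && TREE_TYPE (c) == double_type_node);
}
#endif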
8438
8439 /* If TYPE is a real or complex floating-point type and the target
8440 does not directly support arithmetic on TYPE then return the wider
8441 type to be used for arithmetic on TYPE. Otherwise, return
8442 NULL_TREE. */
8443
8444 tree
8445 excess_precision_type (tree type)
8446 {
8447 if (flag_excess_precision != EXCESS_PRECISION_FAST)
8448 {
8449 int flt_eval_method = TARGET_FLT_EVAL_METHOD;
8450 switch (TREE_CODE (type))
8451 {
8452 case REAL_TYPE:
8453 switch (flt_eval_method)
8454 {
8455 case 1:
8456 if (TYPE_MODE (type) == TYPE_MODE (float_type_node))
8457 return double_type_node;
8458 break;
8459 case 2:
8460 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)
8461 || TYPE_MODE (type) == TYPE_MODE (double_type_node))
8462 return long_double_type_node;
8463 break;
8464 default:
8465 gcc_unreachable ();
8466 }
8467 break;
8468 case COMPLEX_TYPE:
8469 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE)
8470 return NULL_TREE;
8471 switch (flt_eval_method)
8472 {
8473 case 1:
8474 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node))
8475 return complex_double_type_node;
8476 break;
8477 case 2:
8478 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)
8479 || (TYPE_MODE (TREE_TYPE (type))
8480 == TYPE_MODE (double_type_node)))
8481 return complex_long_double_type_node;
8482 break;
8483 default:
8484 gcc_unreachable ();
8485 }
8486 break;
8487 default:
8488 break;
8489 }
8490 }
8491 return NULL_TREE;
8492 }
8493 \f
8494 /* Return OP, stripped of any conversions to wider types as much as is safe.
8495 Converting the value back to OP's type makes a value equivalent to OP.
8496
8497 If FOR_TYPE is nonzero, we return a value which, if converted to
8498 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE.
8499
8500 OP must have integer, real or enumeral type. Pointers are not allowed!
8501
8502 There are some cases where the obvious value we could return
8503 would regenerate to OP if converted to OP's type,
8504 but would not extend like OP to wider types.
8505 If FOR_TYPE indicates such extension is contemplated, we eschew such values.
8506 For example, if OP is (unsigned short)(signed char)-1,
8507 we avoid returning (signed char)-1 if FOR_TYPE is int,
8508 even though extending that to an unsigned short would regenerate OP,
8509 since the result of extending (signed char)-1 to (int)
8510 is different from (int) OP. */
8511
8512 tree
8513 get_unwidened (tree op, tree for_type)
8514 {
8515 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */
8516 tree type = TREE_TYPE (op);
8517 unsigned final_prec
8518 = TYPE_PRECISION (for_type != 0 ? for_type : type);
8519 int uns
8520 = (for_type != 0 && for_type != type
8521 && final_prec > TYPE_PRECISION (type)
8522 && TYPE_UNSIGNED (type));
8523 tree win = op;
8524
8525 while (CONVERT_EXPR_P (op))
8526 {
8527 int bitschange;
8528
8529 /* TYPE_PRECISION on vector types has different meaning
8530 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions,
8531 so avoid them here. */
8532 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE)
8533 break;
8534
8535 bitschange = TYPE_PRECISION (TREE_TYPE (op))
8536 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)));
8537
8538 /* Truncations are many-one so cannot be removed, unless we are later
8539 going to truncate down even further. */
8540 if (bitschange < 0
8541 && final_prec > TYPE_PRECISION (TREE_TYPE (op)))
8542 break;
8543
8544 /* See what's inside this conversion. If we decide to strip it,
8545 we will set WIN. */
8546 op = TREE_OPERAND (op, 0);
8547
8548 /* If we have not stripped any zero-extensions (uns is 0),
8549 we can strip any kind of extension.
8550 If we have previously stripped a zero-extension,
8551 only zero-extensions can safely be stripped.
8552 Any extension can be stripped if the bits it would produce
8553 are all going to be discarded later by truncating to FOR_TYPE. */
8554
8555 if (bitschange > 0)
8556 {
8557 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op)))
8558 win = op;
8559 /* TYPE_UNSIGNED says whether this is a zero-extension.
8560 Let's avoid computing it if it does not affect WIN
8561 and if UNS will not be needed again. */
8562 if ((uns
8563 || CONVERT_EXPR_P (op))
8564 && TYPE_UNSIGNED (TREE_TYPE (op)))
8565 {
8566 uns = 1;
8567 win = op;
8568 }
8569 }
8570 }
8571
8572 /* If we finally reach a constant, see if it fits in FOR_TYPE and
8573 in that case convert it. */
8574 if (for_type
8575 && TREE_CODE (win) == INTEGER_CST
8576 && TREE_TYPE (win) != for_type
8577 && int_fits_type_p (win, for_type))
8578 win = fold_convert (for_type, win);
8579
8580 return win;
8581 }
8582 \f
8583 /* Return OP or a simpler expression for a narrower value
8584 which can be sign-extended or zero-extended to give back OP.
8585 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended
8586 or 0 if the value should be sign-extended. */
8587
8588 tree
8589 get_narrower (tree op, int *unsignedp_ptr)
8590 {
8591 int uns = 0;
8592 int first = 1;
8593 tree win = op;
8594 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op));
8595
8596 while (TREE_CODE (op) == NOP_EXPR)
8597 {
8598 int bitschange
8599 = (TYPE_PRECISION (TREE_TYPE (op))
8600 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))));
8601
8602 /* Truncations are many-one so cannot be removed. */
8603 if (bitschange < 0)
8604 break;
8605
8606 /* See what's inside this conversion. If we decide to strip it,
8607 we will set WIN. */
8608
8609 if (bitschange > 0)
8610 {
8611 op = TREE_OPERAND (op, 0);
8612 /* An extension: the outermost one can be stripped,
8613 but remember whether it is zero or sign extension. */
8614 if (first)
8615 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8616 /* Otherwise, if a sign extension has been stripped,
8617 only sign extensions can now be stripped;
8618 if a zero extension has been stripped, only zero-extensions. */
8619 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op)))
8620 break;
8621 first = 0;
8622 }
8623 else /* bitschange == 0 */
8624 {
8625 /* A change in nominal type can always be stripped, but we must
8626 preserve the unsignedness. */
8627 if (first)
8628 uns = TYPE_UNSIGNED (TREE_TYPE (op));
8629 first = 0;
8630 op = TREE_OPERAND (op, 0);
8631 /* Keep trying to narrow, but don't assign op to win if it
8632 would turn an integral type into something else. */
8633 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p)
8634 continue;
8635 }
8636
8637 win = op;
8638 }
8639
8640 if (TREE_CODE (op) == COMPONENT_REF
8641 /* Since type_for_size always gives an integer type. */
8642 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE
8643 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE
8644 /* Ensure field is laid out already. */
8645 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0
8646 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1))))
8647 {
8648 unsigned HOST_WIDE_INT innerprec
8649 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1)));
8650 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1))
8651 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1))));
8652 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp);
8653
8654 /* We can get this structure field in a narrower type that fits it,
8655 but the resulting extension to its nominal type (a fullword type)
8656 must satisfy the same conditions as for other extensions.
8657
8658 Do this only for fields that are aligned (not bit-fields),
8659 because when bit-field insns will be used there is no
8660 advantage in doing this. */
8661
8662 if (innerprec < TYPE_PRECISION (TREE_TYPE (op))
8663 && ! DECL_BIT_FIELD (TREE_OPERAND (op, 1))
8664 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1)))
8665 && type != 0)
8666 {
8667 if (first)
8668 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1));
8669 win = fold_convert (type, op);
8670 }
8671 }
8672
8673 *unsignedp_ptr = uns;
8674 return win;
8675 }
8676 \f
8677 /* Returns true if integer constant C has a value that is permissible
8678 for type TYPE (an INTEGER_TYPE). */
8679
8680 bool
8681 int_fits_type_p (const_tree c, const_tree type)
8682 {
8683 tree type_low_bound, type_high_bound;
8684 bool ok_for_low_bound, ok_for_high_bound;
8685 signop sgn_c = TYPE_SIGN (TREE_TYPE (c));
8686
8687 retry:
8688 type_low_bound = TYPE_MIN_VALUE (type);
8689 type_high_bound = TYPE_MAX_VALUE (type);
8690
8691 /* If at least one bound of the type is a constant integer, we can check
8692 ourselves and maybe make a decision. If no such decision is possible, but
8693 this type is a subtype, try checking against that. Otherwise, use
8694 fits_to_tree_p, which checks against the precision.
8695
8696 Compute the status for each possibly constant bound, returning false as soon
8697 as we find a constant bound that C does not satisfy. OK_FOR_LOW_BOUND and
8698 OK_FOR_HIGH_BOUND record whether the corresponding bound is a constant that C
8699 is known to satisfy. */
8700
8701 /* Check if c >= type_low_bound. */
8702 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST)
8703 {
8704 if (tree_int_cst_lt (c, type_low_bound))
8705 return false;
8706 ok_for_low_bound = true;
8707 }
8708 else
8709 ok_for_low_bound = false;
8710
8711 /* Check if c <= type_high_bound. */
8712 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST)
8713 {
8714 if (tree_int_cst_lt (type_high_bound, c))
8715 return false;
8716 ok_for_high_bound = true;
8717 }
8718 else
8719 ok_for_high_bound = false;
8720
8721 /* If the constant fits both bounds, the result is known. */
8722 if (ok_for_low_bound && ok_for_high_bound)
8723 return true;
8724
8725 /* Perform some generic filtering which may allow making a decision
8726 even if the bounds are not constant. First, negative integers
8727 never fit in unsigned types. */
8728 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c))
8729 return false;
8730
8731 /* Second, narrower types always fit in wider ones. */
8732 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c)))
8733 return true;
8734
8735 /* Third, unsigned integers with top bit set never fit signed types. */
8736 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED)
8737 {
8738 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1;
8739 if (prec < TYPE_PRECISION (TREE_TYPE (c)))
8740 {
8741 /* When a tree_cst is converted to a wide-int, the precision
8742 is taken from the type. However, if the precision of the
8743 mode underneath the type is smaller than that, it is
8744 possible that the value will not fit. The test below
8745 fails if any bit is set between the sign bit of the
8746 underlying mode and the top bit of the type. */
8747 if (wi::ne_p (wi::zext (c, prec - 1), c))
8748 return false;
8749 }
8750 else if (wi::neg_p (c))
8751 return false;
8752 }
8753
8754 /* If we haven't been able to decide at this point, there is nothing more
8755 we can check ourselves here. Look at the base type if we have one and it
8756 has the same precision. */
8757 if (TREE_CODE (type) == INTEGER_TYPE
8758 && TREE_TYPE (type) != 0
8759 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type)))
8760 {
8761 type = TREE_TYPE (type);
8762 goto retry;
8763 }
8764
8765 /* Or to fits_to_tree_p, if nothing else. */
8766 return wi::fits_to_tree_p (c, type);
8767 }
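
/* Illustrative sketch, not part of the original sources: a few typical
   answers from int_fits_type_p.  The example_* name is made up; kept under
   #if 0.  */
#if 0
static void
example_int_fits_type_p (void)
{
  tree c300 = build_int_cst (integer_type_node, 300);
  tree cm1 = build_int_cst (integer_type_node, -1);

  /* 300 is outside -128 .. 127 but fits any wider type.  */
  gcc_assert (!int_fits_type_p (c300, signed_char_type_node));
  gcc_assert (int_fits_type_p (c300, unsigned_type_node));

  /* Negative values never fit unsigned types.  */
  gcc_assert (!int_fits_type_p (cm1, unsigned_type_node));
}
#endif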
8768
8769 /* Stores bounds of an integer TYPE in MIN and MAX. If TYPE has non-constant
8770 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be
8771 represented (assuming two's-complement arithmetic) within the bit
8772 precision of the type are returned instead. */
8773
8774 void
8775 get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
8776 {
8777 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type)
8778 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST)
8779 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type));
8780 else
8781 {
8782 if (TYPE_UNSIGNED (type))
8783 mpz_set_ui (min, 0);
8784 else
8785 {
8786 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED);
8787 wi::to_mpz (mn, min, SIGNED);
8788 }
8789 }
8790
8791 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type)
8792 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST)
8793 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type));
8794 else
8795 {
8796 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
8797 wi::to_mpz (mn, max, TYPE_SIGN (type));
8798 }
8799 }
8800
8801 /* Return true if VAR is an automatic variable defined in function FN. */
8802
8803 bool
8804 auto_var_in_fn_p (const_tree var, const_tree fn)
8805 {
8806 return (DECL_P (var) && DECL_CONTEXT (var) == fn
8807 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var))
8808 || TREE_CODE (var) == PARM_DECL)
8809 && ! TREE_STATIC (var))
8810 || TREE_CODE (var) == LABEL_DECL
8811 || TREE_CODE (var) == RESULT_DECL));
8812 }
8813
8814 /* Subprogram of the following function. Called by walk_tree.
8815
8816 Return *TP if it is an automatic variable or parameter of the
8817 function passed in as DATA. */
8818
8819 static tree
8820 find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
8821 {
8822 tree fn = (tree) data;
8823
8824 if (TYPE_P (*tp))
8825 *walk_subtrees = 0;
8826
8827 else if (DECL_P (*tp)
8828 && auto_var_in_fn_p (*tp, fn))
8829 return *tp;
8830
8831 return NULL_TREE;
8832 }
8833
8834 /* Returns true if TYPE is, contains, or refers to a type with variable
8835 size. For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
8836 arguments, but not the return type. If FN is nonzero, only return
8837 true if a modifier of the type or position of FN is a variable or
8838 parameter inside FN.
8839
8840 This concept is more general than that of C99 'variably modified types':
8841 in C99, a struct type is never variably modified because a VLA may not
8842 appear as a structure member. However, in GNU C code like:
8843
8844 struct S { int i[f()]; };
8845
8846 is valid, and other languages may define similar constructs. */
8847
8848 bool
8849 variably_modified_type_p (tree type, tree fn)
8850 {
8851 tree t;
8852
8853 /* Test if T is either variable (if FN is zero) or an expression containing
8854 a variable in FN. If TYPE isn't gimplified, return true also if
8855 gimplify_one_sizepos would gimplify the expression into a local
8856 variable. */
8857 #define RETURN_TRUE_IF_VAR(T) \
8858 do { tree _t = (T); \
8859 if (_t != NULL_TREE \
8860 && _t != error_mark_node \
8861 && TREE_CODE (_t) != INTEGER_CST \
8862 && TREE_CODE (_t) != PLACEHOLDER_EXPR \
8863 && (!fn \
8864 || (!TYPE_SIZES_GIMPLIFIED (type) \
8865 && !is_gimple_sizepos (_t)) \
8866 || walk_tree (&_t, find_var_from_fn, fn, NULL))) \
8867 return true; } while (0)
8868
8869 if (type == error_mark_node)
8870 return false;
8871
8872 /* If TYPE itself has variable size, it is variably modified. */
8873 RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
8874 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));
8875
8876 switch (TREE_CODE (type))
8877 {
8878 case POINTER_TYPE:
8879 case REFERENCE_TYPE:
8880 case VECTOR_TYPE:
8881 if (variably_modified_type_p (TREE_TYPE (type), fn))
8882 return true;
8883 break;
8884
8885 case FUNCTION_TYPE:
8886 case METHOD_TYPE:
8887 /* If TYPE is a function type, it is variably modified if the
8888 return type is variably modified. */
8889 if (variably_modified_type_p (TREE_TYPE (type), fn))
8890 return true;
8891 break;
8892
8893 case INTEGER_TYPE:
8894 case REAL_TYPE:
8895 case FIXED_POINT_TYPE:
8896 case ENUMERAL_TYPE:
8897 case BOOLEAN_TYPE:
8898 /* Scalar types are variably modified if their end points
8899 aren't constant. */
8900 RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
8901 RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
8902 break;
8903
8904 case RECORD_TYPE:
8905 case UNION_TYPE:
8906 case QUAL_UNION_TYPE:
8907 /* We can't see if any of the fields are variably-modified by the
8908 definition we normally use, since that would produce infinite
8909 recursion via pointers. */
8910 /* This is variably modified if some field's type is. */
8911 for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
8912 if (TREE_CODE (t) == FIELD_DECL)
8913 {
8914 RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
8915 RETURN_TRUE_IF_VAR (DECL_SIZE (t));
8916 RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));
8917
8918 if (TREE_CODE (type) == QUAL_UNION_TYPE)
8919 RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
8920 }
8921 break;
8922
8923 case ARRAY_TYPE:
8924 /* Do not call ourselves to avoid infinite recursion. This is
8925 variably modified if the element type is. */
8926 RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
8927 RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
8928 break;
8929
8930 default:
8931 break;
8932 }
8933
8934 /* The current language may have other cases to check, but in general,
8935 all other types are not variably modified. */
8936 return lang_hooks.tree_inlining.var_mod_type_p (type, fn);
8937
8938 #undef RETURN_TRUE_IF_VAR
8939 }
8940
8941 /* Given a DECL or TYPE, return the scope in which it was declared, or
8942 NULL_TREE if there is no containing scope. */
8943
8944 tree
8945 get_containing_scope (const_tree t)
8946 {
8947 return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
8948 }
8949
8950 /* Return the innermost context enclosing DECL that is
8951 a FUNCTION_DECL, or zero if none. */
8952
8953 tree
8954 decl_function_context (const_tree decl)
8955 {
8956 tree context;
8957
8958 if (TREE_CODE (decl) == ERROR_MARK)
8959 return 0;
8960
8961 /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
8962 where we look up the function at runtime. Such functions always take
8963 a first argument of type 'pointer to real context'.
8964
8965 C++ should really be fixed to use DECL_CONTEXT for the real context,
8966 and use something else for the "virtual context". */
8967 else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
8968 context
8969 = TYPE_MAIN_VARIANT
8970 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
8971 else
8972 context = DECL_CONTEXT (decl);
8973
8974 while (context && TREE_CODE (context) != FUNCTION_DECL)
8975 {
8976 if (TREE_CODE (context) == BLOCK)
8977 context = BLOCK_SUPERCONTEXT (context);
8978 else
8979 context = get_containing_scope (context);
8980 }
8981
8982 return context;
8983 }
8984
8985 /* Return the innermost context enclosing DECL that is
8986 a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
8987 TYPE_DECLs and FUNCTION_DECLs are transparent to this function. */
8988
8989 tree
8990 decl_type_context (const_tree decl)
8991 {
8992 tree context = DECL_CONTEXT (decl);
8993
8994 while (context)
8995 switch (TREE_CODE (context))
8996 {
8997 case NAMESPACE_DECL:
8998 case TRANSLATION_UNIT_DECL:
8999 return NULL_TREE;
9000
9001 case RECORD_TYPE:
9002 case UNION_TYPE:
9003 case QUAL_UNION_TYPE:
9004 return context;
9005
9006 case TYPE_DECL:
9007 case FUNCTION_DECL:
9008 context = DECL_CONTEXT (context);
9009 break;
9010
9011 case BLOCK:
9012 context = BLOCK_SUPERCONTEXT (context);
9013 break;
9014
9015 default:
9016 gcc_unreachable ();
9017 }
9018
9019 return NULL_TREE;
9020 }
9021
9022 /* CALL is a CALL_EXPR. Return the declaration for the function
9023 called, or NULL_TREE if the called function cannot be
9024 determined. */
9025
9026 tree
9027 get_callee_fndecl (const_tree call)
9028 {
9029 tree addr;
9030
9031 if (call == error_mark_node)
9032 return error_mark_node;
9033
9034 /* It's invalid to call this function with anything but a
9035 CALL_EXPR. */
9036 gcc_assert (TREE_CODE (call) == CALL_EXPR);
9037
9038 /* The first operand to the CALL is the address of the function
9039 called. */
9040 addr = CALL_EXPR_FN (call);
9041
9042 /* If there is no function, return early. */
9043 if (addr == NULL_TREE)
9044 return NULL_TREE;
9045
9046 STRIP_NOPS (addr);
9047
9048 /* If this is a readonly function pointer, extract its initial value. */
9049 if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
9050 && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
9051 && DECL_INITIAL (addr))
9052 addr = DECL_INITIAL (addr);
9053
9054 /* If the address is just `&f' for some function `f', then we know
9055 that `f' is being called. */
9056 if (TREE_CODE (addr) == ADDR_EXPR
9057 && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
9058 return TREE_OPERAND (addr, 0);
9059
9060 /* We couldn't figure out what was being called. */
9061 return NULL_TREE;
9062 }
9063
9064 /* Print debugging information about tree nodes generated during the compile,
9065 and any language-specific information. */
9066
9067 void
9068 dump_tree_statistics (void)
9069 {
9070 if (GATHER_STATISTICS)
9071 {
9072 int i;
9073 int total_nodes, total_bytes;
9074 fprintf (stderr, "Kind Nodes Bytes\n");
9075 fprintf (stderr, "---------------------------------------\n");
9076 total_nodes = total_bytes = 0;
9077 for (i = 0; i < (int) all_kinds; i++)
9078 {
9079 fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
9080 tree_node_counts[i], tree_node_sizes[i]);
9081 total_nodes += tree_node_counts[i];
9082 total_bytes += tree_node_sizes[i];
9083 }
9084 fprintf (stderr, "---------------------------------------\n");
9085 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
9086 fprintf (stderr, "---------------------------------------\n");
9087 fprintf (stderr, "Code Nodes\n");
9088 fprintf (stderr, "----------------------------\n");
9089 for (i = 0; i < (int) MAX_TREE_CODES; i++)
9090 fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
9091 tree_code_counts[i]);
9092 fprintf (stderr, "----------------------------\n");
9093 ssanames_print_statistics ();
9094 phinodes_print_statistics ();
9095 }
9096 else
9097 fprintf (stderr, "(No per-node statistics)\n");
9098
9099 print_type_hash_statistics ();
9100 print_debug_expr_statistics ();
9101 print_value_expr_statistics ();
9102 lang_hooks.print_statistics ();
9103 }
9104 \f
9105 #define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"
9106
9107 /* Generate a crc32 of the most significant BITS bits of VALUE, folding
     them into CHKSUM. */
9108
9109 static unsigned
9110 crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
9111 {
9112 unsigned ix;
9113
9114 for (ix = bits; ix--; value <<= 1)
9115 {
9116 unsigned feedback;
9117
9118 feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
9119 chksum <<= 1;
9120 chksum ^= feedback;
9121 }
9122 return chksum;
9123 }
9124
9125 /* Generate a crc32 of a 32-bit unsigned. */
9126
9127 unsigned
9128 crc32_unsigned (unsigned chksum, unsigned value)
9129 {
9130 return crc32_unsigned_bits (chksum, value, 32);
9131 }
9132
9133 /* Generate a crc32 of a byte. */
9134
9135 unsigned
9136 crc32_byte (unsigned chksum, char byte)
9137 {
9138 return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
9139 }
9140
9141 /* Generate a crc32 of a string. */
9142
9143 unsigned
9144 crc32_string (unsigned chksum, const char *string)
9145 {
9146 do
9147 {
9148 chksum = crc32_byte (chksum, *string);
9149 }
9150 while (*string++);
9151 return chksum;
9152 }
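
/* Illustrative sketch, not part of the original sources: the three crc32
   helpers all feed crc32_unsigned_bits, an MSB-first CRC-32 with polynomial
   0x04c11db7, and can be chained through the running CHKSUM.  The example_*
   name is made up; kept under #if 0.  */
#if 0
static unsigned
example_crc32_chain (void)
{
  unsigned chksum = crc32_string (0, "tree.c");  /* includes the final NUL */
  chksum = crc32_unsigned (chksum, 2015);
  chksum = crc32_byte (chksum, 'x');
  return chksum;
}
#endif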
9153
9154 /* P is a string that will be used in a symbol. Mask out any characters
9155 that are not valid in that context. */
9156
9157 void
9158 clean_symbol_name (char *p)
9159 {
9160 for (; *p; p++)
9161 if (! (ISALNUM (*p)
9162 #ifndef NO_DOLLAR_IN_LABEL /* this for `$'; unlikely, but... -- kr */
9163 || *p == '$'
9164 #endif
9165 #ifndef NO_DOT_IN_LABEL /* this for `.'; unlikely, but... */
9166 || *p == '.'
9167 #endif
9168 ))
9169 *p = '_';
9170 }
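
/* Illustrative sketch, not part of the original sources: clean_symbol_name
   rewrites its argument in place, so it needs a writable buffer.  The
   example_* name is made up; kept under #if 0.  */
#if 0
static void
example_clean_symbol_name (void)
{
  char buf[] = "foo bar-baz.c";
  clean_symbol_name (buf);
  /* The space and '-' always become '_'; '.' survives only on targets that
     allow dots in labels (i.e. when NO_DOT_IN_LABEL is not defined).  */
}
#endif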
9171
9172 /* Generate a name for a special-purpose function.
9173 The generated name may need to be unique across the whole link.
9174 Changes to this function may also require corresponding changes to
9175 xstrdup_mask_random.
9176 TYPE is some string to identify the purpose of this function to the
9177 linker or collect2; it must start with an uppercase letter,
9178 one of:
9179 I - for constructors
9180 D - for destructors
9181 N - for C++ anonymous namespaces
9182 F - for DWARF unwind frame information. */
9183
9184 tree
9185 get_file_function_name (const char *type)
9186 {
9187 char *buf;
9188 const char *p;
9189 char *q;
9190
9191 /* If we already have a name we know to be unique, just use that. */
9192 if (first_global_object_name)
9193 p = q = ASTRDUP (first_global_object_name);
9194 /* If the target is handling the constructors/destructors, they
9195 will be local to this file and the name is only necessary for
9196 debugging purposes.
9197 We also assign sub_I and sub_D suffixes to constructors called from
9198 the global static constructors. These are always local. */
9199 else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
9200 || (strncmp (type, "sub_", 4) == 0
9201 && (type[4] == 'I' || type[4] == 'D')))
9202 {
9203 const char *file = main_input_filename;
9204 if (! file)
9205 file = LOCATION_FILE (input_location);
9206 /* Just use the file's basename, because the full pathname
9207 might be quite long. */
9208 p = q = ASTRDUP (lbasename (file));
9209 }
9210 else
9211 {
9212 /* Otherwise, the name must be unique across the entire link.
9213 We don't have anything that we know to be unique to this translation
9214 unit, so use what we do have and throw in some randomness. */
9215 unsigned len;
9216 const char *name = weak_global_object_name;
9217 const char *file = main_input_filename;
9218
9219 if (! name)
9220 name = "";
9221 if (! file)
9222 file = LOCATION_FILE (input_location);
9223
9224 len = strlen (file);
9225 q = (char *) alloca (9 + 17 + len + 1);
9226 memcpy (q, file, len + 1);
9227
9228 snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
9229 crc32_string (0, name), get_random_seed (false));
9230
9231 p = q;
9232 }
9233
9234 clean_symbol_name (q);
9235 buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
9236 + strlen (type));
9237
9238 /* Set up the name of the file-level functions we may need.
9239 Use a global object (which is already required to be unique over
9240 the program) rather than the file name (which imposes extra
9241 constraints). */
9242 sprintf (buf, FILE_FUNCTION_FORMAT, type, p);
9243
9244 return get_identifier (buf);
9245 }
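
/* Illustrative sketch, not part of the original sources: asking for a
   static-constructor name yields an identifier of the form
   "_GLOBAL__I_<name>", where <name> comes from the first global object name,
   the input file's basename, or a randomized fallback.  Assumes a fully
   initialized compiler (input_location, random seed, ...).  The example_*
   name is made up; kept under #if 0.  */
#if 0
static void
example_get_file_function_name (void)
{
  tree id = get_file_function_name ("I");
  gcc_assert (strncmp (IDENTIFIER_POINTER (id), "_GLOBAL__I_", 11) == 0);
}
#endif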
9246 \f
9247 #if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)
9248
9249 /* Complain that the tree code of NODE does not match the expected 0
9250 terminated list of trailing codes. The trailing code list can be
9251 empty, for a more vague error message. FILE, LINE, and FUNCTION
9252 are of the caller. */
9253
9254 void
9255 tree_check_failed (const_tree node, const char *file,
9256 int line, const char *function, ...)
9257 {
9258 va_list args;
9259 const char *buffer;
9260 unsigned length = 0;
9261 enum tree_code code;
9262
9263 va_start (args, function);
9264 while ((code = (enum tree_code) va_arg (args, int)))
9265 length += 4 + strlen (get_tree_code_name (code));
9266 va_end (args);
9267 if (length)
9268 {
9269 char *tmp;
9270 va_start (args, function);
9271 length += strlen ("expected ");
9272 buffer = tmp = (char *) alloca (length);
9273 length = 0;
9274 while ((code = (enum tree_code) va_arg (args, int)))
9275 {
9276 const char *prefix = length ? " or " : "expected ";
9277
9278 strcpy (tmp + length, prefix);
9279 length += strlen (prefix);
9280 strcpy (tmp + length, get_tree_code_name (code));
9281 length += strlen (get_tree_code_name (code));
9282 }
9283 va_end (args);
9284 }
9285 else
9286 buffer = "unexpected node";
9287
9288 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9289 buffer, get_tree_code_name (TREE_CODE (node)),
9290 function, trim_filename (file), line);
9291 }
9292
9293 /* Complain that the tree code of NODE does match the expected 0
9294 terminated list of trailing codes. FILE, LINE, and FUNCTION are of
9295 the caller. */
9296
9297 void
9298 tree_not_check_failed (const_tree node, const char *file,
9299 int line, const char *function, ...)
9300 {
9301 va_list args;
9302 char *buffer;
9303 unsigned length = 0;
9304 enum tree_code code;
9305
9306 va_start (args, function);
9307 while ((code = (enum tree_code) va_arg (args, int)))
9308 length += 4 + strlen (get_tree_code_name (code));
9309 va_end (args);
9310 va_start (args, function);
9311 buffer = (char *) alloca (length);
9312 length = 0;
9313 while ((code = (enum tree_code) va_arg (args, int)))
9314 {
9315 if (length)
9316 {
9317 strcpy (buffer + length, " or ");
9318 length += 4;
9319 }
9320 strcpy (buffer + length, get_tree_code_name (code));
9321 length += strlen (get_tree_code_name (code));
9322 }
9323 va_end (args);
9324
9325 internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
9326 buffer, get_tree_code_name (TREE_CODE (node)),
9327 function, trim_filename (file), line);
9328 }
9329
9330 /* Similar to tree_check_failed, except that we check for a class of tree
9331 code, given in CL. */
9332
9333 void
9334 tree_class_check_failed (const_tree node, const enum tree_code_class cl,
9335 const char *file, int line, const char *function)
9336 {
9337 internal_error
9338 ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
9339 TREE_CODE_CLASS_STRING (cl),
9340 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9341 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9342 }
9343
9344 /* Similar to tree_check_failed, except that instead of specifying a
9345 dozen codes, use the knowledge that they're all sequential. */
9346
9347 void
9348 tree_range_check_failed (const_tree node, const char *file, int line,
9349 const char *function, enum tree_code c1,
9350 enum tree_code c2)
9351 {
9352 char *buffer;
9353 unsigned length = 0;
9354 unsigned int c;
9355
9356 for (c = c1; c <= c2; ++c)
9357 length += 4 + strlen (get_tree_code_name ((enum tree_code) c));
9358
9359 length += strlen ("expected ");
9360 buffer = (char *) alloca (length);
9361 length = 0;
9362
9363 for (c = c1; c <= c2; ++c)
9364 {
9365 const char *prefix = length ? " or " : "expected ";
9366
9367 strcpy (buffer + length, prefix);
9368 length += strlen (prefix);
9369 strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
9370 length += strlen (get_tree_code_name ((enum tree_code) c));
9371 }
9372
9373 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9374 buffer, get_tree_code_name (TREE_CODE (node)),
9375 function, trim_filename (file), line);
9376 }
9377
9378
9379 /* Similar to tree_check_failed, except that we check that a tree does
9380 not have the specified code, given in CL. */
9381
9382 void
9383 tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
9384 const char *file, int line, const char *function)
9385 {
9386 internal_error
9387 ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
9388 TREE_CODE_CLASS_STRING (cl),
9389 TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
9390 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9391 }
9392
9393
9394 /* Similar to tree_check_failed but applied to OMP_CLAUSE codes. */
9395
9396 void
9397 omp_clause_check_failed (const_tree node, const char *file, int line,
9398 const char *function, enum omp_clause_code code)
9399 {
9400 internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
9401 omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
9402 function, trim_filename (file), line);
9403 }
9404
9405
9406 /* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes. */
9407
9408 void
9409 omp_clause_range_check_failed (const_tree node, const char *file, int line,
9410 const char *function, enum omp_clause_code c1,
9411 enum omp_clause_code c2)
9412 {
9413 char *buffer;
9414 unsigned length = 0;
9415 unsigned int c;
9416
9417 for (c = c1; c <= c2; ++c)
9418 length += 4 + strlen (omp_clause_code_name[c]);
9419
9420 length += strlen ("expected ");
9421 buffer = (char *) alloca (length);
9422 length = 0;
9423
9424 for (c = c1; c <= c2; ++c)
9425 {
9426 const char *prefix = length ? " or " : "expected ";
9427
9428 strcpy (buffer + length, prefix);
9429 length += strlen (prefix);
9430 strcpy (buffer + length, omp_clause_code_name[c]);
9431 length += strlen (omp_clause_code_name[c]);
9432 }
9433
9434 internal_error ("tree check: %s, have %s in %s, at %s:%d",
9435 buffer, omp_clause_code_name[TREE_CODE (node)],
9436 function, trim_filename (file), line);
9437 }
9438
9439
9440 #undef DEFTREESTRUCT
9441 #define DEFTREESTRUCT(VAL, NAME) NAME,
9442
9443 static const char *ts_enum_names[] = {
9444 #include "treestruct.def"
9445 };
9446 #undef DEFTREESTRUCT
9447
9448 #define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])
9449
9450 /* Similar to tree_class_check_failed, except that we check for
9451 whether CODE contains the tree structure identified by EN. */
9452
9453 void
9454 tree_contains_struct_check_failed (const_tree node,
9455 const enum tree_node_structure_enum en,
9456 const char *file, int line,
9457 const char *function)
9458 {
9459 internal_error
9460 ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
9461 TS_ENUM_NAME (en),
9462 get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
9463 }
9464
9465
9466 /* Similar to above, except that the check is for the bounds of a
9467 tree_int_cst's (dynamically sized) vector of elements. */
9468
9469 void
9470 tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
9471 const char *function)
9472 {
9473 internal_error
9474 ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
9475 idx + 1, len, function, trim_filename (file), line);
9476 }
9477
9478 /* Similar to above, except that the check is for the bounds of a TREE_VEC's
9479 (dynamically sized) vector. */
9480
9481 void
9482 tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
9483 const char *function)
9484 {
9485 internal_error
9486 ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
9487 idx + 1, len, function, trim_filename (file), line);
9488 }
9489
9490 /* Similar to above, except that the check is for the bounds of the operand
9491 vector of an expression node EXP. */
9492
9493 void
9494 tree_operand_check_failed (int idx, const_tree exp, const char *file,
9495 int line, const char *function)
9496 {
9497 enum tree_code code = TREE_CODE (exp);
9498 internal_error
9499 ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
9500 idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
9501 function, trim_filename (file), line);
9502 }
9503
9504 /* Similar to above, except that the check is for the number of
9505 operands of an OMP_CLAUSE node. */
9506
9507 void
9508 omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
9509 int line, const char *function)
9510 {
9511 internal_error
9512 ("tree check: accessed operand %d of omp_clause %s with %d operands "
9513 "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
9514 omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
9515 trim_filename (file), line);
9516 }
9517 #endif /* ENABLE_TREE_CHECKING */
9518 \f
9519 /* Create a new vector type node holding NUNITS units of type INNERTYPE,
9520 and mapped to the machine mode MODE. Initialize its fields and build
9521 the information necessary for debugging output. */
9522
9523 static tree
9524 make_vector_type (tree innertype, int nunits, machine_mode mode)
9525 {
9526 tree t;
9527 inchash::hash hstate;
9528
9529 t = make_node (VECTOR_TYPE);
9530 TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
9531 SET_TYPE_VECTOR_SUBPARTS (t, nunits);
9532 SET_TYPE_MODE (t, mode);
9533
9534 if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
9535 SET_TYPE_STRUCTURAL_EQUALITY (t);
9536 else if (TYPE_CANONICAL (innertype) != innertype
9537 || mode != VOIDmode)
9538 TYPE_CANONICAL (t)
9539 = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);
9540
9541 layout_type (t);
9542
9543 hstate.add_wide_int (VECTOR_TYPE);
9544 hstate.add_wide_int (nunits);
9545 hstate.add_wide_int (mode);
9546 hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
9547 t = type_hash_canon (hstate.end (), t);
9548
9549 /* We have built a main variant, based on the main variant of the
9550 inner type. Use it to build the variant we return. */
9551 if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
9552 && TREE_TYPE (t) != innertype)
9553 return build_type_attribute_qual_variant (t,
9554 TYPE_ATTRIBUTES (innertype),
9555 TYPE_QUALS (innertype));
9556
9557 return t;
9558 }
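/* Illustrative sketch (comment only, not compiled): callers normally do not
   pass a machine mode here themselves. build_vector_type, for example,
   forwards a request such as

     tree v4si = build_vector_type (intSI_type_node, 4);

   to this function with VOIDmode; layout_type then chooses a suitable vector
   mode (e.g. a 4 x SImode mode) if the target provides one, and
   type_hash_canon makes repeated requests share a single main variant. */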
9559
9560 static tree
9561 make_or_reuse_type (unsigned size, int unsignedp)
9562 {
9563 int i;
9564
9565 if (size == INT_TYPE_SIZE)
9566 return unsignedp ? unsigned_type_node : integer_type_node;
9567 if (size == CHAR_TYPE_SIZE)
9568 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
9569 if (size == SHORT_TYPE_SIZE)
9570 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
9571 if (size == LONG_TYPE_SIZE)
9572 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
9573 if (size == LONG_LONG_TYPE_SIZE)
9574 return (unsignedp ? long_long_unsigned_type_node
9575 : long_long_integer_type_node);
9576
9577 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9578 if (size == int_n_data[i].bitsize
9579 && int_n_enabled_p[i])
9580 return (unsignedp ? int_n_trees[i].unsigned_type
9581 : int_n_trees[i].signed_type);
9582
9583 if (unsignedp)
9584 return make_unsigned_type (size);
9585 else
9586 return make_signed_type (size);
9587 }
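/* Example (illustrative, assuming a target where INT_TYPE_SIZE is 32 and
   LONG_LONG_TYPE_SIZE is 64): make_or_reuse_type (32, 1) simply returns
   unsigned_type_node, and make_or_reuse_type (64, 0) returns
   long_integer_type_node or long_long_integer_type_node, whichever size
   matches first. A size with no matching C type, say 24, falls through to
   make_signed_type or make_unsigned_type and builds a fresh node. */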
9588
9589 /* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP. */
9590
9591 static tree
9592 make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
9593 {
9594 if (satp)
9595 {
9596 if (size == SHORT_FRACT_TYPE_SIZE)
9597 return unsignedp ? sat_unsigned_short_fract_type_node
9598 : sat_short_fract_type_node;
9599 if (size == FRACT_TYPE_SIZE)
9600 return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
9601 if (size == LONG_FRACT_TYPE_SIZE)
9602 return unsignedp ? sat_unsigned_long_fract_type_node
9603 : sat_long_fract_type_node;
9604 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9605 return unsignedp ? sat_unsigned_long_long_fract_type_node
9606 : sat_long_long_fract_type_node;
9607 }
9608 else
9609 {
9610 if (size == SHORT_FRACT_TYPE_SIZE)
9611 return unsignedp ? unsigned_short_fract_type_node
9612 : short_fract_type_node;
9613 if (size == FRACT_TYPE_SIZE)
9614 return unsignedp ? unsigned_fract_type_node : fract_type_node;
9615 if (size == LONG_FRACT_TYPE_SIZE)
9616 return unsignedp ? unsigned_long_fract_type_node
9617 : long_fract_type_node;
9618 if (size == LONG_LONG_FRACT_TYPE_SIZE)
9619 return unsignedp ? unsigned_long_long_fract_type_node
9620 : long_long_fract_type_node;
9621 }
9622
9623 return make_fract_type (size, unsignedp, satp);
9624 }
9625
9626 /* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP. */
9627
9628 static tree
9629 make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
9630 {
9631 if (satp)
9632 {
9633 if (size == SHORT_ACCUM_TYPE_SIZE)
9634 return unsignedp ? sat_unsigned_short_accum_type_node
9635 : sat_short_accum_type_node;
9636 if (size == ACCUM_TYPE_SIZE)
9637 return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
9638 if (size == LONG_ACCUM_TYPE_SIZE)
9639 return unsignedp ? sat_unsigned_long_accum_type_node
9640 : sat_long_accum_type_node;
9641 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9642 return unsignedp ? sat_unsigned_long_long_accum_type_node
9643 : sat_long_long_accum_type_node;
9644 }
9645 else
9646 {
9647 if (size == SHORT_ACCUM_TYPE_SIZE)
9648 return unsignedp ? unsigned_short_accum_type_node
9649 : short_accum_type_node;
9650 if (size == ACCUM_TYPE_SIZE)
9651 return unsignedp ? unsigned_accum_type_node : accum_type_node;
9652 if (size == LONG_ACCUM_TYPE_SIZE)
9653 return unsignedp ? unsigned_long_accum_type_node
9654 : long_accum_type_node;
9655 if (size == LONG_LONG_ACCUM_TYPE_SIZE)
9656 return unsignedp ? unsigned_long_long_accum_type_node
9657 : long_long_accum_type_node;
9658 }
9659
9660 return make_accum_type (size, unsignedp, satp);
9661 }
9662
9663
9664 /* Create an atomic variant node for TYPE. This routine is called
9665 during initialization of data types to create the 5 basic atomic
9666 types. The generic build_variant_type function requires these to
9667 already be set up in order to function properly, so cannot be
9668 called from there. If ALIGN is non-zero, then ensure alignment is
9669 overridden to this value. */
9670
9671 static tree
9672 build_atomic_base (tree type, unsigned int align)
9673 {
9674 tree t;
9675
9676 /* Make sure it's not already registered. */
9677 if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
9678 return t;
9679
9680 t = build_variant_type_copy (type);
9681 set_type_quals (t, TYPE_QUAL_ATOMIC);
9682
9683 if (align)
9684 TYPE_ALIGN (t) = align;
9685
9686 return t;
9687 }
9688
9689 /* Create nodes for all integer types (and error_mark_node) using the sizes
9690 of C datatypes. SIGNED_CHAR specifies whether char is signed,
9691 SHORT_DOUBLE specifies whether double should be of the same precision
9692 as float. */
9693
9694 void
9695 build_common_tree_nodes (bool signed_char, bool short_double)
9696 {
9697 int i;
9698
9699 error_mark_node = make_node (ERROR_MARK);
9700 TREE_TYPE (error_mark_node) = error_mark_node;
9701
9702 initialize_sizetypes ();
9703
9704 /* Define both `signed char' and `unsigned char'. */
9705 signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
9706 TYPE_STRING_FLAG (signed_char_type_node) = 1;
9707 unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
9708 TYPE_STRING_FLAG (unsigned_char_type_node) = 1;
9709
9710 /* Define `char', which is like either `signed char' or `unsigned char'
9711 but not the same as either. */
9712 char_type_node
9713 = (signed_char
9714 ? make_signed_type (CHAR_TYPE_SIZE)
9715 : make_unsigned_type (CHAR_TYPE_SIZE));
9716 TYPE_STRING_FLAG (char_type_node) = 1;
9717
9718 short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
9719 short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
9720 integer_type_node = make_signed_type (INT_TYPE_SIZE);
9721 unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
9722 long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
9723 long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
9724 long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
9725 long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);
9726
9727 for (i = 0; i < NUM_INT_N_ENTS; i ++)
9728 {
9729 int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
9730 int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
9731 TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
9732 TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);
9733
9734 if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
9735 && int_n_enabled_p[i])
9736 {
9737 integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
9738 integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
9739 }
9740 }
9741
9742 /* Define a boolean type. This type only represents boolean values but
9743 may be larger than char depending on the value of BOOL_TYPE_SIZE. */
9744 boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
9745 TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
9746 TYPE_PRECISION (boolean_type_node) = 1;
9747 TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);
9748
9749 /* Define what type to use for size_t. */
9750 if (strcmp (SIZE_TYPE, "unsigned int") == 0)
9751 size_type_node = unsigned_type_node;
9752 else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
9753 size_type_node = long_unsigned_type_node;
9754 else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
9755 size_type_node = long_long_unsigned_type_node;
9756 else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
9757 size_type_node = short_unsigned_type_node;
9758 else
9759 {
9760 int i;
9761
9762 size_type_node = NULL_TREE;
9763 for (i = 0; i < NUM_INT_N_ENTS; i++)
9764 if (int_n_enabled_p[i])
9765 {
9766 char name[50];
9767 sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);
9768
9769 if (strcmp (name, SIZE_TYPE) == 0)
9770 {
9771 size_type_node = int_n_trees[i].unsigned_type;
9772 }
9773 }
9774 if (size_type_node == NULL_TREE)
9775 gcc_unreachable ();
9776 }
9777
9778 /* Fill in the rest of the sized types. Reuse existing type nodes
9779 when possible. */
9780 intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
9781 intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
9782 intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
9783 intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
9784 intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);
9785
9786 unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
9787 unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
9788 unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
9789 unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
9790 unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);
9791
9792 /* Don't call build_qualified_type for atomics. That routine does
9793 special processing for atomics, and until they are initialized
9794 it's better not to make that call.
9795
9796 Check to see if there is a target override for atomic types. */
9797
9798 atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
9799 targetm.atomic_align_for_mode (QImode));
9800 atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
9801 targetm.atomic_align_for_mode (HImode));
9802 atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
9803 targetm.atomic_align_for_mode (SImode));
9804 atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
9805 targetm.atomic_align_for_mode (DImode));
9806 atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
9807 targetm.atomic_align_for_mode (TImode));
9808
9809 access_public_node = get_identifier ("public");
9810 access_protected_node = get_identifier ("protected");
9811 access_private_node = get_identifier ("private");
9812
9813 /* Define these next since types below may use them. */
9814 integer_zero_node = build_int_cst (integer_type_node, 0);
9815 integer_one_node = build_int_cst (integer_type_node, 1);
9816 integer_three_node = build_int_cst (integer_type_node, 3);
9817 integer_minus_one_node = build_int_cst (integer_type_node, -1);
9818
9819 size_zero_node = size_int (0);
9820 size_one_node = size_int (1);
9821 bitsize_zero_node = bitsize_int (0);
9822 bitsize_one_node = bitsize_int (1);
9823 bitsize_unit_node = bitsize_int (BITS_PER_UNIT);
9824
9825 boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
9826 boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);
9827
9828 void_type_node = make_node (VOID_TYPE);
9829 layout_type (void_type_node);
9830
9831 pointer_bounds_type_node = targetm.chkp_bound_type ();
9832
9833 /* We are not going to have real types in C with less than byte alignment,
9834 so we might as well not have any types that claim to have it. */
9835 TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
9836 TYPE_USER_ALIGN (void_type_node) = 0;
9837
9838 void_node = make_node (VOID_CST);
9839 TREE_TYPE (void_node) = void_type_node;
9840
9841 null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
9842 layout_type (TREE_TYPE (null_pointer_node));
9843
9844 ptr_type_node = build_pointer_type (void_type_node);
9845 const_ptr_type_node
9846 = build_pointer_type (build_type_variant (void_type_node, 1, 0));
9847 fileptr_type_node = ptr_type_node;
9848
9849 pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);
9850
9851 float_type_node = make_node (REAL_TYPE);
9852 TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
9853 layout_type (float_type_node);
9854
9855 double_type_node = make_node (REAL_TYPE);
9856 if (short_double)
9857 TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
9858 else
9859 TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
9860 layout_type (double_type_node);
9861
9862 long_double_type_node = make_node (REAL_TYPE);
9863 TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
9864 layout_type (long_double_type_node);
9865
9866 float_ptr_type_node = build_pointer_type (float_type_node);
9867 double_ptr_type_node = build_pointer_type (double_type_node);
9868 long_double_ptr_type_node = build_pointer_type (long_double_type_node);
9869 integer_ptr_type_node = build_pointer_type (integer_type_node);
9870
9871 /* Fixed size integer types. */
9872 uint16_type_node = make_or_reuse_type (16, 1);
9873 uint32_type_node = make_or_reuse_type (32, 1);
9874 uint64_type_node = make_or_reuse_type (64, 1);
9875
9876 /* Decimal float types. */
9877 dfloat32_type_node = make_node (REAL_TYPE);
9878 TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
9879 layout_type (dfloat32_type_node);
9880 SET_TYPE_MODE (dfloat32_type_node, SDmode);
9881 dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);
9882
9883 dfloat64_type_node = make_node (REAL_TYPE);
9884 TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
9885 layout_type (dfloat64_type_node);
9886 SET_TYPE_MODE (dfloat64_type_node, DDmode);
9887 dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);
9888
9889 dfloat128_type_node = make_node (REAL_TYPE);
9890 TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
9891 layout_type (dfloat128_type_node);
9892 SET_TYPE_MODE (dfloat128_type_node, TDmode);
9893 dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);
9894
9895 complex_integer_type_node = build_complex_type (integer_type_node);
9896 complex_float_type_node = build_complex_type (float_type_node);
9897 complex_double_type_node = build_complex_type (double_type_node);
9898 complex_long_double_type_node = build_complex_type (long_double_type_node);
9899
9900 /* Make fixed-point nodes based on sat/non-sat and signed/unsigned. */
9901 #define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
9902 sat_ ## KIND ## _type_node = \
9903 make_sat_signed_ ## KIND ## _type (SIZE); \
9904 sat_unsigned_ ## KIND ## _type_node = \
9905 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9906 KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9907 unsigned_ ## KIND ## _type_node = \
9908 make_unsigned_ ## KIND ## _type (SIZE);
9909
9910 #define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
9911 sat_ ## WIDTH ## KIND ## _type_node = \
9912 make_sat_signed_ ## KIND ## _type (SIZE); \
9913 sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
9914 make_sat_unsigned_ ## KIND ## _type (SIZE); \
9915 WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
9916 unsigned_ ## WIDTH ## KIND ## _type_node = \
9917 make_unsigned_ ## KIND ## _type (SIZE);
9918
9919 /* Make fixed-point type nodes based on four different widths. */
9920 #define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
9921 MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
9922 MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
9923 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
9924 MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)
9925
9926 /* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned. */
9927 #define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
9928 NAME ## _type_node = \
9929 make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
9930 u ## NAME ## _type_node = \
9931 make_or_reuse_unsigned_ ## KIND ## _type \
9932 (GET_MODE_BITSIZE (U ## MODE ## mode)); \
9933 sat_ ## NAME ## _type_node = \
9934 make_or_reuse_sat_signed_ ## KIND ## _type \
9935 (GET_MODE_BITSIZE (MODE ## mode)); \
9936 sat_u ## NAME ## _type_node = \
9937 make_or_reuse_sat_unsigned_ ## KIND ## _type \
9938 (GET_MODE_BITSIZE (U ## MODE ## mode));
9939
9940 /* Fixed-point type and mode nodes. */
9941 MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
9942 MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
9943 MAKE_FIXED_MODE_NODE (fract, qq, QQ)
9944 MAKE_FIXED_MODE_NODE (fract, hq, HQ)
9945 MAKE_FIXED_MODE_NODE (fract, sq, SQ)
9946 MAKE_FIXED_MODE_NODE (fract, dq, DQ)
9947 MAKE_FIXED_MODE_NODE (fract, tq, TQ)
9948 MAKE_FIXED_MODE_NODE (accum, ha, HA)
9949 MAKE_FIXED_MODE_NODE (accum, sa, SA)
9950 MAKE_FIXED_MODE_NODE (accum, da, DA)
9951 MAKE_FIXED_MODE_NODE (accum, ta, TA)
9952
9953 {
9954 tree t = targetm.build_builtin_va_list ();
9955
9956 /* Many back-ends define record types without setting TYPE_NAME.
9957 If we copied the record type here, we'd keep the original
9958 record type without a name. This breaks name mangling. So,
9959 don't copy record types and let c_common_nodes_and_builtins()
9960 declare the type to be __builtin_va_list. */
9961 if (TREE_CODE (t) != RECORD_TYPE)
9962 t = build_variant_type_copy (t);
9963
9964 va_list_type_node = t;
9965 }
9966 }
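/* Illustrative note (sketch, not a prescription): a front end calls this
   once during initialization, roughly

     build_common_tree_nodes (flag_signed_char, flag_short_double);

   after which nodes such as integer_type_node, size_type_node and
   char_type_node are available. char_type_node has the same precision as
   signed_char_type_node and unsigned_char_type_node but is a distinct type,
   matching C's three distinct character types. */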
9967
9968 /* Modify DECL for given flags.
9969 TM_PURE attribute is set only on types, so the function will modify
9970 DECL's type when ECF_TM_PURE is used. */
9971
9972 void
9973 set_call_expr_flags (tree decl, int flags)
9974 {
9975 if (flags & ECF_NOTHROW)
9976 TREE_NOTHROW (decl) = 1;
9977 if (flags & ECF_CONST)
9978 TREE_READONLY (decl) = 1;
9979 if (flags & ECF_PURE)
9980 DECL_PURE_P (decl) = 1;
9981 if (flags & ECF_LOOPING_CONST_OR_PURE)
9982 DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
9983 if (flags & ECF_NOVOPS)
9984 DECL_IS_NOVOPS (decl) = 1;
9985 if (flags & ECF_NORETURN)
9986 TREE_THIS_VOLATILE (decl) = 1;
9987 if (flags & ECF_MALLOC)
9988 DECL_IS_MALLOC (decl) = 1;
9989 if (flags & ECF_RETURNS_TWICE)
9990 DECL_IS_RETURNS_TWICE (decl) = 1;
9991 if (flags & ECF_LEAF)
9992 DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
9993 NULL, DECL_ATTRIBUTES (decl));
9994 if ((flags & ECF_TM_PURE) && flag_tm)
9995 apply_tm_attr (decl, get_identifier ("transaction_pure"));
9996 /* Looping const or pure is implied by noreturn.
9997 There is currently no way to declare looping const or looping pure alone. */
9998 gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
9999 || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
10000 }
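/* Example (illustrative): marking DECL as a leaf function that cannot throw,

     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF);

   sets TREE_NOTHROW on DECL and chains a "leaf" attribute onto
   DECL_ATTRIBUTES, exactly as the individual tests above do. */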
10001
10002
10003 /* A subroutine of build_common_builtin_nodes. Define a builtin function. */
10004
10005 static void
10006 local_define_builtin (const char *name, tree type, enum built_in_function code,
10007 const char *library_name, int ecf_flags)
10008 {
10009 tree decl;
10010
10011 decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
10012 library_name, NULL_TREE);
10013 set_call_expr_flags (decl, ecf_flags);
10014
10015 set_builtin_decl (code, decl, true);
10016 }
10017
10018 /* Call this function after instantiating all builtins that the language
10019 front end cares about. This will build the rest of the builtins
10020 and internal functions that are relied upon by the tree optimizers and
10021 the middle-end. */
10022
10023 void
10024 build_common_builtin_nodes (void)
10025 {
10026 tree tmp, ftype;
10027 int ecf_flags;
10028
10029 if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
10030 {
10031 ftype = build_function_type (void_type_node, void_list_node);
10032 local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
10033 "__builtin_unreachable",
10034 ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
10035 | ECF_CONST);
10036 }
10037
10038 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
10039 || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10040 {
10041 ftype = build_function_type_list (ptr_type_node,
10042 ptr_type_node, const_ptr_type_node,
10043 size_type_node, NULL_TREE);
10044
10045 if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
10046 local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
10047 "memcpy", ECF_NOTHROW | ECF_LEAF);
10048 if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
10049 local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
10050 "memmove", ECF_NOTHROW | ECF_LEAF);
10051 }
10052
10053 if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
10054 {
10055 ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
10056 const_ptr_type_node, size_type_node,
10057 NULL_TREE);
10058 local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
10059 "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10060 }
10061
10062 if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
10063 {
10064 ftype = build_function_type_list (ptr_type_node,
10065 ptr_type_node, integer_type_node,
10066 size_type_node, NULL_TREE);
10067 local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
10068 "memset", ECF_NOTHROW | ECF_LEAF);
10069 }
10070
10071 if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
10072 {
10073 ftype = build_function_type_list (ptr_type_node,
10074 size_type_node, NULL_TREE);
10075 local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
10076 "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10077 }
10078
10079 ftype = build_function_type_list (ptr_type_node, size_type_node,
10080 size_type_node, NULL_TREE);
10081 local_define_builtin ("__builtin_alloca_with_align", ftype,
10082 BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
10083 ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
10084
10085 /* If we're checking the stack, `alloca' can throw. */
10086 if (flag_stack_check)
10087 {
10088 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
10089 TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
10090 }
10091
10092 ftype = build_function_type_list (void_type_node,
10093 ptr_type_node, ptr_type_node,
10094 ptr_type_node, NULL_TREE);
10095 local_define_builtin ("__builtin_init_trampoline", ftype,
10096 BUILT_IN_INIT_TRAMPOLINE,
10097 "__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
10098 local_define_builtin ("__builtin_init_heap_trampoline", ftype,
10099 BUILT_IN_INIT_HEAP_TRAMPOLINE,
10100 "__builtin_init_heap_trampoline",
10101 ECF_NOTHROW | ECF_LEAF);
10102
10103 ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
10104 local_define_builtin ("__builtin_adjust_trampoline", ftype,
10105 BUILT_IN_ADJUST_TRAMPOLINE,
10106 "__builtin_adjust_trampoline",
10107 ECF_CONST | ECF_NOTHROW);
10108
10109 ftype = build_function_type_list (void_type_node,
10110 ptr_type_node, ptr_type_node, NULL_TREE);
10111 local_define_builtin ("__builtin_nonlocal_goto", ftype,
10112 BUILT_IN_NONLOCAL_GOTO,
10113 "__builtin_nonlocal_goto",
10114 ECF_NORETURN | ECF_NOTHROW);
10115
10116 ftype = build_function_type_list (void_type_node,
10117 ptr_type_node, ptr_type_node, NULL_TREE);
10118 local_define_builtin ("__builtin_setjmp_setup", ftype,
10119 BUILT_IN_SETJMP_SETUP,
10120 "__builtin_setjmp_setup", ECF_NOTHROW);
10121
10122 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10123 local_define_builtin ("__builtin_setjmp_receiver", ftype,
10124 BUILT_IN_SETJMP_RECEIVER,
10125 "__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);
10126
10127 ftype = build_function_type_list (ptr_type_node, NULL_TREE);
10128 local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
10129 "__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);
10130
10131 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10132 local_define_builtin ("__builtin_stack_restore", ftype,
10133 BUILT_IN_STACK_RESTORE,
10134 "__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);
10135
10136 /* If there's a possibility that we might use the ARM EABI, build the
10137 alternate __cxa_end_cleanup node used to resume from C++ and Java. */
10138 if (targetm.arm_eabi_unwinder)
10139 {
10140 ftype = build_function_type_list (void_type_node, NULL_TREE);
10141 local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
10142 BUILT_IN_CXA_END_CLEANUP,
10143 "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
10144 }
10145
10146 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
10147 local_define_builtin ("__builtin_unwind_resume", ftype,
10148 BUILT_IN_UNWIND_RESUME,
10149 ((targetm_common.except_unwind_info (&global_options)
10150 == UI_SJLJ)
10151 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
10152 ECF_NORETURN);
10153
10154 if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
10155 {
10156 ftype = build_function_type_list (ptr_type_node, integer_type_node,
10157 NULL_TREE);
10158 local_define_builtin ("__builtin_return_address", ftype,
10159 BUILT_IN_RETURN_ADDRESS,
10160 "__builtin_return_address",
10161 ECF_NOTHROW);
10162 }
10163
10164 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
10165 || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10166 {
10167 ftype = build_function_type_list (void_type_node, ptr_type_node,
10168 ptr_type_node, NULL_TREE);
10169 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
10170 local_define_builtin ("__cyg_profile_func_enter", ftype,
10171 BUILT_IN_PROFILE_FUNC_ENTER,
10172 "__cyg_profile_func_enter", 0);
10173 if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
10174 local_define_builtin ("__cyg_profile_func_exit", ftype,
10175 BUILT_IN_PROFILE_FUNC_EXIT,
10176 "__cyg_profile_func_exit", 0);
10177 }
10178
10179 /* The exception object and filter values from the runtime. The argument
10180 must be zero before exception lowering, i.e. from the front end. After
10181 exception lowering, it will be the region number for the exception
10182 landing pad. These functions are PURE instead of CONST to prevent
10183 them from being hoisted past the exception edge that will initialize
10184 its value in the landing pad. */
10185 ftype = build_function_type_list (ptr_type_node,
10186 integer_type_node, NULL_TREE);
10187 ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
10188 /* Only use TM_PURE if we have TM language support. */
10189 if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
10190 ecf_flags |= ECF_TM_PURE;
10191 local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
10192 "__builtin_eh_pointer", ecf_flags);
10193
10194 tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
10195 ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
10196 local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
10197 "__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
10198
10199 ftype = build_function_type_list (void_type_node,
10200 integer_type_node, integer_type_node,
10201 NULL_TREE);
10202 local_define_builtin ("__builtin_eh_copy_values", ftype,
10203 BUILT_IN_EH_COPY_VALUES,
10204 "__builtin_eh_copy_values", ECF_NOTHROW);
10205
10206 /* Complex multiplication and division. These are handled as builtins
10207 rather than optabs because emit_library_call_value doesn't support
10208 complex. Further, we can do slightly better with folding these
10209 beasties if the real and imaginary parts of the arguments are separate. */
10210 {
10211 int mode;
10212
10213 for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
10214 {
10215 char mode_name_buf[4], *q;
10216 const char *p;
10217 enum built_in_function mcode, dcode;
10218 tree type, inner_type;
10219 const char *prefix = "__";
10220
10221 if (targetm.libfunc_gnu_prefix)
10222 prefix = "__gnu_";
10223
10224 type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
10225 if (type == NULL)
10226 continue;
10227 inner_type = TREE_TYPE (type);
10228
10229 ftype = build_function_type_list (type, inner_type, inner_type,
10230 inner_type, inner_type, NULL_TREE);
10231
10232 mcode = ((enum built_in_function)
10233 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10234 dcode = ((enum built_in_function)
10235 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
10236
10237 for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
10238 *q = TOLOWER (*p);
10239 *q = '\0';
10240
10241 built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
10242 NULL);
10243 local_define_builtin (built_in_names[mcode], ftype, mcode,
10244 built_in_names[mcode],
10245 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10246
10247 built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
10248 NULL);
10249 local_define_builtin (built_in_names[dcode], ftype, dcode,
10250 built_in_names[dcode],
10251 ECF_CONST | ECF_NOTHROW | ECF_LEAF);
10252 }
10253 }
10254
10255 init_internal_fns ();
10256 }
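/* Illustrative note: the complex multiply/divide builtins defined above get
   names following the libgcc convention "__mul<mode>3" / "__div<mode>3" with
   the mode name lower-cased; e.g. SCmode (complex float) yields "__mulsc3"
   and "__divsc3", or "__gnu_mulsc3" when the target sets libfunc_gnu_prefix. */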
10257
10258 /* HACK. GROSS. This is absolutely disgusting. I wish there was a
10259 better way.
10260
10261 If we requested a pointer to a vector, build up the pointers that
10262 we stripped off while looking for the inner type. Similarly for
10263 return values from functions.
10264
10265 The argument TYPE is the top of the chain, and BOTTOM is the
10266 new type which we will point to. */
10267
10268 tree
10269 reconstruct_complex_type (tree type, tree bottom)
10270 {
10271 tree inner, outer;
10272
10273 if (TREE_CODE (type) == POINTER_TYPE)
10274 {
10275 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10276 outer = build_pointer_type_for_mode (inner, TYPE_MODE (type),
10277 TYPE_REF_CAN_ALIAS_ALL (type));
10278 }
10279 else if (TREE_CODE (type) == REFERENCE_TYPE)
10280 {
10281 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10282 outer = build_reference_type_for_mode (inner, TYPE_MODE (type),
10283 TYPE_REF_CAN_ALIAS_ALL (type));
10284 }
10285 else if (TREE_CODE (type) == ARRAY_TYPE)
10286 {
10287 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10288 outer = build_array_type (inner, TYPE_DOMAIN (type));
10289 }
10290 else if (TREE_CODE (type) == FUNCTION_TYPE)
10291 {
10292 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10293 outer = build_function_type (inner, TYPE_ARG_TYPES (type));
10294 }
10295 else if (TREE_CODE (type) == METHOD_TYPE)
10296 {
10297 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10298 /* The build_method_type_directly() routine prepends 'this' to the
10299 argument list, so we must compensate by getting rid of it. */
10300 outer
10301 = build_method_type_directly
10302 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
10303 inner,
10304 TREE_CHAIN (TYPE_ARG_TYPES (type)));
10305 }
10306 else if (TREE_CODE (type) == OFFSET_TYPE)
10307 {
10308 inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
10309 outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), inner);
10310 }
10311 else
10312 return bottom;
10313
10314 return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
10315 TYPE_QUALS (type));
10316 }
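/* Example (illustrative): if TYPE is "int **" and BOTTOM is a vector-of-int
   type, the recursion peels off the two POINTER_TYPEs, substitutes BOTTOM for
   the innermost "int", and rebuilds on the way out, yielding a pointer to a
   pointer to the vector type, with each level's attributes and qualifiers
   reapplied by build_type_attribute_qual_variant. */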
10317
10318 /* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
10319 the inner type. */
10320 tree
10321 build_vector_type_for_mode (tree innertype, machine_mode mode)
10322 {
10323 int nunits;
10324
10325 switch (GET_MODE_CLASS (mode))
10326 {
10327 case MODE_VECTOR_INT:
10328 case MODE_VECTOR_FLOAT:
10329 case MODE_VECTOR_FRACT:
10330 case MODE_VECTOR_UFRACT:
10331 case MODE_VECTOR_ACCUM:
10332 case MODE_VECTOR_UACCUM:
10333 nunits = GET_MODE_NUNITS (mode);
10334 break;
10335
10336 case MODE_INT:
10337 /* Check that there are no leftover bits. */
10338 gcc_assert (GET_MODE_BITSIZE (mode)
10339 % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);
10340
10341 nunits = GET_MODE_BITSIZE (mode)
10342 / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
10343 break;
10344
10345 default:
10346 gcc_unreachable ();
10347 }
10348
10349 return make_vector_type (innertype, nunits, mode);
10350 }
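/* Worked example (illustrative): for an integer mode the unit count comes
   from the sizes alone. With INNERTYPE = intQI_type_node (8 bits) and
   MODE = TImode (128 bits), nunits = 128 / 8 = 16, giving a 16-element
   vector of QImode integers carried in TImode. */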
10351
10352 /* Similarly, but takes the inner type and number of units, which must be
10353 a power of two. */
10354
10355 tree
10356 build_vector_type (tree innertype, int nunits)
10357 {
10358 return make_vector_type (innertype, nunits, VOIDmode);
10359 }
10360
10361 /* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set. */
10362
10363 tree
10364 build_opaque_vector_type (tree innertype, int nunits)
10365 {
10366 tree t = make_vector_type (innertype, nunits, VOIDmode);
10367 tree cand;
10368 /* We always build the non-opaque variant before the opaque one,
10369 so if it already exists, it is TYPE_NEXT_VARIANT of this one. */
10370 cand = TYPE_NEXT_VARIANT (t);
10371 if (cand
10372 && TYPE_VECTOR_OPAQUE (cand)
10373 && check_qualified_type (cand, t, TYPE_QUALS (t)))
10374 return cand;
10375 /* Otherwise build a variant type and make sure to queue it after
10376 the non-opaque type. */
10377 cand = build_distinct_type_copy (t);
10378 TYPE_VECTOR_OPAQUE (cand) = true;
10379 TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
10380 TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
10381 TYPE_NEXT_VARIANT (t) = cand;
10382 TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
10383 return cand;
10384 }
10385
10386
10387 /* Given an initializer INIT, return TRUE if INIT is zero or some
10388 aggregate of zeros. Otherwise return FALSE. */
10389 bool
10390 initializer_zerop (const_tree init)
10391 {
10392 tree elt;
10393
10394 STRIP_NOPS (init);
10395
10396 switch (TREE_CODE (init))
10397 {
10398 case INTEGER_CST:
10399 return integer_zerop (init);
10400
10401 case REAL_CST:
10402 /* ??? Note that this is not correct for C4X float formats. There,
10403 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
10404 negative exponent. */
10405 return real_zerop (init)
10406 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));
10407
10408 case FIXED_CST:
10409 return fixed_zerop (init);
10410
10411 case COMPLEX_CST:
10412 return integer_zerop (init)
10413 || (real_zerop (init)
10414 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
10415 && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));
10416
10417 case VECTOR_CST:
10418 {
10419 unsigned i;
10420 for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
10421 if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
10422 return false;
10423 return true;
10424 }
10425
10426 case CONSTRUCTOR:
10427 {
10428 unsigned HOST_WIDE_INT idx;
10429
10430 if (TREE_CLOBBER_P (init))
10431 return false;
10432 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
10433 if (!initializer_zerop (elt))
10434 return false;
10435 return true;
10436 }
10437
10438 case STRING_CST:
10439 {
10440 int i;
10441
10442 /* We need to loop through all elements to handle cases like
10443 "\0" and "\0foobar". */
10444 for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
10445 if (TREE_STRING_POINTER (init)[i] != '\0')
10446 return false;
10447
10448 return true;
10449 }
10450
10451 default:
10452 return false;
10453 }
10454 }
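/* Examples (illustrative): a STRING_CST of "\0\0\0" and a CONSTRUCTOR whose
   elements are all zero both yield true; a REAL_CST of -0.0 yields false
   because of the REAL_VALUE_MINUS_ZERO check, and a clobber CONSTRUCTOR is
   rejected explicitly even though it has no elements. */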
10455
10456 /* Check whether vector VEC consists entirely of equal elements and
10457 whether the number of elements corresponds to the type of VEC.
10458 Return the first element of the vector,
10459 or NULL_TREE if the vector is not uniform. */
10460 tree
10461 uniform_vector_p (const_tree vec)
10462 {
10463 tree first, t;
10464 unsigned i;
10465
10466 if (vec == NULL_TREE)
10467 return NULL_TREE;
10468
10469 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));
10470
10471 if (TREE_CODE (vec) == VECTOR_CST)
10472 {
10473 first = VECTOR_CST_ELT (vec, 0);
10474 for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
10475 if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
10476 return NULL_TREE;
10477
10478 return first;
10479 }
10480
10481 else if (TREE_CODE (vec) == CONSTRUCTOR)
10482 {
10483 first = error_mark_node;
10484
10485 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
10486 {
10487 if (i == 0)
10488 {
10489 first = t;
10490 continue;
10491 }
10492 if (!operand_equal_p (first, t, 0))
10493 return NULL_TREE;
10494 }
10495 if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
10496 return NULL_TREE;
10497
10498 return first;
10499 }
10500
10501 return NULL_TREE;
10502 }
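/* Example (illustrative): for a VECTOR_CST {4, 4, 4, 4} this returns the
   element 4; for {1, 2, 3, 4} it returns NULL_TREE. A CONSTRUCTOR is also
   rejected when it supplies fewer elements than TYPE_VECTOR_SUBPARTS, since
   the trailing elements it omits are implicitly zero and need not equal the
   ones given. */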
10503
10504 /* Build an empty statement at location LOC. */
10505
10506 tree
10507 build_empty_stmt (location_t loc)
10508 {
10509 tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
10510 SET_EXPR_LOCATION (t, loc);
10511 return t;
10512 }
10513
10514
10515 /* Build an OpenMP clause with code CODE. LOC is the location of the
10516 clause. */
10517
10518 tree
10519 build_omp_clause (location_t loc, enum omp_clause_code code)
10520 {
10521 tree t;
10522 int size, length;
10523
10524 length = omp_clause_num_ops[code];
10525 size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));
10526
10527 record_node_allocation_statistics (OMP_CLAUSE, size);
10528
10529 t = (tree) ggc_internal_alloc (size);
10530 memset (t, 0, size);
10531 TREE_SET_CODE (t, OMP_CLAUSE);
10532 OMP_CLAUSE_SET_CODE (t, code);
10533 OMP_CLAUSE_LOCATION (t) = loc;
10534
10535 return t;
10536 }
10537
10538 /* Build a tcc_vl_exp object with code CODE and room for LEN operands. LEN
10539 includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
10540 Except for the CODE and operand count field, other storage for the
10541 object is initialized to zeros. */
10542
10543 tree
10544 build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
10545 {
10546 tree t;
10547 int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);
10548
10549 gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
10550 gcc_assert (len >= 1);
10551
10552 record_node_allocation_statistics (code, length);
10553
10554 t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
10555
10556 TREE_SET_CODE (t, code);
10557
10558 /* Can't use TREE_OPERAND to store the length because if checking is
10559 enabled, it will try to check the length before we store it. :-P */
10560 t->exp.operands[0] = build_int_cst (sizetype, len);
10561
10562 return t;
10563 }
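/* Sizing example (illustrative): a CALL_EXPR with two arguments is built
   with LEN = 5 (the operand-count slot, the callee, the static chain and
   the two arguments), so the allocation is
   sizeof (struct tree_exp) + 4 * sizeof (tree), and operand 0 records the
   value 5 so that TREE_OPERAND_LENGTH works on the variable-length node. */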
10564
10565 /* Helper function for build_call_* functions; build a CALL_EXPR with
10566 indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
10567 the argument slots. */
10568
10569 static tree
10570 build_call_1 (tree return_type, tree fn, int nargs)
10571 {
10572 tree t;
10573
10574 t = build_vl_exp (CALL_EXPR, nargs + 3);
10575 TREE_TYPE (t) = return_type;
10576 CALL_EXPR_FN (t) = fn;
10577 CALL_EXPR_STATIC_CHAIN (t) = NULL;
10578
10579 return t;
10580 }
10581
10582 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10583 FN and a null static chain slot. NARGS is the number of call arguments
10584 which are specified as "..." arguments. */
10585
10586 tree
10587 build_call_nary (tree return_type, tree fn, int nargs, ...)
10588 {
10589 tree ret;
10590 va_list args;
10591 va_start (args, nargs);
10592 ret = build_call_valist (return_type, fn, nargs, args);
10593 va_end (args);
10594 return ret;
10595 }
10596
10597 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10598 FN and a null static chain slot. NARGS is the number of call arguments
10599 which are specified as a va_list ARGS. */
10600
10601 tree
10602 build_call_valist (tree return_type, tree fn, int nargs, va_list args)
10603 {
10604 tree t;
10605 int i;
10606
10607 t = build_call_1 (return_type, fn, nargs);
10608 for (i = 0; i < nargs; i++)
10609 CALL_EXPR_ARG (t, i) = va_arg (args, tree);
10610 process_call_operands (t);
10611 return t;
10612 }
10613
10614 /* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
10615 FN and a null static chain slot. NARGS is the number of call arguments
10616 which are specified as a tree array ARGS. */
10617
10618 tree
10619 build_call_array_loc (location_t loc, tree return_type, tree fn,
10620 int nargs, const tree *args)
10621 {
10622 tree t;
10623 int i;
10624
10625 t = build_call_1 (return_type, fn, nargs);
10626 for (i = 0; i < nargs; i++)
10627 CALL_EXPR_ARG (t, i) = args[i];
10628 process_call_operands (t);
10629 SET_EXPR_LOCATION (t, loc);
10630 return t;
10631 }
10632
10633 /* Like build_call_array, but takes a vec. */
10634
10635 tree
10636 build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
10637 {
10638 tree ret, t;
10639 unsigned int ix;
10640
10641 ret = build_call_1 (return_type, fn, vec_safe_length (args));
10642 FOR_EACH_VEC_SAFE_ELT (args, ix, t)
10643 CALL_EXPR_ARG (ret, ix) = t;
10644 process_call_operands (ret);
10645 return ret;
10646 }
10647
10648 /* Conveniently construct a function call expression. FNDECL names the
10649 function to be called and N arguments are passed in the array
10650 ARGARRAY. */
10651
10652 tree
10653 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
10654 {
10655 tree fntype = TREE_TYPE (fndecl);
10656 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
10657
10658 return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
10659 }
10660
10661 /* Conveniently construct a function call expression. FNDECL names the
10662 function to be called and the arguments are passed in the vector
10663 VEC. */
10664
10665 tree
10666 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
10667 {
10668 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
10669 vec_safe_address (vec));
10670 }
10671
10672
10673 /* Conveniently construct a function call expression. FNDECL names the
10674 function to be called, N is the number of arguments, and the "..."
10675 parameters are the argument expressions. */
10676
10677 tree
10678 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
10679 {
10680 va_list ap;
10681 tree *argarray = XALLOCAVEC (tree, n);
10682 int i;
10683
10684 va_start (ap, n);
10685 for (i = 0; i < n; i++)
10686 argarray[i] = va_arg (ap, tree);
10687 va_end (ap);
10688 return build_call_expr_loc_array (loc, fndecl, n, argarray);
10689 }
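/* Usage example (illustrative; DST, SRC and LEN are assumed to be trees
   already in scope): building a call to a known builtin,

     tree call = build_call_expr_loc (loc,
                                      builtin_decl_explicit (BUILT_IN_MEMCPY),
                                      3, dst, src, len);

   returns a (possibly folded) CALL_EXPR with memcpy's return type. */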
10690
10691 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
10692 varargs macros aren't supported by all bootstrap compilers. */
10693
10694 tree
10695 build_call_expr (tree fndecl, int n, ...)
10696 {
10697 va_list ap;
10698 tree *argarray = XALLOCAVEC (tree, n);
10699 int i;
10700
10701 va_start (ap, n);
10702 for (i = 0; i < n; i++)
10703 argarray[i] = va_arg (ap, tree);
10704 va_end (ap);
10705 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
10706 }
10707
10708 /* Build an internal call expression. This is just like a CALL_EXPR, except
10709 its CALL_EXPR_FN is NULL. It will get gimplified later into an ordinary
10710 internal function call. */
10711
10712 tree
10713 build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
10714 tree type, int n, ...)
10715 {
10716 va_list ap;
10717 int i;
10718
10719 tree fn = build_call_1 (type, NULL_TREE, n);
10720 va_start (ap, n);
10721 for (i = 0; i < n; i++)
10722 CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
10723 va_end (ap);
10724 SET_EXPR_LOCATION (fn, loc);
10725 CALL_EXPR_IFN (fn) = ifn;
10726 return fn;
10727 }
10728
10729 /* Create a new constant string literal and return a char* pointer to it.
10730 The STRING_CST value is the LEN characters at STR. */
10731 tree
10732 build_string_literal (int len, const char *str)
10733 {
10734 tree t, elem, index, type;
10735
10736 t = build_string (len, str);
10737 elem = build_type_variant (char_type_node, 1, 0);
10738 index = build_index_type (size_int (len - 1));
10739 type = build_array_type (elem, index);
10740 TREE_TYPE (t) = type;
10741 TREE_CONSTANT (t) = 1;
10742 TREE_READONLY (t) = 1;
10743 TREE_STATIC (t) = 1;
10744
10745 type = build_pointer_type (elem);
10746 t = build1 (ADDR_EXPR, type,
10747 build4 (ARRAY_REF, elem,
10748 t, integer_zero_node, NULL_TREE, NULL_TREE));
10749 return t;
10750 }
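/* Example (illustrative): build_string_literal (6, "hello") builds a
   STRING_CST of the six bytes of "hello" including its terminating NUL
   (LEN counts the NUL the caller includes), gives it a const-char array
   type indexed 0..5, and returns an ADDR_EXPR of its first element,
   i.e. something usable wherever a "const char *" argument is expected. */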
10751
10752
10753
10754 /* Return true if T (assumed to be a DECL) must be assigned a memory
10755 location. */
10756
10757 bool
10758 needs_to_live_in_memory (const_tree t)
10759 {
10760 return (TREE_ADDRESSABLE (t)
10761 || is_global_var (t)
10762 || (TREE_CODE (t) == RESULT_DECL
10763 && !DECL_BY_REFERENCE (t)
10764 && aggregate_value_p (t, current_function_decl)));
10765 }
10766
10767 /* Return the value of the constant X, sign-extended. */
10768
10769 HOST_WIDE_INT
10770 int_cst_value (const_tree x)
10771 {
10772 unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
10773 unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);
10774
10775 /* Make sure the sign-extended value will fit in a HOST_WIDE_INT. */
10776 gcc_assert (cst_and_fits_in_hwi (x));
10777
10778 if (bits < HOST_BITS_PER_WIDE_INT)
10779 {
10780 bool negative = ((val >> (bits - 1)) & 1) != 0;
10781 if (negative)
10782 val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
10783 else
10784 val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
10785 }
10786
10787 return val;
10788 }
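/* Worked example (illustrative): for an 8-bit type whose low-order bits are
   0xff, bit 7 is set, so the upper HOST_WIDE_INT bits are filled with ones
   and the function returns -1; for 0x7f the padding bits are cleared
   instead and the result is 127. */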
10789
10790 /* If TYPE is an integral or pointer type, return an integer type with
10791 the same precision which is unsigned iff UNSIGNEDP is true, or itself
10792 if TYPE is already an integer type of signedness UNSIGNEDP. */
10793
10794 tree
10795 signed_or_unsigned_type_for (int unsignedp, tree type)
10796 {
10797 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
10798 return type;
10799
10800 if (TREE_CODE (type) == VECTOR_TYPE)
10801 {
10802 tree inner = TREE_TYPE (type);
10803 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
10804 if (!inner2)
10805 return NULL_TREE;
10806 if (inner == inner2)
10807 return type;
10808 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
10809 }
10810
10811 if (!INTEGRAL_TYPE_P (type)
10812 && !POINTER_TYPE_P (type)
10813 && TREE_CODE (type) != OFFSET_TYPE)
10814 return NULL_TREE;
10815
10816 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
10817 }
10818
10819 /* If TYPE is an integral or pointer type, return an integer type with
10820 the same precision which is unsigned, or itself if TYPE is already an
10821 unsigned integer type. */
10822
10823 tree
10824 unsigned_type_for (tree type)
10825 {
10826 return signed_or_unsigned_type_for (1, type);
10827 }
10828
10829 /* If TYPE is an integral or pointer type, return an integer type with
10830 the same precision which is signed, or itself if TYPE is already a
10831 signed integer type. */
10832
10833 tree
10834 signed_type_for (tree type)
10835 {
10836 return signed_or_unsigned_type_for (0, type);
10837 }
10838
10839 /* If TYPE is a vector type, return a signed integer vector type with the
10840 same width and number of subparts. Otherwise return boolean_type_node. */
10841
10842 tree
10843 truth_type_for (tree type)
10844 {
10845 if (TREE_CODE (type) == VECTOR_TYPE)
10846 {
10847 tree elem = lang_hooks.types.type_for_size
10848 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0);
10849 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type));
10850 }
10851 else
10852 return boolean_type_node;
10853 }
10854
10855 /* Returns the largest value obtainable by casting something in INNER type to
10856 OUTER type. */
10857
10858 tree
10859 upper_bound_in_type (tree outer, tree inner)
10860 {
10861 unsigned int det = 0;
10862 unsigned oprec = TYPE_PRECISION (outer);
10863 unsigned iprec = TYPE_PRECISION (inner);
10864 unsigned prec;
10865
10866 /* Compute a unique number for every combination. */
10867 det |= (oprec > iprec) ? 4 : 0;
10868 det |= TYPE_UNSIGNED (outer) ? 2 : 0;
10869 det |= TYPE_UNSIGNED (inner) ? 1 : 0;
10870
10871 /* Determine the exponent to use. */
10872 switch (det)
10873 {
10874 case 0:
10875 case 1:
10876 /* oprec <= iprec, outer: signed, inner: don't care. */
10877 prec = oprec - 1;
10878 break;
10879 case 2:
10880 case 3:
10881 /* oprec <= iprec, outer: unsigned, inner: don't care. */
10882 prec = oprec;
10883 break;
10884 case 4:
10885 /* oprec > iprec, outer: signed, inner: signed. */
10886 prec = iprec - 1;
10887 break;
10888 case 5:
10889 /* oprec > iprec, outer: signed, inner: unsigned. */
10890 prec = iprec;
10891 break;
10892 case 6:
10893 /* oprec > iprec, outer: unsigned, inner: signed. */
10894 prec = oprec;
10895 break;
10896 case 7:
10897 /* oprec > iprec, outer: unsigned, inner: unsigned. */
10898 prec = iprec;
10899 break;
10900 default:
10901 gcc_unreachable ();
10902 }
10903
10904 return wide_int_to_tree (outer,
10905 wi::mask (prec, false, TYPE_PRECISION (outer)));
10906 }
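/* Worked example (illustrative): with OUTER a signed 16-bit type and INNER
   an unsigned 8-bit type, oprec > iprec and only INNER is unsigned, so
   det = 4 | 0 | 1 = 5 and prec = iprec = 8; the result is the 16-bit
   constant 255, the largest value an 8-bit unsigned quantity can reach
   after the widening cast. */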
10907
10908 /* Returns the smallest value obtainable by casting something in INNER type to
10909 OUTER type. */
10910
10911 tree
10912 lower_bound_in_type (tree outer, tree inner)
10913 {
10914 unsigned oprec = TYPE_PRECISION (outer);
10915 unsigned iprec = TYPE_PRECISION (inner);
10916
10917 /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
10918 and obtain 0. */
10919 if (TYPE_UNSIGNED (outer)
10920 /* If we are widening something of an unsigned type, OUTER type
10921 contains all values of INNER type. In particular, both INNER
10922 and OUTER types have zero in common. */
10923 || (oprec > iprec && TYPE_UNSIGNED (inner)))
10924 return build_int_cst (outer, 0);
10925 else
10926 {
10927 /* If we are widening a signed type to another signed type, we
10928 want to obtain -2^(iprec-1). If we are keeping the
10929 precision or narrowing to a signed type, we want to obtain
10930 -2^(oprec-1). */
10931 unsigned prec = oprec > iprec ? iprec : oprec;
10932 return wide_int_to_tree (outer,
10933 wi::mask (prec - 1, true,
10934 TYPE_PRECISION (outer)));
10935 }
10936 }
10937
10938 /* Return nonzero if two operands that are suitable for PHI nodes are
10939 necessarily equal. Specifically, both ARG0 and ARG1 must be either
10940 SSA_NAME or invariant. Note that this is strictly an optimization.
10941 That is, callers of this function can directly call operand_equal_p
10942 and get the same result, only slower. */
10943
10944 int
10945 operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
10946 {
10947 if (arg0 == arg1)
10948 return 1;
10949 if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
10950 return 0;
10951 return operand_equal_p (arg0, arg1, 0);
10952 }
10953
10954 /* Returns the number of zeros at the end of the binary representation of X. */
10955
10956 tree
10957 num_ending_zeros (const_tree x)
10958 {
10959 return build_int_cst (TREE_TYPE (x), wi::ctz (x));
10960 }
10961
10962
10963 #define WALK_SUBTREE(NODE) \
10964 do \
10965 { \
10966 result = walk_tree_1 (&(NODE), func, data, pset, lh); \
10967 if (result) \
10968 return result; \
10969 } \
10970 while (0)
10971
10972 /* This is a subroutine of walk_tree that walks the fields of TYPE that are
10973 to be walked whenever a type is seen in the tree. The rest of the operands
10974 and the return value are as for walk_tree. */
10975
10976 static tree
10977 walk_type_fields (tree type, walk_tree_fn func, void *data,
10978 hash_set<tree> *pset, walk_tree_lh lh)
10979 {
10980 tree result = NULL_TREE;
10981
10982 switch (TREE_CODE (type))
10983 {
10984 case POINTER_TYPE:
10985 case REFERENCE_TYPE:
10986 case VECTOR_TYPE:
10987 /* We have to worry about mutually recursive pointers. These can't
10988 be written in C. They can in Ada. It's pathological, but
10989 there's an ACATS test (c38102a) that checks it. Deal with this
10990 by checking if we're pointing to another pointer, that one
10991 points to another pointer, that one does too, and we have no htab.
10992 If so, get a hash table. We check three levels deep to avoid
10993 the cost of the hash table if we don't need one. */
10994 if (POINTER_TYPE_P (TREE_TYPE (type))
10995 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
10996 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
10997 && !pset)
10998 {
10999 result = walk_tree_without_duplicates (&TREE_TYPE (type),
11000 func, data);
11001 if (result)
11002 return result;
11003
11004 break;
11005 }
11006
11007 /* ... fall through ... */
11008
11009 case COMPLEX_TYPE:
11010 WALK_SUBTREE (TREE_TYPE (type));
11011 break;
11012
11013 case METHOD_TYPE:
11014 WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));
11015
11016 /* Fall through. */
11017
11018 case FUNCTION_TYPE:
11019 WALK_SUBTREE (TREE_TYPE (type));
11020 {
11021 tree arg;
11022
11023 /* We never want to walk into default arguments. */
11024 for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
11025 WALK_SUBTREE (TREE_VALUE (arg));
11026 }
11027 break;
11028
11029 case ARRAY_TYPE:
11030 /* Don't follow this node's type if it is a pointer, for fear that
11031 we'll have infinite recursion. If we have a PSET, then we
11032 need not fear. */
11033 if (pset
11034 || (!POINTER_TYPE_P (TREE_TYPE (type))
11035 && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
11036 WALK_SUBTREE (TREE_TYPE (type));
11037 WALK_SUBTREE (TYPE_DOMAIN (type));
11038 break;
11039
11040 case OFFSET_TYPE:
11041 WALK_SUBTREE (TREE_TYPE (type));
11042 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
11043 break;
11044
11045 default:
11046 break;
11047 }
11048
11049 return NULL_TREE;
11050 }
11051
11052 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal. FUNC is
11053 called with the DATA and the address of each sub-tree. If FUNC returns a
11054 non-NULL value, the traversal is stopped, and the value returned by FUNC
11055 is returned. If PSET is non-NULL it is used to record the nodes visited,
11056 and to avoid visiting a node more than once. */
11057
11058 tree
11059 walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
11060 hash_set<tree> *pset, walk_tree_lh lh)
11061 {
11062 enum tree_code code;
11063 int walk_subtrees;
11064 tree result;
11065
11066 #define WALK_SUBTREE_TAIL(NODE) \
11067 do \
11068 { \
11069 tp = & (NODE); \
11070 goto tail_recurse; \
11071 } \
11072 while (0)
11073
11074 tail_recurse:
11075 /* Skip empty subtrees. */
11076 if (!*tp)
11077 return NULL_TREE;
11078
11079 /* Don't walk the same tree twice, if the user has requested
11080 that we avoid doing so. */
11081 if (pset && pset->add (*tp))
11082 return NULL_TREE;
11083
11084 /* Call the function. */
11085 walk_subtrees = 1;
11086 result = (*func) (tp, &walk_subtrees, data);
11087
11088 /* If we found something, return it. */
11089 if (result)
11090 return result;
11091
11092 code = TREE_CODE (*tp);
11093
11094 /* Even if we didn't, FUNC may have decided that there was nothing
11095 interesting below this point in the tree. */
11096 if (!walk_subtrees)
11097 {
11098 /* But we still need to check our siblings. */
11099 if (code == TREE_LIST)
11100 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11101 else if (code == OMP_CLAUSE)
11102 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11103 else
11104 return NULL_TREE;
11105 }
11106
11107 if (lh)
11108 {
11109 result = (*lh) (tp, &walk_subtrees, func, data, pset);
11110 if (result || !walk_subtrees)
11111 return result;
11112 }
11113
11114 switch (code)
11115 {
11116 case ERROR_MARK:
11117 case IDENTIFIER_NODE:
11118 case INTEGER_CST:
11119 case REAL_CST:
11120 case FIXED_CST:
11121 case VECTOR_CST:
11122 case STRING_CST:
11123 case BLOCK:
11124 case PLACEHOLDER_EXPR:
11125 case SSA_NAME:
11126 case FIELD_DECL:
11127 case RESULT_DECL:
11128 /* None of these have subtrees other than those already walked
11129 above. */
11130 break;
11131
11132 case TREE_LIST:
11133 WALK_SUBTREE (TREE_VALUE (*tp));
11134 WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
11135 break;
11136
11137 case TREE_VEC:
11138 {
11139 int len = TREE_VEC_LENGTH (*tp);
11140
11141 if (len == 0)
11142 break;
11143
11144 /* Walk all elements but the first. */
11145 while (--len)
11146 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
11147
11148 /* Now walk the first one as a tail call. */
11149 WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
11150 }
11151
11152 case COMPLEX_CST:
11153 WALK_SUBTREE (TREE_REALPART (*tp));
11154 WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));
11155
11156 case CONSTRUCTOR:
11157 {
11158 unsigned HOST_WIDE_INT idx;
11159 constructor_elt *ce;
11160
11161 for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
11162 idx++)
11163 WALK_SUBTREE (ce->value);
11164 }
11165 break;
11166
11167 case SAVE_EXPR:
11168 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));
11169
11170 case BIND_EXPR:
11171 {
11172 tree decl;
11173 for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
11174 {
11175 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
11176 into declarations that are just mentioned, rather than
11177 declared; they don't really belong to this part of the tree.
11178 And, we can see cycles: the initializer for a declaration
11179 can refer to the declaration itself. */
11180 WALK_SUBTREE (DECL_INITIAL (decl));
11181 WALK_SUBTREE (DECL_SIZE (decl));
11182 WALK_SUBTREE (DECL_SIZE_UNIT (decl));
11183 }
11184 WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
11185 }
11186
11187 case STATEMENT_LIST:
11188 {
11189 tree_stmt_iterator i;
11190 for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
11191 WALK_SUBTREE (*tsi_stmt_ptr (i));
11192 }
11193 break;
11194
11195 case OMP_CLAUSE:
11196 switch (OMP_CLAUSE_CODE (*tp))
11197 {
11198 case OMP_CLAUSE_GANG:
11199 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11200 /* FALLTHRU */
11201
11202 case OMP_CLAUSE_DEVICE_RESIDENT:
11203 case OMP_CLAUSE_USE_DEVICE:
11204 case OMP_CLAUSE_ASYNC:
11205 case OMP_CLAUSE_WAIT:
11206 case OMP_CLAUSE_WORKER:
11207 case OMP_CLAUSE_VECTOR:
11208 case OMP_CLAUSE_NUM_GANGS:
11209 case OMP_CLAUSE_NUM_WORKERS:
11210 case OMP_CLAUSE_VECTOR_LENGTH:
11211 case OMP_CLAUSE_PRIVATE:
11212 case OMP_CLAUSE_SHARED:
11213 case OMP_CLAUSE_FIRSTPRIVATE:
11214 case OMP_CLAUSE_COPYIN:
11215 case OMP_CLAUSE_COPYPRIVATE:
11216 case OMP_CLAUSE_FINAL:
11217 case OMP_CLAUSE_IF:
11218 case OMP_CLAUSE_NUM_THREADS:
11219 case OMP_CLAUSE_SCHEDULE:
11220 case OMP_CLAUSE_UNIFORM:
11221 case OMP_CLAUSE_DEPEND:
11222 case OMP_CLAUSE_NUM_TEAMS:
11223 case OMP_CLAUSE_THREAD_LIMIT:
11224 case OMP_CLAUSE_DEVICE:
11225 case OMP_CLAUSE_DIST_SCHEDULE:
11226 case OMP_CLAUSE_SAFELEN:
11227 case OMP_CLAUSE_SIMDLEN:
11228 case OMP_CLAUSE__LOOPTEMP_:
11229 case OMP_CLAUSE__SIMDUID_:
11230 case OMP_CLAUSE__CILK_FOR_COUNT_:
11231 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
11232 /* FALLTHRU */
11233
11234 case OMP_CLAUSE_INDEPENDENT:
11235 case OMP_CLAUSE_NOWAIT:
11236 case OMP_CLAUSE_ORDERED:
11237 case OMP_CLAUSE_DEFAULT:
11238 case OMP_CLAUSE_UNTIED:
11239 case OMP_CLAUSE_MERGEABLE:
11240 case OMP_CLAUSE_PROC_BIND:
11241 case OMP_CLAUSE_INBRANCH:
11242 case OMP_CLAUSE_NOTINBRANCH:
11243 case OMP_CLAUSE_FOR:
11244 case OMP_CLAUSE_PARALLEL:
11245 case OMP_CLAUSE_SECTIONS:
11246 case OMP_CLAUSE_TASKGROUP:
11247 case OMP_CLAUSE_AUTO:
11248 case OMP_CLAUSE_SEQ:
11249 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11250
11251 case OMP_CLAUSE_LASTPRIVATE:
11252 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11253 WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
11254 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11255
11256 case OMP_CLAUSE_COLLAPSE:
11257 {
11258 int i;
11259 for (i = 0; i < 3; i++)
11260 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11261 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11262 }
11263
11264 case OMP_CLAUSE_LINEAR:
11265 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11266 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
11267 WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
11268 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11269
11270 case OMP_CLAUSE_ALIGNED:
11271 case OMP_CLAUSE_FROM:
11272 case OMP_CLAUSE_TO:
11273 case OMP_CLAUSE_MAP:
11274 case OMP_CLAUSE__CACHE_:
11275 WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
11276 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
11277 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11278
11279 case OMP_CLAUSE_REDUCTION:
11280 {
11281 int i;
11282 for (i = 0; i < 4; i++)
11283 WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
11284 WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
11285 }
11286
11287 default:
11288 gcc_unreachable ();
11289 }
11290 break;
11291
11292 case TARGET_EXPR:
11293 {
11294 int i, len;
11295
11296 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
11297 But we only want to walk them once. */
11298 len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
11299 for (i = 0; i < len; ++i)
11300 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11301 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
11302 }
11303
11304 case DECL_EXPR:
11305 /* If this is a TYPE_DECL, walk into the fields of the type that it's
11306 defining. We only want to walk into these fields of a type in this
11307 case and not in the general case of a mere reference to the type.
11308
11309 The criterion is as follows: if the field can be an expression, it
11310 must be walked only here. This should be in keeping with the fields
11311 that are directly gimplified in gimplify_type_sizes in order for the
11312 mark/copy-if-shared/unmark machinery of the gimplifier to work with
11313 variable-sized types.
11314
11315 Note that DECLs get walked as part of processing the BIND_EXPR. */
11316 if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
11317 {
11318 tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
11319 if (TREE_CODE (*type_p) == ERROR_MARK)
11320 return NULL_TREE;
11321
11322 /* Call the function for the type. See if it returns anything or
11323 doesn't want us to continue. If we are to continue, walk both
11324 the normal fields and those for the declaration case. */
11325 result = (*func) (type_p, &walk_subtrees, data);
11326 if (result || !walk_subtrees)
11327 return result;
11328
11329 /* But do not walk a pointed-to type since it may itself need to
11330 be walked in the declaration case if it isn't anonymous. */
11331 if (!POINTER_TYPE_P (*type_p))
11332 {
11333 result = walk_type_fields (*type_p, func, data, pset, lh);
11334 if (result)
11335 return result;
11336 }
11337
11338 /* If this is a record type, also walk the fields. */
11339 if (RECORD_OR_UNION_TYPE_P (*type_p))
11340 {
11341 tree field;
11342
11343 for (field = TYPE_FIELDS (*type_p); field;
11344 field = DECL_CHAIN (field))
11345 {
11346 /* We'd like to look at the type of the field, but we can
11347 easily get infinite recursion. So assume it's pointed
11348 to elsewhere in the tree. Also, ignore things that
11349 aren't fields. */
11350 if (TREE_CODE (field) != FIELD_DECL)
11351 continue;
11352
11353 WALK_SUBTREE (DECL_FIELD_OFFSET (field));
11354 WALK_SUBTREE (DECL_SIZE (field));
11355 WALK_SUBTREE (DECL_SIZE_UNIT (field));
11356 if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
11357 WALK_SUBTREE (DECL_QUALIFIER (field));
11358 }
11359 }
11360
11361 /* Same for scalar types. */
11362 else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
11363 || TREE_CODE (*type_p) == ENUMERAL_TYPE
11364 || TREE_CODE (*type_p) == INTEGER_TYPE
11365 || TREE_CODE (*type_p) == FIXED_POINT_TYPE
11366 || TREE_CODE (*type_p) == REAL_TYPE)
11367 {
11368 WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
11369 WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
11370 }
11371
11372 WALK_SUBTREE (TYPE_SIZE (*type_p));
11373 WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
11374 }
11375 /* FALLTHRU */
11376
11377 default:
11378 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
11379 {
11380 int i, len;
11381
11382 /* Walk over all the sub-trees of this operand. */
11383 len = TREE_OPERAND_LENGTH (*tp);
11384
11385 /* Go through the subtrees. We need to do this in forward order so
11386 that the scope of a FOR_EXPR is handled properly. */
11387 if (len)
11388 {
11389 for (i = 0; i < len - 1; ++i)
11390 WALK_SUBTREE (TREE_OPERAND (*tp, i));
11391 WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
11392 }
11393 }
11394 /* If this is a type, walk the needed fields in the type. */
11395 else if (TYPE_P (*tp))
11396 return walk_type_fields (*tp, func, data, pset, lh);
11397 break;
11398 }
11399
11400 /* We didn't find what we were looking for. */
11401 return NULL_TREE;
11402
11403 #undef WALK_SUBTREE_TAIL
11404 }
11405 #undef WALK_SUBTREE
11406
11407 /* Like walk_tree, but does not walk duplicate nodes more than once. */
11408
11409 tree
11410 walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
11411 walk_tree_lh lh)
11412 {
11413 tree result;
11414
11415 hash_set<tree> pset;
11416 result = walk_tree_1 (tp, func, data, &pset, lh);
11417 return result;
11418 }
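
/* Usage sketch (editorial illustration only; the names count_nodes_r, expr
   and n are made up for this example and are not part of GCC): a minimal
   walk_tree_fn callback that counts the distinct nodes reachable from a
   tree.  Returning non-NULL from the callback stops the walk; clearing
   *WALK_SUBTREES skips the children of *TP.

     static tree
     count_nodes_r (tree *tp ATTRIBUTE_UNUSED,
                    int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
     {
       ++*(int *) data;
       return NULL_TREE;
     }

     int n = 0;
     walk_tree_without_duplicates_1 (&expr, count_nodes_r, &n, NULL);  */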
11419
11420
11421 tree
11422 tree_block (tree t)
11423 {
11424 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11425
11426 if (IS_EXPR_CODE_CLASS (c))
11427 return LOCATION_BLOCK (t->exp.locus);
11428 gcc_unreachable ();
11429 return NULL;
11430 }
11431
11432 void
11433 tree_set_block (tree t, tree b)
11434 {
11435 const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
11436
11437 if (IS_EXPR_CODE_CLASS (c))
11438 {
11439 if (b)
11440 t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
11441 else
11442 t->exp.locus = LOCATION_LOCUS (t->exp.locus);
11443 }
11444 else
11445 gcc_unreachable ();
11446 }
11447
11448 /* Create a nameless artificial label and put it in the current
11449 function context. The label has a location of LOC. Returns the
11450 newly created label. */
11451
11452 tree
11453 create_artificial_label (location_t loc)
11454 {
11455 tree lab = build_decl (loc,
11456 LABEL_DECL, NULL_TREE, void_type_node);
11457
11458 DECL_ARTIFICIAL (lab) = 1;
11459 DECL_IGNORED_P (lab) = 1;
11460 DECL_CONTEXT (lab) = current_function_decl;
11461 return lab;
11462 }
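
/* Usage sketch (editorial illustration only): a front end that needs a
   branch target it never exposes to the user might do something along the
   lines of

     tree lab = create_artificial_label (input_location);
     tree lab_expr = build1 (LABEL_EXPR, void_type_node, lab);

   where input_location and build1 come from elsewhere in GCC; the label is
   hidden from debug output because of DECL_IGNORED_P.  */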
11463
11464 /* Given a tree, try to return a useful variable name that we can use
11465 to prefix a temporary that is being assigned the value of the tree.
11466 E.g., given <temp> = &A, return A. */
11467
11468 const char *
11469 get_name (tree t)
11470 {
11471 tree stripped_decl;
11472
11473 stripped_decl = t;
11474 STRIP_NOPS (stripped_decl);
11475 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
11476 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
11477 else if (TREE_CODE (stripped_decl) == SSA_NAME)
11478 {
11479 tree name = SSA_NAME_IDENTIFIER (stripped_decl);
11480 if (!name)
11481 return NULL;
11482 return IDENTIFIER_POINTER (name);
11483 }
11484 else
11485 {
11486 switch (TREE_CODE (stripped_decl))
11487 {
11488 case ADDR_EXPR:
11489 return get_name (TREE_OPERAND (stripped_decl, 0));
11490 default:
11491 return NULL;
11492 }
11493 }
11494 }
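
/* Usage sketch (editorial illustration only, assuming create_tmp_var from
   gimple-expr.h): the stripped name makes temporaries easier to recognize
   in dumps, and a NULL result simply means "no prefix".

     tree tmp = create_tmp_var (TREE_TYPE (val), get_name (val));  */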
11495
11496 /* Return true if the function type FNTYPE has a variable argument list. */
11497
11498 bool
11499 stdarg_p (const_tree fntype)
11500 {
11501 function_args_iterator args_iter;
11502 tree n = NULL_TREE, t;
11503
11504 if (!fntype)
11505 return false;
11506
11507 FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
11508 {
11509 n = t;
11510 }
11511
11512 return n != NULL_TREE && n != void_type_node;
11513 }
11514
11515 /* Return true if the function type FNTYPE has a prototype. */
11516
11517 bool
11518 prototype_p (tree fntype)
11519 {
11520 tree t;
11521
11522 gcc_assert (fntype != NULL_TREE);
11523
11524 t = TYPE_ARG_TYPES (fntype);
11525 return (t != NULL_TREE);
11526 }
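
/* Usage sketch (editorial illustration only; FNDECL is assumed to be some
   FUNCTION_DECL supplied by the caller):

     tree fntype = TREE_TYPE (fndecl);
     bool has_prototype = prototype_p (fntype);
     bool is_variadic = has_prototype && stdarg_p (fntype);  */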
11527
11528 /* If BLOCK is inlined from an __attribute__((__artificial__))
11529 routine, return a pointer to the location from which it was
11530 called. */
11531 location_t *
11532 block_nonartificial_location (tree block)
11533 {
11534 location_t *ret = NULL;
11535
11536 while (block && TREE_CODE (block) == BLOCK
11537 && BLOCK_ABSTRACT_ORIGIN (block))
11538 {
11539 tree ao = BLOCK_ABSTRACT_ORIGIN (block);
11540
11541 while (TREE_CODE (ao) == BLOCK
11542 && BLOCK_ABSTRACT_ORIGIN (ao)
11543 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
11544 ao = BLOCK_ABSTRACT_ORIGIN (ao);
11545
11546 if (TREE_CODE (ao) == FUNCTION_DECL)
11547 {
11548 /* If AO is an artificial inline, point RET to the
11549 call site locus at which it has been inlined and continue
11550 the loop, in case AO's caller is also an artificial
11551 inline. */
11552 if (DECL_DECLARED_INLINE_P (ao)
11553 && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
11554 ret = &BLOCK_SOURCE_LOCATION (block);
11555 else
11556 break;
11557 }
11558 else if (TREE_CODE (ao) != BLOCK)
11559 break;
11560
11561 block = BLOCK_SUPERCONTEXT (block);
11562 }
11563 return ret;
11564 }
11565
11566
11567 /* If EXP is inlined from an __attribute__((__artificial__))
11568 function, return the location of the original call expression. */
11569
11570 location_t
11571 tree_nonartificial_location (tree exp)
11572 {
11573 location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));
11574
11575 if (loc)
11576 return *loc;
11577 else
11578 return EXPR_LOCATION (exp);
11579 }
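
/* Usage sketch (editorial illustration only): diagnostics about code that
   was inlined from an artificial wrapper read better when they point at
   the user-visible call site.

     warning_at (tree_nonartificial_location (exp), 0,
                 "problematic construct inlined from an artificial function");  */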
11580
11581
11582 /* These are the hash table functions for the hash table of OPTIMIZATION_NODE
11583 nodes. */
11584
11585 /* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION_NODE. */
11586
11587 hashval_t
11588 cl_option_hasher::hash (tree x)
11589 {
11590 const_tree const t = x;
11591 const char *p;
11592 size_t i;
11593 size_t len = 0;
11594 hashval_t hash = 0;
11595
11596 if (TREE_CODE (t) == OPTIMIZATION_NODE)
11597 {
11598 p = (const char *)TREE_OPTIMIZATION (t);
11599 len = sizeof (struct cl_optimization);
11600 }
11601
11602 else if (TREE_CODE (t) == TARGET_OPTION_NODE)
11603 return cl_target_option_hash (TREE_TARGET_OPTION (t));
11604
11605 else
11606 gcc_unreachable ();
11607
11608 /* assume most opt flags are just 0/1, some are 2-3, and a few might be
11609 something else. */
11610 for (i = 0; i < len; i++)
11611 if (p[i])
11612 hash = (hash << 4) ^ ((i << 2) | p[i]);
11613
11614 return hash;
11615 }
11616
11617 /* Return nonzero if the value represented by X (an OPTIMIZATION_NODE or
11618 TARGET_OPTION_NODE tree node) is the same as that given by Y, a node of
11619 the same kind. */
11620
11621 bool
11622 cl_option_hasher::equal (tree x, tree y)
11623 {
11624 const_tree const xt = x;
11625 const_tree const yt = y;
11626 const char *xp;
11627 const char *yp;
11628 size_t len;
11629
11630 if (TREE_CODE (xt) != TREE_CODE (yt))
11631 return 0;
11632
11633 if (TREE_CODE (xt) == OPTIMIZATION_NODE)
11634 {
11635 xp = (const char *)TREE_OPTIMIZATION (xt);
11636 yp = (const char *)TREE_OPTIMIZATION (yt);
11637 len = sizeof (struct cl_optimization);
11638 }
11639
11640 else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
11641 {
11642 return cl_target_option_eq (TREE_TARGET_OPTION (xt),
11643 TREE_TARGET_OPTION (yt));
11644 }
11645
11646 else
11647 gcc_unreachable ();
11648
11649 return (memcmp (xp, yp, len) == 0);
11650 }
11651
11652 /* Build an OPTIMIZATION_NODE based on the options in OPTS. */
11653
11654 tree
11655 build_optimization_node (struct gcc_options *opts)
11656 {
11657 tree t;
11658
11659 /* Use the cache of optimization nodes. */
11660
11661 cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
11662 opts);
11663
11664 tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
11665 t = *slot;
11666 if (!t)
11667 {
11668 /* Insert this one into the hash table. */
11669 t = cl_optimization_node;
11670 *slot = t;
11671
11672 /* Make a new node for next time round. */
11673 cl_optimization_node = make_node (OPTIMIZATION_NODE);
11674 }
11675
11676 return t;
11677 }
11678
11679 /* Build a TARGET_OPTION_NODE based on the options in OPTS. */
11680
11681 tree
11682 build_target_option_node (struct gcc_options *opts)
11683 {
11684 tree t;
11685
11686 /* Use the cache of optimization nodes. */
11687
11688 cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
11689 opts);
11690
11691 tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
11692 t = *slot;
11693 if (!t)
11694 {
11695 /* Insert this one into the hash table. */
11696 t = cl_target_option_node;
11697 *slot = t;
11698
11699 /* Make a new node for next time round. */
11700 cl_target_option_node = make_node (TARGET_OPTION_NODE);
11701 }
11702
11703 return t;
11704 }
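
/* Usage sketch (editorial illustration only, assuming the
   DECL_FUNCTION_SPECIFIC_* accessors and global_options from elsewhere in
   GCC): attribute handlers typically record the option state in effect for
   a function by attaching these shared nodes to its decl.

     DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
       = build_optimization_node (&global_options);
     DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
       = build_target_option_node (&global_options);  */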
11705
11706 /* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
11707 so that they aren't saved during PCH writing. */
11708
11709 void
11710 prepare_target_option_nodes_for_pch (void)
11711 {
11712 hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
11713 for (; iter != cl_option_hash_table->end (); ++iter)
11714 if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
11715 TREE_TARGET_GLOBALS (*iter) = NULL;
11716 }
11717
11718 /* Determine the "ultimate origin" of a block. The block may be an inlined
11719 instance of an inlined instance of a block which is local to an inline
11720 function, so we have to trace all of the way back through the origin chain
11721 to find out what sort of node actually served as the original seed for the
11722 given block. */
11723
11724 tree
11725 block_ultimate_origin (const_tree block)
11726 {
11727 tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);
11728
11729 /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
11730 we're trying to output the abstract instance of this function. */
11731 if (BLOCK_ABSTRACT (block) && immediate_origin == block)
11732 return NULL_TREE;
11733
11734 if (immediate_origin == NULL_TREE)
11735 return NULL_TREE;
11736 else
11737 {
11738 tree ret_val;
11739 tree lookahead = immediate_origin;
11740
11741 do
11742 {
11743 ret_val = lookahead;
11744 lookahead = (TREE_CODE (ret_val) == BLOCK
11745 ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
11746 }
11747 while (lookahead != NULL && lookahead != ret_val);
11748
11749 /* The block's abstract origin chain may not be the *ultimate* origin of
11750 the block. It could lead to a DECL that has an abstract origin set.
11751 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
11752 will give us if it has one). Note that DECL's abstract origins are
11753 supposed to be the most distant ancestor (or so decl_ultimate_origin
11754 claims), so we don't need to loop following the DECL origins. */
11755 if (DECL_P (ret_val))
11756 return DECL_ORIGIN (ret_val);
11757
11758 return ret_val;
11759 }
11760 }
11761
11762 /* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
11763 no instruction. */
11764
11765 bool
11766 tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
11767 {
11768 /* Use precision rather than machine mode when we can, which gives
11769 the correct answer even for submode (bit-field) types. */
11770 if ((INTEGRAL_TYPE_P (outer_type)
11771 || POINTER_TYPE_P (outer_type)
11772 || TREE_CODE (outer_type) == OFFSET_TYPE)
11773 && (INTEGRAL_TYPE_P (inner_type)
11774 || POINTER_TYPE_P (inner_type)
11775 || TREE_CODE (inner_type) == OFFSET_TYPE))
11776 return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);
11777
11778 /* Otherwise fall back on comparing machine modes (e.g. for
11779 aggregate types, floats). */
11780 return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
11781 }
11782
11783 /* Return true iff conversion in EXP generates no instruction. Mark
11784 it inline so that we fully inline into the stripping functions even
11785 though we have two uses of this function. */
11786
11787 static inline bool
11788 tree_nop_conversion (const_tree exp)
11789 {
11790 tree outer_type, inner_type;
11791
11792 if (!CONVERT_EXPR_P (exp)
11793 && TREE_CODE (exp) != NON_LVALUE_EXPR)
11794 return false;
11795 if (TREE_OPERAND (exp, 0) == error_mark_node)
11796 return false;
11797
11798 outer_type = TREE_TYPE (exp);
11799 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11800
11801 if (!inner_type)
11802 return false;
11803
11804 return tree_nop_conversion_p (outer_type, inner_type);
11805 }
11806
11807 /* Return true iff conversion in EXP generates no instruction. Don't
11808 consider conversions changing the signedness. */
11809
11810 static bool
11811 tree_sign_nop_conversion (const_tree exp)
11812 {
11813 tree outer_type, inner_type;
11814
11815 if (!tree_nop_conversion (exp))
11816 return false;
11817
11818 outer_type = TREE_TYPE (exp);
11819 inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
11820
11821 return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
11822 && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
11823 }
11824
11825 /* Strip conversions from EXP according to tree_nop_conversion and
11826 return the resulting expression. */
11827
11828 tree
11829 tree_strip_nop_conversions (tree exp)
11830 {
11831 while (tree_nop_conversion (exp))
11832 exp = TREE_OPERAND (exp, 0);
11833 return exp;
11834 }
11835
11836 /* Strip conversions from EXP according to tree_sign_nop_conversion
11837 and return the resulting expression. */
11838
11839 tree
11840 tree_strip_sign_nop_conversions (tree exp)
11841 {
11842 while (tree_sign_nop_conversion (exp))
11843 exp = TREE_OPERAND (exp, 0);
11844 return exp;
11845 }
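
/* Worked example (editorial illustration only): assume X has type int and
   that int and unsigned int have the same precision.  For

     t = (int) (unsigned int) x;

   tree_strip_nop_conversions (t) strips both casts and returns X, whereas
   tree_strip_sign_nop_conversions (t) returns T unchanged, because the
   outermost conversion changes signedness.  */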
11846
11847 /* Avoid any floating point extensions from EXP. */
11848 tree
11849 strip_float_extensions (tree exp)
11850 {
11851 tree sub, expt, subt;
11852
11853 /* For a floating point constant, look up the narrowest type that can hold
11854 it properly and handle it like (type)(narrowest_type)constant.
11855 This way we can optimize for instance a=a*2.0 where "a" is float
11856 but 2.0 is a double constant. */
11857 if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
11858 {
11859 REAL_VALUE_TYPE orig;
11860 tree type = NULL;
11861
11862 orig = TREE_REAL_CST (exp);
11863 if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
11864 && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
11865 type = float_type_node;
11866 else if (TYPE_PRECISION (TREE_TYPE (exp))
11867 > TYPE_PRECISION (double_type_node)
11868 && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
11869 type = double_type_node;
11870 if (type)
11871 return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
11872 }
11873
11874 if (!CONVERT_EXPR_P (exp))
11875 return exp;
11876
11877 sub = TREE_OPERAND (exp, 0);
11878 subt = TREE_TYPE (sub);
11879 expt = TREE_TYPE (exp);
11880
11881 if (!FLOAT_TYPE_P (subt))
11882 return exp;
11883
11884 if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
11885 return exp;
11886
11887 if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
11888 return exp;
11889
11890 return strip_float_extensions (sub);
11891 }
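
/* Usage sketch (editorial illustration only; OP0 is assumed to be a
   floating-point tree expression supplied by the caller): a simplification
   routine can use this to see whether a value was merely widened.

     tree narrow = strip_float_extensions (op0);
     bool was_widened = (TYPE_PRECISION (TREE_TYPE (narrow))
                         < TYPE_PRECISION (TREE_TYPE (op0)));  */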
11892
11893 /* Strip out all handled components that produce invariant
11894 offsets. */
11895
11896 const_tree
11897 strip_invariant_refs (const_tree op)
11898 {
11899 while (handled_component_p (op))
11900 {
11901 switch (TREE_CODE (op))
11902 {
11903 case ARRAY_REF:
11904 case ARRAY_RANGE_REF:
11905 if (!is_gimple_constant (TREE_OPERAND (op, 1))
11906 || TREE_OPERAND (op, 2) != NULL_TREE
11907 || TREE_OPERAND (op, 3) != NULL_TREE)
11908 return NULL;
11909 break;
11910
11911 case COMPONENT_REF:
11912 if (TREE_OPERAND (op, 2) != NULL_TREE)
11913 return NULL;
11914 break;
11915
11916 default:;
11917 }
11918 op = TREE_OPERAND (op, 0);
11919 }
11920
11921 return op;
11922 }
11923
11924 static GTY(()) tree gcc_eh_personality_decl;
11925
11926 /* Return the GCC personality function decl. */
11927
11928 tree
11929 lhd_gcc_personality (void)
11930 {
11931 if (!gcc_eh_personality_decl)
11932 gcc_eh_personality_decl = build_personality_function ("gcc");
11933 return gcc_eh_personality_decl;
11934 }
11935
11936 /* TARGET is the call target of a GIMPLE call statement
11937 (obtained by gimple_call_fn). Return true if it is an
11938 OBJ_TYPE_REF representing a virtual call to a C++ method.
11939 (As opposed to an OBJ_TYPE_REF representing Objective-C calls
11940 through a cast, where the middle-end devirtualization machinery
11941 can't apply.) */
11942
11943 bool
11944 virtual_method_call_p (tree target)
11945 {
11946 if (TREE_CODE (target) != OBJ_TYPE_REF)
11947 return false;
11948 tree t = TREE_TYPE (target);
11949 gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
11950 t = TREE_TYPE (t);
11951 if (TREE_CODE (t) == FUNCTION_TYPE)
11952 return false;
11953 gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
11954 /* If we do not have BINFO associated, it means that type was built
11955 without devirtualization enabled. Do not consider this a virtual
11956 call. */
11957 if (!TYPE_BINFO (obj_type_ref_class (target)))
11958 return false;
11959 return true;
11960 }
11961
11962 /* REF is an OBJ_TYPE_REF; return the class the ref corresponds to. */
11963
11964 tree
11965 obj_type_ref_class (tree ref)
11966 {
11967 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF);
11968 ref = TREE_TYPE (ref);
11969 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11970 ref = TREE_TYPE (ref);
11971 /* We look for the type that THIS points to. ObjC also builds
11972 OBJ_TYPE_REF for non-method calls; their first parameter
11973 ID, however, also corresponds to the class type. */
11974 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE
11975 || TREE_CODE (ref) == FUNCTION_TYPE);
11976 ref = TREE_VALUE (TYPE_ARG_TYPES (ref));
11977 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE);
11978 return TREE_TYPE (ref);
11979 }
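
/* Usage sketch (editorial illustration only; FN is assumed to be the call
   target of a GIMPLE call, as returned by gimple_call_fn):

     tree klass = virtual_method_call_p (fn)
                  ? obj_type_ref_class (fn) : NULL_TREE;  */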
11980
11981 /* Return true if T is in an anonymous namespace. */
11982
11983 bool
11984 type_in_anonymous_namespace_p (const_tree t)
11985 {
11986 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for
11987 builtin types; those have a NULL TYPE_CONTEXT. */
11988 if (!TYPE_CONTEXT (t))
11989 return false;
11990 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t)));
11991 }
11992
11993 /* Try to find a base info of BINFO that would have its field decl at offset
11994 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be
11995 found, return it; otherwise return NULL_TREE. */
11996
11997 tree
11998 get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
11999 {
12000 tree type = BINFO_TYPE (binfo);
12001
12002 while (true)
12003 {
12004 HOST_WIDE_INT pos, size;
12005 tree fld;
12006 int i;
12007
12008 if (types_same_for_odr (type, expected_type))
12009 return binfo;
12010 if (offset < 0)
12011 return NULL_TREE;
12012
12013 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
12014 {
12015 if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
12016 continue;
12017
12018 pos = int_bit_position (fld);
12019 size = tree_to_uhwi (DECL_SIZE (fld));
12020 if (pos <= offset && (pos + size) > offset)
12021 break;
12022 }
12023 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
12024 return NULL_TREE;
12025
12026 /* Offset 0 indicates the primary base, whose vtable contents are
12027 represented in the binfo for the derived class. */
12028 else if (offset != 0)
12029 {
12030 tree base_binfo, binfo2 = binfo;
12031
12032 /* Find the BINFO corresponding to FLD. This is a bit harder
12033 because with virtual inheritance we may need to walk down
12034 the non-virtual inheritance chain. */
12035 while (true)
12036 {
12037 tree containing_binfo = NULL, found_binfo = NULL;
12038 for (i = 0; BINFO_BASE_ITERATE (binfo2, i, base_binfo); i++)
12039 if (types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
12040 {
12041 found_binfo = base_binfo;
12042 break;
12043 }
12044 else
12045 if ((tree_to_shwi (BINFO_OFFSET (base_binfo))
12046 - tree_to_shwi (BINFO_OFFSET (binfo)))
12047 * BITS_PER_UNIT < pos
12048 /* Rule out types with no virtual methods, or we can get confused
12049 here by zero-sized bases. */
12050 && TYPE_BINFO (BINFO_TYPE (base_binfo))
12051 && BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (base_binfo)))
12052 && (!containing_binfo
12053 || (tree_to_shwi (BINFO_OFFSET (containing_binfo))
12054 < tree_to_shwi (BINFO_OFFSET (base_binfo)))))
12055 containing_binfo = base_binfo;
12056 if (found_binfo)
12057 {
12058 binfo = found_binfo;
12059 break;
12060 }
12061 if (!containing_binfo)
12062 return NULL_TREE;
12063 binfo2 = containing_binfo;
12064 }
12065 }
12066
12067 type = TREE_TYPE (fld);
12068 offset -= pos;
12069 }
12070 }
12071
12072 /* Returns true if X is a typedef decl. */
12073
12074 bool
12075 is_typedef_decl (tree x)
12076 {
12077 return (x && TREE_CODE (x) == TYPE_DECL
12078 && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
12079 }
12080
12081 /* Returns true iff TYPE is a type variant created for a typedef. */
12082
12083 bool
12084 typedef_variant_p (tree type)
12085 {
12086 return is_typedef_decl (TYPE_NAME (type));
12087 }
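
/* Usage sketch (editorial illustration only): looking through a typedef to
   the type it was declared with.

     if (typedef_variant_p (type))
       type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));  */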
12088
12089 /* Warn about a use of an identifier which was marked deprecated. */
12090 void
12091 warn_deprecated_use (tree node, tree attr)
12092 {
12093 const char *msg;
12094
12095 if (node == 0 || !warn_deprecated_decl)
12096 return;
12097
12098 if (!attr)
12099 {
12100 if (DECL_P (node))
12101 attr = DECL_ATTRIBUTES (node);
12102 else if (TYPE_P (node))
12103 {
12104 tree decl = TYPE_STUB_DECL (node);
12105 if (decl)
12106 attr = lookup_attribute ("deprecated",
12107 TYPE_ATTRIBUTES (TREE_TYPE (decl)));
12108 }
12109 }
12110
12111 if (attr)
12112 attr = lookup_attribute ("deprecated", attr);
12113
12114 if (attr)
12115 msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
12116 else
12117 msg = NULL;
12118
12119 bool w;
12120 if (DECL_P (node))
12121 {
12122 if (msg)
12123 w = warning (OPT_Wdeprecated_declarations,
12124 "%qD is deprecated: %s", node, msg);
12125 else
12126 w = warning (OPT_Wdeprecated_declarations,
12127 "%qD is deprecated", node);
12128 if (w)
12129 inform (DECL_SOURCE_LOCATION (node), "declared here");
12130 }
12131 else if (TYPE_P (node))
12132 {
12133 tree what = NULL_TREE;
12134 tree decl = TYPE_STUB_DECL (node);
12135
12136 if (TYPE_NAME (node))
12137 {
12138 if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
12139 what = TYPE_NAME (node);
12140 else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
12141 && DECL_NAME (TYPE_NAME (node)))
12142 what = DECL_NAME (TYPE_NAME (node));
12143 }
12144
12145 if (decl)
12146 {
12147 if (what)
12148 {
12149 if (msg)
12150 w = warning (OPT_Wdeprecated_declarations,
12151 "%qE is deprecated: %s", what, msg);
12152 else
12153 w = warning (OPT_Wdeprecated_declarations,
12154 "%qE is deprecated", what);
12155 }
12156 else
12157 {
12158 if (msg)
12159 w = warning (OPT_Wdeprecated_declarations,
12160 "type is deprecated: %s", msg);
12161 else
12162 w = warning (OPT_Wdeprecated_declarations,
12163 "type is deprecated");
12164 }
12165 if (w)
12166 inform (DECL_SOURCE_LOCATION (decl), "declared here");
12167 }
12168 else
12169 {
12170 if (what)
12171 {
12172 if (msg)
12173 warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
12174 what, msg);
12175 else
12176 warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
12177 }
12178 else
12179 {
12180 if (msg)
12181 warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
12182 msg);
12183 else
12184 warning (OPT_Wdeprecated_declarations, "type is deprecated");
12185 }
12186 }
12187 }
12188 }
12189
12190 /* Return true if REF has a COMPONENT_REF with a bit-field field declaration
12191 somewhere in it. */
12192
12193 bool
12194 contains_bitfld_component_ref_p (const_tree ref)
12195 {
12196 while (handled_component_p (ref))
12197 {
12198 if (TREE_CODE (ref) == COMPONENT_REF
12199 && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
12200 return true;
12201 ref = TREE_OPERAND (ref, 0);
12202 }
12203
12204 return false;
12205 }
12206
12207 /* Try to determine whether a TRY_CATCH expression can fall through.
12208 This is a subroutine of block_may_fallthru. */
12209
12210 static bool
12211 try_catch_may_fallthru (const_tree stmt)
12212 {
12213 tree_stmt_iterator i;
12214
12215 /* If the TRY block can fall through, the whole TRY_CATCH can
12216 fall through. */
12217 if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
12218 return true;
12219
12220 i = tsi_start (TREE_OPERAND (stmt, 1));
12221 switch (TREE_CODE (tsi_stmt (i)))
12222 {
12223 case CATCH_EXPR:
12224 /* We expect to see a sequence of CATCH_EXPR trees, each with a
12225 catch expression and a body. The whole TRY_CATCH may fall
12226 through iff any of the catch bodies falls through. */
12227 for (; !tsi_end_p (i); tsi_next (&i))
12228 {
12229 if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
12230 return true;
12231 }
12232 return false;
12233
12234 case EH_FILTER_EXPR:
12235 /* The exception filter expression only matters if there is an
12236 exception. If the exception does not match EH_FILTER_TYPES,
12237 we will execute EH_FILTER_FAILURE, and we will fall through
12238 if that falls through. If the exception does match
12239 EH_FILTER_TYPES, the stack unwinder will continue up the
12240 stack, so we will not fall through. We don't know whether we
12241 will throw an exception which matches EH_FILTER_TYPES or not,
12242 so we just ignore EH_FILTER_TYPES and assume that we might
12243 throw an exception which doesn't match. */
12244 return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));
12245
12246 default:
12247 /* This case represents statements to be executed when an
12248 exception occurs. Those statements are implicitly followed
12249 by a RESX statement to resume execution after the exception.
12250 So in this case the TRY_CATCH never falls through. */
12251 return false;
12252 }
12253 }
12254
12255 /* Try to determine if we can fall out of the bottom of BLOCK. This guess
12256 need not be 100% accurate; simply be conservative and return true if we
12257 don't know. This is used only to avoid stupidly generating extra code.
12258 If we're wrong, we'll just delete the extra code later. */
12259
12260 bool
12261 block_may_fallthru (const_tree block)
12262 {
12263 /* This CONST_CAST is okay because expr_last returns its argument
12264 unmodified and we assign it to a const_tree. */
12265 const_tree stmt = expr_last (CONST_CAST_TREE (block));
12266
12267 switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
12268 {
12269 case GOTO_EXPR:
12270 case RETURN_EXPR:
12271 /* Easy cases. If the last statement of the block implies
12272 control transfer, then we can't fall through. */
12273 return false;
12274
12275 case SWITCH_EXPR:
12276 /* If SWITCH_LABELS is set, this is lowered, and represents a
12277 branch to a selected label and hence cannot fall through.
12278 Otherwise SWITCH_BODY is set, and the switch can fall
12279 through. */
12280 return SWITCH_LABELS (stmt) == NULL_TREE;
12281
12282 case COND_EXPR:
12283 if (block_may_fallthru (COND_EXPR_THEN (stmt)))
12284 return true;
12285 return block_may_fallthru (COND_EXPR_ELSE (stmt));
12286
12287 case BIND_EXPR:
12288 return block_may_fallthru (BIND_EXPR_BODY (stmt));
12289
12290 case TRY_CATCH_EXPR:
12291 return try_catch_may_fallthru (stmt);
12292
12293 case TRY_FINALLY_EXPR:
12294 /* The finally clause is always executed after the try clause,
12295 so if it does not fall through, then the try-finally will not
12296 fall through. Otherwise, if the try clause does not fall
12297 through, then when the finally clause falls through it will
12298 resume execution wherever the try clause was going. So the
12299 whole try-finally will only fall through if both the try
12300 clause and the finally clause fall through. */
12301 return (block_may_fallthru (TREE_OPERAND (stmt, 0))
12302 && block_may_fallthru (TREE_OPERAND (stmt, 1)));
12303
12304 case MODIFY_EXPR:
12305 if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
12306 stmt = TREE_OPERAND (stmt, 1);
12307 else
12308 return true;
12309 /* FALLTHRU */
12310
12311 case CALL_EXPR:
12312 /* Functions that do not return do not fall through. */
12313 return (call_expr_flags (stmt) & ECF_NORETURN) == 0;
12314
12315 case CLEANUP_POINT_EXPR:
12316 return block_may_fallthru (TREE_OPERAND (stmt, 0));
12317
12318 case TARGET_EXPR:
12319 return block_may_fallthru (TREE_OPERAND (stmt, 1));
12320
12321 case ERROR_MARK:
12322 return true;
12323
12324 default:
12325 return lang_hooks.block_may_fallthru (stmt);
12326 }
12327 }
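
/* Usage sketch (editorial illustration only; FNDECL is assumed to be a
   FUNCTION_DECL whose GENERIC body is still available): deciding whether
   the end of a function body is reachable.

     bool needs_implicit_return
       = block_may_fallthru (DECL_SAVED_TREE (fndecl));  */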
12328
12329 /* True if we are using EH to handle cleanups. */
12330 static bool using_eh_for_cleanups_flag = false;
12331
12332 /* This routine is called from front ends to indicate that EH should be used
12333 for cleanups. */
12334 void
12335 using_eh_for_cleanups (void)
12336 {
12337 using_eh_for_cleanups_flag = true;
12338 }
12339
12340 /* Query whether EH is used for cleanups. */
12341 bool
12342 using_eh_for_cleanups_p (void)
12343 {
12344 return using_eh_for_cleanups_flag;
12345 }
12346
12347 /* Wrapper for tree_code_name to ensure that the tree code is valid. */
12348 const char *
12349 get_tree_code_name (enum tree_code code)
12350 {
12351 const char *invalid = "<invalid tree code>";
12352
12353 if (code >= MAX_TREE_CODES)
12354 return invalid;
12355
12356 return tree_code_name[code];
12357 }
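
/* Usage sketch (editorial illustration only): debug output that stays safe
   even for corrupted codes, since out-of-range values map to
   "<invalid tree code>".

     fprintf (stderr, "unexpected node %s\n",
              get_tree_code_name (TREE_CODE (t)));  */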
12358
12359 /* Drops the TREE_OVERFLOW flag from T. */
12360
12361 tree
12362 drop_tree_overflow (tree t)
12363 {
12364 gcc_checking_assert (TREE_OVERFLOW (t));
12365
12366 /* For tree codes with a sharing machinery re-build the result. */
12367 if (TREE_CODE (t) == INTEGER_CST)
12368 return wide_int_to_tree (TREE_TYPE (t), t);
12369
12370 /* Otherwise, as all tcc_constants are possibly shared, copy the node
12371 and drop the flag. */
12372 t = copy_node (t);
12373 TREE_OVERFLOW (t) = 0;
12374 return t;
12375 }
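
/* Usage sketch (editorial illustration only): a folder that wants to reuse
   a constant while discarding a stale overflow marker.

     if (TREE_OVERFLOW_P (cst))
       cst = drop_tree_overflow (cst);  */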
12376
12377 /* Given a memory reference expression T, return its base address.
12378 The base address of a memory reference expression is the main
12379 object being referenced. For instance, the base address for
12380 'array[i].fld[j]' is 'array'. You can think of this as stripping
12381 away the offset part from a memory address.
12382
12383 This function calls handled_component_p to strip away all the inner
12384 parts of the memory reference until it reaches the base object. */
12385
12386 tree
12387 get_base_address (tree t)
12388 {
12389 while (handled_component_p (t))
12390 t = TREE_OPERAND (t, 0);
12391
12392 if ((TREE_CODE (t) == MEM_REF
12393 || TREE_CODE (t) == TARGET_MEM_REF)
12394 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
12395 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
12396
12397 /* ??? Either the alias oracle or all callers need to properly deal
12398 with WITH_SIZE_EXPRs before we can look through those. */
12399 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12400 return NULL_TREE;
12401
12402 return t;
12403 }
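
/* Usage sketch (editorial illustration only): checking whether a reference
   ultimately accesses a declaration rather than arbitrary memory.

     tree base = get_base_address (ref);
     bool decl_based = base != NULL_TREE && DECL_P (base);  */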
12404
12405 /* Return the machine mode of T. For vectors, returns the mode of the
12406 inner type. The main use case is to feed the result to HONOR_NANS,
12407 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */
12408
12409 machine_mode
12410 element_mode (const_tree t)
12411 {
12412 if (!TYPE_P (t))
12413 t = TREE_TYPE (t);
12414 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
12415 t = TREE_TYPE (t);
12416 return TYPE_MODE (t);
12417 }
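
/* Usage sketch (editorial illustration only, assuming HONOR_NANS from
   real.h): querying NaN semantics for a possibly vector or complex
   floating type without tripping over BLKmode.

     bool nans = HONOR_NANS (element_mode (type));  */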
12418
12419 #include "gt-tree.h"