1 /* Gimple IR support functions.
2
3 Copyright 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "target.h"
27 #include "tree.h"
28 #include "ggc.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "gimple.h"
32 #include "diagnostic.h"
33 #include "tree-flow.h"
34 #include "value-prof.h"
35 #include "flags.h"
36 #include "alias.h"
37 #include "demangle.h"
38 #include "langhooks.h"
39
40 /* Global type table. FIXME lto, it should be possible to re-use some
41 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
42 etc), but those assume that types were built with the various
43 build_*_type routines which is not the case with the streamer. */
44 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
45 htab_t gimple_types;
46 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
47 htab_t gimple_canonical_types;
48 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
49 htab_t type_hash_cache;
50 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
51 htab_t canonical_type_hash_cache;
52
53 /* All the tuples have their operand vector (if present) at the very bottom
54 of the structure. Therefore, the offset required to find the
55 operands vector is the size of the structure minus the size of the
56 1-element tree array at the end (see gimple_ops). */
57 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
58 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
59 EXPORTED_CONST size_t gimple_ops_offset_[] = {
60 #include "gsstruct.def"
61 };
62 #undef DEFGSSTRUCT
63
64 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
65 static const size_t gsstruct_code_size[] = {
66 #include "gsstruct.def"
67 };
68 #undef DEFGSSTRUCT
69
70 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
71 const char *const gimple_code_name[] = {
72 #include "gimple.def"
73 };
74 #undef DEFGSCODE
75
76 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
77 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
78 #include "gimple.def"
79 };
80 #undef DEFGSCODE
81
82 #ifdef GATHER_STATISTICS
83 /* Gimple stats. */
84
85 int gimple_alloc_counts[(int) gimple_alloc_kind_all];
86 int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
87
88 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
89 static const char * const gimple_alloc_kind_names[] = {
90 "assignments",
91 "phi nodes",
92 "conditionals",
93 "sequences",
94 "everything else"
95 };
96
97 #endif /* GATHER_STATISTICS */
98
99 /* A cache of gimple_seq objects. Sequences are created and destroyed
100 fairly often during gimplification. */
101 static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;
102
103 /* Private API manipulation functions shared only with some
104 other files. */
105 extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
106 extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
107
108 /* Gimple tuple constructors.
109 Note: Any constructor taking a ``gimple_seq'' as a parameter can
110 be passed NULL to start with an empty sequence. */
111
112 /* Set the code for statement G to CODE. */
113
114 static inline void
115 gimple_set_code (gimple g, enum gimple_code code)
116 {
117 g->gsbase.code = code;
118 }
119
120 /* Return the number of bytes needed to hold a GIMPLE statement with
121 code CODE. */
122
123 static inline size_t
124 gimple_size (enum gimple_code code)
125 {
126 return gsstruct_code_size[gss_for_code (code)];
127 }
128
129 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
130 operands. */
131
132 gimple
133 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
134 {
135 size_t size;
136 gimple stmt;
137
138 size = gimple_size (code);
139 if (num_ops > 0)
140 size += sizeof (tree) * (num_ops - 1);
141
142 #ifdef GATHER_STATISTICS
143 {
144 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
145 gimple_alloc_counts[(int) kind]++;
146 gimple_alloc_sizes[(int) kind] += size;
147 }
148 #endif
149
150 stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
151 gimple_set_code (stmt, code);
152 gimple_set_num_ops (stmt, num_ops);
153
154 /* Do not call gimple_set_modified here as it has other side
155 effects and this tuple is still not completely built. */
156 stmt->gsbase.modified = 1;
157
158 return stmt;
159 }
160
161 /* Set SUBCODE to be the code of the expression computed by statement G. */
162
163 static inline void
164 gimple_set_subcode (gimple g, unsigned subcode)
165 {
166 /* We only have 16 bits for the RHS code. Assert that we are not
167 overflowing it. */
168 gcc_assert (subcode < (1 << 16));
169 g->gsbase.subcode = subcode;
170 }
171
172
173
174 /* Build a tuple with operands. CODE is the statement to build (which
175 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code
176 for the new tuple. NUM_OPS is the number of operands to allocate. */
177
178 #define gimple_build_with_ops(c, s, n) \
179 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
180
181 static gimple
182 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
183 unsigned num_ops MEM_STAT_DECL)
184 {
185 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
186 gimple_set_subcode (s, subcode);
187
188 return s;
189 }
190
191
192 /* Build a GIMPLE_RETURN statement returning RETVAL. */
193
194 gimple
195 gimple_build_return (tree retval)
196 {
197 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
198 if (retval)
199 gimple_return_set_retval (s, retval);
200 return s;
201 }
202
203 /* Reset alias information on call S. */
204
205 void
206 gimple_call_reset_alias_info (gimple s)
207 {
208 if (gimple_call_flags (s) & ECF_CONST)
209 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
210 else
211 pt_solution_reset (gimple_call_use_set (s));
212 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
213 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
214 else
215 pt_solution_reset (gimple_call_clobber_set (s));
216 }
217
218 /* Helper for gimple_build_call, gimple_build_call_vec and
219 gimple_build_call_from_tree. Build the basic components of a
220 GIMPLE_CALL statement to function FN with NARGS arguments. */
221
222 static inline gimple
223 gimple_build_call_1 (tree fn, unsigned nargs)
224 {
225 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
226 if (TREE_CODE (fn) == FUNCTION_DECL)
227 fn = build_fold_addr_expr (fn);
228 gimple_set_op (s, 1, fn);
229 gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
230 gimple_call_reset_alias_info (s);
231 return s;
232 }
233
234
235 /* Build a GIMPLE_CALL statement to function FN with the arguments
236 specified in vector ARGS. */
237
238 gimple
239 gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
240 {
241 unsigned i;
242 unsigned nargs = VEC_length (tree, args);
243 gimple call = gimple_build_call_1 (fn, nargs);
244
245 for (i = 0; i < nargs; i++)
246 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
247
248 return call;
249 }
250
251
252 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
253 arguments. The ... are the arguments. */
254
255 gimple
256 gimple_build_call (tree fn, unsigned nargs, ...)
257 {
258 va_list ap;
259 gimple call;
260 unsigned i;
261
262 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
263
264 call = gimple_build_call_1 (fn, nargs);
265
266 va_start (ap, nargs);
267 for (i = 0; i < nargs; i++)
268 gimple_call_set_arg (call, i, va_arg (ap, tree));
269 va_end (ap);
270
271 return call;
272 }
273
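/* Illustrative usage sketch (not part of the original source): build
   the call `tmp = foo (a, b)' and append it to a sequence.  Assumes
   FOO_DECL is a FUNCTION_DECL and A, B and TMP are valid gimple
   operands created elsewhere:

     gimple call = gimple_build_call (foo_decl, 2, a, b);
     gimple_call_set_lhs (call, tmp);
     gimple_seq_add_stmt (&seq, call);  */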
274
275 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
276 Build the basic components of a GIMPLE_CALL statement to internal
277 function FN with NARGS arguments. */
278
279 static inline gimple
280 gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
281 {
282 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
283 s->gsbase.subcode |= GF_CALL_INTERNAL;
284 gimple_call_set_internal_fn (s, fn);
285 gimple_call_reset_alias_info (s);
286 return s;
287 }
288
289
290 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
291 the number of arguments. The ... are the arguments. */
292
293 gimple
294 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
295 {
296 va_list ap;
297 gimple call;
298 unsigned i;
299
300 call = gimple_build_call_internal_1 (fn, nargs);
301 va_start (ap, nargs);
302 for (i = 0; i < nargs; i++)
303 gimple_call_set_arg (call, i, va_arg (ap, tree));
304 va_end (ap);
305
306 return call;
307 }
308
309
310 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
311 specified in vector ARGS. */
312
313 gimple
314 gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
315 {
316 unsigned i, nargs;
317 gimple call;
318
319 nargs = VEC_length (tree, args);
320 call = gimple_build_call_internal_1 (fn, nargs);
321 for (i = 0; i < nargs; i++)
322 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
323
324 return call;
325 }
326
327
328 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
329 assumed to be in GIMPLE form already. Minimal checking is done of
330 this fact. */
331
332 gimple
333 gimple_build_call_from_tree (tree t)
334 {
335 unsigned i, nargs;
336 gimple call;
337 tree fndecl = get_callee_fndecl (t);
338
339 gcc_assert (TREE_CODE (t) == CALL_EXPR);
340
341 nargs = call_expr_nargs (t);
342 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
343
344 for (i = 0; i < nargs; i++)
345 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
346
347 gimple_set_block (call, TREE_BLOCK (t));
348
349 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
350 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
351 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
352 gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
353 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
354 if (fndecl
355 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
356 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA)
357 gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
358 else
359 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
360 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
361 gimple_call_set_nothrow (call, TREE_NOTHROW (t));
362 gimple_set_no_warning (call, TREE_NO_WARNING (t));
363
364 return call;
365 }
366
367
368 /* Extract the operands and code for expression EXPR into *SUBCODE_P,
369 *OP1_P, *OP2_P and *OP3_P respectively. */
370
371 void
372 extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
373 tree *op2_p, tree *op3_p)
374 {
375 enum gimple_rhs_class grhs_class;
376
377 *subcode_p = TREE_CODE (expr);
378 grhs_class = get_gimple_rhs_class (*subcode_p);
379
380 if (grhs_class == GIMPLE_TERNARY_RHS)
381 {
382 *op1_p = TREE_OPERAND (expr, 0);
383 *op2_p = TREE_OPERAND (expr, 1);
384 *op3_p = TREE_OPERAND (expr, 2);
385 }
386 else if (grhs_class == GIMPLE_BINARY_RHS)
387 {
388 *op1_p = TREE_OPERAND (expr, 0);
389 *op2_p = TREE_OPERAND (expr, 1);
390 *op3_p = NULL_TREE;
391 }
392 else if (grhs_class == GIMPLE_UNARY_RHS)
393 {
394 *op1_p = TREE_OPERAND (expr, 0);
395 *op2_p = NULL_TREE;
396 *op3_p = NULL_TREE;
397 }
398 else if (grhs_class == GIMPLE_SINGLE_RHS)
399 {
400 *op1_p = expr;
401 *op2_p = NULL_TREE;
402 *op3_p = NULL_TREE;
403 }
404 else
405 gcc_unreachable ();
406 }
407
408
409 /* Build a GIMPLE_ASSIGN statement.
410
411 LHS of the assignment.
412 RHS of the assignment which can be unary or binary. */
413
414 gimple
415 gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
416 {
417 enum tree_code subcode;
418 tree op1, op2, op3;
419
420 extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
421 return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
422 PASS_MEM_STAT);
423 }
424
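/* Illustrative sketch: build `lhs = a + b'.  GIMPLE_BUILD_ASSIGN is
   assumed to be the usual MEM_STAT wrapper macro from gimple.h around
   gimple_build_assign_stat; A, B and LHS are pre-built operands:

     tree sum = build2 (PLUS_EXPR, TREE_TYPE (a), a, b);
     gimple stmt = gimple_build_assign (lhs, sum);  */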
425
426 /* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
427 OP1, OP2 and OP3. If OP2 and OP3 are NULL then SUBCODE must be of
428 class GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
429
430 gimple
431 gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
432 tree op2, tree op3 MEM_STAT_DECL)
433 {
434 unsigned num_ops;
435 gimple p;
436
437 /* Need 1 operand for the LHS and 1 to 3 for the RHS (depending on
438 the code). */
439 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
440
441 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
442 PASS_MEM_STAT);
443 gimple_assign_set_lhs (p, lhs);
444 gimple_assign_set_rhs1 (p, op1);
445 if (op2)
446 {
447 gcc_assert (num_ops > 2);
448 gimple_assign_set_rhs2 (p, op2);
449 }
450
451 if (op3)
452 {
453 gcc_assert (num_ops > 3);
454 gimple_assign_set_rhs3 (p, op3);
455 }
456
457 return p;
458 }
459
460
461 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
462
463 DST/SRC are the destination and source respectively. You can pass
464 ungimplified trees in DST or SRC, in which case they will be
465 converted to a gimple operand if necessary.
466
467 This function returns the newly created GIMPLE_ASSIGN tuple. */
468
469 gimple
470 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
471 {
472 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
473 gimplify_and_add (t, seq_p);
474 ggc_free (t);
475 return gimple_seq_last_stmt (*seq_p);
476 }
477
478
479 /* Build a GIMPLE_COND statement.
480
481 PRED_CODE is the comparison code used to compare LHS and RHS.
482 T_LABEL is the label to jump to if the condition is true.
483 F_LABEL is the label to jump to otherwise. */
484
485 gimple
486 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
487 tree t_label, tree f_label)
488 {
489 gimple p;
490
491 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
492 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
493 gimple_cond_set_lhs (p, lhs);
494 gimple_cond_set_rhs (p, rhs);
495 gimple_cond_set_true_label (p, t_label);
496 gimple_cond_set_false_label (p, f_label);
497 return p;
498 }
499
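/* Illustrative sketch: emit `if (x == 0) goto l_true; else goto
   l_false;', where X is a gimple operand and L_TRUE/L_FALSE are
   LABEL_DECLs created elsewhere:

     gimple cond = gimple_build_cond (EQ_EXPR, x,
                                      build_zero_cst (TREE_TYPE (x)),
                                      l_true, l_false);  */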
500
501 /* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND. */
502
503 void
504 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
505 tree *lhs_p, tree *rhs_p)
506 {
507 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
508 || TREE_CODE (cond) == TRUTH_NOT_EXPR
509 || is_gimple_min_invariant (cond)
510 || SSA_VAR_P (cond));
511
512 extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
513
514 /* Canonicalize conditionals of the form 'if (!VAL)'. */
515 if (*code_p == TRUTH_NOT_EXPR)
516 {
517 *code_p = EQ_EXPR;
518 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
519 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
520 }
521 /* Canonicalize conditionals of the form 'if (VAL)'. */
522 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
523 {
524 *code_p = NE_EXPR;
525 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
526 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
527 }
528 }
529
530
531 /* Build a GIMPLE_COND statement from the conditional expression tree
532 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
533
534 gimple
535 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
536 {
537 enum tree_code code;
538 tree lhs, rhs;
539
540 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
541 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
542 }
543
544 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
545 boolean expression tree COND. */
546
547 void
548 gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
549 {
550 enum tree_code code;
551 tree lhs, rhs;
552
553 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
554 gimple_cond_set_condition (stmt, code, lhs, rhs);
555 }
556
557 /* Build a GIMPLE_LABEL statement for LABEL. */
558
559 gimple
560 gimple_build_label (tree label)
561 {
562 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
563 gimple_label_set_label (p, label);
564 return p;
565 }
566
567 /* Build a GIMPLE_GOTO statement to label DEST. */
568
569 gimple
570 gimple_build_goto (tree dest)
571 {
572 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
573 gimple_goto_set_dest (p, dest);
574 return p;
575 }
576
577
578 /* Build a GIMPLE_NOP statement. */
579
580 gimple
581 gimple_build_nop (void)
582 {
583 return gimple_alloc (GIMPLE_NOP, 0);
584 }
585
586
587 /* Build a GIMPLE_BIND statement.
588 VARS are the variables in BODY.
589 BLOCK is the containing block. */
590
591 gimple
592 gimple_build_bind (tree vars, gimple_seq body, tree block)
593 {
594 gimple p = gimple_alloc (GIMPLE_BIND, 0);
595 gimple_bind_set_vars (p, vars);
596 if (body)
597 gimple_bind_set_body (p, body);
598 if (block)
599 gimple_bind_set_block (p, block);
600 return p;
601 }
602
603 /* Helper function to set the simple fields of an asm stmt.
604
605 STRING is a pointer to a string that is the asm block's assembly code.
606 NINPUTS is the number of register inputs.
607 NOUTPUTS is the number of register outputs.
608 NCLOBBERS is the number of clobbered registers.
609 NLABELS is the number of destination labels. */
610
611 static inline gimple
612 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
613 unsigned nclobbers, unsigned nlabels)
614 {
615 gimple p;
616 int size = strlen (string);
617
618 /* ASMs with labels cannot have outputs. This should have been
619 enforced by the front end. */
620 gcc_assert (nlabels == 0 || noutputs == 0);
621
622 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
623 ninputs + noutputs + nclobbers + nlabels);
624
625 p->gimple_asm.ni = ninputs;
626 p->gimple_asm.no = noutputs;
627 p->gimple_asm.nc = nclobbers;
628 p->gimple_asm.nl = nlabels;
629 p->gimple_asm.string = ggc_alloc_string (string, size);
630
631 #ifdef GATHER_STATISTICS
632 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
633 #endif
634
635 return p;
636 }
637
638 /* Build a GIMPLE_ASM statement.
639
640 STRING is the assembly code.
641 INPUTS is a vector of the input operands.
642 OUTPUTS is a vector of the output operands.
643 CLOBBERS is a vector of the clobbered registers.
644 LABELS is a vector of destination labels.
645
646 The input, output, clobber and label counts are taken from the
647 lengths of the corresponding vectors. */
648
649 gimple
650 gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
651 VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
652 VEC(tree,gc)* labels)
653 {
654 gimple p;
655 unsigned i;
656
657 p = gimple_build_asm_1 (string,
658 VEC_length (tree, inputs),
659 VEC_length (tree, outputs),
660 VEC_length (tree, clobbers),
661 VEC_length (tree, labels));
662
663 for (i = 0; i < VEC_length (tree, inputs); i++)
664 gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
665
666 for (i = 0; i < VEC_length (tree, outputs); i++)
667 gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));
668
669 for (i = 0; i < VEC_length (tree, clobbers); i++)
670 gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
671
672 for (i = 0; i < VEC_length (tree, labels); i++)
673 gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));
674
675 return p;
676 }
677
678 /* Build a GIMPLE_CATCH statement.
679
680 TYPES are the catch types.
681 HANDLER is the exception handler. */
682
683 gimple
684 gimple_build_catch (tree types, gimple_seq handler)
685 {
686 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
687 gimple_catch_set_types (p, types);
688 if (handler)
689 gimple_catch_set_handler (p, handler);
690
691 return p;
692 }
693
694 /* Build a GIMPLE_EH_FILTER statement.
695
696 TYPES are the filter's types.
697 FAILURE is the filter's failure action. */
698
699 gimple
700 gimple_build_eh_filter (tree types, gimple_seq failure)
701 {
702 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
703 gimple_eh_filter_set_types (p, types);
704 if (failure)
705 gimple_eh_filter_set_failure (p, failure);
706
707 return p;
708 }
709
710 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
711
712 gimple
713 gimple_build_eh_must_not_throw (tree decl)
714 {
715 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
716
717 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
718 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
719 gimple_eh_must_not_throw_set_fndecl (p, decl);
720
721 return p;
722 }
723
724 /* Build a GIMPLE_TRY statement.
725
726 EVAL is the expression to evaluate.
727 CLEANUP is the cleanup expression.
728 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
729 whether this is a try/catch or a try/finally respectively. */
730
731 gimple
732 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
733 enum gimple_try_flags kind)
734 {
735 gimple p;
736
737 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
738 p = gimple_alloc (GIMPLE_TRY, 0);
739 gimple_set_subcode (p, kind);
740 if (eval)
741 gimple_try_set_eval (p, eval);
742 if (cleanup)
743 gimple_try_set_cleanup (p, cleanup);
744
745 return p;
746 }
747
748 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
749
750 CLEANUP is the cleanup expression. */
751
752 gimple
753 gimple_build_wce (gimple_seq cleanup)
754 {
755 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
756 if (cleanup)
757 gimple_wce_set_cleanup (p, cleanup);
758
759 return p;
760 }
761
762
763 /* Build a GIMPLE_RESX statement. */
764
765 gimple
766 gimple_build_resx (int region)
767 {
768 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
769 p->gimple_eh_ctrl.region = region;
770 return p;
771 }
772
773
774 /* The helper for constructing a gimple switch statement.
775 INDEX is the switch's index.
776 NLABELS is the number of labels in the switch excluding the default.
777 DEFAULT_LABEL is the default label for the switch statement. */
778
779 gimple
780 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
781 {
782 /* nlabels + 1 default label + 1 index. */
783 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
784 1 + (default_label != NULL) + nlabels);
785 gimple_switch_set_index (p, index);
786 if (default_label)
787 gimple_switch_set_default_label (p, default_label);
788 return p;
789 }
790
791
792 /* Build a GIMPLE_SWITCH statement.
793
794 INDEX is the switch's index.
795 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
796 ... are the labels excluding the default. */
797
798 gimple
799 gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
800 {
801 va_list al;
802 unsigned i, offset;
803 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
804
805 /* Store the rest of the labels. */
806 va_start (al, default_label);
807 offset = (default_label != NULL);
808 for (i = 0; i < nlabels; i++)
809 gimple_switch_set_label (p, i + offset, va_arg (al, tree));
810 va_end (al);
811
812 return p;
813 }
814
815
816 /* Build a GIMPLE_SWITCH statement.
817
818 INDEX is the switch's index.
819 DEFAULT_LABEL is the default label
820 ARGS is a vector of labels excluding the default. */
821
822 gimple
823 gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
824 {
825 unsigned i, offset, nlabels = VEC_length (tree, args);
826 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
827
828 /* Copy the labels from the vector to the switch statement. */
829 offset = (default_label != NULL);
830 for (i = 0; i < nlabels; i++)
831 gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));
832
833 return p;
834 }
835
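/* Illustrative sketch of the vector variant, assuming CASE1 and CASE2
   are CASE_LABEL_EXPR nodes built elsewhere:

     VEC(tree,heap) *labels = VEC_alloc (tree, heap, 2);
     VEC_quick_push (tree, labels, case1);
     VEC_quick_push (tree, labels, case2);
     gimple sw = gimple_build_switch_vec (index, default_label, labels);
     VEC_free (tree, heap, labels);  */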
836 /* Build a GIMPLE_EH_DISPATCH statement. */
837
838 gimple
839 gimple_build_eh_dispatch (int region)
840 {
841 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
842 p->gimple_eh_ctrl.region = region;
843 return p;
844 }
845
846 /* Build a new GIMPLE_DEBUG_BIND statement.
847
848 VAR is bound to VALUE; block and location are taken from STMT. */
849
850 gimple
851 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
852 {
853 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
854 (unsigned)GIMPLE_DEBUG_BIND, 2
855 PASS_MEM_STAT);
856
857 gimple_debug_bind_set_var (p, var);
858 gimple_debug_bind_set_value (p, value);
859 if (stmt)
860 {
861 gimple_set_block (p, gimple_block (stmt));
862 gimple_set_location (p, gimple_location (stmt));
863 }
864
865 return p;
866 }
867
868
869 /* Build a GIMPLE_OMP_CRITICAL statement.
870
871 BODY is the sequence of statements for which only one thread can execute.
872 NAME is an optional identifier for this critical block. */
873
874 gimple
875 gimple_build_omp_critical (gimple_seq body, tree name)
876 {
877 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
878 gimple_omp_critical_set_name (p, name);
879 if (body)
880 gimple_omp_set_body (p, body);
881
882 return p;
883 }
884
885 /* Build a GIMPLE_OMP_FOR statement.
886
887 BODY is the sequence of statements inside the for loop.
888 CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
889 lastprivate, reduction, ordered, schedule, and nowait.
890 COLLAPSE is the collapse count.
891 PRE_BODY is the sequence of statements that are loop invariant. */
892
893 gimple
894 gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
895 gimple_seq pre_body)
896 {
897 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
898 if (body)
899 gimple_omp_set_body (p, body);
900 gimple_omp_for_set_clauses (p, clauses);
901 p->gimple_omp_for.collapse = collapse;
902 p->gimple_omp_for.iter
903 = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
904 if (pre_body)
905 gimple_omp_for_set_pre_body (p, pre_body);
906
907 return p;
908 }
909
910
911 /* Build a GIMPLE_OMP_PARALLEL statement.
912
913 BODY is the sequence of statements which are executed in parallel.
914 CLAUSES are the OMP parallel construct's clauses.
915 CHILD_FN is the function created for the parallel threads to execute.
916 DATA_ARG is the shared data argument(s).
917
918 gimple
919 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
920 tree data_arg)
921 {
922 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
923 if (body)
924 gimple_omp_set_body (p, body);
925 gimple_omp_parallel_set_clauses (p, clauses);
926 gimple_omp_parallel_set_child_fn (p, child_fn);
927 gimple_omp_parallel_set_data_arg (p, data_arg);
928
929 return p;
930 }
931
932
933 /* Build a GIMPLE_OMP_TASK statement.
934
935 BODY is the sequence of statements executed by the explicit task.
936 CLAUSES are the OMP task construct's clauses.
937 CHILD_FN is the function created for the task to execute.
938 DATA_ARG is the shared data argument(s).
939 COPY_FN is the optional function for firstprivate initialization.
940 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
941
942 gimple
943 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
944 tree data_arg, tree copy_fn, tree arg_size,
945 tree arg_align)
946 {
947 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
948 if (body)
949 gimple_omp_set_body (p, body);
950 gimple_omp_task_set_clauses (p, clauses);
951 gimple_omp_task_set_child_fn (p, child_fn);
952 gimple_omp_task_set_data_arg (p, data_arg);
953 gimple_omp_task_set_copy_fn (p, copy_fn);
954 gimple_omp_task_set_arg_size (p, arg_size);
955 gimple_omp_task_set_arg_align (p, arg_align);
956
957 return p;
958 }
959
960
961 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
962
963 BODY is the sequence of statements in the section. */
964
965 gimple
966 gimple_build_omp_section (gimple_seq body)
967 {
968 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
969 if (body)
970 gimple_omp_set_body (p, body);
971
972 return p;
973 }
974
975
976 /* Build a GIMPLE_OMP_MASTER statement.
977
978 BODY is the sequence of statements to be executed by just the master. */
979
980 gimple
981 gimple_build_omp_master (gimple_seq body)
982 {
983 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
984 if (body)
985 gimple_omp_set_body (p, body);
986
987 return p;
988 }
989
990
991 /* Build a GIMPLE_OMP_CONTINUE statement.
992
993 CONTROL_DEF is the definition of the control variable.
994 CONTROL_USE is the use of the control variable. */
995
996 gimple
997 gimple_build_omp_continue (tree control_def, tree control_use)
998 {
999 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
1000 gimple_omp_continue_set_control_def (p, control_def);
1001 gimple_omp_continue_set_control_use (p, control_use);
1002 return p;
1003 }
1004
1005 /* Build a GIMPLE_OMP_ORDERED statement.
1006
1007 BODY is the sequence of statements inside a loop that will be executed
1008 in sequence. */
1009
1010 gimple
1011 gimple_build_omp_ordered (gimple_seq body)
1012 {
1013 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
1014 if (body)
1015 gimple_omp_set_body (p, body);
1016
1017 return p;
1018 }
1019
1020
1021 /* Build a GIMPLE_OMP_RETURN statement.
1022 WAIT_P is true if this is a non-waiting return. */
1023
1024 gimple
1025 gimple_build_omp_return (bool wait_p)
1026 {
1027 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1028 if (wait_p)
1029 gimple_omp_return_set_nowait (p);
1030
1031 return p;
1032 }
1033
1034
1035 /* Build a GIMPLE_OMP_SECTIONS statement.
1036
1037 BODY is a sequence of section statements.
1038 CLAUSES are any of the OMP sections construct's clauses: private,
1039 firstprivate, lastprivate, reduction, and nowait. */
1040
1041 gimple
1042 gimple_build_omp_sections (gimple_seq body, tree clauses)
1043 {
1044 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
1045 if (body)
1046 gimple_omp_set_body (p, body);
1047 gimple_omp_sections_set_clauses (p, clauses);
1048
1049 return p;
1050 }
1051
1052
1053 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1054
1055 gimple
1056 gimple_build_omp_sections_switch (void)
1057 {
1058 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1059 }
1060
1061
1062 /* Build a GIMPLE_OMP_SINGLE statement.
1063
1064 BODY is the sequence of statements that will be executed once.
1065 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1066 copyprivate, nowait. */
1067
1068 gimple
1069 gimple_build_omp_single (gimple_seq body, tree clauses)
1070 {
1071 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1072 if (body)
1073 gimple_omp_set_body (p, body);
1074 gimple_omp_single_set_clauses (p, clauses);
1075
1076 return p;
1077 }
1078
1079
1080 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1081
1082 gimple
1083 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1084 {
1085 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1086 gimple_omp_atomic_load_set_lhs (p, lhs);
1087 gimple_omp_atomic_load_set_rhs (p, rhs);
1088 return p;
1089 }
1090
1091 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1092
1093 VAL is the value we are storing. */
1094
1095 gimple
1096 gimple_build_omp_atomic_store (tree val)
1097 {
1098 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1099 gimple_omp_atomic_store_set_val (p, val);
1100 return p;
1101 }
1102
1103 /* Build a GIMPLE_PREDICT statement. PREDICTOR is one of the predictors
1104 from predict.def; OUTCOME is NOT_TAKEN or TAKEN. */
1105
1106 gimple
1107 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1108 {
1109 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1110 /* Ensure all the predictors fit into the lower bits of the subcode. */
1111 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1112 gimple_predict_set_predictor (p, predictor);
1113 gimple_predict_set_outcome (p, outcome);
1114 return p;
1115 }
1116
1117 #if defined ENABLE_GIMPLE_CHECKING
1118 /* Complain of a gimple type mismatch and die. */
1119
1120 void
1121 gimple_check_failed (const_gimple gs, const char *file, int line,
1122 const char *function, enum gimple_code code,
1123 enum tree_code subcode)
1124 {
1125 internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
1126 gimple_code_name[code],
1127 tree_code_name[subcode],
1128 gimple_code_name[gimple_code (gs)],
1129 gs->gsbase.subcode > 0
1130 ? tree_code_name[gs->gsbase.subcode]
1131 : "",
1132 function, trim_filename (file), line);
1133 }
1134 #endif /* ENABLE_GIMPLE_CHECKING */
1135
1136
1137 /* Allocate a new GIMPLE sequence in GC memory and return it. If
1138 there are free sequences in GIMPLE_SEQ_CACHE return one of those
1139 instead. */
1140
1141 gimple_seq
1142 gimple_seq_alloc (void)
1143 {
1144 gimple_seq seq = gimple_seq_cache;
1145 if (seq)
1146 {
1147 gimple_seq_cache = gimple_seq_cache->next_free;
1148 gcc_assert (gimple_seq_cache != seq);
1149 memset (seq, 0, sizeof (*seq));
1150 }
1151 else
1152 {
1153 seq = ggc_alloc_cleared_gimple_seq_d ();
1154 #ifdef GATHER_STATISTICS
1155 gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
1156 gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
1157 #endif
1158 }
1159
1160 return seq;
1161 }
1162
1163 /* Return SEQ to the free pool of GIMPLE sequences. */
1164
1165 void
1166 gimple_seq_free (gimple_seq seq)
1167 {
1168 if (seq == NULL)
1169 return;
1170
1171 gcc_assert (gimple_seq_first (seq) == NULL);
1172 gcc_assert (gimple_seq_last (seq) == NULL);
1173
1174 /* If this triggers, it's a sign that the same list is being freed
1175 twice. */
1176 gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);
1177
1178 /* Add SEQ to the pool of free sequences. */
1179 seq->next_free = gimple_seq_cache;
1180 gimple_seq_cache = seq;
1181 }
1182
1183
1184 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1185 *SEQ_P is NULL, a new sequence is allocated. */
1186
1187 void
1188 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1189 {
1190 gimple_stmt_iterator si;
1191
1192 if (gs == NULL)
1193 return;
1194
1195 if (*seq_p == NULL)
1196 *seq_p = gimple_seq_alloc ();
1197
1198 si = gsi_last (*seq_p);
1199 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1200 }
1201
1202
1203 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1204 NULL, a new sequence is allocated. */
1205
1206 void
1207 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1208 {
1209 gimple_stmt_iterator si;
1210
1211 if (src == NULL)
1212 return;
1213
1214 if (*dst_p == NULL)
1215 *dst_p = gimple_seq_alloc ();
1216
1217 si = gsi_last (*dst_p);
1218 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1219 }
1220
1221
1222 /* Helper function of empty_body_p. Return true if STMT is an empty
1223 statement. */
1224
1225 static bool
1226 empty_stmt_p (gimple stmt)
1227 {
1228 if (gimple_code (stmt) == GIMPLE_NOP)
1229 return true;
1230 if (gimple_code (stmt) == GIMPLE_BIND)
1231 return empty_body_p (gimple_bind_body (stmt));
1232 return false;
1233 }
1234
1235
1236 /* Return true if BODY contains nothing but empty statements. */
1237
1238 bool
1239 empty_body_p (gimple_seq body)
1240 {
1241 gimple_stmt_iterator i;
1242
1243 if (gimple_seq_empty_p (body))
1244 return true;
1245 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1246 if (!empty_stmt_p (gsi_stmt (i))
1247 && !is_gimple_debug (gsi_stmt (i)))
1248 return false;
1249
1250 return true;
1251 }
1252
1253
1254 /* Perform a deep copy of sequence SRC and return the result. */
1255
1256 gimple_seq
1257 gimple_seq_copy (gimple_seq src)
1258 {
1259 gimple_stmt_iterator gsi;
1260 gimple_seq new_seq = gimple_seq_alloc ();
1261 gimple stmt;
1262
1263 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1264 {
1265 stmt = gimple_copy (gsi_stmt (gsi));
1266 gimple_seq_add_stmt (&new_seq, stmt);
1267 }
1268
1269 return new_seq;
1270 }
1271
1272
1273 /* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
1274 on each one. WI is as in walk_gimple_stmt.
1275
1276 If walk_gimple_stmt returns non-NULL, the walk is stopped, the
1277 value is stored in WI->CALLBACK_RESULT and the statement that
1278 produced the value is returned.
1279
1280 Otherwise, all the statements are walked and NULL returned. */
1281
1282 gimple
1283 walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
1284 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1285 {
1286 gimple_stmt_iterator gsi;
1287
1288 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
1289 {
1290 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
1291 if (ret)
1292 {
1293 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
1294 to hold it. */
1295 gcc_assert (wi);
1296 wi->callback_result = ret;
1297 return gsi_stmt (gsi);
1298 }
1299 }
1300
1301 if (wi)
1302 wi->callback_result = NULL_TREE;
1303
1304 return NULL;
1305 }
1306
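/* Illustrative sketch: find the first call statement in a sequence.
   Returning any non-NULL tree from the callback stops the walk, and
   walk_gimple_seq then returns the statement that produced it:

     static tree
     find_call (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
         {
           *handled_ops_p = true;
           return error_mark_node;
         }
       return NULL_TREE;
     }

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     gimple first_call = walk_gimple_seq (seq, find_call, NULL, &wi);  */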
1307
1308 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
1309
1310 static tree
1311 walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
1312 struct walk_stmt_info *wi)
1313 {
1314 tree ret, op;
1315 unsigned noutputs;
1316 const char **oconstraints;
1317 unsigned i, n;
1318 const char *constraint;
1319 bool allows_mem, allows_reg, is_inout;
1320
1321 noutputs = gimple_asm_noutputs (stmt);
1322 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1323
1324 if (wi)
1325 wi->is_lhs = true;
1326
1327 for (i = 0; i < noutputs; i++)
1328 {
1329 op = gimple_asm_output_op (stmt, i);
1330 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1331 oconstraints[i] = constraint;
1332 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
1333 &is_inout);
1334 if (wi)
1335 wi->val_only = (allows_reg || !allows_mem);
1336 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1337 if (ret)
1338 return ret;
1339 }
1340
1341 n = gimple_asm_ninputs (stmt);
1342 for (i = 0; i < n; i++)
1343 {
1344 op = gimple_asm_input_op (stmt, i);
1345 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1346 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1347 oconstraints, &allows_mem, &allows_reg);
1348 if (wi)
1349 {
1350 wi->val_only = (allows_reg || !allows_mem);
1351 /* Although input "m" is not really an LHS, we need an lvalue. */
1352 wi->is_lhs = !wi->val_only;
1353 }
1354 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1355 if (ret)
1356 return ret;
1357 }
1358
1359 if (wi)
1360 {
1361 wi->is_lhs = false;
1362 wi->val_only = true;
1363 }
1364
1365 n = gimple_asm_nlabels (stmt);
1366 for (i = 0; i < n; i++)
1367 {
1368 op = gimple_asm_label_op (stmt, i);
1369 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1370 if (ret)
1371 return ret;
1372 }
1373
1374 return NULL_TREE;
1375 }
1376
1377
1378 /* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
1379 STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
1380
1381 CALLBACK_OP is called on each operand of STMT via walk_tree.
1382 Additional parameters to walk_tree must be stored in WI. For each operand
1383 OP, walk_tree is called as:
1384
1385 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
1386
1387 If CALLBACK_OP returns non-NULL for an operand, the remaining
1388 operands are not scanned.
1389
1390 The return value is that returned by the last call to walk_tree, or
1391 NULL_TREE if no CALLBACK_OP is specified. */
1392
1393 tree
1394 walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
1395 struct walk_stmt_info *wi)
1396 {
1397 struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
1398 unsigned i;
1399 tree ret = NULL_TREE;
1400
1401 switch (gimple_code (stmt))
1402 {
1403 case GIMPLE_ASSIGN:
1404 /* Walk the RHS operands. If the LHS is of a non-renamable type or
1405 is a register variable, we may use a COMPONENT_REF on the RHS. */
1406 if (wi)
1407 {
1408 tree lhs = gimple_assign_lhs (stmt);
1409 wi->val_only
1410 = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
1411 || !gimple_assign_single_p (stmt);
1412 }
1413
1414 for (i = 1; i < gimple_num_ops (stmt); i++)
1415 {
1416 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
1417 pset);
1418 if (ret)
1419 return ret;
1420 }
1421
1422 /* Walk the LHS. If the RHS is appropriate for a memory, we
1423 may use a COMPONENT_REF on the LHS. */
1424 if (wi)
1425 {
1426 /* If the RHS has more than 1 operand, it is not appropriate
1427 for the memory. */
1428 wi->val_only = !is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
1429 || !gimple_assign_single_p (stmt);
1430 wi->is_lhs = true;
1431 }
1432
1433 ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
1434 if (ret)
1435 return ret;
1436
1437 if (wi)
1438 {
1439 wi->val_only = true;
1440 wi->is_lhs = false;
1441 }
1442 break;
1443
1444 case GIMPLE_CALL:
1445 if (wi)
1446 {
1447 wi->is_lhs = false;
1448 wi->val_only = true;
1449 }
1450
1451 ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
1452 if (ret)
1453 return ret;
1454
1455 ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
1456 if (ret)
1457 return ret;
1458
1459 for (i = 0; i < gimple_call_num_args (stmt); i++)
1460 {
1461 if (wi)
1462 wi->val_only
1463 = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
1464 ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
1465 pset);
1466 if (ret)
1467 return ret;
1468 }
1469
1470 if (gimple_call_lhs (stmt))
1471 {
1472 if (wi)
1473 {
1474 wi->is_lhs = true;
1475 wi->val_only
1476 = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
1477 }
1478
1479 ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
1480 if (ret)
1481 return ret;
1482 }
1483
1484 if (wi)
1485 {
1486 wi->is_lhs = false;
1487 wi->val_only = true;
1488 }
1489 break;
1490
1491 case GIMPLE_CATCH:
1492 ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
1493 pset);
1494 if (ret)
1495 return ret;
1496 break;
1497
1498 case GIMPLE_EH_FILTER:
1499 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
1500 pset);
1501 if (ret)
1502 return ret;
1503 break;
1504
1505 case GIMPLE_ASM:
1506 ret = walk_gimple_asm (stmt, callback_op, wi);
1507 if (ret)
1508 return ret;
1509 break;
1510
1511 case GIMPLE_OMP_CONTINUE:
1512 ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
1513 callback_op, wi, pset);
1514 if (ret)
1515 return ret;
1516
1517 ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
1518 callback_op, wi, pset);
1519 if (ret)
1520 return ret;
1521 break;
1522
1523 case GIMPLE_OMP_CRITICAL:
1524 ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
1525 pset);
1526 if (ret)
1527 return ret;
1528 break;
1529
1530 case GIMPLE_OMP_FOR:
1531 ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
1532 pset);
1533 if (ret)
1534 return ret;
1535 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1536 {
1537 ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
1538 wi, pset);
1539 if (ret)
1540 return ret;
1541 ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
1542 wi, pset);
1543 if (ret)
1544 return ret;
1545 ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
1546 wi, pset);
1547 if (ret)
1548 return ret;
1549 ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
1550 wi, pset);
1551 }
1552 if (ret)
1553 return ret;
1554 break;
1555
1556 case GIMPLE_OMP_PARALLEL:
1557 ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
1558 wi, pset);
1559 if (ret)
1560 return ret;
1561 ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
1562 wi, pset);
1563 if (ret)
1564 return ret;
1565 ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
1566 wi, pset);
1567 if (ret)
1568 return ret;
1569 break;
1570
1571 case GIMPLE_OMP_TASK:
1572 ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
1573 wi, pset);
1574 if (ret)
1575 return ret;
1576 ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
1577 wi, pset);
1578 if (ret)
1579 return ret;
1580 ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
1581 wi, pset);
1582 if (ret)
1583 return ret;
1584 ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
1585 wi, pset);
1586 if (ret)
1587 return ret;
1588 ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
1589 wi, pset);
1590 if (ret)
1591 return ret;
1592 ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
1593 wi, pset);
1594 if (ret)
1595 return ret;
1596 break;
1597
1598 case GIMPLE_OMP_SECTIONS:
1599 ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
1600 wi, pset);
1601 if (ret)
1602 return ret;
1603
1604 ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
1605 wi, pset);
1606 if (ret)
1607 return ret;
1608
1609 break;
1610
1611 case GIMPLE_OMP_SINGLE:
1612 ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
1613 pset);
1614 if (ret)
1615 return ret;
1616 break;
1617
1618 case GIMPLE_OMP_ATOMIC_LOAD:
1619 ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
1620 pset);
1621 if (ret)
1622 return ret;
1623
1624 ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
1625 pset);
1626 if (ret)
1627 return ret;
1628 break;
1629
1630 case GIMPLE_OMP_ATOMIC_STORE:
1631 ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
1632 wi, pset);
1633 if (ret)
1634 return ret;
1635 break;
1636
1637 /* Tuples that do not have operands. */
1638 case GIMPLE_NOP:
1639 case GIMPLE_RESX:
1640 case GIMPLE_OMP_RETURN:
1641 case GIMPLE_PREDICT:
1642 break;
1643
1644 default:
1645 {
1646 enum gimple_statement_structure_enum gss;
1647 gss = gimple_statement_structure (stmt);
1648 if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
1649 for (i = 0; i < gimple_num_ops (stmt); i++)
1650 {
1651 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
1652 if (ret)
1653 return ret;
1654 }
1655 }
1656 break;
1657 }
1658
1659 return NULL_TREE;
1660 }
1661
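/* Illustrative sketch of a walk_tree_fn for the CALLBACK_OP argument
   above.  The DATA pointer handed to walk_tree is the walk_stmt_info,
   so per-walk state can be carried in WI->INFO:

     static tree
     count_ssa_names (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
                      void *data)
     {
       struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
       if (TREE_CODE (*tp) == SSA_NAME)
         (*(unsigned *) wi->info)++;
       return NULL_TREE;
     }  */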
1662
1663 /* Walk the current statement in GSI (optionally using traversal state
1664 stored in WI). If WI is NULL, no state is kept during traversal.
1665 The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
1666 that it has handled all the operands of the statement, its return
1667 value is returned. Otherwise, the return value from CALLBACK_STMT
1668 is discarded and its operands are scanned.
1669
1670 If CALLBACK_STMT is NULL or it didn't handle the operands,
1671 CALLBACK_OP is called on each operand of the statement via
1672 walk_gimple_op. If walk_gimple_op returns non-NULL for any
1673 operand, the remaining operands are not scanned. In this case, the
1674 return value from CALLBACK_OP is returned.
1675
1676 In any other case, NULL_TREE is returned. */
1677
1678 tree
1679 walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
1680 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1681 {
1682 gimple ret;
1683 tree tree_ret;
1684 gimple stmt = gsi_stmt (*gsi);
1685
1686 if (wi)
1687 wi->gsi = *gsi;
1688
1689 if (wi && wi->want_locations && gimple_has_location (stmt))
1690 input_location = gimple_location (stmt);
1691
1692 ret = NULL;
1693
1694 /* Invoke the statement callback. Return if the callback handled
1695 all of STMT operands by itself. */
1696 if (callback_stmt)
1697 {
1698 bool handled_ops = false;
1699 tree_ret = callback_stmt (gsi, &handled_ops, wi);
1700 if (handled_ops)
1701 return tree_ret;
1702
1703 /* If CALLBACK_STMT did not handle operands, it should not have
1704 a value to return. */
1705 gcc_assert (tree_ret == NULL);
1706
1707 /* Re-read stmt in case the callback changed it. */
1708 stmt = gsi_stmt (*gsi);
1709 }
1710
1711 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */
1712 if (callback_op)
1713 {
1714 tree_ret = walk_gimple_op (stmt, callback_op, wi);
1715 if (tree_ret)
1716 return tree_ret;
1717 }
1718
1719 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */
1720 switch (gimple_code (stmt))
1721 {
1722 case GIMPLE_BIND:
1723 ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
1724 callback_op, wi);
1725 if (ret)
1726 return wi->callback_result;
1727 break;
1728
1729 case GIMPLE_CATCH:
1730 ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
1731 callback_op, wi);
1732 if (ret)
1733 return wi->callback_result;
1734 break;
1735
1736 case GIMPLE_EH_FILTER:
1737 ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
1738 callback_op, wi);
1739 if (ret)
1740 return wi->callback_result;
1741 break;
1742
1743 case GIMPLE_TRY:
1744 ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
1745 wi);
1746 if (ret)
1747 return wi->callback_result;
1748
1749 ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
1750 callback_op, wi);
1751 if (ret)
1752 return wi->callback_result;
1753 break;
1754
1755 case GIMPLE_OMP_FOR:
1756 ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
1757 callback_op, wi);
1758 if (ret)
1759 return wi->callback_result;
1760
1761 /* FALL THROUGH. */
1762 case GIMPLE_OMP_CRITICAL:
1763 case GIMPLE_OMP_MASTER:
1764 case GIMPLE_OMP_ORDERED:
1765 case GIMPLE_OMP_SECTION:
1766 case GIMPLE_OMP_PARALLEL:
1767 case GIMPLE_OMP_TASK:
1768 case GIMPLE_OMP_SECTIONS:
1769 case GIMPLE_OMP_SINGLE:
1770 ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt, callback_op,
1771 wi);
1772 if (ret)
1773 return wi->callback_result;
1774 break;
1775
1776 case GIMPLE_WITH_CLEANUP_EXPR:
1777 ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
1778 callback_op, wi);
1779 if (ret)
1780 return wi->callback_result;
1781 break;
1782
1783 default:
1784 gcc_assert (!gimple_has_substatements (stmt));
1785 break;
1786 }
1787
1788 return NULL;
1789 }
1790
1791
1792 /* Set sequence SEQ to be the GIMPLE body for function FNDECL. */
1793
1794 void
1795 gimple_set_body (tree fndecl, gimple_seq seq)
1796 {
1797 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1798 if (fn == NULL)
1799 {
1800 /* If FNDECL still does not have a function structure associated
1801 with it, then it does not make sense for it to receive a
1802 GIMPLE body. */
1803 gcc_assert (seq == NULL);
1804 }
1805 else
1806 fn->gimple_body = seq;
1807 }
1808
1809
1810 /* Return the body of GIMPLE statements for function FNDECL. After the
1811 CFG pass, the function body doesn't exist anymore because it has
1812 been split up into basic blocks. In this case, it returns
1813 NULL. */
1814
1815 gimple_seq
1816 gimple_body (tree fndecl)
1817 {
1818 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1819 return fn ? fn->gimple_body : NULL;
1820 }
1821
1822 /* Return true when FNDECL has a GIMPLE body, either in unlowered
1823 or CFG form. */
1824 bool
1825 gimple_has_body_p (tree fndecl)
1826 {
1827 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1828 return (gimple_body (fndecl) || (fn && fn->cfg));
1829 }
1830
1831 /* Return true if calls C1 and C2 are known to go to the same function. */
1832
1833 bool
1834 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1835 {
1836 if (gimple_call_internal_p (c1))
1837 return (gimple_call_internal_p (c2)
1838 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1839 else
1840 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1841 || (gimple_call_fndecl (c1)
1842 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1843 }
1844
1845 /* Detect flags from a GIMPLE_CALL. This is just like
1846 call_expr_flags, but for gimple tuples. */
1847
1848 int
1849 gimple_call_flags (const_gimple stmt)
1850 {
1851 int flags;
1852 tree decl = gimple_call_fndecl (stmt);
1853
1854 if (decl)
1855 flags = flags_from_decl_or_type (decl);
1856 else if (gimple_call_internal_p (stmt))
1857 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1858 else
1859 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1860
1861 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1862 flags |= ECF_NOTHROW;
1863
1864 return flags;
1865 }
1866
1867 /* Return the "fn spec" string for call STMT. */
1868
1869 static tree
1870 gimple_call_fnspec (const_gimple stmt)
1871 {
1872 tree type, attr;
1873
1874 type = gimple_call_fntype (stmt);
1875 if (!type)
1876 return NULL_TREE;
1877
1878 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1879 if (!attr)
1880 return NULL_TREE;
1881
1882 return TREE_VALUE (TREE_VALUE (attr));
1883 }
1884
1885 /* Detects argument flags for argument number ARG on call STMT. */
1886
1887 int
1888 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1889 {
1890 tree attr = gimple_call_fnspec (stmt);
1891
1892 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1893 return 0;
1894
1895 switch (TREE_STRING_POINTER (attr)[1 + arg])
1896 {
1897 case 'x':
1898 case 'X':
1899 return EAF_UNUSED;
1900
1901 case 'R':
1902 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1903
1904 case 'r':
1905 return EAF_NOCLOBBER | EAF_NOESCAPE;
1906
1907 case 'W':
1908 return EAF_DIRECT | EAF_NOESCAPE;
1909
1910 case 'w':
1911 return EAF_NOESCAPE;
1912
1913 case '.':
1914 default:
1915 return 0;
1916 }
1917 }
1918
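/* For example, under this encoding a "fn spec" string of ".Rw" says
   nothing about the return value ('.', see gimple_call_return_flags
   below), marks argument 0 as directly dereferenced, unclobbered and
   non-escaping ('R'), and marks argument 1 as merely non-escaping
   ('w').  Character 0 describes the return value; character 1 + ARG
   describes argument ARG.  */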
1919 /* Detects return flags for the call STMT. */
1920
1921 int
1922 gimple_call_return_flags (const_gimple stmt)
1923 {
1924 tree attr;
1925
1926 if (gimple_call_flags (stmt) & ECF_MALLOC)
1927 return ERF_NOALIAS;
1928
1929 attr = gimple_call_fnspec (stmt);
1930 if (!attr || TREE_STRING_LENGTH (attr) < 1)
1931 return 0;
1932
1933 switch (TREE_STRING_POINTER (attr)[0])
1934 {
1935 case '1':
1936 case '2':
1937 case '3':
1938 case '4':
1939 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1940
1941 case 'm':
1942 return ERF_NOALIAS;
1943
1944 case '.':
1945 default:
1946 return 0;
1947 }
1948 }
1949
1950
1951 /* Return true if GS is a copy assignment. */
1952
1953 bool
1954 gimple_assign_copy_p (gimple gs)
1955 {
1956 return (gimple_assign_single_p (gs)
1957 && is_gimple_val (gimple_op (gs, 1)));
1958 }
1959
1960
1961 /* Return true if GS is an SSA_NAME copy assignment. */
1962
1963 bool
1964 gimple_assign_ssa_name_copy_p (gimple gs)
1965 {
1966 return (gimple_assign_single_p (gs)
1967 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1968 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1969 }
1970
1971
1972 /* Return true if GS is an assignment with a unary RHS, but the
1973 operator has no effect on the assigned value. The logic is adapted
1974 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1975 instances in which STRIP_NOPS was previously applied to the RHS of
1976 an assignment.
1977
1978 NOTE: In the use cases that led to the creation of this function
1979 and of gimple_assign_single_p, it is typical to test for either
1980 condition and to proceed in the same manner. In each case, the
1981 assigned value is represented by the single RHS operand of the
1982 assignment. I suspect there may be cases where gimple_assign_copy_p,
1983 gimple_assign_single_p, or equivalent logic is used where a similar
1984 treatment of unary NOPs is appropriate. */
1985
1986 bool
1987 gimple_assign_unary_nop_p (gimple gs)
1988 {
1989 return (is_gimple_assign (gs)
1990 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
1991 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
1992 && gimple_assign_rhs1 (gs) != error_mark_node
1993 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
1994 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
1995 }
1996
1997 /* Set BB to be the basic block holding statement STMT. */
1998
1999 void
2000 gimple_set_bb (gimple stmt, basic_block bb)
2001 {
2002 stmt->gsbase.bb = bb;
2003
2004 /* If the statement is a label, add the label to block-to-labels map
2005 so that we can speed up edge creation for GIMPLE_GOTOs. */
2006 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2007 {
2008 tree t;
2009 int uid;
2010
2011 t = gimple_label_label (stmt);
2012 uid = LABEL_DECL_UID (t);
2013 if (uid == -1)
2014 {
2015 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2016 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2017 if (old_len <= (unsigned) uid)
2018 {
2019 unsigned new_len = 3 * uid / 2 + 1;
2020
2021 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2022 new_len);
2023 }
2024 }
2025
2026 VEC_replace (basic_block, label_to_block_map, uid, bb);
2027 }
2028 }
2029
2030
2031 /* Modify the RHS of the assignment pointed-to by GSI using the
2032 operands in the expression tree EXPR.
2033
2034 NOTE: The statement pointed-to by GSI may be reallocated if it
2035 did not have enough operand slots.
2036
2037 This function is useful to convert an existing tree expression into
2038 the flat representation used for the RHS of a GIMPLE assignment.
2039 It will reallocate memory as needed to expand or shrink the number
2040 of operand slots needed to represent EXPR.
2041
2042 NOTE: If you find yourself building a tree and then calling this
2043 function, you are most certainly doing it the slow way. It is much
2044 better to build a new assignment or to use the function
2045 gimple_assign_set_rhs_with_ops, which does not require an
2046 expression tree to be built. */
2047
2048 void
2049 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2050 {
2051 enum tree_code subcode;
2052 tree op1, op2, op3;
2053
2054 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2055 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
2056 }
2057
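/* A usage sketch for the note above, assuming GSI is a
   gimple_stmt_iterator * pointing at the assignment and B and C are
   valid GIMPLE operands for a PLUS_EXPR of type TYPE: rather than
   building a tree first,

     gimple_assign_set_rhs_from_tree (gsi, build2 (PLUS_EXPR, type, b, c));

   it is cheaper to pass the operands directly:

     gimple_assign_set_rhs_with_ops (gsi, PLUS_EXPR, b, c);  */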
2058
2059 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2060 operands OP1, OP2 and OP3.
2061
2062 NOTE: The statement pointed-to by GSI may be reallocated if it
2063 did not have enough operand slots. */
2064
2065 void
2066 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2067 tree op1, tree op2, tree op3)
2068 {
2069 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2070 gimple stmt = gsi_stmt (*gsi);
2071
2072 /* If the new CODE needs more operands, allocate a new statement. */
2073 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2074 {
2075 tree lhs = gimple_assign_lhs (stmt);
2076 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2077 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2078 gsi_replace (gsi, new_stmt, true);
2079 stmt = new_stmt;
2080
2081 /* The LHS needs to be reset as this also changes the SSA name
2082 on the LHS. */
2083 gimple_assign_set_lhs (stmt, lhs);
2084 }
2085
2086 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2087 gimple_set_subcode (stmt, code);
2088 gimple_assign_set_rhs1 (stmt, op1);
2089 if (new_rhs_ops > 1)
2090 gimple_assign_set_rhs2 (stmt, op2);
2091 if (new_rhs_ops > 2)
2092 gimple_assign_set_rhs3 (stmt, op3);
2093 }
2094
2095
2096 /* Return the LHS of a statement that performs an assignment,
2097 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2098 for a call to a function that returns no value, or for a
2099 statement other than an assignment or a call. */
2100
2101 tree
2102 gimple_get_lhs (const_gimple stmt)
2103 {
2104 enum gimple_code code = gimple_code (stmt);
2105
2106 if (code == GIMPLE_ASSIGN)
2107 return gimple_assign_lhs (stmt);
2108 else if (code == GIMPLE_CALL)
2109 return gimple_call_lhs (stmt);
2110 else
2111 return NULL_TREE;
2112 }
2113
2114
2115 /* Set the LHS of a statement that performs an assignment,
2116 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2117
2118 void
2119 gimple_set_lhs (gimple stmt, tree lhs)
2120 {
2121 enum gimple_code code = gimple_code (stmt);
2122
2123 if (code == GIMPLE_ASSIGN)
2124 gimple_assign_set_lhs (stmt, lhs);
2125 else if (code == GIMPLE_CALL)
2126 gimple_call_set_lhs (stmt, lhs);
2127 else
2128 gcc_unreachable();
2129 }
2130
2131 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2132 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2133 expression with a different value.
2134
2135 This will update any annotations (say debug bind stmts) referring
2136 to the original LHS, so that they use the RHS instead. This is
2137 done even if NLHS and LHS are the same, for it is understood that
2138 the RHS will be modified afterwards, and NLHS will not be assigned
2139 an equivalent value.
2140
2141 Adjusting any non-annotation uses of the LHS, if needed, is a
2142 responsibility of the caller.
2143
2144 The effect of this call should be pretty much the same as that of
2145 inserting a copy of STMT before STMT, and then removing the
2146 original stmt, at which time gsi_remove() would have updated
2147 annotations, but using this function saves all the inserting,
2148 copying and removing. */
2149
2150 void
2151 gimple_replace_lhs (gimple stmt, tree nlhs)
2152 {
2153 if (MAY_HAVE_DEBUG_STMTS)
2154 {
2155 tree lhs = gimple_get_lhs (stmt);
2156
2157 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2158
2159 insert_debug_temp_for_var_def (NULL, lhs);
2160 }
2161
2162 gimple_set_lhs (stmt, nlhs);
2163 }
2164
2165 /* Return a deep copy of statement STMT. All the operands from STMT
2166 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2167 and VUSE operand arrays are set to empty in the new copy. */
2168
2169 gimple
2170 gimple_copy (gimple stmt)
2171 {
2172 enum gimple_code code = gimple_code (stmt);
2173 unsigned num_ops = gimple_num_ops (stmt);
2174 gimple copy = gimple_alloc (code, num_ops);
2175 unsigned i;
2176
2177 /* Shallow copy all the fields from STMT. */
2178 memcpy (copy, stmt, gimple_size (code));
2179
2180 /* If STMT has sub-statements, deep-copy them as well. */
2181 if (gimple_has_substatements (stmt))
2182 {
2183 gimple_seq new_seq;
2184 tree t;
2185
2186 switch (gimple_code (stmt))
2187 {
2188 case GIMPLE_BIND:
2189 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2190 gimple_bind_set_body (copy, new_seq);
2191 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2192 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2193 break;
2194
2195 case GIMPLE_CATCH:
2196 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2197 gimple_catch_set_handler (copy, new_seq);
2198 t = unshare_expr (gimple_catch_types (stmt));
2199 gimple_catch_set_types (copy, t);
2200 break;
2201
2202 case GIMPLE_EH_FILTER:
2203 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2204 gimple_eh_filter_set_failure (copy, new_seq);
2205 t = unshare_expr (gimple_eh_filter_types (stmt));
2206 gimple_eh_filter_set_types (copy, t);
2207 break;
2208
2209 case GIMPLE_TRY:
2210 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2211 gimple_try_set_eval (copy, new_seq);
2212 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2213 gimple_try_set_cleanup (copy, new_seq);
2214 break;
2215
2216 case GIMPLE_OMP_FOR:
2217 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2218 gimple_omp_for_set_pre_body (copy, new_seq);
2219 t = unshare_expr (gimple_omp_for_clauses (stmt));
2220 gimple_omp_for_set_clauses (copy, t);
2221 copy->gimple_omp_for.iter
2222 = ggc_alloc_vec_gimple_omp_for_iter
2223 (gimple_omp_for_collapse (stmt));
2224 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2225 {
2226 gimple_omp_for_set_cond (copy, i,
2227 gimple_omp_for_cond (stmt, i));
2228 gimple_omp_for_set_index (copy, i,
2229 gimple_omp_for_index (stmt, i));
2230 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2231 gimple_omp_for_set_initial (copy, i, t);
2232 t = unshare_expr (gimple_omp_for_final (stmt, i));
2233 gimple_omp_for_set_final (copy, i, t);
2234 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2235 gimple_omp_for_set_incr (copy, i, t);
2236 }
2237 goto copy_omp_body;
2238
2239 case GIMPLE_OMP_PARALLEL:
2240 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2241 gimple_omp_parallel_set_clauses (copy, t);
2242 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2243 gimple_omp_parallel_set_child_fn (copy, t);
2244 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2245 gimple_omp_parallel_set_data_arg (copy, t);
2246 goto copy_omp_body;
2247
2248 case GIMPLE_OMP_TASK:
2249 t = unshare_expr (gimple_omp_task_clauses (stmt));
2250 gimple_omp_task_set_clauses (copy, t);
2251 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2252 gimple_omp_task_set_child_fn (copy, t);
2253 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2254 gimple_omp_task_set_data_arg (copy, t);
2255 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2256 gimple_omp_task_set_copy_fn (copy, t);
2257 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2258 gimple_omp_task_set_arg_size (copy, t);
2259 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2260 gimple_omp_task_set_arg_align (copy, t);
2261 goto copy_omp_body;
2262
2263 case GIMPLE_OMP_CRITICAL:
2264 t = unshare_expr (gimple_omp_critical_name (stmt));
2265 gimple_omp_critical_set_name (copy, t);
2266 goto copy_omp_body;
2267
2268 case GIMPLE_OMP_SECTIONS:
2269 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2270 gimple_omp_sections_set_clauses (copy, t);
2271 t = unshare_expr (gimple_omp_sections_control (stmt));
2272 gimple_omp_sections_set_control (copy, t);
2273 /* FALLTHRU */
2274
2275 case GIMPLE_OMP_SINGLE:
2276 case GIMPLE_OMP_SECTION:
2277 case GIMPLE_OMP_MASTER:
2278 case GIMPLE_OMP_ORDERED:
2279 copy_omp_body:
2280 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2281 gimple_omp_set_body (copy, new_seq);
2282 break;
2283
2284 case GIMPLE_WITH_CLEANUP_EXPR:
2285 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2286 gimple_wce_set_cleanup (copy, new_seq);
2287 break;
2288
2289 default:
2290 gcc_unreachable ();
2291 }
2292 }
2293
2294 /* Make copy of operands. */
2295 if (num_ops > 0)
2296 {
2297 for (i = 0; i < num_ops; i++)
2298 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2299
2300 /* Clear out SSA operand vectors on COPY. */
2301 if (gimple_has_ops (stmt))
2302 {
2303 gimple_set_def_ops (copy, NULL);
2304 gimple_set_use_ops (copy, NULL);
2305 }
2306
2307 if (gimple_has_mem_ops (stmt))
2308 {
2309 gimple_set_vdef (copy, gimple_vdef (stmt));
2310 gimple_set_vuse (copy, gimple_vuse (stmt));
2311 }
2312
2313 /* SSA operands need to be updated. */
2314 gimple_set_modified (copy, true);
2315 }
2316
2317 return copy;
2318 }
2319
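/* A usage sketch, assuming GSI is an iterator pointing at STMT:
   duplicate the statement and insert the copy right after it.

     gimple copy = gimple_copy (stmt);
     gsi_insert_after (&gsi, copy, GSI_SAME_STMT);

   The copy shares no operand trees with STMT and is marked modified,
   so its SSA operands get rescanned before use.  */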
2320
2321 /* Set the MODIFIED flag of the gimple statement S to MODIFIEDP,
2322 iff S has a MODIFIED field.  */
2323
2324 void
2325 gimple_set_modified (gimple s, bool modifiedp)
2326 {
2327 if (gimple_has_ops (s))
2328 s->gsbase.modified = (unsigned) modifiedp;
2329 }
2330
2331
2332 /* Return true if statement S has side-effects. We consider a
2333 statement to have side effects if:
2334
2335 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2336 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2337
2338 bool
2339 gimple_has_side_effects (const_gimple s)
2340 {
2341 unsigned i;
2342
2343 if (is_gimple_debug (s))
2344 return false;
2345
2346 /* We don't have to scan the arguments to check for
2347 volatile arguments; at present, though, we still
2348 do a scan to check for TREE_SIDE_EFFECTS. */
2349 if (gimple_has_volatile_ops (s))
2350 return true;
2351
2352 if (gimple_code (s) == GIMPLE_ASM
2353 && gimple_asm_volatile_p (s))
2354 return true;
2355
2356 if (is_gimple_call (s))
2357 {
2358 unsigned nargs = gimple_call_num_args (s);
2359 tree fn;
2360
2361 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2362 return true;
2363 else if (gimple_call_flags (s) & ECF_LOOPING_CONST_OR_PURE)
2364 /* An infinite loop is considered a side effect. */
2365 return true;
2366
2367 if (gimple_call_lhs (s)
2368 && TREE_SIDE_EFFECTS (gimple_call_lhs (s)))
2369 {
2370 gcc_checking_assert (gimple_has_volatile_ops (s));
2371 return true;
2372 }
2373
2374 fn = gimple_call_fn (s);
2375 if (fn && TREE_SIDE_EFFECTS (fn))
2376 return true;
2377
2378 for (i = 0; i < nargs; i++)
2379 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i)))
2380 {
2381 gcc_checking_assert (gimple_has_volatile_ops (s));
2382 return true;
2383 }
2384
2385 return false;
2386 }
2387 else
2388 {
2389 for (i = 0; i < gimple_num_ops (s); i++)
2390 {
2391 tree op = gimple_op (s, i);
2392 if (op && TREE_SIDE_EFFECTS (op))
2393 {
2394 gcc_checking_assert (gimple_has_volatile_ops (s));
2395 return true;
2396 }
2397 }
2398 }
2399
2400 return false;
2401 }
2402
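/* For example (a sketch; foo is a hypothetical function):

     x_1 = *p_2;   <-- true if the load is volatile-qualified
     foo ();       <-- true unless foo is ECF_CONST or ECF_PURE
     y_3 = z_4;    <-- false, a plain register copy  */
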
2403 /* Return true if the RHS of statement S has side effects.
2404 We may use it to determine if it is admissible to replace
2405 an assignment or call with a copy of a previously-computed
2406 value. In such cases, side-effects due to the LHS are
2407 preserved. */
2408
2409 bool
2410 gimple_rhs_has_side_effects (const_gimple s)
2411 {
2412 unsigned i;
2413
2414 if (is_gimple_call (s))
2415 {
2416 unsigned nargs = gimple_call_num_args (s);
2417 tree fn;
2418
2419 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2420 return true;
2421
2422 /* We cannot use gimple_has_volatile_ops here,
2423 because we must ignore a volatile LHS. */
2424 fn = gimple_call_fn (s);
2425 if (fn && (TREE_SIDE_EFFECTS (fn) || TREE_THIS_VOLATILE (fn)))
2426 {
2427 gcc_assert (gimple_has_volatile_ops (s));
2428 return true;
2429 }
2430
2431 for (i = 0; i < nargs; i++)
2432 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2433 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2434 return true;
2435
2436 return false;
2437 }
2438 else if (is_gimple_assign (s))
2439 {
2440 /* Skip the first operand, the LHS. */
2441 for (i = 1; i < gimple_num_ops (s); i++)
2442 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2443 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2444 {
2445 gcc_assert (gimple_has_volatile_ops (s));
2446 return true;
2447 }
2448 }
2449 else if (is_gimple_debug (s))
2450 return false;
2451 else
2452 {
2453 /* For statements without an LHS, examine all arguments. */
2454 for (i = 0; i < gimple_num_ops (s); i++)
2455 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2456 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2457 {
2458 gcc_assert (gimple_has_volatile_ops (s));
2459 return true;
2460 }
2461 }
2462
2463 return false;
2464 }
2465
2466 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2467 Return true if S can trap. When INCLUDE_MEM is true, check whether
2468 the memory operations could trap. When INCLUDE_STORES is true and
2469 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2470
2471 bool
2472 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2473 {
2474 tree t, div = NULL_TREE;
2475 enum tree_code op;
2476
2477 if (include_mem)
2478 {
2479 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2480
2481 for (i = start; i < gimple_num_ops (s); i++)
2482 if (tree_could_trap_p (gimple_op (s, i)))
2483 return true;
2484 }
2485
2486 switch (gimple_code (s))
2487 {
2488 case GIMPLE_ASM:
2489 return gimple_asm_volatile_p (s);
2490
2491 case GIMPLE_CALL:
2492 t = gimple_call_fndecl (s);
2493 /* Assume that calls to weak functions may trap. */
2494 if (!t || !DECL_P (t) || DECL_WEAK (t))
2495 return true;
2496 return false;
2497
2498 case GIMPLE_ASSIGN:
2499 t = gimple_expr_type (s);
2500 op = gimple_assign_rhs_code (s);
2501 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2502 div = gimple_assign_rhs2 (s);
2503 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2504 (INTEGRAL_TYPE_P (t)
2505 && TYPE_OVERFLOW_TRAPS (t)),
2506 div));
2507
2508 default:
2509 break;
2510 }
2511
2512 return false;
2513 }
2514
2515 /* Return true if statement S can trap. */
2516
2517 bool
2518 gimple_could_trap_p (gimple s)
2519 {
2520 return gimple_could_trap_p_1 (s, true, true);
2521 }
2522
2523 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2524
2525 bool
2526 gimple_assign_rhs_could_trap_p (gimple s)
2527 {
2528 gcc_assert (is_gimple_assign (s));
2529 return gimple_could_trap_p_1 (s, true, false);
2530 }
2531
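/* For example, an integer division by a divisor that is not known to
   be nonzero can trap, while a plain addition cannot (absent -ftrapv;
   the SSA names are hypothetical):

     x_3 = y_1 / z_2;   <-- gimple_could_trap_p returns true
     x_4 = y_1 + 1;     <-- gimple_could_trap_p returns false

   For the division, z_2 is the DIV operand handed to
   operation_could_trap_p above.  */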
2532
2533 /* Print allocation statistics for the GIMPLE statements generated.  */
2534
2535 void
2536 dump_gimple_statistics (void)
2537 {
2538 #ifdef GATHER_STATISTICS
2539 int i, total_tuples = 0, total_bytes = 0;
2540
2541 fprintf (stderr, "\nGIMPLE statements\n");
2542 fprintf (stderr, "Kind Stmts Bytes\n");
2543 fprintf (stderr, "---------------------------------------\n");
2544 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2545 {
2546 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2547 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2548 total_tuples += gimple_alloc_counts[i];
2549 total_bytes += gimple_alloc_sizes[i];
2550 }
2551 fprintf (stderr, "---------------------------------------\n");
2552 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2553 fprintf (stderr, "---------------------------------------\n");
2554 #else
2555 fprintf (stderr, "No gimple statistics\n");
2556 #endif
2557 }
2558
2559
2560 /* Return the number of operands needed on the RHS of a GIMPLE
2561 assignment for an expression with tree code CODE. */
2562
2563 unsigned
2564 get_gimple_rhs_num_ops (enum tree_code code)
2565 {
2566 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2567
2568 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2569 return 1;
2570 else if (rhs_class == GIMPLE_BINARY_RHS)
2571 return 2;
2572 else if (rhs_class == GIMPLE_TERNARY_RHS)
2573 return 3;
2574 else
2575 gcc_unreachable ();
2576 }
2577
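/* The mapping follows gimple_rhs_class_table below; for example:

     get_gimple_rhs_num_ops (NEGATE_EXPR) == 1   (GIMPLE_UNARY_RHS)
     get_gimple_rhs_num_ops (PLUS_EXPR)   == 2   (GIMPLE_BINARY_RHS)
     get_gimple_rhs_num_ops (FMA_EXPR)    == 3   (GIMPLE_TERNARY_RHS)  */
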
2578 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2579 (unsigned char) \
2580 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2581 : ((TYPE) == tcc_binary \
2582 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2583 : ((TYPE) == tcc_constant \
2584 || (TYPE) == tcc_declaration \
2585 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2586 : ((SYM) == TRUTH_AND_EXPR \
2587 || (SYM) == TRUTH_OR_EXPR \
2588 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2589 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2590 : ((SYM) == WIDEN_MULT_PLUS_EXPR \
2591 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2592 || (SYM) == DOT_PROD_EXPR \
2593 || (SYM) == REALIGN_LOAD_EXPR \
2594 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2595 : ((SYM) == COND_EXPR \
2596 || (SYM) == CONSTRUCTOR \
2597 || (SYM) == OBJ_TYPE_REF \
2598 || (SYM) == ASSERT_EXPR \
2599 || (SYM) == ADDR_EXPR \
2600 || (SYM) == WITH_SIZE_EXPR \
2601 || (SYM) == SSA_NAME \
2602 || (SYM) == VEC_COND_EXPR) ? GIMPLE_SINGLE_RHS \
2603 : GIMPLE_INVALID_RHS),
2604 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2605
2606 const unsigned char gimple_rhs_class_table[] = {
2607 #include "all-tree.def"
2608 };
2609
2610 #undef DEFTREECODE
2611 #undef END_OF_BASE_TREE_CODES
2612
2613 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2614
2615 /* Validation of GIMPLE expressions. */
2616
2617 /* Returns true iff T is a valid RHS for an assignment to a renamed
2618 user -- or front-end generated artificial -- variable. */
2619
2620 bool
2621 is_gimple_reg_rhs (tree t)
2622 {
2623 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2624 }
2625
2626 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2627 LHS, or for a call argument. */
2628
2629 bool
2630 is_gimple_mem_rhs (tree t)
2631 {
2632 /* If we're dealing with a renamable type, either source or dest must be
2633 a renamed variable. */
2634 if (is_gimple_reg_type (TREE_TYPE (t)))
2635 return is_gimple_val (t);
2636 else
2637 return is_gimple_val (t) || is_gimple_lvalue (t);
2638 }
2639
2640 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2641
2642 bool
2643 is_gimple_lvalue (tree t)
2644 {
2645 return (is_gimple_addressable (t)
2646 || TREE_CODE (t) == WITH_SIZE_EXPR
2647 /* These are complex lvalues, but don't have addresses, so they
2648 go here. */
2649 || TREE_CODE (t) == BIT_FIELD_REF);
2650 }
2651
2652 /* Return true if T is a GIMPLE condition. */
2653
2654 bool
2655 is_gimple_condexpr (tree t)
2656 {
2657 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2658 && !tree_could_throw_p (t)
2659 && is_gimple_val (TREE_OPERAND (t, 0))
2660 && is_gimple_val (TREE_OPERAND (t, 1))));
2661 }
2662
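/* For example, with hypothetical SSA names a_1 and b_2:

     a_1 < b_2       <-- valid, a comparison of two gimple values
     a_1             <-- valid, a gimple value by itself
     a_1 < b_2 + 1   <-- invalid, the second operand is not a gimple
                         value  */
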
2663 /* Return true if T is something whose address can be taken. */
2664
2665 bool
2666 is_gimple_addressable (tree t)
2667 {
2668 return (is_gimple_id (t) || handled_component_p (t)
2669 || TREE_CODE (t) == MEM_REF);
2670 }
2671
2672 /* Return true if T is a valid gimple constant. */
2673
2674 bool
2675 is_gimple_constant (const_tree t)
2676 {
2677 switch (TREE_CODE (t))
2678 {
2679 case INTEGER_CST:
2680 case REAL_CST:
2681 case FIXED_CST:
2682 case STRING_CST:
2683 case COMPLEX_CST:
2684 case VECTOR_CST:
2685 return true;
2686
2687 /* Vector constant constructors are gimple invariant. */
2688 case CONSTRUCTOR:
2689 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2690 return TREE_CONSTANT (t);
2691 else
2692 return false;
2693
2694 default:
2695 return false;
2696 }
2697 }
2698
2699 /* Return true if T is a gimple address. */
2700
2701 bool
2702 is_gimple_address (const_tree t)
2703 {
2704 tree op;
2705
2706 if (TREE_CODE (t) != ADDR_EXPR)
2707 return false;
2708
2709 op = TREE_OPERAND (t, 0);
2710 while (handled_component_p (op))
2711 {
2712 if ((TREE_CODE (op) == ARRAY_REF
2713 || TREE_CODE (op) == ARRAY_RANGE_REF)
2714 && !is_gimple_val (TREE_OPERAND (op, 1)))
2715 return false;
2716
2717 op = TREE_OPERAND (op, 0);
2718 }
2719
2720 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2721 return true;
2722
2723 switch (TREE_CODE (op))
2724 {
2725 case PARM_DECL:
2726 case RESULT_DECL:
2727 case LABEL_DECL:
2728 case FUNCTION_DECL:
2729 case VAR_DECL:
2730 case CONST_DECL:
2731 return true;
2732
2733 default:
2734 return false;
2735 }
2736 }
2737
2738 /* Strip out all handled components that produce invariant
2739 offsets. */
2740
2741 static const_tree
2742 strip_invariant_refs (const_tree op)
2743 {
2744 while (handled_component_p (op))
2745 {
2746 switch (TREE_CODE (op))
2747 {
2748 case ARRAY_REF:
2749 case ARRAY_RANGE_REF:
2750 if (!is_gimple_constant (TREE_OPERAND (op, 1))
2751 || TREE_OPERAND (op, 2) != NULL_TREE
2752 || TREE_OPERAND (op, 3) != NULL_TREE)
2753 return NULL;
2754 break;
2755
2756 case COMPONENT_REF:
2757 if (TREE_OPERAND (op, 2) != NULL_TREE)
2758 return NULL;
2759 break;
2760
2761 default:;
2762 }
2763 op = TREE_OPERAND (op, 0);
2764 }
2765
2766 return op;
2767 }
2768
2769 /* Return true if T is a gimple invariant address. */
2770
2771 bool
2772 is_gimple_invariant_address (const_tree t)
2773 {
2774 const_tree op;
2775
2776 if (TREE_CODE (t) != ADDR_EXPR)
2777 return false;
2778
2779 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2780 if (!op)
2781 return false;
2782
2783 if (TREE_CODE (op) == MEM_REF)
2784 {
2785 const_tree op0 = TREE_OPERAND (op, 0);
2786 return (TREE_CODE (op0) == ADDR_EXPR
2787 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2788 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2789 }
2790
2791 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2792 }
2793
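/* For example, assuming 'g' is a global VAR_DECL and i_1 an SSA name:

     &g.f[2]    <-- invariant, all offsets are constant
     &g.f[i_1]  <-- not invariant, the index is variable  */
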
2794 /* Return true if T is a gimple invariant address at IPA level
2795 (so addresses of variables on stack are not allowed). */
2796
2797 bool
2798 is_gimple_ip_invariant_address (const_tree t)
2799 {
2800 const_tree op;
2801
2802 if (TREE_CODE (t) != ADDR_EXPR)
2803 return false;
2804
2805 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2806
2807 return op && (CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op));
2808 }
2809
2810 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2811 form of function invariant. */
2812
2813 bool
2814 is_gimple_min_invariant (const_tree t)
2815 {
2816 if (TREE_CODE (t) == ADDR_EXPR)
2817 return is_gimple_invariant_address (t);
2818
2819 return is_gimple_constant (t);
2820 }
2821
2822 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2823 form of gimple minimal invariant. */
2824
2825 bool
2826 is_gimple_ip_invariant (const_tree t)
2827 {
2828 if (TREE_CODE (t) == ADDR_EXPR)
2829 return is_gimple_ip_invariant_address (t);
2830
2831 return is_gimple_constant (t);
2832 }
2833
2834 /* Return true if T looks like a valid GIMPLE statement. */
2835
2836 bool
2837 is_gimple_stmt (tree t)
2838 {
2839 const enum tree_code code = TREE_CODE (t);
2840
2841 switch (code)
2842 {
2843 case NOP_EXPR:
2844 /* The only valid NOP_EXPR is the empty statement. */
2845 return IS_EMPTY_STMT (t);
2846
2847 case BIND_EXPR:
2848 case COND_EXPR:
2849 /* These are only valid if they're void. */
2850 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2851
2852 case SWITCH_EXPR:
2853 case GOTO_EXPR:
2854 case RETURN_EXPR:
2855 case LABEL_EXPR:
2856 case CASE_LABEL_EXPR:
2857 case TRY_CATCH_EXPR:
2858 case TRY_FINALLY_EXPR:
2859 case EH_FILTER_EXPR:
2860 case CATCH_EXPR:
2861 case ASM_EXPR:
2862 case STATEMENT_LIST:
2863 case OMP_PARALLEL:
2864 case OMP_FOR:
2865 case OMP_SECTIONS:
2866 case OMP_SECTION:
2867 case OMP_SINGLE:
2868 case OMP_MASTER:
2869 case OMP_ORDERED:
2870 case OMP_CRITICAL:
2871 case OMP_TASK:
2872 /* These are always void. */
2873 return true;
2874
2875 case CALL_EXPR:
2876 case MODIFY_EXPR:
2877 case PREDICT_EXPR:
2878 /* These are valid regardless of their type. */
2879 return true;
2880
2881 default:
2882 return false;
2883 }
2884 }
2885
2886 /* Return true if T is a variable. */
2887
2888 bool
2889 is_gimple_variable (tree t)
2890 {
2891 return (TREE_CODE (t) == VAR_DECL
2892 || TREE_CODE (t) == PARM_DECL
2893 || TREE_CODE (t) == RESULT_DECL
2894 || TREE_CODE (t) == SSA_NAME);
2895 }
2896
2897 /* Return true if T is a GIMPLE identifier (something with an address). */
2898
2899 bool
2900 is_gimple_id (tree t)
2901 {
2902 return (is_gimple_variable (t)
2903 || TREE_CODE (t) == FUNCTION_DECL
2904 || TREE_CODE (t) == LABEL_DECL
2905 || TREE_CODE (t) == CONST_DECL
2906 /* Allow string constants, since they are addressable. */
2907 || TREE_CODE (t) == STRING_CST);
2908 }
2909
2910 /* Return true if TYPE is a suitable type for a scalar register variable. */
2911
2912 bool
2913 is_gimple_reg_type (tree type)
2914 {
2915 return !AGGREGATE_TYPE_P (type);
2916 }
2917
2918 /* Return true if T is a non-aggregate register variable. */
2919
2920 bool
2921 is_gimple_reg (tree t)
2922 {
2923 if (TREE_CODE (t) == SSA_NAME)
2924 t = SSA_NAME_VAR (t);
2925
2926 if (!is_gimple_variable (t))
2927 return false;
2928
2929 if (!is_gimple_reg_type (TREE_TYPE (t)))
2930 return false;
2931
2932 /* A volatile decl is not acceptable because we can't reuse it as
2933 needed. We need to copy it into a temp first. */
2934 if (TREE_THIS_VOLATILE (t))
2935 return false;
2936
2937 /* We define "registers" as things that can be renamed as needed,
2938 which with our infrastructure does not apply to memory. */
2939 if (needs_to_live_in_memory (t))
2940 return false;
2941
2942 /* Hard register variables are an interesting case. For those that
2943 are call-clobbered, we don't know where all the calls are, since
2944 we don't (want to) take into account which operations will turn
2945 into libcalls at the rtl level. For those that are call-saved,
2946 we don't currently model the fact that calls may in fact change
2947 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2948 level, and so miss variable changes that these might imply.  All around,
2949 it seems safest to not do too much optimization with these at the
2950 tree level at all. We'll have to rely on the rtl optimizers to
2951 clean this up, as there we've got all the appropriate bits exposed. */
2952 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2953 return false;
2954
2955 /* Complex and vector values must have been put into SSA-like form.
2956 That is, no assignments to the individual components. */
2957 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2958 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2959 return DECL_GIMPLE_REG_P (t);
2960
2961 return true;
2962 }
2963
2964
2965 /* Return true if T is a GIMPLE variable whose address is not needed. */
2966
2967 bool
2968 is_gimple_non_addressable (tree t)
2969 {
2970 if (TREE_CODE (t) == SSA_NAME)
2971 t = SSA_NAME_VAR (t);
2972
2973 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
2974 }
2975
2976 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2977
2978 bool
2979 is_gimple_val (tree t)
2980 {
2981 /* Make loads from volatiles and memory vars explicit. */
2982 if (is_gimple_variable (t)
2983 && is_gimple_reg_type (TREE_TYPE (t))
2984 && !is_gimple_reg (t))
2985 return false;
2986
2987 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2988 }
2989
2990 /* Similarly, but accept hard registers as inputs to asm statements. */
2991
2992 bool
2993 is_gimple_asm_val (tree t)
2994 {
2995 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2996 return true;
2997
2998 return is_gimple_val (t);
2999 }
3000
3001 /* Return true if T is a GIMPLE minimal lvalue. */
3002
3003 bool
3004 is_gimple_min_lval (tree t)
3005 {
3006 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
3007 return false;
3008 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
3009 }
3010
3011 /* Return true if T is a valid function operand of a CALL_EXPR. */
3012
3013 bool
3014 is_gimple_call_addr (tree t)
3015 {
3016 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
3017 }
3018
3019 /* Return true if T is a valid address operand of a MEM_REF. */
3020
3021 bool
3022 is_gimple_mem_ref_addr (tree t)
3023 {
3024 return (is_gimple_reg (t)
3025 || TREE_CODE (t) == INTEGER_CST
3026 || (TREE_CODE (t) == ADDR_EXPR
3027 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
3028 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
3029 }
3030
3031 /* If T makes a function call, return the corresponding CALL_EXPR operand.
3032 Otherwise, return NULL_TREE. */
3033
3034 tree
3035 get_call_expr_in (tree t)
3036 {
3037 if (TREE_CODE (t) == MODIFY_EXPR)
3038 t = TREE_OPERAND (t, 1);
3039 if (TREE_CODE (t) == WITH_SIZE_EXPR)
3040 t = TREE_OPERAND (t, 0);
3041 if (TREE_CODE (t) == CALL_EXPR)
3042 return t;
3043 return NULL_TREE;
3044 }
3045
3046
3047 /* Given a memory reference expression T, return its base address.
3048 The base address of a memory reference expression is the main
3049 object being referenced. For instance, the base address for
3050 'array[i].fld[j]' is 'array'. You can think of this as stripping
3051 away the offset part from a memory address.
3052
3053 This function calls handled_component_p to strip away all the inner
3054 parts of the memory reference until it reaches the base object. */
3055
3056 tree
3057 get_base_address (tree t)
3058 {
3059 while (handled_component_p (t))
3060 t = TREE_OPERAND (t, 0);
3061
3062 if ((TREE_CODE (t) == MEM_REF
3063 || TREE_CODE (t) == TARGET_MEM_REF)
3064 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
3065 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3066
3067 if (TREE_CODE (t) == SSA_NAME
3068 || DECL_P (t)
3069 || TREE_CODE (t) == STRING_CST
3070 || TREE_CODE (t) == CONSTRUCTOR
3071 || INDIRECT_REF_P (t)
3072 || TREE_CODE (t) == MEM_REF
3073 || TREE_CODE (t) == TARGET_MEM_REF)
3074 return t;
3075 else
3076 return NULL_TREE;
3077 }
3078
3079 void
3080 recalculate_side_effects (tree t)
3081 {
3082 enum tree_code code = TREE_CODE (t);
3083 int len = TREE_OPERAND_LENGTH (t);
3084 int i;
3085
3086 switch (TREE_CODE_CLASS (code))
3087 {
3088 case tcc_expression:
3089 switch (code)
3090 {
3091 case INIT_EXPR:
3092 case MODIFY_EXPR:
3093 case VA_ARG_EXPR:
3094 case PREDECREMENT_EXPR:
3095 case PREINCREMENT_EXPR:
3096 case POSTDECREMENT_EXPR:
3097 case POSTINCREMENT_EXPR:
3098 /* All of these have side-effects, no matter what their
3099 operands are. */
3100 return;
3101
3102 default:
3103 break;
3104 }
3105 /* Fall through. */
3106
3107 case tcc_comparison: /* a comparison expression */
3108 case tcc_unary: /* a unary arithmetic expression */
3109 case tcc_binary: /* a binary arithmetic expression */
3110 case tcc_reference: /* a reference */
3111 case tcc_vl_exp: /* a function call */
3112 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3113 for (i = 0; i < len; ++i)
3114 {
3115 tree op = TREE_OPERAND (t, i);
3116 if (op && TREE_SIDE_EFFECTS (op))
3117 TREE_SIDE_EFFECTS (t) = 1;
3118 }
3119 break;
3120
3121 case tcc_constant:
3122 /* No side-effects. */
3123 return;
3124
3125 default:
3126 gcc_unreachable ();
3127 }
3128 }
3129
3130 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
3131 a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
3132 we failed to create one. */
3133
3134 tree
3135 canonicalize_cond_expr_cond (tree t)
3136 {
3137 /* Strip conversions around boolean operations. */
3138 if (CONVERT_EXPR_P (t)
3139 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
3140 t = TREE_OPERAND (t, 0);
3141
3142 /* For (bool)x use x != 0. */
3143 if (CONVERT_EXPR_P (t)
3144 && TREE_CODE (TREE_TYPE (t)) == BOOLEAN_TYPE)
3145 {
3146 tree top0 = TREE_OPERAND (t, 0);
3147 t = build2 (NE_EXPR, TREE_TYPE (t),
3148 top0, build_int_cst (TREE_TYPE (top0), 0));
3149 }
3150 /* For !x use x == 0. */
3151 else if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3152 {
3153 tree top0 = TREE_OPERAND (t, 0);
3154 t = build2 (EQ_EXPR, TREE_TYPE (t),
3155 top0, build_int_cst (TREE_TYPE (top0), 0));
3156 }
3157 /* For cmp ? 1 : 0 use cmp. */
3158 else if (TREE_CODE (t) == COND_EXPR
3159 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3160 && integer_onep (TREE_OPERAND (t, 1))
3161 && integer_zerop (TREE_OPERAND (t, 2)))
3162 {
3163 tree top0 = TREE_OPERAND (t, 0);
3164 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3165 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3166 }
3167
3168 if (is_gimple_condexpr (t))
3169 return t;
3170
3171 return NULL_TREE;
3172 }
3173
3174 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3175 the positions marked by the set ARGS_TO_SKIP. */
3176
3177 gimple
3178 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3179 {
3180 int i;
3181 int nargs = gimple_call_num_args (stmt);
3182 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3183 gimple new_stmt;
3184
3185 for (i = 0; i < nargs; i++)
3186 if (!bitmap_bit_p (args_to_skip, i))
3187 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3188
3189 if (gimple_call_internal_p (stmt))
3190 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3191 vargs);
3192 else
3193 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3194 VEC_free (tree, heap, vargs);
3195 if (gimple_call_lhs (stmt))
3196 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3197
3198 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3199 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3200
3201 gimple_set_block (new_stmt, gimple_block (stmt));
3202 if (gimple_has_location (stmt))
3203 gimple_set_location (new_stmt, gimple_location (stmt));
3204 gimple_call_copy_flags (new_stmt, stmt);
3205 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3206
3207 gimple_set_modified (new_stmt, true);
3208
3209 return new_stmt;
3210 }
3211
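/* A usage sketch, assuming STMT is a call with at least two arguments:
   drop the second argument (index 1) and keep everything else.

     bitmap to_skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (to_skip, 1);
     gimple new_call = gimple_call_copy_skip_args (stmt, to_skip);
     BITMAP_FREE (to_skip);

   The copy still refers to the original callee; callers typically
   retarget it to a declaration with the matching signature.  */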
3212
3213 enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };
3214
3215 static hashval_t gimple_type_hash (const void *);
3216
3217 /* Structure used to maintain a cache of some type pairs compared by
3218 gimple_types_compatible_p when comparing aggregate types. There are
3219 three possible values for SAME_P:
3220
3221 -2: The pair (T1, T2) has just been inserted in the table.
3222 0: T1 and T2 are different types.
3223 1: T1 and T2 are the same type.
3224
3225 The two elements in the SAME_P array are indexed by the comparison
3226 mode gtc_mode. */
3227
3228 struct type_pair_d
3229 {
3230 unsigned int uid1;
3231 unsigned int uid2;
3232 signed char same_p[2];
3233 };
3234 typedef struct type_pair_d *type_pair_t;
3235 DEF_VEC_P(type_pair_t);
3236 DEF_VEC_ALLOC_P(type_pair_t,heap);
3237
3238 #define GIMPLE_TYPE_PAIR_SIZE 16381
3239 struct type_pair_d *type_pair_cache;
3240
3241
3242 /* Lookup the pair of types T1 and T2 in the type-pair cache.
3243 Insert a new entry if none existed.  */
3244
3245 static inline type_pair_t
3246 lookup_type_pair (tree t1, tree t2)
3247 {
3248 unsigned int index;
3249 unsigned int uid1, uid2;
3250
3251 if (type_pair_cache == NULL)
3252 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
3253
3254 if (TYPE_UID (t1) < TYPE_UID (t2))
3255 {
3256 uid1 = TYPE_UID (t1);
3257 uid2 = TYPE_UID (t2);
3258 }
3259 else
3260 {
3261 uid1 = TYPE_UID (t2);
3262 uid2 = TYPE_UID (t1);
3263 }
3264 gcc_checking_assert (uid1 != uid2);
3265
3266 /* Using iterative_hash_hashval_t here would imply function calls;
3267 we know that the UIDs are in a limited range.  */
3268 index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
3269 % GIMPLE_TYPE_PAIR_SIZE);
3270 if (type_pair_cache [index].uid1 == uid1
3271 && type_pair_cache [index].uid2 == uid2)
3272 return &type_pair_cache[index];
3273
3274 type_pair_cache [index].uid1 = uid1;
3275 type_pair_cache [index].uid2 = uid2;
3276 type_pair_cache [index].same_p[0] = -2;
3277 type_pair_cache [index].same_p[1] = -2;
3278
3279 return &type_pair_cache[index];
3280 }
3281
3282 /* Per pointer state for the SCC finding. The on_sccstack flag
3283 is not strictly required; it is true when there is no hash value
3284 recorded for the type and false otherwise, but querying that
3285 is slower. */
3286
3287 struct sccs
3288 {
3289 unsigned int dfsnum;
3290 unsigned int low;
3291 bool on_sccstack;
3292 union {
3293 hashval_t hash;
3294 signed char same_p;
3295 } u;
3296 };
3297
3298 static unsigned int next_dfs_num;
3299 static unsigned int gtc_next_dfs_num;
3300
3301
3302 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
3303
3304 typedef struct GTY(()) gimple_type_leader_entry_s {
3305 tree type;
3306 tree leader;
3307 } gimple_type_leader_entry;
3308
3309 #define GIMPLE_TYPE_LEADER_SIZE 16381
3310 static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3311 gimple_type_leader_entry *gimple_type_leader;
3312
3313 /* Lookup an existing leader for T and return it, or NULL_TREE if
3314 there is none in the cache.  */
3315
3316 static inline tree
3317 gimple_lookup_type_leader (tree t)
3318 {
3319 gimple_type_leader_entry *leader;
3320
3321 if (!gimple_type_leader)
3322 return NULL_TREE;
3323
3324 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3325 if (leader->type != t)
3326 return NULL_TREE;
3327
3328 return leader->leader;
3329 }
3330
3331 /* Return true if T1 and T2 have the same name.  If FOR_COMPLETION_P
3332 is true, return false whenever either type has no name; otherwise
3333 two types that both lack names compare equal.  */
3334
3335 static bool
3336 compare_type_names_p (tree t1, tree t2, bool for_completion_p)
3337 {
3338 tree name1 = TYPE_NAME (t1);
3339 tree name2 = TYPE_NAME (t2);
3340
3341 /* Consider anonymous types all unique for completion. */
3342 if (for_completion_p
3343 && (!name1 || !name2))
3344 return false;
3345
3346 if (name1 && TREE_CODE (name1) == TYPE_DECL)
3347 {
3348 name1 = DECL_NAME (name1);
3349 if (for_completion_p
3350 && !name1)
3351 return false;
3352 }
3353 gcc_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3354
3355 if (name2 && TREE_CODE (name2) == TYPE_DECL)
3356 {
3357 name2 = DECL_NAME (name2);
3358 if (for_completion_p
3359 && !name2)
3360 return false;
3361 }
3362 gcc_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3363
3364 /* Identifiers can be compared with pointer equality rather
3365 than a string comparison. */
3366 if (name1 == name2)
3367 return true;
3368
3369 return false;
3370 }
3371
3372 /* Return true if the field decls F1 and F2 are at the same offset.
3373
3374 This is intended to be used on GIMPLE types only. */
3375
3376 bool
3377 gimple_compare_field_offset (tree f1, tree f2)
3378 {
3379 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3380 {
3381 tree offset1 = DECL_FIELD_OFFSET (f1);
3382 tree offset2 = DECL_FIELD_OFFSET (f2);
3383 return ((offset1 == offset2
3384 /* Once gimplification is done, self-referential offsets are
3385 instantiated as operand #2 of the COMPONENT_REF built for
3386 each access and reset. Therefore, they are not relevant
3387 anymore and fields are interchangeable provided that they
3388 represent the same access. */
3389 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3390 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3391 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3392 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3393 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3394 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3395 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3396 || operand_equal_p (offset1, offset2, 0))
3397 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3398 DECL_FIELD_BIT_OFFSET (f2)));
3399 }
3400
3401 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3402 should be, so handle differing ones specially by decomposing
3403 the offset into a byte and bit offset manually. */
3404 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3405 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3406 {
3407 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3408 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3409 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3410 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3411 + bit_offset1 / BITS_PER_UNIT);
3412 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3413 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3414 + bit_offset2 / BITS_PER_UNIT);
3415 if (byte_offset1 != byte_offset2)
3416 return false;
3417 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3418 }
3419
3420 return false;
3421 }
3422
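/* A worked example of the manual decomposition above, assuming
   BITS_PER_UNIT == 8: a field with DECL_FIELD_OFFSET 4 and
   DECL_FIELD_BIT_OFFSET 16 yields byte offset 4 + 16/8 == 6 and
   residual bit offset 16 % 8 == 0, so it compares equal to a field
   with DECL_FIELD_OFFSET 6 and DECL_FIELD_BIT_OFFSET 0.  */
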
3423 /* Return true if the types T1 and T2 are a complete and an
3424 incomplete variant of the same type.  */
3425
3426 static bool
3427 gimple_compatible_complete_and_incomplete_subtype_p (tree t1, tree t2)
3428 {
3429 /* If one pointer points to an incomplete type variant of
3430 the other pointed-to type they are the same. */
3431 if (TREE_CODE (t1) == TREE_CODE (t2)
3432 && RECORD_OR_UNION_TYPE_P (t1)
3433 && (!COMPLETE_TYPE_P (t1)
3434 || !COMPLETE_TYPE_P (t2))
3435 && TYPE_QUALS (t1) == TYPE_QUALS (t2)
3436 && compare_type_names_p (TYPE_MAIN_VARIANT (t1),
3437 TYPE_MAIN_VARIANT (t2), true))
3438 return true;
3439 return false;
3440 }
3441
3442 static bool
3443 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
3444 VEC(type_pair_t, heap) **,
3445 struct pointer_map_t *, struct obstack *);
3446
3447 /* DFS visit the edge from the caller's type pair with state *STATE
3448 to the pair T1, T2.
3449 Update the merging status if it is not part of the SCC containing
3450 the caller's pair and return it.
3451 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */
3452
3453 static bool
3454 gtc_visit (tree t1, tree t2,
3455 struct sccs *state,
3456 VEC(type_pair_t, heap) **sccstack,
3457 struct pointer_map_t *sccstate,
3458 struct obstack *sccstate_obstack)
3459 {
3460 struct sccs *cstate = NULL;
3461 type_pair_t p;
3462 void **slot;
3463 tree leader1, leader2;
3464
3465 /* Check first for the obvious case of pointer identity. */
3466 if (t1 == t2)
3467 return true;
3468
3469 /* Check that we have two types to compare. */
3470 if (t1 == NULL_TREE || t2 == NULL_TREE)
3471 return false;
3472
3473 /* Can't be the same type if the types don't have the same code. */
3474 if (TREE_CODE (t1) != TREE_CODE (t2))
3475 return false;
3476
3477 /* Can't be the same type if they have different CV qualifiers. */
3478 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3479 return false;
3480
3481 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3482 return false;
3483
3484 /* Void types and nullptr types are always the same. */
3485 if (TREE_CODE (t1) == VOID_TYPE
3486 || TREE_CODE (t1) == NULLPTR_TYPE)
3487 return true;
3488
3489 /* Can't be the same type if they have different alignment or mode. */
3490 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3491 || TYPE_MODE (t1) != TYPE_MODE (t2))
3492 return false;
3493
3494 /* Do some simple checks before doing three hashtable queries. */
3495 if (INTEGRAL_TYPE_P (t1)
3496 || SCALAR_FLOAT_TYPE_P (t1)
3497 || FIXED_POINT_TYPE_P (t1)
3498 || TREE_CODE (t1) == VECTOR_TYPE
3499 || TREE_CODE (t1) == COMPLEX_TYPE
3500 || TREE_CODE (t1) == OFFSET_TYPE
3501 || POINTER_TYPE_P (t1))
3502 {
3503 /* Can't be the same type if they have different sign or precision. */
3504 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3505 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3506 return false;
3507
3508 if (TREE_CODE (t1) == INTEGER_TYPE
3509 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3510 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3511 return false;
3512
3513 /* That's all we need to check for float and fixed-point types. */
3514 if (SCALAR_FLOAT_TYPE_P (t1)
3515 || FIXED_POINT_TYPE_P (t1))
3516 return true;
3517
3518 /* For other types fall thru to more complex checks. */
3519 }
3520
3521 /* If the types have been previously registered and found equal
3522 they still are. */
3523 leader1 = gimple_lookup_type_leader (t1);
3524 leader2 = gimple_lookup_type_leader (t2);
3525 if (leader1 == t2
3526 || t1 == leader2
3527 || (leader1 && leader1 == leader2))
3528 return true;
3529
3530 /* If the hash values of t1 and t2 are different the types can't
3531 possibly be the same.  This helps keep the type-pair hashtable
3532 small, only tracking comparisons for hash collisions. */
3533 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3534 return false;
3535
3536 /* Allocate a new cache entry for this comparison. */
3537 p = lookup_type_pair (t1, t2);
3538 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3539 {
3540 /* We have already decided whether T1 and T2 are the
3541 same, return the cached result. */
3542 return p->same_p[GTC_MERGE] == 1;
3543 }
3544
3545 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3546 cstate = (struct sccs *)*slot;
3547 /* Not yet visited. DFS recurse. */
3548 if (!cstate)
3549 {
3550 gimple_types_compatible_p_1 (t1, t2, p,
3551 sccstack, sccstate, sccstate_obstack);
3552 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
3553 state->low = MIN (state->low, cstate->low);
3554 }
3555 /* If the type is still on the SCC stack adjust the parent's low.  */
3556 if (cstate->dfsnum < state->dfsnum
3557 && cstate->on_sccstack)
3558 state->low = MIN (cstate->dfsnum, state->low);
3559
3560 /* Return the current lattice value. We start with an equality
3561 assumption so types that are part of an SCC will be optimistically
3562 treated equal unless proven otherwise. */
3563 return cstate->u.same_p;
3564 }
3565
3566 /* Worker for gimple_types_compatible_p.
3567 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3568
3569 static bool
3570 gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
3571 VEC(type_pair_t, heap) **sccstack,
3572 struct pointer_map_t *sccstate,
3573 struct obstack *sccstate_obstack)
3574 {
3575 struct sccs *state;
3576
3577 gcc_assert (p->same_p[GTC_MERGE] == -2);
3578
3579 state = XOBNEW (sccstate_obstack, struct sccs);
3580 *pointer_map_insert (sccstate, p) = state;
3581
3582 VEC_safe_push (type_pair_t, heap, *sccstack, p);
3583 state->dfsnum = gtc_next_dfs_num++;
3584 state->low = state->dfsnum;
3585 state->on_sccstack = true;
3586 /* Start with an equality assumption. As we DFS recurse into child
3587 SCCs this assumption may get revisited. */
3588 state->u.same_p = 1;
3589
3590 /* If their attributes are not the same they can't be the same type. */
3591 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3592 goto different_types;
3593
3594 /* Do type-specific comparisons. */
3595 switch (TREE_CODE (t1))
3596 {
3597 case VECTOR_TYPE:
3598 case COMPLEX_TYPE:
3599 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3600 state, sccstack, sccstate, sccstate_obstack))
3601 goto different_types;
3602 goto same_types;
3603
3604 case ARRAY_TYPE:
3605 /* Array types are the same if the element types are the same and
3606 the number of elements is the same.  */
3607 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3608 state, sccstack, sccstate, sccstate_obstack)
3609 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3610 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3611 goto different_types;
3612 else
3613 {
3614 tree i1 = TYPE_DOMAIN (t1);
3615 tree i2 = TYPE_DOMAIN (t2);
3616
3617 /* For an incomplete external array, the type domain can be
3618 NULL_TREE. Check this condition also. */
3619 if (i1 == NULL_TREE && i2 == NULL_TREE)
3620 goto same_types;
3621 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3622 goto different_types;
3623 /* If for a complete array type the possibly gimplified sizes
3624 are different the types are different. */
3625 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3626 || (TYPE_SIZE (i1)
3627 && TYPE_SIZE (i2)
3628 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3629 goto different_types;
3630 else
3631 {
3632 tree min1 = TYPE_MIN_VALUE (i1);
3633 tree min2 = TYPE_MIN_VALUE (i2);
3634 tree max1 = TYPE_MAX_VALUE (i1);
3635 tree max2 = TYPE_MAX_VALUE (i2);
3636
3637 /* The minimum/maximum values have to be the same. */
3638 if ((min1 == min2
3639 || (min1 && min2
3640 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3641 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3642 || operand_equal_p (min1, min2, 0))))
3643 && (max1 == max2
3644 || (max1 && max2
3645 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3646 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3647 || operand_equal_p (max1, max2, 0)))))
3648 goto same_types;
3649 else
3650 goto different_types;
3651 }
3652 }
3653
3654 case METHOD_TYPE:
3655 /* Method types should belong to the same class. */
3656 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
3657 state, sccstack, sccstate, sccstate_obstack))
3658 goto different_types;
3659
3660 /* Fallthru */
3661
3662 case FUNCTION_TYPE:
3663 /* Function types are the same if the return type and argument types
3664 are the same. */
3665 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3666 state, sccstack, sccstate, sccstate_obstack))
3667 goto different_types;
3668
3669 if (!comp_type_attributes (t1, t2))
3670 goto different_types;
3671
3672 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3673 goto same_types;
3674 else
3675 {
3676 tree parms1, parms2;
3677
3678 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3679 parms1 && parms2;
3680 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3681 {
3682 if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
3683 state, sccstack, sccstate, sccstate_obstack))
3684 goto different_types;
3685 }
3686
3687 if (parms1 || parms2)
3688 goto different_types;
3689
3690 goto same_types;
3691 }
3692
3693 case OFFSET_TYPE:
3694 {
3695 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3696 state, sccstack, sccstate, sccstate_obstack)
3697 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
3698 TYPE_OFFSET_BASETYPE (t2),
3699 state, sccstack, sccstate, sccstate_obstack))
3700 goto different_types;
3701
3702 goto same_types;
3703 }
3704
3705 case POINTER_TYPE:
3706 case REFERENCE_TYPE:
3707 {
3708 /* If the two pointers have different ref-all attributes,
3709 they can't be the same type. */
3710 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3711 goto different_types;
3712
3713 /* Otherwise, pointer and reference types are the same if the
3714 pointed-to types are the same. */
3715 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3716 state, sccstack, sccstate, sccstate_obstack))
3717 goto same_types;
3718
3719 goto different_types;
3720 }
3721
3722 case INTEGER_TYPE:
3723 case BOOLEAN_TYPE:
3724 {
3725 tree min1 = TYPE_MIN_VALUE (t1);
3726 tree max1 = TYPE_MAX_VALUE (t1);
3727 tree min2 = TYPE_MIN_VALUE (t2);
3728 tree max2 = TYPE_MAX_VALUE (t2);
3729 bool min_equal_p = false;
3730 bool max_equal_p = false;
3731
3732 /* If either type has a minimum value, the other type must
3733 have the same. */
3734 if (min1 == NULL_TREE && min2 == NULL_TREE)
3735 min_equal_p = true;
3736 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3737 min_equal_p = true;
3738
3739 /* Likewise, if either type has a maximum value, the other
3740 type must have the same. */
3741 if (max1 == NULL_TREE && max2 == NULL_TREE)
3742 max_equal_p = true;
3743 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3744 max_equal_p = true;
3745
3746 if (!min_equal_p || !max_equal_p)
3747 goto different_types;
3748
3749 goto same_types;
3750 }
3751
3752 case ENUMERAL_TYPE:
3753 {
3754 /* FIXME lto, we cannot check bounds on enumeral types because
3755 different front ends will produce different values.
3756 In C, enumeral types are integers, while in C++ each element
3757 will have its own symbolic value. We should decide how enums
3758 are to be represented in GIMPLE and have each front end lower
3759 to that. */
3760 tree v1, v2;
3761
3762 /* For enumeral types, all the values must be the same. */
3763 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3764 goto same_types;
3765
3766 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3767 v1 && v2;
3768 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3769 {
3770 tree c1 = TREE_VALUE (v1);
3771 tree c2 = TREE_VALUE (v2);
3772
3773 if (TREE_CODE (c1) == CONST_DECL)
3774 c1 = DECL_INITIAL (c1);
3775
3776 if (TREE_CODE (c2) == CONST_DECL)
3777 c2 = DECL_INITIAL (c2);
3778
3779 if (tree_int_cst_equal (c1, c2) != 1)
3780 goto different_types;
3781
3782 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
3783 goto different_types;
3784 }
3785
3786 /* If one enumeration has more values than the other, they
3787 are not the same. */
3788 if (v1 || v2)
3789 goto different_types;
3790
3791 goto same_types;
3792 }
3793
3794 case RECORD_TYPE:
3795 case UNION_TYPE:
3796 case QUAL_UNION_TYPE:
3797 {
3798 tree f1, f2;
3799
3800 /* The struct tags shall compare equal. */
3801 if (!compare_type_names_p (t1, t2, false))
3802 goto different_types;
3803
3804 /* For aggregate types, all the fields must be the same. */
3805 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3806 f1 && f2;
3807 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3808 {
3809 /* The fields must have the same name, offset and type. */
3810 if (DECL_NAME (f1) != DECL_NAME (f2)
3811 || DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3812 || !gimple_compare_field_offset (f1, f2)
3813 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
3814 state, sccstack, sccstate, sccstate_obstack))
3815 goto different_types;
3816 }
3817
3818 /* If one aggregate has more fields than the other, they
3819 are not the same. */
3820 if (f1 || f2)
3821 goto different_types;
3822
3823 goto same_types;
3824 }
3825
3826 default:
3827 gcc_unreachable ();
3828 }
3829
3830 /* Common exit path for types that are not compatible. */
3831 different_types:
3832 state->u.same_p = 0;
3833 goto pop;
3834
3835 /* Common exit path for types that are compatible. */
3836 same_types:
3837 gcc_assert (state->u.same_p == 1);
3838
3839 pop:
3840 if (state->low == state->dfsnum)
3841 {
3842 type_pair_t x;
3843
3844 /* Pop off the SCC and set its cache values to the final
3845 comparison result. */
3846 do
3847 {
3848 struct sccs *cstate;
3849 x = VEC_pop (type_pair_t, *sccstack);
3850 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3851 cstate->on_sccstack = false;
3852 x->same_p[GTC_MERGE] = state->u.same_p;
3853 }
3854 while (x != p);
3855 }
3856
3857 return state->u.same_p;
3858 }
3859
3860 /* Return true iff T1 and T2 are structurally identical.  When
3861 merging, an incomplete type and a complete type are considered
3862 different; otherwise they are considered compatible.  */
3863
3864 static bool
3865 gimple_types_compatible_p (tree t1, tree t2)
3866 {
3867 VEC(type_pair_t, heap) *sccstack = NULL;
3868 struct pointer_map_t *sccstate;
3869 struct obstack sccstate_obstack;
3870 type_pair_t p = NULL;
3871 bool res;
3872 tree leader1, leader2;
3873
3874 /* Before starting to set up the SCC machinery handle simple cases. */
3875
3876 /* Check first for the obvious case of pointer identity. */
3877 if (t1 == t2)
3878 return true;
3879
3880 /* Check that we have two types to compare. */
3881 if (t1 == NULL_TREE || t2 == NULL_TREE)
3882 return false;
3883
3884 /* Can't be the same type if the types don't have the same code. */
3885 if (TREE_CODE (t1) != TREE_CODE (t2))
3886 return false;
3887
3888 /* Can't be the same type if they have different CV qualifiers. */
3889 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3890 return false;
3891
3892 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3893 return false;
3894
3895 /* Void types and nullptr types are always the same. */
3896 if (TREE_CODE (t1) == VOID_TYPE
3897 || TREE_CODE (t1) == NULLPTR_TYPE)
3898 return true;
3899
3900 /* Can't be the same type if they have different alignment or mode. */
3901 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3902 || TYPE_MODE (t1) != TYPE_MODE (t2))
3903 return false;
3904
3905 /* Do some simple checks before doing three hashtable queries. */
3906 if (INTEGRAL_TYPE_P (t1)
3907 || SCALAR_FLOAT_TYPE_P (t1)
3908 || FIXED_POINT_TYPE_P (t1)
3909 || TREE_CODE (t1) == VECTOR_TYPE
3910 || TREE_CODE (t1) == COMPLEX_TYPE
3911 || TREE_CODE (t1) == OFFSET_TYPE
3912 || POINTER_TYPE_P (t1))
3913 {
3914 /* Can't be the same type if they have different sign or precision. */
3915 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3916 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3917 return false;
3918
3919 if (TREE_CODE (t1) == INTEGER_TYPE
3920 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3921 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3922 return false;
3923
3924 /* That's all we need to check for float and fixed-point types. */
3925 if (SCALAR_FLOAT_TYPE_P (t1)
3926 || FIXED_POINT_TYPE_P (t1))
3927 return true;
3928
3929 /* For other types fall thru to more complex checks. */
3930 }
3931
3932 /* If the types have been previously registered and found equal
3933 they still are. */
3934 leader1 = gimple_lookup_type_leader (t1);
3935 leader2 = gimple_lookup_type_leader (t2);
3936 if (leader1 == t2
3937 || t1 == leader2
3938 || (leader1 && leader1 == leader2))
3939 return true;
3940
3941 /* If the hash values of t1 and t2 are different the types can't
3942 possibly be the same. This helps keep the type-pair hashtable
3943 small, only tracking comparisons for hash collisions. */
3944 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3945 return false;
3946
3947 /* If we've visited this type pair before (in the case of aggregates
3948 with self-referential types), and we made a decision, return it. */
3949 p = lookup_type_pair (t1, t2);
3950 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3951 {
3952 /* We have already decided whether T1 and T2 are the
3953 same, return the cached result. */
3954 return p->same_p[GTC_MERGE] == 1;
3955 }
3956
3957 /* Now set up the SCC machinery for the comparison. */
3958 gtc_next_dfs_num = 1;
3959 sccstate = pointer_map_create ();
3960 gcc_obstack_init (&sccstate_obstack);
3961 res = gimple_types_compatible_p_1 (t1, t2, p,
3962 &sccstack, sccstate, &sccstate_obstack);
3963 VEC_free (type_pair_t, heap, sccstack);
3964 pointer_map_destroy (sccstate);
3965 obstack_free (&sccstate_obstack, NULL);
3966
3967 return res;
3968 }
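
/* Illustrative note (a sketch, not part of the original sources): when
   the LTO streamer has materialized two independent trees T1 and T2
   for the C type 'struct S { int i; struct S *next; };' read from two
   TUs, all of the fast paths above fail and the SCC walk proves the
   self-referential cycle compatible, so gimple_types_compatible_p
   returns true and a later gimple_register_type call can merge the
   two trees. */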
3969
3970
3971 static hashval_t
3972 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
3973 struct pointer_map_t *, struct obstack *);
3974
3975 /* DFS visit the edge from the callers type with state *STATE to T.
3976 Update the callers type hash V with the hash for T if it is not part
3977 of the SCC containing the callers type and return it.
3978 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3979
3980 static hashval_t
3981 visit (tree t, struct sccs *state, hashval_t v,
3982 VEC (tree, heap) **sccstack,
3983 struct pointer_map_t *sccstate,
3984 struct obstack *sccstate_obstack)
3985 {
3986 struct sccs *cstate = NULL;
3987 struct tree_int_map m;
3988 void **slot;
3989
3990 /* If there is a hash value recorded for this type then it can't
3991 possibly be part of our parent SCC. Simply mix in its hash. */
3992 m.base.from = t;
3993 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
3994 && *slot)
3995 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
3996
3997 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
3998 cstate = (struct sccs *)*slot;
3999 if (!cstate)
4000 {
4001 hashval_t tem;
4002 /* Not yet visited. DFS recurse. */
4003 tem = iterative_hash_gimple_type (t, v,
4004 sccstack, sccstate, sccstate_obstack);
4005 if (!cstate)
4006 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
4007 state->low = MIN (state->low, cstate->low);
4008 /* If the type is no longer on the SCC stack and thus is not part
4009 of the parent's SCC mix in its hash value. Otherwise we will
4010 ignore the type for hashing purposes and return the unaltered
4011 hash value. */
4012 if (!cstate->on_sccstack)
4013 return tem;
4014 }
4015 if (cstate->dfsnum < state->dfsnum
4016 && cstate->on_sccstack)
4017 state->low = MIN (cstate->dfsnum, state->low);
4018
4019 /* We are part of our parent's SCC, skip this type during hashing
4020 and return the unaltered hash value. */
4021 return v;
4022 }
4023
4024 /* Hash NAME with the previous hash value V and return it. */
4025
4026 static hashval_t
4027 iterative_hash_name (tree name, hashval_t v)
4028 {
4029 if (!name)
4030 return v;
4031 if (TREE_CODE (name) == TYPE_DECL)
4032 name = DECL_NAME (name);
4033 if (!name)
4034 return v;
4035 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
4036 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
4037 }
4038
4039 /* A type, hashvalue pair for sorting SCC members. */
4040
4041 struct type_hash_pair {
4042 tree type;
4043 hashval_t hash;
4044 };
4045
4046 /* Compare two type, hashvalue pairs. */
4047
4048 static int
4049 type_hash_pair_compare (const void *p1_, const void *p2_)
4050 {
4051 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
4052 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
4053 if (p1->hash < p2->hash)
4054 return -1;
4055 else if (p1->hash > p2->hash)
4056 return 1;
4057 return 0;
4058 }
4059
4060 /* Return a hash value for gimple type TYPE combined with VAL.
4061 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
4062
4063 To hash a type we end up hashing in types that are reachable.
4064 Through pointers we can end up with cycles which messes up the
4065 required property that we need to compute the same hash value
4066 for structurally equivalent types. To avoid this we have to
4067 hash all types in a cycle (the SCC) in a commutative way. The
4068 easiest way is to not mix in the hashes of the SCC members at
4069 all. To make this work we have to delay setting the hash
4070 values of the SCC until it is complete. */
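
/* As an illustration (hypothetical types, not taken from the code):
   for 'struct A { struct B *b; };' and 'struct B { struct A *a; };'
   both structs belong to one SCC. Depending on which of them the DFS
   enters first, the member hashes would otherwise be mixed in a
   different order; deferring them and combining them commutatively
   (see the qsort-based mixing further down) makes both entry points
   produce the same final hash. */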
4071
4072 static hashval_t
4073 iterative_hash_gimple_type (tree type, hashval_t val,
4074 VEC(tree, heap) **sccstack,
4075 struct pointer_map_t *sccstate,
4076 struct obstack *sccstate_obstack)
4077 {
4078 hashval_t v;
4079 void **slot;
4080 struct sccs *state;
4081
4082 /* Not visited during this DFS walk. */
4083 gcc_checking_assert (!pointer_map_contains (sccstate, type));
4084 state = XOBNEW (sccstate_obstack, struct sccs);
4085 *pointer_map_insert (sccstate, type) = state;
4086
4087 VEC_safe_push (tree, heap, *sccstack, type);
4088 state->dfsnum = next_dfs_num++;
4089 state->low = state->dfsnum;
4090 state->on_sccstack = true;
4091
4092 /* Combine a few common features of types so that types are grouped into
4093 smaller sets; when searching for existing matching types to merge,
4094 only existing types having the same features as the new type will be
4095 checked. */
4096 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
4097 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
4098 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4099
4100 /* Do not hash the type's size as this will cause differences in
4101 hash values for the complete vs. the incomplete type variant. */
4102
4103 /* Incorporate common features of numerical types. */
4104 if (INTEGRAL_TYPE_P (type)
4105 || SCALAR_FLOAT_TYPE_P (type)
4106 || FIXED_POINT_TYPE_P (type))
4107 {
4108 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4109 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4110 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4111 }
4112
4113 /* For pointer and reference types, fold in information about the type
4114 pointed to. */
4115 if (POINTER_TYPE_P (type))
4116 v = visit (TREE_TYPE (type), state, v,
4117 sccstack, sccstate, sccstate_obstack);
4118
4119 /* For integer types hash the type's min/max values and the string flag. */
4120 if (TREE_CODE (type) == INTEGER_TYPE)
4121 {
4122 /* OMP lowering can introduce error_mark_node in place of
4123 random local decls in types. */
4124 if (TYPE_MIN_VALUE (type) != error_mark_node)
4125 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
4126 if (TYPE_MAX_VALUE (type) != error_mark_node)
4127 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
4128 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4129 }
4130
4131 /* For array types hash their domain and the string flag. */
4132 if (TREE_CODE (type) == ARRAY_TYPE
4133 && TYPE_DOMAIN (type))
4134 {
4135 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4136 v = visit (TYPE_DOMAIN (type), state, v,
4137 sccstack, sccstate, sccstate_obstack);
4138 }
4139
4140 /* Recurse for aggregates with a single element type. */
4141 if (TREE_CODE (type) == ARRAY_TYPE
4142 || TREE_CODE (type) == COMPLEX_TYPE
4143 || TREE_CODE (type) == VECTOR_TYPE)
4144 v = visit (TREE_TYPE (type), state, v,
4145 sccstack, sccstate, sccstate_obstack);
4146
4147 /* Incorporate function return and argument types. */
4148 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4149 {
4150 unsigned na;
4151 tree p;
4152
4153 /* For method types also incorporate their parent class. */
4154 if (TREE_CODE (type) == METHOD_TYPE)
4155 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
4156 sccstack, sccstate, sccstate_obstack);
4157
4158 /* Check result and argument types. */
4159 v = visit (TREE_TYPE (type), state, v,
4160 sccstack, sccstate, sccstate_obstack);
4161 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4162 {
4163 v = visit (TREE_VALUE (p), state, v,
4164 sccstack, sccstate, sccstate_obstack);
4165 na++;
4166 }
4167
4168 v = iterative_hash_hashval_t (na, v);
4169 }
4170
4171 if (TREE_CODE (type) == RECORD_TYPE
4172 || TREE_CODE (type) == UNION_TYPE
4173 || TREE_CODE (type) == QUAL_UNION_TYPE)
4174 {
4175 unsigned nf;
4176 tree f;
4177
4178 v = iterative_hash_name (TYPE_NAME (type), v);
4179
4180 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4181 {
4182 v = iterative_hash_name (DECL_NAME (f), v);
4183 v = visit (TREE_TYPE (f), state, v,
4184 sccstack, sccstate, sccstate_obstack);
4185 nf++;
4186 }
4187
4188 v = iterative_hash_hashval_t (nf, v);
4189 }
4190
4191 /* Record hash for us. */
4192 state->u.hash = v;
4193
4194 /* See if we found an SCC. */
4195 if (state->low == state->dfsnum)
4196 {
4197 tree x;
4198 struct tree_int_map *m;
4199
4200 /* Pop off the SCC and set its hash values. */
4201 x = VEC_pop (tree, *sccstack);
4202 /* Optimize the common case of an SCC of size one. */
4203 if (x == type)
4204 {
4205 state->on_sccstack = false;
4206 m = ggc_alloc_cleared_tree_int_map ();
4207 m->base.from = x;
4208 m->to = v;
4209 slot = htab_find_slot (type_hash_cache, m, INSERT);
4210 gcc_assert (!*slot);
4211 *slot = (void *) m;
4212 }
4213 else
4214 {
4215 struct sccs *cstate;
4216 unsigned first, i, size, j;
4217 struct type_hash_pair *pairs;
4218 /* Pop off the SCC and build an array of type, hash pairs. */
4219 first = VEC_length (tree, *sccstack) - 1;
4220 while (VEC_index (tree, *sccstack, first) != type)
4221 --first;
4222 size = VEC_length (tree, *sccstack) - first + 1;
4223 pairs = XALLOCAVEC (struct type_hash_pair, size);
4224 i = 0;
4225 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4226 cstate->on_sccstack = false;
4227 pairs[i].type = x;
4228 pairs[i].hash = cstate->u.hash;
4229 do
4230 {
4231 x = VEC_pop (tree, *sccstack);
4232 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4233 cstate->on_sccstack = false;
4234 ++i;
4235 pairs[i].type = x;
4236 pairs[i].hash = cstate->u.hash;
4237 }
4238 while (x != type);
4239 gcc_assert (i + 1 == size);
4240 /* Sort the array of type, hash pairs so that when we mix in
4241 all members of the SCC the hash value becomes independent of
4242 the order in which we visited the SCC. Disregard hashes equal to
4243 the hash of the type we mix into because we cannot guarantee
4244 a stable sort for those across different TUs. */
4245 qsort (pairs, size, sizeof (struct type_hash_pair),
4246 type_hash_pair_compare);
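/* For each member mix in the hashes of all other SCC members whose
   hash differs from its own: first those sorted after its run of
   equal hashes, then those sorted before it.  Members with an equal
   hash are skipped because their relative qsort order is not stable
   across TUs. */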
4247 for (i = 0; i < size; ++i)
4248 {
4249 hashval_t hash;
4250 m = ggc_alloc_cleared_tree_int_map ();
4251 m->base.from = pairs[i].type;
4252 hash = pairs[i].hash;
4253 /* Skip same hashes. */
4254 for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
4255 ;
4256 for (; j < size; ++j)
4257 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4258 for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
4259 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4260 m->to = hash;
4261 if (pairs[i].type == type)
4262 v = hash;
4263 slot = htab_find_slot (type_hash_cache, m, INSERT);
4264 gcc_assert (!*slot);
4265 *slot = (void *) m;
4266 }
4267 }
4268 }
4269
4270 return iterative_hash_hashval_t (v, val);
4271 }
4272
4273
4274 /* Returns a hash value for P (assumed to be a type). The hash value
4275 is computed using some distinguishing features of the type. Note
4276 that we cannot use pointer hashing here as we may be dealing with
4277 two distinct instances of the same type.
4278
4279 This function should produce the same hash value for two compatible
4280 types according to gimple_types_compatible_p. */
4281
4282 static hashval_t
4283 gimple_type_hash (const void *p)
4284 {
4285 const_tree t = (const_tree) p;
4286 VEC(tree, heap) *sccstack = NULL;
4287 struct pointer_map_t *sccstate;
4288 struct obstack sccstate_obstack;
4289 hashval_t val;
4290 void **slot;
4291 struct tree_int_map m;
4292
4293 if (type_hash_cache == NULL)
4294 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4295 tree_int_map_eq, NULL);
4296
4297 m.base.from = CONST_CAST_TREE (t);
4298 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
4299 && *slot)
4300 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
4301
4302 /* Perform a DFS walk and pre-hash all reachable types. */
4303 next_dfs_num = 1;
4304 sccstate = pointer_map_create ();
4305 gcc_obstack_init (&sccstate_obstack);
4306 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
4307 &sccstack, sccstate, &sccstate_obstack);
4308 VEC_free (tree, heap, sccstack);
4309 pointer_map_destroy (sccstate);
4310 obstack_free (&sccstate_obstack, NULL);
4311
4312 return val;
4313 }
4314
4315 /* Return a hash value for gimple type TYPE combined with VAL.
4316
4317 The hash value returned is equal for types considered compatible
4318 by gimple_canonical_types_compatible_p. */
4319
4320 static hashval_t
4321 iterative_hash_canonical_type (tree type, hashval_t val)
4322 {
4323 hashval_t v;
4324 void **slot;
4325 struct tree_int_map *mp, m;
4326
4327 m.base.from = type;
4328 if ((slot = htab_find_slot (canonical_type_hash_cache, &m, NO_INSERT))
4329 && *slot)
4330 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
4331
4332 /* Combine a few common features of types so that types are grouped into
4333 smaller sets; when searching for existing matching types to merge,
4334 only existing types having the same features as the new type will be
4335 checked. */
4336 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
4337 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4338 v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
4339 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4340
4341 /* Incorporate common features of numerical types. */
4342 if (INTEGRAL_TYPE_P (type)
4343 || SCALAR_FLOAT_TYPE_P (type)
4344 || FIXED_POINT_TYPE_P (type)
4345 || TREE_CODE (type) == VECTOR_TYPE
4346 || TREE_CODE (type) == COMPLEX_TYPE
4347 || TREE_CODE (type) == OFFSET_TYPE
4348 || POINTER_TYPE_P (type))
4349 {
4350 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4351 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4352 }
4353
4354 /* For pointer and reference types, fold in information about the type
4355 pointed to but do not recurse to the pointed-to type. */
4356 if (POINTER_TYPE_P (type))
4357 {
4358 v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
4359 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
4360 v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
4361 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4362 }
4363
4364 /* For integer types hash the string flag and the sizetype flag. */
4365 if (TREE_CODE (type) == INTEGER_TYPE)
4366 {
4367 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4368 v = iterative_hash_hashval_t (TYPE_IS_SIZETYPE (type), v);
4369 }
4370
4371 /* For array types hash their domain and the string flag. */
4372 if (TREE_CODE (type) == ARRAY_TYPE
4373 && TYPE_DOMAIN (type))
4374 {
4375 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4376 v = iterative_hash_canonical_type (TYPE_DOMAIN (type), v);
4377 }
4378
4379 /* Recurse for aggregates with a single element type. */
4380 if (TREE_CODE (type) == ARRAY_TYPE
4381 || TREE_CODE (type) == COMPLEX_TYPE
4382 || TREE_CODE (type) == VECTOR_TYPE)
4383 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4384
4385 /* Incorporate function return and argument types. */
4386 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4387 {
4388 unsigned na;
4389 tree p;
4390
4391 /* For method types also incorporate their parent class. */
4392 if (TREE_CODE (type) == METHOD_TYPE)
4393 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
4394
4395 /* For result types allow mismatch in completeness. */
4396 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (type)))
4397 {
4398 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4399 v = iterative_hash_name
4400 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_TYPE (type))), v);
4401 }
4402 else
4403 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4404
4405 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4406 {
4407 /* For argument types allow mismatch in completeness. */
4408 if (RECORD_OR_UNION_TYPE_P (TREE_VALUE (p)))
4409 {
4410 v = iterative_hash_hashval_t (TREE_CODE (TREE_VALUE (p)), v);
4411 v = iterative_hash_name
4412 (TYPE_NAME (TYPE_MAIN_VARIANT (TREE_VALUE (p))), v);
4413 }
4414 else
4415 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
4416 na++;
4417 }
4418
4419 v = iterative_hash_hashval_t (na, v);
4420 }
4421
4422 if (TREE_CODE (type) == RECORD_TYPE
4423 || TREE_CODE (type) == UNION_TYPE
4424 || TREE_CODE (type) == QUAL_UNION_TYPE)
4425 {
4426 unsigned nf;
4427 tree f;
4428
4429 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4430 {
4431 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
4432 nf++;
4433 }
4434
4435 v = iterative_hash_hashval_t (nf, v);
4436 }
4437
4438 /* Cache the just computed hash value. Re-fetch the slot as the
4439 hash table may have been resized by the recursive calls above. */
4440 mp = ggc_alloc_cleared_tree_int_map ();
4441 mp->base.from = type;
4442 mp->to = v;
slot = htab_find_slot (canonical_type_hash_cache, mp, INSERT);
gcc_assert (!*slot);
*slot = (void *) mp;
4443
4444 return iterative_hash_hashval_t (v, val);
4445 }
4446
4447 static hashval_t
4448 gimple_canonical_type_hash (const void *p)
4449 {
4450 if (canonical_type_hash_cache == NULL)
4451 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4452 tree_int_map_eq, NULL);
4453
4454 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
4455 }
4456
4457
4458 /* Returns nonzero if P1 and P2 are equal. */
4459
4460 static int
4461 gimple_type_eq (const void *p1, const void *p2)
4462 {
4463 const_tree t1 = (const_tree) p1;
4464 const_tree t2 = (const_tree) p2;
4465 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4466 CONST_CAST_TREE (t2));
4467 }
4468
4469
4470 /* Worker for gimple_register_type.
4471 Register type T in the global type table gimple_types.
4472 When REGISTERING_MV is false first recurse for the main variant of T. */
4473
4474 static tree
4475 gimple_register_type_1 (tree t, bool registering_mv)
4476 {
4477 void **slot;
4478 gimple_type_leader_entry *leader;
4479 tree mv_leader;
4480
4481 /* If we registered this type before, return the cached result. */
4482 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
4483 if (leader->type == t)
4484 return leader->leader;
4485
4486 /* Always register the main variant first. This is important so we
4487 pick up the non-typedef variants as canonical; otherwise we would
4488 end up using typedef names for structure tags during comparison.
4489 It also makes sure that main variants will be merged to main variants.
4490 As we are operating on a possibly partially fixed up type graph
4491 do not bother to recurse more than once, otherwise we may end up
4492 walking in circles.
4493 If we are registering a main variant it will either remain its
4494 own main variant or it will be merged to something else in which
4495 case we do not care for the main variant leader. */
4496 if (!registering_mv
4497 && TYPE_MAIN_VARIANT (t) != t)
4498 mv_leader = gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
4499 else
4500 mv_leader = t;
4501
4502 slot = htab_find_slot (gimple_types, t, INSERT);
4503 if (*slot
4504 && *(tree *)slot != t)
4505 {
4506 tree new_type = (tree) *((tree *) slot);
4507
4508 /* Do not merge types with different addressability. */
4509 gcc_assert (TREE_ADDRESSABLE (t) == TREE_ADDRESSABLE (new_type));
4510
4511 /* If t is not its main variant then make t unreachable from its
4512 main variant list. Otherwise we'd queue up a lot of duplicates
4513 there. */
4514 if (t != TYPE_MAIN_VARIANT (t))
4515 {
4516 tree tem = TYPE_MAIN_VARIANT (t);
4517 while (tem && TYPE_NEXT_VARIANT (tem) != t)
4518 tem = TYPE_NEXT_VARIANT (tem);
4519 if (tem)
4520 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
4521 TYPE_NEXT_VARIANT (t) = NULL_TREE;
4522 }
4523
4524 /* If we are a pointer then remove us from the pointer-to or
4525 reference-to chain. Otherwise we'd queue up a lot of duplicates
4526 there. */
4527 if (TREE_CODE (t) == POINTER_TYPE)
4528 {
4529 if (TYPE_POINTER_TO (TREE_TYPE (t)) == t)
4530 TYPE_POINTER_TO (TREE_TYPE (t)) = TYPE_NEXT_PTR_TO (t);
4531 else
4532 {
4533 tree tem = TYPE_POINTER_TO (TREE_TYPE (t));
4534 while (tem && TYPE_NEXT_PTR_TO (tem) != t)
4535 tem = TYPE_NEXT_PTR_TO (tem);
4536 if (tem)
4537 TYPE_NEXT_PTR_TO (tem) = TYPE_NEXT_PTR_TO (t);
4538 }
4539 TYPE_NEXT_PTR_TO (t) = NULL_TREE;
4540 }
4541 else if (TREE_CODE (t) == REFERENCE_TYPE)
4542 {
4543 if (TYPE_REFERENCE_TO (TREE_TYPE (t)) == t)
4544 TYPE_REFERENCE_TO (TREE_TYPE (t)) = TYPE_NEXT_REF_TO (t);
4545 else
4546 {
4547 tree tem = TYPE_REFERENCE_TO (TREE_TYPE (t));
4548 while (tem && TYPE_NEXT_REF_TO (tem) != t)
4549 tem = TYPE_NEXT_REF_TO (tem);
4550 if (tem)
4551 TYPE_NEXT_REF_TO (tem) = TYPE_NEXT_REF_TO (t);
4552 }
4553 TYPE_NEXT_REF_TO (t) = NULL_TREE;
4554 }
4555
4556 leader->type = t;
4557 leader->leader = new_type;
4558 t = new_type;
4559 }
4560 else
4561 {
4562 leader->type = t;
4563 leader->leader = t;
4564 /* We're the type leader. Make our TYPE_MAIN_VARIANT valid. */
4565 if (TYPE_MAIN_VARIANT (t) != t
4566 && TYPE_MAIN_VARIANT (t) != mv_leader)
4567 {
4568 /* Remove us from our main variant list as we are not the variant
4569 leader and the variant leader will change. */
4570 tree tem = TYPE_MAIN_VARIANT (t);
4571 while (tem && TYPE_NEXT_VARIANT (tem) != t)
4572 tem = TYPE_NEXT_VARIANT (tem);
4573 if (tem)
4574 TYPE_NEXT_VARIANT (tem) = TYPE_NEXT_VARIANT (t);
4575 TYPE_NEXT_VARIANT (t) = NULL_TREE;
4576 /* Adjust our main variant. Linking us into its variant list
4577 will happen at fixup time. */
4578 TYPE_MAIN_VARIANT (t) = mv_leader;
4579 }
4580 *slot = (void *) t;
4581 }
4582
4583 return t;
4584 }
4585
4586 /* Register type T in the global type table gimple_types.
4587 If another type T', compatible with T, already existed in
4588 gimple_types then return T', otherwise return T. This is used by
4589 LTO to merge identical types read from different TUs. */
4590
4591 tree
4592 gimple_register_type (tree t)
4593 {
4594 gcc_assert (TYPE_P (t));
4595
4596 if (!gimple_type_leader)
4597 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
4598 (GIMPLE_TYPE_LEADER_SIZE);
4599
4600 if (gimple_types == NULL)
4601 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
4602
4603 return gimple_register_type_1 (t, false);
4604 }
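
/* A minimal usage sketch; read_type_from_stream is a hypothetical
   stand-in for the LTO stream reader, only the registration call
   below is real:

     tree t = read_type_from_stream (ib);
     t = gimple_register_type (t);

   After the call T denotes the leader type; compatible types read
   from other TUs map to the same tree. */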
4605
4606 /* The TYPE_CANONICAL merging machinery. It should closely resemble
4607 the middle-end types_compatible_p function. It needs to avoid
4608 claiming types are different for types that should be treated
4609 the same with respect to TBAA. Canonical types are also used
4610 for IL consistency checks via the useless_type_conversion_p
4611 predicate which does not handle all type kinds itself but falls
4612 back to pointer-comparison of TYPE_CANONICAL for aggregates
4613 for example. */
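
/* For example, 'const int' and 'int' must share a TYPE_CANONICAL
   (qualifiers are ignored below), and two structurally identical
   structs with different tags may also share one; erring towards
   equality is the safe direction for TBAA. */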
4614
4615 /* Return true iff T1 and T2 are structurally identical as far as
4616 TBAA is concerned. */
4617
4618 static bool
4619 gimple_canonical_types_compatible_p (tree t1, tree t2)
4620 {
4621 /* Before starting to set up the SCC machinery handle simple cases. */
4622
4623 /* Check first for the obvious case of pointer identity. */
4624 if (t1 == t2)
4625 return true;
4626
4627 /* Check that we have two types to compare. */
4628 if (t1 == NULL_TREE || t2 == NULL_TREE)
4629 return false;
4630
4631 /* If the types have been previously registered and found equal
4632 they still are. */
4633 if (TYPE_CANONICAL (t1)
4634 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
4635 return true;
4636
4637 /* Can't be the same type if the types don't have the same code. */
4638 if (TREE_CODE (t1) != TREE_CODE (t2))
4639 return false;
4640
4641 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
4642 return false;
4643
4644 /* Qualifiers do not matter for canonical type comparison purposes. */
4645
4646 /* Void types and nullptr types are always the same. */
4647 if (TREE_CODE (t1) == VOID_TYPE
4648 || TREE_CODE (t1) == NULLPTR_TYPE)
4649 return true;
4650
4651 /* Can't be the same type if they have different alignment or mode. */
4652 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
4653 || TYPE_MODE (t1) != TYPE_MODE (t2))
4654 return false;
4655
4656 /* Non-aggregate types can be handled cheaply. */
4657 if (INTEGRAL_TYPE_P (t1)
4658 || SCALAR_FLOAT_TYPE_P (t1)
4659 || FIXED_POINT_TYPE_P (t1)
4660 || TREE_CODE (t1) == VECTOR_TYPE
4661 || TREE_CODE (t1) == COMPLEX_TYPE
4662 || TREE_CODE (t1) == OFFSET_TYPE
4663 || POINTER_TYPE_P (t1))
4664 {
4665 /* Can't be the same type if they have different sign or precision. */
4666 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
4667 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
4668 return false;
4669
4670 if (TREE_CODE (t1) == INTEGER_TYPE
4671 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
4672 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
4673 return false;
4674
4675 /* For canonical type comparisons we do not want to build SCCs
4676 so we cannot compare pointed-to types. But we can, for now,
4677 require the same pointed-to type kind and match what
4678 useless_type_conversion_p would do. */
4679 if (POINTER_TYPE_P (t1))
4680 {
4681 /* If the two pointers have different ref-all attributes,
4682 they can't be the same type. */
4683 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
4684 return false;
4685
4686 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
4687 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
4688 return false;
4689
4690 if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
4691 return false;
4692
4693 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
4694 return false;
4695 }
4696
4697 /* Tail-recurse to components. */
4698 if (TREE_CODE (t1) == VECTOR_TYPE
4699 || TREE_CODE (t1) == COMPLEX_TYPE)
4700 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
4701 TREE_TYPE (t2));
4702
4703 return true;
4704 }
4705
4706 /* If their attributes are not the same they can't be the same type. */
4707 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
4708 return false;
4709
4710 /* Do type-specific comparisons. */
4711 switch (TREE_CODE (t1))
4712 {
4713 case ARRAY_TYPE:
4714 /* Array types are the same if the element types are the same and
4715 the number of elements is the same. */
4716 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
4717 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
4718 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
4719 return false;
4720 else
4721 {
4722 tree i1 = TYPE_DOMAIN (t1);
4723 tree i2 = TYPE_DOMAIN (t2);
4724
4725 /* For an incomplete external array, the type domain can be
4726 NULL_TREE. Check this condition also. */
4727 if (i1 == NULL_TREE && i2 == NULL_TREE)
4728 return true;
4729 else if (i1 == NULL_TREE || i2 == NULL_TREE)
4730 return false;
4731 /* If for a complete array type the possibly gimplified sizes
4732 are different the types are different. */
4733 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
4734 || (TYPE_SIZE (i1)
4735 && TYPE_SIZE (i2)
4736 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
4737 return false;
4738 else
4739 {
4740 tree min1 = TYPE_MIN_VALUE (i1);
4741 tree min2 = TYPE_MIN_VALUE (i2);
4742 tree max1 = TYPE_MAX_VALUE (i1);
4743 tree max2 = TYPE_MAX_VALUE (i2);
4744
4745 /* The minimum/maximum values have to be the same. */
4746 if ((min1 == min2
4747 || (min1 && min2
4748 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
4749 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
4750 || operand_equal_p (min1, min2, 0))))
4751 && (max1 == max2
4752 || (max1 && max2
4753 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
4754 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
4755 || operand_equal_p (max1, max2, 0)))))
4756 return true;
4757 else
4758 return false;
4759 }
4760 }
4761
4762 case METHOD_TYPE:
4763 /* Method types should belong to the same class. */
4764 if (!gimple_canonical_types_compatible_p
4765 (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2)))
4766 return false;
4767
4768 /* Fallthru */
4769
4770 case FUNCTION_TYPE:
4771 /* Function types are the same if the return type and arguments types
4772 are the same. */
4773 if (!gimple_compatible_complete_and_incomplete_subtype_p
4774 (TREE_TYPE (t1), TREE_TYPE (t2))
4775 && !gimple_canonical_types_compatible_p
4776 (TREE_TYPE (t1), TREE_TYPE (t2)))
4777 return false;
4778
4779 if (!comp_type_attributes (t1, t2))
4780 return false;
4781
4782 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
4783 return true;
4784 else
4785 {
4786 tree parms1, parms2;
4787
4788 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
4789 parms1 && parms2;
4790 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
4791 {
4792 if (!gimple_compatible_complete_and_incomplete_subtype_p
4793 (TREE_VALUE (parms1), TREE_VALUE (parms2))
4794 && !gimple_canonical_types_compatible_p
4795 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
4796 return false;
4797 }
4798
4799 if (parms1 || parms2)
4800 return false;
4801
4802 return true;
4803 }
4804
4805 case RECORD_TYPE:
4806 case UNION_TYPE:
4807 case QUAL_UNION_TYPE:
4808 {
4809 tree f1, f2;
4810
4811 /* For aggregate types, all the fields must be the same. */
4812 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
4813 f1 && f2;
4814 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
4815 {
4816 /* The fields must have the same name, offset and type. */
4817 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
4818 || !gimple_compare_field_offset (f1, f2)
4819 || !gimple_canonical_types_compatible_p
4820 (TREE_TYPE (f1), TREE_TYPE (f2)))
4821 return false;
4822 }
4823
4824 /* If one aggregate has more fields than the other, they
4825 are not the same. */
4826 if (f1 || f2)
4827 return false;
4828
4829 return true;
4830 }
4831
4832 default:
4833 gcc_unreachable ();
4834 }
4835 }
4836
4837
4838 /* Returns nonzero if P1 and P2 are equal. */
4839
4840 static int
4841 gimple_canonical_type_eq (const void *p1, const void *p2)
4842 {
4843 const_tree t1 = (const_tree) p1;
4844 const_tree t2 = (const_tree) p2;
4845 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
4846 CONST_CAST_TREE (t2));
4847 }
4848
4849 /* Compute and register the canonical type for T in the global
4850 canonical type table gimple_canonical_types. If a compatible
4851 canonical type already exists return it, otherwise register T and
4852 return it. This is used by LTO to set up TYPE_CANONICAL for TBAA. */
4853
4854 tree
4855 gimple_register_canonical_type (tree t)
4856 {
4857 void **slot;
4858 tree orig_t = t;
4859
4860 gcc_assert (TYPE_P (t));
4861
4862 if (TYPE_CANONICAL (t))
4863 return TYPE_CANONICAL (t);
4864
4865 /* Use the leader of our main variant for determining our canonical
4866 type. The main variant leader is a type that will always
4867 prevail. */
4868 t = gimple_register_type (TYPE_MAIN_VARIANT (t));
4869
4870 if (TYPE_CANONICAL (t))
4871 return TYPE_CANONICAL (t);
4872
4873 if (gimple_canonical_types == NULL)
4874 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
4875 gimple_canonical_type_eq, 0);
4876
4877 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
4878 if (*slot
4879 && *(tree *)slot != t)
4880 {
4881 tree new_type = (tree) *((tree *) slot);
4882
4883 TYPE_CANONICAL (t) = new_type;
4884 t = new_type;
4885 }
4886 else
4887 {
4888 TYPE_CANONICAL (t) = t;
4889 *slot = (void *) t;
4890 }
4891
4892 /* Also cache the canonical type in the non-leaders. */
4893 TYPE_CANONICAL (orig_t) = t;
4894
4895 return t;
4896 }
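
/* Usage sketch:

     tree ct = gimple_register_canonical_type (t);

   afterwards TYPE_CANONICAL (t) is set and alias analysis can rely
   on pointer comparison of the canonical types. */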
4897
4898
4899 /* Show statistics on references to the global type table gimple_types. */
4900
4901 void
4902 print_gimple_types_stats (void)
4903 {
4904 if (gimple_types)
4905 fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
4906 "%ld searches, %ld collisions (ratio: %f)\n",
4907 (long) htab_size (gimple_types),
4908 (long) htab_elements (gimple_types),
4909 (long) gimple_types->searches,
4910 (long) gimple_types->collisions,
4911 htab_collisions (gimple_types));
4912 else
4913 fprintf (stderr, "GIMPLE type table is empty\n");
4914 if (type_hash_cache)
4915 fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
4916 "%ld searches, %ld collisions (ratio: %f)\n",
4917 (long) htab_size (type_hash_cache),
4918 (long) htab_elements (type_hash_cache),
4919 (long) type_hash_cache->searches,
4920 (long) type_hash_cache->collisions,
4921 htab_collisions (type_hash_cache));
4922 else
4923 fprintf (stderr, "GIMPLE type hash table is empty\n");
4924 if (gimple_canonical_types)
4925 fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
4926 "%ld searches, %ld collisions (ratio: %f)\n",
4927 (long) htab_size (gimple_canonical_types),
4928 (long) htab_elements (gimple_canonical_types),
4929 (long) gimple_canonical_types->searches,
4930 (long) gimple_canonical_types->collisions,
4931 htab_collisions (gimple_canonical_types));
4932 else
4933 fprintf (stderr, "GIMPLE canonical type table is empty\n");
4934 if (canonical_type_hash_cache)
4935 fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
4936 "%ld searches, %ld collisions (ratio: %f)\n",
4937 (long) htab_size (canonical_type_hash_cache),
4938 (long) htab_elements (canonical_type_hash_cache),
4939 (long) canonical_type_hash_cache->searches,
4940 (long) canonical_type_hash_cache->collisions,
4941 htab_collisions (canonical_type_hash_cache));
4942 else
4943 fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
4944 }
4945
4946 /* Free the gimple type hashtables used for LTO type merging. */
4947
4948 void
4949 free_gimple_type_tables (void)
4950 {
4951 /* Last chance to print stats for the tables. */
4952 if (flag_lto_report)
4953 print_gimple_types_stats ();
4954
4955 if (gimple_types)
4956 {
4957 htab_delete (gimple_types);
4958 gimple_types = NULL;
4959 }
4960 if (gimple_canonical_types)
4961 {
4962 htab_delete (gimple_canonical_types);
4963 gimple_canonical_types = NULL;
4964 }
4965 if (type_hash_cache)
4966 {
4967 htab_delete (type_hash_cache);
4968 type_hash_cache = NULL;
4969 }
4970 if (canonical_type_hash_cache)
4971 {
4972 htab_delete (canonical_type_hash_cache);
4973 canonical_type_hash_cache = NULL;
4974 }
4975 if (type_pair_cache)
4976 {
4977 free (type_pair_cache);
4978 type_pair_cache = NULL;
4979 }
4980 gimple_type_leader = NULL;
4981 }
4982
4983
4984 /* Return a type the same as TYPE except unsigned or
4985 signed according to UNSIGNEDP. */
4986
4987 static tree
4988 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
4989 {
4990 tree type1;
4991
4992 type1 = TYPE_MAIN_VARIANT (type);
4993 if (type1 == signed_char_type_node
4994 || type1 == char_type_node
4995 || type1 == unsigned_char_type_node)
4996 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4997 if (type1 == integer_type_node || type1 == unsigned_type_node)
4998 return unsignedp ? unsigned_type_node : integer_type_node;
4999 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
5000 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
5001 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
5002 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
5003 if (type1 == long_long_integer_type_node
5004 || type1 == long_long_unsigned_type_node)
5005 return unsignedp
5006 ? long_long_unsigned_type_node
5007 : long_long_integer_type_node;
5008 if (int128_integer_type_node && (type1 == int128_integer_type_node || type1 == int128_unsigned_type_node))
5009 return unsignedp
5010 ? int128_unsigned_type_node
5011 : int128_integer_type_node;
5012 #if HOST_BITS_PER_WIDE_INT >= 64
5013 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
5014 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
5015 #endif
5016 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
5017 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
5018 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
5019 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
5020 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
5021 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
5022 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
5023 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
5024
5025 #define GIMPLE_FIXED_TYPES(NAME) \
5026 if (type1 == short_ ## NAME ## _type_node \
5027 || type1 == unsigned_short_ ## NAME ## _type_node) \
5028 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
5029 : short_ ## NAME ## _type_node; \
5030 if (type1 == NAME ## _type_node \
5031 || type1 == unsigned_ ## NAME ## _type_node) \
5032 return unsignedp ? unsigned_ ## NAME ## _type_node \
5033 : NAME ## _type_node; \
5034 if (type1 == long_ ## NAME ## _type_node \
5035 || type1 == unsigned_long_ ## NAME ## _type_node) \
5036 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
5037 : long_ ## NAME ## _type_node; \
5038 if (type1 == long_long_ ## NAME ## _type_node \
5039 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
5040 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
5041 : long_long_ ## NAME ## _type_node;
5042
5043 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
5044 if (type1 == NAME ## _type_node \
5045 || type1 == u ## NAME ## _type_node) \
5046 return unsignedp ? u ## NAME ## _type_node \
5047 : NAME ## _type_node;
5048
5049 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
5050 if (type1 == sat_ ## short_ ## NAME ## _type_node \
5051 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
5052 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
5053 : sat_ ## short_ ## NAME ## _type_node; \
5054 if (type1 == sat_ ## NAME ## _type_node \
5055 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
5056 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
5057 : sat_ ## NAME ## _type_node; \
5058 if (type1 == sat_ ## long_ ## NAME ## _type_node \
5059 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
5060 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
5061 : sat_ ## long_ ## NAME ## _type_node; \
5062 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
5063 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
5064 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
5065 : sat_ ## long_long_ ## NAME ## _type_node;
5066
5067 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
5068 if (type1 == sat_ ## NAME ## _type_node \
5069 || type1 == sat_ ## u ## NAME ## _type_node) \
5070 return unsignedp ? sat_ ## u ## NAME ## _type_node \
5071 : sat_ ## NAME ## _type_node;
5072
5073 GIMPLE_FIXED_TYPES (fract);
5074 GIMPLE_FIXED_TYPES_SAT (fract);
5075 GIMPLE_FIXED_TYPES (accum);
5076 GIMPLE_FIXED_TYPES_SAT (accum);
5077
5078 GIMPLE_FIXED_MODE_TYPES (qq);
5079 GIMPLE_FIXED_MODE_TYPES (hq);
5080 GIMPLE_FIXED_MODE_TYPES (sq);
5081 GIMPLE_FIXED_MODE_TYPES (dq);
5082 GIMPLE_FIXED_MODE_TYPES (tq);
5083 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
5084 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
5085 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
5086 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
5087 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
5088 GIMPLE_FIXED_MODE_TYPES (ha);
5089 GIMPLE_FIXED_MODE_TYPES (sa);
5090 GIMPLE_FIXED_MODE_TYPES (da);
5091 GIMPLE_FIXED_MODE_TYPES (ta);
5092 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
5093 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
5094 GIMPLE_FIXED_MODE_TYPES_SAT (da);
5095 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
5096
5097 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
5098 the precision; they have precision set to match their range, but
5099 may use a wider mode to match an ABI. If we change modes, we may
5100 wind up with bad conversions. For INTEGER_TYPEs in C, must check
5101 the precision as well, so as to yield correct results for
5102 bit-field types. C++ does not have these separate bit-field
5103 types, and producing a signed or unsigned variant of an
5104 ENUMERAL_TYPE may cause other problems as well. */
5105 if (!INTEGRAL_TYPE_P (type)
5106 || TYPE_UNSIGNED (type) == unsignedp)
5107 return type;
5108
5109 #define TYPE_OK(node) \
5110 (TYPE_MODE (type) == TYPE_MODE (node) \
5111 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
5112 if (TYPE_OK (signed_char_type_node))
5113 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
5114 if (TYPE_OK (integer_type_node))
5115 return unsignedp ? unsigned_type_node : integer_type_node;
5116 if (TYPE_OK (short_integer_type_node))
5117 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
5118 if (TYPE_OK (long_integer_type_node))
5119 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
5120 if (TYPE_OK (long_long_integer_type_node))
5121 return (unsignedp
5122 ? long_long_unsigned_type_node
5123 : long_long_integer_type_node);
5124 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
5125 return (unsignedp
5126 ? int128_unsigned_type_node
5127 : int128_integer_type_node);
5128
5129 #if HOST_BITS_PER_WIDE_INT >= 64
5130 if (TYPE_OK (intTI_type_node))
5131 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
5132 #endif
5133 if (TYPE_OK (intDI_type_node))
5134 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
5135 if (TYPE_OK (intSI_type_node))
5136 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
5137 if (TYPE_OK (intHI_type_node))
5138 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
5139 if (TYPE_OK (intQI_type_node))
5140 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
5141
5142 #undef GIMPLE_FIXED_TYPES
5143 #undef GIMPLE_FIXED_MODE_TYPES
5144 #undef GIMPLE_FIXED_TYPES_SAT
5145 #undef GIMPLE_FIXED_MODE_TYPES_SAT
5146 #undef TYPE_OK
5147
5148 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
5149 }
5150
5151
5152 /* Return an unsigned type the same as TYPE in other respects. */
5153
5154 tree
5155 gimple_unsigned_type (tree type)
5156 {
5157 return gimple_signed_or_unsigned_type (true, type);
5158 }
5159
5160
5161 /* Return a signed type the same as TYPE in other respects. */
5162
5163 tree
5164 gimple_signed_type (tree type)
5165 {
5166 return gimple_signed_or_unsigned_type (false, type);
5167 }
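
/* For instance, gimple_unsigned_type (long_integer_type_node) yields
   long_unsigned_type_node, while for a type with a nonstandard
   precision the helpers above fall back to
   build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp). */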
5168
5169
5170 /* Return the typed-based alias set for T, which may be an expression
5171 or a type. Return -1 if we don't do anything special. */
5172
5173 alias_set_type
5174 gimple_get_alias_set (tree t)
5175 {
5176 tree u;
5177
5178 /* Permit type-punning when accessing a union, provided the access
5179 is directly through the union. For example, this code does not
5180 permit taking the address of a union member and then storing
5181 through it. Even the type-punning allowed here is a GCC
5182 extension, albeit a common and useful one; the C standard says
5183 that such accesses have implementation-defined behavior. */
5184 for (u = t;
5185 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
5186 u = TREE_OPERAND (u, 0))
5187 if (TREE_CODE (u) == COMPONENT_REF
5188 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
5189 return 0;
5190
5191 /* That's all the expressions we handle specially. */
5192 if (!TYPE_P (t))
5193 return -1;
5194
5195 /* For convenience, follow the C standard when dealing with
5196 character types. Any object may be accessed via an lvalue that
5197 has character type. */
5198 if (t == char_type_node
5199 || t == signed_char_type_node
5200 || t == unsigned_char_type_node)
5201 return 0;
5202
5203 /* Allow aliasing between signed and unsigned variants of the same
5204 type. We treat the signed variant as canonical. */
5205 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
5206 {
5207 tree t1 = gimple_signed_type (t);
5208
5209 /* t1 == t can happen for boolean nodes which are always unsigned. */
5210 if (t1 != t)
5211 return get_alias_set (t1);
5212 }
5213
5214 return -1;
5215 }
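
/* Illustration of the union special case above (hypothetical C
   source):

     union U { int i; float f; } u;
     u.f = 0.0f;
     x = u.i;

   Both accesses are COMPONENT_REFs directly off the union, so both
   get alias set 0 and are considered to conflict.  Storing through
   'int *p = &u.i' would not receive this treatment. */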
5216
5217
5218 /* Data structure used to count the number of dereferences to PTR
5219 inside an expression. */
5220 struct count_ptr_d
5221 {
5222 tree ptr;
5223 unsigned num_stores;
5224 unsigned num_loads;
5225 };
5226
5227 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
5228 MEM_REF nodes that dereference the pointer passed in DATA. */
5229
5230 static tree
5231 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
5232 {
5233 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
5234 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
5235
5236 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
5237 pointer 'ptr' is *not* dereferenced, it is simply used to compute
5238 the address of 'fld' as 'ptr + offsetof(fld)'. */
5239 if (TREE_CODE (*tp) == ADDR_EXPR)
5240 {
5241 *walk_subtrees = 0;
5242 return NULL_TREE;
5243 }
5244
5245 if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
5246 {
5247 if (wi_p->is_lhs)
5248 count_p->num_stores++;
5249 else
5250 count_p->num_loads++;
5251 }
5252
5253 return NULL_TREE;
5254 }
5255
5256 /* Count the number of direct and indirect uses for pointer PTR in
5257 statement STMT. The number of direct uses is stored in
5258 *NUM_USES_P. Indirect references are counted separately depending
5259 on whether they are store or load operations. The counts are
5260 stored in *NUM_STORES_P and *NUM_LOADS_P. */
5261
5262 void
5263 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
5264 unsigned *num_loads_p, unsigned *num_stores_p)
5265 {
5266 ssa_op_iter i;
5267 tree use;
5268
5269 *num_uses_p = 0;
5270 *num_loads_p = 0;
5271 *num_stores_p = 0;
5272
5273 /* Find out the total number of uses of PTR in STMT. */
5274 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
5275 if (use == ptr)
5276 (*num_uses_p)++;
5277
5278 /* Now count the number of indirect references to PTR. This is
5279 truly awful, but we don't have much choice. There are no parent
5280 pointers inside memory references, so an expression like
5281 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
5282 find all the indirect and direct uses of x_1 inside. The only
5283 shortcut we can take is the fact that GIMPLE only allows memory
5284 dereferences inside the kinds of statements handled below. */
5285 if (is_gimple_assign (stmt)
5286 || gimple_code (stmt) == GIMPLE_RETURN
5287 || gimple_code (stmt) == GIMPLE_ASM
5288 || is_gimple_call (stmt))
5289 {
5290 struct walk_stmt_info wi;
5291 struct count_ptr_d count;
5292
5293 count.ptr = ptr;
5294 count.num_stores = 0;
5295 count.num_loads = 0;
5296
5297 memset (&wi, 0, sizeof (wi));
5298 wi.info = &count;
5299 walk_gimple_op (stmt, count_ptr_derefs, &wi);
5300
5301 *num_stores_p = count.num_stores;
5302 *num_loads_p = count.num_loads;
5303 }
5304
5305 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
5306 }
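
/* Example: for the statement '*x_1 = foo (x_1, *x_1)' this reports
   *NUM_USES_P == 3 (every appearance of x_1), *NUM_LOADS_P == 1
   (the right-hand side dereference) and *NUM_STORES_P == 1 (the
   left-hand side dereference). */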
5307
5308 /* From a tree operand OP return the base of a load or store operation
5309 or NULL_TREE if OP is not a load or a store. */
5310
5311 static tree
5312 get_base_loadstore (tree op)
5313 {
5314 while (handled_component_p (op))
5315 op = TREE_OPERAND (op, 0);
5316 if (DECL_P (op)
5317 || INDIRECT_REF_P (op)
5318 || TREE_CODE (op) == MEM_REF
5319 || TREE_CODE (op) == TARGET_MEM_REF)
5320 return op;
5321 return NULL_TREE;
5322 }
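
/* E.g. for 'a.b.c[i]' the base is the decl 'a'; for '(*p).x' it is
   the MEM_REF dereferencing 'p'; for a bare SSA name or a constant
   NULL_TREE is returned. */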
5323
5324 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
5325 VISIT_ADDR if non-NULL on loads, stores and address-taken operands,
5326 passing the STMT, the base of the operand and DATA to it. The base
5327 will be either a decl, an indirect reference (including TARGET_MEM_REF)
5328 or the argument of an address expression.
5329 Returns the results of these callbacks or'ed. */
5330
5331 bool
5332 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
5333 bool (*visit_load)(gimple, tree, void *),
5334 bool (*visit_store)(gimple, tree, void *),
5335 bool (*visit_addr)(gimple, tree, void *))
5336 {
5337 bool ret = false;
5338 unsigned i;
5339 if (gimple_assign_single_p (stmt))
5340 {
5341 tree lhs, rhs;
5342 if (visit_store)
5343 {
5344 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
5345 if (lhs)
5346 ret |= visit_store (stmt, lhs, data);
5347 }
5348 rhs = gimple_assign_rhs1 (stmt);
5349 while (handled_component_p (rhs))
5350 rhs = TREE_OPERAND (rhs, 0);
5351 if (visit_addr)
5352 {
5353 if (TREE_CODE (rhs) == ADDR_EXPR)
5354 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5355 else if (TREE_CODE (rhs) == TARGET_MEM_REF
5356 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
5357 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
5358 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
5359 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
5360 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
5361 0), data);
5362 lhs = gimple_assign_lhs (stmt);
5363 if (TREE_CODE (lhs) == TARGET_MEM_REF
5364 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
5365 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
5366 }
5367 if (visit_load)
5368 {
5369 rhs = get_base_loadstore (rhs);
5370 if (rhs)
5371 ret |= visit_load (stmt, rhs, data);
5372 }
5373 }
5374 else if (visit_addr
5375 && (is_gimple_assign (stmt)
5376 || gimple_code (stmt) == GIMPLE_COND))
5377 {
5378 for (i = 0; i < gimple_num_ops (stmt); ++i)
5379 if (gimple_op (stmt, i)
5380 && TREE_CODE (gimple_op (stmt, i)) == ADDR_EXPR)
5381 ret |= visit_addr (stmt, TREE_OPERAND (gimple_op (stmt, i), 0), data);
5382 }
5383 else if (is_gimple_call (stmt))
5384 {
5385 if (visit_store)
5386 {
5387 tree lhs = gimple_call_lhs (stmt);
5388 if (lhs)
5389 {
5390 lhs = get_base_loadstore (lhs);
5391 if (lhs)
5392 ret |= visit_store (stmt, lhs, data);
5393 }
5394 }
5395 if (visit_load || visit_addr)
5396 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5397 {
5398 tree rhs = gimple_call_arg (stmt, i);
5399 if (visit_addr
5400 && TREE_CODE (rhs) == ADDR_EXPR)
5401 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5402 else if (visit_load)
5403 {
5404 rhs = get_base_loadstore (rhs);
5405 if (rhs)
5406 ret |= visit_load (stmt, rhs, data);
5407 }
5408 }
5409 if (visit_addr
5410 && gimple_call_chain (stmt)
5411 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
5412 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
5413 data);
5414 if (visit_addr
5415 && gimple_call_return_slot_opt_p (stmt)
5416 && gimple_call_lhs (stmt) != NULL_TREE
5417 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
5418 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
5419 }
5420 else if (gimple_code (stmt) == GIMPLE_ASM)
5421 {
5422 unsigned noutputs;
5423 const char *constraint;
5424 const char **oconstraints;
5425 bool allows_mem, allows_reg, is_inout;
5426 noutputs = gimple_asm_noutputs (stmt);
5427 oconstraints = XALLOCAVEC (const char *, noutputs);
5428 if (visit_store || visit_addr)
5429 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
5430 {
5431 tree link = gimple_asm_output_op (stmt, i);
5432 tree op = get_base_loadstore (TREE_VALUE (link));
5433 if (op && visit_store)
5434 ret |= visit_store (stmt, op, data);
5435 if (visit_addr)
5436 {
5437 constraint = TREE_STRING_POINTER
5438 (TREE_VALUE (TREE_PURPOSE (link)));
5439 oconstraints[i] = constraint;
5440 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5441 &allows_reg, &is_inout);
5442 if (op && !allows_reg && allows_mem)
5443 ret |= visit_addr (stmt, op, data);
5444 }
5445 }
5446 if (visit_load || visit_addr)
5447 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
5448 {
5449 tree link = gimple_asm_input_op (stmt, i);
5450 tree op = TREE_VALUE (link);
5451 if (visit_addr
5452 && TREE_CODE (op) == ADDR_EXPR)
5453 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5454 else if (visit_load || visit_addr)
5455 {
5456 op = get_base_loadstore (op);
5457 if (op)
5458 {
5459 if (visit_load)
5460 ret |= visit_load (stmt, op, data);
5461 if (visit_addr)
5462 {
5463 constraint = TREE_STRING_POINTER
5464 (TREE_VALUE (TREE_PURPOSE (link)));
5465 parse_input_constraint (&constraint, 0, 0, noutputs,
5466 0, oconstraints,
5467 &allows_mem, &allows_reg);
5468 if (!allows_reg && allows_mem)
5469 ret |= visit_addr (stmt, op, data);
5470 }
5471 }
5472 }
5473 }
5474 }
5475 else if (gimple_code (stmt) == GIMPLE_RETURN)
5476 {
5477 tree op = gimple_return_retval (stmt);
5478 if (op)
5479 {
5480 if (visit_addr
5481 && TREE_CODE (op) == ADDR_EXPR)
5482 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5483 else if (visit_load)
5484 {
5485 op = get_base_loadstore (op);
5486 if (op)
5487 ret |= visit_load (stmt, op, data);
5488 }
5489 }
5490 }
5491 else if (visit_addr
5492 && gimple_code (stmt) == GIMPLE_PHI)
5493 {
5494 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
5495 {
5496 tree op = PHI_ARG_DEF (stmt, i);
5497 if (TREE_CODE (op) == ADDR_EXPR)
5498 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5499 }
5500 }
5501
5502 return ret;
5503 }
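
/* A minimal visitor sketch (hypothetical callback, in the style of
   gimple_ior_addresses_taken_1 below):

     static bool
     note_load (gimple stmt ATTRIBUTE_UNUSED, tree base, void *data)
     {
       bitmap loaded = (bitmap) data;
       if (DECL_P (base))
         bitmap_set_bit (loaded, DECL_UID (base));
       return false;
     }

   invoked as walk_stmt_load_store_addr_ops (stmt, loaded, note_load,
   NULL, NULL). */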
5504
5505 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
5506 should make a faster clone for this case. */
5507
5508 bool
5509 walk_stmt_load_store_ops (gimple stmt, void *data,
5510 bool (*visit_load)(gimple, tree, void *),
5511 bool (*visit_store)(gimple, tree, void *))
5512 {
5513 return walk_stmt_load_store_addr_ops (stmt, data,
5514 visit_load, visit_store, NULL);
5515 }
5516
5517 /* Helper for gimple_ior_addresses_taken_1. */
5518
5519 static bool
5520 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
5521 tree addr, void *data)
5522 {
5523 bitmap addresses_taken = (bitmap)data;
5524 addr = get_base_address (addr);
5525 if (addr
5526 && DECL_P (addr))
5527 {
5528 bitmap_set_bit (addresses_taken, DECL_UID (addr));
5529 return true;
5530 }
5531 return false;
5532 }
5533
5534 /* Set the bit for the uid of all decls that have their address taken
5535 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
5536 were any in this stmt. */
5537
5538 bool
5539 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
5540 {
5541 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
5542 gimple_ior_addresses_taken_1);
5543 }
5544
5545
5546 /* Return a printable name for symbol DECL. */
5547
5548 const char *
5549 gimple_decl_printable_name (tree decl, int verbosity)
5550 {
5551 if (!DECL_NAME (decl))
5552 return NULL;
5553
5554 if (DECL_ASSEMBLER_NAME_SET_P (decl))
5555 {
5556 const char *str, *mangled_str;
5557 int dmgl_opts = DMGL_NO_OPTS;
5558
5559 if (verbosity >= 2)
5560 {
5561 dmgl_opts = DMGL_VERBOSE
5562 | DMGL_ANSI
5563 | DMGL_GNU_V3
5564 | DMGL_RET_POSTFIX;
5565 if (TREE_CODE (decl) == FUNCTION_DECL)
5566 dmgl_opts |= DMGL_PARAMS;
5567 }
5568
5569 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5570 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
5571 return (str) ? str : mangled_str;
5572 }
5573
5574 return IDENTIFIER_POINTER (DECL_NAME (decl));
5575 }
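
/* For example, with VERBOSITY >= 2 a C++ decl whose assembler name
   is "_Z3fooi" is printed as "foo(int)"; if demangling fails the raw
   mangled string is returned instead. */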
5576
5577 /* Return true when STMT is a call to the builtin function CODE. */
5578
5579 bool
5580 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
5581 {
5582 tree fndecl;
5583 return (is_gimple_call (stmt)
5584 && (fndecl = gimple_call_fndecl (stmt)) != NULL
5585 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5586 && DECL_FUNCTION_CODE (fndecl) == code);
5587 }
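
/* For example, to recognize a memcpy call and fetch its length
   argument one can write

     if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY))
       len = gimple_call_arg (stmt, 2);

   which only matches calls whose fndecl is the normal builtin. */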
5588
5589 /* Return true if STMT clobbers memory. STMT is required to be a
5590 GIMPLE_ASM. */
5591
5592 bool
5593 gimple_asm_clobbers_memory_p (const_gimple stmt)
5594 {
5595 unsigned i;
5596
5597 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
5598 {
5599 tree op = gimple_asm_clobber_op (stmt, i);
5600 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
5601 return true;
5602 }
5603
5604 return false;
5605 }
5606 #include "gt-gimple.h"