/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;
/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   1-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
  (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
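
/* For illustration only (a sketch, not used by the build): for a tuple
   structure that ends in a one-element `tree op[1]' trailing array,
   such as gimple_statement_with_ops, the table entry above evaluates to

     sizeof (struct gimple_statement_with_ops) - sizeof (tree)

   so that gimple_ops can find the first operand slot at that offset.  */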

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}
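
/* A worked example of the size computation above: allocating a
   GIMPLE_ASSIGN with 3 operands reserves

     gimple_size (GIMPLE_ASSIGN) + sizeof (tree) * (3 - 1)

   bytes, since the base structure already contains one operand slot
   (see gimple_ops_offset_ above).  */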

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}


/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}
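
/* A note on the "nargs + 3" above (an explanatory sketch): a GIMPLE_CALL's
   operand vector is laid out with op[0] holding the LHS, op[1] the callee
   (set via gimple_set_op above), op[2] the static chain, and the NARGS
   arguments in op[3] onward, which the gimple_call_arg accessors index.  */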


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
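
/* Example usage (a sketch; DST, SRC and LEN are hypothetical operand
   trees that must already be valid GIMPLE values):

     tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCPY);
     gimple call = gimple_build_call (fndecl, 3, dst, src, len);

   The variadic form above and gimple_build_call_vec/_valist differ only
   in how the NARGS arguments are supplied.  */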


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}


/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}


/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
{
  unsigned i, nargs;
  gimple call;

  nargs = VEC_length (tree, args);
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
                         tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
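
/* For example, EXPR = a_1 + b_2 (a GIMPLE_BINARY_RHS) extracts PLUS_EXPR
   into *SUBCODE_P with a_1 and b_2 in *OP1_P and *OP2_P and NULL_TREE in
   *OP3_P, while a bare EXPR = x_3 is GIMPLE_SINGLE_RHS and is stored
   whole in *OP1_P.  */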


/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side, which can be a unary or binary tree
   expression.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
                                            PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1 and OP2.  If OP2 is NULL then SUBCODE must be of class
   GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
                                  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
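
/* Example usage (a sketch with hypothetical trees X, A and B): appending
   `x = a + b' to SEQ during lowering could be written as

     gimplify_assign (x, build2 (PLUS_EXPR, TREE_TYPE (x), a, b), &seq);

   The temporary MODIFY_EXPR is freed once gimplify_and_add has emitted
   the equivalent statements into SEQ.  */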


/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
                   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
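
/* Example: gimple_build_cond (LT_EXPR, a_1, b_2, lab_t, lab_f) represents
   `if (a_1 < b_2) goto lab_t; else goto lab_f;', using the 4 operand
   slots allocated above for the two comparison operands and the two
   label trees.  */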


/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}
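
/* The canonicalizations above mean, for example, that COND = !x_1 comes
   back as the triple (EQ_EXPR, x_1, 0) and a bare COND = x_2 as
   (NE_EXPR, x_2, 0), so every GIMPLE_COND built from these operands
   carries a genuine comparison code.  */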


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
                             ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.
   The operand counts are taken from the lengths of these vectors.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
                      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers),
                          VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}
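
/* Example usage (a sketch): since VEC_length of a NULL vector is zero,
   an operand-free compiler barrier could be built as

     gimple s = gimple_build_asm_vec ("", NULL, NULL, NULL, NULL);

   with all four operand vectors omitted.  */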

/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_EH_ELSE statement.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
                  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
                                    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}
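
/* Example: with DEFAULT_LABEL non-NULL and ARGS = { case_l1, case_l2 }
   (hypothetical CASE_LABEL_EXPRs), the tuple built above allocates
   1 + 1 + 2 operands and lays them out as

     op[0] = INDEX, op[1] = DEFAULT_LABEL, op[2] = case_l1, op[3] = case_l2

   which is the layout the gimple_switch_label accessors assume.  */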

/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
                                     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reduction, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG is the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
                           tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG is the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private,
   firstprivate, copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_TRANSACTION statement.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  tree_code_name[subcode],
                  gimple_code_name[gimple_code (gs)],
                  gs->gsbase.subcode > 0
                    ? tree_code_name[gs->gsbase.subcode]
                    : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = ggc_alloc_cleared_gimple_seq_d ();
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}
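
/* Together, gimple_seq_alloc and gimple_seq_free implement a simple LIFO
   free list threaded through the `next_free' field: freeing S1 and then
   S2 leaves gimple_seq_cache -> S2 -> S1, and the next two allocations
   return S2 and S1 without going back to the GC allocator.  */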


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}


/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
        && !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;

          return wi->removed_stmt ? NULL : gsi_stmt (gsi);
        }

      if (!wi->removed_stmt)
        gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
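
/* A minimal sketch of driving this walker (callback name hypothetical):

     static tree
     stop_at_label (gimple_stmt_iterator *gsi, bool *handled_ops,
                    struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
     {
       *handled_ops = true;
       return gimple_code (gsi_stmt (*gsi)) == GIMPLE_LABEL
              ? error_mark_node : NULL_TREE;
     }

     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     gimple label_stmt = walk_gimple_seq (seq, stop_at_label, NULL, &wi);

   Returning any non-NULL tree from the callback stops the walk; the
   statement that produced it is returned as described above.  */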


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
                               &is_inout);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
      if (wi)
        {
          wi->val_only = (allows_reg || !allows_mem);
          /* Although input "m" is not really an LHS, we need an lvalue.  */
          wi->is_lhs = !wi->val_only;
        }
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

      walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
        {
          tree lhs = gimple_assign_lhs (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
              || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
        }

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
         may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS has more than 1 operand, it is not appropriate
             for the memory.
             ??? An lhs always requires an lvalue; checking the val_only flag
             does not make any sense, so we should be able to avoid computing
             it here.  */
          tree rhs1 = gimple_assign_rhs1 (stmt);
          wi->val_only = !(is_gimple_mem_rhs (rhs1)
                           || TREE_CODE (rhs1) == CONSTRUCTOR)
                         || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          if (wi)
            wi->val_only
              = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (gimple_call_lhs (stmt))
        {
          if (wi)
            {
              wi->is_lhs = true;
              wi->val_only
                = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
            }

          ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
          if (ret)
            return ret;
        }

      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
        }
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
        input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
        return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      ret = walk_gimple_seq (gimple_eh_else_n_body (stmt),
                             callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      ret = walk_gimple_seq (gimple_eh_else_e_body (stmt),
                             callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq (gimple_transaction_body (stmt),
                             callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}


/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const_gimple c1, const_gimple c2)
{
  if (gimple_call_internal_p (c1))
    return (gimple_call_internal_p (c2)
            && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
  else
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
            || (gimple_call_fndecl (c1)
                && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (gimple_call_internal_p (stmt))
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}

/* Return the "fn spec" string for call STMT.  */

static tree
gimple_call_fnspec (const_gimple stmt)
{
  tree type, attr;

  type = gimple_call_fntype (stmt);
  if (!type)
    return NULL_TREE;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return NULL_TREE;

  return TREE_VALUE (TREE_VALUE (attr));
}

/* Detects argument flags for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree attr = gimple_call_fnspec (stmt);

  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}

/* Detects return flags for the call STMT.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree attr;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
2058
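/* Worked example, illustrative only: given a call STMT whose function
   type carries the "fn spec" attribute string ".rW", the leading '.'
   encodes no return flags and the remaining characters describe the
   arguments in order:

     gimple_call_return_flags (stmt)    returns 0
     gimple_call_arg_flags (stmt, 0)    returns EAF_NOCLOBBER | EAF_NOESCAPE
     gimple_call_arg_flags (stmt, 1)    returns EAF_DIRECT | EAF_NOESCAPE

   A leading 'm' instead would yield ERF_NOALIAS, and '1' through '4'
   would mark the call as returning the corresponding argument. */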
2059
2060 /* Return true if GS is a copy assignment. */
2061
2062 bool
2063 gimple_assign_copy_p (gimple gs)
2064 {
2065 return (gimple_assign_single_p (gs)
2066 && is_gimple_val (gimple_op (gs, 1)));
2067 }
2068
2069
2070 /* Return true if GS is an SSA_NAME copy assignment. */
2071
2072 bool
2073 gimple_assign_ssa_name_copy_p (gimple gs)
2074 {
2075 return (gimple_assign_single_p (gs)
2076 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
2077 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
2078 }
2079
2080
2081 /* Return true if GS is an assignment with a unary RHS, but the
2082 operator has no effect on the assigned value. The logic is adapted
2083 from STRIP_NOPS. This predicate is intended to be used in tuplifying
2084 instances in which STRIP_NOPS was previously applied to the RHS of
2085 an assignment.
2086
2087 NOTE: In the use cases that led to the creation of this function
2088 and of gimple_assign_single_p, it is typical to test for either
2089 condition and to proceed in the same manner. In each case, the
2090 assigned value is represented by the single RHS operand of the
2091 assignment. I suspect there may be cases where gimple_assign_copy_p,
2092 gimple_assign_single_p, or equivalent logic is used where a similar
2093 treatment of unary NOPs is appropriate. */
2094
2095 bool
2096 gimple_assign_unary_nop_p (gimple gs)
2097 {
2098 return (is_gimple_assign (gs)
2099 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
2100 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
2101 && gimple_assign_rhs1 (gs) != error_mark_node
2102 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
2103 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
2104 }
2105
2106 /* Set BB to be the basic block holding STMT. */
2107
2108 void
2109 gimple_set_bb (gimple stmt, basic_block bb)
2110 {
2111 stmt->gsbase.bb = bb;
2112
2113 /* If the statement is a label, add the label to the block-to-labels map
2114 so that we can speed up edge creation for GIMPLE_GOTOs. */
2115 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2116 {
2117 tree t;
2118 int uid;
2119
2120 t = gimple_label_label (stmt);
2121 uid = LABEL_DECL_UID (t);
2122 if (uid == -1)
2123 {
2124 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2125 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2126 if (old_len <= (unsigned) uid)
2127 {
2128 unsigned new_len = 3 * uid / 2 + 1;
2129
2130 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2131 new_len);
2132 }
2133 }
2134
2135 VEC_replace (basic_block, label_to_block_map, uid, bb);
2136 }
2137 }
2138
2139
2140 /* Modify the RHS of the assignment pointed-to by GSI using the
2141 operands in the expression tree EXPR.
2142
2143 NOTE: The statement pointed-to by GSI may be reallocated if it
2144 did not have enough operand slots.
2145
2146 This function is useful to convert an existing tree expression into
2147 the flat representation used for the RHS of a GIMPLE assignment.
2148 It will reallocate memory as needed to expand or shrink the number
2149 of operand slots needed to represent EXPR.
2150
2151 NOTE: If you find yourself building a tree and then calling this
2152 function, you are most certainly doing it the slow way. It is much
2153 better to build a new assignment or to use the function
2154 gimple_assign_set_rhs_with_ops, which does not require an
2155 expression tree to be built. */
2156
2157 void
2158 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2159 {
2160 enum tree_code subcode;
2161 tree op1, op2, op3;
2162
2163 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2164 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
2165 }
2166
2167
2168 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2169 operands OP1, OP2 and OP3.
2170
2171 NOTE: The statement pointed-to by GSI may be reallocated if it
2172 did not have enough operand slots. */
2173
2174 void
2175 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2176 tree op1, tree op2, tree op3)
2177 {
2178 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2179 gimple stmt = gsi_stmt (*gsi);
2180
2181 /* If the new CODE needs more operands, allocate a new statement. */
2182 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2183 {
2184 tree lhs = gimple_assign_lhs (stmt);
2185 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2186 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2187 gsi_replace (gsi, new_stmt, true);
2188 stmt = new_stmt;
2189
2190 /* The LHS needs to be reset as this also changes the SSA name
2191 on the LHS. */
2192 gimple_assign_set_lhs (stmt, lhs);
2193 }
2194
2195 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2196 gimple_set_subcode (stmt, code);
2197 gimple_assign_set_rhs1 (stmt, op1);
2198 if (new_rhs_ops > 1)
2199 gimple_assign_set_rhs2 (stmt, op2);
2200 if (new_rhs_ops > 2)
2201 gimple_assign_set_rhs3 (stmt, op3);
2202 }
2203
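/* Usage sketch, illustrative only (A, B and C are hypothetical GIMPLE
   values): rewrite the assignment at GSI into "lhs = a * b + c" using
   the ternary FMA_EXPR. The statement is reallocated if it lacks the
   three RHS operand slots, and update_stmt then rebuilds the SSA
   operands:

     gimple_assign_set_rhs_with_ops_1 (gsi, FMA_EXPR, a, b, c);
     update_stmt (gsi_stmt (*gsi));  */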
2204
2205 /* Return the LHS of a statement that performs an assignment,
2206 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2207 for a call to a function that returns no value, or for a
2208 statement other than an assignment or a call. */
2209
2210 tree
2211 gimple_get_lhs (const_gimple stmt)
2212 {
2213 enum gimple_code code = gimple_code (stmt);
2214
2215 if (code == GIMPLE_ASSIGN)
2216 return gimple_assign_lhs (stmt);
2217 else if (code == GIMPLE_CALL)
2218 return gimple_call_lhs (stmt);
2219 else
2220 return NULL_TREE;
2221 }
2222
2223
2224 /* Set the LHS of a statement that performs an assignment,
2225 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2226
2227 void
2228 gimple_set_lhs (gimple stmt, tree lhs)
2229 {
2230 enum gimple_code code = gimple_code (stmt);
2231
2232 if (code == GIMPLE_ASSIGN)
2233 gimple_assign_set_lhs (stmt, lhs);
2234 else if (code == GIMPLE_CALL)
2235 gimple_call_set_lhs (stmt, lhs);
2236 else
2237 gcc_unreachable();
2238 }
2239
2240 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2241 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2242 expression with a different value.
2243
2244 This will update any annotations (say debug bind stmts) referring
2245 to the original LHS, so that they use the RHS instead. This is
2246 done even if NLHS and LHS are the same, for it is understood that
2247 the RHS will be modified afterwards, and NLHS will not be assigned
2248 an equivalent value.
2249
2250 Adjusting any non-annotation uses of the LHS, if needed, is a
2251 responsibility of the caller.
2252
2253 The effect of this call should be pretty much the same as that of
2254 inserting a copy of STMT before STMT, and then removing the
2255    original stmt, at which time gsi_remove() would have updated
2256 annotations, but using this function saves all the inserting,
2257 copying and removing. */
2258
2259 void
2260 gimple_replace_lhs (gimple stmt, tree nlhs)
2261 {
2262 if (MAY_HAVE_DEBUG_STMTS)
2263 {
2264 tree lhs = gimple_get_lhs (stmt);
2265
2266 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2267
2268 insert_debug_temp_for_var_def (NULL, lhs);
2269 }
2270
2271 gimple_set_lhs (stmt, nlhs);
2272 }
2273
2274 /* Return a deep copy of statement STMT. All the operands from STMT
2275 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2276 and VUSE operand arrays are set to empty in the new copy. */
2277
2278 gimple
2279 gimple_copy (gimple stmt)
2280 {
2281 enum gimple_code code = gimple_code (stmt);
2282 unsigned num_ops = gimple_num_ops (stmt);
2283 gimple copy = gimple_alloc (code, num_ops);
2284 unsigned i;
2285
2286 /* Shallow copy all the fields from STMT. */
2287 memcpy (copy, stmt, gimple_size (code));
2288
2289 /* If STMT has sub-statements, deep-copy them as well. */
2290 if (gimple_has_substatements (stmt))
2291 {
2292 gimple_seq new_seq;
2293 tree t;
2294
2295 switch (gimple_code (stmt))
2296 {
2297 case GIMPLE_BIND:
2298 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2299 gimple_bind_set_body (copy, new_seq);
2300 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2301 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2302 break;
2303
2304 case GIMPLE_CATCH:
2305 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2306 gimple_catch_set_handler (copy, new_seq);
2307 t = unshare_expr (gimple_catch_types (stmt));
2308 gimple_catch_set_types (copy, t);
2309 break;
2310
2311 case GIMPLE_EH_FILTER:
2312 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2313 gimple_eh_filter_set_failure (copy, new_seq);
2314 t = unshare_expr (gimple_eh_filter_types (stmt));
2315 gimple_eh_filter_set_types (copy, t);
2316 break;
2317
2318 case GIMPLE_EH_ELSE:
2319 new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
2320 gimple_eh_else_set_n_body (copy, new_seq);
2321 new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
2322 gimple_eh_else_set_e_body (copy, new_seq);
2323 break;
2324
2325 case GIMPLE_TRY:
2326 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2327 gimple_try_set_eval (copy, new_seq);
2328 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2329 gimple_try_set_cleanup (copy, new_seq);
2330 break;
2331
2332 case GIMPLE_OMP_FOR:
2333 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2334 gimple_omp_for_set_pre_body (copy, new_seq);
2335 t = unshare_expr (gimple_omp_for_clauses (stmt));
2336 gimple_omp_for_set_clauses (copy, t);
2337 copy->gimple_omp_for.iter
2338 = ggc_alloc_vec_gimple_omp_for_iter
2339 (gimple_omp_for_collapse (stmt));
2340 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2341 {
2342 gimple_omp_for_set_cond (copy, i,
2343 gimple_omp_for_cond (stmt, i));
2344 gimple_omp_for_set_index (copy, i,
2345 gimple_omp_for_index (stmt, i));
2346 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2347 gimple_omp_for_set_initial (copy, i, t);
2348 t = unshare_expr (gimple_omp_for_final (stmt, i));
2349 gimple_omp_for_set_final (copy, i, t);
2350 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2351 gimple_omp_for_set_incr (copy, i, t);
2352 }
2353 goto copy_omp_body;
2354
2355 case GIMPLE_OMP_PARALLEL:
2356 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2357 gimple_omp_parallel_set_clauses (copy, t);
2358 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2359 gimple_omp_parallel_set_child_fn (copy, t);
2360 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2361 gimple_omp_parallel_set_data_arg (copy, t);
2362 goto copy_omp_body;
2363
2364 case GIMPLE_OMP_TASK:
2365 t = unshare_expr (gimple_omp_task_clauses (stmt));
2366 gimple_omp_task_set_clauses (copy, t);
2367 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2368 gimple_omp_task_set_child_fn (copy, t);
2369 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2370 gimple_omp_task_set_data_arg (copy, t);
2371 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2372 gimple_omp_task_set_copy_fn (copy, t);
2373 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2374 gimple_omp_task_set_arg_size (copy, t);
2375 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2376 gimple_omp_task_set_arg_align (copy, t);
2377 goto copy_omp_body;
2378
2379 case GIMPLE_OMP_CRITICAL:
2380 t = unshare_expr (gimple_omp_critical_name (stmt));
2381 gimple_omp_critical_set_name (copy, t);
2382 goto copy_omp_body;
2383
2384 case GIMPLE_OMP_SECTIONS:
2385 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2386 gimple_omp_sections_set_clauses (copy, t);
2387 t = unshare_expr (gimple_omp_sections_control (stmt));
2388 gimple_omp_sections_set_control (copy, t);
2389 /* FALLTHRU */
2390
2391 case GIMPLE_OMP_SINGLE:
2392 case GIMPLE_OMP_SECTION:
2393 case GIMPLE_OMP_MASTER:
2394 case GIMPLE_OMP_ORDERED:
2395 copy_omp_body:
2396 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2397 gimple_omp_set_body (copy, new_seq);
2398 break;
2399
2400 case GIMPLE_TRANSACTION:
2401 new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
2402 gimple_transaction_set_body (copy, new_seq);
2403 break;
2404
2405 case GIMPLE_WITH_CLEANUP_EXPR:
2406 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2407 gimple_wce_set_cleanup (copy, new_seq);
2408 break;
2409
2410 default:
2411 gcc_unreachable ();
2412 }
2413 }
2414
2415 /* Make copy of operands. */
2416 if (num_ops > 0)
2417 {
2418 for (i = 0; i < num_ops; i++)
2419 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2420
2421 /* Clear out SSA operand vectors on COPY. */
2422 if (gimple_has_ops (stmt))
2423 {
2424 gimple_set_def_ops (copy, NULL);
2425 gimple_set_use_ops (copy, NULL);
2426 }
2427
2428 if (gimple_has_mem_ops (stmt))
2429 {
2430 gimple_set_vdef (copy, gimple_vdef (stmt));
2431 gimple_set_vuse (copy, gimple_vuse (stmt));
2432 }
2433
2434 /* SSA operands need to be updated. */
2435 gimple_set_modified (copy, true);
2436 }
2437
2438 return copy;
2439 }
2440
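/* Usage sketch, illustrative only: duplicating a statement for
   insertion elsewhere. The copy shares no operand trees with the
   original and carries cleared SSA operand caches, so it must be
   inserted into the IL and updated before use; GSI is a hypothetical
   iterator at the insertion point:

     gimple copy = gimple_copy (stmt);
     gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
     update_stmt (copy);  */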
2441
2442 /* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
2443 a MODIFIED field. */
2444
2445 void
2446 gimple_set_modified (gimple s, bool modifiedp)
2447 {
2448 if (gimple_has_ops (s))
2449 s->gsbase.modified = (unsigned) modifiedp;
2450 }
2451
2452
2453 /* Return true if statement S has side-effects. We consider a
2454 statement to have side effects if:
2455
2456 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2457 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2458
2459 bool
2460 gimple_has_side_effects (const_gimple s)
2461 {
2462 if (is_gimple_debug (s))
2463 return false;
2464
2465 /* We don't have to scan the arguments to check for
2466 volatile arguments, though, at present, we still
2467 do a scan to check for TREE_SIDE_EFFECTS. */
2468 if (gimple_has_volatile_ops (s))
2469 return true;
2470
2471 if (gimple_code (s) == GIMPLE_ASM
2472 && gimple_asm_volatile_p (s))
2473 return true;
2474
2475 if (is_gimple_call (s))
2476 {
2477 int flags = gimple_call_flags (s);
2478
2479 /* An infinite loop is considered a side effect. */
2480 if (!(flags & (ECF_CONST | ECF_PURE))
2481 || (flags & ECF_LOOPING_CONST_OR_PURE))
2482 return true;
2483
2484 return false;
2485 }
2486
2487 return false;
2488 }
2489
2490 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2491 Return true if S can trap. When INCLUDE_MEM is true, check whether
2492 the memory operations could trap. When INCLUDE_STORES is true and
2493 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2494
2495 bool
2496 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2497 {
2498 tree t, div = NULL_TREE;
2499 enum tree_code op;
2500
2501 if (include_mem)
2502 {
2503 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2504
2505 for (i = start; i < gimple_num_ops (s); i++)
2506 if (tree_could_trap_p (gimple_op (s, i)))
2507 return true;
2508 }
2509
2510 switch (gimple_code (s))
2511 {
2512 case GIMPLE_ASM:
2513 return gimple_asm_volatile_p (s);
2514
2515 case GIMPLE_CALL:
2516 t = gimple_call_fndecl (s);
2517 /* Assume that calls to weak functions may trap. */
2518 if (!t || !DECL_P (t) || DECL_WEAK (t))
2519 return true;
2520 return false;
2521
2522 case GIMPLE_ASSIGN:
2523 t = gimple_expr_type (s);
2524 op = gimple_assign_rhs_code (s);
2525 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2526 div = gimple_assign_rhs2 (s);
2527 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2528 (INTEGRAL_TYPE_P (t)
2529 && TYPE_OVERFLOW_TRAPS (t)),
2530 div));
2531
2532 default:
2533 break;
2534 }
2535
2536 return false;
2537 }
2538
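/* Examples, illustrative only: for "x = a / b" with integral operands,
   the divisor B is passed to operation_could_trap_p, so the statement
   counts as trapping unless B is a nonzero constant. For a load
   "x = *p", the operand scan flags the statement via tree_could_trap_p;
   with INCLUDE_STORES false, operand 0 (the LHS) of an assignment is
   skipped, so only the memory reads are considered. */
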
2539 /* Return true if statement S can trap. */
2540
2541 bool
2542 gimple_could_trap_p (gimple s)
2543 {
2544 return gimple_could_trap_p_1 (s, true, true);
2545 }
2546
2547 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2548
2549 bool
2550 gimple_assign_rhs_could_trap_p (gimple s)
2551 {
2552 gcc_assert (is_gimple_assign (s));
2553 return gimple_could_trap_p_1 (s, true, false);
2554 }
2555
2556
2557 /* Print allocation statistics for the GIMPLE statements generated. */
2558
2559 void
2560 dump_gimple_statistics (void)
2561 {
2562 #ifdef GATHER_STATISTICS
2563 int i, total_tuples = 0, total_bytes = 0;
2564
2565 fprintf (stderr, "\nGIMPLE statements\n");
2566 fprintf (stderr, "Kind Stmts Bytes\n");
2567 fprintf (stderr, "---------------------------------------\n");
2568 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2569 {
2570 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2571 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2572 total_tuples += gimple_alloc_counts[i];
2573 total_bytes += gimple_alloc_sizes[i];
2574 }
2575 fprintf (stderr, "---------------------------------------\n");
2576 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2577 fprintf (stderr, "---------------------------------------\n");
2578 #else
2579 fprintf (stderr, "No gimple statistics\n");
2580 #endif
2581 }
2582
2583
2584 /* Return the number of operands needed on the RHS of a GIMPLE
2585 assignment for an expression with tree code CODE. */
2586
2587 unsigned
2588 get_gimple_rhs_num_ops (enum tree_code code)
2589 {
2590 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2591
2592 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2593 return 1;
2594 else if (rhs_class == GIMPLE_BINARY_RHS)
2595 return 2;
2596 else if (rhs_class == GIMPLE_TERNARY_RHS)
2597 return 3;
2598 else
2599 gcc_unreachable ();
2600 }
2601
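/* Examples, illustrative only: PLUS_EXPR is of class tcc_binary, so
   the table below classifies it as GIMPLE_BINARY_RHS and
   get_gimple_rhs_num_ops (PLUS_EXPR) returns 2. COND_EXPR is listed
   explicitly as GIMPLE_TERNARY_RHS and needs 3 operand slots, while
   SSA_NAME is GIMPLE_SINGLE_RHS and needs just 1. */
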
2602 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2603 (unsigned char) \
2604 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2605 : ((TYPE) == tcc_binary \
2606 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2607 : ((TYPE) == tcc_constant \
2608 || (TYPE) == tcc_declaration \
2609 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2610 : ((SYM) == TRUTH_AND_EXPR \
2611 || (SYM) == TRUTH_OR_EXPR \
2612 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2613 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2614 : ((SYM) == COND_EXPR \
2615 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2616 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2617 || (SYM) == DOT_PROD_EXPR \
2618 || (SYM) == REALIGN_LOAD_EXPR \
2619 || (SYM) == VEC_COND_EXPR \
2620 || (SYM) == VEC_PERM_EXPR \
2621 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2622 : ((SYM) == CONSTRUCTOR \
2623 || (SYM) == OBJ_TYPE_REF \
2624 || (SYM) == ASSERT_EXPR \
2625 || (SYM) == ADDR_EXPR \
2626 || (SYM) == WITH_SIZE_EXPR \
2627 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2628 : GIMPLE_INVALID_RHS),
2629 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2630
2631 const unsigned char gimple_rhs_class_table[] = {
2632 #include "all-tree.def"
2633 };
2634
2635 #undef DEFTREECODE
2636 #undef END_OF_BASE_TREE_CODES
2637
2638 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2639
2640 /* Validation of GIMPLE expressions. */
2641
2642 /* Returns true iff T is a valid RHS for an assignment to a renamed
2643 user -- or front-end generated artificial -- variable. */
2644
2645 bool
2646 is_gimple_reg_rhs (tree t)
2647 {
2648 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2649 }
2650
2651 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2652 LHS, or for a call argument. */
2653
2654 bool
2655 is_gimple_mem_rhs (tree t)
2656 {
2657 /* If we're dealing with a renamable type, either source or dest must be
2658 a renamed variable. */
2659 if (is_gimple_reg_type (TREE_TYPE (t)))
2660 return is_gimple_val (t);
2661 else
2662 return is_gimple_val (t) || is_gimple_lvalue (t);
2663 }
2664
2665 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2666
2667 bool
2668 is_gimple_lvalue (tree t)
2669 {
2670 return (is_gimple_addressable (t)
2671 || TREE_CODE (t) == WITH_SIZE_EXPR
2672 /* These are complex lvalues, but don't have addresses, so they
2673 go here. */
2674 || TREE_CODE (t) == BIT_FIELD_REF);
2675 }
2676
2677 /* Return true if T is a GIMPLE condition. */
2678
2679 bool
2680 is_gimple_condexpr (tree t)
2681 {
2682 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2683 && !tree_could_throw_p (t)
2684 && is_gimple_val (TREE_OPERAND (t, 0))
2685 && is_gimple_val (TREE_OPERAND (t, 1))));
2686 }
2687
2688 /* Return true if T is something whose address can be taken. */
2689
2690 bool
2691 is_gimple_addressable (tree t)
2692 {
2693 return (is_gimple_id (t) || handled_component_p (t)
2694 || TREE_CODE (t) == MEM_REF);
2695 }
2696
2697 /* Return true if T is a valid gimple constant. */
2698
2699 bool
2700 is_gimple_constant (const_tree t)
2701 {
2702 switch (TREE_CODE (t))
2703 {
2704 case INTEGER_CST:
2705 case REAL_CST:
2706 case FIXED_CST:
2707 case STRING_CST:
2708 case COMPLEX_CST:
2709 case VECTOR_CST:
2710 return true;
2711
2712 /* Vector constant constructors are gimple invariant. */
2713 case CONSTRUCTOR:
2714 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2715 return TREE_CONSTANT (t);
2716 else
2717 return false;
2718
2719 default:
2720 return false;
2721 }
2722 }
2723
2724 /* Return true if T is a gimple address. */
2725
2726 bool
2727 is_gimple_address (const_tree t)
2728 {
2729 tree op;
2730
2731 if (TREE_CODE (t) != ADDR_EXPR)
2732 return false;
2733
2734 op = TREE_OPERAND (t, 0);
2735 while (handled_component_p (op))
2736 {
2737 if ((TREE_CODE (op) == ARRAY_REF
2738 || TREE_CODE (op) == ARRAY_RANGE_REF)
2739 && !is_gimple_val (TREE_OPERAND (op, 1)))
2740 return false;
2741
2742 op = TREE_OPERAND (op, 0);
2743 }
2744
2745 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2746 return true;
2747
2748 switch (TREE_CODE (op))
2749 {
2750 case PARM_DECL:
2751 case RESULT_DECL:
2752 case LABEL_DECL:
2753 case FUNCTION_DECL:
2754 case VAR_DECL:
2755 case CONST_DECL:
2756 return true;
2757
2758 default:
2759 return false;
2760 }
2761 }
2762
2763 /* Return true if T is a gimple invariant address. */
2764
2765 bool
2766 is_gimple_invariant_address (const_tree t)
2767 {
2768 const_tree op;
2769
2770 if (TREE_CODE (t) != ADDR_EXPR)
2771 return false;
2772
2773 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2774 if (!op)
2775 return false;
2776
2777 if (TREE_CODE (op) == MEM_REF)
2778 {
2779 const_tree op0 = TREE_OPERAND (op, 0);
2780 return (TREE_CODE (op0) == ADDR_EXPR
2781 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2782 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2783 }
2784
2785 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2786 }
2787
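/* Examples, illustrative only, assuming "static int a[10];" and an
   automatic local "int x": &a[3] is an invariant address (constant
   index, address-invariant decl) while &a[i] with a variable index
   is not; &x is invariant within the function but fails the IPA
   variant below, since a stack variable's address is not invariant
   across the whole program. */
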
2788 /* Return true if T is a gimple invariant address at the IPA level
2789    (so addresses of variables on the stack are not allowed). */
2790
2791 bool
2792 is_gimple_ip_invariant_address (const_tree t)
2793 {
2794 const_tree op;
2795
2796 if (TREE_CODE (t) != ADDR_EXPR)
2797 return false;
2798
2799 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2800 if (!op)
2801 return false;
2802
2803 if (TREE_CODE (op) == MEM_REF)
2804 {
2805 const_tree op0 = TREE_OPERAND (op, 0);
2806 return (TREE_CODE (op0) == ADDR_EXPR
2807 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2808 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
2809 }
2810
2811 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
2812 }
2813
2814 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2815 form of function invariant. */
2816
2817 bool
2818 is_gimple_min_invariant (const_tree t)
2819 {
2820 if (TREE_CODE (t) == ADDR_EXPR)
2821 return is_gimple_invariant_address (t);
2822
2823 return is_gimple_constant (t);
2824 }
2825
2826 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2827 form of gimple minimal invariant. */
2828
2829 bool
2830 is_gimple_ip_invariant (const_tree t)
2831 {
2832 if (TREE_CODE (t) == ADDR_EXPR)
2833 return is_gimple_ip_invariant_address (t);
2834
2835 return is_gimple_constant (t);
2836 }
2837
2838 /* Return true if T looks like a valid GIMPLE statement. */
2839
2840 bool
2841 is_gimple_stmt (tree t)
2842 {
2843 const enum tree_code code = TREE_CODE (t);
2844
2845 switch (code)
2846 {
2847 case NOP_EXPR:
2848 /* The only valid NOP_EXPR is the empty statement. */
2849 return IS_EMPTY_STMT (t);
2850
2851 case BIND_EXPR:
2852 case COND_EXPR:
2853 /* These are only valid if they're void. */
2854 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2855
2856 case SWITCH_EXPR:
2857 case GOTO_EXPR:
2858 case RETURN_EXPR:
2859 case LABEL_EXPR:
2860 case CASE_LABEL_EXPR:
2861 case TRY_CATCH_EXPR:
2862 case TRY_FINALLY_EXPR:
2863 case EH_FILTER_EXPR:
2864 case CATCH_EXPR:
2865 case ASM_EXPR:
2866 case STATEMENT_LIST:
2867 case OMP_PARALLEL:
2868 case OMP_FOR:
2869 case OMP_SECTIONS:
2870 case OMP_SECTION:
2871 case OMP_SINGLE:
2872 case OMP_MASTER:
2873 case OMP_ORDERED:
2874 case OMP_CRITICAL:
2875 case OMP_TASK:
2876 /* These are always void. */
2877 return true;
2878
2879 case CALL_EXPR:
2880 case MODIFY_EXPR:
2881 case PREDICT_EXPR:
2882 /* These are valid regardless of their type. */
2883 return true;
2884
2885 default:
2886 return false;
2887 }
2888 }
2889
2890 /* Return true if T is a variable. */
2891
2892 bool
2893 is_gimple_variable (tree t)
2894 {
2895 return (TREE_CODE (t) == VAR_DECL
2896 || TREE_CODE (t) == PARM_DECL
2897 || TREE_CODE (t) == RESULT_DECL
2898 || TREE_CODE (t) == SSA_NAME);
2899 }
2900
2901 /* Return true if T is a GIMPLE identifier (something with an address). */
2902
2903 bool
2904 is_gimple_id (tree t)
2905 {
2906 return (is_gimple_variable (t)
2907 || TREE_CODE (t) == FUNCTION_DECL
2908 || TREE_CODE (t) == LABEL_DECL
2909 || TREE_CODE (t) == CONST_DECL
2910 /* Allow string constants, since they are addressable. */
2911 || TREE_CODE (t) == STRING_CST);
2912 }
2913
2914 /* Return true if T is a non-aggregate register variable. */
2915
2916 bool
2917 is_gimple_reg (tree t)
2918 {
2919 if (TREE_CODE (t) == SSA_NAME)
2920 t = SSA_NAME_VAR (t);
2921
2922 if (!is_gimple_variable (t))
2923 return false;
2924
2925 if (!is_gimple_reg_type (TREE_TYPE (t)))
2926 return false;
2927
2928 /* A volatile decl is not acceptable because we can't reuse it as
2929 needed. We need to copy it into a temp first. */
2930 if (TREE_THIS_VOLATILE (t))
2931 return false;
2932
2933 /* We define "registers" as things that can be renamed as needed,
2934 which with our infrastructure does not apply to memory. */
2935 if (needs_to_live_in_memory (t))
2936 return false;
2937
2938 /* Hard register variables are an interesting case. For those that
2939 are call-clobbered, we don't know where all the calls are, since
2940 we don't (want to) take into account which operations will turn
2941 into libcalls at the rtl level. For those that are call-saved,
2942 we don't currently model the fact that calls may in fact change
2943 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2944    level, and so miss variable changes such clobbers might imply. All around,
2945 it seems safest to not do too much optimization with these at the
2946 tree level at all. We'll have to rely on the rtl optimizers to
2947 clean this up, as there we've got all the appropriate bits exposed. */
2948 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2949 return false;
2950
2951 /* Complex and vector values must have been put into SSA-like form.
2952 That is, no assignments to the individual components. */
2953 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2954 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2955 return DECL_GIMPLE_REG_P (t);
2956
2957 return true;
2958 }
2959
2960
2961 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2962
2963 bool
2964 is_gimple_val (tree t)
2965 {
2966 /* Make loads from volatiles and memory vars explicit. */
2967 if (is_gimple_variable (t)
2968 && is_gimple_reg_type (TREE_TYPE (t))
2969 && !is_gimple_reg (t))
2970 return false;
2971
2972 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2973 }
2974
2975 /* Similarly, but accept hard registers as inputs to asm statements. */
2976
2977 bool
2978 is_gimple_asm_val (tree t)
2979 {
2980 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2981 return true;
2982
2983 return is_gimple_val (t);
2984 }
2985
2986 /* Return true if T is a GIMPLE minimal lvalue. */
2987
2988 bool
2989 is_gimple_min_lval (tree t)
2990 {
2991 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2992 return false;
2993 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
2994 }
2995
2996 /* Return true if T is a valid function operand of a CALL_EXPR. */
2997
2998 bool
2999 is_gimple_call_addr (tree t)
3000 {
3001 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
3002 }
3003
3004 /* Return true if T is a valid address operand of a MEM_REF. */
3005
3006 bool
3007 is_gimple_mem_ref_addr (tree t)
3008 {
3009 return (is_gimple_reg (t)
3010 || TREE_CODE (t) == INTEGER_CST
3011 || (TREE_CODE (t) == ADDR_EXPR
3012 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
3013 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
3014 }
3015
3016
3017 /* Given a memory reference expression T, return its base address.
3018 The base address of a memory reference expression is the main
3019 object being referenced. For instance, the base address for
3020 'array[i].fld[j]' is 'array'. You can think of this as stripping
3021 away the offset part from a memory address.
3022
3023 This function calls handled_component_p to strip away all the inner
3024 parts of the memory reference until it reaches the base object. */
3025
3026 tree
3027 get_base_address (tree t)
3028 {
3029 while (handled_component_p (t))
3030 t = TREE_OPERAND (t, 0);
3031
3032 if ((TREE_CODE (t) == MEM_REF
3033 || TREE_CODE (t) == TARGET_MEM_REF)
3034 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
3035 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3036
3037 if (TREE_CODE (t) == SSA_NAME
3038 || DECL_P (t)
3039 || TREE_CODE (t) == STRING_CST
3040 || TREE_CODE (t) == CONSTRUCTOR
3041 || INDIRECT_REF_P (t)
3042 || TREE_CODE (t) == MEM_REF
3043 || TREE_CODE (t) == TARGET_MEM_REF)
3044 return t;
3045 else
3046 return NULL_TREE;
3047 }
3048
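/* Recompute TREE_SIDE_EFFECTS for the expression T from its own
   volatility and the side effects of its operands. Expressions that
   inherently have side effects, such as MODIFY_EXPR, are left
   untouched. */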
3049 void
3050 recalculate_side_effects (tree t)
3051 {
3052 enum tree_code code = TREE_CODE (t);
3053 int len = TREE_OPERAND_LENGTH (t);
3054 int i;
3055
3056 switch (TREE_CODE_CLASS (code))
3057 {
3058 case tcc_expression:
3059 switch (code)
3060 {
3061 case INIT_EXPR:
3062 case MODIFY_EXPR:
3063 case VA_ARG_EXPR:
3064 case PREDECREMENT_EXPR:
3065 case PREINCREMENT_EXPR:
3066 case POSTDECREMENT_EXPR:
3067 case POSTINCREMENT_EXPR:
3068 /* All of these have side-effects, no matter what their
3069 operands are. */
3070 return;
3071
3072 default:
3073 break;
3074 }
3075 /* Fall through. */
3076
3077 case tcc_comparison: /* a comparison expression */
3078 case tcc_unary: /* a unary arithmetic expression */
3079 case tcc_binary: /* a binary arithmetic expression */
3080 case tcc_reference: /* a reference */
3081 case tcc_vl_exp: /* a function call */
3082 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3083 for (i = 0; i < len; ++i)
3084 {
3085 tree op = TREE_OPERAND (t, i);
3086 if (op && TREE_SIDE_EFFECTS (op))
3087 TREE_SIDE_EFFECTS (t) = 1;
3088 }
3089 break;
3090
3091 case tcc_constant:
3092 /* No side-effects. */
3093 return;
3094
3095 default:
3096 gcc_unreachable ();
3097 }
3098 }
3099
3100 /* Canonicalize a tree T for use as the condition of a COND_EXPR. Returns
3101    a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
3102    we failed to create one. */
3103
3104 tree
3105 canonicalize_cond_expr_cond (tree t)
3106 {
3107 /* Strip conversions around boolean operations. */
3108 if (CONVERT_EXPR_P (t)
3109 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
3110 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
3111 == BOOLEAN_TYPE))
3112 t = TREE_OPERAND (t, 0);
3113
3114 /* For !x use x == 0. */
3115 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3116 {
3117 tree top0 = TREE_OPERAND (t, 0);
3118 t = build2 (EQ_EXPR, TREE_TYPE (t),
3119 top0, build_int_cst (TREE_TYPE (top0), 0));
3120 }
3121 /* For cmp ? 1 : 0 use cmp. */
3122 else if (TREE_CODE (t) == COND_EXPR
3123 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3124 && integer_onep (TREE_OPERAND (t, 1))
3125 && integer_zerop (TREE_OPERAND (t, 2)))
3126 {
3127 tree top0 = TREE_OPERAND (t, 0);
3128 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3129 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3130 }
3131
3132 if (is_gimple_condexpr (t))
3133 return t;
3134
3135 return NULL_TREE;
3136 }
3137
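/* Examples, illustrative only: "!x" is canonicalized to "x == 0" and
   "a < b ? 1 : 0" collapses to the bare comparison "a < b"; anything
   that still fails is_gimple_condexpr after these rewrites, e.g. an
   arithmetic expression such as "a + b", yields NULL_TREE. */
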
3138 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3139 the positions marked by the set ARGS_TO_SKIP. */
3140
3141 gimple
3142 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3143 {
3144 int i;
3145 int nargs = gimple_call_num_args (stmt);
3146 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3147 gimple new_stmt;
3148
3149 for (i = 0; i < nargs; i++)
3150 if (!bitmap_bit_p (args_to_skip, i))
3151 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3152
3153 if (gimple_call_internal_p (stmt))
3154 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3155 vargs);
3156 else
3157 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3158 VEC_free (tree, heap, vargs);
3159 if (gimple_call_lhs (stmt))
3160 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3161
3162 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3163 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3164
3165 gimple_set_block (new_stmt, gimple_block (stmt));
3166 if (gimple_has_location (stmt))
3167 gimple_set_location (new_stmt, gimple_location (stmt));
3168 gimple_call_copy_flags (new_stmt, stmt);
3169 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3170
3171 gimple_set_modified (new_stmt, true);
3172
3173 return new_stmt;
3174 }
3175
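/* Usage sketch, illustrative only: dropping argument 1 of a call
   while rewriting its signature, with SKIP a caller-owned bitmap:

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);
     gimple new_call = gimple_call_copy_skip_args (stmt, skip);
     BITMAP_FREE (skip);

   The caller must still replace STMT with NEW_CALL in the IL, for
   example via gsi_replace. */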
3176
3177 enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };
3178
3179 static hashval_t gimple_type_hash (const void *);
3180
3181 /* Structure used to maintain a cache of some type pairs compared by
3182 gimple_types_compatible_p when comparing aggregate types. There are
3183 three possible values for SAME_P:
3184
3185 -2: The pair (T1, T2) has just been inserted in the table.
3186 0: T1 and T2 are different types.
3187 1: T1 and T2 are the same type.
3188
3189 The two elements in the SAME_P array are indexed by the comparison
3190 mode gtc_mode. */
3191
3192 struct type_pair_d
3193 {
3194 unsigned int uid1;
3195 unsigned int uid2;
3196 signed char same_p[2];
3197 };
3198 typedef struct type_pair_d *type_pair_t;
3199 DEF_VEC_P(type_pair_t);
3200 DEF_VEC_ALLOC_P(type_pair_t,heap);
3201
3202 #define GIMPLE_TYPE_PAIR_SIZE 16381
3203 struct type_pair_d *type_pair_cache;
3204
3205
3206 /* Look up the pair of types T1 and T2 in the type-pair cache. Insert
3207    a new entry if none existed. */
3208
3209 static inline type_pair_t
3210 lookup_type_pair (tree t1, tree t2)
3211 {
3212 unsigned int index;
3213 unsigned int uid1, uid2;
3214
3215 if (type_pair_cache == NULL)
3216 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
3217
3218 if (TYPE_UID (t1) < TYPE_UID (t2))
3219 {
3220 uid1 = TYPE_UID (t1);
3221 uid2 = TYPE_UID (t2);
3222 }
3223 else
3224 {
3225 uid1 = TYPE_UID (t2);
3226 uid2 = TYPE_UID (t1);
3227 }
3228 gcc_checking_assert (uid1 != uid2);
3229
3230 /* Using iterative_hash_hashval_t here would imply function calls;
3231    we know that UIDs are in a limited range, so a direct hash suffices. */
3232 index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
3233 % GIMPLE_TYPE_PAIR_SIZE);
3234 if (type_pair_cache [index].uid1 == uid1
3235 && type_pair_cache [index].uid2 == uid2)
3236 return &type_pair_cache[index];
3237
3238 type_pair_cache [index].uid1 = uid1;
3239 type_pair_cache [index].uid2 = uid2;
3240 type_pair_cache [index].same_p[0] = -2;
3241 type_pair_cache [index].same_p[1] = -2;
3242
3243 return &type_pair_cache[index];
3244 }
3245
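/* Illustrative note: the cache above is direct mapped, so two
   different UID pairs that map to the same index evict one another;
   e.g. for uid1 = 1 and uid2 = 2 the slot is
   ((1 << HOST_BITS_PER_WIDE_INT / 2) + 2) % GIMPLE_TYPE_PAIR_SIZE,
   and any other pair computing the same index reuses and resets
   that slot. */
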
3246 /* Per-pointer state for the SCC finding. The on_sccstack flag
3247    is not strictly required; it is true when there is no hash value
3248    recorded for the type and false otherwise. But querying that
3249    directly is slower. */
3250
3251 struct sccs
3252 {
3253 unsigned int dfsnum;
3254 unsigned int low;
3255 bool on_sccstack;
3256 union {
3257 hashval_t hash;
3258 signed char same_p;
3259 } u;
3260 };
3261
3262 static unsigned int next_dfs_num;
3263 static unsigned int gtc_next_dfs_num;
3264
3265
3266 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
3267
3268 typedef struct GTY(()) gimple_type_leader_entry_s {
3269 tree type;
3270 tree leader;
3271 } gimple_type_leader_entry;
3272
3273 #define GIMPLE_TYPE_LEADER_SIZE 16381
3274 static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3275 gimple_type_leader_entry *gimple_type_leader;
3276
3277 /* Lookup an existing leader for T and return it or NULL_TREE, if
3278 there is none in the cache. */
3279
3280 static inline tree
3281 gimple_lookup_type_leader (tree t)
3282 {
3283 gimple_type_leader_entry *leader;
3284
3285 if (!gimple_type_leader)
3286 return NULL_TREE;
3287
3288 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3289 if (leader->type != t)
3290 return NULL_TREE;
3291
3292 return leader->leader;
3293 }
3294
3295 /* Return true if T1 and T2 have the same name. If only one of the
3296    two types is named, return false; if neither is named, return
3297    true. */
3298
3299 static bool
3300 compare_type_names_p (tree t1, tree t2)
3301 {
3302 tree name1 = TYPE_NAME (t1);
3303 tree name2 = TYPE_NAME (t2);
3304
3305 if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
3306 return false;
3307
3308 if (name1 == NULL_TREE)
3309 return true;
3310
3311 /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE. */
3312 if (TREE_CODE (name1) != TREE_CODE (name2))
3313 return false;
3314
3315 if (TREE_CODE (name1) == TYPE_DECL)
3316 name1 = DECL_NAME (name1);
3317 gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3318
3319 if (TREE_CODE (name2) == TYPE_DECL)
3320 name2 = DECL_NAME (name2);
3321 gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3322
3323 /* Identifiers can be compared with pointer equality rather
3324 than a string comparison. */
3325 if (name1 == name2)
3326 return true;
3327
3328 return false;
3329 }
3330
3331 /* Return true if the field decls F1 and F2 are at the same offset.
3332
3333 This is intended to be used on GIMPLE types only. */
3334
3335 bool
3336 gimple_compare_field_offset (tree f1, tree f2)
3337 {
3338 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3339 {
3340 tree offset1 = DECL_FIELD_OFFSET (f1);
3341 tree offset2 = DECL_FIELD_OFFSET (f2);
3342 return ((offset1 == offset2
3343 /* Once gimplification is done, self-referential offsets are
3344 instantiated as operand #2 of the COMPONENT_REF built for
3345 each access and reset. Therefore, they are not relevant
3346 anymore and fields are interchangeable provided that they
3347 represent the same access. */
3348 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3349 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3350 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3351 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3352 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3353 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3354 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3355 || operand_equal_p (offset1, offset2, 0))
3356 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3357 DECL_FIELD_BIT_OFFSET (f2)));
3358 }
3359
3360 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3361 should be, so handle differing ones specially by decomposing
3362 the offset into a byte and bit offset manually. */
3363 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3364 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3365 {
3366 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3367 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3368 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3369 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3370 + bit_offset1 / BITS_PER_UNIT);
3371 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3372 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3373 + bit_offset2 / BITS_PER_UNIT);
3374 if (byte_offset1 != byte_offset2)
3375 return false;
3376 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3377 }
3378
3379 return false;
3380 }
3381
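/* Worked example for the fallback path above, illustrative only: a
   field recorded with DECL_FIELD_OFFSET 4 and DECL_FIELD_BIT_OFFSET 10
   decomposes to byte offset 4 + 10 / BITS_PER_UNIT = 5 and bit
   remainder 10 % 8 = 2 (assuming 8-bit units), so it compares equal
   to a field recorded as byte offset 5 with bit offset 2. */
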
3382 static bool
3383 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
3384 VEC(type_pair_t, heap) **,
3385 struct pointer_map_t *, struct obstack *);
3386
3387 /* DFS visit the edge from the caller's type pair with state *STATE
3388    to the pair T1, T2.
3389    Update the merging status if the pair is not part of the SCC
3390    containing the caller's pair, and return it.
3391    SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3392
3393 static bool
3394 gtc_visit (tree t1, tree t2,
3395 struct sccs *state,
3396 VEC(type_pair_t, heap) **sccstack,
3397 struct pointer_map_t *sccstate,
3398 struct obstack *sccstate_obstack)
3399 {
3400 struct sccs *cstate = NULL;
3401 type_pair_t p;
3402 void **slot;
3403 tree leader1, leader2;
3404
3405 /* Check first for the obvious case of pointer identity. */
3406 if (t1 == t2)
3407 return true;
3408
3409 /* Check that we have two types to compare. */
3410 if (t1 == NULL_TREE || t2 == NULL_TREE)
3411 return false;
3412
3413 /* Can't be the same type if the types don't have the same code. */
3414 if (TREE_CODE (t1) != TREE_CODE (t2))
3415 return false;
3416
3417 /* Can't be the same type if they have different CV qualifiers. */
3418 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3419 return false;
3420
3421 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3422 return false;
3423
3424 /* Void types and nullptr types are always the same. */
3425 if (TREE_CODE (t1) == VOID_TYPE
3426 || TREE_CODE (t1) == NULLPTR_TYPE)
3427 return true;
3428
3429 /* Can't be the same type if they have different alignment or mode. */
3430 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3431 || TYPE_MODE (t1) != TYPE_MODE (t2))
3432 return false;
3433
3434 /* Do some simple checks before doing three hashtable queries. */
3435 if (INTEGRAL_TYPE_P (t1)
3436 || SCALAR_FLOAT_TYPE_P (t1)
3437 || FIXED_POINT_TYPE_P (t1)
3438 || TREE_CODE (t1) == VECTOR_TYPE
3439 || TREE_CODE (t1) == COMPLEX_TYPE
3440 || TREE_CODE (t1) == OFFSET_TYPE
3441 || POINTER_TYPE_P (t1))
3442 {
3443 /* Can't be the same type if they have different sign or precision. */
3444 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3445 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3446 return false;
3447
3448 if (TREE_CODE (t1) == INTEGER_TYPE
3449 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3450 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3451 return false;
3452
3453 /* That's all we need to check for float and fixed-point types. */
3454 if (SCALAR_FLOAT_TYPE_P (t1)
3455 || FIXED_POINT_TYPE_P (t1))
3456 return true;
3457
3458 /* For other types fall thru to more complex checks. */
3459 }
3460
3461 /* If the types have been previously registered and found equal
3462 they still are. */
3463 leader1 = gimple_lookup_type_leader (t1);
3464 leader2 = gimple_lookup_type_leader (t2);
3465 if (leader1 == t2
3466 || t1 == leader2
3467 || (leader1 && leader1 == leader2))
3468 return true;
3469
3470 /* If the hash values of t1 and t2 are different the types can't
3471    possibly be the same. This helps keep the type-pair hashtable
3472    small, only tracking comparisons for hash collisions. */
3473 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3474 return false;
3475
3476 /* Allocate a new cache entry for this comparison. */
3477 p = lookup_type_pair (t1, t2);
3478 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3479 {
3480 /* We have already decided whether T1 and T2 are the
3481 same, return the cached result. */
3482 return p->same_p[GTC_MERGE] == 1;
3483 }
3484
3485 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3486 cstate = (struct sccs *)*slot;
3487 /* Not yet visited. DFS recurse. */
3488 if (!cstate)
3489 {
3490 gimple_types_compatible_p_1 (t1, t2, p,
3491 sccstack, sccstate, sccstate_obstack);
3492 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
3493 state->low = MIN (state->low, cstate->low);
3494 }
3495 /* If the type is still on the SCC stack adjust the parent's low. */
3496 if (cstate->dfsnum < state->dfsnum
3497 && cstate->on_sccstack)
3498 state->low = MIN (cstate->dfsnum, state->low);
3499
3500 /* Return the current lattice value. We start with an equality
3501    assumption so types part of an SCC will be optimistically
3502 treated equal unless proven otherwise. */
3503 return cstate->u.same_p;
3504 }
3505
3506 /* Worker for gimple_types_compatible_p.
3507 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3508
3509 static bool
3510 gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
3511 VEC(type_pair_t, heap) **sccstack,
3512 struct pointer_map_t *sccstate,
3513 struct obstack *sccstate_obstack)
3514 {
3515 struct sccs *state;
3516
3517 gcc_assert (p->same_p[GTC_MERGE] == -2);
3518
3519 state = XOBNEW (sccstate_obstack, struct sccs);
3520 *pointer_map_insert (sccstate, p) = state;
3521
3522 VEC_safe_push (type_pair_t, heap, *sccstack, p);
3523 state->dfsnum = gtc_next_dfs_num++;
3524 state->low = state->dfsnum;
3525 state->on_sccstack = true;
3526 /* Start with an equality assumption. As we DFS recurse into child
3527 SCCs this assumption may get revisited. */
3528 state->u.same_p = 1;
3529
3530 /* The struct tags shall compare equal. */
3531 if (!compare_type_names_p (t1, t2))
3532 goto different_types;
3533
3534 /* We may not merge typedef types to the same type in different
3535 contexts. */
3536 if (TYPE_NAME (t1)
3537 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
3538 && DECL_CONTEXT (TYPE_NAME (t1))
3539 && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
3540 {
3541 if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
3542 DECL_CONTEXT (TYPE_NAME (t2)),
3543 state, sccstack, sccstate, sccstate_obstack))
3544 goto different_types;
3545 }
3546
3547 /* If their attributes are not the same they can't be the same type. */
3548 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3549 goto different_types;
3550
3551 /* Do type-specific comparisons. */
3552 switch (TREE_CODE (t1))
3553 {
3554 case VECTOR_TYPE:
3555 case COMPLEX_TYPE:
3556 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3557 state, sccstack, sccstate, sccstate_obstack))
3558 goto different_types;
3559 goto same_types;
3560
3561 case ARRAY_TYPE:
3562 /* Array types are the same if the element types are the same and
3563    the number of elements is the same. */
3564 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3565 state, sccstack, sccstate, sccstate_obstack)
3566 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3567 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3568 goto different_types;
3569 else
3570 {
3571 tree i1 = TYPE_DOMAIN (t1);
3572 tree i2 = TYPE_DOMAIN (t2);
3573
3574 /* For an incomplete external array, the type domain can be
3575 NULL_TREE. Check this condition also. */
3576 if (i1 == NULL_TREE && i2 == NULL_TREE)
3577 goto same_types;
3578 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3579 goto different_types;
3580 /* If, for a complete array type, the possibly gimplified sizes
3581    differ, the types are different. */
3582 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3583 || (TYPE_SIZE (i1)
3584 && TYPE_SIZE (i2)
3585 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3586 goto different_types;
3587 else
3588 {
3589 tree min1 = TYPE_MIN_VALUE (i1);
3590 tree min2 = TYPE_MIN_VALUE (i2);
3591 tree max1 = TYPE_MAX_VALUE (i1);
3592 tree max2 = TYPE_MAX_VALUE (i2);
3593
3594 /* The minimum/maximum values have to be the same. */
3595 if ((min1 == min2
3596 || (min1 && min2
3597 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3598 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3599 || operand_equal_p (min1, min2, 0))))
3600 && (max1 == max2
3601 || (max1 && max2
3602 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3603 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3604 || operand_equal_p (max1, max2, 0)))))
3605 goto same_types;
3606 else
3607 goto different_types;
3608 }
3609 }
3610
3611 case METHOD_TYPE:
3612 /* Method types should belong to the same class. */
3613 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
3614 state, sccstack, sccstate, sccstate_obstack))
3615 goto different_types;
3616
3617 /* Fallthru */
3618
3619 case FUNCTION_TYPE:
3620 /* Function types are the same if the return type and argument types
3621    are the same. */
3622 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3623 state, sccstack, sccstate, sccstate_obstack))
3624 goto different_types;
3625
3626 if (!comp_type_attributes (t1, t2))
3627 goto different_types;
3628
3629 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3630 goto same_types;
3631 else
3632 {
3633 tree parms1, parms2;
3634
3635 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3636 parms1 && parms2;
3637 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3638 {
3639 if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
3640 state, sccstack, sccstate, sccstate_obstack))
3641 goto different_types;
3642 }
3643
3644 if (parms1 || parms2)
3645 goto different_types;
3646
3647 goto same_types;
3648 }
3649
3650 case OFFSET_TYPE:
3651 {
3652 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3653 state, sccstack, sccstate, sccstate_obstack)
3654 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
3655 TYPE_OFFSET_BASETYPE (t2),
3656 state, sccstack, sccstate, sccstate_obstack))
3657 goto different_types;
3658
3659 goto same_types;
3660 }
3661
3662 case POINTER_TYPE:
3663 case REFERENCE_TYPE:
3664 {
3665 /* If the two pointers have different ref-all attributes,
3666 they can't be the same type. */
3667 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3668 goto different_types;
3669
3670 /* Otherwise, pointer and reference types are the same if the
3671 pointed-to types are the same. */
3672 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3673 state, sccstack, sccstate, sccstate_obstack))
3674 goto same_types;
3675
3676 goto different_types;
3677 }
3678
3679 case INTEGER_TYPE:
3680 case BOOLEAN_TYPE:
3681 {
3682 tree min1 = TYPE_MIN_VALUE (t1);
3683 tree max1 = TYPE_MAX_VALUE (t1);
3684 tree min2 = TYPE_MIN_VALUE (t2);
3685 tree max2 = TYPE_MAX_VALUE (t2);
3686 bool min_equal_p = false;
3687 bool max_equal_p = false;
3688
3689 /* If either type has a minimum value, the other type must
3690 have the same. */
3691 if (min1 == NULL_TREE && min2 == NULL_TREE)
3692 min_equal_p = true;
3693 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3694 min_equal_p = true;
3695
3696 /* Likewise, if either type has a maximum value, the other
3697 type must have the same. */
3698 if (max1 == NULL_TREE && max2 == NULL_TREE)
3699 max_equal_p = true;
3700 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3701 max_equal_p = true;
3702
3703 if (!min_equal_p || !max_equal_p)
3704 goto different_types;
3705
3706 goto same_types;
3707 }
3708
3709 case ENUMERAL_TYPE:
3710 {
3711 /* FIXME lto, we cannot check bounds on enumeral types because
3712 different front ends will produce different values.
3713 In C, enumeral types are integers, while in C++ each element
3714 will have its own symbolic value. We should decide how enums
3715 are to be represented in GIMPLE and have each front end lower
3716 to that. */
3717 tree v1, v2;
3718
3719 /* For enumeral types, all the values must be the same. */
3720 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3721 goto same_types;
3722
3723 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3724 v1 && v2;
3725 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3726 {
3727 tree c1 = TREE_VALUE (v1);
3728 tree c2 = TREE_VALUE (v2);
3729
3730 if (TREE_CODE (c1) == CONST_DECL)
3731 c1 = DECL_INITIAL (c1);
3732
3733 if (TREE_CODE (c2) == CONST_DECL)
3734 c2 = DECL_INITIAL (c2);
3735
3736 if (tree_int_cst_equal (c1, c2) != 1)
3737 goto different_types;
3738
3739 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
3740 goto different_types;
3741 }
3742
3743 /* If one enumeration has more values than the other, they
3744 are not the same. */
3745 if (v1 || v2)
3746 goto different_types;
3747
3748 goto same_types;
3749 }
3750
3751 case RECORD_TYPE:
3752 case UNION_TYPE:
3753 case QUAL_UNION_TYPE:
3754 {
3755 tree f1, f2;
3756
3757 /* For aggregate types, all the fields must be the same. */
3758 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3759 f1 && f2;
3760 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3761 {
3762 /* Different field kinds are not compatible. */
3763 if (TREE_CODE (f1) != TREE_CODE (f2))
3764 goto different_types;
3765 /* Field decls must have the same name and offset. */
3766 if (TREE_CODE (f1) == FIELD_DECL
3767 && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3768 || !gimple_compare_field_offset (f1, f2)))
3769 goto different_types;
3770 /* All entities should have the same name and type. */
3771 if (DECL_NAME (f1) != DECL_NAME (f2)
3772 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
3773 state, sccstack, sccstate, sccstate_obstack))
3774 goto different_types;
3775 }
3776
3777 /* If one aggregate has more fields than the other, they
3778 are not the same. */
3779 if (f1 || f2)
3780 goto different_types;
3781
3782 goto same_types;
3783 }
3784
3785 default:
3786 gcc_unreachable ();
3787 }
3788
3789 /* Common exit path for types that are not compatible. */
3790 different_types:
3791 state->u.same_p = 0;
3792 goto pop;
3793
3794 /* Common exit path for types that are compatible. */
3795 same_types:
3796 gcc_assert (state->u.same_p == 1);
3797
3798 pop:
3799 if (state->low == state->dfsnum)
3800 {
3801 type_pair_t x;
3802
3803 /* Pop off the SCC and set its cache values to the final
3804 comparison result. */
3805 do
3806 {
3807 struct sccs *cstate;
3808 x = VEC_pop (type_pair_t, *sccstack);
3809 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3810 cstate->on_sccstack = false;
3811 x->same_p[GTC_MERGE] = state->u.same_p;
3812 }
3813 while (x != p);
3814 }
3815
3816 return state->u.same_p;
3817 }
3818
3819 /* Return true iff T1 and T2 are structurally identical. Since this
3820    comparison is done in merging mode (GTC_MERGE), an incomplete type
3821    and a complete type are considered different. */
3822
3823 static bool
3824 gimple_types_compatible_p (tree t1, tree t2)
3825 {
3826 VEC(type_pair_t, heap) *sccstack = NULL;
3827 struct pointer_map_t *sccstate;
3828 struct obstack sccstate_obstack;
3829 type_pair_t p = NULL;
3830 bool res;
3831 tree leader1, leader2;
3832
3833 /* Before starting to set up the SCC machinery handle simple cases. */
3834
3835 /* Check first for the obvious case of pointer identity. */
3836 if (t1 == t2)
3837 return true;
3838
3839 /* Check that we have two types to compare. */
3840 if (t1 == NULL_TREE || t2 == NULL_TREE)
3841 return false;
3842
3843 /* Can't be the same type if the types don't have the same code. */
3844 if (TREE_CODE (t1) != TREE_CODE (t2))
3845 return false;
3846
3847 /* Can't be the same type if they have different CV qualifiers. */
3848 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3849 return false;
3850
3851 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3852 return false;
3853
3854 /* Void types and nullptr types are always the same. */
3855 if (TREE_CODE (t1) == VOID_TYPE
3856 || TREE_CODE (t1) == NULLPTR_TYPE)
3857 return true;
3858
3859 /* Can't be the same type if they have different alignment or mode. */
3860 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3861 || TYPE_MODE (t1) != TYPE_MODE (t2))
3862 return false;
3863
3864 /* Do some simple checks before doing three hashtable queries. */
3865 if (INTEGRAL_TYPE_P (t1)
3866 || SCALAR_FLOAT_TYPE_P (t1)
3867 || FIXED_POINT_TYPE_P (t1)
3868 || TREE_CODE (t1) == VECTOR_TYPE
3869 || TREE_CODE (t1) == COMPLEX_TYPE
3870 || TREE_CODE (t1) == OFFSET_TYPE
3871 || POINTER_TYPE_P (t1))
3872 {
3873 /* Can't be the same type if they have different sign or precision. */
3874 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3875 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3876 return false;
3877
3878 if (TREE_CODE (t1) == INTEGER_TYPE
3879 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3880 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3881 return false;
3882
3883 /* That's all we need to check for float and fixed-point types. */
3884 if (SCALAR_FLOAT_TYPE_P (t1)
3885 || FIXED_POINT_TYPE_P (t1))
3886 return true;
3887
3888 /* For other types fall thru to more complex checks. */
3889 }
3890
3891 /* If the types have been previously registered and found equal
3892 they still are. */
3893 leader1 = gimple_lookup_type_leader (t1);
3894 leader2 = gimple_lookup_type_leader (t2);
3895 if (leader1 == t2
3896 || t1 == leader2
3897 || (leader1 && leader1 == leader2))
3898 return true;
3899
  /* If the hash values of t1 and t2 are different the types can't
     possibly be the same.  This helps keep the type-pair hashtable
     small, only tracking comparisons for hash collisions.  */
3903 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3904 return false;
3905
3906 /* If we've visited this type pair before (in the case of aggregates
3907 with self-referential types), and we made a decision, return it. */
3908 p = lookup_type_pair (t1, t2);
3909 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3910 {
3911 /* We have already decided whether T1 and T2 are the
3912 same, return the cached result. */
3913 return p->same_p[GTC_MERGE] == 1;
3914 }
3915
3916 /* Now set up the SCC machinery for the comparison. */
3917 gtc_next_dfs_num = 1;
3918 sccstate = pointer_map_create ();
3919 gcc_obstack_init (&sccstate_obstack);
3920 res = gimple_types_compatible_p_1 (t1, t2, p,
3921 &sccstack, sccstate, &sccstate_obstack);
3922 VEC_free (type_pair_t, heap, sccstack);
3923 pointer_map_destroy (sccstate);
3924 obstack_free (&sccstate_obstack, NULL);
3925
3926 return res;
3927 }
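
/* For illustration (a hypothetical pair of inputs, not code in this
   file): suppose two translation units each stream in

     struct list { struct list *next; int val; };

   On first contact pointer identity and the leader lookup fail, but
   the two copies hash identically, so the SCC walk above runs once,
   proves the self-referential pair compatible and caches same_p for
   it; later queries for the same pair hit the cache instead of
   walking again.  */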
3928
3929
3930 static hashval_t
3931 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
3932 struct pointer_map_t *, struct obstack *);
3933
/* DFS visit the edge from the caller's type with state *STATE to T.
   Update the caller's type hash V with the hash for T if it is not part
   of the SCC containing the caller's type and return it.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */
3938
3939 static hashval_t
3940 visit (tree t, struct sccs *state, hashval_t v,
3941 VEC (tree, heap) **sccstack,
3942 struct pointer_map_t *sccstate,
3943 struct obstack *sccstate_obstack)
3944 {
3945 struct sccs *cstate = NULL;
3946 struct tree_int_map m;
3947 void **slot;
3948
3949 /* If there is a hash value recorded for this type then it can't
3950 possibly be part of our parent SCC. Simply mix in its hash. */
3951 m.base.from = t;
3952 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
3953 && *slot)
3954 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
3955
3956 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
3957 cstate = (struct sccs *)*slot;
3958 if (!cstate)
3959 {
3960 hashval_t tem;
3961 /* Not yet visited. DFS recurse. */
3962 tem = iterative_hash_gimple_type (t, v,
3963 sccstack, sccstate, sccstate_obstack);
      cstate = (struct sccs *)*pointer_map_contains (sccstate, t);
3966 state->low = MIN (state->low, cstate->low);
      /* If the type is no longer on the SCC stack and thus is not part
	 of the parent's SCC, mix in its hash value.  Otherwise we will
	 ignore the type for hashing purposes and return the unaltered
	 hash value.  */
3971 if (!cstate->on_sccstack)
3972 return tem;
3973 }
3974 if (cstate->dfsnum < state->dfsnum
3975 && cstate->on_sccstack)
3976 state->low = MIN (cstate->dfsnum, state->low);
3977
  /* We are part of our parent's SCC; skip this type during hashing
     and return the unaltered hash value.  */
3980 return v;
3981 }
3982
3983 /* Hash NAME with the previous hash value V and return it. */
3984
3985 static hashval_t
3986 iterative_hash_name (tree name, hashval_t v)
3987 {
3988 if (!name)
3989 return v;
3990 v = iterative_hash_hashval_t (TREE_CODE (name), v);
3991 if (TREE_CODE (name) == TYPE_DECL)
3992 name = DECL_NAME (name);
3993 if (!name)
3994 return v;
3995 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
3996 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
3997 }
3998
3999 /* A type, hashvalue pair for sorting SCC members. */
4000
4001 struct type_hash_pair {
4002 tree type;
4003 hashval_t hash;
4004 };
4005
4006 /* Compare two type, hashvalue pairs. */
4007
4008 static int
4009 type_hash_pair_compare (const void *p1_, const void *p2_)
4010 {
4011 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
4012 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
4013 if (p1->hash < p2->hash)
4014 return -1;
4015 else if (p1->hash > p2->hash)
4016 return 1;
4017 return 0;
4018 }
4019
/* Return a hash value for the gimple type TYPE combined with VAL.
   SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.

   To hash a type we end up hashing in types that are reachable.
   Through pointers we can end up with cycles, which would break the
   required property that structurally equivalent types get the same
   hash value.  To avoid this we have to hash all types in a cycle
   (the SCC) in a commutative way.  The easiest way is to not mix in
   the hashes of the SCC members at all.  To make this work we have
   to delay setting the hash values of the SCC until it is complete.  */
4031
4032 static hashval_t
4033 iterative_hash_gimple_type (tree type, hashval_t val,
4034 VEC(tree, heap) **sccstack,
4035 struct pointer_map_t *sccstate,
4036 struct obstack *sccstate_obstack)
4037 {
4038 hashval_t v;
4039 void **slot;
4040 struct sccs *state;
4041
4042 /* Not visited during this DFS walk. */
4043 gcc_checking_assert (!pointer_map_contains (sccstate, type));
4044 state = XOBNEW (sccstate_obstack, struct sccs);
4045 *pointer_map_insert (sccstate, type) = state;
4046
4047 VEC_safe_push (tree, heap, *sccstack, type);
4048 state->dfsnum = next_dfs_num++;
4049 state->low = state->dfsnum;
4050 state->on_sccstack = true;
4051
4052 /* Combine a few common features of types so that types are grouped into
4053 smaller sets; when searching for existing matching types to merge,
4054 only existing types having the same features as the new type will be
4055 checked. */
4056 v = iterative_hash_name (TYPE_NAME (type), 0);
4057 if (TYPE_NAME (type)
4058 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
4059 && DECL_CONTEXT (TYPE_NAME (type))
4060 && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
4061 v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
4062 sccstack, sccstate, sccstate_obstack);
4063 v = iterative_hash_hashval_t (TREE_CODE (type), v);
4064 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
4065 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4066
  /* Do not hash the type's size as this will cause differences in
     hash values for the complete vs. the incomplete type variant.  */
4069
4070 /* Incorporate common features of numerical types. */
4071 if (INTEGRAL_TYPE_P (type)
4072 || SCALAR_FLOAT_TYPE_P (type)
4073 || FIXED_POINT_TYPE_P (type))
4074 {
4075 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4076 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4077 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4078 }
4079
4080 /* For pointer and reference types, fold in information about the type
4081 pointed to. */
4082 if (POINTER_TYPE_P (type))
4083 v = visit (TREE_TYPE (type), state, v,
4084 sccstack, sccstate, sccstate_obstack);
4085
  /* For integer types hash the type's min/max values and the string flag.  */
4087 if (TREE_CODE (type) == INTEGER_TYPE)
4088 {
4089 /* OMP lowering can introduce error_mark_node in place of
4090 random local decls in types. */
4091 if (TYPE_MIN_VALUE (type) != error_mark_node)
4092 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
4093 if (TYPE_MAX_VALUE (type) != error_mark_node)
4094 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
4095 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4096 }
4097
4098 /* For array types hash their domain and the string flag. */
4099 if (TREE_CODE (type) == ARRAY_TYPE
4100 && TYPE_DOMAIN (type))
4101 {
4102 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4103 v = visit (TYPE_DOMAIN (type), state, v,
4104 sccstack, sccstate, sccstate_obstack);
4105 }
4106
4107 /* Recurse for aggregates with a single element type. */
4108 if (TREE_CODE (type) == ARRAY_TYPE
4109 || TREE_CODE (type) == COMPLEX_TYPE
4110 || TREE_CODE (type) == VECTOR_TYPE)
4111 v = visit (TREE_TYPE (type), state, v,
4112 sccstack, sccstate, sccstate_obstack);
4113
4114 /* Incorporate function return and argument types. */
4115 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4116 {
4117 unsigned na;
4118 tree p;
4119
4120 /* For method types also incorporate their parent class. */
4121 if (TREE_CODE (type) == METHOD_TYPE)
4122 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
4123 sccstack, sccstate, sccstate_obstack);
4124
4125 /* Check result and argument types. */
4126 v = visit (TREE_TYPE (type), state, v,
4127 sccstack, sccstate, sccstate_obstack);
4128 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4129 {
4130 v = visit (TREE_VALUE (p), state, v,
4131 sccstack, sccstate, sccstate_obstack);
4132 na++;
4133 }
4134
4135 v = iterative_hash_hashval_t (na, v);
4136 }
4137
4138 if (RECORD_OR_UNION_TYPE_P (type))
4139 {
4140 unsigned nf;
4141 tree f;
4142
4143 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4144 {
4145 v = iterative_hash_name (DECL_NAME (f), v);
4146 v = visit (TREE_TYPE (f), state, v,
4147 sccstack, sccstate, sccstate_obstack);
4148 nf++;
4149 }
4150
4151 v = iterative_hash_hashval_t (nf, v);
4152 }
4153
4154 /* Record hash for us. */
4155 state->u.hash = v;
4156
4157 /* See if we found an SCC. */
4158 if (state->low == state->dfsnum)
4159 {
4160 tree x;
4161 struct tree_int_map *m;
4162
4163 /* Pop off the SCC and set its hash values. */
4164 x = VEC_pop (tree, *sccstack);
4165 /* Optimize SCC size one. */
4166 if (x == type)
4167 {
4168 state->on_sccstack = false;
4169 m = ggc_alloc_cleared_tree_int_map ();
4170 m->base.from = x;
4171 m->to = v;
4172 slot = htab_find_slot (type_hash_cache, m, INSERT);
4173 gcc_assert (!*slot);
4174 *slot = (void *) m;
4175 }
4176 else
4177 {
4178 struct sccs *cstate;
4179 unsigned first, i, size, j;
4180 struct type_hash_pair *pairs;
4181 /* Pop off the SCC and build an array of type, hash pairs. */
4182 first = VEC_length (tree, *sccstack) - 1;
4183 while (VEC_index (tree, *sccstack, first) != type)
4184 --first;
4185 size = VEC_length (tree, *sccstack) - first + 1;
4186 pairs = XALLOCAVEC (struct type_hash_pair, size);
4187 i = 0;
4188 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4189 cstate->on_sccstack = false;
4190 pairs[i].type = x;
4191 pairs[i].hash = cstate->u.hash;
4192 do
4193 {
4194 x = VEC_pop (tree, *sccstack);
4195 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4196 cstate->on_sccstack = false;
4197 ++i;
4198 pairs[i].type = x;
4199 pairs[i].hash = cstate->u.hash;
4200 }
4201 while (x != type);
4202 gcc_assert (i + 1 == size);
	  /* Sort the array of type, hash pairs so that when we mix in
	     all members of the SCC the hash value becomes independent of
	     the order we visited the SCC.  Disregard hashes equal to
	     the hash of the type we mix into because we cannot guarantee
	     a stable sort for those across different TUs.  */
4208 qsort (pairs, size, sizeof (struct type_hash_pair),
4209 type_hash_pair_compare);
4210 for (i = 0; i < size; ++i)
4211 {
4212 hashval_t hash;
4213 m = ggc_alloc_cleared_tree_int_map ();
4214 m->base.from = pairs[i].type;
4215 hash = pairs[i].hash;
4216 /* Skip same hashes. */
4217 for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
4218 ;
4219 for (; j < size; ++j)
4220 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4221 for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
4222 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4223 m->to = hash;
4224 if (pairs[i].type == type)
4225 v = hash;
4226 slot = htab_find_slot (type_hash_cache, m, INSERT);
4227 gcc_assert (!*slot);
4228 *slot = (void *) m;
4229 }
4230 }
4231 }
4232
4233 return iterative_hash_hashval_t (v, val);
4234 }
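
/* A sketch of the SCC handling above on a mutually recursive pair
   (hypothetical example):

     struct a { struct b *pb; };
     struct b { struct a *pa; };

   The DFS enters 'a', reaches 'b' through the pointer type, and from
   'b' sees 'a' again while it is still on the stack, so nothing of
   'a' is mixed into the hash of 'b'.  Once the walk returns to 'a'
   the whole SCC (both structs and both pointer types) is popped at
   once and the sorted member hashes are mixed into each member's
   final hash, making the result independent of whether the DFS
   started at 'a' or at 'b'.  */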
4235
4236
4237 /* Returns a hash value for P (assumed to be a type). The hash value
4238 is computed using some distinguishing features of the type. Note
4239 that we cannot use pointer hashing here as we may be dealing with
4240 two distinct instances of the same type.
4241
4242 This function should produce the same hash value for two compatible
4243 types according to gimple_types_compatible_p. */
4244
4245 static hashval_t
4246 gimple_type_hash (const void *p)
4247 {
4248 const_tree t = (const_tree) p;
4249 VEC(tree, heap) *sccstack = NULL;
4250 struct pointer_map_t *sccstate;
4251 struct obstack sccstate_obstack;
4252 hashval_t val;
4253 void **slot;
4254 struct tree_int_map m;
4255
4256 if (type_hash_cache == NULL)
4257 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4258 tree_int_map_eq, NULL);
4259
4260 m.base.from = CONST_CAST_TREE (t);
4261 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
4262 && *slot)
4263 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
4264
4265 /* Perform a DFS walk and pre-hash all reachable types. */
4266 next_dfs_num = 1;
4267 sccstate = pointer_map_create ();
4268 gcc_obstack_init (&sccstate_obstack);
4269 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
4270 &sccstack, sccstate, &sccstate_obstack);
4271 VEC_free (tree, heap, sccstack);
4272 pointer_map_destroy (sccstate);
4273 obstack_free (&sccstate_obstack, NULL);
4274
4275 return val;
4276 }
4277
/* Return a hash value for the gimple type TYPE combined with VAL.

   The hash value returned is equal for types considered compatible
   by gimple_canonical_types_compatible_p.  */
4282
4283 static hashval_t
4284 iterative_hash_canonical_type (tree type, hashval_t val)
4285 {
4286 hashval_t v;
4287 void **slot;
4288 struct tree_int_map *mp, m;
4289
4290 m.base.from = type;
4291 if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
4292 && *slot)
4293 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
4294
4295 /* Combine a few common features of types so that types are grouped into
4296 smaller sets; when searching for existing matching types to merge,
4297 only existing types having the same features as the new type will be
4298 checked. */
4299 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
4300 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4301 v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
4302 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4303
4304 /* Incorporate common features of numerical types. */
4305 if (INTEGRAL_TYPE_P (type)
4306 || SCALAR_FLOAT_TYPE_P (type)
4307 || FIXED_POINT_TYPE_P (type)
4308 || TREE_CODE (type) == VECTOR_TYPE
4309 || TREE_CODE (type) == COMPLEX_TYPE
4310 || TREE_CODE (type) == OFFSET_TYPE
4311 || POINTER_TYPE_P (type))
4312 {
4313 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4314 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4315 }
4316
4317 /* For pointer and reference types, fold in information about the type
4318 pointed to but do not recurse to the pointed-to type. */
4319 if (POINTER_TYPE_P (type))
4320 {
4321 v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
4322 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
4323 v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
4324 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4325 }
4326
  /* For integer types hash the string flag and whether the type
     is a sizetype.  */
4328 if (TREE_CODE (type) == INTEGER_TYPE)
4329 {
4330 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4331 v = iterative_hash_hashval_t (TYPE_IS_SIZETYPE (type), v);
4332 }
4333
4334 /* For array types hash their domain and the string flag. */
4335 if (TREE_CODE (type) == ARRAY_TYPE
4336 && TYPE_DOMAIN (type))
4337 {
4338 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4339 v = iterative_hash_canonical_type (TYPE_DOMAIN (type), v);
4340 }
4341
4342 /* Recurse for aggregates with a single element type. */
4343 if (TREE_CODE (type) == ARRAY_TYPE
4344 || TREE_CODE (type) == COMPLEX_TYPE
4345 || TREE_CODE (type) == VECTOR_TYPE)
4346 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4347
4348 /* Incorporate function return and argument types. */
4349 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4350 {
4351 unsigned na;
4352 tree p;
4353
4354 /* For method types also incorporate their parent class. */
4355 if (TREE_CODE (type) == METHOD_TYPE)
4356 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
4357
4358 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4359
4360 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4361 {
4362 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
4363 na++;
4364 }
4365
4366 v = iterative_hash_hashval_t (na, v);
4367 }
4368
4369 if (RECORD_OR_UNION_TYPE_P (type))
4370 {
4371 unsigned nf;
4372 tree f;
4373
4374 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4375 if (TREE_CODE (f) == FIELD_DECL)
4376 {
4377 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
4378 nf++;
4379 }
4380
4381 v = iterative_hash_hashval_t (nf, v);
4382 }
4383
  /* Cache the just computed hash value.  Re-find the slot first, as
     the recursive hashing above may have grown the hash table and
     invalidated the slot pointer obtained at function entry.  */
  mp = ggc_alloc_cleared_tree_int_map ();
  mp->base.from = type;
  mp->to = v;
  slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT);
  *slot = (void *) mp;
4389
4390 return iterative_hash_hashval_t (v, val);
4391 }
4392
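/* Returns a hash value for P (assumed to be a type), using
   iterative_hash_canonical_type and caching the result.  The value
   is equal for types considered compatible by
   gimple_canonical_types_compatible_p.  */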
4393 static hashval_t
4394 gimple_canonical_type_hash (const void *p)
4395 {
4396 if (canonical_type_hash_cache == NULL)
4397 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4398 tree_int_map_eq, NULL);
4399
4400 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
4401 }
4402
4403
4404 /* Returns nonzero if P1 and P2 are equal. */
4405
4406 static int
4407 gimple_type_eq (const void *p1, const void *p2)
4408 {
4409 const_tree t1 = (const_tree) p1;
4410 const_tree t2 = (const_tree) p2;
4411 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4412 CONST_CAST_TREE (t2));
4413 }
4414
4415
4416 /* Worker for gimple_register_type.
4417 Register type T in the global type table gimple_types.
4418 When REGISTERING_MV is false first recurse for the main variant of T. */
4419
4420 static tree
4421 gimple_register_type_1 (tree t, bool registering_mv)
4422 {
4423 void **slot;
4424 gimple_type_leader_entry *leader;
4425
4426 /* If we registered this type before return the cached result. */
4427 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
4428 if (leader->type == t)
4429 return leader->leader;
4430
4431 /* Always register the main variant first. This is important so we
4432 pick up the non-typedef variants as canonical, otherwise we'll end
4433 up taking typedef ids for structure tags during comparison.
4434 It also makes sure that main variants will be merged to main variants.
4435 As we are operating on a possibly partially fixed up type graph
4436 do not bother to recurse more than once, otherwise we may end up
4437 walking in circles.
4438 If we are registering a main variant it will either remain its
4439 own main variant or it will be merged to something else in which
4440 case we do not care for the main variant leader. */
4441 if (!registering_mv
4442 && TYPE_MAIN_VARIANT (t) != t)
4443 gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
4444
4445 /* See if we already have an equivalent type registered. */
4446 slot = htab_find_slot (gimple_types, t, INSERT);
4447 if (*slot
4448 && *(tree *)slot != t)
4449 {
4450 tree new_type = (tree) *((tree *) slot);
4451 leader->type = t;
4452 leader->leader = new_type;
4453 return new_type;
4454 }
4455
4456 /* If not, insert it to the cache and the hash. */
4457 leader->type = t;
4458 leader->leader = t;
4459 *slot = (void *) t;
4460 return t;
4461 }
4462
4463 /* Register type T in the global type table gimple_types.
4464 If another type T', compatible with T, already existed in
4465 gimple_types then return T', otherwise return T. This is used by
4466 LTO to merge identical types read from different TUs. */
4467
4468 tree
4469 gimple_register_type (tree t)
4470 {
4471 gcc_assert (TYPE_P (t));
4472
4473 if (!gimple_type_leader)
4474 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
4475 (GIMPLE_TYPE_LEADER_SIZE);
4476
4477 if (gimple_types == NULL)
4478 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
4479
4480 return gimple_register_type_1 (t, false);
4481 }
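
/* A hypothetical reader loop illustrating the intended use (the real
   LTO reader lives elsewhere; stream_in_next_type is a made-up
   helper):

     tree t = stream_in_next_type ();
     t = gimple_register_type (t);

   Every subsequent reference to the streamed type can then use the
   returned leader, so identical types from different TUs collapse to
   a single node.  */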
4482
4483 /* The TYPE_CANONICAL merging machinery. It should closely resemble
4484 the middle-end types_compatible_p function. It needs to avoid
4485 claiming types are different for types that should be treated
4486 the same with respect to TBAA. Canonical types are also used
4487 for IL consistency checks via the useless_type_conversion_p
4488 predicate which does not handle all type kinds itself but falls
4489 back to pointer-comparison of TYPE_CANONICAL for aggregates
4490 for example. */
4491
/* Return true iff T1 and T2 are structurally identical as far as
   TBAA is concerned.  */
4494
4495 static bool
4496 gimple_canonical_types_compatible_p (tree t1, tree t2)
4497 {
4498 /* Before starting to set up the SCC machinery handle simple cases. */
4499
4500 /* Check first for the obvious case of pointer identity. */
4501 if (t1 == t2)
4502 return true;
4503
4504 /* Check that we have two types to compare. */
4505 if (t1 == NULL_TREE || t2 == NULL_TREE)
4506 return false;
4507
4508 /* If the types have been previously registered and found equal
4509 they still are. */
4510 if (TYPE_CANONICAL (t1)
4511 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
4512 return true;
4513
4514 /* Can't be the same type if the types don't have the same code. */
4515 if (TREE_CODE (t1) != TREE_CODE (t2))
4516 return false;
4517
4518 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
4519 return false;
4520
4521 /* Qualifiers do not matter for canonical type comparison purposes. */
4522
4523 /* Void types and nullptr types are always the same. */
4524 if (TREE_CODE (t1) == VOID_TYPE
4525 || TREE_CODE (t1) == NULLPTR_TYPE)
4526 return true;
4527
4528 /* Can't be the same type if they have different alignment, or mode. */
4529 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
4530 || TYPE_MODE (t1) != TYPE_MODE (t2))
4531 return false;
4532
4533 /* Non-aggregate types can be handled cheaply. */
4534 if (INTEGRAL_TYPE_P (t1)
4535 || SCALAR_FLOAT_TYPE_P (t1)
4536 || FIXED_POINT_TYPE_P (t1)
4537 || TREE_CODE (t1) == VECTOR_TYPE
4538 || TREE_CODE (t1) == COMPLEX_TYPE
4539 || TREE_CODE (t1) == OFFSET_TYPE
4540 || POINTER_TYPE_P (t1))
4541 {
4542 /* Can't be the same type if they have different sign or precision. */
4543 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
4544 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
4545 return false;
4546
4547 if (TREE_CODE (t1) == INTEGER_TYPE
4548 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
4549 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
4550 return false;
4551
4552 /* For canonical type comparisons we do not want to build SCCs
4553 so we cannot compare pointed-to types. But we can, for now,
4554 require the same pointed-to type kind and match what
4555 useless_type_conversion_p would do. */
4556 if (POINTER_TYPE_P (t1))
4557 {
4558 /* If the two pointers have different ref-all attributes,
4559 they can't be the same type. */
4560 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
4561 return false;
4562
4563 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
4564 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
4565 return false;
4566
4567 if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
4568 return false;
4569
4570 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
4571 return false;
4572 }
4573
4574 /* Tail-recurse to components. */
4575 if (TREE_CODE (t1) == VECTOR_TYPE
4576 || TREE_CODE (t1) == COMPLEX_TYPE)
4577 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
4578 TREE_TYPE (t2));
4579
4580 return true;
4581 }
4582
4583 /* If their attributes are not the same they can't be the same type. */
4584 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
4585 return false;
4586
4587 /* Do type-specific comparisons. */
4588 switch (TREE_CODE (t1))
4589 {
4590 case ARRAY_TYPE:
      /* Array types are the same if the element types are the same and
	 the number of elements is the same.  */
4593 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
4594 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
4595 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
4596 return false;
4597 else
4598 {
4599 tree i1 = TYPE_DOMAIN (t1);
4600 tree i2 = TYPE_DOMAIN (t2);
4601
4602 /* For an incomplete external array, the type domain can be
4603 NULL_TREE. Check this condition also. */
4604 if (i1 == NULL_TREE && i2 == NULL_TREE)
4605 return true;
4606 else if (i1 == NULL_TREE || i2 == NULL_TREE)
4607 return false;
	  /* For a complete array type, if the possibly gimplified sizes
	     differ, the types are different.  */
4610 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
4611 || (TYPE_SIZE (i1)
4612 && TYPE_SIZE (i2)
4613 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
4614 return false;
4615 else
4616 {
4617 tree min1 = TYPE_MIN_VALUE (i1);
4618 tree min2 = TYPE_MIN_VALUE (i2);
4619 tree max1 = TYPE_MAX_VALUE (i1);
4620 tree max2 = TYPE_MAX_VALUE (i2);
4621
4622 /* The minimum/maximum values have to be the same. */
4623 if ((min1 == min2
4624 || (min1 && min2
4625 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
4626 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
4627 || operand_equal_p (min1, min2, 0))))
4628 && (max1 == max2
4629 || (max1 && max2
4630 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
4631 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
4632 || operand_equal_p (max1, max2, 0)))))
4633 return true;
4634 else
4635 return false;
4636 }
4637 }
4638
4639 case METHOD_TYPE:
4640 /* Method types should belong to the same class. */
4641 if (!gimple_canonical_types_compatible_p
4642 (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2)))
4643 return false;
4644
4645 /* Fallthru */
4646
4647 case FUNCTION_TYPE:
      /* Function types are the same if the return type and argument
	 types are the same.  */
4650 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
4651 return false;
4652
4653 if (!comp_type_attributes (t1, t2))
4654 return false;
4655
4656 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
4657 return true;
4658 else
4659 {
4660 tree parms1, parms2;
4661
4662 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
4663 parms1 && parms2;
4664 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
4665 {
4666 if (!gimple_canonical_types_compatible_p
4667 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
4668 return false;
4669 }
4670
4671 if (parms1 || parms2)
4672 return false;
4673
4674 return true;
4675 }
4676
4677 case RECORD_TYPE:
4678 case UNION_TYPE:
4679 case QUAL_UNION_TYPE:
4680 {
4681 tree f1, f2;
4682
4683 /* For aggregate types, all the fields must be the same. */
4684 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
4685 f1 || f2;
4686 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
4687 {
4688 /* Skip non-fields. */
4689 while (f1 && TREE_CODE (f1) != FIELD_DECL)
4690 f1 = TREE_CHAIN (f1);
4691 while (f2 && TREE_CODE (f2) != FIELD_DECL)
4692 f2 = TREE_CHAIN (f2);
4693 if (!f1 || !f2)
4694 break;
4695 /* The fields must have the same name, offset and type. */
4696 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
4697 || !gimple_compare_field_offset (f1, f2)
4698 || !gimple_canonical_types_compatible_p
4699 (TREE_TYPE (f1), TREE_TYPE (f2)))
4700 return false;
4701 }
4702
4703 /* If one aggregate has more fields than the other, they
4704 are not the same. */
4705 if (f1 || f2)
4706 return false;
4707
4708 return true;
4709 }
4710
4711 default:
4712 gcc_unreachable ();
4713 }
4714 }
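
/* Note the contrast with gimple_types_compatible_p above: that
   predicate rejects types with differing TYPE_QUALS, whereas here,
   for example, 'const int' and 'int' compare equal because
   qualifiers do not matter for TBAA.  */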
4715
4716
4717 /* Returns nonzero if P1 and P2 are equal. */
4718
4719 static int
4720 gimple_canonical_type_eq (const void *p1, const void *p2)
4721 {
4722 const_tree t1 = (const_tree) p1;
4723 const_tree t2 = (const_tree) p2;
4724 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
4725 CONST_CAST_TREE (t2));
4726 }
4727
/* Register type T in the global canonical type table
   gimple_canonical_types.  If another type T', compatible with T for
   TBAA purposes, already existed in gimple_canonical_types then
   return T', otherwise return T.  This is used by LTO to merge
   identical types read from different TUs.

   ??? This merging does not exactly match how the tree.c middle-end
   functions will assign TYPE_CANONICAL when new types are created
   during optimization (which at least happens for pointer and array
   types).  */
4737
4738 tree
4739 gimple_register_canonical_type (tree t)
4740 {
4741 void **slot;
4742
4743 gcc_assert (TYPE_P (t));
4744
4745 if (TYPE_CANONICAL (t))
4746 return TYPE_CANONICAL (t);
4747
4748 if (gimple_canonical_types == NULL)
4749 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
4750 gimple_canonical_type_eq, 0);
4751
4752 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
4753 if (*slot
4754 && *(tree *)slot != t)
4755 {
4756 tree new_type = (tree) *((tree *) slot);
4757
4758 TYPE_CANONICAL (t) = new_type;
4759 t = new_type;
4760 }
4761 else
4762 {
4763 TYPE_CANONICAL (t) = t;
4764 *slot = (void *) t;
4765 }
4766
4767 return t;
4768 }
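
/* For illustration: if two TUs both define 'struct S { int i; }',
   registering both copies here gives them the same TYPE_CANONICAL
   (whichever copy was registered first), so TBAA treats accesses
   through either copy as aliasing.  */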
4769
4770
/* Show statistics on references to the global type tables and the
   type hash caches.  */
4772
4773 void
4774 print_gimple_types_stats (void)
4775 {
4776 if (gimple_types)
4777 fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
4778 "%ld searches, %ld collisions (ratio: %f)\n",
4779 (long) htab_size (gimple_types),
4780 (long) htab_elements (gimple_types),
4781 (long) gimple_types->searches,
4782 (long) gimple_types->collisions,
4783 htab_collisions (gimple_types));
4784 else
4785 fprintf (stderr, "GIMPLE type table is empty\n");
4786 if (type_hash_cache)
4787 fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
4788 "%ld searches, %ld collisions (ratio: %f)\n",
4789 (long) htab_size (type_hash_cache),
4790 (long) htab_elements (type_hash_cache),
4791 (long) type_hash_cache->searches,
4792 (long) type_hash_cache->collisions,
4793 htab_collisions (type_hash_cache));
4794 else
4795 fprintf (stderr, "GIMPLE type hash table is empty\n");
4796 if (gimple_canonical_types)
4797 fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
4798 "%ld searches, %ld collisions (ratio: %f)\n",
4799 (long) htab_size (gimple_canonical_types),
4800 (long) htab_elements (gimple_canonical_types),
4801 (long) gimple_canonical_types->searches,
4802 (long) gimple_canonical_types->collisions,
4803 htab_collisions (gimple_canonical_types));
4804 else
4805 fprintf (stderr, "GIMPLE canonical type table is empty\n");
4806 if (canonical_type_hash_cache)
4807 fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
4808 "%ld searches, %ld collisions (ratio: %f)\n",
4809 (long) htab_size (canonical_type_hash_cache),
4810 (long) htab_elements (canonical_type_hash_cache),
4811 (long) canonical_type_hash_cache->searches,
4812 (long) canonical_type_hash_cache->collisions,
4813 htab_collisions (canonical_type_hash_cache));
4814 else
4815 fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
4816 }
4817
4818 /* Free the gimple type hashtables used for LTO type merging. */
4819
4820 void
4821 free_gimple_type_tables (void)
4822 {
4823 /* Last chance to print stats for the tables. */
4824 if (flag_lto_report)
4825 print_gimple_types_stats ();
4826
4827 if (gimple_types)
4828 {
4829 htab_delete (gimple_types);
4830 gimple_types = NULL;
4831 }
4832 if (gimple_canonical_types)
4833 {
4834 htab_delete (gimple_canonical_types);
4835 gimple_canonical_types = NULL;
4836 }
4837 if (type_hash_cache)
4838 {
4839 htab_delete (type_hash_cache);
4840 type_hash_cache = NULL;
4841 }
4842 if (canonical_type_hash_cache)
4843 {
4844 htab_delete (canonical_type_hash_cache);
4845 canonical_type_hash_cache = NULL;
4846 }
4847 if (type_pair_cache)
4848 {
4849 free (type_pair_cache);
4850 type_pair_cache = NULL;
4851 }
4852 gimple_type_leader = NULL;
4853 }
4854
4855
4856 /* Return a type the same as TYPE except unsigned or
4857 signed according to UNSIGNEDP. */
4858
4859 static tree
4860 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
4861 {
4862 tree type1;
4863
4864 type1 = TYPE_MAIN_VARIANT (type);
4865 if (type1 == signed_char_type_node
4866 || type1 == char_type_node
4867 || type1 == unsigned_char_type_node)
4868 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4869 if (type1 == integer_type_node || type1 == unsigned_type_node)
4870 return unsignedp ? unsigned_type_node : integer_type_node;
4871 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
4872 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4873 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
4874 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4875 if (type1 == long_long_integer_type_node
4876 || type1 == long_long_unsigned_type_node)
4877 return unsignedp
4878 ? long_long_unsigned_type_node
4879 : long_long_integer_type_node;
  if (int128_integer_type_node
      && (type1 == int128_integer_type_node
	  || type1 == int128_unsigned_type_node))
4881 return unsignedp
4882 ? int128_unsigned_type_node
4883 : int128_integer_type_node;
4884 #if HOST_BITS_PER_WIDE_INT >= 64
4885 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
4886 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4887 #endif
4888 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
4889 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4890 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
4891 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4892 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
4893 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4894 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
4895 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4896
4897 #define GIMPLE_FIXED_TYPES(NAME) \
4898 if (type1 == short_ ## NAME ## _type_node \
4899 || type1 == unsigned_short_ ## NAME ## _type_node) \
4900 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
4901 : short_ ## NAME ## _type_node; \
4902 if (type1 == NAME ## _type_node \
4903 || type1 == unsigned_ ## NAME ## _type_node) \
4904 return unsignedp ? unsigned_ ## NAME ## _type_node \
4905 : NAME ## _type_node; \
4906 if (type1 == long_ ## NAME ## _type_node \
4907 || type1 == unsigned_long_ ## NAME ## _type_node) \
4908 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
4909 : long_ ## NAME ## _type_node; \
4910 if (type1 == long_long_ ## NAME ## _type_node \
4911 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
4912 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
4913 : long_long_ ## NAME ## _type_node;
4914
4915 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
4916 if (type1 == NAME ## _type_node \
4917 || type1 == u ## NAME ## _type_node) \
4918 return unsignedp ? u ## NAME ## _type_node \
4919 : NAME ## _type_node;
4920
4921 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
4922 if (type1 == sat_ ## short_ ## NAME ## _type_node \
4923 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
4924 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
4925 : sat_ ## short_ ## NAME ## _type_node; \
4926 if (type1 == sat_ ## NAME ## _type_node \
4927 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
4928 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
4929 : sat_ ## NAME ## _type_node; \
4930 if (type1 == sat_ ## long_ ## NAME ## _type_node \
4931 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
4932 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
4933 : sat_ ## long_ ## NAME ## _type_node; \
4934 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
4935 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
4936 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
4937 : sat_ ## long_long_ ## NAME ## _type_node;
4938
4939 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
4940 if (type1 == sat_ ## NAME ## _type_node \
4941 || type1 == sat_ ## u ## NAME ## _type_node) \
4942 return unsignedp ? sat_ ## u ## NAME ## _type_node \
4943 : sat_ ## NAME ## _type_node;
4944
4945 GIMPLE_FIXED_TYPES (fract);
4946 GIMPLE_FIXED_TYPES_SAT (fract);
4947 GIMPLE_FIXED_TYPES (accum);
4948 GIMPLE_FIXED_TYPES_SAT (accum);
4949
4950 GIMPLE_FIXED_MODE_TYPES (qq);
4951 GIMPLE_FIXED_MODE_TYPES (hq);
4952 GIMPLE_FIXED_MODE_TYPES (sq);
4953 GIMPLE_FIXED_MODE_TYPES (dq);
4954 GIMPLE_FIXED_MODE_TYPES (tq);
4955 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
4956 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
4957 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
4958 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
4959 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
4960 GIMPLE_FIXED_MODE_TYPES (ha);
4961 GIMPLE_FIXED_MODE_TYPES (sa);
4962 GIMPLE_FIXED_MODE_TYPES (da);
4963 GIMPLE_FIXED_MODE_TYPES (ta);
4964 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
4965 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
4966 GIMPLE_FIXED_MODE_TYPES_SAT (da);
4967 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
4968
4969 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
4970 the precision; they have precision set to match their range, but
4971 may use a wider mode to match an ABI. If we change modes, we may
4972 wind up with bad conversions. For INTEGER_TYPEs in C, must check
4973 the precision as well, so as to yield correct results for
4974 bit-field types. C++ does not have these separate bit-field
4975 types, and producing a signed or unsigned variant of an
4976 ENUMERAL_TYPE may cause other problems as well. */
4977 if (!INTEGRAL_TYPE_P (type)
4978 || TYPE_UNSIGNED (type) == unsignedp)
4979 return type;
4980
4981 #define TYPE_OK(node) \
4982 (TYPE_MODE (type) == TYPE_MODE (node) \
4983 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
4984 if (TYPE_OK (signed_char_type_node))
4985 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4986 if (TYPE_OK (integer_type_node))
4987 return unsignedp ? unsigned_type_node : integer_type_node;
4988 if (TYPE_OK (short_integer_type_node))
4989 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4990 if (TYPE_OK (long_integer_type_node))
4991 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4992 if (TYPE_OK (long_long_integer_type_node))
4993 return (unsignedp
4994 ? long_long_unsigned_type_node
4995 : long_long_integer_type_node);
4996 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
4997 return (unsignedp
4998 ? int128_unsigned_type_node
4999 : int128_integer_type_node);
5000
5001 #if HOST_BITS_PER_WIDE_INT >= 64
5002 if (TYPE_OK (intTI_type_node))
5003 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
5004 #endif
5005 if (TYPE_OK (intDI_type_node))
5006 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
5007 if (TYPE_OK (intSI_type_node))
5008 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
5009 if (TYPE_OK (intHI_type_node))
5010 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
5011 if (TYPE_OK (intQI_type_node))
5012 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
5013
5014 #undef GIMPLE_FIXED_TYPES
5015 #undef GIMPLE_FIXED_MODE_TYPES
5016 #undef GIMPLE_FIXED_TYPES_SAT
5017 #undef GIMPLE_FIXED_MODE_TYPES_SAT
5018 #undef TYPE_OK
5019
5020 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
5021 }
5022
5023
5024 /* Return an unsigned type the same as TYPE in other respects. */
5025
5026 tree
5027 gimple_unsigned_type (tree type)
5028 {
5029 return gimple_signed_or_unsigned_type (true, type);
5030 }
5031
5032
5033 /* Return a signed type the same as TYPE in other respects. */
5034
5035 tree
5036 gimple_signed_type (tree type)
5037 {
5038 return gimple_signed_or_unsigned_type (false, type);
5039 }
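
/* Illustrative results of the two wrappers above:

     gimple_unsigned_type (integer_type_node) == unsigned_type_node
     gimple_signed_type (unsigned_char_type_node)
       == signed_char_type_node

   When no named node matches the precision and mode of TYPE, a
   nonstandard integer type of the right precision is built
   instead.  */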
5040
5041
5042 /* Return the typed-based alias set for T, which may be an expression
5043 or a type. Return -1 if we don't do anything special. */
5044
5045 alias_set_type
5046 gimple_get_alias_set (tree t)
5047 {
5048 tree u;
5049
5050 /* Permit type-punning when accessing a union, provided the access
5051 is directly through the union. For example, this code does not
5052 permit taking the address of a union member and then storing
5053 through it. Even the type-punning allowed here is a GCC
5054 extension, albeit a common and useful one; the C standard says
5055 that such accesses have implementation-defined behavior. */
5056 for (u = t;
5057 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
5058 u = TREE_OPERAND (u, 0))
5059 if (TREE_CODE (u) == COMPONENT_REF
5060 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
5061 return 0;
5062
5063 /* That's all the expressions we handle specially. */
5064 if (!TYPE_P (t))
5065 return -1;
5066
5067 /* For convenience, follow the C standard when dealing with
5068 character types. Any object may be accessed via an lvalue that
5069 has character type. */
5070 if (t == char_type_node
5071 || t == signed_char_type_node
5072 || t == unsigned_char_type_node)
5073 return 0;
5074
5075 /* Allow aliasing between signed and unsigned variants of the same
5076 type. We treat the signed variant as canonical. */
5077 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
5078 {
5079 tree t1 = gimple_signed_type (t);
5080
5081 /* t1 == t can happen for boolean nodes which are always unsigned. */
5082 if (t1 != t)
5083 return get_alias_set (t1);
5084 }
5085
5086 return -1;
5087 }
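
/* Two illustrative queries (hypothetical trees):

     For a COMPONENT_REF 'x.f' where 'x' has union type, the access
     goes directly through the union, so 0 (the alias-everything set)
     is returned.

     For the type 'unsigned int', the alias set of the signed variant
     'int' is returned, so signed and unsigned accesses may alias.  */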
5088
5089
5090 /* Data structure used to count the number of dereferences to PTR
5091 inside an expression. */
5092 struct count_ptr_d
5093 {
5094 tree ptr;
5095 unsigned num_stores;
5096 unsigned num_loads;
5097 };
5098
/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   MEM_REF nodes that dereference the pointer recorded in DATA.  */
5101
5102 static tree
5103 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
5104 {
5105 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
5106 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
5107
5108 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
5109 pointer 'ptr' is *not* dereferenced, it is simply used to compute
5110 the address of 'fld' as 'ptr + offsetof(fld)'. */
5111 if (TREE_CODE (*tp) == ADDR_EXPR)
5112 {
5113 *walk_subtrees = 0;
5114 return NULL_TREE;
5115 }
5116
5117 if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
5118 {
5119 if (wi_p->is_lhs)
5120 count_p->num_stores++;
5121 else
5122 count_p->num_loads++;
5123 }
5124
5125 return NULL_TREE;
5126 }
5127
5128 /* Count the number of direct and indirect uses for pointer PTR in
5129 statement STMT. The number of direct uses is stored in
5130 *NUM_USES_P. Indirect references are counted separately depending
5131 on whether they are store or load operations. The counts are
5132 stored in *NUM_STORES_P and *NUM_LOADS_P. */
5133
5134 void
5135 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
5136 unsigned *num_loads_p, unsigned *num_stores_p)
5137 {
5138 ssa_op_iter i;
5139 tree use;
5140
5141 *num_uses_p = 0;
5142 *num_loads_p = 0;
5143 *num_stores_p = 0;
5144
5145 /* Find out the total number of uses of PTR in STMT. */
5146 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
5147 if (use == ptr)
5148 (*num_uses_p)++;
5149
  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside MEM_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     MEM_REFs inside the expressions below.  */
5157 if (is_gimple_assign (stmt)
5158 || gimple_code (stmt) == GIMPLE_RETURN
5159 || gimple_code (stmt) == GIMPLE_ASM
5160 || is_gimple_call (stmt))
5161 {
5162 struct walk_stmt_info wi;
5163 struct count_ptr_d count;
5164
5165 count.ptr = ptr;
5166 count.num_stores = 0;
5167 count.num_loads = 0;
5168
5169 memset (&wi, 0, sizeof (wi));
5170 wi.info = &count;
5171 walk_gimple_op (stmt, count_ptr_derefs, &wi);
5172
5173 *num_stores_p = count.num_stores;
5174 *num_loads_p = count.num_loads;
5175 }
5176
5177 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
5178 }
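
/* Taking the statement from the comment above as a worked example:

     *x_1 = foo (x_1, *x_1);

   yields *NUM_USES_P == 3 (one per occurrence of x_1),
   *NUM_STORES_P == 1 (the store through *x_1 on the LHS) and
   *NUM_LOADS_P == 1 (the load through *x_1 in the argument list),
   consistent with the assertion above.  */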
5179
5180 /* From a tree operand OP return the base of a load or store operation
5181 or NULL_TREE if OP is not a load or a store. */
5182
5183 static tree
5184 get_base_loadstore (tree op)
5185 {
5186 while (handled_component_p (op))
5187 op = TREE_OPERAND (op, 0);
5188 if (DECL_P (op)
5189 || INDIRECT_REF_P (op)
5190 || TREE_CODE (op) == MEM_REF
5191 || TREE_CODE (op) == TARGET_MEM_REF)
5192 return op;
5193 return NULL_TREE;
5194 }
5195
/* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
   VISIT_ADDR if non-NULL on loads, stores and address-taken operands,
   passing the STMT, the base of the operand and DATA to each.  The base
   will be either a decl, an indirect reference (including TARGET_MEM_REF)
   or the argument of an address expression.
   Returns the results of these callbacks or'ed.  */
5202
5203 bool
5204 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
5205 bool (*visit_load)(gimple, tree, void *),
5206 bool (*visit_store)(gimple, tree, void *),
5207 bool (*visit_addr)(gimple, tree, void *))
5208 {
5209 bool ret = false;
5210 unsigned i;
5211 if (gimple_assign_single_p (stmt))
5212 {
5213 tree lhs, rhs;
5214 if (visit_store)
5215 {
5216 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
5217 if (lhs)
5218 ret |= visit_store (stmt, lhs, data);
5219 }
5220 rhs = gimple_assign_rhs1 (stmt);
5221 while (handled_component_p (rhs))
5222 rhs = TREE_OPERAND (rhs, 0);
5223 if (visit_addr)
5224 {
5225 if (TREE_CODE (rhs) == ADDR_EXPR)
5226 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5227 else if (TREE_CODE (rhs) == TARGET_MEM_REF
5228 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
5229 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
5230 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
5231 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
5232 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
5233 0), data);
5234 else if (TREE_CODE (rhs) == CONSTRUCTOR)
5235 {
5236 unsigned int ix;
5237 tree val;
5238
5239 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
5240 if (TREE_CODE (val) == ADDR_EXPR)
5241 ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
5242 else if (TREE_CODE (val) == OBJ_TYPE_REF
5243 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
5244 ret |= visit_addr (stmt,
5245 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
5246 0), data);
5247 }
5248 lhs = gimple_assign_lhs (stmt);
5249 if (TREE_CODE (lhs) == TARGET_MEM_REF
5250 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
5251 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
5252 }
5253 if (visit_load)
5254 {
5255 rhs = get_base_loadstore (rhs);
5256 if (rhs)
5257 ret |= visit_load (stmt, rhs, data);
5258 }
5259 }
5260 else if (visit_addr
5261 && (is_gimple_assign (stmt)
5262 || gimple_code (stmt) == GIMPLE_COND))
5263 {
5264 for (i = 0; i < gimple_num_ops (stmt); ++i)
5265 {
5266 tree op = gimple_op (stmt, i);
5267 if (op == NULL_TREE)
5268 ;
5269 else if (TREE_CODE (op) == ADDR_EXPR)
5270 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
	  /* The rhs1 argument of a COND_EXPR or VCOND_EXPR is a
	     comparison tree with two operands.  */
5273 else if (i == 1 && COMPARISON_CLASS_P (op))
5274 {
5275 if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
5276 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
5277 0), data);
5278 if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
5279 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
5280 0), data);
5281 }
5282 }
5283 }
5284 else if (is_gimple_call (stmt))
5285 {
5286 if (visit_store)
5287 {
5288 tree lhs = gimple_call_lhs (stmt);
5289 if (lhs)
5290 {
5291 lhs = get_base_loadstore (lhs);
5292 if (lhs)
5293 ret |= visit_store (stmt, lhs, data);
5294 }
5295 }
5296 if (visit_load || visit_addr)
5297 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5298 {
5299 tree rhs = gimple_call_arg (stmt, i);
5300 if (visit_addr
5301 && TREE_CODE (rhs) == ADDR_EXPR)
5302 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5303 else if (visit_load)
5304 {
5305 rhs = get_base_loadstore (rhs);
5306 if (rhs)
5307 ret |= visit_load (stmt, rhs, data);
5308 }
5309 }
5310 if (visit_addr
5311 && gimple_call_chain (stmt)
5312 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
5313 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
5314 data);
5315 if (visit_addr
5316 && gimple_call_return_slot_opt_p (stmt)
5317 && gimple_call_lhs (stmt) != NULL_TREE
5318 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
5319 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
5320 }
5321 else if (gimple_code (stmt) == GIMPLE_ASM)
5322 {
5323 unsigned noutputs;
5324 const char *constraint;
5325 const char **oconstraints;
5326 bool allows_mem, allows_reg, is_inout;
5327 noutputs = gimple_asm_noutputs (stmt);
5328 oconstraints = XALLOCAVEC (const char *, noutputs);
5329 if (visit_store || visit_addr)
5330 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
5331 {
5332 tree link = gimple_asm_output_op (stmt, i);
5333 tree op = get_base_loadstore (TREE_VALUE (link));
5334 if (op && visit_store)
5335 ret |= visit_store (stmt, op, data);
5336 if (visit_addr)
5337 {
5338 constraint = TREE_STRING_POINTER
5339 (TREE_VALUE (TREE_PURPOSE (link)));
5340 oconstraints[i] = constraint;
5341 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5342 &allows_reg, &is_inout);
5343 if (op && !allows_reg && allows_mem)
5344 ret |= visit_addr (stmt, op, data);
5345 }
5346 }
5347 if (visit_load || visit_addr)
5348 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
5349 {
5350 tree link = gimple_asm_input_op (stmt, i);
5351 tree op = TREE_VALUE (link);
5352 if (visit_addr
5353 && TREE_CODE (op) == ADDR_EXPR)
5354 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5355 else if (visit_load || visit_addr)
5356 {
5357 op = get_base_loadstore (op);
5358 if (op)
5359 {
5360 if (visit_load)
5361 ret |= visit_load (stmt, op, data);
5362 if (visit_addr)
5363 {
5364 constraint = TREE_STRING_POINTER
5365 (TREE_VALUE (TREE_PURPOSE (link)));
5366 parse_input_constraint (&constraint, 0, 0, noutputs,
5367 0, oconstraints,
5368 &allows_mem, &allows_reg);
5369 if (!allows_reg && allows_mem)
5370 ret |= visit_addr (stmt, op, data);
5371 }
5372 }
5373 }
5374 }
5375 }
5376 else if (gimple_code (stmt) == GIMPLE_RETURN)
5377 {
5378 tree op = gimple_return_retval (stmt);
5379 if (op)
5380 {
5381 if (visit_addr
5382 && TREE_CODE (op) == ADDR_EXPR)
5383 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5384 else if (visit_load)
5385 {
5386 op = get_base_loadstore (op);
5387 if (op)
5388 ret |= visit_load (stmt, op, data);
5389 }
5390 }
5391 }
5392 else if (visit_addr
5393 && gimple_code (stmt) == GIMPLE_PHI)
5394 {
5395 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
5396 {
5397 tree op = PHI_ARG_DEF (stmt, i);
5398 if (TREE_CODE (op) == ADDR_EXPR)
5399 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5400 }
5401 }
5402
5403 return ret;
5404 }
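
/* A minimal VISIT_LOAD callback, sketched here only to illustrate
   the interface (note_load and its counter are hypothetical):

     static bool
     note_load (gimple stmt ATTRIBUTE_UNUSED, tree base, void *data)
     {
       unsigned *n = (unsigned *) data;
       if (DECL_P (base))
	 (*n)++;
       return false;   (false keeps the or'ed result unchanged)
     }

     unsigned n = 0;
     walk_stmt_load_store_addr_ops (stmt, &n, note_load, NULL, NULL);  */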
5405
5406 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
5407 should make a faster clone for this case. */
5408
5409 bool
5410 walk_stmt_load_store_ops (gimple stmt, void *data,
5411 bool (*visit_load)(gimple, tree, void *),
5412 bool (*visit_store)(gimple, tree, void *))
5413 {
5414 return walk_stmt_load_store_addr_ops (stmt, data,
5415 visit_load, visit_store, NULL);
5416 }
5417
5418 /* Helper for gimple_ior_addresses_taken_1. */
5419
5420 static bool
5421 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
5422 tree addr, void *data)
5423 {
5424 bitmap addresses_taken = (bitmap)data;
5425 addr = get_base_address (addr);
5426 if (addr
5427 && DECL_P (addr))
5428 {
5429 bitmap_set_bit (addresses_taken, DECL_UID (addr));
5430 return true;
5431 }
5432 return false;
5433 }
5434
/* Set the bit for the UID of each decl whose address is taken in STMT
   in the ADDRESSES_TAKEN bitmap.  Returns true if any were found.  */
5438
5439 bool
5440 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
5441 {
5442 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
5443 gimple_ior_addresses_taken_1);
5444 }
5445
5446
5447 /* Return a printable name for symbol DECL. */
5448
5449 const char *
5450 gimple_decl_printable_name (tree decl, int verbosity)
5451 {
5452 if (!DECL_NAME (decl))
5453 return NULL;
5454
5455 if (DECL_ASSEMBLER_NAME_SET_P (decl))
5456 {
5457 const char *str, *mangled_str;
5458 int dmgl_opts = DMGL_NO_OPTS;
5459
5460 if (verbosity >= 2)
5461 {
5462 dmgl_opts = DMGL_VERBOSE
5463 | DMGL_ANSI
5464 | DMGL_GNU_V3
5465 | DMGL_RET_POSTFIX;
5466 if (TREE_CODE (decl) == FUNCTION_DECL)
5467 dmgl_opts |= DMGL_PARAMS;
5468 }
5469
5470 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5471 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
5472 return (str) ? str : mangled_str;
5473 }
5474
5475 return IDENTIFIER_POINTER (DECL_NAME (decl));
5476 }
5477
/* Return true when STMT is a call to the normal built-in function CODE.  */
5479
5480 bool
5481 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
5482 {
5483 tree fndecl;
5484 return (is_gimple_call (stmt)
5485 && (fndecl = gimple_call_fndecl (stmt)) != NULL
5486 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5487 && DECL_FUNCTION_CODE (fndecl) == code);
5488 }
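
/* Typical use (illustrative):

     if (gimple_call_builtin_p (stmt, BUILT_IN_MEMSET))
       ...STMT is known to call the normal memset builtin...  */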
5489
5490 /* Return true if STMT clobbers memory. STMT is required to be a
5491 GIMPLE_ASM. */
5492
5493 bool
5494 gimple_asm_clobbers_memory_p (const_gimple stmt)
5495 {
5496 unsigned i;
5497
5498 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
5499 {
5500 tree op = gimple_asm_clobber_op (stmt, i);
5501 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
5502 return true;
5503 }
5504
5505 return false;
5506 }
5507 #include "gt-gimple.h"