gcc/gimple.c (gcc.git), as of commit "re PR tree-optimization/50605 (ice in ipa_get_jf_pass_through_result with -O3)"
1 /* Gimple IR support functions.
2
3 Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "target.h"
27 #include "tree.h"
28 #include "ggc.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "gimple.h"
32 #include "diagnostic.h"
33 #include "tree-flow.h"
34 #include "value-prof.h"
35 #include "flags.h"
36 #include "alias.h"
37 #include "demangle.h"
38 #include "langhooks.h"
39
40 /* Global type table. FIXME lto, it should be possible to re-use some
41 of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
42 etc), but those assume that types were built with the various
43 build_*_type routines which is not the case with the streamer. */
44 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
45 htab_t gimple_types;
46 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
47 htab_t gimple_canonical_types;
48 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
49 htab_t type_hash_cache;
50 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
51 htab_t canonical_type_hash_cache;
52
53 /* All the tuples have their operand vector (if present) at the very bottom
54 of the structure. Therefore, the offset required to find the
55 operands vector is the size of the structure minus the size of the
56 1-element tree array at the end (see gimple_ops). */
57 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
58 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
59 EXPORTED_CONST size_t gimple_ops_offset_[] = {
60 #include "gsstruct.def"
61 };
62 #undef DEFGSSTRUCT
63
64 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
65 static const size_t gsstruct_code_size[] = {
66 #include "gsstruct.def"
67 };
68 #undef DEFGSSTRUCT
69
70 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
71 const char *const gimple_code_name[] = {
72 #include "gimple.def"
73 };
74 #undef DEFGSCODE
75
76 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
77 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
78 #include "gimple.def"
79 };
80 #undef DEFGSCODE
81
82 #ifdef GATHER_STATISTICS
83 /* Gimple stats. */
84
85 int gimple_alloc_counts[(int) gimple_alloc_kind_all];
86 int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
87
88 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
89 static const char * const gimple_alloc_kind_names[] = {
90 "assignments",
91 "phi nodes",
92 "conditionals",
93 "sequences",
94 "everything else"
95 };
96
97 #endif /* GATHER_STATISTICS */
98
99 /* A cache of gimple_seq objects. Sequences are created and destroyed
100 fairly often during gimplification. */
101 static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;
102
103 /* Private API manipulation functions shared only with some
104 other files. */
105 extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
106 extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
107
108 /* Gimple tuple constructors.
109 Note: Any constructor taking a ``gimple_seq'' as a parameter can
110 be passed a NULL to start with an empty sequence. */
111
112 /* Set the code for statement G to CODE. */
113
114 static inline void
115 gimple_set_code (gimple g, enum gimple_code code)
116 {
117 g->gsbase.code = code;
118 }
119
120 /* Return the number of bytes needed to hold a GIMPLE statement with
121 code CODE. */
122
123 static inline size_t
124 gimple_size (enum gimple_code code)
125 {
126 return gsstruct_code_size[gss_for_code (code)];
127 }
128
129 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
130 operands. */
131
132 gimple
133 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
134 {
135 size_t size;
136 gimple stmt;
137
138 size = gimple_size (code);
139 if (num_ops > 0)
140 size += sizeof (tree) * (num_ops - 1);
141
142 #ifdef GATHER_STATISTICS
143 {
144 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
145 gimple_alloc_counts[(int) kind]++;
146 gimple_alloc_sizes[(int) kind] += size;
147 }
148 #endif
149
150 stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
151 gimple_set_code (stmt, code);
152 gimple_set_num_ops (stmt, num_ops);
153
154 /* Do not call gimple_set_modified here as it has other side
155 effects and this tuple is still not completely built. */
156 stmt->gsbase.modified = 1;
157
158 return stmt;
159 }
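
/* Illustrative arithmetic for the allocation above (a sketch, not a
   definitive spec).  Assuming a GIMPLE_ASSIGN "a = b + c", which
   needs num_ops == 3 (one LHS plus two RHS operands):

     size = gimple_size (GIMPLE_ASSIGN)    ... full struct, including op[1]
            + sizeof (tree) * (3 - 1);     ... grow op[] to three slots

   The "- 1" accounts for the one-element trailing operand array that
   is already part of the structure size (see gimple_ops_offset_). */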
160
161 /* Set SUBCODE to be the code of the expression computed by statement G. */
162
163 static inline void
164 gimple_set_subcode (gimple g, unsigned subcode)
165 {
166 /* We only have 16 bits for the RHS code. Assert that we are not
167 overflowing it. */
168 gcc_assert (subcode < (1 << 16));
169 g->gsbase.subcode = subcode;
170 }
171
172
173
174 /* Build a tuple with operands. CODE is the statement to build (which
175 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code
176 for the new tuple. NUM_OPS is the number of operands to allocate. */
177
178 #define gimple_build_with_ops(c, s, n) \
179 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
180
181 static gimple
182 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
183 unsigned num_ops MEM_STAT_DECL)
184 {
185 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
186 gimple_set_subcode (s, subcode);
187
188 return s;
189 }
190
191
192 /* Build a GIMPLE_RETURN statement returning RETVAL. */
193
194 gimple
195 gimple_build_return (tree retval)
196 {
197 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
198 if (retval)
199 gimple_return_set_retval (s, retval);
200 return s;
201 }
202
203 /* Reset alias information on call S. */
204
205 void
206 gimple_call_reset_alias_info (gimple s)
207 {
208 if (gimple_call_flags (s) & ECF_CONST)
209 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
210 else
211 pt_solution_reset (gimple_call_use_set (s));
212 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
213 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
214 else
215 pt_solution_reset (gimple_call_clobber_set (s));
216 }
217
218 /* Helper for gimple_build_call, gimple_build_call_valist,
219 gimple_build_call_vec and gimple_build_call_from_tree. Build the basic
220 components of a GIMPLE_CALL statement to function FN with NARGS
221 arguments. */
222
223 static inline gimple
224 gimple_build_call_1 (tree fn, unsigned nargs)
225 {
226 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
227 if (TREE_CODE (fn) == FUNCTION_DECL)
228 fn = build_fold_addr_expr (fn);
229 gimple_set_op (s, 1, fn);
230 gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
231 gimple_call_reset_alias_info (s);
232 return s;
233 }
234
235
236 /* Build a GIMPLE_CALL statement to function FN with the arguments
237 specified in vector ARGS. */
238
239 gimple
240 gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
241 {
242 unsigned i;
243 unsigned nargs = VEC_length (tree, args);
244 gimple call = gimple_build_call_1 (fn, nargs);
245
246 for (i = 0; i < nargs; i++)
247 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
248
249 return call;
250 }
251
252
253 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
254 arguments. The ... are the arguments. */
255
256 gimple
257 gimple_build_call (tree fn, unsigned nargs, ...)
258 {
259 va_list ap;
260 gimple call;
261 unsigned i;
262
263 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
264
265 call = gimple_build_call_1 (fn, nargs);
266
267 va_start (ap, nargs);
268 for (i = 0; i < nargs; i++)
269 gimple_call_set_arg (call, i, va_arg (ap, tree));
270 va_end (ap);
271
272 return call;
273 }
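
/* A minimal usage sketch for the builder above; FNDECL, LHS, ARG0
   and ARG1 are placeholder trees assumed to be valid:

     gimple call = gimple_build_call (fndecl, 2, arg0, arg1);
     gimple_call_set_lhs (call, lhs);   ... optional: capture the return value

   (gimple_call_set_lhs is provided by gimple.h.) */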
274
275
276 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
277 arguments. AP contains the arguments. */
278
279 gimple
280 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
281 {
282 gimple call;
283 unsigned i;
284
285 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
286
287 call = gimple_build_call_1 (fn, nargs);
288
289 for (i = 0; i < nargs; i++)
290 gimple_call_set_arg (call, i, va_arg (ap, tree));
291
292 return call;
293 }
294
295
296 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
297 Build the basic components of a GIMPLE_CALL statement to internal
298 function FN with NARGS arguments. */
299
300 static inline gimple
301 gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
302 {
303 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
304 s->gsbase.subcode |= GF_CALL_INTERNAL;
305 gimple_call_set_internal_fn (s, fn);
306 gimple_call_reset_alias_info (s);
307 return s;
308 }
309
310
311 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
312 the number of arguments. The ... are the arguments. */
313
314 gimple
315 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
316 {
317 va_list ap;
318 gimple call;
319 unsigned i;
320
321 call = gimple_build_call_internal_1 (fn, nargs);
322 va_start (ap, nargs);
323 for (i = 0; i < nargs; i++)
324 gimple_call_set_arg (call, i, va_arg (ap, tree));
325 va_end (ap);
326
327 return call;
328 }
329
330
331 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
332 specified in vector ARGS. */
333
334 gimple
335 gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
336 {
337 unsigned i, nargs;
338 gimple call;
339
340 nargs = VEC_length (tree, args);
341 call = gimple_build_call_internal_1 (fn, nargs);
342 for (i = 0; i < nargs; i++)
343 gimple_call_set_arg (call, i, VEC_index (tree, args, i));
344
345 return call;
346 }
347
348
349 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
350 assumed to be in GIMPLE form already. Minimal checking is done of
351 this fact. */
352
353 gimple
354 gimple_build_call_from_tree (tree t)
355 {
356 unsigned i, nargs;
357 gimple call;
358 tree fndecl = get_callee_fndecl (t);
359
360 gcc_assert (TREE_CODE (t) == CALL_EXPR);
361
362 nargs = call_expr_nargs (t);
363 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
364
365 for (i = 0; i < nargs; i++)
366 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
367
368 gimple_set_block (call, TREE_BLOCK (t));
369
370 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
371 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
372 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
373 gimple_call_set_cannot_inline (call, CALL_CANNOT_INLINE_P (t));
374 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
375 if (fndecl
376 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
377 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
378 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
379 gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
380 else
381 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
382 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
383 gimple_call_set_nothrow (call, TREE_NOTHROW (t));
384 gimple_set_no_warning (call, TREE_NO_WARNING (t));
385
386 return call;
387 }
388
389
390 /* Extract the operands and code for expression EXPR into *SUBCODE_P,
391 *OP1_P, *OP2_P and *OP3_P respectively. */
392
393 void
394 extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
395 tree *op2_p, tree *op3_p)
396 {
397 enum gimple_rhs_class grhs_class;
398
399 *subcode_p = TREE_CODE (expr);
400 grhs_class = get_gimple_rhs_class (*subcode_p);
401
402 if (grhs_class == GIMPLE_TERNARY_RHS)
403 {
404 *op1_p = TREE_OPERAND (expr, 0);
405 *op2_p = TREE_OPERAND (expr, 1);
406 *op3_p = TREE_OPERAND (expr, 2);
407 }
408 else if (grhs_class == GIMPLE_BINARY_RHS)
409 {
410 *op1_p = TREE_OPERAND (expr, 0);
411 *op2_p = TREE_OPERAND (expr, 1);
412 *op3_p = NULL_TREE;
413 }
414 else if (grhs_class == GIMPLE_UNARY_RHS)
415 {
416 *op1_p = TREE_OPERAND (expr, 0);
417 *op2_p = NULL_TREE;
418 *op3_p = NULL_TREE;
419 }
420 else if (grhs_class == GIMPLE_SINGLE_RHS)
421 {
422 *op1_p = expr;
423 *op2_p = NULL_TREE;
424 *op3_p = NULL_TREE;
425 }
426 else
427 gcc_unreachable ();
428 }
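
/* Worked example for the extraction above (illustrative only).  For
   EXPR = "b + c" (PLUS_EXPR, class GIMPLE_BINARY_RHS) the results are
   *SUBCODE_P = PLUS_EXPR, *OP1_P = b, *OP2_P = c, *OP3_P = NULL_TREE.
   For EXPR = "b" (GIMPLE_SINGLE_RHS) they are *SUBCODE_P =
   TREE_CODE (b), *OP1_P = b, and NULL_TREE for the others. */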
429
430
431 /* Build a GIMPLE_ASSIGN statement.
432
433 LHS is the left-hand side of the assignment.
434 RHS is the right-hand side; it may be a unary, binary or ternary expression. */
435
436 gimple
437 gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
438 {
439 enum tree_code subcode;
440 tree op1, op2, op3;
441
442 extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
443 return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
444 PASS_MEM_STAT);
445 }
446
447
448 /* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
449 OP1, OP2 and OP3. If OP2 and OP3 are NULL then SUBCODE must be of
450 class GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
451
452 gimple
453 gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
454 tree op2, tree op3 MEM_STAT_DECL)
455 {
456 unsigned num_ops;
457 gimple p;
458
459 /* Need 1 operand for LHS and 1 to 3 for the RHS (depending on the
460 code). */
461 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
462
463 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
464 PASS_MEM_STAT);
465 gimple_assign_set_lhs (p, lhs);
466 gimple_assign_set_rhs1 (p, op1);
467 if (op2)
468 {
469 gcc_assert (num_ops > 2);
470 gimple_assign_set_rhs2 (p, op2);
471 }
472
473 if (op3)
474 {
475 gcc_assert (num_ops > 3);
476 gimple_assign_set_rhs3 (p, op3);
477 }
478
479 return p;
480 }
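
/* A hedged usage sketch.  Building "x = y + z" via the wrapper macro
   from gimple.h (macro shape assumed; X, Y and Z are placeholder
   gimple operands):

     gimple g = gimple_build_assign_with_ops (PLUS_EXPR, x, y, z);

   Here num_ops is get_gimple_rhs_num_ops (PLUS_EXPR) + 1 == 3. */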
481
482
483 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
484
485 DST/SRC are the destination and source respectively. You can pass
486 ungimplified trees in DST or SRC, in which case they will be
487 converted to a gimple operand if necessary.
488
489 This function returns the newly created GIMPLE_ASSIGN tuple. */
490
491 gimple
492 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
493 {
494 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
495 gimplify_and_add (t, seq_p);
496 ggc_free (t);
497 return gimple_seq_last_stmt (*seq_p);
498 }
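
/* Typical use of gimplify_assign while accumulating a sequence (a
   sketch; DST and SRC are placeholder trees):

     gimple_seq seq = NULL;
     gimple g = gimplify_assign (dst, src, &seq);

   G is the last GIMPLE_ASSIGN appended to SEQ, so callers can adjust
   it further (e.g. set a location) after gimplification. */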
499
500
501 /* Build a GIMPLE_COND statement.
502
503 PRED is the condition used to compare LHS and the RHS.
504 T_LABEL is the label to jump to if the condition is true.
505 F_LABEL is the label to jump to otherwise. */
506
507 gimple
508 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
509 tree t_label, tree f_label)
510 {
511 gimple p;
512
513 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
514 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
515 gimple_cond_set_lhs (p, lhs);
516 gimple_cond_set_rhs (p, rhs);
517 gimple_cond_set_true_label (p, t_label);
518 gimple_cond_set_false_label (p, f_label);
519 return p;
520 }
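
/* A hedged example: building the GIMPLE form of
   "if (a < b) goto t; else goto f;" (A, B and the labels are
   placeholders):

     gimple cond = gimple_build_cond (LT_EXPR, a, b, t_label, f_label);

   PRED_CODE must be a tcc_comparison code; for an arbitrary boolean
   tree use gimple_build_cond_from_tree below. */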
521
522
523 /* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND. */
524
525 void
526 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
527 tree *lhs_p, tree *rhs_p)
528 {
529 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
530 || TREE_CODE (cond) == TRUTH_NOT_EXPR
531 || is_gimple_min_invariant (cond)
532 || SSA_VAR_P (cond));
533
534 extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
535
536 /* Canonicalize conditionals of the form 'if (!VAL)'. */
537 if (*code_p == TRUTH_NOT_EXPR)
538 {
539 *code_p = EQ_EXPR;
540 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
541 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
542 }
543 /* Canonicalize conditionals of the form 'if (VAL)' */
544 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
545 {
546 *code_p = NE_EXPR;
547 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
548 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
549 }
550 }
551
552
553 /* Build a GIMPLE_COND statement from the conditional expression tree
554 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
555
556 gimple
557 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
558 {
559 enum tree_code code;
560 tree lhs, rhs;
561
562 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
563 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
564 }
565
566 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
567 boolean expression tree COND. */
568
569 void
570 gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
571 {
572 enum tree_code code;
573 tree lhs, rhs;
574
575 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
576 gimple_cond_set_condition (stmt, code, lhs, rhs);
577 }
578
579 /* Build a GIMPLE_LABEL statement for LABEL. */
580
581 gimple
582 gimple_build_label (tree label)
583 {
584 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
585 gimple_label_set_label (p, label);
586 return p;
587 }
588
589 /* Build a GIMPLE_GOTO statement to label DEST. */
590
591 gimple
592 gimple_build_goto (tree dest)
593 {
594 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
595 gimple_goto_set_dest (p, dest);
596 return p;
597 }
598
599
600 /* Build a GIMPLE_NOP statement. */
601
602 gimple
603 gimple_build_nop (void)
604 {
605 return gimple_alloc (GIMPLE_NOP, 0);
606 }
607
608
609 /* Build a GIMPLE_BIND statement.
610 VARS are the variables in BODY.
611 BLOCK is the containing block. */
612
613 gimple
614 gimple_build_bind (tree vars, gimple_seq body, tree block)
615 {
616 gimple p = gimple_alloc (GIMPLE_BIND, 0);
617 gimple_bind_set_vars (p, vars);
618 if (body)
619 gimple_bind_set_body (p, body);
620 if (block)
621 gimple_bind_set_block (p, block);
622 return p;
623 }
624
625 /* Helper function to set the simple fields of an asm stmt.
626
627 STRING is a pointer to a string that is the asm block's assembly code.
628 NINPUTS is the number of register inputs.
629 NOUTPUTS is the number of register outputs.
630 NCLOBBERS is the number of clobbered registers.
631 NLABELS is the number of destination labels. */
632
633 static inline gimple
634 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
635 unsigned nclobbers, unsigned nlabels)
636 {
637 gimple p;
638 int size = strlen (string);
639
640 /* ASMs with labels cannot have outputs. This should have been
641 enforced by the front end. */
642 gcc_assert (nlabels == 0 || noutputs == 0);
643
644 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
645 ninputs + noutputs + nclobbers + nlabels);
646
647 p->gimple_asm.ni = ninputs;
648 p->gimple_asm.no = noutputs;
649 p->gimple_asm.nc = nclobbers;
650 p->gimple_asm.nl = nlabels;
651 p->gimple_asm.string = ggc_alloc_string (string, size);
652
653 #ifdef GATHER_STATISTICS
654 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
655 #endif
656
657 return p;
658 }
659
660 /* Build a GIMPLE_ASM statement.
661
662 STRING is the assembly code.
663 NINPUTS is the number of register inputs.
664 NOUTPUTS is the number of register outputs.
665 NCLOBBERS is the number of clobbered registers.
666 INPUTS is a vector of the input register parameters.
667 OUTPUTS is a vector of the output register parameters.
668 CLOBBERS is a vector of the clobbered register parameters.
669 LABELS is a vector of destination labels. */
670
671 gimple
672 gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
673 VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
674 VEC(tree,gc)* labels)
675 {
676 gimple p;
677 unsigned i;
678
679 p = gimple_build_asm_1 (string,
680 VEC_length (tree, inputs),
681 VEC_length (tree, outputs),
682 VEC_length (tree, clobbers),
683 VEC_length (tree, labels));
684
685 for (i = 0; i < VEC_length (tree, inputs); i++)
686 gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
687
688 for (i = 0; i < VEC_length (tree, outputs); i++)
689 gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));
690
691 for (i = 0; i < VEC_length (tree, clobbers); i++)
692 gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
693
694 for (i = 0; i < VEC_length (tree, labels); i++)
695 gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));
696
697 return p;
698 }
699
700 /* Build a GIMPLE_CATCH statement.
701
702 TYPES are the catch types.
703 HANDLER is the exception handler. */
704
705 gimple
706 gimple_build_catch (tree types, gimple_seq handler)
707 {
708 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
709 gimple_catch_set_types (p, types);
710 if (handler)
711 gimple_catch_set_handler (p, handler);
712
713 return p;
714 }
715
716 /* Build a GIMPLE_EH_FILTER statement.
717
718 TYPES are the filter's types.
719 FAILURE is the filter's failure action. */
720
721 gimple
722 gimple_build_eh_filter (tree types, gimple_seq failure)
723 {
724 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
725 gimple_eh_filter_set_types (p, types);
726 if (failure)
727 gimple_eh_filter_set_failure (p, failure);
728
729 return p;
730 }
731
732 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
733
734 gimple
735 gimple_build_eh_must_not_throw (tree decl)
736 {
737 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
738
739 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
740 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
741 gimple_eh_must_not_throw_set_fndecl (p, decl);
742
743 return p;
744 }
745
746 /* Build a GIMPLE_EH_ELSE statement. */
747
748 gimple
749 gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
750 {
751 gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
752 gimple_eh_else_set_n_body (p, n_body);
753 gimple_eh_else_set_e_body (p, e_body);
754 return p;
755 }
756
757 /* Build a GIMPLE_TRY statement.
758
759 EVAL is the expression to evaluate.
760 CLEANUP is the cleanup expression.
761 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
762 whether this is a try/catch or a try/finally respectively. */
763
764 gimple
765 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
766 enum gimple_try_flags kind)
767 {
768 gimple p;
769
770 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
771 p = gimple_alloc (GIMPLE_TRY, 0);
772 gimple_set_subcode (p, kind);
773 if (eval)
774 gimple_try_set_eval (p, eval);
775 if (cleanup)
776 gimple_try_set_cleanup (p, cleanup);
777
778 return p;
779 }
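
/* A minimal sketch of wrapping a body so that CLEANUP_SEQ runs on
   every exit path (both sequences are placeholders):

     gimple t = gimple_build_try (body_seq, cleanup_seq,
                                  GIMPLE_TRY_FINALLY);

   Use GIMPLE_TRY_CATCH instead when CLEANUP is an exception handler
   rather than a finally clause. */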
780
781 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
782
783 CLEANUP is the cleanup expression. */
784
785 gimple
786 gimple_build_wce (gimple_seq cleanup)
787 {
788 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
789 if (cleanup)
790 gimple_wce_set_cleanup (p, cleanup);
791
792 return p;
793 }
794
795
796 /* Build a GIMPLE_RESX statement. */
797
798 gimple
799 gimple_build_resx (int region)
800 {
801 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
802 p->gimple_eh_ctrl.region = region;
803 return p;
804 }
805
806
807 /* Helper for constructing a GIMPLE_SWITCH statement.
808 INDEX is the switch's index.
809 NLABELS is the number of labels in the switch excluding the default.
810 DEFAULT_LABEL is the default label for the switch statement. */
811
812 gimple
813 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
814 {
815 /* nlabels + 1 default label + 1 index. */
816 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
817 1 + (default_label != NULL) + nlabels);
818 gimple_switch_set_index (p, index);
819 if (default_label)
820 gimple_switch_set_default_label (p, default_label);
821 return p;
822 }
823
824
825 /* Build a GIMPLE_SWITCH statement.
826
827 INDEX is the switch's index.
828 NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
829 ... are the labels excluding the default. */
830
831 gimple
832 gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
833 {
834 va_list al;
835 unsigned i, offset;
836 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
837
838 /* Store the rest of the labels. */
839 va_start (al, default_label);
840 offset = (default_label != NULL);
841 for (i = 0; i < nlabels; i++)
842 gimple_switch_set_label (p, i + offset, va_arg (al, tree));
843 va_end (al);
844
845 return p;
846 }
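
/* An illustrative sketch: a two-case switch on INDEX with a default
   (the labels are placeholder CASE_LABEL_EXPRs built by the caller):

     gimple s = gimple_build_switch (2, index, default_label,
                                     case_a, case_b);

   If DEFAULT_LABEL is NULL, only the explicit case labels are stored
   (note the OFFSET computation above). */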
847
848
849 /* Build a GIMPLE_SWITCH statement.
850
851 INDEX is the switch's index.
852 DEFAULT_LABEL is the default label
853 ARGS is a vector of labels excluding the default. */
854
855 gimple
856 gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
857 {
858 unsigned i, offset, nlabels = VEC_length (tree, args);
859 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
860
861 /* Copy the labels from the vector to the switch statement. */
862 offset = (default_label != NULL);
863 for (i = 0; i < nlabels; i++)
864 gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));
865
866 return p;
867 }
868
869 /* Build a GIMPLE_EH_DISPATCH statement. */
870
871 gimple
872 gimple_build_eh_dispatch (int region)
873 {
874 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
875 p->gimple_eh_ctrl.region = region;
876 return p;
877 }
878
879 /* Build a new GIMPLE_DEBUG_BIND statement.
880
881 VAR is bound to VALUE; block and location are taken from STMT. */
882
883 gimple
884 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
885 {
886 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
887 (unsigned)GIMPLE_DEBUG_BIND, 2
888 PASS_MEM_STAT);
889
890 gimple_debug_bind_set_var (p, var);
891 gimple_debug_bind_set_value (p, value);
892 if (stmt)
893 {
894 gimple_set_block (p, gimple_block (stmt));
895 gimple_set_location (p, gimple_location (stmt));
896 }
897
898 return p;
899 }
900
901
902 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
903
904 VAR is bound to VALUE; block and location are taken from STMT. */
905
906 gimple
907 gimple_build_debug_source_bind_stat (tree var, tree value,
908 gimple stmt MEM_STAT_DECL)
909 {
910 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
911 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
912 PASS_MEM_STAT);
913
914 gimple_debug_source_bind_set_var (p, var);
915 gimple_debug_source_bind_set_value (p, value);
916 if (stmt)
917 {
918 gimple_set_block (p, gimple_block (stmt));
919 gimple_set_location (p, gimple_location (stmt));
920 }
921
922 return p;
923 }
924
925
926 /* Build a GIMPLE_OMP_CRITICAL statement.
927
928 BODY is the sequence of statements for which only one thread can execute.
929 NAME is an optional identifier for this critical block. */
930
931 gimple
932 gimple_build_omp_critical (gimple_seq body, tree name)
933 {
934 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
935 gimple_omp_critical_set_name (p, name);
936 if (body)
937 gimple_omp_set_body (p, body);
938
939 return p;
940 }
941
942 /* Build a GIMPLE_OMP_FOR statement.
943
944 BODY is the sequence of statements inside the for loop.
945 CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
946 lastprivate, reduction, ordered, schedule, and nowait.
947 COLLAPSE is the collapse count.
948 PRE_BODY is the sequence of statements that are loop invariant. */
949
950 gimple
951 gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
952 gimple_seq pre_body)
953 {
954 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
955 if (body)
956 gimple_omp_set_body (p, body);
957 gimple_omp_for_set_clauses (p, clauses);
958 p->gimple_omp_for.collapse = collapse;
959 p->gimple_omp_for.iter
960 = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
961 if (pre_body)
962 gimple_omp_for_set_pre_body (p, pre_body);
963
964 return p;
965 }
966
967
968 /* Build a GIMPLE_OMP_PARALLEL statement.
969
970 BODY is the sequence of statements to be executed in parallel.
971 CLAUSES are the OMP parallel construct's clauses.
972 CHILD_FN is the function created for the parallel threads to execute.
973 DATA_ARG are the shared data argument(s). */
974
975 gimple
976 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
977 tree data_arg)
978 {
979 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
980 if (body)
981 gimple_omp_set_body (p, body);
982 gimple_omp_parallel_set_clauses (p, clauses);
983 gimple_omp_parallel_set_child_fn (p, child_fn);
984 gimple_omp_parallel_set_data_arg (p, data_arg);
985
986 return p;
987 }
988
989
990 /* Build a GIMPLE_OMP_TASK statement.
991
992 BODY is the sequence of statements executed by the explicit task.
993 CLAUSES are the OMP task construct's clauses.
994 CHILD_FN is the function created for the parallel threads to execute.
995 DATA_ARG are the shared data argument(s).
996 COPY_FN is the optional function for firstprivate initialization.
997 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
998
999 gimple
1000 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
1001 tree data_arg, tree copy_fn, tree arg_size,
1002 tree arg_align)
1003 {
1004 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
1005 if (body)
1006 gimple_omp_set_body (p, body);
1007 gimple_omp_task_set_clauses (p, clauses);
1008 gimple_omp_task_set_child_fn (p, child_fn);
1009 gimple_omp_task_set_data_arg (p, data_arg);
1010 gimple_omp_task_set_copy_fn (p, copy_fn);
1011 gimple_omp_task_set_arg_size (p, arg_size);
1012 gimple_omp_task_set_arg_align (p, arg_align);
1013
1014 return p;
1015 }
1016
1017
1018 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
1019
1020 BODY is the sequence of statements in the section. */
1021
1022 gimple
1023 gimple_build_omp_section (gimple_seq body)
1024 {
1025 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
1026 if (body)
1027 gimple_omp_set_body (p, body);
1028
1029 return p;
1030 }
1031
1032
1033 /* Build a GIMPLE_OMP_MASTER statement.
1034
1035 BODY is the sequence of statements to be executed by just the master. */
1036
1037 gimple
1038 gimple_build_omp_master (gimple_seq body)
1039 {
1040 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
1041 if (body)
1042 gimple_omp_set_body (p, body);
1043
1044 return p;
1045 }
1046
1047
1048 /* Build a GIMPLE_OMP_CONTINUE statement.
1049
1050 CONTROL_DEF is the definition of the control variable.
1051 CONTROL_USE is the use of the control variable. */
1052
1053 gimple
1054 gimple_build_omp_continue (tree control_def, tree control_use)
1055 {
1056 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
1057 gimple_omp_continue_set_control_def (p, control_def);
1058 gimple_omp_continue_set_control_use (p, control_use);
1059 return p;
1060 }
1061
1062 /* Build a GIMPLE_OMP_ORDERED statement.
1063
1064 BODY is the sequence of statements inside a loop that will be executed in
1065 sequence. */
1066
1067 gimple
1068 gimple_build_omp_ordered (gimple_seq body)
1069 {
1070 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
1071 if (body)
1072 gimple_omp_set_body (p, body);
1073
1074 return p;
1075 }
1076
1077
1078 /* Build a GIMPLE_OMP_RETURN statement.
1079 WAIT_P is true if this is a non-waiting return. */
1080
1081 gimple
1082 gimple_build_omp_return (bool wait_p)
1083 {
1084 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1085 if (wait_p)
1086 gimple_omp_return_set_nowait (p);
1087
1088 return p;
1089 }
1090
1091
1092 /* Build a GIMPLE_OMP_SECTIONS statement.
1093
1094 BODY is a sequence of section statements.
1095 CLAUSES are any of the OMP sections construct's clauses: private,
1096 firstprivate, lastprivate, reduction, and nowait. */
1097
1098 gimple
1099 gimple_build_omp_sections (gimple_seq body, tree clauses)
1100 {
1101 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
1102 if (body)
1103 gimple_omp_set_body (p, body);
1104 gimple_omp_sections_set_clauses (p, clauses);
1105
1106 return p;
1107 }
1108
1109
1110 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1111
1112 gimple
1113 gimple_build_omp_sections_switch (void)
1114 {
1115 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1116 }
1117
1118
1119 /* Build a GIMPLE_OMP_SINGLE statement.
1120
1121 BODY is the sequence of statements that will be executed once.
1122 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1123 copyprivate, nowait. */
1124
1125 gimple
1126 gimple_build_omp_single (gimple_seq body, tree clauses)
1127 {
1128 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1129 if (body)
1130 gimple_omp_set_body (p, body);
1131 gimple_omp_single_set_clauses (p, clauses);
1132
1133 return p;
1134 }
1135
1136
1137 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1138
1139 gimple
1140 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1141 {
1142 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1143 gimple_omp_atomic_load_set_lhs (p, lhs);
1144 gimple_omp_atomic_load_set_rhs (p, rhs);
1145 return p;
1146 }
1147
1148 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1149
1150 VAL is the value we are storing. */
1151
1152 gimple
1153 gimple_build_omp_atomic_store (tree val)
1154 {
1155 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1156 gimple_omp_atomic_store_set_val (p, val);
1157 return p;
1158 }
1159
1160 /* Build a GIMPLE_TRANSACTION statement. */
1161
1162 gimple
1163 gimple_build_transaction (gimple_seq body, tree label)
1164 {
1165 gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
1166 gimple_transaction_set_body (p, body);
1167 gimple_transaction_set_label (p, label);
1168 return p;
1169 }
1170
1171 /* Build a GIMPLE_PREDICT statement. PREDICTOR is one of the predictors from
1172 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1173
1174 gimple
1175 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1176 {
1177 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1178 /* Ensure all the predictors fit into the lower bits of the subcode. */
1179 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1180 gimple_predict_set_predictor (p, predictor);
1181 gimple_predict_set_outcome (p, outcome);
1182 return p;
1183 }
1184
1185 #if defined ENABLE_GIMPLE_CHECKING
1186 /* Complain of a gimple type mismatch and die. */
1187
1188 void
1189 gimple_check_failed (const_gimple gs, const char *file, int line,
1190 const char *function, enum gimple_code code,
1191 enum tree_code subcode)
1192 {
1193 internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
1194 gimple_code_name[code],
1195 tree_code_name[subcode],
1196 gimple_code_name[gimple_code (gs)],
1197 gs->gsbase.subcode > 0
1198 ? tree_code_name[gs->gsbase.subcode]
1199 : "",
1200 function, trim_filename (file), line);
1201 }
1202 #endif /* ENABLE_GIMPLE_CHECKING */
1203
1204
1205 /* Allocate a new GIMPLE sequence in GC memory and return it. If
1206 there are free sequences in GIMPLE_SEQ_CACHE return one of those
1207 instead. */
1208
1209 gimple_seq
1210 gimple_seq_alloc (void)
1211 {
1212 gimple_seq seq = gimple_seq_cache;
1213 if (seq)
1214 {
1215 gimple_seq_cache = gimple_seq_cache->next_free;
1216 gcc_assert (gimple_seq_cache != seq);
1217 memset (seq, 0, sizeof (*seq));
1218 }
1219 else
1220 {
1221 seq = ggc_alloc_cleared_gimple_seq_d ();
1222 #ifdef GATHER_STATISTICS
1223 gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
1224 gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
1225 #endif
1226 }
1227
1228 return seq;
1229 }
1230
1231 /* Return SEQ to the free pool of GIMPLE sequences. */
1232
1233 void
1234 gimple_seq_free (gimple_seq seq)
1235 {
1236 if (seq == NULL)
1237 return;
1238
1239 gcc_assert (gimple_seq_first (seq) == NULL);
1240 gcc_assert (gimple_seq_last (seq) == NULL);
1241
1242 /* If this triggers, it's a sign that the same list is being freed
1243 twice. */
1244 gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);
1245
1246 /* Add SEQ to the pool of free sequences. */
1247 seq->next_free = gimple_seq_cache;
1248 gimple_seq_cache = seq;
1249 }
1250
1251
1252 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1253 *SEQ_P is NULL, a new sequence is allocated. */
1254
1255 void
1256 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1257 {
1258 gimple_stmt_iterator si;
1259
1260 if (gs == NULL)
1261 return;
1262
1263 if (*seq_p == NULL)
1264 *seq_p = gimple_seq_alloc ();
1265
1266 si = gsi_last (*seq_p);
1267 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1268 }
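
/* Usage sketch: accumulating statements into a lazily allocated
   sequence (STMT1/STMT2 are placeholders):

     gimple_seq seq = NULL;            ... allocated on first add
     gimple_seq_add_stmt (&seq, stmt1);
     gimple_seq_add_stmt (&seq, stmt2);

   Passing a NULL statement is a no-op, which lets callers append
   optional statements unconditionally. */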
1269
1270
1271 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1272 NULL, a new sequence is allocated. */
1273
1274 void
1275 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1276 {
1277 gimple_stmt_iterator si;
1278
1279 if (src == NULL)
1280 return;
1281
1282 if (*dst_p == NULL)
1283 *dst_p = gimple_seq_alloc ();
1284
1285 si = gsi_last (*dst_p);
1286 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1287 }
1288
1289
1290 /* Helper function of empty_body_p. Return true if STMT is an empty
1291 statement. */
1292
1293 static bool
1294 empty_stmt_p (gimple stmt)
1295 {
1296 if (gimple_code (stmt) == GIMPLE_NOP)
1297 return true;
1298 if (gimple_code (stmt) == GIMPLE_BIND)
1299 return empty_body_p (gimple_bind_body (stmt));
1300 return false;
1301 }
1302
1303
1304 /* Return true if BODY contains nothing but empty statements. */
1305
1306 bool
1307 empty_body_p (gimple_seq body)
1308 {
1309 gimple_stmt_iterator i;
1310
1311 if (gimple_seq_empty_p (body))
1312 return true;
1313 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1314 if (!empty_stmt_p (gsi_stmt (i))
1315 && !is_gimple_debug (gsi_stmt (i)))
1316 return false;
1317
1318 return true;
1319 }
1320
1321
1322 /* Perform a deep copy of sequence SRC and return the result. */
1323
1324 gimple_seq
1325 gimple_seq_copy (gimple_seq src)
1326 {
1327 gimple_stmt_iterator gsi;
1328 gimple_seq new_seq = gimple_seq_alloc ();
1329 gimple stmt;
1330
1331 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1332 {
1333 stmt = gimple_copy (gsi_stmt (gsi));
1334 gimple_seq_add_stmt (&new_seq, stmt);
1335 }
1336
1337 return new_seq;
1338 }
1339
1340
1341 /* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
1342 on each one. WI is as in walk_gimple_stmt.
1343
1344 If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
1345 value is stored in WI->CALLBACK_RESULT. Also, the statement that
1346 produced the value is returned if this statement has not been
1347 removed by a callback (wi->removed_stmt). If the statement has
1348 been removed, NULL is returned.
1349
1350 Otherwise, all the statements are walked and NULL returned. */
1351
1352 gimple
1353 walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
1354 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1355 {
1356 gimple_stmt_iterator gsi;
1357
1358 for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
1359 {
1360 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
1361 if (ret)
1362 {
1363 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
1364 to hold it. */
1365 gcc_assert (wi);
1366 wi->callback_result = ret;
1367
1368 return wi->removed_stmt ? NULL : gsi_stmt (gsi);
1369 }
1370
1371 if (!wi->removed_stmt)
1372 gsi_next (&gsi);
1373 }
1374
1375 if (wi)
1376 wi->callback_result = NULL_TREE;
1377
1378 return NULL;
1379 }
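
/* A sketch of a statement callback matching walk_stmt_fn; the name
   find_call_stmt is hypothetical.  It stops the walk at the first
   GIMPLE_CALL:

     static tree
     find_call_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
                     struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
         {
           *handled_ops_p = true;   ... skip the operand walk
           return error_mark_node;  ... any non-NULL tree stops the walk
         }
       return NULL_TREE;
     }

   walk_gimple_seq (seq, find_call_stmt, NULL, &wi) then returns the
   call statement and stores error_mark_node in wi.callback_result. */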
1380
1381
1382 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
1383
1384 static tree
1385 walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
1386 struct walk_stmt_info *wi)
1387 {
1388 tree ret, op;
1389 unsigned noutputs;
1390 const char **oconstraints;
1391 unsigned i, n;
1392 const char *constraint;
1393 bool allows_mem, allows_reg, is_inout;
1394
1395 noutputs = gimple_asm_noutputs (stmt);
1396 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1397
1398 if (wi)
1399 wi->is_lhs = true;
1400
1401 for (i = 0; i < noutputs; i++)
1402 {
1403 op = gimple_asm_output_op (stmt, i);
1404 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1405 oconstraints[i] = constraint;
1406 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
1407 &is_inout);
1408 if (wi)
1409 wi->val_only = (allows_reg || !allows_mem);
1410 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1411 if (ret)
1412 return ret;
1413 }
1414
1415 n = gimple_asm_ninputs (stmt);
1416 for (i = 0; i < n; i++)
1417 {
1418 op = gimple_asm_input_op (stmt, i);
1419 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1420 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1421 oconstraints, &allows_mem, &allows_reg);
1422 if (wi)
1423 {
1424 wi->val_only = (allows_reg || !allows_mem);
1425 /* Although input "m" is not really an LHS, we need an lvalue. */
1426 wi->is_lhs = !wi->val_only;
1427 }
1428 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1429 if (ret)
1430 return ret;
1431 }
1432
1433 if (wi)
1434 {
1435 wi->is_lhs = false;
1436 wi->val_only = true;
1437 }
1438
1439 n = gimple_asm_nlabels (stmt);
1440 for (i = 0; i < n; i++)
1441 {
1442 op = gimple_asm_label_op (stmt, i);
1443 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1444 if (ret)
1445 return ret;
1446 }
1447
1448 return NULL_TREE;
1449 }
1450
1451
1452 /* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
1453 STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
1454
1455 CALLBACK_OP is called on each operand of STMT via walk_tree.
1456 Additional parameters to walk_tree must be stored in WI. For each operand
1457 OP, walk_tree is called as:
1458
1459 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
1460
1461 If CALLBACK_OP returns non-NULL for an operand, the remaining
1462 operands are not scanned.
1463
1464 The return value is that returned by the last call to walk_tree, or
1465 NULL_TREE if no CALLBACK_OP is specified. */
1466
1467 tree
1468 walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
1469 struct walk_stmt_info *wi)
1470 {
1471 struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
1472 unsigned i;
1473 tree ret = NULL_TREE;
1474
1475 switch (gimple_code (stmt))
1476 {
1477 case GIMPLE_ASSIGN:
1478 /* Walk the RHS operands. If the LHS is of a non-renamable type or
1479 is a register variable, we may use a COMPONENT_REF on the RHS. */
1480 if (wi)
1481 {
1482 tree lhs = gimple_assign_lhs (stmt);
1483 wi->val_only
1484 = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
1485 || !gimple_assign_single_p (stmt);
1486 }
1487
1488 for (i = 1; i < gimple_num_ops (stmt); i++)
1489 {
1490 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
1491 pset);
1492 if (ret)
1493 return ret;
1494 }
1495
1496 /* Walk the LHS. If the RHS is appropriate for a memory, we
1497 may use a COMPONENT_REF on the LHS. */
1498 if (wi)
1499 {
1500 /* If the RHS has more than 1 operand, it is not appropriate
1501 for the memory. */
1502 wi->val_only = !(is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
1503 || TREE_CODE (gimple_assign_rhs1 (stmt))
1504 == CONSTRUCTOR)
1505 || !gimple_assign_single_p (stmt);
1506 wi->is_lhs = true;
1507 }
1508
1509 ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
1510 if (ret)
1511 return ret;
1512
1513 if (wi)
1514 {
1515 wi->val_only = true;
1516 wi->is_lhs = false;
1517 }
1518 break;
1519
1520 case GIMPLE_CALL:
1521 if (wi)
1522 {
1523 wi->is_lhs = false;
1524 wi->val_only = true;
1525 }
1526
1527 ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
1528 if (ret)
1529 return ret;
1530
1531 ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
1532 if (ret)
1533 return ret;
1534
1535 for (i = 0; i < gimple_call_num_args (stmt); i++)
1536 {
1537 if (wi)
1538 wi->val_only
1539 = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
1540 ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
1541 pset);
1542 if (ret)
1543 return ret;
1544 }
1545
1546 if (gimple_call_lhs (stmt))
1547 {
1548 if (wi)
1549 {
1550 wi->is_lhs = true;
1551 wi->val_only
1552 = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
1553 }
1554
1555 ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
1556 if (ret)
1557 return ret;
1558 }
1559
1560 if (wi)
1561 {
1562 wi->is_lhs = false;
1563 wi->val_only = true;
1564 }
1565 break;
1566
1567 case GIMPLE_CATCH:
1568 ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
1569 pset);
1570 if (ret)
1571 return ret;
1572 break;
1573
1574 case GIMPLE_EH_FILTER:
1575 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
1576 pset);
1577 if (ret)
1578 return ret;
1579 break;
1580
1581 case GIMPLE_ASM:
1582 ret = walk_gimple_asm (stmt, callback_op, wi);
1583 if (ret)
1584 return ret;
1585 break;
1586
1587 case GIMPLE_OMP_CONTINUE:
1588 ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
1589 callback_op, wi, pset);
1590 if (ret)
1591 return ret;
1592
1593 ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
1594 callback_op, wi, pset);
1595 if (ret)
1596 return ret;
1597 break;
1598
1599 case GIMPLE_OMP_CRITICAL:
1600 ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
1601 pset);
1602 if (ret)
1603 return ret;
1604 break;
1605
1606 case GIMPLE_OMP_FOR:
1607 ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
1608 pset);
1609 if (ret)
1610 return ret;
1611 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1612 {
1613 ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
1614 wi, pset);
1615 if (ret)
1616 return ret;
1617 ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
1618 wi, pset);
1619 if (ret)
1620 return ret;
1621 ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
1622 wi, pset);
1623 if (ret)
1624 return ret;
1625 ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
1626 wi, pset);
1627 }
1628 if (ret)
1629 return ret;
1630 break;
1631
1632 case GIMPLE_OMP_PARALLEL:
1633 ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
1634 wi, pset);
1635 if (ret)
1636 return ret;
1637 ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
1638 wi, pset);
1639 if (ret)
1640 return ret;
1641 ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
1642 wi, pset);
1643 if (ret)
1644 return ret;
1645 break;
1646
1647 case GIMPLE_OMP_TASK:
1648 ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
1649 wi, pset);
1650 if (ret)
1651 return ret;
1652 ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
1653 wi, pset);
1654 if (ret)
1655 return ret;
1656 ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
1657 wi, pset);
1658 if (ret)
1659 return ret;
1660 ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
1661 wi, pset);
1662 if (ret)
1663 return ret;
1664 ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
1665 wi, pset);
1666 if (ret)
1667 return ret;
1668 ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
1669 wi, pset);
1670 if (ret)
1671 return ret;
1672 break;
1673
1674 case GIMPLE_OMP_SECTIONS:
1675 ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
1676 wi, pset);
1677 if (ret)
1678 return ret;
1679
1680 ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
1681 wi, pset);
1682 if (ret)
1683 return ret;
1684
1685 break;
1686
1687 case GIMPLE_OMP_SINGLE:
1688 ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
1689 pset);
1690 if (ret)
1691 return ret;
1692 break;
1693
1694 case GIMPLE_OMP_ATOMIC_LOAD:
1695 ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
1696 pset);
1697 if (ret)
1698 return ret;
1699
1700 ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
1701 pset);
1702 if (ret)
1703 return ret;
1704 break;
1705
1706 case GIMPLE_OMP_ATOMIC_STORE:
1707 ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
1708 wi, pset);
1709 if (ret)
1710 return ret;
1711 break;
1712
1713 case GIMPLE_TRANSACTION:
1714 ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
1715 wi, pset);
1716 if (ret)
1717 return ret;
1718 break;
1719
1720 /* Tuples that do not have operands. */
1721 case GIMPLE_NOP:
1722 case GIMPLE_RESX:
1723 case GIMPLE_OMP_RETURN:
1724 case GIMPLE_PREDICT:
1725 break;
1726
1727 default:
1728 {
1729 enum gimple_statement_structure_enum gss;
1730 gss = gimple_statement_structure (stmt);
1731 if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
1732 for (i = 0; i < gimple_num_ops (stmt); i++)
1733 {
1734 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
1735 if (ret)
1736 return ret;
1737 }
1738 }
1739 break;
1740 }
1741
1742 return NULL_TREE;
1743 }
1744
1745
1746 /* Walk the current statement in GSI (optionally using traversal state
1747 stored in WI). If WI is NULL, no state is kept during traversal.
1748 The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
1749 that it has handled all the operands of the statement, its return
1750 value is returned. Otherwise, the return value from CALLBACK_STMT
1751 is discarded and its operands are scanned.
1752
1753 If CALLBACK_STMT is NULL or it didn't handle the operands,
1754 CALLBACK_OP is called on each operand of the statement via
1755 walk_gimple_op. If walk_gimple_op returns non-NULL for any
1756 operand, the remaining operands are not scanned. In this case, the
1757 return value from CALLBACK_OP is returned.
1758
1759 In any other case, NULL_TREE is returned. */
1760
1761 tree
1762 walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
1763 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1764 {
1765 gimple ret;
1766 tree tree_ret;
1767 gimple stmt = gsi_stmt (*gsi);
1768
1769 if (wi)
1770 {
1771 wi->gsi = *gsi;
1772 wi->removed_stmt = false;
1773
1774 if (wi->want_locations && gimple_has_location (stmt))
1775 input_location = gimple_location (stmt);
1776 }
1777
1778 ret = NULL;
1779
1780 /* Invoke the statement callback. Return if the callback handled
1781 all of STMT operands by itself. */
1782 if (callback_stmt)
1783 {
1784 bool handled_ops = false;
1785 tree_ret = callback_stmt (gsi, &handled_ops, wi);
1786 if (handled_ops)
1787 return tree_ret;
1788
1789 /* If CALLBACK_STMT did not handle operands, it should not have
1790 a value to return. */
1791 gcc_assert (tree_ret == NULL);
1792
1793 if (wi && wi->removed_stmt)
1794 return NULL;
1795
1796 /* Re-read stmt in case the callback changed it. */
1797 stmt = gsi_stmt (*gsi);
1798 }
1799
1800 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */
1801 if (callback_op)
1802 {
1803 tree_ret = walk_gimple_op (stmt, callback_op, wi);
1804 if (tree_ret)
1805 return tree_ret;
1806 }
1807
1808 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */
1809 switch (gimple_code (stmt))
1810 {
1811 case GIMPLE_BIND:
1812 ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
1813 callback_op, wi);
1814 if (ret)
1815 return wi->callback_result;
1816 break;
1817
1818 case GIMPLE_CATCH:
1819 ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
1820 callback_op, wi);
1821 if (ret)
1822 return wi->callback_result;
1823 break;
1824
1825 case GIMPLE_EH_FILTER:
1826 ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
1827 callback_op, wi);
1828 if (ret)
1829 return wi->callback_result;
1830 break;
1831
1832 case GIMPLE_EH_ELSE:
1833 ret = walk_gimple_seq (gimple_eh_else_n_body (stmt),
1834 callback_stmt, callback_op, wi);
1835 if (ret)
1836 return wi->callback_result;
1837 ret = walk_gimple_seq (gimple_eh_else_e_body (stmt),
1838 callback_stmt, callback_op, wi);
1839 if (ret)
1840 return wi->callback_result;
1841 break;
1842
1843 case GIMPLE_TRY:
1844 ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
1845 wi);
1846 if (ret)
1847 return wi->callback_result;
1848
1849 ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
1850 callback_op, wi);
1851 if (ret)
1852 return wi->callback_result;
1853 break;
1854
1855 case GIMPLE_OMP_FOR:
1856 ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
1857 callback_op, wi);
1858 if (ret)
1859 return wi->callback_result;
1860
1861 /* FALL THROUGH. */
1862 case GIMPLE_OMP_CRITICAL:
1863 case GIMPLE_OMP_MASTER:
1864 case GIMPLE_OMP_ORDERED:
1865 case GIMPLE_OMP_SECTION:
1866 case GIMPLE_OMP_PARALLEL:
1867 case GIMPLE_OMP_TASK:
1868 case GIMPLE_OMP_SECTIONS:
1869 case GIMPLE_OMP_SINGLE:
1870 ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt,
1871 callback_op, wi);
1872 if (ret)
1873 return wi->callback_result;
1874 break;
1875
1876 case GIMPLE_WITH_CLEANUP_EXPR:
1877 ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
1878 callback_op, wi);
1879 if (ret)
1880 return wi->callback_result;
1881 break;
1882
1883 case GIMPLE_TRANSACTION:
1884 ret = walk_gimple_seq (gimple_transaction_body (stmt),
1885 callback_stmt, callback_op, wi);
1886 if (ret)
1887 return wi->callback_result;
1888 break;
1889
1890 default:
1891 gcc_assert (!gimple_has_substatements (stmt));
1892 break;
1893 }
1894
1895 return NULL;
1896 }
1897
1898
1899 /* Set sequence SEQ to be the GIMPLE body for function FNDECL. */
1900
1901 void
1902 gimple_set_body (tree fndecl, gimple_seq seq)
1903 {
1904 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1905 if (fn == NULL)
1906 {
1907 /* If FNDECL still does not have a function structure associated
1908 with it, then it does not make sense for it to receive a
1909 GIMPLE body. */
1910 gcc_assert (seq == NULL);
1911 }
1912 else
1913 fn->gimple_body = seq;
1914 }
1915
1916
1917 /* Return the body of GIMPLE statements for function FNDECL. After the
1918 CFG pass, the function body doesn't exist anymore because it has
1919 been split up into basic blocks. In this case, it returns
1920 NULL. */
1921
1922 gimple_seq
1923 gimple_body (tree fndecl)
1924 {
1925 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1926 return fn ? fn->gimple_body : NULL;
1927 }
1928
1929 /* Return true when FNDECL has a GIMPLE body either in unlowered
1930 or CFG form. */
1931 bool
1932 gimple_has_body_p (tree fndecl)
1933 {
1934 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1935 return (gimple_body (fndecl) || (fn && fn->cfg));
1936 }
1937
1938 /* Return true if calls C1 and C2 are known to go to the same function. */
1939
1940 bool
1941 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1942 {
1943 if (gimple_call_internal_p (c1))
1944 return (gimple_call_internal_p (c2)
1945 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1946 else
1947 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1948 || (gimple_call_fndecl (c1)
1949 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1950 }
1951
1952 /* Detect flags from a GIMPLE_CALL. This is just like
1953 call_expr_flags, but for gimple tuples. */
1954
1955 int
1956 gimple_call_flags (const_gimple stmt)
1957 {
1958 int flags;
1959 tree decl = gimple_call_fndecl (stmt);
1960
1961 if (decl)
1962 flags = flags_from_decl_or_type (decl);
1963 else if (gimple_call_internal_p (stmt))
1964 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1965 else
1966 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1967
1968 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1969 flags |= ECF_NOTHROW;
1970
1971 return flags;
1972 }
1973
1974 /* Return the "fn spec" string for call STMT. */
1975
1976 static tree
1977 gimple_call_fnspec (const_gimple stmt)
1978 {
1979 tree type, attr;
1980
1981 type = gimple_call_fntype (stmt);
1982 if (!type)
1983 return NULL_TREE;
1984
1985 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1986 if (!attr)
1987 return NULL_TREE;
1988
1989 return TREE_VALUE (TREE_VALUE (attr));
1990 }
1991
1992 /* Detects argument flags for argument number ARG on call STMT. */
1993
1994 int
1995 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1996 {
1997 tree attr = gimple_call_fnspec (stmt);
1998
1999 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
2000 return 0;
2001
2002 switch (TREE_STRING_POINTER (attr)[1 + arg])
2003 {
2004 case 'x':
2005 case 'X':
2006 return EAF_UNUSED;
2007
2008 case 'R':
2009 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
2010
2011 case 'r':
2012 return EAF_NOCLOBBER | EAF_NOESCAPE;
2013
2014 case 'W':
2015 return EAF_DIRECT | EAF_NOESCAPE;
2016
2017 case 'w':
2018 return EAF_NOESCAPE;
2019
2020 case '.':
2021 default:
2022 return 0;
2023 }
2024 }
2025
2026 /* Detects return flags for the call STMT. */
2027
2028 int
2029 gimple_call_return_flags (const_gimple stmt)
2030 {
2031 tree attr;
2032
2033 if (gimple_call_flags (stmt) & ECF_MALLOC)
2034 return ERF_NOALIAS;
2035
2036 attr = gimple_call_fnspec (stmt);
2037 if (!attr || TREE_STRING_LENGTH (attr) < 1)
2038 return 0;
2039
2040 switch (TREE_STRING_POINTER (attr)[0])
2041 {
2042 case '1':
2043 case '2':
2044 case '3':
2045 case '4':
2046 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
2047
2048 case 'm':
2049 return ERF_NOALIAS;
2050
2051 case '.':
2052 default:
2053 return 0;
2054 }
2055 }
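
/* Illustrative sketch (not part of the original sources): with the
   hypothetical fnspec "1.." the leading '1' means the call returns its
   first argument, so

     int flags = gimple_call_return_flags (call);

   yields ERF_RETURNS_ARG with argument index 0 encoded in the low bits
   ('1' - '1' == 0); a leading 'm' would yield ERF_NOALIAS, as would
   any call already marked ECF_MALLOC.  */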
2056
2057
2058 /* Return true if GS is a copy assignment. */
2059
2060 bool
2061 gimple_assign_copy_p (gimple gs)
2062 {
2063 return (gimple_assign_single_p (gs)
2064 && is_gimple_val (gimple_op (gs, 1)));
2065 }
2066
2067
2068 /* Return true if GS is a SSA_NAME copy assignment. */
2069
2070 bool
2071 gimple_assign_ssa_name_copy_p (gimple gs)
2072 {
2073 return (gimple_assign_single_p (gs)
2074 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
2075 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
2076 }
2077
2078
2079 /* Return true if GS is an assignment with a unary RHS, but the
2080 operator has no effect on the assigned value. The logic is adapted
2081 from STRIP_NOPS. This predicate is intended to be used in tuplifying
2082 instances in which STRIP_NOPS was previously applied to the RHS of
2083 an assignment.
2084
2085 NOTE: In the use cases that led to the creation of this function
2086 and of gimple_assign_single_p, it is typical to test for either
2087 condition and to proceed in the same manner. In each case, the
2088 assigned value is represented by the single RHS operand of the
2089 assignment. I suspect there may be cases where gimple_assign_copy_p,
2090 gimple_assign_single_p, or equivalent logic is used where a similar
2091 treatment of unary NOPs is appropriate. */
2092
2093 bool
2094 gimple_assign_unary_nop_p (gimple gs)
2095 {
2096 return (is_gimple_assign (gs)
2097 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
2098 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
2099 && gimple_assign_rhs1 (gs) != error_mark_node
2100 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
2101 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
2102 }
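
/* Worked example (not part of the original sources): on a target
   where int and unsigned int share the same mode, the assignment
   produced by gimplifying

     u = (unsigned int) i;

   is a NOP_EXPR whose LHS and RHS1 have equal TYPE_MODE, so
   gimple_assign_unary_nop_p returns true; a widening conversion
   between different modes returns false.  */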
2103
2104 /* Set BB to be the basic block holding G. */
2105
2106 void
2107 gimple_set_bb (gimple stmt, basic_block bb)
2108 {
2109 stmt->gsbase.bb = bb;
2110
2111 /* If the statement is a label, add the label to the block-to-labels
2112 map so that we can speed up edge creation for GIMPLE_GOTOs. */
2113 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2114 {
2115 tree t;
2116 int uid;
2117
2118 t = gimple_label_label (stmt);
2119 uid = LABEL_DECL_UID (t);
2120 if (uid == -1)
2121 {
2122 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2123 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2124 if (old_len <= (unsigned) uid)
2125 {
2126 unsigned new_len = 3 * uid / 2 + 1;
2127
2128 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2129 new_len);
2130 }
2131 }
2132
2133 VEC_replace (basic_block, label_to_block_map, uid, bb);
2134 }
2135 }
2136
2137
2138 /* Modify the RHS of the assignment pointed-to by GSI using the
2139 operands in the expression tree EXPR.
2140
2141 NOTE: The statement pointed-to by GSI may be reallocated if it
2142 did not have enough operand slots.
2143
2144 This function is useful to convert an existing tree expression into
2145 the flat representation used for the RHS of a GIMPLE assignment.
2146 It will reallocate memory as needed to expand or shrink the number
2147 of operand slots needed to represent EXPR.
2148
2149 NOTE: If you find yourself building a tree and then calling this
2150 function, you are most certainly doing it the slow way. It is much
2151 better to build a new assignment or to use the function
2152 gimple_assign_set_rhs_with_ops, which does not require an
2153 expression tree to be built. */
2154
2155 void
2156 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2157 {
2158 enum tree_code subcode;
2159 tree op1, op2, op3;
2160
2161 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2162 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
2163 }
2164
2165
2166 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2167 operands OP1, OP2 and OP3.
2168
2169 NOTE: The statement pointed-to by GSI may be reallocated if it
2170 did not have enough operand slots. */
2171
2172 void
2173 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2174 tree op1, tree op2, tree op3)
2175 {
2176 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2177 gimple stmt = gsi_stmt (*gsi);
2178
2179 /* If the new CODE needs more operands, allocate a new statement. */
2180 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2181 {
2182 tree lhs = gimple_assign_lhs (stmt);
2183 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2184 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2185 gsi_replace (gsi, new_stmt, true);
2186 stmt = new_stmt;
2187
2188 /* The LHS needs to be reset as this also changes the SSA name
2189 on the LHS. */
2190 gimple_assign_set_lhs (stmt, lhs);
2191 }
2192
2193 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2194 gimple_set_subcode (stmt, code);
2195 gimple_assign_set_rhs1 (stmt, op1);
2196 if (new_rhs_ops > 1)
2197 gimple_assign_set_rhs2 (stmt, op2);
2198 if (new_rhs_ops > 2)
2199 gimple_assign_set_rhs3 (stmt, op3);
2200 }
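
/* Illustrative sketch (not part of the original sources): turning the
   single-operand assignment  x = y  into  x = y + z  needs two RHS
   operands, which may force a reallocation of the tuple:

     gimple_assign_set_rhs_with_ops_1 (&gsi, PLUS_EXPR, y, z,
                                       NULL_TREE);

   Afterwards GSI points at the (possibly new) statement, so any cached
   pointer to the old statement must not be reused.  */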
2201
2202
2203 /* Return the LHS of a statement that performs an assignment,
2204 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2205 for a call to a function that returns no value, or for a
2206 statement other than an assignment or a call. */
2207
2208 tree
2209 gimple_get_lhs (const_gimple stmt)
2210 {
2211 enum gimple_code code = gimple_code (stmt);
2212
2213 if (code == GIMPLE_ASSIGN)
2214 return gimple_assign_lhs (stmt);
2215 else if (code == GIMPLE_CALL)
2216 return gimple_call_lhs (stmt);
2217 else
2218 return NULL_TREE;
2219 }
2220
2221
2222 /* Set the LHS of a statement that performs an assignment,
2223 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2224
2225 void
2226 gimple_set_lhs (gimple stmt, tree lhs)
2227 {
2228 enum gimple_code code = gimple_code (stmt);
2229
2230 if (code == GIMPLE_ASSIGN)
2231 gimple_assign_set_lhs (stmt, lhs);
2232 else if (code == GIMPLE_CALL)
2233 gimple_call_set_lhs (stmt, lhs);
2234 else
2235 gcc_unreachable();
2236 }
2237
2238 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2239 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2240 expression with a different value.
2241
2242 This will update any annotations (say debug bind stmts) referring
2243 to the original LHS, so that they use the RHS instead. This is
2244 done even if NLHS and LHS are the same, for it is understood that
2245 the RHS will be modified afterwards, and NLHS will not be assigned
2246 an equivalent value.
2247
2248 Adjusting any non-annotation uses of the LHS, if needed, is the
2249 responsibility of the caller.
2250
2251 The effect of this call should be pretty much the same as that of
2252 inserting a copy of STMT before STMT, and then removing the
2253 original stmt, at which time gsi_remove() would have updated the
2254 annotations, but using this function saves all the inserting,
2255 copying and removing. */
2256
2257 void
2258 gimple_replace_lhs (gimple stmt, tree nlhs)
2259 {
2260 if (MAY_HAVE_DEBUG_STMTS)
2261 {
2262 tree lhs = gimple_get_lhs (stmt);
2263
2264 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2265
2266 insert_debug_temp_for_var_def (NULL, lhs);
2267 }
2268
2269 gimple_set_lhs (stmt, nlhs);
2270 }
2271
2272 /* Return a deep copy of statement STMT. All the operands from STMT
2273 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2274 and VUSE operand arrays are set to empty in the new copy. */
2275
2276 gimple
2277 gimple_copy (gimple stmt)
2278 {
2279 enum gimple_code code = gimple_code (stmt);
2280 unsigned num_ops = gimple_num_ops (stmt);
2281 gimple copy = gimple_alloc (code, num_ops);
2282 unsigned i;
2283
2284 /* Shallow copy all the fields from STMT. */
2285 memcpy (copy, stmt, gimple_size (code));
2286
2287 /* If STMT has sub-statements, deep-copy them as well. */
2288 if (gimple_has_substatements (stmt))
2289 {
2290 gimple_seq new_seq;
2291 tree t;
2292
2293 switch (gimple_code (stmt))
2294 {
2295 case GIMPLE_BIND:
2296 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2297 gimple_bind_set_body (copy, new_seq);
2298 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2299 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2300 break;
2301
2302 case GIMPLE_CATCH:
2303 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2304 gimple_catch_set_handler (copy, new_seq);
2305 t = unshare_expr (gimple_catch_types (stmt));
2306 gimple_catch_set_types (copy, t);
2307 break;
2308
2309 case GIMPLE_EH_FILTER:
2310 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2311 gimple_eh_filter_set_failure (copy, new_seq);
2312 t = unshare_expr (gimple_eh_filter_types (stmt));
2313 gimple_eh_filter_set_types (copy, t);
2314 break;
2315
2316 case GIMPLE_EH_ELSE:
2317 new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
2318 gimple_eh_else_set_n_body (copy, new_seq);
2319 new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
2320 gimple_eh_else_set_e_body (copy, new_seq);
2321 break;
2322
2323 case GIMPLE_TRY:
2324 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2325 gimple_try_set_eval (copy, new_seq);
2326 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2327 gimple_try_set_cleanup (copy, new_seq);
2328 break;
2329
2330 case GIMPLE_OMP_FOR:
2331 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2332 gimple_omp_for_set_pre_body (copy, new_seq);
2333 t = unshare_expr (gimple_omp_for_clauses (stmt));
2334 gimple_omp_for_set_clauses (copy, t);
2335 copy->gimple_omp_for.iter
2336 = ggc_alloc_vec_gimple_omp_for_iter
2337 (gimple_omp_for_collapse (stmt));
2338 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2339 {
2340 gimple_omp_for_set_cond (copy, i,
2341 gimple_omp_for_cond (stmt, i));
2342 gimple_omp_for_set_index (copy, i,
2343 gimple_omp_for_index (stmt, i));
2344 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2345 gimple_omp_for_set_initial (copy, i, t);
2346 t = unshare_expr (gimple_omp_for_final (stmt, i));
2347 gimple_omp_for_set_final (copy, i, t);
2348 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2349 gimple_omp_for_set_incr (copy, i, t);
2350 }
2351 goto copy_omp_body;
2352
2353 case GIMPLE_OMP_PARALLEL:
2354 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2355 gimple_omp_parallel_set_clauses (copy, t);
2356 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2357 gimple_omp_parallel_set_child_fn (copy, t);
2358 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2359 gimple_omp_parallel_set_data_arg (copy, t);
2360 goto copy_omp_body;
2361
2362 case GIMPLE_OMP_TASK:
2363 t = unshare_expr (gimple_omp_task_clauses (stmt));
2364 gimple_omp_task_set_clauses (copy, t);
2365 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2366 gimple_omp_task_set_child_fn (copy, t);
2367 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2368 gimple_omp_task_set_data_arg (copy, t);
2369 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2370 gimple_omp_task_set_copy_fn (copy, t);
2371 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2372 gimple_omp_task_set_arg_size (copy, t);
2373 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2374 gimple_omp_task_set_arg_align (copy, t);
2375 goto copy_omp_body;
2376
2377 case GIMPLE_OMP_CRITICAL:
2378 t = unshare_expr (gimple_omp_critical_name (stmt));
2379 gimple_omp_critical_set_name (copy, t);
2380 goto copy_omp_body;
2381
2382 case GIMPLE_OMP_SECTIONS:
2383 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2384 gimple_omp_sections_set_clauses (copy, t);
2385 t = unshare_expr (gimple_omp_sections_control (stmt));
2386 gimple_omp_sections_set_control (copy, t);
2387 /* FALLTHRU */
2388
2389 case GIMPLE_OMP_SINGLE:
2390 case GIMPLE_OMP_SECTION:
2391 case GIMPLE_OMP_MASTER:
2392 case GIMPLE_OMP_ORDERED:
2393 copy_omp_body:
2394 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2395 gimple_omp_set_body (copy, new_seq);
2396 break;
2397
2398 case GIMPLE_TRANSACTION:
2399 new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
2400 gimple_transaction_set_body (copy, new_seq);
2401 break;
2402
2403 case GIMPLE_WITH_CLEANUP_EXPR:
2404 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2405 gimple_wce_set_cleanup (copy, new_seq);
2406 break;
2407
2408 default:
2409 gcc_unreachable ();
2410 }
2411 }
2412
2413 /* Make copy of operands. */
2414 if (num_ops > 0)
2415 {
2416 for (i = 0; i < num_ops; i++)
2417 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2418
2419 /* Clear out SSA operand vectors on COPY. */
2420 if (gimple_has_ops (stmt))
2421 {
2422 gimple_set_def_ops (copy, NULL);
2423 gimple_set_use_ops (copy, NULL);
2424 }
2425
2426 if (gimple_has_mem_ops (stmt))
2427 {
2428 gimple_set_vdef (copy, gimple_vdef (stmt));
2429 gimple_set_vuse (copy, gimple_vuse (stmt));
2430 }
2431
2432 /* SSA operands need to be updated. */
2433 gimple_set_modified (copy, true);
2434 }
2435
2436 return copy;
2437 }
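
/* Illustrative sketch (not part of the original sources): a typical
   use when duplicating a statement into another place is

     gimple copy = gimple_copy (stmt);
     gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
     update_stmt (copy);

   where the final update is needed because the copy is returned with
   empty DEF/USE vectors and its modified flag set.  */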
2438
2439
2440 /* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
2441 a MODIFIED field. */
2442
2443 void
2444 gimple_set_modified (gimple s, bool modifiedp)
2445 {
2446 if (gimple_has_ops (s))
2447 s->gsbase.modified = (unsigned) modifiedp;
2448 }
2449
2450
2451 /* Return true if statement S has side-effects. We consider a
2452 statement to have side effects if:
2453
2454 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2455 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2456
2457 bool
2458 gimple_has_side_effects (const_gimple s)
2459 {
2460 if (is_gimple_debug (s))
2461 return false;
2462
2463 /* We don't have to scan the arguments to check for
2464 volatile arguments, though, at present, we still
2465 do a scan to check for TREE_SIDE_EFFECTS. */
2466 if (gimple_has_volatile_ops (s))
2467 return true;
2468
2469 if (gimple_code (s) == GIMPLE_ASM
2470 && gimple_asm_volatile_p (s))
2471 return true;
2472
2473 if (is_gimple_call (s))
2474 {
2475 int flags = gimple_call_flags (s);
2476
2477 /* An infinite loop is considered a side effect. */
2478 if (!(flags & (ECF_CONST | ECF_PURE))
2479 || (flags & ECF_LOOPING_CONST_OR_PURE))
2480 return true;
2481
2482 return false;
2483 }
2484
2485 return false;
2486 }
2487
2488 /* Return true if the RHS of statement S has side effects.
2489 We may use it to determine if it is admissible to replace
2490 an assignment or call with a copy of a previously-computed
2491 value. In such cases, side-effects due to the LHS are
2492 preserved. */
2493
2494 bool
2495 gimple_rhs_has_side_effects (const_gimple s)
2496 {
2497 unsigned i;
2498
2499 if (is_gimple_call (s))
2500 {
2501 unsigned nargs = gimple_call_num_args (s);
2502 tree fn;
2503
2504 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2505 return true;
2506
2507 /* We cannot use gimple_has_volatile_ops here,
2508 because we must ignore a volatile LHS. */
2509 fn = gimple_call_fn (s);
2510 if (fn && (TREE_SIDE_EFFECTS (fn) || TREE_THIS_VOLATILE (fn)))
2511 {
2512 gcc_assert (gimple_has_volatile_ops (s));
2513 return true;
2514 }
2515
2516 for (i = 0; i < nargs; i++)
2517 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2518 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2519 return true;
2520
2521 return false;
2522 }
2523 else if (is_gimple_assign (s))
2524 {
2525 /* Skip the first operand, the LHS. */
2526 for (i = 1; i < gimple_num_ops (s); i++)
2527 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2528 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2529 {
2530 gcc_assert (gimple_has_volatile_ops (s));
2531 return true;
2532 }
2533 }
2534 else if (is_gimple_debug (s))
2535 return false;
2536 else
2537 {
2538 /* For statements without an LHS, examine all arguments. */
2539 for (i = 0; i < gimple_num_ops (s); i++)
2540 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2541 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2542 {
2543 gcc_assert (gimple_has_volatile_ops (s));
2544 return true;
2545 }
2546 }
2547
2548 return false;
2549 }
2550
2551 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2552 Return true if S can trap. When INCLUDE_MEM is true, check whether
2553 the memory operations could trap. When INCLUDE_STORES is true and
2554 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2555
2556 bool
2557 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2558 {
2559 tree t, div = NULL_TREE;
2560 enum tree_code op;
2561
2562 if (include_mem)
2563 {
2564 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2565
2566 for (i = start; i < gimple_num_ops (s); i++)
2567 if (tree_could_trap_p (gimple_op (s, i)))
2568 return true;
2569 }
2570
2571 switch (gimple_code (s))
2572 {
2573 case GIMPLE_ASM:
2574 return gimple_asm_volatile_p (s);
2575
2576 case GIMPLE_CALL:
2577 t = gimple_call_fndecl (s);
2578 /* Assume that calls to weak functions may trap. */
2579 if (!t || !DECL_P (t) || DECL_WEAK (t))
2580 return true;
2581 return false;
2582
2583 case GIMPLE_ASSIGN:
2584 t = gimple_expr_type (s);
2585 op = gimple_assign_rhs_code (s);
2586 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2587 div = gimple_assign_rhs2 (s);
2588 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2589 (INTEGRAL_TYPE_P (t)
2590 && TYPE_OVERFLOW_TRAPS (t)),
2591 div));
2592
2593 default:
2594 break;
2595 }
2596
2597 return false;
2598 }
2599
2600 /* Return true if statement S can trap. */
2601
2602 bool
2603 gimple_could_trap_p (gimple s)
2604 {
2605 return gimple_could_trap_p_1 (s, true, true);
2606 }
2607
2608 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2609
2610 bool
2611 gimple_assign_rhs_could_trap_p (gimple s)
2612 {
2613 gcc_assert (is_gimple_assign (s));
2614 return gimple_could_trap_p_1 (s, true, false);
2615 }
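
/* Worked example (not part of the original sources): for the
   assignment  t = a / b  with integer operands, gimple_could_trap_p
   passes the divisor B to operation_could_trap_p, which, roughly
   speaking, reports a possible trap unless B is a known non-zero
   constant; a load such as  t = *p  is instead caught by the
   tree_could_trap_p scan over the operands.  */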
2616
2617
2618 /* Print debugging information for gimple stmts generated. */
2619
2620 void
2621 dump_gimple_statistics (void)
2622 {
2623 #ifdef GATHER_STATISTICS
2624 int i, total_tuples = 0, total_bytes = 0;
2625
2626 fprintf (stderr, "\nGIMPLE statements\n");
2627 fprintf (stderr, "Kind Stmts Bytes\n");
2628 fprintf (stderr, "---------------------------------------\n");
2629 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2630 {
2631 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2632 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2633 total_tuples += gimple_alloc_counts[i];
2634 total_bytes += gimple_alloc_sizes[i];
2635 }
2636 fprintf (stderr, "---------------------------------------\n");
2637 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2638 fprintf (stderr, "---------------------------------------\n");
2639 #else
2640 fprintf (stderr, "No gimple statistics\n");
2641 #endif
2642 }
2643
2644
2645 /* Return the number of operands needed on the RHS of a GIMPLE
2646 assignment for an expression with tree code CODE. */
2647
2648 unsigned
2649 get_gimple_rhs_num_ops (enum tree_code code)
2650 {
2651 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2652
2653 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2654 return 1;
2655 else if (rhs_class == GIMPLE_BINARY_RHS)
2656 return 2;
2657 else if (rhs_class == GIMPLE_TERNARY_RHS)
2658 return 3;
2659 else
2660 gcc_unreachable ();
2661 }
2662
2663 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2664 (unsigned char) \
2665 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2666 : ((TYPE) == tcc_binary \
2667 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2668 : ((TYPE) == tcc_constant \
2669 || (TYPE) == tcc_declaration \
2670 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2671 : ((SYM) == TRUTH_AND_EXPR \
2672 || (SYM) == TRUTH_OR_EXPR \
2673 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2674 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2675 : ((SYM) == COND_EXPR \
2676 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2677 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2678 || (SYM) == DOT_PROD_EXPR \
2679 || (SYM) == REALIGN_LOAD_EXPR \
2680 || (SYM) == VEC_COND_EXPR \
2681 || (SYM) == VEC_PERM_EXPR \
2682 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2683 : ((SYM) == CONSTRUCTOR \
2684 || (SYM) == OBJ_TYPE_REF \
2685 || (SYM) == ASSERT_EXPR \
2686 || (SYM) == ADDR_EXPR \
2687 || (SYM) == WITH_SIZE_EXPR \
2688 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2689 : GIMPLE_INVALID_RHS),
2690 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2691
2692 const unsigned char gimple_rhs_class_table[] = {
2693 #include "all-tree.def"
2694 };
2695
2696 #undef DEFTREECODE
2697 #undef END_OF_BASE_TREE_CODES
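
/* Illustrative examples (not part of the original sources) of the
   mapping established above:

     get_gimple_rhs_num_ops (NEGATE_EXPR)  == 1   (tcc_unary)
     get_gimple_rhs_num_ops (PLUS_EXPR)    == 2   (tcc_binary)
     get_gimple_rhs_num_ops (FMA_EXPR)     == 3   (GIMPLE_TERNARY_RHS)
     get_gimple_rhs_num_ops (SSA_NAME)     == 1   (GIMPLE_SINGLE_RHS)  */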
2698
2699 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2700
2701 /* Validation of GIMPLE expressions. */
2702
2703 /* Returns true iff T is a valid RHS for an assignment to a renamed
2704 user -- or front-end generated artificial -- variable. */
2705
2706 bool
2707 is_gimple_reg_rhs (tree t)
2708 {
2709 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2710 }
2711
2712 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2713 LHS, or for a call argument. */
2714
2715 bool
2716 is_gimple_mem_rhs (tree t)
2717 {
2718 /* If we're dealing with a renamable type, either source or dest must be
2719 a renamed variable. */
2720 if (is_gimple_reg_type (TREE_TYPE (t)))
2721 return is_gimple_val (t);
2722 else
2723 return is_gimple_val (t) || is_gimple_lvalue (t);
2724 }
2725
2726 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2727
2728 bool
2729 is_gimple_lvalue (tree t)
2730 {
2731 return (is_gimple_addressable (t)
2732 || TREE_CODE (t) == WITH_SIZE_EXPR
2733 /* These are complex lvalues, but don't have addresses, so they
2734 go here. */
2735 || TREE_CODE (t) == BIT_FIELD_REF);
2736 }
2737
2738 /* Return true if T is a GIMPLE condition. */
2739
2740 bool
2741 is_gimple_condexpr (tree t)
2742 {
2743 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2744 && !tree_could_throw_p (t)
2745 && is_gimple_val (TREE_OPERAND (t, 0))
2746 && is_gimple_val (TREE_OPERAND (t, 1))));
2747 }
2748
2749 /* Return true if T is something whose address can be taken. */
2750
2751 bool
2752 is_gimple_addressable (tree t)
2753 {
2754 return (is_gimple_id (t) || handled_component_p (t)
2755 || TREE_CODE (t) == MEM_REF);
2756 }
2757
2758 /* Return true if T is a valid gimple constant. */
2759
2760 bool
2761 is_gimple_constant (const_tree t)
2762 {
2763 switch (TREE_CODE (t))
2764 {
2765 case INTEGER_CST:
2766 case REAL_CST:
2767 case FIXED_CST:
2768 case STRING_CST:
2769 case COMPLEX_CST:
2770 case VECTOR_CST:
2771 return true;
2772
2773 /* Vector constant constructors are gimple invariant. */
2774 case CONSTRUCTOR:
2775 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2776 return TREE_CONSTANT (t);
2777 else
2778 return false;
2779
2780 default:
2781 return false;
2782 }
2783 }
2784
2785 /* Return true if T is a gimple address. */
2786
2787 bool
2788 is_gimple_address (const_tree t)
2789 {
2790 tree op;
2791
2792 if (TREE_CODE (t) != ADDR_EXPR)
2793 return false;
2794
2795 op = TREE_OPERAND (t, 0);
2796 while (handled_component_p (op))
2797 {
2798 if ((TREE_CODE (op) == ARRAY_REF
2799 || TREE_CODE (op) == ARRAY_RANGE_REF)
2800 && !is_gimple_val (TREE_OPERAND (op, 1)))
2801 return false;
2802
2803 op = TREE_OPERAND (op, 0);
2804 }
2805
2806 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2807 return true;
2808
2809 switch (TREE_CODE (op))
2810 {
2811 case PARM_DECL:
2812 case RESULT_DECL:
2813 case LABEL_DECL:
2814 case FUNCTION_DECL:
2815 case VAR_DECL:
2816 case CONST_DECL:
2817 return true;
2818
2819 default:
2820 return false;
2821 }
2822 }
2823
2824 /* Return true if T is a gimple invariant address. */
2825
2826 bool
2827 is_gimple_invariant_address (const_tree t)
2828 {
2829 const_tree op;
2830
2831 if (TREE_CODE (t) != ADDR_EXPR)
2832 return false;
2833
2834 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2835 if (!op)
2836 return false;
2837
2838 if (TREE_CODE (op) == MEM_REF)
2839 {
2840 const_tree op0 = TREE_OPERAND (op, 0);
2841 return (TREE_CODE (op0) == ADDR_EXPR
2842 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2843 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2844 }
2845
2846 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2847 }
2848
2849 /* Return true if T is a gimple invariant address at IPA level
2850 (so addresses of variables on stack are not allowed). */
2851
2852 bool
2853 is_gimple_ip_invariant_address (const_tree t)
2854 {
2855 const_tree op;
2856
2857 if (TREE_CODE (t) != ADDR_EXPR)
2858 return false;
2859
2860 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2861 if (!op)
2862 return false;
2863
2864 if (TREE_CODE (op) == MEM_REF)
2865 {
2866 const_tree op0 = TREE_OPERAND (op, 0);
2867 return (TREE_CODE (op0) == ADDR_EXPR
2868 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2869 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
2870 }
2871
2872 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
2873 }
2874
2875 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2876 form of function invariant. */
2877
2878 bool
2879 is_gimple_min_invariant (const_tree t)
2880 {
2881 if (TREE_CODE (t) == ADDR_EXPR)
2882 return is_gimple_invariant_address (t);
2883
2884 return is_gimple_constant (t);
2885 }
2886
2887 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2888 form of gimple minimal invariant. */
2889
2890 bool
2891 is_gimple_ip_invariant (const_tree t)
2892 {
2893 if (TREE_CODE (t) == ADDR_EXPR)
2894 return is_gimple_ip_invariant_address (t);
2895
2896 return is_gimple_constant (t);
2897 }
2898
2899 /* Return true if T looks like a valid GIMPLE statement. */
2900
2901 bool
2902 is_gimple_stmt (tree t)
2903 {
2904 const enum tree_code code = TREE_CODE (t);
2905
2906 switch (code)
2907 {
2908 case NOP_EXPR:
2909 /* The only valid NOP_EXPR is the empty statement. */
2910 return IS_EMPTY_STMT (t);
2911
2912 case BIND_EXPR:
2913 case COND_EXPR:
2914 /* These are only valid if they're void. */
2915 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2916
2917 case SWITCH_EXPR:
2918 case GOTO_EXPR:
2919 case RETURN_EXPR:
2920 case LABEL_EXPR:
2921 case CASE_LABEL_EXPR:
2922 case TRY_CATCH_EXPR:
2923 case TRY_FINALLY_EXPR:
2924 case EH_FILTER_EXPR:
2925 case CATCH_EXPR:
2926 case ASM_EXPR:
2927 case STATEMENT_LIST:
2928 case OMP_PARALLEL:
2929 case OMP_FOR:
2930 case OMP_SECTIONS:
2931 case OMP_SECTION:
2932 case OMP_SINGLE:
2933 case OMP_MASTER:
2934 case OMP_ORDERED:
2935 case OMP_CRITICAL:
2936 case OMP_TASK:
2937 /* These are always void. */
2938 return true;
2939
2940 case CALL_EXPR:
2941 case MODIFY_EXPR:
2942 case PREDICT_EXPR:
2943 /* These are valid regardless of their type. */
2944 return true;
2945
2946 default:
2947 return false;
2948 }
2949 }
2950
2951 /* Return true if T is a variable. */
2952
2953 bool
2954 is_gimple_variable (tree t)
2955 {
2956 return (TREE_CODE (t) == VAR_DECL
2957 || TREE_CODE (t) == PARM_DECL
2958 || TREE_CODE (t) == RESULT_DECL
2959 || TREE_CODE (t) == SSA_NAME);
2960 }
2961
2962 /* Return true if T is a GIMPLE identifier (something with an address). */
2963
2964 bool
2965 is_gimple_id (tree t)
2966 {
2967 return (is_gimple_variable (t)
2968 || TREE_CODE (t) == FUNCTION_DECL
2969 || TREE_CODE (t) == LABEL_DECL
2970 || TREE_CODE (t) == CONST_DECL
2971 /* Allow string constants, since they are addressable. */
2972 || TREE_CODE (t) == STRING_CST);
2973 }
2974
2975 /* Return true if TYPE is a suitable type for a scalar register variable. */
2976
2977 bool
2978 is_gimple_reg_type (tree type)
2979 {
2980 return !AGGREGATE_TYPE_P (type);
2981 }
2982
2983 /* Return true if T is a non-aggregate register variable. */
2984
2985 bool
2986 is_gimple_reg (tree t)
2987 {
2988 if (TREE_CODE (t) == SSA_NAME)
2989 t = SSA_NAME_VAR (t);
2990
2991 if (!is_gimple_variable (t))
2992 return false;
2993
2994 if (!is_gimple_reg_type (TREE_TYPE (t)))
2995 return false;
2996
2997 /* A volatile decl is not acceptable because we can't reuse it as
2998 needed. We need to copy it into a temp first. */
2999 if (TREE_THIS_VOLATILE (t))
3000 return false;
3001
3002 /* We define "registers" as things that can be renamed as needed,
3003 which with our infrastructure does not apply to memory. */
3004 if (needs_to_live_in_memory (t))
3005 return false;
3006
3007 /* Hard register variables are an interesting case. For those that
3008 are call-clobbered, we don't know where all the calls are, since
3009 we don't (want to) take into account which operations will turn
3010 into libcalls at the rtl level. For those that are call-saved,
3011 we don't currently model the fact that calls may in fact change
3012 global hard registers, nor do we examine ASM_CLOBBERS at the tree
3013 level, and so miss variable changes that they might imply. All around,
3014 it seems safest to not do too much optimization with these at the
3015 tree level at all. We'll have to rely on the rtl optimizers to
3016 clean this up, as there we've got all the appropriate bits exposed. */
3017 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
3018 return false;
3019
3020 /* Complex and vector values must have been put into SSA-like form.
3021 That is, no assignments to the individual components. */
3022 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
3023 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3024 return DECL_GIMPLE_REG_P (t);
3025
3026 return true;
3027 }
3028
3029
3030 /* Return true if T is a GIMPLE variable whose address is not needed. */
3031
3032 bool
3033 is_gimple_non_addressable (tree t)
3034 {
3035 if (TREE_CODE (t) == SSA_NAME)
3036 t = SSA_NAME_VAR (t);
3037
3038 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
3039 }
3040
3041 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
3042
3043 bool
3044 is_gimple_val (tree t)
3045 {
3046 /* Make loads from volatiles and memory vars explicit. */
3047 if (is_gimple_variable (t)
3048 && is_gimple_reg_type (TREE_TYPE (t))
3049 && !is_gimple_reg (t))
3050 return false;
3051
3052 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
3053 }
3054
3055 /* Similarly, but accept hard registers as inputs to asm statements. */
3056
3057 bool
3058 is_gimple_asm_val (tree t)
3059 {
3060 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
3061 return true;
3062
3063 return is_gimple_val (t);
3064 }
3065
3066 /* Return true if T is a GIMPLE minimal lvalue. */
3067
3068 bool
3069 is_gimple_min_lval (tree t)
3070 {
3071 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
3072 return false;
3073 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
3074 }
3075
3076 /* Return true if T is a valid function operand of a CALL_EXPR. */
3077
3078 bool
3079 is_gimple_call_addr (tree t)
3080 {
3081 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
3082 }
3083
3084 /* Return true if T is a valid address operand of a MEM_REF. */
3085
3086 bool
3087 is_gimple_mem_ref_addr (tree t)
3088 {
3089 return (is_gimple_reg (t)
3090 || TREE_CODE (t) == INTEGER_CST
3091 || (TREE_CODE (t) == ADDR_EXPR
3092 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
3093 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
3094 }
3095
3096
3097 /* Given a memory reference expression T, return its base address.
3098 The base address of a memory reference expression is the main
3099 object being referenced. For instance, the base address for
3100 'array[i].fld[j]' is 'array'. You can think of this as stripping
3101 away the offset part from a memory address.
3102
3103 This function calls handled_component_p to strip away all the inner
3104 parts of the memory reference until it reaches the base object. */
3105
3106 tree
3107 get_base_address (tree t)
3108 {
3109 while (handled_component_p (t))
3110 t = TREE_OPERAND (t, 0);
3111
3112 if ((TREE_CODE (t) == MEM_REF
3113 || TREE_CODE (t) == TARGET_MEM_REF)
3114 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
3115 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3116
3117 if (TREE_CODE (t) == SSA_NAME
3118 || DECL_P (t)
3119 || TREE_CODE (t) == STRING_CST
3120 || TREE_CODE (t) == CONSTRUCTOR
3121 || INDIRECT_REF_P (t)
3122 || TREE_CODE (t) == MEM_REF
3123 || TREE_CODE (t) == TARGET_MEM_REF)
3124 return t;
3125 else
3126 return NULL_TREE;
3127 }
3128
3129 void
3130 recalculate_side_effects (tree t)
3131 {
3132 enum tree_code code = TREE_CODE (t);
3133 int len = TREE_OPERAND_LENGTH (t);
3134 int i;
3135
3136 switch (TREE_CODE_CLASS (code))
3137 {
3138 case tcc_expression:
3139 switch (code)
3140 {
3141 case INIT_EXPR:
3142 case MODIFY_EXPR:
3143 case VA_ARG_EXPR:
3144 case PREDECREMENT_EXPR:
3145 case PREINCREMENT_EXPR:
3146 case POSTDECREMENT_EXPR:
3147 case POSTINCREMENT_EXPR:
3148 /* All of these have side-effects, no matter what their
3149 operands are. */
3150 return;
3151
3152 default:
3153 break;
3154 }
3155 /* Fall through. */
3156
3157 case tcc_comparison: /* a comparison expression */
3158 case tcc_unary: /* a unary arithmetic expression */
3159 case tcc_binary: /* a binary arithmetic expression */
3160 case tcc_reference: /* a reference */
3161 case tcc_vl_exp: /* a function call */
3162 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3163 for (i = 0; i < len; ++i)
3164 {
3165 tree op = TREE_OPERAND (t, i);
3166 if (op && TREE_SIDE_EFFECTS (op))
3167 TREE_SIDE_EFFECTS (t) = 1;
3168 }
3169 break;
3170
3171 case tcc_constant:
3172 /* No side-effects. */
3173 return;
3174
3175 default:
3176 gcc_unreachable ();
3177 }
3178 }
3179
3180 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
3181 a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
3182 we failed to create one. */
3183
3184 tree
3185 canonicalize_cond_expr_cond (tree t)
3186 {
3187 /* Strip conversions around boolean operations. */
3188 if (CONVERT_EXPR_P (t)
3189 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
3190 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
3191 == BOOLEAN_TYPE))
3192 t = TREE_OPERAND (t, 0);
3193
3194 /* For !x use x == 0. */
3195 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3196 {
3197 tree top0 = TREE_OPERAND (t, 0);
3198 t = build2 (EQ_EXPR, TREE_TYPE (t),
3199 top0, build_int_cst (TREE_TYPE (top0), 0));
3200 }
3201 /* For cmp ? 1 : 0 use cmp. */
3202 else if (TREE_CODE (t) == COND_EXPR
3203 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3204 && integer_onep (TREE_OPERAND (t, 1))
3205 && integer_zerop (TREE_OPERAND (t, 2)))
3206 {
3207 tree top0 = TREE_OPERAND (t, 0);
3208 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3209 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3210 }
3211
3212 if (is_gimple_condexpr (t))
3213 return t;
3214
3215 return NULL_TREE;
3216 }
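
/* Worked examples (not part of the original sources):

     !x                 becomes   x == 0
     a < b ? 1 : 0      becomes   a < b
     (int) (a == b)     strips the conversion, leaving  a == b

   If the result is still not a valid GIMPLE condition, NULL_TREE is
   returned and the caller must keep the original form.  */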
3217
3218 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3219 the positions marked by the set ARGS_TO_SKIP. */
3220
3221 gimple
3222 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3223 {
3224 int i;
3225 int nargs = gimple_call_num_args (stmt);
3226 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3227 gimple new_stmt;
3228
3229 for (i = 0; i < nargs; i++)
3230 if (!bitmap_bit_p (args_to_skip, i))
3231 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3232
3233 if (gimple_call_internal_p (stmt))
3234 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3235 vargs);
3236 else
3237 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3238 VEC_free (tree, heap, vargs);
3239 if (gimple_call_lhs (stmt))
3240 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3241
3242 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3243 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3244
3245 gimple_set_block (new_stmt, gimple_block (stmt));
3246 if (gimple_has_location (stmt))
3247 gimple_set_location (new_stmt, gimple_location (stmt));
3248 gimple_call_copy_flags (new_stmt, stmt);
3249 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3250
3251 gimple_set_modified (new_stmt, true);
3252
3253 return new_stmt;
3254 }
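
/* Illustrative sketch (not part of the original sources): dropping the
   first argument of a call, using the bitmap API:

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 0);
     gimple new_call = gimple_call_copy_skip_args (stmt, skip);
     BITMAP_FREE (skip);

   The copy shares the callee, LHS, flags and location of STMT but
   omits every argument whose index is set in SKIP.  */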
3255
3256
3257 enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };
3258
3259 static hashval_t gimple_type_hash (const void *);
3260
3261 /* Structure used to maintain a cache of some type pairs compared by
3262 gimple_types_compatible_p when comparing aggregate types. There are
3263 three possible values for SAME_P:
3264
3265 -2: The pair (T1, T2) has just been inserted in the table.
3266 0: T1 and T2 are different types.
3267 1: T1 and T2 are the same type.
3268
3269 The two elements in the SAME_P array are indexed by the comparison
3270 mode gtc_mode. */
3271
3272 struct type_pair_d
3273 {
3274 unsigned int uid1;
3275 unsigned int uid2;
3276 signed char same_p[2];
3277 };
3278 typedef struct type_pair_d *type_pair_t;
3279 DEF_VEC_P(type_pair_t);
3280 DEF_VEC_ALLOC_P(type_pair_t,heap);
3281
3282 #define GIMPLE_TYPE_PAIR_SIZE 16381
3283 struct type_pair_d *type_pair_cache;
3284
3285
3286 /* Lookup the pair of types T1 and T2 in the type pair cache.
3287 Insert a new entry if none existed. */
3288
3289 static inline type_pair_t
3290 lookup_type_pair (tree t1, tree t2)
3291 {
3292 unsigned int index;
3293 unsigned int uid1, uid2;
3294
3295 if (type_pair_cache == NULL)
3296 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
3297
3298 if (TYPE_UID (t1) < TYPE_UID (t2))
3299 {
3300 uid1 = TYPE_UID (t1);
3301 uid2 = TYPE_UID (t2);
3302 }
3303 else
3304 {
3305 uid1 = TYPE_UID (t2);
3306 uid2 = TYPE_UID (t1);
3307 }
3308 gcc_checking_assert (uid1 != uid2);
3309
3310 /* Index directly instead of using iterative_hash_hashval_t, which
3311 would imply a function call; UIDs are in a limited range anyway. */
3312 index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
3313 % GIMPLE_TYPE_PAIR_SIZE);
3314 if (type_pair_cache [index].uid1 == uid1
3315 && type_pair_cache [index].uid2 == uid2)
3316 return &type_pair_cache[index];
3317
3318 type_pair_cache [index].uid1 = uid1;
3319 type_pair_cache [index].uid2 = uid2;
3320 type_pair_cache [index].same_p[0] = -2;
3321 type_pair_cache [index].same_p[1] = -2;
3322
3323 return &type_pair_cache[index];
3324 }
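
/* Worked example (not part of the original sources): with hypothetical
   UIDs 7 and 3 the pair is normalized to (3, 7) so that lookup is
   symmetric, and the slot is

     ((3 << HOST_BITS_PER_WIDE_INT / 2) + 7) % GIMPLE_TYPE_PAIR_SIZE

   Since this is a direct-mapped cache, a colliding pair simply evicts
   the previous entry and restarts from SAME_P == -2.  */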
3325
3326 /* Per pointer state for the SCC finding. The on_sccstack flag
3327 is not strictly required: it is true exactly when there is no hash
3328 value recorded for the type and false otherwise, but querying
3329 that directly is slower. */
3330
3331 struct sccs
3332 {
3333 unsigned int dfsnum;
3334 unsigned int low;
3335 bool on_sccstack;
3336 union {
3337 hashval_t hash;
3338 signed char same_p;
3339 } u;
3340 };
3341
3342 static unsigned int next_dfs_num;
3343 static unsigned int gtc_next_dfs_num;
3344
3345
3346 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
3347
3348 typedef struct GTY(()) gimple_type_leader_entry_s {
3349 tree type;
3350 tree leader;
3351 } gimple_type_leader_entry;
3352
3353 #define GIMPLE_TYPE_LEADER_SIZE 16381
3354 static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3355 gimple_type_leader_entry *gimple_type_leader;
3356
3357 /* Lookup an existing leader for T and return it or NULL_TREE, if
3358 there is none in the cache. */
3359
3360 static inline tree
3361 gimple_lookup_type_leader (tree t)
3362 {
3363 gimple_type_leader_entry *leader;
3364
3365 if (!gimple_type_leader)
3366 return NULL_TREE;
3367
3368 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3369 if (leader->type != t)
3370 return NULL_TREE;
3371
3372 return leader->leader;
3373 }
3374
3375 /* Return true if T1 and T2 have the same name, comparing the
3376 IDENTIFIER_NODEs behind any TYPE_DECL names. Two types that
3377 both lack a name are considered to have the same name. */
3378
3379 static bool
3380 compare_type_names_p (tree t1, tree t2)
3381 {
3382 tree name1 = TYPE_NAME (t1);
3383 tree name2 = TYPE_NAME (t2);
3384
3385 if (name1 && TREE_CODE (name1) == TYPE_DECL)
3386 name1 = DECL_NAME (name1);
3387 gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3388
3389 if (name2 && TREE_CODE (name2) == TYPE_DECL)
3390 name2 = DECL_NAME (name2);
3391 gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3392
3393 /* Identifiers can be compared with pointer equality rather
3394 than a string comparison. */
3395 if (name1 == name2)
3396 return true;
3397
3398 return false;
3399 }
3400
3401 /* Return true if the field decls F1 and F2 are at the same offset.
3402
3403 This is intended to be used on GIMPLE types only. */
3404
3405 bool
3406 gimple_compare_field_offset (tree f1, tree f2)
3407 {
3408 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3409 {
3410 tree offset1 = DECL_FIELD_OFFSET (f1);
3411 tree offset2 = DECL_FIELD_OFFSET (f2);
3412 return ((offset1 == offset2
3413 /* Once gimplification is done, self-referential offsets are
3414 instantiated as operand #2 of the COMPONENT_REF built for
3415 each access and reset. Therefore, they are not relevant
3416 anymore and fields are interchangeable provided that they
3417 represent the same access. */
3418 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3419 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3420 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3421 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3422 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3423 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3424 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3425 || operand_equal_p (offset1, offset2, 0))
3426 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3427 DECL_FIELD_BIT_OFFSET (f2)));
3428 }
3429
3430 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3431 should be, so handle differing ones specially by decomposing
3432 the offset into a byte and bit offset manually. */
3433 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3434 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3435 {
3436 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3437 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3438 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3439 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3440 + bit_offset1 / BITS_PER_UNIT);
3441 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3442 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3443 + bit_offset2 / BITS_PER_UNIT);
3444 if (byte_offset1 != byte_offset2)
3445 return false;
3446 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3447 }
3448
3449 return false;
3450 }
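
/* Worked example (not part of the original sources) of the manual
   decomposition above: a field at DECL_FIELD_OFFSET 4 bytes with
   DECL_FIELD_BIT_OFFSET 12 bits denotes the same position as one at
   5 bytes and 4 bits, because

     4 + 12 / BITS_PER_UNIT == 5 + 4 / BITS_PER_UNIT   (bytes)
     12 % BITS_PER_UNIT     == 4 % BITS_PER_UNIT       (bits)

   assuming the usual BITS_PER_UNIT of 8.  */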
3451
3452 static bool
3453 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
3454 VEC(type_pair_t, heap) **,
3455 struct pointer_map_t *, struct obstack *);
3456
3457 /* DFS visit the edge from the caller's type pair with state *STATE
3458 to the pair T1, T2.
3459 Update the merging status if the pair is not part of the SCC containing
3460 the caller's pair, and return it.
3461 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3462
3463 static bool
3464 gtc_visit (tree t1, tree t2,
3465 struct sccs *state,
3466 VEC(type_pair_t, heap) **sccstack,
3467 struct pointer_map_t *sccstate,
3468 struct obstack *sccstate_obstack)
3469 {
3470 struct sccs *cstate = NULL;
3471 type_pair_t p;
3472 void **slot;
3473 tree leader1, leader2;
3474
3475 /* Check first for the obvious case of pointer identity. */
3476 if (t1 == t2)
3477 return true;
3478
3479 /* Check that we have two types to compare. */
3480 if (t1 == NULL_TREE || t2 == NULL_TREE)
3481 return false;
3482
3483 /* Can't be the same type if the types don't have the same code. */
3484 if (TREE_CODE (t1) != TREE_CODE (t2))
3485 return false;
3486
3487 /* Can't be the same type if they have different CV qualifiers. */
3488 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3489 return false;
3490
3491 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3492 return false;
3493
3494 /* Void types and nullptr types are always the same. */
3495 if (TREE_CODE (t1) == VOID_TYPE
3496 || TREE_CODE (t1) == NULLPTR_TYPE)
3497 return true;
3498
3499 /* Can't be the same type if they have different alignment or mode. */
3500 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3501 || TYPE_MODE (t1) != TYPE_MODE (t2))
3502 return false;
3503
3504 /* Do some simple checks before doing three hashtable queries. */
3505 if (INTEGRAL_TYPE_P (t1)
3506 || SCALAR_FLOAT_TYPE_P (t1)
3507 || FIXED_POINT_TYPE_P (t1)
3508 || TREE_CODE (t1) == VECTOR_TYPE
3509 || TREE_CODE (t1) == COMPLEX_TYPE
3510 || TREE_CODE (t1) == OFFSET_TYPE
3511 || POINTER_TYPE_P (t1))
3512 {
3513 /* Can't be the same type if they have different sign or precision. */
3514 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3515 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3516 return false;
3517
3518 if (TREE_CODE (t1) == INTEGER_TYPE
3519 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3520 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3521 return false;
3522
3523 /* That's all we need to check for float and fixed-point types. */
3524 if (SCALAR_FLOAT_TYPE_P (t1)
3525 || FIXED_POINT_TYPE_P (t1))
3526 return true;
3527
3528 /* For other types fall thru to more complex checks. */
3529 }
3530
3531 /* If the types have been previously registered and found equal
3532 they still are. */
3533 leader1 = gimple_lookup_type_leader (t1);
3534 leader2 = gimple_lookup_type_leader (t2);
3535 if (leader1 == t2
3536 || t1 == leader2
3537 || (leader1 && leader1 == leader2))
3538 return true;
3539
3540 /* If the hash values of t1 and t2 are different the types can't
3541 possibly be the same. This helps keep the type-pair hashtable
3542 small, only tracking comparisons for hash collisions. */
3543 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3544 return false;
3545
3546 /* Allocate a new cache entry for this comparison. */
3547 p = lookup_type_pair (t1, t2);
3548 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3549 {
3550 /* We have already decided whether T1 and T2 are the
3551 same, return the cached result. */
3552 return p->same_p[GTC_MERGE] == 1;
3553 }
3554
3555 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3556 cstate = (struct sccs *)*slot;
3557 /* Not yet visited. DFS recurse. */
3558 if (!cstate)
3559 {
3560 gimple_types_compatible_p_1 (t1, t2, p,
3561 sccstack, sccstate, sccstate_obstack);
3562 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
3563 state->low = MIN (state->low, cstate->low);
3564 }
3565 /* If the type is still on the SCC stack adjust the parents low. */
3566 if (cstate->dfsnum < state->dfsnum
3567 && cstate->on_sccstack)
3568 state->low = MIN (cstate->dfsnum, state->low);
3569
3570 /* Return the current lattice value. We start with an equality
3571 assumption so types part of a SCC will be optimistically
3572 treated equal unless proven otherwise. */
3573 return cstate->u.same_p;
3574 }
3575
3576 /* Worker for gimple_types_compatible_p.
3577 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3578
3579 static bool
3580 gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
3581 VEC(type_pair_t, heap) **sccstack,
3582 struct pointer_map_t *sccstate,
3583 struct obstack *sccstate_obstack)
3584 {
3585 struct sccs *state;
3586
3587 gcc_assert (p->same_p[GTC_MERGE] == -2);
3588
3589 state = XOBNEW (sccstate_obstack, struct sccs);
3590 *pointer_map_insert (sccstate, p) = state;
3591
3592 VEC_safe_push (type_pair_t, heap, *sccstack, p);
3593 state->dfsnum = gtc_next_dfs_num++;
3594 state->low = state->dfsnum;
3595 state->on_sccstack = true;
3596 /* Start with an equality assumption. As we DFS recurse into child
3597 SCCs this assumption may get revisited. */
3598 state->u.same_p = 1;
3599
3600 /* The struct tags shall compare equal. */
3601 if (!compare_type_names_p (t1, t2))
3602 goto different_types;
3603
3604 /* If their attributes are not the same they can't be the same type. */
3605 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3606 goto different_types;
3607
3608 /* Do type-specific comparisons. */
3609 switch (TREE_CODE (t1))
3610 {
3611 case VECTOR_TYPE:
3612 case COMPLEX_TYPE:
3613 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3614 state, sccstack, sccstate, sccstate_obstack))
3615 goto different_types;
3616 goto same_types;
3617
3618 case ARRAY_TYPE:
3619 /* Array types are the same if the element types are the same and
3620 the number of elements are the same. */
3621 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3622 state, sccstack, sccstate, sccstate_obstack)
3623 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3624 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3625 goto different_types;
3626 else
3627 {
3628 tree i1 = TYPE_DOMAIN (t1);
3629 tree i2 = TYPE_DOMAIN (t2);
3630
3631 /* For an incomplete external array, the type domain can be
3632 NULL_TREE. Check this condition also. */
3633 if (i1 == NULL_TREE && i2 == NULL_TREE)
3634 goto same_types;
3635 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3636 goto different_types;
3637 /* If for a complete array type the possibly gimplified sizes
3638 are different the types are different. */
3639 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3640 || (TYPE_SIZE (i1)
3641 && TYPE_SIZE (i2)
3642 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3643 goto different_types;
3644 else
3645 {
3646 tree min1 = TYPE_MIN_VALUE (i1);
3647 tree min2 = TYPE_MIN_VALUE (i2);
3648 tree max1 = TYPE_MAX_VALUE (i1);
3649 tree max2 = TYPE_MAX_VALUE (i2);
3650
3651 /* The minimum/maximum values have to be the same. */
3652 if ((min1 == min2
3653 || (min1 && min2
3654 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3655 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3656 || operand_equal_p (min1, min2, 0))))
3657 && (max1 == max2
3658 || (max1 && max2
3659 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3660 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3661 || operand_equal_p (max1, max2, 0)))))
3662 goto same_types;
3663 else
3664 goto different_types;
3665 }
3666 }
3667
3668 case METHOD_TYPE:
3669 /* Method types should belong to the same class. */
3670 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
3671 state, sccstack, sccstate, sccstate_obstack))
3672 goto different_types;
3673
3674 /* Fallthru */
3675
3676 case FUNCTION_TYPE:
3677 /* Function types are the same if the return type and arguments types
3678 are the same. */
3679 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3680 state, sccstack, sccstate, sccstate_obstack))
3681 goto different_types;
3682
3683 if (!comp_type_attributes (t1, t2))
3684 goto different_types;
3685
3686 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3687 goto same_types;
3688 else
3689 {
3690 tree parms1, parms2;
3691
3692 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3693 parms1 && parms2;
3694 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3695 {
3696 if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
3697 state, sccstack, sccstate, sccstate_obstack))
3698 goto different_types;
3699 }
3700
3701 if (parms1 || parms2)
3702 goto different_types;
3703
3704 goto same_types;
3705 }
3706
3707 case OFFSET_TYPE:
3708 {
3709 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3710 state, sccstack, sccstate, sccstate_obstack)
3711 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
3712 TYPE_OFFSET_BASETYPE (t2),
3713 state, sccstack, sccstate, sccstate_obstack))
3714 goto different_types;
3715
3716 goto same_types;
3717 }
3718
3719 case POINTER_TYPE:
3720 case REFERENCE_TYPE:
3721 {
3722 /* If the two pointers have different ref-all attributes,
3723 they can't be the same type. */
3724 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3725 goto different_types;
3726
3727 /* Otherwise, pointer and reference types are the same if the
3728 pointed-to types are the same. */
3729 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3730 state, sccstack, sccstate, sccstate_obstack))
3731 goto same_types;
3732
3733 goto different_types;
3734 }
3735
3736 case INTEGER_TYPE:
3737 case BOOLEAN_TYPE:
3738 {
3739 tree min1 = TYPE_MIN_VALUE (t1);
3740 tree max1 = TYPE_MAX_VALUE (t1);
3741 tree min2 = TYPE_MIN_VALUE (t2);
3742 tree max2 = TYPE_MAX_VALUE (t2);
3743 bool min_equal_p = false;
3744 bool max_equal_p = false;
3745
3746 /* If either type has a minimum value, the other type must
3747 have the same. */
3748 if (min1 == NULL_TREE && min2 == NULL_TREE)
3749 min_equal_p = true;
3750 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3751 min_equal_p = true;
3752
3753 /* Likewise, if either type has a maximum value, the other
3754 type must have the same. */
3755 if (max1 == NULL_TREE && max2 == NULL_TREE)
3756 max_equal_p = true;
3757 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3758 max_equal_p = true;
3759
3760 if (!min_equal_p || !max_equal_p)
3761 goto different_types;
3762
3763 goto same_types;
3764 }
3765
3766 case ENUMERAL_TYPE:
3767 {
3768 /* FIXME lto, we cannot check bounds on enumeral types because
3769 different front ends will produce different values.
3770 In C, enumeral types are integers, while in C++ each element
3771 will have its own symbolic value. We should decide how enums
3772 are to be represented in GIMPLE and have each front end lower
3773 to that. */
3774 tree v1, v2;
3775
3776 /* For enumeral types, all the values must be the same. */
3777 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3778 goto same_types;
3779
3780 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3781 v1 && v2;
3782 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3783 {
3784 tree c1 = TREE_VALUE (v1);
3785 tree c2 = TREE_VALUE (v2);
3786
3787 if (TREE_CODE (c1) == CONST_DECL)
3788 c1 = DECL_INITIAL (c1);
3789
3790 if (TREE_CODE (c2) == CONST_DECL)
3791 c2 = DECL_INITIAL (c2);
3792
3793 if (tree_int_cst_equal (c1, c2) != 1)
3794 goto different_types;
3795
3796 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
3797 goto different_types;
3798 }
3799
3800 /* If one enumeration has more values than the other, they
3801 are not the same. */
3802 if (v1 || v2)
3803 goto different_types;
3804
3805 goto same_types;
3806 }
3807
3808 case RECORD_TYPE:
3809 case UNION_TYPE:
3810 case QUAL_UNION_TYPE:
3811 {
3812 tree f1, f2;
3813
3814 /* For aggregate types, all the fields must be the same. */
3815 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3816 f1 && f2;
3817 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3818 {
3819 /* Different field kinds are not compatible. */
3820 if (TREE_CODE (f1) != TREE_CODE (f2))
3821 goto different_types;
3822 /* Field decls must have the same name and offset. */
3823 if (TREE_CODE (f1) == FIELD_DECL
3824 && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3825 || !gimple_compare_field_offset (f1, f2)))
3826 goto different_types;
3827 /* All entities should have the same name and type. */
3828 if (DECL_NAME (f1) != DECL_NAME (f2)
3829 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
3830 state, sccstack, sccstate, sccstate_obstack))
3831 goto different_types;
3832 }
3833
3834 /* If one aggregate has more fields than the other, they
3835 are not the same. */
3836 if (f1 || f2)
3837 goto different_types;
3838
3839 goto same_types;
3840 }
3841
3842 default:
3843 gcc_unreachable ();
3844 }
3845
3846 /* Common exit path for types that are not compatible. */
3847 different_types:
3848 state->u.same_p = 0;
3849 goto pop;
3850
3851 /* Common exit path for types that are compatible. */
3852 same_types:
3853 gcc_assert (state->u.same_p == 1);
3854
3855 pop:
3856 if (state->low == state->dfsnum)
3857 {
3858 type_pair_t x;
3859
3860 /* Pop off the SCC and set its cache values to the final
3861 comparison result. */
3862 do
3863 {
3864 struct sccs *cstate;
3865 x = VEC_pop (type_pair_t, *sccstack);
3866 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3867 cstate->on_sccstack = false;
3868 x->same_p[GTC_MERGE] = state->u.same_p;
3869 }
3870 while (x != p);
3871 }
3872
3873 return state->u.same_p;
3874 }
3875
3876 /* Return true iff T1 and T2 are structurally identical for merging
3877 purposes (GTC_MERGE): an incomplete type and a complete type
3878 are considered different. */
3879
3880 static bool
3881 gimple_types_compatible_p (tree t1, tree t2)
3882 {
3883 VEC(type_pair_t, heap) *sccstack = NULL;
3884 struct pointer_map_t *sccstate;
3885 struct obstack sccstate_obstack;
3886 type_pair_t p = NULL;
3887 bool res;
3888 tree leader1, leader2;
3889
3890 /* Before starting to set up the SCC machinery handle simple cases. */
3891
3892 /* Check first for the obvious case of pointer identity. */
3893 if (t1 == t2)
3894 return true;
3895
3896 /* Check that we have two types to compare. */
3897 if (t1 == NULL_TREE || t2 == NULL_TREE)
3898 return false;
3899
3900 /* Can't be the same type if the types don't have the same code. */
3901 if (TREE_CODE (t1) != TREE_CODE (t2))
3902 return false;
3903
3904 /* Can't be the same type if they have different CV qualifiers. */
3905 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3906 return false;
3907
3908 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3909 return false;
3910
3911 /* Void types and nullptr types are always the same. */
3912 if (TREE_CODE (t1) == VOID_TYPE
3913 || TREE_CODE (t1) == NULLPTR_TYPE)
3914 return true;
3915
3916 /* Can't be the same type if they have different alignment or mode. */
3917 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3918 || TYPE_MODE (t1) != TYPE_MODE (t2))
3919 return false;
3920
3921 /* Do some simple checks before doing three hashtable queries. */
3922 if (INTEGRAL_TYPE_P (t1)
3923 || SCALAR_FLOAT_TYPE_P (t1)
3924 || FIXED_POINT_TYPE_P (t1)
3925 || TREE_CODE (t1) == VECTOR_TYPE
3926 || TREE_CODE (t1) == COMPLEX_TYPE
3927 || TREE_CODE (t1) == OFFSET_TYPE
3928 || POINTER_TYPE_P (t1))
3929 {
3930 /* Can't be the same type if they have different sign or precision. */
3931 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3932 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3933 return false;
3934
3935 if (TREE_CODE (t1) == INTEGER_TYPE
3936 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3937 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3938 return false;
3939
3940 /* That's all we need to check for float and fixed-point types. */
3941 if (SCALAR_FLOAT_TYPE_P (t1)
3942 || FIXED_POINT_TYPE_P (t1))
3943 return true;
3944
3945 /* For other types fall through to more complex checks. */
3946 }
3947
3948 /* If the types have been previously registered and found equal
3949 they still are. */
3950 leader1 = gimple_lookup_type_leader (t1);
3951 leader2 = gimple_lookup_type_leader (t2);
3952 if (leader1 == t2
3953 || t1 == leader2
3954 || (leader1 && leader1 == leader2))
3955 return true;
3956
3957 /* If the hash values of t1 and t2 are different the types can't
3958 possibly be the same. This helps keep the type-pair hashtable
3959 small, only tracking comparisons for hash collisions. */
3960 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3961 return false;
3962
3963 /* If we've visited this type pair before (in the case of aggregates
3964 with self-referential types), and we made a decision, return it. */
3965 p = lookup_type_pair (t1, t2);
3966 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3967 {
3968 /* We have already decided whether T1 and T2 are the
3969 same, return the cached result. */
3970 return p->same_p[GTC_MERGE] == 1;
3971 }
3972
3973 /* Now set up the SCC machinery for the comparison. */
3974 gtc_next_dfs_num = 1;
3975 sccstate = pointer_map_create ();
3976 gcc_obstack_init (&sccstate_obstack);
3977 res = gimple_types_compatible_p_1 (t1, t2, p,
3978 &sccstack, sccstate, &sccstate_obstack);
3979 VEC_free (type_pair_t, heap, sccstack);
3980 pointer_map_destroy (sccstate);
3981 obstack_free (&sccstate_obstack, NULL);
3982
3983 return res;
3984 }
3985
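/* The type-pair cache queried above is a tri-state memoization: an
   entry starts out undecided and is flipped to a definite verdict once
   the structural walk completes.  A minimal sketch of that pattern
   (hypothetical names, not the actual GCC data structure; entries are
   assumed to be initialized to -1):

     #include <stdbool.h>

     struct pair { const void *t1, *t2; signed char same_p; };

     static bool
     compare_memoized (struct pair *p)
     {
       if (p->same_p == 0 || p->same_p == 1)
         return p->same_p == 1;              // previously decided
       bool res = structural_compare (p);    // hypothetical worker
       p->same_p = res ? 1 : 0;
       return res;
     }
*/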
3986
3987 static hashval_t
3988 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
3989 struct pointer_map_t *, struct obstack *);
3990
3991 /* DFS visit the edge from the caller's type with state *STATE to T.
3992 Update the caller's type hash V with the hash for T if it is not part
3993 of the SCC containing the caller's type and return it.
3994 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3995
3996 static hashval_t
3997 visit (tree t, struct sccs *state, hashval_t v,
3998 VEC (tree, heap) **sccstack,
3999 struct pointer_map_t *sccstate,
4000 struct obstack *sccstate_obstack)
4001 {
4002 struct sccs *cstate = NULL;
4003 struct tree_int_map m;
4004 void **slot;
4005
4006 /* If there is a hash value recorded for this type then it can't
4007 possibly be part of our parent SCC. Simply mix in its hash. */
4008 m.base.from = t;
4009 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
4010 && *slot)
4011 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
4012
4013 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
4014 cstate = (struct sccs *)*slot;
4015 if (!cstate)
4016 {
4017 hashval_t tem;
4018 /* Not yet visited. DFS recurse. */
4019 tem = iterative_hash_gimple_type (t, v,
4020 sccstack, sccstate, sccstate_obstack);
4021 if (!cstate)
4022 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
4023 state->low = MIN (state->low, cstate->low);
4024 /* If the type is no longer on the SCC stack and thus is not part
4025 of the parent's SCC, mix in its hash value. Otherwise we will
4026 ignore the type for hashing purposes and return the unaltered
4027 hash value. */
4028 if (!cstate->on_sccstack)
4029 return tem;
4030 }
4031 if (cstate->dfsnum < state->dfsnum
4032 && cstate->on_sccstack)
4033 state->low = MIN (cstate->dfsnum, state->low);
4034
4035 /* We are part of our parent's SCC; skip this type during hashing
4036 and return the unaltered hash value. */
4037 return v;
4038 }
4039
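/* The dfsnum/low/on_sccstack bookkeeping in visit above is the
   low-link update from Tarjan's SCC algorithm.  A minimal standalone
   sketch of that update on a plain graph (hypothetical node type,
   not GCC code):

     #include <stdbool.h>

     struct node
     {
       unsigned dfsnum;     // 0 while unvisited
       unsigned low;
       bool on_stack;
       unsigned nsucc;
       struct node **succ;
     };

     static unsigned next_dfs = 1;

     static void
     tarjan_visit (struct node *n)
     {
       unsigned i;
       n->dfsnum = n->low = next_dfs++;
       n->on_stack = true;    // the real code also pushes N on a stack
       for (i = 0; i < n->nsucc; ++i)
         {
           struct node *s = n->succ[i];
           if (s->dfsnum == 0)
             {
               tarjan_visit (s);
               if (s->low < n->low)
                 n->low = s->low;
             }
           else if (s->on_stack && s->dfsnum < n->low)
             n->low = s->dfsnum;
         }
       // If n->low == n->dfsnum, N is the root of an SCC; the real
       // code pops the SCC members off the stack at this point.
     }
*/
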
4040 /* Hash NAME with the previous hash value V and return it. */
4041
4042 static hashval_t
4043 iterative_hash_name (tree name, hashval_t v)
4044 {
4045 if (!name)
4046 return v;
4047 if (TREE_CODE (name) == TYPE_DECL)
4048 name = DECL_NAME (name);
4049 if (!name)
4050 return v;
4051 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
4052 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
4053 }
4054
4055 /* A type, hashvalue pair for sorting SCC members. */
4056
4057 struct type_hash_pair {
4058 tree type;
4059 hashval_t hash;
4060 };
4061
4062 /* Compare two type, hashvalue pairs. */
4063
4064 static int
4065 type_hash_pair_compare (const void *p1_, const void *p2_)
4066 {
4067 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
4068 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
4069 if (p1->hash < p2->hash)
4070 return -1;
4071 else if (p1->hash > p2->hash)
4072 return 1;
4073 return 0;
4074 }
4075
4076 /* Return a hash value for gimple type TYPE combined with VAL.
4077 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
4078
4079 To hash a type we end up hashing in types that are reachable.
4080 Through pointers we can end up with cycles, which mess up the
4081 required property that we need to compute the same hash value
4082 for structurally equivalent types. To avoid this we have to
4083 hash all types in a cycle (the SCC) in a commutative way. The
4084 easiest way is to not mix in the hashes of the SCC members at
4085 all. To make this work we have to delay setting the hash
4086 values of the SCC until it is complete. */
4087
4088 static hashval_t
4089 iterative_hash_gimple_type (tree type, hashval_t val,
4090 VEC(tree, heap) **sccstack,
4091 struct pointer_map_t *sccstate,
4092 struct obstack *sccstate_obstack)
4093 {
4094 hashval_t v;
4095 void **slot;
4096 struct sccs *state;
4097
4098 /* Not visited during this DFS walk. */
4099 gcc_checking_assert (!pointer_map_contains (sccstate, type));
4100 state = XOBNEW (sccstate_obstack, struct sccs);
4101 *pointer_map_insert (sccstate, type) = state;
4102
4103 VEC_safe_push (tree, heap, *sccstack, type);
4104 state->dfsnum = next_dfs_num++;
4105 state->low = state->dfsnum;
4106 state->on_sccstack = true;
4107
4108 /* Combine a few common features of types so that types are grouped into
4109 smaller sets; when searching for existing matching types to merge,
4110 only existing types having the same features as the new type will be
4111 checked. */
4112 v = iterative_hash_name (TYPE_NAME (type), 0);
4113 v = iterative_hash_hashval_t (TREE_CODE (type), v);
4114 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
4115 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4116
4117 /* Do not hash the type's size as this will cause differences in
4118 hash values for the complete vs. the incomplete type variant. */
4119
4120 /* Incorporate common features of numerical types. */
4121 if (INTEGRAL_TYPE_P (type)
4122 || SCALAR_FLOAT_TYPE_P (type)
4123 || FIXED_POINT_TYPE_P (type))
4124 {
4125 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4126 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4127 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4128 }
4129
4130 /* For pointer and reference types, fold in information about the type
4131 pointed to. */
4132 if (POINTER_TYPE_P (type))
4133 v = visit (TREE_TYPE (type), state, v,
4134 sccstack, sccstate, sccstate_obstack);
4135
4136 /* For integer types hash the type's min/max values and the string flag. */
4137 if (TREE_CODE (type) == INTEGER_TYPE)
4138 {
4139 /* OMP lowering can introduce error_mark_node in place of
4140 random local decls in types. */
4141 if (TYPE_MIN_VALUE (type) != error_mark_node)
4142 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
4143 if (TYPE_MAX_VALUE (type) != error_mark_node)
4144 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
4145 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4146 }
4147
4148 /* For array types hash their domain and the string flag. */
4149 if (TREE_CODE (type) == ARRAY_TYPE
4150 && TYPE_DOMAIN (type))
4151 {
4152 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4153 v = visit (TYPE_DOMAIN (type), state, v,
4154 sccstack, sccstate, sccstate_obstack);
4155 }
4156
4157 /* Recurse for aggregates with a single element type. */
4158 if (TREE_CODE (type) == ARRAY_TYPE
4159 || TREE_CODE (type) == COMPLEX_TYPE
4160 || TREE_CODE (type) == VECTOR_TYPE)
4161 v = visit (TREE_TYPE (type), state, v,
4162 sccstack, sccstate, sccstate_obstack);
4163
4164 /* Incorporate function return and argument types. */
4165 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4166 {
4167 unsigned na;
4168 tree p;
4169
4170 /* For method types also incorporate their parent class. */
4171 if (TREE_CODE (type) == METHOD_TYPE)
4172 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
4173 sccstack, sccstate, sccstate_obstack);
4174
4175 /* Check result and argument types. */
4176 v = visit (TREE_TYPE (type), state, v,
4177 sccstack, sccstate, sccstate_obstack);
4178 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4179 {
4180 v = visit (TREE_VALUE (p), state, v,
4181 sccstack, sccstate, sccstate_obstack);
4182 na++;
4183 }
4184
4185 v = iterative_hash_hashval_t (na, v);
4186 }
4187
4188 if (TREE_CODE (type) == RECORD_TYPE
4189 || TREE_CODE (type) == UNION_TYPE
4190 || TREE_CODE (type) == QUAL_UNION_TYPE)
4191 {
4192 unsigned nf;
4193 tree f;
4194
4195 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4196 {
4197 v = iterative_hash_name (DECL_NAME (f), v);
4198 v = visit (TREE_TYPE (f), state, v,
4199 sccstack, sccstate, sccstate_obstack);
4200 nf++;
4201 }
4202
4203 v = iterative_hash_hashval_t (nf, v);
4204 }
4205
4206 /* Record hash for us. */
4207 state->u.hash = v;
4208
4209 /* See if we found an SCC. */
4210 if (state->low == state->dfsnum)
4211 {
4212 tree x;
4213 struct tree_int_map *m;
4214
4215 /* Pop off the SCC and set its hash values. */
4216 x = VEC_pop (tree, *sccstack);
4217 /* Optimize SCC size one. */
4218 if (x == type)
4219 {
4220 state->on_sccstack = false;
4221 m = ggc_alloc_cleared_tree_int_map ();
4222 m->base.from = x;
4223 m->to = v;
4224 slot = htab_find_slot (type_hash_cache, m, INSERT);
4225 gcc_assert (!*slot);
4226 *slot = (void *) m;
4227 }
4228 else
4229 {
4230 struct sccs *cstate;
4231 unsigned first, i, size, j;
4232 struct type_hash_pair *pairs;
4233 /* Pop off the SCC and build an array of type, hash pairs. */
4234 first = VEC_length (tree, *sccstack) - 1;
4235 while (VEC_index (tree, *sccstack, first) != type)
4236 --first;
4237 size = VEC_length (tree, *sccstack) - first + 1;
4238 pairs = XALLOCAVEC (struct type_hash_pair, size);
4239 i = 0;
4240 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4241 cstate->on_sccstack = false;
4242 pairs[i].type = x;
4243 pairs[i].hash = cstate->u.hash;
4244 do
4245 {
4246 x = VEC_pop (tree, *sccstack);
4247 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4248 cstate->on_sccstack = false;
4249 ++i;
4250 pairs[i].type = x;
4251 pairs[i].hash = cstate->u.hash;
4252 }
4253 while (x != type);
4254 gcc_assert (i + 1 == size);
4255 /* Sort the array of type, hash pairs so that when we mix in
4256 all members of the SCC the hash value becomes independent of
4257 the order we visited the SCC. Disregard hashes equal to
4258 the hash of the type we mix into because we cannot guarantee
4259 a stable sort for those across different TUs. */
4260 qsort (pairs, size, sizeof (struct type_hash_pair),
4261 type_hash_pair_compare);
4262 for (i = 0; i < size; ++i)
4263 {
4264 hashval_t hash;
4265 m = ggc_alloc_cleared_tree_int_map ();
4266 m->base.from = pairs[i].type;
4267 hash = pairs[i].hash;
4268 /* Skip same hashes. */
4269 for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
4270 ;
4271 for (; j < size; ++j)
4272 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4273 for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
4274 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4275 m->to = hash;
4276 if (pairs[i].type == type)
4277 v = hash;
4278 slot = htab_find_slot (type_hash_cache, m, INSERT);
4279 gcc_assert (!*slot);
4280 *slot = (void *) m;
4281 }
4282 }
4283 }
4284
4285 return iterative_hash_hashval_t (v, val);
4286 }
4287
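/* The SCC post-processing above makes the hash of mutually recursive
   types independent of the DFS visit order by sorting the member
   hashes before mixing them.  A standalone sketch of that idea
   (hypothetical helpers, not the GCC implementation):

     #include <stdlib.h>

     typedef unsigned int hashval_t;

     static hashval_t
     mix (hashval_t h, hashval_t v)  // stand-in for iterative_hash_hashval_t
     {
       return h * 31 + v;
     }

     static int
     cmp_hash (const void *a_, const void *b_)
     {
       hashval_t a = *(const hashval_t *) a_;
       hashval_t b = *(const hashval_t *) b_;
       return a < b ? -1 : a > b ? 1 : 0;
     }

     // Hash an SCC of N member hashes order-independently.
     static hashval_t
     hash_scc (hashval_t *h, size_t n)
     {
       size_t i;
       hashval_t v = 0;
       qsort (h, n, sizeof (hashval_t), cmp_hash);
       for (i = 0; i < n; ++i)
         v = mix (v, h[i]);
       return v;
     }
*/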
4288
4289 /* Returns a hash value for P (assumed to be a type). The hash value
4290 is computed using some distinguishing features of the type. Note
4291 that we cannot use pointer hashing here as we may be dealing with
4292 two distinct instances of the same type.
4293
4294 This function should produce the same hash value for two compatible
4295 types according to gimple_types_compatible_p. */
4296
4297 static hashval_t
4298 gimple_type_hash (const void *p)
4299 {
4300 const_tree t = (const_tree) p;
4301 VEC(tree, heap) *sccstack = NULL;
4302 struct pointer_map_t *sccstate;
4303 struct obstack sccstate_obstack;
4304 hashval_t val;
4305 void **slot;
4306 struct tree_int_map m;
4307
4308 if (type_hash_cache == NULL)
4309 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4310 tree_int_map_eq, NULL);
4311
4312 m.base.from = CONST_CAST_TREE (t);
4313 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
4314 && *slot)
4315 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
4316
4317 /* Perform a DFS walk and pre-hash all reachable types. */
4318 next_dfs_num = 1;
4319 sccstate = pointer_map_create ();
4320 gcc_obstack_init (&sccstate_obstack);
4321 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
4322 &sccstack, sccstate, &sccstate_obstack);
4323 VEC_free (tree, heap, sccstack);
4324 pointer_map_destroy (sccstate);
4325 obstack_free (&sccstate_obstack, NULL);
4326
4327 return val;
4328 }
4329
4330 /* Return a hash value for gimple type TYPE combined with VAL.
4331
4332 The hash value returned is equal for types considered compatible
4333 by gimple_canonical_types_compatible_p. */
4334
4335 static hashval_t
4336 iterative_hash_canonical_type (tree type, hashval_t val)
4337 {
4338 hashval_t v;
4339 void **slot;
4340 struct tree_int_map *mp, m;
4341
4342 m.base.from = type;
4343 if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
4344 && *slot)
4345 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
4346
4347 /* Combine a few common features of types so that types are grouped into
4348 smaller sets; when searching for existing matching types to merge,
4349 only existing types having the same features as the new type will be
4350 checked. */
4351 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
4352 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4353 v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
4354 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4355
4356 /* Incorporate common features of numerical types. */
4357 if (INTEGRAL_TYPE_P (type)
4358 || SCALAR_FLOAT_TYPE_P (type)
4359 || FIXED_POINT_TYPE_P (type)
4360 || TREE_CODE (type) == VECTOR_TYPE
4361 || TREE_CODE (type) == COMPLEX_TYPE
4362 || TREE_CODE (type) == OFFSET_TYPE
4363 || POINTER_TYPE_P (type))
4364 {
4365 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4366 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4367 }
4368
4369 /* For pointer and reference types, fold in information about the type
4370 pointed to but do not recurse to the pointed-to type. */
4371 if (POINTER_TYPE_P (type))
4372 {
4373 v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
4374 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
4375 v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
4376 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4377 }
4378
4379 /* For integer types hash the string flag and whether the type is a sizetype. */
4380 if (TREE_CODE (type) == INTEGER_TYPE)
4381 {
4382 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4383 v = iterative_hash_hashval_t (TYPE_IS_SIZETYPE (type), v);
4384 }
4385
4386 /* For array types hash their domain and the string flag. */
4387 if (TREE_CODE (type) == ARRAY_TYPE
4388 && TYPE_DOMAIN (type))
4389 {
4390 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4391 v = iterative_hash_canonical_type (TYPE_DOMAIN (type), v);
4392 }
4393
4394 /* Recurse for aggregates with a single element type. */
4395 if (TREE_CODE (type) == ARRAY_TYPE
4396 || TREE_CODE (type) == COMPLEX_TYPE
4397 || TREE_CODE (type) == VECTOR_TYPE)
4398 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4399
4400 /* Incorporate function return and argument types. */
4401 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4402 {
4403 unsigned na;
4404 tree p;
4405
4406 /* For method types also incorporate their parent class. */
4407 if (TREE_CODE (type) == METHOD_TYPE)
4408 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
4409
4410 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4411
4412 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4413 {
4414 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
4415 na++;
4416 }
4417
4418 v = iterative_hash_hashval_t (na, v);
4419 }
4420
4421 if (TREE_CODE (type) == RECORD_TYPE
4422 || TREE_CODE (type) == UNION_TYPE
4423 || TREE_CODE (type) == QUAL_UNION_TYPE)
4424 {
4425 unsigned nf;
4426 tree f;
4427
4428 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4429 if (TREE_CODE (f) == FIELD_DECL)
4430 {
4431 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
4432 nf++;
4433 }
4434
4435 v = iterative_hash_hashval_t (nf, v);
4436 }
4437
4438 /* Cache the just computed hash value. */
4439 mp = ggc_alloc_cleared_tree_int_map ();
4440 mp->base.from = type;
4441 mp->to = v;
4442 *slot = (void *) mp;
4443
4444 return iterative_hash_hashval_t (v, val);
4445 }
4446
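/* Hash callback for the gimple_canonical_types hashtable.  Wraps
   iterative_hash_canonical_type; the result is equal for types
   considered compatible by gimple_canonical_types_compatible_p.  */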
4447 static hashval_t
4448 gimple_canonical_type_hash (const void *p)
4449 {
4450 if (canonical_type_hash_cache == NULL)
4451 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4452 tree_int_map_eq, NULL);
4453
4454 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
4455 }
4456
4457
4458 /* Returns nonzero if P1 and P2 are equal. */
4459
4460 static int
4461 gimple_type_eq (const void *p1, const void *p2)
4462 {
4463 const_tree t1 = (const_tree) p1;
4464 const_tree t2 = (const_tree) p2;
4465 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4466 CONST_CAST_TREE (t2));
4467 }
4468
4469
4470 /* Worker for gimple_register_type.
4471 Register type T in the global type table gimple_types.
4472 When REGISTERING_MV is false first recurse for the main variant of T. */
4473
4474 static tree
4475 gimple_register_type_1 (tree t, bool registering_mv)
4476 {
4477 void **slot;
4478 gimple_type_leader_entry *leader;
4479
4480 /* If we registered this type before, return the cached result. */
4481 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
4482 if (leader->type == t)
4483 return leader->leader;
4484
4485 /* Always register the main variant first. This is important so we
4486 pick up the non-typedef variants as canonical, otherwise we'll end
4487 up taking typedef ids for structure tags during comparison.
4488 It also makes sure that main variants will be merged to main variants.
4489 As we are operating on a possibly partially fixed up type graph
4490 do not bother to recurse more than once, otherwise we may end up
4491 walking in circles.
4492 If we are registering a main variant it will either remain its
4493 own main variant or it will be merged to something else in which
4494 case we do not care for the main variant leader. */
4495 if (!registering_mv
4496 && TYPE_MAIN_VARIANT (t) != t)
4497 gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
4498
4499 /* See if we already have an equivalent type registered. */
4500 slot = htab_find_slot (gimple_types, t, INSERT);
4501 if (*slot
4502 && *(tree *)slot != t)
4503 {
4504 tree new_type = (tree) *((tree *) slot);
4505 leader->type = t;
4506 leader->leader = new_type;
4507 return new_type;
4508 }
4509
4510 /* If not, insert it into the cache and the hash table. */
4511 leader->type = t;
4512 leader->leader = t;
4513 *slot = (void *) t;
4514 return t;
4515 }
4516
4517 /* Register type T in the global type table gimple_types.
4518 If another type T', compatible with T, already existed in
4519 gimple_types then return T', otherwise return T. This is used by
4520 LTO to merge identical types read from different TUs. */
4521
4522 tree
4523 gimple_register_type (tree t)
4524 {
4525 gcc_assert (TYPE_P (t));
4526
4527 if (!gimple_type_leader)
4528 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
4529 (GIMPLE_TYPE_LEADER_SIZE);
4530
4531 if (gimple_types == NULL)
4532 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
4533
4534 return gimple_register_type_1 (t, false);
4535 }
4536
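/* Usage sketch: the LTO streamer funnels every type it reads through
   this function so that structurally identical types from different
   TUs collapse to one leader (hypothetical reader function, not the
   actual streamer code):

     tree t = stream_read_type (ib);   // hypothetical
     t = gimple_register_type (t);     // T itself, or an existing leader
*/
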
4537 /* The TYPE_CANONICAL merging machinery. It should closely resemble
4538 the middle-end types_compatible_p function. It needs to avoid
4539 claiming types are different for types that should be treated
4540 the same with respect to TBAA. Canonical types are also used
4541 for IL consistency checks via the useless_type_conversion_p
4542 predicate which does not handle all type kinds itself but falls
4543 back to pointer-comparison of TYPE_CANONICAL for aggregates,
4544 for example. */
4545
4546 /* Return true iff T1 and T2 are structurally identical as far as
4547 TBAA is concerned. */
4548
4549 static bool
4550 gimple_canonical_types_compatible_p (tree t1, tree t2)
4551 {
4552 /* Before starting to set up the SCC machinery handle simple cases. */
4553
4554 /* Check first for the obvious case of pointer identity. */
4555 if (t1 == t2)
4556 return true;
4557
4558 /* Check that we have two types to compare. */
4559 if (t1 == NULL_TREE || t2 == NULL_TREE)
4560 return false;
4561
4562 /* If the types have been previously registered and found equal
4563 they still are. */
4564 if (TYPE_CANONICAL (t1)
4565 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
4566 return true;
4567
4568 /* Can't be the same type if the types don't have the same code. */
4569 if (TREE_CODE (t1) != TREE_CODE (t2))
4570 return false;
4571
4572 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
4573 return false;
4574
4575 /* Qualifiers do not matter for canonical type comparison purposes; e.g. the const and non-const variants of a type get the same canonical type. */
4576
4577 /* Void types and nullptr types are always the same. */
4578 if (TREE_CODE (t1) == VOID_TYPE
4579 || TREE_CODE (t1) == NULLPTR_TYPE)
4580 return true;
4581
4582 /* Can't be the same type if they have different alignment, or mode. */
4583 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
4584 || TYPE_MODE (t1) != TYPE_MODE (t2))
4585 return false;
4586
4587 /* Non-aggregate types can be handled cheaply. */
4588 if (INTEGRAL_TYPE_P (t1)
4589 || SCALAR_FLOAT_TYPE_P (t1)
4590 || FIXED_POINT_TYPE_P (t1)
4591 || TREE_CODE (t1) == VECTOR_TYPE
4592 || TREE_CODE (t1) == COMPLEX_TYPE
4593 || TREE_CODE (t1) == OFFSET_TYPE
4594 || POINTER_TYPE_P (t1))
4595 {
4596 /* Can't be the same type if they have different sign or precision. */
4597 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
4598 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
4599 return false;
4600
4601 if (TREE_CODE (t1) == INTEGER_TYPE
4602 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
4603 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
4604 return false;
4605
4606 /* For canonical type comparisons we do not want to build SCCs
4607 so we cannot compare pointed-to types. But we can, for now,
4608 require the same pointed-to type kind and match what
4609 useless_type_conversion_p would do. */
4610 if (POINTER_TYPE_P (t1))
4611 {
4612 /* If the two pointers have different ref-all attributes,
4613 they can't be the same type. */
4614 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
4615 return false;
4616
4617 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
4618 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
4619 return false;
4620
4621 if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
4622 return false;
4623
4624 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
4625 return false;
4626 }
4627
4628 /* Tail-recurse to components. */
4629 if (TREE_CODE (t1) == VECTOR_TYPE
4630 || TREE_CODE (t1) == COMPLEX_TYPE)
4631 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
4632 TREE_TYPE (t2));
4633
4634 return true;
4635 }
4636
4637 /* If their attributes are not the same they can't be the same type. */
4638 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
4639 return false;
4640
4641 /* Do type-specific comparisons. */
4642 switch (TREE_CODE (t1))
4643 {
4644 case ARRAY_TYPE:
4645 /* Array types are the same if the element types are the same and
4646 the number of elements are the same. */
4647 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
4648 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
4649 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
4650 return false;
4651 else
4652 {
4653 tree i1 = TYPE_DOMAIN (t1);
4654 tree i2 = TYPE_DOMAIN (t2);
4655
4656 /* For an incomplete external array, the type domain can be
4657 NULL_TREE. Check this condition also. */
4658 if (i1 == NULL_TREE && i2 == NULL_TREE)
4659 return true;
4660 else if (i1 == NULL_TREE || i2 == NULL_TREE)
4661 return false;
4662 /* If for a complete array type the possibly gimplified sizes
4663 are different, the types are different. */
4664 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
4665 || (TYPE_SIZE (i1)
4666 && TYPE_SIZE (i2)
4667 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
4668 return false;
4669 else
4670 {
4671 tree min1 = TYPE_MIN_VALUE (i1);
4672 tree min2 = TYPE_MIN_VALUE (i2);
4673 tree max1 = TYPE_MAX_VALUE (i1);
4674 tree max2 = TYPE_MAX_VALUE (i2);
4675
4676 /* The minimum/maximum values have to be the same. */
4677 if ((min1 == min2
4678 || (min1 && min2
4679 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
4680 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
4681 || operand_equal_p (min1, min2, 0))))
4682 && (max1 == max2
4683 || (max1 && max2
4684 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
4685 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
4686 || operand_equal_p (max1, max2, 0)))))
4687 return true;
4688 else
4689 return false;
4690 }
4691 }
4692
4693 case METHOD_TYPE:
4694 /* Method types should belong to the same class. */
4695 if (!gimple_canonical_types_compatible_p
4696 (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2)))
4697 return false;
4698
4699 /* Fallthru */
4700
4701 case FUNCTION_TYPE:
4702 /* Function types are the same if the return type and arguments types
4703 are the same. */
4704 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
4705 return false;
4706
4707 if (!comp_type_attributes (t1, t2))
4708 return false;
4709
4710 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
4711 return true;
4712 else
4713 {
4714 tree parms1, parms2;
4715
4716 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
4717 parms1 && parms2;
4718 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
4719 {
4720 if (!gimple_canonical_types_compatible_p
4721 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
4722 return false;
4723 }
4724
4725 if (parms1 || parms2)
4726 return false;
4727
4728 return true;
4729 }
4730
4731 case RECORD_TYPE:
4732 case UNION_TYPE:
4733 case QUAL_UNION_TYPE:
4734 {
4735 tree f1, f2;
4736
4737 /* For aggregate types, all the fields must be the same. */
4738 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
4739 f1 && f2;
4740 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
4741 {
4742 /* Skip non-fields. */
4743 while (f1 && TREE_CODE (f1) != FIELD_DECL)
4744 f1 = TREE_CHAIN (f1);
4745 while (f2 && TREE_CODE (f2) != FIELD_DECL)
4746 f2 = TREE_CHAIN (f2);
4747 if (!f1 || !f2)
4748 break;
4749 /* The fields must have the same name, offset and type. */
4750 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
4751 || !gimple_compare_field_offset (f1, f2)
4752 || !gimple_canonical_types_compatible_p
4753 (TREE_TYPE (f1), TREE_TYPE (f2)))
4754 return false;
4755 }
4756
4757 /* If one aggregate has more fields than the other, they
4758 are not the same. */
4759 if (f1 || f2)
4760 return false;
4761
4762 return true;
4763 }
4764
4765 default:
4766 gcc_unreachable ();
4767 }
4768 }
4769
4770
4771 /* Returns nonzero if P1 and P2 are equal. */
4772
4773 static int
4774 gimple_canonical_type_eq (const void *p1, const void *p2)
4775 {
4776 const_tree t1 = (const_tree) p1;
4777 const_tree t2 = (const_tree) p2;
4778 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
4779 CONST_CAST_TREE (t2));
4780 }
4781
4782 /* Register type T in the global canonical type table
4783 gimple_canonical_types. If another type T', compatible with T,
4784 already existed there then return T', otherwise return T. This is
4785 used by LTO to merge identical types read from different TUs.
4786
4787 ??? This merging does not exactly match how the tree.c middle-end
4788 functions will assign TYPE_CANONICAL when new types are created
4789 during optimization (which at least happens for pointer and array
4790 types). */
4791
4792 tree
4793 gimple_register_canonical_type (tree t)
4794 {
4795 void **slot;
4796
4797 gcc_assert (TYPE_P (t));
4798
4799 if (TYPE_CANONICAL (t))
4800 return TYPE_CANONICAL (t);
4801
4802 if (gimple_canonical_types == NULL)
4803 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
4804 gimple_canonical_type_eq, 0);
4805
4806 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
4807 if (*slot
4808 && *(tree *)slot != t)
4809 {
4810 tree new_type = (tree) *((tree *) slot);
4811
4812 TYPE_CANONICAL (t) = new_type;
4813 t = new_type;
4814 }
4815 else
4816 {
4817 TYPE_CANONICAL (t) = t;
4818 *slot = (void *) t;
4819 }
4820
4821 return t;
4822 }
4823
4824
4825 /* Show statistics on references to the global type table gimple_types. */
4826
4827 void
4828 print_gimple_types_stats (void)
4829 {
4830 if (gimple_types)
4831 fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
4832 "%ld searches, %ld collisions (ratio: %f)\n",
4833 (long) htab_size (gimple_types),
4834 (long) htab_elements (gimple_types),
4835 (long) gimple_types->searches,
4836 (long) gimple_types->collisions,
4837 htab_collisions (gimple_types));
4838 else
4839 fprintf (stderr, "GIMPLE type table is empty\n");
4840 if (type_hash_cache)
4841 fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
4842 "%ld searches, %ld collisions (ratio: %f)\n",
4843 (long) htab_size (type_hash_cache),
4844 (long) htab_elements (type_hash_cache),
4845 (long) type_hash_cache->searches,
4846 (long) type_hash_cache->collisions,
4847 htab_collisions (type_hash_cache));
4848 else
4849 fprintf (stderr, "GIMPLE type hash table is empty\n");
4850 if (gimple_canonical_types)
4851 fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
4852 "%ld searches, %ld collisions (ratio: %f)\n",
4853 (long) htab_size (gimple_canonical_types),
4854 (long) htab_elements (gimple_canonical_types),
4855 (long) gimple_canonical_types->searches,
4856 (long) gimple_canonical_types->collisions,
4857 htab_collisions (gimple_canonical_types));
4858 else
4859 fprintf (stderr, "GIMPLE canonical type table is empty\n");
4860 if (canonical_type_hash_cache)
4861 fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
4862 "%ld searches, %ld collisions (ratio: %f)\n",
4863 (long) htab_size (canonical_type_hash_cache),
4864 (long) htab_elements (canonical_type_hash_cache),
4865 (long) canonical_type_hash_cache->searches,
4866 (long) canonical_type_hash_cache->collisions,
4867 htab_collisions (canonical_type_hash_cache));
4868 else
4869 fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
4870 }
4871
4872 /* Free the gimple type hashtables used for LTO type merging. */
4873
4874 void
4875 free_gimple_type_tables (void)
4876 {
4877 /* Last chance to print stats for the tables. */
4878 if (flag_lto_report)
4879 print_gimple_types_stats ();
4880
4881 if (gimple_types)
4882 {
4883 htab_delete (gimple_types);
4884 gimple_types = NULL;
4885 }
4886 if (gimple_canonical_types)
4887 {
4888 htab_delete (gimple_canonical_types);
4889 gimple_canonical_types = NULL;
4890 }
4891 if (type_hash_cache)
4892 {
4893 htab_delete (type_hash_cache);
4894 type_hash_cache = NULL;
4895 }
4896 if (canonical_type_hash_cache)
4897 {
4898 htab_delete (canonical_type_hash_cache);
4899 canonical_type_hash_cache = NULL;
4900 }
4901 if (type_pair_cache)
4902 {
4903 free (type_pair_cache);
4904 type_pair_cache = NULL;
4905 }
4906 gimple_type_leader = NULL;
4907 }
4908
4909
4910 /* Return a type the same as TYPE except unsigned or
4911 signed according to UNSIGNEDP. */
4912
4913 static tree
4914 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
4915 {
4916 tree type1;
4917
4918 type1 = TYPE_MAIN_VARIANT (type);
4919 if (type1 == signed_char_type_node
4920 || type1 == char_type_node
4921 || type1 == unsigned_char_type_node)
4922 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4923 if (type1 == integer_type_node || type1 == unsigned_type_node)
4924 return unsignedp ? unsigned_type_node : integer_type_node;
4925 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
4926 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4927 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
4928 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4929 if (type1 == long_long_integer_type_node
4930 || type1 == long_long_unsigned_type_node)
4931 return unsignedp
4932 ? long_long_unsigned_type_node
4933 : long_long_integer_type_node;
4934 if (int128_integer_type_node && (type1 == int128_integer_type_node || type1 == int128_unsigned_type_node))
4935 return unsignedp
4936 ? int128_unsigned_type_node
4937 : int128_integer_type_node;
4938 #if HOST_BITS_PER_WIDE_INT >= 64
4939 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
4940 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4941 #endif
4942 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
4943 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4944 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
4945 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4946 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
4947 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4948 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
4949 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4950
4951 #define GIMPLE_FIXED_TYPES(NAME) \
4952 if (type1 == short_ ## NAME ## _type_node \
4953 || type1 == unsigned_short_ ## NAME ## _type_node) \
4954 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
4955 : short_ ## NAME ## _type_node; \
4956 if (type1 == NAME ## _type_node \
4957 || type1 == unsigned_ ## NAME ## _type_node) \
4958 return unsignedp ? unsigned_ ## NAME ## _type_node \
4959 : NAME ## _type_node; \
4960 if (type1 == long_ ## NAME ## _type_node \
4961 || type1 == unsigned_long_ ## NAME ## _type_node) \
4962 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
4963 : long_ ## NAME ## _type_node; \
4964 if (type1 == long_long_ ## NAME ## _type_node \
4965 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
4966 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
4967 : long_long_ ## NAME ## _type_node;
4968
4969 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
4970 if (type1 == NAME ## _type_node \
4971 || type1 == u ## NAME ## _type_node) \
4972 return unsignedp ? u ## NAME ## _type_node \
4973 : NAME ## _type_node;
4974
4975 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
4976 if (type1 == sat_ ## short_ ## NAME ## _type_node \
4977 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
4978 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
4979 : sat_ ## short_ ## NAME ## _type_node; \
4980 if (type1 == sat_ ## NAME ## _type_node \
4981 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
4982 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
4983 : sat_ ## NAME ## _type_node; \
4984 if (type1 == sat_ ## long_ ## NAME ## _type_node \
4985 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
4986 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
4987 : sat_ ## long_ ## NAME ## _type_node; \
4988 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
4989 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
4990 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
4991 : sat_ ## long_long_ ## NAME ## _type_node;
4992
4993 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
4994 if (type1 == sat_ ## NAME ## _type_node \
4995 || type1 == sat_ ## u ## NAME ## _type_node) \
4996 return unsignedp ? sat_ ## u ## NAME ## _type_node \
4997 : sat_ ## NAME ## _type_node;
4998
4999 GIMPLE_FIXED_TYPES (fract);
5000 GIMPLE_FIXED_TYPES_SAT (fract);
5001 GIMPLE_FIXED_TYPES (accum);
5002 GIMPLE_FIXED_TYPES_SAT (accum);
5003
5004 GIMPLE_FIXED_MODE_TYPES (qq);
5005 GIMPLE_FIXED_MODE_TYPES (hq);
5006 GIMPLE_FIXED_MODE_TYPES (sq);
5007 GIMPLE_FIXED_MODE_TYPES (dq);
5008 GIMPLE_FIXED_MODE_TYPES (tq);
5009 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
5010 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
5011 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
5012 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
5013 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
5014 GIMPLE_FIXED_MODE_TYPES (ha);
5015 GIMPLE_FIXED_MODE_TYPES (sa);
5016 GIMPLE_FIXED_MODE_TYPES (da);
5017 GIMPLE_FIXED_MODE_TYPES (ta);
5018 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
5019 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
5020 GIMPLE_FIXED_MODE_TYPES_SAT (da);
5021 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
5022
5023 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
5024 the precision; they have precision set to match their range, but
5025 may use a wider mode to match an ABI. If we change modes, we may
5026 wind up with bad conversions. For INTEGER_TYPEs in C, must check
5027 the precision as well, so as to yield correct results for
5028 bit-field types. C++ does not have these separate bit-field
5029 types, and producing a signed or unsigned variant of an
5030 ENUMERAL_TYPE may cause other problems as well. */
5031 if (!INTEGRAL_TYPE_P (type)
5032 || TYPE_UNSIGNED (type) == unsignedp)
5033 return type;
5034
5035 #define TYPE_OK(node) \
5036 (TYPE_MODE (type) == TYPE_MODE (node) \
5037 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
5038 if (TYPE_OK (signed_char_type_node))
5039 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
5040 if (TYPE_OK (integer_type_node))
5041 return unsignedp ? unsigned_type_node : integer_type_node;
5042 if (TYPE_OK (short_integer_type_node))
5043 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
5044 if (TYPE_OK (long_integer_type_node))
5045 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
5046 if (TYPE_OK (long_long_integer_type_node))
5047 return (unsignedp
5048 ? long_long_unsigned_type_node
5049 : long_long_integer_type_node);
5050 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
5051 return (unsignedp
5052 ? int128_unsigned_type_node
5053 : int128_integer_type_node);
5054
5055 #if HOST_BITS_PER_WIDE_INT >= 64
5056 if (TYPE_OK (intTI_type_node))
5057 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
5058 #endif
5059 if (TYPE_OK (intDI_type_node))
5060 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
5061 if (TYPE_OK (intSI_type_node))
5062 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
5063 if (TYPE_OK (intHI_type_node))
5064 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
5065 if (TYPE_OK (intQI_type_node))
5066 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
5067
5068 #undef GIMPLE_FIXED_TYPES
5069 #undef GIMPLE_FIXED_MODE_TYPES
5070 #undef GIMPLE_FIXED_TYPES_SAT
5071 #undef GIMPLE_FIXED_MODE_TYPES_SAT
5072 #undef TYPE_OK
5073
5074 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
5075 }
5076
5077
5078 /* Return an unsigned type the same as TYPE in other respects. */
5079
5080 tree
5081 gimple_unsigned_type (tree type)
5082 {
5083 return gimple_signed_or_unsigned_type (true, type);
5084 }
5085
5086
5087 /* Return a signed type the same as TYPE in other respects. */
5088
5089 tree
5090 gimple_signed_type (tree type)
5091 {
5092 return gimple_signed_or_unsigned_type (false, type);
5093 }
5094
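/* Usage sketch for the two wrappers above (assumes the standard C
   type nodes have been set up by the front end):

     tree u = gimple_unsigned_type (integer_type_node);
     // u is unsigned_type_node
     tree s = gimple_signed_type (unsigned_char_type_node);
     // s is signed_char_type_node
*/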
5095
5096 /* Return the typed-based alias set for T, which may be an expression
5097 or a type. Return -1 if we don't do anything special. */
5098
5099 alias_set_type
5100 gimple_get_alias_set (tree t)
5101 {
5102 tree u;
5103
5104 /* Permit type-punning when accessing a union, provided the access
5105 is directly through the union. For example, this code does not
5106 permit taking the address of a union member and then storing
5107 through it. Even the type-punning allowed here is a GCC
5108 extension, albeit a common and useful one; the C standard says
5109 that such accesses have implementation-defined behavior. */
5110 for (u = t;
5111 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
5112 u = TREE_OPERAND (u, 0))
5113 if (TREE_CODE (u) == COMPONENT_REF
5114 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
5115 return 0;
5116
5117 /* That's all the expressions we handle specially. */
5118 if (!TYPE_P (t))
5119 return -1;
5120
5121 /* For convenience, follow the C standard when dealing with
5122 character types. Any object may be accessed via an lvalue that
5123 has character type. */
5124 if (t == char_type_node
5125 || t == signed_char_type_node
5126 || t == unsigned_char_type_node)
5127 return 0;
5128
5129 /* Allow aliasing between signed and unsigned variants of the same
5130 type. We treat the signed variant as canonical. */
5131 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
5132 {
5133 tree t1 = gimple_signed_type (t);
5134
5135 /* t1 == t can happen for boolean nodes which are always unsigned. */
5136 if (t1 != t)
5137 return get_alias_set (t1);
5138 }
5139
5140 return -1;
5141 }
5142
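/* At the source level the union rule above draws this distinction
   (plain C, illustrative only):

     union u { int i; float f; } x;

     float a = x.f;            // access directly through the union:
                               //   alias set 0, punning permitted
     int *p = &x.i;
     float b = *(float *) p;   // through a pointer to a member:
                               //   not covered by the exception
*/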
5143
5144 /* Data structure used to count the number of dereferences to PTR
5145 inside an expression. */
5146 struct count_ptr_d
5147 {
5148 tree ptr;
5149 unsigned num_stores;
5150 unsigned num_loads;
5151 };
5152
5153 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
5154 MEM_REF nodes whose base is the pointer passed in DATA. */
5155
5156 static tree
5157 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
5158 {
5159 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
5160 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
5161
5162 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
5163 pointer 'ptr' is *not* dereferenced, it is simply used to compute
5164 the address of 'fld' as 'ptr + offsetof(fld)'. */
5165 if (TREE_CODE (*tp) == ADDR_EXPR)
5166 {
5167 *walk_subtrees = 0;
5168 return NULL_TREE;
5169 }
5170
5171 if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
5172 {
5173 if (wi_p->is_lhs)
5174 count_p->num_stores++;
5175 else
5176 count_p->num_loads++;
5177 }
5178
5179 return NULL_TREE;
5180 }
5181
5182 /* Count the number of direct and indirect uses for pointer PTR in
5183 statement STMT. The number of direct uses is stored in
5184 *NUM_USES_P. Indirect references are counted separately depending
5185 on whether they are store or load operations. The counts are
5186 stored in *NUM_STORES_P and *NUM_LOADS_P. */
5187
5188 void
5189 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
5190 unsigned *num_loads_p, unsigned *num_stores_p)
5191 {
5192 ssa_op_iter i;
5193 tree use;
5194
5195 *num_uses_p = 0;
5196 *num_loads_p = 0;
5197 *num_stores_p = 0;
5198
5199 /* Find out the total number of uses of PTR in STMT. */
5200 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
5201 if (use == ptr)
5202 (*num_uses_p)++;
5203
5204 /* Now count the number of indirect references to PTR. This is
5205 truly awful, but we don't have much choice. There are no parent
5206 pointers inside INDIRECT_REFs, so an expression like
5207 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
5208 find all the indirect and direct uses of x_1 inside. The only
5209 shortcut we can take is the fact that GIMPLE only allows
5210 memory references (MEM_REFs) inside the expressions below. */
5211 if (is_gimple_assign (stmt)
5212 || gimple_code (stmt) == GIMPLE_RETURN
5213 || gimple_code (stmt) == GIMPLE_ASM
5214 || is_gimple_call (stmt))
5215 {
5216 struct walk_stmt_info wi;
5217 struct count_ptr_d count;
5218
5219 count.ptr = ptr;
5220 count.num_stores = 0;
5221 count.num_loads = 0;
5222
5223 memset (&wi, 0, sizeof (wi));
5224 wi.info = &count;
5225 walk_gimple_op (stmt, count_ptr_derefs, &wi);
5226
5227 *num_stores_p = count.num_stores;
5228 *num_loads_p = count.num_loads;
5229 }
5230
5231 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
5232 }
5233
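/* Usage sketch (hypothetical pass code; PTR is an SSA name of pointer
   type occurring in STMT):

     unsigned uses, loads, stores;
     count_uses_and_derefs (ptr, stmt, &uses, &loads, &stores);
     if (stores == 0 && loads > 0)
       // PTR is dereferenced only for reading in STMT.
       ...;
*/
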
5234 /* From a tree operand OP return the base of a load or store operation
5235 or NULL_TREE if OP is not a load or a store. */
5236
5237 static tree
5238 get_base_loadstore (tree op)
5239 {
5240 while (handled_component_p (op))
5241 op = TREE_OPERAND (op, 0);
5242 if (DECL_P (op)
5243 || INDIRECT_REF_P (op)
5244 || TREE_CODE (op) == MEM_REF
5245 || TREE_CODE (op) == TARGET_MEM_REF)
5246 return op;
5247 return NULL_TREE;
5248 }
5249
5250 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
5251 VISIT_ADDR if non-NULL on loads, stores and address-taken operands
5252 passing the STMT, the base of the operand and DATA to it. The base
5253 will be either a decl, an indirect reference (including TARGET_MEM_REF)
5254 or the argument of an address expression.
5255 Returns the results of these callbacks or'ed. */
5256
5257 bool
5258 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
5259 bool (*visit_load)(gimple, tree, void *),
5260 bool (*visit_store)(gimple, tree, void *),
5261 bool (*visit_addr)(gimple, tree, void *))
5262 {
5263 bool ret = false;
5264 unsigned i;
5265 if (gimple_assign_single_p (stmt))
5266 {
5267 tree lhs, rhs;
5268 if (visit_store)
5269 {
5270 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
5271 if (lhs)
5272 ret |= visit_store (stmt, lhs, data);
5273 }
5274 rhs = gimple_assign_rhs1 (stmt);
5275 while (handled_component_p (rhs))
5276 rhs = TREE_OPERAND (rhs, 0);
5277 if (visit_addr)
5278 {
5279 if (TREE_CODE (rhs) == ADDR_EXPR)
5280 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5281 else if (TREE_CODE (rhs) == TARGET_MEM_REF
5282 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
5283 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
5284 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
5285 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
5286 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
5287 0), data);
5288 else if (TREE_CODE (rhs) == CONSTRUCTOR)
5289 {
5290 unsigned int ix;
5291 tree val;
5292
5293 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
5294 if (TREE_CODE (val) == ADDR_EXPR)
5295 ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
5296 else if (TREE_CODE (val) == OBJ_TYPE_REF
5297 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
5298 ret |= visit_addr (stmt,
5299 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
5300 0), data);
5301 }
5302 lhs = gimple_assign_lhs (stmt);
5303 if (TREE_CODE (lhs) == TARGET_MEM_REF
5304 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
5305 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
5306 }
5307 if (visit_load)
5308 {
5309 rhs = get_base_loadstore (rhs);
5310 if (rhs)
5311 ret |= visit_load (stmt, rhs, data);
5312 }
5313 }
5314 else if (visit_addr
5315 && (is_gimple_assign (stmt)
5316 || gimple_code (stmt) == GIMPLE_COND))
5317 {
5318 for (i = 0; i < gimple_num_ops (stmt); ++i)
5319 {
5320 tree op = gimple_op (stmt, i);
5321 if (op == NULL_TREE)
5322 ;
5323 else if (TREE_CODE (op) == ADDR_EXPR)
5324 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5325 /* The rhs1 argument of COND_EXPR and VCOND_EXPR is a comparison
5326 tree with two operands. */
5327 else if (i == 1 && COMPARISON_CLASS_P (op))
5328 {
5329 if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
5330 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
5331 0), data);
5332 if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
5333 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
5334 0), data);
5335 }
5336 }
5337 }
5338 else if (is_gimple_call (stmt))
5339 {
5340 if (visit_store)
5341 {
5342 tree lhs = gimple_call_lhs (stmt);
5343 if (lhs)
5344 {
5345 lhs = get_base_loadstore (lhs);
5346 if (lhs)
5347 ret |= visit_store (stmt, lhs, data);
5348 }
5349 }
5350 if (visit_load || visit_addr)
5351 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5352 {
5353 tree rhs = gimple_call_arg (stmt, i);
5354 if (visit_addr
5355 && TREE_CODE (rhs) == ADDR_EXPR)
5356 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5357 else if (visit_load)
5358 {
5359 rhs = get_base_loadstore (rhs);
5360 if (rhs)
5361 ret |= visit_load (stmt, rhs, data);
5362 }
5363 }
5364 if (visit_addr
5365 && gimple_call_chain (stmt)
5366 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
5367 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
5368 data);
5369 if (visit_addr
5370 && gimple_call_return_slot_opt_p (stmt)
5371 && gimple_call_lhs (stmt) != NULL_TREE
5372 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
5373 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
5374 }
5375 else if (gimple_code (stmt) == GIMPLE_ASM)
5376 {
5377 unsigned noutputs;
5378 const char *constraint;
5379 const char **oconstraints;
5380 bool allows_mem, allows_reg, is_inout;
5381 noutputs = gimple_asm_noutputs (stmt);
5382 oconstraints = XALLOCAVEC (const char *, noutputs);
5383 if (visit_store || visit_addr)
5384 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
5385 {
5386 tree link = gimple_asm_output_op (stmt, i);
5387 tree op = get_base_loadstore (TREE_VALUE (link));
5388 if (op && visit_store)
5389 ret |= visit_store (stmt, op, data);
5390 if (visit_addr)
5391 {
5392 constraint = TREE_STRING_POINTER
5393 (TREE_VALUE (TREE_PURPOSE (link)));
5394 oconstraints[i] = constraint;
5395 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5396 &allows_reg, &is_inout);
5397 if (op && !allows_reg && allows_mem)
5398 ret |= visit_addr (stmt, op, data);
5399 }
5400 }
5401 if (visit_load || visit_addr)
5402 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
5403 {
5404 tree link = gimple_asm_input_op (stmt, i);
5405 tree op = TREE_VALUE (link);
5406 if (visit_addr
5407 && TREE_CODE (op) == ADDR_EXPR)
5408 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5409 else if (visit_load || visit_addr)
5410 {
5411 op = get_base_loadstore (op);
5412 if (op)
5413 {
5414 if (visit_load)
5415 ret |= visit_load (stmt, op, data);
5416 if (visit_addr)
5417 {
5418 constraint = TREE_STRING_POINTER
5419 (TREE_VALUE (TREE_PURPOSE (link)));
5420 parse_input_constraint (&constraint, 0, 0, noutputs,
5421 0, oconstraints,
5422 &allows_mem, &allows_reg);
5423 if (!allows_reg && allows_mem)
5424 ret |= visit_addr (stmt, op, data);
5425 }
5426 }
5427 }
5428 }
5429 }
5430 else if (gimple_code (stmt) == GIMPLE_RETURN)
5431 {
5432 tree op = gimple_return_retval (stmt);
5433 if (op)
5434 {
5435 if (visit_addr
5436 && TREE_CODE (op) == ADDR_EXPR)
5437 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5438 else if (visit_load)
5439 {
5440 op = get_base_loadstore (op);
5441 if (op)
5442 ret |= visit_load (stmt, op, data);
5443 }
5444 }
5445 }
5446 else if (visit_addr
5447 && gimple_code (stmt) == GIMPLE_PHI)
5448 {
5449 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
5450 {
5451 tree op = PHI_ARG_DEF (stmt, i);
5452 if (TREE_CODE (op) == ADDR_EXPR)
5453 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5454 }
5455 }
5456
5457 return ret;
5458 }
5459
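/* Callback usage sketch: count the memory loads in a statement
   (hypothetical helper; the real pattern is the same as
   gimple_ior_addresses_taken_1 further below):

     static bool
     count_load_cb (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
                    void *data)
     {
       ++*(unsigned *) data;
       return false;           // contributes to the or'ed return value
     }

     unsigned nloads = 0;
     walk_stmt_load_store_addr_ops (stmt, &nloads, count_load_cb, NULL, NULL);
*/
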
5460 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
5461 should make a faster clone for this case. */
5462
5463 bool
5464 walk_stmt_load_store_ops (gimple stmt, void *data,
5465 bool (*visit_load)(gimple, tree, void *),
5466 bool (*visit_store)(gimple, tree, void *))
5467 {
5468 return walk_stmt_load_store_addr_ops (stmt, data,
5469 visit_load, visit_store, NULL);
5470 }
5471
5472 /* Helper for gimple_ior_addresses_taken_1. */
5473
5474 static bool
5475 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
5476 tree addr, void *data)
5477 {
5478 bitmap addresses_taken = (bitmap)data;
5479 addr = get_base_address (addr);
5480 if (addr
5481 && DECL_P (addr))
5482 {
5483 bitmap_set_bit (addresses_taken, DECL_UID (addr));
5484 return true;
5485 }
5486 return false;
5487 }
5488
5489 /* Set the bit for the uid of all decls that have their address taken
5490 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
5491 were any in this stmt. */
5492
5493 bool
5494 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
5495 {
5496 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
5497 gimple_ior_addresses_taken_1);
5498 }
5499
5500
5501 /* Return a printable name for symbol DECL. */
5502
5503 const char *
5504 gimple_decl_printable_name (tree decl, int verbosity)
5505 {
5506 if (!DECL_NAME (decl))
5507 return NULL;
5508
5509 if (DECL_ASSEMBLER_NAME_SET_P (decl))
5510 {
5511 const char *str, *mangled_str;
5512 int dmgl_opts = DMGL_NO_OPTS;
5513
5514 if (verbosity >= 2)
5515 {
5516 dmgl_opts = DMGL_VERBOSE
5517 | DMGL_ANSI
5518 | DMGL_GNU_V3
5519 | DMGL_RET_POSTFIX;
5520 if (TREE_CODE (decl) == FUNCTION_DECL)
5521 dmgl_opts |= DMGL_PARAMS;
5522 }
5523
5524 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5525 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
5526 return (str) ? str : mangled_str;
5527 }
5528
5529 return IDENTIFIER_POINTER (DECL_NAME (decl));
5530 }
5531
5532 /* Return true when STMT is a call to the built-in function CODE. */
5533
5534 bool
5535 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
5536 {
5537 tree fndecl;
5538 return (is_gimple_call (stmt)
5539 && (fndecl = gimple_call_fndecl (stmt)) != NULL
5540 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5541 && DECL_FUNCTION_CODE (fndecl) == code);
5542 }
5543
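/* Usage sketch (hypothetical caller):

     if (gimple_call_builtin_p (stmt, BUILT_IN_MEMSET))
       // STMT calls the normal memset built-in.
       ...;
*/
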
5544 /* Return true if STMT clobbers memory. STMT is required to be a
5545 GIMPLE_ASM. */
5546
5547 bool
5548 gimple_asm_clobbers_memory_p (const_gimple stmt)
5549 {
5550 unsigned i;
5551
5552 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
5553 {
5554 tree op = gimple_asm_clobber_op (stmt, i);
5555 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
5556 return true;
5557 }
5558
5559 return false;
5560 }
5561 #include "gt-gimple.h"