/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;
/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the operands
   vector is the size of the structure minus the size of the 1-element
   tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
  (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
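
/* For instance, assuming gsstruct.def carries an entry of the form
   DEFGSSTRUCT(GSS_ASM, gimple_statement_asm, true), that entry
   contributes "sizeof (struct gimple_statement_asm) - sizeof (tree)"
   to gimple_ops_offset_[] and "sizeof (struct gimple_statement_asm)"
   to gsstruct_code_size[], so both tables can be indexed by the
   gimple_statement_structure_enum value of a statement.  */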

#define DEFGSCODE(SYM, NAME, GSSCODE)  NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE)  GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#ifdef GATHER_STATISTICS
/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "sequences",
    "everything else"
};

#endif /* GATHER_STATISTICS */

/* A cache of gimple_seq objects.  Sequences are created and destroyed
   fairly often during gimplification.  */
static GTY ((deletable)) struct gimple_seq_d *gimple_seq_cache;

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

#ifdef GATHER_STATISTICS
  {
    enum gimple_alloc_kind kind = gimple_alloc_kind (code);
    gimple_alloc_counts[(int) kind]++;
    gimple_alloc_sizes[(int) kind] += size;
  }
#endif

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;

  return stmt;
}
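
/* For example, a GIMPLE_ASSIGN with three operands occupies
   sizeof (struct gimple_statement_with_memory_ops) plus room for two
   extra trees: the structure itself already ends in a one-element
   operand array, which accounts for the "num_ops - 1" above.  */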

/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}



/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
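
/* For example, assuming FNDECL is a FUNCTION_DECL for a two-argument
   function and A, B and LHS are suitable GIMPLE operands, a caller
   might create "lhs = fndecl (a, b)" with:

     gimple call = gimple_build_call (fndecl, 2, a, b);
     gimple_call_set_lhs (call, lhs);  */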

/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}


/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}


/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
{
  unsigned i, nargs;
  gimple call;

  nargs = VEC_length (tree, args);
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
                         tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
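
/* For example, for the tree "a + b" this yields subcode PLUS_EXPR
   (class GIMPLE_BINARY_RHS) with *OP1_P = a, *OP2_P = b and
   *OP3_P = NULL_TREE; for a bare SSA name or constant, the expression
   itself becomes *OP1_P (class GIMPLE_SINGLE_RHS).  */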

/* Build a GIMPLE_ASSIGN statement.

   LHS of the assignment.
   RHS of the assignment which can be unary or binary.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
                                            PASS_MEM_STAT);
}
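
/* For example, assuming LHS, Y and Z are compatible GIMPLE values,

     gimple stmt = gimple_build_assign (lhs, build2 (PLUS_EXPR,
                                                     TREE_TYPE (lhs), y, z));

   creates the tuple "lhs = y + z" with sub-code PLUS_EXPR.  */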


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1, OP2 and OP3.  If OP2 and OP3 are NULL then SUBCODE must be of
   class GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for LHS and 1, 2 or 3 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
                                  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}

/* Build a GIMPLE_COND statement.

   PRED_CODE is the comparison code used to compare LHS and RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
                   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
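
/* For example, gimple_build_cond (LT_EXPR, a, b, NULL_TREE, NULL_TREE)
   builds "if (a < b)" with no explicit labels; once a CFG exists, the
   outgoing edges rather than the labels identify the destinations.  */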


/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}
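
/* For example, the tree 'if (!x)' comes back as "x == 0" and a bare
   'if (x)' as "x != 0", with the zero constant built in x's type.  */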


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of destination labels for "asm goto".  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
                             ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

#ifdef GATHER_STATISTICS
  gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
                      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers),
                          VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}

/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_EH_ELSE statement.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
                  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
                                    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}
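
/* The resulting operand layout is thus: operand 0 holds the index,
   label slot 0 the default label when present, and the case labels
   follow; gimple_build_switch and gimple_build_switch_vec below store
   them at label index I + (DEFAULT_LABEL != NULL).  */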


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}

/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
                                     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements that only one thread may execute
   at a time.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reduction, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
    = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
                           tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the task to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be executed
   in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_TRANSACTION statement.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  tree_code_name[subcode],
                  gimple_code_name[gimple_code (gs)],
                  gs->gsbase.subcode > 0
                    ? tree_code_name[gs->gsbase.subcode]
                    : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Allocate a new GIMPLE sequence in GC memory and return it.  If
   there are free sequences in GIMPLE_SEQ_CACHE return one of those
   instead.  */

gimple_seq
gimple_seq_alloc (void)
{
  gimple_seq seq = gimple_seq_cache;
  if (seq)
    {
      gimple_seq_cache = gimple_seq_cache->next_free;
      gcc_assert (gimple_seq_cache != seq);
      memset (seq, 0, sizeof (*seq));
    }
  else
    {
      seq = ggc_alloc_cleared_gimple_seq_d ();
#ifdef GATHER_STATISTICS
      gimple_alloc_counts[(int) gimple_alloc_kind_seq]++;
      gimple_alloc_sizes[(int) gimple_alloc_kind_seq] += sizeof (*seq);
#endif
    }

  return seq;
}

/* Return SEQ to the free pool of GIMPLE sequences.  */

void
gimple_seq_free (gimple_seq seq)
{
  if (seq == NULL)
    return;

  gcc_assert (gimple_seq_first (seq) == NULL);
  gcc_assert (gimple_seq_last (seq) == NULL);

  /* If this triggers, it's a sign that the same list is being freed
     twice.  */
  gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);

  /* Add SEQ to the pool of free sequences.  */
  seq->next_free = gimple_seq_cache;
  gimple_seq_cache = seq;
}


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;

  if (gs == NULL)
    return;

  if (*seq_p == NULL)
    *seq_p = gimple_seq_alloc ();

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}
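
/* For example, a pass can accumulate newly built statements like so,
   with the sequence allocated on the first insertion:

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, stmt1);
     gimple_seq_add_stmt (&seq, stmt2);

   where STMT1 and STMT2 are tuples obtained from the constructors
   above.  */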


/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  if (*dst_p == NULL)
    *dst_p = gimple_seq_alloc ();

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
        && !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = gimple_seq_alloc ();
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;

          return wi->removed_stmt ? NULL : gsi_stmt (gsi);
        }

      if (!wi->removed_stmt)
        gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}
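
/* A minimal sketch of a statement callback for the walker above,
   assuming the caller only wants the first GIMPLE_CALL in a sequence:

     static tree
     find_call (gimple_stmt_iterator *gsi, bool *handled_ops,
                struct walk_stmt_info *wi ATTRIBUTE_UNUSED)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
         {
           *handled_ops = true;
           return error_mark_node;
         }
       return NULL_TREE;
     }

   Calling walk_gimple_seq (seq, find_call, NULL, &wi) with a
   zero-initialized struct walk_stmt_info WI then returns the call
   statement, leaving error_mark_node in wi.callback_result.  */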


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
                               &is_inout);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
      if (wi)
        {
          wi->val_only = (allows_reg || !allows_mem);
          /* Although input "m" is not really an LHS, we need an lvalue.  */
          wi->is_lhs = !wi->val_only;
        }
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

       walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
        {
          tree lhs = gimple_assign_lhs (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
              || !gimple_assign_single_p (stmt);
        }

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
         may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS has more than 1 operand, it is not appropriate
             for the memory.  */
          wi->val_only = !(is_gimple_mem_rhs (gimple_assign_rhs1 (stmt))
                           || TREE_CODE (gimple_assign_rhs1 (stmt))
                              == CONSTRUCTOR)
                         || !gimple_assign_single_p (stmt);
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          if (wi)
            wi->val_only
              = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (gimple_call_lhs (stmt))
        {
          if (wi)
            {
              wi->is_lhs = true;
              wi->val_only
                = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
            }

          ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
          if (ret)
            return ret;
        }

      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
        }
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
        input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
        return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq (gimple_bind_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq (gimple_catch_handler (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq (gimple_eh_filter_failure (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      ret = walk_gimple_seq (gimple_eh_else_n_body (stmt),
                             callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      ret = walk_gimple_seq (gimple_eh_else_e_body (stmt),
                             callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq (gimple_try_eval (stmt), callback_stmt, callback_op,
                             wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq (gimple_try_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq (gimple_omp_for_pre_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq (gimple_omp_body (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq (gimple_wce_cleanup (stmt), callback_stmt,
                             callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq (gimple_transaction_body (stmt),
                             callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}


/* Set sequence SEQ to be the GIMPLE body for function FN.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FN.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body either in unlowered form
   or in CFG form.  */
1930 bool
1931 gimple_has_body_p (tree fndecl)
1932 {
1933 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1934 return (gimple_body (fndecl) || (fn && fn->cfg));
1935 }
1936
1937 /* Return true if calls C1 and C2 are known to go to the same function. */
1938
1939 bool
1940 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1941 {
1942 if (gimple_call_internal_p (c1))
1943 return (gimple_call_internal_p (c2)
1944 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1945 else
1946 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1947 || (gimple_call_fndecl (c1)
1948 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1949 }
1950
1951 /* Detect flags from a GIMPLE_CALL. This is just like
1952 call_expr_flags, but for gimple tuples. */
1953
1954 int
1955 gimple_call_flags (const_gimple stmt)
1956 {
1957 int flags;
1958 tree decl = gimple_call_fndecl (stmt);
1959
1960 if (decl)
1961 flags = flags_from_decl_or_type (decl);
1962 else if (gimple_call_internal_p (stmt))
1963 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1964 else
1965 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1966
1967 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1968 flags |= ECF_NOTHROW;
1969
1970 return flags;
1971 }
1972
1973 /* Return the "fn spec" string for call STMT. */
1974
1975 static tree
1976 gimple_call_fnspec (const_gimple stmt)
1977 {
1978 tree type, attr;
1979
1980 type = gimple_call_fntype (stmt);
1981 if (!type)
1982 return NULL_TREE;
1983
1984 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1985 if (!attr)
1986 return NULL_TREE;
1987
1988 return TREE_VALUE (TREE_VALUE (attr));
1989 }
1990
1991 /* Detects argument flags for argument number ARG on call STMT. */
1992
1993 int
1994 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1995 {
1996 tree attr = gimple_call_fnspec (stmt);
1997
1998 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1999 return 0;
2000
2001 switch (TREE_STRING_POINTER (attr)[1 + arg])
2002 {
2003 case 'x':
2004 case 'X':
2005 return EAF_UNUSED;
2006
2007 case 'R':
2008 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
2009
2010 case 'r':
2011 return EAF_NOCLOBBER | EAF_NOESCAPE;
2012
2013 case 'W':
2014 return EAF_DIRECT | EAF_NOESCAPE;
2015
2016 case 'w':
2017 return EAF_NOESCAPE;
2018
2019 case '.':
2020 default:
2021 return 0;
2022 }
2023 }
2024
2025 /* Detects return flags for the call STMT. */
2026
2027 int
2028 gimple_call_return_flags (const_gimple stmt)
2029 {
2030 tree attr;
2031
2032 if (gimple_call_flags (stmt) & ECF_MALLOC)
2033 return ERF_NOALIAS;
2034
2035 attr = gimple_call_fnspec (stmt);
2036 if (!attr || TREE_STRING_LENGTH (attr) < 1)
2037 return 0;
2038
2039 switch (TREE_STRING_POINTER (attr)[0])
2040 {
2041 case '1':
2042 case '2':
2043 case '3':
2044 case '4':
2045 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
2046
2047 case 'm':
2048 return ERF_NOALIAS;
2049
2050 case '.':
2051 default:
2052 return 0;
2053 }
2054 }
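
/* Illustrative example (a sketch, not from the original sources): for
   a callee whose function type carries the internal "fn spec"
   attribute with value "1rW", the string decodes as follows:
     character 0, '1': gimple_call_return_flags returns
       ERF_RETURNS_ARG | 0, i.e. the call returns its first argument;
     character 1, 'r': gimple_call_arg_flags (stmt, 0) returns
       EAF_NOCLOBBER | EAF_NOESCAPE;
     character 2, 'W': gimple_call_arg_flags (stmt, 1) returns
       EAF_DIRECT | EAF_NOESCAPE.  */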
2055
2056
2057 /* Return true if GS is a copy assignment. */
2058
2059 bool
2060 gimple_assign_copy_p (gimple gs)
2061 {
2062 return (gimple_assign_single_p (gs)
2063 && is_gimple_val (gimple_op (gs, 1)));
2064 }
2065
2066
2067 /* Return true if GS is a SSA_NAME copy assignment. */
2068
2069 bool
2070 gimple_assign_ssa_name_copy_p (gimple gs)
2071 {
2072 return (gimple_assign_single_p (gs)
2073 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
2074 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
2075 }
2076
2077
2078 /* Return true if GS is an assignment with a unary RHS, but the
2079 operator has no effect on the assigned value. The logic is adapted
2080 from STRIP_NOPS. This predicate is intended to be used in tuplifying
2081 instances in which STRIP_NOPS was previously applied to the RHS of
2082 an assignment.
2083
2084 NOTE: In the use cases that led to the creation of this function
2085 and of gimple_assign_single_p, it is typical to test for either
2086 condition and to proceed in the same manner. In each case, the
2087 assigned value is represented by the single RHS operand of the
2088 assignment. I suspect there may be cases where gimple_assign_copy_p,
2089 gimple_assign_single_p, or equivalent logic is used where a similar
2090 treatment of unary NOPs is appropriate. */
2091
2092 bool
2093 gimple_assign_unary_nop_p (gimple gs)
2094 {
2095 return (is_gimple_assign (gs)
2096 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
2097 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
2098 && gimple_assign_rhs1 (gs) != error_mark_node
2099 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
2100 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
2101 }
2102
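/* For instance (illustrative, assuming int and unsigned int share the
   same machine mode on the target): "i_1 = (int) u_2" is a unary NOP
   in the above sense, while a narrowing "c_3 = (char) i_1" is not,
   because the modes of the LHS and RHS types differ.  */
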
2103 /* Set BB to be the basic block holding STMT. */
2104
2105 void
2106 gimple_set_bb (gimple stmt, basic_block bb)
2107 {
2108 stmt->gsbase.bb = bb;
2109
2110 /* If the statement is a label, add the label to the block-to-labels
2111 map so that we can speed up edge creation for GIMPLE_GOTOs. */
2112 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2113 {
2114 tree t;
2115 int uid;
2116
2117 t = gimple_label_label (stmt);
2118 uid = LABEL_DECL_UID (t);
2119 if (uid == -1)
2120 {
2121 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2122 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2123 if (old_len <= (unsigned) uid)
2124 {
2125 unsigned new_len = 3 * uid / 2 + 1;
2126
2127 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2128 new_len);
2129 }
2130 }
2131
2132 VEC_replace (basic_block, label_to_block_map, uid, bb);
2133 }
2134 }
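
/* For example (illustrative): assigning UID 8 to a label when
   label_to_block_map holds 8 entries grows the map to
   3 * 8 / 2 + 1 == 13 entries, so the map grows geometrically as
   labels are created.  */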
2135
2136
2137 /* Modify the RHS of the assignment pointed-to by GSI using the
2138 operands in the expression tree EXPR.
2139
2140 NOTE: The statement pointed-to by GSI may be reallocated if it
2141 did not have enough operand slots.
2142
2143 This function is useful to convert an existing tree expression into
2144 the flat representation used for the RHS of a GIMPLE assignment.
2145 It will reallocate memory as needed to expand or shrink the number
2146 of operand slots needed to represent EXPR.
2147
2148 NOTE: If you find yourself building a tree and then calling this
2149 function, you are most certainly doing it the slow way. It is much
2150 better to build a new assignment or to use the function
2151 gimple_assign_set_rhs_with_ops, which does not require an
2152 expression tree to be built. */
2153
2154 void
2155 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2156 {
2157 enum tree_code subcode;
2158 tree op1, op2, op3;
2159
2160 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2161 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
2162 }
2163
2164
2165 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2166 operands OP1, OP2 and OP3.
2167
2168 NOTE: The statement pointed-to by GSI may be reallocated if it
2169 did not have enough operand slots. */
2170
2171 void
2172 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2173 tree op1, tree op2, tree op3)
2174 {
2175 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2176 gimple stmt = gsi_stmt (*gsi);
2177
2178 /* If the new CODE needs more operands, allocate a new statement. */
2179 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2180 {
2181 tree lhs = gimple_assign_lhs (stmt);
2182 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2183 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2184 gsi_replace (gsi, new_stmt, true);
2185 stmt = new_stmt;
2186
2187 /* The LHS needs to be reset as this also changes the SSA name
2188 on the LHS. */
2189 gimple_assign_set_lhs (stmt, lhs);
2190 }
2191
2192 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2193 gimple_set_subcode (stmt, code);
2194 gimple_assign_set_rhs1 (stmt, op1);
2195 if (new_rhs_ops > 1)
2196 gimple_assign_set_rhs2 (stmt, op2);
2197 if (new_rhs_ops > 2)
2198 gimple_assign_set_rhs3 (stmt, op3);
2199 }
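
/* Illustrative sketch (hypothetical caller, not from this file): turn
   the assignment "x_1 = y_2" that GSI points at into "x_1 = y_2 + z_3";
   the tuple is reallocated behind the iterator if it lacks a slot for
   the second operand:

     gimple_assign_set_rhs_with_ops (&gsi, PLUS_EXPR, y, z);
     update_stmt (gsi_stmt (gsi));

   where y and z are assumed to be existing SSA_NAME trees.  */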
2200
2201
2202 /* Return the LHS of a statement that performs an assignment,
2203 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2204 for a call to a function that returns no value, or for a
2205 statement other than an assignment or a call. */
2206
2207 tree
2208 gimple_get_lhs (const_gimple stmt)
2209 {
2210 enum gimple_code code = gimple_code (stmt);
2211
2212 if (code == GIMPLE_ASSIGN)
2213 return gimple_assign_lhs (stmt);
2214 else if (code == GIMPLE_CALL)
2215 return gimple_call_lhs (stmt);
2216 else
2217 return NULL_TREE;
2218 }
2219
2220
2221 /* Set the LHS of a statement that performs an assignment,
2222 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2223
2224 void
2225 gimple_set_lhs (gimple stmt, tree lhs)
2226 {
2227 enum gimple_code code = gimple_code (stmt);
2228
2229 if (code == GIMPLE_ASSIGN)
2230 gimple_assign_set_lhs (stmt, lhs);
2231 else if (code == GIMPLE_CALL)
2232 gimple_call_set_lhs (stmt, lhs);
2233 else
2234 gcc_unreachable();
2235 }
2236
2237 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2238 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2239 expression with a different value.
2240
2241 This will update any annotations (say debug bind stmts) referring
2242 to the original LHS, so that they use the RHS instead. This is
2243 done even if NLHS and LHS are the same, for it is understood that
2244 the RHS will be modified afterwards, and NLHS will not be assigned
2245 an equivalent value.
2246
2247 Adjusting any non-annotation uses of the LHS, if needed, is a
2248 responsibility of the caller.
2249
2250 The effect of this call should be pretty much the same as that of
2251 inserting a copy of STMT before STMT, and then removing the
2252 original stmt, at which time gsi_remove() would have updated the
2253 annotations, but using this function saves all the inserting,
2254 copying and removing. */
2255
2256 void
2257 gimple_replace_lhs (gimple stmt, tree nlhs)
2258 {
2259 if (MAY_HAVE_DEBUG_STMTS)
2260 {
2261 tree lhs = gimple_get_lhs (stmt);
2262
2263 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2264
2265 insert_debug_temp_for_var_def (NULL, lhs);
2266 }
2267
2268 gimple_set_lhs (stmt, nlhs);
2269 }
2270
2271 /* Return a deep copy of statement STMT. All the operands from STMT
2272 are reallocated and copied using unshare_expr. The DEF and USE
2273 operand arrays are cleared in the new copy so that the SSA operands
2274 are recomputed; the virtual operands are carried over. */
2274
2275 gimple
2276 gimple_copy (gimple stmt)
2277 {
2278 enum gimple_code code = gimple_code (stmt);
2279 unsigned num_ops = gimple_num_ops (stmt);
2280 gimple copy = gimple_alloc (code, num_ops);
2281 unsigned i;
2282
2283 /* Shallow copy all the fields from STMT. */
2284 memcpy (copy, stmt, gimple_size (code));
2285
2286 /* If STMT has sub-statements, deep-copy them as well. */
2287 if (gimple_has_substatements (stmt))
2288 {
2289 gimple_seq new_seq;
2290 tree t;
2291
2292 switch (gimple_code (stmt))
2293 {
2294 case GIMPLE_BIND:
2295 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2296 gimple_bind_set_body (copy, new_seq);
2297 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2298 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2299 break;
2300
2301 case GIMPLE_CATCH:
2302 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2303 gimple_catch_set_handler (copy, new_seq);
2304 t = unshare_expr (gimple_catch_types (stmt));
2305 gimple_catch_set_types (copy, t);
2306 break;
2307
2308 case GIMPLE_EH_FILTER:
2309 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2310 gimple_eh_filter_set_failure (copy, new_seq);
2311 t = unshare_expr (gimple_eh_filter_types (stmt));
2312 gimple_eh_filter_set_types (copy, t);
2313 break;
2314
2315 case GIMPLE_EH_ELSE:
2316 new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
2317 gimple_eh_else_set_n_body (copy, new_seq);
2318 new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
2319 gimple_eh_else_set_e_body (copy, new_seq);
2320 break;
2321
2322 case GIMPLE_TRY:
2323 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2324 gimple_try_set_eval (copy, new_seq);
2325 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2326 gimple_try_set_cleanup (copy, new_seq);
2327 break;
2328
2329 case GIMPLE_OMP_FOR:
2330 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2331 gimple_omp_for_set_pre_body (copy, new_seq);
2332 t = unshare_expr (gimple_omp_for_clauses (stmt));
2333 gimple_omp_for_set_clauses (copy, t);
2334 copy->gimple_omp_for.iter
2335 = ggc_alloc_vec_gimple_omp_for_iter
2336 (gimple_omp_for_collapse (stmt));
2337 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2338 {
2339 gimple_omp_for_set_cond (copy, i,
2340 gimple_omp_for_cond (stmt, i));
2341 gimple_omp_for_set_index (copy, i,
2342 gimple_omp_for_index (stmt, i));
2343 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2344 gimple_omp_for_set_initial (copy, i, t);
2345 t = unshare_expr (gimple_omp_for_final (stmt, i));
2346 gimple_omp_for_set_final (copy, i, t);
2347 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2348 gimple_omp_for_set_incr (copy, i, t);
2349 }
2350 goto copy_omp_body;
2351
2352 case GIMPLE_OMP_PARALLEL:
2353 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2354 gimple_omp_parallel_set_clauses (copy, t);
2355 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2356 gimple_omp_parallel_set_child_fn (copy, t);
2357 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2358 gimple_omp_parallel_set_data_arg (copy, t);
2359 goto copy_omp_body;
2360
2361 case GIMPLE_OMP_TASK:
2362 t = unshare_expr (gimple_omp_task_clauses (stmt));
2363 gimple_omp_task_set_clauses (copy, t);
2364 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2365 gimple_omp_task_set_child_fn (copy, t);
2366 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2367 gimple_omp_task_set_data_arg (copy, t);
2368 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2369 gimple_omp_task_set_copy_fn (copy, t);
2370 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2371 gimple_omp_task_set_arg_size (copy, t);
2372 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2373 gimple_omp_task_set_arg_align (copy, t);
2374 goto copy_omp_body;
2375
2376 case GIMPLE_OMP_CRITICAL:
2377 t = unshare_expr (gimple_omp_critical_name (stmt));
2378 gimple_omp_critical_set_name (copy, t);
2379 goto copy_omp_body;
2380
2381 case GIMPLE_OMP_SECTIONS:
2382 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2383 gimple_omp_sections_set_clauses (copy, t);
2384 t = unshare_expr (gimple_omp_sections_control (stmt));
2385 gimple_omp_sections_set_control (copy, t);
2386 /* FALLTHRU */
2387
2388 case GIMPLE_OMP_SINGLE:
2389 case GIMPLE_OMP_SECTION:
2390 case GIMPLE_OMP_MASTER:
2391 case GIMPLE_OMP_ORDERED:
2392 copy_omp_body:
2393 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2394 gimple_omp_set_body (copy, new_seq);
2395 break;
2396
2397 case GIMPLE_TRANSACTION:
2398 new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
2399 gimple_transaction_set_body (copy, new_seq);
2400 break;
2401
2402 case GIMPLE_WITH_CLEANUP_EXPR:
2403 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2404 gimple_wce_set_cleanup (copy, new_seq);
2405 break;
2406
2407 default:
2408 gcc_unreachable ();
2409 }
2410 }
2411
2412 /* Make copy of operands. */
2413 if (num_ops > 0)
2414 {
2415 for (i = 0; i < num_ops; i++)
2416 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2417
2418 /* Clear out SSA operand vectors on COPY. */
2419 if (gimple_has_ops (stmt))
2420 {
2421 gimple_set_def_ops (copy, NULL);
2422 gimple_set_use_ops (copy, NULL);
2423 }
2424
2425 if (gimple_has_mem_ops (stmt))
2426 {
2427 gimple_set_vdef (copy, gimple_vdef (stmt));
2428 gimple_set_vuse (copy, gimple_vuse (stmt));
2429 }
2430
2431 /* SSA operands need to be updated. */
2432 gimple_set_modified (copy, true);
2433 }
2434
2435 return copy;
2436 }
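
/* Illustrative sketch (hypothetical caller): duplicate a statement and
   insert the duplicate right after the original; the two share no
   operand trees because gimple_copy unshares them:

     gimple dup = gimple_copy (stmt);
     gsi_insert_after (&gsi, dup, GSI_NEW_STMT);
*/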
2437
2438
2439 /* Set the MODIFIED flag to MODIFIEDP, iff the gimple statement S has
2440 a MODIFIED field (i.e. it has operands). */
2441
2442 void
2443 gimple_set_modified (gimple s, bool modifiedp)
2444 {
2445 if (gimple_has_ops (s))
2446 s->gsbase.modified = (unsigned) modifiedp;
2447 }
2448
2449
2450 /* Return true if statement S has side-effects. We consider a
2451 statement to have side effects if:
2452
2453 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2454 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2455
2456 bool
2457 gimple_has_side_effects (const_gimple s)
2458 {
2459 if (is_gimple_debug (s))
2460 return false;
2461
2462 /* We don't have to scan the arguments to check for volatile
2463 operands; gimple_has_volatile_ops already reflects them. The
2464 scan for TREE_SIDE_EFFECTS is done in gimple_rhs_has_side_effects. */
2465 if (gimple_has_volatile_ops (s))
2466 return true;
2467
2468 if (gimple_code (s) == GIMPLE_ASM
2469 && gimple_asm_volatile_p (s))
2470 return true;
2471
2472 if (is_gimple_call (s))
2473 {
2474 int flags = gimple_call_flags (s);
2475
2476 /* An infinite loop is considered a side effect. */
2477 if (!(flags & (ECF_CONST | ECF_PURE))
2478 || (flags & ECF_LOOPING_CONST_OR_PURE))
2479 return true;
2480
2481 return false;
2482 }
2483
2484 return false;
2485 }
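
/* For example (illustrative): a call to a function declared
   __attribute__((const)) has no side effects here unless it may loop
   forever (ECF_LOOPING_CONST_OR_PURE), whereas a volatile asm or any
   statement with volatile operands is always said to have them.  */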
2486
2487 /* Return true if the RHS of statement S has side effects.
2488 We may use it to determine if it is admissible to replace
2489 an assignment or call with a copy of a previously-computed
2490 value. In such cases, side-effects due to the LHS are
2491 preserved. */
2492
2493 bool
2494 gimple_rhs_has_side_effects (const_gimple s)
2495 {
2496 unsigned i;
2497
2498 if (is_gimple_call (s))
2499 {
2500 unsigned nargs = gimple_call_num_args (s);
2501 tree fn;
2502
2503 if (!(gimple_call_flags (s) & (ECF_CONST | ECF_PURE)))
2504 return true;
2505
2506 /* We cannot use gimple_has_volatile_ops here,
2507 because we must ignore a volatile LHS. */
2508 fn = gimple_call_fn (s);
2509 if (fn && (TREE_SIDE_EFFECTS (fn) || TREE_THIS_VOLATILE (fn)))
2510 {
2511 gcc_assert (gimple_has_volatile_ops (s));
2512 return true;
2513 }
2514
2515 for (i = 0; i < nargs; i++)
2516 if (TREE_SIDE_EFFECTS (gimple_call_arg (s, i))
2517 || TREE_THIS_VOLATILE (gimple_call_arg (s, i)))
2518 return true;
2519
2520 return false;
2521 }
2522 else if (is_gimple_assign (s))
2523 {
2524 /* Skip the first operand, the LHS. */
2525 for (i = 1; i < gimple_num_ops (s); i++)
2526 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2527 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2528 {
2529 gcc_assert (gimple_has_volatile_ops (s));
2530 return true;
2531 }
2532 }
2533 else if (is_gimple_debug (s))
2534 return false;
2535 else
2536 {
2537 /* For statements without an LHS, examine all arguments. */
2538 for (i = 0; i < gimple_num_ops (s); i++)
2539 if (TREE_SIDE_EFFECTS (gimple_op (s, i))
2540 || TREE_THIS_VOLATILE (gimple_op (s, i)))
2541 {
2542 gcc_assert (gimple_has_volatile_ops (s));
2543 return true;
2544 }
2545 }
2546
2547 return false;
2548 }
2549
2550 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2551 Return true if S can trap. When INCLUDE_MEM is true, check whether
2552 the memory operations could trap. When INCLUDE_STORES is true and
2553 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2554
2555 bool
2556 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2557 {
2558 tree t, div = NULL_TREE;
2559 enum tree_code op;
2560
2561 if (include_mem)
2562 {
2563 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2564
2565 for (i = start; i < gimple_num_ops (s); i++)
2566 if (tree_could_trap_p (gimple_op (s, i)))
2567 return true;
2568 }
2569
2570 switch (gimple_code (s))
2571 {
2572 case GIMPLE_ASM:
2573 return gimple_asm_volatile_p (s);
2574
2575 case GIMPLE_CALL:
2576 t = gimple_call_fndecl (s);
2577 /* Assume that calls to weak functions may trap. */
2578 if (!t || !DECL_P (t) || DECL_WEAK (t))
2579 return true;
2580 return false;
2581
2582 case GIMPLE_ASSIGN:
2583 t = gimple_expr_type (s);
2584 op = gimple_assign_rhs_code (s);
2585 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2586 div = gimple_assign_rhs2 (s);
2587 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2588 (INTEGRAL_TYPE_P (t)
2589 && TYPE_OVERFLOW_TRAPS (t)),
2590 div));
2591
2592 default:
2593 break;
2594 }
2595
2596 return false;
2597 }
2598
2599 /* Return true if statement S can trap. */
2600
2601 bool
2602 gimple_could_trap_p (gimple s)
2603 {
2604 return gimple_could_trap_p_1 (s, true, true);
2605 }
2606
2607 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2608
2609 bool
2610 gimple_assign_rhs_could_trap_p (gimple s)
2611 {
2612 gcc_assert (is_gimple_assign (s));
2613 return gimple_could_trap_p_1 (s, true, false);
2614 }
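
/* For instance (illustrative): for "x_1 = y_2 / z_3" with an integral
   type, DIV above is z_3 and operation_could_trap_p can only rule out
   a trap when the divisor is a nonzero constant; with a variable
   divisor the RHS is considered to possibly trap.  */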
2615
2616
2617 /* Print allocation statistics for the GIMPLE statements generated. */
2618
2619 void
2620 dump_gimple_statistics (void)
2621 {
2622 #ifdef GATHER_STATISTICS
2623 int i, total_tuples = 0, total_bytes = 0;
2624
2625 fprintf (stderr, "\nGIMPLE statements\n");
2626 fprintf (stderr, "Kind Stmts Bytes\n");
2627 fprintf (stderr, "---------------------------------------\n");
2628 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2629 {
2630 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2631 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2632 total_tuples += gimple_alloc_counts[i];
2633 total_bytes += gimple_alloc_sizes[i];
2634 }
2635 fprintf (stderr, "---------------------------------------\n");
2636 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2637 fprintf (stderr, "---------------------------------------\n");
2638 #else
2639 fprintf (stderr, "No gimple statistics\n");
2640 #endif
2641 }
2642
2643
2644 /* Return the number of operands needed on the RHS of a GIMPLE
2645 assignment for an expression with tree code CODE. */
2646
2647 unsigned
2648 get_gimple_rhs_num_ops (enum tree_code code)
2649 {
2650 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2651
2652 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2653 return 1;
2654 else if (rhs_class == GIMPLE_BINARY_RHS)
2655 return 2;
2656 else if (rhs_class == GIMPLE_TERNARY_RHS)
2657 return 3;
2658 else
2659 gcc_unreachable ();
2660 }
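
/* For example (illustrative, per the table below): PLUS_EXPR is
   GIMPLE_BINARY_RHS and needs 2 operands, COND_EXPR and FMA_EXPR are
   GIMPLE_TERNARY_RHS and need 3, while SSA_NAME and ADDR_EXPR are
   GIMPLE_SINGLE_RHS and need 1.  */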
2661
2662 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2663 (unsigned char) \
2664 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2665 : ((TYPE) == tcc_binary \
2666 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2667 : ((TYPE) == tcc_constant \
2668 || (TYPE) == tcc_declaration \
2669 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2670 : ((SYM) == TRUTH_AND_EXPR \
2671 || (SYM) == TRUTH_OR_EXPR \
2672 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2673 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2674 : ((SYM) == COND_EXPR \
2675 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2676 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2677 || (SYM) == DOT_PROD_EXPR \
2678 || (SYM) == REALIGN_LOAD_EXPR \
2679 || (SYM) == VEC_COND_EXPR \
2680 || (SYM) == VEC_PERM_EXPR \
2681 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2682 : ((SYM) == CONSTRUCTOR \
2683 || (SYM) == OBJ_TYPE_REF \
2684 || (SYM) == ASSERT_EXPR \
2685 || (SYM) == ADDR_EXPR \
2686 || (SYM) == WITH_SIZE_EXPR \
2687 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2688 : GIMPLE_INVALID_RHS),
2689 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2690
2691 const unsigned char gimple_rhs_class_table[] = {
2692 #include "all-tree.def"
2693 };
2694
2695 #undef DEFTREECODE
2696 #undef END_OF_BASE_TREE_CODES
2697
2698 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2699
2700 /* Validation of GIMPLE expressions. */
2701
2702 /* Returns true iff T is a valid RHS for an assignment to a renamed
2703 user -- or front-end generated artificial -- variable. */
2704
2705 bool
2706 is_gimple_reg_rhs (tree t)
2707 {
2708 return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
2709 }
2710
2711 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
2712 LHS, or for a call argument. */
2713
2714 bool
2715 is_gimple_mem_rhs (tree t)
2716 {
2717 /* If we're dealing with a renamable type, either source or dest must be
2718 a renamed variable. */
2719 if (is_gimple_reg_type (TREE_TYPE (t)))
2720 return is_gimple_val (t);
2721 else
2722 return is_gimple_val (t) || is_gimple_lvalue (t);
2723 }
2724
2725 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2726
2727 bool
2728 is_gimple_lvalue (tree t)
2729 {
2730 return (is_gimple_addressable (t)
2731 || TREE_CODE (t) == WITH_SIZE_EXPR
2732 /* These are complex lvalues, but don't have addresses, so they
2733 go here. */
2734 || TREE_CODE (t) == BIT_FIELD_REF);
2735 }
2736
2737 /* Return true if T is a GIMPLE condition. */
2738
2739 bool
2740 is_gimple_condexpr (tree t)
2741 {
2742 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2743 && !tree_could_throw_p (t)
2744 && is_gimple_val (TREE_OPERAND (t, 0))
2745 && is_gimple_val (TREE_OPERAND (t, 1))));
2746 }
2747
2748 /* Return true if T is something whose address can be taken. */
2749
2750 bool
2751 is_gimple_addressable (tree t)
2752 {
2753 return (is_gimple_id (t) || handled_component_p (t)
2754 || TREE_CODE (t) == MEM_REF);
2755 }
2756
2757 /* Return true if T is a valid gimple constant. */
2758
2759 bool
2760 is_gimple_constant (const_tree t)
2761 {
2762 switch (TREE_CODE (t))
2763 {
2764 case INTEGER_CST:
2765 case REAL_CST:
2766 case FIXED_CST:
2767 case STRING_CST:
2768 case COMPLEX_CST:
2769 case VECTOR_CST:
2770 return true;
2771
2772 /* Vector constant constructors are gimple invariant. */
2773 case CONSTRUCTOR:
2774 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2775 return TREE_CONSTANT (t);
2776 else
2777 return false;
2778
2779 default:
2780 return false;
2781 }
2782 }
2783
2784 /* Return true if T is a gimple address. */
2785
2786 bool
2787 is_gimple_address (const_tree t)
2788 {
2789 tree op;
2790
2791 if (TREE_CODE (t) != ADDR_EXPR)
2792 return false;
2793
2794 op = TREE_OPERAND (t, 0);
2795 while (handled_component_p (op))
2796 {
2797 if ((TREE_CODE (op) == ARRAY_REF
2798 || TREE_CODE (op) == ARRAY_RANGE_REF)
2799 && !is_gimple_val (TREE_OPERAND (op, 1)))
2800 return false;
2801
2802 op = TREE_OPERAND (op, 0);
2803 }
2804
2805 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2806 return true;
2807
2808 switch (TREE_CODE (op))
2809 {
2810 case PARM_DECL:
2811 case RESULT_DECL:
2812 case LABEL_DECL:
2813 case FUNCTION_DECL:
2814 case VAR_DECL:
2815 case CONST_DECL:
2816 return true;
2817
2818 default:
2819 return false;
2820 }
2821 }
2822
2823 /* Return true if T is a gimple invariant address. */
2824
2825 bool
2826 is_gimple_invariant_address (const_tree t)
2827 {
2828 const_tree op;
2829
2830 if (TREE_CODE (t) != ADDR_EXPR)
2831 return false;
2832
2833 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2834 if (!op)
2835 return false;
2836
2837 if (TREE_CODE (op) == MEM_REF)
2838 {
2839 const_tree op0 = TREE_OPERAND (op, 0);
2840 return (TREE_CODE (op0) == ADDR_EXPR
2841 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2842 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2843 }
2844
2845 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2846 }
2847
2848 /* Return true if T is a gimple invariant address at IPA level
2849 (so addresses of variables on the stack are not allowed). */
2850
2851 bool
2852 is_gimple_ip_invariant_address (const_tree t)
2853 {
2854 const_tree op;
2855
2856 if (TREE_CODE (t) != ADDR_EXPR)
2857 return false;
2858
2859 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2860 if (!op)
2861 return false;
2862
2863 if (TREE_CODE (op) == MEM_REF)
2864 {
2865 const_tree op0 = TREE_OPERAND (op, 0);
2866 return (TREE_CODE (op0) == ADDR_EXPR
2867 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2868 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
2869 }
2870
2871 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
2872 }
2873
2874 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2875 form of function invariant. */
2876
2877 bool
2878 is_gimple_min_invariant (const_tree t)
2879 {
2880 if (TREE_CODE (t) == ADDR_EXPR)
2881 return is_gimple_invariant_address (t);
2882
2883 return is_gimple_constant (t);
2884 }
2885
2886 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2887 form of gimple minimal invariant. */
2888
2889 bool
2890 is_gimple_ip_invariant (const_tree t)
2891 {
2892 if (TREE_CODE (t) == ADDR_EXPR)
2893 return is_gimple_ip_invariant_address (t);
2894
2895 return is_gimple_constant (t);
2896 }
2897
2898 /* Return true if T looks like a valid GIMPLE statement. */
2899
2900 bool
2901 is_gimple_stmt (tree t)
2902 {
2903 const enum tree_code code = TREE_CODE (t);
2904
2905 switch (code)
2906 {
2907 case NOP_EXPR:
2908 /* The only valid NOP_EXPR is the empty statement. */
2909 return IS_EMPTY_STMT (t);
2910
2911 case BIND_EXPR:
2912 case COND_EXPR:
2913 /* These are only valid if they're void. */
2914 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
2915
2916 case SWITCH_EXPR:
2917 case GOTO_EXPR:
2918 case RETURN_EXPR:
2919 case LABEL_EXPR:
2920 case CASE_LABEL_EXPR:
2921 case TRY_CATCH_EXPR:
2922 case TRY_FINALLY_EXPR:
2923 case EH_FILTER_EXPR:
2924 case CATCH_EXPR:
2925 case ASM_EXPR:
2926 case STATEMENT_LIST:
2927 case OMP_PARALLEL:
2928 case OMP_FOR:
2929 case OMP_SECTIONS:
2930 case OMP_SECTION:
2931 case OMP_SINGLE:
2932 case OMP_MASTER:
2933 case OMP_ORDERED:
2934 case OMP_CRITICAL:
2935 case OMP_TASK:
2936 /* These are always void. */
2937 return true;
2938
2939 case CALL_EXPR:
2940 case MODIFY_EXPR:
2941 case PREDICT_EXPR:
2942 /* These are valid regardless of their type. */
2943 return true;
2944
2945 default:
2946 return false;
2947 }
2948 }
2949
2950 /* Return true if T is a variable. */
2951
2952 bool
2953 is_gimple_variable (tree t)
2954 {
2955 return (TREE_CODE (t) == VAR_DECL
2956 || TREE_CODE (t) == PARM_DECL
2957 || TREE_CODE (t) == RESULT_DECL
2958 || TREE_CODE (t) == SSA_NAME);
2959 }
2960
2961 /* Return true if T is a GIMPLE identifier (something with an address). */
2962
2963 bool
2964 is_gimple_id (tree t)
2965 {
2966 return (is_gimple_variable (t)
2967 || TREE_CODE (t) == FUNCTION_DECL
2968 || TREE_CODE (t) == LABEL_DECL
2969 || TREE_CODE (t) == CONST_DECL
2970 /* Allow string constants, since they are addressable. */
2971 || TREE_CODE (t) == STRING_CST);
2972 }
2973
2974 /* Return true if TYPE is a suitable type for a scalar register variable. */
2975
2976 bool
2977 is_gimple_reg_type (tree type)
2978 {
2979 return !AGGREGATE_TYPE_P (type);
2980 }
2981
2982 /* Return true if T is a non-aggregate register variable. */
2983
2984 bool
2985 is_gimple_reg (tree t)
2986 {
2987 if (TREE_CODE (t) == SSA_NAME)
2988 t = SSA_NAME_VAR (t);
2989
2990 if (!is_gimple_variable (t))
2991 return false;
2992
2993 if (!is_gimple_reg_type (TREE_TYPE (t)))
2994 return false;
2995
2996 /* A volatile decl is not acceptable because we can't reuse it as
2997 needed. We need to copy it into a temp first. */
2998 if (TREE_THIS_VOLATILE (t))
2999 return false;
3000
3001 /* We define "registers" as things that can be renamed as needed,
3002 which with our infrastructure does not apply to memory. */
3003 if (needs_to_live_in_memory (t))
3004 return false;
3005
3006 /* Hard register variables are an interesting case. For those that
3007 are call-clobbered, we don't know where all the calls are, since
3008 we don't (want to) take into account which operations will turn
3009 into libcalls at the rtl level. For those that are call-saved,
3010 we don't currently model the fact that calls may in fact change
3011 global hard registers, nor do we examine ASM_CLOBBERS at the tree
3012 level, and so miss variable changes that they might imply. All around,
3013 it seems safest to not do too much optimization with these at the
3014 tree level at all. We'll have to rely on the rtl optimizers to
3015 clean this up, as there we've got all the appropriate bits exposed. */
3016 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
3017 return false;
3018
3019 /* Complex and vector values must have been put into SSA-like form.
3020 That is, no assignments to the individual components. */
3021 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
3022 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3023 return DECL_GIMPLE_REG_P (t);
3024
3025 return true;
3026 }
3027
3028
3029 /* Return true if T is a GIMPLE variable whose address is not needed. */
3030
3031 bool
3032 is_gimple_non_addressable (tree t)
3033 {
3034 if (TREE_CODE (t) == SSA_NAME)
3035 t = SSA_NAME_VAR (t);
3036
3037 return (is_gimple_variable (t) && ! needs_to_live_in_memory (t));
3038 }
3039
3040 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
3041
3042 bool
3043 is_gimple_val (tree t)
3044 {
3045 /* Make loads from volatiles and memory vars explicit. */
3046 if (is_gimple_variable (t)
3047 && is_gimple_reg_type (TREE_TYPE (t))
3048 && !is_gimple_reg (t))
3049 return false;
3050
3051 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
3052 }
3053
3054 /* Similarly, but accept hard registers as inputs to asm statements. */
3055
3056 bool
3057 is_gimple_asm_val (tree t)
3058 {
3059 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
3060 return true;
3061
3062 return is_gimple_val (t);
3063 }
3064
3065 /* Return true if T is a GIMPLE minimal lvalue. */
3066
3067 bool
3068 is_gimple_min_lval (tree t)
3069 {
3070 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
3071 return false;
3072 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
3073 }
3074
3075 /* Return true if T is a valid function operand of a CALL_EXPR. */
3076
3077 bool
3078 is_gimple_call_addr (tree t)
3079 {
3080 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
3081 }
3082
3083 /* Return true if T is a valid address operand of a MEM_REF. */
3084
3085 bool
3086 is_gimple_mem_ref_addr (tree t)
3087 {
3088 return (is_gimple_reg (t)
3089 || TREE_CODE (t) == INTEGER_CST
3090 || (TREE_CODE (t) == ADDR_EXPR
3091 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
3092 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
3093 }
3094
3095
3096 /* Given a memory reference expression T, return its base address.
3097 The base address of a memory reference expression is the main
3098 object being referenced. For instance, the base address for
3099 'array[i].fld[j]' is 'array'. You can think of this as stripping
3100 away the offset part from a memory address.
3101
3102 This function calls handled_component_p to strip away all the inner
3103 parts of the memory reference until it reaches the base object. */
3104
3105 tree
3106 get_base_address (tree t)
3107 {
3108 while (handled_component_p (t))
3109 t = TREE_OPERAND (t, 0);
3110
3111 if ((TREE_CODE (t) == MEM_REF
3112 || TREE_CODE (t) == TARGET_MEM_REF)
3113 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
3114 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3115
3116 if (TREE_CODE (t) == SSA_NAME
3117 || DECL_P (t)
3118 || TREE_CODE (t) == STRING_CST
3119 || TREE_CODE (t) == CONSTRUCTOR
3120 || INDIRECT_REF_P (t)
3121 || TREE_CODE (t) == MEM_REF
3122 || TREE_CODE (t) == TARGET_MEM_REF)
3123 return t;
3124 else
3125 return NULL_TREE;
3126 }
3127
3128 void
3129 recalculate_side_effects (tree t)
3130 {
3131 enum tree_code code = TREE_CODE (t);
3132 int len = TREE_OPERAND_LENGTH (t);
3133 int i;
3134
3135 switch (TREE_CODE_CLASS (code))
3136 {
3137 case tcc_expression:
3138 switch (code)
3139 {
3140 case INIT_EXPR:
3141 case MODIFY_EXPR:
3142 case VA_ARG_EXPR:
3143 case PREDECREMENT_EXPR:
3144 case PREINCREMENT_EXPR:
3145 case POSTDECREMENT_EXPR:
3146 case POSTINCREMENT_EXPR:
3147 /* All of these have side-effects, no matter what their
3148 operands are. */
3149 return;
3150
3151 default:
3152 break;
3153 }
3154 /* Fall through. */
3155
3156 case tcc_comparison: /* a comparison expression */
3157 case tcc_unary: /* a unary arithmetic expression */
3158 case tcc_binary: /* a binary arithmetic expression */
3159 case tcc_reference: /* a reference */
3160 case tcc_vl_exp: /* a function call */
3161 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3162 for (i = 0; i < len; ++i)
3163 {
3164 tree op = TREE_OPERAND (t, i);
3165 if (op && TREE_SIDE_EFFECTS (op))
3166 TREE_SIDE_EFFECTS (t) = 1;
3167 }
3168 break;
3169
3170 case tcc_constant:
3171 /* No side-effects. */
3172 return;
3173
3174 default:
3175 gcc_unreachable ();
3176 }
3177 }
3178
3179 /* Canonicalize a tree T for use as the condition of a COND_EXPR.
3180 Returns a canonicalized tree that is valid for a COND_EXPR, or
3181 NULL_TREE if we failed to create one. */
3182
3183 tree
3184 canonicalize_cond_expr_cond (tree t)
3185 {
3186 /* Strip conversions around boolean operations. */
3187 if (CONVERT_EXPR_P (t)
3188 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
3189 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
3190 == BOOLEAN_TYPE))
3191 t = TREE_OPERAND (t, 0);
3192
3193 /* For !x use x == 0. */
3194 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
3195 {
3196 tree top0 = TREE_OPERAND (t, 0);
3197 t = build2 (EQ_EXPR, TREE_TYPE (t),
3198 top0, build_int_cst (TREE_TYPE (top0), 0));
3199 }
3200 /* For cmp ? 1 : 0 use cmp. */
3201 else if (TREE_CODE (t) == COND_EXPR
3202 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3203 && integer_onep (TREE_OPERAND (t, 1))
3204 && integer_zerop (TREE_OPERAND (t, 2)))
3205 {
3206 tree top0 = TREE_OPERAND (t, 0);
3207 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3208 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3209 }
3210
3211 if (is_gimple_condexpr (t))
3212 return t;
3213
3214 return NULL_TREE;
3215 }
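
/* For example (illustrative): given "(int) (a_1 < b_2)" the conversion
   is stripped; given "!x_1" the result is "x_1 == 0"; and given
   "a_1 < b_2 ? 1 : 0" the result is "a_1 < b_2".  */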
3216
3217 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3218 the positions marked by the set ARGS_TO_SKIP. */
3219
3220 gimple
3221 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3222 {
3223 int i;
3224 int nargs = gimple_call_num_args (stmt);
3225 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3226 gimple new_stmt;
3227
3228 for (i = 0; i < nargs; i++)
3229 if (!bitmap_bit_p (args_to_skip, i))
3230 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3231
3232 if (gimple_call_internal_p (stmt))
3233 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3234 vargs);
3235 else
3236 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3237 VEC_free (tree, heap, vargs);
3238 if (gimple_call_lhs (stmt))
3239 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3240
3241 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3242 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3243
3244 gimple_set_block (new_stmt, gimple_block (stmt));
3245 if (gimple_has_location (stmt))
3246 gimple_set_location (new_stmt, gimple_location (stmt));
3247 gimple_call_copy_flags (new_stmt, stmt);
3248 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3249
3250 gimple_set_modified (new_stmt, true);
3251
3252 return new_stmt;
3253 }
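
/* Illustrative sketch (hypothetical caller, e.g. when materializing a
   clone with fewer parameters): drop the second argument, index 1, of
   CALL and substitute the new call statement:

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);
     gimple new_call = gimple_call_copy_skip_args (call, skip);
     gsi_replace (&gsi, new_call, false);
     BITMAP_FREE (skip);
*/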
3254
3255
3256 enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };
3257
3258 static hashval_t gimple_type_hash (const void *);
3259
3260 /* Structure used to maintain a cache of some type pairs compared by
3261 gimple_types_compatible_p when comparing aggregate types. There are
3262 three possible values for SAME_P:
3263
3264 -2: The pair (T1, T2) has just been inserted in the table.
3265 0: T1 and T2 are different types.
3266 1: T1 and T2 are the same type.
3267
3268 The two elements in the SAME_P array are indexed by the comparison
3269 mode gtc_mode. */
3270
3271 struct type_pair_d
3272 {
3273 unsigned int uid1;
3274 unsigned int uid2;
3275 signed char same_p[2];
3276 };
3277 typedef struct type_pair_d *type_pair_t;
3278 DEF_VEC_P(type_pair_t);
3279 DEF_VEC_ALLOC_P(type_pair_t,heap);
3280
3281 #define GIMPLE_TYPE_PAIR_SIZE 16381
3282 struct type_pair_d *type_pair_cache;
3283
3284
3285 /* Look up the pair of types T1 and T2 in the type-pair cache. Insert
3286 a new entry if none existed. */
3287
3288 static inline type_pair_t
3289 lookup_type_pair (tree t1, tree t2)
3290 {
3291 unsigned int index;
3292 unsigned int uid1, uid2;
3293
3294 if (type_pair_cache == NULL)
3295 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
3296
3297 if (TYPE_UID (t1) < TYPE_UID (t2))
3298 {
3299 uid1 = TYPE_UID (t1);
3300 uid2 = TYPE_UID (t2);
3301 }
3302 else
3303 {
3304 uid1 = TYPE_UID (t2);
3305 uid2 = TYPE_UID (t1);
3306 }
3307 gcc_checking_assert (uid1 != uid2);
3308
3309 /* iterative_hash_hashval_t would imply function calls; since we
3310 know that UIDs are in a limited range, hash them directly. */
3311 index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
3312 % GIMPLE_TYPE_PAIR_SIZE);
3313 if (type_pair_cache [index].uid1 == uid1
3314 && type_pair_cache [index].uid2 == uid2)
3315 return &type_pair_cache[index];
3316
3317 type_pair_cache [index].uid1 = uid1;
3318 type_pair_cache [index].uid2 = uid2;
3319 type_pair_cache [index].same_p[0] = -2;
3320 type_pair_cache [index].same_p[1] = -2;
3321
3322 return &type_pair_cache[index];
3323 }
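
/* Illustrative note (hypothetical values): with uid1 == 5 and
   uid2 == 42, the slot is ((5 << HOST_BITS_PER_WIDE_INT / 2) + 42)
   % GIMPLE_TYPE_PAIR_SIZE; a later lookup of the same pair hits the
   cached entry, while a pair that maps to the same slot simply
   overwrites it, the cache being lossy by design.  */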
3324
3325 /* Per pointer state for the SCC finding. The on_sccstack flag is
3326 not strictly required: it is true exactly when there is no hash
3327 value recorded for the type and false otherwise. But querying
3328 that directly is slower. */
3329
3330 struct sccs
3331 {
3332 unsigned int dfsnum;
3333 unsigned int low;
3334 bool on_sccstack;
3335 union {
3336 hashval_t hash;
3337 signed char same_p;
3338 } u;
3339 };
3340
3341 static unsigned int next_dfs_num;
3342 static unsigned int gtc_next_dfs_num;
3343
3344
3345 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
3346
3347 typedef struct GTY(()) gimple_type_leader_entry_s {
3348 tree type;
3349 tree leader;
3350 } gimple_type_leader_entry;
3351
3352 #define GIMPLE_TYPE_LEADER_SIZE 16381
3353 static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3354 gimple_type_leader_entry *gimple_type_leader;
3355
3356 /* Lookup an existing leader for T and return it, or NULL_TREE if
3357 there is none in the cache. */
3358
3359 static inline tree
3360 gimple_lookup_type_leader (tree t)
3361 {
3362 gimple_type_leader_entry *leader;
3363
3364 if (!gimple_type_leader)
3365 return NULL_TREE;
3366
3367 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3368 if (leader->type != t)
3369 return NULL_TREE;
3370
3371 return leader->leader;
3372 }
3373
3374 /* Return true if T1 and T2 have the same name. Two anonymous types
3375 are considered to have the same name, while a named and an
3376 anonymous type do not. */
3377
3378 static bool
3379 compare_type_names_p (tree t1, tree t2)
3380 {
3381 tree name1 = TYPE_NAME (t1);
3382 tree name2 = TYPE_NAME (t2);
3383
3384 if (name1 && TREE_CODE (name1) == TYPE_DECL)
3385 name1 = DECL_NAME (name1);
3386 gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3387
3388 if (name2 && TREE_CODE (name2) == TYPE_DECL)
3389 name2 = DECL_NAME (name2);
3390 gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3391
3392 /* Identifiers can be compared with pointer equality rather
3393 than a string comparison. */
3394 if (name1 == name2)
3395 return true;
3396
3397 return false;
3398 }
3399
3400 /* Return true if the field decls F1 and F2 are at the same offset.
3401
3402 This is intended to be used on GIMPLE types only. */
3403
3404 bool
3405 gimple_compare_field_offset (tree f1, tree f2)
3406 {
3407 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3408 {
3409 tree offset1 = DECL_FIELD_OFFSET (f1);
3410 tree offset2 = DECL_FIELD_OFFSET (f2);
3411 return ((offset1 == offset2
3412 /* Once gimplification is done, self-referential offsets are
3413 instantiated as operand #2 of the COMPONENT_REF built for
3414 each access and reset. Therefore, they are not relevant
3415 anymore and fields are interchangeable provided that they
3416 represent the same access. */
3417 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3418 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3419 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3420 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3421 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3422 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3423 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3424 || operand_equal_p (offset1, offset2, 0))
3425 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3426 DECL_FIELD_BIT_OFFSET (f2)));
3427 }
3428
3429 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3430 should be, so handle differing ones specially by decomposing
3431 the offset into a byte and bit offset manually. */
3432 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3433 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3434 {
3435 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3436 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3437 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3438 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3439 + bit_offset1 / BITS_PER_UNIT);
3440 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3441 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3442 + bit_offset2 / BITS_PER_UNIT);
3443 if (byte_offset1 != byte_offset2)
3444 return false;
3445 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3446 }
3447
3448 return false;
3449 }
3450
3451 static bool
3452 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
3453 VEC(type_pair_t, heap) **,
3454 struct pointer_map_t *, struct obstack *);
3455
3456 /* DFS visit the edge from the caller's type pair with state *STATE
3457 to the pair T1, T2. Update the merging status if the pair is not
3458 part of the SCC containing the caller's pair, and return it.
3459 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk
3460 done. */
3461
3462 static bool
3463 gtc_visit (tree t1, tree t2,
3464 struct sccs *state,
3465 VEC(type_pair_t, heap) **sccstack,
3466 struct pointer_map_t *sccstate,
3467 struct obstack *sccstate_obstack)
3468 {
3469 struct sccs *cstate = NULL;
3470 type_pair_t p;
3471 void **slot;
3472 tree leader1, leader2;
3473
3474 /* Check first for the obvious case of pointer identity. */
3475 if (t1 == t2)
3476 return true;
3477
3478 /* Check that we have two types to compare. */
3479 if (t1 == NULL_TREE || t2 == NULL_TREE)
3480 return false;
3481
3482 /* Can't be the same type if the types don't have the same code. */
3483 if (TREE_CODE (t1) != TREE_CODE (t2))
3484 return false;
3485
3486 /* Can't be the same type if they have different CV qualifiers. */
3487 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3488 return false;
3489
3490 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3491 return false;
3492
3493 /* Void types and nullptr types are always the same. */
3494 if (TREE_CODE (t1) == VOID_TYPE
3495 || TREE_CODE (t1) == NULLPTR_TYPE)
3496 return true;
3497
3498 /* Can't be the same type if they have different alignment or mode. */
3499 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3500 || TYPE_MODE (t1) != TYPE_MODE (t2))
3501 return false;
3502
3503 /* Do some simple checks before doing three hashtable queries. */
3504 if (INTEGRAL_TYPE_P (t1)
3505 || SCALAR_FLOAT_TYPE_P (t1)
3506 || FIXED_POINT_TYPE_P (t1)
3507 || TREE_CODE (t1) == VECTOR_TYPE
3508 || TREE_CODE (t1) == COMPLEX_TYPE
3509 || TREE_CODE (t1) == OFFSET_TYPE
3510 || POINTER_TYPE_P (t1))
3511 {
3512 /* Can't be the same type if they have different sign or precision. */
3513 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3514 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3515 return false;
3516
3517 if (TREE_CODE (t1) == INTEGER_TYPE
3518 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3519 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3520 return false;
3521
3522 /* That's all we need to check for float and fixed-point types. */
3523 if (SCALAR_FLOAT_TYPE_P (t1)
3524 || FIXED_POINT_TYPE_P (t1))
3525 return true;
3526
3527 /* For other types fall thru to more complex checks. */
3528 }
3529
3530 /* If the types have been previously registered and found equal
3531 they still are. */
3532 leader1 = gimple_lookup_type_leader (t1);
3533 leader2 = gimple_lookup_type_leader (t2);
3534 if (leader1 == t2
3535 || t1 == leader2
3536 || (leader1 && leader1 == leader2))
3537 return true;
3538
3539 /* If the hash values of t1 and t2 are different the types can't
3540 possibly be the same. This helps keeping the type-pair hashtable
3541 small, only tracking comparisons for hash collisions. */
3542 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3543 return false;
3544
3545 /* Allocate a new cache entry for this comparison. */
3546 p = lookup_type_pair (t1, t2);
3547 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3548 {
3549 /* We have already decided whether T1 and T2 are the
3550 same, return the cached result. */
3551 return p->same_p[GTC_MERGE] == 1;
3552 }
3553
3554 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3555 cstate = (struct sccs *)*slot;
3556 /* Not yet visited. DFS recurse. */
3557 if (!cstate)
3558 {
3559 gimple_types_compatible_p_1 (t1, t2, p,
3560 sccstack, sccstate, sccstate_obstack);
3561 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
3562 state->low = MIN (state->low, cstate->low);
3563 }
3564 /* If the pair is still on the SCC stack adjust the parent's low. */
3565 if (cstate->dfsnum < state->dfsnum
3566 && cstate->on_sccstack)
3567 state->low = MIN (cstate->dfsnum, state->low);
3568
3569 /* Return the current lattice value. We start with an equality
3570 assumption, so types that are part of an SCC will be
3571 optimistically treated as equal unless proven otherwise. */
3572 return cstate->u.same_p;
3573 }
3574
3575 /* Worker for gimple_types_compatible_p.
3576 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3577
3578 static bool
3579 gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
3580 VEC(type_pair_t, heap) **sccstack,
3581 struct pointer_map_t *sccstate,
3582 struct obstack *sccstate_obstack)
3583 {
3584 struct sccs *state;
3585
3586 gcc_assert (p->same_p[GTC_MERGE] == -2);
3587
3588 state = XOBNEW (sccstate_obstack, struct sccs);
3589 *pointer_map_insert (sccstate, p) = state;
3590
3591 VEC_safe_push (type_pair_t, heap, *sccstack, p);
3592 state->dfsnum = gtc_next_dfs_num++;
3593 state->low = state->dfsnum;
3594 state->on_sccstack = true;
3595 /* Start with an equality assumption. As we DFS recurse into child
3596 SCCs this assumption may get revisited. */
3597 state->u.same_p = 1;
3598
3599 /* The struct tags shall compare equal. */
3600 if (!compare_type_names_p (t1, t2))
3601 goto different_types;
3602
3603 /* If their attributes are not the same they can't be the same type. */
3604 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3605 goto different_types;
3606
3607 /* Do type-specific comparisons. */
3608 switch (TREE_CODE (t1))
3609 {
3610 case VECTOR_TYPE:
3611 case COMPLEX_TYPE:
3612 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3613 state, sccstack, sccstate, sccstate_obstack))
3614 goto different_types;
3615 goto same_types;
3616
3617 case ARRAY_TYPE:
3618 /* Array types are the same if the element types are the same and
3619 the number of elements are the same. */
3620 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3621 state, sccstack, sccstate, sccstate_obstack)
3622 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3623 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3624 goto different_types;
3625 else
3626 {
3627 tree i1 = TYPE_DOMAIN (t1);
3628 tree i2 = TYPE_DOMAIN (t2);
3629
3630 /* For an incomplete external array, the type domain can be
3631 NULL_TREE. Check this condition also. */
3632 if (i1 == NULL_TREE && i2 == NULL_TREE)
3633 goto same_types;
3634 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3635 goto different_types;
3636 /* If for a complete array type the possibly gimplified sizes
3637 are different the types are different. */
3638 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
3639 || (TYPE_SIZE (i1)
3640 && TYPE_SIZE (i2)
3641 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
3642 goto different_types;
3643 else
3644 {
3645 tree min1 = TYPE_MIN_VALUE (i1);
3646 tree min2 = TYPE_MIN_VALUE (i2);
3647 tree max1 = TYPE_MAX_VALUE (i1);
3648 tree max2 = TYPE_MAX_VALUE (i2);
3649
3650 /* The minimum/maximum values have to be the same. */
3651 if ((min1 == min2
3652 || (min1 && min2
3653 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3654 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3655 || operand_equal_p (min1, min2, 0))))
3656 && (max1 == max2
3657 || (max1 && max2
3658 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3659 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3660 || operand_equal_p (max1, max2, 0)))))
3661 goto same_types;
3662 else
3663 goto different_types;
3664 }
3665 }
3666
3667 case METHOD_TYPE:
3668 /* Method types should belong to the same class. */
3669 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
3670 state, sccstack, sccstate, sccstate_obstack))
3671 goto different_types;
3672
3673 /* Fallthru */
3674
3675 case FUNCTION_TYPE:
3676 /* Function types are the same if the return type and arguments types
3677 are the same. */
3678 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3679 state, sccstack, sccstate, sccstate_obstack))
3680 goto different_types;
3681
3682 if (!comp_type_attributes (t1, t2))
3683 goto different_types;
3684
3685 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3686 goto same_types;
3687 else
3688 {
3689 tree parms1, parms2;
3690
3691 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3692 parms1 && parms2;
3693 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3694 {
3695 if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
3696 state, sccstack, sccstate, sccstate_obstack))
3697 goto different_types;
3698 }
3699
3700 if (parms1 || parms2)
3701 goto different_types;
3702
3703 goto same_types;
3704 }
3705
3706 case OFFSET_TYPE:
3707 {
3708 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3709 state, sccstack, sccstate, sccstate_obstack)
3710 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
3711 TYPE_OFFSET_BASETYPE (t2),
3712 state, sccstack, sccstate, sccstate_obstack))
3713 goto different_types;
3714
3715 goto same_types;
3716 }
3717
3718 case POINTER_TYPE:
3719 case REFERENCE_TYPE:
3720 {
3721 /* If the two pointers have different ref-all attributes,
3722 they can't be the same type. */
3723 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3724 goto different_types;
3725
3726 /* Otherwise, pointer and reference types are the same if the
3727 pointed-to types are the same. */
3728 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3729 state, sccstack, sccstate, sccstate_obstack))
3730 goto same_types;
3731
3732 goto different_types;
3733 }
3734
3735 case INTEGER_TYPE:
3736 case BOOLEAN_TYPE:
3737 {
3738 tree min1 = TYPE_MIN_VALUE (t1);
3739 tree max1 = TYPE_MAX_VALUE (t1);
3740 tree min2 = TYPE_MIN_VALUE (t2);
3741 tree max2 = TYPE_MAX_VALUE (t2);
3742 bool min_equal_p = false;
3743 bool max_equal_p = false;
3744
3745 /* If either type has a minimum value, the other type must
3746 have the same. */
3747 if (min1 == NULL_TREE && min2 == NULL_TREE)
3748 min_equal_p = true;
3749 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3750 min_equal_p = true;
3751
3752 /* Likewise, if either type has a maximum value, the other
3753 type must have the same. */
3754 if (max1 == NULL_TREE && max2 == NULL_TREE)
3755 max_equal_p = true;
3756 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3757 max_equal_p = true;
3758
3759 if (!min_equal_p || !max_equal_p)
3760 goto different_types;
3761
3762 goto same_types;
3763 }
3764
3765 case ENUMERAL_TYPE:
3766 {
3767 /* FIXME lto, we cannot check bounds on enumeral types because
3768 different front ends will produce different values.
3769 In C, enumeral types are integers, while in C++ each element
3770 will have its own symbolic value. We should decide how enums
3771 are to be represented in GIMPLE and have each front end lower
3772 to that. */
3773 tree v1, v2;
3774
3775 /* For enumeral types, all the values must be the same. */
3776 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3777 goto same_types;
3778
3779 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3780 v1 && v2;
3781 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3782 {
3783 tree c1 = TREE_VALUE (v1);
3784 tree c2 = TREE_VALUE (v2);
3785
3786 if (TREE_CODE (c1) == CONST_DECL)
3787 c1 = DECL_INITIAL (c1);
3788
3789 if (TREE_CODE (c2) == CONST_DECL)
3790 c2 = DECL_INITIAL (c2);
3791
3792 if (tree_int_cst_equal (c1, c2) != 1)
3793 goto different_types;
3794
3795 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
3796 goto different_types;
3797 }
3798
3799 /* If one enumeration has more values than the other, they
3800 are not the same. */
3801 if (v1 || v2)
3802 goto different_types;
3803
3804 goto same_types;
3805 }
3806
3807 case RECORD_TYPE:
3808 case UNION_TYPE:
3809 case QUAL_UNION_TYPE:
3810 {
3811 tree f1, f2;
3812
3813 /* For aggregate types, all the fields must be the same. */
3814 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3815 f1 && f2;
3816 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3817 {
3818 /* Different field kinds are not compatible. */
3819 if (TREE_CODE (f1) != TREE_CODE (f2))
3820 goto different_types;
3821 /* Field decls must have the same name and offset. */
3822 if (TREE_CODE (f1) == FIELD_DECL
3823 && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3824 || !gimple_compare_field_offset (f1, f2)))
3825 goto different_types;
3826 /* All entities should have the same name and type. */
3827 if (DECL_NAME (f1) != DECL_NAME (f2)
3828 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
3829 state, sccstack, sccstate, sccstate_obstack))
3830 goto different_types;
3831 }
3832
3833 /* If one aggregate has more fields than the other, they
3834 are not the same. */
3835 if (f1 || f2)
3836 goto different_types;
3837
3838 goto same_types;
3839 }
3840
3841 default:
3842 gcc_unreachable ();
3843 }
3844
3845 /* Common exit path for types that are not compatible. */
3846 different_types:
3847 state->u.same_p = 0;
3848 goto pop;
3849
3850 /* Common exit path for types that are compatible. */
3851 same_types:
3852 gcc_assert (state->u.same_p == 1);
3853
3854 pop:
3855 if (state->low == state->dfsnum)
3856 {
3857 type_pair_t x;
3858
3859 /* Pop off the SCC and set its cache values to the final
3860 comparison result. */
3861 do
3862 {
3863 struct sccs *cstate;
3864 x = VEC_pop (type_pair_t, *sccstack);
3865 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3866 cstate->on_sccstack = false;
3867 x->same_p[GTC_MERGE] = state->u.same_p;
3868 }
3869 while (x != p);
3870 }
3871
3872 return state->u.same_p;
3873 }
3874
3875 /* Return true iff T1 and T2 are structurally identical. When
3876 merging (GTC_MERGE mode) an incomplete type and a complete type
3877 are considered different, otherwise they are considered compatible. */
3878
3879 static bool
3880 gimple_types_compatible_p (tree t1, tree t2)
3881 {
3882 VEC(type_pair_t, heap) *sccstack = NULL;
3883 struct pointer_map_t *sccstate;
3884 struct obstack sccstate_obstack;
3885 type_pair_t p = NULL;
3886 bool res;
3887 tree leader1, leader2;
3888
3889 /* Before starting to set up the SCC machinery handle simple cases. */
3890
3891 /* Check first for the obvious case of pointer identity. */
3892 if (t1 == t2)
3893 return true;
3894
3895 /* Check that we have two types to compare. */
3896 if (t1 == NULL_TREE || t2 == NULL_TREE)
3897 return false;
3898
3899 /* Can't be the same type if the types don't have the same code. */
3900 if (TREE_CODE (t1) != TREE_CODE (t2))
3901 return false;
3902
3903 /* Can't be the same type if they have different CV qualifiers. */
3904 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3905 return false;
3906
3907 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3908 return false;
3909
3910 /* Void types and nullptr types are always the same. */
3911 if (TREE_CODE (t1) == VOID_TYPE
3912 || TREE_CODE (t1) == NULLPTR_TYPE)
3913 return true;
3914
3915 /* Can't be the same type if they have different alignment or mode. */
3916 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3917 || TYPE_MODE (t1) != TYPE_MODE (t2))
3918 return false;
3919
3920 /* Do some simple checks before doing three hashtable queries. */
3921 if (INTEGRAL_TYPE_P (t1)
3922 || SCALAR_FLOAT_TYPE_P (t1)
3923 || FIXED_POINT_TYPE_P (t1)
3924 || TREE_CODE (t1) == VECTOR_TYPE
3925 || TREE_CODE (t1) == COMPLEX_TYPE
3926 || TREE_CODE (t1) == OFFSET_TYPE
3927 || POINTER_TYPE_P (t1))
3928 {
3929 /* Can't be the same type if they have different sign or precision. */
3930 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3931 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3932 return false;
3933
3934 if (TREE_CODE (t1) == INTEGER_TYPE
3935 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
3936 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
3937 return false;
3938
3939 /* That's all we need to check for float and fixed-point types. */
3940 if (SCALAR_FLOAT_TYPE_P (t1)
3941 || FIXED_POINT_TYPE_P (t1))
3942 return true;
3943
3944 /* For other types fall thru to more complex checks. */
3945 }
3946
3947 /* If the types have been previously registered and found equal
3948 they still are. */
3949 leader1 = gimple_lookup_type_leader (t1);
3950 leader2 = gimple_lookup_type_leader (t2);
3951 if (leader1 == t2
3952 || t1 == leader2
3953 || (leader1 && leader1 == leader2))
3954 return true;
3955
3956 /* If the hash values of t1 and t2 are different the types can't
3957 possibly be the same.  This helps keep the type-pair hashtable
3958 small, tracking comparisons only for hash collisions.  */
3959 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3960 return false;
3961
3962 /* If we've visited this type pair before (in the case of aggregates
3963 with self-referential types), and we made a decision, return it. */
3964 p = lookup_type_pair (t1, t2);
3965 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3966 {
3967 /* We have already decided whether T1 and T2 are the
3968 same, return the cached result. */
3969 return p->same_p[GTC_MERGE] == 1;
3970 }
3971
3972 /* Now set up the SCC machinery for the comparison. */
3973 gtc_next_dfs_num = 1;
3974 sccstate = pointer_map_create ();
3975 gcc_obstack_init (&sccstate_obstack);
3976 res = gimple_types_compatible_p_1 (t1, t2, p,
3977 &sccstack, sccstate, &sccstate_obstack);
3978 VEC_free (type_pair_t, heap, sccstack);
3979 pointer_map_destroy (sccstate);
3980 obstack_free (&sccstate_obstack, NULL);
3981
3982 return res;
3983 }
3984
3985
3986 static hashval_t
3987 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
3988 struct pointer_map_t *, struct obstack *);
3989
3990 /* DFS visit the edge from the caller's type with state *STATE to T.
3991 Update the caller's type hash V with the hash for T if it is not part
3992 of the SCC containing the caller's type and return it.
3993 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.  */
3994
3995 static hashval_t
3996 visit (tree t, struct sccs *state, hashval_t v,
3997 VEC (tree, heap) **sccstack,
3998 struct pointer_map_t *sccstate,
3999 struct obstack *sccstate_obstack)
4000 {
4001 struct sccs *cstate = NULL;
4002 struct tree_int_map m;
4003 void **slot;
4004
4005 /* If there is a hash value recorded for this type then it can't
4006 possibly be part of our parent SCC. Simply mix in its hash. */
4007 m.base.from = t;
4008 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
4009 && *slot)
4010 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
4011
4012 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
4013 cstate = (struct sccs *)*slot;
4014 if (!cstate)
4015 {
4016 hashval_t tem;
4017 /* Not yet visited.  DFS recurse; the recursion registers T in
4018 SCCSTATE, so fetch the state it created.  */
4019 tem = iterative_hash_gimple_type (t, v,
4020 sccstack, sccstate, sccstate_obstack);
4021 cstate = (struct sccs *)*pointer_map_contains (sccstate, t);
4022 state->low = MIN (state->low, cstate->low);
4023 /* If the type is no longer on the SCC stack and thus is not part
4024 of the parent's SCC, mix in its hash value.  Otherwise we will
4025 ignore the type for hashing purposes and return the unaltered
4026 hash value.  */
4027 if (!cstate->on_sccstack)
4028 return tem;
4029 }
4030 if (cstate->dfsnum < state->dfsnum
4031 && cstate->on_sccstack)
4032 state->low = MIN (cstate->dfsnum, state->low);
4033
4034 /* We are part of our parent's SCC; skip this type during hashing
4035 and return the unaltered hash value.  */
4036 return v;
4037 }
4038
4039 /* Hash NAME with the previous hash value V and return it. */
4040
4041 static hashval_t
4042 iterative_hash_name (tree name, hashval_t v)
4043 {
4044 if (!name)
4045 return v;
4046 if (TREE_CODE (name) == TYPE_DECL)
4047 name = DECL_NAME (name);
4048 if (!name)
4049 return v;
4050 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
4051 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
4052 }
4053
4054 /* A type, hashvalue pair for sorting SCC members. */
4055
4056 struct type_hash_pair {
4057 tree type;
4058 hashval_t hash;
4059 };
4060
4061 /* Compare two type, hashvalue pairs. */
4062
4063 static int
4064 type_hash_pair_compare (const void *p1_, const void *p2_)
4065 {
4066 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
4067 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
4068 if (p1->hash < p2->hash)
4069 return -1;
4070 else if (p1->hash > p2->hash)
4071 return 1;
4072 return 0;
4073 }
4074
4075 /* Return a hash value for gimple type TYPE combined with VAL.
4076 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done.
4077 
4078 To hash a type we end up hashing in the types that are reachable from it.
4079 Through pointers we can end up with cycles, which mess up the
4080 required property that we compute the same hash value for
4081 structurally equivalent types.  To avoid this we have to
4082 hash all types in a cycle (the SCC) in a commutative way.  The
4083 easiest way is to not mix in the hashes of the SCC members at
4084 all.  To make this work we have to delay setting the hash
4085 values of the SCC until it is complete.  */
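/* For example (a sketch), with the mutually recursive pair

     struct A { struct B *b; };
     struct B { struct A *a; };

   A and B form a single SCC.  Mixing B's hash into A's while the SCC
   is still being walked would make the result depend on which member
   the DFS happened to reach first, so the members' own hashes are
   mixed in only after the SCC is popped, in sorted order (see the
   qsort below).  */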
4086
4087 static hashval_t
4088 iterative_hash_gimple_type (tree type, hashval_t val,
4089 VEC(tree, heap) **sccstack,
4090 struct pointer_map_t *sccstate,
4091 struct obstack *sccstate_obstack)
4092 {
4093 hashval_t v;
4094 void **slot;
4095 struct sccs *state;
4096
4097 /* Not visited during this DFS walk. */
4098 gcc_checking_assert (!pointer_map_contains (sccstate, type));
4099 state = XOBNEW (sccstate_obstack, struct sccs);
4100 *pointer_map_insert (sccstate, type) = state;
4101
4102 VEC_safe_push (tree, heap, *sccstack, type);
4103 state->dfsnum = next_dfs_num++;
4104 state->low = state->dfsnum;
4105 state->on_sccstack = true;
4106
4107 /* Combine a few common features of types so that types are grouped into
4108 smaller sets; when searching for existing matching types to merge,
4109 only existing types having the same features as the new type will be
4110 checked. */
4111 v = iterative_hash_name (TYPE_NAME (type), 0);
4112 v = iterative_hash_hashval_t (TREE_CODE (type), v);
4113 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
4114 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4115
4116 /* Do not hash the type's size as this will cause differences in
4117 hash values for the complete vs. the incomplete type variant.  */
4118
4119 /* Incorporate common features of numerical types. */
4120 if (INTEGRAL_TYPE_P (type)
4121 || SCALAR_FLOAT_TYPE_P (type)
4122 || FIXED_POINT_TYPE_P (type))
4123 {
4124 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4125 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4126 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4127 }
4128
4129 /* For pointer and reference types, fold in information about the type
4130 pointed to. */
4131 if (POINTER_TYPE_P (type))
4132 v = visit (TREE_TYPE (type), state, v,
4133 sccstack, sccstate, sccstate_obstack);
4134
4135 /* For integer types hash the type's min/max values and the string flag.  */
4136 if (TREE_CODE (type) == INTEGER_TYPE)
4137 {
4138 /* OMP lowering can introduce error_mark_node in place of
4139 random local decls in types. */
4140 if (TYPE_MIN_VALUE (type) != error_mark_node)
4141 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
4142 if (TYPE_MAX_VALUE (type) != error_mark_node)
4143 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
4144 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4145 }
4146
4147 /* For array types hash their domain and the string flag. */
4148 if (TREE_CODE (type) == ARRAY_TYPE
4149 && TYPE_DOMAIN (type))
4150 {
4151 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4152 v = visit (TYPE_DOMAIN (type), state, v,
4153 sccstack, sccstate, sccstate_obstack);
4154 }
4155
4156 /* Recurse for aggregates with a single element type. */
4157 if (TREE_CODE (type) == ARRAY_TYPE
4158 || TREE_CODE (type) == COMPLEX_TYPE
4159 || TREE_CODE (type) == VECTOR_TYPE)
4160 v = visit (TREE_TYPE (type), state, v,
4161 sccstack, sccstate, sccstate_obstack);
4162
4163 /* Incorporate function return and argument types. */
4164 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4165 {
4166 unsigned na;
4167 tree p;
4168
4169 /* For method types also incorporate their parent class. */
4170 if (TREE_CODE (type) == METHOD_TYPE)
4171 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
4172 sccstack, sccstate, sccstate_obstack);
4173
4174 /* Check result and argument types. */
4175 v = visit (TREE_TYPE (type), state, v,
4176 sccstack, sccstate, sccstate_obstack);
4177 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4178 {
4179 v = visit (TREE_VALUE (p), state, v,
4180 sccstack, sccstate, sccstate_obstack);
4181 na++;
4182 }
4183
4184 v = iterative_hash_hashval_t (na, v);
4185 }
4186
4187 if (TREE_CODE (type) == RECORD_TYPE
4188 || TREE_CODE (type) == UNION_TYPE
4189 || TREE_CODE (type) == QUAL_UNION_TYPE)
4190 {
4191 unsigned nf;
4192 tree f;
4193
4194 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4195 {
4196 v = iterative_hash_name (DECL_NAME (f), v);
4197 v = visit (TREE_TYPE (f), state, v,
4198 sccstack, sccstate, sccstate_obstack);
4199 nf++;
4200 }
4201
4202 v = iterative_hash_hashval_t (nf, v);
4203 }
4204
4205 /* Record hash for us. */
4206 state->u.hash = v;
4207
4208 /* See if we found an SCC. */
4209 if (state->low == state->dfsnum)
4210 {
4211 tree x;
4212 struct tree_int_map *m;
4213
4214 /* Pop off the SCC and set its hash values. */
4215 x = VEC_pop (tree, *sccstack);
4216 /* Optimize SCC size one. */
4217 if (x == type)
4218 {
4219 state->on_sccstack = false;
4220 m = ggc_alloc_cleared_tree_int_map ();
4221 m->base.from = x;
4222 m->to = v;
4223 slot = htab_find_slot (type_hash_cache, m, INSERT);
4224 gcc_assert (!*slot);
4225 *slot = (void *) m;
4226 }
4227 else
4228 {
4229 struct sccs *cstate;
4230 unsigned first, i, size, j;
4231 struct type_hash_pair *pairs;
4232 /* Pop off the SCC and build an array of type, hash pairs. */
4233 first = VEC_length (tree, *sccstack) - 1;
4234 while (VEC_index (tree, *sccstack, first) != type)
4235 --first;
4236 size = VEC_length (tree, *sccstack) - first + 1;
4237 pairs = XALLOCAVEC (struct type_hash_pair, size);
4238 i = 0;
4239 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4240 cstate->on_sccstack = false;
4241 pairs[i].type = x;
4242 pairs[i].hash = cstate->u.hash;
4243 do
4244 {
4245 x = VEC_pop (tree, *sccstack);
4246 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4247 cstate->on_sccstack = false;
4248 ++i;
4249 pairs[i].type = x;
4250 pairs[i].hash = cstate->u.hash;
4251 }
4252 while (x != type);
4253 gcc_assert (i + 1 == size);
4254 /* Sort the array of type, hash pairs so that when we mix in
4255 all members of the SCC the hash value becomes independent of
4256 the order in which we visited the SCC.  Disregard hashes equal to
4257 the hash of the type we mix into because we cannot guarantee
4258 a stable sort for those across different TUs.  */
4259 qsort (pairs, size, sizeof (struct type_hash_pair),
4260 type_hash_pair_compare);
4261 for (i = 0; i < size; ++i)
4262 {
4263 hashval_t hash;
4264 m = ggc_alloc_cleared_tree_int_map ();
4265 m->base.from = pairs[i].type;
4266 hash = pairs[i].hash;
4267 /* Skip same hashes. */
4268 for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
4269 ;
4270 for (; j < size; ++j)
4271 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4272 for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
4273 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4274 m->to = hash;
4275 if (pairs[i].type == type)
4276 v = hash;
4277 slot = htab_find_slot (type_hash_cache, m, INSERT);
4278 gcc_assert (!*slot);
4279 *slot = (void *) m;
4280 }
4281 }
4282 }
4283
4284 return iterative_hash_hashval_t (v, val);
4285 }
4286
4287
4288 /* Returns a hash value for P (assumed to be a type). The hash value
4289 is computed using some distinguishing features of the type. Note
4290 that we cannot use pointer hashing here as we may be dealing with
4291 two distinct instances of the same type.
4292
4293 This function should produce the same hash value for two compatible
4294 types according to gimple_types_compatible_p. */
4295
4296 static hashval_t
4297 gimple_type_hash (const void *p)
4298 {
4299 const_tree t = (const_tree) p;
4300 VEC(tree, heap) *sccstack = NULL;
4301 struct pointer_map_t *sccstate;
4302 struct obstack sccstate_obstack;
4303 hashval_t val;
4304 void **slot;
4305 struct tree_int_map m;
4306
4307 if (type_hash_cache == NULL)
4308 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4309 tree_int_map_eq, NULL);
4310
4311 m.base.from = CONST_CAST_TREE (t);
4312 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
4313 && *slot)
4314 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
4315
4316 /* Perform a DFS walk and pre-hash all reachable types. */
4317 next_dfs_num = 1;
4318 sccstate = pointer_map_create ();
4319 gcc_obstack_init (&sccstate_obstack);
4320 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
4321 &sccstack, sccstate, &sccstate_obstack);
4322 VEC_free (tree, heap, sccstack);
4323 pointer_map_destroy (sccstate);
4324 obstack_free (&sccstate_obstack, NULL);
4325
4326 return val;
4327 }
4328
4329 /* Return a hash value for gimple type TYPE combined with VAL.
4330
4331 The hash value returned is equal for types considered compatible
4332 by gimple_canonical_types_compatible_p. */
4333
4334 static hashval_t
4335 iterative_hash_canonical_type (tree type, hashval_t val)
4336 {
4337 hashval_t v;
4338 void **slot;
4339 struct tree_int_map *mp, m;
4340
4341 m.base.from = type;
4342 if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
4343 && *slot)
4344 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
4345
4346 /* Combine a few common features of types so that types are grouped into
4347 smaller sets; when searching for existing matching types to merge,
4348 only existing types having the same features as the new type will be
4349 checked. */
4350 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
4351 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4352 v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
4353 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4354
4355 /* Incorporate common features of numerical types. */
4356 if (INTEGRAL_TYPE_P (type)
4357 || SCALAR_FLOAT_TYPE_P (type)
4358 || FIXED_POINT_TYPE_P (type)
4359 || TREE_CODE (type) == VECTOR_TYPE
4360 || TREE_CODE (type) == COMPLEX_TYPE
4361 || TREE_CODE (type) == OFFSET_TYPE
4362 || POINTER_TYPE_P (type))
4363 {
4364 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4365 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4366 }
4367
4368 /* For pointer and reference types, fold in information about the type
4369 pointed to but do not recurse to the pointed-to type. */
4370 if (POINTER_TYPE_P (type))
4371 {
4372 v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
4373 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
4374 v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
4375 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4376 }
4377
4378 /* For integer types hash the string flag and whether the type is a sizetype.  */
4379 if (TREE_CODE (type) == INTEGER_TYPE)
4380 {
4381 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4382 v = iterative_hash_hashval_t (TYPE_IS_SIZETYPE (type), v);
4383 }
4384
4385 /* For array types hash their domain and the string flag. */
4386 if (TREE_CODE (type) == ARRAY_TYPE
4387 && TYPE_DOMAIN (type))
4388 {
4389 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4390 v = iterative_hash_canonical_type (TYPE_DOMAIN (type), v);
4391 }
4392
4393 /* Recurse for aggregates with a single element type. */
4394 if (TREE_CODE (type) == ARRAY_TYPE
4395 || TREE_CODE (type) == COMPLEX_TYPE
4396 || TREE_CODE (type) == VECTOR_TYPE)
4397 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4398
4399 /* Incorporate function return and argument types. */
4400 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4401 {
4402 unsigned na;
4403 tree p;
4404
4405 /* For method types also incorporate their parent class. */
4406 if (TREE_CODE (type) == METHOD_TYPE)
4407 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
4408
4409 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4410
4411 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4412 {
4413 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
4414 na++;
4415 }
4416
4417 v = iterative_hash_hashval_t (na, v);
4418 }
4419
4420 if (TREE_CODE (type) == RECORD_TYPE
4421 || TREE_CODE (type) == UNION_TYPE
4422 || TREE_CODE (type) == QUAL_UNION_TYPE)
4423 {
4424 unsigned nf;
4425 tree f;
4426
4427 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4428 if (TREE_CODE (f) == FIELD_DECL)
4429 {
4430 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
4431 nf++;
4432 }
4433
4434 v = iterative_hash_hashval_t (nf, v);
4435 }
4436
4437 /* Cache the just computed hash value. */
4438 mp = ggc_alloc_cleared_tree_int_map ();
4439 mp->base.from = type;
4440 mp->to = v;
4441 *slot = (void *) mp;
4442
4443 return iterative_hash_hashval_t (v, val);
4444 }
4445
4446 static hashval_t
4447 gimple_canonical_type_hash (const void *p)
4448 {
4449 if (canonical_type_hash_cache == NULL)
4450 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4451 tree_int_map_eq, NULL);
4452
4453 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
4454 }
4455
4456
4457 /* Returns nonzero if P1 and P2 are equal. */
4458
4459 static int
4460 gimple_type_eq (const void *p1, const void *p2)
4461 {
4462 const_tree t1 = (const_tree) p1;
4463 const_tree t2 = (const_tree) p2;
4464 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4465 CONST_CAST_TREE (t2));
4466 }
4467
4468
4469 /* Worker for gimple_register_type.
4470 Register type T in the global type table gimple_types.
4471 When REGISTERING_MV is false first recurse for the main variant of T. */
4472
4473 static tree
4474 gimple_register_type_1 (tree t, bool registering_mv)
4475 {
4476 void **slot;
4477 gimple_type_leader_entry *leader;
4478
4479 /* If we registered this type before return the cached result. */
4480 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
4481 if (leader->type == t)
4482 return leader->leader;
4483
4484 /* Always register the main variant first. This is important so we
4485 pick up the non-typedef variants as canonical, otherwise we'll end
4486 up taking typedef ids for structure tags during comparison.
4487 It also makes sure that main variants will be merged to main variants.
4488 As we are operating on a possibly partially fixed up type graph
4489 do not bother to recurse more than once, otherwise we may end up
4490 walking in circles.
4491 If we are registering a main variant it will either remain its
4492 own main variant or it will be merged to something else in which
4493 case we do not care for the main variant leader. */
4494 if (!registering_mv
4495 && TYPE_MAIN_VARIANT (t) != t)
4496 gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
4497
4498 /* See if we already have an equivalent type registered. */
4499 slot = htab_find_slot (gimple_types, t, INSERT);
4500 if (*slot
4501 && *(tree *)slot != t)
4502 {
4503 tree new_type = (tree) *((tree *) slot);
4504 leader->type = t;
4505 leader->leader = new_type;
4506 return new_type;
4507 }
4508
4509 /* If not, insert it to the cache and the hash. */
4510 leader->type = t;
4511 leader->leader = t;
4512 *slot = (void *) t;
4513 return t;
4514 }
4515
4516 /* Register type T in the global type table gimple_types.
4517 If another type T', compatible with T, already existed in
4518 gimple_types then return T', otherwise return T. This is used by
4519 LTO to merge identical types read from different TUs. */
4520
4521 tree
4522 gimple_register_type (tree t)
4523 {
4524 gcc_assert (TYPE_P (t));
4525
4526 if (!gimple_type_leader)
4527 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
4528 (GIMPLE_TYPE_LEADER_SIZE);
4529
4530 if (gimple_types == NULL)
4531 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
4532
4533 return gimple_register_type_1 (t, false);
4534 }
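/* A minimal usage sketch for the LTO stream-in path.  The reader call
   below is hypothetical and only stands for wherever a type node is
   materialized from another TU:

     tree t = stream_read_some_type (ib);
     t = gimple_register_type (t);

   Afterwards T is the merged representative; a compatible type
   registered earlier from another TU may be returned in place of the
   freshly read node.  */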
4535
4536 /* The TYPE_CANONICAL merging machinery. It should closely resemble
4537 the middle-end types_compatible_p function. It needs to avoid
4538 claiming types are different for types that should be treated
4539 the same with respect to TBAA. Canonical types are also used
4540 for IL consistency checks via the useless_type_conversion_p
4541 predicate which does not handle all type kinds itself but falls
4542 back to pointer-comparison of TYPE_CANONICAL for aggregates
4543 for example. */
4544
4545 /* Return true iff T1 and T2 are structurally identical as far as
4546 TBAA is concerned.  */
4547
4548 static bool
4549 gimple_canonical_types_compatible_p (tree t1, tree t2)
4550 {
4551 /* Before starting to set up the SCC machinery handle simple cases. */
4552
4553 /* Check first for the obvious case of pointer identity. */
4554 if (t1 == t2)
4555 return true;
4556
4557 /* Check that we have two types to compare. */
4558 if (t1 == NULL_TREE || t2 == NULL_TREE)
4559 return false;
4560
4561 /* If the types have been previously registered and found equal
4562 they still are. */
4563 if (TYPE_CANONICAL (t1)
4564 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
4565 return true;
4566
4567 /* Can't be the same type if the types don't have the same code. */
4568 if (TREE_CODE (t1) != TREE_CODE (t2))
4569 return false;
4570
4571 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
4572 return false;
4573
4574 /* Qualifiers do not matter for canonical type comparison purposes. */
4575
4576 /* Void types and nullptr types are always the same. */
4577 if (TREE_CODE (t1) == VOID_TYPE
4578 || TREE_CODE (t1) == NULLPTR_TYPE)
4579 return true;
4580
4581 /* Can't be the same type if they have different alignment, or mode. */
4582 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
4583 || TYPE_MODE (t1) != TYPE_MODE (t2))
4584 return false;
4585
4586 /* Non-aggregate types can be handled cheaply. */
4587 if (INTEGRAL_TYPE_P (t1)
4588 || SCALAR_FLOAT_TYPE_P (t1)
4589 || FIXED_POINT_TYPE_P (t1)
4590 || TREE_CODE (t1) == VECTOR_TYPE
4591 || TREE_CODE (t1) == COMPLEX_TYPE
4592 || TREE_CODE (t1) == OFFSET_TYPE
4593 || POINTER_TYPE_P (t1))
4594 {
4595 /* Can't be the same type if they have different sign or precision. */
4596 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
4597 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
4598 return false;
4599
4600 if (TREE_CODE (t1) == INTEGER_TYPE
4601 && (TYPE_IS_SIZETYPE (t1) != TYPE_IS_SIZETYPE (t2)
4602 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)))
4603 return false;
4604
4605 /* For canonical type comparisons we do not want to build SCCs
4606 so we cannot compare pointed-to types. But we can, for now,
4607 require the same pointed-to type kind and match what
4608 useless_type_conversion_p would do. */
4609 if (POINTER_TYPE_P (t1))
4610 {
4611 /* If the two pointers have different ref-all attributes,
4612 they can't be the same type. */
4613 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
4614 return false;
4615
4616 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
4617 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
4618 return false;
4619
4620 if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
4621 return false;
4622
4623 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
4624 return false;
4625 }
4626
4627 /* Tail-recurse to components. */
4628 if (TREE_CODE (t1) == VECTOR_TYPE
4629 || TREE_CODE (t1) == COMPLEX_TYPE)
4630 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
4631 TREE_TYPE (t2));
4632
4633 return true;
4634 }
4635
4636 /* If their attributes are not the same they can't be the same type. */
4637 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
4638 return false;
4639
4640 /* Do type-specific comparisons. */
4641 switch (TREE_CODE (t1))
4642 {
4643 case ARRAY_TYPE:
4644 /* Array types are the same if the element types are the same and
4645 the number of elements is the same.  */
4646 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
4647 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
4648 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
4649 return false;
4650 else
4651 {
4652 tree i1 = TYPE_DOMAIN (t1);
4653 tree i2 = TYPE_DOMAIN (t2);
4654
4655 /* For an incomplete external array, the type domain can be
4656 NULL_TREE. Check this condition also. */
4657 if (i1 == NULL_TREE && i2 == NULL_TREE)
4658 return true;
4659 else if (i1 == NULL_TREE || i2 == NULL_TREE)
4660 return false;
4661 /* If for a complete array type the possibly gimplified sizes
4662 are different the types are different. */
4663 else if (((TYPE_SIZE (i1) != NULL) ^ (TYPE_SIZE (i2) != NULL))
4664 || (TYPE_SIZE (i1)
4665 && TYPE_SIZE (i2)
4666 && !operand_equal_p (TYPE_SIZE (i1), TYPE_SIZE (i2), 0)))
4667 return false;
4668 else
4669 {
4670 tree min1 = TYPE_MIN_VALUE (i1);
4671 tree min2 = TYPE_MIN_VALUE (i2);
4672 tree max1 = TYPE_MAX_VALUE (i1);
4673 tree max2 = TYPE_MAX_VALUE (i2);
4674
4675 /* The minimum/maximum values have to be the same. */
4676 if ((min1 == min2
4677 || (min1 && min2
4678 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
4679 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
4680 || operand_equal_p (min1, min2, 0))))
4681 && (max1 == max2
4682 || (max1 && max2
4683 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
4684 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
4685 || operand_equal_p (max1, max2, 0)))))
4686 return true;
4687 else
4688 return false;
4689 }
4690 }
4691
4692 case METHOD_TYPE:
4693 /* Method types should belong to the same class. */
4694 if (!gimple_canonical_types_compatible_p
4695 (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2)))
4696 return false;
4697
4698 /* Fallthru */
4699
4700 case FUNCTION_TYPE:
4701 /* Function types are the same if the return type and argument types
4702 are the same.  */
4703 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
4704 return false;
4705
4706 if (!comp_type_attributes (t1, t2))
4707 return false;
4708
4709 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
4710 return true;
4711 else
4712 {
4713 tree parms1, parms2;
4714
4715 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
4716 parms1 && parms2;
4717 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
4718 {
4719 if (!gimple_canonical_types_compatible_p
4720 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
4721 return false;
4722 }
4723
4724 if (parms1 || parms2)
4725 return false;
4726
4727 return true;
4728 }
4729
4730 case RECORD_TYPE:
4731 case UNION_TYPE:
4732 case QUAL_UNION_TYPE:
4733 {
4734 tree f1, f2;
4735
4736 /* For aggregate types, all the fields must be the same. */
4737 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
4738 f1 && f2;
4739 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
4740 {
4741 /* Skip non-fields. */
4742 while (f1 && TREE_CODE (f1) != FIELD_DECL)
4743 f1 = TREE_CHAIN (f1);
4744 while (f2 && TREE_CODE (f2) != FIELD_DECL)
4745 f2 = TREE_CHAIN (f2);
4746 if (!f1 || !f2)
4747 break;
4748 /* The fields must have the same name, offset and type. */
4749 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
4750 || !gimple_compare_field_offset (f1, f2)
4751 || !gimple_canonical_types_compatible_p
4752 (TREE_TYPE (f1), TREE_TYPE (f2)))
4753 return false;
4754 }
4755
4756 /* If one aggregate has more fields than the other, they
4757 are not the same. */
4758 if (f1 || f2)
4759 return false;
4760
4761 return true;
4762 }
4763
4764 default:
4765 gcc_unreachable ();
4766 }
4767 }
4768
4769
4770 /* Returns nonzero if P1 and P2 are equal. */
4771
4772 static int
4773 gimple_canonical_type_eq (const void *p1, const void *p2)
4774 {
4775 const_tree t1 = (const_tree) p1;
4776 const_tree t2 = (const_tree) p2;
4777 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
4778 CONST_CAST_TREE (t2));
4779 }
4780
4781 /* Register type T in the global type table gimple_types.
4782 If another type T', compatible with T, already existed in
4783 gimple_types then return T', otherwise return T. This is used by
4784 LTO to merge identical types read from different TUs.
4785
4786 ??? This merging does not exactly match how the tree.c middle-end
4787 functions will assign TYPE_CANONICAL when new types are created
4788 during optimization (which at least happens for pointer and array
4789 types). */
4790
4791 tree
4792 gimple_register_canonical_type (tree t)
4793 {
4794 void **slot;
4795
4796 gcc_assert (TYPE_P (t));
4797
4798 if (TYPE_CANONICAL (t))
4799 return TYPE_CANONICAL (t);
4800
4801 if (gimple_canonical_types == NULL)
4802 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
4803 gimple_canonical_type_eq, 0);
4804
4805 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
4806 if (*slot
4807 && *(tree *)slot != t)
4808 {
4809 tree new_type = (tree) *((tree *) slot);
4810
4811 TYPE_CANONICAL (t) = new_type;
4812 t = new_type;
4813 }
4814 else
4815 {
4816 TYPE_CANONICAL (t) = t;
4817 *slot = (void *) t;
4818 }
4819
4820 return t;
4821 }
4822
4823
4824 /* Show statistics on references to the global type table gimple_types. */
4825
4826 void
4827 print_gimple_types_stats (void)
4828 {
4829 if (gimple_types)
4830 fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
4831 "%ld searches, %ld collisions (ratio: %f)\n",
4832 (long) htab_size (gimple_types),
4833 (long) htab_elements (gimple_types),
4834 (long) gimple_types->searches,
4835 (long) gimple_types->collisions,
4836 htab_collisions (gimple_types));
4837 else
4838 fprintf (stderr, "GIMPLE type table is empty\n");
4839 if (type_hash_cache)
4840 fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
4841 "%ld searches, %ld collisions (ratio: %f)\n",
4842 (long) htab_size (type_hash_cache),
4843 (long) htab_elements (type_hash_cache),
4844 (long) type_hash_cache->searches,
4845 (long) type_hash_cache->collisions,
4846 htab_collisions (type_hash_cache));
4847 else
4848 fprintf (stderr, "GIMPLE type hash table is empty\n");
4849 if (gimple_canonical_types)
4850 fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
4851 "%ld searches, %ld collisions (ratio: %f)\n",
4852 (long) htab_size (gimple_canonical_types),
4853 (long) htab_elements (gimple_canonical_types),
4854 (long) gimple_canonical_types->searches,
4855 (long) gimple_canonical_types->collisions,
4856 htab_collisions (gimple_canonical_types));
4857 else
4858 fprintf (stderr, "GIMPLE canonical type table is empty\n");
4859 if (canonical_type_hash_cache)
4860 fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
4861 "%ld searches, %ld collisions (ratio: %f)\n",
4862 (long) htab_size (canonical_type_hash_cache),
4863 (long) htab_elements (canonical_type_hash_cache),
4864 (long) canonical_type_hash_cache->searches,
4865 (long) canonical_type_hash_cache->collisions,
4866 htab_collisions (canonical_type_hash_cache));
4867 else
4868 fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
4869 }
4870
4871 /* Free the gimple type hashtables used for LTO type merging. */
4872
4873 void
4874 free_gimple_type_tables (void)
4875 {
4876 /* Last chance to print stats for the tables. */
4877 if (flag_lto_report)
4878 print_gimple_types_stats ();
4879
4880 if (gimple_types)
4881 {
4882 htab_delete (gimple_types);
4883 gimple_types = NULL;
4884 }
4885 if (gimple_canonical_types)
4886 {
4887 htab_delete (gimple_canonical_types);
4888 gimple_canonical_types = NULL;
4889 }
4890 if (type_hash_cache)
4891 {
4892 htab_delete (type_hash_cache);
4893 type_hash_cache = NULL;
4894 }
4895 if (canonical_type_hash_cache)
4896 {
4897 htab_delete (canonical_type_hash_cache);
4898 canonical_type_hash_cache = NULL;
4899 }
4900 if (type_pair_cache)
4901 {
4902 free (type_pair_cache);
4903 type_pair_cache = NULL;
4904 }
4905 gimple_type_leader = NULL;
4906 }
4907
4908
4909 /* Return a type the same as TYPE except unsigned or
4910 signed according to UNSIGNEDP. */
4911
4912 static tree
4913 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
4914 {
4915 tree type1;
4916
4917 type1 = TYPE_MAIN_VARIANT (type);
4918 if (type1 == signed_char_type_node
4919 || type1 == char_type_node
4920 || type1 == unsigned_char_type_node)
4921 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4922 if (type1 == integer_type_node || type1 == unsigned_type_node)
4923 return unsignedp ? unsigned_type_node : integer_type_node;
4924 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
4925 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4926 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
4927 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4928 if (type1 == long_long_integer_type_node
4929 || type1 == long_long_unsigned_type_node)
4930 return unsignedp
4931 ? long_long_unsigned_type_node
4932 : long_long_integer_type_node;
4933 if (int128_integer_type_node && (type1 == int128_integer_type_node || type1 == int128_unsigned_type_node))
4934 return unsignedp
4935 ? int128_unsigned_type_node
4936 : int128_integer_type_node;
4937 #if HOST_BITS_PER_WIDE_INT >= 64
4938 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
4939 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4940 #endif
4941 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
4942 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4943 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
4944 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4945 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
4946 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4947 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
4948 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4949
4950 #define GIMPLE_FIXED_TYPES(NAME) \
4951 if (type1 == short_ ## NAME ## _type_node \
4952 || type1 == unsigned_short_ ## NAME ## _type_node) \
4953 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
4954 : short_ ## NAME ## _type_node; \
4955 if (type1 == NAME ## _type_node \
4956 || type1 == unsigned_ ## NAME ## _type_node) \
4957 return unsignedp ? unsigned_ ## NAME ## _type_node \
4958 : NAME ## _type_node; \
4959 if (type1 == long_ ## NAME ## _type_node \
4960 || type1 == unsigned_long_ ## NAME ## _type_node) \
4961 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
4962 : long_ ## NAME ## _type_node; \
4963 if (type1 == long_long_ ## NAME ## _type_node \
4964 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
4965 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
4966 : long_long_ ## NAME ## _type_node;
4967
4968 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
4969 if (type1 == NAME ## _type_node \
4970 || type1 == u ## NAME ## _type_node) \
4971 return unsignedp ? u ## NAME ## _type_node \
4972 : NAME ## _type_node;
4973
4974 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
4975 if (type1 == sat_ ## short_ ## NAME ## _type_node \
4976 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
4977 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
4978 : sat_ ## short_ ## NAME ## _type_node; \
4979 if (type1 == sat_ ## NAME ## _type_node \
4980 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
4981 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
4982 : sat_ ## NAME ## _type_node; \
4983 if (type1 == sat_ ## long_ ## NAME ## _type_node \
4984 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
4985 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
4986 : sat_ ## long_ ## NAME ## _type_node; \
4987 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
4988 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
4989 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
4990 : sat_ ## long_long_ ## NAME ## _type_node;
4991
4992 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
4993 if (type1 == sat_ ## NAME ## _type_node \
4994 || type1 == sat_ ## u ## NAME ## _type_node) \
4995 return unsignedp ? sat_ ## u ## NAME ## _type_node \
4996 : sat_ ## NAME ## _type_node;
4997
4998 GIMPLE_FIXED_TYPES (fract);
4999 GIMPLE_FIXED_TYPES_SAT (fract);
5000 GIMPLE_FIXED_TYPES (accum);
5001 GIMPLE_FIXED_TYPES_SAT (accum);
5002
5003 GIMPLE_FIXED_MODE_TYPES (qq);
5004 GIMPLE_FIXED_MODE_TYPES (hq);
5005 GIMPLE_FIXED_MODE_TYPES (sq);
5006 GIMPLE_FIXED_MODE_TYPES (dq);
5007 GIMPLE_FIXED_MODE_TYPES (tq);
5008 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
5009 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
5010 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
5011 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
5012 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
5013 GIMPLE_FIXED_MODE_TYPES (ha);
5014 GIMPLE_FIXED_MODE_TYPES (sa);
5015 GIMPLE_FIXED_MODE_TYPES (da);
5016 GIMPLE_FIXED_MODE_TYPES (ta);
5017 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
5018 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
5019 GIMPLE_FIXED_MODE_TYPES_SAT (da);
5020 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
5021
5022 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
5023 the precision; they have precision set to match their range, but
5024 may use a wider mode to match an ABI. If we change modes, we may
5025 wind up with bad conversions. For INTEGER_TYPEs in C, must check
5026 the precision as well, so as to yield correct results for
5027 bit-field types. C++ does not have these separate bit-field
5028 types, and producing a signed or unsigned variant of an
5029 ENUMERAL_TYPE may cause other problems as well. */
5030 if (!INTEGRAL_TYPE_P (type)
5031 || TYPE_UNSIGNED (type) == unsignedp)
5032 return type;
5033
5034 #define TYPE_OK(node) \
5035 (TYPE_MODE (type) == TYPE_MODE (node) \
5036 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
5037 if (TYPE_OK (signed_char_type_node))
5038 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
5039 if (TYPE_OK (integer_type_node))
5040 return unsignedp ? unsigned_type_node : integer_type_node;
5041 if (TYPE_OK (short_integer_type_node))
5042 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
5043 if (TYPE_OK (long_integer_type_node))
5044 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
5045 if (TYPE_OK (long_long_integer_type_node))
5046 return (unsignedp
5047 ? long_long_unsigned_type_node
5048 : long_long_integer_type_node);
5049 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
5050 return (unsignedp
5051 ? int128_unsigned_type_node
5052 : int128_integer_type_node);
5053
5054 #if HOST_BITS_PER_WIDE_INT >= 64
5055 if (TYPE_OK (intTI_type_node))
5056 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
5057 #endif
5058 if (TYPE_OK (intDI_type_node))
5059 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
5060 if (TYPE_OK (intSI_type_node))
5061 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
5062 if (TYPE_OK (intHI_type_node))
5063 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
5064 if (TYPE_OK (intQI_type_node))
5065 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
5066
5067 #undef GIMPLE_FIXED_TYPES
5068 #undef GIMPLE_FIXED_MODE_TYPES
5069 #undef GIMPLE_FIXED_TYPES_SAT
5070 #undef GIMPLE_FIXED_MODE_TYPES_SAT
5071 #undef TYPE_OK
5072
5073 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
5074 }
5075
5076
5077 /* Return an unsigned type the same as TYPE in other respects. */
5078
5079 tree
5080 gimple_unsigned_type (tree type)
5081 {
5082 return gimple_signed_or_unsigned_type (true, type);
5083 }
5084
5085
5086 /* Return a signed type the same as TYPE in other respects. */
5087
5088 tree
5089 gimple_signed_type (tree type)
5090 {
5091 return gimple_signed_or_unsigned_type (false, type);
5092 }
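/* For example, gimple_unsigned_type (integer_type_node) is
   unsigned_type_node and gimple_signed_type (unsigned_char_type_node)
   is signed_char_type_node.  An integral type matching none of the
   standard nodes in mode and precision, e.g. a bit-field type, falls
   through to build_nonstandard_integer_type.  */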
5093
5094
5095 /* Return the typed-based alias set for T, which may be an expression
5096 or a type. Return -1 if we don't do anything special. */
5097
5098 alias_set_type
5099 gimple_get_alias_set (tree t)
5100 {
5101 tree u;
5102
5103 /* Permit type-punning when accessing a union, provided the access
5104 is directly through the union. For example, this code does not
5105 permit taking the address of a union member and then storing
5106 through it. Even the type-punning allowed here is a GCC
5107 extension, albeit a common and useful one; the C standard says
5108 that such accesses have implementation-defined behavior. */
5109 for (u = t;
5110 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
5111 u = TREE_OPERAND (u, 0))
5112 if (TREE_CODE (u) == COMPONENT_REF
5113 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
5114 return 0;
5115
5116 /* That's all the expressions we handle specially. */
5117 if (!TYPE_P (t))
5118 return -1;
5119
5120 /* For convenience, follow the C standard when dealing with
5121 character types. Any object may be accessed via an lvalue that
5122 has character type. */
5123 if (t == char_type_node
5124 || t == signed_char_type_node
5125 || t == unsigned_char_type_node)
5126 return 0;
5127
5128 /* Allow aliasing between signed and unsigned variants of the same
5129 type. We treat the signed variant as canonical. */
5130 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
5131 {
5132 tree t1 = gimple_signed_type (t);
5133
5134 /* t1 == t can happen for boolean nodes which are always unsigned. */
5135 if (t1 != t)
5136 return get_alias_set (t1);
5137 }
5138
5139 return -1;
5140 }
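/* To illustrate the union rule above (a sketch):

     union u { int i; float f; } x;

     x.f = 1.0f;
     i = x.i;            direct access through the union: alias set 0
     p = &x.f;
     i = *(int *) p;     access through the member's address: not
                         covered by the exception above
*/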
5141
5142
5143 /* Data structure used to count the number of dereferences to PTR
5144 inside an expression. */
5145 struct count_ptr_d
5146 {
5147 tree ptr;
5148 unsigned num_stores;
5149 unsigned num_loads;
5150 };
5151
5152 /* Helper for count_uses_and_derefs.  Called by walk_tree to look for
5153 MEM_REF nodes whose base is the pointer passed in DATA.  */
5154
5155 static tree
5156 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
5157 {
5158 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
5159 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
5160
5161 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
5162 pointer 'ptr' is *not* dereferenced, it is simply used to compute
5163 the address of 'fld' as 'ptr + offsetof(fld)'. */
5164 if (TREE_CODE (*tp) == ADDR_EXPR)
5165 {
5166 *walk_subtrees = 0;
5167 return NULL_TREE;
5168 }
5169
5170 if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
5171 {
5172 if (wi_p->is_lhs)
5173 count_p->num_stores++;
5174 else
5175 count_p->num_loads++;
5176 }
5177
5178 return NULL_TREE;
5179 }
5180
5181 /* Count the number of direct and indirect uses for pointer PTR in
5182 statement STMT. The number of direct uses is stored in
5183 *NUM_USES_P. Indirect references are counted separately depending
5184 on whether they are store or load operations. The counts are
5185 stored in *NUM_STORES_P and *NUM_LOADS_P. */
5186
5187 void
5188 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
5189 unsigned *num_loads_p, unsigned *num_stores_p)
5190 {
5191 ssa_op_iter i;
5192 tree use;
5193
5194 *num_uses_p = 0;
5195 *num_loads_p = 0;
5196 *num_stores_p = 0;
5197
5198 /* Find out the total number of uses of PTR in STMT. */
5199 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
5200 if (use == ptr)
5201 (*num_uses_p)++;
5202
5203 /* Now count the number of indirect references to PTR.  This is
5204 truly awful, but we don't have much choice.  There are no parent
5205 pointers inside MEM_REFs, so an expression like
5206 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
5207 find all the indirect and direct uses of x_1 inside.  The only
5208 shortcut we can take is the fact that GIMPLE only allows
5209 MEM_REFs inside the expressions below.  */
5210 if (is_gimple_assign (stmt)
5211 || gimple_code (stmt) == GIMPLE_RETURN
5212 || gimple_code (stmt) == GIMPLE_ASM
5213 || is_gimple_call (stmt))
5214 {
5215 struct walk_stmt_info wi;
5216 struct count_ptr_d count;
5217
5218 count.ptr = ptr;
5219 count.num_stores = 0;
5220 count.num_loads = 0;
5221
5222 memset (&wi, 0, sizeof (wi));
5223 wi.info = &count;
5224 walk_gimple_op (stmt, count_ptr_derefs, &wi);
5225
5226 *num_stores_p = count.num_stores;
5227 *num_loads_p = count.num_loads;
5228 }
5229
5230 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
5231 }
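/* For the example above, '*x_1 = foo (x_1, *x_1)' computes
   *NUM_USES_P == 3 (each appearance of x_1), *NUM_STORES_P == 1
   (the store through *x_1 on the LHS) and *NUM_LOADS_P == 1
   (the load through *x_1 in the argument list).  */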
5232
5233 /* From a tree operand OP return the base of a load or store operation
5234 or NULL_TREE if OP is not a load or a store. */
5235
5236 static tree
5237 get_base_loadstore (tree op)
5238 {
5239 while (handled_component_p (op))
5240 op = TREE_OPERAND (op, 0);
5241 if (DECL_P (op)
5242 || INDIRECT_REF_P (op)
5243 || TREE_CODE (op) == MEM_REF
5244 || TREE_CODE (op) == TARGET_MEM_REF)
5245 return op;
5246 return NULL_TREE;
5247 }
5248
5249 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
5250 VISIT_ADDR, if non-NULL, on load, store and address-taken operands,
5251 passing the STMT, the base of the operand and DATA to them.  The base
5252 will be either a decl, an indirect reference (including TARGET_MEM_REF)
5253 or the argument of an address expression.
5254 Returns the results of these callbacks or'ed. */
5255
5256 bool
5257 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
5258 bool (*visit_load)(gimple, tree, void *),
5259 bool (*visit_store)(gimple, tree, void *),
5260 bool (*visit_addr)(gimple, tree, void *))
5261 {
5262 bool ret = false;
5263 unsigned i;
5264 if (gimple_assign_single_p (stmt))
5265 {
5266 tree lhs, rhs;
5267 if (visit_store)
5268 {
5269 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
5270 if (lhs)
5271 ret |= visit_store (stmt, lhs, data);
5272 }
5273 rhs = gimple_assign_rhs1 (stmt);
5274 while (handled_component_p (rhs))
5275 rhs = TREE_OPERAND (rhs, 0);
5276 if (visit_addr)
5277 {
5278 if (TREE_CODE (rhs) == ADDR_EXPR)
5279 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5280 else if (TREE_CODE (rhs) == TARGET_MEM_REF
5281 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
5282 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
5283 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
5284 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
5285 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
5286 0), data);
5287 else if (TREE_CODE (rhs) == CONSTRUCTOR)
5288 {
5289 unsigned int ix;
5290 tree val;
5291
5292 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
5293 if (TREE_CODE (val) == ADDR_EXPR)
5294 ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
5295 else if (TREE_CODE (val) == OBJ_TYPE_REF
5296 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
5297 ret |= visit_addr (stmt,
5298 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
5299 0), data);
5300 }
5301 lhs = gimple_assign_lhs (stmt);
5302 if (TREE_CODE (lhs) == TARGET_MEM_REF
5303 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
5304 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
5305 }
5306 if (visit_load)
5307 {
5308 rhs = get_base_loadstore (rhs);
5309 if (rhs)
5310 ret |= visit_load (stmt, rhs, data);
5311 }
5312 }
5313 else if (visit_addr
5314 && (is_gimple_assign (stmt)
5315 || gimple_code (stmt) == GIMPLE_COND))
5316 {
5317 for (i = 0; i < gimple_num_ops (stmt); ++i)
5318 {
5319 tree op = gimple_op (stmt, i);
5320 if (op == NULL_TREE)
5321 ;
5322 else if (TREE_CODE (op) == ADDR_EXPR)
5323 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5324 /* COND_EXPR and VCOND_EXPR rhs1 argument is a comparison
5325 tree with two operands. */
5326 else if (i == 1 && COMPARISON_CLASS_P (op))
5327 {
5328 if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
5329 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
5330 0), data);
5331 if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
5332 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
5333 0), data);
5334 }
5335 }
5336 }
5337 else if (is_gimple_call (stmt))
5338 {
5339 if (visit_store)
5340 {
5341 tree lhs = gimple_call_lhs (stmt);
5342 if (lhs)
5343 {
5344 lhs = get_base_loadstore (lhs);
5345 if (lhs)
5346 ret |= visit_store (stmt, lhs, data);
5347 }
5348 }
5349 if (visit_load || visit_addr)
5350 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5351 {
5352 tree rhs = gimple_call_arg (stmt, i);
5353 if (visit_addr
5354 && TREE_CODE (rhs) == ADDR_EXPR)
5355 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5356 else if (visit_load)
5357 {
5358 rhs = get_base_loadstore (rhs);
5359 if (rhs)
5360 ret |= visit_load (stmt, rhs, data);
5361 }
5362 }
5363 if (visit_addr
5364 && gimple_call_chain (stmt)
5365 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
5366 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
5367 data);
5368 if (visit_addr
5369 && gimple_call_return_slot_opt_p (stmt)
5370 && gimple_call_lhs (stmt) != NULL_TREE
5371 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
5372 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
5373 }
5374 else if (gimple_code (stmt) == GIMPLE_ASM)
5375 {
5376 unsigned noutputs;
5377 const char *constraint;
5378 const char **oconstraints;
5379 bool allows_mem, allows_reg, is_inout;
5380 noutputs = gimple_asm_noutputs (stmt);
5381 oconstraints = XALLOCAVEC (const char *, noutputs);
5382 if (visit_store || visit_addr)
5383 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
5384 {
5385 tree link = gimple_asm_output_op (stmt, i);
5386 tree op = get_base_loadstore (TREE_VALUE (link));
5387 if (op && visit_store)
5388 ret |= visit_store (stmt, op, data);
5389 if (visit_addr)
5390 {
5391 constraint = TREE_STRING_POINTER
5392 (TREE_VALUE (TREE_PURPOSE (link)));
5393 oconstraints[i] = constraint;
5394 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5395 &allows_reg, &is_inout);
5396 if (op && !allows_reg && allows_mem)
5397 ret |= visit_addr (stmt, op, data);
5398 }
5399 }
5400 if (visit_load || visit_addr)
5401 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
5402 {
5403 tree link = gimple_asm_input_op (stmt, i);
5404 tree op = TREE_VALUE (link);
5405 if (visit_addr
5406 && TREE_CODE (op) == ADDR_EXPR)
5407 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5408 else if (visit_load || visit_addr)
5409 {
5410 op = get_base_loadstore (op);
5411 if (op)
5412 {
5413 if (visit_load)
5414 ret |= visit_load (stmt, op, data);
5415 if (visit_addr)
5416 {
5417 constraint = TREE_STRING_POINTER
5418 (TREE_VALUE (TREE_PURPOSE (link)));
5419 parse_input_constraint (&constraint, 0, 0, noutputs,
5420 0, oconstraints,
5421 &allows_mem, &allows_reg);
5422 if (!allows_reg && allows_mem)
5423 ret |= visit_addr (stmt, op, data);
5424 }
5425 }
5426 }
5427 }
5428 }
5429 else if (gimple_code (stmt) == GIMPLE_RETURN)
5430 {
5431 tree op = gimple_return_retval (stmt);
5432 if (op)
5433 {
5434 if (visit_addr
5435 && TREE_CODE (op) == ADDR_EXPR)
5436 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5437 else if (visit_load)
5438 {
5439 op = get_base_loadstore (op);
5440 if (op)
5441 ret |= visit_load (stmt, op, data);
5442 }
5443 }
5444 }
5445 else if (visit_addr
5446 && gimple_code (stmt) == GIMPLE_PHI)
5447 {
5448 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
5449 {
5450 tree op = PHI_ARG_DEF (stmt, i);
5451 if (TREE_CODE (op) == ADDR_EXPR)
5452 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5453 }
5454 }
5455
5456 return ret;
5457 }
5458
5459 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
5460 should make a faster clone for this case. */
5461
5462 bool
5463 walk_stmt_load_store_ops (gimple stmt, void *data,
5464 bool (*visit_load)(gimple, tree, void *),
5465 bool (*visit_store)(gimple, tree, void *))
5466 {
5467 return walk_stmt_load_store_addr_ops (stmt, data,
5468 visit_load, visit_store, NULL);
5469 }
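/* A minimal usage sketch; note_load is a hypothetical visit_load
   callback recording the UID of every decl loaded from:

     static bool
     note_load (gimple stmt ATTRIBUTE_UNUSED, tree base, void *data)
     {
       if (DECL_P (base))
         bitmap_set_bit ((bitmap) data, DECL_UID (base));
       return false;
     }

     ...
     walk_stmt_load_store_ops (stmt, loaded_uids, note_load, NULL);

   gimple_ior_addresses_taken_1 below plays the same role for the
   visit_addr callback of the full walker.  */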
5470
5471 /* Helper for gimple_ior_addresses_taken_1. */
5472
5473 static bool
5474 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
5475 tree addr, void *data)
5476 {
5477 bitmap addresses_taken = (bitmap)data;
5478 addr = get_base_address (addr);
5479 if (addr
5480 && DECL_P (addr))
5481 {
5482 bitmap_set_bit (addresses_taken, DECL_UID (addr));
5483 return true;
5484 }
5485 return false;
5486 }
5487
5488 /* Set the bit for the uid of all decls that have their address taken
5489 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
5490 were any in this stmt. */
5491
5492 bool
5493 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
5494 {
5495 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
5496 gimple_ior_addresses_taken_1);
5497 }
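/* A minimal usage sketch collecting the address-taken decls of a
   basic block BB (the bitmap and iterator are local to the sketch):

     bitmap taken = BITMAP_ALLOC (NULL);
     gimple_stmt_iterator gsi;
     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       gimple_ior_addresses_taken (taken, gsi_stmt (gsi));
*/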
5498
5499
5500 /* Return a printable name for symbol DECL. */
5501
5502 const char *
5503 gimple_decl_printable_name (tree decl, int verbosity)
5504 {
5505 if (!DECL_NAME (decl))
5506 return NULL;
5507
5508 if (DECL_ASSEMBLER_NAME_SET_P (decl))
5509 {
5510 const char *str, *mangled_str;
5511 int dmgl_opts = DMGL_NO_OPTS;
5512
5513 if (verbosity >= 2)
5514 {
5515 dmgl_opts = DMGL_VERBOSE
5516 | DMGL_ANSI
5517 | DMGL_GNU_V3
5518 | DMGL_RET_POSTFIX;
5519 if (TREE_CODE (decl) == FUNCTION_DECL)
5520 dmgl_opts |= DMGL_PARAMS;
5521 }
5522
5523 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5524 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
5525 return (str) ? str : mangled_str;
5526 }
5527
5528 return IDENTIFIER_POINTER (DECL_NAME (decl));
5529 }
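/* For example, for a C++ function 'void foo (int)' with assembler name
   "_Z3fooi", verbosity >= 2 enables DMGL_PARAMS and is expected to
   yield "foo(int)", while lower verbosity invokes the demangler
   without options and yields plain "foo" (a sketch; the exact output
   is up to cplus_demangle_v3).  */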
5530
5531 /* Return true when STMT is a call to the built-in function CODE.  */
5532
5533 bool
5534 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
5535 {
5536 tree fndecl;
5537 return (is_gimple_call (stmt)
5538 && (fndecl = gimple_call_fndecl (stmt)) != NULL
5539 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5540 && DECL_FUNCTION_CODE (fndecl) == code);
5541 }
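/* For example (a sketch):

     if (gimple_call_builtin_p (stmt, BUILT_IN_MEMSET)
         && integer_zerop (gimple_call_arg (stmt, 1)))
       ...   handle memset (dest, 0, len)

   Indirect calls and machine-specific builtins do not qualify, since
   gimple_call_fndecl must be non-NULL and of class BUILT_IN_NORMAL.  */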
5542
5543 /* Return true if STMT clobbers memory. STMT is required to be a
5544 GIMPLE_ASM. */
5545
5546 bool
5547 gimple_asm_clobbers_memory_p (const_gimple stmt)
5548 {
5549 unsigned i;
5550
5551 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
5552 {
5553 tree op = gimple_asm_clobber_op (stmt, i);
5554 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
5555 return true;
5556 }
5557
5558 return false;
5559 }
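/* For example, gimple_asm_clobbers_memory_p holds for the classic
   compiler barrier

     __asm__ __volatile__ ("" : : : "memory");

   since its clobber list contains the string "memory".  */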
5560 #include "gt-gimple.h"