/* Gimple IR support functions.

   Copyright 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "target.h"
#include "tree.h"
#include "ggc.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "gimple.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "value-prof.h"
#include "flags.h"
#include "alias.h"
#include "demangle.h"
#include "langhooks.h"

/* Global type table.  FIXME lto, it should be possible to re-use some
   of the type hashing routines in tree.c (type_hash_canon, type_hash_lookup,
   etc), but those assume that types were built with the various
   build_*_type routines which is not the case with the streamer.  */
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_types;
static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
  htab_t gimple_canonical_types;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t type_hash_cache;
static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
  htab_t canonical_type_hash_cache;

/* All the tuples have their operand vector (if present) at the very bottom
   of the structure.  Therefore, the offset required to find the
   operands vector is the size of the structure minus the size of the
   1-element tree array at the end (see gimple_ops).  */
#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
  (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
EXPORTED_CONST size_t gimple_ops_offset_[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT
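
/* As an illustrative example: for a tuple whose layout structure (the
   STRUCT column of gsstruct.def) is gimple_statement_with_ops, the
   entry above works out to

     sizeof (struct gimple_statement_with_ops) - sizeof (tree)

   so that adding the offset to the tuple's address lands exactly on
   op[0] of the trailing operand array (see gimple_ops in gimple.h).  */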

#define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
static const size_t gsstruct_code_size[] = {
#include "gsstruct.def"
};
#undef DEFGSSTRUCT

#define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
const char *const gimple_code_name[] = {
#include "gimple.def"
};
#undef DEFGSCODE

#define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
#include "gimple.def"
};
#undef DEFGSCODE

/* Gimple stats.  */

int gimple_alloc_counts[(int) gimple_alloc_kind_all];
int gimple_alloc_sizes[(int) gimple_alloc_kind_all];

/* Keep in sync with gimple.h:enum gimple_alloc_kind.  */
static const char * const gimple_alloc_kind_names[] = {
    "assignments",
    "phi nodes",
    "conditionals",
    "everything else"
};

/* Private API manipulation functions shared only with some
   other files.  */
extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);

/* Gimple tuple constructors.
   Note: Any constructor taking a ``gimple_seq'' as a parameter can
   be passed a NULL to start with an empty sequence.  */

/* Set the code for statement G to CODE.  */

static inline void
gimple_set_code (gimple g, enum gimple_code code)
{
  g->gsbase.code = code;
}

/* Return the number of bytes needed to hold a GIMPLE statement with
   code CODE.  */

static inline size_t
gimple_size (enum gimple_code code)
{
  return gsstruct_code_size[gss_for_code (code)];
}

/* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
   operands.  */

gimple
gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
{
  size_t size;
  gimple stmt;

  size = gimple_size (code);
  if (num_ops > 0)
    size += sizeof (tree) * (num_ops - 1);

  if (GATHER_STATISTICS)
    {
      enum gimple_alloc_kind kind = gimple_alloc_kind (code);
      gimple_alloc_counts[(int) kind]++;
      gimple_alloc_sizes[(int) kind] += size;
    }

  stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
  gimple_set_code (stmt, code);
  gimple_set_num_ops (stmt, num_ops);

  /* Do not call gimple_set_modified here as it has other side
     effects and this tuple is still not completely built.  */
  stmt->gsbase.modified = 1;
  gimple_init_singleton (stmt);

  return stmt;
}

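/* A worked sizing example: a GIMPLE_ASSIGN with 3 operands (an LHS plus
   a binary RHS) uses a GSS_WITH_MEM_OPS layout, so the allocation above
   is gimple_size (GIMPLE_ASSIGN) plus 2 * sizeof (tree), the two
   operands beyond the 1-element array already counted in the structure
   size.  */
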
/* Set SUBCODE to be the code of the expression computed by statement G.  */

static inline void
gimple_set_subcode (gimple g, unsigned subcode)
{
  /* We only have 16 bits for the RHS code.  Assert that we are not
     overflowing it.  */
  gcc_assert (subcode < (1 << 16));
  g->gsbase.subcode = subcode;
}



/* Build a tuple with operands.  CODE is the statement to build (which
   must be one of the GIMPLE_WITH_OPS tuples).  SUBCODE is the sub-code
   for the new tuple.  NUM_OPS is the number of operands to allocate.  */

#define gimple_build_with_ops(c, s, n) \
  gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)

static gimple
gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
                            unsigned num_ops MEM_STAT_DECL)
{
  gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
  gimple_set_subcode (s, subcode);

  return s;
}


/* Build a GIMPLE_RETURN statement returning RETVAL.  */

gimple
gimple_build_return (tree retval)
{
  gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
  if (retval)
    gimple_return_set_retval (s, retval);
  return s;
}

/* Reset alias information on call S.  */

void
gimple_call_reset_alias_info (gimple s)
{
  if (gimple_call_flags (s) & ECF_CONST)
    memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_use_set (s));
  if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
    memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
  else
    pt_solution_reset (gimple_call_clobber_set (s));
}

/* Helper for gimple_build_call, gimple_build_call_valist,
   gimple_build_call_vec and gimple_build_call_from_tree.  Build the basic
   components of a GIMPLE_CALL statement to function FN with NARGS
   arguments.  */

static inline gimple
gimple_build_call_1 (tree fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  if (TREE_CODE (fn) == FUNCTION_DECL)
    fn = build_fold_addr_expr (fn);
  gimple_set_op (s, 1, fn);
  gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
{
  unsigned i;
  unsigned nargs = VEC_length (tree, args);
  gimple call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  The ... are the arguments.  */

gimple
gimple_build_call (tree fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}
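
/* A minimal usage sketch (FNDECL, A, B and LHS are hypothetical trees;
   the arguments must already be valid GIMPLE values):

     gimple call = gimple_build_call (fndecl, 2, a, b);
     gimple_call_set_lhs (call, lhs);

   gimple_call_set_lhs is provided by gimple.h.  */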


/* Build a GIMPLE_CALL statement to function FN.  NARGS is the number of
   arguments.  AP contains the arguments.  */

gimple
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
  gimple call;
  unsigned i;

  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));

  call = gimple_build_call_1 (fn, nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));

  return call;
}


/* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
   Build the basic components of a GIMPLE_CALL statement to internal
   function FN with NARGS arguments.  */

static inline gimple
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
  gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
  s->gsbase.subcode |= GF_CALL_INTERNAL;
  gimple_call_set_internal_fn (s, fn);
  gimple_call_reset_alias_info (s);
  return s;
}


/* Build a GIMPLE_CALL statement to internal function FN.  NARGS is
   the number of arguments.  The ... are the arguments.  */

gimple
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
  va_list ap;
  gimple call;
  unsigned i;

  call = gimple_build_call_internal_1 (fn, nargs);
  va_start (ap, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, va_arg (ap, tree));
  va_end (ap);

  return call;
}


/* Build a GIMPLE_CALL statement to internal function FN with the arguments
   specified in vector ARGS.  */

gimple
gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
{
  unsigned i, nargs;
  gimple call;

  nargs = VEC_length (tree, args);
  call = gimple_build_call_internal_1 (fn, nargs);
  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, VEC_index (tree, args, i));

  return call;
}


/* Build a GIMPLE_CALL statement from CALL_EXPR T.  Note that T is
   assumed to be in GIMPLE form already.  Minimal checking is done of
   this fact.  */

gimple
gimple_build_call_from_tree (tree t)
{
  unsigned i, nargs;
  gimple call;
  tree fndecl = get_callee_fndecl (t);

  gcc_assert (TREE_CODE (t) == CALL_EXPR);

  nargs = call_expr_nargs (t);
  call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);

  for (i = 0; i < nargs; i++)
    gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));

  gimple_set_block (call, TREE_BLOCK (t));

  /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL.  */
  gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
  gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
  gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
  else
    gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
  gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
  gimple_call_set_nothrow (call, TREE_NOTHROW (t));
  gimple_set_no_warning (call, TREE_NO_WARNING (t));

  return call;
}


/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
                         tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}


/* Build a GIMPLE_ASSIGN statement.

   LHS is the left-hand side of the assignment.
   RHS is the right-hand side, which can be a single value or a unary,
   binary or ternary expression.  */

gimple
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
  enum tree_code subcode;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
  return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
                                            PASS_MEM_STAT);
}


/* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
   OP1, OP2 and OP3.  If OP2 and OP3 are NULL then SUBCODE must be of
   class GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS.  */

gimple
gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
                                   tree op2, tree op3 MEM_STAT_DECL)
{
  unsigned num_ops;
  gimple p;

  /* Need 1 operand for the LHS and 1 to 3 for the RHS (depending on the
     code).  */
  num_ops = get_gimple_rhs_num_ops (subcode) + 1;

  p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
                                  PASS_MEM_STAT);
  gimple_assign_set_lhs (p, lhs);
  gimple_assign_set_rhs1 (p, op1);
  if (op2)
    {
      gcc_assert (num_ops > 2);
      gimple_assign_set_rhs2 (p, op2);
    }

  if (op3)
    {
      gcc_assert (num_ops > 3);
      gimple_assign_set_rhs3 (p, op3);
    }

  return p;
}


/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}
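
/* A minimal usage sketch, assuming this runs inside a gimplification
   context (see push_gimplify_context) and DST/SRC are valid trees:

     gimple_seq seq = NULL;
     gimple stmt = gimplify_assign (dst, src, &seq);

   Statements needed to gimplify DST and SRC, if any, end up in SEQ
   ahead of the final assignment.  */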


/* Build a GIMPLE_COND statement.

   PRED is the condition used to compare LHS and the RHS.
   T_LABEL is the label to jump to if the condition is true.
   F_LABEL is the label to jump to otherwise.  */

gimple
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
                   tree t_label, tree f_label)
{
  gimple p;

  gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
  p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
  gimple_cond_set_lhs (p, lhs);
  gimple_cond_set_rhs (p, rhs);
  gimple_cond_set_true_label (p, t_label);
  gimple_cond_set_false_label (p, f_label);
  return p;
}
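
/* A minimal usage sketch (X, Y, LABEL_A and LABEL_B are hypothetical;
   the labels may also be NULL_TREE when the CFG will carry the edges):

     gimple cond = gimple_build_cond (LT_EXPR, x, y, label_a, label_b);

   which represents "if (x < y) goto label_a; else goto label_b;".  */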


/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}


/* Build a GIMPLE_COND statement from the conditional expression tree
   COND.  T_LABEL and F_LABEL are as in gimple_build_cond.  */

gimple
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  return gimple_build_cond (code, lhs, rhs, t_label, f_label);
}

/* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
   boolean expression tree COND.  */

void
gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
{
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
  gimple_cond_set_condition (stmt, code, lhs, rhs);
}

/* Build a GIMPLE_LABEL statement for LABEL.  */

gimple
gimple_build_label (tree label)
{
  gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
  gimple_label_set_label (p, label);
  return p;
}

/* Build a GIMPLE_GOTO statement to label DEST.  */

gimple
gimple_build_goto (tree dest)
{
  gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
  gimple_goto_set_dest (p, dest);
  return p;
}


/* Build a GIMPLE_NOP statement.  */

gimple
gimple_build_nop (void)
{
  return gimple_alloc (GIMPLE_NOP, 0);
}


/* Build a GIMPLE_BIND statement.
   VARS are the variables in BODY.
   BLOCK is the containing block.  */

gimple
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
  gimple p = gimple_alloc (GIMPLE_BIND, 0);
  gimple_bind_set_vars (p, vars);
  if (body)
    gimple_bind_set_body (p, body);
  if (block)
    gimple_bind_set_block (p, block);
  return p;
}

/* Helper function to set the simple fields of an asm stmt.

   STRING is a pointer to a string that is the asm block's assembly code.
   NINPUTS is the number of register inputs.
   NOUTPUTS is the number of register outputs.
   NCLOBBERS is the number of clobbered registers.
   NLABELS is the number of labels.  */

static inline gimple
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
                    unsigned nclobbers, unsigned nlabels)
{
  gimple p;
  int size = strlen (string);

  /* ASMs with labels cannot have outputs.  This should have been
     enforced by the front end.  */
  gcc_assert (nlabels == 0 || noutputs == 0);

  p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
                             ninputs + noutputs + nclobbers + nlabels);

  p->gimple_asm.ni = ninputs;
  p->gimple_asm.no = noutputs;
  p->gimple_asm.nc = nclobbers;
  p->gimple_asm.nl = nlabels;
  p->gimple_asm.string = ggc_alloc_string (string, size);

  if (GATHER_STATISTICS)
    gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;

  return p;
}

/* Build a GIMPLE_ASM statement.

   STRING is the assembly code.
   INPUTS is a vector of the input register parameters.
   OUTPUTS is a vector of the output register parameters.
   CLOBBERS is a vector of the clobbered register parameters.
   LABELS is a vector of destination labels.  */

gimple
gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
                      VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
                      VEC(tree,gc)* labels)
{
  gimple p;
  unsigned i;

  p = gimple_build_asm_1 (string,
                          VEC_length (tree, inputs),
                          VEC_length (tree, outputs),
                          VEC_length (tree, clobbers),
                          VEC_length (tree, labels));

  for (i = 0; i < VEC_length (tree, inputs); i++)
    gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));

  for (i = 0; i < VEC_length (tree, outputs); i++)
    gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));

  for (i = 0; i < VEC_length (tree, clobbers); i++)
    gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));

  for (i = 0; i < VEC_length (tree, labels); i++)
    gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));

  return p;
}
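
/* Note on the expected shape of the vector elements: each input or
   output is a TREE_LIST whose TREE_VALUE is the operand itself and
   whose TREE_PURPOSE is another TREE_LIST carrying the constraint
   STRING_CST, i.e. the constraint text is reached as

     TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)))

   as done in walk_gimple_asm below.  */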

/* Build a GIMPLE_CATCH statement.

   TYPES are the catch types.
   HANDLER is the exception handler.  */

gimple
gimple_build_catch (tree types, gimple_seq handler)
{
  gimple p = gimple_alloc (GIMPLE_CATCH, 0);
  gimple_catch_set_types (p, types);
  if (handler)
    gimple_catch_set_handler (p, handler);

  return p;
}

/* Build a GIMPLE_EH_FILTER statement.

   TYPES are the filter's types.
   FAILURE is the filter's failure action.  */

gimple
gimple_build_eh_filter (tree types, gimple_seq failure)
{
  gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
  gimple_eh_filter_set_types (p, types);
  if (failure)
    gimple_eh_filter_set_failure (p, failure);

  return p;
}

/* Build a GIMPLE_EH_MUST_NOT_THROW statement.  */

gimple
gimple_build_eh_must_not_throw (tree decl)
{
  gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);

  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
  gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
  gimple_eh_must_not_throw_set_fndecl (p, decl);

  return p;
}

/* Build a GIMPLE_EH_ELSE statement.  */

gimple
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
  gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
  gimple_eh_else_set_n_body (p, n_body);
  gimple_eh_else_set_e_body (p, e_body);
  return p;
}

/* Build a GIMPLE_TRY statement.

   EVAL is the expression to evaluate.
   CLEANUP is the cleanup expression.
   KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
   whether this is a try/catch or a try/finally respectively.  */

gimple
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
                  enum gimple_try_flags kind)
{
  gimple p;

  gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
  p = gimple_alloc (GIMPLE_TRY, 0);
  gimple_set_subcode (p, kind);
  if (eval)
    gimple_try_set_eval (p, eval);
  if (cleanup)
    gimple_try_set_cleanup (p, cleanup);

  return p;
}

/* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.

   CLEANUP is the cleanup expression.  */

gimple
gimple_build_wce (gimple_seq cleanup)
{
  gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
  if (cleanup)
    gimple_wce_set_cleanup (p, cleanup);

  return p;
}


/* Build a GIMPLE_RESX statement.  */

gimple
gimple_build_resx (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}


/* The helper for constructing a gimple switch statement.
   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the default.
   DEFAULT_LABEL is the default label for the switch statement.  */

gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
  /* nlabels + 1 default label + 1 index.  */
  gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
                                    1 + (default_label != NULL) + nlabels);
  gimple_switch_set_index (p, index);
  if (default_label)
    gimple_switch_set_default_label (p, default_label);
  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
   ... are the labels excluding the default.  */

gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
  va_list al;
  unsigned i, offset;
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Store the rest of the labels.  */
  va_start (al, default_label);
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, va_arg (al, tree));
  va_end (al);

  return p;
}


/* Build a GIMPLE_SWITCH statement.

   INDEX is the switch's index.
   DEFAULT_LABEL is the default label.
   ARGS is a vector of labels excluding the default.  */

gimple
gimple_build_switch_vec (tree index, tree default_label, VEC(tree, heap) *args)
{
  unsigned i, offset, nlabels = VEC_length (tree, args);
  gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);

  /* Copy the labels from the vector to the switch statement.  */
  offset = (default_label != NULL);
  for (i = 0; i < nlabels; i++)
    gimple_switch_set_label (p, i + offset, VEC_index (tree, args, i));

  return p;
}
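
/* A minimal usage sketch, assuming CASE_A and CASE_B are
   CASE_LABEL_EXPR trees (e.g. built with build_case_label) and INDEX
   and DEFAULT_LABEL are valid:

     VEC(tree,heap) *labels = VEC_alloc (tree, heap, 2);
     VEC_quick_push (tree, labels, case_a);
     VEC_quick_push (tree, labels, case_b);
     gimple s = gimple_build_switch_vec (index, default_label, labels);

   The vector is only read, not freed, by the constructor above.  */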

/* Build a GIMPLE_EH_DISPATCH statement.  */

gimple
gimple_build_eh_dispatch (int region)
{
  gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
  p->gimple_eh_ctrl.region = region;
  return p;
}

/* Build a new GIMPLE_DEBUG_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_bind_set_var (p, var);
  gimple_debug_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.

   VAR is bound to VALUE; block and location are taken from STMT.  */

gimple
gimple_build_debug_source_bind_stat (tree var, tree value,
                                     gimple stmt MEM_STAT_DECL)
{
  gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
                                         (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
                                         PASS_MEM_STAT);

  gimple_debug_source_bind_set_var (p, var);
  gimple_debug_source_bind_set_value (p, value);
  if (stmt)
    {
      gimple_set_block (p, gimple_block (stmt));
      gimple_set_location (p, gimple_location (stmt));
    }

  return p;
}


/* Build a GIMPLE_OMP_CRITICAL statement.

   BODY is the sequence of statements for which only one thread can execute.
   NAME is an optional identifier for this critical block.  */

gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
  gimple_omp_critical_set_name (p, name);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}

/* Build a GIMPLE_OMP_FOR statement.

   BODY is the sequence of statements inside the for loop.
   CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
   lastprivate, reductions, ordered, schedule, and nowait.
   COLLAPSE is the collapse count.
   PRE_BODY is the sequence of statements that are loop invariant.  */

gimple
gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
                      gimple_seq pre_body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_for_set_clauses (p, clauses);
  p->gimple_omp_for.collapse = collapse;
  p->gimple_omp_for.iter
      = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
  if (pre_body)
    gimple_omp_for_set_pre_body (p, pre_body);

  return p;
}


/* Build a GIMPLE_OMP_PARALLEL statement.

   BODY is the sequence of statements which are executed in parallel.
   CLAUSES are the OMP parallel construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).  */

gimple
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
                           tree data_arg)
{
  gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_parallel_set_clauses (p, clauses);
  gimple_omp_parallel_set_child_fn (p, child_fn);
  gimple_omp_parallel_set_data_arg (p, data_arg);

  return p;
}


/* Build a GIMPLE_OMP_TASK statement.

   BODY is the sequence of statements which are executed by the explicit task.
   CLAUSES are the OMP task construct's clauses.
   CHILD_FN is the function created for the parallel threads to execute.
   DATA_ARG are the shared data argument(s).
   COPY_FN is the optional function for firstprivate initialization.
   ARG_SIZE and ARG_ALIGN are size and alignment of the data block.  */

gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
                       tree data_arg, tree copy_fn, tree arg_size,
                       tree arg_align)
{
  gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_task_set_clauses (p, clauses);
  gimple_omp_task_set_child_fn (p, child_fn);
  gimple_omp_task_set_data_arg (p, data_arg);
  gimple_omp_task_set_copy_fn (p, copy_fn);
  gimple_omp_task_set_arg_size (p, arg_size);
  gimple_omp_task_set_arg_align (p, arg_align);

  return p;
}


/* Build a GIMPLE_OMP_SECTION statement for a sections statement.

   BODY is the sequence of statements in the section.  */

gimple
gimple_build_omp_section (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_MASTER statement.

   BODY is the sequence of statements to be executed by just the master.  */

gimple
gimple_build_omp_master (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_CONTINUE statement.

   CONTROL_DEF is the definition of the control variable.
   CONTROL_USE is the use of the control variable.  */

gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
  gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
  gimple_omp_continue_set_control_def (p, control_def);
  gimple_omp_continue_set_control_use (p, control_use);
  return p;
}

/* Build a GIMPLE_OMP_ORDERED statement.

   BODY is the sequence of statements inside a loop that will be
   executed in sequence.  */

gimple
gimple_build_omp_ordered (gimple_seq body)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
  if (body)
    gimple_omp_set_body (p, body);

  return p;
}


/* Build a GIMPLE_OMP_RETURN statement.
   WAIT_P is true if this is a non-waiting return.  */

gimple
gimple_build_omp_return (bool wait_p)
{
  gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
  if (wait_p)
    gimple_omp_return_set_nowait (p);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS statement.

   BODY is a sequence of section statements.
   CLAUSES are any of the OMP sections construct's clauses: private,
   firstprivate, lastprivate, reduction, and nowait.  */

gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_sections_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_SECTIONS_SWITCH.  */

gimple
gimple_build_omp_sections_switch (void)
{
  return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
}


/* Build a GIMPLE_OMP_SINGLE statement.

   BODY is the sequence of statements that will be executed once.
   CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
   copyprivate, nowait.  */

gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
  gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
  if (body)
    gimple_omp_set_body (p, body);
  gimple_omp_single_set_clauses (p, clauses);

  return p;
}


/* Build a GIMPLE_OMP_ATOMIC_LOAD statement.  */

gimple
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
  gimple_omp_atomic_load_set_lhs (p, lhs);
  gimple_omp_atomic_load_set_rhs (p, rhs);
  return p;
}

/* Build a GIMPLE_OMP_ATOMIC_STORE statement.

   VAL is the value we are storing.  */

gimple
gimple_build_omp_atomic_store (tree val)
{
  gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
  gimple_omp_atomic_store_set_val (p, val);
  return p;
}

/* Build a GIMPLE_TRANSACTION statement.  */

gimple
gimple_build_transaction (gimple_seq body, tree label)
{
  gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
  gimple_transaction_set_body (p, body);
  gimple_transaction_set_label (p, label);
  return p;
}

/* Build a GIMPLE_PREDICT statement.  PREDICTOR is one of the predictors from
   predict.def, OUTCOME is NOT_TAKEN or TAKEN.  */

gimple
gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
{
  gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
  /* Ensure all the predictors fit into the lower bits of the subcode.  */
  gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
  gimple_predict_set_predictor (p, predictor);
  gimple_predict_set_outcome (p, outcome);
  return p;
}

#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
                     const char *function, enum gimple_code code,
                     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
                  gimple_code_name[code],
                  tree_code_name[subcode],
                  gimple_code_name[gimple_code (gs)],
                  gs->gsbase.subcode > 0
                    ? tree_code_name[gs->gsbase.subcode]
                    : "",
                  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */


/* Link gimple statement GS to the end of the sequence *SEQ_P.  If
   *SEQ_P is NULL, a new sequence is allocated.  */

void
gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_stmt_iterator si;
  if (gs == NULL)
    return;

  si = gsi_last (*seq_p);
  gsi_insert_after (&si, gs, GSI_NEW_STMT);
}


/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  */

void
gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;
  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
}
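
/* A minimal usage sketch of the two helpers above, starting from an
   empty (NULL) sequence (OTHER_SEQ is a hypothetical, possibly NULL,
   sequence):

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_nop ());
     gimple_seq_add_seq (&seq, other_seq);

   Both helpers allocate the sequence on first use.  */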


/* Helper function of empty_body_p.  Return true if STMT is an empty
   statement.  */

static bool
empty_stmt_p (gimple stmt)
{
  if (gimple_code (stmt) == GIMPLE_NOP)
    return true;
  if (gimple_code (stmt) == GIMPLE_BIND)
    return empty_body_p (gimple_bind_body (stmt));
  return false;
}


/* Return true if BODY contains nothing but empty statements.  */

bool
empty_body_p (gimple_seq body)
{
  gimple_stmt_iterator i;

  if (gimple_seq_empty_p (body))
    return true;
  for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
    if (!empty_stmt_p (gsi_stmt (i))
        && !is_gimple_debug (gsi_stmt (i)))
      return false;

  return true;
}


/* Perform a deep copy of sequence SRC and return the result.  */

gimple_seq
gimple_seq_copy (gimple_seq src)
{
  gimple_stmt_iterator gsi;
  gimple_seq new_seq = NULL;
  gimple stmt;

  for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gimple_copy (gsi_stmt (gsi));
      gimple_seq_add_stmt (&new_seq, stmt);
    }

  return new_seq;
}


/* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
   on each one.  WI is as in walk_gimple_stmt.

   If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
   value is stored in WI->CALLBACK_RESULT.  Also, the statement that
   produced the value is returned if this statement has not been
   removed by a callback (wi->removed_stmt).  If the statement has
   been removed, NULL is returned.

   Otherwise, all the statements are walked and NULL returned.  */

gimple
walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
                     walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
    {
      tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
      if (ret)
        {
          /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
             to hold it.  */
          gcc_assert (wi);
          wi->callback_result = ret;

          return wi->removed_stmt ? NULL : gsi_stmt (gsi);
        }

      if (!wi->removed_stmt)
        gsi_next (&gsi);
    }

  if (wi)
    wi->callback_result = NULL_TREE;

  return NULL;
}


/* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
   changed by the callbacks.  */

gimple
walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
                 walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple_seq seq2 = seq;
  gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
  gcc_assert (seq2 == seq);
  return ret;
}
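
/* A minimal callback sketch for walk_gimple_seq: count the call
   statements in SEQ.  The callback leaves *HANDLED_OPS false so the
   walker still descends into substatements (e.g. GIMPLE_BIND bodies);
   COUNT_CALLS_CB and the counter are hypothetical names:

     static tree
     count_calls_cb (gimple_stmt_iterator *gsi, bool *handled_ops,
                     struct walk_stmt_info *wi)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
         ++*(unsigned *) wi->info;
       *handled_ops = false;
       return NULL_TREE;
     }

     unsigned num_calls = 0;
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &num_calls;
     walk_gimple_seq (seq, count_calls_cb, NULL, &wi);  */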


/* Helper function for walk_gimple_stmt.  Walk operands of a GIMPLE_ASM.  */

static tree
walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
                 struct walk_stmt_info *wi)
{
  tree ret, op;
  unsigned noutputs;
  const char **oconstraints;
  unsigned i, n;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  if (wi)
    wi->is_lhs = true;

  for (i = 0; i < noutputs; i++)
    {
      op = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
                               &is_inout);
      if (wi)
        wi->val_only = (allows_reg || !allows_mem);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  n = gimple_asm_ninputs (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                              oconstraints, &allows_mem, &allows_reg);
      if (wi)
        {
          wi->val_only = (allows_reg || !allows_mem);
          /* Although input "m" is not really an LHS, we need an lvalue.  */
          wi->is_lhs = !wi->val_only;
        }
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  if (wi)
    {
      wi->is_lhs = false;
      wi->val_only = true;
    }

  n = gimple_asm_nlabels (stmt);
  for (i = 0; i < n; i++)
    {
      op = gimple_asm_label_op (stmt, i);
      ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
      if (ret)
        return ret;
    }

  return NULL_TREE;
}


/* Helper function of WALK_GIMPLE_STMT.  Walk every tree operand in
   STMT.  CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.

   CALLBACK_OP is called on each operand of STMT via walk_tree.
   Additional parameters to walk_tree must be stored in WI.  For each operand
   OP, walk_tree is called as:

       walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)

   If CALLBACK_OP returns non-NULL for an operand, the remaining
   operands are not scanned.

   The return value is that returned by the last call to walk_tree, or
   NULL_TREE if no CALLBACK_OP is specified.  */

tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
                struct walk_stmt_info *wi)
{
  struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
  unsigned i;
  tree ret = NULL_TREE;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      /* Walk the RHS operands.  If the LHS is of a non-renamable type or
         is a register variable, we may use a COMPONENT_REF on the RHS.  */
      if (wi)
        {
          tree lhs = gimple_assign_lhs (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
              || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
        }

      for (i = 1; i < gimple_num_ops (stmt); i++)
        {
          ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      /* Walk the LHS.  If the RHS is appropriate for a memory, we
         may use a COMPONENT_REF on the LHS.  */
      if (wi)
        {
          /* If the RHS is of a non-renamable type or is a register variable,
             we may use a COMPONENT_REF on the LHS.  */
          tree rhs1 = gimple_assign_rhs1 (stmt);
          wi->val_only
            = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
              || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
          wi->is_lhs = true;
        }

      ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
      if (ret)
        return ret;

      if (wi)
        {
          wi->val_only = true;
          wi->is_lhs = false;
        }
      break;

    case GIMPLE_CALL:
      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }

      ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
      if (ret)
        return ret;

      for (i = 0; i < gimple_call_num_args (stmt); i++)
        {
          if (wi)
            wi->val_only
              = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
          ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
                           pset);
          if (ret)
            return ret;
        }

      if (gimple_call_lhs (stmt))
        {
          if (wi)
            {
              wi->is_lhs = true;
              wi->val_only
                = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
            }

          ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
          if (ret)
            return ret;
        }

      if (wi)
        {
          wi->is_lhs = false;
          wi->val_only = true;
        }
      break;

    case GIMPLE_CATCH:
      ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_ASM:
      ret = walk_gimple_asm (stmt, callback_op, wi);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CONTINUE:
      ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
                       callback_op, wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_CRITICAL:
      ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
        {
          ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
                           wi, pset);
          if (ret)
            return ret;
          ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
                           wi, pset);
        }
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_PARALLEL:
      ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_TASK:
      ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_SECTIONS:
      ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;

      break;

    case GIMPLE_OMP_SINGLE:
      ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_LOAD:
      ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;

      ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
                       pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_OMP_ATOMIC_STORE:
      ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
                       wi, pset);
      if (ret)
        return ret;
      break;

      /* Tuples that do not have operands.  */
    case GIMPLE_NOP:
    case GIMPLE_RESX:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_PREDICT:
      break;

    default:
      {
        enum gimple_statement_structure_enum gss;
        gss = gimple_statement_structure (stmt);
        if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
          for (i = 0; i < gimple_num_ops (stmt); i++)
            {
              ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
              if (ret)
                return ret;
            }
      }
      break;
    }

  return NULL_TREE;
}


/* Walk the current statement in GSI (optionally using traversal state
   stored in WI).  If WI is NULL, no state is kept during traversal.
   The callback CALLBACK_STMT is called.  If CALLBACK_STMT indicates
   that it has handled all the operands of the statement, its return
   value is returned.  Otherwise, the return value from CALLBACK_STMT
   is discarded and its operands are scanned.

   If CALLBACK_STMT is NULL or it didn't handle the operands,
   CALLBACK_OP is called on each operand of the statement via
   walk_gimple_op.  If walk_gimple_op returns non-NULL for any
   operand, the remaining operands are not scanned.  In this case, the
   return value from CALLBACK_OP is returned.

   In any other case, NULL_TREE is returned.  */

tree
walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
                  walk_tree_fn callback_op, struct walk_stmt_info *wi)
{
  gimple ret;
  tree tree_ret;
  gimple stmt = gsi_stmt (*gsi);

  if (wi)
    {
      wi->gsi = *gsi;
      wi->removed_stmt = false;

      if (wi->want_locations && gimple_has_location (stmt))
        input_location = gimple_location (stmt);
    }

  ret = NULL;

  /* Invoke the statement callback.  Return if the callback handled
     all of STMT operands by itself.  */
  if (callback_stmt)
    {
      bool handled_ops = false;
      tree_ret = callback_stmt (gsi, &handled_ops, wi);
      if (handled_ops)
        return tree_ret;

      /* If CALLBACK_STMT did not handle operands, it should not have
         a value to return.  */
      gcc_assert (tree_ret == NULL);

      if (wi && wi->removed_stmt)
        return NULL;

      /* Re-read stmt in case the callback changed it.  */
      stmt = gsi_stmt (*gsi);
    }

  /* If CALLBACK_OP is defined, invoke it on every operand of STMT.  */
  if (callback_op)
    {
      tree_ret = walk_gimple_op (stmt, callback_op, wi);
      if (tree_ret)
        return tree_ret;
    }

  /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_CATCH:
      ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_FILTER:
      ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_EH_ELSE:
      ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRY:
      ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
                                 wi);
      if (ret)
        return wi->callback_result;

      ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_OMP_FOR:
      ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;

      /* FALL THROUGH.  */
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_WITH_CLEANUP_EXPR:
      ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
                                 callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    case GIMPLE_TRANSACTION:
      ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
                                 callback_stmt, callback_op, wi);
      if (ret)
        return wi->callback_result;
      break;

    default:
      gcc_assert (!gimple_has_substatements (stmt));
      break;
    }

  return NULL;
}


/* Set sequence SEQ to be the GIMPLE body for function FNDECL.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FNDECL.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has a GIMPLE body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Return true if calls C1 and C2 are known to go to the same function.  */

bool
gimple_call_same_target_p (const_gimple c1, const_gimple c2)
{
  if (gimple_call_internal_p (c1))
    return (gimple_call_internal_p (c2)
            && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
  else
    return (gimple_call_fn (c1) == gimple_call_fn (c2)
            || (gimple_call_fndecl (c1)
                && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
}

/* Detect flags from a GIMPLE_CALL.  This is just like
   call_expr_flags, but for gimple tuples.  */

int
gimple_call_flags (const_gimple stmt)
{
  int flags;
  tree decl = gimple_call_fndecl (stmt);

  if (decl)
    flags = flags_from_decl_or_type (decl);
  else if (gimple_call_internal_p (stmt))
    flags = internal_fn_flags (gimple_call_internal_fn (stmt));
  else
    flags = flags_from_decl_or_type (gimple_call_fntype (stmt));

  if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
    flags |= ECF_NOTHROW;

  return flags;
}

/* Return the "fn spec" string for call STMT.  */

static tree
gimple_call_fnspec (const_gimple stmt)
{
  tree type, attr;

  type = gimple_call_fntype (stmt);
  if (!type)
    return NULL_TREE;

  attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
  if (!attr)
    return NULL_TREE;

  return TREE_VALUE (TREE_VALUE (attr));
}

/* Detects argument flags for argument number ARG on call STMT.  */

int
gimple_call_arg_flags (const_gimple stmt, unsigned arg)
{
  tree attr = gimple_call_fnspec (stmt);

  if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
    return 0;

  switch (TREE_STRING_POINTER (attr)[1 + arg])
    {
    case 'x':
    case 'X':
      return EAF_UNUSED;

    case 'R':
      return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'r':
      return EAF_NOCLOBBER | EAF_NOESCAPE;

    case 'W':
      return EAF_DIRECT | EAF_NOESCAPE;

    case 'w':
      return EAF_NOESCAPE;

    case '.':
    default:
      return 0;
    }
}

/* Detects return flags for the call STMT.  */

int
gimple_call_return_flags (const_gimple stmt)
{
  tree attr;

  if (gimple_call_flags (stmt) & ECF_MALLOC)
    return ERF_NOALIAS;

  attr = gimple_call_fnspec (stmt);
  if (!attr || TREE_STRING_LENGTH (attr) < 1)
    return 0;

  switch (TREE_STRING_POINTER (attr)[0])
    {
    case '1':
    case '2':
    case '3':
    case '4':
      return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');

    case 'm':
      return ERF_NOALIAS;

    case '.':
    default:
      return 0;
    }
}
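
/* Illustrative decoding, tying the two functions above together: for
   a function type whose "fn spec" attribute string is "1w", character
   0 ('1') makes gimple_call_return_flags return ERF_RETURNS_ARG with
   argument index 0, and character 1 ('w') makes
   gimple_call_arg_flags (stmt, 0) return EAF_NOESCAPE.  */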


/* Return true if GS is a copy assignment.  */

bool
gimple_assign_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
          && is_gimple_val (gimple_op (gs, 1)));
}


/* Return true if GS is a SSA_NAME copy assignment.  */

bool
gimple_assign_ssa_name_copy_p (gimple gs)
{
  return (gimple_assign_single_p (gs)
          && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
          && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
}


/* Return true if GS is an assignment with a unary RHS, but the
   operator has no effect on the assigned value.  The logic is adapted
   from STRIP_NOPS.  This predicate is intended to be used in tuplifying
   instances in which STRIP_NOPS was previously applied to the RHS of
   an assignment.

   NOTE: In the use cases that led to the creation of this function
   and of gimple_assign_single_p, it is typical to test for either
   condition and to proceed in the same manner.  In each case, the
   assigned value is represented by the single RHS operand of the
   assignment.  I suspect there may be cases where gimple_assign_copy_p,
   gimple_assign_single_p, or equivalent logic is used where a similar
   treatment of unary NOPs is appropriate.  */

bool
gimple_assign_unary_nop_p (gimple gs)
{
  return (is_gimple_assign (gs)
          && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
              || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
          && gimple_assign_rhs1 (gs) != error_mark_node
          && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
              == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
}

2053 /* Set BB to be the basic block holding statement STMT. */
2054
2055 void
2056 gimple_set_bb (gimple stmt, basic_block bb)
2057 {
2058 stmt->gsbase.bb = bb;
2059
2060 /* If the statement is a label, add the label to block-to-labels map
2061 so that we can speed up edge creation for GIMPLE_GOTOs. */
2062 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2063 {
2064 tree t;
2065 int uid;
2066
2067 t = gimple_label_label (stmt);
2068 uid = LABEL_DECL_UID (t);
2069 if (uid == -1)
2070 {
2071 unsigned old_len = VEC_length (basic_block, label_to_block_map);
2072 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2073 if (old_len <= (unsigned) uid)
2074 {
2075 unsigned new_len = 3 * uid / 2 + 1;
2076
2077 VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
2078 new_len);
2079 }
2080 }
2081
2082 VEC_replace (basic_block, label_to_block_map, uid, bb);
2083 }
2084 }
2085
2086
2087 /* Modify the RHS of the assignment pointed-to by GSI using the
2088 operands in the expression tree EXPR.
2089
2090 NOTE: The statement pointed-to by GSI may be reallocated if it
2091 did not have enough operand slots.
2092
2093 This function is useful to convert an existing tree expression into
2094 the flat representation used for the RHS of a GIMPLE assignment.
2095 It will reallocate memory as needed to expand or shrink the number
2096 of operand slots needed to represent EXPR.
2097
2098 NOTE: If you find yourself building a tree and then calling this
2099 function, you are most certainly doing it the slow way. It is much
2100 better to build a new assignment or to use the function
2101 gimple_assign_set_rhs_with_ops, which does not require an
2102 expression tree to be built. */
2103
2104 void
2105 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2106 {
2107 enum tree_code subcode;
2108 tree op1, op2, op3;
2109
2110 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2111 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
2112 }
2113
2114
2115 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2116 operands OP1, OP2 and OP3.
2117
2118 NOTE: The statement pointed-to by GSI may be reallocated if it
2119 did not have enough operand slots. */
2120
2121 void
2122 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2123 tree op1, tree op2, tree op3)
2124 {
2125 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2126 gimple stmt = gsi_stmt (*gsi);
2127
2128 /* If the new CODE needs more operands, allocate a new statement. */
2129 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2130 {
2131 tree lhs = gimple_assign_lhs (stmt);
2132 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2133 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2134 gimple_init_singleton (new_stmt);
2135 gsi_replace (gsi, new_stmt, true);
2136 stmt = new_stmt;
2137
2138 /* The LHS needs to be reset as this also changes the SSA name
2139 on the LHS. */
2140 gimple_assign_set_lhs (stmt, lhs);
2141 }
2142
2143 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2144 gimple_set_subcode (stmt, code);
2145 gimple_assign_set_rhs1 (stmt, op1);
2146 if (new_rhs_ops > 1)
2147 gimple_assign_set_rhs2 (stmt, op2);
2148 if (new_rhs_ops > 2)
2149 gimple_assign_set_rhs3 (stmt, op3);
2150 }
2151
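/* Illustrative sketch, not from the original sources: a typical use
   of the above from a pass that rewrites "x * 2" into "x + x" at the
   statement pointed-to by GSI, using the two-operand
   gimple_assign_set_rhs_with_ops wrapper from gimple.h.  */
#if 0
static void
fold_double_example (gimple_stmt_iterator *gsi)
{
  tree op = gimple_assign_rhs1 (gsi_stmt (*gsi));

  /* Changing the operand layout may reallocate the statement, so it
     must be re-fetched from GSI before further use.  */
  gimple_assign_set_rhs_with_ops (gsi, PLUS_EXPR, op, op);
  update_stmt (gsi_stmt (*gsi));
}
#endif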
2152
2153 /* Return the LHS of a statement that performs an assignment,
2154 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2155 for a call to a function that returns no value, or for a
2156 statement other than an assignment or a call. */
2157
2158 tree
2159 gimple_get_lhs (const_gimple stmt)
2160 {
2161 enum gimple_code code = gimple_code (stmt);
2162
2163 if (code == GIMPLE_ASSIGN)
2164 return gimple_assign_lhs (stmt);
2165 else if (code == GIMPLE_CALL)
2166 return gimple_call_lhs (stmt);
2167 else
2168 return NULL_TREE;
2169 }
2170
2171
2172 /* Set the LHS of a statement that performs an assignment,
2173 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2174
2175 void
2176 gimple_set_lhs (gimple stmt, tree lhs)
2177 {
2178 enum gimple_code code = gimple_code (stmt);
2179
2180 if (code == GIMPLE_ASSIGN)
2181 gimple_assign_set_lhs (stmt, lhs);
2182 else if (code == GIMPLE_CALL)
2183 gimple_call_set_lhs (stmt, lhs);
2184 else
2185 gcc_unreachable();
2186 }
2187
2188 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2189 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2190 expression with a different value.
2191
2192 This will update any annotations (say debug bind stmts) referring
2193 to the original LHS, so that they use the RHS instead. This is
2194 done even if NLHS and LHS are the same, for it is understood that
2195 the RHS will be modified afterwards, and NLHS will not be assigned
2196 an equivalent value.
2197
2198 Adjusting any non-annotation uses of the LHS, if needed, is a
2199 responsibility of the caller.
2200
2201 The effect of this call should be pretty much the same as that of
2202 inserting a copy of STMT before STMT, and then removing the
2203 original stmt, at which time gsi_remove() would have updated the
2204 annotations, but using this function saves all the inserting,
2205 copying and removing. */
2206
2207 void
2208 gimple_replace_lhs (gimple stmt, tree nlhs)
2209 {
2210 if (MAY_HAVE_DEBUG_STMTS)
2211 {
2212 tree lhs = gimple_get_lhs (stmt);
2213
2214 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2215
2216 insert_debug_temp_for_var_def (NULL, lhs);
2217 }
2218
2219 gimple_set_lhs (stmt, nlhs);
2220 }
2221
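/* Illustrative sketch, not from the original sources: rewriting the
   RHS of "x_1 = a + b" into "a * b" while letting the function above
   preserve debug annotations for the old value.  The LHS is assumed
   to be an SSA name defined by the statement.  */
#if 0
static void
replace_lhs_example (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Detach debug uses of the old value first, even though the LHS
     itself stays the same.  */
  gimple_replace_lhs (stmt, gimple_get_lhs (stmt));
  gimple_assign_set_rhs_with_ops (gsi, MULT_EXPR,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));
  update_stmt (gsi_stmt (*gsi));
}
#endif
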
2222 /* Return a deep copy of statement STMT. All the operands from STMT
2223 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2224 and VUSE operand arrays are set to empty in the new copy. The new
2225 copy isn't part of any sequence. */
2226
2227 gimple
2228 gimple_copy (gimple stmt)
2229 {
2230 enum gimple_code code = gimple_code (stmt);
2231 unsigned num_ops = gimple_num_ops (stmt);
2232 gimple copy = gimple_alloc (code, num_ops);
2233 unsigned i;
2234
2235 /* Shallow copy all the fields from STMT. */
2236 memcpy (copy, stmt, gimple_size (code));
2237 gimple_init_singleton (copy);
2238
2239 /* If STMT has sub-statements, deep-copy them as well. */
2240 if (gimple_has_substatements (stmt))
2241 {
2242 gimple_seq new_seq;
2243 tree t;
2244
2245 switch (gimple_code (stmt))
2246 {
2247 case GIMPLE_BIND:
2248 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2249 gimple_bind_set_body (copy, new_seq);
2250 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2251 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2252 break;
2253
2254 case GIMPLE_CATCH:
2255 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2256 gimple_catch_set_handler (copy, new_seq);
2257 t = unshare_expr (gimple_catch_types (stmt));
2258 gimple_catch_set_types (copy, t);
2259 break;
2260
2261 case GIMPLE_EH_FILTER:
2262 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2263 gimple_eh_filter_set_failure (copy, new_seq);
2264 t = unshare_expr (gimple_eh_filter_types (stmt));
2265 gimple_eh_filter_set_types (copy, t);
2266 break;
2267
2268 case GIMPLE_EH_ELSE:
2269 new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
2270 gimple_eh_else_set_n_body (copy, new_seq);
2271 new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
2272 gimple_eh_else_set_e_body (copy, new_seq);
2273 break;
2274
2275 case GIMPLE_TRY:
2276 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2277 gimple_try_set_eval (copy, new_seq);
2278 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2279 gimple_try_set_cleanup (copy, new_seq);
2280 break;
2281
2282 case GIMPLE_OMP_FOR:
2283 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2284 gimple_omp_for_set_pre_body (copy, new_seq);
2285 t = unshare_expr (gimple_omp_for_clauses (stmt));
2286 gimple_omp_for_set_clauses (copy, t);
2287 copy->gimple_omp_for.iter
2288 = ggc_alloc_vec_gimple_omp_for_iter
2289 (gimple_omp_for_collapse (stmt));
2290 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2291 {
2292 gimple_omp_for_set_cond (copy, i,
2293 gimple_omp_for_cond (stmt, i));
2294 gimple_omp_for_set_index (copy, i,
2295 gimple_omp_for_index (stmt, i));
2296 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2297 gimple_omp_for_set_initial (copy, i, t);
2298 t = unshare_expr (gimple_omp_for_final (stmt, i));
2299 gimple_omp_for_set_final (copy, i, t);
2300 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2301 gimple_omp_for_set_incr (copy, i, t);
2302 }
2303 goto copy_omp_body;
2304
2305 case GIMPLE_OMP_PARALLEL:
2306 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2307 gimple_omp_parallel_set_clauses (copy, t);
2308 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2309 gimple_omp_parallel_set_child_fn (copy, t);
2310 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2311 gimple_omp_parallel_set_data_arg (copy, t);
2312 goto copy_omp_body;
2313
2314 case GIMPLE_OMP_TASK:
2315 t = unshare_expr (gimple_omp_task_clauses (stmt));
2316 gimple_omp_task_set_clauses (copy, t);
2317 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2318 gimple_omp_task_set_child_fn (copy, t);
2319 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2320 gimple_omp_task_set_data_arg (copy, t);
2321 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2322 gimple_omp_task_set_copy_fn (copy, t);
2323 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2324 gimple_omp_task_set_arg_size (copy, t);
2325 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2326 gimple_omp_task_set_arg_align (copy, t);
2327 goto copy_omp_body;
2328
2329 case GIMPLE_OMP_CRITICAL:
2330 t = unshare_expr (gimple_omp_critical_name (stmt));
2331 gimple_omp_critical_set_name (copy, t);
2332 goto copy_omp_body;
2333
2334 case GIMPLE_OMP_SECTIONS:
2335 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2336 gimple_omp_sections_set_clauses (copy, t);
2337 t = unshare_expr (gimple_omp_sections_control (stmt));
2338 gimple_omp_sections_set_control (copy, t);
2339 /* FALLTHRU */
2340
2341 case GIMPLE_OMP_SINGLE:
2342 case GIMPLE_OMP_SECTION:
2343 case GIMPLE_OMP_MASTER:
2344 case GIMPLE_OMP_ORDERED:
2345 copy_omp_body:
2346 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2347 gimple_omp_set_body (copy, new_seq);
2348 break;
2349
2350 case GIMPLE_TRANSACTION:
2351 new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
2352 gimple_transaction_set_body (copy, new_seq);
2353 break;
2354
2355 case GIMPLE_WITH_CLEANUP_EXPR:
2356 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2357 gimple_wce_set_cleanup (copy, new_seq);
2358 break;
2359
2360 default:
2361 gcc_unreachable ();
2362 }
2363 }
2364
2365 /* Make copy of operands. */
2366 if (num_ops > 0)
2367 {
2368 for (i = 0; i < num_ops; i++)
2369 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2370
2371 /* Clear out SSA operand vectors on COPY. */
2372 if (gimple_has_ops (stmt))
2373 {
2374 gimple_set_def_ops (copy, NULL);
2375 gimple_set_use_ops (copy, NULL);
2376 }
2377
2378 if (gimple_has_mem_ops (stmt))
2379 {
2380 gimple_set_vdef (copy, gimple_vdef (stmt));
2381 gimple_set_vuse (copy, gimple_vuse (stmt));
2382 }
2383
2384 /* SSA operands need to be updated. */
2385 gimple_set_modified (copy, true);
2386 }
2387
2388 return copy;
2389 }
2390
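/* Illustrative sketch, not from the original sources: duplicating a
   statement and inserting the copy right before the original.  The
   copy is marked modified above, so its SSA operands are recomputed
   when it is added to a sequence.  */
#if 0
static void
duplicate_stmt_example (gimple_stmt_iterator *gsi)
{
  gimple copy = gimple_copy (gsi_stmt (*gsi));
  gsi_insert_before (gsi, copy, GSI_SAME_STMT);
}
#endif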
2391
2392 /* Return true if statement S has side-effects. We consider a
2393 statement to have side effects if:
2394
2395 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2396 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2397
2398 bool
2399 gimple_has_side_effects (const_gimple s)
2400 {
2401 if (is_gimple_debug (s))
2402 return false;
2403
2404 /* We don't have to scan the arguments to check for volatile
2405 arguments; the gimple_has_volatile_ops flag checked below is
2406 set when the statement's operands are scanned. */
2407 if (gimple_has_volatile_ops (s))
2408 return true;
2409
2410 if (gimple_code (s) == GIMPLE_ASM
2411 && gimple_asm_volatile_p (s))
2412 return true;
2413
2414 if (is_gimple_call (s))
2415 {
2416 int flags = gimple_call_flags (s);
2417
2418 /* An infinite loop is considered a side effect. */
2419 if (!(flags & (ECF_CONST | ECF_PURE))
2420 || (flags & ECF_LOOPING_CONST_OR_PURE))
2421 return true;
2422
2423 return false;
2424 }
2425
2426 return false;
2427 }
2428
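/* Illustrative sketch, not from the original sources: by the rules
   above, a call to a function declared __attribute__((pure)) has no
   side effects, while an unannotated call does.  PURE_CALL is assumed
   to have no volatile operands and not to be marked
   ECF_LOOPING_CONST_OR_PURE.  */
#if 0
static void
side_effects_example (gimple pure_call, gimple plain_call)
{
  gcc_assert (!gimple_has_side_effects (pure_call));
  gcc_assert (gimple_has_side_effects (plain_call));
}
#endif
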
2429 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2430 Return true if S can trap. When INCLUDE_MEM is true, check whether
2431 the memory operations could trap. When INCLUDE_STORES is true and
2432 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2433
2434 bool
2435 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2436 {
2437 tree t, div = NULL_TREE;
2438 enum tree_code op;
2439
2440 if (include_mem)
2441 {
2442 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2443
2444 for (i = start; i < gimple_num_ops (s); i++)
2445 if (tree_could_trap_p (gimple_op (s, i)))
2446 return true;
2447 }
2448
2449 switch (gimple_code (s))
2450 {
2451 case GIMPLE_ASM:
2452 return gimple_asm_volatile_p (s);
2453
2454 case GIMPLE_CALL:
2455 t = gimple_call_fndecl (s);
2456 /* Assume that calls to weak functions may trap. */
2457 if (!t || !DECL_P (t) || DECL_WEAK (t))
2458 return true;
2459 return false;
2460
2461 case GIMPLE_ASSIGN:
2462 t = gimple_expr_type (s);
2463 op = gimple_assign_rhs_code (s);
2464 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2465 div = gimple_assign_rhs2 (s);
2466 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2467 (INTEGRAL_TYPE_P (t)
2468 && TYPE_OVERFLOW_TRAPS (t)),
2469 div));
2470
2471 default:
2472 break;
2473 }
2474
2475 return false;
2476 }
2477
2478 /* Return true if statement S can trap. */
2479
2480 bool
2481 gimple_could_trap_p (gimple s)
2482 {
2483 return gimple_could_trap_p_1 (s, true, true);
2484 }
2485
2486 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2487
2488 bool
2489 gimple_assign_rhs_could_trap_p (gimple s)
2490 {
2491 gcc_assert (is_gimple_assign (s));
2492 return gimple_could_trap_p_1 (s, true, false);
2493 }
2494
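/* Illustrative sketch, not from the original sources: an integer
   division "z = x / y" can trap because Y may be zero, whereas an
   addition on wrapping (unsigned) integers cannot.  DIV_STMT and
   PLUS_STMT are assumed to be such GIMPLE_ASSIGNs with no trapping
   memory operands.  */
#if 0
static void
trap_example (gimple div_stmt, gimple plus_stmt)
{
  gcc_assert (gimple_could_trap_p (div_stmt));
  gcc_assert (!gimple_could_trap_p (plus_stmt));
}
#endif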
2495
2496 /* Print statistics for the GIMPLE statements generated. */
2497
2498 void
2499 dump_gimple_statistics (void)
2500 {
2501 int i, total_tuples = 0, total_bytes = 0;
2502
2503 if (! GATHER_STATISTICS)
2504 {
2505 fprintf (stderr, "No gimple statistics\n");
2506 return;
2507 }
2508
2509 fprintf (stderr, "\nGIMPLE statements\n");
2510 fprintf (stderr, "Kind Stmts Bytes\n");
2511 fprintf (stderr, "---------------------------------------\n");
2512 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2513 {
2514 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2515 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2516 total_tuples += gimple_alloc_counts[i];
2517 total_bytes += gimple_alloc_sizes[i];
2518 }
2519 fprintf (stderr, "---------------------------------------\n");
2520 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2521 fprintf (stderr, "---------------------------------------\n");
2522 }
2523
2524
2525 /* Return the number of operands needed on the RHS of a GIMPLE
2526 assignment for an expression with tree code CODE. */
2527
2528 unsigned
2529 get_gimple_rhs_num_ops (enum tree_code code)
2530 {
2531 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2532
2533 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2534 return 1;
2535 else if (rhs_class == GIMPLE_BINARY_RHS)
2536 return 2;
2537 else if (rhs_class == GIMPLE_TERNARY_RHS)
2538 return 3;
2539 else
2540 gcc_unreachable ();
2541 }
2542
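/* Illustrative examples, not from the original sources, of the
   mapping implemented via the table below.  */
#if 0
static void
rhs_num_ops_example (void)
{
  gcc_assert (get_gimple_rhs_num_ops (NEGATE_EXPR) == 1);  /* unary */
  gcc_assert (get_gimple_rhs_num_ops (PLUS_EXPR) == 2);    /* binary */
  gcc_assert (get_gimple_rhs_num_ops (FMA_EXPR) == 3);     /* ternary */
  gcc_assert (get_gimple_rhs_num_ops (SSA_NAME) == 1);     /* single */
}
#endif
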
2543 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2544 (unsigned char) \
2545 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2546 : ((TYPE) == tcc_binary \
2547 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2548 : ((TYPE) == tcc_constant \
2549 || (TYPE) == tcc_declaration \
2550 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2551 : ((SYM) == TRUTH_AND_EXPR \
2552 || (SYM) == TRUTH_OR_EXPR \
2553 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2554 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2555 : ((SYM) == COND_EXPR \
2556 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2557 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2558 || (SYM) == DOT_PROD_EXPR \
2559 || (SYM) == REALIGN_LOAD_EXPR \
2560 || (SYM) == VEC_COND_EXPR \
2561 || (SYM) == VEC_PERM_EXPR \
2562 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2563 : ((SYM) == CONSTRUCTOR \
2564 || (SYM) == OBJ_TYPE_REF \
2565 || (SYM) == ASSERT_EXPR \
2566 || (SYM) == ADDR_EXPR \
2567 || (SYM) == WITH_SIZE_EXPR \
2568 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2569 : GIMPLE_INVALID_RHS),
2570 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2571
2572 const unsigned char gimple_rhs_class_table[] = {
2573 #include "all-tree.def"
2574 };
2575
2576 #undef DEFTREECODE
2577 #undef END_OF_BASE_TREE_CODES
2578
2579 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2580
2581 /* Validation of GIMPLE expressions. */
2582
2583 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2584
2585 bool
2586 is_gimple_lvalue (tree t)
2587 {
2588 return (is_gimple_addressable (t)
2589 || TREE_CODE (t) == WITH_SIZE_EXPR
2590 /* These are complex lvalues, but don't have addresses, so they
2591 go here. */
2592 || TREE_CODE (t) == BIT_FIELD_REF);
2593 }
2594
2595 /* Return true if T is a GIMPLE condition. */
2596
2597 bool
2598 is_gimple_condexpr (tree t)
2599 {
2600 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2601 && !tree_could_throw_p (t)
2602 && is_gimple_val (TREE_OPERAND (t, 0))
2603 && is_gimple_val (TREE_OPERAND (t, 1))));
2604 }
2605
2606 /* Return true if T is something whose address can be taken. */
2607
2608 bool
2609 is_gimple_addressable (tree t)
2610 {
2611 return (is_gimple_id (t) || handled_component_p (t)
2612 || TREE_CODE (t) == MEM_REF);
2613 }
2614
2615 /* Return true if T is a valid gimple constant. */
2616
2617 bool
2618 is_gimple_constant (const_tree t)
2619 {
2620 switch (TREE_CODE (t))
2621 {
2622 case INTEGER_CST:
2623 case REAL_CST:
2624 case FIXED_CST:
2625 case STRING_CST:
2626 case COMPLEX_CST:
2627 case VECTOR_CST:
2628 return true;
2629
2630 /* Vector constant constructors are gimple invariant. */
2631 case CONSTRUCTOR:
2632 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2633 return TREE_CONSTANT (t);
2634 else
2635 return false;
2636
2637 default:
2638 return false;
2639 }
2640 }
2641
2642 /* Return true if T is a gimple address. */
2643
2644 bool
2645 is_gimple_address (const_tree t)
2646 {
2647 tree op;
2648
2649 if (TREE_CODE (t) != ADDR_EXPR)
2650 return false;
2651
2652 op = TREE_OPERAND (t, 0);
2653 while (handled_component_p (op))
2654 {
2655 if ((TREE_CODE (op) == ARRAY_REF
2656 || TREE_CODE (op) == ARRAY_RANGE_REF)
2657 && !is_gimple_val (TREE_OPERAND (op, 1)))
2658 return false;
2659
2660 op = TREE_OPERAND (op, 0);
2661 }
2662
2663 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2664 return true;
2665
2666 switch (TREE_CODE (op))
2667 {
2668 case PARM_DECL:
2669 case RESULT_DECL:
2670 case LABEL_DECL:
2671 case FUNCTION_DECL:
2672 case VAR_DECL:
2673 case CONST_DECL:
2674 return true;
2675
2676 default:
2677 return false;
2678 }
2679 }
2680
2681 /* Return true if T is a gimple invariant address. */
2682
2683 bool
2684 is_gimple_invariant_address (const_tree t)
2685 {
2686 const_tree op;
2687
2688 if (TREE_CODE (t) != ADDR_EXPR)
2689 return false;
2690
2691 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2692 if (!op)
2693 return false;
2694
2695 if (TREE_CODE (op) == MEM_REF)
2696 {
2697 const_tree op0 = TREE_OPERAND (op, 0);
2698 return (TREE_CODE (op0) == ADDR_EXPR
2699 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2700 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2701 }
2702
2703 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2704 }
2705
2706 /* Return true if T is a gimple invariant address at IPA level
2707 (so addresses of variables on stack are not allowed). */
2708
2709 bool
2710 is_gimple_ip_invariant_address (const_tree t)
2711 {
2712 const_tree op;
2713
2714 if (TREE_CODE (t) != ADDR_EXPR)
2715 return false;
2716
2717 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2718 if (!op)
2719 return false;
2720
2721 if (TREE_CODE (op) == MEM_REF)
2722 {
2723 const_tree op0 = TREE_OPERAND (op, 0);
2724 return (TREE_CODE (op0) == ADDR_EXPR
2725 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2726 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
2727 }
2728
2729 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
2730 }
2731
2732 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2733 form of function invariant. */
2734
2735 bool
2736 is_gimple_min_invariant (const_tree t)
2737 {
2738 if (TREE_CODE (t) == ADDR_EXPR)
2739 return is_gimple_invariant_address (t);
2740
2741 return is_gimple_constant (t);
2742 }
2743
2744 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2745 form of gimple minimal invariant. */
2746
2747 bool
2748 is_gimple_ip_invariant (const_tree t)
2749 {
2750 if (TREE_CODE (t) == ADDR_EXPR)
2751 return is_gimple_ip_invariant_address (t);
2752
2753 return is_gimple_constant (t);
2754 }
2755
2756 /* Return true if T is a variable. */
2757
2758 bool
2759 is_gimple_variable (tree t)
2760 {
2761 return (TREE_CODE (t) == VAR_DECL
2762 || TREE_CODE (t) == PARM_DECL
2763 || TREE_CODE (t) == RESULT_DECL
2764 || TREE_CODE (t) == SSA_NAME);
2765 }
2766
2767 /* Return true if T is a GIMPLE identifier (something with an address). */
2768
2769 bool
2770 is_gimple_id (tree t)
2771 {
2772 return (is_gimple_variable (t)
2773 || TREE_CODE (t) == FUNCTION_DECL
2774 || TREE_CODE (t) == LABEL_DECL
2775 || TREE_CODE (t) == CONST_DECL
2776 /* Allow string constants, since they are addressable. */
2777 || TREE_CODE (t) == STRING_CST);
2778 }
2779
2780 /* Return true if T is a non-aggregate register variable. */
2781
2782 bool
2783 is_gimple_reg (tree t)
2784 {
2785 if (TREE_CODE (t) == SSA_NAME)
2786 {
2787 t = SSA_NAME_VAR (t);
2788 if (TREE_CODE (t) == VAR_DECL
2789 && VAR_DECL_IS_VIRTUAL_OPERAND (t))
2790 return false;
2791 return true;
2792 }
2793
2794 if (TREE_CODE (t) == VAR_DECL
2795 && VAR_DECL_IS_VIRTUAL_OPERAND (t))
2796 return false;
2797
2798 if (!is_gimple_variable (t))
2799 return false;
2800
2801 if (!is_gimple_reg_type (TREE_TYPE (t)))
2802 return false;
2803
2804 /* A volatile decl is not acceptable because we can't reuse it as
2805 needed. We need to copy it into a temp first. */
2806 if (TREE_THIS_VOLATILE (t))
2807 return false;
2808
2809 /* We define "registers" as things that can be renamed as needed,
2810 which with our infrastructure does not apply to memory. */
2811 if (needs_to_live_in_memory (t))
2812 return false;
2813
2814 /* Hard register variables are an interesting case. For those that
2815 are call-clobbered, we don't know where all the calls are, since
2816 we don't (want to) take into account which operations will turn
2817 into libcalls at the rtl level. For those that are call-saved,
2818 we don't currently model the fact that calls may in fact change
2819 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2820 level, and so miss variable changes that they might imply. All around,
2821 it seems safest to not do too much optimization with these at the
2822 tree level at all. We'll have to rely on the rtl optimizers to
2823 clean this up, as there we've got all the appropriate bits exposed. */
2824 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2825 return false;
2826
2827 /* Complex and vector values must have been put into SSA-like form.
2828 That is, no assignments to the individual components. */
2829 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2830 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2831 return DECL_GIMPLE_REG_P (t);
2832
2833 return true;
2834 }
2835
2836
2837 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2838
2839 bool
2840 is_gimple_val (tree t)
2841 {
2842 /* Make loads from volatiles and memory vars explicit. */
2843 if (is_gimple_variable (t)
2844 && is_gimple_reg_type (TREE_TYPE (t))
2845 && !is_gimple_reg (t))
2846 return false;
2847
2848 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2849 }
2850
2851 /* Similarly, but accept hard registers as inputs to asm statements. */
2852
2853 bool
2854 is_gimple_asm_val (tree t)
2855 {
2856 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2857 return true;
2858
2859 return is_gimple_val (t);
2860 }
2861
2862 /* Return true if T is a GIMPLE minimal lvalue. */
2863
2864 bool
2865 is_gimple_min_lval (tree t)
2866 {
2867 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2868 return false;
2869 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
2870 }
2871
2872 /* Return true if T is a valid function operand of a CALL_EXPR. */
2873
2874 bool
2875 is_gimple_call_addr (tree t)
2876 {
2877 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2878 }
2879
2880 /* Return true if T is a valid address operand of a MEM_REF. */
2881
2882 bool
2883 is_gimple_mem_ref_addr (tree t)
2884 {
2885 return (is_gimple_reg (t)
2886 || TREE_CODE (t) == INTEGER_CST
2887 || (TREE_CODE (t) == ADDR_EXPR
2888 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
2889 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
2890 }
2891
2892
2893 /* Given a memory reference expression T, return its base address.
2894 The base address of a memory reference expression is the main
2895 object being referenced. For instance, the base address for
2896 'array[i].fld[j]' is 'array'. You can think of this as stripping
2897 away the offset part from a memory address.
2898
2899 This function calls handled_component_p to strip away all the inner
2900 parts of the memory reference until it reaches the base object. */
2901
2902 tree
2903 get_base_address (tree t)
2904 {
2905 while (handled_component_p (t))
2906 t = TREE_OPERAND (t, 0);
2907
2908 if ((TREE_CODE (t) == MEM_REF
2909 || TREE_CODE (t) == TARGET_MEM_REF)
2910 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
2911 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
2912
2913 if (TREE_CODE (t) == SSA_NAME
2914 || DECL_P (t)
2915 || TREE_CODE (t) == STRING_CST
2916 || TREE_CODE (t) == CONSTRUCTOR
2917 || INDIRECT_REF_P (t)
2918 || TREE_CODE (t) == MEM_REF
2919 || TREE_CODE (t) == TARGET_MEM_REF)
2920 return t;
2921 else
2922 return NULL_TREE;
2923 }
2924
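/* Recompute the TREE_SIDE_EFFECTS flag of T from the flags of its
   operands, according to the class of its tree code.  */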
2925 void
2926 recalculate_side_effects (tree t)
2927 {
2928 enum tree_code code = TREE_CODE (t);
2929 int len = TREE_OPERAND_LENGTH (t);
2930 int i;
2931
2932 switch (TREE_CODE_CLASS (code))
2933 {
2934 case tcc_expression:
2935 switch (code)
2936 {
2937 case INIT_EXPR:
2938 case MODIFY_EXPR:
2939 case VA_ARG_EXPR:
2940 case PREDECREMENT_EXPR:
2941 case PREINCREMENT_EXPR:
2942 case POSTDECREMENT_EXPR:
2943 case POSTINCREMENT_EXPR:
2944 /* All of these have side-effects, no matter what their
2945 operands are. */
2946 return;
2947
2948 default:
2949 break;
2950 }
2951 /* Fall through. */
2952
2953 case tcc_comparison: /* a comparison expression */
2954 case tcc_unary: /* a unary arithmetic expression */
2955 case tcc_binary: /* a binary arithmetic expression */
2956 case tcc_reference: /* a reference */
2957 case tcc_vl_exp: /* a function call */
2958 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2959 for (i = 0; i < len; ++i)
2960 {
2961 tree op = TREE_OPERAND (t, i);
2962 if (op && TREE_SIDE_EFFECTS (op))
2963 TREE_SIDE_EFFECTS (t) = 1;
2964 }
2965 break;
2966
2967 case tcc_constant:
2968 /* No side-effects. */
2969 return;
2970
2971 default:
2972 gcc_unreachable ();
2973 }
2974 }
2975
2976 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
2977 a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
2978 we failed to create one. */
2979
2980 tree
2981 canonicalize_cond_expr_cond (tree t)
2982 {
2983 /* Strip conversions around boolean operations. */
2984 if (CONVERT_EXPR_P (t)
2985 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
2986 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
2987 == BOOLEAN_TYPE))
2988 t = TREE_OPERAND (t, 0);
2989
2990 /* For !x use x == 0. */
2991 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2992 {
2993 tree top0 = TREE_OPERAND (t, 0);
2994 t = build2 (EQ_EXPR, TREE_TYPE (t),
2995 top0, build_int_cst (TREE_TYPE (top0), 0));
2996 }
2997 /* For cmp ? 1 : 0 use cmp. */
2998 else if (TREE_CODE (t) == COND_EXPR
2999 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
3000 && integer_onep (TREE_OPERAND (t, 1))
3001 && integer_zerop (TREE_OPERAND (t, 2)))
3002 {
3003 tree top0 = TREE_OPERAND (t, 0);
3004 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
3005 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
3006 }
3007
3008 if (is_gimple_condexpr (t))
3009 return t;
3010
3011 return NULL_TREE;
3012 }
3013
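/* Illustrative sketch, not from the original sources: for a boolean
   SSA name X, "!X" is canonicalized to "X == 0" by the function
   above, and "a < b ? 1 : 0" would be canonicalized to "a < b".  */
#if 0
static void
canonicalize_example (tree x)
{
  tree cond
    = canonicalize_cond_expr_cond (build1 (TRUTH_NOT_EXPR,
					   TREE_TYPE (x), x));
  gcc_assert (cond
	      && TREE_CODE (cond) == EQ_EXPR
	      && TREE_OPERAND (cond, 0) == x
	      && integer_zerop (TREE_OPERAND (cond, 1)));
}
#endif
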
3014 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
3015 the positions marked by the set ARGS_TO_SKIP. */
3016
3017 gimple
3018 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
3019 {
3020 int i;
3021 int nargs = gimple_call_num_args (stmt);
3022 VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
3023 gimple new_stmt;
3024
3025 for (i = 0; i < nargs; i++)
3026 if (!bitmap_bit_p (args_to_skip, i))
3027 VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
3028
3029 if (gimple_call_internal_p (stmt))
3030 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
3031 vargs);
3032 else
3033 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
3034 VEC_free (tree, heap, vargs);
3035 if (gimple_call_lhs (stmt))
3036 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3037
3038 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
3039 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
3040
3041 gimple_set_block (new_stmt, gimple_block (stmt));
3042 if (gimple_has_location (stmt))
3043 gimple_set_location (new_stmt, gimple_location (stmt));
3044 gimple_call_copy_flags (new_stmt, stmt);
3045 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3046
3047 gimple_set_modified (new_stmt, true);
3048
3049 return new_stmt;
3050 }
3051
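/* Illustrative sketch, not from the original sources: building a copy
   of a call with its second argument (index 1) dropped, as done when
   redirecting to a clone whose unused parameters were removed, and
   replacing the original statement with it.  */
#if 0
static void
skip_arg_example (gimple_stmt_iterator *gsi)
{
  bitmap to_skip = BITMAP_ALLOC (NULL);
  gimple new_call;

  bitmap_set_bit (to_skip, 1);
  new_call = gimple_call_copy_skip_args (gsi_stmt (*gsi), to_skip);
  BITMAP_FREE (to_skip);
  gsi_replace (gsi, new_call, false);
}
#endif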
3052
3053 enum gtc_mode { GTC_MERGE = 0, GTC_DIAG = 1 };
3054
3055 static hashval_t gimple_type_hash (const void *);
3056
3057 /* Structure used to maintain a cache of some type pairs compared by
3058 gimple_types_compatible_p when comparing aggregate types. There are
3059 three possible values for SAME_P:
3060
3061 -2: The pair (T1, T2) has just been inserted in the table.
3062 0: T1 and T2 are different types.
3063 1: T1 and T2 are the same type.
3064
3065 The two elements in the SAME_P array are indexed by the comparison
3066 mode gtc_mode. */
3067
3068 struct type_pair_d
3069 {
3070 unsigned int uid1;
3071 unsigned int uid2;
3072 signed char same_p[2];
3073 };
3074 typedef struct type_pair_d *type_pair_t;
3075 DEF_VEC_P(type_pair_t);
3076 DEF_VEC_ALLOC_P(type_pair_t,heap);
3077
3078 #define GIMPLE_TYPE_PAIR_SIZE 16381
3079 struct type_pair_d *type_pair_cache;
3080
3081
3082 /* Lookup the pair of types T1 and T2 in the type pair cache,
3083 inserting a new entry if none existed. */
3084
3085 static inline type_pair_t
3086 lookup_type_pair (tree t1, tree t2)
3087 {
3088 unsigned int index;
3089 unsigned int uid1, uid2;
3090
3091 if (type_pair_cache == NULL)
3092 type_pair_cache = XCNEWVEC (struct type_pair_d, GIMPLE_TYPE_PAIR_SIZE);
3093
3094 if (TYPE_UID (t1) < TYPE_UID (t2))
3095 {
3096 uid1 = TYPE_UID (t1);
3097 uid2 = TYPE_UID (t2);
3098 }
3099 else
3100 {
3101 uid1 = TYPE_UID (t2);
3102 uid2 = TYPE_UID (t1);
3103 }
3104 gcc_checking_assert (uid1 != uid2);
3105
3106 /* iterative_hash_hashval_t implies a function call; avoid it here,
3107 as we know the UIDs are in a limited range. */
3108 index = ((((unsigned HOST_WIDE_INT)uid1 << HOST_BITS_PER_WIDE_INT / 2) + uid2)
3109 % GIMPLE_TYPE_PAIR_SIZE);
3110 if (type_pair_cache [index].uid1 == uid1
3111 && type_pair_cache [index].uid2 == uid2)
3112 return &type_pair_cache[index];
3113
3114 type_pair_cache [index].uid1 = uid1;
3115 type_pair_cache [index].uid2 = uid2;
3116 type_pair_cache [index].same_p[0] = -2;
3117 type_pair_cache [index].same_p[1] = -2;
3118
3119 return &type_pair_cache[index];
3120 }
3121
3122 /* Per pointer state for the SCC finding. The on_sccstack flag
3123 is not strictly required; it is true exactly when there is no
3124 hash value recorded for the type and false otherwise. But
3125 querying that directly is slower. */
3126
3127 struct sccs
3128 {
3129 unsigned int dfsnum;
3130 unsigned int low;
3131 bool on_sccstack;
3132 union {
3133 hashval_t hash;
3134 signed char same_p;
3135 } u;
3136 };
3137
3138 static unsigned int next_dfs_num;
3139 static unsigned int gtc_next_dfs_num;
3140
3141
3142 /* GIMPLE type merging cache. A direct-mapped cache based on TYPE_UID. */
3143
3144 typedef struct GTY(()) gimple_type_leader_entry_s {
3145 tree type;
3146 tree leader;
3147 } gimple_type_leader_entry;
3148
3149 #define GIMPLE_TYPE_LEADER_SIZE 16381
3150 static GTY((deletable, length("GIMPLE_TYPE_LEADER_SIZE")))
3151 gimple_type_leader_entry *gimple_type_leader;
3152
3153 /* Lookup an existing leader for T and return it or NULL_TREE, if
3154 there is none in the cache. */
3155
3156 static inline tree
3157 gimple_lookup_type_leader (tree t)
3158 {
3159 gimple_type_leader_entry *leader;
3160
3161 if (!gimple_type_leader)
3162 return NULL_TREE;
3163
3164 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
3165 if (leader->type != t)
3166 return NULL_TREE;
3167
3168 return leader->leader;
3169 }
3170
3171 /* Return true if T1 and T2 have the same name. Types without a
3172 name are considered to have the same name only if both of them
3173 are unnamed. */
3174
3175 static bool
3176 compare_type_names_p (tree t1, tree t2)
3177 {
3178 tree name1 = TYPE_NAME (t1);
3179 tree name2 = TYPE_NAME (t2);
3180
3181 if ((name1 != NULL_TREE) != (name2 != NULL_TREE))
3182 return false;
3183
3184 if (name1 == NULL_TREE)
3185 return true;
3186
3187 /* Either both should be a TYPE_DECL or both an IDENTIFIER_NODE. */
3188 if (TREE_CODE (name1) != TREE_CODE (name2))
3189 return false;
3190
3191 if (TREE_CODE (name1) == TYPE_DECL)
3192 name1 = DECL_NAME (name1);
3193 gcc_checking_assert (!name1 || TREE_CODE (name1) == IDENTIFIER_NODE);
3194
3195 if (TREE_CODE (name2) == TYPE_DECL)
3196 name2 = DECL_NAME (name2);
3197 gcc_checking_assert (!name2 || TREE_CODE (name2) == IDENTIFIER_NODE);
3198
3199 /* Identifiers can be compared with pointer equality rather
3200 than a string comparison. */
3201 if (name1 == name2)
3202 return true;
3203
3204 return false;
3205 }
3206
3207 /* Return true if the field decls F1 and F2 are at the same offset.
3208
3209 This is intended to be used on GIMPLE types only. */
3210
3211 bool
3212 gimple_compare_field_offset (tree f1, tree f2)
3213 {
3214 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3215 {
3216 tree offset1 = DECL_FIELD_OFFSET (f1);
3217 tree offset2 = DECL_FIELD_OFFSET (f2);
3218 return ((offset1 == offset2
3219 /* Once gimplification is done, self-referential offsets are
3220 instantiated as operand #2 of the COMPONENT_REF built for
3221 each access and reset. Therefore, they are not relevant
3222 anymore and fields are interchangeable provided that they
3223 represent the same access. */
3224 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3225 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3226 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3227 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3228 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3229 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3230 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3231 || operand_equal_p (offset1, offset2, 0))
3232 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3233 DECL_FIELD_BIT_OFFSET (f2)));
3234 }
3235
3236 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3237 should be, so handle differing ones specially by decomposing
3238 the offset into a byte and bit offset manually. */
3239 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3240 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3241 {
3242 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3243 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3244 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3245 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3246 + bit_offset1 / BITS_PER_UNIT);
3247 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3248 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3249 + bit_offset2 / BITS_PER_UNIT);
3250 if (byte_offset1 != byte_offset2)
3251 return false;
3252 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3253 }
3254
3255 return false;
3256 }
3257
3258 static bool
3259 gimple_types_compatible_p_1 (tree, tree, type_pair_t,
3260 VEC(type_pair_t, heap) **,
3261 struct pointer_map_t *, struct obstack *);
3262
3263 /* DFS visit the edge from the caller's type pair with state *STATE to
3264 the pair T1, T2.
3265 Update the merging status if it is not part of the SCC containing the
3266 caller's pair and return it.
3267 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3268
3269 static bool
3270 gtc_visit (tree t1, tree t2,
3271 struct sccs *state,
3272 VEC(type_pair_t, heap) **sccstack,
3273 struct pointer_map_t *sccstate,
3274 struct obstack *sccstate_obstack)
3275 {
3276 struct sccs *cstate = NULL;
3277 type_pair_t p;
3278 void **slot;
3279 tree leader1, leader2;
3280
3281 /* Check first for the obvious case of pointer identity. */
3282 if (t1 == t2)
3283 return true;
3284
3285 /* Check that we have two types to compare. */
3286 if (t1 == NULL_TREE || t2 == NULL_TREE)
3287 return false;
3288
3289 /* Can't be the same type if the types don't have the same code. */
3290 if (TREE_CODE (t1) != TREE_CODE (t2))
3291 return false;
3292
3293 /* Can't be the same type if they have different CV qualifiers. */
3294 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3295 return false;
3296
3297 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3298 return false;
3299
3300 /* Void types and nullptr types are always the same. */
3301 if (TREE_CODE (t1) == VOID_TYPE
3302 || TREE_CODE (t1) == NULLPTR_TYPE)
3303 return true;
3304
3305 /* Can't be the same type if they have different alignment or mode. */
3306 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3307 || TYPE_MODE (t1) != TYPE_MODE (t2))
3308 return false;
3309
3310 /* Do some simple checks before doing three hashtable queries. */
3311 if (INTEGRAL_TYPE_P (t1)
3312 || SCALAR_FLOAT_TYPE_P (t1)
3313 || FIXED_POINT_TYPE_P (t1)
3314 || TREE_CODE (t1) == VECTOR_TYPE
3315 || TREE_CODE (t1) == COMPLEX_TYPE
3316 || TREE_CODE (t1) == OFFSET_TYPE
3317 || POINTER_TYPE_P (t1))
3318 {
3319 /* Can't be the same type if they have different sign or precision. */
3320 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3321 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3322 return false;
3323
3324 if (TREE_CODE (t1) == INTEGER_TYPE
3325 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
3326 return false;
3327
3328 /* That's all we need to check for float and fixed-point types. */
3329 if (SCALAR_FLOAT_TYPE_P (t1)
3330 || FIXED_POINT_TYPE_P (t1))
3331 return true;
3332
3333 /* For other types fall through to more complex checks. */
3334 }
3335
3336 /* If the types have been previously registered and found equal
3337 they still are. */
3338 leader1 = gimple_lookup_type_leader (t1);
3339 leader2 = gimple_lookup_type_leader (t2);
3340 if (leader1 == t2
3341 || t1 == leader2
3342 || (leader1 && leader1 == leader2))
3343 return true;
3344
3345 /* If the hash values of t1 and t2 are different the types can't
3346 possibly be the same. This helps keep the type-pair hashtable
3347 small, only tracking comparisons for hash collisions. */
3348 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3349 return false;
3350
3351 /* Allocate a new cache entry for this comparison. */
3352 p = lookup_type_pair (t1, t2);
3353 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3354 {
3355 /* We have already decided whether T1 and T2 are the
3356 same, return the cached result. */
3357 return p->same_p[GTC_MERGE] == 1;
3358 }
3359
3360 if ((slot = pointer_map_contains (sccstate, p)) != NULL)
3361 cstate = (struct sccs *)*slot;
3362 /* Not yet visited. DFS recurse. */
3363 if (!cstate)
3364 {
3365 gimple_types_compatible_p_1 (t1, t2, p,
3366 sccstack, sccstate, sccstate_obstack);
3367 cstate = (struct sccs *)* pointer_map_contains (sccstate, p);
3368 state->low = MIN (state->low, cstate->low);
3369 }
3370 /* If the type is still on the SCC stack adjust the parent's low. */
3371 if (cstate->dfsnum < state->dfsnum
3372 && cstate->on_sccstack)
3373 state->low = MIN (cstate->dfsnum, state->low);
3374
3375 /* Return the current lattice value. We start with an equality
3376 assumption so types part of a SCC will be optimistically
3377 treated equal unless proven otherwise. */
3378 return cstate->u.same_p;
3379 }
3380
3381 /* Worker for gimple_types_compatible_p.
3382 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3383
3384 static bool
3385 gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
3386 VEC(type_pair_t, heap) **sccstack,
3387 struct pointer_map_t *sccstate,
3388 struct obstack *sccstate_obstack)
3389 {
3390 struct sccs *state;
3391
3392 gcc_assert (p->same_p[GTC_MERGE] == -2);
3393
3394 state = XOBNEW (sccstate_obstack, struct sccs);
3395 *pointer_map_insert (sccstate, p) = state;
3396
3397 VEC_safe_push (type_pair_t, heap, *sccstack, p);
3398 state->dfsnum = gtc_next_dfs_num++;
3399 state->low = state->dfsnum;
3400 state->on_sccstack = true;
3401 /* Start with an equality assumption. As we DFS recurse into child
3402 SCCs this assumption may get revisited. */
3403 state->u.same_p = 1;
3404
3405 /* The struct tags shall compare equal. */
3406 if (!compare_type_names_p (t1, t2))
3407 goto different_types;
3408
3409 /* We may not merge typedef types to the same type in different
3410 contexts. */
3411 if (TYPE_NAME (t1)
3412 && TREE_CODE (TYPE_NAME (t1)) == TYPE_DECL
3413 && DECL_CONTEXT (TYPE_NAME (t1))
3414 && TYPE_P (DECL_CONTEXT (TYPE_NAME (t1))))
3415 {
3416 if (!gtc_visit (DECL_CONTEXT (TYPE_NAME (t1)),
3417 DECL_CONTEXT (TYPE_NAME (t2)),
3418 state, sccstack, sccstate, sccstate_obstack))
3419 goto different_types;
3420 }
3421
3422 /* If their attributes are not the same they can't be the same type. */
3423 if (!attribute_list_equal (TYPE_ATTRIBUTES (t1), TYPE_ATTRIBUTES (t2)))
3424 goto different_types;
3425
3426 /* Do type-specific comparisons. */
3427 switch (TREE_CODE (t1))
3428 {
3429 case VECTOR_TYPE:
3430 case COMPLEX_TYPE:
3431 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3432 state, sccstack, sccstate, sccstate_obstack))
3433 goto different_types;
3434 goto same_types;
3435
3436 case ARRAY_TYPE:
3437 /* Array types are the same if the element types are the same and
3438 the number of elements are the same. */
3439 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3440 state, sccstack, sccstate, sccstate_obstack)
3441 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3442 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3443 goto different_types;
3444 else
3445 {
3446 tree i1 = TYPE_DOMAIN (t1);
3447 tree i2 = TYPE_DOMAIN (t2);
3448
3449 /* For an incomplete external array, the type domain can be
3450 NULL_TREE. Check this condition also. */
3451 if (i1 == NULL_TREE && i2 == NULL_TREE)
3452 goto same_types;
3453 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3454 goto different_types;
3455 else
3456 {
3457 tree min1 = TYPE_MIN_VALUE (i1);
3458 tree min2 = TYPE_MIN_VALUE (i2);
3459 tree max1 = TYPE_MAX_VALUE (i1);
3460 tree max2 = TYPE_MAX_VALUE (i2);
3461
3462 /* The minimum/maximum values have to be the same. */
3463 if ((min1 == min2
3464 || (min1 && min2
3465 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3466 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3467 || operand_equal_p (min1, min2, 0))))
3468 && (max1 == max2
3469 || (max1 && max2
3470 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3471 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3472 || operand_equal_p (max1, max2, 0)))))
3473 goto same_types;
3474 else
3475 goto different_types;
3476 }
3477 }
3478
3479 case METHOD_TYPE:
3480 /* Method types should belong to the same class. */
3481 if (!gtc_visit (TYPE_METHOD_BASETYPE (t1), TYPE_METHOD_BASETYPE (t2),
3482 state, sccstack, sccstate, sccstate_obstack))
3483 goto different_types;
3484
3485 /* Fallthru */
3486
3487 case FUNCTION_TYPE:
3488 /* Function types are the same if the return type and arguments types
3489 are the same. */
3490 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3491 state, sccstack, sccstate, sccstate_obstack))
3492 goto different_types;
3493
3494 if (!comp_type_attributes (t1, t2))
3495 goto different_types;
3496
3497 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3498 goto same_types;
3499 else
3500 {
3501 tree parms1, parms2;
3502
3503 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3504 parms1 && parms2;
3505 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3506 {
3507 if (!gtc_visit (TREE_VALUE (parms1), TREE_VALUE (parms2),
3508 state, sccstack, sccstate, sccstate_obstack))
3509 goto different_types;
3510 }
3511
3512 if (parms1 || parms2)
3513 goto different_types;
3514
3515 goto same_types;
3516 }
3517
3518 case OFFSET_TYPE:
3519 {
3520 if (!gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3521 state, sccstack, sccstate, sccstate_obstack)
3522 || !gtc_visit (TYPE_OFFSET_BASETYPE (t1),
3523 TYPE_OFFSET_BASETYPE (t2),
3524 state, sccstack, sccstate, sccstate_obstack))
3525 goto different_types;
3526
3527 goto same_types;
3528 }
3529
3530 case POINTER_TYPE:
3531 case REFERENCE_TYPE:
3532 {
3533 /* If the two pointers have different ref-all attributes,
3534 they can't be the same type. */
3535 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3536 goto different_types;
3537
3538 /* Otherwise, pointer and reference types are the same if the
3539 pointed-to types are the same. */
3540 if (gtc_visit (TREE_TYPE (t1), TREE_TYPE (t2),
3541 state, sccstack, sccstate, sccstate_obstack))
3542 goto same_types;
3543
3544 goto different_types;
3545 }
3546
3547 case INTEGER_TYPE:
3548 case BOOLEAN_TYPE:
3549 {
3550 tree min1 = TYPE_MIN_VALUE (t1);
3551 tree max1 = TYPE_MAX_VALUE (t1);
3552 tree min2 = TYPE_MIN_VALUE (t2);
3553 tree max2 = TYPE_MAX_VALUE (t2);
3554 bool min_equal_p = false;
3555 bool max_equal_p = false;
3556
3557 /* If either type has a minimum value, the other type must
3558 have the same. */
3559 if (min1 == NULL_TREE && min2 == NULL_TREE)
3560 min_equal_p = true;
3561 else if (min1 && min2 && operand_equal_p (min1, min2, 0))
3562 min_equal_p = true;
3563
3564 /* Likewise, if either type has a maximum value, the other
3565 type must have the same. */
3566 if (max1 == NULL_TREE && max2 == NULL_TREE)
3567 max_equal_p = true;
3568 else if (max1 && max2 && operand_equal_p (max1, max2, 0))
3569 max_equal_p = true;
3570
3571 if (!min_equal_p || !max_equal_p)
3572 goto different_types;
3573
3574 goto same_types;
3575 }
3576
3577 case ENUMERAL_TYPE:
3578 {
3579 /* FIXME lto, we cannot check bounds on enumeral types because
3580 different front ends will produce different values.
3581 In C, enumeral types are integers, while in C++ each element
3582 will have its own symbolic value. We should decide how enums
3583 are to be represented in GIMPLE and have each front end lower
3584 to that. */
3585 tree v1, v2;
3586
3587 /* For enumeral types, all the values must be the same. */
3588 if (TYPE_VALUES (t1) == TYPE_VALUES (t2))
3589 goto same_types;
3590
3591 for (v1 = TYPE_VALUES (t1), v2 = TYPE_VALUES (t2);
3592 v1 && v2;
3593 v1 = TREE_CHAIN (v1), v2 = TREE_CHAIN (v2))
3594 {
3595 tree c1 = TREE_VALUE (v1);
3596 tree c2 = TREE_VALUE (v2);
3597
3598 if (TREE_CODE (c1) == CONST_DECL)
3599 c1 = DECL_INITIAL (c1);
3600
3601 if (TREE_CODE (c2) == CONST_DECL)
3602 c2 = DECL_INITIAL (c2);
3603
3604 if (tree_int_cst_equal (c1, c2) != 1)
3605 goto different_types;
3606
3607 if (TREE_PURPOSE (v1) != TREE_PURPOSE (v2))
3608 goto different_types;
3609 }
3610
3611 /* If one enumeration has more values than the other, they
3612 are not the same. */
3613 if (v1 || v2)
3614 goto different_types;
3615
3616 goto same_types;
3617 }
3618
3619 case RECORD_TYPE:
3620 case UNION_TYPE:
3621 case QUAL_UNION_TYPE:
3622 {
3623 tree f1, f2;
3624
3625 /* For aggregate types, all the fields must be the same. */
3626 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3627 f1 && f2;
3628 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3629 {
3630 /* Different field kinds are not compatible. */
3631 if (TREE_CODE (f1) != TREE_CODE (f2))
3632 goto different_types;
3633 /* Field decls must have the same name and offset. */
3634 if (TREE_CODE (f1) == FIELD_DECL
3635 && (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3636 || !gimple_compare_field_offset (f1, f2)))
3637 goto different_types;
3638 /* All entities should have the same name and type. */
3639 if (DECL_NAME (f1) != DECL_NAME (f2)
3640 || !gtc_visit (TREE_TYPE (f1), TREE_TYPE (f2),
3641 state, sccstack, sccstate, sccstate_obstack))
3642 goto different_types;
3643 }
3644
3645 /* If one aggregate has more fields than the other, they
3646 are not the same. */
3647 if (f1 || f2)
3648 goto different_types;
3649
3650 goto same_types;
3651 }
3652
3653 default:
3654 gcc_unreachable ();
3655 }
3656
3657 /* Common exit path for types that are not compatible. */
3658 different_types:
3659 state->u.same_p = 0;
3660 goto pop;
3661
3662 /* Common exit path for types that are compatible. */
3663 same_types:
3664 gcc_assert (state->u.same_p == 1);
3665
3666 pop:
3667 if (state->low == state->dfsnum)
3668 {
3669 type_pair_t x;
3670
3671 /* Pop off the SCC and set its cache values to the final
3672 comparison result. */
3673 do
3674 {
3675 struct sccs *cstate;
3676 x = VEC_pop (type_pair_t, *sccstack);
3677 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
3678 cstate->on_sccstack = false;
3679 x->same_p[GTC_MERGE] = state->u.same_p;
3680 }
3681 while (x != p);
3682 }
3683
3684 return state->u.same_p;
3685 }
3686
3687 /* Return true iff T1 and T2 are structurally identical. In this
3688 merging mode an incomplete type and a complete type are
3689 considered different. */
3690
3691 static bool
3692 gimple_types_compatible_p (tree t1, tree t2)
3693 {
3694 VEC(type_pair_t, heap) *sccstack = NULL;
3695 struct pointer_map_t *sccstate;
3696 struct obstack sccstate_obstack;
3697 type_pair_t p = NULL;
3698 bool res;
3699 tree leader1, leader2;
3700
3701 /* Before starting to set up the SCC machinery handle simple cases. */
3702
3703 /* Check first for the obvious case of pointer identity. */
3704 if (t1 == t2)
3705 return true;
3706
3707 /* Check that we have two types to compare. */
3708 if (t1 == NULL_TREE || t2 == NULL_TREE)
3709 return false;
3710
3711 /* Can't be the same type if the types don't have the same code. */
3712 if (TREE_CODE (t1) != TREE_CODE (t2))
3713 return false;
3714
3715 /* Can't be the same type if they have different CV qualifiers. */
3716 if (TYPE_QUALS (t1) != TYPE_QUALS (t2))
3717 return false;
3718
3719 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3720 return false;
3721
3722 /* Void types and nullptr types are always the same. */
3723 if (TREE_CODE (t1) == VOID_TYPE
3724 || TREE_CODE (t1) == NULLPTR_TYPE)
3725 return true;
3726
3727 /* Can't be the same type if they have different alignment or mode. */
3728 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3729 || TYPE_MODE (t1) != TYPE_MODE (t2))
3730 return false;
3731
3732 /* Do some simple checks before doing three hashtable queries. */
3733 if (INTEGRAL_TYPE_P (t1)
3734 || SCALAR_FLOAT_TYPE_P (t1)
3735 || FIXED_POINT_TYPE_P (t1)
3736 || TREE_CODE (t1) == VECTOR_TYPE
3737 || TREE_CODE (t1) == COMPLEX_TYPE
3738 || TREE_CODE (t1) == OFFSET_TYPE
3739 || POINTER_TYPE_P (t1))
3740 {
3741 /* Can't be the same type if they have different sign or precision. */
3742 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3743 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3744 return false;
3745
3746 if (TREE_CODE (t1) == INTEGER_TYPE
3747 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
3748 return false;
3749
3750 /* That's all we need to check for float and fixed-point types. */
3751 if (SCALAR_FLOAT_TYPE_P (t1)
3752 || FIXED_POINT_TYPE_P (t1))
3753 return true;
3754
3755 /* For other types fall through to more complex checks. */
3756 }
3757
3758 /* If the types have been previously registered and found equal
3759 they still are. */
3760 leader1 = gimple_lookup_type_leader (t1);
3761 leader2 = gimple_lookup_type_leader (t2);
3762 if (leader1 == t2
3763 || t1 == leader2
3764 || (leader1 && leader1 == leader2))
3765 return true;
3766
3767 /* If the hash values of t1 and t2 are different the types can't
3768 possibly be the same. This helps keep the type-pair hashtable
3769 small, only tracking comparisons for hash collisions. */
3770 if (gimple_type_hash (t1) != gimple_type_hash (t2))
3771 return false;
3772
3773 /* If we've visited this type pair before (in the case of aggregates
3774 with self-referential types), and we made a decision, return it. */
3775 p = lookup_type_pair (t1, t2);
3776 if (p->same_p[GTC_MERGE] == 0 || p->same_p[GTC_MERGE] == 1)
3777 {
3778 /* We have already decided whether T1 and T2 are the
3779 same, return the cached result. */
3780 return p->same_p[GTC_MERGE] == 1;
3781 }
3782
3783 /* Now set up the SCC machinery for the comparison. */
3784 gtc_next_dfs_num = 1;
3785 sccstate = pointer_map_create ();
3786 gcc_obstack_init (&sccstate_obstack);
3787 res = gimple_types_compatible_p_1 (t1, t2, p,
3788 &sccstack, sccstate, &sccstate_obstack);
3789 VEC_free (type_pair_t, heap, sccstack);
3790 pointer_map_destroy (sccstate);
3791 obstack_free (&sccstate_obstack, NULL);
3792
3793 return res;
3794 }
3795
3796
3797 static hashval_t
3798 iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
3799 struct pointer_map_t *, struct obstack *);
3800
3801 /* DFS visit the edge from the caller's type with state *STATE to T.
3802 Update the caller's type hash V with the hash for T if it is not part
3803 of the SCC containing the caller's type and return it.
3804 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk done. */
3805
3806 static hashval_t
3807 visit (tree t, struct sccs *state, hashval_t v,
3808 VEC (tree, heap) **sccstack,
3809 struct pointer_map_t *sccstate,
3810 struct obstack *sccstate_obstack)
3811 {
3812 struct sccs *cstate = NULL;
3813 struct tree_int_map m;
3814 void **slot;
3815
3816 /* If there is a hash value recorded for this type then it can't
3817 possibly be part of our parent SCC. Simply mix in its hash. */
3818 m.base.from = t;
3819 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
3820 && *slot)
3821 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, v);
3822
3823 if ((slot = pointer_map_contains (sccstate, t)) != NULL)
3824 cstate = (struct sccs *)*slot;
3825 if (!cstate)
3826 {
3827 hashval_t tem;
3828 /* Not yet visited. DFS recurse. */
3829 tem = iterative_hash_gimple_type (t, v,
3830 sccstack, sccstate, sccstate_obstack);
3831 if (!cstate)
3832 cstate = (struct sccs *)* pointer_map_contains (sccstate, t);
3833 state->low = MIN (state->low, cstate->low);
3834 /* If the type is no longer on the SCC stack and thus is not part
3835 of the parent's SCC, mix in its hash value. Otherwise we will
3836 ignore the type for hashing purposes and return the unaltered
3837 hash value. */
3838 if (!cstate->on_sccstack)
3839 return tem;
3840 }
3841 if (cstate->dfsnum < state->dfsnum
3842 && cstate->on_sccstack)
3843 state->low = MIN (cstate->dfsnum, state->low);
3844
3845 /* We are part of our parent's SCC, so skip this type during hashing
3846 and return the unaltered hash value. */
3847 return v;
3848 }
3849
3850 /* Hash NAME with the previous hash value V and return it. */
3851
3852 static hashval_t
3853 iterative_hash_name (tree name, hashval_t v)
3854 {
3855 if (!name)
3856 return v;
3857 v = iterative_hash_hashval_t (TREE_CODE (name), v);
3858 if (TREE_CODE (name) == TYPE_DECL)
3859 name = DECL_NAME (name);
3860 if (!name)
3861 return v;
3862 gcc_assert (TREE_CODE (name) == IDENTIFIER_NODE);
3863 return iterative_hash_object (IDENTIFIER_HASH_VALUE (name), v);
3864 }
3865
3866 /* A type, hashvalue pair for sorting SCC members. */
3867
3868 struct type_hash_pair {
3869 tree type;
3870 hashval_t hash;
3871 };
3872
3873 /* Compare two type, hashvalue pairs. */
3874
3875 static int
3876 type_hash_pair_compare (const void *p1_, const void *p2_)
3877 {
3878 const struct type_hash_pair *p1 = (const struct type_hash_pair *) p1_;
3879 const struct type_hash_pair *p2 = (const struct type_hash_pair *) p2_;
3880 if (p1->hash < p2->hash)
3881 return -1;
3882 else if (p1->hash > p2->hash)
3883 return 1;
3884 return 0;
3885 }
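/* Note (editor's addition): this comparator deliberately leaves pairs
   with equal hashes unordered.  iterative_hash_gimple_type below
   compensates by skipping runs of equal hashes when mixing, because
   qsort gives those no stable order across different TUs.  */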
3886
3887 /* Return a hash value for gimple type TYPE combined with VAL.
3888 SCCSTACK, SCCSTATE and SCCSTATE_OBSTACK are state for the DFS walk.
3889 
3890 To hash a type we end up hashing in all types reachable from it.
3891 Through pointers we can end up with cycles, which break the
3892 required property that structurally equivalent types must get
3893 the same hash value. To avoid this we have to hash all types in
3894 a cycle (the SCC) in a commutative way. The easiest way is to
3895 not mix in the hashes of the SCC members at all. To make this
3896 work we have to delay setting the hash values of the SCC until
3897 it is complete. */
3898
3899 static hashval_t
3900 iterative_hash_gimple_type (tree type, hashval_t val,
3901 VEC(tree, heap) **sccstack,
3902 struct pointer_map_t *sccstate,
3903 struct obstack *sccstate_obstack)
3904 {
3905 hashval_t v;
3906 void **slot;
3907 struct sccs *state;
3908
3909 /* Not visited during this DFS walk. */
3910 gcc_checking_assert (!pointer_map_contains (sccstate, type));
3911 state = XOBNEW (sccstate_obstack, struct sccs);
3912 *pointer_map_insert (sccstate, type) = state;
3913
3914 VEC_safe_push (tree, heap, *sccstack, type);
3915 state->dfsnum = next_dfs_num++;
3916 state->low = state->dfsnum;
3917 state->on_sccstack = true;
3918
3919 /* Combine a few common features of types so that types are grouped into
3920 smaller sets; when searching for existing matching types to merge,
3921 only existing types having the same features as the new type will be
3922 checked. */
3923 v = iterative_hash_name (TYPE_NAME (type), 0);
3924 if (TYPE_NAME (type)
3925 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3926 && DECL_CONTEXT (TYPE_NAME (type))
3927 && TYPE_P (DECL_CONTEXT (TYPE_NAME (type))))
3928 v = visit (DECL_CONTEXT (TYPE_NAME (type)), state, v,
3929 sccstack, sccstate, sccstate_obstack);
3930 v = iterative_hash_hashval_t (TREE_CODE (type), v);
3931 v = iterative_hash_hashval_t (TYPE_QUALS (type), v);
3932 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
3933
3934 /* Do not hash the type's size as this will cause differences in
3935 hash values for the complete vs. the incomplete type variant. */
3936
3937 /* Incorporate common features of numerical types. */
3938 if (INTEGRAL_TYPE_P (type)
3939 || SCALAR_FLOAT_TYPE_P (type)
3940 || FIXED_POINT_TYPE_P (type))
3941 {
3942 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
3943 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
3944 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
3945 }
3946
3947 /* For pointer and reference types, fold in information about the type
3948 pointed to. */
3949 if (POINTER_TYPE_P (type))
3950 v = visit (TREE_TYPE (type), state, v,
3951 sccstack, sccstate, sccstate_obstack);
3952
3953 /* For integer types hash the type's min/max values and the string flag. */
3954 if (TREE_CODE (type) == INTEGER_TYPE)
3955 {
3956 /* OMP lowering can introduce error_mark_node in place of
3957 random local decls in types. */
3958 if (TYPE_MIN_VALUE (type) != error_mark_node)
3959 v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
3960 if (TYPE_MAX_VALUE (type) != error_mark_node)
3961 v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
3962 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3963 }
3964
3965 /* For array types hash the domain and the string flag. */
3966 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3967 {
3968 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3969 v = visit (TYPE_DOMAIN (type), state, v,
3970 sccstack, sccstate, sccstate_obstack);
3971 }
3972
3973 /* Recurse for aggregates with a single element type. */
3974 if (TREE_CODE (type) == ARRAY_TYPE
3975 || TREE_CODE (type) == COMPLEX_TYPE
3976 || TREE_CODE (type) == VECTOR_TYPE)
3977 v = visit (TREE_TYPE (type), state, v,
3978 sccstack, sccstate, sccstate_obstack);
3979
3980 /* Incorporate function return and argument types. */
3981 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
3982 {
3983 unsigned na;
3984 tree p;
3985
3986 /* For method types also incorporate their parent class. */
3987 if (TREE_CODE (type) == METHOD_TYPE)
3988 v = visit (TYPE_METHOD_BASETYPE (type), state, v,
3989 sccstack, sccstate, sccstate_obstack);
3990
3991 /* Check result and argument types. */
3992 v = visit (TREE_TYPE (type), state, v,
3993 sccstack, sccstate, sccstate_obstack);
3994 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
3995 {
3996 v = visit (TREE_VALUE (p), state, v,
3997 sccstack, sccstate, sccstate_obstack);
3998 na++;
3999 }
4000
4001 v = iterative_hash_hashval_t (na, v);
4002 }
4003
4004 if (RECORD_OR_UNION_TYPE_P (type))
4005 {
4006 unsigned nf;
4007 tree f;
4008
4009 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4010 {
4011 v = iterative_hash_name (DECL_NAME (f), v);
4012 v = visit (TREE_TYPE (f), state, v,
4013 sccstack, sccstate, sccstate_obstack);
4014 nf++;
4015 }
4016
4017 v = iterative_hash_hashval_t (nf, v);
4018 }
4019
4020 /* Record hash for us. */
4021 state->u.hash = v;
4022
4023 /* See if we found an SCC. */
4024 if (state->low == state->dfsnum)
4025 {
4026 tree x;
4027 struct tree_int_map *m;
4028
4029 /* Pop off the SCC and set its hash values. */
4030 x = VEC_pop (tree, *sccstack);
4031 /* Optimize SCC size one. */
4032 if (x == type)
4033 {
4034 state->on_sccstack = false;
4035 m = ggc_alloc_cleared_tree_int_map ();
4036 m->base.from = x;
4037 m->to = v;
4038 slot = htab_find_slot (type_hash_cache, m, INSERT);
4039 gcc_assert (!*slot);
4040 *slot = (void *) m;
4041 }
4042 else
4043 {
4044 struct sccs *cstate;
4045 unsigned first, i, size, j;
4046 struct type_hash_pair *pairs;
4047 /* Pop off the SCC and build an array of type, hash pairs. */
4048 first = VEC_length (tree, *sccstack) - 1;
4049 while (VEC_index (tree, *sccstack, first) != type)
4050 --first;
4051 size = VEC_length (tree, *sccstack) - first + 1;
4052 pairs = XALLOCAVEC (struct type_hash_pair, size);
4053 i = 0;
4054 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4055 cstate->on_sccstack = false;
4056 pairs[i].type = x;
4057 pairs[i].hash = cstate->u.hash;
4058 do
4059 {
4060 x = VEC_pop (tree, *sccstack);
4061 cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
4062 cstate->on_sccstack = false;
4063 ++i;
4064 pairs[i].type = x;
4065 pairs[i].hash = cstate->u.hash;
4066 }
4067 while (x != type);
4068 gcc_assert (i + 1 == size);
4069 /* Sort the array of type, hash pairs so that when we mix in
4070 all members of the SCC the hash value becomes independent of
4071 the order in which we visited the SCC. Disregard hashes equal
4072 to the hash of the type we mix into because we cannot guarantee
4073 a stable sort for those across different TUs. */
4074 qsort (pairs, size, sizeof (struct type_hash_pair),
4075 type_hash_pair_compare);
4076 for (i = 0; i < size; ++i)
4077 {
4078 hashval_t hash;
4079 m = ggc_alloc_cleared_tree_int_map ();
4080 m->base.from = pairs[i].type;
4081 hash = pairs[i].hash;
4082 /* Skip same hashes. */
4083 for (j = i + 1; j < size && pairs[j].hash == pairs[i].hash; ++j)
4084 ;
4085 for (; j < size; ++j)
4086 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4087 for (j = 0; pairs[j].hash != pairs[i].hash; ++j)
4088 hash = iterative_hash_hashval_t (pairs[j].hash, hash);
4089 m->to = hash;
4090 if (pairs[i].type == type)
4091 v = hash;
4092 slot = htab_find_slot (type_hash_cache, m, INSERT);
4093 gcc_assert (!*slot);
4094 *slot = (void *) m;
4095 }
4096 }
4097 }
4098
4099 return iterative_hash_hashval_t (v, val);
4100 }
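/* Worked illustration (editor's sketch, not from the original file):
   for the mutually recursive pair

     struct A { struct B *b; };
     struct B { struct A *a; };

   the DFS from A reaches B and returns to A, so A and B form one SCC.
   Their member hashes are collected into the pairs array, sorted by
   hash value and mixed in that order, so the hash cached for A and
   for B does not depend on whether the walk entered the cycle at A
   or at B.  */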
4101
4102
4103 /* Returns a hash value for P (assumed to be a type). The hash value
4104 is computed using some distinguishing features of the type. Note
4105 that we cannot use pointer hashing here as we may be dealing with
4106 two distinct instances of the same type.
4107
4108 This function should produce the same hash value for two compatible
4109 types according to gimple_types_compatible_p. */
4110
4111 static hashval_t
4112 gimple_type_hash (const void *p)
4113 {
4114 const_tree t = (const_tree) p;
4115 VEC(tree, heap) *sccstack = NULL;
4116 struct pointer_map_t *sccstate;
4117 struct obstack sccstate_obstack;
4118 hashval_t val;
4119 void **slot;
4120 struct tree_int_map m;
4121
4122 if (type_hash_cache == NULL)
4123 type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4124 tree_int_map_eq, NULL);
4125
4126 m.base.from = CONST_CAST_TREE (t);
4127 if ((slot = htab_find_slot (type_hash_cache, &m, NO_INSERT))
4128 && *slot)
4129 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, 0);
4130
4131 /* Perform a DFS walk and pre-hash all reachable types. */
4132 next_dfs_num = 1;
4133 sccstate = pointer_map_create ();
4134 gcc_obstack_init (&sccstate_obstack);
4135 val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
4136 &sccstack, sccstate, &sccstate_obstack);
4137 VEC_free (tree, heap, sccstack);
4138 pointer_map_destroy (sccstate);
4139 obstack_free (&sccstate_obstack, NULL);
4140
4141 return val;
4142 }
4143
4144 /* Return a hash value for gimple type TYPE combined with VAL.
4145
4146 The hash value returned is equal for types considered compatible
4147 by gimple_canonical_types_compatible_p. */
4148
4149 static hashval_t
4150 iterative_hash_canonical_type (tree type, hashval_t val)
4151 {
4152 hashval_t v;
4153 void **slot;
4154 struct tree_int_map *mp, m;
4155
4156 m.base.from = type;
4157 if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
4158 && *slot)
4159 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
4160
4161 /* Combine a few common features of types so that types are grouped into
4162 smaller sets; when searching for existing matching types to merge,
4163 only existing types having the same features as the new type will be
4164 checked. */
4165 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
4166 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
4167 v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
4168 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
4169
4170 /* Incorporate common features of numerical types. */
4171 if (INTEGRAL_TYPE_P (type)
4172 || SCALAR_FLOAT_TYPE_P (type)
4173 || FIXED_POINT_TYPE_P (type)
4174 || TREE_CODE (type) == VECTOR_TYPE
4175 || TREE_CODE (type) == COMPLEX_TYPE
4176 || TREE_CODE (type) == OFFSET_TYPE
4177 || POINTER_TYPE_P (type))
4178 {
4179 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
4180 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
4181 }
4182
4183 /* For pointer and reference types, fold in information about the type
4184 pointed to but do not recurse to the pointed-to type. */
4185 if (POINTER_TYPE_P (type))
4186 {
4187 v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
4188 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
4189 v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
4190 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
4191 }
4192
4193 /* For integer types hash only the string flag. */
4194 if (TREE_CODE (type) == INTEGER_TYPE)
4195 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4196
4197 /* For array types hash the domain bounds and the string flag. */
4198 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4199 {
4200 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
4201 /* OMP lowering can introduce error_mark_node in place of
4202 random local decls in types. */
4203 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
4204 v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
4205 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
4206 v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
4207 }
4208
4209 /* Recurse for aggregates with a single element type. */
4210 if (TREE_CODE (type) == ARRAY_TYPE
4211 || TREE_CODE (type) == COMPLEX_TYPE
4212 || TREE_CODE (type) == VECTOR_TYPE)
4213 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4214
4215 /* Incorporate function return and argument types. */
4216 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
4217 {
4218 unsigned na;
4219 tree p;
4220
4221 /* For method types also incorporate their parent class. */
4222 if (TREE_CODE (type) == METHOD_TYPE)
4223 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
4224
4225 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
4226
4227 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
4228 {
4229 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
4230 na++;
4231 }
4232
4233 v = iterative_hash_hashval_t (na, v);
4234 }
4235
4236 if (RECORD_OR_UNION_TYPE_P (type))
4237 {
4238 unsigned nf;
4239 tree f;
4240
4241 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
4242 if (TREE_CODE (f) == FIELD_DECL)
4243 {
4244 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
4245 nf++;
4246 }
4247
4248 v = iterative_hash_hashval_t (nf, v);
4249 }
4250
4251 /* Cache the just computed hash value. */
4252 mp = ggc_alloc_cleared_tree_int_map ();
4253 mp->base.from = type;
4254 mp->to = v;
4255 *slot = (void *) mp;
4256
4257 return iterative_hash_hashval_t (v, val);
4258 }
4259
4260 static hashval_t
4261 gimple_canonical_type_hash (const void *p)
4262 {
4263 if (canonical_type_hash_cache == NULL)
4264 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
4265 tree_int_map_eq, NULL);
4266
4267 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
4268 }
4269
4270
4271 /* Returns nonzero if P1 and P2 are equal. */
4272
4273 static int
4274 gimple_type_eq (const void *p1, const void *p2)
4275 {
4276 const_tree t1 = (const_tree) p1;
4277 const_tree t2 = (const_tree) p2;
4278 return gimple_types_compatible_p (CONST_CAST_TREE (t1),
4279 CONST_CAST_TREE (t2));
4280 }
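/* Note (editor's addition): gimple_type_hash and gimple_type_eq are
   the hash/equality callback pair for the gimple_types hash table,
   mirroring the creation call in gimple_register_type below:

     htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);

   Two types for which gimple_types_compatible_p returns true must
   therefore hash identically, which gimple_type_hash guarantees.  */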
4281
4282
4283 /* Worker for gimple_register_type.
4284 Register type T in the global type table gimple_types.
4285 When REGISTERING_MV is false, first recurse for the main variant of T. */
4286
4287 static tree
4288 gimple_register_type_1 (tree t, bool registering_mv)
4289 {
4290 void **slot;
4291 gimple_type_leader_entry *leader;
4292
4293 /* If we registered this type before return the cached result. */
4294 leader = &gimple_type_leader[TYPE_UID (t) % GIMPLE_TYPE_LEADER_SIZE];
4295 if (leader->type == t)
4296 return leader->leader;
4297
4298 /* Always register the main variant first. This is important so that
4299 we pick up the non-typedef variants as canonical; otherwise we'll
4300 end up taking typedef ids for structure tags during comparison.
4301 It also makes sure that main variants will be merged to main variants.
4302 As we are operating on a possibly partially fixed-up type graph,
4303 do not bother to recurse more than once, otherwise we may end up
4304 walking in circles.
4305 If we are registering a main variant, it will either remain its
4306 own main variant or it will be merged to something else, in which
4307 case we do not care about the main variant leader. */
4308 if (!registering_mv
4309 && TYPE_MAIN_VARIANT (t) != t)
4310 gimple_register_type_1 (TYPE_MAIN_VARIANT (t), true);
4311
4312 /* See if we already have an equivalent type registered. */
4313 slot = htab_find_slot (gimple_types, t, INSERT);
4314 if (*slot
4315 && *(tree *)slot != t)
4316 {
4317 tree new_type = (tree) *((tree *) slot);
4318 leader->type = t;
4319 leader->leader = new_type;
4320 return new_type;
4321 }
4322
4323 /* If not, insert it to the cache and the hash. */
4324 leader->type = t;
4325 leader->leader = t;
4326 *slot = (void *) t;
4327 return t;
4328 }
4329
4330 /* Register type T in the global type table gimple_types.
4331 If another type T', compatible with T, already existed in
4332 gimple_types then return T', otherwise return T. This is used by
4333 LTO to merge identical types read from different TUs. */
4334
4335 tree
4336 gimple_register_type (tree t)
4337 {
4338 gcc_assert (TYPE_P (t));
4339
4340 if (!gimple_type_leader)
4341 gimple_type_leader = ggc_alloc_cleared_vec_gimple_type_leader_entry_s
4342 (GIMPLE_TYPE_LEADER_SIZE);
4343
4344 if (gimple_types == NULL)
4345 gimple_types = htab_create_ggc (16381, gimple_type_hash, gimple_type_eq, 0);
4346
4347 return gimple_register_type_1 (t, false);
4348 }
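/* Usage sketch (editor's addition; the surrounding stream-in context
   is assumed, it lives in the LTO reader rather than in this file):
   after materializing a type T from a stream the reader replaces it
   with its merged leader, e.g.

     t = gimple_register_type (t);

   so identical types read from different TUs collapse to a single
   tree node.  */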
4349
4350 /* The TYPE_CANONICAL merging machinery. It should closely resemble
4351 the middle-end types_compatible_p function. It must avoid
4352 claiming that types which should be treated the same with respect
4353 to TBAA are different. Canonical types are also used for IL
4354 consistency checks via the useless_type_conversion_p predicate,
4355 which does not handle all type kinds itself but falls back to
4356 pointer comparison of TYPE_CANONICAL for aggregates, for
4357 example. */
4358
4359 /* Return true iff T1 and T2 are structurally identical as far as
4360 TBAA is concerned. */
4361
4362 static bool
4363 gimple_canonical_types_compatible_p (tree t1, tree t2)
4364 {
4365 /* Before starting to set up the SCC machinery handle simple cases. */
4366
4367 /* Check first for the obvious case of pointer identity. */
4368 if (t1 == t2)
4369 return true;
4370
4371 /* Check that we have two types to compare. */
4372 if (t1 == NULL_TREE || t2 == NULL_TREE)
4373 return false;
4374
4375 /* If the types have been previously registered and found equal
4376 they still are. */
4377 if (TYPE_CANONICAL (t1)
4378 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
4379 return true;
4380
4381 /* Can't be the same type if the types don't have the same code. */
4382 if (TREE_CODE (t1) != TREE_CODE (t2))
4383 return false;
4384
4385 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
4386 return false;
4387
4388 /* Qualifiers do not matter for canonical type comparison purposes. */
4389
4390 /* Void types and nullptr types are always the same. */
4391 if (TREE_CODE (t1) == VOID_TYPE
4392 || TREE_CODE (t1) == NULLPTR_TYPE)
4393 return true;
4394
4395 /* Can't be the same type if they have different alignment or mode. */
4396 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
4397 || TYPE_MODE (t1) != TYPE_MODE (t2))
4398 return false;
4399
4400 /* Non-aggregate types can be handled cheaply. */
4401 if (INTEGRAL_TYPE_P (t1)
4402 || SCALAR_FLOAT_TYPE_P (t1)
4403 || FIXED_POINT_TYPE_P (t1)
4404 || TREE_CODE (t1) == VECTOR_TYPE
4405 || TREE_CODE (t1) == COMPLEX_TYPE
4406 || TREE_CODE (t1) == OFFSET_TYPE
4407 || POINTER_TYPE_P (t1))
4408 {
4409 /* Can't be the same type if they have different sign or precision. */
4410 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
4411 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
4412 return false;
4413
4414 if (TREE_CODE (t1) == INTEGER_TYPE
4415 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
4416 return false;
4417
4418 /* For canonical type comparisons we do not want to build SCCs
4419 so we cannot compare pointed-to types. But we can, for now,
4420 require the same pointed-to type kind and match what
4421 useless_type_conversion_p would do. */
4422 if (POINTER_TYPE_P (t1))
4423 {
4424 /* If the two pointers have different ref-all attributes,
4425 they can't be the same type. */
4426 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
4427 return false;
4428
4429 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
4430 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
4431 return false;
4432
4433 if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
4434 return false;
4435
4436 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
4437 return false;
4438 }
4439
4440 /* Tail-recurse to components. */
4441 if (TREE_CODE (t1) == VECTOR_TYPE
4442 || TREE_CODE (t1) == COMPLEX_TYPE)
4443 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
4444 TREE_TYPE (t2));
4445
4446 return true;
4447 }
4448
4449 /* Do type-specific comparisons. */
4450 switch (TREE_CODE (t1))
4451 {
4452 case ARRAY_TYPE:
4453 /* Array types are the same if the element types are the same and
4454 the number of elements is the same. */
4455 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
4456 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
4457 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
4458 return false;
4459 else
4460 {
4461 tree i1 = TYPE_DOMAIN (t1);
4462 tree i2 = TYPE_DOMAIN (t2);
4463
4464 /* For an incomplete external array, the type domain can be
4465 NULL_TREE. Check this condition also. */
4466 if (i1 == NULL_TREE && i2 == NULL_TREE)
4467 return true;
4468 else if (i1 == NULL_TREE || i2 == NULL_TREE)
4469 return false;
4470 else
4471 {
4472 tree min1 = TYPE_MIN_VALUE (i1);
4473 tree min2 = TYPE_MIN_VALUE (i2);
4474 tree max1 = TYPE_MAX_VALUE (i1);
4475 tree max2 = TYPE_MAX_VALUE (i2);
4476
4477 /* The minimum/maximum values have to be the same. */
4478 if ((min1 == min2
4479 || (min1 && min2
4480 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
4481 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
4482 || operand_equal_p (min1, min2, 0))))
4483 && (max1 == max2
4484 || (max1 && max2
4485 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
4486 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
4487 || operand_equal_p (max1, max2, 0)))))
4488 return true;
4489 else
4490 return false;
4491 }
4492 }
4493
4494 case METHOD_TYPE:
4495 case FUNCTION_TYPE:
4496 /* Function types are the same if the return type and arguments types
4497 are the same. */
4498 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
4499 return false;
4500
4501 if (!comp_type_attributes (t1, t2))
4502 return false;
4503
4504 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
4505 return true;
4506 else
4507 {
4508 tree parms1, parms2;
4509
4510 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
4511 parms1 && parms2;
4512 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
4513 {
4514 if (!gimple_canonical_types_compatible_p
4515 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
4516 return false;
4517 }
4518
4519 if (parms1 || parms2)
4520 return false;
4521
4522 return true;
4523 }
4524
4525 case RECORD_TYPE:
4526 case UNION_TYPE:
4527 case QUAL_UNION_TYPE:
4528 {
4529 tree f1, f2;
4530
4531 /* For aggregate types, all the fields must be the same. */
4532 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
4533 f1 || f2;
4534 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
4535 {
4536 /* Skip non-fields. */
4537 while (f1 && TREE_CODE (f1) != FIELD_DECL)
4538 f1 = TREE_CHAIN (f1);
4539 while (f2 && TREE_CODE (f2) != FIELD_DECL)
4540 f2 = TREE_CHAIN (f2);
4541 if (!f1 || !f2)
4542 break;
4543 /* The fields must have the same name, offset and type. */
4544 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
4545 || !gimple_compare_field_offset (f1, f2)
4546 || !gimple_canonical_types_compatible_p
4547 (TREE_TYPE (f1), TREE_TYPE (f2)))
4548 return false;
4549 }
4550
4551 /* If one aggregate has more fields than the other, they
4552 are not the same. */
4553 if (f1 || f2)
4554 return false;
4555
4556 return true;
4557 }
4558
4559 default:
4560 gcc_unreachable ();
4561 }
4562 }
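/* Illustration (editor's sketch): since qualifiers are ignored for
   canonical comparison purposes, 'const int' and 'int' compare as
   compatible here; 'int' and 'float' already differ in TREE_CODE,
   and 'struct {int i;}' vs. 'struct {int i; int j;}' differ in
   their field chains.  */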
4563
4564
4565 /* Returns nonzero if P1 and P2 are equal. */
4566
4567 static int
4568 gimple_canonical_type_eq (const void *p1, const void *p2)
4569 {
4570 const_tree t1 = (const_tree) p1;
4571 const_tree t2 = (const_tree) p2;
4572 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
4573 CONST_CAST_TREE (t2));
4574 }
4575
4576 /* Register type T in the global type table gimple_types.
4577 If another type T', compatible with T, already existed in
4578 gimple_types then return T', otherwise return T. This is used by
4579 LTO to merge identical types read from different TUs.
4580
4581 ??? This merging does not exactly match how the tree.c middle-end
4582 functions will assign TYPE_CANONICAL when new types are created
4583 during optimization (which at least happens for pointer and array
4584 types). */
4585
4586 tree
4587 gimple_register_canonical_type (tree t)
4588 {
4589 void **slot;
4590
4591 gcc_assert (TYPE_P (t));
4592
4593 if (TYPE_CANONICAL (t))
4594 return TYPE_CANONICAL (t);
4595
4596 if (gimple_canonical_types == NULL)
4597 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
4598 gimple_canonical_type_eq, 0);
4599
4600 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
4601 if (*slot
4602 && *(tree *)slot != t)
4603 {
4604 tree new_type = (tree) *((tree *) slot);
4605
4606 TYPE_CANONICAL (t) = new_type;
4607 t = new_type;
4608 }
4609 else
4610 {
4611 TYPE_CANONICAL (t) = t;
4612 *slot = (void *) t;
4613 }
4614
4615 return t;
4616 }
4617
4618
4619 /* Show statistics on references to the global type tables and caches. */
4620
4621 void
4622 print_gimple_types_stats (void)
4623 {
4624 if (gimple_types)
4625 fprintf (stderr, "GIMPLE type table: size %ld, %ld elements, "
4626 "%ld searches, %ld collisions (ratio: %f)\n",
4627 (long) htab_size (gimple_types),
4628 (long) htab_elements (gimple_types),
4629 (long) gimple_types->searches,
4630 (long) gimple_types->collisions,
4631 htab_collisions (gimple_types));
4632 else
4633 fprintf (stderr, "GIMPLE type table is empty\n");
4634 if (type_hash_cache)
4635 fprintf (stderr, "GIMPLE type hash table: size %ld, %ld elements, "
4636 "%ld searches, %ld collisions (ratio: %f)\n",
4637 (long) htab_size (type_hash_cache),
4638 (long) htab_elements (type_hash_cache),
4639 (long) type_hash_cache->searches,
4640 (long) type_hash_cache->collisions,
4641 htab_collisions (type_hash_cache));
4642 else
4643 fprintf (stderr, "GIMPLE type hash table is empty\n");
4644 if (gimple_canonical_types)
4645 fprintf (stderr, "GIMPLE canonical type table: size %ld, %ld elements, "
4646 "%ld searches, %ld collisions (ratio: %f)\n",
4647 (long) htab_size (gimple_canonical_types),
4648 (long) htab_elements (gimple_canonical_types),
4649 (long) gimple_canonical_types->searches,
4650 (long) gimple_canonical_types->collisions,
4651 htab_collisions (gimple_canonical_types));
4652 else
4653 fprintf (stderr, "GIMPLE canonical type table is empty\n");
4654 if (canonical_type_hash_cache)
4655 fprintf (stderr, "GIMPLE canonical type hash table: size %ld, %ld elements, "
4656 "%ld searches, %ld collisions (ratio: %f)\n",
4657 (long) htab_size (canonical_type_hash_cache),
4658 (long) htab_elements (canonical_type_hash_cache),
4659 (long) canonical_type_hash_cache->searches,
4660 (long) canonical_type_hash_cache->collisions,
4661 htab_collisions (canonical_type_hash_cache));
4662 else
4663 fprintf (stderr, "GIMPLE canonical type hash table is empty\n");
4664 }
4665
4666 /* Free the gimple type hashtables used for LTO type merging. */
4667
4668 void
4669 free_gimple_type_tables (void)
4670 {
4671 /* Last chance to print stats for the tables. */
4672 if (flag_lto_report)
4673 print_gimple_types_stats ();
4674
4675 if (gimple_types)
4676 {
4677 htab_delete (gimple_types);
4678 gimple_types = NULL;
4679 }
4680 if (gimple_canonical_types)
4681 {
4682 htab_delete (gimple_canonical_types);
4683 gimple_canonical_types = NULL;
4684 }
4685 if (type_hash_cache)
4686 {
4687 htab_delete (type_hash_cache);
4688 type_hash_cache = NULL;
4689 }
4690 if (canonical_type_hash_cache)
4691 {
4692 htab_delete (canonical_type_hash_cache);
4693 canonical_type_hash_cache = NULL;
4694 }
4695 if (type_pair_cache)
4696 {
4697 free (type_pair_cache);
4698 type_pair_cache = NULL;
4699 }
4700 gimple_type_leader = NULL;
4701 }
4702
4703
4704 /* Return a type the same as TYPE except unsigned or
4705 signed according to UNSIGNEDP. */
4706
4707 static tree
4708 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
4709 {
4710 tree type1;
4711
4712 type1 = TYPE_MAIN_VARIANT (type);
4713 if (type1 == signed_char_type_node
4714 || type1 == char_type_node
4715 || type1 == unsigned_char_type_node)
4716 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4717 if (type1 == integer_type_node || type1 == unsigned_type_node)
4718 return unsignedp ? unsigned_type_node : integer_type_node;
4719 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
4720 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4721 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
4722 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4723 if (type1 == long_long_integer_type_node
4724 || type1 == long_long_unsigned_type_node)
4725 return unsignedp
4726 ? long_long_unsigned_type_node
4727 : long_long_integer_type_node;
4728 if (int128_integer_type_node && (type1 == int128_integer_type_node || type1 == int128_unsigned_type_node))
4729 return unsignedp
4730 ? int128_unsigned_type_node
4731 : int128_integer_type_node;
4732 #if HOST_BITS_PER_WIDE_INT >= 64
4733 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
4734 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4735 #endif
4736 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
4737 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4738 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
4739 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4740 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
4741 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4742 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
4743 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4744
4745 #define GIMPLE_FIXED_TYPES(NAME) \
4746 if (type1 == short_ ## NAME ## _type_node \
4747 || type1 == unsigned_short_ ## NAME ## _type_node) \
4748 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
4749 : short_ ## NAME ## _type_node; \
4750 if (type1 == NAME ## _type_node \
4751 || type1 == unsigned_ ## NAME ## _type_node) \
4752 return unsignedp ? unsigned_ ## NAME ## _type_node \
4753 : NAME ## _type_node; \
4754 if (type1 == long_ ## NAME ## _type_node \
4755 || type1 == unsigned_long_ ## NAME ## _type_node) \
4756 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
4757 : long_ ## NAME ## _type_node; \
4758 if (type1 == long_long_ ## NAME ## _type_node \
4759 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
4760 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
4761 : long_long_ ## NAME ## _type_node;
4762
4763 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
4764 if (type1 == NAME ## _type_node \
4765 || type1 == u ## NAME ## _type_node) \
4766 return unsignedp ? u ## NAME ## _type_node \
4767 : NAME ## _type_node;
4768
4769 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
4770 if (type1 == sat_ ## short_ ## NAME ## _type_node \
4771 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
4772 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
4773 : sat_ ## short_ ## NAME ## _type_node; \
4774 if (type1 == sat_ ## NAME ## _type_node \
4775 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
4776 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
4777 : sat_ ## NAME ## _type_node; \
4778 if (type1 == sat_ ## long_ ## NAME ## _type_node \
4779 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
4780 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
4781 : sat_ ## long_ ## NAME ## _type_node; \
4782 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
4783 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
4784 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
4785 : sat_ ## long_long_ ## NAME ## _type_node;
4786
4787 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
4788 if (type1 == sat_ ## NAME ## _type_node \
4789 || type1 == sat_ ## u ## NAME ## _type_node) \
4790 return unsignedp ? sat_ ## u ## NAME ## _type_node \
4791 : sat_ ## NAME ## _type_node;
4792
4793 GIMPLE_FIXED_TYPES (fract);
4794 GIMPLE_FIXED_TYPES_SAT (fract);
4795 GIMPLE_FIXED_TYPES (accum);
4796 GIMPLE_FIXED_TYPES_SAT (accum);
4797
4798 GIMPLE_FIXED_MODE_TYPES (qq);
4799 GIMPLE_FIXED_MODE_TYPES (hq);
4800 GIMPLE_FIXED_MODE_TYPES (sq);
4801 GIMPLE_FIXED_MODE_TYPES (dq);
4802 GIMPLE_FIXED_MODE_TYPES (tq);
4803 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
4804 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
4805 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
4806 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
4807 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
4808 GIMPLE_FIXED_MODE_TYPES (ha);
4809 GIMPLE_FIXED_MODE_TYPES (sa);
4810 GIMPLE_FIXED_MODE_TYPES (da);
4811 GIMPLE_FIXED_MODE_TYPES (ta);
4812 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
4813 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
4814 GIMPLE_FIXED_MODE_TYPES_SAT (da);
4815 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
4816
4817 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
4818 the precision; they have precision set to match their range, but
4819 may use a wider mode to match an ABI. If we change modes, we may
4820 wind up with bad conversions. For INTEGER_TYPEs in C, must check
4821 the precision as well, so as to yield correct results for
4822 bit-field types. C++ does not have these separate bit-field
4823 types, and producing a signed or unsigned variant of an
4824 ENUMERAL_TYPE may cause other problems as well. */
4825 if (!INTEGRAL_TYPE_P (type)
4826 || TYPE_UNSIGNED (type) == unsignedp)
4827 return type;
4828
4829 #define TYPE_OK(node) \
4830 (TYPE_MODE (type) == TYPE_MODE (node) \
4831 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
4832 if (TYPE_OK (signed_char_type_node))
4833 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
4834 if (TYPE_OK (integer_type_node))
4835 return unsignedp ? unsigned_type_node : integer_type_node;
4836 if (TYPE_OK (short_integer_type_node))
4837 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
4838 if (TYPE_OK (long_integer_type_node))
4839 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
4840 if (TYPE_OK (long_long_integer_type_node))
4841 return (unsignedp
4842 ? long_long_unsigned_type_node
4843 : long_long_integer_type_node);
4844 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
4845 return (unsignedp
4846 ? int128_unsigned_type_node
4847 : int128_integer_type_node);
4848
4849 #if HOST_BITS_PER_WIDE_INT >= 64
4850 if (TYPE_OK (intTI_type_node))
4851 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
4852 #endif
4853 if (TYPE_OK (intDI_type_node))
4854 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
4855 if (TYPE_OK (intSI_type_node))
4856 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
4857 if (TYPE_OK (intHI_type_node))
4858 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
4859 if (TYPE_OK (intQI_type_node))
4860 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
4861
4862 #undef GIMPLE_FIXED_TYPES
4863 #undef GIMPLE_FIXED_MODE_TYPES
4864 #undef GIMPLE_FIXED_TYPES_SAT
4865 #undef GIMPLE_FIXED_MODE_TYPES_SAT
4866 #undef TYPE_OK
4867
4868 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
4869 }
4870
4871
4872 /* Return an unsigned type the same as TYPE in other respects. */
4873
4874 tree
4875 gimple_unsigned_type (tree type)
4876 {
4877 return gimple_signed_or_unsigned_type (true, type);
4878 }
4879
4880
4881 /* Return a signed type the same as TYPE in other respects. */
4882
4883 tree
4884 gimple_signed_type (tree type)
4885 {
4886 return gimple_signed_or_unsigned_type (false, type);
4887 }
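/* Examples (editor's addition), following the node tables above:
   gimple_unsigned_type (integer_type_node) yields unsigned_type_node
   and gimple_signed_type (unsigned_char_type_node) yields
   signed_char_type_node.  */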
4888
4889
4890 /* Return the type-based alias set for T, which may be an expression
4891 or a type. Return -1 if we don't do anything special. */
4892
4893 alias_set_type
4894 gimple_get_alias_set (tree t)
4895 {
4896 tree u;
4897
4898 /* Permit type-punning when accessing a union, provided the access
4899 is directly through the union. For example, this code does not
4900 permit taking the address of a union member and then storing
4901 through it. Even the type-punning allowed here is a GCC
4902 extension, albeit a common and useful one; the C standard says
4903 that such accesses have implementation-defined behavior. */
4904 for (u = t;
4905 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
4906 u = TREE_OPERAND (u, 0))
4907 if (TREE_CODE (u) == COMPONENT_REF
4908 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
4909 return 0;
4910
4911 /* That's all the expressions we handle specially. */
4912 if (!TYPE_P (t))
4913 return -1;
4914
4915 /* For convenience, follow the C standard when dealing with
4916 character types. Any object may be accessed via an lvalue that
4917 has character type. */
4918 if (t == char_type_node
4919 || t == signed_char_type_node
4920 || t == unsigned_char_type_node)
4921 return 0;
4922
4923 /* Allow aliasing between signed and unsigned variants of the same
4924 type. We treat the signed variant as canonical. */
4925 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
4926 {
4927 tree t1 = gimple_signed_type (t);
4928
4929 /* t1 == t can happen for boolean nodes which are always unsigned. */
4930 if (t1 != t)
4931 return get_alias_set (t1);
4932 }
4933
4934 return -1;
4935 }
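/* Illustration (editor's sketch) of the union rule above: given

     union u { int i; float f; } x;

   the access 'x.f' is a COMPONENT_REF directly through the union and
   gets alias set 0, so it may alias the int member; storing through
   'float *p = &x.f; *p = 0.0f;' is not a direct union access and
   falls back to the ordinary type-based rules.  */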
4936
4937
4938 /* Data structure used to count the number of dereferences to PTR
4939 inside an expression. */
4940 struct count_ptr_d
4941 {
4942 tree ptr;
4943 unsigned num_stores;
4944 unsigned num_loads;
4945 };
4946
4947 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
4948 MEM_REF nodes dereferencing the pointer passed in DATA. */
4949
4950 static tree
4951 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
4952 {
4953 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
4954 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
4955
4956 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
4957 pointer 'ptr' is *not* dereferenced, it is simply used to compute
4958 the address of 'fld' as 'ptr + offsetof(fld)'. */
4959 if (TREE_CODE (*tp) == ADDR_EXPR)
4960 {
4961 *walk_subtrees = 0;
4962 return NULL_TREE;
4963 }
4964
4965 if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
4966 {
4967 if (wi_p->is_lhs)
4968 count_p->num_stores++;
4969 else
4970 count_p->num_loads++;
4971 }
4972
4973 return NULL_TREE;
4974 }
4975
4976 /* Count the number of direct and indirect uses for pointer PTR in
4977 statement STMT. The number of direct uses is stored in
4978 *NUM_USES_P. Indirect references are counted separately depending
4979 on whether they are store or load operations. The counts are
4980 stored in *NUM_STORES_P and *NUM_LOADS_P. */
4981
4982 void
4983 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
4984 unsigned *num_loads_p, unsigned *num_stores_p)
4985 {
4986 ssa_op_iter i;
4987 tree use;
4988
4989 *num_uses_p = 0;
4990 *num_loads_p = 0;
4991 *num_stores_p = 0;
4992
4993 /* Find out the total number of uses of PTR in STMT. */
4994 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
4995 if (use == ptr)
4996 (*num_uses_p)++;
4997
4998 /* Now count the number of indirect references to PTR. This is
4999 truly awful, but we don't have much choice. There are no parent
5000 pointers inside INDIRECT_REFs, so an expression like
5001 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
5002 find all the indirect and direct uses of x_1 inside. The only
5003 shortcut we can take is the fact that GIMPLE only allows
5004 INDIRECT_REFs inside the expressions below. */
5005 if (is_gimple_assign (stmt)
5006 || gimple_code (stmt) == GIMPLE_RETURN
5007 || gimple_code (stmt) == GIMPLE_ASM
5008 || is_gimple_call (stmt))
5009 {
5010 struct walk_stmt_info wi;
5011 struct count_ptr_d count;
5012
5013 count.ptr = ptr;
5014 count.num_stores = 0;
5015 count.num_loads = 0;
5016
5017 memset (&wi, 0, sizeof (wi));
5018 wi.info = &count;
5019 walk_gimple_op (stmt, count_ptr_derefs, &wi);
5020
5021 *num_stores_p = count.num_stores;
5022 *num_loads_p = count.num_loads;
5023 }
5024
5025 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
5026 }
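/* Usage sketch (editor's addition): for the statement
   '*x_1 = foo (x_1, *x_1)' discussed above,

     unsigned uses, loads, stores;
     count_uses_and_derefs (x_1, stmt, &uses, &loads, &stores);

   yields uses == 3, loads == 1 and stores == 1, consistent with the
   closing assertion.  */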
5027
5028 /* From a tree operand OP return the base of a load or store operation
5029 or NULL_TREE if OP is not a load or a store. */
5030
5031 static tree
5032 get_base_loadstore (tree op)
5033 {
5034 while (handled_component_p (op))
5035 op = TREE_OPERAND (op, 0);
5036 if (DECL_P (op)
5037 || INDIRECT_REF_P (op)
5038 || TREE_CODE (op) == MEM_REF
5039 || TREE_CODE (op) == TARGET_MEM_REF)
5040 return op;
5041 return NULL_TREE;
5042 }
5043
5044 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
5045 VISIT_ADDR, if non-NULL, on load, store and address-taken operands,
5046 passing STMT, the base of the operand and DATA to each. The base
5047 will be either a decl, an indirect reference (including TARGET_MEM_REF)
5048 or the argument of an address expression.
5049 Returns the results of these callbacks or'ed together. */
5050
5051 bool
5052 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
5053 bool (*visit_load)(gimple, tree, void *),
5054 bool (*visit_store)(gimple, tree, void *),
5055 bool (*visit_addr)(gimple, tree, void *))
5056 {
5057 bool ret = false;
5058 unsigned i;
5059 if (gimple_assign_single_p (stmt))
5060 {
5061 tree lhs, rhs;
5062 if (visit_store)
5063 {
5064 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
5065 if (lhs)
5066 ret |= visit_store (stmt, lhs, data);
5067 }
5068 rhs = gimple_assign_rhs1 (stmt);
5069 while (handled_component_p (rhs))
5070 rhs = TREE_OPERAND (rhs, 0);
5071 if (visit_addr)
5072 {
5073 if (TREE_CODE (rhs) == ADDR_EXPR)
5074 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5075 else if (TREE_CODE (rhs) == TARGET_MEM_REF
5076 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
5077 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
5078 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
5079 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
5080 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
5081 0), data);
5082 else if (TREE_CODE (rhs) == CONSTRUCTOR)
5083 {
5084 unsigned int ix;
5085 tree val;
5086
5087 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
5088 if (TREE_CODE (val) == ADDR_EXPR)
5089 ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
5090 else if (TREE_CODE (val) == OBJ_TYPE_REF
5091 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
5092 ret |= visit_addr (stmt,
5093 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
5094 0), data);
5095 }
5096 lhs = gimple_assign_lhs (stmt);
5097 if (TREE_CODE (lhs) == TARGET_MEM_REF
5098 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
5099 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
5100 }
5101 if (visit_load)
5102 {
5103 rhs = get_base_loadstore (rhs);
5104 if (rhs)
5105 ret |= visit_load (stmt, rhs, data);
5106 }
5107 }
5108 else if (visit_addr
5109 && (is_gimple_assign (stmt)
5110 || gimple_code (stmt) == GIMPLE_COND))
5111 {
5112 for (i = 0; i < gimple_num_ops (stmt); ++i)
5113 {
5114 tree op = gimple_op (stmt, i);
5115 if (op == NULL_TREE)
5116 ;
5117 else if (TREE_CODE (op) == ADDR_EXPR)
5118 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5119 /* The rhs1 argument of a COND_EXPR or VCOND_EXPR is a
5120 comparison tree with two operands. */
5121 else if (i == 1 && COMPARISON_CLASS_P (op))
5122 {
5123 if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
5124 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
5125 0), data);
5126 if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
5127 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
5128 0), data);
5129 }
5130 }
5131 }
5132 else if (is_gimple_call (stmt))
5133 {
5134 if (visit_store)
5135 {
5136 tree lhs = gimple_call_lhs (stmt);
5137 if (lhs)
5138 {
5139 lhs = get_base_loadstore (lhs);
5140 if (lhs)
5141 ret |= visit_store (stmt, lhs, data);
5142 }
5143 }
5144 if (visit_load || visit_addr)
5145 for (i = 0; i < gimple_call_num_args (stmt); ++i)
5146 {
5147 tree rhs = gimple_call_arg (stmt, i);
5148 if (visit_addr
5149 && TREE_CODE (rhs) == ADDR_EXPR)
5150 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
5151 else if (visit_load)
5152 {
5153 rhs = get_base_loadstore (rhs);
5154 if (rhs)
5155 ret |= visit_load (stmt, rhs, data);
5156 }
5157 }
5158 if (visit_addr
5159 && gimple_call_chain (stmt)
5160 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
5161 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
5162 data);
5163 if (visit_addr
5164 && gimple_call_return_slot_opt_p (stmt)
5165 && gimple_call_lhs (stmt) != NULL_TREE
5166 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
5167 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
5168 }
5169 else if (gimple_code (stmt) == GIMPLE_ASM)
5170 {
5171 unsigned noutputs;
5172 const char *constraint;
5173 const char **oconstraints;
5174 bool allows_mem, allows_reg, is_inout;
5175 noutputs = gimple_asm_noutputs (stmt);
5176 oconstraints = XALLOCAVEC (const char *, noutputs);
5177 if (visit_store || visit_addr)
5178 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
5179 {
5180 tree link = gimple_asm_output_op (stmt, i);
5181 tree op = get_base_loadstore (TREE_VALUE (link));
5182 if (op && visit_store)
5183 ret |= visit_store (stmt, op, data);
5184 if (visit_addr)
5185 {
5186 constraint = TREE_STRING_POINTER
5187 (TREE_VALUE (TREE_PURPOSE (link)));
5188 oconstraints[i] = constraint;
5189 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
5190 &allows_reg, &is_inout);
5191 if (op && !allows_reg && allows_mem)
5192 ret |= visit_addr (stmt, op, data);
5193 }
5194 }
5195 if (visit_load || visit_addr)
5196 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
5197 {
5198 tree link = gimple_asm_input_op (stmt, i);
5199 tree op = TREE_VALUE (link);
5200 if (visit_addr
5201 && TREE_CODE (op) == ADDR_EXPR)
5202 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5203 else if (visit_load || visit_addr)
5204 {
5205 op = get_base_loadstore (op);
5206 if (op)
5207 {
5208 if (visit_load)
5209 ret |= visit_load (stmt, op, data);
5210 if (visit_addr)
5211 {
5212 constraint = TREE_STRING_POINTER
5213 (TREE_VALUE (TREE_PURPOSE (link)));
5214 parse_input_constraint (&constraint, 0, 0, noutputs,
5215 0, oconstraints,
5216 &allows_mem, &allows_reg);
5217 if (!allows_reg && allows_mem)
5218 ret |= visit_addr (stmt, op, data);
5219 }
5220 }
5221 }
5222 }
5223 }
5224 else if (gimple_code (stmt) == GIMPLE_RETURN)
5225 {
5226 tree op = gimple_return_retval (stmt);
5227 if (op)
5228 {
5229 if (visit_addr
5230 && TREE_CODE (op) == ADDR_EXPR)
5231 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5232 else if (visit_load)
5233 {
5234 op = get_base_loadstore (op);
5235 if (op)
5236 ret |= visit_load (stmt, op, data);
5237 }
5238 }
5239 }
5240 else if (visit_addr
5241 && gimple_code (stmt) == GIMPLE_PHI)
5242 {
5243 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
5244 {
5245 tree op = PHI_ARG_DEF (stmt, i);
5246 if (TREE_CODE (op) == ADDR_EXPR)
5247 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
5248 }
5249 }
5250
5251 return ret;
5252 }
5253
5254 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
5255 should make a faster clone for this case. */
5256
5257 bool
5258 walk_stmt_load_store_ops (gimple stmt, void *data,
5259 bool (*visit_load)(gimple, tree, void *),
5260 bool (*visit_store)(gimple, tree, void *))
5261 {
5262 return walk_stmt_load_store_addr_ops (stmt, data,
5263 visit_load, visit_store, NULL);
5264 }
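/* A minimal usage sketch (editor's addition; the callback name and
   counter are hypothetical, not part of the original file): count the
   load bases that walk_stmt_load_store_ops visits in a statement, e.g.

     unsigned n = 0;
     walk_stmt_load_store_ops (stmt, &n, count_loads_cb, NULL);

   gimple_ior_addresses_taken below uses the same pattern with the
   address-taken callback.  */

static bool ATTRIBUTE_UNUSED
count_loads_cb (gimple stmt ATTRIBUTE_UNUSED, tree base ATTRIBUTE_UNUSED,
                void *data)
{
  /* DATA points to the caller's counter; return false to signal
     no interesting result.  */
  unsigned *n = (unsigned *) data;
  (*n)++;
  return false;
}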
5265
5266 /* Helper for gimple_ior_addresses_taken_1. */
5267
5268 static bool
5269 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
5270 tree addr, void *data)
5271 {
5272 bitmap addresses_taken = (bitmap)data;
5273 addr = get_base_address (addr);
5274 if (addr
5275 && DECL_P (addr))
5276 {
5277 bitmap_set_bit (addresses_taken, DECL_UID (addr));
5278 return true;
5279 }
5280 return false;
5281 }
5282
5283 /* Set the bit for the uid of all decls that have their address taken
5284 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
5285 were any in this stmt. */
5286
5287 bool
5288 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
5289 {
5290 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
5291 gimple_ior_addresses_taken_1);
5292 }
5293
5294
5295 /* Return a printable name for symbol DECL. */
5296
5297 const char *
5298 gimple_decl_printable_name (tree decl, int verbosity)
5299 {
5300 if (!DECL_NAME (decl))
5301 return NULL;
5302
5303 if (DECL_ASSEMBLER_NAME_SET_P (decl))
5304 {
5305 const char *str, *mangled_str;
5306 int dmgl_opts = DMGL_NO_OPTS;
5307
5308 if (verbosity >= 2)
5309 {
5310 dmgl_opts = DMGL_VERBOSE
5311 | DMGL_ANSI
5312 | DMGL_GNU_V3
5313 | DMGL_RET_POSTFIX;
5314 if (TREE_CODE (decl) == FUNCTION_DECL)
5315 dmgl_opts |= DMGL_PARAMS;
5316 }
5317
5318 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
5319 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
5320 return (str) ? str : mangled_str;
5321 }
5322
5323 return IDENTIFIER_POINTER (DECL_NAME (decl));
5324 }
5325
5326 /* Return true when STMT is a call to the built-in function CODE. */
5327
5328 bool
5329 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
5330 {
5331 tree fndecl;
5332 return (is_gimple_call (stmt)
5333 && (fndecl = gimple_call_fndecl (stmt)) != NULL
5334 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5335 && DECL_FUNCTION_CODE (fndecl) == code);
5336 }
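/* Usage example (editor's addition): a pass can test for a specific
   built-in with

     if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY))
       ...

   which is safe for indirect calls because gimple_call_fndecl must
   resolve to a decl for the predicate to succeed.  */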
5337
5338 /* Return true if STMT clobbers memory. STMT is required to be a
5339 GIMPLE_ASM. */
5340
5341 bool
5342 gimple_asm_clobbers_memory_p (const_gimple stmt)
5343 {
5344 unsigned i;
5345
5346 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
5347 {
5348 tree op = gimple_asm_clobber_op (stmt, i);
5349 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
5350 return true;
5351 }
5352
5353 return false;
5354 }
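/* For example (editor's addition), for the classic compiler barrier
   __asm__ volatile ("" : : : "memory") the "memory" clobber makes
   this predicate return true.  */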
5355 #include "gt-gimple.h"