1 /* Gimple IR support functions.
2
3 Copyright (C) 2007-2013 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "target.h"
27 #include "tree.h"
28 #include "ggc.h"
29 #include "hard-reg-set.h"
30 #include "basic-block.h"
31 #include "gimple.h"
32 #include "diagnostic.h"
33 #include "tree-flow.h"
34 #include "value-prof.h"
35 #include "flags.h"
36 #include "alias.h"
37 #include "demangle.h"
38 #include "langhooks.h"
39
40 /* Global canonical type table. */
41 static GTY((if_marked ("ggc_marked_p"), param_is (union tree_node)))
42 htab_t gimple_canonical_types;
43 static GTY((if_marked ("tree_int_map_marked_p"), param_is (struct tree_int_map)))
44 htab_t canonical_type_hash_cache;
45
46 /* All the tuples have their operand vector (if present) at the very bottom
47 of the structure. Therefore, the offset required to find the
48 operands vector is the size of the structure minus the size of the
49 one-element tree array at the end (see gimple_ops). */
50 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
51 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
52 EXPORTED_CONST size_t gimple_ops_offset_[] = {
53 #include "gsstruct.def"
54 };
55 #undef DEFGSSTRUCT
56
57 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof(struct STRUCT),
58 static const size_t gsstruct_code_size[] = {
59 #include "gsstruct.def"
60 };
61 #undef DEFGSSTRUCT
62
63 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
64 const char *const gimple_code_name[] = {
65 #include "gimple.def"
66 };
67 #undef DEFGSCODE
68
69 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
70 EXPORTED_CONST enum gimple_statement_structure_enum gss_for_code_[] = {
71 #include "gimple.def"
72 };
73 #undef DEFGSCODE
74
75 /* Gimple stats. */
76
77 int gimple_alloc_counts[(int) gimple_alloc_kind_all];
78 int gimple_alloc_sizes[(int) gimple_alloc_kind_all];
79
80 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
81 static const char * const gimple_alloc_kind_names[] = {
82 "assignments",
83 "phi nodes",
84 "conditionals",
85 "everything else"
86 };
87
88 /* Private API manipulation functions shared only with some
89 other files. */
90 extern void gimple_set_stored_syms (gimple, bitmap, bitmap_obstack *);
91 extern void gimple_set_loaded_syms (gimple, bitmap, bitmap_obstack *);
92
93 /* Gimple tuple constructors.
94 Note: Any constructor taking a ``gimple_seq'' as a parameter can
95 be passed a NULL to start with an empty sequence. */
96
97 /* Set the code for statement G to CODE. */
98
99 static inline void
100 gimple_set_code (gimple g, enum gimple_code code)
101 {
102 g->gsbase.code = code;
103 }
104
105 /* Return the number of bytes needed to hold a GIMPLE statement with
106 code CODE. */
107
108 static inline size_t
109 gimple_size (enum gimple_code code)
110 {
111 return gsstruct_code_size[gss_for_code (code)];
112 }
113
114 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
115 operands. */
116
117 gimple
118 gimple_alloc_stat (enum gimple_code code, unsigned num_ops MEM_STAT_DECL)
119 {
120 size_t size;
121 gimple stmt;
122
123 size = gimple_size (code);
124 if (num_ops > 0)
125 size += sizeof (tree) * (num_ops - 1);
126
127 if (GATHER_STATISTICS)
128 {
129 enum gimple_alloc_kind kind = gimple_alloc_kind (code);
130 gimple_alloc_counts[(int) kind]++;
131 gimple_alloc_sizes[(int) kind] += size;
132 }
133
134 stmt = ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT);
135 gimple_set_code (stmt, code);
136 gimple_set_num_ops (stmt, num_ops);
137
138 /* Do not call gimple_set_modified here as it has other side
139 effects and this tuple is still not completely built. */
140 stmt->gsbase.modified = 1;
141 gimple_init_singleton (stmt);
142
143 return stmt;
144 }
145
146 /* Set SUBCODE to be the code of the expression computed by statement G. */
147
148 static inline void
149 gimple_set_subcode (gimple g, unsigned subcode)
150 {
151 /* We only have 16 bits for the RHS code. Assert that we are not
152 overflowing it. */
153 gcc_assert (subcode < (1 << 16));
154 g->gsbase.subcode = subcode;
155 }
156
157
158
159 /* Build a tuple with operands. CODE is the statement to build (which
160 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code
161 for the new tuple. NUM_OPS is the number of operands to allocate. */
162
163 #define gimple_build_with_ops(c, s, n) \
164 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
165
166 static gimple
167 gimple_build_with_ops_stat (enum gimple_code code, unsigned subcode,
168 unsigned num_ops MEM_STAT_DECL)
169 {
170 gimple s = gimple_alloc_stat (code, num_ops PASS_MEM_STAT);
171 gimple_set_subcode (s, subcode);
172
173 return s;
174 }
175
176
177 /* Build a GIMPLE_RETURN statement returning RETVAL. */
178
179 gimple
180 gimple_build_return (tree retval)
181 {
182 gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 1);
183 if (retval)
184 gimple_return_set_retval (s, retval);
185 return s;
186 }
187
188 /* Reset alias information on call S. */
189
190 void
191 gimple_call_reset_alias_info (gimple s)
192 {
193 if (gimple_call_flags (s) & ECF_CONST)
194 memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
195 else
196 pt_solution_reset (gimple_call_use_set (s));
197 if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
198 memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
199 else
200 pt_solution_reset (gimple_call_clobber_set (s));
201 }
202
203 /* Helper for gimple_build_call, gimple_build_call_valist,
204 gimple_build_call_vec and gimple_build_call_from_tree. Build the basic
205 components of a GIMPLE_CALL statement to function FN with NARGS
206 arguments. */
207
208 static inline gimple
209 gimple_build_call_1 (tree fn, unsigned nargs)
210 {
211 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
212 if (TREE_CODE (fn) == FUNCTION_DECL)
213 fn = build_fold_addr_expr (fn);
214 gimple_set_op (s, 1, fn);
215 gimple_call_set_fntype (s, TREE_TYPE (TREE_TYPE (fn)));
216 gimple_call_reset_alias_info (s);
217 return s;
218 }
219
220
221 /* Build a GIMPLE_CALL statement to function FN with the arguments
222 specified in vector ARGS. */
223
224 gimple
225 gimple_build_call_vec (tree fn, vec<tree> args)
226 {
227 unsigned i;
228 unsigned nargs = args.length ();
229 gimple call = gimple_build_call_1 (fn, nargs);
230
231 for (i = 0; i < nargs; i++)
232 gimple_call_set_arg (call, i, args[i]);
233
234 return call;
235 }
236
237
238 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
239 arguments. The ... are the arguments. */
240
241 gimple
242 gimple_build_call (tree fn, unsigned nargs, ...)
243 {
244 va_list ap;
245 gimple call;
246 unsigned i;
247
248 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
249
250 call = gimple_build_call_1 (fn, nargs);
251
252 va_start (ap, nargs);
253 for (i = 0; i < nargs; i++)
254 gimple_call_set_arg (call, i, va_arg (ap, tree));
255 va_end (ap);
256
257 return call;
258 }
259
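/* Usage sketch (editorial addition, not part of the original sources):
   building "lhs = foo (a, b)" and appending it to a sequence SEQ, where
   FNDECL is foo's FUNCTION_DECL and A, B and LHS are hypothetical GIMPLE
   operands; gimple_call_set_lhs is declared in gimple.h:

     gimple call = gimple_build_call (fndecl, 2, a, b);
     gimple_call_set_lhs (call, lhs);
     gimple_seq_add_stmt (&seq, call);  */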
260
261 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
262 arguments. AP contains the arguments. */
263
264 gimple
265 gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
266 {
267 gimple call;
268 unsigned i;
269
270 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
271
272 call = gimple_build_call_1 (fn, nargs);
273
274 for (i = 0; i < nargs; i++)
275 gimple_call_set_arg (call, i, va_arg (ap, tree));
276
277 return call;
278 }
279
280
281 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
282 Build the basic components of a GIMPLE_CALL statement to internal
283 function FN with NARGS arguments. */
284
285 static inline gimple
286 gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
287 {
288 gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
289 s->gsbase.subcode |= GF_CALL_INTERNAL;
290 gimple_call_set_internal_fn (s, fn);
291 gimple_call_reset_alias_info (s);
292 return s;
293 }
294
295
296 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
297 the number of arguments. The ... are the arguments. */
298
299 gimple
300 gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
301 {
302 va_list ap;
303 gimple call;
304 unsigned i;
305
306 call = gimple_build_call_internal_1 (fn, nargs);
307 va_start (ap, nargs);
308 for (i = 0; i < nargs; i++)
309 gimple_call_set_arg (call, i, va_arg (ap, tree));
310 va_end (ap);
311
312 return call;
313 }
314
315
316 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
317 specified in vector ARGS. */
318
319 gimple
320 gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
321 {
322 unsigned i, nargs;
323 gimple call;
324
325 nargs = args.length ();
326 call = gimple_build_call_internal_1 (fn, nargs);
327 for (i = 0; i < nargs; i++)
328 gimple_call_set_arg (call, i, args[i]);
329
330 return call;
331 }
332
333
334 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
335 assumed to be in GIMPLE form already. Minimal checking is done of
336 this fact. */
337
338 gimple
339 gimple_build_call_from_tree (tree t)
340 {
341 unsigned i, nargs;
342 gimple call;
343 tree fndecl = get_callee_fndecl (t);
344
345 gcc_assert (TREE_CODE (t) == CALL_EXPR);
346
347 nargs = call_expr_nargs (t);
348 call = gimple_build_call_1 (fndecl ? fndecl : CALL_EXPR_FN (t), nargs);
349
350 for (i = 0; i < nargs; i++)
351 gimple_call_set_arg (call, i, CALL_EXPR_ARG (t, i));
352
353 gimple_set_block (call, TREE_BLOCK (t));
354
355 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
356 gimple_call_set_chain (call, CALL_EXPR_STATIC_CHAIN (t));
357 gimple_call_set_tail (call, CALL_EXPR_TAILCALL (t));
358 gimple_call_set_return_slot_opt (call, CALL_EXPR_RETURN_SLOT_OPT (t));
359 if (fndecl
360 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
361 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
362 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
363 gimple_call_set_alloca_for_var (call, CALL_ALLOCA_FOR_VAR_P (t));
364 else
365 gimple_call_set_from_thunk (call, CALL_FROM_THUNK_P (t));
366 gimple_call_set_va_arg_pack (call, CALL_EXPR_VA_ARG_PACK (t));
367 gimple_call_set_nothrow (call, TREE_NOTHROW (t));
368 gimple_set_no_warning (call, TREE_NO_WARNING (t));
369
370 return call;
371 }
372
373
374 /* Extract the operands and code for expression EXPR into *SUBCODE_P,
375 *OP1_P, *OP2_P and *OP3_P respectively. */
376
377 void
378 extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
379 tree *op2_p, tree *op3_p)
380 {
381 enum gimple_rhs_class grhs_class;
382
383 *subcode_p = TREE_CODE (expr);
384 grhs_class = get_gimple_rhs_class (*subcode_p);
385
386 if (grhs_class == GIMPLE_TERNARY_RHS)
387 {
388 *op1_p = TREE_OPERAND (expr, 0);
389 *op2_p = TREE_OPERAND (expr, 1);
390 *op3_p = TREE_OPERAND (expr, 2);
391 }
392 else if (grhs_class == GIMPLE_BINARY_RHS)
393 {
394 *op1_p = TREE_OPERAND (expr, 0);
395 *op2_p = TREE_OPERAND (expr, 1);
396 *op3_p = NULL_TREE;
397 }
398 else if (grhs_class == GIMPLE_UNARY_RHS)
399 {
400 *op1_p = TREE_OPERAND (expr, 0);
401 *op2_p = NULL_TREE;
402 *op3_p = NULL_TREE;
403 }
404 else if (grhs_class == GIMPLE_SINGLE_RHS)
405 {
406 *op1_p = expr;
407 *op2_p = NULL_TREE;
408 *op3_p = NULL_TREE;
409 }
410 else
411 gcc_unreachable ();
412 }
413
414
415 /* Build a GIMPLE_ASSIGN statement.
416
417 LHS is the left-hand side of the assignment.
418 RHS is the right-hand side, which may be a single-operand, unary, binary or ternary expression. */
419
420 gimple
421 gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
422 {
423 enum tree_code subcode;
424 tree op1, op2, op3;
425
426 extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
427 return gimple_build_assign_with_ops (subcode, lhs, op1, op2, op3
428 PASS_MEM_STAT);
429 }
430
431
432 /* Build a GIMPLE_ASSIGN statement with sub-code SUBCODE and operands
433 OP1, OP2 and OP3. If OP2 and OP3 are NULL then SUBCODE must be of
434 class GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
435
436 gimple
437 gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
438 tree op2, tree op3 MEM_STAT_DECL)
439 {
440 unsigned num_ops;
441 gimple p;
442
443 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
444 code). */
445 num_ops = get_gimple_rhs_num_ops (subcode) + 1;
446
447 p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
448 PASS_MEM_STAT);
449 gimple_assign_set_lhs (p, lhs);
450 gimple_assign_set_rhs1 (p, op1);
451 if (op2)
452 {
453 gcc_assert (num_ops > 2);
454 gimple_assign_set_rhs2 (p, op2);
455 }
456
457 if (op3)
458 {
459 gcc_assert (num_ops > 3);
460 gimple_assign_set_rhs3 (p, op3);
461 }
462
463 return p;
464 }
465
466 gimple
467 gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
468 tree op2 MEM_STAT_DECL)
469 {
470 return gimple_build_assign_with_ops (subcode, lhs, op1, op2, NULL_TREE
471 PASS_MEM_STAT);
472 }
473
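/* Usage sketch (editorial addition): building "x = a + b" with the
   overload above, where X, A and B are hypothetical GIMPLE operands.
   When the RHS already exists as a GIMPLE-valid tree, the
   gimple_build_assign macro from gimple.h can be used instead:

     gimple stmt = gimple_build_assign_with_ops (PLUS_EXPR, x, a, b);  */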
474
475 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
476
477 DST/SRC are the destination and source respectively. You can pass
478 ungimplified trees in DST or SRC, in which case they will be
479 converted to a gimple operand if necessary.
480
481 This function returns the newly created GIMPLE_ASSIGN tuple. */
482
483 gimple
484 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
485 {
486 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
487 gimplify_and_add (t, seq_p);
488 ggc_free (t);
489 return gimple_seq_last_stmt (*seq_p);
490 }
491
492
493 /* Build a GIMPLE_COND statement.
494
495 PRED is the condition used to compare LHS and the RHS.
496 T_LABEL is the label to jump to if the condition is true.
497 F_LABEL is the label to jump to otherwise. */
498
499 gimple
500 gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
501 tree t_label, tree f_label)
502 {
503 gimple p;
504
505 gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
506 p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
507 gimple_cond_set_lhs (p, lhs);
508 gimple_cond_set_rhs (p, rhs);
509 gimple_cond_set_true_label (p, t_label);
510 gimple_cond_set_false_label (p, f_label);
511 return p;
512 }
513
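/* Usage sketch (editorial addition): "if (a < b) goto t; else goto f;"
   with hypothetical operands A and B and LABEL_DECLs T_LABEL and F_LABEL
   (the labels may be NULL once the CFG edges carry this information):

     gimple cond = gimple_build_cond (LT_EXPR, a, b, t_label, f_label);  */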
514
515 /* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND. */
516
517 void
518 gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
519 tree *lhs_p, tree *rhs_p)
520 {
521 gcc_assert (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison
522 || TREE_CODE (cond) == TRUTH_NOT_EXPR
523 || is_gimple_min_invariant (cond)
524 || SSA_VAR_P (cond));
525
526 extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);
527
528 /* Canonicalize conditionals of the form 'if (!VAL)'. */
529 if (*code_p == TRUTH_NOT_EXPR)
530 {
531 *code_p = EQ_EXPR;
532 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
533 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
534 }
535 /* Canonicalize conditionals of the form 'if (VAL)' */
536 else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
537 {
538 *code_p = NE_EXPR;
539 gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
540 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
541 }
542 }
543
544
545 /* Build a GIMPLE_COND statement from the conditional expression tree
546 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
547
548 gimple
549 gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
550 {
551 enum tree_code code;
552 tree lhs, rhs;
553
554 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
555 return gimple_build_cond (code, lhs, rhs, t_label, f_label);
556 }
557
558 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
559 boolean expression tree COND. */
560
561 void
562 gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
563 {
564 enum tree_code code;
565 tree lhs, rhs;
566
567 gimple_cond_get_ops_from_tree (cond, &code, &lhs, &rhs);
568 gimple_cond_set_condition (stmt, code, lhs, rhs);
569 }
570
571 /* Build a GIMPLE_LABEL statement for LABEL. */
572
573 gimple
574 gimple_build_label (tree label)
575 {
576 gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
577 gimple_label_set_label (p, label);
578 return p;
579 }
580
581 /* Build a GIMPLE_GOTO statement to label DEST. */
582
583 gimple
584 gimple_build_goto (tree dest)
585 {
586 gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
587 gimple_goto_set_dest (p, dest);
588 return p;
589 }
590
591
592 /* Build a GIMPLE_NOP statement. */
593
594 gimple
595 gimple_build_nop (void)
596 {
597 return gimple_alloc (GIMPLE_NOP, 0);
598 }
599
600
601 /* Build a GIMPLE_BIND statement.
602 VARS are the variables in BODY.
603 BLOCK is the containing block. */
604
605 gimple
606 gimple_build_bind (tree vars, gimple_seq body, tree block)
607 {
608 gimple p = gimple_alloc (GIMPLE_BIND, 0);
609 gimple_bind_set_vars (p, vars);
610 if (body)
611 gimple_bind_set_body (p, body);
612 if (block)
613 gimple_bind_set_block (p, block);
614 return p;
615 }
616
617 /* Helper function to set the simple fields of an asm stmt.
618
619 STRING is a pointer to a string that is the asm block's assembly code.
620 NINPUTS is the number of register inputs.
621 NOUTPUTS is the number of register outputs.
622 NCLOBBERS is the number of clobbered registers.
623 NLABELS is the number of labels for an asm goto. */
624
625 static inline gimple
626 gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
627 unsigned nclobbers, unsigned nlabels)
628 {
629 gimple p;
630 int size = strlen (string);
631
632 /* ASMs with labels cannot have outputs. This should have been
633 enforced by the front end. */
634 gcc_assert (nlabels == 0 || noutputs == 0);
635
636 p = gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
637 ninputs + noutputs + nclobbers + nlabels);
638
639 p->gimple_asm.ni = ninputs;
640 p->gimple_asm.no = noutputs;
641 p->gimple_asm.nc = nclobbers;
642 p->gimple_asm.nl = nlabels;
643 p->gimple_asm.string = ggc_alloc_string (string, size);
644
645 if (GATHER_STATISTICS)
646 gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
647
648 return p;
649 }
650
651 /* Build a GIMPLE_ASM statement.
652
653 STRING is the assembly code.
654 INPUTS is a vector of the input register parameters.
655 OUTPUTS is a vector of the output register parameters.
656 CLOBBERS is a vector of the clobbered register parameters.
657 LABELS is a vector of destination labels.
658 The numbers of inputs, outputs, clobbers and labels are taken from
659 the lengths of the corresponding vectors rather than being passed
660 explicitly. */
661
662 gimple
663 gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
664 vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
665 vec<tree, va_gc> *labels)
666 {
667 gimple p;
668 unsigned i;
669
670 p = gimple_build_asm_1 (string,
671 vec_safe_length (inputs),
672 vec_safe_length (outputs),
673 vec_safe_length (clobbers),
674 vec_safe_length (labels));
675
676 for (i = 0; i < vec_safe_length (inputs); i++)
677 gimple_asm_set_input_op (p, i, (*inputs)[i]);
678
679 for (i = 0; i < vec_safe_length (outputs); i++)
680 gimple_asm_set_output_op (p, i, (*outputs)[i]);
681
682 for (i = 0; i < vec_safe_length (clobbers); i++)
683 gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
684
685 for (i = 0; i < vec_safe_length (labels); i++)
686 gimple_asm_set_label_op (p, i, (*labels)[i]);
687
688 return p;
689 }
690
691 /* Build a GIMPLE_CATCH statement.
692
693 TYPES are the catch types.
694 HANDLER is the exception handler. */
695
696 gimple
697 gimple_build_catch (tree types, gimple_seq handler)
698 {
699 gimple p = gimple_alloc (GIMPLE_CATCH, 0);
700 gimple_catch_set_types (p, types);
701 if (handler)
702 gimple_catch_set_handler (p, handler);
703
704 return p;
705 }
706
707 /* Build a GIMPLE_EH_FILTER statement.
708
709 TYPES are the filter's types.
710 FAILURE is the filter's failure action. */
711
712 gimple
713 gimple_build_eh_filter (tree types, gimple_seq failure)
714 {
715 gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
716 gimple_eh_filter_set_types (p, types);
717 if (failure)
718 gimple_eh_filter_set_failure (p, failure);
719
720 return p;
721 }
722
723 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
724
725 gimple
726 gimple_build_eh_must_not_throw (tree decl)
727 {
728 gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
729
730 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
731 gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
732 gimple_eh_must_not_throw_set_fndecl (p, decl);
733
734 return p;
735 }
736
737 /* Build a GIMPLE_EH_ELSE statement. */
738
739 gimple
740 gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
741 {
742 gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
743 gimple_eh_else_set_n_body (p, n_body);
744 gimple_eh_else_set_e_body (p, e_body);
745 return p;
746 }
747
748 /* Build a GIMPLE_TRY statement.
749
750 EVAL is the expression to evaluate.
751 CLEANUP is the cleanup expression.
752 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
753 whether this is a try/catch or a try/finally respectively. */
754
755 gimple
756 gimple_build_try (gimple_seq eval, gimple_seq cleanup,
757 enum gimple_try_flags kind)
758 {
759 gimple p;
760
761 gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
762 p = gimple_alloc (GIMPLE_TRY, 0);
763 gimple_set_subcode (p, kind);
764 if (eval)
765 gimple_try_set_eval (p, eval);
766 if (cleanup)
767 gimple_try_set_cleanup (p, cleanup);
768
769 return p;
770 }
771
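/* Usage sketch (editorial addition): wrapping a hypothetical sequence
   BODY_SEQ so that CLEANUP_SEQ always runs afterwards:

     gimple t = gimple_build_try (body_seq, cleanup_seq, GIMPLE_TRY_FINALLY);  */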
772 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
773
774 CLEANUP is the cleanup expression. */
775
776 gimple
777 gimple_build_wce (gimple_seq cleanup)
778 {
779 gimple p = gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR, 0);
780 if (cleanup)
781 gimple_wce_set_cleanup (p, cleanup);
782
783 return p;
784 }
785
786
787 /* Build a GIMPLE_RESX statement. */
788
789 gimple
790 gimple_build_resx (int region)
791 {
792 gimple p = gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0);
793 p->gimple_eh_ctrl.region = region;
794 return p;
795 }
796
797
798 /* The helper for constructing a gimple switch statement.
799 INDEX is the switch's index.
800 NLABELS is the number of labels in the switch excluding the default.
801 DEFAULT_LABEL is the default label for the switch statement. */
802
803 gimple
804 gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
805 {
806 /* nlabels + 1 default label + 1 index. */
807 gcc_checking_assert (default_label);
808 gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
809 1 + 1 + nlabels);
810 gimple_switch_set_index (p, index);
811 gimple_switch_set_default_label (p, default_label);
812 return p;
813 }
814
815 /* Build a GIMPLE_SWITCH statement.
816
817 INDEX is the switch's index.
818 DEFAULT_LABEL is the default label
819 ARGS is a vector of labels excluding the default. */
820
821 gimple
822 gimple_build_switch (tree index, tree default_label, vec<tree> args)
823 {
824 unsigned i, nlabels = args.length ();
825
826 gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
827
828 /* Copy the labels from the vector to the switch statement. */
829 for (i = 0; i < nlabels; i++)
830 gimple_switch_set_label (p, i + 1, args[i]);
831
832 return p;
833 }
834
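/* Usage sketch (editorial addition): a two-case switch on a hypothetical
   INDEX.  Both the default label and the case labels are CASE_LABEL_EXPRs,
   here created with build_case_label from tree.c; L0, L1 and DEFAULT_DECL
   are hypothetical LABEL_DECLs:

     tree default_label = build_case_label (NULL_TREE, NULL_TREE, default_decl);
     vec<tree> labels = vNULL;
     labels.safe_push (build_case_label (integer_zero_node, NULL_TREE, l0));
     labels.safe_push (build_case_label (integer_one_node, NULL_TREE, l1));
     gimple s = gimple_build_switch (index, default_label, labels);
     labels.release ();  */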
835 /* Build a GIMPLE_EH_DISPATCH statement. */
836
837 gimple
838 gimple_build_eh_dispatch (int region)
839 {
840 gimple p = gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0);
841 p->gimple_eh_ctrl.region = region;
842 return p;
843 }
844
845 /* Build a new GIMPLE_DEBUG_BIND statement.
846
847 VAR is bound to VALUE; block and location are taken from STMT. */
848
849 gimple
850 gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
851 {
852 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
853 (unsigned)GIMPLE_DEBUG_BIND, 2
854 PASS_MEM_STAT);
855
856 gimple_debug_bind_set_var (p, var);
857 gimple_debug_bind_set_value (p, value);
858 if (stmt)
859 gimple_set_location (p, gimple_location (stmt));
860
861 return p;
862 }
863
864
865 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
866
867 VAR is bound to VALUE; block and location are taken from STMT. */
868
869 gimple
870 gimple_build_debug_source_bind_stat (tree var, tree value,
871 gimple stmt MEM_STAT_DECL)
872 {
873 gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
874 (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
875 PASS_MEM_STAT);
876
877 gimple_debug_source_bind_set_var (p, var);
878 gimple_debug_source_bind_set_value (p, value);
879 if (stmt)
880 gimple_set_location (p, gimple_location (stmt));
881
882 return p;
883 }
884
885
886 /* Build a GIMPLE_OMP_CRITICAL statement.
887
888 BODY is the sequence of statements that only one thread may execute at a time.
889 NAME is an optional identifier for this critical block. */
890
891 gimple
892 gimple_build_omp_critical (gimple_seq body, tree name)
893 {
894 gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
895 gimple_omp_critical_set_name (p, name);
896 if (body)
897 gimple_omp_set_body (p, body);
898
899 return p;
900 }
901
902 /* Build a GIMPLE_OMP_FOR statement.
903
904 BODY is the sequence of statements inside the for loop.
905 CLAUSES are any of the OMP loop construct's clauses: private, firstprivate,
906 lastprivate, reduction, ordered, schedule, and nowait.
907 COLLAPSE is the collapse count.
908 PRE_BODY is the sequence of statements that are loop invariant. */
909
910 gimple
911 gimple_build_omp_for (gimple_seq body, tree clauses, size_t collapse,
912 gimple_seq pre_body)
913 {
914 gimple p = gimple_alloc (GIMPLE_OMP_FOR, 0);
915 if (body)
916 gimple_omp_set_body (p, body);
917 gimple_omp_for_set_clauses (p, clauses);
918 p->gimple_omp_for.collapse = collapse;
919 p->gimple_omp_for.iter
920 = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse);
921 if (pre_body)
922 gimple_omp_for_set_pre_body (p, pre_body);
923
924 return p;
925 }
926
927
928 /* Build a GIMPLE_OMP_PARALLEL statement.
929
930 BODY is the sequence of statements to be executed in parallel.
931 CLAUSES are the OMP parallel construct's clauses.
932 CHILD_FN is the function created for the parallel threads to execute.
933 DATA_ARG is the shared data argument. */
934
935 gimple
936 gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
937 tree data_arg)
938 {
939 gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
940 if (body)
941 gimple_omp_set_body (p, body);
942 gimple_omp_parallel_set_clauses (p, clauses);
943 gimple_omp_parallel_set_child_fn (p, child_fn);
944 gimple_omp_parallel_set_data_arg (p, data_arg);
945
946 return p;
947 }
948
949
950 /* Build a GIMPLE_OMP_TASK statement.
951
952 BODY is the sequence of statements executed by the explicit task.
953 CLAUSES are the OMP task construct's clauses.
954 CHILD_FN is the function created for the task to execute.
955 DATA_ARG is the shared data argument.
956 COPY_FN is the optional function for firstprivate initialization.
957 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
958
959 gimple
960 gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
961 tree data_arg, tree copy_fn, tree arg_size,
962 tree arg_align)
963 {
964 gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
965 if (body)
966 gimple_omp_set_body (p, body);
967 gimple_omp_task_set_clauses (p, clauses);
968 gimple_omp_task_set_child_fn (p, child_fn);
969 gimple_omp_task_set_data_arg (p, data_arg);
970 gimple_omp_task_set_copy_fn (p, copy_fn);
971 gimple_omp_task_set_arg_size (p, arg_size);
972 gimple_omp_task_set_arg_align (p, arg_align);
973
974 return p;
975 }
976
977
978 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
979
980 BODY is the sequence of statements in the section. */
981
982 gimple
983 gimple_build_omp_section (gimple_seq body)
984 {
985 gimple p = gimple_alloc (GIMPLE_OMP_SECTION, 0);
986 if (body)
987 gimple_omp_set_body (p, body);
988
989 return p;
990 }
991
992
993 /* Build a GIMPLE_OMP_MASTER statement.
994
995 BODY is the sequence of statements to be executed by just the master. */
996
997 gimple
998 gimple_build_omp_master (gimple_seq body)
999 {
1000 gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
1001 if (body)
1002 gimple_omp_set_body (p, body);
1003
1004 return p;
1005 }
1006
1007
1008 /* Build a GIMPLE_OMP_CONTINUE statement.
1009
1010 CONTROL_DEF is the definition of the control variable.
1011 CONTROL_USE is the use of the control variable. */
1012
1013 gimple
1014 gimple_build_omp_continue (tree control_def, tree control_use)
1015 {
1016 gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
1017 gimple_omp_continue_set_control_def (p, control_def);
1018 gimple_omp_continue_set_control_use (p, control_use);
1019 return p;
1020 }
1021
1022 /* Build a GIMPLE_OMP_ORDERED statement.
1023
1024 BODY is the sequence of statements inside a loop that will be executed in
1025 sequence. */
1026
1027 gimple
1028 gimple_build_omp_ordered (gimple_seq body)
1029 {
1030 gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
1031 if (body)
1032 gimple_omp_set_body (p, body);
1033
1034 return p;
1035 }
1036
1037
1038 /* Build a GIMPLE_OMP_RETURN statement.
1039 WAIT_P is true if this is a non-waiting return, i.e. the nowait flag is set. */
1040
1041 gimple
1042 gimple_build_omp_return (bool wait_p)
1043 {
1044 gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
1045 if (wait_p)
1046 gimple_omp_return_set_nowait (p);
1047
1048 return p;
1049 }
1050
1051
1052 /* Build a GIMPLE_OMP_SECTIONS statement.
1053
1054 BODY is a sequence of section statements.
1055 CLAUSES are any of the OMP sections construct's clauses: private,
1056 firstprivate, lastprivate, reduction, and nowait. */
1057
1058 gimple
1059 gimple_build_omp_sections (gimple_seq body, tree clauses)
1060 {
1061 gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
1062 if (body)
1063 gimple_omp_set_body (p, body);
1064 gimple_omp_sections_set_clauses (p, clauses);
1065
1066 return p;
1067 }
1068
1069
1070 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
1071
1072 gimple
1073 gimple_build_omp_sections_switch (void)
1074 {
1075 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH, 0);
1076 }
1077
1078
1079 /* Build a GIMPLE_OMP_SINGLE statement.
1080
1081 BODY is the sequence of statements that will be executed once.
1082 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1083 copyprivate, nowait. */
1084
1085 gimple
1086 gimple_build_omp_single (gimple_seq body, tree clauses)
1087 {
1088 gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
1089 if (body)
1090 gimple_omp_set_body (p, body);
1091 gimple_omp_single_set_clauses (p, clauses);
1092
1093 return p;
1094 }
1095
1096
1097 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1098
1099 gimple
1100 gimple_build_omp_atomic_load (tree lhs, tree rhs)
1101 {
1102 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
1103 gimple_omp_atomic_load_set_lhs (p, lhs);
1104 gimple_omp_atomic_load_set_rhs (p, rhs);
1105 return p;
1106 }
1107
1108 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1109
1110 VAL is the value we are storing. */
1111
1112 gimple
1113 gimple_build_omp_atomic_store (tree val)
1114 {
1115 gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
1116 gimple_omp_atomic_store_set_val (p, val);
1117 return p;
1118 }
1119
1120 /* Build a GIMPLE_TRANSACTION statement. */
1121
1122 gimple
1123 gimple_build_transaction (gimple_seq body, tree label)
1124 {
1125 gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
1126 gimple_transaction_set_body (p, body);
1127 gimple_transaction_set_label (p, label);
1128 return p;
1129 }
1130
1131 /* Build a GIMPLE_PREDICT statement. PREDICTOR is one of the predictors from
1132 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1133
1134 gimple
1135 gimple_build_predict (enum br_predictor predictor, enum prediction outcome)
1136 {
1137 gimple p = gimple_alloc (GIMPLE_PREDICT, 0);
1138 /* Ensure all the predictors fit into the lower bits of the subcode. */
1139 gcc_assert ((int) END_PREDICTORS <= GF_PREDICT_TAKEN);
1140 gimple_predict_set_predictor (p, predictor);
1141 gimple_predict_set_outcome (p, outcome);
1142 return p;
1143 }
1144
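/* Usage sketch (editorial addition): recording a __builtin_expect style
   hint that the guarded path is likely taken:

     gimple p = gimple_build_predict (PRED_BUILTIN_EXPECT, TAKEN);  */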
1145 #if defined ENABLE_GIMPLE_CHECKING
1146 /* Complain of a gimple type mismatch and die. */
1147
1148 void
1149 gimple_check_failed (const_gimple gs, const char *file, int line,
1150 const char *function, enum gimple_code code,
1151 enum tree_code subcode)
1152 {
1153 internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
1154 gimple_code_name[code],
1155 tree_code_name[subcode],
1156 gimple_code_name[gimple_code (gs)],
1157 gs->gsbase.subcode > 0
1158 ? tree_code_name[gs->gsbase.subcode]
1159 : "",
1160 function, trim_filename (file), line);
1161 }
1162 #endif /* ENABLE_GIMPLE_CHECKING */
1163
1164
1165 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1166 *SEQ_P is NULL, a new sequence is allocated. */
1167
1168 void
1169 gimple_seq_add_stmt (gimple_seq *seq_p, gimple gs)
1170 {
1171 gimple_stmt_iterator si;
1172 if (gs == NULL)
1173 return;
1174
1175 si = gsi_last (*seq_p);
1176 gsi_insert_after (&si, gs, GSI_NEW_STMT);
1177 }
1178
1179
1180 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1181 NULL, a new sequence is allocated. */
1182
1183 void
1184 gimple_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
1185 {
1186 gimple_stmt_iterator si;
1187 if (src == NULL)
1188 return;
1189
1190 si = gsi_last (*dst_p);
1191 gsi_insert_seq_after (&si, src, GSI_NEW_STMT);
1192 }
1193
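/* Usage sketch (editorial addition): a fresh sequence is simply a NULL
   gimple_seq; it is allocated on first insertion:

     gimple_seq seq = NULL;
     gimple_seq_add_stmt (&seq, gimple_build_nop ());
     gimple_seq_add_seq (&seq, other_seq);  */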
1194
1195 /* Helper function of empty_body_p. Return true if STMT is an empty
1196 statement. */
1197
1198 static bool
1199 empty_stmt_p (gimple stmt)
1200 {
1201 if (gimple_code (stmt) == GIMPLE_NOP)
1202 return true;
1203 if (gimple_code (stmt) == GIMPLE_BIND)
1204 return empty_body_p (gimple_bind_body (stmt));
1205 return false;
1206 }
1207
1208
1209 /* Return true if BODY contains nothing but empty statements. */
1210
1211 bool
1212 empty_body_p (gimple_seq body)
1213 {
1214 gimple_stmt_iterator i;
1215
1216 if (gimple_seq_empty_p (body))
1217 return true;
1218 for (i = gsi_start (body); !gsi_end_p (i); gsi_next (&i))
1219 if (!empty_stmt_p (gsi_stmt (i))
1220 && !is_gimple_debug (gsi_stmt (i)))
1221 return false;
1222
1223 return true;
1224 }
1225
1226
1227 /* Perform a deep copy of sequence SRC and return the result. */
1228
1229 gimple_seq
1230 gimple_seq_copy (gimple_seq src)
1231 {
1232 gimple_stmt_iterator gsi;
1233 gimple_seq new_seq = NULL;
1234 gimple stmt;
1235
1236 for (gsi = gsi_start (src); !gsi_end_p (gsi); gsi_next (&gsi))
1237 {
1238 stmt = gimple_copy (gsi_stmt (gsi));
1239 gimple_seq_add_stmt (&new_seq, stmt);
1240 }
1241
1242 return new_seq;
1243 }
1244
1245
1246 /* Walk all the statements in the sequence *PSEQ calling walk_gimple_stmt
1247 on each one. WI is as in walk_gimple_stmt.
1248
1249 If walk_gimple_stmt returns non-NULL, the walk is stopped, and the
1250 value is stored in WI->CALLBACK_RESULT. Also, the statement that
1251 produced the value is returned if this statement has not been
1252 removed by a callback (wi->removed_stmt). If the statement has
1253 been removed, NULL is returned.
1254
1255 Otherwise, all the statements are walked and NULL returned. */
1256
1257 gimple
1258 walk_gimple_seq_mod (gimple_seq *pseq, walk_stmt_fn callback_stmt,
1259 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1260 {
1261 gimple_stmt_iterator gsi;
1262
1263 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi); )
1264 {
1265 tree ret = walk_gimple_stmt (&gsi, callback_stmt, callback_op, wi);
1266 if (ret)
1267 {
1268 /* If CALLBACK_STMT or CALLBACK_OP return a value, WI must exist
1269 to hold it. */
1270 gcc_assert (wi);
1271 wi->callback_result = ret;
1272
1273 return wi->removed_stmt ? NULL : gsi_stmt (gsi);
1274 }
1275
1276 if (!wi->removed_stmt)
1277 gsi_next (&gsi);
1278 }
1279
1280 if (wi)
1281 wi->callback_result = NULL_TREE;
1282
1283 return NULL;
1284 }
1285
1286
1287 /* Like walk_gimple_seq_mod, but ensure that the head of SEQ isn't
1288 changed by the callbacks. */
1289
1290 gimple
1291 walk_gimple_seq (gimple_seq seq, walk_stmt_fn callback_stmt,
1292 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1293 {
1294 gimple_seq seq2 = seq;
1295 gimple ret = walk_gimple_seq_mod (&seq2, callback_stmt, callback_op, wi);
1296 gcc_assert (seq2 == seq);
1297 return ret;
1298 }
1299
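/* Usage sketch (editorial addition): counting GIMPLE_CALLs in a
   hypothetical sequence SEQ with a statement callback of type
   walk_stmt_fn, using struct walk_stmt_info's INFO field to carry
   the counter:

     static tree
     count_calls (gimple_stmt_iterator *gsi, bool *handled_ops,
                  struct walk_stmt_info *wi)
     {
       if (is_gimple_call (gsi_stmt (*gsi)))
         ++*(unsigned *) wi->info;
       *handled_ops = true;
       return NULL_TREE;
     }

     unsigned n = 0;
     struct walk_stmt_info wi;
     memset (&wi, 0, sizeof (wi));
     wi.info = &n;
     walk_gimple_seq (seq, count_calls, NULL, &wi);  */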
1300
1301 /* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
1302
1303 static tree
1304 walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
1305 struct walk_stmt_info *wi)
1306 {
1307 tree ret, op;
1308 unsigned noutputs;
1309 const char **oconstraints;
1310 unsigned i, n;
1311 const char *constraint;
1312 bool allows_mem, allows_reg, is_inout;
1313
1314 noutputs = gimple_asm_noutputs (stmt);
1315 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
1316
1317 if (wi)
1318 wi->is_lhs = true;
1319
1320 for (i = 0; i < noutputs; i++)
1321 {
1322 op = gimple_asm_output_op (stmt, i);
1323 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1324 oconstraints[i] = constraint;
1325 parse_output_constraint (&constraint, i, 0, 0, &allows_mem, &allows_reg,
1326 &is_inout);
1327 if (wi)
1328 wi->val_only = (allows_reg || !allows_mem);
1329 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1330 if (ret)
1331 return ret;
1332 }
1333
1334 n = gimple_asm_ninputs (stmt);
1335 for (i = 0; i < n; i++)
1336 {
1337 op = gimple_asm_input_op (stmt, i);
1338 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
1339 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1340 oconstraints, &allows_mem, &allows_reg);
1341 if (wi)
1342 {
1343 wi->val_only = (allows_reg || !allows_mem);
1344 /* Although input "m" is not really an LHS, we need an lvalue. */
1345 wi->is_lhs = !wi->val_only;
1346 }
1347 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1348 if (ret)
1349 return ret;
1350 }
1351
1352 if (wi)
1353 {
1354 wi->is_lhs = false;
1355 wi->val_only = true;
1356 }
1357
1358 n = gimple_asm_nlabels (stmt);
1359 for (i = 0; i < n; i++)
1360 {
1361 op = gimple_asm_label_op (stmt, i);
1362 ret = walk_tree (&TREE_VALUE (op), callback_op, wi, NULL);
1363 if (ret)
1364 return ret;
1365 }
1366
1367 return NULL_TREE;
1368 }
1369
1370
1371 /* Helper function of WALK_GIMPLE_STMT. Walk every tree operand in
1372 STMT. CALLBACK_OP and WI are as in WALK_GIMPLE_STMT.
1373
1374 CALLBACK_OP is called on each operand of STMT via walk_tree.
1375 Additional parameters to walk_tree must be stored in WI. For each operand
1376 OP, walk_tree is called as:
1377
1378 walk_tree (&OP, CALLBACK_OP, WI, WI->PSET)
1379
1380 If CALLBACK_OP returns non-NULL for an operand, the remaining
1381 operands are not scanned.
1382
1383 The return value is that returned by the last call to walk_tree, or
1384 NULL_TREE if no CALLBACK_OP is specified. */
1385
1386 tree
1387 walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
1388 struct walk_stmt_info *wi)
1389 {
1390 struct pointer_set_t *pset = (wi) ? wi->pset : NULL;
1391 unsigned i;
1392 tree ret = NULL_TREE;
1393
1394 switch (gimple_code (stmt))
1395 {
1396 case GIMPLE_ASSIGN:
1397 /* Walk the RHS operands. If the LHS is of a non-renamable type or
1398 is a register variable, we may use a COMPONENT_REF on the RHS. */
1399 if (wi)
1400 {
1401 tree lhs = gimple_assign_lhs (stmt);
1402 wi->val_only
1403 = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
1404 || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
1405 }
1406
1407 for (i = 1; i < gimple_num_ops (stmt); i++)
1408 {
1409 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi,
1410 pset);
1411 if (ret)
1412 return ret;
1413 }
1414
1415 /* Walk the LHS. If the RHS is appropriate for a memory, we
1416 may use a COMPONENT_REF on the LHS. */
1417 if (wi)
1418 {
1419 /* If the RHS is of a non-renamable type or is a register variable,
1420 we may use a COMPONENT_REF on the LHS. */
1421 tree rhs1 = gimple_assign_rhs1 (stmt);
1422 wi->val_only
1423 = (is_gimple_reg_type (TREE_TYPE (rhs1)) && !is_gimple_reg (rhs1))
1424 || gimple_assign_rhs_class (stmt) != GIMPLE_SINGLE_RHS;
1425 wi->is_lhs = true;
1426 }
1427
1428 ret = walk_tree (gimple_op_ptr (stmt, 0), callback_op, wi, pset);
1429 if (ret)
1430 return ret;
1431
1432 if (wi)
1433 {
1434 wi->val_only = true;
1435 wi->is_lhs = false;
1436 }
1437 break;
1438
1439 case GIMPLE_CALL:
1440 if (wi)
1441 {
1442 wi->is_lhs = false;
1443 wi->val_only = true;
1444 }
1445
1446 ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
1447 if (ret)
1448 return ret;
1449
1450 ret = walk_tree (gimple_call_fn_ptr (stmt), callback_op, wi, pset);
1451 if (ret)
1452 return ret;
1453
1454 for (i = 0; i < gimple_call_num_args (stmt); i++)
1455 {
1456 if (wi)
1457 wi->val_only
1458 = is_gimple_reg_type (TREE_TYPE (gimple_call_arg (stmt, i)));
1459 ret = walk_tree (gimple_call_arg_ptr (stmt, i), callback_op, wi,
1460 pset);
1461 if (ret)
1462 return ret;
1463 }
1464
1465 if (gimple_call_lhs (stmt))
1466 {
1467 if (wi)
1468 {
1469 wi->is_lhs = true;
1470 wi->val_only
1471 = is_gimple_reg_type (TREE_TYPE (gimple_call_lhs (stmt)));
1472 }
1473
1474 ret = walk_tree (gimple_call_lhs_ptr (stmt), callback_op, wi, pset);
1475 if (ret)
1476 return ret;
1477 }
1478
1479 if (wi)
1480 {
1481 wi->is_lhs = false;
1482 wi->val_only = true;
1483 }
1484 break;
1485
1486 case GIMPLE_CATCH:
1487 ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
1488 pset);
1489 if (ret)
1490 return ret;
1491 break;
1492
1493 case GIMPLE_EH_FILTER:
1494 ret = walk_tree (gimple_eh_filter_types_ptr (stmt), callback_op, wi,
1495 pset);
1496 if (ret)
1497 return ret;
1498 break;
1499
1500 case GIMPLE_ASM:
1501 ret = walk_gimple_asm (stmt, callback_op, wi);
1502 if (ret)
1503 return ret;
1504 break;
1505
1506 case GIMPLE_OMP_CONTINUE:
1507 ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
1508 callback_op, wi, pset);
1509 if (ret)
1510 return ret;
1511
1512 ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
1513 callback_op, wi, pset);
1514 if (ret)
1515 return ret;
1516 break;
1517
1518 case GIMPLE_OMP_CRITICAL:
1519 ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
1520 pset);
1521 if (ret)
1522 return ret;
1523 break;
1524
1525 case GIMPLE_OMP_FOR:
1526 ret = walk_tree (gimple_omp_for_clauses_ptr (stmt), callback_op, wi,
1527 pset);
1528 if (ret)
1529 return ret;
1530 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1531 {
1532 ret = walk_tree (gimple_omp_for_index_ptr (stmt, i), callback_op,
1533 wi, pset);
1534 if (ret)
1535 return ret;
1536 ret = walk_tree (gimple_omp_for_initial_ptr (stmt, i), callback_op,
1537 wi, pset);
1538 if (ret)
1539 return ret;
1540 ret = walk_tree (gimple_omp_for_final_ptr (stmt, i), callback_op,
1541 wi, pset);
1542 if (ret)
1543 return ret;
1544 ret = walk_tree (gimple_omp_for_incr_ptr (stmt, i), callback_op,
1545 wi, pset);
1546 }
1547 if (ret)
1548 return ret;
1549 break;
1550
1551 case GIMPLE_OMP_PARALLEL:
1552 ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
1553 wi, pset);
1554 if (ret)
1555 return ret;
1556 ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
1557 wi, pset);
1558 if (ret)
1559 return ret;
1560 ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
1561 wi, pset);
1562 if (ret)
1563 return ret;
1564 break;
1565
1566 case GIMPLE_OMP_TASK:
1567 ret = walk_tree (gimple_omp_task_clauses_ptr (stmt), callback_op,
1568 wi, pset);
1569 if (ret)
1570 return ret;
1571 ret = walk_tree (gimple_omp_task_child_fn_ptr (stmt), callback_op,
1572 wi, pset);
1573 if (ret)
1574 return ret;
1575 ret = walk_tree (gimple_omp_task_data_arg_ptr (stmt), callback_op,
1576 wi, pset);
1577 if (ret)
1578 return ret;
1579 ret = walk_tree (gimple_omp_task_copy_fn_ptr (stmt), callback_op,
1580 wi, pset);
1581 if (ret)
1582 return ret;
1583 ret = walk_tree (gimple_omp_task_arg_size_ptr (stmt), callback_op,
1584 wi, pset);
1585 if (ret)
1586 return ret;
1587 ret = walk_tree (gimple_omp_task_arg_align_ptr (stmt), callback_op,
1588 wi, pset);
1589 if (ret)
1590 return ret;
1591 break;
1592
1593 case GIMPLE_OMP_SECTIONS:
1594 ret = walk_tree (gimple_omp_sections_clauses_ptr (stmt), callback_op,
1595 wi, pset);
1596 if (ret)
1597 return ret;
1598
1599 ret = walk_tree (gimple_omp_sections_control_ptr (stmt), callback_op,
1600 wi, pset);
1601 if (ret)
1602 return ret;
1603
1604 break;
1605
1606 case GIMPLE_OMP_SINGLE:
1607 ret = walk_tree (gimple_omp_single_clauses_ptr (stmt), callback_op, wi,
1608 pset);
1609 if (ret)
1610 return ret;
1611 break;
1612
1613 case GIMPLE_OMP_ATOMIC_LOAD:
1614 ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
1615 pset);
1616 if (ret)
1617 return ret;
1618
1619 ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
1620 pset);
1621 if (ret)
1622 return ret;
1623 break;
1624
1625 case GIMPLE_OMP_ATOMIC_STORE:
1626 ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
1627 wi, pset);
1628 if (ret)
1629 return ret;
1630 break;
1631
1632 case GIMPLE_TRANSACTION:
1633 ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
1634 wi, pset);
1635 if (ret)
1636 return ret;
1637 break;
1638
1639 /* Tuples that do not have operands. */
1640 case GIMPLE_NOP:
1641 case GIMPLE_RESX:
1642 case GIMPLE_OMP_RETURN:
1643 case GIMPLE_PREDICT:
1644 break;
1645
1646 default:
1647 {
1648 enum gimple_statement_structure_enum gss;
1649 gss = gimple_statement_structure (stmt);
1650 if (gss == GSS_WITH_OPS || gss == GSS_WITH_MEM_OPS)
1651 for (i = 0; i < gimple_num_ops (stmt); i++)
1652 {
1653 ret = walk_tree (gimple_op_ptr (stmt, i), callback_op, wi, pset);
1654 if (ret)
1655 return ret;
1656 }
1657 }
1658 break;
1659 }
1660
1661 return NULL_TREE;
1662 }
1663
1664
1665 /* Walk the current statement in GSI (optionally using traversal state
1666 stored in WI). If WI is NULL, no state is kept during traversal.
1667 The callback CALLBACK_STMT is called. If CALLBACK_STMT indicates
1668 that it has handled all the operands of the statement, its return
1669 value is returned. Otherwise, the return value from CALLBACK_STMT
1670 is discarded and its operands are scanned.
1671
1672 If CALLBACK_STMT is NULL or it didn't handle the operands,
1673 CALLBACK_OP is called on each operand of the statement via
1674 walk_gimple_op. If walk_gimple_op returns non-NULL for any
1675 operand, the remaining operands are not scanned. In this case, the
1676 return value from CALLBACK_OP is returned.
1677
1678 In any other case, NULL_TREE is returned. */
1679
1680 tree
1681 walk_gimple_stmt (gimple_stmt_iterator *gsi, walk_stmt_fn callback_stmt,
1682 walk_tree_fn callback_op, struct walk_stmt_info *wi)
1683 {
1684 gimple ret;
1685 tree tree_ret;
1686 gimple stmt = gsi_stmt (*gsi);
1687
1688 if (wi)
1689 {
1690 wi->gsi = *gsi;
1691 wi->removed_stmt = false;
1692
1693 if (wi->want_locations && gimple_has_location (stmt))
1694 input_location = gimple_location (stmt);
1695 }
1696
1697 ret = NULL;
1698
1699 /* Invoke the statement callback. Return if the callback handled
1700 all of STMT operands by itself. */
1701 if (callback_stmt)
1702 {
1703 bool handled_ops = false;
1704 tree_ret = callback_stmt (gsi, &handled_ops, wi);
1705 if (handled_ops)
1706 return tree_ret;
1707
1708 /* If CALLBACK_STMT did not handle operands, it should not have
1709 a value to return. */
1710 gcc_assert (tree_ret == NULL);
1711
1712 if (wi && wi->removed_stmt)
1713 return NULL;
1714
1715 /* Re-read stmt in case the callback changed it. */
1716 stmt = gsi_stmt (*gsi);
1717 }
1718
1719 /* If CALLBACK_OP is defined, invoke it on every operand of STMT. */
1720 if (callback_op)
1721 {
1722 tree_ret = walk_gimple_op (stmt, callback_op, wi);
1723 if (tree_ret)
1724 return tree_ret;
1725 }
1726
1727 /* If STMT can have statements inside (e.g. GIMPLE_BIND), walk them. */
1728 switch (gimple_code (stmt))
1729 {
1730 case GIMPLE_BIND:
1731 ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
1732 callback_op, wi);
1733 if (ret)
1734 return wi->callback_result;
1735 break;
1736
1737 case GIMPLE_CATCH:
1738 ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt), callback_stmt,
1739 callback_op, wi);
1740 if (ret)
1741 return wi->callback_result;
1742 break;
1743
1744 case GIMPLE_EH_FILTER:
1745 ret = walk_gimple_seq_mod (gimple_eh_filter_failure_ptr (stmt), callback_stmt,
1746 callback_op, wi);
1747 if (ret)
1748 return wi->callback_result;
1749 break;
1750
1751 case GIMPLE_EH_ELSE:
1752 ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
1753 callback_stmt, callback_op, wi);
1754 if (ret)
1755 return wi->callback_result;
1756 ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
1757 callback_stmt, callback_op, wi);
1758 if (ret)
1759 return wi->callback_result;
1760 break;
1761
1762 case GIMPLE_TRY:
1763 ret = walk_gimple_seq_mod (gimple_try_eval_ptr (stmt), callback_stmt, callback_op,
1764 wi);
1765 if (ret)
1766 return wi->callback_result;
1767
1768 ret = walk_gimple_seq_mod (gimple_try_cleanup_ptr (stmt), callback_stmt,
1769 callback_op, wi);
1770 if (ret)
1771 return wi->callback_result;
1772 break;
1773
1774 case GIMPLE_OMP_FOR:
1775 ret = walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt), callback_stmt,
1776 callback_op, wi);
1777 if (ret)
1778 return wi->callback_result;
1779
1780 /* FALL THROUGH. */
1781 case GIMPLE_OMP_CRITICAL:
1782 case GIMPLE_OMP_MASTER:
1783 case GIMPLE_OMP_ORDERED:
1784 case GIMPLE_OMP_SECTION:
1785 case GIMPLE_OMP_PARALLEL:
1786 case GIMPLE_OMP_TASK:
1787 case GIMPLE_OMP_SECTIONS:
1788 case GIMPLE_OMP_SINGLE:
1789 ret = walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), callback_stmt,
1790 callback_op, wi);
1791 if (ret)
1792 return wi->callback_result;
1793 break;
1794
1795 case GIMPLE_WITH_CLEANUP_EXPR:
1796 ret = walk_gimple_seq_mod (gimple_wce_cleanup_ptr (stmt), callback_stmt,
1797 callback_op, wi);
1798 if (ret)
1799 return wi->callback_result;
1800 break;
1801
1802 case GIMPLE_TRANSACTION:
1803 ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
1804 callback_stmt, callback_op, wi);
1805 if (ret)
1806 return wi->callback_result;
1807 break;
1808
1809 default:
1810 gcc_assert (!gimple_has_substatements (stmt));
1811 break;
1812 }
1813
1814 return NULL;
1815 }
1816
1817
1818 /* Set sequence SEQ to be the GIMPLE body for function FN. */
1819
1820 void
1821 gimple_set_body (tree fndecl, gimple_seq seq)
1822 {
1823 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1824 if (fn == NULL)
1825 {
1826 /* If FNDECL still does not have a function structure associated
1827 with it, then it does not make sense for it to receive a
1828 GIMPLE body. */
1829 gcc_assert (seq == NULL);
1830 }
1831 else
1832 fn->gimple_body = seq;
1833 }
1834
1835
1836 /* Return the body of GIMPLE statements for function FN. After the
1837 CFG pass, the function body doesn't exist anymore because it has
1838 been split up into basic blocks. In this case, it returns
1839 NULL. */
1840
1841 gimple_seq
1842 gimple_body (tree fndecl)
1843 {
1844 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1845 return fn ? fn->gimple_body : NULL;
1846 }
1847
1848 /* Return true when FNDECL has a GIMPLE body, either in unlowered
1849 or CFG form. */
1850 bool
1851 gimple_has_body_p (tree fndecl)
1852 {
1853 struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
1854 return (gimple_body (fndecl) || (fn && fn->cfg));
1855 }
1856
1857 /* Return true if calls C1 and C2 are known to go to the same function. */
1858
1859 bool
1860 gimple_call_same_target_p (const_gimple c1, const_gimple c2)
1861 {
1862 if (gimple_call_internal_p (c1))
1863 return (gimple_call_internal_p (c2)
1864 && gimple_call_internal_fn (c1) == gimple_call_internal_fn (c2));
1865 else
1866 return (gimple_call_fn (c1) == gimple_call_fn (c2)
1867 || (gimple_call_fndecl (c1)
1868 && gimple_call_fndecl (c1) == gimple_call_fndecl (c2)));
1869 }
1870
1871 /* Detect flags from a GIMPLE_CALL. This is just like
1872 call_expr_flags, but for gimple tuples. */
1873
1874 int
1875 gimple_call_flags (const_gimple stmt)
1876 {
1877 int flags;
1878 tree decl = gimple_call_fndecl (stmt);
1879
1880 if (decl)
1881 flags = flags_from_decl_or_type (decl);
1882 else if (gimple_call_internal_p (stmt))
1883 flags = internal_fn_flags (gimple_call_internal_fn (stmt));
1884 else
1885 flags = flags_from_decl_or_type (gimple_call_fntype (stmt));
1886
1887 if (stmt->gsbase.subcode & GF_CALL_NOTHROW)
1888 flags |= ECF_NOTHROW;
1889
1890 return flags;
1891 }
1892
1893 /* Return the "fn spec" string for call STMT. */
1894
1895 static tree
1896 gimple_call_fnspec (const_gimple stmt)
1897 {
1898 tree type, attr;
1899
1900 type = gimple_call_fntype (stmt);
1901 if (!type)
1902 return NULL_TREE;
1903
1904 attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
1905 if (!attr)
1906 return NULL_TREE;
1907
1908 return TREE_VALUE (TREE_VALUE (attr));
1909 }
1910
1911 /* Detects argument flags for argument number ARG on call STMT. */
1912
1913 int
1914 gimple_call_arg_flags (const_gimple stmt, unsigned arg)
1915 {
1916 tree attr = gimple_call_fnspec (stmt);
1917
1918 if (!attr || 1 + arg >= (unsigned) TREE_STRING_LENGTH (attr))
1919 return 0;
1920
1921 switch (TREE_STRING_POINTER (attr)[1 + arg])
1922 {
1923 case 'x':
1924 case 'X':
1925 return EAF_UNUSED;
1926
1927 case 'R':
1928 return EAF_DIRECT | EAF_NOCLOBBER | EAF_NOESCAPE;
1929
1930 case 'r':
1931 return EAF_NOCLOBBER | EAF_NOESCAPE;
1932
1933 case 'W':
1934 return EAF_DIRECT | EAF_NOESCAPE;
1935
1936 case 'w':
1937 return EAF_NOESCAPE;
1938
1939 case '.':
1940 default:
1941 return 0;
1942 }
1943 }
1944
1945 /* Detects return flags for the call STMT. */
1946
1947 int
1948 gimple_call_return_flags (const_gimple stmt)
1949 {
1950 tree attr;
1951
1952 if (gimple_call_flags (stmt) & ECF_MALLOC)
1953 return ERF_NOALIAS;
1954
1955 attr = gimple_call_fnspec (stmt);
1956 if (!attr || TREE_STRING_LENGTH (attr) < 1)
1957 return 0;
1958
1959 switch (TREE_STRING_POINTER (attr)[0])
1960 {
1961 case '1':
1962 case '2':
1963 case '3':
1964 case '4':
1965 return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
1966
1967 case 'm':
1968 return ERF_NOALIAS;
1969
1970 case '.':
1971 default:
1972 return 0;
1973 }
1974 }
1975
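/* Worked example (editorial addition): for a call whose fntype carries
   the internal "fn spec" attribute string "1rW", the two functions above
   give gimple_call_return_flags () == ERF_RETURNS_ARG (returns argument 0),
   gimple_call_arg_flags (stmt, 0) == (EAF_NOCLOBBER | EAF_NOESCAPE) and
   gimple_call_arg_flags (stmt, 1) == (EAF_DIRECT | EAF_NOESCAPE),
   assuming the call is not also ECF_MALLOC.  */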
1976
1977 /* Return true if GS is a copy assignment. */
1978
1979 bool
1980 gimple_assign_copy_p (gimple gs)
1981 {
1982 return (gimple_assign_single_p (gs)
1983 && is_gimple_val (gimple_op (gs, 1)));
1984 }
1985
1986
1987 /* Return true if GS is a SSA_NAME copy assignment. */
1988
1989 bool
1990 gimple_assign_ssa_name_copy_p (gimple gs)
1991 {
1992 return (gimple_assign_single_p (gs)
1993 && TREE_CODE (gimple_assign_lhs (gs)) == SSA_NAME
1994 && TREE_CODE (gimple_assign_rhs1 (gs)) == SSA_NAME);
1995 }
1996
1997
1998 /* Return true if GS is an assignment with a unary RHS, but the
1999 operator has no effect on the assigned value. The logic is adapted
2000 from STRIP_NOPS. This predicate is intended to be used in tuplifying
2001 instances in which STRIP_NOPS was previously applied to the RHS of
2002 an assignment.
2003
2004 NOTE: In the use cases that led to the creation of this function
2005 and of gimple_assign_single_p, it is typical to test for either
2006 condition and to proceed in the same manner. In each case, the
2007 assigned value is represented by the single RHS operand of the
2008 assignment. I suspect there may be cases where gimple_assign_copy_p,
2009 gimple_assign_single_p, or equivalent logic is used where a similar
2010 treatment of unary NOPs is appropriate. */
2011
2012 bool
2013 gimple_assign_unary_nop_p (gimple gs)
2014 {
2015 return (is_gimple_assign (gs)
2016 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs))
2017 || gimple_assign_rhs_code (gs) == NON_LVALUE_EXPR)
2018 && gimple_assign_rhs1 (gs) != error_mark_node
2019 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs)))
2020 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs)))));
2021 }
2022
2023 /* Set BB to be the basic block holding G. */
2024
2025 void
2026 gimple_set_bb (gimple stmt, basic_block bb)
2027 {
2028 stmt->gsbase.bb = bb;
2029
2030 /* If the statement is a label, add the label to block-to-labels map
2031 so that we can speed up edge creation for GIMPLE_GOTOs. */
2032 if (cfun->cfg && gimple_code (stmt) == GIMPLE_LABEL)
2033 {
2034 tree t;
2035 int uid;
2036
2037 t = gimple_label_label (stmt);
2038 uid = LABEL_DECL_UID (t);
2039 if (uid == -1)
2040 {
2041 unsigned old_len = vec_safe_length (label_to_block_map);
2042 LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
2043 if (old_len <= (unsigned) uid)
2044 {
2045 unsigned new_len = 3 * uid / 2 + 1;
2046
2047 vec_safe_grow_cleared (label_to_block_map, new_len);
2048 }
2049 }
2050
2051 (*label_to_block_map)[uid] = bb;
2052 }
2053 }
2054
2055
2056 /* Modify the RHS of the assignment pointed-to by GSI using the
2057 operands in the expression tree EXPR.
2058
2059 NOTE: The statement pointed-to by GSI may be reallocated if it
2060 did not have enough operand slots.
2061
2062 This function is useful to convert an existing tree expression into
2063 the flat representation used for the RHS of a GIMPLE assignment.
2064 It will reallocate memory as needed to expand or shrink the number
2065 of operand slots needed to represent EXPR.
2066
2067 NOTE: If you find yourself building a tree and then calling this
2068 function, you are most certainly doing it the slow way. It is much
2069 better to build a new assignment or to use the function
2070 gimple_assign_set_rhs_with_ops, which does not require an
2071 expression tree to be built. */
2072
2073 void
2074 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
2075 {
2076 enum tree_code subcode;
2077 tree op1, op2, op3;
2078
2079 extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
2080 gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
2081 }
2082
2083
2084 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
2085 operands OP1, OP2 and OP3.
2086
2087 NOTE: The statement pointed-to by GSI may be reallocated if it
2088 did not have enough operand slots. */
2089
2090 void
2091 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
2092 tree op1, tree op2, tree op3)
2093 {
2094 unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
2095 gimple stmt = gsi_stmt (*gsi);
2096
2097 /* If the new CODE needs more operands, allocate a new statement. */
2098 if (gimple_num_ops (stmt) < new_rhs_ops + 1)
2099 {
2100 tree lhs = gimple_assign_lhs (stmt);
2101 gimple new_stmt = gimple_alloc (gimple_code (stmt), new_rhs_ops + 1);
2102 memcpy (new_stmt, stmt, gimple_size (gimple_code (stmt)));
2103 gimple_init_singleton (new_stmt);
2104 gsi_replace (gsi, new_stmt, true);
2105 stmt = new_stmt;
2106
2107 /* The LHS needs to be reset as this also changes the SSA name
2108 on the LHS. */
2109 gimple_assign_set_lhs (stmt, lhs);
2110 }
2111
2112 gimple_set_num_ops (stmt, new_rhs_ops + 1);
2113 gimple_set_subcode (stmt, code);
2114 gimple_assign_set_rhs1 (stmt, op1);
2115 if (new_rhs_ops > 1)
2116 gimple_assign_set_rhs2 (stmt, op2);
2117 if (new_rhs_ops > 2)
2118 gimple_assign_set_rhs3 (stmt, op3);
2119 }
2120
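/* Illustrative sketch (editor's example): to turn the assignment at GSI
   into "lhs = op1 + op2" one could write

     gimple_assign_set_rhs_with_ops_1 (&gsi, PLUS_EXPR, op1, op2, NULL_TREE);
     update_stmt (gsi_stmt (gsi));

   Because the statement may be reallocated, any gimple pointer obtained
   from gsi_stmt before the call must be refreshed afterwards.  */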
2121
2122 /* Return the LHS of a statement that performs an assignment,
2123 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
2124 for a call to a function that returns no value, or for a
2125 statement other than an assignment or a call. */
2126
2127 tree
2128 gimple_get_lhs (const_gimple stmt)
2129 {
2130 enum gimple_code code = gimple_code (stmt);
2131
2132 if (code == GIMPLE_ASSIGN)
2133 return gimple_assign_lhs (stmt);
2134 else if (code == GIMPLE_CALL)
2135 return gimple_call_lhs (stmt);
2136 else
2137 return NULL_TREE;
2138 }
2139
2140
2141 /* Set the LHS of a statement that performs an assignment,
2142 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
2143
2144 void
2145 gimple_set_lhs (gimple stmt, tree lhs)
2146 {
2147 enum gimple_code code = gimple_code (stmt);
2148
2149 if (code == GIMPLE_ASSIGN)
2150 gimple_assign_set_lhs (stmt, lhs);
2151 else if (code == GIMPLE_CALL)
2152 gimple_call_set_lhs (stmt, lhs);
2153 else
2154 gcc_unreachable();
2155 }
2156
2157 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
2158 GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
2159 expression with a different value.
2160
2161 This will update any annotations (say debug bind stmts) referring
2162 to the original LHS, so that they use the RHS instead. This is
2163 done even if NLHS and LHS are the same, for it is understood that
2164 the RHS will be modified afterwards, and NLHS will not be assigned
2165 an equivalent value.
2166
2167 Adjusting any non-annotation uses of the LHS, if needed, is a
2168 responsibility of the caller.
2169
2170 The effect of this call should be pretty much the same as that of
2171 inserting a copy of STMT before STMT, and then removing the
2172 original stmt, at which time gsi_remove() would have updated the
2173 annotations, but using this function saves all the inserting,
2174 copying and removing. */
2175
2176 void
2177 gimple_replace_lhs (gimple stmt, tree nlhs)
2178 {
2179 if (MAY_HAVE_DEBUG_STMTS)
2180 {
2181 tree lhs = gimple_get_lhs (stmt);
2182
2183 gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
2184
2185 insert_debug_temp_for_var_def (NULL, lhs);
2186 }
2187
2188 gimple_set_lhs (stmt, nlhs);
2189 }
2190
2191 /* Return a deep copy of statement STMT. All the operands from STMT
2192 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
2193 and VUSE operand arrays are set to empty in the new copy. The new
2194 copy isn't part of any sequence. */
2195
2196 gimple
2197 gimple_copy (gimple stmt)
2198 {
2199 enum gimple_code code = gimple_code (stmt);
2200 unsigned num_ops = gimple_num_ops (stmt);
2201 gimple copy = gimple_alloc (code, num_ops);
2202 unsigned i;
2203
2204 /* Shallow copy all the fields from STMT. */
2205 memcpy (copy, stmt, gimple_size (code));
2206 gimple_init_singleton (copy);
2207
2208 /* If STMT has sub-statements, deep-copy them as well. */
2209 if (gimple_has_substatements (stmt))
2210 {
2211 gimple_seq new_seq;
2212 tree t;
2213
2214 switch (gimple_code (stmt))
2215 {
2216 case GIMPLE_BIND:
2217 new_seq = gimple_seq_copy (gimple_bind_body (stmt));
2218 gimple_bind_set_body (copy, new_seq);
2219 gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
2220 gimple_bind_set_block (copy, gimple_bind_block (stmt));
2221 break;
2222
2223 case GIMPLE_CATCH:
2224 new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
2225 gimple_catch_set_handler (copy, new_seq);
2226 t = unshare_expr (gimple_catch_types (stmt));
2227 gimple_catch_set_types (copy, t);
2228 break;
2229
2230 case GIMPLE_EH_FILTER:
2231 new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
2232 gimple_eh_filter_set_failure (copy, new_seq);
2233 t = unshare_expr (gimple_eh_filter_types (stmt));
2234 gimple_eh_filter_set_types (copy, t);
2235 break;
2236
2237 case GIMPLE_EH_ELSE:
2238 new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
2239 gimple_eh_else_set_n_body (copy, new_seq);
2240 new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
2241 gimple_eh_else_set_e_body (copy, new_seq);
2242 break;
2243
2244 case GIMPLE_TRY:
2245 new_seq = gimple_seq_copy (gimple_try_eval (stmt));
2246 gimple_try_set_eval (copy, new_seq);
2247 new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
2248 gimple_try_set_cleanup (copy, new_seq);
2249 break;
2250
2251 case GIMPLE_OMP_FOR:
2252 new_seq = gimple_seq_copy (gimple_omp_for_pre_body (stmt));
2253 gimple_omp_for_set_pre_body (copy, new_seq);
2254 t = unshare_expr (gimple_omp_for_clauses (stmt));
2255 gimple_omp_for_set_clauses (copy, t);
2256 copy->gimple_omp_for.iter
2257 = ggc_alloc_vec_gimple_omp_for_iter
2258 (gimple_omp_for_collapse (stmt));
2259 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2260 {
2261 gimple_omp_for_set_cond (copy, i,
2262 gimple_omp_for_cond (stmt, i));
2263 gimple_omp_for_set_index (copy, i,
2264 gimple_omp_for_index (stmt, i));
2265 t = unshare_expr (gimple_omp_for_initial (stmt, i));
2266 gimple_omp_for_set_initial (copy, i, t);
2267 t = unshare_expr (gimple_omp_for_final (stmt, i));
2268 gimple_omp_for_set_final (copy, i, t);
2269 t = unshare_expr (gimple_omp_for_incr (stmt, i));
2270 gimple_omp_for_set_incr (copy, i, t);
2271 }
2272 goto copy_omp_body;
2273
2274 case GIMPLE_OMP_PARALLEL:
2275 t = unshare_expr (gimple_omp_parallel_clauses (stmt));
2276 gimple_omp_parallel_set_clauses (copy, t);
2277 t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
2278 gimple_omp_parallel_set_child_fn (copy, t);
2279 t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
2280 gimple_omp_parallel_set_data_arg (copy, t);
2281 goto copy_omp_body;
2282
2283 case GIMPLE_OMP_TASK:
2284 t = unshare_expr (gimple_omp_task_clauses (stmt));
2285 gimple_omp_task_set_clauses (copy, t);
2286 t = unshare_expr (gimple_omp_task_child_fn (stmt));
2287 gimple_omp_task_set_child_fn (copy, t);
2288 t = unshare_expr (gimple_omp_task_data_arg (stmt));
2289 gimple_omp_task_set_data_arg (copy, t);
2290 t = unshare_expr (gimple_omp_task_copy_fn (stmt));
2291 gimple_omp_task_set_copy_fn (copy, t);
2292 t = unshare_expr (gimple_omp_task_arg_size (stmt));
2293 gimple_omp_task_set_arg_size (copy, t);
2294 t = unshare_expr (gimple_omp_task_arg_align (stmt));
2295 gimple_omp_task_set_arg_align (copy, t);
2296 goto copy_omp_body;
2297
2298 case GIMPLE_OMP_CRITICAL:
2299 t = unshare_expr (gimple_omp_critical_name (stmt));
2300 gimple_omp_critical_set_name (copy, t);
2301 goto copy_omp_body;
2302
2303 case GIMPLE_OMP_SECTIONS:
2304 t = unshare_expr (gimple_omp_sections_clauses (stmt));
2305 gimple_omp_sections_set_clauses (copy, t);
2306 t = unshare_expr (gimple_omp_sections_control (stmt));
2307 gimple_omp_sections_set_control (copy, t);
2308 /* FALLTHRU */
2309
2310 case GIMPLE_OMP_SINGLE:
2311 case GIMPLE_OMP_SECTION:
2312 case GIMPLE_OMP_MASTER:
2313 case GIMPLE_OMP_ORDERED:
2314 copy_omp_body:
2315 new_seq = gimple_seq_copy (gimple_omp_body (stmt));
2316 gimple_omp_set_body (copy, new_seq);
2317 break;
2318
2319 case GIMPLE_TRANSACTION:
2320 new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
2321 gimple_transaction_set_body (copy, new_seq);
2322 break;
2323
2324 case GIMPLE_WITH_CLEANUP_EXPR:
2325 new_seq = gimple_seq_copy (gimple_wce_cleanup (stmt));
2326 gimple_wce_set_cleanup (copy, new_seq);
2327 break;
2328
2329 default:
2330 gcc_unreachable ();
2331 }
2332 }
2333
2334 /* Make copy of operands. */
2335 for (i = 0; i < num_ops; i++)
2336 gimple_set_op (copy, i, unshare_expr (gimple_op (stmt, i)));
2337
2338 if (gimple_has_mem_ops (stmt))
2339 {
2340 gimple_set_vdef (copy, gimple_vdef (stmt));
2341 gimple_set_vuse (copy, gimple_vuse (stmt));
2342 }
2343
2344 /* Clear out SSA operand vectors on COPY. */
2345 if (gimple_has_ops (stmt))
2346 {
2347 gimple_set_use_ops (copy, NULL);
2348
2349 /* SSA operands need to be updated. */
2350 gimple_set_modified (copy, true);
2351 }
2352
2353 return copy;
2354 }
2355
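/* Illustrative sketch (editor's example):

     gimple dup = gimple_copy (stmt);

   DUP shares no operand trees with STMT (they are unshared), is not a
   member of any sequence and carries empty SSA use-operand caches; a
   caller that inserts it into the IL is expected to rename any SSA
   definitions and let update_stmt rebuild the operand caches.  */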
2356
2357 /* Return true if statement S has side-effects. We consider a
2358 statement to have side effects if:
2359
2360 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
2361 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
2362
2363 bool
2364 gimple_has_side_effects (const_gimple s)
2365 {
2366 if (is_gimple_debug (s))
2367 return false;
2368
2369 /* We don't have to scan the arguments to check for
2370 volatile arguments; at present, though, we still
2371 do a scan to check for TREE_SIDE_EFFECTS. */
2372 if (gimple_has_volatile_ops (s))
2373 return true;
2374
2375 if (gimple_code (s) == GIMPLE_ASM
2376 && gimple_asm_volatile_p (s))
2377 return true;
2378
2379 if (is_gimple_call (s))
2380 {
2381 int flags = gimple_call_flags (s);
2382
2383 /* An infinite loop is considered a side effect. */
2384 if (!(flags & (ECF_CONST | ECF_PURE))
2385 || (flags & ECF_LOOPING_CONST_OR_PURE))
2386 return true;
2387
2388 return false;
2389 }
2390
2391 return false;
2392 }
2393
2394 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
2395 Return true if S can trap. When INCLUDE_MEM is true, check whether
2396 the memory operations could trap. When INCLUDE_STORES is true and
2397 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
2398
2399 bool
2400 gimple_could_trap_p_1 (gimple s, bool include_mem, bool include_stores)
2401 {
2402 tree t, div = NULL_TREE;
2403 enum tree_code op;
2404
2405 if (include_mem)
2406 {
2407 unsigned i, start = (is_gimple_assign (s) && !include_stores) ? 1 : 0;
2408
2409 for (i = start; i < gimple_num_ops (s); i++)
2410 if (tree_could_trap_p (gimple_op (s, i)))
2411 return true;
2412 }
2413
2414 switch (gimple_code (s))
2415 {
2416 case GIMPLE_ASM:
2417 return gimple_asm_volatile_p (s);
2418
2419 case GIMPLE_CALL:
2420 t = gimple_call_fndecl (s);
2421 /* Assume that calls to weak functions may trap. */
2422 if (!t || !DECL_P (t) || DECL_WEAK (t))
2423 return true;
2424 return false;
2425
2426 case GIMPLE_ASSIGN:
2427 t = gimple_expr_type (s);
2428 op = gimple_assign_rhs_code (s);
2429 if (get_gimple_rhs_class (op) == GIMPLE_BINARY_RHS)
2430 div = gimple_assign_rhs2 (s);
2431 return (operation_could_trap_p (op, FLOAT_TYPE_P (t),
2432 (INTEGRAL_TYPE_P (t)
2433 && TYPE_OVERFLOW_TRAPS (t)),
2434 div));
2435
2436 default:
2437 break;
2438 }
2439
2440 return false;
2441 }
2442
2443 /* Return true if statement S can trap. */
2444
2445 bool
2446 gimple_could_trap_p (gimple s)
2447 {
2448 return gimple_could_trap_p_1 (s, true, true);
2449 }
2450
2451 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
2452
2453 bool
2454 gimple_assign_rhs_could_trap_p (gimple s)
2455 {
2456 gcc_assert (is_gimple_assign (s));
2457 return gimple_could_trap_p_1 (s, true, false);
2458 }
2459
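/* Illustrative sketch (editor's example): for an assignment such as
   "x_1 = a_2 / b_3" with a divisor that is not known to be nonzero,
   gimple_assign_rhs_could_trap_p (stmt) returns true; gimple_could_trap_p
   additionally considers a possibly trapping memory access on the LHS.  */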
2460
2461 /* Print allocation statistics for the GIMPLE statements generated. */
2462
2463 void
2464 dump_gimple_statistics (void)
2465 {
2466 int i, total_tuples = 0, total_bytes = 0;
2467
2468 if (! GATHER_STATISTICS)
2469 {
2470 fprintf (stderr, "No gimple statistics\n");
2471 return;
2472 }
2473
2474 fprintf (stderr, "\nGIMPLE statements\n");
2475 fprintf (stderr, "Kind Stmts Bytes\n");
2476 fprintf (stderr, "---------------------------------------\n");
2477 for (i = 0; i < (int) gimple_alloc_kind_all; ++i)
2478 {
2479 fprintf (stderr, "%-20s %7d %10d\n", gimple_alloc_kind_names[i],
2480 gimple_alloc_counts[i], gimple_alloc_sizes[i]);
2481 total_tuples += gimple_alloc_counts[i];
2482 total_bytes += gimple_alloc_sizes[i];
2483 }
2484 fprintf (stderr, "---------------------------------------\n");
2485 fprintf (stderr, "%-20s %7d %10d\n", "Total", total_tuples, total_bytes);
2486 fprintf (stderr, "---------------------------------------\n");
2487 }
2488
2489
2490 /* Return the number of operands needed on the RHS of a GIMPLE
2491 assignment for an expression with tree code CODE. */
2492
2493 unsigned
2494 get_gimple_rhs_num_ops (enum tree_code code)
2495 {
2496 enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
2497
2498 if (rhs_class == GIMPLE_UNARY_RHS || rhs_class == GIMPLE_SINGLE_RHS)
2499 return 1;
2500 else if (rhs_class == GIMPLE_BINARY_RHS)
2501 return 2;
2502 else if (rhs_class == GIMPLE_TERNARY_RHS)
2503 return 3;
2504 else
2505 gcc_unreachable ();
2506 }
2507
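/* Illustrative sketch (editor's example): following the classification
   table below, get_gimple_rhs_num_ops (PLUS_EXPR) is 2, COND_EXPR needs
   3 operand slots, and a single SSA_NAME or ADDR_EXPR right-hand side
   needs only 1.  */
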
2508 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
2509 (unsigned char) \
2510 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
2511 : ((TYPE) == tcc_binary \
2512 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
2513 : ((TYPE) == tcc_constant \
2514 || (TYPE) == tcc_declaration \
2515 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
2516 : ((SYM) == TRUTH_AND_EXPR \
2517 || (SYM) == TRUTH_OR_EXPR \
2518 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
2519 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
2520 : ((SYM) == COND_EXPR \
2521 || (SYM) == WIDEN_MULT_PLUS_EXPR \
2522 || (SYM) == WIDEN_MULT_MINUS_EXPR \
2523 || (SYM) == DOT_PROD_EXPR \
2524 || (SYM) == REALIGN_LOAD_EXPR \
2525 || (SYM) == VEC_COND_EXPR \
2526 || (SYM) == VEC_PERM_EXPR \
2527 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
2528 : ((SYM) == CONSTRUCTOR \
2529 || (SYM) == OBJ_TYPE_REF \
2530 || (SYM) == ASSERT_EXPR \
2531 || (SYM) == ADDR_EXPR \
2532 || (SYM) == WITH_SIZE_EXPR \
2533 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
2534 : GIMPLE_INVALID_RHS),
2535 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
2536
2537 const unsigned char gimple_rhs_class_table[] = {
2538 #include "all-tree.def"
2539 };
2540
2541 #undef DEFTREECODE
2542 #undef END_OF_BASE_TREE_CODES
2543
2544 /* For the definitive definition of GIMPLE, see doc/tree-ssa.texi. */
2545
2546 /* Validation of GIMPLE expressions. */
2547
2548 /* Return true if T is a valid LHS for a GIMPLE assignment expression. */
2549
2550 bool
2551 is_gimple_lvalue (tree t)
2552 {
2553 return (is_gimple_addressable (t)
2554 || TREE_CODE (t) == WITH_SIZE_EXPR
2555 /* These are complex lvalues, but don't have addresses, so they
2556 go here. */
2557 || TREE_CODE (t) == BIT_FIELD_REF);
2558 }
2559
2560 /* Return true if T is a GIMPLE condition. */
2561
2562 bool
2563 is_gimple_condexpr (tree t)
2564 {
2565 return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
2566 && !tree_could_throw_p (t)
2567 && is_gimple_val (TREE_OPERAND (t, 0))
2568 && is_gimple_val (TREE_OPERAND (t, 1))));
2569 }
2570
2571 /* Return true if T is something whose address can be taken. */
2572
2573 bool
2574 is_gimple_addressable (tree t)
2575 {
2576 return (is_gimple_id (t) || handled_component_p (t)
2577 || TREE_CODE (t) == MEM_REF);
2578 }
2579
2580 /* Return true if T is a valid gimple constant. */
2581
2582 bool
2583 is_gimple_constant (const_tree t)
2584 {
2585 switch (TREE_CODE (t))
2586 {
2587 case INTEGER_CST:
2588 case REAL_CST:
2589 case FIXED_CST:
2590 case STRING_CST:
2591 case COMPLEX_CST:
2592 case VECTOR_CST:
2593 return true;
2594
2595 /* Vector constant constructors are gimple invariant. */
2596 case CONSTRUCTOR:
2597 if (TREE_TYPE (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2598 return TREE_CONSTANT (t);
2599 else
2600 return false;
2601
2602 default:
2603 return false;
2604 }
2605 }
2606
2607 /* Return true if T is a gimple address. */
2608
2609 bool
2610 is_gimple_address (const_tree t)
2611 {
2612 tree op;
2613
2614 if (TREE_CODE (t) != ADDR_EXPR)
2615 return false;
2616
2617 op = TREE_OPERAND (t, 0);
2618 while (handled_component_p (op))
2619 {
2620 if ((TREE_CODE (op) == ARRAY_REF
2621 || TREE_CODE (op) == ARRAY_RANGE_REF)
2622 && !is_gimple_val (TREE_OPERAND (op, 1)))
2623 return false;
2624
2625 op = TREE_OPERAND (op, 0);
2626 }
2627
2628 if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
2629 return true;
2630
2631 switch (TREE_CODE (op))
2632 {
2633 case PARM_DECL:
2634 case RESULT_DECL:
2635 case LABEL_DECL:
2636 case FUNCTION_DECL:
2637 case VAR_DECL:
2638 case CONST_DECL:
2639 return true;
2640
2641 default:
2642 return false;
2643 }
2644 }
2645
2646 /* Return true if T is a gimple invariant address. */
2647
2648 bool
2649 is_gimple_invariant_address (const_tree t)
2650 {
2651 const_tree op;
2652
2653 if (TREE_CODE (t) != ADDR_EXPR)
2654 return false;
2655
2656 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2657 if (!op)
2658 return false;
2659
2660 if (TREE_CODE (op) == MEM_REF)
2661 {
2662 const_tree op0 = TREE_OPERAND (op, 0);
2663 return (TREE_CODE (op0) == ADDR_EXPR
2664 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2665 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
2666 }
2667
2668 return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
2669 }
2670
2671 /* Return true if T is a gimple invariant address at IPA level
2672 (so addresses of variables on stack are not allowed). */
2673
2674 bool
2675 is_gimple_ip_invariant_address (const_tree t)
2676 {
2677 const_tree op;
2678
2679 if (TREE_CODE (t) != ADDR_EXPR)
2680 return false;
2681
2682 op = strip_invariant_refs (TREE_OPERAND (t, 0));
2683 if (!op)
2684 return false;
2685
2686 if (TREE_CODE (op) == MEM_REF)
2687 {
2688 const_tree op0 = TREE_OPERAND (op, 0);
2689 return (TREE_CODE (op0) == ADDR_EXPR
2690 && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
2691 || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
2692 }
2693
2694 return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
2695 }
2696
2697 /* Return true if T is a GIMPLE minimal invariant. It's a restricted
2698 form of function invariant. */
2699
2700 bool
2701 is_gimple_min_invariant (const_tree t)
2702 {
2703 if (TREE_CODE (t) == ADDR_EXPR)
2704 return is_gimple_invariant_address (t);
2705
2706 return is_gimple_constant (t);
2707 }
2708
2709 /* Return true if T is a GIMPLE interprocedural invariant. It's a restricted
2710 form of gimple minimal invariant. */
2711
2712 bool
2713 is_gimple_ip_invariant (const_tree t)
2714 {
2715 if (TREE_CODE (t) == ADDR_EXPR)
2716 return is_gimple_ip_invariant_address (t);
2717
2718 return is_gimple_constant (t);
2719 }
2720
2721 /* Return true if T is a variable. */
2722
2723 bool
2724 is_gimple_variable (tree t)
2725 {
2726 return (TREE_CODE (t) == VAR_DECL
2727 || TREE_CODE (t) == PARM_DECL
2728 || TREE_CODE (t) == RESULT_DECL
2729 || TREE_CODE (t) == SSA_NAME);
2730 }
2731
2732 /* Return true if T is a GIMPLE identifier (something with an address). */
2733
2734 bool
2735 is_gimple_id (tree t)
2736 {
2737 return (is_gimple_variable (t)
2738 || TREE_CODE (t) == FUNCTION_DECL
2739 || TREE_CODE (t) == LABEL_DECL
2740 || TREE_CODE (t) == CONST_DECL
2741 /* Allow string constants, since they are addressable. */
2742 || TREE_CODE (t) == STRING_CST);
2743 }
2744
2745 /* Return true if T is a non-aggregate register variable. */
2746
2747 bool
2748 is_gimple_reg (tree t)
2749 {
2750 if (virtual_operand_p (t))
2751 return false;
2752
2753 if (TREE_CODE (t) == SSA_NAME)
2754 return true;
2755
2756 if (!is_gimple_variable (t))
2757 return false;
2758
2759 if (!is_gimple_reg_type (TREE_TYPE (t)))
2760 return false;
2761
2762 /* A volatile decl is not acceptable because we can't reuse it as
2763 needed. We need to copy it into a temp first. */
2764 if (TREE_THIS_VOLATILE (t))
2765 return false;
2766
2767 /* We define "registers" as things that can be renamed as needed,
2768 which with our infrastructure does not apply to memory. */
2769 if (needs_to_live_in_memory (t))
2770 return false;
2771
2772 /* Hard register variables are an interesting case. For those that
2773 are call-clobbered, we don't know where all the calls are, since
2774 we don't (want to) take into account which operations will turn
2775 into libcalls at the rtl level. For those that are call-saved,
2776 we don't currently model the fact that calls may in fact change
2777 global hard registers, nor do we examine ASM_CLOBBERS at the tree
2778 level, and so miss variable changes they might imply. All around,
2779 it seems safest to not do too much optimization with these at the
2780 tree level at all. We'll have to rely on the rtl optimizers to
2781 clean this up, as there we've got all the appropriate bits exposed. */
2782 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2783 return false;
2784
2785 /* Complex and vector values must have been put into SSA-like form.
2786 That is, no assignments to the individual components. */
2787 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
2788 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
2789 return DECL_GIMPLE_REG_P (t);
2790
2791 return true;
2792 }
2793
2794
2795 /* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant. */
2796
2797 bool
2798 is_gimple_val (tree t)
2799 {
2800 /* Make loads from volatiles and memory vars explicit. */
2801 if (is_gimple_variable (t)
2802 && is_gimple_reg_type (TREE_TYPE (t))
2803 && !is_gimple_reg (t))
2804 return false;
2805
2806 return (is_gimple_variable (t) || is_gimple_min_invariant (t));
2807 }
2808
2809 /* Similarly, but accept hard registers as inputs to asm statements. */
2810
2811 bool
2812 is_gimple_asm_val (tree t)
2813 {
2814 if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
2815 return true;
2816
2817 return is_gimple_val (t);
2818 }
2819
2820 /* Return true if T is a GIMPLE minimal lvalue. */
2821
2822 bool
2823 is_gimple_min_lval (tree t)
2824 {
2825 if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
2826 return false;
2827 return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
2828 }
2829
2830 /* Return true if T is a valid function operand of a CALL_EXPR. */
2831
2832 bool
2833 is_gimple_call_addr (tree t)
2834 {
2835 return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
2836 }
2837
2838 /* Return true if T is a valid address operand of a MEM_REF. */
2839
2840 bool
2841 is_gimple_mem_ref_addr (tree t)
2842 {
2843 return (is_gimple_reg (t)
2844 || TREE_CODE (t) == INTEGER_CST
2845 || (TREE_CODE (t) == ADDR_EXPR
2846 && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
2847 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
2848 }
2849
2850
2851 /* Given a memory reference expression T, return its base address.
2852 The base address of a memory reference expression is the main
2853 object being referenced. For instance, the base address for
2854 'array[i].fld[j]' is 'array'. You can think of this as stripping
2855 away the offset part from a memory address.
2856
2857 This function calls handled_component_p to strip away all the inner
2858 parts of the memory reference until it reaches the base object. */
2859
2860 tree
2861 get_base_address (tree t)
2862 {
2863 while (handled_component_p (t))
2864 t = TREE_OPERAND (t, 0);
2865
2866 if ((TREE_CODE (t) == MEM_REF
2867 || TREE_CODE (t) == TARGET_MEM_REF)
2868 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
2869 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
2870
2871 /* ??? Either the alias oracle or all callers need to properly deal
2872 with WITH_SIZE_EXPRs before we can look through those. */
2873 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2874 return NULL_TREE;
2875
2876 return t;
2877 }
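
/* Illustrative sketch (editor's example): for the reference "a.b[i].c",
   get_base_address returns the VAR_DECL "a"; for "(*p).x" the base is
   the MEM_REF itself, unless its address operand is an ADDR_EXPR, in
   which case the pointed-to declaration is returned.  */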
2878
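/* Recompute TREE_SIDE_EFFECTS for T from TREE_THIS_VOLATILE and the
   side effects of its operands.  */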
2879 void
2880 recalculate_side_effects (tree t)
2881 {
2882 enum tree_code code = TREE_CODE (t);
2883 int len = TREE_OPERAND_LENGTH (t);
2884 int i;
2885
2886 switch (TREE_CODE_CLASS (code))
2887 {
2888 case tcc_expression:
2889 switch (code)
2890 {
2891 case INIT_EXPR:
2892 case MODIFY_EXPR:
2893 case VA_ARG_EXPR:
2894 case PREDECREMENT_EXPR:
2895 case PREINCREMENT_EXPR:
2896 case POSTDECREMENT_EXPR:
2897 case POSTINCREMENT_EXPR:
2898 /* All of these have side-effects, no matter what their
2899 operands are. */
2900 return;
2901
2902 default:
2903 break;
2904 }
2905 /* Fall through. */
2906
2907 case tcc_comparison: /* a comparison expression */
2908 case tcc_unary: /* a unary arithmetic expression */
2909 case tcc_binary: /* a binary arithmetic expression */
2910 case tcc_reference: /* a reference */
2911 case tcc_vl_exp: /* a function call */
2912 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2913 for (i = 0; i < len; ++i)
2914 {
2915 tree op = TREE_OPERAND (t, i);
2916 if (op && TREE_SIDE_EFFECTS (op))
2917 TREE_SIDE_EFFECTS (t) = 1;
2918 }
2919 break;
2920
2921 case tcc_constant:
2922 /* No side-effects. */
2923 return;
2924
2925 default:
2926 gcc_unreachable ();
2927 }
2928 }
2929
2930 /* Canonicalize a tree T for use as the condition of a COND_EXPR. Returns
2931 a canonicalized tree that is valid for a COND_EXPR, or NULL_TREE if
2932 we failed to create one. */
2933
2934 tree
2935 canonicalize_cond_expr_cond (tree t)
2936 {
2937 /* Strip conversions around boolean operations. */
2938 if (CONVERT_EXPR_P (t)
2939 && (truth_value_p (TREE_CODE (TREE_OPERAND (t, 0)))
2940 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
2941 == BOOLEAN_TYPE))
2942 t = TREE_OPERAND (t, 0);
2943
2944 /* For !x use x == 0. */
2945 if (TREE_CODE (t) == TRUTH_NOT_EXPR)
2946 {
2947 tree top0 = TREE_OPERAND (t, 0);
2948 t = build2 (EQ_EXPR, TREE_TYPE (t),
2949 top0, build_int_cst (TREE_TYPE (top0), 0));
2950 }
2951 /* For cmp ? 1 : 0 use cmp. */
2952 else if (TREE_CODE (t) == COND_EXPR
2953 && COMPARISON_CLASS_P (TREE_OPERAND (t, 0))
2954 && integer_onep (TREE_OPERAND (t, 1))
2955 && integer_zerop (TREE_OPERAND (t, 2)))
2956 {
2957 tree top0 = TREE_OPERAND (t, 0);
2958 t = build2 (TREE_CODE (top0), TREE_TYPE (t),
2959 TREE_OPERAND (top0, 0), TREE_OPERAND (top0, 1));
2960 }
2961
2962 if (is_gimple_condexpr (t))
2963 return t;
2964
2965 return NULL_TREE;
2966 }
2967
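/* Illustrative sketch (editor's example): canonicalize_cond_expr_cond
   turns "!x_1" into "x_1 == 0" and "a_1 < b_2 ? 1 : 0" into "a_1 < b_2";
   if the result is still not a valid GIMPLE condition (for instance a
   call), NULL_TREE is returned.  */
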
2968 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2969 the positions marked by the set ARGS_TO_SKIP. */
2970
2971 gimple
2972 gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
2973 {
2974 int i;
2975 int nargs = gimple_call_num_args (stmt);
2976 vec<tree> vargs;
2977 vargs.create (nargs);
2978 gimple new_stmt;
2979
2980 for (i = 0; i < nargs; i++)
2981 if (!bitmap_bit_p (args_to_skip, i))
2982 vargs.quick_push (gimple_call_arg (stmt, i));
2983
2984 if (gimple_call_internal_p (stmt))
2985 new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
2986 vargs);
2987 else
2988 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2989 vargs.release ();
2990 if (gimple_call_lhs (stmt))
2991 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2992
2993 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2994 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
2995
2996 if (gimple_has_location (stmt))
2997 gimple_set_location (new_stmt, gimple_location (stmt));
2998 gimple_call_copy_flags (new_stmt, stmt);
2999 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
3000
3001 gimple_set_modified (new_stmt, true);
3002
3003 return new_stmt;
3004 }
3005
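/* Illustrative sketch (editor's example): to build a copy of CALL that
   drops its second argument one could do

     bitmap skip = BITMAP_ALLOC (NULL);
     bitmap_set_bit (skip, 1);
     gimple new_call = gimple_call_copy_skip_args (call, skip);
     BITMAP_FREE (skip);

   The copy keeps the LHS, the virtual operands, the location and the
   call flags of the original.  */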
3006
3007
3008 /* Return true if the field decls F1 and F2 are at the same offset.
3009
3010 This is intended to be used on GIMPLE types only. */
3011
3012 bool
3013 gimple_compare_field_offset (tree f1, tree f2)
3014 {
3015 if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
3016 {
3017 tree offset1 = DECL_FIELD_OFFSET (f1);
3018 tree offset2 = DECL_FIELD_OFFSET (f2);
3019 return ((offset1 == offset2
3020 /* Once gimplification is done, self-referential offsets are
3021 instantiated as operand #2 of the COMPONENT_REF built for
3022 each access and reset. Therefore, they are not relevant
3023 anymore and fields are interchangeable provided that they
3024 represent the same access. */
3025 || (TREE_CODE (offset1) == PLACEHOLDER_EXPR
3026 && TREE_CODE (offset2) == PLACEHOLDER_EXPR
3027 && (DECL_SIZE (f1) == DECL_SIZE (f2)
3028 || (TREE_CODE (DECL_SIZE (f1)) == PLACEHOLDER_EXPR
3029 && TREE_CODE (DECL_SIZE (f2)) == PLACEHOLDER_EXPR)
3030 || operand_equal_p (DECL_SIZE (f1), DECL_SIZE (f2), 0))
3031 && DECL_ALIGN (f1) == DECL_ALIGN (f2))
3032 || operand_equal_p (offset1, offset2, 0))
3033 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1),
3034 DECL_FIELD_BIT_OFFSET (f2)));
3035 }
3036
3037 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
3038 should be, so handle differing ones specially by decomposing
3039 the offset into a byte and bit offset manually. */
3040 if (host_integerp (DECL_FIELD_OFFSET (f1), 0)
3041 && host_integerp (DECL_FIELD_OFFSET (f2), 0))
3042 {
3043 unsigned HOST_WIDE_INT byte_offset1, byte_offset2;
3044 unsigned HOST_WIDE_INT bit_offset1, bit_offset2;
3045 bit_offset1 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1));
3046 byte_offset1 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1))
3047 + bit_offset1 / BITS_PER_UNIT);
3048 bit_offset2 = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2));
3049 byte_offset2 = (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2))
3050 + bit_offset2 / BITS_PER_UNIT);
3051 if (byte_offset1 != byte_offset2)
3052 return false;
3053 return bit_offset1 % BITS_PER_UNIT == bit_offset2 % BITS_PER_UNIT;
3054 }
3055
3056 return false;
3057 }
3058
3059 /* Return a hash value for gimple type TYPE combined with VAL.
3060
3061 The hash value returned is equal for types considered compatible
3062 by gimple_canonical_types_compatible_p. */
3063
3064 static hashval_t
3065 iterative_hash_canonical_type (tree type, hashval_t val)
3066 {
3067 hashval_t v;
3068 void **slot;
3069 struct tree_int_map *mp, m;
3070
3071 m.base.from = type;
3072 if ((slot = htab_find_slot (canonical_type_hash_cache, &m, INSERT))
3073 && *slot)
3074 return iterative_hash_hashval_t (((struct tree_int_map *) *slot)->to, val);
3075
3076 /* Combine a few common features of types so that types are grouped into
3077 smaller sets; when searching for existing matching types to merge,
3078 only existing types having the same features as the new type will be
3079 checked. */
3080 v = iterative_hash_hashval_t (TREE_CODE (type), 0);
3081 v = iterative_hash_hashval_t (TREE_ADDRESSABLE (type), v);
3082 v = iterative_hash_hashval_t (TYPE_ALIGN (type), v);
3083 v = iterative_hash_hashval_t (TYPE_MODE (type), v);
3084
3085 /* Incorporate common features of numerical types. */
3086 if (INTEGRAL_TYPE_P (type)
3087 || SCALAR_FLOAT_TYPE_P (type)
3088 || FIXED_POINT_TYPE_P (type)
3089 || TREE_CODE (type) == VECTOR_TYPE
3090 || TREE_CODE (type) == COMPLEX_TYPE
3091 || TREE_CODE (type) == OFFSET_TYPE
3092 || POINTER_TYPE_P (type))
3093 {
3094 v = iterative_hash_hashval_t (TYPE_PRECISION (type), v);
3095 v = iterative_hash_hashval_t (TYPE_UNSIGNED (type), v);
3096 }
3097
3098 /* For pointer and reference types, fold in information about the type
3099 pointed to but do not recurse to the pointed-to type. */
3100 if (POINTER_TYPE_P (type))
3101 {
3102 v = iterative_hash_hashval_t (TYPE_REF_CAN_ALIAS_ALL (type), v);
3103 v = iterative_hash_hashval_t (TYPE_ADDR_SPACE (TREE_TYPE (type)), v);
3104 v = iterative_hash_hashval_t (TYPE_RESTRICT (type), v);
3105 v = iterative_hash_hashval_t (TREE_CODE (TREE_TYPE (type)), v);
3106 }
3107
3108 /* For integer types hash only the string flag. */
3109 if (TREE_CODE (type) == INTEGER_TYPE)
3110 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3111
3112 /* For array types hash the domain bounds and the string flag. */
3113 if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
3114 {
3115 v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
3116 /* OMP lowering can introduce error_mark_node in place of
3117 random local decls in types. */
3118 if (TYPE_MIN_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
3119 v = iterative_hash_expr (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), v);
3120 if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != error_mark_node)
3121 v = iterative_hash_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)), v);
3122 }
3123
3124 /* Recurse for aggregates with a single element type. */
3125 if (TREE_CODE (type) == ARRAY_TYPE
3126 || TREE_CODE (type) == COMPLEX_TYPE
3127 || TREE_CODE (type) == VECTOR_TYPE)
3128 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
3129
3130 /* Incorporate function return and argument types. */
3131 if (TREE_CODE (type) == FUNCTION_TYPE || TREE_CODE (type) == METHOD_TYPE)
3132 {
3133 unsigned na;
3134 tree p;
3135
3136 /* For method types also incorporate their parent class. */
3137 if (TREE_CODE (type) == METHOD_TYPE)
3138 v = iterative_hash_canonical_type (TYPE_METHOD_BASETYPE (type), v);
3139
3140 v = iterative_hash_canonical_type (TREE_TYPE (type), v);
3141
3142 for (p = TYPE_ARG_TYPES (type), na = 0; p; p = TREE_CHAIN (p))
3143 {
3144 v = iterative_hash_canonical_type (TREE_VALUE (p), v);
3145 na++;
3146 }
3147
3148 v = iterative_hash_hashval_t (na, v);
3149 }
3150
3151 if (RECORD_OR_UNION_TYPE_P (type))
3152 {
3153 unsigned nf;
3154 tree f;
3155
3156 for (f = TYPE_FIELDS (type), nf = 0; f; f = TREE_CHAIN (f))
3157 if (TREE_CODE (f) == FIELD_DECL)
3158 {
3159 v = iterative_hash_canonical_type (TREE_TYPE (f), v);
3160 nf++;
3161 }
3162
3163 v = iterative_hash_hashval_t (nf, v);
3164 }
3165
3166 /* Cache the just computed hash value. */
3167 mp = ggc_alloc_cleared_tree_int_map ();
3168 mp->base.from = type;
3169 mp->to = v;
3170 *slot = (void *) mp;
3171
3172 return iterative_hash_hashval_t (v, val);
3173 }
3174
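/* Hash function for the canonical type table.  Creates the hash cache
   on first use and hashes the type pointed to by P.  */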
3175 static hashval_t
3176 gimple_canonical_type_hash (const void *p)
3177 {
3178 if (canonical_type_hash_cache == NULL)
3179 canonical_type_hash_cache = htab_create_ggc (512, tree_int_map_hash,
3180 tree_int_map_eq, NULL);
3181
3182 return iterative_hash_canonical_type (CONST_CAST_TREE ((const_tree) p), 0);
3183 }
3184
3185
3186
3187
3188 /* The TYPE_CANONICAL merging machinery. It should closely resemble
3189 the middle-end types_compatible_p function. It needs to avoid
3190 claiming types are different for types that should be treated
3191 the same with respect to TBAA. Canonical types are also used
3192 for IL consistency checks via the useless_type_conversion_p
3193 predicate which does not handle all type kinds itself but falls
3194 back to pointer-comparison of TYPE_CANONICAL for aggregates
3195 for example. */
3196
3197 /* Return true iff T1 and T2 are structurally identical for what
3198 TBAA is concerned. */
3199
3200 static bool
3201 gimple_canonical_types_compatible_p (tree t1, tree t2)
3202 {
3203 /* Before starting to set up the SCC machinery handle simple cases. */
3204
3205 /* Check first for the obvious case of pointer identity. */
3206 if (t1 == t2)
3207 return true;
3208
3209 /* Check that we have two types to compare. */
3210 if (t1 == NULL_TREE || t2 == NULL_TREE)
3211 return false;
3212
3213 /* If the types have been previously registered and found equal
3214 they still are. */
3215 if (TYPE_CANONICAL (t1)
3216 && TYPE_CANONICAL (t1) == TYPE_CANONICAL (t2))
3217 return true;
3218
3219 /* Can't be the same type if the types don't have the same code. */
3220 if (TREE_CODE (t1) != TREE_CODE (t2))
3221 return false;
3222
3223 if (TREE_ADDRESSABLE (t1) != TREE_ADDRESSABLE (t2))
3224 return false;
3225
3226 /* Qualifiers do not matter for canonical type comparison purposes. */
3227
3228 /* Void types and nullptr types are always the same. */
3229 if (TREE_CODE (t1) == VOID_TYPE
3230 || TREE_CODE (t1) == NULLPTR_TYPE)
3231 return true;
3232
3233 /* Can't be the same type if they have different alignment, or mode. */
3234 if (TYPE_ALIGN (t1) != TYPE_ALIGN (t2)
3235 || TYPE_MODE (t1) != TYPE_MODE (t2))
3236 return false;
3237
3238 /* Non-aggregate types can be handled cheaply. */
3239 if (INTEGRAL_TYPE_P (t1)
3240 || SCALAR_FLOAT_TYPE_P (t1)
3241 || FIXED_POINT_TYPE_P (t1)
3242 || TREE_CODE (t1) == VECTOR_TYPE
3243 || TREE_CODE (t1) == COMPLEX_TYPE
3244 || TREE_CODE (t1) == OFFSET_TYPE
3245 || POINTER_TYPE_P (t1))
3246 {
3247 /* Can't be the same type if they have different sign or precision. */
3248 if (TYPE_PRECISION (t1) != TYPE_PRECISION (t2)
3249 || TYPE_UNSIGNED (t1) != TYPE_UNSIGNED (t2))
3250 return false;
3251
3252 if (TREE_CODE (t1) == INTEGER_TYPE
3253 && TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2))
3254 return false;
3255
3256 /* For canonical type comparisons we do not want to build SCCs
3257 so we cannot compare pointed-to types. But we can, for now,
3258 require the same pointed-to type kind and match what
3259 useless_type_conversion_p would do. */
3260 if (POINTER_TYPE_P (t1))
3261 {
3262 /* If the two pointers have different ref-all attributes,
3263 they can't be the same type. */
3264 if (TYPE_REF_CAN_ALIAS_ALL (t1) != TYPE_REF_CAN_ALIAS_ALL (t2))
3265 return false;
3266
3267 if (TYPE_ADDR_SPACE (TREE_TYPE (t1))
3268 != TYPE_ADDR_SPACE (TREE_TYPE (t2)))
3269 return false;
3270
3271 if (TYPE_RESTRICT (t1) != TYPE_RESTRICT (t2))
3272 return false;
3273
3274 if (TREE_CODE (TREE_TYPE (t1)) != TREE_CODE (TREE_TYPE (t2)))
3275 return false;
3276 }
3277
3278 /* Tail-recurse to components. */
3279 if (TREE_CODE (t1) == VECTOR_TYPE
3280 || TREE_CODE (t1) == COMPLEX_TYPE)
3281 return gimple_canonical_types_compatible_p (TREE_TYPE (t1),
3282 TREE_TYPE (t2));
3283
3284 return true;
3285 }
3286
3287 /* Do type-specific comparisons. */
3288 switch (TREE_CODE (t1))
3289 {
3290 case ARRAY_TYPE:
3291 /* Array types are the same if the element types are the same and
3292 the number of elements is the same. */
3293 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))
3294 || TYPE_STRING_FLAG (t1) != TYPE_STRING_FLAG (t2)
3295 || TYPE_NONALIASED_COMPONENT (t1) != TYPE_NONALIASED_COMPONENT (t2))
3296 return false;
3297 else
3298 {
3299 tree i1 = TYPE_DOMAIN (t1);
3300 tree i2 = TYPE_DOMAIN (t2);
3301
3302 /* For an incomplete external array, the type domain can be
3303 NULL_TREE. Check this condition also. */
3304 if (i1 == NULL_TREE && i2 == NULL_TREE)
3305 return true;
3306 else if (i1 == NULL_TREE || i2 == NULL_TREE)
3307 return false;
3308 else
3309 {
3310 tree min1 = TYPE_MIN_VALUE (i1);
3311 tree min2 = TYPE_MIN_VALUE (i2);
3312 tree max1 = TYPE_MAX_VALUE (i1);
3313 tree max2 = TYPE_MAX_VALUE (i2);
3314
3315 /* The minimum/maximum values have to be the same. */
3316 if ((min1 == min2
3317 || (min1 && min2
3318 && ((TREE_CODE (min1) == PLACEHOLDER_EXPR
3319 && TREE_CODE (min2) == PLACEHOLDER_EXPR)
3320 || operand_equal_p (min1, min2, 0))))
3321 && (max1 == max2
3322 || (max1 && max2
3323 && ((TREE_CODE (max1) == PLACEHOLDER_EXPR
3324 && TREE_CODE (max2) == PLACEHOLDER_EXPR)
3325 || operand_equal_p (max1, max2, 0)))))
3326 return true;
3327 else
3328 return false;
3329 }
3330 }
3331
3332 case METHOD_TYPE:
3333 case FUNCTION_TYPE:
3334 /* Function types are the same if the return type and arguments types
3335 are the same. */
3336 if (!gimple_canonical_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))
3337 return false;
3338
3339 if (!comp_type_attributes (t1, t2))
3340 return false;
3341
3342 if (TYPE_ARG_TYPES (t1) == TYPE_ARG_TYPES (t2))
3343 return true;
3344 else
3345 {
3346 tree parms1, parms2;
3347
3348 for (parms1 = TYPE_ARG_TYPES (t1), parms2 = TYPE_ARG_TYPES (t2);
3349 parms1 && parms2;
3350 parms1 = TREE_CHAIN (parms1), parms2 = TREE_CHAIN (parms2))
3351 {
3352 if (!gimple_canonical_types_compatible_p
3353 (TREE_VALUE (parms1), TREE_VALUE (parms2)))
3354 return false;
3355 }
3356
3357 if (parms1 || parms2)
3358 return false;
3359
3360 return true;
3361 }
3362
3363 case RECORD_TYPE:
3364 case UNION_TYPE:
3365 case QUAL_UNION_TYPE:
3366 {
3367 tree f1, f2;
3368
3369 /* For aggregate types, all the fields must be the same. */
3370 for (f1 = TYPE_FIELDS (t1), f2 = TYPE_FIELDS (t2);
3371 f1 || f2;
3372 f1 = TREE_CHAIN (f1), f2 = TREE_CHAIN (f2))
3373 {
3374 /* Skip non-fields. */
3375 while (f1 && TREE_CODE (f1) != FIELD_DECL)
3376 f1 = TREE_CHAIN (f1);
3377 while (f2 && TREE_CODE (f2) != FIELD_DECL)
3378 f2 = TREE_CHAIN (f2);
3379 if (!f1 || !f2)
3380 break;
3381 /* The fields must have the same name, offset and type. */
3382 if (DECL_NONADDRESSABLE_P (f1) != DECL_NONADDRESSABLE_P (f2)
3383 || !gimple_compare_field_offset (f1, f2)
3384 || !gimple_canonical_types_compatible_p
3385 (TREE_TYPE (f1), TREE_TYPE (f2)))
3386 return false;
3387 }
3388
3389 /* If one aggregate has more fields than the other, they
3390 are not the same. */
3391 if (f1 || f2)
3392 return false;
3393
3394 return true;
3395 }
3396
3397 default:
3398 gcc_unreachable ();
3399 }
3400 }
3401
3402
3403 /* Returns nonzero if P1 and P2 are equal. */
3404
3405 static int
3406 gimple_canonical_type_eq (const void *p1, const void *p2)
3407 {
3408 const_tree t1 = (const_tree) p1;
3409 const_tree t2 = (const_tree) p2;
3410 return gimple_canonical_types_compatible_p (CONST_CAST_TREE (t1),
3411 CONST_CAST_TREE (t2));
3412 }
3413
3414 /* Register type T in the global canonical type table gimple_canonical_types.
3415 If another type T', compatible with T, already existed in
3416 gimple_canonical_types then return T', otherwise return T. This is used by
3417 LTO to merge identical types read from different TUs.
3418
3419 ??? This merging does not exactly match how the tree.c middle-end
3420 functions will assign TYPE_CANONICAL when new types are created
3421 during optimization (which at least happens for pointer and array
3422 types). */
3423
3424 tree
3425 gimple_register_canonical_type (tree t)
3426 {
3427 void **slot;
3428
3429 gcc_assert (TYPE_P (t));
3430
3431 if (TYPE_CANONICAL (t))
3432 return TYPE_CANONICAL (t);
3433
3434 if (gimple_canonical_types == NULL)
3435 gimple_canonical_types = htab_create_ggc (16381, gimple_canonical_type_hash,
3436 gimple_canonical_type_eq, 0);
3437
3438 slot = htab_find_slot (gimple_canonical_types, t, INSERT);
3439 if (*slot
3440 && *(tree *)slot != t)
3441 {
3442 tree new_type = (tree) *((tree *) slot);
3443
3444 TYPE_CANONICAL (t) = new_type;
3445 t = new_type;
3446 }
3447 else
3448 {
3449 TYPE_CANONICAL (t) = t;
3450 *slot = (void *) t;
3451 }
3452
3453 return t;
3454 }
3455
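/* Illustrative sketch (editor's example): a caller such as the LTO
   streamer might do

     t = gimple_register_canonical_type (t);

   after which TYPE_CANONICAL (t) is set and any two types that compare
   equal under gimple_canonical_types_compatible_p share the same
   canonical type, which is what the TBAA machinery ultimately compares.  */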
3456
3457 /* Show statistics on references to the global canonical type tables. */
3458
3459 void
3460 print_gimple_types_stats (const char *pfx)
3461 {
3462 if (gimple_canonical_types)
3463 fprintf (stderr, "[%s] GIMPLE canonical type table: size %ld, "
3464 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3465 (long) htab_size (gimple_canonical_types),
3466 (long) htab_elements (gimple_canonical_types),
3467 (long) gimple_canonical_types->searches,
3468 (long) gimple_canonical_types->collisions,
3469 htab_collisions (gimple_canonical_types));
3470 else
3471 fprintf (stderr, "[%s] GIMPLE canonical type table is empty\n", pfx);
3472 if (canonical_type_hash_cache)
3473 fprintf (stderr, "[%s] GIMPLE canonical type hash table: size %ld, "
3474 "%ld elements, %ld searches, %ld collisions (ratio: %f)\n", pfx,
3475 (long) htab_size (canonical_type_hash_cache),
3476 (long) htab_elements (canonical_type_hash_cache),
3477 (long) canonical_type_hash_cache->searches,
3478 (long) canonical_type_hash_cache->collisions,
3479 htab_collisions (canonical_type_hash_cache));
3480 else
3481 fprintf (stderr, "[%s] GIMPLE canonical type hash table is empty\n", pfx);
3482 }
3483
3484 /* Free the gimple type hashtables used for LTO type merging. */
3485
3486 void
3487 free_gimple_type_tables (void)
3488 {
3489 if (gimple_canonical_types)
3490 {
3491 htab_delete (gimple_canonical_types);
3492 gimple_canonical_types = NULL;
3493 }
3494 if (canonical_type_hash_cache)
3495 {
3496 htab_delete (canonical_type_hash_cache);
3497 canonical_type_hash_cache = NULL;
3498 }
3499 }
3500
3501
3502 /* Return a type the same as TYPE except unsigned or
3503 signed according to UNSIGNEDP. */
3504
3505 static tree
3506 gimple_signed_or_unsigned_type (bool unsignedp, tree type)
3507 {
3508 tree type1;
3509
3510 type1 = TYPE_MAIN_VARIANT (type);
3511 if (type1 == signed_char_type_node
3512 || type1 == char_type_node
3513 || type1 == unsigned_char_type_node)
3514 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3515 if (type1 == integer_type_node || type1 == unsigned_type_node)
3516 return unsignedp ? unsigned_type_node : integer_type_node;
3517 if (type1 == short_integer_type_node || type1 == short_unsigned_type_node)
3518 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
3519 if (type1 == long_integer_type_node || type1 == long_unsigned_type_node)
3520 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
3521 if (type1 == long_long_integer_type_node
3522 || type1 == long_long_unsigned_type_node)
3523 return unsignedp
3524 ? long_long_unsigned_type_node
3525 : long_long_integer_type_node;
3526 if (int128_integer_type_node && (type1 == int128_integer_type_node || type1 == int128_unsigned_type_node))
3527 return unsignedp
3528 ? int128_unsigned_type_node
3529 : int128_integer_type_node;
3530 #if HOST_BITS_PER_WIDE_INT >= 64
3531 if (type1 == intTI_type_node || type1 == unsigned_intTI_type_node)
3532 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
3533 #endif
3534 if (type1 == intDI_type_node || type1 == unsigned_intDI_type_node)
3535 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
3536 if (type1 == intSI_type_node || type1 == unsigned_intSI_type_node)
3537 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
3538 if (type1 == intHI_type_node || type1 == unsigned_intHI_type_node)
3539 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
3540 if (type1 == intQI_type_node || type1 == unsigned_intQI_type_node)
3541 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
3542
3543 #define GIMPLE_FIXED_TYPES(NAME) \
3544 if (type1 == short_ ## NAME ## _type_node \
3545 || type1 == unsigned_short_ ## NAME ## _type_node) \
3546 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
3547 : short_ ## NAME ## _type_node; \
3548 if (type1 == NAME ## _type_node \
3549 || type1 == unsigned_ ## NAME ## _type_node) \
3550 return unsignedp ? unsigned_ ## NAME ## _type_node \
3551 : NAME ## _type_node; \
3552 if (type1 == long_ ## NAME ## _type_node \
3553 || type1 == unsigned_long_ ## NAME ## _type_node) \
3554 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
3555 : long_ ## NAME ## _type_node; \
3556 if (type1 == long_long_ ## NAME ## _type_node \
3557 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
3558 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
3559 : long_long_ ## NAME ## _type_node;
3560
3561 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
3562 if (type1 == NAME ## _type_node \
3563 || type1 == u ## NAME ## _type_node) \
3564 return unsignedp ? u ## NAME ## _type_node \
3565 : NAME ## _type_node;
3566
3567 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
3568 if (type1 == sat_ ## short_ ## NAME ## _type_node \
3569 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
3570 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
3571 : sat_ ## short_ ## NAME ## _type_node; \
3572 if (type1 == sat_ ## NAME ## _type_node \
3573 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
3574 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
3575 : sat_ ## NAME ## _type_node; \
3576 if (type1 == sat_ ## long_ ## NAME ## _type_node \
3577 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
3578 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
3579 : sat_ ## long_ ## NAME ## _type_node; \
3580 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
3581 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
3582 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
3583 : sat_ ## long_long_ ## NAME ## _type_node;
3584
3585 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
3586 if (type1 == sat_ ## NAME ## _type_node \
3587 || type1 == sat_ ## u ## NAME ## _type_node) \
3588 return unsignedp ? sat_ ## u ## NAME ## _type_node \
3589 : sat_ ## NAME ## _type_node;
3590
3591 GIMPLE_FIXED_TYPES (fract);
3592 GIMPLE_FIXED_TYPES_SAT (fract);
3593 GIMPLE_FIXED_TYPES (accum);
3594 GIMPLE_FIXED_TYPES_SAT (accum);
3595
3596 GIMPLE_FIXED_MODE_TYPES (qq);
3597 GIMPLE_FIXED_MODE_TYPES (hq);
3598 GIMPLE_FIXED_MODE_TYPES (sq);
3599 GIMPLE_FIXED_MODE_TYPES (dq);
3600 GIMPLE_FIXED_MODE_TYPES (tq);
3601 GIMPLE_FIXED_MODE_TYPES_SAT (qq);
3602 GIMPLE_FIXED_MODE_TYPES_SAT (hq);
3603 GIMPLE_FIXED_MODE_TYPES_SAT (sq);
3604 GIMPLE_FIXED_MODE_TYPES_SAT (dq);
3605 GIMPLE_FIXED_MODE_TYPES_SAT (tq);
3606 GIMPLE_FIXED_MODE_TYPES (ha);
3607 GIMPLE_FIXED_MODE_TYPES (sa);
3608 GIMPLE_FIXED_MODE_TYPES (da);
3609 GIMPLE_FIXED_MODE_TYPES (ta);
3610 GIMPLE_FIXED_MODE_TYPES_SAT (ha);
3611 GIMPLE_FIXED_MODE_TYPES_SAT (sa);
3612 GIMPLE_FIXED_MODE_TYPES_SAT (da);
3613 GIMPLE_FIXED_MODE_TYPES_SAT (ta);
3614
3615 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
3616 the precision; they have precision set to match their range, but
3617 may use a wider mode to match an ABI. If we change modes, we may
3618 wind up with bad conversions. For INTEGER_TYPEs in C, must check
3619 the precision as well, so as to yield correct results for
3620 bit-field types. C++ does not have these separate bit-field
3621 types, and producing a signed or unsigned variant of an
3622 ENUMERAL_TYPE may cause other problems as well. */
3623 if (!INTEGRAL_TYPE_P (type)
3624 || TYPE_UNSIGNED (type) == unsignedp)
3625 return type;
3626
3627 #define TYPE_OK(node) \
3628 (TYPE_MODE (type) == TYPE_MODE (node) \
3629 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
3630 if (TYPE_OK (signed_char_type_node))
3631 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3632 if (TYPE_OK (integer_type_node))
3633 return unsignedp ? unsigned_type_node : integer_type_node;
3634 if (TYPE_OK (short_integer_type_node))
3635 return unsignedp ? short_unsigned_type_node : short_integer_type_node;
3636 if (TYPE_OK (long_integer_type_node))
3637 return unsignedp ? long_unsigned_type_node : long_integer_type_node;
3638 if (TYPE_OK (long_long_integer_type_node))
3639 return (unsignedp
3640 ? long_long_unsigned_type_node
3641 : long_long_integer_type_node);
3642 if (int128_integer_type_node && TYPE_OK (int128_integer_type_node))
3643 return (unsignedp
3644 ? int128_unsigned_type_node
3645 : int128_integer_type_node);
3646
3647 #if HOST_BITS_PER_WIDE_INT >= 64
3648 if (TYPE_OK (intTI_type_node))
3649 return unsignedp ? unsigned_intTI_type_node : intTI_type_node;
3650 #endif
3651 if (TYPE_OK (intDI_type_node))
3652 return unsignedp ? unsigned_intDI_type_node : intDI_type_node;
3653 if (TYPE_OK (intSI_type_node))
3654 return unsignedp ? unsigned_intSI_type_node : intSI_type_node;
3655 if (TYPE_OK (intHI_type_node))
3656 return unsignedp ? unsigned_intHI_type_node : intHI_type_node;
3657 if (TYPE_OK (intQI_type_node))
3658 return unsignedp ? unsigned_intQI_type_node : intQI_type_node;
3659
3660 #undef GIMPLE_FIXED_TYPES
3661 #undef GIMPLE_FIXED_MODE_TYPES
3662 #undef GIMPLE_FIXED_TYPES_SAT
3663 #undef GIMPLE_FIXED_MODE_TYPES_SAT
3664 #undef TYPE_OK
3665
3666 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
3667 }
3668
3669
3670 /* Return an unsigned type the same as TYPE in other respects. */
3671
3672 tree
3673 gimple_unsigned_type (tree type)
3674 {
3675 return gimple_signed_or_unsigned_type (true, type);
3676 }
3677
3678
3679 /* Return a signed type the same as TYPE in other respects. */
3680
3681 tree
3682 gimple_signed_type (tree type)
3683 {
3684 return gimple_signed_or_unsigned_type (false, type);
3685 }
3686
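/* Illustrative sketch (editor's example): gimple_unsigned_type
   (integer_type_node) yields unsigned_type_node, and gimple_signed_type
   (unsigned_char_type_node) yields signed_char_type_node; for integral
   types that match none of the standard nodes, a new type is built with
   build_nonstandard_integer_type.  */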
3687
3688 /* Return the type-based alias set for T, which may be an expression
3689 or a type. Return -1 if we don't do anything special. */
3690
3691 alias_set_type
3692 gimple_get_alias_set (tree t)
3693 {
3694 tree u;
3695
3696 /* Permit type-punning when accessing a union, provided the access
3697 is directly through the union. For example, this code does not
3698 permit taking the address of a union member and then storing
3699 through it. Even the type-punning allowed here is a GCC
3700 extension, albeit a common and useful one; the C standard says
3701 that such accesses have implementation-defined behavior. */
3702 for (u = t;
3703 TREE_CODE (u) == COMPONENT_REF || TREE_CODE (u) == ARRAY_REF;
3704 u = TREE_OPERAND (u, 0))
3705 if (TREE_CODE (u) == COMPONENT_REF
3706 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u, 0))) == UNION_TYPE)
3707 return 0;
3708
3709 /* That's all the expressions we handle specially. */
3710 if (!TYPE_P (t))
3711 return -1;
3712
3713 /* For convenience, follow the C standard when dealing with
3714 character types. Any object may be accessed via an lvalue that
3715 has character type. */
3716 if (t == char_type_node
3717 || t == signed_char_type_node
3718 || t == unsigned_char_type_node)
3719 return 0;
3720
3721 /* Allow aliasing between signed and unsigned variants of the same
3722 type. We treat the signed variant as canonical. */
3723 if (TREE_CODE (t) == INTEGER_TYPE && TYPE_UNSIGNED (t))
3724 {
3725 tree t1 = gimple_signed_type (t);
3726
3727 /* t1 == t can happen for boolean nodes which are always unsigned. */
3728 if (t1 != t)
3729 return get_alias_set (t1);
3730 }
3731
3732 return -1;
3733 }
3734
3735
3736 /* Data structure used to count the number of dereferences to PTR
3737 inside an expression. */
3738 struct count_ptr_d
3739 {
3740 tree ptr;
3741 unsigned num_stores;
3742 unsigned num_loads;
3743 };
3744
3745 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
3746 MEM_REF nodes whose base is the pointer passed in DATA. */
3747
3748 static tree
3749 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
3750 {
3751 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
3752 struct count_ptr_d *count_p = (struct count_ptr_d *) wi_p->info;
3753
3754 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
3755 pointer 'ptr' is *not* dereferenced; it is simply used to compute
3756 the address of 'fld' as 'ptr + offsetof(fld)'. */
3757 if (TREE_CODE (*tp) == ADDR_EXPR)
3758 {
3759 *walk_subtrees = 0;
3760 return NULL_TREE;
3761 }
3762
3763 if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
3764 {
3765 if (wi_p->is_lhs)
3766 count_p->num_stores++;
3767 else
3768 count_p->num_loads++;
3769 }
3770
3771 return NULL_TREE;
3772 }
3773
3774 /* Count the number of direct and indirect uses for pointer PTR in
3775 statement STMT. The number of direct uses is stored in
3776 *NUM_USES_P. Indirect references are counted separately depending
3777 on whether they are store or load operations. The counts are
3778 stored in *NUM_STORES_P and *NUM_LOADS_P. */
3779
3780 void
3781 count_uses_and_derefs (tree ptr, gimple stmt, unsigned *num_uses_p,
3782 unsigned *num_loads_p, unsigned *num_stores_p)
3783 {
3784 ssa_op_iter i;
3785 tree use;
3786
3787 *num_uses_p = 0;
3788 *num_loads_p = 0;
3789 *num_stores_p = 0;
3790
3791 /* Find out the total number of uses of PTR in STMT. */
3792 FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
3793 if (use == ptr)
3794 (*num_uses_p)++;
3795
3796 /* Now count the number of indirect references to PTR.  This is
3797 truly awful, but we don't have much choice.  There are no parent
3798 pointers inside MEM_REFs, so an expression like
3799 '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
3800 find all the indirect and direct uses of x_1 inside.  The only
3801 shortcut we can take is the fact that GIMPLE only allows
3802 MEM_REFs inside the kinds of statements checked below. */
3803 if (is_gimple_assign (stmt)
3804 || gimple_code (stmt) == GIMPLE_RETURN
3805 || gimple_code (stmt) == GIMPLE_ASM
3806 || is_gimple_call (stmt))
3807 {
3808 struct walk_stmt_info wi;
3809 struct count_ptr_d count;
3810
3811 count.ptr = ptr;
3812 count.num_stores = 0;
3813 count.num_loads = 0;
3814
3815 memset (&wi, 0, sizeof (wi));
3816 wi.info = &count;
3817 walk_gimple_op (stmt, count_ptr_derefs, &wi);
3818
3819 *num_stores_p = count.num_stores;
3820 *num_loads_p = count.num_loads;
3821 }
3822
3823 gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
3824 }
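
/* Usage sketch (illustrative only, not from the original file).  For the
   statement '*x_1 = foo (x_1, *x_1)' mentioned above, the counts come back
   as three uses, one load and one store of x_1:

     unsigned uses, loads, stores;
     count_uses_and_derefs (x_1, stmt, &uses, &loads, &stores);
     if (stores > 0)
       ;  // x_1 is written through at least once in STMT.
*/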
3825
3826 /* From a tree operand OP return the base of the load or store operation
3827 it performs, or NULL_TREE if OP is not a load or a store. */
3828
3829 static tree
3830 get_base_loadstore (tree op)
3831 {
3832 while (handled_component_p (op))
3833 op = TREE_OPERAND (op, 0);
3834 if (DECL_P (op)
3835 || INDIRECT_REF_P (op)
3836 || TREE_CODE (op) == MEM_REF
3837 || TREE_CODE (op) == TARGET_MEM_REF)
3838 return op;
3839 return NULL_TREE;
3840 }
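
/* Illustrative sketch of a hypothetical caller (not in the original file):

     tree base = get_base_loadstore (gimple_assign_lhs (stmt));
     if (base && DECL_P (base))
       ;  // store to a declared variable, e.g. 'a' in 'a.b[i].c = 1'
     else if (base)
       ;  // indirect store, e.g. a MEM_REF or TARGET_MEM_REF base
     else
       ;  // the LHS is not a memory reference at all
*/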
3841
3842 /* For the statement STMT call the callbacks VISIT_LOAD, VISIT_STORE and
3843 VISIT_ADDR, if non-NULL, on load, store and address-taken operands,
3844 passing STMT, the base of the operand and DATA to each callback.  The base
3845 will be either a decl, an indirect reference (including TARGET_MEM_REF)
3846 or the argument of an address expression.
3847 Return the results of these callbacks or'ed together. */
3848
3849 bool
3850 walk_stmt_load_store_addr_ops (gimple stmt, void *data,
3851 bool (*visit_load)(gimple, tree, void *),
3852 bool (*visit_store)(gimple, tree, void *),
3853 bool (*visit_addr)(gimple, tree, void *))
3854 {
3855 bool ret = false;
3856 unsigned i;
3857 if (gimple_assign_single_p (stmt))
3858 {
3859 tree lhs, rhs;
3860 if (visit_store)
3861 {
3862 lhs = get_base_loadstore (gimple_assign_lhs (stmt));
3863 if (lhs)
3864 ret |= visit_store (stmt, lhs, data);
3865 }
3866 rhs = gimple_assign_rhs1 (stmt);
3867 while (handled_component_p (rhs))
3868 rhs = TREE_OPERAND (rhs, 0);
3869 if (visit_addr)
3870 {
3871 if (TREE_CODE (rhs) == ADDR_EXPR)
3872 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3873 else if (TREE_CODE (rhs) == TARGET_MEM_REF
3874 && TREE_CODE (TMR_BASE (rhs)) == ADDR_EXPR)
3875 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (rhs), 0), data);
3876 else if (TREE_CODE (rhs) == OBJ_TYPE_REF
3877 && TREE_CODE (OBJ_TYPE_REF_OBJECT (rhs)) == ADDR_EXPR)
3878 ret |= visit_addr (stmt, TREE_OPERAND (OBJ_TYPE_REF_OBJECT (rhs),
3879 0), data);
3880 else if (TREE_CODE (rhs) == CONSTRUCTOR)
3881 {
3882 unsigned int ix;
3883 tree val;
3884
3885 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), ix, val)
3886 if (TREE_CODE (val) == ADDR_EXPR)
3887 ret |= visit_addr (stmt, TREE_OPERAND (val, 0), data);
3888 else if (TREE_CODE (val) == OBJ_TYPE_REF
3889 && TREE_CODE (OBJ_TYPE_REF_OBJECT (val)) == ADDR_EXPR)
3890 ret |= visit_addr (stmt,
3891 TREE_OPERAND (OBJ_TYPE_REF_OBJECT (val),
3892 0), data);
3893 }
3894 lhs = gimple_assign_lhs (stmt);
3895 if (TREE_CODE (lhs) == TARGET_MEM_REF
3896 && TREE_CODE (TMR_BASE (lhs)) == ADDR_EXPR)
3897 ret |= visit_addr (stmt, TREE_OPERAND (TMR_BASE (lhs), 0), data);
3898 }
3899 if (visit_load)
3900 {
3901 rhs = get_base_loadstore (rhs);
3902 if (rhs)
3903 ret |= visit_load (stmt, rhs, data);
3904 }
3905 }
3906 else if (visit_addr
3907 && (is_gimple_assign (stmt)
3908 || gimple_code (stmt) == GIMPLE_COND))
3909 {
3910 for (i = 0; i < gimple_num_ops (stmt); ++i)
3911 {
3912 tree op = gimple_op (stmt, i);
3913 if (op == NULL_TREE)
3914 ;
3915 else if (TREE_CODE (op) == ADDR_EXPR)
3916 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
3917 /* The rhs1 argument of a COND_EXPR or VCOND_EXPR is a comparison
3918 tree with two operands. */
3919 else if (i == 1 && COMPARISON_CLASS_P (op))
3920 {
3921 if (TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
3922 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 0),
3923 0), data);
3924 if (TREE_CODE (TREE_OPERAND (op, 1)) == ADDR_EXPR)
3925 ret |= visit_addr (stmt, TREE_OPERAND (TREE_OPERAND (op, 1),
3926 0), data);
3927 }
3928 }
3929 }
3930 else if (is_gimple_call (stmt))
3931 {
3932 if (visit_store)
3933 {
3934 tree lhs = gimple_call_lhs (stmt);
3935 if (lhs)
3936 {
3937 lhs = get_base_loadstore (lhs);
3938 if (lhs)
3939 ret |= visit_store (stmt, lhs, data);
3940 }
3941 }
3942 if (visit_load || visit_addr)
3943 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3944 {
3945 tree rhs = gimple_call_arg (stmt, i);
3946 if (visit_addr
3947 && TREE_CODE (rhs) == ADDR_EXPR)
3948 ret |= visit_addr (stmt, TREE_OPERAND (rhs, 0), data);
3949 else if (visit_load)
3950 {
3951 rhs = get_base_loadstore (rhs);
3952 if (rhs)
3953 ret |= visit_load (stmt, rhs, data);
3954 }
3955 }
3956 if (visit_addr
3957 && gimple_call_chain (stmt)
3958 && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
3959 ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
3960 data);
3961 if (visit_addr
3962 && gimple_call_return_slot_opt_p (stmt)
3963 && gimple_call_lhs (stmt) != NULL_TREE
3964 && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
3965 ret |= visit_addr (stmt, gimple_call_lhs (stmt), data);
3966 }
3967 else if (gimple_code (stmt) == GIMPLE_ASM)
3968 {
3969 unsigned noutputs;
3970 const char *constraint;
3971 const char **oconstraints;
3972 bool allows_mem, allows_reg, is_inout;
3973 noutputs = gimple_asm_noutputs (stmt);
3974 oconstraints = XALLOCAVEC (const char *, noutputs);
3975 if (visit_store || visit_addr)
3976 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
3977 {
3978 tree link = gimple_asm_output_op (stmt, i);
3979 tree op = get_base_loadstore (TREE_VALUE (link));
3980 if (op && visit_store)
3981 ret |= visit_store (stmt, op, data);
3982 if (visit_addr)
3983 {
3984 constraint = TREE_STRING_POINTER
3985 (TREE_VALUE (TREE_PURPOSE (link)));
3986 oconstraints[i] = constraint;
3987 parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
3988 &allows_reg, &is_inout);
3989 if (op && !allows_reg && allows_mem)
3990 ret |= visit_addr (stmt, op, data);
3991 }
3992 }
3993 if (visit_load || visit_addr)
3994 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
3995 {
3996 tree link = gimple_asm_input_op (stmt, i);
3997 tree op = TREE_VALUE (link);
3998 if (visit_addr
3999 && TREE_CODE (op) == ADDR_EXPR)
4000 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
4001 else if (visit_load || visit_addr)
4002 {
4003 op = get_base_loadstore (op);
4004 if (op)
4005 {
4006 if (visit_load)
4007 ret |= visit_load (stmt, op, data);
4008 if (visit_addr)
4009 {
4010 constraint = TREE_STRING_POINTER
4011 (TREE_VALUE (TREE_PURPOSE (link)));
4012 parse_input_constraint (&constraint, 0, 0, noutputs,
4013 0, oconstraints,
4014 &allows_mem, &allows_reg);
4015 if (!allows_reg && allows_mem)
4016 ret |= visit_addr (stmt, op, data);
4017 }
4018 }
4019 }
4020 }
4021 }
4022 else if (gimple_code (stmt) == GIMPLE_RETURN)
4023 {
4024 tree op = gimple_return_retval (stmt);
4025 if (op)
4026 {
4027 if (visit_addr
4028 && TREE_CODE (op) == ADDR_EXPR)
4029 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
4030 else if (visit_load)
4031 {
4032 op = get_base_loadstore (op);
4033 if (op)
4034 ret |= visit_load (stmt, op, data);
4035 }
4036 }
4037 }
4038 else if (visit_addr
4039 && gimple_code (stmt) == GIMPLE_PHI)
4040 {
4041 for (i = 0; i < gimple_phi_num_args (stmt); ++i)
4042 {
4043 tree op = PHI_ARG_DEF (stmt, i);
4044 if (TREE_CODE (op) == ADDR_EXPR)
4045 ret |= visit_addr (stmt, TREE_OPERAND (op, 0), data);
4046 }
4047 }
4048
4049 return ret;
4050 }
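
/* Usage sketch (illustrative only; the callback name is hypothetical).
   A pass that wants the UIDs of all declared variables loaded by a
   statement could do:

     static bool
     note_loaded_decl (gimple stmt ATTRIBUTE_UNUSED, tree base, void *data)
     {
       if (DECL_P (base))
         bitmap_set_bit ((bitmap) data, DECL_UID (base));
       return false;
     }

     ...
     walk_stmt_load_store_addr_ops (stmt, loaded_decls,
                                    note_loaded_decl, NULL, NULL);

   gimple_ior_addresses_taken below is an in-tree user of the same
   pattern for the address-taken callback.  */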
4051
4052 /* Like walk_stmt_load_store_addr_ops but with NULL visit_addr. IPA-CP
4053 should make a faster clone for this case. */
4054
4055 bool
4056 walk_stmt_load_store_ops (gimple stmt, void *data,
4057 bool (*visit_load)(gimple, tree, void *),
4058 bool (*visit_store)(gimple, tree, void *))
4059 {
4060 return walk_stmt_load_store_addr_ops (stmt, data,
4061 visit_load, visit_store, NULL);
4062 }
4063
4064 /* Helper for gimple_ior_addresses_taken_1. */
4065
4066 static bool
4067 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED,
4068 tree addr, void *data)
4069 {
4070 bitmap addresses_taken = (bitmap)data;
4071 addr = get_base_address (addr);
4072 if (addr
4073 && DECL_P (addr))
4074 {
4075 bitmap_set_bit (addresses_taken, DECL_UID (addr));
4076 return true;
4077 }
4078 return false;
4079 }
4080
4081 /* In the ADDRESSES_TAKEN bitmap, set the bit for the UID of every decl
4082 whose address is taken in STMT.  Return true if any such decl
4083 was found in this stmt. */
4084
4085 bool
4086 gimple_ior_addresses_taken (bitmap addresses_taken, gimple stmt)
4087 {
4088 return walk_stmt_load_store_addr_ops (stmt, addresses_taken, NULL, NULL,
4089 gimple_ior_addresses_taken_1);
4090 }
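
/* Typical use (illustrative sketch, not from the original file): collect
   every decl whose address is taken anywhere in basic block BB.

     bitmap addresses_taken = BITMAP_ALLOC (NULL);
     gimple_stmt_iterator gsi;
     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       gimple_ior_addresses_taken (addresses_taken, gsi_stmt (gsi));
*/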
4091
4092
4093 /* Return a printable name for symbol DECL. */
4094
4095 const char *
4096 gimple_decl_printable_name (tree decl, int verbosity)
4097 {
4098 if (!DECL_NAME (decl))
4099 return NULL;
4100
4101 if (DECL_ASSEMBLER_NAME_SET_P (decl))
4102 {
4103 const char *str, *mangled_str;
4104 int dmgl_opts = DMGL_NO_OPTS;
4105
4106 if (verbosity >= 2)
4107 {
4108 dmgl_opts = DMGL_VERBOSE
4109 | DMGL_ANSI
4110 | DMGL_GNU_V3
4111 | DMGL_RET_POSTFIX;
4112 if (TREE_CODE (decl) == FUNCTION_DECL)
4113 dmgl_opts |= DMGL_PARAMS;
4114 }
4115
4116 mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
4117 str = cplus_demangle_v3 (mangled_str, dmgl_opts);
4118 return (str) ? str : mangled_str;
4119 }
4120
4121 return IDENTIFIER_POINTER (DECL_NAME (decl));
4122 }
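
/* Illustrative use (hypothetical dump code, not in the original file).
   A verbosity of 2 or more asks the demangler for parameter and return
   types as well; the result may be NULL for decls without a name.

     if (dump_file)
       {
         const char *name = gimple_decl_printable_name (fndecl, 2);
         fprintf (dump_file, "considering %s\n", name ? name : "<anonymous>");
       }
*/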
4123
4124 /* Return TRUE iff STMT is a call to a built-in function. */
4125
4126 bool
4127 is_gimple_builtin_call (gimple stmt)
4128 {
4129 tree callee;
4130
4131 if (is_gimple_call (stmt)
4132 && (callee = gimple_call_fndecl (stmt))
4133 && is_builtin_fn (callee)
4134 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
4135 return true;
4136
4137 return false;
4138 }
4139
4140 /* Return true when STMT's arguments match those of FNDECL. */
4141
4142 static bool
4143 validate_call (gimple stmt, tree fndecl)
4144 {
4145 tree targs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
4146 unsigned nargs = gimple_call_num_args (stmt);
4147 for (unsigned i = 0; i < nargs; ++i)
4148 {
4149 /* Variadic args follow. */
4150 if (!targs)
4151 return true;
4152 tree arg = gimple_call_arg (stmt, i);
4153 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
4154 && INTEGRAL_TYPE_P (TREE_VALUE (targs)))
4155 ;
4156 else if (POINTER_TYPE_P (TREE_TYPE (arg))
4157 && POINTER_TYPE_P (TREE_VALUE (targs)))
4158 ;
4159 else if (TREE_CODE (TREE_TYPE (arg))
4160 != TREE_CODE (TREE_VALUE (targs)))
4161 return false;
4162 targs = TREE_CHAIN (targs);
4163 }
4164 if (targs && !VOID_TYPE_P (TREE_VALUE (targs)))
4165 return false;
4166 return true;
4167 }
4168
4169 /* Return true when STMT is a builtin call of class KLASS. */
4170
4171 bool
4172 gimple_call_builtin_p (gimple stmt, enum built_in_class klass)
4173 {
4174 tree fndecl;
4175 if (is_gimple_call (stmt)
4176 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
4177 && DECL_BUILT_IN_CLASS (fndecl) == klass)
4178 return validate_call (stmt, fndecl);
4179 return false;
4180 }
4181
4182 /* Return true when STMT is a builtin call to the BUILT_IN_NORMAL function CODE. */
4183
4184 bool
4185 gimple_call_builtin_p (gimple stmt, enum built_in_function code)
4186 {
4187 tree fndecl;
4188 if (is_gimple_call (stmt)
4189 && (fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
4190 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4191 && DECL_FUNCTION_CODE (fndecl) == code)
4192 return validate_call (stmt, fndecl);
4193 return false;
4194 }
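
/* Usage sketch (illustrative only): recognize a normal memcpy builtin and,
   because validate_call has checked the arguments against the builtin's
   prototype, access them positionally with confidence.

     if (gimple_call_builtin_p (stmt, BUILT_IN_MEMCPY))
       {
         tree dest = gimple_call_arg (stmt, 0);
         tree src = gimple_call_arg (stmt, 1);
         tree len = gimple_call_arg (stmt, 2);
         ...
       }
*/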
4195
4196 /* Return true if STMT clobbers memory. STMT is required to be a
4197 GIMPLE_ASM. */
4198
4199 bool
4200 gimple_asm_clobbers_memory_p (const_gimple stmt)
4201 {
4202 unsigned i;
4203
4204 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
4205 {
4206 tree op = gimple_asm_clobber_op (stmt, i);
4207 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
4208 return true;
4209 }
4210
4211 return false;
4212 }
4213 #include "gt-gimple.h"