1 /* Gimple IR support functions.
3 Copyright (C) 2007-2013 Free Software Foundation, Inc.
4 Contributed by Aldy Hernandez <aldyh@redhat.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
30 #include "stor-layout.h"
32 #include "hard-reg-set.h"
33 #include "basic-block.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
39 #include "diagnostic.h"
40 #include "value-prof.h"
44 #include "langhooks.h"
48 /* All the tuples have their operand vector (if present) at the very bottom
49 of the structure. Therefore, the offset required to find the
50 operands vector the size of the structure minus the size of the 1
51 element tree array at the end (see gimple_ops). */
52 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) \
53 (HAS_TREE_OP ? sizeof (struct STRUCT) - sizeof (tree) : 0),
54 EXPORTED_CONST
size_t gimple_ops_offset_
[] = {
55 #include "gsstruct.def"
59 #define DEFGSSTRUCT(SYM, STRUCT, HAS_TREE_OP) sizeof (struct STRUCT),
60 static const size_t gsstruct_code_size
[] = {
61 #include "gsstruct.def"
65 #define DEFGSCODE(SYM, NAME, GSSCODE) NAME,
66 const char *const gimple_code_name
[] = {
71 #define DEFGSCODE(SYM, NAME, GSSCODE) GSSCODE,
72 EXPORTED_CONST
enum gimple_statement_structure_enum gss_for_code_
[] = {
79 int gimple_alloc_counts
[(int) gimple_alloc_kind_all
];
80 int gimple_alloc_sizes
[(int) gimple_alloc_kind_all
];
82 /* Keep in sync with gimple.h:enum gimple_alloc_kind. */
83 static const char * const gimple_alloc_kind_names
[] = {
90 /* Gimple tuple constructors.
91 Note: Any constructor taking a ``gimple_seq'' as a parameter, can
92 be passed a NULL to start with an empty sequence. */
94 /* Set the code for statement G to CODE. */
97 gimple_set_code (gimple g
, enum gimple_code code
)
99 g
->gsbase
.code
= code
;
102 /* Return the number of bytes needed to hold a GIMPLE statement with
106 gimple_size (enum gimple_code code
)
108 return gsstruct_code_size
[gss_for_code (code
)];
111 /* Allocate memory for a GIMPLE statement with code CODE and NUM_OPS
115 gimple_alloc_stat (enum gimple_code code
, unsigned num_ops MEM_STAT_DECL
)
120 size
= gimple_size (code
);
122 size
+= sizeof (tree
) * (num_ops
- 1);
124 if (GATHER_STATISTICS
)
126 enum gimple_alloc_kind kind
= gimple_alloc_kind (code
);
127 gimple_alloc_counts
[(int) kind
]++;
128 gimple_alloc_sizes
[(int) kind
] += size
;
131 stmt
= ggc_alloc_cleared_gimple_statement_d_stat (size PASS_MEM_STAT
);
132 gimple_set_code (stmt
, code
);
133 gimple_set_num_ops (stmt
, num_ops
);
135 /* Do not call gimple_set_modified here as it has other side
136 effects and this tuple is still not completely built. */
137 stmt
->gsbase
.modified
= 1;
138 gimple_init_singleton (stmt
);
143 /* Set SUBCODE to be the code of the expression computed by statement G. */
146 gimple_set_subcode (gimple g
, unsigned subcode
)
148 /* We only have 16 bits for the RHS code. Assert that we are not
150 gcc_assert (subcode
< (1 << 16));
151 g
->gsbase
.subcode
= subcode
;
156 /* Build a tuple with operands. CODE is the statement to build (which
157 must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the subcode
158 for the new tuple. NUM_OPS is the number of operands to allocate. */
160 #define gimple_build_with_ops(c, s, n) \
161 gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
164 gimple_build_with_ops_stat (enum gimple_code code
, unsigned subcode
,
165 unsigned num_ops MEM_STAT_DECL
)
167 gimple s
= gimple_alloc_stat (code
, num_ops PASS_MEM_STAT
);
168 gimple_set_subcode (s
, subcode
);
174 /* Build a GIMPLE_RETURN statement returning RETVAL. */
177 gimple_build_return (tree retval
)
179 gimple s
= gimple_build_with_ops (GIMPLE_RETURN
, ERROR_MARK
, 2);
181 gimple_return_set_retval (s
, retval
);
185 /* Reset alias information on call S. */
188 gimple_call_reset_alias_info (gimple s
)
190 if (gimple_call_flags (s
) & ECF_CONST
)
191 memset (gimple_call_use_set (s
), 0, sizeof (struct pt_solution
));
193 pt_solution_reset (gimple_call_use_set (s
));
194 if (gimple_call_flags (s
) & (ECF_CONST
|ECF_PURE
|ECF_NOVOPS
))
195 memset (gimple_call_clobber_set (s
), 0, sizeof (struct pt_solution
));
197 pt_solution_reset (gimple_call_clobber_set (s
));
200 /* Helper for gimple_build_call, gimple_build_call_valist,
201 gimple_build_call_vec and gimple_build_call_from_tree. Build the basic
202 components of a GIMPLE_CALL statement to function FN with NARGS
206 gimple_build_call_1 (tree fn
, unsigned nargs
)
208 gimple s
= gimple_build_with_ops (GIMPLE_CALL
, ERROR_MARK
, nargs
+ 3);
209 if (TREE_CODE (fn
) == FUNCTION_DECL
)
210 fn
= build_fold_addr_expr (fn
);
211 gimple_set_op (s
, 1, fn
);
212 gimple_call_set_fntype (s
, TREE_TYPE (TREE_TYPE (fn
)));
213 gimple_call_reset_alias_info (s
);
218 /* Build a GIMPLE_CALL statement to function FN with the arguments
219 specified in vector ARGS. */
222 gimple_build_call_vec (tree fn
, vec
<tree
> args
)
225 unsigned nargs
= args
.length ();
226 gimple call
= gimple_build_call_1 (fn
, nargs
);
228 for (i
= 0; i
< nargs
; i
++)
229 gimple_call_set_arg (call
, i
, args
[i
]);
235 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
236 arguments. The ... are the arguments. */
239 gimple_build_call (tree fn
, unsigned nargs
, ...)
245 gcc_assert (TREE_CODE (fn
) == FUNCTION_DECL
|| is_gimple_call_addr (fn
));
247 call
= gimple_build_call_1 (fn
, nargs
);
249 va_start (ap
, nargs
);
250 for (i
= 0; i
< nargs
; i
++)
251 gimple_call_set_arg (call
, i
, va_arg (ap
, tree
));
258 /* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
259 arguments. AP contains the arguments. */
262 gimple_build_call_valist (tree fn
, unsigned nargs
, va_list ap
)
267 gcc_assert (TREE_CODE (fn
) == FUNCTION_DECL
|| is_gimple_call_addr (fn
));
269 call
= gimple_build_call_1 (fn
, nargs
);
271 for (i
= 0; i
< nargs
; i
++)
272 gimple_call_set_arg (call
, i
, va_arg (ap
, tree
));
278 /* Helper for gimple_build_call_internal and gimple_build_call_internal_vec.
279 Build the basic components of a GIMPLE_CALL statement to internal
280 function FN with NARGS arguments. */
283 gimple_build_call_internal_1 (enum internal_fn fn
, unsigned nargs
)
285 gimple s
= gimple_build_with_ops (GIMPLE_CALL
, ERROR_MARK
, nargs
+ 3);
286 s
->gsbase
.subcode
|= GF_CALL_INTERNAL
;
287 gimple_call_set_internal_fn (s
, fn
);
288 gimple_call_reset_alias_info (s
);
293 /* Build a GIMPLE_CALL statement to internal function FN. NARGS is
294 the number of arguments. The ... are the arguments. */
297 gimple_build_call_internal (enum internal_fn fn
, unsigned nargs
, ...)
303 call
= gimple_build_call_internal_1 (fn
, nargs
);
304 va_start (ap
, nargs
);
305 for (i
= 0; i
< nargs
; i
++)
306 gimple_call_set_arg (call
, i
, va_arg (ap
, tree
));
313 /* Build a GIMPLE_CALL statement to internal function FN with the arguments
314 specified in vector ARGS. */
317 gimple_build_call_internal_vec (enum internal_fn fn
, vec
<tree
> args
)
322 nargs
= args
.length ();
323 call
= gimple_build_call_internal_1 (fn
, nargs
);
324 for (i
= 0; i
< nargs
; i
++)
325 gimple_call_set_arg (call
, i
, args
[i
]);
331 /* Build a GIMPLE_CALL statement from CALL_EXPR T. Note that T is
332 assumed to be in GIMPLE form already. Minimal checking is done of
336 gimple_build_call_from_tree (tree t
)
340 tree fndecl
= get_callee_fndecl (t
);
342 gcc_assert (TREE_CODE (t
) == CALL_EXPR
);
344 nargs
= call_expr_nargs (t
);
345 call
= gimple_build_call_1 (fndecl
? fndecl
: CALL_EXPR_FN (t
), nargs
);
347 for (i
= 0; i
< nargs
; i
++)
348 gimple_call_set_arg (call
, i
, CALL_EXPR_ARG (t
, i
));
350 gimple_set_block (call
, TREE_BLOCK (t
));
352 /* Carry all the CALL_EXPR flags to the new GIMPLE_CALL. */
353 gimple_call_set_chain (call
, CALL_EXPR_STATIC_CHAIN (t
));
354 gimple_call_set_tail (call
, CALL_EXPR_TAILCALL (t
));
355 gimple_call_set_return_slot_opt (call
, CALL_EXPR_RETURN_SLOT_OPT (t
));
357 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
358 && (DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA
359 || DECL_FUNCTION_CODE (fndecl
) == BUILT_IN_ALLOCA_WITH_ALIGN
))
360 gimple_call_set_alloca_for_var (call
, CALL_ALLOCA_FOR_VAR_P (t
));
362 gimple_call_set_from_thunk (call
, CALL_FROM_THUNK_P (t
));
363 gimple_call_set_va_arg_pack (call
, CALL_EXPR_VA_ARG_PACK (t
));
364 gimple_call_set_nothrow (call
, TREE_NOTHROW (t
));
365 gimple_set_no_warning (call
, TREE_NO_WARNING (t
));
371 /* Build a GIMPLE_ASSIGN statement.
373 LHS of the assignment.
374 RHS of the assignment which can be unary or binary. */
377 gimple_build_assign_stat (tree lhs
, tree rhs MEM_STAT_DECL
)
379 enum tree_code subcode
;
382 extract_ops_from_tree_1 (rhs
, &subcode
, &op1
, &op2
, &op3
);
383 return gimple_build_assign_with_ops (subcode
, lhs
, op1
, op2
, op3
388 /* Build a GIMPLE_ASSIGN statement with subcode SUBCODE and operands
389 OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
390 GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
393 gimple_build_assign_with_ops (enum tree_code subcode
, tree lhs
, tree op1
,
394 tree op2
, tree op3 MEM_STAT_DECL
)
399 /* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
401 num_ops
= get_gimple_rhs_num_ops (subcode
) + 1;
403 p
= gimple_build_with_ops_stat (GIMPLE_ASSIGN
, (unsigned)subcode
, num_ops
405 gimple_assign_set_lhs (p
, lhs
);
406 gimple_assign_set_rhs1 (p
, op1
);
409 gcc_assert (num_ops
> 2);
410 gimple_assign_set_rhs2 (p
, op2
);
415 gcc_assert (num_ops
> 3);
416 gimple_assign_set_rhs3 (p
, op3
);
423 gimple_build_assign_with_ops (enum tree_code subcode
, tree lhs
, tree op1
,
424 tree op2 MEM_STAT_DECL
)
426 return gimple_build_assign_with_ops (subcode
, lhs
, op1
, op2
, NULL_TREE
431 /* Build a GIMPLE_COND statement.
433 PRED is the condition used to compare LHS and the RHS.
434 T_LABEL is the label to jump to if the condition is true.
435 F_LABEL is the label to jump to otherwise. */
438 gimple_build_cond (enum tree_code pred_code
, tree lhs
, tree rhs
,
439 tree t_label
, tree f_label
)
443 gcc_assert (TREE_CODE_CLASS (pred_code
) == tcc_comparison
);
444 p
= gimple_build_with_ops (GIMPLE_COND
, pred_code
, 4);
445 gimple_cond_set_lhs (p
, lhs
);
446 gimple_cond_set_rhs (p
, rhs
);
447 gimple_cond_set_true_label (p
, t_label
);
448 gimple_cond_set_false_label (p
, f_label
);
452 /* Build a GIMPLE_COND statement from the conditional expression tree
453 COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
456 gimple_build_cond_from_tree (tree cond
, tree t_label
, tree f_label
)
461 gimple_cond_get_ops_from_tree (cond
, &code
, &lhs
, &rhs
);
462 return gimple_build_cond (code
, lhs
, rhs
, t_label
, f_label
);
465 /* Set code, lhs, and rhs of a GIMPLE_COND from a suitable
466 boolean expression tree COND. */
469 gimple_cond_set_condition_from_tree (gimple stmt
, tree cond
)
474 gimple_cond_get_ops_from_tree (cond
, &code
, &lhs
, &rhs
);
475 gimple_cond_set_condition (stmt
, code
, lhs
, rhs
);
478 /* Build a GIMPLE_LABEL statement for LABEL. */
481 gimple_build_label (tree label
)
483 gimple p
= gimple_build_with_ops (GIMPLE_LABEL
, ERROR_MARK
, 1);
484 gimple_label_set_label (p
, label
);
488 /* Build a GIMPLE_GOTO statement to label DEST. */
491 gimple_build_goto (tree dest
)
493 gimple p
= gimple_build_with_ops (GIMPLE_GOTO
, ERROR_MARK
, 1);
494 gimple_goto_set_dest (p
, dest
);
499 /* Build a GIMPLE_NOP statement. */
502 gimple_build_nop (void)
504 return gimple_alloc (GIMPLE_NOP
, 0);
508 /* Build a GIMPLE_BIND statement.
509 VARS are the variables in BODY.
510 BLOCK is the containing block. */
513 gimple_build_bind (tree vars
, gimple_seq body
, tree block
)
515 gimple p
= gimple_alloc (GIMPLE_BIND
, 0);
516 gimple_bind_set_vars (p
, vars
);
518 gimple_bind_set_body (p
, body
);
520 gimple_bind_set_block (p
, block
);
524 /* Helper function to set the simple fields of a asm stmt.
526 STRING is a pointer to a string that is the asm blocks assembly code.
527 NINPUT is the number of register inputs.
528 NOUTPUT is the number of register outputs.
529 NCLOBBERS is the number of clobbered registers.
533 gimple_build_asm_1 (const char *string
, unsigned ninputs
, unsigned noutputs
,
534 unsigned nclobbers
, unsigned nlabels
)
537 int size
= strlen (string
);
539 /* ASMs with labels cannot have outputs. This should have been
540 enforced by the front end. */
541 gcc_assert (nlabels
== 0 || noutputs
== 0);
543 p
= gimple_build_with_ops (GIMPLE_ASM
, ERROR_MARK
,
544 ninputs
+ noutputs
+ nclobbers
+ nlabels
);
546 p
->gimple_asm
.ni
= ninputs
;
547 p
->gimple_asm
.no
= noutputs
;
548 p
->gimple_asm
.nc
= nclobbers
;
549 p
->gimple_asm
.nl
= nlabels
;
550 p
->gimple_asm
.string
= ggc_alloc_string (string
, size
);
552 if (GATHER_STATISTICS
)
553 gimple_alloc_sizes
[(int) gimple_alloc_kind (GIMPLE_ASM
)] += size
;
558 /* Build a GIMPLE_ASM statement.
560 STRING is the assembly code.
561 NINPUT is the number of register inputs.
562 NOUTPUT is the number of register outputs.
563 NCLOBBERS is the number of clobbered registers.
564 INPUTS is a vector of the input register parameters.
565 OUTPUTS is a vector of the output register parameters.
566 CLOBBERS is a vector of the clobbered register parameters.
567 LABELS is a vector of destination labels. */
570 gimple_build_asm_vec (const char *string
, vec
<tree
, va_gc
> *inputs
,
571 vec
<tree
, va_gc
> *outputs
, vec
<tree
, va_gc
> *clobbers
,
572 vec
<tree
, va_gc
> *labels
)
577 p
= gimple_build_asm_1 (string
,
578 vec_safe_length (inputs
),
579 vec_safe_length (outputs
),
580 vec_safe_length (clobbers
),
581 vec_safe_length (labels
));
583 for (i
= 0; i
< vec_safe_length (inputs
); i
++)
584 gimple_asm_set_input_op (p
, i
, (*inputs
)[i
]);
586 for (i
= 0; i
< vec_safe_length (outputs
); i
++)
587 gimple_asm_set_output_op (p
, i
, (*outputs
)[i
]);
589 for (i
= 0; i
< vec_safe_length (clobbers
); i
++)
590 gimple_asm_set_clobber_op (p
, i
, (*clobbers
)[i
]);
592 for (i
= 0; i
< vec_safe_length (labels
); i
++)
593 gimple_asm_set_label_op (p
, i
, (*labels
)[i
]);
598 /* Build a GIMPLE_CATCH statement.
600 TYPES are the catch types.
601 HANDLER is the exception handler. */
604 gimple_build_catch (tree types
, gimple_seq handler
)
606 gimple p
= gimple_alloc (GIMPLE_CATCH
, 0);
607 gimple_catch_set_types (p
, types
);
609 gimple_catch_set_handler (p
, handler
);
614 /* Build a GIMPLE_EH_FILTER statement.
616 TYPES are the filter's types.
617 FAILURE is the filter's failure action. */
620 gimple_build_eh_filter (tree types
, gimple_seq failure
)
622 gimple p
= gimple_alloc (GIMPLE_EH_FILTER
, 0);
623 gimple_eh_filter_set_types (p
, types
);
625 gimple_eh_filter_set_failure (p
, failure
);
630 /* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
633 gimple_build_eh_must_not_throw (tree decl
)
635 gimple p
= gimple_alloc (GIMPLE_EH_MUST_NOT_THROW
, 0);
637 gcc_assert (TREE_CODE (decl
) == FUNCTION_DECL
);
638 gcc_assert (flags_from_decl_or_type (decl
) & ECF_NORETURN
);
639 gimple_eh_must_not_throw_set_fndecl (p
, decl
);
644 /* Build a GIMPLE_EH_ELSE statement. */
647 gimple_build_eh_else (gimple_seq n_body
, gimple_seq e_body
)
649 gimple p
= gimple_alloc (GIMPLE_EH_ELSE
, 0);
650 gimple_eh_else_set_n_body (p
, n_body
);
651 gimple_eh_else_set_e_body (p
, e_body
);
655 /* Build a GIMPLE_TRY statement.
657 EVAL is the expression to evaluate.
658 CLEANUP is the cleanup expression.
659 KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
660 whether this is a try/catch or a try/finally respectively. */
663 gimple_build_try (gimple_seq eval
, gimple_seq cleanup
,
664 enum gimple_try_flags kind
)
668 gcc_assert (kind
== GIMPLE_TRY_CATCH
|| kind
== GIMPLE_TRY_FINALLY
);
669 p
= gimple_alloc (GIMPLE_TRY
, 0);
670 gimple_set_subcode (p
, kind
);
672 gimple_try_set_eval (p
, eval
);
674 gimple_try_set_cleanup (p
, cleanup
);
679 /* Construct a GIMPLE_WITH_CLEANUP_EXPR statement.
681 CLEANUP is the cleanup expression. */
684 gimple_build_wce (gimple_seq cleanup
)
686 gimple p
= gimple_alloc (GIMPLE_WITH_CLEANUP_EXPR
, 0);
688 gimple_wce_set_cleanup (p
, cleanup
);
694 /* Build a GIMPLE_RESX statement. */
697 gimple_build_resx (int region
)
699 gimple p
= gimple_build_with_ops (GIMPLE_RESX
, ERROR_MARK
, 0);
700 p
->gimple_eh_ctrl
.region
= region
;
705 /* The helper for constructing a gimple switch statement.
706 INDEX is the switch's index.
707 NLABELS is the number of labels in the switch excluding the default.
708 DEFAULT_LABEL is the default label for the switch statement. */
711 gimple_build_switch_nlabels (unsigned nlabels
, tree index
, tree default_label
)
713 /* nlabels + 1 default label + 1 index. */
714 gcc_checking_assert (default_label
);
715 gimple p
= gimple_build_with_ops (GIMPLE_SWITCH
, ERROR_MARK
,
717 gimple_switch_set_index (p
, index
);
718 gimple_switch_set_default_label (p
, default_label
);
722 /* Build a GIMPLE_SWITCH statement.
724 INDEX is the switch's index.
725 DEFAULT_LABEL is the default label
726 ARGS is a vector of labels excluding the default. */
729 gimple_build_switch (tree index
, tree default_label
, vec
<tree
> args
)
731 unsigned i
, nlabels
= args
.length ();
733 gimple p
= gimple_build_switch_nlabels (nlabels
, index
, default_label
);
735 /* Copy the labels from the vector to the switch statement. */
736 for (i
= 0; i
< nlabels
; i
++)
737 gimple_switch_set_label (p
, i
+ 1, args
[i
]);
742 /* Build a GIMPLE_EH_DISPATCH statement. */
745 gimple_build_eh_dispatch (int region
)
747 gimple p
= gimple_build_with_ops (GIMPLE_EH_DISPATCH
, ERROR_MARK
, 0);
748 p
->gimple_eh_ctrl
.region
= region
;
752 /* Build a new GIMPLE_DEBUG_BIND statement.
754 VAR is bound to VALUE; block and location are taken from STMT. */
757 gimple_build_debug_bind_stat (tree var
, tree value
, gimple stmt MEM_STAT_DECL
)
759 gimple p
= gimple_build_with_ops_stat (GIMPLE_DEBUG
,
760 (unsigned)GIMPLE_DEBUG_BIND
, 2
763 gimple_debug_bind_set_var (p
, var
);
764 gimple_debug_bind_set_value (p
, value
);
766 gimple_set_location (p
, gimple_location (stmt
));
772 /* Build a new GIMPLE_DEBUG_SOURCE_BIND statement.
774 VAR is bound to VALUE; block and location are taken from STMT. */
777 gimple_build_debug_source_bind_stat (tree var
, tree value
,
778 gimple stmt MEM_STAT_DECL
)
780 gimple p
= gimple_build_with_ops_stat (GIMPLE_DEBUG
,
781 (unsigned)GIMPLE_DEBUG_SOURCE_BIND
, 2
784 gimple_debug_source_bind_set_var (p
, var
);
785 gimple_debug_source_bind_set_value (p
, value
);
787 gimple_set_location (p
, gimple_location (stmt
));
793 /* Build a GIMPLE_OMP_CRITICAL statement.
795 BODY is the sequence of statements for which only one thread can execute.
796 NAME is optional identifier for this critical block. */
799 gimple_build_omp_critical (gimple_seq body
, tree name
)
801 gimple p
= gimple_alloc (GIMPLE_OMP_CRITICAL
, 0);
802 gimple_omp_critical_set_name (p
, name
);
804 gimple_omp_set_body (p
, body
);
809 /* Build a GIMPLE_OMP_FOR statement.
811 BODY is sequence of statements inside the for loop.
812 KIND is the `for' variant.
813 CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
814 lastprivate, reductions, ordered, schedule, and nowait.
815 COLLAPSE is the collapse count.
816 PRE_BODY is the sequence of statements that are loop invariant. */
819 gimple_build_omp_for (gimple_seq body
, int kind
, tree clauses
, size_t collapse
,
822 gimple p
= gimple_alloc (GIMPLE_OMP_FOR
, 0);
824 gimple_omp_set_body (p
, body
);
825 gimple_omp_for_set_clauses (p
, clauses
);
826 gimple_omp_for_set_kind (p
, kind
);
827 p
->gimple_omp_for
.collapse
= collapse
;
828 p
->gimple_omp_for
.iter
829 = ggc_alloc_cleared_vec_gimple_omp_for_iter (collapse
);
831 gimple_omp_for_set_pre_body (p
, pre_body
);
837 /* Build a GIMPLE_OMP_PARALLEL statement.
839 BODY is sequence of statements which are executed in parallel.
840 CLAUSES, are the OMP parallel construct's clauses.
841 CHILD_FN is the function created for the parallel threads to execute.
842 DATA_ARG are the shared data argument(s). */
845 gimple_build_omp_parallel (gimple_seq body
, tree clauses
, tree child_fn
,
848 gimple p
= gimple_alloc (GIMPLE_OMP_PARALLEL
, 0);
850 gimple_omp_set_body (p
, body
);
851 gimple_omp_parallel_set_clauses (p
, clauses
);
852 gimple_omp_parallel_set_child_fn (p
, child_fn
);
853 gimple_omp_parallel_set_data_arg (p
, data_arg
);
859 /* Build a GIMPLE_OMP_TASK statement.
861 BODY is sequence of statements which are executed by the explicit task.
862 CLAUSES, are the OMP parallel construct's clauses.
863 CHILD_FN is the function created for the parallel threads to execute.
864 DATA_ARG are the shared data argument(s).
865 COPY_FN is the optional function for firstprivate initialization.
866 ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
869 gimple_build_omp_task (gimple_seq body
, tree clauses
, tree child_fn
,
870 tree data_arg
, tree copy_fn
, tree arg_size
,
873 gimple p
= gimple_alloc (GIMPLE_OMP_TASK
, 0);
875 gimple_omp_set_body (p
, body
);
876 gimple_omp_task_set_clauses (p
, clauses
);
877 gimple_omp_task_set_child_fn (p
, child_fn
);
878 gimple_omp_task_set_data_arg (p
, data_arg
);
879 gimple_omp_task_set_copy_fn (p
, copy_fn
);
880 gimple_omp_task_set_arg_size (p
, arg_size
);
881 gimple_omp_task_set_arg_align (p
, arg_align
);
887 /* Build a GIMPLE_OMP_SECTION statement for a sections statement.
889 BODY is the sequence of statements in the section. */
892 gimple_build_omp_section (gimple_seq body
)
894 gimple p
= gimple_alloc (GIMPLE_OMP_SECTION
, 0);
896 gimple_omp_set_body (p
, body
);
902 /* Build a GIMPLE_OMP_MASTER statement.
904 BODY is the sequence of statements to be executed by just the master. */
907 gimple_build_omp_master (gimple_seq body
)
909 gimple p
= gimple_alloc (GIMPLE_OMP_MASTER
, 0);
911 gimple_omp_set_body (p
, body
);
917 /* Build a GIMPLE_OMP_TASKGROUP statement.
919 BODY is the sequence of statements to be executed by the taskgroup
923 gimple_build_omp_taskgroup (gimple_seq body
)
925 gimple p
= gimple_alloc (GIMPLE_OMP_TASKGROUP
, 0);
927 gimple_omp_set_body (p
, body
);
933 /* Build a GIMPLE_OMP_CONTINUE statement.
935 CONTROL_DEF is the definition of the control variable.
936 CONTROL_USE is the use of the control variable. */
939 gimple_build_omp_continue (tree control_def
, tree control_use
)
941 gimple p
= gimple_alloc (GIMPLE_OMP_CONTINUE
, 0);
942 gimple_omp_continue_set_control_def (p
, control_def
);
943 gimple_omp_continue_set_control_use (p
, control_use
);
947 /* Build a GIMPLE_OMP_ORDERED statement.
949 BODY is the sequence of statements inside a loop that will executed in
953 gimple_build_omp_ordered (gimple_seq body
)
955 gimple p
= gimple_alloc (GIMPLE_OMP_ORDERED
, 0);
957 gimple_omp_set_body (p
, body
);
963 /* Build a GIMPLE_OMP_RETURN statement.
964 WAIT_P is true if this is a non-waiting return. */
967 gimple_build_omp_return (bool wait_p
)
969 gimple p
= gimple_alloc (GIMPLE_OMP_RETURN
, 0);
971 gimple_omp_return_set_nowait (p
);
977 /* Build a GIMPLE_OMP_SECTIONS statement.
979 BODY is a sequence of section statements.
980 CLAUSES are any of the OMP sections contsruct's clauses: private,
981 firstprivate, lastprivate, reduction, and nowait. */
984 gimple_build_omp_sections (gimple_seq body
, tree clauses
)
986 gimple p
= gimple_alloc (GIMPLE_OMP_SECTIONS
, 0);
988 gimple_omp_set_body (p
, body
);
989 gimple_omp_sections_set_clauses (p
, clauses
);
995 /* Build a GIMPLE_OMP_SECTIONS_SWITCH. */
998 gimple_build_omp_sections_switch (void)
1000 return gimple_alloc (GIMPLE_OMP_SECTIONS_SWITCH
, 0);
1004 /* Build a GIMPLE_OMP_SINGLE statement.
1006 BODY is the sequence of statements that will be executed once.
1007 CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
1008 copyprivate, nowait. */
1011 gimple_build_omp_single (gimple_seq body
, tree clauses
)
1013 gimple p
= gimple_alloc (GIMPLE_OMP_SINGLE
, 0);
1015 gimple_omp_set_body (p
, body
);
1016 gimple_omp_single_set_clauses (p
, clauses
);
1022 /* Build a GIMPLE_OMP_TARGET statement.
1024 BODY is the sequence of statements that will be executed.
1025 CLAUSES are any of the OMP target construct's clauses. */
1028 gimple_build_omp_target (gimple_seq body
, int kind
, tree clauses
)
1030 gimple p
= gimple_alloc (GIMPLE_OMP_TARGET
, 0);
1032 gimple_omp_set_body (p
, body
);
1033 gimple_omp_target_set_clauses (p
, clauses
);
1034 gimple_omp_target_set_kind (p
, kind
);
1040 /* Build a GIMPLE_OMP_TEAMS statement.
1042 BODY is the sequence of statements that will be executed.
1043 CLAUSES are any of the OMP teams construct's clauses. */
1046 gimple_build_omp_teams (gimple_seq body
, tree clauses
)
1048 gimple p
= gimple_alloc (GIMPLE_OMP_TEAMS
, 0);
1050 gimple_omp_set_body (p
, body
);
1051 gimple_omp_teams_set_clauses (p
, clauses
);
1057 /* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
1060 gimple_build_omp_atomic_load (tree lhs
, tree rhs
)
1062 gimple p
= gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD
, 0);
1063 gimple_omp_atomic_load_set_lhs (p
, lhs
);
1064 gimple_omp_atomic_load_set_rhs (p
, rhs
);
1068 /* Build a GIMPLE_OMP_ATOMIC_STORE statement.
1070 VAL is the value we are storing. */
1073 gimple_build_omp_atomic_store (tree val
)
1075 gimple p
= gimple_alloc (GIMPLE_OMP_ATOMIC_STORE
, 0);
1076 gimple_omp_atomic_store_set_val (p
, val
);
1080 /* Build a GIMPLE_TRANSACTION statement. */
1083 gimple_build_transaction (gimple_seq body
, tree label
)
1085 gimple p
= gimple_alloc (GIMPLE_TRANSACTION
, 0);
1086 gimple_transaction_set_body (p
, body
);
1087 gimple_transaction_set_label (p
, label
);
1091 /* Build a GIMPLE_PREDICT statement. PREDICT is one of the predictors from
1092 predict.def, OUTCOME is NOT_TAKEN or TAKEN. */
1095 gimple_build_predict (enum br_predictor predictor
, enum prediction outcome
)
1097 gimple p
= gimple_alloc (GIMPLE_PREDICT
, 0);
1098 /* Ensure all the predictors fit into the lower bits of the subcode. */
1099 gcc_assert ((int) END_PREDICTORS
<= GF_PREDICT_TAKEN
);
1100 gimple_predict_set_predictor (p
, predictor
);
1101 gimple_predict_set_outcome (p
, outcome
);
#if defined ENABLE_GIMPLE_CHECKING
/* Complain of a gimple type mismatch and die.  */

void
gimple_check_failed (const_gimple gs, const char *file, int line,
		     const char *function, enum gimple_code code,
		     enum tree_code subcode)
{
  internal_error ("gimple check: expected %s(%s), have %s(%s) in %s, at %s:%d",
      		  gimple_code_name[code],
		  get_tree_code_name (subcode),
		  gimple_code_name[gimple_code (gs)],
		  gs->gsbase.subcode > 0
		    ? get_tree_code_name ((enum tree_code) gs->gsbase.subcode)
		    : "",
		  function, trim_filename (file), line);
}
#endif /* ENABLE_GIMPLE_CHECKING */
1125 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1126 *SEQ_P is NULL, a new sequence is allocated. */
1129 gimple_seq_add_stmt (gimple_seq
*seq_p
, gimple gs
)
1131 gimple_stmt_iterator si
;
1135 si
= gsi_last (*seq_p
);
1136 gsi_insert_after (&si
, gs
, GSI_NEW_STMT
);
1139 /* Link gimple statement GS to the end of the sequence *SEQ_P. If
1140 *SEQ_P is NULL, a new sequence is allocated. This function is
1141 similar to gimple_seq_add_stmt, but does not scan the operands.
1142 During gimplification, we need to manipulate statement sequences
1143 before the def/use vectors have been constructed. */
1146 gimple_seq_add_stmt_without_update (gimple_seq
*seq_p
, gimple gs
)
1148 gimple_stmt_iterator si
;
1153 si
= gsi_last (*seq_p
);
1154 gsi_insert_after_without_update (&si
, gs
, GSI_NEW_STMT
);
1157 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
1158 NULL, a new sequence is allocated. */
1161 gimple_seq_add_seq (gimple_seq
*dst_p
, gimple_seq src
)
1163 gimple_stmt_iterator si
;
1167 si
= gsi_last (*dst_p
);
1168 gsi_insert_seq_after (&si
, src
, GSI_NEW_STMT
);
1171 /* Determine whether to assign a location to the statement GS. */
1174 should_carry_location_p (gimple gs
)
1176 /* Don't emit a line note for a label. We particularly don't want to
1177 emit one for the break label, since it doesn't actually correspond
1178 to the beginning of the loop/switch. */
1179 if (gimple_code (gs
) == GIMPLE_LABEL
)
1185 /* Set the location for gimple statement GS to LOCATION. */
1188 annotate_one_with_location (gimple gs
, location_t location
)
1190 if (!gimple_has_location (gs
)
1191 && !gimple_do_not_emit_location_p (gs
)
1192 && should_carry_location_p (gs
))
1193 gimple_set_location (gs
, location
);
1196 /* Set LOCATION for all the statements after iterator GSI in sequence
1197 SEQ. If GSI is pointing to the end of the sequence, start with the
1198 first statement in SEQ. */
1201 annotate_all_with_location_after (gimple_seq seq
, gimple_stmt_iterator gsi
,
1202 location_t location
)
1204 if (gsi_end_p (gsi
))
1205 gsi
= gsi_start (seq
);
1209 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
1210 annotate_one_with_location (gsi_stmt (gsi
), location
);
1213 /* Set the location for all the statements in a sequence STMT_P to LOCATION. */
1216 annotate_all_with_location (gimple_seq stmt_p
, location_t location
)
1218 gimple_stmt_iterator i
;
1220 if (gimple_seq_empty_p (stmt_p
))
1223 for (i
= gsi_start (stmt_p
); !gsi_end_p (i
); gsi_next (&i
))
1225 gimple gs
= gsi_stmt (i
);
1226 annotate_one_with_location (gs
, location
);
1230 /* Helper function of empty_body_p. Return true if STMT is an empty
1234 empty_stmt_p (gimple stmt
)
1236 if (gimple_code (stmt
) == GIMPLE_NOP
)
1238 if (gimple_code (stmt
) == GIMPLE_BIND
)
1239 return empty_body_p (gimple_bind_body (stmt
));
1244 /* Return true if BODY contains nothing but empty statements. */
1247 empty_body_p (gimple_seq body
)
1249 gimple_stmt_iterator i
;
1251 if (gimple_seq_empty_p (body
))
1253 for (i
= gsi_start (body
); !gsi_end_p (i
); gsi_next (&i
))
1254 if (!empty_stmt_p (gsi_stmt (i
))
1255 && !is_gimple_debug (gsi_stmt (i
)))
1262 /* Perform a deep copy of sequence SRC and return the result. */
1265 gimple_seq_copy (gimple_seq src
)
1267 gimple_stmt_iterator gsi
;
1268 gimple_seq new_seq
= NULL
;
1271 for (gsi
= gsi_start (src
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1273 stmt
= gimple_copy (gsi_stmt (gsi
));
1274 gimple_seq_add_stmt (&new_seq
, stmt
);
1282 /* Return true if calls C1 and C2 are known to go to the same function. */
1285 gimple_call_same_target_p (const_gimple c1
, const_gimple c2
)
1287 if (gimple_call_internal_p (c1
))
1288 return (gimple_call_internal_p (c2
)
1289 && gimple_call_internal_fn (c1
) == gimple_call_internal_fn (c2
));
1291 return (gimple_call_fn (c1
) == gimple_call_fn (c2
)
1292 || (gimple_call_fndecl (c1
)
1293 && gimple_call_fndecl (c1
) == gimple_call_fndecl (c2
)));
1296 /* Detect flags from a GIMPLE_CALL. This is just like
1297 call_expr_flags, but for gimple tuples. */
1300 gimple_call_flags (const_gimple stmt
)
1303 tree decl
= gimple_call_fndecl (stmt
);
1306 flags
= flags_from_decl_or_type (decl
);
1307 else if (gimple_call_internal_p (stmt
))
1308 flags
= internal_fn_flags (gimple_call_internal_fn (stmt
));
1310 flags
= flags_from_decl_or_type (gimple_call_fntype (stmt
));
1312 if (stmt
->gsbase
.subcode
& GF_CALL_NOTHROW
)
1313 flags
|= ECF_NOTHROW
;
1318 /* Return the "fn spec" string for call STMT. */
1321 gimple_call_fnspec (const_gimple stmt
)
1325 type
= gimple_call_fntype (stmt
);
1329 attr
= lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type
));
1333 return TREE_VALUE (TREE_VALUE (attr
));
1336 /* Detects argument flags for argument number ARG on call STMT. */
1339 gimple_call_arg_flags (const_gimple stmt
, unsigned arg
)
1341 tree attr
= gimple_call_fnspec (stmt
);
1343 if (!attr
|| 1 + arg
>= (unsigned) TREE_STRING_LENGTH (attr
))
1346 switch (TREE_STRING_POINTER (attr
)[1 + arg
])
1353 return EAF_DIRECT
| EAF_NOCLOBBER
| EAF_NOESCAPE
;
1356 return EAF_NOCLOBBER
| EAF_NOESCAPE
;
1359 return EAF_DIRECT
| EAF_NOESCAPE
;
1362 return EAF_NOESCAPE
;
1370 /* Detects return flags for the call STMT. */
1373 gimple_call_return_flags (const_gimple stmt
)
1377 if (gimple_call_flags (stmt
) & ECF_MALLOC
)
1380 attr
= gimple_call_fnspec (stmt
);
1381 if (!attr
|| TREE_STRING_LENGTH (attr
) < 1)
1384 switch (TREE_STRING_POINTER (attr
)[0])
1390 return ERF_RETURNS_ARG
| (TREE_STRING_POINTER (attr
)[0] - '1');
1402 /* Return true if GS is a copy assignment. */
1405 gimple_assign_copy_p (gimple gs
)
1407 return (gimple_assign_single_p (gs
)
1408 && is_gimple_val (gimple_op (gs
, 1)));
1412 /* Return true if GS is a SSA_NAME copy assignment. */
1415 gimple_assign_ssa_name_copy_p (gimple gs
)
1417 return (gimple_assign_single_p (gs
)
1418 && TREE_CODE (gimple_assign_lhs (gs
)) == SSA_NAME
1419 && TREE_CODE (gimple_assign_rhs1 (gs
)) == SSA_NAME
);
1423 /* Return true if GS is an assignment with a unary RHS, but the
1424 operator has no effect on the assigned value. The logic is adapted
1425 from STRIP_NOPS. This predicate is intended to be used in tuplifying
1426 instances in which STRIP_NOPS was previously applied to the RHS of
1429 NOTE: In the use cases that led to the creation of this function
1430 and of gimple_assign_single_p, it is typical to test for either
1431 condition and to proceed in the same manner. In each case, the
1432 assigned value is represented by the single RHS operand of the
1433 assignment. I suspect there may be cases where gimple_assign_copy_p,
1434 gimple_assign_single_p, or equivalent logic is used where a similar
1435 treatment of unary NOPs is appropriate. */
1438 gimple_assign_unary_nop_p (gimple gs
)
1440 return (is_gimple_assign (gs
)
1441 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (gs
))
1442 || gimple_assign_rhs_code (gs
) == NON_LVALUE_EXPR
)
1443 && gimple_assign_rhs1 (gs
) != error_mark_node
1444 && (TYPE_MODE (TREE_TYPE (gimple_assign_lhs (gs
)))
1445 == TYPE_MODE (TREE_TYPE (gimple_assign_rhs1 (gs
)))));
1448 /* Set BB to be the basic block holding G. */
1451 gimple_set_bb (gimple stmt
, basic_block bb
)
1453 stmt
->gsbase
.bb
= bb
;
1455 /* If the statement is a label, add the label to block-to-labels map
1456 so that we can speed up edge creation for GIMPLE_GOTOs. */
1457 if (cfun
->cfg
&& gimple_code (stmt
) == GIMPLE_LABEL
)
1462 t
= gimple_label_label (stmt
);
1463 uid
= LABEL_DECL_UID (t
);
1466 unsigned old_len
= vec_safe_length (label_to_block_map
);
1467 LABEL_DECL_UID (t
) = uid
= cfun
->cfg
->last_label_uid
++;
1468 if (old_len
<= (unsigned) uid
)
1470 unsigned new_len
= 3 * uid
/ 2 + 1;
1472 vec_safe_grow_cleared (label_to_block_map
, new_len
);
1476 (*label_to_block_map
)[uid
] = bb
;
1481 /* Modify the RHS of the assignment pointed-to by GSI using the
1482 operands in the expression tree EXPR.
1484 NOTE: The statement pointed-to by GSI may be reallocated if it
1485 did not have enough operand slots.
1487 This function is useful to convert an existing tree expression into
1488 the flat representation used for the RHS of a GIMPLE assignment.
1489 It will reallocate memory as needed to expand or shrink the number
1490 of operand slots needed to represent EXPR.
1492 NOTE: If you find yourself building a tree and then calling this
1493 function, you are most certainly doing it the slow way. It is much
1494 better to build a new assignment or to use the function
1495 gimple_assign_set_rhs_with_ops, which does not require an
1496 expression tree to be built. */
1499 gimple_assign_set_rhs_from_tree (gimple_stmt_iterator
*gsi
, tree expr
)
1501 enum tree_code subcode
;
1504 extract_ops_from_tree_1 (expr
, &subcode
, &op1
, &op2
, &op3
);
1505 gimple_assign_set_rhs_with_ops_1 (gsi
, subcode
, op1
, op2
, op3
);
1509 /* Set the RHS of assignment statement pointed-to by GSI to CODE with
1510 operands OP1, OP2 and OP3.
1512 NOTE: The statement pointed-to by GSI may be reallocated if it
1513 did not have enough operand slots. */
1516 gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator
*gsi
, enum tree_code code
,
1517 tree op1
, tree op2
, tree op3
)
1519 unsigned new_rhs_ops
= get_gimple_rhs_num_ops (code
);
1520 gimple stmt
= gsi_stmt (*gsi
);
1522 /* If the new CODE needs more operands, allocate a new statement. */
1523 if (gimple_num_ops (stmt
) < new_rhs_ops
+ 1)
1525 tree lhs
= gimple_assign_lhs (stmt
);
1526 gimple new_stmt
= gimple_alloc (gimple_code (stmt
), new_rhs_ops
+ 1);
1527 memcpy (new_stmt
, stmt
, gimple_size (gimple_code (stmt
)));
1528 gimple_init_singleton (new_stmt
);
1529 gsi_replace (gsi
, new_stmt
, true);
1532 /* The LHS needs to be reset as this also changes the SSA name
1534 gimple_assign_set_lhs (stmt
, lhs
);
1537 gimple_set_num_ops (stmt
, new_rhs_ops
+ 1);
1538 gimple_set_subcode (stmt
, code
);
1539 gimple_assign_set_rhs1 (stmt
, op1
);
1540 if (new_rhs_ops
> 1)
1541 gimple_assign_set_rhs2 (stmt
, op2
);
1542 if (new_rhs_ops
> 2)
1543 gimple_assign_set_rhs3 (stmt
, op3
);
1547 /* Return the LHS of a statement that performs an assignment,
1548 either a GIMPLE_ASSIGN or a GIMPLE_CALL. Returns NULL_TREE
1549 for a call to a function that returns no value, or for a
1550 statement other than an assignment or a call. */
1553 gimple_get_lhs (const_gimple stmt
)
1555 enum gimple_code code
= gimple_code (stmt
);
1557 if (code
== GIMPLE_ASSIGN
)
1558 return gimple_assign_lhs (stmt
);
1559 else if (code
== GIMPLE_CALL
)
1560 return gimple_call_lhs (stmt
);
1566 /* Set the LHS of a statement that performs an assignment,
1567 either a GIMPLE_ASSIGN or a GIMPLE_CALL. */
1570 gimple_set_lhs (gimple stmt
, tree lhs
)
1572 enum gimple_code code
= gimple_code (stmt
);
1574 if (code
== GIMPLE_ASSIGN
)
1575 gimple_assign_set_lhs (stmt
, lhs
);
1576 else if (code
== GIMPLE_CALL
)
1577 gimple_call_set_lhs (stmt
, lhs
);
1583 /* Return a deep copy of statement STMT. All the operands from STMT
1584 are reallocated and copied using unshare_expr. The DEF, USE, VDEF
1585 and VUSE operand arrays are set to empty in the new copy. The new
1586 copy isn't part of any sequence. */
1589 gimple_copy (gimple stmt
)
1591 enum gimple_code code
= gimple_code (stmt
);
1592 unsigned num_ops
= gimple_num_ops (stmt
);
1593 gimple copy
= gimple_alloc (code
, num_ops
);
1596 /* Shallow copy all the fields from STMT. */
1597 memcpy (copy
, stmt
, gimple_size (code
));
1598 gimple_init_singleton (copy
);
1600 /* If STMT has sub-statements, deep-copy them as well. */
1601 if (gimple_has_substatements (stmt
))
1606 switch (gimple_code (stmt
))
1609 new_seq
= gimple_seq_copy (gimple_bind_body (stmt
));
1610 gimple_bind_set_body (copy
, new_seq
);
1611 gimple_bind_set_vars (copy
, unshare_expr (gimple_bind_vars (stmt
)));
1612 gimple_bind_set_block (copy
, gimple_bind_block (stmt
));
1616 new_seq
= gimple_seq_copy (gimple_catch_handler (stmt
));
1617 gimple_catch_set_handler (copy
, new_seq
);
1618 t
= unshare_expr (gimple_catch_types (stmt
));
1619 gimple_catch_set_types (copy
, t
);
1622 case GIMPLE_EH_FILTER
:
1623 new_seq
= gimple_seq_copy (gimple_eh_filter_failure (stmt
));
1624 gimple_eh_filter_set_failure (copy
, new_seq
);
1625 t
= unshare_expr (gimple_eh_filter_types (stmt
));
1626 gimple_eh_filter_set_types (copy
, t
);
1629 case GIMPLE_EH_ELSE
:
1630 new_seq
= gimple_seq_copy (gimple_eh_else_n_body (stmt
));
1631 gimple_eh_else_set_n_body (copy
, new_seq
);
1632 new_seq
= gimple_seq_copy (gimple_eh_else_e_body (stmt
));
1633 gimple_eh_else_set_e_body (copy
, new_seq
);
1637 new_seq
= gimple_seq_copy (gimple_try_eval (stmt
));
1638 gimple_try_set_eval (copy
, new_seq
);
1639 new_seq
= gimple_seq_copy (gimple_try_cleanup (stmt
));
1640 gimple_try_set_cleanup (copy
, new_seq
);
1643 case GIMPLE_OMP_FOR
:
1644 new_seq
= gimple_seq_copy (gimple_omp_for_pre_body (stmt
));
1645 gimple_omp_for_set_pre_body (copy
, new_seq
);
1646 t
= unshare_expr (gimple_omp_for_clauses (stmt
));
1647 gimple_omp_for_set_clauses (copy
, t
);
1648 copy
->gimple_omp_for
.iter
1649 = ggc_alloc_vec_gimple_omp_for_iter
1650 (gimple_omp_for_collapse (stmt
));
1651 for (i
= 0; i
< gimple_omp_for_collapse (stmt
); i
++)
1653 gimple_omp_for_set_cond (copy
, i
,
1654 gimple_omp_for_cond (stmt
, i
));
1655 gimple_omp_for_set_index (copy
, i
,
1656 gimple_omp_for_index (stmt
, i
));
1657 t
= unshare_expr (gimple_omp_for_initial (stmt
, i
));
1658 gimple_omp_for_set_initial (copy
, i
, t
);
1659 t
= unshare_expr (gimple_omp_for_final (stmt
, i
));
1660 gimple_omp_for_set_final (copy
, i
, t
);
1661 t
= unshare_expr (gimple_omp_for_incr (stmt
, i
));
1662 gimple_omp_for_set_incr (copy
, i
, t
);
1666 case GIMPLE_OMP_PARALLEL
:
1667 t
= unshare_expr (gimple_omp_parallel_clauses (stmt
));
1668 gimple_omp_parallel_set_clauses (copy
, t
);
1669 t
= unshare_expr (gimple_omp_parallel_child_fn (stmt
));
1670 gimple_omp_parallel_set_child_fn (copy
, t
);
1671 t
= unshare_expr (gimple_omp_parallel_data_arg (stmt
));
1672 gimple_omp_parallel_set_data_arg (copy
, t
);
1675 case GIMPLE_OMP_TASK
:
1676 t
= unshare_expr (gimple_omp_task_clauses (stmt
));
1677 gimple_omp_task_set_clauses (copy
, t
);
1678 t
= unshare_expr (gimple_omp_task_child_fn (stmt
));
1679 gimple_omp_task_set_child_fn (copy
, t
);
1680 t
= unshare_expr (gimple_omp_task_data_arg (stmt
));
1681 gimple_omp_task_set_data_arg (copy
, t
);
1682 t
= unshare_expr (gimple_omp_task_copy_fn (stmt
));
1683 gimple_omp_task_set_copy_fn (copy
, t
);
1684 t
= unshare_expr (gimple_omp_task_arg_size (stmt
));
1685 gimple_omp_task_set_arg_size (copy
, t
);
1686 t
= unshare_expr (gimple_omp_task_arg_align (stmt
));
1687 gimple_omp_task_set_arg_align (copy
, t
);
1690 case GIMPLE_OMP_CRITICAL
:
1691 t
= unshare_expr (gimple_omp_critical_name (stmt
));
1692 gimple_omp_critical_set_name (copy
, t
);
1695 case GIMPLE_OMP_SECTIONS
:
1696 t
= unshare_expr (gimple_omp_sections_clauses (stmt
));
1697 gimple_omp_sections_set_clauses (copy
, t
);
1698 t
= unshare_expr (gimple_omp_sections_control (stmt
));
1699 gimple_omp_sections_set_control (copy
, t
);
1702 case GIMPLE_OMP_SINGLE
:
1703 case GIMPLE_OMP_TARGET
:
1704 case GIMPLE_OMP_TEAMS
:
1705 case GIMPLE_OMP_SECTION
:
1706 case GIMPLE_OMP_MASTER
:
1707 case GIMPLE_OMP_TASKGROUP
:
1708 case GIMPLE_OMP_ORDERED
:
1710 new_seq
= gimple_seq_copy (gimple_omp_body (stmt
));
1711 gimple_omp_set_body (copy
, new_seq
);
1714 case GIMPLE_TRANSACTION
:
1715 new_seq
= gimple_seq_copy (gimple_transaction_body (stmt
));
1716 gimple_transaction_set_body (copy
, new_seq
);
1719 case GIMPLE_WITH_CLEANUP_EXPR
:
1720 new_seq
= gimple_seq_copy (gimple_wce_cleanup (stmt
));
1721 gimple_wce_set_cleanup (copy
, new_seq
);
1729 /* Make copy of operands. */
1730 for (i
= 0; i
< num_ops
; i
++)
1731 gimple_set_op (copy
, i
, unshare_expr (gimple_op (stmt
, i
)));
1733 if (gimple_has_mem_ops (stmt
))
1735 gimple_set_vdef (copy
, gimple_vdef (stmt
));
1736 gimple_set_vuse (copy
, gimple_vuse (stmt
));
1739 /* Clear out SSA operand vectors on COPY. */
1740 if (gimple_has_ops (stmt
))
1742 gimple_set_use_ops (copy
, NULL
);
1744 /* SSA operands need to be updated. */
1745 gimple_set_modified (copy
, true);
1752 /* Return true if statement S has side-effects. We consider a
1753 statement to have side effects if:
1755 - It is a GIMPLE_CALL not marked with ECF_PURE or ECF_CONST.
1756 - Any of its operands are marked TREE_THIS_VOLATILE or TREE_SIDE_EFFECTS. */
1759 gimple_has_side_effects (const_gimple s
)
1761 if (is_gimple_debug (s
))
1764 /* We don't have to scan the arguments to check for
1765 volatile arguments, though, at present, we still
1766 do a scan to check for TREE_SIDE_EFFECTS. */
1767 if (gimple_has_volatile_ops (s
))
1770 if (gimple_code (s
) == GIMPLE_ASM
1771 && gimple_asm_volatile_p (s
))
1774 if (is_gimple_call (s
))
1776 int flags
= gimple_call_flags (s
);
1778 /* An infinite loop is considered a side effect. */
1779 if (!(flags
& (ECF_CONST
| ECF_PURE
))
1780 || (flags
& ECF_LOOPING_CONST_OR_PURE
))
1789 /* Helper for gimple_could_trap_p and gimple_assign_rhs_could_trap_p.
1790 Return true if S can trap. When INCLUDE_MEM is true, check whether
1791 the memory operations could trap. When INCLUDE_STORES is true and
1792 S is a GIMPLE_ASSIGN, the LHS of the assignment is also checked. */
1795 gimple_could_trap_p_1 (gimple s
, bool include_mem
, bool include_stores
)
1797 tree t
, div
= NULL_TREE
;
1802 unsigned i
, start
= (is_gimple_assign (s
) && !include_stores
) ? 1 : 0;
1804 for (i
= start
; i
< gimple_num_ops (s
); i
++)
1805 if (tree_could_trap_p (gimple_op (s
, i
)))
1809 switch (gimple_code (s
))
1812 return gimple_asm_volatile_p (s
);
1815 t
= gimple_call_fndecl (s
);
1816 /* Assume that calls to weak functions may trap. */
1817 if (!t
|| !DECL_P (t
) || DECL_WEAK (t
))
1822 t
= gimple_expr_type (s
);
1823 op
= gimple_assign_rhs_code (s
);
1824 if (get_gimple_rhs_class (op
) == GIMPLE_BINARY_RHS
)
1825 div
= gimple_assign_rhs2 (s
);
1826 return (operation_could_trap_p (op
, FLOAT_TYPE_P (t
),
1827 (INTEGRAL_TYPE_P (t
)
1828 && TYPE_OVERFLOW_TRAPS (t
)),
1838 /* Return true if statement S can trap. */
1841 gimple_could_trap_p (gimple s
)
1843 return gimple_could_trap_p_1 (s
, true, true);
1846 /* Return true if RHS of a GIMPLE_ASSIGN S can trap. */
1849 gimple_assign_rhs_could_trap_p (gimple s
)
1851 gcc_assert (is_gimple_assign (s
));
1852 return gimple_could_trap_p_1 (s
, true, false);
1856 /* Print debugging information for gimple stmts generated. */
1859 dump_gimple_statistics (void)
1861 int i
, total_tuples
= 0, total_bytes
= 0;
1863 if (! GATHER_STATISTICS
)
1865 fprintf (stderr
, "No gimple statistics\n");
1869 fprintf (stderr
, "\nGIMPLE statements\n");
1870 fprintf (stderr
, "Kind Stmts Bytes\n");
1871 fprintf (stderr
, "---------------------------------------\n");
1872 for (i
= 0; i
< (int) gimple_alloc_kind_all
; ++i
)
1874 fprintf (stderr
, "%-20s %7d %10d\n", gimple_alloc_kind_names
[i
],
1875 gimple_alloc_counts
[i
], gimple_alloc_sizes
[i
]);
1876 total_tuples
+= gimple_alloc_counts
[i
];
1877 total_bytes
+= gimple_alloc_sizes
[i
];
1879 fprintf (stderr
, "---------------------------------------\n");
1880 fprintf (stderr
, "%-20s %7d %10d\n", "Total", total_tuples
, total_bytes
);
1881 fprintf (stderr
, "---------------------------------------\n");
1885 /* Return the number of operands needed on the RHS of a GIMPLE
1886 assignment for an expression with tree code CODE. */
1889 get_gimple_rhs_num_ops (enum tree_code code
)
1891 enum gimple_rhs_class rhs_class
= get_gimple_rhs_class (code
);
1893 if (rhs_class
== GIMPLE_UNARY_RHS
|| rhs_class
== GIMPLE_SINGLE_RHS
)
1895 else if (rhs_class
== GIMPLE_BINARY_RHS
)
1897 else if (rhs_class
== GIMPLE_TERNARY_RHS
)
1903 #define DEFTREECODE(SYM, STRING, TYPE, NARGS) \
1905 ((TYPE) == tcc_unary ? GIMPLE_UNARY_RHS \
1906 : ((TYPE) == tcc_binary \
1907 || (TYPE) == tcc_comparison) ? GIMPLE_BINARY_RHS \
1908 : ((TYPE) == tcc_constant \
1909 || (TYPE) == tcc_declaration \
1910 || (TYPE) == tcc_reference) ? GIMPLE_SINGLE_RHS \
1911 : ((SYM) == TRUTH_AND_EXPR \
1912 || (SYM) == TRUTH_OR_EXPR \
1913 || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
1914 : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
1915 : ((SYM) == COND_EXPR \
1916 || (SYM) == WIDEN_MULT_PLUS_EXPR \
1917 || (SYM) == WIDEN_MULT_MINUS_EXPR \
1918 || (SYM) == DOT_PROD_EXPR \
1919 || (SYM) == REALIGN_LOAD_EXPR \
1920 || (SYM) == VEC_COND_EXPR \
1921 || (SYM) == VEC_PERM_EXPR \
1922 || (SYM) == FMA_EXPR) ? GIMPLE_TERNARY_RHS \
1923 : ((SYM) == CONSTRUCTOR \
1924 || (SYM) == OBJ_TYPE_REF \
1925 || (SYM) == ASSERT_EXPR \
1926 || (SYM) == ADDR_EXPR \
1927 || (SYM) == WITH_SIZE_EXPR \
1928 || (SYM) == SSA_NAME) ? GIMPLE_SINGLE_RHS \
1929 : GIMPLE_INVALID_RHS),
1930 #define END_OF_BASE_TREE_CODES (unsigned char) GIMPLE_INVALID_RHS,
1932 const unsigned char gimple_rhs_class_table
[] = {
1933 #include "all-tree.def"
1937 #undef END_OF_BASE_TREE_CODES
1940 recalculate_side_effects (tree t
)
1942 enum tree_code code
= TREE_CODE (t
);
1943 int len
= TREE_OPERAND_LENGTH (t
);
1946 switch (TREE_CODE_CLASS (code
))
1948 case tcc_expression
:
1954 case PREDECREMENT_EXPR
:
1955 case PREINCREMENT_EXPR
:
1956 case POSTDECREMENT_EXPR
:
1957 case POSTINCREMENT_EXPR
:
1958 /* All of these have side-effects, no matter what their
1967 case tcc_comparison
: /* a comparison expression */
1968 case tcc_unary
: /* a unary arithmetic expression */
1969 case tcc_binary
: /* a binary arithmetic expression */
1970 case tcc_reference
: /* a reference */
1971 case tcc_vl_exp
: /* a function call */
1972 TREE_SIDE_EFFECTS (t
) = TREE_THIS_VOLATILE (t
);
1973 for (i
= 0; i
< len
; ++i
)
1975 tree op
= TREE_OPERAND (t
, i
);
1976 if (op
&& TREE_SIDE_EFFECTS (op
))
1977 TREE_SIDE_EFFECTS (t
) = 1;
1982 /* No side-effects. */
1990 /* Canonicalize a tree T for use in a COND_EXPR as conditional. Returns
1991 a canonicalized tree that is valid for a COND_EXPR or NULL_TREE, if
1992 we failed to create one. */
1995 canonicalize_cond_expr_cond (tree t
)
1997 /* Strip conversions around boolean operations. */
1998 if (CONVERT_EXPR_P (t
)
1999 && (truth_value_p (TREE_CODE (TREE_OPERAND (t
, 0)))
2000 || TREE_CODE (TREE_TYPE (TREE_OPERAND (t
, 0)))
2002 t
= TREE_OPERAND (t
, 0);
2004 /* For !x use x == 0. */
2005 if (TREE_CODE (t
) == TRUTH_NOT_EXPR
)
2007 tree top0
= TREE_OPERAND (t
, 0);
2008 t
= build2 (EQ_EXPR
, TREE_TYPE (t
),
2009 top0
, build_int_cst (TREE_TYPE (top0
), 0));
2011 /* For cmp ? 1 : 0 use cmp. */
2012 else if (TREE_CODE (t
) == COND_EXPR
2013 && COMPARISON_CLASS_P (TREE_OPERAND (t
, 0))
2014 && integer_onep (TREE_OPERAND (t
, 1))
2015 && integer_zerop (TREE_OPERAND (t
, 2)))
2017 tree top0
= TREE_OPERAND (t
, 0);
2018 t
= build2 (TREE_CODE (top0
), TREE_TYPE (t
),
2019 TREE_OPERAND (top0
, 0), TREE_OPERAND (top0
, 1));
2021 /* For x ^ y use x != y. */
2022 else if (TREE_CODE (t
) == BIT_XOR_EXPR
)
2023 t
= build2 (NE_EXPR
, TREE_TYPE (t
),
2024 TREE_OPERAND (t
, 0), TREE_OPERAND (t
, 1));
2026 if (is_gimple_condexpr (t
))
2032 /* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
2033 the positions marked by the set ARGS_TO_SKIP. */
2036 gimple_call_copy_skip_args (gimple stmt
, bitmap args_to_skip
)
2039 int nargs
= gimple_call_num_args (stmt
);
2041 vargs
.create (nargs
);
2044 for (i
= 0; i
< nargs
; i
++)
2045 if (!bitmap_bit_p (args_to_skip
, i
))
2046 vargs
.quick_push (gimple_call_arg (stmt
, i
));
2048 if (gimple_call_internal_p (stmt
))
2049 new_stmt
= gimple_build_call_internal_vec (gimple_call_internal_fn (stmt
),
2052 new_stmt
= gimple_build_call_vec (gimple_call_fn (stmt
), vargs
);
2054 if (gimple_call_lhs (stmt
))
2055 gimple_call_set_lhs (new_stmt
, gimple_call_lhs (stmt
));
2057 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
2058 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
2060 if (gimple_has_location (stmt
))
2061 gimple_set_location (new_stmt
, gimple_location (stmt
));
2062 gimple_call_copy_flags (new_stmt
, stmt
);
2063 gimple_call_set_chain (new_stmt
, gimple_call_chain (stmt
));
2065 gimple_set_modified (new_stmt
, true);
2072 /* Return true if the field decls F1 and F2 are at the same offset.
2074 This is intended to be used on GIMPLE types only. */
2077 gimple_compare_field_offset (tree f1
, tree f2
)
2079 if (DECL_OFFSET_ALIGN (f1
) == DECL_OFFSET_ALIGN (f2
))
2081 tree offset1
= DECL_FIELD_OFFSET (f1
);
2082 tree offset2
= DECL_FIELD_OFFSET (f2
);
2083 return ((offset1
== offset2
2084 /* Once gimplification is done, self-referential offsets are
2085 instantiated as operand #2 of the COMPONENT_REF built for
2086 each access and reset. Therefore, they are not relevant
2087 anymore and fields are interchangeable provided that they
2088 represent the same access. */
2089 || (TREE_CODE (offset1
) == PLACEHOLDER_EXPR
2090 && TREE_CODE (offset2
) == PLACEHOLDER_EXPR
2091 && (DECL_SIZE (f1
) == DECL_SIZE (f2
)
2092 || (TREE_CODE (DECL_SIZE (f1
)) == PLACEHOLDER_EXPR
2093 && TREE_CODE (DECL_SIZE (f2
)) == PLACEHOLDER_EXPR
)
2094 || operand_equal_p (DECL_SIZE (f1
), DECL_SIZE (f2
), 0))
2095 && DECL_ALIGN (f1
) == DECL_ALIGN (f2
))
2096 || operand_equal_p (offset1
, offset2
, 0))
2097 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (f1
),
2098 DECL_FIELD_BIT_OFFSET (f2
)));
2101 /* Fortran and C do not always agree on what DECL_OFFSET_ALIGN
2102 should be, so handle differing ones specially by decomposing
2103 the offset into a byte and bit offset manually. */
2104 if (tree_fits_shwi_p (DECL_FIELD_OFFSET (f1
))
2105 && tree_fits_shwi_p (DECL_FIELD_OFFSET (f2
)))
2107 unsigned HOST_WIDE_INT byte_offset1
, byte_offset2
;
2108 unsigned HOST_WIDE_INT bit_offset1
, bit_offset2
;
2109 bit_offset1
= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f1
));
2110 byte_offset1
= (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f1
))
2111 + bit_offset1
/ BITS_PER_UNIT
);
2112 bit_offset2
= TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (f2
));
2113 byte_offset2
= (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f2
))
2114 + bit_offset2
/ BITS_PER_UNIT
);
2115 if (byte_offset1
!= byte_offset2
)
2117 return bit_offset1
% BITS_PER_UNIT
== bit_offset2
% BITS_PER_UNIT
;
2124 /* Return a type the same as TYPE except unsigned or
2125 signed according to UNSIGNEDP. */
2128 gimple_signed_or_unsigned_type (bool unsignedp
, tree type
)
2132 type1
= TYPE_MAIN_VARIANT (type
);
2133 if (type1
== signed_char_type_node
2134 || type1
== char_type_node
2135 || type1
== unsigned_char_type_node
)
2136 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
2137 if (type1
== integer_type_node
|| type1
== unsigned_type_node
)
2138 return unsignedp
? unsigned_type_node
: integer_type_node
;
2139 if (type1
== short_integer_type_node
|| type1
== short_unsigned_type_node
)
2140 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
2141 if (type1
== long_integer_type_node
|| type1
== long_unsigned_type_node
)
2142 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
2143 if (type1
== long_long_integer_type_node
2144 || type1
== long_long_unsigned_type_node
)
2146 ? long_long_unsigned_type_node
2147 : long_long_integer_type_node
;
2148 if (int128_integer_type_node
&& (type1
== int128_integer_type_node
|| type1
== int128_unsigned_type_node
))
2150 ? int128_unsigned_type_node
2151 : int128_integer_type_node
;
2152 #if HOST_BITS_PER_WIDE_INT >= 64
2153 if (type1
== intTI_type_node
|| type1
== unsigned_intTI_type_node
)
2154 return unsignedp
? unsigned_intTI_type_node
: intTI_type_node
;
2156 if (type1
== intDI_type_node
|| type1
== unsigned_intDI_type_node
)
2157 return unsignedp
? unsigned_intDI_type_node
: intDI_type_node
;
2158 if (type1
== intSI_type_node
|| type1
== unsigned_intSI_type_node
)
2159 return unsignedp
? unsigned_intSI_type_node
: intSI_type_node
;
2160 if (type1
== intHI_type_node
|| type1
== unsigned_intHI_type_node
)
2161 return unsignedp
? unsigned_intHI_type_node
: intHI_type_node
;
2162 if (type1
== intQI_type_node
|| type1
== unsigned_intQI_type_node
)
2163 return unsignedp
? unsigned_intQI_type_node
: intQI_type_node
;
2165 #define GIMPLE_FIXED_TYPES(NAME) \
2166 if (type1 == short_ ## NAME ## _type_node \
2167 || type1 == unsigned_short_ ## NAME ## _type_node) \
2168 return unsignedp ? unsigned_short_ ## NAME ## _type_node \
2169 : short_ ## NAME ## _type_node; \
2170 if (type1 == NAME ## _type_node \
2171 || type1 == unsigned_ ## NAME ## _type_node) \
2172 return unsignedp ? unsigned_ ## NAME ## _type_node \
2173 : NAME ## _type_node; \
2174 if (type1 == long_ ## NAME ## _type_node \
2175 || type1 == unsigned_long_ ## NAME ## _type_node) \
2176 return unsignedp ? unsigned_long_ ## NAME ## _type_node \
2177 : long_ ## NAME ## _type_node; \
2178 if (type1 == long_long_ ## NAME ## _type_node \
2179 || type1 == unsigned_long_long_ ## NAME ## _type_node) \
2180 return unsignedp ? unsigned_long_long_ ## NAME ## _type_node \
2181 : long_long_ ## NAME ## _type_node;
2183 #define GIMPLE_FIXED_MODE_TYPES(NAME) \
2184 if (type1 == NAME ## _type_node \
2185 || type1 == u ## NAME ## _type_node) \
2186 return unsignedp ? u ## NAME ## _type_node \
2187 : NAME ## _type_node;
2189 #define GIMPLE_FIXED_TYPES_SAT(NAME) \
2190 if (type1 == sat_ ## short_ ## NAME ## _type_node \
2191 || type1 == sat_ ## unsigned_short_ ## NAME ## _type_node) \
2192 return unsignedp ? sat_ ## unsigned_short_ ## NAME ## _type_node \
2193 : sat_ ## short_ ## NAME ## _type_node; \
2194 if (type1 == sat_ ## NAME ## _type_node \
2195 || type1 == sat_ ## unsigned_ ## NAME ## _type_node) \
2196 return unsignedp ? sat_ ## unsigned_ ## NAME ## _type_node \
2197 : sat_ ## NAME ## _type_node; \
2198 if (type1 == sat_ ## long_ ## NAME ## _type_node \
2199 || type1 == sat_ ## unsigned_long_ ## NAME ## _type_node) \
2200 return unsignedp ? sat_ ## unsigned_long_ ## NAME ## _type_node \
2201 : sat_ ## long_ ## NAME ## _type_node; \
2202 if (type1 == sat_ ## long_long_ ## NAME ## _type_node \
2203 || type1 == sat_ ## unsigned_long_long_ ## NAME ## _type_node) \
2204 return unsignedp ? sat_ ## unsigned_long_long_ ## NAME ## _type_node \
2205 : sat_ ## long_long_ ## NAME ## _type_node;
2207 #define GIMPLE_FIXED_MODE_TYPES_SAT(NAME) \
2208 if (type1 == sat_ ## NAME ## _type_node \
2209 || type1 == sat_ ## u ## NAME ## _type_node) \
2210 return unsignedp ? sat_ ## u ## NAME ## _type_node \
2211 : sat_ ## NAME ## _type_node;
2213 GIMPLE_FIXED_TYPES (fract
);
2214 GIMPLE_FIXED_TYPES_SAT (fract
);
2215 GIMPLE_FIXED_TYPES (accum
);
2216 GIMPLE_FIXED_TYPES_SAT (accum
);
2218 GIMPLE_FIXED_MODE_TYPES (qq
);
2219 GIMPLE_FIXED_MODE_TYPES (hq
);
2220 GIMPLE_FIXED_MODE_TYPES (sq
);
2221 GIMPLE_FIXED_MODE_TYPES (dq
);
2222 GIMPLE_FIXED_MODE_TYPES (tq
);
2223 GIMPLE_FIXED_MODE_TYPES_SAT (qq
);
2224 GIMPLE_FIXED_MODE_TYPES_SAT (hq
);
2225 GIMPLE_FIXED_MODE_TYPES_SAT (sq
);
2226 GIMPLE_FIXED_MODE_TYPES_SAT (dq
);
2227 GIMPLE_FIXED_MODE_TYPES_SAT (tq
);
2228 GIMPLE_FIXED_MODE_TYPES (ha
);
2229 GIMPLE_FIXED_MODE_TYPES (sa
);
2230 GIMPLE_FIXED_MODE_TYPES (da
);
2231 GIMPLE_FIXED_MODE_TYPES (ta
);
2232 GIMPLE_FIXED_MODE_TYPES_SAT (ha
);
2233 GIMPLE_FIXED_MODE_TYPES_SAT (sa
);
2234 GIMPLE_FIXED_MODE_TYPES_SAT (da
);
2235 GIMPLE_FIXED_MODE_TYPES_SAT (ta
);
2237 /* For ENUMERAL_TYPEs in C++, must check the mode of the types, not
2238 the precision; they have precision set to match their range, but
2239 may use a wider mode to match an ABI. If we change modes, we may
2240 wind up with bad conversions. For INTEGER_TYPEs in C, must check
2241 the precision as well, so as to yield correct results for
2242 bit-field types. C++ does not have these separate bit-field
2243 types, and producing a signed or unsigned variant of an
2244 ENUMERAL_TYPE may cause other problems as well. */
2245 if (!INTEGRAL_TYPE_P (type
)
2246 || TYPE_UNSIGNED (type
) == unsignedp
)
2249 #define TYPE_OK(node) \
2250 (TYPE_MODE (type) == TYPE_MODE (node) \
2251 && TYPE_PRECISION (type) == TYPE_PRECISION (node))
2252 if (TYPE_OK (signed_char_type_node
))
2253 return unsignedp
? unsigned_char_type_node
: signed_char_type_node
;
2254 if (TYPE_OK (integer_type_node
))
2255 return unsignedp
? unsigned_type_node
: integer_type_node
;
2256 if (TYPE_OK (short_integer_type_node
))
2257 return unsignedp
? short_unsigned_type_node
: short_integer_type_node
;
2258 if (TYPE_OK (long_integer_type_node
))
2259 return unsignedp
? long_unsigned_type_node
: long_integer_type_node
;
2260 if (TYPE_OK (long_long_integer_type_node
))
2262 ? long_long_unsigned_type_node
2263 : long_long_integer_type_node
);
2264 if (int128_integer_type_node
&& TYPE_OK (int128_integer_type_node
))
2266 ? int128_unsigned_type_node
2267 : int128_integer_type_node
);
2269 #if HOST_BITS_PER_WIDE_INT >= 64
2270 if (TYPE_OK (intTI_type_node
))
2271 return unsignedp
? unsigned_intTI_type_node
: intTI_type_node
;
2273 if (TYPE_OK (intDI_type_node
))
2274 return unsignedp
? unsigned_intDI_type_node
: intDI_type_node
;
2275 if (TYPE_OK (intSI_type_node
))
2276 return unsignedp
? unsigned_intSI_type_node
: intSI_type_node
;
2277 if (TYPE_OK (intHI_type_node
))
2278 return unsignedp
? unsigned_intHI_type_node
: intHI_type_node
;
2279 if (TYPE_OK (intQI_type_node
))
2280 return unsignedp
? unsigned_intQI_type_node
: intQI_type_node
;
2282 #undef GIMPLE_FIXED_TYPES
2283 #undef GIMPLE_FIXED_MODE_TYPES
2284 #undef GIMPLE_FIXED_TYPES_SAT
2285 #undef GIMPLE_FIXED_MODE_TYPES_SAT
2288 return build_nonstandard_integer_type (TYPE_PRECISION (type
), unsignedp
);
2292 /* Return an unsigned type the same as TYPE in other respects. */
2295 gimple_unsigned_type (tree type
)
2297 return gimple_signed_or_unsigned_type (true, type
);
2301 /* Return a signed type the same as TYPE in other respects. */
2304 gimple_signed_type (tree type
)
2306 return gimple_signed_or_unsigned_type (false, type
);
2310 /* Return the typed-based alias set for T, which may be an expression
2311 or a type. Return -1 if we don't do anything special. */
2314 gimple_get_alias_set (tree t
)
2318 /* Permit type-punning when accessing a union, provided the access
2319 is directly through the union. For example, this code does not
2320 permit taking the address of a union member and then storing
2321 through it. Even the type-punning allowed here is a GCC
2322 extension, albeit a common and useful one; the C standard says
2323 that such accesses have implementation-defined behavior. */
2325 TREE_CODE (u
) == COMPONENT_REF
|| TREE_CODE (u
) == ARRAY_REF
;
2326 u
= TREE_OPERAND (u
, 0))
2327 if (TREE_CODE (u
) == COMPONENT_REF
2328 && TREE_CODE (TREE_TYPE (TREE_OPERAND (u
, 0))) == UNION_TYPE
)
2331 /* That's all the expressions we handle specially. */
2335 /* For convenience, follow the C standard when dealing with
2336 character types. Any object may be accessed via an lvalue that
2337 has character type. */
2338 if (t
== char_type_node
2339 || t
== signed_char_type_node
2340 || t
== unsigned_char_type_node
)
2343 /* Allow aliasing between signed and unsigned variants of the same
2344 type. We treat the signed variant as canonical. */
2345 if (TREE_CODE (t
) == INTEGER_TYPE
&& TYPE_UNSIGNED (t
))
2347 tree t1
= gimple_signed_type (t
);
2349 /* t1 == t can happen for boolean nodes which are always unsigned. */
2351 return get_alias_set (t1
);
2358 /* Helper for gimple_ior_addresses_taken_1. */
2361 gimple_ior_addresses_taken_1 (gimple stmt ATTRIBUTE_UNUSED
,
2362 tree addr
, void *data
)
2364 bitmap addresses_taken
= (bitmap
)data
;
2365 addr
= get_base_address (addr
);
2369 bitmap_set_bit (addresses_taken
, DECL_UID (addr
));
2375 /* Set the bit for the uid of all decls that have their address taken
2376 in STMT in the ADDRESSES_TAKEN bitmap. Returns true if there
2377 were any in this stmt. */
2380 gimple_ior_addresses_taken (bitmap addresses_taken
, gimple stmt
)
2382 return walk_stmt_load_store_addr_ops (stmt
, addresses_taken
, NULL
, NULL
,
2383 gimple_ior_addresses_taken_1
);
2387 /* Return TRUE iff stmt is a call to a built-in function. */
2390 is_gimple_builtin_call (gimple stmt
)
2394 if (is_gimple_call (stmt
)
2395 && (callee
= gimple_call_fndecl (stmt
))
2396 && is_builtin_fn (callee
)
2397 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
2403 /* Return true when STMTs arguments match those of FNDECL. */
2406 validate_call (gimple stmt
, tree fndecl
)
2408 tree targs
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
2409 unsigned nargs
= gimple_call_num_args (stmt
);
2410 for (unsigned i
= 0; i
< nargs
; ++i
)
2412 /* Variadic args follow. */
2415 tree arg
= gimple_call_arg (stmt
, i
);
2416 if (INTEGRAL_TYPE_P (TREE_TYPE (arg
))
2417 && INTEGRAL_TYPE_P (TREE_VALUE (targs
)))
2419 else if (POINTER_TYPE_P (TREE_TYPE (arg
))
2420 && POINTER_TYPE_P (TREE_VALUE (targs
)))
2422 else if (TREE_CODE (TREE_TYPE (arg
))
2423 != TREE_CODE (TREE_VALUE (targs
)))
2425 targs
= TREE_CHAIN (targs
);
2427 if (targs
&& !VOID_TYPE_P (TREE_VALUE (targs
)))
2432 /* Return true when STMT is builtins call to CLASS. */
2435 gimple_call_builtin_p (gimple stmt
, enum built_in_class klass
)
2438 if (is_gimple_call (stmt
)
2439 && (fndecl
= gimple_call_fndecl (stmt
)) != NULL_TREE
2440 && DECL_BUILT_IN_CLASS (fndecl
) == klass
)
2441 return validate_call (stmt
, fndecl
);
2445 /* Return true when STMT is builtins call to CODE of CLASS. */
2448 gimple_call_builtin_p (gimple stmt
, enum built_in_function code
)
2451 if (is_gimple_call (stmt
)
2452 && (fndecl
= gimple_call_fndecl (stmt
)) != NULL_TREE
2453 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
2454 && DECL_FUNCTION_CODE (fndecl
) == code
)
2455 return validate_call (stmt
, fndecl
);
2459 /* Return true if STMT clobbers memory. STMT is required to be a
2463 gimple_asm_clobbers_memory_p (const_gimple stmt
)
2467 for (i
= 0; i
< gimple_asm_nclobbers (stmt
); i
++)
2469 tree op
= gimple_asm_clobber_op (stmt
, i
);
2470 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op
)), "memory") == 0)
2477 /* Dump bitmap SET (assumed to contain VAR_DECLs) to FILE. */
2480 dump_decl_set (FILE *file
, bitmap set
)
2487 fprintf (file
, "{ ");
2489 EXECUTE_IF_SET_IN_BITMAP (set
, 0, i
, bi
)
2491 fprintf (file
, "D.%u", i
);
2492 fprintf (file
, " ");
2495 fprintf (file
, "}");
2498 fprintf (file
, "NIL");
2501 /* Return true when CALL is a call stmt that definitely doesn't
2502 free any memory or makes it unavailable otherwise. */
2504 nonfreeing_call_p (gimple call
)
2506 if (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
)
2507 && gimple_call_flags (call
) & ECF_LEAF
)
2508 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call
)))
2510 /* Just in case these become ECF_LEAF in the future. */
2512 case BUILT_IN_TM_FREE
:
2513 case BUILT_IN_REALLOC
:
2514 case BUILT_IN_STACK_RESTORE
:
2523 /* Callback for walk_stmt_load_store_ops.
2525 Return TRUE if OP will dereference the tree stored in DATA, FALSE
2528 This routine only makes a superficial check for a dereference. Thus
2529 it must only be used if it is safe to return a false negative. */
2531 check_loadstore (gimple stmt ATTRIBUTE_UNUSED
, tree op
, void *data
)
2533 if ((TREE_CODE (op
) == MEM_REF
|| TREE_CODE (op
) == TARGET_MEM_REF
)
2534 && operand_equal_p (TREE_OPERAND (op
, 0), (tree
)data
, 0))
2539 /* If OP can be inferred to be non-zero after STMT executes, return true. */
2542 infer_nonnull_range (gimple stmt
, tree op
)
2544 /* We can only assume that a pointer dereference will yield
2545 non-NULL if -fdelete-null-pointer-checks is enabled. */
2546 if (!flag_delete_null_pointer_checks
2547 || !POINTER_TYPE_P (TREE_TYPE (op
))
2548 || gimple_code (stmt
) == GIMPLE_ASM
)
2551 if (walk_stmt_load_store_ops (stmt
, (void *)op
,
2552 check_loadstore
, check_loadstore
))
2555 if (is_gimple_call (stmt
) && !gimple_call_internal_p (stmt
))
2557 tree fntype
= gimple_call_fntype (stmt
);
2558 tree attrs
= TYPE_ATTRIBUTES (fntype
);
2559 for (; attrs
; attrs
= TREE_CHAIN (attrs
))
2561 attrs
= lookup_attribute ("nonnull", attrs
);
2563 /* If "nonnull" wasn't specified, we know nothing about
2565 if (attrs
== NULL_TREE
)
2568 /* If "nonnull" applies to all the arguments, then ARG
2569 is non-null if it's in the argument list. */
2570 if (TREE_VALUE (attrs
) == NULL_TREE
)
2572 for (unsigned int i
= 0; i
< gimple_call_num_args (stmt
); i
++)
2574 if (operand_equal_p (op
, gimple_call_arg (stmt
, i
), 0)
2575 && POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (stmt
, i
))))
2581 /* Now see if op appears in the nonnull list. */
2582 for (tree t
= TREE_VALUE (attrs
); t
; t
= TREE_CHAIN (t
))
2584 int idx
= TREE_INT_CST_LOW (TREE_VALUE (t
)) - 1;
2585 tree arg
= gimple_call_arg (stmt
, idx
);
2586 if (operand_equal_p (op
, arg
, 0))
2592 /* If this function is marked as returning non-null, then we can
2593 infer OP is non-null if it is used in the return statement. */
2594 if (gimple_code (stmt
) == GIMPLE_RETURN
2595 && gimple_return_retval (stmt
)
2596 && operand_equal_p (gimple_return_retval (stmt
), op
, 0)
2597 && lookup_attribute ("returns_nonnull",
2598 TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
))))
2604 /* Compare two case labels. Because the front end should already have
2605 made sure that case ranges do not overlap, it is enough to only compare
2606 the CASE_LOW values of each case label. */
2609 compare_case_labels (const void *p1
, const void *p2
)
2611 const_tree
const case1
= *(const_tree
const*)p1
;
2612 const_tree
const case2
= *(const_tree
const*)p2
;
2614 /* The 'default' case label always goes first. */
2615 if (!CASE_LOW (case1
))
2617 else if (!CASE_LOW (case2
))
2620 return tree_int_cst_compare (CASE_LOW (case1
), CASE_LOW (case2
));
2623 /* Sort the case labels in LABEL_VEC in place in ascending order. */
2626 sort_case_labels (vec
<tree
> label_vec
)
2628 label_vec
.qsort (compare_case_labels
);
2631 /* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
2633 LABELS is a vector that contains all case labels to look at.
2635 INDEX_TYPE is the type of the switch index expression. Case labels
2636 in LABELS are discarded if their values are not in the value range
2637 covered by INDEX_TYPE. The remaining case label values are folded
2640 If a default case exists in LABELS, it is removed from LABELS and
2641 returned in DEFAULT_CASEP. If no default case exists, but the
2642 case labels already cover the whole range of INDEX_TYPE, a default
2643 case is returned pointing to one of the existing case labels.
2644 Otherwise DEFAULT_CASEP is set to NULL_TREE.
2646 DEFAULT_CASEP may be NULL, in which case the above comment doesn't
2647 apply and no action is taken regardless of whether a default case is
2651 preprocess_case_label_vec_for_gimple (vec
<tree
> labels
,
2653 tree
*default_casep
)
2655 tree min_value
, max_value
;
2656 tree default_case
= NULL_TREE
;
2660 min_value
= TYPE_MIN_VALUE (index_type
);
2661 max_value
= TYPE_MAX_VALUE (index_type
);
2662 while (i
< labels
.length ())
2664 tree elt
= labels
[i
];
2665 tree low
= CASE_LOW (elt
);
2666 tree high
= CASE_HIGH (elt
);
2667 bool remove_element
= FALSE
;
2671 gcc_checking_assert (TREE_CODE (low
) == INTEGER_CST
);
2672 gcc_checking_assert (!high
|| TREE_CODE (high
) == INTEGER_CST
);
2674 /* This is a non-default case label, i.e. it has a value.
2676 See if the case label is reachable within the range of
2677 the index type. Remove out-of-range case values. Turn
2678 case ranges into a canonical form (high > low strictly)
2679 and convert the case label values to the index type.
2681 NB: The type of gimple_switch_index() may be the promoted
2682 type, but the case labels retain the original type. */
2686 /* This is a case range. Discard empty ranges.
2687 If the bounds or the range are equal, turn this
2688 into a simple (one-value) case. */
2689 int cmp
= tree_int_cst_compare (high
, low
);
2691 remove_element
= TRUE
;
2698 /* If the simple case value is unreachable, ignore it. */
2699 if ((TREE_CODE (min_value
) == INTEGER_CST
2700 && tree_int_cst_compare (low
, min_value
) < 0)
2701 || (TREE_CODE (max_value
) == INTEGER_CST
2702 && tree_int_cst_compare (low
, max_value
) > 0))
2703 remove_element
= TRUE
;
2705 low
= fold_convert (index_type
, low
);
2709 /* If the entire case range is unreachable, ignore it. */
2710 if ((TREE_CODE (min_value
) == INTEGER_CST
2711 && tree_int_cst_compare (high
, min_value
) < 0)
2712 || (TREE_CODE (max_value
) == INTEGER_CST
2713 && tree_int_cst_compare (low
, max_value
) > 0))
2714 remove_element
= TRUE
;
2717 /* If the lower bound is less than the index type's
2718 minimum value, truncate the range bounds. */
2719 if (TREE_CODE (min_value
) == INTEGER_CST
2720 && tree_int_cst_compare (low
, min_value
) < 0)
2722 low
= fold_convert (index_type
, low
);
2724 /* If the upper bound is greater than the index type's
2725 maximum value, truncate the range bounds. */
2726 if (TREE_CODE (max_value
) == INTEGER_CST
2727 && tree_int_cst_compare (high
, max_value
) > 0)
2729 high
= fold_convert (index_type
, high
);
2731 /* We may have folded a case range to a one-value case. */
2732 if (tree_int_cst_equal (low
, high
))
2737 CASE_LOW (elt
) = low
;
2738 CASE_HIGH (elt
) = high
;
2742 gcc_assert (!default_case
);
2744 /* The default case must be passed separately to the
2745 gimple_build_switch routine. But if DEFAULT_CASEP
2746 is NULL, we do not remove the default case (it would
2747 be completely lost). */
2749 remove_element
= TRUE
;
2753 labels
.ordered_remove (i
);
2759 if (!labels
.is_empty ())
2760 sort_case_labels (labels
);
2762 if (default_casep
&& !default_case
)
2764 /* If the switch has no default label, add one, so that we jump
2765 around the switch body. If the labels already cover the whole
2766 range of the switch index_type, add the default label pointing
2767 to one of the existing labels. */
2769 && TYPE_MIN_VALUE (index_type
)
2770 && TYPE_MAX_VALUE (index_type
)
2771 && tree_int_cst_equal (CASE_LOW (labels
[0]),
2772 TYPE_MIN_VALUE (index_type
)))
2774 tree low
, high
= CASE_HIGH (labels
[len
- 1]);
2776 high
= CASE_LOW (labels
[len
- 1]);
2777 if (tree_int_cst_equal (high
, TYPE_MAX_VALUE (index_type
)))
2779 for (i
= 1; i
< len
; i
++)
2781 high
= CASE_LOW (labels
[i
]);
2782 low
= CASE_HIGH (labels
[i
- 1]);
2784 low
= CASE_LOW (labels
[i
- 1]);
2785 if ((TREE_INT_CST_LOW (low
) + 1
2786 != TREE_INT_CST_LOW (high
))
2787 || (TREE_INT_CST_HIGH (low
)
2788 + (TREE_INT_CST_LOW (high
) == 0)
2789 != TREE_INT_CST_HIGH (high
)))
2794 tree label
= CASE_LABEL (labels
[0]);
2795 default_case
= build_case_label (NULL_TREE
, NULL_TREE
,
2803 *default_casep
= default_case
;
2806 /* Set the location of all statements in SEQ to LOC. */
2809 gimple_seq_set_location (gimple_seq seq
, location_t loc
)
2811 for (gimple_stmt_iterator i
= gsi_start (seq
); !gsi_end_p (i
); gsi_next (&i
))
2812 gimple_set_location (gsi_stmt (i
), loc
);