+2014-11-19 David Malcolm <dmalcolm@redhat.com>
+
+ Merger of git branch "gimple-classes-v2-option-3".
+
+ * ChangeLog.gimple-classes: New.
+
+ * coretypes.h (struct gcond): Add forward decl.
+ (struct gdebug): Likewise.
+ (struct ggoto): Likewise.
+ (struct glabel): Likewise.
+ (struct gswitch): Likewise.
+ (struct gassign): Likewise.
+ (struct gasm): Likewise.
+ (struct gcall): Likewise.
+ (struct gtransaction): Likewise.
+ (struct greturn): Likewise.
+ (struct gbind): Likewise.
+ (struct gcatch): Likewise.
+ (struct geh_filter): Likewise.
+ (struct geh_mnt): Likewise.
+ (struct geh_else): Likewise.
+ (struct gresx): Likewise.
+ (struct geh_dispatch): Likewise.
+ (struct gphi): Likewise.
+ (struct gtry): Likewise.
+ (struct gomp_atomic_load): Likewise.
+ (struct gomp_atomic_store): Likewise.
+ (struct gomp_continue): Likewise.
+ (struct gomp_critical): Likewise.
+ (struct gomp_for): Likewise.
+ (struct gomp_parallel): Likewise.
+ (struct gomp_task): Likewise.
+ (struct gomp_sections): Likewise.
+ (struct gomp_single): Likewise.
+ (struct gomp_target): Likewise.
+ (struct gomp_teams): Likewise.
+
+ * doc/gimple.texi (Class hierarchy of GIMPLE statements): Update
+ for renaming of gimple subclasses.
+
+ * gdbhooks.py: Update.
+
+ * gimple-iterator.c (gsi_for_phi): New.
+ (gsi_start_phis): Strengthen return type from gimple_stmt_iterator
+ to gphi_iterator.
+ * gimple-iterator.h (struct gphi_iterator): New subclass of
+ gimple_stmt_iterator.
+ (gsi_for_phi): New prototype.
+ (gsi_start_phis): Strengthen return type from gimple_stmt_iterator
+ to gphi_iterator.
+ (gsi_next_nonvirtual_phi): Strengthen param from
+ gimple_stmt_iterator * to gphi_iterator *, and local "phi" from
+ gimple to gphi *.
+
+ * gsstruct.def: Update for renamings of classes.
+
+ * gimple.c (gimple_build_return): Strengthen return type from
+ gimple to greturn *.
+ (gimple_call_reset_alias_info): Strengthen param to gcall *.
+ (gimple_build_call_1): Strengthen return type from gimple to
+ gcall *.
+ (gimple_build_call_vec): Likewise.
+ (gimple_build_call): Likewise.
+ (gimple_build_call_valist): Likewise.
+ (gimple_build_call_internal_1): Likewise.
+ (gimple_build_call_internal): Likewise.
+ (gimple_build_call_internal_vec): Likewise.
+ (gimple_build_call_from_tree): Likewise.
+ (gimple_build_assign_stat): Strengthen return type from gimple to
+ gassign *.
+ (gimple_build_assign_with_ops): Likewise.
+ (gimple_build_assign_with_ops): Likewise.
+ (gimple_build_cond): Strengthen return type from gimple to
+ gcond *.
+ (gimple_build_cond_from_tree): Likewise.
+ (gimple_cond_set_condition_from_tree): Require a gcond *.
+ (gimple_build_label): Strengthen return type from gimple to
+ glabel *.
+ (gimple_build_goto): Strengthen return type from gimple to
+ ggoto *.
+ (gimple_build_bind): Strengthen return type from gimple to
+ gbind *.
+ (gimple_build_asm_1): Strengthen return type from gimple to
+ gasm *.
+ (gimple_build_asm_vec): Likewise.
+ (gimple_build_catch): Strengthen return type from gimple to
+ gcatch *.
+ (gimple_build_eh_filter): Strengthen return type from gimple to
+ geh_filter *.
+ (gimple_build_eh_must_not_throw): Strengthen return type from
+ gimple to geh_mnt *.
+ (gimple_build_eh_else): Strengthen return type from gimple to
+ geh_else *.
+ (gimple_build_try): Update for renaming of gimple_statement_try to
+ gtry.
+ (gimple_build_resx): Strengthen return type from gimple to
+ gresx *.
+ (gimple_build_switch_nlabels): Strengthen return type from gimple
+ to gswitch *.
+ (gimple_build_switch): Likewise.
+ (gimple_build_eh_dispatch): Strengthen return type from gimple to
+ geh_dispatch *.
+ (gimple_build_debug_bind_stat): Strengthen return type from gimple
+ to gdebug *.
+ (gimple_build_debug_source_bind_stat): Strengthen return type from
+ gimple to gdebug *.
+ (gimple_build_omp_critical): Strengthen return type from gimple to
+ gomp_critical *.
+ (gimple_build_omp_for): Strengthen return type from gimple to
+ gomp_for *.
+ (gimple_build_omp_parallel): Strengthen return type from gimple to
+ gomp_parallel *.
+ (gimple_build_omp_task): Strengthen return type from gimple to
+ gomp_task *.
+ (gimple_build_omp_continue): Strengthen return type from gimple to
+ gomp_continue *.
+ (gimple_build_omp_sections): Strengthen return type from gimple to
+ gomp_sections *.
+ (gimple_build_omp_single): Strengthen return type from gimple to
+ gomp_single *.
+ (gimple_build_omp_target): Strengthen return type from gimple to
+ gomp_target *.
+ (gimple_build_omp_teams): Strengthen return type from gimple to
+ gomp_teams *.
+ (gimple_build_omp_atomic_load): Strengthen return type from gimple
+ to gomp_atomic_load *.
+ (gimple_build_omp_atomic_store): Strengthen return type from gimple
+ to gomp_atomic_store *.
+ (gimple_build_transaction): Strengthen return type from gimple
+ to gtransaction *.
+ (empty_stmt_p): Replace check for GIMPLE_BIND with a dyn_cast.
+ (gimple_call_fnspec): Require a const gcall *.
+ (gimple_call_arg_flags): Likewise.
+ (gimple_call_return_flags): Likewise.
+ (gimple_set_bb): Add a checked cast.
+ (gimple_copy): Within the cases, add locals of the appropriate
+ subclass and use in place of "stmt" and "copy" for typesafety.
+ (gimple_has_side_effects): Add a checked cast.
+ (gimple_could_trap_p_1): Likewise.
+ (gimple_call_copy_skip_args): Require a gcall *, and return one.
+ (gimple_asm_clobbers_memory_p): Require a const gasm *.
+ (infer_nonnull_range): Replace a check for GIMPLE_RETURN with a
+	dyn_cast, introducing local "return_stmt" and using it in place
+ of "stmt".
+
+ * gimple.h (gimple_vec): Eliminate this typedef.
+ (struct gimple_statement_call): Rename to...
+ (struct gcall): ...this.
+ (struct gimple_statement_bind): Rename to...
+ (struct gbind): ...this.
+ (struct gimple_statement_catch): Rename to...
+ (struct gcatch): ...this.
+ (struct gimple_statement_eh_filter): Rename to...
+ (struct geh_filter): ...this.
+ (struct gimple_statement_eh_else): Rename to...
+ (struct geh_else): ...this.
+ (struct gimple_statement_eh_mnt): Rename to...
+ (struct geh_mnt): ...this.
+ (struct gimple_statement_phi): Rename to...
+ (struct gphi): ...this.
+ (struct gimple_statement_resx): Rename to...
+ (struct gresx): ...this.
+ (struct gimple_statement_eh_dispatch): Rename to...
+ (struct geh_dispatch): ...this.
+ (struct gimple_statement_try): Rename to...
+ (struct gtry): ...this.
+ (struct gimple_statement_asm): Rename to...
+ (struct gasm): ...this.
+ (struct gimple_statement_omp_critical): Rename to...
+ (struct gomp_critical): ...this.
+ (struct gimple_statement_omp_for): Rename to...
+ (struct gomp_for): ...this.
+ (struct gimple_statement_omp_parallel): Rename to...
+ (struct gomp_parallel): ...this.
+ (struct gimple_statement_omp_target): Rename to...
+ (struct gomp_target): ...this.
+ (struct gimple_statement_omp_task): Rename to...
+ (struct gomp_task): ...this.
+ (struct gimple_statement_omp_sections): Rename to...
+ (struct gomp_sections): ...this.
+ (struct gimple_statement_omp_continue): Rename to...
+ (struct gomp_continue): ...this.
+ (struct gimple_statement_omp_single): Rename to...
+ (struct gomp_single): ...this.
+ (struct gimple_statement_omp_teams): Rename to...
+ (struct gomp_teams): ...this.
+ (struct gimple_statement_omp_atomic_load): Rename to...
+ (struct gomp_atomic_load): ...this.
+	(struct gimple_statement_omp_atomic_store): Rename to...
+	(struct gomp_atomic_store): ...this.
+ (struct gimple_statement_transaction): Rename to...
+ (struct gtransaction): ...this.
+ (struct gcond): New subclass.
+ (struct gdebug): New subclass.
+ (struct ggoto): New subclass.
+ (struct glabel): New subclass.
+ (struct gswitch): New subclass.
+ (struct gassign): New subclass.
+ (struct greturn): New subclass.
+ (is_a_helper <gimple_statement_asm *>::test): Rename to...
+ (is_a_helper <gasm *>::test): ...this.
+ (is_a_helper <gimple_statement_bind *>::test): Rename to...
+ (is_a_helper <gbind *>::test): ...this.
+ (is_a_helper <gassign *>::test): New.
+ (is_a_helper <gimple_statement_call *>::test): Rename to...
+ (is_a_helper <gcall *>::test): ...this.
+ (is_a_helper <gimple_statement_catch *>::test): Rename to...
+ (is_a_helper <gcatch *>::test): ...this.
+ (is_a_helper <gimple_statement_resx *>::test): Rename to...
+ (is_a_helper <gresx *>::test): ...this.
+ (is_a_helper <gcond *>::test): New.
+ (is_a_helper <gdebug *>::test): New.
+ (is_a_helper <ggoto *>::test): New.
+ (is_a_helper <glabel *>::test): New.
+ (is_a_helper <gimple_statement_eh_dispatch *>::test): Rename to...
+ (is_a_helper <geh_dispatch *>::test): ...this.
+ (is_a_helper <gimple_statement_eh_else *>::test): Rename to...
+ (is_a_helper <geh_else *>::test): ...this.
+ (is_a_helper <gimple_statement_eh_filter *>::test): Rename to...
+ (is_a_helper <geh_filter *>::test): ...this.
+ (is_a_helper <gimple_statement_eh_mnt *>::test): Rename to...
+ (is_a_helper <geh_mnt *>::test): ...this.
+ (is_a_helper <gimple_statement_omp_atomic_load *>::test): Rename to...
+ (is_a_helper <gomp_atomic_load *>::test): ...this.
+ (is_a_helper <gimple_statement_omp_atomic_store *>::test): Rename to...
+ (is_a_helper <gomp_atomic_store *>::test): ...this.
+ (is_a_helper <gimple_statement_omp_continue *>::test): Rename to...
+ (is_a_helper <gomp_continue *>::test): ...this.
+ (is_a_helper <gimple_statement_omp_critical *>::test): Rename to...
+ (is_a_helper <gomp_critical *>::test): ...this.
+ (is_a_helper <gimple_statement_omp_for *>::test): Rename to...
+ (is_a_helper <gomp_for *>::test): ...this.
+ (is_a_helper <gimple_statement_omp_parallel *>::test): Rename to...
+ (is_a_helper <gomp_parallel *>::test): ...this.
+ (is_a_helper <gimple_statement_omp_target *>::test): Rename to...
+ (is_a_helper <gomp_target *>::test): ...this.
+ (is_a_helper <gimple_statement_omp_sections *>::test): Rename to...
+ (is_a_helper <gomp_sections *>::test): ...this.
+ (is_a_helper <gimple_statement_omp_single *>::test): Rename to...
+ (is_a_helper <gomp_single *>::test): ...this.
+ (is_a_helper <gimple_statement_omp_teams *>::test): Rename to...
+ (is_a_helper <gomp_teams *>::test): ...this.
+ (is_a_helper <gimple_statement_omp_task *>::test): Rename to...
+ (is_a_helper <gomp_task *>::test): ...this.
+ (is_a_helper <gimple_statement_phi *>::test): Rename to...
+ (is_a_helper <gphi *>::test): ...this.
+ (is_a_helper <gimple_statement_transaction *>::test): Rename to...
+ (is_a_helper <gtransaction *>::test): ...this.
+ (is_a_helper <greturn *>::test): New.
+ (is_a_helper <gswitch *>::test): New.
+ (is_a_helper <gimple_statement_try *>::test): Rename to...
+ (is_a_helper <gtry *>::test): ...this.
+ (is_a_helper <const gimple_statement_asm *>::test): Rename to...
+ (is_a_helper <const gasm *>::test): ...this.
+ (is_a_helper <const gimple_statement_bind *>::test): Rename to...
+ (is_a_helper <const gbind *>::test): ...this.
+ (is_a_helper <const gimple_statement_call *>::test): Rename to...
+ (is_a_helper <const gcall *>::test): ...this.
+ (is_a_helper <const gimple_statement_catch *>::test): Rename to...
+ (is_a_helper <const gcatch *>::test): ...this.
+ (is_a_helper <const gimple_statement_resx *>::test): Rename to...
+ (is_a_helper <const gresx *>::test): ...this.
+ (is_a_helper <const gimple_statement_eh_dispatch *>::test): Rename to...
+ (is_a_helper <const geh_dispatch *>::test): ...this.
+ (is_a_helper <const gimple_statement_eh_filter *>::test): Rename to...
+ (is_a_helper <const geh_filter *>::test): ...this.
+ (is_a_helper <const gimple_statement_omp_atomic_load *>::test):
+ Rename to...
+ (is_a_helper <const gomp_atomic_load *>::test): ...this.
+ (is_a_helper <const gimple_statement_omp_atomic_store *>::test):
+ Rename to...
+ (is_a_helper <const gomp_atomic_store *>::test): ...this.
+ (is_a_helper <const gimple_statement_omp_continue *>::test):
+ Rename to...
+ (is_a_helper <const gomp_continue *>::test): ...this.
+ (is_a_helper <const gimple_statement_omp_critical *>::test):
+ Rename to...
+ (is_a_helper <const gomp_critical *>::test): ...this.
+ (is_a_helper <const gimple_statement_omp_for *>::test): Rename to...
+ (is_a_helper <const gomp_for *>::test): ...this.
+ (is_a_helper <const gimple_statement_omp_parallel *>::test):
+ Rename to...
+ (is_a_helper <const gomp_parallel *>::test): ...this.
+ (is_a_helper <const gimple_statement_omp_target *>::test): Rename to...
+ (is_a_helper <const gomp_target *>::test): ...this.
+ (is_a_helper <const gimple_statement_omp_sections *>::test):
+ Rename to...
+ (is_a_helper <const gomp_sections *>::test): ...this.
+ (is_a_helper <const gimple_statement_omp_single *>::test): Rename to...
+ (is_a_helper <const gomp_single *>::test): ...this.
+ (is_a_helper <const gimple_statement_omp_teams *>::test): Rename to...
+ (is_a_helper <const gomp_teams *>::test): ...this.
+ (is_a_helper <const gimple_statement_omp_task *>::test): Rename to...
+ (is_a_helper <const gomp_task *>::test): ...this.
+ (is_a_helper <const gimple_statement_phi *>::test): Rename to...
+ (is_a_helper <const gphi *>::test): ...this.
+ (is_a_helper <const gimple_statement_transaction *>::test): Rename to...
+ (is_a_helper <const gtransaction *>::test): ...this.
+ (gimple_build_return): Strengthen return type to greturn *.
+ (gimple_call_reset_alias_info): Require a gcall *.
+ (gimple_build_call_vec): Return a gcall *.
+ (gimple_build_call): Likewise.
+ (gimple_build_call_valist): Likewise.
+ (gimple_build_call_internal): Likewise.
+ (gimple_build_call_internal_vec): Likewise.
+ (gimple_build_call_from_tree): Likewise.
+ (gimple_build_assign_stat): Return a gassign *.
+ (gimple_build_assign_with_ops): Likewise.
+ (gimple_build_cond): Return a gcond *.
+ (gimple_build_cond_from_tree): Likewise.
+ (gimple_cond_set_condition_from_tree): Require a gcond *.
+ (gimple_build_label): Return a glabel *.
+ (gimple_build_goto): Return a ggoto *.
+ (gimple_build_bind): Return a gbind *.
+ (gimple_build_asm_vec): Return a gasm *.
+ (gimple_build_catch): Return a gcatch *.
+ (gimple_build_eh_filter): Return a geh_filter *.
+ (gimple_build_eh_must_not_throw): Return a geh_mnt *.
+ (gimple_build_eh_else): Return a geh_else *.
+ (gimple_build_try): Return a gtry *.
+ (gimple_build_resx): Return a gresx *.
+ (gimple_build_switch_nlabels): Return a gswitch *.
+ (gimple_build_switch): Return a gswitch *.
+ (gimple_build_eh_dispatch): Return a geh_dispatch *.
+ (gimple_build_debug_bind_stat): Return a gdebug *.
+ (gimple_build_debug_source_bind_stat): Return a gdebug *.
+ (gimple_build_omp_critical): Return a gomp_critical *.
+ (gimple_build_omp_for): Return a gomp_for *.
+ (gimple_build_omp_parallel): Return a gomp_parallel *.
+ (gimple_build_omp_task): Return a gomp_task *.
+ (gimple_build_omp_continue): Return a gomp_continue *.
+ (gimple_build_omp_sections): Return a gomp_sections *.
+ (gimple_build_omp_single): Return a gomp_single *.
+ (gimple_build_omp_target): Return a gomp_target *.
+ (gimple_build_omp_teams): Return a gomp_teams *.
+ (gimple_build_omp_atomic_load): Return a gomp_atomic_load *.
+ (gimple_build_omp_atomic_store): Return a gomp_atomic_store *.
+ (gimple_build_transaction): Return a gtransaction *.
+ (gimple_call_arg_flags): Require a const gcall *.
+ (gimple_call_return_flags): Likewise.
+ (gimple_call_copy_skip_args): Require and return a gcall *.
+ (gimple_asm_clobbers_memory_p): Require a const gasm *.
+ (gimple_seq_first_stmt_as_a_bind): New.
+ (gimple_assign_nontemporal_move_p): Require a const gassign *
+ rather than a const_gimple.
+ (gimple_call_internal_fn): Update for renaming to gcall.
+ (gimple_call_fntype): Likewise.
+ (gimple_call_set_fntype): Require a gcall * rather than a gimple.
+ (gimple_call_set_fn): Likewise.
+ (gimple_call_set_internal_fn): Likewise.
+ (gimple_call_set_chain): Likewise.
+ (gimple_call_set_tail): Likewise.
+ (gimple_call_tail_p): Likewise.
+ (gimple_call_set_return_slot_opt): Likewise.
+ (gimple_call_return_slot_opt_p): Likewise.
+ (gimple_call_set_from_thunk): Likewise.
+ (gimple_call_from_thunk_p): Likewise.
+ (gimple_call_set_va_arg_pack): Likewise.
+ (gimple_call_va_arg_pack_p): Likewise.
+ (gimple_call_set_nothrow): Likewise.
+ (gimple_call_nothrow_p): Likewise.
+ (gimple_call_set_alloca_for_var): Likewise.
+ (gimple_call_alloca_for_var_p): Likewise.
+ (gimple_call_use_set): Likewise.
+ (gimple_call_clobber_set): Likewise.
+ (gimple_call_return_type): Require a const gcall * rather than a
+ const_gimple.
+ (gimple_call_chain_ptr): Likewise.
+ (gimple_call_copy_flags): Require a pair of gcall *.
+	(gimple_cond_set_code): Require a gcond * rather than a gimple.
+ (gimple_cond_set_lhs): Likewise.
+ (gimple_cond_set_rhs): Likewise.
+ (gimple_cond_set_true_label): Likewise.
+ (gimple_cond_set_false_label): Likewise.
+ (gimple_cond_make_false): Likewise.
+ (gimple_cond_make_true): Likewise.
+ (gimple_cond_lhs_ptr): Require a const gcond * rather than a
+ const_gimple.
+ (gimple_cond_rhs_ptr): Likewise.
+ (gimple_cond_true_label): Likewise.
+ (gimple_cond_false_label): Likewise.
+ (gimple_cond_true_p): Likewise.
+ (gimple_cond_false_p): Likewise.
+ (gimple_cond_set_condition): Likewise.
+ (gimple_label_label): Require a const glabel *.
+ (gimple_label_set_label): Require a glabel *.
+ (gimple_goto_set_dest): Require a ggoto *.
+ (gimple_bind_vars): Require a const gbind *.
+ (gimple_bind_block): Likewise.
+ (gimple_bind_set_vars): Require a gbind *.
+ (gimple_bind_append_vars): Likewise.
+ (gimple_bind_body_ptr): Likewise.
+ (gimple_bind_body): Likewise.
+ (gimple_bind_set_body): Likewise.
+ (gimple_bind_add_stmt): Likewise.
+ (gimple_bind_add_seq): Likewise.
+ (gimple_bind_set_block): Likewise.
+ (gimple_asm_ninputs): Require a const gasm *.
+ (gimple_asm_noutputs): Likewise.
+ (gimple_asm_nclobbers): Likewise.
+ (gimple_asm_nlabels): Likewise.
+ (gimple_asm_input_op): Likewise.
+ (gimple_asm_input_op_ptr): Likewise.
+ (gimple_asm_output_op): Likewise.
+ (gimple_asm_output_op_ptr): Likewise.
+ (gimple_asm_clobber_op): Likewise.
+ (gimple_asm_label_op): Likewise.
+ (gimple_asm_string): Likewise.
+ (gimple_asm_volatile_p): Likewise.
+ (gimple_asm_input_p): Likewise.
+ (gimple_asm_set_input_op): Require a gasm *.
+ (gimple_asm_set_output_op): Likewise.
+ (gimple_asm_set_clobber_op): Likewise.
+ (gimple_asm_set_label_op): Likewise.
+ (gimple_asm_set_volatile): Likewise.
+ (gimple_asm_set_input): Likewise.
+ (gimple_catch_types): Require a const gcatch *.
+ (gimple_catch_types_ptr): Require a gcatch *.
+ (gimple_catch_handler_ptr): Likewise.
+ (gimple_catch_handler): Likewise.
+ (gimple_catch_set_types): Likewise.
+ (gimple_catch_set_handler): Likewise.
+ (gimple_eh_filter_types): Update for renaming of subclass to
+ geh_filter.
+ (gimple_eh_filter_types_ptr): Likewise.
+ (gimple_eh_filter_failure_ptr): Likewise.
+ (gimple_eh_filter_set_types): Require a geh_filter *.
+ (gimple_eh_filter_set_failure): Likewise.
+ (gimple_eh_must_not_throw_fndecl): Require a geh_mnt *.
+ (gimple_eh_must_not_throw_set_fndecl): Likewise.
+ (gimple_eh_else_n_body_ptr): Require a geh_else *.
+ (gimple_eh_else_n_body): Likewise.
+ (gimple_eh_else_e_body_ptr): Likewise.
+ (gimple_eh_else_e_body): Likewise.
+ (gimple_eh_else_set_n_body): Likewise.
+ (gimple_eh_else_set_e_body): Likewise.
+ (gimple_try_set_kind): Require a gtry *.
+ (gimple_try_set_catch_is_cleanup): Likewise.
+ (gimple_try_set_eval): Likewise.
+ (gimple_try_set_cleanup): Likewise.
+ (gimple_try_eval_ptr): Update for renaming of subclass to gtry.
+ (gimple_try_cleanup_ptr): Likewise.
+ (gimple_phi_capacity): Update for renaming of subclass to gphi.
+ (gimple_phi_num_args): Likewise.
+ (gimple_phi_result): Likewise.
+ (gimple_phi_result_ptr): Likewise.
+ (gimple_phi_arg): Likewise.
+ (gimple_phi_set_result): Require a gphi *.
+ (gimple_phi_set_arg): Likewise.
+ (gimple_phi_arg_def_ptr): Likewise.
+ (gimple_phi_arg_edge): Likewise.
+ (gimple_phi_arg_location): Likewise.
+ (gimple_phi_arg_location_from_edge): Likewise.
+ (gimple_phi_arg_set_location): Likewise.
+ (gimple_phi_arg_has_location): Likewise.
+ (gimple_resx_region): Require a const gresx *.
+ (gimple_resx_set_region): Require a gresx *.
+ (gimple_eh_dispatch_region): Require a const geh_dispatch *.
+ (gimple_eh_dispatch_set_region): Require a geh_dispatch *.
+ (gimple_switch_num_labels): Require a const gswitch *.
+ (gimple_switch_set_num_labels): Likewise.
+ (gimple_switch_index): Likewise.
+ (gimple_switch_index_ptr): Likewise.
+ (gimple_switch_label): Likewise.
+ (gimple_switch_default_label): Likewise.
+ (gimple_switch_set_index): Require a gswitch *.
+ (gimple_switch_set_label): Likewise.
+ (gimple_switch_set_default_label): Likewise.
+ (gimple_omp_critical_name): Require a const gomp_critical *.
+ (gimple_omp_critical_name_ptr): Require a gomp_critical *.
+ (gimple_omp_critical_set_name): Likewise.
+ (gimple_omp_for_set_kind): Require a gomp_for *.
+ (gimple_omp_for_set_combined_p): Likewise.
+ (gimple_omp_for_set_combined_into_p): Likewise.
+ (gimple_omp_for_clauses): Update for renaming of subclass to
+ gomp_for.
+ (gimple_omp_for_clauses_ptr): Likewise.
+ (gimple_omp_for_set_clauses): Likewise.
+ (gimple_omp_for_collapse): Likewise.
+ (gimple_omp_for_index): Likewise.
+ (gimple_omp_for_index_ptr): Likewise.
+ (gimple_omp_for_set_index): Likewise.
+ (gimple_omp_for_initial): Likewise.
+ (gimple_omp_for_initial_ptr): Likewise.
+ (gimple_omp_for_set_initial): Likewise.
+ (gimple_omp_for_final): Likewise.
+ (gimple_omp_for_final_ptr): Likewise.
+ (gimple_omp_for_set_final): Likewise.
+ (gimple_omp_for_incr): Likewise.
+ (gimple_omp_for_incr_ptr): Likewise.
+ (gimple_omp_for_set_incr): Likewise.
+ (gimple_omp_for_pre_body): Likewise.
+ (gimple_omp_for_set_pre_body): Likewise.
+ (gimple_omp_parallel_clauses): Update for renaming of subclass to
+ gomp_parallel.
+ (gimple_omp_parallel_clauses_ptr): Require a gomp_parallel *.
+ (gimple_omp_parallel_set_clauses): Likewise.
+ (gimple_omp_parallel_child_fn_ptr): Likewise.
+ (gimple_omp_parallel_set_child_fn): Likewise.
+ (gimple_omp_parallel_data_arg_ptr): Likewise.
+ (gimple_omp_parallel_set_data_arg): Likewise.
+ (gimple_omp_parallel_child_fn): Require a const gomp_parallel *.
+ (gimple_omp_parallel_data_arg): Likewise.
+ (gimple_omp_task_clauses): Update for renaming of subclass to
+ gomp_task.
+ (gimple_omp_task_clauses_ptr): Likewise.
+ (gimple_omp_task_set_clauses): Likewise.
+ (gimple_omp_task_child_fn): Likewise.
+ (gimple_omp_task_child_fn_ptr): Likewise.
+ (gimple_omp_task_set_child_fn): Likewise.
+ (gimple_omp_task_data_arg): Likewise.
+ (gimple_omp_task_data_arg_ptr): Likewise.
+ (gimple_omp_task_set_data_arg): Likewise.
+ (gimple_omp_taskreg_clauses): Whitespace fixes.
+ (gimple_omp_taskreg_clauses_ptr): Likewise.
+ (gimple_omp_taskreg_set_clauses): Likewise.
+ (gimple_omp_taskreg_child_fn): Likewise.
+ (gimple_omp_taskreg_child_fn_ptr): Likewise.
+ (gimple_omp_taskreg_set_child_fn): Likewise.
+ (gimple_omp_taskreg_data_arg): Likewise.
+ (gimple_omp_taskreg_data_arg_ptr): Likewise.
+ (gimple_omp_taskreg_set_data_arg): Likewise.
+ (gimple_omp_task_copy_fn): Update for renaming of subclass to
+ gomp_task.
+ (gimple_omp_task_copy_fn_ptr): Likewise.
+ (gimple_omp_task_set_copy_fn): Likewise.
+ (gimple_omp_task_arg_size): Likewise.
+ (gimple_omp_task_arg_size_ptr): Likewise.
+ (gimple_omp_task_set_arg_size): Likewise.
+ (gimple_omp_task_arg_align): Likewise.
+ (gimple_omp_task_arg_align_ptr): Likewise.
+ (gimple_omp_task_set_arg_align): Likewise.
+ (gimple_omp_single_clauses): Update for renaming of subclass to
+ gomp_single.
+ (gimple_omp_single_clauses_ptr): Likewise.
+ (gimple_omp_single_set_clauses): Likewise.
+ (gimple_omp_target_clauses): Update for renaming of subclass to
+ gomp_target.
+ (gimple_omp_target_clauses_ptr): Likewise.
+ (gimple_omp_target_set_clauses): Require a gomp_target *.
+ (gimple_omp_target_set_kind): Likewise.
+ (gimple_omp_target_child_fn_ptr): Likewise.
+ (gimple_omp_target_set_child_fn): Likewise.
+ (gimple_omp_target_data_arg_ptr): Likewise.
+ (gimple_omp_target_set_data_arg): Likewise.
+ (gimple_omp_target_child_fn): Require a const gomp_target *.
+ (gimple_omp_target_data_arg): Likewise.
+ (gimple_omp_teams_clauses): Update for renaming of subclass to
+ gomp_teams.
+ (gimple_omp_teams_clauses_ptr): Likewise.
+ (gimple_omp_teams_set_clauses): Require a gomp_teams *.
+ (gimple_omp_sections_clauses): Update for renaming of subclass to
+ gomp_sections.
+ (gimple_omp_sections_clauses_ptr): Likewise.
+ (gimple_omp_sections_set_clauses): Likewise.
+ (gimple_omp_sections_control): Likewise.
+ (gimple_omp_sections_control_ptr): Likewise.
+ (gimple_omp_sections_set_control): Likewise.
+ (gimple_omp_for_set_cond): Likewise.
+ (gimple_omp_for_cond): Likewise.
+ (gimple_omp_atomic_store_set_val): Require a gomp_atomic_store *.
+ (gimple_omp_atomic_store_val_ptr): Likewise.
+ (gimple_omp_atomic_load_set_lhs): Likewise.
+ (gimple_omp_atomic_store_val): Require a const gomp_atomic_store *.
+ (gimple_omp_atomic_load_lhs): Likewise.
+ (gimple_omp_atomic_load_rhs): Likewise.
+ (gimple_omp_atomic_load_lhs_ptr): Require a gomp_atomic_load *.
+ (gimple_omp_atomic_load_set_rhs): Likewise.
+ (gimple_omp_atomic_load_rhs_ptr): Likewise.
+ (gimple_omp_continue_control_def): Require a const gomp_continue *.
+ (gimple_omp_continue_control_use): Likewise.
+ (gimple_omp_continue_control_def_ptr): Require a gomp_continue *.
+ (gimple_omp_continue_set_control_def): Likewise.
+ (gimple_omp_continue_control_use_ptr): Likewise.
+ (gimple_omp_continue_set_control_use): Likewise.
+ (gimple_transaction_body_ptr): Require a gtransaction *.
+ (gimple_transaction_body): Likewise.
+ (gimple_transaction_label_ptr): Likewise.
+ (gimple_transaction_label): Require a const gtransaction *.
+ (gimple_transaction_subcode): Likewise.
+ (gimple_transaction_set_body): Require a gtransaction *.
+ (gimple_transaction_set_label): Likewise.
+ (gimple_transaction_set_subcode): Likewise.
+ (gimple_return_retval_ptr): Require a const greturn *.
+ (gimple_return_retval): Likewise.
+ (gimple_return_set_retval): Require a greturn *.
+ (gimple_expr_type): Introduce local "call_stmt" and use in place of
+ "stmt" for typesafety.
+
+ * asan.c: Use gimple subclasses.
+ * auto-profile.c: Likewise.
+ * builtins.c: Likewise.
+ * builtins.h: Likewise.
+ * cfgexpand.c: Likewise.
+ * cfgloop.c: Likewise.
+ * cfgloopmanip.c: Likewise.
+ * cgraph.c: Likewise.
+ * cgraph.h: Likewise.
+ * cgraphbuild.c: Likewise.
+ * cgraphclones.c: Likewise.
+ * cgraphunit.c: Likewise.
+ * expr.h: Likewise.
+ * gimple-builder.c: Likewise.
+ * gimple-builder.h: Likewise.
+ * gimple-fold.c: Likewise.
+ * gimple-low.c: Likewise.
+ * gimple-pretty-print.c: Likewise.
+ * gimple-ssa-isolate-paths.c: Likewise.
+ * gimple-ssa-strength-reduction.c: Likewise.
+ * gimple-streamer-in.c: Likewise.
+ * gimple-streamer-out.c: Likewise.
+ * gimple-walk.c: Likewise.
+ * gimplify-me.c: Likewise.
+ * gimplify.c: Likewise.
+ * gimplify.h: Likewise.
+ * graphite-scop-detection.c: Likewise.
+ * graphite-sese-to-poly.c: Likewise.
+ * internal-fn.c: Likewise.
+	* internal-fn.def: Likewise.
+ * internal-fn.h: Likewise.
+ * ipa-icf-gimple.c: Likewise.
+ * ipa-icf-gimple.h: Likewise.
+ * ipa-icf.c: Likewise.
+ * ipa-inline-analysis.c: Likewise.
+ * ipa-prop.c: Likewise.
+ * ipa-prop.h: Likewise.
+ * ipa-pure-const.c: Likewise.
+ * ipa-split.c: Likewise.
+ * lto-streamer-in.c: Likewise.
+ * lto-streamer-out.c: Likewise.
+ * omp-low.c: Likewise.
+ * predict.c: Likewise.
+ * sanopt.c: Likewise.
+ * sese.c: Likewise.
+ * ssa-iterators.h: Likewise.
+ * stmt.c: Likewise.
+ * trans-mem.c: Likewise.
+ * tree-call-cdce.c: Likewise.
+ * tree-cfg.c: Likewise.
+ * tree-cfg.h: Likewise.
+ * tree-cfgcleanup.c: Likewise.
+ * tree-chkp.c: Likewise.
+ * tree-chkp.h: Likewise.
+ * tree-complex.c: Likewise.
+ * tree-data-ref.c: Likewise.
+ * tree-dfa.c: Likewise.
+ * tree-eh.c: Likewise.
+ * tree-eh.h: Likewise.
+ * tree-emutls.c: Likewise.
+ * tree-if-conv.c: Likewise.
+ * tree-inline.c: Likewise.
+ * tree-inline.h: Likewise.
+ * tree-into-ssa.c: Likewise.
+ * tree-into-ssa.h: Likewise.
+ * tree-loop-distribution.c: Likewise.
+ * tree-nrv.c: Likewise.
+ * tree-object-size.c: Likewise.
+ * tree-outof-ssa.c: Likewise.
+ * tree-parloops.c: Likewise.
+ * tree-phinodes.c: Likewise.
+ * tree-phinodes.h: Likewise.
+ * tree-predcom.c: Likewise.
+ * tree-profile.c: Likewise.
+ * tree-scalar-evolution.c: Likewise.
+	* tree-scalar-evolution.h: Likewise.
+	* tree-sra.c: Likewise.
+ * tree-ssa-alias.c: Likewise.
+ * tree-ssa-alias.h: Likewise.
+ * tree-ssa-ccp.c: Likewise.
+ * tree-ssa-coalesce.c: Likewise.
+ * tree-ssa-copy.c: Likewise.
+ * tree-ssa-copyrename.c: Likewise.
+ * tree-ssa-dce.c: Likewise.
+ * tree-ssa-dom.c: Likewise.
+ * tree-ssa-forwprop.c: Likewise.
+ * tree-ssa-ifcombine.c: Likewise.
+ * tree-ssa-live.c: Likewise.
+ * tree-ssa-loop-im.c: Likewise.
+ * tree-ssa-loop-ivcanon.c: Likewise.
+ * tree-ssa-loop-ivopts.c: Likewise.
+ * tree-ssa-loop-manip.c: Likewise.
+ * tree-ssa-loop-niter.c: Likewise.
+ * tree-ssa-loop-prefetch.c: Likewise.
+ * tree-ssa-loop-unswitch.c: Likewise.
+ * tree-ssa-math-opts.c: Likewise.
+ * tree-ssa-operands.c: Likewise.
+ * tree-ssa-phiopt.c: Likewise.
+ * tree-ssa-phiprop.c: Likewise.
+ * tree-ssa-pre.c: Likewise.
+ * tree-ssa-propagate.c: Likewise.
+ * tree-ssa-propagate.h: Likewise.
+ * tree-ssa-reassoc.c: Likewise.
+ * tree-ssa-sccvn.c: Likewise.
+ * tree-ssa-sccvn.h: Likewise.
+ * tree-ssa-sink.c: Likewise.
+	* tree-ssa-strlen.c: Likewise.
+	* tree-ssa-structalias.c: Likewise.
+ * tree-ssa-tail-merge.c: Likewise.
+ * tree-ssa-ter.c: Likewise.
+ * tree-ssa-threadedge.c: Likewise.
+ * tree-ssa-threadedge.h: Likewise.
+ * tree-ssa-threadupdate.c: Likewise.
+ * tree-ssa-uncprop.c: Likewise.
+ * tree-ssa-uninit.c: Likewise.
+ * tree-ssa.c: Likewise.
+ * tree-stdarg.c: Likewise.
+ * tree-switch-conversion.c: Likewise.
+ * tree-tailcall.c: Likewise.
+ * tree-vect-data-refs.c: Likewise.
+ * tree-vect-generic.c: Likewise.
+ * tree-vect-loop-manip.c: Likewise.
+ * tree-vect-loop.c: Likewise.
+ * tree-vect-patterns.c: Likewise.
+ * tree-vect-slp.c: Likewise.
+ * tree-vect-stmts.c: Likewise.
+ * tree-vectorizer.h: Likewise.
+ * tree-vrp.c: Likewise.
+ * tree.c: Likewise.
+ * ubsan.c: Likewise.
+ * value-prof.c: Likewise.
+ * value-prof.h: Likewise.
+ * vtable-verify.c: Likewise.
+
2014-11-19 Markus Trippelsdorf <markus@trippelsdorf.de>
* config/rs6000/constraints.md: Avoid signed integer overflows.
--- /dev/null
+2014-11-19 David Malcolm <dmalcolm@redhat.com>
+
+ * gimple.h (struct gimple_statement_structure): Remove stray
+ newline.
+
+2014-11-19 David Malcolm <dmalcolm@redhat.com>
+
+ * gimple.c (gimple_build_assign_with_ops): Strengthen return type
+ of new overload, from gimple to gassign *.
+
+2014-11-18 David Malcolm <dmalcolm@redhat.com>
+
+ * cgraphbuild.c (pass_build_cgraph_edges::execute): Fix linebreak
+ at assignment statement.
+ * gimple-ssa-isolate-paths.c (find_implicit_erroneous_behaviour):
+ Likewise.
+ * gimple-ssa-strength-reduction.c (replace_one_candidate): Likewise.
+ * gimple-walk.c (walk_gimple_stmt): Likewise.
+ * gimple.c (gimple_build_return): Likewise.
+ (gimple_build_call_1): Likewise.
+ (gimple_build_call_internal_1): Likewise.
+ (gimple_build_label): Likewise.
+ (gimple_build_goto): Likewise.
+ (gimple_build_eh_filter): Likewise.
+ (gimple_build_eh_must_not_throw): Likewise.
+ (gimple_build_resx): Likewise.
+ (gimple_build_switch_nlabels): Likewise.
+ (gimple_build_eh_dispatch): Likewise.
+ (gimple_build_debug_bind_stat): Likewise.
+ (gimple_build_debug_source_bind_stat): Likewise.
+ (gimple_build_omp_critical): Likewise.
+ (gimple_build_omp_parallel): Likewise.
+ (gimple_build_omp_task): Likewise.
+ (gimple_build_omp_continue): Likewise.
+ (gimple_build_omp_sections): Likewise.
+ (gimple_build_omp_single): Likewise.
+ (gimple_build_omp_target): Likewise.
+ (gimple_build_omp_teams): Likewise.
+ (gimple_build_omp_atomic_load): Likewise.
+ (gimple_build_omp_atomic_store): Likewise.
+ (gimple_build_transaction): Likewise.
+ (gimple_copy): Likewise.
+ * gimple.h (gimple_call_fntype): Likewise.
+ (gimple_eh_filter_types): Likewise.
+ (gimple_eh_filter_types_ptr): Likewise.
+ (gimple_eh_filter_failure_ptr): Likewise.
+ (gimple_phi_capacity): Likewise.
+ (gimple_phi_num_args): Likewise.
+ (gimple_phi_result): Likewise.
+ (gimple_omp_for_clauses): Likewise.
+ (gimple_omp_for_clauses_ptr): Likewise.
+ (gimple_omp_for_set_clauses): Likewise.
+ (gimple_omp_for_collapse): Likewise.
+ (gimple_omp_for_index): Likewise.
+ (gimple_omp_for_index_ptr): Likewise.
+ (gimple_omp_for_set_index): Likewise.
+ (gimple_omp_for_initial): Likewise.
+ (gimple_omp_for_initial_ptr): Likewise.
+ (gimple_omp_for_set_initial): Likewise.
+ (gimple_omp_for_final): Likewise.
+ (gimple_omp_for_final_ptr): Likewise.
+ (gimple_omp_for_set_final): Likewise.
+ (gimple_omp_for_incr): Likewise.
+ (gimple_omp_for_incr_ptr): Likewise.
+ (gimple_omp_for_set_incr): Likewise.
+ (gimple_omp_for_pre_body_ptr): Likewise.
+ (gimple_omp_for_set_pre_body): Likewise.
+ (gimple_omp_parallel_clauses): Likewise.
+ (gimple_omp_task_clauses): Likewise.
+ (gimple_omp_task_clauses_ptr): Likewise.
+ (gimple_omp_task_set_clauses): Likewise.
+ (gimple_omp_task_child_fn): Likewise.
+ (gimple_omp_task_child_fn_ptr): Likewise.
+ (gimple_omp_task_set_child_fn): Likewise.
+ (gimple_omp_task_data_arg): Likewise.
+ (gimple_omp_task_data_arg_ptr): Likewise.
+ (gimple_omp_task_set_data_arg): Likewise.
+ (gimple_omp_taskreg_clauses): Likewise.
+ (gimple_omp_taskreg_clauses_ptr): Likewise.
+ (gimple_omp_taskreg_set_clauses): Likewise.
+ (gimple_omp_taskreg_child_fn): Likewise.
+ (gimple_omp_taskreg_child_fn_ptr): Likewise.
+ (gimple_omp_taskreg_set_child_fn): Likewise.
+ (gimple_omp_taskreg_data_arg): Likewise.
+ (gimple_omp_taskreg_data_arg_ptr): Likewise.
+ (gimple_omp_taskreg_set_data_arg): Likewise.
+ (gimple_omp_task_copy_fn): Likewise.
+ (gimple_omp_task_copy_fn_ptr): Likewise.
+ (gimple_omp_task_set_copy_fn): Likewise.
+ (gimple_omp_task_arg_size): Likewise.
+ (gimple_omp_task_arg_size_ptr): Likewise.
+ (gimple_omp_task_set_arg_size): Likewise.
+ (gimple_omp_task_arg_align): Likewise.
+ (gimple_omp_task_arg_align_ptr): Likewise.
+ (gimple_omp_task_set_arg_align): Likewise.
+ (gimple_omp_single_clauses): Likewise.
+ (gimple_omp_single_clauses_ptr): Likewise.
+ (gimple_omp_target_clauses): Likewise.
+ (gimple_omp_target_clauses_ptr): Likewise.
+ (gimple_omp_teams_clauses): Likewise.
+ (gimple_omp_teams_clauses_ptr): Likewise.
+ (gimple_omp_sections_clauses): Likewise.
+ (gimple_omp_sections_clauses_ptr): Likewise.
+ (gimple_omp_sections_set_clauses): Likewise.
+ (gimple_omp_sections_control): Likewise.
+ (gimple_omp_sections_control_ptr): Likewise.
+ (gimple_omp_sections_set_control): Likewise.
+ (gimple_omp_for_set_cond): Likewise.
+ (gimple_omp_for_cond): Likewise.
+ * graphite-sese-to-poly.c (follow_ssa_with_commutative_ops): Likewise.
+ (detect_commutative_reduction_assign): Likewise.
+ * ipa-split.c (verify_non_ssa_vars): Likewise.
+ (split_function): Likewise.
+ * omp-low.c (check_omp_nesting_restrictions): Likewise.
+ (remove_exit_barrier): Likewise.
+ (expand_omp_for_generic): Likewise.
+ (expand_omp_for_static_nochunk): Likewise.
+ (expand_omp_for_static_chunk): Likewise.
+ (expand_omp_atomic): Likewise.
+ (lower_omp_for): Likewise.
+ (lower_omp_taskreg): Likewise.
+ * predict.c (tree_estimate_probability_bb): Likewise.
+ * trans-mem.c (propagate_tm_flags_out): Likewise.
+ (execute_tm_mark): Likewise.
+ * tree-cfg.c (make_edges): Likewise.
+ * tree-inline.c (remap_gimple_stmt): Likewise.
+ (estimate_num_insns): Likewise.
+ * tree-sra.c (init_subtree_with_zero): Likewise.
+ (sra_modify_expr): Likewise.
+ * tree-ssa-forwprop.c (pass_forwprop::execute): Likewise.
+ * tree-ssa-loop-ivopts.c (remove_unused_ivs): Likewise.
+ * tree-ssa-math-opts.c (build_and_insert_cast): Likewise.
+ (pass_cse_sincos::execute): Likewise.
+ * tree-ssa-pre.c (do_regular_insertion): Likewise.
+ (eliminate_insert): Likewise.
+ * tree-ssa-reassoc.c (update_ops): Likewise.
+ * tree-ssa-tail-merge.c (same_succ_hash): Likewise.
+ * tree-tailcall.c (static): Likewise.
+ * tree-vrp.c (simplify_truth_ops_using_ranges): Likewise.
+
+2014-11-14 David Malcolm <dmalcolm@redhat.com>
+
+ * cgraph.c (cgraph_edge::redirect_call_stmt_to_callee):
+ Strengthen locals "dbndret" and "ibndret" from gimple to gcall *.
+ * gimple-iterator.c (gsi_for_phi): New function.
+ * gimple-iterator.h (gsi_for_phi): New prototype.
+ * internal-fn.c (expand_ADD_OVERFLOW): Strengthen param "stmt"
+ from gimple to gcall *.
+ (expand_SUB_OVERFLOW): Likewise.
+ (expand_MUL_OVERFLOW): Likewise.
+ * ipa-icf-gimple.c (func_checker::compare_bb): Add checked casts
+ within case GIMPLE_CALL.
+ (func_checker::compare_gimple_call): Strengthen params from gimple
+ to gcall *.
+ * ipa-icf-gimple.h (func_checker::compare_gimple_call): Likewise.
+ * sanopt.c (sanopt_optimize_walker): Replace check for GIMPLE_ASM
+ with a dyn_cast, introducing local "asm_stmt" and using it in
+ place of "stmt" for typesafety.
+ * tree-chkp.c (chkp_recompute_phi_bounds): Strengthen locals
+ "bounds_phi" and "ptr_phi" from gimple to gphi *.
+ (chkp_add_bounds_to_ret_stmt): Strengthen local "ret" from gimple
+ to greturn *.
+ (chkp_add_bounds_to_call_stmt): Strengthen locals "call" and
+ "new_call" from gimple to gcall *.
+ (chkp_build_returned_bound): Likewise for param "call".
+ (chkp_retbnd_call_by_val): Likewise for return type.
+ (chkp_get_bounds_by_definition): Strengthen param "iter" from
+ gimple_stmt_iterator * to gphi_iterator *. Add a checked cast
+ within case GIMPLE_CALL. Use gsi_for_phi rather than
+ gsi_for_stmt.
+ (chkp_find_bounds_1): Strengthen local "phi_iter" from
+ gimple_stmt_iterator to gphi_iterator. Replace check for
+ GIMPLE_PHI with a dyn_cast, introducing local "def_phi" and using
+ in place of "def_stmt" for typesafety.
+ (chkp_copy_bounds_for_assign): Add checked cast.
+ (chkp_instrument_function): Within case GIMPLE_RETURN, add local
+ greturn * "r" from a checked cast and use in place of "s" for
+ typesafety.
+ * tree-chkp.h (chkp_retbnd_call_by_val): Strengthen return type
+ from gimple to gcall *.
+ * tree-inline.c (copy_bb): Update for renaming of field
+ "gimple_call" to "call_stmt" on the gimple-classes branch.
+ (expand_call_inline): Strengthen local "retbnc" from gimple to
+ gcall *.
+ * tree-ssa-forwprop.c (pass_forwprop::execute): Replace check for
+ GIMPLE_COND with a dyn_cast, introducing local "cond" and using
+ in place of "stmt" for typesafety.
+ * value-prof.c (gimple_ic): Strengthen local "iretbnd_stmt" from
+ gimple to gcall *. Weaken top-level local "psi" from
+ gphi_iterator back to gimple_stmt_iterator, reintroducing the
+ name as a phi_iterator within the for loop that needs it.
+
+2014-10-28 David Malcolm <dmalcolm@redhat.com>
+
+ * auto-profile.c (autofdo::function_instance::find_icall_target_map):
+ Strengthen param "stmt" from gimple to gcall *.
+ (autofdo::autofdo_source_profile::update_inlined_ind_target):
+ Likewise.
+ (autofdo::afdo_indirect_call): Rename local gimple "stmt" to "gs",
+ reintroducing "stmt" as a gcall * via a dyn_cast once we've
+ established that we have a GIMPLE_CALL.
+ (autofdo::afdo_set_bb_count): Use a gphi_iterator for the phi
+ iteration, renaming it from "gsi" to "gpi", strengthening
+ local "phi" from gimple to gphi *.
+ (autofdo::afdo_propagate_circuit): Rename local gimple "phi_stmt"
+ to "def_stmt". Reintroduce "phi_stmt" as a gphi * via a dyn_cast
+ once we know we have a GIMPLE_PHI.
+ (autofdo::afdo_vpt_for_early_inline): Strengthen local "stmt" from
+ gimple to gcall *, using a dyn_cast.
+ * gimple-fold.c (replace_stmt_with_simplification): Replace check
+ against GIMPLE_COND with a dyn_cast <gcond *>, introducing local
+ "cond_stmt". Use "cond_stmt" in place of "stmt" for typesafety.
+ * gimple-iterator.h (gsi_next_nonvirtual_phi): Strengthen param
+ from gimple_stmt_iterator * to gphi_iterator *, and local "phi"
+ from gimple to gphi *.
+ * ipa-icf-gimple.c (ipa_icf_gimple::func_checker::parse_labels):
+ Replace check against GIMPLE_LABEL with a dyn_cast <glabel *>,
+ introducing local "label_stmt". Use it in place of "stmt" for
+ typesafety.
+ (ipa_icf_gimple::func_checker::compare_bb): Add checked casts
+ to appropriate gimple subclasses when invoking comparison methods
+ within the cases for GIMPLE_SWITCH, GIMPLE_RESX, GIMPLE_LABEL,
+ GIMPLE_RETURN, GIMPLE_ASM.
+ (ipa_icf_gimple::func_checker::compare_gimple_label): Strengthen
+ both params from gimple to const glabel *.
+ (ipa_icf_gimple::func_checker::compare_gimple_switch): Strengthen
+ both params from gimple to const gswitch *.
+ (ipa_icf_gimple::func_checker::compare_gimple_return): Strengthen
+ both params from gimple to const greturn *.
+ (ipa_icf_gimple::func_checker::compare_gimple_resx): Strengthen
+ both params from gimple to const gresx *.
+ (ipa_icf_gimple::func_checker::compare_gimple_asm): Strengthen
+ both params from gimple to const gasm *.
+ * ipa-icf-gimple.h (ipa_icf_gimple::func_checker::compare_gimple_label):
+ Strengthen both params from gimple to const glabel *.
+ (ipa_icf_gimple::func_checker::compare_gimple_switch): Strengthen
+ both params from gimple to const gswitch *.
+ (ipa_icf_gimple::func_checker::compare_gimple_return): Strengthen
+ both params from gimple to const greturn *.
+ (ipa_icf_gimple::func_checker::compare_gimple_resx): Strengthen
+ both params from gimple to const gresx *.
+ (ipa_icf_gimple::func_checker::compare_gimple_asm): Strengthen
+ both params from gimple to const gasm *.
+ * ipa-icf.c (ipa_icf_gimple::sem_function::compare_phi_node):
+ Strengthen locals "si1" and "si2" from gimple_stmt_iterator to
+ gphi_iterator, and locals "phi1" and "phi2" from gimple to gphi *.
+ * tree-ssa-forwprop.c (fold_all_stmts): Replace check against
+ GIMPLE_COND with a dyn_cast <gcond *>, introducing local
+ "cond_stmt". Use "cond_stmt" in place of "stmt" for typesafety.
+ * tree-ssa-reassoc.c (branch_fixup): Strengthen local "phi" from
+ gimple to gphi *.
+
+2014-10-27 David Malcolm <dmalcolm@redhat.com>
+
+ * doc/gimple.texi (Class hierarchy of GIMPLE statements): Update
+ to reflect the new gimple subclasses and the renamings of existing
+ classes.
+
+2014-10-27 David Malcolm <dmalcolm@redhat.com>
+
+ * gimple.h (struct gdebug): Remove out-of-date references to
+ typedefs.
+ (struct ggoto): Likewise.
+ (struct glabel): Likewise.
+
+2014-10-27 David Malcolm <dmalcolm@redhat.com>
+
+ Patch autogenerated by rename_gimple_subclasses.py from
+ https://github.com/davidmalcolm/gcc-refactoring-scripts
+ revision 7d754b63ff2bf47226a67b2c0af5d74b54d4709f
+
+ * asan.c (get_mem_ref_of_assignment): Rename gimple subclass types.
+ (instrument_strlen_call): Likewise.
+ (instrument_builtin_call): Likewise.
+ (has_mem_ref_been_instrumented): Likewise.
+ (has_stmt_been_instrumented_p): Likewise.
+ (create_cond_insert_point): Likewise.
+ (asan_expand_check_ifn): Likewise.
+ * builtins.c (validate_arg): Likewise.
+ (do_mpc_arg2): Likewise.
+ * builtins.h (validate_gimple_arglist): Likewise.
+ (fold_call_stmt): Likewise.
+ * cfgexpand.c (mark_transaction_restart_calls): Likewise.
+ (expand_gimple_stmt): Likewise.
+ (expand_asm_operands): Likewise.
+ (label_rtx_for_bb): Likewise.
+ (expand_gimple_stmt_1): Likewise.
+ (maybe_cleanup_end_of_block): Likewise.
+ (expand_gimple_basic_block): Likewise.
+ * cfgloop.c (find_subloop_latch_edge_by_ivs): Likewise.
+ * cfgloopmanip.c (create_empty_if_region_on_edge): Likewise.
+ (create_empty_loop_on_edge): Likewise.
+ * cgraph.c (cgraph_add_edge_to_call_site_hash): Likewise.
+ (cgraph_allocate_init_indirect_info): Likewise.
+ (cgraph_set_edge_callee): Likewise.
+ (cgraph_update_edges_for_call_stmt_node): Likewise.
+ * cgraph.h (set_call_stmt_including_clones): Likewise.
+ (create_indirect_edge): Likewise.
+ (create_edge_including_clones): Likewise.
+ (set_call_stmt): Likewise.
+ (redirect_call_stmt_to_callee): Likewise.
+ (rebuild_references): Likewise.
+ (create_edge): Likewise.
+ * cgraphbuild.c (mark_store): Likewise.
+ (record_references_in_initializer): Likewise.
+ * cgraphclones.c (function): Likewise.
+ (clone_function_name): Likewise.
+ * cgraphunit.c (thunk_adjust): Likewise.
+ * coretypes.h: Likewise.
+ * expr.h (expand_normal): Likewise.
+ * gimple-builder.c (get_expr_type): Likewise.
+ (build_assign): Likewise.
+ (build_type_cast): Likewise.
+ * gimple-builder.h (build_assign): Likewise.
+ (build_type_cast): Likewise.
+ * gimple-fold.c (gimple_fold_builtin_snprintf_chk): Likewise.
+ (gimple_fold_builtin_sprintf_chk): Likewise.
+ (gimple_fold_builtin_snprintf): Likewise.
+ (gimple_fold_builtin): Likewise.
+ (gimple_fold_call): Likewise.
+ (gimple_fold_stmt_to_constant_1): Likewise.
+ (fold_gimple_assign): Likewise.
+ (fold_stmt_1): Likewise.
+ * gimple-iterator.c (gsi_commit_one_edge_insert): Likewise.
+ (gsi_start_phis): Likewise.
+ * gimple-iterator.h (gsi_commit_one_edge_insert): Likewise.
+ (gimple_phi_iterator::phi): Likewise.
+ * gimple-low.c (lower_try_catch): Likewise.
+ (gimple_try_catch_may_fallthru): Likewise.
+ (lower_gimple_bind): Likewise.
+ (gimple_stmt_may_fallthru): Likewise.
+ (struct return_statements_t): Likewise.
+ (lower_gimple_return): Likewise.
+ (lower_stmt): Likewise.
+ * gimple-pretty-print.c (dump_gimple_omp_target): Likewise.
+ (dump_gimple_omp_single): Likewise.
+ (dump_gimple_omp_continue): Likewise.
+ (dump_gimple_omp_teams): Likewise.
+ (dump_gimple_omp_parallel): Likewise.
+ (dump_gimple_phi): Likewise.
+ (dump_gimple_debug): Likewise.
+ (dump_gimple_omp_block): Likewise.
+ (dump_gimple_omp_for): Likewise.
+ (dump_gimple_omp_atomic_load): Likewise.
+ (dump_gimple_omp_task): Likewise.
+ (dump_gimple_bind): Likewise.
+ (dump_ssaname_info): Likewise.
+ (dump_phi_nodes): Likewise.
+ (gimple_dump_bb_for_graph): Likewise.
+ (dump_gimple_resx): Likewise.
+ (dump_gimple_eh_else): Likewise.
+ (dump_gimple_eh_must_not_throw): Likewise.
+ (dump_gimple_eh_filter): Likewise.
+ (dump_gimple_catch): Likewise.
+ (dump_gimple_try): Likewise.
+ (dump_gimple_goto): Likewise.
+ (dump_gimple_assign): Likewise.
+ (dump_gimple_omp_return): Likewise.
+ (dump_gimple_return): Likewise.
+ (pp_points_to_solution): Likewise.
+ (dump_gimple_transaction): Likewise.
+ (dump_gimple_fmt): Likewise.
+ (dump_unary_rhs): Likewise.
+ (dump_binary_rhs): Likewise.
+ (dump_ternary_rhs): Likewise.
+ (dump_gimple_call): Likewise.
+ (dump_gimple_cond): Likewise.
+ (pp_cfg_jump): Likewise.
+ (dump_gimple_label): Likewise.
+ (dump_gimple_eh_dispatch): Likewise.
+ (dump_gimple_switch): Likewise.
+ (pp_gimple_stmt_1): Likewise.
+ * gimple-ssa-isolate-paths.c (isolate_path): Likewise.
+ (find_implicit_erroneous_behaviour): Likewise.
+ (find_explicit_erroneous_behaviour): Likewise.
+ (insert_trap_and_remove_trailing_statements): Likewise.
+ * gimple-ssa-strength-reduction.c (slsr_process_copy): Likewise.
+ (add_cand_for_stmt): Likewise.
+ (create_phi_basis): Likewise.
+ (ncd_for_two_cands): Likewise.
+ (ncd_with_phi): Likewise.
+ (ncd_of_cand_and_phis): Likewise.
+ (replace_mult_candidate): Likewise.
+ (create_add_on_incoming_edge): Likewise.
+ (insert_initializers): Likewise.
+ (introduce_cast_before_cand): Likewise.
+ (replace_one_candidate): Likewise.
+ * gimple-streamer-in.c (input_phi): Likewise.
+ (input_gimple_stmt): Likewise.
+ * gimple-streamer-out.c (output_phi): Likewise.
+ (output_bb): Likewise.
+ (output_gimple_stmt): Likewise.
+ * gimple-walk.c (walk_gimple_stmt): Likewise.
+ (walk_gimple_seq): Likewise.
+ (walk_gimple_op): Likewise.
+ (walk_stmt_load_store_addr_ops): Likewise.
+ * gimple.c (gimple_build_omp_target): Likewise.
+ (gimple_build_omp_sections_switch): Likewise.
+ (gimple_build_omp_single): Likewise.
+ (gimple_build_omp_return): Likewise.
+ (gimple_build_omp_sections): Likewise.
+ (gimple_build_omp_task): Likewise.
+ (gimple_build_omp_parallel): Likewise.
+ (gimple_build_omp_for): Likewise.
+ (gimple_build_omp_critical): Likewise.
+ (gimple_build_omp_taskgroup): Likewise.
+ (gimple_build_omp_continue): Likewise.
+ (gimple_build_omp_teams): Likewise.
+ (gimple_build_omp_atomic_load): Likewise.
+ (gimple_build_try): Likewise.
+ (gimple_build_wce): Likewise.
+ (gimple_build_eh_else): Likewise.
+ (gimple_build_eh_must_not_throw): Likewise.
+ (gimple_build_eh_filter): Likewise.
+ (gimple_build_catch): Likewise.
+ (gimple_build_nop): Likewise.
+ (empty_stmt_p): Likewise.
+ (gimple_build_with_ops_stat): Likewise.
+ (infer_nonnull_range): Likewise.
+ (gimple_build_omp_atomic_store): Likewise.
+ (gimple_build_transaction): Likewise.
+ (gimple_copy): Likewise.
+ (gimple_call_flags): Likewise.
+ (gimple_call_fnspec): Likewise.
+ (gimple_call_arg_flags): Likewise.
+ (gimple_build_return): Likewise.
+ (gimple_call_reset_alias_info): Likewise.
+ (gimple_build_call_1): Likewise.
+ (gimple_build_call_vec): Likewise.
+ (gimple_build_call): Likewise.
+ (gimple_build_call_valist): Likewise.
+ (gimple_build_call_internal_1): Likewise.
+ (gimple_build_call_internal): Likewise.
+ (gimple_build_call_internal_vec): Likewise.
+ (canonicalize_cond_expr_cond): Likewise.
+ (gimple_call_copy_skip_args): Likewise.
+ (gimple_has_side_effects): Likewise.
+ (gimple_call_builtin_p): Likewise.
+ (gimple_build_bind): Likewise.
+ (gimple_build_asm_vec): Likewise.
+ (gimple_could_trap_p_1): Likewise.
+ (gimple_build_asm_1): Likewise.
+ (gimple_build_call_from_tree): Likewise.
+ (gimple_build_assign_stat): Likewise.
+ (gimple_build_resx): Likewise.
+ (gimple_build_switch_nlabels): Likewise.
+ (gimple_build_switch): Likewise.
+ (gimple_cond_set_condition_from_tree): Likewise.
+ (gimple_set_bb): Likewise.
+ (gimple_build_label): Likewise.
+ (gimple_build_goto): Likewise.
+ (gimple_build_eh_dispatch): Likewise.
+ (gimple_build_debug_bind_stat): Likewise.
+ (gimple_build_debug_source_bind_stat): Likewise.
+ (gimple_build_assign_with_ops): Likewise.
+ (gimple_build_cond): Likewise.
+ (gimple_build_cond_from_tree): Likewise.
+ * gimple.h (gimple_build_omp_target): Likewise.
+ (gimple_omp_teams_clauses): Likewise.
+ (gimple_omp_teams_clauses_ptr): Likewise.
+ (gimple_omp_target_set_kind): Likewise.
+ (gimple_omp_target_set_child_fn): Likewise.
+ (gimple_build_omp_single): Likewise.
+ (gimple_omp_target_kind): Likewise.
+ (gimple_omp_target_child_fn): Likewise.
+ (gimple_omp_target_child_fn_ptr): Likewise.
+ (gimple_omp_target_data_arg): Likewise.
+ (gimple_omp_target_data_arg_ptr): Likewise.
+ (gimple_omp_target_clauses): Likewise.
+ (gimple_omp_target_clauses_ptr): Likewise.
+ (gimple_build_omp_sections_switch): Likewise.
+ (gimple_omp_single_clauses): Likewise.
+ (gimple_omp_single_clauses_ptr): Likewise.
+ (gimple_build_omp_return): Likewise.
+ (gimple_omp_sections_clauses): Likewise.
+ (gimple_omp_sections_clauses_ptr): Likewise.
+ (gimple_omp_sections_set_clauses): Likewise.
+ (gimple_omp_sections_control): Likewise.
+ (gimple_omp_sections_control_ptr): Likewise.
+ (gimple_omp_sections_set_control): Likewise.
+ (gimple_build_omp_parallel): Likewise.
+ (gimple_omp_task_clauses): Likewise.
+ (gimple_omp_task_clauses_ptr): Likewise.
+ (gimple_omp_task_set_clauses): Likewise.
+ (gimple_omp_task_child_fn): Likewise.
+ (gimple_omp_task_child_fn_ptr): Likewise.
+ (gimple_omp_task_set_child_fn): Likewise.
+ (gimple_omp_task_data_arg): Likewise.
+ (gimple_omp_task_data_arg_ptr): Likewise.
+ (gimple_omp_task_set_data_arg): Likewise.
+ (gimple_omp_task_copy_fn): Likewise.
+ (gimple_omp_task_copy_fn_ptr): Likewise.
+ (gimple_omp_task_set_copy_fn): Likewise.
+ (gimple_omp_task_arg_size): Likewise.
+ (gimple_omp_task_arg_size_ptr): Likewise.
+ (gimple_omp_task_set_arg_size): Likewise.
+ (gimple_omp_task_arg_align): Likewise.
+ (gimple_omp_task_arg_align_ptr): Likewise.
+ (gimple_omp_task_set_arg_align): Likewise.
+ (gimple_omp_parallel_set_clauses): Likewise.
+ (gimple_omp_parallel_set_child_fn): Likewise.
+ (gimple_build_omp_for): Likewise.
+ (gimple_omp_parallel_clauses_ptr): Likewise.
+ (gimple_omp_parallel_child_fn): Likewise.
+ (gimple_omp_parallel_child_fn_ptr): Likewise.
+ (gimple_omp_parallel_data_arg): Likewise.
+ (gimple_omp_parallel_data_arg_ptr): Likewise.
+ (gimple_omp_parallel_clauses): Likewise.
+ (gimple_build_omp_critical): Likewise.
+ (gimple_omp_for_kind): Likewise.
+ (gimple_omp_for_combined_p): Likewise.
+ (gimple_omp_for_combined_into_p): Likewise.
+ (gimple_omp_for_clauses): Likewise.
+ (gimple_omp_for_clauses_ptr): Likewise.
+ (gimple_omp_for_set_clauses): Likewise.
+ (gimple_omp_for_collapse): Likewise.
+ (gimple_omp_for_index): Likewise.
+ (gimple_omp_for_index_ptr): Likewise.
+ (gimple_omp_for_set_index): Likewise.
+ (gimple_omp_for_initial): Likewise.
+ (gimple_omp_for_initial_ptr): Likewise.
+ (gimple_omp_for_set_initial): Likewise.
+ (gimple_omp_for_final): Likewise.
+ (gimple_omp_for_final_ptr): Likewise.
+ (gimple_omp_for_set_final): Likewise.
+ (gimple_omp_for_incr): Likewise.
+ (gimple_omp_for_incr_ptr): Likewise.
+ (gimple_omp_for_set_incr): Likewise.
+ (gimple_omp_for_pre_body_ptr): Likewise.
+ (gimple_omp_for_set_pre_body): Likewise.
+ (gimple_omp_for_set_cond): Likewise.
+ (gimple_omp_set_body): Likewise.
+ (gimple_build_debug_source_bind_stat): Likewise.
+ (gimple_omp_critical_name): Likewise.
+ (gimple_omp_critical_name_ptr): Likewise.
+ (gimple_omp_atomic_load_rhs_ptr): Likewise.
+ (gimple_omp_continue_set_control_def): Likewise.
+ (gimple_build_omp_taskgroup): Likewise.
+ (gimple_omp_continue_control_def): Likewise.
+ (gimple_omp_continue_control_def_ptr): Likewise.
+ (gimple_omp_continue_control_use): Likewise.
+ (gimple_omp_continue_control_use_ptr): Likewise.
+ (gimple_omp_atomic_store_set_val): Likewise.
+ (gimple_build_omp_atomic_load): Likewise.
+ (gimple_omp_for_cond): Likewise.
+ (gimple_omp_atomic_store_val): Likewise.
+ (gimple_omp_atomic_load_set_lhs): Likewise.
+ (gimple_omp_atomic_load_set_rhs): Likewise.
+ (gimple_build_omp_teams): Likewise.
+ (gimple_omp_atomic_store_val_ptr): Likewise.
+ (gimple_omp_atomic_load_lhs): Likewise.
+ (gimple_omp_atomic_load_lhs_ptr): Likewise.
+ (gimple_omp_atomic_load_rhs): Likewise.
+ (gimple_try_kind): Likewise.
+ (gimple_try_cleanup): Likewise.
+ (gimple_try_set_catch_is_cleanup): Likewise.
+ (gimple_try_set_eval): Likewise.
+ (gimple_build_eh_else): Likewise.
+ (gimple_try_eval_ptr): Likewise.
+ (gimple_try_cleanup_ptr): Likewise.
+ (gimple_phi_capacity): Likewise.
+ (gimple_phi_num_args): Likewise.
+ (gimple_phi_result): Likewise.
+ (gimple_phi_result_ptr): Likewise.
+ (gimple_phi_arg): Likewise.
+ (gimple_phi_arg_def): Likewise.
+ (gimple_phi_arg_def_ptr): Likewise.
+ (gimple_phi_arg_edge): Likewise.
+ (gimple_phi_arg_location): Likewise.
+ (gimple_phi_arg_location_from_edge): Likewise.
+ (gimple_phi_arg_set_location): Likewise.
+ (gimple_resx_set_region): Likewise.
+ (gimple_build_switch): Likewise.
+ (gimple_eh_dispatch_region): Likewise.
+ (gimple_phi_arg_has_location): Likewise.
+ (gimple_build_wce): Likewise.
+ (gimple_resx_region): Likewise.
+ (gimple_build_eh_must_not_throw): Likewise.
+ (gimple_eh_must_not_throw_set_fndecl): Likewise.
+ (gimple_eh_else_n_body_ptr): Likewise.
+ (gimple_eh_else_n_body): Likewise.
+ (gimple_eh_else_e_body_ptr): Likewise.
+ (gimple_eh_else_e_body): Likewise.
+ (gimple_eh_else_set_n_body): Likewise.
+ (gimple_build_eh_filter): Likewise.
+ (gimple_eh_filter_set_failure): Likewise.
+ (gimple_eh_must_not_throw_fndecl): Likewise.
+ (gimple_build_catch): Likewise.
+ (gimple_eh_filter_failure): Likewise.
+ (gimple_eh_filter_set_types): Likewise.
+ (gimple_eh_filter_types): Likewise.
+ (gimple_eh_filter_types_ptr): Likewise.
+ (gimple_eh_filter_failure_ptr): Likewise.
+ (gimple_asm_input_p): Likewise.
+ (gimple_build_asm_vec): Likewise.
+ (gimple_catch_types): Likewise.
+ (gimple_catch_types_ptr): Likewise.
+ (gimple_catch_handler_ptr): Likewise.
+ (gimple_catch_handler): Likewise.
+ (gimple_catch_set_types): Likewise.
+ (gimple_statement_catch): Likewise.
+ (gimple_goto_set_dest): Likewise.
+ (gimple_bind_add_seq): Likewise.
+ (gimple_build_nop): Likewise.
+ (gimple_seq_first_stmt): Likewise.
+ (gimple_seq_first_stmt_as_a_bind): Likewise.
+ (gimple_bind_vars): Likewise.
+ (gimple_bind_set_vars): Likewise.
+ (gimple_bind_append_vars): Likewise.
+ (gimple_bind_body_ptr): Likewise.
+ (gimple_bind_body): Likewise.
+ (gimple_bind_set_body): Likewise.
+ (gimple_bind_add_stmt): Likewise.
+ (gimple_bind_block): Likewise.
+ (gimple_transaction_set_subcode): Likewise.
+ (gimple_return_retval_ptr): Likewise.
+ (gimple_alloc_stat): Likewise.
+ (gimple_return_retval): Likewise.
+ (gimple_transaction_body): Likewise.
+ (gimple_transaction_label_ptr): Likewise.
+ (gimple_build_omp_atomic_store): Likewise.
+ (gimple_omp_continue_set_control_use): Likewise.
+ (gimple_transaction_body_ptr): Likewise.
+ (gimple_transaction_label): Likewise.
+ (gimple_transaction_subcode): Likewise.
+ (gimple_transaction_set_body): Likewise.
+ (gimple_transaction_set_label): Likewise.
+ (gimple_call_arg_flags): Likewise.
+ (gimple_call_return_flags): Likewise.
+ (gimple_call_fndecl): Likewise.
+ (gimple_call_chain): Likewise.
+ (gimple_expr_type): Likewise.
+ (gimple_call_reset_alias_info): Likewise.
+ (gimple_build_call_vec): Likewise.
+ (gimple_build_call): Likewise.
+ (gimple_build_call_valist): Likewise.
+ (gimple_build_call_internal): Likewise.
+ (gimple_build_call_internal_vec): Likewise.
+ (canonicalize_cond_expr_cond): Likewise.
+ (gimple_call_copy_skip_args): Likewise.
+ (gimple_call_fn_ptr): Likewise.
+ (gimple_call_set_fndecl): Likewise.
+ (gimple_call_chain_ptr): Likewise.
+ (gimple_call_set_arg): Likewise.
+ (gimple_call_set_tail): Likewise.
+ (gimple_call_tail_p): Likewise.
+ (gimple_call_set_return_slot_opt): Likewise.
+ (gimple_call_return_slot_opt_p): Likewise.
+ (gimple_call_set_from_thunk): Likewise.
+ (gimple_call_from_thunk_p): Likewise.
+ (gimple_call_set_va_arg_pack): Likewise.
+ (gimple_call_noreturn_p): Likewise.
+ (gimple_call_set_nothrow): Likewise.
+ (gimple_call_nothrow_p): Likewise.
+ (gimple_call_set_alloca_for_var): Likewise.
+ (gimple_call_alloca_for_var_p): Likewise.
+ (gimple_call_copy_flags): Likewise.
+ (gimple_call_use_set): Likewise.
+ (gimple_statement_call): Likewise.
+ (gimple_call_internal_fn): Likewise.
+ (gimple_call_fntype): Likewise.
+ (gimple_asm_clobbers_memory_p): Likewise.
+ (gimple_bind_set_block): Likewise.
+ (gimple_asm_ninputs): Likewise.
+ (gimple_asm_noutputs): Likewise.
+ (gimple_asm_nclobbers): Likewise.
+ (gimple_asm_nlabels): Likewise.
+ (gimple_asm_input_op): Likewise.
+ (gimple_asm_set_input_op): Likewise.
+ (gimple_asm_output_op): Likewise.
+ (gimple_asm_set_output_op): Likewise.
+ (gimple_asm_set_clobber_op): Likewise.
+ (gimple_asm_set_label_op): Likewise.
+ (gimple_asm_string): Likewise.
+ (gimple_asm_set_input): Likewise.
+ (gimple_build_bind): Likewise.
+ (gimple_asm_input_op_ptr): Likewise.
+ (gimple_asm_output_op_ptr): Likewise.
+ (gimple_asm_clobber_op): Likewise.
+ (gimple_asm_label_op): Likewise.
+ (gimple_asm_volatile_p): Likewise.
+ (gimple_asm_set_volatile): Likewise.
+ (gimple_assign_set_rhs_with_ops): Likewise.
+ (gimple_build_call_from_tree): Likewise.
+ (gimple_build_assign_stat): Likewise.
+ (gimple_eh_dispatch_set_region): Likewise.
+ (gimple_switch_set_num_labels): Likewise.
+ (gimple_switch_index): Likewise.
+ (gimple_switch_set_index): Likewise.
+ (gimple_switch_set_label): Likewise.
+ (gimple_build_resx): Likewise.
+ (gimple_build_switch_nlabels): Likewise.
+ (gimple_switch_num_labels): Likewise.
+ (gimple_switch_index_ptr): Likewise.
+ (gimple_switch_label): Likewise.
+ (gimple_switch_default_label): Likewise.
+ (gimple_cond_set_condition): Likewise.
+ (gimple_label_label): Likewise.
+ (gimple_build_label): Likewise.
+ (gimple_goto_dest): Likewise.
+ (gimple_build_eh_dispatch): Likewise.
+ (gimple_build_debug_bind_stat): Likewise.
+ (gimple_cond_lhs): Likewise.
+ (gimple_cond_rhs): Likewise.
+ (gimple_cond_set_rhs): Likewise.
+ (gimple_cond_set_false_label): Likewise.
+ (gimple_cond_make_true): Likewise.
+ (gimple_cond_true_p): Likewise.
+ (gimple_build_assign_with_ops): Likewise.
+ (gimple_build_cond): Likewise.
+ (gimple_cond_set_condition_from_tree): Likewise.
+ (gimple_cond_code): Likewise.
+ (gimple_cond_lhs_ptr): Likewise.
+ (gimple_cond_rhs_ptr): Likewise.
+ (gimple_cond_true_label): Likewise.
+ (gimple_cond_set_true_label): Likewise.
+ (gimple_cond_false_label): Likewise.
+ (gimple_cond_make_false): Likewise.
+ (gimple_cond_false_p): Likewise.
+ (gimple_statement_cond): Likewise.
+ (is_a_helper <gimple_statement_cond *>): Likewise.
+ * gimplify-me.c (gimple_regimplify_operands): Likewise.
+ * gimplify.c (gimplify_omp_target_update): Likewise.
+ (gimplify_omp_for): Likewise.
+ (gimplify_omp_atomic): Likewise.
+ (gimplify_cleanup_point_expr): Likewise.
+ (struct gimplify_ctx): Likewise.
+ (pop_gimplify_context): Likewise.
+ (gimple_pop_bind_expr): Likewise.
+ (gimple_current_bind_expr): Likewise.
+ (declare_vars): Likewise.
+ (gimplify_one_sizepos): Likewise.
+ (gimplify_body): Likewise.
+ (gimplify_return_expr): Likewise.
+ (gimplify_transaction): Likewise.
+ (voidify_wrapper_expr): Likewise.
+ (gimplify_bind_expr): Likewise.
+ (gimplify_call_expr): Likewise.
+ (gimplify_modify_expr_to_memcpy): Likewise.
+ (gimplify_modify_expr_to_memset): Likewise.
+ (gimplify_modify_expr): Likewise.
+ (gimplify_expr): Likewise.
+ (gimplify_function_tree): Likewise.
+ (gimplify_asm_expr): Likewise.
+ (gimplify_init_constructor): Likewise.
+ (gimple_push_cleanup): Likewise.
+ (gimplify_switch_expr): Likewise.
+ (gimplify_case_label_expr): Likewise.
+ (gimplify_cond_expr): Likewise.
+ * gimplify.h (pop_gimplify_context): Likewise.
+ (gimple_current_bind_expr): Likewise.
+ (gimplify_one_sizepos): Likewise.
+ * graphite-scop-detection.c (limit_scops): Likewise.
+ (same_close_phi_node): Likewise.
+ (make_close_phi_nodes_unique): Likewise.
+ (canonicalize_loop_closed_ssa): Likewise.
+ * graphite-sese-to-poly.c (phi_arg_in_outermost_loop): Likewise.
+ (simple_copy_phi_p): Likewise.
+ (gsi_for_phi_node): Likewise.
+ (rewrite_close_phi_out_of_ssa): Likewise.
+ (rewrite_cross_bb_scalar_deps): Likewise.
+ (rewrite_commutative_reductions_out_of_ssa_loop): Likewise.
+ (tree_int_to_gmp): Likewise.
+ (reduction_phi_p): Likewise.
+ (build_scop_drs): Likewise.
+ (rewrite_phi_out_of_ssa): Likewise.
+ (rewrite_degenerate_phi): Likewise.
+ (rewrite_reductions_out_of_ssa): Likewise.
+ (is_reduction_operation_p): Likewise.
+ (phi_contains_arg): Likewise.
+ (follow_ssa_with_commutative_ops): Likewise.
+ (detect_commutative_reduction_arg): Likewise.
+ (detect_commutative_reduction_assign): Likewise.
+ (follow_inital_value_to_phi): Likewise.
+ (edge_initial_value_for_loop_phi): Likewise.
+ (used_outside_reduction): Likewise.
+ (detect_commutative_reduction): Likewise.
+ (dr_indices_valid_in_loop): Likewise.
+ (translate_scalar_reduction_to_array): Likewise.
+ (scop_ivs_can_be_represented): Likewise.
+ (remove_simple_copy_phi): Likewise.
+ (remove_invariant_phi): Likewise.
+ (insert_out_of_ssa_copy): Likewise.
+ (translate_scalar_reduction_to_array_for_stmt): Likewise.
+ (create_pw_aff_from_tree): Likewise.
+ (add_conditions_to_domain): Likewise.
+ (add_conditions_to_constraints): Likewise.
+ (single_pred_cond_non_loop_exit): Likewise.
+ * gsstruct.def: Likewise.
+ * internal-fn.c (get_multi_vector_move): Likewise.
+ (expand_LOAD_LANES): Likewise.
+ (expand_STORE_LANES): Likewise.
+ (expand_ANNOTATE): Likewise.
+ (expand_GOMP_SIMD_LANE): Likewise.
+ (expand_GOMP_SIMD_VF): Likewise.
+ (expand_GOMP_SIMD_LAST_LANE): Likewise.
+ (expand_UBSAN_NULL): Likewise.
+ (expand_UBSAN_BOUNDS): Likewise.
+ (expand_UBSAN_OBJECT_SIZE): Likewise.
+ (expand_ASAN_CHECK): Likewise.
+ (ubsan_expand_si_overflow_addsub_check): Likewise.
+ (ubsan_expand_si_overflow_neg_check): Likewise.
+ (ubsan_expand_si_overflow_mul_check): Likewise.
+ (expand_UBSAN_CHECK_ADD): Likewise.
+ (expand_UBSAN_CHECK_SUB): Likewise.
+ (expand_UBSAN_CHECK_MUL): Likewise.
+ (expand_LOOP_VECTORIZED): Likewise.
+ (expand_MASK_LOAD): Likewise.
+ (expand_MASK_STORE): Likewise.
+ (expand_ABNORMAL_DISPATCHER): Likewise.
+ (expand_BUILTIN_EXPECT): Likewise.
+ * internal-fn.h (internal_fn_fnspec): Likewise.
+ * ipa-inline-analysis.c (estimate_function_body_sizes): Likewise.
+ (phi_result_unknown_predicate): Likewise.
+ (set_switch_stmt_execution_predicate): Likewise.
+ * ipa-prop.c (param_type_may_change_p): Likewise.
+ (detect_type_change_from_memory_writes): Likewise.
+ (detect_type_change): Likewise.
+ (ipa_load_from_parm_agg): Likewise.
+ (get_ancestor_addr_info): Likewise.
+ (compute_complex_ancestor_jump_func): Likewise.
+ (build_agg_jump_func_from_list): Likewise.
+ (ipa_compute_jump_functions_for_edge): Likewise.
+ (ipa_is_ssa_with_stmt_def): Likewise.
+ (ipa_note_param_call): Likewise.
+ (ipa_analyze_indirect_call_uses): Likewise.
+ (ipa_analyze_virtual_call_uses): Likewise.
+ (ipa_analyze_stmt_uses): Likewise.
+ (ipa_modify_formal_parameters): Likewise.
+ (ipa_modify_call_arguments): Likewise.
+ * ipa-prop.h (ipa_get_agg_replacements_for_node): Likewise.
+ * ipa-pure-const.c (special_builtin_state): Likewise.
+ (check_stmt): Likewise.
+ * ipa-split.c (consider_split): Likewise.
+ (visit_bb): Likewise.
+ (find_return_bb): Likewise.
+ (find_retval): Likewise.
+ (split_function): Likewise.
+ (verify_non_ssa_vars): Likewise.
+ (check_forbidden_calls): Likewise.
+ * lto-streamer-in.c (fixup_call_stmt_edges_1): Likewise.
+ * lto-streamer-out.c (output_function): Likewise.
+ * omp-low.c (scan_omp_target): Likewise.
+ (scan_omp_single): Likewise.
+ (expand_omp_target): Likewise.
+ (scan_omp_sections): Likewise.
+ (lower_omp_single_simple): Likewise.
+ (scan_omp_for): Likewise.
+ (new_omp_context): Likewise.
+ (delete_omp_context): Likewise.
+ (scan_omp_task): Likewise.
+ (expand_cilk_for_call): Likewise.
+ (task_copyfn_remap_type): Likewise.
+ (gimple_build_cond_empty): Likewise.
+ (remove_exit_barrier): Likewise.
+ (expand_omp_taskreg): Likewise.
+ (expand_cilk_for): Likewise.
+ (struct omp_for_data): Likewise.
+ (is_combined_parallel): Likewise.
+ (workshare_safe_to_combine_p): Likewise.
+ (get_ws_args_for): Likewise.
+ (scan_omp_parallel): Likewise.
+ (finish_taskreg_scan): Likewise.
+ (expand_omp_for): Likewise.
+ (check_omp_nesting_restrictions): Likewise.
+ (expand_omp_sections): Likewise.
+ (expand_omp_atomic): Likewise.
+ (expand_parallel_call): Likewise.
+ (finalize_task_copyfn): Likewise.
+ (scan_omp_1_stmt): Likewise.
+ (lower_omp_sections): Likewise.
+ (lower_omp_single): Likewise.
+ (lower_omp_master): Likewise.
+ (lower_omp_critical): Likewise.
+ (lower_omp_for): Likewise.
+ (lower_omp_taskreg): Likewise.
+ (lower_omp_target): Likewise.
+ (lower_omp_teams): Likewise.
+ (ipa_simd_modify_function_body): Likewise.
+ (build_omp_barrier): Likewise.
+ (lower_omp_taskgroup): Likewise.
+ (lower_omp_ordered): Likewise.
+ (simd_clone_adjust): Likewise.
+ (lower_rec_input_clauses): Likewise.
+ (expand_omp_for_init_vars): Likewise.
+ (expand_omp_atomic_pipeline): Likewise.
+ (expand_omp_atomic_mutex): Likewise.
+ (diagnose_sb_1): Likewise.
+ (lower_lastprivate_clauses): Likewise.
+ (lower_send_shared_vars): Likewise.
+ (expand_omp_for_init_counts): Likewise.
+ (expand_omp_for_generic): Likewise.
+ (expand_omp_for_static_nochunk): Likewise.
+ (expand_omp_for_static_chunk): Likewise.
+ (expand_omp_simd): Likewise.
+ (lower_omp_1): Likewise.
+ (diagnose_sb_2): Likewise.
+ * predict.c (apply_return_prediction): Likewise.
+ (tree_estimate_probability_bb): Likewise.
+ (get_base_value): Likewise.
+ (predict_iv_comparison): Likewise.
+ (predict_extra_loop_exits): Likewise.
+ (predict_loops): Likewise.
+ * sese.c (sese_build_liveouts_bb): Likewise.
+ (sese_add_exit_phis_edge): Likewise.
+ (set_ifsese_condition): Likewise.
+ * ssa-iterators.h (for): Likewise.
+ (num_ssa_operands): Likewise.
+ (single_phi_def): Likewise.
+ (op_iter_init_phiuse): Likewise.
+ (link_use_stmts_after): Likewise.
+ (op_iter_init): Likewise.
+ * stmt.c (reset_out_edges_aux): Likewise.
+ (compute_cases_per_edge): Likewise.
+ * trans-mem.c (diagnose_tm_1): Likewise.
+ (lower_transaction): Likewise.
+ (tm_region_init): Likewise.
+ (generate_tm_state): Likewise.
+ (make_pass_lower_tm): Likewise.
+ (transaction_subcode_ior): Likewise.
+ (build_tm_load): Likewise.
+ (build_tm_store): Likewise.
+ (expand_transaction): Likewise.
+ (expand_block_edges): Likewise.
+ (dump_tm_memopt_transform): Likewise.
+ (tm_memopt_transform_blocks): Likewise.
+ (ipa_tm_insert_irr_call): Likewise.
+ (ipa_tm_transform_calls_redirect): Likewise.
+ (expand_call_tm): Likewise.
+ (ipa_tm_insert_gettmclone_call): Likewise.
+ * tree-call-cdce.c (check_target_format): Likewise.
+ (check_pow): Likewise.
+ (check_builtin_call): Likewise.
+ (gen_conditions_for_pow_int_base): Likewise.
+ (get_no_error_domain): Likewise.
+ (gen_shrink_wrap_conditions): Likewise.
+ (shrink_wrap_one_built_in_call): Likewise.
+ (shrink_wrap_conditional_dead_built_in_calls): Likewise.
+ (gen_one_condition): Likewise.
+ * tree-cfg.c (replace_uses_by): Likewise.
+ (reinstall_phi_args): Likewise.
+ (verify_gimple_in_cfg): Likewise.
+ (gimple_make_forwarder_block): Likewise.
+ (gimple_duplicate_bb): Likewise.
+ (add_phi_args_after_copy_edge): Likewise.
+ (gimple_duplicate_sese_tail): Likewise.
+ (gimple_lv_adjust_loop_header_phi): Likewise.
+ (move_stmt_r): Likewise.
+ (do_warn_unused_result): Likewise.
+ (verify_gimple_assign): Likewise.
+ (extract_true_false_edges_from_block): Likewise.
+ (verify_gimple_transaction): Likewise.
+ (verify_gimple_in_seq_2): Likewise.
+ (single_noncomplex_succ): Likewise.
+ (valid_fixed_convert_types_p): Likewise.
+ (make_gimple_asm_edges): Likewise.
+ (is_ctrl_altering_stmt): Likewise.
+ (need_fake_edge_p): Likewise.
+ (verify_gimple_comparison): Likewise.
+ (verify_gimple_assign_unary): Likewise.
+ (verify_gimple_assign_binary): Likewise.
+ (verify_gimple_assign_ternary): Likewise.
+ (verify_gimple_assign_single): Likewise.
+ (make_gimple_switch_edges): Likewise.
+ (find_case_label_for_value): Likewise.
+ (end_recording_case_labels): Likewise.
+ (get_cases_for_edge): Likewise.
+ (group_case_labels): Likewise.
+ (find_taken_edge): Likewise.
+ (find_taken_edge_cond_expr): Likewise.
+ (find_taken_edge_switch_expr): Likewise.
+ (verify_gimple_goto): Likewise.
+ (gimple_redirect_edge_and_branch): Likewise.
+ (make_edges): Likewise.
+ (gimple_can_merge_blocks_p): Likewise.
+ (gimple_merge_blocks): Likewise.
+ (remove_bb): Likewise.
+ (stmt_starts_bb_p): Likewise.
+ (verify_gimple_debug): Likewise.
+ (gimple_verify_flow_info): Likewise.
+ (gimple_block_label): Likewise.
+ (move_block_to_fn): Likewise.
+ (verify_gimple_return): Likewise.
+ (fold_cond_expr_cond): Likewise.
+ (make_cond_expr_edges): Likewise.
+ (cleanup_dead_labels): Likewise.
+ (verify_gimple_label): Likewise.
+ (verify_gimple_stmt): Likewise.
+ * tree-cfg.h (notice_special_calls): Likewise.
+ (group_case_labels_stmt): Likewise.
+ * tree-cfgcleanup.c (phi_alternatives_equal): Likewise.
+ (cleanup_control_expr_graph): Likewise.
+ (tree_forwarder_block_p): Likewise.
+ (remove_forwarder_block): Likewise.
+ (remove_forwarder_block_with_phi): Likewise.
+ * tree-complex.c (init_dont_simulate_again): Likewise.
+ (complex_visit_stmt): Likewise.
+ (update_phi_components): Likewise.
+ (expand_complex_move): Likewise.
+ (expand_complex_libcall): Likewise.
+ (expand_complex_asm): Likewise.
+ (expand_complex_comparison): Likewise.
+ * tree-data-ref.c (get_references_in_stmt): Likewise.
+ * tree-dfa.c (collect_dfa_stats): Likewise.
+ (dump_dfa_stats): Likewise.
+ * tree-eh.c (lookup_stmt_eh_lp): Likewise.
+ (record_in_finally_tree): Likewise.
+ (collect_finally_tree_1): Likewise.
+ (outside_finally_tree): Likewise.
+ (eh_region_may_contain_throw): Likewise.
+ (lower_try_finally_dup_block): Likewise.
+ (cleanup_is_dead_in): Likewise.
+ (lower_try_finally): Likewise.
+ (lower_eh_filter): Likewise.
+ (same_handler_p): Likewise.
+ (cleanup_empty_eh_merge_phis): Likewise.
+ (emit_eh_dispatch): Likewise.
+ (make_pass_lower_eh): Likewise.
+ (redirect_eh_edge): Likewise.
+ (sink_clobbers): Likewise.
+ (lower_eh_dispatch): Likewise.
+ (verify_eh_edges): Likewise.
+ (emit_resx): Likewise.
+ (make_pass_refactor_eh): Likewise.
+ (lower_resx): Likewise.
+ (mark_reachable_handlers): Likewise.
+ (lower_try_finally_fallthru_label): Likewise.
+ (get_eh_else): Likewise.
+ (lower_try_finally_nofallthru): Likewise.
+ (lower_try_finally_copy): Likewise.
+ (lower_try_finally_switch): Likewise.
+ (decide_copy_try_finally): Likewise.
+ (honor_protect_cleanup_actions): Likewise.
+ (lower_eh_must_not_throw): Likewise.
+ (replace_goto_queue_1): Likewise.
+ (lower_catch): Likewise.
+ (refactor_eh_r): Likewise.
+ (stmt_could_throw_p): Likewise.
+ (lower_eh_constructs_2): Likewise.
+ (collect_finally_tree): Likewise.
+ (emit_post_landing_pad): Likewise.
+ (lower_try_finally_onedest): Likewise.
+ (unsplit_eh): Likewise.
+ (cleanup_empty_eh_unsplit): Likewise.
+ (do_goto_redirection): Likewise.
+ (maybe_record_in_goto_queue): Likewise.
+ * tree-eh.h (make_eh_dispatch_edges): Likewise.
+ (redirect_eh_dispatch_edge): Likewise.
+ (verify_eh_dispatch_edge): Likewise.
+ * tree-emutls.c (lower_emutls_function_body): Likewise.
+ (lower_emutls_stmt): Likewise.
+ (gen_emutls_addr): Likewise.
+ * tree-if-conv.c (if_convertible_loop_p_1): Likewise.
+ (bb_with_exit_edge_p): Likewise.
+ (convert_scalar_cond_reduction): Likewise.
+ (predicate_all_scalar_phis): Likewise.
+ * tree-inline.c (update_ssa_across_abnormal_edges): Likewise.
+ (copy_phis_for_bb): Likewise.
+ (copy_edges_for_bb): Likewise.
+ (remap_gimple_seq): Likewise.
+ (replace_locals_stmt): Likewise.
+ (copy_bb): Likewise.
+ (inline_forbidden_p_stmt): Likewise.
+ (expand_call_inline): Likewise.
+ (estimate_num_insns): Likewise.
+ (mark_local_labels_stmt): Likewise.
+ (remap_gimple_stmt): Likewise.
+ (maybe_move_debug_stmts_to_successors): Likewise.
+ (copy_cfg_body): Likewise.
+ (copy_debug_stmts): Likewise.
+ * tree-inline.h (void): Likewise.
+ * tree-into-ssa.c (node): Likewise.
+ (find_def_blocks_for): Likewise.
+ (mark_phi_for_rewrite): Likewise.
+ (insert_phi_nodes_for): Likewise.
+ (rewrite_add_phi_arguments): Likewise.
+ (rewrite_update_phi_arguments): Likewise.
+ (mark_use_interesting): Likewise.
+ (prepare_block_for_update): Likewise.
+ (prepare_use_sites_for): Likewise.
+ (delete_update_ssa): Likewise.
+ (mark_virtual_operand_for_renaming): Likewise.
+ * tree-into-ssa.h (mark_virtual_phi_result_for_renaming): Likewise.
+ * tree-loop-distribution.c (stmts_from_loop): Likewise.
+ (destroy_loop): Likewise.
+ (distribute_loop): Likewise.
+ (generate_loops_for_partition): Likewise.
+ * tree-nested.c (walk_function): Likewise.
+ (convert_nonlocal_reference_stmt): Likewise.
+ (convert_local_reference_stmt): Likewise.
+ (finalize_nesting_tree_1): Likewise.
+ (get_chain_field): Likewise.
+ (convert_nl_goto_reference): Likewise.
+ (convert_tramp_reference_op): Likewise.
+ (convert_gimple_call): Likewise.
+ (convert_nl_goto_receiver): Likewise.
+ * tree-nrv.c (finalize_nrv_r): Likewise.
+ (make_pass_nrv): Likewise.
+ (dest_safe_for_nrv_p): Likewise.
+ * tree-object-size.c (pass_through_call): Likewise.
+ (addr_object_size): Likewise.
+ (alloc_object_size): Likewise.
+ (expr_object_size): Likewise.
+ (collect_object_sizes_for): Likewise.
+ (check_for_plus_in_loops_1): Likewise.
+ (fini_object_sizes): Likewise.
+ * tree-outof-ssa.c (eliminate_build): Likewise.
+ (eliminate_phi): Likewise.
+ (remove_gimple_phi_args): Likewise.
+ (eliminate_useless_phis): Likewise.
+ (rewrite_trees): Likewise.
+ (insert_backedge_copies): Likewise.
+ * tree-parloops.c (printf): Likewise.
+ (create_phi_for_local_result): Likewise.
+ (loop_has_vector_phi_nodes): Likewise.
+ (gather_scalar_reductions): Likewise.
+ (try_create_reduction_list): Likewise.
+ (take_address_of): Likewise.
+ (transform_to_exit_first_loop): Likewise.
+ (create_parallel_loop): Likewise.
+ * tree-phinodes.c (remove_phi_args): Likewise.
+ (phinodes_print_statistics): Likewise.
+ (release_phi_node): Likewise.
+ (reserve_phi_args_for_new_edge): Likewise.
+ (add_phi_node_to_bb): Likewise.
+ (create_phi_node): Likewise.
+ (add_phi_arg): Likewise.
+ (remove_phi_nodes): Likewise.
+ (allocate_phi_node): Likewise.
+ (ideal_phi_node_len): Likewise.
+ (make_phi_node): Likewise.
+ (resize_phi_node): Likewise.
+ * tree-phinodes.h (add_phi_node_to_bb): Likewise.
+ (add_phi_arg): Likewise.
+ (degenerate_phi_result): Likewise.
+ * tree-predcom.c (valid_initializer_p): Likewise.
+ (find_looparound_phi): Likewise.
+ (add_looparound_copies): Likewise.
+ (initialize_root_vars): Likewise.
+ (eliminate_temp_copies): Likewise.
+ (replace_ref_with): Likewise.
+ (initialize_root_vars_lm): Likewise.
+ (reassociate_to_the_same_stmt): Likewise.
+ * tree-profile.c (gimple_gen_interval_profiler): Likewise.
+ (gimple_gen_pow2_profiler): Likewise.
+ (gimple_gen_one_value_profiler): Likewise.
+ (gimple_gen_time_profiler): Likewise.
+ (gimple_gen_average_profiler): Likewise.
+ (gimple_gen_ior_profiler): Likewise.
+ (gimple_gen_edge_profiler): Likewise.
+ (gimple_gen_ic_profiler): Likewise.
+ (gimple_gen_ic_func_profiler): Likewise.
+ * tree-scalar-evolution.c (follow_ssa_edge_binary): Likewise.
+ (follow_ssa_edge_expr): Likewise.
+ (follow_ssa_edge_in_rhs): Likewise.
+ (backedge_phi_arg_p): Likewise.
+ (follow_ssa_edge_in_condition_phi_branch): Likewise.
+ (follow_ssa_edge_in_condition_phi): Likewise.
+ (follow_ssa_edge_inner_loop_phi): Likewise.
+ (follow_ssa_edge): Likewise.
+ (simplify_peeled_chrec): Likewise.
+ (analyze_evolution_in_loop): Likewise.
+ (analyze_initial_condition): Likewise.
+ (interpret_loop_phi): Likewise.
+ (analyze_scalar_evolution_1): Likewise.
+ (loop_closed_phi_def): Likewise.
+ (scev_const_prop): Likewise.
+ (add_to_evolution): Likewise.
+ (get_loop_exit_condition): Likewise.
+ * tree-scalar-evolution.h (number_of_latch_executions): Likewise.
+ * tree-sra.c (replace_removed_params_ssa_names): Likewise.
+ (convert_callers): Likewise.
+ (scan_function): Likewise.
+ (sra_modify_function_body): Likewise.
+ (ipa_sra_modify_function_body): Likewise.
+ (build_ref_for_offset): Likewise.
+ (generate_subtree_copies): Likewise.
+ (init_subtree_with_zero): Likewise.
+ (sra_modify_expr): Likewise.
+ (load_assign_lhs_subreplacements): Likewise.
+ (sra_modify_assign): Likewise.
+ (sra_ipa_reset_debug_stmts): Likewise.
+ * tree-ssa-alias.c (refs_output_dependent_p): Likewise.
+ (ref_maybe_used_by_call_p_1): Likewise.
+ (ref_maybe_used_by_stmt_p): Likewise.
+ (call_may_clobber_ref_p_1): Likewise.
+ (stmt_may_clobber_ref_p_1): Likewise.
+ * tree-ssa-alias.h (call_may_clobber_ref_p): Likewise.
+ (call_may_clobber_ref_p_1): Likewise.
+ * tree-ssa-ccp.c (ccp_initialize): Likewise.
+ (ccp_lattice_meet): Likewise.
+ (insert_clobber_before_stack_restore): Likewise.
+ (ccp_fold): Likewise.
+ (evaluate_stmt): Likewise.
+ (ccp_fold_stmt): Likewise.
+ (optimize_unreachable): Likewise.
+ * tree-ssa-coalesce.c (build_ssa_conflict_graph): Likewise.
+ (coalesce_partitions): Likewise.
+ (create_outofssa_var_map): Likewise.
+ * tree-ssa-copy.c (copy_prop_visit_stmt): Likewise.
+ (init_copy_prop): Likewise.
+ * tree-ssa-copyrename.c (copy_rename_partition_coalesce): Likewise.
+ * tree-ssa-dce.c (remove_dead_phis): Likewise.
+ (forward_edge_to_pdom): Likewise.
+ (eliminate_unnecessary_stmts): Likewise.
+ (propagate_necessity): Likewise.
+ (remove_dead_stmt): Likewise.
+ * tree-ssa-dom.c (record_equivalences_from_phis): Likewise.
+ (cprop_into_successor_phis): Likewise.
+ (get_rhs_or_phi_arg): Likewise.
+ (eliminate_degenerate_phis_1): Likewise.
+ (struct hashable_expr): Likewise.
+ (add_hashable_expr): Likewise.
+ (print_expr_hash_elt): Likewise.
+ (record_equivalences_from_stmt): Likewise.
+ (initialize_hash_element): Likewise.
+ (record_edge_info): Likewise.
+ (eliminate_redundant_computations): Likewise.
+ (propagate_rhs_into_lhs): Likewise.
+ (free_all_edge_infos): Likewise.
+ (make_pass_dominator): Likewise.
+ (optimize_stmt): Likewise.
+ * tree-ssa-forwprop.c (remove_prop_source_from_use): Likewise.
+ (simplify_bitwise_binary): Likewise.
+ (simplify_rotate): Likewise.
+ (simplify_not_neg_expr): Likewise.
+ (simplify_gimple_switch_label_vec): Likewise.
+ (forward_propagate_into_comparison): Likewise.
+ (simplify_mult): Likewise.
+ * tree-ssa-ifcombine.c (same_phi_args_p): Likewise.
+ (get_name_for_bit_test): Likewise.
+ (recognize_single_bit_test): Likewise.
+ (ifcombine_ifandif): Likewise.
+ * tree-ssa-live.c (remove_unused_locals): Likewise.
+ (set_var_live_on_entry): Likewise.
+ (calculate_live_on_exit): Likewise.
+ (verify_live_on_entry): Likewise.
+ * tree-ssa-loop-im.c (mem_ref_in_stmt): Likewise.
+ (determine_max_movement): Likewise.
+ (execute_sm_if_changed): Likewise.
+ (rewrite_reciprocal): Likewise.
+ (execute_sm): Likewise.
+ (rewrite_bittest): Likewise.
+ * tree-ssa-loop-ivcanon.c (propagate_constants_for_unrolling):
+ Likewise.
+ (unloop_loops): Likewise.
+ (tree_estimate_loop_size): Likewise.
+ (create_canonical_iv): Likewise.
+ (remove_exits_and_undefined_stmts): Likewise.
+ (remove_redundant_iv_tests): Likewise.
+ (try_unroll_loop_completely): Likewise.
+ * tree-ssa-loop-ivopts.c (get_iv): Likewise.
+ (find_bivs): Likewise.
+ (mark_bivs): Likewise.
+ (find_interesting_uses_outside): Likewise.
+ (determine_set_costs): Likewise.
+ (rewrite_use_nonlinear_expr): Likewise.
+ (remove_unused_ivs): Likewise.
+ (extract_cond_operands): Likewise.
+ (rewrite_use_compare): Likewise.
+ * tree-ssa-loop-manip.c (tree_unroll_loop): Likewise.
+ (rewrite_all_phi_nodes_with_iv): Likewise.
+ (add_exit_phi): Likewise.
+ (find_uses_to_rename_bb): Likewise.
+ (verify_loop_closed_ssa): Likewise.
+ (split_loop_exit_edge): Likewise.
+ (create_iv): Likewise.
+ (rewrite_phi_with_iv): Likewise.
+ (tree_transform_and_unroll_loop): Likewise.
+ (canonicalize_loop_ivs): Likewise.
+ * tree-ssa-loop-niter.c (determine_value_range): Likewise.
+ (finite_loop_p): Likewise.
+ (chain_of_csts_start): Likewise.
+ (get_base_for): Likewise.
+ (loop_niter_by_eval): Likewise.
+ (number_of_iterations_exit): Likewise.
+ * tree-ssa-loop-prefetch.c (issue_prefetch_ref): Likewise.
+ (emit_mfence_after_loop): Likewise.
+ * tree-ssa-loop-unswitch.c (tree_may_unswitch_on): Likewise.
+ (tree_unswitch_single_loop): Likewise.
+ * tree-ssa-math-opts.c (execute_cse_reciprocals_1): Likewise.
+ (build_and_insert_call): Likewise.
+ (insert_reciprocals): Likewise.
+ (powi_as_mults_1): Likewise.
+ (powi_as_mults): Likewise.
+ (build_and_insert_binop): Likewise.
+ (build_and_insert_cast): Likewise.
+ (gimple_expand_builtin_cabs): Likewise.
+ (convert_mult_to_fma): Likewise.
+ * tree-ssa-operands.c (get_tmr_operands): Likewise.
+ (maybe_add_call_vops): Likewise.
+ (parse_ssa_operands): Likewise.
+ * tree-ssa-phiopt.c (tree_ssa_cs_elim): Likewise.
+ (single_non_singleton_phi_for_edges): Likewise.
+ (tree_ssa_phiopt_worker): Likewise.
+ (replace_phi_edge_with_variable): Likewise.
+ (hoist_adjacent_loads): Likewise.
+ (conditional_replacement): Likewise.
+ (abs_replacement): Likewise.
+ (cond_store_replacement): Likewise.
+ (cond_if_else_store_replacement_1): Likewise.
+ (minmax_replacement): Likewise.
+ * tree-ssa-phiprop.c (propagate_with_phi): Likewise.
+ (phivn_valid_p): Likewise.
+ (phiprop_insert_phi): Likewise.
+ * tree-ssa-pre.c (insert_into_preds_of_block): Likewise.
+ (compute_avail): Likewise.
+ (create_expression_by_pieces): Likewise.
+ (do_regular_insertion): Likewise.
+ (eliminate_insert): Likewise.
+ * tree-ssa-propagate.c (simulate_stmt): Likewise.
+ (replace_uses_in): Likewise.
+ (replace_phi_args_in): Likewise.
+ (update_gimple_call): Likewise.
+ (update_call_from_tree): Likewise.
+ (may_propagate_copy_into_stmt): Likewise.
+ (propagate_tree_value_into_stmt): Likewise.
+ * tree-ssa-propagate.h (prop_simulate_again_p): Likewise.
+ * tree-ssa-reassoc.c (suitable_cond_bb): Likewise.
+ (is_phi_for_stmt): Likewise.
+ (build_and_add_sum): Likewise.
+ (update_ops): Likewise.
+ (get_reassociation_width): Likewise.
+ (reassociate_bb): Likewise.
+ (maybe_optimize_range_tests): Likewise.
+ * tree-ssa-sccvn.c (DFS): Likewise.
+ (ao_ref_init_from_vn_reference): Likewise.
+ (valueize_shared_reference_ops_from_ref): Likewise.
+ (vn_reference_lookup_3): Likewise.
+ (vn_reference_lookup): Likewise.
+ (visit_nary_op): Likewise.
+ (simplify_binary_expression): Likewise.
+ (simplify_unary_expression): Likewise.
+ (visit_use): Likewise.
+ (set_hashtable_value_ids): Likewise.
+ * tree-ssa-sccvn.h (vn_constant_eq_with_type): Likewise.
+ * tree-ssa-sink.c (find_bb_for_arg): Likewise.
+ (nearest_common_dominator_of_uses): Likewise.
+ (statement_sink_location): Likewise.
+ * tree-ssa-strlen.c (do_invalidate): Likewise.
+ * tree-ssa-structalias.c (new_var_info): Likewise.
+ (get_call_vi): Likewise.
+ (lookup_call_use_vi): Likewise.
+ (lookup_call_clobber_vi): Likewise.
+ (get_call_use_vi): Likewise.
+ (get_function_part_constraint): Likewise.
+ (handle_rhs_call): Likewise.
+ (handle_lhs_call): Likewise.
+ (handle_const_call): Likewise.
+ (handle_pure_call): Likewise.
+ (get_fi_for_callee): Likewise.
+ (find_func_aliases_for_builtin_call): Likewise.
+ (find_func_clobbers): Likewise.
+ (compute_points_to_sets): Likewise.
+ (ipa_pta_execute): Likewise.
+ (find_func_aliases): Likewise.
+ * tree-ssa-tail-merge.c (same_succ_hash): Likewise.
+ (release_last_vdef): Likewise.
+ (same_phi_alternatives_1): Likewise.
+ (find_clusters): Likewise.
+ (vop_phi): Likewise.
+ (replace_block_by): Likewise.
+ * tree-ssa-ter.c (ter_is_replaceable_p): Likewise.
+ * tree-ssa-threadedge.c (record_temporary_equivalences_from_phis):
+ Likewise.
+ (record_temporary_equivalences_from_stmts_at_dest): Likewise.
+ (simplify_control_stmt_condition): Likewise.
+ (dummy_simplify): Likewise.
+ (propagate_threaded_block_debug_into): Likewise.
+ (thread_around_empty_blocks): Likewise.
+ (thread_through_normal_block): Likewise.
+ * tree-ssa-threadedge.h (thread_across_edge): Likewise.
+ * tree-ssa-threadupdate.c (copy_phi_arg_into_existing_phi): Likewise.
+ (get_value_locus_in_path): Likewise.
+ (copy_phi_args): Likewise.
+ (phi_args_equal_on_edges): Likewise.
+ * tree-ssa-uncprop.c (associate_equivalences_with_edges): Likewise.
+ * tree-ssa-uninit.c (can_skip_redundant_opnd): Likewise.
+ (find_predicates): Likewise.
+ (collect_phi_def_edges): Likewise.
+ (find_matching_predicate_in_rest_chains): Likewise.
+ (prune_uninit_phi_opnds_in_unrealizable_paths): Likewise.
+ (use_pred_not_overlap_with_undef_path_pred): Likewise.
+ (normalize_preds): Likewise.
+ (is_use_properly_guarded): Likewise.
+ (find_uninit_use): Likewise.
+ (gate_warn_uninitialized): Likewise.
+ * tree-ssa.c (ssa_redirect_edge): Likewise.
+ (flush_pending_stmts): Likewise.
+ (verify_use): Likewise.
+ (verify_ssa): Likewise.
+ (execute_update_addresses_taken): Likewise.
+ (insert_debug_temp_for_var_def): Likewise.
+ * tree-stdarg.c (check_all_va_list_escapes): Likewise.
+ * tree-switch-conversion.c (check_final_bb): Likewise.
+ (build_constructors): Likewise.
+ (build_arrays): Likewise.
+ (gen_def_assigns): Likewise.
+ (case_bit_test_cmp): Likewise.
+ (emit_case_bit_tests): Likewise.
+ (gather_default_values): Likewise.
+ (constructor_contains_same_values_p): Likewise.
+ (array_value_type): Likewise.
+ (build_one_array): Likewise.
+ (fix_phi_nodes): Likewise.
+ (process_switch): Likewise.
+ (hoist_edge_and_branch_if_true): Likewise.
+ (gen_inbound_check): Likewise.
+ * tree-tailcall.c (add_successor_phi_arg): Likewise.
+ (propagate_through_phis): Likewise.
+ (eliminate_tail_call): Likewise.
+ (create_tailcall_accumulator): Likewise.
+ (tree_optimize_tail_calls_1): Likewise.
+ (adjust_return_value): Likewise.
+ (optimize_tail_call): Likewise.
+ (independent_of_stmt_p): Likewise.
+ (find_tail_calls): Likewise.
+ (adjust_return_value_with_ops): Likewise.
+ (update_accumulator_with_ops): Likewise.
+ * tree-vect-data-refs.c (bump_vector_ptr): Likewise.
+ (vect_setup_realignment): Likewise.
+ * tree-vect-generic.c (expand_vector_condition): Likewise.
+ (optimize_vector_constructor): Likewise.
+ (lower_vec_perm): Likewise.
+ (expand_vector_operations_1): Likewise.
+ * tree-vect-loop-manip.c (rename_variables_in_bb): Likewise.
+ (vect_can_advance_ivs_p): Likewise.
+ (slpeel_update_phi_nodes_for_guard1): Likewise.
+ (slpeel_update_phi_nodes_for_guard2): Likewise.
+ (slpeel_tree_duplicate_loop_to_edge_cfg): Likewise.
+ (slpeel_tree_peel_loop_to_edge): Likewise.
+ (vect_update_ivs_after_vectorizer): Likewise.
+ (vect_loop_versioning): Likewise.
+ (slpeel_make_loop_iterate_ntimes): Likewise.
+ (slpeel_add_loop_guard): Likewise.
+ (slpeel_can_duplicate_loop_p): Likewise.
+ (set_prologue_iterations): Likewise.
+ * tree-vect-loop.c (vect_determine_vectorization_factor): Likewise.
+ (vect_analyze_scalar_cycles_1): Likewise.
+ (vect_analyze_loop_operations): Likewise.
+ (get_initial_def_for_induction): Likewise.
+ (vectorizable_reduction): Likewise.
+ (vect_transform_loop): Likewise.
+ (vect_create_epilog_for_reduction): Likewise.
+ (vect_analyze_scalar_cycles): Likewise.
+ (vect_analyze_loop_form): Likewise.
+ * tree-vect-patterns.c (vect_recog_pow_pattern): Likewise.
+ * tree-vect-slp.c (vect_build_slp_tree_1): Likewise.
+ * tree-vect-stmts.c (vectorizable_simd_clone_call): Likewise.
+ (vectorizable_load): Likewise.
+ (vect_finish_stmt_generation): Likewise.
+ (vectorizable_call): Likewise.
+ (vectorizable_condition): Likewise.
+ * tree-vectorizer.h (unlimited_cost_model): Likewise.
+ * tree-vrp.c (find_assert_locations): Likewise.
+ (vrp_initialize): Likewise.
+ (vrp_meet): Likewise.
+ (extract_range_from_unary_expr): Likewise.
+ (extract_range_basic): Likewise.
+ (build_assert_expr_for): Likewise.
+ (vrp_visit_assignment_or_call): Likewise.
+ (simplify_truth_ops_using_ranges): Likewise.
+ (simplify_float_conversion_using_ranges): Likewise.
+ (live_on_edge): Likewise.
+ (compare_case_labels): Likewise.
+ (vrp_visit_cond_stmt): Likewise.
+ (find_case_label_index): Likewise.
+ (find_case_label_range): Likewise.
+ (find_case_label_ranges): Likewise.
+ (simplify_cond_using_ranges): Likewise.
+ (register_edge_assert_for): Likewise.
+ (find_assert_locations_1): Likewise.
+ (vrp_evaluate_conditional): Likewise.
+ (vrp_visit_stmt): Likewise.
+ (range_fits_type_p): Likewise.
+ (simplify_stmt_using_ranges): Likewise.
+ (fold_predicate_in): Likewise.
+ (simplify_stmt_for_jump_threading): Likewise.
+ (identify_jump_threads): Likewise.
+ * tree.c (find_decls_types_in_node): Likewise.
+ * ubsan.c (instrument_nonnull_return): Likewise.
+ (instrument_mem_ref): Likewise.
+ * value-prof.c (find_func_by_profile_id): Likewise.
+ (check_ic_target): Likewise.
+ (gimple_ic_transform): Likewise.
+ (interesting_stringop_to_profile_p): Likewise.
+ (gimple_stringops_transform): Likewise.
+ (gimple_stringops_values_to_profile): Likewise.
+ (gimple_value_profile_transformations): Likewise.
+ (gimple_divmod_fixed_value_transform): Likewise.
+ (gimple_mod_pow2_value_transform): Likewise.
+ (gimple_mod_subtract_transform): Likewise.
+ (gimple_divmod_fixed_value): Likewise.
+ (gimple_mod_pow2): Likewise.
+ (gimple_mod_subtract): Likewise.
+ (gimple_ic): Likewise.
+ (gimple_stringop_fixed_value): Likewise.
+ * value-prof.h (stringop_block_profile): Likewise.
+ (gimple_ic): Likewise.
+ * vtable-verify.c (verify_bb_vtables): Likewise.
+
+2014-10-27 David Malcolm <dmalcolm@redhat.com>
+
+ * coretypes.h (gimple_cond): Drop this typedef.
+ (const_gimple_cond): Likewise.
+ (gimple_debug): Likewise.
+ (const_gimple_debug): Likewise.
+ (gimple_goto): Likewise.
+ (const_gimple_goto): Likewise.
+ (gimple_label): Likewise.
+ (const_gimple_label): Likewise.
+ (gimple_switch): Likewise.
+ (const_gimple_switch): Likewise.
+ (gimple_assign): Likewise.
+ (const_gimple_assign): Likewise.
+ (gimple_asm): Likewise.
+ (const_gimple_asm): Likewise.
+ (gimple_call): Likewise.
+ (const_gimple_call): Likewise.
+ (gimple_transaction): Likewise.
+ (const_gimple_transaction): Likewise.
+ (gimple_return): Likewise.
+ (const_gimple_return): Likewise.
+ (gimple_bind): Likewise.
+ (const_gimple_bind): Likewise.
+ (gimple_catch): Likewise.
+ (const_gimple_catch): Likewise.
+ (gimple_eh_filter): Likewise.
+ (const_gimple_eh_filter): Likewise.
+ (gimple_eh_must_not_throw): Likewise.
+ (const_gimple_eh_must_not_throw): Likewise.
+ (gimple_eh_else): Likewise.
+ (const_gimple_eh_else): Likewise.
+ (gimple_resx): Likewise.
+ (const_gimple_resx): Likewise.
+ (gimple_eh_dispatch): Likewise.
+ (const_gimple_eh_dispatch): Likewise.
+ (gimple_phi): Likewise.
+ (const_gimple_phi): Likewise.
+ (gimple_try): Likewise.
+ (const_gimple_try): Likewise.
+ (gimple_omp_atomic_load): Likewise.
+ (const_gimple_omp_atomic_load): Likewise.
+ (gimple_omp_atomic_store): Likewise.
+ (const_gimple_omp_atomic_store): Likewise.
+ (gimple_omp_continue): Likewise.
+ (const_gimple_omp_continue): Likewise.
+ (gimple_omp_critical): Likewise.
+ (const_gimple_omp_critical): Likewise.
+ (gimple_omp_for): Likewise.
+ (const_gimple_omp_for): Likewise.
+ (gimple_omp_parallel): Likewise.
+ (const_gimple_omp_parallel): Likewise.
+ (gimple_omp_task): Likewise.
+ (const_gimple_omp_task): Likewise.
+ (gimple_omp_sections): Likewise.
+ (const_gimple_omp_sections): Likewise.
+ (gimple_omp_single): Likewise.
+ (const_gimple_omp_single): Likewise.
+ (gimple_omp_target): Likewise.
+ (const_gimple_omp_target): Likewise.
+ (gimple_omp_teams): Likewise.
+ (const_gimple_omp_teams): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ * tree-inline.h (struct copy_body_data): Rename field
+ "gimple_call" to "call_stmt".
+
+ * gimplify.c (gimplify_bind_expr): Rename local "gimple_bind" to
+ "bind_stmt".
+ (gimplify_switch_expr): Rename local "gimple_switch" to
+ "switch_stmt".
+ (gimplify_cond_expr): Rename local "gimple_cond" to "cond_stmt".
+ * tree-eh.c (lower_catch): Rename local "gcatch" to "catch_stmt".
+ * tree-inline.c (copy_bb): Update for renaming of field within
+ struct copy_body_data from "gimple_call" to "call_stmt".
+ (copy_cfg_body): Likewise.
+ (copy_debug_stmt): Likewise.
+ (expand_call_inline): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Convert various gimple to gimple_phi within ssa-iterators.h
+
+ * ssa-iterators.h (FOR_EACH_PHI_OR_STMT_USE): Add checked cast to
+ gimple_phi.
+ (FOR_EACH_PHI_OR_STMT_DEF): Likewise.
+
+ * ssa-iterators.h (single_phi_def): Require a gimple_phi.
+ (op_iter_init_phiuse): Likewise.
+ (op_iter_init_phidef): Likewise.
+ * tree-ssa-loop-im.c (extract_true_false_args_from_phi): Likewise.
+
+ * tree-ssa-loop-im.c (link_use_stmts_after): Replace check against
+ GIMPLE_PHI with add a dyn_cast to gimple_phi, using result as needed.
+ (determine_max_movement): Likewise.
+ * tree-ssa-reassoc.c (is_phi_for_stmt): Likewise.
+ * tree-ssa-sccvn.c (start_over): Likewise.
+
+ * tree-ssa-loop-im.c
+ (move_computations_dom_walker::before_dom_children): Split iterator
+ into gimple_stmt_iterator and a gimple_phi_iterator so that a local
+ can become a gimple_phi.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Use gimple_phi in many more places.
+
+ * gimple-ssa-strength-reduction.c (slsr_process_phi): Require a
+ gimple_phi.
+ * ipa-inline-analysis.c (predicate_for_phi_result): Likewise.
+ * tree-emutls.c (lower_emutls_phi_arg): Likewise.
+ * tree-if-conv.c (predicate_scalar_phi): Likewise.
+ * tree-into-ssa.c (mark_virtual_phi_result_for_renaming): Likewise.
+ * tree-into-ssa.h (mark_virtual_phi_result_for_renaming): Likewise.
+ * tree-phinodes.c (degenerate_phi_result): Likewise.
+ * tree-phinodes.h (degenerate_phi_result): Likewise.
+ * tree-ssa.c (verify_use): Likewise.
+ * tree-switch-conversion.c (array_value_type): Likewise.
+
+ * graphite-scop-detection.c (same_close_phi_node): Require a pair
+ of gimple_phi.
+ (remove_duplicate_close_phi): Require a gimple_phi and a
+ gimple_phi_iterator.
+ (make_close_phi_nodes_unique): Convert a local into a
+ gimple_phi_iterator.
+
+ * gimple-pretty-print.c (gimple_dump_bb_for_graph): Split iterator
+ into gimple_stmt_iterator and gimple_phi_iterator, converting local
+ from gimple to gimple_phi.
+ * gimple-ssa-strength-reduction.c
+ (find_candidates_dom_walker::before_dom_children): Likewise.
+ * ipa-inline-analysis.c (estimate_function_body_sizes): Likewise.
+ * ipa-split.c (verify_non_ssa_vars): Likewise.
+ (visit_bb): Likewise.
+ (split_function): Likewise.
+ * lto-streamer-out.c (output_function): Likewise.
+ * sese.c (sese_build_liveouts_bb): Likewise.
+ * tree-cfg.c (gimple_can_merge_blocks_p): Likewise.
+ * tree-complex.c (init_dont_simulate_again): Likewise.
+ * tree-dfa.c (collect_dfa_stats): Likewise.
+ * tree-eh.c (sink_clobbers): Likewise.
+ * tree-emutls.c (lower_emutls_function_body): Likewise.
+ * tree-into-ssa.c (rewrite_dom_walker::before_dom_children):
+ Likewise.
+ (rewrite_update_dom_walker::before_dom_children): Likewise.
+ (prepare_block_for_update): Likewise.
+ * tree-loop-distribution.c (stmts_from_loop): Likewise.
+ (generate_loops_for_partition): Likewise.
+ (destroy_loop): Likewise.
+ (tree_loop_distribution): Likewise.
+ * tree-ssa-coalesce.c (build_ssa_conflict_graph): Likewise.
+ * tree-ssa-copy.c (init_copy_prop): Likewise.
+ * tree-ssa-copyrename.c (rename_ssa_copies): Likewise.
+ * tree-ssa-loop-ivcanon.c (propagate_constants_for_unrolling): Likewise.
+ * tree-ssa-loop-manip.c (find_uses_to_rename_bb): Likewise.
+ (verify_loop_closed_ssa): Likewise.
+ * tree-ssa-math-opts.c (execute_cse_reciprocals): Likewise.
+ * tree-ssa-pre.c (compute_avail): Likewise.
+ (eliminate_dom_walker::before_dom_children): Likewise.
+ * tree-ssa-strlen.c (strlen_dom_walker::before_dom_children): Likewise.
+ * tree-ssa-structalias.c (compute_points_to_sets): Likewise.
+ (ipa_pta_execute): Likewise.
+ * tree-ssa-tail-merge.c (same_succ_hash): Likewise.
+ (release_last_vdef): Likewise.
+ * tree-ssa.c (verify_ssa): Likewise.
+ (execute_update_addresses_taken): Likewise.
+ * tree-stdarg.c (check_all_va_list_escapes): Likewise.
+ (execute_optimize_stdarg): Likewise.
+ * tree-switch-conversion.c (build_arrays): Likewise.
+ * tree-vect-loop-manip.c (rename_variables_in_bb): Likewise.
+ * tree-vect-loop.c (vect_determine_vectorization_factor): Likewise.
+ (vect_analyze_loop_operations): Likewise.
+ (vect_transform_loop): Likewise.
+ * tree-vrp.c (find_assert_locations_1): Likewise.
+ (vrp_initialize): Likewise.
+
+ * graphite-sese-to-poly.c (gsi_for_phi_node): Convert psi to a
+ gimple_phi_iterator.
+ (rewrite_degenerate_phi): Require a gimple_phi_iterator; strengthen
+ local "phi" from gimple to gimple_phi.
+
+ * ipa-split.c (consider_split): Convert local psi to a
+ gimple_phi_iterator.
+ * tree-cfg.c (gimple_merge_blocks): Likewise.
+
+ * tree-eh.c (unsplit_eh): Replace reuse of gsi with a new
+ gimple_phi_iterator gpi, using it to convert statement from gimple
+ to a gimple_phi.
+ (cleanup_empty_eh_merge_phis): Strengthen ophi from gimple to
+ gimple_phi.
+ * tree-ssa-dce.c (eliminate_unnecessary_stmts): Replace reuse of
+ gsi with a new gimple_phi_iterator gpi, using it to convert
+ expressions from gimple to gimple_phi; add a checked cast to
+ gimple_phi.
+
+ * tree-if-conv.c (predicate_all_scalar_phis): Convert local phi_gsi
+ to be a gimple_phi_iterator; convert "phi" to be a gimple_phi.
+ * tree-parloops.c (loop_has_vector_phi_nodes): Convert local to be a
+ gimple_phi_iterator.
+ * tree-ssa-ccp.c (ccp_initialize): Likewise.
+
+ * tree-scalar-evolution.c (analyze_initial_condition): Add checked cast
+ to a gimple_phi.
+ * tree-ssa.c (insert_debug_temp_for_var_def): Likewise.
+ * tree-ssa-dom.c (get_rhs_or_phi_arg): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Use gimple_call in some places within tree-ssa-dom.c
+
+ * tree-ssa-dom.c (struct hashable_expr): Strengthen field
+ call.fn_from from gimple to gimple_call.
+ (initialize_hash_element): Replace check against GIMPLE_CALL
+ with dyn_cast<gimple_call> and update gimple_call_ uses to use
+ new gimple_call local, along with fn_from initializer.
+ (iterative_hash_hashable_expr): Strengthen type of local "fn_from"
+ from gimple to gimple_call.
+ (print_expr_hash_elt): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_call_copy_flags and ipa_modify_call_arguments
+
+ * gimple.h (gimple_call_copy_flags): Require gimple_calls.
+
+ * ipa-prop.c (ipa_modify_call_arguments): Require a gimple_call.
+ * ipa-prop.h (ipa_modify_call_arguments): Likewise.
+
+ * tree-inline.c (copy_bb): Replace is_gimple_call with new local
+ and call to dyn_cast<gimple_call>, updating gimple_call_ uses to
+ use the type-checked local.
+
+ * tree-sra.c (convert_callers): Replace check for GIMPLE_CALL with
+ a dyn_cast.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_assign_nontemporal_move_p
+
+ * gimple.h (gimple_assign_nontemporal_move_p): Require a
+ const_gimple_assign rather than a const_gimple.
+
+ * cfgexpand.c (expand_gimple_stmt_1): Add local assign_stmt and
+ checked cast within "case GIMPLE_ASSIGN".
+
+ * gimple-streamer-out.c (output_gimple_stmt): Add checked cast to
+ gimple_assign.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_call_arg_flags
+
+ * gimple.h (gimple_call_arg_flags): Require a const_gimple_call
+ rather than a const_gimple.
+
+ * gimple.c (gimple_call_fnspec): Likewise.
+ (gimple_call_arg_flags): Likewise.
+
+ * tree-inline.c (remap_gimple_stmt): Use a dyn_cast rather than a
+ is_gimple_call and repeated checked casts.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_call_set_tail and gimple_call_tail_p
+
+ * gimple.h (gimple_call_set_tail): Require a gimple_call.
+ (gimple_call_tail_p): Likewise.
+
+ * cfgexpand.c (expand_gimple_tailcall): Likewise.
+ (expand_gimple_basic_block): Convert calls to is_gimple_call to a
+ dyn_cast, introducing a new "call_stmt" local.
+
+ * trans-mem.c (expand_block_edges): Likewise, for comparison against
+ GIMPLE_CALL.
+
+ * tree-inline.c (remap_gimple_stmt): Add checked casts to
+ gimple_call in region guarded by is_gimple_call.
+
+ * tree-tailcall.c (optimize_tail_call): Add checked cast to gimple_call
+ for t->call_gsi.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_call_set_fntype
+
+ * gimple-fold.c (gimple_fold_builtin_sprintf_chk): Strengthen
+ local "stmt" from gimple to gimple_call.
+
+ * gimple.h (gimple_call_set_fntype): Require a gimple_call.
+
+ * omp-low.c (lower_omp_1): Add a new local gimple_call "call_stmt",
+ from a checked cast to gimple_call within the "case GIMPLE_CALL",
+ for the regions where "stmt" is not subsequently overwritten.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_call_set_fn
+
+ * gimple.h (gimple_call_set_fn): Require a gimple_call.
+
+ * trans-mem.c (dump_tm_memopt_transform): Likewise.
+ (tm_memopt_transform_blocks): Add checked casts to gimple_call in
+ suites guarded by is_tm_simple_{load|store}, which enforce that
+ the statement must be a GIMPLE_CALL; use this when invoking
+ dump_tm_memopt_transform.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Tweak to gimplify_modify_expr
+
+ * gimplify.c (gimplify_modify_expr): Introduce local "call_stmt".
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_call_nothrow_p
+
+ * gimple.h (gimple_call_nothrow_p): Require a gimple_call.
+
+ * tree-eh.c (stmt_could_throw_p): Add checked cast to gimple_call.
+
+ * tree-vect-slp.c (vect_build_slp_tree_1): Replace call to
+ is_gimple_call with dyn_cast<gimple_call>, introducing a local.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_call_set_nothrow
+
+ * gimple.h (gimple_call_set_nothrow): Require a gimple_call.
+
+ * trans-mem.c (ipa_tm_insert_gettmclone_call): Likewise.
+ (ipa_tm_transform_calls_redirect): Add checked cast to gimple call; this
+ is only called for gsi on a GIMPLE_CALL statement.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize various expressions from gimple to gimple_cond
+
+ * ipa-split.c (check_forbidden_calls): Replace check against
+ GIMPLE_COND with a dyn_cast<gimple_cond>, introducing a
+ gimple_cond local.
+ * predict.c (predict_extra_loop_exits): Likewise.
+ * tree-vrp.c (fold_predicate_in): Likewise.
+ (simplify_stmt_for_jump_threading): Likewise.
+
+ * predict.c (is_comparison_with_loop_invariant_p): Require a
+ gimple_cond.
+ (predict_iv_comparison): Add checked cast to gimple_cond once we
+ know the code is GIMPLE_COND.
+ (predict_loops): Change type of "stmt" to gimple_cond,
+ adding checked casts to its assignments (which are both guarded by
+ checks against GIMPLE_COND).
+
+ * tree-vrp.c (find_conditional_asserts): Require a gimple_cond.
+ (vrp_evaluate_conditional): Likewise.
+ (find_assert_locations_1): Add checked cast to gimple_cond.
+ (vrp_visit_stmt): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_cond_{lhs|rhs}_ptr
+
+ * gimple.h (gimple_cond_lhs_ptr): Require a const_gimple_cond
+ rather than just a const_gimple.
+ (gimple_cond_rhs_ptr): Likewise.
+
+ * gimplify-me.c (gimple_regimplify_operands): Add a checked cast
+ to gimple_cond within "case GIMPLE_COND".
+ * omp-low.c (lower_omp_1): Likewise.
+
+ * omp-low.c (expand_omp_simd): Introduce a new local cond_stmt
+ to express that the conditional is indeed a gimple_cond.
+
+ * tree-ssa-loop-ivopts.c (extract_cond_operands): Add a checked
+ cast to gimple_cond within a region where the code is known to
+ be GIMPLE_COND.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_cond_set_{lhs|rhs}
+
+ * gimple.h (gimple_cond_set_lhs): Require a gimple_cond.
+ (gimple_cond_set_rhs): Likewise.
+
+ * tree-parloops.c (transform_to_exit_first_loop): Convert locals
+ "cond_stmt" and "cond_nit" to gimple_cond, adding checked casts,
+ since the existing code assumes that code is GIMPLE_COND. Convert
+ "stmt" to a gimple_assign.
+ (create_parallel_loop): Likewise for "cond_stmt".
+
+ * tree-ssa-loop-im.c (rewrite_bittest): Replace check for code
+ GIMPLE_COND with a dyn_cast, adding new local "cond_stmt".
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_cond_set_code
+
+ * gimple.h (gimple_cond_set_code): Require a gimple_cond.
+
+ * tree-complex.c (expand_complex_comparison): Add a checked cast to
+ gimple_cond within "case GIMPLE_COND".
+
+ * tree-ssa-loop-ivcanon.c (create_canonical_iv): Convert local "cond"
+ to a gimple_cond, adding a checked cast. The existing code requires
+ that the last statement before the exit edge have code GIMPLE_COND,
+ though it's not clear to me where this is verified.
+
+ * tree-ssa-loop-ivopts.c (rewrite_use_compare): Add a checked cast
+ to gimple_cond on "use->stmt".
+
+ * tree-ssa-loop-manip.c (tree_transform_and_unroll_loop): Convert
+ local "exit_if" to gimple_cond, adding a checked cast. It's not
+ clear to me exactly where the GIMPLE_COND-ness of this is
+ established, but the existing code requires it.
+ (canonicalize_loop_ivs): Similarly for "stmt".
+
+ * tree-ssa-propagate.c (propagate_tree_value_into_stmt): Replace
+ a check against GIMPLE_COND with a dyn_cast<gimple_cond>.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_cond_{true|false}_label
+
+ * gimple.h (gimple_cond_true_label): Require a const_gimple_cond
+ rather than just a const_gimple.
+ (gimple_cond_false_label): Likewise.
+
+ * omp-low.c (diagnose_sb_2): Add checked cast to gimple_cond within
+ case GIMPLE_COND.
+ * tree-eh.c (maybe_record_in_goto_queue): Likewise.
+
+ * tree-cfg.c (verify_gimple_stmt): Add a checked cast to gimple_cond
+ within "case GIMPLE_COND", splitting it out into...
+ (verify_gimple_label): New.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_switch_index and gimple_switch_index_ptr
+
+ * gimple.h (gimple_switch_index): Require a const_gimple_switch rather
+ than a plain const_gimple.
+ (gimple_switch_index_ptr): Likewise.
+
+ * gimplify-me.c (gimple_regimplify_operands): Add checked cast to
+ gimple_switch within "case GIMPLE_SWITCH".
+ * tree-cfgcleanup.c (cleanup_control_expr_graph): Likewise.
+ * tree-ssa-ccp.c (ccp_fold): Likewise.
+ * tree-ssa-dom.c (optimize_stmt): Likewise.
+
+ * tree-ssa-ccp.c (evaluate_stmt): Add checked cast to
+ gimple_switch within region guarded by check for GIMPLE_SWITCH.
+ * tree-ssa-dom.c (record_edge_info): Likewise.
+ (eliminate_redundant_computations): Likewise.
+ * tree-ssa-loop-ivcanon.c (tree_estimate_loop_size): Likewise.
+ * tree-ssa-threadedge.c (simplify_control_stmt_condition): Likewise.
+
+ * tree-ssa-dom.c (initialize_hash_element): Replace check for
+ code GIMPLE_SWITCH with a dyn_cast<gimple_switch>.
+ (propagate_rhs_into_lhs): Likewise.
+ * tree-ssa-propagate.c (may_propagate_copy_into_stmt): Likewise.
+ (propagate_tree_value_into_stmt): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_cond_make_{false|true}
+
+ * gimple.h (gimple_cond_make_false): Require a gimple_cond.
+ (gimple_cond_make_true): Likewise.
+
+ * tree-cfg.c (fold_cond_expr_cond): Add a checked cast to
+ gimple_cond within region guarded by check for GIMPLE_COND.
+ * tree-ssa-ccp.c (ccp_fold_stmt): Likewise.
+
+ * tree-loop-distribution.c (generate_loops_for_partition): Replace
+ a check for GIMPLE_COND with a dyn_cast<gimple_cond>.
+ * tree-ssa-ccp.c (optimize_unreachable): Likewise.
+ * tree-ssa-loop-niter.c (number_of_iterations_exit): Likewise.
+ * tree-ssa-pre.c (eliminate_dom_walker::before_dom_children):
+ Likewise.
+
+ * tree-vrp.c (fold_predicate_in): Add a checked cast to
+ gimple_cond. We must be dealing with a GIMPLE_COND since logic
+ at top of the function ensures we only act on GIMPLE_ASSIGN and
+ GIMPLE_COND statements, and we're now within a "not a GIMPLE_ASSIGN"
+ clause.
+
+ * tree-ssa-loop-ivcanon.c (remove_exits_and_undefined_stmts): Add
+ checked cast of elt->stmt to gimple_cond. The existing code requires
+ this to be a GIMPLE_COND, though it's not clear to me how this
+ requirement is enforced.
+ (remove_redundant_iv_tests): Likewise.
+ (try_unroll_loop_completely): Likewise, for the last_stmt of the
+ preceding bb along edge_to_cancel.
+ * tree-ssa-reassoc.c (maybe_optimize_range_tests): Likewise, for the
+ last_stmt of bb.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize locals within expand_omp_for_init_counts
+
+ * omp-low.c (expand_omp_for_init_counts): Eliminate local "stmt"
+ in favor of new locals "cond_stmt" and "assign_stmt" with more
+ concrete types.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Make gimple_cond_set_{true|false}_label require gimple_cond.
+
+ * gimple.h (gimple_cond_set_true_label): Require a gimple_cond.
+ (gimple_cond_set_false_label): Likewise.
+
+ * tree-cfg.c (make_cond_expr_edges): Convert "entry" from gimple to
+ a gimple_cond.
+ (cleanup_dead_labels): Introduce a checked cast to a gimple_cond within
+ the GIMPLE_COND case.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize three gimple_return_ accessors
+
+ * gimple.h (gimple_return_retval_ptr): Require a const_gimple_return
+ rather than a const_gimple.
+ (gimple_return_retval): Likewise.
+ (gimple_return_set_retval): Require a gimple_return.
+
+ * cfgexpand.c (expand_gimple_stmt_1): Add a checked cast to
+ gimple_return.
+ (expand_gimple_basic_block): Likewise.
+ * tree-complex.c (expand_complex_move): Likewise.
+ (expand_complex_comparison): Likewise.
+ * tree-inline.c (remap_gimple_stmt): Likewise.
+ * tree-sra.c (scan_function): Likewise.
+ (sra_modify_function_body): Likewise.
+ (ipa_sra_modify_function_body): Likewise.
+ * tree-ssa-structalias.c (find_func_aliases): Likewise.
+
+ * gimple-ssa-isolate-paths.c (isolate_path): Strengthen local
+ "ret" from gimple to gimple_return.
+ (find_implicit_erroneous_behaviour): Replace a check for code
+ GIMPLE_RETURN with a dyn_cast and a new local.
+ (find_explicit_erroneous_behaviour): Likewise.
+ * gimple-walk.c (walk_stmt_load_store_addr_ops): Likewise.
+ * gimple.c (infer_nonnull_range): Likewise.
+ * ipa-split.c (find_return_bb): Likewise.
+ (find_retval): Likewise.
+ (split_function): Likewise.
+ * omp-low.c (ipa_simd_modify_function_body): Likewise.
+ * tree-cfg.c (pass_warn_function_return::execute): Likewise.
+ * tree-nrv.c (tree_nrv): Likewise.
+ * tree-ssa-alias.c (ref_maybe_used_by_stmt_p): Likewise.
+ * tree-ssa-dce.c (propagate_necessity): Likewise.
+ * tree-ssa-structalias.c (find_func_clobbers): Likewise.
+ * tree-tailcall.c (find_tail_calls): Likewise.
+
+ * predict.c (apply_return_prediction): Rework the search for
+ return_stmt so that the latter can have type gimple_return.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Make gimple_phi_arg_location require a gimple_phi.
+
+ * gimple.h (gimple_phi_arg_location): Require a gimple_phi.
+
+ * tree-into-ssa.c (rewrite_update_phi_arguments): Replace a check
+ for code GIMPLE_PHI with a dyn_cast and a new local.
+ * tree-ssa-ter.c (ter_is_replaceable_p): Likewise.
+
+ * tree-ssa-live.c (remove_unused_locals): Replace a
+ gimple_stmt_iterator with a gimple_phi_iterator, using it to make
+ local "phi" be a gimple_phi.
+ * tree-ssa-phiopt.c (tree_ssa_phiopt_worker): Likewise.
+
+ * tree-ssa-phiopt.c (conditional_replacement): Require a gimple_phi.
+ (single_non_singleton_phi_for_edges): Return a gimple_phi; update
+ local to be a gimple_phi, adding checked casts since we're working
+ on a sequence of gimple_phi.
+ (conditional_replacement): Require a gimple_phi.
+
+ * tree-ssa-threadupdate.c (get_value_locus_in_path): Strengthen
+ type of local "def_phi" to gimple_phi by replacing a check of the
+ code for GIMPLE_PHI with a dyn_cast<gimple_phi>.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Make gimple_phi_arg_location_from_edge require a gimple_phi
+
+ * gimple.h (gimple_phi_arg_location_from_edge): Require a
+ gimple_phi.
+
+ * tree-parloops.c (create_parallel_loop): Split up local variable
+ "stmt", introducing other locals for the various statements created
+ by this function. Reuse "stmt" within the phi-handling code, and
+ change to type gimple_phi, since this is the only remaining
+ "non-phi" user of gimple_phi_arg_location_from_edge.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize three gimple_try_set_ accessors
+
+ * gimple.c (gimple_copy): Add checked casts to gimple_try.
+
+ * gimple.h (gimple_try_set_kind): Require a gimple_try.
+ (gimple_try_set_eval): Likewise.
+ (gimple_try_set_cleanup): Likewise.
+
+ * tree-eh.c (optimize_double_finally): Require a pair of gimple_try
+ statements.
+ (refactor_eh_r): Convert code comparisons to dynamic casts.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_try_set_catch_is_cleanup
+
+ * gimple.h (gimple_try_set_catch_is_cleanup): Require a gimple_try.
+
+ * gimplify.c (gimplify_expr): Convert local "try_" from a gimple
+ to a gimple_try.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_eh_filter_set_types and gimple_eh_filter_set_failure
+
+ * gimple.h (gimple_eh_filter_set_types): Require a gimple_eh_filter.
+ (gimple_eh_filter_set_failure): Likewise.
+ * gimple.c (gimple_copy): Add checked casts to gimple_eh_filter
+ within GIMPLE_EH_FILTER case.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_label_label
+
+ * gimple.h (gimple_label_label): Require a const_gimple_label
+ rather than just a const_gimple.
+
+ * cfgexpand.c (label_rtx_for_bb): Convert local from gimple to
+ gimple_label, replacing a check against GIMPLE_LABEL with a
+ dyn_cast<gimple_label>.
+ * predict.c (tree_estimate_probability_bb): Likewise.
+ * tree-cfg.c (make_edges): Likewise.
+ (cleanup_dead_labels): Likewise (twice).
+ (gimple_can_merge_blocks_p): Likewise.
+ (gimple_block_label): Likewise.
+ * tree-eh.c (unsplit_eh): Likewise.
+ (cleanup_empty_eh_unsplit): Likewise.
+ * tree-inline.c (mark_local_labels_stmt): Likewise.
+ * tree-nested.c (convert_nl_goto_receiver): Likewise.
+
+ * cfgexpand.c (expand_gimple_stmt_1): Add a checked cast to
+ gimple_label when invoking gimple_label_label in a region where
+ we've checked the code is GIMPLE_LABEL.
+ * gimple-pretty-print.c (pp_cfg_jump): Likewise.
+ * gimple.c (gimple_set_bb): Likewise.
+ * ipa-pure-const.c (check_stmt): Likewise.
+ * omp-low.c (diagnose_sb_1): Likewise.
+ * tree-cfg.c (gimple_verify_flow_info): Likewise.
+ * tree-cfgcleanup.c (tree_forwarder_block_p): Likewise.
+ (remove_forwarder_block): Likewise.
+ * tree-eh.c (collect_finally_tree): Likewise.
+
+ * ipa-split.c (verify_non_ssa_vars): Replace a check against
+ GIMPLE_LABEL with a dyn_cast<gimple_label>, introducing a
+ gimple_label local.
+ * tree-cfg.c (gimple_can_merge_blocks_p): Likewise.
+ (gimple_merge_blocks): Likewise.
+ (remove_bb): Likewise.
+ (stmt_starts_bb_p): Likewise.
+ (gimple_verify_flow_info): Likewise.
+ (move_block_to_fn): Likewise.
+ * tree-cfgcleanup.c (remove_forwarder_block): Likewise.
+ (remove_forwarder_block_with_phi): Likewise.
+ * tree-ssa-ccp.c (optimize_unreachable): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_call_use_set and gimple_call_clobber_set
+
+ * gimple.h (gimple_call_use_set): Require a gimple_call.
+ (gimple_call_clobber_set): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize gimple_catch_types
+
+ * gimple.h (gimple_catch_types): Require a const_gimple_catch
+ rather than a const_gimple.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Make gimple_goto_set_dest require a gimple_goto
+
+ * gimple.h (gimple_goto_set_dest): Require a gimple_goto.
+
+ * tree-cfg.c (factor_computed_gotos): Add checked cast to
+ gimple_goto.
+ (cleanup_dead_labels): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Make gimple_label_set_label require a gimple_label
+
+ * gimple.h (gimple_label_set_label): Require a gimple_label.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize parameter to gimple_call_copy_skip_args
+
+ * gimple.c (gimple_call_copy_skip_args): Require a gimple_call.
+ * gimple.h (gimple_call_copy_skip_args): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Various gimple to gimple_call conversions in IPA
+
+ * ipa-prop.c (detect_type_change_from_memory_writes): Require a
+ gimple_call rather than a plain gimple.
+ (detect_type_change): Likewise.
+ (detect_type_change_ssa): Likewise.
+ (compute_complex_assign_jump_func): Likewise.
+ (compute_complex_ancestor_jump_func): Likewise.
+ (compute_known_type_jump_func): Likewise.
+ (determine_locally_known_aggregate_parts): Likewise.
+ (ipa_compute_jump_functions_for_edge): Strengthen local "call" to
+ a gimple_call; add checked cast to gimple_phi.
+ (ipa_note_param_call): Require a gimple_call rather than a plain
+ gimple.
+ (ipa_analyze_indirect_call_uses): Likewise.
+ (ipa_analyze_virtual_call_uses): Likewise.
+ (ipa_analyze_call_uses): Likewise.
+ (ipa_analyze_stmt_uses): Add checked cast to gimple_call.
+
+ * tree-ssa-pre.c (eliminate_dom_walker::before_dom_children):
+ Replace use of is_gimple_call with dyn_cast<gimple_call> and a
+ new local "call_stmt".
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Use gimple_call for callgraph edges
+
+ * cgraph.h (cgraph_edge::call_stmt): Strengthen field from plain
+ gimple to a gimple_call.
+ (cgraph_node::set_call_stmt_including_clones): Likewise for param
+ "new_stmt".
+ (cgraph_node::create_edge): Likewise for param "call_stmt".
+ (cgraph_node::create_indirect_edge): Likewise.
+ (cgraph_node::create_edge_including_clones): Likewise for param
+ "stmt".
+ (cgraph_edge::set_call_stmt): Likewise for param "new_stmt".
+ (cgraph_edge::clone): Likewise for param "call_stmt".
+ (symbol_table::create_edge): Likewise.
+
+ * cgraph.c (cgraph_edge::set_call_stmt): Require a gimple_call
+ rather than a plain gimple.
+ (symbol_table::create_edge): Likewise.
+ (cgraph_node::create_edge): Likewise.
+ (cgraph_node::create_indirect_edge): Likewise.
+ (cgraph_edge::redirect_call_stmt_to_callee): Strengthen local
+ "new_stmt" from gimple to gimple_call.
+ (cgraph_update_edges_for_call_stmt_node): Add checked casts to
+ gimple_call.
+
+ * cgraphbuild.c (pass_build_cgraph_edges::execute): Replace
+ is_gimple_call with dyn_cast<gimple_call> and new local
+ "call_stmt".
+ (cgraph_edge::rebuild_edges): Likewise.
+
+ * cgraphclones.c (cgraph_edge::clone): Require a gimple_call
+ rather than a plain gimple.
+ (cgraph_node::set_call_stmt_including_clones): Likewise.
+ (cgraph_node::create_edge_including_clones): Likewise.
+
+ * lto-streamer-in.c (fixup_call_stmt_edges_1): Add checked casts
+ to gimple_call.
+
+ * omp-low.c (simd_clone_adjust): Strengthen local "call" from
+ gimple to gimple_call.
+
+ * trans-mem.c (ipa_tm_insert_irr_call): Likewise for "g".
+ (ipa_tm_insert_gettmclone_call): Likewise; also strengthen "g2"
+ to gimple_assign.
+
+ * tree-emutls.c (gen_emutls_addr): Strengthen local "x" from
+ gimple to gimple_call.
+
+ * tree-inline.c (copy_bb): Replace is_gimple_call with
+ dyn_cast<gimple_call> and new local "call_stmt".
+
+ * value-prof.c (gimple_ic): Require and return a gimple_call,
+ rather than a plain gimple.
+ * value-prof.h (gimple_ic): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Make gimple_call_return_slot_opt_p require a gimple_call.
+
+ * gimple.h (gimple_call_return_slot_opt_p): Require a gimple_call
+ rather than a plain gimple.
+
+ * gimple-walk.c (walk_stmt_load_store_addr_ops): Convert usage of
+ is_gimple_call to dyn_cast<gimple_call>, introducing a new local
+ "call_stmt".
+
+ * trans-mem.c (expand_call_tm): Split local "stmt", strengthening
+ from plain gimple to a gimple_call, and introducing new local
+ gimple_assign "assign_stmt".
+
+ * tree-inline.c (expand_call_inline): Convert check of code against
+ GIMPLE_CALL to dyn_cast<gimple_call>, introducing a new local
+ "call_stmt".
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ More gimple_phi
+
+ * gimple.h (gimple_phi_set_result): Require a gimple_phi rather
+ than a plain gimple.
+ (gimple_phi_set_arg): Likewise.
+
+ * tree-outof-ssa.c (remove_gimple_phi_args): Likewise; add a checked
+ cast to gimple_phi.
+
+ * tree-sra.c (replace_removed_params_ssa_names): Add a checked
+ cast to gimple_phi.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Make gimple_phi_arg_edge require a gimple_phi
+
+ * gimple.h (gimple_phi_arg_edge): Require a gimple_phi rather
+ than a plain gimple.
+
+ * gimple-ssa-strength-reduction.c (ncd_with_phi): Strengthen
+ param "phi" from gimple to gimple_phi. Add a checked cast.
+ (ncd_of_cand_and_phis): Add a checked cast.
+
+ * graphite-sese-to-poly.c (rewrite_phi_out_of_ssa): Require a
+ gimple_phi_iterator; strengthen local "phi" from gimple to a
+ gimple_phi.
+ (rewrite_cross_bb_scalar_deps): Strengthen local "psi" from
+ a gimple_stmt_iterator to a gimple_phi_iterator.
+ (edge_initial_value_for_loop_phi): Require a gimple_phi.
+ (initial_value_for_loop_phi): Likewise.
+
+ * ipa-split.c (consider_split): Convert "bsi" to a
+ gimple_phi_iterator and "stmt" to a gimple_phi.
+
+ * predict.c (predict_extra_loop_exits): Convert "phi_stmt" to be
+ a gimple_phi; introduce "lhs_def_stmt" as plain gimple.
+ (apply_return_prediction): Convert "phi" to be a gimple_phi.
+
+ * tree-cfg.c (replace_uses_by): Add checked cast to gimple_phi.
+ (verify_gimple_in_cfg): Introduce gimple_phi_iterator "gpi" and use
+ it to convert "phi" to a gimple_phi.
+
+ * tree-eh.c (cleanup_empty_eh_merge_phis): Convert "ngsi", "ogsi"
+ to be gimple_phi_iterators. Convert "ophi", "nphi" to be
+ gimple_phi.
+
+ * tree-into-ssa.c (prepare_use_sites_for): Add checked cast to
+ gimple_phi.
+
+ * tree-ssa-coalesce.c (create_outofssa_var_map): Introduce
+ gimple_phi_iterator "gpi" and use it to convert "phi" to a
+ gimple_phi.
+
+ * tree-ssa-dce.c (propagate_necessity): Introduce local "phi",
+ from checked cast to gimple_phi.
+
+ * tree-ssa-live.c (set_var_live_on_entry): Add checked cast to
+ gimple_phi.
+
+ * tree-ssa-propagate.c (replace_phi_args_in): Require a gimple_phi
+ rather than a plain gimple.
+ (substitute_and_fold_dom_walker::before_dom_children): Introduce
+ gimple_phi_iterator "gpi".
+
+ * tree-ssa-sink.c (find_bb_for_arg): Require a gimple_phi rather
+ than a plain gimple.
+ (nearest_common_dominator_of_uses): Replace check of code against
+ GIMPLE_PHI with a dyn_cast<gimple_phi>, introducing a new local.
+ (statement_sink_location): Add checked cast to gimple_phi.
+
+ * tree-ssa-uninit.c (compute_uninit_opnds_pos): Require a
+ gimple_phi rather than a plain gimple.
+ (collect_phi_def_edges): Likewise. Add a checked cast.
+ (find_def_preds): Strengthen param "phi" from gimple to
+ gimple_phi.
+ (prune_uninit_phi_opnds_in_unrealizable_paths): Likewise for
+ params "phi" and "flag_def". Strengthen param "visited_phis" from
+ hash_set<gimple> * to hash_set<gimple_phi> *. Convert
+ "flag_arg_def", "phi_arg_def" to gimple_phi using
+ dyn_cast<gimple_phi>. Similarly, introduce new local
+ "opnd_def_phi".
+ (use_pred_not_overlap_with_undef_path_pred): Strengthen param
+ "phi" from gimple to gimple_phi, and param "visited_phis" from
+ hash_set<gimple> * to hash_set<gimple_phi> *. Add a checked cast.
+ (is_use_properly_guarded): Likewise for params.
+ (find_uninit_use): Replace check of code against GIMPLE_PHI with
+ a dyn_cast<gimple_phi>, introducing a new local "use_phi".
+ Strengthen local "visited_phis" from hash_set<gimple> to
+ hash_set<gimple_phi>.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Update GRAPHITE to use more concrete gimple statement classes
+
+ * graphite-scop-detection.c (canonicalize_loop_closed_ssa):
+ Strengthen local "psi" to be a gimple_phi_iterator and "phi" to
+ a gimple_phi.
+
+ * graphite-sese-to-poly.c (phi_arg_in_outermost_loop): Require
+ a gimple_phi rather than a plain gimple.
+ (remove_simple_copy_phi): Require a gimple_phi_iterator;
+ strengthen local "phi" to be a gimple_phi and "stmt" to be a
+ gimple_assign.
+ (remove_invariant_phi): Likewise.
+ (simple_copy_phi_p): Require a gimple_phi.
+ (reduction_phi_p): Require a gimple_phi_iterator; strengthen
+ local "phi" to be a gimple_phi.
+ (add_condition_to_pbb): Require a gimple_cond rather than a
+ plain gimple.
+ (add_conditions_to_domain): Add checked cast to gimple_cond
+ within GIMPLE_COND case of switch statement.
+ (single_pred_cond_non_loop_exit): Return a gimple_cond rather
+ than a plain gimple, via a checked cast.
+ (sese_dom_walker::before_dom_children): Strengthen local "stmt"
+ from gimple to gimple_cond.
+ (gsi_for_phi_node): Require a gimple_phi, and return a
+ gimple_phi_iterator.
+ (insert_out_of_ssa_copy): Strengthen local "stmt" from gimple to
+ gimple_assign.
+ (rewrite_reductions_out_of_ssa): Strengthen "psi" to be a
+ gimple_phi_iterator, and "phi" to be a gimple_phi.
+ (phi_contains_arg): Require a gimple_phi.
+ (follow_ssa_with_commutative_ops): Strengthen return type from
+ gimple to gimple_phi, by converting a check for code GIMPLE_PHI to
+ a dyn_cast<gimple_phi>, and strengthening local "res" from gimple
+ to gimple_phi.
+ (detect_commutative_reduction_arg): Strengthen return type from
+ gimple to gimple_phi, and strengthen local "phi" to be a
+ gimple_phi.
+ (detect_commutative_reduction_assign): Strengthen return type from
+ gimple to gimple_phi, and strengthen local "res" to be a
+ gimple_phi.
+ (follow_inital_value_to_phi): Strengthen return type from
+ gimple to gimple_phi. Replace check for code GIMPLE_PHI with
+ a dyn_cast<gimple_phi>.
+ (detect_commutative_reduction): Strengthen return type and locals
+ "loop_phi", "phi", "close_phi" from gimple to gimple_phi,
+ introducing a checked cast of "stmt" in region guarded by
+ scalar_close_phi_node_p (stmt).
+ (translate_scalar_reduction_to_array_for_stmt): Require param
+ "loop_phi" to be a gimple_phi. Strengthen local "assign" from
+ gimple to gimple_assign.
+ (remove_phi): Require a gimple_phi.
+ (close_phi_written_to_memory): Likewise.
+ (translate_scalar_reduction_to_array): We expect the first element
+ in each vector to be an arbitrary statement, but all of the
+ subsequent elements to be phi nodes. Hence the decls of gimple
+ locals "loop_phi" and "close_phi" are replaced with decls of gimple
+ "loop_stmt" and "close_stmt", with decls of the more-strongly typed
+ gimple_phi "loop_phi" and "close_phi" occurring lower down, within
+ the region where we're dealing with i > 0 and hence where we can
+ safely assign them using the checked cast as_a <gimple_phi>.
+ This allows many of the strengthenings from gimple to gimple_phi
+ above. We eliminate the local "stmt", since we can simply use
+ "loop_stmt".
+ (rewrite_commutative_reductions_out_of_ssa_close_phi): Strengthen
+ param "close_phi" from gimple to gimple_phi, and local "gsi" from
+ gimple_stmt_iterator to gimple_phi_iterator, converting uses of
+ gsi_stmt to gsi.phi for type-safety.
+ (scop_ivs_can_be_represented): Strengthen local "gsi" from
+ gimple_stmt_iterator to gimple_phi_iterator, and "phi" from gimple
+ to gimple_phi.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Make gimple_phi_arg_set_location require a gimple_phi
+
+ * gimple.h (gimple_phi_arg_set_location): Require a gimple_phi
+ rather than a plain gimple.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Make add_phi_arg require a gimple_phi
+
+ * tree-phinodes.h (add_phi_arg): Require a gimple_phi rather than
+ a plain gimple.
+ * tree-phinodes.c (add_phi_arg): Likewise.
+
+ * gimple-ssa-strength-reduction.c (create_phi_basis): Strengthen
+ local "phi" from plain gimple to gimple_phi.
+
+ * graphite-scop-detection.c (canonicalize_loop_closed_ssa):
+ Likewise for "close_phi".
+
+ * ipa-split.c (split_function): Convert "psi" to
+ gimple_phi_iterator.
+
+ * omp-low.c (expand_omp_for_static_nochunk): Introduce
+ gimple_phi_iterator gpi, using it to strengthen "phi" to be a
+ gimple_phi.
+ (expand_omp_for_static_chunk): Likewise.
+
+ * tree-cfg.c (gimple_duplicate_bb): Make topmost "gsi" decl more
+ tightly-scoped, and eliminate decls "phis", "phi", "stmt", "copy"
+ in favor of more tightly-scoped gimple_phi_iterator gpi and
+ gimple_phi decls "phi" and "copy", and gimple decls "stmt" and
+ "copy".
+
+ * tree-parloops.c (create_parallel_loop): Introduce
+ gimple_phi_iterator gpi, using it to strengthen "phi" to be a
+ gimple_phi.
+
+ * tree-ssa-loop-im.c (execute_sm_if_changed): Likewise.
+
+ * tree-ssa-loop-manip.c (create_iv): Split out new gimple_phi
+ local "phi" from "stmt", and convert the latter into being a
+ gimple_assign.
+
+ * tree-ssa-pre.c (insert_into_preds_of_block): Strengthen local
+ "phi" to be a gimple_phi.
+
+ * tree-ssa-tail-merge.c (vop_phi): Require a gimple_phi rather
+ than a plain gimple.
+ (replace_block_by): Strengthen local "bb2_phi" to be a gimple_phi.
+
+ * tree-tailcall.c (add_successor_phi_arg): Use gsi.phi when
+ invoking add_phi_arg.
+ (eliminate_tail_call): Introduce gimple_phi_iterator gpi, using it
+ to strengthen "phi" to be a gimple_phi.
+ (create_tailcall_accumulator): Strengthen local "phi" to be a
+ gimple_phi.
+ (tree_optimize_tail_calls_1): Likewise.
+
+ * tree-vect-data-refs.c (vect_setup_realignment): Strengthen
+ local "phi_stmt" to be a gimple_phi.
+
+ * tree-vect-loop-manip.c (slpeel_tree_duplicate_loop_to_edge_cfg):
+ Strengthen "gsi", "gsi_orig", "gsi_new" to be
+ gimple_phi_iterators, and "phi" "orig_phi", "new_phi" to be
+ gimple_phi instances.
+ (slpeel_tree_peel_loop_to_edge): Strengthen local "new_phi" to be
+ a gimple_phi.
+
+ * tree-vect-loop.c (get_initial_def_for_induction): Likewise for
+ "induction_phi".
+ (vect_create_epilog_for_reduction): Add checked casts to
+ gimple_phi; strengthen local "outer_phi" to gimple_phi and
+ "new_vec_stmt" to gimple_assign.
+ (vect_finalize_reduction): Strengthen local "vect_phi" to
+ gimple_phi.
+ (vectorizable_reduction): Likewise for "new_phi".
+
+ * tree-vect-stmts.c (vectorizable_simd_clone_call): Likewise.
+ (vectorizable_load): Likewise for "phi".
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Make gimple_phi_arg_def_ptr and gimple_phi_arg_has_location require a gimple_phi
+
+ * gimple.h (gimple_phi_arg_def_ptr): Require a gimple_phi rather
+ than a plain gimple.
+ (gimple_phi_arg_has_location): Likewise.
+
+ * gimple-streamer-in.c (input_phi): Return a gimple_phi rather
+ than a plain gimple.
+ * gimple-streamer-out.c (output_phi): Require a gimple_phi rather
+ than a plain gimple.
+ (output_bb): Convert iteration to a gimple_phi_iterator, and local
+ "phi" to gimple_phi.
+
+ * omp-low.c (expand_omp_for_static_chunk): Convert iterator "psi"
+ to a gimple_phi_iterator; convert locals "phi" and "nphi" to be
+ gimple_phi.
+
+ * tree-cfg.c (gimple_duplicate_sese_tail): Likewise for "psi" and
+ "phi".
+ (move_block_to_fn): Introduce new gimple_phi_iterator "psi", using
+ it in place of "gsi" where necessary. Convert "phi" to be a
+ gimple_phi.
+
+ * tree-cfgcleanup.c (remove_forwarder_block): Likewise.
+
+ * tree-vect-loop-manip.c (vect_loop_versioning): Convert "gsi" to
+ a gimple_phi_iterator, and "orig_phi" and "new_phi" to be
+ gimple_phi.
+
+ * tree.c (find_decls_types_in_node): Introduce new
+ gimple_phi_iterator "psi", using it in place of "si" where
+ necessary. Convert "phi" to be a gimple_phi.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ omp-low.c: Use more concrete types of gimple statement for various locals
+
+ * omp-low.c (finalize_task_copyfn): Strengthen local "bind" from
+ plain gimple to gimple_bind.
+ (lower_rec_input_clauses): Strengthen local "g" from
+ plain gimple to gimple_assign.
+ (lower_lastprivate_clauses): Likewise for "stmt" to gimple_cond
+ and "g" to gimple_call.
+ (expand_omp_for_init_vars): Likewise, for two decls of "stmt" to
+ gimple_assign.
+ (expand_omp_atomic_pipeline): Likewise for one decl of "stmt".
+ (expand_omp_atomic_mutex): Likewise.
+ (lower_omp_master): Likewise for "x" to gimple_call.
+ (lower_omp_ordered): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ tree-parloops.c: Use gimple_phi in various places
+
+ * tree-parloops.c (reduction_info::keep_res): Strengthen field
+ from plain gimple to gimple_phi.
+ (transform_to_exit_first_loop): Strengthen locals "phi", "nphi"
+ to gimple_phi. Eliminate early decl of gimple_stmt_iterator gsi
+ in favor of more tightly scoped gimple_phi_iterators, and a final
+ later decl as a gimple_stmt_iterator.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_omp_sections
+
+ * coretypes.h (gimple_omp_sections): New typedef.
+ (const_gimple_omp_sections): New typedef.
+
+ * gimple-pretty-print.c (dump_gimple_omp_sections): Require a
+ gimple_omp_sections rather than a plain gimple.
+ (pp_gimple_stmt_1): Add checked cast to gimple_omp_sections within
+ GIMPLE_OMP_SECTIONS case of switch statement.
+
+ * gimple.c (gimple_build_omp_sections): Return a
+ gimple_omp_sections rather than a plain gimple.
+
+ * gimple.h (gimple_build_omp_sections): Return a
+ gimple_omp_sections rather than a plain gimple.
+
+ * omp-low.c (scan_omp_sections): Require a gimple_omp_sections
+ rather than a plain gimple.
+ (scan_omp_1_stmt): Add checked cast to gimple_omp_sections within
+ GIMPLE_OMP_SECTIONS case of switch statement.
+ (expand_omp_sections): Strengthen local "sections_stmt" from gimple
+ to gimple_omp_sections.
+ (lower_omp_sections): Likewise for "stmt".
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_omp_teams
+
+ * coretypes.h (gimple_omp_teams): New typedef.
+ (const_gimple_omp_teams): New typedef.
+
+ * gimple.h (gimple_build_omp_teams): Return a gimple_omp_teams
+ rather than a plain gimple.
+ (gimple_omp_teams_set_clauses): Require a gimple_omp_teams rather
+ than a plain gimple.
+
+ * gimple-pretty-print.c (dump_gimple_omp_teams): Require a
+ gimple_omp_teams rather than a plain gimple.
+ (pp_gimple_stmt_1): Add checked cast to gimple_omp_teams within
+ GIMPLE_OMP_TEAMS case of switch statement.
+
+ * gimple.c (gimple_build_omp_teams): Return a gimple_omp_teams
+ rather than a plain gimple.
+
+ * omp-low.c (scan_omp_teams): Likewise.
+ (scan_omp_1_stmt): Add checked cast to gimple_omp_teams within
+ GIMPLE_OMP_TEAMS case of switch statement.
+ (lower_omp_teams): Strengthen local "teams_stmt" from gimple to
+ gimple_omp_teams.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_omp_target
+
+ * coretypes.h (gimple_omp_target): New typedef.
+ (const_gimple_omp_target): New typedef.
+
+ * gimple.h (gimple_build_omp_target): Return a gimple_omp_target
+ rather than a plain gimple.
+ (gimple_omp_target_set_clauses): Require a gimple_omp_target
+ rather than a plain gimple.
+ (gimple_omp_target_set_kind): Likewise.
+ (gimple_omp_target_child_fn_ptr): Likewise.
+ (gimple_omp_target_set_child_fn): Likewise.
+ (gimple_omp_target_data_arg_ptr): Likewise.
+ (gimple_omp_target_set_data_arg): Likewise.
+ (gimple_omp_target_child_fn): Require a const_gimple_omp_target
+ rather than a plain const_gimple.
+ (gimple_omp_target_data_arg): Likewise.
+
+ * gimple-pretty-print.c (dump_gimple_omp_target): Require a
+ gimple_omp_target rather than a plain gimple.
+ (pp_gimple_stmt_1): Add checked cast to gimple_omp_target within
+ GIMPLE_OMP_TARGET case of switch statement.
+
+ * gimple.c (gimple_build_omp_target): Return a gimple_omp_target
+ rather than a plain gimple.
+
+ * gimplify.c (gimplify_omp_target_update): Strengthen local "stmt"
+ from gimple to gimple_omp_target.
+
+ * omp-low.c (scan_omp_target): Require a gimple_omp_target rather
+ than a plain gimple.
+ (scan_omp_1_stmt): Add checked cast to gimple_omp_target within
+ GIMPLE_OMP_TARGET case of switch statement.
+ (expand_omp_target): Strengthen local "entry_stmt" from gimple to
+ gimple_omp_target.
+ (lower_omp_target): Likewise for "stmt".
+
+ * tree-nested.c (convert_nonlocal_reference_stmt): Add checked
+ cast to gimple_omp_target.
+ (convert_local_reference_stmt): Likewise.
+ (convert_gimple_call): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_omp_single
+
+ * coretypes.h (gimple_omp_single): New typedef.
+ (const_gimple_omp_single): New typedef.
+
+ * gimple.h (gimple_build_omp_single): Return a gimple_omp_single
+ rather than a plain gimple.
+ (gimple_omp_single_set_clauses): Require a gimple_omp_single
+ rather than a plain gimple.
+
+ * gimple-pretty-print.c (dump_gimple_omp_single): Require a
+ gimple_omp_single rather than a plain gimple.
+ (pp_gimple_stmt_1): Add checked cast to gimple_omp_single within
+ GIMPLE_OMP_SINGLE case of switch statement.
+
+ * gimple.c (gimple_build_omp_single): Return a gimple_omp_single
+ rather than a plain gimple.
+
+ * omp-low.c (scan_omp_single): Require a gimple_omp_single rather
+ than a plain gimple.
+ (scan_omp_1_stmt): Add checked cast to gimple_omp_single within
+ GIMPLE_OMP_SINGLE case of switch statement.
+ (lower_omp_single_simple): Require a gimple_omp_single rather
+ than a plain gimple.
+ (lower_omp_single_copy): Likewise.
+ (lower_omp_single): Strengthen local "single_stmt" from gimple to
+ gimple_omp_single.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_omp_task
+
+ * coretypes.h (gimple_omp_task): New typedef.
+ (const_gimple_omp_task): New typedef.
+
+ * gimple.h (gimple_build_omp_task): Return a gimple_omp_task
+ rather than a plain gimple.
+
+ * gimple-pretty-print.c (dump_gimple_omp_task): Require a
+ gimple_omp_task rather than a plain gimple.
+ (pp_gimple_stmt_1): Add checked cast to gimple_omp_task within
+ GIMPLE_OMP_TASK case of switch statement.
+
+ * gimple.c (gimple_build_omp_task): Return a gimple_omp_task
+ rather than a plain gimple.
+
+ * omp-low.c (finalize_task_copyfn): Require a gimple_omp_task
+ rather than a plain gimple.
+ (delete_omp_context): Add checked cast to gimple_omp_task.
+ (scan_omp_task): Strengthen local "stmt" from gimple to
+ gimple_omp_task.
+ (expand_task_call): Require a gimple_omp_task rather than a plain
+ gimple.
+ (expand_omp_taskreg): Add checked cast to gimple_omp_task.
+ (create_task_copyfn): Require a gimple_omp_task rather than a
+ plain gimple.
+ (lower_omp_taskreg): Add checked cast to gimple_omp_task.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ tree-cfg.c: Make verify_gimple_call require a gimple_call
+
+ * tree-cfg.c (verify_gimple_call): Require a gimple_call rather
+ than a plain gimple.
+ (verify_gimple_stmt): Add checked cast to gimple_call within
+ GIMPLE_CALL case of switch statement.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_omp_parallel
+
+ * coretypes.h (gimple_omp_parallel): New typedef.
+ (const_gimple_omp_parallel): New typedef.
+
+ * cgraphbuild.c (build_cgraph_edges): Convert check of code
+ against GIMPLE_OMP_PARALLEL to a dyn_cast <gimple_omp_parallel>
+ and new local.
+
+ * gimple-pretty-print.c (dump_gimple_omp_parallel): Require a
+ gimple_omp_parallel rather than a plain gimple.
+ (pp_gimple_stmt_1): Add a checked cast to gimple_omp_parallel
+ within GIMPLE_OMP_PARALLEL case of switch statement.
+
+ * gimple-walk.c (walk_gimple_op): Likewise, introducing a local.
+
+ * gimple.c (gimple_build_omp_parallel): Return a
+ gimple_omp_parallel rather than a plain gimple.
+ (gimple_copy): Add checked casts to gimple_omp_parallel within
+ GIMPLE_OMP_PARALLEL case of switch statement, introducing locals.
+
+ * gimple.h (gimple_build_omp_parallel): Return a
+ gimple_omp_parallel rather than a plain gimple.
+ (gimple_omp_parallel_clauses_ptr): Require a gimple_omp_parallel
+ rather than a plain gimple.
+ (gimple_omp_parallel_set_clauses): Likewise.
+ (gimple_omp_parallel_data_arg_ptr): Likewise.
+ (gimple_omp_parallel_set_data_arg): Likewise.
+ (gimple_omp_parallel_child_fn_ptr): Likewise.
+ (gimple_omp_parallel_set_child_fn): Likewise.
+ (gimple_omp_parallel_child_fn): Require a
+ const_gimple_omp_parallel rather than a plain const_gimple.
+ (gimple_omp_parallel_data_arg): Likewise.
+
+ * omp-low.c (scan_omp_parallel): Strengthen local "stmt" from
+ gimple to gimple_omp_parallel.
+ (expand_parallel_call): Require a gimple_omp_parallel for
+ "entry_stmt" rather than a plain gimple.
+ (remove_exit_barrier): Strengthen local "parallel_stmt" from
+ gimple to gimple_omp_parallel.
+ (expand_omp_taskreg): Add checked casts to gimple_omp_parallel.
+
+ * tree-inline.c (remap_gimple_stmt): Add a checked cast to
+ gimple_omp_parallel within GIMPLE_OMP_PARALLEL case of switch
+ statement, introducing local.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_omp_for
+
+ * coretypes.h (gimple_omp_for): New.
+ (const_gimple_omp_for): New.
+
+ * gimple.h (gimple_build_omp_for): Return a gimple_omp_for rather
+ than a plain gimple.
+ (gimple_omp_for_set_kind): Require a gimple_omp_for rather than a
+ plain gimple.
+ (gimple_omp_for_set_combined_p): Likewise.
+ (gimple_omp_for_set_combined_into_p): Likewise.
+
+ * gimple-pretty-print.c (dump_gimple_omp_for): Require a
+ gimple_omp_for rather than a plain gimple.
+ (pp_gimple_stmt_1): Add a checked cast to gimple_omp_for in
+ GIMPLE_OMP_FOR case of switch statement.
+
+ * gimple.c (gimple_build_omp_for): Return a gimple_omp_for rather
+ than a plain gimple.
+ (gimple_copy): Add a checked cast to gimple_omp_for and a new local.
+
+ * gimplify.c (gimplify_omp_for): Strengthen local "gfor" from
+ gimple to gimple_omp_for.
+
+ * omp-low.c (omp_for_data::for_stmt): Strengthen field from gimple
+ to gimple_omp_for.
+ (extract_omp_for_data): Require a gimple_omp_for rather than a
+ plain gimple.
+ (workshare_safe_to_combine_p): Add a checked cast to
+ gimple_omp_for.
+ (get_ws_args_for): Convert check of code against GIMPLE_OMP_FOR
+ with a dyn_cast<gimple_omp_for> and a new local.
+ (scan_omp_parallel): Add a checked cast to gimple_omp_for and a
+ new local.
+ (scan_omp_for): Require a gimple_omp_for rather than a plain
+ gimple.
+ (scan_omp_1_stmt): Add a checked cast to gimple_omp_for in
+ GIMPLE_OMP_FOR case of switch statement.
+ (expand_omp_for): Add a checked cast to gimple_omp_for.
+ (lower_omp_for): Strengthen local "stmt" from gimple to
+ gimple_omp_for.
+
+ * tree-nested.c (walk_gimple_omp_for): Require a gimple_omp_for
+ rather than a plain gimple.
+ (convert_nonlocal_reference_stmt): Add a checked cast to
+ gimple_omp_for in GIMPLE_OMP_FOR case of switch statement.
+ (convert_local_reference_stmt): Likewise.
+
+ * tree-parloops.c (create_parallel_loop): Strengthen local
+ "for_stmt" from gimple to gimple_omp_for.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_omp_critical
+
+ * coretypes.h (gimple_omp_critical): New typedef.
+ (const_gimple_omp_critical): New typedef.
+
+ * gimple-pretty-print.c (dump_gimple_omp_critical): Require a
+ gimple_omp_critical rather than a plain gimple.
+ (pp_gimple_stmt_1): Add a checked cast to gimple_omp_critical
+ within GIMPLE_OMP_CRITICAL case of switch statement.
+
+ * gimple-walk.c (walk_gimple_op): Likewise.
+
+ * gimple.c (gimple_build_omp_critical): Return a gimple_omp_critical
+ rather than a plain gimple.
+ (gimple_copy): Add checked casts to gimple_omp_critical
+ within GIMPLE_OMP_CRITICAL case of switch statement.
+
+ * gimple.h (gimple_debug): Likewise.
+ (gimple_build_omp_critical): Return a gimple_omp_critical rather
+ than a plain gimple.
+ (gimple_omp_critical_name): Require a const_gimple_omp_critical
+ rather than a plain const_gimple.
+ (gimple_omp_critical_name_ptr): Require a gimple_omp_critical
+ rather than a plain gimple.
+ (gimple_omp_critical_set_name): Likewise.
+
+ * omp-low.c (check_omp_nesting_restrictions): Add a checked cast
+ to gimple_omp_critical within GIMPLE_OMP_CRITICAL case of switch
+ statement, introducing a new local "other_crit" for type-safety.
+ (lower_omp_critical): Strengthen local "stmt" to
+ gimple_omp_critical.
+
+ * tree-inline.c (remap_gimple_stmt): Add a checked cast to
+ gimple_omp_critical within GIMPLE_OMP_CRITICAL case of switch
+ statement.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_omp_continue
+
+ * coretypes.h (gimple_omp_continue): New typedef.
+ (const_gimple_omp_continue): New typedef.
+
+ * gimple.h (gimple_build_omp_continue): Return a
+ gimple_omp_continue rather than a plain gimple.
+ (gimple_omp_continue_control_def): Require a
+ const_gimple_omp_continue rather than a plain const_gimple.
+ (gimple_omp_continue_control_use): Likewise.
+ (gimple_omp_continue_control_def_ptr): Require a gimple_omp_continue
+ rather than a plain gimple.
+ (gimple_omp_continue_set_control_def): Likewise.
+ (gimple_omp_continue_control_use_ptr): Likewise.
+ (gimple_omp_continue_set_control_use): Likewise.
+
+ * gimple-pretty-print.c (dump_gimple_omp_continue): Require a
+ gimple_omp_continue rather than a plain gimple.
+ (pp_gimple_stmt_1): Add a checked cast to gimple_omp_continue
+ within GIMPLE_OMP_CONTINUE case of switch statement.
+
+ * gimple-walk.c (walk_gimple_op): Likewise, adding a new local.
+
+ * gimple.c (gimple_build_omp_continue): Return a
+ gimple_omp_continue rather than a plain gimple.
+
+ * omp-low.c (gimple_build_cond_empty): Return a gimple_cond
+ rather than a plain gimple.
+ (expand_omp_for_generic): Split local "stmt" into "assign_stmt",
+ "cont_stmt", "cond_stmt", "call_stmt" of types gimple_assign,
+ gimple_omp_continue, gimple_cond, gimple_call respectively.
+	(expand_omp_for_static_nochunk): Likewise, splitting into two
+	"cond_stmt" decls, "assign_stmt" and "cont_stmt".
+ (expand_omp_for_static_chunk): Likewise, splitting into
+ "cond_stmt", "assign_stmt", "cont_stmt".
+ (expand_omp_sections): Strengthen local "cont" from gimple to
+ gimple_omp_continue.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_omp_atomic_store
+
+ * coretypes.h (gimple_omp_atomic_store): New typedef.
+ (const_gimple_omp_atomic_store): New typedef.
+
+ * gimple-pretty-print.c (dump_gimple_omp_atomic_store): Require
+ a gimple_omp_atomic_store rather than a plain gimple.
+ (pp_gimple_stmt_1): Add checked cast to gimple_omp_atomic_store
+ within GIMPLE_OMP_ATOMIC_STORE case of switch statement.
+ * gimple-walk.c (walk_gimple_op): Likewise.
+
+ * gimple.c (gimple_build_omp_atomic_store): Return a
+ gimple_omp_atomic_store rather than a plain gimple.
+
+ * gimple.h (gimple_build_omp_atomic_store): Return a
+ gimple_omp_atomic_store rather than a plain gimple.
+ (gimple_omp_atomic_store_set_val): Require a gimple_omp_atomic_store
+ rather than a plain gimple.
+ (gimple_omp_atomic_store_val_ptr): Likewise.
+ (gimple_omp_atomic_store_val): Require a
+ const_gimple_omp_atomic_store rather than a plain const_gimple.
+
+ * gimplify.c (gimplify_omp_atomic): Strengthen locals "loadstmt" and
+ "storestmt" from gimple to gimple_omp_atomic_load loadstmt and
+ gimple_omp_atomic_store storestmt respectively.
+
+ * omp-low.c (expand_omp_atomic): Strengthen local "store" from
+ gimple to gimple_omp_atomic_store.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_omp_atomic_load
+
+ * coretypes.h (gimple_omp_atomic_load): New typedef.
+ (const_gimple_omp_atomic_load): New typedef.
+
+ * gimple-pretty-print.c (dump_gimple_omp_atomic_load): Require a
+ gimple_omp_atomic_load rather than a plain gimple.
+ (pp_gimple_stmt_1): Add a checked cast to gimple_omp_atomic_load
+ within GIMPLE_OMP_ATOMIC_LOAD case of switch statement.
+
+ * gimple-walk.c (walk_gimple_op): Likewise, introducing a new local.
+
+ * gimple.c (gimple_build_omp_atomic_load): Return a
+ gimple_omp_atomic_load rather than a plain gimple.
+
+ * gimple.h (gimple_build_omp_atomic_load): Return a
+ gimple_omp_atomic_load rather than a plain gimple.
+ (gimple_omp_atomic_load_set_lhs): Require a
+ gimple_omp_atomic_load rather than a plain gimple.
+ (gimple_omp_atomic_load_lhs_ptr): Likewise.
+ (gimple_omp_atomic_load_set_rhs): Likewise.
+ (gimple_omp_atomic_load_rhs_ptr): Likewise.
+ (gimple_omp_atomic_load_lhs): Require a
+ const_gimple_omp_atomic_load rather than a plain const_gimple.
+ (gimple_omp_atomic_load_rhs): Likewise.
+
+ * gimplify-me.c (gimple_regimplify_operands): Add a checked cast
+ to gimple_omp_atomic_load within GIMPLE_OMP_ATOMIC_LOAD case of
+ switch statement.
+
+ * omp-low.c (expand_omp_atomic): Strengthen type of local "load"
+ from gimple to gimple_omp_atomic_load.
+ (lower_omp_1): Add a checked cast to gimple_omp_atomic_load within
+ GIMPLE_OMP_ATOMIC_LOAD case of switch statement.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Use more concrete types for various gimple statements
+
+ * cgraphunit.c (thunk_adjust): Strengthen local "stmt" from gimple
+ to gimple_assign.
+
+ * gimple-ssa-isolate-paths.c
+ (insert_trap_and_remove_trailing_statements): Strengthen local
+ "new_stmt" from gimple to gimple_call.
+
+ * gimple-ssa-strength-reduction.c (replace_mult_candidate):
+ Strengthen local "copy_stmt" from gimple to gimple_assign.
+ (create_add_on_incoming_edge): Likewise, for "new_stmt".
+ (insert_initializers): Likewise, for "init_stmt".
+ (introduce_cast_before_cand): Likewise, for "cast_stmt".
+ (replace_one_candidate): Likewise, for "copy_stmt" and
+ "cast_stmt".
+
+ * gimplify.c (build_stack_save_restore): Require gimple_calls
+ rather than plain gimples.
+ (gimplify_bind_expr): Strengthen locals "stack_save" and
+ "stack_restore" from gimple to gimple_call. Strengthen "gs"
+ to gimple_try.
+ (gimplify_switch_expr): Strengthen local "gimple_switch" from
+ gimple to gimple_switch, and "new_default" to gimple_label.
+ (gimplify_cond_expr): Strengthen local "gimple_cond" from gimple
+ to gimple_cond.
+ (gimplify_init_constructor): Strengthen local "init" from gimple
+ to gimple_assign.
+ (gimplify_cleanup_point_expr): Strengthen local "gtry" from gimple
+ to gimple_try.
+ (gimple_push_cleanup): Strengthen locals "ffalse" and "ftrue" from
+ gimple to gimple_assign.
+
+ * tree-eh.c (do_goto_redirection): Strengthen local to gimple_goto.
+ (emit_post_landing_pad): Strengthen local to gimple_label.
+
+ * tree-outof-ssa.c (insert_backedge_copies): Strengthen local
+ "stmt" from gimple to gimple_assign.
+
+ * tree-parloops.c (take_address_of): Likewise.
+
+ * tree-predcom.c (replace_ref_with): Likewise, for "new_stmt".
+ (initialize_root_vars_lm): Likewise, for "init_stmt".
+ (reassociate_to_the_same_stmt): Likewise, for "new_stmt" and "tmp_stmt".
+
+ * tree-profile.c (gimple_gen_edge_profiler): Likewise, for "stmt1",
+ "stmt2", "stmt3".
+ (gimple_gen_ic_profiler): Likewise.
+ (gimple_gen_ic_func_profiler): Strengthen local "stmt1" from
+ gimple to gimple_call, and "stmt2" to gimple_assign.
+
+ * tree-scalar-evolution.c (scev_const_prop): Strengthen local
+ "ass" from gimple to gimple_assign.
+
+ * tree-sra.c (build_ref_for_offset): Likewise for "stmt".
+ (generate_subtree_copies): Likewise; also strengthen "ds" to
+ gimple_debug.
+ (init_subtree_with_zero): Likewise.
+ (sra_modify_expr): Likewise.
+ (load_assign_lhs_subreplacements): Likewise.
+ (sra_modify_assign): Strengthen "ds" to gimple_debug.
+ (sra_ipa_reset_debug_stmts): Likewise for "def_temp".
+
+ * tree-ssa-ccp.c (insert_clobber_before_stack_restore):
+ Strengthen local "clobber_stmt" from gimple to gimple_assign.
+
+ * tree-ssa-dce.c (remove_dead_stmt): Strengthen "note" to
+ gimple_debug.
+
+ * tree-ssa-dom.c (record_equivalences_from_stmt): Strengthen
+ local "new_stmt" from gimple to gimple_assign.
+ (optimize_stmt): Likewise.
+
+ * tree-ssa-forwprop.c (simplify_bitwise_binary): Likewise for
+ 4 declarations of "newop".
+ (simplify_rotate): Likewise for "g".
+
+ * tree-ssa-loop-im.c (rewrite_reciprocal): Likewise for 3 locals.
+ (rewrite_bittest): Likewise for "stmt" and "stmt2".
+ (move_computations_dom_walker::before_dom_children): Likewise for
+ "new_stmt".
+ (execute_sm): Likewise for "load" and "store".
+
+ * tree-ssa-loop-ivcanon.c (remove_exits_and_undefined_stmts):
+ Strengthen local "stmt" from gimple to gimple_call.
+ (unloop_loops): Likewise.
+
+ * tree-ssa-loop-ivopts.c (rewrite_use_nonlinear_expr): Strengthen
+ local "ass" from gimple to gimple_assign.
+ (remove_unused_ivs): Strengthen "def_temp" to gimple_debug.
+
+ * tree-ssa-loop-manip.c (rewrite_phi_with_iv): Strengthen local "stmt"
+ from gimple to gimple_assign.
+
+ * tree-ssa-loop-prefetch.c (issue_prefetch_ref): Strengthen local
+ "prefetch" from gimple to gimple_call.
+
+ * tree-ssa-math-opts.c (insert_reciprocals): Strengthen local
+ "new_stmt" from gimple to gimple_assign.
+ (powi_as_mults_1): Likewise for "mult_stmt".
+ (powi_as_mults): Likewise for "div_stmt".
+ (build_and_insert_binop): Likewise for "stmt".
+ (build_and_insert_cast): Likewise.
+ (pass_cse_sincos::execute): Likewise for "stmt" and various decls
+ of "new_stmt".
+ (convert_mult_to_fma): Likewise for "fma_stmt".
+
+ * tree-ssa-phiopt.c (conditional_replacement): Likewise for "new_stmt".
+ (abs_replacement): Likewise.
+
+ * tree-ssa-phiprop.c (phiprop_insert_phi): Likewise for "tmp".
+
+ * tree-ssa-pre.c (create_expression_by_pieces): Likewise for "newstmt".
+ (eliminate_insert): Likewise for "tem".
+
+ * tree-ssa-propagate.c (update_gimple_call): Strengthen locals
+ "new_stmt" and "stmt" from gimple to gimple_call.
+ (update_call_from_tree): Likewise for "new_stmt".
+
+ * tree-ssa-reassoc.c (build_and_add_sum): Likewise for "sum".
+ (update_ops): Likewise for "g".
+ (maybe_optimize_range_tests): Likewise.
+ (rewrite_expr_tree_parallel): Require a gimple_assign rather than
+ a plain gimple.
+ (reassociate_bb): Add a checked cast to gimple_assign.
+
+ * tree-ssa.c (insert_debug_temp_for_var_def): Strengthen local
+ "def_temp" from gimple to gimple_debug.
+
+ * tree-switch-conversion.c (emit_case_bit_tests): Strengthen local
+ "shift_stmt" from gimple to gimple_assign.
+
+ * tree-tailcall.c (adjust_return_value_with_ops): Likewise for
+ "stmt".
+ (update_accumulator_with_ops): Likewise.
+
+ * tree-vect-data-refs.c (bump_vector_ptr): Likewise for
+ "incr_stmt".
+
+ * tree-vect-stmts.c (vectorizable_condition): Likewise for
+ "new_stmt".
+
+ * tree-vrp.c (build_assert_expr_for): Likewise for "assertion".
+ (simplify_truth_ops_using_ranges): Likewise for "newop".
+ (simplify_float_conversion_using_ranges): Likewise for "conv".
+
+ * ubsan.c (instrument_mem_ref): Strengthen local "g" from gimple
+ to gimple_call.
+
+ * value-prof.c (gimple_divmod_fixed_value): Require a
+ gimple_assign rather than a plain gimple; strengthen types of locals.
+ (gimple_mod_pow2): Likewise.
+ (gimple_mod_subtract): Likewise.
+ (gimple_divmod_fixed_value_transform): Strengthen local
+ "stmt" from gimple to gimple_assign.
+ (gimple_mod_pow2_value_transform): Likewise.
+ (gimple_mod_subtract_transform): Likewise.
+ (gimple_ic): Strengthen types of locals.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_try
+
+ * coretypes.h (gimple_try): New typedef.
+ (const_gimple_try): New typedef.
+
+ * gimple-low.c (gimple_try_catch_may_fallthru): Require a
+ gimple_try rather than a plain gimple.
+ (gimple_stmt_may_fallthru): Add checked cast to gimple_try.
+
+ * gimple-pretty-print.c (dump_gimple_try): Require a gimple_try
+ rather than a plain gimple.
+ (pp_gimple_stmt_1): Add checked cast to gimple_try within
+ GIMPLE_TRY case of switch statement.
+
+ * tree-eh.c (finally_tree_node::parent): Strengthen field from
+ gimple to gimple_try.
+ (record_in_finally_tree): Require a gimple_try rather than a plain
+ gimple.
+ (collect_finally_tree): Likewise.
+ (collect_finally_tree_1): Likewise.
+ (struct leh_tf_state::try_finally_expr): Strengthen field from
+ gimple to gimple_try.
+ (struct leh_tf_state::top_p): Likewise.
+ (lower_eh_must_not_throw): Require a gimple_try rather than a
+ plain gimple.
+ (frob_into_branch_around): Likewise.
+ (lower_try_finally_dup_block): Strengthen local from gimple to
+ gimple_try.
+ (honor_protect_cleanup_actions): Split out uses of "x" into new
+ locals "eh_mnt" and "try_stmt" with stronger types.
+ (lower_try_finally): Require a gimple_try rather than a plain
+ gimple.
+ (lower_catch): Likewise.
+ (lower_eh_filter): Likewise.
+ (lower_eh_must_not_throw): Likewise.
+ (lower_cleanup): Likewise.
+ (lower_eh_constructs_2): Add checked cast to gimple_try within
+ GIMPLE_TRY case of switch statement, introducing new local
+ "try_stmt", using it for type-safety.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Use subclasses of gimple in various places
+
+	* asan.c (insert_if_then_before_iter): Require a gimple_cond
+	rather than a plain gimple.
+ (asan_expand_check_ifn): Add a checked cast to gimple_cond.
+
+ * cfgloopmanip.c (create_empty_if_region_on_edge): Likewise.
+
+ * omp-low.c (simd_clone_adjust): Strengthen local from gimple
+ to gimple_phi.
+
+ * sese.c (set_ifsese_condition): Strengthen local from gimple to
+ gimple_cond.
+
+ * tree-call-cdce.c (gen_one_condition): Strengthen locals from
+ gimple to gimple_assign and gimple_cond.
+
+ * tree-ssa-phiopt.c (minmax_replacement): Likewise.
+ (cond_store_replacement): Strengthen locals from gimple to
+ gimple_phi and gimple_assign.
+ (cond_if_else_store_replacement_1): Likewise.
+
+ * tree-ssa-pre.c (do_regular_insertion): Strengthen local from
+ gimple to gimple_assign.
+
+ * tree-switch-conversion.c (hoist_edge_and_branch_if_true):
+ Strengthen local from gimple to gimple_cond.
+ (gen_def_assigns): Return a gimple_assign rather than a plain
+ gimple.
+ (gen_inbound_check): Strengthen locals from gimple to gimple_cond
+ and gimple_assign.
+
+ * tree-vect-loop-manip.c (slpeel_add_loop_guard): Strengthen local
+ from gimple to gimple_cond.
+ (set_prologue_iterations): Strengthen locals from gimple to
+ gimple_phi and gimple_cond.
+
+ * value-prof.c (gimple_ic): Strengthen local from gimple to
+ gimple_phi.
+ (gimple_stringop_fixed_value): Strengthen locals from gimple to
+ gimple_assign, gimple_cond, gimple_call, and gimple_phi.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_eh_dispatch
+
+ * coretypes.h (gimple_eh_dispatch): New typedef.
+ (const_gimple_eh_dispatch): New typedef.
+
+ * gimple-pretty-print.c (dump_gimple_eh_dispatch): Require a
+ gimple_eh_dispatch rather than a plain gimple.
+ (pp_gimple_stmt_1): Add a checked cast to gimple_eh_dispatch
+ within GIMPLE_EH_DISPATCH case of switch statement.
+
+ * gimple-streamer-in.c (input_gimple_stmt): Likewise.
+
+ * gimple-streamer-out.c (output_gimple_stmt): Likewise.
+
+ * gimple.c (gimple_build_eh_dispatch): Return a gimple_eh_dispatch
+ rather than a plain gimple.
+
+ * gimple.h (gimple_build_eh_dispatch): Return a gimple_eh_dispatch
+ rather than a plain gimple.
+ (gimple_eh_dispatch_region): Require a const_gimple_eh_dispatch
+ rather than a plain const_gimple.
+ (gimple_eh_dispatch_set_region): Require a gimple_eh_dispatch
+ rather than a plain gimple.
+
+ * tree-cfg.c (make_edges): Add a checked cast to gimple_eh_dispatch
+ within GIMPLE_EH_DISPATCH case of switch statement.
+ (gimple_verify_flow_info): Likewise.
+ (gimple_redirect_edge_and_branch): Likewise.
+ (move_stmt_r): Likewise, adding a local.
+
+ * tree-eh.c (emit_eh_dispatch): Convert local from gimple to
+ gimple_eh_dispatch.
+ (make_eh_dispatch_edges): Require a gimple_eh_dispatch rather than
+ a plain gimple.
+ (redirect_eh_dispatch_edge): Likewise.
+ (lower_eh_dispatch): Likewise.
+ (execute_lower_eh_dispatch): Add a checked cast to
+ gimple_eh_dispatch.
+ (mark_reachable_handlers): Likewise.
+ (verify_eh_dispatch_edge): Require a gimple_eh_dispatch rather
+ than a plain gimple.
+
+ * tree-eh.h (make_eh_dispatch_edges): Likewise.
+ (redirect_eh_dispatch_edge): Likewise.
+ (verify_eh_dispatch_edge): Likewise.
+
+ * tree-inline.c (remap_gimple_stmt): Add a checked cast to
+ gimple_eh_dispatch within GIMPLE_EH_DISPATCH case of switch
+ statement, adding a local.
+ (copy_edges_for_bb): Add a checked cast to gimple_eh_dispatch.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_resx
+
+ * coretypes.h (gimple_resx): New typedef.
+ (const_gimple_resx): New typedef.
+
+ * gimple.h (gimple_build_resx): Return a gimple_resx rather than a
+ plain gimple.
+ (gimple_resx_region): Require a const_gimple_resx rather than a
+ plain const_gimple.
+ (gimple_resx_set_region): Require a gimple_resx rather than a
+ plain gimple.
+
+ * gimple-pretty-print.c (dump_gimple_resx): Require a gimple_resx
+ rather than a plain gimple.
+ (pp_gimple_stmt_1): Add a checked cast to gimple_resx within
+ GIMPLE_RESX case of switch statement.
+
+ * gimple-streamer-in.c (input_gimple_stmt): Likewise.
+
+ * gimple-streamer-out.c (output_gimple_stmt): Likewise.
+
+ * gimple.c (gimple_build_resx): Return a gimple_resx rather than
+ a plain gimple.
+
+ * tree-cfg.c (move_stmt_r): Add a checked cast to gimple_resx
+ within GIMPLE_RESX case of switch statement, adding a new local.
+
+ * tree-eh.c (emit_resx): Convert local "x" from gimple to
+ gimple_resx.
+ (lower_resx): Require a gimple_resx rather than a plain gimple.
+ (pass_lower_resx::execute): Add a checked cast to gimple_resx.
+ (mark_reachable_handlers): Likewise.
+
+ * tree-inline.c (remap_gimple_stmt): Add a checked cast to
+ gimple_resx within GIMPLE_RESX case of switch statement, adding
+ a new local.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_eh_else
+
+ * coretypes.h (gimple_eh_else): New typedef.
+ (const_gimple_eh_else): New typedef.
+
+ * gimple.h (gimple_build_eh_else): Return a gimple_eh_else rather
+ than a plain gimple.
+ (gimple_eh_else_n_body_ptr): Require a gimple_eh_else rather than
+ a plain gimple.
+ (gimple_eh_else_n_body): Likewise.
+ (gimple_eh_else_e_body_ptr): Likewise.
+ (gimple_eh_else_e_body): Likewise.
+ (gimple_eh_else_set_n_body): Likewise.
+ (gimple_eh_else_set_e_body): Likewise.
+
+ * gimple-low.c (lower_stmt): Add checked cast to gimple_eh_else
+ within GIMPLE_EH_ELSE case of switch statement, introducing a new
+ local.
+ (gimple_stmt_may_fallthru): Likewise.
+
+ * gimple-pretty-print.c (dump_gimple_eh_else): Require a
+ gimple_eh_else rather than a plain gimple.
+	(pp_gimple_stmt_1): Add checked cast to gimple_eh_else within
+	GIMPLE_EH_ELSE case of switch statement.
+
+ * gimple-walk.c (walk_gimple_stmt): Add checked cast to
+ gimple_eh_else within GIMPLE_EH_ELSE case of switch statement,
+ introducing a new local.
+
+ * gimple.c (gimple_build_eh_else): Return a gimple_eh_else
+ rather than a plain gimple.
+ (gimple_copy): Add checked casts to gimple_eh_else within
+ GIMPLE_EH_ELSE case of switch statement, introducing new locals.
+
+ * tree-cfg.c (verify_gimple_in_seq_2): Add checked cast to
+ gimple_eh_else within GIMPLE_EH_ELSE case of switch statement,
+ introducing a new local.
+
+ * tree-eh.c (collect_finally_tree): Likewise.
+ (replace_goto_queue_1): Likewise.
+ (get_eh_else): Return a gimple_eh_else rather than a plain gimple.
+ (honor_protect_cleanup_actions): Convert local "eh_else" from
+ gimple to gimple_eh_else.
+ (lower_try_finally_nofallthru): Likewise.
+ (lower_try_finally_onedest): Introduce locals "eh_else" and
+ "label_stmt", using them in favor of "x" for the gimple_eh_else
+ and the gimple_label.
+ (lower_try_finally_copy): Convert local "eh_else" from gimple to
+ gimple_eh_else.
+ (lower_try_finally_switch): Likewise.
+ (decide_copy_try_finally): Likewise.
+ (refactor_eh_r): Add checked cast to gimple_eh_else within
+ GIMPLE_EH_ELSE case of switch statement, introducing a new local.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_eh_must_not_throw
+
+ * coretypes.h (gimple_eh_must_not_throw): New typedef.
+ (const_gimple_eh_must_not_throw): New typedef.
+
+ * gimple-pretty-print.c (dump_gimple_eh_must_not_throw): Require
+ a gimple_eh_must_not_throw rather than a plain gimple.
+ (pp_gimple_stmt_1): Add a checked cast to gimple_eh_must_not_throw
+ within GIMPLE_EH_MUST_NOT_THROW case of switch statement.
+
+ * gimple-streamer-in.c (input_gimple_stmt): Likewise.
+
+ * gimple-streamer-out.c (output_gimple_stmt): Likewise.
+
+ * gimple.c (gimple_build_eh_must_not_throw): Return a
+ gimple_eh_must_not_throw rather than a plain gimple.
+
+ * gimple.h (gimple_build_eh_must_not_throw): Return a
+ gimple_eh_must_not_throw rather than a plain gimple.
+ (gimple_eh_must_not_throw_fndecl): Require a
+ gimple_eh_must_not_throw rather than a plain gimple.
+ (gimple_eh_must_not_throw_set_fndecl): Likewise.
+
+ * tree-eh.c (lower_eh_must_not_throw): Add checked cast.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_eh_filter
+
+ * coretypes.h (gimple_eh_filter): New typedef.
+ (const_gimple_eh_filter): New typedef.
+
+ * gimple.h (gimple_build_eh_filter): Return a gimple_eh_filter
+ rather than a plain gimple.
+
+ * gimple-pretty-print.c (dump_gimple_eh_filter): Require a
+ gimple_eh_filter rather than a plain gimple.
+ (pp_gimple_stmt_1): Add checked cast to gimple_eh_filter within
+ GIMPLE_EH_FILTER case of switch statement.
+
+ * gimple.c (gimple_build_eh_filter): Return a gimple_eh_filter
+ rather than a plain gimple.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_catch
+
+ * coretypes.h (gimple_catch): New typedef.
+ (const_gimple_catch): New typedef.
+
+ * gimple-low.c (lower_try_catch): Add checked cast to gimple_catch.
+ (gimple_try_catch_may_fallthru): Likewise.
+
+ * gimple-pretty-print.c (dump_gimple_catch): Require a gimple_catch
+ rather than a plain gimple.
+ (pp_gimple_stmt_1): Add checked cast to gimple_catch within
+ GIMPLE_CATCH case of switch statement.
+
+ * gimple-walk.c (walk_gimple_op): Likewise.
+ (walk_gimple_stmt): Likewise.
+
+ * gimple.c (gimple_build_catch): Return a gimple_catch rather than
+ a plain gimple.
+ (gimple_copy): Add checked casts to gimple_catch within
+ GIMPLE_CATCH case of switch statement, introducing new locals.
+
+ * gimple.h (gimple_build_catch): Return a gimple_catch rather than
+ a plain gimple.
+ (gimple_catch_types_ptr): Require a gimple_catch rather than a
+ plain gimple.
+ (gimple_catch_handler_ptr): Likewise.
+ (gimple_catch_handler): Likewise.
+ (gimple_catch_set_types): Likewise.
+ (gimple_catch_set_handler): Likewise.
+
+ * omp-low.c (lower_omp_1): Add checked cast to gimple_catch within
+ GIMPLE_CATCH case of switch statement.
+
+ * tree-cfg.c (verify_gimple_in_seq_2): Likewise.
+ (do_warn_unused_result): Likewise.
+
+ * tree-eh.c (collect_finally_tree): Likewise.
+ (replace_goto_queue_1): Likewise.
+ (lower_catch): Convert local from gimple to gimple_catch.
+ (refactor_eh_r): Add checked cast to gimple_catch within
+ GIMPLE_CATCH case of switch statement.
+
+ * tree-inline.c (remap_gimple_stmt): Likewise.
+ (estimate_num_insns): Add checked cast to gimple_catch within
+ GIMPLE_CATCH case of switch statement, introducing new local.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_transaction
+
+ * coretypes.h (gimple_transaction): New typedef.
+ (const_gimple_transaction): New typedef.
+
+ * gimple.h (gimple_build_transaction): Return a gimple_transaction
+ rather than a plain gimple.
+ (gimple_transaction_body_ptr): Require a gimple_transaction rather
+ than a plain gimple.
+ (gimple_transaction_body): Likewise.
+ (gimple_transaction_label_ptr): Likewise.
+ (gimple_transaction_set_body): Likewise.
+ (gimple_transaction_set_label): Likewise.
+ (gimple_transaction_set_subcode): Likewise.
+ (gimple_transaction_label): Require a const_gimple_transaction
+ rather than a plain const_gimple.
+ (gimple_transaction_subcode): Likewise.
+
+ * gimple-low.c (lower_stmt): Add checked cast to
+ gimple_transaction within GIMPLE_TRANSACTION case of switch
+ statement.
+
+ * gimple-pretty-print.c (dump_gimple_transaction): Require a
+ gimple_transaction rather than a plain gimple.
+ (pp_gimple_stmt_1): Add checked cast to gimple_transaction within
+ GIMPLE_TRANSACTION case of switch statement.
+ * gimple-streamer-in.c (input_gimple_stmt): Likewise.
+ * gimple-streamer-out.c (output_gimple_stmt): Likewise.
+ * gimple-walk.c (walk_gimple_op): Likewise.
+ (walk_gimple_stmt): Likewise.
+
+ * gimple.c (gimple_build_transaction): Return a gimple_transaction
+ rather than a plain gimple.
+ (gimple_copy): Add checked casts to gimple_transaction within
+ GIMPLE_TRANSACTION case of switch statement.
+
+ * gimplify.c (gimplify_transaction): Split local "g" into
+ "body_stmt" and "trans_stmt", strengthening the type of the latter
+ from gimple to gimple_transaction.
+
+ * omp-low.c (lower_omp_1): Add checked cast to gimple_transaction
+ within GIMPLE_TRANSACTION case of switch statement.
+
+ * trans-mem.c (diagnose_tm_1): Add checked cast within
+ GIMPLE_TRANSACTION case of switch statement, introducing a new
+ local "trans_stmt". Use it in place of "stmt".
+ (examine_call_tm): Convert local from gimple to gimple_transaction.
+ (tm_region::get_transaction_stmt): New method.
+ (tm_region::transaction_stmt): Add clarification of type to the
+ comment.
+ (tm_region_init_0): Require a gimple_transaction rather than a
+ plain gimple.
+ (tm_region_init): Convert a check against GIMPLE_TRANSACTION to a
+ dyn_cast<gimple_transaction> and new local.
+ (transaction_subcode_ior): Add a new local, using the new
+ get_transaction_stmt method to perform a checked cast.
+ (propagate_tm_flags_out): Likewise.
+ (expand_transaction): Add a checked cast using the new
+ get_transaction_stmt method.
+ (generate_tm_state): Likewise.
+ (execute_tm_mark): Likewise.
+ (ipa_tm_diagnose_transaction): Likewise.
+
+ * tree-cfg.c (verify_gimple_transaction): Require a
+ gimple_transaction rather than a plain gimple.
+	(make_edges): Add checked cast within GIMPLE_TRANSACTION case of
+	switch statement.
+ (cleanup_dead_labels): Likewise.
+ (verify_gimple_stmt): Likewise.
+	(verify_gimple_in_seq_2): Likewise.
+ (gimple_redirect_edge_and_branch): Add checked cast.
+
+ * tree-inline.c (remap_gimple_stmt): Add checked cast within
+ GIMPLE_TRANSACTION case of switch statement, introducing a new
+ local "old_trans_stmt". Use it in place of "stmt". Add new
+ local "new_trans_stmt", using it to initialize "copy", and for
+ type-safe operations as a transaction.
+ (estimate_num_insns): Add checked cast within GIMPLE_TRANSACTION
+ case of switch statement.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_asm
+
+ * coretypes.h (gimple_asm): New typedef.
+ (const_gimple_asm): New typedef.
+
+ * gimple.h (gimple_build_asm_vec): Return a gimple_asm rather than
+ just a gimple.
+ (gimple_asm_clobbers_memory_p): Require a const_gimple_asm rather
+ than just a const_gimple.
+ (gimple_asm_ninputs): Likewise.
+ (gimple_asm_noutputs): Likewise.
+ (gimple_asm_nclobbers): Likewise.
+ (gimple_asm_nlabels): Likewise.
+ (gimple_asm_input_op): Likewise.
+ (gimple_asm_input_op_ptr): Likewise.
+ (gimple_asm_output_op): Likewise.
+ (gimple_asm_output_op_ptr): Likewise.
+ (gimple_asm_clobber_op): Likewise.
+ (gimple_asm_label_op): Likewise.
+ (gimple_asm_string): Likewise.
+ (gimple_asm_volatile_p): Likewise.
+ (gimple_asm_input_p): Likewise.
+ (gimple_asm_set_input_op): Require a gimple_asm rather than a plain
+ gimple.
+ (gimple_asm_set_output_op): Likewise.
+ (gimple_asm_set_clobber_op): Likewise.
+ (gimple_asm_set_label_op): Likewise.
+ (gimple_asm_set_volatile): Likewise.
+ (gimple_asm_set_input): Likewise.
+
+ * cfgexpand.c (expand_asm_stmt): Require a gimple_asm rather than
+ a plain gimple.
+ (expand_gimple_stmt_1): Add checked cast to gimple_asm within
+ GIMPLE_ASM case of switch statement.
+
+ * gimple-fold.c (fold_stmt_1): Add new local from checked cast to
+ gimple_asm within case GIMPLE_ASM.
+
+ * gimple-pretty-print.c (dump_gimple_asm): Require a gimple_asm
+ rather than a plain gimple.
+ (pp_gimple_stmt_1): Add checked cast to gimple_asm within
+ GIMPLE_ASM case of switch statement.
+
+ * gimple-streamer-in.c (input_gimple_stmt): Rework existing
+ checked cast to gimple_asm; add a new one.
+
+ * gimple-streamer-out.c (output_gimple_stmt): Add new local from
+ checked cast to gimple_asm within case GIMPLE_ASM.
+
+ * gimple-walk.c (walk_gimple_asm): Require a gimple_asm rather
+ than a plain gimple.
+ (walk_gimple_op): Add checked cast to gimple_asm within GIMPLE_ASM
+ case of switch statement.
+ (walk_stmt_load_store_addr_ops): Use dyn_cast<gimple_asm> in place
+ of a code check against GIMPLE_ASM to introduce a new gimple_asm
+ local.
+
+ * gimple.c (gimple_build_asm_1): Return a gimple_asm rather than
+ a plain gimple.
+ (gimple_build_asm_vec): Likewise.
+ (gimple_has_side_effects): Add a checked cast to gimple_asm.
+ (gimple_could_trap_p_1): Likewise.
+ (gimple_call_builtin_p): Require a const_gimple_asm rather then
+ a const_gimple.
+
+ * gimplify-me.c (gimple_regimplify_operands): Add a checked cast
+ and a new local of type gimple_asm within GIMPLE_ASM case.
+
+ * gimplify.c (gimplify_asm_expr): Convert a local from gimple to
+ gimple_asm.
+
+ * ipa-pure-const.c (check_stmt): Add checked casts within
+ GIMPLE_ASM case.
+
+ * ssa-iterators.h (op_iter_init): Likewise.
+
+ * tree-cfg.c (make_goto_expr_edges): Convert a local from gimple
+ to gimple_asm.
+ (cleanup_dead_labels): Add a checked cast and a new local of type
+ gimple_asm within GIMPLE_ASM case.
+ (gimple_redirect_edge_and_branch): Likewise.
+ (is_ctrl_altering_stmt): Add a checked cast.
+ (need_fake_edge_p): Replace a code check against GIMPLE_ASM with a
+ dyn_cast<gimple_asm>.
+
+ * tree-complex.c (expand_complex_comparison): Convert a local from
+ gimple to gimple_asm.
+
+ * tree-data-ref.c (get_references_in_stmt): Add a checked cast to
+ gimple_asm.
+
+ * tree-eh.c (stmt_could_throw_p): Likewise.
+
+ * tree-inline.c (estimate_num_insns): Likewise.
+
+ * tree-sra.c (scan_function): Add a checked cast and a new local
+ of type gimple_asm within GIMPLE_ASM case.
+ (sra_modify_function_body): Likewise.
+ (ipa_sra_modify_function_body): Likewise.
+
+ * tree-ssa-coalesce.c (create_outofssa_var_map): Likewise.
+
+ * tree-ssa-dce.c (propagate_necessity): Replace a code check
+ against GIMPLE_ASM with a dyn_cast<gimple_asm>.
+
+ * tree-ssa-operands.c (maybe_add_call_vops): Require a gimple_asm
+ rather than a plain gimple.
+ (parse_ssa_operands): Add a checked cast to gimple_asm.
+
+ * tree-ssa-structalias.c (find_func_aliases): Replace a check for
+ GIMPLE_ASM with a dyn_cast<gimple_asm>, introducing a new local
+ "asm_stmt", using it in place of "t" for typesafety.
+
+ * tree-ssa-threadedge.c
+ (record_temporary_equivalences_from_stmts_at_dest): Add a checked
+ cast to gimple_asm.
+
+ * tree-ssa.c (execute_update_addresses_taken): Add checked casts
+ and new locals of type gimple_asm within clauses guarded by code
+ check.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_goto
+
+ * coretypes.h (gimple_goto): New typedef.
+ (const_gimple_goto): New typedef.
+
+ * gimple.h (gimple_statement_goto): New subclass of
+ gimple_statement_with_ops, adding the invariant that
+ stmt->code == GIMPLE_GOTO.
+ (is_a_helper <gimple_statement_goto>::test): New.
+ (gimple_build_goto): Return a gimple_goto rather than a
+ plain gimple.
+
+ * gimple-pretty-print.c (dump_gimple_goto): Require a gimple_goto
+ rather than a plain gimple.
+ (pp_gimple_stmt_1): Add a checked cast to gimple_goto within
+ GIMPLE_GOTO case of switch statement.
+
+ * gimple.c (gimple_build_goto): Return a gimple_goto rather than a
+ plain gimple.
+
+ * tree-cfg.c (verify_gimple_goto): Require a gimple_goto rather
+ than a plain gimple.
+ (verify_gimple_stmt): Add a checked cast to gimple_goto within
+ GIMPLE_GOTO case of switch statement.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_return
+
+ * coretypes.h (gimple_return): New typedef.
+ (const_gimple_return): New typedef.
+
+ * gimple.h (gimple_statement_return): New subclass of
+ gimple_statement_with_memory_ops, adding the invariant that
+ stmt->code == GIMPLE_RETURN.
+ (is_a_helper <gimple_statement_return>::test): New.
+ (gimple_build_return): Return a gimple_return rather
+ than a plain gimple.
+
+ * gimple.c (gimple_build_return): Return a gimple_return rather
+ than a plain gimple.
+
+ * cgraphunit.c (expand_thunk): Convert local from a gimple to
+ a gimple_return.
+
+ * gimple-low.c (struct return_statements_t): Convert field "stmt"
+ from a gimple to a gimple_return.
+ (lower_gimple_return): Convert local from a gimple to a
+ gimple_return.
+
+ * gimple-pretty-print.c (dump_gimple_return): Require a
+ gimple_return rather than a plain gimple.
+ (pp_gimple_stmt_1): Add a checked cast to gimple_return within
+ case GIMPLE_RETURN of switch statement.
+
+ * gimplify.c (gimplify_return_expr): Convert locals from
+ gimple to gimple_return.
+
+ * ipa-split.c (split_function): Likewise.
+
+	* tree-cfg.c (verify_gimple_return): Require a gimple_return
+	rather than a plain gimple.
+ (verify_gimple_stmt): Add checked cast to gimple_return within
+ case GIMPLE_RETURN of switch statement.
+
+ * tree-tailcall.c (adjust_return_value): Convert local from
+ gimple to gimple_return.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_call
+
+ * coretypes.h (gimple_call): New typedef.
+ (const_gimple_call): New typedef.
+
+ * asan.c (get_mem_refs_of_builtin_call): Require a
+ const_gimple_call rather than a const gimple.
+ (has_stmt_been_instrumented_p): Add a checked cast to
+ gimple_call.
+ (instrument_strlen_call): Likewise.
+ (instrument_builtin_call): Likewise.
+ * builtins.c (validate_gimple_arglist): Require a
+ const_gimple_call rather than a const gimple.
+ (fold_call_stmt): Require a gimple_call rather than a gimple.
+ * builtins.h (validate_gimple_arglist): Require a
+ const_gimple_call rather than a const gimple.
+ (fold_call_stmt): Require a gimple_call rather than a gimple.
+ * cfgexpand.c (expand_call_stmt): Likewise.
+ (expand_gimple_stmt_1): Add a checked cast to gimple_call within
+ GIMPLE_CALL case.
+ * cgraph.c (cgraph_edge::redirect_call_stmt_to_callee): Strengthen
+ local "new_stmt" from gimple to gimple_call, adding a checked
+ cast.
+ * cgraphunit.c (cgraph_node::expand_thunk): Likewise for local
+ "call".
+ * gimple-fold.c (gimple_fold_builtin_snprintf_chk): Likewise for
+ local "stmt".
+ (gimple_fold_builtin_snprintf): Likewise.
+ (gimple_fold_builtin): Likewise.
+ (gimple_fold_call): Likewise.
+ (gimple_fold_stmt_to_constant_1): Introduce local "call_stmt" via
+ checked cast of "stmt" to gimple_call, using it in various places
+ for typesafety.
+ * gimple-pretty-print.c (dump_gimple_call_args): Strengthen param
+ 2 from gimple to gimple_call.
+ (dump_gimple_call): Likewise.
+ (pp_gimple_stmt_1): Add a checked cast to gimple_call within
+ GIMPLE_CALL case.
+ * gimple-streamer-in.c (input_gimple_stmt): Replace is_gimple_call
+ with a dyn_cast<gimple_call>, introducing local "call_stmt", and
+ using it in place of "stmt" for typesafety. Add a checked cast
+ in statement guarded by check for GIMPLE_CALL.
+ * gimple-walk.c (walk_gimple_op): Add a checked cast to
+ gimple_call.
+ * gimple.c (gimple_call_reset_alias_info): Strengthen param from
+ gimple to gimple_call.
+ (gimple_build_call_1): Strengthen return type and local from
+ gimple to gimple_call.
+ (gimple_build_call_vec): Likewise.
+ (gimple_build_call): Likewise.
+ (gimple_build_call_valist): Likewise.
+ (gimple_build_call_internal_1): Likewise.
+ (gimple_build_call_internal): Likewise.
+ (gimple_build_call_internal_vec): Likewise.
+ (gimple_build_call_from_tree): Likewise.
+ (gimple_call_return_flags): Strengthen param from
+ const_gimple to const_gimple_call.
+ (gimple_call_copy_skip_args): Strengthen return type and local from
+ gimple to gimple_call.
+ * gimple.h (gimple_call_reset_alias_info): Strengthen param from
+ gimple to gimple_call.
+ (gimple_build_call_vec): Strengthen return type from gimple to
+ gimple_call.
+ (gimple_build_call): Likewise.
+ (gimple_build_call_valist): Likewise.
+ (gimple_build_call_internal): Likewise.
+ (gimple_build_call_internal_vec): Likewise.
+ (gimple_build_call_from_tree): Likewise.
+ (gimple_call_return_flags): Strengthen param from const_gimple to
+ const_gimple_call.
+ (gimple_call_copy_skip_args): Strengthen return type from gimple
+ to gimple_call.
+ (gimple_call_set_internal_fn): Strengthen param "call_stmt" from
+ gimple to gimple_call.
+ (gimple_call_return_type): Strengthen param from const_gimple to
+ const_gimple_call.
+ (gimple_call_chain_ptr): Likewise.
+ (gimple_call_set_chain): Strengthen param from gimple to
+ gimple_call.
+ (gimple_call_set_return_slot_opt): Likewise.
+ (gimple_call_set_from_thunk): Likewise.
+ (gimple_call_from_thunk_p): Likewise.
+ (gimple_call_set_va_arg_pack): Likewise.
+ (gimple_call_va_arg_pack_p): Likewise.
+ (gimple_call_set_alloca_for_var): Likewise.
+ (gimple_call_alloca_for_var_p): Likewise.
+ (gimple_expr_type): Introduce local "call_stmt" via a checked cast
+ and use it for typesafety.
+ * gimplify.c (gimplify_call_expr): Strengthen local "call" from
+ gimple to gimple_call.
+ (gimplify_modify_expr_to_memcpy): Likewise for local "gs".
+ (gimplify_modify_expr_to_memset): Likewise.
+ (gimplify_modify_expr): Add a checked cast to gimple_call.
+ (gimplify_expr): Strengthen local "call" from gimple to
+ gimple_call.
+ (gimplify_function_tree): Likewise.
+ * internal-fn.c (expand_LOAD_LANES): Strengthen param from gimple
+ to gimple_call.
+ (expand_STORE_LANES): Likewise.
+ (expand_ANNOTATE): Likewise.
+ (expand_GOMP_SIMD_LANE): Likewise.
+ (expand_GOMP_SIMD_VF): Likewise.
+ (expand_GOMP_SIMD_LAST_LANE): Likewise.
+ (expand_UBSAN_NULL): Likewise.
+ (expand_UBSAN_BOUNDS): Likewise.
+ (expand_UBSAN_OBJECT_SIZE): Likewise.
+ (expand_ASAN_CHECK): Likewise.
+ (ubsan_expand_si_overflow_addsub_check): Likewise.
+ (ubsan_expand_si_overflow_neg_check): Likewise.
+ (ubsan_expand_si_overflow_mul_check): Likewise.
+ (expand_UBSAN_CHECK_ADD): Likewise.
+ (expand_UBSAN_CHECK_SUB): Likewise.
+ (expand_UBSAN_CHECK_MUL): Likewise.
+ (expand_LOOP_VECTORIZED): Likewise.
+ (expand_MASK_LOAD): Likewise.
+ (expand_MASK_STORE): Likewise.
+ (expand_ABNORMAL_DISPATCHER): Likewise.
+ (expand_BUILTIN_EXPECT): Likewise.
+ (internal_fn_expanders): Likewise for entries in this table.
+ (expand_internal_call): Likewise.
+ * internal-fn.def: Update comment to reflect strengthening of
+ param of expanders.
+ * internal-fn.h (expand_internal_call): Strengthen param from
+ gimple to gimple_call.
+ * ipa-prop.c (ipa_modify_call_arguments): Likewise for local
+ "new_stmt".
+ * ipa-pure-const.c (check_call): Likewise for param "call".
+ (check_stmt): Add a checked cast to gimple_call within GIMPLE_CALL
+ case.
+ * ipa-split.c (split_function): Strengthen local "call" from
+ gimple to gimple_call.
+ * omp-low.c (build_omp_barrier): Likewise for local "g".
+ (lower_rec_input_clauses): Likewise for local "stmt".
+ * trans-mem.c (build_tm_load): Likewise for return type and local
+ "gcall".
+ (build_tm_store): Likewise.
+ (expand_transaction): Likewise for local "call".
+ * tree-call-cdce.c (check_pow): Likewise for param.
+ (check_builtin_call): Likewise.
+ (is_call_dce_candidate): Likewise.
+ (gen_conditions_for_pow): Likewise.
+ (gen_shrink_wrap_conditions): Likewise.
+ (shrink_wrap_one_built_in_call): Likewise.
+ (shrink_wrap_conditional_dead_built_in_calls): Strengthen param
+ from vec<gimple> to vec<gimple_call>, and local "bi_call" from
+ gimple to gimple_call.
+	(pass_call_cdce::execute): Strengthen local
+	"cond_dead_built_in_calls" from auto_vec<gimple> to
+	auto_vec<gimple_call> and local "stmt" from gimple to gimple_call.
+ * tree-cfg.c (notice_special_calls): Strengthen param from gimple
+ to gimple_call.
+ * tree-cfg.h (notice_special_calls): Likewise.
+ * tree-complex.c (expand_complex_libcall): Likewise for local
+ "stmt".
+ * tree-inline.c (remap_gimple_stmt): Add checked cast to
+ gimple_call.
+ (copy_bb): Likewise. Strengthen local "new_call" from gimple to
+ gimple_call.
+ (inline_forbidden_p_stmt): Add checked cast to gimple_call.
+ * tree-nested.c (init_tmp_var_with_call): Strengthen param "call"
+ from gimple to gimple_call.
+ (convert_nl_goto_reference): Likewise for local "call".
+ (convert_tramp_reference_op): Likewise.
+ (convert_gimple_call): Add checked cast to gimple_call.
+ * tree-nrv.c (dest_safe_for_nrv_p): Strengthen param "call" from
+ gimple to gimple_call.
+ (pass_return_slot::execute): Likewise for local "stmt", using a
+ dyn_cast<gimple_call> rather than an is_gimple_call call.
+ * tree-object-size.c (alloc_object_size): Strengthen param "call"
+ from const_gimple to const_gimple_call.
+ (pass_through_call): Likewise.
+ (call_object_size): Strengthen param "call" from gimple to
+ gimple_call.
+ (collect_object_sizes_for): Introduce local "call_stmt" via a
+ checked cast, using it for typesafety.
+ (check_for_plus_in_loops_1): Likewise.
+ (pass_object_sizes::execute): Add a checked cast to gimple_call.
+ * tree-profile.c (gimple_gen_interval_profiler): Strengthen local
+ "call" from gimple to gimple_call.
+ (gimple_gen_pow2_profiler): Likewise.
+ (gimple_gen_one_value_profiler): Likewise.
+ (gimple_gen_time_profiler): Likewise.
+ (gimple_gen_average_profiler): Likewise.
+ (gimple_gen_ior_profiler): Likewise.
+ * tree-ssa-alias.c (ref_maybe_used_by_call_p_1): Likewise for
+ param "call".
+ (ref_maybe_used_by_call_p): Likewise.
+ (ref_maybe_used_by_stmt_p): Add a checked cast to gimple_call.
+ (call_may_clobber_ref_p_1): Strengthen param "call" from gimple to
+ gimple_call.
+ (call_may_clobber_ref_p): Likewise.
+ (stmt_may_clobber_ref_p_1): Add a checked cast to gimple_call.
+ * tree-ssa-alias.h (call_may_clobber_ref_p): Strengthen param 1
+ from gimple to gimple_call.
+ (call_may_clobber_ref_p_1): Likewise.
+ * tree-ssa-dce.c (eliminate_unnecessary_stmts): Add a checked cast
+ to gimple_call.
+ * tree-ssa-loop-prefetch.c (emit_mfence_after_loop): Strengthen
+ local "call" from gimple to gimple_call.
+ * tree-ssa-math-opts.c (build_and_insert_call): Likewise for local
+ "call_stmt".
+ * tree-ssa-operands.c (maybe_add_call_vops): Likewise for param
+ "stmt".
+ (parse_ssa_operands): Add a checked cast to gimple_call within
+ GIMPLE_CALL case.
+ * tree-ssa-pre.c (compute_avail): Add a checked cast to
+ gimple_call.
+ * tree-ssa-sccvn.c (copy_reference_ops_from_call): Strengthen
+ param "call" from gimple to gimple_call.
+ (valueize_shared_reference_ops_from_call): Likewise.
+ (vn_reference_lookup_3): Add a checked cast to gimple_call.
+ (vn_reference_lookup_call): Strengthen param "call" from gimple to
+ gimple_call.
+ (visit_reference_op_call): Likewise for param "stmt".
+ (visit_use): Replace is_gimple_call with a dyn_cast, introducing
+ local "call_stmt", using it in place of "stmt" for type-safety.
+ * tree-ssa-sccvn.h (vn_reference_lookup_call): Strengthen param 1
+ from gimple to gimple_call.
+ * tree-ssa-structalias.c (get_call_vi): Likewise.
+ (lookup_call_use_vi): Likewise.
+ (lookup_call_clobber_vi): Likewise.
+ (get_call_use_vi): Likewise.
+ (get_call_clobber_vi): Likewise.
+ (handle_rhs_call): Likewise.
+ (handle_lhs_call): Likewise.
+ (handle_const_call): Likewise.
+ (handle_pure_call): Likewise.
+ (get_fi_for_callee): Likewise.
+ (find_func_aliases_for_builtin_call): Likewise for param 2.
+ (find_func_aliases_for_call): Likewise.
+ (find_func_aliases): Add a checked cast to gimple_call.
+ (find_func_clobbers): Replace is_gimple_call with a dyn_cast,
+ introducing local "call_stmt", using it in place of "stmt" for
+ type-safety.
+ (compute_points_to_sets): Strengthen local "stmt" from gimple to
+ gimple_call, replacing is_gimple_call with a
+ dyn_cast <gimple_call>.
+ (ipa_pta_execute): Likewise.
+ * tree-ssa-threadedge.c
+ (record_temporary_equivalences_from_stmts_at_dest): Add checked
+ cast to gimple_call.
+ * tree-tailcall.c (find_tail_calls): Strengthen local "call" from
+ gimple to gimple_call, adding a checked cast.
+ * tree-vect-data-refs.c (vect_setup_realignment): Eliminate
+ top-level local "new_stmt" in favor of more tightly-scoped locals
+ "new_stmt" of type gimple_assign and gimple_call.
+	* tree-vect-patterns.c (vect_recog_pow_pattern): Strengthen local
+	"stmt" from gimple to gimple_call.
+ * tree-vect-stmts.c (vectorizable_function): Likewise for param
+ "call".
+ (vectorizable_call): Rename param 1 from "stmt" to "gs",
+ reintroducing "stmt" as a gimple_call once we've established that
+ we're working with a GIMPLE_CALL.
+ * tree-vectorizer.h (vectorizable_function): Strengthen param 1
+ from gimple to gimple_call.
+ * value-prof.c (check_ic_target): Likewise.
+ (gimple_ic_transform): Likewise for local "stmt", replacing a
+ check for GIMPLE_CALL with a dyn_cast.
+ (interesting_stringop_to_profile_p): Strengthen param "call"
+ from gimple to gimple_call.
+ (gimple_stringop_fixed_value): Likewise for param "vcall_stmt".
+ (gimple_stringops_transform): Likewise for local "stmt",
+ replacing a check for GIMPLE_CALL with a dyn_cast.
+ (gimple_stringops_values_to_profile): Rename param 1 from "stmt"
+ to "gs", reintroducing "stmt" as a gimple_call once we've
+ established that we're working with a GIMPLE_CALL.
+ * vtable-verify.c (verify_bb_vtables): Strengthen local
+ "call_stmt" from gimple to gimple_call.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Concretize get_loop_exit_condition et al to working on gimple_cond
+
+ * tree-scalar-evolution.h (get_loop_exit_condition): Return a
+ gimple_cond.
+ * tree-scalar-evolution.c (get_loop_exit_condition): Likewise, also
+ concretizing local "res" from gimple to gimple_cond.
+ * tree-vect-loop-manip.c (slpeel_make_loop_iterate_ntimes): Convert
+ locals from gimple to gimple_cond.
+ (slpeel_can_duplicate_loop_p): Likewise.
+ * tree-vect-loop.c (vect_get_loop_niters): Return a gimple_cond.
+ (vect_analyze_loop_form): Convert local from gimple to gimple_cond.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Update various expressions within tree-scalar-evolution.c to be gimple_phi
+
+ * tree-scalar-evolution.c (follow_ssa_edge): Require a gimple_phi,
+ rather than a gimple.
+ (follow_ssa_edge_binary): Likewise.
+ (follow_ssa_edge_expr): Likewise.
+ (follow_ssa_edge_in_rhs): Likewise.
+ (backedge_phi_arg_p): Likewise.
+ (follow_ssa_edge_in_condition_phi_branch): Likewise.
+ (follow_ssa_edge_in_condition_phi): Likewise.
+ (follow_ssa_edge_inner_loop_phi): Likewise.
+ (analyze_evolution_in_loop): Likewise.
+ (analyze_initial_condition): Likewise.
+ (interpret_loop_phi): Likewise.
+ (interpret_condition_phi): Likewise.
+ (follow_ssa_edge): Likewise; also, add checked casts to gimple_phi.
+
+ (analyze_scalar_evolution_1): Add checked casts to gimple_phi
+ within "case GIMPLE_PHI".
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ tree-ssa-loop-ivopts.c: use gimple_phi in a few places
+
+ * tree-ssa-loop-ivopts.c (determine_biv_step): Require a gimple_phi.
+ (find_bivs): Convert local "phi" into a gimple_phi.
+ (mark_bivs): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ tree-ssa-loop-manip.c: use gimple_phi in three places
+
+ * tree-ssa-loop-manip.c (add_exit_phi): Convert local "phi" to be a
+ gimple_phi.
+ (split_loop_exit_edge): Likewise for "phi" and "new_phi".
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ tree-ssa-loop-niter.c: use gimple_phi in a few places
+
+ * tree-ssa-loop-niter.c (chain_of_csts_start): Return a gimple_phi
+ rather than a gimple.
+ (get_base_for): Likewise; convert local "phi" to be a gimple_phi.
+ (loop_niter_by_eval): Convert local "phi" to be a gimple_phi.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ tree-ssa-phiprop.c: use gimple_phi
+
+ * tree-ssa-phiprop.c (phiprop_insert_phi): Strengthen types of
+ parameter "phi" and local "new_phi" from gimple to gimple_phi.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ tree-predcom.c: use gimple_phi in various places
+
+ * tree-predcom.c (find_looparound_phi): Return a gimple_phi rather
+ than just a gimple.
+ (insert_looparound_copy): Require a gimple_phi rather than just a
+ gimple.
+ (add_looparound_copies): Convert local "phi" to be a gimple_phi.
+ (initialize_root_vars): Likewise.
+ (initialize_root_vars_lm): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ tree-parloops.c: use gimple_phi in various places
+
+ * tree-parloops.c (struct reduction_info): Strengthen field "new_phi"
+ from gimple to gimple_phi.
+ (create_phi_for_local_result): Convert local "new_phi" to gimple_phi.
+ (loop_has_vector_phi_nodes): Require a gimple_phi rather than a gimple.
+ (gather_scalar_reductions): Convert to a gimple_phi_iterator and
+ gimple_phi.
+ (try_create_reduction_list): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Update ssa_prop_visit_phi_fn callbacks to take a gimple_phi
+
+ * tree-ssa-propagate.h (typedef ssa_prop_visit_phi_fn): Strengthen
+ type of parameter from gimple to gimple_phi.
+
+ * tree-complex.c (complex_visit_phi): Update signature of callback
+ implementation accordingly.
+ * tree-ssa-ccp.c (ccp_visit_phi_node): Likewise.
+ * tree-ssa-copy.c (copy_prop_visit_phi_node): Likewise.
+ * tree-vrp.c (vrp_visit_phi_node): Likewise.
+
+ * tree-ssa-propagate.c (simulate_stmt): Add a checked cast to
+ gimple_phi when invoking the ssa_prop_visit_phi callback.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_phi_iterator
+
+ * gimple-iterator.h (struct gimple_phi_iterator): New subclass of
+ gimple_stmt_iterator with identical layout, but adding...
+ (gimple_phi_iterator::phi): ...new method, equivalent to
+ gsi_stmt (), but casting the underlying gimple to gimple_phi,
+ checking that code == GIMPLE_PHI in a checked build.
+ (gsi_start_phis): Return a gimple_phi_iterator, rather than just a
+ gimple_stmt_iterator.
+
+ * tree-if-conv.c (bb_with_exit_edge_p): Require a gimple_phi rather
+ than just a gimple.
+ (if_convertible_phi_p): Likewise.
+ * tree-phinodes.h (add_phi_node_to_bb): Likewise.
+ * tree-ssa-phiprop.c (propagate_with_phi): Likewise.
+
+ * tree-ssa-uninit.c (warn_uninitialized_phi): Require a gimple_phi
+ and a vec<gimple_phi> *, rather than just a gimple and
+ vec<gimple> *, and a hash_set<gimple_phi> * rather than a
+ hash_set<gimple> *.
+ (find_uninit_use): Likewise; add checked cast to gimple_phi when
+ adding to worklist.
+ (pass_late_warn_uninitialized::execute): Strengthen types of
+ various locals, "worklist" from vec<gimple> to vec<gimple_phi>,
+ "gsi" to a gimple_phi_iterator, "phi" and "cur_phi" to a
+ gimple_phi, "added_to_worklist" from hash_set<gimple> to
+ hash_set<gimple_phi>.
+
+ * tree-ssa-loop-manip.c (rewrite_phi_with_iv): Require a
+ gimple_phi_iterator * rather than a gimple_stmt_iterator *;
+ use it to strengthen local from a gimple to a gimple_phi.
+
+ * cfgloop.c (find_subloop_latch_edge_by_ivs): Convert local from a
+ gimple_stmt_iterator to a gimple_phi_iterator. Use the iterator's
+ "phi" method rather than gsi_stmt. Use this checked cast to convert
+ the type of related local from a plain gimple to a gimple_phi.
+ * gimple-pretty-print.c (dump_phi_nodes): Likewise.
+ * gimple-ssa-isolate-paths.c (find_implicit_erroneous_behaviour):
+ Likewise.
+ * sese.c (sese_add_exit_phis_edge): Likewise.
+ * tree-cfg.c (reinstall_phi_args): Likewise.
+ (gimple_make_forwarder_block): Likewise.
+ (add_phi_args_after_copy_edge): Likewise.
+ (gimple_lv_adjust_loop_header_phi): Likewise.
+ * tree-cfgcleanup.c (phi_alternatives_equal): Likewise.
+ (remove_forwarder_block_with_phi): Likewise.
+ (merge_phi_nodes): Likewise.
+ * tree-complex.c (update_phi_components): Likewise.
+ * tree-if-conv.c (if_convertible_loop_p_1): Likewise.
+ * tree-inline.c (update_ssa_across_abnormal_edges): Likewise.
+ (copy_phis_for_bb): Likewise.
+ * tree-into-ssa.c (rewrite_add_phi_arguments): Likewise.
+ * tree-outof-ssa.c (eliminate_build): Likewise.
+ (eliminate_useless_phis): Likewise.
+ (rewrite_trees): Likewise.
+ (insert_backedge_copies): Likewise.
+ * tree-phinodes.c (reserve_phi_args_for_new_edge): Likewise.
+ (remove_phi_args): Likewise.
+ (remove_phi_nodes): Likewise.
+ * tree-predcom.c (find_looparound_phi): Likewise.
+ (eliminate_temp_copies): Likewise.
+ * tree-scalar-evolution.c (loop_closed_phi_def): Likewise.
+ (scev_const_prop): Likewise; also, add checked cast to phi.
+ * tree-ssa-coalesce.c (coalesce_partitions): Likewise.
+ * tree-ssa-dce.c (remove_dead_phis): Likewise.
+ (forward_edge_to_pdom): Likewise.
+ * tree-ssa-dom.c (record_equivalences_from_phis): Likewise.
+ (cprop_into_successor_phis): Likewise.
+ (propagate_rhs_into_lhs): Likewise.
+ (eliminate_degenerate_phis_1): Likewise.
+ * tree-ssa-ifcombine.c (same_phi_args_p): Likewise.
+ * tree-ssa-live.c (calculate_live_on_exit): Likewise.
+ (verify_live_on_entry): Likewise.
+ * tree-ssa-loop-im.c
+ (move_computations_dom_walker::before_dom_children): Likewise.
+ * tree-ssa-loop-ivopts.c (find_bivs): Likewise.
+ (mark_bivs): Likewise.
+ (find_interesting_uses_outside): Likewise.
+ (determine_set_costs): Likewise.
+ * tree-ssa-loop-manip.c (split_loop_exit_edge): Likewise.
+ (tree_transform_and_unroll_loop): Likewise.
+ (rewrite_all_phi_nodes_with_iv): Likewise.
+ (canonicalize_loop_ivs): Likewise.
+ * tree-ssa-loop-niter.c (determine_value_range): Likewise.
+ * tree-ssa-phiopt.c (hoist_adjacent_loads): Likewise.
+ * tree-ssa-phiprop.c (tree_ssa_phiprop): Likewise.
+ * tree-ssa-reassoc.c (suitable_cond_bb): Likewise.
+ * tree-ssa-tail-merge.c (same_phi_alternatives_1): Likewise.
+ (vop_phi): Likewise.
+ * tree-ssa-threadedge.c (record_temporary_equivalences_from_phis):
+ Likewise.
+ * tree-ssa-threadupdate.c (copy_phi_arg_into_existing_phi): Likewise.
+ (copy_phi_args): Likewise.
+ (phi_args_equal_on_edges): Likewise.
+ * tree-ssa.c (ssa_redirect_edge): Likewise.
+ (flush_pending_stmts): Likewise.
+ * tree-switch-conversion.c (check_final_bb): Likewise.
+ (gather_default_values): Likewise.
+ (build_constructors): Likewise.
+ (fix_phi_nodes): Likewise.
+ * tree-tailcall.c (propagate_through_phis): Likewise.
+ (add_successor_phi_arg): Likewise.
+ * tree-vect-loop-manip.c (slpeel_update_phi_nodes_for_guard1):
+ Likewise.
+ (slpeel_update_phi_nodes_for_guard2): Likewise.
+ (slpeel_tree_peel_loop_to_edge): Likewise.
+ (vect_can_advance_ivs_p): Likewise.
+ (vect_update_ivs_after_vectorizer): Likewise.
+ * tree-vect-loop.c (vect_analyze_scalar_cycles_1): Likewise.
+ * tree-vrp.c (find_assert_locations): Likewise.
+ * value-prof.c (gimple_ic): Likewise.
+
+ * omp-low.c (expand_parallel_call): Convert local to a gimple_phi.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_phi and use it in various places
+
+ * coretypes.h (gimple_phi): New typedef.
+ (const_gimple_phi): New typedef.
+
+ * gdbhooks.py (build_pretty_printer): Add gimple_phi and its
+ variants, using the gimple printer.
+
+ * gimple.h (gimple_vec): Eliminate this typedef in the hope of using
+ vecs of more concrete gimple subclasses as appropriate; also the
+ comment is about to become misleading.
+
+ * gimple.h (gimple_phi_capacity): Use const_gimple_phi typedef
+ rather than spelling out the full type.
+ (gimple_phi_num_args): Likewise.
+ (gimple_phi_result): Likewise.
+ (gimple_phi_result_ptr): Use gimple_phi typedef.
+ (gimple_phi_set_result): Likewise.
+ (gimple_phi_arg): Likewise.
+ (gimple_phi_set_arg): Likewise.
+ * tree-phinodes.c (allocate_phi_node): Likewise.
+ (resize_phi_node): Likewise.
+ (reserve_phi_args_for_new_edge): Likewise.
+ (remove_phi_arg_num): Likewise.
+
+ * gimple-pretty-print.c (dump_gimple_phi): Require a gimple_phi
+ rather than just a gimple.
+ * tree-into-ssa.c (mark_phi_for_rewrite): Likewise.
+
+ * tree-phinodes.c (make_phi_node): Return a gimple_phi rather than
+ just a gimple.
+ (create_phi_node): Likewise.
+ * tree-phinodes.h (create_phi_node): Likewise.
+
+ * trans-mem.c (struct tm_log_entry): Replace use of
+ now-removed gimple_vec with a plain vec<gimple>.
+
+ * tree-into-ssa.c (phis_to_rewrite): Strengthen from a
+ vec<gimple_vec> to a vec< vec<gimple_phi> >.
+
+ * tree-into-ssa.c (insert_phi_nodes_for): Update local to be a
+ gimple_phi.
+ * tree-into-ssa.c (rewrite_update_phi_arguments): Strengthen local
+ "phis" from a gimple_vec to a vec<gimple_phi>, and local "phi" to
+ a gimple_phi.
+ * tree-into-ssa.c (delete_update_ssa): Strengthen local
+ "phis" from a gimple_vec to a vec<gimple_phi>.
+
+ * gimple-pretty-print.c (pp_gimple_stmt_1): Add checked cast to
+ gimple_phi in regions where a stmt is known to have code
+ GIMPLE_PHI.
+ * tree-into-ssa.c (mark_use_interesting): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_debug and use it in a few places
+
+ * coretypes.h (gimple_debug): New typedef.
+ (const_gimple_debug): New typedef.
+
+ * gimple.h (struct gimple_statement_debug): New subclass of
+ gimple_statement_with_ops, adding the invariant that
+ stmt->code == GIMPLE_DEBUG.
+ (is_a_helper <gimple_statement_debug>::test): New.
+
+ * gdbhooks.py (build_pretty_printer): Add gimple_debug and its
+ variants, using the gimple printer.
+
+ * gimple-pretty-print.c (dump_gimple_debug): Require a gimple_debug
+ rather than just a gimple.
+ * tree-inline.c (copy_debug_stmt): Likewise.
+
+ * tree-inline.h (struct copy_body_data): Strengthen field
+ "debug_stmts" from a vec<gimple> to a vec<gimple_debug>.
+
+ * gimple.c (gimple_build_debug_bind_stat): Return a gimple_debug
+ rather than just a gimple.
+ (gimple_build_debug_source_bind_stat): Likewise.
+ * gimple.h (gimple_build_debug_bind_stat): Likewise.
+ (gimple_build_debug_source_bind_stat): Likewise.
+
+ * tree-inline.c (remap_gimple_stmt): Update locals to be a
+ gimple_debug.
+ (maybe_move_debug_stmts_to_successors): Likewise.
+ (copy_debug_stmts): Likewise.
+
+ * gimple-pretty-print.c (pp_gimple_stmt_1): Add checked cast to
+ gimple_debug in regions where a stmt is known to have code
+ GIMPLE_DEBUG.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_label and use it in a few places
+
+ * coretypes.h (gimple_label): New typedef.
+ (const_gimple_label): New typedef.
+
+ * gimple.h (struct gimple_statement_label): New subclass of
+ gimple_statement_with_ops, adding the invariant that
+ stmt->code == GIMPLE_LABEL.
+ (is_a_helper <gimple_statement_label>::test): New.
+
+ * gdbhooks.py (build_pretty_printer): Add gimple_label and its
+ variants, reusing the gimple printer.
+
+ * gimple-pretty-print.c (dump_gimple_label): Require a gimple_label
+ rather than just a gimple.
+ * tree-cfg.c (verify_gimple_label): Likewise.
+
+ * gimple.c (gimple_build_label): Return a gimple_label rather than
+ just a gimple.
+ * gimple.h (gimple_build_label): Likewise.
+
+ * gimplify.c (gimplify_case_label_expr): Update local to be a
+ gimple_label.
+ * tree-switch-conversion.c (gen_inbound_check): Likewise.
+
+ * gimple-pretty-print.c (pp_gimple_stmt_1): Add checked cast to
+ gimple_label in regions where a stmt is known to have code
+ GIMPLE_LABEL.
+ * tree-cfg.c (verify_gimple_stmt): Likewise.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_assign and use it in various places
+
+ * coretypes.h (gimple_assign): New typedef.
+ (const_gimple_assign): New typedef.
+
+ * gimple.h (struct gimple_statement_assign): New subclass of
+ gimple_statement_with_memory_ops, adding the invariant that
+ stmt->code == GIMPLE_ASSIGN.
+ (is_a_helper <gimple_statement_assign>::test): New.
+
+ * gdbhooks.py (build_pretty_printer): Add gimple_assign and its
+ variants, using the gimple printer.
+
+ * gimple-builder.c (build_assign): Return a gimple_assign rather
+ than just a gimple from each of the overloaded variants.
+ (build_type_cast): Likewise.
+ * gimple-builder.h (build_assign): Likewise.
+ (build_type_cast): Likewise.
+ * gimple.c (gimple_build_assign_stat): Likewise.
+ (gimple_build_assign_with_ops): Likewise.
+ * gimple.h (gimple_build_assign_stat): Likewise.
+ (gimple_build_assign_with_ops): Likewise.
+
+ * asan.c (get_mem_ref_of_assignment): Require a const_gimple_assign
+ rather than just a "const gimple" (the latter is not a
+ "const_gimple").
+ * gimple-pretty-print.c (dump_unary_rhs): Require a gimple_assign
+ rather than just a gimple.
+ (dump_binary_rhs): Likewise.
+ (dump_ternary_rhs): Likewise.
+ * tree-cfg.c (verify_gimple_assign_unary): Likewise.
+ (verify_gimple_assign_binary): Likewise.
+ (verify_gimple_assign_ternary): Likewise.
+ (verify_gimple_assign_single): Likewise.
+ (verify_gimple_assign): Likewise.
+ * tree-ssa-sccvn.c (simplify_unary_expression): Likewise.
+ (try_to_simplify): Likewise.
+ * tree-tailcall.c (process_assignment): Likewise.
+ * tree-vect-generic.c (expand_vector_operation): Likewise.
+ * tree-vrp.c (extract_range_from_cond_expr): Likewise.
+ (extract_range_from_assignment): Likewise.
+
+ * asan.c (has_stmt_been_instrumented_p): Add checked cast to
+ gimple_assign in regions where a stmt is known to have code
+ GIMPLE_ASSIGN.
+ * gimple-pretty-print.c (pp_gimple_stmt_1): Likewise.
+ * tree-cfg.c (verify_gimple_stmt): Likewise.
+ * tree-ssa-sccvn.c (visit_use): Likewise.
+ * tree-tailcall.c (find_tail_calls): Likewise.
+ * tree-vrp.c (vrp_visit_assignment_or_call): Likewise.
+
+ * tree-vrp.c (simplify_stmt_for_jump_threading): Replace a check
+ against GIMPLE_ASSIGN with a dyn_cast<gimple_assign>, introducing
+ a gimple_assign local.
+
+ * tree-vect-generic.c (expand_vector_condition): Convert local to a
+ gimple_assign, adding a checked cast when extracting from gsi, since
+ this is only called when underlying stmt has code GIMPLE_ASSIGN.
+ (optimize_vector_constructor): Likewise.
+ (lower_vec_perm): Likewise.
+ (expand_vector_operations_1): Convert local to a gimple_assign,
+ introducing a dyn_cast.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_cond and use it in various places
+
+ * coretypes.h (gimple_cond): New typedef.
+ (const_gimple_cond): Likewise.
+
+ * gimple.h (struct gimple_statement_cond): New subclass of
+ gimple_statement_with_ops, adding the invariant that
+ stmt->code == GIMPLE_COND.
+ (is_a_helper <gimple_statement_cond>::test): New.
+ (gimple_build_cond): Return a gimple_cond, rather than just
+ a gimple.
+ (gimple_build_cond_from_tree): Likewise.
+
+ * gdbhooks.py (build_pretty_printer): Add gimple_cond and its
+ variants, using the gimple printer.
+
+ * cfgexpand.c (expand_gimple_cond): Require a gimple_cond rather
+ than just a gimple.
+ * gimple.h (gimple_cond_set_condition_from_tree): Likewise.
+ (gimple_cond_true_p): Likewise.
+ (gimple_cond_false_p): Likewise.
+ (gimple_cond_set_condition): Likewise.
+ * gimple.c (gimple_cond_set_condition_from_tree): Likewise.
+ * gimple-fold.c (fold_gimple_cond): Likewise.
+ * gimple-pretty-print.c (dump_gimple_cond): Likewise.
+ * tree-ssa-dom.c (canonicalize_comparison): Likewise.
+ * tree-ssa-forwprop.c (forward_propagate_into_gimple_cond): Likewise.
+ * tree-ssa-ifcombine.c (recognize_single_bit_test): Likewise.
+ (recognize_bits_test): Likewise.
+ * tree-ssa-threadedge.c (simplify_control_stmt_condition): Likewise.
+ (thread_around_empty_blocks): Likewise.
+ (thread_through_normal_block): Likewise.
+ (thread_across_edge): Likewise.
+ * tree-ssa-threadedge.h (thread_across_edge): Likewise.
+ * tree-vrp.c (range_fits_type_p): Likewise.
+
+ * cfgexpand.c (expand_gimple_basic_block): Add checked cast to
+ gimple_cond in regions where a stmt is known to have code GIMPLE_COND.
+ * gimple-fold.c (fold_stmt_1): Likewise.
+ * gimple-pretty-print.c (pp_gimple_stmt_1): Likewise.
+ * tree-ssa-dom.c (optimize_stmt): Likewise.
+ * tree-ssa-forwprop.c (ssa_forward_propagate_and_combine): Likewise.
+ * tree-ssa-loop-unswitch.c (tree_unswitch_single_loop): Likewise.
+ * tree-ssa-pre.c (eliminate_dom_walker::before_dom_children):
+ Likewise.
+ * tree-vrp.c (simplify_stmt_using_ranges): Likewise.
+
+ * cfgloopmanip.c (create_empty_loop_on_edge): Update local to be a
+ gimple_cond.
+ * tree-vrp.c (identify_jump_threads): Likewise.
+
+ * gimple.c (gimple_build_cond): Return a gimple_cond, rather than
+ just a gimple.
+ (gimple_build_cond_from_tree): Likewise.
+
+ * tree-ssa-dom.c (class dom_opt_dom_walker): Strengthen type of
+ field "m_dummy_cond" from a plain gimple to a gimple_cond.
+
+ * tree-ssa-ifcombine.c (ifcombine_ifandif): Introduce locals
+ inner_stmt and outer_stmt so that inner_cond and outer_cond can be
+ of type gimple_cond once we know that we have code == GIMPLE_COND.
+ * tree-ssa-loop-unswitch.c (tree_may_unswitch_on): Introduce local
+ "last" so that stmt can be of type gimple_cond.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_bind and use it for accessors.
+
+ * coretypes.h (gimple_bind): New typedef.
+ (const_gimple_bind): New typedef.
+
+ * gdbhooks.py (build_pretty_printer): Add gimple_bind
+ and its variants, using the gimple printer.
+
+ * gimple-pretty-print.c (dump_gimple_bind): Update type-signature to
+ require a gimple_bind rather than just a gimple.
+
+ * gimple.c (gimple_build_bind): Return a gimple_bind rather than
+ just a gimple.
+ * gimple.h (gimple_build_bind): Likewise.
+
+ * gimple.h (gimple_seq_first_stmt_as_a_bind): New.
+
+ * gimple.h (gimple_bind_vars): Update type-signature to
+ require a gimple_bind rather than just a gimple, removing
+ as_a and hence run-time check.
+ (gimple_bind_set_vars): Likewise.
+ (gimple_bind_append_vars): Likewise.
+ (gimple_bind_body_ptr): Likewise.
+ (gimple_bind_body): Likewise.
+ (gimple_bind_set_body): Likewise.
+ (gimple_bind_add_stmt): Likewise.
+ (gimple_bind_add_seq): Likewise.
+ (gimple_bind_block): Likewise.
+ (gimple_bind_set_block): Likewise.
+ * gimplify.c (gimple_push_bind_expr): Likewise.
+ (gimple_current_bind_expr): Likewise.
+ * tree-inline.c (copy_gimple_bind): Likewise.
+
+ * gimplify.h (gimple_current_bind_expr): Return a gimple_bind
+ rather than a plain gimple.
+ (gimplify_body): Likewise.
+ (gimple_bind_expr_stack): Return a vec<gimple_bind> rather than
+ a vec<gimple>.
+
+ * gimplify.c (struct gimplify_ctx): Strengthen field
+ "bind_expr_stack" from vec<gimple> to vec<gimple_bind>.
+ (gimple_bind_expr_stack): Likewise for type of returned value.
+
+ * gimplify.c (gimplify_body): Strengthen various types from gimple
+ to gimple_bind, including the return type.
+
+ * gimplify.c (declare_vars): Introduce "gs" as a generic gimple,
+ so that local "scope" can be of type gimple_bind once we've reached
+ the region where it must be of code GIMPLE_BIND.
+
+ * gimple-low.c (lower_gimple_bind): Add checked cast to
+ gimple_bind, since both callers (lower_function_body and
+ lower_stmt) have checked the code for us.
+
+ * gimple.c (gimple_copy): Add checked cast to gimple_bind in
+ region guarded by check for code GIMPLE_BIND.
+ * gimple-low.c (gimple_stmt_may_fallthru): Likewise.
+ * gimple-pretty-print.c (pp_gimple_stmt_1): Likewise.
+ * gimple-walk.c (walk_gimple_stmt): Likewise.
+ * omp-low.c (scan_omp_1_stmt): Likewise.
+ (lower_omp_1): Likewise.
+ (lower_omp_for): Likewise.
+ * tree-cfg.c (verify_gimple_in_seq_2): Likewise.
+ (do_warn_unused_result): Likewise.
+ * tree-inline.c (remap_gimple_stmt): Likewise.
+ (estimate_num_insns): Likewise.
+ * tree-nested.c (convert_nonlocal_reference_stmt): Likewise.
+
+ * gimplify.c (gimplify_bind_expr): Update local(s) to be a
+ gimple_bind rather than just a gimple.
+ (gimplify_function_tree): Likewise.
+ * omp-low.c (lower_omp_sections): Likewise.
+ (lower_omp_single): Likewise.
+ (lower_omp_master): Likewise.
+ (lower_omp_taskgroup): Likewise.
+ (lower_omp_ordered): Likewise.
+ (lower_omp_critical): Likewise.
+ (lower_omp_taskreg): Likewise.
+ (lower_omp_teams): Likewise.
+ * omp-low.c (lower_omp_for): Likewise; use
+ gimple_seq_first_stmt_as_a_bind to encapsulate the checked cast.
+ (lower_omp_target): Likewise.
+ * tree-nested.c (finalize_nesting_tree_1): Likewise.
+
+ * gimple.c (empty_stmt_p): Add dyn_cast to a gimple_bind.
+ * tree-inline.c (replace_locals_stmt): Add dyn_cast to gimple_bind.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_switch and use it in various places
+
+ * gimple.h (gimple_statement_switch): New subclass of
+ gimple_statement_with_ops, adding the invariant that
+ stmt->code == GIMPLE_SWITCH.
+ (is_a_helper <gimple_statement_switch>::test (gimple)): New.
+
+ * coretypes.h (gimple_switch): New typedef.
+ (const_gimple_switch): Likewise.
+
+ * gdbhooks.py (build_pretty_printer): Add gimple_switch
+ and its variants, using the gimple printer.
+
+ * gimple.c (gimple_build_switch_nlabels): Return a gimple_switch
+ rather than just a gimple.
+ (gimple_build_switch): Likewise.
+ * gimple.h (gimple_build_switch_nlabels): Likewise.
+ (gimple_build_switch): Likewise.
+
+ * gimple.h (gimple_switch_num_labels): Update type-signature to
+ require a gimple_switch rather than just a gimple.
+ (gimple_switch_set_num_labels): Likewise.
+ (gimple_switch_set_index): Likewise.
+ (gimple_switch_label): Likewise.
+ (gimple_switch_set_label): Likewise.
+ (gimple_switch_default_label): Likewise.
+ (gimple_switch_set_default_label): Likewise.
+ * expr.h (expand_case): Likewise.
+ * gimple-pretty-print.c (dump_gimple_call): Likewise.
+ * stmt.c (compute_cases_per_edge): Likewise.
+ (expand_case): Likewise.
+ * tree-cfg.h (group_case_labels_stmt): Likewise.
+ * tree-cfg.c (make_gimple_switch_edges): Likewise.
+ (find_taken_edge_switch_expr): Likewise.
+ (find_case_label_for_value): Likewise.
+ (get_cases_for_edge): Likewise.
+ (group_case_labels_stmt): Likewise.
+ (verify_gimple_switch): Likewise.
+ * tree-eh.c (verify_norecord_switch_expr): Likewise.
+ * tree-eh.c (lower_eh_constructs_2): Likewise.
+ * tree-loop-distribution.c (generate_loops_for_partition): Likewise.
+ * tree-ssa-dom.c (record_edge_info): Likewise.
+ * tree-ssa-forwprop.c (simplify_gimple_switch_label_vec): Likewise.
+ (simplify_gimple_switch): Likewise.
+ * tree-switch-conversion.c (emit_case_bit_tests): Likewise.
+ (collect_switch_conv_info): Likewise.
+ (build_constructors): Likewise.
+ (array_value_type): Likewise.
+ (build_one_array): Likewise.
+ (build_arrays): Likewise.
+ (gen_inbound_check): Likewise.
+ * tree-vrp.c (find_switch_asserts): Likewise.
+ (find_case_label_range): Likewise.
+ (find_case_label_ranges): Likewise.
+ (vrp_visit_switch_stmt): Likewise.
+ (simplify_switch_using_ranges): Likewise.
+
+ * tree-vrp.c (switch_update): Strengthen field "stmt" from being
+ merely a gimple to being a gimple_switch.
+
+ * cfgexpand.c (expand_gimple_stmt_1): Add checked cast to
+ gimple_switch in regions where the stmt code has been tested as
+ GIMPLE_SWITCH.
+ * gimple-pretty-print.c (pp_gimple_stmt_1): Likewise.
+ * tree-cfg.c (make_edges): Likewise.
+ (end_recording_case_labels): Likewise.
+ (cleanup_dead_labels): Likewise.
+ (cleanup_dead_labels): Likewise.
+ (group_case_labels): Likewise.
+ (find_taken_edge): Likewise.
+ (find_case_label_for_value): Likewise.
+ (verify_gimple_stmt): Likewise.
+ (gimple_verify_flow_info): Likewise.
+ (gimple_redirect_edge_and_branch): Likewise.
+ * tree-inline.c (estimate_num_insns): Likewise.
+ * tree-ssa-forwprop.c (ssa_forward_propagate_and_combine): Likewise.
+ * tree-ssa-uncprop.c (associate_equivalences_with_edges): Likewise.
+ * tree-switch-conversion.c (do_switchconv): Likewise.
+ * tree-vrp.c (find_assert_locations_1): Likewise.
+ (vrp_visit_stmt): Likewise.
+ (simplify_stmt_using_ranges): Likewise.
+
+ * ipa-inline-analysis.c (set_switch_stmt_execution_predicate):
+ Introduce local "lastg" as a generic gimple, so that local "last"
+ can be of type gimple_switch once lastg's code has been verified.
+
+ * omp-low.c (diagnose_sb_2): Introduce switch_stmt local to handle
+ the GIMPLE_SWITCH case.
+
+ * tree-cfg.c (find_taken_edge_switch_expr): Add gimple_switch
+ argument, since the caller (find_taken_edge) has checked that
+ last_stmt is a switch.
+
+Copyright (C) 2014 Free Software Foundation, Inc.
+
+Copying and distribution of this file, with or without modification,
+are permitted in any medium without royalty provided the copyright
+notice and this notice are preserved.
otherwise. */
static bool
-get_mem_ref_of_assignment (const gimple assignment,
+get_mem_ref_of_assignment (const gassign *assignment,
asan_mem_ref *ref,
bool *ref_is_store)
{
representing a builtin call that has to do with memory access. */
static bool
-get_mem_refs_of_builtin_call (const gimple call,
+get_mem_refs_of_builtin_call (const gcall *call,
asan_mem_ref *src0,
tree *src0_len,
bool *src0_is_store,
asan_mem_ref r;
asan_mem_ref_init (&r, NULL, 1);
- if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
+ if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
+ &r_is_store))
return has_mem_ref_been_instrumented (&r);
}
else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
bool src0_is_store = false, src1_is_store = false,
dest_is_store = false, dest_is_deref = false, intercepted_p = true;
- if (get_mem_refs_of_builtin_call (stmt,
+ if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
&src0, &src0_len, &src0_is_store,
&src1, &src1_len, &src1_is_store,
&dest, &dest_len, &dest_is_store,
pointing to initially. */
static void
-insert_if_then_before_iter (gimple cond,
+insert_if_then_before_iter (gcond *cond,
gimple_stmt_iterator *iter,
bool then_more_likely_p,
basic_block *then_bb,
return false;
bool iter_advanced_p = false;
- gimple call = gsi_stmt (*iter);
+ gcall *call = as_a <gcall *> (gsi_stmt (*iter));
gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
gimple_set_location (g, loc);
basic_block then_bb, fallthrough_bb;
- insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
- &then_bb, &fallthrough_bb);
+ insert_if_then_before_iter (as_a <gcond *> (g), iter,
+ /*then_more_likely_p=*/true,
+ &then_bb, &fallthrough_bb);
/* Note that fallthrough_bb starts with the statement that was
pointed to by ITER. */
/* Read the inlined indirect call target profile for STMT and store it in
MAP, return the total count for all inlined indirect calls. */
- gcov_type find_icall_target_map (gimple stmt, icall_target_map *map) const;
+ gcov_type find_icall_target_map (gcall *stmt, icall_target_map *map) const;
/* Sum of counts that is used during annotation. */
gcov_type total_annotated_count () const;
/* Update value profile INFO for STMT from the inlined indirect callsite.
Return true if INFO is updated. */
- bool update_inlined_ind_target (gimple stmt, count_info *info);
+ bool update_inlined_ind_target (gcall *stmt, count_info *info);
/* Mark LOC as annotated. */
void mark_annotated (location_t loc);
MAP, return the total count for all inlined indirect calls. */
gcov_type
-function_instance::find_icall_target_map (gimple stmt,
+function_instance::find_icall_target_map (gcall *stmt,
icall_target_map *map) const
{
gcov_type ret = 0;
Return true if INFO is updated. */
bool
-autofdo_source_profile::update_inlined_ind_target (gimple stmt,
+autofdo_source_profile::update_inlined_ind_target (gcall *stmt,
count_info *info)
{
if (LOCATION_LOCUS (gimple_location (stmt)) == cfun->function_end_locus)
afdo_indirect_call (gimple_stmt_iterator *gsi, const icall_target_map &map,
bool transform)
{
- gimple stmt = gsi_stmt (*gsi);
+ gimple gs = gsi_stmt (*gsi);
tree callee;
- if (map.size () == 0 || gimple_code (stmt) != GIMPLE_CALL
- || gimple_call_fndecl (stmt) != NULL_TREE)
+ if (map.size () == 0)
+ return;
+ gcall *stmt = dyn_cast <gcall *> (gs);
+ if ((!stmt) || gimple_call_fndecl (stmt) != NULL_TREE)
return;
callee = gimple_call_fn (stmt);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
afdo_source_profile->mark_annotated (gimple_location (gsi_stmt (gsi)));
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gpi = gsi_start_phis (bb);
+ !gsi_end_p (gpi);
+ gsi_next (&gpi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gpi.phi ();
size_t i;
for (i = 0; i < gimple_phi_num_args (phi); i++)
afdo_source_profile->mark_annotated (gimple_phi_arg_location (phi, i));
basic_block bb;
FOR_ALL_BB_FN (bb, cfun)
{
- gimple phi_stmt;
+ gimple def_stmt;
tree cmp_rhs, cmp_lhs;
gimple cmp_stmt = last_stmt (bb);
edge e;
continue;
if (!is_bb_annotated (bb, annotated_bb))
continue;
- phi_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
- while (phi_stmt && gimple_code (phi_stmt) == GIMPLE_ASSIGN
- && gimple_assign_single_p (phi_stmt)
- && TREE_CODE (gimple_assign_rhs1 (phi_stmt)) == SSA_NAME)
- phi_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (phi_stmt));
- if (!phi_stmt || gimple_code (phi_stmt) != GIMPLE_PHI)
+ def_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
+ while (def_stmt && gimple_code (def_stmt) == GIMPLE_ASSIGN
+ && gimple_assign_single_p (def_stmt)
+ && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
+ def_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def_stmt));
+ if (!def_stmt)
+ continue;
+ gphi *phi_stmt = dyn_cast <gphi *> (def_stmt);
+ if (!phi_stmt)
continue;
FOR_EACH_EDGE (e, ei, bb->succs)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gcall *stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
/* IC_promotion and early_inline_2 is done in multiple iterations.
No need to promoted the stmt if its in promoted_stmts (means
it is already been promoted in the previous iterations). */
- if (gimple_code (stmt) != GIMPLE_CALL || gimple_call_fn (stmt) == NULL
+ if ((!stmt) || gimple_call_fn (stmt) == NULL
|| TREE_CODE (gimple_call_fn (stmt)) == FUNCTION_DECL
|| promoted_stmts->find (stmt) != promoted_stmts->end ())
continue;
validate_arglist will then be removed. */
bool
-validate_gimple_arglist (const_gimple call, ...)
+validate_gimple_arglist (const gcall *call, ...)
{
enum tree_code code;
bool res = 0;
call node earlier than the warning is generated. */
tree
-fold_call_stmt (gimple stmt, bool ignore)
+fold_call_stmt (gcall *stmt, bool ignore)
{
tree ret = NULL_TREE;
tree fndecl = gimple_call_fndecl (stmt);
extern tree fold_call_expr (location_t, tree, bool);
extern tree fold_builtin_call_array (location_t, tree, tree, int, tree *);
extern tree fold_builtin_n (location_t, tree, tree *, int, bool);
-extern bool validate_gimple_arglist (const_gimple, ...);
+extern bool validate_gimple_arglist (const gcall *, ...);
extern rtx default_expand_builtin (tree, rtx, rtx, machine_mode, int);
extern bool fold_builtin_next_arg (tree, bool);
extern tree do_mpc_arg2 (tree, tree, tree, int, int (*)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t));
-extern tree fold_call_stmt (gimple, bool);
+extern tree fold_call_stmt (gcall *, bool);
extern void set_builtin_user_assembler_name (tree decl, const char *asmspec);
extern bool is_simple_builtin (tree);
extern bool is_inexpensive_builtin (tree);
+2014-11-19 David Malcolm <dmalcolm@redhat.com>
+
+ Merger of git branch "gimple-classes-v2-option-3".
+
+ * ChangeLog.gimple-classes: New.
+ * c-gimplify.c (add_block_to_enclosing): Strengthen local "stack"
+ from being just a vec<gimple> to a vec<gbind *>.
+
2014-11-18 Jakub Jelinek <jakub@redhat.com>
PR sanitizer/63813
--- /dev/null
+2014-10-27 David Malcolm <dmalcolm@redhat.com>
+
+ Patch autogenerated by rename_gimple_subclasses.py from
+ https://github.com/davidmalcolm/gcc-refactoring-scripts
+ revision 7d754b63ff2bf47226a67b2c0af5d74b54d4709f
+
+ * c-gimplify.c (add_block_to_enclosing): Rename gimple subclass types.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_bind and use it for accessors.
+
+ * c-gimplify.c (add_block_to_enclosing): Strengthen local "stack"
+ from being just a vec<gimple> to a vec<gimple_bind>.
+
+Copyright (C) 2014 Free Software Foundation, Inc.
+
+Copying and distribution of this file, with or without modification,
+are permitted in any medium without royalty provided the copyright
+notice and this notice are preserved.
{
unsigned i;
tree enclosing;
- gimple bind;
- vec<gimple> stack = gimple_bind_expr_stack ();
+ gbind *bind;
+ vec<gbind *> stack = gimple_bind_expr_stack ();
FOR_EACH_VEC_ELT (stack, i, bind)
if (gimple_bind_block (bind))
{
gimple_stmt_iterator gsi;
tree lab;
- gimple lab_stmt;
if (bb->flags & BB_RTL)
return block_label (bb);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- lab_stmt = gsi_stmt (gsi);
- if (gimple_code (lab_stmt) != GIMPLE_LABEL)
+ glabel *lab_stmt;
+
+ lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
+ if (!lab_stmt)
break;
lab = gimple_label_label (lab_stmt);
block and created a new one. */
static basic_block
-expand_gimple_cond (basic_block bb, gimple stmt)
+expand_gimple_cond (basic_block bb, gcond *stmt)
{
basic_block new_bb, dest;
edge new_edge;
statement STMT. */
static void
-expand_call_stmt (gimple stmt)
+expand_call_stmt (gcall *stmt)
{
tree exp, decl, lhs;
bool builtin_p;
static void
-expand_asm_stmt (gimple stmt)
+expand_asm_stmt (gasm *stmt)
{
int noutputs;
tree outputs, tail, t;
expand_computed_goto (op0);
break;
case GIMPLE_LABEL:
- expand_label (gimple_label_label (stmt));
+ expand_label (gimple_label_label (as_a <glabel *> (stmt)));
break;
case GIMPLE_NOP:
case GIMPLE_PREDICT:
break;
case GIMPLE_SWITCH:
- expand_case (stmt);
+ expand_case (as_a <gswitch *> (stmt));
break;
case GIMPLE_ASM:
- expand_asm_stmt (stmt);
+ expand_asm_stmt (as_a <gasm *> (stmt));
break;
case GIMPLE_CALL:
- expand_call_stmt (stmt);
+ expand_call_stmt (as_a <gcall *> (stmt));
break;
case GIMPLE_RETURN:
- op0 = gimple_return_retval (stmt);
+ op0 = gimple_return_retval (as_a <greturn *> (stmt));
if (op0 && op0 != error_mark_node)
{
case GIMPLE_ASSIGN:
{
- tree lhs = gimple_assign_lhs (stmt);
+ gassign *assign_stmt = as_a <gassign *> (stmt);
+ tree lhs = gimple_assign_lhs (assign_stmt);
/* Tree expand used to fiddle with |= and &= of two bitfield
COMPONENT_REFs here. This can't happen with gimple, the LHS
|| get_gimple_rhs_class (gimple_expr_code (stmt))
== GIMPLE_SINGLE_RHS)
{
- tree rhs = gimple_assign_rhs1 (stmt);
+ tree rhs = gimple_assign_rhs1 (assign_stmt);
gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
== GIMPLE_SINGLE_RHS);
if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
;
else
expand_assignment (lhs, rhs,
- gimple_assign_nontemporal_move_p (stmt));
+ gimple_assign_nontemporal_move_p (
+ assign_stmt));
}
else
{
rtx target, temp;
- bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
+ bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
struct separate_ops ops;
bool promoted = false;
if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
promoted = true;
- ops.code = gimple_assign_rhs_code (stmt);
+ ops.code = gimple_assign_rhs_code (assign_stmt);
ops.type = TREE_TYPE (lhs);
switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
{
case GIMPLE_TERNARY_RHS:
- ops.op2 = gimple_assign_rhs3 (stmt);
+ ops.op2 = gimple_assign_rhs3 (assign_stmt);
/* Fallthru */
case GIMPLE_BINARY_RHS:
- ops.op1 = gimple_assign_rhs2 (stmt);
+ ops.op1 = gimple_assign_rhs2 (assign_stmt);
/* Fallthru */
case GIMPLE_UNARY_RHS:
- ops.op0 = gimple_assign_rhs1 (stmt);
+ ops.op0 = gimple_assign_rhs1 (assign_stmt);
break;
default:
gcc_unreachable ();
tailcall) and the normal result happens via a sqrt instruction. */
static basic_block
-expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
+expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
{
rtx_insn *last2, *last;
edge e;
if (!gsi_end_p (gsi)
&& gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
{
- gimple ret_stmt = gsi_stmt (gsi);
+ greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
gcc_assert (single_succ_p (bb));
gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
fixup the CFG accordingly. */
if (gimple_code (stmt) == GIMPLE_COND)
{
- new_bb = expand_gimple_cond (bb, stmt);
+ new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
if (new_bb)
return new_bb;
}
}
else
{
- if (is_gimple_call (stmt)
- && gimple_call_tail_p (stmt)
+ gcall *call_stmt = dyn_cast <gcall *> (stmt);
+ if (call_stmt
+ && gimple_call_tail_p (call_stmt)
&& disable_tail_calls)
- gimple_call_set_tail (stmt, false);
+ gimple_call_set_tail (call_stmt, false);
- if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
+ if (call_stmt && gimple_call_tail_p (call_stmt))
{
bool can_fallthru;
- new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
+ new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
if (new_bb)
{
if (can_fallthru)
{
edge e, latch = latches[0];
unsigned i;
- gimple phi;
- gimple_stmt_iterator psi;
+ gphi *phi;
+ gphi_iterator psi;
tree lop;
basic_block bb;
a subloop. */
for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
{
- phi = gsi_stmt (psi);
+ phi = psi.phi ();
lop = PHI_ARG_DEF_FROM_EDGE (phi, latch);
/* Ignore the values that are not changed inside the subloop. */
basic_block cond_bb, true_bb, false_bb, join_bb;
edge e_true, e_false, exit_edge;
- gimple cond_stmt;
+ gcond *cond_stmt;
tree simple_cond;
gimple_stmt_iterator gsi;
struct loop *loop;
gimple_stmt_iterator gsi;
gimple_seq stmts;
- gimple cond_expr;
+ gcond *cond_expr;
tree exit_test;
edge exit_e;
int prob;
edge, then update all components. */
void
-cgraph_edge::set_call_stmt (gimple new_stmt, bool update_speculative)
+cgraph_edge::set_call_stmt (gcall *new_stmt, bool update_speculative)
{
tree decl;
cgraph_edge *
symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
- gimple call_stmt, gcov_type count, int freq,
- bool indir_unknown_callee)
+ gcall *call_stmt, gcov_type count, int freq,
+ bool indir_unknown_callee)
{
cgraph_edge *edge;
cgraph_edge *
cgraph_node::create_edge (cgraph_node *callee,
- gimple call_stmt, gcov_type count, int freq)
+ gcall *call_stmt, gcov_type count, int freq)
{
cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
freq, false);
PARAM_INDEX. */
cgraph_edge *
-cgraph_node::create_indirect_edge (gimple call_stmt, int ecf_flags,
+cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
gcov_type count, int freq,
bool compute_indirect_info)
{
tree decl = gimple_call_fndecl (e->call_stmt);
tree lhs = gimple_call_lhs (e->call_stmt);
- gimple new_stmt;
+ gcall *new_stmt;
gimple_stmt_iterator gsi;
#ifdef ENABLE_CHECKING
cgraph_node *node;
if (e->speculative)
{
cgraph_edge *e2;
- gimple new_stmt;
+ gcall *new_stmt;
ipa_ref *ref;
e->speculative_call_info (e, e2, ref);
{
tree dresult = gimple_call_lhs (new_stmt);
tree iresult = gimple_call_lhs (e2->call_stmt);
- gimple dbndret = chkp_retbnd_call_by_val (dresult);
- gimple ibndret = chkp_retbnd_call_by_val (iresult);
+ gcall *dbndret = chkp_retbnd_call_by_val (dresult);
+ gcall *ibndret = chkp_retbnd_call_by_val (iresult);
struct cgraph_edge *iedge
= e2->caller->cgraph_node::get_edge (ibndret);
struct cgraph_edge *dedge;
if (callee->decl == new_call
|| callee->former_clone_of == new_call)
{
- e->set_call_stmt (new_stmt);
+ e->set_call_stmt (as_a <gcall *> (new_stmt));
return;
}
callee = callee->clone_of;
if (new_call)
{
ne = node->create_edge (cgraph_node::get_create (new_call),
- new_stmt, count, frequency);
+ as_a <gcall *> (new_stmt), count,
+ frequency);
gcc_assert (ne->inline_failed);
}
}
/* We only updated the call stmt; update pointer in cgraph edge.. */
else if (old_stmt != new_stmt)
- node->get_edge (old_stmt)->set_call_stmt (new_stmt);
+ node->get_edge (old_stmt)->set_call_stmt (as_a <gcall *> (new_stmt));
}
/* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
When WHOLE_SPECULATIVE_EDGES is true, all three components of
speculative edge gets updated. Otherwise we update only direct
call. */
- void set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
+ void set_call_stmt_including_clones (gimple old_stmt, gcall *new_stmt,
bool update_speculative = true);
/* Walk the alias chain to return the function cgraph_node is alias of.
/* Create edge from a given function to CALLEE in the cgraph. */
cgraph_edge *create_edge (cgraph_node *callee,
- gimple call_stmt, gcov_type count,
+ gcall *call_stmt, gcov_type count,
int freq);
/* Create an indirect edge with a yet-undetermined callee where the call
statement destination is a formal parameter of the caller with index
PARAM_INDEX. */
- cgraph_edge *create_indirect_edge (gimple call_stmt, int ecf_flags,
+ cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
gcov_type count, int freq,
bool compute_indirect_info = true);
same function body. If clones already have edge for OLD_STMT; only
update the edge same way as cgraph_set_call_stmt_including_clones does. */
void create_edge_including_clones (cgraph_node *callee,
- gimple old_stmt, gimple stmt,
+ gimple old_stmt, gcall *stmt,
gcov_type count,
int freq,
cgraph_inline_failed_t reason);
/* Change field call_stmt of edge to NEW_STMT.
If UPDATE_SPECULATIVE and E is any component of speculative
edge, then update all components. */
- void set_call_stmt (gimple new_stmt, bool update_speculative = true);
+ void set_call_stmt (gcall *new_stmt, bool update_speculative = true);
/* Redirect callee of the edge to N. The function does not update underlying
call expression. */
/* Create clone of edge in the node N represented
by CALL_EXPR the callgraph. */
- cgraph_edge * clone (cgraph_node *n, gimple call_stmt, unsigned stmt_uid,
+ cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
gcov_type count_scale, int freq_scale, bool update_original);
/* Return true when call of edge can not lead to return from caller
cgraph_edge *next_caller;
cgraph_edge *prev_callee;
cgraph_edge *next_callee;
- gimple call_stmt;
+ gcall *call_stmt;
/* Additional information about an indirect call. Not cleared when an edge
becomes direct. */
cgraph_indirect_call_info *indirect_info;
parameters of which only CALLEE can be NULL (when creating an indirect call
edge). */
cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
- gimple call_stmt, gcov_type count, int freq,
+ gcall *call_stmt, gcov_type count, int freq,
bool indir_unknown_callee);
/* Put the edge onto the free list. */
if (is_gimple_debug (stmt))
continue;
- if (is_gimple_call (stmt))
+ if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
int freq = compute_call_stmt_bb_frequency (current_function_decl,
bb);
- decl = gimple_call_fndecl (stmt);
+ decl = gimple_call_fndecl (call_stmt);
if (decl)
- node->create_edge (cgraph_node::get_create (decl), stmt, bb->count, freq);
- else if (gimple_call_internal_p (stmt))
+ node->create_edge (cgraph_node::get_create (decl), call_stmt, bb->count, freq);
+ else if (gimple_call_internal_p (call_stmt))
;
else
- node->create_indirect_edge (stmt,
- gimple_call_flags (stmt),
+ node->create_indirect_edge (call_stmt,
+ gimple_call_flags (call_stmt),
bb->count, freq);
}
node->record_stmt_references (stmt);
- if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
- && gimple_omp_parallel_child_fn (stmt))
+ if (gomp_parallel *omp_par_stmt = dyn_cast <gomp_parallel *> (stmt))
{
- tree fn = gimple_omp_parallel_child_fn (stmt);
+ tree fn = gimple_omp_parallel_child_fn (omp_par_stmt);
node->create_reference (cgraph_node::get_create (fn),
IPA_REF_ADDR, stmt);
}
gimple stmt = gsi_stmt (gsi);
tree decl;
- if (is_gimple_call (stmt))
+ if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
int freq = compute_call_stmt_bb_frequency (current_function_decl,
bb);
- decl = gimple_call_fndecl (stmt);
+ decl = gimple_call_fndecl (call_stmt);
if (decl)
- node->create_edge (cgraph_node::get_create (decl), stmt,
+ node->create_edge (cgraph_node::get_create (decl), call_stmt,
bb->count, freq);
- else if (gimple_call_internal_p (stmt))
+ else if (gimple_call_internal_p (call_stmt))
;
else
- node->create_indirect_edge (stmt,
- gimple_call_flags (stmt),
+ node->create_indirect_edge (call_stmt,
+ gimple_call_flags (call_stmt),
bb->count, freq);
}
node->record_stmt_references (stmt);
the callgraph. */
cgraph_edge *
-cgraph_edge::clone (cgraph_node *n, gimple call_stmt, unsigned stmt_uid,
+cgraph_edge::clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
gcov_type count_scale, int freq_scale, bool update_original)
{
cgraph_edge *new_edge;
call. */
void
-cgraph_node::set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
+cgraph_node::set_call_stmt_including_clones (gimple old_stmt,
+ gcall *new_stmt,
bool update_speculative)
{
cgraph_node *node;
void
cgraph_node::create_edge_including_clones (cgraph_node *callee,
- gimple old_stmt, gimple stmt,
+ gimple old_stmt, gcall *stmt,
gcov_type count,
int freq,
cgraph_inline_failed_t reason)
tree ptr, bool this_adjusting,
HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
- gimple stmt;
+ gassign *stmt;
tree ret;
if (this_adjusting
tree resdecl;
tree restmp = NULL;
- gimple call;
- gimple ret;
+ gcall *call;
+ greturn *ret;
if (in_lto_p)
get_untransformed_body ();
typedef const struct gimple_statement_base *const_gimple;
typedef gimple gimple_seq;
struct gimple_stmt_iterator;
+
+/* Forward decls for leaf gimple subclasses (for individual gimple codes).
+ Keep this in the same order as the corresponding codes in gimple.def. */
+
+struct gcond;
+struct gdebug;
+struct ggoto;
+struct glabel;
+struct gswitch;
+struct gassign;
+struct gasm;
+struct gcall;
+struct gtransaction;
+struct greturn;
+struct gbind;
+struct gcatch;
+struct geh_filter;
+struct geh_mnt;
+struct geh_else;
+struct gresx;
+struct geh_dispatch;
+struct gphi;
+struct gtry;
+struct gomp_atomic_load;
+struct gomp_atomic_store;
+struct gomp_continue;
+struct gomp_critical;
+struct gomp_for;
+struct gomp_parallel;
+struct gomp_task;
+struct gomp_sections;
+struct gomp_single;
+struct gomp_target;
+struct gomp_teams;
+
union section;
typedef union section section;
struct gcc_options;
| | (no GSS layout)
| |
| + gimple_statement_with_ops
- | | layout: GSS_WITH_OPS
- | | Used for 5 codes: GIMPLE_COND
- | | GIMPLE_DEBUG
- | | GIMPLE_GOTO
- | | GIMPLE_LABEL
- | | GIMPLE_SWITCH
+ | | | layout: GSS_WITH_OPS
+ | | |
+ | | + gcond
+ | | | code: GIMPLE_COND
+ | | |
+ | | + gdebug
+ | | | code: GIMPLE_DEBUG
+ | | |
+ | | + ggoto
+ | | | code: GIMPLE_GOTO
+ | | |
+ | | + glabel
+ | | | code: GIMPLE_LABEL
+ | | |
+ | | + gswitch
+ | | code: GIMPLE_SWITCH
| |
| + gimple_statement_with_memory_ops_base
| | layout: GSS_WITH_MEM_OPS_BASE
| |
| + gimple_statement_with_memory_ops
- | | layout: GSS_WITH_MEM_OPS.
- | | used for codes GIMPLE_ASSIGN and GIMPLE_RETURN.
+ | | | layout: GSS_WITH_MEM_OPS
+ | | |
+ | | + gassign
+ | | | code GIMPLE_ASSIGN
+ | | |
+ | | + greturn
+ | | code GIMPLE_RETURN
| |
- | + gimple_statement_call
+ | + gcall
| | layout: GSS_CALL, code: GIMPLE_CALL
| |
- | + gimple_statement_asm
+ | + gasm
| | layout: GSS_ASM, code: GIMPLE_ASM
| |
- | + gimple_statement_transaction
+ | + gtransaction
| layout: GSS_TRANSACTION, code: GIMPLE_TRANSACTION
|
+ gimple_statement_omp
| | layout: GSS_OMP. Used for code GIMPLE_OMP_SECTION
| |
- | + gimple_statement_omp_critical
+ | + gomp_critical
| | layout: GSS_OMP_CRITICAL, code: GIMPLE_OMP_CRITICAL
| |
- | + gimple_statement_omp_for
+ | + gomp_for
| | layout: GSS_OMP_FOR, code: GIMPLE_OMP_FOR
| |
- | + gimple_statement_omp_parallel_layout
+ | + gimple_statement_omp_parallel_layout
| | | layout: GSS_OMP_PARALLEL_LAYOUT
| | |
| | + gimple_statement_omp_taskreg
| | | |
- | | | + gimple_statement_omp_parallel
+ | | | + gomp_parallel
| | | | code: GIMPLE_OMP_PARALLEL
| | | |
- | | | + gimple_statement_omp_task
+ | | | + gomp_task
| | | code: GIMPLE_OMP_TASK
| | |
| | + gimple_statement_omp_target
| | code: GIMPLE_OMP_TARGET
| |
- | + gimple_statement_omp_sections
+ | + gomp_sections
| | layout: GSS_OMP_SECTIONS, code: GIMPLE_OMP_SECTIONS
| |
| + gimple_statement_omp_single_layout
| | layout: GSS_OMP_SINGLE_LAYOUT
| |
- | + gimple_statement_omp_single
+ | + gomp_single
| | code: GIMPLE_OMP_SINGLE
| |
- | + gimple_statement_omp_teams
+ | + gomp_teams
| code: GIMPLE_OMP_TEAMS
|
- + gimple_statement_bind
+ + gbind
| layout: GSS_BIND, code: GIMPLE_BIND
|
- + gimple_statement_catch
+ + gcatch
| layout: GSS_CATCH, code: GIMPLE_CATCH
|
- + gimple_statement_eh_filter
+ + geh_filter
| layout: GSS_EH_FILTER, code: GIMPLE_EH_FILTER
|
- + gimple_statement_eh_else
+ + geh_else
| layout: GSS_EH_ELSE, code: GIMPLE_EH_ELSE
|
- + gimple_statement_eh_mnt
+ + geh_mnt
| layout: GSS_EH_MNT, code: GIMPLE_EH_MUST_NOT_THROW
|
- + gimple_statement_phi
+ + gphi
| layout: GSS_PHI, code: GIMPLE_PHI
|
+ gimple_statement_eh_ctrl
| | layout: GSS_EH_CTRL
| |
- | + gimple_statement_resx
+ | + gresx
| | code: GIMPLE_RESX
| |
- | + gimple_statement_eh_dispatch
+ | + geh_dispatch
| code: GIMPLE_EH_DISPATCH
|
- + gimple_statement_try
+ + gtry
| layout: GSS_TRY, code: GIMPLE_TRY
|
+ gimple_statement_wce
| layout: GSS_WCE, code: GIMPLE_WITH_CLEANUP_EXPR
|
- + gimple_statement_omp_continue
+ + gomp_continue
| layout: GSS_OMP_CONTINUE, code: GIMPLE_OMP_CONTINUE
|
- + gimple_statement_omp_atomic_load
+ + gomp_atomic_load
| layout: GSS_OMP_ATOMIC_LOAD, code: GIMPLE_OMP_ATOMIC_LOAD
|
+ gimple_statement_omp_atomic_store_layout
| layout: GSS_OMP_ATOMIC_STORE_LAYOUT,
| code: GIMPLE_OMP_ATOMIC_STORE
|
- + gimple_statement_omp_atomic_store
+ + gomp_atomic_store
| code: GIMPLE_OMP_ATOMIC_STORE
|
- + gimple_statement_omp_return
+ + gimple_statement_omp_return
code: GIMPLE_OMP_RETURN
@end smallexample
/* In stmt.c */
/* Expand a GIMPLE_SWITCH statement. */
-extern void expand_case (gimple);
+extern void expand_case (gswitch *);
/* Like expand_case but special-case for SJLJ exception dispatching. */
extern void expand_sjlj_dispatch_table (rtx, vec<tree> );
'tree', TreePrinter)
pp.add_printer_for_types(['cgraph_node *'],
'cgraph_node', CGraphNodePrinter)
- pp.add_printer_for_types(['gimple', 'gimple_statement_base *'],
+ pp.add_printer_for_types(['gimple', 'gimple_statement_base *',
+
+ # Keep this in the same order as gimple.def:
+ 'gimple_cond', 'const_gimple_cond',
+ 'gimple_statement_cond *',
+ 'gimple_debug', 'const_gimple_debug',
+ 'gimple_statement_debug *',
+ 'gimple_label', 'const_gimple_label',
+ 'gimple_statement_label *',
+ 'gimple_switch', 'const_gimple_switch',
+ 'gimple_statement_switch *',
+ 'gimple_assign', 'const_gimple_assign',
+ 'gimple_statement_assign *',
+ 'gimple_bind', 'const_gimple_bind',
+ 'gimple_statement_bind *',
+ 'gimple_phi', 'const_gimple_phi',
+ 'gimple_statement_phi *'],
+
'gimple',
GimplePrinter)
pp.add_printer_for_types(['basic_block', 'basic_block_def *'],
the expression code for the RHS. OP1 is the first operand and VAL
is an integer value to be used as the second operand. */
-gimple
+gassign *
build_assign (enum tree_code code, tree op1, int val, tree lhs)
{
tree op2 = build_int_cst (TREE_TYPE (op1), val);
return gimple_build_assign_with_ops (code, lhs, op1, op2);
}
-gimple
+gassign *
build_assign (enum tree_code code, gimple g, int val, tree lhs )
{
return build_assign (code, gimple_assign_lhs (g), val, lhs);
in normal form depending on the type of builder invoking this
function. */
-gimple
+gassign *
build_assign (enum tree_code code, tree op1, tree op2, tree lhs)
{
if (lhs == NULL_TREE)
return gimple_build_assign_with_ops (code, lhs, op1, op2);
}
-gimple
+gassign *
build_assign (enum tree_code code, gimple op1, tree op2, tree lhs)
{
return build_assign (code, gimple_assign_lhs (op1), op2, lhs);
}
-gimple
+gassign *
build_assign (enum tree_code code, tree op1, gimple op2, tree lhs)
{
return build_assign (code, op1, gimple_assign_lhs (op2), lhs);
}
-gimple
+gassign *
build_assign (enum tree_code code, gimple op1, gimple op2, tree lhs)
{
return build_assign (code, gimple_assign_lhs (op1), gimple_assign_lhs (op2),
/* Create and return a type cast assignment. This creates a NOP_EXPR
that converts OP to TO_TYPE. */
-gimple
+gassign *
build_type_cast (tree to_type, tree op, tree lhs)
{
if (lhs == NULL_TREE)
return gimple_build_assign_with_ops (NOP_EXPR, lhs, op);
}
-gimple
+gassign *
build_type_cast (tree to_type, gimple op, tree lhs)
{
return build_type_cast (to_type, gimple_assign_lhs (op), lhs);
#ifndef GCC_GIMPLE_BUILDER_H
#define GCC_GIMPLE_BUILDER_H
-gimple build_assign (enum tree_code, tree, int, tree lhs = NULL_TREE);
-gimple build_assign (enum tree_code, gimple, int, tree lhs = NULL_TREE);
-gimple build_assign (enum tree_code, tree, tree, tree lhs = NULL_TREE);
-gimple build_assign (enum tree_code, gimple, tree, tree lhs = NULL_TREE);
-gimple build_assign (enum tree_code, tree, gimple, tree lhs = NULL_TREE);
-gimple build_assign (enum tree_code, gimple, gimple, tree lhs = NULL_TREE);
-gimple build_type_cast (tree, tree, tree lhs = NULL_TREE);
-gimple build_type_cast (tree, gimple, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, tree, int, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, gimple, int, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, tree, tree, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, gimple, tree, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, tree, gimple, tree lhs = NULL_TREE);
+gassign *build_assign (enum tree_code, gimple, gimple, tree lhs = NULL_TREE);
+gassign *build_type_cast (tree, tree, tree lhs = NULL_TREE);
+gassign *build_type_cast (tree, gimple, tree lhs = NULL_TREE);
#endif /* GCC_GIMPLE_BUILDER_H */
assumed that the operands have been previously folded. */
static bool
-fold_gimple_cond (gimple stmt)
+fold_gimple_cond (gcond *stmt)
{
tree result = fold_binary_loc (gimple_location (stmt),
gimple_cond_code (stmt),
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
enum built_in_function fcode)
{
- gimple stmt = gsi_stmt (*gsi);
+ gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
tree dest, size, len, fn, fmt, flag;
const char *fmt_str;
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
enum built_in_function fcode)
{
- gimple stmt = gsi_stmt (*gsi);
+ gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
tree dest, size, len, fn, fmt, flag;
const char *fmt_str;
unsigned nargs = gimple_call_num_args (stmt);
static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
tree dest = gimple_call_arg (stmt, 0);
tree destsize = gimple_call_arg (stmt, 1);
tree fmt = gimple_call_arg (stmt, 2);
static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
tree callee = gimple_call_fndecl (stmt);
/* Give up for always_inline inline builtins until they are
static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
- gimple stmt = gsi_stmt (*gsi);
+ gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
tree callee;
bool changed = false;
unsigned i;
&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
return false;
- if (gimple_code (stmt) == GIMPLE_COND)
+ if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
{
gcc_assert (rcode.is_tree_code ());
if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
|| !operation_could_trap_p (rcode,
FLOAT_TYPE_P (TREE_TYPE (ops[0])),
false, NULL_TREE)))
- gimple_cond_set_condition (stmt, rcode, ops[0], ops[1]);
+ gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
else if (rcode == SSA_NAME)
- gimple_cond_set_condition (stmt, NE_EXPR, ops[0],
+ gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
build_zero_cst (TREE_TYPE (ops[0])));
else if (rcode == INTEGER_CST)
{
if (integer_zerop (ops[0]))
- gimple_cond_make_false (stmt);
+ gimple_cond_make_false (cond_stmt);
else
- gimple_cond_make_true (stmt);
+ gimple_cond_make_true (cond_stmt);
}
else if (!inplace)
{
ops, seq);
if (!res)
return false;
- gimple_cond_set_condition (stmt, NE_EXPR, res,
+ gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
build_zero_cst (TREE_TYPE (res)));
}
else
}
case GIMPLE_ASM:
{
- for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
+ gasm *asm_stmt = as_a <gasm *> (stmt);
+ for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
{
- tree link = gimple_asm_output_op (stmt, i);
+ tree link = gimple_asm_output_op (asm_stmt, i);
tree op = TREE_VALUE (link);
if (REFERENCE_CLASS_P (op)
&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
changed = true;
}
- for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
+ for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
{
- tree link = gimple_asm_input_op (stmt, i);
+ tree link = gimple_asm_input_op (asm_stmt, i);
tree op = TREE_VALUE (link);
if ((REFERENCE_CLASS_P (op)
|| TREE_CODE (op) == ADDR_EXPR)
}
case GIMPLE_COND:
- changed |= fold_gimple_cond (stmt);
+ changed |= fold_gimple_cond (as_a <gcond *> (stmt));
break;
case GIMPLE_CALL:
case GIMPLE_ASM:
/* Fold *& in asm operands. */
{
+ gasm *asm_stmt = as_a <gasm *> (stmt);
size_t noutputs;
const char **oconstraints;
const char *constraint;
bool allows_mem, allows_reg;
- noutputs = gimple_asm_noutputs (stmt);
+ noutputs = gimple_asm_noutputs (asm_stmt);
oconstraints = XALLOCAVEC (const char *, noutputs);
- for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
+ for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
{
- tree link = gimple_asm_output_op (stmt, i);
+ tree link = gimple_asm_output_op (asm_stmt, i);
tree op = TREE_VALUE (link);
oconstraints[i]
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
changed = true;
}
}
- for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
+ for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
{
- tree link = gimple_asm_input_op (stmt, i);
+ tree link = gimple_asm_input_op (asm_stmt, i);
tree op = TREE_VALUE (link);
constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
case GIMPLE_CALL:
{
tree fn;
+ gcall *call_stmt = as_a <gcall *> (stmt);
if (gimple_call_internal_p (stmt))
{
for (i = 0; i < gimple_call_num_args (stmt); ++i)
args[i] = (*valueize) (gimple_call_arg (stmt, i));
call = build_call_array_loc (loc,
- gimple_call_return_type (stmt),
+ gimple_call_return_type (call_stmt),
fn, gimple_call_num_args (stmt), args);
retval = fold_call_expr (EXPR_LOCATION (call), call, false);
if (retval)
{
/* fold_call_expr wraps the result inside a NOP_EXPR. */
STRIP_NOPS (retval);
- retval = fold_convert (gimple_call_return_type (stmt), retval);
+ retval = fold_convert (gimple_call_return_type (call_stmt),
+ retval);
}
return retval;
}
return i;
}
+/* Return a gphi_iterator pointing at PHI, which must be a phi node
+   within the phi-node sequence of some basic block.  */
+
+gphi_iterator
+gsi_for_phi (gphi *phi)
+{
+ gphi_iterator i;
+ basic_block bb = gimple_bb (phi);
+
+ /* Initialize the iterator's seq/bb fields from the containing block's
+    phi sequence, then point it directly at PHI. */
+ i = gsi_start_phis (bb);
+ i.ptr = phi;
+
+ return i;
+}
/* Move the statement at FROM so it comes right after the statement at TO. */
/* Returns iterator at the start of the list of phi nodes of BB. */
-gimple_stmt_iterator
+gphi_iterator
gsi_start_phis (basic_block bb)
{
gimple_seq *pseq = phi_nodes_ptr (bb);
- return gsi_start_1 (pseq);
+
+ /* Adapted from gsi_start_1. */
+ gphi_iterator i;
+
+ i.ptr = gimple_seq_first (*pseq);
+ i.seq = pseq;
+ i.bb = i.ptr ? gimple_bb (i.ptr) : NULL;
+
+ return i;
}
gimple_seq *seq;
basic_block bb;
};
+
+/* Iterator over GIMPLE_PHI statements.
+   Subclasses gimple_stmt_iterator so that the phi () accessor can
+   perform the checked downcast from the underlying gimple ptr to
+   gphi *; all other iteration operations are inherited unchanged.  */
+struct gphi_iterator : public gimple_stmt_iterator
+{
+ gphi *phi () const
+ {
+ return as_a <gphi *> (ptr);
+ }
+};
enum gsi_iterator_update
{
enum gsi_iterator_update);
extern bool gsi_remove (gimple_stmt_iterator *, bool);
extern gimple_stmt_iterator gsi_for_stmt (gimple);
+extern gphi_iterator gsi_for_phi (gphi *);
extern void gsi_move_after (gimple_stmt_iterator *, gimple_stmt_iterator *);
extern void gsi_move_before (gimple_stmt_iterator *, gimple_stmt_iterator *);
extern void gsi_move_to_bb_end (gimple_stmt_iterator *, basic_block);
extern basic_block gsi_insert_seq_on_edge_immediate (edge, gimple_seq);
extern void gsi_commit_edge_inserts (void);
extern void gsi_commit_one_edge_insert (edge, basic_block *);
-extern gimple_stmt_iterator gsi_start_phis (basic_block);
+extern gphi_iterator gsi_start_phis (basic_block);
/* Return a new iterator pointing to GIMPLE_SEQ's first statement. */
/* Iterates I statement iterator to the next non-virtual statement. */
static inline void
-gsi_next_nonvirtual_phi (gimple_stmt_iterator *i)
+gsi_next_nonvirtual_phi (gphi_iterator *i)
{
- gimple phi;
+ gphi *phi;
if (gsi_end_p (*i))
return;
- phi = gsi_stmt (*i);
+ phi = i->phi ();
gcc_assert (phi != NULL);
while (virtual_operand_p (gimple_phi_result (phi)))
if (gsi_end_p (*i))
return;
- phi = gsi_stmt (*i);
+ phi = i->phi ();
}
}
struct return_statements_t
{
tree label;
- gimple stmt;
+ greturn *stmt;
};
typedef struct return_statements_t return_statements_t;
return;
case GIMPLE_EH_ELSE:
- lower_sequence (gimple_eh_else_n_body_ptr (stmt), data);
- lower_sequence (gimple_eh_else_e_body_ptr (stmt), data);
+ {
+ geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
+ lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
+ lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
+ }
break;
case GIMPLE_NOP:
return;
case GIMPLE_TRANSACTION:
- lower_sequence (gimple_transaction_body_ptr (stmt), data);
+ lower_sequence (gimple_transaction_body_ptr (
+ as_a <gtransaction *> (stmt)),
+ data);
break;
default:
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
tree old_block = data->block;
- gimple stmt = gsi_stmt (*gsi);
+ gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
tree new_block = gimple_bind_block (stmt);
if (new_block)
for (; !gsi_end_p (i); gsi_next (&i))
{
data->cannot_fallthru = false;
- lower_sequence (gimple_catch_handler_ptr (gsi_stmt (i)), data);
+ lower_sequence (gimple_catch_handler_ptr (
+ as_a <gcatch *> (gsi_stmt (i))),
+ data);
if (!data->cannot_fallthru)
cannot_fallthru = false;
}
This is a subroutine of gimple_stmt_may_fallthru. */
static bool
-gimple_try_catch_may_fallthru (gimple stmt)
+gimple_try_catch_may_fallthru (gtry *stmt)
{
gimple_stmt_iterator i;
through iff any of the catch bodies falls through. */
for (; !gsi_end_p (i); gsi_next (&i))
{
- if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
+ if (gimple_seq_may_fallthru (gimple_catch_handler (
+ as_a <gcatch *> (gsi_stmt (i)))))
return true;
}
return false;
return false;
case GIMPLE_BIND:
- return gimple_seq_may_fallthru (gimple_bind_body (stmt));
+ return gimple_seq_may_fallthru (
+ gimple_bind_body (as_a <gbind *> (stmt)));
case GIMPLE_TRY:
if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
- return gimple_try_catch_may_fallthru (stmt);
+ return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));
/* It must be a GIMPLE_TRY_FINALLY. */
&& gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));
case GIMPLE_EH_ELSE:
- return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
- || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));
+ {
+ geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
+ return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
+ || gimple_seq_may_fallthru (gimple_eh_else_e_body (
+ eh_else_stmt)));
+ }
case GIMPLE_CALL:
/* Functions that do not return do not fall through. */
static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
- gimple stmt = gsi_stmt (*gsi);
+ greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
gimple t;
int i;
return_statements_t tmp_rs;
assignment GS. BUFFER, SPC and FLAGS are as in pp_gimple_stmt_1. */
static void
-dump_unary_rhs (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_unary_rhs (pretty_printer *buffer, gassign *gs, int spc, int flags)
{
enum tree_code rhs_code = gimple_assign_rhs_code (gs);
tree lhs = gimple_assign_lhs (gs);
assignment GS. BUFFER, SPC and FLAGS are as in pp_gimple_stmt_1. */
static void
-dump_binary_rhs (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_binary_rhs (pretty_printer *buffer, gassign *gs, int spc, int flags)
{
const char *p;
enum tree_code code = gimple_assign_rhs_code (gs);
assignment GS. BUFFER, SPC and FLAGS are as in pp_gimple_stmt_1. */
static void
-dump_ternary_rhs (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_ternary_rhs (pretty_printer *buffer, gassign *gs, int spc, int flags)
{
const char *p;
enum tree_code code = gimple_assign_rhs_code (gs);
pp_gimple_stmt_1. */
static void
-dump_gimple_assign (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_assign (pretty_printer *buffer, gassign *gs, int spc, int flags)
{
if (flags & TDF_RAW)
{
pp_gimple_stmt_1. */
static void
-dump_gimple_return (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_return (pretty_printer *buffer, greturn *gs, int spc, int flags)
{
tree t, t2;
dump_gimple_call. */
static void
-dump_gimple_call_args (pretty_printer *buffer, gimple gs, int flags)
+dump_gimple_call_args (pretty_printer *buffer, gcall *gs, int flags)
{
size_t i;
pp_gimple_stmt_1. */
static void
-dump_gimple_call (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_call (pretty_printer *buffer, gcall *gs, int spc, int flags)
{
tree lhs = gimple_call_lhs (gs);
tree fn = gimple_call_fn (gs);
pp_gimple_stmt_1. */
static void
-dump_gimple_switch (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_switch (pretty_printer *buffer, gswitch *gs, int spc,
+ int flags)
{
unsigned int i;
pp_gimple_stmt_1. */
static void
-dump_gimple_cond (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_cond (pretty_printer *buffer, gcond *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%s, %T, %T, %T, %T>", gs,
TDF_* in dumpfils.h). */
static void
-dump_gimple_label (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_label (pretty_printer *buffer, glabel *gs, int spc, int flags)
{
tree label = gimple_label_label (gs);
if (flags & TDF_RAW)
TDF_* in dumpfile.h). */
static void
-dump_gimple_goto (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_goto (pretty_printer *buffer, ggoto *gs, int spc, int flags)
{
tree label = gimple_goto_dest (gs);
if (flags & TDF_RAW)
TDF_* in dumpfile.h). */
static void
-dump_gimple_bind (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_bind (pretty_printer *buffer, gbind *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <", gs);
dumpfile.h). */
static void
-dump_gimple_try (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_try (pretty_printer *buffer, gtry *gs, int spc, int flags)
{
if (flags & TDF_RAW)
{
dumpfile.h). */
static void
-dump_gimple_catch (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_catch (pretty_printer *buffer, gcatch *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%T, %+CATCH <%S>%->", gs,
dumpfile.h). */
static void
-dump_gimple_eh_filter (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_eh_filter (pretty_printer *buffer, geh_filter *gs, int spc,
+ int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%T, %+FAILURE <%S>%->", gs,
/* Dump a GIMPLE_EH_MUST_NOT_THROW tuple. */
static void
-dump_gimple_eh_must_not_throw (pretty_printer *buffer, gimple gs,
- int spc, int flags)
+dump_gimple_eh_must_not_throw (pretty_printer *buffer,
+ geh_mnt *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%T>", gs,
dumpfile.h). */
static void
-dump_gimple_eh_else (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_eh_else (pretty_printer *buffer, geh_else *gs, int spc,
+ int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags,
dumpfile.h). */
static void
-dump_gimple_resx (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_resx (pretty_printer *buffer, gresx *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%d>", gs,
/* Dump a GIMPLE_EH_DISPATCH tuple on the pretty_printer BUFFER. */
static void
-dump_gimple_eh_dispatch (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_eh_dispatch (pretty_printer *buffer, geh_dispatch *gs, int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%d>", gs,
in dumpfile.h). */
static void
-dump_gimple_debug (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_debug (pretty_printer *buffer, gdebug *gs, int spc, int flags)
{
switch (gs->subcode)
{
/* Dump a GIMPLE_OMP_FOR tuple on the pretty_printer BUFFER. */
static void
-dump_gimple_omp_for (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_omp_for (pretty_printer *buffer, gomp_for *gs, int spc, int flags)
{
size_t i;
/* Dump a GIMPLE_OMP_CONTINUE tuple on the pretty_printer BUFFER. */
static void
-dump_gimple_omp_continue (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_omp_continue (pretty_printer *buffer, gomp_continue *gs,
+ int spc, int flags)
{
if (flags & TDF_RAW)
{
/* Dump a GIMPLE_OMP_SINGLE tuple on the pretty_printer BUFFER. */
static void
-dump_gimple_omp_single (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_omp_single (pretty_printer *buffer, gomp_single *gs,
+ int spc, int flags)
{
if (flags & TDF_RAW)
{
/* Dump a GIMPLE_OMP_TARGET tuple on the pretty_printer BUFFER. */
static void
-dump_gimple_omp_target (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_omp_target (pretty_printer *buffer, gomp_target *gs,
+ int spc, int flags)
{
const char *kind;
switch (gimple_omp_target_kind (gs))
/* Dump a GIMPLE_OMP_TEAMS tuple on the pretty_printer BUFFER. */
static void
-dump_gimple_omp_teams (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_omp_teams (pretty_printer *buffer, gomp_teams *gs, int spc,
+ int flags)
{
if (flags & TDF_RAW)
{
/* Dump a GIMPLE_OMP_SECTIONS tuple on the pretty_printer BUFFER. */
static void
-dump_gimple_omp_sections (pretty_printer *buffer, gimple gs, int spc,
- int flags)
+dump_gimple_omp_sections (pretty_printer *buffer, gomp_sections *gs,
+ int spc, int flags)
{
if (flags & TDF_RAW)
{
/* Dump a GIMPLE_OMP_CRITICAL tuple on the pretty_printer BUFFER. */
static void
-dump_gimple_omp_critical (pretty_printer *buffer, gimple gs, int spc,
- int flags)
+dump_gimple_omp_critical (pretty_printer *buffer, gomp_critical *gs,
+ int spc, int flags)
{
if (flags & TDF_RAW)
dump_gimple_fmt (buffer, spc, flags, "%G <%+BODY <%S> >", gs,
/* Dump a GIMPLE_TRANSACTION tuple on the pretty_printer BUFFER. */
static void
-dump_gimple_transaction (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_transaction (pretty_printer *buffer, gtransaction *gs,
+ int spc, int flags)
{
unsigned subcode = gimple_transaction_subcode (gs);
dumpfile.h). */
static void
-dump_gimple_asm (pretty_printer *buffer, gimple gs, int spc, int flags)
+dump_gimple_asm (pretty_printer *buffer, gasm *gs, int spc, int flags)
{
unsigned int i, n, f, fields;
pretty printer. If COMMENT is true, print this after #. */
static void
-dump_gimple_phi (pretty_printer *buffer, gimple phi, int spc, bool comment,
+dump_gimple_phi (pretty_printer *buffer, gphi *phi, int spc, bool comment,
int flags)
{
size_t i;
dumpfile.h). */
static void
-dump_gimple_omp_parallel (pretty_printer *buffer, gimple gs, int spc,
- int flags)
+dump_gimple_omp_parallel (pretty_printer *buffer, gomp_parallel *gs,
+ int spc, int flags)
{
if (flags & TDF_RAW)
{
dumpfile.h). */
static void
-dump_gimple_omp_task (pretty_printer *buffer, gimple gs, int spc,
+dump_gimple_omp_task (pretty_printer *buffer, gomp_task *gs, int spc,
int flags)
{
if (flags & TDF_RAW)
in dumpfile.h). */
static void
-dump_gimple_omp_atomic_load (pretty_printer *buffer, gimple gs, int spc,
- int flags)
+dump_gimple_omp_atomic_load (pretty_printer *buffer, gomp_atomic_load *gs,
+ int spc, int flags)
{
if (flags & TDF_RAW)
{
in dumpfile.h). */
static void
-dump_gimple_omp_atomic_store (pretty_printer *buffer, gimple gs, int spc,
- int flags)
+dump_gimple_omp_atomic_store (pretty_printer *buffer,
+ gomp_atomic_store *gs, int spc, int flags)
{
if (flags & TDF_RAW)
{
switch (gimple_code (gs))
{
case GIMPLE_ASM:
- dump_gimple_asm (buffer, gs, spc, flags);
+ dump_gimple_asm (buffer, as_a <gasm *> (gs), spc, flags);
break;
case GIMPLE_ASSIGN:
- dump_gimple_assign (buffer, gs, spc, flags);
+ dump_gimple_assign (buffer, as_a <gassign *> (gs), spc, flags);
break;
case GIMPLE_BIND:
- dump_gimple_bind (buffer, gs, spc, flags);
+ dump_gimple_bind (buffer, as_a <gbind *> (gs), spc, flags);
break;
case GIMPLE_CALL:
- dump_gimple_call (buffer, gs, spc, flags);
+ dump_gimple_call (buffer, as_a <gcall *> (gs), spc, flags);
break;
case GIMPLE_COND:
- dump_gimple_cond (buffer, gs, spc, flags);
+ dump_gimple_cond (buffer, as_a <gcond *> (gs), spc, flags);
break;
case GIMPLE_LABEL:
- dump_gimple_label (buffer, gs, spc, flags);
+ dump_gimple_label (buffer, as_a <glabel *> (gs), spc, flags);
break;
case GIMPLE_GOTO:
- dump_gimple_goto (buffer, gs, spc, flags);
+ dump_gimple_goto (buffer, as_a <ggoto *> (gs), spc, flags);
break;
case GIMPLE_NOP:
break;
case GIMPLE_RETURN:
- dump_gimple_return (buffer, gs, spc, flags);
+ dump_gimple_return (buffer, as_a <greturn *> (gs), spc, flags);
break;
case GIMPLE_SWITCH:
- dump_gimple_switch (buffer, gs, spc, flags);
+ dump_gimple_switch (buffer, as_a <gswitch *> (gs), spc, flags);
break;
case GIMPLE_TRY:
- dump_gimple_try (buffer, gs, spc, flags);
+ dump_gimple_try (buffer, as_a <gtry *> (gs), spc, flags);
break;
case GIMPLE_PHI:
- dump_gimple_phi (buffer, gs, spc, false, flags);
+ dump_gimple_phi (buffer, as_a <gphi *> (gs), spc, false, flags);
break;
case GIMPLE_OMP_PARALLEL:
- dump_gimple_omp_parallel (buffer, gs, spc, flags);
+ dump_gimple_omp_parallel (buffer, as_a <gomp_parallel *> (gs), spc,
+ flags);
break;
case GIMPLE_OMP_TASK:
- dump_gimple_omp_task (buffer, gs, spc, flags);
+ dump_gimple_omp_task (buffer, as_a <gomp_task *> (gs), spc, flags);
break;
case GIMPLE_OMP_ATOMIC_LOAD:
- dump_gimple_omp_atomic_load (buffer, gs, spc, flags);
-
+ dump_gimple_omp_atomic_load (buffer, as_a <gomp_atomic_load *> (gs),
+ spc, flags);
break;
case GIMPLE_OMP_ATOMIC_STORE:
- dump_gimple_omp_atomic_store (buffer, gs, spc, flags);
+ dump_gimple_omp_atomic_store (buffer,
+ as_a <gomp_atomic_store *> (gs),
+ spc, flags);
break;
case GIMPLE_OMP_FOR:
- dump_gimple_omp_for (buffer, gs, spc, flags);
+ dump_gimple_omp_for (buffer, as_a <gomp_for *> (gs), spc, flags);
break;
case GIMPLE_OMP_CONTINUE:
- dump_gimple_omp_continue (buffer, gs, spc, flags);
+ dump_gimple_omp_continue (buffer, as_a <gomp_continue *> (gs), spc,
+ flags);
break;
case GIMPLE_OMP_SINGLE:
- dump_gimple_omp_single (buffer, gs, spc, flags);
+ dump_gimple_omp_single (buffer, as_a <gomp_single *> (gs), spc,
+ flags);
break;
case GIMPLE_OMP_TARGET:
- dump_gimple_omp_target (buffer, gs, spc, flags);
+ dump_gimple_omp_target (buffer, as_a <gomp_target *> (gs), spc,
+ flags);
break;
case GIMPLE_OMP_TEAMS:
- dump_gimple_omp_teams (buffer, gs, spc, flags);
+ dump_gimple_omp_teams (buffer, as_a <gomp_teams *> (gs), spc,
+ flags);
break;
case GIMPLE_OMP_RETURN:
break;
case GIMPLE_OMP_SECTIONS:
- dump_gimple_omp_sections (buffer, gs, spc, flags);
+ dump_gimple_omp_sections (buffer, as_a <gomp_sections *> (gs),
+ spc, flags);
break;
case GIMPLE_OMP_SECTIONS_SWITCH:
break;
case GIMPLE_OMP_CRITICAL:
- dump_gimple_omp_critical (buffer, gs, spc, flags);
+ dump_gimple_omp_critical (buffer, as_a <gomp_critical *> (gs), spc,
+ flags);
break;
case GIMPLE_CATCH:
- dump_gimple_catch (buffer, gs, spc, flags);
+ dump_gimple_catch (buffer, as_a <gcatch *> (gs), spc, flags);
break;
case GIMPLE_EH_FILTER:
- dump_gimple_eh_filter (buffer, gs, spc, flags);
+ dump_gimple_eh_filter (buffer, as_a <geh_filter *> (gs), spc, flags);
break;
case GIMPLE_EH_MUST_NOT_THROW:
- dump_gimple_eh_must_not_throw (buffer, gs, spc, flags);
+ dump_gimple_eh_must_not_throw (buffer,
+ as_a <geh_mnt *> (gs),
+ spc, flags);
break;
case GIMPLE_EH_ELSE:
- dump_gimple_eh_else (buffer, gs, spc, flags);
+ dump_gimple_eh_else (buffer, as_a <geh_else *> (gs), spc, flags);
break;
case GIMPLE_RESX:
- dump_gimple_resx (buffer, gs, spc, flags);
+ dump_gimple_resx (buffer, as_a <gresx *> (gs), spc, flags);
break;
case GIMPLE_EH_DISPATCH:
- dump_gimple_eh_dispatch (buffer, gs, spc, flags);
+ dump_gimple_eh_dispatch (buffer, as_a <geh_dispatch *> (gs), spc,
+ flags);
break;
case GIMPLE_DEBUG:
- dump_gimple_debug (buffer, gs, spc, flags);
+ dump_gimple_debug (buffer, as_a <gdebug *> (gs), spc, flags);
break;
case GIMPLE_PREDICT:
break;
case GIMPLE_TRANSACTION:
- dump_gimple_transaction (buffer, gs, spc, flags);
+ dump_gimple_transaction (buffer, as_a <gtransaction *> (gs), spc,
+ flags);
break;
default:
static void
dump_phi_nodes (pretty_printer *buffer, basic_block bb, int indent, int flags)
{
- gimple_stmt_iterator i;
+ gphi_iterator i;
for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple phi = gsi_stmt (i);
+ gphi *phi = i.phi ();
if (!virtual_operand_p (gimple_phi_result (phi)) || (flags & TDF_VOPS))
{
INDENT (indent);
if (stmt && gimple_code (stmt) == GIMPLE_LABEL)
{
pp_string (buffer, " (");
- dump_generic_node (buffer, gimple_label_label (stmt), 0, 0, false);
+ dump_generic_node (buffer,
+ gimple_label_label (as_a <glabel *> (stmt)),
+ 0, 0, false);
pp_right_paren (buffer);
pp_semicolon (buffer);
}
void
gimple_dump_bb_for_graph (pretty_printer *pp, basic_block bb)
{
- gimple_stmt_iterator gsi;
-
pp_printf (pp, "<bb %d>:\n", bb->index);
pp_write_text_as_dot_label_to_stream (pp, /*for_record=*/true);
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
if (!virtual_operand_p (gimple_phi_result (phi))
|| (dump_flags & TDF_VOPS))
{
}
}
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
pp_bar (pp);
update_stmt (stmt);
}
- gimple new_stmt
+ gcall *new_stmt
= gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
gimple_seq seq = NULL;
gimple_seq_add_stmt (&seq, new_stmt);
{
if (ret_zero)
{
- gimple ret = gsi_stmt (si2);
+ greturn *ret = as_a <greturn *> (gsi_stmt (si2));
tree zero = build_zero_cst (TREE_TYPE (gimple_return_retval (ret)));
gimple_return_set_retval (ret, zero);
update_stmt (ret);
FOR_EACH_BB_FN (bb, cfun)
{
- gimple_stmt_iterator si;
+ gphi_iterator si;
/* Out of an abundance of caution, do not isolate paths to a
block where the block has any abnormal outgoing edges.
cases. */
for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
{
- gimple phi = gsi_stmt (si);
+ gphi *phi = si.phi ();
tree lhs = gimple_phi_result (phi);
/* If the result is not a pointer, then there is no need to
{
FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
{
- if (gimple_code (use_stmt) != GIMPLE_RETURN
- || gimple_return_retval (use_stmt) != lhs)
+ greturn *return_stmt
+ = dyn_cast <greturn *> (use_stmt);
+ if (!return_stmt)
+ continue;
+
+ if (gimple_return_retval (return_stmt) != lhs)
continue;
if (warning_at (gimple_location (use_stmt),
/* Detect returning the address of a local variable. This only
becomes undefined behavior if the result is used, so we do not
insert a trap and only return NULL instead. */
- if (gimple_code (stmt) == GIMPLE_RETURN)
+ if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
- tree val = gimple_return_retval (stmt);
+ tree val = gimple_return_retval (return_stmt);
if (val && TREE_CODE (val) == ADDR_EXPR)
{
tree valbase = get_base_address (TREE_OPERAND (val, 0));
OPT_Wreturn_local_addr, msg))
inform (DECL_SOURCE_LOCATION(valbase), "declared here");
tree zero = build_zero_cst (TREE_TYPE (val));
- gimple_return_set_retval (stmt, zero);
+ gimple_return_set_retval (return_stmt, zero);
update_stmt (stmt);
}
}
is used to help find a basis for subsequent candidates. */
static void
-slsr_process_phi (gimple phi, bool speed)
+slsr_process_phi (gphi *phi, bool speed)
{
unsigned i;
tree arg0_base = NULL_TREE, base_type;
find_candidates_dom_walker::before_dom_children (basic_block bb)
{
bool speed = optimize_bb_for_speed_p (bb);
- gimple_stmt_iterator gsi;
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- slsr_process_phi (gsi_stmt (gsi), speed);
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
+ slsr_process_phi (gsi.phi (), speed);
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
gimple gs = gsi_stmt (gsi);
if (bump == 0)
{
tree lhs = gimple_assign_lhs (c->cand_stmt);
- gimple copy_stmt = gimple_build_assign (lhs, basis_name);
+ gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
gsi_replace (&gsi, copy_stmt, false);
basic_block insert_bb;
gimple_stmt_iterator gsi;
tree lhs, basis_type;
- gimple new_stmt;
+ gassign *new_stmt;
/* If the add candidate along this incoming edge has the same
index as C's hidden basis, the hidden basis represents this
{
int i;
tree name, phi_arg;
- gimple phi;
+ gphi *phi;
vec<tree> phi_args;
slsr_cand_t basis = lookup_cand (c->basis);
int nargs = gimple_phi_num_args (from_phi);
candidates, return the earliest candidate in the block in *WHERE. */
static basic_block
-ncd_with_phi (slsr_cand_t c, const widest_int &incr, gimple phi,
+ncd_with_phi (slsr_cand_t c, const widest_int &incr, gphi *phi,
basic_block ncd, slsr_cand_t *where)
{
unsigned i;
gimple arg_def = SSA_NAME_DEF_STMT (arg);
if (gimple_code (arg_def) == GIMPLE_PHI)
- ncd = ncd_with_phi (c, incr, arg_def, ncd, where);
+ ncd = ncd_with_phi (c, incr, as_a <gphi *> (arg_def), ncd,
+ where);
else
{
slsr_cand_t arg_cand = base_cand_from_table (arg);
}
if (phi_dependent_cand_p (c))
- ncd = ncd_with_phi (c, incr, lookup_cand (c->def_phi)->cand_stmt,
+ ncd = ncd_with_phi (c, incr,
+ as_a <gphi *> (lookup_cand (c->def_phi)->cand_stmt),
ncd, where);
return ncd;
{
basic_block bb;
slsr_cand_t where = NULL;
- gimple init_stmt;
+ gassign *init_stmt;
tree stride_type, new_name, incr_tree;
widest_int incr = incr_vec[i].incr;
introduce_cast_before_cand (slsr_cand_t c, tree to_type, tree from_expr)
{
tree cast_lhs;
- gimple cast_stmt;
+ gassign *cast_stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
cast_lhs = make_temp_ssa_name (to_type, NULL, "slsr");
if (types_compatible_p (lhs_type, basis_type))
{
- gimple copy_stmt = gimple_build_assign (lhs, basis_name);
+ gassign *copy_stmt = gimple_build_assign (lhs, basis_name);
gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
gimple_set_location (copy_stmt, gimple_location (c->cand_stmt));
gsi_replace (&gsi, copy_stmt, false);
else
{
gimple_stmt_iterator gsi = gsi_for_stmt (c->cand_stmt);
- gimple cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, lhs,
- basis_name);
+ gassign *cast_stmt = gimple_build_assign_with_ops (NOP_EXPR, lhs,
+ basis_name);
gimple_set_location (cast_stmt, gimple_location (c->cand_stmt));
gsi_replace (&gsi, cast_stmt, false);
c->cand_stmt = cast_stmt;
/* Read a PHI function for basic block BB in function FN. DATA_IN is
the file being read. IB is the input block to use for reading. */
-static gimple
+static gphi *
input_phi (struct lto_input_block *ib, basic_block bb, struct data_in *data_in,
struct function *fn)
{
unsigned HOST_WIDE_INT ix;
tree phi_result;
int i, len;
- gimple result;
+ gphi *result;
ix = streamer_read_uhwi (ib);
phi_result = (*SSANAMES (fn))[ix];
switch (code)
{
case GIMPLE_RESX:
- gimple_resx_set_region (stmt, streamer_read_hwi (ib));
+ gimple_resx_set_region (as_a <gresx *> (stmt),
+ streamer_read_hwi (ib));
break;
case GIMPLE_EH_MUST_NOT_THROW:
- gimple_eh_must_not_throw_set_fndecl (stmt, stream_read_tree (ib, data_in));
+ gimple_eh_must_not_throw_set_fndecl (
+ as_a <geh_mnt *> (stmt),
+ stream_read_tree (ib, data_in));
break;
case GIMPLE_EH_DISPATCH:
- gimple_eh_dispatch_set_region (stmt, streamer_read_hwi (ib));
+ gimple_eh_dispatch_set_region (as_a <geh_dispatch *> (stmt),
+ streamer_read_hwi (ib));
break;
case GIMPLE_ASM:
{
/* FIXME lto. Move most of this into a new gimple_asm_set_string(). */
- gimple_statement_asm *asm_stmt = as_a <gimple_statement_asm *> (stmt);
+ gasm *asm_stmt = as_a <gasm *> (stmt);
tree str;
asm_stmt->ni = streamer_read_uhwi (ib);
asm_stmt->no = streamer_read_uhwi (ib);
== TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*opp, 0), 0))))
*opp = TREE_OPERAND (TREE_OPERAND (*opp, 0), 0);
}
- if (is_gimple_call (stmt))
+ if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
- if (gimple_call_internal_p (stmt))
+ if (gimple_call_internal_p (call_stmt))
gimple_call_set_internal_fn
- (stmt, streamer_read_enum (ib, internal_fn, IFN_LAST));
+ (call_stmt, streamer_read_enum (ib, internal_fn, IFN_LAST));
else
- gimple_call_set_fntype (stmt, stream_read_tree (ib, data_in));
+ gimple_call_set_fntype (call_stmt, stream_read_tree (ib, data_in));
}
break;
break;
case GIMPLE_TRANSACTION:
- gimple_transaction_set_label (stmt, stream_read_tree (ib, data_in));
+ gimple_transaction_set_label (as_a <gtransaction *> (stmt),
+ stream_read_tree (ib, data_in));
break;
default:
}
else if (code == GIMPLE_ASM)
{
+ gasm *asm_stmt = as_a <gasm *> (stmt);
unsigned i;
- for (i = 0; i < gimple_asm_noutputs (stmt); i++)
+ for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
{
- tree op = TREE_VALUE (gimple_asm_output_op (stmt, i));
+ tree op = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
if (TREE_CODE (op) == SSA_NAME)
SSA_NAME_DEF_STMT (op) = stmt;
}
/* Reset alias information. */
if (code == GIMPLE_CALL)
- gimple_call_reset_alias_info (stmt);
+ gimple_call_reset_alias_info (as_a <gcall *> (stmt));
/* Mark the statement modified so its operand vectors can be filled in. */
gimple_set_modified (stmt, true);
/* Output PHI function PHI to the main stream in OB. */
static void
-output_phi (struct output_block *ob, gimple phi)
+output_phi (struct output_block *ob, gphi *phi)
{
unsigned i, len = gimple_phi_num_args (phi);
bp_pack_var_len_unsigned (&bp, gimple_num_ops (stmt));
bp_pack_value (&bp, gimple_no_warning_p (stmt), 1);
if (is_gimple_assign (stmt))
- bp_pack_value (&bp, gimple_assign_nontemporal_move_p (stmt), 1);
+ bp_pack_value (&bp,
+ gimple_assign_nontemporal_move_p (
+ as_a <gassign *> (stmt)),
+ 1);
bp_pack_value (&bp, gimple_has_volatile_ops (stmt), 1);
hist = gimple_histogram_value (cfun, stmt);
bp_pack_value (&bp, hist != NULL, 1);
switch (gimple_code (stmt))
{
case GIMPLE_RESX:
- streamer_write_hwi (ob, gimple_resx_region (stmt));
+ streamer_write_hwi (ob, gimple_resx_region (as_a <gresx *> (stmt)));
break;
case GIMPLE_EH_MUST_NOT_THROW:
- stream_write_tree (ob, gimple_eh_must_not_throw_fndecl (stmt), true);
+ stream_write_tree (ob,
+ gimple_eh_must_not_throw_fndecl (
+ as_a <geh_mnt *> (stmt)),
+ true);
break;
case GIMPLE_EH_DISPATCH:
- streamer_write_hwi (ob, gimple_eh_dispatch_region (stmt));
+ streamer_write_hwi (ob,
+ gimple_eh_dispatch_region (
+ as_a <geh_dispatch *> (stmt)));
break;
case GIMPLE_ASM:
- streamer_write_uhwi (ob, gimple_asm_ninputs (stmt));
- streamer_write_uhwi (ob, gimple_asm_noutputs (stmt));
- streamer_write_uhwi (ob, gimple_asm_nclobbers (stmt));
- streamer_write_uhwi (ob, gimple_asm_nlabels (stmt));
- streamer_write_string (ob, ob->main_stream, gimple_asm_string (stmt),
- true);
+ {
+ gasm *asm_stmt = as_a <gasm *> (stmt);
+ streamer_write_uhwi (ob, gimple_asm_ninputs (asm_stmt));
+ streamer_write_uhwi (ob, gimple_asm_noutputs (asm_stmt));
+ streamer_write_uhwi (ob, gimple_asm_nclobbers (asm_stmt));
+ streamer_write_uhwi (ob, gimple_asm_nlabels (asm_stmt));
+ streamer_write_string (ob, ob->main_stream,
+ gimple_asm_string (asm_stmt), true);
+ }
/* Fallthru */
case GIMPLE_ASSIGN:
break;
case GIMPLE_TRANSACTION:
- gcc_assert (gimple_transaction_body (stmt) == NULL);
- stream_write_tree (ob, gimple_transaction_label (stmt), true);
+ {
+ gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
+ gcc_assert (gimple_transaction_body (trans_stmt) == NULL);
+ stream_write_tree (ob, gimple_transaction_label (trans_stmt), true);
+ }
break;
default:
streamer_write_record_start (ob, LTO_null);
- for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gphi_iterator psi = gsi_start_phis (bb);
+ !gsi_end_p (psi);
+ gsi_next (&psi))
{
- gimple phi = gsi_stmt (bsi);
+ gphi *phi = psi.phi ();
/* Only emit PHIs for gimple registers. PHI nodes for .MEM
will be filled in on reading when the SSA form is
/* Helper function for walk_gimple_stmt. Walk operands of a GIMPLE_ASM. */
static tree
-walk_gimple_asm (gimple stmt, walk_tree_fn callback_op,
+walk_gimple_asm (gasm *stmt, walk_tree_fn callback_op,
struct walk_stmt_info *wi)
{
tree ret, op;
wi->val_only = true;
}
- ret = walk_tree (gimple_call_chain_ptr (stmt), callback_op, wi, pset);
+ ret = walk_tree (gimple_call_chain_ptr (as_a <gcall *> (stmt)),
+ callback_op, wi, pset);
if (ret)
return ret;
break;
case GIMPLE_CATCH:
- ret = walk_tree (gimple_catch_types_ptr (stmt), callback_op, wi,
- pset);
+ ret = walk_tree (gimple_catch_types_ptr (as_a <gcatch *> (stmt)),
+ callback_op, wi, pset);
if (ret)
return ret;
break;
break;
case GIMPLE_ASM:
- ret = walk_gimple_asm (stmt, callback_op, wi);
+ ret = walk_gimple_asm (as_a <gasm *> (stmt), callback_op, wi);
if (ret)
return ret;
break;
case GIMPLE_OMP_CONTINUE:
- ret = walk_tree (gimple_omp_continue_control_def_ptr (stmt),
- callback_op, wi, pset);
- if (ret)
- return ret;
-
- ret = walk_tree (gimple_omp_continue_control_use_ptr (stmt),
- callback_op, wi, pset);
- if (ret)
- return ret;
+ {
+ gomp_continue *cont_stmt = as_a <gomp_continue *> (stmt);
+ ret = walk_tree (gimple_omp_continue_control_def_ptr (cont_stmt),
+ callback_op, wi, pset);
+ if (ret)
+ return ret;
+
+ ret = walk_tree (gimple_omp_continue_control_use_ptr (cont_stmt),
+ callback_op, wi, pset);
+ if (ret)
+ return ret;
+ }
break;
case GIMPLE_OMP_CRITICAL:
- ret = walk_tree (gimple_omp_critical_name_ptr (stmt), callback_op, wi,
- pset);
+ ret = walk_tree (gimple_omp_critical_name_ptr (
+ as_a <gomp_critical *> (stmt)),
+ callback_op, wi, pset);
if (ret)
return ret;
break;
break;
case GIMPLE_OMP_PARALLEL:
- ret = walk_tree (gimple_omp_parallel_clauses_ptr (stmt), callback_op,
- wi, pset);
- if (ret)
- return ret;
- ret = walk_tree (gimple_omp_parallel_child_fn_ptr (stmt), callback_op,
- wi, pset);
- if (ret)
- return ret;
- ret = walk_tree (gimple_omp_parallel_data_arg_ptr (stmt), callback_op,
- wi, pset);
- if (ret)
- return ret;
+ {
+ gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
+ ret = walk_tree (gimple_omp_parallel_clauses_ptr (omp_par_stmt),
+ callback_op, wi, pset);
+ if (ret)
+ return ret;
+ ret = walk_tree (gimple_omp_parallel_child_fn_ptr (omp_par_stmt),
+ callback_op, wi, pset);
+ if (ret)
+ return ret;
+ ret = walk_tree (gimple_omp_parallel_data_arg_ptr (omp_par_stmt),
+ callback_op, wi, pset);
+ if (ret)
+ return ret;
+ }
break;
case GIMPLE_OMP_TASK:
break;
case GIMPLE_OMP_ATOMIC_LOAD:
- ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (stmt), callback_op, wi,
- pset);
- if (ret)
- return ret;
-
- ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt), callback_op, wi,
- pset);
- if (ret)
- return ret;
+ {
+ gomp_atomic_load *omp_stmt = as_a <gomp_atomic_load *> (stmt);
+ ret = walk_tree (gimple_omp_atomic_load_lhs_ptr (omp_stmt),
+ callback_op, wi, pset);
+ if (ret)
+ return ret;
+
+ ret = walk_tree (gimple_omp_atomic_load_rhs_ptr (omp_stmt),
+ callback_op, wi, pset);
+ if (ret)
+ return ret;
+ }
break;
case GIMPLE_OMP_ATOMIC_STORE:
- ret = walk_tree (gimple_omp_atomic_store_val_ptr (stmt), callback_op,
- wi, pset);
+ ret = walk_tree (gimple_omp_atomic_store_val_ptr (
+ as_a <gomp_atomic_store *> (stmt)),
+ callback_op, wi, pset);
if (ret)
return ret;
break;
case GIMPLE_TRANSACTION:
- ret = walk_tree (gimple_transaction_label_ptr (stmt), callback_op,
- wi, pset);
+ ret = walk_tree (gimple_transaction_label_ptr (
+ as_a <gtransaction *> (stmt)),
+ callback_op, wi, pset);
if (ret)
return ret;
break;
switch (gimple_code (stmt))
{
case GIMPLE_BIND:
- ret = walk_gimple_seq_mod (gimple_bind_body_ptr (stmt), callback_stmt,
- callback_op, wi);
+ ret = walk_gimple_seq_mod (gimple_bind_body_ptr (as_a <gbind *> (stmt)),
+ callback_stmt, callback_op, wi);
if (ret)
return wi->callback_result;
break;
case GIMPLE_CATCH:
- ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (stmt), callback_stmt,
- callback_op, wi);
+ ret = walk_gimple_seq_mod (gimple_catch_handler_ptr (
+ as_a <gcatch *> (stmt)),
+ callback_stmt, callback_op, wi);
if (ret)
return wi->callback_result;
break;
break;
case GIMPLE_EH_ELSE:
- ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (stmt),
- callback_stmt, callback_op, wi);
- if (ret)
- return wi->callback_result;
- ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (stmt),
- callback_stmt, callback_op, wi);
- if (ret)
- return wi->callback_result;
+ {
+ geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
+ ret = walk_gimple_seq_mod (gimple_eh_else_n_body_ptr (eh_else_stmt),
+ callback_stmt, callback_op, wi);
+ if (ret)
+ return wi->callback_result;
+ ret = walk_gimple_seq_mod (gimple_eh_else_e_body_ptr (eh_else_stmt),
+ callback_stmt, callback_op, wi);
+ if (ret)
+ return wi->callback_result;
+ }
break;
case GIMPLE_TRY:
break;
case GIMPLE_TRANSACTION:
- ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (stmt),
+ ret = walk_gimple_seq_mod (gimple_transaction_body_ptr (
+ as_a <gtransaction *> (stmt)),
callback_stmt, callback_op, wi);
if (ret)
return wi->callback_result;
}
}
}
- else if (is_gimple_call (stmt))
+ else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
if (visit_store)
{
- tree arg = gimple_call_lhs (stmt);
+ tree arg = gimple_call_lhs (call_stmt);
if (arg)
{
tree lhs = get_base_loadstore (arg);
}
}
if (visit_load || visit_addr)
- for (i = 0; i < gimple_call_num_args (stmt); ++i)
+ for (i = 0; i < gimple_call_num_args (call_stmt); ++i)
{
- tree arg = gimple_call_arg (stmt, i);
+ tree arg = gimple_call_arg (call_stmt, i);
if (visit_addr
&& TREE_CODE (arg) == ADDR_EXPR)
ret |= visit_addr (stmt, TREE_OPERAND (arg, 0), arg, data);
}
}
if (visit_addr
- && gimple_call_chain (stmt)
- && TREE_CODE (gimple_call_chain (stmt)) == ADDR_EXPR)
- ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (stmt), 0),
- gimple_call_chain (stmt), data);
+ && gimple_call_chain (call_stmt)
+ && TREE_CODE (gimple_call_chain (call_stmt)) == ADDR_EXPR)
+ ret |= visit_addr (stmt, TREE_OPERAND (gimple_call_chain (call_stmt), 0),
+ gimple_call_chain (call_stmt), data);
if (visit_addr
- && gimple_call_return_slot_opt_p (stmt)
- && gimple_call_lhs (stmt) != NULL_TREE
- && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
- ret |= visit_addr (stmt, gimple_call_lhs (stmt),
- gimple_call_lhs (stmt), data);
+ && gimple_call_return_slot_opt_p (call_stmt)
+ && gimple_call_lhs (call_stmt) != NULL_TREE
+ && TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (call_stmt))))
+ ret |= visit_addr (stmt, gimple_call_lhs (call_stmt),
+ gimple_call_lhs (call_stmt), data);
}
- else if (gimple_code (stmt) == GIMPLE_ASM)
+ else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
{
unsigned noutputs;
const char *constraint;
const char **oconstraints;
bool allows_mem, allows_reg, is_inout;
- noutputs = gimple_asm_noutputs (stmt);
+ noutputs = gimple_asm_noutputs (asm_stmt);
oconstraints = XALLOCAVEC (const char *, noutputs);
if (visit_store || visit_addr)
- for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
+ for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
{
- tree link = gimple_asm_output_op (stmt, i);
+ tree link = gimple_asm_output_op (asm_stmt, i);
tree op = get_base_loadstore (TREE_VALUE (link));
if (op && visit_store)
ret |= visit_store (stmt, op, TREE_VALUE (link), data);
}
}
if (visit_load || visit_addr)
- for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
+ for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
{
- tree link = gimple_asm_input_op (stmt, i);
+ tree link = gimple_asm_input_op (asm_stmt, i);
tree op = TREE_VALUE (link);
if (visit_addr
&& TREE_CODE (op) == ADDR_EXPR)
}
}
}
- else if (gimple_code (stmt) == GIMPLE_RETURN)
+ else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
- tree op = gimple_return_retval (stmt);
+ tree op = gimple_return_retval (return_stmt);
if (op)
{
if (visit_addr
/* Build a GIMPLE_RETURN statement returning RETVAL. */
-gimple
+greturn *
gimple_build_return (tree retval)
{
- gimple s = gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK, 2);
+ greturn *s
+ = as_a <greturn *> (gimple_build_with_ops (GIMPLE_RETURN, ERROR_MARK,
+ 2));
if (retval)
gimple_return_set_retval (s, retval);
return s;
/* Reset alias information on call S. */
void
-gimple_call_reset_alias_info (gimple s)
+gimple_call_reset_alias_info (gcall *s)
{
if (gimple_call_flags (s) & ECF_CONST)
memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
components of a GIMPLE_CALL statement to function FN with NARGS
arguments. */
-static inline gimple
+static inline gcall *
gimple_build_call_1 (tree fn, unsigned nargs)
{
- gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
+ gcall *s
+ = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
+ nargs + 3));
if (TREE_CODE (fn) == FUNCTION_DECL)
fn = build_fold_addr_expr (fn);
gimple_set_op (s, 1, fn);
/* Build a GIMPLE_CALL statement to function FN with the arguments
specified in vector ARGS. */
-gimple
+gcall *
gimple_build_call_vec (tree fn, vec<tree> args)
{
unsigned i;
unsigned nargs = args.length ();
- gimple call = gimple_build_call_1 (fn, nargs);
+ gcall *call = gimple_build_call_1 (fn, nargs);
for (i = 0; i < nargs; i++)
gimple_call_set_arg (call, i, args[i]);
/* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
arguments. The ... are the arguments. */
-gimple
+gcall *
gimple_build_call (tree fn, unsigned nargs, ...)
{
va_list ap;
- gimple call;
+ gcall *call;
unsigned i;
gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
/* Build a GIMPLE_CALL statement to function FN. NARGS is the number of
arguments. AP contains the arguments. */
-gimple
+gcall *
gimple_build_call_valist (tree fn, unsigned nargs, va_list ap)
{
- gimple call;
+ gcall *call;
unsigned i;
gcc_assert (TREE_CODE (fn) == FUNCTION_DECL || is_gimple_call_addr (fn));
Build the basic components of a GIMPLE_CALL statement to internal
function FN with NARGS arguments. */
-static inline gimple
+static inline gcall *
gimple_build_call_internal_1 (enum internal_fn fn, unsigned nargs)
{
- gimple s = gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK, nargs + 3);
+ gcall *s
+ = as_a <gcall *> (gimple_build_with_ops (GIMPLE_CALL, ERROR_MARK,
+ nargs + 3));
s->subcode |= GF_CALL_INTERNAL;
gimple_call_set_internal_fn (s, fn);
gimple_call_reset_alias_info (s);
/* Build a GIMPLE_CALL statement to internal function FN. NARGS is
the number of arguments. The ... are the arguments. */
-gimple
+gcall *
gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
{
va_list ap;
- gimple call;
+ gcall *call;
unsigned i;
call = gimple_build_call_internal_1 (fn, nargs);
/* Build a GIMPLE_CALL statement to internal function FN with the arguments
specified in vector ARGS. */
-gimple
+gcall *
gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
{
unsigned i, nargs;
- gimple call;
+ gcall *call;
nargs = args.length ();
call = gimple_build_call_internal_1 (fn, nargs);
assumed to be in GIMPLE form already. Minimal checking is done of
this fact. */
-gimple
+gcall *
gimple_build_call_from_tree (tree t)
{
unsigned i, nargs;
- gimple call;
+ gcall *call;
tree fndecl = get_callee_fndecl (t);
gcc_assert (TREE_CODE (t) == CALL_EXPR);
LHS of the assignment.
RHS of the assignment which can be unary or binary. */
-gimple
+gassign *
gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
{
enum tree_code subcode;
OP1 and OP2. If OP2 is NULL then SUBCODE must be of class
GIMPLE_UNARY_RHS or GIMPLE_SINGLE_RHS. */
-gimple
+gassign *
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
tree op2, tree op3 MEM_STAT_DECL)
{
unsigned num_ops;
- gimple p;
+ gassign *p;
/* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
code). */
num_ops = get_gimple_rhs_num_ops (subcode) + 1;
- p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
- PASS_MEM_STAT);
+ p = as_a <gassign *> (
+ gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
+ PASS_MEM_STAT));
gimple_assign_set_lhs (p, lhs);
gimple_assign_set_rhs1 (p, op1);
if (op2)
return p;
}
-gimple
+gassign *
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1,
tree op2 MEM_STAT_DECL)
{
PASS_MEM_STAT);
}
-gimple
+gassign *
gimple_build_assign_with_ops (enum tree_code subcode, tree lhs, tree op1
MEM_STAT_DECL)
{
T_LABEL is the label to jump to if the condition is true.
F_LABEL is the label to jump to otherwise. */
-gimple
+gcond *
gimple_build_cond (enum tree_code pred_code, tree lhs, tree rhs,
tree t_label, tree f_label)
{
- gimple p;
+ gcond *p;
gcc_assert (TREE_CODE_CLASS (pred_code) == tcc_comparison);
- p = gimple_build_with_ops (GIMPLE_COND, pred_code, 4);
+ p = as_a <gcond *> (gimple_build_with_ops (GIMPLE_COND, pred_code, 4));
gimple_cond_set_lhs (p, lhs);
gimple_cond_set_rhs (p, rhs);
gimple_cond_set_true_label (p, t_label);
/* Build a GIMPLE_COND statement from the conditional expression tree
COND. T_LABEL and F_LABEL are as in gimple_build_cond. */
-gimple
+gcond *
gimple_build_cond_from_tree (tree cond, tree t_label, tree f_label)
{
enum tree_code code;
boolean expression tree COND. */
void
-gimple_cond_set_condition_from_tree (gimple stmt, tree cond)
+gimple_cond_set_condition_from_tree (gcond *stmt, tree cond)
{
enum tree_code code;
tree lhs, rhs;
/* Build a GIMPLE_LABEL statement for LABEL. */
-gimple
+glabel *
gimple_build_label (tree label)
{
- gimple p = gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1);
+ glabel *p
+ = as_a <glabel *> (gimple_build_with_ops (GIMPLE_LABEL, ERROR_MARK, 1));
gimple_label_set_label (p, label);
return p;
}
/* Build a GIMPLE_GOTO statement to label DEST. */
-gimple
+ggoto *
gimple_build_goto (tree dest)
{
- gimple p = gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1);
+ ggoto *p
+ = as_a <ggoto *> (gimple_build_with_ops (GIMPLE_GOTO, ERROR_MARK, 1));
gimple_goto_set_dest (p, dest);
return p;
}
VARS are the variables in BODY.
BLOCK is the containing block. */
-gimple
+gbind *
gimple_build_bind (tree vars, gimple_seq body, tree block)
{
- gimple p = gimple_alloc (GIMPLE_BIND, 0);
+ gbind *p = as_a <gbind *> (gimple_alloc (GIMPLE_BIND, 0));
gimple_bind_set_vars (p, vars);
if (body)
gimple_bind_set_body (p, body);
NCLOBBERS is the number of clobbered registers.
*/
-static inline gimple
+static inline gasm *
gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
unsigned nclobbers, unsigned nlabels)
{
- gimple_statement_asm *p;
+ gasm *p;
int size = strlen (string);
/* ASMs with labels cannot have outputs. This should have been
enforced by the front end. */
gcc_assert (nlabels == 0 || noutputs == 0);
- p = as_a <gimple_statement_asm *> (
+ p = as_a <gasm *> (
gimple_build_with_ops (GIMPLE_ASM, ERROR_MARK,
ninputs + noutputs + nclobbers + nlabels));
CLOBBERS is a vector of the clobbered register parameters.
LABELS is a vector of destination labels. */
-gimple
+gasm *
gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
vec<tree, va_gc> *labels)
{
- gimple p;
+ gasm *p;
unsigned i;
p = gimple_build_asm_1 (string,
TYPES are the catch types.
HANDLER is the exception handler. */
-gimple
+gcatch *
gimple_build_catch (tree types, gimple_seq handler)
{
- gimple p = gimple_alloc (GIMPLE_CATCH, 0);
+ gcatch *p = as_a <gcatch *> (gimple_alloc (GIMPLE_CATCH, 0));
gimple_catch_set_types (p, types);
if (handler)
gimple_catch_set_handler (p, handler);
TYPES are the filter's types.
FAILURE is the filter's failure action. */
-gimple
+geh_filter *
gimple_build_eh_filter (tree types, gimple_seq failure)
{
- gimple p = gimple_alloc (GIMPLE_EH_FILTER, 0);
+ geh_filter *p = as_a <geh_filter *> (gimple_alloc (GIMPLE_EH_FILTER, 0));
gimple_eh_filter_set_types (p, types);
if (failure)
gimple_eh_filter_set_failure (p, failure);
/* Build a GIMPLE_EH_MUST_NOT_THROW statement. */
-gimple
+geh_mnt *
gimple_build_eh_must_not_throw (tree decl)
{
- gimple p = gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0);
+ geh_mnt *p = as_a <geh_mnt *> (gimple_alloc (GIMPLE_EH_MUST_NOT_THROW, 0));
gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
gcc_assert (flags_from_decl_or_type (decl) & ECF_NORETURN);
/* Build a GIMPLE_EH_ELSE statement. */
-gimple
+geh_else *
gimple_build_eh_else (gimple_seq n_body, gimple_seq e_body)
{
- gimple p = gimple_alloc (GIMPLE_EH_ELSE, 0);
+ geh_else *p = as_a <geh_else *> (gimple_alloc (GIMPLE_EH_ELSE, 0));
gimple_eh_else_set_n_body (p, n_body);
gimple_eh_else_set_e_body (p, e_body);
return p;
KIND is either GIMPLE_TRY_CATCH or GIMPLE_TRY_FINALLY depending on
whether this is a try/catch or a try/finally respectively. */
-gimple_statement_try *
+gtry *
gimple_build_try (gimple_seq eval, gimple_seq cleanup,
enum gimple_try_flags kind)
{
- gimple_statement_try *p;
+ gtry *p;
gcc_assert (kind == GIMPLE_TRY_CATCH || kind == GIMPLE_TRY_FINALLY);
- p = as_a <gimple_statement_try *> (gimple_alloc (GIMPLE_TRY, 0));
+ p = as_a <gtry *> (gimple_alloc (GIMPLE_TRY, 0));
gimple_set_subcode (p, kind);
if (eval)
gimple_try_set_eval (p, eval);
/* Build a GIMPLE_RESX statement. */
-gimple
+gresx *
gimple_build_resx (int region)
{
- gimple_statement_resx *p =
- as_a <gimple_statement_resx *> (
- gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
+ gresx *p
+ = as_a <gresx *> (gimple_build_with_ops (GIMPLE_RESX, ERROR_MARK, 0));
p->region = region;
return p;
}
NLABELS is the number of labels in the switch excluding the default.
DEFAULT_LABEL is the default label for the switch statement. */
-gimple
+gswitch *
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
/* nlabels + 1 default label + 1 index. */
gcc_checking_assert (default_label);
- gimple p = gimple_build_with_ops (GIMPLE_SWITCH, ERROR_MARK,
- 1 + 1 + nlabels);
+ gswitch *p = as_a <gswitch *> (gimple_build_with_ops (GIMPLE_SWITCH,
+ ERROR_MARK,
+ 1 + 1 + nlabels));
gimple_switch_set_index (p, index);
gimple_switch_set_default_label (p, default_label);
return p;
DEFAULT_LABEL is the default label
ARGS is a vector of labels excluding the default. */
-gimple
+gswitch *
gimple_build_switch (tree index, tree default_label, vec<tree> args)
{
unsigned i, nlabels = args.length ();
- gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
+ gswitch *p = gimple_build_switch_nlabels (nlabels, index, default_label);
/* Copy the labels from the vector to the switch statement. */
for (i = 0; i < nlabels; i++)
/* Build a GIMPLE_EH_DISPATCH statement. */
-gimple
+geh_dispatch *
gimple_build_eh_dispatch (int region)
{
- gimple_statement_eh_dispatch *p =
- as_a <gimple_statement_eh_dispatch *> (
- gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
+ geh_dispatch *p
+ = as_a <geh_dispatch *> (
+ gimple_build_with_ops (GIMPLE_EH_DISPATCH, ERROR_MARK, 0));
p->region = region;
return p;
}
VAR is bound to VALUE; block and location are taken from STMT. */
-gimple
+gdebug *
gimple_build_debug_bind_stat (tree var, tree value, gimple stmt MEM_STAT_DECL)
{
- gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
- (unsigned)GIMPLE_DEBUG_BIND, 2
- PASS_MEM_STAT);
-
+ gdebug *p
+ = as_a <gdebug *> (gimple_build_with_ops_stat (GIMPLE_DEBUG,
+ (unsigned)GIMPLE_DEBUG_BIND, 2
+ PASS_MEM_STAT));
gimple_debug_bind_set_var (p, var);
gimple_debug_bind_set_value (p, value);
if (stmt)
VAR is bound to VALUE; block and location are taken from STMT. */
-gimple
+gdebug *
gimple_build_debug_source_bind_stat (tree var, tree value,
gimple stmt MEM_STAT_DECL)
{
- gimple p = gimple_build_with_ops_stat (GIMPLE_DEBUG,
- (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
- PASS_MEM_STAT);
+ gdebug *p
+ = as_a <gdebug *> (
+ gimple_build_with_ops_stat (GIMPLE_DEBUG,
+ (unsigned)GIMPLE_DEBUG_SOURCE_BIND, 2
+ PASS_MEM_STAT));
gimple_debug_source_bind_set_var (p, var);
gimple_debug_source_bind_set_value (p, value);
BODY is the sequence of statements for which only one thread can execute.
NAME is optional identifier for this critical block. */
-gimple
+gomp_critical *
gimple_build_omp_critical (gimple_seq body, tree name)
{
- gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
+ gomp_critical *p
+ = as_a <gomp_critical *> (gimple_alloc (GIMPLE_OMP_CRITICAL, 0));
gimple_omp_critical_set_name (p, name);
if (body)
gimple_omp_set_body (p, body);
COLLAPSE is the collapse count.
PRE_BODY is the sequence of statements that are loop invariant. */
-gimple
+gomp_for *
gimple_build_omp_for (gimple_seq body, int kind, tree clauses, size_t collapse,
gimple_seq pre_body)
{
- gimple_statement_omp_for *p =
- as_a <gimple_statement_omp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0));
+ gomp_for *p = as_a <gomp_for *> (gimple_alloc (GIMPLE_OMP_FOR, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_for_set_clauses (p, clauses);
CHILD_FN is the function created for the parallel threads to execute.
DATA_ARG are the shared data argument(s). */
-gimple
+gomp_parallel *
gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
tree data_arg)
{
- gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
+ gomp_parallel *p
+ = as_a <gomp_parallel *> (gimple_alloc (GIMPLE_OMP_PARALLEL, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_parallel_set_clauses (p, clauses);
COPY_FN is the optional function for firstprivate initialization.
ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
-gimple
+gomp_task *
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
tree data_arg, tree copy_fn, tree arg_size,
tree arg_align)
{
- gimple p = gimple_alloc (GIMPLE_OMP_TASK, 0);
+ gomp_task *p = as_a <gomp_task *> (gimple_alloc (GIMPLE_OMP_TASK, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_task_set_clauses (p, clauses);
CONTROL_DEF is the definition of the control variable.
CONTROL_USE is the use of the control variable. */
-gimple
+gomp_continue *
gimple_build_omp_continue (tree control_def, tree control_use)
{
- gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
+ gomp_continue *p
+ = as_a <gomp_continue *> (gimple_alloc (GIMPLE_OMP_CONTINUE, 0));
gimple_omp_continue_set_control_def (p, control_def);
gimple_omp_continue_set_control_use (p, control_use);
return p;
CLAUSES are any of the OMP sections contsruct's clauses: private,
firstprivate, lastprivate, reduction, and nowait. */
-gimple
+gomp_sections *
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
- gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
+ gomp_sections *p
+ = as_a <gomp_sections *> (gimple_alloc (GIMPLE_OMP_SECTIONS, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_sections_set_clauses (p, clauses);
CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
copyprivate, nowait. */
-gimple
+gomp_single *
gimple_build_omp_single (gimple_seq body, tree clauses)
{
- gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
+ gomp_single *p
+ = as_a <gomp_single *> (gimple_alloc (GIMPLE_OMP_SINGLE, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_single_set_clauses (p, clauses);
BODY is the sequence of statements that will be executed.
CLAUSES are any of the OMP target construct's clauses. */
-gimple
+gomp_target *
gimple_build_omp_target (gimple_seq body, int kind, tree clauses)
{
- gimple p = gimple_alloc (GIMPLE_OMP_TARGET, 0);
+ gomp_target *p
+ = as_a <gomp_target *> (gimple_alloc (GIMPLE_OMP_TARGET, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_target_set_clauses (p, clauses);
BODY is the sequence of statements that will be executed.
CLAUSES are any of the OMP teams construct's clauses. */
-gimple
+gomp_teams *
gimple_build_omp_teams (gimple_seq body, tree clauses)
{
- gimple p = gimple_alloc (GIMPLE_OMP_TEAMS, 0);
+ gomp_teams *p = as_a <gomp_teams *> (gimple_alloc (GIMPLE_OMP_TEAMS, 0));
if (body)
gimple_omp_set_body (p, body);
gimple_omp_teams_set_clauses (p, clauses);
/* Build a GIMPLE_OMP_ATOMIC_LOAD statement. */
-gimple
+gomp_atomic_load *
gimple_build_omp_atomic_load (tree lhs, tree rhs)
{
- gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0);
+ gomp_atomic_load *p
+ = as_a <gomp_atomic_load *> (gimple_alloc (GIMPLE_OMP_ATOMIC_LOAD, 0));
gimple_omp_atomic_load_set_lhs (p, lhs);
gimple_omp_atomic_load_set_rhs (p, rhs);
return p;
VAL is the value we are storing. */
-gimple
+gomp_atomic_store *
gimple_build_omp_atomic_store (tree val)
{
- gimple p = gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0);
+ gomp_atomic_store *p
+ = as_a <gomp_atomic_store *> (gimple_alloc (GIMPLE_OMP_ATOMIC_STORE, 0));
gimple_omp_atomic_store_set_val (p, val);
return p;
}
/* Build a GIMPLE_TRANSACTION statement. */
-gimple
+gtransaction *
gimple_build_transaction (gimple_seq body, tree label)
{
- gimple p = gimple_alloc (GIMPLE_TRANSACTION, 0);
+ gtransaction *p
+ = as_a <gtransaction *> (gimple_alloc (GIMPLE_TRANSACTION, 0));
gimple_transaction_set_body (p, body);
gimple_transaction_set_label (p, label);
return p;
{
if (gimple_code (stmt) == GIMPLE_NOP)
return true;
- if (gimple_code (stmt) == GIMPLE_BIND)
- return empty_body_p (gimple_bind_body (stmt));
+ if (gbind *bind_stmt = dyn_cast <gbind *> (stmt))
+ return empty_body_p (gimple_bind_body (bind_stmt));
return false;
}
/* Return the "fn spec" string for call STMT. */
static const_tree
-gimple_call_fnspec (const_gimple stmt)
+gimple_call_fnspec (const gcall *stmt)
{
tree type, attr;
/* Detects argument flags for argument number ARG on call STMT. */
int
-gimple_call_arg_flags (const_gimple stmt, unsigned arg)
+gimple_call_arg_flags (const gcall *stmt, unsigned arg)
{
const_tree attr = gimple_call_fnspec (stmt);
/* Detects return flags for the call STMT. */
int
-gimple_call_return_flags (const_gimple stmt)
+gimple_call_return_flags (const gcall *stmt)
{
const_tree attr;
tree t;
int uid;
- t = gimple_label_label (stmt);
+ t = gimple_label_label (as_a <glabel *> (stmt));
uid = LABEL_DECL_UID (t);
if (uid == -1)
{
switch (gimple_code (stmt))
{
case GIMPLE_BIND:
- new_seq = gimple_seq_copy (gimple_bind_body (stmt));
- gimple_bind_set_body (copy, new_seq);
- gimple_bind_set_vars (copy, unshare_expr (gimple_bind_vars (stmt)));
- gimple_bind_set_block (copy, gimple_bind_block (stmt));
+ {
+ gbind *bind_stmt = as_a <gbind *> (stmt);
+ gbind *bind_copy = as_a <gbind *> (copy);
+ new_seq = gimple_seq_copy (gimple_bind_body (bind_stmt));
+ gimple_bind_set_body (bind_copy, new_seq);
+ gimple_bind_set_vars (bind_copy,
+ unshare_expr (gimple_bind_vars (bind_stmt)));
+ gimple_bind_set_block (bind_copy, gimple_bind_block (bind_stmt));
+ }
break;
case GIMPLE_CATCH:
- new_seq = gimple_seq_copy (gimple_catch_handler (stmt));
- gimple_catch_set_handler (copy, new_seq);
- t = unshare_expr (gimple_catch_types (stmt));
- gimple_catch_set_types (copy, t);
+ {
+ gcatch *catch_stmt = as_a <gcatch *> (stmt);
+ gcatch *catch_copy = as_a <gcatch *> (copy);
+ new_seq = gimple_seq_copy (gimple_catch_handler (catch_stmt));
+ gimple_catch_set_handler (catch_copy, new_seq);
+ t = unshare_expr (gimple_catch_types (catch_stmt));
+ gimple_catch_set_types (catch_copy, t);
+ }
break;
case GIMPLE_EH_FILTER:
- new_seq = gimple_seq_copy (gimple_eh_filter_failure (stmt));
- gimple_eh_filter_set_failure (copy, new_seq);
- t = unshare_expr (gimple_eh_filter_types (stmt));
- gimple_eh_filter_set_types (copy, t);
+ {
+ geh_filter *eh_filter_stmt = as_a <geh_filter *> (stmt);
+ geh_filter *eh_filter_copy = as_a <geh_filter *> (copy);
+ new_seq
+ = gimple_seq_copy (gimple_eh_filter_failure (eh_filter_stmt));
+ gimple_eh_filter_set_failure (eh_filter_copy, new_seq);
+ t = unshare_expr (gimple_eh_filter_types (eh_filter_stmt));
+ gimple_eh_filter_set_types (eh_filter_copy, t);
+ }
break;
case GIMPLE_EH_ELSE:
- new_seq = gimple_seq_copy (gimple_eh_else_n_body (stmt));
- gimple_eh_else_set_n_body (copy, new_seq);
- new_seq = gimple_seq_copy (gimple_eh_else_e_body (stmt));
- gimple_eh_else_set_e_body (copy, new_seq);
+ {
+ geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
+ geh_else *eh_else_copy = as_a <geh_else *> (copy);
+ new_seq = gimple_seq_copy (gimple_eh_else_n_body (eh_else_stmt));
+ gimple_eh_else_set_n_body (eh_else_copy, new_seq);
+ new_seq = gimple_seq_copy (gimple_eh_else_e_body (eh_else_stmt));
+ gimple_eh_else_set_e_body (eh_else_copy, new_seq);
+ }
break;
case GIMPLE_TRY:
- new_seq = gimple_seq_copy (gimple_try_eval (stmt));
- gimple_try_set_eval (copy, new_seq);
- new_seq = gimple_seq_copy (gimple_try_cleanup (stmt));
- gimple_try_set_cleanup (copy, new_seq);
+ {
+ gtry *try_stmt = as_a <gtry *> (stmt);
+ gtry *try_copy = as_a <gtry *> (copy);
+ new_seq = gimple_seq_copy (gimple_try_eval (try_stmt));
+ gimple_try_set_eval (try_copy, new_seq);
+ new_seq = gimple_seq_copy (gimple_try_cleanup (try_stmt));
+ gimple_try_set_cleanup (try_copy, new_seq);
+ }
break;
case GIMPLE_OMP_FOR:
t = unshare_expr (gimple_omp_for_clauses (stmt));
gimple_omp_for_set_clauses (copy, t);
{
- gimple_statement_omp_for *omp_for_copy =
- as_a <gimple_statement_omp_for *> (copy);
+ gomp_for *omp_for_copy = as_a <gomp_for *> (copy);
omp_for_copy->iter = ggc_vec_alloc<gimple_omp_for_iter>
( gimple_omp_for_collapse (stmt));
}
goto copy_omp_body;
case GIMPLE_OMP_PARALLEL:
- t = unshare_expr (gimple_omp_parallel_clauses (stmt));
- gimple_omp_parallel_set_clauses (copy, t);
- t = unshare_expr (gimple_omp_parallel_child_fn (stmt));
- gimple_omp_parallel_set_child_fn (copy, t);
- t = unshare_expr (gimple_omp_parallel_data_arg (stmt));
- gimple_omp_parallel_set_data_arg (copy, t);
+ {
+ gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
+ gomp_parallel *omp_par_copy = as_a <gomp_parallel *> (copy);
+ t = unshare_expr (gimple_omp_parallel_clauses (omp_par_stmt));
+ gimple_omp_parallel_set_clauses (omp_par_copy, t);
+ t = unshare_expr (gimple_omp_parallel_child_fn (omp_par_stmt));
+ gimple_omp_parallel_set_child_fn (omp_par_copy, t);
+ t = unshare_expr (gimple_omp_parallel_data_arg (omp_par_stmt));
+ gimple_omp_parallel_set_data_arg (omp_par_copy, t);
+ }
goto copy_omp_body;
case GIMPLE_OMP_TASK:
goto copy_omp_body;
case GIMPLE_OMP_CRITICAL:
- t = unshare_expr (gimple_omp_critical_name (stmt));
- gimple_omp_critical_set_name (copy, t);
+ t = unshare_expr (gimple_omp_critical_name (
+ as_a <gomp_critical *> (stmt)));
+ gimple_omp_critical_set_name (as_a <gomp_critical *> (copy), t);
goto copy_omp_body;
case GIMPLE_OMP_SECTIONS:
break;
case GIMPLE_TRANSACTION:
- new_seq = gimple_seq_copy (gimple_transaction_body (stmt));
- gimple_transaction_set_body (copy, new_seq);
+ new_seq = gimple_seq_copy (gimple_transaction_body (
+ as_a <gtransaction *> (stmt)));
+ gimple_transaction_set_body (as_a <gtransaction *> (copy),
+ new_seq);
break;
case GIMPLE_WITH_CLEANUP_EXPR:
return true;
if (gimple_code (s) == GIMPLE_ASM
- && gimple_asm_volatile_p (s))
+ && gimple_asm_volatile_p (as_a <const gasm *> (s)))
return true;
if (is_gimple_call (s))
switch (gimple_code (s))
{
case GIMPLE_ASM:
- return gimple_asm_volatile_p (s);
+ return gimple_asm_volatile_p (as_a <gasm *> (s));
case GIMPLE_CALL:
t = gimple_call_fndecl (s);
/* Build a GIMPLE_CALL identical to STMT but skipping the arguments in
the positions marked by the set ARGS_TO_SKIP. */
-gimple
-gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
+gcall *
+gimple_call_copy_skip_args (gcall *stmt, bitmap args_to_skip)
{
int i;
int nargs = gimple_call_num_args (stmt);
auto_vec<tree> vargs (nargs);
- gimple new_stmt;
+ gcall *new_stmt;
for (i = 0; i < nargs; i++)
if (!bitmap_bit_p (args_to_skip, i))
GIMPLE_ASM. */
bool
-gimple_asm_clobbers_memory_p (const_gimple stmt)
+gimple_asm_clobbers_memory_p (const gasm *stmt)
{
unsigned i;
/* If this function is marked as returning non-null, then we can
infer OP is non-null if it is used in the return statement. */
- if (attribute
- && gimple_code (stmt) == GIMPLE_RETURN
- && gimple_return_retval (stmt)
- && operand_equal_p (gimple_return_retval (stmt), op, 0)
- && lookup_attribute ("returns_nonnull",
- TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
- return true;
+ if (attribute)
+ if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
+ if (gimple_return_retval (return_stmt)
+ && operand_equal_p (gimple_return_retval (return_stmt), op, 0)
+ && lookup_attribute ("returns_nonnull",
+ TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl))))
+ return true;
return false;
}
typedef gimple gimple_seq_node;
-/* For each block, the PHI nodes that need to be rewritten are stored into
- these vectors. */
-typedef vec<gimple> gimple_vec;
-
enum gimple_code {
#define DEFGSCODE(SYM, STRING, STRUCT) SYM,
#include "gimple.def"
/* Call statements that take both memory and register operands. */
struct GTY((tag("GSS_CALL")))
- gimple_statement_call : public gimple_statement_with_memory_ops_base
+ gcall : public gimple_statement_with_memory_ops_base
{
/* [ WORD 1-9 ] : base class */
/* GIMPLE_BIND */
struct GTY((tag("GSS_BIND")))
- gimple_statement_bind : public gimple_statement_base
+ gbind : public gimple_statement_base
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_CATCH */
struct GTY((tag("GSS_CATCH")))
- gimple_statement_catch : public gimple_statement_base
+ gcatch : public gimple_statement_base
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_EH_FILTER */
struct GTY((tag("GSS_EH_FILTER")))
- gimple_statement_eh_filter : public gimple_statement_base
+ geh_filter : public gimple_statement_base
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_EH_ELSE */
struct GTY((tag("GSS_EH_ELSE")))
- gimple_statement_eh_else : public gimple_statement_base
+ geh_else : public gimple_statement_base
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_EH_MUST_NOT_THROW */
struct GTY((tag("GSS_EH_MNT")))
- gimple_statement_eh_mnt : public gimple_statement_base
+ geh_mnt : public gimple_statement_base
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_PHI */
struct GTY((tag("GSS_PHI")))
- gimple_statement_phi : public gimple_statement_base
+ gphi : public gimple_statement_base
{
/* [ WORD 1-6 ] : base class */
};
struct GTY((tag("GSS_EH_CTRL")))
- gimple_statement_resx : public gimple_statement_eh_ctrl
+ gresx : public gimple_statement_eh_ctrl
{
/* No extra fields; adds invariant:
stmt->code == GIMPLE_RESX. */
};
struct GTY((tag("GSS_EH_CTRL")))
- gimple_statement_eh_dispatch : public gimple_statement_eh_ctrl
+ geh_dispatch : public gimple_statement_eh_ctrl
{
/* No extra fields; adds invariant:
stmt->code == GIMPLE_EH_DISPATH. */
/* GIMPLE_TRY */
struct GTY((tag("GSS_TRY")))
- gimple_statement_try : public gimple_statement_base
+ gtry : public gimple_statement_base
{
/* [ WORD 1-6 ] : base class */
/* GIMPLE_ASM */
struct GTY((tag("GSS_ASM")))
- gimple_statement_asm : public gimple_statement_with_memory_ops_base
+ gasm : public gimple_statement_with_memory_ops_base
{
/* [ WORD 1-9 ] : base class */
/* GIMPLE_OMP_CRITICAL */
struct GTY((tag("GSS_OMP_CRITICAL")))
- gimple_statement_omp_critical : public gimple_statement_omp
+ gomp_critical : public gimple_statement_omp
{
/* [ WORD 1-7 ] : base class */
/* GIMPLE_OMP_FOR */
struct GTY((tag("GSS_OMP_FOR")))
- gimple_statement_omp_for : public gimple_statement_omp
+ gomp_for : public gimple_statement_omp
{
/* [ WORD 1-7 ] : base class */
/* GIMPLE_OMP_PARALLEL */
struct GTY((tag("GSS_OMP_PARALLEL_LAYOUT")))
- gimple_statement_omp_parallel : public gimple_statement_omp_taskreg
+ gomp_parallel : public gimple_statement_omp_taskreg
{
/* No extra fields; adds invariant:
stmt->code == GIMPLE_OMP_PARALLEL. */
};
struct GTY((tag("GSS_OMP_PARALLEL_LAYOUT")))
- gimple_statement_omp_target : public gimple_statement_omp_parallel_layout
+ gomp_target : public gimple_statement_omp_parallel_layout
{
/* No extra fields; adds invariant:
stmt->code == GIMPLE_OMP_TARGET. */
/* GIMPLE_OMP_TASK */
struct GTY((tag("GSS_OMP_TASK")))
- gimple_statement_omp_task : public gimple_statement_omp_taskreg
+ gomp_task : public gimple_statement_omp_taskreg
{
/* [ WORD 1-10 ] : base class */
/* GIMPLE_OMP_SECTIONS */
struct GTY((tag("GSS_OMP_SECTIONS")))
- gimple_statement_omp_sections : public gimple_statement_omp
+ gomp_sections : public gimple_statement_omp
{
/* [ WORD 1-7 ] : base class */
do not need the body field. */
struct GTY((tag("GSS_OMP_CONTINUE")))
- gimple_statement_omp_continue : public gimple_statement_base
+ gomp_continue : public gimple_statement_base
{
/* [ WORD 1-6 ] : base class */
};
struct GTY((tag("GSS_OMP_SINGLE_LAYOUT")))
- gimple_statement_omp_single : public gimple_statement_omp_single_layout
+ gomp_single : public gimple_statement_omp_single_layout
{
/* No extra fields; adds invariant:
stmt->code == GIMPLE_OMP_SINGLE. */
};
struct GTY((tag("GSS_OMP_SINGLE_LAYOUT")))
- gimple_statement_omp_teams : public gimple_statement_omp_single_layout
+ gomp_teams : public gimple_statement_omp_single_layout
{
/* No extra fields; adds invariant:
stmt->code == GIMPLE_OMP_TEAMS. */
contains a sequence, which we don't need here. */
struct GTY((tag("GSS_OMP_ATOMIC_LOAD")))
- gimple_statement_omp_atomic_load : public gimple_statement_base
+ gomp_atomic_load : public gimple_statement_base
{
/* [ WORD 1-6 ] : base class */
};
struct GTY((tag("GSS_OMP_ATOMIC_STORE_LAYOUT")))
- gimple_statement_omp_atomic_store :
+ gomp_atomic_store :
public gimple_statement_omp_atomic_store_layout
{
/* No extra fields; adds invariant:
#define GTMA_HAS_NO_INSTRUMENTATION (1u << 7)
struct GTY((tag("GSS_TRANSACTION")))
- gimple_statement_transaction : public gimple_statement_with_memory_ops_base
+ gtransaction : public gimple_statement_with_memory_ops_base
{
/* [ WORD 1-9 ] : base class */
};
#undef DEFGSSTRUCT
+/* A statement with the invariant that
+ stmt->code == GIMPLE_COND
+ i.e. a conditional jump statement. */
+
+struct GTY((tag("GSS_WITH_OPS")))
+ gcond : public gimple_statement_with_ops
+{
+ /* no additional fields; this uses the layout for GSS_WITH_OPS. */
+};
+
+/* A statement with the invariant that
+ stmt->code == GIMPLE_DEBUG
+ i.e. a debug statement. */
+
+struct GTY((tag("GSS_WITH_OPS")))
+ gdebug : public gimple_statement_with_ops
+{
+ /* no additional fields; this uses the layout for GSS_WITH_OPS. */
+};
+
+/* A statement with the invariant that
+ stmt->code == GIMPLE_GOTO
+ i.e. a goto statement. */
+
+struct GTY((tag("GSS_WITH_OPS")))
+ ggoto : public gimple_statement_with_ops
+{
+ /* no additional fields; this uses the layout for GSS_WITH_OPS. */
+};
+
+/* A statement with the invariant that
+ stmt->code == GIMPLE_LABEL
+ i.e. a label statement. */
+
+struct GTY((tag("GSS_WITH_OPS")))
+ glabel : public gimple_statement_with_ops
+{
+ /* no additional fields; this uses the layout for GSS_WITH_OPS. */
+};
+
+/* A statement with the invariant that
+ stmt->code == GIMPLE_SWITCH
+ i.e. a switch statement. */
+
+struct GTY((tag("GSS_WITH_OPS")))
+ gswitch : public gimple_statement_with_ops
+{
+ /* no additional fields; this uses the layout for GSS_WITH_OPS. */
+};
+
+/* A statement with the invariant that
+ stmt->code == GIMPLE_ASSIGN
+ i.e. an assignment statement. */
+
+struct GTY((tag("GSS_WITH_MEM_OPS")))
+ gassign : public gimple_statement_with_memory_ops
+{
+ /* no additional fields; this uses the layout for GSS_WITH_MEM_OPS. */
+};
+
+/* A statement with the invariant that
+ stmt->code == GIMPLE_RETURN
+ i.e. a return statement. */
+
+struct GTY((tag("GSS_WITH_MEM_OPS")))
+ greturn : public gimple_statement_with_memory_ops
+{
+ /* no additional fields; this uses the layout for GSS_WITH_MEM_OPS. */
+};
+
template <>
template <>
inline bool
-is_a_helper <gimple_statement_asm *>::test (gimple gs)
+is_a_helper <gasm *>::test (gimple gs)
{
return gs->code == GIMPLE_ASM;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_bind *>::test (gimple gs)
+is_a_helper <gassign *>::test (gimple gs)
+{
+ return gs->code == GIMPLE_ASSIGN;
+}
+
+template <>
+template <>
+inline bool
+is_a_helper <gbind *>::test (gimple gs)
{
return gs->code == GIMPLE_BIND;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_call *>::test (gimple gs)
+is_a_helper <gcall *>::test (gimple gs)
{
return gs->code == GIMPLE_CALL;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_catch *>::test (gimple gs)
+is_a_helper <gcatch *>::test (gimple gs)
{
return gs->code == GIMPLE_CATCH;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_resx *>::test (gimple gs)
+is_a_helper <gcond *>::test (gimple gs)
+{
+ return gs->code == GIMPLE_COND;
+}
+
+template <>
+template <>
+inline bool
+is_a_helper <gdebug *>::test (gimple gs)
+{
+ return gs->code == GIMPLE_DEBUG;
+}
+
+template <>
+template <>
+inline bool
+is_a_helper <ggoto *>::test (gimple gs)
+{
+ return gs->code == GIMPLE_GOTO;
+}
+
+template <>
+template <>
+inline bool
+is_a_helper <glabel *>::test (gimple gs)
+{
+ return gs->code == GIMPLE_LABEL;
+}
+
+template <>
+template <>
+inline bool
+is_a_helper <gresx *>::test (gimple gs)
{
return gs->code == GIMPLE_RESX;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_eh_dispatch *>::test (gimple gs)
+is_a_helper <geh_dispatch *>::test (gimple gs)
{
return gs->code == GIMPLE_EH_DISPATCH;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_eh_else *>::test (gimple gs)
+is_a_helper <geh_else *>::test (gimple gs)
{
return gs->code == GIMPLE_EH_ELSE;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_eh_filter *>::test (gimple gs)
+is_a_helper <geh_filter *>::test (gimple gs)
{
return gs->code == GIMPLE_EH_FILTER;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_eh_mnt *>::test (gimple gs)
+is_a_helper <geh_mnt *>::test (gimple gs)
{
return gs->code == GIMPLE_EH_MUST_NOT_THROW;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_atomic_load *>::test (gimple gs)
+is_a_helper <gomp_atomic_load *>::test (gimple gs)
{
return gs->code == GIMPLE_OMP_ATOMIC_LOAD;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_atomic_store *>::test (gimple gs)
+is_a_helper <gomp_atomic_store *>::test (gimple gs)
{
return gs->code == GIMPLE_OMP_ATOMIC_STORE;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_continue *>::test (gimple gs)
+is_a_helper <gomp_continue *>::test (gimple gs)
{
return gs->code == GIMPLE_OMP_CONTINUE;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_critical *>::test (gimple gs)
+is_a_helper <gomp_critical *>::test (gimple gs)
{
return gs->code == GIMPLE_OMP_CRITICAL;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_for *>::test (gimple gs)
+is_a_helper <gomp_for *>::test (gimple gs)
{
return gs->code == GIMPLE_OMP_FOR;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_parallel *>::test (gimple gs)
+is_a_helper <gomp_parallel *>::test (gimple gs)
{
return gs->code == GIMPLE_OMP_PARALLEL;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_target *>::test (gimple gs)
+is_a_helper <gomp_target *>::test (gimple gs)
{
return gs->code == GIMPLE_OMP_TARGET;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_sections *>::test (gimple gs)
+is_a_helper <gomp_sections *>::test (gimple gs)
{
return gs->code == GIMPLE_OMP_SECTIONS;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_single *>::test (gimple gs)
+is_a_helper <gomp_single *>::test (gimple gs)
{
return gs->code == GIMPLE_OMP_SINGLE;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_teams *>::test (gimple gs)
+is_a_helper <gomp_teams *>::test (gimple gs)
{
return gs->code == GIMPLE_OMP_TEAMS;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_omp_task *>::test (gimple gs)
+is_a_helper <gomp_task *>::test (gimple gs)
{
return gs->code == GIMPLE_OMP_TASK;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_phi *>::test (gimple gs)
+is_a_helper <gphi *>::test (gimple gs)
{
return gs->code == GIMPLE_PHI;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_transaction *>::test (gimple gs)
+is_a_helper <greturn *>::test (gimple gs)
+{
+ return gs->code == GIMPLE_RETURN;
+}
+
+template <>
+template <>
+inline bool
+is_a_helper <gswitch *>::test (gimple gs)
+{
+ return gs->code == GIMPLE_SWITCH;
+}
+
+template <>
+template <>
+inline bool
+is_a_helper <gtransaction *>::test (gimple gs)
{
return gs->code == GIMPLE_TRANSACTION;
}
template <>
template <>
inline bool
-is_a_helper <gimple_statement_try *>::test (gimple gs)
+is_a_helper <gtry *>::test (gimple gs)
{
return gs->code == GIMPLE_TRY;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_asm *>::test (const_gimple gs)
+is_a_helper <const gasm *>::test (const_gimple gs)
{
return gs->code == GIMPLE_ASM;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_bind *>::test (const_gimple gs)
+is_a_helper <const gbind *>::test (const_gimple gs)
{
return gs->code == GIMPLE_BIND;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_call *>::test (const_gimple gs)
+is_a_helper <const gcall *>::test (const_gimple gs)
{
return gs->code == GIMPLE_CALL;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_catch *>::test (const_gimple gs)
+is_a_helper <const gcatch *>::test (const_gimple gs)
{
return gs->code == GIMPLE_CATCH;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_resx *>::test (const_gimple gs)
+is_a_helper <const gresx *>::test (const_gimple gs)
{
return gs->code == GIMPLE_RESX;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_eh_dispatch *>::test (const_gimple gs)
+is_a_helper <const geh_dispatch *>::test (const_gimple gs)
{
return gs->code == GIMPLE_EH_DISPATCH;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_eh_filter *>::test (const_gimple gs)
+is_a_helper <const geh_filter *>::test (const_gimple gs)
{
return gs->code == GIMPLE_EH_FILTER;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_atomic_load *>::test (const_gimple gs)
+is_a_helper <const gomp_atomic_load *>::test (const_gimple gs)
{
return gs->code == GIMPLE_OMP_ATOMIC_LOAD;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_atomic_store *>::test (const_gimple gs)
+is_a_helper <const gomp_atomic_store *>::test (const_gimple gs)
{
return gs->code == GIMPLE_OMP_ATOMIC_STORE;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_continue *>::test (const_gimple gs)
+is_a_helper <const gomp_continue *>::test (const_gimple gs)
{
return gs->code == GIMPLE_OMP_CONTINUE;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_critical *>::test (const_gimple gs)
+is_a_helper <const gomp_critical *>::test (const_gimple gs)
{
return gs->code == GIMPLE_OMP_CRITICAL;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_for *>::test (const_gimple gs)
+is_a_helper <const gomp_for *>::test (const_gimple gs)
{
return gs->code == GIMPLE_OMP_FOR;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_parallel *>::test (const_gimple gs)
+is_a_helper <const gomp_parallel *>::test (const_gimple gs)
{
return gs->code == GIMPLE_OMP_PARALLEL;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_target *>::test (const_gimple gs)
+is_a_helper <const gomp_target *>::test (const_gimple gs)
{
return gs->code == GIMPLE_OMP_TARGET;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_sections *>::test (const_gimple gs)
+is_a_helper <const gomp_sections *>::test (const_gimple gs)
{
return gs->code == GIMPLE_OMP_SECTIONS;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_single *>::test (const_gimple gs)
+is_a_helper <const gomp_single *>::test (const_gimple gs)
{
return gs->code == GIMPLE_OMP_SINGLE;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_teams *>::test (const_gimple gs)
+is_a_helper <const gomp_teams *>::test (const_gimple gs)
{
return gs->code == GIMPLE_OMP_TEAMS;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_omp_task *>::test (const_gimple gs)
+is_a_helper <const gomp_task *>::test (const_gimple gs)
{
return gs->code == GIMPLE_OMP_TASK;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_phi *>::test (const_gimple gs)
+is_a_helper <const gphi *>::test (const_gimple gs)
{
return gs->code == GIMPLE_PHI;
}
template <>
template <>
inline bool
-is_a_helper <const gimple_statement_transaction *>::test (const_gimple gs)
+is_a_helper <const gtransaction *>::test (const_gimple gs)
{
return gs->code == GIMPLE_TRANSACTION;
}
#define gimple_alloc(c, n) gimple_alloc_stat (c, n MEM_STAT_INFO)
gimple gimple_alloc_stat (enum gimple_code, unsigned MEM_STAT_DECL);
-gimple gimple_build_return (tree);
-void gimple_call_reset_alias_info (gimple);
-gimple gimple_build_call_vec (tree, vec<tree> );
-gimple gimple_build_call (tree, unsigned, ...);
-gimple gimple_build_call_valist (tree, unsigned, va_list);
-gimple gimple_build_call_internal (enum internal_fn, unsigned, ...);
-gimple gimple_build_call_internal_vec (enum internal_fn, vec<tree> );
-gimple gimple_build_call_from_tree (tree);
-gimple gimple_build_assign_stat (tree, tree MEM_STAT_DECL);
+greturn *gimple_build_return (tree);
+void gimple_call_reset_alias_info (gcall *);
+gcall *gimple_build_call_vec (tree, vec<tree> );
+gcall *gimple_build_call (tree, unsigned, ...);
+gcall *gimple_build_call_valist (tree, unsigned, va_list);
+gcall *gimple_build_call_internal (enum internal_fn, unsigned, ...);
+gcall *gimple_build_call_internal_vec (enum internal_fn, vec<tree> );
+gcall *gimple_build_call_from_tree (tree);
+gassign *gimple_build_assign_stat (tree, tree MEM_STAT_DECL);
#define gimple_build_assign(l,r) gimple_build_assign_stat (l, r MEM_STAT_INFO)
-gimple gimple_build_assign_with_ops (enum tree_code, tree,
- tree, tree, tree CXX_MEM_STAT_INFO);
-gimple gimple_build_assign_with_ops (enum tree_code, tree,
- tree, tree CXX_MEM_STAT_INFO);
-gimple gimple_build_assign_with_ops (enum tree_code, tree,
- tree CXX_MEM_STAT_INFO);
-gimple gimple_build_cond (enum tree_code, tree, tree, tree, tree);
-gimple gimple_build_cond_from_tree (tree, tree, tree);
-void gimple_cond_set_condition_from_tree (gimple, tree);
-gimple gimple_build_label (tree label);
-gimple gimple_build_goto (tree dest);
+gassign *gimple_build_assign_with_ops (enum tree_code, tree,
+ tree, tree, tree CXX_MEM_STAT_INFO);
+gassign *gimple_build_assign_with_ops (enum tree_code, tree,
+ tree, tree CXX_MEM_STAT_INFO);
+gassign *gimple_build_assign_with_ops (enum tree_code, tree,
+ tree CXX_MEM_STAT_INFO);
+gcond *gimple_build_cond (enum tree_code, tree, tree, tree, tree);
+gcond *gimple_build_cond_from_tree (tree, tree, tree);
+void gimple_cond_set_condition_from_tree (gcond *, tree);
+glabel *gimple_build_label (tree label);
+ggoto *gimple_build_goto (tree dest);
gimple gimple_build_nop (void);
-gimple gimple_build_bind (tree, gimple_seq, tree);
-gimple gimple_build_asm_vec (const char *, vec<tree, va_gc> *,
- vec<tree, va_gc> *, vec<tree, va_gc> *,
- vec<tree, va_gc> *);
-gimple gimple_build_catch (tree, gimple_seq);
-gimple gimple_build_eh_filter (tree, gimple_seq);
-gimple gimple_build_eh_must_not_throw (tree);
-gimple gimple_build_eh_else (gimple_seq, gimple_seq);
-gimple_statement_try *gimple_build_try (gimple_seq, gimple_seq,
+gbind *gimple_build_bind (tree, gimple_seq, tree);
+gasm *gimple_build_asm_vec (const char *, vec<tree, va_gc> *,
+ vec<tree, va_gc> *, vec<tree, va_gc> *,
+ vec<tree, va_gc> *);
+gcatch *gimple_build_catch (tree, gimple_seq);
+geh_filter *gimple_build_eh_filter (tree, gimple_seq);
+geh_mnt *gimple_build_eh_must_not_throw (tree);
+geh_else *gimple_build_eh_else (gimple_seq, gimple_seq);
+gtry *gimple_build_try (gimple_seq, gimple_seq,
enum gimple_try_flags);
gimple gimple_build_wce (gimple_seq);
-gimple gimple_build_resx (int);
-gimple gimple_build_switch_nlabels (unsigned, tree, tree);
-gimple gimple_build_switch (tree, tree, vec<tree> );
-gimple gimple_build_eh_dispatch (int);
-gimple gimple_build_debug_bind_stat (tree, tree, gimple MEM_STAT_DECL);
+gresx *gimple_build_resx (int);
+gswitch *gimple_build_switch_nlabels (unsigned, tree, tree);
+gswitch *gimple_build_switch (tree, tree, vec<tree> );
+geh_dispatch *gimple_build_eh_dispatch (int);
+gdebug *gimple_build_debug_bind_stat (tree, tree, gimple MEM_STAT_DECL);
#define gimple_build_debug_bind(var,val,stmt) \
gimple_build_debug_bind_stat ((var), (val), (stmt) MEM_STAT_INFO)
-gimple gimple_build_debug_source_bind_stat (tree, tree, gimple MEM_STAT_DECL);
+gdebug *gimple_build_debug_source_bind_stat (tree, tree, gimple MEM_STAT_DECL);
#define gimple_build_debug_source_bind(var,val,stmt) \
gimple_build_debug_source_bind_stat ((var), (val), (stmt) MEM_STAT_INFO)
-gimple gimple_build_omp_critical (gimple_seq, tree);
-gimple gimple_build_omp_for (gimple_seq, int, tree, size_t, gimple_seq);
-gimple gimple_build_omp_parallel (gimple_seq, tree, tree, tree);
-gimple gimple_build_omp_task (gimple_seq, tree, tree, tree, tree, tree, tree);
+gomp_critical *gimple_build_omp_critical (gimple_seq, tree);
+gomp_for *gimple_build_omp_for (gimple_seq, int, tree, size_t, gimple_seq);
+gomp_parallel *gimple_build_omp_parallel (gimple_seq, tree, tree, tree);
+gomp_task *gimple_build_omp_task (gimple_seq, tree, tree, tree, tree,
+ tree, tree);
gimple gimple_build_omp_section (gimple_seq);
gimple gimple_build_omp_master (gimple_seq);
gimple gimple_build_omp_taskgroup (gimple_seq);
-gimple gimple_build_omp_continue (tree, tree);
+gomp_continue *gimple_build_omp_continue (tree, tree);
gimple gimple_build_omp_ordered (gimple_seq);
gimple gimple_build_omp_return (bool);
-gimple gimple_build_omp_sections (gimple_seq, tree);
+gomp_sections *gimple_build_omp_sections (gimple_seq, tree);
gimple gimple_build_omp_sections_switch (void);
-gimple gimple_build_omp_single (gimple_seq, tree);
-gimple gimple_build_omp_target (gimple_seq, int, tree);
-gimple gimple_build_omp_teams (gimple_seq, tree);
-gimple gimple_build_omp_atomic_load (tree, tree);
-gimple gimple_build_omp_atomic_store (tree);
-gimple gimple_build_transaction (gimple_seq, tree);
+gomp_single *gimple_build_omp_single (gimple_seq, tree);
+gomp_target *gimple_build_omp_target (gimple_seq, int, tree);
+gomp_teams *gimple_build_omp_teams (gimple_seq, tree);
+gomp_atomic_load *gimple_build_omp_atomic_load (tree, tree);
+gomp_atomic_store *gimple_build_omp_atomic_store (tree);
+gtransaction *gimple_build_transaction (gimple_seq, tree);
gimple gimple_build_predict (enum br_predictor, enum prediction);
extern void gimple_seq_add_stmt (gimple_seq *, gimple);
extern void gimple_seq_add_stmt_without_update (gimple_seq *, gimple);
gimple_seq gimple_seq_copy (gimple_seq);
bool gimple_call_same_target_p (const_gimple, const_gimple);
int gimple_call_flags (const_gimple);
-int gimple_call_arg_flags (const_gimple, unsigned);
-int gimple_call_return_flags (const_gimple);
+int gimple_call_arg_flags (const gcall *, unsigned);
+int gimple_call_return_flags (const gcall *);
bool gimple_assign_copy_p (gimple);
bool gimple_assign_ssa_name_copy_p (gimple);
bool gimple_assign_unary_nop_p (gimple);
extern void dump_gimple_statistics (void);
unsigned get_gimple_rhs_num_ops (enum tree_code);
extern tree canonicalize_cond_expr_cond (tree);
-gimple gimple_call_copy_skip_args (gimple, bitmap);
+gcall *gimple_call_copy_skip_args (gcall *, bitmap);
extern bool gimple_compare_field_offset (tree, tree);
extern tree gimple_unsigned_type (tree);
extern tree gimple_signed_type (tree);
extern bool gimple_call_builtin_p (const_gimple);
extern bool gimple_call_builtin_p (const_gimple, enum built_in_class);
extern bool gimple_call_builtin_p (const_gimple, enum built_in_function);
-extern bool gimple_asm_clobbers_memory_p (const_gimple);
+extern bool gimple_asm_clobbers_memory_p (const gasm *);
extern void dump_decl_set (FILE *, bitmap);
extern bool nonfreeing_call_p (gimple);
extern bool infer_nonnull_range (gimple, tree, bool, bool);
return n;
}
+/* Return the first statement in GIMPLE sequence S as a gbind *,
+ verifying that it has code GIMPLE_BIND in a checked build. */
+
+static inline gbind *
+gimple_seq_first_stmt_as_a_bind (gimple_seq s)
+{
+ gimple_seq_node n = gimple_seq_first (s);
+ return as_a <gbind *> (n);
+}
+
/* Return the last node in GIMPLE sequence S. */
/* Returns true if GS is a nontemporal move. */
static inline bool
-gimple_assign_nontemporal_move_p (const_gimple gs)
+gimple_assign_nontemporal_move_p (const gassign *gs)
{
- GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
return gs->nontemporal_move;
}
gimple_call_internal_fn (const_gimple gs)
{
gcc_gimple_checking_assert (gimple_call_internal_p (gs));
- return static_cast <const gimple_statement_call *> (gs)->u.internal_fn;
+ return static_cast <const gcall *> (gs)->u.internal_fn;
}
/* If CTRL_ALTERING_P is true, mark GIMPLE_CALL S to be a stmt
static inline tree
gimple_call_fntype (const_gimple gs)
{
- const gimple_statement_call *call_stmt =
- as_a <const gimple_statement_call *> (gs);
+ const gcall *call_stmt = as_a <const gcall *> (gs);
if (gimple_call_internal_p (gs))
return NULL_TREE;
return call_stmt->u.fntype;
}
-/* Set the type of the function called by GS to FNTYPE. */
+/* Set the type of the function called by CALL_STMT to FNTYPE. */
static inline void
-gimple_call_set_fntype (gimple gs, tree fntype)
+gimple_call_set_fntype (gcall *call_stmt, tree fntype)
{
- gimple_statement_call *call_stmt = as_a <gimple_statement_call *> (gs);
- gcc_gimple_checking_assert (!gimple_call_internal_p (gs));
+ gcc_gimple_checking_assert (!gimple_call_internal_p (call_stmt));
call_stmt->u.fntype = fntype;
}
/* Set FN to be the function called by call statement GS. */
static inline void
-gimple_call_set_fn (gimple gs, tree fn)
+gimple_call_set_fn (gcall *gs, tree fn)
{
- GIMPLE_CHECK (gs, GIMPLE_CALL);
gcc_gimple_checking_assert (!gimple_call_internal_p (gs));
gimple_set_op (gs, 1, fn);
}
}
-/* Set internal function FN to be the function called by call statement GS. */
+/* Set internal function FN to be the function called by call statement
+   CALL_STMT.  */
static inline void
-gimple_call_set_internal_fn (gimple gs, enum internal_fn fn)
+gimple_call_set_internal_fn (gcall *call_stmt, enum internal_fn fn)
{
- gimple_statement_call *call_stmt = as_a <gimple_statement_call *> (gs);
- gcc_gimple_checking_assert (gimple_call_internal_p (gs));
+ gcc_gimple_checking_assert (gimple_call_internal_p (call_stmt));
call_stmt->u.internal_fn = fn;
}
/* Return the type returned by call statement GS. */
static inline tree
-gimple_call_return_type (const_gimple gs)
+gimple_call_return_type (const gcall *gs)
{
tree type = gimple_call_fntype (gs);
}
-/* Return a pointer to the static chain for call statement GS. */
+/* Return a pointer to the static chain for call statement CALL_STMT. */
static inline tree *
-gimple_call_chain_ptr (const_gimple gs)
+gimple_call_chain_ptr (const gcall *call_stmt)
{
- GIMPLE_CHECK (gs, GIMPLE_CALL);
- return gimple_op_ptr (gs, 2);
+ return gimple_op_ptr (call_stmt, 2);
}
-/* Set CHAIN to be the static chain for call statement GS. */
+/* Set CHAIN to be the static chain for call statement CALL_STMT. */
static inline void
-gimple_call_set_chain (gimple gs, tree chain)
+gimple_call_set_chain (gcall *call_stmt, tree chain)
{
- GIMPLE_CHECK (gs, GIMPLE_CALL);
-
- gimple_set_op (gs, 2, chain);
+ gimple_set_op (call_stmt, 2, chain);
}
candidate for tail call optimization. */
static inline void
-gimple_call_set_tail (gimple s, bool tail_p)
+gimple_call_set_tail (gcall *s, bool tail_p)
{
- GIMPLE_CHECK (s, GIMPLE_CALL);
if (tail_p)
s->subcode |= GF_CALL_TAILCALL;
else
/* Return true if GIMPLE_CALL S is marked as a tail call. */
static inline bool
-gimple_call_tail_p (gimple s)
+gimple_call_tail_p (gcall *s)
{
- GIMPLE_CHECK (s, GIMPLE_CALL);
return (s->subcode & GF_CALL_TAILCALL) != 0;
}
expansion as the return slot for calls that return in memory. */
static inline void
-gimple_call_set_return_slot_opt (gimple s, bool return_slot_opt_p)
+gimple_call_set_return_slot_opt (gcall *s, bool return_slot_opt_p)
{
- GIMPLE_CHECK (s, GIMPLE_CALL);
if (return_slot_opt_p)
s->subcode |= GF_CALL_RETURN_SLOT_OPT;
else
/* Return true if S is marked for return slot optimization. */
static inline bool
-gimple_call_return_slot_opt_p (gimple s)
+gimple_call_return_slot_opt_p (gcall *s)
{
- GIMPLE_CHECK (s, GIMPLE_CALL);
return (s->subcode & GF_CALL_RETURN_SLOT_OPT) != 0;
}
thunk to the thunked-to function. */
static inline void
-gimple_call_set_from_thunk (gimple s, bool from_thunk_p)
+gimple_call_set_from_thunk (gcall *s, bool from_thunk_p)
{
- GIMPLE_CHECK (s, GIMPLE_CALL);
if (from_thunk_p)
s->subcode |= GF_CALL_FROM_THUNK;
else
/* Return true if GIMPLE_CALL S is a jump from a thunk. */
static inline bool
-gimple_call_from_thunk_p (gimple s)
+gimple_call_from_thunk_p (gcall *s)
{
- GIMPLE_CHECK (s, GIMPLE_CALL);
return (s->subcode & GF_CALL_FROM_THUNK) != 0;
}
argument pack in its argument list. */
static inline void
-gimple_call_set_va_arg_pack (gimple s, bool pass_arg_pack_p)
+gimple_call_set_va_arg_pack (gcall *s, bool pass_arg_pack_p)
{
- GIMPLE_CHECK (s, GIMPLE_CALL);
if (pass_arg_pack_p)
s->subcode |= GF_CALL_VA_ARG_PACK;
else
argument pack in its argument list. */
static inline bool
-gimple_call_va_arg_pack_p (gimple s)
+gimple_call_va_arg_pack_p (gcall *s)
{
- GIMPLE_CHECK (s, GIMPLE_CALL);
return (s->subcode & GF_CALL_VA_ARG_PACK) != 0;
}
even if the called function can throw in other cases. */
static inline void
-gimple_call_set_nothrow (gimple s, bool nothrow_p)
+gimple_call_set_nothrow (gcall *s, bool nothrow_p)
{
- GIMPLE_CHECK (s, GIMPLE_CALL);
if (nothrow_p)
s->subcode |= GF_CALL_NOTHROW;
else
/* Return true if S is a nothrow call. */
static inline bool
-gimple_call_nothrow_p (gimple s)
+gimple_call_nothrow_p (gcall *s)
{
- GIMPLE_CHECK (s, GIMPLE_CALL);
return (gimple_call_flags (s) & ECF_NOTHROW) != 0;
}
stack growth even when they occur in loops. */
static inline void
-gimple_call_set_alloca_for_var (gimple s, bool for_var)
+gimple_call_set_alloca_for_var (gcall *s, bool for_var)
{
- GIMPLE_CHECK (s, GIMPLE_CALL);
if (for_var)
s->subcode |= GF_CALL_ALLOCA_FOR_VAR;
else
/* Return true of S is a call to builtin_alloca emitted for VLA objects. */
static inline bool
-gimple_call_alloca_for_var_p (gimple s)
+gimple_call_alloca_for_var_p (gcall *s)
{
- GIMPLE_CHECK (s, GIMPLE_CALL);
return (s->subcode & GF_CALL_ALLOCA_FOR_VAR) != 0;
}
/* Copy all the GF_CALL_* flags from ORIG_CALL to DEST_CALL. */
static inline void
-gimple_call_copy_flags (gimple dest_call, gimple orig_call)
+gimple_call_copy_flags (gcall *dest_call, gcall *orig_call)
{
- GIMPLE_CHECK (dest_call, GIMPLE_CALL);
- GIMPLE_CHECK (orig_call, GIMPLE_CALL);
dest_call->subcode = orig_call->subcode;
}
/* Return a pointer to the points-to solution for the set of call-used
- variables of the call CALL. */
+ variables of the call CALL_STMT. */
static inline struct pt_solution *
-gimple_call_use_set (gimple call)
+gimple_call_use_set (gcall *call_stmt)
{
- gimple_statement_call *call_stmt = as_a <gimple_statement_call *> (call);
return &call_stmt->call_used;
}
/* Return a pointer to the points-to solution for the set of call-used
- variables of the call CALL. */
+ variables of the call CALL_STMT. */
static inline struct pt_solution *
-gimple_call_clobber_set (gimple call)
+gimple_call_clobber_set (gcall *call_stmt)
{
- gimple_statement_call *call_stmt = as_a <gimple_statement_call *> (call);
return &call_stmt->call_clobbered;
}
/* Set CODE to be the predicate code for the conditional statement GS. */
static inline void
-gimple_cond_set_code (gimple gs, enum tree_code code)
+gimple_cond_set_code (gcond *gs, enum tree_code code)
{
- GIMPLE_CHECK (gs, GIMPLE_COND);
gs->subcode = code;
}
statement GS. */
static inline tree *
-gimple_cond_lhs_ptr (const_gimple gs)
+gimple_cond_lhs_ptr (const gcond *gs)
{
- GIMPLE_CHECK (gs, GIMPLE_COND);
return gimple_op_ptr (gs, 0);
}
conditional statement GS. */
static inline void
-gimple_cond_set_lhs (gimple gs, tree lhs)
+gimple_cond_set_lhs (gcond *gs, tree lhs)
{
- GIMPLE_CHECK (gs, GIMPLE_COND);
gimple_set_op (gs, 0, lhs);
}
conditional GS. */
static inline tree *
-gimple_cond_rhs_ptr (const_gimple gs)
+gimple_cond_rhs_ptr (const gcond *gs)
{
- GIMPLE_CHECK (gs, GIMPLE_COND);
return gimple_op_ptr (gs, 1);
}
conditional statement GS. */
static inline void
-gimple_cond_set_rhs (gimple gs, tree rhs)
+gimple_cond_set_rhs (gcond *gs, tree rhs)
{
- GIMPLE_CHECK (gs, GIMPLE_COND);
gimple_set_op (gs, 1, rhs);
}
predicate evaluates to true. */
static inline tree
-gimple_cond_true_label (const_gimple gs)
+gimple_cond_true_label (const gcond *gs)
{
- GIMPLE_CHECK (gs, GIMPLE_COND);
return gimple_op (gs, 2);
}
predicate evaluates to true. */
static inline void
-gimple_cond_set_true_label (gimple gs, tree label)
+gimple_cond_set_true_label (gcond *gs, tree label)
{
- GIMPLE_CHECK (gs, GIMPLE_COND);
gimple_set_op (gs, 2, label);
}
predicate evaluates to false. */
static inline void
-gimple_cond_set_false_label (gimple gs, tree label)
+gimple_cond_set_false_label (gcond *gs, tree label)
{
- GIMPLE_CHECK (gs, GIMPLE_COND);
gimple_set_op (gs, 3, label);
}
predicate evaluates to false. */
static inline tree
-gimple_cond_false_label (const_gimple gs)
+gimple_cond_false_label (const gcond *gs)
{
- GIMPLE_CHECK (gs, GIMPLE_COND);
 return gimple_op (gs, 3);
}
/* Set the conditional COND_STMT to be of the form 'if (1 == 0)'. */
static inline void
-gimple_cond_make_false (gimple gs)
+gimple_cond_make_false (gcond *gs)
{
gimple_cond_set_lhs (gs, boolean_true_node);
gimple_cond_set_rhs (gs, boolean_false_node);
/* Set the conditional COND_STMT to be of the form 'if (1 == 1)'. */
static inline void
-gimple_cond_make_true (gimple gs)
+gimple_cond_make_true (gcond *gs)
{
gimple_cond_set_lhs (gs, boolean_true_node);
gimple_cond_set_rhs (gs, boolean_true_node);
'if (0 == 0)', 'if (1 != 0)' or 'if (0 != 1)' */
static inline bool
-gimple_cond_true_p (const_gimple gs)
+gimple_cond_true_p (const gcond *gs)
{
tree lhs = gimple_cond_lhs (gs);
tree rhs = gimple_cond_rhs (gs);
'if (0 != 0)', 'if (1 == 0)' or 'if (0 == 1)' */
static inline bool
-gimple_cond_false_p (const_gimple gs)
+gimple_cond_false_p (const gcond *gs)
{
tree lhs = gimple_cond_lhs (gs);
tree rhs = gimple_cond_rhs (gs);
/* Set the code, LHS and RHS of GIMPLE_COND STMT from CODE, LHS and RHS. */
static inline void
-gimple_cond_set_condition (gimple stmt, enum tree_code code, tree lhs, tree rhs)
+gimple_cond_set_condition (gcond *stmt, enum tree_code code, tree lhs,
+ tree rhs)
{
gimple_cond_set_code (stmt, code);
gimple_cond_set_lhs (stmt, lhs);
/* Return the LABEL_DECL node used by GIMPLE_LABEL statement GS. */
static inline tree
-gimple_label_label (const_gimple gs)
+gimple_label_label (const glabel *gs)
{
- GIMPLE_CHECK (gs, GIMPLE_LABEL);
return gimple_op (gs, 0);
}
GS. */
static inline void
-gimple_label_set_label (gimple gs, tree label)
+gimple_label_set_label (glabel *gs, tree label)
{
- GIMPLE_CHECK (gs, GIMPLE_LABEL);
gimple_set_op (gs, 0, label);
}
/* Set DEST to be the destination of the unconditonal jump GS. */
static inline void
-gimple_goto_set_dest (gimple gs, tree dest)
+gimple_goto_set_dest (ggoto *gs, tree dest)
{
- GIMPLE_CHECK (gs, GIMPLE_GOTO);
gimple_set_op (gs, 0, dest);
}
/* Return the variables declared in the GIMPLE_BIND statement GS. */
static inline tree
-gimple_bind_vars (const_gimple gs)
+gimple_bind_vars (const gbind *bind_stmt)
{
- const gimple_statement_bind *bind_stmt =
- as_a <const gimple_statement_bind *> (gs);
return bind_stmt->vars;
}
statement GS. */
static inline void
-gimple_bind_set_vars (gimple gs, tree vars)
+gimple_bind_set_vars (gbind *bind_stmt, tree vars)
{
- gimple_statement_bind *bind_stmt = as_a <gimple_statement_bind *> (gs);
bind_stmt->vars = vars;
}
statement GS. */
static inline void
-gimple_bind_append_vars (gimple gs, tree vars)
+gimple_bind_append_vars (gbind *bind_stmt, tree vars)
{
- gimple_statement_bind *bind_stmt = as_a <gimple_statement_bind *> (gs);
bind_stmt->vars = chainon (bind_stmt->vars, vars);
}
static inline gimple_seq *
-gimple_bind_body_ptr (gimple gs)
+gimple_bind_body_ptr (gbind *bind_stmt)
{
- gimple_statement_bind *bind_stmt = as_a <gimple_statement_bind *> (gs);
return &bind_stmt->body;
}
/* Return the GIMPLE sequence contained in the GIMPLE_BIND statement GS. */
static inline gimple_seq
-gimple_bind_body (gimple gs)
+gimple_bind_body (gbind *gs)
{
return *gimple_bind_body_ptr (gs);
}
statement GS. */
static inline void
-gimple_bind_set_body (gimple gs, gimple_seq seq)
+gimple_bind_set_body (gbind *bind_stmt, gimple_seq seq)
{
- gimple_statement_bind *bind_stmt = as_a <gimple_statement_bind *> (gs);
bind_stmt->body = seq;
}
/* Append a statement to the end of a GIMPLE_BIND's body. */
static inline void
-gimple_bind_add_stmt (gimple gs, gimple stmt)
+gimple_bind_add_stmt (gbind *bind_stmt, gimple stmt)
{
- gimple_statement_bind *bind_stmt = as_a <gimple_statement_bind *> (gs);
gimple_seq_add_stmt (&bind_stmt->body, stmt);
}
/* Append a sequence of statements to the end of a GIMPLE_BIND's body. */
static inline void
-gimple_bind_add_seq (gimple gs, gimple_seq seq)
+gimple_bind_add_seq (gbind *bind_stmt, gimple_seq seq)
{
- gimple_statement_bind *bind_stmt = as_a <gimple_statement_bind *> (gs);
gimple_seq_add_seq (&bind_stmt->body, seq);
}
GS. This is analogous to the BIND_EXPR_BLOCK field in trees. */
static inline tree
-gimple_bind_block (const_gimple gs)
+gimple_bind_block (const gbind *bind_stmt)
{
- const gimple_statement_bind *bind_stmt =
- as_a <const gimple_statement_bind *> (gs);
return bind_stmt->block;
}
statement GS. */
static inline void
-gimple_bind_set_block (gimple gs, tree block)
+gimple_bind_set_block (gbind *bind_stmt, tree block)
{
- gimple_statement_bind *bind_stmt = as_a <gimple_statement_bind *> (gs);
gcc_gimple_checking_assert (block == NULL_TREE
|| TREE_CODE (block) == BLOCK);
bind_stmt->block = block;
}
-/* Return the number of input operands for GIMPLE_ASM GS. */
+/* Return the number of input operands for GIMPLE_ASM ASM_STMT. */
static inline unsigned
-gimple_asm_ninputs (const_gimple gs)
+gimple_asm_ninputs (const gasm *asm_stmt)
{
- const gimple_statement_asm *asm_stmt =
- as_a <const gimple_statement_asm *> (gs);
return asm_stmt->ni;
}
-/* Return the number of output operands for GIMPLE_ASM GS. */
+/* Return the number of output operands for GIMPLE_ASM ASM_STMT. */
static inline unsigned
-gimple_asm_noutputs (const_gimple gs)
+gimple_asm_noutputs (const gasm *asm_stmt)
{
- const gimple_statement_asm *asm_stmt =
- as_a <const gimple_statement_asm *> (gs);
return asm_stmt->no;
}
-/* Return the number of clobber operands for GIMPLE_ASM GS. */
+/* Return the number of clobber operands for GIMPLE_ASM ASM_STMT. */
static inline unsigned
-gimple_asm_nclobbers (const_gimple gs)
+gimple_asm_nclobbers (const gasm *asm_stmt)
{
- const gimple_statement_asm *asm_stmt =
- as_a <const gimple_statement_asm *> (gs);
return asm_stmt->nc;
}
-/* Return the number of label operands for GIMPLE_ASM GS. */
+/* Return the number of label operands for GIMPLE_ASM ASM_STMT. */
static inline unsigned
-gimple_asm_nlabels (const_gimple gs)
+gimple_asm_nlabels (const gasm *asm_stmt)
{
- const gimple_statement_asm *asm_stmt =
- as_a <const gimple_statement_asm *> (gs);
return asm_stmt->nl;
}
-/* Return input operand INDEX of GIMPLE_ASM GS. */
+/* Return input operand INDEX of GIMPLE_ASM ASM_STMT. */
static inline tree
-gimple_asm_input_op (const_gimple gs, unsigned index)
+gimple_asm_input_op (const gasm *asm_stmt, unsigned index)
{
- const gimple_statement_asm *asm_stmt =
- as_a <const gimple_statement_asm *> (gs);
gcc_gimple_checking_assert (index < asm_stmt->ni);
- return gimple_op (gs, index + asm_stmt->no);
+ return gimple_op (asm_stmt, index + asm_stmt->no);
}
-/* Return a pointer to input operand INDEX of GIMPLE_ASM GS. */
+/* Return a pointer to input operand INDEX of GIMPLE_ASM ASM_STMT. */
static inline tree *
-gimple_asm_input_op_ptr (const_gimple gs, unsigned index)
+gimple_asm_input_op_ptr (const gasm *asm_stmt, unsigned index)
{
- const gimple_statement_asm *asm_stmt =
- as_a <const gimple_statement_asm *> (gs);
gcc_gimple_checking_assert (index < asm_stmt->ni);
- return gimple_op_ptr (gs, index + asm_stmt->no);
+ return gimple_op_ptr (asm_stmt, index + asm_stmt->no);
}
-/* Set IN_OP to be input operand INDEX in GIMPLE_ASM GS. */
+/* Set IN_OP to be input operand INDEX in GIMPLE_ASM ASM_STMT. */
static inline void
-gimple_asm_set_input_op (gimple gs, unsigned index, tree in_op)
+gimple_asm_set_input_op (gasm *asm_stmt, unsigned index, tree in_op)
{
- gimple_statement_asm *asm_stmt = as_a <gimple_statement_asm *> (gs);
gcc_gimple_checking_assert (index < asm_stmt->ni
&& TREE_CODE (in_op) == TREE_LIST);
- gimple_set_op (gs, index + asm_stmt->no, in_op);
+ gimple_set_op (asm_stmt, index + asm_stmt->no, in_op);
}
-/* Return output operand INDEX of GIMPLE_ASM GS. */
+/* Return output operand INDEX of GIMPLE_ASM ASM_STMT. */
static inline tree
-gimple_asm_output_op (const_gimple gs, unsigned index)
+gimple_asm_output_op (const gasm *asm_stmt, unsigned index)
{
- const gimple_statement_asm *asm_stmt =
- as_a <const gimple_statement_asm *> (gs);
gcc_gimple_checking_assert (index < asm_stmt->no);
- return gimple_op (gs, index);
+ return gimple_op (asm_stmt, index);
}
-/* Return a pointer to output operand INDEX of GIMPLE_ASM GS. */
+/* Return a pointer to output operand INDEX of GIMPLE_ASM ASM_STMT. */
static inline tree *
-gimple_asm_output_op_ptr (const_gimple gs, unsigned index)
+gimple_asm_output_op_ptr (const gasm *asm_stmt, unsigned index)
{
- const gimple_statement_asm *asm_stmt =
- as_a <const gimple_statement_asm *> (gs);
gcc_gimple_checking_assert (index < asm_stmt->no);
- return gimple_op_ptr (gs, index);
+ return gimple_op_ptr (asm_stmt, index);
}
-/* Set OUT_OP to be output operand INDEX in GIMPLE_ASM GS. */
+/* Set OUT_OP to be output operand INDEX in GIMPLE_ASM ASM_STMT. */
static inline void
-gimple_asm_set_output_op (gimple gs, unsigned index, tree out_op)
+gimple_asm_set_output_op (gasm *asm_stmt, unsigned index, tree out_op)
{
- gimple_statement_asm *asm_stmt = as_a <gimple_statement_asm *> (gs);
gcc_gimple_checking_assert (index < asm_stmt->no
&& TREE_CODE (out_op) == TREE_LIST);
- gimple_set_op (gs, index, out_op);
+ gimple_set_op (asm_stmt, index, out_op);
}
-/* Return clobber operand INDEX of GIMPLE_ASM GS. */
+/* Return clobber operand INDEX of GIMPLE_ASM ASM_STMT. */
static inline tree
-gimple_asm_clobber_op (const_gimple gs, unsigned index)
+gimple_asm_clobber_op (const gasm *asm_stmt, unsigned index)
{
- const gimple_statement_asm *asm_stmt =
- as_a <const gimple_statement_asm *> (gs);
gcc_gimple_checking_assert (index < asm_stmt->nc);
- return gimple_op (gs, index + asm_stmt->ni + asm_stmt->no);
+ return gimple_op (asm_stmt, index + asm_stmt->ni + asm_stmt->no);
}
-/* Set CLOBBER_OP to be clobber operand INDEX in GIMPLE_ASM GS. */
+/* Set CLOBBER_OP to be clobber operand INDEX in GIMPLE_ASM ASM_STMT. */
static inline void
-gimple_asm_set_clobber_op (gimple gs, unsigned index, tree clobber_op)
+gimple_asm_set_clobber_op (gasm *asm_stmt, unsigned index, tree clobber_op)
{
- gimple_statement_asm *asm_stmt = as_a <gimple_statement_asm *> (gs);
gcc_gimple_checking_assert (index < asm_stmt->nc
&& TREE_CODE (clobber_op) == TREE_LIST);
- gimple_set_op (gs, index + asm_stmt->ni + asm_stmt->no, clobber_op);
+ gimple_set_op (asm_stmt, index + asm_stmt->ni + asm_stmt->no, clobber_op);
}
-/* Return label operand INDEX of GIMPLE_ASM GS. */
+/* Return label operand INDEX of GIMPLE_ASM ASM_STMT. */
static inline tree
-gimple_asm_label_op (const_gimple gs, unsigned index)
+gimple_asm_label_op (const gasm *asm_stmt, unsigned index)
{
- const gimple_statement_asm *asm_stmt =
- as_a <const gimple_statement_asm *> (gs);
gcc_gimple_checking_assert (index < asm_stmt->nl);
- return gimple_op (gs, index + asm_stmt->ni + asm_stmt->nc);
+ return gimple_op (asm_stmt, index + asm_stmt->ni + asm_stmt->nc);
}
-/* Set LABEL_OP to be label operand INDEX in GIMPLE_ASM GS. */
+/* Set LABEL_OP to be label operand INDEX in GIMPLE_ASM ASM_STMT. */
static inline void
-gimple_asm_set_label_op (gimple gs, unsigned index, tree label_op)
+gimple_asm_set_label_op (gasm *asm_stmt, unsigned index, tree label_op)
{
- gimple_statement_asm *asm_stmt = as_a <gimple_statement_asm *> (gs);
gcc_gimple_checking_assert (index < asm_stmt->nl
&& TREE_CODE (label_op) == TREE_LIST);
- gimple_set_op (gs, index + asm_stmt->ni + asm_stmt->nc, label_op);
+ gimple_set_op (asm_stmt, index + asm_stmt->ni + asm_stmt->nc, label_op);
}
/* Return the string representing the assembly instruction in
- GIMPLE_ASM GS. */
+ GIMPLE_ASM ASM_STMT. */
static inline const char *
-gimple_asm_string (const_gimple gs)
+gimple_asm_string (const gasm *asm_stmt)
{
- const gimple_statement_asm *asm_stmt =
- as_a <const gimple_statement_asm *> (gs);
return asm_stmt->string;
}
-/* Return true if GS is an asm statement marked volatile. */
+/* Return true if asm statement ASM_STMT is marked volatile. */
static inline bool
-gimple_asm_volatile_p (const_gimple gs)
+gimple_asm_volatile_p (const gasm *asm_stmt)
{
- GIMPLE_CHECK (gs, GIMPLE_ASM);
- return (gs->subcode & GF_ASM_VOLATILE) != 0;
+ return (asm_stmt->subcode & GF_ASM_VOLATILE) != 0;
}
-/* If VOLATLE_P is true, mark asm statement GS as volatile. */
+/* If VOLATILE_P is true, mark asm statement ASM_STMT as volatile. */
static inline void
-gimple_asm_set_volatile (gimple gs, bool volatile_p)
+gimple_asm_set_volatile (gasm *asm_stmt, bool volatile_p)
{
- GIMPLE_CHECK (gs, GIMPLE_ASM);
if (volatile_p)
- gs->subcode |= GF_ASM_VOLATILE;
+ asm_stmt->subcode |= GF_ASM_VOLATILE;
else
- gs->subcode &= ~GF_ASM_VOLATILE;
+ asm_stmt->subcode &= ~GF_ASM_VOLATILE;
}
-/* If INPUT_P is true, mark asm GS as an ASM_INPUT. */
+/* If INPUT_P is true, mark asm ASM_STMT as an ASM_INPUT. */
static inline void
-gimple_asm_set_input (gimple gs, bool input_p)
+gimple_asm_set_input (gasm *asm_stmt, bool input_p)
{
- GIMPLE_CHECK (gs, GIMPLE_ASM);
if (input_p)
- gs->subcode |= GF_ASM_INPUT;
+ asm_stmt->subcode |= GF_ASM_INPUT;
else
- gs->subcode &= ~GF_ASM_INPUT;
+ asm_stmt->subcode &= ~GF_ASM_INPUT;
}
-/* Return true if asm GS is an ASM_INPUT. */
+/* Return true if asm ASM_STMT is an ASM_INPUT. */
static inline bool
-gimple_asm_input_p (const_gimple gs)
+gimple_asm_input_p (const gasm *asm_stmt)
{
- GIMPLE_CHECK (gs, GIMPLE_ASM);
- return (gs->subcode & GF_ASM_INPUT) != 0;
+ return (asm_stmt->subcode & GF_ASM_INPUT) != 0;
}
-/* Return the types handled by GIMPLE_CATCH statement GS. */
+/* Return the types handled by GIMPLE_CATCH statement CATCH_STMT. */
static inline tree
-gimple_catch_types (const_gimple gs)
+gimple_catch_types (const gcatch *catch_stmt)
{
- const gimple_statement_catch *catch_stmt =
- as_a <const gimple_statement_catch *> (gs);
return catch_stmt->types;
}
-/* Return a pointer to the types handled by GIMPLE_CATCH statement GS. */
+/* Return a pointer to the types handled by GIMPLE_CATCH statement
+   CATCH_STMT. */
static inline tree *
-gimple_catch_types_ptr (gimple gs)
+gimple_catch_types_ptr (gcatch *catch_stmt)
{
- gimple_statement_catch *catch_stmt = as_a <gimple_statement_catch *> (gs);
return &catch_stmt->types;
}
/* Return a pointer to the GIMPLE sequence representing the body of
- the handler of GIMPLE_CATCH statement GS. */
+ the handler of GIMPLE_CATCH statement CATCH_STMT. */
static inline gimple_seq *
-gimple_catch_handler_ptr (gimple gs)
+gimple_catch_handler_ptr (gcatch *catch_stmt)
{
- gimple_statement_catch *catch_stmt = as_a <gimple_statement_catch *> (gs);
return &catch_stmt->handler;
}
/* Return the GIMPLE sequence representing the body of the handler of
- GIMPLE_CATCH statement GS. */
+ GIMPLE_CATCH statement CATCH_STMT. */
static inline gimple_seq
-gimple_catch_handler (gimple gs)
+gimple_catch_handler (gcatch *catch_stmt)
{
- return *gimple_catch_handler_ptr (gs);
+ return *gimple_catch_handler_ptr (catch_stmt);
}
-/* Set T to be the set of types handled by GIMPLE_CATCH GS. */
+/* Set T to be the set of types handled by GIMPLE_CATCH CATCH_STMT. */
static inline void
-gimple_catch_set_types (gimple gs, tree t)
+gimple_catch_set_types (gcatch *catch_stmt, tree t)
{
- gimple_statement_catch *catch_stmt = as_a <gimple_statement_catch *> (gs);
catch_stmt->types = t;
}
-/* Set HANDLER to be the body of GIMPLE_CATCH GS. */
+/* Set HANDLER to be the body of GIMPLE_CATCH CATCH_STMT. */
static inline void
-gimple_catch_set_handler (gimple gs, gimple_seq handler)
+gimple_catch_set_handler (gcatch *catch_stmt, gimple_seq handler)
{
- gimple_statement_catch *catch_stmt = as_a <gimple_statement_catch *> (gs);
catch_stmt->handler = handler;
}
static inline tree
gimple_eh_filter_types (const_gimple gs)
{
- const gimple_statement_eh_filter *eh_filter_stmt =
- as_a <const gimple_statement_eh_filter *> (gs);
+ const geh_filter *eh_filter_stmt = as_a <const geh_filter *> (gs);
return eh_filter_stmt->types;
}
static inline tree *
gimple_eh_filter_types_ptr (gimple gs)
{
- gimple_statement_eh_filter *eh_filter_stmt =
- as_a <gimple_statement_eh_filter *> (gs);
+ geh_filter *eh_filter_stmt = as_a <geh_filter *> (gs);
return &eh_filter_stmt->types;
}
static inline gimple_seq *
gimple_eh_filter_failure_ptr (gimple gs)
{
- gimple_statement_eh_filter *eh_filter_stmt =
- as_a <gimple_statement_eh_filter *> (gs);
+ geh_filter *eh_filter_stmt = as_a <geh_filter *> (gs);
return &eh_filter_stmt->failure;
}
}
-/* Set TYPES to be the set of types handled by GIMPLE_EH_FILTER GS. */
+/* Set TYPES to be the set of types handled by GIMPLE_EH_FILTER
+ EH_FILTER_STMT. */
static inline void
-gimple_eh_filter_set_types (gimple gs, tree types)
+gimple_eh_filter_set_types (geh_filter *eh_filter_stmt, tree types)
{
- gimple_statement_eh_filter *eh_filter_stmt =
- as_a <gimple_statement_eh_filter *> (gs);
eh_filter_stmt->types = types;
}
/* Set FAILURE to be the sequence of statements to execute on failure
- for GIMPLE_EH_FILTER GS. */
+ for GIMPLE_EH_FILTER EH_FILTER_STMT. */
static inline void
-gimple_eh_filter_set_failure (gimple gs, gimple_seq failure)
+gimple_eh_filter_set_failure (geh_filter *eh_filter_stmt,
+ gimple_seq failure)
{
- gimple_statement_eh_filter *eh_filter_stmt =
- as_a <gimple_statement_eh_filter *> (gs);
eh_filter_stmt->failure = failure;
}
/* Get the function decl to be called by the MUST_NOT_THROW region. */
static inline tree
-gimple_eh_must_not_throw_fndecl (gimple gs)
+gimple_eh_must_not_throw_fndecl (geh_mnt *eh_mnt_stmt)
{
- gimple_statement_eh_mnt *eh_mnt_stmt = as_a <gimple_statement_eh_mnt *> (gs);
return eh_mnt_stmt->fndecl;
}
/* Set the function decl to be called by GS to DECL. */
static inline void
-gimple_eh_must_not_throw_set_fndecl (gimple gs, tree decl)
+gimple_eh_must_not_throw_set_fndecl (geh_mnt *eh_mnt_stmt,
+ tree decl)
{
- gimple_statement_eh_mnt *eh_mnt_stmt = as_a <gimple_statement_eh_mnt *> (gs);
eh_mnt_stmt->fndecl = decl;
}
/* GIMPLE_EH_ELSE accessors. */
static inline gimple_seq *
-gimple_eh_else_n_body_ptr (gimple gs)
+gimple_eh_else_n_body_ptr (geh_else *eh_else_stmt)
{
- gimple_statement_eh_else *eh_else_stmt =
- as_a <gimple_statement_eh_else *> (gs);
return &eh_else_stmt->n_body;
}
static inline gimple_seq
-gimple_eh_else_n_body (gimple gs)
+gimple_eh_else_n_body (geh_else *eh_else_stmt)
{
- return *gimple_eh_else_n_body_ptr (gs);
+ return *gimple_eh_else_n_body_ptr (eh_else_stmt);
}
static inline gimple_seq *
-gimple_eh_else_e_body_ptr (gimple gs)
+gimple_eh_else_e_body_ptr (geh_else *eh_else_stmt)
{
- gimple_statement_eh_else *eh_else_stmt =
- as_a <gimple_statement_eh_else *> (gs);
return &eh_else_stmt->e_body;
}
static inline gimple_seq
-gimple_eh_else_e_body (gimple gs)
+gimple_eh_else_e_body (geh_else *eh_else_stmt)
{
- return *gimple_eh_else_e_body_ptr (gs);
+ return *gimple_eh_else_e_body_ptr (eh_else_stmt);
}
static inline void
-gimple_eh_else_set_n_body (gimple gs, gimple_seq seq)
+gimple_eh_else_set_n_body (geh_else *eh_else_stmt, gimple_seq seq)
{
- gimple_statement_eh_else *eh_else_stmt =
- as_a <gimple_statement_eh_else *> (gs);
eh_else_stmt->n_body = seq;
}
static inline void
-gimple_eh_else_set_e_body (gimple gs, gimple_seq seq)
+gimple_eh_else_set_e_body (geh_else *eh_else_stmt, gimple_seq seq)
{
- gimple_statement_eh_else *eh_else_stmt =
- as_a <gimple_statement_eh_else *> (gs);
eh_else_stmt->e_body = seq;
}
/* Set the kind of try block represented by GIMPLE_TRY GS. */
static inline void
-gimple_try_set_kind (gimple gs, enum gimple_try_flags kind)
+gimple_try_set_kind (gtry *gs, enum gimple_try_flags kind)
{
- GIMPLE_CHECK (gs, GIMPLE_TRY);
gcc_gimple_checking_assert (kind == GIMPLE_TRY_CATCH
|| kind == GIMPLE_TRY_FINALLY);
if (gimple_try_kind (gs) != kind)
static inline gimple_seq *
gimple_try_eval_ptr (gimple gs)
{
- gimple_statement_try *try_stmt = as_a <gimple_statement_try *> (gs);
+ gtry *try_stmt = as_a <gtry *> (gs);
return &try_stmt->eval;
}
static inline gimple_seq *
gimple_try_cleanup_ptr (gimple gs)
{
- gimple_statement_try *try_stmt = as_a <gimple_statement_try *> (gs);
+ gtry *try_stmt = as_a <gtry *> (gs);
return &try_stmt->cleanup;
}
/* Set the GIMPLE_TRY_CATCH_IS_CLEANUP flag. */
static inline void
-gimple_try_set_catch_is_cleanup (gimple g, bool catch_is_cleanup)
+gimple_try_set_catch_is_cleanup (gtry *g, bool catch_is_cleanup)
{
gcc_gimple_checking_assert (gimple_try_kind (g) == GIMPLE_TRY_CATCH);
if (catch_is_cleanup)
/* Set EVAL to be the sequence of statements to use as the body for
- GIMPLE_TRY GS. */
+ GIMPLE_TRY TRY_STMT. */
static inline void
-gimple_try_set_eval (gimple gs, gimple_seq eval)
+gimple_try_set_eval (gtry *try_stmt, gimple_seq eval)
{
- gimple_statement_try *try_stmt = as_a <gimple_statement_try *> (gs);
try_stmt->eval = eval;
}
/* Set CLEANUP to be the sequence of statements to use as the cleanup
- body for GIMPLE_TRY GS. */
+ body for GIMPLE_TRY TRY_STMT. */
static inline void
-gimple_try_set_cleanup (gimple gs, gimple_seq cleanup)
+gimple_try_set_cleanup (gtry *try_stmt, gimple_seq cleanup)
{
- gimple_statement_try *try_stmt = as_a <gimple_statement_try *> (gs);
try_stmt->cleanup = cleanup;
}
static inline unsigned
gimple_phi_capacity (const_gimple gs)
{
- const gimple_statement_phi *phi_stmt =
- as_a <const gimple_statement_phi *> (gs);
+ const gphi *phi_stmt = as_a <const gphi *> (gs);
return phi_stmt->capacity;
}
static inline unsigned
gimple_phi_num_args (const_gimple gs)
{
- const gimple_statement_phi *phi_stmt =
- as_a <const gimple_statement_phi *> (gs);
+ const gphi *phi_stmt = as_a <const gphi *> (gs);
return phi_stmt->nargs;
}
static inline tree
gimple_phi_result (const_gimple gs)
{
- const gimple_statement_phi *phi_stmt =
- as_a <const gimple_statement_phi *> (gs);
+ const gphi *phi_stmt = as_a <const gphi *> (gs);
return phi_stmt->result;
}
static inline tree *
gimple_phi_result_ptr (gimple gs)
{
- gimple_statement_phi *phi_stmt = as_a <gimple_statement_phi *> (gs);
+ gphi *phi_stmt = as_a <gphi *> (gs);
return &phi_stmt->result;
}
-/* Set RESULT to be the SSA name created by GIMPLE_PHI GS. */
+/* Set RESULT to be the SSA name created by GIMPLE_PHI PHI. */
static inline void
-gimple_phi_set_result (gimple gs, tree result)
+gimple_phi_set_result (gphi *phi, tree result)
{
- gimple_statement_phi *phi_stmt = as_a <gimple_statement_phi *> (gs);
- phi_stmt->result = result;
+ phi->result = result;
if (result && TREE_CODE (result) == SSA_NAME)
- SSA_NAME_DEF_STMT (result) = gs;
+ SSA_NAME_DEF_STMT (result) = phi;
}
static inline struct phi_arg_d *
gimple_phi_arg (gimple gs, unsigned index)
{
- gimple_statement_phi *phi_stmt = as_a <gimple_statement_phi *> (gs);
+ gphi *phi_stmt = as_a <gphi *> (gs);
gcc_gimple_checking_assert (index <= phi_stmt->capacity);
return &(phi_stmt->args[index]);
}
/* Set PHIARG to be the argument corresponding to incoming edge INDEX
- for GIMPLE_PHI GS. */
+ for GIMPLE_PHI PHI. */
static inline void
-gimple_phi_set_arg (gimple gs, unsigned index, struct phi_arg_d * phiarg)
+gimple_phi_set_arg (gphi *phi, unsigned index, struct phi_arg_d * phiarg)
{
- gimple_statement_phi *phi_stmt = as_a <gimple_statement_phi *> (gs);
- gcc_gimple_checking_assert (index <= phi_stmt->nargs);
- phi_stmt->args[index] = *phiarg;
+ gcc_gimple_checking_assert (index <= phi->nargs);
+ phi->args[index] = *phiarg;
}
/* Return the PHI nodes for basic block BB, or NULL if there are no
}
-/* Return a pointer to the tree operand for argument I of PHI node GS. */
+/* Return a pointer to the tree operand for argument I of phi node PHI. */
static inline tree *
-gimple_phi_arg_def_ptr (gimple gs, size_t index)
+gimple_phi_arg_def_ptr (gphi *phi, size_t index)
{
- return &gimple_phi_arg (gs, index)->def;
+ return &gimple_phi_arg (phi, index)->def;
}
-/* Return the edge associated with argument I of phi node GS. */
+/* Return the edge associated with argument I of phi node PHI. */
static inline edge
-gimple_phi_arg_edge (gimple gs, size_t i)
+gimple_phi_arg_edge (gphi *phi, size_t i)
{
- return EDGE_PRED (gimple_bb (gs), i);
+ return EDGE_PRED (gimple_bb (phi), i);
}
-/* Return the source location of gimple argument I of phi node GS. */
+/* Return the source location of gimple argument I of phi node PHI. */
static inline source_location
-gimple_phi_arg_location (gimple gs, size_t i)
+gimple_phi_arg_location (gphi *phi, size_t i)
{
- return gimple_phi_arg (gs, i)->locus;
+ return gimple_phi_arg (phi, i)->locus;
}
-/* Return the source location of the argument on edge E of phi node GS. */
+/* Return the source location of the argument on edge E of phi node PHI. */
static inline source_location
-gimple_phi_arg_location_from_edge (gimple gs, edge e)
+gimple_phi_arg_location_from_edge (gphi *phi, edge e)
{
- return gimple_phi_arg (gs, e->dest_idx)->locus;
+ return gimple_phi_arg (phi, e->dest_idx)->locus;
}
-/* Set the source location of gimple argument I of phi node GS to LOC. */
+/* Set the source location of gimple argument I of phi node PHI to LOC. */
static inline void
-gimple_phi_arg_set_location (gimple gs, size_t i, source_location loc)
+gimple_phi_arg_set_location (gphi *phi, size_t i, source_location loc)
{
- gimple_phi_arg (gs, i)->locus = loc;
+ gimple_phi_arg (phi, i)->locus = loc;
}
-/* Return TRUE if argument I of phi node GS has a location record. */
+/* Return TRUE if argument I of phi node PHI has a location record. */
static inline bool
-gimple_phi_arg_has_location (gimple gs, size_t i)
+gimple_phi_arg_has_location (gphi *phi, size_t i)
{
- return gimple_phi_arg_location (gs, i) != UNKNOWN_LOCATION;
+ return gimple_phi_arg_location (phi, i) != UNKNOWN_LOCATION;
}
-/* Return the region number for GIMPLE_RESX GS. */
+/* Return the region number for GIMPLE_RESX RESX_STMT. */
static inline int
-gimple_resx_region (const_gimple gs)
+gimple_resx_region (const gresx *resx_stmt)
{
- const gimple_statement_resx *resx_stmt =
- as_a <const gimple_statement_resx *> (gs);
return resx_stmt->region;
}
-/* Set REGION to be the region number for GIMPLE_RESX GS. */
+/* Set REGION to be the region number for GIMPLE_RESX RESX_STMT. */
static inline void
-gimple_resx_set_region (gimple gs, int region)
+gimple_resx_set_region (gresx *resx_stmt, int region)
{
- gimple_statement_resx *resx_stmt = as_a <gimple_statement_resx *> (gs);
resx_stmt->region = region;
}
-/* Return the region number for GIMPLE_EH_DISPATCH GS. */
+/* Return the region number for GIMPLE_EH_DISPATCH EH_DISPATCH_STMT. */
static inline int
-gimple_eh_dispatch_region (const_gimple gs)
+gimple_eh_dispatch_region (const geh_dispatch *eh_dispatch_stmt)
{
- const gimple_statement_eh_dispatch *eh_dispatch_stmt =
- as_a <const gimple_statement_eh_dispatch *> (gs);
return eh_dispatch_stmt->region;
}
-/* Set REGION to be the region number for GIMPLE_EH_DISPATCH GS. */
+/* Set REGION to be the region number for GIMPLE_EH_DISPATCH
+ EH_DISPATCH_STMT. */
static inline void
-gimple_eh_dispatch_set_region (gimple gs, int region)
+gimple_eh_dispatch_set_region (geh_dispatch *eh_dispatch_stmt, int region)
{
- gimple_statement_eh_dispatch *eh_dispatch_stmt =
- as_a <gimple_statement_eh_dispatch *> (gs);
eh_dispatch_stmt->region = region;
}
/* Return the number of labels associated with the switch statement GS. */
static inline unsigned
-gimple_switch_num_labels (const_gimple gs)
+gimple_switch_num_labels (const gswitch *gs)
{
unsigned num_ops;
GIMPLE_CHECK (gs, GIMPLE_SWITCH);
/* Set NLABELS to be the number of labels for the switch statement GS. */
static inline void
-gimple_switch_set_num_labels (gimple g, unsigned nlabels)
+gimple_switch_set_num_labels (gswitch *g, unsigned nlabels)
{
GIMPLE_CHECK (g, GIMPLE_SWITCH);
gimple_set_num_ops (g, nlabels + 1);
/* Return the index variable used by the switch statement GS. */
static inline tree
-gimple_switch_index (const_gimple gs)
+gimple_switch_index (const gswitch *gs)
{
- GIMPLE_CHECK (gs, GIMPLE_SWITCH);
return gimple_op (gs, 0);
}
/* Return a pointer to the index variable for the switch statement GS. */
static inline tree *
-gimple_switch_index_ptr (const_gimple gs)
+gimple_switch_index_ptr (const gswitch *gs)
{
- GIMPLE_CHECK (gs, GIMPLE_SWITCH);
return gimple_op_ptr (gs, 0);
}
/* Set INDEX to be the index variable for switch statement GS. */
static inline void
-gimple_switch_set_index (gimple gs, tree index)
+gimple_switch_set_index (gswitch *gs, tree index)
{
GIMPLE_CHECK (gs, GIMPLE_SWITCH);
gcc_gimple_checking_assert (SSA_VAR_P (index) || CONSTANT_CLASS_P (index));
labels in a switch statement. */
static inline tree
-gimple_switch_label (const_gimple gs, unsigned index)
+gimple_switch_label (const gswitch *gs, unsigned index)
{
GIMPLE_CHECK (gs, GIMPLE_SWITCH);
gcc_gimple_checking_assert (gimple_num_ops (gs) > index + 1);
/* Set the label number INDEX to LABEL. 0 is always the default label. */
static inline void
-gimple_switch_set_label (gimple gs, unsigned index, tree label)
+gimple_switch_set_label (gswitch *gs, unsigned index, tree label)
{
GIMPLE_CHECK (gs, GIMPLE_SWITCH);
gcc_gimple_checking_assert (gimple_num_ops (gs) > index + 1
/* Return the default label for a switch statement. */
static inline tree
-gimple_switch_default_label (const_gimple gs)
+gimple_switch_default_label (const gswitch *gs)
{
tree label = gimple_switch_label (gs, 0);
gcc_checking_assert (!CASE_LOW (label) && !CASE_HIGH (label));
/* Set the default label for a switch statement. */
static inline void
-gimple_switch_set_default_label (gimple gs, tree label)
+gimple_switch_set_default_label (gswitch *gs, tree label)
{
gcc_checking_assert (!CASE_LOW (label) && !CASE_HIGH (label));
gimple_switch_set_label (gs, 0, label);
}
-/* Return the name associated with OMP_CRITICAL statement GS. */
+/* Return the name associated with OMP_CRITICAL statement CRIT_STMT. */
static inline tree
-gimple_omp_critical_name (const_gimple gs)
+gimple_omp_critical_name (const gomp_critical *crit_stmt)
{
- const gimple_statement_omp_critical *omp_critical_stmt =
- as_a <const gimple_statement_omp_critical *> (gs);
- return omp_critical_stmt->name;
+ return crit_stmt->name;
}
/* Return a pointer to the name associated with OMP critical statement GS. */
static inline tree *
-gimple_omp_critical_name_ptr (gimple gs)
+gimple_omp_critical_name_ptr (gomp_critical *crit_stmt)
{
- gimple_statement_omp_critical *omp_critical_stmt =
- as_a <gimple_statement_omp_critical *> (gs);
- return &omp_critical_stmt->name;
+ return &crit_stmt->name;
}
/* Set NAME to be the name associated with OMP critical statement GS. */
static inline void
-gimple_omp_critical_set_name (gimple gs, tree name)
+gimple_omp_critical_set_name (gomp_critical *crit_stmt, tree name)
{
- gimple_statement_omp_critical *omp_critical_stmt =
- as_a <gimple_statement_omp_critical *> (gs);
- omp_critical_stmt->name = name;
+ crit_stmt->name = name;
}
/* Set the OMP for kind. */
static inline void
-gimple_omp_for_set_kind (gimple g, int kind)
+gimple_omp_for_set_kind (gomp_for *g, int kind)
{
- GIMPLE_CHECK (g, GIMPLE_OMP_FOR);
g->subcode = (g->subcode & ~GF_OMP_FOR_KIND_MASK)
| (kind & GF_OMP_FOR_KIND_MASK);
}
value of COMBINED_P. */
static inline void
-gimple_omp_for_set_combined_p (gimple g, bool combined_p)
+gimple_omp_for_set_combined_p (gomp_for *g, bool combined_p)
{
- GIMPLE_CHECK (g, GIMPLE_OMP_FOR);
if (combined_p)
g->subcode |= GF_OMP_FOR_COMBINED;
else
value of COMBINED_P. */
static inline void
-gimple_omp_for_set_combined_into_p (gimple g, bool combined_p)
+gimple_omp_for_set_combined_into_p (gomp_for *g, bool combined_p)
{
- GIMPLE_CHECK (g, GIMPLE_OMP_FOR);
if (combined_p)
g->subcode |= GF_OMP_FOR_COMBINED_INTO;
else
static inline tree
gimple_omp_for_clauses (const_gimple gs)
{
- const gimple_statement_omp_for *omp_for_stmt =
- as_a <const gimple_statement_omp_for *> (gs);
+ const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
return omp_for_stmt->clauses;
}
static inline tree *
gimple_omp_for_clauses_ptr (gimple gs)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
return &omp_for_stmt->clauses;
}
static inline void
gimple_omp_for_set_clauses (gimple gs, tree clauses)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
omp_for_stmt->clauses = clauses;
}
static inline size_t
gimple_omp_for_collapse (gimple gs)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
return omp_for_stmt->collapse;
}
static inline tree
gimple_omp_for_index (const_gimple gs, size_t i)
{
- const gimple_statement_omp_for *omp_for_stmt =
- as_a <const gimple_statement_omp_for *> (gs);
+ const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
return omp_for_stmt->iter[i].index;
}
static inline tree *
gimple_omp_for_index_ptr (gimple gs, size_t i)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
return &omp_for_stmt->iter[i].index;
}
static inline void
gimple_omp_for_set_index (gimple gs, size_t i, tree index)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
omp_for_stmt->iter[i].index = index;
}
static inline tree
gimple_omp_for_initial (const_gimple gs, size_t i)
{
- const gimple_statement_omp_for *omp_for_stmt =
- as_a <const gimple_statement_omp_for *> (gs);
+ const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
return omp_for_stmt->iter[i].initial;
}
static inline tree *
gimple_omp_for_initial_ptr (gimple gs, size_t i)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
return &omp_for_stmt->iter[i].initial;
}
static inline void
gimple_omp_for_set_initial (gimple gs, size_t i, tree initial)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
omp_for_stmt->iter[i].initial = initial;
}
static inline tree
gimple_omp_for_final (const_gimple gs, size_t i)
{
- const gimple_statement_omp_for *omp_for_stmt =
- as_a <const gimple_statement_omp_for *> (gs);
+ const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
return omp_for_stmt->iter[i].final;
}
static inline tree *
gimple_omp_for_final_ptr (gimple gs, size_t i)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
return &omp_for_stmt->iter[i].final;
}
static inline void
gimple_omp_for_set_final (gimple gs, size_t i, tree final)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
omp_for_stmt->iter[i].final = final;
}
static inline tree
gimple_omp_for_incr (const_gimple gs, size_t i)
{
- const gimple_statement_omp_for *omp_for_stmt =
- as_a <const gimple_statement_omp_for *> (gs);
+ const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
return omp_for_stmt->iter[i].incr;
}
static inline tree *
gimple_omp_for_incr_ptr (gimple gs, size_t i)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
return &omp_for_stmt->iter[i].incr;
}
static inline void
gimple_omp_for_set_incr (gimple gs, size_t i, tree incr)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
omp_for_stmt->iter[i].incr = incr;
}
static inline gimple_seq *
gimple_omp_for_pre_body_ptr (gimple gs)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
return &omp_for_stmt->pre_body;
}
static inline void
gimple_omp_for_set_pre_body (gimple gs, gimple_seq pre_body)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
omp_for_stmt->pre_body = pre_body;
}
static inline tree
gimple_omp_parallel_clauses (const_gimple gs)
{
- const gimple_statement_omp_parallel *omp_parallel_stmt =
- as_a <const gimple_statement_omp_parallel *> (gs);
+ const gomp_parallel *omp_parallel_stmt = as_a <const gomp_parallel *> (gs);
return omp_parallel_stmt->clauses;
}
-/* Return a pointer to the clauses associated with OMP_PARALLEL GS. */
+/* Return a pointer to the clauses associated with OMP_PARALLEL_STMT. */
static inline tree *
-gimple_omp_parallel_clauses_ptr (gimple gs)
+gimple_omp_parallel_clauses_ptr (gomp_parallel *omp_parallel_stmt)
{
- gimple_statement_omp_parallel *omp_parallel_stmt =
- as_a <gimple_statement_omp_parallel *> (gs);
return &omp_parallel_stmt->clauses;
}
-/* Set CLAUSES to be the list of clauses associated with OMP_PARALLEL
- GS. */
+/* Set CLAUSES to be the list of clauses associated with OMP_PARALLEL_STMT. */
static inline void
-gimple_omp_parallel_set_clauses (gimple gs, tree clauses)
+gimple_omp_parallel_set_clauses (gomp_parallel *omp_parallel_stmt,
+ tree clauses)
{
- gimple_statement_omp_parallel *omp_parallel_stmt =
- as_a <gimple_statement_omp_parallel *> (gs);
omp_parallel_stmt->clauses = clauses;
}
-/* Return the child function used to hold the body of OMP_PARALLEL GS. */
+/* Return the child function used to hold the body of OMP_PARALLEL_STMT. */
static inline tree
-gimple_omp_parallel_child_fn (const_gimple gs)
+gimple_omp_parallel_child_fn (const gomp_parallel *omp_parallel_stmt)
{
- const gimple_statement_omp_parallel *omp_parallel_stmt =
- as_a <const gimple_statement_omp_parallel *> (gs);
return omp_parallel_stmt->child_fn;
}
/* Return a pointer to the child function used to hold the body of
- OMP_PARALLEL GS. */
+ OMP_PARALLEL_STMT. */
static inline tree *
-gimple_omp_parallel_child_fn_ptr (gimple gs)
+gimple_omp_parallel_child_fn_ptr (gomp_parallel *omp_parallel_stmt)
{
- gimple_statement_omp_parallel *omp_parallel_stmt =
- as_a <gimple_statement_omp_parallel *> (gs);
return &omp_parallel_stmt->child_fn;
}
-/* Set CHILD_FN to be the child function for OMP_PARALLEL GS. */
+/* Set CHILD_FN to be the child function for OMP_PARALLEL_STMT. */
static inline void
-gimple_omp_parallel_set_child_fn (gimple gs, tree child_fn)
+gimple_omp_parallel_set_child_fn (gomp_parallel *omp_parallel_stmt,
+ tree child_fn)
{
- gimple_statement_omp_parallel *omp_parallel_stmt =
- as_a <gimple_statement_omp_parallel *> (gs);
omp_parallel_stmt->child_fn = child_fn;
}
/* Return the artificial argument used to send variables and values
- from the parent to the children threads in OMP_PARALLEL GS. */
+ from the parent to the children threads in OMP_PARALLEL_STMT. */
static inline tree
-gimple_omp_parallel_data_arg (const_gimple gs)
+gimple_omp_parallel_data_arg (const gomp_parallel *omp_parallel_stmt)
{
- const gimple_statement_omp_parallel *omp_parallel_stmt =
- as_a <const gimple_statement_omp_parallel *> (gs);
return omp_parallel_stmt->data_arg;
}
-/* Return a pointer to the data argument for OMP_PARALLEL GS. */
+/* Return a pointer to the data argument for OMP_PARALLEL_STMT. */
static inline tree *
-gimple_omp_parallel_data_arg_ptr (gimple gs)
+gimple_omp_parallel_data_arg_ptr (gomp_parallel *omp_parallel_stmt)
{
- gimple_statement_omp_parallel *omp_parallel_stmt =
- as_a <gimple_statement_omp_parallel *> (gs);
return &omp_parallel_stmt->data_arg;
}
-/* Set DATA_ARG to be the data argument for OMP_PARALLEL GS. */
+/* Set DATA_ARG to be the data argument for OMP_PARALLEL_STMT. */
static inline void
-gimple_omp_parallel_set_data_arg (gimple gs, tree data_arg)
+gimple_omp_parallel_set_data_arg (gomp_parallel *omp_parallel_stmt,
+ tree data_arg)
{
- gimple_statement_omp_parallel *omp_parallel_stmt =
- as_a <gimple_statement_omp_parallel *> (gs);
omp_parallel_stmt->data_arg = data_arg;
}
static inline tree
gimple_omp_task_clauses (const_gimple gs)
{
- const gimple_statement_omp_task *omp_task_stmt =
- as_a <const gimple_statement_omp_task *> (gs);
+ const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->clauses;
}
static inline tree *
gimple_omp_task_clauses_ptr (gimple gs)
{
- gimple_statement_omp_task *omp_task_stmt =
- as_a <gimple_statement_omp_task *> (gs);
+ gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->clauses;
}
static inline void
gimple_omp_task_set_clauses (gimple gs, tree clauses)
{
- gimple_statement_omp_task *omp_task_stmt =
- as_a <gimple_statement_omp_task *> (gs);
+ gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->clauses = clauses;
}
static inline tree
gimple_omp_task_child_fn (const_gimple gs)
{
- const gimple_statement_omp_task *omp_task_stmt =
- as_a <const gimple_statement_omp_task *> (gs);
+ const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->child_fn;
}
static inline tree *
gimple_omp_task_child_fn_ptr (gimple gs)
{
- gimple_statement_omp_task *omp_task_stmt =
- as_a <gimple_statement_omp_task *> (gs);
+ gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->child_fn;
}
static inline void
gimple_omp_task_set_child_fn (gimple gs, tree child_fn)
{
- gimple_statement_omp_task *omp_task_stmt =
- as_a <gimple_statement_omp_task *> (gs);
+ gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->child_fn = child_fn;
}
static inline tree
gimple_omp_task_data_arg (const_gimple gs)
{
- const gimple_statement_omp_task *omp_task_stmt =
- as_a <const gimple_statement_omp_task *> (gs);
+ const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->data_arg;
}
static inline tree *
gimple_omp_task_data_arg_ptr (gimple gs)
{
- gimple_statement_omp_task *omp_task_stmt =
- as_a <gimple_statement_omp_task *> (gs);
+ gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->data_arg;
}
static inline void
gimple_omp_task_set_data_arg (gimple gs, tree data_arg)
{
- gimple_statement_omp_task *omp_task_stmt =
- as_a <gimple_statement_omp_task *> (gs);
+ gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->data_arg = data_arg;
}
static inline tree
gimple_omp_taskreg_clauses (const_gimple gs)
{
- const gimple_statement_omp_taskreg *omp_taskreg_stmt =
- as_a <const gimple_statement_omp_taskreg *> (gs);
+ const gimple_statement_omp_taskreg *omp_taskreg_stmt
+ = as_a <const gimple_statement_omp_taskreg *> (gs);
return omp_taskreg_stmt->clauses;
}
static inline tree *
gimple_omp_taskreg_clauses_ptr (gimple gs)
{
- gimple_statement_omp_taskreg *omp_taskreg_stmt =
- as_a <gimple_statement_omp_taskreg *> (gs);
+ gimple_statement_omp_taskreg *omp_taskreg_stmt
+ = as_a <gimple_statement_omp_taskreg *> (gs);
return &omp_taskreg_stmt->clauses;
}
static inline void
gimple_omp_taskreg_set_clauses (gimple gs, tree clauses)
{
- gimple_statement_omp_taskreg *omp_taskreg_stmt =
- as_a <gimple_statement_omp_taskreg *> (gs);
+ gimple_statement_omp_taskreg *omp_taskreg_stmt
+ = as_a <gimple_statement_omp_taskreg *> (gs);
omp_taskreg_stmt->clauses = clauses;
}
static inline tree
gimple_omp_taskreg_child_fn (const_gimple gs)
{
- const gimple_statement_omp_taskreg *omp_taskreg_stmt =
- as_a <const gimple_statement_omp_taskreg *> (gs);
+ const gimple_statement_omp_taskreg *omp_taskreg_stmt
+ = as_a <const gimple_statement_omp_taskreg *> (gs);
return omp_taskreg_stmt->child_fn;
}
static inline tree *
gimple_omp_taskreg_child_fn_ptr (gimple gs)
{
- gimple_statement_omp_taskreg *omp_taskreg_stmt =
- as_a <gimple_statement_omp_taskreg *> (gs);
+ gimple_statement_omp_taskreg *omp_taskreg_stmt
+ = as_a <gimple_statement_omp_taskreg *> (gs);
return &omp_taskreg_stmt->child_fn;
}
static inline void
gimple_omp_taskreg_set_child_fn (gimple gs, tree child_fn)
{
- gimple_statement_omp_taskreg *omp_taskreg_stmt =
- as_a <gimple_statement_omp_taskreg *> (gs);
+ gimple_statement_omp_taskreg *omp_taskreg_stmt
+ = as_a <gimple_statement_omp_taskreg *> (gs);
omp_taskreg_stmt->child_fn = child_fn;
}
static inline tree
gimple_omp_taskreg_data_arg (const_gimple gs)
{
- const gimple_statement_omp_taskreg *omp_taskreg_stmt =
- as_a <const gimple_statement_omp_taskreg *> (gs);
+ const gimple_statement_omp_taskreg *omp_taskreg_stmt
+ = as_a <const gimple_statement_omp_taskreg *> (gs);
return omp_taskreg_stmt->data_arg;
}
static inline tree *
gimple_omp_taskreg_data_arg_ptr (gimple gs)
{
- gimple_statement_omp_taskreg *omp_taskreg_stmt =
- as_a <gimple_statement_omp_taskreg *> (gs);
+ gimple_statement_omp_taskreg *omp_taskreg_stmt
+ = as_a <gimple_statement_omp_taskreg *> (gs);
return &omp_taskreg_stmt->data_arg;
}
static inline void
gimple_omp_taskreg_set_data_arg (gimple gs, tree data_arg)
{
- gimple_statement_omp_taskreg *omp_taskreg_stmt =
- as_a <gimple_statement_omp_taskreg *> (gs);
+ gimple_statement_omp_taskreg *omp_taskreg_stmt
+ = as_a <gimple_statement_omp_taskreg *> (gs);
omp_taskreg_stmt->data_arg = data_arg;
}
static inline tree
gimple_omp_task_copy_fn (const_gimple gs)
{
- const gimple_statement_omp_task *omp_task_stmt =
- as_a <const gimple_statement_omp_task *> (gs);
+ const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->copy_fn;
}
static inline tree *
gimple_omp_task_copy_fn_ptr (gimple gs)
{
- gimple_statement_omp_task *omp_task_stmt =
- as_a <gimple_statement_omp_task *> (gs);
+ gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->copy_fn;
}
static inline void
gimple_omp_task_set_copy_fn (gimple gs, tree copy_fn)
{
- gimple_statement_omp_task *omp_task_stmt =
- as_a <gimple_statement_omp_task *> (gs);
+ gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->copy_fn = copy_fn;
}
static inline tree
gimple_omp_task_arg_size (const_gimple gs)
{
- const gimple_statement_omp_task *omp_task_stmt =
- as_a <const gimple_statement_omp_task *> (gs);
+ const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->arg_size;
}
static inline tree *
gimple_omp_task_arg_size_ptr (gimple gs)
{
- gimple_statement_omp_task *omp_task_stmt =
- as_a <gimple_statement_omp_task *> (gs);
+ gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->arg_size;
}
static inline void
gimple_omp_task_set_arg_size (gimple gs, tree arg_size)
{
- gimple_statement_omp_task *omp_task_stmt =
- as_a <gimple_statement_omp_task *> (gs);
+ gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->arg_size = arg_size;
}
static inline tree
gimple_omp_task_arg_align (const_gimple gs)
{
- const gimple_statement_omp_task *omp_task_stmt =
- as_a <const gimple_statement_omp_task *> (gs);
+ const gomp_task *omp_task_stmt = as_a <const gomp_task *> (gs);
return omp_task_stmt->arg_align;
}
static inline tree *
gimple_omp_task_arg_align_ptr (gimple gs)
{
- gimple_statement_omp_task *omp_task_stmt =
- as_a <gimple_statement_omp_task *> (gs);
+ gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
return &omp_task_stmt->arg_align;
}
static inline void
gimple_omp_task_set_arg_align (gimple gs, tree arg_align)
{
- gimple_statement_omp_task *omp_task_stmt =
- as_a <gimple_statement_omp_task *> (gs);
+ gomp_task *omp_task_stmt = as_a <gomp_task *> (gs);
omp_task_stmt->arg_align = arg_align;
}
static inline tree
gimple_omp_single_clauses (const_gimple gs)
{
- const gimple_statement_omp_single *omp_single_stmt =
- as_a <const gimple_statement_omp_single *> (gs);
+ const gomp_single *omp_single_stmt = as_a <const gomp_single *> (gs);
return omp_single_stmt->clauses;
}
static inline tree *
gimple_omp_single_clauses_ptr (gimple gs)
{
- gimple_statement_omp_single *omp_single_stmt =
- as_a <gimple_statement_omp_single *> (gs);
+ gomp_single *omp_single_stmt = as_a <gomp_single *> (gs);
return &omp_single_stmt->clauses;
}
-/* Set CLAUSES to be the clauses associated with OMP_SINGLE GS. */
+/* Set CLAUSES to be the clauses associated with OMP_SINGLE_STMT. */
static inline void
-gimple_omp_single_set_clauses (gimple gs, tree clauses)
+gimple_omp_single_set_clauses (gomp_single *omp_single_stmt, tree clauses)
{
- gimple_statement_omp_single *omp_single_stmt =
- as_a <gimple_statement_omp_single *> (gs);
omp_single_stmt->clauses = clauses;
}
static inline tree
gimple_omp_target_clauses (const_gimple gs)
{
- const gimple_statement_omp_target *omp_target_stmt =
- as_a <const gimple_statement_omp_target *> (gs);
+ const gomp_target *omp_target_stmt = as_a <const gomp_target *> (gs);
return omp_target_stmt->clauses;
}
static inline tree *
gimple_omp_target_clauses_ptr (gimple gs)
{
- gimple_statement_omp_target *omp_target_stmt =
- as_a <gimple_statement_omp_target *> (gs);
+ gomp_target *omp_target_stmt = as_a <gomp_target *> (gs);
return &omp_target_stmt->clauses;
}
-/* Set CLAUSES to be the clauses associated with OMP_TARGET GS. */
+/* Set CLAUSES to be the clauses associated with OMP_TARGET_STMT. */
static inline void
-gimple_omp_target_set_clauses (gimple gs, tree clauses)
+gimple_omp_target_set_clauses (gomp_target *omp_target_stmt,
+ tree clauses)
{
- gimple_statement_omp_target *omp_target_stmt =
- as_a <gimple_statement_omp_target *> (gs);
omp_target_stmt->clauses = clauses;
}
/* Set the OMP target kind. */
static inline void
-gimple_omp_target_set_kind (gimple g, int kind)
+gimple_omp_target_set_kind (gomp_target *g, int kind)
{
- GIMPLE_CHECK (g, GIMPLE_OMP_TARGET);
g->subcode = (g->subcode & ~GF_OMP_TARGET_KIND_MASK)
| (kind & GF_OMP_TARGET_KIND_MASK);
}
-/* Return the child function used to hold the body of OMP_TARGET GS. */
+/* Return the child function used to hold the body of OMP_TARGET_STMT. */
static inline tree
-gimple_omp_target_child_fn (const_gimple gs)
+gimple_omp_target_child_fn (const gomp_target *omp_target_stmt)
{
- const gimple_statement_omp_target *omp_target_stmt =
- as_a <const gimple_statement_omp_target *> (gs);
return omp_target_stmt->child_fn;
}
/* Return a pointer to the child function used to hold the body of
- OMP_TARGET GS. */
+ OMP_TARGET_STMT. */
static inline tree *
-gimple_omp_target_child_fn_ptr (gimple gs)
+gimple_omp_target_child_fn_ptr (gomp_target *omp_target_stmt)
{
- gimple_statement_omp_target *omp_target_stmt =
- as_a <gimple_statement_omp_target *> (gs);
return &omp_target_stmt->child_fn;
}
-/* Set CHILD_FN to be the child function for OMP_TARGET GS. */
+/* Set CHILD_FN to be the child function for OMP_TARGET_STMT. */
static inline void
-gimple_omp_target_set_child_fn (gimple gs, tree child_fn)
+gimple_omp_target_set_child_fn (gomp_target *omp_target_stmt,
+ tree child_fn)
{
- gimple_statement_omp_target *omp_target_stmt =
- as_a <gimple_statement_omp_target *> (gs);
omp_target_stmt->child_fn = child_fn;
}
/* Return the artificial argument used to send variables and values
- from the parent to the children threads in OMP_TARGET GS. */
+ from the parent to the children threads in OMP_TARGET_STMT. */
static inline tree
-gimple_omp_target_data_arg (const_gimple gs)
+gimple_omp_target_data_arg (const gomp_target *omp_target_stmt)
{
- const gimple_statement_omp_target *omp_target_stmt =
- as_a <const gimple_statement_omp_target *> (gs);
return omp_target_stmt->data_arg;
}
-/* Return a pointer to the data argument for OMP_TARGET GS. */
+/* Return a pointer to the data argument for OMP_TARGET_STMT. */
static inline tree *
-gimple_omp_target_data_arg_ptr (gimple gs)
+gimple_omp_target_data_arg_ptr (gomp_target *omp_target_stmt)
{
- gimple_statement_omp_target *omp_target_stmt =
- as_a <gimple_statement_omp_target *> (gs);
return &omp_target_stmt->data_arg;
}
-/* Set DATA_ARG to be the data argument for OMP_TARGET GS. */
+/* Set DATA_ARG to be the data argument for OMP_TARGET_STMT. */
static inline void
-gimple_omp_target_set_data_arg (gimple gs, tree data_arg)
+gimple_omp_target_set_data_arg (gomp_target *omp_target_stmt,
+ tree data_arg)
{
- gimple_statement_omp_target *omp_target_stmt =
- as_a <gimple_statement_omp_target *> (gs);
omp_target_stmt->data_arg = data_arg;
}
static inline tree
gimple_omp_teams_clauses (const_gimple gs)
{
- const gimple_statement_omp_teams *omp_teams_stmt =
- as_a <const gimple_statement_omp_teams *> (gs);
+ const gomp_teams *omp_teams_stmt = as_a <const gomp_teams *> (gs);
return omp_teams_stmt->clauses;
}
static inline tree *
gimple_omp_teams_clauses_ptr (gimple gs)
{
- gimple_statement_omp_teams *omp_teams_stmt =
- as_a <gimple_statement_omp_teams *> (gs);
+ gomp_teams *omp_teams_stmt = as_a <gomp_teams *> (gs);
return &omp_teams_stmt->clauses;
}
-/* Set CLAUSES to be the clauses associated with OMP_TEAMS GS. */
+/* Set CLAUSES to be the clauses associated with OMP_TEAMS_STMT. */
static inline void
-gimple_omp_teams_set_clauses (gimple gs, tree clauses)
+gimple_omp_teams_set_clauses (gomp_teams *omp_teams_stmt, tree clauses)
{
- gimple_statement_omp_teams *omp_teams_stmt =
- as_a <gimple_statement_omp_teams *> (gs);
omp_teams_stmt->clauses = clauses;
}
static inline tree
gimple_omp_sections_clauses (const_gimple gs)
{
- const gimple_statement_omp_sections *omp_sections_stmt =
- as_a <const gimple_statement_omp_sections *> (gs);
+ const gomp_sections *omp_sections_stmt = as_a <const gomp_sections *> (gs);
return omp_sections_stmt->clauses;
}
static inline tree *
gimple_omp_sections_clauses_ptr (gimple gs)
{
- gimple_statement_omp_sections *omp_sections_stmt =
- as_a <gimple_statement_omp_sections *> (gs);
+ gomp_sections *omp_sections_stmt = as_a <gomp_sections *> (gs);
return &omp_sections_stmt->clauses;
}
static inline void
gimple_omp_sections_set_clauses (gimple gs, tree clauses)
{
- gimple_statement_omp_sections *omp_sections_stmt =
- as_a <gimple_statement_omp_sections *> (gs);
+ gomp_sections *omp_sections_stmt = as_a <gomp_sections *> (gs);
omp_sections_stmt->clauses = clauses;
}
static inline tree
gimple_omp_sections_control (const_gimple gs)
{
- const gimple_statement_omp_sections *omp_sections_stmt =
- as_a <const gimple_statement_omp_sections *> (gs);
+ const gomp_sections *omp_sections_stmt = as_a <const gomp_sections *> (gs);
return omp_sections_stmt->control;
}
static inline tree *
gimple_omp_sections_control_ptr (gimple gs)
{
- gimple_statement_omp_sections *omp_sections_stmt =
- as_a <gimple_statement_omp_sections *> (gs);
+ gomp_sections *omp_sections_stmt = as_a <gomp_sections *> (gs);
return &omp_sections_stmt->control;
}
static inline void
gimple_omp_sections_set_control (gimple gs, tree control)
{
- gimple_statement_omp_sections *omp_sections_stmt =
- as_a <gimple_statement_omp_sections *> (gs);
+ gomp_sections *omp_sections_stmt = as_a <gomp_sections *> (gs);
omp_sections_stmt->control = control;
}
static inline void
gimple_omp_for_set_cond (gimple gs, size_t i, enum tree_code cond)
{
- gimple_statement_omp_for *omp_for_stmt =
- as_a <gimple_statement_omp_for *> (gs);
+ gomp_for *omp_for_stmt = as_a <gomp_for *> (gs);
gcc_gimple_checking_assert (TREE_CODE_CLASS (cond) == tcc_comparison
&& i < omp_for_stmt->collapse);
omp_for_stmt->iter[i].cond = cond;
static inline enum tree_code
gimple_omp_for_cond (const_gimple gs, size_t i)
{
- const gimple_statement_omp_for *omp_for_stmt =
- as_a <const gimple_statement_omp_for *> (gs);
+ const gomp_for *omp_for_stmt = as_a <const gomp_for *> (gs);
gcc_gimple_checking_assert (i < omp_for_stmt->collapse);
return omp_for_stmt->iter[i].cond;
}
/* Set the value being stored in an atomic store. */
static inline void
-gimple_omp_atomic_store_set_val (gimple g, tree val)
+gimple_omp_atomic_store_set_val (gomp_atomic_store *store_stmt, tree val)
{
- gimple_statement_omp_atomic_store *omp_atomic_store_stmt =
- as_a <gimple_statement_omp_atomic_store *> (g);
- omp_atomic_store_stmt->val = val;
+ store_stmt->val = val;
}
/* Return the value being stored in an atomic store. */
static inline tree
-gimple_omp_atomic_store_val (const_gimple g)
+gimple_omp_atomic_store_val (const gomp_atomic_store *store_stmt)
{
- const gimple_statement_omp_atomic_store *omp_atomic_store_stmt =
- as_a <const gimple_statement_omp_atomic_store *> (g);
- return omp_atomic_store_stmt->val;
+ return store_stmt->val;
}
/* Return a pointer to the value being stored in an atomic store. */
static inline tree *
-gimple_omp_atomic_store_val_ptr (gimple g)
+gimple_omp_atomic_store_val_ptr (gomp_atomic_store *store_stmt)
{
- gimple_statement_omp_atomic_store *omp_atomic_store_stmt =
- as_a <gimple_statement_omp_atomic_store *> (g);
- return &omp_atomic_store_stmt->val;
+ return &store_stmt->val;
}
/* Set the LHS of an atomic load. */
static inline void
-gimple_omp_atomic_load_set_lhs (gimple g, tree lhs)
+gimple_omp_atomic_load_set_lhs (gomp_atomic_load *load_stmt, tree lhs)
{
- gimple_statement_omp_atomic_load *omp_atomic_load_stmt =
- as_a <gimple_statement_omp_atomic_load *> (g);
- omp_atomic_load_stmt->lhs = lhs;
+ load_stmt->lhs = lhs;
}
/* Get the LHS of an atomic load. */
static inline tree
-gimple_omp_atomic_load_lhs (const_gimple g)
+gimple_omp_atomic_load_lhs (const gomp_atomic_load *load_stmt)
{
- const gimple_statement_omp_atomic_load *omp_atomic_load_stmt =
- as_a <const gimple_statement_omp_atomic_load *> (g);
- return omp_atomic_load_stmt->lhs;
+ return load_stmt->lhs;
}
/* Return a pointer to the LHS of an atomic load. */
static inline tree *
-gimple_omp_atomic_load_lhs_ptr (gimple g)
+gimple_omp_atomic_load_lhs_ptr (gomp_atomic_load *load_stmt)
{
- gimple_statement_omp_atomic_load *omp_atomic_load_stmt =
- as_a <gimple_statement_omp_atomic_load *> (g);
- return &omp_atomic_load_stmt->lhs;
+ return &load_stmt->lhs;
}
/* Set the RHS of an atomic load. */
static inline void
-gimple_omp_atomic_load_set_rhs (gimple g, tree rhs)
+gimple_omp_atomic_load_set_rhs (gomp_atomic_load *load_stmt, tree rhs)
{
- gimple_statement_omp_atomic_load *omp_atomic_load_stmt =
- as_a <gimple_statement_omp_atomic_load *> (g);
- omp_atomic_load_stmt->rhs = rhs;
+ load_stmt->rhs = rhs;
}
/* Get the RHS of an atomic load. */
static inline tree
-gimple_omp_atomic_load_rhs (const_gimple g)
+gimple_omp_atomic_load_rhs (const gomp_atomic_load *load_stmt)
{
- const gimple_statement_omp_atomic_load *omp_atomic_load_stmt =
- as_a <const gimple_statement_omp_atomic_load *> (g);
- return omp_atomic_load_stmt->rhs;
+ return load_stmt->rhs;
}
/* Return a pointer to the RHS of an atomic load. */
static inline tree *
-gimple_omp_atomic_load_rhs_ptr (gimple g)
+gimple_omp_atomic_load_rhs_ptr (gomp_atomic_load *load_stmt)
{
- gimple_statement_omp_atomic_load *omp_atomic_load_stmt =
- as_a <gimple_statement_omp_atomic_load *> (g);
- return &omp_atomic_load_stmt->rhs;
+ return &load_stmt->rhs;
}
/* Get the definition of the control variable in a GIMPLE_OMP_CONTINUE. */
static inline tree
-gimple_omp_continue_control_def (const_gimple g)
+gimple_omp_continue_control_def (const gomp_continue *cont_stmt)
{
- const gimple_statement_omp_continue *omp_continue_stmt =
- as_a <const gimple_statement_omp_continue *> (g);
- return omp_continue_stmt->control_def;
+ return cont_stmt->control_def;
}
/* The same as above, but return the address. */
static inline tree *
-gimple_omp_continue_control_def_ptr (gimple g)
+gimple_omp_continue_control_def_ptr (gomp_continue *cont_stmt)
{
- gimple_statement_omp_continue *omp_continue_stmt =
- as_a <gimple_statement_omp_continue *> (g);
- return &omp_continue_stmt->control_def;
+ return &cont_stmt->control_def;
}
/* Set the definition of the control variable in a GIMPLE_OMP_CONTINUE. */
static inline void
-gimple_omp_continue_set_control_def (gimple g, tree def)
+gimple_omp_continue_set_control_def (gomp_continue *cont_stmt, tree def)
{
- gimple_statement_omp_continue *omp_continue_stmt =
- as_a <gimple_statement_omp_continue *> (g);
- omp_continue_stmt->control_def = def;
+ cont_stmt->control_def = def;
}
/* Get the use of the control variable in a GIMPLE_OMP_CONTINUE. */
static inline tree
-gimple_omp_continue_control_use (const_gimple g)
+gimple_omp_continue_control_use (const gomp_continue *cont_stmt)
{
- const gimple_statement_omp_continue *omp_continue_stmt =
- as_a <const gimple_statement_omp_continue *> (g);
- return omp_continue_stmt->control_use;
+ return cont_stmt->control_use;
}
/* The same as above, but return the address. */
static inline tree *
-gimple_omp_continue_control_use_ptr (gimple g)
+gimple_omp_continue_control_use_ptr (gomp_continue *cont_stmt)
{
- gimple_statement_omp_continue *omp_continue_stmt =
- as_a <gimple_statement_omp_continue *> (g);
- return &omp_continue_stmt->control_use;
+ return &cont_stmt->control_use;
}
/* Set the use of the control variable in a GIMPLE_OMP_CONTINUE. */
static inline void
-gimple_omp_continue_set_control_use (gimple g, tree use)
+gimple_omp_continue_set_control_use (gomp_continue *cont_stmt, tree use)
{
- gimple_statement_omp_continue *omp_continue_stmt =
- as_a <gimple_statement_omp_continue *> (g);
- omp_continue_stmt->control_use = use;
+ cont_stmt->control_use = use;
}
-/* Return a pointer to the body for the GIMPLE_TRANSACTION statement GS. */
+/* Return a pointer to the body for the GIMPLE_TRANSACTION statement
+ TRANSACTION_STMT. */
static inline gimple_seq *
-gimple_transaction_body_ptr (gimple gs)
+gimple_transaction_body_ptr (gtransaction *transaction_stmt)
{
- gimple_statement_transaction *transaction_stmt =
- as_a <gimple_statement_transaction *> (gs);
return &transaction_stmt->body;
}
-/* Return the body for the GIMPLE_TRANSACTION statement GS. */
+/* Return the body for the GIMPLE_TRANSACTION statement TRANSACTION_STMT. */
static inline gimple_seq
-gimple_transaction_body (gimple gs)
+gimple_transaction_body (gtransaction *transaction_stmt)
{
- return *gimple_transaction_body_ptr (gs);
+ return *gimple_transaction_body_ptr (transaction_stmt);
}
/* Return the label associated with a GIMPLE_TRANSACTION. */
static inline tree
-gimple_transaction_label (const_gimple gs)
+gimple_transaction_label (const gtransaction *transaction_stmt)
{
- const gimple_statement_transaction *transaction_stmt =
- as_a <const gimple_statement_transaction *> (gs);
return transaction_stmt->label;
}
static inline tree *
-gimple_transaction_label_ptr (gimple gs)
+gimple_transaction_label_ptr (gtransaction *transaction_stmt)
{
- gimple_statement_transaction *transaction_stmt =
- as_a <gimple_statement_transaction *> (gs);
return &transaction_stmt->label;
}
/* Return the subcode associated with a GIMPLE_TRANSACTION. */
static inline unsigned int
-gimple_transaction_subcode (const_gimple gs)
+gimple_transaction_subcode (const gtransaction *transaction_stmt)
{
- GIMPLE_CHECK (gs, GIMPLE_TRANSACTION);
- return gs->subcode;
+ return transaction_stmt->subcode;
}
-/* Set BODY to be the body for the GIMPLE_TRANSACTION statement GS. */
+/* Set BODY to be the body for the GIMPLE_TRANSACTION statement
+ TRANSACTION_STMT. */
static inline void
-gimple_transaction_set_body (gimple gs, gimple_seq body)
+gimple_transaction_set_body (gtransaction *transaction_stmt,
+ gimple_seq body)
{
- gimple_statement_transaction *transaction_stmt =
- as_a <gimple_statement_transaction *> (gs);
transaction_stmt->body = body;
}
/* Set the label associated with a GIMPLE_TRANSACTION. */
static inline void
-gimple_transaction_set_label (gimple gs, tree label)
+gimple_transaction_set_label (gtransaction *transaction_stmt, tree label)
{
- gimple_statement_transaction *transaction_stmt =
- as_a <gimple_statement_transaction *> (gs);
transaction_stmt->label = label;
}
/* Set the subcode associated with a GIMPLE_TRANSACTION. */
static inline void
-gimple_transaction_set_subcode (gimple gs, unsigned int subcode)
+gimple_transaction_set_subcode (gtransaction *transaction_stmt,
+ unsigned int subcode)
{
- GIMPLE_CHECK (gs, GIMPLE_TRANSACTION);
- gs->subcode = subcode;
+ transaction_stmt->subcode = subcode;
}
/* Return a pointer to the return value for GIMPLE_RETURN GS. */
static inline tree *
-gimple_return_retval_ptr (const_gimple gs)
+gimple_return_retval_ptr (const greturn *gs)
{
- GIMPLE_CHECK (gs, GIMPLE_RETURN);
return gimple_op_ptr (gs, 0);
}
/* Return the return value for GIMPLE_RETURN GS. */
static inline tree
-gimple_return_retval (const_gimple gs)
+gimple_return_retval (const greturn *gs)
{
- GIMPLE_CHECK (gs, GIMPLE_RETURN);
return gimple_op (gs, 0);
}
/* Set RETVAL to be the return value for GIMPLE_RETURN GS. */
static inline void
-gimple_return_set_retval (gimple gs, tree retval)
+gimple_return_set_retval (greturn *gs, tree retval)
{
- GIMPLE_CHECK (gs, GIMPLE_RETURN);
gimple_set_op (gs, 0, retval);
}
original RHS type as far as we can reconstruct it. */
if (code == GIMPLE_CALL)
{
- if (gimple_call_internal_p (stmt)
- && gimple_call_internal_fn (stmt) == IFN_MASK_STORE)
- type = TREE_TYPE (gimple_call_arg (stmt, 3));
+ const gcall *call_stmt = as_a <const gcall *> (stmt);
+ if (gimple_call_internal_p (call_stmt)
+ && gimple_call_internal_fn (call_stmt) == IFN_MASK_STORE)
+ type = TREE_TYPE (gimple_call_arg (call_stmt, 3));
else
- type = gimple_call_return_type (stmt);
+ type = gimple_call_return_type (call_stmt);
}
else
switch (gimple_assign_rhs_code (stmt))
switch (gimple_code (stmt))
{
case GIMPLE_COND:
- gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
- is_gimple_val, fb_rvalue);
- gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
- is_gimple_val, fb_rvalue);
+ {
+ gcond *cond_stmt = as_a <gcond *> (stmt);
+ gimplify_expr (gimple_cond_lhs_ptr (cond_stmt), &pre, NULL,
+ is_gimple_val, fb_rvalue);
+ gimplify_expr (gimple_cond_rhs_ptr (cond_stmt), &pre, NULL,
+ is_gimple_val, fb_rvalue);
+ }
break;
case GIMPLE_SWITCH:
- gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
- is_gimple_val, fb_rvalue);
+ gimplify_expr (gimple_switch_index_ptr (as_a <gswitch *> (stmt)),
+ &pre, NULL, is_gimple_val, fb_rvalue);
break;
case GIMPLE_OMP_ATOMIC_LOAD:
- gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
- is_gimple_val, fb_rvalue);
+ gimplify_expr (gimple_omp_atomic_load_rhs_ptr (
+ as_a <gomp_atomic_load *> (stmt)),
+ &pre, NULL, is_gimple_val, fb_rvalue);
break;
case GIMPLE_ASM:
{
- size_t i, noutputs = gimple_asm_noutputs (stmt);
+ gasm *asm_stmt = as_a <gasm *> (stmt);
+ size_t i, noutputs = gimple_asm_noutputs (asm_stmt);
const char *constraint, **oconstraints;
bool allows_mem, allows_reg, is_inout;
= (const char **) alloca ((noutputs) * sizeof (const char *));
for (i = 0; i < noutputs; i++)
{
- tree op = gimple_asm_output_op (stmt, i);
+ tree op = gimple_asm_output_op (asm_stmt, i);
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
oconstraints[i] = constraint;
parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
is_inout ? is_gimple_min_lval : is_gimple_lvalue,
fb_lvalue | fb_mayfail);
}
- for (i = 0; i < gimple_asm_ninputs (stmt); i++)
+ for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
{
- tree op = gimple_asm_input_op (stmt, i);
+ tree op = gimple_asm_input_op (asm_stmt, i);
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
parse_input_constraint (&constraint, 0, 0, noutputs, 0,
oconstraints, &allows_mem, &allows_reg);
{
struct gimplify_ctx *prev_context;
- vec<gimple> bind_expr_stack;
+ vec<gbind *> bind_expr_stack;
tree temps;
gimple_seq conditional_cleanups;
tree exit_label;
/* Push a GIMPLE_BIND tuple onto the stack of bindings. */
static void
-gimple_push_bind_expr (gimple gimple_bind)
+gimple_push_bind_expr (gbind *bind_stmt)
{
gimplify_ctxp->bind_expr_stack.reserve (8);
- gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
+ gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
/* Pop the first element off the stack of bindings. */
/* Return the first element of the stack of bindings. */
-gimple
+gbind *
gimple_current_bind_expr (void)
{
return gimplify_ctxp->bind_expr_stack.last ();
/* Return the stack of bindings created during gimplification. */
-vec<gimple>
+vec<gbind *>
gimple_bind_expr_stack (void)
{
return gimplify_ctxp->bind_expr_stack;
generate debug info for them; otherwise don't. */
void
-declare_vars (tree vars, gimple scope, bool debug_info)
+declare_vars (tree vars, gimple gs, bool debug_info)
{
tree last = vars;
if (last)
{
tree temps, block;
- gcc_assert (gimple_code (scope) == GIMPLE_BIND);
+ gbind *scope = as_a <gbind *> (gs);
temps = nreverse (last);
a temporary through which they communicate. */
static void
-build_stack_save_restore (gimple *save, gimple *restore)
+build_stack_save_restore (gcall **save, gcall **restore)
{
tree tmp_var;
tree bind_expr = *expr_p;
bool old_save_stack = gimplify_ctxp->save_stack;
tree t;
- gimple gimple_bind;
+ gbind *bind_stmt;
gimple_seq body, cleanup;
- gimple stack_save;
+ gcall *stack_save;
location_t start_locus = 0, end_locus = 0;
tree temp = voidify_wrapper_expr (bind_expr, NULL);
DECL_GIMPLE_REG_P (t) = 1;
}
- gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
-				   BIND_EXPR_BLOCK (bind_expr));
+ bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
+				 BIND_EXPR_BLOCK (bind_expr));
- gimple_push_bind_expr (gimple_bind);
+ gimple_push_bind_expr (bind_stmt);
gimplify_ctxp->save_stack = false;
/* Gimplify the body into the GIMPLE_BIND tuple's body. */
body = NULL;
gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
- gimple_bind_set_body (gimple_bind, body);
+ gimple_bind_set_body (bind_stmt, body);
/* Source location wise, the cleanup code (stack_restore and clobbers)
belongs to the end of the block, so propagate what we have. The
stack_save = NULL;
if (gimplify_ctxp->save_stack)
{
- gimple stack_restore;
+ gcall *stack_restore;
/* Save stack on entry and restore it on exit. Add a try_finally
block to achieve this. */
if (cleanup)
{
- gimple gs;
+ gtry *gs;
gimple_seq new_body;
new_body = NULL;
- gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
+ gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
GIMPLE_TRY_FINALLY);
if (stack_save)
gimplify_seq_add_stmt (&new_body, stack_save);
gimplify_seq_add_stmt (&new_body, gs);
- gimple_bind_set_body (gimple_bind, new_body);
+ gimple_bind_set_body (bind_stmt, new_body);
}
gimplify_ctxp->save_stack = old_save_stack;
gimple_pop_bind_expr ();
- gimplify_seq_add_stmt (pre_p, gimple_bind);
+ gimplify_seq_add_stmt (pre_p, bind_stmt);
if (temp)
{
static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
- gimple ret;
+ greturn *ret;
tree ret_expr = TREE_OPERAND (stmt, 0);
tree result_decl, result;
|| TREE_CODE (ret_expr) == RESULT_DECL
|| ret_expr == error_mark_node)
{
- gimple ret = gimple_build_return (ret_expr);
+ greturn *ret = gimple_build_return (ret_expr);
gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
gimplify_seq_add_stmt (pre_p, ret);
return GS_ALL_DONE;
vec<tree> labels;
vec<tree> saved_labels;
tree default_case = NULL_TREE;
- gimple gimple_switch;
+ gswitch *switch_stmt;
/* If someone can be bothered to fill in the labels, they can
be bothered to null out the body too. */
if (!default_case)
{
- gimple new_default;
+ glabel *new_default;
default_case
= build_case_label (NULL_TREE, NULL_TREE,
gimplify_seq_add_stmt (&switch_body_seq, new_default);
}
- gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
+ switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
default_case, labels);
- gimplify_seq_add_stmt (pre_p, gimple_switch);
+ gimplify_seq_add_stmt (pre_p, switch_stmt);
gimplify_seq_add_seq (pre_p, switch_body_seq);
labels.release ();
}
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
struct gimplify_ctx *ctxp;
- gimple gimple_label;
+ glabel *label_stmt;
/* Invalid OpenMP programs can play Duff's Device type games with
#pragma omp parallel. At least in the C front end, we don't
if (ctxp->case_labels.exists ())
break;
- gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
+ label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
ctxp->case_labels.safe_push (*expr_p);
- gimplify_seq_add_stmt (pre_p, gimple_label);
+ gimplify_seq_add_stmt (pre_p, label_stmt);
return GS_ALL_DONE;
}
tree fndecl, parms, p, fnptrtype;
enum gimplify_status ret;
int i, nargs;
- gimple call;
+ gcall *call;
bool builtin_va_start_p = false;
location_t loc = EXPR_LOCATION (*expr_p);
enum gimplify_status ret;
tree label_true, label_false, label_cont;
bool have_then_clause_p, have_else_clause_p;
- gimple gimple_cond;
+ gcond *cond_stmt;
enum tree_code pred_code;
gimple_seq seq = NULL;
gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
&arm2);
- gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
+ cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
label_false);
- gimplify_seq_add_stmt (&seq, gimple_cond);
+ gimplify_seq_add_stmt (&seq, cond_stmt);
label_cont = NULL_TREE;
if (!have_then_clause_p)
{
gimple_seq *seq_p)
{
tree t, to, to_ptr, from, from_ptr;
- gimple gs;
+ gcall *gs;
location_t loc = EXPR_LOCATION (*expr_p);
to = TREE_OPERAND (*expr_p, 0);
gimple_seq *seq_p)
{
tree t, from, to, to_ptr;
- gimple gs;
+ gcall *gs;
location_t loc = EXPR_LOCATION (*expr_p);
/* Assert our assumptions, to abort instead of producing wrong code
{
tree lhs = TREE_OPERAND (*expr_p, 0);
tree rhs = TREE_OPERAND (*expr_p, 1);
- gimple init = gimple_build_assign (lhs, rhs);
+ gassign *init = gimple_build_assign (lhs, rhs);
gimplify_seq_add_stmt (pre_p, init);
*expr_p = NULL;
}
{
/* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
instead of a GIMPLE_ASSIGN. */
+ gcall *call_stmt;
if (CALL_EXPR_FN (*from_p) == NULL_TREE)
{
/* Gimplify internal functions created in the FEs. */
EXPR_LOCATION (*from_p));
vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
}
- assign = gimple_build_call_internal_vec (ifn, vargs);
- gimple_set_location (assign, EXPR_LOCATION (*expr_p));
+ call_stmt = gimple_build_call_internal_vec (ifn, vargs);
+ gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
}
else
{
&& DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
&& call_expr_nargs (*from_p) == 3)
- assign = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
- CALL_EXPR_ARG (*from_p, 0),
- CALL_EXPR_ARG (*from_p, 1),
- CALL_EXPR_ARG (*from_p, 2));
+ call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
+ CALL_EXPR_ARG (*from_p, 0),
+ CALL_EXPR_ARG (*from_p, 1),
+ CALL_EXPR_ARG (*from_p, 2));
else
{
- assign = gimple_build_call_from_tree (*from_p);
- gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
+ call_stmt = gimple_build_call_from_tree (*from_p);
+ gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
}
}
- notice_special_calls (assign);
- if (!gimple_call_noreturn_p (assign))
- gimple_call_set_lhs (assign, *to_p);
+ notice_special_calls (call_stmt);
+ if (!gimple_call_noreturn_p (call_stmt))
+ gimple_call_set_lhs (call_stmt, *to_p);
+ assign = call_stmt;
}
else
{
const char *constraint;
bool allows_mem, allows_reg, is_inout;
enum gimplify_status ret, tret;
- gimple stmt;
+ gasm *stmt;
vec<tree, va_gc> *inputs;
vec<tree, va_gc> *outputs;
vec<tree, va_gc> *clobbers;
}
else
{
- gimple_statement_try *gtry;
+ gtry *gtry;
gimple_seq seq;
enum gimple_try_flags kind;
val
*/
tree flag = create_tmp_var (boolean_type_node, "cleanup");
- gimple ffalse = gimple_build_assign (flag, boolean_false_node);
- gimple ftrue = gimple_build_assign (flag, boolean_true_node);
+ gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
+ gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
gimplify_stmt (&cleanup, &cleanup_stmts);
tree for_stmt, orig_for_stmt, decl, var, t;
enum gimplify_status ret = GS_ALL_DONE;
enum gimplify_status tret;
- gimple gfor;
+ gomp_for *gfor;
gimple_seq for_body, for_pre_body;
int i;
bool simd;
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
tree expr = *expr_p;
- gimple stmt;
+ gomp_target *stmt;
gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
ORT_WORKSHARE);
? NULL : TREE_OPERAND (*expr_p, 1);
tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
tree tmp_load;
- gimple loadstmt, storestmt;
+ gomp_atomic_load *loadstmt;
+ gomp_atomic_store *storestmt;
tmp_load = create_tmp_reg (type, NULL);
if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
- gimple g;
+ gimple body_stmt;
+ gtransaction *trans_stmt;
gimple_seq body = NULL;
int subcode = 0;
push_gimplify_context ();
temp = voidify_wrapper_expr (*expr_p, NULL);
- g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
- pop_gimplify_context (g);
+ body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
+ pop_gimplify_context (body_stmt);
- g = gimple_build_transaction (body, NULL);
+ trans_stmt = gimple_build_transaction (body, NULL);
if (TRANSACTION_EXPR_OUTER (expr))
subcode = GTMA_IS_OUTER;
else if (TRANSACTION_EXPR_RELAXED (expr))
subcode = GTMA_IS_RELAXED;
- gimple_transaction_set_subcode (g, subcode);
+ gimple_transaction_set_subcode (trans_stmt, subcode);
- gimplify_seq_add_stmt (pre_p, g);
+ gimplify_seq_add_stmt (pre_p, trans_stmt);
if (temp)
{
}
tree tmp = create_tmp_var (type, NULL);
gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
- gimple call
+ gcall *call
= gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
gimple_call_set_lhs (call, tmp);
gimplify_seq_add_stmt (pre_p, call);
case TRY_CATCH_EXPR:
{
gimple_seq eval, cleanup;
- gimple try_;
+ gtry *try_;
/* Calls to destructors are generated automatically in FINALLY/CATCH
block. They should have location as UNKNOWN_LOCATION. However,
containing the sequence of corresponding GIMPLE statements. If DO_PARMS
is true, also gimplify the parameters. */
-gimple
+gbind *
gimplify_body (tree fndecl, bool do_parms)
{
location_t saved_location = input_location;
gimple_seq parm_stmts, seq;
- gimple outer_bind;
+ gimple outer_stmt;
+ gbind *outer_bind;
struct cgraph_node *cgn;
timevar_push (TV_TREE_GIMPLIFY);
/* Gimplify the function's body. */
seq = NULL;
gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
- outer_bind = gimple_seq_first_stmt (seq);
- if (!outer_bind)
+ outer_stmt = gimple_seq_first_stmt (seq);
+ if (!outer_stmt)
{
- outer_bind = gimple_build_nop ();
- gimplify_seq_add_stmt (&seq, outer_bind);
+ outer_stmt = gimple_build_nop ();
+ gimplify_seq_add_stmt (&seq, outer_stmt);
}
/* The body must contain exactly one statement, a GIMPLE_BIND. If this is
not the case, wrap everything in a GIMPLE_BIND to make it so. */
- if (gimple_code (outer_bind) == GIMPLE_BIND
+ if (gimple_code (outer_stmt) == GIMPLE_BIND
&& gimple_seq_first (seq) == gimple_seq_last (seq))
- ;
+ outer_bind = as_a <gbind *> (outer_stmt);
else
outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
{
tree parm, ret;
gimple_seq seq;
- gimple bind;
+ gbind *bind;
gcc_assert (!gimple_body (fndecl));
&& !flag_instrument_functions_exclude_p (fndecl))
{
tree x;
- gimple new_bind;
+ gbind *new_bind;
gimple tf;
gimple_seq cleanup = NULL, body = NULL;
tree tmp_var;
- gimple call;
+ gcall *call;
x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
call = gimple_build_call (x, 1, integer_zero_node);
extern void push_gimplify_context (bool in_ssa = false,
bool rhs_cond_ok = false);
extern void pop_gimplify_context (gimple);
-extern gimple gimple_current_bind_expr (void);
-extern vec<gimple> gimple_bind_expr_stack (void);
+extern gbind *gimple_current_bind_expr (void);
+extern vec<gbind *> gimple_bind_expr_stack (void);
extern void gimplify_and_add (tree, gimple_seq *);
extern tree get_formal_tmp_var (tree, gimple_seq *);
extern tree get_initialized_tmp_var (tree, gimple_seq *, gimple_seq *);
extern void gimplify_type_sizes (tree, gimple_seq *);
extern void gimplify_one_sizepos (tree *, gimple_seq *);
-extern gimple gimplify_body (tree, bool);
+extern gbind *gimplify_body (tree, bool);
extern enum gimplify_status gimplify_arg (tree *, gimple_seq *, location_t);
extern void gimplify_function_tree (tree);
extern enum gimplify_status gimplify_va_arg_expr (tree *, gimple_seq *,
argument. */
static inline bool
-same_close_phi_node (gimple p1, gimple p2)
+same_close_phi_node (gphi *p1, gphi *p2)
{
return operand_equal_p (gimple_phi_arg_def (p1, 0),
gimple_phi_arg_def (p2, 0), 0);
of PHI. */
static void
-remove_duplicate_close_phi (gimple phi, gimple_stmt_iterator *gsi)
+remove_duplicate_close_phi (gphi *phi, gphi_iterator *gsi)
{
gimple use_stmt;
use_operand_p use_p;
imm_use_iterator imm_iter;
tree res = gimple_phi_result (phi);
- tree def = gimple_phi_result (gsi_stmt (*gsi));
+ tree def = gimple_phi_result (gsi->phi ());
- gcc_assert (same_close_phi_node (phi, gsi_stmt (*gsi)));
+ gcc_assert (same_close_phi_node (phi, gsi->phi ()));
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, def)
{
static void
make_close_phi_nodes_unique (basic_block bb)
{
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
{
- gimple_stmt_iterator gsi = psi;
- gimple phi = gsi_stmt (psi);
+ gphi_iterator gsi = psi;
+ gphi *phi = psi.phi ();
/* At this point, PHI should be a close phi in normal form. */
gcc_assert (gimple_phi_num_args (phi) == 1);
/* Iterate over the next phis and remove duplicates. */
gsi_next (&gsi);
while (!gsi_end_p (gsi))
- if (same_close_phi_node (phi, gsi_stmt (gsi)))
+ if (same_close_phi_node (phi, gsi.phi ()))
remove_duplicate_close_phi (phi, &gsi);
else
gsi_next (&gsi);
}
else
{
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
basic_block close = split_edge (e);
e = single_succ_edge (close);
for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
{
- gimple phi = gsi_stmt (psi);
+ gphi *phi = psi.phi ();
unsigned i;
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
tree res, arg = gimple_phi_arg_def (phi, i);
use_operand_p use_p;
- gimple close_phi;
+ gphi *close_phi;
if (TREE_CODE (arg) != SSA_NAME)
continue;
loop. */
static size_t
-phi_arg_in_outermost_loop (gimple phi)
+phi_arg_in_outermost_loop (gphi *phi)
{
loop_p loop = gimple_bb (phi)->loop_father;
size_t i, res = 0;
PSI by inserting on the loop ENTRY edge assignment "RES = INIT". */
static void
-remove_simple_copy_phi (gimple_stmt_iterator *psi)
+remove_simple_copy_phi (gphi_iterator *psi)
{
- gimple phi = gsi_stmt (*psi);
+ gphi *phi = psi->phi ();
tree res = gimple_phi_result (phi);
size_t entry = phi_arg_in_outermost_loop (phi);
tree init = gimple_phi_arg_def (phi, entry);
- gimple stmt = gimple_build_assign (res, init);
+ gassign *stmt = gimple_build_assign (res, init);
edge e = gimple_phi_arg_edge (phi, entry);
remove_phi_node (psi, false);
loop ENTRY edge the assignment RES = INIT. */
static void
-remove_invariant_phi (sese region, gimple_stmt_iterator *psi)
+remove_invariant_phi (sese region, gphi_iterator *psi)
{
- gimple phi = gsi_stmt (*psi);
+ gphi *phi = psi->phi ();
loop_p loop = loop_containing_stmt (phi);
tree res = gimple_phi_result (phi);
tree scev = scalar_evolution_in_region (region, loop, res);
size_t entry = phi_arg_in_outermost_loop (phi);
edge e = gimple_phi_arg_edge (phi, entry);
tree var;
- gimple stmt;
+ gassign *stmt;
gimple_seq stmts = NULL;
if (tree_contains_chrecs (scev, NULL))
/* Returns true when the phi node at PSI is of the form "a = phi (a, x)". */
static inline bool
-simple_copy_phi_p (gimple phi)
+simple_copy_phi_p (gphi *phi)
{
tree res;
be considered. */
static bool
-reduction_phi_p (sese region, gimple_stmt_iterator *psi)
+reduction_phi_p (sese region, gphi_iterator *psi)
{
loop_p loop;
- gimple phi = gsi_stmt (*psi);
+ gphi *phi = psi->phi ();
tree res = gimple_phi_result (phi);
loop = loop_containing_stmt (phi);
inequalities. */
static void
-add_condition_to_pbb (poly_bb_p pbb, gimple stmt, enum tree_code code)
+add_condition_to_pbb (poly_bb_p pbb, gcond *stmt, enum tree_code code)
{
isl_pw_aff *lhs = create_pw_aff_from_tree (pbb, gimple_cond_lhs (stmt));
isl_pw_aff *rhs = create_pw_aff_from_tree (pbb, gimple_cond_rhs (stmt));
{
case GIMPLE_COND:
{
- enum tree_code code = gimple_cond_code (stmt);
+ gcond *cond_stmt = as_a <gcond *> (stmt);
+ enum tree_code code = gimple_cond_code (cond_stmt);
/* The conditions for ELSE-branches are inverted. */
if (!GBB_CONDITION_CASES (gbb)[i])
code = invert_tree_comparison (code, false);
- add_condition_to_pbb (pbb, stmt, code);
+ add_condition_to_pbb (pbb, cond_stmt, code);
break;
}
edge between BB and its predecessor is not a loop exit edge, and
the last statement of the single predecessor is a COND_EXPR. */
-static gimple
+static gcond *
single_pred_cond_non_loop_exit (basic_block bb)
{
if (single_pred_p (bb))
stmt = last_stmt (pred);
if (stmt && gimple_code (stmt) == GIMPLE_COND)
- return stmt;
+ return as_a <gcond *> (stmt);
}
return NULL;
sese_dom_walker::before_dom_children (basic_block bb)
{
gimple_bb_p gbb;
- gimple stmt;
+ gcond *stmt;
if (!bb_in_sese_p (bb, m_region))
return;
/* Return a gsi at the position of the phi node STMT. */
-static gimple_stmt_iterator
-gsi_for_phi_node (gimple stmt)
+static gphi_iterator
+gsi_for_phi_node (gphi *stmt)
{
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
basic_block bb = gimple_bb (stmt);
for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
- if (stmt == gsi_stmt (psi))
+ if (stmt == psi.phi ())
return psi;
gcc_unreachable ();
gimple_seq stmts;
gimple_stmt_iterator gsi;
tree var = force_gimple_operand (expr, &stmts, true, NULL_TREE);
- gimple stmt = gimple_build_assign (unshare_expr (res), var);
+ gassign *stmt = gimple_build_assign (unshare_expr (res), var);
auto_vec<gimple, 3> x;
gimple_seq_add_stmt (&stmts, stmt);
dimension array for it. */
static void
-rewrite_phi_out_of_ssa (scop_p scop, gimple_stmt_iterator *psi)
+rewrite_phi_out_of_ssa (scop_p scop, gphi_iterator *psi)
{
size_t i;
- gimple phi = gsi_stmt (*psi);
+ gphi *phi = psi->phi ();
basic_block bb = gimple_bb (phi);
tree res = gimple_phi_result (phi);
tree zero_dim_array = create_zero_dim_array (res, "phi_out_of_ssa");
form "x = phi (y, y, ..., y)" to "x = y". */
static void
-rewrite_degenerate_phi (gimple_stmt_iterator *psi)
+rewrite_degenerate_phi (gphi_iterator *psi)
{
tree rhs;
gimple stmt;
gimple_stmt_iterator gsi;
- gimple phi = gsi_stmt (*psi);
+ gphi *phi = psi->phi ();
tree res = gimple_phi_result (phi);
basic_block bb;
rewrite_reductions_out_of_ssa (scop_p scop)
{
basic_block bb;
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
sese region = SCOP_REGION (scop);
FOR_EACH_BB_FN (bb, cfun)
if (bb_in_sese_p (bb, region))
for (psi = gsi_start_phis (bb); !gsi_end_p (psi);)
{
- gimple phi = gsi_stmt (psi);
+ gphi *phi = psi.phi ();
if (virtual_operand_p (gimple_phi_result (phi)))
{
if (gimple_code (use_stmt) == GIMPLE_PHI
&& (res = true))
{
- gimple_stmt_iterator psi = gsi_for_stmt (use_stmt);
+ gphi_iterator psi = gsi_start_phis (gimple_bb (use_stmt));
if (scalar_close_phi_node_p (gsi_stmt (psi)))
rewrite_close_phi_out_of_ssa (scop, &psi);
/* Returns true when PHI contains an argument ARG. */
static bool
-phi_contains_arg (gimple phi, tree arg)
+phi_contains_arg (gphi *phi, tree arg)
{
size_t i;
/* Return a loop phi node that corresponds to a reduction containing LHS. */
-static gimple
+static gphi *
follow_ssa_with_commutative_ops (tree arg, tree lhs)
{
gimple stmt;
|| gimple_code (stmt) == GIMPLE_CALL)
return NULL;
- if (gimple_code (stmt) == GIMPLE_PHI)
+ if (gphi *phi = dyn_cast <gphi *> (stmt))
{
- if (phi_contains_arg (stmt, lhs))
- return stmt;
+ if (phi_contains_arg (phi, lhs))
+ return phi;
return NULL;
}
if (is_reduction_operation_p (stmt))
{
- gimple res = follow_ssa_with_commutative_ops (gimple_assign_rhs1 (stmt), lhs);
+ gphi *res
+ = follow_ssa_with_commutative_ops (gimple_assign_rhs1 (stmt), lhs);
return res ? res :
follow_ssa_with_commutative_ops (gimple_assign_rhs2 (stmt), lhs);
/* Detect commutative and associative scalar reductions starting at
the STMT. Return the phi node of the reduction cycle, or NULL. */
-static gimple
+static gphi *
detect_commutative_reduction_arg (tree lhs, gimple stmt, tree arg,
vec<gimple> *in,
vec<gimple> *out)
{
- gimple phi = follow_ssa_with_commutative_ops (arg, lhs);
+ gphi *phi = follow_ssa_with_commutative_ops (arg, lhs);
if (!phi)
return NULL;
/* Detect commutative and associative scalar reductions starting at
STMT. Return the phi node of the reduction cycle, or NULL. */
-static gimple
+static gphi *
detect_commutative_reduction_assign (gimple stmt, vec<gimple> *in,
vec<gimple> *out)
{
if (is_reduction_operation_p (stmt))
{
- gimple res = detect_commutative_reduction_arg (lhs, stmt,
- gimple_assign_rhs1 (stmt),
- in, out);
+ gphi *res = detect_commutative_reduction_arg (lhs, stmt,
+ gimple_assign_rhs1 (stmt),
+ in, out);
return res ? res
: detect_commutative_reduction_arg (lhs, stmt,
gimple_assign_rhs2 (stmt),
/* Return a loop phi node that corresponds to a reduction containing LHS. */
-static gimple
+static gphi *
follow_inital_value_to_phi (tree arg, tree lhs)
{
gimple stmt;
stmt = SSA_NAME_DEF_STMT (arg);
- if (gimple_code (stmt) == GIMPLE_PHI
- && phi_contains_arg (stmt, lhs))
- return stmt;
+ if (gphi *phi = dyn_cast <gphi *> (stmt))
+ if (phi_contains_arg (phi, lhs))
+ return phi;
return NULL;
}
from outside the loop. */
static edge
-edge_initial_value_for_loop_phi (gimple phi)
+edge_initial_value_for_loop_phi (gphi *phi)
{
size_t i;
from outside the loop. */
static tree
-initial_value_for_loop_phi (gimple phi)
+initial_value_for_loop_phi (gphi *phi)
{
size_t i;
the SCOP starting at the loop closed phi node STMT. Return the phi
node of the reduction cycle, or NULL. */
-static gimple
+static gphi *
detect_commutative_reduction (scop_p scop, gimple stmt, vec<gimple> *in,
vec<gimple> *out)
{
if (scalar_close_phi_node_p (stmt))
{
- gimple def, loop_phi, phi, close_phi = stmt;
+ gimple def;
+ gphi *loop_phi, *phi, *close_phi = as_a <gphi *> (stmt);
tree init, lhs, arg = gimple_phi_arg_def (close_phi, 0);
if (TREE_CODE (arg) != SSA_NAME)
static void
translate_scalar_reduction_to_array_for_stmt (scop_p scop, tree red,
- gimple stmt, gimple loop_phi)
+ gimple stmt, gphi *loop_phi)
{
tree res = gimple_phi_result (loop_phi);
- gimple assign = gimple_build_assign (res, unshare_expr (red));
+ gassign *assign = gimple_build_assign (res, unshare_expr (red));
gimple_stmt_iterator gsi;
insert_stmts (scop, assign, NULL, gsi_after_labels (gimple_bb (loop_phi)));
the PHI_RESULT. */
static void
-remove_phi (gimple phi)
+remove_phi (gphi *phi)
{
imm_use_iterator imm_iter;
tree def;
NULL_TREE. */
static tree
-close_phi_written_to_memory (gimple close_phi)
+close_phi_written_to_memory (gphi *close_phi)
{
imm_use_iterator imm_iter;
use_operand_p use_p;
vec<gimple> in,
vec<gimple> out)
{
- gimple loop_phi;
+ gimple loop_stmt;
unsigned int i = out.length () - 1;
- tree red = close_phi_written_to_memory (out[i]);
+ tree red = close_phi_written_to_memory (as_a <gphi *> (out[i]));
- FOR_EACH_VEC_ELT (in, i, loop_phi)
+ FOR_EACH_VEC_ELT (in, i, loop_stmt)
{
- gimple close_phi = out[i];
+ gimple close_stmt = out[i];
if (i == 0)
{
- gimple stmt = loop_phi;
- basic_block bb = split_reduction_stmt (scop, stmt);
+ basic_block bb = split_reduction_stmt (scop, loop_stmt);
poly_bb_p pbb = pbb_from_bb (bb);
PBB_IS_REDUCTION (pbb) = true;
- gcc_assert (close_phi == loop_phi);
+ gcc_assert (close_stmt == loop_stmt);
if (!red)
red = create_zero_dim_array
- (gimple_assign_lhs (stmt), "Commutative_Associative_Reduction");
+ (gimple_assign_lhs (loop_stmt), "Commutative_Associative_Reduction");
- translate_scalar_reduction_to_array_for_stmt (scop, red, stmt, in[1]);
+ translate_scalar_reduction_to_array_for_stmt (scop, red, loop_stmt,
+ as_a <gphi *> (in[1]));
continue;
}
+ gphi *loop_phi = as_a <gphi *> (loop_stmt);
+ gphi *close_phi = as_a <gphi *> (close_stmt);
+
if (i == in.length () - 1)
{
insert_out_of_ssa_copy (scop, gimple_phi_result (close_phi),
static bool
rewrite_commutative_reductions_out_of_ssa_close_phi (scop_p scop,
- gimple close_phi)
+ gphi *close_phi)
{
bool res;
auto_vec<gimple, 10> in;
rewrite_commutative_reductions_out_of_ssa_loop (scop_p scop,
loop_p loop)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
edge exit = single_exit (loop);
tree res;
bool changed = false;
return false;
for (gsi = gsi_start_phis (exit->dest); !gsi_end_p (gsi); gsi_next (&gsi))
- if ((res = gimple_phi_result (gsi_stmt (gsi)))
+ if ((res = gimple_phi_result (gsi.phi ()))
&& !virtual_operand_p (res)
&& !scev_analyzable_p (res, SCOP_REGION (scop)))
changed |= rewrite_commutative_reductions_out_of_ssa_close_phi
- (scop, gsi_stmt (gsi));
+ (scop, gsi.phi ());
return changed;
}
scop_ivs_can_be_represented (scop_p scop)
{
loop_p loop;
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
bool result = true;
FOR_EACH_LOOP (loop, 0)
for (psi = gsi_start_phis (loop->header);
!gsi_end_p (psi); gsi_next (&psi))
{
- gimple phi = gsi_stmt (psi);
+ gphi *phi = psi.phi ();
tree res = PHI_RESULT (phi);
tree type = TREE_TYPE (res);
DEFGSSTRUCT(GSS_WITH_OPS, gimple_statement_with_ops, true)
DEFGSSTRUCT(GSS_WITH_MEM_OPS_BASE, gimple_statement_with_memory_ops_base, false)
DEFGSSTRUCT(GSS_WITH_MEM_OPS, gimple_statement_with_memory_ops, true)
-DEFGSSTRUCT(GSS_CALL, gimple_statement_call, true)
-DEFGSSTRUCT(GSS_ASM, gimple_statement_asm, true)
-DEFGSSTRUCT(GSS_BIND, gimple_statement_bind, false)
-DEFGSSTRUCT(GSS_PHI, gimple_statement_phi, false)
-DEFGSSTRUCT(GSS_TRY, gimple_statement_try, false)
-DEFGSSTRUCT(GSS_CATCH, gimple_statement_catch, false)
-DEFGSSTRUCT(GSS_EH_FILTER, gimple_statement_eh_filter, false)
-DEFGSSTRUCT(GSS_EH_MNT, gimple_statement_eh_mnt, false)
+DEFGSSTRUCT(GSS_CALL, gcall, true)
+DEFGSSTRUCT(GSS_ASM, gasm, true)
+DEFGSSTRUCT(GSS_BIND, gbind, false)
+DEFGSSTRUCT(GSS_PHI, gphi, false)
+DEFGSSTRUCT(GSS_TRY, gtry, false)
+DEFGSSTRUCT(GSS_CATCH, gcatch, false)
+DEFGSSTRUCT(GSS_EH_FILTER, geh_filter, false)
+DEFGSSTRUCT(GSS_EH_MNT, geh_mnt, false)
DEFGSSTRUCT(GSS_EH_CTRL, gimple_statement_eh_ctrl, false)
-DEFGSSTRUCT(GSS_EH_ELSE, gimple_statement_eh_else, false)
+DEFGSSTRUCT(GSS_EH_ELSE, geh_else, false)
DEFGSSTRUCT(GSS_WCE, gimple_statement_wce, false)
DEFGSSTRUCT(GSS_OMP, gimple_statement_omp, false)
-DEFGSSTRUCT(GSS_OMP_CRITICAL, gimple_statement_omp_critical, false)
-DEFGSSTRUCT(GSS_OMP_FOR, gimple_statement_omp_for, false)
+DEFGSSTRUCT(GSS_OMP_CRITICAL, gomp_critical, false)
+DEFGSSTRUCT(GSS_OMP_FOR, gomp_for, false)
DEFGSSTRUCT(GSS_OMP_PARALLEL_LAYOUT, gimple_statement_omp_parallel_layout, false)
-DEFGSSTRUCT(GSS_OMP_TASK, gimple_statement_omp_task, false)
-DEFGSSTRUCT(GSS_OMP_SECTIONS, gimple_statement_omp_sections, false)
+DEFGSSTRUCT(GSS_OMP_TASK, gomp_task, false)
+DEFGSSTRUCT(GSS_OMP_SECTIONS, gomp_sections, false)
DEFGSSTRUCT(GSS_OMP_SINGLE_LAYOUT, gimple_statement_omp_single_layout, false)
-DEFGSSTRUCT(GSS_OMP_CONTINUE, gimple_statement_omp_continue, false)
-DEFGSSTRUCT(GSS_OMP_ATOMIC_LOAD, gimple_statement_omp_atomic_load, false)
-DEFGSSTRUCT(GSS_OMP_ATOMIC_STORE_LAYOUT, gimple_statement_omp_atomic_store, false)
-DEFGSSTRUCT(GSS_TRANSACTION, gimple_statement_transaction, false)
+DEFGSSTRUCT(GSS_OMP_CONTINUE, gomp_continue, false)
+DEFGSSTRUCT(GSS_OMP_ATOMIC_LOAD, gomp_atomic_load, false)
+DEFGSSTRUCT(GSS_OMP_ATOMIC_STORE_LAYOUT, gomp_atomic_store, false)
+DEFGSSTRUCT(GSS_TRANSACTION, gtransaction, false)
/* Expand LOAD_LANES call STMT. */
static void
-expand_LOAD_LANES (gimple stmt)
+expand_LOAD_LANES (gcall *stmt)
{
struct expand_operand ops[2];
tree type, lhs, rhs;
/* Expand STORE_LANES call STMT. */
static void
-expand_STORE_LANES (gimple stmt)
+expand_STORE_LANES (gcall *stmt)
{
struct expand_operand ops[2];
tree type, lhs, rhs;
}
static void
-expand_ANNOTATE (gimple stmt ATTRIBUTE_UNUSED)
+expand_ANNOTATE (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
/* This should get expanded in adjust_simduid_builtins. */
static void
-expand_GOMP_SIMD_LANE (gimple stmt ATTRIBUTE_UNUSED)
+expand_GOMP_SIMD_LANE (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
/* This should get expanded in adjust_simduid_builtins. */
static void
-expand_GOMP_SIMD_VF (gimple stmt ATTRIBUTE_UNUSED)
+expand_GOMP_SIMD_VF (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
/* This should get expanded in adjust_simduid_builtins. */
static void
-expand_GOMP_SIMD_LAST_LANE (gimple stmt ATTRIBUTE_UNUSED)
+expand_GOMP_SIMD_LAST_LANE (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
/* This should get expanded in the sanopt pass. */
static void
-expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
+expand_UBSAN_NULL (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
/* This should get expanded in the sanopt pass. */
static void
-expand_UBSAN_BOUNDS (gimple stmt ATTRIBUTE_UNUSED)
+expand_UBSAN_BOUNDS (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
/* This should get expanded in the sanopt pass. */
static void
-expand_UBSAN_OBJECT_SIZE (gimple stmt ATTRIBUTE_UNUSED)
+expand_UBSAN_OBJECT_SIZE (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
/* This should get expanded in the sanopt pass. */
static void
-expand_ASAN_CHECK (gimple stmt ATTRIBUTE_UNUSED)
+expand_ASAN_CHECK (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
/* Expand UBSAN_CHECK_ADD call STMT. */
static void
-expand_UBSAN_CHECK_ADD (gimple stmt)
+expand_UBSAN_CHECK_ADD (gcall *stmt)
{
location_t loc = gimple_location (stmt);
tree lhs = gimple_call_lhs (stmt);
/* Expand UBSAN_CHECK_SUB call STMT. */
static void
-expand_UBSAN_CHECK_SUB (gimple stmt)
+expand_UBSAN_CHECK_SUB (gcall *stmt)
{
location_t loc = gimple_location (stmt);
tree lhs = gimple_call_lhs (stmt);
/* Expand UBSAN_CHECK_MUL call STMT. */
static void
-expand_UBSAN_CHECK_MUL (gimple stmt)
+expand_UBSAN_CHECK_MUL (gcall *stmt)
{
location_t loc = gimple_location (stmt);
tree lhs = gimple_call_lhs (stmt);
/* Expand ADD_OVERFLOW STMT. */
static void
-expand_ADD_OVERFLOW (gimple stmt)
+expand_ADD_OVERFLOW (gcall *stmt)
{
expand_arith_overflow (PLUS_EXPR, stmt);
}
/* Expand SUB_OVERFLOW STMT. */
static void
-expand_SUB_OVERFLOW (gimple stmt)
+expand_SUB_OVERFLOW (gcall *stmt)
{
expand_arith_overflow (MINUS_EXPR, stmt);
}
/* Expand MUL_OVERFLOW STMT. */
static void
-expand_MUL_OVERFLOW (gimple stmt)
+expand_MUL_OVERFLOW (gcall *stmt)
{
expand_arith_overflow (MULT_EXPR, stmt);
}
/* This should get folded in tree-vectorizer.c. */
static void
-expand_LOOP_VECTORIZED (gimple stmt ATTRIBUTE_UNUSED)
+expand_LOOP_VECTORIZED (gcall *stmt ATTRIBUTE_UNUSED)
{
gcc_unreachable ();
}
static void
-expand_MASK_LOAD (gimple stmt)
+expand_MASK_LOAD (gcall *stmt)
{
struct expand_operand ops[3];
tree type, lhs, rhs, maskt;
}
static void
-expand_MASK_STORE (gimple stmt)
+expand_MASK_STORE (gcall *stmt)
{
struct expand_operand ops[3];
tree type, lhs, rhs, maskt;
}
static void
-expand_ABNORMAL_DISPATCHER (gimple)
+expand_ABNORMAL_DISPATCHER (gcall *)
{
}
static void
-expand_BUILTIN_EXPECT (gimple stmt)
+expand_BUILTIN_EXPECT (gcall *stmt)
{
/* When guessing was done, the hints should be already stripped away. */
gcc_assert (!flag_guess_branch_prob || optimize == 0 || seen_error ());
/* Routines to expand each internal function, indexed by function number.
Each routine has the prototype:
- expand_<NAME> (gimple stmt)
+ expand_<NAME> (gcall *stmt)
where STMT is the statement that performs the call. */
-static void (*const internal_fn_expanders[]) (gimple) = {
+static void (*const internal_fn_expanders[]) (gcall *) = {
#define DEF_INTERNAL_FN(CODE, FLAGS, FNSPEC) expand_##CODE,
#include "internal-fn.def"
#undef DEF_INTERNAL_FN
/* Expand STMT, which is a call to internal function FN. */
void
-expand_internal_call (gimple stmt)
+expand_internal_call (gcall *stmt)
{
internal_fn_expanders[(int) gimple_call_internal_fn (stmt)] (stmt);
}
Each entry must have a corresponding expander of the form:
- void expand_NAME (gimple stmt)
+ void expand_NAME (gcall *stmt)
where STMT is the statement that performs the call. */
return internal_fn_fnspec_array[(int) fn];
}
-extern void expand_internal_call (gimple);
+extern void expand_internal_call (gcall *);
#endif
{
gimple stmt = gsi_stmt (gsi);
- if (gimple_code (stmt) == GIMPLE_LABEL)
+ if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
{
- tree t = gimple_label_label (stmt);
+ tree t = gimple_label_label (label_stmt);
gcc_assert (TREE_CODE (t) == LABEL_DECL);
m_label_bb_map.put (t, bb->bb->index);
switch (gimple_code (s1))
{
case GIMPLE_CALL:
- if (!compare_gimple_call (s1, s2))
+ if (!compare_gimple_call (as_a <gcall *> (s1),
+ as_a <gcall *> (s2)))
return return_different_stmts (s1, s2, "GIMPLE_CALL");
break;
case GIMPLE_ASSIGN:
return return_different_stmts (s1, s2, "GIMPLE_COND");
break;
case GIMPLE_SWITCH:
- if (!compare_gimple_switch (s1, s2))
+ if (!compare_gimple_switch (as_a <gswitch *> (s1),
+ as_a <gswitch *> (s2)))
return return_different_stmts (s1, s2, "GIMPLE_SWITCH");
break;
case GIMPLE_DEBUG:
case GIMPLE_EH_DISPATCH:
break;
case GIMPLE_RESX:
- if (!compare_gimple_resx (s1, s2))
+ if (!compare_gimple_resx (as_a <gresx *> (s1),
+ as_a <gresx *> (s2)))
return return_different_stmts (s1, s2, "GIMPLE_RESX");
break;
case GIMPLE_LABEL:
- if (!compare_gimple_label (s1, s2))
+ if (!compare_gimple_label (as_a <glabel *> (s1),
+ as_a <glabel *> (s2)))
return return_different_stmts (s1, s2, "GIMPLE_LABEL");
break;
case GIMPLE_RETURN:
- if (!compare_gimple_return (s1, s2))
+ if (!compare_gimple_return (as_a <greturn *> (s1),
+ as_a <greturn *> (s2)))
return return_different_stmts (s1, s2, "GIMPLE_RETURN");
break;
case GIMPLE_GOTO:
return return_different_stmts (s1, s2, "GIMPLE_GOTO");
break;
case GIMPLE_ASM:
- if (!compare_gimple_asm (s1, s2))
+ if (!compare_gimple_asm (as_a <gasm *> (s1),
+ as_a <gasm *> (s2)))
return return_different_stmts (s1, s2, "GIMPLE_ASM");
break;
case GIMPLE_PREDICT:
call statements are semantically equivalent. */
bool
-func_checker::compare_gimple_call (gimple s1, gimple s2)
+func_checker::compare_gimple_call (gcall *s1, gcall *s2)
{
unsigned i;
tree t1, t2;
return compare_operand (t1, t2);
}
-/* Verifies for given GIMPLEs S1 and S2 that
+/* Verifies for given GIMPLE_LABEL stmts S1 and S2 that
label statements are semantically equivalent. */
bool
-func_checker::compare_gimple_label (gimple g1, gimple g2)
+func_checker::compare_gimple_label (const glabel *g1, const glabel *g2)
{
if (m_ignore_labels)
return true;
return true;
}
-/* Verifies for given GIMPLEs S1 and S2 that
+/* Verifies for given GIMPLE_SWITCH stmts S1 and S2 that
switch statements are semantically equivalent. */
bool
-func_checker::compare_gimple_switch (gimple g1, gimple g2)
+func_checker::compare_gimple_switch (const gswitch *g1, const gswitch *g2)
{
unsigned lsize1, lsize2, i;
return true;
}
-/* Verifies for given GIMPLEs S1 and S2 that
+/* Verifies for given GIMPLE_RETURN stmts S1 and S2 that
return statements are semantically equivalent. */
bool
-func_checker::compare_gimple_return (gimple g1, gimple g2)
+func_checker::compare_gimple_return (const greturn *g1, const greturn *g2)
{
tree t1, t2;
return compare_operand (dest1, dest2);
}
-/* Verifies for given GIMPLEs S1 and S2 that
+/* Verifies for given GIMPLE_RESX stmts S1 and S2 that
resx statements are semantically equivalent. */
bool
-func_checker::compare_gimple_resx (gimple g1, gimple g2)
+func_checker::compare_gimple_resx (const gresx *g1, const gresx *g2)
{
return gimple_resx_region (g1) == gimple_resx_region (g2);
}
'__asm__ __volatile__ ("", "", "", "memory")'. */
bool
-func_checker::compare_gimple_asm (gimple g1, gimple g2)
+func_checker::compare_gimple_asm (const gasm *g1, const gasm *g2)
{
if (gimple_asm_volatile_p (g1) != gimple_asm_volatile_p (g2))
return false;
/* Verifies for given GIMPLEs S1 and S2 that
call statements are semantically equivalent. */
- bool compare_gimple_call (gimple s1, gimple s2);
+ bool compare_gimple_call (gcall *s1, gcall *s2);
/* Verifies for given GIMPLEs S1 and S2 that
assignment statements are semantically equivalent. */
condition statements are semantically equivalent. */
bool compare_gimple_cond (gimple s1, gimple s2);
- /* Verifies for given GIMPLEs S1 and S2 that
+ /* Verifies for given GIMPLE_LABEL stmts S1 and S2 that
label statements are semantically equivalent. */
- bool compare_gimple_label (gimple s1, gimple s2);
+ bool compare_gimple_label (const glabel *s1, const glabel *s2);
- /* Verifies for given GIMPLEs S1 and S2 that
+ /* Verifies for given GIMPLE_SWITCH stmts S1 and S2 that
switch statements are semantically equivalent. */
- bool compare_gimple_switch (gimple s1, gimple s2);
+ bool compare_gimple_switch (const gswitch *s1, const gswitch *s2);
- /* Verifies for given GIMPLEs S1 and S2 that
+ /* Verifies for given GIMPLE_RETURN stmts S1 and S2 that
return statements are semantically equivalent. */
- bool compare_gimple_return (gimple s1, gimple s2);
+ bool compare_gimple_return (const greturn *s1, const greturn *s2);
/* Verifies for given GIMPLEs S1 and S2 that
goto statements are semantically equivalent. */
bool compare_gimple_goto (gimple s1, gimple s2);
- /* Verifies for given GIMPLEs S1 and S2 that
+ /* Verifies for given GIMPLE_RESX stmts S1 and S2 that
resx statements are semantically equivalent. */
- bool compare_gimple_resx (gimple s1, gimple s2);
+ bool compare_gimple_resx (const gresx *s1, const gresx *s2);
- /* Verifies for given GIMPLEs S1 and S2 that ASM statements are equivalent.
+ /* Verifies for given GIMPLE_ASM stmts S1 and S2 that ASM statements
+ are equivalent.
For the beginning, the pass only supports equality for
'__asm__ __volatile__ ("", "", "", "memory")'. */
- bool compare_gimple_asm (gimple s1, gimple s2);
+ bool compare_gimple_asm (const gasm *s1, const gasm *s2);
/* Verification function for declaration trees T1 and T2. */
bool compare_decl (tree t1, tree t2);
bool
sem_function::compare_phi_node (basic_block bb1, basic_block bb2)
{
- gimple_stmt_iterator si1, si2;
- gimple phi1, phi2;
+ gphi_iterator si1, si2;
+ gphi *phi1, *phi2;
unsigned size1, size2, i;
tree t1, t2;
edge e1, e2;
if (gsi_end_p (si1) || gsi_end_p (si2))
return return_false();
- phi1 = gsi_stmt (si1);
- phi2 = gsi_stmt (si2);
+ phi1 = si1.phi ();
+ phi2 = si2.phi ();
tree phi_result1 = gimple_phi_result (phi1);
tree phi_result2 = gimple_phi_result (phi2);
struct inline_summary *summary,
basic_block bb)
{
- gimple last;
+ gimple lastg;
tree op;
int index;
struct agg_position_info aggpos;
size_t n;
size_t case_idx;
- last = last_stmt (bb);
- if (!last || gimple_code (last) != GIMPLE_SWITCH)
+ lastg = last_stmt (bb);
+ if (!lastg || gimple_code (lastg) != GIMPLE_SWITCH)
return;
+ gswitch *last = as_a <gswitch *> (lastg);
op = gimple_switch_index (last);
if (!unmodified_parm_or_parm_agg_item (info, last, op, &index, &aggpos))
return;
NONCONSTANT_NAMES, if possible. */
static void
-predicate_for_phi_result (struct inline_summary *summary, gimple phi,
+predicate_for_phi_result (struct inline_summary *summary, gphi *phi,
struct predicate *p,
vec<predicate_t> nonconstant_names)
{
/* Benefits are scaled by probability of elimination that is in range
<0,2>. */
basic_block bb;
- gimple_stmt_iterator bsi;
struct function *my_function = DECL_STRUCT_FUNCTION (node->decl);
int freq;
struct inline_summary *info = inline_summary (node);
struct predicate phi_predicate;
bool first_phi = true;
- for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
+ gsi_next (&bsi))
{
if (first_phi
&& !phi_result_unknown_predicate (parms_info, info, bb,
fprintf (dump_file, " ");
print_gimple_stmt (dump_file, gsi_stmt (bsi), 0, 0);
}
- predicate_for_phi_result (info, gsi_stmt (bsi), &phi_predicate,
+ predicate_for_phi_result (info, bsi.phi (), &phi_predicate,
nonconstant_names);
}
}
fix_builtin_expect_stmt = find_foldable_builtin_expect (bb);
- for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
+ gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
int this_size = estimate_num_insns (stmt, &eni_size_weights);
static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
- gimple call, struct ipa_jump_func *jfunc,
+ gcall *call, struct ipa_jump_func *jfunc,
HOST_WIDE_INT offset)
{
struct prop_type_change_info tci;
returned by get_ref_base_and_extent, as is the offset. */
static bool
-detect_type_change (tree arg, tree base, tree comp_type, gimple call,
+detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
if (!flag_devirtualize)
static bool
detect_type_change_ssa (tree arg, tree comp_type,
- gimple call, struct ipa_jump_func *jfunc)
+ gcall *call, struct ipa_jump_func *jfunc)
{
gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
if (!flag_devirtualize
compute_complex_assign_jump_func (struct func_body_info *fbi,
struct ipa_node_params *info,
struct ipa_jump_func *jfunc,
- gimple call, gimple stmt, tree name,
+ gcall *call, gimple stmt, tree name,
tree param_type)
{
HOST_WIDE_INT offset, size, max_size;
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
struct ipa_node_params *info,
struct ipa_jump_func *jfunc,
- gimple call, gimple phi)
+ gcall *call, gphi *phi)
{
HOST_WIDE_INT offset;
gimple assign, cond;
subsequently stored. */
static void
-determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
+determine_locally_known_aggregate_parts (gcall *call, tree arg,
+ tree arg_type,
struct ipa_jump_func *jfunc)
{
struct ipa_known_agg_contents_list *list = NULL;
{
struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
struct ipa_edge_args *args = IPA_EDGE_REF (cs);
- gimple call = cs->call_stmt;
+ gcall *call = cs->call_stmt;
int n, arg_num = gimple_call_num_args (call);
bool useful_context = false;
call, stmt, arg, param_type);
else if (gimple_code (stmt) == GIMPLE_PHI)
compute_complex_ancestor_jump_func (fbi, info, jfunc,
- call, stmt);
+ call,
+ as_a <gphi *> (stmt));
}
}
indirect call graph edge. */
static struct cgraph_edge *
-ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
+ipa_note_param_call (struct cgraph_node *node, int param_index,
+ gcall *stmt)
{
struct cgraph_edge *cs;
passed by value or reference. */
static void
-ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
+ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
tree target)
{
struct ipa_node_params *info = fbi->info;
static void
ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
- gimple call, tree target)
+ gcall *call, tree target)
{
tree obj = OBJ_TYPE_REF_OBJECT (target);
int index;
containing intermediate information about each formal parameter. */
static void
-ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
+ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
{
tree target = gimple_call_fn (call);
ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
{
if (is_gimple_call (stmt))
- ipa_analyze_call_uses (fbi, stmt);
+ ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
contain the corresponding call graph edge. */
void
-ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
+ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
ipa_parm_adjustment_vec adjustments)
{
struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
vec<tree> vargs;
vec<tree, va_gc> **debug_args = NULL;
- gimple new_stmt;
+ gcall *new_stmt;
gimple_stmt_iterator gsi, prev_gsi;
tree callee_decl;
int i, len;
vec<tree> ipa_get_vector_of_formal_parms (tree fndecl);
vec<tree> ipa_get_vector_of_formal_parm_types (tree fntype);
void ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec);
-void ipa_modify_call_arguments (struct cgraph_edge *, gimple,
+void ipa_modify_call_arguments (struct cgraph_edge *, gcall *,
ipa_parm_adjustment_vec);
ipa_parm_adjustment_vec ipa_combine_adjustments (ipa_parm_adjustment_vec,
ipa_parm_adjustment_vec);
the entire call expression. */
static void
-check_call (funct_state local, gimple call, bool ipa)
+check_call (funct_state local, gcall *call, bool ipa)
{
int flags = gimple_call_flags (call);
tree callee_t = gimple_call_fndecl (call);
switch (gimple_code (stmt))
{
case GIMPLE_CALL:
- check_call (local, stmt, ipa);
+ check_call (local, as_a <gcall *> (stmt), ipa);
break;
case GIMPLE_LABEL:
- if (DECL_NONLOCAL (gimple_label_label (stmt)))
+ if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
/* Target of long jump. */
{
if (dump_file)
}
break;
case GIMPLE_ASM:
- if (gimple_asm_clobbers_memory_p (stmt))
+ if (gimple_asm_clobbers_memory_p (as_a <gasm *> (stmt)))
{
if (dump_file)
fprintf (dump_file, " memory asm clobber is not const/pure\n");
local->pure_const_state = IPA_NEITHER;
local->can_free = true;
}
- if (gimple_asm_volatile_p (stmt))
+ if (gimple_asm_volatile_p (as_a <gasm *> (stmt)))
{
if (dump_file)
fprintf (dump_file, " volatile is not const/pure\n");
while (!worklist.is_empty ())
{
- gimple_stmt_iterator bsi;
-
bb = worklist.pop ();
FOR_EACH_EDGE (e, ei, bb->preds)
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
e->src->index));
worklist.safe_push (e->src);
}
- for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
+ gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
if (is_gimple_debug (stmt))
ok = false;
goto done;
}
- if (gimple_code (stmt) == GIMPLE_LABEL
- && test_nonssa_use (stmt, gimple_label_label (stmt),
- NULL_TREE, non_ssa_vars))
- {
- ok = false;
- goto done;
- }
+ if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
+ if (test_nonssa_use (stmt, gimple_label_label (label_stmt),
+ NULL_TREE, non_ssa_vars))
+ {
+ ok = false;
+ goto done;
+ }
}
- for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
+ gsi_next (&bsi))
{
if (walk_stmt_load_store_addr_ops
(gsi_stmt (bsi), non_ssa_vars, test_nonssa_use, test_nonssa_use,
{
if (e->dest != return_bb)
continue;
- for (bsi = gsi_start_phis (return_bb); !gsi_end_p (bsi);
+ for (gphi_iterator bsi = gsi_start_phis (return_bb);
+ !gsi_end_p (bsi);
gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gphi *stmt = bsi.phi ();
tree op = gimple_phi_arg_def (stmt, e->dest_idx);
if (virtual_operand_p (gimple_phi_result (stmt)))
{
gimple_stmt_iterator bsi;
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
- if (gimple_code (gsi_stmt (bsi)) == GIMPLE_LABEL
- && test_nonssa_use (gsi_stmt (bsi),
- gimple_label_label (gsi_stmt (bsi)),
- NULL_TREE, non_ssa_vars))
+ if (glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (bsi)))
{
- ok = false;
- goto done;
+ if (test_nonssa_use (label_stmt,
+ gimple_label_label (label_stmt),
+ NULL_TREE, non_ssa_vars))
+ {
+ ok = false;
+ goto done;
+ }
}
- else if (gimple_code (gsi_stmt (bsi)) != GIMPLE_LABEL)
+ else
break;
}
basic_block use_bb, forbidden_bb;
enum tree_code code;
edge true_edge, false_edge;
- gimple use_stmt = USE_STMT (use_p);
+ gcond *use_stmt;
- if (gimple_code (use_stmt) != GIMPLE_COND)
+ use_stmt = dyn_cast <gcond *> (USE_STMT (use_p));
+ if (!use_stmt)
continue;
/* Assuming canonical form for GIMPLE_COND here, with constant
unsigned int call_overhead;
edge e;
edge_iterator ei;
- gimple_stmt_iterator bsi;
+ gphi_iterator bsi;
unsigned int i;
int incoming_freq = 0;
tree retval;
incoming from header are the same. */
for (bsi = gsi_start_phis (current->entry_bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gphi *stmt = bsi.phi ();
tree val = NULL;
if (virtual_operand_p (gimple_phi_result (stmt)))
for the return value. If there are other PHIs, give up. */
if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
{
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
for (psi = gsi_start_phis (return_bb); !gsi_end_p (psi); gsi_next (&psi))
- if (!virtual_operand_p (gimple_phi_result (gsi_stmt (psi)))
+ if (!virtual_operand_p (gimple_phi_result (psi.phi ()))
&& !(retval
&& current->split_part_set_retval
&& TREE_CODE (retval) == SSA_NAME
&& !DECL_BY_REFERENCE (DECL_RESULT (current_function_decl))
- && SSA_NAME_DEF_STMT (retval) == gsi_stmt (psi)))
+ && SSA_NAME_DEF_STMT (retval) == psi.phi ()))
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file,
|| is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
&& retval == gimple_assign_lhs (stmt))
;
- else if (gimple_code (stmt) == GIMPLE_RETURN)
+ else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
found_return = true;
- retval = gimple_return_retval (stmt);
+ retval = gimple_return_retval (return_stmt);
}
else
break;
{
gimple_stmt_iterator bsi;
for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
- if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
- return gimple_return_retval (gsi_stmt (bsi));
+ if (greturn *return_stmt = dyn_cast <greturn *> (gsi_stmt (bsi)))
+ return gimple_return_retval (return_stmt);
else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
&& !gimple_clobber_p (gsi_stmt (bsi)))
return gimple_assign_rhs1 (gsi_stmt (bsi));
bitmap set_ssa_names, bitmap used_ssa_names,
bitmap non_ssa_vars)
{
- gimple_stmt_iterator bsi;
edge e;
edge_iterator ei;
bool can_split = true;
- for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
+ gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
tree op;
mark_nonssa_use,
mark_nonssa_use);
}
- for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
+ gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gphi *stmt = bsi.phi ();
unsigned int i;
if (virtual_operand_p (gimple_phi_result (stmt)))
FOR_EACH_EDGE (e, ei, bb->succs)
if (e->dest == return_bb)
{
- for (bsi = gsi_start_phis (return_bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gphi_iterator bsi = gsi_start_phis (return_bb);
+ !gsi_end_p (bsi);
+ gsi_next (&bsi))
{
- gimple stmt = gsi_stmt (bsi);
+ gphi *stmt = bsi.phi ();
tree op = gimple_phi_arg_def (stmt, e->dest_idx);
if (virtual_operand_p (gimple_phi_result (stmt)))
cgraph_node *node, *cur_node = cgraph_node::get (current_function_decl);
basic_block return_bb = find_return_bb ();
basic_block call_bb;
- gimple_stmt_iterator gsi;
- gimple call;
+ gcall *call;
edge e;
edge_iterator ei;
tree retval = NULL, real_retval = NULL, retbnd = NULL;
if (return_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
{
bool phi_p = false;
- for (gsi = gsi_start_phis (return_bb); !gsi_end_p (gsi);)
+ for (gphi_iterator gsi = gsi_start_phis (return_bb);
+ !gsi_end_p (gsi);)
{
- gimple stmt = gsi_stmt (gsi);
+ gphi *stmt = gsi.phi ();
if (!virtual_operand_p (gimple_phi_result (stmt)))
{
gsi_next (&gsi);
entry of the SESE region as the vuse of the call and the reaching
vdef of the exit of the SESE region as the vdef of the call. */
if (!phi_p)
- for (gsi = gsi_start_bb (return_bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (return_bb);
+ !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
if (gimple_vuse (stmt))
/* Create the basic block we place call into. It is the entry basic block
split after last label. */
call_bb = split_point->entry_bb;
- for (gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
+ for (gimple_stmt_iterator gsi = gsi_start_bb (call_bb); !gsi_end_p (gsi);)
if (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL)
{
last_stmt = gsi_stmt (gsi);
remove_edge (e);
/* Produce the call statement. */
- gsi = gsi_last_bb (call_bb);
+ gimple_stmt_iterator gsi = gsi_last_bb (call_bb);
FOR_EACH_VEC_ELT (args_to_pass, i, arg)
if (!is_gimple_val (arg))
{
if (real_retval && split_point->split_part_set_retval)
{
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
/* See if we need new SSA_NAME for the result.
When DECL_BY_REFERENCE is true, retval is actually pointer to
/* See if there is PHI defining return value. */
for (psi = gsi_start_phis (return_bb);
!gsi_end_p (psi); gsi_next (&psi))
- if (!virtual_operand_p (gimple_phi_result (gsi_stmt (psi))))
+ if (!virtual_operand_p (gimple_phi_result (psi.phi ())))
break;
/* When there is PHI, just update its value. */
if (TREE_CODE (retval) == SSA_NAME
&& !gsi_end_p (psi))
- add_phi_arg (gsi_stmt (psi), retval, e, UNKNOWN_LOCATION);
+ add_phi_arg (psi.phi (), retval, e, UNKNOWN_LOCATION);
/* Otherwise update the return BB itself.
find_return_bb allows at most one assignment to return value,
so update first statement. */
gimple_stmt_iterator bsi;
for (bsi = gsi_start_bb (return_bb); !gsi_end_p (bsi);
gsi_next (&bsi))
- if (gimple_code (gsi_stmt (bsi)) == GIMPLE_RETURN)
+ if (greturn *return_stmt
+ = dyn_cast <greturn *> (gsi_stmt (bsi)))
{
- gimple_return_set_retval (gsi_stmt (bsi), retval);
+ gimple_return_set_retval (return_stmt, retval);
break;
}
else if (gimple_code (gsi_stmt (bsi)) == GIMPLE_ASSIGN
*/
else
{
- gimple ret;
+ greturn *ret;
if (split_point->split_part_set_retval
&& !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
{
+2014-11-19 David Malcolm <dmalcolm@redhat.com>
+
+ Merger of git branch "gimple-classes-v2-option-3".
+ * ChangeLog.gimple-classes: New.
+ * java-gimplify.c (java_gimplify_block): Strengthen local "outer"
+ from gimple to gbind *.
+
2014-11-11 David Malcolm <dmalcolm@redhat.com>
* gcc/ChangeLog.jit: New.
--- /dev/null
+2014-10-27 David Malcolm <dmalcolm@redhat.com>
+
+ Patch autogenerated by rename_gimple_subclasses.py from
+ https://github.com/davidmalcolm/gcc-refactoring-scripts
+ revision 7d754b63ff2bf47226a67b2c0af5d74b54d4709f
+
+ * java-gimplify.c (java_gimplify_block): Rename gimple subclass types.
+
+2014-10-24 David Malcolm <dmalcolm@redhat.com>
+
+ Introduce gimple_bind and use it for accessors.
+
+ * java-gimplify.c (java_gimplify_block): Update local to be a
+ gimple_bind rather than just a gimple.
+
+Copyright (C) 2014 Free Software Foundation, Inc.
+
+Copying and distribution of this file, with or without modification,
+are permitted in any medium without royalty provided the copyright
+notice and this notice are preserved.
{
tree decls = BLOCK_VARS (java_block);
tree body = BLOCK_EXPR_BODY (java_block);
- gimple outer = gimple_current_bind_expr ();
+ gbind *outer = gimple_current_bind_expr ();
tree block;
/* Don't bother with empty blocks. */
{
if (gimple_stmt_max_uid (fn) < cedge->lto_stmt_uid)
fatal_error ("Cgraph edge statement index out of range");
- cedge->call_stmt = stmts[cedge->lto_stmt_uid - 1];
+ cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
if (!cedge->call_stmt)
fatal_error ("Cgraph edge statement index not found");
}
{
if (gimple_stmt_max_uid (fn) < cedge->lto_stmt_uid)
fatal_error ("Cgraph edge statement index out of range");
- cedge->call_stmt = stmts[cedge->lto_stmt_uid - 1];
+ cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
if (!cedge->call_stmt)
fatal_error ("Cgraph edge statement index not found");
}
set_gimple_stmt_max_uid (cfun, 0);
FOR_ALL_BB_FN (bb, cfun)
{
- gimple_stmt_iterator gsi;
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gphi *stmt = gsi.phi ();
/* Virtual PHIs are not going to be streamed. */
if (!virtual_operand_p (gimple_phi_result (stmt)))
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
virtual phis now. */
FOR_ALL_BB_FN (bb, cfun)
{
- gimple_stmt_iterator gsi;
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gphi *stmt = gsi.phi ();
if (virtual_operand_p (gimple_phi_result (stmt)))
gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
}
{
struct omp_for_data_loop loop;
tree chunk_size;
- gimple for_stmt;
+ gomp_for *for_stmt;
tree pre, iter_type;
int collapse;
bool have_nowait, have_ordered;
them into *FD. */
static void
-extract_omp_for_data (gimple for_stmt, struct omp_for_data *fd,
+extract_omp_for_data (gomp_for *for_stmt, struct omp_for_data *fd,
struct omp_for_data_loop *loops)
{
tree t, var, *collapse_iter, *collapse_count;
gcc_assert (gimple_code (ws_stmt) == GIMPLE_OMP_FOR);
- extract_omp_for_data (ws_stmt, &fd, NULL);
+ extract_omp_for_data (as_a <gomp_for *> (ws_stmt), &fd, NULL);
if (fd.collapse > 1 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
return false;
location_t loc = gimple_location (ws_stmt);
vec<tree, va_gc> *ws_args;
- if (gimple_code (ws_stmt) == GIMPLE_OMP_FOR)
+ if (gomp_for *for_stmt = dyn_cast <gomp_for *> (ws_stmt))
{
struct omp_for_data fd;
tree n1, n2;
- extract_omp_for_data (ws_stmt, &fd, NULL);
+ extract_omp_for_data (for_stmt, &fd, NULL);
n1 = fd.loop.n1;
n2 = fd.loop.n2;
- if (gimple_omp_for_combined_into_p (ws_stmt))
+ if (gimple_omp_for_combined_into_p (for_stmt))
{
tree innerc
= find_omp_clause (gimple_omp_parallel_clauses (par_stmt),
/* Finalize task copyfn. */
static void
-finalize_task_copyfn (gimple task_stmt)
+finalize_task_copyfn (gomp_task *task_stmt)
{
struct function *child_cfun;
tree child_fn;
gimple_seq seq = NULL, new_seq;
- gimple bind;
+ gbind *bind;
child_fn = gimple_omp_task_copy_fn (task_stmt);
if (child_fn == NULL_TREE)
}
if (is_task_ctx (ctx))
- finalize_task_copyfn (ctx->stmt);
+ finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
XDELETE (ctx);
}
{
omp_context *ctx;
tree name;
- gimple stmt = gsi_stmt (*gsi);
+ gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
/* Ignore parallel directives with empty bodies, unless there
are copyin clauses. */
if (gimple_omp_parallel_combined_p (stmt))
{
- gimple for_stmt;
struct walk_stmt_info wi;
memset (&wi, 0, sizeof (wi));
wi.val_only = true;
walk_gimple_seq (gimple_omp_body (stmt),
find_combined_for, NULL, &wi);
- for_stmt = (gimple) wi.info;
- if (for_stmt)
+ if (wi.info)
{
+ gomp_for *for_stmt = as_a <gomp_for *> ((gimple) wi.info);
struct omp_for_data fd;
extract_omp_for_data (for_stmt, &fd, NULL);
/* We need two temporaries with fd.loop.v type (istart/iend)
{
omp_context *ctx;
tree name, t;
- gimple stmt = gsi_stmt (*gsi);
+ gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
/* Ignore task directives with empty bodies. */
if (optimize > 0
/* Scan an OpenMP loop directive. */
static void
-scan_omp_for (gimple stmt, omp_context *outer_ctx)
+scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
omp_context *ctx;
size_t i;
/* Scan an OpenMP sections directive. */
static void
-scan_omp_sections (gimple stmt, omp_context *outer_ctx)
+scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
{
omp_context *ctx;
/* Scan an OpenMP single directive. */
static void
-scan_omp_single (gimple stmt, omp_context *outer_ctx)
+scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
{
omp_context *ctx;
tree name;
/* Scan an OpenMP target{, data, update} directive. */
static void
-scan_omp_target (gimple stmt, omp_context *outer_ctx)
+scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
omp_context *ctx;
tree name;
/* Scan an OpenMP teams directive. */
static void
-scan_omp_teams (gimple stmt, omp_context *outer_ctx)
+scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
omp_context *ctx = new_omp_context (stmt, outer_ctx);
scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
}
break;
case GIMPLE_OMP_CRITICAL:
- for (; ctx != NULL; ctx = ctx->outer)
- if (gimple_code (ctx->stmt) == GIMPLE_OMP_CRITICAL
- && (gimple_omp_critical_name (stmt)
- == gimple_omp_critical_name (ctx->stmt)))
- {
- error_at (gimple_location (stmt),
- "critical region may not be nested inside a critical "
- "region with the same name");
- return false;
- }
+ {
+ tree this_stmt_name
+ = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
+ for (; ctx != NULL; ctx = ctx->outer)
+ if (gomp_critical *other_crit
+ = dyn_cast <gomp_critical *> (ctx->stmt))
+ if (this_stmt_name == gimple_omp_critical_name (other_crit))
+ {
+ error_at (gimple_location (stmt),
+ "critical region may not be nested inside a critical "
+ "region with the same name");
+ return false;
+ }
+ }
break;
case GIMPLE_OMP_TEAMS:
if (ctx == NULL
break;
case GIMPLE_OMP_FOR:
- scan_omp_for (stmt, ctx);
+ scan_omp_for (as_a <gomp_for *> (stmt), ctx);
break;
case GIMPLE_OMP_SECTIONS:
- scan_omp_sections (stmt, ctx);
+ scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
break;
case GIMPLE_OMP_SINGLE:
- scan_omp_single (stmt, ctx);
+ scan_omp_single (as_a <gomp_single *> (stmt), ctx);
break;
case GIMPLE_OMP_SECTION:
break;
case GIMPLE_OMP_TARGET:
- scan_omp_target (stmt, ctx);
+ scan_omp_target (as_a <gomp_target *> (stmt), ctx);
break;
case GIMPLE_OMP_TEAMS:
- scan_omp_teams (stmt, ctx);
+ scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
break;
case GIMPLE_BIND:
*handled_ops_p = false;
if (ctx)
- for (var = gimple_bind_vars (stmt); var ; var = DECL_CHAIN (var))
+ for (var = gimple_bind_vars (as_a <gbind *> (stmt));
+ var ;
+ var = DECL_CHAIN (var))
insert_decl_map (&ctx->cb, var, var);
}
break;
{
tree fndecl = builtin_decl_explicit (lhs ? BUILT_IN_GOMP_BARRIER_CANCEL
: BUILT_IN_GOMP_BARRIER);
- gimple g = gimple_build_call (fndecl, 0);
+ gcall *g = gimple_build_call (fndecl, 0);
if (lhs)
gimple_call_set_lhs (g, lhs);
return g;
if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
{
- gimple stmt;
+ gcall *stmt;
tree tmp, atmp;
ptr = DECL_VALUE_EXPR (new_var);
gimplify_and_add (x, ilist);
gimple_stmt_iterator gsi
= gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
- gimple g
+ gassign *g
= gimple_build_assign (unshare_expr (lvar), iv);
gsi_insert_before_without_update (&gsi, g,
GSI_SAME_STMT);
if (predicate)
{
- gimple stmt;
+ gcond *stmt;
tree label_true, arm1, arm2;
label = create_artificial_label (UNKNOWN_LOCATION);
if (lastlane == NULL)
{
lastlane = create_tmp_var (unsigned_type_node, NULL);
- gimple g
+ gcall *g
= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
2, simduid,
TREE_OPERAND (val, 1));
/* A convenience function to build an empty GIMPLE_COND with just the
condition. */
-static gimple
+static gcond *
gimple_build_cond_empty (tree cond)
{
enum tree_code pred_code;
static void
expand_parallel_call (struct omp_region *region, basic_block bb,
- gimple entry_stmt, vec<tree, va_gc> *ws_args)
+ gomp_parallel *entry_stmt,
+ vec<tree, va_gc> *ws_args)
{
tree t, t1, t2, val, cond, c, clauses, flags;
gimple_stmt_iterator gsi;
if (gimple_in_ssa_p (cfun))
{
- gimple phi = create_phi_node (tmp_join, bb);
+ gphi *phi = create_phi_node (tmp_join, bb);
add_phi_arg (phi, tmp_then, e_then, UNKNOWN_LOCATION);
add_phi_arg (phi, tmp_else, e_else, UNKNOWN_LOCATION);
}
ENTRY_STMT into the basic_block BB. */
static void
-expand_cilk_for_call (basic_block bb, gimple entry_stmt,
+expand_cilk_for_call (basic_block bb, gomp_parallel *entry_stmt,
vec <tree, va_gc> *ws_args)
{
tree t, t1, t2;
generate the task operation. BB is the block where to insert the code. */
static void
-expand_task_call (basic_block bb, gimple entry_stmt)
+expand_task_call (basic_block bb, gomp_task *entry_stmt)
{
tree t, t1, t2, t3, flags, cond, c, c2, clauses, depend;
gimple_stmt_iterator gsi;
of such a variable. */
if (any_addressable_vars < 0)
{
- gimple parallel_stmt = last_stmt (region->entry);
+ gomp_parallel *parallel_stmt
+ = as_a <gomp_parallel *> (last_stmt (region->entry));
tree child_fun = gimple_omp_parallel_child_fn (parallel_stmt);
tree local_decls, block, decl;
unsigned ix;
/* Emit a library call to launch the children threads. */
if (is_cilk_for)
- expand_cilk_for_call (new_bb, entry_stmt, ws_args);
+ expand_cilk_for_call (new_bb,
+ as_a <gomp_parallel *> (entry_stmt), ws_args);
else if (gimple_code (entry_stmt) == GIMPLE_OMP_PARALLEL)
- expand_parallel_call (region, new_bb, entry_stmt, ws_args);
+ expand_parallel_call (region, new_bb,
+ as_a <gomp_parallel *> (entry_stmt), ws_args);
else
- expand_task_call (new_bb, entry_stmt);
+ expand_task_call (new_bb, as_a <gomp_task *> (entry_stmt));
if (gimple_in_ssa_p (cfun))
update_ssa (TODO_update_ssa_only_virtuals);
}
basic_block &l2_dom_bb)
{
tree t, type = TREE_TYPE (fd->loop.v);
- gimple stmt;
edge e, ne;
int i;
fold_convert (itype, fd->loops[i].n2)))
== NULL_TREE || !integer_onep (t)))
{
+ gcond *cond_stmt;
tree n1, n2;
n1 = fold_convert (itype, unshare_expr (fd->loops[i].n1));
n1 = force_gimple_operand_gsi (gsi, n1, true, NULL_TREE,
n2 = fold_convert (itype, unshare_expr (fd->loops[i].n2));
n2 = force_gimple_operand_gsi (gsi, n2, true, NULL_TREE,
true, GSI_SAME_STMT);
- stmt = gimple_build_cond (fd->loops[i].cond_code, n1, n2,
- NULL_TREE, NULL_TREE);
- gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
- if (walk_tree (gimple_cond_lhs_ptr (stmt),
+ cond_stmt = gimple_build_cond (fd->loops[i].cond_code, n1, n2,
+ NULL_TREE, NULL_TREE);
+ gsi_insert_before (gsi, cond_stmt, GSI_SAME_STMT);
+ if (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
expand_omp_regimplify_p, NULL, NULL)
- || walk_tree (gimple_cond_rhs_ptr (stmt),
+ || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
expand_omp_regimplify_p, NULL, NULL))
{
- *gsi = gsi_for_stmt (stmt);
- gimple_regimplify_operands (stmt, gsi);
+ *gsi = gsi_for_stmt (cond_stmt);
+ gimple_regimplify_operands (cond_stmt, gsi);
}
- e = split_block (entry_bb, stmt);
+ e = split_block (entry_bb, cond_stmt);
if (zero_iter_bb == NULL)
{
+ gassign *assign_stmt;
first_zero_iter = i;
zero_iter_bb = create_empty_bb (entry_bb);
add_bb_to_loop (zero_iter_bb, entry_bb->loop_father);
*gsi = gsi_after_labels (zero_iter_bb);
- stmt = gimple_build_assign (fd->loop.n2,
- build_zero_cst (type));
- gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+ assign_stmt = gimple_build_assign (fd->loop.n2,
+ build_zero_cst (type));
+ gsi_insert_before (gsi, assign_stmt, GSI_SAME_STMT);
set_immediate_dominator (CDI_DOMINATORS, zero_iter_bb,
entry_bb);
}
tree t = fold_convert (TREE_TYPE (tem), counts[i]);
t = force_gimple_operand_gsi (gsi, t, false, NULL_TREE,
false, GSI_CONTINUE_LINKING);
- gimple stmt = gimple_build_assign (tem, t);
+ gassign *stmt = gimple_build_assign (tem, t);
gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
}
}
tree type = TREE_TYPE (fd->loop.v);
tree tem = create_tmp_reg (type, ".tem");
- gimple stmt = gimple_build_assign (tem, startvar);
+ gassign *stmt = gimple_build_assign (tem, startvar);
gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
for (i = fd->collapse - 1; i >= 0; i--)
basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, collapse_bb;
basic_block l2_bb = NULL, l3_bb = NULL;
gimple_stmt_iterator gsi;
- gimple stmt;
+ gassign *assign_stmt;
bool in_combined_parallel = is_combined_parallel (region);
bool broken_loop = region->cont == NULL;
edge e, ne;
DECL_P (startvar)
&& TREE_ADDRESSABLE (startvar),
NULL_TREE, false, GSI_CONTINUE_LINKING);
- stmt = gimple_build_assign (startvar, t);
- gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
+ assign_stmt = gimple_build_assign (startvar, t);
+ gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
t = iend0;
if (bias)
false, GSI_CONTINUE_LINKING);
if (endvar)
{
- stmt = gimple_build_assign (endvar, iend);
- gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
+ assign_stmt = gimple_build_assign (endvar, iend);
+ gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
if (useless_type_conversion_p (TREE_TYPE (fd->loop.v), TREE_TYPE (iend)))
- stmt = gimple_build_assign (fd->loop.v, iend);
+ assign_stmt = gimple_build_assign (fd->loop.v, iend);
else
- stmt = gimple_build_assign_with_ops (NOP_EXPR, fd->loop.v, iend);
- gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
+ assign_stmt = gimple_build_assign_with_ops (NOP_EXPR, fd->loop.v, iend);
+ gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
}
if (fd->collapse > 1)
expand_omp_for_init_vars (fd, &gsi, counts, inner_stmt, startvar);
/* Code to control the increment and predicate for the sequential
loop goes in the CONT_BB. */
gsi = gsi_last_bb (cont_bb);
- stmt = gsi_stmt (gsi);
- gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
- vmain = gimple_omp_continue_control_use (stmt);
- vback = gimple_omp_continue_control_def (stmt);
+ gomp_continue *cont_stmt = as_a <gomp_continue *> (gsi_stmt (gsi));
+ gcc_assert (gimple_code (cont_stmt) == GIMPLE_OMP_CONTINUE);
+ vmain = gimple_omp_continue_control_use (cont_stmt);
+ vback = gimple_omp_continue_control_def (cont_stmt);
if (!gimple_omp_for_combined_p (fd->for_stmt))
{
DECL_P (vback)
&& TREE_ADDRESSABLE (vback),
NULL_TREE, true, GSI_SAME_STMT);
- stmt = gimple_build_assign (vback, t);
- gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+ assign_stmt = gimple_build_assign (vback, t);
+ gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
t = build2 (fd->loop.cond_code, boolean_type_node,
DECL_P (vback) && TREE_ADDRESSABLE (vback) ? t : vback,
iend);
- stmt = gimple_build_cond_empty (t);
- gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+ gcond *cond_stmt = gimple_build_cond_empty (t);
+ gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
}
/* Remove GIMPLE_OMP_CONTINUE. */
if (TREE_TYPE (t) != boolean_type_node)
t = fold_build2 (NE_EXPR, boolean_type_node,
t, build_int_cst (TREE_TYPE (t), 0));
- stmt = gimple_build_cond_empty (t);
- gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
+ gcond *cond_stmt = gimple_build_cond_empty (t);
+ gsi_insert_after (&gsi, cond_stmt, GSI_CONTINUE_LINKING);
}
/* Add the loop cleanup function. */
t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END_CANCEL);
else
t = builtin_decl_explicit (BUILT_IN_GOMP_LOOP_END);
- stmt = gimple_build_call (t, 0);
+ gcall *call_stmt = gimple_build_call (t, 0);
if (gimple_omp_return_lhs (gsi_stmt (gsi)))
- gimple_call_set_lhs (stmt, gimple_omp_return_lhs (gsi_stmt (gsi)));
- gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);
+ gimple_call_set_lhs (call_stmt, gimple_omp_return_lhs (gsi_stmt (gsi)));
+ gsi_insert_after (&gsi, call_stmt, GSI_SAME_STMT);
gsi_remove (&gsi, true);
/* Connect the new blocks. */
basic_block body_bb, cont_bb, collapse_bb = NULL;
basic_block fin_bb;
gimple_stmt_iterator gsi;
- gimple stmt;
edge ep;
enum built_in_function get_num_threads = BUILT_IN_OMP_GET_NUM_THREADS;
enum built_in_function get_thread_num = BUILT_IN_OMP_GET_THREAD_NUM;
n2 = fold_convert (type, unshare_expr (fd->loop.n2));
n2 = force_gimple_operand_gsi (&gsi, n2, true, NULL_TREE,
true, GSI_SAME_STMT);
- stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
- NULL_TREE, NULL_TREE);
- gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
- if (walk_tree (gimple_cond_lhs_ptr (stmt),
+ gcond *cond_stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
+ NULL_TREE, NULL_TREE);
+ gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
+ if (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
expand_omp_regimplify_p, NULL, NULL)
- || walk_tree (gimple_cond_rhs_ptr (stmt),
+ || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
expand_omp_regimplify_p, NULL, NULL))
{
- gsi = gsi_for_stmt (stmt);
- gimple_regimplify_operands (stmt, &gsi);
+ gsi = gsi_for_stmt (cond_stmt);
+ gimple_regimplify_operands (cond_stmt, &gsi);
}
- ep = split_block (entry_bb, stmt);
+ ep = split_block (entry_bb, cond_stmt);
ep->flags = EDGE_TRUE_VALUE;
entry_bb = ep->dest;
ep->probability = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
if (gimple_in_ssa_p (cfun))
{
int dest_idx = find_edge (entry_bb, fin_bb)->dest_idx;
- for (gsi = gsi_start_phis (fin_bb);
- !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gpi = gsi_start_phis (fin_bb);
+ !gsi_end_p (gpi); gsi_next (&gpi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gpi.phi ();
add_phi_arg (phi, gimple_phi_arg_def (phi, dest_idx),
ep, UNKNOWN_LOCATION);
}
gsi_insert_before (&gsi, gimple_build_assign (tt, t), GSI_SAME_STMT);
t = build2 (LT_EXPR, boolean_type_node, threadid, tt);
- stmt = gimple_build_cond_empty (t);
- gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+ gcond *cond_stmt = gimple_build_cond_empty (t);
+ gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
- second_bb = split_block (entry_bb, stmt)->dest;
+ second_bb = split_block (entry_bb, cond_stmt)->dest;
gsi = gsi_last_bb (second_bb);
gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
gsi_insert_before (&gsi, gimple_build_assign (tt, build_int_cst (itype, 0)),
GSI_SAME_STMT);
- stmt = gimple_build_assign_with_ops (PLUS_EXPR, q, q,
- build_int_cst (itype, 1));
- gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+ gassign *assign_stmt
+ = gimple_build_assign_with_ops (PLUS_EXPR, q, q,
+ build_int_cst (itype, 1));
+ gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
- third_bb = split_block (second_bb, stmt)->dest;
+ third_bb = split_block (second_bb, assign_stmt)->dest;
gsi = gsi_last_bb (third_bb);
gcc_assert (gimple_code (gsi_stmt (gsi)) == GIMPLE_OMP_FOR);
DECL_P (startvar)
&& TREE_ADDRESSABLE (startvar),
NULL_TREE, false, GSI_CONTINUE_LINKING);
- stmt = gimple_build_assign (startvar, t);
- gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
+ assign_stmt = gimple_build_assign (startvar, t);
+ gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
t = fold_convert (itype, e0);
t = fold_build2 (MULT_EXPR, itype, t, step);
false, GSI_CONTINUE_LINKING);
if (endvar)
{
- stmt = gimple_build_assign (endvar, e);
- gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
+ assign_stmt = gimple_build_assign (endvar, e);
+ gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
if (useless_type_conversion_p (TREE_TYPE (fd->loop.v), TREE_TYPE (e)))
- stmt = gimple_build_assign (fd->loop.v, e);
+ assign_stmt = gimple_build_assign (fd->loop.v, e);
else
- stmt = gimple_build_assign_with_ops (NOP_EXPR, fd->loop.v, e);
- gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
+ assign_stmt = gimple_build_assign_with_ops (NOP_EXPR, fd->loop.v, e);
+ gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
}
if (fd->collapse > 1)
expand_omp_for_init_vars (fd, &gsi, counts, inner_stmt, startvar);
/* The code controlling the sequential loop replaces the
GIMPLE_OMP_CONTINUE. */
gsi = gsi_last_bb (cont_bb);
- stmt = gsi_stmt (gsi);
- gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
- vmain = gimple_omp_continue_control_use (stmt);
- vback = gimple_omp_continue_control_def (stmt);
+ gomp_continue *cont_stmt = as_a <gomp_continue *> (gsi_stmt (gsi));
+ gcc_assert (gimple_code (cont_stmt) == GIMPLE_OMP_CONTINUE);
+ vmain = gimple_omp_continue_control_use (cont_stmt);
+ vback = gimple_omp_continue_control_def (cont_stmt);
if (!gimple_omp_for_combined_p (fd->for_stmt))
{
DECL_P (vback)
&& TREE_ADDRESSABLE (vback),
NULL_TREE, true, GSI_SAME_STMT);
- stmt = gimple_build_assign (vback, t);
- gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+ assign_stmt = gimple_build_assign (vback, t);
+ gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
t = build2 (fd->loop.cond_code, boolean_type_node,
DECL_P (vback) && TREE_ADDRESSABLE (vback)
basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
basic_block trip_update_bb = NULL, cont_bb, collapse_bb = NULL, fin_bb;
gimple_stmt_iterator gsi;
- gimple stmt;
edge se;
enum built_in_function get_num_threads = BUILT_IN_OMP_GET_NUM_THREADS;
enum built_in_function get_thread_num = BUILT_IN_OMP_GET_THREAD_NUM;
n2 = fold_convert (type, unshare_expr (fd->loop.n2));
n2 = force_gimple_operand_gsi (&gsi, n2, true, NULL_TREE,
true, GSI_SAME_STMT);
- stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
- NULL_TREE, NULL_TREE);
- gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
- if (walk_tree (gimple_cond_lhs_ptr (stmt),
+ gcond *cond_stmt = gimple_build_cond (fd->loop.cond_code, n1, n2,
+ NULL_TREE, NULL_TREE);
+ gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
+ if (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
expand_omp_regimplify_p, NULL, NULL)
- || walk_tree (gimple_cond_rhs_ptr (stmt),
+ || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
expand_omp_regimplify_p, NULL, NULL))
{
- gsi = gsi_for_stmt (stmt);
- gimple_regimplify_operands (stmt, &gsi);
+ gsi = gsi_for_stmt (cond_stmt);
+ gimple_regimplify_operands (cond_stmt, &gsi);
}
- se = split_block (entry_bb, stmt);
+ se = split_block (entry_bb, cond_stmt);
se->flags = EDGE_TRUE_VALUE;
entry_bb = se->dest;
se->probability = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
if (gimple_in_ssa_p (cfun))
{
int dest_idx = find_edge (entry_bb, fin_bb)->dest_idx;
- for (gsi = gsi_start_phis (fin_bb);
- !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gpi = gsi_start_phis (fin_bb);
+ !gsi_end_p (gpi); gsi_next (&gpi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gpi.phi ();
add_phi_arg (phi, gimple_phi_arg_def (phi, dest_idx),
se, UNKNOWN_LOCATION);
}
trip_back = trip_var;
}
- stmt = gimple_build_assign (trip_init, build_int_cst (itype, 0));
- gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+ gassign *assign_stmt
+ = gimple_build_assign (trip_init, build_int_cst (itype, 0));
+ gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
t = fold_build2 (MULT_EXPR, itype, threadid, fd->chunk_size);
t = fold_build2 (MULT_EXPR, itype, t, step);
DECL_P (startvar)
&& TREE_ADDRESSABLE (startvar),
NULL_TREE, false, GSI_CONTINUE_LINKING);
- stmt = gimple_build_assign (startvar, t);
- gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
+ assign_stmt = gimple_build_assign (startvar, t);
+ gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
t = fold_convert (itype, e0);
t = fold_build2 (MULT_EXPR, itype, t, step);
false, GSI_CONTINUE_LINKING);
if (endvar)
{
- stmt = gimple_build_assign (endvar, e);
- gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
+ assign_stmt = gimple_build_assign (endvar, e);
+ gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
if (useless_type_conversion_p (TREE_TYPE (fd->loop.v), TREE_TYPE (e)))
- stmt = gimple_build_assign (fd->loop.v, e);
+ assign_stmt = gimple_build_assign (fd->loop.v, e);
else
- stmt = gimple_build_assign_with_ops (NOP_EXPR, fd->loop.v, e);
- gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
+ assign_stmt = gimple_build_assign_with_ops (NOP_EXPR, fd->loop.v, e);
+ gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
}
if (fd->collapse > 1)
expand_omp_for_init_vars (fd, &gsi, counts, inner_stmt, startvar);
/* The code controlling the sequential loop goes in CONT_BB,
replacing the GIMPLE_OMP_CONTINUE. */
gsi = gsi_last_bb (cont_bb);
- stmt = gsi_stmt (gsi);
- gcc_assert (gimple_code (stmt) == GIMPLE_OMP_CONTINUE);
- vmain = gimple_omp_continue_control_use (stmt);
- vback = gimple_omp_continue_control_def (stmt);
+ gomp_continue *cont_stmt = as_a <gomp_continue *> (gsi_stmt (gsi));
+ vmain = gimple_omp_continue_control_use (cont_stmt);
+ vback = gimple_omp_continue_control_def (cont_stmt);
if (!gimple_omp_for_combined_p (fd->for_stmt))
{
if (DECL_P (vback) && TREE_ADDRESSABLE (vback))
t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
true, GSI_SAME_STMT);
- stmt = gimple_build_assign (vback, t);
- gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+ assign_stmt = gimple_build_assign (vback, t);
+ gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
t = build2 (fd->loop.cond_code, boolean_type_node,
DECL_P (vback) && TREE_ADDRESSABLE (vback)
t = build_int_cst (itype, 1);
t = build2 (PLUS_EXPR, itype, trip_main, t);
- stmt = gimple_build_assign (trip_back, t);
- gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
+ assign_stmt = gimple_build_assign (trip_back, t);
+ gsi_insert_after (&gsi, assign_stmt, GSI_CONTINUE_LINKING);
}
/* Replace the GIMPLE_OMP_RETURN with a barrier, or nothing. */
if (gimple_in_ssa_p (cfun))
{
- gimple_stmt_iterator psi;
- gimple phi;
+ gphi_iterator psi;
+ gphi *phi;
edge re, ene;
edge_var_map *vm;
size_t i;
for (i = 0; !gsi_end_p (psi) && head->iterate (i, &vm);
gsi_next (&psi), ++i)
{
- gimple nphi;
+ gphi *nphi;
source_location locus;
- phi = gsi_stmt (psi);
+ phi = psi.phi ();
t = gimple_phi_result (phi);
gcc_assert (t == redirect_edge_var_map_result (vm));
nphi = create_phi_node (t, iter_part_bb);
comment). */
tree child_fndecl
- = gimple_omp_parallel_child_fn (last_stmt (region->outer->entry));
+ = gimple_omp_parallel_child_fn (
+ as_a <gomp_parallel *> (last_stmt (region->outer->entry)));
tree t, low_val = NULL_TREE, high_val = NULL_TREE;
for (t = DECL_ARGUMENTS (child_fndecl); t; t = TREE_CHAIN (t))
{
basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, l2_bb, l2_dom_bb;
gimple_stmt_iterator gsi;
gimple stmt;
+ gcond *cond_stmt;
bool broken_loop = region->cont == NULL;
edge e, ne;
tree *counts = NULL;
t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
false, GSI_CONTINUE_LINKING);
t = build2 (fd->loop.cond_code, boolean_type_node, fd->loop.v, t);
- stmt = gimple_build_cond_empty (t);
- gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
- if (walk_tree (gimple_cond_lhs_ptr (stmt), expand_omp_regimplify_p,
+ cond_stmt = gimple_build_cond_empty (t);
+ gsi_insert_after (&gsi, cond_stmt, GSI_CONTINUE_LINKING);
+ if (walk_tree (gimple_cond_lhs_ptr (cond_stmt), expand_omp_regimplify_p,
NULL, NULL)
- || walk_tree (gimple_cond_rhs_ptr (stmt), expand_omp_regimplify_p,
+ || walk_tree (gimple_cond_rhs_ptr (cond_stmt), expand_omp_regimplify_p,
NULL, NULL))
{
- gsi = gsi_for_stmt (stmt);
- gimple_regimplify_operands (stmt, &gsi);
+ gsi = gsi_for_stmt (cond_stmt);
+ gimple_regimplify_operands (cond_stmt, &gsi);
}
/* Remove GIMPLE_OMP_RETURN. */
= (struct omp_for_data_loop *)
alloca (gimple_omp_for_collapse (last_stmt (region->entry))
* sizeof (struct omp_for_data_loop));
- extract_omp_for_data (last_stmt (region->entry), &fd, loops);
+ extract_omp_for_data (as_a <gomp_for *> (last_stmt (region->entry)),
+ &fd, loops);
region->sched_kind = fd.sched_kind;
gcc_assert (EDGE_COUNT (region->entry->succs) == 2);
unsigned len;
basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
gimple_stmt_iterator si, switch_si;
- gimple sections_stmt, stmt, cont;
+ gomp_sections *sections_stmt;
+ gimple stmt;
+ gomp_continue *cont;
edge_iterator ei;
edge e;
struct omp_region *inner;
/* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
GIMPLE_OMP_SECTIONS statement. */
si = gsi_last_bb (entry_bb);
- sections_stmt = gsi_stmt (si);
+ sections_stmt = as_a <gomp_sections *> (gsi_stmt (si));
gcc_assert (gimple_code (sections_stmt) == GIMPLE_OMP_SECTIONS);
vin = gimple_omp_sections_control (sections_stmt);
if (!is_combined_parallel (region))
gcc_assert (gimple_code (gsi_stmt (switch_si)) == GIMPLE_OMP_SECTIONS_SWITCH);
if (exit_reachable)
{
- cont = last_stmt (l1_bb);
+ cont = as_a <gomp_continue *> (last_stmt (l1_bb));
gcc_assert (gimple_code (cont) == GIMPLE_OMP_CONTINUE);
vmain = gimple_omp_continue_control_use (cont);
vnext = gimple_omp_continue_control_def (cont);
gsi2 = gsi_start_bb (loop_header);
if (gimple_in_ssa_p (cfun))
{
- gimple stmt;
+ gassign *stmt;
x = force_gimple_operand_gsi (&gsi2, x, true, NULL_TREE,
true, GSI_SAME_STMT);
stmt = gimple_build_assign (loaded_val, x);
tree addr, tree loaded_val, tree stored_val)
{
gimple_stmt_iterator si;
- gimple stmt;
+ gassign *stmt;
tree t;
si = gsi_last_bb (load_bb);
expand_omp_atomic (struct omp_region *region)
{
basic_block load_bb = region->entry, store_bb = region->exit;
- gimple load = last_stmt (load_bb), store = last_stmt (store_bb);
+ gomp_atomic_load *load = as_a <gomp_atomic_load *> (last_stmt (load_bb));
+ gomp_atomic_store *store = as_a <gomp_atomic_store *> (last_stmt (store_bb));
tree loaded_val = gimple_omp_atomic_load_lhs (load);
tree addr = gimple_omp_atomic_load_rhs (load);
tree stored_val = gimple_omp_atomic_store_val (store);
struct function *child_cfun = NULL;
tree child_fn = NULL_TREE, block, t;
gimple_stmt_iterator gsi;
- gimple entry_stmt, stmt;
+ gomp_target *entry_stmt;
+ gimple stmt;
edge e;
- entry_stmt = last_stmt (region->entry);
+ entry_stmt = as_a <gomp_target *> (last_stmt (region->entry));
new_bb = region->entry;
int kind = gimple_omp_target_kind (entry_stmt);
if (kind == GF_OMP_TARGET_KIND_REGION)
{
tree block, control;
gimple_stmt_iterator tgsi;
- gimple stmt, new_stmt, bind, t;
+ gomp_sections *stmt;
+ gimple t;
+ gbind *new_stmt, *bind;
gimple_seq ilist, dlist, olist, new_body;
- stmt = gsi_stmt (*gsi_p);
+ stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
push_gimplify_context ();
to a synchronization analysis pass. */
static void
-lower_omp_single_simple (gimple single_stmt, gimple_seq *pre_p)
+lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
{
location_t loc = gimple_location (single_stmt);
tree tlabel = create_artificial_label (loc);
to a synchronization analysis pass. */
static void
-lower_omp_single_copy (gimple single_stmt, gimple_seq *pre_p, omp_context *ctx)
+lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
+ omp_context *ctx)
{
tree ptr_type, t, l0, l1, l2, bfn_decl;
gimple_seq copyin_seq;
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
tree block;
- gimple t, bind, single_stmt = gsi_stmt (*gsi_p);
+ gimple t;
+ gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
+ gbind *bind;
gimple_seq bind_body, bind_body_tail = NULL, dlist;
push_gimplify_context ();
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
tree block, lab = NULL, x, bfn_decl;
- gimple stmt = gsi_stmt (*gsi_p), bind;
+ gimple stmt = gsi_stmt (*gsi_p);
+ gbind *bind;
location_t loc = gimple_location (stmt);
gimple_seq tseq;
static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
- gimple stmt = gsi_stmt (*gsi_p), bind, x;
+ gimple stmt = gsi_stmt (*gsi_p);
+ gcall *x;
+ gbind *bind;
tree block = make_node (BLOCK);
bind = gimple_build_bind (NULL, NULL, block);
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
tree block;
- gimple stmt = gsi_stmt (*gsi_p), bind, x;
+ gimple stmt = gsi_stmt (*gsi_p);
+ gcall *x;
+ gbind *bind;
push_gimplify_context ();
{
tree block;
tree name, lock, unlock;
- gimple stmt = gsi_stmt (*gsi_p), bind;
+ gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
+ gbind *bind;
location_t loc = gimple_location (stmt);
gimple_seq tbody;
{
tree *rhs_p, block;
struct omp_for_data fd, *fdp = NULL;
- gimple stmt = gsi_stmt (*gsi_p), new_stmt;
+ gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
+ gbind *new_stmt;
gimple_seq omp_for_body, body, dlist;
size_t i;
if (!gimple_seq_empty_p (omp_for_body)
&& gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
{
- gimple inner_bind = gimple_seq_first_stmt (omp_for_body);
+ gbind *inner_bind
+ = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
tree vars = gimple_bind_vars (inner_bind);
gimple_bind_append_vars (new_stmt, vars);
/* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
/* Create task copyfn. */
static void
-create_task_copyfn (gimple task_stmt, omp_context *ctx)
+create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
struct function *child_cfun;
tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
tree clauses;
tree child_fn, t;
gimple stmt = gsi_stmt (*gsi_p);
- gimple par_bind, bind, dep_bind = NULL;
+ gbind *par_bind, *bind, *dep_bind = NULL;
gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
location_t loc = gimple_location (stmt);
clauses = gimple_omp_taskreg_clauses (stmt);
- par_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
+ par_bind
+ = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
par_body = gimple_bind_body (par_bind);
child_fn = ctx->cb.dst_fn;
if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
}
if (ctx->srecord_type)
- create_task_copyfn (stmt, ctx);
+ create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
push_gimplify_context ();
{
tree clauses;
tree child_fn, t, c;
- gimple stmt = gsi_stmt (*gsi_p);
- gimple tgt_bind = NULL, bind;
+ gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
+ gbind *tgt_bind = NULL, *bind;
gimple_seq tgt_body = NULL, olist, ilist, new_body;
location_t loc = gimple_location (stmt);
int kind = gimple_omp_target_kind (stmt);
clauses = gimple_omp_target_clauses (stmt);
if (kind == GF_OMP_TARGET_KIND_REGION)
{
- tgt_bind = gimple_seq_first_stmt (gimple_omp_body (stmt));
+ tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
tgt_body = gimple_bind_body (tgt_bind);
}
else if (kind == GF_OMP_TARGET_KIND_DATA)
static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
- gimple teams_stmt = gsi_stmt (*gsi_p);
+ gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
push_gimplify_context ();
tree block = make_node (BLOCK);
- gimple bind = gimple_build_bind (NULL, NULL, block);
+ gbind *bind = gimple_build_bind (NULL, NULL, block);
gsi_replace (gsi_p, bind, true);
gimple_seq bind_body = NULL;
gimple_seq dlist = NULL;
{
gimple stmt = gsi_stmt (*gsi_p);
struct walk_stmt_info wi;
+ gcall *call_stmt;
if (gimple_has_location (stmt))
input_location = gimple_location (stmt);
switch (gimple_code (stmt))
{
case GIMPLE_COND:
- if ((ctx || task_shared_vars)
- && (walk_tree (gimple_cond_lhs_ptr (stmt), lower_omp_regimplify_p,
- ctx ? NULL : &wi, NULL)
- || walk_tree (gimple_cond_rhs_ptr (stmt), lower_omp_regimplify_p,
- ctx ? NULL : &wi, NULL)))
- gimple_regimplify_operands (stmt, gsi_p);
+ {
+ gcond *cond_stmt = as_a <gcond *> (stmt);
+ if ((ctx || task_shared_vars)
+ && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
+ lower_omp_regimplify_p,
+ ctx ? NULL : &wi, NULL)
+ || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
+ lower_omp_regimplify_p,
+ ctx ? NULL : &wi, NULL)))
+ gimple_regimplify_operands (cond_stmt, gsi_p);
+ }
break;
case GIMPLE_CATCH:
- lower_omp (gimple_catch_handler_ptr (stmt), ctx);
+ lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
break;
case GIMPLE_EH_FILTER:
lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
break;
case GIMPLE_TRANSACTION:
- lower_omp (gimple_transaction_body_ptr (stmt), ctx);
+ lower_omp (gimple_transaction_body_ptr (
+ as_a <gtransaction *> (stmt)),
+ ctx);
break;
case GIMPLE_BIND:
- lower_omp (gimple_bind_body_ptr (stmt), ctx);
+ lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
break;
case GIMPLE_OMP_PARALLEL:
case GIMPLE_OMP_TASK:
break;
case GIMPLE_OMP_ATOMIC_LOAD:
if ((ctx || task_shared_vars)
- && walk_tree (gimple_omp_atomic_load_rhs_ptr (stmt),
+ && walk_tree (gimple_omp_atomic_load_rhs_ptr (
+ as_a <gomp_atomic_load *> (stmt)),
lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
gimple_regimplify_operands (stmt, gsi_p);
break;
break;
case GIMPLE_CALL:
tree fndecl;
- fndecl = gimple_call_fndecl (stmt);
+ call_stmt = as_a <gcall *> (stmt);
+ fndecl = gimple_call_fndecl (call_stmt);
if (fndecl
&& DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
switch (DECL_FUNCTION_CODE (fndecl))
cctx = ctx;
if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
cctx = cctx->outer;
- gcc_assert (gimple_call_lhs (stmt) == NULL_TREE);
+ gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
if (!cctx->cancellable)
{
if (DECL_FUNCTION_CODE (fndecl)
if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
{
fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
- gimple_call_set_fndecl (stmt, fndecl);
- gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
+ gimple_call_set_fndecl (call_stmt, fndecl);
+ gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
}
tree lhs;
lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)), NULL);
- gimple_call_set_lhs (stmt, lhs);
+ gimple_call_set_lhs (call_stmt, lhs);
tree fallthru_label;
fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
gimple g;
break;
case GIMPLE_LABEL:
- splay_tree_insert (all_labels, (splay_tree_key) gimple_label_label (stmt),
+ splay_tree_insert (all_labels,
+ (splay_tree_key) gimple_label_label (
+ as_a <glabel *> (stmt)),
(splay_tree_value) context);
break;
case GIMPLE_COND:
{
- tree lab = gimple_cond_true_label (stmt);
+ gcond *cond_stmt = as_a <gcond *> (stmt);
+ tree lab = gimple_cond_true_label (cond_stmt);
if (lab)
{
n = splay_tree_lookup (all_labels,
diagnose_sb_0 (gsi_p, context,
n ? (gimple) n->value : NULL);
}
- lab = gimple_cond_false_label (stmt);
+ lab = gimple_cond_false_label (cond_stmt);
if (lab)
{
n = splay_tree_lookup (all_labels,
case GIMPLE_SWITCH:
{
+ gswitch *switch_stmt = as_a <gswitch *> (stmt);
unsigned int i;
- for (i = 0; i < gimple_switch_num_labels (stmt); ++i)
+ for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
{
- tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
+ tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
if (n && diagnose_sb_0 (gsi_p, context, (gimple) n->value))
break;
wi.info = &info;
walk_gimple_op (stmt, ipa_simd_modify_stmt_ops, &wi);
- if (gimple_code (stmt) == GIMPLE_RETURN)
+ if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
- tree retval = gimple_return_retval (stmt);
+ tree retval = gimple_return_retval (return_stmt);
if (!retval)
{
gsi_remove (&gsi, true);
make_edge (incr_bb, latch_bb, EDGE_TRUE_VALUE); */
FALLTHRU_EDGE (incr_bb)->flags = EDGE_TRUE_VALUE;
- gimple phi = create_phi_node (iter1, body_bb);
+ gphi *phi = create_phi_node (iter1, body_bb);
edge preheader_edge = find_edge (entry_bb, body_bb);
edge latch_edge = single_succ_edge (latch_bb);
add_phi_arg (phi, build_zero_cst (unsigned_type_node), preheader_edge,
tree fn = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
gimple_seq seq = NULL;
bool need_cvt = false;
- gimple call
+ gcall *call
= gimple_build_call (fn, 2, def, size_int (alignment));
g = call;
if (!useless_type_conversion_p (TREE_TYPE (orig_arg),
Otherwise return false and set LOOP_INVAIANT to NULL. */
static bool
-is_comparison_with_loop_invariant_p (gimple stmt, struct loop *loop,
+is_comparison_with_loop_invariant_p (gcond *stmt, struct loop *loop,
tree *loop_invariant,
enum tree_code *compare_code,
tree *loop_step,
stmt = last_stmt (bb);
if (!stmt || gimple_code (stmt) != GIMPLE_COND)
return;
- if (!is_comparison_with_loop_invariant_p (stmt, loop, &compare_var,
+ if (!is_comparison_with_loop_invariant_p (as_a <gcond *> (stmt),
+ loop, &compare_var,
&compare_code,
&compare_step_var,
&compare_base))
{
unsigned i;
bool check_value_one;
- gimple phi_stmt;
+ gimple lhs_def_stmt;
+ gphi *phi_stmt;
tree cmp_rhs, cmp_lhs;
- gimple cmp_stmt = last_stmt (exit_edge->src);
+ gimple last;
+ gcond *cmp_stmt;
- if (!cmp_stmt || gimple_code (cmp_stmt) != GIMPLE_COND)
+ last = last_stmt (exit_edge->src);
+ if (!last)
+ return;
+ cmp_stmt = dyn_cast <gcond *> (last);
+ if (!cmp_stmt)
return;
+
cmp_rhs = gimple_cond_rhs (cmp_stmt);
cmp_lhs = gimple_cond_lhs (cmp_stmt);
if (!TREE_CONSTANT (cmp_rhs)
^ (gimple_cond_code (cmp_stmt) == EQ_EXPR))
^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0));
- phi_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
- if (!phi_stmt || gimple_code (phi_stmt) != GIMPLE_PHI)
+ lhs_def_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
+ if (!lhs_def_stmt)
+ return;
+
+ phi_stmt = dyn_cast <gphi *> (lhs_def_stmt);
+ if (!phi_stmt)
return;
for (i = 0; i < gimple_phi_num_args (phi_stmt); i++)
tree loop_bound_step = NULL;
tree loop_bound_var = NULL;
tree loop_iv_base = NULL;
- gimple stmt = NULL;
+ gcond *stmt = NULL;
exits = get_loop_exit_edges (loop);
n_exits = exits.length ();
if (nb_iter->stmt
&& gimple_code (nb_iter->stmt) == GIMPLE_COND)
{
- stmt = nb_iter->stmt;
+ stmt = as_a <gcond *> (nb_iter->stmt);
break;
}
if (!stmt && last_stmt (loop->header)
&& gimple_code (last_stmt (loop->header)) == GIMPLE_COND)
- stmt = last_stmt (loop->header);
+ stmt = as_a <gcond *> (last_stmt (loop->header));
if (stmt)
is_comparison_with_loop_invariant_p (stmt, loop,
&loop_bound_var,
static void
apply_return_prediction (void)
{
- gimple return_stmt = NULL;
+ greturn *return_stmt = NULL;
tree return_val;
edge e;
- gimple phi;
+ gphi *phi;
int phi_num_args, i;
enum br_predictor pred;
enum prediction direction;
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
{
- return_stmt = last_stmt (e->src);
- if (return_stmt
- && gimple_code (return_stmt) == GIMPLE_RETURN)
- break;
+ gimple last = last_stmt (e->src);
+ if (last
+ && gimple_code (last) == GIMPLE_RETURN)
+ {
+ return_stmt = as_a <greturn *> (last);
+ break;
+ }
}
if (!e)
return;
|| !SSA_NAME_DEF_STMT (return_val)
|| gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
return;
- phi = SSA_NAME_DEF_STMT (return_val);
+ phi = as_a <gphi *> (SSA_NAME_DEF_STMT (return_val));
phi_num_args = gimple_phi_num_args (phi);
pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
gimple_stmt_iterator gi;
for (gi = gsi_start_bb (e->dest); !gsi_end_p (gi); gsi_next (&gi))
{
- gimple stmt = gsi_stmt (gi);
+ glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gi));
tree decl;
- if (gimple_code (stmt) != GIMPLE_LABEL)
+ if (!label_stmt)
break;
- decl = gimple_label_label (stmt);
+ decl = gimple_label_label (label_stmt);
if (DECL_ARTIFICIAL (decl))
continue;
{
/* Handle asm volatile or asm with "memory" clobber
the same as potentionally freeing call. */
- if (gimple_code (stmt) == GIMPLE_ASM
+ gasm *asm_stmt = dyn_cast <gasm *> (stmt);
+ if (asm_stmt
&& asan_check_optimize
- && (gimple_asm_clobbers_memory_p (stmt)
- || gimple_asm_volatile_p (stmt)))
+ && (gimple_asm_clobbers_memory_p (asm_stmt)
+ || gimple_asm_volatile_p (asm_stmt)))
info->freeing_call_events++;
gsi_next (&gsi);
continue;
static void
sese_build_liveouts_bb (sese region, bitmap liveouts, basic_block bb)
{
- gimple_stmt_iterator bsi;
edge e;
edge_iterator ei;
ssa_op_iter iter;
use_operand_p use_p;
FOR_EACH_EDGE (e, ei, bb->succs)
- for (bsi = gsi_start_phis (e->dest); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gphi_iterator bsi = gsi_start_phis (e->dest); !gsi_end_p (bsi);
+ gsi_next (&bsi))
sese_build_liveouts_use (region, liveouts, bb,
- PHI_ARG_DEF_FROM_EDGE (gsi_stmt (bsi), e));
+ PHI_ARG_DEF_FROM_EDGE (bsi.phi (), e));
- for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
+ gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
static void
sese_add_exit_phis_edge (basic_block exit, tree use, edge false_e, edge true_e)
{
- gimple phi = create_phi_node (NULL_TREE, exit);
+ gphi *phi = create_phi_node (NULL_TREE, exit);
create_new_def_for (use, phi, gimple_phi_result_ptr (phi));
add_phi_arg (phi, use, false_e, UNKNOWN_LOCATION);
add_phi_arg (phi, use, true_e, UNKNOWN_LOCATION);
basic_block bb = entry->dest;
gimple last = last_stmt (bb);
gimple_stmt_iterator gsi = gsi_last_bb (bb);
- gimple cond_stmt;
+ gcond *cond_stmt;
gcc_assert (gimple_code (last) == GIMPLE_COND);
a real stmt or a PHI node, looking at the USE nodes matching FLAGS. */
#define FOR_EACH_PHI_OR_STMT_USE(USEVAR, STMT, ITER, FLAGS) \
for ((USEVAR) = (gimple_code (STMT) == GIMPLE_PHI \
- ? op_iter_init_phiuse (&(ITER), STMT, FLAGS) \
+ ? op_iter_init_phiuse (&(ITER), \
+ as_a <gphi *> (STMT), \
+ FLAGS) \
: op_iter_init_use (&(ITER), STMT, FLAGS)); \
!op_iter_done (&(ITER)); \
(USEVAR) = op_iter_next_use (&(ITER)))
a real stmt or a PHI node, looking at the DEF nodes matching FLAGS. */
#define FOR_EACH_PHI_OR_STMT_DEF(DEFVAR, STMT, ITER, FLAGS) \
for ((DEFVAR) = (gimple_code (STMT) == GIMPLE_PHI \
- ? op_iter_init_phidef (&(ITER), STMT, FLAGS) \
+ ? op_iter_init_phidef (&(ITER), \
+ as_a <gphi *> (STMT), \
+ FLAGS) \
: op_iter_init_def (&(ITER), STMT, FLAGS)); \
!op_iter_done (&(ITER)); \
(DEFVAR) = op_iter_next_def (&(ITER)))
ptr->numops = 1;
break;
case GIMPLE_ASM:
- ptr->numops = gimple_asm_noutputs (stmt);
+ ptr->numops = gimple_asm_noutputs (as_a <gasm *> (stmt));
break;
default:
ptr->numops = 0;
/* If there is a single DEF in the PHI node which matches FLAG, return it.
Otherwise return NULL_DEF_OPERAND_P. */
static inline tree
-single_phi_def (gimple stmt, int flags)
+single_phi_def (gphi *stmt, int flags)
{
tree def = PHI_RESULT (stmt);
if ((flags & SSA_OP_DEF) && is_gimple_reg (def))
/* Initialize the iterator PTR for uses matching FLAGS in PHI. FLAGS should
be either SSA_OP_USES or SSA_OP_VIRTUAL_USES. */
static inline use_operand_p
-op_iter_init_phiuse (ssa_op_iter *ptr, gimple phi, int flags)
+op_iter_init_phiuse (ssa_op_iter *ptr, gphi *phi, int flags)
{
tree phi_def = gimple_phi_result (phi);
int comp;
/* Start an iterator for a PHI definition. */
static inline def_operand_p
-op_iter_init_phidef (ssa_op_iter *ptr, gimple phi, int flags)
+op_iter_init_phidef (ssa_op_iter *ptr, gphi *phi, int flags)
{
tree phi_def = PHI_RESULT (phi);
int comp;
/* Only look at virtual or real uses, depending on the type of HEAD. */
flag = (is_gimple_reg (use) ? SSA_OP_USE : SSA_OP_VIRTUAL_USES);
- if (gimple_code (head_stmt) == GIMPLE_PHI)
+ if (gphi *phi = dyn_cast <gphi *> (head_stmt))
{
- FOR_EACH_PHI_ARG (use_p, head_stmt, op_iter, flag)
+ FOR_EACH_PHI_ARG (use_p, phi, op_iter, flag)
if (USE_FROM_PTR (use_p) == use)
last_p = move_use_after_head (use_p, head, last_p);
}
STMT. Record this information in the aux field of the edge. */
static inline void
-compute_cases_per_edge (gimple stmt)
+compute_cases_per_edge (gswitch *stmt)
{
basic_block bb = gimple_bb (stmt);
reset_out_edges_aux (bb);
Generate the code to test it and jump to the right place. */
void
-expand_case (gimple stmt)
+expand_case (gswitch *stmt)
{
tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
rtx default_label = NULL_RTX;
case GIMPLE_TRANSACTION:
{
+ gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
unsigned char inner_flags = DIAG_TM_SAFE;
- if (gimple_transaction_subcode (stmt) & GTMA_IS_RELAXED)
+ if (gimple_transaction_subcode (trans_stmt) & GTMA_IS_RELAXED)
{
if (d->block_flags & DIAG_TM_SAFE)
error_at (gimple_location (stmt),
"relaxed transaction in %<transaction_safe%> function");
inner_flags = DIAG_TM_RELAXED;
}
- else if (gimple_transaction_subcode (stmt) & GTMA_IS_OUTER)
+ else if (gimple_transaction_subcode (trans_stmt) & GTMA_IS_OUTER)
{
if (d->block_flags)
error_at (gimple_location (stmt),
}
*handled_ops_p = true;
- if (gimple_transaction_body (stmt))
+ if (gimple_transaction_body (trans_stmt))
{
struct walk_stmt_info wi_inner;
struct diagnose_tm d_inner;
memset (&wi_inner, 0, sizeof (wi_inner));
wi_inner.info = &d_inner;
- walk_gimple_seq (gimple_transaction_body (stmt),
+ walk_gimple_seq (gimple_transaction_body (trans_stmt),
diagnose_tm_1, diagnose_tm_1_op, &wi_inner);
}
}
/* Entry block for the transaction this address occurs in. */
basic_block entry_block;
/* Dominating statements the store occurs in. */
- gimple_vec stmts;
+ vec<gimple> stmts;
/* Initially, while we are building the log, we place a nonzero
value here to mean that this address *will* be saved with a
save/restore sequence. Later, when generating the save sequence
static void
lower_transaction (gimple_stmt_iterator *gsi, struct walk_stmt_info *wi)
{
- gimple g, stmt = gsi_stmt (*gsi);
+ gimple g;
+ gtransaction *stmt = as_a <gtransaction *> (gsi_stmt (*gsi));
unsigned int *outer_state = (unsigned int *) wi->info;
unsigned int this_state = 0;
struct walk_stmt_info this_wi;
struct tm_region
{
+public:
+
+ /* The field "transaction_stmt" is initially a gtransaction *,
+ but eventually gets lowered to a gcall *(to BUILT_IN_TM_START).
+
+ Helper method to get it as a gtransaction *, with code-checking
+ in a checked-build. */
+
+ gtransaction *
+ get_transaction_stmt () const
+ {
+ return as_a <gtransaction *> (transaction_stmt);
+ }
+
+public:
+
/* Link to the next unnested transaction. */
struct tm_region *next;
/* The GIMPLE_TRANSACTION statement beginning this transaction.
After TM_MARK, this gets replaced by a call to
- BUILT_IN_TM_START. */
+ BUILT_IN_TM_START.
+ Hence this will be either a gtransaction *or a gcall *. */
gimple transaction_stmt;
/* After TM_MARK expands the GIMPLE_TRANSACTION into a call to
GIMPLE_TRANSACTION statement in a tree of tm_region elements. */
static struct tm_region *
-tm_region_init_0 (struct tm_region *outer, basic_block bb, gimple stmt)
+tm_region_init_0 (struct tm_region *outer, basic_block bb,
+ gtransaction *stmt)
{
struct tm_region *region;
/* Check for the last statement in the block beginning a new region. */
g = last_stmt (bb);
old_region = region;
- if (g && gimple_code (g) == GIMPLE_TRANSACTION)
- region = tm_region_init_0 (region, bb, g);
+ if (g)
+ if (gtransaction *trans_stmt = dyn_cast <gtransaction *> (g))
+ region = tm_region_init_0 (region, bb, trans_stmt);
/* Process subsequent blocks. */
FOR_EACH_EDGE (e, ei, bb->succs)
{
if (region && region->transaction_stmt)
{
- flags |= gimple_transaction_subcode (region->transaction_stmt);
- gimple_transaction_set_subcode (region->transaction_stmt, flags);
+ gtransaction *transaction_stmt = region->get_transaction_stmt ();
+ flags |= gimple_transaction_subcode (transaction_stmt);
+ gimple_transaction_set_subcode (transaction_stmt, flags);
}
}
LOC is the location to use for the new statement(s). */
-static gimple
+static gcall *
build_tm_load (location_t loc, tree lhs, tree rhs, gimple_stmt_iterator *gsi)
{
enum built_in_function code = END_BUILTINS;
tree t, type = TREE_TYPE (rhs), decl;
- gimple gcall;
+ gcall *gcall;
if (type == float_type_node)
code = BUILT_IN_TM_LOAD_FLOAT;
/* Similarly for storing TYPE in a transactional context. */
-static gimple
+static gcall *
build_tm_store (location_t loc, tree lhs, tree rhs, gimple_stmt_iterator *gsi)
{
enum built_in_function code = END_BUILTINS;
tree t, fn, type = TREE_TYPE (rhs), simple_type;
- gimple gcall;
+ gcall *gcall;
if (type == float_type_node)
code = BUILT_IN_TM_STORE_FLOAT;
expand_call_tm (struct tm_region *region,
gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
tree lhs = gimple_call_lhs (stmt);
tree fn_decl;
struct cgraph_node *node;
tree tmp = create_tmp_reg (TREE_TYPE (lhs), NULL);
location_t loc = gimple_location (stmt);
edge fallthru_edge = NULL;
+ gassign *assign_stmt;
/* Remember if the call was going to throw. */
if (stmt_can_throw_internal (stmt))
gimple_call_set_lhs (stmt, tmp);
update_stmt (stmt);
- stmt = gimple_build_assign (lhs, tmp);
- gimple_set_location (stmt, loc);
+ assign_stmt = gimple_build_assign (lhs, tmp);
+ gimple_set_location (assign_stmt, loc);
/* We cannot throw in the middle of a BB. If the call was going
to throw, place the instrumentation on the fallthru edge, so
the call remains the last statement in the block. */
if (fallthru_edge)
{
- gimple_seq fallthru_seq = gimple_seq_alloc_with_stmt (stmt);
+ gimple_seq fallthru_seq = gimple_seq_alloc_with_stmt (assign_stmt);
gimple_stmt_iterator fallthru_gsi = gsi_start (fallthru_seq);
expand_assign_tm (region, &fallthru_gsi);
gsi_insert_seq_on_edge (fallthru_edge, fallthru_seq);
}
else
{
- gsi_insert_after (gsi, stmt, GSI_CONTINUE_LINKING);
+ gsi_insert_after (gsi, assign_stmt, GSI_CONTINUE_LINKING);
expand_assign_tm (region, gsi);
}
/* ??? There are plenty of bits here we're not computing. */
{
- int subcode = gimple_transaction_subcode (region->transaction_stmt);
+ int subcode = gimple_transaction_subcode (region->get_transaction_stmt ());
int flags = 0;
if (subcode & GTMA_DOES_GO_IRREVOCABLE)
flags |= PR_DOESGOIRREVOCABLE;
if (subcode & GTMA_IS_OUTER)
region->original_transaction_was_outer = true;
tree t = build_int_cst (tm_state_type, flags);
- gimple call = gimple_build_call (tm_start, 1, t);
+ gcall *call = gimple_build_call (tm_start, 1, t);
gimple_call_set_lhs (call, tm_state);
gimple_set_location (call, gimple_location (region->transaction_stmt));
// again as we process blocks.
if (region->exit_blocks)
{
- unsigned int subcode
- = gimple_transaction_subcode (region->transaction_stmt);
+ gtransaction *transaction_stmt = region->get_transaction_stmt ();
+ unsigned int subcode = gimple_transaction_subcode (transaction_stmt);
if (subcode & GTMA_DOES_GO_IRREVOCABLE)
subcode &= (GTMA_DECLARATION_MASK | GTMA_DOES_GO_IRREVOCABLE
| GTMA_HAS_NO_INSTRUMENTATION);
else
subcode &= GTMA_DECLARATION_MASK;
- gimple_transaction_set_subcode (region->transaction_stmt, subcode);
+ gimple_transaction_set_subcode (transaction_stmt, subcode);
}
return NULL;
if (region->outer && region->outer->transaction_stmt)
{
- unsigned s = gimple_transaction_subcode (region->transaction_stmt);
+ unsigned s
+ = gimple_transaction_subcode (region->get_transaction_stmt ());
s &= (GTMA_HAVE_ABORT | GTMA_HAVE_LOAD | GTMA_HAVE_STORE
| GTMA_MAY_ENTER_IRREVOCABLE);
- s |= gimple_transaction_subcode (region->outer->transaction_stmt);
- gimple_transaction_set_subcode (region->outer->transaction_stmt, s);
+ s |= gimple_transaction_subcode (region->outer->get_transaction_stmt ());
+ gimple_transaction_set_subcode (region->outer->get_transaction_stmt (),
+ s);
}
propagate_tm_flags_out (region->next);
{
if (r->transaction_stmt)
{
- unsigned sub = gimple_transaction_subcode (r->transaction_stmt);
+ unsigned sub
+ = gimple_transaction_subcode (r->get_transaction_stmt ());
/* If we're sure to go irrevocable, there won't be
anything to expand, since the run-time will go
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi = next_gsi)
{
gimple stmt = gsi_stmt (gsi);
+ gcall *call_stmt;
next_gsi = gsi;
gsi_next (&next_gsi);
// ??? Shouldn't we split for any non-pure, non-irrevocable function?
- if (gimple_code (stmt) != GIMPLE_CALL
- || (gimple_call_flags (stmt) & ECF_TM_BUILTIN) == 0)
+ call_stmt = dyn_cast <gcall *> (stmt);
+ if ((!call_stmt)
+ || (gimple_call_flags (call_stmt) & ECF_TM_BUILTIN) == 0)
continue;
- if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)) == BUILT_IN_TM_ABORT)
+ if (DECL_FUNCTION_CODE (gimple_call_fndecl (call_stmt))
+ == BUILT_IN_TM_ABORT)
{
// If we have a ``_transaction_cancel [[outer]]'', there is only
// one abnormal edge: to the transaction marked OUTER.
// All compiler-generated instances of BUILT_IN_TM_ABORT have a
// constant argument, which we can examine here. Users invoking
// TM_ABORT directly get what they deserve.
- tree arg = gimple_call_arg (stmt, 0);
+ tree arg = gimple_call_arg (call_stmt, 0);
if (TREE_CODE (arg) == INTEGER_CST
&& (TREE_INT_CST_LOW (arg) & AR_OUTERABORT) != 0
&& !decl_is_tm_clone (current_function_decl))
for (struct tm_region *o = region; o; o = o->outer)
if (o->original_transaction_was_outer)
{
- split_bb_make_tm_edge (stmt, o->restart_block,
+ split_bb_make_tm_edge (call_stmt, o->restart_block,
gsi, &next_gsi);
break;
}
// Non-outer, TM aborts have an abnormal edge to the inner-most
// transaction, the one being aborted;
- split_bb_make_tm_edge (stmt, region->restart_block, gsi, &next_gsi);
+ split_bb_make_tm_edge (call_stmt, region->restart_block, gsi,
+ &next_gsi);
}
// All TM builtins have an abnormal edge to the outer-most transaction.
for (struct tm_region *o = region; o; o = o->outer)
if (!o->outer)
{
- split_bb_make_tm_edge (stmt, o->restart_block, gsi, &next_gsi);
+ split_bb_make_tm_edge (call_stmt, o->restart_block, gsi, &next_gsi);
break;
}
// Delete any tail-call annotation that may have been added.
// The tail-call pass may have mis-identified the commit as being
// a candidate because we had not yet added this restart edge.
- gimple_call_set_tail (stmt, false);
+ gimple_call_set_tail (call_stmt, false);
}
}
static void
tm_memopt_transform_stmt (unsigned int offset,
- gimple stmt,
+ gcall *stmt,
gimple_stmt_iterator *gsi)
{
tree fn = gimple_call_fn (stmt);
if (is_tm_simple_load (stmt))
{
+ gcall *call_stmt = as_a <gcall *> (stmt);
loc = tm_memopt_value_number (stmt, NO_INSERT);
if (store_avail && bitmap_bit_p (store_avail, loc))
- tm_memopt_transform_stmt (TRANSFORM_RAW, stmt, &gsi);
+ tm_memopt_transform_stmt (TRANSFORM_RAW, call_stmt, &gsi);
else if (store_antic && bitmap_bit_p (store_antic, loc))
{
- tm_memopt_transform_stmt (TRANSFORM_RFW, stmt, &gsi);
+ tm_memopt_transform_stmt (TRANSFORM_RFW, call_stmt, &gsi);
bitmap_set_bit (store_avail, loc);
}
else if (read_avail && bitmap_bit_p (read_avail, loc))
- tm_memopt_transform_stmt (TRANSFORM_RAR, stmt, &gsi);
+ tm_memopt_transform_stmt (TRANSFORM_RAR, call_stmt, &gsi);
else
bitmap_set_bit (read_avail, loc);
}
else if (is_tm_simple_store (stmt))
{
+ gcall *call_stmt = as_a <gcall *> (stmt);
loc = tm_memopt_value_number (stmt, NO_INSERT);
if (store_avail && bitmap_bit_p (store_avail, loc))
- tm_memopt_transform_stmt (TRANSFORM_WAW, stmt, &gsi);
+ tm_memopt_transform_stmt (TRANSFORM_WAW, call_stmt, &gsi);
else
{
if (read_avail && bitmap_bit_p (read_avail, loc))
- tm_memopt_transform_stmt (TRANSFORM_WAR, stmt, &gsi);
+ tm_memopt_transform_stmt (TRANSFORM_WAR, call_stmt, &gsi);
bitmap_set_bit (store_avail, loc);
}
}
struct tm_region *r;
for (r = all_tm_regions; r ; r = r->next)
- if (gimple_transaction_subcode (r->transaction_stmt) & GTMA_IS_RELAXED)
+ if (gimple_transaction_subcode (r->get_transaction_stmt ())
+ & GTMA_IS_RELAXED)
{
/* Atomic transactions can be nested inside relaxed. */
if (r->inner)
basic_block bb)
{
gimple_stmt_iterator gsi;
- gimple g;
+ gcall *g;
transaction_subcode_ior (region, GTMA_MAY_ENTER_IRREVOCABLE);
static bool
ipa_tm_insert_gettmclone_call (struct cgraph_node *node,
struct tm_region *region,
- gimple_stmt_iterator *gsi, gimple stmt)
+ gimple_stmt_iterator *gsi, gcall *stmt)
{
tree gettm_fn, ret, old_fn, callfn;
- gimple g, g2;
+ gcall *g;
+ gassign *g2;
bool safe;
old_fn = gimple_call_fn (stmt);
gimple_stmt_iterator *gsi,
bool *need_ssa_rename_p)
{
- gimple stmt = gsi_stmt (*gsi);
+ gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
struct cgraph_node *new_node;
struct cgraph_edge *e = node->get_edge (stmt);
tree fndecl = gimple_call_fndecl (stmt);
#define MAX_BASE_INT_BIT_SIZE 32
static bool
-check_pow (gimple pow_call)
+check_pow (gcall *pow_call)
{
tree base, expn;
enum tree_code bc, ec;
Returns true if the function call is a candidate. */
static bool
-check_builtin_call (gimple bcall)
+check_builtin_call (gcall *bcall)
{
tree arg;
is a candidate. */
static bool
-is_call_dce_candidate (gimple call)
+is_call_dce_candidate (gcall *call)
{
tree fn;
enum built_in_function fnc;
{
tree lbub_real_cst, lbub_cst, float_type;
tree temp, tempn, tempc, tempcn;
- gimple stmt1, stmt2, stmt3;
+ gassign *stmt1;
+ gassign *stmt2;
+ gcond *stmt3;
float_type = TREE_TYPE (arg);
lbub_cst = build_int_cst (integer_type_node, lbub);
and *NCONDS is the number of logical conditions. */
static void
-gen_conditions_for_pow (gimple pow_call, vec<gimple> conds,
+gen_conditions_for_pow (gcall *pow_call, vec<gimple> conds,
unsigned *nconds)
{
tree base, expn;
condition are separated by NULL tree in the vector. */
static void
-gen_shrink_wrap_conditions (gimple bi_call, vec<gimple> conds,
+gen_shrink_wrap_conditions (gcall *bi_call, vec<gimple> conds,
unsigned int *nconds)
{
- gimple call;
+ gcall *call;
tree fn;
enum built_in_function fnc;
transformation actually happens. */
static bool
-shrink_wrap_one_built_in_call (gimple bi_call)
+shrink_wrap_one_built_in_call (gcall *bi_call)
{
gimple_stmt_iterator bi_call_bsi;
basic_block bi_call_bb, join_tgt_bb, guard_bb, guard_bb0;
wrapping transformation. */
static bool
-shrink_wrap_conditional_dead_built_in_calls (vec<gimple> calls)
+shrink_wrap_conditional_dead_built_in_calls (vec<gcall *> calls)
{
bool changed = false;
unsigned i = 0;
for (; i < n ; i++)
{
- gimple bi_call = calls[i];
+ gcall *bi_call = calls[i];
changed |= shrink_wrap_one_built_in_call (bi_call);
}
basic_block bb;
gimple_stmt_iterator i;
bool something_changed = false;
- auto_vec<gimple> cond_dead_built_in_calls;
+ auto_vec<gcall *> cond_dead_built_in_calls;
FOR_EACH_BB_FN (bb, fun)
{
/* Collect dead call candidates. */
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple stmt = gsi_stmt (i);
- if (is_gimple_call (stmt)
- && is_call_dce_candidate (stmt))
+ gcall *stmt = dyn_cast <gcall *> (gsi_stmt (i));
+ if (stmt && is_call_dce_candidate (stmt))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
-static void make_gimple_switch_edges (basic_block);
+static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple first_non_label_stmt (basic_block);
-static bool verify_gimple_transaction (gimple);
+static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple);
/* Flowgraph optimization and cleanup. */
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
-static edge find_taken_edge_switch_expr (basic_block, tree);
-static tree find_case_label_for_value (gimple, tree);
+static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
+static tree find_case_label_for_value (gswitch *, tree);
void
init_empty_tree_cfg_for_function (struct function *fn)
if (stmt && gimple_code (stmt) == GIMPLE_COND)
{
+ gcond *cond_stmt = as_a <gcond *> (stmt);
location_t loc = gimple_location (stmt);
tree cond;
bool zerop, onep;
fold_defer_overflow_warnings ();
- cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
- gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
+ cond = fold_binary_loc (loc, gimple_cond_code (cond_stmt),
+ boolean_type_node,
+ gimple_cond_lhs (cond_stmt),
+ gimple_cond_rhs (cond_stmt));
if (cond)
{
zerop = integer_zerop (cond);
stmt,
WARN_STRICT_OVERFLOW_CONDITIONAL);
if (zerop)
- gimple_cond_make_false (stmt);
+ gimple_cond_make_false (cond_stmt);
else if (onep)
- gimple_cond_make_true (stmt);
+ gimple_cond_make_true (cond_stmt);
}
}
}
fallthru = false;
break;
case GIMPLE_SWITCH:
- make_gimple_switch_edges (bb);
+ make_gimple_switch_edges (as_a <gswitch *> (last), bb);
fallthru = false;
break;
case GIMPLE_RESX:
fallthru = false;
break;
case GIMPLE_EH_DISPATCH:
- fallthru = make_eh_dispatch_edges (last);
+ fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
break;
case GIMPLE_CALL:
case GIMPLE_TRANSACTION:
{
- tree abort_label = gimple_transaction_label (last);
+ tree abort_label
+ = gimple_transaction_label (as_a <gtransaction *> (last));
if (abort_label)
make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
fallthru = true;
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple label_stmt = gsi_stmt (gsi);
+ glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
tree target;
- if (gimple_code (label_stmt) != GIMPLE_LABEL)
+ if (!label_stmt)
break;
target = gimple_label_label (label_stmt);
static void
make_cond_expr_edges (basic_block bb)
{
- gimple entry = last_stmt (bb);
+ gcond *entry = as_a <gcond *> (last_stmt (bb));
gimple then_stmt, else_stmt;
basic_block then_bb, else_bb;
tree then_label, else_label;
{
gimple stmt = last_stmt (bb);
if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
- group_case_labels_stmt (stmt);
+ group_case_labels_stmt (as_a <gswitch *> (stmt));
}
}
BITMAP_FREE (touched_switch_bbs);
Otherwise return NULL. */
static tree
-get_cases_for_edge (edge e, gimple t)
+get_cases_for_edge (edge e, gswitch *t)
{
tree *slot;
size_t i, n;
/* Create the edges for a GIMPLE_SWITCH starting at block BB. */
static void
-make_gimple_switch_edges (basic_block bb)
+make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
- gimple entry = last_stmt (bb);
size_t i, n;
n = gimple_switch_num_labels (entry);
static void
make_gimple_asm_edges (basic_block bb)
{
- gimple stmt = last_stmt (bb);
+ gasm *stmt = as_a <gasm *> (last_stmt (bb));
int i, n = gimple_asm_nlabels (stmt);
for (i = 0; i < n; ++i)
for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
{
tree label;
- gimple stmt = gsi_stmt (i);
+ glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
- if (gimple_code (stmt) != GIMPLE_LABEL)
+ if (!label_stmt)
break;
- label = gimple_label_label (stmt);
+ label = gimple_label_label (label_stmt);
/* If we have not yet seen a label for the current block,
remember this one and see if there are more labels. */
switch (gimple_code (stmt))
{
case GIMPLE_COND:
- label = gimple_cond_true_label (stmt);
- if (label)
- {
- new_label = main_block_label (label);
- if (new_label != label)
- gimple_cond_set_true_label (stmt, new_label);
- }
+ {
+ gcond *cond_stmt = as_a <gcond *> (stmt);
+ label = gimple_cond_true_label (cond_stmt);
+ if (label)
+ {
+ new_label = main_block_label (label);
+ if (new_label != label)
+ gimple_cond_set_true_label (cond_stmt, new_label);
+ }
- label = gimple_cond_false_label (stmt);
- if (label)
- {
- new_label = main_block_label (label);
- if (new_label != label)
- gimple_cond_set_false_label (stmt, new_label);
- }
+ label = gimple_cond_false_label (cond_stmt);
+ if (label)
+ {
+ new_label = main_block_label (label);
+ if (new_label != label)
+ gimple_cond_set_false_label (cond_stmt, new_label);
+ }
+ }
break;
case GIMPLE_SWITCH:
{
- size_t i, n = gimple_switch_num_labels (stmt);
+ gswitch *switch_stmt = as_a <gswitch *> (stmt);
+ size_t i, n = gimple_switch_num_labels (switch_stmt);
/* Replace all destination labels. */
for (i = 0; i < n; ++i)
{
- tree case_label = gimple_switch_label (stmt, i);
+ tree case_label = gimple_switch_label (switch_stmt, i);
label = CASE_LABEL (case_label);
new_label = main_block_label (label);
if (new_label != label)
case GIMPLE_ASM:
{
- int i, n = gimple_asm_nlabels (stmt);
+ gasm *asm_stmt = as_a <gasm *> (stmt);
+ int i, n = gimple_asm_nlabels (asm_stmt);
for (i = 0; i < n; ++i)
{
- tree cons = gimple_asm_label_op (stmt, i);
+ tree cons = gimple_asm_label_op (asm_stmt, i);
tree label = main_block_label (TREE_VALUE (cons));
TREE_VALUE (cons) = label;
}
case GIMPLE_GOTO:
if (!computed_goto_p (stmt))
{
- label = gimple_goto_dest (stmt);
+ ggoto *goto_stmt = as_a <ggoto *> (stmt);
+ label = gimple_goto_dest (goto_stmt);
new_label = main_block_label (label);
if (new_label != label)
- gimple_goto_set_dest (stmt, new_label);
+ gimple_goto_set_dest (goto_stmt, new_label);
}
break;
case GIMPLE_TRANSACTION:
{
- tree label = gimple_transaction_label (stmt);
+ gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
+ tree label = gimple_transaction_label (trans_stmt);
if (label)
{
tree new_label = main_block_label (label);
if (new_label != label)
- gimple_transaction_set_label (stmt, new_label);
+ gimple_transaction_set_label (trans_stmt, new_label);
}
}
break;
for (i = gsi_start_bb (bb); !gsi_end_p (i); )
{
tree label;
- gimple stmt = gsi_stmt (i);
+ glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
- if (gimple_code (stmt) != GIMPLE_LABEL)
+ if (!label_stmt)
break;
- label = gimple_label_label (stmt);
+ label = gimple_label_label (label_stmt);
if (label == label_for_this_bb
|| !DECL_ARTIFICIAL (label)
Eg. three separate entries 1: 2: 3: become one entry 1..3: */
void
-group_case_labels_stmt (gimple stmt)
+group_case_labels_stmt (gswitch *stmt)
{
int old_size = gimple_switch_num_labels (stmt);
int i, j, new_size = old_size;
{
gimple stmt = last_stmt (bb);
if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
- group_case_labels_stmt (stmt);
+ group_case_labels_stmt (as_a <gswitch *> (stmt));
}
}
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
gimple stmt;
- gimple_stmt_iterator gsi;
if (!single_succ_p (a))
return false;
return false;
/* Do not allow a block with only a non-local label to be merged. */
- if (stmt
- && gimple_code (stmt) == GIMPLE_LABEL
- && DECL_NONLOCAL (gimple_label_label (stmt)))
- return false;
+ if (stmt)
+ if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
+ if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
+ return false;
/* Examine the labels at the beginning of B. */
- for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
tree lab;
- stmt = gsi_stmt (gsi);
- if (gimple_code (stmt) != GIMPLE_LABEL)
+ glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
+ if (!label_stmt)
break;
- lab = gimple_label_label (stmt);
+ lab = gimple_label_label (label_stmt);
/* Do not remove user forced labels or for -O0 any user labels. */
if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
/* It must be possible to eliminate all phi nodes in B. If ssa form
is not up-to-date and a name-mapping is registered, we cannot eliminate
any phis. Symbols marked for renaming are never a problem though. */
- for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
/* Technically only new names matter. */
if (name_registered_for_update_p (PHI_RESULT (phi)))
return false;
if (gimple_code (stmt) == GIMPLE_PHI)
{
- e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
+ e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
+ PHI_ARG_INDEX_FROM_USE (use));
if (e->flags & EDGE_ABNORMAL)
{
/* This can only occur for virtual operands, since
static void
gimple_merge_blocks (basic_block a, basic_block b)
{
- gimple_stmt_iterator last, gsi, psi;
+ gimple_stmt_iterator last, gsi;
+ gphi_iterator psi;
if (dump_file)
fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
{
gimple stmt = gsi_stmt (gsi);
- if (gimple_code (stmt) == GIMPLE_LABEL)
+ if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
{
- tree label = gimple_label_label (stmt);
+ tree label = gimple_label_label (label_stmt);
int lp_nr;
gsi_remove (&gsi, false);
/* T is CALL_EXPR. Set current_function_calls_* flags. */
void
-notice_special_calls (gimple call)
+notice_special_calls (gcall *call)
{
int flags = gimple_call_flags (call);
for (i = gsi_last_bb (bb); !gsi_end_p (i);)
{
gimple stmt = gsi_stmt (i);
- if (gimple_code (stmt) == GIMPLE_LABEL
- && (FORCED_LABEL (gimple_label_label (stmt))
- || DECL_NONLOCAL (gimple_label_label (stmt))))
+ glabel *label_stmt = dyn_cast <glabel *> (stmt);
+ if (label_stmt
+ && (FORCED_LABEL (gimple_label_label (label_stmt))
+ || DECL_NONLOCAL (gimple_label_label (label_stmt))))
{
basic_block new_bb;
gimple_stmt_iterator new_gsi;
/* A non-reachable non-local label may still be referenced.
But it no longer needs to carry the extra semantics of
non-locality. */
- if (DECL_NONLOCAL (gimple_label_label (stmt)))
+ if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
{
- DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
- FORCED_LABEL (gimple_label_label (stmt)) = 1;
+ DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
+ FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
}
new_bb = bb->prev_bb;
return find_taken_edge_cond_expr (bb, val);
if (gimple_code (stmt) == GIMPLE_SWITCH)
- return find_taken_edge_switch_expr (bb, val);
+ return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);
if (computed_goto_p (stmt))
{
NULL if any edge may be taken. */
static edge
-find_taken_edge_switch_expr (basic_block bb, tree val)
+find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
+ tree val)
{
basic_block dest_bb;
edge e;
- gimple switch_stmt;
tree taken_case;
- switch_stmt = last_stmt (bb);
taken_case = find_case_label_for_value (switch_stmt, val);
dest_bb = label_to_block (CASE_LABEL (taken_case));
sorted: We can do a binary search for a case matching VAL. */
static tree
-find_case_label_for_value (gimple switch_stmt, tree val)
+find_case_label_for_value (gswitch *switch_stmt, tree val)
{
size_t low, high, n = gimple_switch_num_labels (switch_stmt);
tree default_case = gimple_switch_default_label (switch_stmt);
return true;
case GIMPLE_ASM:
- if (gimple_asm_nlabels (t) > 0)
+ if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
return true;
break;
/* Labels start a new basic block only if the preceding statement
wasn't a label of the same type. This prevents the creation of
consecutive blocks that have nothing but a single label. */
- if (gimple_code (stmt) == GIMPLE_LABEL)
+ if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
{
/* Nonlocal and computed GOTO targets always start a new block. */
- if (DECL_NONLOCAL (gimple_label_label (stmt))
- || FORCED_LABEL (gimple_label_label (stmt)))
+ if (DECL_NONLOCAL (gimple_label_label (label_stmt))
+ || FORCED_LABEL (gimple_label_label (label_stmt)))
return true;
if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
{
- if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
+ if (DECL_NONLOCAL (gimple_label_label (
+ as_a <glabel *> (prev_stmt))))
return true;
cfg_stats.num_merged_labels++;
{
edge_var_map *vm;
int i;
- gimple_stmt_iterator phis;
+ gphi_iterator phis;
vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
if (!v)
v->iterate (i, &vm) && !gsi_end_p (phis);
i++, gsi_next (&phis))
{
- gimple phi = gsi_stmt (phis);
+ gphi *phi = phis.phi ();
tree result = redirect_edge_var_map_result (vm);
tree arg = redirect_edge_var_map_def (vm);
is a problem, otherwise false. */
static bool
-verify_gimple_call (gimple stmt)
+verify_gimple_call (gcall *stmt)
{
tree fn = gimple_call_fn (stmt);
tree fntype, fndecl;
Returns true if anything is wrong. */
static bool
-verify_gimple_assign_unary (gimple stmt)
+verify_gimple_assign_unary (gassign *stmt)
{
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
tree lhs = gimple_assign_lhs (stmt);
Returns true if anything is wrong. */
static bool
-verify_gimple_assign_binary (gimple stmt)
+verify_gimple_assign_binary (gassign *stmt)
{
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
tree lhs = gimple_assign_lhs (stmt);
Returns true if anything is wrong. */
static bool
-verify_gimple_assign_ternary (gimple stmt)
+verify_gimple_assign_ternary (gassign *stmt)
{
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
tree lhs = gimple_assign_lhs (stmt);
Returns true if anything is wrong. */
static bool
-verify_gimple_assign_single (gimple stmt)
+verify_gimple_assign_single (gassign *stmt)
{
enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
tree lhs = gimple_assign_lhs (stmt);
is a problem, otherwise false. */
static bool
-verify_gimple_assign (gimple stmt)
+verify_gimple_assign (gassign *stmt)
{
switch (gimple_assign_rhs_class (stmt))
{
is a problem, otherwise false. */
static bool
-verify_gimple_return (gimple stmt)
+verify_gimple_return (greturn *stmt)
{
tree op = gimple_return_retval (stmt);
tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
is a problem, otherwise false. */
static bool
-verify_gimple_goto (gimple stmt)
+verify_gimple_goto (ggoto *stmt)
{
tree dest = gimple_goto_dest (stmt);
is a problem, otherwise false. */
static bool
-verify_gimple_switch (gimple stmt)
+verify_gimple_switch (gswitch *stmt)
{
unsigned int i, n;
tree elt, prev_upper_bound = NULL_TREE;
Returns true if anything is wrong. */
static bool
-verify_gimple_label (gimple stmt)
+verify_gimple_label (glabel *stmt)
{
tree decl = gimple_label_label (stmt);
int uid;
return err;
}
+/* Verify a gimple cond statement STMT.
+ Returns true if anything is wrong. */
+
+static bool
+verify_gimple_cond (gcond *stmt)
+{
+ /* The condition code must be a comparison tree code. */
+ if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
+ {
+ error ("invalid comparison code in gimple cond");
+ return true;
+ }
+ /* The true/false labels are optional, but when present each must
+ be a LABEL_DECL. */
+ if (!(!gimple_cond_true_label (stmt)
+ || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
+ || !(!gimple_cond_false_label (stmt)
+ || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
+ {
+ error ("invalid labels in gimple cond");
+ return true;
+ }
+
+ /* Check the comparison operands against boolean_type_node. */
+ return verify_gimple_comparison (boolean_type_node,
+ gimple_cond_lhs (stmt),
+ gimple_cond_rhs (stmt));
+}
+
/* Verify the GIMPLE statement STMT. Returns true if there is an
error, otherwise false. */
switch (gimple_code (stmt))
{
case GIMPLE_ASSIGN:
- return verify_gimple_assign (stmt);
+ return verify_gimple_assign (as_a <gassign *> (stmt));
case GIMPLE_LABEL:
- return verify_gimple_label (stmt);
+ return verify_gimple_label (as_a <glabel *> (stmt));
case GIMPLE_CALL:
- return verify_gimple_call (stmt);
+ return verify_gimple_call (as_a <gcall *> (stmt));
case GIMPLE_COND:
- if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
- {
- error ("invalid comparison code in gimple cond");
- return true;
- }
- if (!(!gimple_cond_true_label (stmt)
- || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
- || !(!gimple_cond_false_label (stmt)
- || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
- {
- error ("invalid labels in gimple cond");
- return true;
- }
-
- return verify_gimple_comparison (boolean_type_node,
- gimple_cond_lhs (stmt),
- gimple_cond_rhs (stmt));
+ return verify_gimple_cond (as_a <gcond *> (stmt));
case GIMPLE_GOTO:
- return verify_gimple_goto (stmt);
+ return verify_gimple_goto (as_a <ggoto *> (stmt));
case GIMPLE_SWITCH:
- return verify_gimple_switch (stmt);
+ return verify_gimple_switch (as_a <gswitch *> (stmt));
case GIMPLE_RETURN:
- return verify_gimple_return (stmt);
+ return verify_gimple_return (as_a <greturn *> (stmt));
case GIMPLE_ASM:
return false;
case GIMPLE_TRANSACTION:
- return verify_gimple_transaction (stmt);
+ return verify_gimple_transaction (as_a <gtransaction *> (stmt));
/* Tuples that do not have tree operands. */
case GIMPLE_NOP:
switch (gimple_code (stmt))
{
case GIMPLE_BIND:
- err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
+ err |= verify_gimple_in_seq_2 (
+ gimple_bind_body (as_a <gbind *> (stmt)));
break;
case GIMPLE_TRY:
break;
case GIMPLE_EH_ELSE:
- err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
- err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
+ {
+ geh_else *eh_else = as_a <geh_else *> (stmt);
+ err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
+ err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
+ }
break;
case GIMPLE_CATCH:
- err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
+ err |= verify_gimple_in_seq_2 (gimple_catch_handler (
+ as_a <gcatch *> (stmt)));
break;
case GIMPLE_TRANSACTION:
- err |= verify_gimple_transaction (stmt);
+ err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
break;
default:
is a problem, otherwise false. */
static bool
-verify_gimple_transaction (gimple stmt)
+verify_gimple_transaction (gtransaction *stmt)
{
tree lab = gimple_transaction_label (stmt);
if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
{
gimple_stmt_iterator gsi;
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gpi = gsi_start_phis (bb);
+ !gsi_end_p (gpi);
+ gsi_next (&gpi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gpi.phi ();
bool err2 = false;
unsigned i;
if (gimple_code (stmt) != GIMPLE_LABEL)
break;
- label = gimple_label_label (stmt);
+ label = gimple_label_label (as_a <glabel *> (stmt));
if (prev_stmt && DECL_NONLOCAL (label))
{
error ("nonlocal label ");
if (stmt_ends_bb_p (stmt))
found_ctrl_stmt = true;
- if (gimple_code (stmt) == GIMPLE_LABEL)
+ if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
{
error ("label ");
- print_generic_expr (stderr, gimple_label_label (stmt), 0);
+ print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
fprintf (stderr, " in the middle of basic block %d", bb->index);
err = 1;
}
case GIMPLE_SWITCH:
{
+ gswitch *switch_stmt = as_a <gswitch *> (stmt);
tree prev;
edge e;
size_t i, n;
- n = gimple_switch_num_labels (stmt);
+ n = gimple_switch_num_labels (switch_stmt);
/* Mark all the destination basic blocks. */
for (i = 0; i < n; ++i)
{
- tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
+ tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
basic_block label_bb = label_to_block (lab);
gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
label_bb->aux = (void *)1;
}
/* Verify that the case labels are sorted. */
- prev = gimple_switch_label (stmt, 0);
+ prev = gimple_switch_label (switch_stmt, 0);
for (i = 1; i < n; ++i)
{
- tree c = gimple_switch_label (stmt, i);
+ tree c = gimple_switch_label (switch_stmt, i);
if (!CASE_LOW (c))
{
error ("found default case not at the start of "
/* Check that we have all of them. */
for (i = 0; i < n; ++i)
{
- tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
+ tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
basic_block label_bb = label_to_block (lab);
if (label_bb->aux != (void *)2)
break;
case GIMPLE_EH_DISPATCH:
- err |= verify_eh_dispatch_edge (stmt);
+ err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
break;
default:
edge_iterator ei;
basic_block dummy, bb;
tree var;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
dummy = fallthru->src;
bb = fallthru->dest;
start of BB. */
for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi, new_phi;
+ gphi *phi, *new_phi;
- phi = gsi_stmt (gsi);
+ phi = gsi.phi ();
var = gimple_phi_result (phi);
new_phi = create_phi_node (var, bb);
gimple_phi_set_result (phi, copy_ssa_name (var, phi));
gimple_stmt_iterator i, s = gsi_start_bb (bb);
bool first = true;
tree label;
- gimple stmt;
+ glabel *stmt;
for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
{
- stmt = gsi_stmt (i);
- if (gimple_code (stmt) != GIMPLE_LABEL)
+ stmt = dyn_cast <glabel *> (gsi_stmt (i));
+ if (!stmt)
break;
label = gimple_label_label (stmt);
if (!DECL_NONLOCAL (label))
case GIMPLE_SWITCH:
{
+ gswitch *switch_stmt = as_a <gswitch *> (stmt);
tree label = gimple_block_label (dest);
- tree cases = get_cases_for_edge (e, stmt);
+ tree cases = get_cases_for_edge (e, switch_stmt);
/* If we have a list of cases associated with E, then use it
as it's a lot faster than walking the entire case vector. */
to move all the cases associated with E to E2. */
if (e2)
{
- tree cases2 = get_cases_for_edge (e2, stmt);
+ tree cases2 = get_cases_for_edge (e2, switch_stmt);
CASE_CHAIN (last) = CASE_CHAIN (cases2);
CASE_CHAIN (cases2) = first;
}
else
{
- size_t i, n = gimple_switch_num_labels (stmt);
+ size_t i, n = gimple_switch_num_labels (switch_stmt);
for (i = 0; i < n; i++)
{
- tree elt = gimple_switch_label (stmt, i);
+ tree elt = gimple_switch_label (switch_stmt, i);
if (label_to_block (CASE_LABEL (elt)) == e->dest)
CASE_LABEL (elt) = label;
}
case GIMPLE_ASM:
{
- int i, n = gimple_asm_nlabels (stmt);
+ gasm *asm_stmt = as_a <gasm *> (stmt);
+ int i, n = gimple_asm_nlabels (asm_stmt);
tree label = NULL;
for (i = 0; i < n; ++i)
{
- tree cons = gimple_asm_label_op (stmt, i);
+ tree cons = gimple_asm_label_op (asm_stmt, i);
if (label_to_block (TREE_VALUE (cons)) == e->dest)
{
if (!label)
case GIMPLE_EH_DISPATCH:
if (!(e->flags & EDGE_FALLTHRU))
- redirect_eh_dispatch_edge (stmt, e, dest);
+ redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
break;
case GIMPLE_TRANSACTION:
/* The ABORT edge has a stored label associated with it, otherwise
the edges are simply redirectable. */
if (e->flags == 0)
- gimple_transaction_set_label (stmt, gimple_block_label (dest));
+ gimple_transaction_set_label (as_a <gtransaction *> (stmt),
+ gimple_block_label (dest));
break;
default:
gimple_duplicate_bb (basic_block bb)
{
basic_block new_bb;
- gimple_stmt_iterator gsi, gsi_tgt;
- gimple_seq phis = phi_nodes (bb);
- gimple phi, stmt, copy;
+ gimple_stmt_iterator gsi_tgt;
new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
/* Copy the PHI nodes. We ignore PHI node arguments here because
the incoming edges have not been setup yet. */
- for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gpi = gsi_start_phis (bb);
+ !gsi_end_p (gpi);
+ gsi_next (&gpi))
{
- phi = gsi_stmt (gsi);
+ gphi *phi, *copy;
+ phi = gpi.phi ();
copy = create_phi_node (NULL_TREE, new_bb);
create_new_def_for (gimple_phi_result (phi), copy,
gimple_phi_result_ptr (copy));
}
gsi_tgt = gsi_start_bb (new_bb);
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
+ !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
def_operand_p def_p;
ssa_op_iter op_iter;
tree lhs;
+ gimple stmt, copy;
stmt = gsi_stmt (gsi);
if (gimple_code (stmt) == GIMPLE_LABEL)
basic_block bb, bb_copy = e_copy->src, dest;
edge e;
edge_iterator ei;
- gimple phi, phi_copy;
+ gphi *phi, *phi_copy;
tree def;
- gimple_stmt_iterator psi, psi_copy;
+ gphi_iterator psi, psi_copy;
if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
return;
!gsi_end_p (psi);
gsi_next (&psi), gsi_next (&psi_copy))
{
- phi = gsi_stmt (psi);
- phi_copy = gsi_stmt (psi_copy);
+ phi = psi.phi ();
+ phi_copy = psi_copy.phi ();
def = PHI_ARG_DEF_FROM_EDGE (phi, e);
add_phi_arg (phi_copy, def, e_copy,
gimple_phi_arg_location_from_edge (phi, e));
gimple cond_stmt;
edge sorig, snew;
basic_block exit_bb;
- gimple_stmt_iterator psi;
- gimple phi;
+ gphi_iterator psi;
+ gphi *phi;
tree def;
struct loop *target, *aloop, *cloop;
!gsi_end_p (psi);
gsi_next (&psi))
{
- phi = gsi_stmt (psi);
+ phi = psi.phi ();
def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
}
case GIMPLE_RESX:
{
- int r = gimple_resx_region (stmt);
+ gresx *resx_stmt = as_a <gresx *> (stmt);
+ int r = gimple_resx_region (resx_stmt);
r = move_stmt_eh_region_nr (r, p);
- gimple_resx_set_region (stmt, r);
+ gimple_resx_set_region (resx_stmt, r);
}
break;
case GIMPLE_EH_DISPATCH:
{
- int r = gimple_eh_dispatch_region (stmt);
+ geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
+ int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
r = move_stmt_eh_region_nr (r, p);
- gimple_eh_dispatch_set_region (stmt, r);
+ gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
}
break;
(*cfg->x_basic_block_info)[bb->index] = bb;
/* Remap the variables in phi nodes. */
- for (si = gsi_start_phis (bb); !gsi_end_p (si); )
+ for (gphi_iterator psi = gsi_start_phis (bb);
+ !gsi_end_p (psi); )
{
- gimple phi = gsi_stmt (si);
+ gphi *phi = psi.phi ();
use_operand_p use;
tree op = PHI_RESULT (phi);
ssa_op_iter oi;
{
/* Remove the phi nodes for virtual operands (alias analysis will be
run for the new function, anyway). */
- remove_phi_node (&si, true);
+ remove_phi_node (&psi, true);
continue;
}
}
}
- gsi_next (&si);
+ gsi_next (&psi);
}
for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
wi.info = d;
walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
- if (gimple_code (stmt) == GIMPLE_LABEL)
+ if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
{
- tree label = gimple_label_label (stmt);
+ tree label = gimple_label_label (label_stmt);
int uid = LABEL_DECL_UID (label);
gcc_assert (uid > -1);
return true;
}
- if (gimple_code (t) == GIMPLE_ASM
- && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
- return true;
+ if (gasm *asm_stmt = dyn_cast <gasm *> (t))
+ if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
+ return true;
return false;
}
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
basic_block new_head, edge e)
{
- gimple phi1, phi2;
- gimple_stmt_iterator psi1, psi2;
+ gphi *phi1, *phi2;
+ gphi_iterator psi1, psi2;
tree def;
edge e2 = find_edge (new_head, second);
!gsi_end_p (psi2) && !gsi_end_p (psi1);
gsi_next (&psi2), gsi_next (&psi1))
{
- phi1 = gsi_stmt (psi1);
- phi2 = gsi_stmt (psi2);
+ phi1 = psi1.phi ();
+ phi2 = psi2.phi ();
def = PHI_ARG_DEF (phi2, e2->dest_idx);
add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
}
FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
{
gimple last = last_stmt (e->src);
- if (gimple_code (last) == GIMPLE_RETURN
- && gimple_return_retval (last) == NULL
+ greturn *return_stmt = dyn_cast <greturn *> (last);
+ if (return_stmt
+ && gimple_return_retval (return_stmt) == NULL
&& !gimple_no_warning_p (last))
{
location = gimple_location (last);
switch (gimple_code (g))
{
case GIMPLE_BIND:
- do_warn_unused_result (gimple_bind_body (g));
+ do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
break;
case GIMPLE_TRY:
do_warn_unused_result (gimple_try_eval (g));
do_warn_unused_result (gimple_try_cleanup (g));
break;
case GIMPLE_CATCH:
- do_warn_unused_result (gimple_catch_handler (g));
+ do_warn_unused_result (gimple_catch_handler (
+ as_a <gcatch *> (g)));
break;
case GIMPLE_EH_FILTER:
do_warn_unused_result (gimple_eh_filter_failure (g));
extern basic_block label_to_block_fn (struct function *, tree);
#define label_to_block(t) (label_to_block_fn (cfun, t))
extern void cleanup_dead_labels (void);
-extern void group_case_labels_stmt (gimple);
+extern void group_case_labels_stmt (gswitch *);
extern void group_case_labels (void);
extern void replace_uses_by (tree, tree);
extern basic_block single_noncomplex_succ (basic_block bb);
-extern void notice_special_calls (gimple);
+extern void notice_special_calls (gcall *);
extern void clear_special_calls (void);
extern edge find_taken_edge (basic_block, tree);
extern void gimple_debug_bb (basic_block);
break;
case GIMPLE_SWITCH:
- val = gimple_switch_index (stmt);
+ val = gimple_switch_index (as_a <gswitch *> (stmt));
break;
default:
switch (gimple_code (stmt))
{
case GIMPLE_LABEL:
- if (DECL_NONLOCAL (gimple_label_label (stmt)))
+ if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
return false;
if (optimize == 0 && gimple_location (stmt) != locus)
return false;
{
int n1 = e1->dest_idx;
int n2 = e2->dest_idx;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree val1 = gimple_phi_arg_def (phi, n1);
tree val2 = gimple_phi_arg_def (phi, n2);
/* If the destination block consists of a nonlocal label or is a
EH landing pad, do not merge it. */
label = first_stmt (dest);
- if (label
- && gimple_code (label) == GIMPLE_LABEL
- && (DECL_NONLOCAL (gimple_label_label (label))
- || EH_LANDING_PAD_NR (gimple_label_label (label)) != 0))
- return false;
+ if (label)
+ if (glabel *label_stmt = dyn_cast <glabel *> (label))
+ if (DECL_NONLOCAL (gimple_label_label (label_stmt))
+ || EH_LANDING_PAD_NR (gimple_label_label (label_stmt)) != 0)
+ return false;
/* If there is an abnormal edge to basic block BB, but not into
dest, problems might occur during removal of the phi node at out
{
/* Create arguments for the phi nodes, since the edge was not
here before. */
- for (gsi = gsi_start_phis (dest);
- !gsi_end_p (gsi);
- gsi_next (&gsi))
+ for (gphi_iterator psi = gsi_start_phis (dest);
+ !gsi_end_p (psi);
+ gsi_next (&psi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = psi.phi ();
source_location l = gimple_phi_arg_location_from_edge (phi, succ);
tree def = gimple_phi_arg_def (phi, succ->dest_idx);
add_phi_arg (phi, unshare_expr (def), s, l);
label = gsi_stmt (gsi);
if (is_gimple_debug (label))
break;
- decl = gimple_label_label (label);
+ decl = gimple_label_label (as_a <glabel *> (label));
if (EH_LANDING_PAD_NR (decl) != 0
|| DECL_NONLOCAL (decl)
|| FORCED_LABEL (decl)
/* If the destination block consists of a nonlocal label, do not
merge it. */
label = first_stmt (dest);
- if (label
- && gimple_code (label) == GIMPLE_LABEL
- && DECL_NONLOCAL (gimple_label_label (label)))
- return false;
+ if (label)
+ if (glabel *label_stmt = dyn_cast <glabel *> (label))
+ if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
+ return false;
/* Record BB's single pred in case we need to update the father
loop's latch information later. */
while (EDGE_COUNT (bb->preds) > 0)
{
edge e = EDGE_PRED (bb, 0), s;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
s = find_edge (e->src, dest);
if (s)
!gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree def = gimple_phi_arg_def (phi, succ->dest_idx);
source_location locus = gimple_phi_arg_location_from_edge (phi, succ);
}
else
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
unsigned int dest_idx = single_succ_edge (bb)->dest_idx;
/* BB dominates DEST. There may be many users of the PHI
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree result = gimple_phi_result (phi);
use_operand_p imm_use;
gimple use_stmt;
void *res ATTRIBUTE_UNUSED)
{
tree ptr = *slot;
- gimple bounds_phi;
- gimple ptr_phi;
+ gphi *bounds_phi;
+ gphi *ptr_phi;
unsigned i;
gcc_assert (TREE_CODE (bounds) == SSA_NAME);
gcc_assert (TREE_CODE (ptr) == SSA_NAME);
- bounds_phi = SSA_NAME_DEF_STMT (bounds);
- ptr_phi = SSA_NAME_DEF_STMT (ptr);
-
- gcc_assert (bounds_phi && gimple_code (bounds_phi) == GIMPLE_PHI);
- gcc_assert (ptr_phi && gimple_code (ptr_phi) == GIMPLE_PHI);
+ bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
+ ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));
for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
{
static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
- gimple ret = gsi_stmt (*gsi);
+ greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
tree retval = gimple_return_retval (ret);
tree ret_decl = DECL_RESULT (cfun->decl);
tree bounds;
static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
- gimple call = gsi_stmt (*gsi);
+ gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
unsigned arg_no = 0;
tree fndecl = gimple_call_fndecl (call);
tree fntype;
bool use_fntype = false;
tree op;
ssa_op_iter iter;
- gimple new_call;
+ gcall *new_call;
/* Do nothing for internal functions. */
if (gimple_call_internal_p (call))
/* Build bounds returned by CALL. */
static tree
-chkp_build_returned_bound (gimple call)
+chkp_build_returned_bound (gcall *call)
{
gimple_stmt_iterator gsi;
tree bounds;
/* Return bounds used as returned by call
which produced SSA name VAL. */
-gimple
+gcall *
chkp_retbnd_call_by_val (tree val)
{
if (TREE_CODE (val) != SSA_NAME)
FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
&& gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
- return USE_STMT (use_p);
+ return as_a <gcall *> (USE_STMT (use_p));
return NULL;
}
Return computed bounds. */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
- gimple_stmt_iterator *iter)
+ gphi_iterator *iter)
{
tree var, bounds;
enum gimple_code code = gimple_code (def_stmt);
- gimple stmt;
+ gphi *stmt;
if (dump_file && (dump_flags & TDF_DETAILS))
{
break;
case GIMPLE_CALL:
- bounds = chkp_build_returned_bound (def_stmt);
+ bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
break;
case GIMPLE_PHI:
var = chkp_get_tmp_var ();
stmt = create_phi_node (var, gimple_bb (def_stmt));
bounds = gimple_phi_result (stmt);
- *iter = gsi_for_stmt (stmt);
+ *iter = gsi_for_phi (stmt);
bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
if (!bounds)
{
gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
- gimple_stmt_iterator phi_iter;
+ gphi_iterator phi_iter;
bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
gcc_assert (bounds);
- if (gimple_code (def_stmt) == GIMPLE_PHI)
+ if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
{
unsigned i;
- for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
+ for (i = 0; i < gimple_phi_num_args (def_phi); i++)
{
- tree arg = gimple_phi_arg_def (def_stmt, i);
+ tree arg = gimple_phi_arg_def (def_phi, i);
tree arg_bnd;
- gimple phi_bnd;
+ gphi *phi_bnd;
arg_bnd = chkp_find_bounds (arg, NULL);
Previous call to chkp_find_bounds could create
new basic block and therefore change phi statement
phi_iter points to. */
- phi_bnd = gsi_stmt (phi_iter);
+ phi_bnd = phi_iter.phi ();
add_phi_arg (phi_bnd, arg_bnd,
- gimple_phi_arg_edge (def_stmt, i),
+ gimple_phi_arg_edge (def_phi, i),
UNKNOWN_LOCATION);
}
|| fndecl == chkp_bndldx_fndecl
|| fndecl == chkp_ret_bnd_fndecl);
- new_edge = edge->caller->create_edge (callee, stmt, edge->count,
+ new_edge = edge->caller->create_edge (callee,
+ as_a <gcall *> (stmt),
+ edge->count,
edge->frequency);
new_edge->frequency = compute_call_stmt_bb_frequency
(edge->caller->decl, gimple_bb (stmt));
break;
case GIMPLE_RETURN:
- if (gimple_return_retval (s) != NULL_TREE)
- {
- chkp_process_stmt (&i, gimple_return_retval (s),
- gimple_location (s),
- integer_zero_node,
- NULL_TREE, NULL_TREE, safe);
-
- /* Additionally we need to add bounds
- to return statement. */
- chkp_add_bounds_to_ret_stmt (&i);
- }
- break;
+ {
+ greturn *r = as_a <greturn *> (s);
+ if (gimple_return_retval (r) != NULL_TREE)
+ {
+ chkp_process_stmt (&i, gimple_return_retval (r),
+ gimple_location (r),
+ integer_zero_node,
+ NULL_TREE, NULL_TREE, safe);
+
+ /* Additionally we need to add bounds
+ to return statement. */
+ chkp_add_bounds_to_ret_stmt (&i);
+ }
+ }
+ break;
case GIMPLE_CALL:
chkp_add_bounds_to_call_stmt (&i);
extern void chkp_find_bound_slots (const_tree type, bitmap res);
extern void chkp_build_bndstx (tree addr, tree ptr, tree bounds,
gimple_stmt_iterator *gsi);
-extern gimple chkp_retbnd_call_by_val (tree val);
+extern gcall *chkp_retbnd_call_by_val (tree val);
extern bool chkp_function_instrumented_p (tree fndecl);
extern void chkp_function_mark_instrumented (tree fndecl);
extern void chkp_copy_bounds_for_assign (gimple assign,
init_dont_simulate_again (void)
{
basic_block bb;
- gimple_stmt_iterator gsi;
- gimple phi;
bool saw_a_complex_op = false;
FOR_EACH_BB_FN (bb, cfun)
{
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
prop_set_simulate_again (phi,
is_complex_reg (gimple_phi_result (phi)));
}
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
gimple stmt;
tree op0, op1;
/* Evaluate a PHI node against the complex lattice defined above. */
static enum ssa_prop_result
-complex_visit_phi (gimple phi)
+complex_visit_phi (gphi *phi)
{
complex_lattice_t new_l, old_l;
unsigned int ver;
static void
update_phi_components (basic_block bb)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
if (is_complex_reg (gimple_phi_result (phi)))
{
stmt = gsi_stmt (*gsi);
gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
- gimple_return_set_retval (stmt, lhs);
+ gimple_return_set_retval (as_a <greturn *> (stmt), lhs);
}
update_stmt (stmt);
machine_mode mode;
enum built_in_function bcode;
tree fn, type, lhs;
- gimple old_stmt, stmt;
+ gimple old_stmt;
+ gcall *stmt;
old_stmt = gsi_stmt (*gsi);
lhs = gimple_assign_lhs (old_stmt);
switch (gimple_code (stmt))
{
case GIMPLE_RETURN:
- type = TREE_TYPE (gimple_return_retval (stmt));
- gimple_return_set_retval (stmt, fold_convert (type, cc));
+ {
+ greturn *return_stmt = as_a <greturn *> (stmt);
+ type = TREE_TYPE (gimple_return_retval (return_stmt));
+ gimple_return_set_retval (return_stmt, fold_convert (type, cc));
+ }
break;
case GIMPLE_ASSIGN:
break;
case GIMPLE_COND:
- gimple_cond_set_code (stmt, EQ_EXPR);
- gimple_cond_set_lhs (stmt, cc);
- gimple_cond_set_rhs (stmt, boolean_true_node);
+ {
+ gcond *cond_stmt = as_a <gcond *> (stmt);
+ gimple_cond_set_code (cond_stmt, EQ_EXPR);
+ gimple_cond_set_lhs (cond_stmt, cc);
+ gimple_cond_set_rhs (cond_stmt, boolean_true_node);
+ }
break;
default:
static void
expand_complex_asm (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gasm *stmt = as_a <gasm *> (gsi_stmt (*gsi));
unsigned int i;
for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
clobbers_memory = true;
}
else if (stmt_code == GIMPLE_ASM
- && (gimple_asm_volatile_p (stmt) || gimple_vuse (stmt)))
+ && (gimple_asm_volatile_p (as_a <gasm *> (stmt))
+ || gimple_vuse (stmt)))
clobbers_memory = true;
if (!gimple_vuse (stmt))
fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
SCALE (size), LABEL (size));
- size = dfa_stats.num_phis * sizeof (struct gimple_statement_phi);
+ size = dfa_stats.num_phis * sizeof (struct gphi);
total += size;
fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
SCALE (size), LABEL (size));
/* Walk all the statements in the function counting references. */
FOR_EACH_BB_FN (bb, cfun)
{
- gimple_stmt_iterator si;
-
- for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
- gimple phi = gsi_stmt (si);
+ gphi *phi = si.phi ();
dfa_stats_p->num_phis++;
dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
}
- for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
gimple stmt = gsi_stmt (si);
dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
tree) leaves the TRY block, its necessary to record a tree in
this field. Thus a treemple is used. */
treemple child;
- gimple parent;
+ gtry *parent;
};
/* Hashtable helpers. */
static hash_table<finally_tree_hasher> *finally_tree;
static void
-record_in_finally_tree (treemple child, gimple parent)
+record_in_finally_tree (treemple child, gtry *parent)
{
struct finally_tree_node *n;
finally_tree_node **slot;
}
static void
-collect_finally_tree (gimple stmt, gimple region);
+collect_finally_tree (gimple stmt, gtry *region);
/* Go through the gimple sequence. Works with collect_finally_tree to
record all GIMPLE_LABEL and GIMPLE_TRY statements. */
static void
-collect_finally_tree_1 (gimple_seq seq, gimple region)
+collect_finally_tree_1 (gimple_seq seq, gtry *region)
{
gimple_stmt_iterator gsi;
}
static void
-collect_finally_tree (gimple stmt, gimple region)
+collect_finally_tree (gimple stmt, gtry *region)
{
treemple temp;
switch (gimple_code (stmt))
{
case GIMPLE_LABEL:
- temp.t = gimple_label_label (stmt);
+ temp.t = gimple_label_label (as_a <glabel *> (stmt));
record_in_finally_tree (temp, region);
break;
{
temp.g = stmt;
record_in_finally_tree (temp, region);
- collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
+ collect_finally_tree_1 (gimple_try_eval (stmt),
+ as_a <gtry *> (stmt));
collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
}
else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
break;
case GIMPLE_CATCH:
- collect_finally_tree_1 (gimple_catch_handler (stmt), region);
+ collect_finally_tree_1 (gimple_catch_handler (
+ as_a <gcatch *> (stmt)),
+ region);
break;
case GIMPLE_EH_FILTER:
break;
case GIMPLE_EH_ELSE:
- collect_finally_tree_1 (gimple_eh_else_n_body (stmt), region);
- collect_finally_tree_1 (gimple_eh_else_e_body (stmt), region);
+ {
+ geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
+ collect_finally_tree_1 (gimple_eh_else_n_body (eh_else_stmt), region);
+ collect_finally_tree_1 (gimple_eh_else_e_body (eh_else_stmt), region);
+ }
break;
default:
try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain
this so that outside_finally_tree can reliably reference the tree used
in the collect_finally_tree data structures. */
- gimple try_finally_expr;
- gimple top_p;
+ gtry *try_finally_expr;
+ gtry *top_p;
/* While lowering a top_p usually it is expanded into multiple statements,
thus we need the following field to store them. */
bool may_throw;
};
-static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);
+static gimple_seq lower_eh_must_not_throw (struct leh_state *, gtry *);
/* Search for STMT in the goto queue. Return the replacement,
or null if the statement isn't in the queue. */
replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
break;
case GIMPLE_CATCH:
- replace_goto_queue_stmt_list (gimple_catch_handler_ptr (stmt), tf);
+ replace_goto_queue_stmt_list (gimple_catch_handler_ptr (
+ as_a <gcatch *> (stmt)),
+ tf);
break;
case GIMPLE_EH_FILTER:
replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
break;
case GIMPLE_EH_ELSE:
- replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (stmt), tf);
- replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (stmt), tf);
+ {
+ geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
+ replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (eh_else_stmt),
+ tf);
+ replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (eh_else_stmt),
+ tf);
+ }
break;
default:
switch (gimple_code (stmt))
{
case GIMPLE_COND:
- new_stmt.tp = gimple_op_ptr (stmt, 2);
- record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt),
- EXPR_LOCATION (*new_stmt.tp));
- new_stmt.tp = gimple_op_ptr (stmt, 3);
- record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt),
- EXPR_LOCATION (*new_stmt.tp));
+ {
+ gcond *cond_stmt = as_a <gcond *> (stmt);
+ new_stmt.tp = gimple_op_ptr (cond_stmt, 2);
+ record_in_goto_queue_label (tf, new_stmt,
+ gimple_cond_true_label (cond_stmt),
+ EXPR_LOCATION (*new_stmt.tp));
+ new_stmt.tp = gimple_op_ptr (cond_stmt, 3);
+ record_in_goto_queue_label (tf, new_stmt,
+ gimple_cond_false_label (cond_stmt),
+ EXPR_LOCATION (*new_stmt.tp));
+ }
break;
case GIMPLE_GOTO:
new_stmt.g = stmt;
of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */
static void
-verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
+verify_norecord_switch_expr (struct leh_state *state,
+ gswitch *switch_expr)
{
struct leh_tf_state *tf = state->tf;
size_t i, n;
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
struct leh_tf_state *tf)
{
- gimple x;
+ ggoto *x;
gcc_assert (q->is_label);
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
eh_landing_pad lp = region->landing_pads;
- gimple x;
+ glabel *x;
if (lp == NULL)
lp = gen_eh_landing_pad (region);
static void
emit_resx (gimple_seq *seq, eh_region region)
{
- gimple x = gimple_build_resx (region->index);
+ gresx *x = gimple_build_resx (region->index);
gimple_seq_add_stmt (seq, x);
if (region->outer)
record_stmt_eh_region (region->outer, x);
static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
- gimple x = gimple_build_eh_dispatch (region->index);
+ geh_dispatch *x = gimple_build_eh_dispatch (region->index);
gimple_seq_add_stmt (seq, x);
}
an existing label that should be put at the exit, or NULL. */
static gimple_seq
-frob_into_branch_around (gimple tp, eh_region region, tree over)
+frob_into_branch_around (gtry *tp, eh_region region, tree over)
{
gimple x;
gimple_seq cleanup, result;
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
location_t loc)
{
- gimple region = NULL;
+ gtry *region = NULL;
gimple_seq new_seq;
gimple_stmt_iterator gsi;
/* A subroutine of lower_try_finally. If FINALLY consits of a
GIMPLE_EH_ELSE node, return it. */
-static inline gimple
+static inline geh_else *
get_eh_else (gimple_seq finally)
{
gimple x = gimple_seq_first_stmt (finally);
if (gimple_code (x) == GIMPLE_EH_ELSE)
{
gcc_assert (gimple_seq_singleton_p (finally));
- return x;
+ return as_a <geh_else *> (x);
}
return NULL;
}
gimple_stmt_iterator gsi;
bool finally_may_fallthru;
gimple_seq finally;
- gimple x, eh_else;
+ gimple x;
+ geh_mnt *eh_mnt;
+ gtry *try_stmt;
+ geh_else *eh_else;
/* First check for nothing to do. */
if (lang_hooks.eh_protect_cleanup_actions == NULL)
}
/* Wrap the block with protect_cleanup_actions as the action. */
- x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
- x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
- GIMPLE_TRY_CATCH);
- finally = lower_eh_must_not_throw (outer_state, x);
+ eh_mnt = gimple_build_eh_must_not_throw (protect_cleanup_actions);
+ try_stmt = gimple_build_try (finally, gimple_seq_alloc_with_stmt (eh_mnt),
+ GIMPLE_TRY_CATCH);
+ finally = lower_eh_must_not_throw (outer_state, try_stmt);
/* Drop all of this into the exception sequence. */
emit_post_landing_pad (&eh_seq, tf->region);
struct leh_tf_state *tf)
{
tree lab;
- gimple x, eh_else;
+ gimple x;
+ geh_else *eh_else;
gimple_seq finally;
struct goto_queue_node *q, *qe;
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
struct goto_queue_node *q, *qe;
+ geh_else *eh_else;
+ glabel *label_stmt;
gimple x;
gimple_seq finally;
gimple_stmt_iterator gsi;
/* Since there's only one destination, and the destination edge can only
either be EH or non-EH, that implies that all of our incoming edges
are of the same type. Therefore we can lower EH_ELSE immediately. */
- x = get_eh_else (finally);
- if (x)
+ eh_else = get_eh_else (finally);
+ if (eh_else)
{
if (tf->may_throw)
- finally = gimple_eh_else_e_body (x);
+ finally = gimple_eh_else_e_body (eh_else);
else
- finally = gimple_eh_else_n_body (x);
+ finally = gimple_eh_else_n_body (eh_else);
}
lower_eh_constructs_1 (state, &finally);
}
finally_label = create_artificial_label (loc);
- x = gimple_build_label (finally_label);
- gimple_seq_add_stmt (&tf->top_p_seq, x);
+ label_stmt = gimple_build_label (finally_label);
+ gimple_seq_add_stmt (&tf->top_p_seq, label_stmt);
gimple_seq_add_seq (&tf->top_p_seq, finally);
gimple_seq finally;
gimple_seq new_stmt;
gimple_seq seq;
- gimple x, eh_else;
+ gimple x;
+ geh_else *eh_else;
tree tmp;
location_t tf_loc = gimple_location (tf->try_finally_expr);
tree last_case;
vec<tree> case_label_vec;
gimple_seq switch_body = NULL;
- gimple x, eh_else;
+ gimple x;
+ geh_else *eh_else;
tree tmp;
gimple switch_stmt;
gimple_seq finally;
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
{
int f_estimate, sw_estimate;
- gimple eh_else;
+ geh_else *eh_else;
/* If there's an EH_ELSE involved, the exception path is separate
and really doesn't come into play for this computation. */
arrange for the FINALLY block to be executed on all exits. */
static gimple_seq
-lower_try_finally (struct leh_state *state, gimple tp)
+lower_try_finally (struct leh_state *state, gtry *tp)
{
struct leh_tf_state this_tf;
struct leh_state this_state;
exception region trees that records all the magic. */
static gimple_seq
-lower_catch (struct leh_state *state, gimple tp)
+lower_catch (struct leh_state *state, gtry *tp)
{
eh_region try_region = NULL;
struct leh_state this_state = *state;
gsi_next (&gsi))
{
eh_catch c;
- gimple gcatch;
+ gcatch *catch_stmt;
gimple_seq handler;
- gcatch = gsi_stmt (gsi);
- c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));
+ catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
+ c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));
- handler = gimple_catch_handler (gcatch);
+ handler = gimple_catch_handler (catch_stmt);
lower_eh_constructs_1 (&this_state, &handler);
c->label = create_artificial_label (UNKNOWN_LOCATION);
region trees that record all the magic. */
static gimple_seq
-lower_eh_filter (struct leh_state *state, gimple tp)
+lower_eh_filter (struct leh_state *state, gtry *tp)
{
struct leh_state this_state = *state;
eh_region this_region = NULL;
plus the exception region trees that record all the magic. */
static gimple_seq
-lower_eh_must_not_throw (struct leh_state *state, gimple tp)
+lower_eh_must_not_throw (struct leh_state *state, gtry *tp)
{
struct leh_state this_state = *state;
this_region = gen_eh_region_must_not_throw (state->cur_region);
this_region->u.must_not_throw.failure_decl
- = gimple_eh_must_not_throw_fndecl (inner);
+ = gimple_eh_must_not_throw_fndecl (
+ as_a <geh_mnt *> (inner));
this_region->u.must_not_throw.failure_loc
= LOCATION_LOCUS (gimple_location (tp));
except that we only execute the cleanup block for exception edges. */
static gimple_seq
-lower_cleanup (struct leh_state *state, gimple tp)
+lower_cleanup (struct leh_state *state, gtry *tp)
{
struct leh_state this_state = *state;
eh_region this_region = NULL;
break;
case GIMPLE_SWITCH:
- verify_norecord_switch_expr (state, stmt);
+ verify_norecord_switch_expr (state, as_a <gswitch *> (stmt));
break;
case GIMPLE_TRY:
- if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
- replace = lower_try_finally (state, stmt);
- else
- {
- x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
- if (!x)
- {
- replace = gimple_try_eval (stmt);
- lower_eh_constructs_1 (state, &replace);
- }
- else
- switch (gimple_code (x))
+ {
+ gtry *try_stmt = as_a <gtry *> (stmt);
+ if (gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
+ replace = lower_try_finally (state, try_stmt);
+ else
+ {
+ x = gimple_seq_first_stmt (gimple_try_cleanup (try_stmt));
+ if (!x)
{
+ replace = gimple_try_eval (try_stmt);
+ lower_eh_constructs_1 (state, &replace);
+ }
+ else
+ switch (gimple_code (x))
+ {
case GIMPLE_CATCH:
- replace = lower_catch (state, stmt);
- break;
+ replace = lower_catch (state, try_stmt);
+ break;
case GIMPLE_EH_FILTER:
- replace = lower_eh_filter (state, stmt);
- break;
+ replace = lower_eh_filter (state, try_stmt);
+ break;
case GIMPLE_EH_MUST_NOT_THROW:
- replace = lower_eh_must_not_throw (state, stmt);
- break;
+ replace = lower_eh_must_not_throw (state, try_stmt);
+ break;
case GIMPLE_EH_ELSE:
- /* This code is only valid with GIMPLE_TRY_FINALLY. */
- gcc_unreachable ();
+ /* This code is only valid with GIMPLE_TRY_FINALLY. */
+ gcc_unreachable ();
default:
- replace = lower_cleanup (state, stmt);
- break;
- }
- }
+ replace = lower_cleanup (state, try_stmt);
+ break;
+ }
+ }
+ }
/* Remove the old stmt and insert the transformed sequence
instead. */
no fallthru edge; false if there is. */
bool
-make_eh_dispatch_edges (gimple stmt)
+make_eh_dispatch_edges (geh_dispatch *stmt)
{
eh_region r;
eh_catch c;
The actual edge update will happen in the caller. */
void
-redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
+redirect_eh_dispatch_edge (geh_dispatch *stmt, edge e, basic_block new_bb)
{
tree new_lab = gimple_block_label (new_bb);
bool any_changed = false;
return true;
case GIMPLE_CALL:
- return !gimple_call_nothrow_p (stmt);
+ return !gimple_call_nothrow_p (as_a <gcall *> (stmt));
case GIMPLE_ASSIGN:
case GIMPLE_COND:
case GIMPLE_ASM:
if (!cfun->can_throw_non_call_exceptions)
return false;
- return gimple_asm_volatile_p (stmt);
+ return gimple_asm_volatile_p (as_a <gasm *> (stmt));
default:
return false;
temporary used in the initializer for A. */
static void
-optimize_double_finally (gimple one, gimple two)
+optimize_double_finally (gtry *one, gtry *two)
{
gimple oneh;
gimple_stmt_iterator gsi;
two = NULL;
else
two = gsi_stmt (gsi);
- if (one
- && two
- && gimple_code (one) == GIMPLE_TRY
- && gimple_code (two) == GIMPLE_TRY
- && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
- && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
- optimize_double_finally (one, two);
+ if (one && two)
+ if (gtry *try_one = dyn_cast <gtry *> (one))
+ if (gtry *try_two = dyn_cast <gtry *> (two))
+ if (gimple_try_kind (try_one) == GIMPLE_TRY_FINALLY
+ && gimple_try_kind (try_two) == GIMPLE_TRY_FINALLY)
+ optimize_double_finally (try_one, try_two);
if (one)
switch (gimple_code (one))
{
refactor_eh_r (gimple_try_cleanup (one));
break;
case GIMPLE_CATCH:
- refactor_eh_r (gimple_catch_handler (one));
+ refactor_eh_r (gimple_catch_handler (as_a <gcatch *> (one)));
break;
case GIMPLE_EH_FILTER:
refactor_eh_r (gimple_eh_filter_failure (one));
break;
case GIMPLE_EH_ELSE:
- refactor_eh_r (gimple_eh_else_n_body (one));
- refactor_eh_r (gimple_eh_else_e_body (one));
+ {
+ geh_else *eh_else_stmt = as_a <geh_else *> (one);
+ refactor_eh_r (gimple_eh_else_n_body (eh_else_stmt));
+ refactor_eh_r (gimple_eh_else_e_body (eh_else_stmt));
+ }
break;
default:
break;
/* At the end of gimple optimization, we can lower RESX. */
static bool
-lower_resx (basic_block bb, gimple stmt, hash_map<eh_region, tree> *mnt_map)
+lower_resx (basic_block bb, gresx *stmt,
+ hash_map<eh_region, tree> *mnt_map)
{
int lp_nr;
eh_region src_r, dst_r;
gimple last = last_stmt (bb);
if (last && is_gimple_resx (last))
{
- dominance_invalidated |= lower_resx (bb, last, &mnt_map);
+ dominance_invalidated |=
+ lower_resx (bb, as_a <gresx *> (last), &mnt_map);
any_rewritten = true;
}
}
/* See if there is a virtual PHI node to take an updated virtual
operand from. */
- gimple vphi = NULL;
+ gphi *vphi = NULL;
tree vuse = NULL_TREE;
- for (gsi = gsi_start_phis (succbb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gpi = gsi_start_phis (succbb);
+ !gsi_end_p (gpi); gsi_next (&gpi))
{
- tree res = gimple_phi_result (gsi_stmt (gsi));
+ tree res = gimple_phi_result (gpi.phi ());
if (virtual_operand_p (res))
{
- vphi = gsi_stmt (gsi);
+ vphi = gpi.phi ();
vuse = res;
break;
}
we have found some duplicate labels and removed some edges. */
static bool
-lower_eh_dispatch (basic_block src, gimple stmt)
+lower_eh_dispatch (basic_block src, geh_dispatch *stmt)
{
gimple_stmt_iterator gsi;
int region_nr;
continue;
if (gimple_code (last) == GIMPLE_EH_DISPATCH)
{
- redirected |= lower_eh_dispatch (bb, last);
+ redirected |= lower_eh_dispatch (bb,
+ as_a <geh_dispatch *> (last));
flags |= TODO_update_ssa_only_virtuals;
}
else if (gimple_code (last) == GIMPLE_RESX)
switch (gimple_code (stmt))
{
case GIMPLE_RESX:
- bitmap_set_bit (r_reachable, gimple_resx_region (stmt));
+ bitmap_set_bit (r_reachable,
+ gimple_resx_region (as_a <gresx *> (stmt)));
break;
case GIMPLE_EH_DISPATCH:
- bitmap_set_bit (r_reachable, gimple_eh_dispatch_region (stmt));
+ bitmap_set_bit (r_reachable,
+ gimple_eh_dispatch_region (
+ as_a <geh_dispatch *> (stmt)));
break;
default:
break;
for a different region. */
for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
tree lab;
int lp_nr;
- if (gimple_code (stmt) != GIMPLE_LABEL)
+ if (!label_stmt)
break;
- lab = gimple_label_label (stmt);
+ lab = gimple_label_label (label_stmt);
lp_nr = EH_LANDING_PAD_NR (lab);
if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
return false;
that doesn't appear to handle virtuals. Propagate by hand. */
if (!gimple_seq_empty_p (phi_nodes (bb)))
{
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
+ for (gphi_iterator gpi = gsi_start_phis (bb); !gsi_end_p (gpi); )
{
- gimple use_stmt, phi = gsi_stmt (gsi);
+ gimple use_stmt;
+ gphi *phi = gpi.phi ();
tree lhs = gimple_phi_result (phi);
tree rhs = gimple_phi_arg_def (phi, 0);
use_operand_p use_p;
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
- remove_phi_node (&gsi, true);
+ remove_phi_node (&gpi, true);
}
}
cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
edge old_bb_out, bool change_region)
{
- gimple_stmt_iterator ngsi, ogsi;
+ gphi_iterator ngsi, ogsi;
edge_iterator ei;
edge e;
bitmap ophi_handled;
for the edges we're going to move. */
for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
{
- gimple ophi, nphi = gsi_stmt (ngsi);
+ gphi *ophi, *nphi = ngsi.phi ();
tree nresult, nop;
nresult = gimple_phi_result (nphi);
ophi = NULL;
for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
{
- ophi = gsi_stmt (ogsi);
+ ophi = ogsi.phi ();
if (gimple_phi_result (ophi) == nop)
break;
ophi = NULL;
we don't know what values from the other edges into NEW_BB to use. */
for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
{
- gimple ophi = gsi_stmt (ogsi);
+ gphi *ophi = ogsi.phi ();
tree oresult = gimple_phi_result (ophi);
if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
goto fail;
lab = NULL;
for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
int lp_nr;
- if (gimple_code (stmt) != GIMPLE_LABEL)
+ if (!stmt)
break;
lab = gimple_label_label (stmt);
lp_nr = EH_LANDING_PAD_NR (lab);
/* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
DEBUG_FUNCTION bool
-verify_eh_dispatch_edge (gimple stmt)
+verify_eh_dispatch_edge (geh_dispatch *stmt)
{
eh_region r;
eh_catch c;
extern bool remove_stmt_from_eh_lp (gimple);
extern int lookup_stmt_eh_lp_fn (struct function *, gimple);
extern int lookup_stmt_eh_lp (gimple);
-extern bool make_eh_dispatch_edges (gimple);
+extern bool make_eh_dispatch_edges (geh_dispatch *);
extern void make_eh_edges (gimple);
extern edge redirect_eh_edge (edge, basic_block);
-extern void redirect_eh_dispatch_edge (gimple, edge, basic_block);
+extern void redirect_eh_dispatch_edge (geh_dispatch *, edge, basic_block);
extern bool operation_could_trap_helper_p (enum tree_code, bool, bool, bool,
bool, tree, bool *);
extern bool operation_could_trap_p (enum tree_code, bool, bool, tree);
extern bool maybe_duplicate_eh_stmt (gimple, gimple);
extern void maybe_remove_unreachable_handlers (void);
extern bool verify_eh_edges (gimple);
-extern bool verify_eh_dispatch_edge (gimple);
+extern bool verify_eh_dispatch_edge (geh_dispatch *);
#endif /* GCC_TREE_EH_H */
{
varpool_node *cvar;
tree cdecl;
- gimple x;
+ gcall *x;
cvar = data->control_var;
cdecl = cvar->decl;
/* Lower the I'th operand of PHI. */
static void
-lower_emutls_phi_arg (gimple phi, unsigned int i, struct lower_emutls_data *d)
+lower_emutls_phi_arg (gphi *phi, unsigned int i,
+ struct lower_emutls_data *d)
{
struct walk_stmt_info wi;
struct phi_arg_d *pd = gimple_phi_arg (phi, i);
FOR_EACH_BB_FN (d.bb, cfun)
{
- gimple_stmt_iterator gsi;
unsigned int i, nedge;
/* Lower each of the PHI nodes of the block, as we may have
clear_access_vars ();
d.seq = NULL;
- for (gsi = gsi_start_phis (d.bb);
+ for (gphi_iterator gsi = gsi_start_phis (d.bb);
!gsi_end_p (gsi);
gsi_next (&gsi))
- lower_emutls_phi_arg (gsi_stmt (gsi), i, &d);
+ lower_emutls_phi_arg (gsi.phi (), i, &d);
/* Insert all statements generated by all phi nodes for this
particular edge all at once. */
clear_access_vars ();
/* Lower each of the statements of the block. */
- for (gsi = gsi_start_bb (d.bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (d.bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
d.seq = NULL;
lower_emutls_stmt (gsi_stmt (gsi), &d);
- there is a virtual PHI in a BB other than the loop->header. */
static bool
-if_convertible_phi_p (struct loop *loop, basic_block bb, gimple phi,
+if_convertible_phi_p (struct loop *loop, basic_block bb, gphi *phi,
bool any_mask_load_store)
{
if (dump_file && (dump_flags & TDF_DETAILS))
for (i = 0; i < loop->num_nodes; i++)
{
basic_block bb = ifc_bbs[i];
- gimple_stmt_iterator itr;
+ gphi_iterator itr;
for (itr = gsi_start_phis (bb); !gsi_end_p (itr); gsi_next (&itr))
- if (!if_convertible_phi_p (loop, bb, gsi_stmt (itr),
+ if (!if_convertible_phi_p (loop, bb, itr.phi (),
*any_mask_load_store))
return false;
}
TRUE_BB is selected. */
static void
-predicate_scalar_phi (gimple phi, tree cond,
+predicate_scalar_phi (gphi *phi, tree cond,
basic_block true_bb,
gimple_stmt_iterator *gsi)
{
for (i = 1; i < orig_loop_num_nodes; i++)
{
- gimple phi;
+ gphi *phi;
tree cond = NULL_TREE;
- gimple_stmt_iterator gsi, phi_gsi;
+ gimple_stmt_iterator gsi;
+ gphi_iterator phi_gsi;
basic_block true_bb = NULL;
bb = ifc_bbs[i];
while (!gsi_end_p (phi_gsi))
{
- phi = gsi_stmt (phi_gsi);
+ phi = phi_gsi.phi ();
predicate_scalar_phi (phi, cond, true_bb, &gsi);
release_phi_node (phi);
gsi_next (&phi_gsi);
block using the mapping information in ID. */
static gimple
-copy_gimple_bind (gimple stmt, copy_body_data *id)
+copy_gimple_bind (gbind *stmt, copy_body_data *id)
{
gimple new_bind;
tree new_block, new_vars;
statement. */
if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
{
- tree retval = gimple_return_retval (stmt);
+ tree retval = gimple_return_retval (as_a <greturn *> (stmt));
tree retbnd = gimple_return_retbnd (stmt);
tree bndslot = id->retbnd;
switch (gimple_code (stmt))
{
case GIMPLE_BIND:
- copy = copy_gimple_bind (stmt, id);
+ copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
break;
case GIMPLE_CATCH:
- s1 = remap_gimple_seq (gimple_catch_handler (stmt), id);
- copy = gimple_build_catch (gimple_catch_types (stmt), s1);
+ {
+ gcatch *catch_stmt = as_a <gcatch *> (stmt);
+ s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
+ copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
+ }
break;
case GIMPLE_EH_FILTER:
break;
case GIMPLE_OMP_PARALLEL:
- s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
- copy = gimple_build_omp_parallel
- (s1,
- gimple_omp_parallel_clauses (stmt),
- gimple_omp_parallel_child_fn (stmt),
- gimple_omp_parallel_data_arg (stmt));
+ {
+ gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
+ s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
+ copy = gimple_build_omp_parallel
+ (s1,
+ gimple_omp_parallel_clauses (omp_par_stmt),
+ gimple_omp_parallel_child_fn (omp_par_stmt),
+ gimple_omp_parallel_data_arg (omp_par_stmt));
+ }
break;
case GIMPLE_OMP_TASK:
case GIMPLE_OMP_CRITICAL:
s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
- copy
- = gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
+ copy = gimple_build_omp_critical (s1,
+ gimple_omp_critical_name (
+ as_a <gomp_critical *> (stmt)));
break;
case GIMPLE_TRANSACTION:
- s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
- copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
- gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
+ {
+ gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
+ gtransaction *new_trans_stmt;
+ s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
+ id);
+ copy = new_trans_stmt
+ = gimple_build_transaction (
+ s1,
+ gimple_transaction_label (old_trans_stmt));
+ gimple_transaction_set_subcode (
+ new_trans_stmt,
+ gimple_transaction_subcode (old_trans_stmt));
+ }
break;
default:
if (gimple_debug_bind_p (stmt))
{
- copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
- gimple_debug_bind_get_value (stmt),
- stmt);
+ gdebug *copy
+ = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
+ gimple_debug_bind_get_value (stmt),
+ stmt);
id->debug_stmts.safe_push (copy);
gimple_seq_add_stmt (&stmts, copy);
return stmts;
}
if (gimple_debug_source_bind_p (stmt))
{
- copy = gimple_build_debug_source_bind
- (gimple_debug_source_bind_get_var (stmt),
- gimple_debug_source_bind_get_value (stmt), stmt);
+ gdebug *copy = gimple_build_debug_source_bind
+ (gimple_debug_source_bind_get_var (stmt),
+ gimple_debug_source_bind_get_value (stmt),
+ stmt);
id->debug_stmts.safe_push (copy);
gimple_seq_add_stmt (&stmts, copy);
return stmts;
copy = gimple_copy (stmt);
/* Clear flags that need revisiting. */
- if (is_gimple_call (copy)
- && gimple_call_tail_p (copy))
- gimple_call_set_tail (copy, false);
+ if (gcall *call_stmt = dyn_cast <gcall *> (copy))
+ if (gimple_call_tail_p (call_stmt))
+ gimple_call_set_tail (call_stmt, false);
/* Remap the region numbers for __builtin_eh_{pointer,filter},
RESX and EH_DISPATCH. */
keep it valid over inlining by setting DECL_PT_UID. */
if (!id->src_cfun->gimple_df
|| !id->src_cfun->gimple_df->ipa_pta)
- gimple_call_reset_alias_info (copy);
+ gimple_call_reset_alias_info (as_a <gcall *> (copy));
}
break;
case GIMPLE_RESX:
{
- int r = gimple_resx_region (copy);
+ gresx *resx_stmt = as_a <gresx *> (copy);
+ int r = gimple_resx_region (resx_stmt);
r = remap_eh_region_nr (r, id);
- gimple_resx_set_region (copy, r);
+ gimple_resx_set_region (resx_stmt, r);
}
break;
case GIMPLE_EH_DISPATCH:
{
- int r = gimple_eh_dispatch_region (copy);
+ geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
+ int r = gimple_eh_dispatch_region (eh_dispatch);
r = remap_eh_region_nr (r, id);
- gimple_eh_dispatch_set_region (copy, r);
+ gimple_eh_dispatch_set_region (eh_dispatch, r);
}
break;
do
{
tree fn;
+ gcall *call_stmt;
stmt = gsi_stmt (copy_gsi);
- if (is_gimple_call (stmt)
- && gimple_call_va_arg_pack_p (stmt)
- && id->gimple_call)
+ call_stmt = dyn_cast <gcall *> (stmt);
+ if (call_stmt
+ && gimple_call_va_arg_pack_p (call_stmt)
+ && id->call_stmt)
{
/* __builtin_va_arg_pack () should be replaced by
all arguments corresponding to ... in the caller. */
tree p;
- gimple new_call;
+ gcall *new_call;
vec<tree> argarray;
- size_t nargs = gimple_call_num_args (id->gimple_call);
+ size_t nargs = gimple_call_num_args (id->call_stmt);
size_t n, i, nargs_to_copy;
bool remove_bounds = false;
we handle not instrumented call in instrumented
function. */
nargs_to_copy = nargs;
- if (gimple_call_with_bounds_p (id->gimple_call)
+ if (gimple_call_with_bounds_p (id->call_stmt)
&& !gimple_call_with_bounds_p (stmt))
{
- for (i = gimple_call_num_args (id->gimple_call) - nargs;
- i < gimple_call_num_args (id->gimple_call);
+ for (i = gimple_call_num_args (id->call_stmt) - nargs;
+ i < gimple_call_num_args (id->call_stmt);
i++)
- if (POINTER_BOUNDS_P (gimple_call_arg (id->gimple_call, i)))
+ if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
nargs_to_copy--;
remove_bounds = true;
}
/* Create the new array of arguments. */
- n = nargs_to_copy + gimple_call_num_args (stmt);
+ n = nargs_to_copy + gimple_call_num_args (call_stmt);
argarray.create (n);
argarray.safe_grow_cleared (n);
/* Copy all the arguments before '...' */
memcpy (argarray.address (),
- gimple_call_arg_ptr (stmt, 0),
- gimple_call_num_args (stmt) * sizeof (tree));
+ gimple_call_arg_ptr (call_stmt, 0),
+ gimple_call_num_args (call_stmt) * sizeof (tree));
if (remove_bounds)
{
/* Append the rest of arguments removing bounds. */
- unsigned cur = gimple_call_num_args (stmt);
- i = gimple_call_num_args (id->gimple_call) - nargs;
- for (i = gimple_call_num_args (id->gimple_call) - nargs;
- i < gimple_call_num_args (id->gimple_call);
+ unsigned cur = gimple_call_num_args (call_stmt);
+ i = gimple_call_num_args (id->call_stmt) - nargs;
+ for (i = gimple_call_num_args (id->call_stmt) - nargs;
+ i < gimple_call_num_args (id->call_stmt);
i++)
- if (!POINTER_BOUNDS_P (gimple_call_arg (id->gimple_call, i)))
- argarray[cur++] = gimple_call_arg (id->gimple_call, i);
+ if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
+ argarray[cur++] = gimple_call_arg (id->call_stmt, i);
gcc_assert (cur == n);
}
else
{
/* Append the arguments passed in '...' */
- memcpy (argarray.address () + gimple_call_num_args (stmt),
- gimple_call_arg_ptr (id->gimple_call, 0)
- + (gimple_call_num_args (id->gimple_call) - nargs),
+ memcpy (argarray.address () + gimple_call_num_args (call_stmt),
+ gimple_call_arg_ptr (id->call_stmt, 0)
+ + (gimple_call_num_args (id->call_stmt) - nargs),
nargs * sizeof (tree));
}
- new_call = gimple_build_call_vec (gimple_call_fn (stmt),
+ new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
argarray);
argarray.release ();
/* Copy all GIMPLE_CALL flags, location and block, except
GF_CALL_VA_ARG_PACK. */
- gimple_call_copy_flags (new_call, stmt);
+ gimple_call_copy_flags (new_call, call_stmt);
gimple_call_set_va_arg_pack (new_call, false);
gimple_set_location (new_call, gimple_location (stmt));
gimple_set_block (new_call, gimple_block (stmt));
- gimple_call_set_lhs (new_call, gimple_call_lhs (stmt));
+ gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
 gsi_replace (&copy_gsi, new_call, false);
stmt = new_call;
}
else if (is_gimple_call (stmt)
- && id->gimple_call
+ && id->call_stmt
&& (decl = gimple_call_fndecl (stmt))
&& DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
&& DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
{
/* __builtin_va_arg_pack_len () should be replaced by
the number of anonymous arguments. */
- size_t nargs = gimple_call_num_args (id->gimple_call), i;
+ size_t nargs = gimple_call_num_args (id->call_stmt), i;
tree count, p;
gimple new_stmt;
nargs--;
/* For instrumented calls we should ignore bounds. */
- for (i = gimple_call_num_args (id->gimple_call) - nargs;
- i < gimple_call_num_args (id->gimple_call);
+ for (i = gimple_call_num_args (id->call_stmt) - nargs;
+ i < gimple_call_num_args (id->call_stmt);
i++)
- if (POINTER_BOUNDS_P (gimple_call_arg (id->gimple_call, i)))
+ if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
nargs--;
count = build_int_cst (integer_type_node, nargs);
/* We're duplicating a CALL_EXPR. Find any corresponding
callgraph edges and update or duplicate them. */
- if (is_gimple_call (stmt))
+ if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
struct cgraph_edge *edge;
int edge_freq = edge->frequency;
int new_freq;
struct cgraph_edge *old_edge = edge;
- edge = edge->clone (id->dst_node, stmt,
+ edge = edge->clone (id->dst_node, call_stmt,
gimple_uid (stmt),
REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
true);
gcc_assert (!edge->indirect_unknown_callee);
old_edge->speculative_call_info (direct, indirect, ref);
- indirect = indirect->clone (id->dst_node, stmt,
+ indirect = indirect->clone (id->dst_node, call_stmt,
gimple_uid (stmt),
REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
true);
case CB_CGE_MOVE_CLONES:
id->dst_node->set_call_stmt_including_clones (orig_stmt,
- stmt);
+ call_stmt);
edge = id->dst_node->get_edge (stmt);
break;
case CB_CGE_MOVE:
edge = id->dst_node->get_edge (orig_stmt);
if (edge)
- edge->set_call_stmt (stmt);
+ edge->set_call_stmt (call_stmt);
break;
default:
|| !id->dst_node->definition);
if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
id->dst_node->create_edge_including_clones
- (dest, orig_stmt, stmt, bb->count,
+ (dest, orig_stmt, call_stmt, bb->count,
compute_call_stmt_bb_frequency (id->dst_node->decl,
copy_basic_block),
CIF_ORIGINALLY_INDIRECT_CALL);
else
- id->dst_node->create_edge (dest, stmt,
+ id->dst_node->create_edge (dest, call_stmt,
bb->count,
compute_call_stmt_bb_frequency
(id->dst_node->decl,
}
}
- notice_special_calls (stmt);
+ notice_special_calls (as_a <gcall *> (stmt));
}
maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
if (!e->dest->aux
|| ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
{
- gimple phi;
- gimple_stmt_iterator si;
+ gphi *phi;
+ gphi_iterator si;
if (!nonlocal_goto)
gcc_assert (e->flags & EDGE_EH);
{
edge re;
- phi = gsi_stmt (si);
+ phi = si.phi ();
/* For abnormal goto/call edges the receiver can be the
ENTRY_BLOCK. Do not assert this cannot happen. */
}
if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
- make_eh_dispatch_edges (copy_stmt);
+ make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
else if (can_throw)
make_eh_edges (copy_stmt);
{
basic_block const new_bb = (basic_block) bb->aux;
edge_iterator ei;
- gimple phi;
- gimple_stmt_iterator si;
+ gphi *phi;
+ gphi_iterator si;
edge new_edge;
bool inserted = false;
for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
{
tree res, new_res;
- gimple new_phi;
+ gphi *new_phi;
- phi = gsi_stmt (si);
+ phi = si.phi ();
res = PHI_RESULT (phi);
new_res = res;
if (!virtual_operand_p (res))
gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
while (is_gimple_debug (gsi_stmt (ssi)))
{
- gimple stmt = gsi_stmt (ssi), new_stmt;
+ gimple stmt = gsi_stmt (ssi);
+ gdebug *new_stmt;
tree var;
tree value;
/* Now that we've duplicated the blocks, duplicate their edges. */
basic_block abnormal_goto_dest = NULL;
- if (id->gimple_call
- && stmt_can_make_abnormal_goto (id->gimple_call))
+ if (id->call_stmt
+ && stmt_can_make_abnormal_goto (id->call_stmt))
{
- gimple_stmt_iterator gsi = gsi_for_stmt (id->gimple_call);
+ gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
- bb = gimple_bb (id->gimple_call);
+ bb = gimple_bb (id->call_stmt);
gsi_next (&gsi);
if (gsi_end_p (gsi))
abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
this arises, we drop the VALUE expression altogether. */
static void
-copy_debug_stmt (gimple stmt, copy_body_data *id)
+copy_debug_stmt (gdebug *stmt, copy_body_data *id)
{
tree t, *n;
struct walk_stmt_info wi;
t = gimple_debug_source_bind_get_value (stmt);
if (t != NULL_TREE
&& TREE_CODE (t) == PARM_DECL
- && id->gimple_call)
+ && id->call_stmt)
{
vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
unsigned int i;
copy_debug_stmts (copy_body_data *id)
{
size_t i;
- gimple stmt;
+ gdebug *stmt;
if (!id->debug_stmts.exists ())
return;
VLA objects as those can't cause unbounded growth (they're always
wrapped inside stack_save/stack_restore regions. */
if (gimple_alloca_call_p (stmt)
- && !gimple_call_alloca_for_var_p (stmt)
+ && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
&& !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
{
inline_forbidden_reason
break;
case GIMPLE_SWITCH:
- /* Take into account cost of the switch + guess 2 conditional jumps for
- each case label.
-
- TODO: once the switch expansion logic is sufficiently separated, we can
- do better job on estimating cost of the switch. */
- if (weights->time_based)
- cost = floor_log2 (gimple_switch_num_labels (stmt)) * 2;
- else
- cost = gimple_switch_num_labels (stmt) * 2;
+ {
+ gswitch *switch_stmt = as_a <gswitch *> (stmt);
+ /* Take into account cost of the switch + guess 2 conditional jumps for
+ each case label.
+
+ TODO: once the switch expansion logic is sufficiently separated, we can
+ do better job on estimating cost of the switch. */
+ if (weights->time_based)
+ cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
+ else
+ cost = gimple_switch_num_labels (switch_stmt) * 2;
+ }
break;
case GIMPLE_CALL:
case GIMPLE_ASM:
{
- int count = asm_str_count (gimple_asm_string (stmt));
+ int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
/* 1000 means infinity. This avoids overflows later
with very long asm statements. */
if (count > 1000)
return 10;
case GIMPLE_BIND:
- return estimate_num_insns_seq (gimple_bind_body (stmt), weights);
+ return estimate_num_insns_seq (
+ gimple_bind_body (as_a <gbind *> (stmt)),
+ weights);
case GIMPLE_EH_FILTER:
return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
case GIMPLE_CATCH:
- return estimate_num_insns_seq (gimple_catch_handler (stmt), weights);
+ return estimate_num_insns_seq (gimple_catch_handler (
+ as_a <gcatch *> (stmt)),
+ weights);
case GIMPLE_TRY:
return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
case GIMPLE_TRANSACTION:
return (weights->tm_cost
- + estimate_num_insns_seq (gimple_transaction_body (stmt),
+ + estimate_num_insns_seq (gimple_transaction_body (
+ as_a <gtransaction *> (stmt)),
weights));
default:
gimple_stmt_iterator gsi, stmt_gsi;
bool successfully_inlined = FALSE;
bool purge_dead_abnormal_edges;
+ gcall *call_stmt;
unsigned int i;
/* Set input_location here so we get the right instantiation context
input_location = gimple_location (stmt);
/* From here on, we're only interested in CALL_EXPRs. */
- if (gimple_code (stmt) != GIMPLE_CALL)
+ call_stmt = dyn_cast <gcall *> (stmt);
+ if (!call_stmt)
goto egress;
cg_edge = id->dst_node->get_edge (stmt);
id->src_fn = fn;
id->src_node = cg_edge->callee;
id->src_cfun = DECL_STRUCT_FUNCTION (fn);
- id->gimple_call = stmt;
+ id->call_stmt = stmt;
gcc_assert (!id->src_cfun->after_inlining);
if (gimple_call_with_bounds_p (stmt)
&& TREE_CODE (modify_dest) == SSA_NAME)
{
- gimple retbnd = chkp_retbnd_call_by_val (modify_dest);
+ gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
if (retbnd)
{
return_bounds = gimple_call_lhs (retbnd);
if (DECL_P (modify_dest))
TREE_NO_WARNING (modify_dest) = 1;
- if (gimple_call_return_slot_opt_p (stmt))
+ if (gimple_call_return_slot_opt_p (call_stmt))
{
return_slot = modify_dest;
modify_dest = NULL;
struct walk_stmt_info *wi)
{
copy_body_data *id = (copy_body_data *) wi->info;
- gimple stmt = gsi_stmt (*gsip);
+ glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
- if (gimple_code (stmt) == GIMPLE_LABEL)
+ if (stmt)
{
tree decl = gimple_label_label (stmt);
struct walk_stmt_info *wi)
{
copy_body_data *id = (copy_body_data *) wi->info;
- gimple stmt = gsi_stmt (*gsip);
+ gimple gs = gsi_stmt (*gsip);
- if (gimple_code (stmt) == GIMPLE_BIND)
+ if (gbind *stmt = dyn_cast <gbind *> (gs))
{
tree block = gimple_bind_block (stmt);
/* GIMPLE_CALL if va arg parameter packs should be expanded or NULL
is not. */
- gimple gimple_call;
+ gimple call_stmt;
/* Exception landing pad the inlined call lies in. */
int eh_lp_nr;
bitmap blocks_to_copy;
/* Debug statements that need processing. */
- vec<gimple> debug_stmts;
+ vec<gdebug *> debug_stmts;
/* A map from local declarations in the inlined function to
equivalents in the function into which it is being inlined, where
/* vec of vec of PHIs to rewrite in a basic block. Element I corresponds
the to basic block with index I. Allocated once per compilation, *not*
released between different functions. */
-static vec<gimple_vec> phis_to_rewrite;
+static vec< vec<gphi *> > phis_to_rewrite;
/* The bitmap of non-NULL elements of PHIS_TO_REWRITE. */
static bitmap blocks_with_phis_to_rewrite;
/* Marks phi node PHI in basic block BB for rewrite. */
static void
-mark_phi_for_rewrite (basic_block bb, gimple phi)
+mark_phi_for_rewrite (basic_block bb, gphi *phi)
{
- gimple_vec phis;
+ vec<gphi *> phis;
unsigned n, idx = bb->index;
if (rewrite_uses_p (phi))
{
unsigned bb_index;
edge e;
- gimple phi;
+ gphi *phi;
basic_block bb;
bitmap_iterator bi;
struct def_blocks_d *def_map = find_def_blocks_for (var);
FOR_EACH_EDGE (e, ei, bb->succs)
{
- gimple phi;
- gimple_stmt_iterator gsi;
+ gphi *phi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi);
gsi_next (&gsi))
tree currdef, res;
location_t loc;
- phi = gsi_stmt (gsi);
+ phi = gsi.phi ();
res = gimple_phi_result (phi);
currdef = get_reaching_def (SSA_NAME_VAR (res));
/* Virtual operand PHI args do not need a location. */
void
rewrite_dom_walker::before_dom_children (basic_block bb)
{
- gimple_stmt_iterator gsi;
-
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "\n\nRenaming block #%d\n\n", bb->index);
/* Step 1. Register new definitions for every PHI node in the block.
Conceptually, all the PHI nodes are executed in parallel and each PHI
node introduces a new version for the associated variable. */
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
tree result = gimple_phi_result (gsi_stmt (gsi));
register_new_def (result, SSA_NAME_VAR (result));
with its immediate reaching definitions. Update the current definition
of a variable when a new real or virtual definition is found. */
if (bitmap_bit_p (interesting_blocks, bb->index))
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
rewrite_stmt (&gsi);
/* Step 3. Visit all the successor blocks of BB looking for PHI nodes.
FOR_EACH_EDGE (e, ei, bb->succs)
{
- gimple phi;
- gimple_vec phis;
+ gphi *phi;
+ vec<gphi *> phis;
if (!bitmap_bit_p (blocks_with_phis_to_rewrite, e->dest->index))
continue;
else
{
gimple stmt = SSA_NAME_DEF_STMT (reaching_def);
+ gphi *other_phi = dyn_cast <gphi *> (stmt);
/* Single element PHI nodes behave like copies, so get the
location from the phi argument. */
- if (gimple_code (stmt) == GIMPLE_PHI
- && gimple_phi_num_args (stmt) == 1)
- locus = gimple_phi_arg_location (stmt, 0);
+ if (other_phi
+ && gimple_phi_num_args (other_phi) == 1)
+ locus = gimple_phi_arg_location (other_phi, 0);
else
locus = gimple_location (stmt);
}
rewrite_update_dom_walker::before_dom_children (basic_block bb)
{
bool is_abnormal_phi;
- gimple_stmt_iterator gsi;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Registering new PHI nodes in block #%d\n",
register it as a new definition for its corresponding name. Also
register definitions for names whose underlying symbols are
marked for renaming. */
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
tree lhs, lhs_sym;
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
if (!register_defs_p (phi))
continue;
if (bitmap_bit_p (interesting_blocks, bb->index))
{
gcc_checking_assert (bitmap_bit_p (blocks_to_update, bb->index));
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
if (rewrite_update_stmt (gsi_stmt (gsi), gsi))
gsi_remove (&gsi, true);
else
mark_block_for_update (bb);
if (gimple_code (stmt) == GIMPLE_PHI)
- mark_phi_for_rewrite (def_bb, stmt);
+ mark_phi_for_rewrite (def_bb, as_a <gphi *> (stmt));
else
{
set_rewrite_uses (stmt, true);
prepare_block_for_update (basic_block bb, bool insert_phi_p)
{
basic_block son;
- gimple_stmt_iterator si;
edge e;
edge_iterator ei;
/* Process PHI nodes marking interesting those that define or use
the symbols that we are interested in. */
- for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
- gimple phi = gsi_stmt (si);
+ gphi *phi = si.phi ();
tree lhs_sym, lhs = gimple_phi_result (phi);
if (TREE_CODE (lhs) == SSA_NAME
}
/* Process the statements. */
- for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
gimple stmt;
ssa_op_iter i;
if (gimple_code (stmt) == GIMPLE_PHI)
{
int ix = PHI_ARG_INDEX_FROM_USE (use_p);
- edge e = gimple_phi_arg_edge (stmt, ix);
+ edge e = gimple_phi_arg_edge (as_a <gphi *> (stmt), ix);
mark_use_interesting (name, stmt, e->src, insert_phi_p);
}
else
if (blocks_with_phis_to_rewrite)
EXECUTE_IF_SET_IN_BITMAP (blocks_with_phis_to_rewrite, 0, i, bi)
{
- gimple_vec phis = phis_to_rewrite[i];
+ vec<gphi *> phis = phis_to_rewrite[i];
phis.release ();
phis_to_rewrite[i].create (0);
}
removed. */
void
-mark_virtual_phi_result_for_renaming (gimple phi)
+mark_virtual_phi_result_for_renaming (gphi *phi)
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
tree create_new_def_for (tree, gimple, def_operand_p);
void mark_virtual_operands_for_renaming (struct function *);
void mark_virtual_operand_for_renaming (tree);
-void mark_virtual_phi_result_for_renaming (gimple);
+void mark_virtual_phi_result_for_renaming (gphi *);
bool need_ssa_update_p (struct function *);
bool name_registered_for_update_p (tree);
void release_ssa_name_after_update_ssa (tree);
for (i = 0; i < loop->num_nodes; i++)
{
basic_block bb = bbs[i];
- gimple_stmt_iterator bsi;
- gimple stmt;
- for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
- if (!virtual_operand_p (gimple_phi_result (gsi_stmt (bsi))))
- stmts->safe_push (gsi_stmt (bsi));
+ for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
+ gsi_next (&bsi))
+ if (!virtual_operand_p (gimple_phi_result (bsi.phi ())))
+ stmts->safe_push (bsi.phi ());
- for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
+ gsi_next (&bsi))
{
- stmt = gsi_stmt (bsi);
+ gimple stmt = gsi_stmt (bsi);
if (gimple_code (stmt) != GIMPLE_LABEL && !is_gimple_debug (stmt))
stmts->safe_push (stmt);
}
bool copy_p)
{
unsigned i;
- gimple_stmt_iterator bsi;
basic_block *bbs;
if (copy_p)
{
basic_block bb = bbs[i];
- for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
+ gsi_next (&bsi))
{
- gimple phi = gsi_stmt (bsi);
+ gphi *phi = bsi.phi ();
if (!virtual_operand_p (gimple_phi_result (phi))
&& !bitmap_bit_p (partition->stmts, gimple_uid (phi)))
reset_debug_uses (phi);
}
- for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
gimple stmt = gsi_stmt (bsi);
if (gimple_code (stmt) != GIMPLE_LABEL
{
basic_block bb = bbs[i];
- for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi);)
+ for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);)
{
- gimple phi = gsi_stmt (bsi);
+ gphi *phi = bsi.phi ();
if (!virtual_operand_p (gimple_phi_result (phi))
&& !bitmap_bit_p (partition->stmts, gimple_uid (phi)))
remove_phi_node (&bsi, true);
gsi_next (&bsi);
}
- for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi);)
+ for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);)
{
gimple stmt = gsi_stmt (bsi);
if (gimple_code (stmt) != GIMPLE_LABEL
{
/* Choose an arbitrary path through the empty CFG part
that this unnecessary control stmt controls. */
- if (gimple_code (stmt) == GIMPLE_COND)
+ if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
{
- gimple_cond_make_false (stmt);
+ gimple_cond_make_false (cond_stmt);
update_stmt (stmt);
}
else if (gimple_code (stmt) == GIMPLE_SWITCH)
{
+ gswitch *switch_stmt = as_a <gswitch *> (stmt);
gimple_switch_set_index
- (stmt, CASE_LOW (gimple_switch_label (stmt, 1)));
+ (switch_stmt, CASE_LOW (gimple_switch_label (switch_stmt, 1)));
update_stmt (stmt);
}
else
Make sure we replace all uses of virtual defs that will remain
outside of the loop with the bare symbol as delete_basic_block
will release them. */
- gimple_stmt_iterator gsi;
- for (gsi = gsi_start_phis (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bbs[i]); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
if (virtual_operand_p (gimple_phi_result (phi)))
mark_virtual_phi_result_for_renaming (phi);
}
- for (gsi = gsi_start_bb (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bbs[i]); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
tree vdef = gimple_vdef (stmt);
bbs = get_loop_body_in_dom_order (loop);
for (i = 0; i < loop->num_nodes; ++i)
{
- gimple_stmt_iterator gsi;
- for (gsi = gsi_start_phis (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bbs[i]);
+ !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
if (virtual_operand_p (gimple_phi_result (phi)))
continue;
/* Distribute stmts which have defs that are used outside of
continue;
work_list.safe_push (phi);
}
- for (gsi = gsi_start_bb (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bbs[i]);
+ !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
static tree
init_tmp_var_with_call (struct nesting_info *info, gimple_stmt_iterator *gsi,
- gimple call)
+ gcall *call)
{
tree t;
/* Invoke CALLBACK on a GIMPLE_OMP_FOR's init, cond, incr and pre-body. */
static void
-walk_gimple_omp_for (gimple for_stmt,
+walk_gimple_omp_for (gomp_for *for_stmt,
walk_stmt_fn callback_stmt, walk_tree_fn callback_op,
struct nesting_info *info)
{
case GIMPLE_OMP_FOR:
save_suppress = info->suppress_expansion;
convert_nonlocal_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
- walk_gimple_omp_for (stmt, convert_nonlocal_reference_stmt,
+ walk_gimple_omp_for (as_a <gomp_for *> (stmt),
+ convert_nonlocal_reference_stmt,
convert_nonlocal_reference_op, info);
walk_body (convert_nonlocal_reference_stmt,
convert_nonlocal_reference_op, info, gimple_omp_body_ptr (stmt));
OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TO;
OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
- gimple_omp_target_set_clauses (stmt, c);
+ gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
}
save_local_var_chain = info->new_local_var_chain;
break;
case GIMPLE_BIND:
- if (!optimize && gimple_bind_block (stmt))
- note_nonlocal_block_vlas (info, gimple_bind_block (stmt));
+ {
+ gbind *bind_stmt = as_a <gbind *> (stmt);
+ if (!optimize && gimple_bind_block (bind_stmt))
+ note_nonlocal_block_vlas (info, gimple_bind_block (bind_stmt));
- for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
+ for (tree var = gimple_bind_vars (bind_stmt); var; var = DECL_CHAIN (var))
if (TREE_CODE (var) == NAMELIST_DECL)
{
/* Adjust decls mentioned in NAMELIST_DECL. */
*handled_ops_p = false;
return NULL_TREE;
-
+ }
case GIMPLE_COND:
wi->val_only = true;
wi->is_lhs = false;
case GIMPLE_OMP_FOR:
save_suppress = info->suppress_expansion;
convert_local_omp_clauses (gimple_omp_for_clauses_ptr (stmt), wi);
- walk_gimple_omp_for (stmt, convert_local_reference_stmt,
+ walk_gimple_omp_for (as_a <gomp_for *> (stmt),
+ convert_local_reference_stmt,
convert_local_reference_op, info);
walk_body (convert_local_reference_stmt, convert_local_reference_op,
info, gimple_omp_body_ptr (stmt));
OMP_CLAUSE_MAP_KIND (c) = OMP_CLAUSE_MAP_TOFROM;
OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (info->frame_decl);
OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
- gimple_omp_target_set_clauses (stmt, c);
+ gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt), c);
}
save_local_var_chain = info->new_local_var_chain;
return NULL_TREE;
case GIMPLE_BIND:
- for (tree var = gimple_bind_vars (stmt); var; var = DECL_CHAIN (var))
+ for (tree var = gimple_bind_vars (as_a <gbind *> (stmt));
+ var;
+ var = DECL_CHAIN (var))
if (TREE_CODE (var) == NAMELIST_DECL)
{
/* Adjust decls mentioned in NAMELIST_DECL. */
{
struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
tree label, new_label, target_context, x, field;
- gimple call;
+ gcall *call;
gimple stmt = gsi_stmt (*gsi);
if (gimple_code (stmt) != GIMPLE_GOTO)
struct nesting_info *const info = (struct nesting_info *) wi->info;
tree label, new_label;
gimple_stmt_iterator tmp_gsi;
- gimple stmt = gsi_stmt (*gsi);
+ glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsi));
- if (gimple_code (stmt) != GIMPLE_LABEL)
+ if (!stmt)
{
*handled_ops_p = false;
return NULL_TREE;
struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
struct nesting_info *const info = (struct nesting_info *) wi->info, *i;
tree t = *tp, decl, target_context, x, builtin;
- gimple call;
+ gcall *call;
*walk_subtrees = 0;
switch (TREE_CODE (t))
target_context = decl_function_context (decl);
if (target_context && DECL_STATIC_CHAIN (decl))
{
- gimple_call_set_chain (stmt, get_static_chain (info, target_context,
- &wi->gsi));
+ gimple_call_set_chain (as_a <gcall *> (stmt),
+ get_static_chain (info, target_context,
+ &wi->gsi));
info->static_chain_added |= (1 << (info->context != target_context));
}
break;
= i ? OMP_CLAUSE_MAP_TO : OMP_CLAUSE_MAP_TOFROM;
OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
OMP_CLAUSE_CHAIN (c) = gimple_omp_target_clauses (stmt);
- gimple_omp_target_set_clauses (stmt, c);
+ gimple_omp_target_set_clauses (as_a <gomp_target *> (stmt),
+ c);
}
}
info->static_chain_added |= save_static_chain_added;
/* If we created initialization statements, insert them. */
if (stmt_list)
{
- gimple bind;
+ gbind *bind;
annotate_all_with_location (stmt_list, DECL_SOURCE_LOCATION (context));
- bind = gimple_seq_first_stmt (gimple_body (context));
+ bind = gimple_seq_first_stmt_as_a_bind (gimple_body (context));
gimple_seq_add_seq (&stmt_list, gimple_bind_body (bind));
gimple_bind_set_body (bind, stmt_list);
}
if (root->debug_var_chain)
{
tree debug_var;
- gimple scope;
+ gbind *scope;
remap_vla_decls (DECL_INITIAL (root->context), root);
delete id.cb.decl_map;
}
- scope = gimple_seq_first_stmt (gimple_body (root->context));
+ scope = gimple_seq_first_stmt_as_a_bind (gimple_body (root->context));
if (gimple_bind_block (scope))
declare_vars (root->debug_var_chain, scope, true);
else
gimple stmt = gsi_stmt (gsi);
tree ret_val;
- if (gimple_code (stmt) == GIMPLE_RETURN)
+ if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
/* In a function with an aggregate return value, the
gimplifier has changed all non-empty RETURN_EXPRs to
return the RESULT_DECL. */
- ret_val = gimple_return_retval (stmt);
+ ret_val = gimple_return_retval (return_stmt);
if (ret_val)
gcc_assert (ret_val == result);
}
DEST is available if it is not clobbered or used by the call. */
static bool
-dest_safe_for_nrv_p (gimple call)
+dest_safe_for_nrv_p (gcall *call)
{
tree dest = gimple_call_lhs (call);
gimple_stmt_iterator gsi;
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gcall *stmt;
bool slot_opt_p;
- if (is_gimple_call (stmt)
+ stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
+ if (stmt
&& gimple_call_lhs (stmt)
&& !gimple_call_return_slot_opt_p (stmt)
&& aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
static tree compute_object_offset (const_tree, const_tree);
static unsigned HOST_WIDE_INT addr_object_size (struct object_size_info *,
const_tree, int);
-static unsigned HOST_WIDE_INT alloc_object_size (const_gimple, int);
-static tree pass_through_call (const_gimple);
+static unsigned HOST_WIDE_INT alloc_object_size (const gcall *, int);
+static tree pass_through_call (const gcall *);
static void collect_object_sizes_for (struct object_size_info *, tree);
static void expr_object_size (struct object_size_info *, tree, tree);
static bool merge_object_sizes (struct object_size_info *, tree, tree,
unknown[object_size_type]. */
static unsigned HOST_WIDE_INT
-alloc_object_size (const_gimple call, int object_size_type)
+alloc_object_size (const gcall *call, int object_size_type)
{
tree callee, bytes = NULL_TREE;
tree alloc_size;
Otherwise return NULL. */
static tree
-pass_through_call (const_gimple call)
+pass_through_call (const gcall *call)
{
tree callee = gimple_call_fndecl (call);
/* Compute object_sizes for PTR, defined to the result of a call. */
static void
-call_object_size (struct object_size_info *osi, tree ptr, gimple call)
+call_object_size (struct object_size_info *osi, tree ptr, gcall *call)
{
int object_size_type = osi->object_size_type;
unsigned int varno = SSA_NAME_VERSION (ptr);
case GIMPLE_CALL:
{
- tree arg = pass_through_call (stmt);
+ gcall *call_stmt = as_a <gcall *> (stmt);
+ tree arg = pass_through_call (call_stmt);
if (arg)
{
if (TREE_CODE (arg) == SSA_NAME
expr_object_size (osi, var, arg);
}
else
- call_object_size (osi, var, stmt);
+ call_object_size (osi, var, call_stmt);
break;
}
case GIMPLE_CALL:
{
- tree arg = pass_through_call (stmt);
+ gcall *call_stmt = as_a <gcall *> (stmt);
+ tree arg = pass_through_call (call_stmt);
if (arg)
{
if (TREE_CODE (arg) == SSA_NAME)
continue;
init_object_sizes ();
- result = fold_call_stmt (call, false);
+ result = fold_call_stmt (as_a <gcall *> (call), false);
if (!result)
{
if (gimple_call_num_args (call) == 2
{
tree Ti;
int p0, pi;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
clear_elim_graph (g);
for (gsi = gsi_start_phis (g->e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
source_location locus;
p0 = var_to_partition (g->map, gimple_phi_result (phi));
check to see if this allows another PHI node to be removed. */
static void
-remove_gimple_phi_args (gimple phi)
+remove_gimple_phi_args (gphi *phi)
{
use_operand_p arg_p;
ssa_op_iter iter;
/* Also remove the def if it is a PHI node. */
if (gimple_code (stmt) == GIMPLE_PHI)
{
- remove_gimple_phi_args (stmt);
+ remove_gimple_phi_args (as_a <gphi *> (stmt));
gsi = gsi_for_stmt (stmt);
remove_phi_node (&gsi, true);
}
eliminate_useless_phis (void)
{
basic_block bb;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
tree result;
FOR_EACH_BB_FN (bb, cfun)
{
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
result = gimple_phi_result (phi);
if (virtual_operand_p (result))
{
create incorrect code. */
FOR_EACH_BB_FN (bb, cfun)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree T0 = var_to_partition_to_var (map, gimple_phi_result (phi));
if (T0 == NULL_TREE)
{
insert_backedge_copies (void)
{
basic_block bb;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
mark_dfs_back_edges ();
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree result = gimple_phi_result (phi);
size_t i;
|| trivially_conflicts_p (bb, result, arg)))
{
tree name;
- gimple stmt, last = NULL;
+ gassign *stmt;
+ gimple last = NULL;
gimple_stmt_iterator gsi2;
gsi2 = gsi_last_bb (gimple_phi_arg_edge (phi, i)->src);
enum tree_code reduction_code;/* code for the reduction operation. */
unsigned reduc_version; /* SSA_NAME_VERSION of original reduc_phi
result. */
- gimple keep_res; /* The PHI_RESULT of this phi is the resulting value
+ gphi *keep_res; /* The PHI_RESULT of this phi is the resulting value
of the reduction variable when existing the loop. */
tree initial_value; /* The initial value of the reduction var before entering the loop. */
tree field; /* the name of the field in the parloop data structure intended for reduction. */
tree init; /* reduction initialization value. */
- gimple new_phi; /* (helper field) Newly created phi node whose result
+ gphi *new_phi; /* (helper field) Newly created phi node whose result
will be passed to the atomic operation. Represents
the local result each thread computed for the reduction
operation. */
{
int uid;
tree *var_p, name, addr;
- gimple stmt;
+ gassign *stmt;
gimple_seq stmts;
/* Since the address of OBJ is invariant, the trees may be shared.
{
struct reduction_info *const reduc = *slot;
edge e;
- gimple new_phi;
+ gphi *new_phi;
basic_block store_bb;
tree local_res;
source_location locus;
bool ok;
edge exit = single_dom_exit (loop), hpred;
tree control, control_name, res, t;
- gimple phi, nphi, cond_stmt, stmt, cond_nit;
- gimple_stmt_iterator gsi;
+ gphi *phi, *nphi;
+ gassign *stmt;
+ gcond *cond_stmt, *cond_nit;
tree nit_1;
split_block_after_labels (loop->header);
orig_header = single_succ (loop->header);
hpred = single_succ_edge (loop->header);
- cond_stmt = last_stmt (exit->src);
+ cond_stmt = as_a <gcond *> (last_stmt (exit->src));
control = gimple_cond_lhs (cond_stmt);
gcc_assert (gimple_cond_rhs (cond_stmt) == nit);
/* Make sure that we have phi nodes on exit for all loop header phis
(create_parallel_loop requires that). */
- for (gsi = gsi_start_phis (loop->header); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (loop->header);
+ !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- phi = gsi_stmt (gsi);
+ phi = gsi.phi ();
res = PHI_RESULT (phi);
t = copy_ssa_name (res, phi);
SET_PHI_RESULT (phi, t);
out of the loop is the control variable. */
exit = single_dom_exit (loop);
control_name = NULL_TREE;
- for (gsi = gsi_start_phis (ex_bb); !gsi_end_p (gsi); )
+ for (gphi_iterator gsi = gsi_start_phis (ex_bb);
+ !gsi_end_p (gsi); )
{
- phi = gsi_stmt (gsi);
+ phi = gsi.phi ();
res = PHI_RESULT (phi);
if (virtual_operand_p (res))
{
/* Initialize the control variable to number of iterations
according to the rhs of the exit condition. */
- gsi = gsi_after_labels (ex_bb);
- cond_nit = last_stmt (exit->src);
+ gimple_stmt_iterator gsi = gsi_after_labels (ex_bb);
+ cond_nit = as_a <gcond *> (last_stmt (exit->src));
nit_1 = gimple_cond_rhs (cond_nit);
nit_1 = force_gimple_operand_gsi (&gsi,
fold_convert (TREE_TYPE (control_name), nit_1),
gimple_stmt_iterator gsi;
basic_block bb, paral_bb, for_bb, ex_bb;
tree t, param;
- gimple stmt, for_stmt, phi, cond_stmt;
+ gomp_parallel *omp_par_stmt;
+ gimple omp_return_stmt1, omp_return_stmt2;
+ gimple phi;
+ gcond *cond_stmt;
+ gomp_for *for_stmt;
+ gomp_continue *omp_cont_stmt;
tree cvar, cvar_init, initvar, cvar_next, cvar_base, type;
edge exit, nexit, guard, end, e;
t = build_omp_clause (loc, OMP_CLAUSE_NUM_THREADS);
OMP_CLAUSE_NUM_THREADS_EXPR (t)
= build_int_cst (integer_type_node, n_threads);
- stmt = gimple_build_omp_parallel (NULL, t, loop_fn, data);
- gimple_set_location (stmt, loc);
+ omp_par_stmt = gimple_build_omp_parallel (NULL, t, loop_fn, data);
+ gimple_set_location (omp_par_stmt, loc);
- gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
+ gsi_insert_after (&gsi, omp_par_stmt, GSI_NEW_STMT);
/* Initialize NEW_DATA. */
if (data)
{
+ gassign *assign_stmt;
+
gsi = gsi_after_labels (bb);
param = make_ssa_name (DECL_ARGUMENTS (loop_fn), NULL);
- stmt = gimple_build_assign (param, build_fold_addr_expr (data));
- gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+ assign_stmt = gimple_build_assign (param, build_fold_addr_expr (data));
+ gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
- stmt = gimple_build_assign (new_data,
+ assign_stmt = gimple_build_assign (new_data,
fold_convert (TREE_TYPE (new_data), param));
- gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
+ gsi_insert_before (&gsi, assign_stmt, GSI_SAME_STMT);
}
/* Emit GIMPLE_OMP_RETURN for GIMPLE_OMP_PARALLEL. */
bb = split_loop_exit_edge (single_dom_exit (loop));
gsi = gsi_last_bb (bb);
- stmt = gimple_build_omp_return (false);
- gimple_set_location (stmt, loc);
- gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
+ omp_return_stmt1 = gimple_build_omp_return (false);
+ gimple_set_location (omp_return_stmt1, loc);
+ gsi_insert_after (&gsi, omp_return_stmt1, GSI_NEW_STMT);
/* Extract data for GIMPLE_OMP_FOR. */
gcc_assert (loop->header == single_dom_exit (loop)->src);
- cond_stmt = last_stmt (loop->header);
+ cond_stmt = as_a <gcond *> (last_stmt (loop->header));
cvar = gimple_cond_lhs (cond_stmt);
cvar_base = SSA_NAME_VAR (cvar);
guard = make_edge (for_bb, ex_bb, 0);
single_succ_edge (loop->latch)->flags = 0;
end = make_edge (loop->latch, ex_bb, EDGE_FALLTHRU);
- for (gsi = gsi_start_phis (ex_bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gpi = gsi_start_phis (ex_bb);
+ !gsi_end_p (gpi); gsi_next (&gpi))
{
source_location locus;
tree def;
- phi = gsi_stmt (gsi);
- stmt = SSA_NAME_DEF_STMT (PHI_ARG_DEF_FROM_EDGE (phi, exit));
+ gphi *phi = gpi.phi ();
+ gphi *stmt;
+
+ stmt = as_a <gphi *> (
+ SSA_NAME_DEF_STMT (PHI_ARG_DEF_FROM_EDGE (phi, exit)));
def = PHI_ARG_DEF_FROM_EDGE (stmt, loop_preheader_edge (loop));
locus = gimple_phi_arg_location_from_edge (stmt,
/* Emit GIMPLE_OMP_CONTINUE. */
gsi = gsi_last_bb (loop->latch);
- stmt = gimple_build_omp_continue (cvar_next, cvar);
- gimple_set_location (stmt, loc);
- gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
- SSA_NAME_DEF_STMT (cvar_next) = stmt;
+ omp_cont_stmt = gimple_build_omp_continue (cvar_next, cvar);
+ gimple_set_location (omp_cont_stmt, loc);
+ gsi_insert_after (&gsi, omp_cont_stmt, GSI_NEW_STMT);
+ SSA_NAME_DEF_STMT (cvar_next) = omp_cont_stmt;
/* Emit GIMPLE_OMP_RETURN for GIMPLE_OMP_FOR. */
gsi = gsi_last_bb (ex_bb);
- stmt = gimple_build_omp_return (true);
- gimple_set_location (stmt, loc);
- gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
+ omp_return_stmt2 = gimple_build_omp_return (true);
+ gimple_set_location (omp_return_stmt2, loc);
+ gsi_insert_after (&gsi, omp_return_stmt2, GSI_NEW_STMT);
/* After the above dom info is hosed. Re-compute it. */
free_dominance_info (CDI_DOMINATORS);
{
unsigned i;
basic_block *bbs = get_loop_body_in_dom_order (loop);
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
bool res = true;
for (i = 0; i < loop->num_nodes; i++)
for (gsi = gsi_start_phis (bbs[i]); !gsi_end_p (gsi); gsi_next (&gsi))
- if (TREE_CODE (TREE_TYPE (PHI_RESULT (gsi_stmt (gsi)))) == VECTOR_TYPE)
+ if (TREE_CODE (TREE_TYPE (PHI_RESULT (gsi.phi ()))) == VECTOR_TYPE)
goto end;
res = false;
static void
build_new_reduction (reduction_info_table_type *reduction_list,
- gimple reduc_stmt, gimple phi)
+ gimple reduc_stmt, gphi *phi)
{
reduction_info **slot;
struct reduction_info *new_reduction;
static void
gather_scalar_reductions (loop_p loop, reduction_info_table_type *reduction_list)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
loop_vec_info simple_loop_info;
simple_loop_info = vect_analyze_loop_form (loop);
for (gsi = gsi_start_phis (loop->header); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
affine_iv iv;
tree res = PHI_RESULT (phi);
bool double_reduc;
reduction_info_table_type *reduction_list)
{
edge exit = single_dom_exit (loop);
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
gcc_assert (exit);
for (gsi = gsi_start_phis (exit->dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
struct reduction_info *red;
imm_use_iterator imm_iter;
use_operand_p use_p;
iteration space can be distributed efficiently. */
for (gsi = gsi_start_phis (loop->header); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree def = PHI_RESULT (phi);
affine_iv iv;
happens to contain a PHI node with LEN arguments or more, return
that one. */
-static inline gimple_statement_phi *
+static inline gphi *
allocate_phi_node (size_t len)
{
- gimple_statement_phi *phi;
+ gphi *phi;
size_t bucket = NUM_BUCKETS - 2;
- size_t size = sizeof (struct gimple_statement_phi)
+ size_t size = sizeof (struct gphi)
+ (len - 1) * sizeof (struct phi_arg_d);
if (free_phinode_count)
&& gimple_phi_capacity ((*free_phinodes[bucket])[0]) >= len)
{
free_phinode_count--;
- phi = as_a <gimple_statement_phi *> (free_phinodes[bucket]->pop ());
+ phi = as_a <gphi *> (free_phinodes[bucket]->pop ());
if (free_phinodes[bucket]->is_empty ())
vec_free (free_phinodes[bucket]);
if (GATHER_STATISTICS)
}
else
{
- phi = static_cast <gimple_statement_phi *> (
- ggc_internal_alloc (size));
+ phi = static_cast <gphi *> (ggc_internal_alloc (size));
if (GATHER_STATISTICS)
{
enum gimple_alloc_kind kind = gimple_alloc_kind (GIMPLE_PHI);
len = 2;
/* Compute the number of bytes of the original request. */
- size = sizeof (struct gimple_statement_phi)
+ size = sizeof (struct gphi)
+ (len - 1) * sizeof (struct phi_arg_d);
/* Round it up to the next power of two. */
/* Return a PHI node with LEN argument slots for variable VAR. */
-static gimple
+static gphi *
make_phi_node (tree var, int len)
{
- gimple_statement_phi *phi;
+ gphi *phi;
int capacity, i;
capacity = ideal_phi_node_len (len);
/* We need to clear the entire PHI node, including the argument
portion, because we represent a "missing PHI argument" by placing
NULL_TREE in PHI_ARG_DEF. */
- memset (phi, 0, (sizeof (struct gimple_statement_phi)
+ memset (phi, 0, (sizeof (struct gphi)
- sizeof (struct phi_arg_d)
+ sizeof (struct phi_arg_d) * len));
phi->code = GIMPLE_PHI;
/* Resize an existing PHI node. The only way is up. Return the
possibly relocated phi. */
-static gimple_statement_phi *
-resize_phi_node (gimple_statement_phi *phi, size_t len)
+static gphi *
+resize_phi_node (gphi *phi, size_t len)
{
size_t old_size, i;
- gimple_statement_phi *new_phi;
+ gphi *new_phi;
gcc_assert (len > gimple_phi_capacity (phi));
/* The garbage collector will not look at the PHI node beyond the
first PHI_NUM_ARGS elements. Therefore, all we have to copy is a
portion of the PHI node currently in use. */
- old_size = sizeof (struct gimple_statement_phi)
+ old_size = sizeof (struct gphi)
+ (gimple_phi_num_args (phi) - 1) * sizeof (struct phi_arg_d);
new_phi = allocate_phi_node (len);
{
size_t len = EDGE_COUNT (bb->preds);
size_t cap = ideal_phi_node_len (len + 4);
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple_statement_phi *stmt =
- as_a <gimple_statement_phi *> (gsi_stmt (gsi));
+ gphi *stmt = gsi.phi ();
if (len > gimple_phi_capacity (stmt))
{
- gimple_statement_phi *new_phi = resize_phi_node (stmt, cap);
+ gphi *new_phi = resize_phi_node (stmt, cap);
/* The result of the PHI is defined by this PHI node. */
SSA_NAME_DEF_STMT (gimple_phi_result (new_phi)) = new_phi;
/* Adds PHI to BB. */
void
-add_phi_node_to_bb (gimple phi, basic_block bb)
+add_phi_node_to_bb (gphi *phi, basic_block bb)
{
gimple_seq seq = phi_nodes (bb);
/* Add the new PHI node to the list of PHI nodes for block BB. */
/* Create a new PHI node for variable VAR at basic block BB. */
-gimple
+gphi *
create_phi_node (tree var, basic_block bb)
{
- gimple phi = make_phi_node (var, EDGE_COUNT (bb->preds));
+ gphi *phi = make_phi_node (var, EDGE_COUNT (bb->preds));
add_phi_node_to_bb (phi, bb);
return phi;
PHI points to the reallocated phi node when we return. */
void
-add_phi_arg (gimple phi, tree def, edge e, source_location locus)
+add_phi_arg (gphi *phi, tree def, edge e, source_location locus)
{
basic_block bb = e->dest;
is consistent with how we remove an edge from the edge vector. */
static void
-remove_phi_arg_num (gimple_statement_phi *phi, int i)
+remove_phi_arg_num (gphi *phi, int i)
{
int num_elem = gimple_phi_num_args (phi);
void
remove_phi_args (edge e)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
- remove_phi_arg_num (as_a <gimple_statement_phi *> (gsi_stmt (gsi)),
+ remove_phi_arg_num (gsi.phi (),
e->dest_idx);
}
void
remove_phi_nodes (basic_block bb)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
remove_phi_node (&gsi, true);
NULL. */
tree
-degenerate_phi_result (gimple phi)
+degenerate_phi_result (gphi *phi)
{
tree lhs = gimple_phi_result (phi);
tree val = NULL;
extern void phinodes_print_statistics (void);
extern void release_phi_node (gimple);
extern void reserve_phi_args_for_new_edge (basic_block);
-extern void add_phi_node_to_bb (gimple phi, basic_block bb);
-extern gimple create_phi_node (tree, basic_block);
-extern void add_phi_arg (gimple, tree, edge, source_location);
+extern void add_phi_node_to_bb (gphi *phi, basic_block bb);
+extern gphi *create_phi_node (tree, basic_block);
+extern void add_phi_arg (gphi *, tree, edge, source_location);
extern void remove_phi_args (edge);
extern void remove_phi_node (gimple_stmt_iterator *, bool);
extern void remove_phi_nodes (basic_block);
-extern tree degenerate_phi_result (gimple);
+extern tree degenerate_phi_result (gphi *);
extern void set_phi_nodes (basic_block, gimple_seq);
static inline use_operand_p
iteration), returns the phi node. Otherwise, NULL_TREE is returned. ROOT
is the root of the current chain. */
-static gimple
+static gphi *
find_looparound_phi (struct loop *loop, dref ref, dref root)
{
tree name, init, init_ref;
- gimple phi = NULL, init_stmt;
+ gphi *phi = NULL;
+ gimple init_stmt;
edge latch = loop_latch_edge (loop);
struct data_reference init_dr;
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
if (is_gimple_assign (ref->stmt))
{
for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
{
- phi = gsi_stmt (psi);
+ phi = psi.phi ();
if (PHI_ARG_DEF_FROM_EDGE (phi, latch) == name)
break;
}
/* Adds a reference for the looparound copy of REF in PHI to CHAIN. */
static void
-insert_looparound_copy (chain_p chain, dref ref, gimple phi)
+insert_looparound_copy (chain_p chain, dref ref, gphi *phi)
{
dref nw = XCNEW (struct dref_d), aref;
unsigned i;
{
unsigned i;
dref ref, root = get_chain_root (chain);
- gimple phi;
+ gphi *phi;
FOR_EACH_VEC_ELT (chain->refs, i, ref)
{
replace_ref_with (gimple stmt, tree new_tree, bool set, bool in_lhs)
{
tree val;
- gimple new_stmt;
+ gassign *new_stmt;
gimple_stmt_iterator bsi, psi;
if (gimple_code (stmt) == GIMPLE_PHI)
dref root = get_chain_root (chain);
bool reuse_first = !chain->has_max_use_after;
tree ref, init, var, next;
- gimple phi;
+ gphi *phi;
gimple_seq stmts;
edge entry = loop_preheader_edge (loop), latch = loop_latch_edge (loop);
unsigned i;
tree ref = DR_REF (root->ref), init, var, next;
gimple_seq stmts;
- gimple phi;
+ gphi *phi;
edge entry = loop_preheader_edge (loop), latch = loop_latch_edge (loop);
/* Find the initializer for the variable, and check that it cannot
}
else
{
- gimple init_stmt = gimple_build_assign (var, init);
+ gassign *init_stmt = gimple_build_assign (var, init);
gsi_insert_on_edge_immediate (entry, init_stmt);
}
}
eliminate_temp_copies (struct loop *loop, bitmap tmp_vars)
{
edge e;
- gimple phi, stmt;
+ gphi *phi;
+ gimple stmt;
tree name, use, var;
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
e = loop_latch_edge (loop);
for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
{
- phi = gsi_stmt (psi);
+ phi = psi.phi ();
name = PHI_RESULT (phi);
var = SSA_NAME_VAR (name);
if (!var || !bitmap_bit_p (tmp_vars, DECL_UID (var)))
reassociate_to_the_same_stmt (tree name1, tree name2)
{
gimple stmt1, stmt2, root1, root2, s1, s2;
- gimple new_stmt, tmp_stmt;
+ gassign *new_stmt, *tmp_stmt;
tree new_name, tmp_name, var, r1, r2;
unsigned dist1, dist2;
enum tree_code code;
gimple_gen_edge_profiler (int edgeno, edge e)
{
tree ref, one, gcov_type_tmp_var;
- gimple stmt1, stmt2, stmt3;
+ gassign *stmt1, *stmt2, *stmt3;
ref = tree_coverage_counter_ref (GCOV_COUNTER_ARCS, edgeno);
one = build_int_cst (gcov_type_node, 1);
gimple stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref = tree_coverage_counter_ref (tag, base), ref_ptr;
- gimple call;
+ gcall *call;
tree val;
tree start = build_int_cst_type (integer_type_node,
value->hdata.intvl.int_start);
gimple stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
- gimple call;
+ gcall *call;
tree val;
ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
gimple stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
- gimple call;
+ gcall *call;
tree val;
ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
gimple_gen_ic_profiler (histogram_value value, unsigned tag, unsigned base)
{
tree tmp1;
- gimple stmt1, stmt2, stmt3;
+ gassign *stmt1, *stmt2, *stmt3;
gimple stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
{
struct cgraph_node * c_node = cgraph_node::get (current_function_decl);
gimple_stmt_iterator gsi;
- gimple stmt1, stmt2;
+ gcall *stmt1;
+ gassign *stmt2;
tree tree_uid, cur_func, void0;
if (c_node->only_called_directly_p ())
gimple_stmt_iterator &gsi)
{
tree ref_ptr = tree_coverage_counter_addr (tag, base);
- gimple call;
+ gcall *call;
ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
true, NULL_TREE, true, GSI_SAME_STMT);
gimple stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
- gimple call;
+ gcall *call;
tree val;
ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
gimple stmt = value->hvalue.stmt;
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
tree ref_ptr = tree_coverage_counter_addr (tag, base);
- gimple call;
+ gcall *call;
tree val;
ref_ptr = force_gimple_operand_gsi (&gsi, ref_ptr,
guards the exit edge. If the expression is too difficult to
analyze, then give up. */
-gimple
+gcond *
get_loop_exit_condition (const struct loop *loop)
{
- gimple res = NULL;
+ gcond *res = NULL;
edge exit_edge = single_exit (loop);
if (dump_file && (dump_flags & TDF_SCEV))
gimple stmt;
stmt = last_stmt (exit_edge->src);
- if (gimple_code (stmt) == GIMPLE_COND)
- res = stmt;
+ if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
+ res = cond_stmt;
}
if (dump_file && (dump_flags & TDF_SCEV))
} t_bool;
-static t_bool follow_ssa_edge (struct loop *loop, gimple, gimple, tree *, int);
+static t_bool follow_ssa_edge (struct loop *loop, gimple, gphi *,
+ tree *, int);
/* Follow the ssa edge into the binary expression RHS0 CODE RHS1.
Return true if the strongly connected component has been found. */
static t_bool
follow_ssa_edge_binary (struct loop *loop, gimple at_stmt,
tree type, tree rhs0, enum tree_code code, tree rhs1,
- gimple halting_phi, tree *evolution_of_loop, int limit)
+ gphi *halting_phi, tree *evolution_of_loop,
+ int limit)
{
t_bool res = t_false;
tree evol;
static t_bool
follow_ssa_edge_expr (struct loop *loop, gimple at_stmt, tree expr,
- gimple halting_phi, tree *evolution_of_loop, int limit)
+ gphi *halting_phi, tree *evolution_of_loop,
+ int limit)
{
enum tree_code code = TREE_CODE (expr);
tree type = TREE_TYPE (expr), rhs0, rhs1;
static t_bool
follow_ssa_edge_in_rhs (struct loop *loop, gimple stmt,
- gimple halting_phi, tree *evolution_of_loop, int limit)
+ gphi *halting_phi, tree *evolution_of_loop,
+ int limit)
{
enum tree_code code = gimple_assign_rhs_code (stmt);
tree type = gimple_expr_type (stmt), rhs1, rhs2;
/* Checks whether the I-th argument of a PHI comes from a backedge. */
static bool
-backedge_phi_arg_p (gimple phi, int i)
+backedge_phi_arg_p (gphi *phi, int i)
{
const_edge e = gimple_phi_arg_edge (phi, i);
static inline t_bool
follow_ssa_edge_in_condition_phi_branch (int i,
struct loop *loop,
- gimple condition_phi,
- gimple halting_phi,
+ gphi *condition_phi,
+ gphi *halting_phi,
tree *evolution_of_branch,
tree init_cond, int limit)
{
static t_bool
follow_ssa_edge_in_condition_phi (struct loop *loop,
- gimple condition_phi,
- gimple halting_phi,
+ gphi *condition_phi,
+ gphi *halting_phi,
tree *evolution_of_loop, int limit)
{
int i, n;
static t_bool
follow_ssa_edge_inner_loop_phi (struct loop *outer_loop,
- gimple loop_phi_node,
- gimple halting_phi,
+ gphi *loop_phi_node,
+ gphi *halting_phi,
tree *evolution_of_loop, int limit)
{
struct loop *loop = loop_containing_stmt (loop_phi_node);
path that is analyzed on the return walk. */
static t_bool
-follow_ssa_edge (struct loop *loop, gimple def, gimple halting_phi,
+follow_ssa_edge (struct loop *loop, gimple def, gphi *halting_phi,
tree *evolution_of_loop, int limit)
{
struct loop *def_loop;
information and set the approximation to the main
variable. */
return follow_ssa_edge_in_condition_phi
- (loop, def, halting_phi, evolution_of_loop, limit);
+ (loop, as_a <gphi *> (def), halting_phi, evolution_of_loop,
+ limit);
/* When the analyzed phi is the halting_phi, the
depth-first search is over: we have found a path from
/* Inner loop. */
if (flow_loop_nested_p (loop, def_loop))
return follow_ssa_edge_inner_loop_phi
- (loop, def, halting_phi, evolution_of_loop, limit + 1);
+ (loop, as_a <gphi *> (def), halting_phi, evolution_of_loop,
+ limit + 1);
/* Outer loop. */
return t_false;
function from LOOP_PHI_NODE to LOOP_PHI_NODE in the loop. */
static tree
-analyze_evolution_in_loop (gimple loop_phi_node,
+analyze_evolution_in_loop (gphi *loop_phi_node,
tree init_cond)
{
int i, n = gimple_phi_num_args (loop_phi_node);
loop, and leaves this task to the on-demand tree reconstructor. */
static tree
-analyze_initial_condition (gimple loop_phi_node)
+analyze_initial_condition (gphi *loop_phi_node)
{
int i, n;
tree init_cond = chrec_not_analyzed_yet;
if (TREE_CODE (init_cond) == SSA_NAME)
{
gimple def = SSA_NAME_DEF_STMT (init_cond);
- tree res;
- if (gimple_code (def) == GIMPLE_PHI
- && (res = degenerate_phi_result (def)) != NULL_TREE
- /* Only allow invariants here, otherwise we may break
- loop-closed SSA form. */
- && is_gimple_min_invariant (res))
- init_cond = res;
+ if (gphi *phi = dyn_cast <gphi *> (def))
+ {
+ tree res = degenerate_phi_result (phi);
+ if (res != NULL_TREE
+ /* Only allow invariants here, otherwise we may break
+ loop-closed SSA form. */
+ && is_gimple_min_invariant (res))
+ init_cond = res;
+ }
}
if (dump_file && (dump_flags & TDF_SCEV))
/* Analyze the scalar evolution for LOOP_PHI_NODE. */
static tree
-interpret_loop_phi (struct loop *loop, gimple loop_phi_node)
+interpret_loop_phi (struct loop *loop, gphi *loop_phi_node)
{
tree res;
struct loop *phi_loop = loop_containing_stmt (loop_phi_node);
analyzed. */
static tree
-interpret_condition_phi (struct loop *loop, gimple condition_phi)
+interpret_condition_phi (struct loop *loop, gphi *condition_phi)
{
int i, n = gimple_phi_num_args (condition_phi);
tree res = chrec_not_analyzed_yet;
case GIMPLE_PHI:
if (loop_phi_node_p (def))
- res = interpret_loop_phi (loop, def);
+ res = interpret_loop_phi (loop, as_a <gphi *> (def));
else
- res = interpret_condition_phi (loop, def);
+ res = interpret_condition_phi (loop, as_a <gphi *> (def));
break;
default:
{
struct loop *loop;
edge exit;
- gimple phi;
- gimple_stmt_iterator psi;
+ gphi *phi;
+ gphi_iterator psi;
if (var == NULL_TREE
|| TREE_CODE (var) != SSA_NAME)
for (psi = gsi_start_phis (exit->dest); !gsi_end_p (psi); gsi_next (&psi))
{
- phi = gsi_stmt (psi);
+ phi = psi.phi ();
if (PHI_ARG_DEF_FROM_EDGE (phi, exit) == var)
return PHI_RESULT (phi);
}
{
basic_block bb;
tree name, type, ev;
- gimple phi, ass;
+ gphi *phi;
+ gassign *ass;
struct loop *loop, *ex_loop;
bitmap ssa_names_to_remove = NULL;
unsigned i;
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
if (number_of_loops (cfun) <= 1)
return 0;
for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
{
- phi = gsi_stmt (psi);
+ phi = psi.phi ();
name = PHI_RESULT (phi);
if (virtual_operand_p (name))
{
gimple_stmt_iterator psi;
name = ssa_name (i);
- phi = SSA_NAME_DEF_STMT (name);
+ phi = as_a <gphi *> (SSA_NAME_DEF_STMT (name));
gcc_assert (gimple_code (phi) == GIMPLE_PHI);
psi = gsi_for_stmt (phi);
for (psi = gsi_start_phis (exit->dest); !gsi_end_p (psi); )
{
- phi = gsi_stmt (psi);
+ phi = psi.phi ();
rslt = PHI_RESULT (phi);
def = PHI_ARG_DEF_FROM_EDGE (phi, exit);
if (virtual_operand_p (def))
#define GCC_TREE_SCALAR_EVOLUTION_H
extern tree number_of_latch_executions (struct loop *);
-extern gimple get_loop_exit_condition (const struct loop *);
+extern gcond *get_loop_exit_condition (const struct loop *);
extern void scev_initialize (void);
extern bool scev_initialized_p (void);
switch (gimple_code (stmt))
{
case GIMPLE_RETURN:
- t = gimple_return_retval (stmt);
+ t = gimple_return_retval (as_a <greturn *> (stmt));
if (t != NULL_TREE)
ret |= build_access_from_expr (t, stmt, false);
if (final_bbs)
break;
case GIMPLE_ASM:
- walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
- asm_visit_addr);
- if (final_bbs)
- bitmap_set_bit (final_bbs, bb->index);
+ {
+ gasm *asm_stmt = as_a <gasm *> (stmt);
+ walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
+ asm_visit_addr);
+ if (final_bbs)
+ bitmap_set_bit (final_bbs, bb->index);
- for (i = 0; i < gimple_asm_ninputs (stmt); i++)
- {
- t = TREE_VALUE (gimple_asm_input_op (stmt, i));
- ret |= build_access_from_expr (t, stmt, false);
- }
- for (i = 0; i < gimple_asm_noutputs (stmt); i++)
- {
- t = TREE_VALUE (gimple_asm_output_op (stmt, i));
- ret |= build_access_from_expr (t, stmt, true);
- }
+ for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
+ {
+ t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
+ ret |= build_access_from_expr (t, asm_stmt, false);
+ }
+ for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
+ {
+ t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
+ ret |= build_access_from_expr (t, asm_stmt, true);
+ }
+ }
break;
default:
offset such as array[var_index]. */
if (!base)
{
- gimple stmt;
+ gassign *stmt;
tree tmp, addr;
gcc_checking_assert (gsi);
|| access->offset + access->size > start_offset))
{
tree expr, repl = get_access_replacement (access);
- gimple stmt;
+ gassign *stmt;
expr = build_ref_for_model (loc, agg, access->offset - top_offset,
access, gsi, insert_after);
&& (chunk_size == 0
|| access->offset + access->size > start_offset))
{
- gimple ds;
+ gdebug *ds;
tree drhs = build_debug_ref_for_model (loc, agg,
access->offset - top_offset,
access);
if (access->grp_to_be_replaced)
{
- gimple stmt;
+ gassign *stmt;
stmt = gimple_build_assign (get_access_replacement (access),
build_zero_cst (access->type));
}
else if (access->grp_to_be_debug_replaced)
{
- gimple ds = gimple_build_debug_bind (get_access_replacement (access),
- build_zero_cst (access->type),
- gsi_stmt (*gsi));
+ gdebug *ds
+ = gimple_build_debug_bind (get_access_replacement (access),
+ build_zero_cst (access->type),
+ gsi_stmt (*gsi));
if (insert_after)
gsi_insert_after (gsi, ds, GSI_NEW_STMT);
else
if (write)
{
- gimple stmt;
+ gassign *stmt;
if (access->grp_partial_lhs)
ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
}
else
{
- gimple stmt;
+ gassign *stmt;
if (access->grp_partial_lhs)
repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
}
else if (write && access->grp_to_be_debug_replaced)
{
- gimple ds = gimple_build_debug_bind (get_access_replacement (access),
- NULL_TREE,
- gsi_stmt (*gsi));
+ gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
+ NULL_TREE,
+ gsi_stmt (*gsi));
gsi_insert_after (gsi, ds, GSI_NEW_STMT);
}
if (lacc->grp_to_be_replaced)
{
struct access *racc;
- gimple stmt;
+ gassign *stmt;
tree rhs;
racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
if (lacc && lacc->grp_to_be_debug_replaced)
{
- gimple ds;
+ gdebug *ds;
tree drhs;
struct access *racc = find_access_in_subtree (sad->top_racc,
offset,
drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
TREE_TYPE (dlhs), drhs);
}
- gimple ds = gimple_build_debug_bind (dlhs, drhs, stmt);
+ gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
gsi_insert_before (gsi, ds, GSI_SAME_STMT);
}
switch (gimple_code (stmt))
{
case GIMPLE_RETURN:
- t = gimple_return_retval_ptr (stmt);
+ t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
if (*t != NULL_TREE)
modified |= sra_modify_expr (t, &gsi, false);
break;
break;
case GIMPLE_ASM:
- for (i = 0; i < gimple_asm_ninputs (stmt); i++)
- {
- t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
- modified |= sra_modify_expr (t, &gsi, false);
- }
- for (i = 0; i < gimple_asm_noutputs (stmt); i++)
- {
- t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
- modified |= sra_modify_expr (t, &gsi, true);
- }
+ {
+ gasm *asm_stmt = as_a <gasm *> (stmt);
+ for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
+ {
+ t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
+ modified |= sra_modify_expr (t, &gsi, false);
+ }
+ for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
+ {
+ t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
+ modified |= sra_modify_expr (t, &gsi, true);
+ }
+ }
break;
default:
else if (is_gimple_call (stmt))
gimple_call_set_lhs (stmt, name);
else
- gimple_phi_set_result (stmt, name);
+ gimple_phi_set_result (as_a <gphi *> (stmt), name);
replace_uses_by (lhs, name);
release_ssa_name (lhs);
switch (gimple_code (stmt))
{
case GIMPLE_RETURN:
- t = gimple_return_retval_ptr (stmt);
+ t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
if (*t != NULL_TREE)
modified |= ipa_modify_expr (t, true, adjustments);
break;
break;
case GIMPLE_ASM:
- for (i = 0; i < gimple_asm_ninputs (stmt); i++)
- {
- t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
- modified |= ipa_modify_expr (t, true, adjustments);
- }
- for (i = 0; i < gimple_asm_noutputs (stmt); i++)
- {
- t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
- modified |= ipa_modify_expr (t, false, adjustments);
- }
+ {
+ gasm *asm_stmt = as_a <gasm *> (stmt);
+ for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
+ {
+ t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
+ modified |= ipa_modify_expr (t, true, adjustments);
+ }
+ for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
+ {
+ t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
+ modified |= ipa_modify_expr (t, false, adjustments);
+ }
+ }
break;
default:
{
struct ipa_parm_adjustment *adj;
imm_use_iterator ui;
- gimple stmt, def_temp;
+ gimple stmt;
+ gdebug *def_temp;
tree name, vexpr, copy = NULL_TREE;
use_operand_p use_p;
for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gcall *stmt;
tree call_fndecl;
- if (gimple_code (stmt) != GIMPLE_CALL)
+ stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
+ if (!stmt)
continue;
call_fndecl = gimple_call_fndecl (stmt);
if (call_fndecl == old_decl)
otherwise return false. */
static bool
-ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
+ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
{
tree base, callee;
unsigned i;
}
static bool
-ref_maybe_used_by_call_p (gimple call, ao_ref *ref)
+ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
{
bool res;
res = ref_maybe_used_by_call_p_1 (call, ref);
return refs_may_alias_p (rhs, ref);
}
else if (is_gimple_call (stmt))
- return ref_maybe_used_by_call_p (stmt, ref);
- else if (gimple_code (stmt) == GIMPLE_RETURN)
+ return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
+ else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
- tree retval = gimple_return_retval (stmt);
+ tree retval = gimple_return_retval (return_stmt);
if (retval
&& TREE_CODE (retval) != SSA_NAME
&& !is_gimple_min_invariant (retval)
return true, otherwise return false. */
bool
-call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
+call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
{
tree base;
tree callee;
return true, otherwise return false. */
bool
-call_may_clobber_ref_p (gimple call, tree ref)
+call_may_clobber_ref_p (gcall *call, tree ref)
{
bool res;
ao_ref r;
return true;
}
- return call_may_clobber_ref_p_1 (stmt, ref);
+ return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
}
else if (gimple_assign_single_p (stmt))
{
extern bool stmt_may_clobber_global_p (gimple);
extern bool stmt_may_clobber_ref_p (gimple, tree);
extern bool stmt_may_clobber_ref_p_1 (gimple, ao_ref *);
-extern bool call_may_clobber_ref_p (gimple, tree);
-extern bool call_may_clobber_ref_p_1 (gimple, ao_ref *);
+extern bool call_may_clobber_ref_p (gcall *, tree);
+extern bool call_may_clobber_ref_p_1 (gcall *, ao_ref *);
extern bool stmt_kills_ref_p (gimple, tree);
extern bool stmt_kills_ref_p (gimple, ao_ref *);
extern tree get_continuation_for_phi (gimple, ao_ref *,
except for phi nodes for virtual operands when we do not do store ccp. */
FOR_EACH_BB_FN (bb, cfun)
{
- gimple_stmt_iterator i;
+ gphi_iterator i;
for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
{
- gimple phi = gsi_stmt (i);
+ gphi *phi = i.phi ();
if (virtual_operand_p (gimple_phi_result (phi)))
prop_set_simulate_again (phi, false);
of the PHI node that are incoming via executable edges. */
static enum ssa_prop_result
-ccp_visit_phi_node (gimple phi)
+ccp_visit_phi_node (gphi *phi)
{
unsigned i;
ccp_prop_value_t *old_val, new_val;
case GIMPLE_SWITCH:
{
/* Return the constant switch index. */
- return valueize_op (gimple_switch_index (stmt));
+ return valueize_op (gimple_switch_index (as_a <gswitch *> (stmt)));
}
case GIMPLE_ASSIGN:
simplified = gimple_assign_rhs1 (stmt);
}
else if (code == GIMPLE_SWITCH)
- simplified = gimple_switch_index (stmt);
+ simplified = gimple_switch_index (as_a <gswitch *> (stmt));
else
/* These cannot satisfy is_gimple_min_invariant without folding. */
gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
insert_clobber_before_stack_restore (tree saved_val, tree var,
gimple_htab **visited)
{
- gimple stmt, clobber_stmt;
+ gimple stmt;
+ gassign *clobber_stmt;
tree clobber;
imm_use_iterator iter;
gimple_stmt_iterator i;
{
case GIMPLE_COND:
{
+ gcond *cond_stmt = as_a <gcond *> (stmt);
ccp_prop_value_t val;
/* Statement evaluation will handle type mismatches in constants
more gracefully than the final propagation. This allows us to
}
if (integer_zerop (val.value))
- gimple_cond_make_false (stmt);
+ gimple_cond_make_false (cond_stmt);
else
- gimple_cond_make_true (stmt);
+ gimple_cond_make_true (cond_stmt);
return true;
}
if (is_gimple_debug (stmt))
continue;
- if (gimple_code (stmt) == GIMPLE_LABEL)
+ if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
{
/* Verify we do not need to preserve the label. */
- if (FORCED_LABEL (gimple_label_label (stmt)))
+ if (FORCED_LABEL (gimple_label_label (label_stmt)))
return false;
continue;
continue;
stmt = gsi_stmt (gsi);
- if (gimple_code (stmt) == GIMPLE_COND)
+ if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
{
if (e->flags & EDGE_TRUE_VALUE)
- gimple_cond_make_false (stmt);
+ gimple_cond_make_false (cond_stmt);
else if (e->flags & EDGE_FALSE_VALUE)
- gimple_cond_make_true (stmt);
+ gimple_cond_make_true (cond_stmt);
else
gcc_unreachable ();
- update_stmt (stmt);
+ update_stmt (cond_stmt);
}
else
{
FOR_EACH_BB_FN (bb, cfun)
{
- gimple_stmt_iterator gsi;
-
/* Start with live on exit temporaries. */
live_track_init (live, live_on_exit (liveinfo, bb));
- for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_last_bb (bb); !gsi_end_p (gsi);
+ gsi_prev (&gsi))
{
tree var;
gimple stmt = gsi_stmt (gsi);
There must be a conflict recorded between the result of the PHI and
any variables that are live. Otherwise the out-of-ssa translation
may create incorrect code. */
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree result = PHI_RESULT (phi);
if (live_track_live_p (live, result))
live_track_process_def (live, result, graph);
{
tree arg;
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gpi = gsi_start_phis (bb);
+ !gsi_end_p (gpi);
+ gsi_next (&gpi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gpi.phi ();
size_t i;
int ver;
tree res;
case GIMPLE_ASM:
{
+ gasm *asm_stmt = as_a <gasm *> (stmt);
unsigned long noutputs, i;
unsigned long ninputs;
tree *outputs, link;
- noutputs = gimple_asm_noutputs (stmt);
- ninputs = gimple_asm_ninputs (stmt);
+ noutputs = gimple_asm_noutputs (asm_stmt);
+ ninputs = gimple_asm_ninputs (asm_stmt);
outputs = (tree *) alloca (noutputs * sizeof (tree));
for (i = 0; i < noutputs; ++i)
{
- link = gimple_asm_output_op (stmt, i);
+ link = gimple_asm_output_op (asm_stmt, i);
outputs[i] = TREE_VALUE (link);
}
char *end;
unsigned long match;
- link = gimple_asm_input_op (stmt, i);
+ link = gimple_asm_input_op (asm_stmt, i);
constraint
= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
input = TREE_VALUE (link);
FOR_EACH_EDGE (e, ei, bb->preds)
if (e->flags & EDGE_ABNORMAL)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree res = PHI_RESULT (phi);
tree arg = PHI_ARG_DEF (phi, e->dest_idx);
int v1 = SSA_NAME_VERSION (res);
set it to be the value of the LHS of PHI. */
static enum ssa_prop_result
-copy_prop_visit_phi_node (gimple phi)
+copy_prop_visit_phi_node (gphi *phi)
{
enum ssa_prop_result retval;
unsigned i;
FOR_EACH_BB_FN (bb, cfun)
{
- gimple_stmt_iterator si;
-
- for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
gimple stmt = gsi_stmt (si);
ssa_op_iter iter;
set_copy_of_val (def, def);
}
- for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
- gimple phi = gsi_stmt (si);
+ gphi *phi = si.phi ();
tree def;
def = gimple_phi_result (phi);
{
var_map map;
basic_block bb;
- gimple_stmt_iterator gsi;
tree var, part_var;
- gimple stmt, phi;
+ gimple stmt;
unsigned x;
FILE *debug;
FOR_EACH_BB_FN (bb, fun)
{
/* Scan for real copies. */
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
stmt = gsi_stmt (gsi);
if (gimple_assign_ssa_name_copy_p (stmt))
FOR_EACH_BB_FN (bb, fun)
{
/* Treat PHI nodes as copies between the result and each argument. */
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
size_t i;
tree res;
-
- phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
res = gimple_phi_result (phi);
/* Do not process virtual SSA_NAMES. */
we also consider the control dependent edges leading to the
predecessor block associated with each PHI alternative as
necessary. */
+ gphi *phi = as_a <gphi *> (stmt);
size_t k;
for (k = 0; k < gimple_phi_num_args (stmt); k++)
{
for (k = 0; k < gimple_phi_num_args (stmt); k++)
{
- basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;
+ basic_block arg_bb = gimple_phi_arg_edge (phi, k)->src;
if (gimple_bb (stmt)
!= get_immediate_dominator (CDI_POST_DOMINATORS, arg_bb))
mark_all_reaching_defs_necessary (stmt);
}
}
- else if (gimple_code (stmt) == GIMPLE_RETURN)
+ else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
{
- tree rhs = gimple_return_retval (stmt);
+ tree rhs = gimple_return_retval (return_stmt);
/* A return statement may perform a load. */
if (rhs
&& TREE_CODE (rhs) != SSA_NAME
mark_all_reaching_defs_necessary (stmt);
}
}
- else if (gimple_code (stmt) == GIMPLE_ASM)
+ else if (gasm *asm_stmt = dyn_cast <gasm *> (stmt))
{
unsigned i;
mark_all_reaching_defs_necessary (stmt);
/* Inputs may perform loads. */
- for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
+ for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
{
- tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
+ tree op = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
if (TREE_CODE (op) != SSA_NAME
&& !is_gimple_min_invariant (op)
&& TREE_CODE (op) != CONSTRUCTOR
remove_dead_phis (basic_block bb)
{
bool something_changed = false;
- gimple phi;
- gimple_stmt_iterator gsi;
+ gphi *phi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);)
{
stats.total_phis++;
- phi = gsi_stmt (gsi);
+ phi = gsi.phi ();
/* We do not track necessity of virtual PHI nodes. Instead do
very simple dead PHI removal here. */
static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
edge e2 = NULL;
edge_iterator ei;
break;
for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree op;
source_location locus;
&& !DECL_HAS_VALUE_EXPR_P (lhs))
{
tree rhs = gimple_assign_rhs1 (stmt);
- gimple note
+ gdebug *note
= gimple_build_debug_bind (lhs, unshare_expr (rhs), stmt);
gsi_insert_after (i, note, GSI_SAME_STMT);
}
{
tree name = gimple_call_lhs (stmt);
- notice_special_calls (stmt);
+ notice_special_calls (as_a <gcall *> (stmt));
/* When LHS of var = call (); is dead, simplify it into
call (); saving one operand. */
if (!bitmap_bit_p (bb_contains_live_stmts, bb->index)
|| !(bb->flags & BB_REACHABLE))
{
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- if (virtual_operand_p (gimple_phi_result (gsi_stmt (gsi))))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
+ if (virtual_operand_p (gimple_phi_result (gsi.phi ())))
{
bool found = false;
imm_use_iterator iter;
- FOR_EACH_IMM_USE_STMT (stmt, iter, gimple_phi_result (gsi_stmt (gsi)))
+ FOR_EACH_IMM_USE_STMT (stmt, iter,
+ gimple_phi_result (gsi.phi ()))
{
if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
continue;
}
}
if (found)
- mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
+ mark_virtual_phi_result_for_renaming (gsi.phi ());
}
if (!(bb->flags & BB_REACHABLE))
struct { enum tree_code op; tree opnd; } unary;
struct { enum tree_code op; tree opnd0, opnd1; } binary;
struct { enum tree_code op; tree opnd0, opnd1, opnd2; } ternary;
- struct { gimple fn_from; bool pure; size_t nargs; tree *args; } call;
+ struct { gcall *fn_from; bool pure; size_t nargs; tree *args; } call;
struct { size_t nargs; tree *args; } phi;
} ops;
};
expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
}
- else if (code == GIMPLE_CALL)
+ else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
- size_t nargs = gimple_call_num_args (stmt);
+ size_t nargs = gimple_call_num_args (call_stmt);
size_t i;
- gcc_assert (gimple_call_lhs (stmt));
+ gcc_assert (gimple_call_lhs (call_stmt));
- expr->type = TREE_TYPE (gimple_call_lhs (stmt));
+ expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
expr->kind = EXPR_CALL;
- expr->ops.call.fn_from = stmt;
+ expr->ops.call.fn_from = call_stmt;
- if (gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE))
+ if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
expr->ops.call.pure = true;
else
expr->ops.call.pure = false;
expr->ops.call.nargs = nargs;
expr->ops.call.args = XCNEWVEC (tree, nargs);
for (i = 0; i < nargs; i++)
- expr->ops.call.args[i] = gimple_call_arg (stmt, i);
+ expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
}
- else if (code == GIMPLE_SWITCH)
+ else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
{
- expr->type = TREE_TYPE (gimple_switch_index (stmt));
+ expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
expr->kind = EXPR_SINGLE;
- expr->ops.single.rhs = gimple_switch_index (stmt);
+ expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
}
else if (code == GIMPLE_GOTO)
{
{
size_t i;
enum tree_code code = CALL_EXPR;
- gimple fn_from;
+ gcall *fn_from;
hstate.add_object (code);
fn_from = expr->ops.call.fn_from;
{
size_t i;
size_t nargs = element->expr.ops.call.nargs;
- gimple fn_from;
+ gcall *fn_from;
fn_from = element->expr.ops.call.fn_from;
if (gimple_call_internal_p (fn_from))
private:
void thread_across_edge (edge);
- gimple m_dummy_cond;
+ gcond *m_dummy_cond;
};
/* Jump threading, redundancy elimination and const/copy propagation.
condition to a canonical form. */
static void
-canonicalize_comparison (gimple condstmt)
+canonicalize_comparison (gcond *condstmt)
{
tree op0;
tree op1;
static void
record_equivalences_from_phis (basic_block bb)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree lhs = gimple_phi_result (phi);
tree rhs = NULL;
FOR_EACH_EDGE (e, ei, bb->succs)
{
int indx;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
/* If this is an abnormal edge, then we do not want to copy propagate
into the PHI alternative associated with this edge. */
tree new_val;
use_operand_p orig_p;
tree orig_val;
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
/* The alternative may be associated with a constant, so verify
it is an SSA_NAME before doing anything with it. */
if (gimple_code (stmt) == GIMPLE_SWITCH)
{
- tree index = gimple_switch_index (stmt);
+ gswitch *switch_stmt = as_a <gswitch *> (stmt);
+ tree index = gimple_switch_index (switch_stmt);
if (TREE_CODE (index) == SSA_NAME)
{
int i;
- int n_labels = gimple_switch_num_labels (stmt);
+ int n_labels = gimple_switch_num_labels (switch_stmt);
tree *info = XCNEWVEC (tree, last_basic_block_for_fn (cfun));
edge e;
edge_iterator ei;
for (i = 0; i < n_labels; i++)
{
- tree label = gimple_switch_label (stmt, i);
+ tree label = gimple_switch_label (switch_stmt, i);
basic_block target_bb = label_to_block (CASE_LABEL (label));
if (CASE_HIGH (label)
|| !CASE_LOW (label)
expr_type = TREE_TYPE (gimple_call_lhs (stmt));
assigns_var_p = true;
}
- else if (gimple_code (stmt) == GIMPLE_SWITCH)
- expr_type = TREE_TYPE (gimple_switch_index (stmt));
+ else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
+ expr_type = TREE_TYPE (gimple_switch_index (swtch_stmt));
else if (gimple_code (stmt) == GIMPLE_PHI)
/* We can't propagate into a phi, so the logic below doesn't apply.
Instead record an equivalence between the cached LHS and the
&& !is_gimple_reg (lhs))
{
tree rhs = gimple_assign_rhs1 (stmt);
- gimple new_stmt;
+ gassign *new_stmt;
/* Build a new statement with the RHS and LHS exchanged. */
if (TREE_CODE (rhs) == SSA_NAME)
}
if (gimple_code (stmt) == GIMPLE_COND)
- canonicalize_comparison (stmt);
+ canonicalize_comparison (as_a <gcond *> (stmt));
update_stmt_if_modified (stmt);
opt_stats.num_stmts++;
rhs = gimple_assign_rhs1 (stmt);
else if (gimple_code (stmt) == GIMPLE_GOTO)
rhs = gimple_goto_dest (stmt);
- else if (gimple_code (stmt) == GIMPLE_SWITCH)
+ else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
/* This should never be an ADDR_EXPR. */
- rhs = gimple_switch_index (stmt);
+ rhs = gimple_switch_index (swtch_stmt);
if (rhs && TREE_CODE (rhs) == ADDR_EXPR)
recompute_tree_invariant_for_addr_expr (rhs);
tree lhs = gimple_assign_lhs (stmt);
tree rhs = gimple_assign_rhs1 (stmt);
tree cached_lhs;
- gimple new_stmt;
+ gassign *new_stmt;
if (TREE_CODE (rhs) == SSA_NAME)
{
tree tem = SSA_NAME_VALUE (rhs);
val = fold_binary_loc (gimple_location (stmt),
gimple_cond_code (stmt), boolean_type_node,
gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
- else if (gimple_code (stmt) == GIMPLE_SWITCH)
- val = gimple_switch_index (stmt);
+ else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
+ val = gimple_switch_index (swtch_stmt);
if (val && TREE_CODE (val) == INTEGER_CST && find_taken_edge (bb, val))
cfg_altered = true;
get_rhs_or_phi_arg (gimple stmt)
{
if (gimple_code (stmt) == GIMPLE_PHI)
- return degenerate_phi_result (stmt);
+ return degenerate_phi_result (as_a <gphi *> (stmt));
else if (gimple_assign_single_p (stmt))
return gimple_assign_rhs1 (stmt);
else
gimple_cond_lhs (use_stmt),
gimple_cond_rhs (use_stmt));
else if (gimple_code (use_stmt) == GIMPLE_SWITCH)
- val = gimple_switch_index (use_stmt);
+ val = gimple_switch_index (as_a <gswitch *> (use_stmt));
else
val = gimple_goto_dest (use_stmt);
edge te = find_taken_edge (bb, val);
edge_iterator ei;
edge e;
- gimple_stmt_iterator gsi, psi;
+ gimple_stmt_iterator gsi;
+ gphi_iterator psi;
/* Remove all outgoing edges except TE. */
for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei));)
!gsi_end_p (psi);
gsi_next (&psi))
{
- gimple phi = gsi_stmt (psi);
+ gphi *phi = psi.phi ();
tree result = gimple_phi_result (phi);
int version = SSA_NAME_VERSION (result);
static void
eliminate_degenerate_phis_1 (basic_block bb, bitmap interesting_names)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
basic_block son;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
eliminate_const_or_copy (phi, interesting_names);
}
return cfg_changed;
}
-/* Return the rhs of a gimple_assign STMT in a form of a single tree,
+/* Return the rhs of a gassign *STMT in a form of a single tree,
converted to type TYPE.
This should disappear, but is needed so we can combine expressions and use
This must be kept in sync with forward_propagate_into_cond. */
static int
-forward_propagate_into_gimple_cond (gimple stmt)
+forward_propagate_into_gimple_cond (gcond *stmt)
{
tree tmp;
enum tree_code code = gimple_cond_code (stmt);
have values outside the range of the new type. */
static void
-simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
+simplify_gimple_switch_label_vec (gswitch *stmt, tree index_type)
{
unsigned int branch_num = gimple_switch_num_labels (stmt);
auto_vec<tree> labels (branch_num);
the condition which we may be able to optimize better. */
static bool
-simplify_gimple_switch (gimple stmt)
+simplify_gimple_switch (gswitch *stmt)
{
/* The optimization that we really care about is removing unnecessary
casts. That will let us do much better in propagating the inferred
bitmap_set_bit (to_purge, bb->index);
/* Cleanup the CFG if we simplified a condition to
true or false. */
- if (gimple_code (stmt) == GIMPLE_COND
- && (gimple_cond_true_p (stmt)
- || gimple_cond_false_p (stmt)))
- cfg_changed = true;
+ if (gcond *cond = dyn_cast <gcond *> (stmt))
+ if (gimple_cond_true_p (cond)
+ || gimple_cond_false_p (cond))
+ cfg_changed = true;
update_stmt (stmt);
}
}
case GIMPLE_SWITCH:
- changed = simplify_gimple_switch (stmt);
+ changed = simplify_gimple_switch (as_a <gswitch *> (stmt));
break;
case GIMPLE_COND:
{
- int did_something;
- did_something = forward_propagate_into_gimple_cond (stmt);
+ int did_something
+ = forward_propagate_into_gimple_cond (as_a <gcond *> (stmt));
if (did_something == 2)
cfg_changed = true;
changed = did_something != 0;
{
edge e1 = find_edge (bb1, dest);
edge e2 = find_edge (bb2, dest);
- gimple_stmt_iterator gsi;
- gimple phi;
+ gphi_iterator gsi;
+ gphi *phi;
for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
- phi = gsi_stmt (gsi);
+ phi = gsi.phi ();
if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
return false;
Returns true if the pattern matched, false otherwise. */
static bool
-recognize_single_bit_test (gimple cond, tree *name, tree *bit, bool inv)
+recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
gimple stmt;
Returns true if the pattern matched, false otherwise. */
static bool
-recognize_bits_test (gimple cond, tree *name, tree *bits, bool inv)
+recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
gimple stmt;
basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
gimple_stmt_iterator gsi;
- gimple inner_cond, outer_cond;
+ gimple inner_stmt, outer_stmt;
+ gcond *inner_cond, *outer_cond;
tree name1, name2, bit1, bit2, bits1, bits2;
- inner_cond = last_stmt (inner_cond_bb);
- if (!inner_cond
- || gimple_code (inner_cond) != GIMPLE_COND)
+ inner_stmt = last_stmt (inner_cond_bb);
+ if (!inner_stmt
+ || gimple_code (inner_stmt) != GIMPLE_COND)
return false;
+ inner_cond = as_a <gcond *> (inner_stmt);
- outer_cond = last_stmt (outer_cond_bb);
- if (!outer_cond
- || gimple_code (outer_cond) != GIMPLE_COND)
+ outer_stmt = last_stmt (outer_cond_bb);
+ if (!outer_stmt
+ || gimple_code (outer_stmt) != GIMPLE_COND)
return false;
+ outer_cond = as_a <gcond *> (outer_stmt);
/* See if we test a single bit of the same name in both tests. In
that case remove the outer test, merging both else edges,
mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
}
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gpi = gsi_start_phis (bb);
+ !gsi_end_p (gpi);
+ gsi_next (&gpi))
{
use_operand_p arg_p;
ssa_op_iter i;
tree def;
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gpi.phi ();
if (virtual_operand_p (gimple_phi_result (phi)))
continue;
as this is where a copy would be inserted. Check to see if it is
defined in that block, or whether its live on entry. */
int index = PHI_ARG_INDEX_FROM_USE (use);
- edge e = gimple_phi_arg_edge (use_stmt, index);
+ edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
{
if (e->src != def_bb)
/* Set all the live-on-exit bits for uses in PHIs. */
FOR_EACH_BB_FN (bb, cfun)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
size_t i;
/* Mark the PHI arguments which are live on exit to the pred block. */
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
tree t = PHI_ARG_DEF (phi, i);
if it occurs in a PHI argument of the block. */
size_t z;
bool ok = false;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (e->dest);
!gsi_end_p (gsi) && !ok;
gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
for (z = 0; z < gimple_phi_num_args (phi); z++)
if (var == gimple_phi_arg_def (phi, z))
{
else return false. */
static bool
-extract_true_false_args_from_phi (basic_block dom, gimple phi,
+extract_true_false_args_from_phi (basic_block dom, gphi *phi,
tree *true_arg_p, tree *false_arg_p)
{
basic_block bb = gimple_bb (phi);
level = superloop_at_depth (loop, 1);
lim_data->max_loop = level;
- if (gimple_code (stmt) == GIMPLE_PHI)
+ if (gphi *phi = dyn_cast <gphi *> (stmt))
{
use_operand_p use_p;
unsigned min_cost = UINT_MAX;
evaluated. For this reason the PHI cost (and thus the
cost we remove from the loop by doing the invariant motion)
is that of the cheapest PHI argument dependency chain. */
- FOR_EACH_PHI_ARG (use_p, stmt, iter, SSA_OP_USE)
+ FOR_EACH_PHI_ARG (use_p, phi, iter, SSA_OP_USE)
{
val = USE_FROM_PTR (use_p);
min_cost = MIN (min_cost, total_cost);
lim_data->cost += min_cost;
- if (gimple_phi_num_args (stmt) > 1)
+ if (gimple_phi_num_args (phi) > 1)
{
basic_block dom = get_immediate_dominator (CDI_DOMINATORS, bb);
gimple cond;
/* Verify that this is an extended form of a diamond and
the PHI arguments are completely controlled by the
predicate in DOM. */
- if (!extract_true_false_args_from_phi (dom, stmt, NULL, NULL))
+ if (!extract_true_false_args_from_phi (dom, phi, NULL, NULL))
return false;
/* Fold in dependencies and cost of the condition. */
static gimple
rewrite_reciprocal (gimple_stmt_iterator *bsi)
{
- gimple stmt, stmt1, stmt2;
+ gassign *stmt, *stmt1, *stmt2;
tree name, lhs, type;
tree real_one;
gimple_stmt_iterator gsi;
- stmt = gsi_stmt (*bsi);
+ stmt = as_a <gassign *> (gsi_stmt (*bsi));
lhs = gimple_assign_lhs (stmt);
type = TREE_TYPE (lhs);
static gimple
rewrite_bittest (gimple_stmt_iterator *bsi)
{
- gimple stmt, use_stmt, stmt1, stmt2;
+ gassign *stmt;
+ gimple stmt1;
+ gassign *stmt2;
+ gimple use_stmt;
+ gcond *cond_stmt;
tree lhs, name, t, a, b;
use_operand_p use;
- stmt = gsi_stmt (*bsi);
+ stmt = as_a <gassign *> (gsi_stmt (*bsi));
lhs = gimple_assign_lhs (stmt);
/* Verify that the single use of lhs is a comparison against zero. */
if (TREE_CODE (lhs) != SSA_NAME
- || !single_imm_use (lhs, &use, &use_stmt)
- || gimple_code (use_stmt) != GIMPLE_COND)
+ || !single_imm_use (lhs, &use, &use_stmt))
return stmt;
- if (gimple_cond_lhs (use_stmt) != lhs
- || (gimple_cond_code (use_stmt) != NE_EXPR
- && gimple_cond_code (use_stmt) != EQ_EXPR)
- || !integer_zerop (gimple_cond_rhs (use_stmt)))
+ cond_stmt = dyn_cast <gcond *> (use_stmt);
+ if (!cond_stmt)
+ return stmt;
+ if (gimple_cond_lhs (cond_stmt) != lhs
+ || (gimple_cond_code (cond_stmt) != NE_EXPR
+ && gimple_cond_code (cond_stmt) != EQ_EXPR)
+ || !integer_zerop (gimple_cond_rhs (cond_stmt)))
return stmt;
/* Get at the operands of the shift. The rhs is TMP1 & 1. */
/* Replace the SSA_NAME we compare against zero. Adjust
the type of zero accordingly. */
SET_USE (use, name);
- gimple_cond_set_rhs (use_stmt, build_int_cst_type (TREE_TYPE (name), 0));
+ gimple_cond_set_rhs (cond_stmt,
+ build_int_cst_type (TREE_TYPE (name),
+ 0));
/* Don't use gsi_replace here, none of the new assignments sets
the variable originally set in stmt. Move bsi to stmt1, and
move_computations_dom_walker::before_dom_children (basic_block bb)
{
struct loop *level;
- gimple_stmt_iterator bsi;
- gimple stmt;
unsigned cost = 0;
struct lim_aux_data *lim_data;
if (!loop_outer (bb->loop_father))
return;
- for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); )
+ for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi); )
{
- gimple new_stmt;
- stmt = gsi_stmt (bsi);
+ gassign *new_stmt;
+ gphi *stmt = bsi.phi ();
lim_data = get_lim_data (stmt);
if (lim_data == NULL)
remove_phi_node (&bsi, false);
}
- for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); )
+ for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi); )
{
edge e;
- stmt = gsi_stmt (bsi);
+ gimple stmt = gsi_stmt (bsi);
lim_data = get_lim_data (stmt);
if (lim_data == NULL)
{
/* The new VUSE is the one from the virtual PHI in the loop
header or the one already present. */
- gimple_stmt_iterator gsi2;
+ gphi_iterator gsi2;
for (gsi2 = gsi_start_phis (e->dest);
!gsi_end_p (gsi2); gsi_next (&gsi2))
{
- gimple phi = gsi_stmt (gsi2);
+ gphi *phi = gsi2.phi ();
if (virtual_operand_p (gimple_phi_result (phi)))
{
gimple_set_vuse (stmt, PHI_ARG_DEF_FROM_EDGE (phi, e));
}
if (!loop_has_only_one_exit)
- for (gsi = gsi_start_phis (old_dest); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gpi = gsi_start_phis (old_dest);
+ !gsi_end_p (gpi); gsi_next (&gpi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gpi.phi ();
unsigned i;
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
tree tmp_var, store_flag = NULL_TREE;
unsigned i;
- gimple load;
+ gassign *load;
struct fmt_data fmt_data;
edge ex;
struct lim_aux_data *lim_data;
FOR_EACH_VEC_ELT (exits, i, ex)
if (!multi_threaded_model_p)
{
- gimple store;
+ gassign *store;
store = gimple_build_assign (unshare_expr (ref->mem.ref), tmp_var);
gsi_insert_on_edge (ex, store);
}
{
edge in;
tree type, var;
- gimple cond;
+ gcond *cond;
gimple_stmt_iterator incr_at;
enum tree_code cmp;
fprintf (dump_file, " iterations.\n");
}
- cond = last_stmt (exit->src);
+ cond = as_a <gcond *> (last_stmt (exit->src));
in = EDGE_SUCC (exit->src, 0);
if (in == exit)
in = EDGE_SUCC (exit->src, 1);
&& constant_after_peeling (gimple_cond_lhs (stmt), stmt, loop)
&& constant_after_peeling (gimple_cond_rhs (stmt), stmt, loop))
|| (gimple_code (stmt) == GIMPLE_SWITCH
- && constant_after_peeling (gimple_switch_index (stmt), stmt, loop)))
+ && constant_after_peeling (gimple_switch_index (
+ as_a <gswitch *> (stmt)),
+ stmt, loop)))
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Constant conditional.\n");
&& (!constant_after_peeling (gimple_cond_lhs (stmt), stmt, loop)
|| constant_after_peeling (gimple_cond_rhs (stmt), stmt, loop)))
|| (gimple_code (stmt) == GIMPLE_SWITCH
- && !constant_after_peeling (gimple_switch_index (stmt), stmt, loop)))
+ && !constant_after_peeling (gimple_switch_index (
+ as_a <gswitch *> (stmt)),
+ stmt, loop)))
&& (!exit || bb != exit->src))
size->num_branches_on_hot_path++;
}
&& wi::ltu_p (elt->bound, npeeled))
{
gimple_stmt_iterator gsi = gsi_for_stmt (elt->stmt);
- gimple stmt = gimple_build_call
+ gcall *stmt = gimple_build_call
(builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
gimple_set_location (stmt, gimple_location (elt->stmt));
if (!loop_exit_edge_p (loop, exit_edge))
exit_edge = EDGE_SUCC (bb, 1);
gcc_checking_assert (loop_exit_edge_p (loop, exit_edge));
+ gcond *cond_stmt = as_a <gcond *> (elt->stmt);
if (exit_edge->flags & EDGE_TRUE_VALUE)
- gimple_cond_make_true (elt->stmt);
+ gimple_cond_make_true (cond_stmt);
else
- gimple_cond_make_false (elt->stmt);
- update_stmt (elt->stmt);
+ gimple_cond_make_false (cond_stmt);
+ update_stmt (cond_stmt);
changed = true;
}
}
fprintf (dump_file, "Removed pointless exit: ");
print_gimple_stmt (dump_file, elt->stmt, 0, 0);
}
+ gcond *cond_stmt = as_a <gcond *> (elt->stmt);
if (exit_edge->flags & EDGE_TRUE_VALUE)
- gimple_cond_make_false (elt->stmt);
+ gimple_cond_make_false (cond_stmt);
else
- gimple_cond_make_true (elt->stmt);
- update_stmt (elt->stmt);
+ gimple_cond_make_true (cond_stmt);
+ update_stmt (cond_stmt);
changed = true;
}
}
edge latch_edge = loop_latch_edge (loop);
int flags = latch_edge->flags;
location_t locus = latch_edge->goto_locus;
- gimple stmt;
+ gcall *stmt;
gimple_stmt_iterator gsi;
remove_exits_and_undefined_stmts (loop, n_unroll);
location_t locus)
{
unsigned HOST_WIDE_INT n_unroll = 0, ninsns, unr_insns;
- gimple cond;
struct loop_size size;
bool n_unroll_found = false;
edge edge_to_cancel = NULL;
/* Remove the conditional from the last copy of the loop. */
if (edge_to_cancel)
{
- cond = last_stmt (edge_to_cancel->src);
+ gcond *cond = as_a <gcond *> (last_stmt (edge_to_cancel->src));
if (edge_to_cancel->flags & EDGE_TRUE_VALUE)
gimple_cond_make_false (cond);
else
static void
propagate_constants_for_unrolling (basic_block bb)
{
- gimple_stmt_iterator gsi;
-
/* Look for degenerate PHI nodes with constant argument. */
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree result = gimple_phi_result (phi);
tree arg = gimple_phi_arg_def (phi, 0);
}
/* Look for assignments to SSA names with constant RHS. */
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
{
gimple stmt = gsi_stmt (gsi);
tree lhs;
not define a simple affine biv with nonzero step. */
static tree
-determine_biv_step (gimple phi)
+determine_biv_step (gphi *phi)
{
struct loop *loop = gimple_bb (phi)->loop_father;
tree name = PHI_RESULT (phi);
static bool
find_bivs (struct ivopts_data *data)
{
- gimple phi;
+ gphi *phi;
tree step, type, base;
bool found = false;
struct loop *loop = data->current_loop;
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
{
- phi = gsi_stmt (psi);
+ phi = psi.phi ();
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
continue;
static void
mark_bivs (struct ivopts_data *data)
{
- gimple phi, def;
+ gphi *phi;
+ gimple def;
tree var;
struct iv *iv, *incr_iv;
struct loop *loop = data->current_loop;
basic_block incr_bb;
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
{
- phi = gsi_stmt (psi);
+ phi = psi.phi ();
iv = get_iv (data, PHI_RESULT (phi));
if (!iv)
if (gimple_code (stmt) == GIMPLE_COND)
{
- op0 = gimple_cond_lhs_ptr (stmt);
- op1 = gimple_cond_rhs_ptr (stmt);
+ gcond *cond_stmt = as_a <gcond *> (stmt);
+ op0 = gimple_cond_lhs_ptr (cond_stmt);
+ op1 = gimple_cond_rhs_ptr (cond_stmt);
}
else
{
static void
find_interesting_uses_outside (struct ivopts_data *data, edge exit)
{
- gimple phi;
- gimple_stmt_iterator psi;
+ gphi *phi;
+ gphi_iterator psi;
tree def;
for (psi = gsi_start_phis (exit->dest); !gsi_end_p (psi); gsi_next (&psi))
{
- phi = gsi_stmt (psi);
+ phi = psi.phi ();
def = PHI_ARG_DEF_FROM_EDGE (phi, exit);
if (!virtual_operand_p (def))
find_interesting_uses_op (data, def);
determine_set_costs (struct ivopts_data *data)
{
unsigned j, n;
- gimple phi;
- gimple_stmt_iterator psi;
+ gphi *phi;
+ gphi_iterator psi;
tree op;
struct loop *loop = data->current_loop;
bitmap_iterator bi;
n = 0;
for (psi = gsi_start_phis (loop->header); !gsi_end_p (psi); gsi_next (&psi))
{
- phi = gsi_stmt (psi);
+ phi = psi.phi ();
op = PHI_RESULT (phi);
if (virtual_operand_p (op))
{
tree comp;
tree op, tgt;
- gimple ass;
+ gassign *ass;
gimple_stmt_iterator bsi;
/* An important special case -- if we are asked to express value of
loop_preheader_edge (data->current_loop),
stmts);
- gimple_cond_set_lhs (use->stmt, var);
- gimple_cond_set_code (use->stmt, compare);
- gimple_cond_set_rhs (use->stmt, op);
+ gcond *cond_stmt = as_a <gcond *> (use->stmt);
+ gimple_cond_set_lhs (cond_stmt, var);
+ gimple_cond_set_code (cond_stmt, compare);
+ gimple_cond_set_rhs (cond_stmt, op);
return;
}
DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (def));
else
DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (vexpr));
- gimple def_temp = gimple_build_debug_bind (vexpr, comp, NULL);
+ gdebug *def_temp
+ = gimple_build_debug_bind (vexpr, comp, NULL);
gimple_stmt_iterator gsi;
if (gimple_code (SSA_NAME_DEF_STMT (def)) == GIMPLE_PHI)
gimple_stmt_iterator *incr_pos, bool after,
tree *var_before, tree *var_after)
{
- gimple stmt;
+ gassign *stmt;
+ gphi *phi;
tree initial, step1;
gimple_seq stmts;
tree vb, va;
if (stmts)
gsi_insert_seq_on_edge_immediate (pe, stmts);
- stmt = create_phi_node (vb, loop->header);
- add_phi_arg (stmt, initial, loop_preheader_edge (loop), UNKNOWN_LOCATION);
- add_phi_arg (stmt, va, loop_latch_edge (loop), UNKNOWN_LOCATION);
+ phi = create_phi_node (vb, loop->header);
+ add_phi_arg (phi, initial, loop_preheader_edge (loop), UNKNOWN_LOCATION);
+ add_phi_arg (phi, va, loop_latch_edge (loop), UNKNOWN_LOCATION);
}
/* Return the innermost superloop LOOP of USE_LOOP that is a superloop of
static void
add_exit_phi (basic_block exit, tree var)
{
- gimple phi;
+ gphi *phi;
edge e;
edge_iterator ei;
static void
find_uses_to_rename_bb (basic_block bb, bitmap *use_blocks, bitmap need_phis)
{
- gimple_stmt_iterator bsi;
edge e;
edge_iterator ei;
FOR_EACH_EDGE (e, ei, bb->succs)
- for (bsi = gsi_start_phis (e->dest); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gphi_iterator bsi = gsi_start_phis (e->dest); !gsi_end_p (bsi);
+ gsi_next (&bsi))
{
- gimple phi = gsi_stmt (bsi);
+ gphi *phi = bsi.phi ();
if (! virtual_operand_p (gimple_phi_result (phi)))
find_uses_to_rename_use (bb, PHI_ARG_DEF_FROM_EDGE (phi, e),
use_blocks, need_phis);
}
- for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
+ gsi_next (&bsi))
find_uses_to_rename_stmt (gsi_stmt (bsi), use_blocks, need_phis);
}
verify_loop_closed_ssa (bool verify_ssa_p)
{
basic_block bb;
- gimple_stmt_iterator bsi;
- gimple phi;
edge e;
edge_iterator ei;
FOR_EACH_BB_FN (bb, cfun)
{
- for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
+ gsi_next (&bsi))
{
- phi = gsi_stmt (bsi);
+ gphi *phi = bsi.phi ();
FOR_EACH_EDGE (e, ei, bb->preds)
check_loop_closed_ssa_use (e->src,
PHI_ARG_DEF_FROM_EDGE (phi, e));
}
- for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
+ gsi_next (&bsi))
check_loop_closed_ssa_stmt (bb, gsi_stmt (bsi));
}
{
basic_block dest = exit->dest;
basic_block bb = split_edge (exit);
- gimple phi, new_phi;
+ gphi *phi, *new_phi;
tree new_name, name;
use_operand_p op_p;
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
source_location locus;
for (psi = gsi_start_phis (dest); !gsi_end_p (psi); gsi_next (&psi))
{
- phi = gsi_stmt (psi);
+ phi = psi.phi ();
op_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (bb));
locus = gimple_phi_arg_location_from_edge (phi, single_succ_edge (bb));
transform_callback transform,
void *data)
{
- gimple exit_if;
+ gcond *exit_if;
tree ctr_before, ctr_after;
tree enter_main_cond, exit_base, exit_step, exit_bound;
enum tree_code exit_cmp;
- gimple phi_old_loop, phi_new_loop, phi_rest;
- gimple_stmt_iterator psi_old_loop, psi_new_loop;
+ gphi *phi_old_loop, *phi_new_loop, *phi_rest;
+ gphi_iterator psi_old_loop, psi_new_loop;
tree init, next, new_init;
struct loop *new_loop;
basic_block rest, exit_bb;
!gsi_end_p (psi_old_loop);
gsi_next (&psi_old_loop), gsi_next (&psi_new_loop))
{
- phi_old_loop = gsi_stmt (psi_old_loop);
- phi_new_loop = gsi_stmt (psi_new_loop);
+ phi_old_loop = psi_old_loop.phi ();
+ phi_new_loop = psi_new_loop.phi ();
init = PHI_ARG_DEF_FROM_EDGE (phi_old_loop, old_entry);
op = PHI_ARG_DEF_PTR_FROM_EDGE (phi_new_loop, new_entry);
/* Finally create the new counter for number of iterations and add the new
exit instruction. */
bsi = gsi_last_nondebug_bb (exit_bb);
- exit_if = gsi_stmt (bsi);
+ exit_if = as_a <gcond *> (gsi_stmt (bsi));
create_iv (exit_base, exit_step, NULL_TREE, loop,
&bsi, false, &ctr_before, &ctr_after);
gimple_cond_set_code (exit_if, exit_cmp);
static void
rewrite_phi_with_iv (loop_p loop,
- gimple_stmt_iterator *psi,
+ gphi_iterator *psi,
gimple_stmt_iterator *gsi,
tree main_iv)
{
affine_iv iv;
- gimple stmt, phi = gsi_stmt (*psi);
+ gassign *stmt;
+ gphi *phi = psi->phi ();
tree atype, mtype, val, res = PHI_RESULT (phi);
if (virtual_operand_p (res) || res == main_iv)
{
unsigned i;
basic_block *bbs = get_loop_body_in_dom_order (loop);
- gimple_stmt_iterator psi;
+ gphi_iterator psi;
for (i = 0; i < loop->num_nodes; i++)
{
unsigned precision = TYPE_PRECISION (TREE_TYPE (*nit));
unsigned original_precision = precision;
tree type, var_before;
- gimple_stmt_iterator gsi, psi;
- gimple stmt;
+ gimple_stmt_iterator gsi;
+ gphi_iterator psi;
+ gcond *stmt;
edge exit = single_dom_exit (loop);
gimple_seq stmts;
machine_mode mode;
for (psi = gsi_start_phis (loop->header);
!gsi_end_p (psi); gsi_next (&psi))
{
- gimple phi = gsi_stmt (psi);
+ gphi *phi = psi.phi ();
tree res = PHI_RESULT (phi);
bool uns;
rewrite_all_phi_nodes_with_iv (loop, var_before);
- stmt = last_stmt (exit->src);
+ stmt = as_a <gcond *> (last_stmt (exit->src));
/* Make the loop exit if the control condition is not satisfied. */
if (exit->flags & EDGE_TRUE_VALUE)
{
{
edge e = loop_preheader_edge (loop);
signop sgn = TYPE_SIGN (type);
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
/* Either for VAR itself... */
rtype = get_range_info (var, &minv, &maxv);
PHI argument from the loop preheader edge. */
for (gsi = gsi_start_phis (loop->header); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
wide_int minc, maxc;
if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var
&& (get_range_info (gimple_phi_result (phi), &minc, &maxc)
struct tree_niter_desc *niter,
bool warn, bool every_iteration)
{
- gimple stmt;
+ gimple last;
+ gcond *stmt;
tree type;
tree op0, op1;
enum tree_code code;
return false;
niter->assumptions = boolean_false_node;
- stmt = last_stmt (exit->src);
- if (!stmt || gimple_code (stmt) != GIMPLE_COND)
+ last = last_stmt (exit->src);
+ if (!last)
+ return false;
+ stmt = dyn_cast <gcond *> (last);
+ if (!stmt)
return false;
/* We want the condition for staying inside loop. */
result by a chain of operations such that all but exactly one of their
operands are constants. */
-static gimple
+static gphi *
chain_of_csts_start (struct loop *loop, tree x)
{
gimple stmt = SSA_NAME_DEF_STMT (x);
if (gimple_code (stmt) == GIMPLE_PHI)
{
if (bb == loop->header)
- return stmt;
+ return as_a <gphi *> (stmt);
return NULL;
}
If such phi node exists, it is returned, otherwise NULL is returned. */
-static gimple
+static gphi *
get_base_for (struct loop *loop, tree x)
{
- gimple phi;
+ gphi *phi;
tree init, next;
if (is_gimple_min_invariant (x))
{
tree acnd;
tree op[2], val[2], next[2], aval[2];
- gimple phi, cond;
+ gphi *phi;
+ gimple cond;
unsigned i, j;
enum tree_code cmp;
{
HOST_WIDE_INT delta;
tree addr, addr_base, write_p, local, forward;
- gimple prefetch;
+ gcall *prefetch;
gimple_stmt_iterator bsi;
unsigned n_prefetches, ap;
bool nontemporal = ref->reuse_distance >= L2_CACHE_SIZE_BYTES;
{
vec<edge> exits = get_loop_exit_edges (loop);
edge exit;
- gimple call;
+ gcall *call;
gimple_stmt_iterator bsi;
unsigned i;
static tree
tree_may_unswitch_on (basic_block bb, struct loop *loop)
{
- gimple stmt, def;
+ gimple last, def;
+ gcond *stmt;
tree cond, use;
basic_block def_bb;
ssa_op_iter iter;
/* BB must end in a simple conditional jump. */
- stmt = last_stmt (bb);
- if (!stmt || gimple_code (stmt) != GIMPLE_COND)
+ last = last_stmt (bb);
+ if (!last || gimple_code (last) != GIMPLE_COND)
return NULL_TREE;
+ stmt = as_a <gcond *> (last);
/* To keep the things simple, we do not directly remove the conditions,
but just replace tests with 0 != 0 resp. 1 != 0. Prevent the infinite
if (integer_nonzerop (cond))
{
/* Remove false path. */
- gimple_cond_set_condition_from_tree (stmt, boolean_true_node);
+ gimple_cond_set_condition_from_tree (as_a <gcond *> (stmt),
+ boolean_true_node);
changed = true;
}
else if (integer_zerop (cond))
{
/* Remove true path. */
- gimple_cond_set_condition_from_tree (stmt, boolean_false_node);
+ gimple_cond_set_condition_from_tree (as_a <gcond *> (stmt),
+ boolean_false_node);
changed = true;
}
/* Do not unswitch too much. */
if (stmt
&& gimple_code (stmt) == GIMPLE_COND)
{
- if (gimple_cond_true_p (stmt))
+ gcond *cond_stmt = as_a <gcond *> (stmt);
+ if (gimple_cond_true_p (cond_stmt))
flags = EDGE_FALSE_VALUE;
- else if (gimple_cond_false_p (stmt))
+ else if (gimple_cond_false_p (cond_stmt))
flags = EDGE_TRUE_VALUE;
}
}
tree def, tree recip_def, int threshold)
{
tree type;
- gimple new_stmt;
+ gassign *new_stmt;
gimple_stmt_iterator gsi;
struct occurrence *occ_child;
FOR_EACH_BB_FN (bb, fun)
{
- gimple_stmt_iterator gsi;
- gimple phi;
tree def;
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
def = PHI_RESULT (phi);
if (! virtual_operand_p (def)
&& FLOAT_TYPE_P (TREE_TYPE (def)))
execute_cse_reciprocals_1 (NULL, def);
}
- for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_after_labels (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
continue;
/* Scan for a/func(b) and convert it to reciprocal a*rfunc(b). */
- for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_after_labels (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
tree fndecl;
{
tree op0, op1, ssa_target;
unsigned HOST_WIDE_INT digit;
- gimple mult_stmt;
+ gassign *mult_stmt;
if (n < POWI_TABLE_SIZE && cache[n])
return cache[n];
tree arg0, HOST_WIDE_INT n)
{
tree cache[POWI_TABLE_SIZE], result, type = TREE_TYPE (arg0);
- gimple div_stmt;
+ gassign *div_stmt;
tree target;
if (n == 0)
build_and_insert_call (gimple_stmt_iterator *gsi, location_t loc,
tree fn, tree arg)
{
- gimple call_stmt;
+ gcall *call_stmt;
tree ssa_target;
call_stmt = gimple_build_call (fn, 1, arg);
tree arg0, tree arg1)
{
tree result = make_temp_ssa_name (TREE_TYPE (arg0), NULL, name);
- gimple stmt = gimple_build_assign_with_ops (code, result, arg0, arg1);
+ gassign *stmt = gimple_build_assign_with_ops (code, result, arg0, arg1);
gimple_set_location (stmt, loc);
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
return result;
tree type, tree val)
{
tree result = make_ssa_name (type, NULL);
- gimple stmt = gimple_build_assign_with_ops (NOP_EXPR, result, val);
+ gassign *stmt = gimple_build_assign_with_ops (NOP_EXPR, result, val);
gimple_set_location (stmt, loc);
gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
return result;
if (result)
{
tree lhs = gimple_get_lhs (stmt);
- gimple new_stmt = gimple_build_assign (lhs, result);
+ gassign *new_stmt = gimple_build_assign (lhs, result);
gimple_set_location (new_stmt, loc);
unlink_stmt_vdef (stmt);
gsi_replace (&gsi, new_stmt, true);
if (real_minus_onep (arg0))
{
tree t0, t1, cond, one, minus_one;
- gimple stmt;
+ gassign *stmt;
t0 = TREE_TYPE (arg0);
t1 = TREE_TYPE (arg1);
if (result)
{
tree lhs = gimple_get_lhs (stmt);
- gimple new_stmt = gimple_build_assign (lhs, result);
+ gassign *new_stmt = gimple_build_assign (lhs, result);
gimple_set_location (new_stmt, loc);
unlink_stmt_vdef (stmt);
gsi_replace (&gsi, new_stmt, true);
if (result)
{
tree lhs = gimple_get_lhs (stmt);
- gimple new_stmt = gimple_build_assign (lhs, result);
+ gassign *new_stmt = gimple_build_assign (lhs, result);
gimple_set_location (new_stmt, loc);
unlink_stmt_vdef (stmt);
gsi_replace (&gsi, new_stmt, true);
{
tree mul_result = gimple_get_lhs (mul_stmt);
tree type = TREE_TYPE (mul_result);
- gimple use_stmt, neguse_stmt, fma_stmt;
+ gimple use_stmt, neguse_stmt;
+ gassign *fma_stmt;
use_operand_p use_p;
imm_use_iterator imm_iter;
escape, add them to the VDEF/VUSE lists for it. */
static void
-maybe_add_call_vops (struct function *fn, gimple stmt)
+maybe_add_call_vops (struct function *fn, gcall *stmt)
{
int call_flags = gimple_call_flags (stmt);
/* Scan operands in the ASM_EXPR stmt referred to in INFO. */
static void
-get_asm_stmt_operands (struct function *fn, gimple stmt)
+get_asm_stmt_operands (struct function *fn, gasm *stmt)
{
size_t i, noutputs;
const char **oconstraints;
switch (code)
{
case GIMPLE_ASM:
- get_asm_stmt_operands (fn, stmt);
+ get_asm_stmt_operands (fn, as_a <gasm *> (stmt));
break;
case GIMPLE_TRANSACTION:
case GIMPLE_CALL:
/* Add call-clobbered operands, if needed. */
- maybe_add_call_vops (fn, stmt);
+ maybe_add_call_vops (fn, as_a <gcall *> (stmt));
/* FALLTHRU */
case GIMPLE_ASSIGN:
static unsigned int tree_ssa_phiopt_worker (bool, bool);
static bool conditional_replacement (basic_block, basic_block,
- edge, edge, gimple, tree, tree);
+ edge, edge, gphi *, tree, tree);
static int value_replacement (basic_block, basic_block,
edge, edge, gimple, tree, tree);
static bool minmax_replacement (basic_block, basic_block,
/* Return the singleton PHI in the SEQ of PHIs for edges E0 and E1. */
-static gimple
+static gphi *
single_non_singleton_phi_for_edges (gimple_seq seq, edge e0, edge e1)
{
gimple_stmt_iterator i;
- gimple phi = NULL;
+ gphi *phi = NULL;
if (gimple_seq_singleton_p (seq))
- return gsi_stmt (gsi_start (seq));
+ return as_a <gphi *> (gsi_stmt (gsi_start (seq)));
for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
{
- gimple p = gsi_stmt (i);
+ gphi *p = as_a <gphi *> (gsi_stmt (i));
/* If the PHI arguments are equal then we can skip this PHI. */
if (operand_equal_for_phi_arg_p (gimple_phi_arg_def (p, e0->dest_idx),
gimple_phi_arg_def (p, e1->dest_idx)))
for (i = 0; i < n; i++)
{
- gimple cond_stmt, phi;
+ gimple cond_stmt;
+ gphi *phi;
basic_block bb1, bb2;
edge e1, e2;
tree arg0, arg1;
so try that first. */
for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
{
- phi = gsi_stmt (gsi);
+ phi = as_a <gphi *> (gsi_stmt (gsi));
arg0 = gimple_phi_arg_def (phi, e1->dest_idx);
arg1 = gimple_phi_arg_def (phi, e2->dest_idx);
if (value_replacement (bb, bb1, e1, e2, phi, arg0, arg1) == 2)
static bool
conditional_replacement (basic_block cond_bb, basic_block middle_bb,
- edge e0, edge e1, gimple phi,
+ edge e0, edge e1, gphi *phi,
tree arg0, tree arg1)
{
tree result;
- gimple stmt, new_stmt;
+ gimple stmt;
+ gassign *new_stmt;
tree cond;
gimple_stmt_iterator gsi;
edge true_edge, false_edge;
tree arg0, tree arg1)
{
tree result, type;
- gimple cond, new_stmt;
+ gcond *cond;
+ gassign *new_stmt;
edge true_edge, false_edge;
enum tree_code cmp, minmax, ass_code;
tree smaller, larger, arg_true, arg_false;
if (HONOR_NANS (TYPE_MODE (type)))
return false;
- cond = last_stmt (cond_bb);
+ cond = as_a <gcond *> (last_stmt (cond_bb));
cmp = gimple_cond_code (cond);
/* This transformation is only valid for order comparisons. Record which
gimple phi, tree arg0, tree arg1)
{
tree result;
- gimple new_stmt, cond;
+ gassign *new_stmt;
+ gimple cond;
gimple_stmt_iterator gsi;
edge true_edge, false_edge;
gimple assign;
{
gimple assign = last_and_only_stmt (middle_bb);
tree lhs, rhs, name, name2;
- gimple newphi, new_stmt;
+ gphi *newphi;
+ gassign *new_stmt;
gimple_stmt_iterator gsi;
source_location locus;
tree lhs_base, lhs, then_rhs, else_rhs, name;
source_location then_locus, else_locus;
gimple_stmt_iterator gsi;
- gimple newphi, new_stmt;
+ gphi *newphi;
+ gassign *new_stmt;
if (then_assign == NULL
|| !gimple_assign_single_p (then_assign)
{
int param_align = PARAM_VALUE (PARAM_L1_CACHE_LINE_SIZE);
unsigned param_align_bits = (unsigned) (param_align * BITS_PER_UNIT);
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
/* Walk the phis in bb3 looking for an opportunity. We are looking
for phis of two SSA names, one each of which is defined in bb1 and
bb2. */
for (gsi = gsi_start_phis (bb3); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi_stmt = gsi_stmt (gsi);
+ gphi *phi_stmt = gsi.phi ();
gimple def1, def2, defswap;
tree arg1, arg2, ref1, ref2, field1, field2, fieldswap;
tree tree_offset1, tree_offset2, tree_size2, next;
BB with the virtual operands from USE_STMT. */
static tree
-phiprop_insert_phi (basic_block bb, gimple phi, gimple use_stmt,
+phiprop_insert_phi (basic_block bb, gphi *phi, gimple use_stmt,
struct phiprop_d *phivn, size_t n)
{
tree res;
- gimple new_phi;
+ gphi *new_phi;
edge_iterator ei;
edge e;
FOR_EACH_EDGE (e, ei, bb->preds)
{
tree old_arg, new_var;
- gimple tmp;
+ gassign *tmp;
source_location locus;
old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
with aliasing issues as we are moving memory reads. */
static bool
-propagate_with_phi (basic_block bb, gimple phi, struct phiprop_d *phivn,
+propagate_with_phi (basic_block bb, gphi *phi, struct phiprop_d *phivn,
size_t n)
{
tree ptr = PHI_RESULT (phi);
struct phiprop_d *phivn;
bool did_something = false;
basic_block bb;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
unsigned i;
size_t n;
single_succ (ENTRY_BLOCK_PTR_FOR_FN (fun)));
FOR_EACH_VEC_ELT (bbs, i, bb)
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- did_something |= propagate_with_phi (bb, gsi_stmt (gsi), phivn, n);
+ did_something |= propagate_with_phi (bb, gsi.phi (), phivn, n);
if (did_something)
gsi_commit_edge_inserts ();
gimple_stmt_iterator gsi;
tree exprtype = type ? type : get_expr_type (expr);
pre_expr nameexpr;
- gimple newstmt;
+ gassign *newstmt;
switch (expr->kind)
{
edge_iterator ei;
tree type = get_expr_type (expr);
tree temp;
- gimple phi;
+ gphi *phi;
/* Make sure we aren't creating an induction variable. */
if (bb_loop_depth (block) > 0 && EDGE_COUNT (block->preds) == 2)
tree temp = make_temp_ssa_name (get_expr_type (expr),
NULL, "pretmp");
- gimple assign = gimple_build_assign (temp,
- edoubleprime->kind == CONSTANT ? PRE_EXPR_CONSTANT (edoubleprime) : PRE_EXPR_NAME (edoubleprime));
+ gassign *assign
+ = gimple_build_assign (temp,
+ edoubleprime->kind == CONSTANT ?
+ PRE_EXPR_CONSTANT (edoubleprime) :
+ PRE_EXPR_NAME (edoubleprime));
gimple_stmt_iterator gsi = gsi_after_labels (block);
gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
/* Loop until the worklist is empty. */
while (sp)
{
- gimple_stmt_iterator gsi;
gimple stmt;
basic_block dom;
}
/* Generate values for PHI nodes. */
- for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (block); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- tree result = gimple_phi_result (gsi_stmt (gsi));
+ tree result = gimple_phi_result (gsi.phi ());
/* We have no need for virtual phis, as they don't represent
actual computations. */
/* Now compute value numbers and populate value sets with all
the expressions computed in BLOCK. */
- for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (block); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
ssa_op_iter iter;
tree op;
if (gimple_call_internal_p (stmt))
continue;
- vn_reference_lookup_call (stmt, &ref, &ref1);
+ vn_reference_lookup_call (as_a <gcall *> (stmt), &ref, &ref1);
if (!ref)
continue;
return NULL_TREE;
tree res = make_temp_ssa_name (TREE_TYPE (val), NULL, "pretmp");
- gimple tem = gimple_build_assign (res,
- fold_build1 (TREE_CODE (expr),
- TREE_TYPE (expr), leader));
+ gassign *tem = gimple_build_assign (res,
+ fold_build1 (TREE_CODE (expr),
+ TREE_TYPE (expr), leader));
gsi_insert_before (gsi, tem, GSI_SAME_STMT);
VN_INFO_GET (res)->valnum = val;
void
eliminate_dom_walker::before_dom_children (basic_block b)
{
- gimple_stmt_iterator gsi;
- gimple stmt;
-
/* Mark new bb. */
el_avail_stack.safe_push (NULL_TREE);
tailmerging. Eventually we can reduce its reliance on SCCVN now
that we fully copy/constant-propagate (most) things. */
- for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
+ for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree res = PHI_RESULT (phi);
if (virtual_operand_p (res))
gsi_next (&gsi);
}
- for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (b);
+ !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
tree sprime = NULL_TREE;
- stmt = gsi_stmt (gsi);
+ gimple stmt = gsi_stmt (gsi);
tree lhs = gimple_get_lhs (stmt);
if (lhs && TREE_CODE (lhs) == SSA_NAME
&& !gimple_has_volatile_ops (stmt)
/* Visit indirect calls and turn them into direct calls if
possible using the devirtualization machinery. */
- if (is_gimple_call (stmt))
+ if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
- tree fn = gimple_call_fn (stmt);
+ tree fn = gimple_call_fn (call_stmt);
if (fn
&& flag_devirtualize
&& virtual_method_call_p (fn))
"function %s\n",
cgraph_node::get (fn)->name ());
}
- gimple_call_set_fndecl (stmt, fn);
+ gimple_call_set_fndecl (call_stmt, fn);
gimple_set_modified (stmt, true);
}
}
fold_stmt (&gsi);
stmt = gsi_stmt (gsi);
if ((gimple_code (stmt) == GIMPLE_COND
- && (gimple_cond_true_p (stmt)
- || gimple_cond_false_p (stmt)))
+ && (gimple_cond_true_p (as_a <gcond *> (stmt))
+ || gimple_cond_false_p (as_a <gcond *> (stmt))))
|| (gimple_code (stmt) == GIMPLE_SWITCH
- && TREE_CODE (gimple_switch_index (stmt)) == INTEGER_CST))
+ && TREE_CODE (gimple_switch_index (
+ as_a <gswitch *> (stmt)))
+ == INTEGER_CST))
el_todo |= TODO_cleanup_cfg;
}
/* If we removed EH side-effects from the statement, clean
edge e;
FOR_EACH_EDGE (e, ei, b->succs)
{
- for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (e->dest);
+ !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
tree arg = USE_FROM_PTR (use_p);
if (TREE_CODE (arg) != SSA_NAME
if (gimple_code (stmt) == GIMPLE_PHI)
{
- val = ssa_prop_visit_phi (stmt);
+ val = ssa_prop_visit_phi (as_a <gphi *> (stmt));
output_name = gimple_phi_result (stmt);
}
else
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
va_list ap;
- gimple new_stmt, stmt = gsi_stmt (*si_p);
+ gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));
gcc_assert (is_gimple_call (stmt));
va_start (ap, nargs);
unsigned i;
unsigned nargs = call_expr_nargs (expr);
vec<tree> args = vNULL;
- gimple new_stmt;
+ gcall *new_stmt;
if (nargs > 0)
{
values from PROP_VALUE. */
static bool
-replace_phi_args_in (gimple phi, ssa_prop_get_value_fn get_value)
+replace_phi_args_in (gphi *phi, ssa_prop_get_value_fn get_value)
{
size_t i;
bool replaced = false;
void
substitute_and_fold_dom_walker::before_dom_children (basic_block bb)
{
- gimple_stmt_iterator i;
-
/* Propagate known values into PHI nodes. */
- for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
+ for (gphi_iterator i = gsi_start_phis (bb);
+ !gsi_end_p (i);
+ gsi_next (&i))
{
- gimple phi = gsi_stmt (i);
+ gphi *phi = i.phi ();
tree res = gimple_phi_result (phi);
if (virtual_operand_p (res))
continue;
/* Propagate known values into stmts. In some case it exposes
more trivially deletable stmts to walk backward. */
- for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
+ for (gimple_stmt_iterator i = gsi_start_bb (bb);
+ !gsi_end_p (i);
+ gsi_next (&i))
{
bool did_replace;
gimple stmt = gsi_stmt (i);
if (gimple_assign_single_p (dest))
return may_propagate_copy (gimple_assign_rhs1 (dest), orig);
- else if (gimple_code (dest) == GIMPLE_SWITCH)
- return may_propagate_copy (gimple_switch_index (dest), orig);
+ else if (gswitch *dest_swtch = dyn_cast <gswitch *> (dest))
+ return may_propagate_copy (gimple_switch_index (dest_swtch), orig);
/* In other cases, the expression is not materialized, so there
is no destination to pass to may_propagate_copy. On the other
propagate_tree_value (&expr, val);
gimple_assign_set_rhs_from_tree (gsi, expr);
}
- else if (gimple_code (stmt) == GIMPLE_COND)
+ else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
{
tree lhs = NULL_TREE;
tree rhs = build_zero_cst (TREE_TYPE (val));
propagate_tree_value (&lhs, val);
- gimple_cond_set_code (stmt, NE_EXPR);
- gimple_cond_set_lhs (stmt, lhs);
- gimple_cond_set_rhs (stmt, rhs);
+ gimple_cond_set_code (cond_stmt, NE_EXPR);
+ gimple_cond_set_lhs (cond_stmt, lhs);
+ gimple_cond_set_rhs (cond_stmt, rhs);
}
else if (is_gimple_call (stmt)
&& gimple_call_lhs (stmt) != NULL_TREE)
res = update_call_from_tree (gsi, expr);
gcc_assert (res);
}
- else if (gimple_code (stmt) == GIMPLE_SWITCH)
- propagate_tree_value (gimple_switch_index_ptr (stmt), val);
+ else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
+ propagate_tree_value (gimple_switch_index_ptr (swtch_stmt), val);
else
gcc_unreachable ();
}
/* Call-back functions used by the value propagation engine. */
typedef enum ssa_prop_result (*ssa_prop_visit_stmt_fn) (gimple, edge *, tree *);
-typedef enum ssa_prop_result (*ssa_prop_visit_phi_fn) (gimple);
+typedef enum ssa_prop_result (*ssa_prop_visit_phi_fn) (gphi *);
typedef bool (*ssa_prop_fold_stmt_fn) (gimple_stmt_iterator *gsi);
typedef tree (*ssa_prop_get_value_fn) (tree);
gimple op1def = NULL, op2def = NULL;
gimple_stmt_iterator gsi;
tree op;
- gimple sum;
+ gassign *sum;
/* Create the addition statement. */
op = make_ssa_name (type, NULL);
edge_iterator ei, ei2;
edge e, e2;
gimple stmt;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
bool other_edge_seen = false;
bool is_cond;
e2 = find_edge (test_bb, *other_bb);
for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
/* If both BB and TEST_BB end with GIMPLE_COND, all PHI arguments
corresponding to BB and TEST_BB predecessor must be the same. */
if (!operand_equal_p (gimple_phi_arg_def (phi, e->dest_idx),
{
gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
var = make_ssa_name (TREE_TYPE (var), NULL);
- gimple g = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt),
- var, rhs[2], rhs[3]);
+ gassign *g = gimple_build_assign_with_ops (gimple_assign_rhs_code (stmt),
+ var, rhs[2], rhs[3]);
gimple_set_uid (g, gimple_uid (stmt));
gimple_set_visited (g, true);
gsi_insert_before (&gsi, g, GSI_SAME_STMT);
tree new_lhs = make_ssa_name (TREE_TYPE (lhs), NULL);
enum tree_code rhs_code
= gimple_assign_rhs_code (cast_stmt);
- gimple g;
+ gassign *g;
if (is_gimple_min_invariant (new_op))
{
new_op = fold_convert (TREE_TYPE (lhs), new_op);
&& bbinfo[idx].op == NULL_TREE
&& ops[bbinfo[idx].first_idx]->op != NULL_TREE)
{
- stmt = last_stmt (bb);
+ gcond *cond_stmt = as_a <gcond *> (last_stmt (bb));
if (integer_zerop (ops[bbinfo[idx].first_idx]->op))
- gimple_cond_make_false (stmt);
+ gimple_cond_make_false (cond_stmt);
else if (integer_onep (ops[bbinfo[idx].first_idx]->op))
- gimple_cond_make_true (stmt);
+ gimple_cond_make_true (cond_stmt);
else
{
- gimple_cond_set_code (stmt, NE_EXPR);
- gimple_cond_set_lhs (stmt, ops[bbinfo[idx].first_idx]->op);
- gimple_cond_set_rhs (stmt, boolean_false_node);
+ gimple_cond_set_code (cond_stmt, NE_EXPR);
+ gimple_cond_set_lhs (cond_stmt,
+ ops[bbinfo[idx].first_idx]->op);
+ gimple_cond_set_rhs (cond_stmt, boolean_false_node);
}
- update_stmt (stmt);
+ update_stmt (cond_stmt);
}
if (bb == first_bb)
break;
is_phi_for_stmt (gimple stmt, tree operand)
{
gimple def_stmt;
+ gphi *def_phi;
tree lhs;
use_operand_p arg_p;
ssa_op_iter i;
lhs = gimple_assign_lhs (stmt);
def_stmt = SSA_NAME_DEF_STMT (operand);
- if (gimple_code (def_stmt) != GIMPLE_PHI)
+ def_phi = dyn_cast <gphi *> (def_stmt);
+ if (!def_phi)
return false;
- FOR_EACH_PHI_ARG (arg_p, def_stmt, i, SSA_OP_USE)
+ FOR_EACH_PHI_ARG (arg_p, def_phi, i, SSA_OP_USE)
if (lhs == USE_FROM_PTR (arg_p))
return true;
return false;
parallel. */
static void
-rewrite_expr_tree_parallel (gimple stmt, int width,
+rewrite_expr_tree_parallel (gassign *stmt, int width,
vec<operand_entry_t> ops)
{
enum tree_code opcode = gimple_assign_rhs_code (stmt);
if (width > 1
&& ops.length () > 3)
- rewrite_expr_tree_parallel (stmt, width, ops);
+ rewrite_expr_tree_parallel (as_a <gassign *> (stmt),
+ width, ops);
else
{
/* When there are three operands left, we want
else
gcc_unreachable ();
tree lhs = gimple_assign_lhs (use_stmt);
- gimple phi = create_phi_node (lhs, merge_bb);
+ gphi *phi = create_phi_node (lhs, merge_bb);
add_phi_arg (phi, build_one_cst (TREE_TYPE (lhs)), etrue, loc);
add_phi_arg (phi, othervar, single_succ_edge (then_bb), loc);
gsi = gsi_for_stmt (use_stmt);
vn_reference_op_s's. */
static void
-copy_reference_ops_from_call (gimple call,
+copy_reference_ops_from_call (gcall *call,
vec<vn_reference_op_s> *result)
{
vn_reference_op_s temp;
this function. */
static vec<vn_reference_op_s>
-valueize_shared_reference_ops_from_call (gimple call)
+valueize_shared_reference_ops_from_call (gcall *call)
{
if (!call)
return vNULL;
}
if (valueized_anything)
{
- bool res = call_may_clobber_ref_p_1 (def_stmt, ref);
+ bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
+ ref);
for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
gimple_call_set_arg (def_stmt, i, oldargs[i]);
if (!res)
*VNRESULT if found. Populates *VR for the hashtable lookup. */
void
-vn_reference_lookup_call (gimple call, vn_reference_t *vnresult,
+vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
vn_reference_t vr)
{
if (vnresult)
of the LHS has changed as a result. */
static bool
-visit_reference_op_call (tree lhs, gimple stmt)
+visit_reference_op_call (tree lhs, gcall *stmt)
{
bool changed = false;
struct vn_reference_s vr1;
simplified. */
static tree
-simplify_unary_expression (gimple stmt)
+simplify_unary_expression (gassign *stmt)
{
tree result = NULL_TREE;
tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
/* Try to simplify RHS using equivalences and constant folding. */
static tree
-try_to_simplify (gimple stmt)
+try_to_simplify (gassign *stmt)
{
enum tree_code code = gimple_assign_rhs_code (stmt);
tree tem;
changed = visit_copy (lhs, rhs1);
goto done;
}
- simplified = try_to_simplify (stmt);
+ simplified = try_to_simplify (as_a <gassign *> (stmt));
if (simplified)
{
if (dump_file && (dump_flags & TDF_DETAILS))
else
changed = defs_to_varying (stmt);
}
- else if (is_gimple_call (stmt))
+ else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
tree lhs = gimple_call_lhs (stmt);
if (lhs && TREE_CODE (lhs) == SSA_NAME)
not alias with anything else. In which case the
information that the values are distinct are encoded
in the IL. */
- && !(gimple_call_return_flags (stmt) & ERF_NOALIAS)
+ && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
/* Only perform the following when being called from PRE
which embeds tail merging. */
&& default_vn_walk_kind == VN_WALK)))
- changed = visit_reference_op_call (lhs, stmt);
+ changed = visit_reference_op_call (lhs, call_stmt);
else
changed = defs_to_varying (stmt);
}
if (!gimple_nop_p (defstmt))
{
/* Push a new iterator. */
- if (gimple_code (defstmt) == GIMPLE_PHI)
- usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
+ if (gphi *phi = dyn_cast <gphi *> (defstmt))
+ usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
else
usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
}
break;
}
case GIMPLE_SWITCH:
- val = gimple_switch_index (stmt);
+ val = gimple_switch_index (as_a <gswitch *> (stmt));
break;
case GIMPLE_GOTO:
val = gimple_goto_dest (stmt);
vec<vn_reference_op_s> ,
vn_reference_t *, vn_lookup_kind);
tree vn_reference_lookup (tree, tree, vn_lookup_kind, vn_reference_t *);
-void vn_reference_lookup_call (gimple, vn_reference_t *, vn_reference_t);
+void vn_reference_lookup_call (gcall *, vn_reference_t *, vn_reference_t);
vn_reference_t vn_reference_insert_pieces (tree, alias_set_type, tree,
vec<vn_reference_op_s> ,
tree, unsigned int);
we return NULL. */
static basic_block
-find_bb_for_arg (gimple phi, tree def)
+find_bb_for_arg (gphi *phi, tree def)
{
size_t i;
bool foundone = false;
gimple usestmt = USE_STMT (use_p);
basic_block useblock;
- if (gimple_code (usestmt) == GIMPLE_PHI)
+ if (gphi *phi = dyn_cast <gphi *> (usestmt))
{
int idx = PHI_ARG_INDEX_FROM_USE (use_p);
- useblock = gimple_phi_arg_edge (usestmt, idx)->src;
+ useblock = gimple_phi_arg_edge (phi, idx)->src;
}
else if (is_gimple_debug (usestmt))
{
}
}
- sinkbb = find_bb_for_arg (use, DEF_FROM_PTR (def_p));
+ sinkbb = find_bb_for_arg (as_a <gphi *> (use), DEF_FROM_PTR (def_p));
/* This can happen if there are multiple uses in a PHI. */
if (!sinkbb)
void
strlen_dom_walker::before_dom_children (basic_block bb)
{
- gimple_stmt_iterator gsi;
basic_block dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
if (dombb == NULL)
stridx_to_strinfo = ((vec<strinfo, va_heap, vl_embed> *) dombb->aux);
if (stridx_to_strinfo)
{
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
if (virtual_operand_p (gimple_phi_result (phi)))
{
bitmap visited = BITMAP_ALLOC (NULL);
/* If all PHI arguments have the same string index, the PHI result
has it as well. */
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree result = gimple_phi_result (phi);
if (!virtual_operand_p (result) && POINTER_TYPE_P (TREE_TYPE (result)))
{
}
/* Attempt to optimize individual statements. */
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
if (strlen_optimize_stmt (&gsi))
gsi_next (&gsi);
/* Lookup or create the variable for the call statement CALL. */
static varinfo_t
-get_call_vi (gimple call)
+get_call_vi (gcall *call)
{
varinfo_t vi, vi2;
the uses. Returns NULL if there is nothing special about this call. */
static varinfo_t
-lookup_call_use_vi (gimple call)
+lookup_call_use_vi (gcall *call)
{
varinfo_t *slot_p = call_stmt_vars->get (call);
if (slot_p)
the clobbers. Returns NULL if there is nothing special about this call. */
static varinfo_t
-lookup_call_clobber_vi (gimple call)
+lookup_call_clobber_vi (gcall *call)
{
varinfo_t uses = lookup_call_use_vi (call);
if (!uses)
the uses. */
static varinfo_t
-get_call_use_vi (gimple call)
+get_call_use_vi (gcall *call)
{
return get_call_vi (call);
}
the clobbers. */
static varinfo_t ATTRIBUTE_UNUSED
-get_call_clobber_vi (gimple call)
+get_call_clobber_vi (gcall *call)
{
return vi_next (get_call_vi (call));
}
RHS. */
static void
-handle_rhs_call (gimple stmt, vec<ce_s> *results)
+handle_rhs_call (gcall *stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned i;
the LHS point to global and escaped variables. */
static void
-handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
+handle_lhs_call (gcall *stmt, tree lhs, int flags, vec<ce_s> rhsc,
tree fndecl)
{
auto_vec<ce_s> lhsc;
const function that returns a pointer in the statement STMT. */
static void
-handle_const_call (gimple stmt, vec<ce_s> *results)
+handle_const_call (gcall *stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned int k;
pure function in statement STMT. */
static void
-handle_pure_call (gimple stmt, vec<ce_s> *results)
+handle_pure_call (gcall *stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned i;
/* Return the varinfo for the callee of CALL. */
static varinfo_t
-get_fi_for_callee (gimple call)
+get_fi_for_callee (gcall *call)
{
tree decl, fn = gimple_call_fn (call);
was handled, otherwise false. */
static bool
-find_func_aliases_for_builtin_call (struct function *fn, gimple t)
+find_func_aliases_for_builtin_call (struct function *fn, gcall *t)
{
tree fndecl = gimple_call_fndecl (t);
auto_vec<ce_s, 2> lhsc;
/* Create constraints for the call T. */
static void
-find_func_aliases_for_call (struct function *fn, gimple t)
+find_func_aliases_for_call (struct function *fn, gcall *t)
{
tree fndecl = gimple_call_fndecl (t);
varinfo_t fi;
In non-ipa mode, we need to generate constraints for each
pointer passed by address. */
else if (is_gimple_call (t))
- find_func_aliases_for_call (fn, t);
+ find_func_aliases_for_call (fn, as_a <gcall *> (t));
/* Otherwise, just a regular assignment statement. Only care about
operations with pointer result, others are dealt with as escape
}
/* Handle escapes through return. */
else if (gimple_code (t) == GIMPLE_RETURN
- && gimple_return_retval (t) != NULL_TREE)
+ && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE)
{
+ greturn *return_stmt = as_a <greturn *> (t);
fi = NULL;
if (!in_ipa_mode
|| !(fi = get_vi_for_tree (fn->decl)))
- make_escape_constraint (gimple_return_retval (t));
+ make_escape_constraint (gimple_return_retval (return_stmt));
else if (in_ipa_mode
&& fi != NULL)
{
unsigned i;
lhs = get_function_part_constraint (fi, fi_result);
- get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
+ get_constraint_for_rhs (gimple_return_retval (return_stmt), &rhsc);
FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
}
}
/* Handle asms conservatively by adding escape constraints to everything. */
- else if (gimple_code (t) == GIMPLE_ASM)
+ else if (gasm *asm_stmt = dyn_cast <gasm *> (t))
{
unsigned i, noutputs;
const char **oconstraints;
const char *constraint;
bool allows_mem, allows_reg, is_inout;
- noutputs = gimple_asm_noutputs (t);
+ noutputs = gimple_asm_noutputs (asm_stmt);
oconstraints = XALLOCAVEC (const char *, noutputs);
for (i = 0; i < noutputs; ++i)
{
- tree link = gimple_asm_output_op (t, i);
+ tree link = gimple_asm_output_op (asm_stmt, i);
tree op = TREE_VALUE (link);
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
process_constraint (new_constraint (*lhsp, rhsc));
}
}
- for (i = 0; i < gimple_asm_ninputs (t); ++i)
+ for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
{
- tree link = gimple_asm_input_op (t, i);
+ tree link = gimple_asm_input_op (asm_stmt, i);
tree op = TREE_VALUE (link);
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
/* Account for uses in assigments and returns. */
if (gimple_assign_single_p (t)
|| (gimple_code (t) == GIMPLE_RETURN
- && gimple_return_retval (t) != NULL_TREE))
+ && gimple_return_retval (as_a <greturn *> (t)) != NULL_TREE))
{
tree rhs = (gimple_assign_single_p (t)
- ? gimple_assign_rhs1 (t) : gimple_return_retval (t));
+ ? gimple_assign_rhs1 (t)
+ : gimple_return_retval (as_a <greturn *> (t)));
tree tem = rhs;
while (handled_component_p (tem))
tem = TREE_OPERAND (tem, 0);
}
}
- if (is_gimple_call (t))
+ if (gcall *call_stmt = dyn_cast <gcall *> (t))
{
varinfo_t cfi = NULL;
tree decl = gimple_call_fndecl (t);
/* Build constraints for propagating clobbers/uses along the
callgraph edges. */
- cfi = get_fi_for_callee (t);
+ cfi = get_fi_for_callee (call_stmt);
if (cfi->id == anything_id)
{
if (gimple_vdef (t))
make_copy_constraint (first_vi_for_offset (fi, fi_uses), escaped_id);
/* Also honor the call statement use/clobber info. */
- if ((vi = lookup_call_clobber_vi (t)) != NULL)
+ if ((vi = lookup_call_clobber_vi (call_stmt)) != NULL)
make_copy_constraint (first_vi_for_offset (fi, fi_clobbers),
vi->id);
- if ((vi = lookup_call_use_vi (t)) != NULL)
+ if ((vi = lookup_call_use_vi (call_stmt)) != NULL)
make_copy_constraint (first_vi_for_offset (fi, fi_uses),
vi->id);
return;
/* Now walk all statements and build the constraint set. */
FOR_EACH_BB_FN (bb, cfun)
{
- gimple_stmt_iterator gsi;
-
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
if (! virtual_operand_p (gimple_phi_result (phi)))
find_func_aliases (cfun, phi);
}
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gcall *stmt;
struct pt_solution *pt;
- if (!is_gimple_call (stmt))
+
+ stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
+ if (!stmt)
continue;
pt = gimple_call_use_set (stmt);
/* Build constriants for the function body. */
FOR_EACH_BB_FN (bb, func)
{
- gimple_stmt_iterator gsi;
-
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
if (! virtual_operand_p (gimple_phi_result (phi)))
find_func_aliases (func, phi);
}
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple stmt = gsi_stmt (gsi);
+ gcall *stmt;
struct pt_solution *pt;
varinfo_t vi, fi;
tree decl;
- if (!is_gimple_call (stmt))
+ stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
+ if (!stmt)
continue;
/* Handle direct calls to functions with body. */
unsigned int first = bitmap_first_set_bit (e->bbs);
basic_block bb = BASIC_BLOCK_FOR_FN (cfun, first);
int size = 0;
- gimple_stmt_iterator gsi;
gimple stmt;
tree arg;
unsigned int s;
bitmap_iterator bs;
- for (gsi = gsi_start_nondebug_bb (bb);
+ for (gimple_stmt_iterator gsi = gsi_start_nondebug_bb (bb);
!gsi_end_p (gsi); gsi_next_nondebug (&gsi))
{
stmt = gsi_stmt (gsi);
EXECUTE_IF_SET_IN_BITMAP (e->succs, 0, s, bs)
{
int n = find_edge (bb, BASIC_BLOCK_FOR_FN (cfun, s))->dest_idx;
- for (gsi = gsi_start_phis (BASIC_BLOCK_FOR_FN (cfun, s)); !gsi_end_p (gsi);
+ for (gphi_iterator gsi = gsi_start_phis (BASIC_BLOCK_FOR_FN (cfun, s));
+ !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree lhs = gimple_phi_result (phi);
tree val = gimple_phi_arg_def (phi, n);
static void
release_last_vdef (basic_block bb)
{
- gimple_stmt_iterator i;
-
- for (i = gsi_last_bb (bb); !gsi_end_p (i); gsi_prev_nondebug (&i))
+ for (gimple_stmt_iterator i = gsi_last_bb (bb); !gsi_end_p (i);
+ gsi_prev_nondebug (&i))
{
gimple stmt = gsi_stmt (i);
if (gimple_vdef (stmt) == NULL_TREE)
return;
}
- for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
+ for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
+ gsi_next (&i))
{
- gimple phi = gsi_stmt (i);
+ gphi *phi = i.phi ();
tree res = gimple_phi_result (phi);
if (!virtual_operand_p (res))
same_phi_alternatives_1 (basic_block dest, edge e1, edge e2)
{
int n1 = e1->dest_idx, n2 = e2->dest_idx;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree lhs = gimple_phi_result (phi);
tree val1 = gimple_phi_arg_def (phi, n1);
tree val2 = gimple_phi_arg_def (phi, n2);
/* Returns the vop phi of BB, if any. */
-static gimple
+static gphi *
vop_phi (basic_block bb)
{
- gimple stmt;
- gimple_stmt_iterator gsi;
+ gphi *stmt;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- stmt = gsi_stmt (gsi);
+ stmt = gsi.phi ();
if (! virtual_operand_p (gimple_phi_result (stmt)))
continue;
return stmt;
edge e1, e2;
edge_iterator ei;
unsigned int i;
- gimple bb2_phi;
+ gphi *bb2_phi;
bb2_phi = vop_phi (bb2);
block1 = LOCATION_BLOCK (locus1);
locus1 = LOCATION_LOCUS (locus1);
- if (gimple_code (use_stmt) == GIMPLE_PHI)
- locus2 = gimple_phi_arg_location (use_stmt,
+ if (gphi *phi = dyn_cast <gphi *> (use_stmt))
+ locus2 = gimple_phi_arg_location (phi,
PHI_ARG_INDEX_FROM_USE (use_p));
else
locus2 = gimple_location (use_stmt);
static bool
record_temporary_equivalences_from_phis (edge e, vec<tree> *stack)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
/* Each PHI creates a temporary equivalence, record them.
These are context sensitive equivalences and will be removed
later. */
for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
tree dst = gimple_phi_result (phi);
/* If the statement has volatile operands, then we assume we
can not thread through this block. This is overly
conservative in some ways. */
- if (gimple_code (stmt) == GIMPLE_ASM && gimple_asm_volatile_p (stmt))
+ if (gimple_code (stmt) == GIMPLE_ASM
+ && gimple_asm_volatile_p (as_a <gasm *> (stmt)))
return NULL;
/* If duplicating this block is going to cause too much code
/* Try to fold/lookup the new expression. Inserting the
expression into the hash table is unlikely to help. */
if (is_gimple_call (stmt))
- cached_lhs = fold_call_stmt (stmt, false);
+ cached_lhs = fold_call_stmt (as_a <gcall *> (stmt), false);
else
cached_lhs = fold_assignment_stmt (stmt);
static tree
simplify_control_stmt_condition (edge e,
gimple stmt,
- gimple dummy_cond,
+ gcond *dummy_cond,
tree (*simplify) (gimple, gimple),
bool handle_dominating_asserts)
{
}
if (code == GIMPLE_SWITCH)
- cond = gimple_switch_index (stmt);
+ cond = gimple_switch_index (as_a <gswitch *> (stmt));
else if (code == GIMPLE_GOTO)
cond = gimple_goto_dest (stmt);
else
try and simplify the condition at the end of TAKEN_EDGE->dest. */
static bool
thread_around_empty_blocks (edge taken_edge,
- gimple dummy_cond,
+ gcond *dummy_cond,
bool handle_dominating_asserts,
tree (*simplify) (gimple, gimple),
bitmap visited,
static int
thread_through_normal_block (edge e,
- gimple dummy_cond,
+ gcond *dummy_cond,
bool handle_dominating_asserts,
vec<tree> *stack,
tree (*simplify) (gimple, gimple),
SIMPLIFY is a pass-specific function used to simplify statements. */
void
-thread_across_edge (gimple dummy_cond,
+thread_across_edge (gcond *dummy_cond,
edge e,
bool handle_dominating_asserts,
vec<tree> *stack,
extern void threadedge_finalize_values (void);
extern bool potentially_threadable_block (basic_block);
extern void propagate_threaded_block_debug_into (basic_block, basic_block);
-extern void thread_across_edge (gimple, edge, bool,
+extern void thread_across_edge (gcond *, edge, bool,
vec<tree> *, tree (*) (gimple, gimple));
#endif /* GCC_TREE_SSA_THREADEDGE_H */
int tgt_idx = tgt_e->dest_idx;
/* Iterate over each PHI in e->dest. */
- for (gimple_stmt_iterator gsi = gsi_start_phis (src_e->dest),
- gsi2 = gsi_start_phis (tgt_e->dest);
+ for (gphi_iterator gsi = gsi_start_phis (src_e->dest),
+ gsi2 = gsi_start_phis (tgt_e->dest);
!gsi_end_p (gsi);
gsi_next (&gsi), gsi_next (&gsi2))
{
- gimple src_phi = gsi_stmt (gsi);
- gimple dest_phi = gsi_stmt (gsi2);
+ gphi *src_phi = gsi.phi ();
+ gphi *dest_phi = gsi2.phi ();
tree val = gimple_phi_arg_def (src_phi, src_idx);
source_location locus = gimple_phi_arg_location (src_phi, src_idx);
basic_block bb, int idx, source_location *locus)
{
tree arg;
- gimple def_phi;
+ gphi *def_phi;
basic_block def_bb;
if (path == NULL || idx == 0)
return def;
- def_phi = SSA_NAME_DEF_STMT (def);
- if (gimple_code (def_phi) != GIMPLE_PHI)
+ def_phi = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (def));
+ if (!def_phi)
return def;
def_bb = gimple_bb (def_phi);
copy_phi_args (basic_block bb, edge src_e, edge tgt_e,
vec<jump_thread_edge *> *path, int idx)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
int src_indx = src_e->dest_idx;
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree def = gimple_phi_arg_def (phi, src_indx);
source_location locus = gimple_phi_arg_location (phi, src_indx);
static bool
phi_args_equal_on_edges (edge e1, edge e2)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
int indx1 = e1->dest_idx;
int indx2 = e2->dest_idx;
for (gsi = gsi_start_phis (e1->dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
if (!operand_equal_p (gimple_phi_arg_def (phi, indx1),
gimple_phi_arg_def (phi, indx2), 0))
target block creates an equivalence. */
else if (gimple_code (stmt) == GIMPLE_SWITCH)
{
- tree cond = gimple_switch_index (stmt);
+ gswitch *switch_stmt = as_a <gswitch *> (stmt);
+ tree cond = gimple_switch_index (switch_stmt);
if (TREE_CODE (cond) == SSA_NAME
&& !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (cond))
{
- int i, n_labels = gimple_switch_num_labels (stmt);
+ int i, n_labels = gimple_switch_num_labels (switch_stmt);
tree *info = XCNEWVEC (tree, last_basic_block_for_fn (cfun));
/* Walk over the case label vector. Record blocks
a single value. */
for (i = 0; i < n_labels; i++)
{
- tree label = gimple_switch_label (stmt, i);
+ tree label = gimple_switch_label (switch_stmt, i);
basic_block bb = label_to_block (CASE_LABEL (label));
if (CASE_HIGH (label)
that have empty (or possibly empty) definitions. */
static unsigned
-compute_uninit_opnds_pos (gimple phi)
+compute_uninit_opnds_pos (gphi *phi)
{
size_t i, n;
unsigned uninit_opnds = 0;
VISITED_PHIS is a pointer set for detecting cycles. */
static void
-collect_phi_def_edges (gimple phi, basic_block cd_root,
+collect_phi_def_edges (gphi *phi, basic_block cd_root,
vec<edge> *edges,
hash_set<gimple> *visited_phis)
{
if (gimple_code (def) == GIMPLE_PHI
&& dominated_by_p (CDI_DOMINATORS,
gimple_bb (def), cd_root))
- collect_phi_def_edges (def, cd_root, edges,
+ collect_phi_def_edges (as_a <gphi *> (def), cd_root, edges,
visited_phis);
else if (!uninit_undefined_value_p (opnd))
{
composite predicates pointed to by PREDS. */
static bool
-find_def_preds (pred_chain_union *preds, gimple phi)
+find_def_preds (pred_chain_union *preds, gphi *phi)
{
size_t num_chains = 0, i, n;
vec<edge> dep_chains[MAX_NUM_CHAINS];
static bool
is_use_properly_guarded (gimple use_stmt,
basic_block use_bb,
- gimple phi,
+ gphi *phi,
unsigned uninit_opnds,
- hash_set<gimple> *visited_phis);
+ hash_set<gphi *> *visited_phis);
/* Returns true if all uninitialized opnds are pruned. Returns false
otherwise. PHI is the phi node with uninitialized operands,
*/
static bool
-prune_uninit_phi_opnds_in_unrealizable_paths (gimple phi,
+prune_uninit_phi_opnds_in_unrealizable_paths (gphi *phi,
unsigned uninit_opnds,
- gimple flag_def,
+ gphi *flag_def,
tree boundary_cst,
enum tree_code cmp_code,
- hash_set<gimple> *visited_phis,
+ hash_set<gphi *> *visited_phis,
bitmap *visited_flag_phis)
{
unsigned i;
flag_arg = gimple_phi_arg_def (flag_def, i);
if (!is_gimple_constant (flag_arg))
{
- gimple flag_arg_def, phi_arg_def;
+ gphi *flag_arg_def, *phi_arg_def;
tree phi_arg;
unsigned uninit_opnds_arg_phi;
if (TREE_CODE (flag_arg) != SSA_NAME)
return false;
- flag_arg_def = SSA_NAME_DEF_STMT (flag_arg);
- if (gimple_code (flag_arg_def) != GIMPLE_PHI)
+ flag_arg_def = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (flag_arg));
+ if (!flag_arg_def)
return false;
phi_arg = gimple_phi_arg_def (phi, i);
if (TREE_CODE (phi_arg) != SSA_NAME)
return false;
- phi_arg_def = SSA_NAME_DEF_STMT (phi_arg);
- if (gimple_code (phi_arg_def) != GIMPLE_PHI)
+ phi_arg_def = dyn_cast <gphi *> (SSA_NAME_DEF_STMT (phi_arg));
+ if (!phi_arg_def)
return false;
if (gimple_bb (phi_arg_def) != gimple_bb (flag_arg_def))
opnd = gimple_phi_arg_def (phi, i);
opnd_def = SSA_NAME_DEF_STMT (opnd);
- if (gimple_code (opnd_def) == GIMPLE_PHI)
+ if (gphi *opnd_def_phi = dyn_cast <gphi *> (opnd_def))
{
edge opnd_edge;
unsigned uninit_opnds2
- = compute_uninit_opnds_pos (opnd_def);
+ = compute_uninit_opnds_pos (opnd_def_phi);
gcc_assert (!MASK_EMPTY (uninit_opnds2));
opnd_edge = gimple_phi_arg_edge (phi, i);
if (!is_use_properly_guarded (phi,
opnd_edge->src,
- opnd_def,
+ opnd_def_phi,
uninit_opnds2,
visited_phis))
return false;
static bool
use_pred_not_overlap_with_undef_path_pred (pred_chain_union preds,
- gimple phi, unsigned uninit_opnds,
- hash_set<gimple> *visited_phis)
+ gphi *phi, unsigned uninit_opnds,
+ hash_set<gphi *> *visited_phis)
{
unsigned int i, n;
gimple flag_def = 0;
all_pruned = prune_uninit_phi_opnds_in_unrealizable_paths (phi,
uninit_opnds,
- flag_def,
+ as_a <gphi *> (flag_def),
boundary_cst,
cmp_code,
visited_phis,
static bool
is_use_properly_guarded (gimple use_stmt,
basic_block use_bb,
- gimple phi,
+ gphi *phi,
unsigned uninit_opnds,
- hash_set<gimple> *visited_phis)
+ hash_set<gphi *> *visited_phis)
{
basic_block phi_bb;
pred_chain_union preds = vNULL;
if the new phi is already in the worklist. */
static gimple
-find_uninit_use (gimple phi, unsigned uninit_opnds,
- vec<gimple> *worklist,
- hash_set<gimple> *added_to_worklist)
+find_uninit_use (gphi *phi, unsigned uninit_opnds,
+ vec<gphi *> *worklist,
+ hash_set<gphi *> *added_to_worklist)
{
tree phi_result;
use_operand_p use_p;
if (is_gimple_debug (use_stmt))
continue;
- if (gimple_code (use_stmt) == GIMPLE_PHI)
- use_bb = gimple_phi_arg_edge (use_stmt,
+ if (gphi *use_phi = dyn_cast <gphi *> (use_stmt))
+ use_bb = gimple_phi_arg_edge (use_phi,
PHI_ARG_INDEX_FROM_USE (use_p))->src;
else
use_bb = gimple_bb (use_stmt);
- hash_set<gimple> visited_phis;
+ hash_set<gphi *> visited_phis;
if (is_use_properly_guarded (use_stmt, use_bb, phi, uninit_opnds,
&visited_phis))
continue;
/* Found a phi use that is not guarded,
add the phi to the worklist. */
- if (!added_to_worklist->add (use_stmt))
+ if (!added_to_worklist->add (as_a <gphi *> (use_stmt)))
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
print_gimple_stmt (dump_file, use_stmt, 0, 0);
}
- worklist->safe_push (use_stmt);
+ worklist->safe_push (as_a <gphi *> (use_stmt));
possibly_undefined_names->add (phi_result);
}
}
a pointer set tracking if the new phi is added to the worklist or not. */
static void
-warn_uninitialized_phi (gimple phi, vec<gimple> *worklist,
- hash_set<gimple> *added_to_worklist)
+warn_uninitialized_phi (gphi *phi, vec<gphi *> *worklist,
+ hash_set<gphi *> *added_to_worklist)
{
unsigned uninit_opnds;
gimple uninit_use_stmt = 0;
pass_late_warn_uninitialized::execute (function *fun)
{
basic_block bb;
- gimple_stmt_iterator gsi;
- vec<gimple> worklist = vNULL;
+ gphi_iterator gsi;
+ vec<gphi *> worklist = vNULL;
calculate_dominance_info (CDI_DOMINATORS);
calculate_dominance_info (CDI_POST_DOMINATORS);
timevar_push (TV_TREE_UNINIT);
possibly_undefined_names = new hash_set<tree>;
- hash_set<gimple> added_to_worklist;
+ hash_set<gphi *> added_to_worklist;
/* Initialize worklist */
FOR_EACH_BB_FN (bb, fun)
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
size_t n, i;
n = gimple_phi_num_args (phi);
while (worklist.length () != 0)
{
- gimple cur_phi = 0;
+ gphi *cur_phi = 0;
cur_phi = worklist.pop ();
warn_uninitialized_phi (cur_phi, &worklist, &added_to_worklist);
}
edge
ssa_redirect_edge (edge e, basic_block dest)
{
- gimple_stmt_iterator gsi;
- gimple phi;
+ gphi_iterator gsi;
+ gphi *phi;
redirect_edge_var_map_clear (e);
tree def;
source_location locus ;
- phi = gsi_stmt (gsi);
+ phi = gsi.phi ();
def = gimple_phi_arg_def (phi, e->dest_idx);
locus = gimple_phi_arg_location (phi, e->dest_idx);
void
flush_pending_stmts (edge e)
{
- gimple phi;
+ gphi *phi;
edge_var_map *vm;
int i;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
if (!v)
{
tree def;
- phi = gsi_stmt (gsi);
+ phi = gsi.phi ();
def = redirect_edge_var_map_def (vm);
add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
}
we'll have to drop debug information. */
if (gimple_code (def_stmt) == GIMPLE_PHI)
{
- value = degenerate_phi_result (def_stmt);
+ value = degenerate_phi_result (as_a <gphi *> (def_stmt));
if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
value = NULL;
/* error_mark_node is what fixup_noreturn_call changes PHI arguments
;
else
{
- gimple def_temp;
+ gdebug *def_temp;
tree vexpr = make_node (DEBUG_EXPR_DECL);
def_temp = gimple_build_debug_bind (vexpr,
definition of SSA_NAME. */
static bool
-verify_phi_args (gimple phi, basic_block bb, basic_block *definition_block)
+verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
edge e;
bool err = false;
FOR_EACH_BB_FN (bb, cfun)
{
edge e;
- gimple phi;
edge_iterator ei;
- gimple_stmt_iterator gsi;
/* Make sure that all edges have a clear 'aux' field. */
FOR_EACH_EDGE (e, ei, bb->preds)
}
/* Verify the arguments for every PHI node in the block. */
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
if (verify_phi_args (phi, bb, definition_block))
goto err;
}
/* Now verify all the uses and vuses in every statement of the block. */
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
use_operand_p use_p;
void
execute_update_addresses_taken (void)
{
- gimple_stmt_iterator gsi;
basic_block bb;
bitmap addresses_taken = BITMAP_ALLOC (NULL);
bitmap not_reg_needs = BITMAP_ALLOC (NULL);
the function body. */
FOR_EACH_BB_FN (bb, cfun)
{
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
enum gimple_code code = gimple_code (stmt);
else if (code == GIMPLE_ASM)
{
- for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
+ gasm *asm_stmt = as_a <gasm *> (stmt);
+ for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
{
- tree link = gimple_asm_output_op (stmt, i);
+ tree link = gimple_asm_output_op (asm_stmt, i);
tree lhs = TREE_VALUE (link);
if (TREE_CODE (lhs) != SSA_NAME)
{
bitmap_set_bit (not_reg_needs, DECL_UID (decl));
}
}
- for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
+ for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
{
- tree link = gimple_asm_input_op (stmt, i);
+ tree link = gimple_asm_input_op (asm_stmt, i);
if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
bitmap_set_bit (not_reg_needs, DECL_UID (decl));
}
}
}
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
size_t i;
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
if (!bitmap_empty_p (suitable_for_renaming))
{
FOR_EACH_BB_FN (bb, cfun)
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
{
gimple stmt = gsi_stmt (gsi);
else if (gimple_code (stmt) == GIMPLE_ASM)
{
+ gasm *asm_stmt = as_a <gasm *> (stmt);
unsigned i;
- for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
+ for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
{
- tree link = gimple_asm_output_op (stmt, i);
+ tree link = gimple_asm_output_op (asm_stmt, i);
maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
suitable_for_renaming);
}
- for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
+ for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
{
- tree link = gimple_asm_input_op (stmt, i);
+ tree link = gimple_asm_input_op (asm_stmt, i);
maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
suitable_for_renaming);
}
FOR_EACH_BB_FN (bb, cfun)
{
- gimple_stmt_iterator i;
-
- for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
+ for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
+ gsi_next (&i))
{
tree lhs;
use_operand_p uop;
ssa_op_iter soi;
- gimple phi = gsi_stmt (i);
+ gphi *phi = i.phi ();
lhs = PHI_RESULT (phi);
if (virtual_operand_p (lhs)
}
}
- for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
+ for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
+ gsi_next (&i))
{
gimple stmt = gsi_stmt (i);
tree use;
FOR_EACH_BB_FN (bb, fun)
{
- gimple_stmt_iterator i;
-
si.compute_sizes = -1;
si.bb = bb;
use_operand_p uop;
ssa_op_iter soi;
- for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
+ for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
+ gsi_next (&i))
{
- gimple phi = gsi_stmt (i);
+ gphi *phi = i.phi ();
lhs = PHI_RESULT (phi);
if (virtual_operand_p (lhs))
}
}
- for (i = gsi_start_bb (bb);
+ for (gimple_stmt_iterator i = gsi_start_bb (bb);
!gsi_end_p (i) && !va_list_escapes;
gsi_next (&i))
{
bool update_dominators)
{
tree tmp;
- gimple cond_stmt;
+ gcond *cond_stmt;
edge e_false;
basic_block new_bb, split_bb = gsi_bb (*gsip);
bool dominated_e_true = false;
node targets. */
static void
-emit_case_bit_tests (gimple swtch, tree index_expr,
+emit_case_bit_tests (gswitch *swtch, tree index_expr,
tree minval, tree range, tree maxval)
{
struct case_bit_test test[MAX_CASE_BIT_TESTS];
unsigned int branch_num = gimple_switch_num_labels (swtch);
gimple_stmt_iterator gsi;
- gimple shift_stmt;
+ gassign *shift_stmt;
tree idx, tmp, csui;
tree word_type_node = lang_hooks.types.type_for_mode (word_mode, 1);
/* Collect information about GIMPLE_SWITCH statement SWTCH into INFO. */
static void
-collect_switch_conv_info (gimple swtch, struct switch_conv_info *info)
+collect_switch_conv_info (gswitch *swtch, struct switch_conv_info *info)
{
unsigned int branch_num = gimple_switch_num_labels (swtch);
tree min_case, max_case;
static bool
check_final_bb (struct switch_conv_info *info)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
info->phi_count = 0;
for (gsi = gsi_start_phis (info->final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
unsigned int i;
info->phi_count++;
static void
gather_default_values (tree default_case, struct switch_conv_info *info)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
basic_block bb = label_to_block (CASE_LABEL (default_case));
edge e;
int i = 0;
for (gsi = gsi_start_phis (info->final_bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
gcc_assert (val);
info->default_values[i++] = val;
order of phi nodes. SWTCH is the switch statement being converted. */
static void
-build_constructors (gimple swtch, struct switch_conv_info *info)
+build_constructors (gswitch *swtch, struct switch_conv_info *info)
{
unsigned i, branch_num = gimple_switch_num_labels (swtch);
tree pos = info->range_min;
basic_block bb = label_to_block (CASE_LABEL (cs));
edge e;
tree high;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
int j;
if (bb == info->final_bb)
for (gsi = gsi_start_phis (info->final_bb);
!gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree val = PHI_ARG_DEF_FROM_EDGE (phi, e);
tree low = CASE_LOW (cs);
pos = CASE_LOW (cs);
all the constants. */
static tree
-array_value_type (gimple swtch, tree type, int num,
+array_value_type (gswitch *swtch, tree type, int num,
struct switch_conv_info *info)
{
unsigned int i, len = vec_safe_length (info->constructors[num]);
new array. */
static void
-build_one_array (gimple swtch, int num, tree arr_index_type, gimple phi,
- tree tidx, struct switch_conv_info *info)
+build_one_array (gswitch *swtch, int num, tree arr_index_type,
+ gphi *phi, tree tidx, struct switch_conv_info *info)
{
tree name, cst;
gimple load;
them. */
static void
-build_arrays (gimple swtch, struct switch_conv_info *info)
+build_arrays (gswitch *swtch, struct switch_conv_info *info)
{
tree arr_index_type;
tree tidx, sub, utype;
gimple stmt;
gimple_stmt_iterator gsi;
+ gphi_iterator gpi;
int i;
location_t loc = gimple_location (swtch);
update_stmt (stmt);
info->arr_ref_first = stmt;
- for (gsi = gsi_start_phis (info->final_bb), i = 0;
- !gsi_end_p (gsi); gsi_next (&gsi), i++)
- build_one_array (swtch, i, arr_index_type, gsi_stmt (gsi), tidx, info);
+ for (gpi = gsi_start_phis (info->final_bb), i = 0;
+ !gsi_end_p (gpi); gsi_next (&gpi), i++)
+ build_one_array (swtch, i, arr_index_type, gpi.phi (), tidx, info);
}
/* Generates and appropriately inserts loads of default values at the position
given by BSI. Returns the last inserted statement. */
-static gimple
+static gassign *
gen_def_assigns (gimple_stmt_iterator *gsi, struct switch_conv_info *info)
{
int i;
- gimple assign = NULL;
+ gassign *assign = NULL;
for (i = 0; i < info->phi_count; i++)
{
fix_phi_nodes (edge e1f, edge e2f, basic_block bbf,
struct switch_conv_info *info)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
int i;
for (gsi = gsi_start_phis (bbf), i = 0;
!gsi_end_p (gsi); gsi_next (&gsi), i++)
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
add_phi_arg (phi, info->target_inbound_names[i], e1f, UNKNOWN_LOCATION);
add_phi_arg (phi, info->target_outbound_names[i], e2f, UNKNOWN_LOCATION);
}
*/
static void
-gen_inbound_check (gimple swtch, struct switch_conv_info *info)
+gen_inbound_check (gswitch *swtch, struct switch_conv_info *info)
{
tree label_decl1 = create_artificial_label (UNKNOWN_LOCATION);
tree label_decl2 = create_artificial_label (UNKNOWN_LOCATION);
tree label_decl3 = create_artificial_label (UNKNOWN_LOCATION);
- gimple label1, label2, label3;
+ glabel *label1, *label2, *label3;
tree utype, tidx;
tree bound;
- gimple cond_stmt;
+ gcond *cond_stmt;
- gimple last_assign;
+ gassign *last_assign;
gimple_stmt_iterator gsi;
basic_block bb0, bb1, bb2, bbf, bbd;
edge e01, e02, e21, e1d, e1f, e2f;
conversion failed. */
static const char *
-process_switch (gimple swtch)
+process_switch (gswitch *swtch)
{
struct switch_conv_info info;
putc ('\n', dump_file);
}
- failure_reason = process_switch (stmt);
+ failure_reason = process_switch (as_a <gswitch *> (stmt));
if (! failure_reason)
{
if (dump_file)
additive factor for the real return value. */
static bool
-process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
+process_assignment (gassign *stmt, gimple_stmt_iterator call, tree *m,
tree *a, tree *ass_var)
{
tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
propagate_through_phis (tree var, edge e)
{
basic_block dest = e->dest;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
return PHI_RESULT (phi);
}
find_tail_calls (basic_block bb, struct tailcall **ret)
{
tree ass_var = NULL_TREE, ret_var, func, param;
- gimple stmt, call = NULL;
+ gimple stmt;
+ gcall *call = NULL;
gimple_stmt_iterator gsi, agsi;
bool tail_recursion;
struct tailcall *nw;
/* Check for a call. */
if (is_gimple_call (stmt))
{
- call = stmt;
- ass_var = gimple_call_lhs (stmt);
+ call = as_a <gcall *> (stmt);
+ ass_var = gimple_call_lhs (call);
break;
}
return;
/* This is a gimple assign. */
- if (! process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
+ if (! process_assignment (as_a <gassign *> (stmt), gsi, &tmp_m,
+ &tmp_a, &ass_var))
return;
if (tmp_a)
}
/* See if this is a tail call we can handle. */
- ret_var = gimple_return_retval (stmt);
+ ret_var = gimple_return_retval (as_a <greturn *> (stmt));
/* We may proceed if there either is no return value, or the return value
is identical to the call's return. */
static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
- if (PHI_RESULT (gsi_stmt (gsi)) == var)
+ if (PHI_RESULT (gsi.phi ()) == var)
break;
gcc_assert (!gsi_end_p (gsi));
- add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
+ add_phi_arg (gsi.phi (), phi_arg, e, UNKNOWN_LOCATION);
}
/* Creates a GIMPLE statement which computes the operation specified by
tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
tree result = make_temp_ssa_name (ret_type, NULL, label);
- gimple stmt;
+ gassign *stmt;
if (POINTER_TYPE_P (ret_type))
{
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
gimple_stmt_iterator gsi)
{
- gimple stmt;
+ gassign *stmt;
tree var = copy_ssa_name (acc, NULL);
if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
stmt = gimple_build_assign_with_ops (code, var, acc, op1);
adjust_return_value (basic_block bb, tree m, tree a)
{
tree retval;
- gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
+ greturn *ret_stmt = as_a <greturn *> (gimple_seq_last_stmt (bb_seq (bb)));
gimple_stmt_iterator gsi = gsi_last_bb (bb);
gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);
size_t idx;
basic_block bb, first;
edge e;
- gimple phi;
+ gphi *phi;
+ gphi_iterator gpi;
gimple_stmt_iterator gsi;
gimple orig_stmt;
/* Add phi node entries for arguments. The ordering of the phi nodes should
be the same as the ordering of the arguments. */
for (param = DECL_ARGUMENTS (current_function_decl),
- idx = 0, gsi = gsi_start_phis (first);
+ idx = 0, gpi = gsi_start_phis (first);
param;
param = DECL_CHAIN (param), idx++)
{
continue;
arg = gimple_call_arg (stmt, idx);
- phi = gsi_stmt (gsi);
+ phi = gpi.phi ();
gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));
add_phi_arg (phi, arg, e, gimple_location (stmt));
- gsi_next (&gsi);
+ gsi_next (&gpi);
}
/* Update the values of accumulators. */
if (opt_tailcalls)
{
- gimple stmt = gsi_stmt (t->call_gsi);
+ gcall *stmt = as_a <gcall *> (gsi_stmt (t->call_gsi));
gimple_call_set_tail (stmt, true);
cfun->tail_call_marked = true;
ret_type = sizetype;
tree tmp = make_temp_ssa_name (ret_type, NULL, label);
- gimple phi;
+ gphi *phi;
phi = create_phi_node (tmp, bb);
/* RET_TYPE can be a float when -ffast-maths is enabled. */
{
tree name = ssa_default_def (cfun, param);
tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
- gimple phi;
+ gphi *phi;
set_ssa_default_def (cfun, param, new_name);
phi = create_phi_node (name, first);
struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
tree update = TYPE_SIZE_UNIT (vectype);
- gimple incr_stmt;
+ gassign *incr_stmt;
ssa_op_iter iter;
use_operand_p use_p;
tree new_dataref_ptr;
gimple inc;
tree ptr;
tree data_ref;
- gimple new_stmt;
basic_block new_bb;
tree msq_init = NULL_TREE;
tree new_temp;
- gimple phi_stmt;
+ gphi *phi_stmt;
tree msq = NULL_TREE;
gimple_seq stmts = NULL;
bool inv_p;
if (alignment_support_scheme == dr_explicit_realign_optimized)
{
/* Create msq_init = *(floor(p1)) in the loop preheader */
+ gassign *new_stmt;
gcc_assert (!compute_in_loop);
vec_dest = vect_create_destination_var (scalar_dest, vectype);
if (targetm.vectorize.builtin_mask_for_load)
{
+ gcall *new_stmt;
tree builtin_decl;
/* Compute INIT_ADDR - the initial addressed accessed by this memref. */
static void
expand_vector_condition (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gassign *stmt = as_a <gassign *> (gsi_stmt (*gsi));
tree type = gimple_expr_type (stmt);
tree a = gimple_assign_rhs1 (stmt);
tree a1 = a;
static tree
expand_vector_operation (gimple_stmt_iterator *gsi, tree type, tree compute_type,
- gimple assign, enum tree_code code)
+ gassign *assign, enum tree_code code)
{
machine_mode compute_mode = TYPE_MODE (compute_type);
static void
optimize_vector_constructor (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gassign *stmt = as_a <gassign *> (gsi_stmt (*gsi));
tree lhs = gimple_assign_lhs (stmt);
tree rhs = gimple_assign_rhs1 (stmt);
tree type = TREE_TYPE (rhs);
static void
lower_vec_perm (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gassign *stmt = as_a <gassign *> (gsi_stmt (*gsi));
tree mask = gimple_assign_rhs3 (stmt);
tree vec0 = gimple_assign_rhs1 (stmt);
tree vec1 = gimple_assign_rhs2 (stmt);
static void
expand_vector_operations_1 (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
tree lhs, rhs1, rhs2 = NULL, type, compute_type = NULL_TREE;
enum tree_code code;
optab op = unknown_optab;
enum gimple_rhs_class rhs_class;
tree new_rhs;
- if (gimple_code (stmt) != GIMPLE_ASSIGN)
+ /* Only consider code == GIMPLE_ASSIGN. */
+ gassign *stmt = dyn_cast <gassign *> (gsi_stmt (*gsi));
+ if (!stmt)
return;
code = gimple_assign_rhs_code (stmt);
static void
rename_variables_in_bb (basic_block bb)
{
- gimple_stmt_iterator gsi;
gimple stmt;
use_operand_p use_p;
ssa_op_iter iter;
edge_iterator ei;
struct loop *loop = bb->loop_father;
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
{
stmt = gsi_stmt (gsi);
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
{
if (!flow_bb_inside_loop_p (loop, e->src))
continue;
- for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- rename_use_op (PHI_ARG_DEF_PTR_FROM_EDGE (gsi_stmt (gsi), e));
+ for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
+ gsi_next (&gsi))
+ rename_use_op (PHI_ARG_DEF_PTR_FROM_EDGE (gsi.phi (), e));
}
}
slpeel_update_phi_nodes_for_guard1 (edge guard_edge, struct loop *loop,
bool is_new_loop, basic_block *new_exit_bb)
{
- gimple orig_phi, new_phi;
- gimple update_phi, update_phi2;
+ gphi *orig_phi, *new_phi;
+ gphi *update_phi, *update_phi2;
tree guard_arg, loop_arg;
basic_block new_merge_bb = guard_edge->dest;
edge e = EDGE_SUCC (new_merge_bb, 0);
basic_block orig_bb = loop->header;
edge new_exit_e;
tree current_new_name;
- gimple_stmt_iterator gsi_orig, gsi_update;
+ gphi_iterator gsi_orig, gsi_update;
/* Create new bb between loop and new_merge_bb. */
*new_exit_bb = split_edge (single_exit (loop));
{
source_location loop_locus, guard_locus;
tree new_res;
- orig_phi = gsi_stmt (gsi_orig);
- update_phi = gsi_stmt (gsi_update);
+ orig_phi = gsi_orig.phi ();
+ update_phi = gsi_update.phi ();
/** 1. Handle new-merge-point phis **/
slpeel_update_phi_nodes_for_guard2 (edge guard_edge, struct loop *loop,
bool is_new_loop, basic_block *new_exit_bb)
{
- gimple orig_phi, new_phi;
- gimple update_phi, update_phi2;
+ gphi *orig_phi, *new_phi;
+ gphi *update_phi, *update_phi2;
tree guard_arg, loop_arg;
basic_block new_merge_bb = guard_edge->dest;
edge e = EDGE_SUCC (new_merge_bb, 0);
tree orig_def, orig_def_new_name;
tree new_name, new_name2;
tree arg;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
/* Create new bb between loop and new_merge_bb. */
*new_exit_bb = split_edge (single_exit (loop));
for (gsi = gsi_start_phis (update_bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
tree new_res;
- update_phi = gsi_stmt (gsi);
+ update_phi = gsi.phi ();
orig_phi = update_phi;
orig_def = PHI_ARG_DEF_FROM_EDGE (orig_phi, e);
/* This loop-closed-phi actually doesn't represent a use
slpeel_make_loop_iterate_ntimes (struct loop *loop, tree niters)
{
tree indx_before_incr, indx_after_incr;
- gimple cond_stmt;
- gimple orig_cond;
+ gcond *cond_stmt;
+ gcond *orig_cond;
edge exit_edge = single_exit (loop);
gimple_stmt_iterator loop_cond_gsi;
gimple_stmt_iterator incr_gsi;
{
if (scalar_loop != loop)
{
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
new_exit = redirect_edge_and_branch (new_exit, exit_dest);
for (gsi = gsi_start_phis (exit_dest); !gsi_end_p (gsi);
gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree orig_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
location_t orig_locus
= gimple_phi_arg_location_from_edge (phi, e);
{
/* Update new_loop->header PHIs, so that on the preheader
edge they are the ones from loop rather than scalar_loop. */
- gimple_stmt_iterator gsi_orig, gsi_new;
+ gphi_iterator gsi_orig, gsi_new;
edge orig_e = loop_preheader_edge (loop);
edge new_e = loop_preheader_edge (new_loop);
!gsi_end_p (gsi_orig) && !gsi_end_p (gsi_new);
gsi_next (&gsi_orig), gsi_next (&gsi_new))
{
- gimple orig_phi = gsi_stmt (gsi_orig);
- gimple new_phi = gsi_stmt (gsi_new);
+ gphi *orig_phi = gsi_orig.phi ();
+ gphi *new_phi = gsi_new.phi ();
tree orig_arg = PHI_ARG_DEF_FROM_EDGE (orig_phi, orig_e);
location_t orig_locus
= gimple_phi_arg_location_from_edge (orig_phi, orig_e);
{
gimple_stmt_iterator gsi;
edge new_e, enter_e;
- gimple cond_stmt;
+ gcond *cond_stmt;
gimple_seq gimplify_stmt_list = NULL;
enter_e = EDGE_SUCC (guard_bb, 0);
{
edge exit_e = single_exit (loop);
edge entry_e = loop_preheader_edge (loop);
- gimple orig_cond = get_loop_exit_condition (loop);
+ gcond *orig_cond = get_loop_exit_condition (loop);
gimple_stmt_iterator loop_exit_gsi = gsi_last_bb (exit_e->src);
if (loop->inner
basic_block cond_bb, then_bb;
tree var, prologue_after_cost_adjust_name;
gimple_stmt_iterator gsi;
- gimple newphi;
+ gphi *newphi;
edge e_true, e_false, e_fallthru;
- gimple cond_stmt;
+ gcond *cond_stmt;
gimple_seq stmts = NULL;
tree cost_pre_condition = NULL_TREE;
tree scalar_loop_iters =
basic_block bb_before_first_loop;
basic_block bb_between_loops;
basic_block new_exit_bb;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
edge exit_e = single_exit (loop);
source_location loop_loc;
/* There are many aspects to how likely the first loop is going to be executed.
for (gsi = gsi_start_phis (loop->header); !gsi_end_p (gsi); gsi_next (&gsi))
if (virtual_operand_p (gimple_phi_result (gsi_stmt (gsi))))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
for (gsi = gsi_start_phis (exit_e->dest);
!gsi_end_p (gsi); gsi_next (&gsi))
if (virtual_operand_p (gimple_phi_result (gsi_stmt (gsi))))
if (gsi_end_p (gsi))
{
tree new_vop = copy_ssa_name (PHI_RESULT (phi), NULL);
- gimple new_phi = create_phi_node (new_vop, exit_e->dest);
+ gphi *new_phi = create_phi_node (new_vop, exit_e->dest);
tree vop = PHI_ARG_DEF_FROM_EDGE (phi, EDGE_SUCC (loop->latch, 0));
imm_use_iterator imm_iter;
gimple stmt;
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
basic_block bb = loop->header;
gimple phi;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
/* Analyze phi functions of the loop header. */
{
tree evolution_part;
- phi = gsi_stmt (gsi);
+ phi = gsi.phi ();
if (dump_enabled_p ())
{
dump_printf_loc (MSG_NOTE, vect_location, "Analyze phi: ");
{
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
basic_block exit_bb = single_exit (loop)->dest;
- gimple phi, phi1;
- gimple_stmt_iterator gsi, gsi1;
+ gphi *phi, *phi1;
+ gphi_iterator gsi, gsi1;
basic_block update_bb = update_e->dest;
gcc_checking_assert (vect_can_advance_ivs_p (loop_vinfo));
gimple_stmt_iterator last_gsi;
stmt_vec_info stmt_info;
- phi = gsi_stmt (gsi);
- phi1 = gsi_stmt (gsi1);
+ phi = gsi.phi ();
+ phi1 = gsi1.phi ();
if (dump_enabled_p ())
{
dump_printf_loc (MSG_NOTE, vect_location,
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
struct loop *scalar_loop = LOOP_VINFO_SCALAR_LOOP (loop_vinfo);
basic_block condition_bb;
- gimple_stmt_iterator gsi, cond_exp_gsi;
+ gphi_iterator gsi;
+ gimple_stmt_iterator cond_exp_gsi;
basic_block merge_bb;
basic_block new_exit_bb;
edge new_exit_e, e;
- gimple orig_phi, new_phi;
+ gphi *orig_phi, *new_phi;
tree cond_expr = NULL_TREE;
gimple_seq cond_expr_stmt_list = NULL;
tree arg;
for (gsi = gsi_start_phis (merge_bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
tree new_res;
- orig_phi = gsi_stmt (gsi);
+ orig_phi = gsi.phi ();
new_res = copy_ssa_name (PHI_RESULT (orig_phi), NULL);
new_phi = create_phi_node (new_res, new_exit_bb);
arg = PHI_ARG_DEF_FROM_EDGE (orig_phi, e);
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
int nbbs = loop->num_nodes;
- gimple_stmt_iterator si;
unsigned int vectorization_factor = 0;
tree scalar_type;
- gimple phi;
+ gphi *phi;
tree vectype;
unsigned int nunits;
stmt_vec_info stmt_info;
{
basic_block bb = bbs[i];
- for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
- phi = gsi_stmt (si);
+ phi = si.phi ();
stmt_info = vinfo_for_stmt (phi);
if (dump_enabled_p ())
{
}
}
- for (si = gsi_start_bb (bb); !gsi_end_p (si) || analyze_pattern_stmt;)
+ for (gimple_stmt_iterator si = gsi_start_bb (bb);
+ !gsi_end_p (si) || analyze_pattern_stmt;)
{
tree vf_vectype;
basic_block bb = loop->header;
tree init, step;
auto_vec<gimple, 64> worklist;
- gimple_stmt_iterator gsi;
+ gphi_iterator gsi;
bool double_reduc;
if (dump_enabled_p ())
changed. */
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
tree access_fn = NULL;
tree def = PHI_RESULT (phi);
stmt_vec_info stmt_vinfo = vinfo_for_stmt (phi);
Return the loop exit condition. */
-static gimple
+
+static gcond *
vect_get_loop_niters (struct loop *loop, tree *number_of_iterations,
tree *number_of_iterationsm1)
{
vect_analyze_loop_form (struct loop *loop)
{
loop_vec_info loop_vinfo;
- gimple loop_cond;
+ gcond *loop_cond;
tree number_of_iterations = NULL, number_of_iterationsm1 = NULL;
loop_vec_info inner_loop_vinfo = NULL;
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
int nbbs = loop->num_nodes;
- gimple_stmt_iterator si;
unsigned int vectorization_factor = 0;
int i;
- gimple phi;
stmt_vec_info stmt_info;
bool need_to_vectorize = false;
int min_profitable_iters;
for (i = 0; i < nbbs; i++)
{
basic_block bb = bbs[i];
- for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
gimple stmt = gsi_stmt (si);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
{
basic_block bb = bbs[i];
- for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
- phi = gsi_stmt (si);
+ gphi *phi = si.phi ();
ok = true;
stmt_info = vinfo_for_stmt (phi);
}
}
- for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
gimple stmt = gsi_stmt (si);
if (!gimple_clobber_p (stmt)
tree new_vec, vec_init, vec_step, t;
tree new_var;
tree new_name;
- gimple init_stmt, induction_phi, new_stmt;
+ gimple init_stmt, new_stmt;
+ gphi *induction_phi;
tree induc_def, vec_def, vec_dest;
tree init_expr, step_expr;
int vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
for (j = 0; j < ncopies; j++)
{
/* Set the loop-entry arg of the reduction-phi. */
- add_phi_arg (phi, vec_init_def, loop_preheader_edge (loop),
- UNKNOWN_LOCATION);
+ add_phi_arg (as_a <gphi *> (phi), vec_init_def,
+ loop_preheader_edge (loop), UNKNOWN_LOCATION);
/* Set the loop-latch arg for the reduction-phi. */
if (j > 0)
def = vect_get_vec_def_for_stmt_copy (vect_unknown_def_type, def);
- add_phi_arg (phi, def, loop_latch_edge (loop), UNKNOWN_LOCATION);
+ add_phi_arg (as_a <gphi *> (phi), def, loop_latch_edge (loop),
+ UNKNOWN_LOCATION);
if (dump_enabled_p ())
{
FOR_EACH_VEC_ELT (new_phis, i, phi)
{
tree new_result = copy_ssa_name (PHI_RESULT (phi), NULL);
- gimple outer_phi = create_phi_node (new_result, exit_bb);
+ gphi *outer_phi = create_phi_node (new_result, exit_bb);
SET_PHI_ARG_DEF (outer_phi, single_exit (loop)->dest_idx,
PHI_RESULT (phi));
set_vinfo_for_stmt (outer_phi, new_stmt_vec_info (outer_phi,
{
tree first_vect = PHI_RESULT (new_phis[0]);
tree tmp;
- gimple new_vec_stmt = NULL;
+ gassign *new_vec_stmt = NULL;
vec_dest = vect_create_destination_var (scalar_dest, vectype);
for (k = 1; k < new_phis.length (); k++)
if (outer_loop)
{
stmt_vec_info exit_phi_vinfo = vinfo_for_stmt (exit_phi);
- gimple vect_phi;
+ gphi *vect_phi;
/* FORNOW. Currently not supporting the case that an inner-loop
reduction is not used in the outer-loop (but only outside the
tree def;
gimple def_stmt;
enum vect_def_type dt;
- gimple new_phi = NULL;
+ gphi *new_phi = NULL;
tree scalar_type;
bool is_simple_use;
gimple orig_stmt;
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
int nbbs = loop->num_nodes;
- gimple_stmt_iterator si;
int i;
tree ratio = NULL;
int vectorization_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
{
basic_block bb = bbs[i];
stmt_vec_info stmt_info;
- gimple phi;
- for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
- phi = gsi_stmt (si);
+ gphi *phi = si.phi ();
if (dump_enabled_p ())
{
dump_printf_loc (MSG_NOTE, vect_location,
}
pattern_stmt = NULL;
- for (si = gsi_start_bb (bb); !gsi_end_p (si) || transform_pattern_stmt;)
+ for (gimple_stmt_iterator si = gsi_start_bb (bb);
+ !gsi_end_p (si) || transform_pattern_stmt;)
{
bool is_store;
*type_in = get_vectype_for_scalar_type (TREE_TYPE (base));
if (*type_in)
{
- gimple stmt = gimple_build_call (newfn, 1, base);
+ gcall *stmt = gimple_build_call (newfn, 1, base);
if (vectorizable_function (stmt, *type_in, *type_in)
!= NULL_TREE)
{
vectorization_factor = *max_nunits;
}
- if (is_gimple_call (stmt))
+ if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
{
rhs_code = CALL_EXPR;
- if (gimple_call_internal_p (stmt)
- || gimple_call_tail_p (stmt)
- || gimple_call_noreturn_p (stmt)
- || !gimple_call_nothrow_p (stmt)
- || gimple_call_chain (stmt))
+ if (gimple_call_internal_p (call_stmt)
+ || gimple_call_tail_p (call_stmt)
+ || gimple_call_noreturn_p (call_stmt)
+ || !gimple_call_nothrow_p (call_stmt)
+ || gimple_call_chain (call_stmt))
{
if (dump_enabled_p ())
{
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"Build SLP failed: unsupported call type ");
- dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
+ dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM,
+ call_stmt, 0);
dump_printf (MSG_MISSED_OPTIMIZATION, "\n");
}
/* Fatal mismatch. */
of the function, or NULL_TREE if the function cannot be vectorized. */
tree
-vectorizable_function (gimple call, tree vectype_out, tree vectype_in)
+vectorizable_function (gcall *call, tree vectype_out, tree vectype_in)
{
tree fndecl = gimple_call_fndecl (call);
/* Function vectorizable_call.
- Check if STMT performs a function call that can be vectorized.
+ Check if GS performs a function call that can be vectorized.
If VEC_STMT is also passed, vectorize the STMT: create a vectorized
stmt to replace it, put it in VEC_STMT, and insert it at BSI.
Return FALSE if not a vectorizable STMT, TRUE otherwise. */
static bool
-vectorizable_call (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
+vectorizable_call (gimple gs, gimple_stmt_iterator *gsi, gimple *vec_stmt,
slp_tree slp_node)
{
+ gcall *stmt;
tree vec_dest;
tree scalar_dest;
tree op, type;
tree vec_oprnd0 = NULL_TREE, vec_oprnd1 = NULL_TREE;
- stmt_vec_info stmt_info = vinfo_for_stmt (stmt), prev_stmt_info;
+ stmt_vec_info stmt_info = vinfo_for_stmt (gs), prev_stmt_info;
tree vectype_out, vectype_in;
int nunits_in;
int nunits_out;
if (STMT_VINFO_DEF_TYPE (stmt_info) != vect_internal_def)
return false;
- /* Is STMT a vectorizable call? */
- if (!is_gimple_call (stmt))
+ /* Is GS a vectorizable call? */
+ stmt = dyn_cast <gcall *> (gs);
+ if (!stmt)
return false;
if (gimple_call_internal_p (stmt)
gcc_assert (!new_bb);
}
tree phi_res = copy_ssa_name (op, NULL);
- gimple new_phi = create_phi_node (phi_res, loop->header);
+ gphi *new_phi = create_phi_node (phi_res, loop->header);
set_vinfo_for_stmt (new_phi,
new_stmt_vec_info (new_phi, loop_vinfo,
NULL));
tree offset = NULL_TREE;
tree byte_offset = NULL_TREE;
tree realignment_token = NULL_TREE;
- gimple phi = NULL;
+ gphi *phi = NULL;
vec<tree> dr_chain = vNULL;
bool grouped_load = false;
bool load_lanes_p = false;
&at_loop);
if (alignment_support_scheme == dr_explicit_realign_optimized)
{
- phi = SSA_NAME_DEF_STMT (msq);
+ phi = as_a <gphi *> (SSA_NAME_DEF_STMT (msq));
byte_offset = size_binop (MINUS_EXPR, TYPE_SIZE_UNIT (vectype),
size_one_node);
}
/* Handle cond expr. */
for (j = 0; j < ncopies; j++)
{
- gimple new_stmt = NULL;
+ gassign *new_stmt = NULL;
if (j == 0)
{
if (slp_node)
extern stmt_vec_info new_stmt_vec_info (gimple stmt, loop_vec_info,
bb_vec_info);
extern void free_stmt_vec_info (gimple stmt);
-extern tree vectorizable_function (gimple, tree, tree);
+extern tree vectorizable_function (gcall *, tree, tree);
extern void vect_model_simple_cost (stmt_vec_info, int, enum vect_def_type *,
stmt_vector_for_cost *,
stmt_vector_for_cost *);
static int *vr_phi_edge_counts;
typedef struct {
- gimple stmt;
+ gswitch *stmt;
tree vec;
} switch_update;
the ranges of each of its operands and the expression code. */
static void
-extract_range_from_cond_expr (value_range_t *vr, gimple stmt)
+extract_range_from_cond_expr (value_range_t *vr, gassign *stmt)
{
tree op0, op1;
value_range_t vr0 = VR_INITIALIZER;
in *VR. */
static void
-extract_range_from_assignment (value_range_t *vr, gimple stmt)
+extract_range_from_assignment (value_range_t *vr, gassign *stmt)
{
enum tree_code code = gimple_assign_rhs_code (stmt);
build_assert_expr_for (tree cond, tree v)
{
tree a;
- gimple assertion;
+ gassign *assertion;
gcc_assert (TREE_CODE (v) == SSA_NAME
&& COMPARISON_CLASS_P (cond));
list of assertions for the corresponding operands. */
static void
-find_conditional_asserts (basic_block bb, gimple last)
+find_conditional_asserts (basic_block bb, gcond *last)
{
gimple_stmt_iterator bsi;
tree op;
list of assertions for the corresponding operands. */
static void
-find_switch_asserts (basic_block bb, gimple last)
+find_switch_asserts (basic_block bb, gswitch *last)
{
gimple_stmt_iterator bsi;
tree op;
static void
find_assert_locations_1 (basic_block bb, sbitmap live)
{
- gimple_stmt_iterator si;
gimple last;
last = last_stmt (bb);
&& gimple_code (last) == GIMPLE_COND
&& !fp_predicate (last)
&& !ZERO_SSA_OPERANDS (last, SSA_OP_USE))
- find_conditional_asserts (bb, last);
+ find_conditional_asserts (bb, as_a <gcond *> (last));
/* If BB's last statement is a switch statement involving integer
operands, determine if we need to add ASSERT_EXPRs. */
if (last
&& gimple_code (last) == GIMPLE_SWITCH
&& !ZERO_SSA_OPERANDS (last, SSA_OP_USE))
- find_switch_asserts (bb, last);
+ find_switch_asserts (bb, as_a <gswitch *> (last));
/* Traverse all the statements in BB marking used names and looking
for statements that may infer assertions for their used operands. */
- for (si = gsi_last_bb (bb); !gsi_end_p (si); gsi_prev (&si))
+ for (gimple_stmt_iterator si = gsi_last_bb (bb); !gsi_end_p (si);
+ gsi_prev (&si))
{
gimple stmt;
tree op;
}
/* Traverse all PHI nodes in BB, updating live. */
- for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
use_operand_p arg_p;
ssa_op_iter i;
- gimple phi = gsi_stmt (si);
+ gphi *phi = si.phi ();
tree res = gimple_phi_result (phi);
if (virtual_operand_p (res))
{
i = loop->latch->index;
unsigned int j = single_succ_edge (loop->latch)->dest_idx;
- for (gimple_stmt_iterator gsi = gsi_start_phis (loop->header);
+ for (gphi_iterator gsi = gsi_start_phis (loop->header);
!gsi_end_p (gsi); gsi_next (&gsi))
{
- gimple phi = gsi_stmt (gsi);
+ gphi *phi = gsi.phi ();
if (virtual_operand_p (gimple_phi_result (phi)))
continue;
tree arg = gimple_phi_arg_def (phi, j);
FOR_EACH_BB_FN (bb, cfun)
{
- gimple_stmt_iterator si;
-
- for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
- gimple phi = gsi_stmt (si);
+ gphi *phi = si.phi ();
if (!stmt_interesting_for_vrp (phi))
{
tree lhs = PHI_RESULT (phi);
prop_set_simulate_again (phi, true);
}
- for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
+ for (gimple_stmt_iterator si = gsi_start_bb (bb); !gsi_end_p (si);
+ gsi_next (&si))
{
gimple stmt = gsi_stmt (si);
else if (code == GIMPLE_CALL)
extract_range_basic (&new_vr, stmt);
else
- extract_range_from_assignment (&new_vr, stmt);
+ extract_range_from_assignment (&new_vr, as_a <gassign *> (stmt));
if (update_value_range (lhs, &new_vr))
{
SSA_PROP_VARYING. */
static enum ssa_prop_result
-vrp_visit_cond_stmt (gimple stmt, edge *taken_edge_p)
+vrp_visit_cond_stmt (gcond *stmt, edge *taken_edge_p)
{
tree val;
bool sop;
returned. */
static bool
-find_case_label_index (gimple stmt, size_t start_idx, tree val, size_t *idx)
+find_case_label_index (gswitch *stmt, size_t start_idx, tree val, size_t *idx)
{
size_t n = gimple_switch_num_labels (stmt);
size_t low, high;
Returns true if the default label is not needed. */
static bool
-find_case_label_range (gimple stmt, tree min, tree max, size_t *min_idx,
+find_case_label_range (gswitch *stmt, tree min, tree max, size_t *min_idx,
size_t *max_idx)
{
size_t i, j;
Returns true if the default label is not needed. */
static bool
-find_case_label_ranges (gimple stmt, value_range_t *vr, size_t *min_idx1,
+find_case_label_ranges (gswitch *stmt, value_range_t *vr, size_t *min_idx1,
size_t *max_idx1, size_t *min_idx2,
size_t *max_idx2)
{
SSA_PROP_VARYING. */
static enum ssa_prop_result
-vrp_visit_switch_stmt (gimple stmt, edge *taken_edge_p)
+vrp_visit_switch_stmt (gswitch *stmt, edge *taken_edge_p)
{
tree op, val;
value_range_t *vr;
else if (is_gimple_assign (stmt) || is_gimple_call (stmt))
return vrp_visit_assignment_or_call (stmt, output_p);
else if (gimple_code (stmt) == GIMPLE_COND)
- return vrp_visit_cond_stmt (stmt, taken_edge_p);
+ return vrp_visit_cond_stmt (as_a <gcond *> (stmt), taken_edge_p);
else if (gimple_code (stmt) == GIMPLE_SWITCH)
- return vrp_visit_switch_stmt (stmt, taken_edge_p);
+ return vrp_visit_switch_stmt (as_a <gswitch *> (stmt), taken_edge_p);
/* All other statements produce nothing of interest for VRP, so mark
their outputs varying and prevent further simulation. */
value ranges, set a new range for the LHS of PHI. */
static enum ssa_prop_result
-vrp_visit_phi_node (gimple phi)
+vrp_visit_phi_node (gphi *phi)
{
size_t i;
tree lhs = PHI_RESULT (phi);
else if (need_conversion)
{
tree tem = make_ssa_name (TREE_TYPE (op0), NULL);
- gimple newop = gimple_build_assign_with_ops (BIT_XOR_EXPR, tem, op0, op1);
+ gassign *newop
+ = gimple_build_assign_with_ops (BIT_XOR_EXPR, tem, op0, op1);
gsi_insert_before (gsi, newop, GSI_SAME_STMT);
gimple_assign_set_rhs_with_ops (gsi, NOP_EXPR, tem);
}
the original conditional. */
static bool
-simplify_cond_using_ranges (gimple stmt)
+simplify_cond_using_ranges (gcond *stmt)
{
tree op0 = gimple_cond_lhs (stmt);
tree op1 = gimple_cond_rhs (stmt);
argument. */
static bool
-simplify_switch_using_ranges (gimple stmt)
+simplify_switch_using_ranges (gswitch *stmt)
{
tree op = gimple_switch_index (stmt);
value_range_t *vr;
machine_mode fltmode = TYPE_MODE (TREE_TYPE (gimple_assign_lhs (stmt)));
machine_mode mode;
tree tem;
- gimple conv;
+ gassign *conv;
/* We can only handle constant ranges. */
if (vr->type != VR_RANGE
}
}
else if (gimple_code (stmt) == GIMPLE_COND)
- return simplify_cond_using_ranges (stmt);
+ return simplify_cond_using_ranges (as_a <gcond *> (stmt));
else if (gimple_code (stmt) == GIMPLE_SWITCH)
- return simplify_switch_using_ranges (stmt);
+ return simplify_switch_using_ranges (as_a <gswitch *> (stmt));
else if (is_gimple_call (stmt)
&& gimple_call_internal_p (stmt))
return simplify_internal_call_using_ranges (gsi, stmt);
gimple_assign_rhs2 (stmt),
stmt);
}
- else if (gimple_code (stmt) == GIMPLE_COND)
- val = vrp_evaluate_conditional (gimple_cond_code (stmt),
- gimple_cond_lhs (stmt),
- gimple_cond_rhs (stmt),
+ else if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
+ val = vrp_evaluate_conditional (gimple_cond_code (cond_stmt),
+ gimple_cond_lhs (cond_stmt),
+ gimple_cond_rhs (cond_stmt),
stmt);
else
return false;
else
{
gcc_assert (gimple_code (stmt) == GIMPLE_COND);
+ gcond *cond_stmt = as_a <gcond *> (stmt);
if (integer_zerop (val))
- gimple_cond_make_false (stmt);
+ gimple_cond_make_false (cond_stmt);
else if (integer_onep (val))
- gimple_cond_make_true (stmt);
+ gimple_cond_make_true (cond_stmt);
else
gcc_unreachable ();
}
static tree
simplify_stmt_for_jump_threading (gimple stmt, gimple within_stmt)
{
- if (gimple_code (stmt) == GIMPLE_COND)
- return vrp_evaluate_conditional (gimple_cond_code (stmt),
- gimple_cond_lhs (stmt),
- gimple_cond_rhs (stmt), within_stmt);
+ if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
+ return vrp_evaluate_conditional (gimple_cond_code (cond_stmt),
+ gimple_cond_lhs (cond_stmt),
+ gimple_cond_rhs (cond_stmt),
+ within_stmt);
- if (gimple_code (stmt) == GIMPLE_ASSIGN)
+ if (gassign *assign_stmt = dyn_cast <gassign *> (stmt))
{
value_range_t new_vr = VR_INITIALIZER;
- tree lhs = gimple_assign_lhs (stmt);
+ tree lhs = gimple_assign_lhs (assign_stmt);
if (TREE_CODE (lhs) == SSA_NAME
&& (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
|| POINTER_TYPE_P (TREE_TYPE (lhs))))
{
- extract_range_from_assignment (&new_vr, stmt);
+ extract_range_from_assignment (&new_vr, assign_stmt);
if (range_int_cst_singleton_p (&new_vr))
return new_vr.min;
}
identify_jump_threads (void)
{
basic_block bb;
- gimple dummy;
+ gcond *dummy;
int i;
edge e;
/* Traverse every statement in FN. */
FOR_EACH_BB_FN (bb, fn)
{
+ gphi_iterator psi;
gimple_stmt_iterator si;
unsigned i;
- for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+ for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
{
- gimple phi = gsi_stmt (si);
+ gphi *phi = psi.phi ();
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
ikind = UBSAN_MEMBER_ACCESS;
tree kind = build_int_cst (TREE_TYPE (t), ikind);
tree alignt = build_int_cst (pointer_sized_int_node, align);
- gimple g = gimple_build_call_internal (IFN_UBSAN_NULL, 3, t, kind, alignt);
+ gcall *g = gimple_build_call_internal (IFN_UBSAN_NULL, 3, t, kind, alignt);
gimple_set_location (g, gimple_location (gsi_stmt (*iter)));
gsi_insert_before (iter, g, GSI_SAME_STMT);
}
static void
instrument_nonnull_return (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
location_t loc[2];
tree arg = gimple_return_retval (stmt);
/* infer_nonnull_range needs flag_delete_null_pointer_checks set,
and gimple_value_profile_transformations table-driven, perhaps...
*/
-static tree gimple_divmod_fixed_value (gimple, tree, int, gcov_type, gcov_type);
-static tree gimple_mod_pow2 (gimple, int, gcov_type, gcov_type);
-static tree gimple_mod_subtract (gimple, int, int, int, gcov_type, gcov_type,
- gcov_type);
+static tree gimple_divmod_fixed_value (gassign *, tree, int, gcov_type,
+ gcov_type);
+static tree gimple_mod_pow2 (gassign *, int, gcov_type, gcov_type);
+static tree gimple_mod_subtract (gassign *, int, int, int, gcov_type,
+ gcov_type, gcov_type);
static bool gimple_divmod_fixed_value_transform (gimple_stmt_iterator *);
static bool gimple_mod_pow2_value_transform (gimple_stmt_iterator *);
static bool gimple_mod_subtract_transform (gimple_stmt_iterator *);
alter the original STMT. */
static tree
-gimple_divmod_fixed_value (gimple stmt, tree value, int prob, gcov_type count,
- gcov_type all)
+gimple_divmod_fixed_value (gassign *stmt, tree value, int prob,
+ gcov_type count, gcov_type all)
{
- gimple stmt1, stmt2, stmt3;
+ gassign *stmt1, *stmt2;
+ gcond *stmt3;
tree tmp0, tmp1, tmp2;
gimple bb1end, bb2end, bb3end;
basic_block bb, bb2, bb3, bb4;
gcov_type val, count, all;
tree result, value, tree_val;
gcov_type prob;
- gimple stmt;
+ gassign *stmt;
- stmt = gsi_stmt (*si);
- if (gimple_code (stmt) != GIMPLE_ASSIGN)
+ stmt = dyn_cast <gassign *> (gsi_stmt (*si));
+ if (!stmt)
return false;
if (!INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
within roundoff error). This generates the result into a temp and returns
the temp; it does not replace or alter the original STMT. */
static tree
-gimple_mod_pow2 (gimple stmt, int prob, gcov_type count, gcov_type all)
+gimple_mod_pow2 (gassign *stmt, int prob, gcov_type count, gcov_type all)
{
- gimple stmt1, stmt2, stmt3, stmt4;
+ gassign *stmt1, *stmt2, *stmt3;
+ gcond *stmt4;
tree tmp2, tmp3;
gimple bb1end, bb2end, bb3end;
basic_block bb, bb2, bb3, bb4;
gcov_type count, wrong_values, all;
tree lhs_type, result, value;
gcov_type prob;
- gimple stmt;
+ gassign *stmt;
- stmt = gsi_stmt (*si);
- if (gimple_code (stmt) != GIMPLE_ASSIGN)
+ stmt = dyn_cast <gassign *> (gsi_stmt (*si));
+ if (!stmt)
return false;
lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
/* FIXME: Generalize the interface to handle NCOUNTS > 1. */
static tree
-gimple_mod_subtract (gimple stmt, int prob1, int prob2, int ncounts,
+gimple_mod_subtract (gassign *stmt, int prob1, int prob2, int ncounts,
gcov_type count1, gcov_type count2, gcov_type all)
{
- gimple stmt1, stmt2, stmt3;
+ gassign *stmt1;
+ gimple stmt2;
+ gcond *stmt3;
tree tmp1;
gimple bb1end, bb2end = NULL, bb3end;
basic_block bb, bb2, bb3, bb4;
gcov_type prob1, prob2;
unsigned int i, steps;
gcov_type count1, count2;
- gimple stmt;
+ gassign *stmt;
- stmt = gsi_stmt (*si);
- if (gimple_code (stmt) != GIMPLE_ASSIGN)
+ stmt = dyn_cast <gassign *> (gsi_stmt (*si));
+ if (!stmt)
return false;
lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
Returns true if TARGET is considered ok for call CALL_STMT. */
bool
-check_ic_target (gimple call_stmt, struct cgraph_node *target)
+check_ic_target (gcall *call_stmt, struct cgraph_node *target)
{
location_t locus;
if (gimple_check_call_matching_types (call_stmt, target->decl, true))
old call
*/
-gimple
-gimple_ic (gimple icall_stmt, struct cgraph_node *direct_call,
+gcall *
+gimple_ic (gcall *icall_stmt, struct cgraph_node *direct_call,
int prob, gcov_type count, gcov_type all)
{
- gimple dcall_stmt, load_stmt, cond_stmt, iretbnd_stmt = NULL;
+ gcall *dcall_stmt;
+ gassign *load_stmt;
+ gcond *cond_stmt;
+ gcall *iretbnd_stmt = NULL;
tree tmp0, tmp1, tmp;
basic_block cond_bb, dcall_bb, icall_bb, join_bb = NULL;
tree optype = build_pointer_type (void_type_node);
gimple_set_vdef (icall_stmt, NULL_TREE);
gimple_set_vuse (icall_stmt, NULL_TREE);
update_stmt (icall_stmt);
- dcall_stmt = gimple_copy (icall_stmt);
+ dcall_stmt = as_a <gcall *> (gimple_copy (icall_stmt));
gimple_call_set_fndecl (dcall_stmt, direct_call->decl);
dflags = flags_from_decl_or_type (direct_call->decl);
if ((dflags & ECF_NORETURN) != 0)
&& (dflags & ECF_NORETURN) == 0)
{
tree result = gimple_call_lhs (icall_stmt);
- gimple phi = create_phi_node (result, join_bb);
+ gphi *phi = create_phi_node (result, join_bb);
gimple_call_set_lhs (icall_stmt,
duplicate_ssa_name (result, icall_stmt));
add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
if (e_eh->flags & (EDGE_EH | EDGE_ABNORMAL))
{
e = make_edge (dcall_bb, e_eh->dest, e_eh->flags);
- for (psi = gsi_start_phis (e_eh->dest);
+ for (gphi_iterator psi = gsi_start_phis (e_eh->dest);
!gsi_end_p (psi); gsi_next (&psi))
{
- gimple phi = gsi_stmt (psi);
+ gphi *phi = psi.phi ();
SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
PHI_ARG_DEF_FROM_EDGE (phi, e_eh));
}
static bool
gimple_ic_transform (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gcall *stmt;
histogram_value histogram;
gcov_type val, count, all, bb_all;
struct cgraph_node *direct_call;
- if (gimple_code (stmt) != GIMPLE_CALL)
+ stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
+ if (!stmt)
return false;
if (gimple_call_fndecl (stmt) != NULL_TREE)
operation.
*/
static bool
-interesting_stringop_to_profile_p (tree fndecl, gimple call, int *size_arg)
+interesting_stringop_to_profile_p (tree fndecl, gcall *call, int *size_arg)
{
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
assuming we'll propagate a true constant into ICALL_SIZE later. */
static void
-gimple_stringop_fixed_value (gimple vcall_stmt, tree icall_size, int prob,
+gimple_stringop_fixed_value (gcall *vcall_stmt, tree icall_size, int prob,
gcov_type count, gcov_type all)
{
- gimple tmp_stmt, cond_stmt, icall_stmt;
+ gassign *tmp_stmt;
+ gcond *cond_stmt;
+ gcall *icall_stmt;
tree tmp0, tmp1, vcall_size, optype;
basic_block cond_bb, icall_bb, vcall_bb, join_bb;
edge e_ci, e_cv, e_iv, e_ij, e_vj;
gimple_set_vdef (vcall_stmt, NULL);
gimple_set_vuse (vcall_stmt, NULL);
update_stmt (vcall_stmt);
- icall_stmt = gimple_copy (vcall_stmt);
+ icall_stmt = as_a <gcall *> (gimple_copy (vcall_stmt));
gimple_call_set_arg (icall_stmt, size_arg, icall_size);
gsi_insert_before (&gsi, icall_stmt, GSI_SAME_STMT);
&& TREE_CODE (gimple_call_lhs (vcall_stmt)) == SSA_NAME)
{
tree result = gimple_call_lhs (vcall_stmt);
- gimple phi = create_phi_node (result, join_bb);
+ gphi *phi = create_phi_node (result, join_bb);
gimple_call_set_lhs (vcall_stmt,
duplicate_ssa_name (result, vcall_stmt));
add_phi_arg (phi, gimple_call_lhs (vcall_stmt), e_vj, UNKNOWN_LOCATION);
static bool
gimple_stringops_transform (gimple_stmt_iterator *gsi)
{
- gimple stmt = gsi_stmt (*gsi);
+ gcall *stmt;
tree fndecl;
tree blck_size;
enum built_in_function fcode;
tree tree_val;
int size_arg;
- if (gimple_code (stmt) != GIMPLE_CALL)
+ stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
+ if (!stmt)
return false;
fndecl = gimple_call_fndecl (stmt);
if (!fndecl)
/* Find values inside STMT for that we want to measure histograms for
string operations. */
static void
-gimple_stringops_values_to_profile (gimple stmt, histogram_values *values)
+gimple_stringops_values_to_profile (gimple gs, histogram_values *values)
{
+ gcall *stmt;
tree fndecl;
tree blck_size;
tree dest;
int size_arg;
- if (gimple_code (stmt) != GIMPLE_CALL)
+ stmt = dyn_cast <gcall *> (gs);
+ if (!stmt)
return;
fndecl = gimple_call_fndecl (stmt);
if (!fndecl)
void verify_histograms (void);
void free_histograms (void);
void stringop_block_profile (gimple, unsigned int *, HOST_WIDE_INT *);
-gimple gimple_ic (gimple, struct cgraph_node *, int, gcov_type, gcov_type);
-bool check_ic_target (gimple, struct cgraph_node *);
+gcall *gimple_ic (gcall *, struct cgraph_node *, int, gcov_type,
+ gcov_type);
+bool check_ic_target (gcall *, struct cgraph_node *);
/* In tree-profile.c. */
tree vtbl_var_decl = NULL_TREE;
struct vtbl_map_node *vtable_map_node;
tree vtbl_decl = NULL_TREE;
- gimple call_stmt;
+ gcall *call_stmt;
const char *vtable_name = "<unknown>";
tree tmp0;
bool found;