From: Nathan Sidwell
Date: Wed, 8 Sep 2004 08:05:14 +0000 (+0000)
Subject: emit-rtl.c (immed_double_const): Use gcc_assert and gcc_unreachable.
X-Git-Url: https://git.libre-soc.org/?a=commitdiff_plain;h=5b0264cb4d7a017784253061843a52f7776a3942;p=gcc.git

emit-rtl.c (immed_double_const): Use gcc_assert and gcc_unreachable.

	* emit-rtl.c (immed_double_const): Use gcc_assert and gcc_unreachable.
	(gen_rtx_SUBREG, gen_reg_rtx, mark_user_reg, subreg_hard_regno,
	gen_lowpart_common, gen_highpart, gen_highpart_mode,
	subreg_highpart_offset, operand_subword, operand_subword_force,
	mem_expr_equal_p, set_mem_attributes_minus_bitpos,
	set_mem_alias_set, change_address_1, verify_rtx_sharing,
	copy_most_rtx, set_first_insn, set_last_insn, prev_cc0_setter,
	try_split, add_insn_after, add_insn_before, remove_insn,
	remove_unnecessary_notes, emit_insn_before, emit_jump_insn_before,
	emit_call_insn_before, emit_insn_after, emit_jump_insn_after,
	emit_call_insn_after, emit_insn, emit_jump_insn, emit_call_insn,
	set_unique_reg_note, emit, push_to_full_sequence, copy_insn_1,
	gen_const_vector_0, emit_copy_of_insn_after): Likewise.
	* et-forest.c (set_prev, set_next, et_check_occ_sanity,
	record_path_before_1, check_path_after_1, check_path_after): Likewise.
	* except.c (gen_eh_region, resolve_one_fixup_region,
	remove_unreachable_regions, convert_from_eh_region_ranges_1,
	add_ehl_entry, duplicate_eh_region_1, build_post_landing_pads,
	connect_post_landing_pads, sjlj_emit_function_exit,
	remove_exception_handler_label, remove_eh_handler,
	reachable_next_level, collect_one_action_chain,
	output_function_exception_table): Likewise.
	* explow.c (trunc_int_for_mode, copy_to_mode_reg,
	optimize_save_area_alloca, allocate_dynamic_stack_space,
	probe_stack_range, hard_function_value): Likewise.
	* expmed.c (mode_for_extraction, store_bit_field,
	store_fixed_bit_field, extract_bit_field, expand_shift,
	expand_mult_const, expand_mult, choose_multiplier,
	expand_mult_highpart, expand_divmod, emit_store_flag,
	do_cmp_and_jump): Likewise.
	* expr.c (convert_move, convert_modes, move_by_pieces,
	move_by_pieces_ninsns, move_by_pieces_1, emit_block_move,
	move_block_from_reg, gen_group_rtx, emit_group_load,
	emit_group_move, emit_group_store, use_reg, use_regs,
	can_store_by_pieces, store_by_pieces, store_by_pieces_1,
	emit_move_insn, emit_move_insn_1, emit_push_insn,
	expand_assignment, store_expr, count_type_elements,
	store_constructor, store_field, safe_from_p, expand_var,
	expand_expr_addr_expr, expand_expr_real_1, do_store_flag): Likewise.

From-SVN: r87178
---

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index c9de7f7ce87..88432812e4c 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,45 @@
+2004-09-08  Nathan Sidwell
+
+	* emit-rtl.c (immed_double_const): Use gcc_assert and gcc_unreachable.
+	(gen_rtx_SUBREG, gen_reg_rtx, mark_user_reg, subreg_hard_regno,
+	gen_lowpart_common, gen_highpart, gen_highpart_mode,
+	subreg_highpart_offset, operand_subword, operand_subword_force,
+	mem_expr_equal_p, set_mem_attributes_minus_bitpos,
+	set_mem_alias_set, change_address_1, verify_rtx_sharing,
+	copy_most_rtx, set_first_insn, set_last_insn, prev_cc0_setter,
+	try_split, add_insn_after, add_insn_before, remove_insn,
+	remove_unnecessary_notes, emit_insn_before, emit_jump_insn_before,
+	emit_call_insn_before, emit_insn_after, emit_jump_insn_after,
+	emit_call_insn_after, emit_insn, emit_jump_insn, emit_call_insn,
+	set_unique_reg_note, emit, push_to_full_sequence, copy_insn_1,
+	gen_const_vector_0, emit_copy_of_insn_after): Likewise.
+	* et-forest.c (set_prev, set_next, et_check_occ_sanity,
+	record_path_before_1, check_path_after_1, check_path_after): Likewise.
+	* except.c (gen_eh_region, resolve_one_fixup_region,
+	remove_unreachable_regions, convert_from_eh_region_ranges_1,
+	add_ehl_entry, duplicate_eh_region_1, build_post_landing_pads,
+	connect_post_landing_pads, sjlj_emit_function_exit,
+	remove_exception_handler_label, remove_eh_handler,
+	reachable_next_level, collect_one_action_chain,
+	output_function_exception_table): Likewise.
+	* explow.c (trunc_int_for_mode, copy_to_mode_reg,
+	optimize_save_area_alloca, allocate_dynamic_stack_space,
+	probe_stack_range, hard_function_value): Likewise.
+	* expmed.c (mode_for_extraction, store_bit_field,
+	store_fixed_bit_field, extract_bit_field, expand_shift,
+	expand_mult_const, expand_mult, choose_multiplier,
+	expand_mult_highpart, expand_divmod, emit_store_flag,
+	do_cmp_and_jump): Likewise.
+	* expr.c (convert_move, convert_modes, move_by_pieces,
+	move_by_pieces_ninsns, move_by_pieces_1, emit_block_move,
+	move_block_from_reg, gen_group_rtx, emit_group_load,
+	emit_group_move, emit_group_store, use_reg, use_regs,
+	can_store_by_pieces, store_by_pieces, store_by_pieces_1,
+	emit_move_insn, emit_move_insn_1, emit_push_insn,
+	expand_assignment, store_expr, count_type_elements,
+	store_constructor, store_field, safe_from_p, expand_var,
+	expand_expr_addr_expr, expand_expr_real_1, do_store_flag): Likewise.
+
 2004-09-08  Nathan Sidwell
 
 	* dbxout.c (dbxout_type, dbxout_type_name, dbxout_symbol): Use
@@ -6066,7 +6108,7 @@
 	* config/i386/xmmintrin.h: Include .
 
 2004-08-03  H.J. Lu
-	    Tanguy Fautré
+	    Tanguy Fautré
 
 	* config/i386/pmm_malloc.h: New file.
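Note on the conversion pattern: every hunk in this patch applies the same
mechanical rewrite.  A conditional abort () becomes a single gcc_assert
stating the invariant positively (the negation of the old guard), and an
abort () on an impossible path becomes gcc_unreachable ().  Below is a
minimal compilable sketch of the before/after shapes; the macro definition
and the function names are illustrative stand-ins, not GCC's actual
definitions from system.h (the real macros report file, line and function
through fancy_abort):

    #include <stdio.h>
    #include <stdlib.h>

    /* Stand-in for GCC's system.h macro, for illustration only.  */
    #define gcc_assert(EXPR) \
      ((EXPR) ? (void) 0 \
              : (fprintf (stderr, "assertion failure: %s\n", #EXPR), \
                 abort ()))

    /* Before: the invariant is stated negatively, as a guard on abort.  */
    static int
    sign_old (int x)
    {
      if (x == 0)
        abort ();
      return x > 0 ? 1 : -1;
    }

    /* After: the invariant reads positively and documents itself.  */
    static int
    sign_new (int x)
    {
      gcc_assert (x != 0);
      return x > 0 ? 1 : -1;
    }

    int
    main (void)
    {
      printf ("%d %d\n", sign_old (42), sign_new (-7));
      return 0;
    }

The assert form also lets release builds compile the check away in one
place instead of auditing every abort call site.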
diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c
index 61b62e5618c..9f9289bfad5 100644
--- a/gcc/emit-rtl.c
+++ b/gcc/emit-rtl.c
@@ -456,12 +456,12 @@ immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
   if (mode != VOIDmode)
     {
       int width;
-      if (GET_MODE_CLASS (mode) != MODE_INT
-	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
-	  /* We can get a 0 for an error mark.  */
-	  && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
-	  && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
-	abort ();
+
+      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
+		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
+		  /* We can get a 0 for an error mark.  */
+		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
+		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
 
   /* We clear out all bits that don't belong in MODE, unless they and
      our sign bit are all one.  So we get either a reasonable negative
@@ -474,9 +474,9 @@ immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
       else if (width == HOST_BITS_PER_WIDE_INT
	       && ! (i1 == ~0 && i0 < 0))
	i1 = 0;
-      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
-	/* We cannot represent this value as a constant.  */
-	abort ();
+      else
+	/* We should be able to represent this value as a constant.  */
+	gcc_assert (width <= 2 * HOST_BITS_PER_WIDE_INT);
 
       /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will
@@ -623,16 +623,14 @@ gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
 {
   /* This is the most common failure type.
      Catch it early so we can see who does it.  */
-  if ((offset % GET_MODE_SIZE (mode)) != 0)
-    abort ();
+  gcc_assert (!(offset % GET_MODE_SIZE (mode)));
 
   /* This check isn't usable right now because combine will
      throw arbitrary crap like a CALL into a SUBREG in
      gen_lowpart_for_combine so we must just eat it.  */
 #if 0
   /* Check for this too.  */
-  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
-    abort ();
+  gcc_assert (offset < GET_MODE_SIZE (GET_MODE (reg)));
 #endif
   return gen_rtx_raw_SUBREG (mode, reg, offset);
 }
@@ -711,8 +709,7 @@ gen_reg_rtx (enum machine_mode mode)
 
   /* Don't let anything called after initial flow analysis create new
      registers.  */
-  if (no_new_pseudos)
-    abort ();
+  gcc_assert (!no_new_pseudos);
 
   if (generating_concat_p
       && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
@@ -972,10 +969,11 @@ mark_user_reg (rtx reg)
       REG_USERVAR_P (XEXP (reg, 0)) = 1;
       REG_USERVAR_P (XEXP (reg, 1)) = 1;
     }
-  else if (REG_P (reg))
-    REG_USERVAR_P (reg) = 1;
   else
-    abort ();
+    {
+      gcc_assert (REG_P (reg));
+      REG_USERVAR_P (reg) = 1;
+    }
 }
 
 /* Identify REG as a probable pointer register and show its alignment
@@ -1044,23 +1042,17 @@ subreg_hard_regno (rtx x, int check_mode)
 
   /* This is where we attempt to catch illegal subregs
      created by the compiler.  */
-  if (GET_CODE (x) != SUBREG
-      || !REG_P (reg))
-    abort ();
+  gcc_assert (GET_CODE (x) == SUBREG && REG_P (reg));
   base_regno = REGNO (reg);
-  if (base_regno >= FIRST_PSEUDO_REGISTER)
-    abort ();
-  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
-    abort ();
+  gcc_assert (base_regno < FIRST_PSEUDO_REGISTER);
+  gcc_assert (!check_mode || HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)));
 #ifdef ENABLE_CHECKING
-  if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
-				      SUBREG_BYTE (x), mode))
-    abort ();
+  gcc_assert (subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
+					     SUBREG_BYTE (x), mode));
 #endif
   /* Catch non-congruent offsets too.  */
   byte_offset = SUBREG_BYTE (x);
-  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
-    abort ();
+  gcc_assert (!(byte_offset % GET_MODE_SIZE (mode)));
 
   final_regno = subreg_regno (x);
 
@@ -1096,8 +1088,7 @@ gen_lowpart_common (enum machine_mode mode, rtx x)
 
   xsize = GET_MODE_SIZE (innermode);
 
-  if (innermode == VOIDmode || innermode == BLKmode)
-    abort ();
+  gcc_assert (innermode != VOIDmode && innermode != BLKmode);
 
   if (innermode == mode)
     return x;
@@ -1224,21 +1215,22 @@ gen_highpart (enum machine_mode mode, rtx x)
 
   /* This case loses if X is a subreg.  To catch bugs early,
      complain if an invalid MODE is used even in other cases.  */
-  if (msize > UNITS_PER_WORD
-      && msize != (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)))
-    abort ();
+  gcc_assert (msize <= UNITS_PER_WORD
+	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
 
   result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
-
+  gcc_assert (result);
+
   /* simplify_gen_subreg is not guaranteed to return a valid operand for
      the target if we have a MEM.  gen_highpart must return a valid operand,
      emitting code if necessary to do so.  */
-  if (result != NULL_RTX && MEM_P (result))
-    result = validize_mem (result);
-
-  if (!result)
-    abort ();
+  if (MEM_P (result))
+    {
+      result = validize_mem (result);
+      gcc_assert (result);
+    }
+
   return result;
 }
@@ -1249,8 +1241,7 @@ gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx
 {
   if (GET_MODE (exp) != VOIDmode)
     {
-      if (GET_MODE (exp) != innermode)
-	abort ();
+      gcc_assert (GET_MODE (exp) == innermode);
       return gen_highpart (outermode, exp);
     }
   return simplify_gen_subreg (outermode, exp, innermode,
@@ -1285,8 +1276,7 @@ subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode
   unsigned int offset = 0;
   int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
 
-  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
-    abort ();
+  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
 
   if (difference > 0)
     {
@@ -1346,8 +1336,7 @@ operand_subword (rtx op, unsigned int offset, int validate_address, enum machine
   if (mode == VOIDmode)
     mode = GET_MODE (op);
 
-  if (mode == VOIDmode)
-    abort ();
+  gcc_assert (mode != VOIDmode);
 
   /* If OP is narrower than a word, fail.  */
   if (mode != BLKmode
@@ -1405,8 +1394,7 @@ operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
     }
 
   result = operand_subword (op, offset, 1, mode);
-  if (result == 0)
-    abort ();
+  gcc_assert (result);
 
   return result;
 }
@@ -1500,13 +1488,13 @@ mem_expr_equal_p (tree expr1, tree expr2)
   if (TREE_CODE (expr1) == INDIRECT_REF)
     return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
			     TREE_OPERAND (expr2, 0));
-
-  /* Decls with different pointers can't be equal.  */
-  if (DECL_P (expr1))
-    return 0;
 
-  abort(); /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
+  /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
	      have been resolved here.  */
+  gcc_assert (DECL_P (expr1));
+
+  /* Decls with different pointers can't be equal.  */
+  return 0;
 }
 
 /* Given REF, a MEM, and T, either the type of X or the expression
@@ -1540,8 +1528,7 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
      wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
-  if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
-    abort ();
+  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
 
   /* Get the alias set from the expression or type (perhaps using a
      front-end routine) and use it.  */
@@ -1754,8 +1741,7 @@ set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
 {
 #ifdef ENABLE_CHECKING
   /* If the new and old alias sets don't conflict, something is wrong.  */
-  if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
-    abort ();
+  gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
 #endif
 
   MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
@@ -1814,8 +1800,7 @@ change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
 {
   rtx new;
 
-  if (!MEM_P (memref))
-    abort ();
+  gcc_assert (MEM_P (memref));
   if (mode == VOIDmode)
     mode = GET_MODE (memref);
   if (addr == 0)
@@ -1827,10 +1812,7 @@ change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
   if (validate)
     {
       if (reload_in_progress || reload_completed)
-	{
-	  if (! memory_address_p (mode, addr))
-	    abort ();
-	}
+	gcc_assert (memory_address_p (mode, addr));
       else
	addr = memory_address (mode, addr);
     }
@@ -2285,7 +2267,7 @@ verify_rtx_sharing (rtx orig, rtx insn)
	  debug_rtx (insn);
	  error ("Shared rtx");
	  debug_rtx (x);
-	  abort ();
+	  fatal_error ("Internal consistency failure");
	}
       RTX_FLAG (x, used) = 1;
 
@@ -2490,7 +2472,7 @@ copy_most_rtx (rtx orig, rtx may_share)
	  break;
 
	default:
-	  abort ();
+	  gcc_unreachable ();
	}
     }
   return copy;
@@ -2822,8 +2804,7 @@ get_insns (void)
 void
 set_first_insn (rtx insn)
 {
-  if (PREV_INSN (insn) != 0)
-    abort ();
+  gcc_assert (!PREV_INSN (insn));
   first_insn = insn;
 }
 
@@ -2840,8 +2821,7 @@ get_last_insn (void)
 void
 set_last_insn (rtx insn)
 {
-  if (NEXT_INSN (insn) != 0)
-    abort ();
+  gcc_assert (!NEXT_INSN (insn));
   last_insn = insn;
 }
 
@@ -3191,8 +3171,7 @@ prev_cc0_setter (rtx insn)
     return XEXP (note, 0);
 
   insn = prev_nonnote_insn (insn);
-  if (! sets_cc0_p (PATTERN (insn)))
-    abort ();
+  gcc_assert (sets_cc0_p (PATTERN (insn)));
 
   return insn;
 }
@@ -3292,8 +3271,7 @@ try_split (rtx pat, rtx trial, int last)
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
-	      if (njumps != 1)
-		abort ();
+	      gcc_assert (njumps == 1);
	      REG_NOTES (insn)
		= gen_rtx_EXPR_LIST (REG_BR_PROB,
				     GEN_INT (probability),
@@ -3510,8 +3488,7 @@ add_insn_after (rtx insn, rtx after)
   rtx next = NEXT_INSN (after);
   basic_block bb;
 
-  if (optimize && INSN_DELETED_P (after))
-    abort ();
+  gcc_assert (!optimize || !INSN_DELETED_P (after));
 
   NEXT_INSN (insn) = next;
   PREV_INSN (insn) = after;
@@ -3535,8 +3512,7 @@ add_insn_after (rtx insn, rtx after)
	    break;
	}
 
-      if (stack == 0)
-	abort ();
+      gcc_assert (stack);
     }
 
   if (!BARRIER_P (after)
@@ -3575,8 +3551,7 @@ add_insn_before (rtx insn, rtx before)
   rtx prev = PREV_INSN (before);
   basic_block bb;
 
-  if (optimize && INSN_DELETED_P (before))
-    abort ();
+  gcc_assert (!optimize || !INSN_DELETED_P (before));
 
   PREV_INSN (insn) = prev;
   NEXT_INSN (insn) = before;
@@ -3603,8 +3578,7 @@ add_insn_before (rtx insn, rtx before)
	    break;
	}
 
-      if (stack == 0)
-	abort ();
+      gcc_assert (stack);
     }
 
   if (!BARRIER_P (before)
@@ -3614,14 +3588,13 @@ add_insn_before (rtx insn, rtx before)
       set_block_for_insn (insn, bb);
       if (INSN_P (insn))
	bb->flags |= BB_DIRTY;
-      /* Should not happen as first in the BB is always
-	 either NOTE or LABEl.  */
-      if (BB_HEAD (bb) == insn
-	  /* Avoid clobbering of structure when creating new BB.  */
-	  && !BARRIER_P (insn)
-	  && (!NOTE_P (insn)
-	      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
-	abort ();
+      /* Should not happen as first in the BB is always either NOTE or
+	 LABEl.  */
+      gcc_assert (BB_HEAD (bb) != insn
+		  /* Avoid clobbering of structure when creating new BB.  */
+		  || BARRIER_P (insn)
+		  || (NOTE_P (insn)
+		      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK));
     }
 
   PREV_INSN (before) = insn;
@@ -3660,8 +3633,7 @@ remove_insn (rtx insn)
	    break;
	}
 
-      if (stack == 0)
-	abort ();
+      gcc_assert (stack);
     }
 
   if (next)
@@ -3683,8 +3655,7 @@ remove_insn (rtx insn)
	    break;
	}
 
-      if (stack == 0)
-	abort ();
+      gcc_assert (stack);
     }
   if (!BARRIER_P (insn)
       && (bb = BLOCK_FOR_INSN (insn)))
@@ -3695,8 +3666,7 @@ remove_insn (rtx insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
-	  if (NOTE_P (insn))
-	    abort ();
+	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
       if (BB_END (bb) == insn)
@@ -3709,8 +3679,7 @@ remove_insn (rtx insn)
 void
 add_function_usage_to (rtx call_insn, rtx call_fusage)
 {
-  if (! call_insn || !CALL_P (call_insn))
-    abort ();
+  gcc_assert (call_insn && CALL_P (call_insn));
 
   /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
@@ -3857,11 +3826,10 @@ remove_unnecessary_notes (void)
 
	case NOTE_INSN_EH_REGION_END:
	  /* Too many end notes.  */
-	  if (eh_stack == NULL_RTX)
-	    abort ();
+	  gcc_assert (eh_stack);
	  /* Mismatched nesting.  */
-	  if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
-	    abort ();
+	  gcc_assert (NOTE_EH_HANDLER (XEXP (eh_stack, 0))
+		      == NOTE_EH_HANDLER (insn));
	  tmp = eh_stack;
	  eh_stack = XEXP (eh_stack, 1);
	  free_INSN_LIST_node (tmp);
@@ -3870,18 +3838,15 @@ remove_unnecessary_notes (void)
	case NOTE_INSN_BLOCK_BEG:
	  /* By now, all notes indicating lexical blocks should have
	     NOTE_BLOCK filled in.  */
-	  if (NOTE_BLOCK (insn) == NULL_TREE)
-	    abort ();
+	  gcc_assert (NOTE_BLOCK (insn));
	  block_stack = alloc_INSN_LIST (insn, block_stack);
	  break;
 
	case NOTE_INSN_BLOCK_END:
	  /* Too many end notes.  */
-	  if (block_stack == NULL_RTX)
-	    abort ();
+	  gcc_assert (block_stack);
	  /* Mismatched nesting.  */
-	  if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
-	    abort ();
+	  gcc_assert (NOTE_BLOCK (XEXP (block_stack, 0)) == NOTE_BLOCK (insn));
	  tmp = block_stack;
	  block_stack = XEXP (block_stack, 1);
	  free_INSN_LIST_node (tmp);
@@ -3930,8 +3895,7 @@ remove_unnecessary_notes (void)
     }
 
   /* Too many begin notes.  */
-  if (block_stack || eh_stack)
-    abort ();
+  gcc_assert (!block_stack && !eh_stack);
 }
 
@@ -3968,10 +3932,7 @@ emit_insn_before (rtx x, rtx before)
   rtx last = before;
   rtx insn;
 
-#ifdef ENABLE_RTL_CHECKING
-  if (before == NULL_RTX)
-    abort ();
-#endif
+  gcc_assert (before);
 
   if (x == NULL_RTX)
     return last;
@@ -3996,7 +3957,7 @@ emit_insn_before (rtx x, rtx before)
 
 #ifdef ENABLE_RTL_CHECKING
     case SEQUENCE:
-      abort ();
+      gcc_unreachable ();
       break;
 #endif
 
@@ -4017,10 +3978,7 @@ emit_jump_insn_before (rtx x, rtx before)
 {
   rtx insn, last = NULL_RTX;
 
-#ifdef ENABLE_RTL_CHECKING
-  if (before == NULL_RTX)
-    abort ();
-#endif
+  gcc_assert (before);
 
   switch (GET_CODE (x))
     {
@@ -4042,7 +4000,7 @@ emit_jump_insn_before (rtx x, rtx before)
 
 #ifdef ENABLE_RTL_CHECKING
     case SEQUENCE:
-      abort ();
+      gcc_unreachable ();
       break;
 #endif
 
@@ -4063,10 +4021,7 @@ emit_call_insn_before (rtx x, rtx before)
 {
   rtx last = NULL_RTX, insn;
 
-#ifdef ENABLE_RTL_CHECKING
-  if (before == NULL_RTX)
-    abort ();
-#endif
+  gcc_assert (before);
 
   switch (GET_CODE (x))
     {
@@ -4088,7 +4043,7 @@ emit_call_insn_before (rtx x, rtx before)
 
 #ifdef ENABLE_RTL_CHECKING
     case SEQUENCE:
-      abort ();
+      gcc_unreachable ();
       break;
 #endif
 
@@ -4196,10 +4151,7 @@ emit_insn_after (rtx x, rtx after)
 {
   rtx last = after;
 
-#ifdef ENABLE_RTL_CHECKING
-  if (after == NULL_RTX)
-    abort ();
-#endif
+  gcc_assert (after);
 
   if (x == NULL_RTX)
     return last;
@@ -4217,7 +4169,7 @@ emit_insn_after (rtx x, rtx after)
 
 #ifdef ENABLE_RTL_CHECKING
     case SEQUENCE:
-      abort ();
+      gcc_unreachable ();
       break;
 #endif
 
@@ -4255,10 +4207,7 @@ emit_jump_insn_after (rtx x, rtx after)
 {
   rtx last;
 
-#ifdef ENABLE_RTL_CHECKING
-  if (after == NULL_RTX)
-    abort ();
-#endif
+  gcc_assert (after);
 
   switch (GET_CODE (x))
     {
@@ -4273,7 +4222,7 @@ emit_jump_insn_after (rtx x, rtx after)
 
 #ifdef ENABLE_RTL_CHECKING
     case SEQUENCE:
-      abort ();
+      gcc_unreachable ();
       break;
 #endif
 
@@ -4294,10 +4243,7 @@ emit_call_insn_after (rtx x, rtx after)
 {
   rtx last;
 
-#ifdef ENABLE_RTL_CHECKING
-  if (after == NULL_RTX)
-    abort ();
-#endif
+  gcc_assert (after);
 
   switch (GET_CODE (x))
     {
@@ -4312,7 +4258,7 @@ emit_call_insn_after (rtx x, rtx after)
 
 #ifdef ENABLE_RTL_CHECKING
     case SEQUENCE:
-      abort ();
+      gcc_unreachable ();
       break;
 #endif
 
@@ -4513,7 +4459,7 @@ emit_insn (rtx x)
 
 #ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
-      abort ();
+      gcc_unreachable ();
      break;
 #endif
 
@@ -4554,7 +4500,7 @@ emit_jump_insn (rtx x)
 
 #ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
-      abort ();
+      gcc_unreachable ();
      break;
 #endif
 
@@ -4588,7 +4534,7 @@ emit_call_insn (rtx x)
 
 #ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
-      abort ();
+      gcc_unreachable ();
      break;
 #endif
 
@@ -4737,8 +4683,7 @@ set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
	 means the insn only has one * useful * set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
	{
-	  if (note)
-	    abort ();
+	  gcc_assert (!note);
	  return NULL_RTX;
	}
 
@@ -4807,21 +4752,24 @@ emit (rtx x)
 {
   enum rtx_code code = classify_insn (x);
 
-  if (code == CODE_LABEL)
-    return emit_label (x);
-  else if (code == INSN)
-    return emit_insn (x);
-  else if (code == JUMP_INSN)
+  switch (code)
     {
-      rtx insn = emit_jump_insn (x);
-      if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
-	return emit_barrier ();
-      return insn;
+    case CODE_LABEL:
+      return emit_label (x);
+    case INSN:
+      return emit_insn (x);
+    case JUMP_INSN:
+      {
+	rtx insn = emit_jump_insn (x);
+	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
+	  return emit_barrier ();
+	return insn;
+      }
+    case CALL_INSN:
+      return emit_call_insn (x);
+    default:
+      gcc_unreachable ();
     }
-  else if (code == CALL_INSN)
-    return emit_call_insn (x);
-  else
-    abort ();
 }
 
 /* Space for free sequence stack entries.  */
@@ -4883,8 +4831,7 @@ push_to_full_sequence (rtx first, rtx last)
   first_insn = first;
   last_insn = last;
   /* We really should have the end of the insn chain here.  */
-  if (last && NEXT_INSN (last))
-    abort ();
+  gcc_assert (!last || !NEXT_INSN (last));
 }
 
 /* Set up the outer-level insn chain
@@ -5104,15 +5051,14 @@ copy_insn_1 (rtx orig)
	  break;
 
	default:
-	  abort ();
+	  gcc_unreachable ();
	}
     }
 
   if (code == SCRATCH)
     {
       i = copy_insn_n_scratches++;
-      if (i >= MAX_RECOG_OPERANDS)
-	abort ();
+      gcc_assert (i < MAX_RECOG_OPERANDS);
       copy_insn_scratch_in[i] = orig;
       copy_insn_scratch_out[i] = copy;
     }
@@ -5227,8 +5173,7 @@ gen_const_vector_0 (enum machine_mode mode)
   v = rtvec_alloc (units);
 
   /* We need to call this function after we to set CONST0_RTX first.  */
-  if (!CONST0_RTX (inner))
-    abort ();
+  gcc_assert (CONST0_RTX (inner));
 
   for (i = 0; i < units; ++i)
     RTVEC_ELT (v, i) = CONST0_RTX (inner);
@@ -5477,7 +5422,7 @@ emit_copy_of_insn_after (rtx insn, rtx after)
      break;
 
    default:
-      abort ();
+      gcc_unreachable ();
    }
 
  /* Update LABEL_NUSES.  */
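The emit-rtl.c hunks above also show the second half of the idiom: an
abort () in the default arm of a switch over what should be an exhaustive
set of codes becomes gcc_unreachable ().  A small self-contained sketch
with an invented enum and macro definition (nothing below is code from
this patch):

    #include <stdio.h>
    #include <stdlib.h>

    /* Stand-in for GCC's system.h macro, for illustration only.  */
    #define gcc_unreachable() \
      (fprintf (stderr, "%s:%d: unreachable code reached\n", \
                __FILE__, __LINE__), abort ())

    enum direction { DIR_LEFT, DIR_RIGHT };

    static const char *
    direction_name (enum direction d)
    {
      switch (d)
        {
        case DIR_LEFT:
          return "left";
        case DIR_RIGHT:
          return "right";
        default:
          /* Formerly spelled abort (); the new name states the intent.  */
          gcc_unreachable ();
        }
    }

    int
    main (void)
    {
      printf ("%s\n", direction_name (DIR_RIGHT));
      return 0;
    }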
diff --git a/gcc/et-forest.c b/gcc/et-forest.c
index 1c5eb9ab5bd..7fbbb77312c 100644
--- a/gcc/et-forest.c
+++ b/gcc/et-forest.c
@@ -88,8 +88,7 @@ static inline void
 set_prev (struct et_occ *occ, struct et_occ *t)
 {
 #ifdef DEBUG_ET
-  if (occ == t)
-    abort ();
+  gcc_assert (occ != t);
 #endif
 
   occ->prev = t;
@@ -103,8 +102,7 @@ static inline void
 set_next (struct et_occ *occ, struct et_occ *t)
 {
 #ifdef DEBUG_ET
-  if (occ == t)
-    abort ();
+  gcc_assert (occ != t);
 #endif
 
   occ->next = t;
@@ -145,40 +143,26 @@ et_check_occ_sanity (struct et_occ *occ)
   if (!occ)
     return;
 
-  if (occ->parent == occ)
-    abort ();
-
-  if (occ->prev == occ)
-    abort ();
-
-  if (occ->next == occ)
-    abort ();
-
-  if (occ->next && occ->next == occ->prev)
-    abort ();
+  gcc_assert (occ->parent != occ);
+  gcc_assert (occ->prev != occ);
+  gcc_assert (occ->next != occ);
+  gcc_assert (!occ->next || occ->next != occ->prev);
 
   if (occ->next)
     {
-      if (occ->next == occ->parent)
-	abort ();
-
-      if (occ->next->parent != occ)
-	abort ();
+      gcc_assert (occ->next != occ->parent);
+      gcc_assert (occ->next->parent == occ);
     }
 
   if (occ->prev)
     {
-      if (occ->prev == occ->parent)
-	abort ();
-
-      if (occ->prev->parent != occ)
-	abort ();
+      gcc_assert (occ->prev != occ->parent);
+      gcc_assert (occ->prev->parent == occ);
     }
 
-  if (occ->parent
-      && occ->parent->prev != occ
-      && occ->parent->next != occ)
-    abort ();
+  gcc_assert (!occ->parent
+	      || occ->parent->prev == occ
+	      || occ->parent->next == occ);
 }
 
 /* Checks whether tree rooted at OCC is sane.  */
@@ -233,8 +217,7 @@ record_path_before_1 (struct et_occ *occ, int depth)
 
   fprintf (stderr, "%d (%d); ", ((basic_block) occ->of->data)->index, depth);
 
-  if (len >= MAX_NODES)
-    abort ();
+  gcc_assert (len < MAX_NODES);
 
   depths[len] = depth;
   datas[len] = occ->of;
@@ -247,8 +230,7 @@ record_path_before_1 (struct et_occ *occ, int depth)
	mn = m;
     }
 
-  if (mn != occ->min + depth - occ->depth)
-    abort ();
+  gcc_assert (mn == occ->min + depth - occ->depth);
 
   return mn;
 }
@@ -285,9 +267,7 @@ check_path_after_1 (struct et_occ *occ, int depth)
     }
 
   len--;
-  if (depths[len] != depth
-      || datas[len] != occ->of)
-    abort ();
+  gcc_assert (depths[len] == depth && datas[len] == occ->of);
 
   if (occ->prev)
     {
@@ -296,8 +276,7 @@ check_path_after_1 (struct et_occ *occ, int depth)
	mn = m;
     }
 
-  if (mn != occ->min + depth - occ->depth)
-    abort ();
+  gcc_assert (mn == occ->min + depth - occ->depth);
 
   return mn;
 }
@@ -312,8 +291,7 @@ check_path_after (struct et_occ *occ)
     occ = occ->parent;
 
   check_path_after_1 (occ, 0);
-  if (len != 0)
-    abort ();
+  gcc_assert (!len);
 }
 
 #endif

diff --git a/gcc/except.c b/gcc/except.c
index 458b88c2af0..b09ff8338a7 100644
--- a/gcc/except.c
+++ b/gcc/except.c
@@ -460,8 +460,7 @@ gen_eh_region (enum eh_region_type type, struct eh_region *outer)
   struct eh_region *new;
 
 #ifdef ENABLE_CHECKING
-  if (! doing_eh (0))
-    abort ();
+  gcc_assert (doing_eh (0));
 #endif
 
   /* Insert a new blank region as a leaf in the tree.  */
@@ -688,8 +687,7 @@ resolve_one_fixup_region (struct eh_region *fixup)
	  && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	break;
     }
-  if (j > n)
-    abort ();
+  gcc_assert (j <= n);
 
   real = cleanup->outer;
   if (real && real->type == ERT_FIXUP)
@@ -811,14 +809,12 @@ remove_unreachable_regions (rtx insns)
 
       if (r->resume)
	{
-	  if (uid_region_num[INSN_UID (r->resume)])
-	    abort ();
+	  gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
       if (r->label)
	{
-	  if (uid_region_num[INSN_UID (r->label)])
-	    abort ();
+	  gcc_assert (!uid_region_num[INSN_UID (r->label)]);
	  uid_region_num[INSN_UID (r->label)] = i;
	}
     }
@@ -942,8 +938,7 @@ convert_from_eh_region_ranges_1 (rtx *pinsns, int *orig_sp, int cur)
	}
     }
 
-  if (sp != orig_sp)
-    abort ();
+  gcc_assert (sp == orig_sp);
 }
 
 static void
@@ -1006,8 +1001,7 @@ add_ehl_entry (rtx label, struct eh_region *region)
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
-  if (*slot && !cfun->eh->built_landing_pads)
-    abort ();
+  gcc_assert (!*slot || cfun->eh->built_landing_pads);
 
   *slot = entry;
 }
@@ -1104,7 +1098,7 @@ duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
       n->u.throw.type = o->u.throw.type;
 
    default:
-      abort ();
+      gcc_unreachable ();
    }
 
  if (o->label)
@@ -1112,8 +1106,7 @@ duplicate_eh_region_1 (struct eh_region *o, struct inline_remap *map)
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
-      if (n->resume == NULL)
-	abort ();
+      gcc_assert (n->resume);
    }
 
  return n;
@@ -1586,7 +1579,7 @@ build_post_landing_pads (void)
	  break;
 
	default:
-	  abort ();
+	  gcc_unreachable ();
	}
     }
 }
@@ -1657,8 +1650,7 @@ connect_post_landing_pads (void)
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
-      if (!BARRIER_P (barrier))
-	abort ();
+      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);
 
@@ -2049,8 +2041,7 @@ sjlj_emit_function_exit (void)
	 Figure out whether the place we are supposed to insert libcall
	 is inside the last basic block or after it.  In the other case
	 we need to emit to edge.  */
-      if (e->src->next_bb != EXIT_BLOCK_PTR)
-	abort ();
+      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = NEXT_INSN (BB_END (e->src)); insn; insn = NEXT_INSN (insn))
	if (insn == cfun->eh->sjlj_exit_after)
	  break;
@@ -2258,8 +2249,7 @@ remove_exception_handler_label (rtx label)
  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (cfun->eh->exception_handler_label_map, &tmp, NO_INSERT);
-  if (! slot)
-    abort ();
+  gcc_assert (slot);
 
  htab_clear_slot (cfun->eh->exception_handler_label_map, (void **) slot);
 }
@@ -2330,8 +2320,7 @@ remove_eh_handler (struct eh_region *region)
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
-      if (try->type != ERT_TRY)
-	abort ();
+      gcc_assert (try->type == ERT_TRY);
 
      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;
@@ -2642,10 +2631,11 @@ reachable_next_level (struct eh_region *region, tree type_thrown,
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
+      gcc_unreachable ();
      break;
+
+    default:
+      gcc_unreachable ();
    }
-
-  abort ();
 }
 
 /* Invoke CALLBACK on each region reachable from REGION_NUMBER.  */
@@ -3259,7 +3249,7 @@ collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
      return collect_one_action_chain (ar_hash, region->outer);
 
    default:
-      abort ();
+      gcc_unreachable ();
    }
 }
 
@@ -3763,8 +3753,8 @@ output_function_exception_table (void)
	      cgraph_varpool_mark_needed_node (node);
	    }
	}
-      else if (TREE_CODE (type) != INTEGER_CST)
-	abort ();
+      else
+	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }
 
  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)

diff --git a/gcc/explow.c b/gcc/explow.c
index 47cb8671e27..62ca87c3ca0 100644
--- a/gcc/explow.c
+++ b/gcc/explow.c
@@ -50,8 +50,7 @@ trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
  int width = GET_MODE_BITSIZE (mode);
 
  /* You want to truncate to a _what_?  */
-  if (! SCALAR_INT_MODE_P (mode))
-    abort ();
+  gcc_assert (SCALAR_INT_MODE_P (mode));
 
  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
@@ -633,8 +632,7 @@ copy_to_mode_reg (enum machine_mode mode, rtx x)
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);
 
-  if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
-    abort ();
+  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
@@ -1106,11 +1104,10 @@ optimize_save_area_alloca (void)
	 Right now only supported port with stack that grow upward
	 is the HPPA and it does not define SETJMP_VIA_SAVE_AREA.  */
-      if (GET_CODE (pat) != SET
-	  || SET_DEST (pat) != stack_pointer_rtx
-	  || GET_CODE (SET_SRC (pat)) != MINUS
-	  || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
-	abort ();
+      gcc_assert (GET_CODE (pat) == SET
+		  && SET_DEST (pat) == stack_pointer_rtx
+		  && GET_CODE (SET_SRC (pat)) == MINUS
+		  && XEXP (SET_SRC (pat), 0) == stack_pointer_rtx);
 
      /* This will now be transformed into a (set REG REG)
	 so we can just blow away all the other notes.  */
@@ -1134,8 +1131,7 @@ optimize_save_area_alloca (void)
	    if (XEXP (srch, 1) == note)
	      break;
 
-	  if (srch == NULL_RTX)
-	    abort ();
+	  gcc_assert (srch);
 
	  XEXP (srch, 1) = XEXP (note, 1);
	}
@@ -1229,8 +1225,7 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
 
  /* ??? Code below assumes that the save area needs maximal
     alignment.  This constraint may be too strong.  */
-  if (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
-    abort ();
+  gcc_assert (PREFERRED_STACK_BOUNDARY == BIGGEST_ALIGNMENT);
 
  if (GET_CODE (size) == CONST_INT)
    {
@@ -1287,8 +1282,8 @@ allocate_dynamic_stack_space (rtx size, rtx target, int known_align)
 
  /* We ought to be called always on the toplevel and stack ought to be aligned
     properly.  */
-  if (stack_pointer_delta % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))
-    abort ();
+  gcc_assert (!(stack_pointer_delta
+		% (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
 
  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
@@ -1539,8 +1534,7 @@ probe_stack_range (HOST_WIDE_INT first, rtx size)
				       1, OPTAB_WIDEN);
 #endif
 
-      if (temp != test_addr)
-	abort ();
+      gcc_assert (temp == test_addr);
 
      emit_label (test_lab);
      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
@@ -1594,8 +1588,7 @@ hard_function_value (tree valtype, tree func ATTRIBUTE_UNUSED,
	}
 
      /* No suitable mode found.  */
-      if (tmpmode == VOIDmode)
-	abort ();
+      gcc_assert (tmpmode != VOIDmode);
 
      PUT_MODE (val, tmpmode);
    }

diff --git a/gcc/expmed.c b/gcc/expmed.c
index 10084e5ae8b..fe9aeb5c4c9 100644
--- a/gcc/expmed.c
+++ b/gcc/expmed.c
@@ -298,7 +298,7 @@ mode_for_extraction (enum extraction_pattern pattern, int opno)
      return MAX_MACHINE_MODE;
 
    default:
-      abort ();
+      gcc_unreachable ();
    }
 
  if (opno == -1)
@@ -386,10 +386,9 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
 
	  /* We could handle this, but we should always be called with a pseudo
	     for our targets and all insns should take them as outputs.  */
-	  if (! (*insn_data[icode].operand[0].predicate) (dest, mode0)
-	      || ! (*insn_data[icode].operand[1].predicate) (src, mode1)
-	      || ! (*insn_data[icode].operand[2].predicate) (rtxpos, mode2))
-	    abort ();
+	  gcc_assert ((*insn_data[icode].operand[0].predicate) (dest, mode0)
+		      && (*insn_data[icode].operand[1].predicate) (src, mode1)
+		      && (*insn_data[icode].operand[2].predicate) (rtxpos, mode2));
	  pat = GEN_FCN (icode) (dest, src, rtxpos);
	  seq = get_insns ();
	  end_sequence ();
@@ -433,15 +432,14 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
    {
      if (GET_CODE (op0) == SUBREG)
	{
-	  if (GET_MODE (SUBREG_REG (op0)) == fieldmode
-	      || GET_MODE_CLASS (fieldmode) == MODE_INT
-	      || GET_MODE_CLASS (fieldmode) == MODE_PARTIAL_INT)
-	    op0 = SUBREG_REG (op0);
-	  else
-	    /* Else we've got some float mode source being extracted into
-	       a different float mode destination -- this combination of
-	       subregs results in Severe Tire Damage.  */
-	    abort ();
+	  /* Else we've got some float mode source being extracted
	     into a different float mode destination -- this
	     combination of subregs results in Severe Tire
	     Damage.  */
+	  gcc_assert (GET_MODE (SUBREG_REG (op0)) == fieldmode
+		      || GET_MODE_CLASS (fieldmode) == MODE_INT
+		      || GET_MODE_CLASS (fieldmode) == MODE_PARTIAL_INT);
+	  op0 = SUBREG_REG (op0);
	}
      if (REG_P (op0))
	op0 = gen_rtx_SUBREG (fieldmode, op0, byte_offset);
@@ -462,10 +460,11 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
	{
	  if (MEM_P (op0))
	    op0 = adjust_address (op0, imode, 0);
-	  else if (imode != BLKmode)
-	    op0 = gen_lowpart (imode, op0);
	  else
-	    abort ();
+	    {
+	      gcc_assert (imode != BLKmode);
+	      op0 = gen_lowpart (imode, op0);
+	    }
	}
    }
 
@@ -510,15 +509,13 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
 
      if (GET_CODE (op0) == SUBREG)
	{
-	  if (GET_MODE (SUBREG_REG (op0)) == fieldmode
-	      || GET_MODE_CLASS (fieldmode) == MODE_INT
-	      || GET_MODE_CLASS (fieldmode) == MODE_PARTIAL_INT)
-	    op0 = SUBREG_REG (op0);
-	  else
-	    /* Else we've got some float mode source being extracted into
-	       a different float mode destination -- this combination of
-	       subregs results in Severe Tire Damage.  */
-	    abort ();
+	  /* Else we've got some float mode source being extracted into
	     a different float mode destination -- this combination of
	     subregs results in Severe Tire Damage.  */
+	  gcc_assert (GET_MODE (SUBREG_REG (op0)) == fieldmode
+		      || GET_MODE_CLASS (fieldmode) == MODE_INT
+		      || GET_MODE_CLASS (fieldmode) == MODE_PARTIAL_INT);
+	  op0 = SUBREG_REG (op0);
	}
 
      emit_insn (GEN_FCN (icode)
@@ -589,12 +586,10 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
	     pseudo.  We can trivially remove a SUBREG that does not
	     change the size of the operand.  Such a SUBREG may have been
	     added above.  Otherwise, abort.  */
-	  if (GET_CODE (op0) == SUBREG
-	      && (GET_MODE_SIZE (GET_MODE (op0))
-		  == GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))))
-	    op0 = SUBREG_REG (op0);
-	  else
-	    abort ();
+	  gcc_assert (GET_CODE (op0) == SUBREG
+		      && (GET_MODE_SIZE (GET_MODE (op0))
+			  == GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))));
+	  op0 = SUBREG_REG (op0);
	}
      op0 = gen_rtx_SUBREG (mode_for_size (BITS_PER_WORD, MODE_INT, 0),
			    op0, (offset * UNITS_PER_WORD));
@@ -731,12 +726,12 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
	}
      else if (GET_CODE (value) == CONST_INT)
	value1 = gen_int_mode (INTVAL (value), maxmode);
-      else if (!CONSTANT_P (value))
+      else
	/* Parse phase is supposed to make VALUE's data type match that of
	   the component reference, which is a type at least as wide as the
	   field; so VALUE should have a mode that corresponds to that type.  */
-	abort ();
+	gcc_assert (CONSTANT_P (value));
    }
 
  /* If this machine's insv insists on a register,
@@ -790,8 +785,7 @@ store_fixed_bit_field (rtx op0, unsigned HOST_WIDE_INT offset,
 
  if (REG_P (op0) || GET_CODE (op0) == SUBREG)
    {
-      if (offset != 0)
-	abort ();
+      gcc_assert (!offset);
      /* Special treatment for a bit field split across two registers.  */
      if (bitsize + bitpos > BITS_PER_WORD)
	{
@@ -1146,10 +1140,9 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
 
	  /* We could handle this, but we should always be called with a pseudo
	     for our targets and all insns should take them as outputs.  */
-	  if (! (*insn_data[icode].operand[0].predicate) (dest, mode0)
-	      || ! (*insn_data[icode].operand[1].predicate) (src, mode1)
-	      || ! (*insn_data[icode].operand[2].predicate) (rtxpos, mode2))
-	    abort ();
+	  gcc_assert ((*insn_data[icode].operand[0].predicate) (dest, mode0)
+		      && (*insn_data[icode].operand[1].predicate) (src, mode1)
+		      && (*insn_data[icode].operand[2].predicate) (rtxpos, mode2));
 
	  pat = GEN_FCN (icode) (dest, src, rtxpos);
	  seq = get_insns ();
@@ -1170,10 +1163,11 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
    {
      if (MEM_P (op0))
	op0 = adjust_address (op0, imode, 0);
-      else if (imode != BLKmode)
-	op0 = gen_lowpart (imode, op0);
      else
-	abort ();
+	{
+	  gcc_assert (imode != BLKmode);
+	  op0 = gen_lowpart (imode, op0);
+	}
    }
 }
@@ -1299,8 +1293,7 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
					    bitnum + bit_offset, 1, target_part,
					    mode, word_mode);
 
-	  if (target_part == 0)
-	    abort ();
+	  gcc_assert (target_part);
 
	  if (result_part != target_part)
	    emit_move_insn (target_part, result_part);
@@ -1346,13 +1339,11 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
	  int_mode = int_mode_for_mode (tmode);
	  if (int_mode == BLKmode)
	    int_mode = int_mode_for_mode (mode);
-	  if (int_mode == BLKmode)
-	    abort ();	/* Should probably push op0 out to memory and then
-			   do a load.  */
+	  /* Should probably push op0 out to memory and then do a load.  */
+	  gcc_assert (int_mode != BLKmode);
 
  /* OFFSET is the number of words or bytes (UNIT says which)
     from STR_RTX to the first word or byte containing part of the field.  */
-
  if (!MEM_P (op0))
    {
      if (offset != 0
@@ -2145,8 +2136,7 @@ expand_shift (enum tree_code code, enum machine_mode mode, rtx shifted,
	 define_expand for lshrsi3 was added to vax.md.  */
    }
 
-  if (temp == 0)
-    abort ();
+  gcc_assert (temp);
  return temp;
 }
@@ -2649,7 +2639,7 @@ expand_mult_const (enum machine_mode mode, rtx op0, HOST_WIDE_INT val,
      val_so_far = 1;
    }
  else
-    abort ();
+    gcc_unreachable ();
 
  for (opno = 1; opno < alg->ops; opno++)
    {
@@ -2727,7 +2717,7 @@ expand_mult_const (enum machine_mode mode, rtx op0, HOST_WIDE_INT val,
	  break;
 
	default:
-	  abort ();
+	  gcc_unreachable ();
	}
 
      /* Write a REG_EQUAL note on the last insn so that we can cse
@@ -2762,8 +2752,7 @@ expand_mult_const (enum machine_mode mode, rtx op0, HOST_WIDE_INT val,
     in the result mode, to avoid sign-/zero-extension confusion.  */
  val &= GET_MODE_MASK (mode);
  val_so_far &= GET_MODE_MASK (mode);
-  if (val != val_so_far)
-    abort ();
+  gcc_assert (val == val_so_far);
 
  return accum;
 }
@@ -2848,8 +2837,7 @@ expand_mult (enum machine_mode mode, rtx op0, rtx op1, rtx target,
		      && flag_trapv && (GET_MODE_CLASS(mode) == MODE_INT)
		      ? smulv_optab : smul_optab,
		      op0, op1, target, unsignedp, OPTAB_LIB_WIDEN);
-  if (op0 == 0)
-    abort ();
+  gcc_assert (op0);
  return op0;
 }
@@ -2893,18 +2881,15 @@ choose_multiplier (unsigned HOST_WIDE_INT d, int n, int precision,
 
  /* lgup = ceil(log2(divisor)); */
  lgup = ceil_log2 (d);
 
-  if (lgup > n)
-    abort ();
+  gcc_assert (lgup <= n);
 
  pow = n + lgup;
  pow2 = n + lgup - precision;
 
-  if (pow == 2 * HOST_BITS_PER_WIDE_INT)
-    {
-      /* We could handle this with some effort, but this case is much better
-	 handled directly with a scc insn, so rely on caller using that.  */
-      abort ();
-    }
+  /* We could handle this with some effort, but this case is much
     better handled directly with a scc insn, so rely on caller using
     that.  */
+  gcc_assert (pow != 2 * HOST_BITS_PER_WIDE_INT);
 
  /* mlow = 2^(N + lgup)/d */
  if (pow >= HOST_BITS_PER_WIDE_INT)
@@ -2928,13 +2913,11 @@ choose_multiplier (unsigned HOST_WIDE_INT d, int n, int precision,
  div_and_round_double (TRUNC_DIV_EXPR, 1, nl, nh, d, (HOST_WIDE_INT) 0,
			&mhigh_lo, &mhigh_hi, &dummy1, &dummy2);
 
-  if (mhigh_hi && nh - d >= d)
-    abort ();
-  if (mhigh_hi > 1 || mlow_hi > 1)
-    abort ();
+  gcc_assert (!mhigh_hi || nh - d < d);
+  gcc_assert (mhigh_hi <= 1 && mlow_hi <= 1);
  /* Assert that mlow < mhigh.  */
-  if (! (mlow_hi < mhigh_hi || (mlow_hi == mhigh_hi && mlow_lo < mhigh_lo)))
-    abort ();
+  gcc_assert (mlow_hi < mhigh_hi
+	      || (mlow_hi == mhigh_hi && mlow_lo < mhigh_lo));
 
  /* If precision == N, then mlow, mhigh exceed 2^N
     (but they do not exceed 2^(N+1)).  */
@@ -3156,8 +3139,7 @@ expand_mult_highpart (enum machine_mode mode, rtx op0, rtx op1,
      tem;
 
  /* We can't support modes wider than HOST_BITS_PER_INT.  */
-  if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
-    abort ();
+  gcc_assert (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT);
 
  op1 = gen_int_mode (cnst1, wider_mode);
  cnst1 &= GET_MODE_MASK (mode);
@@ -3662,8 +3644,7 @@ expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode,
 
		    mh = choose_multiplier (d >> pre_shift, size,
					    size - pre_shift,
					    &ml, &post_shift, &dummy);
-		    if (mh)
-		      abort ();
+		    gcc_assert (!mh);
		  }
		else
		  pre_shift = 0;
@@ -3939,8 +3920,7 @@ expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode,
		mh = choose_multiplier (d, size, size - 1,
					&ml, &post_shift, &lgup);
-		if (mh)
-		  abort ();
+		gcc_assert (!mh);
 
		if (post_shift < BITS_PER_WORD
		    && size - 1 < BITS_PER_WORD)
@@ -4398,7 +4378,7 @@ expand_divmod (int rem_flag, enum tree_code code, enum machine_mode mode,
	return gen_lowpart (mode, rem_flag ? remainder : quotient);
 
      default:
-	abort ();
+	gcc_unreachable ();
    }
 
  if (quotient == 0)
@@ -4899,20 +4879,23 @@ emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1,
    = compare_from_rtx (op0, op1, code, unsignedp, mode, NULL_RTX);
  if (CONSTANT_P (comparison))
    {
-      if (GET_CODE (comparison) == CONST_INT)
+      switch (GET_CODE (comparison))
	{
+	case CONST_INT:
	  if (comparison == const0_rtx)
	    return const0_rtx;
-	}
+	  break;
+
 #ifdef FLOAT_STORE_FLAG_VALUE
-      else if (GET_CODE (comparison) == CONST_DOUBLE)
-	{
+	case CONST_DOUBLE:
	  if (comparison == CONST0_RTX (GET_MODE (comparison)))
	    return const0_rtx;
-	}
+	  break;
 #endif
-      else
-	abort ();
+	default:
+	  gcc_unreachable ();
+	}
+
      if (normalizep == 1)
	return const1_rtx;
      if (normalizep == -1)
@@ -4987,14 +4970,14 @@ emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1,
	op0 = expand_shift (RSHIFT_EXPR, compare_mode, op0,
			    size_int (GET_MODE_BITSIZE (compare_mode) - 1),
			    subtarget, normalizep == 1);
-      else if (STORE_FLAG_VALUE & 1)
+      else
	{
+	  gcc_assert (STORE_FLAG_VALUE & 1);
+
	  op0 = expand_and (compare_mode, op0, const1_rtx, subtarget);
	  if (normalizep == -1)
	    op0 = expand_unop (compare_mode, neg_optab, op0, op0, 0);
	}
-      else
-	abort ();
 
      /* If we were converting to a smaller mode, do the
	 conversion now.  */
@@ -5262,19 +5245,17 @@ do_cmp_and_jump (rtx arg1, rtx arg2, enum rtx_code op, enum machine_mode mode,
	  /* do_jump_by_parts_equality_rtx compares with zero.  Luckily
	     that's the only equality operations we do */
	case EQ:
-	  if (arg2 != const0_rtx || mode != GET_MODE(arg1))
-	    abort ();
+	  gcc_assert (arg2 == const0_rtx && mode == GET_MODE(arg1));
	  do_jump_by_parts_equality_rtx (arg1, label2, label);
	  break;
 
	case NE:
-	  if (arg2 != const0_rtx || mode != GET_MODE(arg1))
-	    abort ();
+	  gcc_assert (arg2 == const0_rtx && mode == GET_MODE(arg1));
	  do_jump_by_parts_equality_rtx (arg1, label, label2);
	  break;
 
	default:
-	  abort ();
+	  gcc_unreachable ();
	}
 
      emit_label (label2);

diff --git a/gcc/expr.c b/gcc/expr.c
index 076b612c0b6..40beef3413e 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -335,8 +335,7 @@ convert_move (rtx to, rtx from, int unsignedp)
			: (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
 
-  if (to_real != from_real)
-    abort ();
+  gcc_assert (to_real == from_real);
 
  /* If the source and destination are already the same, then there's
     nothing to do.  */
@@ -353,8 +352,7 @@ convert_move (rtx to, rtx from, int unsignedp)
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;
 
-  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
-    abort ();
+  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
 
  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
@@ -365,8 +363,7 @@ convert_move (rtx to, rtx from, int unsignedp)
 
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
-      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
-	abort ();
+      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
 
      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
@@ -389,12 +386,13 @@ convert_move (rtx to, rtx from, int unsignedp)
      rtx value, insns;
      convert_optab tab;
 
+      gcc_assert (GET_MODE_PRECISION (from_mode)
+		  != GET_MODE_PRECISION (to_mode));
+
      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
-      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
-	tab = trunc_optab;
      else
-	abort ();
+	tab = trunc_optab;
 
      /* Try converting directly if the insn is supported.  */
 
@@ -409,9 +407,8 @@ convert_move (rtx to, rtx from, int unsignedp)
 
      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;
 
-      if (!libcall)
-	/* This conversion is not implemented yet.  */
-	abort ();
+      /* Is this conversion implemented yet?  */
+      gcc_assert (libcall);
 
      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
@@ -433,9 +430,8 @@ convert_move (rtx to, rtx from, int unsignedp)
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
 
-      if (trunc_optab->handlers[to_mode][full_mode].insn_code
-	  == CODE_FOR_nothing)
-	abort ();
+      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
+		  != CODE_FOR_nothing);
 
      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
@@ -448,9 +444,8 @@ convert_move (rtx to, rtx from, int unsignedp)
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
 
-      if (sext_optab->handlers[full_mode][from_mode].insn_code
-	  == CODE_FOR_nothing)
-	abort ();
+      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
+		  != CODE_FOR_nothing);
 
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
@@ -557,8 +552,7 @@ convert_move (rtx to, rtx from, int unsignedp)
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);
 
-	  if (subword == 0)
-	    abort ();
+	  gcc_assert (subword);
 
	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
@@ -683,7 +677,7 @@ convert_move (rtx to, rtx from, int unsignedp)
    }
 
  /* Mode combination is not recognized.  */
-  abort ();
+  gcc_unreachable ();
 }
 
 /* Return an rtx for a value that would result
@@ -797,8 +791,7 @@ convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int uns
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
-      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
-	abort ();
+      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }
 
@@ -964,15 +957,13 @@ move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
    }
 
  /* The code above should have handled everything.  */
-  if (data.len > 0)
-    abort ();
+  gcc_assert (!data.len);
 
  if (endp)
    {
      rtx to1;
 
-      if (data.reverse)
-	abort ();
+      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
@@ -1045,8 +1036,7 @@ move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
      max_size = GET_MODE_SIZE (mode);
    }
 
-  if (l)
-    abort ();
+  gcc_assert (!l);
  return n_insns;
 }
@@ -1095,7 +1085,7 @@ move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
 #ifdef PUSH_ROUNDING
	    emit_single_push_insn (mode, from1, NULL);
 #else
-	    abort ();
+	    gcc_unreachable ();
 #endif
	}
 
@@ -1149,17 +1139,14 @@ emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
      break;
 
    default:
-      abort ();
+      gcc_unreachable ();
    }
 
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
 
-  if (!MEM_P (x))
-    abort ();
-  if (!MEM_P (y))
-    abort ();
-  if (size == 0)
-    abort ();
+  gcc_assert (MEM_P (x));
+  gcc_assert (MEM_P (y));
+  gcc_assert (size);
 
  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
@@ -1530,8 +1517,7 @@ move_block_from_reg (int regno, rtx x, int nregs)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);
 
-      if (tem == 0)
-	abort ();
+      gcc_assert (tem);
 
      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
@@ -1549,8 +1535,7 @@ gen_group_rtx (rtx orig)
  int i, length;
  rtx *tmps;
 
-  if (GET_CODE (orig) != PARALLEL)
-    abort ();
+  gcc_assert (GET_CODE (orig) == PARALLEL);
 
  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);
@@ -1583,8 +1568,7 @@ emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
  rtx *tmps, src;
  int start, i;
 
-  if (GET_CODE (dst) != PARALLEL)
-    abort ();
+  gcc_assert (GET_CODE (dst) == PARALLEL);
 
  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
@@ -1618,8 +1602,7 @@ emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
-	  if (bytelen <= 0)
-	    abort ();
+	  gcc_assert (bytelen > 0);
	}
 
      /* If we won't be loading directly from memory, protect the real source
@@ -1668,14 +1651,15 @@ emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
				     (bytepos % slen0) * BITS_PER_UNIT,
				     1, NULL_RTX, mode, mode);
	}
-      else if (bytepos == 0)
+      else
	{
-	  rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
+	  rtx mem;
+
+	  gcc_assert (!bytepos);
+	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, 0);
	}
-      else
-	abort ();
 
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
@@ -1719,10 +1703,9 @@ emit_group_move (rtx dst, rtx src)
 {
  int i;
 
-  if (GET_CODE (src) != PARALLEL
-      || GET_CODE (dst) != PARALLEL
-      || XVECLEN (src, 0) != XVECLEN (dst, 0))
-    abort ();
+  gcc_assert (GET_CODE (src) == PARALLEL
+	      && GET_CODE (dst) == PARALLEL
+	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
 
  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
@@ -1741,8 +1724,7 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
  rtx *tmps, dst;
  int start, i;
 
-  if (GET_CODE (src) != PARALLEL)
-    abort ();
+  gcc_assert (GET_CODE (src) == PARALLEL);
 
  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
@@ -1829,8 +1811,9 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
-	  else if (bytepos == 0 && XVECLEN (src, 0))
+	  else
	    {
+	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
-	  else
-	    abort ();
	}
 
      /* Optimize the access just a bit.  */
@@ -1947,10 +1928,8 @@ copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
 void
 use_reg (rtx *call_fusage, rtx reg)
 {
-  if (!REG_P (reg)
-      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
-    abort ();
-
+  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
+
  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
@@ -1964,8 +1943,7 @@ use_regs (rtx *call_fusage, int regno, int nregs)
 {
  int i;
 
-  if (regno + nregs > FIRST_PSEUDO_REGISTER)
-    abort ();
+  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
 
  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
@@ -2081,8 +2059,7 @@ can_store_by_pieces (unsigned HOST_WIDE_INT len,
	}
 
      /* The code above should have handled everything.  */
-      if (l != 0)
-	abort ();
+      gcc_assert (!l);
    }
 
  return 1;
@@ -2105,13 +2082,11 @@ store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
 
  if (len == 0)
    {
-      if (endp == 2)
-	abort ();
+      gcc_assert (endp != 2);
      return to;
    }
 
-  if (! STORE_BY_PIECES_P (len, align))
-    abort ();
+  gcc_assert (STORE_BY_PIECES_P (len, align));
  data.constfun = constfun;
  data.constfundata = constfundata;
  data.len = len;
@@ -2121,8 +2096,7 @@ store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
    {
      rtx to1;
 
-      if (data.reverse)
-	abort ();
+      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
@@ -2271,8 +2245,7 @@ store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
    }
 
  /* The code above should have handled everything.  */
-  if (data->len != 0)
-    abort ();
+  gcc_assert (!data->len);
 }
 
 /* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
@@ -2508,8 +2481,8 @@ emit_move_insn (rtx x, rtx y)
  rtx y_cst = NULL_RTX;
  rtx last_insn, set;
 
-  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
-    abort ();
+  gcc_assert (mode != BLKmode
+	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
 
  if (CONSTANT_P (y))
    {
@@ -2547,8 +2520,7 @@ emit_move_insn (rtx x, rtx y)
	    && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = validize_mem (y);
 
-  if (mode == BLKmode)
-    abort ();
+  gcc_assert (mode != BLKmode);
 
  last_insn = emit_move_insn_1 (x, y);
 
@@ -2572,8 +2544,7 @@ emit_move_insn_1 (rtx x, rtx y)
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
 
-  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
-    abort ();
+  gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
 
  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
@@ -2753,8 +2724,7 @@ emit_move_insn_1 (rtx x, rtx y)
	    if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
	      break;
 
-	  if (tmode == VOIDmode)
-	    abort ();
+	  gcc_assert (tmode != VOIDmode);
 
	  /* Get X and Y in TMODE.  We can't use gen_lowpart here because it
	     may call change_address which is not appropriate if we were
@@ -2803,13 +2773,15 @@ emit_move_insn_1 (rtx x, rtx y)
 
  /* This will handle any multi-word or full-word mode that lacks a move_insn
     pattern.  However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
-  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
+  else
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;
-
+
+      gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
+
 #ifdef PUSH_ROUNDING
 
      /* If X is a push on the stack, do the push now and replace
@@ -2883,8 +2855,7 @@ emit_move_insn_1 (rtx x, rtx y)
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);
 
-	  if (xpart == 0 || ypart == 0)
-	    abort ();
+	  gcc_assert (xpart && ypart);
 
	  need_clobber |= (GET_CODE (xpart) == SUBREG);
 
@@ -2907,8 +2878,6 @@ emit_move_insn_1 (rtx x, rtx y)
 
      return last_insn;
    }
-  else
-    abort ();
 }
 
 /* If Y is representable exactly in a narrower mode, and the target can
@@ -3192,8 +3161,7 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
      else
	offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
 
-      if (size == 0)
-	abort ();
+      gcc_assert (size);
 
      used -= offset;
 
@@ -3490,8 +3458,7 @@ expand_assignment (tree to, tree from, int want_value)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
 
-	  if (!MEM_P (to_rtx))
-	    abort ();
+	  gcc_assert (MEM_P (to_rtx));
 
 #ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (offset_rtx) != Pmode)
@@ -3821,8 +3788,7 @@ store_expr (tree exp, rtx target, int want_value)
	 /* C++ can generate ?: expressions with a throw expression in one
	    branch and an rvalue in the other.  Here, we resolve attempts to
	    store the throw expression's nonexistent result.  */
-      if (want_value)
-	abort ();
+      gcc_assert (!want_value);
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }
@@ -4304,7 +4270,7 @@ count_type_elements (tree type)
    case FUNCTION_TYPE:
    case LANG_TYPE:
    default:
-      abort ();
+      gcc_unreachable ();
    }
 }
 
@@ -4397,721 +4363,731 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
 #endif
 
-  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
-      || TREE_CODE (type) == QUAL_UNION_TYPE)
+  switch (TREE_CODE (type))
    {
-      tree elt;
-
-      /* If size is zero or the target is already cleared, do nothing.  */
-      if (size == 0 || cleared)
-	cleared = 1;
-      /* We either clear the aggregate or indicate the value is dead.  */
-      else if ((TREE_CODE (type) == UNION_TYPE
-		|| TREE_CODE (type) == QUAL_UNION_TYPE)
-	       && ! CONSTRUCTOR_ELTS (exp))
-	/* If the constructor is empty, clear the union.  */
-	{
-	  clear_storage (target, expr_size (exp));
-	  cleared = 1;
-	}
-
-      /* If we are building a static constructor into a register,
-	 set the initial value as zero so we can fold the value into
-	 a constant.  But if more than one register is involved,
-	 this probably loses.  */
-      else if (REG_P (target) && TREE_STATIC (exp)
-	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
-	{
-	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
-	  cleared = 1;
-	}
+    case RECORD_TYPE:
+    case UNION_TYPE:
+    case QUAL_UNION_TYPE:
+      {
+	tree elt;
 
-      /* If the constructor has fewer fields than the structure
-	 or if we are initializing the structure to mostly zeros,
-	 clear the whole structure first.  Don't do this if TARGET is a
-	 register whose mode size isn't equal to SIZE since clear_storage
-	 can't handle this case.  */
-      else if (size > 0
-	       && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
-		   || mostly_zeros_p (exp))
-	       && (!REG_P (target)
-		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
-		       == size)))
-	{
-	  clear_storage (target, GEN_INT (size));
+	/* If size is zero or the target is already cleared, do nothing. */
+	if (size == 0 || cleared)
	  cleared = 1;
-	}
-
-      if (! cleared)
-	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
-
-      /* Store each element of the constructor into
-	 the corresponding field of TARGET.  */
-
-      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
-	{
-	  tree field = TREE_PURPOSE (elt);
-	  tree value = TREE_VALUE (elt);
-	  enum machine_mode mode;
-	  HOST_WIDE_INT bitsize;
-	  HOST_WIDE_INT bitpos = 0;
-	  tree offset;
-	  rtx to_rtx = target;
-
-	  /* Just ignore missing fields.
-	     We cleared the whole structure, above,
-	     if any fields are missing.  */
-	  if (field == 0)
-	    continue;
-
-	  if (cleared && initializer_zerop (value))
-	    continue;
-
-	  if (host_integerp (DECL_SIZE (field), 1))
-	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
-	  else
-	    bitsize = -1;
-
-	  mode = DECL_MODE (field);
-	  if (DECL_BIT_FIELD (field))
-	    mode = VOIDmode;
+	/* We either clear the aggregate or indicate the value is dead.  */
+	else if ((TREE_CODE (type) == UNION_TYPE
+		  || TREE_CODE (type) == QUAL_UNION_TYPE)
+		 && ! CONSTRUCTOR_ELTS (exp))
+	  /* If the constructor is empty, clear the union.  */
+	  {
+	    clear_storage (target, expr_size (exp));
+	    cleared = 1;
+	  }
 
-	  offset = DECL_FIELD_OFFSET (field);
-	  if (host_integerp (offset, 0)
-	      && host_integerp (bit_position (field), 0))
-	    {
-	      bitpos = int_bit_position (field);
-	      offset = 0;
-	    }
-	  else
-	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
+	/* If we are building a static constructor into a register,
	   set the initial value as zero so we can fold the value into
	   a constant.  But if more than one register is involved,
	   this probably loses.  */
+	else if (REG_P (target) && TREE_STATIC (exp)
+		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
+	  {
+	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
+	    cleared = 1;
+	  }
 
-	  if (offset)
-	    {
-	      rtx offset_rtx;
+	/* If the constructor has fewer fields than the structure or
	   if we are initializing the structure to mostly zeros, clear
	   the whole structure first.  Don't do this if TARGET is a
	   register whose mode size isn't equal to SIZE since
	   clear_storage can't handle this case.  */
+	else if (size > 0
+		 && ((list_length (CONSTRUCTOR_ELTS (exp))
+		      != fields_length (type))
+		     || mostly_zeros_p (exp))
+		 && (!REG_P (target)
+		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
+			 == size)))
+	  {
+	    clear_storage (target, GEN_INT (size));
+	    cleared = 1;
+	  }
 
-	      offset
-		= SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
-						  make_tree (TREE_TYPE (exp),
-							     target));
+	if (! cleared)
+	  emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
 
-	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
-	      if (!MEM_P (to_rtx))
-		abort ();
+	/* Store each element of the constructor into the
	   corresponding field of TARGET.  */
+	for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
+	  {
+	    tree field = TREE_PURPOSE (elt);
+	    tree value = TREE_VALUE (elt);
+	    enum machine_mode mode;
+	    HOST_WIDE_INT bitsize;
+	    HOST_WIDE_INT bitpos = 0;
+	    tree offset;
+	    rtx to_rtx = target;
+
+	    /* Just ignore missing fields.  We cleared the whole
	       structure, above, if any fields are missing.  */
+	    if (field == 0)
+	      continue;
+
+	    if (cleared && initializer_zerop (value))
+	      continue;
+
+	    if (host_integerp (DECL_SIZE (field), 1))
+	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
+	    else
+	      bitsize = -1;
+
+	    mode = DECL_MODE (field);
+	    if (DECL_BIT_FIELD (field))
+	      mode = VOIDmode;
+
+	    offset = DECL_FIELD_OFFSET (field);
+	    if (host_integerp (offset, 0)
+		&& host_integerp (bit_position (field), 0))
+	      {
+		bitpos = int_bit_position (field);
+		offset = 0;
+	      }
+	    else
+	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
+
+	    if (offset)
+	      {
+		rtx offset_rtx;
+
+		offset
+		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
+						    make_tree (TREE_TYPE (exp),
+							       target));
+
+		offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
+		gcc_assert (MEM_P (to_rtx));
+
 #ifdef POINTERS_EXTEND_UNSIGNED
-	      if (GET_MODE (offset_rtx) != Pmode)
-		offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
+		if (GET_MODE (offset_rtx) != Pmode)
+		  offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
 #else
-	      if (GET_MODE (offset_rtx) != ptr_mode)
-		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
+		if (GET_MODE (offset_rtx) != ptr_mode)
+		  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
 #endif
-	      to_rtx = offset_address (to_rtx, offset_rtx,
-				       highest_pow2_factor (offset));
-	    }
+		to_rtx = offset_address (to_rtx, offset_rtx,
+					 highest_pow2_factor (offset));
+	      }
 
 #ifdef WORD_REGISTER_OPERATIONS
-	  /* If this initializes a field that is smaller than a word, at the
-	     start of a word, try to widen it to a full word.
-	     This special case allows us to output C++ member function
-	     initializations in a form that the optimizers can understand.  */
-	  if (REG_P (target)
-	      && bitsize < BITS_PER_WORD
-	      && bitpos % BITS_PER_WORD == 0
-	      && GET_MODE_CLASS (mode) == MODE_INT
-	      && TREE_CODE (value) == INTEGER_CST
-	      && exp_size >= 0
-	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
-	    {
-	      tree type = TREE_TYPE (value);
-
-	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
-		{
-		  type = lang_hooks.types.type_for_size
-		    (BITS_PER_WORD, TYPE_UNSIGNED (type));
-		  value = convert (type, value);
-		}
-
-	      if (BYTES_BIG_ENDIAN)
-		value
-		  = fold (build2 (LSHIFT_EXPR, type, value,
-				  build_int_cst (NULL_TREE,
-						 BITS_PER_WORD - bitsize)));
-	      bitsize = BITS_PER_WORD;
-	      mode = word_mode;
-	    }
+	    /* If this initializes a field that is smaller than a
	       word, at the start of a word, try to widen it to a full
	       word.  This special case allows us to output C++ member
	       function initializations in a form that the optimizers
	       can understand.
*/ + if (REG_P (target) + && bitsize < BITS_PER_WORD + && bitpos % BITS_PER_WORD == 0 + && GET_MODE_CLASS (mode) == MODE_INT + && TREE_CODE (value) == INTEGER_CST + && exp_size >= 0 + && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT) + { + tree type = TREE_TYPE (value); + + if (TYPE_PRECISION (type) < BITS_PER_WORD) + { + type = lang_hooks.types.type_for_size + (BITS_PER_WORD, TYPE_UNSIGNED (type)); + value = convert (type, value); + } + + if (BYTES_BIG_ENDIAN) + value + = fold (build2 (LSHIFT_EXPR, type, value, + build_int_cst (NULL_TREE, + BITS_PER_WORD - bitsize))); + bitsize = BITS_PER_WORD; + mode = word_mode; + } #endif - if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx) - && DECL_NONADDRESSABLE_P (field)) - { - to_rtx = copy_rtx (to_rtx); - MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; - } - - store_constructor_field (to_rtx, bitsize, bitpos, mode, - value, type, cleared, - get_alias_set (TREE_TYPE (field))); - } - } - - else if (TREE_CODE (type) == ARRAY_TYPE) - { - tree elt; - int i; - int need_to_clear; - tree domain; - tree elttype = TREE_TYPE (type); - int const_bounds_p; - HOST_WIDE_INT minelt = 0; - HOST_WIDE_INT maxelt = 0; - - domain = TYPE_DOMAIN (type); - const_bounds_p = (TYPE_MIN_VALUE (domain) - && TYPE_MAX_VALUE (domain) - && host_integerp (TYPE_MIN_VALUE (domain), 0) - && host_integerp (TYPE_MAX_VALUE (domain), 0)); - - /* If we have constant bounds for the range of the type, get them. */ - if (const_bounds_p) - { - minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0); - maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0); - } - - /* If the constructor has fewer elements than the array, - clear the whole array first. Similarly if this is - static constructor of a non-BLKmode object. */ - if (cleared) - need_to_clear = 0; - else if (REG_P (target) && TREE_STATIC (exp)) - need_to_clear = 1; - else - { - HOST_WIDE_INT count = 0, zero_count = 0; - need_to_clear = ! const_bounds_p; - - /* This loop is a more accurate version of the loop in - mostly_zeros_p (it handles RANGE_EXPR in an index). - It is also needed to check for missing elements. */ - for (elt = CONSTRUCTOR_ELTS (exp); - elt != NULL_TREE && ! need_to_clear; - elt = TREE_CHAIN (elt)) - { - tree index = TREE_PURPOSE (elt); - HOST_WIDE_INT this_node_count; - - if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) - { - tree lo_index = TREE_OPERAND (index, 0); - tree hi_index = TREE_OPERAND (index, 1); - - if (! host_integerp (lo_index, 1) - || ! host_integerp (hi_index, 1)) - { - need_to_clear = 1; - break; - } - - this_node_count = (tree_low_cst (hi_index, 1) - - tree_low_cst (lo_index, 1) + 1); - } - else - this_node_count = 1; - - count += this_node_count; - if (mostly_zeros_p (TREE_VALUE (elt))) - zero_count += this_node_count; - } - - /* Clear the entire array first if there are any missing elements, - or if the incidence of zero elements is >= 75%. */ - if (! need_to_clear - && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count)) - need_to_clear = 1; - } - - if (need_to_clear && size > 0) - { - if (REG_P (target)) - emit_move_insn (target, CONST0_RTX (GET_MODE (target))); - else - clear_storage (target, GEN_INT (size)); - cleared = 1; - } - - if (!cleared && REG_P (target)) - /* Inform later passes that the old value is dead. */ - emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); - - /* Store each element of the constructor into - the corresponding element of TARGET, determined - by counting the elements. 
*/ - for (elt = CONSTRUCTOR_ELTS (exp), i = 0; - elt; - elt = TREE_CHAIN (elt), i++) - { - enum machine_mode mode; - HOST_WIDE_INT bitsize; - HOST_WIDE_INT bitpos; - int unsignedp; - tree value = TREE_VALUE (elt); - tree index = TREE_PURPOSE (elt); - rtx xtarget = target; - - if (cleared && initializer_zerop (value)) - continue; - - unsignedp = TYPE_UNSIGNED (elttype); - mode = TYPE_MODE (elttype); - if (mode == BLKmode) - bitsize = (host_integerp (TYPE_SIZE (elttype), 1) - ? tree_low_cst (TYPE_SIZE (elttype), 1) - : -1); - else - bitsize = GET_MODE_BITSIZE (mode); - - if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) - { - tree lo_index = TREE_OPERAND (index, 0); - tree hi_index = TREE_OPERAND (index, 1); - rtx index_r, pos_rtx; - HOST_WIDE_INT lo, hi, count; - tree position; - - /* If the range is constant and "small", unroll the loop. */ - if (const_bounds_p - && host_integerp (lo_index, 0) - && host_integerp (hi_index, 0) - && (lo = tree_low_cst (lo_index, 0), - hi = tree_low_cst (hi_index, 0), - count = hi - lo + 1, - (!MEM_P (target) - || count <= 2 - || (host_integerp (TYPE_SIZE (elttype), 1) - && (tree_low_cst (TYPE_SIZE (elttype), 1) * count - <= 40 * 8))))) - { - lo -= minelt; hi -= minelt; - for (; lo <= hi; lo++) - { - bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0); - - if (MEM_P (target) - && !MEM_KEEP_ALIAS_SET_P (target) - && TREE_CODE (type) == ARRAY_TYPE - && TYPE_NONALIASED_COMPONENT (type)) - { - target = copy_rtx (target); - MEM_KEEP_ALIAS_SET_P (target) = 1; - } - - store_constructor_field - (target, bitsize, bitpos, mode, value, type, cleared, - get_alias_set (elttype)); - } - } - else - { - rtx loop_start = gen_label_rtx (); - rtx loop_end = gen_label_rtx (); - tree exit_cond; - - expand_expr (hi_index, NULL_RTX, VOIDmode, 0); - unsignedp = TYPE_UNSIGNED (domain); - - index = build_decl (VAR_DECL, NULL_TREE, domain); - - index_r - = gen_reg_rtx (promote_mode (domain, DECL_MODE (index), - &unsignedp, 0)); - SET_DECL_RTL (index, index_r); - store_expr (lo_index, index_r, 0); - - /* Build the head of the loop. */ - do_pending_stack_adjust (); - emit_label (loop_start); - - /* Assign value to element index. */ - position - = convert (ssizetype, - fold (build2 (MINUS_EXPR, TREE_TYPE (index), - index, TYPE_MIN_VALUE (domain)))); - position = size_binop (MULT_EXPR, position, - convert (ssizetype, - TYPE_SIZE_UNIT (elttype))); - - pos_rtx = expand_expr (position, 0, VOIDmode, 0); - xtarget = offset_address (target, pos_rtx, - highest_pow2_factor (position)); - xtarget = adjust_address (xtarget, mode, 0); - if (TREE_CODE (value) == CONSTRUCTOR) - store_constructor (value, xtarget, cleared, - bitsize / BITS_PER_UNIT); - else - store_expr (value, xtarget, 0); - - /* Generate a conditional jump to exit the loop. */ - exit_cond = build2 (LT_EXPR, integer_type_node, - index, hi_index); - jumpif (exit_cond, loop_end); - - /* Update the loop counter, and jump to the head of - the loop. */ - expand_assignment (index, - build2 (PLUS_EXPR, TREE_TYPE (index), - index, integer_one_node), 0); - - emit_jump (loop_start); - - /* Build the end of the loop. */ - emit_label (loop_end); - } - } - else if ((index != 0 && ! host_integerp (index, 0)) - || ! 
host_integerp (TYPE_SIZE (elttype), 1)) - { - tree position; - - if (index == 0) - index = ssize_int (1); - - if (minelt) - index = fold_convert (ssizetype, - fold (build2 (MINUS_EXPR, - TREE_TYPE (index), - index, - TYPE_MIN_VALUE (domain)))); - - position = size_binop (MULT_EXPR, index, - convert (ssizetype, - TYPE_SIZE_UNIT (elttype))); - xtarget = offset_address (target, - expand_expr (position, 0, VOIDmode, 0), - highest_pow2_factor (position)); - xtarget = adjust_address (xtarget, mode, 0); - store_expr (value, xtarget, 0); - } - else - { - if (index != 0) - bitpos = ((tree_low_cst (index, 0) - minelt) - * tree_low_cst (TYPE_SIZE (elttype), 1)); - else - bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1)); - - if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target) - && TREE_CODE (type) == ARRAY_TYPE - && TYPE_NONALIASED_COMPONENT (type)) - { - target = copy_rtx (target); - MEM_KEEP_ALIAS_SET_P (target) = 1; - } - store_constructor_field (target, bitsize, bitpos, mode, value, - type, cleared, get_alias_set (elttype)); - } - } - } - - else if (TREE_CODE (type) == VECTOR_TYPE) - { - tree elt; - int i; - int need_to_clear; - int icode = 0; - tree elttype = TREE_TYPE (type); - int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1); - enum machine_mode eltmode = TYPE_MODE (elttype); - HOST_WIDE_INT bitsize; - HOST_WIDE_INT bitpos; - rtx *vector = NULL; - unsigned n_elts; - - if (eltmode == BLKmode) - abort (); - - n_elts = TYPE_VECTOR_SUBPARTS (type); - if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target))) - { - enum machine_mode mode = GET_MODE (target); - - icode = (int) vec_init_optab->handlers[mode].insn_code; - if (icode != CODE_FOR_nothing) - { - unsigned int i; - - vector = alloca (n_elts); - for (i = 0; i < n_elts; i++) - vector [i] = CONST0_RTX (GET_MODE_INNER (mode)); - } - } - - /* If the constructor has fewer elements than the vector, - clear the whole array first. Similarly if this is - static constructor of a non-BLKmode object. */ - if (cleared) - need_to_clear = 0; - else if (REG_P (target) && TREE_STATIC (exp)) - need_to_clear = 1; - else - { - unsigned HOST_WIDE_INT count = 0, zero_count = 0; - - for (elt = CONSTRUCTOR_ELTS (exp); - elt != NULL_TREE; - elt = TREE_CHAIN (elt)) - { - int n_elts_here = - tree_low_cst ( - int_const_binop (TRUNC_DIV_EXPR, - TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))), - TYPE_SIZE (elttype), 0), 1); - - count += n_elts_here; - if (mostly_zeros_p (TREE_VALUE (elt))) - zero_count += n_elts_here; - } - - /* Clear the entire vector first if there are any missing elements, - or if the incidence of zero elements is >= 75%. 
*/ - need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count); - } - - if (need_to_clear && size > 0 && !vector) - { - if (REG_P (target)) - emit_move_insn (target, CONST0_RTX (GET_MODE (target))); - else - clear_storage (target, GEN_INT (size)); - cleared = 1; - } + if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx) + && DECL_NONADDRESSABLE_P (field)) + { + to_rtx = copy_rtx (to_rtx); + MEM_KEEP_ALIAS_SET_P (to_rtx) = 1; + } + + store_constructor_field (to_rtx, bitsize, bitpos, mode, + value, type, cleared, + get_alias_set (TREE_TYPE (field))); + } + break; + } + case ARRAY_TYPE: + { + tree elt; + int i; + int need_to_clear; + tree domain; + tree elttype = TREE_TYPE (type); + int const_bounds_p; + HOST_WIDE_INT minelt = 0; + HOST_WIDE_INT maxelt = 0; + + domain = TYPE_DOMAIN (type); + const_bounds_p = (TYPE_MIN_VALUE (domain) + && TYPE_MAX_VALUE (domain) + && host_integerp (TYPE_MIN_VALUE (domain), 0) + && host_integerp (TYPE_MAX_VALUE (domain), 0)); + + /* If we have constant bounds for the range of the type, get them. */ + if (const_bounds_p) + { + minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0); + maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0); + } - if (!cleared && REG_P (target)) - /* Inform later passes that the old value is dead. */ - emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); + /* If the constructor has fewer elements than the array, clear + the whole array first. Similarly if this is static + constructor of a non-BLKmode object. */ + if (cleared) + need_to_clear = 0; + else if (REG_P (target) && TREE_STATIC (exp)) + need_to_clear = 1; + else + { + HOST_WIDE_INT count = 0, zero_count = 0; + need_to_clear = ! const_bounds_p; + + /* This loop is a more accurate version of the loop in + mostly_zeros_p (it handles RANGE_EXPR in an index). It + is also needed to check for missing elements. */ + for (elt = CONSTRUCTOR_ELTS (exp); + elt != NULL_TREE && ! need_to_clear; + elt = TREE_CHAIN (elt)) + { + tree index = TREE_PURPOSE (elt); + HOST_WIDE_INT this_node_count; + + if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) + { + tree lo_index = TREE_OPERAND (index, 0); + tree hi_index = TREE_OPERAND (index, 1); + + if (! host_integerp (lo_index, 1) + || ! host_integerp (hi_index, 1)) + { + need_to_clear = 1; + break; + } + + this_node_count = (tree_low_cst (hi_index, 1) + - tree_low_cst (lo_index, 1) + 1); + } + else + this_node_count = 1; + + count += this_node_count; + if (mostly_zeros_p (TREE_VALUE (elt))) + zero_count += this_node_count; + } + + /* Clear the entire array first if there are any missing + elements, or if the incidence of zero elements is >= + 75%. */ + if (! need_to_clear + && (count < maxelt - minelt + 1 + || 4 * zero_count >= 3 * count)) + need_to_clear = 1; + } + + if (need_to_clear && size > 0) + { + if (REG_P (target)) + emit_move_insn (target, CONST0_RTX (GET_MODE (target))); + else + clear_storage (target, GEN_INT (size)); + cleared = 1; + } - /* Store each element of the constructor into the corresponding - element of TARGET, determined by counting the elements. */ - for (elt = CONSTRUCTOR_ELTS (exp), i = 0; - elt; - elt = TREE_CHAIN (elt), i += bitsize / elt_size) - { - tree value = TREE_VALUE (elt); - tree index = TREE_PURPOSE (elt); - HOST_WIDE_INT eltpos; + if (!cleared && REG_P (target)) + /* Inform later passes that the old value is dead. 
*/ + emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); - bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1); - if (cleared && initializer_zerop (value)) - continue; + /* Store each element of the constructor into the + corresponding element of TARGET, determined by counting the + elements. */ + for (elt = CONSTRUCTOR_ELTS (exp), i = 0; + elt; + elt = TREE_CHAIN (elt), i++) + { + enum machine_mode mode; + HOST_WIDE_INT bitsize; + HOST_WIDE_INT bitpos; + int unsignedp; + tree value = TREE_VALUE (elt); + tree index = TREE_PURPOSE (elt); + rtx xtarget = target; + + if (cleared && initializer_zerop (value)) + continue; + + unsignedp = TYPE_UNSIGNED (elttype); + mode = TYPE_MODE (elttype); + if (mode == BLKmode) + bitsize = (host_integerp (TYPE_SIZE (elttype), 1) + ? tree_low_cst (TYPE_SIZE (elttype), 1) + : -1); + else + bitsize = GET_MODE_BITSIZE (mode); + + if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR) + { + tree lo_index = TREE_OPERAND (index, 0); + tree hi_index = TREE_OPERAND (index, 1); + rtx index_r, pos_rtx; + HOST_WIDE_INT lo, hi, count; + tree position; + + /* If the range is constant and "small", unroll the loop. */ + if (const_bounds_p + && host_integerp (lo_index, 0) + && host_integerp (hi_index, 0) + && (lo = tree_low_cst (lo_index, 0), + hi = tree_low_cst (hi_index, 0), + count = hi - lo + 1, + (!MEM_P (target) + || count <= 2 + || (host_integerp (TYPE_SIZE (elttype), 1) + && (tree_low_cst (TYPE_SIZE (elttype), 1) * count + <= 40 * 8))))) + { + lo -= minelt; hi -= minelt; + for (; lo <= hi; lo++) + { + bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0); + + if (MEM_P (target) + && !MEM_KEEP_ALIAS_SET_P (target) + && TREE_CODE (type) == ARRAY_TYPE + && TYPE_NONALIASED_COMPONENT (type)) + { + target = copy_rtx (target); + MEM_KEEP_ALIAS_SET_P (target) = 1; + } + + store_constructor_field + (target, bitsize, bitpos, mode, value, type, cleared, + get_alias_set (elttype)); + } + } + else + { + rtx loop_start = gen_label_rtx (); + rtx loop_end = gen_label_rtx (); + tree exit_cond; + + expand_expr (hi_index, NULL_RTX, VOIDmode, 0); + unsignedp = TYPE_UNSIGNED (domain); + + index = build_decl (VAR_DECL, NULL_TREE, domain); + + index_r + = gen_reg_rtx (promote_mode (domain, DECL_MODE (index), + &unsignedp, 0)); + SET_DECL_RTL (index, index_r); + store_expr (lo_index, index_r, 0); + + /* Build the head of the loop. */ + do_pending_stack_adjust (); + emit_label (loop_start); + + /* Assign value to element index. */ + position + = convert (ssizetype, + fold (build2 (MINUS_EXPR, TREE_TYPE (index), + index, TYPE_MIN_VALUE (domain)))); + position = size_binop (MULT_EXPR, position, + convert (ssizetype, + TYPE_SIZE_UNIT (elttype))); + + pos_rtx = expand_expr (position, 0, VOIDmode, 0); + xtarget = offset_address (target, pos_rtx, + highest_pow2_factor (position)); + xtarget = adjust_address (xtarget, mode, 0); + if (TREE_CODE (value) == CONSTRUCTOR) + store_constructor (value, xtarget, cleared, + bitsize / BITS_PER_UNIT); + else + store_expr (value, xtarget, 0); + + /* Generate a conditional jump to exit the loop. */ + exit_cond = build2 (LT_EXPR, integer_type_node, + index, hi_index); + jumpif (exit_cond, loop_end); + + /* Update the loop counter, and jump to the head of + the loop. */ + expand_assignment (index, + build2 (PLUS_EXPR, TREE_TYPE (index), + index, integer_one_node), 0); + + emit_jump (loop_start); + + /* Build the end of the loop. */ + emit_label (loop_end); + } + } + else if ((index != 0 && ! host_integerp (index, 0)) + || ! 
host_integerp (TYPE_SIZE (elttype), 1)) + { + tree position; + + if (index == 0) + index = ssize_int (1); + + if (minelt) + index = fold_convert (ssizetype, + fold (build2 (MINUS_EXPR, + TREE_TYPE (index), + index, + TYPE_MIN_VALUE (domain)))); + + position = size_binop (MULT_EXPR, index, + convert (ssizetype, + TYPE_SIZE_UNIT (elttype))); + xtarget = offset_address (target, + expand_expr (position, 0, VOIDmode, 0), + highest_pow2_factor (position)); + xtarget = adjust_address (xtarget, mode, 0); + store_expr (value, xtarget, 0); + } + else + { + if (index != 0) + bitpos = ((tree_low_cst (index, 0) - minelt) + * tree_low_cst (TYPE_SIZE (elttype), 1)); + else + bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1)); + + if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target) + && TREE_CODE (type) == ARRAY_TYPE + && TYPE_NONALIASED_COMPONENT (type)) + { + target = copy_rtx (target); + MEM_KEEP_ALIAS_SET_P (target) = 1; + } + store_constructor_field (target, bitsize, bitpos, mode, value, + type, cleared, get_alias_set (elttype)); + } + } + break; + } - if (index != 0) - eltpos = tree_low_cst (index, 1); - else - eltpos = i; + case VECTOR_TYPE: + { + tree elt; + int i; + int need_to_clear; + int icode = 0; + tree elttype = TREE_TYPE (type); + int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1); + enum machine_mode eltmode = TYPE_MODE (elttype); + HOST_WIDE_INT bitsize; + HOST_WIDE_INT bitpos; + rtx *vector = NULL; + unsigned n_elts; + + gcc_assert (eltmode != BLKmode); + + n_elts = TYPE_VECTOR_SUBPARTS (type); + if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target))) + { + enum machine_mode mode = GET_MODE (target); + + icode = (int) vec_init_optab->handlers[mode].insn_code; + if (icode != CODE_FOR_nothing) + { + unsigned int i; + + vector = alloca (n_elts); + for (i = 0; i < n_elts; i++) + vector [i] = CONST0_RTX (GET_MODE_INNER (mode)); + } + } + + /* If the constructor has fewer elements than the vector, + clear the whole array first. Similarly if this is static + constructor of a non-BLKmode object. */ + if (cleared) + need_to_clear = 0; + else if (REG_P (target) && TREE_STATIC (exp)) + need_to_clear = 1; + else + { + unsigned HOST_WIDE_INT count = 0, zero_count = 0; + + for (elt = CONSTRUCTOR_ELTS (exp); + elt != NULL_TREE; + elt = TREE_CHAIN (elt)) + { + int n_elts_here = tree_low_cst + (int_const_binop (TRUNC_DIV_EXPR, + TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))), + TYPE_SIZE (elttype), 0), 1); + + count += n_elts_here; + if (mostly_zeros_p (TREE_VALUE (elt))) + zero_count += n_elts_here; + } - if (vector) - { - /* Vector CONSTRUCTORs should only be built from smaller - vectors in the case of BLKmode vectors. */ - if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE) - abort (); - vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0); - } - else - { - enum machine_mode value_mode = - TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE + /* Clear the entire vector first if there are any missing elements, + or if the incidence of zero elements is >= 75%. */ + need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count); + } + + if (need_to_clear && size > 0 && !vector) + { + if (REG_P (target)) + emit_move_insn (target, CONST0_RTX (GET_MODE (target))); + else + clear_storage (target, GEN_INT (size)); + cleared = 1; + } + + if (!cleared && REG_P (target)) + /* Inform later passes that the old value is dead. */ + emit_insn (gen_rtx_CLOBBER (VOIDmode, target)); + + /* Store each element of the constructor into the corresponding + element of TARGET, determined by counting the elements. 
*/ + for (elt = CONSTRUCTOR_ELTS (exp), i = 0; + elt; + elt = TREE_CHAIN (elt), i += bitsize / elt_size) + { + tree value = TREE_VALUE (elt); + tree index = TREE_PURPOSE (elt); + HOST_WIDE_INT eltpos; + + bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1); + if (cleared && initializer_zerop (value)) + continue; + + if (index != 0) + eltpos = tree_low_cst (index, 1); + else + eltpos = i; + + if (vector) + { + /* Vector CONSTRUCTORs should only be built from smaller + vectors in the case of BLKmode vectors. */ + gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE); + vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0); + } + else + { + enum machine_mode value_mode = + TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE ? TYPE_MODE (TREE_TYPE (value)) : eltmode; - bitpos = eltpos * elt_size; - store_constructor_field (target, bitsize, bitpos, value_mode, value, - type, cleared, get_alias_set (elttype)); - } - } - - if (vector) - emit_insn (GEN_FCN (icode) (target, - gen_rtx_PARALLEL (GET_MODE (target), - gen_rtvec_v (n_elts, vector)))); - } - - /* Set constructor assignments. */ - else if (TREE_CODE (type) == SET_TYPE) - { - tree elt = CONSTRUCTOR_ELTS (exp); - unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits; - tree domain = TYPE_DOMAIN (type); - tree domain_min, domain_max, bitlength; - - /* The default implementation strategy is to extract the constant - parts of the constructor, use that to initialize the target, - and then "or" in whatever non-constant ranges we need in addition. - - If a large set is all zero or all ones, it is - probably better to set it using memset. - Also, if a large set has just a single range, it may also be - better to first clear all the first clear the set (using - memset), and set the bits we want. */ - - /* Check for all zeros. */ - if (elt == NULL_TREE && size > 0) - { - if (!cleared) - clear_storage (target, GEN_INT (size)); - return; - } - - domain_min = convert (sizetype, TYPE_MIN_VALUE (domain)); - domain_max = convert (sizetype, TYPE_MAX_VALUE (domain)); - bitlength = size_binop (PLUS_EXPR, - size_diffop (domain_max, domain_min), - ssize_int (1)); - - nbits = tree_low_cst (bitlength, 1); - - /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that - are "complicated" (more than one range), initialize (the - constant parts) by copying from a constant. */ - if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD - || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE)) - { - unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp)); - enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1); - char *bit_buffer = alloca (nbits); - HOST_WIDE_INT word = 0; - unsigned int bit_pos = 0; - unsigned int ibit = 0; - unsigned int offset = 0; /* In bytes from beginning of set. */ - - elt = get_set_constructor_bits (exp, bit_buffer, nbits); - for (;;) - { - if (bit_buffer[ibit]) - { - if (BYTES_BIG_ENDIAN) - word |= (1 << (set_word_size - 1 - bit_pos)); - else - word |= 1 << bit_pos; - } - - bit_pos++; ibit++; - if (bit_pos >= set_word_size || ibit == nbits) - { - if (word != 0 || ! cleared) - { - rtx datum = gen_int_mode (word, mode); - rtx to_rtx; - - /* The assumption here is that it is safe to use - XEXP if the set is multi-word, but not if - it's single-word. 
*/ - if (MEM_P (target)) - to_rtx = adjust_address (target, mode, offset); - else if (offset == 0) - to_rtx = target; - else - abort (); - emit_move_insn (to_rtx, datum); - } - - if (ibit == nbits) - break; - word = 0; - bit_pos = 0; - offset += set_word_size / BITS_PER_UNIT; - } - } - } - else if (!cleared) - /* Don't bother clearing storage if the set is all ones. */ - if (TREE_CHAIN (elt) != NULL_TREE - || (TREE_PURPOSE (elt) == NULL_TREE - ? nbits != 1 - : ( ! host_integerp (TREE_VALUE (elt), 0) - || ! host_integerp (TREE_PURPOSE (elt), 0) - || (tree_low_cst (TREE_VALUE (elt), 0) - - tree_low_cst (TREE_PURPOSE (elt), 0) + 1 - != (HOST_WIDE_INT) nbits)))) - clear_storage (target, expr_size (exp)); - - for (; elt != NULL_TREE; elt = TREE_CHAIN (elt)) - { - /* Start of range of element or NULL. */ - tree startbit = TREE_PURPOSE (elt); - /* End of range of element, or element value. */ - tree endbit = TREE_VALUE (elt); - HOST_WIDE_INT startb, endb; - rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx; - - bitlength_rtx = expand_expr (bitlength, - NULL_RTX, MEM, EXPAND_CONST_ADDRESS); - - /* Handle non-range tuple element like [ expr ]. */ - if (startbit == NULL_TREE) - { - startbit = save_expr (endbit); - endbit = startbit; - } + bitpos = eltpos * elt_size; + store_constructor_field (target, bitsize, bitpos, + value_mode, value, type, + cleared, get_alias_set (elttype)); + } + } + + if (vector) + emit_insn (GEN_FCN (icode) + (target, + gen_rtx_PARALLEL (GET_MODE (target), + gen_rtvec_v (n_elts, vector)))); + break; + } - startbit = convert (sizetype, startbit); - endbit = convert (sizetype, endbit); - if (! integer_zerop (domain_min)) - { - startbit = size_binop (MINUS_EXPR, startbit, domain_min); - endbit = size_binop (MINUS_EXPR, endbit, domain_min); - } - startbit_rtx = expand_expr (startbit, NULL_RTX, MEM, + /* Set constructor assignments. */ + case SET_TYPE: + { + tree elt = CONSTRUCTOR_ELTS (exp); + unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits; + tree domain = TYPE_DOMAIN (type); + tree domain_min, domain_max, bitlength; + + /* The default implementation strategy is to extract the + constant parts of the constructor, use that to initialize + the target, and then "or" in whatever non-constant ranges + we need in addition. + + If a large set is all zero or all ones, it is probably + better to set it using memset. Also, if a large set has + just a single range, it may also be better to first clear + all the first clear the set (using memset), and set the + bits we want. */ + + /* Check for all zeros. */ + if (elt == NULL_TREE && size > 0) + { + if (!cleared) + clear_storage (target, GEN_INT (size)); + return; + } + + domain_min = convert (sizetype, TYPE_MIN_VALUE (domain)); + domain_max = convert (sizetype, TYPE_MAX_VALUE (domain)); + bitlength = size_binop (PLUS_EXPR, + size_diffop (domain_max, domain_min), + ssize_int (1)); + + nbits = tree_low_cst (bitlength, 1); + + /* For "small" sets, or "medium-sized" (up to 32 bytes) sets + that are "complicated" (more than one range), initialize + (the constant parts) by copying from a constant. */ + if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD + || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE)) + { + unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp)); + enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1); + char *bit_buffer = alloca (nbits); + HOST_WIDE_INT word = 0; + unsigned int bit_pos = 0; + unsigned int ibit = 0; + unsigned int offset = 0; /* In bytes from beginning of set. 
*/ + + elt = get_set_constructor_bits (exp, bit_buffer, nbits); + for (;;) + { + if (bit_buffer[ibit]) + { + if (BYTES_BIG_ENDIAN) + word |= (1 << (set_word_size - 1 - bit_pos)); + else + word |= 1 << bit_pos; + } + + bit_pos++; ibit++; + if (bit_pos >= set_word_size || ibit == nbits) + { + if (word != 0 || ! cleared) + { + rtx datum = gen_int_mode (word, mode); + rtx to_rtx; + + /* The assumption here is that it is safe to + use XEXP if the set is multi-word, but not + if it's single-word. */ + if (MEM_P (target)) + to_rtx = adjust_address (target, mode, offset); + else + { + gcc_assert (!offset); + to_rtx = target; + } + emit_move_insn (to_rtx, datum); + } + + if (ibit == nbits) + break; + word = 0; + bit_pos = 0; + offset += set_word_size / BITS_PER_UNIT; + } + } + } + else if (!cleared) + /* Don't bother clearing storage if the set is all ones. */ + if (TREE_CHAIN (elt) != NULL_TREE + || (TREE_PURPOSE (elt) == NULL_TREE + ? nbits != 1 + : ( ! host_integerp (TREE_VALUE (elt), 0) + || ! host_integerp (TREE_PURPOSE (elt), 0) + || (tree_low_cst (TREE_VALUE (elt), 0) + - tree_low_cst (TREE_PURPOSE (elt), 0) + 1 + != (HOST_WIDE_INT) nbits)))) + clear_storage (target, expr_size (exp)); + + for (; elt != NULL_TREE; elt = TREE_CHAIN (elt)) + { + /* Start of range of element or NULL. */ + tree startbit = TREE_PURPOSE (elt); + /* End of range of element, or element value. */ + tree endbit = TREE_VALUE (elt); + HOST_WIDE_INT startb, endb; + rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx; + + bitlength_rtx = expand_expr (bitlength, + NULL_RTX, MEM, EXPAND_CONST_ADDRESS); + + /* Handle non-range tuple element like [ expr ]. */ + if (startbit == NULL_TREE) + { + startbit = save_expr (endbit); + endbit = startbit; + } + + startbit = convert (sizetype, startbit); + endbit = convert (sizetype, endbit); + if (! integer_zerop (domain_min)) + { + startbit = size_binop (MINUS_EXPR, startbit, domain_min); + endbit = size_binop (MINUS_EXPR, endbit, domain_min); + } + startbit_rtx = expand_expr (startbit, NULL_RTX, MEM, + EXPAND_CONST_ADDRESS); + endbit_rtx = expand_expr (endbit, NULL_RTX, MEM, EXPAND_CONST_ADDRESS); - endbit_rtx = expand_expr (endbit, NULL_RTX, MEM, - EXPAND_CONST_ADDRESS); - - if (REG_P (target)) - { - targetx - = assign_temp + + if (REG_P (target)) + { + targetx + = assign_temp ((build_qualified_type (lang_hooks.types.type_for_mode (GET_MODE (target), 0), TYPE_QUAL_CONST)), 0, 1, 1); - emit_move_insn (targetx, target); - } + emit_move_insn (targetx, target); + } + + else + { + gcc_assert (MEM_P (target)); + targetx = target; + } - else if (MEM_P (target)) - targetx = target; - else - abort (); - - /* Optimization: If startbit and endbit are constants divisible - by BITS_PER_UNIT, call memset instead. 
*/ - if (TREE_CODE (startbit) == INTEGER_CST - && TREE_CODE (endbit) == INTEGER_CST - && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0 - && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0) - { - emit_library_call (memset_libfunc, LCT_NORMAL, - VOIDmode, 3, - plus_constant (XEXP (targetx, 0), - startb / BITS_PER_UNIT), - Pmode, - constm1_rtx, TYPE_MODE (integer_type_node), - GEN_INT ((endb - startb) / BITS_PER_UNIT), - TYPE_MODE (sizetype)); - } - else - emit_library_call (setbits_libfunc, LCT_NORMAL, - VOIDmode, 4, XEXP (targetx, 0), - Pmode, bitlength_rtx, TYPE_MODE (sizetype), - startbit_rtx, TYPE_MODE (sizetype), - endbit_rtx, TYPE_MODE (sizetype)); - - if (REG_P (target)) - emit_move_insn (target, targetx); - } + /* Optimization: If startbit and endbit are constants divisible + by BITS_PER_UNIT, call memset instead. */ + if (TREE_CODE (startbit) == INTEGER_CST + && TREE_CODE (endbit) == INTEGER_CST + && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0 + && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0) + { + emit_library_call (memset_libfunc, LCT_NORMAL, + VOIDmode, 3, + plus_constant (XEXP (targetx, 0), + startb / BITS_PER_UNIT), + Pmode, + constm1_rtx, TYPE_MODE (integer_type_node), + GEN_INT ((endb - startb) / BITS_PER_UNIT), + TYPE_MODE (sizetype)); + } + else + emit_library_call (setbits_libfunc, LCT_NORMAL, + VOIDmode, 4, XEXP (targetx, 0), + Pmode, bitlength_rtx, TYPE_MODE (sizetype), + startbit_rtx, TYPE_MODE (sizetype), + endbit_rtx, TYPE_MODE (sizetype)); + + if (REG_P (target)) + emit_move_insn (target, targetx); + } + break; + } + default: + gcc_unreachable (); } - - else - abort (); } /* Store the value of EXP (an expression tree) @@ -5183,8 +5159,7 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, { /* We're storing into a struct containing a single __complex. */ - if (bitpos != 0) - abort (); + gcc_assert (!bitpos); return store_expr (exp, target, value_mode != VOIDmode); } @@ -5237,9 +5212,8 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, boundary. If so, we simply do a block copy. */ if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode) { - if (!MEM_P (target) || !MEM_P (temp) - || bitpos % BITS_PER_UNIT != 0) - abort (); + gcc_assert (MEM_P (target) && MEM_P (temp) + && !(bitpos % BITS_PER_UNIT)); target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT); emit_block_move (target, temp, @@ -5873,7 +5847,7 @@ safe_from_p (rtx x, tree exp, int top_p) case WITH_CLEANUP_EXPR: case CLEANUP_POINT_EXPR: /* Lowered by gimplify.c. */ - abort (); + gcc_unreachable (); case SAVE_EXPR: return safe_from_p (x, TREE_OPERAND (exp, 0), 0); @@ -6037,13 +6011,12 @@ expand_var (tree var) expand_decl (var); else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var)) rest_of_decl_compilation (var, 0, 0); - else if (TREE_CODE (var) == TYPE_DECL - || TREE_CODE (var) == CONST_DECL - || TREE_CODE (var) == FUNCTION_DECL - || TREE_CODE (var) == LABEL_DECL) - /* No expansion needed. */; else - abort (); + /* No expansion needed. */ + gcc_assert (TREE_CODE (var) == TYPE_DECL + || TREE_CODE (var) == CONST_DECL + || TREE_CODE (var) == FUNCTION_DECL + || TREE_CODE (var) == LABEL_DECL); } } @@ -6142,8 +6115,7 @@ expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode, /* If the DECL isn't in memory, then the DECL wasn't properly marked TREE_ADDRESSABLE, which will be either a front-end or a tree optimizer bug. 
*/ - if (GET_CODE (result) != MEM) - abort (); + gcc_assert (GET_CODE (result) == MEM); result = XEXP (result, 0); /* ??? Is this needed anymore? */ @@ -6165,8 +6137,7 @@ expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode, } /* We must have made progress. */ - if (inner == exp) - abort (); + gcc_assert (inner != exp); subtarget = offset || bitpos ? NULL_RTX : target; result = expand_expr_addr_expr (inner, subtarget, tmode, modifier); @@ -6200,8 +6171,7 @@ expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode, { /* Someone beforehand should have rejected taking the address of such an object. */ - if (bitpos % BITS_PER_UNIT != 0) - abort (); + gcc_assert (!(bitpos % BITS_PER_UNIT)); result = plus_constant (result, bitpos / BITS_PER_UNIT); if (modifier < EXPAND_SUM) @@ -6479,8 +6449,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, case FUNCTION_DECL: case RESULT_DECL: - if (DECL_RTL (exp) == 0) - abort (); + gcc_assert (DECL_RTL (exp)); /* Ensure variable marked as used even if it doesn't go through a parser. If it hasn't be used yet, write out an external @@ -6497,18 +6466,17 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, /* Variables inherited from containing functions should have been lowered by this point. */ context = decl_function_context (exp); - if (context != 0 - && context != current_function_decl - && !TREE_STATIC (exp) - /* ??? C++ creates functions that are not TREE_STATIC. */ - && TREE_CODE (exp) != FUNCTION_DECL) - abort (); + gcc_assert (!context + || context == current_function_decl + || TREE_STATIC (exp) + /* ??? C++ creates functions that are not TREE_STATIC. */ + || TREE_CODE (exp) == FUNCTION_DECL); /* This is the case of an array whose size is to be determined from its initializer, while the initializer is still being parsed. See expand_decl. */ - else if (MEM_P (DECL_RTL (exp)) + if (MEM_P (DECL_RTL (exp)) && REG_P (XEXP (DECL_RTL (exp), 0))) temp = validize_mem (DECL_RTL (exp)); @@ -6548,12 +6516,13 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, if (REG_P (DECL_RTL (exp)) && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp)) { + enum machine_mode pmode; + /* Get the signedness used for this variable. Ensure we get the same mode we got when the variable was declared. */ - if (GET_MODE (DECL_RTL (exp)) - != promote_mode (type, DECL_MODE (exp), &unsignedp, - (TREE_CODE (exp) == RESULT_DECL ? 1 : 0))) - abort (); + pmode = promote_mode (type, DECL_MODE (exp), &unsignedp, + (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)); + gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode); temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp)); SUBREG_PROMOTED_VAR_P (temp) = 1; @@ -6654,8 +6623,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, expanders calling save_expr immediately before expanding something. Assume this means that we only have to deal with non-BLKmode values. 
*/ - if (GET_MODE (ret) == BLKmode) - abort (); + gcc_assert (GET_MODE (ret) != BLKmode); val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp)); DECL_ARTIFICIAL (val) = 1; @@ -6772,17 +6740,14 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, case ARRAY_REF: -#ifdef ENABLE_CHECKING - if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE) - abort (); -#endif - { tree array = TREE_OPERAND (exp, 0); tree low_bound = array_ref_low_bound (exp); tree index = convert (sizetype, TREE_OPERAND (exp, 1)); HOST_WIDE_INT i; + gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE); + /* Optimize the special-case of a zero lower bound. We convert the low_bound to sizetype to avoid some problems @@ -6952,8 +6917,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, /* If we got back the original object, something is wrong. Perhaps we are evaluating an expression too early. In any event, don't infinitely recurse. */ - if (tem == exp) - abort (); + gcc_assert (tem != exp); /* If TEM's type is a union of variable size, pass TARGET to the inner computation, since it will need a temporary and TARGET is known @@ -7007,8 +6971,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM); - if (!MEM_P (op0)) - abort (); + gcc_assert (MEM_P (op0)); #ifdef POINTERS_EXTEND_UNSIGNED if (GET_MODE (offset_rtx) != Pmode) @@ -7055,8 +7018,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, one element arrays having the same mode as its element. */ if (GET_CODE (op0) == CONCAT) { - if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0))) - abort (); + gcc_assert (bitpos == 0 + && bitsize == GET_MODE_BITSIZE (GET_MODE (op0))); return op0; } @@ -7113,10 +7076,9 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, /* In this case, BITPOS must start at a byte boundary and TARGET, if specified, must be a MEM. */ - if (!MEM_P (op0) - || (target != 0 && !MEM_P (target)) - || bitpos % BITS_PER_UNIT != 0) - abort (); + gcc_assert (MEM_P (op0) + && (!target || MEM_P (target)) + && !(bitpos % BITS_PER_UNIT)); emit_block_move (target, adjust_address (op0, VOIDmode, @@ -7261,17 +7223,19 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, adjust_address (target, TYPE_MODE (valtype), 0), modifier == EXPAND_STACK_PARM ? 2 : 0); - else if (REG_P (target)) - /* Store this field into a union of the proper type. */ - store_field (target, - MIN ((int_size_in_bytes (TREE_TYPE - (TREE_OPERAND (exp, 0))) - * BITS_PER_UNIT), - (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)), - 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0), - VOIDmode, 0, type, 0); else - abort (); + { + gcc_assert (REG_P (target)); + + /* Store this field into a union of the proper type. */ + store_field (target, + MIN ((int_size_in_bytes (TREE_TYPE + (TREE_OPERAND (exp, 0))) + * BITS_PER_UNIT), + (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)), + 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0), + VOIDmode, 0, type, 0); + } /* Return the entire union. */ return target; @@ -7347,8 +7311,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, constants to change mode. 
*/ tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0)); - if (TREE_ADDRESSABLE (exp)) - abort (); + gcc_assert (!TREE_ADDRESSABLE (exp)); if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type)) target @@ -7381,8 +7344,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, temp_size, 0, type); rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0); - if (TREE_ADDRESSABLE (exp)) - abort (); + gcc_assert (!TREE_ADDRESSABLE (exp)); if (GET_MODE (op0) == BLKmode) emit_block_move (new_with_op0_mode, op0, @@ -7712,7 +7674,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, case FIX_ROUND_EXPR: case FIX_FLOOR_EXPR: case FIX_CEIL_EXPR: - abort (); /* Not used for C. */ + gcc_unreachable (); /* Not used for C. */ case FIX_TRUNC_EXPR: op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0); @@ -7741,8 +7703,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, temp = expand_unop (mode, optab_for_tree_code (NEGATE_EXPR, type), op0, target, 0); - if (temp == 0) - abort (); + gcc_assert (temp); return REDUCE_BIT_FIELD (temp); case ABS_EXPR: @@ -7751,9 +7712,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, target = 0; /* ABS_EXPR is not valid for complex arguments. */ - if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT - || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT) - abort (); + gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT + && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT); /* Unsigned abs is simply the operand. Testing here means we don't risk generating incorrect code below. */ @@ -7830,8 +7790,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, if (modifier == EXPAND_STACK_PARM) target = 0; temp = expand_unop (mode, one_cmpl_optab, op0, target, 1); - if (temp == 0) - abort (); + gcc_assert (temp); return temp; /* ??? Can optimize bitwise operations with one arg constant. @@ -7968,16 +7927,14 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, only with operands that are always zero or one. */ temp = expand_binop (mode, xor_optab, op0, const1_rtx, target, 1, OPTAB_LIB_WIDEN); - if (temp == 0) - abort (); + gcc_assert (temp); return temp; case STATEMENT_LIST: { tree_stmt_iterator iter; - if (!ignore) - abort (); + gcc_assert (ignore); for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter)) expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier); @@ -7992,11 +7949,10 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, tree then_ = TREE_OPERAND (exp, 1); tree else_ = TREE_OPERAND (exp, 2); - if (TREE_CODE (then_) != GOTO_EXPR - || TREE_CODE (GOTO_DESTINATION (then_)) != LABEL_DECL - || TREE_CODE (else_) != GOTO_EXPR - || TREE_CODE (GOTO_DESTINATION (else_)) != LABEL_DECL) - abort (); + gcc_assert (TREE_CODE (then_) == GOTO_EXPR + && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL + && TREE_CODE (else_) == GOTO_EXPR + && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL); jumpif (pred, label_rtx (GOTO_DESTINATION (then_))); return expand_expr (else_, const0_rtx, VOIDmode, 0); @@ -8007,11 +7963,10 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, a temporary variable, so that we can evaluate them here for side effect only. If type is void, we must do likewise. 
*/ - if (TREE_ADDRESSABLE (type) - || ignore - || TREE_TYPE (TREE_OPERAND (exp, 1)) == void_type_node - || TREE_TYPE (TREE_OPERAND (exp, 2)) == void_type_node) - abort (); + gcc_assert (!TREE_ADDRESSABLE (type) + && !ignore + && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node + && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node); /* If we are not to produce a result, we have no target. Otherwise, if a target was specified use it; it will not be used as an @@ -8163,7 +8118,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, case EH_FILTER_EXPR: case TRY_FINALLY_EXPR: /* Lowered by tree-eh.c. */ - abort (); + gcc_unreachable (); case WITH_CLEANUP_EXPR: case CLEANUP_POINT_EXPR: @@ -8185,7 +8140,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, case TRUTH_ANDIF_EXPR: case TRUTH_ORIF_EXPR: /* Lowered by gimplify.c. */ - abort (); + gcc_unreachable (); case EXC_PTR_EXPR: return get_exception_pointer (cfun); @@ -8196,7 +8151,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, case FDESC_EXPR: /* Function descriptors are not valid except for as initialization constants, and should not be expanded. */ - abort (); + gcc_unreachable (); case SWITCH_EXPR: expand_case (exp); @@ -8232,8 +8187,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, target = 0; temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp, OPTAB_LIB_WIDEN); - if (temp == 0) - abort (); + gcc_assert (temp); return REDUCE_BIT_FIELD (temp); } #undef REDUCE_BIT_FIELD @@ -8499,7 +8453,7 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) break; default: - abort (); + gcc_unreachable (); } /* Put a constant second. */ @@ -8595,8 +8549,7 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap) code = GET_CODE (result); label = gen_label_rtx (); - if (bcc_gen_fctn[(int) code] == 0) - abort (); + gcc_assert (bcc_gen_fctn[(int) code]); emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label)); emit_move_insn (target, invert ? const1_rtx : const0_rtx);
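
The conversion performed above is mechanical, and the same two rules apply in
every hunk: a guard written as "test the failure case and abort" becomes a
single assertion of the condition that must hold, with the test inverted (via
De Morgan's laws for compound conditions — see the emit_move_insn hunk, where
"mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode)"
becomes "mode != BLKmode && (GET_MODE (y) == mode || GET_MODE (y) ==
VOIDmode)"); and an abort () in a branch that can never execute, such as the
default case of the new switch in store_constructor, becomes
gcc_unreachable ().  Below is a minimal stand-alone sketch of the pattern.
The two macro definitions are illustrative stand-ins only, not GCC's; the
real macros in gcc/system.h route through fancy_abort () so the report names
the file, line and function of the failed check.

    #include <stdio.h>
    #include <stdlib.h>

    /* Illustrative stand-ins for the GCC macros used in this patch.  */
    #define gcc_assert(EXPR) \
      ((EXPR) ? (void) 0 \
	      : (fprintf (stderr, "assert failed: %s\n", #EXPR), abort ()))
    #define gcc_unreachable() \
      (fprintf (stderr, "reached unreachable code\n"), abort ())

    /* Old style: test for the invalid case, then abort.  */
    static int
    sign_old (int x)
    {
      if (x == 0)
	abort ();
      return x > 0 ? 1 : -1;
    }

    /* New style: assert what must be true (note the inverted
       condition), and mark provably dead branches explicitly.  */
    static int
    sign_new (int x)
    {
      gcc_assert (x != 0);
      switch (x > 0)
	{
	case 1:
	  return 1;
	case 0:
	  return -1;
	default:
	  gcc_unreachable ();
	}
    }

    int
    main (void)
    {
      printf ("%d %d\n", sign_old (7), sign_new (-7));
      return 0;
    }

Besides brevity, the rewrite has a documentation benefit that is visible in
the store_constructor hunks: the old if/else chain ended in a bare
"else abort ();", whereas the new switch spells out the full set of aggregate
codes it handles (RECORD_TYPE, UNION_TYPE, QUAL_UNION_TYPE, ARRAY_TYPE,
VECTOR_TYPE, SET_TYPE) and marks everything else gcc_unreachable ().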