From 91c5ee5b4ac9c85b6cbf596dc7917910b0d7b1b3 Mon Sep 17 00:00:00 2001
From: Vladimir Makarov
Date: Mon, 16 Dec 2013 18:24:54 +0000
Subject: [PATCH] re PR rtl-optimization/59466 (Slow code generation by LRA
 for memory addresses on PPC)

2013-12-16  Vladimir Makarov

	PR rtl-optimization/59466
	* emit-rtl.c (change_address_1): Don't validate address for LRA.
	* recog.c (general_operand): Accept any memory for LRA.
	* lra.c (lra_set_insn_recog_data): Add an assert.

From-SVN: r206023
---
 gcc/ChangeLog  |  7 +++++++
 gcc/emit-rtl.c |  4 +++-
 gcc/lra.c      | 13 ++++++++++---
 gcc/recog.c    |  8 ++++++--
 4 files changed, 26 insertions(+), 6 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index f279da392ea..941753aa2b2 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,10 @@
+2013-12-16  Vladimir Makarov
+
+	PR rtl-optimization/59466
+	* emit-rtl.c (change_address_1): Don't validate address for LRA.
+	* recog.c (general_operand): Accept any memory for LRA.
+	* lra.c (lra_set_insn_recog_data): Add an assert.
+
 2013-12-16  Kyrylo Tkachov
 
 	* config/arm/driver-arm.c (arm_cpu_table): Add cortex-a12 entry.
diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c
index d7fa3a5e0dc..ad63d0b8b99 100644
--- a/gcc/emit-rtl.c
+++ b/gcc/emit-rtl.c
@@ -1951,7 +1951,9 @@ change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
       && (!validate || memory_address_addr_space_p (mode, addr, as)))
     return memref;
 
-  if (validate)
+  /* Don't validate address for LRA.  LRA can make the address valid
+     by itself in most efficient way.  */
+  if (validate && !lra_in_progress)
     {
       if (reload_in_progress || reload_completed)
 	gcc_assert (memory_address_addr_space_p (mode, addr, as));
diff --git a/gcc/lra.c b/gcc/lra.c
index 1491fc78fc5..ed070c7f556 100644
--- a/gcc/lra.c
+++ b/gcc/lra.c
@@ -1072,9 +1072,16 @@ lra_set_insn_recog_data (rtx insn)
   nop = asm_noperands (PATTERN (insn));
   data->operand_loc = data->dup_loc = NULL;
   if (nop < 0)
-    /* Its is a special insn like USE or CLOBBER.  */
-    data->insn_static_data = insn_static_data
-      = get_static_insn_data (-1, 0, 0, 1);
+    {
+      /* Its is a special insn like USE or CLOBBER.  We should
+	 recognize any regular insn otherwise LRA can do nothing
+	 with this insn.  */
+      gcc_assert (GET_CODE (PATTERN (insn)) == USE
+		  || GET_CODE (PATTERN (insn)) == CLOBBER
+		  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
+      data->insn_static_data = insn_static_data
+	= get_static_insn_data (-1, 0, 0, 1);
+    }
   else
     {
       /* expand_asm_operands makes sure there aren't too many
diff --git a/gcc/recog.c b/gcc/recog.c
index dbd9a8a5065..37e7692ea59 100644
--- a/gcc/recog.c
+++ b/gcc/recog.c
@@ -1021,8 +1021,12 @@ general_operand (rtx op, enum machine_mode mode)
       if (! volatile_ok && MEM_VOLATILE_P (op))
 	return 0;
 
-      /* Use the mem's mode, since it will be reloaded thus.  */
-      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
+      /* Use the mem's mode, since it will be reloaded thus.  LRA can
+	 generate move insn with invalid addresses which is made valid
+	 and efficiently calculated by LRA through further numerous
+	 transformations.  */
+      if (lra_in_progress
+	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
 	return 1;
     }
 
-- 
2.30.2