avr.h (BRANCH_COST): Increase by 4.
author Georg-Johann Lay <avr@gjlay.de>
Mon, 16 Jan 2017 09:48:45 +0000 (09:48 +0000)
committer Georg-Johann Lay <gjl@gcc.gnu.org>
Mon, 16 Jan 2017 09:48:45 +0000 (09:48 +0000)
gcc/
* config/avr/avr.h (BRANCH_COST) [reload_completed]: Increase by 4.

From-SVN: r244488

gcc/ChangeLog
gcc/config/avr/avr.h

index cdc423a05e113c2d48e0d6db047c9359454a7bbc..0df0bbc3956aa5cf3d6addf7ff6e8454fcd20347 100644 (file)
@@ -1,3 +1,7 @@
+2017-01-16  Georg-Johann Lay  <avr@gjlay.de>
+
+       * config/avr/avr.h (BRANCH_COST) [reload_completed]: Increase by 4.
+
 2017-01-15  Uros Bizjak  <ubizjak@gmail.com>
 
        * config/i386/i386.c (ix86_legitimate_combined_insn): Do not
index c78d4e7bffbe6e2280a72fdb512ecd9e02bb0912..3dfa8c3b00b90a57aefe8e9e147f146e626101b0 100644 (file)
@@ -360,7 +360,12 @@ typedef struct avr_args
       }                                                                 \
   } while (0)
 
-#define BRANCH_COST(speed_p, predictable_p) avr_branch_cost
+/* We increase branch costs after reload in order to keep basic-block
+   reordering from introducing out-of-line jumps and to prefer fall-through
+   edges instead.  The default branch costs are 0, mainly because otherwise
+   do_store_flag might come up with bloated code.  */
+#define BRANCH_COST(speed_p, predictable_p)     \
+  (avr_branch_cost + (reload_completed ? 4 : 0))
 
 #define SLOW_BYTE_ACCESS 0