[AArch64] Generalize code alignment

Author:    Wilco Dijkstra <wdijkstr@arm.com>
Committer: Jiong Wang <jiwang@gcc.gnu.org>
Date:      Thu, 18 Dec 2014 13:46:22 +0000
2014-12-18  Wilco Dijkstra  <wilco.dijkstra@arm.com>

* config/aarch64/aarch64-protos.h (tune-params): Add code alignment
tuning parameters.
* gcc/config/aarch64/aarch64.c (generic_tunings): Add code alignment
tuning parameters.
(cortexa53_tunings): Likewise.
(cortexa57_tunings): Likewise.
(thunderx_tunings): Likewise.
(aarch64_override_options): Use new alignment tunings.

From-SVN: r218865

gcc/ChangeLog
gcc/config/aarch64/aarch64-protos.h
gcc/config/aarch64/aarch64.c
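
In outline, the patch replaces the single per-CPU "align" tuning value with three
independent values (function_align, jump_align and loop_align), and
aarch64_override_options then uses each one as the default for the corresponding
-falign-* option when the user has not set it and is not optimizing for size.
The stand-alone C sketch below models that default-selection logic outside of
GCC; the struct name, the cpu_defaults table and the main() driver are
illustrative only (the numeric values are copied from the tunings in the diff),
not GCC source code.

  #include <stdio.h>

  /* Simplified model of the new per-CPU alignment tunings
     (values copied from the tunings added in this patch).  */
  struct align_tunings
  {
    const char *cpu;
    int function_align;
    int jump_align;
    int loop_align;
  };

  static const struct align_tunings cpu_defaults[] = {
    { "generic",    8,  8, 4 },
    { "cortex-a53", 8,  8, 4 },
    { "cortex-a57", 16, 8, 4 },
    { "thunderx",   8,  8, 8 },
  };

  /* Mirror of the check in aarch64_override_options: a command-line
     value <= 0 means "not set", so fall back to the CPU default.  */
  static int
  effective_align (int user_value, int cpu_default)
  {
    return user_value <= 0 ? cpu_default : user_value;
  }

  int
  main (void)
  {
    int align_functions = 0, align_jumps = 0, align_loops = 0;  /* unset */
    const struct align_tunings *t = &cpu_defaults[2];           /* cortex-a57 */

    printf ("%s: functions %d, jumps %d, loops %d\n", t->cpu,
            effective_align (align_functions, t->function_align),
            effective_align (align_jumps, t->jump_align),
            effective_align (align_loops, t->loop_align));
    return 0;
  }
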

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 1bb694447174d790c0f0c628c448d9c68c10eb0f..6bb5ec2f35f65bcfbbf64f70c2a2107f26f89cce 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,14 @@
+2014-12-18  Wilco Dijkstra  <wilco.dijkstra@arm.com>
+
+       * config/aarch64/aarch64-protos.h (tune-params): Add code alignment
+       tuning parameters.
+       * gcc/config/aarch64/aarch64.c (generic_tunings): Add code alignment
+       tuning parameters.
+       (cortexa53_tunings): Likewise.
+       (cortexa57_tunings): Likewise.
+       (thunderx_tunings): Likewise.
+       (aarch64_override_options): Use new alignment tunings.
+
 2014-12-18  Martin Liska  <mliska@suse.cz>
 
        PR tree-optimization/64330
diff --git a/gcc/config/aarch64/aarch64-protos.h b/gcc/config/aarch64/aarch64-protos.h
index 234efcb32705ddc07a0dd1e1edeae5e6f4ed49d3..f22573bae15d99b4277182099cf45c1458ddb5e4 100644
--- a/gcc/config/aarch64/aarch64-protos.h
+++ b/gcc/config/aarch64/aarch64-protos.h
@@ -170,8 +170,10 @@ struct tune_params
   const struct cpu_vector_cost *const vec_costs;
   const int memmov_cost;
   const int issue_rate;
-  const int align;
   const unsigned int fuseable_ops;
+  const int function_align;
+  const int jump_align;
+  const int loop_align;
   const int int_reassoc_width;
   const int fp_reassoc_width;
   const int vec_reassoc_width;
diff --git a/gcc/config/aarch64/aarch64.c b/gcc/config/aarch64/aarch64.c
index 226a808a01ad43af72e20fcd2de44b5ab069b6fe..cca53f2faa837951b3a24b90bea336e056a3702b 100644
--- a/gcc/config/aarch64/aarch64.c
+++ b/gcc/config/aarch64/aarch64.c
@@ -323,8 +323,10 @@ static const struct tune_params generic_tunings =
   &generic_vector_cost,
   NAMED_PARAM (memmov_cost, 4),
   NAMED_PARAM (issue_rate, 2),
-  NAMED_PARAM (align, 4),
   NAMED_PARAM (fuseable_ops, AARCH64_FUSE_NOTHING),
+  8,   /* function_align.  */
+  8,   /* jump_align.  */
+  4,   /* loop_align.  */
   2,   /* int_reassoc_width.  */
   4,   /* fp_reassoc_width.  */
   1    /* vec_reassoc_width.  */
@@ -338,9 +340,11 @@ static const struct tune_params cortexa53_tunings =
   &generic_vector_cost,
   NAMED_PARAM (memmov_cost, 4),
   NAMED_PARAM (issue_rate, 2),
-  NAMED_PARAM (align, 8),
   NAMED_PARAM (fuseable_ops, (AARCH64_FUSE_MOV_MOVK | AARCH64_FUSE_ADRP_ADD
                              | AARCH64_FUSE_MOVK_MOVK | AARCH64_FUSE_ADRP_LDR)),
+  8,   /* function_align.  */
+  8,   /* jump_align.  */
+  4,   /* loop_align.  */
   2,   /* int_reassoc_width.  */
   4,   /* fp_reassoc_width.  */
   1    /* vec_reassoc_width.  */
@@ -354,8 +358,10 @@ static const struct tune_params cortexa57_tunings =
   &cortexa57_vector_cost,
   NAMED_PARAM (memmov_cost, 4),
   NAMED_PARAM (issue_rate, 3),
-  NAMED_PARAM (align, 8),
   NAMED_PARAM (fuseable_ops, (AARCH64_FUSE_MOV_MOVK | AARCH64_FUSE_ADRP_ADD | AARCH64_FUSE_MOVK_MOVK)),
+  16,  /* function_align.  */
+  8,   /* jump_align.  */
+  4,   /* loop_align.  */
   2,   /* int_reassoc_width.  */
   4,   /* fp_reassoc_width.  */
   1    /* vec_reassoc_width.  */
@@ -369,8 +375,10 @@ static const struct tune_params thunderx_tunings =
   &generic_vector_cost,
   NAMED_PARAM (memmov_cost, 6),
   NAMED_PARAM (issue_rate, 2),
-  NAMED_PARAM (align, 8),
   NAMED_PARAM (fuseable_ops, AARCH64_FUSE_CMP_BRANCH),
+  8,   /* function_align.  */
+  8,   /* jump_align.  */
+  8,   /* loop_align.  */
   2,   /* int_reassoc_width.  */
   4,   /* fp_reassoc_width.  */
   1    /* vec_reassoc_width.  */
@@ -6771,11 +6779,11 @@ aarch64_override_options (void)
   if (!optimize_size)
     {
       if (align_loops <= 0)
-       align_loops = aarch64_tune_params->align;
+       align_loops = aarch64_tune_params->loop_align;
       if (align_jumps <= 0)
-       align_jumps = aarch64_tune_params->align;
+       align_jumps = aarch64_tune_params->jump_align;
       if (align_functions <= 0)
-       align_functions = aarch64_tune_params->align;
+       align_functions = aarch64_tune_params->function_align;
     }
 
   aarch64_override_options_after_change ();
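
With these tunings in place, and assuming the usual mapping between the align_*
variables and the -falign-* command-line options, building with -mcpu=cortex-a57
at -O2 should behave as if -falign-functions=16 -falign-jumps=8 -falign-loops=4
had been given, while any explicit -falign-* value on the command line still
takes precedence, and -Os leaves all three untouched since the block above is
guarded by !optimize_size.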