tree-ssa-pre.c (postorder, postorder_num): Make locals ...
author Richard Biener <rguenther@suse.de>
Mon, 18 Apr 2016 11:38:05 +0000
committer Richard Biener <rguenth@gcc.gnu.org>
Mon, 18 Apr 2016 11:38:05 +0000
2016-04-18  Richard Biener  <rguenther@suse.de>

* tree-ssa-pre.c (postorder, postorder_num): Make locals ...
(compute_antic): ... here.  For partial antic use regular
postorder and scrap iteration.
(compute_partial_antic_aux): Remove unused return value.
(init_pre): Do not allocate postorder.
(fini_pre): Do not free postorder.

From-SVN: r235130
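
The point of "scrap iteration" is that partial ANTIC deliberately ignores
backedges, so the backward dataflow problem it solves is acyclic: one sweep
that processes every block after all of its successors already reaches the
fixed point, and the worklist machinery was pure overhead.  Below is a
minimal single-pass sketch of that scheme on a toy DAG; the block layout,
the unsigned bitsets, and the union-plus-gen transfer are illustrative
assumptions, not GCC's bitmap_set machinery or the real PA_IN/PA_OUT
equations.

#include <stdio.h>

#define NBLOCKS 5

/* Toy acyclic CFG: 0 -> 1, 2;  1 -> 3;  2 -> 3;  3 -> 4;  4 = exit.  */
struct block
{
  int nsucc;
  int succ[2];
  unsigned pa_in;   /* values partially anticipated on entry */
  unsigned gen;     /* values generated locally */
};

static struct block bb[NBLOCKS] = {
  { 2, { 1, 2 }, 0, 0x01 },
  { 1, { 3, 0 }, 0, 0x02 },
  { 1, { 3, 0 }, 0, 0x04 },
  { 1, { 4, 0 }, 0, 0x08 },
  { 0, { 0, 0 }, 0, 0x10 }
};

/* Postorder of the forward graph: every block appears after all of
   its successors, so on an acyclic graph one backward sweep reaches
   the fixed point with no iteration.  */
static const int order[NBLOCKS] = { 4, 3, 1, 2, 0 };

int
main (void)
{
  for (int i = 0; i < NBLOCKS; i++)
    {
      struct block *b = &bb[order[i]];
      unsigned out = 0;
      for (int s = 0; s < b->nsucc; s++)
        out |= bb[b->succ[s]].pa_in;   /* union over successors */
      b->pa_in = out | b->gen;         /* schematic transfer function */
    }

  for (int i = 0; i < NBLOCKS; i++)
    printf ("block %d: pa_in = 0x%02x\n", i, bb[i].pa_in);
  return 0;
}

Full ANTIC, by contrast, honors backedges, so values can flow around
loops; that is why compute_antic keeps its worklist and its changed flag.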

gcc/ChangeLog
gcc/tree-ssa-pre.c

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index d6b08614628e56ff7ce5c8972947fc1cb2b8c54a..51bc8d764a1fe0ddac1c9cd148815fe87496c583 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,12 @@
+2016-04-18  Richard Biener  <rguenther@suse.de>
+
+       * tree-ssa-pre.c (postorder, postorder_num): Make locals ...
+       (compute_antic): ... here.  For partial antic use regular
+       postorder and scrap iteration.
+       (compute_partial_antic_aux): Remove unused return value.
+       (init_pre): Do not allocate postorder.
+       (fini_pre): Do not free postorder.
+
 2016-04-18  Richard Biener  <rguenther@suse.de>
 
         PR middle-end/37870
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index f1a3130cb1c213955c5368d2a7d2ef3467beb6f0..c0e3b807c059307ee77481fa3edda0d12e0cf4b2 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -432,10 +432,6 @@ typedef struct bb_bitmap_sets
 #define BB_LIVE_VOP_ON_EXIT(BB) ((bb_value_sets_t) ((BB)->aux))->vop_on_exit
 
 
-/* Basic block list in postorder.  */
-static int *postorder;
-static int postorder_num;
-
 /* This structure is used to keep track of statistics on what
    optimization PRE was able to perform.  */
 static struct
@@ -2209,11 +2205,10 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
                                  - ANTIC_IN[BLOCK])
 
 */
-static bool
+static void
 compute_partial_antic_aux (basic_block block,
                           bool block_has_abnormal_pred_edge)
 {
-  bool changed = false;
   bitmap_set_t old_PA_IN;
   bitmap_set_t PA_OUT;
   edge e;
@@ -2312,9 +2307,6 @@ compute_partial_antic_aux (basic_block block,
 
   dependent_clean (PA_IN (block), ANTIC_IN (block));
 
-  if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
-    changed = true;
-
  maybe_dump_sets:
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
@@ -2327,7 +2319,6 @@ compute_partial_antic_aux (basic_block block,
     bitmap_set_free (old_PA_IN);
   if (PA_OUT)
     bitmap_set_free (PA_OUT);
-  return changed;
 }
 
 /* Compute ANTIC and partial ANTIC sets.  */
@@ -2349,23 +2340,33 @@ compute_antic (void)
 
   FOR_ALL_BB_FN (block, cfun)
     {
+      BB_VISITED (block) = 0;
+
       FOR_EACH_EDGE (e, ei, block->preds)
        if (e->flags & EDGE_ABNORMAL)
          {
            bitmap_set_bit (has_abnormal_preds, block->index);
+
+           /* We also anticipate nothing.  */
+           BB_VISITED (block) = 1;
            break;
          }
 
-      BB_VISITED (block) = 0;
-
       /* While we are here, give empty ANTIC_IN sets to each block.  */
       ANTIC_IN (block) = bitmap_set_new ();
-      PA_IN (block) = bitmap_set_new ();
+      if (do_partial_partial)
+       PA_IN (block) = bitmap_set_new ();
     }
 
   /* At the exit block we anticipate nothing.  */
   BB_VISITED (EXIT_BLOCK_PTR_FOR_FN (cfun)) = 1;
 
+  /* For ANTIC computation we need a postorder that also guarantees that
+     a block with a single successor is visited after its successor.
+     RPO on the inverted CFG has this property.  */
+  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
+  int postorder_num = inverted_post_order_compute (postorder);
+
   sbitmap worklist = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);
   bitmap_ones (worklist);
   while (changed)
@@ -2403,39 +2404,21 @@ compute_antic (void)
 
   if (do_partial_partial)
     {
-      bitmap_ones (worklist);
-      num_iterations = 0;
-      changed = true;
-      while (changed)
+      /* For partial antic we ignore backedges and thus we do not need
+         to perform any iteration when we process blocks in postorder.  */
+      postorder_num = pre_and_rev_post_order_compute (NULL, postorder, false);
+      for (i = postorder_num - 1 ; i >= 0; i--)
        {
-         if (dump_file && (dump_flags & TDF_DETAILS))
-           fprintf (dump_file, "Starting iteration %d\n", num_iterations);
-         num_iterations++;
-         changed = false;
-         for (i = postorder_num - 1 ; i >= 0; i--)
-           {
-             if (bitmap_bit_p (worklist, postorder[i]))
-               {
-                 basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
-                 bitmap_clear_bit (worklist, block->index);
-                 if (compute_partial_antic_aux (block,
-                                                bitmap_bit_p (has_abnormal_preds,
-                                                              block->index)))
-                   {
-                     FOR_EACH_EDGE (e, ei, block->preds)
-                       bitmap_set_bit (worklist, e->src->index);
-                     changed = true;
-                   }
-               }
-           }
-         /* Theoretically possible, but *highly* unlikely.  */
-         gcc_checking_assert (num_iterations < 500);
+         basic_block block = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
+         compute_partial_antic_aux (block,
+                                    bitmap_bit_p (has_abnormal_preds,
+                                                  block->index));
        }
-      statistics_histogram_event (cfun, "compute_partial_antic iterations",
-                                 num_iterations);
     }
+
   sbitmap_free (has_abnormal_preds);
   sbitmap_free (worklist);
+  free (postorder);
 }
 
 
@@ -4694,12 +4677,6 @@ init_pre (void)
   connect_infinite_loops_to_exit ();
   memset (&pre_stats, 0, sizeof (pre_stats));
 
-  /* For ANTIC computation we need a postorder that also guarantees that
-     a block with a single successor is visited after its successor.
-     RPO on the inverted CFG has this property.  */
-  postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
-  postorder_num = inverted_post_order_compute (postorder);
-
   alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));
 
   calculate_dominance_info (CDI_DOMINATORS);
@@ -4722,7 +4699,6 @@ init_pre (void)
 static void
 fini_pre ()
 {
-  free (postorder);
   value_expressions.release ();
   BITMAP_FREE (inserted_exprs);
   bitmap_obstack_release (&grand_bitmap_obstack);
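
One more note, on the comment this patch moves from init_pre into
compute_antic: full ANTIC needs a block order in which a block with a
single successor is visited after that successor, and reverse post order
on the inverted CFG (what inverted_post_order_compute produces, consumed
from the back of the array) has that property.  The toy program below
checks the property on a small diamond CFG; the graph encoding, the
quadratic predecessor scan, and the helpers are illustrative assumptions,
not GCC's implementation.

#include <stdio.h>

#define N 6

/* Toy CFG successor lists, -1 terminated:
   0 -> 1;  1 -> 2, 3;  2 -> 4;  3 -> 4;  4 -> 5;  5 = exit.  */
static const int succ[N][3] = {
  {  1, -1, -1 },
  {  2,  3, -1 },
  {  4, -1, -1 },
  {  4, -1, -1 },
  {  5, -1, -1 },
  { -1, -1, -1 }
};

static int post[N];
static int n_post;
static int visited[N];

/* Postorder DFS on the inverted graph: from block V, recurse into
   every block U that has a forward edge U -> V.  */
static void
dfs_inverted (int v)
{
  visited[v] = 1;
  for (int u = 0; u < N; u++)
    for (int k = 0; succ[u][k] != -1; k++)
      if (succ[u][k] == v && !visited[u])
        dfs_inverted (u);
  post[n_post++] = v;
}

int
main (void)
{
  dfs_inverted (N - 1);   /* start the inverted walk at the exit block */

  /* Walking POST from the last entry to the first, as compute_antic
     does, visits blocks in reverse post order of the inverted CFG.
     Record each block's position in that walk.  */
  int pos[N];
  for (int i = 0; i < n_post; i++)
    pos[post[i]] = n_post - 1 - i;

  /* Check the property the moved comment relies on: every block with
     exactly one successor is visited after that successor.  */
  for (int b = 0; b < N; b++)
    if (succ[b][0] != -1 && succ[b][1] == -1)
      printf ("block %d at %d, its successor %d at %d: %s\n",
              b, pos[b], succ[b][0], pos[succ[b][0]],
              pos[b] > pos[succ[b][0]] ? "ok" : "VIOLATED");
  return 0;
}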