cfganal.c (pre_and_rev_post_order_compute_fn): Use fn instead of cfun everywhere.
author: Jakub Jelinek <jakub@redhat.com>
Fri, 8 Feb 2019 07:14:57 +0000 (08:14 +0100)
committer: Jakub Jelinek <jakub@gcc.gnu.org>
Fri, 8 Feb 2019 07:14:57 +0000 (08:14 +0100)
* cfganal.c (pre_and_rev_post_order_compute_fn): Use fn instead of
cfun everywhere.

From-SVN: r268664

gcc/ChangeLog
gcc/cfganal.c

index cbb1ebe4a3ebed76c13f049b6a0d2299a1c5f6fd..f2652a422591bdb6d57700363956384299ea8f2e 100644 (file)
@@ -1,3 +1,8 @@
+2019-02-08  Jakub Jelinek  <jakub@redhat.com>
+
+       * cfganal.c (pre_and_rev_post_order_compute_fn): Use fn instead of
+       cfun everywhere.
+
 2019-02-07  David Malcolm  <dmalcolm@redhat.com>
 
        PR tree-optimization/86637
index ad52bff8962b700ac9bc6e1f3869401c75161a61..45ebd1ead60b09ba239a41f22b9d97e0f3f1e24b 100644 (file)
@@ -951,10 +951,10 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
                                   bool include_entry_exit)
 {
   int pre_order_num = 0;
-  int rev_post_order_num = n_basic_blocks_for_fn (cfun) - 1;
+  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;
 
   /* Allocate stack for back-tracking up CFG.  */
-  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);
+  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fn) + 1);
 
   if (include_entry_exit)
     {
@@ -968,7 +968,7 @@ pre_and_rev_post_order_compute_fn (struct function *fn,
     rev_post_order_num -= NUM_FIXED_BLOCKS;
 
   /* Allocate bitmap to track nodes that have been visited.  */
-  auto_sbitmap visited (last_basic_block_for_fn (cfun));
+  auto_sbitmap visited (last_basic_block_for_fn (fn));
 
   /* None of the nodes in the CFG have been visited yet.  */
   bitmap_clear (visited);