IA MCU psABI support: changes to libraries
index c5c2cfc5621e46fcce7d8d4df0c47d24bd738ff4..af5e78e0fa47ab7797582f42517dba7d69d3bd84 100644 (file)
--- a/gcc/lcm.c
+++ b/gcc/lcm.c
@@ -1,6 +1,5 @@
 /* Generic partial redundancy elimination with lazy code motion support.
-   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007
-   Free Software Foundation, Inc.
+   Copyright (C) 1998-2015 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -57,17 +56,18 @@ along with GCC; see the file COPYING3.  If not see
 #include "regs.h"
 #include "hard-reg-set.h"
 #include "flags.h"
-#include "real.h"
 #include "insn-config.h"
 #include "recog.h"
+#include "predict.h"
+#include "function.h"
+#include "dominance.h"
+#include "cfg.h"
+#include "cfganal.h"
+#include "lcm.h"
 #include "basic-block.h"
-#include "output.h"
 #include "tm_p.h"
-#include "function.h"
-
-/* We want target macros for the mode switching code to be able to refer
-   to instruction attribute values.  */
-#include "insn-attr.h"
+#include "sbitmap.h"
+#include "dumpfile.h"
 
 /* Edge based LCM routines.  */
 static void compute_antinout_edge (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
@@ -106,28 +106,32 @@ compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
   /* Allocate a worklist array/queue.  Entries are only added to the
      list if they were not already on the list.  So the size is
      bounded by the number of basic blocks.  */
-  qin = qout = worklist = XNEWVEC (basic_block, n_basic_blocks);
+  qin = qout = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
 
   /* We want a maximal solution, so make an optimistic initialization of
      ANTIN.  */
-  sbitmap_vector_ones (antin, last_basic_block);
+  bitmap_vector_ones (antin, last_basic_block_for_fn (cfun));
 
   /* Put every block on the worklist; this is necessary because of the
      optimistic initialization of ANTIN above.  */
-  FOR_EACH_BB_REVERSE (bb)
+  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
+  int postorder_num = post_order_compute (postorder, false, false);
+  for (int i = 0; i < postorder_num; ++i)
     {
+      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
       *qin++ = bb;
       bb->aux = bb;
     }
+  free (postorder);
 
   qin = worklist;
-  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
-  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;
+  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
+  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
 
   /* Mark blocks which are predecessors of the exit block so that we
      can easily identify them below.  */
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
-    e->src->aux = EXIT_BLOCK_PTR;
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
+    e->src->aux = EXIT_BLOCK_PTR_FOR_FN (cfun);
 
   /* Iterate until the worklist is empty.  */
   while (qlen)
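
Seeding the worklist in postorder, as the new code does via post_order_compute,
tends to list a block only after its successors, so this backward problem
(ANTIN/ANTOUT) needs fewer revisits; the forward problems below (LATERIN,
AVIN/AVOUT) switch to inverted_post_order_compute, the postorder of the
edge-reversed CFG, for the symmetric reason.  A standalone toy sketch of plain
DFS postorder, not GCC code:

/* Toy illustration: DFS postorder of a small CFG (0->1, 1->2, 1->3,
   2->4, 3->4).  A backward dataflow worklist seeded in this order sees
   a block after its successors; the edge-reversed variant does the same
   for forward problems.  */
#include <stdio.h>

#define N 5
static const int succs[N][2] = {
  {1, -1}, {2, 3}, {4, -1}, {4, -1}, {-1, -1}   /* -1 ends each list */
};
static int visited[N], order[N], pos;

static void
dfs (int b)
{
  visited[b] = 1;
  for (int i = 0; i < 2 && succs[b][i] >= 0; i++)
    if (!visited[succs[b][i]])
      dfs (succs[b][i]);
  order[pos++] = b;             /* record B after all its successors */
}

int
main (void)
{
  dfs (0);
  for (int i = 0; i < pos; i++)
    printf ("%d ", order[i]);   /* prints: 4 2 3 1 0 */
  return 0;
}
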
@@ -139,26 +143,26 @@ compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
       if (qout >= qend)
        qout = worklist;
 
-      if (bb->aux == EXIT_BLOCK_PTR)
+      if (bb->aux == EXIT_BLOCK_PTR_FOR_FN (cfun))
        /* Do not clear the aux field for blocks which are predecessors of
           the EXIT block.  That way we never add them to the worklist
           again.  */
-       sbitmap_zero (antout[bb->index]);
+       bitmap_clear (antout[bb->index]);
       else
        {
          /* Clear the aux field of this block so that it can be added to
             the worklist again if necessary.  */
          bb->aux = NULL;
-         sbitmap_intersection_of_succs (antout[bb->index], antin, bb->index);
+         bitmap_intersection_of_succs (antout[bb->index], antin, bb);
        }
 
-      if (sbitmap_a_or_b_and_c_cg (antin[bb->index], antloc[bb->index],
+      if (bitmap_or_and (antin[bb->index], antloc[bb->index],
                                   transp[bb->index], antout[bb->index]))
        /* If the in state of this block changed, then we need
           to add the predecessors of this block to the worklist
           if they are not already on the worklist.  */
        FOR_EACH_EDGE (e, ei, bb->preds)
-         if (!e->src->aux && e->src != ENTRY_BLOCK_PTR)
+         if (!e->src->aux && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
            {
              *qin++ = e->src;
              e->src->aux = e;
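
The loop body implements the backward equations ANTOUT[B] = intersection of
ANTIN over B's successors (forced empty when B feeds EXIT) and ANTIN[B] =
ANTLOC[B] | (TRANSP[B] & ANTOUT[B]); bitmap_or_and returns whether ANTIN
changed, which is what re-queues B's predecessors.  A minimal sketch of one
such update on plain 64-bit masks (one bit per expression), not GCC code:

/* Toy illustration of the ANTIN/ANTOUT update for one block B.  */
#include <stdbool.h>
#include <stdint.h>

struct toy_block { int nsuccs; int succ[4]; bool feeds_exit; };

static bool
update_ant (int b, const struct toy_block *blocks,
            const uint64_t *antloc, const uint64_t *transp,
            uint64_t *antin, uint64_t *antout)
{
  /* ANTOUT[B]: intersect ANTIN of the successors; predecessors of EXIT
     get the empty set, matching the aux == EXIT_BLOCK check above.  */
  uint64_t out = blocks[b].feeds_exit ? 0 : ~(uint64_t) 0;
  for (int i = 0; i < blocks[b].nsuccs; i++)
    out &= antin[blocks[b].succ[i]];
  antout[b] = out;

  /* ANTIN[B] = ANTLOC[B] | (TRANSP[B] & ANTOUT[B]); the return value
     plays the role of bitmap_or_and's "changed" result.  */
  uint64_t in = antloc[b] | (transp[b] & out);
  bool changed = in != antin[b];
  antin[b] = in;
  return changed;
}
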
@@ -193,18 +197,18 @@ compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin,
     {
       pred = INDEX_EDGE_PRED_BB (edge_list, x);
       succ = INDEX_EDGE_SUCC_BB (edge_list, x);
-      if (pred == ENTRY_BLOCK_PTR)
-       sbitmap_copy (earliest[x], antin[succ->index]);
+      if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
+       bitmap_copy (earliest[x], antin[succ->index]);
       else
        {
-         if (succ == EXIT_BLOCK_PTR)
-           sbitmap_zero (earliest[x]);
+         if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
+           bitmap_clear (earliest[x]);
          else
            {
-             sbitmap_difference (difference, antin[succ->index],
+             bitmap_and_compl (difference, antin[succ->index],
                                  avout[pred->index]);
-             sbitmap_not (temp_bitmap, antout[pred->index]);
-             sbitmap_a_and_b_or_c (earliest[x], difference,
+             bitmap_not (temp_bitmap, antout[pred->index]);
+             bitmap_and_or (earliest[x], difference,
                                    kill[pred->index], temp_bitmap);
            }
        }
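
For an edge P -> S this computes EARLIEST[P->S] = ANTIN[S] & ~AVOUT[P]
& (KILL[P] | ~ANTOUT[P]), after the two special cases: edges leaving ENTRY
copy ANTIN[S], edges entering EXIT get the empty set.  The same formula on
toy 64-bit masks, for illustration only:

/* Toy illustration of EARLIEST for one edge P -> S.  */
#include <stdint.h>

static uint64_t
earliest_on_edge (uint64_t antin_s, uint64_t antout_p,
                  uint64_t avout_p, uint64_t kill_p,
                  int p_is_entry, int s_is_exit)
{
  if (p_is_entry)
    return antin_s;             /* nothing is available before ENTRY */
  if (s_is_exit)
    return 0;                   /* never insert on edges into EXIT */
  return antin_s & ~avout_p & (kill_p | ~antout_p);
}
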
@@ -259,7 +263,7 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
      list if they were not already on the list.  So the size is
      bounded by the number of basic blocks.  */
   qin = qout = worklist
-    = XNEWVEC (basic_block, n_basic_blocks);
+    = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
 
   /* Initialize a mapping from each edge to its index.  */
   for (i = 0; i < num_edges; i++)
@@ -275,28 +279,35 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
      example the expression is ANTLOC in a block within the loop) then
      this algorithm will detect it when we process the block at the head
      of the optimistic edge.  That will requeue the affected blocks.  */
-  sbitmap_vector_ones (later, num_edges);
+  bitmap_vector_ones (later, num_edges);
 
   /* Note that even though we want an optimistic setting of LATER, we
      do not want to be overly optimistic.  Consider an outgoing edge from
      the entry block.  That edge should always have a LATER value the
      same as EARLIEST for that edge.  */
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
-    sbitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
+    bitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);
 
   /* Add all the blocks to the worklist.  This prevents an early exit from
      the loop given our optimistic initialization of LATER above.  */
-  FOR_EACH_BB (bb)
+  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
+  int postorder_num = inverted_post_order_compute (postorder);
+  for (int i = 0; i < postorder_num; ++i)
     {
+      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
+      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
+         || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
+       continue;
       *qin++ = bb;
       bb->aux = bb;
     }
+  free (postorder);
 
   /* Note that we do not use the last allocated element for our queue,
      as EXIT_BLOCK is never inserted into it. */
   qin = worklist;
-  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
-  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;
+  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
+  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
 
   /* Iterate until the worklist is empty.  */
   while (qlen)
@@ -309,20 +320,20 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
        qout = worklist;
 
       /* Compute the intersection of LATERIN for each incoming edge to B.  */
-      sbitmap_ones (laterin[bb->index]);
+      bitmap_ones (laterin[bb->index]);
       FOR_EACH_EDGE (e, ei, bb->preds)
-       sbitmap_a_and_b (laterin[bb->index], laterin[bb->index],
-                        later[(size_t)e->aux]);
+       bitmap_and (laterin[bb->index], laterin[bb->index],
+                   later[(size_t)e->aux]);
 
       /* Calculate LATER for all outgoing edges.  */
       FOR_EACH_EDGE (e, ei, bb->succs)
-       if (sbitmap_union_of_diff_cg (later[(size_t) e->aux],
-                                     earliest[(size_t) e->aux],
-                                     laterin[e->src->index],
-                                     antloc[e->src->index])
+       if (bitmap_ior_and_compl (later[(size_t) e->aux],
+                                 earliest[(size_t) e->aux],
+                                 laterin[bb->index],
+                                 antloc[bb->index])
            /* If LATER for an outgoing edge was changed, then we need
               to add the target of the outgoing edge to the worklist.  */
-           && e->dest != EXIT_BLOCK_PTR && e->dest->aux == 0)
+           && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest->aux == 0)
          {
            *qin++ = e->dest;
            e->dest->aux = e;
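
The two equations driven here are LATERIN[B] = intersection of LATER over B's
incoming edges and LATER[B->D] = EARLIEST[B->D] | (LATERIN[B] & ~ANTLOC[B]);
bitmap_ior_and_compl reports whether LATER[B->D] changed, which queues D.
Since the loop walks bb->succs, e->src is always bb, so replacing
laterin[e->src->index] and antloc[e->src->index] with the bb->index forms
does not change behaviour.  A toy sketch of the equations, not GCC code:

/* Toy illustration of the LATERIN/LATER equations on bit masks.  */
#include <stdint.h>

/* LATERIN[B]: intersect LATER over B's incoming edges.  */
static uint64_t
laterin_of_block (const uint64_t *later, const int *in_edges, int n_in)
{
  uint64_t in = ~(uint64_t) 0;
  for (int i = 0; i < n_in; i++)
    in &= later[in_edges[i]];
  return in;
}

/* LATER[B->D] = EARLIEST[B->D] | (LATERIN[B] & ~ANTLOC[B]).  */
static uint64_t
later_on_edge (uint64_t earliest_e, uint64_t laterin_b, uint64_t antloc_b)
{
  return earliest_e | (laterin_b & ~antloc_b);
}
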
@@ -335,11 +346,11 @@ compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
   /* Computation of insertion and deletion points requires computing LATERIN
      for the EXIT block.  We allocated an extra entry in the LATERIN array
      for just this purpose.  */
-  sbitmap_ones (laterin[last_basic_block]);
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
-    sbitmap_a_and_b (laterin[last_basic_block],
-                    laterin[last_basic_block],
-                    later[(size_t) e->aux]);
+  bitmap_ones (laterin[last_basic_block_for_fn (cfun)]);
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
+    bitmap_and (laterin[last_basic_block_for_fn (cfun)],
+               laterin[last_basic_block_for_fn (cfun)],
+               later[(size_t) e->aux]);
 
   clear_aux_for_edges ();
   free (worklist);
@@ -355,32 +366,33 @@ compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
   int x;
   basic_block bb;
 
-  FOR_EACH_BB (bb)
-    sbitmap_difference (del[bb->index], antloc[bb->index],
+  FOR_EACH_BB_FN (bb, cfun)
+    bitmap_and_compl (del[bb->index], antloc[bb->index],
                        laterin[bb->index]);
 
   for (x = 0; x < NUM_EDGES (edge_list); x++)
     {
       basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);
 
-      if (b == EXIT_BLOCK_PTR)
-       sbitmap_difference (insert[x], later[x], laterin[last_basic_block]);
+      if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
+       bitmap_and_compl (insert[x], later[x],
+                         laterin[last_basic_block_for_fn (cfun)]);
       else
-       sbitmap_difference (insert[x], later[x], laterin[b->index]);
+       bitmap_and_compl (insert[x], later[x], laterin[b->index]);
     }
 }
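
compute_insert_delete turns the fixed point into the two result vectors:
DELETE[B] = ANTLOC[B] & ~LATERIN[B] (ENTRY and EXIT are skipped by
FOR_EACH_BB_FN) and INSERT[E] = LATER[E] & ~LATERIN[dest(E)], where edges
into EXIT read LATERIN from the extra slot at index
last_basic_block_for_fn (cfun).  As toy bit-mask formulas:

/* Toy illustration of the final LCM products.  */
#include <stdint.h>

/* DELETE[B]: B computes the expression, but the chosen insertion points
   already make the value available on entry, so B's copy is redundant.  */
static uint64_t
delete_in_block (uint64_t antloc_b, uint64_t laterin_b)
{
  return antloc_b & ~laterin_b;
}

/* INSERT[E]: the evaluation can be postponed onto edge E but not into the
   block E enters, so E is where it must be inserted.  */
static uint64_t
insert_on_edge (uint64_t later_e, uint64_t laterin_dest)
{
  return later_e & ~laterin_dest;
}
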
 
-/* Given local properties TRANSP, ANTLOC, AVOUT, KILL return the insert and
-   delete vectors for edge based LCM.  Returns an edgelist which is used to
+/* Given local properties TRANSP, ANTLOC, AVLOC, KILL, return the insert and
+   delete vectors for edge based LCM together with the AVIN/AVOUT sets.
+   Returns an edge list which is used to
    map the insert vector to what edge an expression should be inserted on.  */
 
 struct edge_list *
-pre_edge_lcm (int n_exprs, sbitmap *transp,
+pre_edge_lcm_avs (int n_exprs, sbitmap *transp,
              sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
+             sbitmap *avin, sbitmap *avout,
              sbitmap **insert, sbitmap **del)
 {
   sbitmap *antin, *antout, *earliest;
-  sbitmap *avin, *avout;
   sbitmap *later, *laterin;
   struct edge_list *edge_list;
   int num_edges;
@@ -394,29 +406,32 @@ pre_edge_lcm (int n_exprs, sbitmap *transp,
       fprintf (dump_file, "Edge List:\n");
       verify_edge_list (dump_file, edge_list);
       print_edge_list (dump_file, edge_list);
-      dump_sbitmap_vector (dump_file, "transp", "", transp, last_basic_block);
-      dump_sbitmap_vector (dump_file, "antloc", "", antloc, last_basic_block);
-      dump_sbitmap_vector (dump_file, "avloc", "", avloc, last_basic_block);
-      dump_sbitmap_vector (dump_file, "kill", "", kill, last_basic_block);
+      dump_bitmap_vector (dump_file, "transp", "", transp,
+                         last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "antloc", "", antloc,
+                         last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "avloc", "", avloc,
+                         last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "kill", "", kill,
+                         last_basic_block_for_fn (cfun));
     }
 #endif
 
   /* Compute global availability.  */
-  avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
-  avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
   compute_available (avloc, kill, avout, avin);
-  sbitmap_vector_free (avin);
 
   /* Compute global anticipatability.  */
-  antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
-  antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
+  antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
+  antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
   compute_antinout_edge (antloc, transp, antin, antout);
 
 #ifdef LCM_DEBUG_INFO
   if (dump_file)
     {
-      dump_sbitmap_vector (dump_file, "antin", "", antin, last_basic_block);
-      dump_sbitmap_vector (dump_file, "antout", "", antout, last_basic_block);
+      dump_bitmap_vector (dump_file, "antin", "", antin,
+                         last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "antout", "", antout,
+                         last_basic_block_for_fn (cfun));
     }
 #endif
 
@@ -426,31 +441,34 @@ pre_edge_lcm (int n_exprs, sbitmap *transp,
 
 #ifdef LCM_DEBUG_INFO
   if (dump_file)
-    dump_sbitmap_vector (dump_file, "earliest", "", earliest, num_edges);
+    dump_bitmap_vector (dump_file, "earliest", "", earliest, num_edges);
 #endif
 
   sbitmap_vector_free (antout);
   sbitmap_vector_free (antin);
-  sbitmap_vector_free (avout);
 
   later = sbitmap_vector_alloc (num_edges, n_exprs);
 
   /* Allocate an extra element for the exit block in the laterin vector.  */
-  laterin = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
+  laterin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
+                                 n_exprs);
   compute_laterin (edge_list, earliest, antloc, later, laterin);
 
 #ifdef LCM_DEBUG_INFO
   if (dump_file)
     {
-      dump_sbitmap_vector (dump_file, "laterin", "", laterin, last_basic_block + 1);
-      dump_sbitmap_vector (dump_file, "later", "", later, num_edges);
+      dump_bitmap_vector (dump_file, "laterin", "", laterin,
+                         last_basic_block_for_fn (cfun) + 1);
+      dump_bitmap_vector (dump_file, "later", "", later, num_edges);
     }
 #endif
 
   sbitmap_vector_free (earliest);
 
   *insert = sbitmap_vector_alloc (num_edges, n_exprs);
-  *del = sbitmap_vector_alloc (last_basic_block, n_exprs);
+  *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
+  bitmap_vector_clear (*insert, num_edges);
+  bitmap_vector_clear (*del, last_basic_block_for_fn (cfun));
   compute_insert_delete (edge_list, antloc, later, laterin, *insert, *del);
 
   sbitmap_vector_free (laterin);
@@ -459,15 +477,37 @@ pre_edge_lcm (int n_exprs, sbitmap *transp,
 #ifdef LCM_DEBUG_INFO
   if (dump_file)
     {
-      dump_sbitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
-      dump_sbitmap_vector (dump_file, "pre_delete_map", "", *del,
-                          last_basic_block);
+      dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
+      dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
+                         last_basic_block_for_fn (cfun));
     }
 #endif
 
   return edge_list;
 }
 
+/* Wrapper to allocate avin/avout and call pre_edge_lcm_avs.  */
+
+struct edge_list *
+pre_edge_lcm (int n_exprs, sbitmap *transp,
+             sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
+             sbitmap **insert, sbitmap **del)
+{
+  struct edge_list *edge_list;
+  sbitmap *avin, *avout;
+
+  avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
+  avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
+
+  edge_list = pre_edge_lcm_avs (n_exprs, transp, avloc, antloc, kill,
+                                avin, avout, insert, del);
+
+  sbitmap_vector_free (avout);
+  sbitmap_vector_free (avin);
+
+  return edge_list;
+}
+
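
pre_edge_lcm keeps the historical interface, while pre_edge_lcm_avs lets a
caller that also needs global availability reuse the AVIN/AVOUT sets instead
of recomputing them.  A rough sketch of a hypothetical caller, assuming the
usual sbitmap and edge-list helpers (bitmap_bit_p, free_edge_list); the
helpers compute_local_props and insert_expr_on_edge are placeholders for
pass-specific code, not real GCC functions:

/* Sketch only: how a pass might drive pre_edge_lcm.  */
extern void compute_local_props (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
extern void insert_expr_on_edge (edge, int);

static void
run_lcm_sketch (int n_exprs)
{
  int nbb = last_basic_block_for_fn (cfun);
  sbitmap *transp = sbitmap_vector_alloc (nbb, n_exprs);
  sbitmap *avloc = sbitmap_vector_alloc (nbb, n_exprs);
  sbitmap *antloc = sbitmap_vector_alloc (nbb, n_exprs);
  sbitmap *kill = sbitmap_vector_alloc (nbb, n_exprs);
  sbitmap *insert, *del;

  compute_local_props (transp, avloc, antloc, kill);    /* placeholder */

  struct edge_list *edge_list
    = pre_edge_lcm (n_exprs, transp, avloc, antloc, kill, &insert, &del);

  for (int e = 0; e < NUM_EDGES (edge_list); e++)
    for (int i = 0; i < n_exprs; i++)
      if (bitmap_bit_p (insert[e], i))
        insert_expr_on_edge (INDEX_EDGE (edge_list, e), i);  /* placeholder */

  /* del[bb->index] marks the expressions whose evaluation in BB became
     redundant and can be removed by the caller.  */

  free_edge_list (edge_list);
  sbitmap_vector_free (insert);
  sbitmap_vector_free (del);
  sbitmap_vector_free (kill);
  sbitmap_vector_free (antloc);
  sbitmap_vector_free (avloc);
  sbitmap_vector_free (transp);
}
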
 /* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors.
    Return the number of passes we performed to iterate to a solution.  */
 
@@ -483,28 +523,36 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
   /* Allocate a worklist array/queue.  Entries are only added to the
      list if they were not already on the list.  So the size is
      bounded by the number of basic blocks.  */
-  qin = qout = worklist = 
-    XNEWVEC (basic_block, n_basic_blocks - NUM_FIXED_BLOCKS);
+  qin = qout = worklist =
+    XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
 
   /* We want a maximal solution.  */
-  sbitmap_vector_ones (avout, last_basic_block);
+  bitmap_vector_ones (avout, last_basic_block_for_fn (cfun));
 
   /* Put every block on the worklist; this is necessary because of the
-     optimistic initialization of AVOUT above.  */
-  FOR_EACH_BB (bb)
+     optimistic initialization of AVOUT above.  Use inverted postorder
+     to make the dataflow problem require fewer iterations.  */
+  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
+  int postorder_num = inverted_post_order_compute (postorder);
+  for (int i = 0; i < postorder_num; ++i)
     {
+      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
+      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
+         || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
+       continue;
       *qin++ = bb;
       bb->aux = bb;
     }
+  free (postorder);
 
   qin = worklist;
-  qend = &worklist[n_basic_blocks - NUM_FIXED_BLOCKS];
-  qlen = n_basic_blocks - NUM_FIXED_BLOCKS;
+  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
+  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
 
   /* Mark blocks which are successors of the entry block so that we
      can easily identify them below.  */
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
-    e->dest->aux = ENTRY_BLOCK_PTR;
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
+    e->dest->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun);
 
   /* Iterate until the worklist is empty.  */
   while (qlen)
@@ -519,25 +567,25 @@ compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
       /* If one of the predecessor blocks is the ENTRY block, then the
         intersection of avouts is the null set.  We can identify such blocks
         by the special value in the AUX field in the block structure.  */
-      if (bb->aux == ENTRY_BLOCK_PTR)
+      if (bb->aux == ENTRY_BLOCK_PTR_FOR_FN (cfun))
        /* Do not clear the aux field for blocks which are successors of the
          ENTRY block.  That way we never add them to the worklist again.  */
-       sbitmap_zero (avin[bb->index]);
+       bitmap_clear (avin[bb->index]);
       else
        {
          /* Clear the aux field of this block so that it can be added to
             the worklist again if necessary.  */
          bb->aux = NULL;
-         sbitmap_intersection_of_preds (avin[bb->index], avout, bb->index);
+         bitmap_intersection_of_preds (avin[bb->index], avout, bb);
        }
 
-      if (sbitmap_union_of_diff_cg (avout[bb->index], avloc[bb->index],
+      if (bitmap_ior_and_compl (avout[bb->index], avloc[bb->index],
                                    avin[bb->index], kill[bb->index]))
        /* If the out state of this block changed, then we need
           to add the successors of this block to the worklist
           if they are not already on the worklist.  */
        FOR_EACH_EDGE (e, ei, bb->succs)
-         if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR)
+         if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
              *qin++ = e->dest;
              e->dest->aux = e;
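
compute_available is the forward mirror of compute_antinout_edge:
AVIN[B] = intersection of AVOUT over B's predecessors (forced empty for
successors of ENTRY) and AVOUT[B] = AVLOC[B] | (AVIN[B] & ~KILL[B]), with
bitmap_ior_and_compl's "changed" result re-queuing the successors.  One
update step on toy masks, not GCC code:

/* Toy illustration of the AVIN/AVOUT update for one block B.  */
#include <stdbool.h>
#include <stdint.h>

static bool
update_avail (uint64_t *avin_b, uint64_t *avout_b,
              const uint64_t *pred_avout, int npreds, bool entry_succ,
              uint64_t avloc_b, uint64_t kill_b)
{
  /* AVIN[B]: intersect AVOUT of the predecessors; successors of ENTRY
     get the empty set, matching the aux == ENTRY_BLOCK check above.  */
  uint64_t in = entry_succ ? 0 : ~(uint64_t) 0;
  for (int i = 0; i < npreds; i++)
    in &= pred_avout[i];
  *avin_b = in;

  /* AVOUT[B] = AVLOC[B] | (AVIN[B] & ~KILL[B]).  */
  uint64_t out = avloc_b | (in & ~kill_b);
  bool changed = out != *avout_b;
  *avout_b = out;
  return changed;
}
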
@@ -573,18 +621,18 @@ compute_farthest (struct edge_list *edge_list, int n_exprs,
     {
       pred = INDEX_EDGE_PRED_BB (edge_list, x);
       succ = INDEX_EDGE_SUCC_BB (edge_list, x);
-      if (succ == EXIT_BLOCK_PTR)
-       sbitmap_copy (farthest[x], st_avout[pred->index]);
+      if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
+       bitmap_copy (farthest[x], st_avout[pred->index]);
       else
        {
-         if (pred == ENTRY_BLOCK_PTR)
-           sbitmap_zero (farthest[x]);
+         if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
+           bitmap_clear (farthest[x]);
          else
            {
-             sbitmap_difference (difference, st_avout[pred->index],
+             bitmap_and_compl (difference, st_avout[pred->index],
                                  st_antin[succ->index]);
-             sbitmap_not (temp_bitmap, st_avin[succ->index]);
-             sbitmap_a_and_b_or_c (farthest[x], difference,
+             bitmap_not (temp_bitmap, st_avin[succ->index]);
+             bitmap_and_or (farthest[x], difference,
                                    kill[succ->index], temp_bitmap);
            }
        }
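
The reverse problem used by pre_edge_rev_lcm swaps the roles of availability
and anticipatability, and of ENTRY and EXIT.  Here FARTHEST[P->S] =
AVOUT[P] & ~ANTIN[S] & (KILL[S] | ~AVIN[S]), with edges into EXIT copying
AVOUT[P] and edges out of ENTRY getting the empty set.  For illustration
only, on toy masks:

/* Toy illustration of FARTHEST for one edge P -> S.  */
#include <stdint.h>

static uint64_t
farthest_on_edge (uint64_t avout_p, uint64_t avin_s, uint64_t antin_s,
                  uint64_t kill_s, int p_is_entry, int s_is_exit)
{
  if (s_is_exit)
    return avout_p;             /* nothing is anticipated past EXIT */
  if (p_is_entry)
    return 0;                   /* never move code onto ENTRY's edges */
  return avout_p & ~antin_s & (kill_s | ~avin_s);
}

By the same duality, compute_nearerout below solves NEAREROUT[B] =
intersection of NEARER over B's outgoing edges and NEARER[P->B] =
FARTHEST[P->B] | (NEAREROUT[B] & ~ST_AVLOC[B]), with ENTRY's NEAREROUT kept
in the extra slot at index last_basic_block_for_fn (cfun), and
compute_rev_insert_delete forms INSERT[E] = NEARER[E] & ~NEAREROUT[pred(E)]
and DELETE[B] = ST_AVLOC[B] & ~NEAREROUT[B].
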
@@ -613,7 +661,7 @@ compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
   /* Allocate a worklist array/queue.  Entries are only added to the
      list if they were not already on the list.  So the size is
      bounded by the number of basic blocks.  */
-  tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
+  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);
 
   /* Initialize NEARER for each edge and build a mapping from an edge to
      its index.  */
@@ -621,18 +669,18 @@ compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
     INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;
 
   /* We want a maximal solution.  */
-  sbitmap_vector_ones (nearer, num_edges);
+  bitmap_vector_ones (nearer, num_edges);
 
   /* Note that even though we want an optimistic setting of NEARER, we
      do not want to be overly optimistic.  Consider an incoming edge to
      the exit block.  That edge should always have a NEARER value the
      same as FARTHEST for that edge.  */
-  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
-    sbitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]);
+  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
+    bitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]);
 
   /* Add all the blocks to the worklist.  This prevents an early exit
      from the loop given our optimistic initialization of NEARER.  */
-  FOR_EACH_BB (bb)
+  FOR_EACH_BB_FN (bb, cfun)
     {
       *tos++ = bb;
       bb->aux = bb;
@@ -646,20 +694,20 @@ compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
       bb->aux = NULL;
 
       /* Compute the intersection of NEARER for each outgoing edge from B.  */
-      sbitmap_ones (nearerout[bb->index]);
+      bitmap_ones (nearerout[bb->index]);
       FOR_EACH_EDGE (e, ei, bb->succs)
-       sbitmap_a_and_b (nearerout[bb->index], nearerout[bb->index],
+       bitmap_and (nearerout[bb->index], nearerout[bb->index],
                         nearer[(size_t) e->aux]);
 
       /* Calculate NEARER for all incoming edges.  */
       FOR_EACH_EDGE (e, ei, bb->preds)
-       if (sbitmap_union_of_diff_cg (nearer[(size_t) e->aux],
+       if (bitmap_ior_and_compl (nearer[(size_t) e->aux],
                                      farthest[(size_t) e->aux],
                                      nearerout[e->dest->index],
                                      st_avloc[e->dest->index])
            /* If NEARER for an incoming edge was changed, then we need
               to add the source of the incoming edge to the worklist.  */
-           && e->src != ENTRY_BLOCK_PTR && e->src->aux == 0)
+           && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && e->src->aux == 0)
          {
            *tos++ = e->src;
            e->src->aux = e;
@@ -669,10 +717,10 @@ compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
   /* Computation of insertion and deletion points requires computing NEAREROUT
      for the ENTRY block.  We allocated an extra entry in the NEAREROUT array
      for just this purpose.  */
-  sbitmap_ones (nearerout[last_basic_block]);
-  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
-    sbitmap_a_and_b (nearerout[last_basic_block],
-                    nearerout[last_basic_block],
+  bitmap_ones (nearerout[last_basic_block_for_fn (cfun)]);
+  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
+    bitmap_and (nearerout[last_basic_block_for_fn (cfun)],
+                    nearerout[last_basic_block_for_fn (cfun)],
                     nearer[(size_t) e->aux]);
 
   clear_aux_for_edges ();
@@ -689,17 +737,18 @@ compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc,
   int x;
   basic_block bb;
 
-  FOR_EACH_BB (bb)
-    sbitmap_difference (del[bb->index], st_avloc[bb->index],
+  FOR_EACH_BB_FN (bb, cfun)
+    bitmap_and_compl (del[bb->index], st_avloc[bb->index],
                        nearerout[bb->index]);
 
   for (x = 0; x < NUM_EDGES (edge_list); x++)
     {
       basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
-      if (b == ENTRY_BLOCK_PTR)
-       sbitmap_difference (insert[x], nearer[x], nearerout[last_basic_block]);
+      if (b == ENTRY_BLOCK_PTR_FOR_FN (cfun))
+       bitmap_and_compl (insert[x], nearer[x],
+                         nearerout[last_basic_block_for_fn (cfun)]);
       else
-       sbitmap_difference (insert[x], nearer[x], nearerout[b->index]);
+       bitmap_and_compl (insert[x], nearer[x], nearerout[b->index]);
     }
 }
 
@@ -722,15 +771,15 @@ pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
   edge_list = create_edge_list ();
   num_edges = NUM_EDGES (edge_list);
 
-  st_antin = sbitmap_vector_alloc (last_basic_block, n_exprs);
-  st_antout = sbitmap_vector_alloc (last_basic_block, n_exprs);
-  sbitmap_vector_zero (st_antin, last_basic_block);
-  sbitmap_vector_zero (st_antout, last_basic_block);
+  st_antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
+  st_antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
+  bitmap_vector_clear (st_antin, last_basic_block_for_fn (cfun));
+  bitmap_vector_clear (st_antout, last_basic_block_for_fn (cfun));
   compute_antinout_edge (st_antloc, transp, st_antin, st_antout);
 
   /* Compute global anticipatability.  */
-  st_avout = sbitmap_vector_alloc (last_basic_block, n_exprs);
-  st_avin = sbitmap_vector_alloc (last_basic_block, n_exprs);
+  st_avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
+  st_avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
   compute_available (st_avloc, kill, st_avout, st_avin);
 
 #ifdef LCM_DEBUG_INFO
@@ -739,20 +788,26 @@ pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
       fprintf (dump_file, "Edge List:\n");
       verify_edge_list (dump_file, edge_list);
       print_edge_list (dump_file, edge_list);
-      dump_sbitmap_vector (dump_file, "transp", "", transp, last_basic_block);
-      dump_sbitmap_vector (dump_file, "st_avloc", "", st_avloc, last_basic_block);
-      dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
-      dump_sbitmap_vector (dump_file, "st_antin", "", st_antin, last_basic_block);
-      dump_sbitmap_vector (dump_file, "st_antout", "", st_antout, last_basic_block);
-      dump_sbitmap_vector (dump_file, "st_kill", "", kill, last_basic_block);
+      dump_bitmap_vector (dump_file, "transp", "", transp,
+                         last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc,
+                         last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc,
+                         last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_antin", "", st_antin,
+                         last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_antout", "", st_antout,
+                         last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_kill", "", kill,
+                         last_basic_block_for_fn (cfun));
     }
 #endif
 
 #ifdef LCM_DEBUG_INFO
   if (dump_file)
     {
-      dump_sbitmap_vector (dump_file, "st_avout", "", st_avout, last_basic_block);
-      dump_sbitmap_vector (dump_file, "st_avin", "", st_avin, last_basic_block);
+      dump_bitmap_vector (dump_file, "st_avout", "", st_avout, last_basic_block_for_fn (cfun));
+      dump_bitmap_vector (dump_file, "st_avin", "", st_avin, last_basic_block_for_fn (cfun));
     }
 #endif
 
@@ -763,7 +818,7 @@ pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
 
 #ifdef LCM_DEBUG_INFO
   if (dump_file)
-    dump_sbitmap_vector (dump_file, "farthest", "", farthest, num_edges);
+    dump_bitmap_vector (dump_file, "farthest", "", farthest, num_edges);
 #endif
 
   sbitmap_vector_free (st_antin);
@@ -775,22 +830,23 @@ pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
   nearer = sbitmap_vector_alloc (num_edges, n_exprs);
 
   /* Allocate an extra element for the entry block.  */
-  nearerout = sbitmap_vector_alloc (last_basic_block + 1, n_exprs);
+  nearerout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
+                                   n_exprs);
   compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);
 
 #ifdef LCM_DEBUG_INFO
   if (dump_file)
     {
-      dump_sbitmap_vector (dump_file, "nearerout", "", nearerout,
-                          last_basic_block + 1);
-      dump_sbitmap_vector (dump_file, "nearer", "", nearer, num_edges);
+      dump_bitmap_vector (dump_file, "nearerout", "", nearerout,
+                          last_basic_block_for_fn (cfun) + 1);
+      dump_bitmap_vector (dump_file, "nearer", "", nearer, num_edges);
     }
 #endif
 
   sbitmap_vector_free (farthest);
 
   *insert = sbitmap_vector_alloc (num_edges, n_exprs);
-  *del = sbitmap_vector_alloc (last_basic_block, n_exprs);
+  *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
   compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
                             *insert, *del);
 
@@ -800,9 +856,9 @@ pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
 #ifdef LCM_DEBUG_INFO
   if (dump_file)
     {
-      dump_sbitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
-      dump_sbitmap_vector (dump_file, "pre_delete_map", "", *del,
-                          last_basic_block);
+      dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
+      dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
+                          last_basic_block_for_fn (cfun));
     }
 #endif
   return edge_list;