* doc/invoke.texi (Warning Options): Document -Winvalid-memory-model.
[gcc.git] / gcc / lcm.c
1 /* Generic partial redundancy elimination with lazy code motion support.
2 Copyright (C) 1998-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* These routines are meant to be used by various optimization
21 passes which can be modeled as lazy code motion problems.
22 Including, but not limited to:
23
24 * Traditional partial redundancy elimination.
25
     * Placement of caller/callee register save/restores.
27
28 * Load/store motion.
29
30 * Copy motion.
31
32 * Conversion of flat register files to a stacked register
33 model.
34
35 * Dead load/store elimination.
36
37 These routines accept as input:
38
39 * Basic block information (number of blocks, lists of
40 predecessors and successors). Note the granularity
41 does not need to be basic block, they could be statements
42 or functions.
43
44 * Bitmaps of local properties (computed, transparent and
45 anticipatable expressions).
46
47 The output of these routines is bitmap of redundant computations
48 and a bitmap of optimal placement points. */
49
50
51 #include "config.h"
52 #include "system.h"
53 #include "coretypes.h"
54 #include "backend.h"
55 #include "cfganal.h"
56 #include "lcm.h"
57
58 /* Edge based LCM routines. */
59 static void compute_antinout_edge (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
60 static void compute_earliest (struct edge_list *, int, sbitmap *, sbitmap *,
61 sbitmap *, sbitmap *, sbitmap *);
62 static void compute_laterin (struct edge_list *, sbitmap *, sbitmap *,
63 sbitmap *, sbitmap *);
64 static void compute_insert_delete (struct edge_list *edge_list, sbitmap *,
65 sbitmap *, sbitmap *, sbitmap *, sbitmap *);
66
67 /* Edge based LCM routines on a reverse flowgraph. */
68 static void compute_farthest (struct edge_list *, int, sbitmap *, sbitmap *,
69 sbitmap*, sbitmap *, sbitmap *);
70 static void compute_nearerout (struct edge_list *, sbitmap *, sbitmap *,
71 sbitmap *, sbitmap *);
72 static void compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *,
73 sbitmap *, sbitmap *, sbitmap *,
74 sbitmap *);
75 \f
76 /* Edge based lcm routines. */
77
/* Compute expression anticipatability at entrance and exit of each block.
   This is done based on the flow graph, and not on the pred-succ lists.
   Other than that, it's pretty much identical to compute_antinout.

   ANTLOC[bb] holds the expressions locally anticipatable in BB and
   TRANSP[bb] the expressions transparent through BB.  On return,
   ANTIN[bb] and ANTOUT[bb] hold global anticipatability at BB's entry
   and exit.  Solved as a backward dataflow problem over a worklist.  */

static void
compute_antinout_edge (sbitmap *antloc, sbitmap *transp, sbitmap *antin,
		       sbitmap *antout)
{
  basic_block bb;
  edge e;
  /* Circular queue over WORKLIST: QIN is the insertion point, QOUT the
     removal point, QEND one past the last usable slot.  */
  basic_block *worklist, *qin, *qout, *qend;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* We want a maximal solution, so make an optimistic initialization of
     ANTIN.  */
  bitmap_vector_ones (antin, last_basic_block_for_fn (cfun));

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of ANTIN above.  Use postorder so this
     backward problem requires fewer iterations to converge.  */
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int postorder_num = post_order_compute (postorder, false, false);
  for (int i = 0; i < postorder_num; ++i)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
      *qin++ = bb;
      /* A non-NULL aux field means "already on the worklist".  */
      bb->aux = bb;
    }
  free (postorder);

  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Mark blocks which are predecessors of the exit block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    e->src->aux = EXIT_BLOCK_PTR_FOR_FN (cfun);

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      if (bb->aux == EXIT_BLOCK_PTR_FOR_FN (cfun))
	/* Do not clear the aux field for blocks which are predecessors of
	   the EXIT block.  That way we never add them to the worklist
	   again.  */
	bitmap_clear (antout[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  bitmap_intersection_of_succs (antout[bb->index], antin, bb);
	}

      /* ANTIN[bb] = ANTLOC[bb] | (TRANSP[bb] & ANTOUT[bb]).  */
      if (bitmap_or_and (antin[bb->index], antloc[bb->index],
			 transp[bb->index], antout[bb->index]))
	/* If the in state of this block changed, then we need
	   to add the predecessors of this block to the worklist
	   if they are not already on the worklist.  */
	FOR_EACH_EDGE (e, ei, bb->preds)
	  if (!e->src->aux && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    {
	      *qin++ = e->src;
	      e->src->aux = e;
	      qlen++;
	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
165
166 /* Compute the earliest vector for edge based lcm. */
167
168 static void
169 compute_earliest (struct edge_list *edge_list, int n_exprs, sbitmap *antin,
170 sbitmap *antout, sbitmap *avout, sbitmap *kill,
171 sbitmap *earliest)
172 {
173 sbitmap difference, temp_bitmap;
174 int x, num_edges;
175 basic_block pred, succ;
176
177 num_edges = NUM_EDGES (edge_list);
178
179 difference = sbitmap_alloc (n_exprs);
180 temp_bitmap = sbitmap_alloc (n_exprs);
181
182 for (x = 0; x < num_edges; x++)
183 {
184 pred = INDEX_EDGE_PRED_BB (edge_list, x);
185 succ = INDEX_EDGE_SUCC_BB (edge_list, x);
186 if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
187 bitmap_copy (earliest[x], antin[succ->index]);
188 else
189 {
190 if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
191 bitmap_clear (earliest[x]);
192 else
193 {
194 bitmap_and_compl (difference, antin[succ->index],
195 avout[pred->index]);
196 bitmap_not (temp_bitmap, antout[pred->index]);
197 bitmap_and_or (earliest[x], difference,
198 kill[pred->index], temp_bitmap);
199 }
200 }
201 }
202
203 sbitmap_free (temp_bitmap);
204 sbitmap_free (difference);
205 }
206
/* later(p,s) is dependent on the calculation of laterin(p).
   laterin(p) is dependent on the calculation of later(p2,p).

   laterin(ENTRY) is defined as all 0's
   later(ENTRY, succs(ENTRY)) are defined using laterin(ENTRY)
   laterin(succs(ENTRY)) is defined by later(ENTRY, succs(ENTRY)).

   If we progress in this manner, starting with all basic blocks
   in the work list, anytime we change later(bb), we need to add
   succs(bb) to the worklist if they are not already on the worklist.

   Boundary conditions:

   We prime the worklist with all the normal basic blocks.  The ENTRY
   block can never be added to the worklist since it is never the
   successor of any block.  We explicitly prevent the EXIT block from
   being added to the worklist.

   We optimistically initialize LATER.  That is the only time this routine
   will compute LATER for an edge out of the entry block since the entry
   block is never on the worklist.  Thus, LATERIN is neither used nor
   computed for the ENTRY block.

   Since the EXIT block is never added to the worklist, we will neither
   use nor compute LATERIN for the exit block.  Edges which reach the
   EXIT block are handled in the normal fashion inside the loop.  However,
   the insertion/deletion computation needs LATERIN(EXIT), so we have
   to compute it.  */

static void
compute_laterin (struct edge_list *edge_list, sbitmap *earliest,
		 sbitmap *antloc, sbitmap *later, sbitmap *laterin)
{
  int num_edges, i;
  edge e;
  /* Circular queue over WORKLIST: QIN is the insertion point, QOUT the
     removal point, QEND one past the last usable slot.  */
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist
    = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Initialize a mapping from each edge to its index; the index is
     stashed in the edge's aux field and read back below via
     (size_t) e->aux.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution, so initially consider LATER true for
     all edges.  This allows propagation through a loop since the incoming
     loop edge will have LATER set, so if all the other incoming edges
     to the loop are set, then LATERIN will be set for the head of the
     loop.

     If the optimistic setting of LATER on that edge was incorrect (for
     example the expression is ANTLOC in a block within the loop) then
     this algorithm will detect it when we process the block at the head
     of the optimistic edge.  That will requeue the affected blocks.  */
  bitmap_vector_ones (later, num_edges);

  /* Note that even though we want an optimistic setting of LATER, we
     do not want to be overly optimistic.  Consider an outgoing edge from
     the entry block.  That edge should always have a LATER value the
     same as EARLIEST for that edge.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    bitmap_copy (later[(size_t) e->aux], earliest[(size_t) e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit from
     the loop given our optimistic initialization of LATER above.  */
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int postorder_num = inverted_post_order_compute (postorder);
  for (int i = 0; i < postorder_num; ++i)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;
      *qin++ = bb;
      /* A non-NULL aux field means "already on the worklist".  */
      bb->aux = bb;
    }
  free (postorder);

  /* Note that we do not use the last allocated element for our queue,
     as EXIT_BLOCK is never inserted into it.  */
  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      bb->aux = NULL;
      qlen--;
      if (qout >= qend)
	qout = worklist;

      /* Compute the intersection of LATERIN for each incoming edge to B.  */
      bitmap_ones (laterin[bb->index]);
      FOR_EACH_EDGE (e, ei, bb->preds)
	bitmap_and (laterin[bb->index], laterin[bb->index],
		    later[(size_t)e->aux]);

      /* Calculate LATER for all outgoing edges:
	 LATER[e] = EARLIEST[e] | (LATERIN[bb] & ~ANTLOC[bb]).  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (bitmap_ior_and_compl (later[(size_t) e->aux],
				  earliest[(size_t) e->aux],
				  laterin[bb->index],
				  antloc[bb->index])
	    /* If LATER for an outgoing edge was changed, then we need
	       to add the target of the outgoing edge to the worklist.  */
	    && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest->aux == 0)
	  {
	    *qin++ = e->dest;
	    e->dest->aux = e;
	    qlen++;
	    if (qin >= qend)
	      qin = worklist;
	  }
    }

  /* Computation of insertion and deletion points requires computing LATERIN
     for the EXIT block.  We allocated an extra entry in the LATERIN array
     for just this purpose.  */
  bitmap_ones (laterin[last_basic_block_for_fn (cfun)]);
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    bitmap_and (laterin[last_basic_block_for_fn (cfun)],
		laterin[last_basic_block_for_fn (cfun)],
		later[(size_t) e->aux]);

  clear_aux_for_edges ();
  free (worklist);
}
344
345 /* Compute the insertion and deletion points for edge based LCM. */
346
347 static void
348 compute_insert_delete (struct edge_list *edge_list, sbitmap *antloc,
349 sbitmap *later, sbitmap *laterin, sbitmap *insert,
350 sbitmap *del)
351 {
352 int x;
353 basic_block bb;
354
355 FOR_EACH_BB_FN (bb, cfun)
356 bitmap_and_compl (del[bb->index], antloc[bb->index],
357 laterin[bb->index]);
358
359 for (x = 0; x < NUM_EDGES (edge_list); x++)
360 {
361 basic_block b = INDEX_EDGE_SUCC_BB (edge_list, x);
362
363 if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
364 bitmap_and_compl (insert[x], later[x],
365 laterin[last_basic_block_for_fn (cfun)]);
366 else
367 bitmap_and_compl (insert[x], later[x], laterin[b->index]);
368 }
369 }
370
/* Given local properties TRANSP, ANTLOC, AVLOC, KILL, compute the insert
   and delete vectors for edge based LCM, filling in the caller-supplied
   AVIN and AVOUT bitmap vectors along the way.  Returns an edge list
   which is used to map the insert vector to what edge an expression
   should be inserted on.  The caller owns *INSERT, *DEL and the
   returned edge list.  */

struct edge_list *
pre_edge_lcm_avs (int n_exprs, sbitmap *transp,
		  sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
		  sbitmap *avin, sbitmap *avout,
		  sbitmap **insert, sbitmap **del)
{
  sbitmap *antin, *antout, *earliest;
  sbitmap *later, *laterin;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      fprintf (dump_file, "Edge List:\n");
      verify_edge_list (dump_file, edge_list);
      print_edge_list (dump_file, edge_list);
      dump_bitmap_vector (dump_file, "transp", "", transp,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "antloc", "", antloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "avloc", "", avloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "kill", "", kill,
			  last_basic_block_for_fn (cfun));
    }
#endif

  /* Compute global availability.  */
  compute_available (avloc, kill, avout, avin);

  /* Compute global anticipatability.  */
  antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  compute_antinout_edge (antloc, transp, antin, antout);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "antin", "", antin,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "antout", "", antout,
			  last_basic_block_for_fn (cfun));
    }
#endif

  /* Compute earliestness.  */
  earliest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_earliest (edge_list, n_exprs, antin, antout, avout, kill, earliest);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    dump_bitmap_vector (dump_file, "earliest", "", earliest, num_edges);
#endif

  /* ANTIN/ANTOUT are only needed for EARLIEST; release them now.  */
  sbitmap_vector_free (antout);
  sbitmap_vector_free (antin);

  later = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the exit block in the laterin vector.  */
  laterin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
				  n_exprs);
  compute_laterin (edge_list, earliest, antloc, later, laterin);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "laterin", "", laterin,
			  last_basic_block_for_fn (cfun) + 1);
      dump_bitmap_vector (dump_file, "later", "", later, num_edges);
    }
#endif

  sbitmap_vector_free (earliest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  bitmap_vector_clear (*insert, num_edges);
  bitmap_vector_clear (*del, last_basic_block_for_fn (cfun));
  compute_insert_delete (edge_list, antloc, later, laterin, *insert, *del);

  sbitmap_vector_free (laterin);
  sbitmap_vector_free (later);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
      dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
			  last_basic_block_for_fn (cfun));
    }
#endif

  return edge_list;
}
474
475 /* Wrapper to allocate avin/avout and call pre_edge_lcm_avs. */
476
477 struct edge_list *
478 pre_edge_lcm (int n_exprs, sbitmap *transp,
479 sbitmap *avloc, sbitmap *antloc, sbitmap *kill,
480 sbitmap **insert, sbitmap **del)
481 {
482 struct edge_list *edge_list;
483 sbitmap *avin, *avout;
484
485 avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
486 avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
487
488 edge_list = pre_edge_lcm_avs (n_exprs, transp, avloc, antloc, kill,
489 avin, avout, insert, del);
490
491 sbitmap_vector_free (avout);
492 sbitmap_vector_free (avin);
493
494 return edge_list;
495 }
496
/* Compute the AVIN and AVOUT vectors from the AVLOC and KILL vectors.
   Iterates a forward dataflow problem over a worklist until it reaches
   a fixed point.  */

void
compute_available (sbitmap *avloc, sbitmap *kill, sbitmap *avout,
		   sbitmap *avin)
{
  edge e;
  /* Circular queue over WORKLIST: QIN is the insertion point, QOUT the
     removal point, QEND one past the last usable slot.  */
  basic_block *worklist, *qin, *qout, *qend, bb;
  unsigned int qlen;
  edge_iterator ei;

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  qin = qout = worklist =
    XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);

  /* We want a maximal solution.  */
  bitmap_vector_ones (avout, last_basic_block_for_fn (cfun));

  /* Put every block on the worklist; this is necessary because of the
     optimistic initialization of AVOUT above.  Use inverted postorder
     to make the dataflow problem require less iterations.  */
  int *postorder = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
  int postorder_num = inverted_post_order_compute (postorder);
  for (int i = 0; i < postorder_num; ++i)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, postorder[i]);
      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;
      *qin++ = bb;
      /* A non-NULL aux field means "already on the worklist".  */
      bb->aux = bb;
    }
  free (postorder);

  qin = worklist;
  qend = &worklist[n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS];
  qlen = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;

  /* Mark blocks which are successors of the entry block so that we
     can easily identify them below.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    e->dest->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* Iterate until the worklist is empty.  */
  while (qlen)
    {
      /* Take the first entry off the worklist.  */
      bb = *qout++;
      qlen--;

      if (qout >= qend)
	qout = worklist;

      /* If one of the predecessor blocks is the ENTRY block, then the
	 intersection of avouts is the null set.  We can identify such blocks
	 by the special value in the AUX field in the block structure.  */
      if (bb->aux == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	/* Do not clear the aux field for blocks which are successors of the
	   ENTRY block.  That way we never add them to the worklist again.  */
	bitmap_clear (avin[bb->index]);
      else
	{
	  /* Clear the aux field of this block so that it can be added to
	     the worklist again if necessary.  */
	  bb->aux = NULL;
	  bitmap_intersection_of_preds (avin[bb->index], avout, bb);
	}

      /* AVOUT[bb] = AVLOC[bb] | (AVIN[bb] & ~KILL[bb]).  */
      if (bitmap_ior_and_compl (avout[bb->index], avloc[bb->index],
				avin[bb->index], kill[bb->index]))
	/* If the out state of this block changed, then we need
	   to add the successors of this block to the worklist
	   if they are not already on the worklist.  */
	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (!e->dest->aux && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      *qin++ = e->dest;
	      e->dest->aux = e;
	      qlen++;

	      if (qin >= qend)
		qin = worklist;
	    }
    }

  clear_aux_for_edges ();
  clear_aux_for_blocks ();
  free (worklist);
}
589
590 /* Compute the farthest vector for edge based lcm. */
591
592 static void
593 compute_farthest (struct edge_list *edge_list, int n_exprs,
594 sbitmap *st_avout, sbitmap *st_avin, sbitmap *st_antin,
595 sbitmap *kill, sbitmap *farthest)
596 {
597 sbitmap difference, temp_bitmap;
598 int x, num_edges;
599 basic_block pred, succ;
600
601 num_edges = NUM_EDGES (edge_list);
602
603 difference = sbitmap_alloc (n_exprs);
604 temp_bitmap = sbitmap_alloc (n_exprs);
605
606 for (x = 0; x < num_edges; x++)
607 {
608 pred = INDEX_EDGE_PRED_BB (edge_list, x);
609 succ = INDEX_EDGE_SUCC_BB (edge_list, x);
610 if (succ == EXIT_BLOCK_PTR_FOR_FN (cfun))
611 bitmap_copy (farthest[x], st_avout[pred->index]);
612 else
613 {
614 if (pred == ENTRY_BLOCK_PTR_FOR_FN (cfun))
615 bitmap_clear (farthest[x]);
616 else
617 {
618 bitmap_and_compl (difference, st_avout[pred->index],
619 st_antin[succ->index]);
620 bitmap_not (temp_bitmap, st_avin[succ->index]);
621 bitmap_and_or (farthest[x], difference,
622 kill[succ->index], temp_bitmap);
623 }
624 }
625 }
626
627 sbitmap_free (temp_bitmap);
628 sbitmap_free (difference);
629 }
630
/* Compute nearer and nearerout vectors for edge based lcm.

   This is the mirror of compute_laterin, additional comments on the
   implementation can be found before compute_laterin.  */

static void
compute_nearerout (struct edge_list *edge_list, sbitmap *farthest,
		   sbitmap *st_avloc, sbitmap *nearer, sbitmap *nearerout)
{
  int num_edges, i;
  edge e;
  /* Unlike compute_laterin's circular queue, the worklist here is a
     simple stack; TOS is the stack pointer.  */
  basic_block *worklist, *tos, bb;
  edge_iterator ei;

  num_edges = NUM_EDGES (edge_list);

  /* Allocate a worklist array/queue.  Entries are only added to the
     list if they were not already on the list.  So the size is
     bounded by the number of basic blocks.  */
  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun) + 1);

  /* Initialize NEARER for each edge and build a mapping from an edge to
     its index; the index is stashed in the edge's aux field and read
     back below via (size_t) e->aux.  */
  for (i = 0; i < num_edges; i++)
    INDEX_EDGE (edge_list, i)->aux = (void *) (size_t) i;

  /* We want a maximal solution.  */
  bitmap_vector_ones (nearer, num_edges);

  /* Note that even though we want an optimistic setting of NEARER, we
     do not want to be overly optimistic.  Consider an incoming edge to
     the exit block.  That edge should always have a NEARER value the
     same as FARTHEST for that edge.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    bitmap_copy (nearer[(size_t)e->aux], farthest[(size_t)e->aux]);

  /* Add all the blocks to the worklist.  This prevents an early exit
     from the loop given our optimistic initialization of NEARER.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      *tos++ = bb;
      /* A non-NULL aux field means "already on the worklist".  */
      bb->aux = bb;
    }

  /* Iterate until the worklist is empty.  */
  while (tos != worklist)
    {
      /* Take the first entry off the worklist.  */
      bb = *--tos;
      bb->aux = NULL;

      /* Compute the intersection of NEARER for each outgoing edge from B.  */
      bitmap_ones (nearerout[bb->index]);
      FOR_EACH_EDGE (e, ei, bb->succs)
	bitmap_and (nearerout[bb->index], nearerout[bb->index],
		    nearer[(size_t) e->aux]);

      /* Calculate NEARER for all incoming edges.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (bitmap_ior_and_compl (nearer[(size_t) e->aux],
				  farthest[(size_t) e->aux],
				  nearerout[e->dest->index],
				  st_avloc[e->dest->index])
	    /* If NEARER for an incoming edge was changed, then we need
	       to add the source of the incoming edge to the worklist.  */
	    && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun) && e->src->aux == 0)
	  {
	    *tos++ = e->src;
	    e->src->aux = e;
	  }
    }

  /* Computation of insertion and deletion points requires computing NEAREROUT
     for the ENTRY block.  We allocated an extra entry in the NEAREROUT array
     for just this purpose.  */
  bitmap_ones (nearerout[last_basic_block_for_fn (cfun)]);
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    bitmap_and (nearerout[last_basic_block_for_fn (cfun)],
		nearerout[last_basic_block_for_fn (cfun)],
		nearer[(size_t) e->aux]);

  clear_aux_for_edges ();
  /* TOS equals WORKLIST here because the loop above only exits once the
     stack is empty, so this frees the whole worklist array.  */
  free (tos);
}
715
716 /* Compute the insertion and deletion points for edge based LCM. */
717
718 static void
719 compute_rev_insert_delete (struct edge_list *edge_list, sbitmap *st_avloc,
720 sbitmap *nearer, sbitmap *nearerout,
721 sbitmap *insert, sbitmap *del)
722 {
723 int x;
724 basic_block bb;
725
726 FOR_EACH_BB_FN (bb, cfun)
727 bitmap_and_compl (del[bb->index], st_avloc[bb->index],
728 nearerout[bb->index]);
729
730 for (x = 0; x < NUM_EDGES (edge_list); x++)
731 {
732 basic_block b = INDEX_EDGE_PRED_BB (edge_list, x);
733 if (b == ENTRY_BLOCK_PTR_FOR_FN (cfun))
734 bitmap_and_compl (insert[x], nearer[x],
735 nearerout[last_basic_block_for_fn (cfun)]);
736 else
737 bitmap_and_compl (insert[x], nearer[x], nearerout[b->index]);
738 }
739 }
740
/* Given local properties TRANSP, ST_AVLOC, ST_ANTLOC, KILL return the
   insert and delete vectors for edge based reverse LCM.  Returns an
   edgelist which is used to map the insert vector to what edge
   an expression should be inserted on.  The caller owns *INSERT, *DEL
   and the returned edge list.  */

struct edge_list *
pre_edge_rev_lcm (int n_exprs, sbitmap *transp,
		  sbitmap *st_avloc, sbitmap *st_antloc, sbitmap *kill,
		  sbitmap **insert, sbitmap **del)
{
  sbitmap *st_antin, *st_antout;
  sbitmap *st_avout, *st_avin, *farthest;
  sbitmap *nearer, *nearerout;
  struct edge_list *edge_list;
  int num_edges;

  edge_list = create_edge_list ();
  num_edges = NUM_EDGES (edge_list);

  /* Compute global anticipatability.  */
  st_antin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  st_antout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  bitmap_vector_clear (st_antin, last_basic_block_for_fn (cfun));
  bitmap_vector_clear (st_antout, last_basic_block_for_fn (cfun));
  compute_antinout_edge (st_antloc, transp, st_antin, st_antout);

  /* Compute global availability.  */
  st_avout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  st_avin = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  compute_available (st_avloc, kill, st_avout, st_avin);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      fprintf (dump_file, "Edge List:\n");
      verify_edge_list (dump_file, edge_list);
      print_edge_list (dump_file, edge_list);
      dump_bitmap_vector (dump_file, "transp", "", transp,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_antin", "", st_antin,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_antout", "", st_antout,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_kill", "", kill,
			  last_basic_block_for_fn (cfun));
    }
#endif

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "st_avout", "", st_avout, last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_avin", "", st_avin, last_basic_block_for_fn (cfun));
    }
#endif

  /* Compute farthestness.  */
  farthest = sbitmap_vector_alloc (num_edges, n_exprs);
  compute_farthest (edge_list, n_exprs, st_avout, st_avin, st_antin,
		    kill, farthest);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    dump_bitmap_vector (dump_file, "farthest", "", farthest, num_edges);
#endif

  /* The anticipatability and availability vectors are only needed for
     FARTHEST; release them now.  */
  sbitmap_vector_free (st_antin);
  sbitmap_vector_free (st_antout);

  sbitmap_vector_free (st_avin);
  sbitmap_vector_free (st_avout);

  nearer = sbitmap_vector_alloc (num_edges, n_exprs);

  /* Allocate an extra element for the entry block.  */
  nearerout = sbitmap_vector_alloc (last_basic_block_for_fn (cfun) + 1,
				    n_exprs);
  compute_nearerout (edge_list, farthest, st_avloc, nearer, nearerout);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "nearerout", "", nearerout,
			  last_basic_block_for_fn (cfun) + 1);
      dump_bitmap_vector (dump_file, "nearer", "", nearer, num_edges);
    }
#endif

  sbitmap_vector_free (farthest);

  *insert = sbitmap_vector_alloc (num_edges, n_exprs);
  *del = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), n_exprs);
  compute_rev_insert_delete (edge_list, st_avloc, nearer, nearerout,
			     *insert, *del);

  sbitmap_vector_free (nearerout);
  sbitmap_vector_free (nearer);

#ifdef LCM_DEBUG_INFO
  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "pre_insert_map", "", *insert, num_edges);
      dump_bitmap_vector (dump_file, "pre_delete_map", "", *del,
			  last_basic_block_for_fn (cfun));
    }
#endif
  return edge_list;
}
852