backport: et-forest.h (et_forest_create, [...]): Declarations removed.
[gcc.git] / gcc / bt-load.c
1 /* Perform branch target register load optimizations.
2 Copyright (C) 2001, 2002, 2003 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 2, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "bitmap.h"
26 #include "sbitmap.h"
27 #include "rtl.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "regs.h"
31 #include "obstack.h"
32 #include "fibheap.h"
33 #include "output.h"
34 #include "target.h"
35 #include "expr.h"
36 #include "flags.h"
37 #include "insn-attr.h"
38 #include "function.h"
39 #include "tm_p.h"
40
41 /* Target register optimizations - these are performed after reload. */
42
/* A group of branch target register definitions that share the same
   constant source (typically the same label).  Groups are chained
   through NEXT; the defs belonging to a group are chained through
   their next_this_group fields, headed by MEMBERS.  */
typedef struct btr_def_group_s
{
  struct btr_def_group_s *next;	/* Next group in the list of all groups.  */
  rtx src;			/* The common (constant) SET_SRC.  */
  struct btr_def_s *members;	/* Head of this group's def list.  */
} *btr_def_group;
49
/* A use of a branch target register: one instruction that reads a
   target register, linked into per-block and per-def-web lists.  */
typedef struct btr_user_s
{
  struct btr_user_s *next;	/* Next user in the containing list.  */
  basic_block bb;		/* Block containing the use.  */
  int luid;			/* Logical uid (scan order) of INSN.  */
  rtx insn;			/* The instruction containing the use.  */
  /* If INSN has a single use of a single branch register, then
     USE points to it within INSN.  If there is more than
     one branch register use, or the use is in some way ambiguous,
     then USE is NULL.  */
  rtx use;
  int n_reaching_defs;		/* Number of defs that reach this use.  */
  int first_reaching_def;	/* INSN_UID of the first reaching def,
				   or -1 if none seen yet.  */
  char other_use_this_block;	/* Nonzero if the same register is used
				   or set again later in this block.  */
} *btr_user;
65
66 /* btr_def structs appear on three lists:
67 1. A list of all btr_def structures (head is
68 ALL_BTR_DEFS, linked by the NEXT field).
69 2. A list of branch reg definitions per basic block (head is
70 BB_BTR_DEFS[i], linked by the NEXT_THIS_BB field).
71 3. A list of all branch reg definitions belonging to the same
72 group (head is in a BTR_DEF_GROUP struct, linked by
73 NEXT_THIS_GROUP field). */
74
/* A definition of a branch target register: one instruction that sets
   a target register, together with its def/use web information.  */
typedef struct btr_def_s
{
  struct btr_def_s *next_this_bb;	/* Next def in the same block.  */
  struct btr_def_s *next_this_group;	/* Next def in the same group.  */
  basic_block bb;			/* Block containing the def.  */
  int luid;				/* Logical uid (scan order) of INSN.  */
  rtx insn;				/* The defining instruction.  */
  int btr;				/* Hard regno of the target register
					   currently allocated to this def.  */
  int cost;				/* Estimated execution frequency of
					   the block containing the def.  */
  /* For a branch register setting insn that has a constant
     source (i.e. a label), group links together all the
     insns with the same source.  For other branch register
     setting insns, group is NULL.  */
  btr_def_group group;
  btr_user uses;			/* Head of the list of reaching uses.  */
  /* If this def has a reaching use which is not a simple use
     in a branch instruction, then has_ambiguous_use will be true,
     and we will not attempt to migrate this definition.  */
  char has_ambiguous_use;
  /* live_range is an approximation to the true live range for this
     def/use web, because it records the set of blocks that contain
     the live range.  There could be other live ranges for the same
     branch register in that set of blocks, either in the block
     containing the def (before the def), or in a block containing
     a use (after the use).  If there are such other live ranges, then
     other_btr_uses_before_def or other_btr_uses_after_use must be set true
     as appropriate.  */
  char other_btr_uses_before_def;
  char other_btr_uses_after_use;
  bitmap live_range;			/* Bitmap of block indices, or NULL
					   until btr_def_live_range runs.  */
} *btr_def;
106
107 static int issue_rate;
108
109 static int basic_block_freq (basic_block);
110 static int insn_sets_btr_p (rtx, int, int *);
111 static rtx *find_btr_use (rtx);
112 static int btr_referenced_p (rtx, rtx *);
113 static int find_btr_reference (rtx *, void *);
114 static void find_btr_def_group (btr_def_group *, btr_def);
115 static btr_def add_btr_def (fibheap_t, basic_block, int, rtx,
116 unsigned int, int, btr_def_group *);
117 static btr_user new_btr_user (basic_block, int, rtx);
118 static void dump_hard_reg_set (HARD_REG_SET);
119 static void dump_btrs_live (int);
120 static void note_other_use_this_block (unsigned int, btr_user);
121 static void compute_defs_uses_and_gen (fibheap_t, btr_def *,btr_user *,
122 sbitmap *, sbitmap *, HARD_REG_SET *);
123 static void compute_kill (sbitmap *, sbitmap *, HARD_REG_SET *);
124 static void compute_out (sbitmap *bb_out, sbitmap *, sbitmap *, int);
125 static void link_btr_uses (btr_def *, btr_user *, sbitmap *, sbitmap *, int);
126 static void build_btr_def_use_webs (fibheap_t);
127 static int block_at_edge_of_live_range_p (int, btr_def);
128 static void clear_btr_from_live_range (btr_def def);
129 static void add_btr_to_live_range (btr_def);
130 static void augment_live_range (bitmap, HARD_REG_SET *, basic_block,
131 basic_block);
132 static int choose_btr (HARD_REG_SET);
133 static void combine_btr_defs (btr_def, HARD_REG_SET *);
134 static void btr_def_live_range (btr_def, HARD_REG_SET *);
135 static void move_btr_def (basic_block, int, btr_def, bitmap, HARD_REG_SET *);
136 static int migrate_btr_def (btr_def, int);
137 static void migrate_btr_defs (enum reg_class, int);
138 static int can_move_up (basic_block, rtx, int);
139 static void note_btr_set (rtx, rtx, void *);
140 \f
141 /* The following code performs code motion of target load instructions
142 (instructions that set branch target registers), to move them
143 forward away from the branch instructions and out of loops (or,
144 more generally, from a more frequently executed place to a less
145 frequently executed place).
146 Moving target load instructions further in front of the branch
147 instruction that uses the target register value means that the hardware
148 has a better chance of preloading the instructions at the branch
149 target by the time the branch is reached. This avoids bubbles
150 when a taken branch needs to flush out the pipeline.
151 Moving target load instructions out of loops means they are executed
152 less frequently. */
153
154 /* An obstack to hold the def-use web data structures built up for
155 migrating branch target load instructions. */
156 static struct obstack migrate_btrl_obstack;
157
158 /* Array indexed by basic block number, giving the set of registers
159 live in that block. */
160 static HARD_REG_SET *btrs_live;
161
162 /* Set of all target registers that we are willing to allocate. */
163 static HARD_REG_SET all_btrs;
164
165 /* Provide lower and upper bounds for target register numbers, so that
166 we don't need to search through all the hard registers all the time. */
167 static int first_btr, last_btr;
168
169
170
171 /* Return an estimate of the frequency of execution of block bb.
172 If we have a profiling count available, we could use it here. */
static int
basic_block_freq (basic_block bb)
{
  /* Estimated (static) execution frequency of BB; used as the cost of
     placing a target register load in BB.  */
  return bb->frequency;
}
178
179 static rtx *btr_reference_found;
180
181 /* A subroutine of btr_referenced_p, called through for_each_rtx.
182 PREG is a pointer to an rtx that is to be excluded from the
183 traversal. If we find a reference to a target register anywhere
184 else, return 1, and put a pointer to it into btr_reference_found. */
static int
find_btr_reference (rtx *px, void *preg)
{
  rtx x;
  int regno, i;

  /* PREG is the excluded location; returning -1 makes for_each_rtx
     skip this expression and its subexpressions.  */
  if (px == preg)
    return -1;
  x = *px;
  if (GET_CODE (x) != REG)
    return 0;
  regno = REGNO (x);
  /* A multi-word REG occupies several hard registers; check each one
     against the set of allocatable target registers.  */
  for (i = HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1; i >= 0; i--)
    if (TEST_HARD_REG_BIT (all_btrs, regno+i))
      {
	btr_reference_found = px;
	/* A positive return stops the for_each_rtx traversal.  */
	return 1;
      }
  /* Not a target register; a REG has no subexpressions, so -1 (skip
     children) is equivalent to 0 here but avoids useless recursion.  */
  return -1;
}
205
206 /* Return nonzero if X references (sets or reads) any branch target register.
207 If EXCLUDEP is set, disregard any references within the rtx pointed to
208 by it. If returning nonzero, also set btr_reference_found as above. */
static int
btr_referenced_p (rtx x, rtx *excludep)
{
  /* find_btr_reference stores the location of the first target register
     reference into btr_reference_found and returns nonzero, which
     for_each_rtx propagates as our result.  */
  return for_each_rtx (&x, find_btr_reference, excludep);
}
214
215 /* Return true if insn is an instruction that sets a target register.
216 if CHECK_CONST is true, only return true if the source is constant.
217 If such a set is found and REGNO is nonzero, assign the register number
218 of the destination register to *REGNO. */
219 static int
220 insn_sets_btr_p (rtx insn, int check_const, int *regno)
221 {
222 rtx set;
223
224 if (GET_CODE (insn) == INSN
225 && (set = single_set (insn)))
226 {
227 rtx dest = SET_DEST (set);
228 rtx src = SET_SRC (set);
229
230 if (GET_CODE (dest) == SUBREG)
231 dest = XEXP (dest, 0);
232
233 if (GET_CODE (dest) == REG
234 && TEST_HARD_REG_BIT (all_btrs, REGNO (dest)))
235 {
236 if (btr_referenced_p (src, NULL))
237 abort();
238 if (!check_const || CONSTANT_P (src))
239 {
240 if (regno)
241 *regno = REGNO (dest);
242 return 1;
243 }
244 }
245 }
246 return 0;
247 }
248
249 /* Find and return a use of a target register within an instruction INSN. */
static rtx *
find_btr_use (rtx insn)
{
  /* On success btr_referenced_p leaves the location of the reference
     in btr_reference_found (set by find_btr_reference).  */
  return btr_referenced_p (insn, NULL) ? btr_reference_found : NULL;
}
255
256 /* Find the group that the target register definition DEF belongs
257 to in the list starting with *ALL_BTR_DEF_GROUPS. If no such
258 group exists, create one. Add def to the group. */
259 static void
260 find_btr_def_group (btr_def_group *all_btr_def_groups, btr_def def)
261 {
262 if (insn_sets_btr_p (def->insn, 1, NULL))
263 {
264 btr_def_group this_group;
265 rtx def_src = SET_SRC (single_set (def->insn));
266
267 /* ?? This linear search is an efficiency concern, particularly
268 as the search will almost always fail to find a match. */
269 for (this_group = *all_btr_def_groups;
270 this_group != NULL;
271 this_group = this_group->next)
272 if (rtx_equal_p (def_src, this_group->src))
273 break;
274
275 if (!this_group)
276 {
277 this_group = obstack_alloc (&migrate_btrl_obstack,
278 sizeof (struct btr_def_group_s));
279 this_group->src = def_src;
280 this_group->members = NULL;
281 this_group->next = *all_btr_def_groups;
282 *all_btr_def_groups = this_group;
283 }
284 def->group = this_group;
285 def->next_this_group = this_group->members;
286 this_group->members = def;
287 }
288 else
289 def->group = NULL;
290 }
291
292 /* Create a new target register definition structure, for a definition in
293 block BB, instruction INSN, and insert it into ALL_BTR_DEFS. Return
294 the new definition. */
295 static btr_def
296 add_btr_def (fibheap_t all_btr_defs, basic_block bb, int insn_luid, rtx insn,
297 unsigned int dest_reg, int other_btr_uses_before_def,
298 btr_def_group *all_btr_def_groups)
299 {
300 btr_def this
301 = obstack_alloc (&migrate_btrl_obstack, sizeof (struct btr_def_s));
302 this->bb = bb;
303 this->luid = insn_luid;
304 this->insn = insn;
305 this->btr = dest_reg;
306 this->cost = basic_block_freq (bb);
307 this->has_ambiguous_use = 0;
308 this->other_btr_uses_before_def = other_btr_uses_before_def;
309 this->other_btr_uses_after_use = 0;
310 this->next_this_bb = NULL;
311 this->next_this_group = NULL;
312 this->uses = NULL;
313 this->live_range = NULL;
314 find_btr_def_group (all_btr_def_groups, this);
315
316 fibheap_insert (all_btr_defs, -this->cost, this);
317
318 if (rtl_dump_file)
319 fprintf (rtl_dump_file,
320 "Found target reg definition: sets %u { bb %d, insn %d }%s priority %d\n",
321 dest_reg, bb->index, INSN_UID (insn), (this->group ? "" : ":not const"),
322 this->cost);
323
324 return this;
325 }
326
327 /* Create a new target register user structure, for a use in block BB,
328 instruction INSN. Return the new user. */
329 static btr_user
330 new_btr_user (basic_block bb, int insn_luid, rtx insn)
331 {
332 /* This instruction reads target registers. We need
333 to decide whether we can replace all target register
334 uses easily.
335 */
336 rtx *usep = find_btr_use (PATTERN (insn));
337 rtx use;
338 btr_user user = NULL;
339
340 if (usep)
341 {
342 int unambiguous_single_use;
343
344 /* We want to ensure that USE is the only use of a target
345 register in INSN, so that we know that to rewrite INSN to use
346 a different target register, all we have to do is replace USE. */
347 unambiguous_single_use = !btr_referenced_p (PATTERN (insn), usep);
348 if (!unambiguous_single_use)
349 usep = NULL;
350 }
351 use = usep ? *usep : NULL_RTX;
352 user = obstack_alloc (&migrate_btrl_obstack, sizeof (struct btr_user_s));
353 user->bb = bb;
354 user->luid = insn_luid;
355 user->insn = insn;
356 user->use = use;
357 user->other_use_this_block = 0;
358 user->next = NULL;
359 user->n_reaching_defs = 0;
360 user->first_reaching_def = -1;
361
362 if (rtl_dump_file)
363 {
364 fprintf (rtl_dump_file, "Uses target reg: { bb %d, insn %d }",
365 bb->index, INSN_UID (insn));
366
367 if (user->use)
368 fprintf (rtl_dump_file, ": unambiguous use of reg %d\n",
369 REGNO (user->use));
370 }
371
372 return user;
373 }
374
375 /* Write the contents of S to the dump file. */
376 static void
377 dump_hard_reg_set (HARD_REG_SET s)
378 {
379 int reg;
380 for (reg = 0; reg < FIRST_PSEUDO_REGISTER; reg++)
381 if (TEST_HARD_REG_BIT (s, reg))
382 fprintf (rtl_dump_file, " %d", reg);
383 }
384
385 /* Write the set of target regs live in block BB to the dump file. */
386 static void
387 dump_btrs_live (int bb)
388 {
389 fprintf (rtl_dump_file, "BB%d live:", bb);
390 dump_hard_reg_set (btrs_live[bb]);
391 fprintf (rtl_dump_file, "\n");
392 }
393
394 /* REGNO is the number of a branch target register that is being used or
395 set. USERS_THIS_BB is a list of preceding branch target register users;
396 If any of them use the same register, set their other_use_this_block
397 flag. */
398 static void
399 note_other_use_this_block (unsigned int regno, btr_user users_this_bb)
400 {
401 btr_user user;
402
403 for (user = users_this_bb; user != NULL; user = user->next)
404 if (user->use && REGNO (user->use) == regno)
405 user->other_use_this_block = 1;
406 }
407
/* Scratch state for one basic block, shared between
   compute_defs_uses_and_gen and its note_stores callback
   note_btr_set.  */
typedef struct {
  btr_user users_this_bb;		/* Users seen so far in the block.  */
  HARD_REG_SET btrs_written_in_block;	/* Target regs written in block.  */
  HARD_REG_SET btrs_live_in_block;	/* Target regs live at some point
					   in the block.  */
  sbitmap bb_gen;			/* Defs generated by the block.  */
  sbitmap *btr_defset;			/* Per-register def-insn sets.  */
} defs_uses_info;
415
416 /* Called via note_stores or directly to register stores into /
417 clobbers of a branch target register DEST that are not recognized as
418 straightforward definitions. DATA points to information about the
419 current basic block that needs updating. */
420 static void
421 note_btr_set (rtx dest, rtx set ATTRIBUTE_UNUSED, void *data)
422 {
423 defs_uses_info *info = data;
424 int regno, end_regno;
425
426 if (GET_CODE (dest) != REG)
427 return;
428 regno = REGNO (dest);
429 end_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (dest));
430 for (; regno < end_regno; regno++)
431 if (TEST_HARD_REG_BIT (all_btrs, regno))
432 {
433 note_other_use_this_block (regno, info->users_this_bb);
434 SET_HARD_REG_BIT (info->btrs_written_in_block, regno);
435 SET_HARD_REG_BIT (info->btrs_live_in_block, regno);
436 sbitmap_difference (info->bb_gen, info->bb_gen,
437 info->btr_defset[regno - first_btr]);
438 }
439 }
440
/* Scan all insns, building btr_def and btr_user structures (indexed by
   INSN_UID in DEF_ARRAY / USE_ARRAY), the per-register def sets
   BTR_DEFSET, the per-block GEN sets BB_GEN, the per-block written
   sets BTRS_WRITTEN, and the per-block live sets btrs_live.  New defs
   are also inserted into ALL_BTR_DEFS.  */
static void
compute_defs_uses_and_gen (fibheap_t all_btr_defs, btr_def *def_array,
			   btr_user *use_array, sbitmap *btr_defset,
			   sbitmap *bb_gen, HARD_REG_SET *btrs_written)
{
  /* Scan the code building up the set of all defs and all uses.
     For each target register, build the set of defs of that register.
     For each block, calculate the set of target registers
     written in that block.
     Also calculate the set of btrs ever live in that block.
  */
  int i;
  int insn_luid = 0;
  btr_def_group all_btr_def_groups = NULL;
  defs_uses_info info;

  sbitmap_vector_zero (bb_gen, n_basic_blocks);
  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      int reg;
      btr_def defs_this_bb = NULL;
      rtx insn;
      rtx last;

      info.users_this_bb = NULL;
      info.bb_gen = bb_gen[i];
      info.btr_defset = btr_defset;

      /* Seed the block's live set with the target registers live on
	 entry to the block.  */
      CLEAR_HARD_REG_SET (info.btrs_live_in_block);
      CLEAR_HARD_REG_SET (info.btrs_written_in_block);
      for (reg = first_btr; reg <= last_btr; reg++)
	if (TEST_HARD_REG_BIT (all_btrs, reg)
	    && REGNO_REG_SET_P (bb->global_live_at_start, reg))
	  SET_HARD_REG_BIT (info.btrs_live_in_block, reg);

      for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb));
	   insn != last;
	   insn = NEXT_INSN (insn), insn_luid++)
	{
	  if (INSN_P (insn))
	    {
	      int regno;
	      int insn_uid = INSN_UID (insn);

	      if (insn_sets_btr_p (insn, 0, &regno))
		{
		  /* A straightforward target register definition.  If
		     the register was already live here, there is a
		     prior live range in this block before the def.  */
		  btr_def def = add_btr_def (
		      all_btr_defs, bb, insn_luid, insn, regno,
		      TEST_HARD_REG_BIT (info.btrs_live_in_block, regno),
		      &all_btr_def_groups);

		  def_array[insn_uid] = def;
		  SET_HARD_REG_BIT (info.btrs_written_in_block, regno);
		  SET_HARD_REG_BIT (info.btrs_live_in_block, regno);
		  /* This def kills all earlier defs of the same
		     register generated in this block, then becomes
		     generated itself.  */
		  sbitmap_difference (bb_gen[i], bb_gen[i],
				      btr_defset[regno - first_btr]);
		  SET_BIT (bb_gen[i], insn_uid);
		  def->next_this_bb = defs_this_bb;
		  defs_this_bb = def;
		  SET_BIT (btr_defset[regno - first_btr], insn_uid);
		  note_other_use_this_block (regno, info.users_this_bb);
		}
	      else
		{
		  if (btr_referenced_p (PATTERN (insn), NULL))
		    {
		      /* A use of a target register.  */
		      btr_user user = new_btr_user (bb, insn_luid, insn);

		      use_array[insn_uid] = user;
		      if (user->use)
			SET_HARD_REG_BIT (info.btrs_live_in_block,
					  REGNO (user->use));
		      else
			{
			  /* Ambiguous use: conservatively treat every
			     target register INSN mentions as used, and
			     record any stores it makes.  */
			  int reg;
			  for (reg = first_btr; reg <= last_btr; reg++)
			    if (TEST_HARD_REG_BIT (all_btrs, reg)
				&& refers_to_regno_p (reg, reg + 1, user->insn,
						      NULL))
			      {
				note_other_use_this_block (reg,
							   info.users_this_bb);
				SET_HARD_REG_BIT (info.btrs_live_in_block, reg);
			      }
			  note_stores (PATTERN (insn), note_btr_set, &info);
			}
		      user->next = info.users_this_bb;
		      info.users_this_bb = user;
		    }
		  if (GET_CODE (insn) == CALL_INSN)
		    {
		      /* A call clobbers the call-used registers — or,
			 for a sibcall, the complement of the call-used
			 set (the call-saved registers must be restored
			 before the sibcall).  */
		      HARD_REG_SET *clobbered = &call_used_reg_set;
		      HARD_REG_SET call_saved;
		      rtx pat = PATTERN (insn);
		      int i;

		      /* Check for sibcall.  */
		      if (GET_CODE (pat) == PARALLEL)
			for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
			  if (GET_CODE (XVECEXP (pat, 0, i)) == RETURN)
			    {
			      COMPL_HARD_REG_SET (call_saved,
						  call_used_reg_set);
			      clobbered = &call_saved;
			    }

		      for (regno = first_btr; regno <= last_btr; regno++)
			if (TEST_HARD_REG_BIT (*clobbered, regno))
			  note_btr_set (regno_reg_rtx[regno], NULL_RTX, &info);
		    }
		}
	    }
	}

      COPY_HARD_REG_SET (btrs_live[i], info.btrs_live_in_block);
      COPY_HARD_REG_SET (btrs_written[i], info.btrs_written_in_block);
      if (rtl_dump_file)
	dump_btrs_live(i);
    }
}
562
563 static void
564 compute_kill (sbitmap *bb_kill, sbitmap *btr_defset,
565 HARD_REG_SET *btrs_written)
566 {
567 int i;
568 int regno;
569
570 /* For each basic block, form the set BB_KILL - the set
571 of definitions that the block kills. */
572 sbitmap_vector_zero (bb_kill, n_basic_blocks);
573 for (i = 0; i < n_basic_blocks; i++)
574 {
575 for (regno = first_btr; regno <= last_btr; regno++)
576 if (TEST_HARD_REG_BIT (all_btrs, regno)
577 && TEST_HARD_REG_BIT (btrs_written[i], regno))
578 sbitmap_a_or_b (bb_kill[i], bb_kill[i],
579 btr_defset[regno - first_btr]);
580 }
581 }
582
583 static void
584 compute_out (sbitmap *bb_out, sbitmap *bb_gen, sbitmap *bb_kill, int max_uid)
585 {
586 /* Perform iterative dataflow:
587 Initially, for all blocks, BB_OUT = BB_GEN.
588 For each block,
589 BB_IN = union over predecessors of BB_OUT(pred)
590 BB_OUT = (BB_IN - BB_KILL) + BB_GEN
591 Iterate until the bb_out sets stop growing. */
592 int i;
593 int changed;
594 sbitmap bb_in = sbitmap_alloc (max_uid);
595
596 for (i = 0; i < n_basic_blocks; i++)
597 sbitmap_copy (bb_out[i], bb_gen[i]);
598
599 changed = 1;
600 while (changed)
601 {
602 changed = 0;
603 for (i = 0; i < n_basic_blocks; i++)
604 {
605 sbitmap_union_of_preds (bb_in, bb_out, i);
606 changed |= sbitmap_union_of_diff_cg (bb_out[i], bb_gen[i],
607 bb_in, bb_kill[i]);
608 }
609 }
610 sbitmap_free (bb_in);
611 }
612
/* Using the dataflow solution BB_OUT, link every use to the uses list
   of each of its reaching defs, counting reaching defs per use and
   flagging webs with ambiguous or multiply-reached uses so they will
   not be migrated.  */
static void
link_btr_uses (btr_def *def_array, btr_user *use_array, sbitmap *bb_out,
	       sbitmap *btr_defset, int max_uid)
{
  int i;
  sbitmap reaching_defs = sbitmap_alloc (max_uid);

  /* Link uses to the uses lists of all of their reaching defs.
     Count up the number of reaching defs of each use.  */
  for (i = 0; i < n_basic_blocks; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx last;

      /* Defs reaching the top of the block = union of predecessors'
	 OUT sets; updated in place as we walk the block's insns.  */
      sbitmap_union_of_preds (reaching_defs, bb_out, i);
      for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb));
	   insn != last;
	   insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    {
	      int insn_uid = INSN_UID (insn);

	      btr_def def = def_array[insn_uid];
	      btr_user user = use_array[insn_uid];
	      if (def != NULL)
		{
		  /* Remove all reaching defs of regno except
		     for this one.  */
		  sbitmap_difference (reaching_defs, reaching_defs,
				      btr_defset[def->btr - first_btr]);
		  SET_BIT(reaching_defs, insn_uid);
		}

	      if (user != NULL)
		{
		  /* Find all the reaching defs for this use.  */
		  sbitmap reaching_defs_of_reg = sbitmap_alloc(max_uid);
		  int uid;

		  if (user->use)
		    /* Unambiguous use: only defs of its register
		       matter.  */
		    sbitmap_a_and_b (
		      reaching_defs_of_reg,
		      reaching_defs,
		      btr_defset[REGNO (user->use) - first_btr]);
		  else
		    {
		      /* Ambiguous use: collect reaching defs of every
			 target register the insn mentions.  */
		      int reg;

		      sbitmap_zero (reaching_defs_of_reg);
		      for (reg = first_btr; reg <= last_btr; reg++)
			if (TEST_HARD_REG_BIT (all_btrs, reg)
			    && refers_to_regno_p (reg, reg + 1, user->insn,
						  NULL))
			  sbitmap_a_or_b_and_c (reaching_defs_of_reg,
						reaching_defs_of_reg,
						reaching_defs,
						btr_defset[reg - first_btr]);
		    }
		  EXECUTE_IF_SET_IN_SBITMAP (reaching_defs_of_reg, 0, uid,
		    {
		      btr_def def = def_array[uid];

		      /* We now know that def reaches user.  */

		      if (rtl_dump_file)
			fprintf (rtl_dump_file,
				 "Def in insn %d reaches use in insn %d\n",
				 uid, insn_uid);

		      user->n_reaching_defs++;
		      if (!user->use)
			def->has_ambiguous_use = 1;
		      if (user->first_reaching_def != -1)
			{ /* There is more than one reaching def.  This is
			     a rare case, so just give up on this def/use
			     web when it occurs.  */
			  def->has_ambiguous_use = 1;
			  def_array[user->first_reaching_def]
			    ->has_ambiguous_use = 1;
			  if (rtl_dump_file)
			    fprintf (rtl_dump_file,
				     "(use %d has multiple reaching defs)\n",
				     insn_uid);
			}
		      else
			user->first_reaching_def = uid;
		      if (user->other_use_this_block)
			def->other_btr_uses_after_use = 1;
		      user->next = def->uses;
		      def->uses = user;
		    });
		  sbitmap_free (reaching_defs_of_reg);
		}

	      if (GET_CODE (insn) == CALL_INSN)
		{
		  /* A call kills every def of a call-clobbered target
		     register.  */
		  int regno;

		  for (regno = first_btr; regno <= last_btr; regno++)
		    if (TEST_HARD_REG_BIT (all_btrs, regno)
			&& TEST_HARD_REG_BIT (call_used_reg_set, regno))
		      sbitmap_difference (reaching_defs, reaching_defs,
					  btr_defset[regno - first_btr]);
		}
	    }
	}
    }
  sbitmap_free (reaching_defs);
}
724
/* Driver for def/use web construction: scan insns for defs/uses and
   GEN sets, compute KILL sets, solve the reaching-definitions dataflow,
   then link uses to their reaching defs.  Intermediate structures are
   freed as soon as the next stage no longer needs them.  */
static void
build_btr_def_use_webs (fibheap_t all_btr_defs)
{
  const int max_uid = get_max_uid ();
  btr_def *def_array = xcalloc (max_uid, sizeof (btr_def));
  btr_user *use_array = xcalloc (max_uid, sizeof (btr_user));
  /* One def-insn bitmap per allocatable target register.  */
  sbitmap *btr_defset = sbitmap_vector_alloc (
    (last_btr - first_btr) + 1, max_uid);
  sbitmap *bb_gen = sbitmap_vector_alloc (n_basic_blocks, max_uid);
  HARD_REG_SET *btrs_written = xcalloc (n_basic_blocks, sizeof (HARD_REG_SET));
  sbitmap *bb_kill;
  sbitmap *bb_out;

  sbitmap_vector_zero (btr_defset, (last_btr - first_btr) + 1);

  compute_defs_uses_and_gen (all_btr_defs, def_array, use_array, btr_defset,
			     bb_gen, btrs_written);

  bb_kill = sbitmap_vector_alloc (n_basic_blocks, max_uid);
  compute_kill (bb_kill, btr_defset, btrs_written);
  free (btrs_written);

  bb_out = sbitmap_vector_alloc (n_basic_blocks, max_uid);
  compute_out (bb_out, bb_gen, bb_kill, max_uid);

  sbitmap_vector_free (bb_gen);
  sbitmap_vector_free (bb_kill);

  link_btr_uses (def_array, use_array, bb_out, btr_defset, max_uid);

  sbitmap_vector_free (bb_out);
  sbitmap_vector_free (btr_defset);
  free (use_array);
  free (def_array);
}
760
761 /* Return true if basic block BB contains the start or end of the
762 live range of the definition DEF, AND there are other live
763 ranges of the same target register that include BB. */
764 static int
765 block_at_edge_of_live_range_p (int bb, btr_def def)
766 {
767 if (def->other_btr_uses_before_def && BASIC_BLOCK (bb) == def->bb)
768 return 1;
769 else if (def->other_btr_uses_after_use)
770 {
771 btr_user user;
772 for (user = def->uses; user != NULL; user = user->next)
773 if (BASIC_BLOCK (bb) == user->bb)
774 return 1;
775 }
776 return 0;
777 }
778
779 /* We are removing the def/use web DEF. The target register
780 used in this web is therefore no longer live in the live range
781 of this web, so remove it from the live set of all basic blocks
782 in the live range of the web.
783 Blocks at the boundary of the live range may contain other live
784 ranges for the same target register, so we have to be careful
785 to remove the target register from the live set of these blocks
786 only if they do not contain other live ranges for the same register. */
static void
clear_btr_from_live_range (btr_def def)
{
  int bb;

  EXECUTE_IF_SET_IN_BITMAP
    (def->live_range, 0, bb,
     {
       /* Keep the register marked live in boundary blocks that may
	  hold another live range of the same register (see the comment
	  above and block_at_edge_of_live_range_p).  */
       if ((!def->other_btr_uses_before_def
	    && !def->other_btr_uses_after_use)
	   || !block_at_edge_of_live_range_p (bb, def))
	 {
	   CLEAR_HARD_REG_BIT (btrs_live[bb], def->btr);
	   if (rtl_dump_file)
	     dump_btrs_live (bb);
	 }
     });
}
805
806
807 /* We are adding the def/use web DEF. Add the target register used
808 in this web to the live set of all of the basic blocks that contain
809 the live range of the web. */
static void
add_btr_to_live_range (btr_def def)
{
  int bb;
  /* Mark DEF's register live in every block of its live range.  */
  EXECUTE_IF_SET_IN_BITMAP
    (def->live_range, 0, bb,
     {
       SET_HARD_REG_BIT (btrs_live[bb], def->btr);
       if (rtl_dump_file)
	 dump_btrs_live (bb);
     });
}
822
823 /* Update a live range to contain the basic block NEW_BLOCK, and all
824 blocks on paths between the existing live range and NEW_BLOCK.
825 HEAD is a block contained in the existing live range that dominates
826 all other blocks in the existing live range.
827 Also add to the set BTRS_LIVE_IN_RANGE all target registers that
828 are live in the blocks that we add to the live range.
829 It is a precondition that either NEW_BLOCK dominates HEAD,or
830 HEAD dom NEW_BLOCK. This is used to speed up the
831 implementation of this function. */
static void
augment_live_range (bitmap live_range, HARD_REG_SET *btrs_live_in_range,
		    basic_block head_bb, basic_block new_bb)
{
  basic_block *worklist, *tos;

  /* Each block enters the worklist at most once, plus one slot of
     slack for the initial seed.  */
  tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));

  if (dominated_by_p (CDI_DOMINATORS, new_bb, head_bb))
    /* NEW_BB is below HEAD_BB: walking predecessors from NEW_BB will
       reach the existing live range.  */
    *tos++ = new_bb;
  else if (dominated_by_p (CDI_DOMINATORS, head_bb, new_bb))
    {
      edge e;
      int new_block = new_bb->index;

      /* HEAD_BB is below NEW_BB: add NEW_BB directly, then grow the
	 range upward from HEAD_BB's predecessors until NEW_BB's
	 dominated region is covered.  */
      bitmap_set_bit (live_range, new_block);
      IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[new_block]);
      if (rtl_dump_file)
	{
	  fprintf (rtl_dump_file,
		   "Adding block %d to live range\n", new_block);
	  fprintf (rtl_dump_file,"Now live btrs are ");
	  dump_hard_reg_set (*btrs_live_in_range);
	  fprintf (rtl_dump_file, "\n");
	}
      for (e = head_bb->pred; e; e = e->pred_next)
	*tos++ = e->src;
    }
  else
    /* The precondition (one of HEAD_BB, NEW_BB dominates the other)
       was violated.  */
    abort();

  /* Standard worklist walk: add each block, accumulate its live target
     registers, and queue its not-yet-included predecessors.  */
  while (tos != worklist)
    {
      basic_block bb = *--tos;
      if (!bitmap_bit_p (live_range, bb->index))
	{
	  edge e;

	  bitmap_set_bit (live_range, bb->index);
	  IOR_HARD_REG_SET (*btrs_live_in_range,
			    btrs_live[bb->index]);
	  if (rtl_dump_file)
	    {
	      fprintf (rtl_dump_file,
		       "Adding block %d to live range\n", bb->index);
	      fprintf (rtl_dump_file,"Now live btrs are ");
	      dump_hard_reg_set (*btrs_live_in_range);
	      fprintf (rtl_dump_file, "\n");
	    }

	  for (e = bb->pred; e != NULL; e = e->pred_next)
	    {
	      basic_block pred = e->src;
	      if (!bitmap_bit_p (live_range, pred->index))
		*tos++ = pred;
	    }
	}
    }

  free (worklist);
}
893
894 /* Return the most desirable target register that is not in
895 the set USED_BTRS. */
static int
choose_btr (HARD_REG_SET used_btrs)
{
  int i;
  /* If every allocatable target register is already in USED_BTRS,
     give up immediately (the macro jumps to the label on subset).  */
  GO_IF_HARD_REG_SUBSET (all_btrs, used_btrs, give_up);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
#ifdef REG_ALLOC_ORDER
      /* Honor the target's preferred allocation order.  */
      int regno = reg_alloc_order[i];
#else
      int regno = i;
#endif
      if (TEST_HARD_REG_BIT (all_btrs, regno)
	  && !TEST_HARD_REG_BIT (used_btrs, regno))
	return regno;
    }
give_up:
  return -1;
}
916
917 /* Calculate the set of basic blocks that contain the live range of
918 the def/use web DEF.
919 Also calculate the set of target registers that are live at time
920 in this live range, but ignore the live range represented by DEF
921 when calculating this set. */
static void
btr_def_live_range (btr_def def, HARD_REG_SET *btrs_live_in_range)
{
  if (!def->live_range)
    {
      /* First time: build the live range from the def block plus the
	 paths to each use, accumulating the other live target
	 registers as we go.  */
      btr_user user;

      def->live_range = BITMAP_XMALLOC ();

      bitmap_set_bit (def->live_range, def->bb->index);
      COPY_HARD_REG_SET (*btrs_live_in_range, btrs_live[def->bb->index]);

      for (user = def->uses; user != NULL; user = user->next)
	augment_live_range (def->live_range, btrs_live_in_range,
			    def->bb, user->bb);
    }
  else
    {
      /* def->live_range is accurate, but we need to recompute
	 the set of target registers live over it, because migration
	 of other PT instructions may have affected it.
      */
      int bb;

      CLEAR_HARD_REG_SET (*btrs_live_in_range);
      EXECUTE_IF_SET_IN_BITMAP
	(def->live_range, 0, bb,
	 {
	   IOR_HARD_REG_SET (*btrs_live_in_range,
			     btrs_live[bb]);
	 });
    }
  /* DEF's own register only counts as live over the range if other
     live ranges of the same register overlap it.  */
  if (!def->other_btr_uses_before_def &&
      !def->other_btr_uses_after_use)
    CLEAR_HARD_REG_BIT (*btrs_live_in_range, def->btr);
}
958
959 /* Merge into the def/use web DEF any other def/use webs in the same
960 group that are dominated by DEF, provided that there is a target
961 register available to allocate to the merged web. */
static void
combine_btr_defs (btr_def def, HARD_REG_SET *btrs_live_in_range)
{
  btr_def other_def;

  for (other_def = def->group->members;
       other_def != NULL;
       other_def = other_def->next_this_group)
    {
      if (other_def != def
	  && other_def->uses != NULL
	  && ! other_def->has_ambiguous_use
	  && dominated_by_p (CDI_DOMINATORS, other_def->bb, def->bb))
	{
	  /* def->bb dominates the other def, so def and other_def could
	     be combined.  */
	  /* Merge their live ranges, and get the set of
	     target registers live over the merged range.  */
	  int btr;
	  HARD_REG_SET combined_btrs_live;
	  bitmap combined_live_range = BITMAP_XMALLOC ();
	  btr_user user;

	  if (other_def->live_range == NULL)
	    {
	      /* Force computation of other_def's live range (the live
		 register set it produces is not needed here).  */
	      HARD_REG_SET dummy_btrs_live_in_range;
	      btr_def_live_range (other_def, &dummy_btrs_live_in_range);
	    }
	  COPY_HARD_REG_SET (combined_btrs_live, *btrs_live_in_range);
	  bitmap_copy (combined_live_range, def->live_range);

	  /* Extend DEF's range to cover each of OTHER_DEF's uses.  */
	  for (user = other_def->uses; user != NULL; user = user->next)
	    augment_live_range (combined_live_range, &combined_btrs_live,
				def->bb, user->bb);

	  /* Only combine if a target register is still free over the
	     merged range.  */
	  btr = choose_btr (combined_btrs_live);
	  if (btr != -1)
	    {
	      /* We can combine them.  */
	      if (rtl_dump_file)
		fprintf (rtl_dump_file,
			 "Combining def in insn %d with def in insn %d\n",
			 INSN_UID (other_def->insn), INSN_UID (def->insn));

	      def->btr = btr;
	      /* Splice OTHER_DEF's users onto DEF's uses list.  */
	      user = other_def->uses;
	      while (user != NULL)
		{
		  btr_user next = user->next;

		  user->next = def->uses;
		  def->uses = user;
		  user = next;
		}
	      /* Combining def/use webs can make target registers live
		 after uses where they previously were not.  This means
		 some REG_DEAD notes may no longer be correct.  We could
		 be more precise about this if we looked at the combined
		 live range, but here I just delete any REG_DEAD notes
		 in case they are no longer correct.  */
	      /* NOTE(review): this assumes every user on DEF's list has
		 a non-NULL USE (ambiguous webs appear to be filtered
		 out before combining) — confirm against callers.  */
	      for (user = def->uses; user != NULL; user = user->next)
		remove_note (user->insn,
			     find_regno_note (user->insn, REG_DEAD,
					      REGNO (user->use)));
	      clear_btr_from_live_range (other_def);
	      other_def->uses = NULL;
	      bitmap_copy (def->live_range, combined_live_range);
	      if (other_def->other_btr_uses_after_use)
		def->other_btr_uses_after_use = 1;
	      COPY_HARD_REG_SET (*btrs_live_in_range, combined_btrs_live);

	      /* Delete the old target register initialization.  */
	      delete_insn (other_def->insn);

	    }
	  BITMAP_XFREE (combined_live_range);
	}
    }
}
1041
1042 /* Move the definition DEF from its current position to basic
1043 block NEW_DEF_BB, and modify it to use branch target register BTR.
1044 Delete the old defining insn, and insert a new one in NEW_DEF_BB.
1045 Update all reaching uses of DEF in the RTL to use BTR.
1046 If this new position means that other defs in the
1047 same group can be combined with DEF then combine them. */
1048 static void
1049 move_btr_def (basic_block new_def_bb, int btr, btr_def def, bitmap live_range,
1050 HARD_REG_SET *btrs_live_in_range)
1051 {
1052 /* We can move the instruction.
1053 Set a target register in block NEW_DEF_BB to the value
1054 needed for this target register definition.
1055 Replace all uses of the old target register definition by
1056 uses of the new definition. Delete the old definition. */
1057 basic_block b = new_def_bb;
1058 rtx insp = BB_HEAD (b);
1059 rtx old_insn = def->insn;
1060 rtx src;
1061 rtx btr_rtx;
1062 rtx new_insn;
1063 enum machine_mode btr_mode;
1064 btr_user user;
1065 rtx set;
1066
1067 if (rtl_dump_file)
1068 fprintf(rtl_dump_file, "migrating to basic block %d, using reg %d\n",
1069 new_def_bb->index, btr);
1070
/* Update DEF's bookkeeping to reflect its new home: clear its old
   register from the live-range maps, rehome it to NEW_DEF_BB with
   luid 0 (it becomes the block's first relevant insn), and recost it
   at the new block's frequency. */
1071 clear_btr_from_live_range (def);
1072 def->btr = btr;
1073 def->bb = new_def_bb;
1074 def->luid = 0;
1075 def->cost = basic_block_freq (new_def_bb);
1076 def->other_btr_uses_before_def = 0;
1077 bitmap_copy (def->live_range, live_range);
/* Merging with other group members may pick a different register,
   so re-read def->btr afterwards. */
1078 combine_btr_defs (def, btrs_live_in_range);
1079 btr = def->btr;
1080 add_btr_to_live_range (def);
/* Skip over a leading label so the new set is emitted inside the
   block proper. */
1081 if (GET_CODE (insp) == CODE_LABEL)
1082 insp = NEXT_INSN (insp);
1083 /* N.B.: insp is expected to be NOTE_INSN_BASIC_BLOCK now. Some
1084 optimizations can result in insp being both first and last insn of
1085 its basic block. */
1086 /* ?? some assertions to check that insp is sensible? */
1087
/* Build the replacement insn: a move of the old definition's source
   into the chosen branch target register, in the old destination's
   mode. */
1088 set = single_set (old_insn);
1089 src = SET_SRC (set);
1090 btr_mode = GET_MODE (SET_DEST (set));
1091 btr_rtx = gen_rtx (REG, btr_mode, btr);
1092
1093 new_insn = gen_move_insn (btr_rtx, src);
1094
1095 /* Insert target register initialization at head of basic block. */
1096 def->insn = emit_insn_after (new_insn, insp);
1097
/* Mark the hard register as used by this function. */
1098 regs_ever_live[btr] = 1;
1099
1100 if (rtl_dump_file)
1101 fprintf (rtl_dump_file, "New pt is insn %d, inserted after insn %d\n",
1102 INSN_UID (def->insn), INSN_UID (insp));
1103
1104 /* Delete the old target register initialization. */
1105 delete_insn (old_insn);
1106
1107 /* Replace each use of the old target register by a use of the new target
1108 register. */
1109 for (user = def->uses; user != NULL; user = user->next)
1110 {
1111 /* Some extra work here to ensure consistent modes, because
1112 it seems that a target register REG rtx can be given a different
1113 mode depending on the context (surely that should not be
1114 the case?). */
1115 rtx replacement_rtx;
1116 if (GET_MODE (user->use) == GET_MODE (btr_rtx)
1117 || GET_MODE (user->use) == VOIDmode)
1118 replacement_rtx = btr_rtx;
1119 else
/* Re-wrap the register number in the mode the use expects. */
1120 replacement_rtx = gen_rtx (REG, GET_MODE (user->use), btr);
1121 replace_rtx (user->insn, user->use, replacement_rtx);
1122 user->use = replacement_rtx;
1123 }
1124 }
1125
1126 /* We anticipate intra-block scheduling to be done. See if INSN could move
1127 up within BB by N_INSNS. */
1128 static int
1129 can_move_up (basic_block bb, rtx insn, int n_insns)
1130 {
/* Walk backwards from INSN toward the block head, counting only real
   insns (INSN_P) against the budget; notes and labels do not occupy
   scheduling slots. */
1131 while (insn != BB_HEAD (bb) && n_insns > 0)
1132 {
1133 insn = PREV_INSN (insn);
1134 /* ??? What if we have an anti-dependency that actually prevents the
1135 scheduler from doing the move? We'd like to re-allocate the register,
1136 but not necessarily put the load into another basic block. */
1137 if (INSN_P (insn))
1138 n_insns--;
1139 }
/* Nonzero iff N_INSNS real insns were found above INSN within BB. */
1140 return n_insns <= 0;
1141 }
1142
1143 /* Attempt to migrate the target register definition DEF to an
1144 earlier point in the flowgraph.
1145
1146 It is a precondition of this function that DEF is migratable:
1147 i.e. it has a constant source, and all uses are unambiguous.
1148
1149 Only migrations that reduce the cost of DEF will be made.
1150 MIN_COST is the lower bound on the cost of the DEF after migration.
1151 If we migrate DEF so that its cost falls below MIN_COST,
1152 then we do not attempt to migrate further. The idea is that
1153 we migrate definitions in a priority order based on their cost,
1154 when the cost of this definition falls below MIN_COST, then
1155 there is another definition with cost == MIN_COST which now
1156 has a higher priority than this definition.
1157
1158 Return nonzero if there may be benefit from attempting to
1159 migrate this DEF further (i.e. we have reduced the cost below
1160 MIN_COST, but we may be able to reduce it further).
1161 Return zero if no further migration is possible. */
1162 static int
1163 migrate_btr_def (btr_def def, int min_cost)
1164 {
1165 bitmap live_range;
1166 HARD_REG_SET btrs_live_in_range;
1167 int btr_used_near_def = 0;
1168 int def_basic_block_freq;
1169 basic_block try;
1170 int give_up = 0;
1171 int def_moved = 0;
1172 btr_user user;
1173 int def_latency = 1;
1174
1175 if (rtl_dump_file)
1176 fprintf (rtl_dump_file,
1177 "Attempting to migrate pt from insn %d (cost = %d, min_cost = %d) ... ",
1178 INSN_UID (def->insn), def->cost, min_cost);
1179
/* A def with no group has no constant source; an ambiguous use cannot
   be rewritten safely. Either way, bail out. */
1180 if (!def->group || def->has_ambiguous_use)
1181 /* These defs are not migratable. */
1182 {
1183 if (rtl_dump_file)
1184 fprintf (rtl_dump_file, "it's not migratable\n");
1185 return 0;
1186 }
1187
1188 if (!def->uses)
1189 /* We have combined this def with another in the same group, so
1190 no need to consider it further.
1191 */
1192 {
1193 if (rtl_dump_file)
1194 fprintf (rtl_dump_file, "it's already combined with another pt\n");
1195 return 0;
1196 }
1197
1198 btr_def_live_range (def, &btrs_live_in_range);
1199 live_range = BITMAP_XMALLOC ();
1200 bitmap_copy (live_range, def->live_range);
1201
/* Query the scheduler description for the def's latency; which API
   applies depends on whether the target uses the DFA pipeline
   interface. Without INSN_SCHEDULING, assume a latency of 1. */
1202 #ifdef INSN_SCHEDULING
1203 if ((*targetm.sched.use_dfa_pipeline_interface) ())
1204 def_latency = insn_default_latency (def->insn);
1205 else
1206 def_latency = result_ready_cost (def->insn);
1207 #endif
1208
/* Convert the latency in cycles into a distance in insns, scaling by
   the number of insns that can issue per cycle. */
1209 def_latency *= issue_rate;
1210
/* If some use in the def's own block falls inside the latency window
   and the def cannot be scheduled early enough to hide that latency,
   note it: migrating the def out of this block then looks attractive
   even to a block of equal frequency. */
1211 for (user = def->uses; user != NULL; user = user->next)
1212 {
1213 if (user->bb == def->bb
1214 && user->luid > def->luid
1215 && (def->luid + def_latency) > user->luid
1216 && ! can_move_up (def->bb, def->insn,
1217 (def->luid + def_latency) - user->luid))
1218 {
1219 btr_used_near_def = 1;
1220 break;
1221 }
1222 }
1223
1224 def_basic_block_freq = basic_block_freq (def->bb);
1225
/* Walk up the dominator tree, trying each successively "earlier"
   block until we run out of dominators, hit the entry block, give
   up, or the def's cost drops below MIN_COST (at which point some
   other def now has higher priority). */
1226 for (try = get_immediate_dominator (CDI_DOMINATORS, def->bb);
1227 !give_up && try && try != ENTRY_BLOCK_PTR && def->cost >= min_cost;
1228 try = get_immediate_dominator (CDI_DOMINATORS, try))
1229 {
1230 /* Try to move the instruction that sets the target register into
1231 basic block TRY. */
1232 int try_freq = basic_block_freq (try);
1233
1234 if (rtl_dump_file)
1235 fprintf (rtl_dump_file, "trying block %d ...", try->index);
1236
/* Only move to a strictly cheaper block, or to an equally cheap one
   when it would hide latency (see btr_used_near_def above). */
1237 if (try_freq < def_basic_block_freq
1238 || (try_freq == def_basic_block_freq && btr_used_near_def))
1239 {
1240 int btr;
1241 augment_live_range (live_range, &btrs_live_in_range, def->bb, try);
1242 if (rtl_dump_file)
1243 {
1244 fprintf (rtl_dump_file, "Now btrs live in range are: ");
1245 dump_hard_reg_set (btrs_live_in_range);
1246 fprintf (rtl_dump_file, "\n");
1247 }
1248 btr = choose_btr (btrs_live_in_range);
1249 if (btr != -1)
1250 {
1251 move_btr_def (try, btr, def, live_range, &btrs_live_in_range);
/* move_btr_def updated def->bb/cost/live_range; resync our
   local copies and re-evaluate the latency concern from the
   new position. */
1252 bitmap_copy(live_range, def->live_range);
1253 btr_used_near_def = 0;
1254 def_moved = 1;
1255 def_basic_block_freq = basic_block_freq (def->bb);
1256 }
1257 else
1258 {
1259 /* There are no free target registers available to move
1260 this far forward, so give up */
1261 give_up = 1;
1262 if (rtl_dump_file)
1263 fprintf (rtl_dump_file,
1264 "giving up because there are no free target registers\n");
1265 }
1266
1267 }
1268 }
1269 if (!def_moved)
1270 {
1271 give_up = 1;
1272 if (rtl_dump_file)
1273 fprintf (rtl_dump_file, "failed to move\n");
1274 }
1275 BITMAP_XFREE (live_range);
/* Nonzero tells the caller it may be worth requeueing this def. */
1276 return !give_up;
1277 }
1278
1279 /* Attempt to move instructions that set target registers earlier
1280 in the flowgraph, away from their corresponding uses. */
1281 static void
1282 migrate_btr_defs (enum reg_class btr_class, int allow_callee_save)
1283 {
/* Defs are processed in decreasing cost order.  fibheap extracts the
   minimum key, so keys are stored as -cost throughout. */
1284 fibheap_t all_btr_defs = fibheap_new ();
1285 int reg;
1286
1287 gcc_obstack_init (&migrate_btrl_obstack);
1288 if (rtl_dump_file)
1289 {
1290 int i;
1291
1292 for (i = 0; i < n_basic_blocks; i++)
1293 {
1294 basic_block bb = BASIC_BLOCK (i);
1295 fprintf(rtl_dump_file,
1296 "Basic block %d: count = " HOST_WIDEST_INT_PRINT_DEC
1297 " loop-depth = %d idom = %d\n",
1298 i, (HOST_WIDEST_INT) bb->count, bb->loop_depth,
1299 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
1300 }
1301 }
1302
/* Build the set of usable branch target registers: members of
   BTR_CLASS, restricted — unless ALLOW_CALLEE_SAVE — to call-used
   registers or callee-saved registers the function already uses
   (so no new save/restore is introduced).  Also record the first
   and last such register numbers. */
1303 CLEAR_HARD_REG_SET (all_btrs);
1304 for (first_btr = -1, reg = 0; reg < FIRST_PSEUDO_REGISTER; reg++)
1305 if (TEST_HARD_REG_BIT (reg_class_contents[(int) btr_class], reg)
1306 && (allow_callee_save || call_used_regs[reg] || regs_ever_live[reg]))
1307 {
1308 SET_HARD_REG_BIT (all_btrs, reg);
1309 last_btr = reg;
1310 if (first_btr < 0)
1311 first_btr = reg;
1312 }
1313
/* Per-basic-block sets of live branch target registers. */
1314 btrs_live = xcalloc (n_basic_blocks, sizeof (HARD_REG_SET));
1315
1316 build_btr_def_use_webs (all_btr_defs);
1317
/* Repeatedly take the highest-cost def (smallest negated key) and try
   to migrate it; a def that may still improve is requeued with its
   reduced cost. */
1318 while (!fibheap_empty (all_btr_defs))
1319 {
1320 btr_def def =
1321 (btr_def) fibheap_extract_min (all_btr_defs);
1322 int min_cost = -fibheap_min_key (all_btr_defs);
1323 if (migrate_btr_def (def, min_cost))
1324 {
1325 fibheap_insert (all_btr_defs, -def->cost, (void *) def);
1326 if (rtl_dump_file)
1327 {
1328 fprintf (rtl_dump_file,
1329 "Putting insn %d back on queue with priority %d\n",
1330 INSN_UID (def->insn), def->cost);
1331 }
1332 }
1333 else
1334 {
/* This def is finished with; release its live range now rather
   than carrying it to the end of the pass. */
1335 if (def->live_range)
1336 BITMAP_XFREE (def->live_range);
1337 }
1338 }
1339
1340 free (btrs_live);
1341 obstack_free (&migrate_btrl_obstack, NULL);
1342 fibheap_delete (all_btr_defs);
1343 }
1344
/* Entry point of the pass: optimize loads of branch target registers
   over INSNS.  AFTER_PROLOGUE_EPILOGUE_GEN tells the target hook
   whether callee-saved registers may still be considered.  Does
   nothing on targets with no branch target register class. */
1345 void
1346 branch_target_load_optimize (rtx insns, bool after_prologue_epilogue_gen)
1347 {
1348 enum reg_class class = (*targetm.branch_target_register_class) ();
1349 if (class != NO_REGS)
1350 {
1351 /* Initialize issue_rate. */
1352 if (targetm.sched.issue_rate)
1353 issue_rate = (*targetm.sched.issue_rate) ();
1354 else
1355 issue_rate = 1;
1356
1357 /* Build the CFG for migrate_btr_defs. */
1358 #if 1
1359 /* This may or may not be needed, depending on where we
1360 run this phase. */
1361 cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
1362 #endif
1363
/* Recompute liveness; migrate_btr_defs relies on up-to-date
   per-block register life information. */
1364 life_analysis (insns, NULL, 0);
1365
1366 /* Dominator info is also needed for migrate_btr_def. */
1367 calculate_dominance_info (CDI_DOMINATORS);
1368 migrate_btr_defs (class,
1369 ((*targetm.branch_target_register_callee_saved)
1370 (after_prologue_epilogue_gen)));
1371
1372 free_dominance_info (CDI_DOMINATORS);
1373
/* The pass moved and deleted insns and dropped REG_DEAD notes
   (see combine_btr_defs); rebuild global life info and notes. */
1374 update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
1375 PROP_DEATH_NOTES | PROP_REG_INFO);
1376 }
1377 }