/* gcc/global.c -- global register allocator.
   (Web-viewer header lines removed during extraction.)  */
1 /* Allocate registers for pseudo-registers that span basic blocks.
2 Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
3 1999, 2000, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27
28 #include "machmode.h"
29 #include "hard-reg-set.h"
30 #include "rtl.h"
31 #include "tm_p.h"
32 #include "flags.h"
33 #include "basic-block.h"
34 #include "regs.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "reload.h"
38 #include "output.h"
39 #include "toplev.h"
40
41 /* This pass of the compiler performs global register allocation.
42 It assigns hard register numbers to all the pseudo registers
43 that were not handled in local_alloc. Assignments are recorded
44 in the vector reg_renumber, not by changing the rtl code.
45 (Such changes are made by final). The entry point is
46 the function global_alloc.
47
48 After allocation is complete, the reload pass is run as a subroutine
49 of this pass, so that when a pseudo reg loses its hard reg due to
50 spilling it is possible to make a second attempt to find a hard
51 reg for it. The reload pass is independent in other respects
52 and it is run even when stupid register allocation is in use.
53
54 1. Assign allocation-numbers (allocnos) to the pseudo-registers
55 still needing allocations and to the pseudo-registers currently
56 allocated by local-alloc which may be spilled by reload.
57 Set up tables reg_allocno and allocno_reg to map
58 reg numbers to allocnos and vice versa.
59 max_allocno gets the number of allocnos in use.
60
61 2. Allocate a max_allocno by max_allocno conflict bit matrix and clear it.
62 Allocate a max_allocno by FIRST_PSEUDO_REGISTER conflict matrix
63 for conflicts between allocnos and explicit hard register use
64 (which includes use of pseudo-registers allocated by local_alloc).
65
66 3. For each basic block
67 walk forward through the block, recording which
68 pseudo-registers and which hardware registers are live.
69 Build the conflict matrix between the pseudo-registers
70 and another of pseudo-registers versus hardware registers.
71 Also record the preferred hardware registers
72 for each pseudo-register.
73
74 4. Sort a table of the allocnos into order of
75 desirability of the variables.
76
77 5. Allocate the variables in that order; each if possible into
78 a preferred register, else into another register. */
79 \f
/* Number of pseudo-registers which are candidates for allocation.  */

static int max_allocno;

/* Indexed by (pseudo) reg number, gives the allocno, or -1
   for pseudo registers which are not to be allocated.  */

static int *reg_allocno;

/* Per-allocno bookkeeping.  One record per allocation candidate;
   several pseudo regs may map to the same allocno via reg_may_share,
   in which case the counts below accumulate over all of them.  */
struct allocno
{
  /* (Representative) pseudo register number for this allocno.  */
  int reg;
  /* Gives the number of consecutive hard registers needed by that
     pseudo reg.  */
  int size;

  /* Number of calls crossed by each allocno.  */
  int calls_crossed;

  /* Number of refs to each allocno.  */
  int n_refs;

  /* Frequency of uses of each allocno.  */
  int freq;

  /* Guess at live length of each allocno.
     This is actually the max of the live lengths of the regs.  */
  int live_length;

  /* Set of hard regs conflicting with allocno N.  */

  HARD_REG_SET hard_reg_conflicts;

  /* Set of hard regs preferred by allocno N.
     This is used to make allocnos go into regs that are copied to or from them,
     when possible, to reduce register shuffling.  */

  HARD_REG_SET hard_reg_preferences;

  /* Similar, but just counts register preferences made in simple copy
     operations, rather than arithmetic.  These are given priority because
     we can always eliminate an insn by using these, but using a register
     in the above list won't always eliminate an insn.  */

  HARD_REG_SET hard_reg_copy_preferences;

  /* Similar to hard_reg_preferences, but includes bits for subsequent
     registers when an allocno is multi-word.  The above variable is used for
     allocation while this is used to build reg_someone_prefers, below.  */

  HARD_REG_SET hard_reg_full_preferences;

  /* Set of hard registers that some later allocno has a preference for.  */

  HARD_REG_SET regs_someone_prefers;

#ifdef STACK_REGS
  /* Set to true if allocno can't be allocated in the stack register.  */
  bool no_stack_reg;
#endif
};

/* Array of all allocno records, indexed by allocno number (0..max_allocno-1).  */
static struct allocno *allocno;

/* A vector of the integers from 0 to max_allocno-1,
   sorted in the order of first-to-be-allocated first.  */

static int *allocno_order;

/* Indexed by (pseudo) reg number, gives the number of another
   lower-numbered pseudo reg which can share a hard reg with this pseudo
   *even if the two pseudos would otherwise appear to conflict*.  */

static int *reg_may_share;

/* Define the number of bits in each element of `conflicts' and what
   type that element has.  We use the largest integer format on the
   host machine.  */

#define INT_BITS HOST_BITS_PER_WIDE_INT
#define INT_TYPE HOST_WIDE_INT

/* max_allocno by max_allocno array of bits,
   recording whether two allocno's conflict (can't go in the same
   hardware register).

   `conflicts' is symmetric after the call to mirror_conflicts.  */

static INT_TYPE *conflicts;

/* Number of ints required to hold max_allocno bits.
   This is the length of a row in `conflicts'.  */

static int allocno_row_words;

/* Two macros to test or store 1 in an element of `conflicts'.  */

#define CONFLICTP(I, J) \
 (conflicts[(I) * allocno_row_words + (unsigned) (J) / INT_BITS]	\
  & ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))

/* For any allocno set in ALLOCNO_SET, set ALLOCNO to that allocno,
   and execute CODE.
   Scans the bit vector word by word, shifting each word right one bit
   at a time; ALLOCNO tracks the bit index of the bit currently tested.  */
#define EXECUTE_IF_SET_IN_ALLOCNO_SET(ALLOCNO_SET, ALLOCNO, CODE)	\
do {									\
  int i_;								\
  int allocno_;								\
  INT_TYPE *p_ = (ALLOCNO_SET);						\
									\
  for (i_ = allocno_row_words - 1, allocno_ = 0; i_ >= 0;		\
       i_--, allocno_ += INT_BITS)					\
    {									\
      unsigned INT_TYPE word_ = (unsigned INT_TYPE) *p_++;		\
									\
      for ((ALLOCNO) = allocno_; word_; word_ >>= 1, (ALLOCNO)++)	\
	{								\
	  if (word_ & 1)						\
	    {CODE;}							\
	}								\
    }									\
} while (0)

/* This doesn't work for non-GNU C due to the way CODE is macro expanded.  */
#if 0
/* For any allocno that conflicts with IN_ALLOCNO, set OUT_ALLOCNO to
   the conflicting allocno, and execute CODE.  This macro assumes that
   mirror_conflicts has been run.  */
#define EXECUTE_IF_CONFLICT(IN_ALLOCNO, OUT_ALLOCNO, CODE)\
  EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + (IN_ALLOCNO) * allocno_row_words,\
				 OUT_ALLOCNO, (CODE))
#endif

/* Set of hard regs currently live (during scan of all insns).  */

static HARD_REG_SET hard_regs_live;

/* Set of registers that global-alloc isn't supposed to use.  */

static HARD_REG_SET no_global_alloc_regs;

/* Set of registers used so far.  */

static HARD_REG_SET regs_used_so_far;

/* Number of refs to each hard reg, as used by local alloc.
   It is zero for a reg that contains global pseudos or is explicitly used.  */

static int local_reg_n_refs[FIRST_PSEUDO_REGISTER];

/* Frequency of uses of given hard reg.  */
static int local_reg_freq[FIRST_PSEUDO_REGISTER];

/* Guess at live length of each hard reg, as used by local alloc.
   This is actually the sum of the live lengths of the specific regs.  */

static int local_reg_live_length[FIRST_PSEUDO_REGISTER];

/* Set to 1 a bit in a vector TABLE of HARD_REG_SETs, for vector
   element I, and hard register number J.  */

#define SET_REGBIT(TABLE, I, J)  SET_HARD_REG_BIT (allocno[I].TABLE, J)

/* Bit mask for allocnos live at current point in the scan.  */

static INT_TYPE *allocnos_live;

/* Test, set or clear bit number I in allocnos_live,
   a bit vector indexed by allocno.  */

#define SET_ALLOCNO_LIVE(I)				\
  (allocnos_live[(unsigned) (I) / INT_BITS]		\
     |= ((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))

#define CLEAR_ALLOCNO_LIVE(I)				\
  (allocnos_live[(unsigned) (I) / INT_BITS]		\
     &= ~((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))

/* This is turned off because it doesn't work right for DImode.
   (And it is only used for DImode, so the other cases are worthless.)
   The problem is that it isn't true that there is NO possibility of conflict;
   only that there is no conflict if the two pseudos get the exact same regs.
   If they were allocated with a partial overlap, there would be a conflict.
   We can't safely turn off the conflict unless we have another way to
   prevent the partial overlap.

   Idea: change hard_reg_conflicts so that instead of recording which
   hard regs the allocno may not overlap, it records where the allocno
   may not start.  Change both where it is used and where it is updated.
   Then there is a way to record that (reg:DI 108) may start at 10
   but not at 9 or 11.  There is still the question of how to record
   this semi-conflict between two pseudos.  */
#if 0
/* Reg pairs for which conflict after the current insn
   is inhibited by a REG_NO_CONFLICT note.
   If the table gets full, we ignore any other notes--that is conservative.  */
#define NUM_NO_CONFLICT_PAIRS 4
/* Number of pairs in use in this insn.  */
int n_no_conflict_pairs;
static struct { int allocno1, allocno2;}
  no_conflict_pairs[NUM_NO_CONFLICT_PAIRS];
#endif /* 0 */

/* Record all regs that are set in any one insn.
   Communication from mark_reg_{store,clobber} and global_conflicts.  */

static rtx *regs_set;
static int n_regs_set;

/* All registers that can be eliminated.  */

static HARD_REG_SET eliminable_regset;

static int allocno_compare (const void *, const void *);
static void global_conflicts (void);
static void mirror_conflicts (void);
static void expand_preferences (void);
static void prune_preferences (void);
static void find_reg (int, HARD_REG_SET, int, int, int);
static void record_one_conflict (int);
static void record_conflicts (int *, int);
static void mark_reg_store (rtx, rtx, void *);
static void mark_reg_clobber (rtx, rtx, void *);
static void mark_reg_conflicts (rtx);
static void mark_reg_death (rtx);
static void mark_reg_live_nc (int, enum machine_mode);
static void set_preference (rtx, rtx);
static void dump_conflicts (FILE *);
static void reg_becomes_live (rtx, rtx, void *);
static void reg_dies (int, enum machine_mode, struct insn_chain *);

static void allocate_bb_info (void);
static void free_bb_info (void);
static void calculate_local_reg_bb_info (void);
static void set_up_bb_rts_numbers (void);
static int rpost_cmp (const void *, const void *);
static bool modify_bb_reg_pav (basic_block, basic_block, bool);
static void calculate_reg_pav (void);
static void make_accurate_live_analysis (void);
318
319 \f
320
321 /* Perform allocation of pseudo-registers not allocated by local_alloc.
322 FILE is a file to output debugging information on,
323 or zero if such output is not desired.
324
325 Return value is nonzero if reload failed
326 and we must not do any more for this function. */
327
int
global_alloc (FILE *file)
{
  int retval;
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  /* Nonzero if the frame pointer must be kept; used to decide which
     eliminable registers may still be handed out.  */
  int need_fp
    = (! flag_omit_frame_pointer
       || (current_function_calls_alloca && EXIT_IGNORE_STACK)
       || FRAME_POINTER_REQUIRED);

  size_t i;
  rtx x;

  make_accurate_live_analysis ();

  max_allocno = 0;

  /* A machine may have certain hard registers that
     are safe to use only within a basic block.  */

  CLEAR_HARD_REG_SET (no_global_alloc_regs);

  /* Build the regset of all eliminable registers and show we can't use those
     that we already know won't be eliminated.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < ARRAY_SIZE (eliminables); i++)
    {
      bool cannot_elim
	= (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
	   || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp));

      /* A register named in an asm clobber must not be treated as
	 eliminable; if elimination is also impossible, that is an error.  */
      if (!regs_asm_clobbered[eliminables[i].from])
	{
	  SET_HARD_REG_BIT (eliminable_regset, eliminables[i].from);

	  if (cannot_elim)
	    SET_HARD_REG_BIT (no_global_alloc_regs, eliminables[i].from);
	}
      else if (cannot_elim)
	error ("%s cannot be used in asm here",
	       reg_names[eliminables[i].from]);
      else
	regs_ever_live[eliminables[i].from] = 1;
    }
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  /* On targets with a soft frame pointer, the hard frame pointer gets
     the same eliminability treatment as the entries above.  */
  if (!regs_asm_clobbered[HARD_FRAME_POINTER_REGNUM])
    {
      SET_HARD_REG_BIT (eliminable_regset, HARD_FRAME_POINTER_REGNUM);
      if (need_fp)
	SET_HARD_REG_BIT (no_global_alloc_regs, HARD_FRAME_POINTER_REGNUM);
    }
  else if (need_fp)
    error ("%s cannot be used in asm here",
	   reg_names[HARD_FRAME_POINTER_REGNUM]);
  else
    regs_ever_live[HARD_FRAME_POINTER_REGNUM] = 1;
#endif

#else
  /* No ELIMINABLE_REGS: only the frame pointer itself is eliminable.  */
  if (!regs_asm_clobbered[FRAME_POINTER_REGNUM])
    {
      SET_HARD_REG_BIT (eliminable_regset, FRAME_POINTER_REGNUM);
      if (need_fp)
	SET_HARD_REG_BIT (no_global_alloc_regs, FRAME_POINTER_REGNUM);
    }
  else if (need_fp)
    error ("%s cannot be used in asm here", reg_names[FRAME_POINTER_REGNUM]);
  else
    regs_ever_live[FRAME_POINTER_REGNUM] = 1;
#endif

  /* Track which registers have already been used.  Start with registers
     explicitly in the rtl, then registers allocated by local register
     allocation.  */

  CLEAR_HARD_REG_SET (regs_used_so_far);
#ifdef LEAF_REGISTERS
  /* If we are doing the leaf function optimization, and this is a leaf
     function, it means that the registers that take work to save are those
     that need a register window.  So prefer the ones that can be used in
     a leaf function.  */
  {
    const char *cheap_regs;
    const char *const leaf_regs = LEAF_REGISTERS;

    if (only_leaf_regs_used () && leaf_function_p ())
      cheap_regs = leaf_regs;
    else
      cheap_regs = call_used_regs;
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (regs_ever_live[i] || cheap_regs[i])
	SET_HARD_REG_BIT (regs_used_so_far, i);
  }
#else
  /* We consider registers that do not have to be saved over calls as if
     they were already used since there is no cost in using them.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i] || call_used_regs[i])
      SET_HARD_REG_BIT (regs_used_so_far, i);
#endif

  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_renumber[i] >= 0)
      SET_HARD_REG_BIT (regs_used_so_far, reg_renumber[i]);

  /* Establish mappings from register number to allocation number
     and vice versa.  In the process, count the allocnos.  */

  reg_allocno = xmalloc (max_regno * sizeof (int));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    reg_allocno[i] = -1;

  /* Initialize the shared-hard-reg mapping
     from the list of pairs that may share.
     regs_may_share is a list of (reg1 . (reg2 . rest)) pairs; the
     higher-numbered reg of each pair points at the lower one.  */
  reg_may_share = xcalloc (max_regno, sizeof (int));
  for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
    {
      int r1 = REGNO (XEXP (x, 0));
      int r2 = REGNO (XEXP (XEXP (x, 1), 0));
      if (r1 > r2)
	reg_may_share[r1] = r2;
      else
	reg_may_share[r2] = r1;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    /* Note that reg_live_length[i] < 0 indicates a "constant" reg
       that we are supposed to refrain from putting in a hard reg.
       -2 means do make an allocno but don't allocate it.  */
    if (REG_N_REFS (i) != 0 && REG_LIVE_LENGTH (i) != -1
	/* Don't allocate pseudos that cross calls,
	   if this function receives a nonlocal goto.  */
	&& (! current_function_has_nonlocal_label
	    || REG_N_CALLS_CROSSED (i) == 0))
      {
	/* If a lower-numbered sharing partner already has an allocno,
	   reuse it so the pair can end up in the same hard reg.  */
	if (reg_renumber[i] < 0 && reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
	  reg_allocno[i] = reg_allocno[reg_may_share[i]];
	else
	  reg_allocno[i] = max_allocno++;
	if (REG_LIVE_LENGTH (i) == 0)
	  abort ();
      }
    else
      reg_allocno[i] = -1;

  allocno = xcalloc (max_allocno, sizeof (struct allocno));

  /* Accumulate per-allocno statistics; shared allocnos sum the counts
     of all the pseudos mapped to them.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_allocno[i] >= 0)
      {
	int num = reg_allocno[i];
	allocno[num].reg = i;
	allocno[num].size = PSEUDO_REGNO_SIZE (i);
	allocno[num].calls_crossed += REG_N_CALLS_CROSSED (i);
	allocno[num].n_refs += REG_N_REFS (i);
	allocno[num].freq += REG_FREQ (i);
	if (allocno[num].live_length < REG_LIVE_LENGTH (i))
	  allocno[num].live_length = REG_LIVE_LENGTH (i);
      }

  /* Calculate amount of usage of each hard reg by pseudos
     allocated by local-alloc.  This is to see if we want to
     override it.  */
  memset (local_reg_live_length, 0, sizeof local_reg_live_length);
  memset (local_reg_n_refs, 0, sizeof local_reg_n_refs);
  memset (local_reg_freq, 0, sizeof local_reg_freq);
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_renumber[i] >= 0)
      {
	int regno = reg_renumber[i];
	int endregno = regno + hard_regno_nregs[regno][PSEUDO_REGNO_MODE (i)];
	int j;

	for (j = regno; j < endregno; j++)
	  {
	    local_reg_n_refs[j] += REG_N_REFS (i);
	    local_reg_freq[j] += REG_FREQ (i);
	    local_reg_live_length[j] += REG_LIVE_LENGTH (i);
	  }
      }

  /* We can't override local-alloc for a reg used not just by local-alloc.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i])
      local_reg_n_refs[i] = 0, local_reg_freq[i] = 0;

  allocno_row_words = (max_allocno + INT_BITS - 1) / INT_BITS;

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  Some examples were > 2Mb in size.  */
  conflicts = xcalloc (max_allocno * allocno_row_words, sizeof (INT_TYPE));

  allocnos_live = xmalloc (allocno_row_words * sizeof (INT_TYPE));

  /* If there is work to be done (at least one reg to allocate),
     perform global conflict analysis and allocate the regs.  */

  if (max_allocno > 0)
    {
      /* Scan all the insns and compute the conflicts among allocnos
	 and between allocnos and hard regs.  */

      global_conflicts ();

      mirror_conflicts ();

      /* Eliminate conflicts between pseudos and eliminable registers.  If
	 the register is not eliminated, the pseudo won't really be able to
	 live in the eliminable register, so the conflict doesn't matter.
	 If we do eliminate the register, the conflict will no longer exist.
	 So in either case, we can ignore the conflict.  Likewise for
	 preferences.  */

      for (i = 0; i < (size_t) max_allocno; i++)
	{
	  AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_conflicts,
				  eliminable_regset);
	  AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_copy_preferences,
				  eliminable_regset);
	  AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_preferences,
				  eliminable_regset);
	}

      /* Try to expand the preferences by merging them between allocnos.  */

      expand_preferences ();

      /* Determine the order to allocate the remaining pseudo registers.  */

      allocno_order = xmalloc (max_allocno * sizeof (int));
      for (i = 0; i < (size_t) max_allocno; i++)
	allocno_order[i] = i;

      /* Default the size to 1, since allocno_compare uses it to divide by.
	 Also convert allocno_live_length of zero to -1.  A length of zero
	 can occur when all the registers for that allocno have reg_live_length
	 equal to -2.  In this case, we want to make an allocno, but not
	 allocate it.  So avoid the divide-by-zero and set it to a low
	 priority.  */

      for (i = 0; i < (size_t) max_allocno; i++)
	{
	  if (allocno[i].size == 0)
	    allocno[i].size = 1;
	  if (allocno[i].live_length == 0)
	    allocno[i].live_length = -1;
	}

      qsort (allocno_order, max_allocno, sizeof (int), allocno_compare);

      prune_preferences ();

      if (file)
	dump_conflicts (file);

      /* Try allocating them, one by one, in that order,
	 except for parameters marked with reg_live_length[regno] == -2.  */

      for (i = 0; i < (size_t) max_allocno; i++)
	if (reg_renumber[allocno[allocno_order[i]].reg] < 0
	    && REG_LIVE_LENGTH (allocno[allocno_order[i]].reg) >= 0)
	  {
	    /* If we have more than one register class,
	       first try allocating in the class that is cheapest
	       for this pseudo-reg.  If that fails, try any reg.  */
	    if (N_REG_CLASSES > 1)
	      {
		find_reg (allocno_order[i], 0, 0, 0, 0);
		if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
		  continue;
	      }
	    if (reg_alternate_class (allocno[allocno_order[i]].reg) != NO_REGS)
	      find_reg (allocno_order[i], 0, 1, 0, 0);
	  }

      free (allocno_order);
    }

  /* Do the reloads now while the allocno data still exist, so that we can
     try to assign new hard regs to any pseudo regs that are spilled.  */

#if 0 /* We need to eliminate regs even if there is no rtl code,
	 for the sake of debugging information.  */
  if (n_basic_blocks > 0)
#endif
    {
      build_insn_chain (get_insns ());
      retval = reload (get_insns (), 1);
    }

  /* Clean up.  */
  free (reg_allocno);
  free (reg_may_share);
  free (allocno);
  free (conflicts);
  free (allocnos_live);

  return retval;
}
631
632 /* Sort predicate for ordering the allocnos.
633 Returns -1 (1) if *v1 should be allocated before (after) *v2. */
634
635 static int
636 allocno_compare (const void *v1p, const void *v2p)
637 {
638 int v1 = *(const int *)v1p, v2 = *(const int *)v2p;
639 /* Note that the quotient will never be bigger than
640 the value of floor_log2 times the maximum number of
641 times a register can occur in one insn (surely less than 100)
642 weighted by the frequency (maximally REG_FREQ_MAX).
643 Multiplying this by 10000/REG_FREQ_MAX can't overflow. */
644 int pri1
645 = (((double) (floor_log2 (allocno[v1].n_refs) * allocno[v1].freq)
646 / allocno[v1].live_length)
647 * (10000 / REG_FREQ_MAX) * allocno[v1].size);
648 int pri2
649 = (((double) (floor_log2 (allocno[v2].n_refs) * allocno[v2].freq)
650 / allocno[v2].live_length)
651 * (10000 / REG_FREQ_MAX) * allocno[v2].size);
652 if (pri2 - pri1)
653 return pri2 - pri1;
654
655 /* If regs are equally good, sort by allocno,
656 so that the results of qsort leave nothing to chance. */
657 return v1 - v2;
658 }
659 \f
660 /* Scan the rtl code and record all conflicts and register preferences in the
661 conflict matrices and preference tables. */
662
static void
global_conflicts (void)
{
  int i;
  basic_block b;
  rtx insn;
  int *block_start_allocnos;

  /* Make a vector that mark_reg_{store,clobber} will store in.  */
  regs_set = xmalloc (max_parallel * sizeof (rtx) * 2);

  block_start_allocnos = xmalloc (max_allocno * sizeof (int));

  FOR_EACH_BB (b)
    {
      /* Reset the live-allocno bit vector for this block.  */
      memset (allocnos_live, 0, allocno_row_words * sizeof (INT_TYPE));

      /* Initialize table of registers currently live
	 to the state at the beginning of this basic block.
	 This also marks the conflicts among hard registers
	 and any allocnos that are live.

	 For pseudo-regs, there is only one bit for each one
	 no matter how many hard regs it occupies.
	 This is ok; we know the size from PSEUDO_REGNO_SIZE.
	 For explicit hard regs, we cannot know the size that way
	 since one hard reg can be used with various sizes.
	 Therefore, we must require that all the hard regs
	 implicitly live as part of a multi-word hard reg
	 are explicitly marked in basic_block_live_at_start.  */

      {
	regset old = b->global_live_at_start;
	int ax = 0;

	REG_SET_TO_HARD_REG_SET (hard_regs_live, old);
	EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i,
				   {
				     int a = reg_allocno[i];
				     if (a >= 0)
				       {
					 SET_ALLOCNO_LIVE (a);
					 block_start_allocnos[ax++] = a;
				       }
				     /* A live pseudo with no allocno was
					assigned a hard reg by local-alloc;
					mark those hard regs live instead.  */
				     else if ((a = reg_renumber[i]) >= 0)
				       mark_reg_live_nc
					 (a, PSEUDO_REGNO_MODE (i));
				   });

	/* Record that each allocno now live conflicts with each hard reg
	   now live.

	   It is not necessary to mark any conflicts between pseudos as
	   this point, even for pseudos which are live at the start of
	   the basic block.

	   Given two pseudos X and Y and any point in the CFG P.

	   On any path to point P where X and Y are live one of the
	   following conditions must be true:

	   1. X is live at some instruction on the path that
	   evaluates Y.

	   2. Y is live at some instruction on the path that
	   evaluates X.

	   3. Either X or Y is not evaluated on the path to P
	   (ie it is used uninitialized) and thus the
	   conflict can be ignored.

	   In cases #1 and #2 the conflict will be recorded when we
	   scan the instruction that makes either X or Y become live.  */
	record_conflicts (block_start_allocnos, ax);

	/* Pseudos can't go in stack regs at the start of a basic block that
	   is reached by an abnormal edge.  Likewise for call clobbered regs,
	   because caller-save, fixup_abnormal_edges, and possibly
	   the table driven EH machinery are not quite ready to handle such
	   regs live across such edges.  */
	{
	  edge e;

	  for (e = b->pred; e ; e = e->pred_next)
	    if (e->flags & EDGE_ABNORMAL)
	      break;

	  if (e != NULL)
	    {
#ifdef STACK_REGS
	      /* NB: ax is reused here as the iteration variable of the
		 macro and the loop below; its block-start count is no
		 longer needed at this point.  */
	      EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, ax,
					     {
					       allocno[ax].no_stack_reg = 1;
					     });
	      for (ax = FIRST_STACK_REG; ax <= LAST_STACK_REG; ax++)
		record_one_conflict (ax);
#endif

	      /* No need to record conflicts for call clobbered regs if we have
		 nonlocal labels around, as we don't ever try to allocate such
		 regs in this case.  */
	      if (! current_function_has_nonlocal_label)
		for (ax = 0; ax < FIRST_PSEUDO_REGISTER; ax++)
		  if (call_used_regs [ax])
		    record_one_conflict (ax);
	    }
	}
      }

      insn = BB_HEAD (b);

      /* Scan the code of this basic block, noting which allocnos
	 and hard regs are born or die.  When one is born,
	 record a conflict with all others currently live.  */

      while (1)
	{
	  RTX_CODE code = GET_CODE (insn);
	  rtx link;

	  /* Make regs_set an empty set.  */

	  n_regs_set = 0;

	  if (code == INSN || code == CALL_INSN || code == JUMP_INSN)
	    {

#if 0
	      int i = 0;
	      for (link = REG_NOTES (insn);
		   link && i < NUM_NO_CONFLICT_PAIRS;
		   link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_NO_CONFLICT)
		  {
		    no_conflict_pairs[i].allocno1
		      = reg_allocno[REGNO (SET_DEST (PATTERN (insn)))];
		    no_conflict_pairs[i].allocno2
		      = reg_allocno[REGNO (XEXP (link, 0))];
		    i++;
		  }
#endif /* 0 */

	      /* Mark any registers clobbered by INSN as live,
		 so they conflict with the inputs.  */

	      note_stores (PATTERN (insn), mark_reg_clobber, NULL);

	      /* Mark any registers dead after INSN as dead now.  */

	      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD)
		  mark_reg_death (XEXP (link, 0));

	      /* Mark any registers set in INSN as live,
		 and mark them as conflicting with all other live regs.
		 Clobbers are processed again, so they conflict with
		 the registers that are set.  */

	      note_stores (PATTERN (insn), mark_reg_store, NULL);

#ifdef AUTO_INC_DEC
	      /* Auto-increment side effects set a register too.  */
	      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_INC)
		  mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
#endif

	      /* If INSN has multiple outputs, then any reg that dies here
		 and is used inside of an output
		 must conflict with the other outputs.

		 It is unsafe to use !single_set here since it will ignore an
		 unused output.  Just because an output is unused does not mean
		 the compiler can assume the side effect will not occur.
		 Consider if REG appears in the address of an output and we
		 reload the output.  If we allocate REG to the same hard
		 register as an unused output we could set the hard register
		 before the output reload insn.  */
	      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
		for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		  if (REG_NOTE_KIND (link) == REG_DEAD)
		    {
		      int used_in_output = 0;
		      int i;
		      rtx reg = XEXP (link, 0);

		      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
			{
			  rtx set = XVECEXP (PATTERN (insn), 0, i);
			  if (GET_CODE (set) == SET
			      && !REG_P (SET_DEST (set))
			      && !rtx_equal_p (reg, SET_DEST (set))
			      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
			    used_in_output = 1;
			}
		      if (used_in_output)
			mark_reg_conflicts (reg);
		    }

	      /* Mark any registers set in INSN and then never used.  */

	      while (n_regs_set-- > 0)
		{
		  rtx note = find_regno_note (insn, REG_UNUSED,
					      REGNO (regs_set[n_regs_set]));
		  if (note)
		    mark_reg_death (XEXP (note, 0));
		}
	    }

	  if (insn == BB_END (b))
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Clean up.  */
  free (block_start_allocnos);
  free (regs_set);
}
882 /* Expand the preference information by looking for cases where one allocno
883 dies in an insn that sets an allocno. If those two allocnos don't conflict,
884 merge any preferences between those allocnos. */
885
static void
expand_preferences (void)
{
  rtx insn;
  rtx link;
  rtx set;

  /* We only try to handle the most common cases here.  Most of the cases
     where this wins are reg-reg copies.  */

  /* Look for insns that set an allocno while another allocno dies there;
     if the two don't conflict (CONFLICTP assumes mirror_conflicts has
     already made the matrix symmetric), they may end up in the same hard
     reg, so merge their preference sets both ways.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (set = single_set (insn)) != 0
	&& REG_P (SET_DEST (set))
	&& reg_allocno[REGNO (SET_DEST (set))] >= 0)
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	if (REG_NOTE_KIND (link) == REG_DEAD
	    && REG_P (XEXP (link, 0))
	    && reg_allocno[REGNO (XEXP (link, 0))] >= 0
	    && ! CONFLICTP (reg_allocno[REGNO (SET_DEST (set))],
			    reg_allocno[REGNO (XEXP (link, 0))]))
	  {
	    int a1 = reg_allocno[REGNO (SET_DEST (set))];
	    int a2 = reg_allocno[REGNO (XEXP (link, 0))];

	    /* Copy preferences are merged only when the dying reg is the
	       actual source of the copy, i.e. this is a plain reg-reg move.  */
	    if (XEXP (link, 0) == SET_SRC (set))
	      {
		IOR_HARD_REG_SET (allocno[a1].hard_reg_copy_preferences,
				  allocno[a2].hard_reg_copy_preferences);
		IOR_HARD_REG_SET (allocno[a2].hard_reg_copy_preferences,
				  allocno[a1].hard_reg_copy_preferences);
	      }

	    IOR_HARD_REG_SET (allocno[a1].hard_reg_preferences,
			      allocno[a2].hard_reg_preferences);
	    IOR_HARD_REG_SET (allocno[a2].hard_reg_preferences,
			      allocno[a1].hard_reg_preferences);
	    IOR_HARD_REG_SET (allocno[a1].hard_reg_full_preferences,
			      allocno[a2].hard_reg_full_preferences);
	    IOR_HARD_REG_SET (allocno[a2].hard_reg_full_preferences,
			      allocno[a1].hard_reg_full_preferences);
	  }
}
929 \f
/* Prune the preferences for global registers to exclude registers that cannot
   be used.

   Compute `regs_someone_prefers', which is a bitmask of the hard registers
   that are preferred by conflicting registers of lower priority.  If possible,
   we will avoid using these registers.  */

static void
prune_preferences (void)
{
  int i;
  int num;
  /* Maps an allocno number back to its index in allocno_order, i.e. its
     allocation priority (a smaller index means higher priority).  */
  int *allocno_to_order = xmalloc (max_allocno * sizeof (int));

  /* Scan least most important to most important.
     For each allocno, remove from preferences registers that cannot be used,
     either because of conflicts or register type.  Then compute all registers
     preferred by each lower-priority register that conflicts.  */

  for (i = max_allocno - 1; i >= 0; i--)
    {
      HARD_REG_SET temp;

      num = allocno_order[i];
      allocno_to_order[num] = i;
      /* TEMP accumulates all hard regs this allocno can never use:
	 its explicit conflicts...  */
      COPY_HARD_REG_SET (temp, allocno[num].hard_reg_conflicts);

      /* ...fixed regs, or call-used regs if the allocno crosses calls...  */
      if (allocno[num].calls_crossed == 0)
	IOR_HARD_REG_SET (temp, fixed_reg_set);
      else
	IOR_HARD_REG_SET (temp, call_used_reg_set);

      /* ...and everything outside its preferred register class.  */
      IOR_COMPL_HARD_REG_SET
	(temp,
	 reg_class_contents[(int) reg_preferred_class (allocno[num].reg)]);

      /* Strip the unusable regs from all three preference sets.  */
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, temp);
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, temp);
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_full_preferences, temp);
    }

  for (i = max_allocno - 1; i >= 0; i--)
    {
      /* Merge in the preferences of lower-priority registers (they have
	 already been pruned).  If we also prefer some of those registers,
	 don't exclude them unless we are of a smaller size (in which case
	 we want to give the lower-priority allocno the first chance for
	 these registers).  */
      HARD_REG_SET temp, temp2;
      int allocno2;

      num = allocno_order[i];

      CLEAR_HARD_REG_SET (temp);
      CLEAR_HARD_REG_SET (temp2);

      /* TEMP collects the full preferences of conflicting lower-priority
	 allocnos of no greater size; TEMP2 those of larger size.  */
      EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + num * allocno_row_words,
				     allocno2,
				     {
				       if (allocno_to_order[allocno2] > i)
					 {
					   if (allocno[allocno2].size <= allocno[num].size)
					     IOR_HARD_REG_SET (temp,
							       allocno[allocno2].hard_reg_full_preferences);
					   else
					     IOR_HARD_REG_SET (temp2,
							       allocno[allocno2].hard_reg_full_preferences);
					 }
				     });

      /* Regs we prefer ourselves are only avoided when preferred by a
	 larger conflicting allocno (TEMP2); see the comment above.  */
      AND_COMPL_HARD_REG_SET (temp, allocno[num].hard_reg_full_preferences);
      IOR_HARD_REG_SET (temp, temp2);
      COPY_HARD_REG_SET (allocno[num].regs_someone_prefers, temp);
    }
  free (allocno_to_order);
}
1006 \f
/* Assign a hard register to allocno NUM; look for one that is the beginning
   of a long enough stretch of hard regs none of which conflicts with ALLOCNO.
   The registers marked in PREFREGS are tried first.

   LOSERS, if nonzero, is a HARD_REG_SET indicating registers that cannot
   be used for this allocation.

   If ALT_REGS_P is zero, consider only the preferred class of ALLOCNO's reg.
   Otherwise ignore that preferred class and use the alternate class.

   If ACCEPT_CALL_CLOBBERED is nonzero, accept a call-clobbered hard reg that
   will have to be saved and restored at calls.

   RETRYING is nonzero if this is called from retry_global_alloc.

   If we find one, record it in reg_renumber.
   If not, do nothing.  */

static void
find_reg (int num, HARD_REG_SET losers, int alt_regs_p, int accept_call_clobbered, int retrying)
{
  int i, best_reg, pass;
  /* USED is the exclusion set for the current search pass.  USED1 includes
     the allocno's conflicts; USED2 does not (it is kept for the
     evict-local-alloc fallback below).  */
  HARD_REG_SET used, used1, used2;

  enum reg_class class = (alt_regs_p
			  ? reg_alternate_class (allocno[num].reg)
			  : reg_preferred_class (allocno[num].reg));
  enum machine_mode mode = PSEUDO_REGNO_MODE (allocno[num].reg);

  /* Start from the regs unusable regardless of conflicts: fixed regs,
     or call-used regs when the allocno is live across calls.  */
  if (accept_call_clobbered)
    COPY_HARD_REG_SET (used1, call_fixed_reg_set);
  else if (allocno[num].calls_crossed == 0)
    COPY_HARD_REG_SET (used1, fixed_reg_set);
  else
    COPY_HARD_REG_SET (used1, call_used_reg_set);

  /* Some registers should not be allocated in global-alloc.  */
  IOR_HARD_REG_SET (used1, no_global_alloc_regs);
  if (losers)
    IOR_HARD_REG_SET (used1, losers);

  /* Also exclude every register outside the class we may allocate from.  */
  IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) class]);
  COPY_HARD_REG_SET (used2, used1);

  IOR_HARD_REG_SET (used1, allocno[num].hard_reg_conflicts);

#ifdef CANNOT_CHANGE_MODE_CLASS
  cannot_change_mode_set_regs (&used1, mode, allocno[num].reg);
#endif

  /* Try each hard reg to see if it fits.  Do this in two passes.
     In the first pass, skip registers that are preferred by some other pseudo
     to give it a better chance of getting one of those registers.  Only if
     we can't get a register when excluding those do we take one of them.
     However, we never allocate a register for the first time in pass 0.  */

  COPY_HARD_REG_SET (used, used1);
  IOR_COMPL_HARD_REG_SET (used, regs_used_so_far);
  IOR_HARD_REG_SET (used, allocno[num].regs_someone_prefers);

  best_reg = -1;
  /* The outer loop exits early if pass 0 found a reg (I is then less than
     FIRST_PSEUDO_REGISTER because of the inner break).  */
  for (i = FIRST_PSEUDO_REGISTER, pass = 0;
       pass <= 1 && i >= FIRST_PSEUDO_REGISTER;
       pass++)
    {
      if (pass == 1)
	COPY_HARD_REG_SET (used, used1);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	{
#ifdef REG_ALLOC_ORDER
	  int regno = reg_alloc_order[i];
#else
	  int regno = i;
#endif
	  if (! TEST_HARD_REG_BIT (used, regno)
	      && HARD_REGNO_MODE_OK (regno, mode)
	      && (allocno[num].calls_crossed == 0
		  || accept_call_clobbered
		  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
	    {
	      int j;
	      int lim = regno + hard_regno_nregs[regno][mode];
	      /* Every hard reg the value would occupy must be free.  */
	      for (j = regno + 1;
		   (j < lim
		    && ! TEST_HARD_REG_BIT (used, j));
		   j++);
	      if (j == lim)
		{
		  best_reg = regno;
		  break;
		}
#ifndef REG_ALLOC_ORDER
	      i = j;			/* Skip starting points we know will lose */
#endif
	    }
	}
    }

  /* See if there is a preferred register with the same class as the register
     we allocated above.  Making this restriction prevents register
     preferencing from creating worse register allocation.

     Remove from the preferred registers and conflicting registers.  Note that
     additional conflicts may have been added after `prune_preferences' was
     called.

     First do this for those register with copy preferences, then all
     preferred registers.  */

  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, used);
  GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_copy_preferences,
			 reg_class_contents[(int) NO_REGS], no_copy_prefs);

  if (best_reg >= 0)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allocno[num].hard_reg_copy_preferences, i)
	    && HARD_REGNO_MODE_OK (i, mode)
	    && (allocno[num].calls_crossed == 0
		|| accept_call_clobbered
		|| ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
	    && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
		|| reg_class_subset_p (REGNO_REG_CLASS (i),
				       REGNO_REG_CLASS (best_reg))
		|| reg_class_subset_p (REGNO_REG_CLASS (best_reg),
				       REGNO_REG_CLASS (i))))
	  {
	    int j;
	    int lim = i + hard_regno_nregs[i][mode];
	    /* All constituent regs must be free and class-compatible with
	       the corresponding constituent regs of BEST_REG.  */
	    for (j = i + 1;
		 (j < lim
		  && ! TEST_HARD_REG_BIT (used, j)
		  && (REGNO_REG_CLASS (j)
		      == REGNO_REG_CLASS (best_reg + (j - i))
		      || reg_class_subset_p (REGNO_REG_CLASS (j),
					     REGNO_REG_CLASS (best_reg + (j - i)))
		      || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
					     REGNO_REG_CLASS (j))));
		 j++);
	    if (j == lim)
	      {
		best_reg = i;
		/* A copy preference wins outright; skip the plain
		   preference scan below.  */
		goto no_prefs;
	      }
	  }
    }
 no_copy_prefs:

  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, used);
  GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_preferences,
			 reg_class_contents[(int) NO_REGS], no_prefs);

  if (best_reg >= 0)
    {
      /* Same scan as above, but over the plain (non-copy) preferences.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allocno[num].hard_reg_preferences, i)
	    && HARD_REGNO_MODE_OK (i, mode)
	    && (allocno[num].calls_crossed == 0
		|| accept_call_clobbered
		|| ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
	    && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
		|| reg_class_subset_p (REGNO_REG_CLASS (i),
				       REGNO_REG_CLASS (best_reg))
		|| reg_class_subset_p (REGNO_REG_CLASS (best_reg),
				       REGNO_REG_CLASS (i))))
	  {
	    int j;
	    int lim = i + hard_regno_nregs[i][mode];
	    for (j = i + 1;
		 (j < lim
		  && ! TEST_HARD_REG_BIT (used, j)
		  && (REGNO_REG_CLASS (j)
		      == REGNO_REG_CLASS (best_reg + (j - i))
		      || reg_class_subset_p (REGNO_REG_CLASS (j),
					     REGNO_REG_CLASS (best_reg + (j - i)))
		      || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
					     REGNO_REG_CLASS (j))));
		 j++);
	    if (j == lim)
	      {
		best_reg = i;
		break;
	      }
	  }
    }
 no_prefs:

  /* If we haven't succeeded yet, try with caller-saves.
     We need not check to see if the current function has nonlocal
     labels because we don't put any pseudos that are live over calls in
     registers in that case.  */

  if (flag_caller_saves && best_reg < 0)
    {
      /* Did not find a register.  If it would be profitable to
	 allocate a call-clobbered register and save and restore it
	 around calls, do that.  */
      if (! accept_call_clobbered
	  && allocno[num].calls_crossed != 0
	  && CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
				     allocno[num].calls_crossed))
	{
	  HARD_REG_SET new_losers;
	  if (! losers)
	    CLEAR_HARD_REG_SET (new_losers);
	  else
	    COPY_HARD_REG_SET (new_losers, losers);

	  IOR_HARD_REG_SET(new_losers, losing_caller_save_reg_set);
	  /* Recurse, this time accepting call-clobbered registers.  */
	  find_reg (num, new_losers, alt_regs_p, 1, retrying);
	  if (reg_renumber[allocno[num].reg] >= 0)
	    {
	      caller_save_needed = 1;
	      return;
	    }
	}
    }

  /* If we haven't succeeded yet,
     see if some hard reg that conflicts with us
     was utilized poorly by local-alloc.
     If so, kick out the regs that were put there by local-alloc
     so we can use it instead.  */
  if (best_reg < 0 && !retrying
      /* Let's not bother with multi-reg allocnos.  */
      && allocno[num].size == 1)
    {
      /* Count from the end, to find the least-used ones first.  */
      for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	{
#ifdef REG_ALLOC_ORDER
	  int regno = reg_alloc_order[i];
#else
	  int regno = i;
#endif

	  if (local_reg_n_refs[regno] != 0
	      /* Don't use a reg no good for this pseudo.  */
	      && ! TEST_HARD_REG_BIT (used2, regno)
	      && HARD_REGNO_MODE_OK (regno, mode)
	      /* The code below assumes that we need only a single
		 register, but the check of allocno[num].size above
		 was not enough.  Sometimes we need more than one
		 register for a single-word value.  */
	      && hard_regno_nregs[regno][mode] == 1
	      && (allocno[num].calls_crossed == 0
		  || accept_call_clobbered
		  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
#ifdef CANNOT_CHANGE_MODE_CLASS
	      && ! invalid_mode_change_p (regno, REGNO_REG_CLASS (regno),
					  mode)
#endif
#ifdef STACK_REGS
	      && (!allocno[num].no_stack_reg
		  || regno < FIRST_STACK_REG || regno > LAST_STACK_REG)
#endif
	      )
	    {
	      /* We explicitly evaluate the divide results into temporary
		 variables so as to avoid excess precision problems that occur
		 on an i386-unknown-sysv4.2 (unixware) host.  */

	      double tmp1 = ((double) local_reg_freq[regno]
			    / local_reg_live_length[regno]);
	      double tmp2 = ((double) allocno[num].freq
			     / allocno[num].live_length);

	      if (tmp1 < tmp2)
		{
		  /* Hard reg REGNO was used less in total by local regs
		     than it would be used by this one allocno!  */
		  int k;
		  /* Deallocate every pseudo that local-alloc assigned to
		     a hard reg range overlapping REGNO.  */
		  for (k = 0; k < max_regno; k++)
		    if (reg_renumber[k] >= 0)
		      {
			int r = reg_renumber[k];
			int endregno
			  = r + hard_regno_nregs[r][PSEUDO_REGNO_MODE (k)];

			if (regno >= r && regno < endregno)
			  reg_renumber[k] = -1;
		      }

		  best_reg = regno;
		  break;
		}
	    }
	}
    }

  /* Did we find a register?  */

  if (best_reg >= 0)
    {
      int lim, j;
      HARD_REG_SET this_reg;

      /* Yes.  Record it as the hard register of this pseudo-reg.  */
      reg_renumber[allocno[num].reg] = best_reg;
      /* Also of any pseudo-regs that share with it.  */
      if (reg_may_share[allocno[num].reg])
	for (j = FIRST_PSEUDO_REGISTER; j < max_regno; j++)
	  if (reg_allocno[j] == num)
	    reg_renumber[j] = best_reg;

      /* Make a set of the hard regs being allocated.  */
      CLEAR_HARD_REG_SET (this_reg);
      lim = best_reg + hard_regno_nregs[best_reg][mode];
      for (j = best_reg; j < lim; j++)
	{
	  SET_HARD_REG_BIT (this_reg, j);
	  SET_HARD_REG_BIT (regs_used_so_far, j);
	  /* This is no longer a reg used just by local regs.  */
	  local_reg_n_refs[j] = 0;
	  local_reg_freq[j] = 0;
	}
      /* For each other pseudo-reg conflicting with this one,
	 mark it as conflicting with the hard regs this one occupies.  */
      lim = num;
      EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + lim * allocno_row_words, j,
				     {
				       IOR_HARD_REG_SET (allocno[j].hard_reg_conflicts, this_reg);
				     });
    }
}
1332 \f
1333 /* Called from `reload' to look for a hard reg to put pseudo reg REGNO in.
1334 Perhaps it had previously seemed not worth a hard reg,
1335 or perhaps its old hard reg has been commandeered for reloads.
1336 FORBIDDEN_REGS indicates certain hard regs that may not be used, even if
1337 they do not appear to be allocated.
1338 If FORBIDDEN_REGS is zero, no regs are forbidden. */
1339
1340 void
1341 retry_global_alloc (int regno, HARD_REG_SET forbidden_regs)
1342 {
1343 int alloc_no = reg_allocno[regno];
1344 if (alloc_no >= 0)
1345 {
1346 /* If we have more than one register class,
1347 first try allocating in the class that is cheapest
1348 for this pseudo-reg. If that fails, try any reg. */
1349 if (N_REG_CLASSES > 1)
1350 find_reg (alloc_no, forbidden_regs, 0, 0, 1);
1351 if (reg_renumber[regno] < 0
1352 && reg_alternate_class (regno) != NO_REGS)
1353 find_reg (alloc_no, forbidden_regs, 1, 0, 1);
1354
1355 /* If we found a register, modify the RTL for the register to
1356 show the hard register, and mark that register live. */
1357 if (reg_renumber[regno] >= 0)
1358 {
1359 REGNO (regno_reg_rtx[regno]) = reg_renumber[regno];
1360 mark_home_live (regno);
1361 }
1362 }
1363 }
1364 \f
1365 /* Record a conflict between register REGNO
1366 and everything currently live.
1367 REGNO must not be a pseudo reg that was allocated
1368 by local_alloc; such numbers must be translated through
1369 reg_renumber before calling here. */
1370
1371 static void
1372 record_one_conflict (int regno)
1373 {
1374 int j;
1375
1376 if (regno < FIRST_PSEUDO_REGISTER)
1377 /* When a hard register becomes live,
1378 record conflicts with live pseudo regs. */
1379 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, j,
1380 {
1381 SET_HARD_REG_BIT (allocno[j].hard_reg_conflicts, regno);
1382 });
1383 else
1384 /* When a pseudo-register becomes live,
1385 record conflicts first with hard regs,
1386 then with other pseudo regs. */
1387 {
1388 int ialloc = reg_allocno[regno];
1389 int ialloc_prod = ialloc * allocno_row_words;
1390
1391 IOR_HARD_REG_SET (allocno[ialloc].hard_reg_conflicts, hard_regs_live);
1392 for (j = allocno_row_words - 1; j >= 0; j--)
1393 conflicts[ialloc_prod + j] |= allocnos_live[j];
1394 }
1395 }
1396
1397 /* Record all allocnos currently live as conflicting
1398 with all hard regs currently live.
1399
1400 ALLOCNO_VEC is a vector of LEN allocnos, all allocnos that
1401 are currently live. Their bits are also flagged in allocnos_live. */
1402
1403 static void
1404 record_conflicts (int *allocno_vec, int len)
1405 {
1406 while (--len >= 0)
1407 IOR_HARD_REG_SET (allocno[allocno_vec[len]].hard_reg_conflicts,
1408 hard_regs_live);
1409 }
1410
/* If CONFLICTP (i, j) is true, make sure CONFLICTP (j, i) is also true.
   The conflicts matrix is a max_allocno x max_allocno bit matrix stored
   row-major in `conflicts', allocno_row_words words per row; this
   transposes every set bit onto its mirror position.  */
static void
mirror_conflicts (void)
{
  int i, j;
  int rw = allocno_row_words;
  /* Number of bits in one full row of the matrix.  */
  int rwb = rw * INT_BITS;
  /* P walks the matrix words in storage order; Q0, Q1, Q2 track the
     word that holds the mirrored (column) bit for the row being read.  */
  INT_TYPE *p = conflicts;
  INT_TYPE *q0 = conflicts, *q1, *q2;
  unsigned INT_TYPE mask;

  /* I only counts iterations down to zero; MASK is the single bit for
     the current allocno's column, shifted left each iteration.  */
  for (i = max_allocno - 1, mask = 1; i >= 0; i--, mask <<= 1)
    {
      if (! mask)
	{
	  /* MASK shifted out of the word; advance to the next word of
	     the column position and restart at bit 0.  */
	  mask = 1;
	  q0++;
	}
      /* Q1 steps down the matrix one full row (RWB bits) per word of
	 the current row read through P.  */
      for (j = allocno_row_words - 1, q1 = q0; j >= 0; j--, q1 += rwb)
	{
	  unsigned INT_TYPE word;

	  /* For each bit set in this word, set MASK in the word RW
	     further along per bit position (i.e. the transposed slot).  */
	  for (word = (unsigned INT_TYPE) *p++, q2 = q1; word;
	       word >>= 1, q2 += rw)
	    {
	      if (word & 1)
		*q2 |= mask;
	    }
	}
    }
}
1442 \f
1443 /* Handle the case where REG is set by the insn being scanned,
1444 during the forward scan to accumulate conflicts.
1445 Store a 1 in regs_live or allocnos_live for this register, record how many
1446 consecutive hardware registers it actually needs,
1447 and record a conflict with all other registers already live.
1448
1449 Note that even if REG does not remain alive after this insn,
1450 we must mark it here as live, to ensure a conflict between
1451 REG and any other regs set in this insn that really do live.
1452 This is because those other regs could be considered after this.
1453
1454 REG might actually be something other than a register;
1455 if so, we do nothing.
1456
1457 SETTER is 0 if this register was modified by an auto-increment (i.e.,
1458 a REG_INC note was found for it). */
1459
1460 static void
1461 mark_reg_store (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
1462 {
1463 int regno;
1464
1465 if (GET_CODE (reg) == SUBREG)
1466 reg = SUBREG_REG (reg);
1467
1468 if (!REG_P (reg))
1469 return;
1470
1471 regs_set[n_regs_set++] = reg;
1472
1473 if (setter && GET_CODE (setter) != CLOBBER)
1474 set_preference (reg, SET_SRC (setter));
1475
1476 regno = REGNO (reg);
1477
1478 /* Either this is one of the max_allocno pseudo regs not allocated,
1479 or it is or has a hardware reg. First handle the pseudo-regs. */
1480 if (regno >= FIRST_PSEUDO_REGISTER)
1481 {
1482 if (reg_allocno[regno] >= 0)
1483 {
1484 SET_ALLOCNO_LIVE (reg_allocno[regno]);
1485 record_one_conflict (regno);
1486 }
1487 }
1488
1489 if (reg_renumber[regno] >= 0)
1490 regno = reg_renumber[regno];
1491
1492 /* Handle hardware regs (and pseudos allocated to hard regs). */
1493 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1494 {
1495 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1496 while (regno < last)
1497 {
1498 record_one_conflict (regno);
1499 SET_HARD_REG_BIT (hard_regs_live, regno);
1500 regno++;
1501 }
1502 }
1503 }
1504 \f
1505 /* Like mark_reg_set except notice just CLOBBERs; ignore SETs. */
1506
1507 static void
1508 mark_reg_clobber (rtx reg, rtx setter, void *data)
1509 {
1510 if (GET_CODE (setter) == CLOBBER)
1511 mark_reg_store (reg, setter, data);
1512 }
1513
1514 /* Record that REG has conflicts with all the regs currently live.
1515 Do not mark REG itself as live. */
1516
1517 static void
1518 mark_reg_conflicts (rtx reg)
1519 {
1520 int regno;
1521
1522 if (GET_CODE (reg) == SUBREG)
1523 reg = SUBREG_REG (reg);
1524
1525 if (!REG_P (reg))
1526 return;
1527
1528 regno = REGNO (reg);
1529
1530 /* Either this is one of the max_allocno pseudo regs not allocated,
1531 or it is or has a hardware reg. First handle the pseudo-regs. */
1532 if (regno >= FIRST_PSEUDO_REGISTER)
1533 {
1534 if (reg_allocno[regno] >= 0)
1535 record_one_conflict (regno);
1536 }
1537
1538 if (reg_renumber[regno] >= 0)
1539 regno = reg_renumber[regno];
1540
1541 /* Handle hardware regs (and pseudos allocated to hard regs). */
1542 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1543 {
1544 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1545 while (regno < last)
1546 {
1547 record_one_conflict (regno);
1548 regno++;
1549 }
1550 }
1551 }
1552 \f
1553 /* Mark REG as being dead (following the insn being scanned now).
1554 Store a 0 in regs_live or allocnos_live for this register. */
1555
1556 static void
1557 mark_reg_death (rtx reg)
1558 {
1559 int regno = REGNO (reg);
1560
1561 /* Either this is one of the max_allocno pseudo regs not allocated,
1562 or it is a hardware reg. First handle the pseudo-regs. */
1563 if (regno >= FIRST_PSEUDO_REGISTER)
1564 {
1565 if (reg_allocno[regno] >= 0)
1566 CLEAR_ALLOCNO_LIVE (reg_allocno[regno]);
1567 }
1568
1569 /* For pseudo reg, see if it has been assigned a hardware reg. */
1570 if (reg_renumber[regno] >= 0)
1571 regno = reg_renumber[regno];
1572
1573 /* Handle hardware regs (and pseudos allocated to hard regs). */
1574 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1575 {
1576 /* Pseudo regs already assigned hardware regs are treated
1577 almost the same as explicit hardware regs. */
1578 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1579 while (regno < last)
1580 {
1581 CLEAR_HARD_REG_BIT (hard_regs_live, regno);
1582 regno++;
1583 }
1584 }
1585 }
1586
1587 /* Mark hard reg REGNO as currently live, assuming machine mode MODE
1588 for the value stored in it. MODE determines how many consecutive
1589 registers are actually in use. Do not record conflicts;
1590 it is assumed that the caller will do that. */
1591
1592 static void
1593 mark_reg_live_nc (int regno, enum machine_mode mode)
1594 {
1595 int last = regno + hard_regno_nregs[regno][mode];
1596 while (regno < last)
1597 {
1598 SET_HARD_REG_BIT (hard_regs_live, regno);
1599 regno++;
1600 }
1601 }
1602 \f
/* Try to set a preference for an allocno to a hard register.
   We are passed DEST and SRC which are the operands of a SET.  It is known
   that SRC is a register.  If SRC or the first operand of SRC is a register,
   try to set a preference.  If one of the two is a hard register and the other
   is a pseudo-register, mark the preference.

   Note that we are not as aggressive as local-alloc in trying to tie a
   pseudo-register to a hard register.  */

static void
set_preference (rtx dest, rtx src)
{
  unsigned int src_regno, dest_regno;
  /* Amount to add to the hard regno for SRC, or subtract from that for DEST,
     to compensate for subregs in SRC or DEST.  */
  int offset = 0;
  unsigned int i;
  int copy = 1;

  /* If SRC is not itself a register, look inside its first operand; the
     set is then not a plain copy, so clear COPY.  */
  if (GET_RTX_FORMAT (GET_CODE (src))[0] == 'e')
    src = XEXP (src, 0), copy = 0;

  /* Get the reg number for both SRC and DEST.
     If neither is a reg, give up.  */

  if (REG_P (src))
    src_regno = REGNO (src);
  else if (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src)))
    {
      src_regno = REGNO (SUBREG_REG (src));

      /* Account for the subreg's displacement within the inner reg.  */
      if (REGNO (SUBREG_REG (src)) < FIRST_PSEUDO_REGISTER)
	offset += subreg_regno_offset (REGNO (SUBREG_REG (src)),
				       GET_MODE (SUBREG_REG (src)),
				       SUBREG_BYTE (src),
				       GET_MODE (src));
      else
	offset += (SUBREG_BYTE (src)
		   / REGMODE_NATURAL_SIZE (GET_MODE (src)));
    }
  else
    return;

  if (REG_P (dest))
    dest_regno = REGNO (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      dest_regno = REGNO (SUBREG_REG (dest));

      /* DEST subreg displacement is subtracted; see OFFSET's comment.  */
      if (REGNO (SUBREG_REG (dest)) < FIRST_PSEUDO_REGISTER)
	offset -= subreg_regno_offset (REGNO (SUBREG_REG (dest)),
				       GET_MODE (SUBREG_REG (dest)),
				       SUBREG_BYTE (dest),
				       GET_MODE (dest));
      else
	offset -= (SUBREG_BYTE (dest)
		   / REGMODE_NATURAL_SIZE (GET_MODE (dest)));
    }
  else
    return;

  /* Convert either or both to hard reg numbers.  */

  if (reg_renumber[src_regno] >= 0)
    src_regno = reg_renumber[src_regno];

  if (reg_renumber[dest_regno] >= 0)
    dest_regno = reg_renumber[dest_regno];

  /* Now if one is a hard reg and the other is a global pseudo
     then give the other a preference.  */

  if (dest_regno < FIRST_PSEUDO_REGISTER && src_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[src_regno] >= 0)
    {
      dest_regno -= offset;
      /* Check the adjusted regno is still a valid hard reg (DEST_REGNO is
	 unsigned, so a negative adjustment wraps to a large value).  */
      if (dest_regno < FIRST_PSEUDO_REGISTER)
	{
	  if (copy)
	    SET_REGBIT (hard_reg_copy_preferences,
		        reg_allocno[src_regno], dest_regno);

	  SET_REGBIT (hard_reg_preferences,
		      reg_allocno[src_regno], dest_regno);
	  /* The full preferences record every hard reg the value spans.  */
	  for (i = dest_regno;
	       i < dest_regno + hard_regno_nregs[dest_regno][GET_MODE (dest)];
	       i++)
	    SET_REGBIT (hard_reg_full_preferences, reg_allocno[src_regno], i);
	}
    }

  /* Symmetric case: SRC is the hard reg, DEST the global pseudo.  */
  if (src_regno < FIRST_PSEUDO_REGISTER && dest_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[dest_regno] >= 0)
    {
      src_regno += offset;
      if (src_regno < FIRST_PSEUDO_REGISTER)
	{
	  if (copy)
	    SET_REGBIT (hard_reg_copy_preferences,
			reg_allocno[dest_regno], src_regno);

	  SET_REGBIT (hard_reg_preferences,
		      reg_allocno[dest_regno], src_regno);
	  for (i = src_regno;
	       i < src_regno + hard_regno_nregs[src_regno][GET_MODE (src)];
	       i++)
	    SET_REGBIT (hard_reg_full_preferences, reg_allocno[dest_regno], i);
	}
    }
}
1713 \f
1714 /* Indicate that hard register number FROM was eliminated and replaced with
1715 an offset from hard register number TO. The status of hard registers live
1716 at the start of a basic block is updated by replacing a use of FROM with
1717 a use of TO. */
1718
1719 void
1720 mark_elimination (int from, int to)
1721 {
1722 basic_block bb;
1723
1724 FOR_EACH_BB (bb)
1725 {
1726 regset r = bb->global_live_at_start;
1727 if (REGNO_REG_SET_P (r, from))
1728 {
1729 CLEAR_REGNO_REG_SET (r, from);
1730 SET_REGNO_REG_SET (r, to);
1731 }
1732 }
1733 }
1734 \f
1735 /* Used for communication between the following functions. Holds the
1736 current life information. */
1737 static regset live_relevant_regs;
1738
1739 /* Record in live_relevant_regs and REGS_SET that register REG became live.
1740 This is called via note_stores. */
1741 static void
1742 reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *regs_set)
1743 {
1744 int regno;
1745
1746 if (GET_CODE (reg) == SUBREG)
1747 reg = SUBREG_REG (reg);
1748
1749 if (!REG_P (reg))
1750 return;
1751
1752 regno = REGNO (reg);
1753 if (regno < FIRST_PSEUDO_REGISTER)
1754 {
1755 int nregs = hard_regno_nregs[regno][GET_MODE (reg)];
1756 while (nregs-- > 0)
1757 {
1758 SET_REGNO_REG_SET (live_relevant_regs, regno);
1759 if (! fixed_regs[regno])
1760 SET_REGNO_REG_SET ((regset) regs_set, regno);
1761 regno++;
1762 }
1763 }
1764 else if (reg_renumber[regno] >= 0)
1765 {
1766 SET_REGNO_REG_SET (live_relevant_regs, regno);
1767 SET_REGNO_REG_SET ((regset) regs_set, regno);
1768 }
1769 }
1770
1771 /* Record in live_relevant_regs that register REGNO died. */
1772 static void
1773 reg_dies (int regno, enum machine_mode mode, struct insn_chain *chain)
1774 {
1775 if (regno < FIRST_PSEUDO_REGISTER)
1776 {
1777 int nregs = hard_regno_nregs[regno][mode];
1778 while (nregs-- > 0)
1779 {
1780 CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
1781 if (! fixed_regs[regno])
1782 SET_REGNO_REG_SET (&chain->dead_or_set, regno);
1783 regno++;
1784 }
1785 }
1786 else
1787 {
1788 CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
1789 if (reg_renumber[regno] >= 0)
1790 SET_REGNO_REG_SET (&chain->dead_or_set, regno);
1791 }
1792 }
1793
/* Walk the insns of the current function and build reload_insn_chain,
   and record register life information.  */
void
build_insn_chain (rtx first)
{
  /* P is the link through which the next chain element is appended.  */
  struct insn_chain **p = &reload_insn_chain;
  struct insn_chain *prev = 0;
  /* B tracks the basic block containing FIRST as we scan forward.  */
  basic_block b = ENTRY_BLOCK_PTR->next_bb;
  regset_head live_relevant_regs_head;

  live_relevant_regs = INITIALIZE_REG_SET (live_relevant_regs_head);

  for (; first; first = NEXT_INSN (first))
    {
      struct insn_chain *c;

      /* At the head of each basic block, reseed the live set from the
	 block's global life information.  */
      if (first == BB_HEAD (b))
	{
	  int i;

	  CLEAR_REG_SET (live_relevant_regs);

	  EXECUTE_IF_SET_IN_BITMAP
	    (b->global_live_at_start, 0, i,
	     {
	       /* Only non-eliminable hard regs, and pseudos that got a
		  hard reg, are relevant to reload.  */
	       if (i < FIRST_PSEUDO_REGISTER
		   ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
		   : reg_renumber[i] >= 0)
		 SET_REGNO_REG_SET (live_relevant_regs, i);
	     });
	}

      /* Notes and barriers get no chain element.  */
      if (GET_CODE (first) != NOTE && GET_CODE (first) != BARRIER)
	{
	  c = new_insn_chain ();
	  c->prev = prev;
	  prev = c;
	  *p = c;
	  p = &c->next;
	  c->insn = first;
	  c->block = b->index;

	  if (INSN_P (first))
	    {
	      rtx link;

	      /* Mark the death of everything that dies in this instruction.  */

	      for (link = REG_NOTES (first); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0)))
		  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
			    c);

	      /* live_throughout is snapshotted after deaths but before
		 births: regs live across the whole insn.  */
	      COPY_REG_SET (&c->live_throughout, live_relevant_regs);

	      /* Mark everything born in this instruction as live.  */

	      note_stores (PATTERN (first), reg_becomes_live,
			   &c->dead_or_set);
	    }
	  else
	    COPY_REG_SET (&c->live_throughout, live_relevant_regs);

	  if (INSN_P (first))
	    {
	      rtx link;

	      /* Mark anything that is set in this insn and then unused as dying.  */

	      for (link = REG_NOTES (first); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0)))
		  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
			    c);
	    }
	}

      if (first == BB_END (b))
	b = b->next_bb;

      /* Stop after we pass the end of the last basic block.  Verify that
	 no real insns are after the end of the last basic block.

	 We may want to reorganize the loop somewhat since this test should
	 always be the right exit test.  Allow an ADDR_VEC or ADDR_DIF_VEC if
	 the previous real insn is a JUMP_INSN.  */
      if (b == EXIT_BLOCK_PTR)
	{
	  for (first = NEXT_INSN (first) ; first; first = NEXT_INSN (first))
	    if (INSN_P (first)
		&& GET_CODE (PATTERN (first)) != USE
		&& ! ((GET_CODE (PATTERN (first)) == ADDR_VEC
		       || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
		      && prev_real_insn (first) != 0
		      && GET_CODE (prev_real_insn (first)) == JUMP_INSN))
	      abort ();
	  break;
	}
    }
  FREE_REG_SET (live_relevant_regs);
  *p = 0;
}
1897 \f
1898 /* Print debugging trace information if -dg switch is given,
1899 showing the information on which the allocation decisions are based. */
1900
1901 static void
1902 dump_conflicts (FILE *file)
1903 {
1904 int i;
1905 int has_preferences;
1906 int nregs;
1907 nregs = 0;
1908 for (i = 0; i < max_allocno; i++)
1909 {
1910 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
1911 continue;
1912 nregs++;
1913 }
1914 fprintf (file, ";; %d regs to allocate:", nregs);
1915 for (i = 0; i < max_allocno; i++)
1916 {
1917 int j;
1918 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
1919 continue;
1920 fprintf (file, " %d", allocno[allocno_order[i]].reg);
1921 for (j = 0; j < max_regno; j++)
1922 if (reg_allocno[j] == allocno_order[i]
1923 && j != allocno[allocno_order[i]].reg)
1924 fprintf (file, "+%d", j);
1925 if (allocno[allocno_order[i]].size != 1)
1926 fprintf (file, " (%d)", allocno[allocno_order[i]].size);
1927 }
1928 fprintf (file, "\n");
1929
1930 for (i = 0; i < max_allocno; i++)
1931 {
1932 int j;
1933 fprintf (file, ";; %d conflicts:", allocno[i].reg);
1934 for (j = 0; j < max_allocno; j++)
1935 if (CONFLICTP (j, i))
1936 fprintf (file, " %d", allocno[j].reg);
1937 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1938 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_conflicts, j))
1939 fprintf (file, " %d", j);
1940 fprintf (file, "\n");
1941
1942 has_preferences = 0;
1943 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1944 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1945 has_preferences = 1;
1946
1947 if (! has_preferences)
1948 continue;
1949 fprintf (file, ";; %d preferences:", allocno[i].reg);
1950 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1951 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1952 fprintf (file, " %d", j);
1953 fprintf (file, "\n");
1954 }
1955 fprintf (file, "\n");
1956 }
1957
1958 void
1959 dump_global_regs (FILE *file)
1960 {
1961 int i, j;
1962
1963 fprintf (file, ";; Register dispositions:\n");
1964 for (i = FIRST_PSEUDO_REGISTER, j = 0; i < max_regno; i++)
1965 if (reg_renumber[i] >= 0)
1966 {
1967 fprintf (file, "%d in %d ", i, reg_renumber[i]);
1968 if (++j % 6 == 0)
1969 fprintf (file, "\n");
1970 }
1971
1972 fprintf (file, "\n\n;; Hard regs used: ");
1973 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1974 if (regs_ever_live[i])
1975 fprintf (file, " %d", i);
1976 fprintf (file, "\n\n");
1977 }
1978
1979 \f
1980
1981 /* This page contains code to make live information more accurate.
1982 The accurate register liveness at program point P means:
1983 o there is a path from P to usage of the register and the
1984 register is not redefined or killed on the path.
   o register at P is partially available, i.e. there is a path from
     a register definition to the point P and the register is not
     killed (clobbered) on the path.
1988
1989 The standard GCC live information means only the first condition.
1990 Without the partial availability, there will be more register
1991 conflicts and as a consequence worse register allocation. The
1992 typical example where the information can be different is a
1993 register initialized in the loop at the basic block preceding the
1994 loop in CFG. */
1995
/* The following structure contains basic block data flow information
   used to calculate partial availability of registers.  One instance
   hangs off each basic block's `aux' field (set up by
   alloc_aux_for_blocks in allocate_bb_info).  */

struct bb_info
{
  /* The basic block reverse post-order number; assigned by
     set_up_bb_rts_numbers and used by rpost_cmp to sort the
     worklist in calculate_reg_pav.  */
  int rts_number;
  /* Registers correspondingly killed (clobbered) and defined but not
     killed afterward in the basic block.  */
  bitmap killed, avloc;
  /* Registers partially available correspondingly at the start and
     end of the basic block.  */
  bitmap pavin, pavout;
};

/* Macros for accessing data flow information of basic blocks:
   by the block itself, or by its index.  */

#define BB_INFO(BB) ((struct bb_info *) (BB)->aux)
#define BB_INFO_BY_INDEX(N) BB_INFO (BASIC_BLOCK(N))
2015
2016 /* The function allocates the info structures of each basic block. It
2017 also initialized PAVIN and PAVOUT as if all hard registers were
2018 partially available. */
2019
2020 static void
2021 allocate_bb_info (void)
2022 {
2023 int i;
2024 basic_block bb;
2025 struct bb_info *bb_info;
2026 bitmap init;
2027
2028 alloc_aux_for_blocks (sizeof (struct bb_info));
2029 init = BITMAP_XMALLOC ();
2030 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2031 bitmap_set_bit (init, i);
2032 FOR_EACH_BB (bb)
2033 {
2034 bb_info = bb->aux;
2035 bb_info->avloc = BITMAP_XMALLOC ();
2036 bb_info->killed = BITMAP_XMALLOC ();
2037 bb_info->pavin = BITMAP_XMALLOC ();
2038 bb_info->pavout = BITMAP_XMALLOC ();
2039 bitmap_copy (bb_info->pavin, init);
2040 bitmap_copy (bb_info->pavout, init);
2041 }
2042 BITMAP_XFREE (init);
2043 }
2044
2045 /* The function frees the allocated info of all basic blocks. */
2046
2047 static void
2048 free_bb_info (void)
2049 {
2050 basic_block bb;
2051 struct bb_info *bb_info;
2052
2053 FOR_EACH_BB (bb)
2054 {
2055 bb_info = BB_INFO (bb);
2056 BITMAP_XFREE (bb_info->pavout);
2057 BITMAP_XFREE (bb_info->pavin);
2058 BITMAP_XFREE (bb_info->killed);
2059 BITMAP_XFREE (bb_info->avloc);
2060 }
2061 free_aux_for_blocks ();
2062 }
2063
2064 /* The function modifies local info for register REG being changed in
2065 SETTER. DATA is used to pass the current basic block info. */
2066
2067 static void
2068 mark_reg_change (rtx reg, rtx setter, void *data)
2069 {
2070 int regno;
2071 basic_block bb = data;
2072 struct bb_info *bb_info = BB_INFO (bb);
2073
2074 if (GET_CODE (reg) == SUBREG)
2075 reg = SUBREG_REG (reg);
2076
2077 if (!REG_P (reg))
2078 return;
2079
2080 regno = REGNO (reg);
2081 bitmap_set_bit (bb_info->killed, regno);
2082
2083 if (GET_CODE (setter) != CLOBBER)
2084 bitmap_set_bit (bb_info->avloc, regno);
2085 else
2086 bitmap_clear_bit (bb_info->avloc, regno);
2087 }
2088
2089 /* The function calculates local info for each basic block. */
2090
2091 static void
2092 calculate_local_reg_bb_info (void)
2093 {
2094 basic_block bb;
2095 rtx insn, bound;
2096
2097 FOR_EACH_BB (bb)
2098 {
2099 bound = NEXT_INSN (BB_END (bb));
2100 for (insn = BB_HEAD (bb); insn != bound; insn = NEXT_INSN (insn))
2101 if (INSN_P (insn))
2102 note_stores (PATTERN (insn), mark_reg_change, bb);
2103 }
2104 }
2105
2106 /* The function sets up reverse post-order number of each basic
2107 block. */
2108
2109 static void
2110 set_up_bb_rts_numbers (void)
2111 {
2112 int i;
2113 int *rts_order;
2114
2115 rts_order = xmalloc (sizeof (int) * n_basic_blocks);
2116 flow_reverse_top_sort_order_compute (rts_order);
2117 for (i = 0; i < n_basic_blocks; i++)
2118 BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
2119 free (rts_order);
2120 }
2121
2122 /* Compare function for sorting blocks in reverse postorder. */
2123
2124 static int
2125 rpost_cmp (const void *bb1, const void *bb2)
2126 {
2127 basic_block b1 = *(basic_block *) bb1, b2 = *(basic_block *) bb2;
2128
2129 return BB_INFO (b2)->rts_number - BB_INFO (b1)->rts_number;
2130 }
2131
2132 /* The function calculates partial availability of registers. The
2133 function calculates partial availability at the end of basic block
2134 BB by propagating partial availability at end of predecessor basic
2135 block PRED. The function returns true if the partial availability
2136 at the end of BB has been changed or if CHANGED_P. We have the
2137 following equations:
2138
2139 bb.pavin = empty for entry block | union (pavout of predecessors)
2140 bb.pavout = union (bb.pavin - b.killed, bb.avloc) */
2141
2142 static bool
2143 modify_bb_reg_pav (basic_block bb, basic_block pred, bool changed_p)
2144 {
2145 struct bb_info *bb_info;
2146 bitmap bb_pavin, bb_pavout;
2147
2148 bb_info = BB_INFO (bb);
2149 bb_pavin = bb_info->pavin;
2150 bb_pavout = bb_info->pavout;
2151 if (pred->index != ENTRY_BLOCK)
2152 bitmap_a_or_b (bb_pavin, bb_pavin, BB_INFO (pred)->pavout);
2153 changed_p |= bitmap_union_of_diff (bb_pavout, bb_info->avloc,
2154 bb_pavin, bb_info->killed);
2155 return changed_p;
2156 }
2157
/* The function calculates partial register availability.  It runs a
   worklist algorithm: blocks are processed in reverse postorder, and
   a block is requeued whenever any predecessor's pavout changed, until
   the pavin/pavout sets reach a fixed point.  */

static void
calculate_reg_pav (void)
{
  basic_block bb, succ;
  edge e;
  bool changed_p;
  int i, nel;
  /* BBS is the current worklist; NEW_BBS collects the blocks to
     process on the next outer iteration.  */
  varray_type bbs, new_bbs, temp;
  basic_block *bb_array;
  /* Marks block indexes already pushed on NEW_BBS, so each block is
     queued at most once per pass.  */
  sbitmap wset;

  VARRAY_BB_INIT (bbs, n_basic_blocks, "basic blocks");
  VARRAY_BB_INIT (new_bbs, n_basic_blocks, "basic blocks for the next iter.");
  /* Start with every basic block on the worklist.  */
  FOR_EACH_BB (bb)
    {
      VARRAY_PUSH_BB (bbs, bb);
    }
  wset = sbitmap_alloc (n_basic_blocks + 1);
  while (VARRAY_ACTIVE_SIZE (bbs))
    {
      bb_array = &VARRAY_BB (bbs, 0);
      nel = VARRAY_ACTIVE_SIZE (bbs);
      /* Visiting blocks in reverse postorder makes the data flow
	 iteration converge faster.  */
      qsort (bb_array, nel, sizeof (basic_block), rpost_cmp);
      sbitmap_zero (wset);
      for (i = 0; i < nel; i++)
	{
	  bb = bb_array [i];
	  changed_p = 0;
	  /* Merge partial availability from all predecessors.  */
	  for (e = bb->pred; e; e = e->pred_next)
	    changed_p = modify_bb_reg_pav (bb, e->src, changed_p);
	  /* If BB's pavout changed, its successors must be revisited.  */
	  if (changed_p)
	    for (e = bb->succ; e; e = e->succ_next)
	      {
		succ = e->dest;
		if (succ->index != EXIT_BLOCK && !TEST_BIT (wset, succ->index))
		  {
		    SET_BIT (wset, succ->index);
		    VARRAY_PUSH_BB (new_bbs, succ);
		  }
	      }
	}
      /* Swap the worklists and empty the one for the next round.  */
      temp = bbs;
      bbs = new_bbs;
      new_bbs = temp;
      VARRAY_POP_ALL (new_bbs);
    }
  sbitmap_free (wset);
}
2208
2209 /* The following function makes live information more accurate by
2210 modifying global_live_at_start and global_live_at_end of basic
2211 blocks. After the function call a register lives at a program
2212 point only if it is initialized on a path from CFG entry to the
2213 program point. The standard GCC life analysis permits registers to
2214 live uninitialized. */
2215
2216 static void
2217 make_accurate_live_analysis (void)
2218 {
2219 basic_block bb;
2220 struct bb_info *bb_info;
2221
2222 max_regno = max_reg_num ();
2223 compact_blocks ();
2224 allocate_bb_info ();
2225 calculate_local_reg_bb_info ();
2226 set_up_bb_rts_numbers ();
2227 calculate_reg_pav ();
2228 FOR_EACH_BB (bb)
2229 {
2230 bb_info = BB_INFO (bb);
2231
2232 bitmap_a_and_b (bb->global_live_at_start, bb->global_live_at_start,
2233 bb_info->pavin);
2234 bitmap_a_and_b (bb->global_live_at_end, bb->global_live_at_end,
2235 bb_info->pavout);
2236 }
2237 free_bb_info ();
2238 }