local-alloc.c (local_alloc): Avoid call of update_equiv_regs when not optimizing.
1 /* Allocate registers within a basic block, for GNU compiler.
2 Copyright (C) 1987, 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* Allocation of hard register numbers to pseudo registers is done in
23 two passes. In this pass we consider only regs that are born and
24 die once within one basic block. We do this one basic block at a
25 time. Then the next pass allocates the registers that remain.
26 Two passes are used because this pass uses methods that work only
27 on linear code, but that do a better job than the general methods
28 used in global_alloc, and more quickly too.
29
30 The assignments made are recorded in the vector reg_renumber
31 whose space is allocated here. The rtl code itself is not altered.
32
33 We assign each instruction in the basic block a number
34 which is its order from the beginning of the block.
35 Then we can represent the lifetime of a pseudo register with
36 a pair of numbers, and check for conflicts easily.
37 We can record the availability of hard registers with a
38 HARD_REG_SET for each instruction. The HARD_REG_SET
39 contains 0 or 1 for each hard reg.
40
41 To avoid register shuffling, we tie registers together when one
42 dies by being copied into another, or dies in an instruction that
43 does arithmetic to produce another. The tied registers are
44 allocated as one. Registers with different reg class preferences
45 can never be tied unless the class preferred by one is a subclass
46 of the one preferred by the other.
47
48 Tying is represented with "quantity numbers".
49 A non-tied register is given a new quantity number.
50 Tied registers have the same quantity number.
51
52 We have provision to exempt registers, even when they are contained
53 within the block, that can be tied to others that are not contained in it.
54 This is so that global_alloc could process them both and tie them then.
55 But this is currently disabled since tying in global_alloc is not
56 yet implemented. */
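
/* A rough, self-contained illustration of the scheme above (the names
   here are invented for the sketch and are not used by the compiler):
   once a pseudo's life is reduced to a pair of insn indexes, deciding
   whether two quantities may share a hard register is just an
   interval-overlap test.  */
#if 0
struct sketch_life { int birth, death; };

static int
sketch_lives_conflict_p (a, b)
     struct sketch_life *a;
     struct sketch_life *b;
{
  /* The lifetimes overlap unless one ends before the other begins.  */
  return a->birth < b->death && b->birth < a->death;
}
#endif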
57
58 /* Pseudos allocated here can be reallocated by global.c if the hard register
59 is used as a spill register. Currently we don't allocate such pseudos
60 here if their preferred class is likely to be used by spills. */
61
62 #include "config.h"
63 #include "system.h"
64 #include "rtl.h"
65 #include "tm_p.h"
66 #include "flags.h"
67 #include "hard-reg-set.h"
68 #include "basic-block.h"
69 #include "regs.h"
70 #include "function.h"
71 #include "insn-config.h"
72 #include "insn-attr.h"
73 #include "recog.h"
74 #include "output.h"
75 #include "toplev.h"
76 #include "except.h"
77 #include "integrate.h"
78 \f
79 /* Next quantity number available for allocation. */
80
81 static int next_qty;
82
83 /* Information we maintain about each quantity. */
84 struct qty
85 {
86 /* The number of refs to quantity Q. */
87
88 int n_refs;
89
90 /* The frequency of uses of quantity Q. */
91
92 int freq;
93
94 /* Insn number (counting from head of basic block)
95 where quantity Q was born. -1 if birth has not been recorded. */
96
97 int birth;
98
99 /* Insn number (counting from head of basic block)
100 where given quantity died. Due to the way tying is done,
101 and the fact that we consider in this pass only regs that die but once,
102 a quantity can die only once. Each quantity's life span
103 is a set of consecutive insns. -1 if death has not been recorded. */
104
105 int death;
106
107 /* Number of words needed to hold the data in given quantity.
108 This depends on its machine mode. It is used for these purposes:
109 1. It is used in computing the relative importances of qtys,
110 which determines the order in which we look for regs for them.
111 2. It is used in rules that prevent tying several registers of
112 different sizes in a way that is geometrically impossible
113 (see combine_regs). */
114
115 int size;
116
117 /* Number of times a reg tied to given qty lives across a CALL_INSN. */
118
119 int n_calls_crossed;
120
121 /* The register number of one pseudo register whose reg_qty value is Q.
122 This register should be the head of the chain
123 maintained in reg_next_in_qty. */
124
125 int first_reg;
126
127 /* Reg class contained in (smaller than) the preferred classes of all
128 the pseudo regs that are tied in given quantity.
129 This is the preferred class for allocating that quantity. */
130
131 enum reg_class min_class;
132
133 /* Register class within which we allocate given qty if we can't get
134 its preferred class. */
135
136 enum reg_class alternate_class;
137
138 /* This holds the mode of the registers that are tied to given qty,
139 or VOIDmode if registers with differing modes are tied together. */
140
141 enum machine_mode mode;
142
143 /* The hard reg number chosen for given quantity,
144 or -1 if none was found. */
145
146 short phys_reg;
147
148 /* Nonzero if this quantity has been used in a SUBREG in some
149 way that is illegal. */
150
151 char changes_mode;
152
153 };
154
155 static struct qty *qty;
156
157 /* These fields are kept separately to speed up their clearing. */
158
159 /* We maintain two hard register sets that indicate suggested hard registers
160 for each quantity. The first, phys_copy_sugg, contains hard registers
161 that are tied to the quantity by a simple copy. The second contains all
162 hard registers that are tied to the quantity via an arithmetic operation.
163
164 The former register set is given priority for allocation. This tends to
165 eliminate copy insns. */
166
167 /* Element Q is a set of hard registers that are suggested for quantity Q by
168 copy insns. */
169
170 static HARD_REG_SET *qty_phys_copy_sugg;
171
172 /* Element Q is a set of hard registers that are suggested for quantity Q by
173 arithmetic insns. */
174
175 static HARD_REG_SET *qty_phys_sugg;
176
177 /* Element Q is the number of suggested registers in qty_phys_copy_sugg. */
178
179 static short *qty_phys_num_copy_sugg;
180
181 /* Element Q is the number of suggested registers in qty_phys_sugg. */
182
183 static short *qty_phys_num_sugg;
184
185 /* If (REG N) has been assigned a quantity number, is a register number
186 of another register assigned the same quantity number, or -1 for the
187 end of the chain. qty->first_reg points to the head of this chain. */
188
189 static int *reg_next_in_qty;
190
191 /* reg_qty[N] (where N is a pseudo reg number) is the qty number of that reg
192 if it is >= 0,
193 or -1 if this register cannot be allocated by local-alloc,
194 or -2 if not known yet.
195
196 Note that if we see a use or death of pseudo register N with
197 reg_qty[N] == -2, register N must be local to the current block. If
198 it were used in more than one block, we would have reg_qty[N] == -1.
199 This relies on the fact that if reg_basic_block[N] is >= 0, register N
200 will not appear in any other block. We save a considerable number of
201 tests by exploiting this.
202
203 If N is < FIRST_PSEUDO_REGISTER, reg_qty[N] is undefined and should not
204 be referenced. */
205
206 static int *reg_qty;
207
208 /* The offset (in words) of register N within its quantity.
209 This can be nonzero if register N is SImode, and has been tied
210 to a subreg of a DImode register. */
211
212 static char *reg_offset;
213
214 /* Vector of substitutions of register numbers,
215 used to map pseudo regs into hardware regs.
216 This is set up as a result of register allocation.
217 Element N is the hard reg assigned to pseudo reg N,
218 or is -1 if no hard reg was assigned.
219 If N is a hard reg number, element N is N. */
220
221 short *reg_renumber;
222
223 /* Set of hard registers live at the current point in the scan
224 of the instructions in a basic block. */
225
226 static HARD_REG_SET regs_live;
227
228 /* Each set of hard registers indicates registers live at a particular
229 point in the basic block. For N even, regs_live_at[N] says which
230 hard registers are needed *after* insn N/2 (i.e., they may not
231 conflict with the outputs of insn N/2 or the inputs of insn N/2 + 1).
232
233 If an object is to conflict with the inputs of insn J but not the
234 outputs of insn J + 1, we say it is born at index J*2 - 1. Similarly,
235 if it is to conflict with the outputs of insn J but not the inputs of
236 insn J + 1, it is said to die at index J*2 + 1. */
237
238 static HARD_REG_SET *regs_live_at;
239
240 /* Communicate local vars `insn_number' and `insn'
241 from `block_alloc' to `reg_is_set', `wipe_dead_reg', and `alloc_qty'. */
242 static int this_insn_number;
243 static rtx this_insn;
244
245 struct equivalence
246 {
247 /* Set when an attempt should be made to replace a register
248 with the associated src entry. */
249
250 char replace;
251
252 /* Set when a REG_EQUIV note is found or created. Used to
253 keep track of what memory accesses might be created later,
254 e.g. by reload. */
255
256 rtx replacement;
257
258 rtx src;
259
260 /* Loop depth is used to recognize equivalences which appear
261 to be present within the same loop (or in an inner loop). */
262
263 int loop_depth;
264
265 /* The list of instructions that initialize this register. */
266
267 rtx init_insns;
268 };
269
270 /* reg_equiv[N] (where N is a pseudo reg number) is the equivalence
271 structure for that register. */
272
273 static struct equivalence *reg_equiv;
274
275 /* Nonzero if we recorded an equivalence for a LABEL_REF. */
276 static int recorded_label_ref;
277
278 static void alloc_qty PARAMS ((int, enum machine_mode, int, int));
279 static void validate_equiv_mem_from_store PARAMS ((rtx, rtx, void *));
280 static int validate_equiv_mem PARAMS ((rtx, rtx, rtx));
281 static int equiv_init_varies_p PARAMS ((rtx));
282 static int equiv_init_movable_p PARAMS ((rtx, int));
283 static int contains_replace_regs PARAMS ((rtx));
284 static int memref_referenced_p PARAMS ((rtx, rtx));
285 static int memref_used_between_p PARAMS ((rtx, rtx, rtx));
286 static void update_equiv_regs PARAMS ((void));
287 static void no_equiv PARAMS ((rtx, rtx, void *));
288 static void block_alloc PARAMS ((int));
289 static int qty_sugg_compare PARAMS ((int, int));
290 static int qty_sugg_compare_1 PARAMS ((const PTR, const PTR));
291 static int qty_compare PARAMS ((int, int));
292 static int qty_compare_1 PARAMS ((const PTR, const PTR));
293 static int combine_regs PARAMS ((rtx, rtx, int, int, rtx, int));
294 static int reg_meets_class_p PARAMS ((int, enum reg_class));
295 static void update_qty_class PARAMS ((int, int));
296 static void reg_is_set PARAMS ((rtx, rtx, void *));
297 static void reg_is_born PARAMS ((rtx, int));
298 static void wipe_dead_reg PARAMS ((rtx, int));
299 static int find_free_reg PARAMS ((enum reg_class, enum machine_mode,
300 int, int, int, int, int));
301 static void mark_life PARAMS ((int, enum machine_mode, int));
302 static void post_mark_life PARAMS ((int, enum machine_mode, int, int, int));
303 static int no_conflict_p PARAMS ((rtx, rtx, rtx));
304 static int requires_inout PARAMS ((const char *));
305 \f
306 /* Allocate a new quantity (new within current basic block)
307 for register number REGNO which is born at index BIRTH
308 within the block. MODE and SIZE are info on reg REGNO. */
309
310 static void
311 alloc_qty (regno, mode, size, birth)
312 int regno;
313 enum machine_mode mode;
314 int size, birth;
315 {
316 int qtyno = next_qty++;
317
318 reg_qty[regno] = qtyno;
319 reg_offset[regno] = 0;
320 reg_next_in_qty[regno] = -1;
321
322 qty[qtyno].first_reg = regno;
323 qty[qtyno].size = size;
324 qty[qtyno].mode = mode;
325 qty[qtyno].birth = birth;
326 qty[qtyno].n_calls_crossed = REG_N_CALLS_CROSSED (regno);
327 qty[qtyno].min_class = reg_preferred_class (regno);
328 qty[qtyno].alternate_class = reg_alternate_class (regno);
329 qty[qtyno].n_refs = REG_N_REFS (regno);
330 qty[qtyno].freq = REG_FREQ (regno);
331 qty[qtyno].changes_mode = REG_CHANGES_MODE (regno);
332 }
333 \f
334 /* Main entry point of this file. */
335
336 int
337 local_alloc ()
338 {
339 int b, i;
340 int max_qty;
341
342 /* We need to keep track of whether or not we recorded a LABEL_REF so
343 that we know if the jump optimizer needs to be rerun. */
344 recorded_label_ref = 0;
345
346 /* Leaf functions and non-leaf functions have different needs.
347 If defined, let the machine say what kind of ordering we
348 should use. */
349 #ifdef ORDER_REGS_FOR_LOCAL_ALLOC
350 ORDER_REGS_FOR_LOCAL_ALLOC;
351 #endif
352
353 /* Promote REG_EQUAL notes to REG_EQUIV notes and adjust status of affected
354 registers. */
355 if (optimize)
356 update_equiv_regs ();
357
358 /* This sets the maximum number of quantities we can have. Quantity
359 numbers start at zero and we can have one for each pseudo. */
360 max_qty = (max_regno - FIRST_PSEUDO_REGISTER);
361
362 /* Allocate vectors of temporary data.
363 See the declarations of these variables, above,
364 for what they mean. */
365
366 qty = (struct qty *) xmalloc (max_qty * sizeof (struct qty));
367 qty_phys_copy_sugg
368 = (HARD_REG_SET *) xmalloc (max_qty * sizeof (HARD_REG_SET));
369 qty_phys_num_copy_sugg = (short *) xmalloc (max_qty * sizeof (short));
370 qty_phys_sugg = (HARD_REG_SET *) xmalloc (max_qty * sizeof (HARD_REG_SET));
371 qty_phys_num_sugg = (short *) xmalloc (max_qty * sizeof (short));
372
373 reg_qty = (int *) xmalloc (max_regno * sizeof (int));
374 reg_offset = (char *) xmalloc (max_regno * sizeof (char));
375 reg_next_in_qty = (int *) xmalloc (max_regno * sizeof (int));
376
377 /* Determine which pseudo-registers can be allocated by local-alloc.
378 In general, these are the registers used only in a single block and
379 which only die once.
380
381 We need not be concerned with which block actually uses the register
382 since we will never see it outside that block. */
383
384 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
385 {
386 if (REG_BASIC_BLOCK (i) >= 0 && REG_N_DEATHS (i) == 1)
387 reg_qty[i] = -2;
388 else
389 reg_qty[i] = -1;
390 }
391
392 /* Force loop below to initialize entire quantity array. */
393 next_qty = max_qty;
394
395 /* Allocate each block's local registers, block by block. */
396
397 for (b = 0; b < n_basic_blocks; b++)
398 {
399 /* NEXT_QTY indicates which elements of the `qty_...'
400 vectors might need to be initialized because they were used
401 for the previous block; it is set to the entire array before
402 block 0. Initialize those, with an explicit loop if there are few,
403 else with memset. Do not initialize vectors that are
404 explicitly set by `alloc_qty'. */
405
406 if (next_qty < 6)
407 {
408 for (i = 0; i < next_qty; i++)
409 {
410 CLEAR_HARD_REG_SET (qty_phys_copy_sugg[i]);
411 qty_phys_num_copy_sugg[i] = 0;
412 CLEAR_HARD_REG_SET (qty_phys_sugg[i]);
413 qty_phys_num_sugg[i] = 0;
414 }
415 }
416 else
417 {
418 #define CLEAR(vector) \
419 memset ((char *) (vector), 0, (sizeof (*(vector))) * next_qty);
420
421 CLEAR (qty_phys_copy_sugg);
422 CLEAR (qty_phys_num_copy_sugg);
423 CLEAR (qty_phys_sugg);
424 CLEAR (qty_phys_num_sugg);
425 }
426
427 next_qty = 0;
428
429 block_alloc (b);
430 }
431
432 free (qty);
433 free (qty_phys_copy_sugg);
434 free (qty_phys_num_copy_sugg);
435 free (qty_phys_sugg);
436 free (qty_phys_num_sugg);
437
438 free (reg_qty);
439 free (reg_offset);
440 free (reg_next_in_qty);
441
442 return recorded_label_ref;
443 }
444 \f
445 /* Used for communication between the following two functions: contains
446 a MEM that we wish to ensure remains unchanged. */
447 static rtx equiv_mem;
448
449 /* Set nonzero if EQUIV_MEM is modified. */
450 static int equiv_mem_modified;
451
452 /* If EQUIV_MEM is modified by modifying DEST, indicate that it is modified.
453 Called via note_stores. */
454
455 static void
456 validate_equiv_mem_from_store (dest, set, data)
457 rtx dest;
458 rtx set ATTRIBUTE_UNUSED;
459 void *data ATTRIBUTE_UNUSED;
460 {
461 if ((GET_CODE (dest) == REG
462 && reg_overlap_mentioned_p (dest, equiv_mem))
463 || (GET_CODE (dest) == MEM
464 && true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)))
465 equiv_mem_modified = 1;
466 }
467
468 /* Verify that no store between START and the death of REG invalidates
469 MEMREF. MEMREF is invalidated by modifying a register used in MEMREF,
470 by storing into an overlapping memory location, or with a non-const
471 CALL_INSN.
472
473 Return 1 if MEMREF remains valid. */
474
475 static int
476 validate_equiv_mem (start, reg, memref)
477 rtx start;
478 rtx reg;
479 rtx memref;
480 {
481 rtx insn;
482 rtx note;
483
484 equiv_mem = memref;
485 equiv_mem_modified = 0;
486
487 /* If the memory reference has side effects or is volatile, it isn't a
488 valid equivalence. */
489 if (side_effects_p (memref))
490 return 0;
491
492 for (insn = start; insn && ! equiv_mem_modified; insn = NEXT_INSN (insn))
493 {
494 if (! INSN_P (insn))
495 continue;
496
497 if (find_reg_note (insn, REG_DEAD, reg))
498 return 1;
499
500 if (GET_CODE (insn) == CALL_INSN && ! RTX_UNCHANGING_P (memref)
501 && ! CONST_OR_PURE_CALL_P (insn))
502 return 0;
503
504 note_stores (PATTERN (insn), validate_equiv_mem_from_store, NULL);
505
506 /* If a register mentioned in MEMREF is modified via an
507 auto-increment, we lose the equivalence. Do the same if one
508 dies; although we could extend the life, it doesn't seem worth
509 the trouble. */
510
511 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
512 if ((REG_NOTE_KIND (note) == REG_INC
513 || REG_NOTE_KIND (note) == REG_DEAD)
514 && GET_CODE (XEXP (note, 0)) == REG
515 && reg_overlap_mentioned_p (XEXP (note, 0), memref))
516 return 0;
517 }
518
519 return 0;
520 }
521
522 /* Returns nonzero if X varies, or zero if X is known to be invariant. */
523
524 static int
525 equiv_init_varies_p (x)
526 rtx x;
527 {
528 RTX_CODE code = GET_CODE (x);
529 int i;
530 const char *fmt;
531
532 switch (code)
533 {
534 case MEM:
535 return ! RTX_UNCHANGING_P (x) || equiv_init_varies_p (XEXP (x, 0));
536
537 case QUEUED:
538 return 1;
539
540 case CONST:
541 case CONST_INT:
542 case CONST_DOUBLE:
543 case CONST_VECTOR:
544 case SYMBOL_REF:
545 case LABEL_REF:
546 return 0;
547
548 case REG:
549 return reg_equiv[REGNO (x)].replace == 0 && rtx_varies_p (x, 0);
550
551 case ASM_OPERANDS:
552 if (MEM_VOLATILE_P (x))
553 return 1;
554
555 /* FALLTHROUGH */
556
557 default:
558 break;
559 }
560
561 fmt = GET_RTX_FORMAT (code);
562 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
563 if (fmt[i] == 'e')
564 {
565 if (equiv_init_varies_p (XEXP (x, i)))
566 return 1;
567 }
568 else if (fmt[i] == 'E')
569 {
570 int j;
571 for (j = 0; j < XVECLEN (x, i); j++)
572 if (equiv_init_varies_p (XVECEXP (x, i, j)))
573 return 1;
574 }
575
576 return 0;
577 }
578
579 /* Returns non-zero if X (used to initialize register REGNO) is movable.
580 X is only movable if the registers it uses have equivalent initializations
581 which appear to be within the same loop (or in an inner loop) and are movable,
582 or if they are not candidates for local_alloc and don't vary. */
583
584 static int
585 equiv_init_movable_p (x, regno)
586 rtx x;
587 int regno;
588 {
589 int i, j;
590 const char *fmt;
591 enum rtx_code code = GET_CODE (x);
592
593 switch (code)
594 {
595 case SET:
596 return equiv_init_movable_p (SET_SRC (x), regno);
597
598 case CC0:
599 case CLOBBER:
600 return 0;
601
602 case PRE_INC:
603 case PRE_DEC:
604 case POST_INC:
605 case POST_DEC:
606 case PRE_MODIFY:
607 case POST_MODIFY:
608 return 0;
609
610 case REG:
611 return (reg_equiv[REGNO (x)].loop_depth >= reg_equiv[regno].loop_depth
612 && reg_equiv[REGNO (x)].replace)
613 || (REG_BASIC_BLOCK (REGNO (x)) < 0 && ! rtx_varies_p (x, 0));
614
615 case UNSPEC_VOLATILE:
616 return 0;
617
618 case ASM_OPERANDS:
619 if (MEM_VOLATILE_P (x))
620 return 0;
621
622 /* FALLTHROUGH */
623
624 default:
625 break;
626 }
627
628 fmt = GET_RTX_FORMAT (code);
629 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
630 switch (fmt[i])
631 {
632 case 'e':
633 if (! equiv_init_movable_p (XEXP (x, i), regno))
634 return 0;
635 break;
636 case 'E':
637 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
638 if (! equiv_init_movable_p (XVECEXP (x, i, j), regno))
639 return 0;
640 break;
641 }
642
643 return 1;
644 }
645
646 /* TRUE if X uses any registers for which reg_equiv[REGNO].replace is true. */
647
648 static int
649 contains_replace_regs (x)
650 rtx x;
651 {
652 int i, j;
653 const char *fmt;
654 enum rtx_code code = GET_CODE (x);
655
656 switch (code)
657 {
658 case CONST_INT:
659 case CONST:
660 case LABEL_REF:
661 case SYMBOL_REF:
662 case CONST_DOUBLE:
663 case CONST_VECTOR:
664 case PC:
665 case CC0:
666 case HIGH:
667 case LO_SUM:
668 return 0;
669
670 case REG:
671 return reg_equiv[REGNO (x)].replace;
672
673 default:
674 break;
675 }
676
677 fmt = GET_RTX_FORMAT (code);
678 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
679 switch (fmt[i])
680 {
681 case 'e':
682 if (contains_replace_regs (XEXP (x, i)))
683 return 1;
684 break;
685 case 'E':
686 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
687 if (contains_replace_regs (XVECEXP (x, i, j)))
688 return 1;
689 break;
690 }
691
692 return 0;
693 }
694 \f
695 /* TRUE if X references a memory location that would be affected by a store
696 to MEMREF. */
697
698 static int
699 memref_referenced_p (memref, x)
700 rtx x;
701 rtx memref;
702 {
703 int i, j;
704 const char *fmt;
705 enum rtx_code code = GET_CODE (x);
706
707 switch (code)
708 {
709 case CONST_INT:
710 case CONST:
711 case LABEL_REF:
712 case SYMBOL_REF:
713 case CONST_DOUBLE:
714 case CONST_VECTOR:
715 case PC:
716 case CC0:
717 case HIGH:
718 case LO_SUM:
719 return 0;
720
721 case REG:
722 return (reg_equiv[REGNO (x)].replacement
723 && memref_referenced_p (memref,
724 reg_equiv[REGNO (x)].replacement));
725
726 case MEM:
727 if (true_dependence (memref, VOIDmode, x, rtx_varies_p))
728 return 1;
729 break;
730
731 case SET:
732 /* If we are setting a MEM, it doesn't count (its address does), but any
733 other SET_DEST that has a MEM in it is referencing the MEM. */
734 if (GET_CODE (SET_DEST (x)) == MEM)
735 {
736 if (memref_referenced_p (memref, XEXP (SET_DEST (x), 0)))
737 return 1;
738 }
739 else if (memref_referenced_p (memref, SET_DEST (x)))
740 return 1;
741
742 return memref_referenced_p (memref, SET_SRC (x));
743
744 default:
745 break;
746 }
747
748 fmt = GET_RTX_FORMAT (code);
749 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
750 switch (fmt[i])
751 {
752 case 'e':
753 if (memref_referenced_p (memref, XEXP (x, i)))
754 return 1;
755 break;
756 case 'E':
757 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
758 if (memref_referenced_p (memref, XVECEXP (x, i, j)))
759 return 1;
760 break;
761 }
762
763 return 0;
764 }
765
766 /* TRUE if some insn in the range (START, END] references a memory location
767 that would be affected by a store to MEMREF. */
768
769 static int
770 memref_used_between_p (memref, start, end)
771 rtx memref;
772 rtx start;
773 rtx end;
774 {
775 rtx insn;
776
777 for (insn = NEXT_INSN (start); insn != NEXT_INSN (end);
778 insn = NEXT_INSN (insn))
779 if (INSN_P (insn) && memref_referenced_p (memref, PATTERN (insn)))
780 return 1;
781
782 return 0;
783 }
784 \f
785 /* Return nonzero if the rtx X is invariant over the current function. */
786 /* ??? Actually, the places this is used in reload expect exactly what
787 is tested here, and not everything that is function invariant. In
788 particular, the frame pointer and arg pointer are special cased;
789 pic_offset_table_rtx is not, and this will cause aborts when we
790 go to spill these things to memory. */
791
792 int
793 function_invariant_p (x)
794 rtx x;
795 {
796 if (CONSTANT_P (x))
797 return 1;
798 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
799 return 1;
800 if (GET_CODE (x) == PLUS
801 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
802 && CONSTANT_P (XEXP (x, 1)))
803 return 1;
804 return 0;
805 }
806
807 /* Find registers that are equivalent to a single value throughout the
808 compilation (either because they can be referenced in memory or are set once
809 from a single constant). Lower their priority for a register.
810
811 If such a register is only referenced once, try substituting its value
812 into the using insn. If it succeeds, we can eliminate the register
813 completely. */
814
815 static void
816 update_equiv_regs ()
817 {
818 rtx insn;
819 int block;
820 int loop_depth;
821 regset_head cleared_regs;
822 int clear_regnos = 0;
823
824 reg_equiv = (struct equivalence *) xcalloc (max_regno, sizeof *reg_equiv);
825 INIT_REG_SET (&cleared_regs);
826
827 init_alias_analysis ();
828
829 /* Scan the insns and find which registers have equivalences. Do this
830 in a separate scan of the insns because (due to -fcse-follow-jumps)
831 a register can be set below its use. */
832 for (block = 0; block < n_basic_blocks; block++)
833 {
834 basic_block bb = BASIC_BLOCK (block);
835 loop_depth = bb->loop_depth;
836
837 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = NEXT_INSN (insn))
838 {
839 rtx note;
840 rtx set;
841 rtx dest, src;
842 int regno;
843
844 if (! INSN_P (insn))
845 continue;
846
847 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
848 if (REG_NOTE_KIND (note) == REG_INC)
849 no_equiv (XEXP (note, 0), note, NULL);
850
851 set = single_set (insn);
852
853 /* If this insn contains more (or less) than a single SET,
854 only mark all destinations as having no known equivalence. */
855 if (set == 0)
856 {
857 note_stores (PATTERN (insn), no_equiv, NULL);
858 continue;
859 }
860 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
861 {
862 int i;
863
864 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
865 {
866 rtx part = XVECEXP (PATTERN (insn), 0, i);
867 if (part != set)
868 note_stores (part, no_equiv, NULL);
869 }
870 }
871
872 dest = SET_DEST (set);
873 src = SET_SRC (set);
874
875 /* If this sets a MEM to the contents of a REG that is only used
876 in a single basic block, see if the register is always equivalent
877 to that memory location and if moving the store from INSN to the
878 insn that set REG is safe. If so, put a REG_EQUIV note on the
879 initializing insn.
880
881 Don't add a REG_EQUIV note if the insn already has one. The existing
882 REG_EQUIV is likely more useful than the one we are adding.
883
884 If one of the regs in the address has reg_equiv[REGNO].replace set,
885 then we can't add this REG_EQUIV note. The reg_equiv[REGNO].replace
886 optimization may move the set of this register immediately before
887 insn, which puts it after reg_equiv[REGNO].init_insns, and hence
888 the mention in the REG_EQUIV note would be to an uninitialized
889 pseudo. */
890 /* ????? This test isn't good enough; we might see a MEM with a use of
891 a pseudo register before we see its setting insn that will cause
892 reg_equiv[].replace for that pseudo to be set.
893 Equivalences to MEMs should be made in another pass, after the
894 reg_equiv[].replace information has been gathered. */
895
896 if (GET_CODE (dest) == MEM && GET_CODE (src) == REG
897 && (regno = REGNO (src)) >= FIRST_PSEUDO_REGISTER
898 && REG_BASIC_BLOCK (regno) >= 0
899 && REG_N_SETS (regno) == 1
900 && reg_equiv[regno].init_insns != 0
901 && reg_equiv[regno].init_insns != const0_rtx
902 && ! find_reg_note (XEXP (reg_equiv[regno].init_insns, 0),
903 REG_EQUIV, NULL_RTX)
904 && ! contains_replace_regs (XEXP (dest, 0)))
905 {
906 rtx init_insn = XEXP (reg_equiv[regno].init_insns, 0);
907 if (validate_equiv_mem (init_insn, src, dest)
908 && ! memref_used_between_p (dest, init_insn, insn))
909 REG_NOTES (init_insn)
910 = gen_rtx_EXPR_LIST (REG_EQUIV, dest, REG_NOTES (init_insn));
911 }
912
913 /* We only handle the case of a pseudo register being set
914 once, or always to the same value. */
915 /* ??? The mn10200 port breaks if we add equivalences for
916 values that need an ADDRESS_REGS register and set them equivalent
917 to a MEM of a pseudo. The actual problem is in the over-conservative
918 handling of INPADDR_ADDRESS / INPUT_ADDRESS / INPUT triples in
919 calculate_needs, but we traditionally work around this problem
920 here by rejecting equivalences when the destination is in a register
921 that's likely spilled. This is fragile, of course, since the
922 preferred class of a pseudo depends on all instructions that set
923 or use it. */
924
925 if (GET_CODE (dest) != REG
926 || (regno = REGNO (dest)) < FIRST_PSEUDO_REGISTER
927 || reg_equiv[regno].init_insns == const0_rtx
928 || (CLASS_LIKELY_SPILLED_P (reg_preferred_class (regno))
929 && GET_CODE (src) == MEM))
930 {
931 /* This might be setting a SUBREG of a pseudo, a pseudo that is
932 also set somewhere else to a constant. */
933 note_stores (set, no_equiv, NULL);
934 continue;
935 }
936
937 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
938
939 /* cse sometimes generates function invariants, but doesn't put a
940 REG_EQUAL note on the insn. Since this note would be redundant,
941 there's no point creating it earlier than here. */
942 if (! note && ! rtx_varies_p (src, 0))
943 note = set_unique_reg_note (insn, REG_EQUAL, src);
944
945 /* Don't bother considering a REG_EQUAL note containing an EXPR_LIST
946 since it represents a function call. */
947 if (note && GET_CODE (XEXP (note, 0)) == EXPR_LIST)
948 note = NULL_RTX;
949
950 if (REG_N_SETS (regno) != 1
951 && (! note
952 || rtx_varies_p (XEXP (note, 0), 0)
953 || (reg_equiv[regno].replacement
954 && ! rtx_equal_p (XEXP (note, 0),
955 reg_equiv[regno].replacement))))
956 {
957 no_equiv (dest, set, NULL);
958 continue;
959 }
960 /* Record this insn as initializing this register. */
961 reg_equiv[regno].init_insns
962 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv[regno].init_insns);
963
964 /* If this register is known to be equal to a constant, record that
965 it is always equivalent to the constant. */
966 if (note && ! rtx_varies_p (XEXP (note, 0), 0))
967 PUT_MODE (note, (enum machine_mode) REG_EQUIV);
968
969 /* If this insn introduces a "constant" register, decrease the priority
970 of that register. Record this insn if the register is only used once
971 more and the equivalence value is the same as our source.
972
973 The latter condition is checked for two reasons: First, it is an
974 indication that it may be more efficient to actually emit the insn
975 as written (if no registers are available, reload will substitute
976 the equivalence). Secondly, it avoids problems with any registers
977 dying in this insn whose death notes would be missed.
978
979 If we don't have a REG_EQUIV note, see if this insn is loading
980 a register used only in one basic block from a MEM. If so, and the
981 MEM remains unchanged for the life of the register, add a REG_EQUIV
982 note. */
983
984 note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
985
986 if (note == 0 && REG_BASIC_BLOCK (regno) >= 0
987 && GET_CODE (SET_SRC (set)) == MEM
988 && validate_equiv_mem (insn, dest, SET_SRC (set)))
989 REG_NOTES (insn) = note = gen_rtx_EXPR_LIST (REG_EQUIV, SET_SRC (set),
990 REG_NOTES (insn));
991
992 if (note)
993 {
994 int regno = REGNO (dest);
995
996 /* Record whether or not we created a REG_EQUIV note for a LABEL_REF.
997 We might end up substituting the LABEL_REF for uses of the
998 pseudo here or later. That kind of transformation may turn an
999 indirect jump into a direct jump, in which case we must rerun the
1000 jump optimizer to ensure that the JUMP_LABEL fields are valid. */
1001 if (GET_CODE (XEXP (note, 0)) == LABEL_REF
1002 || (GET_CODE (XEXP (note, 0)) == CONST
1003 && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
1004 && (GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0))
1005 == LABEL_REF)))
1006 recorded_label_ref = 1;
1007
1008 reg_equiv[regno].replacement = XEXP (note, 0);
1009 reg_equiv[regno].src = src;
1010 reg_equiv[regno].loop_depth = loop_depth;
1011
1012 /* Don't mess with things live during setjmp. */
1013 if (REG_LIVE_LENGTH (regno) >= 0 && optimize)
1014 {
1015 /* Note that the statement below does not affect the priority
1016 in local-alloc! */
1017 REG_LIVE_LENGTH (regno) *= 2;
1018
1019
1020 /* If the register is referenced exactly twice, meaning it is
1021 set once and used once, indicate that the reference may be
1022 replaced by the equivalence we computed above. Do this
1023 even if the register is only used in one block so that
1024 dependencies can be handled where the last register is
1025 used in a different block (i.e. HIGH / LO_SUM sequences)
1026 and to reduce the number of registers alive across
1027 calls. */
1028
1029 if (REG_N_REFS (regno) == 2
1030 && (rtx_equal_p (XEXP (note, 0), src)
1031 || ! equiv_init_varies_p (src))
1032 && GET_CODE (insn) == INSN
1033 && equiv_init_movable_p (PATTERN (insn), regno))
1034 reg_equiv[regno].replace = 1;
1035 }
1036 }
1037 }
1038 }
1039
1040 /* Now scan all regs killed in an insn to see if any of them are
1041 registers used only once. If so, see if we can replace the
1042 reference with the equivalent form. If we can, delete the
1043 initializing reference and this register will go away. If we
1044 can't replace the reference, and the initializing reference is
1045 within the same loop (or in an inner loop), then move the register
1046 initialization just before the use, so that they are in the same
1047 basic block. */
1048 for (block = n_basic_blocks - 1; block >= 0; block--)
1049 {
1050 basic_block bb = BASIC_BLOCK (block);
1051
1052 loop_depth = bb->loop_depth;
1053 for (insn = bb->end; insn != PREV_INSN (bb->head); insn = PREV_INSN (insn))
1054 {
1055 rtx link;
1056
1057 if (! INSN_P (insn))
1058 continue;
1059
1060 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1061 {
1062 if (REG_NOTE_KIND (link) == REG_DEAD
1063 /* Make sure this insn still refers to the register. */
1064 && reg_mentioned_p (XEXP (link, 0), PATTERN (insn)))
1065 {
1066 int regno = REGNO (XEXP (link, 0));
1067 rtx equiv_insn;
1068
1069 if (! reg_equiv[regno].replace
1070 || reg_equiv[regno].loop_depth < loop_depth)
1071 continue;
1072
1073 /* reg_equiv[REGNO].replace gets set only when
1074 REG_N_REFS[REGNO] is 2, i.e. the register is set
1075 once and used once. (If it were only set, but not used,
1076 flow would have deleted the setting insns.) Hence
1077 there can only be one insn in reg_equiv[REGNO].init_insns. */
1078 if (reg_equiv[regno].init_insns == NULL_RTX
1079 || XEXP (reg_equiv[regno].init_insns, 1) != NULL_RTX)
1080 abort ();
1081 equiv_insn = XEXP (reg_equiv[regno].init_insns, 0);
1082
1083 /* We may not move instructions that can throw, since
1084 that changes basic block boundaries and we are not
1085 prepared to adjust the CFG to match. */
1086 if (can_throw_internal (equiv_insn))
1087 continue;
1088
1089 if (asm_noperands (PATTERN (equiv_insn)) < 0
1090 && validate_replace_rtx (regno_reg_rtx[regno],
1091 reg_equiv[regno].src, insn))
1092 {
1093 rtx equiv_link;
1094 rtx last_link;
1095 rtx note;
1096
1097 /* Find the last note. */
1098 for (last_link = link; XEXP (last_link, 1);
1099 last_link = XEXP (last_link, 1))
1100 ;
1101
1102 /* Append the REG_DEAD notes from equiv_insn. */
1103 equiv_link = REG_NOTES (equiv_insn);
1104 while (equiv_link)
1105 {
1106 note = equiv_link;
1107 equiv_link = XEXP (equiv_link, 1);
1108 if (REG_NOTE_KIND (note) == REG_DEAD)
1109 {
1110 remove_note (equiv_insn, note);
1111 XEXP (last_link, 1) = note;
1112 XEXP (note, 1) = NULL_RTX;
1113 last_link = note;
1114 }
1115 }
1116
1117 remove_death (regno, insn);
1118 REG_N_REFS (regno) = 0;
1119 REG_FREQ (regno) = 0;
1120 delete_insn (equiv_insn);
1121
1122 reg_equiv[regno].init_insns
1123 = XEXP (reg_equiv[regno].init_insns, 1);
1124 }
1125 /* Move the initialization of the register to just before
1126 INSN. Update the flow information. */
1127 else if (PREV_INSN (insn) != equiv_insn)
1128 {
1129 rtx new_insn;
1130
1131 new_insn = emit_insn_before (PATTERN (equiv_insn), insn);
1132 REG_NOTES (new_insn) = REG_NOTES (equiv_insn);
1133 REG_NOTES (equiv_insn) = 0;
1134
1135 /* Make sure this insn is recognized before reload begins,
1136 otherwise eliminate_regs_in_insn will abort. */
1137 INSN_CODE (new_insn) = INSN_CODE (equiv_insn);
1138
1139 delete_insn (equiv_insn);
1140
1141 XEXP (reg_equiv[regno].init_insns, 0) = new_insn;
1142
1143 REG_BASIC_BLOCK (regno) = block >= 0 ? block : 0;
1144 REG_N_CALLS_CROSSED (regno) = 0;
1145 REG_LIVE_LENGTH (regno) = 2;
1146
1147 if (block >= 0 && insn == BLOCK_HEAD (block))
1148 BLOCK_HEAD (block) = PREV_INSN (insn);
1149
1150 /* Remember to clear REGNO from all basic blocks' live
1151 info. */
1152 SET_REGNO_REG_SET (&cleared_regs, regno);
1153 clear_regnos++;
1154 }
1155 }
1156 }
1157 }
1158 }
1159
1160 /* Clear all dead REGNOs from all basic blocks' live info. */
1161 if (clear_regnos)
1162 {
1163 int j, l;
1164 if (clear_regnos > 8)
1165 {
1166 for (l = 0; l < n_basic_blocks; l++)
1167 {
1168 AND_COMPL_REG_SET (BASIC_BLOCK (l)->global_live_at_start,
1169 &cleared_regs);
1170 AND_COMPL_REG_SET (BASIC_BLOCK (l)->global_live_at_end,
1171 &cleared_regs);
1172 }
1173 }
1174 else
1175 EXECUTE_IF_SET_IN_REG_SET (&cleared_regs, 0, j,
1176 {
1177 for (l = 0; l < n_basic_blocks; l++)
1178 {
1179 CLEAR_REGNO_REG_SET (BASIC_BLOCK (l)->global_live_at_start, j);
1180 CLEAR_REGNO_REG_SET (BASIC_BLOCK (l)->global_live_at_end, j);
1181 }
1182 });
1183 }
1184
1185 /* Clean up. */
1186 end_alias_analysis ();
1187 CLEAR_REG_SET (&cleared_regs);
1188 free (reg_equiv);
1189 }
1190
1191 /* Mark REG as having no known equivalence.
1192 Some instructions might have been processed before and furnished
1193 with REG_EQUIV notes for this register; these notes will have to be
1194 removed.
1195 STORE is the piece of RTL that does the non-constant / conflicting
1196 assignment - a SET, CLOBBER or REG_INC note. It is currently not used,
1197 but needs to be there because this function is called from note_stores. */
1198 static void
1199 no_equiv (reg, store, data)
1200 rtx reg, store ATTRIBUTE_UNUSED;
1201 void *data ATTRIBUTE_UNUSED;
1202 {
1203 int regno;
1204 rtx list;
1205
1206 if (GET_CODE (reg) != REG)
1207 return;
1208 regno = REGNO (reg);
1209 list = reg_equiv[regno].init_insns;
1210 if (list == const0_rtx)
1211 return;
1212 for (; list; list = XEXP (list, 1))
1213 {
1214 rtx insn = XEXP (list, 0);
1215 remove_note (insn, find_reg_note (insn, REG_EQUIV, NULL_RTX));
1216 }
1217 reg_equiv[regno].init_insns = const0_rtx;
1218 reg_equiv[regno].replacement = NULL_RTX;
1219 }
1220 \f
1221 /* Allocate hard regs to the pseudo regs used only within block number B.
1222 Only the pseudos that die but once can be handled. */
1223
1224 static void
1225 block_alloc (b)
1226 int b;
1227 {
1228 int i, q;
1229 rtx insn;
1230 rtx note, hard_reg;
1231 int insn_number = 0;
1232 int insn_count = 0;
1233 int max_uid = get_max_uid ();
1234 int *qty_order;
1235 int no_conflict_combined_regno = -1;
1236
1237 /* Count the instructions in the basic block. */
1238
1239 insn = BLOCK_END (b);
1240 while (1)
1241 {
1242 if (GET_CODE (insn) != NOTE)
1243 if (++insn_count > max_uid)
1244 abort ();
1245 if (insn == BLOCK_HEAD (b))
1246 break;
1247 insn = PREV_INSN (insn);
1248 }
1249
1250 /* +2 to leave room for a post_mark_life at the last insn and for
1251 the birth of a CLOBBER in the first insn. */
1252 regs_live_at = (HARD_REG_SET *) xcalloc ((2 * insn_count + 2),
1253 sizeof (HARD_REG_SET));
1254
1255 /* Initialize table of hardware registers currently live. */
1256
1257 REG_SET_TO_HARD_REG_SET (regs_live, BASIC_BLOCK (b)->global_live_at_start);
1258
1259 /* This loop scans the instructions of the basic block
1260 and assigns quantities to registers.
1261 It computes which registers to tie. */
1262
1263 insn = BLOCK_HEAD (b);
1264 while (1)
1265 {
1266 if (GET_CODE (insn) != NOTE)
1267 insn_number++;
1268
1269 if (INSN_P (insn))
1270 {
1271 rtx link, set;
1272 int win = 0;
1273 rtx r0, r1 = NULL_RTX;
1274 int combined_regno = -1;
1275 int i;
1276
1277 this_insn_number = insn_number;
1278 this_insn = insn;
1279
1280 extract_insn (insn);
1281 which_alternative = -1;
1282
1283 /* Is this insn suitable for tying two registers?
1284 If so, try doing that.
1285 Suitable insns are those with at least two operands and where
1286 operand 0 is an output that is a register that is not
1287 earlyclobber.
1288
1289 We can tie operand 0 with some operand that dies in this insn.
1290 First look for operands that are required to be in the same
1291 register as operand 0. If we find such, only try tying that
1292 operand or one that can be put into that operand if the
1293 operation is commutative. If we don't find an operand
1294 that is required to be in the same register as operand 0,
1295 we can tie with any operand.
1296
1297 Subregs in place of regs are also ok.
1298
1299 If tying is done, WIN is set nonzero. */
1300
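/* For instance (hypothetical constraints, purely to illustrate the
   test below): an add pattern whose constraints are "=r", "%0", "r"
   has a non-earlyclobber register output in operand 0, requires
   operand 1 to match operand 0 in every alternative, and marks
   operands 1 and 2 as commutative, so either input that dies here
   is a candidate for tying.  */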
1301 if (optimize
1302 && recog_data.n_operands > 1
1303 && recog_data.constraints[0][0] == '='
1304 && recog_data.constraints[0][1] != '&')
1305 {
1306 /* If non-negative, is an operand that must match operand 0. */
1307 int must_match_0 = -1;
1308 /* Counts number of alternatives that require a match with
1309 operand 0. */
1310 int n_matching_alts = 0;
1311
1312 for (i = 1; i < recog_data.n_operands; i++)
1313 {
1314 const char *p = recog_data.constraints[i];
1315 int this_match = requires_inout (p);
1316
1317 n_matching_alts += this_match;
1318 if (this_match == recog_data.n_alternatives)
1319 must_match_0 = i;
1320 }
1321
1322 r0 = recog_data.operand[0];
1323 for (i = 1; i < recog_data.n_operands; i++)
1324 {
1325 /* Skip this operand if we found an operand that
1326 must match operand 0 and this operand isn't it
1327 and can't be made to be it by commutativity. */
1328
1329 if (must_match_0 >= 0 && i != must_match_0
1330 && ! (i == must_match_0 + 1
1331 && recog_data.constraints[i-1][0] == '%')
1332 && ! (i == must_match_0 - 1
1333 && recog_data.constraints[i][0] == '%'))
1334 continue;
1335
1336 /* Likewise if each alternative has some operand that
1337 must match operand zero. In that case, skip any
1338 operand that doesn't list operand 0 since we know that
1339 the operand always conflicts with operand 0. We
1340 ignore commutativity in this case to keep things simple. */
1341 if (n_matching_alts == recog_data.n_alternatives
1342 && 0 == requires_inout (recog_data.constraints[i]))
1343 continue;
1344
1345 r1 = recog_data.operand[i];
1346
1347 /* If the operand is an address, find a register in it.
1348 There may be more than one register, but we only try one
1349 of them. */
1350 if (recog_data.constraints[i][0] == 'p')
1351 while (GET_CODE (r1) == PLUS || GET_CODE (r1) == MULT)
1352 r1 = XEXP (r1, 0);
1353
1354 /* Avoid making a call-saved register unnecessarily
1355 clobbered. */
1356 hard_reg = get_hard_reg_initial_reg (cfun, r1);
1357 if (hard_reg != NULL_RTX)
1358 {
1359 if (GET_CODE (hard_reg) == REG
1360 && IN_RANGE (REGNO (hard_reg),
1361 0, FIRST_PSEUDO_REGISTER - 1)
1362 && ! call_used_regs[REGNO (hard_reg)])
1363 continue;
1364 }
1365
1366 if (GET_CODE (r0) == REG || GET_CODE (r0) == SUBREG)
1367 {
1368 /* We have two priorities for hard register preferences.
1369 If we have a move insn or an insn whose first input
1370 can only be in the same register as the output, give
1371 priority to an equivalence found from that insn. */
1372 int may_save_copy
1373 = (r1 == recog_data.operand[i] && must_match_0 >= 0);
1374
1375 if (GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG)
1376 win = combine_regs (r1, r0, may_save_copy,
1377 insn_number, insn, 0);
1378 }
1379 if (win)
1380 break;
1381 }
1382 }
1383
1384 /* Recognize an insn sequence with an ultimate result
1385 which can safely overlap one of the inputs.
1386 The sequence begins with a CLOBBER of its result,
1387 and ends with an insn that copies the result to itself
1388 and has a REG_EQUAL note for an equivalent formula.
1389 That note indicates what the inputs are.
1390 The result and the input can overlap if each insn in
1391 the sequence either doesn't mention the input
1392 or has a REG_NO_CONFLICT note to inhibit the conflict.
1393
1394 We do the combining test at the CLOBBER so that the
1395 destination register won't have had a quantity number
1396 assigned, since that would prevent combining. */
1397
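/* Schematically (a hand sketch, with modes omitted and notes abbreviated)
   such a sequence looks like:

	(clobber (reg R))        with a REG_LIBCALL note pointing at the
				 final copy insn
	 ... insns computing into R, each carrying a REG_NO_CONFLICT note ...
	(set (reg R) (reg R))    with a REG_RETVAL note pointing back at the
				 CLOBBER and a REG_EQUAL note giving the
				 formula for R

   The tests below find the final copy through the REG_LIBCALL note and
   read the inputs out of its REG_EQUAL note.  */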
1398 if (optimize
1399 && GET_CODE (PATTERN (insn)) == CLOBBER
1400 && (r0 = XEXP (PATTERN (insn), 0),
1401 GET_CODE (r0) == REG)
1402 && (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0
1403 && XEXP (link, 0) != 0
1404 && GET_CODE (XEXP (link, 0)) == INSN
1405 && (set = single_set (XEXP (link, 0))) != 0
1406 && SET_DEST (set) == r0 && SET_SRC (set) == r0
1407 && (note = find_reg_note (XEXP (link, 0), REG_EQUAL,
1408 NULL_RTX)) != 0)
1409 {
1410 if (r1 = XEXP (note, 0), GET_CODE (r1) == REG
1411 /* Check that we have such a sequence. */
1412 && no_conflict_p (insn, r0, r1))
1413 win = combine_regs (r1, r0, 1, insn_number, insn, 1);
1414 else if (GET_RTX_FORMAT (GET_CODE (XEXP (note, 0)))[0] == 'e'
1415 && (r1 = XEXP (XEXP (note, 0), 0),
1416 GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG)
1417 && no_conflict_p (insn, r0, r1))
1418 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1419
1420 /* Here we care if the operation to be computed is
1421 commutative. */
1422 else if ((GET_CODE (XEXP (note, 0)) == EQ
1423 || GET_CODE (XEXP (note, 0)) == NE
1424 || GET_RTX_CLASS (GET_CODE (XEXP (note, 0))) == 'c')
1425 && (r1 = XEXP (XEXP (note, 0), 1),
1426 (GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG))
1427 && no_conflict_p (insn, r0, r1))
1428 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1429
1430 /* If we did combine something, show the register number
1431 in question so that we know to ignore its death. */
1432 if (win)
1433 no_conflict_combined_regno = REGNO (r1);
1434 }
1435
1436 /* If registers were just tied, set COMBINED_REGNO
1437 to the number of the register used in this insn
1438 that was tied to the register set in this insn.
1439 This register's qty should not be "killed". */
1440
1441 if (win)
1442 {
1443 while (GET_CODE (r1) == SUBREG)
1444 r1 = SUBREG_REG (r1);
1445 combined_regno = REGNO (r1);
1446 }
1447
1448 /* Mark the death of everything that dies in this instruction,
1449 except for anything that was just combined. */
1450
1451 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1452 if (REG_NOTE_KIND (link) == REG_DEAD
1453 && GET_CODE (XEXP (link, 0)) == REG
1454 && combined_regno != (int) REGNO (XEXP (link, 0))
1455 && (no_conflict_combined_regno != (int) REGNO (XEXP (link, 0))
1456 || ! find_reg_note (insn, REG_NO_CONFLICT,
1457 XEXP (link, 0))))
1458 wipe_dead_reg (XEXP (link, 0), 0);
1459
1460 /* Allocate qty numbers for all registers local to this block
1461 that are born (set) in this instruction.
1462 A pseudo that already has a qty is not changed. */
1463
1464 note_stores (PATTERN (insn), reg_is_set, NULL);
1465
1466 /* If anything is set in this insn and then unused, mark it as dying
1467 after this insn, so it will conflict with our outputs. This
1468 can't match with something that combined, and it doesn't matter
1469 if it did. Do this after the calls to reg_is_set since these
1470 die after, not during, the current insn. */
1471
1472 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1473 if (REG_NOTE_KIND (link) == REG_UNUSED
1474 && GET_CODE (XEXP (link, 0)) == REG)
1475 wipe_dead_reg (XEXP (link, 0), 1);
1476
1477 /* If this is an insn that has a REG_RETVAL note pointing at a
1478 CLOBBER insn, we have reached the end of a REG_NO_CONFLICT
1479 block, so clear any register number that combined within it. */
1480 if ((note = find_reg_note (insn, REG_RETVAL, NULL_RTX)) != 0
1481 && GET_CODE (XEXP (note, 0)) == INSN
1482 && GET_CODE (PATTERN (XEXP (note, 0))) == CLOBBER)
1483 no_conflict_combined_regno = -1;
1484 }
1485
1486 /* Set the registers live after INSN_NUMBER. Note that we never
1487 record the registers live before the block's first insn, since no
1488 pseudos we care about are live before that insn. */
1489
1490 IOR_HARD_REG_SET (regs_live_at[2 * insn_number], regs_live);
1491 IOR_HARD_REG_SET (regs_live_at[2 * insn_number + 1], regs_live);
1492
1493 if (insn == BLOCK_END (b))
1494 break;
1495
1496 insn = NEXT_INSN (insn);
1497 }
1498
1499 /* Now every register that is local to this basic block
1500 should have been given a quantity, or else -1 meaning ignore it.
1501 Every quantity should have a known birth and death.
1502
1503 Order the qtys so we assign them registers in order of the
1504 number of suggested registers they need so we allocate those with
1505 the most restrictive needs first. */
1506
1507 qty_order = (int *) xmalloc (next_qty * sizeof (int));
1508 for (i = 0; i < next_qty; i++)
1509 qty_order[i] = i;
1510
1511 #define EXCHANGE(I1, I2) \
1512 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1513
1514 switch (next_qty)
1515 {
1516 case 3:
1517 /* Make qty_order[2] be the one to allocate last. */
1518 if (qty_sugg_compare (0, 1) > 0)
1519 EXCHANGE (0, 1);
1520 if (qty_sugg_compare (1, 2) > 0)
1521 EXCHANGE (2, 1);
1522
1523 /* ... Fall through ... */
1524 case 2:
1525 /* Put the best one to allocate in qty_order[0]. */
1526 if (qty_sugg_compare (0, 1) > 0)
1527 EXCHANGE (0, 1);
1528
1529 /* ... Fall through ... */
1530
1531 case 1:
1532 case 0:
1533 /* Nothing to do here. */
1534 break;
1535
1536 default:
1537 qsort (qty_order, next_qty, sizeof (int), qty_sugg_compare_1);
1538 }
1539
1540 /* Try to put each quantity in a suggested physical register, if it has one.
1541 This may cause registers to be allocated that otherwise wouldn't be, but
1542 this seems acceptable in local allocation (unlike global allocation). */
1543 for (i = 0; i < next_qty; i++)
1544 {
1545 q = qty_order[i];
1546 if (qty_phys_num_sugg[q] != 0 || qty_phys_num_copy_sugg[q] != 0)
1547 qty[q].phys_reg = find_free_reg (qty[q].min_class, qty[q].mode, q,
1548 0, 1, qty[q].birth, qty[q].death);
1549 else
1550 qty[q].phys_reg = -1;
1551 }
1552
1553 /* Order the qtys so we assign them registers in order of
1554 decreasing length of life. Normally call qsort, but if we
1555 have only a very small number of quantities, sort them ourselves. */
1556
1557 for (i = 0; i < next_qty; i++)
1558 qty_order[i] = i;
1559
1560 #define EXCHANGE(I1, I2) \
1561 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1562
1563 switch (next_qty)
1564 {
1565 case 3:
1566 /* Make qty_order[2] be the one to allocate last. */
1567 if (qty_compare (0, 1) > 0)
1568 EXCHANGE (0, 1);
1569 if (qty_compare (1, 2) > 0)
1570 EXCHANGE (2, 1);
1571
1572 /* ... Fall through ... */
1573 case 2:
1574 /* Put the best one to allocate in qty_order[0]. */
1575 if (qty_compare (0, 1) > 0)
1576 EXCHANGE (0, 1);
1577
1578 /* ... Fall through ... */
1579
1580 case 1:
1581 case 0:
1582 /* Nothing to do here. */
1583 break;
1584
1585 default:
1586 qsort (qty_order, next_qty, sizeof (int), qty_compare_1);
1587 }
1588
1589 /* Now for each qty that is not a hardware register,
1590 look for a hardware register to put it in.
1591 First try the register class that is cheapest for this qty,
1592 if there is more than one class. */
1593
1594 for (i = 0; i < next_qty; i++)
1595 {
1596 q = qty_order[i];
1597 if (qty[q].phys_reg < 0)
1598 {
1599 #ifdef INSN_SCHEDULING
1600 /* These values represent the adjusted lifetime of a qty so
1601 that it conflicts with qtys which appear near the start/end
1602 of this qty's lifetime.
1603
1604 The purpose behind extending the lifetime of this qty is to
1605 discourage the register allocator from creating false
1606 dependencies.
1607
1608 The adjustment value is chosen to indicate that this qty
1609 conflicts with all the qtys in the instructions immediately
1610 before and after the lifetime of this qty.
1611
1612 Experiments have shown that higher values tend to hurt
1613 overall code performance.
1614
1615 If allocation using the extended lifetime fails we will try
1616 again with the qty's unadjusted lifetime. */
1617 int fake_birth = MAX (0, qty[q].birth - 2 + qty[q].birth % 2);
1618 int fake_death = MIN (insn_number * 2 + 1,
1619 qty[q].death + 2 - qty[q].death % 2);
1620 #endif
1621
1622 if (N_REG_CLASSES > 1)
1623 {
1624 #ifdef INSN_SCHEDULING
1625 /* We try to avoid using hard registers allocated to qtys which
1626 are born immediately after this qty or die immediately before
1627 this qty.
1628
1629 This optimization is only appropriate when we will run
1630 a scheduling pass after reload and we are not optimizing
1631 for code size. */
1632 if (flag_schedule_insns_after_reload
1633 && !optimize_size
1634 && !SMALL_REGISTER_CLASSES)
1635 {
1636 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1637 qty[q].mode, q, 0, 0,
1638 fake_birth, fake_death);
1639 if (qty[q].phys_reg >= 0)
1640 continue;
1641 }
1642 #endif
1643 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1644 qty[q].mode, q, 0, 0,
1645 qty[q].birth, qty[q].death);
1646 if (qty[q].phys_reg >= 0)
1647 continue;
1648 }
1649
1650 #ifdef INSN_SCHEDULING
1651 /* Similarly, avoid false dependencies. */
1652 if (flag_schedule_insns_after_reload
1653 && !optimize_size
1654 && !SMALL_REGISTER_CLASSES
1655 && qty[q].alternate_class != NO_REGS)
1656 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1657 qty[q].mode, q, 0, 0,
1658 fake_birth, fake_death);
1659 #endif
1660 if (qty[q].alternate_class != NO_REGS)
1661 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1662 qty[q].mode, q, 0, 0,
1663 qty[q].birth, qty[q].death);
1664 }
1665 }
1666
1667 /* Now propagate the register assignments
1668 to the pseudo regs belonging to the qtys. */
1669
1670 for (q = 0; q < next_qty; q++)
1671 if (qty[q].phys_reg >= 0)
1672 {
1673 for (i = qty[q].first_reg; i >= 0; i = reg_next_in_qty[i])
1674 reg_renumber[i] = qty[q].phys_reg + reg_offset[i];
1675 }
1676
1677 /* Clean up. */
1678 free (regs_live_at);
1679 free (qty_order);
1680 }
1681 \f
1682 /* Compare two quantities' priority for getting real registers.
1683 We give shorter-lived quantities higher priority.
1684 Quantities with more references are also preferred, as are quantities that
1685 require multiple registers. This is the same prioritization as
1686 done by global-alloc.
1687
1688 We used to give preference to registers with *longer* lives, but using
1689 the same algorithm in both local- and global-alloc can speed up execution
1690 of some programs by as much as a factor of three! */
1691
1692 /* Note that the quotient will never be bigger than
1693 the value of floor_log2 times the maximum number of
1694 times a register can occur in one insn (surely less than 100)
1695 weighted by frequency (max REG_FREQ_MAX).
1696 Multiplying this by 10000/REG_FREQ_MAX can't overflow.
1697 QTY_CMP_PRI is also used by qty_sugg_compare. */
1698
1699 #define QTY_CMP_PRI(q) \
1700 ((int) (((double) (floor_log2 (qty[q].n_refs) * qty[q].freq * qty[q].size) \
1701 / (qty[q].death - qty[q].birth)) * (10000 / REG_FREQ_MAX)))
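
/* As a worked example with made-up numbers: a quantity with 4 references,
   frequency 1000, size 1, born at index 3 and dying at index 11 gets
   floor_log2 (4) * 1000 * 1 / (11 - 3) = 250, which is then scaled by the
   constant factor 10000 / REG_FREQ_MAX (if REG_FREQ_MAX were 1000 the
   factor would be 10, giving a priority of 2500).  Shorter-lived or more
   frequently referenced quantities therefore sort earlier.  */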
1702
1703 static int
1704 qty_compare (q1, q2)
1705 int q1, q2;
1706 {
1707 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1708 }
1709
1710 static int
1711 qty_compare_1 (q1p, q2p)
1712 const PTR q1p;
1713 const PTR q2p;
1714 {
1715 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1716 int tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1717
1718 if (tem != 0)
1719 return tem;
1720
1721 /* If qtys are equally good, sort by qty number,
1722 so that the results of qsort leave nothing to chance. */
1723 return q1 - q2;
1724 }
1725 \f
1726 /* Compare two quantities' priority for getting real registers. This version
1727 is called for quantities that have suggested hard registers. First priority
1728 goes to quantities that have copy preferences, then to those that have
1729 normal preferences. Within those groups, quantities with the lower
1730 number of preferences have the highest priority. Of those, we use the same
1731 algorithm as above. */
1732
1733 #define QTY_CMP_SUGG(q) \
1734 (qty_phys_num_copy_sugg[q] \
1735 ? qty_phys_num_copy_sugg[q] \
1736 : qty_phys_num_sugg[q] * FIRST_PSEUDO_REGISTER)
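
/* The scaling by FIRST_PSEUDO_REGISTER means that, in general, a quantity
   with at least one copy suggestion compares ahead of one that has only
   ordinary suggestions, and within each group fewer suggestions give an
   earlier (more constrained) position.  */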
1737
1738 static int
1739 qty_sugg_compare (q1, q2)
1740 int q1, q2;
1741 {
1742 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1743
1744 if (tem != 0)
1745 return tem;
1746
1747 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1748 }
1749
1750 static int
1751 qty_sugg_compare_1 (q1p, q2p)
1752 const PTR q1p;
1753 const PTR q2p;
1754 {
1755 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1756 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1757
1758 if (tem != 0)
1759 return tem;
1760
1761 tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1762 if (tem != 0)
1763 return tem;
1764
1765 /* If qtys are equally good, sort by qty number,
1766 so that the results of qsort leave nothing to chance. */
1767 return q1 - q2;
1768 }
1769
1770 #undef QTY_CMP_SUGG
1771 #undef QTY_CMP_PRI
1772 \f
1773 /* Attempt to combine the two registers (rtx's) USEDREG and SETREG.
1774 Returns 1 if we have done so, or 0 if we cannot.
1775
1776 Combining registers means marking them as having the same quantity
1777 and adjusting the offsets within the quantity if either of
1778 them is a SUBREG.
1779
1780 We don't actually combine a hard reg with a pseudo; instead
1781 we just record the hard reg as the suggestion for the pseudo's quantity.
1782 If we really combined them, we could lose if the pseudo lives
1783 across an insn that clobbers the hard reg (eg, movstr).
1784
1785 ALREADY_DEAD is non-zero if USEDREG is known to be dead even though
1786 there is no REG_DEAD note on INSN. This occurs during the processing
1787 of REG_NO_CONFLICT blocks.
1788
1789 MAY_SAVE_COPY is non-zero if this insn is simply copying USEDREG to
1790 SETREG or if the input and output must share a register.
1791 In that case, we record a hard reg suggestion in QTY_PHYS_COPY_SUGG.
1792
1793 There are elaborate checks for the validity of combining. */
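/* As a schematic example (hypothetical pseudo numbers): for the copy

       (set (reg:SI 105) (reg:SI 104))

   where pseudo 104 dies in this insn and the preferred classes of 104
   and 105 are compatible, combine_regs puts 105 into 104's quantity,
   so both will be allocated to one hard register and the copy becomes
   a move from a register to itself.  If the source were a hard
   register instead, we would merely record it in qty_phys_copy_sugg
   for 105's quantity, as explained above.  */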
1794
1795 static int
1796 combine_regs (usedreg, setreg, may_save_copy, insn_number, insn, already_dead)
1797 rtx usedreg, setreg;
1798 int may_save_copy;
1799 int insn_number;
1800 rtx insn;
1801 int already_dead;
1802 {
1803 int ureg, sreg;
1804 int offset = 0;
1805 int usize, ssize;
1806 int sqty;
1807
1808 /* Determine the numbers and sizes of registers being used. If a subreg
1809 is present that does not change the entire register, don't consider
1810 this a copy insn. */
1811
1812 while (GET_CODE (usedreg) == SUBREG)
1813 {
1814 rtx subreg = SUBREG_REG (usedreg);
1815
1816 if (GET_CODE (subreg) == REG)
1817 {
1818 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1819 may_save_copy = 0;
1820
1821 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1822 offset += subreg_regno_offset (REGNO (subreg),
1823 GET_MODE (subreg),
1824 SUBREG_BYTE (usedreg),
1825 GET_MODE (usedreg));
1826 else
1827 offset += (SUBREG_BYTE (usedreg)
1828 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1829 }
1830
1831 usedreg = subreg;
1832 }
1833
1834 if (GET_CODE (usedreg) != REG)
1835 return 0;
1836
1837 ureg = REGNO (usedreg);
1838 if (ureg < FIRST_PSEUDO_REGISTER)
1839 usize = HARD_REGNO_NREGS (ureg, GET_MODE (usedreg));
1840 else
1841 usize = ((GET_MODE_SIZE (GET_MODE (usedreg))
1842 + (REGMODE_NATURAL_SIZE (GET_MODE (usedreg)) - 1))
1843 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1844
1845 while (GET_CODE (setreg) == SUBREG)
1846 {
1847 rtx subreg = SUBREG_REG (setreg);
1848
1849 if (GET_CODE (subreg) == REG)
1850 {
1851 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1852 may_save_copy = 0;
1853
1854 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1855 offset -= subreg_regno_offset (REGNO (subreg),
1856 GET_MODE (subreg),
1857 SUBREG_BYTE (setreg),
1858 GET_MODE (setreg));
1859 else
1860 offset -= (SUBREG_BYTE (setreg)
1861 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1862 }
1863
1864 setreg = subreg;
1865 }
1866
1867 if (GET_CODE (setreg) != REG)
1868 return 0;
1869
1870 sreg = REGNO (setreg);
1871 if (sreg < FIRST_PSEUDO_REGISTER)
1872 ssize = HARD_REGNO_NREGS (sreg, GET_MODE (setreg));
1873 else
1874 ssize = ((GET_MODE_SIZE (GET_MODE (setreg))
1875 + (REGMODE_NATURAL_SIZE (GET_MODE (setreg)) - 1))
1876 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1877
1878 /* If UREG is a pseudo-register that hasn't already been assigned a
1879 quantity number, it means that it is not local to this block or dies
1880 more than once. In either event, we can't do anything with it. */
1881 if ((ureg >= FIRST_PSEUDO_REGISTER && reg_qty[ureg] < 0)
1882 /* Do not combine registers unless one fits within the other. */
1883 || (offset > 0 && usize + offset > ssize)
1884 || (offset < 0 && usize + offset < ssize)
1885 /* Do not combine with a smaller already-assigned object
1886 if that smaller object is already combined with something bigger. */
1887 || (ssize > usize && ureg >= FIRST_PSEUDO_REGISTER
1888 && usize < qty[reg_qty[ureg]].size)
1889 /* Can't combine if SREG is not a register we can allocate. */
1890 || (sreg >= FIRST_PSEUDO_REGISTER && reg_qty[sreg] == -1)
1891 /* Don't combine with a pseudo mentioned in a REG_NO_CONFLICT note.
1892 These have already been taken care of. This probably wouldn't
1893 combine anyway, but don't take any chances. */
1894 || (ureg >= FIRST_PSEUDO_REGISTER
1895 && find_reg_note (insn, REG_NO_CONFLICT, usedreg))
1896 /* Don't tie something to itself. In most cases it would make no
1897 difference, but it would screw up if the reg being tied to itself
1898 also dies in this insn. */
1899 || ureg == sreg
1900 /* Don't try to connect two different hardware registers. */
1901 || (ureg < FIRST_PSEUDO_REGISTER && sreg < FIRST_PSEUDO_REGISTER)
1902 /* Don't connect two different machine modes if they have different
1903 implications as to which registers may be used. */
1904 || !MODES_TIEABLE_P (GET_MODE (usedreg), GET_MODE (setreg)))
1905 return 0;
1906
1907 /* Now, if UREG is a hard reg and SREG is a pseudo, record the hard reg in
1908 qty_phys_sugg for the pseudo instead of tying them.
1909
1910 Return "failure" so that the lifespan of UREG is terminated here;
1911 that way the two lifespans will be disjoint and nothing will prevent
1912 the pseudo reg from being given this hard reg. */
1913
1914 if (ureg < FIRST_PSEUDO_REGISTER)
1915 {
1916 /* Allocate a quantity number so we have a place to put our
1917 suggestions. */
1918 if (reg_qty[sreg] == -2)
1919 reg_is_born (setreg, 2 * insn_number);
1920
1921 if (reg_qty[sreg] >= 0)
1922 {
1923 if (may_save_copy
1924 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg))
1925 {
1926 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg);
1927 qty_phys_num_copy_sugg[reg_qty[sreg]]++;
1928 }
1929 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg))
1930 {
1931 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg);
1932 qty_phys_num_sugg[reg_qty[sreg]]++;
1933 }
1934 }
1935 return 0;
1936 }
1937
1938 /* Similarly for SREG a hard register and UREG a pseudo register. */
1939
1940 if (sreg < FIRST_PSEUDO_REGISTER)
1941 {
1942 if (may_save_copy
1943 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg))
1944 {
1945 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg);
1946 qty_phys_num_copy_sugg[reg_qty[ureg]]++;
1947 }
1948 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg))
1949 {
1950 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg);
1951 qty_phys_num_sugg[reg_qty[ureg]]++;
1952 }
1953 return 0;
1954 }
1955
1956 /* At this point we know that SREG and UREG are both pseudos.
1957 Do nothing if SREG already has a quantity or is a register that we
1958 don't allocate. */
1959 if (reg_qty[sreg] >= -1
1960 /* If we are not going to let any regs live across calls,
1961 don't tie a call-crossing reg to a non-call-crossing reg. */
1962 || (current_function_has_nonlocal_label
1963 && ((REG_N_CALLS_CROSSED (ureg) > 0)
1964 != (REG_N_CALLS_CROSSED (sreg) > 0))))
1965 return 0;
1966
1967 /* We don't already know about SREG, so tie it to UREG
1968 if this is the last use of UREG, provided the classes they want
1969 are compatible. */
1970
1971 if ((already_dead || find_regno_note (insn, REG_DEAD, ureg))
1972 && reg_meets_class_p (sreg, qty[reg_qty[ureg]].min_class))
1973 {
1974 /* Add SREG to UREG's quantity. */
1975 sqty = reg_qty[ureg];
1976 reg_qty[sreg] = sqty;
1977 reg_offset[sreg] = reg_offset[ureg] + offset;
1978 reg_next_in_qty[sreg] = qty[sqty].first_reg;
1979 qty[sqty].first_reg = sreg;
1980
1981 /* If SREG's reg class is smaller, set qty[SQTY].min_class. */
1982 update_qty_class (sqty, sreg);
1983
1984 /* Update info about quantity SQTY. */
1985 qty[sqty].n_calls_crossed += REG_N_CALLS_CROSSED (sreg);
1986 qty[sqty].n_refs += REG_N_REFS (sreg);
1987 qty[sqty].freq += REG_FREQ (sreg);
1988 if (usize < ssize)
1989 {
1990 int i;
1991
1992 for (i = qty[sqty].first_reg; i >= 0; i = reg_next_in_qty[i])
1993 reg_offset[i] -= offset;
1994
1995 qty[sqty].size = ssize;
1996 qty[sqty].mode = GET_MODE (setreg);
1997 }
1998 }
1999 else
2000 return 0;
2001
2002 return 1;
2003 }
2004 \f
2005 /* Return 1 if the preferred class of REG allows it to be tied
2006 to a quantity or register whose class is CLASS.
2007 True if REG's reg class either contains or is contained in CLASS. */
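/* For example, on a hypothetical target where class AREG is a subclass
   of GENERAL_REGS: a register preferring AREG meets both AREG and
   GENERAL_REGS, because one of the two classes contains the other,
   but it does not meet an unrelated class such as FLOAT_REGS.  */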
2008
2009 static int
2010 reg_meets_class_p (reg, class)
2011 int reg;
2012 enum reg_class class;
2013 {
2014 enum reg_class rclass = reg_preferred_class (reg);
2015 return (reg_class_subset_p (rclass, class)
2016 || reg_class_subset_p (class, rclass));
2017 }
2018
2019 /* Update the class of QTYNO assuming that REG is being tied to it. */
2020
2021 static void
2022 update_qty_class (qtyno, reg)
2023 int qtyno;
2024 int reg;
2025 {
2026 enum reg_class rclass = reg_preferred_class (reg);
2027 if (reg_class_subset_p (rclass, qty[qtyno].min_class))
2028 qty[qtyno].min_class = rclass;
2029
2030 rclass = reg_alternate_class (reg);
2031 if (reg_class_subset_p (rclass, qty[qtyno].alternate_class))
2032 qty[qtyno].alternate_class = rclass;
2033
2034 if (REG_CHANGES_MODE (reg))
2035 qty[qtyno].changes_mode = 1;
2036 }
2037 \f
2038 /* Handle something which alters the value of an rtx REG.
2039
2040 REG is whatever is set or clobbered. SETTER is the rtx that
2041 is modifying the register.
2042
2043 If it is not really a register, we do nothing.
2044 The file-global variables `this_insn' and `this_insn_number'
2045 carry info from `block_alloc'. */
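/* As a concrete illustration (hypothetical insn number): in the insn
   numbered 7 within its block, a register written by a SET is born at
   index 2 * 7 == 14, while one written by a CLOBBER is born at index
   13, halfway between insns 6 and 7.  An input that dies in insn 7
   dies at index 14, so it is still live at index 13 and therefore
   conflicts with the clobbered register.  */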
2046
2047 static void
2048 reg_is_set (reg, setter, data)
2049 rtx reg;
2050 rtx setter;
2051 void *data ATTRIBUTE_UNUSED;
2052 {
2053 /* Note that note_stores will only pass us a SUBREG if it is a SUBREG of
2054 a hard register. These may actually not exist any more. */
2055
2056 if (GET_CODE (reg) != SUBREG
2057 && GET_CODE (reg) != REG)
2058 return;
2059
2060 /* Mark this register as being born. If it is used in a CLOBBER, mark
2061 it as being born halfway between the previous insn and this insn so that
2062 it conflicts with our inputs but not the outputs of the previous insn. */
2063
2064 reg_is_born (reg, 2 * this_insn_number - (GET_CODE (setter) == CLOBBER));
2065 }
2066 \f
2067 /* Handle beginning of the life of register REG.
2068 BIRTH is the index at which this is happening. */
2069
2070 static void
2071 reg_is_born (reg, birth)
2072 rtx reg;
2073 int birth;
2074 {
2075 int regno;
2076
2077 if (GET_CODE (reg) == SUBREG)
2078 {
2079 regno = REGNO (SUBREG_REG (reg));
2080 if (regno < FIRST_PSEUDO_REGISTER)
2081 regno = subreg_hard_regno (reg, 1);
2082 }
2083 else
2084 regno = REGNO (reg);
2085
2086 if (regno < FIRST_PSEUDO_REGISTER)
2087 {
2088 mark_life (regno, GET_MODE (reg), 1);
2089
2090 /* If the register was to have been born earlier than the present
2091 insn, mark it as live where it is actually born. */
2092 if (birth < 2 * this_insn_number)
2093 post_mark_life (regno, GET_MODE (reg), 1, birth, 2 * this_insn_number);
2094 }
2095 else
2096 {
2097 if (reg_qty[regno] == -2)
2098 alloc_qty (regno, GET_MODE (reg), PSEUDO_REGNO_SIZE (regno), birth);
2099
2100 /* If this register has a quantity number, show that it isn't dead. */
2101 if (reg_qty[regno] >= 0)
2102 qty[reg_qty[regno]].death = -1;
2103 }
2104 }
2105
2106 /* Record the death of REG in the current insn. If OUTPUT_P is non-zero,
2107 REG is an output that is dying (i.e., it is never used), otherwise it
2108 is an input (the normal case).
2109 If OUTPUT_P is 1, then we extend the life past the end of this insn. */
2110
2111 static void
2112 wipe_dead_reg (reg, output_p)
2113 rtx reg;
2114 int output_p;
2115 {
2116 int regno = REGNO (reg);
2117
2118 /* If this insn has multiple results,
2119 and the dead reg is used in one of the results,
2120 extend its life to after this insn,
2121 so it won't get allocated together with any other result of this insn.
2122
2123 It is unsafe to use !single_set here since it will ignore an unused
2124 output. Just because an output is unused does not mean the compiler
2125 can assume the side effect will not occur. Consider if REG appears
2126 in the address of an output and we reload the output. If we allocate
2127 REG to the same hard register as an unused output we could set the hard
2128 register before the output reload insn. */
2129 if (GET_CODE (PATTERN (this_insn)) == PARALLEL
2130 && multiple_sets (this_insn))
2131 {
2132 int i;
2133 for (i = XVECLEN (PATTERN (this_insn), 0) - 1; i >= 0; i--)
2134 {
2135 rtx set = XVECEXP (PATTERN (this_insn), 0, i);
2136 if (GET_CODE (set) == SET
2137 && GET_CODE (SET_DEST (set)) != REG
2138 && !rtx_equal_p (reg, SET_DEST (set))
2139 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
2140 output_p = 1;
2141 }
2142 }
2143
2144 /* If this register is used in an auto-increment address, then extend its
2145 life to after this insn, so that it won't get allocated together with
2146 the result of this insn. */
2147 if (! output_p && find_regno_note (this_insn, REG_INC, regno))
2148 output_p = 1;
2149
2150 if (regno < FIRST_PSEUDO_REGISTER)
2151 {
2152 mark_life (regno, GET_MODE (reg), 0);
2153
2154 /* If a hard register is dying as an output, mark it as in use at
2155 the beginning of this insn (the above statement would cause this
2156 not to happen). */
2157 if (output_p)
2158 post_mark_life (regno, GET_MODE (reg), 1,
2159 2 * this_insn_number, 2 * this_insn_number + 1);
2160 }
2161
2162 else if (reg_qty[regno] >= 0)
2163 qty[reg_qty[regno]].death = 2 * this_insn_number + output_p;
2164 }
2165 \f
2166 /* Find a block of SIZE words of hard regs in reg_class CLASS
2167 that can hold something of machine-mode MODE
2168 (but actually we test only the first of the block for holding MODE)
2169 and still free between insn BORN_INDEX and insn DEAD_INDEX,
2170 and return the number of the first of them.
2171 Return -1 if such a block cannot be found.
2172 If QTYNO crosses calls, insist on a register preserved by calls,
2173 unless ACCEPT_CALL_CLOBBERED is nonzero.
2174
2175 If JUST_TRY_SUGGESTED is non-zero, only try to see if the suggested
2176 register is available. If not, return -1. */
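/* As an illustration (hypothetical 32-bit target): to place a DImode
   quantity needing two word-sized hard registers and living from
   index 10 to index 14, we need a REGNO in CLASS for which
   HARD_REGNO_MODE_OK (REGNO, DImode) holds and for which neither
   REGNO nor REGNO + 1 appears in regs_live_at[10] through
   regs_live_at[13]; the first such REGNO (following REG_ALLOC_ORDER
   if the target defines one) is returned and marked live over that
   range.  */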
2177
2178 static int
2179 find_free_reg (class, mode, qtyno, accept_call_clobbered, just_try_suggested,
2180 born_index, dead_index)
2181 enum reg_class class;
2182 enum machine_mode mode;
2183 int qtyno;
2184 int accept_call_clobbered;
2185 int just_try_suggested;
2186 int born_index, dead_index;
2187 {
2188 int i, ins;
2189 #ifdef HARD_REG_SET
2190 /* Declare it register if it's a scalar. */
2191 register
2192 #endif
2193 HARD_REG_SET used, first_used;
2194 #ifdef ELIMINABLE_REGS
2195 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
2196 #endif
2197
2198 /* Validate our parameters. */
2199 if (born_index < 0 || born_index > dead_index)
2200 abort ();
2201
2202 /* Don't let a pseudo live in a reg across a function call
2203 if we might get a nonlocal goto. */
2204 if (current_function_has_nonlocal_label
2205 && qty[qtyno].n_calls_crossed > 0)
2206 return -1;
2207
2208 if (accept_call_clobbered)
2209 COPY_HARD_REG_SET (used, call_fixed_reg_set);
2210 else if (qty[qtyno].n_calls_crossed == 0)
2211 COPY_HARD_REG_SET (used, fixed_reg_set);
2212 else
2213 COPY_HARD_REG_SET (used, call_used_reg_set);
2214
2215 if (accept_call_clobbered)
2216 IOR_HARD_REG_SET (used, losing_caller_save_reg_set);
2217
2218 for (ins = born_index; ins < dead_index; ins++)
2219 IOR_HARD_REG_SET (used, regs_live_at[ins]);
2220
2221 IOR_COMPL_HARD_REG_SET (used, reg_class_contents[(int) class]);
2222
2223 /* Don't use the frame pointer reg in local-alloc even if
2224 we may omit the frame pointer, because if we do that and then we
2225 need a frame pointer, reload won't know how to move the pseudo
2226 to another hard reg. It can move only regs made by global-alloc.
2227
2228 This is true of any register that can be eliminated. */
2229 #ifdef ELIMINABLE_REGS
2230 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2231 SET_HARD_REG_BIT (used, eliminables[i].from);
2232 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2233 /* If FRAME_POINTER_REGNUM is not a real register, then protect the one
2234 that it might be eliminated into. */
2235 SET_HARD_REG_BIT (used, HARD_FRAME_POINTER_REGNUM);
2236 #endif
2237 #else
2238 SET_HARD_REG_BIT (used, FRAME_POINTER_REGNUM);
2239 #endif
2240
2241 #ifdef CLASS_CANNOT_CHANGE_MODE
2242 if (qty[qtyno].changes_mode)
2243 IOR_HARD_REG_SET (used,
2244 reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE]);
2245 #endif
2246
2247 /* Normally, the registers that can be used for the first register in
2248 a multi-register quantity are the same as those that can be used for
2249 subsequent registers. However, if just trying suggested registers,
2250 restrict our consideration to them. If there are copy-suggested
2251 register, try them. Otherwise, try the arithmetic-suggested
2252 registers. */
2253 COPY_HARD_REG_SET (first_used, used);
2254
2255 if (just_try_suggested)
2256 {
2257 if (qty_phys_num_copy_sugg[qtyno] != 0)
2258 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_copy_sugg[qtyno]);
2259 else
2260 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_sugg[qtyno]);
2261 }
2262
2263 /* If all registers are excluded, we can't do anything. */
2264 GO_IF_HARD_REG_SUBSET (reg_class_contents[(int) ALL_REGS], first_used, fail);
2265
2266 /* If at least one would be suitable, test each hard reg. */
2267
2268 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2269 {
2270 #ifdef REG_ALLOC_ORDER
2271 int regno = reg_alloc_order[i];
2272 #else
2273 int regno = i;
2274 #endif
2275 if (! TEST_HARD_REG_BIT (first_used, regno)
2276 && HARD_REGNO_MODE_OK (regno, mode)
2277 && (qty[qtyno].n_calls_crossed == 0
2278 || accept_call_clobbered
2279 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
2280 {
2281 int j;
2282 int size1 = HARD_REGNO_NREGS (regno, mode);
2283 for (j = 1; j < size1 && ! TEST_HARD_REG_BIT (used, regno + j); j++);
2284 if (j == size1)
2285 {
2286 /* Mark that this register is in use between its birth and death
2287 insns. */
2288 post_mark_life (regno, mode, 1, born_index, dead_index);
2289 return regno;
2290 }
2291 #ifndef REG_ALLOC_ORDER
2292 /* Skip starting points we know will lose. */
2293 i += j;
2294 #endif
2295 }
2296 }
2297
2298 fail:
2299 /* If we are just trying suggested registers, we have now tried the
2300 copy-suggested registers; if there are also arithmetic-suggested
2301 registers, try them. */
2302
2303 /* If it would be profitable to allocate a call-clobbered register
2304 and save and restore it around calls, do that. */
2305 if (just_try_suggested && qty_phys_num_copy_sugg[qtyno] != 0
2306 && qty_phys_num_sugg[qtyno] != 0)
2307 {
2308 /* Don't try the copy-suggested regs again. */
2309 qty_phys_num_copy_sugg[qtyno] = 0;
2310 return find_free_reg (class, mode, qtyno, accept_call_clobbered, 1,
2311 born_index, dead_index);
2312 }
2313
2314 /* We need not check to see if the current function has nonlocal
2315 labels because we don't put any pseudos that are live over calls in
2316 registers in that case. */
2317
2318 if (! accept_call_clobbered
2319 && flag_caller_saves
2320 && ! just_try_suggested
2321 && qty[qtyno].n_calls_crossed != 0
2322 && CALLER_SAVE_PROFITABLE (qty[qtyno].n_refs,
2323 qty[qtyno].n_calls_crossed))
2324 {
2325 i = find_free_reg (class, mode, qtyno, 1, 0, born_index, dead_index);
2326 if (i >= 0)
2327 caller_save_needed = 1;
2328 return i;
2329 }
2330 return -1;
2331 }
2332 \f
2333 /* Mark that REGNO with machine-mode MODE is live starting from the current
2334 insn (if LIFE is non-zero) or dead starting at the current insn (if LIFE
2335 is zero). */
2336
2337 static void
2338 mark_life (regno, mode, life)
2339 int regno;
2340 enum machine_mode mode;
2341 int life;
2342 {
2343 int j = HARD_REGNO_NREGS (regno, mode);
2344 if (life)
2345 while (--j >= 0)
2346 SET_HARD_REG_BIT (regs_live, regno + j);
2347 else
2348 while (--j >= 0)
2349 CLEAR_HARD_REG_BIT (regs_live, regno + j);
2350 }
2351
2352 /* Mark register number REGNO (with machine-mode MODE) as live (if LIFE
2353 is non-zero) or dead (if LIFE is zero) from insn number BIRTH (inclusive)
2354 to insn number DEATH (exclusive). */
2355
2356 static void
2357 post_mark_life (regno, mode, life, birth, death)
2358 int regno;
2359 enum machine_mode mode;
2360 int life, birth, death;
2361 {
2362 int j = HARD_REGNO_NREGS (regno, mode);
2363 #ifdef HARD_REG_SET
2364 /* Declare it register if it's a scalar. */
2365 register
2366 #endif
2367 HARD_REG_SET this_reg;
2368
2369 CLEAR_HARD_REG_SET (this_reg);
2370 while (--j >= 0)
2371 SET_HARD_REG_BIT (this_reg, regno + j);
2372
2373 if (life)
2374 while (birth < death)
2375 {
2376 IOR_HARD_REG_SET (regs_live_at[birth], this_reg);
2377 birth++;
2378 }
2379 else
2380 while (birth < death)
2381 {
2382 AND_COMPL_HARD_REG_SET (regs_live_at[birth], this_reg);
2383 birth++;
2384 }
2385 }
2386 \f
2387 /* INSN is the CLOBBER insn that starts a REG_NO_CONFLICT block, R0
2388 is the register being clobbered, and R1 is a register being used in
2389 the equivalent expression.
2390
2391 If R1 dies in the block and has a REG_NO_CONFLICT note on every insn
2392 in which it is used, return 1.
2393
2394 Otherwise, return 0. */
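/* Schematically (hypothetical pseudo numbers), such a block looks like

       (clobber (reg:DI 100))                 ;; REG_LIBCALL -> LAST
       (set (subreg:SI (reg:DI 100) 0) ...)   ;; REG_NO_CONFLICT (reg 101)
       (set (subreg:SI (reg:DI 100) 4) ...)   ;; REG_NO_CONFLICT (reg 101)
       LAST

   where no_conflict_p is called with INSN being the initial CLOBBER,
   R0 the clobbered register (100) and R1 one of the inputs (101).  */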
2395
2396 static int
2397 no_conflict_p (insn, r0, r1)
2398 rtx insn, r0 ATTRIBUTE_UNUSED, r1;
2399 {
2400 int ok = 0;
2401 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
2402 rtx p, last;
2403
2404 /* If R1 is a hard register, return 0 since we handle this case
2405 when we scan the insns that actually use it. */
2406
2407 if (note == 0
2408 || (GET_CODE (r1) == REG && REGNO (r1) < FIRST_PSEUDO_REGISTER)
2409 || (GET_CODE (r1) == SUBREG && GET_CODE (SUBREG_REG (r1)) == REG
2410 && REGNO (SUBREG_REG (r1)) < FIRST_PSEUDO_REGISTER))
2411 return 0;
2412
2413 last = XEXP (note, 0);
2414
2415 for (p = NEXT_INSN (insn); p && p != last; p = NEXT_INSN (p))
2416 if (INSN_P (p))
2417 {
2418 if (find_reg_note (p, REG_DEAD, r1))
2419 ok = 1;
2420
2421 /* There must be a REG_NO_CONFLICT note on every insn, otherwise
2422 some earlier optimization pass has inserted instructions into
2423 the sequence, and it is not safe to perform this optimization.
2424 Note that emit_no_conflict_block always ensures that this is
2425 true when these sequences are created. */
2426 if (! find_reg_note (p, REG_NO_CONFLICT, r1))
2427 return 0;
2428 }
2429
2430 return ok;
2431 }
2432 \f
2433 /* Return the number of alternatives for which the constraint string P
2434 indicates that the operand must be equal to operand 0 and that no register
2435 is acceptable. */
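/* Worked examples (hypothetical constraint strings): "0" yields 1,
   since its only alternative requires matching operand 0 and accepts
   no register; "0,0" yields 2; "0,r" yields 1, because the second
   alternative accepts a register; and "0r" yields 0.  */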
2436
2437 static int
2438 requires_inout (p)
2439 const char *p;
2440 {
2441 char c;
2442 int found_zero = 0;
2443 int reg_allowed = 0;
2444 int num_matching_alts = 0;
2445
2446 while ((c = *p++))
2447 switch (c)
2448 {
2449 case '=': case '+': case '?':
2450 case '#': case '&': case '!':
2451 case '*': case '%':
2452 case 'm': case '<': case '>': case 'V': case 'o':
2453 case 'E': case 'F': case 'G': case 'H':
2454 case 's': case 'i': case 'n':
2455 case 'I': case 'J': case 'K': case 'L':
2456 case 'M': case 'N': case 'O': case 'P':
2457 case 'X':
2458 /* These don't say anything we care about. */
2459 break;
2460
2461 case ',':
2462 if (found_zero && ! reg_allowed)
2463 num_matching_alts++;
2464
2465 found_zero = reg_allowed = 0;
2466 break;
2467
2468 case '0':
2469 found_zero = 1;
2470 break;
2471
2472 case '1': case '2': case '3': case '4': case '5':
2473 case '6': case '7': case '8': case '9':
2474 /* Skip the balance of the matching constraint. */
2475 while (ISDIGIT (*p))
2476 p++;
2477 break;
2478
2479 default:
2480 if (REG_CLASS_FROM_LETTER (c) == NO_REGS)
2481 break;
2482 /* FALLTHRU */
2483 case 'p':
2484 case 'g': case 'r':
2485 reg_allowed = 1;
2486 break;
2487 }
2488
2489 if (found_zero && ! reg_allowed)
2490 num_matching_alts++;
2491
2492 return num_matching_alts;
2493 }
2494 \f
2495 void
2496 dump_local_alloc (file)
2497 FILE *file;
2498 {
2499 int i;
2500 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2501 if (reg_renumber[i] != -1)
2502 fprintf (file, ";; Register %d in %d.\n", i, reg_renumber[i]);
2503 }