1 /* Output routines for GCC for ARM/RISCiX.
2 Copyright (C) 1991, 1993, 1994 Free Software Foundation, Inc.
3 Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 and Martin Simmons (@harleqn.co.uk).
5 More major hacks by Richard Earnshaw (rwe11@cl.cam.ac.uk)
7 This file is part of GNU CC.
9 GNU CC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 2, or (at your option)
14 GNU CC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GNU CC; see the file COPYING. If not, write to
21 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
29 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-flags.h"
35 #include "insn-attr.h"
39 /* The maximum number of insns skipped which will be conditionalised if
41 #define MAX_INSNS_SKIPPED 5
43 /* Some function declarations. */
44 extern FILE *asm_out_file
;
45 extern char *output_multi_immediate ();
46 extern void arm_increase_location ();
48 HOST_WIDE_INT int_log2
PROTO ((HOST_WIDE_INT
));
49 static int get_prologue_size
PROTO ((void));
51 /* Define the information needed to generate branch insns. This is
52 stored from the compare operation. */
54 rtx arm_compare_op0
, arm_compare_op1
;
57 /* What type of cpu are we compiling for? */
59 enum processor_type arm_cpu
;
61 /* In case of a PRE_INC, POST_INC, PRE_DEC, POST_DEC memory reference, we
62 must report the mode of the memory reference from PRINT_OPERAND to
63 PRINT_OPERAND_ADDRESS. */
64 enum machine_mode output_memory_reference_mode
;
66 /* Nonzero if the prologue must setup `fp'. */
67 int current_function_anonymous_args
;
69 /* Location counter of .text segment. */
70 int arm_text_location
= 0;
72 /* Set to one if we think that lr is only saved because of subroutine calls,
73 but all of these can be `put after' return insns */
74 int lr_save_eliminated
;
76 /* A hash table is used to store text segment labels and their associated
77 offset from the start of the text segment. */
82 struct label_offset
*cdr
;
85 #define LABEL_HASH_SIZE 257
87 static struct label_offset
*offset_table
[LABEL_HASH_SIZE
];
89 /* Set to 1 when a return insn is output, this means that the epilogue
92 static int return_used_this_function
;
94 /* For an explanation of these variables, see final_prescan_insn below. */
100 /* The condition codes of the ARM, and the inverse function. */
101 char *arm_condition_codes
[] =
103 "eq", "ne", "cs", "cc", "mi", "pl", "vs", "vc",
104 "hi", "ls", "ge", "lt", "gt", "le", "al", "nv"
107 #define ARM_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
109 /* Return 1 if it is possible to return using a single instruction */
116 if (!reload_completed
||current_function_pretend_args_size
117 || current_function_anonymous_args
118 || (get_frame_size () && !(TARGET_APCS
|| frame_pointer_needed
)))
121 /* Can't be done if any of the FPU regs are pushed, since this also
123 for (regno
= 20; regno
< 24; regno
++)
124 if (regs_ever_live
[regno
])
130 /* Return TRUE if int I is a valid immediate ARM constant. */
136 unsigned HOST_WIDE_INT mask
= ~0xFF;
140 if ((i
& mask
& (unsigned HOST_WIDE_INT
) 0xffffffff) == 0)
143 (mask
<< 2) | ((mask
& (unsigned HOST_WIDE_INT
) 0xffffffff)
144 >> (32 - 2)) | ~((unsigned HOST_WIDE_INT
) 0xffffffff);
145 } while (mask
!= ~0xFF);
150 /* This code has been fixed for cross compilation. */
152 static int fpa_consts_inited
= 0;
154 char *strings_fpa
[8] = {
165 static REAL_VALUE_TYPE values_fpa
[8];
173 for (i
= 0; i
< 8; i
++)
175 r
= REAL_VALUE_ATOF (strings_fpa
[i
], DFmode
);
179 fpa_consts_inited
= 1;
182 /* Return TRUE if rtx X is a valid immediate FPU constant. */
185 const_double_rtx_ok_for_fpu (x
)
191 if (!fpa_consts_inited
)
194 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
195 if (REAL_VALUE_MINUS_ZERO (r
))
198 for (i
= 0; i
< 8; i
++)
199 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
205 /* Return TRUE if rtx X is a valid immediate FPU constant. */
208 neg_const_double_rtx_ok_for_fpu (x
)
214 if (!fpa_consts_inited
)
217 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
218 r
= REAL_VALUE_NEGATE (r
);
219 if (REAL_VALUE_MINUS_ZERO (r
))
222 for (i
= 0; i
< 8; i
++)
223 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
229 /* Predicates for `match_operand' and `match_operator'. */
231 /* s_register_operand is the same as register_operand, but it doesn't accept
232 (SUBREG (MEM)...). */
235 s_register_operand (op
, mode
)
237 enum machine_mode mode
;
239 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
242 if (GET_CODE (op
) == SUBREG
)
243 op
= SUBREG_REG (op
);
245 /* We don't consider registers whose class is NO_REGS
246 to be a register operand. */
247 return (GET_CODE (op
) == REG
248 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
249 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
252 /* Return 1 if OP is an item in memory, given that we are in reload. */
255 reload_memory_operand (op
, mode
)
257 enum machine_mode mode
;
259 int regno
= true_regnum (op
);
261 return (! CONSTANT_P (op
)
263 || (GET_CODE (op
) == REG
264 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)));
267 /* Return TRUE for valid operands for the rhs of an ARM instruction. */
270 arm_rhs_operand (op
, mode
)
272 enum machine_mode mode
;
274 return (s_register_operand (op
, mode
)
275 || (GET_CODE (op
) == CONST_INT
&& const_ok_for_arm (INTVAL (op
))));
278 /* Return TRUE for valid operands for the rhs of an ARM instruction, or a load.
282 arm_rhsm_operand (op
, mode
)
284 enum machine_mode mode
;
286 return (s_register_operand (op
, mode
)
287 || (GET_CODE (op
) == CONST_INT
&& const_ok_for_arm (INTVAL (op
)))
288 || memory_operand (op
, mode
));
291 /* Return TRUE for valid operands for the rhs of an ARM instruction, or if a
292 constant that is valid when negated. */
295 arm_add_operand (op
, mode
)
297 enum machine_mode mode
;
299 return (s_register_operand (op
, mode
)
300 || (GET_CODE (op
) == CONST_INT
301 && (const_ok_for_arm (INTVAL (op
))
302 || const_ok_for_arm (-INTVAL (op
)))));
306 arm_not_operand (op
, mode
)
308 enum machine_mode mode
;
310 return (s_register_operand (op
, mode
)
311 || (GET_CODE (op
) == CONST_INT
312 && (const_ok_for_arm (INTVAL (op
))
313 || const_ok_for_arm (~INTVAL (op
)))));
316 /* Return TRUE for valid operands for the rhs of an FPU instruction. */
319 fpu_rhs_operand (op
, mode
)
321 enum machine_mode mode
;
323 if (s_register_operand (op
, mode
))
325 else if (GET_CODE (op
) == CONST_DOUBLE
)
326 return (const_double_rtx_ok_for_fpu (op
));
332 fpu_add_operand (op
, mode
)
334 enum machine_mode mode
;
336 if (s_register_operand (op
, mode
))
338 else if (GET_CODE (op
) == CONST_DOUBLE
)
339 return (const_double_rtx_ok_for_fpu (op
)
340 || neg_const_double_rtx_ok_for_fpu (op
));
345 /* Return nonzero if OP is a constant power of two. */
348 power_of_two_operand (op
, mode
)
350 enum machine_mode mode
;
352 if (GET_CODE (op
) == CONST_INT
)
354 HOST_WIDE_INT value
= INTVAL(op
);
355 return value
!= 0 && (value
& (value
- 1)) == 0;
360 /* Return TRUE for a valid operand of a DImode operation.
361 Either: REG, CONST_DOUBLE or MEM(DImode_address).
362 Note that this disallows MEM(REG+REG), but allows
363 MEM(PRE/POST_INC/DEC(REG)). */
366 di_operand (op
, mode
)
368 enum machine_mode mode
;
370 if (s_register_operand (op
, mode
))
373 switch (GET_CODE (op
))
380 return memory_address_p (DImode
, XEXP (op
, 0));
387 /* Return TRUE for valid index operands. */
390 index_operand (op
, mode
)
392 enum machine_mode mode
;
394 return (s_register_operand(op
, mode
)
395 || (immediate_operand (op
, mode
)
396 && INTVAL (op
) < 4096 && INTVAL (op
) > -4096));
399 /* Return TRUE for valid shifts by a constant. This also accepts any
400 power of two on the (somewhat overly relaxed) assumption that the
401 shift operator in this case was a mult. */
404 const_shift_operand (op
, mode
)
406 enum machine_mode mode
;
408 return (power_of_two_operand (op
, mode
)
409 || (immediate_operand (op
, mode
)
410 && (INTVAL (op
) < 32 && INTVAL (op
) > 0)));
413 /* Return TRUE for arithmetic operators which can be combined with a multiply
417 shiftable_operator (x
, mode
)
419 enum machine_mode mode
;
421 if (GET_MODE (x
) != mode
)
425 enum rtx_code code
= GET_CODE (x
);
427 return (code
== PLUS
|| code
== MINUS
428 || code
== IOR
|| code
== XOR
|| code
== AND
);
432 /* Return TRUE for shift operators. */
435 shift_operator (x
, mode
)
437 enum machine_mode mode
;
439 if (GET_MODE (x
) != mode
)
443 enum rtx_code code
= GET_CODE (x
);
446 return power_of_two_operand (XEXP (x
, 1));
448 return (code
== ASHIFT
|| code
== ASHIFTRT
|| code
== LSHIFTRT
);
452 int equality_operator (x
, mode
)
454 enum machine_mode mode
;
456 return GET_CODE (x
) == EQ
|| GET_CODE (x
) == NE
;
459 /* Return TRUE for SMIN SMAX UMIN UMAX operators. */
462 minmax_operator (x
, mode
)
464 enum machine_mode mode
;
466 enum rtx_code code
= GET_CODE (x
);
468 if (GET_MODE (x
) != mode
)
471 return code
== SMIN
|| code
== SMAX
|| code
== UMIN
|| code
== UMAX
;
474 /* return TRUE if x is EQ or NE */
476 /* Return TRUE if this is the condition code register, if we aren't given
477 a mode, accept any class CCmode register */
480 cc_register (x
, mode
)
482 enum machine_mode mode
;
484 if (mode
== VOIDmode
)
487 if (GET_MODE_CLASS (mode
) != MODE_CC
)
491 if (mode
== GET_MODE (x
) && GET_CODE (x
) == REG
&& REGNO (x
) == 24)
501 enum rtx_code code
= GET_CODE (x
);
505 else if (code
== SMIN
)
507 else if (code
== UMIN
)
509 else if (code
== UMAX
)
515 /* Return 1 if memory locations are adjacent */
518 adjacent_mem_locations (a
, b
)
521 int val0
= 0, val1
= 0;
524 if ((GET_CODE (XEXP (a
, 0)) == REG
525 || (GET_CODE (XEXP (a
, 0)) == PLUS
526 && GET_CODE (XEXP (XEXP (a
, 0), 1)) == CONST_INT
))
527 && (GET_CODE (XEXP (b
, 0)) == REG
528 || (GET_CODE (XEXP (b
, 0)) == PLUS
529 && GET_CODE (XEXP (XEXP (b
, 0), 1)) == CONST_INT
)))
531 if (GET_CODE (XEXP (a
, 0)) == PLUS
)
533 reg0
= REGNO (XEXP (XEXP (a
, 0), 0));
534 val0
= INTVAL (XEXP (XEXP (a
, 0), 1));
537 reg0
= REGNO (XEXP (a
, 0));
538 if (GET_CODE (XEXP (b
, 0)) == PLUS
)
540 reg1
= REGNO (XEXP (XEXP (b
, 0), 0));
541 val1
= INTVAL (XEXP (XEXP (b
, 0), 1));
544 reg1
= REGNO (XEXP (b
, 0));
545 return (reg0
== reg1
) && ((val1
- val0
) == 4 || (val0
- val1
) == 4);
550 /* Return 1 if OP is a load multiple operation. It is known to be
551 parallel and the first section will be tested. */
554 load_multiple_operation (op
, mode
)
556 enum machine_mode mode
;
558 HOST_WIDE_INT count
= XVECLEN (op
, 0);
561 HOST_WIDE_INT i
= 1, base
= 0;
565 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
)
568 /* Check to see if this might be a write-back */
569 if (GET_CODE (SET_SRC (elt
= XVECEXP (op
, 0, 0))) == PLUS
)
574 /* Now check it more carefully */
575 if (GET_CODE (SET_DEST (elt
)) != REG
576 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != REG
577 || REGNO (XEXP (SET_SRC (elt
), 0)) != REGNO (SET_DEST (elt
))
578 || GET_CODE (XEXP (SET_SRC (elt
), 1)) != CONST_INT
579 || INTVAL (XEXP (SET_SRC (elt
), 1)) != (count
- 2) * 4
580 || GET_CODE (XVECEXP (op
, 0, count
- 1)) != CLOBBER
581 || GET_CODE (XEXP (XVECEXP (op
, 0, count
- 1), 0)) != REG
582 || REGNO (XEXP (XVECEXP (op
, 0, count
- 1), 0))
583 != REGNO (SET_DEST (elt
)))
589 /* Perform a quick check so we don't blow up below. */
591 || GET_CODE (XVECEXP (op
, 0, i
- 1)) != SET
592 || GET_CODE (SET_DEST (XVECEXP (op
, 0, i
- 1))) != REG
593 || GET_CODE (SET_SRC (XVECEXP (op
, 0, i
- 1))) != MEM
)
596 dest_regno
= REGNO (SET_DEST (XVECEXP (op
, 0, i
- 1)));
597 src_addr
= XEXP (SET_SRC (XVECEXP (op
, 0, i
- 1)), 0);
599 for (; i
< count
; i
++)
601 rtx elt
= XVECEXP (op
, 0, i
);
603 if (GET_CODE (elt
) != SET
604 || GET_CODE (SET_DEST (elt
)) != REG
605 || GET_MODE (SET_DEST (elt
)) != SImode
606 || REGNO (SET_DEST (elt
)) != dest_regno
+ i
- base
607 || GET_CODE (SET_SRC (elt
)) != MEM
608 || GET_MODE (SET_SRC (elt
)) != SImode
609 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != PLUS
610 || ! rtx_equal_p (XEXP (XEXP (SET_SRC (elt
), 0), 0), src_addr
)
611 || GET_CODE (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != CONST_INT
612 || INTVAL (XEXP (XEXP (SET_SRC (elt
), 0), 1)) != (i
- base
) * 4)
619 /* Return 1 if OP is a store multiple operation. It is known to be
620 parallel and the first section will be tested. */
623 store_multiple_operation (op
, mode
)
625 enum machine_mode mode
;
627 HOST_WIDE_INT count
= XVECLEN (op
, 0);
630 HOST_WIDE_INT i
= 1, base
= 0;
634 || GET_CODE (XVECEXP (op
, 0, 0)) != SET
)
637 /* Check to see if this might be a write-back */
638 if (GET_CODE (SET_SRC (elt
= XVECEXP (op
, 0, 0))) == PLUS
)
643 /* Now check it more carefully */
644 if (GET_CODE (SET_DEST (elt
)) != REG
645 || GET_CODE (XEXP (SET_SRC (elt
), 0)) != REG
646 || REGNO (XEXP (SET_SRC (elt
), 0)) != REGNO (SET_DEST (elt
))
647 || GET_CODE (XEXP (SET_SRC (elt
), 1)) != CONST_INT
648 || INTVAL (XEXP (SET_SRC (elt
), 1)) != (count
- 2) * 4
649 || GET_CODE (XVECEXP (op
, 0, count
- 1)) != CLOBBER
650 || GET_CODE (XEXP (XVECEXP (op
, 0, count
- 1), 0)) != REG
651 || REGNO (XEXP (XVECEXP (op
, 0, count
- 1), 0))
652 != REGNO (SET_DEST (elt
)))
658 /* Perform a quick check so we don't blow up below. */
660 || GET_CODE (XVECEXP (op
, 0, i
- 1)) != SET
661 || GET_CODE (SET_DEST (XVECEXP (op
, 0, i
- 1))) != MEM
662 || GET_CODE (SET_SRC (XVECEXP (op
, 0, i
- 1))) != REG
)
665 src_regno
= REGNO (SET_SRC (XVECEXP (op
, 0, i
- 1)));
666 dest_addr
= XEXP (SET_DEST (XVECEXP (op
, 0, i
- 1)), 0);
668 for (; i
< count
; i
++)
670 elt
= XVECEXP (op
, 0, i
);
672 if (GET_CODE (elt
) != SET
673 || GET_CODE (SET_SRC (elt
)) != REG
674 || GET_MODE (SET_SRC (elt
)) != SImode
675 || REGNO (SET_SRC (elt
)) != src_regno
+ i
- base
676 || GET_CODE (SET_DEST (elt
)) != MEM
677 || GET_MODE (SET_DEST (elt
)) != SImode
678 || GET_CODE (XEXP (SET_DEST (elt
), 0)) != PLUS
679 || ! rtx_equal_p (XEXP (XEXP (SET_DEST (elt
), 0), 0), dest_addr
)
680 || GET_CODE (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != CONST_INT
681 || INTVAL (XEXP (XEXP (SET_DEST (elt
), 0), 1)) != (i
- base
) * 4)
688 /* Routines for use with attributes */
691 const_pool_offset (symbol
)
694 return get_pool_offset (symbol
) - get_pool_size () - get_prologue_size ();
697 /* Routines for use in generating RTL */
700 arm_gen_load_multiple (base_regno
, count
, from
, up
, write_back
)
709 int sign
= up
? 1 : -1;
711 result
= gen_rtx (PARALLEL
, VOIDmode
,
712 rtvec_alloc (count
+ (write_back
? 2 : 0)));
715 XVECEXP (result
, 0, 0)
716 = gen_rtx (SET
, GET_MODE (from
), from
,
717 plus_constant (from
, count
* 4 * sign
));
722 for (j
= 0; i
< count
; i
++, j
++)
724 XVECEXP (result
, 0, i
)
725 = gen_rtx (SET
, VOIDmode
, gen_rtx (REG
, SImode
, base_regno
+ j
),
726 gen_rtx (MEM
, SImode
,
727 plus_constant (from
, j
* 4 * sign
)));
731 XVECEXP (result
, 0, i
) = gen_rtx (CLOBBER
, SImode
, from
);
737 arm_gen_store_multiple (base_regno
, count
, to
, up
, write_back
)
746 int sign
= up
? 1 : -1;
748 result
= gen_rtx (PARALLEL
, VOIDmode
,
749 rtvec_alloc (count
+ (write_back
? 2 : 0)));
752 XVECEXP (result
, 0, 0)
753 = gen_rtx (SET
, GET_MODE (to
), to
,
754 plus_constant (to
, count
* 4 * sign
));
759 for (j
= 0; i
< count
; i
++, j
++)
761 XVECEXP (result
, 0, i
)
762 = gen_rtx (SET
, VOIDmode
,
763 gen_rtx (MEM
, SImode
, plus_constant (to
, j
* 4 * sign
)),
764 gen_rtx (REG
, SImode
, base_regno
+ j
));
768 XVECEXP (result
, 0, i
) = gen_rtx (CLOBBER
, SImode
, to
);
773 /* X and Y are two things to compare using CODE. Emit the compare insn and
774 return the rtx for register 0 in the proper mode. FP means this is a
775 floating point compare: I don't think that it is needed on the arm. */
778 gen_compare_reg (code
, x
, y
, fp
)
782 enum machine_mode mode
= SELECT_CC_MODE (code
, x
, y
);
783 rtx cc_reg
= gen_rtx (REG
, mode
, 24);
785 emit_insn (gen_rtx (SET
, VOIDmode
, cc_reg
,
786 gen_rtx (COMPARE
, mode
, x
, y
)));
792 arm_reload_out_hi (operands
)
795 rtx base
= find_replacement (&XEXP (operands
[0], 0));
797 emit_insn (gen_rtx (SET
, VOIDmode
,
798 gen_rtx (MEM
, QImode
, base
),
799 gen_rtx (SUBREG
, QImode
, operands
[1], 0)));
800 emit_insn (gen_rtx (SET
, VOIDmode
, operands
[2],
801 gen_rtx (LSHIFTRT
, SImode
,
802 gen_rtx (SUBREG
, SImode
, operands
[1], 0),
804 emit_insn (gen_rtx (SET
, VOIDmode
,
805 gen_rtx (MEM
, QImode
,
806 plus_constant (base
, 1)),
807 gen_rtx (SUBREG
, QImode
, operands
[2], 0)));
810 /* Check to see if a branch is forwards or backwards. Return TRUE if it
814 arm_backwards_branch (from
, to
)
817 return insn_addresses
[to
] <= insn_addresses
[from
];
820 /* Check to see if a branch is within the distance that can be done using
821 an arithmetic expression. */
823 short_branch (from
, to
)
826 int delta
= insn_addresses
[from
] + 8 - insn_addresses
[to
];
828 return abs (delta
) < 980; /* A small margin for safety */
831 /* Check to see that the insn isn't the target of the conditionalizing
834 arm_insn_not_targeted (insn
)
837 return insn
!= arm_target_insn
;
841 /* Routines to output assembly language. */
843 /* If the rtx is the correct value then return the string of the number.
844 In this way we can ensure that valid double constants are generated even
845 when cross compiling. */
847 fp_immediate_constant (x
)
853 if (!fpa_consts_inited
)
856 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
857 for (i
= 0; i
< 8; i
++)
858 if (REAL_VALUES_EQUAL (r
, values_fpa
[i
]))
859 return strings_fpa
[i
];
864 /* As for fp_immediate_constant, but value is passed directly, not in rtx. */
866 fp_const_from_val (r
)
871 if (! fpa_consts_inited
)
874 for (i
= 0; i
< 8; i
++)
875 if (REAL_VALUES_EQUAL (*r
, values_fpa
[i
]))
876 return strings_fpa
[i
];
881 /* Output the operands of a LDM/STM instruction to STREAM.
882 MASK is the ARM register set mask of which only bits 0-15 are important.
883 INSTR is the possibly suffixed base register. HAT unequals zero if a hat
884 must follow the register list. */
887 print_multi_reg (stream
, instr
, mask
, hat
)
893 int not_first
= FALSE
;
895 fputc ('\t', stream
);
896 fprintf (stream
, instr
, ARM_REG_PREFIX
);
897 fputs (", {", stream
);
898 for (i
= 0; i
< 16; i
++)
902 fprintf (stream
, ", ");
903 fprintf (stream
, "%s%s", ARM_REG_PREFIX
, reg_names
[i
]);
907 fprintf (stream
, "}%s\n", hat
? "^" : "");
910 /* Output a 'call' insn. */
913 output_call (operands
)
916 /* Handle calls to lr using ip (which may be clobbered in subr anyway). */
918 if (REGNO (operands
[0]) == 14)
920 operands
[0] = gen_rtx (REG
, SImode
, 12);
921 output_asm_insn ("mov%?\t%0, %|lr", operands
);
923 output_asm_insn ("mov%?\t%|lr, %|pc", operands
);
924 output_asm_insn ("mov%?\t%|pc, %0", operands
);
932 int something_changed
= 0;
934 int code
= GET_CODE (x0
);
941 if (REGNO (x0
) == 14)
943 *x
= gen_rtx (REG
, SImode
, 12);
948 /* Scan through the sub-elements and change any references there */
949 fmt
= GET_RTX_FORMAT (code
);
950 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
952 something_changed
|= eliminate_lr2ip (&XEXP (x0
, i
));
953 else if (fmt
[i
] == 'E')
954 for (j
= 0; j
< XVECLEN (x0
, i
); j
++)
955 something_changed
|= eliminate_lr2ip (&XVECEXP (x0
, i
, j
));
956 return something_changed
;
960 /* Output a 'call' insn that is a reference in memory. */
963 output_call_mem (operands
)
966 operands
[0] = copy_rtx (operands
[0]); /* Be ultra careful */
967 /* Handle calls using lr by using ip (which may be clobbered in subr anyway).
969 if (eliminate_lr2ip (&operands
[0]))
970 output_asm_insn ("mov%?\t%|ip, %|lr", operands
);
972 output_asm_insn ("mov%?\t%|lr, %|pc", operands
);
973 output_asm_insn ("ldr%?\t%|pc, %0", operands
);
978 /* Output a move from arm registers to an fpu registers.
979 OPERANDS[0] is an fpu register.
980 OPERANDS[1] is the first registers of an arm register pair. */
983 output_mov_long_double_fpu_from_arm (operands
)
986 int arm_reg0
= REGNO (operands
[1]);
992 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
993 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
994 ops
[2] = gen_rtx (REG
, SImode
, 2 + arm_reg0
);
996 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1, %2}", ops
);
997 output_asm_insn ("ldf%?e\t%0, [%|sp], #12", operands
);
1001 /* Output a move from an fpu register to arm registers.
1002 OPERANDS[0] is the first registers of an arm register pair.
1003 OPERANDS[1] is an fpu register. */
1006 output_mov_long_double_arm_from_fpu (operands
)
1009 int arm_reg0
= REGNO (operands
[0]);
1015 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
1016 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
1017 ops
[2] = gen_rtx (REG
, SImode
, 2 + arm_reg0
);
1019 output_asm_insn ("stf%?e\t%1, [%|sp, #-12]!", operands
);
1020 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1, %2}", ops
);
1024 /* Output a move from arm registers to arm registers of a long double
1025 OPERANDS[0] is the destination.
1026 OPERANDS[1] is the source. */
1028 output_mov_long_double_arm_from_arm (operands
)
1031 /* We have to be careful here because the two might overlap */
1032 int dest_start
= REGNO (operands
[0]);
1033 int src_start
= REGNO (operands
[1]);
1037 if (dest_start
< src_start
)
1039 for (i
= 0; i
< 3; i
++)
1041 ops
[0] = gen_rtx (REG
, SImode
, dest_start
+ i
);
1042 ops
[1] = gen_rtx (REG
, SImode
, src_start
+ i
);
1043 output_asm_insn ("mov%?\t%0, %1", ops
);
1048 for (i
= 2; i
>= 0; i
--)
1050 ops
[0] = gen_rtx (REG
, SImode
, dest_start
+ i
);
1051 ops
[1] = gen_rtx (REG
, SImode
, src_start
+ i
);
1052 output_asm_insn ("mov%?\t%0, %1", ops
);
1060 /* Output a move from arm registers to an fpu registers.
1061 OPERANDS[0] is an fpu register.
1062 OPERANDS[1] is the first registers of an arm register pair. */
1065 output_mov_double_fpu_from_arm (operands
)
1068 int arm_reg0
= REGNO (operands
[1]);
1073 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
1074 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
1075 output_asm_insn ("stm%?fd\t%|sp!, {%0, %1}", ops
);
1076 output_asm_insn ("ldf%?d\t%0, [%|sp], #8", operands
);
1080 /* Output a move from an fpu register to arm registers.
1081 OPERANDS[0] is the first registers of an arm register pair.
1082 OPERANDS[1] is an fpu register. */
1085 output_mov_double_arm_from_fpu (operands
)
1088 int arm_reg0
= REGNO (operands
[0]);
1094 ops
[0] = gen_rtx (REG
, SImode
, arm_reg0
);
1095 ops
[1] = gen_rtx (REG
, SImode
, 1 + arm_reg0
);
1096 output_asm_insn ("stf%?d\t%1, [%|sp, #-8]!", operands
);
1097 output_asm_insn ("ldm%?fd\t%|sp!, {%0, %1}", ops
);
1101 /* Output a move between double words.
1102 It must be REG<-REG, REG<-CONST_DOUBLE, REG<-CONST_INT, REG<-MEM
1103 or MEM<-REG and all MEMs must be offsettable addresses. */
1106 output_move_double (operands
)
1109 enum rtx_code code0
= GET_CODE (operands
[0]);
1110 enum rtx_code code1
= GET_CODE (operands
[1]);
1115 int reg0
= REGNO (operands
[0]);
1117 otherops
[0] = gen_rtx (REG
, SImode
, 1 + reg0
);
1120 int reg1
= REGNO (operands
[1]);
1124 otherops
[1] = gen_rtx (REG
, SImode
, 1 + reg1
);
1126 /* Ensure the second source is not overwritten */
1127 if (reg0
== 1 + reg1
)
1129 output_asm_insn("mov%?\t%0, %1", otherops
);
1130 output_asm_insn("mov%?\t%0, %1", operands
);
1134 output_asm_insn("mov%?\t%0, %1", operands
);
1135 output_asm_insn("mov%?\t%0, %1", otherops
);
1138 else if (code1
== CONST_DOUBLE
)
1140 otherops
[1] = gen_rtx (CONST_INT
, VOIDmode
,
1141 CONST_DOUBLE_HIGH (operands
[1]));
1142 operands
[1] = gen_rtx (CONST_INT
, VOIDmode
,
1143 CONST_DOUBLE_LOW (operands
[1]));
1144 output_mov_immediate (operands
, FALSE
, "");
1145 output_mov_immediate (otherops
, FALSE
, "");
1147 else if (code1
== CONST_INT
)
1149 otherops
[1] = const0_rtx
;
1150 /* sign extend the intval into the high-order word */
1151 /* Note: output_mov_immediate may clobber operands[1], so we
1152 put this out first */
1153 if (INTVAL (operands
[1]) < 0)
1154 output_asm_insn ("mvn%?\t%0, %1", otherops
);
1156 output_asm_insn ("mov%?\t%0, %1", otherops
);
1157 output_mov_immediate (operands
, FALSE
, "");
1159 else if (code1
== MEM
)
1161 switch (GET_CODE (XEXP (operands
[1], 0)))
1164 /* Handle the simple case where address is [r, #0] more
1166 output_asm_insn ("ldm%?ia\t%m1, %M0", operands
);
1169 output_asm_insn ("add%?\t%m1, %m1, #8", operands
);
1170 output_asm_insn ("ldm%?ia\t%m1, %M0", operands
);
1173 output_asm_insn ("sub%?\t%m1, %m1, #8", operands
);
1174 output_asm_insn ("ldm%?ia\t%m1, %M0", operands
);
1177 output_asm_insn ("ldm%?ia\t%m1!, %M0", operands
);
1180 output_asm_insn ("ldm%?ia\t%m1, %M0", operands
);
1181 output_asm_insn ("sub%?\t%m1, %m1, #8", operands
);
1184 otherops
[1] = adj_offsettable_operand (operands
[1], 4);
1185 /* Take care of overlapping base/data reg. */
1186 if (reg_mentioned_p (operands
[0], operands
[1]))
1188 output_asm_insn ("ldr%?\t%0, %1", otherops
);
1189 output_asm_insn ("ldr%?\t%0, %1", operands
);
1193 output_asm_insn ("ldr%?\t%0, %1", operands
);
1194 output_asm_insn ("ldr%?\t%0, %1", otherops
);
1198 else abort(); /* Constraints should prevent this */
1200 else if (code0
== MEM
&& code1
== REG
)
1202 if (REGNO (operands
[1]) == 12)
1204 switch (GET_CODE (XEXP (operands
[0], 0)))
1207 output_asm_insn ("stm%?ia\t%m0, %M1", operands
);
1210 output_asm_insn ("add%?\t%m0, %m0, #8", operands
);
1211 output_asm_insn ("stm%?ia\t%m0, %M1", operands
);
1214 output_asm_insn ("sub%?\t%m0, %m0, #8", operands
);
1215 output_asm_insn ("stm%?ia\t%m0, %M1", operands
);
1218 output_asm_insn ("stm%?ia\t%m0!, %M1", operands
);
1221 output_asm_insn ("stm%?ia\t%m0, %M1", operands
);
1222 output_asm_insn ("sub%?\t%m0, %m0, #8", operands
);
1225 otherops
[0] = adj_offsettable_operand (operands
[0], 4);
1226 otherops
[1] = gen_rtx (REG
, SImode
, 1 + REGNO (operands
[1]));
1227 output_asm_insn ("str%?\t%1, %0", operands
);
1228 output_asm_insn ("str%?\t%1, %0", otherops
);
1231 else abort(); /* Constraints should prevent this */
1237 /* Output an arbitrary MOV reg, #n.
1238 OPERANDS[0] is a register. OPERANDS[1] is a const_int. */
1241 output_mov_immediate (operands
)
1244 HOST_WIDE_INT n
= INTVAL (operands
[1]);
1248 /* Try to use one MOV */
1249 if (const_ok_for_arm (n
))
1251 output_asm_insn ("mov%?\t%0, %1", operands
);
1255 /* Try to use one MVN */
1256 if (const_ok_for_arm (~n
))
1258 operands
[1] = GEN_INT (~n
);
1259 output_asm_insn ("mvn%?\t%0, %1", operands
);
1263 /* If all else fails, make it out of ORRs or BICs as appropriate. */
1265 for (i
=0; i
< 32; i
++)
1269 if (n_ones
> 16) /* Shorter to use MVN with BIC in this case. */
1270 output_multi_immediate(operands
, "mvn%?\t%0, %1", "bic%?\t%0, %0, %1", 1,
1273 output_multi_immediate(operands
, "mov%?\t%0, %1", "orr%?\t%0, %0, %1", 1,
1280 /* Output an ADD r, s, #n where n may be too big for one instruction. If
1281 adding zero to one register, output nothing. */
1284 output_add_immediate (operands
)
1287 HOST_WIDE_INT n
= INTVAL (operands
[2]);
1289 if (n
!= 0 || REGNO (operands
[0]) != REGNO (operands
[1]))
1292 output_multi_immediate (operands
,
1293 "sub%?\t%0, %1, %2", "sub%?\t%0, %0, %2", 2,
1296 output_multi_immediate (operands
,
1297 "add%?\t%0, %1, %2", "add%?\t%0, %0, %2", 2,
1304 /* Output a multiple immediate operation.
1305 OPERANDS is the vector of operands referred to in the output patterns.
1306 INSTR1 is the output pattern to use for the first constant.
1307 INSTR2 is the output pattern to use for subsequent constants.
1308 IMMED_OP is the index of the constant slot in OPERANDS.
1309 N is the constant value. */
1312 output_multi_immediate (operands
, instr1
, instr2
, immed_op
, n
)
1314 char *instr1
, *instr2
;
1318 #if HOST_BITS_PER_WIDE_INT > 32
1324 operands
[immed_op
] = const0_rtx
;
1325 output_asm_insn (instr1
, operands
); /* Quick and easy output */
1330 char *instr
= instr1
;
1332 /* Note that n is never zero here (which would give no output) */
1333 for (i
= 0; i
< 32; i
+= 2)
1337 operands
[immed_op
] = GEN_INT (n
& (255 << i
));
1338 output_asm_insn (instr
, operands
);
1348 /* Return the appropriate ARM instruction for the operation code.
1349 The returned result should not be overwritten. OP is the rtx of the
1350 operation. SHIFT_FIRST_ARG is TRUE if the first argument of the operator
1354 arithmetic_instr (op
, shift_first_arg
)
1356 int shift_first_arg
;
1358 switch (GET_CODE (op
))
1364 return shift_first_arg
? "rsb" : "sub";
1381 /* Ensure valid constant shifts and return the appropriate shift mnemonic
1382 for the operation code. The returned result should not be overwritten.
1383 OP is the rtx code of the shift.
1384 On exit, *AMOUNTP will be -1 if the shift is by a register, or a constant
1388 shift_op (op
, amountp
)
1390 HOST_WIDE_INT
*amountp
;
1396 if (GET_CODE (XEXP (op
, 1)) == REG
|| GET_CODE (XEXP (op
, 1)) == SUBREG
)
1398 else if (GET_CODE (XEXP (op
, 1)) == CONST_INT
)
1399 *amountp
= INTVAL (XEXP (op
, 1));
1403 switch (GET_CODE (op
))
1426 *amountp
= int_log2 (*amountp
);
1436 && (*amountp
< min_shift
|| *amountp
> max_shift
))
1442 /* Obtain the shift from the POWER of two. */
1446 HOST_WIDE_INT power
;
1448 HOST_WIDE_INT shift
= 0;
1450 while (((1 << shift
) & power
) == 0)
1460 /* Output a .ascii pseudo-op, keeping track of lengths. This is because
1461 /bin/as is horribly restrictive. */
1464 output_ascii_pseudo_op (stream
, p
, len
)
1470 int len_so_far
= 1000;
1471 int chars_so_far
= 0;
1473 for (i
= 0; i
< len
; i
++)
1475 register int c
= p
[i
];
1477 if (len_so_far
> 50)
1480 fputs ("\"\n", stream
);
1481 fputs ("\t.ascii\t\"", stream
);
1483 arm_increase_location (chars_so_far
);
1487 if (c
== '\"' || c
== '\\')
1493 if (c
>= ' ' && c
< 0177)
1500 fprintf (stream
, "\\%03o", c
);
1507 fputs ("\"\n", stream
);
1508 arm_increase_location (chars_so_far
);
1512 /* Try to determine whether a pattern really clobbers the link register.
1513 This information is useful when peepholing, so that lr need not be pushed
1514 if we combine a call followed by a return.
1515 NOTE: This code does not check for side-effect expressions in a SET_SRC:
1516 such a check should not be needed because these only update an existing
1517 value within a register; the register must still be set elsewhere within
1521 pattern_really_clobbers_lr (x
)
1526 switch (GET_CODE (x
))
1529 switch (GET_CODE (SET_DEST (x
)))
1532 return REGNO (SET_DEST (x
)) == 14;
1535 if (GET_CODE (XEXP (SET_DEST (x
), 0)) == REG
)
1536 return REGNO (XEXP (SET_DEST (x
), 0)) == 14;
1538 if (GET_CODE (XEXP (SET_DEST (x
), 0)) == MEM
)
1547 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
1548 if (pattern_really_clobbers_lr (XVECEXP (x
, 0, i
)))
1553 switch (GET_CODE (XEXP (x
, 0)))
1556 return REGNO (XEXP (x
, 0)) == 14;
1559 if (GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
)
1560 return REGNO (XEXP (XEXP (x
, 0), 0)) == 14;
/* NOTE(review): damaged extraction -- statements are split across lines
   and several original lines are missing.  Annotations below describe
   only the visible logic.
   Scans every insn from FIRST onward and decides whether the function
   clobbers lr in a way that forces the prologue to save it.  */
1576 function_really_clobbers_lr (first
)
1581 for (insn
= first
; insn
; insn
= next_nonnote_insn (insn
))
1583 switch (GET_CODE (insn
))
1588 case JUMP_INSN
: /* Jump insns only change the PC (and conds) */
/* Ordinary insns: ask the pattern-level test above.  */
1593 if (pattern_really_clobbers_lr (PATTERN (insn
)))
1598 /* Don't yet know how to handle those calls that are not to a
1600 if (GET_CODE (PATTERN (insn
)) != PARALLEL
)
/* Call insns: inspect element 0 of the PARALLEL to classify the call.  */
1603 switch (GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)))
1606 if (GET_CODE (XEXP (XEXP (XVECEXP (PATTERN (insn
), 0, 0), 0), 0))
1612 if (GET_CODE (XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (insn
),
1618 default: /* Don't recognize it, be safe */
1622 /* A call can be made (by peepholing) not to clobber lr iff it is
1623 followed by a return. There may, however, be a use insn iff
1624 we are returning the result of the call.
1625 If we run off the end of the insn chain, then that means the
1626 call was at the end of the function. Unfortunately we don't
1627 have a return insn for the peephole to recognize, so we
1628 must reject this. (Can this be fixed by adding our own insn?) */
1629 if ((next
= next_nonnote_insn (insn
)) == NULL
)
/* Allow one USE of the call's result register between call and return.  */
1632 if (GET_CODE (next
) == INSN
&& GET_CODE (PATTERN (next
)) == USE
1633 && (GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
1634 && (REGNO (SET_DEST (XVECEXP (PATTERN (insn
), 0, 0)))
1635 == REGNO (XEXP (PATTERN (next
), 0))))
1636 if ((next
= next_nonnote_insn (next
)) == NULL
)
/* The call is "safe" only when the very next real insn is a RETURN.  */
1639 if (GET_CODE (next
) == JUMP_INSN
1640 && GET_CODE (PATTERN (next
)) == RETURN
)
1649 /* We have reached the end of the chain so lr was _not_ clobbered */
/* NOTE(review): damaged extraction -- statements split across lines and
   some original lines missing; comments annotate visible logic only.
   Emits the function-return sequence: either a single ldm multiple-load
   that restores the saved call-preserved registers (popping into pc when
   REALLY_RETURN), or a plain "mov pc, lr" when nothing was saved.  */
1654 output_return_instruction (operand
, really_return
)
1659 int reg
, live_regs
= 0;
/* An alloca-using frame cannot be unwound by this simple sequence.  */
1661 if (current_function_calls_alloca
&& ! really_return
)
/* Count the call-saved integer registers (r0-r10) that are live.  */
1664 for (reg
= 0; reg
<= 10; reg
++)
1665 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
/* Something was pushed in the prologue, so pop it back with ldm.  */
1668 if (live_regs
|| (regs_ever_live
[14] && ! lr_save_eliminated
))
1671 if (frame_pointer_needed
)
1676 if (lr_save_eliminated
|| ! regs_ever_live
[14])
/* ldm..ea from fp when a frame exists, ldm..fd from sp! otherwise.  */
1679 if (frame_pointer_needed
)
1680 strcpy (instr
, "ldm%?%d0ea\t%|fp, {");
1682 strcpy (instr
, "ldm%?%d0fd\t%|sp!, {");
/* Append each live call-saved register to the register list.  */
1684 for (reg
= 0; reg
<= 10; reg
++)
1685 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1687 strcat (instr
, "%|");
1688 strcat (instr
, reg_names
[reg
]);
1690 strcat (instr
, ", ");
/* With a frame pointer also restore fp (r11) and sp (r13), then pop
   into pc (r15) for a real return, or back into lr (r14) otherwise.  */
1693 if (frame_pointer_needed
)
1695 strcat (instr
, "%|");
1696 strcat (instr
, reg_names
[11]);
1697 strcat (instr
, ", ");
1698 strcat (instr
, "%|");
1699 strcat (instr
, reg_names
[13]);
1700 strcat (instr
, ", ");
1701 strcat (instr
, "%|");
1702 strcat (instr
, really_return
? reg_names
[15] : reg_names
[14]);
1706 strcat (instr
, "%|");
1707 strcat (instr
, really_return
? reg_names
[15] : reg_names
[14]);
/* "}^" restores the PSR with pc on 26-bit ARMs; plain "}" on TARGET_6
   or when we are not actually returning.  */
1709 strcat (instr
, (TARGET_6
|| !really_return
) ? "}" : "}^");
1710 output_asm_insn (instr
, &operand
);
/* Nothing was saved: a conditional "mov pc, lr" suffices ("movs" on
   non-TARGET_6 to restore the condition flags held in pc).  */
1712 else if (really_return
)
1715 TARGET_6
? "mov%?%d0\t%|pc, lr" : "mov%?%d0s\t%|pc, %|lr");
1716 output_asm_insn (instr
, &operand
);
/* Record that the epilogue can be skipped for this function.  */
1719 return_used_this_function
= 1;
/* NOTE(review): damaged extraction -- the size-accumulation statements
   between the visible lines were dropped; comments annotate the visible
   structure only.
   Estimates the byte size of the prologue emitted by
   output_func_prologue, used for pc-relative address calculations.  */
1723 /* Return the size of the prologue. It's not too bad if we slightly
1727 get_prologue_size ()
1732 /* Until we know which registers are really used return the maximum. */
1733 if (! reload_completed
)
1736 /* Look for integer regs that have to be saved. */
1737 for (regno
= 0; regno
< 15; regno
++)
1738 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
1744 /* Clobbering lr when none of the other regs have been saved also requires
1746 if (regs_ever_live
[14])
1749 /* If we need to push a stack frame then there is an extra instruction to
1750 preserve the current value of the stack pointer. */
1751 if (frame_pointer_needed
)
1754 /* Now look for floating-point regs that need saving. We need an
1755 instruction per register. */
1756 for (regno
= 16; regno
< 24; regno
++)
1757 if (regs_ever_live
[regno
] && ! call_used_regs
[regno
])
/* varargs/stdarg functions get extra insns to dump the arg registers.  */
1760 if (current_function_anonymous_args
&& current_function_pretend_args_size
)
/* NOTE(review): damaged extraction -- statements split across lines and
   several original lines missing; comments annotate visible logic only.
   Writes the function prologue to assembler stream F: saves the live
   call-preserved registers, optionally sets up the APCS frame pointer,
   and allocates FRAME_SIZE bytes of local stack.  */
1766 /* The amount of stack adjustment that happens here, in output_return and in
1767 output_epilogue must be exactly the same as was calculated during reload,
1768 or things will point to the wrong place. The only time we can safely
1769 ignore this constraint is when a function has no arguments on the stack,
1770 no stack frame requirement and no live registers execpt for `lr'. If we
1771 can guarantee that by making all function calls into tail calls and that
1772 lr is not clobbered in any other way, then there is no need to push lr
1776 output_func_prologue (f
, frame_size
)
1780 int reg
, live_regs_mask
= 0;
1783 /* Nonzero if we must stuff some register arguments onto the stack as if
1784 they were passed there. */
1785 int store_arg_regs
= 0;
/* The conditional-execution FSM must be idle at function boundaries.  */
1787 if (arm_ccfsm_state
|| arm_target_insn
)
1788 abort (); /* Sanity check */
1790 return_used_this_function
= 0;
1791 lr_save_eliminated
= 0;
/* Emit assembler comments describing the frame layout.  */
1793 fprintf (f
, "\t%c args = %d, pretend = %d, frame = %d\n",
1794 ARM_COMMENT_CHAR
, current_function_args_size
,
1795 current_function_pretend_args_size
, frame_size
);
1796 fprintf (f
, "\t%c frame_needed = %d, current_function_anonymous_args = %d\n",
1797 ARM_COMMENT_CHAR
, frame_pointer_needed
,
1798 current_function_anonymous_args
);
1800 if (current_function_anonymous_args
&& current_function_pretend_args_size
)
/* Collect the live call-saved integer registers (r0-r10) into a mask.  */
1803 for (reg
= 0; reg
<= 10; reg
++)
1804 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1805 live_regs_mask
|= (1 << reg
);
/* 0xD800 = bits 11,12,14,15 -- presumably fp, ip, lr, pc for the APCS
   frame chain; ip gets a copy of sp first (TODO confirm register
   numbering against the target headers).  */
1807 if (frame_pointer_needed
)
1809 live_regs_mask
|= 0xD800;
1810 fprintf (f
, "\tmov\t%sip, %ssp\n", ARM_REG_PREFIX
, ARM_REG_PREFIX
);
/* No frame: lr (bit 14) need only be saved if something really
   clobbers it; otherwise remember that the save was eliminated.  */
1812 else if (regs_ever_live
[14])
1814 if (! current_function_args_size
1815 && ! function_really_clobbers_lr (get_insns ()))
1817 fprintf (f
,"\t%c I don't think this function clobbers lr\n",
1819 lr_save_eliminated
= 1;
1822 live_regs_mask
|= 0x4000;
1825 /* If CURRENT_FUNCTION_PRETEND_ARGS_SIZE, adjust the stack pointer to make
1826 room. If also STORE_ARG_REGS store the argument registers involved in
1827 the created slot (this is for stdarg and varargs). */
1828 if (current_function_pretend_args_size
)
1832 int arg_size
, mask
= 0;
1834 assert (current_function_pretend_args_size
<= 16);
/* Build a mask of the argument registers (counting down from r3)
   that fall inside the pretend-args slot.  */
1835 for (reg
= 3, arg_size
= current_function_pretend_args_size
;
1836 arg_size
> 0; reg
--, arg_size
-= 4)
1838 print_multi_reg (f
, "stmfd\t%ssp!", mask
, FALSE
);
/* Not storing arg regs: just drop sp by the pretend size.  */
1842 operands
[0] = operands
[1] = stack_pointer_rtx
;
1843 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
,
1844 -current_function_pretend_args_size
);
1845 output_add_immediate (operands
);
1851 /* if a di mode load/store multiple is used, and the base register
1852 is r3, then r4 can become an ever live register without lr
1853 doing so, in this case we need to push lr as well, or we
1854 will fail to get a proper return. */
1856 live_regs_mask
|= 0x4000;
1857 lr_save_eliminated
= 0;
1859 /* Now push all the call-saved regs onto the stack */
1860 print_multi_reg (f
, "stmfd\t%ssp!", live_regs_mask
, FALSE
);
/* Save the live call-saved FP registers (f4-f7 region), 12 bytes each.  */
1863 for (reg
= 23; reg
> 15; reg
--)
1864 if (regs_ever_live
[reg
] && !call_used_regs
[reg
])
1865 fprintf (f
, "\tstfe\t%s%s, [%ssp, #-12]!\n", ARM_REG_PREFIX
,
1866 reg_names
[reg
], ARM_REG_PREFIX
);
1868 if (frame_pointer_needed
)
1870 /* Make `fp' point to saved value of `pc'. */
1872 operands
[0] = gen_rtx (REG
, SImode
, HARD_FRAME_POINTER_REGNUM
);
1873 operands
[1] = gen_rtx (REG
, SImode
, 12);
1874 operands
[2] = GEN_INT ( - (4 + current_function_pretend_args_size
));
1875 output_add_immediate (operands
);
/* Finally allocate the local frame.  */
1880 operands
[0] = operands
[1] = stack_pointer_rtx
;
1881 operands
[2] = GEN_INT (-frame_size
);
1882 output_add_immediate (operands
);
/* NOTE(review): damaged extraction -- statements split across lines and
   several original lines missing; comments annotate visible logic only.
   Writes the function epilogue to F, mirroring output_func_prologue:
   restores FP registers, pops the saved integer registers (into pc when
   possible), deallocates the frame and returns.  */
1888 output_func_epilogue (f
, frame_size
)
1892 int reg
, live_regs_mask
= 0, code_size
= 0;
1893 /* If we need this then it will always be at lesat this much */
1894 int floats_offset
= 24;
/* If output_return_instruction already emitted the return, skip all of
   this.  */
1897 if (use_return_insn() && return_used_this_function
)
1899 if (frame_size
&& !(frame_pointer_needed
|| TARGET_APCS
))
/* Rebuild the mask of saved call-saved integer registers (r0-r10).  */
1906 for (reg
= 0; reg
<= 10; reg
++)
1907 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1909 live_regs_mask
|= (1 << reg
);
/* Frame-pointer case: FP registers were saved at fixed negative offsets
   from fp, 12 bytes apart.  */
1913 if (frame_pointer_needed
)
1915 for (reg
= 23; reg
> 15; reg
--)
1916 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1918 fprintf (f
, "\tldfe\t%s%s, [%sfp, #-%d]\n", ARM_REG_PREFIX
,
1919 reg_names
[reg
], ARM_REG_PREFIX
, floats_offset
);
1920 floats_offset
+= 12;
/* 0xA800 = bits 11,13,15 -- presumably fp, sp, pc (TODO confirm); ldmea
   from fp unwinds the frame and returns in one instruction.  The "^"
   form (TRUE) restores the PSR except on TARGET_6.  */
1924 live_regs_mask
|= 0xA800;
1925 print_multi_reg (f
, "ldmea\t%sfp", live_regs_mask
,
1926 TARGET_6
? FALSE
: TRUE
);
1931 /* Restore stack pointer if necessary. */
1934 operands
[0] = operands
[1] = stack_pointer_rtx
;
1935 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
, frame_size
);
1936 output_add_immediate (operands
);
/* No frame pointer: pop the FP registers back off the stack.  */
1939 for (reg
= 16; reg
< 24; reg
++)
1940 if (regs_ever_live
[reg
] && ! call_used_regs
[reg
])
1942 fprintf (f
, "\tldfe\t%s%s, [%ssp], #12\n", ARM_REG_PREFIX
,
1943 reg_names
[reg
], ARM_REG_PREFIX
);
/* Saved lr can be popped straight into pc (bit 15) when there are no
   pretend args to remove afterwards.  */
1946 if (current_function_pretend_args_size
== 0 && regs_ever_live
[14])
1948 print_multi_reg (f
, "ldmfd\t%ssp!", live_regs_mask
| 0x8000,
1949 TARGET_6
? FALSE
: TRUE
);
/* Otherwise pop lr (bit 14) back into lr and return separately.  */
1954 if (live_regs_mask
|| regs_ever_live
[14])
1956 live_regs_mask
|= 0x4000;
1957 print_multi_reg (f
, "ldmfd\t%ssp!", live_regs_mask
, FALSE
);
1960 if (current_function_pretend_args_size
)
1962 operands
[0] = operands
[1] = stack_pointer_rtx
;
1963 operands
[2] = gen_rtx (CONST_INT
, VOIDmode
,
1964 current_function_pretend_args_size
);
1965 output_add_immediate (operands
);
1968 TARGET_6
? "\tmov\t%spc, %slr\n" : "\tmovs\t%spc, %slr\n",
1969 ARM_REG_PREFIX
, ARM_REG_PREFIX
, f
);
1976 /* insn_addresses isn't allocated when not optimizing */
/* Keep arm_text_location in sync with the bytes just emitted.  */
1979 arm_increase_location (code_size
1980 + insn_addresses
[INSN_UID (get_last_insn ())]
1981 + get_prologue_size ());
1983 current_function_anonymous_args
= 0;
/* NOTE(review): damaged extraction -- the case labels of the outer
   switch on CODE were dropped, so the mapping from each visible action
   to its operand-code letter cannot be fully reconstructed here.
   Comments annotate the visible actions only.  */
1986 /* If CODE is 'd', then the X is a condition operand and the instruction
1987 should only be executed if the condition is true.
1988 if CODE is 'D', then the X is a condition operand and the instruciton
1989 should only be executed if the condition is false: however, if the mode
1990 of the comparison is CCFPEmode, then always execute the instruction -- we
1991 do this because in these circumstances !GE does not necessarily imply LT;
1992 in these cases the instruction pattern will take care to make sure that
1993 an instruction containing %d will follow, thereby undoing the effects of
1994 doing this instrucion unconditionally.
1995 If CODE is 'N' then X is a floating point operand that must be negated
1997 If CODE is 'B' then output a bitwise inverted value of X (a const int).
1998 If X is a REG and CODE is `M', output a ldm/stm style multi-reg. */
2001 arm_print_operand (stream
, x
, code
)
/* Emit the target's assembler comment character.  */
2009 fputc (ARM_COMMENT_CHAR
, stream
);
/* Emit the register-name prefix.  */
2013 fputs (ARM_REG_PREFIX
, stream
);
/* Emit the current condition code when the ccfsm is conditionalising.  */
2017 if (arm_ccfsm_state
== 3 || arm_ccfsm_state
== 4)
2018 fputs (arm_condition_codes
[arm_current_cc
], stream
);
/* 'N': print the negated floating-point constant.  */
2024 REAL_VALUE_FROM_CONST_DOUBLE (r
, x
);
2025 r
= REAL_VALUE_NEGATE (r
);
2026 fprintf (stream
, "%s", fp_const_from_val (&r
));
/* 'B': print the bitwise complement of a CONST_INT, sign-extended.  */
2031 if (GET_CODE (x
) == CONST_INT
)
2033 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
2038 ARM_SIGN_EXTEND (~ INTVAL (x
)));
2042 output_addr_const (stream
, x
);
/* Print the arithmetic instruction mnemonic for X.  */
2047 fprintf (stream
, "%s", arithmetic_instr (x
, 1));
2051 fprintf (stream
, "%s", arithmetic_instr (x
, 0));
/* Print a shift operator and its (possibly constant) amount.  */
2058 fprintf (stream
, "%s ", shift_op (x
, &val
));
2060 arm_print_operand (stream
, XEXP (x
, 1), 0);
2063 #if HOST_BITS_PER_WIDE_INT == HOST_BITS_PER_INT
/* Print the register one past X (high half of a two-register value).  */
2075 fputs (ARM_REG_PREFIX
, stream
);
2076 fputs (reg_names
[REGNO (x
) + 1], stream
);
/* Print the base register of a (possibly auto-modified) memory operand.  */
2080 fputs (ARM_REG_PREFIX
, stream
);
2081 if (GET_CODE (XEXP (x
, 0)) == REG
)
2082 fputs (reg_names
[REGNO (XEXP (x
, 0))], stream
);
2084 fputs (reg_names
[REGNO (XEXP (XEXP (x
, 0), 0))], stream
);
/* 'M': print an ldm/stm register range covering the whole mode of X,
   rounding the byte size up to whole SImode words.  */
2088 fprintf (stream
, "{%s%s-%s%s}", ARM_REG_PREFIX
, reg_names
[REGNO (x
)],
2089 ARM_REG_PREFIX
, reg_names
[REGNO (x
) - 1
2090 + ((GET_MODE_SIZE (GET_MODE (x
))
2091 + GET_MODE_SIZE (SImode
) - 1)
2092 / GET_MODE_SIZE (SImode
))]);
/* 'd': condition suffix for "execute if condition true".  */
2097 fputs (arm_condition_codes
[get_arm_condition_code (x
)],
/* 'D': inverse condition suffix, except in the CCFPEmode case described
   in the header comment where inversion is unsafe.  */
2102 if (x
&& (flag_fast_math
2103 || GET_CODE (x
) == EQ
|| GET_CODE (x
) == NE
2104 || (GET_MODE (XEXP (x
, 0)) != CCFPEmode
2105 && (GET_MODE_CLASS (GET_MODE (XEXP (x
, 0)))
2107 fputs (arm_condition_codes
[ARM_INVERSE_CONDITION_CODE
2108 (get_arm_condition_code (x
))],
/* Default: print a register, a memory address, a floating constant, or
   an immediate (prefixed with '#').  */
2116 if (GET_CODE (x
) == REG
)
2118 fputs (ARM_REG_PREFIX
, stream
);
2119 fputs (reg_names
[REGNO (x
)], stream
);
2121 else if (GET_CODE (x
) == MEM
)
/* Record the access mode for PRINT_OPERAND_ADDRESS (see file header).  */
2123 output_memory_reference_mode
= GET_MODE (x
);
2124 output_address (XEXP (x
, 0));
2126 else if (GET_CODE (x
) == CONST_DOUBLE
)
2127 fprintf (stream
, "#%s", fp_immediate_constant (x
));
2128 else if (GET_CODE (x
) == NEG
)
2129 abort (); /* This should never happen now. */
2132 fputc ('#', stream
);
2133 output_addr_const (stream
, x
);
2138 /* Increase the `arm_text_location' by AMOUNT if we're in the text
2142 arm_increase_location (amount
)
2145 if (in_text_section ())
2146 arm_text_location
+= amount
;
/* NOTE(review): damaged extraction -- the hash-accumulation statement
   inside the for loop (and other lines) were dropped; comments annotate
   the visible logic only.  */
2150 /* Output a label definition. If this label is within the .text segment, it
2151 is stored in OFFSET_TABLE, to be used when building `llc' instructions.
2152 Maybe GCC remembers names not starting with a `*' for a long time, but this
2153 is a minority anyway, so we just make a copy. Do not store the leading `*'
2154 if the name starts with one. */
2157 arm_asm_output_label (stream
, name
)
2161 char *real_name
, *s
;
2162 struct label_offset
*cur
;
/* Emit the "name:" definition itself; non-text labels are not recorded.  */
2165 assemble_name (stream
, name
);
2166 fputs (":\n", stream
);
2167 if (! in_text_section ())
/* Name starts with '*': copy it without the leading '*'.  */
2172 real_name
= xmalloc (1 + strlen (&name
[1]));
2173 strcpy (real_name
, &name
[1]);
/* Otherwise store it with the assembler's '_' prefix prepended.  */
2177 real_name
= xmalloc (2 + strlen (name
));
2178 strcpy (real_name
, "_");
2179 strcat (real_name
, name
);
/* Hash the stored name (accumulation line missing from this extract)
   and chain a new entry onto the offset_table bucket, recording the
   current text location for later pc-relative references.  */
2181 for (s
= real_name
; *s
; s
++)
2184 hash
= hash
% LABEL_HASH_SIZE
;
2185 cur
= (struct label_offset
*) xmalloc (sizeof (struct label_offset
));
2186 cur
->name
= real_name
;
2187 cur
->offset
= arm_text_location
;
2188 cur
->cdr
= offset_table
[hash
];
2189 offset_table
[hash
] = cur
;
/* NOTE(review): damaged extraction -- several control-flow lines are
   missing; comments annotate the visible logic only.  */
2192 /* Load a symbol that is known to be in the text segment into a register.
2193 This should never be called when not optimizing. */
2196 output_load_symbol (insn
, operands
)
2201 char *name
= XSTR (operands
[1], 0);
2202 struct label_offset
*he
;
2205 unsigned int mask
, never_mask
= 0xffffffff;
/* Only '*'-prefixed (already-assembler) names are handled, and only
   when optimizing (insn addresses exist only then).  */
2209 if (optimize
== 0 || *name
!= '*')
/* Hash the name (accumulation line missing from this extract) and find
   its recorded text-segment offset in offset_table.  */
2212 for (s
= &name
[1]; *s
; s
++)
2215 hash
= hash
% LABEL_HASH_SIZE
;
2216 he
= offset_table
[hash
];
2217 while (he
&& strcmp (he
->name
, &name
[1]))
/* Distance from this insn (+8 for the ARM pipeline prefetch) back to
   the label; the prologue size must be included, see get_prologue_size.  */
2223 offset
= (arm_text_location
+ insn_addresses
[INSN_UID (insn
)]
2224 + get_prologue_size () + 8 - he
->offset
)
;
2228 /* When generating the instructions, we never mask out the bits that we
2229 think will be always zero, then if a mistake has occured somewhere, the
2230 assembler will spot it and generate an error. */
2232 /* If the symbol is word aligned then we might be able to reduce the
2234 shift
= ((offset
& 3) == 0) ? 2 : 0;
2236 /* Clear the bits from NEVER_MASK that will be orred in with the individual
2238 for (; shift
< 32; shift
+= 8)
2240 mask
= 0xff << shift
;
2241 if ((offset
& mask
) || ((unsigned) offset
) > mask
)
2242 never_mask
&= ~mask
;
2246 mask
= 0xff << (shift
- 32);
/* Skip leading byte-masks that contribute no bits.  */
2248 while (mask
&& (never_mask
& mask
) == 0)
/* First instruction: pc-relative sub, optionally masked so the
   assembler verifies our "always zero" assumption.  */
2252 strcpy (buffer
, "sub%?\t%0, %|pc, #(8 + . -%a1)");
2253 if ((never_mask
| mask
) != 0xffffffff)
2254 sprintf (buffer
+ strlen (buffer
), " & 0x%x", mask
| never_mask
);
/* Subsequent instructions subtract the remaining byte ranges.  */
2257 sprintf (buffer
, "sub%%?\t%%0, %%0, #(%d + . -%%a1) & 0x%x",
2258 inst
, mask
| never_mask
);
2260 output_asm_insn (buffer
, operands
);
2268 /* Output code resembling an .lcomm directive. /bin/as doesn't have this
2269 directive hence this hack, which works by reserving some `.space' in the
2270 bss segment directly.
2272 XXX This is a severe hack, which is guaranteed NOT to work since it doesn't
2273 define STATIC COMMON space but merely STATIC BSS space. */
2276 output_lcomm_directive (stream
, name
, size
, rounded
)
2281 fprintf (stream
, "\n\t.bss\t%c .lcomm\n", ARM_COMMENT_CHAR
);
2282 assemble_name (stream
, name
);
2283 fprintf (stream
, ":\t.space\t%d\n", rounded
);
2284 if (in_text_section ())
2285 fputs ("\n\t.text\n", stream
);
2287 fputs ("\n\t.data\n", stream
);
2290 /* A finite state machine takes care of noticing whether or not instructions
2291 can be conditionally executed, and thus decrease execution time and code
2292 size by deleting branch instructions. The fsm is controlled by
2293 final_prescan_insn, and controls the actions of ASM_OUTPUT_OPCODE. */
2295 /* The state of the fsm controlling condition codes are:
2296 0: normal, do nothing special
2297 1: make ASM_OUTPUT_OPCODE not output this instruction
2298 2: make ASM_OUTPUT_OPCODE not output this instruction
2299 3: make instructions conditional
2300 4: make instructions conditional
2302 State transitions (state->state by whom under condition):
2303 0 -> 1 final_prescan_insn if the `target' is a label
2304 0 -> 2 final_prescan_insn if the `target' is an unconditional branch
2305 1 -> 3 ASM_OUTPUT_OPCODE after not having output the conditional branch
2306 2 -> 4 ASM_OUTPUT_OPCODE after not having output the conditional branch
2307 3 -> 0 ASM_OUTPUT_INTERNAL_LABEL if the `target' label is reached
2308 (the target label has CODE_LABEL_NUMBER equal to arm_target_label).
2309 4 -> 0 final_prescan_insn if the `target' unconditional branch is reached
2310 (the target insn is arm_target_insn).
2312 If the jump clobbers the conditions then we use states 2 and 4.
2314 A similar thing can be done with conditional return insns.
2316 XXX In case the `target' is an unconditional branch, this conditionalising
2317 of the instructions always reduces code size, but not always execution
2318 time. But then, I want to reduce the code size to somewhere near what
2319 /bin/cc produces. */
2321 /* Returns the index of the ARM condition code string in
2322 `arm_condition_codes'. COMPARISON should be an rtx like
2323 `(eq (...) (...))'. */
2326 get_arm_condition_code (comparison
)
2329 switch (GET_CODE (comparison
))
2331 case NE
: return (1);
2332 case EQ
: return (0);
2333 case GE
: return (10);
2334 case GT
: return (12);
2335 case LE
: return (13);
2336 case LT
: return (11);
2337 case GEU
: return (2);
2338 case GTU
: return (8);
2339 case LEU
: return (9);
2340 case LTU
: return (3);
2349 final_prescan_insn (insn
, opvec
, noperands
)
2354 /* BODY will hold the body of INSN. */
2355 register rtx body
= PATTERN (insn
);
2357 /* This will be 1 if trying to repeat the trick, and things need to be
2358 reversed if it appears to fail. */
2361 /* JUMP_CLOBBERS will be one implies that the conditions if a branch is
2362 taken are clobbered, even if the rtl suggests otherwise. It also
2363 means that we have to grub around within the jump expression to find
2364 out what the conditions are when the jump isn't taken. */
2365 int jump_clobbers
= 0;
2367 /* If we start with a return insn, we only succeed if we find another one. */
2368 int seeking_return
= 0;
2370 /* START_INSN will hold the insn from where we start looking. This is the
2371 first insn after the following code_label if REVERSE is true. */
2372 rtx start_insn
= insn
;
2374 /* If in state 4, check if the target branch is reached, in order to
2375 change back to state 0. */
2376 if (arm_ccfsm_state
== 4)
2378 if (insn
== arm_target_insn
)
2380 arm_target_insn
= NULL
;
2381 arm_ccfsm_state
= 0;
2386 /* If in state 3, it is possible to repeat the trick, if this insn is an
2387 unconditional branch to a label, and immediately following this branch
2388 is the previous target label which is only used once, and the label this
2389 branch jumps to is not too far off. */
2390 if (arm_ccfsm_state
== 3)
2392 if (simplejump_p (insn
))
2394 start_insn
= next_nonnote_insn (start_insn
);
2395 if (GET_CODE (start_insn
) == BARRIER
)
2397 /* XXX Isn't this always a barrier? */
2398 start_insn
= next_nonnote_insn (start_insn
);
2400 if (GET_CODE (start_insn
) == CODE_LABEL
2401 && CODE_LABEL_NUMBER (start_insn
) == arm_target_label
2402 && LABEL_NUSES (start_insn
) == 1)
2407 else if (GET_CODE (body
) == RETURN
)
2409 start_insn
= next_nonnote_insn (start_insn
);
2410 if (GET_CODE (start_insn
) == BARRIER
)
2411 start_insn
= next_nonnote_insn (start_insn
);
2412 if (GET_CODE (start_insn
) == CODE_LABEL
2413 && CODE_LABEL_NUMBER (start_insn
) == arm_target_label
2414 && LABEL_NUSES (start_insn
) == 1)
2426 if (arm_ccfsm_state
!= 0 && !reverse
)
2428 if (GET_CODE (insn
) != JUMP_INSN
)
2431 /* This jump might be paralled with a clobber of the condition codes
2432 the jump should always come first */
2433 if (GET_CODE (body
) == PARALLEL
&& XVECLEN (body
, 0) > 0)
2434 body
= XVECEXP (body
, 0, 0);
2437 /* If this is a conditional return then we don't want to know */
2438 if (GET_CODE (body
) == SET
&& GET_CODE (SET_DEST (body
)) == PC
2439 && GET_CODE (SET_SRC (body
)) == IF_THEN_ELSE
2440 && (GET_CODE (XEXP (SET_SRC (body
), 1)) == RETURN
2441 || GET_CODE (XEXP (SET_SRC (body
), 2)) == RETURN
))
2446 || (GET_CODE (body
) == SET
&& GET_CODE (SET_DEST (body
)) == PC
2447 && GET_CODE (SET_SRC (body
)) == IF_THEN_ELSE
))
2449 int insns_skipped
= 0, fail
= FALSE
, succeed
= FALSE
;
2450 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2451 int then_not_else
= TRUE
;
2452 rtx this_insn
= start_insn
, label
= 0;
2454 if (get_attr_conds (insn
) == CONDS_JUMP_CLOB
)
2457 /* Register the insn jumped to. */
2460 if (!seeking_return
)
2461 label
= XEXP (SET_SRC (body
), 0);
2463 else if (GET_CODE (XEXP (SET_SRC (body
), 1)) == LABEL_REF
)
2464 label
= XEXP (XEXP (SET_SRC (body
), 1), 0);
2465 else if (GET_CODE (XEXP (SET_SRC (body
), 2)) == LABEL_REF
)
2467 label
= XEXP (XEXP (SET_SRC (body
), 2), 0);
2468 then_not_else
= FALSE
;
2470 else if (GET_CODE (XEXP (SET_SRC (body
), 1)) == RETURN
)
2472 else if (GET_CODE (XEXP (SET_SRC (body
), 2)) == RETURN
)
2475 then_not_else
= FALSE
;
2480 /* See how many insns this branch skips, and what kind of insns. If all
2481 insns are okay, and the label or unconditional branch to the same
2482 label is not too far away, succeed. */
2483 for (insns_skipped
= 0;
2484 !fail
&& !succeed
&& insns_skipped
< MAX_INSNS_SKIPPED
;
2489 this_insn
= next_nonnote_insn (this_insn
);
2493 scanbody
= PATTERN (this_insn
);
2495 switch (GET_CODE (this_insn
))
2498 /* Succeed if it is the target label, otherwise fail since
2499 control falls in from somewhere else. */
2500 if (this_insn
== label
)
2504 arm_ccfsm_state
= 2;
2505 this_insn
= next_nonnote_insn (this_insn
);
2508 arm_ccfsm_state
= 1;
2516 /* Succeed if the following insn is the target label.
2518 If return insns are used then the last insn in a function
2519 will be a barrier. */
2520 this_insn
= next_nonnote_insn (this_insn
);
2521 if (this_insn
&& this_insn
== label
)
2525 arm_ccfsm_state
= 2;
2526 this_insn
= next_nonnote_insn (this_insn
);
2529 arm_ccfsm_state
= 1;
2537 /* The arm 6xx uses full 32 bit addresses so the cc is not
2538 preserved over calls */
2543 /* If this is an unconditional branch to the same label, succeed.
2544 If it is to another label, do nothing. If it is conditional,
2546 /* XXX Probably, the test for the SET and the PC are unnecessary. */
2548 if (GET_CODE (scanbody
) == SET
2549 && GET_CODE (SET_DEST (scanbody
)) == PC
)
2551 if (GET_CODE (SET_SRC (scanbody
)) == LABEL_REF
2552 && XEXP (SET_SRC (scanbody
), 0) == label
&& !reverse
)
2554 arm_ccfsm_state
= 2;
2557 else if (GET_CODE (SET_SRC (scanbody
)) == IF_THEN_ELSE
)
2560 else if (GET_CODE (scanbody
) == RETURN
2563 arm_ccfsm_state
= 2;
2566 else if (GET_CODE (scanbody
) == PARALLEL
)
2568 switch (get_attr_conds (this_insn
))
2580 /* Instructions using or affecting the condition codes make it
2582 if ((GET_CODE (scanbody
) == SET
2583 || GET_CODE (scanbody
) == PARALLEL
)
2584 && get_attr_conds (this_insn
) != CONDS_NOCOND
)
2594 if ((!seeking_return
) && (arm_ccfsm_state
== 1 || reverse
))
2595 arm_target_label
= CODE_LABEL_NUMBER (label
);
2596 else if (seeking_return
|| arm_ccfsm_state
== 2)
2598 while (this_insn
&& GET_CODE (PATTERN (this_insn
)) == USE
)
2600 this_insn
= next_nonnote_insn (this_insn
);
2601 if (this_insn
&& (GET_CODE (this_insn
) == BARRIER
2602 || GET_CODE (this_insn
) == CODE_LABEL
))
2607 /* Oh, dear! we ran off the end.. give up */
2608 recog (PATTERN (insn
), insn
, NULL_PTR
);
2609 arm_ccfsm_state
= 0;
2610 arm_target_insn
= NULL
;
2613 arm_target_insn
= this_insn
;
2622 get_arm_condition_code (XEXP (XEXP (XEXP (SET_SRC (body
),
2624 if (GET_CODE (XEXP (XEXP (SET_SRC (body
), 0), 0)) == AND
)
2625 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
2626 if (GET_CODE (XEXP (SET_SRC (body
), 0)) == NE
)
2627 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
2631 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
2634 arm_current_cc
= get_arm_condition_code (XEXP (SET_SRC (body
),
2638 if (reverse
|| then_not_else
)
2639 arm_current_cc
= ARM_INVERSE_CONDITION_CODE (arm_current_cc
);
2641 /* restore recog_operand (getting the attributes of other insns can
2642 destroy this array, but final.c assumes that it remains intact
2643 accross this call; since the insn has been recognized already we
2644 call recog direct). */
2645 recog (PATTERN (insn
), insn
, NULL_PTR
);