1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
33 #include "insn-attr.h"
38 #include "diagnostic-core.h"
40 #include "integrate.h"
43 #include "target-def.h"
48 #define streq(a,b) (strcmp (a, b) == 0)
51 static void v850_print_operand_address (FILE *, rtx
);
53 /* Information about the various small memory areas. */
54 static const int small_memory_physical_max
[(int) SMALL_MEMORY_max
] =
61 /* Names of the various data areas used on the v850. */
62 tree GHS_default_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
63 tree GHS_current_section_names
[(int) COUNT_OF_GHS_SECTION_KINDS
];
65 /* Track the current data area set by the data area pragma (which
66 can be nested). Tested by check_default_data_area. */
67 data_area_stack_element
* data_area_stack
= NULL
;
69 /* True if we don't need to check any more if the current
70 function is an interrupt handler. */
71 static int v850_interrupt_cache_p
= FALSE
;
73 rtx v850_compare_op0
, v850_compare_op1
;
75 /* Whether current function is an interrupt handler. */
76 static int v850_interrupt_p
= FALSE
;
78 static GTY(()) section
* rosdata_section
;
79 static GTY(()) section
* rozdata_section
;
80 static GTY(()) section
* tdata_section
;
81 static GTY(()) section
* zdata_section
;
82 static GTY(()) section
* zbss_section
;
84 /* Set the maximum size of small memory area TYPE to the value given
85 by SIZE in structure OPTS (option text OPT passed at location LOC). */
88 v850_handle_memory_option (enum small_memory_type type
,
89 struct gcc_options
*opts
, const char *opt
,
90 int size
, location_t loc
)
92 if (size
> small_memory_physical_max
[type
])
93 error_at (loc
, "value passed in %qs is too large", opt
);
95 opts
->x_small_memory_max
[type
] = size
;
98 /* Implement TARGET_HANDLE_OPTION. */
101 v850_handle_option (struct gcc_options
*opts
,
102 struct gcc_options
*opts_set ATTRIBUTE_UNUSED
,
103 const struct cl_decoded_option
*decoded
,
106 size_t code
= decoded
->opt_index
;
107 int value
= decoded
->value
;
112 opts
->x_target_flags
|= MASK_EP
| MASK_PROLOG_FUNCTION
;
116 opts
->x_target_flags
&= ~(MASK_CPU
^ MASK_V850
);
122 opts
->x_target_flags
&= ~(MASK_CPU
^ MASK_V850E
);
126 opts
->x_target_flags
&= ~(MASK_CPU
^ MASK_V850E2
);
130 opts
->x_target_flags
&= ~(MASK_CPU
^ MASK_V850E2V3
);
134 v850_handle_memory_option (SMALL_MEMORY_TDA
, opts
,
135 decoded
->orig_option_with_args_text
,
140 v850_handle_memory_option (SMALL_MEMORY_SDA
, opts
,
141 decoded
->orig_option_with_args_text
,
146 v850_handle_memory_option (SMALL_MEMORY_ZDA
, opts
,
147 decoded
->orig_option_with_args_text
,
156 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
158 static const struct default_options v850_option_optimization_table
[] =
160 { OPT_LEVELS_1_PLUS
, OPT_fomit_frame_pointer
, NULL
, 1 },
161 /* Note - we no longer enable MASK_EP when optimizing. This is
162 because of a hardware bug which stops the SLD and SST instructions
163 from correctly detecting some hazards. If the user is sure that
164 their hardware is fixed or that their program will not encounter
165 the conditions that trigger the bug then they can enable -mep by
167 { OPT_LEVELS_1_PLUS
, OPT_mprolog_function
, NULL
, 1 },
168 { OPT_LEVELS_NONE
, 0, NULL
, 0 }
171 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
172 Specify whether to pass the argument by reference. */
175 v850_pass_by_reference (CUMULATIVE_ARGS
*cum ATTRIBUTE_UNUSED
,
176 enum machine_mode mode
, const_tree type
,
177 bool named ATTRIBUTE_UNUSED
)
179 unsigned HOST_WIDE_INT size
;
182 size
= int_size_in_bytes (type
);
184 size
= GET_MODE_SIZE (mode
);
189 /* Implementing the Varargs Macros. */
192 v850_strict_argument_naming (CUMULATIVE_ARGS
* ca ATTRIBUTE_UNUSED
)
194 return !TARGET_GHS
? true : false;
197 /* Return an RTX to represent where an argument with mode MODE
198 and type TYPE will be passed to a function. If the result
199 is NULL_RTX, the argument will be pushed. */
202 v850_function_arg (CUMULATIVE_ARGS
* cum
, enum machine_mode mode
,
203 const_tree type
, bool named
)
205 rtx result
= NULL_RTX
;
212 size
= int_size_in_bytes (type
);
214 size
= GET_MODE_SIZE (mode
);
216 size
= (size
+ UNITS_PER_WORD
-1) & ~(UNITS_PER_WORD
-1);
220 /* Once we have stopped using argument registers, do not start up again. */
221 cum
->nbytes
= 4 * UNITS_PER_WORD
;
225 if (size
<= UNITS_PER_WORD
&& type
)
226 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
230 cum
->nbytes
= (cum
->nbytes
+ align
- 1) &~(align
- 1);
232 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
235 if (type
== NULL_TREE
236 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
239 switch (cum
->nbytes
/ UNITS_PER_WORD
)
242 result
= gen_rtx_REG (mode
, 6);
245 result
= gen_rtx_REG (mode
, 7);
248 result
= gen_rtx_REG (mode
, 8);
251 result
= gen_rtx_REG (mode
, 9);
260 /* Return the number of bytes which must be put into registers
261 for values which are part in registers and part in memory. */
263 v850_arg_partial_bytes (CUMULATIVE_ARGS
* cum
, enum machine_mode mode
,
264 tree type
, bool named
)
268 if (TARGET_GHS
&& !named
)
272 size
= int_size_in_bytes (type
);
274 size
= GET_MODE_SIZE (mode
);
280 align
= TYPE_ALIGN (type
) / BITS_PER_UNIT
;
284 cum
->nbytes
= (cum
->nbytes
+ align
- 1) & ~ (align
- 1);
286 if (cum
->nbytes
> 4 * UNITS_PER_WORD
)
289 if (cum
->nbytes
+ size
<= 4 * UNITS_PER_WORD
)
292 if (type
== NULL_TREE
293 && cum
->nbytes
+ size
> 4 * UNITS_PER_WORD
)
296 return 4 * UNITS_PER_WORD
- cum
->nbytes
;
299 /* Update the data in CUM to advance over an argument
300 of mode MODE and data type TYPE.
301 (TYPE is null for libcalls where that information may not be available.) */
304 v850_function_arg_advance (CUMULATIVE_ARGS
*cum
, enum machine_mode mode
,
305 const_tree type
, bool named ATTRIBUTE_UNUSED
)
307 cum
->nbytes
+= (((type
&& int_size_in_bytes (type
) > 8
308 ? GET_MODE_SIZE (Pmode
)
310 ? GET_MODE_SIZE (mode
)
311 : int_size_in_bytes (type
))) + UNITS_PER_WORD
- 1)
315 /* Return the high and low words of a CONST_DOUBLE */
318 const_double_split (rtx x
, HOST_WIDE_INT
* p_high
, HOST_WIDE_INT
* p_low
)
320 if (GET_CODE (x
) == CONST_DOUBLE
)
325 switch (GET_MODE (x
))
328 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
329 REAL_VALUE_TO_TARGET_DOUBLE (rv
, t
);
330 *p_high
= t
[1]; /* since v850 is little endian */
331 *p_low
= t
[0]; /* high is second word */
335 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
336 REAL_VALUE_TO_TARGET_SINGLE (rv
, *p_high
);
342 *p_high
= CONST_DOUBLE_HIGH (x
);
343 *p_low
= CONST_DOUBLE_LOW (x
);
351 fatal_insn ("const_double_split got a bad insn:", x
);
355 /* Return the cost of the rtx R with code CODE. */
358 const_costs_int (HOST_WIDE_INT value
, int zero_cost
)
360 if (CONST_OK_FOR_I (value
))
362 else if (CONST_OK_FOR_J (value
))
364 else if (CONST_OK_FOR_K (value
))
371 const_costs (rtx r
, enum rtx_code c
)
373 HOST_WIDE_INT high
, low
;
378 return const_costs_int (INTVAL (r
), 0);
381 const_double_split (r
, &high
, &low
);
382 if (GET_MODE (r
) == SFmode
)
383 return const_costs_int (high
, 1);
385 return const_costs_int (high
, 1) + const_costs_int (low
, 1);
401 v850_rtx_costs (rtx x
,
403 int outer_code ATTRIBUTE_UNUSED
,
404 int * total
, bool speed
)
406 enum rtx_code code
= (enum rtx_code
) codearg
;
415 *total
= COSTS_N_INSNS (const_costs (x
, code
));
422 if (TARGET_V850E
&& !speed
)
430 && ( GET_MODE (x
) == SImode
431 || GET_MODE (x
) == HImode
432 || GET_MODE (x
) == QImode
))
434 if (GET_CODE (XEXP (x
, 1)) == REG
)
436 else if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
438 if (CONST_OK_FOR_O (INTVAL (XEXP (x
, 1))))
440 else if (CONST_OK_FOR_K (INTVAL (XEXP (x
, 1))))
449 if (outer_code
== COMPARE
)
458 /* Print operand X using operand code CODE to assembly language output file
462 v850_print_operand (FILE * file
, rtx x
, int code
)
464 HOST_WIDE_INT high
, low
;
469 /* We use 'c' operands with symbols for .vtinherit */
470 if (GET_CODE (x
) == SYMBOL_REF
)
472 output_addr_const(file
, x
);
479 switch ((code
== 'B' || code
== 'C')
480 ? reverse_condition (GET_CODE (x
)) : GET_CODE (x
))
483 if (code
== 'c' || code
== 'C')
484 fprintf (file
, "nz");
486 fprintf (file
, "ne");
489 if (code
== 'c' || code
== 'C')
495 fprintf (file
, "ge");
498 fprintf (file
, "gt");
501 fprintf (file
, "le");
504 fprintf (file
, "lt");
507 fprintf (file
, "nl");
513 fprintf (file
, "nh");
522 case 'F': /* high word of CONST_DOUBLE */
523 switch (GET_CODE (x
))
526 fprintf (file
, "%d", (INTVAL (x
) >= 0) ? 0 : -1);
530 const_double_split (x
, &high
, &low
);
531 fprintf (file
, "%ld", (long) high
);
538 case 'G': /* low word of CONST_DOUBLE */
539 switch (GET_CODE (x
))
542 fprintf (file
, "%ld", (long) INTVAL (x
));
546 const_double_split (x
, &high
, &low
);
547 fprintf (file
, "%ld", (long) low
);
555 fprintf (file
, "%d\n", (int)(INTVAL (x
) & 0xffff));
558 fprintf (file
, "%d", exact_log2 (INTVAL (x
)));
561 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
563 if (GET_CODE (x
) == CONST
)
564 x
= XEXP (XEXP (x
, 0), 0);
566 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
568 if (SYMBOL_REF_ZDA_P (x
))
569 fprintf (file
, "zdaoff");
570 else if (SYMBOL_REF_SDA_P (x
))
571 fprintf (file
, "sdaoff");
572 else if (SYMBOL_REF_TDA_P (x
))
573 fprintf (file
, "tdaoff");
578 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
579 output_addr_const (file
, x
);
582 gcc_assert (special_symbolref_operand (x
, VOIDmode
));
584 if (GET_CODE (x
) == CONST
)
585 x
= XEXP (XEXP (x
, 0), 0);
587 gcc_assert (GET_CODE (x
) == SYMBOL_REF
);
589 if (SYMBOL_REF_ZDA_P (x
))
590 fprintf (file
, "r0");
591 else if (SYMBOL_REF_SDA_P (x
))
592 fprintf (file
, "gp");
593 else if (SYMBOL_REF_TDA_P (x
))
594 fprintf (file
, "ep");
598 case 'R': /* 2nd word of a double. */
599 switch (GET_CODE (x
))
602 fprintf (file
, reg_names
[REGNO (x
) + 1]);
605 x
= XEXP (adjust_address (x
, SImode
, 4), 0);
606 v850_print_operand_address (file
, x
);
607 if (GET_CODE (x
) == CONST_INT
)
608 fprintf (file
, "[r0]");
617 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
618 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), FALSE
))
625 /* Like an 'S' operand above, but for unsigned loads only. */
626 if (GET_CODE (x
) == MEM
&& ep_memory_operand (x
, GET_MODE (x
), TRUE
))
631 case 'W': /* print the instruction suffix */
632 switch (GET_MODE (x
))
637 case QImode
: fputs (".b", file
); break;
638 case HImode
: fputs (".h", file
); break;
639 case SImode
: fputs (".w", file
); break;
640 case SFmode
: fputs (".w", file
); break;
643 case '.': /* register r0 */
644 fputs (reg_names
[0], file
);
646 case 'z': /* reg or zero */
647 if (GET_CODE (x
) == REG
)
648 fputs (reg_names
[REGNO (x
)], file
);
649 else if ((GET_MODE(x
) == SImode
650 || GET_MODE(x
) == DFmode
651 || GET_MODE(x
) == SFmode
)
652 && x
== CONST0_RTX(GET_MODE(x
)))
653 fputs (reg_names
[0], file
);
656 gcc_assert (x
== const0_rtx
);
657 fputs (reg_names
[0], file
);
661 switch (GET_CODE (x
))
664 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
665 output_address (gen_rtx_PLUS (SImode
, gen_rtx_REG (SImode
, 0),
668 output_address (XEXP (x
, 0));
672 fputs (reg_names
[REGNO (x
)], file
);
675 fputs (reg_names
[subreg_regno (x
)], file
);
682 v850_print_operand_address (file
, x
);
693 /* Output assembly language output for the address ADDR to FILE. */
696 v850_print_operand_address (FILE * file
, rtx addr
)
698 switch (GET_CODE (addr
))
701 fprintf (file
, "0[");
702 v850_print_operand (file
, addr
, 0);
706 if (GET_CODE (XEXP (addr
, 0)) == REG
)
709 fprintf (file
, "lo(");
710 v850_print_operand (file
, XEXP (addr
, 1), 0);
711 fprintf (file
, ")[");
712 v850_print_operand (file
, XEXP (addr
, 0), 0);
717 if (GET_CODE (XEXP (addr
, 0)) == REG
718 || GET_CODE (XEXP (addr
, 0)) == SUBREG
)
721 v850_print_operand (file
, XEXP (addr
, 1), 0);
723 v850_print_operand (file
, XEXP (addr
, 0), 0);
728 v850_print_operand (file
, XEXP (addr
, 0), 0);
730 v850_print_operand (file
, XEXP (addr
, 1), 0);
735 const char *off_name
= NULL
;
736 const char *reg_name
= NULL
;
738 if (SYMBOL_REF_ZDA_P (addr
))
743 else if (SYMBOL_REF_SDA_P (addr
))
748 else if (SYMBOL_REF_TDA_P (addr
))
755 fprintf (file
, "%s(", off_name
);
756 output_addr_const (file
, addr
);
758 fprintf (file
, ")[%s]", reg_name
);
762 if (special_symbolref_operand (addr
, VOIDmode
))
764 rtx x
= XEXP (XEXP (addr
, 0), 0);
765 const char *off_name
;
766 const char *reg_name
;
768 if (SYMBOL_REF_ZDA_P (x
))
773 else if (SYMBOL_REF_SDA_P (x
))
778 else if (SYMBOL_REF_TDA_P (x
))
786 fprintf (file
, "%s(", off_name
);
787 output_addr_const (file
, addr
);
788 fprintf (file
, ")[%s]", reg_name
);
791 output_addr_const (file
, addr
);
794 output_addr_const (file
, addr
);
/* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P.  Only '.' (register r0)
   is accepted as a punctuation operand code by v850_print_operand.  */

static bool
v850_print_operand_punct_valid_p (unsigned char code)
{
  return code == '.';
}
805 /* When assemble_integer is used to emit the offsets for a switch
806 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
807 output_addr_const will normally barf at this, but it is OK to omit
808 the truncate and just emit the difference of the two labels. The
809 .hword directive will automatically handle the truncation for us.
811 Returns true if rtx was handled, false otherwise. */
814 v850_output_addr_const_extra (FILE * file
, rtx x
)
816 if (GET_CODE (x
) != TRUNCATE
)
821 /* We must also handle the case where the switch table was passed a
822 constant value and so has been collapsed. In this case the first
823 label will have been deleted. In such a case it is OK to emit
824 nothing, since the table will not be used.
825 (cf gcc.c-torture/compile/990801-1.c). */
826 if (GET_CODE (x
) == MINUS
827 && GET_CODE (XEXP (x
, 0)) == LABEL_REF
828 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == CODE_LABEL
829 && INSN_DELETED_P (XEXP (XEXP (x
, 0), 0)))
832 output_addr_const (file
, x
);
836 /* Return appropriate code to load up a 1, 2, or 4 integer/floating
840 output_move_single (rtx
* operands
)
842 rtx dst
= operands
[0];
843 rtx src
= operands
[1];
850 else if (GET_CODE (src
) == CONST_INT
)
852 HOST_WIDE_INT value
= INTVAL (src
);
854 if (CONST_OK_FOR_J (value
)) /* Signed 5-bit immediate. */
857 else if (CONST_OK_FOR_K (value
)) /* Signed 16-bit immediate. */
858 return "movea %1,%.,%0";
860 else if (CONST_OK_FOR_L (value
)) /* Upper 16 bits were set. */
861 return "movhi hi0(%1),%.,%0";
863 /* A random constant. */
864 else if (TARGET_V850E
|| TARGET_V850E2_ALL
)
867 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
870 else if (GET_CODE (src
) == CONST_DOUBLE
&& GET_MODE (src
) == SFmode
)
872 HOST_WIDE_INT high
, low
;
874 const_double_split (src
, &high
, &low
);
876 if (CONST_OK_FOR_J (high
)) /* Signed 5-bit immediate. */
879 else if (CONST_OK_FOR_K (high
)) /* Signed 16-bit immediate. */
880 return "movea %F1,%.,%0";
882 else if (CONST_OK_FOR_L (high
)) /* Upper 16 bits were set. */
883 return "movhi hi0(%F1),%.,%0";
885 /* A random constant. */
886 else if (TARGET_V850E
|| TARGET_V850E2_ALL
)
890 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
893 else if (GET_CODE (src
) == MEM
)
894 return "%S1ld%W1 %1,%0";
896 else if (special_symbolref_operand (src
, VOIDmode
))
897 return "movea %O1(%P1),%Q1,%0";
899 else if (GET_CODE (src
) == LABEL_REF
900 || GET_CODE (src
) == SYMBOL_REF
901 || GET_CODE (src
) == CONST
)
903 if (TARGET_V850E
|| TARGET_V850E2_ALL
)
904 return "mov hilo(%1),%0";
906 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
909 else if (GET_CODE (src
) == HIGH
)
910 return "movhi hi(%1),%.,%0";
912 else if (GET_CODE (src
) == LO_SUM
)
914 operands
[2] = XEXP (src
, 0);
915 operands
[3] = XEXP (src
, 1);
916 return "movea lo(%3),%2,%0";
920 else if (GET_CODE (dst
) == MEM
)
923 return "%S0st%W0 %1,%0";
925 else if (GET_CODE (src
) == CONST_INT
&& INTVAL (src
) == 0)
926 return "%S0st%W0 %.,%0";
928 else if (GET_CODE (src
) == CONST_DOUBLE
929 && CONST0_RTX (GET_MODE (dst
)) == src
)
930 return "%S0st%W0 %.,%0";
933 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode
, dst
, src
));
937 /* Generate comparison code. */
939 v850_float_z_comparison_operator (rtx op
, enum machine_mode mode
)
941 enum rtx_code code
= GET_CODE (op
);
943 if (GET_RTX_CLASS (code
) != RTX_COMPARE
944 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
947 if (mode
!= GET_MODE (op
) && mode
!= VOIDmode
)
950 if ((GET_CODE (XEXP (op
, 0)) != REG
951 || REGNO (XEXP (op
, 0)) != CC_REGNUM
)
952 || XEXP (op
, 1) != const0_rtx
)
955 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_LTmode
)
957 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_LEmode
)
959 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_EQmode
)
966 v850_float_nz_comparison_operator (rtx op
, enum machine_mode mode
)
968 enum rtx_code code
= GET_CODE (op
);
970 if (GET_RTX_CLASS (code
) != RTX_COMPARE
971 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
974 if (mode
!= GET_MODE (op
) && mode
!= VOIDmode
)
977 if ((GET_CODE (XEXP (op
, 0)) != REG
978 || REGNO (XEXP (op
, 0)) != CC_REGNUM
)
979 || XEXP (op
, 1) != const0_rtx
)
982 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_GTmode
)
984 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_GEmode
)
986 if (GET_MODE (XEXP (op
, 0)) == CC_FPU_NEmode
)
993 v850_select_cc_mode (enum rtx_code cond
, rtx op0
, rtx op1 ATTRIBUTE_UNUSED
)
995 if (GET_MODE_CLASS (GET_MODE (op0
)) == MODE_FLOAT
)
1000 return CC_FPU_LEmode
;
1002 return CC_FPU_GEmode
;
1004 return CC_FPU_LTmode
;
1006 return CC_FPU_GTmode
;
1008 return CC_FPU_EQmode
;
1010 return CC_FPU_NEmode
;
1019 v850_gen_float_compare (enum rtx_code cond
, enum machine_mode mode ATTRIBUTE_UNUSED
, rtx op0
, rtx op1
)
1021 if (GET_MODE(op0
) == DFmode
)
1026 emit_insn (gen_cmpdf_le_insn (op0
, op1
));
1029 emit_insn (gen_cmpdf_ge_insn (op0
, op1
));
1032 emit_insn (gen_cmpdf_lt_insn (op0
, op1
));
1035 emit_insn (gen_cmpdf_gt_insn (op0
, op1
));
1038 emit_insn (gen_cmpdf_eq_insn (op0
, op1
));
1041 emit_insn (gen_cmpdf_ne_insn (op0
, op1
));
1047 else if (GET_MODE(v850_compare_op0
) == SFmode
)
1052 emit_insn (gen_cmpsf_le_insn(op0
, op1
));
1055 emit_insn (gen_cmpsf_ge_insn(op0
, op1
));
1058 emit_insn (gen_cmpsf_lt_insn(op0
, op1
));
1061 emit_insn (gen_cmpsf_gt_insn(op0
, op1
));
1064 emit_insn (gen_cmpsf_eq_insn(op0
, op1
));
1067 emit_insn (gen_cmpsf_ne_insn(op0
, op1
));
1078 return v850_select_cc_mode (cond
, op0
, op1
);
1082 v850_gen_compare (enum rtx_code cond
, enum machine_mode mode
, rtx op0
, rtx op1
)
1084 if (GET_MODE_CLASS(GET_MODE (op0
)) != MODE_FLOAT
)
1086 emit_insn (gen_cmpsi_insn (op0
, op1
));
1087 return gen_rtx_fmt_ee (cond
, mode
, gen_rtx_REG(CCmode
, CC_REGNUM
), const0_rtx
);
1092 mode
= v850_gen_float_compare (cond
, mode
, op0
, op1
);
1093 cc_reg
= gen_rtx_REG (mode
, CC_REGNUM
);
1094 emit_insn (gen_rtx_SET(mode
, cc_reg
, gen_rtx_REG (mode
, FCC_REGNUM
)));
1096 return gen_rtx_fmt_ee (cond
, mode
, cc_reg
, const0_rtx
);
1100 /* Return maximum offset supported for a short EP memory reference of mode
1101 MODE and signedness UNSIGNEDP. */
1104 ep_memory_offset (enum machine_mode mode
, int unsignedp ATTRIBUTE_UNUSED
)
1111 if (TARGET_SMALL_SLD
)
1112 max_offset
= (1 << 4);
1113 else if ((TARGET_V850E
|| TARGET_V850E2_ALL
)
1115 max_offset
= (1 << 4);
1117 max_offset
= (1 << 7);
1121 if (TARGET_SMALL_SLD
)
1122 max_offset
= (1 << 5);
1123 else if ((TARGET_V850E
|| TARGET_V850E2_ALL
)
1125 max_offset
= (1 << 5);
1127 max_offset
= (1 << 8);
1132 max_offset
= (1 << 8);
1142 /* Return true if OP is a valid short EP memory reference */
1145 ep_memory_operand (rtx op
, enum machine_mode mode
, int unsigned_load
)
1151 /* If we are not using the EP register on a per-function basis
1152 then do not allow this optimization at all. This is to
1153 prevent the use of the SLD/SST instructions which cannot be
1154 guaranteed to work properly due to a hardware bug. */
1158 if (GET_CODE (op
) != MEM
)
1161 max_offset
= ep_memory_offset (mode
, unsigned_load
);
1163 mask
= GET_MODE_SIZE (mode
) - 1;
1165 addr
= XEXP (op
, 0);
1166 if (GET_CODE (addr
) == CONST
)
1167 addr
= XEXP (addr
, 0);
1169 switch (GET_CODE (addr
))
1175 return SYMBOL_REF_TDA_P (addr
);
1178 return REGNO (addr
) == EP_REGNUM
;
1181 op0
= XEXP (addr
, 0);
1182 op1
= XEXP (addr
, 1);
1183 if (GET_CODE (op1
) == CONST_INT
1184 && INTVAL (op1
) < max_offset
1185 && INTVAL (op1
) >= 0
1186 && (INTVAL (op1
) & mask
) == 0)
1188 if (GET_CODE (op0
) == REG
&& REGNO (op0
) == EP_REGNUM
)
1191 if (GET_CODE (op0
) == SYMBOL_REF
&& SYMBOL_REF_TDA_P (op0
))
1200 /* Substitute memory references involving a pointer, to use the ep pointer,
1201 taking care to save and preserve the ep. */
1204 substitute_ep_register (rtx first_insn
,
1211 rtx reg
= gen_rtx_REG (Pmode
, regno
);
1216 df_set_regs_ever_live (1, true);
1217 *p_r1
= gen_rtx_REG (Pmode
, 1);
1218 *p_ep
= gen_rtx_REG (Pmode
, 30);
1223 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1224 2 * (uses
- 3), uses
, reg_names
[regno
],
1225 IDENTIFIER_POINTER (DECL_NAME (current_function_decl
)),
1226 INSN_UID (first_insn
), INSN_UID (last_insn
));
1228 if (GET_CODE (first_insn
) == NOTE
)
1229 first_insn
= next_nonnote_insn (first_insn
);
1231 last_insn
= next_nonnote_insn (last_insn
);
1232 for (insn
= first_insn
; insn
&& insn
!= last_insn
; insn
= NEXT_INSN (insn
))
1234 if (GET_CODE (insn
) == INSN
)
1236 rtx pattern
= single_set (insn
);
1238 /* Replace the memory references. */
1242 /* Memory operands are signed by default. */
1243 int unsignedp
= FALSE
;
1245 if (GET_CODE (SET_DEST (pattern
)) == MEM
1246 && GET_CODE (SET_SRC (pattern
)) == MEM
)
1249 else if (GET_CODE (SET_DEST (pattern
)) == MEM
)
1250 p_mem
= &SET_DEST (pattern
);
1252 else if (GET_CODE (SET_SRC (pattern
)) == MEM
)
1253 p_mem
= &SET_SRC (pattern
);
1255 else if (GET_CODE (SET_SRC (pattern
)) == SIGN_EXTEND
1256 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1257 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1259 else if (GET_CODE (SET_SRC (pattern
)) == ZERO_EXTEND
1260 && GET_CODE (XEXP (SET_SRC (pattern
), 0)) == MEM
)
1262 p_mem
= &XEXP (SET_SRC (pattern
), 0);
1270 rtx addr
= XEXP (*p_mem
, 0);
1272 if (GET_CODE (addr
) == REG
&& REGNO (addr
) == (unsigned) regno
)
1273 *p_mem
= change_address (*p_mem
, VOIDmode
, *p_ep
);
1275 else if (GET_CODE (addr
) == PLUS
1276 && GET_CODE (XEXP (addr
, 0)) == REG
1277 && REGNO (XEXP (addr
, 0)) == (unsigned) regno
1278 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1279 && ((INTVAL (XEXP (addr
, 1)))
1280 < ep_memory_offset (GET_MODE (*p_mem
),
1282 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1283 *p_mem
= change_address (*p_mem
, VOIDmode
,
1284 gen_rtx_PLUS (Pmode
,
1292 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1293 insn
= prev_nonnote_insn (first_insn
);
1294 if (insn
&& GET_CODE (insn
) == INSN
1295 && GET_CODE (PATTERN (insn
)) == SET
1296 && SET_DEST (PATTERN (insn
)) == *p_ep
1297 && SET_SRC (PATTERN (insn
)) == *p_r1
)
1300 emit_insn_before (gen_rtx_SET (Pmode
, *p_r1
, *p_ep
), first_insn
);
1302 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, reg
), first_insn
);
1303 emit_insn_before (gen_rtx_SET (Pmode
, *p_ep
, *p_r1
), last_insn
);
1307 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1308 the -mep mode to copy heavily used pointers to ep to use the implicit
1320 regs
[FIRST_PSEUDO_REGISTER
];
1329 /* If not ep mode, just return now. */
1333 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1336 regs
[i
].first_insn
= NULL_RTX
;
1337 regs
[i
].last_insn
= NULL_RTX
;
1340 for (insn
= get_insns (); insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1342 switch (GET_CODE (insn
))
1344 /* End of basic block */
1351 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1353 if (max_uses
< regs
[i
].uses
)
1355 max_uses
= regs
[i
].uses
;
1361 substitute_ep_register (regs
[max_regno
].first_insn
,
1362 regs
[max_regno
].last_insn
,
1363 max_uses
, max_regno
, &r1
, &ep
);
1367 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1370 regs
[i
].first_insn
= NULL_RTX
;
1371 regs
[i
].last_insn
= NULL_RTX
;
1379 pattern
= single_set (insn
);
1381 /* See if there are any memory references we can shorten */
1384 rtx src
= SET_SRC (pattern
);
1385 rtx dest
= SET_DEST (pattern
);
1387 /* Memory operands are signed by default. */
1388 int unsignedp
= FALSE
;
1390 /* We might have (SUBREG (MEM)) here, so just get rid of the
1391 subregs to make this code simpler. */
1392 if (GET_CODE (dest
) == SUBREG
1393 && (GET_CODE (SUBREG_REG (dest
)) == MEM
1394 || GET_CODE (SUBREG_REG (dest
)) == REG
))
1395 alter_subreg (&dest
);
1396 if (GET_CODE (src
) == SUBREG
1397 && (GET_CODE (SUBREG_REG (src
)) == MEM
1398 || GET_CODE (SUBREG_REG (src
)) == REG
))
1399 alter_subreg (&src
);
1401 if (GET_CODE (dest
) == MEM
&& GET_CODE (src
) == MEM
)
1404 else if (GET_CODE (dest
) == MEM
)
1407 else if (GET_CODE (src
) == MEM
)
1410 else if (GET_CODE (src
) == SIGN_EXTEND
1411 && GET_CODE (XEXP (src
, 0)) == MEM
)
1412 mem
= XEXP (src
, 0);
1414 else if (GET_CODE (src
) == ZERO_EXTEND
1415 && GET_CODE (XEXP (src
, 0)) == MEM
)
1417 mem
= XEXP (src
, 0);
1423 if (mem
&& ep_memory_operand (mem
, GET_MODE (mem
), unsignedp
))
1426 else if (!use_ep
&& mem
1427 && GET_MODE_SIZE (GET_MODE (mem
)) <= UNITS_PER_WORD
)
1429 rtx addr
= XEXP (mem
, 0);
1433 if (GET_CODE (addr
) == REG
)
1436 regno
= REGNO (addr
);
1439 else if (GET_CODE (addr
) == PLUS
1440 && GET_CODE (XEXP (addr
, 0)) == REG
1441 && GET_CODE (XEXP (addr
, 1)) == CONST_INT
1442 && ((INTVAL (XEXP (addr
, 1)))
1443 < ep_memory_offset (GET_MODE (mem
), unsignedp
))
1444 && ((INTVAL (XEXP (addr
, 1))) >= 0))
1447 regno
= REGNO (XEXP (addr
, 0));
1456 regs
[regno
].last_insn
= insn
;
1457 if (!regs
[regno
].first_insn
)
1458 regs
[regno
].first_insn
= insn
;
1462 /* Loading up a register in the basic block zaps any savings
1464 if (GET_CODE (dest
) == REG
)
1466 enum machine_mode mode
= GET_MODE (dest
);
1470 regno
= REGNO (dest
);
1471 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
1475 /* See if we can use the pointer before this
1480 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1482 if (max_uses
< regs
[i
].uses
)
1484 max_uses
= regs
[i
].uses
;
1490 && max_regno
>= regno
1491 && max_regno
< endregno
)
1493 substitute_ep_register (regs
[max_regno
].first_insn
,
1494 regs
[max_regno
].last_insn
,
1495 max_uses
, max_regno
, &r1
,
1498 /* Since we made a substitution, zap all remembered
1500 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1503 regs
[i
].first_insn
= NULL_RTX
;
1504 regs
[i
].last_insn
= NULL_RTX
;
1509 for (i
= regno
; i
< endregno
; i
++)
1512 regs
[i
].first_insn
= NULL_RTX
;
1513 regs
[i
].last_insn
= NULL_RTX
;
1521 /* # of registers saved by the interrupt handler. */
1522 #define INTERRUPT_FIXED_NUM 5
1524 /* # of bytes for registers saved by the interrupt handler. */
1525 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1527 /* # of words saved for other registers. */
1528 #define INTERRUPT_ALL_SAVE_NUM \
1529 (30 - INTERRUPT_FIXED_NUM)
1531 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1534 compute_register_save_size (long * p_reg_saved
)
1538 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1539 int call_p
= df_regs_ever_live_p (LINK_POINTER_REGNUM
);
1542 /* Count the return pointer if we need to save it. */
1543 if (crtl
->profile
&& !call_p
)
1545 df_set_regs_ever_live (LINK_POINTER_REGNUM
, true);
1549 /* Count space for the register saves. */
1550 if (interrupt_handler
)
1552 for (i
= 0; i
<= 31; i
++)
1556 if (df_regs_ever_live_p (i
) || call_p
)
1559 reg_saved
|= 1L << i
;
1563 /* We don't save/restore r0 or the stack pointer */
1565 case STACK_POINTER_REGNUM
:
1568 /* For registers with fixed use, we save them, set them to the
1569 appropriate value, and then restore them.
1570 These registers are handled specially, so don't list them
1571 on the list of registers to save in the prologue. */
1572 case 1: /* temp used to hold ep */
1574 case 10: /* temp used to call interrupt save/restore */
1575 case 11: /* temp used to call interrupt save/restore (long call) */
1576 case EP_REGNUM
: /* ep */
1583 /* Find the first register that needs to be saved. */
1584 for (i
= 0; i
<= 31; i
++)
1585 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1586 || i
== LINK_POINTER_REGNUM
))
1589 /* If it is possible that an out-of-line helper function might be
1590 used to generate the prologue for the current function, then we
1591 need to cover the possibility that such a helper function will
1592 be used, despite the fact that there might be gaps in the list of
1593 registers that need to be saved. To detect this we note that the
1594 helper functions always push at least register r29 (provided
1595 that the function is not an interrupt handler). */
1597 if (TARGET_PROLOG_FUNCTION
1598 && (i
== 2 || ((i
>= 20) && (i
< 30))))
1603 reg_saved
|= 1L << i
;
1608 /* Helper functions save all registers between the starting
1609 register and the last register, regardless of whether they
1610 are actually used by the function or not. */
1611 for (; i
<= 29; i
++)
1614 reg_saved
|= 1L << i
;
1617 if (df_regs_ever_live_p (LINK_POINTER_REGNUM
))
1620 reg_saved
|= 1L << LINK_POINTER_REGNUM
;
1625 for (; i
<= 31; i
++)
1626 if (df_regs_ever_live_p (i
) && ((! call_used_regs
[i
])
1627 || i
== LINK_POINTER_REGNUM
))
1630 reg_saved
|= 1L << i
;
1636 *p_reg_saved
= reg_saved
;
1642 compute_frame_size (int size
, long * p_reg_saved
)
1645 + compute_register_save_size (p_reg_saved
)
1646 + crtl
->outgoing_args_size
);
1650 use_prolog_function (int num_save
, int frame_size
)
1652 int alloc_stack
= (4 * num_save
);
1653 int unalloc_stack
= frame_size
- alloc_stack
;
1654 int save_func_len
, restore_func_len
;
1655 int save_normal_len
, restore_normal_len
;
1657 if (! TARGET_DISABLE_CALLT
)
1658 save_func_len
= restore_func_len
= 2;
1660 save_func_len
= restore_func_len
= TARGET_LONG_CALLS
? (4+4+4+2+2) : 4;
1664 save_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1665 restore_func_len
+= CONST_OK_FOR_J (-unalloc_stack
) ? 2 : 4;
1668 /* See if we would have used ep to save the stack. */
1669 if (TARGET_EP
&& num_save
> 3 && (unsigned)frame_size
< 255)
1670 save_normal_len
= restore_normal_len
= (3 * 2) + (2 * num_save
);
1672 save_normal_len
= restore_normal_len
= 4 * num_save
;
1674 save_normal_len
+= CONST_OK_FOR_J (-frame_size
) ? 2 : 4;
1675 restore_normal_len
+= (CONST_OK_FOR_J (frame_size
) ? 2 : 4) + 2;
1677 /* Don't bother checking if we don't actually save any space.
1678 This happens for instance if one register is saved and additional
1679 stack space is allocated. */
1680 return ((save_func_len
+ restore_func_len
) < (save_normal_len
+ restore_normal_len
));
1684 expand_prologue (void)
1687 unsigned int size
= get_frame_size ();
1688 unsigned int actual_fsize
;
1689 unsigned int init_stack_alloc
= 0;
1692 unsigned int num_save
;
1694 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1697 actual_fsize
= compute_frame_size (size
, ®_saved
);
1699 /* Save/setup global registers for interrupt functions right now. */
1700 if (interrupt_handler
)
1702 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1703 emit_insn (gen_callt_save_interrupt ());
1705 emit_insn (gen_save_interrupt ());
1707 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1709 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1710 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1713 /* Identify all of the saved registers. */
1715 for (i
= 1; i
< 32; i
++)
1717 if (((1L << i
) & reg_saved
) != 0)
1718 save_regs
[num_save
++] = gen_rtx_REG (Pmode
, i
);
1721 /* See if we have an insn that allocates stack space and saves the particular
1722 registers we want to. */
1723 save_all
= NULL_RTX
;
1724 if (TARGET_PROLOG_FUNCTION
&& num_save
> 0)
1726 if (use_prolog_function (num_save
, actual_fsize
))
1728 int alloc_stack
= 4 * num_save
;
1731 save_all
= gen_rtx_PARALLEL
1733 rtvec_alloc (num_save
+ 1
1734 + (TARGET_DISABLE_CALLT
? (TARGET_LONG_CALLS
? 2 : 1) : 0)));
1736 XVECEXP (save_all
, 0, 0)
1737 = gen_rtx_SET (VOIDmode
,
1739 gen_rtx_PLUS (Pmode
,
1741 GEN_INT(-alloc_stack
)));
1742 for (i
= 0; i
< num_save
; i
++)
1745 XVECEXP (save_all
, 0, i
+1)
1746 = gen_rtx_SET (VOIDmode
,
1748 gen_rtx_PLUS (Pmode
,
1754 if (TARGET_DISABLE_CALLT
)
1756 XVECEXP (save_all
, 0, num_save
+ 1)
1757 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 10));
1759 if (TARGET_LONG_CALLS
)
1760 XVECEXP (save_all
, 0, num_save
+ 2)
1761 = gen_rtx_CLOBBER (VOIDmode
, gen_rtx_REG (Pmode
, 11));
1764 code
= recog (save_all
, NULL_RTX
, NULL
);
1767 rtx insn
= emit_insn (save_all
);
1768 INSN_CODE (insn
) = code
;
1769 actual_fsize
-= alloc_stack
;
1773 save_all
= NULL_RTX
;
1777 /* If no prolog save function is available, store the registers the old
1778 fashioned way (one by one). */
1781 /* Special case interrupt functions that save all registers for a call. */
1782 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1784 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
1785 emit_insn (gen_callt_save_all_interrupt ());
1787 emit_insn (gen_save_all_interrupt ());
1792 /* If the stack is too big, allocate it in chunks so we can do the
1793 register saves. We use the register save size so we use the ep
1795 if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1796 init_stack_alloc
= compute_register_save_size (NULL
);
1798 init_stack_alloc
= actual_fsize
;
1800 /* Save registers at the beginning of the stack frame. */
1801 offset
= init_stack_alloc
- 4;
1803 if (init_stack_alloc
)
1804 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1806 GEN_INT (- (signed) init_stack_alloc
)));
1808 /* Save the return pointer first. */
1809 if (num_save
> 0 && REGNO (save_regs
[num_save
-1]) == LINK_POINTER_REGNUM
)
1811 emit_move_insn (gen_rtx_MEM (SImode
,
1812 plus_constant (stack_pointer_rtx
,
1814 save_regs
[--num_save
]);
1818 for (i
= 0; i
< num_save
; i
++)
1820 emit_move_insn (gen_rtx_MEM (SImode
,
1821 plus_constant (stack_pointer_rtx
,
1829 /* Allocate the rest of the stack that was not allocated above (either it is
1830 > 32K or we just called a function to save the registers and needed more
1832 if (actual_fsize
> init_stack_alloc
)
1834 int diff
= actual_fsize
- init_stack_alloc
;
1835 if (CONST_OK_FOR_K (-diff
))
1836 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1841 rtx reg
= gen_rtx_REG (Pmode
, 12);
1842 emit_move_insn (reg
, GEN_INT (-diff
));
1843 emit_insn (gen_addsi3 (stack_pointer_rtx
, stack_pointer_rtx
, reg
));
1847 /* If we need a frame pointer, set it up now. */
1848 if (frame_pointer_needed
)
1849 emit_move_insn (hard_frame_pointer_rtx
, stack_pointer_rtx
);
1854 expand_epilogue (void)
1857 unsigned int size
= get_frame_size ();
1859 int actual_fsize
= compute_frame_size (size
, ®_saved
);
1860 rtx restore_regs
[32];
1862 unsigned int num_restore
;
1864 int interrupt_handler
= v850_interrupt_function_p (current_function_decl
);
1866 /* Eliminate the initial stack stored by interrupt functions. */
1867 if (interrupt_handler
)
1869 actual_fsize
-= INTERRUPT_FIXED_SAVE_SIZE
;
1870 if (((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1871 actual_fsize
-= INTERRUPT_ALL_SAVE_SIZE
;
1874 /* Cut off any dynamic stack created. */
1875 if (frame_pointer_needed
)
1876 emit_move_insn (stack_pointer_rtx
, hard_frame_pointer_rtx
);
1878 /* Identify all of the saved registers. */
1880 for (i
= 1; i
< 32; i
++)
1882 if (((1L << i
) & reg_saved
) != 0)
1883 restore_regs
[num_restore
++] = gen_rtx_REG (Pmode
, i
);
1886 /* See if we have an insn that restores the particular registers we
1888 restore_all
= NULL_RTX
;
1890 if (TARGET_PROLOG_FUNCTION
1892 && !interrupt_handler
)
1894 int alloc_stack
= (4 * num_restore
);
1896 /* Don't bother checking if we don't actually save any space. */
1897 if (use_prolog_function (num_restore
, actual_fsize
))
1900 restore_all
= gen_rtx_PARALLEL (VOIDmode
,
1901 rtvec_alloc (num_restore
+ 2));
1902 XVECEXP (restore_all
, 0, 0) = ret_rtx
;
1903 XVECEXP (restore_all
, 0, 1)
1904 = gen_rtx_SET (VOIDmode
, stack_pointer_rtx
,
1905 gen_rtx_PLUS (Pmode
,
1907 GEN_INT (alloc_stack
)));
1909 offset
= alloc_stack
- 4;
1910 for (i
= 0; i
< num_restore
; i
++)
1912 XVECEXP (restore_all
, 0, i
+2)
1913 = gen_rtx_SET (VOIDmode
,
1916 gen_rtx_PLUS (Pmode
,
1922 code
= recog (restore_all
, NULL_RTX
, NULL
);
1928 actual_fsize
-= alloc_stack
;
1931 if (CONST_OK_FOR_K (actual_fsize
))
1932 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1934 GEN_INT (actual_fsize
)));
1937 rtx reg
= gen_rtx_REG (Pmode
, 12);
1938 emit_move_insn (reg
, GEN_INT (actual_fsize
));
1939 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1945 insn
= emit_jump_insn (restore_all
);
1946 INSN_CODE (insn
) = code
;
1950 restore_all
= NULL_RTX
;
1954 /* If no epilogue save function is available, restore the registers the
1955 old fashioned way (one by one). */
1958 unsigned int init_stack_free
;
1960 /* If the stack is large, we need to cut it down in 2 pieces. */
1961 if (interrupt_handler
)
1962 init_stack_free
= 0;
1963 else if (actual_fsize
&& !CONST_OK_FOR_K (-actual_fsize
))
1964 init_stack_free
= 4 * num_restore
;
1966 init_stack_free
= (signed) actual_fsize
;
1968 /* Deallocate the rest of the stack if it is > 32K. */
1969 if ((unsigned int) actual_fsize
> init_stack_free
)
1973 diff
= actual_fsize
- init_stack_free
;
1975 if (CONST_OK_FOR_K (diff
))
1976 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1981 rtx reg
= gen_rtx_REG (Pmode
, 12);
1982 emit_move_insn (reg
, GEN_INT (diff
));
1983 emit_insn (gen_addsi3 (stack_pointer_rtx
,
1989 /* Special case interrupt functions that save all registers
1991 if (interrupt_handler
&& ((1L << LINK_POINTER_REGNUM
) & reg_saved
) != 0)
1993 if (! TARGET_DISABLE_CALLT
)
1994 emit_insn (gen_callt_restore_all_interrupt ());
1996 emit_insn (gen_restore_all_interrupt ());
2000 /* Restore registers from the beginning of the stack frame. */
2001 int offset
= init_stack_free
- 4;
2003 /* Restore the return pointer first. */
2005 && REGNO (restore_regs
[num_restore
- 1]) == LINK_POINTER_REGNUM
)
2007 emit_move_insn (restore_regs
[--num_restore
],
2008 gen_rtx_MEM (SImode
,
2009 plus_constant (stack_pointer_rtx
,
2014 for (i
= 0; i
< num_restore
; i
++)
2016 emit_move_insn (restore_regs
[i
],
2017 gen_rtx_MEM (SImode
,
2018 plus_constant (stack_pointer_rtx
,
2021 emit_use (restore_regs
[i
]);
2025 /* Cut back the remainder of the stack. */
2026 if (init_stack_free
)
2027 emit_insn (gen_addsi3 (stack_pointer_rtx
,
2029 GEN_INT (init_stack_free
)));
2032 /* And return or use reti for interrupt handlers. */
2033 if (interrupt_handler
)
2035 if (! TARGET_DISABLE_CALLT
&& (TARGET_V850E
|| TARGET_V850E2_ALL
))
2036 emit_insn (gen_callt_return_interrupt ());
2038 emit_jump_insn (gen_return_interrupt ());
2040 else if (actual_fsize
)
2041 emit_jump_insn (gen_return_internal ());
2043 emit_jump_insn (gen_return_simple ());
2046 v850_interrupt_cache_p
= FALSE
;
2047 v850_interrupt_p
= FALSE
;
2050 /* Update the condition code from the insn. */
2052 notice_update_cc (rtx body
, rtx insn
)
2054 switch (get_attr_cc (insn
))
2057 /* Insn does not affect CC at all. */
2061 /* Insn does not change CC, but the 0'th operand has been changed. */
2062 if (cc_status
.value1
!= 0
2063 && reg_overlap_mentioned_p (recog_data
.operand
[0], cc_status
.value1
))
2064 cc_status
.value1
= 0;
2068 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
2069 V,C is in an unusable state. */
2071 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
| CC_NO_CARRY
;
2072 cc_status
.value1
= recog_data
.operand
[0];
2076 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
2077 C is in an unusable state. */
2079 cc_status
.flags
|= CC_NO_CARRY
;
2080 cc_status
.value1
= recog_data
.operand
[0];
2084 /* The insn is a compare instruction. */
2086 cc_status
.value1
= SET_SRC (body
);
2090 /* Insn doesn't leave CC in a usable state. */
2099 /* Retrieve the data area that has been chosen for the given decl. */
2102 v850_get_data_area (tree decl
)
2104 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2105 return DATA_AREA_SDA
;
2107 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2108 return DATA_AREA_TDA
;
2110 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl
)) != NULL_TREE
)
2111 return DATA_AREA_ZDA
;
2113 return DATA_AREA_NORMAL
;
2116 /* Store the indicated data area in the decl's attributes. */
2119 v850_set_data_area (tree decl
, v850_data_area data_area
)
2125 case DATA_AREA_SDA
: name
= get_identifier ("sda"); break;
2126 case DATA_AREA_TDA
: name
= get_identifier ("tda"); break;
2127 case DATA_AREA_ZDA
: name
= get_identifier ("zda"); break;
2132 DECL_ATTRIBUTES (decl
) = tree_cons
2133 (name
, NULL
, DECL_ATTRIBUTES (decl
));
2136 /* Handle an "interrupt" attribute; arguments as in
2137 struct attribute_spec.handler. */
2139 v850_handle_interrupt_attribute (tree
* node
,
2141 tree args ATTRIBUTE_UNUSED
,
2142 int flags ATTRIBUTE_UNUSED
,
2143 bool * no_add_attrs
)
2145 if (TREE_CODE (*node
) != FUNCTION_DECL
)
2147 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
2149 *no_add_attrs
= true;
2155 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2156 struct attribute_spec.handler. */
2158 v850_handle_data_area_attribute (tree
* node
,
2160 tree args ATTRIBUTE_UNUSED
,
2161 int flags ATTRIBUTE_UNUSED
,
2162 bool * no_add_attrs
)
2164 v850_data_area data_area
;
2165 v850_data_area area
;
2168 /* Implement data area attribute. */
2169 if (is_attribute_p ("sda", name
))
2170 data_area
= DATA_AREA_SDA
;
2171 else if (is_attribute_p ("tda", name
))
2172 data_area
= DATA_AREA_TDA
;
2173 else if (is_attribute_p ("zda", name
))
2174 data_area
= DATA_AREA_ZDA
;
2178 switch (TREE_CODE (decl
))
2181 if (current_function_decl
!= NULL_TREE
)
2183 error_at (DECL_SOURCE_LOCATION (decl
),
2184 "data area attributes cannot be specified for "
2186 *no_add_attrs
= true;
2192 area
= v850_get_data_area (decl
);
2193 if (area
!= DATA_AREA_NORMAL
&& data_area
!= area
)
2195 error ("data area of %q+D conflicts with previous declaration",
2197 *no_add_attrs
= true;
2209 /* Return nonzero if FUNC is an interrupt function as specified
2210 by the "interrupt" attribute. */
2213 v850_interrupt_function_p (tree func
)
2218 if (v850_interrupt_cache_p
)
2219 return v850_interrupt_p
;
2221 if (TREE_CODE (func
) != FUNCTION_DECL
)
2224 a
= lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func
));
2230 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
2231 ret
= a
!= NULL_TREE
;
2234 /* Its not safe to trust global variables until after function inlining has
2236 if (reload_completed
| reload_in_progress
)
2237 v850_interrupt_p
= ret
;
2244 v850_encode_data_area (tree decl
, rtx symbol
)
2248 /* Map explicit sections into the appropriate attribute */
2249 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2251 if (DECL_SECTION_NAME (decl
))
2253 const char *name
= TREE_STRING_POINTER (DECL_SECTION_NAME (decl
));
2255 if (streq (name
, ".zdata") || streq (name
, ".zbss"))
2256 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2258 else if (streq (name
, ".sdata") || streq (name
, ".sbss"))
2259 v850_set_data_area (decl
, DATA_AREA_SDA
);
2261 else if (streq (name
, ".tdata"))
2262 v850_set_data_area (decl
, DATA_AREA_TDA
);
2265 /* If no attribute, support -m{zda,sda,tda}=n */
2268 int size
= int_size_in_bytes (TREE_TYPE (decl
));
2272 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_TDA
])
2273 v850_set_data_area (decl
, DATA_AREA_TDA
);
2275 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_SDA
])
2276 v850_set_data_area (decl
, DATA_AREA_SDA
);
2278 else if (size
<= small_memory_max
[(int) SMALL_MEMORY_ZDA
])
2279 v850_set_data_area (decl
, DATA_AREA_ZDA
);
2282 if (v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2286 flags
= SYMBOL_REF_FLAGS (symbol
);
2287 switch (v850_get_data_area (decl
))
2289 case DATA_AREA_ZDA
: flags
|= SYMBOL_FLAG_ZDA
; break;
2290 case DATA_AREA_TDA
: flags
|= SYMBOL_FLAG_TDA
; break;
2291 case DATA_AREA_SDA
: flags
|= SYMBOL_FLAG_SDA
; break;
2292 default: gcc_unreachable ();
2294 SYMBOL_REF_FLAGS (symbol
) = flags
;
2298 v850_encode_section_info (tree decl
, rtx rtl
, int first
)
2300 default_encode_section_info (decl
, rtl
, first
);
2302 if (TREE_CODE (decl
) == VAR_DECL
2303 && (TREE_STATIC (decl
) || DECL_EXTERNAL (decl
)))
2304 v850_encode_data_area (decl
, XEXP (rtl
, 0));
2307 /* Construct a JR instruction to a routine that will perform the equivalent of
2308 the RTL passed in as an argument. This RTL is a function epilogue that
2309 pops registers off the stack and possibly releases some extra stack space
2310 as well. The code has already verified that the RTL matches these
2314 construct_restore_jr (rtx op
)
2316 int count
= XVECLEN (op
, 0);
2318 unsigned long int mask
;
2319 unsigned long int first
;
2320 unsigned long int last
;
2322 static char buff
[100]; /* XXX */
2326 error ("bogus JR construction: %d", count
);
2330 /* Work out how many bytes to pop off the stack before retrieving
2332 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2333 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2334 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2336 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2338 /* Each pop will remove 4 bytes from the stack.... */
2339 stack_bytes
-= (count
- 2) * 4;
2341 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2342 if (stack_bytes
!= 0)
2344 error ("bad amount of stack space removal: %d", stack_bytes
);
2348 /* Now compute the bit mask of registers to push. */
2350 for (i
= 2; i
< count
; i
++)
2352 rtx vector_element
= XVECEXP (op
, 0, i
);
2354 gcc_assert (GET_CODE (vector_element
) == SET
);
2355 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2356 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2359 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2362 /* Scan for the first register to pop. */
2363 for (first
= 0; first
< 32; first
++)
2365 if (mask
& (1 << first
))
2369 gcc_assert (first
< 32);
2371 /* Discover the last register to pop. */
2372 if (mask
& (1 << LINK_POINTER_REGNUM
))
2374 last
= LINK_POINTER_REGNUM
;
2378 gcc_assert (!stack_bytes
);
2379 gcc_assert (mask
& (1 << 29));
2384 /* Note, it is possible to have gaps in the register mask.
2385 We ignore this here, and generate a JR anyway. We will
2386 be popping more registers than is strictly necessary, but
2387 it does save code space. */
2389 if (TARGET_LONG_CALLS
)
2394 sprintf (name
, "__return_%s", reg_names
[first
]);
2396 sprintf (name
, "__return_%s_%s", reg_names
[first
], reg_names
[last
]);
2398 sprintf (buff
, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2404 sprintf (buff
, "jr __return_%s", reg_names
[first
]);
2406 sprintf (buff
, "jr __return_%s_%s", reg_names
[first
], reg_names
[last
]);
2413 /* Construct a JARL instruction to a routine that will perform the equivalent
2414 of the RTL passed as a parameter. This RTL is a function prologue that
2415 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2416 some stack space as well. The code has already verified that the RTL
2417 matches these requirements. */
2419 construct_save_jarl (rtx op
)
2421 int count
= XVECLEN (op
, 0);
2423 unsigned long int mask
;
2424 unsigned long int first
;
2425 unsigned long int last
;
2427 static char buff
[100]; /* XXX */
2429 if (count
<= (TARGET_LONG_CALLS
? 3 : 2))
2431 error ("bogus JARL construction: %d", count
);
2436 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2437 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2438 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 0)) == REG
);
2439 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2441 /* Work out how many bytes to push onto the stack after storing the
2443 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2445 /* Each push will put 4 bytes from the stack.... */
2446 stack_bytes
+= (count
- (TARGET_LONG_CALLS
? 3 : 2)) * 4;
2448 /* Make sure that the amount we are popping either 0 or 16 bytes. */
2449 if (stack_bytes
!= 0)
2451 error ("bad amount of stack space removal: %d", stack_bytes
);
2455 /* Now compute the bit mask of registers to push. */
2457 for (i
= 1; i
< count
- (TARGET_LONG_CALLS
? 2 : 1); i
++)
2459 rtx vector_element
= XVECEXP (op
, 0, i
);
2461 gcc_assert (GET_CODE (vector_element
) == SET
);
2462 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2463 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2466 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2469 /* Scan for the first register to push. */
2470 for (first
= 0; first
< 32; first
++)
2472 if (mask
& (1 << first
))
2476 gcc_assert (first
< 32);
2478 /* Discover the last register to push. */
2479 if (mask
& (1 << LINK_POINTER_REGNUM
))
2481 last
= LINK_POINTER_REGNUM
;
2485 gcc_assert (!stack_bytes
);
2486 gcc_assert (mask
& (1 << 29));
2491 /* Note, it is possible to have gaps in the register mask.
2492 We ignore this here, and generate a JARL anyway. We will
2493 be pushing more registers than is strictly necessary, but
2494 it does save code space. */
2496 if (TARGET_LONG_CALLS
)
2501 sprintf (name
, "__save_%s", reg_names
[first
]);
2503 sprintf (name
, "__save_%s_%s", reg_names
[first
], reg_names
[last
]);
2505 sprintf (buff
, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2511 sprintf (buff
, "jarl __save_%s, r10", reg_names
[first
]);
2513 sprintf (buff
, "jarl __save_%s_%s, r10", reg_names
[first
],
2520 extern tree last_assemble_variable_decl
;
2521 extern int size_directive_output
;
2523 /* A version of asm_output_aligned_bss() that copes with the special
2524 data areas of the v850. */
2526 v850_output_aligned_bss (FILE * file
,
2529 unsigned HOST_WIDE_INT size
,
2532 switch (v850_get_data_area (decl
))
2535 switch_to_section (zbss_section
);
2539 switch_to_section (sbss_section
);
2543 switch_to_section (tdata_section
);
2546 switch_to_section (bss_section
);
2550 ASM_OUTPUT_ALIGN (file
, floor_log2 (align
/ BITS_PER_UNIT
));
2551 #ifdef ASM_DECLARE_OBJECT_NAME
2552 last_assemble_variable_decl
= decl
;
2553 ASM_DECLARE_OBJECT_NAME (file
, name
, decl
);
2555 /* Standard thing is just output label for the object. */
2556 ASM_OUTPUT_LABEL (file
, name
);
2557 #endif /* ASM_DECLARE_OBJECT_NAME */
2558 ASM_OUTPUT_SKIP (file
, size
? size
: 1);
2561 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2563 v850_output_common (FILE * file
,
2569 if (decl
== NULL_TREE
)
2571 fprintf (file
, "%s", COMMON_ASM_OP
);
2575 switch (v850_get_data_area (decl
))
2578 fprintf (file
, "%s", ZCOMMON_ASM_OP
);
2582 fprintf (file
, "%s", SCOMMON_ASM_OP
);
2586 fprintf (file
, "%s", TCOMMON_ASM_OP
);
2590 fprintf (file
, "%s", COMMON_ASM_OP
);
2595 assemble_name (file
, name
);
2596 fprintf (file
, ",%u,%u\n", size
, align
/ BITS_PER_UNIT
);
2599 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2601 v850_output_local (FILE * file
,
2607 fprintf (file
, "%s", LOCAL_ASM_OP
);
2608 assemble_name (file
, name
);
2609 fprintf (file
, "\n");
2611 ASM_OUTPUT_ALIGNED_DECL_COMMON (file
, decl
, name
, size
, align
);
2614 /* Add data area to the given declaration if a ghs data area pragma is
2615 currently in effect (#pragma ghs startXXX/endXXX). */
2617 v850_insert_attributes (tree decl
, tree
* attr_ptr ATTRIBUTE_UNUSED
)
2620 && data_area_stack
->data_area
2621 && current_function_decl
== NULL_TREE
2622 && (TREE_CODE (decl
) == VAR_DECL
|| TREE_CODE (decl
) == CONST_DECL
)
2623 && v850_get_data_area (decl
) == DATA_AREA_NORMAL
)
2624 v850_set_data_area (decl
, data_area_stack
->data_area
);
2626 /* Initialize the default names of the v850 specific sections,
2627 if this has not been done before. */
2629 if (GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
] == NULL
)
2631 GHS_default_section_names
[(int) GHS_SECTION_KIND_SDATA
]
2632 = build_string (sizeof (".sdata")-1, ".sdata");
2634 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROSDATA
]
2635 = build_string (sizeof (".rosdata")-1, ".rosdata");
2637 GHS_default_section_names
[(int) GHS_SECTION_KIND_TDATA
]
2638 = build_string (sizeof (".tdata")-1, ".tdata");
2640 GHS_default_section_names
[(int) GHS_SECTION_KIND_ZDATA
]
2641 = build_string (sizeof (".zdata")-1, ".zdata");
2643 GHS_default_section_names
[(int) GHS_SECTION_KIND_ROZDATA
]
2644 = build_string (sizeof (".rozdata")-1, ".rozdata");
2647 if (current_function_decl
== NULL_TREE
2648 && (TREE_CODE (decl
) == VAR_DECL
2649 || TREE_CODE (decl
) == CONST_DECL
2650 || TREE_CODE (decl
) == FUNCTION_DECL
)
2651 && (!DECL_EXTERNAL (decl
) || DECL_INITIAL (decl
))
2652 && !DECL_SECTION_NAME (decl
))
2654 enum GHS_section_kind kind
= GHS_SECTION_KIND_DEFAULT
;
2655 tree chosen_section
;
2657 if (TREE_CODE (decl
) == FUNCTION_DECL
)
2658 kind
= GHS_SECTION_KIND_TEXT
;
2661 /* First choose a section kind based on the data area of the decl. */
2662 switch (v850_get_data_area (decl
))
2668 kind
= ((TREE_READONLY (decl
))
2669 ? GHS_SECTION_KIND_ROSDATA
2670 : GHS_SECTION_KIND_SDATA
);
2674 kind
= GHS_SECTION_KIND_TDATA
;
2678 kind
= ((TREE_READONLY (decl
))
2679 ? GHS_SECTION_KIND_ROZDATA
2680 : GHS_SECTION_KIND_ZDATA
);
2683 case DATA_AREA_NORMAL
: /* default data area */
2684 if (TREE_READONLY (decl
))
2685 kind
= GHS_SECTION_KIND_RODATA
;
2686 else if (DECL_INITIAL (decl
))
2687 kind
= GHS_SECTION_KIND_DATA
;
2689 kind
= GHS_SECTION_KIND_BSS
;
2693 /* Now, if the section kind has been explicitly renamed,
2694 then attach a section attribute. */
2695 chosen_section
= GHS_current_section_names
[(int) kind
];
2697 /* Otherwise, if this kind of section needs an explicit section
2698 attribute, then also attach one. */
2699 if (chosen_section
== NULL
)
2700 chosen_section
= GHS_default_section_names
[(int) kind
];
2704 /* Only set the section name if specified by a pragma, because
2705 otherwise it will force those variables to get allocated storage
2706 in this module, rather than by the linker. */
2707 DECL_SECTION_NAME (decl
) = chosen_section
;
2712 /* Construct a DISPOSE instruction that is the equivalent of
2713 the given RTX. We have already verified that this should
2717 construct_dispose_instruction (rtx op
)
2719 int count
= XVECLEN (op
, 0);
2721 unsigned long int mask
;
2723 static char buff
[ 100 ]; /* XXX */
2728 error ("bogus DISPOSE construction: %d", count
);
2732 /* Work out how many bytes to pop off the
2733 stack before retrieving registers. */
2734 gcc_assert (GET_CODE (XVECEXP (op
, 0, 1)) == SET
);
2735 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 1))) == PLUS
);
2736 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1)) == CONST_INT
);
2738 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 1)), 1));
2740 /* Each pop will remove 4 bytes from the stack.... */
2741 stack_bytes
-= (count
- 2) * 4;
2743 /* Make sure that the amount we are popping
2744 will fit into the DISPOSE instruction. */
2745 if (stack_bytes
> 128)
2747 error ("too much stack space to dispose of: %d", stack_bytes
);
2751 /* Now compute the bit mask of registers to push. */
2754 for (i
= 2; i
< count
; i
++)
2756 rtx vector_element
= XVECEXP (op
, 0, i
);
2758 gcc_assert (GET_CODE (vector_element
) == SET
);
2759 gcc_assert (GET_CODE (SET_DEST (vector_element
)) == REG
);
2760 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element
),
2763 if (REGNO (SET_DEST (vector_element
)) == 2)
2766 mask
|= 1 << REGNO (SET_DEST (vector_element
));
2769 if (! TARGET_DISABLE_CALLT
2770 && (use_callt
|| stack_bytes
== 0))
2774 sprintf (buff
, "callt ctoff(__callt_return_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29);
2779 for (i
= 20; i
< 32; i
++)
2780 if (mask
& (1 << i
))
2784 sprintf (buff
, "callt ctoff(__callt_return_r31c)");
2786 sprintf (buff
, "callt ctoff(__callt_return_r%d_r%s)",
2787 i
, (mask
& (1 << 31)) ? "31c" : "29");
2792 static char regs
[100]; /* XXX */
2795 /* Generate the DISPOSE instruction. Note we could just issue the
2796 bit mask as a number as the assembler can cope with this, but for
2797 the sake of our readers we turn it into a textual description. */
2801 for (i
= 20; i
< 32; i
++)
2803 if (mask
& (1 << i
))
2808 strcat (regs
, ", ");
2813 strcat (regs
, reg_names
[ first
]);
2815 for (i
++; i
< 32; i
++)
2816 if ((mask
& (1 << i
)) == 0)
2821 strcat (regs
, " - ");
2822 strcat (regs
, reg_names
[ i
- 1 ] );
2827 sprintf (buff
, "dispose %d {%s}, r31", stack_bytes
/ 4, regs
);
2833 /* Construct a PREPARE instruction that is the equivalent of
2834 the given RTL. We have already verified that this should
2838 construct_prepare_instruction (rtx op
)
2842 unsigned long int mask
;
2844 static char buff
[ 100 ]; /* XXX */
2847 if (XVECLEN (op
, 0) <= 1)
2849 error ("bogus PREPEARE construction: %d", XVECLEN (op
, 0));
2853 /* Work out how many bytes to push onto
2854 the stack after storing the registers. */
2855 gcc_assert (GET_CODE (XVECEXP (op
, 0, 0)) == SET
);
2856 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op
, 0, 0))) == PLUS
);
2857 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1)) == CONST_INT
);
2859 stack_bytes
= INTVAL (XEXP (SET_SRC (XVECEXP (op
, 0, 0)), 1));
2862 /* Make sure that the amount we are popping
2863 will fit into the DISPOSE instruction. */
2864 if (stack_bytes
< -128)
2866 error ("too much stack space to prepare: %d", stack_bytes
);
2870 /* Now compute the bit mask of registers to push. */
2873 for (i
= 1; i
< XVECLEN (op
, 0); i
++)
2875 rtx vector_element
= XVECEXP (op
, 0, i
);
2877 if (GET_CODE (vector_element
) == CLOBBER
)
2880 gcc_assert (GET_CODE (vector_element
) == SET
);
2881 gcc_assert (GET_CODE (SET_SRC (vector_element
)) == REG
);
2882 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element
),
2885 if (REGNO (SET_SRC (vector_element
)) == 2)
2888 mask
|= 1 << REGNO (SET_SRC (vector_element
));
2892 stack_bytes
+= count
* 4;
2894 if ((! TARGET_DISABLE_CALLT
)
2895 && (use_callt
|| stack_bytes
== 0))
2899 sprintf (buff
, "callt ctoff(__callt_save_r2_r%d)", (mask
& (1 << 31)) ? 31 : 29 );
2903 for (i
= 20; i
< 32; i
++)
2904 if (mask
& (1 << i
))
2908 sprintf (buff
, "callt ctoff(__callt_save_r31c)");
2910 sprintf (buff
, "callt ctoff(__callt_save_r%d_r%s)",
2911 i
, (mask
& (1 << 31)) ? "31c" : "29");
2915 static char regs
[100]; /* XXX */
2919 /* Generate the PREPARE instruction. Note we could just issue the
2920 bit mask as a number as the assembler can cope with this, but for
2921 the sake of our readers we turn it into a textual description. */
2925 for (i
= 20; i
< 32; i
++)
2927 if (mask
& (1 << i
))
2932 strcat (regs
, ", ");
2937 strcat (regs
, reg_names
[ first
]);
2939 for (i
++; i
< 32; i
++)
2940 if ((mask
& (1 << i
)) == 0)
2945 strcat (regs
, " - ");
2946 strcat (regs
, reg_names
[ i
- 1 ] );
2951 sprintf (buff
, "prepare {%s}, %d", regs
, (- stack_bytes
) / 4);
2957 /* Return an RTX indicating where the return address to the
2958 calling function can be found. */
2961 v850_return_addr (int count
)
2966 return get_hard_reg_initial_val (Pmode
, LINK_POINTER_REGNUM
);
2969 /* Implement TARGET_ASM_INIT_SECTIONS. */
2972 v850_asm_init_sections (void)
2975 = get_unnamed_section (0, output_section_asm_op
,
2976 "\t.section .rosdata,\"a\"");
2979 = get_unnamed_section (0, output_section_asm_op
,
2980 "\t.section .rozdata,\"a\"");
2983 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2984 "\t.section .tdata,\"aw\"");
2987 = get_unnamed_section (SECTION_WRITE
, output_section_asm_op
,
2988 "\t.section .zdata,\"aw\"");
2991 = get_unnamed_section (SECTION_WRITE
| SECTION_BSS
,
2992 output_section_asm_op
,
2993 "\t.section .zbss,\"aw\"");
2997 v850_select_section (tree exp
,
2998 int reloc ATTRIBUTE_UNUSED
,
2999 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED
)
3001 if (TREE_CODE (exp
) == VAR_DECL
)
3004 if (!TREE_READONLY (exp
)
3005 || TREE_SIDE_EFFECTS (exp
)
3006 || !DECL_INITIAL (exp
)
3007 || (DECL_INITIAL (exp
) != error_mark_node
3008 && !TREE_CONSTANT (DECL_INITIAL (exp
))))
3013 switch (v850_get_data_area (exp
))
3016 return is_const
? rozdata_section
: zdata_section
;
3019 return tdata_section
;
3022 return is_const
? rosdata_section
: sdata_section
;
3025 return is_const
? readonly_data_section
: data_section
;
3028 return readonly_data_section
;
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
   On the v850, function values are returned in r10 only.  */
static bool
v850_function_value_regno_p (const unsigned int regno)
{
  return regno == 10;
}
3039 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3042 v850_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
3044 /* Return values > 8 bytes in length in memory. */
3045 return int_size_in_bytes (type
) > 8 || TYPE_MODE (type
) == BLKmode
;
3048 /* Worker function for TARGET_FUNCTION_VALUE. */
3051 v850_function_value (const_tree valtype
,
3052 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
3053 bool outgoing ATTRIBUTE_UNUSED
)
3055 return gen_rtx_REG (TYPE_MODE (valtype
), 10);
3059 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
3062 v850_setup_incoming_varargs (CUMULATIVE_ARGS
*ca
,
3063 enum machine_mode mode ATTRIBUTE_UNUSED
,
3064 tree type ATTRIBUTE_UNUSED
,
3065 int *pretend_arg_size ATTRIBUTE_UNUSED
,
3066 int second_time ATTRIBUTE_UNUSED
)
3068 ca
->anonymous_args
= (!TARGET_GHS
? 1 : 0);
3071 /* Worker function for TARGET_CAN_ELIMINATE. */
3074 v850_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
3076 return (to
== STACK_POINTER_REGNUM
? ! frame_pointer_needed
: true);
3079 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3081 If TARGET_APP_REGS is not defined then add r2 and r5 to
3082 the pool of fixed registers. See PR 14505. */
3085 v850_conditional_register_usage (void)
3087 if (TARGET_APP_REGS
)
3089 fixed_regs
[2] = 0; call_used_regs
[2] = 0;
3090 fixed_regs
[5] = 0; call_used_regs
[5] = 1;
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  Emits the fixed
   instruction sequence; the two trailing .long slots are patched at
   run time with the static chain and the target function address.  */
static void
v850_asm_trampoline_template (FILE *f)
{
  fprintf (f, "\tjarl .+4,r12\n");
  fprintf (f, "\tld.w 12[r12],r20\n");
  fprintf (f, "\tld.w 16[r12],r12\n");
  fprintf (f, "\tjmp [r12]\n");
  fprintf (f, "\tnop\n");
  fprintf (f, "\t.long 0\n");
  fprintf (f, "\t.long 0\n");
}
3108 /* Worker function for TARGET_TRAMPOLINE_INIT. */
3111 v850_trampoline_init (rtx m_tramp
, tree fndecl
, rtx chain_value
)
3113 rtx mem
, fnaddr
= XEXP (DECL_RTL (fndecl
), 0);
3115 emit_block_move (m_tramp
, assemble_trampoline_template (),
3116 GEN_INT (TRAMPOLINE_SIZE
), BLOCK_OP_NORMAL
);
3118 mem
= adjust_address (m_tramp
, SImode
, 16);
3119 emit_move_insn (mem
, chain_value
);
3120 mem
= adjust_address (m_tramp
, SImode
, 20);
3121 emit_move_insn (mem
, fnaddr
);
3125 v850_issue_rate (void)
3127 return (TARGET_V850E2_ALL
? 2 : 1);
3130 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3133 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx x
)
3135 return (GET_CODE (x
) == CONST_DOUBLE
3136 || !(GET_CODE (x
) == CONST
3137 && GET_CODE (XEXP (x
, 0)) == PLUS
3138 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == SYMBOL_REF
3139 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3140 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x
, 0), 1)))));
3144 v850_memory_move_cost (enum machine_mode mode
, bool in
)
3146 switch (GET_MODE_SIZE (mode
))
3156 return (GET_MODE_SIZE (mode
) / 2) * (in
? 3 : 1);
3160 /* V850 specific attributes. */
3162 static const struct attribute_spec v850_attribute_table
[] =
3164 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
3165 affects_type_identity } */
3166 { "interrupt_handler", 0, 0, true, false, false,
3167 v850_handle_interrupt_attribute
, false },
3168 { "interrupt", 0, 0, true, false, false,
3169 v850_handle_interrupt_attribute
, false },
3170 { "sda", 0, 0, true, false, false,
3171 v850_handle_data_area_attribute
, false },
3172 { "tda", 0, 0, true, false, false,
3173 v850_handle_data_area_attribute
, false },
3174 { "zda", 0, 0, true, false, false,
3175 v850_handle_data_area_attribute
, false },
3176 { NULL
, 0, 0, false, false, false, NULL
, false }
/* Initialize the GCC target structure.  Each #define below overrides
   one hook in TARGET_INITIALIZER with this port's implementation;
   the hooks are grouped roughly by area (costs, assembly output,
   attributes/sections, option handling, calls/arguments, frame and
   trampolines).  */

/* Costs.  */
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

/* Assembly output.  */
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef  TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attributes and sections.  */
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION  v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

/* Option handling.  */
#undef  TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (MASK_DEFAULT | MASK_APP_REGS)
#undef  TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION v850_handle_option

/* RTL costs and reorganization.  */
#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Calls and argument passing.  */
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef  TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs

#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

/* Frame layout and trampolines.  */
#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming

#undef  TARGET_OPTION_OPTIMIZATION_TABLE
#define TARGET_OPTION_OPTIMIZATION_TABLE v850_option_optimization_table

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

/* The target vector itself, built from the overrides above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
3285 #include "gt-v850.h"