1 /* Subroutines used for code generation on the Lattice Mico32 architecture.
2 Contributed by Jon Beniston <jon@beniston.com>
4 Copyright (C) 2009-2015 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published
10 by the Free Software Foundation; either version 3, or (at your
11 option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
15 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
16 License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
31 #include "dominance.h"
37 #include "cfgcleanup.h"
38 #include "basic-block.h"
39 #include "insn-config.h"
40 #include "conditions.h"
41 #include "insn-flags.h"
42 #include "insn-attr.h"
43 #include "insn-codes.h"
48 #include "fold-const.h"
61 #include "diagnostic-core.h"
65 #include "target-def.h"
66 #include "langhooks.h"
67 #include "tm-constrs.h"
71 struct lm32_frame_info
73 HOST_WIDE_INT total_size
; /* number of bytes of entire frame. */
74 HOST_WIDE_INT callee_size
; /* number of bytes to save callee saves. */
75 HOST_WIDE_INT pretend_size
; /* number of bytes we pretend caller did. */
76 HOST_WIDE_INT args_size
; /* number of bytes for outgoing arguments. */
77 HOST_WIDE_INT locals_size
; /* number of bytes for local variables. */
78 unsigned int reg_save_mask
; /* mask of saved registers. */
81 /* Prototypes for static functions. */
82 static rtx
emit_add (rtx dest
, rtx src0
, rtx src1
);
83 static void expand_save_restore (struct lm32_frame_info
*info
, int op
);
84 static void stack_adjust (HOST_WIDE_INT amount
);
85 static bool lm32_in_small_data_p (const_tree
);
86 static void lm32_setup_incoming_varargs (cumulative_args_t cum
,
87 machine_mode mode
, tree type
,
88 int *pretend_size
, int no_rtl
);
89 static bool lm32_rtx_costs (rtx x
, int code
, int outer_code
, int opno
,
90 int *total
, bool speed
);
91 static bool lm32_can_eliminate (const int, const int);
93 lm32_legitimate_address_p (machine_mode mode
, rtx x
, bool strict
);
94 static HOST_WIDE_INT
lm32_compute_frame_size (int size
);
95 static void lm32_option_override (void);
96 static rtx
lm32_function_arg (cumulative_args_t cum
,
97 machine_mode mode
, const_tree type
,
99 static void lm32_function_arg_advance (cumulative_args_t cum
,
101 const_tree type
, bool named
);
103 #undef TARGET_OPTION_OVERRIDE
104 #define TARGET_OPTION_OVERRIDE lm32_option_override
105 #undef TARGET_ADDRESS_COST
106 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
107 #undef TARGET_RTX_COSTS
108 #define TARGET_RTX_COSTS lm32_rtx_costs
109 #undef TARGET_IN_SMALL_DATA_P
110 #define TARGET_IN_SMALL_DATA_P lm32_in_small_data_p
111 #undef TARGET_PROMOTE_FUNCTION_MODE
112 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
113 #undef TARGET_SETUP_INCOMING_VARARGS
114 #define TARGET_SETUP_INCOMING_VARARGS lm32_setup_incoming_varargs
115 #undef TARGET_FUNCTION_ARG
116 #define TARGET_FUNCTION_ARG lm32_function_arg
117 #undef TARGET_FUNCTION_ARG_ADVANCE
118 #define TARGET_FUNCTION_ARG_ADVANCE lm32_function_arg_advance
119 #undef TARGET_PROMOTE_PROTOTYPES
120 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
121 #undef TARGET_MIN_ANCHOR_OFFSET
122 #define TARGET_MIN_ANCHOR_OFFSET -0x8000
123 #undef TARGET_MAX_ANCHOR_OFFSET
124 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
125 #undef TARGET_CAN_ELIMINATE
126 #define TARGET_CAN_ELIMINATE lm32_can_eliminate
127 #undef TARGET_LEGITIMATE_ADDRESS_P
128 #define TARGET_LEGITIMATE_ADDRESS_P lm32_legitimate_address_p
/* The target vector for this back end, built from the TARGET_* overrides
   defined above.  */
struct gcc_target targetm = TARGET_INITIALIZER;

/* Current frame information calculated by lm32_compute_frame_size.  */
static struct lm32_frame_info current_frame_info;
135 /* Return non-zero if the given return type should be returned in memory. */
138 lm32_return_in_memory (tree type
)
142 if (!AGGREGATE_TYPE_P (type
))
144 /* All simple types are returned in registers. */
148 size
= int_size_in_bytes (type
);
149 if (size
>= 0 && size
<= UNITS_PER_WORD
)
151 /* If it can fit in one register. */
158 /* Generate an emit a word sized add instruction. */
161 emit_add (rtx dest
, rtx src0
, rtx src1
)
164 insn
= emit_insn (gen_addsi3 (dest
, src0
, src1
));
168 /* Generate the code to compare (and possibly branch) two integer values
169 TEST_CODE is the comparison code we are trying to emulate
170 (or implement directly)
171 RESULT is where to store the result of the comparison,
172 or null to emit a branch
173 CMP0 CMP1 are the two comparison operands
174 DESTINATION is the destination of the branch, or null to only compare
178 gen_int_relational (enum rtx_code code
,
187 mode
= GET_MODE (cmp0
);
188 if (mode
== VOIDmode
)
189 mode
= GET_MODE (cmp1
);
191 /* Is this a branch or compare. */
192 branch_p
= (destination
!= 0);
194 /* Instruction set doesn't support LE or LT, so swap operands and use
205 code
= swap_condition (code
);
217 rtx insn
, cond
, label
;
219 /* Operands must be in registers. */
220 if (!register_operand (cmp0
, mode
))
221 cmp0
= force_reg (mode
, cmp0
);
222 if (!register_operand (cmp1
, mode
))
223 cmp1
= force_reg (mode
, cmp1
);
225 /* Generate conditional branch instruction. */
226 cond
= gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
);
227 label
= gen_rtx_LABEL_REF (VOIDmode
, destination
);
228 insn
= gen_rtx_SET (pc_rtx
, gen_rtx_IF_THEN_ELSE (VOIDmode
,
229 cond
, label
, pc_rtx
));
230 emit_jump_insn (insn
);
234 /* We can't have const_ints in cmp0, other than 0. */
235 if ((GET_CODE (cmp0
) == CONST_INT
) && (INTVAL (cmp0
) != 0))
236 cmp0
= force_reg (mode
, cmp0
);
238 /* If the comparison is against an int not in legal range
239 move it into a register. */
240 if (GET_CODE (cmp1
) == CONST_INT
)
250 if (!satisfies_constraint_K (cmp1
))
251 cmp1
= force_reg (mode
, cmp1
);
257 if (!satisfies_constraint_L (cmp1
))
258 cmp1
= force_reg (mode
, cmp1
);
265 /* Generate compare instruction. */
266 emit_move_insn (result
, gen_rtx_fmt_ee (code
, mode
, cmp0
, cmp1
));
270 /* Try performing the comparison in OPERANDS[1], whose arms are OPERANDS[2]
271 and OPERAND[3]. Store the result in OPERANDS[0]. */
274 lm32_expand_scc (rtx operands
[])
276 rtx target
= operands
[0];
277 enum rtx_code code
= GET_CODE (operands
[1]);
278 rtx op0
= operands
[2];
279 rtx op1
= operands
[3];
281 gen_int_relational (code
, target
, op0
, op1
, NULL_RTX
);
284 /* Compare OPERANDS[1] with OPERANDS[2] using comparison code
285 CODE and jump to OPERANDS[3] if the condition holds. */
288 lm32_expand_conditional_branch (rtx operands
[])
290 enum rtx_code code
= GET_CODE (operands
[0]);
291 rtx op0
= operands
[1];
292 rtx op1
= operands
[2];
293 rtx destination
= operands
[3];
295 gen_int_relational (code
, NULL_RTX
, op0
, op1
, destination
);
298 /* Generate and emit RTL to save or restore callee save registers. */
300 expand_save_restore (struct lm32_frame_info
*info
, int op
)
302 unsigned int reg_save_mask
= info
->reg_save_mask
;
304 HOST_WIDE_INT offset
;
307 /* Callee saves are below locals and above outgoing arguments. */
308 offset
= info
->args_size
+ info
->callee_size
;
309 for (regno
= 0; regno
<= 31; regno
++)
311 if ((reg_save_mask
& (1 << regno
)) != 0)
316 offset_rtx
= GEN_INT (offset
);
317 if (satisfies_constraint_K (offset_rtx
))
319 mem
= gen_rtx_MEM (word_mode
,
326 /* r10 is caller saved so it can be used as a temp reg. */
329 r10
= gen_rtx_REG (word_mode
, 10);
330 insn
= emit_move_insn (r10
, offset_rtx
);
332 RTX_FRAME_RELATED_P (insn
) = 1;
333 insn
= emit_add (r10
, r10
, stack_pointer_rtx
);
335 RTX_FRAME_RELATED_P (insn
) = 1;
336 mem
= gen_rtx_MEM (word_mode
, r10
);
340 insn
= emit_move_insn (mem
, gen_rtx_REG (word_mode
, regno
));
342 insn
= emit_move_insn (gen_rtx_REG (word_mode
, regno
), mem
);
344 /* only prologue instructions which set the sp fp or save a
345 register should be marked as frame related. */
347 RTX_FRAME_RELATED_P (insn
) = 1;
348 offset
-= UNITS_PER_WORD
;
354 stack_adjust (HOST_WIDE_INT amount
)
358 if (!IN_RANGE (amount
, -32776, 32768))
360 /* r10 is caller saved so it can be used as a temp reg. */
362 r10
= gen_rtx_REG (word_mode
, 10);
363 insn
= emit_move_insn (r10
, GEN_INT (amount
));
365 RTX_FRAME_RELATED_P (insn
) = 1;
366 insn
= emit_add (stack_pointer_rtx
, stack_pointer_rtx
, r10
);
368 RTX_FRAME_RELATED_P (insn
) = 1;
372 insn
= emit_add (stack_pointer_rtx
,
373 stack_pointer_rtx
, GEN_INT (amount
));
375 RTX_FRAME_RELATED_P (insn
) = 1;
380 /* Create and emit instructions for a functions prologue. */
382 lm32_expand_prologue (void)
386 lm32_compute_frame_size (get_frame_size ());
388 if (current_frame_info
.total_size
> 0)
390 /* Add space on stack new frame. */
391 stack_adjust (-current_frame_info
.total_size
);
393 /* Save callee save registers. */
394 if (current_frame_info
.reg_save_mask
!= 0)
395 expand_save_restore (¤t_frame_info
, 0);
397 /* Setup frame pointer if it's needed. */
398 if (frame_pointer_needed
== 1)
401 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
402 RTX_FRAME_RELATED_P (insn
) = 1;
404 /* Add offset - Don't use total_size, as that includes pretend_size,
405 which isn't part of this frame? */
406 insn
= emit_add (frame_pointer_rtx
,
408 GEN_INT (current_frame_info
.args_size
+
409 current_frame_info
.callee_size
+
410 current_frame_info
.locals_size
));
411 RTX_FRAME_RELATED_P (insn
) = 1;
414 /* Prevent prologue from being scheduled into function body. */
415 emit_insn (gen_blockage ());
419 /* Create an emit instructions for a functions epilogue. */
421 lm32_expand_epilogue (void)
423 rtx ra_rtx
= gen_rtx_REG (Pmode
, RA_REGNUM
);
425 lm32_compute_frame_size (get_frame_size ());
427 if (current_frame_info
.total_size
> 0)
429 /* Prevent stack code from being reordered. */
430 emit_insn (gen_blockage ());
432 /* Restore callee save registers. */
433 if (current_frame_info
.reg_save_mask
!= 0)
434 expand_save_restore (¤t_frame_info
, 1);
436 /* Deallocate stack. */
437 stack_adjust (current_frame_info
.total_size
);
439 /* Return to calling function. */
440 emit_jump_insn (gen_return_internal (ra_rtx
));
444 /* Return to calling function. */
445 emit_jump_insn (gen_return_internal (ra_rtx
));
449 /* Return the bytes needed to compute the frame pointer from the current
452 lm32_compute_frame_size (int size
)
455 HOST_WIDE_INT total_size
, locals_size
, args_size
, pretend_size
, callee_size
;
456 unsigned int reg_save_mask
;
459 args_size
= crtl
->outgoing_args_size
;
460 pretend_size
= crtl
->args
.pretend_args_size
;
464 /* Build mask that actually determines which regsiters we save
465 and calculate size required to store them in the stack. */
466 for (regno
= 1; regno
< SP_REGNUM
; regno
++)
468 if (df_regs_ever_live_p (regno
) && !call_used_regs
[regno
])
470 reg_save_mask
|= 1 << regno
;
471 callee_size
+= UNITS_PER_WORD
;
474 if (df_regs_ever_live_p (RA_REGNUM
) || ! crtl
->is_leaf
477 reg_save_mask
|= 1 << RA_REGNUM
;
478 callee_size
+= UNITS_PER_WORD
;
480 if (!(reg_save_mask
& (1 << FP_REGNUM
)) && frame_pointer_needed
)
482 reg_save_mask
|= 1 << FP_REGNUM
;
483 callee_size
+= UNITS_PER_WORD
;
486 /* Compute total frame size. */
487 total_size
= pretend_size
+ args_size
+ locals_size
+ callee_size
;
489 /* Align frame to appropriate boundary. */
490 total_size
= (total_size
+ 3) & ~3;
492 /* Save computed information. */
493 current_frame_info
.total_size
= total_size
;
494 current_frame_info
.callee_size
= callee_size
;
495 current_frame_info
.pretend_size
= pretend_size
;
496 current_frame_info
.locals_size
= locals_size
;
497 current_frame_info
.args_size
= args_size
;
498 current_frame_info
.reg_save_mask
= reg_save_mask
;
504 lm32_print_operand (FILE * file
, rtx op
, int letter
)
508 code
= GET_CODE (op
);
510 if (code
== SIGN_EXTEND
)
511 op
= XEXP (op
, 0), code
= GET_CODE (op
);
512 else if (code
== REG
|| code
== SUBREG
)
519 regnum
= true_regnum (op
);
521 fprintf (file
, "%s", reg_names
[regnum
]);
523 else if (code
== HIGH
)
524 output_addr_const (file
, XEXP (op
, 0));
525 else if (code
== MEM
)
526 output_address (XEXP (op
, 0));
527 else if (letter
== 'z' && GET_CODE (op
) == CONST_INT
&& INTVAL (op
) == 0)
528 fprintf (file
, "%s", reg_names
[0]);
529 else if (GET_CODE (op
) == CONST_DOUBLE
)
531 if ((CONST_DOUBLE_LOW (op
) != 0) || (CONST_DOUBLE_HIGH (op
) != 0))
532 output_operand_lossage ("only 0.0 can be loaded as an immediate");
537 fprintf (file
, "e ");
539 fprintf (file
, "ne ");
541 fprintf (file
, "g ");
542 else if (code
== GTU
)
543 fprintf (file
, "gu ");
545 fprintf (file
, "l ");
546 else if (code
== LTU
)
547 fprintf (file
, "lu ");
549 fprintf (file
, "ge ");
550 else if (code
== GEU
)
551 fprintf (file
, "geu");
553 fprintf (file
, "le ");
554 else if (code
== LEU
)
555 fprintf (file
, "leu");
557 output_addr_const (file
, op
);
560 /* A C compound statement to output to stdio stream STREAM the
561 assembler syntax for an instruction operand that is a memory
562 reference whose address is ADDR. ADDR is an RTL expression.
564 On some machines, the syntax for a symbolic address depends on
565 the section that the address refers to. On these machines,
566 define the macro `ENCODE_SECTION_INFO' to store the information
567 into the `symbol_ref', and then check for it here. */
570 lm32_print_operand_address (FILE * file
, rtx addr
)
572 switch (GET_CODE (addr
))
575 fprintf (file
, "(%s+0)", reg_names
[REGNO (addr
)]);
579 output_address (XEXP (addr
, 0));
584 rtx arg0
= XEXP (addr
, 0);
585 rtx arg1
= XEXP (addr
, 1);
587 if (GET_CODE (arg0
) == REG
&& CONSTANT_P (arg1
))
589 if (GET_CODE (arg1
) == CONST_INT
)
590 fprintf (file
, "(%s+%ld)", reg_names
[REGNO (arg0
)],
594 fprintf (file
, "(%s+", reg_names
[REGNO (arg0
)]);
595 output_addr_const (file
, arg1
);
599 else if (CONSTANT_P (arg0
) && CONSTANT_P (arg1
))
600 output_addr_const (file
, addr
);
602 fatal_insn ("bad operand", addr
);
607 if (SYMBOL_REF_SMALL_P (addr
))
609 fprintf (file
, "gp(");
610 output_addr_const (file
, addr
);
614 fatal_insn ("can't use non gp relative absolute address", addr
);
618 fatal_insn ("invalid addressing mode", addr
);
623 /* Determine where to put an argument to a function.
624 Value is zero to push the argument on the stack,
625 or a hard register in which to store the argument.
627 MODE is the argument's machine mode.
628 TYPE is the data type of the argument (as a tree).
629 This is null for libcalls where that information may
631 CUM is a variable of type CUMULATIVE_ARGS which gives info about
632 the preceding args and about the function being called.
633 NAMED is nonzero if this argument is a named parameter
634 (otherwise it is an extra parameter matching an ellipsis). */
637 lm32_function_arg (cumulative_args_t cum_v
, machine_mode mode
,
638 const_tree type
, bool named
)
640 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
642 if (mode
== VOIDmode
)
643 /* Compute operand 2 of the call insn. */
646 if (targetm
.calls
.must_pass_in_stack (mode
, type
))
649 if (!named
|| (*cum
+ LM32_NUM_REGS2 (mode
, type
) > LM32_NUM_ARG_REGS
))
652 return gen_rtx_REG (mode
, *cum
+ LM32_FIRST_ARG_REG
);
656 lm32_function_arg_advance (cumulative_args_t cum
, machine_mode mode
,
657 const_tree type
, bool named ATTRIBUTE_UNUSED
)
659 *get_cumulative_args (cum
) += LM32_NUM_REGS2 (mode
, type
);
663 lm32_compute_initial_elimination_offset (int from
, int to
)
665 HOST_WIDE_INT offset
= 0;
669 case ARG_POINTER_REGNUM
:
672 case FRAME_POINTER_REGNUM
:
675 case STACK_POINTER_REGNUM
:
677 lm32_compute_frame_size (get_frame_size ()) -
678 current_frame_info
.pretend_size
;
692 lm32_setup_incoming_varargs (cumulative_args_t cum_v
, machine_mode mode
,
693 tree type
, int *pretend_size
, int no_rtl
)
695 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
699 fntype
= TREE_TYPE (current_function_decl
);
701 if (stdarg_p (fntype
))
702 first_anon_arg
= *cum
+ LM32_FIRST_ARG_REG
;
705 /* this is the common case, we have been passed details setup
706 for the last named argument, we want to skip over the
707 registers, if any used in passing this named paramter in
708 order to determine which is the first registers used to pass
709 anonymous arguments. */
713 size
= int_size_in_bytes (type
);
715 size
= GET_MODE_SIZE (mode
);
718 *cum
+ LM32_FIRST_ARG_REG
+
719 ((size
+ UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
722 if ((first_anon_arg
< (LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
)) && !no_rtl
)
724 int first_reg_offset
= first_anon_arg
;
725 int size
= LM32_FIRST_ARG_REG
+ LM32_NUM_ARG_REGS
- first_anon_arg
;
728 regblock
= gen_rtx_MEM (BLKmode
,
729 plus_constant (Pmode
, arg_pointer_rtx
,
730 FIRST_PARM_OFFSET (0)));
731 move_block_from_reg (first_reg_offset
, regblock
, size
);
733 *pretend_size
= size
* UNITS_PER_WORD
;
737 /* Override command line options. */
739 lm32_option_override (void)
741 /* We must have sign-extend enabled if barrel-shift isn't. */
742 if (!TARGET_BARREL_SHIFT_ENABLED
&& !TARGET_SIGN_EXTEND_ENABLED
)
743 target_flags
|= MASK_SIGN_EXTEND_ENABLED
;
746 /* Return nonzero if this function is known to have a null epilogue.
747 This allows the optimizer to omit jumps to jumps if no stack
750 lm32_can_use_return (void)
752 if (!reload_completed
)
755 if (df_regs_ever_live_p (RA_REGNUM
) || crtl
->profile
)
758 if (lm32_compute_frame_size (get_frame_size ()) != 0)
764 /* Support function to determine the return address of the function
765 'count' frames back up the stack. */
767 lm32_return_addr_rtx (int count
, rtx frame
)
772 if (!df_regs_ever_live_p (RA_REGNUM
))
773 r
= gen_rtx_REG (Pmode
, RA_REGNUM
);
776 r
= gen_rtx_MEM (Pmode
,
777 gen_rtx_PLUS (Pmode
, frame
,
778 GEN_INT (-2 * UNITS_PER_WORD
)));
779 set_mem_alias_set (r
, get_frame_alias_set ());
782 else if (flag_omit_frame_pointer
)
786 r
= gen_rtx_MEM (Pmode
,
787 gen_rtx_PLUS (Pmode
, frame
,
788 GEN_INT (-2 * UNITS_PER_WORD
)));
789 set_mem_alias_set (r
, get_frame_alias_set ());
794 /* Return true if EXP should be placed in the small data section. */
797 lm32_in_small_data_p (const_tree exp
)
799 /* We want to merge strings, so we never consider them small data. */
800 if (TREE_CODE (exp
) == STRING_CST
)
803 /* Functions are never in the small data area. Duh. */
804 if (TREE_CODE (exp
) == FUNCTION_DECL
)
807 if (TREE_CODE (exp
) == VAR_DECL
&& DECL_SECTION_NAME (exp
))
809 const char *section
= DECL_SECTION_NAME (exp
);
810 if (strcmp (section
, ".sdata") == 0 || strcmp (section
, ".sbss") == 0)
815 HOST_WIDE_INT size
= int_size_in_bytes (TREE_TYPE (exp
));
817 /* If this is an incomplete type with size 0, then we can't put it
818 in sdata because it might be too big when completed. */
819 if (size
> 0 && size
<= g_switch_value
)
826 /* Emit straight-line code to move LENGTH bytes from SRC to DEST.
827 Assume that the areas do not overlap. */
830 lm32_block_move_inline (rtx dest
, rtx src
, HOST_WIDE_INT length
,
831 HOST_WIDE_INT alignment
)
833 HOST_WIDE_INT offset
, delta
;
834 unsigned HOST_WIDE_INT bits
;
839 /* Work out how many bits to move at a time. */
853 mode
= mode_for_size (bits
, MODE_INT
, 0);
854 delta
= bits
/ BITS_PER_UNIT
;
856 /* Allocate a buffer for the temporary registers. */
857 regs
= XALLOCAVEC (rtx
, length
/ delta
);
859 /* Load as many BITS-sized chunks as possible. */
860 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
862 regs
[i
] = gen_reg_rtx (mode
);
863 emit_move_insn (regs
[i
], adjust_address (src
, mode
, offset
));
866 /* Copy the chunks to the destination. */
867 for (offset
= 0, i
= 0; offset
+ delta
<= length
; offset
+= delta
, i
++)
868 emit_move_insn (adjust_address (dest
, mode
, offset
), regs
[i
]);
870 /* Mop up any left-over bytes. */
873 src
= adjust_address (src
, BLKmode
, offset
);
874 dest
= adjust_address (dest
, BLKmode
, offset
);
875 move_by_pieces (dest
, src
, length
- offset
,
876 MIN (MEM_ALIGN (src
), MEM_ALIGN (dest
)), 0);
880 /* Expand string/block move operations.
882 operands[0] is the pointer to the destination.
883 operands[1] is the pointer to the source.
884 operands[2] is the number of bytes to move.
885 operands[3] is the alignment. */
888 lm32_expand_block_move (rtx
* operands
)
890 if ((GET_CODE (operands
[2]) == CONST_INT
) && (INTVAL (operands
[2]) <= 32))
892 lm32_block_move_inline (operands
[0], operands
[1], INTVAL (operands
[2]),
893 INTVAL (operands
[3]));
899 /* Return TRUE if X references a SYMBOL_REF or LABEL_REF whose symbol
900 isn't protected by a PIC unspec. */
902 nonpic_symbol_mentioned_p (rtx x
)
907 if (GET_CODE (x
) == SYMBOL_REF
|| GET_CODE (x
) == LABEL_REF
908 || GET_CODE (x
) == PC
)
911 /* We don't want to look into the possible MEM location of a
912 CONST_DOUBLE, since we're not going to use it, in general. */
913 if (GET_CODE (x
) == CONST_DOUBLE
)
916 if (GET_CODE (x
) == UNSPEC
)
919 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
920 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
926 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
927 if (nonpic_symbol_mentioned_p (XVECEXP (x
, i
, j
)))
930 else if (fmt
[i
] == 'e' && nonpic_symbol_mentioned_p (XEXP (x
, i
)))
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */
/* NOTE(review): this function is badly truncated in this copy of the file.
   The outer switch on CODE and most of its case labels (PLUS/MINUS/...,
   COMPARE, ASHIFT..., MULT, DIV..., SIGN_EXTEND, CONST_INT, SYMBOL_REF,
   SET, MEM, ...) have been lost, as have the enclosing braces, the `else`
   branches pairing the !speed/speed assignments, the divide cycle-count
   computation that defines `cycles`, and the final return statements.
   The surviving tokens are preserved verbatim below; restore the missing
   structure from the upstream GCC lm32 back end before compiling.  */
lm32_rtx_costs (rtx x, int code, int outer_code, int opno ATTRIBUTE_UNUSED,
		int *total, bool speed)
machine_mode mode = GET_MODE (x);
/* Per-instruction latencies used for the speed (cycle-count) costs.  */
const int arithmetic_latency = 1;
const int shift_latency = 1;
const int compare_latency = 2;
const int multiply_latency = 3;
const int load_latency = 3;
const int libcall_size_cost = 5;
/* Determine if we can handle the given mode size in a single instruction.  */
small_mode = (mode == QImode) || (mode == HImode) || (mode == SImode);
/* Simple arithmetic/logic: one insn per word of the mode (size), or
   latency plus one insn per extra word (speed).  */
*total = COSTS_N_INSNS (LM32_NUM_REGS (mode));
COSTS_N_INSNS (arithmetic_latency + (LM32_NUM_REGS (mode) - 1));
*total = COSTS_N_INSNS (1);
*total = COSTS_N_INSNS (compare_latency);
/* FIXME. Guessing here.  */
*total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * (2 + 3) / 2);
/* Shifts: cheap with a barrel shifter, otherwise one insn per shifted
   bit for constant shifts, else a libcall.  */
if (TARGET_BARREL_SHIFT_ENABLED && small_mode)
*total = COSTS_N_INSNS (1);
*total = COSTS_N_INSNS (shift_latency);
else if (TARGET_BARREL_SHIFT_ENABLED)
/* FIXME: Guessing here.  */
*total = COSTS_N_INSNS (LM32_NUM_REGS (mode) * 4);
else if (small_mode && GET_CODE (XEXP (x, 1)) == CONST_INT)
*total = COSTS_N_INSNS (INTVAL (XEXP (x, 1)));
*total = COSTS_N_INSNS (libcall_size_cost);
*total = COSTS_N_INSNS (100);
/* Multiply: hardware multiplier or libcall.  */
if (TARGET_MULTIPLY_ENABLED && small_mode)
*total = COSTS_N_INSNS (1);
*total = COSTS_N_INSNS (multiply_latency);
*total = COSTS_N_INSNS (libcall_size_cost);
*total = COSTS_N_INSNS (100);
/* Divide/modulo: hardware divider, constant-divisor estimate, or
   libcall.  NOTE(review): the loop that computes `cycles` is missing.  */
if (TARGET_DIVIDE_ENABLED && small_mode)
*total = COSTS_N_INSNS (1);
if (GET_CODE (XEXP (x, 1)) == CONST_INT)
unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
if (IN_RANGE (i, 0, 65536))
*total = COSTS_N_INSNS (1 + 1 + cycles);
*total = COSTS_N_INSNS (2 + 1 + cycles);
else if (GET_CODE (XEXP (x, 1)) == REG)
*total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
*total = COSTS_N_INSNS (1 + GET_MODE_SIZE (mode) / 2);
*total = COSTS_N_INSNS (libcall_size_cost);
*total = COSTS_N_INSNS (100);
*total = COSTS_N_INSNS (1);
*total = COSTS_N_INSNS (arithmetic_latency);
/* Extensions are free when folded into a load from memory.  */
if (MEM_P (XEXP (x, 0)))
*total = COSTS_N_INSNS (0);
else if (small_mode)
*total = COSTS_N_INSNS (1);
*total = COSTS_N_INSNS (arithmetic_latency);
*total = COSTS_N_INSNS (LM32_NUM_REGS (mode) / 2);
*total = COSTS_N_INSNS (0);
/* Constants: free when they satisfy the relevant operand constraint
   for the surrounding operation, otherwise they need extra insns.  */
if (satisfies_constraint_L (x))
*total = COSTS_N_INSNS (0);
*total = COSTS_N_INSNS (2);
if (satisfies_constraint_K (x))
*total = COSTS_N_INSNS (0);
*total = COSTS_N_INSNS (2);
if (TARGET_MULTIPLY_ENABLED)
if (satisfies_constraint_K (x))
*total = COSTS_N_INSNS (0);
*total = COSTS_N_INSNS (2);
if (satisfies_constraint_K (x))
*total = COSTS_N_INSNS (1);
*total = COSTS_N_INSNS (2);
*total = COSTS_N_INSNS (0);
*total = COSTS_N_INSNS (0);
*total = COSTS_N_INSNS (2);
*total = COSTS_N_INSNS (1);
*total = COSTS_N_INSNS (1);
/* Memory access pays the load latency when optimizing for speed.  */
*total = COSTS_N_INSNS (load_latency);
1201 /* Implemenent TARGET_CAN_ELIMINATE. */
1204 lm32_can_eliminate (const int from ATTRIBUTE_UNUSED
, const int to
)
1206 return (to
== STACK_POINTER_REGNUM
&& frame_pointer_needed
) ? false : true;
1209 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
1212 lm32_legitimate_address_p (machine_mode mode ATTRIBUTE_UNUSED
, rtx x
, bool strict
)
1215 if (strict
&& REG_P (x
) && STRICT_REG_OK_FOR_BASE_P (x
))
1217 if (!strict
&& REG_P (x
) && NONSTRICT_REG_OK_FOR_BASE_P (x
))
1221 if (GET_CODE (x
) == PLUS
1222 && REG_P (XEXP (x
, 0))
1223 && ((strict
&& STRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0)))
1224 || (!strict
&& NONSTRICT_REG_OK_FOR_BASE_P (XEXP (x
, 0))))
1225 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1226 && satisfies_constraint_K (XEXP ((x
), 1)))
1230 if (GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_SMALL_P (x
))
1236 /* Check a move is not memory to memory. */
1239 lm32_move_ok (machine_mode mode
, rtx operands
[2]) {
1240 if (memory_operand (operands
[0], mode
))
1241 return register_or_zero_operand (operands
[1], mode
);