/* The Blackfin code generation auxiliary output file.
   Copyright (C) 2005, 2006 Free Software Foundation, Inc.
   Contributed by Analog Devices.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published
   by the Free Software Foundation; either version 2, or (at your
   option) any later version.

   GCC is distributed in the hope that it will be useful, but WITHOUT
   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
   License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING.  If not, write to
   the Free Software Foundation, 51 Franklin Street, Fifth Floor,
   Boston, MA 02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "real.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "input.h"
#include "target.h"
#include "target-def.h"
#include "expr.h"
#include "toplev.h"
#include "recog.h"
#include "integrate.h"
#include "cgraph.h"
#include "langhooks.h"
#include "bfin-protos.h"
#include "tm-preds.h"
#include "gt-bfin.h"
#include "basic-block.h"
56 /* A C structure for machine-specific, per-function data.
57 This is added to the cfun structure. */
58 struct machine_function
GTY(())
60 int has_hardware_loops
;
63 /* Test and compare insns in bfin.md store the information needed to
64 generate branch and scc insns here. */
65 rtx bfin_compare_op0
, bfin_compare_op1
;
67 /* RTX for condition code flag register and RETS register */
68 extern GTY(()) rtx bfin_cc_rtx
;
69 extern GTY(()) rtx bfin_rets_rtx
;
70 rtx bfin_cc_rtx
, bfin_rets_rtx
;
72 int max_arg_registers
= 0;
74 /* Arrays used when emitting register names. */
75 const char *short_reg_names
[] = SHORT_REGISTER_NAMES
;
76 const char *high_reg_names
[] = HIGH_REGISTER_NAMES
;
77 const char *dregs_pair_names
[] = DREGS_PAIR_NAMES
;
78 const char *byte_reg_names
[] = BYTE_REGISTER_NAMES
;
80 static int arg_regs
[] = FUNCTION_ARG_REGISTERS
;
82 /* Nonzero if -mshared-library-id was given. */
83 static int bfin_lib_id_given
;
86 bfin_globalize_label (FILE *stream
, const char *name
)
88 fputs (".global ", stream
);
89 assemble_name (stream
, name
);
95 output_file_start (void)
97 FILE *file
= asm_out_file
;
100 fprintf (file
, ".file \"%s\";\n", input_filename
);
102 for (i
= 0; arg_regs
[i
] >= 0; i
++)
104 max_arg_registers
= i
; /* how many arg reg used */
107 /* Called early in the compilation to conditionally modify
108 fixed_regs/call_used_regs. */
111 conditional_register_usage (void)
113 /* initialize condition code flag register rtx */
114 bfin_cc_rtx
= gen_rtx_REG (BImode
, REG_CC
);
115 bfin_rets_rtx
= gen_rtx_REG (Pmode
, REG_RETS
);
118 /* Examine machine-dependent attributes of function type FUNTYPE and return its
119 type. See the definition of E_FUNKIND. */
121 static e_funkind
funkind (tree funtype
)
123 tree attrs
= TYPE_ATTRIBUTES (funtype
);
124 if (lookup_attribute ("interrupt_handler", attrs
))
125 return INTERRUPT_HANDLER
;
126 else if (lookup_attribute ("exception_handler", attrs
))
127 return EXCPT_HANDLER
;
128 else if (lookup_attribute ("nmi_handler", attrs
))
134 /* Legitimize PIC addresses. If the address is already position-independent,
135 we return ORIG. Newly generated position-independent addresses go into a
136 reg. This is REG if nonzero, otherwise we allocate register(s) as
137 necessary. PICREG is the register holding the pointer to the PIC offset
141 legitimize_pic_address (rtx orig
, rtx reg
, rtx picreg
)
146 if (GET_CODE (addr
) == SYMBOL_REF
|| GET_CODE (addr
) == LABEL_REF
)
151 if (TARGET_ID_SHARED_LIBRARY
)
152 unspec
= UNSPEC_MOVE_PIC
;
153 else if (GET_CODE (addr
) == SYMBOL_REF
154 && SYMBOL_REF_FUNCTION_P (addr
))
155 unspec
= UNSPEC_FUNCDESC_GOT17M4
;
157 unspec
= UNSPEC_MOVE_FDPIC
;
161 gcc_assert (!no_new_pseudos
);
162 reg
= gen_reg_rtx (Pmode
);
165 tmp
= gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, addr
), unspec
);
166 new = gen_const_mem (Pmode
, gen_rtx_PLUS (Pmode
, picreg
, tmp
));
168 emit_move_insn (reg
, new);
169 if (picreg
== pic_offset_table_rtx
)
170 current_function_uses_pic_offset_table
= 1;
174 else if (GET_CODE (addr
) == CONST
|| GET_CODE (addr
) == PLUS
)
178 if (GET_CODE (addr
) == CONST
)
180 addr
= XEXP (addr
, 0);
181 gcc_assert (GET_CODE (addr
) == PLUS
);
184 if (XEXP (addr
, 0) == picreg
)
189 gcc_assert (!no_new_pseudos
);
190 reg
= gen_reg_rtx (Pmode
);
193 base
= legitimize_pic_address (XEXP (addr
, 0), reg
, picreg
);
194 addr
= legitimize_pic_address (XEXP (addr
, 1),
195 base
== reg
? NULL_RTX
: reg
,
198 if (GET_CODE (addr
) == CONST_INT
)
200 gcc_assert (! reload_in_progress
&& ! reload_completed
);
201 addr
= force_reg (Pmode
, addr
);
204 if (GET_CODE (addr
) == PLUS
&& CONSTANT_P (XEXP (addr
, 1)))
206 base
= gen_rtx_PLUS (Pmode
, base
, XEXP (addr
, 0));
207 addr
= XEXP (addr
, 1);
210 return gen_rtx_PLUS (Pmode
, base
, addr
);
216 /* Stack frame layout. */
218 /* Compute the number of DREGS to save with a push_multiple operation.
219 This could include registers that aren't modified in the function,
220 since push_multiple only takes a range of registers.
221 If IS_INTHANDLER, then everything that is live must be saved, even
222 if normally call-clobbered. */
225 n_dregs_to_save (bool is_inthandler
)
229 for (i
= REG_R0
; i
<= REG_R7
; i
++)
231 if (regs_ever_live
[i
] && (is_inthandler
|| ! call_used_regs
[i
]))
232 return REG_R7
- i
+ 1;
234 if (current_function_calls_eh_return
)
239 unsigned test
= EH_RETURN_DATA_REGNO (j
);
240 if (test
== INVALID_REGNUM
)
243 return REG_R7
- i
+ 1;
251 /* Like n_dregs_to_save, but compute number of PREGS to save. */
254 n_pregs_to_save (bool is_inthandler
)
258 for (i
= REG_P0
; i
<= REG_P5
; i
++)
259 if ((regs_ever_live
[i
] && (is_inthandler
|| ! call_used_regs
[i
]))
261 && i
== PIC_OFFSET_TABLE_REGNUM
262 && (current_function_uses_pic_offset_table
263 || (TARGET_ID_SHARED_LIBRARY
&& ! current_function_is_leaf
))))
264 return REG_P5
- i
+ 1;
268 /* Determine if we are going to save the frame pointer in the prologue. */
271 must_save_fp_p (void)
273 return frame_pointer_needed
|| regs_ever_live
[REG_FP
];
277 stack_frame_needed_p (void)
279 /* EH return puts a new return address into the frame using an
280 address relative to the frame pointer. */
281 if (current_function_calls_eh_return
)
283 return frame_pointer_needed
;
286 /* Emit code to save registers in the prologue. SAVEALL is nonzero if we
287 must save all registers; this is used for interrupt handlers.
288 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
289 this for an interrupt (or exception) handler. */
292 expand_prologue_reg_save (rtx spreg
, int saveall
, bool is_inthandler
)
294 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
);
295 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
);
296 int dregno
= REG_R7
+ 1 - ndregs
;
297 int pregno
= REG_P5
+ 1 - npregs
;
298 int total
= ndregs
+ npregs
;
305 val
= GEN_INT (-total
* 4);
306 pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total
+ 2));
307 XVECEXP (pat
, 0, 0) = gen_rtx_UNSPEC (VOIDmode
, gen_rtvec (1, val
),
308 UNSPEC_PUSH_MULTIPLE
);
309 XVECEXP (pat
, 0, total
+ 1) = gen_rtx_SET (VOIDmode
, spreg
,
310 gen_rtx_PLUS (Pmode
, spreg
,
312 RTX_FRAME_RELATED_P (XVECEXP (pat
, 0, total
+ 1)) = 1;
313 for (i
= 0; i
< total
; i
++)
315 rtx memref
= gen_rtx_MEM (word_mode
,
316 gen_rtx_PLUS (Pmode
, spreg
,
317 GEN_INT (- i
* 4 - 4)));
321 subpat
= gen_rtx_SET (VOIDmode
, memref
, gen_rtx_REG (word_mode
,
327 subpat
= gen_rtx_SET (VOIDmode
, memref
, gen_rtx_REG (word_mode
,
331 XVECEXP (pat
, 0, i
+ 1) = subpat
;
332 RTX_FRAME_RELATED_P (subpat
) = 1;
334 insn
= emit_insn (pat
);
335 RTX_FRAME_RELATED_P (insn
) = 1;
338 /* Emit code to restore registers in the epilogue. SAVEALL is nonzero if we
339 must save all registers; this is used for interrupt handlers.
340 SPREG contains (reg:SI REG_SP). IS_INTHANDLER is true if we're doing
341 this for an interrupt (or exception) handler. */
344 expand_epilogue_reg_restore (rtx spreg
, bool saveall
, bool is_inthandler
)
346 int ndregs
= saveall
? 8 : n_dregs_to_save (is_inthandler
);
347 int npregs
= saveall
? 6 : n_pregs_to_save (is_inthandler
);
348 int total
= ndregs
+ npregs
;
355 pat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (total
+ 1));
356 XVECEXP (pat
, 0, 0) = gen_rtx_SET (VOIDmode
, spreg
,
357 gen_rtx_PLUS (Pmode
, spreg
,
358 GEN_INT (total
* 4)));
365 for (i
= 0; i
< total
; i
++)
368 ? gen_rtx_PLUS (Pmode
, spreg
, GEN_INT (i
* 4))
370 rtx memref
= gen_rtx_MEM (word_mode
, addr
);
373 XVECEXP (pat
, 0, i
+ 1)
374 = gen_rtx_SET (VOIDmode
, gen_rtx_REG (word_mode
, regno
), memref
);
383 insn
= emit_insn (pat
);
384 RTX_FRAME_RELATED_P (insn
) = 1;
387 /* Perform any needed actions needed for a function that is receiving a
388 variable number of arguments.
392 MODE and TYPE are the mode and type of the current parameter.
394 PRETEND_SIZE is a variable that should be set to the amount of stack
395 that must be pushed by the prolog to pretend that our caller pushed
398 Normally, this macro will push all remaining incoming registers on the
399 stack and set PRETEND_SIZE to the length of the registers pushed.
402 - VDSP C compiler manual (our ABI) says that a variable args function
403 should save the R0, R1 and R2 registers in the stack.
404 - The caller will always leave space on the stack for the
405 arguments that are passed in registers, so we dont have
406 to leave any extra space.
407 - now, the vastart pointer can access all arguments from the stack. */
410 setup_incoming_varargs (CUMULATIVE_ARGS
*cum
,
411 enum machine_mode mode ATTRIBUTE_UNUSED
,
412 tree type ATTRIBUTE_UNUSED
, int *pretend_size
,
421 /* The move for named arguments will be generated automatically by the
422 compiler. We need to generate the move rtx for the unnamed arguments
423 if they are in the first 3 words. We assume at least 1 named argument
424 exists, so we never generate [ARGP] = R0 here. */
426 for (i
= cum
->words
+ 1; i
< max_arg_registers
; i
++)
428 mem
= gen_rtx_MEM (Pmode
,
429 plus_constant (arg_pointer_rtx
, (i
* UNITS_PER_WORD
)));
430 emit_move_insn (mem
, gen_rtx_REG (Pmode
, i
));
436 /* Value should be nonzero if functions must have frame pointers.
437 Zero means the frame pointer need not be set up (and parms may
438 be accessed via the stack pointer) in functions that seem suitable. */
441 bfin_frame_pointer_required (void)
443 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
445 if (fkind
!= SUBROUTINE
)
448 /* We turn on -fomit-frame-pointer if -momit-leaf-frame-pointer is used,
449 so we have to override it for non-leaf functions. */
450 if (TARGET_OMIT_LEAF_FRAME_POINTER
&& ! current_function_is_leaf
)
456 /* Return the number of registers pushed during the prologue. */
459 n_regs_saved_by_prologue (void)
461 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
462 bool is_inthandler
= fkind
!= SUBROUTINE
;
463 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
464 bool all
= (lookup_attribute ("saveall", attrs
) != NULL_TREE
465 || (is_inthandler
&& !current_function_is_leaf
));
466 int ndregs
= all
? 8 : n_dregs_to_save (is_inthandler
);
467 int npregs
= all
? 6 : n_pregs_to_save (is_inthandler
);
468 int n
= ndregs
+ npregs
;
470 if (all
|| stack_frame_needed_p ())
471 /* We use a LINK instruction in this case. */
475 if (must_save_fp_p ())
477 if (! current_function_is_leaf
)
481 if (fkind
!= SUBROUTINE
)
485 /* Increment once for ASTAT. */
489 if (lookup_attribute ("nesting", attrs
))
492 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
495 || (!leaf_function_p () && call_used_regs
[i
]))
496 n
+= i
== REG_A0
|| i
== REG_A1
? 2 : 1;
501 /* Return the offset between two registers, one to be eliminated, and the other
502 its replacement, at the start of a routine. */
505 bfin_initial_elimination_offset (int from
, int to
)
507 HOST_WIDE_INT offset
= 0;
509 if (from
== ARG_POINTER_REGNUM
)
510 offset
= n_regs_saved_by_prologue () * 4;
512 if (to
== STACK_POINTER_REGNUM
)
514 if (current_function_outgoing_args_size
>= FIXED_STACK_AREA
)
515 offset
+= current_function_outgoing_args_size
;
516 else if (current_function_outgoing_args_size
)
517 offset
+= FIXED_STACK_AREA
;
519 offset
+= get_frame_size ();
525 /* Emit code to load a constant CONSTANT into register REG; setting
526 RTX_FRAME_RELATED_P on all insns we generate if RELATED is true.
527 Make sure that the insns we generate need not be split. */
530 frame_related_constant_load (rtx reg
, HOST_WIDE_INT constant
, bool related
)
533 rtx cst
= GEN_INT (constant
);
535 if (constant
>= -32768 && constant
< 65536)
536 insn
= emit_move_insn (reg
, cst
);
539 /* We don't call split_load_immediate here, since dwarf2out.c can get
540 confused about some of the more clever sequences it can generate. */
541 insn
= emit_insn (gen_movsi_high (reg
, cst
));
543 RTX_FRAME_RELATED_P (insn
) = 1;
544 insn
= emit_insn (gen_movsi_low (reg
, reg
, cst
));
547 RTX_FRAME_RELATED_P (insn
) = 1;
550 /* Generate efficient code to add a value to the frame pointer. We
551 can use P1 as a scratch register. Set RTX_FRAME_RELATED_P on the
552 generated insns if FRAME is nonzero. */
555 add_to_sp (rtx spreg
, HOST_WIDE_INT value
, int frame
)
560 /* Choose whether to use a sequence using a temporary register, or
561 a sequence with multiple adds. We can add a signed 7 bit value
562 in one instruction. */
563 if (value
> 120 || value
< -120)
565 rtx tmpreg
= gen_rtx_REG (SImode
, REG_P1
);
569 frame_related_constant_load (tmpreg
, value
, TRUE
);
572 insn
= emit_move_insn (tmpreg
, GEN_INT (value
));
574 RTX_FRAME_RELATED_P (insn
) = 1;
577 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, tmpreg
));
579 RTX_FRAME_RELATED_P (insn
) = 1;
590 /* We could use -62, but that would leave the stack unaligned, so
594 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (size
)));
596 RTX_FRAME_RELATED_P (insn
) = 1;
602 /* Generate a LINK insn for a frame sized FRAME_SIZE. If this constant
603 is too large, generate a sequence of insns that has the same effect.
604 SPREG contains (reg:SI REG_SP). */
607 emit_link_insn (rtx spreg
, HOST_WIDE_INT frame_size
)
609 HOST_WIDE_INT link_size
= frame_size
;
613 if (link_size
> 262140)
616 /* Use a LINK insn with as big a constant as possible, then subtract
617 any remaining size from the SP. */
618 insn
= emit_insn (gen_link (GEN_INT (-8 - link_size
)));
619 RTX_FRAME_RELATED_P (insn
) = 1;
621 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
623 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
624 gcc_assert (GET_CODE (set
) == SET
);
625 RTX_FRAME_RELATED_P (set
) = 1;
628 frame_size
-= link_size
;
632 /* Must use a call-clobbered PREG that isn't the static chain. */
633 rtx tmpreg
= gen_rtx_REG (Pmode
, REG_P1
);
635 frame_related_constant_load (tmpreg
, -frame_size
, TRUE
);
636 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, tmpreg
));
637 RTX_FRAME_RELATED_P (insn
) = 1;
641 /* Return the number of bytes we must reserve for outgoing arguments
642 in the current function's stack frame. */
647 if (current_function_outgoing_args_size
)
649 if (current_function_outgoing_args_size
>= FIXED_STACK_AREA
)
650 return current_function_outgoing_args_size
;
652 return FIXED_STACK_AREA
;
657 /* Save RETS and FP, and allocate a stack frame. ALL is true if the
658 function must save all its registers (true only for certain interrupt
662 do_link (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
)
664 frame_size
+= arg_area_size ();
666 if (all
|| stack_frame_needed_p ()
667 || (must_save_fp_p () && ! current_function_is_leaf
))
668 emit_link_insn (spreg
, frame_size
);
671 if (! current_function_is_leaf
)
673 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
674 gen_rtx_PRE_DEC (Pmode
, spreg
)),
676 rtx insn
= emit_insn (pat
);
677 RTX_FRAME_RELATED_P (insn
) = 1;
679 if (must_save_fp_p ())
681 rtx pat
= gen_movsi (gen_rtx_MEM (Pmode
,
682 gen_rtx_PRE_DEC (Pmode
, spreg
)),
683 gen_rtx_REG (Pmode
, REG_FP
));
684 rtx insn
= emit_insn (pat
);
685 RTX_FRAME_RELATED_P (insn
) = 1;
687 add_to_sp (spreg
, -frame_size
, 1);
691 /* Like do_link, but used for epilogues to deallocate the stack frame. */
694 do_unlink (rtx spreg
, HOST_WIDE_INT frame_size
, bool all
)
696 frame_size
+= arg_area_size ();
698 if (all
|| stack_frame_needed_p ())
699 emit_insn (gen_unlink ());
702 rtx postinc
= gen_rtx_MEM (Pmode
, gen_rtx_POST_INC (Pmode
, spreg
));
704 add_to_sp (spreg
, frame_size
, 0);
705 if (must_save_fp_p ())
707 rtx fpreg
= gen_rtx_REG (Pmode
, REG_FP
);
708 emit_move_insn (fpreg
, postinc
);
709 emit_insn (gen_rtx_USE (VOIDmode
, fpreg
));
711 if (! current_function_is_leaf
)
713 emit_move_insn (bfin_rets_rtx
, postinc
);
714 emit_insn (gen_rtx_USE (VOIDmode
, bfin_rets_rtx
));
719 /* Generate a prologue suitable for a function of kind FKIND. This is
720 called for interrupt and exception handler prologues.
721 SPREG contains (reg:SI REG_SP). */
724 expand_interrupt_handler_prologue (rtx spreg
, e_funkind fkind
)
727 HOST_WIDE_INT frame_size
= get_frame_size ();
728 rtx predec1
= gen_rtx_PRE_DEC (SImode
, spreg
);
729 rtx predec
= gen_rtx_MEM (SImode
, predec1
);
731 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
732 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
733 tree kspisusp
= lookup_attribute ("kspisusp", attrs
);
737 insn
= emit_move_insn (spreg
, gen_rtx_REG (Pmode
, REG_USP
));
738 RTX_FRAME_RELATED_P (insn
) = 1;
741 /* We need space on the stack in case we need to save the argument
743 if (fkind
== EXCPT_HANDLER
)
745 insn
= emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (-12)));
746 RTX_FRAME_RELATED_P (insn
) = 1;
749 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, REG_ASTAT
));
750 RTX_FRAME_RELATED_P (insn
) = 1;
752 /* If we're calling other functions, they won't save their call-clobbered
753 registers, so we must save everything here. */
754 if (!current_function_is_leaf
)
756 expand_prologue_reg_save (spreg
, all
, true);
758 for (i
= REG_P7
+ 1; i
< REG_CC
; i
++)
761 || (!leaf_function_p () && call_used_regs
[i
]))
763 if (i
== REG_A0
|| i
== REG_A1
)
764 insn
= emit_move_insn (gen_rtx_MEM (PDImode
, predec1
),
765 gen_rtx_REG (PDImode
, i
));
767 insn
= emit_move_insn (predec
, gen_rtx_REG (SImode
, i
));
768 RTX_FRAME_RELATED_P (insn
) = 1;
771 if (lookup_attribute ("nesting", attrs
))
773 rtx srcreg
= gen_rtx_REG (Pmode
, (fkind
== EXCPT_HANDLER
? REG_RETX
774 : fkind
== NMI_HANDLER
? REG_RETN
776 insn
= emit_move_insn (predec
, srcreg
);
777 RTX_FRAME_RELATED_P (insn
) = 1;
780 do_link (spreg
, frame_size
, all
);
782 if (fkind
== EXCPT_HANDLER
)
784 rtx r0reg
= gen_rtx_REG (SImode
, REG_R0
);
785 rtx r1reg
= gen_rtx_REG (SImode
, REG_R1
);
786 rtx r2reg
= gen_rtx_REG (SImode
, REG_R2
);
789 insn
= emit_move_insn (r0reg
, gen_rtx_REG (SImode
, REG_SEQSTAT
));
790 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
,
792 insn
= emit_insn (gen_ashrsi3 (r0reg
, r0reg
, GEN_INT (26)));
793 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
,
795 insn
= emit_insn (gen_ashlsi3 (r0reg
, r0reg
, GEN_INT (26)));
796 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
,
798 insn
= emit_move_insn (r1reg
, spreg
);
799 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
,
801 insn
= emit_move_insn (r2reg
, gen_rtx_REG (Pmode
, REG_FP
));
802 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
,
804 insn
= emit_insn (gen_addsi3 (r2reg
, r2reg
, GEN_INT (8)));
805 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
,
810 /* Generate an epilogue suitable for a function of kind FKIND. This is
811 called for interrupt and exception handler epilogues.
812 SPREG contains (reg:SI REG_SP). */
815 expand_interrupt_handler_epilogue (rtx spreg
, e_funkind fkind
)
818 rtx postinc1
= gen_rtx_POST_INC (SImode
, spreg
);
819 rtx postinc
= gen_rtx_MEM (SImode
, postinc1
);
820 tree attrs
= TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl
));
821 bool all
= lookup_attribute ("saveall", attrs
) != NULL_TREE
;
823 /* A slightly crude technique to stop flow from trying to delete "dead"
825 MEM_VOLATILE_P (postinc
) = 1;
827 do_unlink (spreg
, get_frame_size (), all
);
829 if (lookup_attribute ("nesting", attrs
))
831 rtx srcreg
= gen_rtx_REG (Pmode
, (fkind
== EXCPT_HANDLER
? REG_RETX
832 : fkind
== NMI_HANDLER
? REG_RETN
834 emit_move_insn (srcreg
, postinc
);
837 /* If we're calling other functions, they won't save their call-clobbered
838 registers, so we must save (and restore) everything here. */
839 if (!current_function_is_leaf
)
842 for (i
= REG_CC
- 1; i
> REG_P7
; i
--)
845 || (!leaf_function_p () && call_used_regs
[i
]))
847 if (i
== REG_A0
|| i
== REG_A1
)
849 rtx mem
= gen_rtx_MEM (PDImode
, postinc1
);
850 MEM_VOLATILE_P (mem
) = 1;
851 emit_move_insn (gen_rtx_REG (PDImode
, i
), mem
);
854 emit_move_insn (gen_rtx_REG (SImode
, i
), postinc
);
857 expand_epilogue_reg_restore (spreg
, all
, true);
859 emit_move_insn (gen_rtx_REG (SImode
, REG_ASTAT
), postinc
);
861 /* Deallocate any space we left on the stack in case we needed to save the
862 argument registers. */
863 if (fkind
== EXCPT_HANDLER
)
864 emit_insn (gen_addsi3 (spreg
, spreg
, GEN_INT (12)));
866 emit_jump_insn (gen_return_internal (GEN_INT (fkind
)));
869 /* Used while emitting the prologue to generate code to load the correct value
870 into the PIC register, which is passed in DEST. */
873 bfin_load_pic_reg (rtx dest
)
875 struct cgraph_local_info
*i
= NULL
;
878 if (flag_unit_at_a_time
)
879 i
= cgraph_local_info (current_function_decl
);
881 /* Functions local to the translation unit don't need to reload the
882 pic reg, since the caller always passes a usable one. */
884 return pic_offset_table_rtx
;
886 if (bfin_lib_id_given
)
887 addr
= plus_constant (pic_offset_table_rtx
, -4 - bfin_library_id
* 4);
889 addr
= gen_rtx_PLUS (Pmode
, pic_offset_table_rtx
,
890 gen_rtx_UNSPEC (Pmode
, gen_rtvec (1, const0_rtx
),
891 UNSPEC_LIBRARY_OFFSET
));
892 insn
= emit_insn (gen_movsi (dest
, gen_rtx_MEM (Pmode
, addr
)));
893 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_MAYBE_DEAD
, const0_rtx
, NULL
);
897 /* Generate RTL for the prologue of the current function. */
900 bfin_expand_prologue (void)
903 HOST_WIDE_INT frame_size
= get_frame_size ();
904 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
905 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
906 rtx pic_reg_loaded
= NULL_RTX
;
908 if (fkind
!= SUBROUTINE
)
910 expand_interrupt_handler_prologue (spreg
, fkind
);
914 if (current_function_limit_stack
)
917 = bfin_initial_elimination_offset (ARG_POINTER_REGNUM
,
918 STACK_POINTER_REGNUM
);
919 rtx lim
= stack_limit_rtx
;
921 if (GET_CODE (lim
) == SYMBOL_REF
)
923 rtx p2reg
= gen_rtx_REG (Pmode
, REG_P2
);
924 if (TARGET_ID_SHARED_LIBRARY
)
926 rtx p1reg
= gen_rtx_REG (Pmode
, REG_P1
);
928 pic_reg_loaded
= bfin_load_pic_reg (p2reg
);
929 val
= legitimize_pic_address (stack_limit_rtx
, p1reg
,
931 emit_move_insn (p1reg
, val
);
932 frame_related_constant_load (p2reg
, offset
, FALSE
);
933 emit_insn (gen_addsi3 (p2reg
, p2reg
, p1reg
));
938 rtx limit
= plus_constant (stack_limit_rtx
, offset
);
939 emit_move_insn (p2reg
, limit
);
943 emit_insn (gen_compare_lt (bfin_cc_rtx
, spreg
, lim
));
944 emit_insn (gen_trapifcc ());
946 expand_prologue_reg_save (spreg
, 0, false);
948 do_link (spreg
, frame_size
, false);
950 if (TARGET_ID_SHARED_LIBRARY
951 && (current_function_uses_pic_offset_table
952 || !current_function_is_leaf
))
953 bfin_load_pic_reg (pic_offset_table_rtx
);
956 /* Generate RTL for the epilogue of the current function. NEED_RETURN is zero
957 if this is for a sibcall. EH_RETURN is nonzero if we're expanding an
958 eh_return pattern. */
961 bfin_expand_epilogue (int need_return
, int eh_return
)
963 rtx spreg
= gen_rtx_REG (Pmode
, REG_SP
);
964 e_funkind fkind
= funkind (TREE_TYPE (current_function_decl
));
966 if (fkind
!= SUBROUTINE
)
968 expand_interrupt_handler_epilogue (spreg
, fkind
);
972 do_unlink (spreg
, get_frame_size (), false);
974 expand_epilogue_reg_restore (spreg
, false, false);
976 /* Omit the return insn if this is for a sibcall. */
981 emit_insn (gen_addsi3 (spreg
, spreg
, gen_rtx_REG (Pmode
, REG_P2
)));
983 emit_jump_insn (gen_return_internal (GEN_INT (SUBROUTINE
)));
986 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
989 bfin_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
990 unsigned int new_reg
)
992 /* Interrupt functions can only use registers that have already been
993 saved by the prologue, even if they would normally be
996 if (funkind (TREE_TYPE (current_function_decl
)) != SUBROUTINE
997 && !regs_ever_live
[new_reg
])
1003 /* Return the value of the return address for the frame COUNT steps up
1004 from the current frame, after the prologue.
1005 We punt for everything but the current frame by returning const0_rtx. */
1008 bfin_return_addr_rtx (int count
)
1013 return get_hard_reg_initial_val (Pmode
, REG_RETS
);
1016 /* Try machine-dependent ways of modifying an illegitimate address X
1017 to be legitimate. If we find one, return the new, valid address,
1018 otherwise return NULL_RTX.
1020 OLDX is the address as it was before break_out_memory_refs was called.
1021 In some cases it is useful to look at this to decide what needs to be done.
1023 MODE is the mode of the memory reference. */
1026 legitimize_address (rtx x ATTRIBUTE_UNUSED
, rtx oldx ATTRIBUTE_UNUSED
,
1027 enum machine_mode mode ATTRIBUTE_UNUSED
)
1033 bfin_delegitimize_address (rtx orig_x
)
1037 if (GET_CODE (x
) != MEM
)
1041 if (GET_CODE (x
) == PLUS
1042 && GET_CODE (XEXP (x
, 1)) == UNSPEC
1043 && XINT (XEXP (x
, 1), 1) == UNSPEC_MOVE_PIC
1044 && GET_CODE (XEXP (x
, 0)) == REG
1045 && REGNO (XEXP (x
, 0)) == PIC_OFFSET_TABLE_REGNUM
)
1046 return XVECEXP (XEXP (x
, 1), 0, 0);
1051 /* This predicate is used to compute the length of a load/store insn.
1052 OP is a MEM rtx, we return nonzero if its addressing mode requires a
1053 32 bit instruction. */
1056 effective_address_32bit_p (rtx op
, enum machine_mode mode
)
1058 HOST_WIDE_INT offset
;
1060 mode
= GET_MODE (op
);
1063 if (GET_CODE (op
) != PLUS
)
1065 gcc_assert (REG_P (op
) || GET_CODE (op
) == POST_INC
1066 || GET_CODE (op
) == PRE_DEC
|| GET_CODE (op
) == POST_DEC
);
1070 offset
= INTVAL (XEXP (op
, 1));
1072 /* All byte loads use a 16 bit offset. */
1073 if (GET_MODE_SIZE (mode
) == 1)
1076 if (GET_MODE_SIZE (mode
) == 4)
1078 /* Frame pointer relative loads can use a negative offset, all others
1079 are restricted to a small positive one. */
1080 if (XEXP (op
, 0) == frame_pointer_rtx
)
1081 return offset
< -128 || offset
> 60;
1082 return offset
< 0 || offset
> 60;
1085 /* Must be HImode now. */
1086 return offset
< 0 || offset
> 30;
1089 /* Returns true if X is a memory reference using an I register. */
1091 bfin_dsp_memref_p (rtx x
)
1096 if (GET_CODE (x
) == POST_INC
|| GET_CODE (x
) == PRE_INC
1097 || GET_CODE (x
) == POST_DEC
|| GET_CODE (x
) == PRE_DEC
)
1102 /* Return cost of the memory address ADDR.
1103 All addressing modes are equally cheap on the Blackfin. */
1106 bfin_address_cost (rtx addr ATTRIBUTE_UNUSED
)
1111 /* Subroutine of print_operand; used to print a memory reference X to FILE. */
1114 print_address_operand (FILE *file
, rtx x
)
1116 switch (GET_CODE (x
))
1119 output_address (XEXP (x
, 0));
1120 fprintf (file
, "+");
1121 output_address (XEXP (x
, 1));
1125 fprintf (file
, "--");
1126 output_address (XEXP (x
, 0));
1129 output_address (XEXP (x
, 0));
1130 fprintf (file
, "++");
1133 output_address (XEXP (x
, 0));
1134 fprintf (file
, "--");
1138 gcc_assert (GET_CODE (x
) != MEM
);
1139 print_operand (file
, x
, 0);
1144 /* Adding intp DImode support by Tony
1150 print_operand (FILE *file
, rtx x
, char code
)
1152 enum machine_mode mode
= GET_MODE (x
);
1157 switch (GET_CODE (x
))
1160 fprintf (file
, "e");
1163 fprintf (file
, "ne");
1166 fprintf (file
, "g");
1169 fprintf (file
, "l");
1172 fprintf (file
, "ge");
1175 fprintf (file
, "le");
1178 fprintf (file
, "g");
1181 fprintf (file
, "l");
1184 fprintf (file
, "ge");
1187 fprintf (file
, "le");
1190 output_operand_lossage ("invalid %%j value");
1194 case 'J': /* reverse logic */
1195 switch (GET_CODE(x
))
1198 fprintf (file
, "ne");
1201 fprintf (file
, "e");
1204 fprintf (file
, "le");
1207 fprintf (file
, "ge");
1210 fprintf (file
, "l");
1213 fprintf (file
, "g");
1216 fprintf (file
, "le");
1219 fprintf (file
, "ge");
1222 fprintf (file
, "l");
1225 fprintf (file
, "g");
1228 output_operand_lossage ("invalid %%J value");
1233 switch (GET_CODE (x
))
1238 gcc_assert (REGNO (x
) < 32);
1239 fprintf (file
, "%s", short_reg_names
[REGNO (x
)]);
1240 /*fprintf (file, "\n%d\n ", REGNO (x));*/
1243 else if (code
== 'd')
1245 gcc_assert (REGNO (x
) < 32);
1246 fprintf (file
, "%s", high_reg_names
[REGNO (x
)]);
1249 else if (code
== 'w')
1251 gcc_assert (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
);
1252 fprintf (file
, "%s.w", reg_names
[REGNO (x
)]);
1254 else if (code
== 'x')
1256 gcc_assert (REGNO (x
) == REG_A0
|| REGNO (x
) == REG_A1
);
1257 fprintf (file
, "%s.x", reg_names
[REGNO (x
)]);
1259 else if (code
== 'D')
1261 fprintf (file
, "%s", dregs_pair_names
[REGNO (x
)]);
1263 else if (code
== 'H')
1265 gcc_assert (mode
== DImode
|| mode
== DFmode
);
1266 gcc_assert (REG_P (x
));
1267 fprintf (file
, "%s", reg_names
[REGNO (x
) + 1]);
1269 else if (code
== 'T')
1271 gcc_assert (D_REGNO_P (REGNO (x
)));
1272 fprintf (file
, "%s", byte_reg_names
[REGNO (x
)]);
1275 fprintf (file
, "%s", reg_names
[REGNO (x
)]);
1281 print_address_operand (file
, x
);
1293 fputs ("(FU)", file
);
1296 fputs ("(T)", file
);
1299 fputs ("(TFU)", file
);
1302 fputs ("(W32)", file
);
1305 fputs ("(IS)", file
);
1308 fputs ("(IU)", file
);
1311 fputs ("(IH)", file
);
1314 fputs ("(M)", file
);
1317 fputs ("(ISS2)", file
);
1320 fputs ("(S2RND)", file
);
1327 else if (code
== 'b')
1329 if (INTVAL (x
) == 0)
1331 else if (INTVAL (x
) == 1)
1337 /* Moves to half registers with d or h modifiers always use unsigned
1339 else if (code
== 'd')
1340 x
= GEN_INT ((INTVAL (x
) >> 16) & 0xffff);
1341 else if (code
== 'h')
1342 x
= GEN_INT (INTVAL (x
) & 0xffff);
1343 else if (code
== 'X')
1344 x
= GEN_INT (exact_log2 (0xffffffff & INTVAL (x
)));
1345 else if (code
== 'Y')
1346 x
= GEN_INT (exact_log2 (0xffffffff & ~INTVAL (x
)));
1347 else if (code
== 'Z')
1348 /* Used for LINK insns. */
1349 x
= GEN_INT (-8 - INTVAL (x
));
1354 output_addr_const (file
, x
);
1358 output_operand_lossage ("invalid const_double operand");
1362 switch (XINT (x
, 1))
1364 case UNSPEC_MOVE_PIC
:
1365 output_addr_const (file
, XVECEXP (x
, 0, 0));
1366 fprintf (file
, "@GOT");
1369 case UNSPEC_MOVE_FDPIC
:
1370 output_addr_const (file
, XVECEXP (x
, 0, 0));
1371 fprintf (file
, "@GOT17M4");
1374 case UNSPEC_FUNCDESC_GOT17M4
:
1375 output_addr_const (file
, XVECEXP (x
, 0, 0));
1376 fprintf (file
, "@FUNCDESC_GOT17M4");
1379 case UNSPEC_LIBRARY_OFFSET
:
1380 fprintf (file
, "_current_shared_library_p5_offset_");
1389 output_addr_const (file
, x
);
/* Argument support functions.  */

/* Initialize a variable CUM of type CUMULATIVE_ARGS
   for a call to a function whose data type is FNTYPE.
   For a library call, FNTYPE is 0.
   VDSP C Compiler manual, our ABI says that
   first 3 words of arguments will use R0, R1 and R2.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
		      rtx libname ATTRIBUTE_UNUSED)
{
  /* Zero-initialized template; copying it resets every field of *CUM.  */
  static CUMULATIVE_ARGS zero_cum;

  *cum = zero_cum;

  /* Set up the number of registers to use for passing arguments.  */

  cum->nregs = max_arg_registers;
  cum->arg_regs = arg_regs;

  cum->call_cookie = CALL_NORMAL;
  /* Check for a longcall attribute.  */
  if (fntype && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_SHORT;
  else if (fntype && lookup_attribute ("longcall", TYPE_ATTRIBUTES (fntype)))
    cum->call_cookie |= CALL_LONG;

  return;
}
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

void
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
		      int named ATTRIBUTE_UNUSED)
{
  int count, bytes, words;

  bytes = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  words = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;

  cum->words += words;
  cum->nregs -= words;

  if (cum->nregs <= 0)
    {
      /* Out of argument registers; everything else goes on the stack.  */
      cum->nregs = 0;
      cum->arg_regs = NULL;
    }
  else
    {
      /* Step past the register numbers just consumed.  */
      for (count = 1; count <= words; count++)
	cum->arg_regs++;
    }
  return;
}
/* Define where to put the arguments to a function.
   Value is zero to push the argument on the stack,
   or a hard register in which to store the argument.

   MODE is the argument's machine mode.
   TYPE is the data type of the argument (as a tree).
   This is null for libcalls where that information may
   not be available.
   CUM is a variable of type CUMULATIVE_ARGS which gives info about
   the preceding args and about the function being called.
   NAMED is nonzero if this argument is a named parameter
   (otherwise it is an extra parameter matching an ellipsis).  */

rtx
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
	      int named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);

  if (mode == VOIDmode)
    /* Compute operand 2 of the call insn.  */
    return GEN_INT (cum->call_cookie);

  /* Variable-sized arguments and exhausted registers go on the stack.  */
  if (bytes == -1)
    return NULL_RTX;

  if (cum->nregs)
    return gen_rtx_REG (mode, *(cum->arg_regs));

  return NULL_RTX;
}
/* For an arg passed partly in registers and partly in memory,
   this is the number of bytes passed in registers.
   For args passed entirely in registers or entirely in memory, zero.

   Refer VDSP C Compiler manual, our ABI.
   First 3 words are in registers.  So, if an argument is larger
   than the registers available, it will span the register and
   stack.  */

static int
bfin_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			tree type ATTRIBUTE_UNUSED,
			bool named ATTRIBUTE_UNUSED)
{
  int bytes
    = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
  int bytes_left = cum->nregs * UNITS_PER_WORD;

  /* Variable-sized argument: passed entirely on the stack.  */
  if (bytes == -1)
    return 0;

  if (bytes_left == 0)
    return 0;
  if (bytes > bytes_left)
    return bytes_left;
  return 0;
}
/* Variable sized types are passed by reference.  */

static bool
bfin_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			tree type, bool named ATTRIBUTE_UNUSED)
{
  /* A non-constant TYPE_SIZE means the type's size is not known at
     compile time, so the object must be passed by reference.  */
  return type && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST;
}
/* Decide whether a type should be returned in memory (true)
   or in a register (false).  This is called by the macro
   RETURN_IN_MEMORY.  */

int
bfin_return_in_memory (tree type)
{
  /* int_size_in_bytes returns -1 for variable-sized types, which must
     also be returned in memory.  */
  int size = int_size_in_bytes (type);
  return size > 2 * UNITS_PER_WORD || size == -1;
}
/* Register in which address to store a structure value
   is passed to a function.  */

static rtx
bfin_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
		       int incoming ATTRIBUTE_UNUSED)
{
  /* The Blackfin ABI uses P0 for the hidden struct-return pointer.  */
  return gen_rtx_REG (Pmode, REG_P0);
}
/* Return true when register may be used to pass function parameters.  */

int
function_arg_regno_p (int n)
{
  int i;
  /* arg_regs is terminated by -1.  */
  for (i = 0; arg_regs[i] != -1; i++)
    if (n == arg_regs[i])
      return 1;
  return 0;
}
/* Returns 1 if OP contains a symbol reference */

int
symbolic_reference_mentioned_p (rtx op)
{
  register const char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  /* Walk every sub-rtx ('e') and sub-vector ('E') recursively.  */
  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.  */

static bool
bfin_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
			      tree exp ATTRIBUTE_UNUSED)
{
  /* Only ordinary functions may sibcall; interrupt/exception handlers
     have different epilogues.  The decision depends on the CALLER's
     kind, hence current_function_decl.  */
  e_funkind fkind = funkind (TREE_TYPE (current_function_decl));
  return fkind == SUBROUTINE;
}
/* Emit RTL insns to initialize the variable parts of a trampoline at
   TRAMP.  FNADDR is an RTX for the address of the function's pure
   code.  CXT is an RTX for the static chain value for the function.  */

void
initialize_trampoline (tramp, fnaddr, cxt)
     rtx tramp, fnaddr, cxt;
{
  rtx t1 = copy_to_reg (fnaddr);
  rtx t2 = copy_to_reg (cxt);
  rtx addr;
  int i = 0;

  if (TARGET_FDPIC)
    {
      /* For FDPIC, the first word of the trampoline holds its own
	 address (the GOT entry lives 8 bytes in), and the code is
	 offset by 8 bytes.  */
      rtx a = memory_address (Pmode, plus_constant (tramp, 8));
      addr = memory_address (Pmode, tramp);
      emit_move_insn (gen_rtx_MEM (SImode, addr), a);
      i = 8;
    }

  /* Patch the two 16-bit immediate fields of each load instruction:
     low half first, then the high half after shifting right by 16.  */
  addr = memory_address (Pmode, plus_constant (tramp, i + 2));
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));
  emit_insn (gen_ashrsi3 (t1, t1, GEN_INT (16)));
  addr = memory_address (Pmode, plus_constant (tramp, i + 6));
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t1));

  addr = memory_address (Pmode, plus_constant (tramp, i + 10));
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
  emit_insn (gen_ashrsi3 (t2, t2, GEN_INT (16)));
  addr = memory_address (Pmode, plus_constant (tramp, i + 14));
  emit_move_insn (gen_rtx_MEM (HImode, addr), gen_lowpart (HImode, t2));
}
/* Emit insns to move operands[1] into operands[0].  */

static void
emit_pic_move (rtx *operands, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* During reload we may not create new pseudos; reuse the destination
     as the scratch in that case.  */
  rtx temp = reload_in_progress ? operands[0] : gen_reg_rtx (Pmode);

  gcc_assert (!TARGET_FDPIC || !(reload_in_progress || reload_completed));
  if (GET_CODE (operands[0]) == MEM && SYMBOLIC_CONST (operands[1]))
    operands[1] = force_reg (SImode, operands[1]);
  else
    operands[1] = legitimize_pic_address (operands[1], temp,
					  TARGET_FDPIC ? OUR_FDPIC_REG
					  : pic_offset_table_rtx);
}
/* Expand a move operation in mode MODE.  The operands are in OPERANDS.  */

void
expand_move (rtx *operands, enum machine_mode mode)
{
  rtx op = operands[1];
  if ((TARGET_ID_SHARED_LIBRARY || TARGET_FDPIC)
      && SYMBOLIC_CONST (op))
    emit_pic_move (operands, mode);
  /* Don't generate memory->memory or constant->memory moves, go through a
     register */
  else if ((reload_in_progress | reload_completed) == 0
	   && GET_CODE (operands[0]) == MEM
	   && GET_CODE (operands[1]) != REG)
    operands[1] = force_reg (mode, operands[1]);
}
/* Split one or more DImode RTL references into pairs of SImode
   references.  The RTL can be REG, offsettable MEM, integer constant, or
   CONST_DOUBLE.  "operands" is a pointer to an array of DImode RTL to
   split and "num" is its length.  lo_half and hi_half are output arrays
   that parallel "operands".  */

void
split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
{
  while (num--)
    {
      rtx op = operands[num];

      /* simplify_subreg refuse to split volatile memory addresses,
	 but we still have to handle it.  */
      if (GET_CODE (op) == MEM)
	{
	  lo_half[num] = adjust_address (op, SImode, 0);
	  hi_half[num] = adjust_address (op, SImode, 4);
	}
      else
	{
	  /* Constants have VOIDmode; treat them as DImode values.  */
	  lo_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 0);
	  hi_half[num] = simplify_gen_subreg (SImode, op,
					      GET_MODE (op) == VOIDmode
					      ? DImode : GET_MODE (op), 4);
	}
    }
}
/* Return nonzero if the call to symbol OP must be emitted as a long call.
   CALL_COOKIE carries the shortcall/longcall attribute bits recorded by
   init_cumulative_args; an explicit attribute overrides -mlong-calls.  */

bool
bfin_longcall_p (rtx op, int call_cookie)
{
  gcc_assert (GET_CODE (op) == SYMBOL_REF);
  if (call_cookie & CALL_SHORT)
    return 0;
  if (call_cookie & CALL_LONG)
    return 1;
  if (TARGET_LONG_CALLS)
    return 1;
  return 0;
}
/* Expand a call instruction.  FNADDR is the call target, RETVAL the return value.
   COOKIE is a CONST_INT holding the call_cookie prepared init_cumulative_args.
   SIBCALL is nonzero if this is a sibling call.  */

void
bfin_expand_call (rtx retval, rtx fnaddr, rtx callarg1, rtx cookie, int sibcall)
{
  rtx use = NULL, call;
  rtx callee = XEXP (fnaddr, 0);
  /* PARALLEL holds the call/set, the cookie USE, and for sibcalls a
     RETURN; FDPIC adds one more USE below.  */
  int nelts = 2 + !!sibcall;
  rtx pat;
  rtx picreg = get_hard_reg_initial_val (SImode, FDPIC_REGNO);
  int n;

  /* In an untyped call, we can get NULL for operand 2.  */
  if (cookie == NULL_RTX)
    cookie = const0_rtx;

  /* Static functions and indirect calls don't need the pic register.  */
  if (!TARGET_FDPIC && flag_pic
      && GET_CODE (callee) == SYMBOL_REF
      && !SYMBOL_REF_LOCAL_P (callee))
    use_reg (&use, pic_offset_table_rtx);

  if (TARGET_FDPIC)
    {
      if (GET_CODE (callee) != SYMBOL_REF
	  || bfin_longcall_p (callee, INTVAL (cookie)))
	{
	  /* Indirect or long FDPIC call: load the function address and
	     the new FDPIC register value from the function descriptor.  */
	  rtx addr = callee;
	  if (! address_operand (addr, Pmode))
	    addr = force_reg (Pmode, addr);

	  fnaddr = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (fnaddr, addr));
	  fnaddr = gen_rtx_MEM (Pmode, fnaddr);

	  picreg = gen_reg_rtx (SImode);
	  emit_insn (gen_load_funcdescsi (picreg,
					  plus_constant (addr, 4)));
	}

      nelts++;
    }
  else if ((!register_no_elim_operand (callee, Pmode)
	    && GET_CODE (callee) != SYMBOL_REF)
	   || (GET_CODE (callee) == SYMBOL_REF
	       && (flag_pic
		   || bfin_longcall_p (callee, INTVAL (cookie)))))
    {
      /* Force the target into a register for an indirect call.  */
      callee = copy_to_mode_reg (Pmode, callee);
      fnaddr = gen_rtx_MEM (Pmode, callee);
    }
  call = gen_rtx_CALL (VOIDmode, fnaddr, callarg1);

  if (retval)
    call = gen_rtx_SET (VOIDmode, retval, call);

  pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nelts));
  n = 0;
  XVECEXP (pat, 0, n++) = call;
  if (TARGET_FDPIC)
    XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, picreg);
  XVECEXP (pat, 0, n++) = gen_rtx_USE (VOIDmode, cookie);
  if (sibcall)
    XVECEXP (pat, 0, n++) = gen_rtx_RETURN (VOIDmode);
  call = emit_call_insn (pat);
  if (use)
    CALL_INSN_FUNCTION_USAGE (call) = use;
}
/* Return 1 if hard register REGNO can hold a value of machine-mode MODE.  */

int
hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Allow only dregs to store value of mode HI or QI */
  enum reg_class class = REGNO_REG_CLASS (regno);

  if (mode == CCmode)
    return 0;

  if (mode == V2HImode)
    return D_REGNO_P (regno);
  if (class == CCREGS)
    return mode == BImode;
  if (mode == PDImode || mode == V2PDImode)
    return regno == REG_A0 || regno == REG_A1;
  /* NOTE(review): reconstructed guard — presumably SImode is allowed in
     the prologue registers; confirm against the original source.  */
  if (mode == SImode
      && TEST_HARD_REG_BIT (reg_class_contents[PROLOGUE_REGS], regno))
    return 1;

  return TEST_HARD_REG_BIT (reg_class_contents[MOST_REGS], regno);
}
/* Implements target hook vector_mode_supported_p.  */

static bool
bfin_vector_mode_supported_p (enum machine_mode mode)
{
  /* The only vector mode with hardware support is a pair of halfwords.  */
  return mode == V2HImode;
}
/* Return the cost of moving data from a register in class CLASS1 to
   one in class CLASS2.  A cost of 2 is the default.  */

int
bfin_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
			 enum reg_class class1, enum reg_class class2)
{
  /* These need secondary reloads, so they're more expensive.  */
  if ((class1 == CCREGS && class2 != DREGS)
      || (class1 != DREGS && class2 == CCREGS))
    return 4;

  /* If optimizing for size, always prefer reg-reg over reg-memory moves.  */
  if (optimize_size)
    return 2;

  /* There are some stalls involved when moving from a DREG to a different
     class reg, and using the value in one of the following instructions.
     Attempt to model this by slightly discouraging such moves.  */
  if (class1 == DREGS && class2 != DREGS)
    return 2 * 2 + 1;

  return 2;
}
/* Return the cost of moving data of mode M between a
   register and memory.  A value of 2 is the default; this cost is
   relative to those in `REGISTER_MOVE_COST'.

   ??? In theory L1 memory has single-cycle latency.  We should add a switch
   that tells the compiler whether we expect to use only L1 memory for the
   program; it'll make the costs more accurate.  */

int
bfin_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
		       enum reg_class class,
		       int in ATTRIBUTE_UNUSED)
{
  /* Make memory accesses slightly more expensive than any register-register
     move.  Also, penalize non-DP registers, since they need secondary
     reloads to load and store.  */
  if (! reg_class_subset_p (class, DPREGS))
    return 10;

  return 8;
}
/* Inform reload about cases where moving X with a mode MODE to a register in
   CLASS requires an extra scratch register.  Return the class needed for the
   scratch register.  */

static enum reg_class
bfin_secondary_reload (bool in_p, rtx x, enum reg_class class,
		       enum machine_mode mode, secondary_reload_info *sri)
{
  /* If we have HImode or QImode, we can only use DREGS as secondary registers;
     in most other cases we can also use PREGS.  */
  enum reg_class default_class = GET_MODE_SIZE (mode) >= 4 ? DPREGS : DREGS;
  enum reg_class x_class = NO_REGS;
  enum rtx_code code = GET_CODE (x);

  if (code == SUBREG)
    x = SUBREG_REG (x), code = GET_CODE (x);
  if (code == REG)
    {
      int regno = REGNO (x);
      if (regno >= FIRST_PSEUDO_REGISTER)
	regno = reg_renumber[regno];

      /* An unallocated pseudo lives in memory; treat it like a MEM.  */
      if (regno == -1)
	code = MEM;
      else
	x_class = REGNO_REG_CLASS (regno);
    }

  /* We can be asked to reload (plus (FP) (large_constant)) into a DREG.
     This happens as a side effect of register elimination, and we need
     a scratch register to do it.  */
  if (fp_plus_const_operand (x, mode))
    {
      rtx op2 = XEXP (x, 1);
      int large_constant_p = ! CONST_7BIT_IMM_P (INTVAL (op2));

      if (class == PREGS || class == PREGS_CLOBBERED)
	return NO_REGS;
      /* If destination is a DREG, we can do this without a scratch register
	 if the constant is valid for an add instruction.  */
      if ((class == DREGS || class == DPREGS)
	  && ! large_constant_p)
	return NO_REGS;
      /* Reloading to anything other than a DREG?  Use a PREG scratch
	 register.  */
      sri->icode = CODE_FOR_reload_insi;
      return NO_REGS;
    }

  /* Data can usually be moved freely between registers of most classes.
     AREGS are an exception; they can only move to or from another register
     in AREGS or one in DREGS.  They can also be assigned the constant 0.  */
  if (x_class == AREGS)
    return class == DREGS || class == AREGS ? NO_REGS : DREGS;

  if (class == AREGS)
    {
      if (x != const0_rtx && x_class != DREGS)
	return DREGS;
      else
	return NO_REGS;
    }

  /* CCREGS can only be moved from/to DREGS.  */
  if (class == CCREGS && x_class != DREGS)
    return DREGS;
  if (x_class == CCREGS && class != DREGS)
    return DREGS;

  /* All registers other than AREGS can load arbitrary constants.  The only
     case that remains is MEM.  */
  if (code == MEM)
    if (! reg_class_subset_p (class, default_class))
      return default_class;
  return NO_REGS;
}
/* Implement TARGET_HANDLE_OPTION.  */

static bool
bfin_handle_option (size_t code, const char *arg, int value)
{
  switch (code)
    {
    case OPT_mshared_library_id_:
      if (value > MAX_LIBRARY_ID)
	error ("-mshared-library-id=%s is not between 0 and %d",
	       arg, MAX_LIBRARY_ID);
      bfin_lib_id_given = 1;
      return true;

    default:
      return true;
    }
}
/* Allocate and zero-initialize the per-function machine_function record;
   installed as init_machine_status in override_options.  */

static struct machine_function *
bfin_init_machine_status (void)
{
  struct machine_function *f;

  f = ggc_alloc_cleared (sizeof (struct machine_function));

  return f;
}
/* Implement the macro OVERRIDE_OPTIONS.  */

void
override_options (void)
{
  if (TARGET_OMIT_LEAF_FRAME_POINTER)
    flag_omit_frame_pointer = 1;

  /* Library identification */
  if (bfin_lib_id_given && ! TARGET_ID_SHARED_LIBRARY)
    error ("-mshared-library-id= specified without -mid-shared-library");

  /* ID shared libraries imply PIC.  */
  if (TARGET_ID_SHARED_LIBRARY && flag_pic == 0)
    flag_pic = 1;

  if (TARGET_ID_SHARED_LIBRARY && TARGET_FDPIC)
    error ("ID shared libraries and FD-PIC mode can't be used together.");

  /* There is no single unaligned SI op for PIC code.  Sometimes we
     need to use ".4byte" and sometimes we need to use ".picptr".
     See bfin_assemble_integer for details.  */
  if (TARGET_FDPIC)
    targetm.asm_out.unaligned_op.si = 0;

  /* Silently turn off flag_pic if not doing FDPIC or ID shared libraries,
     since we don't support it and it'll just break.  */
  if (flag_pic && !TARGET_FDPIC && !TARGET_ID_SHARED_LIBRARY)
    flag_pic = 0;

  flag_schedule_insns = 0;

  init_machine_status = bfin_init_machine_status;
}
/* Return the destination address of BRANCH.
   We need to use this instead of get_attr_length, because the
   cbranch_with_nops pattern conservatively sets its length to 6, and
   we still prefer to use shorter sequences.  */

static int
branch_dest (rtx branch)
{
  rtx dest;
  int dest_uid;
  rtx pat = PATTERN (branch);
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  dest = SET_SRC (pat);
  /* For a conditional branch, the label is the "then" arm.  */
  if (GET_CODE (dest) == IF_THEN_ELSE)
    dest = XEXP (dest, 1);
  dest = XEXP (dest, 0);
  dest_uid = INSN_UID (dest);
  return INSN_ADDRESSES (dest_uid);
}
/* Return nonzero if INSN is annotated with a REG_BR_PROB note that indicates
   it's a branch that's predicted taken.  */

static int
cbranch_predicted_taken_p (rtx insn)
{
  rtx x = find_reg_note (insn, REG_BR_PROB, 0);

  if (x)
    {
      int pred_val = INTVAL (XEXP (x, 0));

      /* Probability of at least one half means predicted taken.  */
      return pred_val >= REG_BR_PROB_BASE / 2;
    }

  return 0;
}
/* Templates for use by asm_conditional_branch.  First index is
   (bp << 1) | (BRF or BRT); second index selects the length variant
   (0 = short branch, 1 = jump.s, 2 = jump.l).  */

static const char *ccbranch_templates[][3] = {
  { "if !cc jump %3;",  "if cc jump 4 (bp); jump.s %3;",  "if cc jump 6 (bp); jump.l %3;" },
  { "if cc jump %3;",  "if !cc jump 4 (bp); jump.s %3;",  "if !cc jump 6 (bp); jump.l %3;" },
  { "if !cc jump %3 (bp);",  "if cc jump 4; jump.s %3;",  "if cc jump 6; jump.l %3;" },
  { "if cc jump %3 (bp);",  "if !cc jump 4; jump.s %3;",  "if !cc jump 6; jump.l %3;" },
};
/* Output INSN, which is a conditional branch instruction with operands
   OPERANDS.

   We deal with the various forms of conditional branches that can be generated
   by bfin_reorg to prevent the hardware from doing speculative loads, by
   - emitting a sufficient number of nops, if N_NOPS is nonzero, or
   - always emitting the branch as predicted taken, if PREDICT_TAKEN is true.
   Either of these is only necessary if the branch is short, otherwise the
   template we use ends in an unconditional jump which flushes the pipeline
   anyway.  */

static void
asm_conditional_branch (rtx insn, rtx *operands, int n_nops, int predict_taken)
{
  int offset = branch_dest (insn) - INSN_ADDRESSES (INSN_UID (insn));
  /* Note : offset for instructions like if cc jmp; jump.[sl] offset
	    is to be taken from start of if cc rather than jump.
	    Range for jump.s is (-4094, 4096) instead of (-4096, 4094)
  */
  int len = (offset >= -1024 && offset <= 1022 ? 0
	     : offset >= -4094 && offset <= 4096 ? 1
	     : 2);
  /* The (bp) suffix may only be used on the short form.  */
  int bp = predict_taken && len == 0 ? 1 : cbranch_predicted_taken_p (insn);
  int idx = (bp << 1) | (GET_CODE (operands[0]) == EQ ? BRF : BRT);
  output_asm_insn (ccbranch_templates[idx][len], operands);
  gcc_assert (n_nops == 0 || !bp);
  if (len == 0)
    while (n_nops-- > 0)
      output_asm_insn ("nop;", NULL);
}
/* Emit rtl for a comparison operation CMP in mode MODE.  Operands have been
   stored in bfin_compare_op0 and bfin_compare_op1 already.  */

rtx
bfin_gen_compare (rtx cmp, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  enum rtx_code code1, code2;
  rtx op0 = bfin_compare_op0, op1 = bfin_compare_op1;
  rtx tem = bfin_cc_rtx;
  enum rtx_code code = GET_CODE (cmp);

  /* If we have a BImode input, then we already have a compare result, and
     do not need to emit another comparison.  */
  if (GET_MODE (op0) == BImode)
    {
      gcc_assert ((code == NE || code == EQ) && op1 == const0_rtx);
      tem = op0, code2 = code;
    }
  else
    {
      switch (code) {
	/* bfin has these conditions */
      case EQ:
      case LT:
      case LE:
      case LEU:
      case LTU:
	code1 = code;
	code2 = NE;
	break;
      default:
	/* Unsupported conditions are emitted by reversing the compare
	   and testing the CC bit for equality with zero.  */
	code1 = reverse_condition (code);
	code2 = EQ;
	break;
      }
      emit_insn (gen_rtx_SET (BImode, tem,
			      gen_rtx_fmt_ee (code1, BImode, op0, op1)));
    }

  return gen_rtx_fmt_ee (code2, BImode, tem, CONST0_RTX (BImode));
}
/* Return nonzero iff C has exactly one bit set if it is interpreted
   as a 32 bit constant.  */

int
log2constp (unsigned HOST_WIDE_INT c)
{
  /* Truncate to 32 bits; the classic power-of-two test follows.  */
  c &= 0xFFFFFFFF;
  return c != 0 && (c & (c-1)) == 0;
}
/* Returns the number of consecutive least significant zeros in the binary
   representation of *V.
   We modify *V to contain the original value arithmetically shifted right by
   the number of zeroes.  */

static int
shiftr_zero (HOST_WIDE_INT *v)
{
  unsigned HOST_WIDE_INT tmp = *v;
  unsigned HOST_WIDE_INT sgn;
  int n = 0;

  if (tmp == 0)
    return 0;

  /* Re-insert the sign bit on each step to get an arithmetic shift.  */
  sgn = tmp & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
  while ((tmp & 0x1) == 0 && n <= 32)
    {
      tmp = (tmp >> 1) | sgn;
      n++;
    }
  *v = tmp;
  return n;
}
/* After reload, split the load of an immediate constant.  OPERANDS are the
   operands of the movsi_insn pattern which we are splitting.  We return
   nonzero if we emitted a sequence to load the constant, zero if we emitted
   nothing because we want to use the splitter's default sequence.  */

int
split_load_immediate (rtx operands[])
{
  HOST_WIDE_INT val = INTVAL (operands[1]);
  HOST_WIDE_INT tmp;
  HOST_WIDE_INT shifted = val;
  HOST_WIDE_INT shifted_compl = ~val;
  int num_zero = shiftr_zero (&shifted);
  int num_compl_zero = shiftr_zero (&shifted_compl);
  unsigned int regno = REGNO (operands[0]);
  enum reg_class class1 = REGNO_REG_CLASS (regno);

  /* This case takes care of single-bit set/clear constants, which we could
     also implement with BITSET/BITCLR.  */
  if (num_zero
      && shifted >= -32768 && shifted < 65536
      && (D_REGNO_P (regno)
	  || (regno >= REG_P0 && regno <= REG_P7 && num_zero <= 2)))
    {
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0], GEN_INT (num_zero)));
      return 1;
    }

  /* Sign-extended low 16 bits of the constant.  */
  tmp = val & 0xFFFF;
  tmp |= -(tmp & 0x8000);

  /* If high word has one bit set or clear, try to use a bit operation.  */
  if (D_REGNO_P (regno))
    {
      if (log2constp (val & 0xFFFF0000))
	{
	  emit_insn (gen_movsi (operands[0], GEN_INT (val & 0xFFFF)));
	  emit_insn (gen_iorsi3 (operands[0], operands[0], GEN_INT (val & 0xFFFF0000)));
	  return 1;
	}
      else if (log2constp (val | 0xFFFF) && (val & 0x8000) != 0)
	{
	  emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
	  emit_insn (gen_andsi3 (operands[0], operands[0], GEN_INT (val | 0xFFFF)));
	  return 1;
	}
    }

  if (D_REGNO_P (regno))
    {
      if (CONST_7BIT_IMM_P (tmp))
	{
	  emit_insn (gen_movsi (operands[0], GEN_INT (tmp)));
	  emit_insn (gen_movstricthi_high (operands[0], GEN_INT (val & -65536)));
	  return 1;
	}

      if ((val & 0xFFFF0000) == 0)
	{
	  emit_insn (gen_movsi (operands[0], const0_rtx));
	  emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
	  return 1;
	}

      if ((val & 0xFFFF0000) == 0xFFFF0000)
	{
	  emit_insn (gen_movsi (operands[0], constm1_rtx));
	  emit_insn (gen_movsi_low (operands[0], operands[0], operands[1]));
	  return 1;
	}
    }

  /* Need DREGs for the remaining case.  */
  if (regno > REG_R7)
    return 0;

  if (optimize_size
      && num_compl_zero && CONST_7BIT_IMM_P (shifted_compl))
    {
      /* If optimizing for size, generate a sequence that has more instructions
	 but is shorter.  */
      emit_insn (gen_movsi (operands[0], GEN_INT (shifted_compl)));
      emit_insn (gen_ashlsi3 (operands[0], operands[0],
			      GEN_INT (num_compl_zero)));
      emit_insn (gen_one_cmplsi2 (operands[0], operands[0]));
      return 1;
    }
  return 0;
}
/* Return true if VALUE is a legitimate constant displacement for a memory
   operand of mode MODE.  Return false if not.  */

static bool
bfin_valid_add (enum machine_mode mode, HOST_WIDE_INT value)
{
  unsigned HOST_WIDE_INT v = value > 0 ? value : -value;
  int sz = GET_MODE_SIZE (mode);
  /* Displacements are scaled by the access size.  */
  int shift = sz == 1 ? 0 : sz == 2 ? 1 : 2;
  /* The usual offsettable_memref machinery doesn't work so well for this
     port, so we deal with the problem here.  */
  unsigned HOST_WIDE_INT mask = sz == 8 ? 0x7ffe : 0x7fff;
  return (v & ~(mask << shift)) == 0;
}
/* Return true if REGNO may serve as a base register in an address of
   machine mode MODE appearing inside OUTER_CODE.  STRICT selects between
   the strict and non-strict register checks used during/after reload.  */

static bool
bfin_valid_reg_p (unsigned int regno, int strict, enum machine_mode mode,
		  enum rtx_code outer_code)
{
  if (strict)
    return REGNO_OK_FOR_BASE_STRICT_P (regno, mode, outer_code, SCRATCH);
  else
    return REGNO_OK_FOR_BASE_NONSTRICT_P (regno, mode, outer_code, SCRATCH);
}
/* Implement GO_IF_LEGITIMATE_ADDRESS: decide whether X is a valid memory
   address for mode MODE.  STRICT selects strict register checking.  */

bool
bfin_legitimate_address_p (enum machine_mode mode, rtx x, int strict)
{
  switch (GET_CODE (x)) {
  case REG:
    if (bfin_valid_reg_p (REGNO (x), strict, mode, MEM))
      return true;
    break;
  case PLUS:
    /* Base register plus either a small scaled constant or an UNSPEC
       (GOT-style) offset for SImode accesses.  */
    if (REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PLUS)
	&& ((GET_CODE (XEXP (x, 1)) == UNSPEC && mode == SImode)
	    || (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& bfin_valid_add (mode, INTVAL (XEXP (x, 1))))))
      return true;
    break;
  case POST_INC:
  case POST_DEC:
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, POST_INC))
      return true;
    break;
  case PRE_DEC:
    /* Pre-decrement is only valid on the stack pointer (pushes).  */
    if (LEGITIMATE_MODE_FOR_AUTOINC_P (mode)
	&& XEXP (x, 0) == stack_pointer_rtx
	&& REG_P (XEXP (x, 0))
	&& bfin_valid_reg_p (REGNO (XEXP (x, 0)), strict, mode, PRE_DEC))
      return true;
    break;
  default:
    break;
  }
  return false;
}
/* Implement TARGET_RTX_COSTS: estimate the cost of rtx X appearing inside
   an OUTER_CODE expression; store the result in *TOTAL.  Returning true
   stops recursion into sub-expressions.  */

static bool
bfin_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  int cost2 = COSTS_N_INSNS (1);

  switch (code)
    {
    case CONST_INT:
      /* A constant is free when it fits the immediate field of the
	 containing operation.  */
      if (outer_code == SET || outer_code == PLUS)
	*total = CONST_7BIT_IMM_P (INTVAL (x)) ? 0 : cost2;
      else if (outer_code == AND)
	*total = log2constp (~INTVAL (x)) ? 0 : cost2;
      else if (outer_code == LE || outer_code == LT || outer_code == EQ)
	*total = (INTVAL (x) >= -4 && INTVAL (x) <= 3) ? 0 : cost2;
      else if (outer_code == LEU || outer_code == LTU)
	*total = (INTVAL (x) >= 0 && INTVAL (x) <= 7) ? 0 : cost2;
      else if (outer_code == MULT)
	*total = (INTVAL (x) == 2 || INTVAL (x) == 4) ? 0 : cost2;
      else if (outer_code == ASHIFT && (INTVAL (x) == 1 || INTVAL (x) == 2))
	*total = 0;
      else if (outer_code == ASHIFT || outer_code == ASHIFTRT
	       || outer_code == LSHIFTRT)
	*total = (INTVAL (x) >= 0 && INTVAL (x) <= 31) ? 0 : cost2;
      else if (outer_code == IOR || outer_code == XOR)
	*total = (INTVAL (x) & (INTVAL (x) - 1)) == 0 ? 0 : cost2;
      else
	*total = cost2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
      *total = COSTS_N_INSNS (2);
      return true;

    case PLUS:
      if (GET_MODE (x) == Pmode)
	{
	  /* reg + reg*{2,4} can be done in a single address arithmetic
	     instruction.  */
	  if (GET_CODE (XEXP (x, 0)) == MULT
	      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	    {
	      HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
	      if (val == 2 || val == 4)
		{
		  *total = cost2;
		  *total += rtx_cost (XEXP (XEXP (x, 0), 0), outer_code);
		  *total += rtx_cost (XEXP (x, 1), outer_code);
		  return true;
		}
	    }
	}

      /* fall through */

    case MINUS:
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      /* DImode arithmetic must be synthesized from SImode pieces.  */
      if (GET_MODE (x) == DImode)
	*total = 6 * cost2;
      return false;

    case AND:
    case IOR:
    case XOR:
      if (GET_MODE (x) == DImode)
	*total = 2 * cost2;
      return false;

    case MULT:
      if (GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD)
	*total = COSTS_N_INSNS (3);
      return false;

    case UDIV:
    case UMOD:
      /* No hardware divide; a library call is expensive.  */
      *total = COSTS_N_INSNS (32);
      return true;

    case VEC_CONCAT:
    case VEC_SELECT:
      if (outer_code == SET)
	*total = cost2;
      return true;

    default:
      return false;
    }
}
/* Implement TARGET_ASM_INTERNAL_LABEL: emit an internal label named
   PREFIX/NUM, using '$' as the separator required by the assembler.  */

static void
bfin_internal_label (FILE *stream, const char *prefix, unsigned long num)
{
  fprintf (stream, "%s%s$%ld:\n", LOCAL_LABEL_PREFIX, prefix, num);
}
/* Used for communication between {push,pop}_multiple_operation (which
   we use not only as a predicate) and the corresponding output functions.  */
static int first_preg_to_save, first_dreg_to_save;

/* Predicate for a PARALLEL describing a multi-register push: a contiguous
   run of D registers ending at R7 followed by a contiguous run of P
   registers ending at P5, each stored at decreasing offsets from SP.
   As a side effect, records first_{d,p}reg_to_save.  */
int
push_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  first_preg_to_save = lastpreg;
  first_dreg_to_save = lastdreg;
  for (i = 1, group = 0; i < XVECLEN (op, 0) - 1; i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (dest) != MEM || ! REG_P (src))
	return 0;
      dest = XEXP (dest, 0);
      /* Element i must store at SP + (-i * 4).  */
      if (GET_CODE (dest) != PLUS
	  || ! REG_P (XEXP (dest, 0))
	  || REGNO (XEXP (dest, 0)) != REG_SP
	  || GET_CODE (XEXP (dest, 1)) != CONST_INT
	  || INTVAL (XEXP (dest, 1)) != -i * 4)
	return 0;

      regno = REGNO (src);
      if (group == 0)
	{
	  /* First register decides whether we start in the D group or
	     the P group.  */
	  if (D_REGNO_P (regno))
	    {
	      group = 1;
	      first_dreg_to_save = lastdreg = regno - REG_R0;
	    }
	  else if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else
	    return 0;

	  continue;
	}

      if (group == 1)
	{
	  /* Either switch to the P group, or continue the D run.  */
	  if (regno >= REG_P0 && regno <= REG_P7)
	    {
	      group = 2;
	      first_preg_to_save = lastpreg = regno - REG_P0;
	    }
	  else if (regno != REG_R0 + lastdreg + 1)
	    return 0;
	  else
	    lastdreg++;
	}
      else if (group == 2)
	{
	  if (regno != REG_P0 + lastpreg + 1)
	    return 0;
	  lastpreg++;
	}
    }
  return 1;
}
/* Predicate for a PARALLEL describing a multi-register pop, the mirror of
   push_multiple_operation: loads from increasing SP offsets into a run of
   P registers and/or a run of D registers starting at R7.  As a side
   effect, records first_{d,p}reg_to_save.  */
int
pop_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int lastdreg = 8, lastpreg = 6;
  int i, group;

  for (i = 1, group = 0; i < XVECLEN (op, 0); i++)
    {
      rtx t = XVECEXP (op, 0, i);
      rtx src, dest;
      int regno;

      if (GET_CODE (t) != SET)
	return 0;

      src = SET_SRC (t);
      dest = SET_DEST (t);
      if (GET_CODE (src) != MEM || ! REG_P (dest))
	return 0;
      src = XEXP (src, 0);

      if (i == 1)
	{
	  /* First load comes directly from SP.  */
	  if (! REG_P (src) || REGNO (src) != REG_SP)
	    return 0;
	}
      else if (GET_CODE (src) != PLUS
	       || ! REG_P (XEXP (src, 0))
	       || REGNO (XEXP (src, 0)) != REG_SP
	       || GET_CODE (XEXP (src, 1)) != CONST_INT
	       || INTVAL (XEXP (src, 1)) != (i - 1) * 4)
	return 0;

      regno = REGNO (dest);
      if (group == 0)
	{
	  /* The pop order is reversed: P registers come first, unless
	     the very first register popped is R7.  */
	  if (regno == REG_R7)
	    {
	      group = 1;
	      lastdreg = 7;
	    }
	  else if (regno != REG_P0 + lastpreg - 1)
	    return 0;
	  else
	    lastpreg--;
	}
      else if (group == 1)
	{
	  if (regno != REG_R0 + lastdreg - 1)
	    return 0;
	  else
	    lastdreg--;
	}
    }
  first_dreg_to_save = lastdreg;
  first_preg_to_save = lastpreg;
  return 1;
}
/* Emit assembly code for one multi-register push described by INSN, with
   operands in OPERANDS.  */

void
output_push_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save. */
  ok = push_multiple_operation (PATTERN (insn), VOIDmode);
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "[--sp] = ( p5:%d );\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "[--sp] = ( r7:%d );\n", first_dreg_to_save);
  else
    sprintf (buf, "[--sp] = ( r7:%d, p5:%d );\n",
	     first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}
/* Emit assembly code for one multi-register pop described by INSN, with
   operands in OPERANDS.  */

void
output_pop_multiple (rtx insn, rtx *operands)
{
  char buf[80];
  int ok;

  /* Validate the insn again, and compute first_[dp]reg_to_save. */
  ok = pop_multiple_operation (PATTERN (insn), VOIDmode);
  gcc_assert (ok);

  if (first_dreg_to_save == 8)
    sprintf (buf, "( p5:%d ) = [sp++];\n", first_preg_to_save);
  else if (first_preg_to_save == 6)
    sprintf (buf, "( r7:%d ) = [sp++];\n", first_dreg_to_save);
  else
    sprintf (buf, "( r7:%d, p5:%d ) = [sp++];\n",
	     first_dreg_to_save, first_preg_to_save);

  output_asm_insn (buf, operands);
}
/* Adjust DST and SRC by OFFSET bytes, and generate one move in mode MODE.  */

static void
single_move_for_movmem (rtx dst, rtx src, enum machine_mode mode,
			HOST_WIDE_INT offset)
{
  /* Go through a scratch register to avoid a mem->mem move.  */
  rtx scratch = gen_reg_rtx (mode);
  rtx srcmem, dstmem;

  srcmem = adjust_address_nv (src, mode, offset);
  dstmem = adjust_address_nv (dst, mode, offset);
  emit_move_insn (scratch, srcmem);
  emit_move_insn (dstmem, scratch);
}
/* Expand a string move operation of COUNT_EXP bytes from SRC to DST, with
   alignment ALIGN_EXP.  Return true if successful, false if we should fall
   back on a different method.  */

bool
bfin_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp)
{
  rtx srcreg, destreg, countreg;
  HOST_WIDE_INT align = 0;
  unsigned HOST_WIDE_INT count = 0;

  if (GET_CODE (align_exp) == CONST_INT)
    align = INTVAL (align_exp);
  if (GET_CODE (count_exp) == CONST_INT)
    {
      count = INTVAL (count_exp);
      if (!TARGET_INLINE_ALL_STRINGOPS && count > 64)
	return false;
    }

  /* If optimizing for size, only do single copies inline.  */
  if (optimize_size)
    {
      if (count == 2 && align < 2)
	return false;
      if (count == 4 && align < 4)
	return false;
      if (count != 1 && count != 2 && count != 4)
	return false;
    }
  if (align < 2 && count != 1)
    return false;

  destreg = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  if (destreg != XEXP (dst, 0))
    dst = replace_equiv_address_nv (dst, destreg);
  srcreg = copy_to_mode_reg (Pmode, XEXP (src, 0));
  if (srcreg != XEXP (src, 0))
    src = replace_equiv_address_nv (src, srcreg);

  if (count != 0 && align >= 2)
    {
      unsigned HOST_WIDE_INT offset = 0;

      if (align >= 4)
	{
	  /* Word-aligned: copy SImode words, then mop up.  */
	  if ((count & ~3) == 4)
	    {
	      single_move_for_movmem (dst, src, SImode, offset);
	      offset = 4;
	    }
	  else if (count & ~3)
	    {
	      HOST_WIDE_INT new_count = ((count >> 2) & 0x3fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movsi (destreg, srcreg, countreg, destreg, srcreg));
	    }
	  if (count & 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset += 2;
	    }
	}
      else
	{
	  /* Halfword-aligned: copy HImode units.  */
	  if ((count & ~1) == 2)
	    {
	      single_move_for_movmem (dst, src, HImode, offset);
	      offset = 2;
	    }
	  else if (count & ~1)
	    {
	      HOST_WIDE_INT new_count = ((count >> 1) & 0x7fffffff) - 1;
	      countreg = copy_to_mode_reg (Pmode, GEN_INT (new_count));

	      emit_insn (gen_rep_movhi (destreg, srcreg, countreg, destreg, srcreg));
	    }
	}
      if (count & 1)
	{
	  single_move_for_movmem (dst, src, QImode, offset);
	}
      return true;
    }
  return false;
}
/* Implement TARGET_SCHED_ADJUST_COST: adjust the scheduling cost of the
   dependence between DEP_INSN and INSN connected by LINK.  Loads and moves
   into address registers incur extra latency before use.  */

static int
bfin_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
{
  enum attr_type insn_type, dep_insn_type;
  int dep_insn_code_number;

  /* Anti and output dependencies have zero cost.  */
  if (REG_NOTE_KIND (link) != 0)
    return 0;

  dep_insn_code_number = recog_memoized (dep_insn);

  /* If we can't recognize the insns, we can't really do anything.  */
  if (dep_insn_code_number < 0 || recog_memoized (insn) < 0)
    return cost;

  insn_type = get_attr_type (insn);
  dep_insn_type = get_attr_type (dep_insn);

  if (dep_insn_type == TYPE_MOVE || dep_insn_type == TYPE_MCLD)
    {
      rtx pat = PATTERN (dep_insn);
      rtx dest = SET_DEST (pat);
      rtx src = SET_SRC (pat);
      /* Only the D-reg -> address-reg case stalls.  */
      if (! ADDRESS_REGNO_P (REGNO (dest)) || ! D_REGNO_P (REGNO (src)))
	return cost;
      return cost + (dep_insn_type == TYPE_MOVE ? 4 : 3);
    }

  return cost;
}
2725 /* Increment the counter for the number of loop instructions in the
2726 current function. */
2729 bfin_hardware_loop (void)
2731 cfun
->machine
->has_hardware_loops
++;
2734 /* Maximum loop nesting depth. */
2735 #define MAX_LOOP_DEPTH 2
2737 /* Maximum size of a loop. */
2738 #define MAX_LOOP_LENGTH 2042
2740 /* We need to keep a vector of loops */
2741 typedef struct loop_info
*loop_info
;
2742 DEF_VEC_P (loop_info
);
2743 DEF_VEC_ALLOC_P (loop_info
,heap
);
2745 /* Information about a loop we have found (or are in the process of
2747 struct loop_info
GTY (())
2749 /* loop number, for dumps */
2752 /* Predecessor block of the loop. This is the one that falls into
2753 the loop and contains the initialization instruction. */
2754 basic_block predecessor
;
2756 /* First block in the loop. This is the one branched to by the loop_end
2760 /* Last block in the loop (the one with the loop_end insn). */
2763 /* The successor block of the loop. This is the one the loop_end insn
2765 basic_block successor
;
2767 /* The last instruction in the tail. */
2770 /* The loop_end insn. */
2773 /* The iteration register. */
2776 /* The new initialization insn. */
2779 /* The new initialization instruction. */
2782 /* The new label placed at the beginning of the loop. */
2785 /* The new label placed at the end of the loop. */
2788 /* The length of the loop. */
2791 /* The nesting depth of the loop. */
2794 /* Nonzero if we can't optimize this loop. */
2797 /* True if we have visited this loop. */
2800 /* True if this loop body clobbers any of LC0, LT0, or LB0. */
2803 /* True if this loop body clobbers any of LC1, LT1, or LB1. */
2806 /* Next loop in the graph. */
2807 struct loop_info
*next
;
2809 /* Immediate outer loop of this loop. */
2810 struct loop_info
*outer
;
2812 /* Vector of blocks only within the loop, including those within
2814 VEC (basic_block
,heap
) *blocks
;
2816 /* Same information in a bitmap. */
2817 bitmap block_bitmap
;
2819 /* Vector of inner loops within this loop */
2820 VEC (loop_info
,heap
) *loops
;
2824 bfin_dump_loops (loop_info loops
)
2828 for (loop
= loops
; loop
; loop
= loop
->next
)
2834 fprintf (dump_file
, ";; loop %d: ", loop
->loop_no
);
2836 fprintf (dump_file
, "(bad) ");
2837 fprintf (dump_file
, "{head:%d, depth:%d}", loop
->head
->index
, loop
->depth
);
2839 fprintf (dump_file
, " blocks: [ ");
2840 for (ix
= 0; VEC_iterate (basic_block
, loop
->blocks
, ix
, b
); ix
++)
2841 fprintf (dump_file
, "%d ", b
->index
);
2842 fprintf (dump_file
, "] ");
2844 fprintf (dump_file
, " inner loops: [ ");
2845 for (ix
= 0; VEC_iterate (loop_info
, loop
->loops
, ix
, i
); ix
++)
2846 fprintf (dump_file
, "%d ", i
->loop_no
);
2847 fprintf (dump_file
, "]\n");
2849 fprintf (dump_file
, "\n");
2852 /* Scan the blocks of LOOP (and its inferiors) looking for basic block
2853 BB. Return true, if we find it. */
2856 bfin_bb_in_loop (loop_info loop
, basic_block bb
)
2858 return bitmap_bit_p (loop
->block_bitmap
, bb
->index
);
2861 /* Scan the blocks of LOOP (and its inferiors) looking for uses of
2862 REG. Return true, if we find any. Don't count the loop's loop_end
2863 insn if it matches LOOP_END. */
2866 bfin_scan_loop (loop_info loop
, rtx reg
, rtx loop_end
)
2871 for (ix
= 0; VEC_iterate (basic_block
, loop
->blocks
, ix
, bb
); ix
++)
2875 for (insn
= BB_HEAD (bb
);
2876 insn
!= NEXT_INSN (BB_END (bb
));
2877 insn
= NEXT_INSN (insn
))
2881 if (insn
== loop_end
)
2883 if (reg_mentioned_p (reg
, PATTERN (insn
)))
2890 /* Optimize LOOP. */
2893 bfin_optimize_loop (loop_info loop
)
2897 rtx insn
, init_insn
, last_insn
, nop_insn
;
2898 rtx loop_init
, start_label
, end_label
;
2899 rtx reg_lc0
, reg_lc1
, reg_lt0
, reg_lt1
, reg_lb0
, reg_lb1
;
2901 rtx lc_reg
, lt_reg
, lb_reg
;
2905 int inner_depth
= 0;
2915 fprintf (dump_file
, ";; loop %d bad when found\n", loop
->loop_no
);
2919 /* Every loop contains in its list of inner loops every loop nested inside
2920 it, even if there are intermediate loops. This works because we're doing
2921 a depth-first search here and never visit a loop more than once. */
2922 for (ix
= 0; VEC_iterate (loop_info
, loop
->loops
, ix
, inner
); ix
++)
2924 bfin_optimize_loop (inner
);
2926 if (!inner
->bad
&& inner_depth
< inner
->depth
)
2928 inner_depth
= inner
->depth
;
2930 loop
->clobber_loop0
|= inner
->clobber_loop0
;
2931 loop
->clobber_loop1
|= inner
->clobber_loop1
;
2935 loop
->depth
= inner_depth
+ 1;
2936 if (loop
->depth
> MAX_LOOP_DEPTH
)
2939 fprintf (dump_file
, ";; loop %d too deep\n", loop
->loop_no
);
2943 /* Get the loop iteration register. */
2944 iter_reg
= loop
->iter_reg
;
2946 if (!DPREG_P (iter_reg
))
2949 fprintf (dump_file
, ";; loop %d iteration count NOT in PREG or DREG\n",
2954 /* Check if start_label appears before loop_end and calculate the
2955 offset between them. We calculate the length of instructions
2958 for (insn
= loop
->start_label
;
2959 insn
&& insn
!= loop
->loop_end
;
2960 insn
= NEXT_INSN (insn
))
2962 if (JUMP_P (insn
) && any_condjump_p (insn
) && !optimize_size
)
2964 if (TARGET_CSYNC_ANOMALY
)
2966 else if (TARGET_SPECLD_ANOMALY
)
2969 else if (LABEL_P (insn
))
2971 if (TARGET_CSYNC_ANOMALY
)
2976 length
+= get_attr_length (insn
);
2982 fprintf (dump_file
, ";; loop %d start_label not before loop_end\n",
2987 loop
->length
= length
;
2988 if (loop
->length
> MAX_LOOP_LENGTH
)
2991 fprintf (dump_file
, ";; loop %d too long\n", loop
->loop_no
);
2995 /* Scan all the blocks to make sure they don't use iter_reg. */
2996 if (bfin_scan_loop (loop
, iter_reg
, loop
->loop_end
))
2999 fprintf (dump_file
, ";; loop %d uses iterator\n", loop
->loop_no
);
3003 /* Scan all the insns to see if the loop body clobber
3004 any hardware loop registers. */
3006 reg_lc0
= gen_rtx_REG (SImode
, REG_LC0
);
3007 reg_lc1
= gen_rtx_REG (SImode
, REG_LC1
);
3008 reg_lt0
= gen_rtx_REG (SImode
, REG_LT0
);
3009 reg_lt1
= gen_rtx_REG (SImode
, REG_LT1
);
3010 reg_lb0
= gen_rtx_REG (SImode
, REG_LB0
);
3011 reg_lb1
= gen_rtx_REG (SImode
, REG_LB1
);
3013 for (ix
= 0; VEC_iterate (basic_block
, loop
->blocks
, ix
, bb
); ix
++)
3017 for (insn
= BB_HEAD (bb
);
3018 insn
!= NEXT_INSN (BB_END (bb
));
3019 insn
= NEXT_INSN (insn
))
3024 if (reg_set_p (reg_lc0
, insn
)
3025 || reg_set_p (reg_lt0
, insn
)
3026 || reg_set_p (reg_lb0
, insn
))
3027 loop
->clobber_loop0
= 1;
3029 if (reg_set_p (reg_lc1
, insn
)
3030 || reg_set_p (reg_lt1
, insn
)
3031 || reg_set_p (reg_lb1
, insn
))
3032 loop
->clobber_loop1
|= 1;
3036 if ((loop
->clobber_loop0
&& loop
->clobber_loop1
)
3037 || (loop
->depth
== MAX_LOOP_DEPTH
&& loop
->clobber_loop0
))
3039 loop
->depth
= MAX_LOOP_DEPTH
+ 1;
3041 fprintf (dump_file
, ";; loop %d no loop reg available\n",
3046 /* There should be an instruction before the loop_end instruction
3047 in the same basic block. And the instruction must not be
3049 - CONDITIONAL BRANCH
3053 - Returns (RTS, RTN, etc.) */
3056 last_insn
= PREV_INSN (loop
->loop_end
);
3060 for (; last_insn
!= PREV_INSN (BB_HEAD (bb
));
3061 last_insn
= PREV_INSN (last_insn
))
3062 if (INSN_P (last_insn
))
3065 if (last_insn
!= PREV_INSN (BB_HEAD (bb
)))
3068 if (single_pred_p (bb
)
3069 && single_pred (bb
) != ENTRY_BLOCK_PTR
)
3071 bb
= single_pred (bb
);
3072 last_insn
= BB_END (bb
);
3077 last_insn
= NULL_RTX
;
3085 fprintf (dump_file
, ";; loop %d has no last instruction\n",
3090 if (JUMP_P (last_insn
))
3092 loop_info inner
= bb
->aux
;
3094 && inner
->outer
== loop
3095 && inner
->loop_end
== last_insn
3096 && inner
->depth
== 1)
3097 /* This jump_insn is the exact loop_end of an inner loop
3098 and to be optimized away. So use the inner's last_insn. */
3099 last_insn
= inner
->last_insn
;
3103 fprintf (dump_file
, ";; loop %d has bad last instruction\n",
3108 else if (CALL_P (last_insn
)
3109 || get_attr_type (last_insn
) == TYPE_SYNC
3110 || recog_memoized (last_insn
) == CODE_FOR_return_internal
)
3113 fprintf (dump_file
, ";; loop %d has bad last instruction\n",
3118 if (GET_CODE (PATTERN (last_insn
)) == ASM_INPUT
3119 || asm_noperands (PATTERN (last_insn
)) >= 0
3120 || get_attr_seq_insns (last_insn
) == SEQ_INSNS_MULTI
)
3122 nop_insn
= emit_insn_after (gen_nop (), last_insn
);
3123 last_insn
= nop_insn
;
3126 loop
->last_insn
= last_insn
;
3128 /* The loop is good for replacement. */
3129 start_label
= loop
->start_label
;
3130 end_label
= gen_label_rtx ();
3131 iter_reg
= loop
->iter_reg
;
3133 if (loop
->depth
== 1 && !loop
->clobber_loop1
)
3138 loop
->clobber_loop1
= 1;
3145 loop
->clobber_loop0
= 1;
3148 /* If iter_reg is a DREG, we need generate an instruction to load
3149 the loop count into LC register. */
3150 if (D_REGNO_P (REGNO (iter_reg
)))
3152 init_insn
= gen_movsi (lc_reg
, iter_reg
);
3153 loop_init
= gen_lsetup_without_autoinit (lt_reg
, start_label
,
3157 else if (P_REGNO_P (REGNO (iter_reg
)))
3159 init_insn
= NULL_RTX
;
3160 loop_init
= gen_lsetup_with_autoinit (lt_reg
, start_label
,
3167 loop
->init
= init_insn
;
3168 loop
->end_label
= end_label
;
3169 loop
->loop_init
= loop_init
;
3173 fprintf (dump_file
, ";; replacing loop %d initializer with\n",
3175 print_rtl_single (dump_file
, loop
->loop_init
);
3176 fprintf (dump_file
, ";; replacing loop %d terminator with\n",
3178 print_rtl_single (dump_file
, loop
->loop_end
);
3183 if (loop
->init
!= NULL_RTX
)
3184 emit_insn (loop
->init
);
3185 emit_insn(loop
->loop_init
);
3186 emit_label (loop
->start_label
);
3191 emit_insn_after (seq
, BB_END (loop
->predecessor
));
3192 delete_insn (loop
->loop_end
);
3194 /* Insert the loop end label before the last instruction of the loop. */
3195 emit_label_before (loop
->end_label
, loop
->last_insn
);
3202 fprintf (dump_file
, ";; loop %d is bad\n", loop
->loop_no
);
3206 if (DPREG_P (loop
->iter_reg
))
3208 /* If loop->iter_reg is a DREG or PREG, we can split it here
3209 without scratch register. */
3212 emit_insn_before (gen_addsi3 (loop
->iter_reg
,
3217 emit_insn_before (gen_cmpsi (loop
->iter_reg
, const0_rtx
),
3220 insn
= emit_jump_insn_before (gen_bne (loop
->start_label
),
3223 JUMP_LABEL (insn
) = loop
->start_label
;
3224 LABEL_NUSES (loop
->start_label
)++;
3225 delete_insn (loop
->loop_end
);
3229 /* Called from bfin_reorg_loops when a potential loop end is found. LOOP is
3230 a newly set up structure describing the loop, it is this function's
3231 responsibility to fill most of it. TAIL_BB and TAIL_INSN point to the
3232 loop_end insn and its enclosing basic block. */
3235 bfin_discover_loop (loop_info loop
, basic_block tail_bb
, rtx tail_insn
)
3239 VEC (basic_block
,heap
) *works
= VEC_alloc (basic_block
,heap
,20);
3241 loop
->tail
= tail_bb
;
3242 loop
->head
= BRANCH_EDGE (tail_bb
)->dest
;
3243 loop
->successor
= FALLTHRU_EDGE (tail_bb
)->dest
;
3244 loop
->predecessor
= NULL
;
3245 loop
->loop_end
= tail_insn
;
3246 loop
->last_insn
= NULL_RTX
;
3247 loop
->iter_reg
= SET_DEST (XVECEXP (PATTERN (tail_insn
), 0, 1));
3248 loop
->depth
= loop
->length
= 0;
3250 loop
->clobber_loop0
= loop
->clobber_loop1
= 0;
3254 loop
->init
= loop
->loop_init
= NULL_RTX
;
3255 loop
->start_label
= XEXP (XEXP (SET_SRC (XVECEXP (PATTERN (tail_insn
), 0, 0)), 1), 0);
3256 loop
->end_label
= NULL_RTX
;
3259 VEC_safe_push (basic_block
, heap
, works
, loop
->head
);
3261 while (VEC_iterate (basic_block
, works
, dwork
++, bb
))
3265 if (bb
== EXIT_BLOCK_PTR
)
3267 /* We've reached the exit block. The loop must be bad. */
3270 ";; Loop is bad - reached exit block while scanning\n");
3275 if (bitmap_bit_p (loop
->block_bitmap
, bb
->index
))
3278 /* We've not seen this block before. Add it to the loop's
3279 list and then add each successor to the work list. */
3281 VEC_safe_push (basic_block
, heap
, loop
->blocks
, bb
);
3282 bitmap_set_bit (loop
->block_bitmap
, bb
->index
);
3286 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3288 basic_block succ
= EDGE_SUCC (bb
, ei
.index
)->dest
;
3289 if (!REGNO_REG_SET_P (succ
->il
.rtl
->global_live_at_start
,
3290 REGNO (loop
->iter_reg
)))
3292 if (!VEC_space (basic_block
, works
, 1))
3296 VEC_block_remove (basic_block
, works
, 0, dwork
);
3300 VEC_reserve (basic_block
, heap
, works
, 1);
3302 VEC_quick_push (basic_block
, works
, succ
);
3309 /* Make sure we only have one entry point. */
3310 if (EDGE_COUNT (loop
->head
->preds
) == 2)
3312 loop
->predecessor
= EDGE_PRED (loop
->head
, 0)->src
;
3313 if (loop
->predecessor
== loop
->tail
)
3314 /* We wanted the other predecessor. */
3315 loop
->predecessor
= EDGE_PRED (loop
->head
, 1)->src
;
3317 /* We can only place a loop insn on a fall through edge of a
3318 single exit block. */
3319 if (EDGE_COUNT (loop
->predecessor
->succs
) != 1
3320 || !(EDGE_SUCC (loop
->predecessor
, 0)->flags
& EDGE_FALLTHRU
)
3321 /* If loop->predecessor is in loop, loop->head is not really
3322 the head of the loop. */
3323 || bfin_bb_in_loop (loop
, loop
->predecessor
))
3324 loop
->predecessor
= NULL
;
3327 if (loop
->predecessor
== NULL
)
3330 fprintf (dump_file
, ";; loop has bad predecessor\n");
3335 #ifdef ENABLE_CHECKING
3336 /* Make sure nothing jumps into this loop. This shouldn't happen as we
3337 wouldn't have generated the counted loop patterns in such a case.
3338 However, this test must be done after the test above to detect loops
3339 with invalid headers. */
3341 for (dwork
= 0; VEC_iterate (basic_block
, loop
->blocks
, dwork
, bb
); dwork
++)
3345 if (bb
== loop
->head
)
3347 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
3349 basic_block pred
= EDGE_PRED (bb
, ei
.index
)->src
;
3350 if (!bfin_bb_in_loop (loop
, pred
))
3355 VEC_free (basic_block
, heap
, works
);
3359 bfin_reorg_loops (FILE *dump_file
)
3361 bitmap_obstack stack
;
3364 loop_info loops
= NULL
;
3368 bitmap_obstack_initialize (&stack
);
3370 /* Find all the possible loop tails. This means searching for every
3371 loop_end instruction. For each one found, create a loop_info
3372 structure and add the head block to the work list. */
3375 rtx tail
= BB_END (bb
);
3377 while (GET_CODE (tail
) == NOTE
)
3378 tail
= PREV_INSN (tail
);
3382 if (INSN_P (tail
) && recog_memoized (tail
) == CODE_FOR_loop_end
)
3384 /* A possible loop end */
3386 loop
= XNEW (struct loop_info
);
3389 loop
->loop_no
= nloops
++;
3390 loop
->blocks
= VEC_alloc (basic_block
, heap
, 20);
3391 loop
->block_bitmap
= BITMAP_ALLOC (&stack
);
3396 fprintf (dump_file
, ";; potential loop %d ending at\n",
3398 print_rtl_single (dump_file
, tail
);
3401 bfin_discover_loop (loop
, bb
, tail
);
3405 tmp_bitmap
= BITMAP_ALLOC (&stack
);
3406 /* Compute loop nestings. */
3407 for (loop
= loops
; loop
; loop
= loop
->next
)
3413 for (other
= loop
->next
; other
; other
= other
->next
)
3418 bitmap_and (tmp_bitmap
, other
->block_bitmap
, loop
->block_bitmap
);
3419 if (bitmap_empty_p (tmp_bitmap
))
3421 if (bitmap_equal_p (tmp_bitmap
, other
->block_bitmap
))
3423 other
->outer
= loop
;
3424 VEC_safe_push (loop_info
, heap
, loop
->loops
, other
);
3426 else if (bitmap_equal_p (tmp_bitmap
, loop
->block_bitmap
))
3428 loop
->outer
= other
;
3429 VEC_safe_push (loop_info
, heap
, other
->loops
, loop
);
3433 loop
->bad
= other
->bad
= 1;
3437 BITMAP_FREE (tmp_bitmap
);
3441 fprintf (dump_file
, ";; All loops found:\n\n");
3442 bfin_dump_loops (loops
);
3445 /* Now apply the optimizations. */
3446 for (loop
= loops
; loop
; loop
= loop
->next
)
3447 bfin_optimize_loop (loop
);
3451 fprintf (dump_file
, ";; After hardware loops optimization:\n\n");
3452 bfin_dump_loops (loops
);
3455 /* Free up the loop structures */
3460 VEC_free (loop_info
, heap
, loop
->loops
);
3461 VEC_free (basic_block
, heap
, loop
->blocks
);
3462 BITMAP_FREE (loop
->block_bitmap
);
3467 print_rtl (dump_file
, get_insns ());
3471 /* We use the machine specific reorg pass for emitting CSYNC instructions
3472 after conditional branches as needed.
3474 The Blackfin is unusual in that a code sequence like
3477 may speculatively perform the load even if the condition isn't true. This
3478 happens for a branch that is predicted not taken, because the pipeline
3479 isn't flushed or stalled, so the early stages of the following instructions,
3480 which perform the memory reference, are allowed to execute before the
3481 jump condition is evaluated.
3482 Therefore, we must insert additional instructions in all places where this
3483 could lead to incorrect behavior. The manual recommends CSYNC, while
3484 VDSP seems to use NOPs (even though its corresponding compiler option is
3487 When optimizing for speed, we emit NOPs, which seems faster than a CSYNC.
3488 When optimizing for size, we turn the branch into a predicted taken one.
3489 This may be slower due to mispredicts, but saves code size. */
3494 rtx insn
, last_condjump
= NULL_RTX
;
3495 int cycles_since_jump
= INT_MAX
;
3497 /* Doloop optimization */
3498 if (cfun
->machine
->has_hardware_loops
)
3499 bfin_reorg_loops (dump_file
);
3501 if (! TARGET_SPECLD_ANOMALY
&& ! TARGET_CSYNC_ANOMALY
)
3504 /* First pass: find predicted-false branches; if something after them
3505 needs nops, insert them or change the branch to predict true. */
3506 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3510 if (NOTE_P (insn
) || BARRIER_P (insn
) || LABEL_P (insn
))
3513 pat
= PATTERN (insn
);
3514 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
3515 || GET_CODE (pat
) == ASM_INPUT
|| GET_CODE (pat
) == ADDR_VEC
3516 || GET_CODE (pat
) == ADDR_DIFF_VEC
|| asm_noperands (pat
) >= 0)
3521 if (any_condjump_p (insn
)
3522 && ! cbranch_predicted_taken_p (insn
))
3524 last_condjump
= insn
;
3525 cycles_since_jump
= 0;
3528 cycles_since_jump
= INT_MAX
;
3530 else if (INSN_P (insn
))
3532 enum attr_type type
= get_attr_type (insn
);
3533 int delay_needed
= 0;
3534 if (cycles_since_jump
< INT_MAX
)
3535 cycles_since_jump
++;
3537 if (type
== TYPE_MCLD
&& TARGET_SPECLD_ANOMALY
)
3539 rtx pat
= single_set (insn
);
3540 if (may_trap_p (SET_SRC (pat
)))
3543 else if (type
== TYPE_SYNC
&& TARGET_CSYNC_ANOMALY
)
3546 if (delay_needed
> cycles_since_jump
)
3550 rtx
*op
= recog_data
.operand
;
3552 delay_needed
-= cycles_since_jump
;
3554 extract_insn (last_condjump
);
3557 pat
= gen_cbranch_predicted_taken (op
[0], op
[1], op
[2],
3559 cycles_since_jump
= INT_MAX
;
3562 /* Do not adjust cycles_since_jump in this case, so that
3563 we'll increase the number of NOPs for a subsequent insn
3565 pat
= gen_cbranch_with_nops (op
[0], op
[1], op
[2], op
[3],
3566 GEN_INT (delay_needed
));
3567 PATTERN (last_condjump
) = pat
;
3568 INSN_CODE (last_condjump
) = recog (pat
, insn
, &num_clobbers
);
3572 /* Second pass: for predicted-true branches, see if anything at the
3573 branch destination needs extra nops. */
3574 if (! TARGET_CSYNC_ANOMALY
)
3577 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
3580 && any_condjump_p (insn
)
3581 && (INSN_CODE (insn
) == CODE_FOR_cbranch_predicted_taken
3582 || cbranch_predicted_taken_p (insn
)))
3584 rtx target
= JUMP_LABEL (insn
);
3586 cycles_since_jump
= 0;
3587 for (; target
&& cycles_since_jump
< 3; target
= NEXT_INSN (target
))
3591 if (NOTE_P (target
) || BARRIER_P (target
) || LABEL_P (target
))
3594 pat
= PATTERN (target
);
3595 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
3596 || GET_CODE (pat
) == ASM_INPUT
|| GET_CODE (pat
) == ADDR_VEC
3597 || GET_CODE (pat
) == ADDR_DIFF_VEC
|| asm_noperands (pat
) >= 0)
3600 if (INSN_P (target
))
3602 enum attr_type type
= get_attr_type (target
);
3603 int delay_needed
= 0;
3604 if (cycles_since_jump
< INT_MAX
)
3605 cycles_since_jump
++;
3607 if (type
== TYPE_SYNC
&& TARGET_CSYNC_ANOMALY
)
3610 if (delay_needed
> cycles_since_jump
)
3612 rtx prev
= prev_real_insn (label
);
3613 delay_needed
-= cycles_since_jump
;
3615 fprintf (dump_file
, "Adding %d nops after %d\n",
3616 delay_needed
, INSN_UID (label
));
3618 && INSN_CODE (prev
) == CODE_FOR_cbranch_with_nops
)
3625 "Reducing nops on insn %d.\n",
3628 x
= XVECEXP (x
, 0, 1);
3629 v
= INTVAL (XVECEXP (x
, 0, 0)) - delay_needed
;
3630 XVECEXP (x
, 0, 0) = GEN_INT (v
);
3632 while (delay_needed
-- > 0)
3633 emit_insn_after (gen_nop (), label
);
3642 /* Handle interrupt_handler, exception_handler and nmi_handler function
3643 attributes; arguments as in struct attribute_spec.handler. */
3646 handle_int_attribute (tree
*node
, tree name
,
3647 tree args ATTRIBUTE_UNUSED
,
3648 int flags ATTRIBUTE_UNUSED
,
3652 if (TREE_CODE (x
) == FUNCTION_DECL
)
3655 if (TREE_CODE (x
) != FUNCTION_TYPE
)
3657 warning (OPT_Wattributes
, "%qs attribute only applies to functions",
3658 IDENTIFIER_POINTER (name
));
3659 *no_add_attrs
= true;
3661 else if (funkind (x
) != SUBROUTINE
)
3662 error ("multiple function type attributes specified");
3667 /* Return 0 if the attributes for two types are incompatible, 1 if they
3668 are compatible, and 2 if they are nearly compatible (which causes a
3669 warning to be generated). */
3672 bfin_comp_type_attributes (tree type1
, tree type2
)
3674 e_funkind kind1
, kind2
;
3676 if (TREE_CODE (type1
) != FUNCTION_TYPE
)
3679 kind1
= funkind (type1
);
3680 kind2
= funkind (type2
);
3685 /* Check for mismatched modifiers */
3686 if (!lookup_attribute ("nesting", TYPE_ATTRIBUTES (type1
))
3687 != !lookup_attribute ("nesting", TYPE_ATTRIBUTES (type2
)))
3690 if (!lookup_attribute ("saveall", TYPE_ATTRIBUTES (type1
))
3691 != !lookup_attribute ("saveall", TYPE_ATTRIBUTES (type2
)))
3694 if (!lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type1
))
3695 != !lookup_attribute ("kspisusp", TYPE_ATTRIBUTES (type2
)))
3698 if (!lookup_attribute ("longcall", TYPE_ATTRIBUTES (type1
))
3699 != !lookup_attribute ("longcall", TYPE_ATTRIBUTES (type2
)))
3705 /* Handle a "longcall" or "shortcall" attribute; arguments as in
3706 struct attribute_spec.handler. */
3709 bfin_handle_longcall_attribute (tree
*node
, tree name
,
3710 tree args ATTRIBUTE_UNUSED
,
3711 int flags ATTRIBUTE_UNUSED
,
3714 if (TREE_CODE (*node
) != FUNCTION_TYPE
3715 && TREE_CODE (*node
) != FIELD_DECL
3716 && TREE_CODE (*node
) != TYPE_DECL
)
3718 warning (OPT_Wattributes
, "`%s' attribute only applies to functions",
3719 IDENTIFIER_POINTER (name
));
3720 *no_add_attrs
= true;
3723 if ((strcmp (IDENTIFIER_POINTER (name
), "longcall") == 0
3724 && lookup_attribute ("shortcall", TYPE_ATTRIBUTES (*node
)))
3725 || (strcmp (IDENTIFIER_POINTER (name
), "shortcall") == 0
3726 && lookup_attribute ("longcall", TYPE_ATTRIBUTES (*node
))))
3728 warning (OPT_Wattributes
,
3729 "can't apply both longcall and shortcall attributes to the same function");
3730 *no_add_attrs
= true;
3736 /* Table of valid machine attributes. */
3737 const struct attribute_spec bfin_attribute_table
[] =
3739 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
3740 { "interrupt_handler", 0, 0, false, true, true, handle_int_attribute
},
3741 { "exception_handler", 0, 0, false, true, true, handle_int_attribute
},
3742 { "nmi_handler", 0, 0, false, true, true, handle_int_attribute
},
3743 { "nesting", 0, 0, false, true, true, NULL
},
3744 { "kspisusp", 0, 0, false, true, true, NULL
},
3745 { "saveall", 0, 0, false, true, true, NULL
},
3746 { "longcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
},
3747 { "shortcall", 0, 0, false, true, true, bfin_handle_longcall_attribute
},
3748 { NULL
, 0, 0, false, false, false, NULL
}
3751 /* Implementation of TARGET_ASM_INTEGER. When using FD-PIC, we need to
3752 tell the assembler to generate pointers to function descriptors in
3756 bfin_assemble_integer (rtx value
, unsigned int size
, int aligned_p
)
3758 if (TARGET_FDPIC
&& size
== UNITS_PER_WORD
)
3760 if (GET_CODE (value
) == SYMBOL_REF
3761 && SYMBOL_REF_FUNCTION_P (value
))
3763 fputs ("\t.picptr\tfuncdesc(", asm_out_file
);
3764 output_addr_const (asm_out_file
, value
);
3765 fputs (")\n", asm_out_file
);
3770 /* We've set the unaligned SI op to NULL, so we always have to
3771 handle the unaligned case here. */
3772 assemble_integer_with_op ("\t.4byte\t", value
);
3776 return default_assemble_integer (value
, size
, aligned_p
);
3779 /* Output the assembler code for a thunk function. THUNK_DECL is the
3780 declaration for the thunk function itself, FUNCTION is the decl for
3781 the target function. DELTA is an immediate constant offset to be
3782 added to THIS. If VCALL_OFFSET is nonzero, the word at
3783 *(*this + vcall_offset) should be added to THIS. */
3786 bfin_output_mi_thunk (FILE *file ATTRIBUTE_UNUSED
,
3787 tree thunk ATTRIBUTE_UNUSED
, HOST_WIDE_INT delta
,
3788 HOST_WIDE_INT vcall_offset
, tree function
)
3791 /* The this parameter is passed as the first argument. */
3792 rtx
this = gen_rtx_REG (Pmode
, REG_R0
);
3794 /* Adjust the this parameter by a fixed constant. */
3798 if (delta
>= -64 && delta
<= 63)
3800 xops
[0] = GEN_INT (delta
);
3801 output_asm_insn ("%1 += %0;", xops
);
3803 else if (delta
>= -128 && delta
< -64)
3805 xops
[0] = GEN_INT (delta
+ 64);
3806 output_asm_insn ("%1 += -64; %1 += %0;", xops
);
3808 else if (delta
> 63 && delta
<= 126)
3810 xops
[0] = GEN_INT (delta
- 63);
3811 output_asm_insn ("%1 += 63; %1 += %0;", xops
);
3815 xops
[0] = GEN_INT (delta
);
3816 output_asm_insn ("r3.l = %h0; r3.h = %d0; %1 = %1 + r3;", xops
);
3820 /* Adjust the this parameter by a value stored in the vtable. */
3823 rtx p2tmp
= gen_rtx_REG (Pmode
, REG_P2
);
3824 rtx tmp
= gen_rtx_REG (Pmode
, REG_R2
);
3828 output_asm_insn ("%2 = r0; %2 = [%2];", xops
);
3830 /* Adjust the this parameter. */
3831 xops
[0] = gen_rtx_MEM (Pmode
, plus_constant (p2tmp
, vcall_offset
));
3832 if (!memory_operand (xops
[0], Pmode
))
3834 rtx tmp2
= gen_rtx_REG (Pmode
, REG_P1
);
3835 xops
[0] = GEN_INT (vcall_offset
);
3837 output_asm_insn ("%h1 = %h0; %d1 = %d0; %2 = %2 + %1", xops
);
3838 xops
[0] = gen_rtx_MEM (Pmode
, p2tmp
);
3841 output_asm_insn ("%1 = %0; %2 = %2 + %1;", xops
);
3844 xops
[0] = XEXP (DECL_RTL (function
), 0);
3845 if (1 || !flag_pic
|| (*targetm
.binds_local_p
) (function
))
3846 output_asm_insn ("jump.l\t%P0", xops
);
3849 /* Codes for all the Blackfin builtins. */
3854 BFIN_BUILTIN_COMPOSE_2X16
,
3855 BFIN_BUILTIN_EXTRACTLO
,
3856 BFIN_BUILTIN_EXTRACTHI
,
3858 BFIN_BUILTIN_SSADD_2X16
,
3859 BFIN_BUILTIN_SSSUB_2X16
,
3860 BFIN_BUILTIN_SSADDSUB_2X16
,
3861 BFIN_BUILTIN_SSSUBADD_2X16
,
3862 BFIN_BUILTIN_MULT_2X16
,
3863 BFIN_BUILTIN_MULTR_2X16
,
3864 BFIN_BUILTIN_NEG_2X16
,
3865 BFIN_BUILTIN_ABS_2X16
,
3866 BFIN_BUILTIN_MIN_2X16
,
3867 BFIN_BUILTIN_MAX_2X16
,
3869 BFIN_BUILTIN_SSADD_1X16
,
3870 BFIN_BUILTIN_SSSUB_1X16
,
3871 BFIN_BUILTIN_MULT_1X16
,
3872 BFIN_BUILTIN_MULTR_1X16
,
3873 BFIN_BUILTIN_NORM_1X16
,
3874 BFIN_BUILTIN_NEG_1X16
,
3875 BFIN_BUILTIN_ABS_1X16
,
3876 BFIN_BUILTIN_MIN_1X16
,
3877 BFIN_BUILTIN_MAX_1X16
,
3879 BFIN_BUILTIN_DIFFHL_2X16
,
3880 BFIN_BUILTIN_DIFFLH_2X16
,
3882 BFIN_BUILTIN_SSADD_1X32
,
3883 BFIN_BUILTIN_SSSUB_1X32
,
3884 BFIN_BUILTIN_NORM_1X32
,
3885 BFIN_BUILTIN_NEG_1X32
,
3886 BFIN_BUILTIN_MIN_1X32
,
3887 BFIN_BUILTIN_MAX_1X32
,
3888 BFIN_BUILTIN_MULT_1X32
,
3890 BFIN_BUILTIN_MULHISILL
,
3891 BFIN_BUILTIN_MULHISILH
,
3892 BFIN_BUILTIN_MULHISIHL
,
3893 BFIN_BUILTIN_MULHISIHH
,
3895 BFIN_BUILTIN_LSHIFT_1X16
,
3896 BFIN_BUILTIN_LSHIFT_2X16
,
3897 BFIN_BUILTIN_SSASHIFT_1X16
,
3898 BFIN_BUILTIN_SSASHIFT_2X16
,
3900 BFIN_BUILTIN_CPLX_MUL_16
,
3901 BFIN_BUILTIN_CPLX_MAC_16
,
3902 BFIN_BUILTIN_CPLX_MSU_16
,
3907 #define def_builtin(NAME, TYPE, CODE) \
3909 add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD, \
3913 /* Set up all builtin functions for this target. */
3915 bfin_init_builtins (void)
3917 tree V2HI_type_node
= build_vector_type_for_mode (intHI_type_node
, V2HImode
);
3918 tree void_ftype_void
3919 = build_function_type (void_type_node
, void_list_node
);
3920 tree short_ftype_short
3921 = build_function_type_list (short_integer_type_node
, short_integer_type_node
,
3923 tree short_ftype_int_int
3924 = build_function_type_list (short_integer_type_node
, integer_type_node
,
3925 integer_type_node
, NULL_TREE
);
3926 tree int_ftype_int_int
3927 = build_function_type_list (integer_type_node
, integer_type_node
,
3928 integer_type_node
, NULL_TREE
);
3930 = build_function_type_list (integer_type_node
, integer_type_node
,
3932 tree short_ftype_int
3933 = build_function_type_list (short_integer_type_node
, integer_type_node
,
3935 tree int_ftype_v2hi_v2hi
3936 = build_function_type_list (integer_type_node
, V2HI_type_node
,
3937 V2HI_type_node
, NULL_TREE
);
3938 tree v2hi_ftype_v2hi_v2hi
3939 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
3940 V2HI_type_node
, NULL_TREE
);
3941 tree v2hi_ftype_v2hi_v2hi_v2hi
3942 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
3943 V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
3944 tree v2hi_ftype_int_int
3945 = build_function_type_list (V2HI_type_node
, integer_type_node
,
3946 integer_type_node
, NULL_TREE
);
3947 tree v2hi_ftype_v2hi_int
3948 = build_function_type_list (V2HI_type_node
, V2HI_type_node
,
3949 integer_type_node
, NULL_TREE
);
3950 tree int_ftype_short_short
3951 = build_function_type_list (integer_type_node
, short_integer_type_node
,
3952 short_integer_type_node
, NULL_TREE
);
3953 tree v2hi_ftype_v2hi
3954 = build_function_type_list (V2HI_type_node
, V2HI_type_node
, NULL_TREE
);
3955 tree short_ftype_v2hi
3956 = build_function_type_list (short_integer_type_node
, V2HI_type_node
,
3959 /* Add the remaining MMX insns with somewhat more complicated types. */
3960 def_builtin ("__builtin_bfin_csync", void_ftype_void
, BFIN_BUILTIN_CSYNC
);
3961 def_builtin ("__builtin_bfin_ssync", void_ftype_void
, BFIN_BUILTIN_SSYNC
);
3963 def_builtin ("__builtin_bfin_compose_2x16", v2hi_ftype_int_int
,
3964 BFIN_BUILTIN_COMPOSE_2X16
);
3965 def_builtin ("__builtin_bfin_extract_hi", short_ftype_v2hi
,
3966 BFIN_BUILTIN_EXTRACTHI
);
3967 def_builtin ("__builtin_bfin_extract_lo", short_ftype_v2hi
,
3968 BFIN_BUILTIN_EXTRACTLO
);
3970 def_builtin ("__builtin_bfin_min_fr2x16", v2hi_ftype_v2hi_v2hi
,
3971 BFIN_BUILTIN_MIN_2X16
);
3972 def_builtin ("__builtin_bfin_max_fr2x16", v2hi_ftype_v2hi_v2hi
,
3973 BFIN_BUILTIN_MAX_2X16
);
3975 def_builtin ("__builtin_bfin_add_fr2x16", v2hi_ftype_v2hi_v2hi
,
3976 BFIN_BUILTIN_SSADD_2X16
);
3977 def_builtin ("__builtin_bfin_sub_fr2x16", v2hi_ftype_v2hi_v2hi
,
3978 BFIN_BUILTIN_SSSUB_2X16
);
3979 def_builtin ("__builtin_bfin_dspaddsubsat", v2hi_ftype_v2hi_v2hi
,
3980 BFIN_BUILTIN_SSADDSUB_2X16
);
3981 def_builtin ("__builtin_bfin_dspsubaddsat", v2hi_ftype_v2hi_v2hi
,
3982 BFIN_BUILTIN_SSSUBADD_2X16
);
3983 def_builtin ("__builtin_bfin_mult_fr2x16", v2hi_ftype_v2hi_v2hi
,
3984 BFIN_BUILTIN_MULT_2X16
);
3985 def_builtin ("__builtin_bfin_multr_fr2x16", v2hi_ftype_v2hi_v2hi
,
3986 BFIN_BUILTIN_MULTR_2X16
);
3987 def_builtin ("__builtin_bfin_negate_fr2x16", v2hi_ftype_v2hi
,
3988 BFIN_BUILTIN_NEG_2X16
);
3989 def_builtin ("__builtin_bfin_abs_fr2x16", v2hi_ftype_v2hi
,
3990 BFIN_BUILTIN_ABS_2X16
);
3992 def_builtin ("__builtin_bfin_add_fr1x16", short_ftype_int_int
,
3993 BFIN_BUILTIN_SSADD_1X16
);
3994 def_builtin ("__builtin_bfin_sub_fr1x16", short_ftype_int_int
,
3995 BFIN_BUILTIN_SSSUB_1X16
);
3996 def_builtin ("__builtin_bfin_mult_fr1x16", short_ftype_int_int
,
3997 BFIN_BUILTIN_MULT_1X16
);
3998 def_builtin ("__builtin_bfin_multr_fr1x16", short_ftype_int_int
,
3999 BFIN_BUILTIN_MULTR_1X16
);
4000 def_builtin ("__builtin_bfin_negate_fr1x16", short_ftype_short
,
4001 BFIN_BUILTIN_NEG_1X16
);
4002 def_builtin ("__builtin_bfin_abs_fr1x16", short_ftype_short
,
4003 BFIN_BUILTIN_ABS_1X16
);
4004 def_builtin ("__builtin_bfin_norm_fr1x16", short_ftype_int
,
4005 BFIN_BUILTIN_NORM_1X16
);
4007 def_builtin ("__builtin_bfin_diff_hl_fr2x16", short_ftype_v2hi
,
4008 BFIN_BUILTIN_DIFFHL_2X16
);
4009 def_builtin ("__builtin_bfin_diff_lh_fr2x16", short_ftype_v2hi
,
4010 BFIN_BUILTIN_DIFFLH_2X16
);
4012 def_builtin ("__builtin_bfin_mulhisill", int_ftype_v2hi_v2hi
,
4013 BFIN_BUILTIN_MULHISILL
);
4014 def_builtin ("__builtin_bfin_mulhisihl", int_ftype_v2hi_v2hi
,
4015 BFIN_BUILTIN_MULHISIHL
);
4016 def_builtin ("__builtin_bfin_mulhisilh", int_ftype_v2hi_v2hi
,
4017 BFIN_BUILTIN_MULHISILH
);
4018 def_builtin ("__builtin_bfin_mulhisihh", int_ftype_v2hi_v2hi
,
4019 BFIN_BUILTIN_MULHISIHH
);
4021 def_builtin ("__builtin_bfin_add_fr1x32", int_ftype_int_int
,
4022 BFIN_BUILTIN_SSADD_1X32
);
4023 def_builtin ("__builtin_bfin_sub_fr1x32", int_ftype_int_int
,
4024 BFIN_BUILTIN_SSSUB_1X32
);
4025 def_builtin ("__builtin_bfin_negate_fr1x32", int_ftype_int
,
4026 BFIN_BUILTIN_NEG_1X32
);
4027 def_builtin ("__builtin_bfin_norm_fr1x32", short_ftype_int
,
4028 BFIN_BUILTIN_NORM_1X32
);
4029 def_builtin ("__builtin_bfin_mult_fr1x32", int_ftype_short_short
,
4030 BFIN_BUILTIN_MULT_1X32
);
4033 def_builtin ("__builtin_bfin_shl_fr1x16", short_ftype_int_int
,
4034 BFIN_BUILTIN_SSASHIFT_1X16
);
4035 def_builtin ("__builtin_bfin_shl_fr2x16", v2hi_ftype_v2hi_int
,
4036 BFIN_BUILTIN_SSASHIFT_2X16
);
4037 def_builtin ("__builtin_bfin_lshl_fr1x16", short_ftype_int_int
,
4038 BFIN_BUILTIN_LSHIFT_1X16
);
4039 def_builtin ("__builtin_bfin_lshl_fr2x16", v2hi_ftype_v2hi_int
,
4040 BFIN_BUILTIN_LSHIFT_2X16
);
4042 /* Complex numbers. */
4043 def_builtin ("__builtin_bfin_cmplx_mul", v2hi_ftype_v2hi_v2hi
,
4044 BFIN_BUILTIN_CPLX_MUL_16
);
4045 def_builtin ("__builtin_bfin_cmplx_mac", v2hi_ftype_v2hi_v2hi_v2hi
,
4046 BFIN_BUILTIN_CPLX_MAC_16
);
4047 def_builtin ("__builtin_bfin_cmplx_msu", v2hi_ftype_v2hi_v2hi_v2hi
,
4048 BFIN_BUILTIN_CPLX_MSU_16
);
4052 struct builtin_description
4054 const enum insn_code icode
;
4055 const char *const name
;
4056 const enum bfin_builtins code
;
4060 static const struct builtin_description bdesc_2arg
[] =
4062 { CODE_FOR_composev2hi
, "__builtin_bfin_compose_2x16", BFIN_BUILTIN_COMPOSE_2X16
, -1 },
4064 { CODE_FOR_ssashiftv2hi3
, "__builtin_bfin_shl_fr2x16", BFIN_BUILTIN_SSASHIFT_2X16
, -1 },
4065 { CODE_FOR_ssashifthi3
, "__builtin_bfin_shl_fr1x16", BFIN_BUILTIN_SSASHIFT_1X16
, -1 },
4066 { CODE_FOR_lshiftv2hi3
, "__builtin_bfin_lshl_fr2x16", BFIN_BUILTIN_LSHIFT_2X16
, -1 },
4067 { CODE_FOR_lshifthi3
, "__builtin_bfin_lshl_fr1x16", BFIN_BUILTIN_LSHIFT_1X16
, -1 },
4069 { CODE_FOR_sminhi3
, "__builtin_bfin_min_fr1x16", BFIN_BUILTIN_MIN_1X16
, -1 },
4070 { CODE_FOR_smaxhi3
, "__builtin_bfin_max_fr1x16", BFIN_BUILTIN_MAX_1X16
, -1 },
4071 { CODE_FOR_ssaddhi3
, "__builtin_bfin_add_fr1x16", BFIN_BUILTIN_SSADD_1X16
, -1 },
4072 { CODE_FOR_sssubhi3
, "__builtin_bfin_sub_fr1x16", BFIN_BUILTIN_SSSUB_1X16
, -1 },
4074 { CODE_FOR_sminsi3
, "__builtin_bfin_min_fr1x32", BFIN_BUILTIN_MIN_1X32
, -1 },
4075 { CODE_FOR_smaxsi3
, "__builtin_bfin_max_fr1x32", BFIN_BUILTIN_MAX_1X32
, -1 },
4076 { CODE_FOR_ssaddsi3
, "__builtin_bfin_add_fr1x32", BFIN_BUILTIN_SSADD_1X32
, -1 },
4077 { CODE_FOR_sssubsi3
, "__builtin_bfin_sub_fr1x32", BFIN_BUILTIN_SSSUB_1X32
, -1 },
4079 { CODE_FOR_sminv2hi3
, "__builtin_bfin_min_fr2x16", BFIN_BUILTIN_MIN_2X16
, -1 },
4080 { CODE_FOR_smaxv2hi3
, "__builtin_bfin_max_fr2x16", BFIN_BUILTIN_MAX_2X16
, -1 },
4081 { CODE_FOR_ssaddv2hi3
, "__builtin_bfin_add_fr2x16", BFIN_BUILTIN_SSADD_2X16
, -1 },
4082 { CODE_FOR_sssubv2hi3
, "__builtin_bfin_sub_fr2x16", BFIN_BUILTIN_SSSUB_2X16
, -1 },
4083 { CODE_FOR_ssaddsubv2hi3
, "__builtin_bfin_dspaddsubsat", BFIN_BUILTIN_SSADDSUB_2X16
, -1 },
4084 { CODE_FOR_sssubaddv2hi3
, "__builtin_bfin_dspsubaddsat", BFIN_BUILTIN_SSSUBADD_2X16
, -1 },
4086 { CODE_FOR_flag_mulhisi
, "__builtin_bfin_mult_fr1x32", BFIN_BUILTIN_MULT_1X32
, MACFLAG_NONE
},
4087 { CODE_FOR_flag_mulhi
, "__builtin_bfin_mult_fr1x16", BFIN_BUILTIN_MULT_1X16
, MACFLAG_T
},
4088 { CODE_FOR_flag_mulhi
, "__builtin_bfin_multr_fr1x16", BFIN_BUILTIN_MULTR_1X16
, MACFLAG_NONE
},
4089 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_mult_fr2x16", BFIN_BUILTIN_MULT_2X16
, MACFLAG_T
},
4090 { CODE_FOR_flag_mulv2hi
, "__builtin_bfin_multr_fr2x16", BFIN_BUILTIN_MULTR_2X16
, MACFLAG_NONE
}
4093 static const struct builtin_description bdesc_1arg
[] =
4095 { CODE_FOR_signbitshi2
, "__builtin_bfin_norm_fr1x16", BFIN_BUILTIN_NORM_1X16
, 0 },
4096 { CODE_FOR_ssneghi2
, "__builtin_bfin_negate_fr1x16", BFIN_BUILTIN_NEG_1X16
, 0 },
4097 { CODE_FOR_abshi2
, "__builtin_bfin_abs_fr1x16", BFIN_BUILTIN_ABS_1X16
, 0 },
4099 { CODE_FOR_signbitssi2
, "__builtin_bfin_norm_fr1x32", BFIN_BUILTIN_NORM_1X32
, 0 },
4100 { CODE_FOR_ssnegsi2
, "__builtin_bfin_negate_fr1x32", BFIN_BUILTIN_NEG_1X32
, 0 },
4102 { CODE_FOR_movv2hi_hi_low
, "__builtin_bfin_extract_lo", BFIN_BUILTIN_EXTRACTLO
, 0 },
4103 { CODE_FOR_movv2hi_hi_high
, "__builtin_bfin_extract_hi", BFIN_BUILTIN_EXTRACTHI
, 0 },
4104 { CODE_FOR_ssnegv2hi2
, "__builtin_bfin_negate_fr2x16", BFIN_BUILTIN_NEG_2X16
, 0 },
4105 { CODE_FOR_absv2hi2
, "__builtin_bfin_abs_fr2x16", BFIN_BUILTIN_ABS_2X16
, 0 }
4108 /* Errors in the source file can cause expand_expr to return const0_rtx
4109 where we expect a vector. To avoid crashing, use one of the vector
4110 clear instructions. */
4112 safe_vector_operand (rtx x
, enum machine_mode mode
)
4114 if (x
!= const0_rtx
)
4116 x
= gen_reg_rtx (SImode
);
4118 emit_insn (gen_movsi (x
, CONST0_RTX (SImode
)));
4119 return gen_lowpart (mode
, x
);
4122 /* Subroutine of bfin_expand_builtin to take care of binop insns. MACFLAG is -1
4123 if this is a normal binary op, or one of the MACFLAG_xxx constants. */
4126 bfin_expand_binop_builtin (enum insn_code icode
, tree arglist
, rtx target
,
4130 tree arg0
= TREE_VALUE (arglist
);
4131 tree arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4132 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4133 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4134 enum machine_mode op0mode
= GET_MODE (op0
);
4135 enum machine_mode op1mode
= GET_MODE (op1
);
4136 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4137 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4138 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
4140 if (VECTOR_MODE_P (mode0
))
4141 op0
= safe_vector_operand (op0
, mode0
);
4142 if (VECTOR_MODE_P (mode1
))
4143 op1
= safe_vector_operand (op1
, mode1
);
4146 || GET_MODE (target
) != tmode
4147 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4148 target
= gen_reg_rtx (tmode
);
4150 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
4153 op0
= gen_lowpart (HImode
, op0
);
4155 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
4158 op1
= gen_lowpart (HImode
, op1
);
4160 /* In case the insn wants input operands in modes different from
4161 the result, abort. */
4162 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
4163 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
4165 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4166 op0
= copy_to_mode_reg (mode0
, op0
);
4167 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
4168 op1
= copy_to_mode_reg (mode1
, op1
);
4171 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
4173 pat
= GEN_FCN (icode
) (target
, op0
, op1
, GEN_INT (macflag
));
4181 /* Subroutine of bfin_expand_builtin to take care of unop insns. */
4184 bfin_expand_unop_builtin (enum insn_code icode
, tree arglist
,
4188 tree arg0
= TREE_VALUE (arglist
);
4189 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4190 enum machine_mode op0mode
= GET_MODE (op0
);
4191 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
4192 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
4195 || GET_MODE (target
) != tmode
4196 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4197 target
= gen_reg_rtx (tmode
);
4199 if (VECTOR_MODE_P (mode0
))
4200 op0
= safe_vector_operand (op0
, mode0
);
4202 if (op0mode
== SImode
&& mode0
== HImode
)
4205 op0
= gen_lowpart (HImode
, op0
);
4207 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
4209 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4210 op0
= copy_to_mode_reg (mode0
, op0
);
4212 pat
= GEN_FCN (icode
) (target
, op0
);
4219 /* Expand an expression EXP that calls a built-in function,
4220 with result going to TARGET if that's convenient
4221 (and in mode MODE if that's convenient).
4222 SUBTARGET may be used as the target for computing one of EXP's operands.
4223 IGNORE is nonzero if the value is to be ignored. */
4226 bfin_expand_builtin (tree exp
, rtx target ATTRIBUTE_UNUSED
,
4227 rtx subtarget ATTRIBUTE_UNUSED
,
4228 enum machine_mode mode ATTRIBUTE_UNUSED
,
4229 int ignore ATTRIBUTE_UNUSED
)
4232 enum insn_code icode
;
4233 const struct builtin_description
*d
;
4234 tree fndecl
= TREE_OPERAND (TREE_OPERAND (exp
, 0), 0);
4235 tree arglist
= TREE_OPERAND (exp
, 1);
4236 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
4237 tree arg0
, arg1
, arg2
;
4238 rtx op0
, op1
, op2
, accvec
, pat
, tmp1
, tmp2
;
4239 enum machine_mode tmode
, mode0
;
4243 case BFIN_BUILTIN_CSYNC
:
4244 emit_insn (gen_csync ());
4246 case BFIN_BUILTIN_SSYNC
:
4247 emit_insn (gen_ssync ());
4250 case BFIN_BUILTIN_DIFFHL_2X16
:
4251 case BFIN_BUILTIN_DIFFLH_2X16
:
4252 arg0
= TREE_VALUE (arglist
);
4253 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4254 icode
= (fcode
== BFIN_BUILTIN_DIFFHL_2X16
4255 ? CODE_FOR_subhilov2hi3
: CODE_FOR_sublohiv2hi3
);
4256 tmode
= insn_data
[icode
].operand
[0].mode
;
4257 mode0
= insn_data
[icode
].operand
[1].mode
;
4260 || GET_MODE (target
) != tmode
4261 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
4262 target
= gen_reg_rtx (tmode
);
4264 if (VECTOR_MODE_P (mode0
))
4265 op0
= safe_vector_operand (op0
, mode0
);
4267 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
4268 op0
= copy_to_mode_reg (mode0
, op0
);
4270 pat
= GEN_FCN (icode
) (target
, op0
, op0
);
4276 case BFIN_BUILTIN_CPLX_MUL_16
:
4277 arg0
= TREE_VALUE (arglist
);
4278 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4279 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4280 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4281 accvec
= gen_reg_rtx (V2PDImode
);
4284 || GET_MODE (target
) != V2HImode
4285 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
4286 target
= gen_reg_rtx (tmode
);
4287 if (! register_operand (op0
, GET_MODE (op0
)))
4288 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
4289 if (! register_operand (op1
, GET_MODE (op1
)))
4290 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
4292 emit_insn (gen_flag_macinit1v2hi_parts (accvec
, op0
, op1
, const0_rtx
,
4293 const0_rtx
, const0_rtx
,
4294 const1_rtx
, GEN_INT (MACFLAG_NONE
)));
4295 emit_insn (gen_flag_macv2hi_parts (target
, op0
, op1
, const1_rtx
,
4296 const1_rtx
, const1_rtx
,
4297 const0_rtx
, accvec
, const1_rtx
, const0_rtx
,
4298 GEN_INT (MACFLAG_NONE
), accvec
));
4302 case BFIN_BUILTIN_CPLX_MAC_16
:
4303 case BFIN_BUILTIN_CPLX_MSU_16
:
4304 arg0
= TREE_VALUE (arglist
);
4305 arg1
= TREE_VALUE (TREE_CHAIN (arglist
));
4306 arg2
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist
)));
4307 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, 0);
4308 op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, 0);
4309 op2
= expand_expr (arg2
, NULL_RTX
, VOIDmode
, 0);
4310 accvec
= gen_reg_rtx (V2PDImode
);
4313 || GET_MODE (target
) != V2HImode
4314 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, V2HImode
))
4315 target
= gen_reg_rtx (tmode
);
4316 if (! register_operand (op0
, GET_MODE (op0
)))
4317 op0
= copy_to_mode_reg (GET_MODE (op0
), op0
);
4318 if (! register_operand (op1
, GET_MODE (op1
)))
4319 op1
= copy_to_mode_reg (GET_MODE (op1
), op1
);
4321 tmp1
= gen_reg_rtx (SImode
);
4322 tmp2
= gen_reg_rtx (SImode
);
4323 emit_insn (gen_ashlsi3 (tmp1
, gen_lowpart (SImode
, op2
), GEN_INT (16)));
4324 emit_move_insn (tmp2
, gen_lowpart (SImode
, op2
));
4325 emit_insn (gen_movstricthi_1 (gen_lowpart (HImode
, tmp2
), const0_rtx
));
4326 emit_insn (gen_load_accumulator_pair (accvec
, tmp1
, tmp2
));
4327 emit_insn (gen_flag_macv2hi_parts_acconly (accvec
, op0
, op1
, const0_rtx
,
4328 const0_rtx
, const0_rtx
,
4329 const1_rtx
, accvec
, const0_rtx
,
4331 GEN_INT (MACFLAG_W32
)));
4332 tmp1
= (fcode
== BFIN_BUILTIN_CPLX_MAC_16
? const1_rtx
: const0_rtx
);
4333 tmp2
= (fcode
== BFIN_BUILTIN_CPLX_MAC_16
? const0_rtx
: const1_rtx
);
4334 emit_insn (gen_flag_macv2hi_parts (target
, op0
, op1
, const1_rtx
,
4335 const1_rtx
, const1_rtx
,
4336 const0_rtx
, accvec
, tmp1
, tmp2
,
4337 GEN_INT (MACFLAG_NONE
), accvec
));
4345 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
4346 if (d
->code
== fcode
)
4347 return bfin_expand_binop_builtin (d
->icode
, arglist
, target
,
4350 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
4351 if (d
->code
== fcode
)
4352 return bfin_expand_unop_builtin (d
->icode
, arglist
, target
);
/* Initialize the GCC target structure: override the default hook
   implementations with the Blackfin-specific ones defined above.  */

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS bfin_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN bfin_expand_builtin

#undef TARGET_ASM_GLOBALIZE_LABEL
#define TARGET_ASM_GLOBALIZE_LABEL bfin_globalize_label

#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START output_file_start

#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE bfin_attribute_table

#undef TARGET_COMP_TYPE_ATTRIBUTES
#define TARGET_COMP_TYPE_ATTRIBUTES bfin_comp_type_attributes

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS bfin_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST bfin_address_cost

#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL bfin_internal_label

#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER bfin_assemble_integer

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG bfin_reorg

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL bfin_function_ok_for_sibcall

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK bfin_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_tree_hwi_hwi_tree_true

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST bfin_adjust_cost

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES bfin_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE bfin_pass_by_reference

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS setup_incoming_varargs

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX bfin_struct_value_rtx

#undef TARGET_VECTOR_MODE_SUPPORTED_P
#define TARGET_VECTOR_MODE_SUPPORTED_P bfin_vector_mode_supported_p

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION bfin_handle_option

#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD bfin_secondary_reload

#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS bfin_delegitimize_address
4435 struct gcc_target targetm
= TARGET_INITIALIZER
;