/* Subroutines for insn-output.c for ATMEL AVR micro controllers
   Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
   2009, 2010, 2011 Free Software Foundation, Inc.
   Contributed by Denis Chertykov (chertykov@gmail.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
38 #include "diagnostic-core.h"
44 #include "langhooks.h"
47 #include "target-def.h"
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree
);
56 static int interrupt_function_p (tree
);
57 static int signal_function_p (tree
);
58 static int avr_OS_task_function_p (tree
);
59 static int avr_OS_main_function_p (tree
);
60 static int avr_regs_to_save (HARD_REG_SET
*);
61 static int get_sequence_length (rtx insns
);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code
);
65 static int avr_num_arg_regs (enum machine_mode
, const_tree
);
67 static RTX_CODE
compare_condition (rtx insn
);
68 static rtx
avr_legitimize_address (rtx
, rtx
, enum machine_mode
);
69 static int compare_sign_p (rtx insn
);
70 static tree
avr_handle_progmem_attribute (tree
*, tree
, tree
, int, bool *);
71 static tree
avr_handle_fndecl_attribute (tree
*, tree
, tree
, int, bool *);
72 static tree
avr_handle_fntype_attribute (tree
*, tree
, tree
, int, bool *);
73 static bool avr_assemble_integer (rtx
, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode
, rtx
, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx
avr_function_value (const_tree
, const_tree
, bool);
81 static rtx
avr_libcall_value (enum machine_mode
, const_rtx
);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree
, tree
*);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree
, const char *, int);
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx
, int);
89 static void avr_asm_out_dtor (rtx
, int);
90 static int avr_register_move_cost (enum machine_mode
, reg_class_t
, reg_class_t
);
91 static int avr_memory_move_cost (enum machine_mode
, reg_class_t
, bool);
92 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
, bool);
93 static bool avr_rtx_costs (rtx
, int, int, int *, bool);
94 static int avr_address_cost (rtx
, bool);
95 static bool avr_return_in_memory (const_tree
, const_tree
);
96 static struct machine_function
* avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx
avr_expand_builtin (tree
, rtx
, rtx
, enum machine_mode
, int);
99 static rtx
avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c
);
105 static rtx
avr_function_arg (cumulative_args_t
, enum machine_mode
,
107 static void avr_function_arg_advance (cumulative_args_t
, enum machine_mode
,
109 static bool avr_function_ok_for_sibcall (tree
, tree
);
110 static void avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
);
112 /* Allocate registers from r25 to r8 for parameters for function calls. */
113 #define FIRST_CUM_REG 26
115 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
116 static GTY(()) rtx tmp_reg_rtx
;
118 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
119 static GTY(()) rtx zero_reg_rtx
;
121 /* AVR register names {"r0", "r1", ..., "r31"} */
122 static const char *const avr_regnames
[] = REGISTER_NAMES
;
124 /* Preprocessor macros to define depending on MCU type. */
125 const char *avr_extra_arch_macro
;
127 /* Current architecture. */
128 const struct base_arch_s
*avr_current_arch
;
130 /* Current device. */
131 const struct mcu_type_s
*avr_current_device
;
133 section
*progmem_section
;
135 /* To track if code will use .bss and/or .data. */
136 bool avr_need_clear_bss_p
= false;
137 bool avr_need_copy_data_p
= false;
139 /* AVR attributes. */
140 static const struct attribute_spec avr_attribute_table
[] =
142 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
143 affects_type_identity } */
144 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
,
146 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
148 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
,
150 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
,
152 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
,
154 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute
,
156 { NULL
, 0, 0, false, false, false, NULL
, false }
159 /* Initialize the GCC target structure. */
160 #undef TARGET_ASM_ALIGNED_HI_OP
161 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
162 #undef TARGET_ASM_ALIGNED_SI_OP
163 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
164 #undef TARGET_ASM_UNALIGNED_HI_OP
165 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
166 #undef TARGET_ASM_UNALIGNED_SI_OP
167 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
168 #undef TARGET_ASM_INTEGER
169 #define TARGET_ASM_INTEGER avr_assemble_integer
170 #undef TARGET_ASM_FILE_START
171 #define TARGET_ASM_FILE_START avr_file_start
172 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
173 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
174 #undef TARGET_ASM_FILE_END
175 #define TARGET_ASM_FILE_END avr_file_end
177 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
178 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
179 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
180 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
182 #undef TARGET_FUNCTION_VALUE
183 #define TARGET_FUNCTION_VALUE avr_function_value
184 #undef TARGET_LIBCALL_VALUE
185 #define TARGET_LIBCALL_VALUE avr_libcall_value
186 #undef TARGET_FUNCTION_VALUE_REGNO_P
187 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
189 #undef TARGET_ATTRIBUTE_TABLE
190 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
191 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
192 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
193 #undef TARGET_INSERT_ATTRIBUTES
194 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
195 #undef TARGET_SECTION_TYPE_FLAGS
196 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
198 /* `TARGET_ASM_NAMED_SECTION' must be defined in avr.h. */
200 #undef TARGET_ASM_INIT_SECTIONS
201 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
203 #undef TARGET_REGISTER_MOVE_COST
204 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
205 #undef TARGET_MEMORY_MOVE_COST
206 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
207 #undef TARGET_RTX_COSTS
208 #define TARGET_RTX_COSTS avr_rtx_costs
209 #undef TARGET_ADDRESS_COST
210 #define TARGET_ADDRESS_COST avr_address_cost
211 #undef TARGET_MACHINE_DEPENDENT_REORG
212 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
213 #undef TARGET_FUNCTION_ARG
214 #define TARGET_FUNCTION_ARG avr_function_arg
215 #undef TARGET_FUNCTION_ARG_ADVANCE
216 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
218 #undef TARGET_LEGITIMIZE_ADDRESS
219 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
221 #undef TARGET_RETURN_IN_MEMORY
222 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
224 #undef TARGET_STRICT_ARGUMENT_NAMING
225 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
227 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
228 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
230 #undef TARGET_HARD_REGNO_SCRATCH_OK
231 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
232 #undef TARGET_CASE_VALUES_THRESHOLD
233 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
235 #undef TARGET_LEGITIMATE_ADDRESS_P
236 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
238 #undef TARGET_FRAME_POINTER_REQUIRED
239 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
240 #undef TARGET_CAN_ELIMINATE
241 #define TARGET_CAN_ELIMINATE avr_can_eliminate
243 #undef TARGET_CLASS_LIKELY_SPILLED_P
244 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
246 #undef TARGET_OPTION_OVERRIDE
247 #define TARGET_OPTION_OVERRIDE avr_option_override
249 #undef TARGET_CANNOT_MODIFY_JUMPS_P
250 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
252 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
253 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
255 #undef TARGET_INIT_BUILTINS
256 #define TARGET_INIT_BUILTINS avr_init_builtins
258 #undef TARGET_EXPAND_BUILTIN
259 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
262 struct gcc_target targetm
= TARGET_INITIALIZER
;
265 avr_option_override (void)
267 flag_delete_null_pointer_checks
= 0;
269 avr_current_device
= &avr_mcu_types
[avr_mcu_index
];
270 avr_current_arch
= &avr_arch_types
[avr_current_device
->arch
];
271 avr_extra_arch_macro
= avr_current_device
->macro
;
273 tmp_reg_rtx
= gen_rtx_REG (QImode
, TMP_REGNO
);
274 zero_reg_rtx
= gen_rtx_REG (QImode
, ZERO_REGNO
);
276 init_machine_status
= avr_init_machine_status
;
279 /* return register class from register number. */
281 static const enum reg_class reg_class_tab
[]={
282 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
283 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
284 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
285 GENERAL_REGS
, /* r0 - r15 */
286 LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,
287 LD_REGS
, /* r16 - 23 */
288 ADDW_REGS
,ADDW_REGS
, /* r24,r25 */
289 POINTER_X_REGS
,POINTER_X_REGS
, /* r26,27 */
290 POINTER_Y_REGS
,POINTER_Y_REGS
, /* r28,r29 */
291 POINTER_Z_REGS
,POINTER_Z_REGS
, /* r30,r31 */
292 STACK_REG
,STACK_REG
/* SPL,SPH */
/* Function to set up the backend function structure: allocate a
   zero-initialized, GC-managed struct machine_function for the
   function being compiled (installed via init_machine_status).  */

static struct machine_function *
avr_init_machine_status (void)
{
  return ggc_alloc_cleared_machine_function ();
}
303 /* Return register class for register R. */
306 avr_regno_reg_class (int r
)
309 return reg_class_tab
[r
];
313 /* A helper for the subsequent function attribute used to dig for
314 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
317 avr_lookup_function_attribute1 (const_tree func
, const char *name
)
319 if (FUNCTION_DECL
== TREE_CODE (func
))
321 if (NULL_TREE
!= lookup_attribute (name
, DECL_ATTRIBUTES (func
)))
326 func
= TREE_TYPE (func
);
329 gcc_assert (TREE_CODE (func
) == FUNCTION_TYPE
330 || TREE_CODE (func
) == METHOD_TYPE
);
332 return NULL_TREE
!= lookup_attribute (name
, TYPE_ATTRIBUTES (func
));
335 /* Return nonzero if FUNC is a naked function. */
338 avr_naked_function_p (tree func
)
340 return avr_lookup_function_attribute1 (func
, "naked");
343 /* Return nonzero if FUNC is an interrupt function as specified
344 by the "interrupt" attribute. */
347 interrupt_function_p (tree func
)
349 return avr_lookup_function_attribute1 (func
, "interrupt");
352 /* Return nonzero if FUNC is a signal function as specified
353 by the "signal" attribute. */
356 signal_function_p (tree func
)
358 return avr_lookup_function_attribute1 (func
, "signal");
361 /* Return nonzero if FUNC is a OS_task function. */
364 avr_OS_task_function_p (tree func
)
366 return avr_lookup_function_attribute1 (func
, "OS_task");
369 /* Return nonzero if FUNC is a OS_main function. */
372 avr_OS_main_function_p (tree func
)
374 return avr_lookup_function_attribute1 (func
, "OS_main");
377 /* Return the number of hard registers to push/pop in the prologue/epilogue
378 of the current function, and optionally store these registers in SET. */
381 avr_regs_to_save (HARD_REG_SET
*set
)
384 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
385 || signal_function_p (current_function_decl
));
388 CLEAR_HARD_REG_SET (*set
);
391 /* No need to save any registers if the function never returns or
392 is have "OS_task" or "OS_main" attribute. */
393 if (TREE_THIS_VOLATILE (current_function_decl
)
394 || cfun
->machine
->is_OS_task
395 || cfun
->machine
->is_OS_main
)
398 for (reg
= 0; reg
< 32; reg
++)
400 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
401 any global register variables. */
405 if ((int_or_sig_p
&& !current_function_is_leaf
&& call_used_regs
[reg
])
406 || (df_regs_ever_live_p (reg
)
407 && (int_or_sig_p
|| !call_used_regs
[reg
])
408 && !(frame_pointer_needed
409 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
412 SET_HARD_REG_BIT (*set
, reg
);
419 /* Return true if register FROM can be eliminated via register TO. */
422 avr_can_eliminate (const int from
, const int to
)
424 return ((from
== ARG_POINTER_REGNUM
&& to
== FRAME_POINTER_REGNUM
)
425 || ((from
== FRAME_POINTER_REGNUM
426 || from
== FRAME_POINTER_REGNUM
+ 1)
427 && !frame_pointer_needed
));
430 /* Compute offset between arg_pointer and frame_pointer. */
433 avr_initial_elimination_offset (int from
, int to
)
435 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
439 int offset
= frame_pointer_needed
? 2 : 0;
440 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
442 offset
+= avr_regs_to_save (NULL
);
443 return get_frame_size () + (avr_pc_size
) + 1 + offset
;
447 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
448 frame pointer by +STARTING_FRAME_OFFSET.
449 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
450 avoids creating add/sub of offset in nonlocal goto and setjmp. */
452 rtx
avr_builtin_setjmp_frame_value (void)
454 return gen_rtx_MINUS (Pmode
, virtual_stack_vars_rtx
,
455 gen_int_mode (STARTING_FRAME_OFFSET
, Pmode
));
458 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
459 This is return address of function. */
461 avr_return_addr_rtx (int count
, rtx tem
)
465 /* Can only return this functions return address. Others not supported. */
471 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+2");
472 warning (0, "'builtin_return_address' contains only 2 bytes of address");
475 r
= gen_rtx_SYMBOL_REF (Pmode
, ".L__stack_usage+1");
477 r
= gen_rtx_PLUS (Pmode
, tem
, r
);
478 r
= gen_frame_mem (Pmode
, memory_address (Pmode
, r
));
479 r
= gen_rtx_ROTATE (HImode
, r
, GEN_INT (8));
483 /* Return 1 if the function epilogue is just a single "ret". */
486 avr_simple_epilogue (void)
488 return (! frame_pointer_needed
489 && get_frame_size () == 0
490 && avr_regs_to_save (NULL
) == 0
491 && ! interrupt_function_p (current_function_decl
)
492 && ! signal_function_p (current_function_decl
)
493 && ! avr_naked_function_p (current_function_decl
)
494 && ! TREE_THIS_VOLATILE (current_function_decl
));
497 /* This function checks sequence of live registers. */
500 sequent_regs_live (void)
506 for (reg
= 0; reg
< 18; ++reg
)
508 if (!call_used_regs
[reg
])
510 if (df_regs_ever_live_p (reg
))
520 if (!frame_pointer_needed
)
522 if (df_regs_ever_live_p (REG_Y
))
530 if (df_regs_ever_live_p (REG_Y
+1))
543 return (cur_seq
== live_seq
) ? live_seq
: 0;
546 /* Obtain the length sequence of insns. */
549 get_sequence_length (rtx insns
)
554 for (insn
= insns
, length
= 0; insn
; insn
= NEXT_INSN (insn
))
555 length
+= get_attr_length (insn
);
560 /* Implement INCOMING_RETURN_ADDR_RTX. */
563 avr_incoming_return_addr_rtx (void)
565 /* The return address is at the top of the stack. Note that the push
566 was via post-decrement, which means the actual address is off by one. */
567 return gen_frame_mem (HImode
, plus_constant (stack_pointer_rtx
, 1));
570 /* Helper for expand_prologue. Emit a push of a byte register. */
573 emit_push_byte (unsigned regno
, bool frame_related_p
)
577 mem
= gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
);
578 mem
= gen_frame_mem (QImode
, mem
);
579 reg
= gen_rtx_REG (QImode
, regno
);
581 insn
= emit_insn (gen_rtx_SET (VOIDmode
, mem
, reg
));
583 RTX_FRAME_RELATED_P (insn
) = 1;
585 cfun
->machine
->stack_usage
++;
589 /* Output function prologue. */
592 expand_prologue (void)
597 HOST_WIDE_INT size
= get_frame_size();
600 /* Init cfun->machine. */
601 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
602 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
603 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
604 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
605 cfun
->machine
->is_OS_main
= avr_OS_main_function_p (current_function_decl
);
606 cfun
->machine
->stack_usage
= 0;
608 /* Prologue: naked. */
609 if (cfun
->machine
->is_naked
)
614 avr_regs_to_save (&set
);
615 live_seq
= sequent_regs_live ();
616 minimize
= (TARGET_CALL_PROLOGUES
617 && !cfun
->machine
->is_interrupt
618 && !cfun
->machine
->is_signal
619 && !cfun
->machine
->is_OS_task
620 && !cfun
->machine
->is_OS_main
623 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
625 /* Enable interrupts. */
626 if (cfun
->machine
->is_interrupt
)
627 emit_insn (gen_enable_interrupt ());
630 emit_push_byte (ZERO_REGNO
, true);
633 emit_push_byte (TMP_REGNO
, true);
636 /* ??? There's no dwarf2 column reserved for SREG. */
637 emit_move_insn (tmp_reg_rtx
, gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)));
638 emit_push_byte (TMP_REGNO
, false);
641 /* ??? There's no dwarf2 column reserved for RAMPZ. */
643 && TEST_HARD_REG_BIT (set
, REG_Z
)
644 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
646 emit_move_insn (tmp_reg_rtx
,
647 gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
)));
648 emit_push_byte (TMP_REGNO
, false);
651 /* Clear zero reg. */
652 emit_move_insn (zero_reg_rtx
, const0_rtx
);
654 /* Prevent any attempt to delete the setting of ZERO_REG! */
655 emit_use (zero_reg_rtx
);
657 if (minimize
&& (frame_pointer_needed
658 || (AVR_2_BYTE_PC
&& live_seq
> 6)
661 int first_reg
, reg
, offset
;
663 emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
664 gen_int_mode (size
, HImode
));
666 insn
= emit_insn (gen_call_prologue_saves
667 (gen_int_mode (live_seq
, HImode
),
668 gen_int_mode (size
+ live_seq
, HImode
)));
669 RTX_FRAME_RELATED_P (insn
) = 1;
671 /* Describe the effect of the unspec_volatile call to prologue_saves.
672 Note that this formulation assumes that add_reg_note pushes the
673 notes to the front. Thus we build them in the reverse order of
674 how we want dwarf2out to process them. */
676 /* The function does always set frame_pointer_rtx, but whether that
677 is going to be permanent in the function is frame_pointer_needed. */
678 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
679 gen_rtx_SET (VOIDmode
,
680 (frame_pointer_needed
681 ? frame_pointer_rtx
: stack_pointer_rtx
),
682 plus_constant (stack_pointer_rtx
,
683 -(size
+ live_seq
))));
685 /* Note that live_seq always contains r28+r29, but the other
686 registers to be saved are all below 18. */
687 first_reg
= 18 - (live_seq
- 2);
689 for (reg
= 29, offset
= -live_seq
+ 1;
691 reg
= (reg
== 28 ? 17 : reg
- 1), ++offset
)
695 m
= gen_rtx_MEM (QImode
, plus_constant (stack_pointer_rtx
, offset
));
696 r
= gen_rtx_REG (QImode
, reg
);
697 add_reg_note (insn
, REG_CFA_OFFSET
, gen_rtx_SET (VOIDmode
, m
, r
));
700 cfun
->machine
->stack_usage
+= size
+ live_seq
;
705 for (reg
= 0; reg
< 32; ++reg
)
706 if (TEST_HARD_REG_BIT (set
, reg
))
707 emit_push_byte (reg
, true);
709 if (frame_pointer_needed
)
711 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
713 /* Push frame pointer. Always be consistent about the
714 ordering of pushes -- epilogue_restores expects the
715 register pair to be pushed low byte first. */
716 emit_push_byte (REG_Y
, true);
717 emit_push_byte (REG_Y
+ 1, true);
722 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
723 RTX_FRAME_RELATED_P (insn
) = 1;
727 /* Creating a frame can be done by direct manipulation of the
728 stack or via the frame pointer. These two methods are:
735 the optimum method depends on function type, stack and frame size.
736 To avoid a complex logic, both methods are tested and shortest
741 if (AVR_HAVE_8BIT_SP
)
743 /* The high byte (r29) doesn't change. Prefer 'subi'
744 (1 cycle) over 'sbiw' (2 cycles, same size). */
745 myfp
= gen_rtx_REG (QImode
, FRAME_POINTER_REGNUM
);
749 /* Normal sized addition. */
750 myfp
= frame_pointer_rtx
;
753 /* Method 1-Adjust frame pointer. */
756 /* Normally the dwarf2out frame-related-expr interpreter does
757 not expect to have the CFA change once the frame pointer is
758 set up. Thus we avoid marking the move insn below and
759 instead indicate that the entire operation is complete after
760 the frame pointer subtraction is done. */
762 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
764 insn
= emit_move_insn (myfp
, plus_constant (myfp
, -size
));
765 RTX_FRAME_RELATED_P (insn
) = 1;
766 add_reg_note (insn
, REG_CFA_ADJUST_CFA
,
767 gen_rtx_SET (VOIDmode
, frame_pointer_rtx
,
768 plus_constant (stack_pointer_rtx
,
771 /* Copy to stack pointer. Note that since we've already
772 changed the CFA to the frame pointer this operation
773 need not be annotated at all. */
774 if (AVR_HAVE_8BIT_SP
)
776 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
778 else if (TARGET_NO_INTERRUPTS
779 || cfun
->machine
->is_signal
780 || cfun
->machine
->is_OS_main
)
782 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
785 else if (cfun
->machine
->is_interrupt
)
787 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
792 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
795 fp_plus_insns
= get_insns ();
798 /* Method 2-Adjust Stack pointer. */
805 insn
= plus_constant (stack_pointer_rtx
, -size
);
806 insn
= emit_move_insn (stack_pointer_rtx
, insn
);
807 RTX_FRAME_RELATED_P (insn
) = 1;
809 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
810 RTX_FRAME_RELATED_P (insn
) = 1;
812 sp_plus_insns
= get_insns ();
815 /* Use shortest method. */
816 if (get_sequence_length (sp_plus_insns
)
817 < get_sequence_length (fp_plus_insns
))
818 emit_insn (sp_plus_insns
);
820 emit_insn (fp_plus_insns
);
823 emit_insn (fp_plus_insns
);
825 cfun
->machine
->stack_usage
+= size
;
830 if (flag_stack_usage_info
)
831 current_function_static_stack_size
= cfun
->machine
->stack_usage
;
834 /* Output summary at end of function prologue. */
837 avr_asm_function_end_prologue (FILE *file
)
839 if (cfun
->machine
->is_naked
)
841 fputs ("/* prologue: naked */\n", file
);
845 if (cfun
->machine
->is_interrupt
)
847 fputs ("/* prologue: Interrupt */\n", file
);
849 else if (cfun
->machine
->is_signal
)
851 fputs ("/* prologue: Signal */\n", file
);
854 fputs ("/* prologue: function */\n", file
);
856 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
858 fprintf (file
, "/* stack size = %d */\n",
859 cfun
->machine
->stack_usage
);
860 /* Create symbol stack offset here so all functions have it. Add 1 to stack
861 usage for offset so that SP + .L__stack_offset = return address. */
862 fprintf (file
, ".L__stack_usage = %d\n", cfun
->machine
->stack_usage
);
866 /* Implement EPILOGUE_USES. */
869 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
873 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
878 /* Helper for expand_epilogue. Emit a pop of a byte register. */
881 emit_pop_byte (unsigned regno
)
885 mem
= gen_rtx_PRE_INC (HImode
, stack_pointer_rtx
);
886 mem
= gen_frame_mem (QImode
, mem
);
887 reg
= gen_rtx_REG (QImode
, regno
);
889 emit_insn (gen_rtx_SET (VOIDmode
, reg
, mem
));
892 /* Output RTL epilogue. */
895 expand_epilogue (bool sibcall_p
)
901 HOST_WIDE_INT size
= get_frame_size();
903 /* epilogue: naked */
904 if (cfun
->machine
->is_naked
)
906 gcc_assert (!sibcall_p
);
908 emit_jump_insn (gen_return ());
912 avr_regs_to_save (&set
);
913 live_seq
= sequent_regs_live ();
914 minimize
= (TARGET_CALL_PROLOGUES
915 && !cfun
->machine
->is_interrupt
916 && !cfun
->machine
->is_signal
917 && !cfun
->machine
->is_OS_task
918 && !cfun
->machine
->is_OS_main
921 if (minimize
&& (frame_pointer_needed
|| live_seq
> 4))
923 if (frame_pointer_needed
)
925 /* Get rid of frame. */
926 emit_move_insn(frame_pointer_rtx
,
927 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
928 gen_int_mode (size
, HImode
)));
932 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
935 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
939 if (frame_pointer_needed
)
943 /* Try two methods to adjust stack and select shortest. */
947 if (AVR_HAVE_8BIT_SP
)
949 /* The high byte (r29) doesn't change - prefer 'subi'
950 (1 cycle) over 'sbiw' (2 cycles, same size). */
951 myfp
= gen_rtx_REG (QImode
, FRAME_POINTER_REGNUM
);
955 /* Normal sized addition. */
956 myfp
= frame_pointer_rtx
;
959 /* Method 1-Adjust frame pointer. */
962 emit_move_insn (myfp
, plus_constant (myfp
, size
));
964 /* Copy to stack pointer. */
965 if (AVR_HAVE_8BIT_SP
)
967 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
969 else if (TARGET_NO_INTERRUPTS
970 || cfun
->machine
->is_signal
)
972 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx
,
975 else if (cfun
->machine
->is_interrupt
)
977 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx
,
982 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
985 fp_plus_insns
= get_insns ();
988 /* Method 2-Adjust Stack pointer. */
995 emit_move_insn (stack_pointer_rtx
,
996 plus_constant (stack_pointer_rtx
, size
));
998 sp_plus_insns
= get_insns ();
1001 /* Use shortest method. */
1002 if (get_sequence_length (sp_plus_insns
)
1003 < get_sequence_length (fp_plus_insns
))
1004 emit_insn (sp_plus_insns
);
1006 emit_insn (fp_plus_insns
);
1009 emit_insn (fp_plus_insns
);
1011 if (!(cfun
->machine
->is_OS_task
|| cfun
->machine
->is_OS_main
))
1013 /* Restore previous frame_pointer. See expand_prologue for
1014 rationale for not using pophi. */
1015 emit_pop_byte (REG_Y
+ 1);
1016 emit_pop_byte (REG_Y
);
1020 /* Restore used registers. */
1021 for (reg
= 31; reg
>= 0; --reg
)
1022 if (TEST_HARD_REG_BIT (set
, reg
))
1023 emit_pop_byte (reg
);
1025 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
1027 /* Restore RAMPZ using tmp reg as scratch. */
1029 && TEST_HARD_REG_BIT (set
, REG_Z
)
1030 && TEST_HARD_REG_BIT (set
, REG_Z
+ 1))
1032 emit_pop_byte (TMP_REGNO
);
1033 emit_move_insn (gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
)),
1037 /* Restore SREG using tmp reg as scratch. */
1038 emit_pop_byte (TMP_REGNO
);
1040 emit_move_insn (gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)),
1043 /* Restore tmp REG. */
1044 emit_pop_byte (TMP_REGNO
);
1046 /* Restore zero REG. */
1047 emit_pop_byte (ZERO_REGNO
);
1051 emit_jump_insn (gen_return ());
/* Output summary messages at beginning of function epilogue:
   emit a marker comment into the assembly output.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fprintf (file, "/* epilogue start */\n");
}
1064 /* Implement TARGET_CANNOT_MODITY_JUMPS_P */
1067 avr_cannot_modify_jumps_p (void)
1070 /* Naked Functions must not have any instructions after
1071 their epilogue, see PR42240 */
1073 if (reload_completed
1075 && cfun
->machine
->is_naked
)
1084 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1085 machine for a memory operand of mode MODE. */
1088 avr_legitimate_address_p (enum machine_mode mode
, rtx x
, bool strict
)
1090 enum reg_class r
= NO_REGS
;
1092 if (TARGET_ALL_DEBUG
)
1094 fprintf (stderr
, "mode: (%s) %s %s %s %s:",
1095 GET_MODE_NAME(mode
),
1096 strict
? "(strict)": "",
1097 reload_completed
? "(reload_completed)": "",
1098 reload_in_progress
? "(reload_in_progress)": "",
1099 reg_renumber
? "(reg_renumber)" : "");
1100 if (GET_CODE (x
) == PLUS
1101 && REG_P (XEXP (x
, 0))
1102 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1103 && INTVAL (XEXP (x
, 1)) >= 0
1104 && INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
)
1107 fprintf (stderr
, "(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
1108 true_regnum (XEXP (x
, 0)));
1111 if (!strict
&& GET_CODE (x
) == SUBREG
)
1113 if (REG_P (x
) && (strict
? REG_OK_FOR_BASE_STRICT_P (x
)
1114 : REG_OK_FOR_BASE_NOSTRICT_P (x
)))
1116 else if (CONSTANT_ADDRESS_P (x
))
1118 else if (GET_CODE (x
) == PLUS
1119 && REG_P (XEXP (x
, 0))
1120 && GET_CODE (XEXP (x
, 1)) == CONST_INT
1121 && INTVAL (XEXP (x
, 1)) >= 0)
1123 int fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
1127 || REGNO (XEXP (x
,0)) == REG_X
1128 || REGNO (XEXP (x
,0)) == REG_Y
1129 || REGNO (XEXP (x
,0)) == REG_Z
)
1130 r
= BASE_POINTER_REGS
;
1131 if (XEXP (x
,0) == frame_pointer_rtx
1132 || XEXP (x
,0) == arg_pointer_rtx
)
1133 r
= BASE_POINTER_REGS
;
1135 else if (frame_pointer_needed
&& XEXP (x
,0) == frame_pointer_rtx
)
1138 else if ((GET_CODE (x
) == PRE_DEC
|| GET_CODE (x
) == POST_INC
)
1139 && REG_P (XEXP (x
, 0))
1140 && (strict
? REG_OK_FOR_BASE_STRICT_P (XEXP (x
, 0))
1141 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x
, 0))))
1145 if (TARGET_ALL_DEBUG
)
1147 fprintf (stderr
, " ret = %c\n", r
+ '0');
1149 return r
== NO_REGS
? 0 : (int)r
;
1152 /* Attempts to replace X with a valid
1153 memory address for an operand of mode MODE */
1156 avr_legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1159 if (TARGET_ALL_DEBUG
)
1161 fprintf (stderr
, "legitimize_address mode: %s", GET_MODE_NAME(mode
));
1165 if (GET_CODE (oldx
) == PLUS
1166 && REG_P (XEXP (oldx
,0)))
1168 if (REG_P (XEXP (oldx
,1)))
1169 x
= force_reg (GET_MODE (oldx
), oldx
);
1170 else if (GET_CODE (XEXP (oldx
, 1)) == CONST_INT
)
1172 int offs
= INTVAL (XEXP (oldx
,1));
1173 if (frame_pointer_rtx
!= XEXP (oldx
,0))
1174 if (offs
> MAX_LD_OFFSET (mode
))
1176 if (TARGET_ALL_DEBUG
)
1177 fprintf (stderr
, "force_reg (big offset)\n");
1178 x
= force_reg (GET_MODE (oldx
), oldx
);
1186 /* Return a pointer register name as a string. */
1189 ptrreg_to_str (int regno
)
1193 case REG_X
: return "X";
1194 case REG_Y
: return "Y";
1195 case REG_Z
: return "Z";
1197 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1202 /* Return the condition name as a string.
1203 Used in conditional jump constructing */
1206 cond_string (enum rtx_code code
)
1215 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1220 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1233 /* Output ADDR to FILE as address. */
1236 print_operand_address (FILE *file
, rtx addr
)
1238 switch (GET_CODE (addr
))
1241 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1245 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1249 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1253 if (CONSTANT_ADDRESS_P (addr
)
1254 && text_segment_operand (addr
, VOIDmode
))
1257 if (GET_CODE (x
) == CONST
)
1259 if (GET_CODE (x
) == PLUS
&& GET_CODE (XEXP (x
,1)) == CONST_INT
)
1261 /* Assembler gs() will implant word address. Make offset
1262 a byte offset inside gs() for assembler. This is
1263 needed because the more logical (constant+gs(sym)) is not
1264 accepted by gas. For 128K and lower devices this is ok. For
1265 large devices it will create a Trampoline to offset from symbol
1266 which may not be what the user really wanted. */
1267 fprintf (file
, "gs(");
1268 output_addr_const (file
, XEXP (x
,0));
1269 fprintf (file
,"+" HOST_WIDE_INT_PRINT_DEC
")", 2 * INTVAL (XEXP (x
,1)));
1271 if (warning (0, "pointer offset from symbol maybe incorrect"))
1273 output_addr_const (stderr
, addr
);
1274 fprintf(stderr
,"\n");
1279 fprintf (file
, "gs(");
1280 output_addr_const (file
, addr
);
1281 fprintf (file
, ")");
1285 output_addr_const (file
, addr
);
1290 /* Output X as assembler operand to file FILE. */
1293 print_operand (FILE *file
, rtx x
, int code
)
1297 if (code
>= 'A' && code
<= 'D')
1302 if (!AVR_HAVE_JMP_CALL
)
1305 else if (code
== '!')
1307 if (AVR_HAVE_EIJMP_EICALL
)
1312 if (x
== zero_reg_rtx
)
1313 fprintf (file
, "__zero_reg__");
1315 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1317 else if (GET_CODE (x
) == CONST_INT
)
1318 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + abcd
);
1319 else if (GET_CODE (x
) == MEM
)
1321 rtx addr
= XEXP (x
,0);
1324 if (!CONSTANT_P (addr
))
1325 fatal_insn ("bad address, not a constant):", addr
);
1326 /* Assembler template with m-code is data - not progmem section */
1327 if (text_segment_operand (addr
, VOIDmode
))
1328 if (warning ( 0, "accessing data memory with program memory address"))
1330 output_addr_const (stderr
, addr
);
1331 fprintf(stderr
,"\n");
1333 output_addr_const (file
, addr
);
1335 else if (code
== 'o')
1337 if (GET_CODE (addr
) != PLUS
)
1338 fatal_insn ("bad address, not (reg+disp):", addr
);
1340 print_operand (file
, XEXP (addr
, 1), 0);
1342 else if (code
== 'p' || code
== 'r')
1344 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
1345 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
1348 print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
1350 print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
1352 else if (GET_CODE (addr
) == PLUS
)
1354 print_operand_address (file
, XEXP (addr
,0));
1355 if (REGNO (XEXP (addr
, 0)) == REG_X
)
1356 fatal_insn ("internal compiler error. Bad address:"
1359 print_operand (file
, XEXP (addr
,1), code
);
1362 print_operand_address (file
, addr
);
1364 else if (code
== 'x')
1366 /* Constant progmem address - like used in jmp or call */
1367 if (0 == text_segment_operand (x
, VOIDmode
))
1368 if (warning ( 0, "accessing program memory with data memory address"))
1370 output_addr_const (stderr
, x
);
1371 fprintf(stderr
,"\n");
1373 /* Use normal symbol for direct address no linker trampoline needed */
1374 output_addr_const (file
, x
);
1376 else if (GET_CODE (x
) == CONST_DOUBLE
)
1380 if (GET_MODE (x
) != SFmode
)
1381 fatal_insn ("internal compiler error. Unknown mode:", x
);
1382 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
1383 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
1384 fprintf (file
, "0x%lx", val
);
1386 else if (code
== 'j')
1387 fputs (cond_string (GET_CODE (x
)), file
);
1388 else if (code
== 'k')
1389 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
1391 print_operand_address (file
, x
);
1394 /* Update the condition code in the INSN. */
1397 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
1401 switch (get_attr_cc (insn
))
1404 /* Insn does not affect CC at all. */
1412 set
= single_set (insn
);
1416 cc_status
.flags
|= CC_NO_OVERFLOW
;
1417 cc_status
.value1
= SET_DEST (set
);
1422 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1423 The V flag may or may not be known but that's ok because
1424 alter_cond will change tests to use EQ/NE. */
1425 set
= single_set (insn
);
1429 cc_status
.value1
= SET_DEST (set
);
1430 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1435 set
= single_set (insn
);
1438 cc_status
.value1
= SET_SRC (set
);
1442 /* Insn doesn't leave CC in a usable state. */
1445 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1446 set
= single_set (insn
);
1449 rtx src
= SET_SRC (set
);
1451 if (GET_CODE (src
) == ASHIFTRT
1452 && GET_MODE (src
) == QImode
)
1454 rtx x
= XEXP (src
, 1);
1456 if (GET_CODE (x
) == CONST_INT
1460 cc_status
.value1
= SET_DEST (set
);
1461 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1469 /* Return maximum number of consecutive registers of
1470 class CLASS needed to hold a value of mode MODE. */
1473 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED
,enum machine_mode mode
)
1475 return ((GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
1478 /* Choose mode for jump insn:
1479 1 - relative jump in range -63 <= x <= 62 ;
1480 2 - relative jump in range -2046 <= x <= 2045 ;
1481 3 - absolute jump (only for ATmega[16]03). */
1484 avr_jump_mode (rtx x
, rtx insn
)
1486 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_CODE (x
) == LABEL_REF
1487 ? XEXP (x
, 0) : x
));
1488 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
1489 int jump_distance
= cur_addr
- dest_addr
;
1491 if (-63 <= jump_distance
&& jump_distance
<= 62)
1493 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
1495 else if (AVR_HAVE_JMP_CALL
)
1501 /* return an AVR condition jump commands.
1502 X is a comparison RTX.
1503 LEN is a number returned by avr_jump_mode function.
1504 if REVERSE nonzero then condition code in X must be reversed. */
1507 ret_cond_branch (rtx x
, int len
, int reverse
)
1509 RTX_CODE cond
= reverse
? reverse_condition (GET_CODE (x
)) : GET_CODE (x
);
1514 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1515 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1517 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1518 AS1 (brmi
,.+2) CR_TAB
1520 (AS1 (breq
,.+6) CR_TAB
1521 AS1 (brmi
,.+4) CR_TAB
1525 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1527 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1528 AS1 (brlt
,.+2) CR_TAB
1530 (AS1 (breq
,.+6) CR_TAB
1531 AS1 (brlt
,.+4) CR_TAB
1534 return (len
== 1 ? (AS1 (breq
,.+2) CR_TAB
1536 len
== 2 ? (AS1 (breq
,.+4) CR_TAB
1537 AS1 (brlo
,.+2) CR_TAB
1539 (AS1 (breq
,.+6) CR_TAB
1540 AS1 (brlo
,.+4) CR_TAB
1543 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1544 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1546 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1547 AS1 (brpl
,.+2) CR_TAB
1549 (AS1 (breq
,.+2) CR_TAB
1550 AS1 (brpl
,.+4) CR_TAB
1553 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1555 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1556 AS1 (brge
,.+2) CR_TAB
1558 (AS1 (breq
,.+2) CR_TAB
1559 AS1 (brge
,.+4) CR_TAB
1562 return (len
== 1 ? (AS1 (breq
,%0) CR_TAB
1564 len
== 2 ? (AS1 (breq
,.+2) CR_TAB
1565 AS1 (brsh
,.+2) CR_TAB
1567 (AS1 (breq
,.+2) CR_TAB
1568 AS1 (brsh
,.+4) CR_TAB
1576 return AS1 (br
%k1
,%0);
1578 return (AS1 (br
%j1
,.+2) CR_TAB
1581 return (AS1 (br
%j1
,.+4) CR_TAB
1590 return AS1 (br
%j1
,%0);
1592 return (AS1 (br
%k1
,.+2) CR_TAB
1595 return (AS1 (br
%k1
,.+4) CR_TAB
1603 /* Predicate function for immediate operand which fits to byte (8bit) */
1606 byte_immediate_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1608 return (GET_CODE (op
) == CONST_INT
1609 && INTVAL (op
) <= 0xff && INTVAL (op
) >= 0);
1612 /* Output insn cost for next insn. */
1615 final_prescan_insn (rtx insn
, rtx
*operand ATTRIBUTE_UNUSED
,
1616 int num_operands ATTRIBUTE_UNUSED
)
1618 if (TARGET_ALL_DEBUG
)
1620 fprintf (asm_out_file
, "/* DEBUG: cost = %d. */\n",
1621 rtx_cost (PATTERN (insn
), INSN
, !optimize_size
));
1625 /* Return 0 if undefined, 1 if always true or always false. */
1628 avr_simplify_comparison_p (enum machine_mode mode
, RTX_CODE op
, rtx x
)
1630 unsigned int max
= (mode
== QImode
? 0xff :
1631 mode
== HImode
? 0xffff :
1632 mode
== SImode
? 0xffffffff : 0);
1633 if (max
&& op
&& GET_CODE (x
) == CONST_INT
)
1635 if (unsigned_condition (op
) != op
)
1638 if (max
!= (INTVAL (x
) & max
)
1639 && INTVAL (x
) != 0xff)
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   On AVR the argument registers are r8 through r25.  */

int
function_arg_regno_p (int r)
{
  return (r >= 8 && r <= 25);
}
1655 /* Initializing the variable cum for the state at the beginning
1656 of the argument list. */
1659 init_cumulative_args (CUMULATIVE_ARGS
*cum
, tree fntype
, rtx libname
,
1660 tree fndecl ATTRIBUTE_UNUSED
)
1663 cum
->regno
= FIRST_CUM_REG
;
1664 if (!libname
&& stdarg_p (fntype
))
1667 /* Assume the calle may be tail called */
1669 cfun
->machine
->sibcall_fails
= 0;
1672 /* Returns the number of registers to allocate for a function argument. */
1675 avr_num_arg_regs (enum machine_mode mode
, const_tree type
)
1679 if (mode
== BLKmode
)
1680 size
= int_size_in_bytes (type
);
1682 size
= GET_MODE_SIZE (mode
);
1684 /* Align all function arguments to start in even-numbered registers.
1685 Odd-sized arguments leave holes above them. */
1687 return (size
+ 1) & ~1;
1690 /* Controls whether a function argument is passed
1691 in a register, and which register. */
1694 avr_function_arg (cumulative_args_t cum_v
, enum machine_mode mode
,
1695 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1697 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1698 int bytes
= avr_num_arg_regs (mode
, type
);
1700 if (cum
->nregs
&& bytes
<= cum
->nregs
)
1701 return gen_rtx_REG (mode
, cum
->regno
- bytes
);
1706 /* Update the summarizer variable CUM to advance past an argument
1707 in the argument list. */
1710 avr_function_arg_advance (cumulative_args_t cum_v
, enum machine_mode mode
,
1711 const_tree type
, bool named ATTRIBUTE_UNUSED
)
1713 CUMULATIVE_ARGS
*cum
= get_cumulative_args (cum_v
);
1714 int bytes
= avr_num_arg_regs (mode
, type
);
1716 cum
->nregs
-= bytes
;
1717 cum
->regno
-= bytes
;
1719 /* A parameter is being passed in a call-saved register. As the original
1720 contents of these regs has to be restored before leaving the function,
1721 a function must not pass arguments in call-saved regs in order to get
1726 && !call_used_regs
[cum
->regno
])
1728 /* FIXME: We ship info on failing tail-call in struct machine_function.
1729 This uses internals of calls.c:expand_call() and the way args_so_far
1730 is used. targetm.function_ok_for_sibcall() needs to be extended to
1731 pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
1732 dependent so that such an extension is not wanted. */
1734 cfun
->machine
->sibcall_fails
= 1;
1737 /* Test if all registers needed by the ABI are actually available. If the
1738 user has fixed a GPR needed to pass an argument, an (implicit) function
1739 call would clobber that fixed register. See PR45099 for an example. */
1746 for (regno
= cum
->regno
; regno
< cum
->regno
+ bytes
; regno
++)
1747 if (fixed_regs
[regno
])
1748 error ("Register %s is needed to pass a parameter but is fixed",
1752 if (cum
->nregs
<= 0)
1755 cum
->regno
= FIRST_CUM_REG
;
1759 /* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
1760 /* Decide whether we can make a sibling call to a function. DECL is the
1761 declaration of the function being targeted by the call and EXP is the
1762 CALL_EXPR representing the call. */
1765 avr_function_ok_for_sibcall (tree decl_callee
, tree exp_callee
)
1769 /* Tail-calling must fail if callee-saved regs are used to pass
1770 function args. We must not tail-call when `epilogue_restores'
1771 is used. Unfortunately, we cannot tell at this point if that
1772 actually will happen or not, and we cannot step back from
1773 tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */
1775 if (cfun
->machine
->sibcall_fails
1776 || TARGET_CALL_PROLOGUES
)
1781 fntype_callee
= TREE_TYPE (CALL_EXPR_FN (exp_callee
));
1785 decl_callee
= TREE_TYPE (decl_callee
);
1789 decl_callee
= fntype_callee
;
1791 while (FUNCTION_TYPE
!= TREE_CODE (decl_callee
)
1792 && METHOD_TYPE
!= TREE_CODE (decl_callee
))
1794 decl_callee
= TREE_TYPE (decl_callee
);
1798 /* Ensure that caller and callee have compatible epilogues */
1800 if (interrupt_function_p (current_function_decl
)
1801 || signal_function_p (current_function_decl
)
1802 || avr_naked_function_p (decl_callee
)
1803 || avr_naked_function_p (current_function_decl
)
1804 /* FIXME: For OS_task and OS_main, we are over-conservative.
1805 This is due to missing documentation of these attributes
1806 and what they actually should do and should not do. */
1807 || (avr_OS_task_function_p (decl_callee
)
1808 != avr_OS_task_function_p (current_function_decl
))
1809 || (avr_OS_main_function_p (decl_callee
)
1810 != avr_OS_main_function_p (current_function_decl
)))
1818 /***********************************************************************
1819 Functions for outputting various mov's for a various modes
1820 ************************************************************************/
1822 output_movqi (rtx insn
, rtx operands
[], int *l
)
1825 rtx dest
= operands
[0];
1826 rtx src
= operands
[1];
1834 if (register_operand (dest
, QImode
))
1836 if (register_operand (src
, QImode
)) /* mov r,r */
1838 if (test_hard_reg_class (STACK_REG
, dest
))
1839 return AS2 (out
,%0,%1);
1840 else if (test_hard_reg_class (STACK_REG
, src
))
1841 return AS2 (in
,%0,%1);
1843 return AS2 (mov
,%0,%1);
1845 else if (CONSTANT_P (src
))
1847 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1848 return AS2 (ldi
,%0,lo8(%1));
1850 if (GET_CODE (src
) == CONST_INT
)
1852 if (src
== const0_rtx
) /* mov r,L */
1853 return AS1 (clr
,%0);
1854 else if (src
== const1_rtx
)
1857 return (AS1 (clr
,%0) CR_TAB
1860 else if (src
== constm1_rtx
)
1862 /* Immediate constants -1 to any register */
1864 return (AS1 (clr
,%0) CR_TAB
1869 int bit_nr
= exact_log2 (INTVAL (src
));
1875 output_asm_insn ((AS1 (clr
,%0) CR_TAB
1878 avr_output_bld (operands
, bit_nr
);
1885 /* Last resort, larger than loading from memory. */
1887 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
1888 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
1889 AS2 (mov
,%0,r31
) CR_TAB
1890 AS2 (mov
,r31
,__tmp_reg__
));
1892 else if (GET_CODE (src
) == MEM
)
1893 return out_movqi_r_mr (insn
, operands
, real_l
); /* mov r,m */
1895 else if (GET_CODE (dest
) == MEM
)
1899 if (src
== const0_rtx
)
1900 operands
[1] = zero_reg_rtx
;
1902 templ
= out_movqi_mr_r (insn
, operands
, real_l
);
1905 output_asm_insn (templ
, operands
);
1914 output_movhi (rtx insn
, rtx operands
[], int *l
)
1917 rtx dest
= operands
[0];
1918 rtx src
= operands
[1];
1924 if (register_operand (dest
, HImode
))
1926 if (register_operand (src
, HImode
)) /* mov r,r */
1928 if (test_hard_reg_class (STACK_REG
, dest
))
1930 if (AVR_HAVE_8BIT_SP
)
1931 return *l
= 1, AS2 (out
,__SP_L__
,%A1
);
1932 /* Use simple load of stack pointer if no interrupts are
1934 else if (TARGET_NO_INTERRUPTS
)
1935 return *l
= 2, (AS2 (out
,__SP_H__
,%B1
) CR_TAB
1936 AS2 (out
,__SP_L__
,%A1
));
1938 return (AS2 (in
,__tmp_reg__
,__SREG__
) CR_TAB
1940 AS2 (out
,__SP_H__
,%B1
) CR_TAB
1941 AS2 (out
,__SREG__
,__tmp_reg__
) CR_TAB
1942 AS2 (out
,__SP_L__
,%A1
));
1944 else if (test_hard_reg_class (STACK_REG
, src
))
1947 return (AS2 (in
,%A0
,__SP_L__
) CR_TAB
1948 AS2 (in
,%B0
,__SP_H__
));
1954 return (AS2 (movw
,%0,%1));
1959 return (AS2 (mov
,%A0
,%A1
) CR_TAB
1963 else if (CONSTANT_P (src
))
1965 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
1968 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
1969 AS2 (ldi
,%B0
,hi8(%1)));
1972 if (GET_CODE (src
) == CONST_INT
)
1974 if (src
== const0_rtx
) /* mov r,L */
1977 return (AS1 (clr
,%A0
) CR_TAB
1980 else if (src
== const1_rtx
)
1983 return (AS1 (clr
,%A0
) CR_TAB
1984 AS1 (clr
,%B0
) CR_TAB
1987 else if (src
== constm1_rtx
)
1989 /* Immediate constants -1 to any register */
1991 return (AS1 (clr
,%0) CR_TAB
1992 AS1 (dec
,%A0
) CR_TAB
1997 int bit_nr
= exact_log2 (INTVAL (src
));
2003 output_asm_insn ((AS1 (clr
,%A0
) CR_TAB
2004 AS1 (clr
,%B0
) CR_TAB
2007 avr_output_bld (operands
, bit_nr
);
2013 if ((INTVAL (src
) & 0xff) == 0)
2016 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2017 AS1 (clr
,%A0
) CR_TAB
2018 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2019 AS2 (mov
,%B0
,r31
) CR_TAB
2020 AS2 (mov
,r31
,__tmp_reg__
));
2022 else if ((INTVAL (src
) & 0xff00) == 0)
2025 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2026 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2027 AS2 (mov
,%A0
,r31
) CR_TAB
2028 AS1 (clr
,%B0
) CR_TAB
2029 AS2 (mov
,r31
,__tmp_reg__
));
2033 /* Last resort, equal to loading from memory. */
2035 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2036 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2037 AS2 (mov
,%A0
,r31
) CR_TAB
2038 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2039 AS2 (mov
,%B0
,r31
) CR_TAB
2040 AS2 (mov
,r31
,__tmp_reg__
));
2042 else if (GET_CODE (src
) == MEM
)
2043 return out_movhi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2045 else if (GET_CODE (dest
) == MEM
)
2049 if (src
== const0_rtx
)
2050 operands
[1] = zero_reg_rtx
;
2052 templ
= out_movhi_mr_r (insn
, operands
, real_l
);
2055 output_asm_insn (templ
, operands
);
2060 fatal_insn ("invalid insn:", insn
);
2065 out_movqi_r_mr (rtx insn
, rtx op
[], int *l
)
2069 rtx x
= XEXP (src
, 0);
2075 if (CONSTANT_ADDRESS_P (x
))
2077 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
2080 return AS2 (in
,%0,__SREG__
);
2082 if (optimize
> 0 && io_address_operand (x
, QImode
))
2085 return AS2 (in
,%0,%m1
-0x20);
2088 return AS2 (lds
,%0,%m1
);
2090 /* memory access by reg+disp */
2091 else if (GET_CODE (x
) == PLUS
2092 && REG_P (XEXP (x
,0))
2093 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
2095 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (src
))) >= 63)
2097 int disp
= INTVAL (XEXP (x
,1));
2098 if (REGNO (XEXP (x
,0)) != REG_Y
)
2099 fatal_insn ("incorrect insn:",insn
);
2101 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2102 return *l
= 3, (AS2 (adiw
,r28
,%o1
-63) CR_TAB
2103 AS2 (ldd
,%0,Y
+63) CR_TAB
2104 AS2 (sbiw
,r28
,%o1
-63));
2106 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2107 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2108 AS2 (ld
,%0,Y
) CR_TAB
2109 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2110 AS2 (sbci
,r29
,hi8(%o1
)));
2112 else if (REGNO (XEXP (x
,0)) == REG_X
)
2114 /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
2115 it but I have this situation with extremal optimizing options. */
2116 if (reg_overlap_mentioned_p (dest
, XEXP (x
,0))
2117 || reg_unused_after (insn
, XEXP (x
,0)))
2118 return *l
= 2, (AS2 (adiw
,r26
,%o1
) CR_TAB
2121 return *l
= 3, (AS2 (adiw
,r26
,%o1
) CR_TAB
2122 AS2 (ld
,%0,X
) CR_TAB
2123 AS2 (sbiw
,r26
,%o1
));
2126 return AS2 (ldd
,%0,%1);
2129 return AS2 (ld
,%0,%1);
2133 out_movhi_r_mr (rtx insn
, rtx op
[], int *l
)
2137 rtx base
= XEXP (src
, 0);
2138 int reg_dest
= true_regnum (dest
);
2139 int reg_base
= true_regnum (base
);
2140 /* "volatile" forces reading low byte first, even if less efficient,
2141 for correct operation with 16-bit I/O registers. */
2142 int mem_volatile_p
= MEM_VOLATILE_P (src
);
2150 if (reg_dest
== reg_base
) /* R = (R) */
2153 return (AS2 (ld
,__tmp_reg__
,%1+) CR_TAB
2154 AS2 (ld
,%B0
,%1) CR_TAB
2155 AS2 (mov
,%A0
,__tmp_reg__
));
2157 else if (reg_base
== REG_X
) /* (R26) */
2159 if (reg_unused_after (insn
, base
))
2162 return (AS2 (ld
,%A0
,X
+) CR_TAB
2166 return (AS2 (ld
,%A0
,X
+) CR_TAB
2167 AS2 (ld
,%B0
,X
) CR_TAB
2173 return (AS2 (ld
,%A0
,%1) CR_TAB
2174 AS2 (ldd
,%B0
,%1+1));
2177 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2179 int disp
= INTVAL (XEXP (base
, 1));
2180 int reg_base
= true_regnum (XEXP (base
, 0));
2182 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2184 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2185 fatal_insn ("incorrect insn:",insn
);
2187 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2188 return *l
= 4, (AS2 (adiw
,r28
,%o1
-62) CR_TAB
2189 AS2 (ldd
,%A0
,Y
+62) CR_TAB
2190 AS2 (ldd
,%B0
,Y
+63) CR_TAB
2191 AS2 (sbiw
,r28
,%o1
-62));
2193 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2194 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2195 AS2 (ld
,%A0
,Y
) CR_TAB
2196 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2197 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2198 AS2 (sbci
,r29
,hi8(%o1
)));
2200 if (reg_base
== REG_X
)
2202 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2203 it but I have this situation with extremal
2204 optimization options. */
2207 if (reg_base
== reg_dest
)
2208 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2209 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2210 AS2 (ld
,%B0
,X
) CR_TAB
2211 AS2 (mov
,%A0
,__tmp_reg__
));
2213 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2214 AS2 (ld
,%A0
,X
+) CR_TAB
2215 AS2 (ld
,%B0
,X
) CR_TAB
2216 AS2 (sbiw
,r26
,%o1
+1));
2219 if (reg_base
== reg_dest
)
2222 return (AS2 (ldd
,__tmp_reg__
,%A1
) CR_TAB
2223 AS2 (ldd
,%B0
,%B1
) CR_TAB
2224 AS2 (mov
,%A0
,__tmp_reg__
));
2228 return (AS2 (ldd
,%A0
,%A1
) CR_TAB
2231 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2233 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2234 fatal_insn ("incorrect insn:", insn
);
2238 if (REGNO (XEXP (base
, 0)) == REG_X
)
2241 return (AS2 (sbiw
,r26
,2) CR_TAB
2242 AS2 (ld
,%A0
,X
+) CR_TAB
2243 AS2 (ld
,%B0
,X
) CR_TAB
2249 return (AS2 (sbiw
,%r1
,2) CR_TAB
2250 AS2 (ld
,%A0
,%p1
) CR_TAB
2251 AS2 (ldd
,%B0
,%p1
+1));
2256 return (AS2 (ld
,%B0
,%1) CR_TAB
2259 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2261 if (reg_overlap_mentioned_p (dest
, XEXP (base
, 0)))
2262 fatal_insn ("incorrect insn:", insn
);
2265 return (AS2 (ld
,%A0
,%1) CR_TAB
2268 else if (CONSTANT_ADDRESS_P (base
))
2270 if (optimize
> 0 && io_address_operand (base
, HImode
))
2273 return (AS2 (in
,%A0
,%m1
-0x20) CR_TAB
2274 AS2 (in
,%B0
,%m1
+1-0x20));
2277 return (AS2 (lds
,%A0
,%m1
) CR_TAB
2278 AS2 (lds
,%B0
,%m1
+1));
2281 fatal_insn ("unknown move insn:",insn
);
2286 out_movsi_r_mr (rtx insn
, rtx op
[], int *l
)
2290 rtx base
= XEXP (src
, 0);
2291 int reg_dest
= true_regnum (dest
);
2292 int reg_base
= true_regnum (base
);
2300 if (reg_base
== REG_X
) /* (R26) */
2302 if (reg_dest
== REG_X
)
2303 /* "ld r26,-X" is undefined */
2304 return *l
=7, (AS2 (adiw
,r26
,3) CR_TAB
2305 AS2 (ld
,r29
,X
) CR_TAB
2306 AS2 (ld
,r28
,-X
) CR_TAB
2307 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2308 AS2 (sbiw
,r26
,1) CR_TAB
2309 AS2 (ld
,r26
,X
) CR_TAB
2310 AS2 (mov
,r27
,__tmp_reg__
));
2311 else if (reg_dest
== REG_X
- 2)
2312 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2313 AS2 (ld
,%B0
,X
+) CR_TAB
2314 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2315 AS2 (ld
,%D0
,X
) CR_TAB
2316 AS2 (mov
,%C0
,__tmp_reg__
));
2317 else if (reg_unused_after (insn
, base
))
2318 return *l
=4, (AS2 (ld
,%A0
,X
+) CR_TAB
2319 AS2 (ld
,%B0
,X
+) CR_TAB
2320 AS2 (ld
,%C0
,X
+) CR_TAB
2323 return *l
=5, (AS2 (ld
,%A0
,X
+) CR_TAB
2324 AS2 (ld
,%B0
,X
+) CR_TAB
2325 AS2 (ld
,%C0
,X
+) CR_TAB
2326 AS2 (ld
,%D0
,X
) CR_TAB
2331 if (reg_dest
== reg_base
)
2332 return *l
=5, (AS2 (ldd
,%D0
,%1+3) CR_TAB
2333 AS2 (ldd
,%C0
,%1+2) CR_TAB
2334 AS2 (ldd
,__tmp_reg__
,%1+1) CR_TAB
2335 AS2 (ld
,%A0
,%1) CR_TAB
2336 AS2 (mov
,%B0
,__tmp_reg__
));
2337 else if (reg_base
== reg_dest
+ 2)
2338 return *l
=5, (AS2 (ld
,%A0
,%1) CR_TAB
2339 AS2 (ldd
,%B0
,%1+1) CR_TAB
2340 AS2 (ldd
,__tmp_reg__
,%1+2) CR_TAB
2341 AS2 (ldd
,%D0
,%1+3) CR_TAB
2342 AS2 (mov
,%C0
,__tmp_reg__
));
2344 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2345 AS2 (ldd
,%B0
,%1+1) CR_TAB
2346 AS2 (ldd
,%C0
,%1+2) CR_TAB
2347 AS2 (ldd
,%D0
,%1+3));
2350 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2352 int disp
= INTVAL (XEXP (base
, 1));
2354 if (disp
> MAX_LD_OFFSET (GET_MODE (src
)))
2356 if (REGNO (XEXP (base
, 0)) != REG_Y
)
2357 fatal_insn ("incorrect insn:",insn
);
2359 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (src
)))
2360 return *l
= 6, (AS2 (adiw
,r28
,%o1
-60) CR_TAB
2361 AS2 (ldd
,%A0
,Y
+60) CR_TAB
2362 AS2 (ldd
,%B0
,Y
+61) CR_TAB
2363 AS2 (ldd
,%C0
,Y
+62) CR_TAB
2364 AS2 (ldd
,%D0
,Y
+63) CR_TAB
2365 AS2 (sbiw
,r28
,%o1
-60));
2367 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o1
)) CR_TAB
2368 AS2 (sbci
,r29
,hi8(-%o1
)) CR_TAB
2369 AS2 (ld
,%A0
,Y
) CR_TAB
2370 AS2 (ldd
,%B0
,Y
+1) CR_TAB
2371 AS2 (ldd
,%C0
,Y
+2) CR_TAB
2372 AS2 (ldd
,%D0
,Y
+3) CR_TAB
2373 AS2 (subi
,r28
,lo8(%o1
)) CR_TAB
2374 AS2 (sbci
,r29
,hi8(%o1
)));
2377 reg_base
= true_regnum (XEXP (base
, 0));
2378 if (reg_base
== REG_X
)
2381 if (reg_dest
== REG_X
)
2384 /* "ld r26,-X" is undefined */
2385 return (AS2 (adiw
,r26
,%o1
+3) CR_TAB
2386 AS2 (ld
,r29
,X
) CR_TAB
2387 AS2 (ld
,r28
,-X
) CR_TAB
2388 AS2 (ld
,__tmp_reg__
,-X
) CR_TAB
2389 AS2 (sbiw
,r26
,1) CR_TAB
2390 AS2 (ld
,r26
,X
) CR_TAB
2391 AS2 (mov
,r27
,__tmp_reg__
));
2394 if (reg_dest
== REG_X
- 2)
2395 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2396 AS2 (ld
,r24
,X
+) CR_TAB
2397 AS2 (ld
,r25
,X
+) CR_TAB
2398 AS2 (ld
,__tmp_reg__
,X
+) CR_TAB
2399 AS2 (ld
,r27
,X
) CR_TAB
2400 AS2 (mov
,r26
,__tmp_reg__
));
2402 return (AS2 (adiw
,r26
,%o1
) CR_TAB
2403 AS2 (ld
,%A0
,X
+) CR_TAB
2404 AS2 (ld
,%B0
,X
+) CR_TAB
2405 AS2 (ld
,%C0
,X
+) CR_TAB
2406 AS2 (ld
,%D0
,X
) CR_TAB
2407 AS2 (sbiw
,r26
,%o1
+3));
2409 if (reg_dest
== reg_base
)
2410 return *l
=5, (AS2 (ldd
,%D0
,%D1
) CR_TAB
2411 AS2 (ldd
,%C0
,%C1
) CR_TAB
2412 AS2 (ldd
,__tmp_reg__
,%B1
) CR_TAB
2413 AS2 (ldd
,%A0
,%A1
) CR_TAB
2414 AS2 (mov
,%B0
,__tmp_reg__
));
2415 else if (reg_dest
== reg_base
- 2)
2416 return *l
=5, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2417 AS2 (ldd
,%B0
,%B1
) CR_TAB
2418 AS2 (ldd
,__tmp_reg__
,%C1
) CR_TAB
2419 AS2 (ldd
,%D0
,%D1
) CR_TAB
2420 AS2 (mov
,%C0
,__tmp_reg__
));
2421 return *l
=4, (AS2 (ldd
,%A0
,%A1
) CR_TAB
2422 AS2 (ldd
,%B0
,%B1
) CR_TAB
2423 AS2 (ldd
,%C0
,%C1
) CR_TAB
2426 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2427 return *l
=4, (AS2 (ld
,%D0
,%1) CR_TAB
2428 AS2 (ld
,%C0
,%1) CR_TAB
2429 AS2 (ld
,%B0
,%1) CR_TAB
2431 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2432 return *l
=4, (AS2 (ld
,%A0
,%1) CR_TAB
2433 AS2 (ld
,%B0
,%1) CR_TAB
2434 AS2 (ld
,%C0
,%1) CR_TAB
2436 else if (CONSTANT_ADDRESS_P (base
))
2437 return *l
=8, (AS2 (lds
,%A0
,%m1
) CR_TAB
2438 AS2 (lds
,%B0
,%m1
+1) CR_TAB
2439 AS2 (lds
,%C0
,%m1
+2) CR_TAB
2440 AS2 (lds
,%D0
,%m1
+3));
2442 fatal_insn ("unknown move insn:",insn
);
2447 out_movsi_mr_r (rtx insn
, rtx op
[], int *l
)
2451 rtx base
= XEXP (dest
, 0);
2452 int reg_base
= true_regnum (base
);
2453 int reg_src
= true_regnum (src
);
2459 if (CONSTANT_ADDRESS_P (base
))
2460 return *l
=8,(AS2 (sts
,%m0
,%A1
) CR_TAB
2461 AS2 (sts
,%m0
+1,%B1
) CR_TAB
2462 AS2 (sts
,%m0
+2,%C1
) CR_TAB
2463 AS2 (sts
,%m0
+3,%D1
));
2464 if (reg_base
> 0) /* (r) */
2466 if (reg_base
== REG_X
) /* (R26) */
2468 if (reg_src
== REG_X
)
2470 /* "st X+,r26" is undefined */
2471 if (reg_unused_after (insn
, base
))
2472 return *l
=6, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2473 AS2 (st
,X
,r26
) CR_TAB
2474 AS2 (adiw
,r26
,1) CR_TAB
2475 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2476 AS2 (st
,X
+,r28
) CR_TAB
2479 return *l
=7, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2480 AS2 (st
,X
,r26
) CR_TAB
2481 AS2 (adiw
,r26
,1) CR_TAB
2482 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2483 AS2 (st
,X
+,r28
) CR_TAB
2484 AS2 (st
,X
,r29
) CR_TAB
2487 else if (reg_base
== reg_src
+ 2)
2489 if (reg_unused_after (insn
, base
))
2490 return *l
=7, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2491 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2492 AS2 (st
,%0+,%A1
) CR_TAB
2493 AS2 (st
,%0+,%B1
) CR_TAB
2494 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2495 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2496 AS1 (clr
,__zero_reg__
));
2498 return *l
=8, (AS2 (mov
,__zero_reg__
,%C1
) CR_TAB
2499 AS2 (mov
,__tmp_reg__
,%D1
) CR_TAB
2500 AS2 (st
,%0+,%A1
) CR_TAB
2501 AS2 (st
,%0+,%B1
) CR_TAB
2502 AS2 (st
,%0+,__zero_reg__
) CR_TAB
2503 AS2 (st
,%0,__tmp_reg__
) CR_TAB
2504 AS1 (clr
,__zero_reg__
) CR_TAB
2507 return *l
=5, (AS2 (st
,%0+,%A1
) CR_TAB
2508 AS2 (st
,%0+,%B1
) CR_TAB
2509 AS2 (st
,%0+,%C1
) CR_TAB
2510 AS2 (st
,%0,%D1
) CR_TAB
2514 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2515 AS2 (std
,%0+1,%B1
) CR_TAB
2516 AS2 (std
,%0+2,%C1
) CR_TAB
2517 AS2 (std
,%0+3,%D1
));
2519 else if (GET_CODE (base
) == PLUS
) /* (R + i) */
2521 int disp
= INTVAL (XEXP (base
, 1));
2522 reg_base
= REGNO (XEXP (base
, 0));
2523 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2525 if (reg_base
!= REG_Y
)
2526 fatal_insn ("incorrect insn:",insn
);
2528 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2529 return *l
= 6, (AS2 (adiw
,r28
,%o0
-60) CR_TAB
2530 AS2 (std
,Y
+60,%A1
) CR_TAB
2531 AS2 (std
,Y
+61,%B1
) CR_TAB
2532 AS2 (std
,Y
+62,%C1
) CR_TAB
2533 AS2 (std
,Y
+63,%D1
) CR_TAB
2534 AS2 (sbiw
,r28
,%o0
-60));
2536 return *l
= 8, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2537 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2538 AS2 (st
,Y
,%A1
) CR_TAB
2539 AS2 (std
,Y
+1,%B1
) CR_TAB
2540 AS2 (std
,Y
+2,%C1
) CR_TAB
2541 AS2 (std
,Y
+3,%D1
) CR_TAB
2542 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2543 AS2 (sbci
,r29
,hi8(%o0
)));
2545 if (reg_base
== REG_X
)
2548 if (reg_src
== REG_X
)
2551 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2552 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2553 AS2 (adiw
,r26
,%o0
) CR_TAB
2554 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2555 AS2 (st
,X
+,__zero_reg__
) CR_TAB
2556 AS2 (st
,X
+,r28
) CR_TAB
2557 AS2 (st
,X
,r29
) CR_TAB
2558 AS1 (clr
,__zero_reg__
) CR_TAB
2559 AS2 (sbiw
,r26
,%o0
+3));
2561 else if (reg_src
== REG_X
- 2)
2564 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2565 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2566 AS2 (adiw
,r26
,%o0
) CR_TAB
2567 AS2 (st
,X
+,r24
) CR_TAB
2568 AS2 (st
,X
+,r25
) CR_TAB
2569 AS2 (st
,X
+,__tmp_reg__
) CR_TAB
2570 AS2 (st
,X
,__zero_reg__
) CR_TAB
2571 AS1 (clr
,__zero_reg__
) CR_TAB
2572 AS2 (sbiw
,r26
,%o0
+3));
2575 return (AS2 (adiw
,r26
,%o0
) CR_TAB
2576 AS2 (st
,X
+,%A1
) CR_TAB
2577 AS2 (st
,X
+,%B1
) CR_TAB
2578 AS2 (st
,X
+,%C1
) CR_TAB
2579 AS2 (st
,X
,%D1
) CR_TAB
2580 AS2 (sbiw
,r26
,%o0
+3));
2582 return *l
=4, (AS2 (std
,%A0
,%A1
) CR_TAB
2583 AS2 (std
,%B0
,%B1
) CR_TAB
2584 AS2 (std
,%C0
,%C1
) CR_TAB
2587 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2588 return *l
=4, (AS2 (st
,%0,%D1
) CR_TAB
2589 AS2 (st
,%0,%C1
) CR_TAB
2590 AS2 (st
,%0,%B1
) CR_TAB
2592 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2593 return *l
=4, (AS2 (st
,%0,%A1
) CR_TAB
2594 AS2 (st
,%0,%B1
) CR_TAB
2595 AS2 (st
,%0,%C1
) CR_TAB
2597 fatal_insn ("unknown move insn:",insn
);
2602 output_movsisf(rtx insn
, rtx operands
[], int *l
)
2605 rtx dest
= operands
[0];
2606 rtx src
= operands
[1];
2612 if (register_operand (dest
, VOIDmode
))
2614 if (register_operand (src
, VOIDmode
)) /* mov r,r */
2616 if (true_regnum (dest
) > true_regnum (src
))
2621 return (AS2 (movw
,%C0
,%C1
) CR_TAB
2622 AS2 (movw
,%A0
,%A1
));
2625 return (AS2 (mov
,%D0
,%D1
) CR_TAB
2626 AS2 (mov
,%C0
,%C1
) CR_TAB
2627 AS2 (mov
,%B0
,%B1
) CR_TAB
2635 return (AS2 (movw
,%A0
,%A1
) CR_TAB
2636 AS2 (movw
,%C0
,%C1
));
2639 return (AS2 (mov
,%A0
,%A1
) CR_TAB
2640 AS2 (mov
,%B0
,%B1
) CR_TAB
2641 AS2 (mov
,%C0
,%C1
) CR_TAB
2645 else if (CONSTANT_P (src
))
2647 if (test_hard_reg_class (LD_REGS
, dest
)) /* ldi d,i */
2650 return (AS2 (ldi
,%A0
,lo8(%1)) CR_TAB
2651 AS2 (ldi
,%B0
,hi8(%1)) CR_TAB
2652 AS2 (ldi
,%C0
,hlo8(%1)) CR_TAB
2653 AS2 (ldi
,%D0
,hhi8(%1)));
2656 if (GET_CODE (src
) == CONST_INT
)
2658 const char *const clr_op0
=
2659 AVR_HAVE_MOVW
? (AS1 (clr
,%A0
) CR_TAB
2660 AS1 (clr
,%B0
) CR_TAB
2662 : (AS1 (clr
,%A0
) CR_TAB
2663 AS1 (clr
,%B0
) CR_TAB
2664 AS1 (clr
,%C0
) CR_TAB
2667 if (src
== const0_rtx
) /* mov r,L */
2669 *l
= AVR_HAVE_MOVW
? 3 : 4;
2672 else if (src
== const1_rtx
)
2675 output_asm_insn (clr_op0
, operands
);
2676 *l
= AVR_HAVE_MOVW
? 4 : 5;
2677 return AS1 (inc
,%A0
);
2679 else if (src
== constm1_rtx
)
2681 /* Immediate constants -1 to any register */
2685 return (AS1 (clr
,%A0
) CR_TAB
2686 AS1 (dec
,%A0
) CR_TAB
2687 AS2 (mov
,%B0
,%A0
) CR_TAB
2688 AS2 (movw
,%C0
,%A0
));
2691 return (AS1 (clr
,%A0
) CR_TAB
2692 AS1 (dec
,%A0
) CR_TAB
2693 AS2 (mov
,%B0
,%A0
) CR_TAB
2694 AS2 (mov
,%C0
,%A0
) CR_TAB
2699 int bit_nr
= exact_log2 (INTVAL (src
));
2703 *l
= AVR_HAVE_MOVW
? 5 : 6;
2706 output_asm_insn (clr_op0
, operands
);
2707 output_asm_insn ("set", operands
);
2710 avr_output_bld (operands
, bit_nr
);
2717 /* Last resort, better than loading from memory. */
2719 return (AS2 (mov
,__tmp_reg__
,r31
) CR_TAB
2720 AS2 (ldi
,r31
,lo8(%1)) CR_TAB
2721 AS2 (mov
,%A0
,r31
) CR_TAB
2722 AS2 (ldi
,r31
,hi8(%1)) CR_TAB
2723 AS2 (mov
,%B0
,r31
) CR_TAB
2724 AS2 (ldi
,r31
,hlo8(%1)) CR_TAB
2725 AS2 (mov
,%C0
,r31
) CR_TAB
2726 AS2 (ldi
,r31
,hhi8(%1)) CR_TAB
2727 AS2 (mov
,%D0
,r31
) CR_TAB
2728 AS2 (mov
,r31
,__tmp_reg__
));
2730 else if (GET_CODE (src
) == MEM
)
2731 return out_movsi_r_mr (insn
, operands
, real_l
); /* mov r,m */
2733 else if (GET_CODE (dest
) == MEM
)
2737 if (src
== const0_rtx
)
2738 operands
[1] = zero_reg_rtx
;
2740 templ
= out_movsi_mr_r (insn
, operands
, real_l
);
2743 output_asm_insn (templ
, operands
);
2748 fatal_insn ("invalid insn:", insn
);
2753 out_movqi_mr_r (rtx insn
, rtx op
[], int *l
)
2757 rtx x
= XEXP (dest
, 0);
2763 if (CONSTANT_ADDRESS_P (x
))
2765 if (CONST_INT_P (x
) && INTVAL (x
) == SREG_ADDR
)
2768 return AS2 (out
,__SREG__
,%1);
2770 if (optimize
> 0 && io_address_operand (x
, QImode
))
2773 return AS2 (out
,%m0
-0x20,%1);
2776 return AS2 (sts
,%m0
,%1);
2778 /* memory access by reg+disp */
2779 else if (GET_CODE (x
) == PLUS
2780 && REG_P (XEXP (x
,0))
2781 && GET_CODE (XEXP (x
,1)) == CONST_INT
)
2783 if ((INTVAL (XEXP (x
,1)) - GET_MODE_SIZE (GET_MODE (dest
))) >= 63)
2785 int disp
= INTVAL (XEXP (x
,1));
2786 if (REGNO (XEXP (x
,0)) != REG_Y
)
2787 fatal_insn ("incorrect insn:",insn
);
2789 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2790 return *l
= 3, (AS2 (adiw
,r28
,%o0
-63) CR_TAB
2791 AS2 (std
,Y
+63,%1) CR_TAB
2792 AS2 (sbiw
,r28
,%o0
-63));
2794 return *l
= 5, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2795 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2796 AS2 (st
,Y
,%1) CR_TAB
2797 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2798 AS2 (sbci
,r29
,hi8(%o0
)));
2800 else if (REGNO (XEXP (x
,0)) == REG_X
)
2802 if (reg_overlap_mentioned_p (src
, XEXP (x
, 0)))
2804 if (reg_unused_after (insn
, XEXP (x
,0)))
2805 return *l
= 3, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2806 AS2 (adiw
,r26
,%o0
) CR_TAB
2807 AS2 (st
,X
,__tmp_reg__
));
2809 return *l
= 4, (AS2 (mov
,__tmp_reg__
,%1) CR_TAB
2810 AS2 (adiw
,r26
,%o0
) CR_TAB
2811 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2812 AS2 (sbiw
,r26
,%o0
));
2816 if (reg_unused_after (insn
, XEXP (x
,0)))
2817 return *l
= 2, (AS2 (adiw
,r26
,%o0
) CR_TAB
2820 return *l
= 3, (AS2 (adiw
,r26
,%o0
) CR_TAB
2821 AS2 (st
,X
,%1) CR_TAB
2822 AS2 (sbiw
,r26
,%o0
));
2826 return AS2 (std
,%0,%1);
2829 return AS2 (st
,%0,%1);
2833 out_movhi_mr_r (rtx insn
, rtx op
[], int *l
)
2837 rtx base
= XEXP (dest
, 0);
2838 int reg_base
= true_regnum (base
);
2839 int reg_src
= true_regnum (src
);
2840 /* "volatile" forces writing high byte first, even if less efficient,
2841 for correct operation with 16-bit I/O registers. */
2842 int mem_volatile_p
= MEM_VOLATILE_P (dest
);
2847 if (CONSTANT_ADDRESS_P (base
))
2849 if (optimize
> 0 && io_address_operand (base
, HImode
))
2852 return (AS2 (out
,%m0
+1-0x20,%B1
) CR_TAB
2853 AS2 (out
,%m0
-0x20,%A1
));
2855 return *l
= 4, (AS2 (sts
,%m0
+1,%B1
) CR_TAB
2860 if (reg_base
== REG_X
)
2862 if (reg_src
== REG_X
)
2864 /* "st X+,r26" and "st -X,r26" are undefined. */
2865 if (!mem_volatile_p
&& reg_unused_after (insn
, src
))
2866 return *l
=4, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2867 AS2 (st
,X
,r26
) CR_TAB
2868 AS2 (adiw
,r26
,1) CR_TAB
2869 AS2 (st
,X
,__tmp_reg__
));
2871 return *l
=5, (AS2 (mov
,__tmp_reg__
,r27
) CR_TAB
2872 AS2 (adiw
,r26
,1) CR_TAB
2873 AS2 (st
,X
,__tmp_reg__
) CR_TAB
2874 AS2 (sbiw
,r26
,1) CR_TAB
2879 if (!mem_volatile_p
&& reg_unused_after (insn
, base
))
2880 return *l
=2, (AS2 (st
,X
+,%A1
) CR_TAB
2883 return *l
=3, (AS2 (adiw
,r26
,1) CR_TAB
2884 AS2 (st
,X
,%B1
) CR_TAB
2889 return *l
=2, (AS2 (std
,%0+1,%B1
) CR_TAB
2892 else if (GET_CODE (base
) == PLUS
)
2894 int disp
= INTVAL (XEXP (base
, 1));
2895 reg_base
= REGNO (XEXP (base
, 0));
2896 if (disp
> MAX_LD_OFFSET (GET_MODE (dest
)))
2898 if (reg_base
!= REG_Y
)
2899 fatal_insn ("incorrect insn:",insn
);
2901 if (disp
<= 63 + MAX_LD_OFFSET (GET_MODE (dest
)))
2902 return *l
= 4, (AS2 (adiw
,r28
,%o0
-62) CR_TAB
2903 AS2 (std
,Y
+63,%B1
) CR_TAB
2904 AS2 (std
,Y
+62,%A1
) CR_TAB
2905 AS2 (sbiw
,r28
,%o0
-62));
2907 return *l
= 6, (AS2 (subi
,r28
,lo8(-%o0
)) CR_TAB
2908 AS2 (sbci
,r29
,hi8(-%o0
)) CR_TAB
2909 AS2 (std
,Y
+1,%B1
) CR_TAB
2910 AS2 (st
,Y
,%A1
) CR_TAB
2911 AS2 (subi
,r28
,lo8(%o0
)) CR_TAB
2912 AS2 (sbci
,r29
,hi8(%o0
)));
2914 if (reg_base
== REG_X
)
2917 if (reg_src
== REG_X
)
2920 return (AS2 (mov
,__tmp_reg__
,r26
) CR_TAB
2921 AS2 (mov
,__zero_reg__
,r27
) CR_TAB
2922 AS2 (adiw
,r26
,%o0
+1) CR_TAB
2923 AS2 (st
,X
,__zero_reg__
) CR_TAB
2924 AS2 (st
,-X
,__tmp_reg__
) CR_TAB
2925 AS1 (clr
,__zero_reg__
) CR_TAB
2926 AS2 (sbiw
,r26
,%o0
));
2929 return (AS2 (adiw
,r26
,%o0
+1) CR_TAB
2930 AS2 (st
,X
,%B1
) CR_TAB
2931 AS2 (st
,-X
,%A1
) CR_TAB
2932 AS2 (sbiw
,r26
,%o0
));
2934 return *l
=2, (AS2 (std
,%B0
,%B1
) CR_TAB
2937 else if (GET_CODE (base
) == PRE_DEC
) /* (--R) */
2938 return *l
=2, (AS2 (st
,%0,%B1
) CR_TAB
2940 else if (GET_CODE (base
) == POST_INC
) /* (R++) */
2944 if (REGNO (XEXP (base
, 0)) == REG_X
)
2947 return (AS2 (adiw
,r26
,1) CR_TAB
2948 AS2 (st
,X
,%B1
) CR_TAB
2949 AS2 (st
,-X
,%A1
) CR_TAB
2955 return (AS2 (std
,%p0
+1,%B1
) CR_TAB
2956 AS2 (st
,%p0
,%A1
) CR_TAB
2962 return (AS2 (st
,%0,%A1
) CR_TAB
2965 fatal_insn ("unknown move insn:",insn
);
2969 /* Return 1 if frame pointer for current function required. */
2972 avr_frame_pointer_required_p (void)
2974 return (cfun
->calls_alloca
2975 || crtl
->args
.info
.nregs
== 0
2976 || get_frame_size () > 0);
2979 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2982 compare_condition (rtx insn
)
2984 rtx next
= next_real_insn (insn
);
2985 RTX_CODE cond
= UNKNOWN
;
2986 if (next
&& GET_CODE (next
) == JUMP_INSN
)
2988 rtx pat
= PATTERN (next
);
2989 rtx src
= SET_SRC (pat
);
2990 rtx t
= XEXP (src
, 0);
2991 cond
= GET_CODE (t
);
2996 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2999 compare_sign_p (rtx insn
)
3001 RTX_CODE cond
= compare_condition (insn
);
3002 return (cond
== GE
|| cond
== LT
);
3005 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
3006 that needs to be swapped (GT, GTU, LE, LEU). */
3009 compare_diff_p (rtx insn
)
3011 RTX_CODE cond
= compare_condition (insn
);
3012 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
3015 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
3018 compare_eq_p (rtx insn
)
3020 RTX_CODE cond
= compare_condition (insn
);
3021 return (cond
== EQ
|| cond
== NE
);
3025 /* Output test instruction for HImode. */
3028 out_tsthi (rtx insn
, rtx op
, int *l
)
3030 if (compare_sign_p (insn
))
3033 return AS1 (tst
,%B0
);
3035 if (reg_unused_after (insn
, op
)
3036 && compare_eq_p (insn
))
3038 /* Faster than sbiw if we can clobber the operand. */
3040 return "or %A0,%B0";
3042 if (test_hard_reg_class (ADDW_REGS
, op
))
3045 return AS2 (sbiw
,%0,0);
3048 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
3049 AS2 (cpc
,%B0
,__zero_reg__
));
3053 /* Output test instruction for SImode. */
3056 out_tstsi (rtx insn
, rtx op
, int *l
)
3058 if (compare_sign_p (insn
))
3061 return AS1 (tst
,%D0
);
3063 if (test_hard_reg_class (ADDW_REGS
, op
))
3066 return (AS2 (sbiw
,%A0
,0) CR_TAB
3067 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
3068 AS2 (cpc
,%D0
,__zero_reg__
));
3071 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
3072 AS2 (cpc
,%B0
,__zero_reg__
) CR_TAB
3073 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
3074 AS2 (cpc
,%D0
,__zero_reg__
));
3078 /* Generate asm equivalent for various shifts.
3079 Shift count is a CONST_INT, MEM or REG.
3080 This only handles cases that are not already
3081 carefully hand-optimized in ?sh??i3_out. */
3084 out_shift_with_cnt (const char *templ
, rtx insn
, rtx operands
[],
3085 int *len
, int t_len
)
3089 int second_label
= 1;
3090 int saved_in_tmp
= 0;
3091 int use_zero_reg
= 0;
3093 op
[0] = operands
[0];
3094 op
[1] = operands
[1];
3095 op
[2] = operands
[2];
3096 op
[3] = operands
[3];
3102 if (GET_CODE (operands
[2]) == CONST_INT
)
3104 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3105 int count
= INTVAL (operands
[2]);
3106 int max_len
= 10; /* If larger than this, always use a loop. */
3115 if (count
< 8 && !scratch
)
3119 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
3121 if (t_len
* count
<= max_len
)
3123 /* Output shifts inline with no loop - faster. */
3125 *len
= t_len
* count
;
3129 output_asm_insn (templ
, op
);
3138 strcat (str
, AS2 (ldi
,%3,%2));
3140 else if (use_zero_reg
)
3142 /* Hack to save one word: use __zero_reg__ as loop counter.
3143 Set one bit, then shift in a loop until it is 0 again. */
3145 op
[3] = zero_reg_rtx
;
3149 strcat (str
, ("set" CR_TAB
3150 AS2 (bld
,%3,%2-1)));
3154 /* No scratch register available, use one from LD_REGS (saved in
3155 __tmp_reg__) that doesn't overlap with registers to shift. */
3157 op
[3] = gen_rtx_REG (QImode
,
3158 ((true_regnum (operands
[0]) - 1) & 15) + 16);
3159 op
[4] = tmp_reg_rtx
;
3163 *len
= 3; /* Includes "mov %3,%4" after the loop. */
3165 strcat (str
, (AS2 (mov
,%4,%3) CR_TAB
3171 else if (GET_CODE (operands
[2]) == MEM
)
3175 op
[3] = op_mov
[0] = tmp_reg_rtx
;
3179 out_movqi_r_mr (insn
, op_mov
, len
);
3181 output_asm_insn (out_movqi_r_mr (insn
, op_mov
, NULL
), op_mov
);
3183 else if (register_operand (operands
[2], QImode
))
3185 if (reg_unused_after (insn
, operands
[2]))
3189 op
[3] = tmp_reg_rtx
;
3191 strcat (str
, (AS2 (mov
,%3,%2) CR_TAB
));
3195 fatal_insn ("bad shift insn:", insn
);
3202 strcat (str
, AS1 (rjmp
,2f
));
3206 *len
+= t_len
+ 2; /* template + dec + brXX */
3209 strcat (str
, "\n1:\t");
3210 strcat (str
, templ
);
3211 strcat (str
, second_label
? "\n2:\t" : "\n\t");
3212 strcat (str
, use_zero_reg
? AS1 (lsr
,%3) : AS1 (dec
,%3));
3213 strcat (str
, CR_TAB
);
3214 strcat (str
, second_label
? AS1 (brpl
,1b
) : AS1 (brne
,1b
));
3216 strcat (str
, (CR_TAB
AS2 (mov
,%3,%4)));
3217 output_asm_insn (str
, op
);
3222 /* 8bit shift left ((char)x << i) */
3225 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
3227 if (GET_CODE (operands
[2]) == CONST_INT
)
3234 switch (INTVAL (operands
[2]))
3237 if (INTVAL (operands
[2]) < 8)
3241 return AS1 (clr
,%0);
3245 return AS1 (lsl
,%0);
3249 return (AS1 (lsl
,%0) CR_TAB
3254 return (AS1 (lsl
,%0) CR_TAB
3259 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3262 return (AS1 (swap
,%0) CR_TAB
3263 AS2 (andi
,%0,0xf0));
3266 return (AS1 (lsl
,%0) CR_TAB
3272 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3275 return (AS1 (swap
,%0) CR_TAB
3277 AS2 (andi
,%0,0xe0));
3280 return (AS1 (lsl
,%0) CR_TAB
3287 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3290 return (AS1 (swap
,%0) CR_TAB
3293 AS2 (andi
,%0,0xc0));
3296 return (AS1 (lsl
,%0) CR_TAB
3305 return (AS1 (ror
,%0) CR_TAB
3310 else if (CONSTANT_P (operands
[2]))
3311 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3313 out_shift_with_cnt (AS1 (lsl
,%0),
3314 insn
, operands
, len
, 1);
3319 /* 16bit shift left ((short)x << i) */
3322 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
3324 if (GET_CODE (operands
[2]) == CONST_INT
)
3326 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3327 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3334 switch (INTVAL (operands
[2]))
3337 if (INTVAL (operands
[2]) < 16)
3341 return (AS1 (clr
,%B0
) CR_TAB
3345 if (optimize_size
&& scratch
)
3350 return (AS1 (swap
,%A0
) CR_TAB
3351 AS1 (swap
,%B0
) CR_TAB
3352 AS2 (andi
,%B0
,0xf0) CR_TAB
3353 AS2 (eor
,%B0
,%A0
) CR_TAB
3354 AS2 (andi
,%A0
,0xf0) CR_TAB
3360 return (AS1 (swap
,%A0
) CR_TAB
3361 AS1 (swap
,%B0
) CR_TAB
3362 AS2 (ldi
,%3,0xf0) CR_TAB
3364 AS2 (eor
,%B0
,%A0
) CR_TAB
3368 break; /* optimize_size ? 6 : 8 */
3372 break; /* scratch ? 5 : 6 */
3376 return (AS1 (lsl
,%A0
) CR_TAB
3377 AS1 (rol
,%B0
) CR_TAB
3378 AS1 (swap
,%A0
) CR_TAB
3379 AS1 (swap
,%B0
) CR_TAB
3380 AS2 (andi
,%B0
,0xf0) CR_TAB
3381 AS2 (eor
,%B0
,%A0
) CR_TAB
3382 AS2 (andi
,%A0
,0xf0) CR_TAB
3388 return (AS1 (lsl
,%A0
) CR_TAB
3389 AS1 (rol
,%B0
) CR_TAB
3390 AS1 (swap
,%A0
) CR_TAB
3391 AS1 (swap
,%B0
) CR_TAB
3392 AS2 (ldi
,%3,0xf0) CR_TAB
3394 AS2 (eor
,%B0
,%A0
) CR_TAB
3402 break; /* scratch ? 5 : 6 */
3404 return (AS1 (clr
,__tmp_reg__
) CR_TAB
3405 AS1 (lsr
,%B0
) CR_TAB
3406 AS1 (ror
,%A0
) CR_TAB
3407 AS1 (ror
,__tmp_reg__
) CR_TAB
3408 AS1 (lsr
,%B0
) CR_TAB
3409 AS1 (ror
,%A0
) CR_TAB
3410 AS1 (ror
,__tmp_reg__
) CR_TAB
3411 AS2 (mov
,%B0
,%A0
) CR_TAB
3412 AS2 (mov
,%A0
,__tmp_reg__
));
3416 return (AS1 (lsr
,%B0
) CR_TAB
3417 AS2 (mov
,%B0
,%A0
) CR_TAB
3418 AS1 (clr
,%A0
) CR_TAB
3419 AS1 (ror
,%B0
) CR_TAB
3423 return *len
= 2, (AS2 (mov
,%B0
,%A1
) CR_TAB
3428 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3429 AS1 (clr
,%A0
) CR_TAB
3434 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3435 AS1 (clr
,%A0
) CR_TAB
3436 AS1 (lsl
,%B0
) CR_TAB
3441 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3442 AS1 (clr
,%A0
) CR_TAB
3443 AS1 (lsl
,%B0
) CR_TAB
3444 AS1 (lsl
,%B0
) CR_TAB
3451 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3452 AS1 (clr
,%A0
) CR_TAB
3453 AS1 (swap
,%B0
) CR_TAB
3454 AS2 (andi
,%B0
,0xf0));
3459 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3460 AS1 (clr
,%A0
) CR_TAB
3461 AS1 (swap
,%B0
) CR_TAB
3462 AS2 (ldi
,%3,0xf0) CR_TAB
3466 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3467 AS1 (clr
,%A0
) CR_TAB
3468 AS1 (lsl
,%B0
) CR_TAB
3469 AS1 (lsl
,%B0
) CR_TAB
3470 AS1 (lsl
,%B0
) CR_TAB
3477 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3478 AS1 (clr
,%A0
) CR_TAB
3479 AS1 (swap
,%B0
) CR_TAB
3480 AS1 (lsl
,%B0
) CR_TAB
3481 AS2 (andi
,%B0
,0xe0));
3483 if (AVR_HAVE_MUL
&& scratch
)
3486 return (AS2 (ldi
,%3,0x20) CR_TAB
3487 AS2 (mul
,%A0
,%3) CR_TAB
3488 AS2 (mov
,%B0
,r0
) CR_TAB
3489 AS1 (clr
,%A0
) CR_TAB
3490 AS1 (clr
,__zero_reg__
));
3492 if (optimize_size
&& scratch
)
3497 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3498 AS1 (clr
,%A0
) CR_TAB
3499 AS1 (swap
,%B0
) CR_TAB
3500 AS1 (lsl
,%B0
) CR_TAB
3501 AS2 (ldi
,%3,0xe0) CR_TAB
3507 return ("set" CR_TAB
3508 AS2 (bld
,r1
,5) CR_TAB
3509 AS2 (mul
,%A0
,r1
) CR_TAB
3510 AS2 (mov
,%B0
,r0
) CR_TAB
3511 AS1 (clr
,%A0
) CR_TAB
3512 AS1 (clr
,__zero_reg__
));
3515 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3516 AS1 (clr
,%A0
) CR_TAB
3517 AS1 (lsl
,%B0
) CR_TAB
3518 AS1 (lsl
,%B0
) CR_TAB
3519 AS1 (lsl
,%B0
) CR_TAB
3520 AS1 (lsl
,%B0
) CR_TAB
3524 if (AVR_HAVE_MUL
&& ldi_ok
)
3527 return (AS2 (ldi
,%B0
,0x40) CR_TAB
3528 AS2 (mul
,%A0
,%B0
) CR_TAB
3529 AS2 (mov
,%B0
,r0
) CR_TAB
3530 AS1 (clr
,%A0
) CR_TAB
3531 AS1 (clr
,__zero_reg__
));
3533 if (AVR_HAVE_MUL
&& scratch
)
3536 return (AS2 (ldi
,%3,0x40) CR_TAB
3537 AS2 (mul
,%A0
,%3) CR_TAB
3538 AS2 (mov
,%B0
,r0
) CR_TAB
3539 AS1 (clr
,%A0
) CR_TAB
3540 AS1 (clr
,__zero_reg__
));
3542 if (optimize_size
&& ldi_ok
)
3545 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3546 AS2 (ldi
,%A0
,6) "\n1:\t"
3547 AS1 (lsl
,%B0
) CR_TAB
3548 AS1 (dec
,%A0
) CR_TAB
3551 if (optimize_size
&& scratch
)
3554 return (AS1 (clr
,%B0
) CR_TAB
3555 AS1 (lsr
,%A0
) CR_TAB
3556 AS1 (ror
,%B0
) CR_TAB
3557 AS1 (lsr
,%A0
) CR_TAB
3558 AS1 (ror
,%B0
) CR_TAB
3563 return (AS1 (clr
,%B0
) CR_TAB
3564 AS1 (lsr
,%A0
) CR_TAB
3565 AS1 (ror
,%B0
) CR_TAB
3570 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3572 insn
, operands
, len
, 2);
3577 /* 32bit shift left ((long)x << i) */
3580 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
3582 if (GET_CODE (operands
[2]) == CONST_INT
)
3590 switch (INTVAL (operands
[2]))
3593 if (INTVAL (operands
[2]) < 32)
3597 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
3598 AS1 (clr
,%C0
) CR_TAB
3599 AS2 (movw
,%A0
,%C0
));
3601 return (AS1 (clr
,%D0
) CR_TAB
3602 AS1 (clr
,%C0
) CR_TAB
3603 AS1 (clr
,%B0
) CR_TAB
3608 int reg0
= true_regnum (operands
[0]);
3609 int reg1
= true_regnum (operands
[1]);
3612 return (AS2 (mov
,%D0
,%C1
) CR_TAB
3613 AS2 (mov
,%C0
,%B1
) CR_TAB
3614 AS2 (mov
,%B0
,%A1
) CR_TAB
3617 return (AS1 (clr
,%A0
) CR_TAB
3618 AS2 (mov
,%B0
,%A1
) CR_TAB
3619 AS2 (mov
,%C0
,%B1
) CR_TAB
3625 int reg0
= true_regnum (operands
[0]);
3626 int reg1
= true_regnum (operands
[1]);
3627 if (reg0
+ 2 == reg1
)
3628 return *len
= 2, (AS1 (clr
,%B0
) CR_TAB
3631 return *len
= 3, (AS2 (movw
,%C0
,%A1
) CR_TAB
3632 AS1 (clr
,%B0
) CR_TAB
3635 return *len
= 4, (AS2 (mov
,%C0
,%A1
) CR_TAB
3636 AS2 (mov
,%D0
,%B1
) CR_TAB
3637 AS1 (clr
,%B0
) CR_TAB
3643 return (AS2 (mov
,%D0
,%A1
) CR_TAB
3644 AS1 (clr
,%C0
) CR_TAB
3645 AS1 (clr
,%B0
) CR_TAB
3650 return (AS1 (clr
,%D0
) CR_TAB
3651 AS1 (lsr
,%A0
) CR_TAB
3652 AS1 (ror
,%D0
) CR_TAB
3653 AS1 (clr
,%C0
) CR_TAB
3654 AS1 (clr
,%B0
) CR_TAB
3659 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3660 AS1 (rol
,%B0
) CR_TAB
3661 AS1 (rol
,%C0
) CR_TAB
3663 insn
, operands
, len
, 4);
3667 /* 8bit arithmetic shift right ((signed char)x >> i) */
3670 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
3672 if (GET_CODE (operands
[2]) == CONST_INT
)
3679 switch (INTVAL (operands
[2]))
3683 return AS1 (asr
,%0);
3687 return (AS1 (asr
,%0) CR_TAB
3692 return (AS1 (asr
,%0) CR_TAB
3698 return (AS1 (asr
,%0) CR_TAB
3705 return (AS1 (asr
,%0) CR_TAB
3713 return (AS2 (bst
,%0,6) CR_TAB
3715 AS2 (sbc
,%0,%0) CR_TAB
3719 if (INTVAL (operands
[2]) < 8)
3726 return (AS1 (lsl
,%0) CR_TAB
3730 else if (CONSTANT_P (operands
[2]))
3731 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3733 out_shift_with_cnt (AS1 (asr
,%0),
3734 insn
, operands
, len
, 1);
3739 /* 16bit arithmetic shift right ((signed short)x >> i) */
3742 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
3744 if (GET_CODE (operands
[2]) == CONST_INT
)
3746 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3747 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3754 switch (INTVAL (operands
[2]))
3758 /* XXX try to optimize this too? */
3763 break; /* scratch ? 5 : 6 */
3765 return (AS2 (mov
,__tmp_reg__
,%A0
) CR_TAB
3766 AS2 (mov
,%A0
,%B0
) CR_TAB
3767 AS1 (lsl
,__tmp_reg__
) CR_TAB
3768 AS1 (rol
,%A0
) CR_TAB
3769 AS2 (sbc
,%B0
,%B0
) CR_TAB
3770 AS1 (lsl
,__tmp_reg__
) CR_TAB
3771 AS1 (rol
,%A0
) CR_TAB
3776 return (AS1 (lsl
,%A0
) CR_TAB
3777 AS2 (mov
,%A0
,%B0
) CR_TAB
3778 AS1 (rol
,%A0
) CR_TAB
3783 int reg0
= true_regnum (operands
[0]);
3784 int reg1
= true_regnum (operands
[1]);
3787 return *len
= 3, (AS2 (mov
,%A0
,%B0
) CR_TAB
3788 AS1 (lsl
,%B0
) CR_TAB
3791 return *len
= 4, (AS2 (mov
,%A0
,%B1
) CR_TAB
3792 AS1 (clr
,%B0
) CR_TAB
3793 AS2 (sbrc
,%A0
,7) CR_TAB
3799 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3800 AS1 (lsl
,%B0
) CR_TAB
3801 AS2 (sbc
,%B0
,%B0
) CR_TAB
3806 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3807 AS1 (lsl
,%B0
) CR_TAB
3808 AS2 (sbc
,%B0
,%B0
) CR_TAB
3809 AS1 (asr
,%A0
) CR_TAB
3813 if (AVR_HAVE_MUL
&& ldi_ok
)
3816 return (AS2 (ldi
,%A0
,0x20) CR_TAB
3817 AS2 (muls
,%B0
,%A0
) CR_TAB
3818 AS2 (mov
,%A0
,r1
) CR_TAB
3819 AS2 (sbc
,%B0
,%B0
) CR_TAB
3820 AS1 (clr
,__zero_reg__
));
3822 if (optimize_size
&& scratch
)
3825 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3826 AS1 (lsl
,%B0
) CR_TAB
3827 AS2 (sbc
,%B0
,%B0
) CR_TAB
3828 AS1 (asr
,%A0
) CR_TAB
3829 AS1 (asr
,%A0
) CR_TAB
3833 if (AVR_HAVE_MUL
&& ldi_ok
)
3836 return (AS2 (ldi
,%A0
,0x10) CR_TAB
3837 AS2 (muls
,%B0
,%A0
) CR_TAB
3838 AS2 (mov
,%A0
,r1
) CR_TAB
3839 AS2 (sbc
,%B0
,%B0
) CR_TAB
3840 AS1 (clr
,__zero_reg__
));
3842 if (optimize_size
&& scratch
)
3845 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3846 AS1 (lsl
,%B0
) CR_TAB
3847 AS2 (sbc
,%B0
,%B0
) CR_TAB
3848 AS1 (asr
,%A0
) CR_TAB
3849 AS1 (asr
,%A0
) CR_TAB
3850 AS1 (asr
,%A0
) CR_TAB
3854 if (AVR_HAVE_MUL
&& ldi_ok
)
3857 return (AS2 (ldi
,%A0
,0x08) CR_TAB
3858 AS2 (muls
,%B0
,%A0
) CR_TAB
3859 AS2 (mov
,%A0
,r1
) CR_TAB
3860 AS2 (sbc
,%B0
,%B0
) CR_TAB
3861 AS1 (clr
,__zero_reg__
));
3864 break; /* scratch ? 5 : 7 */
3866 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3867 AS1 (lsl
,%B0
) CR_TAB
3868 AS2 (sbc
,%B0
,%B0
) CR_TAB
3869 AS1 (asr
,%A0
) CR_TAB
3870 AS1 (asr
,%A0
) CR_TAB
3871 AS1 (asr
,%A0
) CR_TAB
3872 AS1 (asr
,%A0
) CR_TAB
3877 return (AS1 (lsl
,%B0
) CR_TAB
3878 AS2 (sbc
,%A0
,%A0
) CR_TAB
3879 AS1 (lsl
,%B0
) CR_TAB
3880 AS2 (mov
,%B0
,%A0
) CR_TAB
3884 if (INTVAL (operands
[2]) < 16)
3890 return *len
= 3, (AS1 (lsl
,%B0
) CR_TAB
3891 AS2 (sbc
,%A0
,%A0
) CR_TAB
3896 out_shift_with_cnt ((AS1 (asr
,%B0
) CR_TAB
3898 insn
, operands
, len
, 2);
3903 /* 32bit arithmetic shift right ((signed long)x >> i) */
3906 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
3908 if (GET_CODE (operands
[2]) == CONST_INT
)
3916 switch (INTVAL (operands
[2]))
3920 int reg0
= true_regnum (operands
[0]);
3921 int reg1
= true_regnum (operands
[1]);
3924 return (AS2 (mov
,%A0
,%B1
) CR_TAB
3925 AS2 (mov
,%B0
,%C1
) CR_TAB
3926 AS2 (mov
,%C0
,%D1
) CR_TAB
3927 AS1 (clr
,%D0
) CR_TAB
3928 AS2 (sbrc
,%C0
,7) CR_TAB
3931 return (AS1 (clr
,%D0
) CR_TAB
3932 AS2 (sbrc
,%D1
,7) CR_TAB
3933 AS1 (dec
,%D0
) CR_TAB
3934 AS2 (mov
,%C0
,%D1
) CR_TAB
3935 AS2 (mov
,%B0
,%C1
) CR_TAB
3941 int reg0
= true_regnum (operands
[0]);
3942 int reg1
= true_regnum (operands
[1]);
3944 if (reg0
== reg1
+ 2)
3945 return *len
= 4, (AS1 (clr
,%D0
) CR_TAB
3946 AS2 (sbrc
,%B0
,7) CR_TAB
3947 AS1 (com
,%D0
) CR_TAB
3950 return *len
= 5, (AS2 (movw
,%A0
,%C1
) CR_TAB
3951 AS1 (clr
,%D0
) CR_TAB
3952 AS2 (sbrc
,%B0
,7) CR_TAB
3953 AS1 (com
,%D0
) CR_TAB
3956 return *len
= 6, (AS2 (mov
,%B0
,%D1
) CR_TAB
3957 AS2 (mov
,%A0
,%C1
) CR_TAB
3958 AS1 (clr
,%D0
) CR_TAB
3959 AS2 (sbrc
,%B0
,7) CR_TAB
3960 AS1 (com
,%D0
) CR_TAB
3965 return *len
= 6, (AS2 (mov
,%A0
,%D1
) CR_TAB
3966 AS1 (clr
,%D0
) CR_TAB
3967 AS2 (sbrc
,%A0
,7) CR_TAB
3968 AS1 (com
,%D0
) CR_TAB
3969 AS2 (mov
,%B0
,%D0
) CR_TAB
3973 if (INTVAL (operands
[2]) < 32)
3980 return *len
= 4, (AS1 (lsl
,%D0
) CR_TAB
3981 AS2 (sbc
,%A0
,%A0
) CR_TAB
3982 AS2 (mov
,%B0
,%A0
) CR_TAB
3983 AS2 (movw
,%C0
,%A0
));
3985 return *len
= 5, (AS1 (lsl
,%D0
) CR_TAB
3986 AS2 (sbc
,%A0
,%A0
) CR_TAB
3987 AS2 (mov
,%B0
,%A0
) CR_TAB
3988 AS2 (mov
,%C0
,%A0
) CR_TAB
3993 out_shift_with_cnt ((AS1 (asr
,%D0
) CR_TAB
3994 AS1 (ror
,%C0
) CR_TAB
3995 AS1 (ror
,%B0
) CR_TAB
3997 insn
, operands
, len
, 4);
4001 /* 8bit logic shift right ((unsigned char)x >> i) */
4004 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
4006 if (GET_CODE (operands
[2]) == CONST_INT
)
4013 switch (INTVAL (operands
[2]))
4016 if (INTVAL (operands
[2]) < 8)
4020 return AS1 (clr
,%0);
4024 return AS1 (lsr
,%0);
4028 return (AS1 (lsr
,%0) CR_TAB
4032 return (AS1 (lsr
,%0) CR_TAB
4037 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4040 return (AS1 (swap
,%0) CR_TAB
4041 AS2 (andi
,%0,0x0f));
4044 return (AS1 (lsr
,%0) CR_TAB
4050 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4053 return (AS1 (swap
,%0) CR_TAB
4058 return (AS1 (lsr
,%0) CR_TAB
4065 if (test_hard_reg_class (LD_REGS
, operands
[0]))
4068 return (AS1 (swap
,%0) CR_TAB
4074 return (AS1 (lsr
,%0) CR_TAB
4083 return (AS1 (rol
,%0) CR_TAB
4088 else if (CONSTANT_P (operands
[2]))
4089 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
4091 out_shift_with_cnt (AS1 (lsr
,%0),
4092 insn
, operands
, len
, 1);
4096 /* 16bit logic shift right ((unsigned short)x >> i) */
4099 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
4101 if (GET_CODE (operands
[2]) == CONST_INT
)
4103 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
4104 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
4111 switch (INTVAL (operands
[2]))
4114 if (INTVAL (operands
[2]) < 16)
4118 return (AS1 (clr
,%B0
) CR_TAB
4122 if (optimize_size
&& scratch
)
4127 return (AS1 (swap
,%B0
) CR_TAB
4128 AS1 (swap
,%A0
) CR_TAB
4129 AS2 (andi
,%A0
,0x0f) CR_TAB
4130 AS2 (eor
,%A0
,%B0
) CR_TAB
4131 AS2 (andi
,%B0
,0x0f) CR_TAB
4137 return (AS1 (swap
,%B0
) CR_TAB
4138 AS1 (swap
,%A0
) CR_TAB
4139 AS2 (ldi
,%3,0x0f) CR_TAB
4141 AS2 (eor
,%A0
,%B0
) CR_TAB
4145 break; /* optimize_size ? 6 : 8 */
4149 break; /* scratch ? 5 : 6 */
4153 return (AS1 (lsr
,%B0
) CR_TAB
4154 AS1 (ror
,%A0
) CR_TAB
4155 AS1 (swap
,%B0
) CR_TAB
4156 AS1 (swap
,%A0
) CR_TAB
4157 AS2 (andi
,%A0
,0x0f) CR_TAB
4158 AS2 (eor
,%A0
,%B0
) CR_TAB
4159 AS2 (andi
,%B0
,0x0f) CR_TAB
4165 return (AS1 (lsr
,%B0
) CR_TAB
4166 AS1 (ror
,%A0
) CR_TAB
4167 AS1 (swap
,%B0
) CR_TAB
4168 AS1 (swap
,%A0
) CR_TAB
4169 AS2 (ldi
,%3,0x0f) CR_TAB
4171 AS2 (eor
,%A0
,%B0
) CR_TAB
4179 break; /* scratch ? 5 : 6 */
4181 return (AS1 (clr
,__tmp_reg__
) CR_TAB
4182 AS1 (lsl
,%A0
) CR_TAB
4183 AS1 (rol
,%B0
) CR_TAB
4184 AS1 (rol
,__tmp_reg__
) CR_TAB
4185 AS1 (lsl
,%A0
) CR_TAB
4186 AS1 (rol
,%B0
) CR_TAB
4187 AS1 (rol
,__tmp_reg__
) CR_TAB
4188 AS2 (mov
,%A0
,%B0
) CR_TAB
4189 AS2 (mov
,%B0
,__tmp_reg__
));
4193 return (AS1 (lsl
,%A0
) CR_TAB
4194 AS2 (mov
,%A0
,%B0
) CR_TAB
4195 AS1 (rol
,%A0
) CR_TAB
4196 AS2 (sbc
,%B0
,%B0
) CR_TAB
4200 return *len
= 2, (AS2 (mov
,%A0
,%B1
) CR_TAB
4205 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4206 AS1 (clr
,%B0
) CR_TAB
4211 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4212 AS1 (clr
,%B0
) CR_TAB
4213 AS1 (lsr
,%A0
) CR_TAB
4218 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4219 AS1 (clr
,%B0
) CR_TAB
4220 AS1 (lsr
,%A0
) CR_TAB
4221 AS1 (lsr
,%A0
) CR_TAB
4228 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4229 AS1 (clr
,%B0
) CR_TAB
4230 AS1 (swap
,%A0
) CR_TAB
4231 AS2 (andi
,%A0
,0x0f));
4236 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4237 AS1 (clr
,%B0
) CR_TAB
4238 AS1 (swap
,%A0
) CR_TAB
4239 AS2 (ldi
,%3,0x0f) CR_TAB
4243 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4244 AS1 (clr
,%B0
) CR_TAB
4245 AS1 (lsr
,%A0
) CR_TAB
4246 AS1 (lsr
,%A0
) CR_TAB
4247 AS1 (lsr
,%A0
) CR_TAB
4254 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4255 AS1 (clr
,%B0
) CR_TAB
4256 AS1 (swap
,%A0
) CR_TAB
4257 AS1 (lsr
,%A0
) CR_TAB
4258 AS2 (andi
,%A0
,0x07));
4260 if (AVR_HAVE_MUL
&& scratch
)
4263 return (AS2 (ldi
,%3,0x08) CR_TAB
4264 AS2 (mul
,%B0
,%3) CR_TAB
4265 AS2 (mov
,%A0
,r1
) CR_TAB
4266 AS1 (clr
,%B0
) CR_TAB
4267 AS1 (clr
,__zero_reg__
));
4269 if (optimize_size
&& scratch
)
4274 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4275 AS1 (clr
,%B0
) CR_TAB
4276 AS1 (swap
,%A0
) CR_TAB
4277 AS1 (lsr
,%A0
) CR_TAB
4278 AS2 (ldi
,%3,0x07) CR_TAB
4284 return ("set" CR_TAB
4285 AS2 (bld
,r1
,3) CR_TAB
4286 AS2 (mul
,%B0
,r1
) CR_TAB
4287 AS2 (mov
,%A0
,r1
) CR_TAB
4288 AS1 (clr
,%B0
) CR_TAB
4289 AS1 (clr
,__zero_reg__
));
4292 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4293 AS1 (clr
,%B0
) CR_TAB
4294 AS1 (lsr
,%A0
) CR_TAB
4295 AS1 (lsr
,%A0
) CR_TAB
4296 AS1 (lsr
,%A0
) CR_TAB
4297 AS1 (lsr
,%A0
) CR_TAB
4301 if (AVR_HAVE_MUL
&& ldi_ok
)
4304 return (AS2 (ldi
,%A0
,0x04) CR_TAB
4305 AS2 (mul
,%B0
,%A0
) CR_TAB
4306 AS2 (mov
,%A0
,r1
) CR_TAB
4307 AS1 (clr
,%B0
) CR_TAB
4308 AS1 (clr
,__zero_reg__
));
4310 if (AVR_HAVE_MUL
&& scratch
)
4313 return (AS2 (ldi
,%3,0x04) CR_TAB
4314 AS2 (mul
,%B0
,%3) CR_TAB
4315 AS2 (mov
,%A0
,r1
) CR_TAB
4316 AS1 (clr
,%B0
) CR_TAB
4317 AS1 (clr
,__zero_reg__
));
4319 if (optimize_size
&& ldi_ok
)
4322 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4323 AS2 (ldi
,%B0
,6) "\n1:\t"
4324 AS1 (lsr
,%A0
) CR_TAB
4325 AS1 (dec
,%B0
) CR_TAB
4328 if (optimize_size
&& scratch
)
4331 return (AS1 (clr
,%A0
) CR_TAB
4332 AS1 (lsl
,%B0
) CR_TAB
4333 AS1 (rol
,%A0
) CR_TAB
4334 AS1 (lsl
,%B0
) CR_TAB
4335 AS1 (rol
,%A0
) CR_TAB
4340 return (AS1 (clr
,%A0
) CR_TAB
4341 AS1 (lsl
,%B0
) CR_TAB
4342 AS1 (rol
,%A0
) CR_TAB
4347 out_shift_with_cnt ((AS1 (lsr
,%B0
) CR_TAB
4349 insn
, operands
, len
, 2);
4353 /* 32bit logic shift right ((unsigned int)x >> i) */
4356 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
4358 if (GET_CODE (operands
[2]) == CONST_INT
)
4366 switch (INTVAL (operands
[2]))
4369 if (INTVAL (operands
[2]) < 32)
4373 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
4374 AS1 (clr
,%C0
) CR_TAB
4375 AS2 (movw
,%A0
,%C0
));
4377 return (AS1 (clr
,%D0
) CR_TAB
4378 AS1 (clr
,%C0
) CR_TAB
4379 AS1 (clr
,%B0
) CR_TAB
4384 int reg0
= true_regnum (operands
[0]);
4385 int reg1
= true_regnum (operands
[1]);
4388 return (AS2 (mov
,%A0
,%B1
) CR_TAB
4389 AS2 (mov
,%B0
,%C1
) CR_TAB
4390 AS2 (mov
,%C0
,%D1
) CR_TAB
4393 return (AS1 (clr
,%D0
) CR_TAB
4394 AS2 (mov
,%C0
,%D1
) CR_TAB
4395 AS2 (mov
,%B0
,%C1
) CR_TAB
4401 int reg0
= true_regnum (operands
[0]);
4402 int reg1
= true_regnum (operands
[1]);
4404 if (reg0
== reg1
+ 2)
4405 return *len
= 2, (AS1 (clr
,%C0
) CR_TAB
4408 return *len
= 3, (AS2 (movw
,%A0
,%C1
) CR_TAB
4409 AS1 (clr
,%C0
) CR_TAB
4412 return *len
= 4, (AS2 (mov
,%B0
,%D1
) CR_TAB
4413 AS2 (mov
,%A0
,%C1
) CR_TAB
4414 AS1 (clr
,%C0
) CR_TAB
4419 return *len
= 4, (AS2 (mov
,%A0
,%D1
) CR_TAB
4420 AS1 (clr
,%B0
) CR_TAB
4421 AS1 (clr
,%C0
) CR_TAB
4426 return (AS1 (clr
,%A0
) CR_TAB
4427 AS2 (sbrc
,%D0
,7) CR_TAB
4428 AS1 (inc
,%A0
) CR_TAB
4429 AS1 (clr
,%B0
) CR_TAB
4430 AS1 (clr
,%C0
) CR_TAB
4435 out_shift_with_cnt ((AS1 (lsr
,%D0
) CR_TAB
4436 AS1 (ror
,%C0
) CR_TAB
4437 AS1 (ror
,%B0
) CR_TAB
4439 insn
, operands
, len
, 4);
4443 /* Create RTL split patterns for byte sized rotate expressions. This
4444 produces a series of move instructions and considers overlap situations.
4445 Overlapping non-HImode operands need a scratch register. */
4448 avr_rotate_bytes (rtx operands
[])
4451 enum machine_mode mode
= GET_MODE (operands
[0]);
4452 bool overlapped
= reg_overlap_mentioned_p (operands
[0], operands
[1]);
4453 bool same_reg
= rtx_equal_p (operands
[0], operands
[1]);
4454 int num
= INTVAL (operands
[2]);
4455 rtx scratch
= operands
[3];
4456 /* Work out if byte or word move is needed. Odd byte rotates need QImode.
4457 Word move if no scratch is needed, otherwise use size of scratch. */
4458 enum machine_mode move_mode
= QImode
;
4459 int move_size
, offset
, size
;
4463 else if ((mode
== SImode
&& !same_reg
) || !overlapped
)
4466 move_mode
= GET_MODE (scratch
);
4468 /* Force DI rotate to use QI moves since other DI moves are currently split
4469 into QI moves so forward propagation works better. */
4472 /* Make scratch smaller if needed. */
4473 if (GET_MODE (scratch
) == HImode
&& move_mode
== QImode
)
4474 scratch
= simplify_gen_subreg (move_mode
, scratch
, HImode
, 0);
4476 move_size
= GET_MODE_SIZE (move_mode
);
4477 /* Number of bytes/words to rotate. */
4478 offset
= (num
>> 3) / move_size
;
4479 /* Number of moves needed. */
4480 size
= GET_MODE_SIZE (mode
) / move_size
;
4481 /* Himode byte swap is special case to avoid a scratch register. */
4482 if (mode
== HImode
&& same_reg
)
4484 /* HImode byte swap, using xor. This is as quick as using scratch. */
4486 src
= simplify_gen_subreg (move_mode
, operands
[1], mode
, 0);
4487 dst
= simplify_gen_subreg (move_mode
, operands
[0], mode
, 1);
4488 if (!rtx_equal_p (dst
, src
))
4490 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
4491 emit_move_insn (src
, gen_rtx_XOR (QImode
, src
, dst
));
4492 emit_move_insn (dst
, gen_rtx_XOR (QImode
, dst
, src
));
4497 #define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
4498 /* Create linked list of moves to determine move order. */
4502 } move
[MAX_SIZE
+ 8];
4505 gcc_assert (size
<= MAX_SIZE
);
4506 /* Generate list of subreg moves. */
4507 for (i
= 0; i
< size
; i
++)
4510 int to
= (from
+ offset
) % size
;
4511 move
[i
].src
= simplify_gen_subreg (move_mode
, operands
[1],
4512 mode
, from
* move_size
);
4513 move
[i
].dst
= simplify_gen_subreg (move_mode
, operands
[0],
4514 mode
, to
* move_size
);
4517 /* Mark dependence where a dst of one move is the src of another move.
4518 The first move is a conflict as it must wait until second is
4519 performed. We ignore moves to self - we catch this later. */
4521 for (i
= 0; i
< size
; i
++)
4522 if (reg_overlap_mentioned_p (move
[i
].dst
, operands
[1]))
4523 for (j
= 0; j
< size
; j
++)
4524 if (j
!= i
&& rtx_equal_p (move
[j
].src
, move
[i
].dst
))
4526 /* The dst of move i is the src of move j. */
4533 /* Go through move list and perform non-conflicting moves. As each
4534 non-overlapping move is made, it may remove other conflicts
4535 so the process is repeated until no conflicts remain. */
4540 /* Emit move where dst is not also a src or we have used that
4542 for (i
= 0; i
< size
; i
++)
4543 if (move
[i
].src
!= NULL_RTX
)
4545 if (move
[i
].links
== -1
4546 || move
[move
[i
].links
].src
== NULL_RTX
)
4549 /* Ignore NOP moves to self. */
4550 if (!rtx_equal_p (move
[i
].dst
, move
[i
].src
))
4551 emit_move_insn (move
[i
].dst
, move
[i
].src
);
4553 /* Remove conflict from list. */
4554 move
[i
].src
= NULL_RTX
;
4560 /* Check for deadlock. This is when no moves occurred and we have
4561 at least one blocked move. */
4562 if (moves
== 0 && blocked
!= -1)
4564 /* Need to use scratch register to break deadlock.
4565 Add move to put dst of blocked move into scratch.
4566 When this move occurs, it will break chain deadlock.
4567 The scratch register is substituted for real move. */
4569 move
[size
].src
= move
[blocked
].dst
;
4570 move
[size
].dst
= scratch
;
4571 /* Scratch move is never blocked. */
4572 move
[size
].links
= -1;
4573 /* Make sure we have valid link. */
4574 gcc_assert (move
[blocked
].links
!= -1);
4575 /* Replace src of blocking move with scratch reg. */
4576 move
[move
[blocked
].links
].src
= scratch
;
4577 /* Make dependent on scratch move occurring. */
4578 move
[blocked
].links
= size
;
4582 while (blocked
!= -1);
4587 /* Modifies the length assigned to instruction INSN
4588 LEN is the initially computed length of the insn. */
4591 adjust_insn_length (rtx insn
, int len
)
4593 rtx patt
= PATTERN (insn
);
4596 if (GET_CODE (patt
) == SET
)
4599 op
[1] = SET_SRC (patt
);
4600 op
[0] = SET_DEST (patt
);
4601 if (general_operand (op
[1], VOIDmode
)
4602 && general_operand (op
[0], VOIDmode
))
4604 switch (GET_MODE (op
[0]))
4607 output_movqi (insn
, op
, &len
);
4610 output_movhi (insn
, op
, &len
);
4614 output_movsisf (insn
, op
, &len
);
4620 else if (op
[0] == cc0_rtx
&& REG_P (op
[1]))
4622 switch (GET_MODE (op
[1]))
4624 case HImode
: out_tsthi (insn
, op
[1], &len
); break;
4625 case SImode
: out_tstsi (insn
, op
[1], &len
); break;
4629 else if (GET_CODE (op
[1]) == AND
)
4631 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4633 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4634 if (GET_MODE (op
[1]) == SImode
)
4635 len
= (((mask
& 0xff) != 0xff)
4636 + ((mask
& 0xff00) != 0xff00)
4637 + ((mask
& 0xff0000L
) != 0xff0000L
)
4638 + ((mask
& 0xff000000L
) != 0xff000000L
));
4639 else if (GET_MODE (op
[1]) == HImode
)
4640 len
= (((mask
& 0xff) != 0xff)
4641 + ((mask
& 0xff00) != 0xff00));
4644 else if (GET_CODE (op
[1]) == IOR
)
4646 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4648 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4649 if (GET_MODE (op
[1]) == SImode
)
4650 len
= (((mask
& 0xff) != 0)
4651 + ((mask
& 0xff00) != 0)
4652 + ((mask
& 0xff0000L
) != 0)
4653 + ((mask
& 0xff000000L
) != 0));
4654 else if (GET_MODE (op
[1]) == HImode
)
4655 len
= (((mask
& 0xff) != 0)
4656 + ((mask
& 0xff00) != 0));
4660 set
= single_set (insn
);
4665 op
[1] = SET_SRC (set
);
4666 op
[0] = SET_DEST (set
);
4668 if (GET_CODE (patt
) == PARALLEL
4669 && general_operand (op
[1], VOIDmode
)
4670 && general_operand (op
[0], VOIDmode
))
4672 if (XVECLEN (patt
, 0) == 2)
4673 op
[2] = XVECEXP (patt
, 0, 1);
4675 switch (GET_MODE (op
[0]))
4681 output_reload_inhi (insn
, op
, &len
);
4685 output_reload_insisf (insn
, op
, &len
);
4691 else if (GET_CODE (op
[1]) == ASHIFT
4692 || GET_CODE (op
[1]) == ASHIFTRT
4693 || GET_CODE (op
[1]) == LSHIFTRT
)
4697 ops
[1] = XEXP (op
[1],0);
4698 ops
[2] = XEXP (op
[1],1);
4699 switch (GET_CODE (op
[1]))
4702 switch (GET_MODE (op
[0]))
4704 case QImode
: ashlqi3_out (insn
,ops
,&len
); break;
4705 case HImode
: ashlhi3_out (insn
,ops
,&len
); break;
4706 case SImode
: ashlsi3_out (insn
,ops
,&len
); break;
4711 switch (GET_MODE (op
[0]))
4713 case QImode
: ashrqi3_out (insn
,ops
,&len
); break;
4714 case HImode
: ashrhi3_out (insn
,ops
,&len
); break;
4715 case SImode
: ashrsi3_out (insn
,ops
,&len
); break;
4720 switch (GET_MODE (op
[0]))
4722 case QImode
: lshrqi3_out (insn
,ops
,&len
); break;
4723 case HImode
: lshrhi3_out (insn
,ops
,&len
); break;
4724 case SImode
: lshrsi3_out (insn
,ops
,&len
); break;
4736 /* Return nonzero if register REG dead after INSN. */
4739 reg_unused_after (rtx insn
, rtx reg
)
4741 return (dead_or_set_p (insn
, reg
)
4742 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
4745 /* Return nonzero if REG is not used after INSN.
4746 We assume REG is a reload reg, and therefore does
4747 not live past labels. It may live past calls or jumps though. */
4750 _reg_unused_after (rtx insn
, rtx reg
)
4755 /* If the reg is set by this instruction, then it is safe for our
4756 case. Disregard the case where this is a store to memory, since
4757 we are checking a register used in the store address. */
4758 set
= single_set (insn
);
4759 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
4760 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4763 while ((insn
= NEXT_INSN (insn
)))
4766 code
= GET_CODE (insn
);
4769 /* If this is a label that existed before reload, then the register
4770 is dead here. However, if this is a label added by reorg, then
4771 the register may still be live here. We can't tell the difference,
4772 so we just ignore labels completely. */
4773 if (code
== CODE_LABEL
)
4781 if (code
== JUMP_INSN
)
4784 /* If this is a sequence, we must handle them all at once.
4785 We could have for instance a call that sets the target register,
4786 and an insn in a delay slot that uses the register. In this case,
4787 we must return 0. */
4788 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
4793 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
4795 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
4796 rtx set
= single_set (this_insn
);
4798 if (GET_CODE (this_insn
) == CALL_INSN
)
4800 else if (GET_CODE (this_insn
) == JUMP_INSN
)
4802 if (INSN_ANNULLED_BRANCH_P (this_insn
))
4807 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4809 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4811 if (GET_CODE (SET_DEST (set
)) != MEM
)
4817 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
4822 else if (code
== JUMP_INSN
)
4826 if (code
== CALL_INSN
)
4829 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
4830 if (GET_CODE (XEXP (tem
, 0)) == USE
4831 && REG_P (XEXP (XEXP (tem
, 0), 0))
4832 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
4834 if (call_used_regs
[REGNO (reg
)])
4838 set
= single_set (insn
);
4840 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4842 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4843 return GET_CODE (SET_DEST (set
)) != MEM
;
4844 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
4850 /* Target hook for assembling integer objects. The AVR version needs
4851 special handling for references to certain labels. */
4854 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
4856 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
4857 && text_segment_operand (x
, VOIDmode
) )
4859 fputs ("\t.word\tgs(", asm_out_file
);
4860 output_addr_const (asm_out_file
, x
);
4861 fputs (")\n", asm_out_file
);
4864 return default_assemble_integer (x
, size
, aligned_p
);
4867 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4870 avr_asm_declare_function_name (FILE *file
, const char *name
, tree decl
)
4873 /* If the function has the 'signal' or 'interrupt' attribute, test to
4874 make sure that the name of the function is "__vector_NN" so as to
4875 catch when the user misspells the interrupt vector name. */
4877 if (cfun
->machine
->is_interrupt
)
4879 if (strncmp (name
, "__vector", strlen ("__vector")) != 0)
4881 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
4882 "%qs appears to be a misspelled interrupt handler",
4886 else if (cfun
->machine
->is_signal
)
4888 if (strncmp (name
, "__vector", strlen ("__vector")) != 0)
4890 warning_at (DECL_SOURCE_LOCATION (decl
), 0,
4891 "%qs appears to be a misspelled signal handler",
4896 ASM_OUTPUT_TYPE_DIRECTIVE (file
, name
, "function");
4897 ASM_OUTPUT_LABEL (file
, name
);
4900 /* The routine used to output NUL terminated strings. We use a special
4901 version of this for most svr4 targets because doing so makes the
4902 generated assembly code more compact (and thus faster to assemble)
4903 as well as more readable, especially for targets like the i386
4904 (where the only alternative is to output character sequences as
4905 comma separated lists of numbers). */
4908 gas_output_limited_string(FILE *file
, const char *str
)
4910 const unsigned char *_limited_str
= (const unsigned char *) str
;
4912 fprintf (file
, "%s\"", STRING_ASM_OP
);
4913 for (; (ch
= *_limited_str
); _limited_str
++)
4916 switch (escape
= ESCAPES
[ch
])
4922 fprintf (file
, "\\%03o", ch
);
4926 putc (escape
, file
);
4930 fprintf (file
, "\"\n");
4933 /* The routine used to output sequences of byte values. We use a special
4934 version of this for most svr4 targets because doing so makes the
4935 generated assembly code more compact (and thus faster to assemble)
4936 as well as more readable. Note that if we find subparts of the
4937 character sequence which end with NUL (and which are shorter than
4938 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4941 gas_output_ascii(FILE *file
, const char *str
, size_t length
)
4943 const unsigned char *_ascii_bytes
= (const unsigned char *) str
;
4944 const unsigned char *limit
= _ascii_bytes
+ length
;
4945 unsigned bytes_in_chunk
= 0;
4946 for (; _ascii_bytes
< limit
; _ascii_bytes
++)
4948 const unsigned char *p
;
4949 if (bytes_in_chunk
>= 60)
4951 fprintf (file
, "\"\n");
4954 for (p
= _ascii_bytes
; p
< limit
&& *p
!= '\0'; p
++)
4956 if (p
< limit
&& (p
- _ascii_bytes
) <= (signed)STRING_LIMIT
)
4958 if (bytes_in_chunk
> 0)
4960 fprintf (file
, "\"\n");
4963 gas_output_limited_string (file
, (const char*)_ascii_bytes
);
4970 if (bytes_in_chunk
== 0)
4971 fprintf (file
, "\t.ascii\t\"");
4972 switch (escape
= ESCAPES
[ch
= *_ascii_bytes
])
4979 fprintf (file
, "\\%03o", ch
);
4980 bytes_in_chunk
+= 4;
4984 putc (escape
, file
);
4985 bytes_in_chunk
+= 2;
4990 if (bytes_in_chunk
> 0)
4991 fprintf (file
, "\"\n");
4994 /* Return value is nonzero if pseudos that have been
4995 assigned to registers of class CLASS would likely be spilled
4996 because registers of CLASS are needed for spill registers. */
4999 avr_class_likely_spilled_p (reg_class_t c
)
5001 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
5004 /* Valid attributes:
5005 progmem - put data to program memory;
5006 signal - make a function to be hardware interrupt. After function
5007 prologue interrupts are disabled;
5008 interrupt - make a function to be hardware interrupt. After function
5009 prologue interrupts are enabled;
5010 naked - don't generate function prologue/epilogue and `ret' command.
5012 Only `progmem' attribute valid for type. */
5014 /* Handle a "progmem" attribute; arguments as in
5015 struct attribute_spec.handler. */
5017 avr_handle_progmem_attribute (tree
*node
, tree name
,
5018 tree args ATTRIBUTE_UNUSED
,
5019 int flags ATTRIBUTE_UNUSED
,
5024 if (TREE_CODE (*node
) == TYPE_DECL
)
5026 /* This is really a decl attribute, not a type attribute,
5027 but try to handle it for GCC 3.0 backwards compatibility. */
5029 tree type
= TREE_TYPE (*node
);
5030 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
5031 tree newtype
= build_type_attribute_variant (type
, attr
);
5033 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
5034 TREE_TYPE (*node
) = newtype
;
5035 *no_add_attrs
= true;
5037 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
5039 if (DECL_INITIAL (*node
) == NULL_TREE
&& !DECL_EXTERNAL (*node
))
5041 warning (0, "only initialized variables can be placed into "
5042 "program memory area");
5043 *no_add_attrs
= true;
5048 warning (OPT_Wattributes
, "%qE attribute ignored",
5050 *no_add_attrs
= true;
5057 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5058 struct attribute_spec.handler. */
5061 avr_handle_fndecl_attribute (tree
*node
, tree name
,
5062 tree args ATTRIBUTE_UNUSED
,
5063 int flags ATTRIBUTE_UNUSED
,
5066 if (TREE_CODE (*node
) != FUNCTION_DECL
)
5068 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
5070 *no_add_attrs
= true;
5077 avr_handle_fntype_attribute (tree
*node
, tree name
,
5078 tree args ATTRIBUTE_UNUSED
,
5079 int flags ATTRIBUTE_UNUSED
,
5082 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
5084 warning (OPT_Wattributes
, "%qE attribute only applies to functions",
5086 *no_add_attrs
= true;
5092 /* Look for attribute `progmem' in DECL
5093 if found return 1, otherwise 0. */
5096 avr_progmem_p (tree decl
, tree attributes
)
5100 if (TREE_CODE (decl
) != VAR_DECL
)
5104 != lookup_attribute ("progmem", attributes
))
5110 while (TREE_CODE (a
) == ARRAY_TYPE
);
5112 if (a
== error_mark_node
)
5115 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
5121 /* Add the section attribute if the variable is in progmem. */
5124 avr_insert_attributes (tree node
, tree
*attributes
)
5126 if (TREE_CODE (node
) == VAR_DECL
5127 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
5128 && avr_progmem_p (node
, *attributes
))
5130 if (TREE_READONLY (node
))
5132 static const char dsec
[] = ".progmem.data";
5134 *attributes
= tree_cons (get_identifier ("section"),
5135 build_tree_list (NULL
, build_string (strlen (dsec
), dsec
)),
5140 error ("variable %q+D must be const in order to be put into"
5141 " read-only section by means of %<__attribute__((progmem))%>",
5147 /* A get_unnamed_section callback for switching to progmem_section. */
5150 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED
)
5152 fprintf (asm_out_file
,
5153 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5154 AVR_HAVE_JMP_CALL
? "a" : "ax");
5155 /* Should already be aligned, this is just to be safe if it isn't. */
5156 fprintf (asm_out_file
, "\t.p2align 1\n");
5160 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5161 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5162 /* Track need of __do_clear_bss. */
5165 avr_asm_output_aligned_decl_common (FILE * stream
, const_tree decl ATTRIBUTE_UNUSED
,
5166 const char *name
, unsigned HOST_WIDE_INT size
,
5167 unsigned int align
, bool local_p
)
5169 avr_need_clear_bss_p
= true;
5173 fputs ("\t.local\t", stream
);
5174 assemble_name (stream
, name
);
5175 fputs ("\n", stream
);
5178 fputs ("\t.comm\t", stream
);
5179 assemble_name (stream
, name
);
5181 "," HOST_WIDE_INT_PRINT_UNSIGNED
",%u\n",
5182 size
, align
/ BITS_PER_UNIT
);
5186 /* Unnamed section callback for data_section
5187 to track need of __do_copy_data. */
5190 avr_output_data_section_asm_op (const void *data
)
5192 avr_need_copy_data_p
= true;
5194 /* Dispatch to default. */
5195 output_section_asm_op (data
);
5199 /* Unnamed section callback for bss_section
5200 to track need of __do_clear_bss. */
5203 avr_output_bss_section_asm_op (const void *data
)
5205 avr_need_clear_bss_p
= true;
5207 /* Dispatch to default. */
5208 output_section_asm_op (data
);
5212 /* Implement `TARGET_ASM_INIT_SECTIONS'. */
5215 avr_asm_init_sections (void)
5217 progmem_section
= get_unnamed_section (AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
,
5218 avr_output_progmem_section_asm_op
,
5220 readonly_data_section
= data_section
;
5222 data_section
->unnamed
.callback
= avr_output_data_section_asm_op
;
5223 bss_section
->unnamed
.callback
= avr_output_bss_section_asm_op
;
5227 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5228 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5231 avr_asm_named_section (const char *name
, unsigned int flags
, tree decl
)
5233 if (!avr_need_copy_data_p
)
5234 avr_need_copy_data_p
= (0 == strncmp (name
, ".data", 5)
5235 || 0 == strncmp (name
, ".rodata", 7)
5236 || 0 == strncmp (name
, ".gnu.linkonce.d", 15));
5238 if (!avr_need_clear_bss_p
)
5239 avr_need_clear_bss_p
= (0 == strncmp (name
, ".bss", 4));
5241 default_elf_asm_named_section (name
, flags
, decl
);
5245 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
5247 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
5249 if (strncmp (name
, ".noinit", 7) == 0)
5251 if (decl
&& TREE_CODE (decl
) == VAR_DECL
5252 && DECL_INITIAL (decl
) == NULL_TREE
)
5253 flags
|= SECTION_BSS
; /* @nobits */
5255 warning (0, "only uninitialized variables can be placed in the "
5263 /* Implement `TARGET_ASM_FILE_START'. */
5264 /* Outputs some appropriate text to go at the start of an assembler
5268 avr_file_start (void)
5270 if (avr_current_arch
->asm_only
)
5271 error ("MCU %qs supported for assembler only", avr_current_device
->name
);
5273 default_file_start ();
5275 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5276 fputs ("__SREG__ = 0x3f\n"
5278 "__SP_L__ = 0x3d\n", asm_out_file
);
5280 fputs ("__tmp_reg__ = 0\n"
5281 "__zero_reg__ = 1\n", asm_out_file
);
5285 /* Implement `TARGET_ASM_FILE_END'. */
5286 /* Outputs to the stdio stream FILE some
5287 appropriate text to go at the end of an assembler file. */
5292 /* Output these only if there is anything in the
5293 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5294 input section(s) - some code size can be saved by not
5295 linking in the initialization code from libgcc if resp.
5296 sections are empty. */
5298 if (avr_need_copy_data_p
)
5299 fputs (".global __do_copy_data\n", asm_out_file
);
5301 if (avr_need_clear_bss_p
)
5302 fputs (".global __do_clear_bss\n", asm_out_file
);
5305 /* Choose the order in which to allocate hard registers for
5306 pseudo-registers local to a basic block.
5308 Store the desired register order in the array `reg_alloc_order'.
5309 Element 0 should be the register to allocate first; element 1, the
5310 next register; and so on. */
5313 order_regs_for_local_alloc (void)
5316 static const int order_0
[] = {
5324 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5328 static const int order_1
[] = {
5336 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5340 static const int order_2
[] = {
5349 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5354 const int *order
= (TARGET_ORDER_1
? order_1
:
5355 TARGET_ORDER_2
? order_2
:
5357 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
5358 reg_alloc_order
[i
] = order
[i
];
5362 /* Implement `TARGET_REGISTER_MOVE_COST' */
5365 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED
,
5366 reg_class_t from
, reg_class_t to
)
5368 return (from
== STACK_REG
? 6
5369 : to
== STACK_REG
? 12
5374 /* Implement `TARGET_MEMORY_MOVE_COST' */
5377 avr_memory_move_cost (enum machine_mode mode
, reg_class_t rclass ATTRIBUTE_UNUSED
,
5378 bool in ATTRIBUTE_UNUSED
)
5380 return (mode
== QImode
? 2
5381 : mode
== HImode
? 4
5382 : mode
== SImode
? 8
5383 : mode
== SFmode
? 8
5388 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5389 cost of an RTX operand given its context. X is the rtx of the
5390 operand, MODE is its mode, and OUTER is the rtx_code of this
5391 operand's parent operator. */
5394 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
,
5397 enum rtx_code code
= GET_CODE (x
);
5408 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5415 avr_rtx_costs (x
, code
, outer
, &total
, speed
);
5419 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5420 is to be calculated. Return true if the complete cost has been
5421 computed, and false if subexpressions should be scanned. In either
5422 case, *TOTAL contains the cost result. */
5425 avr_rtx_costs (rtx x
, int codearg
, int outer_code ATTRIBUTE_UNUSED
, int *total
,
5428 enum rtx_code code
= (enum rtx_code
) codearg
;
5429 enum machine_mode mode
= GET_MODE (x
);
5436 /* Immediate constants are as cheap as registers. */
5444 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5452 *total
= COSTS_N_INSNS (1);
5456 *total
= COSTS_N_INSNS (3);
5460 *total
= COSTS_N_INSNS (7);
5466 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5474 *total
= COSTS_N_INSNS (1);
5480 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5484 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5485 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5489 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
5490 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5491 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5495 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
5496 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
5497 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5504 *total
= COSTS_N_INSNS (1);
5505 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5506 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5510 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5512 *total
= COSTS_N_INSNS (2);
5513 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5515 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5516 *total
= COSTS_N_INSNS (1);
5518 *total
= COSTS_N_INSNS (2);
5522 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5524 *total
= COSTS_N_INSNS (4);
5525 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5527 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5528 *total
= COSTS_N_INSNS (1);
5530 *total
= COSTS_N_INSNS (4);
5536 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5542 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5543 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5544 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5545 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5549 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5550 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5551 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5559 *total
= COSTS_N_INSNS (!speed
? 3 : 4);
5561 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5568 *total
= COSTS_N_INSNS (!speed
? 7 : 10);
5570 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5578 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5579 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5587 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5590 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5591 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5598 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 4)
5599 *total
= COSTS_N_INSNS (1);
5604 if (CONST_INT_P (XEXP (x
, 1)) && INTVAL (XEXP (x
, 1)) == 8)
5605 *total
= COSTS_N_INSNS (3);
5610 if (CONST_INT_P (XEXP (x
, 1)))
5611 switch (INTVAL (XEXP (x
, 1)))
5615 *total
= COSTS_N_INSNS (5);
5618 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 6);
5626 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5633 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5635 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5636 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5640 val
= INTVAL (XEXP (x
, 1));
5642 *total
= COSTS_N_INSNS (3);
5643 else if (val
>= 0 && val
<= 7)
5644 *total
= COSTS_N_INSNS (val
);
5646 *total
= COSTS_N_INSNS (1);
5651 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5653 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5654 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5657 switch (INTVAL (XEXP (x
, 1)))
5664 *total
= COSTS_N_INSNS (2);
5667 *total
= COSTS_N_INSNS (3);
5673 *total
= COSTS_N_INSNS (4);
5678 *total
= COSTS_N_INSNS (5);
5681 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5684 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5687 *total
= COSTS_N_INSNS (!speed
? 5 : 10);
5690 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5691 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5696 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5698 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5699 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5702 switch (INTVAL (XEXP (x
, 1)))
5708 *total
= COSTS_N_INSNS (3);
5713 *total
= COSTS_N_INSNS (4);
5716 *total
= COSTS_N_INSNS (6);
5719 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5722 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5723 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5730 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5737 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5739 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5740 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5744 val
= INTVAL (XEXP (x
, 1));
5746 *total
= COSTS_N_INSNS (4);
5748 *total
= COSTS_N_INSNS (2);
5749 else if (val
>= 0 && val
<= 7)
5750 *total
= COSTS_N_INSNS (val
);
5752 *total
= COSTS_N_INSNS (1);
5757 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5759 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5760 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5763 switch (INTVAL (XEXP (x
, 1)))
5769 *total
= COSTS_N_INSNS (2);
5772 *total
= COSTS_N_INSNS (3);
5778 *total
= COSTS_N_INSNS (4);
5782 *total
= COSTS_N_INSNS (5);
5785 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5788 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5792 *total
= COSTS_N_INSNS (!speed
? 5 : 8);
5795 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5796 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5801 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5803 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5804 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5807 switch (INTVAL (XEXP (x
, 1)))
5813 *total
= COSTS_N_INSNS (4);
5818 *total
= COSTS_N_INSNS (6);
5821 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5824 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
5827 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5828 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5835 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5842 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5844 *total
= COSTS_N_INSNS (!speed
? 4 : 17);
5845 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5849 val
= INTVAL (XEXP (x
, 1));
5851 *total
= COSTS_N_INSNS (3);
5852 else if (val
>= 0 && val
<= 7)
5853 *total
= COSTS_N_INSNS (val
);
5855 *total
= COSTS_N_INSNS (1);
5860 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5862 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5863 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5866 switch (INTVAL (XEXP (x
, 1)))
5873 *total
= COSTS_N_INSNS (2);
5876 *total
= COSTS_N_INSNS (3);
5881 *total
= COSTS_N_INSNS (4);
5885 *total
= COSTS_N_INSNS (5);
5891 *total
= COSTS_N_INSNS (!speed
? 5 : 6);
5894 *total
= COSTS_N_INSNS (!speed
? 5 : 7);
5898 *total
= COSTS_N_INSNS (!speed
? 5 : 9);
5901 *total
= COSTS_N_INSNS (!speed
? 5 : 41);
5902 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5907 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5909 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5910 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5913 switch (INTVAL (XEXP (x
, 1)))
5919 *total
= COSTS_N_INSNS (4);
5922 *total
= COSTS_N_INSNS (!speed
? 7 : 8);
5927 *total
= COSTS_N_INSNS (4);
5930 *total
= COSTS_N_INSNS (6);
5933 *total
= COSTS_N_INSNS (!speed
? 7 : 113);
5934 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5941 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5945 switch (GET_MODE (XEXP (x
, 0)))
5948 *total
= COSTS_N_INSNS (1);
5949 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5950 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5954 *total
= COSTS_N_INSNS (2);
5955 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5956 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5957 else if (INTVAL (XEXP (x
, 1)) != 0)
5958 *total
+= COSTS_N_INSNS (1);
5962 *total
= COSTS_N_INSNS (4);
5963 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5964 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
, speed
);
5965 else if (INTVAL (XEXP (x
, 1)) != 0)
5966 *total
+= COSTS_N_INSNS (3);
5972 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
, speed
);
5981 /* Calculate the cost of a memory address. */
5984 avr_address_cost (rtx x
, bool speed ATTRIBUTE_UNUSED
)
5986 if (GET_CODE (x
) == PLUS
5987 && GET_CODE (XEXP (x
,1)) == CONST_INT
5988 && (REG_P (XEXP (x
,0)) || GET_CODE (XEXP (x
,0)) == SUBREG
)
5989 && INTVAL (XEXP (x
,1)) >= 61)
5991 if (CONSTANT_ADDRESS_P (x
))
5993 if (optimize
> 0 && io_address_operand (x
, QImode
))
6000 /* Test for extra memory constraint 'Q'.
6001 It's a memory address based on Y or Z pointer with valid displacement. */
6004 extra_constraint_Q (rtx x
)
6006 if (GET_CODE (XEXP (x
,0)) == PLUS
6007 && REG_P (XEXP (XEXP (x
,0), 0))
6008 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
6009 && (INTVAL (XEXP (XEXP (x
,0), 1))
6010 <= MAX_LD_OFFSET (GET_MODE (x
))))
6012 rtx xx
= XEXP (XEXP (x
,0), 0);
6013 int regno
= REGNO (xx
);
6014 if (TARGET_ALL_DEBUG
)
6016 fprintf (stderr
, ("extra_constraint:\n"
6017 "reload_completed: %d\n"
6018 "reload_in_progress: %d\n"),
6019 reload_completed
, reload_in_progress
);
6022 if (regno
>= FIRST_PSEUDO_REGISTER
)
6023 return 1; /* allocate pseudos */
6024 else if (regno
== REG_Z
|| regno
== REG_Y
)
6025 return 1; /* strictly check */
6026 else if (xx
== frame_pointer_rtx
6027 || xx
== arg_pointer_rtx
)
6028 return 1; /* XXX frame & arg pointer checks */
6033 /* Convert condition code CONDITION to the valid AVR condition code. */
6036 avr_normalize_condition (RTX_CODE condition
)
6053 /* This function optimizes conditional jumps. */
6060 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
6062 if (! (GET_CODE (insn
) == INSN
6063 || GET_CODE (insn
) == CALL_INSN
6064 || GET_CODE (insn
) == JUMP_INSN
)
6065 || !single_set (insn
))
6068 pattern
= PATTERN (insn
);
6070 if (GET_CODE (pattern
) == PARALLEL
)
6071 pattern
= XVECEXP (pattern
, 0, 0);
6072 if (GET_CODE (pattern
) == SET
6073 && SET_DEST (pattern
) == cc0_rtx
6074 && compare_diff_p (insn
))
6076 if (GET_CODE (SET_SRC (pattern
)) == COMPARE
)
6078 /* Now we work under compare insn. */
6080 pattern
= SET_SRC (pattern
);
6081 if (true_regnum (XEXP (pattern
,0)) >= 0
6082 && true_regnum (XEXP (pattern
,1)) >= 0 )
6084 rtx x
= XEXP (pattern
,0);
6085 rtx next
= next_real_insn (insn
);
6086 rtx pat
= PATTERN (next
);
6087 rtx src
= SET_SRC (pat
);
6088 rtx t
= XEXP (src
,0);
6089 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
6090 XEXP (pattern
,0) = XEXP (pattern
,1);
6091 XEXP (pattern
,1) = x
;
6092 INSN_CODE (next
) = -1;
6094 else if (true_regnum (XEXP (pattern
, 0)) >= 0
6095 && XEXP (pattern
, 1) == const0_rtx
)
6097 /* This is a tst insn, we can reverse it. */
6098 rtx next
= next_real_insn (insn
);
6099 rtx pat
= PATTERN (next
);
6100 rtx src
= SET_SRC (pat
);
6101 rtx t
= XEXP (src
,0);
6103 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
6104 XEXP (pattern
, 1) = XEXP (pattern
, 0);
6105 XEXP (pattern
, 0) = const0_rtx
;
6106 INSN_CODE (next
) = -1;
6107 INSN_CODE (insn
) = -1;
6109 else if (true_regnum (XEXP (pattern
,0)) >= 0
6110 && GET_CODE (XEXP (pattern
,1)) == CONST_INT
)
6112 rtx x
= XEXP (pattern
,1);
6113 rtx next
= next_real_insn (insn
);
6114 rtx pat
= PATTERN (next
);
6115 rtx src
= SET_SRC (pat
);
6116 rtx t
= XEXP (src
,0);
6117 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
6119 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
6121 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
6122 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
6123 INSN_CODE (next
) = -1;
6124 INSN_CODE (insn
) = -1;
6132 /* Returns register number for function return value.*/
6134 static inline unsigned int
6135 avr_ret_register (void)
6140 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
6143 avr_function_value_regno_p (const unsigned int regno
)
6145 return (regno
== avr_ret_register ());
6148 /* Create an RTX representing the place where a
6149 library function returns a value of mode MODE. */
6152 avr_libcall_value (enum machine_mode mode
,
6153 const_rtx func ATTRIBUTE_UNUSED
)
6155 int offs
= GET_MODE_SIZE (mode
);
6158 return gen_rtx_REG (mode
, avr_ret_register () + 2 - offs
);
6161 /* Create an RTX representing the place where a
6162 function returns a value of data type VALTYPE. */
6165 avr_function_value (const_tree type
,
6166 const_tree fn_decl_or_type ATTRIBUTE_UNUSED
,
6167 bool outgoing ATTRIBUTE_UNUSED
)
6171 if (TYPE_MODE (type
) != BLKmode
)
6172 return avr_libcall_value (TYPE_MODE (type
), NULL_RTX
);
6174 offs
= int_size_in_bytes (type
);
6177 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
6178 offs
= GET_MODE_SIZE (SImode
);
6179 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
6180 offs
= GET_MODE_SIZE (DImode
);
6182 return gen_rtx_REG (BLKmode
, avr_ret_register () + 2 - offs
);
6186 test_hard_reg_class (enum reg_class rclass
, rtx x
)
6188 int regno
= true_regnum (x
);
6192 if (TEST_HARD_REG_CLASS (rclass
, regno
))
6200 jump_over_one_insn_p (rtx insn
, rtx dest
)
6202 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
6205 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
6206 int dest_addr
= INSN_ADDRESSES (uid
);
6207 return dest_addr
- jump_addr
== get_attr_length (insn
) + 1;
6210 /* Returns 1 if a value of mode MODE can be stored starting with hard
6211 register number REGNO. On the enhanced core, anything larger than
6212 1 byte must start in even numbered register for "movw" to work
6213 (this way we don't have to check for odd registers everywhere). */
6216 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
6218 /* Disallow QImode in stack pointer regs. */
6219 if ((regno
== REG_SP
|| regno
== (REG_SP
+ 1)) && mode
== QImode
)
6222 /* The only thing that can go into registers r28:r29 is a Pmode. */
6223 if (regno
== REG_Y
&& mode
== Pmode
)
6226 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
6227 if (regno
<= (REG_Y
+ 1) && (regno
+ GET_MODE_SIZE (mode
)) >= (REG_Y
+ 1))
6233 /* Modes larger than QImode occupy consecutive registers. */
6234 if (regno
+ GET_MODE_SIZE (mode
) > FIRST_PSEUDO_REGISTER
)
6237 /* All modes larger than QImode should start in an even register. */
6238 return !(regno
& 1);
6242 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
6248 if (GET_CODE (operands
[1]) == CONST_INT
)
6250 int val
= INTVAL (operands
[1]);
6251 if ((val
& 0xff) == 0)
6254 return (AS2 (mov
,%A0
,__zero_reg__
) CR_TAB
6255 AS2 (ldi
,%2,hi8(%1)) CR_TAB
6258 else if ((val
& 0xff00) == 0)
6261 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
6262 AS2 (mov
,%A0
,%2) CR_TAB
6263 AS2 (mov
,%B0
,__zero_reg__
));
6265 else if ((val
& 0xff) == ((val
& 0xff00) >> 8))
6268 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
6269 AS2 (mov
,%A0
,%2) CR_TAB
6274 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
6275 AS2 (mov
,%A0
,%2) CR_TAB
6276 AS2 (ldi
,%2,hi8(%1)) CR_TAB
6282 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
6284 rtx src
= operands
[1];
6285 int cnst
= (GET_CODE (src
) == CONST_INT
);
6290 *len
= 4 + ((INTVAL (src
) & 0xff) != 0)
6291 + ((INTVAL (src
) & 0xff00) != 0)
6292 + ((INTVAL (src
) & 0xff0000) != 0)
6293 + ((INTVAL (src
) & 0xff000000) != 0);
6300 if (cnst
&& ((INTVAL (src
) & 0xff) == 0))
6301 output_asm_insn (AS2 (mov
, %A0
, __zero_reg__
), operands
);
6304 output_asm_insn (AS2 (ldi
, %2, lo8(%1)), operands
);
6305 output_asm_insn (AS2 (mov
, %A0
, %2), operands
);
6307 if (cnst
&& ((INTVAL (src
) & 0xff00) == 0))
6308 output_asm_insn (AS2 (mov
, %B0
, __zero_reg__
), operands
);
6311 output_asm_insn (AS2 (ldi
, %2, hi8(%1)), operands
);
6312 output_asm_insn (AS2 (mov
, %B0
, %2), operands
);
6314 if (cnst
&& ((INTVAL (src
) & 0xff0000) == 0))
6315 output_asm_insn (AS2 (mov
, %C0
, __zero_reg__
), operands
);
6318 output_asm_insn (AS2 (ldi
, %2, hlo8(%1)), operands
);
6319 output_asm_insn (AS2 (mov
, %C0
, %2), operands
);
6321 if (cnst
&& ((INTVAL (src
) & 0xff000000) == 0))
6322 output_asm_insn (AS2 (mov
, %D0
, __zero_reg__
), operands
);
6325 output_asm_insn (AS2 (ldi
, %2, hhi8(%1)), operands
);
6326 output_asm_insn (AS2 (mov
, %D0
, %2), operands
);
6332 avr_output_bld (rtx operands
[], int bit_nr
)
6334 static char s
[] = "bld %A0,0";
6336 s
[5] = 'A' + (bit_nr
>> 3);
6337 s
[8] = '0' + (bit_nr
& 7);
6338 output_asm_insn (s
, operands
);
6342 avr_output_addr_vec_elt (FILE *stream
, int value
)
6344 switch_to_section (progmem_section
);
6345 if (AVR_HAVE_JMP_CALL
)
6346 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
6348 fprintf (stream
, "\trjmp .L%d\n", value
);
6351 /* Returns true if SCRATCH are safe to be allocated as a scratch
6352 registers (for a define_peephole2) in the current function. */
6355 avr_hard_regno_scratch_ok (unsigned int regno
)
6357 /* Interrupt functions can only use registers that have already been saved
6358 by the prologue, even if they would normally be call-clobbered. */
6360 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
6361 && !df_regs_ever_live_p (regno
))
6367 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6370 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED
,
6371 unsigned int new_reg
)
6373 /* Interrupt functions can only use registers that have already been
6374 saved by the prologue, even if they would normally be
6377 if ((cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
6378 && !df_regs_ever_live_p (new_reg
))
6384 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6385 or memory location in the I/O space (QImode only).
6387 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6388 Operand 1: register operand to test, or CONST_INT memory address.
6389 Operand 2: bit number.
6390 Operand 3: label to jump to if the test is true. */
6393 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
6395 enum rtx_code comp
= GET_CODE (operands
[0]);
6396 int long_jump
= (get_attr_length (insn
) >= 4);
6397 int reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
6401 else if (comp
== LT
)
6405 comp
= reverse_condition (comp
);
6407 if (GET_CODE (operands
[1]) == CONST_INT
)
6409 if (INTVAL (operands
[1]) < 0x40)
6412 output_asm_insn (AS2 (sbis
,%m1
-0x20,%2), operands
);
6414 output_asm_insn (AS2 (sbic
,%m1
-0x20,%2), operands
);
6418 output_asm_insn (AS2 (in
,__tmp_reg__
,%m1
-0x20), operands
);
6420 output_asm_insn (AS2 (sbrs
,__tmp_reg__
,%2), operands
);
6422 output_asm_insn (AS2 (sbrc
,__tmp_reg__
,%2), operands
);
6425 else /* GET_CODE (operands[1]) == REG */
6427 if (GET_MODE (operands
[1]) == QImode
)
6430 output_asm_insn (AS2 (sbrs
,%1,%2), operands
);
6432 output_asm_insn (AS2 (sbrc
,%1,%2), operands
);
6434 else /* HImode or SImode */
6436 static char buf
[] = "sbrc %A1,0";
6437 int bit_nr
= INTVAL (operands
[2]);
6438 buf
[3] = (comp
== EQ
) ? 's' : 'c';
6439 buf
[6] = 'A' + (bit_nr
>> 3);
6440 buf
[9] = '0' + (bit_nr
& 7);
6441 output_asm_insn (buf
, operands
);
6446 return (AS1 (rjmp
,.+4) CR_TAB
6449 return AS1 (rjmp
,%x3
);
6453 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6456 avr_asm_out_ctor (rtx symbol
, int priority
)
6458 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
6459 default_ctor_section_asm_out_constructor (symbol
, priority
);
6462 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6465 avr_asm_out_dtor (rtx symbol
, int priority
)
6467 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
6468 default_dtor_section_asm_out_destructor (symbol
, priority
);
6471 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6474 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
6476 if (TYPE_MODE (type
) == BLKmode
)
6478 HOST_WIDE_INT size
= int_size_in_bytes (type
);
6479 return (size
== -1 || size
> 8);
6485 /* Worker function for CASE_VALUES_THRESHOLD. */
6487 unsigned int avr_case_values_threshold (void)
6489 return (!AVR_HAVE_JMP_CALL
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;
6492 /* Helper for __builtin_avr_delay_cycles */
6495 avr_expand_delay_cycles (rtx operands0
)
6497 unsigned HOST_WIDE_INT cycles
= UINTVAL (operands0
);
6498 unsigned HOST_WIDE_INT cycles_used
;
6499 unsigned HOST_WIDE_INT loop_count
;
6501 if (IN_RANGE (cycles
, 83886082, 0xFFFFFFFF))
6503 loop_count
= ((cycles
- 9) / 6) + 1;
6504 cycles_used
= ((loop_count
- 1) * 6) + 9;
6505 emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count
, SImode
)));
6506 cycles
-= cycles_used
;
6509 if (IN_RANGE (cycles
, 262145, 83886081))
6511 loop_count
= ((cycles
- 7) / 5) + 1;
6512 if (loop_count
> 0xFFFFFF)
6513 loop_count
= 0xFFFFFF;
6514 cycles_used
= ((loop_count
- 1) * 5) + 7;
6515 emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count
, SImode
)));
6516 cycles
-= cycles_used
;
6519 if (IN_RANGE (cycles
, 768, 262144))
6521 loop_count
= ((cycles
- 5) / 4) + 1;
6522 if (loop_count
> 0xFFFF)
6523 loop_count
= 0xFFFF;
6524 cycles_used
= ((loop_count
- 1) * 4) + 5;
6525 emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count
, HImode
)));
6526 cycles
-= cycles_used
;
6529 if (IN_RANGE (cycles
, 6, 767))
6531 loop_count
= cycles
/ 3;
6532 if (loop_count
> 255)
6534 cycles_used
= loop_count
* 3;
6535 emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count
, QImode
)));
6536 cycles
-= cycles_used
;
6541 emit_insn (gen_nopv (GEN_INT(2)));
6547 emit_insn (gen_nopv (GEN_INT(1)));
/* IDs for all the AVR builtins.  */

enum avr_builtin_id
  {
    AVR_BUILTIN_NOP,
    AVR_BUILTIN_SEI,
    AVR_BUILTIN_CLI,
    AVR_BUILTIN_WDR,
    AVR_BUILTIN_SLEEP,
    AVR_BUILTIN_SWAP,
    AVR_BUILTIN_FMUL,
    AVR_BUILTIN_FMULS,
    AVR_BUILTIN_FMULSU,
    AVR_BUILTIN_DELAY_CYCLES
  };

/* Register builtin NAME with function type TYPE and id CODE.  */
#define DEF_BUILTIN(NAME, TYPE, CODE)                           \
  do                                                            \
    {                                                           \
      add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,\
			    NULL, NULL_TREE);                   \
    } while (0)
6576 /* Implement `TARGET_INIT_BUILTINS' */
6577 /* Set up all builtin functions for this target. */
6580 avr_init_builtins (void)
6582 tree void_ftype_void
6583 = build_function_type_list (void_type_node
, NULL_TREE
);
6584 tree uchar_ftype_uchar
6585 = build_function_type_list (unsigned_char_type_node
,
6586 unsigned_char_type_node
,
6588 tree uint_ftype_uchar_uchar
6589 = build_function_type_list (unsigned_type_node
,
6590 unsigned_char_type_node
,
6591 unsigned_char_type_node
,
6593 tree int_ftype_char_char
6594 = build_function_type_list (integer_type_node
,
6598 tree int_ftype_char_uchar
6599 = build_function_type_list (integer_type_node
,
6601 unsigned_char_type_node
,
6603 tree void_ftype_ulong
6604 = build_function_type_list (void_type_node
,
6605 long_unsigned_type_node
,
6608 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void
, AVR_BUILTIN_NOP
);
6609 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void
, AVR_BUILTIN_SEI
);
6610 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void
, AVR_BUILTIN_CLI
);
6611 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void
, AVR_BUILTIN_WDR
);
6612 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void
, AVR_BUILTIN_SLEEP
);
6613 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar
, AVR_BUILTIN_SWAP
);
6614 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong
,
6615 AVR_BUILTIN_DELAY_CYCLES
);
6619 /* FIXME: If !AVR_HAVE_MUL, make respective functions available
6620 in libgcc. For fmul and fmuls this is straight forward with
6621 upcoming fixed point support. */
6623 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar
,
6625 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char
,
6627 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar
,
6628 AVR_BUILTIN_FMULSU
);
6634 struct avr_builtin_description
6636 const enum insn_code icode
;
6637 const char *const name
;
6638 const enum avr_builtin_id id
;
6641 static const struct avr_builtin_description
6644 { CODE_FOR_rotlqi3_4
, "__builtin_avr_swap", AVR_BUILTIN_SWAP
}
6647 static const struct avr_builtin_description
6650 { CODE_FOR_fmul
, "__builtin_avr_fmul", AVR_BUILTIN_FMUL
},
6651 { CODE_FOR_fmuls
, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS
},
6652 { CODE_FOR_fmulsu
, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU
}
6655 /* Subroutine of avr_expand_builtin to take care of unop insns. */
6658 avr_expand_unop_builtin (enum insn_code icode
, tree exp
,
6662 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6663 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
6664 enum machine_mode op0mode
= GET_MODE (op0
);
6665 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6666 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6669 || GET_MODE (target
) != tmode
6670 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6672 target
= gen_reg_rtx (tmode
);
6675 if (op0mode
== SImode
&& mode0
== HImode
)
6678 op0
= gen_lowpart (HImode
, op0
);
6681 gcc_assert (op0mode
== mode0
|| op0mode
== VOIDmode
);
6683 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6684 op0
= copy_to_mode_reg (mode0
, op0
);
6686 pat
= GEN_FCN (icode
) (target
, op0
);
6696 /* Subroutine of avr_expand_builtin to take care of binop insns. */
6699 avr_expand_binop_builtin (enum insn_code icode
, tree exp
, rtx target
)
6702 tree arg0
= CALL_EXPR_ARG (exp
, 0);
6703 tree arg1
= CALL_EXPR_ARG (exp
, 1);
6704 rtx op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
6705 rtx op1
= expand_expr (arg1
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
6706 enum machine_mode op0mode
= GET_MODE (op0
);
6707 enum machine_mode op1mode
= GET_MODE (op1
);
6708 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
6709 enum machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
6710 enum machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
6713 || GET_MODE (target
) != tmode
6714 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
6716 target
= gen_reg_rtx (tmode
);
6719 if ((op0mode
== SImode
|| op0mode
== VOIDmode
) && mode0
== HImode
)
6722 op0
= gen_lowpart (HImode
, op0
);
6725 if ((op1mode
== SImode
|| op1mode
== VOIDmode
) && mode1
== HImode
)
6728 op1
= gen_lowpart (HImode
, op1
);
6731 /* In case the insn wants input operands in modes different from
6732 the result, abort. */
6734 gcc_assert ((op0mode
== mode0
|| op0mode
== VOIDmode
)
6735 && (op1mode
== mode1
|| op1mode
== VOIDmode
));
6737 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
6738 op0
= copy_to_mode_reg (mode0
, op0
);
6740 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
6741 op1
= copy_to_mode_reg (mode1
, op1
);
6743 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
6753 /* Expand an expression EXP that calls a built-in function,
6754 with result going to TARGET if that's convenient
6755 (and in mode MODE if that's convenient).
6756 SUBTARGET may be used as the target for computing one of EXP's operands.
6757 IGNORE is nonzero if the value is to be ignored. */
6760 avr_expand_builtin (tree exp
, rtx target
,
6761 rtx subtarget ATTRIBUTE_UNUSED
,
6762 enum machine_mode mode ATTRIBUTE_UNUSED
,
6763 int ignore ATTRIBUTE_UNUSED
)
6766 const struct avr_builtin_description
*d
;
6767 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
6768 unsigned int id
= DECL_FUNCTION_CODE (fndecl
);
6774 case AVR_BUILTIN_NOP
:
6775 emit_insn (gen_nopv (GEN_INT(1)));
6778 case AVR_BUILTIN_SEI
:
6779 emit_insn (gen_enable_interrupt ());
6782 case AVR_BUILTIN_CLI
:
6783 emit_insn (gen_disable_interrupt ());
6786 case AVR_BUILTIN_WDR
:
6787 emit_insn (gen_wdr ());
6790 case AVR_BUILTIN_SLEEP
:
6791 emit_insn (gen_sleep ());
6794 case AVR_BUILTIN_DELAY_CYCLES
:
6796 arg0
= CALL_EXPR_ARG (exp
, 0);
6797 op0
= expand_expr (arg0
, NULL_RTX
, VOIDmode
, EXPAND_NORMAL
);
6799 if (! CONST_INT_P (op0
))
6800 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
6802 avr_expand_delay_cycles (op0
);
6807 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
6809 return avr_expand_unop_builtin (d
->icode
, exp
, target
);
6811 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
6813 return avr_expand_binop_builtin (d
->icode
, exp
, target
);