1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
45 #include "target-def.h"
48 /* Maximal allowed offset for an address in the LD command */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
51 static int avr_naked_function_p (tree
);
52 static int interrupt_function_p (tree
);
53 static int signal_function_p (tree
);
54 static int avr_OS_task_function_p (tree
);
55 static int avr_regs_to_save (HARD_REG_SET
*);
56 static int sequent_regs_live (void);
57 static const char *ptrreg_to_str (int);
58 static const char *cond_string (enum rtx_code
);
59 static int avr_num_arg_regs (enum machine_mode
, tree
);
61 static RTX_CODE
compare_condition (rtx insn
);
62 static int compare_sign_p (rtx insn
);
63 static tree
avr_handle_progmem_attribute (tree
*, tree
, tree
, int, bool *);
64 static tree
avr_handle_fndecl_attribute (tree
*, tree
, tree
, int, bool *);
65 static tree
avr_handle_fntype_attribute (tree
*, tree
, tree
, int, bool *);
66 const struct attribute_spec avr_attribute_table
[];
67 static bool avr_assemble_integer (rtx
, unsigned int, int);
68 static void avr_file_start (void);
69 static void avr_file_end (void);
70 static void avr_asm_function_end_prologue (FILE *);
71 static void avr_asm_function_begin_epilogue (FILE *);
72 static void avr_insert_attributes (tree
, tree
*);
73 static void avr_asm_init_sections (void);
74 static unsigned int avr_section_type_flags (tree
, const char *, int);
76 static void avr_reorg (void);
77 static void avr_asm_out_ctor (rtx
, int);
78 static void avr_asm_out_dtor (rtx
, int);
79 static int avr_operand_rtx_cost (rtx
, enum machine_mode
, enum rtx_code
);
80 static bool avr_rtx_costs (rtx
, int, int, int *);
81 static int avr_address_cost (rtx
);
82 static bool avr_return_in_memory (const_tree
, const_tree
);
83 static struct machine_function
* avr_init_machine_status (void);
84 /* Allocate registers from r25 to r8 for parameters for function calls. */
85 #define FIRST_CUM_REG 26
87 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
88 static GTY(()) rtx tmp_reg_rtx
;
90 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
91 static GTY(()) rtx zero_reg_rtx
;
93 /* AVR register names {"r0", "r1", ..., "r31"} */
94 static const char *const avr_regnames
[] = REGISTER_NAMES
;
96 /* This holds the last insn address. */
97 static int last_insn_address
= 0;
99 /* Preprocessor macros to define depending on MCU type. */
100 const char *avr_extra_arch_macro
;
102 /* Current architecture. */
103 const struct base_arch_s
*avr_current_arch
;
105 section
*progmem_section
;
107 static const struct base_arch_s avr_arch_types
[] = {
108 { 1, 0, 0, 0, 0, 0, 0, 0, NULL
}, /* unknown device specified */
109 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
110 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
111 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
112 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
113 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
114 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
115 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
116 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
117 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
118 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
121 /* These names are used as the index into the avr_arch_types[] table
140 const char *const name
;
141 int arch
; /* index in avr_arch_types[] */
142 /* Must lie outside user's namespace. NULL == no macro. */
143 const char *const macro
;
146 /* List of all known AVR MCU types - if updated, it has to be kept
147 in sync in several places (FIXME: is there a better way?):
149 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
150 - t-avr (MULTILIB_MATCHES)
151 - gas/config/tc-avr.c
154 static const struct mcu_type_s avr_mcu_types
[] = {
155 /* Classic, <= 8K. */
156 { "avr2", ARCH_AVR2
, NULL
},
157 { "at90s2313", ARCH_AVR2
, "__AVR_AT90S2313__" },
158 { "at90s2323", ARCH_AVR2
, "__AVR_AT90S2323__" },
159 { "at90s2333", ARCH_AVR2
, "__AVR_AT90S2333__" },
160 { "at90s2343", ARCH_AVR2
, "__AVR_AT90S2343__" },
161 { "attiny22", ARCH_AVR2
, "__AVR_ATtiny22__" },
162 { "attiny26", ARCH_AVR2
, "__AVR_ATtiny26__" },
163 { "at90s4414", ARCH_AVR2
, "__AVR_AT90S4414__" },
164 { "at90s4433", ARCH_AVR2
, "__AVR_AT90S4433__" },
165 { "at90s4434", ARCH_AVR2
, "__AVR_AT90S4434__" },
166 { "at90s8515", ARCH_AVR2
, "__AVR_AT90S8515__" },
167 { "at90c8534", ARCH_AVR2
, "__AVR_AT90C8534__" },
168 { "at90s8535", ARCH_AVR2
, "__AVR_AT90S8535__" },
169 /* Classic + MOVW, <= 8K. */
170 { "avr25", ARCH_AVR25
, NULL
},
171 { "attiny13", ARCH_AVR25
, "__AVR_ATtiny13__" },
172 { "attiny2313", ARCH_AVR25
, "__AVR_ATtiny2313__" },
173 { "attiny24", ARCH_AVR25
, "__AVR_ATtiny24__" },
174 { "attiny44", ARCH_AVR25
, "__AVR_ATtiny44__" },
175 { "attiny84", ARCH_AVR25
, "__AVR_ATtiny84__" },
176 { "attiny25", ARCH_AVR25
, "__AVR_ATtiny25__" },
177 { "attiny45", ARCH_AVR25
, "__AVR_ATtiny45__" },
178 { "attiny85", ARCH_AVR25
, "__AVR_ATtiny85__" },
179 { "attiny261", ARCH_AVR25
, "__AVR_ATtiny261__" },
180 { "attiny461", ARCH_AVR25
, "__AVR_ATtiny461__" },
181 { "attiny861", ARCH_AVR25
, "__AVR_ATtiny861__" },
182 { "attiny43u", ARCH_AVR25
, "__AVR_ATtiny43U__" },
183 { "attiny48", ARCH_AVR25
, "__AVR_ATtiny48__" },
184 { "attiny88", ARCH_AVR25
, "__AVR_ATtiny88__" },
185 { "at86rf401", ARCH_AVR25
, "__AVR_AT86RF401__" },
186 /* Classic, > 8K, <= 64K. */
187 { "avr3", ARCH_AVR3
, NULL
},
188 { "at43usb320", ARCH_AVR3
, "__AVR_AT43USB320__" },
189 { "at43usb355", ARCH_AVR3
, "__AVR_AT43USB355__" },
190 { "at76c711", ARCH_AVR3
, "__AVR_AT76C711__" },
191 /* Classic, == 128K. */
192 { "avr31", ARCH_AVR31
, NULL
},
193 { "atmega103", ARCH_AVR31
, "__AVR_ATmega103__" },
194 /* Classic + MOVW + JMP/CALL. */
195 { "avr35", ARCH_AVR35
, NULL
},
196 { "at90usb82", ARCH_AVR35
, "__AVR_AT90USB82__" },
197 { "at90usb162", ARCH_AVR35
, "__AVR_AT90USB162__" },
198 /* Enhanced, <= 8K. */
199 { "avr4", ARCH_AVR4
, NULL
},
200 { "atmega8", ARCH_AVR4
, "__AVR_ATmega8__" },
201 { "atmega48", ARCH_AVR4
, "__AVR_ATmega48__" },
202 { "atmega48p", ARCH_AVR4
, "__AVR_ATmega48P__" },
203 { "atmega88", ARCH_AVR4
, "__AVR_ATmega88__" },
204 { "atmega88p", ARCH_AVR4
, "__AVR_ATmega88P__" },
205 { "atmega8515", ARCH_AVR4
, "__AVR_ATmega8515__" },
206 { "atmega8535", ARCH_AVR4
, "__AVR_ATmega8535__" },
207 { "atmega8hva", ARCH_AVR4
, "__AVR_ATmega8HVA__" },
208 { "at90pwm1", ARCH_AVR4
, "__AVR_AT90PWM1__" },
209 { "at90pwm2", ARCH_AVR4
, "__AVR_AT90PWM2__" },
210 { "at90pwm2b", ARCH_AVR4
, "__AVR_AT90PWM2B__" },
211 { "at90pwm3", ARCH_AVR4
, "__AVR_AT90PWM3__" },
212 { "at90pwm3b", ARCH_AVR4
, "__AVR_AT90PWM3B__" },
213 /* Enhanced, > 8K, <= 64K. */
214 { "avr5", ARCH_AVR5
, NULL
},
215 { "atmega16", ARCH_AVR5
, "__AVR_ATmega16__" },
216 { "atmega161", ARCH_AVR5
, "__AVR_ATmega161__" },
217 { "atmega162", ARCH_AVR5
, "__AVR_ATmega162__" },
218 { "atmega163", ARCH_AVR5
, "__AVR_ATmega163__" },
219 { "atmega164p", ARCH_AVR5
, "__AVR_ATmega164P__" },
220 { "atmega165", ARCH_AVR5
, "__AVR_ATmega165__" },
221 { "atmega165p", ARCH_AVR5
, "__AVR_ATmega165P__" },
222 { "atmega168", ARCH_AVR5
, "__AVR_ATmega168__" },
223 { "atmega168p", ARCH_AVR5
, "__AVR_ATmega168P__" },
224 { "atmega169", ARCH_AVR5
, "__AVR_ATmega169__" },
225 { "atmega169p", ARCH_AVR5
, "__AVR_ATmega169P__" },
226 { "atmega32", ARCH_AVR5
, "__AVR_ATmega32__" },
227 { "atmega323", ARCH_AVR5
, "__AVR_ATmega323__" },
228 { "atmega324p", ARCH_AVR5
, "__AVR_ATmega324P__" },
229 { "atmega325", ARCH_AVR5
, "__AVR_ATmega325__" },
230 { "atmega325p", ARCH_AVR5
, "__AVR_ATmega325P__" },
231 { "atmega3250", ARCH_AVR5
, "__AVR_ATmega3250__" },
232 { "atmega3250p", ARCH_AVR5
, "__AVR_ATmega3250P__" },
233 { "atmega328p", ARCH_AVR5
, "__AVR_ATmega328P__" },
234 { "atmega329", ARCH_AVR5
, "__AVR_ATmega329__" },
235 { "atmega329p", ARCH_AVR5
, "__AVR_ATmega329P__" },
236 { "atmega3290", ARCH_AVR5
, "__AVR_ATmega3290__" },
237 { "atmega3290p", ARCH_AVR5
, "__AVR_ATmega3290P__" },
238 { "atmega32hvb", ARCH_AVR5
, "__AVR_ATmega32HVB__" },
239 { "atmega406", ARCH_AVR5
, "__AVR_ATmega406__" },
240 { "atmega64", ARCH_AVR5
, "__AVR_ATmega64__" },
241 { "atmega640", ARCH_AVR5
, "__AVR_ATmega640__" },
242 { "atmega644", ARCH_AVR5
, "__AVR_ATmega644__" },
243 { "atmega644p", ARCH_AVR5
, "__AVR_ATmega644P__" },
244 { "atmega645", ARCH_AVR5
, "__AVR_ATmega645__" },
245 { "atmega6450", ARCH_AVR5
, "__AVR_ATmega6450__" },
246 { "atmega649", ARCH_AVR5
, "__AVR_ATmega649__" },
247 { "atmega6490", ARCH_AVR5
, "__AVR_ATmega6490__" },
248 { "atmega16hva", ARCH_AVR5
, "__AVR_ATmega16HVA__" },
249 { "at90can32", ARCH_AVR5
, "__AVR_AT90CAN32__" },
250 { "at90can64", ARCH_AVR5
, "__AVR_AT90CAN64__" },
251 { "at90pwm216", ARCH_AVR5
, "__AVR_AT90PWM216__" },
252 { "at90pwm316", ARCH_AVR5
, "__AVR_AT90PWM316__" },
253 { "at90usb646", ARCH_AVR5
, "__AVR_AT90USB646__" },
254 { "at90usb647", ARCH_AVR5
, "__AVR_AT90USB647__" },
255 { "at94k", ARCH_AVR5
, "__AVR_AT94K__" },
256 /* Enhanced, == 128K. */
257 { "avr51", ARCH_AVR51
, NULL
},
258 { "atmega128", ARCH_AVR51
, "__AVR_ATmega128__" },
259 { "atmega1280", ARCH_AVR51
, "__AVR_ATmega1280__" },
260 { "atmega1281", ARCH_AVR51
, "__AVR_ATmega1281__" },
261 { "atmega1284p", ARCH_AVR51
, "__AVR_ATmega1284P__" },
262 { "at90can128", ARCH_AVR51
, "__AVR_AT90CAN128__" },
263 { "at90usb1286", ARCH_AVR51
, "__AVR_AT90USB1286__" },
264 { "at90usb1287", ARCH_AVR51
, "__AVR_AT90USB1287__" },
266 { "avr6", ARCH_AVR6
, NULL
},
267 { "atmega2560", ARCH_AVR6
, "__AVR_ATmega2560__" },
268 { "atmega2561", ARCH_AVR6
, "__AVR_ATmega2561__" },
269 /* Assembler only. */
270 { "avr1", ARCH_AVR1
, NULL
},
271 { "at90s1200", ARCH_AVR1
, "__AVR_AT90S1200__" },
272 { "attiny11", ARCH_AVR1
, "__AVR_ATtiny11__" },
273 { "attiny12", ARCH_AVR1
, "__AVR_ATtiny12__" },
274 { "attiny15", ARCH_AVR1
, "__AVR_ATtiny15__" },
275 { "attiny28", ARCH_AVR1
, "__AVR_ATtiny28__" },
276 { NULL
, ARCH_UNKNOWN
, NULL
}
279 int avr_case_values_threshold
= 30000;
281 /* Initialize the GCC target structure. */
282 #undef TARGET_ASM_ALIGNED_HI_OP
283 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
284 #undef TARGET_ASM_ALIGNED_SI_OP
285 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
286 #undef TARGET_ASM_UNALIGNED_HI_OP
287 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
288 #undef TARGET_ASM_UNALIGNED_SI_OP
289 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
290 #undef TARGET_ASM_INTEGER
291 #define TARGET_ASM_INTEGER avr_assemble_integer
292 #undef TARGET_ASM_FILE_START
293 #define TARGET_ASM_FILE_START avr_file_start
294 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
295 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
296 #undef TARGET_ASM_FILE_END
297 #define TARGET_ASM_FILE_END avr_file_end
299 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
300 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
301 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
302 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
303 #undef TARGET_ATTRIBUTE_TABLE
304 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
305 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
306 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
307 #undef TARGET_INSERT_ATTRIBUTES
308 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
309 #undef TARGET_SECTION_TYPE_FLAGS
310 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
311 #undef TARGET_RTX_COSTS
312 #define TARGET_RTX_COSTS avr_rtx_costs
313 #undef TARGET_ADDRESS_COST
314 #define TARGET_ADDRESS_COST avr_address_cost
315 #undef TARGET_MACHINE_DEPENDENT_REORG
316 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
318 #undef TARGET_RETURN_IN_MEMORY
319 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
321 #undef TARGET_STRICT_ARGUMENT_NAMING
322 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
324 struct gcc_target targetm
= TARGET_INITIALIZER
;
327 avr_override_options (void)
329 const struct mcu_type_s
*t
;
331 flag_delete_null_pointer_checks
= 0;
333 for (t
= avr_mcu_types
; t
->name
; t
++)
334 if (strcmp (t
->name
, avr_mcu_name
) == 0)
339 fprintf (stderr
, "unknown MCU '%s' specified\nKnown MCU names:\n",
341 for (t
= avr_mcu_types
; t
->name
; t
++)
342 fprintf (stderr
," %s\n", t
->name
);
345 avr_current_arch
= &avr_arch_types
[t
->arch
];
346 avr_extra_arch_macro
= t
->macro
;
348 if (optimize
&& !TARGET_NO_TABLEJUMP
)
349 avr_case_values_threshold
=
350 (!AVR_HAVE_JMP_CALL
|| TARGET_CALL_PROLOGUES
) ? 8 : 17;
352 tmp_reg_rtx
= gen_rtx_REG (QImode
, TMP_REGNO
);
353 zero_reg_rtx
= gen_rtx_REG (QImode
, ZERO_REGNO
);
355 init_machine_status
= avr_init_machine_status
;
358 /* return register class from register number. */
360 static const int reg_class_tab
[]={
361 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
362 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
363 GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,GENERAL_REGS
,
364 GENERAL_REGS
, /* r0 - r15 */
365 LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,LD_REGS
,
366 LD_REGS
, /* r16 - 23 */
367 ADDW_REGS
,ADDW_REGS
, /* r24,r25 */
368 POINTER_X_REGS
,POINTER_X_REGS
, /* r26,27 */
369 POINTER_Y_REGS
,POINTER_Y_REGS
, /* r28,r29 */
370 POINTER_Z_REGS
,POINTER_Z_REGS
, /* r30,r31 */
371 STACK_REG
,STACK_REG
/* SPL,SPH */
374 /* Function to set up the backend function structure. */
376 static struct machine_function
*
377 avr_init_machine_status (void)
379 return ((struct machine_function
*)
380 ggc_alloc_cleared (sizeof (struct machine_function
)));
383 /* Return register class for register R. */
386 avr_regno_reg_class (int r
)
389 return reg_class_tab
[r
];
393 /* Return nonzero if FUNC is a naked function. */
396 avr_naked_function_p (tree func
)
400 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
402 a
= lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
403 return a
!= NULL_TREE
;
406 /* Return nonzero if FUNC is an interrupt function as specified
407 by the "interrupt" attribute. */
410 interrupt_function_p (tree func
)
414 if (TREE_CODE (func
) != FUNCTION_DECL
)
417 a
= lookup_attribute ("interrupt", DECL_ATTRIBUTES (func
));
418 return a
!= NULL_TREE
;
421 /* Return nonzero if FUNC is a signal function as specified
422 by the "signal" attribute. */
425 signal_function_p (tree func
)
429 if (TREE_CODE (func
) != FUNCTION_DECL
)
432 a
= lookup_attribute ("signal", DECL_ATTRIBUTES (func
));
433 return a
!= NULL_TREE
;
436 /* Return nonzero if FUNC is a OS_task function. */
439 avr_OS_task_function_p (tree func
)
443 gcc_assert (TREE_CODE (func
) == FUNCTION_DECL
);
445 a
= lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func
)));
446 return a
!= NULL_TREE
;
449 /* Return the number of hard registers to push/pop in the prologue/epilogue
450 of the current function, and optionally store these registers in SET. */
453 avr_regs_to_save (HARD_REG_SET
*set
)
456 int int_or_sig_p
= (interrupt_function_p (current_function_decl
)
457 || signal_function_p (current_function_decl
));
459 if (!reload_completed
)
460 cfun
->machine
->is_leaf
= leaf_function_p ();
463 CLEAR_HARD_REG_SET (*set
);
466 /* No need to save any registers if the function never returns or
467 is have "OS_task" attribute. */
468 if (TREE_THIS_VOLATILE (current_function_decl
)
469 || cfun
->machine
->is_OS_task
)
472 for (reg
= 0; reg
< 32; reg
++)
474 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
475 any global register variables. */
479 if ((int_or_sig_p
&& !cfun
->machine
->is_leaf
&& call_used_regs
[reg
])
480 || (df_regs_ever_live_p (reg
)
481 && (int_or_sig_p
|| !call_used_regs
[reg
])
482 && !(frame_pointer_needed
483 && (reg
== REG_Y
|| reg
== (REG_Y
+1)))))
486 SET_HARD_REG_BIT (*set
, reg
);
493 /* Compute offset between arg_pointer and frame_pointer. */
496 initial_elimination_offset (int from
, int to
)
498 if (from
== FRAME_POINTER_REGNUM
&& to
== STACK_POINTER_REGNUM
)
502 int offset
= frame_pointer_needed
? 2 : 0;
503 int avr_pc_size
= AVR_HAVE_EIJMP_EICALL
? 3 : 2;
505 offset
+= avr_regs_to_save (NULL
);
506 return get_frame_size () + (avr_pc_size
) + 1 + offset
;
510 /* Return 1 if the function epilogue is just a single "ret". */
513 avr_simple_epilogue (void)
515 return (! frame_pointer_needed
516 && get_frame_size () == 0
517 && avr_regs_to_save (NULL
) == 0
518 && ! interrupt_function_p (current_function_decl
)
519 && ! signal_function_p (current_function_decl
)
520 && ! avr_naked_function_p (current_function_decl
)
521 && ! TREE_THIS_VOLATILE (current_function_decl
));
524 /* This function checks sequence of live registers. */
527 sequent_regs_live (void)
533 for (reg
= 0; reg
< 18; ++reg
)
535 if (!call_used_regs
[reg
])
537 if (df_regs_ever_live_p (reg
))
547 if (!frame_pointer_needed
)
549 if (df_regs_ever_live_p (REG_Y
))
557 if (df_regs_ever_live_p (REG_Y
+1))
570 return (cur_seq
== live_seq
) ? live_seq
: 0;
573 /* Output function prologue. */
576 expand_prologue (void)
581 HOST_WIDE_INT size
= get_frame_size();
582 /* Define templates for push instructions. */
583 rtx pushbyte
= gen_rtx_MEM (QImode
,
584 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
585 rtx pushword
= gen_rtx_MEM (HImode
,
586 gen_rtx_POST_DEC (HImode
, stack_pointer_rtx
));
589 last_insn_address
= 0;
591 /* Init cfun->machine. */
592 cfun
->machine
->is_naked
= avr_naked_function_p (current_function_decl
);
593 cfun
->machine
->is_interrupt
= interrupt_function_p (current_function_decl
);
594 cfun
->machine
->is_signal
= signal_function_p (current_function_decl
);
595 cfun
->machine
->is_OS_task
= avr_OS_task_function_p (current_function_decl
);
597 /* Prologue: naked. */
598 if (cfun
->machine
->is_naked
)
603 avr_regs_to_save (&set
);
604 live_seq
= sequent_regs_live ();
605 minimize
= (TARGET_CALL_PROLOGUES
606 && !cfun
->machine
->is_interrupt
607 && !cfun
->machine
->is_signal
608 && !cfun
->machine
->is_OS_task
611 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
613 if (cfun
->machine
->is_interrupt
)
615 /* Enable interrupts. */
616 insn
= emit_insn (gen_enable_interrupt ());
617 RTX_FRAME_RELATED_P (insn
) = 1;
621 insn
= emit_move_insn (pushbyte
, zero_reg_rtx
);
622 RTX_FRAME_RELATED_P (insn
) = 1;
625 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
626 RTX_FRAME_RELATED_P (insn
) = 1;
629 insn
= emit_move_insn (tmp_reg_rtx
,
630 gen_rtx_MEM (QImode
, GEN_INT (SREG_ADDR
)));
631 RTX_FRAME_RELATED_P (insn
) = 1;
632 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
633 RTX_FRAME_RELATED_P (insn
) = 1;
637 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
639 insn
= emit_move_insn (tmp_reg_rtx
,
640 gen_rtx_MEM (QImode
, GEN_INT (RAMPZ_ADDR
)));
641 RTX_FRAME_RELATED_P (insn
) = 1;
642 insn
= emit_move_insn (pushbyte
, tmp_reg_rtx
);
643 RTX_FRAME_RELATED_P (insn
) = 1;
646 /* Clear zero reg. */
647 insn
= emit_move_insn (zero_reg_rtx
, const0_rtx
);
648 RTX_FRAME_RELATED_P (insn
) = 1;
650 /* Prevent any attempt to delete the setting of ZERO_REG! */
651 emit_insn (gen_rtx_USE (VOIDmode
, zero_reg_rtx
));
653 if (minimize
&& (frame_pointer_needed
|| live_seq
> 6))
655 insn
= emit_move_insn (gen_rtx_REG (HImode
, REG_X
),
656 gen_int_mode (size
, HImode
));
657 RTX_FRAME_RELATED_P (insn
) = 1;
660 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq
, HImode
),
661 gen_int_mode (size
+ live_seq
, HImode
)));
662 RTX_FRAME_RELATED_P (insn
) = 1;
667 for (reg
= 0; reg
< 32; ++reg
)
669 if (TEST_HARD_REG_BIT (set
, reg
))
671 /* Emit push of register to save. */
672 insn
=emit_move_insn (pushbyte
, gen_rtx_REG (QImode
, reg
));
673 RTX_FRAME_RELATED_P (insn
) = 1;
676 if (frame_pointer_needed
)
678 if(!cfun
->machine
->is_OS_task
)
680 /* Push frame pointer. */
681 insn
= emit_move_insn (pushword
, frame_pointer_rtx
);
682 RTX_FRAME_RELATED_P (insn
) = 1;
687 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
688 RTX_FRAME_RELATED_P (insn
) = 1;
692 /* Creating a frame can be done by direct manipulation of the
693 stack or via the frame pointer. These two methods are:
700 the optimum method depends on function type, stack and frame size.
701 To avoid a complex logic, both methods are tested and shortest
705 if (TARGET_TINY_STACK
)
707 if (size
< -63 || size
> 63)
708 warning (0, "large frame pointer change (%d) with -mtiny-stack", size
);
710 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
711 over 'sbiw' (2 cycles, same size). */
712 myfp
= gen_rtx_REG (QImode
, REGNO (frame_pointer_rtx
));
716 /* Normal sized addition. */
717 myfp
= frame_pointer_rtx
;
719 /* Calculate length. */
722 get_attr_length (gen_move_insn (frame_pointer_rtx
, stack_pointer_rtx
));
724 get_attr_length (gen_move_insn (myfp
,
725 gen_rtx_PLUS (GET_MODE(myfp
), myfp
,
729 get_attr_length (gen_move_insn (stack_pointer_rtx
, frame_pointer_rtx
));
731 /* Method 2-Adjust Stack pointer. */
732 int sp_plus_length
= 0;
736 get_attr_length (gen_move_insn (stack_pointer_rtx
,
737 gen_rtx_PLUS (HImode
, stack_pointer_rtx
,
741 get_attr_length (gen_move_insn (frame_pointer_rtx
, stack_pointer_rtx
));
743 /* Use shortest method. */
744 if (size
<= 6 && (sp_plus_length
< method1_length
))
746 insn
= emit_move_insn (stack_pointer_rtx
,
747 gen_rtx_PLUS (HImode
, stack_pointer_rtx
,
748 gen_int_mode (-size
, HImode
)));
749 RTX_FRAME_RELATED_P (insn
) = 1;
750 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
751 RTX_FRAME_RELATED_P (insn
) = 1;
755 insn
= emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
756 RTX_FRAME_RELATED_P (insn
) = 1;
757 insn
= emit_move_insn (myfp
,
758 gen_rtx_PLUS (GET_MODE(myfp
), myfp
,
759 gen_int_mode (-size
, GET_MODE(myfp
))));
760 RTX_FRAME_RELATED_P (insn
) = 1;
761 insn
= emit_move_insn ( stack_pointer_rtx
, frame_pointer_rtx
);
762 RTX_FRAME_RELATED_P (insn
) = 1;
769 /* Output summary at end of function prologue. */
772 avr_asm_function_end_prologue (FILE *file
)
774 if (cfun
->machine
->is_naked
)
776 fputs ("/* prologue: naked */\n", file
);
780 if (cfun
->machine
->is_interrupt
)
782 fputs ("/* prologue: Interrupt */\n", file
);
784 else if (cfun
->machine
->is_signal
)
786 fputs ("/* prologue: Signal */\n", file
);
789 fputs ("/* prologue: function */\n", file
);
791 fprintf (file
, "/* frame size = " HOST_WIDE_INT_PRINT_DEC
" */\n",
796 /* Implement EPILOGUE_USES. */
799 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED
)
803 && (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
))
808 /* Output RTL epilogue. */
811 expand_epilogue (void)
817 HOST_WIDE_INT size
= get_frame_size();
819 /* epilogue: naked */
820 if (cfun
->machine
->is_naked
)
822 emit_jump_insn (gen_return ());
826 avr_regs_to_save (&set
);
827 live_seq
= sequent_regs_live ();
828 minimize
= (TARGET_CALL_PROLOGUES
829 && !cfun
->machine
->is_interrupt
830 && !cfun
->machine
->is_signal
831 && !cfun
->machine
->is_OS_task
834 if (minimize
&& (frame_pointer_needed
|| live_seq
> 4))
836 if (frame_pointer_needed
)
838 /* Get rid of frame. */
839 emit_move_insn(frame_pointer_rtx
,
840 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
841 gen_int_mode (size
, HImode
)));
845 emit_move_insn (frame_pointer_rtx
, stack_pointer_rtx
);
848 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq
, HImode
)));
852 if (frame_pointer_needed
)
856 /* Try two methods to adjust stack and select shortest. */
858 /* Method 1-Adjust frame pointer. */
860 get_attr_length (gen_move_insn (frame_pointer_rtx
,
861 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
864 /* Copy to stack pointer. */
866 get_attr_length (gen_move_insn (stack_pointer_rtx
, frame_pointer_rtx
));
868 /* Method 2-Adjust Stack pointer. */
869 int sp_plus_length
= 0;
873 get_attr_length (gen_move_insn (stack_pointer_rtx
,
874 gen_rtx_PLUS (HImode
, stack_pointer_rtx
,
878 /* Use shortest method. */
879 if (size
<= 5 && (sp_plus_length
< fp_plus_length
))
881 emit_move_insn (stack_pointer_rtx
,
882 gen_rtx_PLUS (HImode
, stack_pointer_rtx
,
883 gen_int_mode (size
, HImode
)));
887 emit_move_insn (frame_pointer_rtx
,
888 gen_rtx_PLUS (HImode
, frame_pointer_rtx
,
889 gen_int_mode (size
, HImode
)));
890 /* Copy to stack pointer. */
891 emit_move_insn (stack_pointer_rtx
, frame_pointer_rtx
);
894 if(!cfun
->machine
->is_OS_task
)
896 /* Restore previous frame_pointer. */
897 emit_insn (gen_pophi (frame_pointer_rtx
));
900 /* Restore used registers. */
901 for (reg
= 31; reg
>= 0; --reg
)
903 if (TEST_HARD_REG_BIT (set
, reg
))
904 emit_insn (gen_popqi (gen_rtx_REG (QImode
, reg
)));
906 if (cfun
->machine
->is_interrupt
|| cfun
->machine
->is_signal
)
908 /* Restore RAMPZ using tmp reg as scratch. */
910 && (TEST_HARD_REG_BIT (set
, REG_Z
) && TEST_HARD_REG_BIT (set
, REG_Z
+ 1)))
912 emit_insn (gen_popqi (tmp_reg_rtx
));
913 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(RAMPZ_ADDR
)),
917 /* Restore SREG using tmp reg as scratch. */
918 emit_insn (gen_popqi (tmp_reg_rtx
));
920 emit_move_insn (gen_rtx_MEM(QImode
, GEN_INT(SREG_ADDR
)),
923 /* Restore tmp REG. */
924 emit_insn (gen_popqi (tmp_reg_rtx
));
926 /* Restore zero REG. */
927 emit_insn (gen_popqi (zero_reg_rtx
));
930 emit_jump_insn (gen_return ());
934 /* Output summary messages at beginning of function epilogue. */
937 avr_asm_function_begin_epilogue (FILE *file
)
939 fprintf (file
, "/* epilogue start */\n");
942 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
943 machine for a memory operand of mode MODE. */
946 legitimate_address_p (enum machine_mode mode
, rtx x
, int strict
)
948 enum reg_class r
= NO_REGS
;
950 if (TARGET_ALL_DEBUG
)
952 fprintf (stderr
, "mode: (%s) %s %s %s %s:",
954 strict
? "(strict)": "",
955 reload_completed
? "(reload_completed)": "",
956 reload_in_progress
? "(reload_in_progress)": "",
957 reg_renumber
? "(reg_renumber)" : "");
958 if (GET_CODE (x
) == PLUS
959 && REG_P (XEXP (x
, 0))
960 && GET_CODE (XEXP (x
, 1)) == CONST_INT
961 && INTVAL (XEXP (x
, 1)) >= 0
962 && INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
)
965 fprintf (stderr
, "(r%d ---> r%d)", REGNO (XEXP (x
, 0)),
966 true_regnum (XEXP (x
, 0)));
969 if (REG_P (x
) && (strict
? REG_OK_FOR_BASE_STRICT_P (x
)
970 : REG_OK_FOR_BASE_NOSTRICT_P (x
)))
972 else if (CONSTANT_ADDRESS_P (x
))
974 else if (GET_CODE (x
) == PLUS
975 && REG_P (XEXP (x
, 0))
976 && GET_CODE (XEXP (x
, 1)) == CONST_INT
977 && INTVAL (XEXP (x
, 1)) >= 0)
979 int fit
= INTVAL (XEXP (x
, 1)) <= MAX_LD_OFFSET (mode
);
983 || REGNO (XEXP (x
,0)) == REG_Y
984 || REGNO (XEXP (x
,0)) == REG_Z
)
985 r
= BASE_POINTER_REGS
;
986 if (XEXP (x
,0) == frame_pointer_rtx
987 || XEXP (x
,0) == arg_pointer_rtx
)
988 r
= BASE_POINTER_REGS
;
990 else if (frame_pointer_needed
&& XEXP (x
,0) == frame_pointer_rtx
)
993 else if ((GET_CODE (x
) == PRE_DEC
|| GET_CODE (x
) == POST_INC
)
994 && REG_P (XEXP (x
, 0))
995 && (strict
? REG_OK_FOR_BASE_STRICT_P (XEXP (x
, 0))
996 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x
, 0))))
1000 if (TARGET_ALL_DEBUG
)
1002 fprintf (stderr
, " ret = %c\n", r
+ '0');
1004 return r
== NO_REGS
? 0 : (int)r
;
1007 /* Attempts to replace X with a valid
1008 memory address for an operand of mode MODE */
1011 legitimize_address (rtx x
, rtx oldx
, enum machine_mode mode
)
1014 if (TARGET_ALL_DEBUG
)
1016 fprintf (stderr
, "legitimize_address mode: %s", GET_MODE_NAME(mode
));
1020 if (GET_CODE (oldx
) == PLUS
1021 && REG_P (XEXP (oldx
,0)))
1023 if (REG_P (XEXP (oldx
,1)))
1024 x
= force_reg (GET_MODE (oldx
), oldx
);
1025 else if (GET_CODE (XEXP (oldx
, 1)) == CONST_INT
)
1027 int offs
= INTVAL (XEXP (oldx
,1));
1028 if (frame_pointer_rtx
!= XEXP (oldx
,0))
1029 if (offs
> MAX_LD_OFFSET (mode
))
1031 if (TARGET_ALL_DEBUG
)
1032 fprintf (stderr
, "force_reg (big offset)\n");
1033 x
= force_reg (GET_MODE (oldx
), oldx
);
1041 /* Return a pointer register name as a string. */
1044 ptrreg_to_str (int regno
)
1048 case REG_X
: return "X";
1049 case REG_Y
: return "Y";
1050 case REG_Z
: return "Z";
1052 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1057 /* Return the condition name as a string.
1058 Used in conditional jump constructing */
1061 cond_string (enum rtx_code code
)
1070 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1075 if (cc_prev_status
.flags
& CC_OVERFLOW_UNUSABLE
)
1088 /* Output ADDR to FILE as address. */
1091 print_operand_address (FILE *file
, rtx addr
)
1093 switch (GET_CODE (addr
))
1096 fprintf (file
, ptrreg_to_str (REGNO (addr
)));
1100 fprintf (file
, "-%s", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1104 fprintf (file
, "%s+", ptrreg_to_str (REGNO (XEXP (addr
, 0))));
1108 if (CONSTANT_ADDRESS_P (addr
)
1109 && ((GET_CODE (addr
) == SYMBOL_REF
&& SYMBOL_REF_FUNCTION_P (addr
))
1110 || GET_CODE (addr
) == LABEL_REF
))
1112 fprintf (file
, "gs(");
1113 output_addr_const (file
,addr
);
1114 fprintf (file
,")");
1117 output_addr_const (file
, addr
);
1122 /* Output X as assembler operand to file FILE. */
1125 print_operand (FILE *file
, rtx x
, int code
)
1129 if (code
>= 'A' && code
<= 'D')
1134 if (!AVR_HAVE_JMP_CALL
)
1137 else if (code
== '!')
1139 if (AVR_HAVE_EIJMP_EICALL
)
1144 if (x
== zero_reg_rtx
)
1145 fprintf (file
, "__zero_reg__");
1147 fprintf (file
, reg_names
[true_regnum (x
) + abcd
]);
1149 else if (GET_CODE (x
) == CONST_INT
)
1150 fprintf (file
, HOST_WIDE_INT_PRINT_DEC
, INTVAL (x
) + abcd
);
1151 else if (GET_CODE (x
) == MEM
)
1153 rtx addr
= XEXP (x
,0);
1155 if (CONSTANT_P (addr
) && abcd
)
1158 output_address (addr
);
1159 fprintf (file
, ")+%d", abcd
);
1161 else if (code
== 'o')
1163 if (GET_CODE (addr
) != PLUS
)
1164 fatal_insn ("bad address, not (reg+disp):", addr
);
1166 print_operand (file
, XEXP (addr
, 1), 0);
1168 else if (code
== 'p' || code
== 'r')
1170 if (GET_CODE (addr
) != POST_INC
&& GET_CODE (addr
) != PRE_DEC
)
1171 fatal_insn ("bad address, not post_inc or pre_dec:", addr
);
1174 print_operand_address (file
, XEXP (addr
, 0)); /* X, Y, Z */
1176 print_operand (file
, XEXP (addr
, 0), 0); /* r26, r28, r30 */
1178 else if (GET_CODE (addr
) == PLUS
)
1180 print_operand_address (file
, XEXP (addr
,0));
1181 if (REGNO (XEXP (addr
, 0)) == REG_X
)
1182 fatal_insn ("internal compiler error. Bad address:"
1185 print_operand (file
, XEXP (addr
,1), code
);
1188 print_operand_address (file
, addr
);
1190 else if (GET_CODE (x
) == CONST_DOUBLE
)
1194 if (GET_MODE (x
) != SFmode
)
1195 fatal_insn ("internal compiler error. Unknown mode:", x
);
1196 REAL_VALUE_FROM_CONST_DOUBLE (rv
, x
);
1197 REAL_VALUE_TO_TARGET_SINGLE (rv
, val
);
1198 fprintf (file
, "0x%lx", val
);
1200 else if (code
== 'j')
1201 fputs (cond_string (GET_CODE (x
)), file
);
1202 else if (code
== 'k')
1203 fputs (cond_string (reverse_condition (GET_CODE (x
))), file
);
1205 print_operand_address (file
, x
);
1208 /* Update the condition code in the INSN. */
1211 notice_update_cc (rtx body ATTRIBUTE_UNUSED
, rtx insn
)
1215 switch (get_attr_cc (insn
))
1218 /* Insn does not affect CC at all. */
1226 set
= single_set (insn
);
1230 cc_status
.flags
|= CC_NO_OVERFLOW
;
1231 cc_status
.value1
= SET_DEST (set
);
1236 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1237 The V flag may or may not be known but that's ok because
1238 alter_cond will change tests to use EQ/NE. */
1239 set
= single_set (insn
);
1243 cc_status
.value1
= SET_DEST (set
);
1244 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1249 set
= single_set (insn
);
1252 cc_status
.value1
= SET_SRC (set
);
1256 /* Insn doesn't leave CC in a usable state. */
1259 /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
1260 set
= single_set (insn
);
1263 rtx src
= SET_SRC (set
);
1265 if (GET_CODE (src
) == ASHIFTRT
1266 && GET_MODE (src
) == QImode
)
1268 rtx x
= XEXP (src
, 1);
1270 if (GET_CODE (x
) == CONST_INT
1274 cc_status
.value1
= SET_DEST (set
);
1275 cc_status
.flags
|= CC_OVERFLOW_UNUSABLE
;
1283 /* Return maximum number of consecutive registers of
1284 class CLASS needed to hold a value of mode MODE. */
1287 class_max_nregs (enum reg_class
class ATTRIBUTE_UNUSED
,enum machine_mode mode
)
1289 return ((GET_MODE_SIZE (mode
) + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
);
1292 /* Choose mode for jump insn:
1293 1 - relative jump in range -63 <= x <= 62 ;
1294 2 - relative jump in range -2046 <= x <= 2045 ;
1295 3 - absolute jump (only for ATmega[16]03). */
1298 avr_jump_mode (rtx x
, rtx insn
)
1300 int dest_addr
= INSN_ADDRESSES (INSN_UID (GET_MODE (x
) == LABEL_REF
1301 ? XEXP (x
, 0) : x
));
1302 int cur_addr
= INSN_ADDRESSES (INSN_UID (insn
));
1303 int jump_distance
= cur_addr
- dest_addr
;
1305 if (-63 <= jump_distance
&& jump_distance
<= 62)
1307 else if (-2046 <= jump_distance
&& jump_distance
<= 2045)
1309 else if (AVR_HAVE_JMP_CALL
)
/* return an AVR condition jump commands.
   X is a comparison RTX.
   LEN is a number returned by avr_jump_mode function.
   if REVERSE nonzero then condition code in X must be reversed.  */

const char *
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  /* GT/GTU/LE/LEU have no single AVR branch insn; emit a breq to skip
     plus a signed/unsigned branch.  LEN selects rjmp (short), a
     skip-over-rjmp pair, or a skip-over-jmp pair for long distances.
     When the V flag is unusable we test N (brmi/brpl) instead of the
     signed comparisons (brlt/brge).  */
  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
			    AS1 (brpl,%0)) :
		len == 2 ? (AS1 (breq,.+4) CR_TAB
			    AS1 (brmi,.+2) CR_TAB
			    AS1 (rjmp,%0)) :
		(AS1 (breq,.+6) CR_TAB
		 AS1 (brmi,.+4) CR_TAB
		 AS1 (jmp,%0)));

      else
	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
			    AS1 (brge,%0)) :
		len == 2 ? (AS1 (breq,.+4) CR_TAB
			    AS1 (brlt,.+2) CR_TAB
			    AS1 (rjmp,%0)) :
		(AS1 (breq,.+6) CR_TAB
		 AS1 (brlt,.+4) CR_TAB
		 AS1 (jmp,%0)));
    case GTU:
      return (len == 1 ? (AS1 (breq,.+2) CR_TAB
			  AS1 (brsh,%0)) :
	      len == 2 ? (AS1 (breq,.+4) CR_TAB
			  AS1 (brlo,.+2) CR_TAB
			  AS1 (rjmp,%0)) :
	      (AS1 (breq,.+6) CR_TAB
	       AS1 (brlo,.+4) CR_TAB
	       AS1 (jmp,%0)));
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
	return (len == 1 ? (AS1 (breq,%0) CR_TAB
			    AS1 (brmi,%0)) :
		len == 2 ? (AS1 (breq,.+2) CR_TAB
			    AS1 (brpl,.+2) CR_TAB
			    AS1 (rjmp,%0)) :
		(AS1 (breq,.+2) CR_TAB
		 AS1 (brpl,.+4) CR_TAB
		 AS1 (jmp,%0)));
      else
	return (len == 1 ? (AS1 (breq,%0) CR_TAB
			    AS1 (brlt,%0)) :
		len == 2 ? (AS1 (breq,.+2) CR_TAB
			    AS1 (brge,.+2) CR_TAB
			    AS1 (rjmp,%0)) :
		(AS1 (breq,.+2) CR_TAB
		 AS1 (brge,.+4) CR_TAB
		 AS1 (jmp,%0)));
    case LEU:
      return (len == 1 ? (AS1 (breq,%0) CR_TAB
			  AS1 (brlo,%0)) :
	      len == 2 ? (AS1 (breq,.+2) CR_TAB
			  AS1 (brsh,.+2) CR_TAB
			  AS1 (rjmp,%0)) :
	      (AS1 (breq,.+2) CR_TAB
	       AS1 (brsh,.+4) CR_TAB
	       AS1 (jmp,%0)));
    default:
      /* Conditions with a direct branch insn: %j1 emits the branch for
	 the condition, %k1 the branch for the reversed condition.  */
      if (reverse)
	{
	  switch (len)
	    {
	    case 1:
	      return AS1 (br%k1,%0);
	    case 2:
	      return (AS1 (br%j1,.+2) CR_TAB
		      AS1 (rjmp,%0));
	    default:
	      return (AS1 (br%j1,.+4) CR_TAB
		      AS1 (jmp,%0));
	    }
	}
      else
	{
	  switch (len)
	    {
	    case 1:
	      return AS1 (br%j1,%0);
	    case 2:
	      return (AS1 (br%k1,.+2) CR_TAB
		      AS1 (rjmp,%0));
	    default:
	      return (AS1 (br%k1,.+4) CR_TAB
		      AS1 (jmp,%0));
	    }
	}
    }
  return "";
}
1417 /* Predicate function for immediate operand which fits to byte (8bit) */
1420 byte_immediate_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1422 return (GET_CODE (op
) == CONST_INT
1423 && INTVAL (op
) <= 0xff && INTVAL (op
) >= 0);
/* Output all insn addresses and their sizes into the assembly language
   output file.  This is helpful for debugging whether the length attributes
   in the md file are correct.
   Output insn cost for next insn.  */

void
final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
		    int num_operands ATTRIBUTE_UNUSED)
{
  int uid = INSN_UID (insn);

  if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
    {
      /* Emit address, delta to the previous insn's address (i.e. the
	 previous insn's size) and the rtx cost as an asm comment.  */
      fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
	       INSN_ADDRESSES (uid),
	       INSN_ADDRESSES (uid) - last_insn_address,
	       rtx_cost (PATTERN (insn), INSN));
    }
  /* Remember this insn's address so the next call can print the delta.  */
  last_insn_address = INSN_ADDRESSES (uid);
}
/* Return 0 if undefined, 1 if always true or always false.  */

int
avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
{
  /* Mask of all value bits representable in MODE; 0 for unknown modes.  */
  unsigned int max = (mode == QImode ? 0xff :
		      mode == HImode ? 0xffff :
		      mode == SImode ? 0xffffffff : 0);
  if (max && operator && GET_CODE (x) == CONST_INT)
    {
      /* NOTE(review): reconstructed — for signed comparisons the upper
	 bound is halved; confirm against upstream avr.c.  */
      if (unsigned_condition (operator) != operator)
	max >>= 1;

      /* Constant outside the representable range: the comparison has a
	 fixed outcome.  */
      if (max != (INTVAL (x) & max)
	  && INTVAL (x) != 0xff)
	return 1;
    }
  return 0;
}
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.  */

int
function_arg_regno_p(int r)
{
  /* On AVR, arguments are passed in r8 through r25.  */
  if (r < 8)
    return 0;

  return r <= 25;
}
/* Initializing the variable cum for the state at the beginning
   of the argument list.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
		      tree fndecl ATTRIBUTE_UNUSED)
{
  /* NOTE(review): reconstructed initial register budget (r25 down to r8
     is 18 registers); confirm against upstream avr.c.  */
  cum->nregs = 18;
  cum->regno = FIRST_CUM_REG;
  if (!libname && fntype)
    {
      /* A function is stdarg when its last declared parameter type is
	 not void_type_node (i.e. the list ends with "...").  */
      int stdarg = (TYPE_ARG_TYPES (fntype) != 0
		    && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
			!= void_type_node));
      /* Variadic functions pass everything on the stack.  */
      if (stdarg)
	cum->nregs = 0;
    }
}
1496 /* Returns the number of registers to allocate for a function argument. */
1499 avr_num_arg_regs (enum machine_mode mode
, tree type
)
1503 if (mode
== BLKmode
)
1504 size
= int_size_in_bytes (type
);
1506 size
= GET_MODE_SIZE (mode
);
1508 /* Align all function arguments to start in even-numbered registers.
1509 Odd-sized arguments leave holes above them. */
1511 return (size
+ 1) & ~1;
/* Controls whether a function argument is passed
   in a register, and which register.  */

rtx
function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
	      int named ATTRIBUTE_UNUSED)
{
  int bytes = avr_num_arg_regs (mode, type);

  /* Arguments are allocated downwards from cum->regno; pass in a
     register only while enough argument registers remain.  */
  if (cum->nregs && bytes <= cum->nregs)
    return gen_rtx_REG (mode, cum->regno - bytes);

  /* Otherwise the argument goes on the stack.  */
  return NULL_RTX;
}
/* Update the summarizer variable CUM to advance past an argument
   in the argument list.  */

void
function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
		      int named ATTRIBUTE_UNUSED)
{
  int bytes = avr_num_arg_regs (mode, type);

  /* Consume the registers this argument occupied (allocation runs
     downwards from FIRST_CUM_REG).  */
  cum->nregs -= bytes;
  cum->regno -= bytes;

  if (cum->nregs <= 0)
    {
      /* Register budget exhausted: all further arguments go on the
	 stack, reset to a consistent state.  */
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
/***********************************************************************
  Functions for outputting various mov's for a various modes
************************************************************************/

/* Emit assembler for a QImode move.  INSN is the move insn, OPERANDS
   are dest/src, and *L (if non-null) receives the insn length in
   words; when L is null the templates are printed immediately.  */

const char *
output_movqi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (!l)
    l = &dummy;

  *l = 1;

  if (register_operand (dest, QImode))
    {
      if (register_operand (src, QImode)) /* mov r,r */
	{
	  /* Moves touching the stack pointer use I/O in/out insns.  */
	  if (test_hard_reg_class (STACK_REG, dest))
	    return AS2 (out,%0,%1);
	  else if (test_hard_reg_class (STACK_REG, src))
	    return AS2 (in,%0,%1);

	  return AS2 (mov,%0,%1);
	}
      else if (CONSTANT_P (src))
	{
	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
	    return AS2 (ldi,%0,lo8(%1));

	  if (GET_CODE (src) == CONST_INT)
	    {
	      if (src == const0_rtx) /* mov r,L */
		return AS1 (clr,%0);
	      else if (src == const1_rtx)
		{
		  *l = 2;
		  return (AS1 (clr,%0) CR_TAB
			  AS1 (inc,%0));
		}
	      else if (src == constm1_rtx)
		{
		  /* Immediate constants -1 to any register */
		  *l = 2;
		  return (AS1 (clr,%0) CR_TAB
			  AS1 (dec,%0));
		}
	      else
		{
		  /* Power-of-two constant: clear, set T, store one bit.  */
		  int bit_nr = exact_log2 (INTVAL (src));

		  if (bit_nr >= 0)
		    {
		      *l = 3;
		      if (!real_l)
			output_asm_insn ((AS1 (clr,%0) CR_TAB
					  "set"), operands);
		      if (!real_l)
			avr_output_bld (operands, bit_nr);

		      return "";
		    }
		}
	    }

	  /* Last resort, larger than loading from memory.  */
	  /* Borrow r31 via __tmp_reg__ to synthesize ldi into a
	     non-LD register.  */
	  *l = 4;
	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
		  AS2 (ldi,r31,lo8(%1))     CR_TAB
		  AS2 (mov,%0,r31)          CR_TAB
		  AS2 (mov,r31,__tmp_reg__));
	}
      else if (GET_CODE (src) == MEM)
	return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *template;

      /* Store of zero uses the fixed zero register.  */
      if (src == const0_rtx)
	operands[1] = zero_reg_rtx;

      template = out_movqi_mr_r (insn, operands, real_l);

      if (!real_l)
	output_asm_insn (template, operands);

      operands[1] = src;
    }
  return "";
}
/* Emit assembler for a HImode move.  Handles stack-pointer access
   (with the interrupt-safety dance for SPH/SPL), register moves,
   constants, and dispatches memory moves to the helpers.  */

const char *
output_movhi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (!l)
    l = &dummy;

  if (register_operand (dest, HImode))
    {
      if (register_operand (src, HImode)) /* mov r,r */
	{
	  if (test_hard_reg_class (STACK_REG, dest))
	    {
	      /* 8-bit stack pointer: only SPL exists.  */
	      if (TARGET_TINY_STACK)
		return *l = 1, AS2 (out,__SP_L__,%A1);
	      /* Use simple load of stack pointer if no interrupts are used
		 or inside main or signal function prologue where they disabled.  */
	      else if (TARGET_NO_INTERRUPTS
		       || (reload_completed
			   && cfun->machine->is_signal
			   && prologue_epilogue_contains (insn)))
		return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
				AS2 (out,__SP_L__,%A1));
	      /* In interrupt prolog we know interrupts are enabled.  */
	      else if (reload_completed
		       && cfun->machine->is_interrupt
		       && prologue_epilogue_contains (insn))
		return *l = 3, ("cli"                  CR_TAB
				AS2 (out,__SP_H__,%B1) CR_TAB
				AS2 (out,__SP_L__,%A1));
	      /* General case: save SREG, disable interrupts around the
		 two-byte SP update, restore SREG (re-enables I bit).  */
	      return *l = 5, (AS2 (in,__tmp_reg__,__SREG__)  CR_TAB
			      "cli"                          CR_TAB
			      AS2 (out,__SP_H__,%B1)         CR_TAB
			      AS2 (out,__SREG__,__tmp_reg__) CR_TAB
			      AS2 (out,__SP_L__,%A1));
	    }
	  else if (test_hard_reg_class (STACK_REG, src))
	    {
	      return *l = 2, (AS2 (in,%A0,__SP_L__) CR_TAB
			      AS2 (in,%B0,__SP_H__));
	    }

	  if (AVR_HAVE_MOVW)
	    {
	      *l = 1;
	      return (AS2 (movw,%0,%1));
	    }
	  else
	    {
	      *l = 2;
	      return (AS2 (mov,%A0,%A1) CR_TAB
		      AS2 (mov,%B0,%B1));
	    }
	}
      else if (CONSTANT_P (src))
	{
	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
	    {
	      *l = 2;
	      return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
		      AS2 (ldi,%B0,hi8(%1)));
	    }

	  if (GET_CODE (src) == CONST_INT)
	    {
	      if (src == const0_rtx) /* mov r,L */
		{
		  *l = 2;
		  return (AS1 (clr,%A0) CR_TAB
			  AS1 (clr,%B0));
		}
	      else if (src == const1_rtx)
		{
		  *l = 3;
		  return (AS1 (clr,%A0) CR_TAB
			  AS1 (clr,%B0) CR_TAB
			  AS1 (inc,%A0));
		}
	      else if (src == constm1_rtx)
		{
		  /* Immediate constants -1 to any register */
		  *l = 3;
		  return (AS1 (clr,%0)  CR_TAB
			  AS1 (dec,%A0) CR_TAB
			  AS2 (mov,%B0,%A0));
		}
	      else
		{
		  /* Power-of-two constant: clear, set T, store one bit.  */
		  int bit_nr = exact_log2 (INTVAL (src));

		  if (bit_nr >= 0)
		    {
		      *l = 4;
		      if (!real_l)
			output_asm_insn ((AS1 (clr,%A0) CR_TAB
					  AS1 (clr,%B0) CR_TAB
					  "set"), operands);
		      if (!real_l)
			avr_output_bld (operands, bit_nr);

		      return "";
		    }

		  /* Only the high byte is nonzero.  */
		  if ((INTVAL (src) & 0xff) == 0)
		    {
		      *l = 5;
		      return (AS2 (mov,__tmp_reg__,r31) CR_TAB
			      AS1 (clr,%A0)             CR_TAB
			      AS2 (ldi,r31,hi8(%1))     CR_TAB
			      AS2 (mov,%B0,r31)         CR_TAB
			      AS2 (mov,r31,__tmp_reg__));
		    }
		  /* Only the low byte is nonzero.  */
		  else if ((INTVAL (src) & 0xff00) == 0)
		    {
		      *l = 5;
		      return (AS2 (mov,__tmp_reg__,r31) CR_TAB
			      AS2 (ldi,r31,lo8(%1))     CR_TAB
			      AS2 (mov,%A0,r31)         CR_TAB
			      AS1 (clr,%B0)             CR_TAB
			      AS2 (mov,r31,__tmp_reg__));
		    }
		}
	    }

	  /* Last resort, equal to loading from memory.  */
	  /* Borrow r31 to synthesize ldi into a non-LD register pair.  */
	  *l = 6;
	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
		  AS2 (ldi,r31,lo8(%1))     CR_TAB
		  AS2 (mov,%A0,r31)         CR_TAB
		  AS2 (ldi,r31,hi8(%1))     CR_TAB
		  AS2 (mov,%B0,r31)         CR_TAB
		  AS2 (mov,r31,__tmp_reg__));
	}
      else if (GET_CODE (src) == MEM)
	return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *template;

      if (src == const0_rtx)
	operands[1] = zero_reg_rtx;

      template = out_movhi_mr_r (insn, operands, real_l);

      if (!real_l)
	output_asm_insn (template, operands);

      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
/* Emit assembler to load a QImode register from memory.
   OP[0] = dest reg, OP[1] = source MEM; *L (if non-null) gets the
   length in words.  */

const char *
out_movqi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      /* SREG and other low I/O addresses can be read with "in".  */
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
	{
	  *l = 1;
	  return AS2 (in,%0,__SREG__);
	}
      if (optimize > 0 && io_address_operand (x, QImode))
	{
	  *l = 1;
	  return AS2 (in,%0,%1-0x20);
	}
      *l = 2;
      return AS2 (lds,%0,%1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x,0))
	   && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
	{
	  /* Displacement exceeds ldd range: adjust Y temporarily.  */
	  int disp = INTVAL (XEXP (x,1));
	  if (REGNO (XEXP (x,0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
			    AS2 (ldd,%0,Y+63)     CR_TAB
			    AS2 (sbiw,r28,%o1-63));

	  return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
			  AS2 (ld,%0,Y)            CR_TAB
			  AS2 (subi,r28,lo8(%o1))  CR_TAB
			  AS2 (sbci,r29,hi8(%o1)));
	}
      else if (REGNO (XEXP (x,0)) == REG_X)
	{
	  /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
	     it but I have this situation with extremal optimizing options.  */
	  /* X has no displacement addressing; add the offset, load, and
	     restore X only if it is still live afterwards.  */
	  if (reg_overlap_mentioned_p (dest, XEXP (x,0))
	      || reg_unused_after (insn, XEXP (x,0)))
	    return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
			    AS2 (ld,%0,X));

	  return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
			  AS2 (ld,%0,X)      CR_TAB
			  AS2 (sbiw,r26,%o1));
	}
      *l = 1;
      return AS2 (ldd,%0,%1);
    }
  *l = 1;
  return AS2 (ld,%0,%1);
}
/* Emit assembler to load a HImode register from memory.
   OP[0] = dest reg, OP[1] = source MEM; *L (if non-null) gets the
   length in words.  */

const char *
out_movhi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_dest == reg_base)         /* R = (R) */
	{
	  /* Overlap: buffer the low byte in __tmp_reg__.  */
	  *l = 3;
	  return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
		  AS2 (ld,%B0,%1)          CR_TAB
		  AS2 (mov,%A0,__tmp_reg__));
	}
      else if (reg_base == REG_X)        /* (R26) */
	{
	  if (reg_unused_after (insn, base))
	    {
	      *l = 2;
	      return (AS2 (ld,%A0,X+) CR_TAB
		      AS2 (ld,%B0,X));
	    }
	  *l = 3;
	  return (AS2 (ld,%A0,X+) CR_TAB
		  AS2 (ld,%B0,X)  CR_TAB
		  AS2 (sbiw,r26,1));
	}
      else                      /* (R) */
	{
	  *l = 2;
	  return (AS2 (ld,%A0,%1)    CR_TAB
		  AS2 (ldd,%B0,%1+1));
	}
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
	{
	  if (REGNO (XEXP (base, 0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  /* Temporarily bump Y so the bytes fall inside ldd range.  */
	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
			    AS2 (ldd,%A0,Y+62)    CR_TAB
			    AS2 (ldd,%B0,Y+63)    CR_TAB
			    AS2 (sbiw,r28,%o1-62));

	  return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
			  AS2 (ld,%A0,Y)           CR_TAB
			  AS2 (ldd,%B0,Y+1)        CR_TAB
			  AS2 (subi,r28,lo8(%o1))  CR_TAB
			  AS2 (sbci,r29,hi8(%o1)));
	}
      if (reg_base == REG_X)
	{
	  /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
	     it but I have this situation with extremal
	     optimization options.  */

	  *l = 4;
	  if (reg_base == reg_dest)
	    return (AS2 (adiw,r26,%o1)      CR_TAB
		    AS2 (ld,__tmp_reg__,X+) CR_TAB
		    AS2 (ld,%B0,X)          CR_TAB
		    AS2 (mov,%A0,__tmp_reg__));

	  return (AS2 (adiw,r26,%o1) CR_TAB
		  AS2 (ld,%A0,X+)    CR_TAB
		  AS2 (ld,%B0,X)     CR_TAB
		  AS2 (sbiw,r26,%o1+1));
	}

      if (reg_base == reg_dest)
	{
	  /* Overlap with the base register: buffer the low byte.  */
	  *l = 3;
	  return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
		  AS2 (ldd,%B0,%B1)         CR_TAB
		  AS2 (mov,%A0,__tmp_reg__));
	}

      *l = 2;
      return (AS2 (ldd,%A0,%A1) CR_TAB
	      AS2 (ldd,%B0,%B1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
	fatal_insn ("incorrect insn:", insn);

      if (mem_volatile_p)
	{
	  /* Low byte must be read first: pre-adjust, then read upward.  */
	  if (REGNO (XEXP (base, 0)) == REG_X)
	    {
	      *l = 4;
	      return (AS2 (sbiw,r26,2) CR_TAB
		      AS2 (ld,%A0,X+)  CR_TAB
		      AS2 (ld,%B0,X)   CR_TAB
		      AS2 (sbiw,r26,1));
	    }
	  else
	    {
	      *l = 3;
	      return (AS2 (sbiw,%r1,2)   CR_TAB
		      AS2 (ld,%A0,%p1)   CR_TAB
		      AS2 (ldd,%B0,%p1+1));
	    }
	}

      *l = 2;
      return (AS2 (ld,%B0,%1) CR_TAB
	      AS2 (ld,%A0,%1));
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
	fatal_insn ("incorrect insn:", insn);

      *l = 2;
      return (AS2 (ld,%A0,%1) CR_TAB
	      AS2 (ld,%B0,%1));
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* I/O addresses may use the shorter "in" form.  */
      if (optimize > 0 && io_address_operand (base, HImode))
	{
	  *l = 2;
	  return (AS2 (in,%A0,%A1-0x20) CR_TAB
		  AS2 (in,%B0,%B1-0x20));
	}
      *l = 4;
      return (AS2 (lds,%A0,%A1) CR_TAB
	      AS2 (lds,%B0,%B1));
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Emit assembler to load an SImode register from memory.
   OP[0] = dest reg, OP[1] = source MEM; *L (if non-null) gets the
   length in words.  Overlap between dest and the address register is
   handled by buffering one byte in __tmp_reg__.  */

const char *
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_base == REG_X)        /* (R26) */
	{
	  if (reg_dest == REG_X)
	    /* "ld r26,-X" is undefined */
	    /* Dest fully overlaps X: read backwards from the top,
	       loading r26/r27 last via __tmp_reg__.  */
	    return *l=7, (AS2 (adiw,r26,3)        CR_TAB
			  AS2 (ld,r29,X)          CR_TAB
			  AS2 (ld,r28,-X)         CR_TAB
			  AS2 (ld,__tmp_reg__,-X) CR_TAB
			  AS2 (sbiw,r26,1)        CR_TAB
			  AS2 (ld,r26,X)          CR_TAB
			  AS2 (mov,r27,__tmp_reg__));
	  else if (reg_dest == REG_X - 2)
	    /* Dest is r24..r27: the last two bytes clobber X itself.  */
	    return *l=5, (AS2 (ld,%A0,X+)          CR_TAB
			  AS2 (ld,%B0,X+)          CR_TAB
			  AS2 (ld,__tmp_reg__,X+)  CR_TAB
			  AS2 (ld,%D0,X)           CR_TAB
			  AS2 (mov,%C0,__tmp_reg__));
	  else if (reg_unused_after (insn, base))
	    return *l=4, (AS2 (ld,%A0,X+) CR_TAB
			  AS2 (ld,%B0,X+) CR_TAB
			  AS2 (ld,%C0,X+) CR_TAB
			  AS2 (ld,%D0,X));
	  else
	    return *l=5, (AS2 (ld,%A0,X+) CR_TAB
			  AS2 (ld,%B0,X+) CR_TAB
			  AS2 (ld,%C0,X+) CR_TAB
			  AS2 (ld,%D0,X)  CR_TAB
			  AS2 (sbiw,r26,3));
	}
      else
	{
	  if (reg_dest == reg_base)
	    return *l=5, (AS2 (ldd,%D0,%1+3)           CR_TAB
			  AS2 (ldd,%C0,%1+2)           CR_TAB
			  AS2 (ldd,__tmp_reg__,%1+1)   CR_TAB
			  AS2 (ld,%A0,%1)              CR_TAB
			  AS2 (mov,%B0,__tmp_reg__));
	  else if (reg_base == reg_dest + 2)
	    return *l=5, (AS2 (ld,%A0,%1)              CR_TAB
			  AS2 (ldd,%B0,%1+1)           CR_TAB
			  AS2 (ldd,__tmp_reg__,%1+2)   CR_TAB
			  AS2 (ldd,%D0,%1+3)           CR_TAB
			  AS2 (mov,%C0,__tmp_reg__));
	  else
	    return *l=4, (AS2 (ld,%A0,%1)    CR_TAB
			  AS2 (ldd,%B0,%1+1) CR_TAB
			  AS2 (ldd,%C0,%1+2) CR_TAB
			  AS2 (ldd,%D0,%1+3));
	}
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
	{
	  if (REGNO (XEXP (base, 0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  /* Temporarily bump Y so all four bytes fall in ldd range.  */
	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
			    AS2 (ldd,%A0,Y+60)    CR_TAB
			    AS2 (ldd,%B0,Y+61)    CR_TAB
			    AS2 (ldd,%C0,Y+62)    CR_TAB
			    AS2 (ldd,%D0,Y+63)    CR_TAB
			    AS2 (sbiw,r28,%o1-60));

	  return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
			  AS2 (ld,%A0,Y)           CR_TAB
			  AS2 (ldd,%B0,Y+1)        CR_TAB
			  AS2 (ldd,%C0,Y+2)        CR_TAB
			  AS2 (ldd,%D0,Y+3)        CR_TAB
			  AS2 (subi,r28,lo8(%o1))  CR_TAB
			  AS2 (sbci,r29,hi8(%o1)));
	}

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
	{
	  /* R = (X + d) */
	  if (reg_dest == REG_X)
	    {
	      *l = 7;
	      /* "ld r26,-X" is undefined */
	      return (AS2 (adiw,r26,%o1+3)    CR_TAB
		      AS2 (ld,r29,X)          CR_TAB
		      AS2 (ld,r28,-X)         CR_TAB
		      AS2 (ld,__tmp_reg__,-X) CR_TAB
		      AS2 (sbiw,r26,1)        CR_TAB
		      AS2 (ld,r26,X)          CR_TAB
		      AS2 (mov,r27,__tmp_reg__));
	    }
	  *l = 6;
	  if (reg_dest == REG_X - 2)
	    return (AS2 (adiw,r26,%o1)      CR_TAB
		    AS2 (ld,r24,X+)         CR_TAB
		    AS2 (ld,r25,X+)         CR_TAB
		    AS2 (ld,__tmp_reg__,X+) CR_TAB
		    AS2 (ld,r27,X)          CR_TAB
		    AS2 (mov,r26,__tmp_reg__));

	  return (AS2 (adiw,r26,%o1) CR_TAB
		  AS2 (ld,%A0,X+)    CR_TAB
		  AS2 (ld,%B0,X+)    CR_TAB
		  AS2 (ld,%C0,X+)    CR_TAB
		  AS2 (ld,%D0,X)     CR_TAB
		  AS2 (sbiw,r26,%o1+3));
	}
      if (reg_dest == reg_base)
	return *l=5, (AS2 (ldd,%D0,%D1)          CR_TAB
		      AS2 (ldd,%C0,%C1)          CR_TAB
		      AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
		      AS2 (ldd,%A0,%A1)          CR_TAB
		      AS2 (mov,%B0,__tmp_reg__));
      else if (reg_dest == reg_base - 2)
	return *l=5, (AS2 (ldd,%A0,%A1)          CR_TAB
		      AS2 (ldd,%B0,%B1)          CR_TAB
		      AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
		      AS2 (ldd,%D0,%D1)          CR_TAB
		      AS2 (mov,%C0,__tmp_reg__));
      return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
		    AS2 (ldd,%B0,%B1) CR_TAB
		    AS2 (ldd,%C0,%C1) CR_TAB
		    AS2 (ldd,%D0,%D1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, (AS2 (ld,%D0,%1) CR_TAB
		  AS2 (ld,%C0,%1) CR_TAB
		  AS2 (ld,%B0,%1) CR_TAB
		  AS2 (ld,%A0,%1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, (AS2 (ld,%A0,%1) CR_TAB
		  AS2 (ld,%B0,%1) CR_TAB
		  AS2 (ld,%C0,%1) CR_TAB
		  AS2 (ld,%D0,%1));
  else if (CONSTANT_ADDRESS_P (base))
    return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
		  AS2 (lds,%B0,%B1) CR_TAB
		  AS2 (lds,%C0,%C1) CR_TAB
		  AS2 (lds,%D0,%D1));

  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Emit assembler to store an SImode register to memory.
   OP[0] = dest MEM, OP[1] = source reg; *L (if non-null) gets the
   length in words.  Overlap between source and the address register
   is handled by buffering bytes in __tmp_reg__/__zero_reg__.  */

const char *
out_movsi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
		 AS2 (sts,%B0,%B1) CR_TAB
		 AS2 (sts,%C0,%C1) CR_TAB
		 AS2 (sts,%D0,%D1));
  if (reg_base > 0)                 /* (r) */
    {
      if (reg_base == REG_X)                /* (R26) */
	{
	  if (reg_src == REG_X)
	    {
	      /* "st X+,r26" is undefined */
	      /* Source fully overlaps X: park r27 in __tmp_reg__ and
		 store the bytes in an order that never posts X while
		 storing X's own registers.  */
	      if (reg_unused_after (insn, base))
		return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (st,X,r26)            CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X+,__tmp_reg__)   CR_TAB
			      AS2 (st,X+,r28)           CR_TAB
			      AS2 (st,X,r29));
	      else
		return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (st,X,r26)            CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X+,__tmp_reg__)   CR_TAB
			      AS2 (st,X+,r28)           CR_TAB
			      AS2 (st,X,r29)            CR_TAB
			      AS2 (sbiw,r26,3));
	    }
	  else if (reg_base == reg_src + 2)
	    {
	      /* High half of the source is X itself: copy it aside
		 first; __zero_reg__ must be re-cleared afterwards.  */
	      if (reg_unused_after (insn, base))
		return *l=7, (AS2 (mov,__zero_reg__,%C1)  CR_TAB
			      AS2 (mov,__tmp_reg__,%D1)   CR_TAB
			      AS2 (st,%0+,%A1)            CR_TAB
			      AS2 (st,%0+,%B1)            CR_TAB
			      AS2 (st,%0+,__zero_reg__)   CR_TAB
			      AS2 (st,%0,__tmp_reg__)     CR_TAB
			      AS1 (clr,__zero_reg__));
	      else
		return *l=8, (AS2 (mov,__zero_reg__,%C1)  CR_TAB
			      AS2 (mov,__tmp_reg__,%D1)   CR_TAB
			      AS2 (st,%0+,%A1)            CR_TAB
			      AS2 (st,%0+,%B1)            CR_TAB
			      AS2 (st,%0+,__zero_reg__)   CR_TAB
			      AS2 (st,%0,__tmp_reg__)     CR_TAB
			      AS1 (clr,__zero_reg__)      CR_TAB
			      AS2 (sbiw,r26,3));
	    }
	  return *l=5, (AS2 (st,%0+,%A1) CR_TAB
			AS2 (st,%0+,%B1) CR_TAB
			AS2 (st,%0+,%C1) CR_TAB
			AS2 (st,%0,%D1)  CR_TAB
			AS2 (sbiw,r26,3));
	}
      else
	return *l=4, (AS2 (st,%0,%A1)    CR_TAB
		      AS2 (std,%0+1,%B1) CR_TAB
		      AS2 (std,%0+2,%C1) CR_TAB
		      AS2 (std,%0+3,%D1));
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
	{
	  if (reg_base != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  /* Temporarily bump Y so all four bytes fall in std range.  */
	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
			    AS2 (std,Y+60,%A1)    CR_TAB
			    AS2 (std,Y+61,%B1)    CR_TAB
			    AS2 (std,Y+62,%C1)    CR_TAB
			    AS2 (std,Y+63,%D1)    CR_TAB
			    AS2 (sbiw,r28,%o0-60));
	  return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
			  AS2 (st,Y,%A1)           CR_TAB
			  AS2 (std,Y+1,%B1)        CR_TAB
			  AS2 (std,Y+2,%C1)        CR_TAB
			  AS2 (std,Y+3,%D1)        CR_TAB
			  AS2 (subi,r28,lo8(%o0))  CR_TAB
			  AS2 (sbci,r29,hi8(%o0)));
	}
      if (reg_base == REG_X)
	{
	  /* (X + d) = R */
	  if (reg_src == REG_X)
	    {
	      *l = 9;
	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
		      AS2 (mov,__zero_reg__,r27) CR_TAB
		      AS2 (adiw,r26,%o0)         CR_TAB
		      AS2 (st,X+,__tmp_reg__)    CR_TAB
		      AS2 (st,X+,__zero_reg__)   CR_TAB
		      AS2 (st,X+,r28)            CR_TAB
		      AS2 (st,X,r29)             CR_TAB
		      AS1 (clr,__zero_reg__)     CR_TAB
		      AS2 (sbiw,r26,%o0+3));
	    }
	  else if (reg_src == REG_X - 2)
	    {
	      *l = 9;
	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
		      AS2 (mov,__zero_reg__,r27) CR_TAB
		      AS2 (adiw,r26,%o0)         CR_TAB
		      AS2 (st,X+,r24)            CR_TAB
		      AS2 (st,X+,r25)            CR_TAB
		      AS2 (st,X+,__tmp_reg__)    CR_TAB
		      AS2 (st,X,__zero_reg__)    CR_TAB
		      AS1 (clr,__zero_reg__)     CR_TAB
		      AS2 (sbiw,r26,%o0+3));
	    }
	  *l = 6;
	  return (AS2 (adiw,r26,%o0) CR_TAB
		  AS2 (st,X+,%A1)    CR_TAB
		  AS2 (st,X+,%B1)    CR_TAB
		  AS2 (st,X+,%C1)    CR_TAB
		  AS2 (st,X,%D1)     CR_TAB
		  AS2 (sbiw,r26,%o0+3));
	}
      return *l=4, (AS2 (std,%A0,%A1) CR_TAB
		    AS2 (std,%B0,%B1) CR_TAB
		    AS2 (std,%C0,%C1) CR_TAB
		    AS2 (std,%D0,%D1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, (AS2 (st,%0,%D1) CR_TAB
		  AS2 (st,%0,%C1) CR_TAB
		  AS2 (st,%0,%B1) CR_TAB
		  AS2 (st,%0,%A1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, (AS2 (st,%0,%A1) CR_TAB
		  AS2 (st,%0,%B1) CR_TAB
		  AS2 (st,%0,%C1) CR_TAB
		  AS2 (st,%0,%D1));
  fatal_insn ("unknown move insn:",insn);
  return "";
}
/* Emit assembler for an SImode or SFmode move (both are 4 bytes on
   AVR).  INSN is the move insn, OPERANDS are dest/src, *L (if
   non-null) receives the length in words.  */

const char *
output_movsisf(rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (!l)
    l = &dummy;

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
	{
	  /* Copy direction chosen so overlapping pairs are not
	     clobbered before they are read.  */
	  if (true_regnum (dest) > true_regnum (src))
	    {
	      if (AVR_HAVE_MOVW)
		{
		  *l = 2;
		  return (AS2 (movw,%C0,%C1) CR_TAB
			  AS2 (movw,%A0,%A1));
		}
	      *l = 4;
	      return (AS2 (mov,%D0,%D1) CR_TAB
		      AS2 (mov,%C0,%C1) CR_TAB
		      AS2 (mov,%B0,%B1) CR_TAB
		      AS2 (mov,%A0,%A1));
	    }
	  else
	    {
	      if (AVR_HAVE_MOVW)
		{
		  *l = 2;
		  return (AS2 (movw,%A0,%A1) CR_TAB
			  AS2 (movw,%C0,%C1));
		}
	      *l = 4;
	      return (AS2 (mov,%A0,%A1) CR_TAB
		      AS2 (mov,%B0,%B1) CR_TAB
		      AS2 (mov,%C0,%C1) CR_TAB
		      AS2 (mov,%D0,%D1));
	    }
	}
      else if (CONSTANT_P (src))
	{
	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
	    {
	      *l = 4;
	      return (AS2 (ldi,%A0,lo8(%1))  CR_TAB
		      AS2 (ldi,%B0,hi8(%1))  CR_TAB
		      AS2 (ldi,%C0,hlo8(%1)) CR_TAB
		      AS2 (ldi,%D0,hhi8(%1)));
	    }

	  if (GET_CODE (src) == CONST_INT)
	    {
	      /* Template that clears all four destination bytes.  */
	      const char *const clr_op0 =
		AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
				 AS1 (clr,%B0) CR_TAB
				 AS2 (movw,%C0,%A0))
			      : (AS1 (clr,%A0) CR_TAB
				 AS1 (clr,%B0) CR_TAB
				 AS1 (clr,%C0) CR_TAB
				 AS1 (clr,%D0));

	      if (src == const0_rtx) /* mov r,L */
		{
		  *l = AVR_HAVE_MOVW ? 3 : 4;
		  return clr_op0;
		}
	      else if (src == const1_rtx)
		{
		  if (!real_l)
		    output_asm_insn (clr_op0, operands);
		  *l = AVR_HAVE_MOVW ? 4 : 5;
		  return AS1 (inc,%A0);
		}
	      else if (src == constm1_rtx)
		{
		  /* Immediate constants -1 to any register */
		  if (AVR_HAVE_MOVW)
		    {
		      *l = 4;
		      return (AS1 (clr,%A0)     CR_TAB
			      AS1 (dec,%A0)     CR_TAB
			      AS2 (mov,%B0,%A0) CR_TAB
			      AS2 (movw,%C0,%A0));
		    }
		  *l = 5;
		  return (AS1 (clr,%A0)     CR_TAB
			  AS1 (dec,%A0)     CR_TAB
			  AS2 (mov,%B0,%A0) CR_TAB
			  AS2 (mov,%C0,%A0) CR_TAB
			  AS2 (mov,%D0,%A0));
		}
	      else
		{
		  /* Power-of-two constant: clear, set T, store one bit.  */
		  int bit_nr = exact_log2 (INTVAL (src));

		  if (bit_nr >= 0)
		    {
		      *l = AVR_HAVE_MOVW ? 5 : 6;
		      if (!real_l)
			{
			  output_asm_insn (clr_op0, operands);
			  output_asm_insn ("set", operands);
			}
		      if (!real_l)
			avr_output_bld (operands, bit_nr);

		      return "";
		    }
		}
	    }

	  /* Last resort, better than loading from memory.  */
	  /* Borrow r31 to synthesize ldi into non-LD registers.  */
	  *l = 10;
	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
		  AS2 (ldi,r31,lo8(%1))     CR_TAB
		  AS2 (mov,%A0,r31)         CR_TAB
		  AS2 (ldi,r31,hi8(%1))     CR_TAB
		  AS2 (mov,%B0,r31)         CR_TAB
		  AS2 (ldi,r31,hlo8(%1))    CR_TAB
		  AS2 (mov,%C0,r31)         CR_TAB
		  AS2 (ldi,r31,hhi8(%1))    CR_TAB
		  AS2 (mov,%D0,r31)         CR_TAB
		  AS2 (mov,r31,__tmp_reg__));
	}
      else if (GET_CODE (src) == MEM)
	return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *template;

      if (src == const0_rtx)
	operands[1] = zero_reg_rtx;

      template = out_movsi_mr_r (insn, operands, real_l);

      if (!real_l)
	output_asm_insn (template, operands);

      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
/* Emit assembler to store a QImode register to memory.
   OP[0] = dest MEM, OP[1] = source reg; *L (if non-null) gets the
   length in words.  */

const char *
out_movqi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      /* SREG and other low I/O addresses can be written with "out".  */
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
	{
	  *l = 1;
	  return AS2 (out,__SREG__,%1);
	}
      if (optimize > 0 && io_address_operand (x, QImode))
	{
	  *l = 1;
	  return AS2 (out,%0-0x20,%1);
	}
      *l = 2;
      return AS2 (sts,%0,%1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x,0))
	   && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
	{
	  /* Displacement exceeds std range: adjust Y temporarily.  */
	  int disp = INTVAL (XEXP (x,1));
	  if (REGNO (XEXP (x,0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
			    AS2 (std,Y+63,%1)     CR_TAB
			    AS2 (sbiw,r28,%o0-63));

	  return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
			  AS2 (st,Y,%1)            CR_TAB
			  AS2 (subi,r28,lo8(%o0))  CR_TAB
			  AS2 (sbci,r29,hi8(%o0)));
	}
      else if (REGNO (XEXP (x,0)) == REG_X)
	{
	  /* X has no displacement addressing; if the value being
	     stored lives in X, buffer it first.  */
	  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
	    {
	      if (reg_unused_after (insn, XEXP (x,0)))
		return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
				AS2 (adiw,r26,%o0)       CR_TAB
				AS2 (st,X,__tmp_reg__));

	      return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
			      AS2 (adiw,r26,%o0)       CR_TAB
			      AS2 (st,X,__tmp_reg__)   CR_TAB
			      AS2 (sbiw,r26,%o0));
	    }
	  else
	    {
	      if (reg_unused_after (insn, XEXP (x,0)))
		return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
				AS2 (st,X,%1));

	      return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
			      AS2 (st,X,%1)      CR_TAB
			      AS2 (sbiw,r26,%o0));
	    }
	}
      *l = 1;
      return AS2 (std,%0,%1);
    }
  *l = 1;
  return AS2 (st,%0,%1);
}
/* Emit assembler to store a HImode register to memory.
   OP[0] = dest MEM, OP[1] = source reg; *L (if non-null) gets the
   length in words.  */

const char *
out_movhi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  /* "volatile" forces writing high byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (dest);
  int tmp;

  if (!l)
    l = &tmp;
  if (CONSTANT_ADDRESS_P (base))
    {
      if (optimize > 0 && io_address_operand (base, HImode))
	{
	  *l = 2;
	  return (AS2 (out,%B0-0x20,%B1) CR_TAB
		  AS2 (out,%A0-0x20,%A1));
	}
      return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
		      AS2 (sts,%A0,%A1));
    }
  if (reg_base > 0)
    {
      if (reg_base == REG_X)
	{
	  if (reg_src == REG_X)
	    {
	      /* "st X+,r26" and "st -X,r26" are undefined.  */
	      if (!mem_volatile_p && reg_unused_after (insn, src))
		return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (st,X,r26)            CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X,__tmp_reg__));
	      else
		return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X,__tmp_reg__)    CR_TAB
			      AS2 (sbiw,r26,1)          CR_TAB
			      AS2 (st,X,r26));
	    }
	  else
	    {
	      if (!mem_volatile_p && reg_unused_after (insn, base))
		return *l=2, (AS2 (st,X+,%A1) CR_TAB
			      AS2 (st,X,%B1));
	      else
		/* Volatile: write the high byte first.  */
		return *l=3, (AS2 (adiw,r26,1) CR_TAB
			      AS2 (st,X,%B1)   CR_TAB
			      AS2 (st,-X,%A1));
	    }
	}
      else
	return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
		      AS2 (st,%0,%A1));
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
	{
	  if (reg_base != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  /* Temporarily bump Y so both bytes fall in std range.  */
	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
			    AS2 (std,Y+63,%B1)    CR_TAB
			    AS2 (std,Y+62,%A1)    CR_TAB
			    AS2 (sbiw,r28,%o0-62));

	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
			  AS2 (std,Y+1,%B1)        CR_TAB
			  AS2 (st,Y,%A1)           CR_TAB
			  AS2 (subi,r28,lo8(%o0))  CR_TAB
			  AS2 (sbci,r29,hi8(%o0)));
	}
      if (reg_base == REG_X)
	{
	  /* (X + d) = R */
	  if (reg_src == REG_X)
	    {
	      *l = 7;
	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
		      AS2 (mov,__zero_reg__,r27) CR_TAB
		      AS2 (adiw,r26,%o0+1)       CR_TAB
		      AS2 (st,X,__zero_reg__)    CR_TAB
		      AS2 (st,-X,__tmp_reg__)    CR_TAB
		      AS1 (clr,__zero_reg__)     CR_TAB
		      AS2 (sbiw,r26,%o0));
	    }
	  *l = 4;
	  return (AS2 (adiw,r26,%o0+1) CR_TAB
		  AS2 (st,X,%B1)       CR_TAB
		  AS2 (st,-X,%A1)      CR_TAB
		  AS2 (sbiw,r26,%o0));
	}
      return *l=2, (AS2 (std,%B0,%B1) CR_TAB
		    AS2 (std,%A0,%A1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=2, (AS2 (st,%0,%B1) CR_TAB
		  AS2 (st,%0,%A1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (mem_volatile_p)
	{
	  /* Volatile: write high byte first, then fix up the pointer.  */
	  if (REGNO (XEXP (base, 0)) == REG_X)
	    {
	      *l = 4;
	      return (AS2 (adiw,r26,1) CR_TAB
		      AS2 (st,X,%B1)   CR_TAB
		      AS2 (st,-X,%A1)  CR_TAB
		      AS2 (adiw,r26,2));
	    }
	  else
	    {
	      *l = 3;
	      return (AS2 (std,%p0+1,%B1) CR_TAB
		      AS2 (st,%p0,%A1)    CR_TAB
		      AS2 (adiw,%r0,2));
	    }
	}

      *l = 2;
      return (AS2 (st,%0,%A1) CR_TAB
	      AS2 (st,%0,%B1));
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
2719 /* Return 1 if frame pointer for current function required. */
2722 frame_pointer_required_p (void)
2724 return (cfun
->calls_alloca
2725 || crtl
->args
.info
.nregs
== 0
2726 || get_frame_size () > 0);
/* Returns the condition of compare insn INSN, or UNKNOWN.  */

static RTX_CODE
compare_condition (rtx insn)
{
  /* Look at the insn that consumes the compare result: a conditional
     jump whose pattern is (set (pc) (if_then_else (cond ...) ...)).  */
  rtx next = next_real_insn (insn);
  RTX_CODE cond = UNKNOWN;
  if (next && GET_CODE (next) == JUMP_INSN)
    {
      rtx pat = PATTERN (next);
      rtx src = SET_SRC (pat);
      rtx t = XEXP (src, 0);
      cond = GET_CODE (t);
    }
  return cond;
}
2746 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2749 compare_sign_p (rtx insn
)
2751 RTX_CODE cond
= compare_condition (insn
);
2752 return (cond
== GE
|| cond
== LT
);
2755 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2756 that needs to be swapped (GT, GTU, LE, LEU). */
2759 compare_diff_p (rtx insn
)
2761 RTX_CODE cond
= compare_condition (insn
);
2762 return (cond
== GT
|| cond
== GTU
|| cond
== LE
|| cond
== LEU
) ? cond
: 0;
2765 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2768 compare_eq_p (rtx insn
)
2770 RTX_CODE cond
= compare_condition (insn
);
2771 return (cond
== EQ
|| cond
== NE
);
2775 /* Output test instruction for HImode. */
2778 out_tsthi (rtx insn
, int *l
)
2780 if (compare_sign_p (insn
))
2783 return AS1 (tst
,%B0
);
2785 if (reg_unused_after (insn
, SET_SRC (PATTERN (insn
)))
2786 && compare_eq_p (insn
))
2788 /* Faster than sbiw if we can clobber the operand. */
2790 return AS2 (or,%A0
,%B0
);
2792 if (test_hard_reg_class (ADDW_REGS
, SET_SRC (PATTERN (insn
))))
2795 return AS2 (sbiw
,%0,0);
2798 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
2799 AS2 (cpc
,%B0
,__zero_reg__
));
2803 /* Output test instruction for SImode. */
2806 out_tstsi (rtx insn
, int *l
)
2808 if (compare_sign_p (insn
))
2811 return AS1 (tst
,%D0
);
2813 if (test_hard_reg_class (ADDW_REGS
, SET_SRC (PATTERN (insn
))))
2816 return (AS2 (sbiw
,%A0
,0) CR_TAB
2817 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
2818 AS2 (cpc
,%D0
,__zero_reg__
));
2821 return (AS2 (cp
,%A0
,__zero_reg__
) CR_TAB
2822 AS2 (cpc
,%B0
,__zero_reg__
) CR_TAB
2823 AS2 (cpc
,%C0
,__zero_reg__
) CR_TAB
2824 AS2 (cpc
,%D0
,__zero_reg__
));
2828 /* Generate asm equivalent for various shifts.
2829 Shift count is a CONST_INT, MEM or REG.
2830 This only handles cases that are not already
2831 carefully hand-optimized in ?sh??i3_out. */
2834 out_shift_with_cnt (const char *template, rtx insn
, rtx operands
[],
2835 int *len
, int t_len
)
2839 int second_label
= 1;
2840 int saved_in_tmp
= 0;
2841 int use_zero_reg
= 0;
2843 op
[0] = operands
[0];
2844 op
[1] = operands
[1];
2845 op
[2] = operands
[2];
2846 op
[3] = operands
[3];
2852 if (GET_CODE (operands
[2]) == CONST_INT
)
2854 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
2855 int count
= INTVAL (operands
[2]);
2856 int max_len
= 10; /* If larger than this, always use a loop. */
2865 if (count
< 8 && !scratch
)
2869 max_len
= t_len
+ (scratch
? 3 : (use_zero_reg
? 4 : 5));
2871 if (t_len
* count
<= max_len
)
2873 /* Output shifts inline with no loop - faster. */
2875 *len
= t_len
* count
;
2879 output_asm_insn (template, op
);
2888 strcat (str
, AS2 (ldi
,%3,%2));
2890 else if (use_zero_reg
)
2892 /* Hack to save one word: use __zero_reg__ as loop counter.
2893 Set one bit, then shift in a loop until it is 0 again. */
2895 op
[3] = zero_reg_rtx
;
2899 strcat (str
, ("set" CR_TAB
2900 AS2 (bld
,%3,%2-1)));
2904 /* No scratch register available, use one from LD_REGS (saved in
2905 __tmp_reg__) that doesn't overlap with registers to shift. */
2907 op
[3] = gen_rtx_REG (QImode
,
2908 ((true_regnum (operands
[0]) - 1) & 15) + 16);
2909 op
[4] = tmp_reg_rtx
;
2913 *len
= 3; /* Includes "mov %3,%4" after the loop. */
2915 strcat (str
, (AS2 (mov
,%4,%3) CR_TAB
2921 else if (GET_CODE (operands
[2]) == MEM
)
2925 op
[3] = op_mov
[0] = tmp_reg_rtx
;
2929 out_movqi_r_mr (insn
, op_mov
, len
);
2931 output_asm_insn (out_movqi_r_mr (insn
, op_mov
, NULL
), op_mov
);
2933 else if (register_operand (operands
[2], QImode
))
2935 if (reg_unused_after (insn
, operands
[2]))
2939 op
[3] = tmp_reg_rtx
;
2941 strcat (str
, (AS2 (mov
,%3,%2) CR_TAB
));
2945 fatal_insn ("bad shift insn:", insn
);
2952 strcat (str
, AS1 (rjmp
,2f
));
2956 *len
+= t_len
+ 2; /* template + dec + brXX */
2959 strcat (str
, "\n1:\t");
2960 strcat (str
, template);
2961 strcat (str
, second_label
? "\n2:\t" : "\n\t");
2962 strcat (str
, use_zero_reg
? AS1 (lsr
,%3) : AS1 (dec
,%3));
2963 strcat (str
, CR_TAB
);
2964 strcat (str
, second_label
? AS1 (brpl
,1b
) : AS1 (brne
,1b
));
2966 strcat (str
, (CR_TAB
AS2 (mov
,%3,%4)));
2967 output_asm_insn (str
, op
);
2972 /* 8bit shift left ((char)x << i) */
2975 ashlqi3_out (rtx insn
, rtx operands
[], int *len
)
2977 if (GET_CODE (operands
[2]) == CONST_INT
)
2984 switch (INTVAL (operands
[2]))
2987 if (INTVAL (operands
[2]) < 8)
2991 return AS1 (clr
,%0);
2995 return AS1 (lsl
,%0);
2999 return (AS1 (lsl
,%0) CR_TAB
3004 return (AS1 (lsl
,%0) CR_TAB
3009 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3012 return (AS1 (swap
,%0) CR_TAB
3013 AS2 (andi
,%0,0xf0));
3016 return (AS1 (lsl
,%0) CR_TAB
3022 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3025 return (AS1 (swap
,%0) CR_TAB
3027 AS2 (andi
,%0,0xe0));
3030 return (AS1 (lsl
,%0) CR_TAB
3037 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3040 return (AS1 (swap
,%0) CR_TAB
3043 AS2 (andi
,%0,0xc0));
3046 return (AS1 (lsl
,%0) CR_TAB
3055 return (AS1 (ror
,%0) CR_TAB
3060 else if (CONSTANT_P (operands
[2]))
3061 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3063 out_shift_with_cnt (AS1 (lsl
,%0),
3064 insn
, operands
, len
, 1);
3069 /* 16bit shift left ((short)x << i) */
3072 ashlhi3_out (rtx insn
, rtx operands
[], int *len
)
3074 if (GET_CODE (operands
[2]) == CONST_INT
)
3076 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3077 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3084 switch (INTVAL (operands
[2]))
3087 if (INTVAL (operands
[2]) < 16)
3091 return (AS1 (clr
,%B0
) CR_TAB
3095 if (optimize_size
&& scratch
)
3100 return (AS1 (swap
,%A0
) CR_TAB
3101 AS1 (swap
,%B0
) CR_TAB
3102 AS2 (andi
,%B0
,0xf0) CR_TAB
3103 AS2 (eor
,%B0
,%A0
) CR_TAB
3104 AS2 (andi
,%A0
,0xf0) CR_TAB
3110 return (AS1 (swap
,%A0
) CR_TAB
3111 AS1 (swap
,%B0
) CR_TAB
3112 AS2 (ldi
,%3,0xf0) CR_TAB
3113 AS2 (and,%B0
,%3) CR_TAB
3114 AS2 (eor
,%B0
,%A0
) CR_TAB
3115 AS2 (and,%A0
,%3) CR_TAB
3118 break; /* optimize_size ? 6 : 8 */
3122 break; /* scratch ? 5 : 6 */
3126 return (AS1 (lsl
,%A0
) CR_TAB
3127 AS1 (rol
,%B0
) CR_TAB
3128 AS1 (swap
,%A0
) CR_TAB
3129 AS1 (swap
,%B0
) CR_TAB
3130 AS2 (andi
,%B0
,0xf0) CR_TAB
3131 AS2 (eor
,%B0
,%A0
) CR_TAB
3132 AS2 (andi
,%A0
,0xf0) CR_TAB
3138 return (AS1 (lsl
,%A0
) CR_TAB
3139 AS1 (rol
,%B0
) CR_TAB
3140 AS1 (swap
,%A0
) CR_TAB
3141 AS1 (swap
,%B0
) CR_TAB
3142 AS2 (ldi
,%3,0xf0) CR_TAB
3143 AS2 (and,%B0
,%3) CR_TAB
3144 AS2 (eor
,%B0
,%A0
) CR_TAB
3145 AS2 (and,%A0
,%3) CR_TAB
3152 break; /* scratch ? 5 : 6 */
3154 return (AS1 (clr
,__tmp_reg__
) CR_TAB
3155 AS1 (lsr
,%B0
) CR_TAB
3156 AS1 (ror
,%A0
) CR_TAB
3157 AS1 (ror
,__tmp_reg__
) CR_TAB
3158 AS1 (lsr
,%B0
) CR_TAB
3159 AS1 (ror
,%A0
) CR_TAB
3160 AS1 (ror
,__tmp_reg__
) CR_TAB
3161 AS2 (mov
,%B0
,%A0
) CR_TAB
3162 AS2 (mov
,%A0
,__tmp_reg__
));
3166 return (AS1 (lsr
,%B0
) CR_TAB
3167 AS2 (mov
,%B0
,%A0
) CR_TAB
3168 AS1 (clr
,%A0
) CR_TAB
3169 AS1 (ror
,%B0
) CR_TAB
3173 return *len
= 2, (AS2 (mov
,%B0
,%A1
) CR_TAB
3178 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3179 AS1 (clr
,%A0
) CR_TAB
3184 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3185 AS1 (clr
,%A0
) CR_TAB
3186 AS1 (lsl
,%B0
) CR_TAB
3191 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3192 AS1 (clr
,%A0
) CR_TAB
3193 AS1 (lsl
,%B0
) CR_TAB
3194 AS1 (lsl
,%B0
) CR_TAB
3201 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3202 AS1 (clr
,%A0
) CR_TAB
3203 AS1 (swap
,%B0
) CR_TAB
3204 AS2 (andi
,%B0
,0xf0));
3209 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3210 AS1 (clr
,%A0
) CR_TAB
3211 AS1 (swap
,%B0
) CR_TAB
3212 AS2 (ldi
,%3,0xf0) CR_TAB
3216 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3217 AS1 (clr
,%A0
) CR_TAB
3218 AS1 (lsl
,%B0
) CR_TAB
3219 AS1 (lsl
,%B0
) CR_TAB
3220 AS1 (lsl
,%B0
) CR_TAB
3227 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3228 AS1 (clr
,%A0
) CR_TAB
3229 AS1 (swap
,%B0
) CR_TAB
3230 AS1 (lsl
,%B0
) CR_TAB
3231 AS2 (andi
,%B0
,0xe0));
3233 if (AVR_HAVE_MUL
&& scratch
)
3236 return (AS2 (ldi
,%3,0x20) CR_TAB
3237 AS2 (mul
,%A0
,%3) CR_TAB
3238 AS2 (mov
,%B0
,r0
) CR_TAB
3239 AS1 (clr
,%A0
) CR_TAB
3240 AS1 (clr
,__zero_reg__
));
3242 if (optimize_size
&& scratch
)
3247 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3248 AS1 (clr
,%A0
) CR_TAB
3249 AS1 (swap
,%B0
) CR_TAB
3250 AS1 (lsl
,%B0
) CR_TAB
3251 AS2 (ldi
,%3,0xe0) CR_TAB
3257 return ("set" CR_TAB
3258 AS2 (bld
,r1
,5) CR_TAB
3259 AS2 (mul
,%A0
,r1
) CR_TAB
3260 AS2 (mov
,%B0
,r0
) CR_TAB
3261 AS1 (clr
,%A0
) CR_TAB
3262 AS1 (clr
,__zero_reg__
));
3265 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3266 AS1 (clr
,%A0
) CR_TAB
3267 AS1 (lsl
,%B0
) CR_TAB
3268 AS1 (lsl
,%B0
) CR_TAB
3269 AS1 (lsl
,%B0
) CR_TAB
3270 AS1 (lsl
,%B0
) CR_TAB
3274 if (AVR_HAVE_MUL
&& ldi_ok
)
3277 return (AS2 (ldi
,%B0
,0x40) CR_TAB
3278 AS2 (mul
,%A0
,%B0
) CR_TAB
3279 AS2 (mov
,%B0
,r0
) CR_TAB
3280 AS1 (clr
,%A0
) CR_TAB
3281 AS1 (clr
,__zero_reg__
));
3283 if (AVR_HAVE_MUL
&& scratch
)
3286 return (AS2 (ldi
,%3,0x40) CR_TAB
3287 AS2 (mul
,%A0
,%3) CR_TAB
3288 AS2 (mov
,%B0
,r0
) CR_TAB
3289 AS1 (clr
,%A0
) CR_TAB
3290 AS1 (clr
,__zero_reg__
));
3292 if (optimize_size
&& ldi_ok
)
3295 return (AS2 (mov
,%B0
,%A0
) CR_TAB
3296 AS2 (ldi
,%A0
,6) "\n1:\t"
3297 AS1 (lsl
,%B0
) CR_TAB
3298 AS1 (dec
,%A0
) CR_TAB
3301 if (optimize_size
&& scratch
)
3304 return (AS1 (clr
,%B0
) CR_TAB
3305 AS1 (lsr
,%A0
) CR_TAB
3306 AS1 (ror
,%B0
) CR_TAB
3307 AS1 (lsr
,%A0
) CR_TAB
3308 AS1 (ror
,%B0
) CR_TAB
3313 return (AS1 (clr
,%B0
) CR_TAB
3314 AS1 (lsr
,%A0
) CR_TAB
3315 AS1 (ror
,%B0
) CR_TAB
3320 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3322 insn
, operands
, len
, 2);
3327 /* 32bit shift left ((long)x << i) */
3330 ashlsi3_out (rtx insn
, rtx operands
[], int *len
)
3332 if (GET_CODE (operands
[2]) == CONST_INT
)
3340 switch (INTVAL (operands
[2]))
3343 if (INTVAL (operands
[2]) < 32)
3347 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
3348 AS1 (clr
,%C0
) CR_TAB
3349 AS2 (movw
,%A0
,%C0
));
3351 return (AS1 (clr
,%D0
) CR_TAB
3352 AS1 (clr
,%C0
) CR_TAB
3353 AS1 (clr
,%B0
) CR_TAB
3358 int reg0
= true_regnum (operands
[0]);
3359 int reg1
= true_regnum (operands
[1]);
3362 return (AS2 (mov
,%D0
,%C1
) CR_TAB
3363 AS2 (mov
,%C0
,%B1
) CR_TAB
3364 AS2 (mov
,%B0
,%A1
) CR_TAB
3367 return (AS1 (clr
,%A0
) CR_TAB
3368 AS2 (mov
,%B0
,%A1
) CR_TAB
3369 AS2 (mov
,%C0
,%B1
) CR_TAB
3375 int reg0
= true_regnum (operands
[0]);
3376 int reg1
= true_regnum (operands
[1]);
3377 if (reg0
+ 2 == reg1
)
3378 return *len
= 2, (AS1 (clr
,%B0
) CR_TAB
3381 return *len
= 3, (AS2 (movw
,%C0
,%A1
) CR_TAB
3382 AS1 (clr
,%B0
) CR_TAB
3385 return *len
= 4, (AS2 (mov
,%C0
,%A1
) CR_TAB
3386 AS2 (mov
,%D0
,%B1
) CR_TAB
3387 AS1 (clr
,%B0
) CR_TAB
3393 return (AS2 (mov
,%D0
,%A1
) CR_TAB
3394 AS1 (clr
,%C0
) CR_TAB
3395 AS1 (clr
,%B0
) CR_TAB
3400 return (AS1 (clr
,%D0
) CR_TAB
3401 AS1 (lsr
,%A0
) CR_TAB
3402 AS1 (ror
,%D0
) CR_TAB
3403 AS1 (clr
,%C0
) CR_TAB
3404 AS1 (clr
,%B0
) CR_TAB
3409 out_shift_with_cnt ((AS1 (lsl
,%A0
) CR_TAB
3410 AS1 (rol
,%B0
) CR_TAB
3411 AS1 (rol
,%C0
) CR_TAB
3413 insn
, operands
, len
, 4);
3417 /* 8bit arithmetic shift right ((signed char)x >> i) */
3420 ashrqi3_out (rtx insn
, rtx operands
[], int *len
)
3422 if (GET_CODE (operands
[2]) == CONST_INT
)
3429 switch (INTVAL (operands
[2]))
3433 return AS1 (asr
,%0);
3437 return (AS1 (asr
,%0) CR_TAB
3442 return (AS1 (asr
,%0) CR_TAB
3448 return (AS1 (asr
,%0) CR_TAB
3455 return (AS1 (asr
,%0) CR_TAB
3463 return (AS2 (bst
,%0,6) CR_TAB
3465 AS2 (sbc
,%0,%0) CR_TAB
3469 if (INTVAL (operands
[2]) < 8)
3476 return (AS1 (lsl
,%0) CR_TAB
3480 else if (CONSTANT_P (operands
[2]))
3481 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3483 out_shift_with_cnt (AS1 (asr
,%0),
3484 insn
, operands
, len
, 1);
3489 /* 16bit arithmetic shift right ((signed short)x >> i) */
3492 ashrhi3_out (rtx insn
, rtx operands
[], int *len
)
3494 if (GET_CODE (operands
[2]) == CONST_INT
)
3496 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3497 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3504 switch (INTVAL (operands
[2]))
3508 /* XXX try to optimize this too? */
3513 break; /* scratch ? 5 : 6 */
3515 return (AS2 (mov
,__tmp_reg__
,%A0
) CR_TAB
3516 AS2 (mov
,%A0
,%B0
) CR_TAB
3517 AS1 (lsl
,__tmp_reg__
) CR_TAB
3518 AS1 (rol
,%A0
) CR_TAB
3519 AS2 (sbc
,%B0
,%B0
) CR_TAB
3520 AS1 (lsl
,__tmp_reg__
) CR_TAB
3521 AS1 (rol
,%A0
) CR_TAB
3526 return (AS1 (lsl
,%A0
) CR_TAB
3527 AS2 (mov
,%A0
,%B0
) CR_TAB
3528 AS1 (rol
,%A0
) CR_TAB
3533 int reg0
= true_regnum (operands
[0]);
3534 int reg1
= true_regnum (operands
[1]);
3537 return *len
= 3, (AS2 (mov
,%A0
,%B0
) CR_TAB
3538 AS1 (lsl
,%B0
) CR_TAB
3541 return *len
= 4, (AS2 (mov
,%A0
,%B1
) CR_TAB
3542 AS1 (clr
,%B0
) CR_TAB
3543 AS2 (sbrc
,%A0
,7) CR_TAB
3549 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3550 AS1 (lsl
,%B0
) CR_TAB
3551 AS2 (sbc
,%B0
,%B0
) CR_TAB
3556 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3557 AS1 (lsl
,%B0
) CR_TAB
3558 AS2 (sbc
,%B0
,%B0
) CR_TAB
3559 AS1 (asr
,%A0
) CR_TAB
3563 if (AVR_HAVE_MUL
&& ldi_ok
)
3566 return (AS2 (ldi
,%A0
,0x20) CR_TAB
3567 AS2 (muls
,%B0
,%A0
) CR_TAB
3568 AS2 (mov
,%A0
,r1
) CR_TAB
3569 AS2 (sbc
,%B0
,%B0
) CR_TAB
3570 AS1 (clr
,__zero_reg__
));
3572 if (optimize_size
&& scratch
)
3575 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3576 AS1 (lsl
,%B0
) CR_TAB
3577 AS2 (sbc
,%B0
,%B0
) CR_TAB
3578 AS1 (asr
,%A0
) CR_TAB
3579 AS1 (asr
,%A0
) CR_TAB
3583 if (AVR_HAVE_MUL
&& ldi_ok
)
3586 return (AS2 (ldi
,%A0
,0x10) CR_TAB
3587 AS2 (muls
,%B0
,%A0
) CR_TAB
3588 AS2 (mov
,%A0
,r1
) CR_TAB
3589 AS2 (sbc
,%B0
,%B0
) CR_TAB
3590 AS1 (clr
,__zero_reg__
));
3592 if (optimize_size
&& scratch
)
3595 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3596 AS1 (lsl
,%B0
) CR_TAB
3597 AS2 (sbc
,%B0
,%B0
) CR_TAB
3598 AS1 (asr
,%A0
) CR_TAB
3599 AS1 (asr
,%A0
) CR_TAB
3600 AS1 (asr
,%A0
) CR_TAB
3604 if (AVR_HAVE_MUL
&& ldi_ok
)
3607 return (AS2 (ldi
,%A0
,0x08) CR_TAB
3608 AS2 (muls
,%B0
,%A0
) CR_TAB
3609 AS2 (mov
,%A0
,r1
) CR_TAB
3610 AS2 (sbc
,%B0
,%B0
) CR_TAB
3611 AS1 (clr
,__zero_reg__
));
3614 break; /* scratch ? 5 : 7 */
3616 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3617 AS1 (lsl
,%B0
) CR_TAB
3618 AS2 (sbc
,%B0
,%B0
) CR_TAB
3619 AS1 (asr
,%A0
) CR_TAB
3620 AS1 (asr
,%A0
) CR_TAB
3621 AS1 (asr
,%A0
) CR_TAB
3622 AS1 (asr
,%A0
) CR_TAB
3627 return (AS1 (lsl
,%B0
) CR_TAB
3628 AS2 (sbc
,%A0
,%A0
) CR_TAB
3629 AS1 (lsl
,%B0
) CR_TAB
3630 AS2 (mov
,%B0
,%A0
) CR_TAB
3634 if (INTVAL (operands
[2]) < 16)
3640 return *len
= 3, (AS1 (lsl
,%B0
) CR_TAB
3641 AS2 (sbc
,%A0
,%A0
) CR_TAB
3646 out_shift_with_cnt ((AS1 (asr
,%B0
) CR_TAB
3648 insn
, operands
, len
, 2);
3653 /* 32bit arithmetic shift right ((signed long)x >> i) */
3656 ashrsi3_out (rtx insn
, rtx operands
[], int *len
)
3658 if (GET_CODE (operands
[2]) == CONST_INT
)
3666 switch (INTVAL (operands
[2]))
3670 int reg0
= true_regnum (operands
[0]);
3671 int reg1
= true_regnum (operands
[1]);
3674 return (AS2 (mov
,%A0
,%B1
) CR_TAB
3675 AS2 (mov
,%B0
,%C1
) CR_TAB
3676 AS2 (mov
,%C0
,%D1
) CR_TAB
3677 AS1 (clr
,%D0
) CR_TAB
3678 AS2 (sbrc
,%C0
,7) CR_TAB
3681 return (AS1 (clr
,%D0
) CR_TAB
3682 AS2 (sbrc
,%D1
,7) CR_TAB
3683 AS1 (dec
,%D0
) CR_TAB
3684 AS2 (mov
,%C0
,%D1
) CR_TAB
3685 AS2 (mov
,%B0
,%C1
) CR_TAB
3691 int reg0
= true_regnum (operands
[0]);
3692 int reg1
= true_regnum (operands
[1]);
3694 if (reg0
== reg1
+ 2)
3695 return *len
= 4, (AS1 (clr
,%D0
) CR_TAB
3696 AS2 (sbrc
,%B0
,7) CR_TAB
3697 AS1 (com
,%D0
) CR_TAB
3700 return *len
= 5, (AS2 (movw
,%A0
,%C1
) CR_TAB
3701 AS1 (clr
,%D0
) CR_TAB
3702 AS2 (sbrc
,%B0
,7) CR_TAB
3703 AS1 (com
,%D0
) CR_TAB
3706 return *len
= 6, (AS2 (mov
,%B0
,%D1
) CR_TAB
3707 AS2 (mov
,%A0
,%C1
) CR_TAB
3708 AS1 (clr
,%D0
) CR_TAB
3709 AS2 (sbrc
,%B0
,7) CR_TAB
3710 AS1 (com
,%D0
) CR_TAB
3715 return *len
= 6, (AS2 (mov
,%A0
,%D1
) CR_TAB
3716 AS1 (clr
,%D0
) CR_TAB
3717 AS2 (sbrc
,%A0
,7) CR_TAB
3718 AS1 (com
,%D0
) CR_TAB
3719 AS2 (mov
,%B0
,%D0
) CR_TAB
3723 if (INTVAL (operands
[2]) < 32)
3730 return *len
= 4, (AS1 (lsl
,%D0
) CR_TAB
3731 AS2 (sbc
,%A0
,%A0
) CR_TAB
3732 AS2 (mov
,%B0
,%A0
) CR_TAB
3733 AS2 (movw
,%C0
,%A0
));
3735 return *len
= 5, (AS1 (lsl
,%D0
) CR_TAB
3736 AS2 (sbc
,%A0
,%A0
) CR_TAB
3737 AS2 (mov
,%B0
,%A0
) CR_TAB
3738 AS2 (mov
,%C0
,%A0
) CR_TAB
3743 out_shift_with_cnt ((AS1 (asr
,%D0
) CR_TAB
3744 AS1 (ror
,%C0
) CR_TAB
3745 AS1 (ror
,%B0
) CR_TAB
3747 insn
, operands
, len
, 4);
3751 /* 8bit logic shift right ((unsigned char)x >> i) */
3754 lshrqi3_out (rtx insn
, rtx operands
[], int *len
)
3756 if (GET_CODE (operands
[2]) == CONST_INT
)
3763 switch (INTVAL (operands
[2]))
3766 if (INTVAL (operands
[2]) < 8)
3770 return AS1 (clr
,%0);
3774 return AS1 (lsr
,%0);
3778 return (AS1 (lsr
,%0) CR_TAB
3782 return (AS1 (lsr
,%0) CR_TAB
3787 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3790 return (AS1 (swap
,%0) CR_TAB
3791 AS2 (andi
,%0,0x0f));
3794 return (AS1 (lsr
,%0) CR_TAB
3800 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3803 return (AS1 (swap
,%0) CR_TAB
3808 return (AS1 (lsr
,%0) CR_TAB
3815 if (test_hard_reg_class (LD_REGS
, operands
[0]))
3818 return (AS1 (swap
,%0) CR_TAB
3824 return (AS1 (lsr
,%0) CR_TAB
3833 return (AS1 (rol
,%0) CR_TAB
3838 else if (CONSTANT_P (operands
[2]))
3839 fatal_insn ("internal compiler error. Incorrect shift:", insn
);
3841 out_shift_with_cnt (AS1 (lsr
,%0),
3842 insn
, operands
, len
, 1);
3846 /* 16bit logic shift right ((unsigned short)x >> i) */
3849 lshrhi3_out (rtx insn
, rtx operands
[], int *len
)
3851 if (GET_CODE (operands
[2]) == CONST_INT
)
3853 int scratch
= (GET_CODE (PATTERN (insn
)) == PARALLEL
);
3854 int ldi_ok
= test_hard_reg_class (LD_REGS
, operands
[0]);
3861 switch (INTVAL (operands
[2]))
3864 if (INTVAL (operands
[2]) < 16)
3868 return (AS1 (clr
,%B0
) CR_TAB
3872 if (optimize_size
&& scratch
)
3877 return (AS1 (swap
,%B0
) CR_TAB
3878 AS1 (swap
,%A0
) CR_TAB
3879 AS2 (andi
,%A0
,0x0f) CR_TAB
3880 AS2 (eor
,%A0
,%B0
) CR_TAB
3881 AS2 (andi
,%B0
,0x0f) CR_TAB
3887 return (AS1 (swap
,%B0
) CR_TAB
3888 AS1 (swap
,%A0
) CR_TAB
3889 AS2 (ldi
,%3,0x0f) CR_TAB
3890 AS2 (and,%A0
,%3) CR_TAB
3891 AS2 (eor
,%A0
,%B0
) CR_TAB
3892 AS2 (and,%B0
,%3) CR_TAB
3895 break; /* optimize_size ? 6 : 8 */
3899 break; /* scratch ? 5 : 6 */
3903 return (AS1 (lsr
,%B0
) CR_TAB
3904 AS1 (ror
,%A0
) CR_TAB
3905 AS1 (swap
,%B0
) CR_TAB
3906 AS1 (swap
,%A0
) CR_TAB
3907 AS2 (andi
,%A0
,0x0f) CR_TAB
3908 AS2 (eor
,%A0
,%B0
) CR_TAB
3909 AS2 (andi
,%B0
,0x0f) CR_TAB
3915 return (AS1 (lsr
,%B0
) CR_TAB
3916 AS1 (ror
,%A0
) CR_TAB
3917 AS1 (swap
,%B0
) CR_TAB
3918 AS1 (swap
,%A0
) CR_TAB
3919 AS2 (ldi
,%3,0x0f) CR_TAB
3920 AS2 (and,%A0
,%3) CR_TAB
3921 AS2 (eor
,%A0
,%B0
) CR_TAB
3922 AS2 (and,%B0
,%3) CR_TAB
3929 break; /* scratch ? 5 : 6 */
3931 return (AS1 (clr
,__tmp_reg__
) CR_TAB
3932 AS1 (lsl
,%A0
) CR_TAB
3933 AS1 (rol
,%B0
) CR_TAB
3934 AS1 (rol
,__tmp_reg__
) CR_TAB
3935 AS1 (lsl
,%A0
) CR_TAB
3936 AS1 (rol
,%B0
) CR_TAB
3937 AS1 (rol
,__tmp_reg__
) CR_TAB
3938 AS2 (mov
,%A0
,%B0
) CR_TAB
3939 AS2 (mov
,%B0
,__tmp_reg__
));
3943 return (AS1 (lsl
,%A0
) CR_TAB
3944 AS2 (mov
,%A0
,%B0
) CR_TAB
3945 AS1 (rol
,%A0
) CR_TAB
3946 AS2 (sbc
,%B0
,%B0
) CR_TAB
3950 return *len
= 2, (AS2 (mov
,%A0
,%B1
) CR_TAB
3955 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3956 AS1 (clr
,%B0
) CR_TAB
3961 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3962 AS1 (clr
,%B0
) CR_TAB
3963 AS1 (lsr
,%A0
) CR_TAB
3968 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3969 AS1 (clr
,%B0
) CR_TAB
3970 AS1 (lsr
,%A0
) CR_TAB
3971 AS1 (lsr
,%A0
) CR_TAB
3978 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3979 AS1 (clr
,%B0
) CR_TAB
3980 AS1 (swap
,%A0
) CR_TAB
3981 AS2 (andi
,%A0
,0x0f));
3986 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3987 AS1 (clr
,%B0
) CR_TAB
3988 AS1 (swap
,%A0
) CR_TAB
3989 AS2 (ldi
,%3,0x0f) CR_TAB
3993 return (AS2 (mov
,%A0
,%B0
) CR_TAB
3994 AS1 (clr
,%B0
) CR_TAB
3995 AS1 (lsr
,%A0
) CR_TAB
3996 AS1 (lsr
,%A0
) CR_TAB
3997 AS1 (lsr
,%A0
) CR_TAB
4004 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4005 AS1 (clr
,%B0
) CR_TAB
4006 AS1 (swap
,%A0
) CR_TAB
4007 AS1 (lsr
,%A0
) CR_TAB
4008 AS2 (andi
,%A0
,0x07));
4010 if (AVR_HAVE_MUL
&& scratch
)
4013 return (AS2 (ldi
,%3,0x08) CR_TAB
4014 AS2 (mul
,%B0
,%3) CR_TAB
4015 AS2 (mov
,%A0
,r1
) CR_TAB
4016 AS1 (clr
,%B0
) CR_TAB
4017 AS1 (clr
,__zero_reg__
));
4019 if (optimize_size
&& scratch
)
4024 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4025 AS1 (clr
,%B0
) CR_TAB
4026 AS1 (swap
,%A0
) CR_TAB
4027 AS1 (lsr
,%A0
) CR_TAB
4028 AS2 (ldi
,%3,0x07) CR_TAB
4034 return ("set" CR_TAB
4035 AS2 (bld
,r1
,3) CR_TAB
4036 AS2 (mul
,%B0
,r1
) CR_TAB
4037 AS2 (mov
,%A0
,r1
) CR_TAB
4038 AS1 (clr
,%B0
) CR_TAB
4039 AS1 (clr
,__zero_reg__
));
4042 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4043 AS1 (clr
,%B0
) CR_TAB
4044 AS1 (lsr
,%A0
) CR_TAB
4045 AS1 (lsr
,%A0
) CR_TAB
4046 AS1 (lsr
,%A0
) CR_TAB
4047 AS1 (lsr
,%A0
) CR_TAB
4051 if (AVR_HAVE_MUL
&& ldi_ok
)
4054 return (AS2 (ldi
,%A0
,0x04) CR_TAB
4055 AS2 (mul
,%B0
,%A0
) CR_TAB
4056 AS2 (mov
,%A0
,r1
) CR_TAB
4057 AS1 (clr
,%B0
) CR_TAB
4058 AS1 (clr
,__zero_reg__
));
4060 if (AVR_HAVE_MUL
&& scratch
)
4063 return (AS2 (ldi
,%3,0x04) CR_TAB
4064 AS2 (mul
,%B0
,%3) CR_TAB
4065 AS2 (mov
,%A0
,r1
) CR_TAB
4066 AS1 (clr
,%B0
) CR_TAB
4067 AS1 (clr
,__zero_reg__
));
4069 if (optimize_size
&& ldi_ok
)
4072 return (AS2 (mov
,%A0
,%B0
) CR_TAB
4073 AS2 (ldi
,%B0
,6) "\n1:\t"
4074 AS1 (lsr
,%A0
) CR_TAB
4075 AS1 (dec
,%B0
) CR_TAB
4078 if (optimize_size
&& scratch
)
4081 return (AS1 (clr
,%A0
) CR_TAB
4082 AS1 (lsl
,%B0
) CR_TAB
4083 AS1 (rol
,%A0
) CR_TAB
4084 AS1 (lsl
,%B0
) CR_TAB
4085 AS1 (rol
,%A0
) CR_TAB
4090 return (AS1 (clr
,%A0
) CR_TAB
4091 AS1 (lsl
,%B0
) CR_TAB
4092 AS1 (rol
,%A0
) CR_TAB
4097 out_shift_with_cnt ((AS1 (lsr
,%B0
) CR_TAB
4099 insn
, operands
, len
, 2);
4103 /* 32bit logic shift right ((unsigned int)x >> i) */
4106 lshrsi3_out (rtx insn
, rtx operands
[], int *len
)
4108 if (GET_CODE (operands
[2]) == CONST_INT
)
4116 switch (INTVAL (operands
[2]))
4119 if (INTVAL (operands
[2]) < 32)
4123 return *len
= 3, (AS1 (clr
,%D0
) CR_TAB
4124 AS1 (clr
,%C0
) CR_TAB
4125 AS2 (movw
,%A0
,%C0
));
4127 return (AS1 (clr
,%D0
) CR_TAB
4128 AS1 (clr
,%C0
) CR_TAB
4129 AS1 (clr
,%B0
) CR_TAB
4134 int reg0
= true_regnum (operands
[0]);
4135 int reg1
= true_regnum (operands
[1]);
4138 return (AS2 (mov
,%A0
,%B1
) CR_TAB
4139 AS2 (mov
,%B0
,%C1
) CR_TAB
4140 AS2 (mov
,%C0
,%D1
) CR_TAB
4143 return (AS1 (clr
,%D0
) CR_TAB
4144 AS2 (mov
,%C0
,%D1
) CR_TAB
4145 AS2 (mov
,%B0
,%C1
) CR_TAB
4151 int reg0
= true_regnum (operands
[0]);
4152 int reg1
= true_regnum (operands
[1]);
4154 if (reg0
== reg1
+ 2)
4155 return *len
= 2, (AS1 (clr
,%C0
) CR_TAB
4158 return *len
= 3, (AS2 (movw
,%A0
,%C1
) CR_TAB
4159 AS1 (clr
,%C0
) CR_TAB
4162 return *len
= 4, (AS2 (mov
,%B0
,%D1
) CR_TAB
4163 AS2 (mov
,%A0
,%C1
) CR_TAB
4164 AS1 (clr
,%C0
) CR_TAB
4169 return *len
= 4, (AS2 (mov
,%A0
,%D1
) CR_TAB
4170 AS1 (clr
,%B0
) CR_TAB
4171 AS1 (clr
,%C0
) CR_TAB
4176 return (AS1 (clr
,%A0
) CR_TAB
4177 AS2 (sbrc
,%D0
,7) CR_TAB
4178 AS1 (inc
,%A0
) CR_TAB
4179 AS1 (clr
,%B0
) CR_TAB
4180 AS1 (clr
,%C0
) CR_TAB
4185 out_shift_with_cnt ((AS1 (lsr
,%D0
) CR_TAB
4186 AS1 (ror
,%C0
) CR_TAB
4187 AS1 (ror
,%B0
) CR_TAB
4189 insn
, operands
, len
, 4);
4193 /* Modifies the length assigned to instruction INSN
4194 LEN is the initially computed length of the insn. */
4197 adjust_insn_length (rtx insn
, int len
)
4199 rtx patt
= PATTERN (insn
);
4202 if (GET_CODE (patt
) == SET
)
4205 op
[1] = SET_SRC (patt
);
4206 op
[0] = SET_DEST (patt
);
4207 if (general_operand (op
[1], VOIDmode
)
4208 && general_operand (op
[0], VOIDmode
))
4210 switch (GET_MODE (op
[0]))
4213 output_movqi (insn
, op
, &len
);
4216 output_movhi (insn
, op
, &len
);
4220 output_movsisf (insn
, op
, &len
);
4226 else if (op
[0] == cc0_rtx
&& REG_P (op
[1]))
4228 switch (GET_MODE (op
[1]))
4230 case HImode
: out_tsthi (insn
,&len
); break;
4231 case SImode
: out_tstsi (insn
,&len
); break;
4235 else if (GET_CODE (op
[1]) == AND
)
4237 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4239 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4240 if (GET_MODE (op
[1]) == SImode
)
4241 len
= (((mask
& 0xff) != 0xff)
4242 + ((mask
& 0xff00) != 0xff00)
4243 + ((mask
& 0xff0000L
) != 0xff0000L
)
4244 + ((mask
& 0xff000000L
) != 0xff000000L
));
4245 else if (GET_MODE (op
[1]) == HImode
)
4246 len
= (((mask
& 0xff) != 0xff)
4247 + ((mask
& 0xff00) != 0xff00));
4250 else if (GET_CODE (op
[1]) == IOR
)
4252 if (GET_CODE (XEXP (op
[1],1)) == CONST_INT
)
4254 HOST_WIDE_INT mask
= INTVAL (XEXP (op
[1],1));
4255 if (GET_MODE (op
[1]) == SImode
)
4256 len
= (((mask
& 0xff) != 0)
4257 + ((mask
& 0xff00) != 0)
4258 + ((mask
& 0xff0000L
) != 0)
4259 + ((mask
& 0xff000000L
) != 0));
4260 else if (GET_MODE (op
[1]) == HImode
)
4261 len
= (((mask
& 0xff) != 0)
4262 + ((mask
& 0xff00) != 0));
4266 set
= single_set (insn
);
4271 op
[1] = SET_SRC (set
);
4272 op
[0] = SET_DEST (set
);
4274 if (GET_CODE (patt
) == PARALLEL
4275 && general_operand (op
[1], VOIDmode
)
4276 && general_operand (op
[0], VOIDmode
))
4278 if (XVECLEN (patt
, 0) == 2)
4279 op
[2] = XVECEXP (patt
, 0, 1);
4281 switch (GET_MODE (op
[0]))
4287 output_reload_inhi (insn
, op
, &len
);
4291 output_reload_insisf (insn
, op
, &len
);
4297 else if (GET_CODE (op
[1]) == ASHIFT
4298 || GET_CODE (op
[1]) == ASHIFTRT
4299 || GET_CODE (op
[1]) == LSHIFTRT
)
4303 ops
[1] = XEXP (op
[1],0);
4304 ops
[2] = XEXP (op
[1],1);
4305 switch (GET_CODE (op
[1]))
4308 switch (GET_MODE (op
[0]))
4310 case QImode
: ashlqi3_out (insn
,ops
,&len
); break;
4311 case HImode
: ashlhi3_out (insn
,ops
,&len
); break;
4312 case SImode
: ashlsi3_out (insn
,ops
,&len
); break;
4317 switch (GET_MODE (op
[0]))
4319 case QImode
: ashrqi3_out (insn
,ops
,&len
); break;
4320 case HImode
: ashrhi3_out (insn
,ops
,&len
); break;
4321 case SImode
: ashrsi3_out (insn
,ops
,&len
); break;
4326 switch (GET_MODE (op
[0]))
4328 case QImode
: lshrqi3_out (insn
,ops
,&len
); break;
4329 case HImode
: lshrhi3_out (insn
,ops
,&len
); break;
4330 case SImode
: lshrsi3_out (insn
,ops
,&len
); break;
4342 /* Return nonzero if register REG dead after INSN. */
4345 reg_unused_after (rtx insn
, rtx reg
)
4347 return (dead_or_set_p (insn
, reg
)
4348 || (REG_P(reg
) && _reg_unused_after (insn
, reg
)));
4351 /* Return nonzero if REG is not used after INSN.
4352 We assume REG is a reload reg, and therefore does
4353 not live past labels. It may live past calls or jumps though. */
4356 _reg_unused_after (rtx insn
, rtx reg
)
4361 /* If the reg is set by this instruction, then it is safe for our
4362 case. Disregard the case where this is a store to memory, since
4363 we are checking a register used in the store address. */
4364 set
= single_set (insn
);
4365 if (set
&& GET_CODE (SET_DEST (set
)) != MEM
4366 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4369 while ((insn
= NEXT_INSN (insn
)))
4372 code
= GET_CODE (insn
);
4375 /* If this is a label that existed before reload, then the register
4376 if dead here. However, if this is a label added by reorg, then
4377 the register may still be live here. We can't tell the difference,
4378 so we just ignore labels completely. */
4379 if (code
== CODE_LABEL
)
4387 if (code
== JUMP_INSN
)
4390 /* If this is a sequence, we must handle them all at once.
4391 We could have for instance a call that sets the target register,
4392 and an insn in a delay slot that uses the register. In this case,
4393 we must return 0. */
4394 else if (code
== INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
4399 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
4401 rtx this_insn
= XVECEXP (PATTERN (insn
), 0, i
);
4402 rtx set
= single_set (this_insn
);
4404 if (GET_CODE (this_insn
) == CALL_INSN
)
4406 else if (GET_CODE (this_insn
) == JUMP_INSN
)
4408 if (INSN_ANNULLED_BRANCH_P (this_insn
))
4413 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4415 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4417 if (GET_CODE (SET_DEST (set
)) != MEM
)
4423 && reg_overlap_mentioned_p (reg
, PATTERN (this_insn
)))
4428 else if (code
== JUMP_INSN
)
4432 if (code
== CALL_INSN
)
4435 for (tem
= CALL_INSN_FUNCTION_USAGE (insn
); tem
; tem
= XEXP (tem
, 1))
4436 if (GET_CODE (XEXP (tem
, 0)) == USE
4437 && REG_P (XEXP (XEXP (tem
, 0), 0))
4438 && reg_overlap_mentioned_p (reg
, XEXP (XEXP (tem
, 0), 0)))
4440 if (call_used_regs
[REGNO (reg
)])
4444 set
= single_set (insn
);
4446 if (set
&& reg_overlap_mentioned_p (reg
, SET_SRC (set
)))
4448 if (set
&& reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
4449 return GET_CODE (SET_DEST (set
)) != MEM
;
4450 if (set
== 0 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
4456 /* Target hook for assembling integer objects. The AVR version needs
4457 special handling for references to certain labels. */
4460 avr_assemble_integer (rtx x
, unsigned int size
, int aligned_p
)
4462 if (size
== POINTER_SIZE
/ BITS_PER_UNIT
&& aligned_p
4463 && ((GET_CODE (x
) == SYMBOL_REF
&& SYMBOL_REF_FUNCTION_P (x
))
4464 || GET_CODE (x
) == LABEL_REF
))
4466 fputs ("\t.word\tgs(", asm_out_file
);
4467 output_addr_const (asm_out_file
, x
);
4468 fputs (")\n", asm_out_file
);
4471 return default_assemble_integer (x
, size
, aligned_p
);
4474 /* The routine used to output NUL terminated strings. We use a special
4475 version of this for most svr4 targets because doing so makes the
4476 generated assembly code more compact (and thus faster to assemble)
4477 as well as more readable, especially for targets like the i386
4478 (where the only alternative is to output character sequences as
4479 comma separated lists of numbers). */
4482 gas_output_limited_string(FILE *file
, const char *str
)
4484 const unsigned char *_limited_str
= (const unsigned char *) str
;
4486 fprintf (file
, "%s\"", STRING_ASM_OP
);
4487 for (; (ch
= *_limited_str
); _limited_str
++)
4490 switch (escape
= ESCAPES
[ch
])
4496 fprintf (file
, "\\%03o", ch
);
4500 putc (escape
, file
);
4504 fprintf (file
, "\"\n");
4507 /* The routine used to output sequences of byte values. We use a special
4508 version of this for most svr4 targets because doing so makes the
4509 generated assembly code more compact (and thus faster to assemble)
4510 as well as more readable. Note that if we find subparts of the
4511 character sequence which end with NUL (and which are shorter than
4512 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4515 gas_output_ascii(FILE *file
, const char *str
, size_t length
)
4517 const unsigned char *_ascii_bytes
= (const unsigned char *) str
;
4518 const unsigned char *limit
= _ascii_bytes
+ length
;
4519 unsigned bytes_in_chunk
= 0;
4520 for (; _ascii_bytes
< limit
; _ascii_bytes
++)
4522 const unsigned char *p
;
4523 if (bytes_in_chunk
>= 60)
4525 fprintf (file
, "\"\n");
4528 for (p
= _ascii_bytes
; p
< limit
&& *p
!= '\0'; p
++)
4530 if (p
< limit
&& (p
- _ascii_bytes
) <= (signed)STRING_LIMIT
)
4532 if (bytes_in_chunk
> 0)
4534 fprintf (file
, "\"\n");
4537 gas_output_limited_string (file
, (const char*)_ascii_bytes
);
4544 if (bytes_in_chunk
== 0)
4545 fprintf (file
, "\t.ascii\t\"");
4546 switch (escape
= ESCAPES
[ch
= *_ascii_bytes
])
4553 fprintf (file
, "\\%03o", ch
);
4554 bytes_in_chunk
+= 4;
4558 putc (escape
, file
);
4559 bytes_in_chunk
+= 2;
4564 if (bytes_in_chunk
> 0)
4565 fprintf (file
, "\"\n");
4568 /* Return value is nonzero if pseudos that have been
4569 assigned to registers of class CLASS would likely be spilled
4570 because registers of CLASS are needed for spill registers. */
4573 class_likely_spilled_p (int c
)
4575 return (c
!= ALL_REGS
&& c
!= ADDW_REGS
);
4578 /* Valid attributes:
4579 progmem - put data to program memory;
4580 signal - make a function to be hardware interrupt. After function
4581 prologue interrupts are disabled;
4582 interrupt - make a function to be hardware interrupt. After function
4583 prologue interrupts are enabled;
4584 naked - don't generate function prologue/epilogue and `ret' command.
4586 Only `progmem' attribute valid for type. */
4588 const struct attribute_spec avr_attribute_table
[] =
4590 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4591 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute
},
4592 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute
},
4593 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute
},
4594 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute
},
4595 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute
},
4596 { NULL
, 0, 0, false, false, false, NULL
}
4599 /* Handle a "progmem" attribute; arguments as in
4600 struct attribute_spec.handler. */
4602 avr_handle_progmem_attribute (tree
*node
, tree name
,
4603 tree args ATTRIBUTE_UNUSED
,
4604 int flags ATTRIBUTE_UNUSED
,
4609 if (TREE_CODE (*node
) == TYPE_DECL
)
4611 /* This is really a decl attribute, not a type attribute,
4612 but try to handle it for GCC 3.0 backwards compatibility. */
4614 tree type
= TREE_TYPE (*node
);
4615 tree attr
= tree_cons (name
, args
, TYPE_ATTRIBUTES (type
));
4616 tree newtype
= build_type_attribute_variant (type
, attr
);
4618 TYPE_MAIN_VARIANT (newtype
) = TYPE_MAIN_VARIANT (type
);
4619 TREE_TYPE (*node
) = newtype
;
4620 *no_add_attrs
= true;
4622 else if (TREE_STATIC (*node
) || DECL_EXTERNAL (*node
))
4624 if (DECL_INITIAL (*node
) == NULL_TREE
&& !DECL_EXTERNAL (*node
))
4626 warning (0, "only initialized variables can be placed into "
4627 "program memory area");
4628 *no_add_attrs
= true;
4633 warning (OPT_Wattributes
, "%qs attribute ignored",
4634 IDENTIFIER_POINTER (name
));
4635 *no_add_attrs
= true;
4642 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4643 struct attribute_spec.handler. */
4646 avr_handle_fndecl_attribute (tree
*node
, tree name
,
4647 tree args ATTRIBUTE_UNUSED
,
4648 int flags ATTRIBUTE_UNUSED
,
4651 if (TREE_CODE (*node
) != FUNCTION_DECL
)
4653 warning (OPT_Wattributes
, "%qs attribute only applies to functions",
4654 IDENTIFIER_POINTER (name
));
4655 *no_add_attrs
= true;
4659 const char *func_name
= IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node
));
4660 const char *attr
= IDENTIFIER_POINTER (name
);
4662 /* If the function has the 'signal' or 'interrupt' attribute, test to
4663 make sure that the name of the function is "__vector_NN" so as to
4664 catch when the user misspells the interrupt vector name. */
4666 if (strncmp (attr
, "interrupt", strlen ("interrupt")) == 0)
4668 if (strncmp (func_name
, "__vector", strlen ("__vector")) != 0)
4670 warning (0, "%qs appears to be a misspelled interrupt handler",
4674 else if (strncmp (attr
, "signal", strlen ("signal")) == 0)
4676 if (strncmp (func_name
, "__vector", strlen ("__vector")) != 0)
4678 warning (0, "%qs appears to be a misspelled signal handler",
4688 avr_handle_fntype_attribute (tree
*node
, tree name
,
4689 tree args ATTRIBUTE_UNUSED
,
4690 int flags ATTRIBUTE_UNUSED
,
4693 if (TREE_CODE (*node
) != FUNCTION_TYPE
)
4695 warning (OPT_Wattributes
, "%qs attribute only applies to functions",
4696 IDENTIFIER_POINTER (name
));
4697 *no_add_attrs
= true;
4703 /* Look for attribute `progmem' in DECL
4704 if found return 1, otherwise 0. */
4707 avr_progmem_p (tree decl
, tree attributes
)
4711 if (TREE_CODE (decl
) != VAR_DECL
)
4715 != lookup_attribute ("progmem", attributes
))
4721 while (TREE_CODE (a
) == ARRAY_TYPE
);
4723 if (a
== error_mark_node
)
4726 if (NULL_TREE
!= lookup_attribute ("progmem", TYPE_ATTRIBUTES (a
)))
4732 /* Add the section attribute if the variable is in progmem. */
4735 avr_insert_attributes (tree node
, tree
*attributes
)
4737 if (TREE_CODE (node
) == VAR_DECL
4738 && (TREE_STATIC (node
) || DECL_EXTERNAL (node
))
4739 && avr_progmem_p (node
, *attributes
))
4741 static const char dsec
[] = ".progmem.data";
4742 *attributes
= tree_cons (get_identifier ("section"),
4743 build_tree_list (NULL
, build_string (strlen (dsec
), dsec
)),
4746 /* ??? This seems sketchy. Why can't the user declare the
4747 thing const in the first place? */
4748 TREE_READONLY (node
) = 1;
4752 /* A get_unnamed_section callback for switching to progmem_section. */
4755 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED
)
4757 fprintf (asm_out_file
,
4758 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4759 AVR_HAVE_JMP_CALL
? "a" : "ax");
4760 /* Should already be aligned, this is just to be safe if it isn't. */
4761 fprintf (asm_out_file
, "\t.p2align 1\n");
4764 /* Implement TARGET_ASM_INIT_SECTIONS. */
4767 avr_asm_init_sections (void)
4769 progmem_section
= get_unnamed_section (AVR_HAVE_JMP_CALL
? 0 : SECTION_CODE
,
4770 avr_output_progmem_section_asm_op
,
4772 readonly_data_section
= data_section
;
4776 avr_section_type_flags (tree decl
, const char *name
, int reloc
)
4778 unsigned int flags
= default_section_type_flags (decl
, name
, reloc
);
4780 if (strncmp (name
, ".noinit", 7) == 0)
4782 if (decl
&& TREE_CODE (decl
) == VAR_DECL
4783 && DECL_INITIAL (decl
) == NULL_TREE
)
4784 flags
|= SECTION_BSS
; /* @nobits */
4786 warning (0, "only uninitialized variables can be placed in the "
4793 /* Outputs some appropriate text to go at the start of an assembler
4797 avr_file_start (void)
4799 if (avr_current_arch
->asm_only
)
4800 error ("MCU %qs supported for assembler only", avr_mcu_name
);
4802 default_file_start ();
4804 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4805 fputs ("__SREG__ = 0x3f\n"
4807 "__SP_L__ = 0x3d\n", asm_out_file
);
4809 fputs ("__tmp_reg__ = 0\n"
4810 "__zero_reg__ = 1\n", asm_out_file
);
4812 /* FIXME: output these only if there is anything in the .data / .bss
4813 sections - some code size could be saved by not linking in the
4814 initialization code from libgcc if one or both sections are empty. */
4815 fputs ("\t.global __do_copy_data\n", asm_out_file
);
4816 fputs ("\t.global __do_clear_bss\n", asm_out_file
);
4819 /* Outputs to the stdio stream FILE some
4820 appropriate text to go at the end of an assembler file. */
4827 /* Choose the order in which to allocate hard registers for
4828 pseudo-registers local to a basic block.
4830 Store the desired register order in the array `reg_alloc_order'.
4831 Element 0 should be the register to allocate first; element 1, the
4832 next register; and so on. */
4835 order_regs_for_local_alloc (void)
4838 static const int order_0
[] = {
4846 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4850 static const int order_1
[] = {
4858 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4862 static const int order_2
[] = {
4871 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4876 const int *order
= (TARGET_ORDER_1
? order_1
:
4877 TARGET_ORDER_2
? order_2
:
4879 for (i
=0; i
< ARRAY_SIZE (order_0
); ++i
)
4880 reg_alloc_order
[i
] = order
[i
];
4884 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4885 cost of an RTX operand given its context. X is the rtx of the
4886 operand, MODE is its mode, and OUTER is the rtx_code of this
4887 operand's parent operator. */
4890 avr_operand_rtx_cost (rtx x
, enum machine_mode mode
, enum rtx_code outer
)
4892 enum rtx_code code
= GET_CODE (x
);
4903 return COSTS_N_INSNS (GET_MODE_SIZE (mode
));
4910 avr_rtx_costs (x
, code
, outer
, &total
);
4914 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
4915 is to be calculated. Return true if the complete cost has been
4916 computed, and false if subexpressions should be scanned. In either
4917 case, *TOTAL contains the cost result. */
4920 avr_rtx_costs (rtx x
, int code
, int outer_code ATTRIBUTE_UNUSED
, int *total
)
4922 enum machine_mode mode
= GET_MODE (x
);
4929 /* Immediate constants are as cheap as registers. */
4937 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
4945 *total
= COSTS_N_INSNS (1);
4949 *total
= COSTS_N_INSNS (3);
4953 *total
= COSTS_N_INSNS (7);
4959 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
4967 *total
= COSTS_N_INSNS (1);
4973 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
4977 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
4978 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
4982 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
)
4983 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
4984 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
4988 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
) + 2
4989 - GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))));
4990 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
4997 *total
= COSTS_N_INSNS (1);
4998 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
4999 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5003 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5005 *total
= COSTS_N_INSNS (2);
5006 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5008 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5009 *total
= COSTS_N_INSNS (1);
5011 *total
= COSTS_N_INSNS (2);
5015 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5017 *total
= COSTS_N_INSNS (4);
5018 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5020 else if (INTVAL (XEXP (x
, 1)) >= -63 && INTVAL (XEXP (x
, 1)) <= 63)
5021 *total
= COSTS_N_INSNS (1);
5023 *total
= COSTS_N_INSNS (4);
5029 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5035 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5036 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5037 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5038 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5042 *total
= COSTS_N_INSNS (GET_MODE_SIZE (mode
));
5043 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5044 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5052 *total
= COSTS_N_INSNS (optimize_size
? 3 : 4);
5053 else if (optimize_size
)
5054 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5061 *total
= COSTS_N_INSNS (optimize_size
? 7 : 10);
5062 else if (optimize_size
)
5063 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5071 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5072 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5080 *total
= COSTS_N_INSNS (AVR_HAVE_JMP_CALL
? 2 : 1);
5083 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5084 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5091 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5093 *total
= COSTS_N_INSNS (optimize_size
? 4 : 17);
5094 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5098 val
= INTVAL (XEXP (x
, 1));
5100 *total
= COSTS_N_INSNS (3);
5101 else if (val
>= 0 && val
<= 7)
5102 *total
= COSTS_N_INSNS (val
);
5104 *total
= COSTS_N_INSNS (1);
5109 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5111 *total
= COSTS_N_INSNS (optimize_size
? 5 : 41);
5112 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5115 switch (INTVAL (XEXP (x
, 1)))
5122 *total
= COSTS_N_INSNS (2);
5125 *total
= COSTS_N_INSNS (3);
5131 *total
= COSTS_N_INSNS (4);
5136 *total
= COSTS_N_INSNS (5);
5139 *total
= COSTS_N_INSNS (optimize_size
? 5 : 8);
5142 *total
= COSTS_N_INSNS (optimize_size
? 5 : 9);
5145 *total
= COSTS_N_INSNS (optimize_size
? 5 : 10);
5148 *total
= COSTS_N_INSNS (optimize_size
? 5 : 41);
5149 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5154 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5156 *total
= COSTS_N_INSNS (optimize_size
? 7 : 113);
5157 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5160 switch (INTVAL (XEXP (x
, 1)))
5166 *total
= COSTS_N_INSNS (3);
5171 *total
= COSTS_N_INSNS (4);
5174 *total
= COSTS_N_INSNS (6);
5177 *total
= COSTS_N_INSNS (optimize_size
? 7 : 8);
5180 *total
= COSTS_N_INSNS (optimize_size
? 7 : 113);
5181 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5188 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5195 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5197 *total
= COSTS_N_INSNS (optimize_size
? 4 : 17);
5198 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5202 val
= INTVAL (XEXP (x
, 1));
5204 *total
= COSTS_N_INSNS (4);
5206 *total
= COSTS_N_INSNS (2);
5207 else if (val
>= 0 && val
<= 7)
5208 *total
= COSTS_N_INSNS (val
);
5210 *total
= COSTS_N_INSNS (1);
5215 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5217 *total
= COSTS_N_INSNS (optimize_size
? 5 : 41);
5218 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5221 switch (INTVAL (XEXP (x
, 1)))
5227 *total
= COSTS_N_INSNS (2);
5230 *total
= COSTS_N_INSNS (3);
5236 *total
= COSTS_N_INSNS (4);
5240 *total
= COSTS_N_INSNS (5);
5243 *total
= COSTS_N_INSNS (optimize_size
? 5 : 6);
5246 *total
= COSTS_N_INSNS (optimize_size
? 5 : 7);
5250 *total
= COSTS_N_INSNS (optimize_size
? 5 : 8);
5253 *total
= COSTS_N_INSNS (optimize_size
? 5 : 41);
5254 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5259 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5261 *total
= COSTS_N_INSNS (optimize_size
? 7 : 113);
5262 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5265 switch (INTVAL (XEXP (x
, 1)))
5271 *total
= COSTS_N_INSNS (4);
5276 *total
= COSTS_N_INSNS (6);
5279 *total
= COSTS_N_INSNS (optimize_size
? 7 : 8);
5282 *total
= COSTS_N_INSNS (AVR_HAVE_MOVW
? 4 : 5);
5285 *total
= COSTS_N_INSNS (optimize_size
? 7 : 113);
5286 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5293 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5300 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5302 *total
= COSTS_N_INSNS (optimize_size
? 4 : 17);
5303 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5307 val
= INTVAL (XEXP (x
, 1));
5309 *total
= COSTS_N_INSNS (3);
5310 else if (val
>= 0 && val
<= 7)
5311 *total
= COSTS_N_INSNS (val
);
5313 *total
= COSTS_N_INSNS (1);
5318 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5320 *total
= COSTS_N_INSNS (optimize_size
? 5 : 41);
5321 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5324 switch (INTVAL (XEXP (x
, 1)))
5331 *total
= COSTS_N_INSNS (2);
5334 *total
= COSTS_N_INSNS (3);
5339 *total
= COSTS_N_INSNS (4);
5343 *total
= COSTS_N_INSNS (5);
5349 *total
= COSTS_N_INSNS (optimize_size
? 5 : 6);
5352 *total
= COSTS_N_INSNS (optimize_size
? 5 : 7);
5356 *total
= COSTS_N_INSNS (optimize_size
? 5 : 9);
5359 *total
= COSTS_N_INSNS (optimize_size
? 5 : 41);
5360 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5365 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5367 *total
= COSTS_N_INSNS (optimize_size
? 7 : 113);
5368 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5371 switch (INTVAL (XEXP (x
, 1)))
5377 *total
= COSTS_N_INSNS (4);
5380 *total
= COSTS_N_INSNS (optimize_size
? 7 : 8);
5385 *total
= COSTS_N_INSNS (4);
5388 *total
= COSTS_N_INSNS (6);
5391 *total
= COSTS_N_INSNS (optimize_size
? 7 : 113);
5392 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5399 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5403 switch (GET_MODE (XEXP (x
, 0)))
5406 *total
= COSTS_N_INSNS (1);
5407 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5408 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5412 *total
= COSTS_N_INSNS (2);
5413 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5414 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5415 else if (INTVAL (XEXP (x
, 1)) != 0)
5416 *total
+= COSTS_N_INSNS (1);
5420 *total
= COSTS_N_INSNS (4);
5421 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
5422 *total
+= avr_operand_rtx_cost (XEXP (x
, 1), mode
, code
);
5423 else if (INTVAL (XEXP (x
, 1)) != 0)
5424 *total
+= COSTS_N_INSNS (3);
5430 *total
+= avr_operand_rtx_cost (XEXP (x
, 0), mode
, code
);
5439 /* Calculate the cost of a memory address. */
5442 avr_address_cost (rtx x
)
5444 if (GET_CODE (x
) == PLUS
5445 && GET_CODE (XEXP (x
,1)) == CONST_INT
5446 && (REG_P (XEXP (x
,0)) || GET_CODE (XEXP (x
,0)) == SUBREG
)
5447 && INTVAL (XEXP (x
,1)) >= 61)
5449 if (CONSTANT_ADDRESS_P (x
))
5451 if (optimize
> 0 && io_address_operand (x
, QImode
))
5458 /* Test for extra memory constraint 'Q'.
5459 It's a memory address based on Y or Z pointer with valid displacement. */
5462 extra_constraint_Q (rtx x
)
5464 if (GET_CODE (XEXP (x
,0)) == PLUS
5465 && REG_P (XEXP (XEXP (x
,0), 0))
5466 && GET_CODE (XEXP (XEXP (x
,0), 1)) == CONST_INT
5467 && (INTVAL (XEXP (XEXP (x
,0), 1))
5468 <= MAX_LD_OFFSET (GET_MODE (x
))))
5470 rtx xx
= XEXP (XEXP (x
,0), 0);
5471 int regno
= REGNO (xx
);
5472 if (TARGET_ALL_DEBUG
)
5474 fprintf (stderr
, ("extra_constraint:\n"
5475 "reload_completed: %d\n"
5476 "reload_in_progress: %d\n"),
5477 reload_completed
, reload_in_progress
);
5480 if (regno
>= FIRST_PSEUDO_REGISTER
)
5481 return 1; /* allocate pseudos */
5482 else if (regno
== REG_Z
|| regno
== REG_Y
)
5483 return 1; /* strictly check */
5484 else if (xx
== frame_pointer_rtx
5485 || xx
== arg_pointer_rtx
)
5486 return 1; /* XXX frame & arg pointer checks */
5491 /* Convert condition code CONDITION to the valid AVR condition code. */
5494 avr_normalize_condition (RTX_CODE condition
)
5511 /* This function optimizes conditional jumps. */
5518 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5520 if (! (GET_CODE (insn
) == INSN
5521 || GET_CODE (insn
) == CALL_INSN
5522 || GET_CODE (insn
) == JUMP_INSN
)
5523 || !single_set (insn
))
5526 pattern
= PATTERN (insn
);
5528 if (GET_CODE (pattern
) == PARALLEL
)
5529 pattern
= XVECEXP (pattern
, 0, 0);
5530 if (GET_CODE (pattern
) == SET
5531 && SET_DEST (pattern
) == cc0_rtx
5532 && compare_diff_p (insn
))
5534 if (GET_CODE (SET_SRC (pattern
)) == COMPARE
)
5536 /* Now we work under compare insn. */
5538 pattern
= SET_SRC (pattern
);
5539 if (true_regnum (XEXP (pattern
,0)) >= 0
5540 && true_regnum (XEXP (pattern
,1)) >= 0 )
5542 rtx x
= XEXP (pattern
,0);
5543 rtx next
= next_real_insn (insn
);
5544 rtx pat
= PATTERN (next
);
5545 rtx src
= SET_SRC (pat
);
5546 rtx t
= XEXP (src
,0);
5547 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5548 XEXP (pattern
,0) = XEXP (pattern
,1);
5549 XEXP (pattern
,1) = x
;
5550 INSN_CODE (next
) = -1;
5552 else if (true_regnum (XEXP (pattern
,0)) >= 0
5553 && GET_CODE (XEXP (pattern
,1)) == CONST_INT
)
5555 rtx x
= XEXP (pattern
,1);
5556 rtx next
= next_real_insn (insn
);
5557 rtx pat
= PATTERN (next
);
5558 rtx src
= SET_SRC (pat
);
5559 rtx t
= XEXP (src
,0);
5560 enum machine_mode mode
= GET_MODE (XEXP (pattern
, 0));
5562 if (avr_simplify_comparison_p (mode
, GET_CODE (t
), x
))
5564 XEXP (pattern
, 1) = gen_int_mode (INTVAL (x
) + 1, mode
);
5565 PUT_CODE (t
, avr_normalize_condition (GET_CODE (t
)));
5566 INSN_CODE (next
) = -1;
5567 INSN_CODE (insn
) = -1;
5571 else if (true_regnum (SET_SRC (pattern
)) >= 0)
5573 /* This is a tst insn */
5574 rtx next
= next_real_insn (insn
);
5575 rtx pat
= PATTERN (next
);
5576 rtx src
= SET_SRC (pat
);
5577 rtx t
= XEXP (src
,0);
5579 PUT_CODE (t
, swap_condition (GET_CODE (t
)));
5580 SET_SRC (pattern
) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern
)),
5582 INSN_CODE (next
) = -1;
5583 INSN_CODE (insn
) = -1;
/* Returns register number for function return value.*/

static int
avr_ret_register (void)
{
  /* NOTE(review): the literal is missing from the available source text;
     r24 is the documented avr-gcc return register — verify.  */
  return 24;
}
5597 /* Create an RTX representing the place where a
5598 library function returns a value of mode MODE. */
5601 avr_libcall_value (enum machine_mode mode
)
5603 int offs
= GET_MODE_SIZE (mode
);
5606 return gen_rtx_REG (mode
, RET_REGISTER
+ 2 - offs
);
5609 /* Create an RTX representing the place where a
5610 function returns a value of data type VALTYPE. */
5613 avr_function_value (const_tree type
, const_tree func ATTRIBUTE_UNUSED
)
5617 if (TYPE_MODE (type
) != BLKmode
)
5618 return avr_libcall_value (TYPE_MODE (type
));
5620 offs
= int_size_in_bytes (type
);
5623 if (offs
> 2 && offs
< GET_MODE_SIZE (SImode
))
5624 offs
= GET_MODE_SIZE (SImode
);
5625 else if (offs
> GET_MODE_SIZE (SImode
) && offs
< GET_MODE_SIZE (DImode
))
5626 offs
= GET_MODE_SIZE (DImode
);
5628 return gen_rtx_REG (BLKmode
, RET_REGISTER
+ 2 - offs
);
5631 /* Places additional restrictions on the register class to
5632 use when it is necessary to copy value X into a register
5636 preferred_reload_class (rtx x ATTRIBUTE_UNUSED
, enum reg_class
class)
5642 test_hard_reg_class (enum reg_class
class, rtx x
)
5644 int regno
= true_regnum (x
);
5648 if (TEST_HARD_REG_CLASS (class, regno
))
5656 jump_over_one_insn_p (rtx insn
, rtx dest
)
5658 int uid
= INSN_UID (GET_CODE (dest
) == LABEL_REF
5661 int jump_addr
= INSN_ADDRESSES (INSN_UID (insn
));
5662 int dest_addr
= INSN_ADDRESSES (uid
);
5663 return dest_addr
- jump_addr
== get_attr_length (insn
) + 1;
5666 /* Returns 1 if a value of mode MODE can be stored starting with hard
5667 register number REGNO. On the enhanced core, anything larger than
5668 1 byte must start in even numbered register for "movw" to work
5669 (this way we don't have to check for odd registers everywhere). */
5672 avr_hard_regno_mode_ok (int regno
, enum machine_mode mode
)
5674 /* Disallow QImode in stack pointer regs. */
5675 if ((regno
== REG_SP
|| regno
== (REG_SP
+ 1)) && mode
== QImode
)
5678 /* The only thing that can go into registers r28:r29 is a Pmode. */
5679 if (regno
== REG_Y
&& mode
== Pmode
)
5682 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5683 if (regno
<= (REG_Y
+ 1) && (regno
+ GET_MODE_SIZE (mode
)) >= (REG_Y
+ 1))
5689 /* Modes larger than QImode occupy consecutive registers. */
5690 if (regno
+ GET_MODE_SIZE (mode
) > FIRST_PSEUDO_REGISTER
)
5693 /* All modes larger than QImode should start in an even register. */
5694 return !(regno
& 1);
5698 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
5704 if (GET_CODE (operands
[1]) == CONST_INT
)
5706 int val
= INTVAL (operands
[1]);
5707 if ((val
& 0xff) == 0)
5710 return (AS2 (mov
,%A0
,__zero_reg__
) CR_TAB
5711 AS2 (ldi
,%2,hi8(%1)) CR_TAB
5714 else if ((val
& 0xff00) == 0)
5717 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5718 AS2 (mov
,%A0
,%2) CR_TAB
5719 AS2 (mov
,%B0
,__zero_reg__
));
5721 else if ((val
& 0xff) == ((val
& 0xff00) >> 8))
5724 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5725 AS2 (mov
,%A0
,%2) CR_TAB
5730 return (AS2 (ldi
,%2,lo8(%1)) CR_TAB
5731 AS2 (mov
,%A0
,%2) CR_TAB
5732 AS2 (ldi
,%2,hi8(%1)) CR_TAB
5738 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED
, rtx
*operands
, int *len
)
5740 rtx src
= operands
[1];
5741 int cnst
= (GET_CODE (src
) == CONST_INT
);
5746 *len
= 4 + ((INTVAL (src
) & 0xff) != 0)
5747 + ((INTVAL (src
) & 0xff00) != 0)
5748 + ((INTVAL (src
) & 0xff0000) != 0)
5749 + ((INTVAL (src
) & 0xff000000) != 0);
5756 if (cnst
&& ((INTVAL (src
) & 0xff) == 0))
5757 output_asm_insn (AS2 (mov
, %A0
, __zero_reg__
), operands
);
5760 output_asm_insn (AS2 (ldi
, %2, lo8(%1)), operands
);
5761 output_asm_insn (AS2 (mov
, %A0
, %2), operands
);
5763 if (cnst
&& ((INTVAL (src
) & 0xff00) == 0))
5764 output_asm_insn (AS2 (mov
, %B0
, __zero_reg__
), operands
);
5767 output_asm_insn (AS2 (ldi
, %2, hi8(%1)), operands
);
5768 output_asm_insn (AS2 (mov
, %B0
, %2), operands
);
5770 if (cnst
&& ((INTVAL (src
) & 0xff0000) == 0))
5771 output_asm_insn (AS2 (mov
, %C0
, __zero_reg__
), operands
);
5774 output_asm_insn (AS2 (ldi
, %2, hlo8(%1)), operands
);
5775 output_asm_insn (AS2 (mov
, %C0
, %2), operands
);
5777 if (cnst
&& ((INTVAL (src
) & 0xff000000) == 0))
5778 output_asm_insn (AS2 (mov
, %D0
, __zero_reg__
), operands
);
5781 output_asm_insn (AS2 (ldi
, %2, hhi8(%1)), operands
);
5782 output_asm_insn (AS2 (mov
, %D0
, %2), operands
);
5788 avr_output_bld (rtx operands
[], int bit_nr
)
5790 static char s
[] = "bld %A0,0";
5792 s
[5] = 'A' + (bit_nr
>> 3);
5793 s
[8] = '0' + (bit_nr
& 7);
5794 output_asm_insn (s
, operands
);
5798 avr_output_addr_vec_elt (FILE *stream
, int value
)
5800 switch_to_section (progmem_section
);
5801 if (AVR_HAVE_JMP_CALL
)
5802 fprintf (stream
, "\t.word gs(.L%d)\n", value
);
5804 fprintf (stream
, "\trjmp .L%d\n", value
);
5807 /* Returns 1 if SCRATCH are safe to be allocated as a scratch
5808 registers (for a define_peephole2) in the current function. */
5811 avr_peep2_scratch_safe (rtx scratch
)
5813 if ((interrupt_function_p (current_function_decl
)
5814 || signal_function_p (current_function_decl
))
5815 && leaf_function_p ())
5817 int first_reg
= true_regnum (scratch
);
5818 int last_reg
= first_reg
+ GET_MODE_SIZE (GET_MODE (scratch
)) - 1;
5821 for (reg
= first_reg
; reg
<= last_reg
; reg
++)
5823 if (!df_regs_ever_live_p (reg
))
5830 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5831 or memory location in the I/O space (QImode only).
5833 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5834 Operand 1: register operand to test, or CONST_INT memory address.
5835 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5836 Operand 3: label to jump to if the test is true. */
5839 avr_out_sbxx_branch (rtx insn
, rtx operands
[])
5841 enum rtx_code comp
= GET_CODE (operands
[0]);
5842 int long_jump
= (get_attr_length (insn
) >= 4);
5843 int reverse
= long_jump
|| jump_over_one_insn_p (insn
, operands
[3]);
5847 else if (comp
== LT
)
5851 comp
= reverse_condition (comp
);
5853 if (GET_CODE (operands
[1]) == CONST_INT
)
5855 if (INTVAL (operands
[1]) < 0x40)
5858 output_asm_insn (AS2 (sbis
,%1-0x20,%2), operands
);
5860 output_asm_insn (AS2 (sbic
,%1-0x20,%2), operands
);
5864 output_asm_insn (AS2 (in
,__tmp_reg__
,%1-0x20), operands
);
5866 output_asm_insn (AS2 (sbrs
,__tmp_reg__
,%2), operands
);
5868 output_asm_insn (AS2 (sbrc
,__tmp_reg__
,%2), operands
);
5871 else /* GET_CODE (operands[1]) == REG */
5873 if (GET_MODE (operands
[1]) == QImode
)
5876 output_asm_insn (AS2 (sbrs
,%1,%2), operands
);
5878 output_asm_insn (AS2 (sbrc
,%1,%2), operands
);
5880 else /* HImode or SImode */
5882 static char buf
[] = "sbrc %A1,0";
5883 int bit_nr
= exact_log2 (INTVAL (operands
[2])
5884 & GET_MODE_MASK (GET_MODE (operands
[1])));
5886 buf
[3] = (comp
== EQ
) ? 's' : 'c';
5887 buf
[6] = 'A' + (bit_nr
>> 3);
5888 buf
[9] = '0' + (bit_nr
& 7);
5889 output_asm_insn (buf
, operands
);
5894 return (AS1 (rjmp
,.+4) CR_TAB
5897 return AS1 (rjmp
,%3);
5901 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
5904 avr_asm_out_ctor (rtx symbol
, int priority
)
5906 fputs ("\t.global __do_global_ctors\n", asm_out_file
);
5907 default_ctor_section_asm_out_constructor (symbol
, priority
);
5910 /* Worker function for TARGET_ASM_DESTRUCTOR. */
5913 avr_asm_out_dtor (rtx symbol
, int priority
)
5915 fputs ("\t.global __do_global_dtors\n", asm_out_file
);
5916 default_dtor_section_asm_out_destructor (symbol
, priority
);
5919 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5922 avr_return_in_memory (const_tree type
, const_tree fntype ATTRIBUTE_UNUSED
)
5924 if (TYPE_MODE (type
) == BLKmode
)
5926 HOST_WIDE_INT size
= int_size_in_bytes (type
);
5927 return (size
== -1 || size
> 8);