[gcc.git] / gcc / config / avr / avr.c
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "ggc.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "params.h"
47 #include "df.h"
48
49 /* Maximum allowed offset for an address in the LD command.  */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
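
/* A quick worked example of the limit above, assuming the usual 0..63
   displacement range of the ldd/std instructions: the last byte of an
   operand accessed at offset OFF lives at OFF + GET_MODE_SIZE (MODE) - 1,
   so OFF can be at most 64 - GET_MODE_SIZE (MODE).  Hence QImode allows
   offsets up to 63, HImode up to 62 and SImode up to 60.  */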
51
52 static int avr_naked_function_p (tree);
53 static int interrupt_function_p (tree);
54 static int signal_function_p (tree);
55 static int avr_OS_task_function_p (tree);
56 static int avr_OS_main_function_p (tree);
57 static int avr_regs_to_save (HARD_REG_SET *);
58 static int get_sequence_length (rtx insns);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code);
62 static int avr_num_arg_regs (enum machine_mode, tree);
63
64 static RTX_CODE compare_condition (rtx insn);
65 static int compare_sign_p (rtx insn);
66 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
67 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
69 const struct attribute_spec avr_attribute_table[];
70 static bool avr_assemble_integer (rtx, unsigned int, int);
71 static void avr_file_start (void);
72 static void avr_file_end (void);
73 static void avr_asm_function_end_prologue (FILE *);
74 static void avr_asm_function_begin_epilogue (FILE *);
75 static rtx avr_function_value (const_tree, const_tree, bool);
76 static void avr_insert_attributes (tree, tree *);
77 static void avr_asm_init_sections (void);
78 static unsigned int avr_section_type_flags (tree, const char *, int);
79
80 static void avr_reorg (void);
81 static void avr_asm_out_ctor (rtx, int);
82 static void avr_asm_out_dtor (rtx, int);
83 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
84 static bool avr_rtx_costs (rtx, int, int, int *, bool);
85 static int avr_address_cost (rtx, bool);
86 static bool avr_return_in_memory (const_tree, const_tree);
87 static struct machine_function * avr_init_machine_status (void);
88 static rtx avr_builtin_setjmp_frame_value (void);
89 static bool avr_hard_regno_scratch_ok (unsigned int);
90
91 /* Allocate registers from r25 down to r8 for function call parameters.  */
92 #define FIRST_CUM_REG 26
93
94 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
95 static GTY(()) rtx tmp_reg_rtx;
96
97 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
98 static GTY(()) rtx zero_reg_rtx;
99
100 /* AVR register names {"r0", "r1", ..., "r31"} */
101 static const char *const avr_regnames[] = REGISTER_NAMES;
102
103 /* This holds the last insn address. */
104 static int last_insn_address = 0;
105
106 /* Preprocessor macros to define depending on MCU type. */
107 const char *avr_extra_arch_macro;
108
109 /* Current architecture. */
110 const struct base_arch_s *avr_current_arch;
111
112 section *progmem_section;
113
114 static const struct base_arch_s avr_arch_types[] = {
115 { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
116 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
117 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
118 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
119 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
120 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
121 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
122 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
123 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
124 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
125 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
126 };
127
128 /* These names are used as the index into the avr_arch_types[] table
129 above. */
130
131 enum avr_arch
132 {
133 ARCH_UNKNOWN,
134 ARCH_AVR1,
135 ARCH_AVR2,
136 ARCH_AVR25,
137 ARCH_AVR3,
138 ARCH_AVR31,
139 ARCH_AVR35,
140 ARCH_AVR4,
141 ARCH_AVR5,
142 ARCH_AVR51,
143 ARCH_AVR6
144 };
145
146 struct mcu_type_s {
147 const char *const name;
148 int arch; /* index in avr_arch_types[] */
149 /* Must lie outside user's namespace. NULL == no macro. */
150 const char *const macro;
151 };
152
153 /* List of all known AVR MCU types - if updated, it has to be kept
154 in sync in several places (FIXME: is there a better way?):
155 - here
156 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
157 - t-avr (MULTILIB_MATCHES)
158 - gas/config/tc-avr.c
159 - avr-libc */
160
161 static const struct mcu_type_s avr_mcu_types[] = {
162 /* Classic, <= 8K. */
163 { "avr2", ARCH_AVR2, NULL },
164 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
165 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
166 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
167 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
168 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
169 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
170 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
171 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
172 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
173 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
174 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
175 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
176 /* Classic + MOVW, <= 8K. */
177 { "avr25", ARCH_AVR25, NULL },
178 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
179 { "attiny13a", ARCH_AVR25, "__AVR_ATtiny13A__" },
180 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
181 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
182 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
183 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
184 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
185 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
186 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
187 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
188 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
189 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
190 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
191 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
192 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
193 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
194 /* Classic, > 8K, <= 64K. */
195 { "avr3", ARCH_AVR3, NULL },
196 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
197 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
198 /* Classic, == 128K. */
199 { "avr31", ARCH_AVR31, NULL },
200 { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
201 { "at43usb320", ARCH_AVR31, "__AVR_AT43USB320__" },
202 /* Classic + MOVW + JMP/CALL. */
203 { "avr35", ARCH_AVR35, NULL },
204 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
205 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
206 { "attiny167", ARCH_AVR35, "__AVR_ATtiny167__" },
207 /* Enhanced, <= 8K. */
208 { "avr4", ARCH_AVR4, NULL },
209 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
210 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
211 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
212 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
213 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
214 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
215 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
216 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
217 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
218 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
219 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
220 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
221 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
222 /* Enhanced, > 8K, <= 64K. */
223 { "avr5", ARCH_AVR5, NULL },
224 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
225 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
226 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
227 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
228 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
229 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
230 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
231 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
232 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
233 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
234 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
235 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
236 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
237 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
238 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
239 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
240 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
241 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
242 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
243 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
244 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
245 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
246 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
247 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
248 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
249 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
250 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
251 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
252 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
253 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
254 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
255 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
256 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
257 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
258 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
259 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
260 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
261 { "atmega32m1", ARCH_AVR5, "__AVR_ATmega32M1__" },
262 { "atmega32c1", ARCH_AVR5, "__AVR_ATmega32C1__" },
263 { "atmega32u4", ARCH_AVR5, "__AVR_ATmega32U4__" },
264 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
265 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
266 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
267 /* Enhanced, == 128K. */
268 { "avr51", ARCH_AVR51, NULL },
269 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
270 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
271 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
272 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
273 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
274 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
275 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
276 /* 3-Byte PC. */
277 { "avr6", ARCH_AVR6, NULL },
278 { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
279 { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
280 /* Assembler only. */
281 { "avr1", ARCH_AVR1, NULL },
282 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
283 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
284 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
285 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
286 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
287 { NULL, ARCH_UNKNOWN, NULL }
288 };
289
290 int avr_case_values_threshold = 30000;
291 \f
292 /* Initialize the GCC target structure. */
293 #undef TARGET_ASM_ALIGNED_HI_OP
294 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
295 #undef TARGET_ASM_ALIGNED_SI_OP
296 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
297 #undef TARGET_ASM_UNALIGNED_HI_OP
298 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
299 #undef TARGET_ASM_UNALIGNED_SI_OP
300 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
301 #undef TARGET_ASM_INTEGER
302 #define TARGET_ASM_INTEGER avr_assemble_integer
303 #undef TARGET_ASM_FILE_START
304 #define TARGET_ASM_FILE_START avr_file_start
305 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
306 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
307 #undef TARGET_ASM_FILE_END
308 #define TARGET_ASM_FILE_END avr_file_end
309
310 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
311 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
312 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
313 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
314 #undef TARGET_FUNCTION_VALUE
315 #define TARGET_FUNCTION_VALUE avr_function_value
316 #undef TARGET_ATTRIBUTE_TABLE
317 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
318 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
319 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
320 #undef TARGET_INSERT_ATTRIBUTES
321 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
322 #undef TARGET_SECTION_TYPE_FLAGS
323 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
324 #undef TARGET_RTX_COSTS
325 #define TARGET_RTX_COSTS avr_rtx_costs
326 #undef TARGET_ADDRESS_COST
327 #define TARGET_ADDRESS_COST avr_address_cost
328 #undef TARGET_MACHINE_DEPENDENT_REORG
329 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
330
331 #undef TARGET_RETURN_IN_MEMORY
332 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
333
334 #undef TARGET_STRICT_ARGUMENT_NAMING
335 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
336
337 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
338 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
339
340 #undef TARGET_HARD_REGNO_SCRATCH_OK
341 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
342
343 struct gcc_target targetm = TARGET_INITIALIZER;
344 \f
345 void
346 avr_override_options (void)
347 {
348 const struct mcu_type_s *t;
349
350 flag_delete_null_pointer_checks = 0;
351
352 if (!PARAM_SET_P (PARAM_INLINE_CALL_COST))
353 set_param_value ("inline-call-cost", 5);
354
355 for (t = avr_mcu_types; t->name; t++)
356 if (strcmp (t->name, avr_mcu_name) == 0)
357 break;
358
359 if (!t->name)
360 {
361 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
362 avr_mcu_name);
363 for (t = avr_mcu_types; t->name; t++)
364 fprintf (stderr," %s\n", t->name);
365 }
366
367 avr_current_arch = &avr_arch_types[t->arch];
368 avr_extra_arch_macro = t->macro;
369
370 if (optimize && !TARGET_NO_TABLEJUMP)
371 avr_case_values_threshold =
372 (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
373
374 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
375 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
376
377 init_machine_status = avr_init_machine_status;
378 }
379
380 /* Table mapping a register number to its register class.  */
381
382 static const int reg_class_tab[]={
383 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
384 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
385 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
386 GENERAL_REGS, /* r0 - r15 */
387 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
388 LD_REGS, /* r16 - 23 */
389 ADDW_REGS,ADDW_REGS, /* r24,r25 */
390 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
391 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
392 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
393 STACK_REG,STACK_REG /* SPL,SPH */
394 };
395
396 /* Function to set up the backend function structure. */
397
398 static struct machine_function *
399 avr_init_machine_status (void)
400 {
401 return ((struct machine_function *)
402 ggc_alloc_cleared (sizeof (struct machine_function)));
403 }
404
405 /* Return register class for register R. */
406
407 enum reg_class
408 avr_regno_reg_class (int r)
409 {
410 if (r <= 33)
411 return reg_class_tab[r];
412 return ALL_REGS;
413 }
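
/* For instance, per the table above, avr_regno_reg_class (24) is ADDW_REGS
   and avr_regno_reg_class (32) is STACK_REG (SPL); any number above 33
   falls back to ALL_REGS.  */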
414
415 /* Return nonzero if FUNC is a naked function. */
416
417 static int
418 avr_naked_function_p (tree func)
419 {
420 tree a;
421
422 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
423
424 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
425 return a != NULL_TREE;
426 }
427
428 /* Return nonzero if FUNC is an interrupt function as specified
429 by the "interrupt" attribute. */
430
431 static int
432 interrupt_function_p (tree func)
433 {
434 tree a;
435
436 if (TREE_CODE (func) != FUNCTION_DECL)
437 return 0;
438
439 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
440 return a != NULL_TREE;
441 }
442
443 /* Return nonzero if FUNC is a signal function as specified
444 by the "signal" attribute. */
445
446 static int
447 signal_function_p (tree func)
448 {
449 tree a;
450
451 if (TREE_CODE (func) != FUNCTION_DECL)
452 return 0;
453
454 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
455 return a != NULL_TREE;
456 }
457
458 /* Return nonzero if FUNC is an OS_task function.  */
459
460 static int
461 avr_OS_task_function_p (tree func)
462 {
463 tree a;
464
465 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
466
467 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
468 return a != NULL_TREE;
469 }
470
471 /* Return nonzero if FUNC is an OS_main function.  */
472
473 static int
474 avr_OS_main_function_p (tree func)
475 {
476 tree a;
477
478 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
479
480 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
481 return a != NULL_TREE;
482 }
483
484 /* Return the number of hard registers to push/pop in the prologue/epilogue
485 of the current function, and optionally store these registers in SET. */
486
487 static int
488 avr_regs_to_save (HARD_REG_SET *set)
489 {
490 int reg, count;
491 int int_or_sig_p = (interrupt_function_p (current_function_decl)
492 || signal_function_p (current_function_decl));
493
494 if (!reload_completed)
495 cfun->machine->is_leaf = leaf_function_p ();
496
497 if (set)
498 CLEAR_HARD_REG_SET (*set);
499 count = 0;
500
501 /* No need to save any registers if the function never returns or
502      has the "OS_task" or "OS_main" attribute.  */
503 if (TREE_THIS_VOLATILE (current_function_decl)
504 || cfun->machine->is_OS_task
505 || cfun->machine->is_OS_main)
506 return 0;
507
508 for (reg = 0; reg < 32; reg++)
509 {
510 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
511 any global register variables. */
512 if (fixed_regs[reg])
513 continue;
514
515 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
516 || (df_regs_ever_live_p (reg)
517 && (int_or_sig_p || !call_used_regs[reg])
518 && !(frame_pointer_needed
519 && (reg == REG_Y || reg == (REG_Y+1)))))
520 {
521 if (set)
522 SET_HARD_REG_BIT (*set, reg);
523 count++;
524 }
525 }
526 return count;
527 }
528
529 /* Compute the offset between the registers FROM and TO for register elimination.  */
530
531 int
532 initial_elimination_offset (int from, int to)
533 {
534 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
535 return 0;
536 else
537 {
538 int offset = frame_pointer_needed ? 2 : 0;
539 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
540
541 offset += avr_regs_to_save (NULL);
542 return get_frame_size () + (avr_pc_size) + 1 + offset;
543 }
544 }
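
/* A worked example of the computation above: with a 2-byte program counter,
   the frame pointer needed, a 4-byte frame and three call-saved registers
   to push, the offset is 4 + 2 + 1 + (2 + 3) = 12 bytes.  */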
545
546 /* The actual start of the frame is virtual_stack_vars_rtx, which is offset
547    from the frame pointer by +STARTING_FRAME_OFFSET.
548    Using the saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
549    avoids creating an add/sub of the offset in nonlocal goto and setjmp.  */
550
551 rtx avr_builtin_setjmp_frame_value (void)
552 {
553 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
554 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
555 }
556
557 /* Return 1 if the function epilogue is just a single "ret". */
558
559 int
560 avr_simple_epilogue (void)
561 {
562 return (! frame_pointer_needed
563 && get_frame_size () == 0
564 && avr_regs_to_save (NULL) == 0
565 && ! interrupt_function_p (current_function_decl)
566 && ! signal_function_p (current_function_decl)
567 && ! avr_naked_function_p (current_function_decl)
568 && ! TREE_THIS_VOLATILE (current_function_decl));
569 }
570
571 /* Return the length of the sequence of live registers if they form one contiguous block, otherwise 0.  */
572
573 static int
574 sequent_regs_live (void)
575 {
576 int reg;
577 int live_seq=0;
578 int cur_seq=0;
579
580 for (reg = 0; reg < 18; ++reg)
581 {
582 if (!call_used_regs[reg])
583 {
584 if (df_regs_ever_live_p (reg))
585 {
586 ++live_seq;
587 ++cur_seq;
588 }
589 else
590 cur_seq = 0;
591 }
592 }
593
594 if (!frame_pointer_needed)
595 {
596 if (df_regs_ever_live_p (REG_Y))
597 {
598 ++live_seq;
599 ++cur_seq;
600 }
601 else
602 cur_seq = 0;
603
604 if (df_regs_ever_live_p (REG_Y+1))
605 {
606 ++live_seq;
607 ++cur_seq;
608 }
609 else
610 cur_seq = 0;
611 }
612 else
613 {
614 cur_seq += 2;
615 live_seq += 2;
616 }
617 return (cur_seq == live_seq) ? live_seq : 0;
618 }
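
/* A sketch of the behaviour above: if the only live call-saved registers in
   r0..r17 are r14..r17 and the frame pointer is needed, live_seq and
   cur_seq both end up as 4 + 2 and the function returns 6; if instead only
   r10 and r17 were live, cur_seq (3) would differ from live_seq (4) and the
   result would be 0.  */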
619
620 /* Return the total length of the sequence of insns INSNS.  */
621
622 int
623 get_sequence_length (rtx insns)
624 {
625 rtx insn;
626 int length;
627
628 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
629 length += get_attr_length (insn);
630
631 return length;
632 }
633
634 /* Emit RTL for the function prologue.  */
635
636 void
637 expand_prologue (void)
638 {
639 int live_seq;
640 HARD_REG_SET set;
641 int minimize;
642 HOST_WIDE_INT size = get_frame_size();
643 /* Define templates for push instructions. */
644 rtx pushbyte = gen_rtx_MEM (QImode,
645 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
646 rtx pushword = gen_rtx_MEM (HImode,
647 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
648 rtx insn;
649
650 last_insn_address = 0;
651
652 /* Init cfun->machine. */
653 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
654 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
655 cfun->machine->is_signal = signal_function_p (current_function_decl);
656 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
657 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
658
659 /* Prologue: naked. */
660 if (cfun->machine->is_naked)
661 {
662 return;
663 }
664
665 avr_regs_to_save (&set);
666 live_seq = sequent_regs_live ();
667 minimize = (TARGET_CALL_PROLOGUES
668 && !cfun->machine->is_interrupt
669 && !cfun->machine->is_signal
670 && !cfun->machine->is_OS_task
671 && !cfun->machine->is_OS_main
672 && live_seq);
673
674 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
675 {
676 if (cfun->machine->is_interrupt)
677 {
678 /* Enable interrupts. */
679 insn = emit_insn (gen_enable_interrupt ());
680 RTX_FRAME_RELATED_P (insn) = 1;
681 }
682
683 /* Push zero reg. */
684 insn = emit_move_insn (pushbyte, zero_reg_rtx);
685 RTX_FRAME_RELATED_P (insn) = 1;
686
687 /* Push tmp reg. */
688 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
689 RTX_FRAME_RELATED_P (insn) = 1;
690
691 /* Push SREG. */
692 insn = emit_move_insn (tmp_reg_rtx,
693 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
694 RTX_FRAME_RELATED_P (insn) = 1;
695 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
696 RTX_FRAME_RELATED_P (insn) = 1;
697
698 /* Push RAMPZ. */
699 if(AVR_HAVE_RAMPZ
700 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
701 {
702 insn = emit_move_insn (tmp_reg_rtx,
703 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
704 RTX_FRAME_RELATED_P (insn) = 1;
705 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
706 RTX_FRAME_RELATED_P (insn) = 1;
707 }
708
709 /* Clear zero reg. */
710 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
711 RTX_FRAME_RELATED_P (insn) = 1;
712
713 /* Prevent any attempt to delete the setting of ZERO_REG! */
714 emit_use (zero_reg_rtx);
715 }
716 if (minimize && (frame_pointer_needed
717 || (AVR_2_BYTE_PC && live_seq > 6)
718 || live_seq > 7))
719 {
720 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
721 gen_int_mode (size, HImode));
722 RTX_FRAME_RELATED_P (insn) = 1;
723
724 insn =
725 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
726 gen_int_mode (size + live_seq, HImode)));
727 RTX_FRAME_RELATED_P (insn) = 1;
728 }
729 else
730 {
731 int reg;
732 for (reg = 0; reg < 32; ++reg)
733 {
734 if (TEST_HARD_REG_BIT (set, reg))
735 {
736 /* Emit push of register to save. */
737 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
738 RTX_FRAME_RELATED_P (insn) = 1;
739 }
740 }
741 if (frame_pointer_needed)
742 {
743 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
744 {
745 /* Push frame pointer. */
746 insn = emit_move_insn (pushword, frame_pointer_rtx);
747 RTX_FRAME_RELATED_P (insn) = 1;
748 }
749
750 if (!size)
751 {
752 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
753 RTX_FRAME_RELATED_P (insn) = 1;
754 }
755 else
756 {
757 /* Creating a frame can be done by direct manipulation of the
758 stack or via the frame pointer. These two methods are:
759 fp=sp
760 fp-=size
761 sp=fp
762 OR
763 sp-=size
764 fp=sp
765 the optimum method depends on function type, stack and frame size.
766 To avoid a complex logic, both methods are tested and shortest
767 is selected. */
768 rtx myfp;
769 rtx fp_plus_insns;
770 rtx sp_plus_insns = NULL_RTX;
771
772 if (TARGET_TINY_STACK)
773 {
774 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
775 over 'sbiw' (2 cycles, same size). */
776 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
777 }
778 else
779 {
780 /* Normal sized addition. */
781 myfp = frame_pointer_rtx;
782 }
783
784 /* Method 1-Adjust frame pointer. */
785 start_sequence ();
786
787 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
788 RTX_FRAME_RELATED_P (insn) = 1;
789
790 insn =
791 emit_move_insn (myfp,
792 gen_rtx_PLUS (GET_MODE(myfp), myfp,
793 gen_int_mode (-size,
794 GET_MODE(myfp))));
795 RTX_FRAME_RELATED_P (insn) = 1;
796
797 /* Copy to stack pointer. */
798 if (TARGET_TINY_STACK)
799 {
800 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
801 RTX_FRAME_RELATED_P (insn) = 1;
802 }
803 else if (TARGET_NO_INTERRUPTS
804 || cfun->machine->is_signal
805 || cfun->machine->is_OS_main)
806 {
807 insn =
808 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
809 frame_pointer_rtx));
810 RTX_FRAME_RELATED_P (insn) = 1;
811 }
812 else if (cfun->machine->is_interrupt)
813 {
814 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
815 frame_pointer_rtx));
816 RTX_FRAME_RELATED_P (insn) = 1;
817 }
818 else
819 {
820 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
821 RTX_FRAME_RELATED_P (insn) = 1;
822 }
823
824 fp_plus_insns = get_insns ();
825 end_sequence ();
826
827 /* Method 2-Adjust Stack pointer. */
828 if (size <= 6)
829 {
830 start_sequence ();
831
832 insn =
833 emit_move_insn (stack_pointer_rtx,
834 gen_rtx_PLUS (HImode,
835 stack_pointer_rtx,
836 gen_int_mode (-size,
837 HImode)));
838 RTX_FRAME_RELATED_P (insn) = 1;
839
840 insn =
841 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
842 RTX_FRAME_RELATED_P (insn) = 1;
843
844 sp_plus_insns = get_insns ();
845 end_sequence ();
846 }
847
848 /* Use shortest method. */
849 if (size <= 6 && (get_sequence_length (sp_plus_insns)
850 < get_sequence_length (fp_plus_insns)))
851 emit_insn (sp_plus_insns);
852 else
853 emit_insn (fp_plus_insns);
854 }
855 }
856 }
857 }
858
859 /* Output summary at end of function prologue. */
860
861 static void
862 avr_asm_function_end_prologue (FILE *file)
863 {
864 if (cfun->machine->is_naked)
865 {
866 fputs ("/* prologue: naked */\n", file);
867 }
868 else
869 {
870 if (cfun->machine->is_interrupt)
871 {
872 fputs ("/* prologue: Interrupt */\n", file);
873 }
874 else if (cfun->machine->is_signal)
875 {
876 fputs ("/* prologue: Signal */\n", file);
877 }
878 else
879 fputs ("/* prologue: function */\n", file);
880 }
881 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
882 get_frame_size());
883 }
884
885
886 /* Implement EPILOGUE_USES. */
887
888 int
889 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
890 {
891 if (reload_completed
892 && cfun->machine
893 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
894 return 1;
895 return 0;
896 }
897
898 /* Emit RTL for the function epilogue.  */
899
900 void
901 expand_epilogue (void)
902 {
903 int reg;
904 int live_seq;
905 HARD_REG_SET set;
906 int minimize;
907 HOST_WIDE_INT size = get_frame_size();
908
909 /* epilogue: naked */
910 if (cfun->machine->is_naked)
911 {
912 emit_jump_insn (gen_return ());
913 return;
914 }
915
916 avr_regs_to_save (&set);
917 live_seq = sequent_regs_live ();
918 minimize = (TARGET_CALL_PROLOGUES
919 && !cfun->machine->is_interrupt
920 && !cfun->machine->is_signal
921 && !cfun->machine->is_OS_task
922 && !cfun->machine->is_OS_main
923 && live_seq);
924
925 if (minimize && (frame_pointer_needed || live_seq > 4))
926 {
927 if (frame_pointer_needed)
928 {
929 /* Get rid of frame. */
930 emit_move_insn(frame_pointer_rtx,
931 gen_rtx_PLUS (HImode, frame_pointer_rtx,
932 gen_int_mode (size, HImode)));
933 }
934 else
935 {
936 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
937 }
938
939 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
940 }
941 else
942 {
943 if (frame_pointer_needed)
944 {
945 if (size)
946 {
947 /* Try two methods to adjust stack and select shortest. */
948 rtx myfp;
949 rtx fp_plus_insns;
950 rtx sp_plus_insns = NULL_RTX;
951
952 if (TARGET_TINY_STACK)
953 {
954 /* The high byte (r29) doesn't change - prefer 'subi'
955 (1 cycle) over 'sbiw' (2 cycles, same size). */
956 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
957 }
958 else
959 {
960 /* Normal sized addition. */
961 myfp = frame_pointer_rtx;
962 }
963
964 /* Method 1-Adjust frame pointer. */
965 start_sequence ();
966
967 emit_move_insn (myfp,
968 gen_rtx_PLUS (HImode, myfp,
969 gen_int_mode (size,
970 GET_MODE(myfp))));
971
972 /* Copy to stack pointer. */
973 if (TARGET_TINY_STACK)
974 {
975 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
976 }
977 else if (TARGET_NO_INTERRUPTS
978 || cfun->machine->is_signal)
979 {
980 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
981 frame_pointer_rtx));
982 }
983 else if (cfun->machine->is_interrupt)
984 {
985 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
986 frame_pointer_rtx));
987 }
988 else
989 {
990 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
991 }
992
993 fp_plus_insns = get_insns ();
994 end_sequence ();
995
996 /* Method 2-Adjust Stack pointer. */
997 if (size <= 5)
998 {
999 start_sequence ();
1000
1001 emit_move_insn (stack_pointer_rtx,
1002 gen_rtx_PLUS (HImode, stack_pointer_rtx,
1003 gen_int_mode (size,
1004 HImode)));
1005
1006 sp_plus_insns = get_insns ();
1007 end_sequence ();
1008 }
1009
1010 /* Use shortest method. */
1011 if (size <= 5 && (get_sequence_length (sp_plus_insns)
1012 < get_sequence_length (fp_plus_insns)))
1013 emit_insn (sp_plus_insns);
1014 else
1015 emit_insn (fp_plus_insns);
1016 }
1017 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1018 {
1019 /* Restore previous frame_pointer. */
1020 emit_insn (gen_pophi (frame_pointer_rtx));
1021 }
1022 }
1023 /* Restore used registers. */
1024 for (reg = 31; reg >= 0; --reg)
1025 {
1026 if (TEST_HARD_REG_BIT (set, reg))
1027 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
1028 }
1029 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1030 {
1031 /* Restore RAMPZ using tmp reg as scratch. */
1032 if(AVR_HAVE_RAMPZ
1033 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
1034 {
1035 emit_insn (gen_popqi (tmp_reg_rtx));
1036 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
1037 tmp_reg_rtx);
1038 }
1039
1040 /* Restore SREG using tmp reg as scratch. */
1041 emit_insn (gen_popqi (tmp_reg_rtx));
1042
1043 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
1044 tmp_reg_rtx);
1045
1046 /* Restore tmp REG. */
1047 emit_insn (gen_popqi (tmp_reg_rtx));
1048
1049 /* Restore zero REG. */
1050 emit_insn (gen_popqi (zero_reg_rtx));
1051 }
1052
1053 emit_jump_insn (gen_return ());
1054 }
1055 }
1056
1057 /* Output summary messages at beginning of function epilogue. */
1058
1059 static void
1060 avr_asm_function_begin_epilogue (FILE *file)
1061 {
1062 fprintf (file, "/* epilogue start */\n");
1063 }
1064
1065 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1066 machine for a memory operand of mode MODE. */
1067
1068 int
1069 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
1070 {
1071 enum reg_class r = NO_REGS;
1072
1073 if (TARGET_ALL_DEBUG)
1074 {
1075 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1076 GET_MODE_NAME(mode),
1077 strict ? "(strict)": "",
1078 reload_completed ? "(reload_completed)": "",
1079 reload_in_progress ? "(reload_in_progress)": "",
1080 reg_renumber ? "(reg_renumber)" : "");
1081 if (GET_CODE (x) == PLUS
1082 && REG_P (XEXP (x, 0))
1083 && GET_CODE (XEXP (x, 1)) == CONST_INT
1084 && INTVAL (XEXP (x, 1)) >= 0
1085 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1086 && reg_renumber
1087 )
1088 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1089 true_regnum (XEXP (x, 0)));
1090 debug_rtx (x);
1091 }
1092 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1093 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1094 r = POINTER_REGS;
1095 else if (CONSTANT_ADDRESS_P (x))
1096 r = ALL_REGS;
1097 else if (GET_CODE (x) == PLUS
1098 && REG_P (XEXP (x, 0))
1099 && GET_CODE (XEXP (x, 1)) == CONST_INT
1100 && INTVAL (XEXP (x, 1)) >= 0)
1101 {
1102 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1103 if (fit)
1104 {
1105 if (! strict
1106 || REGNO (XEXP (x,0)) == REG_Y
1107 || REGNO (XEXP (x,0)) == REG_Z)
1108 r = BASE_POINTER_REGS;
1109 if (XEXP (x,0) == frame_pointer_rtx
1110 || XEXP (x,0) == arg_pointer_rtx)
1111 r = BASE_POINTER_REGS;
1112 }
1113 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1114 r = POINTER_Y_REGS;
1115 }
1116 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1117 && REG_P (XEXP (x, 0))
1118 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1119 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1120 {
1121 r = POINTER_REGS;
1122 }
1123 if (TARGET_ALL_DEBUG)
1124 {
1125 fprintf (stderr, " ret = %c\n", r + '0');
1126 }
1127 return r == NO_REGS ? 0 : (int)r;
1128 }
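
/* For instance, per the checks above: for QImode an address of the form
   (plus (reg Y) (const_int 10)) is legitimate through BASE_POINTER_REGS,
   because 10 fits in MAX_LD_OFFSET (QImode) == 63; in the strict case only
   the Y and Z registers (or the frame/arg pointer) are accepted as the base
   of such a reg+offset address.  */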
1129
1130 /* Attempts to replace X with a valid
1131 memory address for an operand of mode MODE */
1132
1133 rtx
1134 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1135 {
1136 x = oldx;
1137 if (TARGET_ALL_DEBUG)
1138 {
1139 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1140 debug_rtx (oldx);
1141 }
1142
1143 if (GET_CODE (oldx) == PLUS
1144 && REG_P (XEXP (oldx,0)))
1145 {
1146 if (REG_P (XEXP (oldx,1)))
1147 x = force_reg (GET_MODE (oldx), oldx);
1148 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1149 {
1150 int offs = INTVAL (XEXP (oldx,1));
1151 if (frame_pointer_rtx != XEXP (oldx,0))
1152 if (offs > MAX_LD_OFFSET (mode))
1153 {
1154 if (TARGET_ALL_DEBUG)
1155 fprintf (stderr, "force_reg (big offset)\n");
1156 x = force_reg (GET_MODE (oldx), oldx);
1157 }
1158 }
1159 }
1160 return x;
1161 }
1162
1163
1164 /* Return a pointer register name as a string. */
1165
1166 static const char *
1167 ptrreg_to_str (int regno)
1168 {
1169 switch (regno)
1170 {
1171 case REG_X: return "X";
1172 case REG_Y: return "Y";
1173 case REG_Z: return "Z";
1174 default:
1175 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1176 }
1177 return NULL;
1178 }
1179
1180 /* Return the condition name as a string.
1181    Used when constructing conditional jumps.  */
1182
1183 static const char *
1184 cond_string (enum rtx_code code)
1185 {
1186 switch (code)
1187 {
1188 case NE:
1189 return "ne";
1190 case EQ:
1191 return "eq";
1192 case GE:
1193 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1194 return "pl";
1195 else
1196 return "ge";
1197 case LT:
1198 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1199 return "mi";
1200 else
1201 return "lt";
1202 case GEU:
1203 return "sh";
1204 case LTU:
1205 return "lo";
1206 default:
1207 gcc_unreachable ();
1208 }
1209 }
1210
1211 /* Output ADDR to FILE as an address.  */
1212
1213 void
1214 print_operand_address (FILE *file, rtx addr)
1215 {
1216 switch (GET_CODE (addr))
1217 {
1218 case REG:
1219 fprintf (file, ptrreg_to_str (REGNO (addr)));
1220 break;
1221
1222 case PRE_DEC:
1223 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1224 break;
1225
1226 case POST_INC:
1227 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1228 break;
1229
1230 default:
1231 if (CONSTANT_ADDRESS_P (addr)
1232 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1233 || GET_CODE (addr) == LABEL_REF))
1234 {
1235 fprintf (file, "gs(");
1236 output_addr_const (file,addr);
1237 fprintf (file ,")");
1238 }
1239 else
1240 output_addr_const (file, addr);
1241 }
1242 }
1243
1244
1245 /* Output X to file FILE as an assembler operand.  */
1246
1247 void
1248 print_operand (FILE *file, rtx x, int code)
1249 {
1250 int abcd = 0;
1251
1252 if (code >= 'A' && code <= 'D')
1253 abcd = code - 'A';
1254
1255 if (code == '~')
1256 {
1257 if (!AVR_HAVE_JMP_CALL)
1258 fputc ('r', file);
1259 }
1260 else if (code == '!')
1261 {
1262 if (AVR_HAVE_EIJMP_EICALL)
1263 fputc ('e', file);
1264 }
1265 else if (REG_P (x))
1266 {
1267 if (x == zero_reg_rtx)
1268 fprintf (file, "__zero_reg__");
1269 else
1270 fprintf (file, reg_names[true_regnum (x) + abcd]);
1271 }
1272 else if (GET_CODE (x) == CONST_INT)
1273 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1274 else if (GET_CODE (x) == MEM)
1275 {
1276 rtx addr = XEXP (x,0);
1277
1278 if (CONSTANT_P (addr) && abcd)
1279 {
1280 fputc ('(', file);
1281 output_address (addr);
1282 fprintf (file, ")+%d", abcd);
1283 }
1284 else if (code == 'o')
1285 {
1286 if (GET_CODE (addr) != PLUS)
1287 fatal_insn ("bad address, not (reg+disp):", addr);
1288
1289 print_operand (file, XEXP (addr, 1), 0);
1290 }
1291 else if (code == 'p' || code == 'r')
1292 {
1293 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1294 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1295
1296 if (code == 'p')
1297 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1298 else
1299 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1300 }
1301 else if (GET_CODE (addr) == PLUS)
1302 {
1303 print_operand_address (file, XEXP (addr,0));
1304 if (REGNO (XEXP (addr, 0)) == REG_X)
1305 fatal_insn ("internal compiler error. Bad address:"
1306 ,addr);
1307 fputc ('+', file);
1308 print_operand (file, XEXP (addr,1), code);
1309 }
1310 else
1311 print_operand_address (file, addr);
1312 }
1313 else if (GET_CODE (x) == CONST_DOUBLE)
1314 {
1315 long val;
1316 REAL_VALUE_TYPE rv;
1317 if (GET_MODE (x) != SFmode)
1318 fatal_insn ("internal compiler error. Unknown mode:", x);
1319 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1320 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1321 fprintf (file, "0x%lx", val);
1322 }
1323 else if (code == 'j')
1324 fputs (cond_string (GET_CODE (x)), file);
1325 else if (code == 'k')
1326 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1327 else
1328 print_operand_address (file, x);
1329 }
1330
1331 /* Update the condition code in the INSN. */
1332
1333 void
1334 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1335 {
1336 rtx set;
1337
1338 switch (get_attr_cc (insn))
1339 {
1340 case CC_NONE:
1341 /* Insn does not affect CC at all. */
1342 break;
1343
1344 case CC_SET_N:
1345 CC_STATUS_INIT;
1346 break;
1347
1348 case CC_SET_ZN:
1349 set = single_set (insn);
1350 CC_STATUS_INIT;
1351 if (set)
1352 {
1353 cc_status.flags |= CC_NO_OVERFLOW;
1354 cc_status.value1 = SET_DEST (set);
1355 }
1356 break;
1357
1358 case CC_SET_CZN:
1359 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1360 The V flag may or may not be known but that's ok because
1361 alter_cond will change tests to use EQ/NE. */
1362 set = single_set (insn);
1363 CC_STATUS_INIT;
1364 if (set)
1365 {
1366 cc_status.value1 = SET_DEST (set);
1367 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1368 }
1369 break;
1370
1371 case CC_COMPARE:
1372 set = single_set (insn);
1373 CC_STATUS_INIT;
1374 if (set)
1375 cc_status.value1 = SET_SRC (set);
1376 break;
1377
1378 case CC_CLOBBER:
1379 /* Insn doesn't leave CC in a usable state. */
1380 CC_STATUS_INIT;
1381
1382       /* Correct CC for an ashrqi3 whose shift count is a CONST_INT other than 6.  */
1383 set = single_set (insn);
1384 if (set)
1385 {
1386 rtx src = SET_SRC (set);
1387
1388 if (GET_CODE (src) == ASHIFTRT
1389 && GET_MODE (src) == QImode)
1390 {
1391 rtx x = XEXP (src, 1);
1392
1393 if (GET_CODE (x) == CONST_INT
1394 && INTVAL (x) > 0
1395 && INTVAL (x) != 6)
1396 {
1397 cc_status.value1 = SET_DEST (set);
1398 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1399 }
1400 }
1401 }
1402 break;
1403 }
1404 }
1405
1406 /* Return maximum number of consecutive registers of
1407 class CLASS needed to hold a value of mode MODE. */
1408
1409 int
1410 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1411 {
1412 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1413 }
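
/* For example, assuming AVR's UNITS_PER_WORD of 1, an SImode value needs
   (4 + 1 - 1) / 1 = 4 consecutive registers and an HImode value needs 2.  */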
1414
1415 /* Choose mode for jump insn:
1416 1 - relative jump in range -63 <= x <= 62 ;
1417 2 - relative jump in range -2046 <= x <= 2045 ;
1418 3 - absolute jump (only for ATmega[16]03). */
1419
1420 int
1421 avr_jump_mode (rtx x, rtx insn)
1422 {
1423 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1424 ? XEXP (x, 0) : x));
1425 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1426 int jump_distance = cur_addr - dest_addr;
1427
1428 if (-63 <= jump_distance && jump_distance <= 62)
1429 return 1;
1430 else if (-2046 <= jump_distance && jump_distance <= 2045)
1431 return 2;
1432 else if (AVR_HAVE_JMP_CALL)
1433 return 3;
1434
1435 return 2;
1436 }
1437
1438 /* Return an AVR conditional branch command.
1439    X is a comparison RTX.
1440    LEN is a number returned by the avr_jump_mode function.
1441    If REVERSE is nonzero, the condition code in X must be reversed.  */
1442
1443 const char *
1444 ret_cond_branch (rtx x, int len, int reverse)
1445 {
1446 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1447
1448 switch (cond)
1449 {
1450 case GT:
1451 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1452 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1453 AS1 (brpl,%0)) :
1454 len == 2 ? (AS1 (breq,.+4) CR_TAB
1455 AS1 (brmi,.+2) CR_TAB
1456 AS1 (rjmp,%0)) :
1457 (AS1 (breq,.+6) CR_TAB
1458 AS1 (brmi,.+4) CR_TAB
1459 AS1 (jmp,%0)));
1460
1461 else
1462 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1463 AS1 (brge,%0)) :
1464 len == 2 ? (AS1 (breq,.+4) CR_TAB
1465 AS1 (brlt,.+2) CR_TAB
1466 AS1 (rjmp,%0)) :
1467 (AS1 (breq,.+6) CR_TAB
1468 AS1 (brlt,.+4) CR_TAB
1469 AS1 (jmp,%0)));
1470 case GTU:
1471 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1472 AS1 (brsh,%0)) :
1473 len == 2 ? (AS1 (breq,.+4) CR_TAB
1474 AS1 (brlo,.+2) CR_TAB
1475 AS1 (rjmp,%0)) :
1476 (AS1 (breq,.+6) CR_TAB
1477 AS1 (brlo,.+4) CR_TAB
1478 AS1 (jmp,%0)));
1479 case LE:
1480 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1481 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1482 AS1 (brmi,%0)) :
1483 len == 2 ? (AS1 (breq,.+2) CR_TAB
1484 AS1 (brpl,.+2) CR_TAB
1485 AS1 (rjmp,%0)) :
1486 (AS1 (breq,.+2) CR_TAB
1487 AS1 (brpl,.+4) CR_TAB
1488 AS1 (jmp,%0)));
1489 else
1490 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1491 AS1 (brlt,%0)) :
1492 len == 2 ? (AS1 (breq,.+2) CR_TAB
1493 AS1 (brge,.+2) CR_TAB
1494 AS1 (rjmp,%0)) :
1495 (AS1 (breq,.+2) CR_TAB
1496 AS1 (brge,.+4) CR_TAB
1497 AS1 (jmp,%0)));
1498 case LEU:
1499 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1500 AS1 (brlo,%0)) :
1501 len == 2 ? (AS1 (breq,.+2) CR_TAB
1502 AS1 (brsh,.+2) CR_TAB
1503 AS1 (rjmp,%0)) :
1504 (AS1 (breq,.+2) CR_TAB
1505 AS1 (brsh,.+4) CR_TAB
1506 AS1 (jmp,%0)));
1507 default:
1508 if (reverse)
1509 {
1510 switch (len)
1511 {
1512 case 1:
1513 return AS1 (br%k1,%0);
1514 case 2:
1515 return (AS1 (br%j1,.+2) CR_TAB
1516 AS1 (rjmp,%0));
1517 default:
1518 return (AS1 (br%j1,.+4) CR_TAB
1519 AS1 (jmp,%0));
1520 }
1521 }
1522 else
1523 {
1524 switch (len)
1525 {
1526 case 1:
1527 return AS1 (br%j1,%0);
1528 case 2:
1529 return (AS1 (br%k1,.+2) CR_TAB
1530 AS1 (rjmp,%0));
1531 default:
1532 return (AS1 (br%k1,.+4) CR_TAB
1533 AS1 (jmp,%0));
1534 }
1535 }
1536 }
1537 return "";
1538 }
1539
1540 /* Predicate for an immediate operand that fits into one byte (8 bits).  */
1541
1542 int
1543 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1544 {
1545 return (GET_CODE (op) == CONST_INT
1546 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1547 }
1548
1549 /* Output all insn addresses and their sizes into the assembly language
1550 output file. This is helpful for debugging whether the length attributes
1551 in the md file are correct.
1552 Output insn cost for next insn. */
1553
1554 void
1555 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1556 int num_operands ATTRIBUTE_UNUSED)
1557 {
1558 int uid = INSN_UID (insn);
1559
1560 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1561 {
1562 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1563 INSN_ADDRESSES (uid),
1564 INSN_ADDRESSES (uid) - last_insn_address,
1565 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1566 }
1567 last_insn_address = INSN_ADDRESSES (uid);
1568 }
1569
1570 /* Return 0 if undefined, 1 if always true or always false. */
1571
1572 int
1573 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1574 {
1575 unsigned int max = (mode == QImode ? 0xff :
1576 mode == HImode ? 0xffff :
1577 mode == SImode ? 0xffffffff : 0);
1578 if (max && op && GET_CODE (x) == CONST_INT)
1579 {
1580 if (unsigned_condition (op) != op)
1581 max >>= 1;
1582
1583 if (max != (INTVAL (x) & max)
1584 && INTVAL (x) != 0xff)
1585 return 1;
1586 }
1587 return 0;
1588 }
1589
1590
1591 /* Returns nonzero if REGNO is the number of a hard
1592 register in which function arguments are sometimes passed. */
1593
1594 int
1595 function_arg_regno_p(int r)
1596 {
1597 return (r >= 8 && r <= 25);
1598 }
1599
1600 /* Initialize the variable CUM with the state at the beginning
1601    of the argument list.  */
1602
1603 void
1604 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1605 tree fndecl ATTRIBUTE_UNUSED)
1606 {
1607 cum->nregs = 18;
1608 cum->regno = FIRST_CUM_REG;
1609 if (!libname && fntype)
1610 {
1611 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1612 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1613 != void_type_node));
1614 if (stdarg)
1615 cum->nregs = 0;
1616 }
1617 }
1618
1619 /* Returns the number of registers to allocate for a function argument. */
1620
1621 static int
1622 avr_num_arg_regs (enum machine_mode mode, tree type)
1623 {
1624 int size;
1625
1626 if (mode == BLKmode)
1627 size = int_size_in_bytes (type);
1628 else
1629 size = GET_MODE_SIZE (mode);
1630
1631 /* Align all function arguments to start in even-numbered registers.
1632 Odd-sized arguments leave holes above them. */
1633
1634 return (size + 1) & ~1;
1635 }
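
/* A short worked example of the rounding above: a 3-byte argument yields
   (3 + 1) & ~1 = 4, i.e. it occupies four registers and leaves one unused
   hole, while a 2-byte argument yields exactly 2.  */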
1636
1637 /* Controls whether a function argument is passed
1638 in a register, and which register. */
1639
1640 rtx
1641 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1642 int named ATTRIBUTE_UNUSED)
1643 {
1644 int bytes = avr_num_arg_regs (mode, type);
1645
1646 if (cum->nregs && bytes <= cum->nregs)
1647 return gen_rtx_REG (mode, cum->regno - bytes);
1648
1649 return NULL_RTX;
1650 }
1651
1652 /* Update the summarizer variable CUM to advance past an argument
1653 in the argument list. */
1654
1655 void
1656 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1657 int named ATTRIBUTE_UNUSED)
1658 {
1659 int bytes = avr_num_arg_regs (mode, type);
1660
1661 cum->nregs -= bytes;
1662 cum->regno -= bytes;
1663
1664 if (cum->nregs <= 0)
1665 {
1666 cum->nregs = 0;
1667 cum->regno = FIRST_CUM_REG;
1668 }
1669 }
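
/* A sketch of how the two functions above cooperate: the first 2-byte
   argument is passed in gen_rtx_REG (HImode, 24), i.e. r25:r24, because
   cum->regno starts at FIRST_CUM_REG (26) and the chosen register number is
   cum->regno - bytes; function_arg_advance then lowers cum->regno to 24 for
   the next argument.  */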
1670
1671 /***********************************************************************
1672  Functions for outputting various moves for various modes
1673 ************************************************************************/
1674 const char *
1675 output_movqi (rtx insn, rtx operands[], int *l)
1676 {
1677 int dummy;
1678 rtx dest = operands[0];
1679 rtx src = operands[1];
1680 int *real_l = l;
1681
1682 if (!l)
1683 l = &dummy;
1684
1685 *l = 1;
1686
1687 if (register_operand (dest, QImode))
1688 {
1689 if (register_operand (src, QImode)) /* mov r,r */
1690 {
1691 if (test_hard_reg_class (STACK_REG, dest))
1692 return AS2 (out,%0,%1);
1693 else if (test_hard_reg_class (STACK_REG, src))
1694 return AS2 (in,%0,%1);
1695
1696 return AS2 (mov,%0,%1);
1697 }
1698 else if (CONSTANT_P (src))
1699 {
1700 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1701 return AS2 (ldi,%0,lo8(%1));
1702
1703 if (GET_CODE (src) == CONST_INT)
1704 {
1705 if (src == const0_rtx) /* mov r,L */
1706 return AS1 (clr,%0);
1707 else if (src == const1_rtx)
1708 {
1709 *l = 2;
1710 return (AS1 (clr,%0) CR_TAB
1711 AS1 (inc,%0));
1712 }
1713 else if (src == constm1_rtx)
1714 {
1715 		  /* Load the immediate constant -1 into any register.  */
1716 *l = 2;
1717 return (AS1 (clr,%0) CR_TAB
1718 AS1 (dec,%0));
1719 }
1720 else
1721 {
1722 int bit_nr = exact_log2 (INTVAL (src));
1723
1724 if (bit_nr >= 0)
1725 {
1726 *l = 3;
1727 if (!real_l)
1728 output_asm_insn ((AS1 (clr,%0) CR_TAB
1729 "set"), operands);
1730 if (!real_l)
1731 avr_output_bld (operands, bit_nr);
1732
1733 return "";
1734 }
1735 }
1736 }
1737
1738 /* Last resort, larger than loading from memory. */
1739 *l = 4;
1740 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1741 AS2 (ldi,r31,lo8(%1)) CR_TAB
1742 AS2 (mov,%0,r31) CR_TAB
1743 AS2 (mov,r31,__tmp_reg__));
1744 }
1745 else if (GET_CODE (src) == MEM)
1746 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1747 }
1748 else if (GET_CODE (dest) == MEM)
1749 {
1750 const char *templ;
1751
1752 if (src == const0_rtx)
1753 operands[1] = zero_reg_rtx;
1754
1755 templ = out_movqi_mr_r (insn, operands, real_l);
1756
1757 if (!real_l)
1758 output_asm_insn (templ, operands);
1759
1760 operands[1] = src;
1761 }
1762 return "";
1763 }
1764
1765
1766 const char *
1767 output_movhi (rtx insn, rtx operands[], int *l)
1768 {
1769 int dummy;
1770 rtx dest = operands[0];
1771 rtx src = operands[1];
1772 int *real_l = l;
1773
1774 if (!l)
1775 l = &dummy;
1776
1777 if (register_operand (dest, HImode))
1778 {
1779 if (register_operand (src, HImode)) /* mov r,r */
1780 {
1781 if (test_hard_reg_class (STACK_REG, dest))
1782 {
1783 if (TARGET_TINY_STACK)
1784 return *l = 1, AS2 (out,__SP_L__,%A1);
1785 /* Use simple load of stack pointer if no interrupts are
1786 used. */
1787 else if (TARGET_NO_INTERRUPTS)
1788 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1789 AS2 (out,__SP_L__,%A1));
1790 *l = 5;
1791 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1792 "cli" CR_TAB
1793 AS2 (out,__SP_H__,%B1) CR_TAB
1794 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1795 AS2 (out,__SP_L__,%A1));
1796 }
1797 else if (test_hard_reg_class (STACK_REG, src))
1798 {
1799 *l = 2;
1800 return (AS2 (in,%A0,__SP_L__) CR_TAB
1801 AS2 (in,%B0,__SP_H__));
1802 }
1803
1804 if (AVR_HAVE_MOVW)
1805 {
1806 *l = 1;
1807 return (AS2 (movw,%0,%1));
1808 }
1809 else
1810 {
1811 *l = 2;
1812 return (AS2 (mov,%A0,%A1) CR_TAB
1813 AS2 (mov,%B0,%B1));
1814 }
1815 }
1816 else if (CONSTANT_P (src))
1817 {
1818 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1819 {
1820 *l = 2;
1821 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1822 AS2 (ldi,%B0,hi8(%1)));
1823 }
1824
1825 if (GET_CODE (src) == CONST_INT)
1826 {
1827 if (src == const0_rtx) /* mov r,L */
1828 {
1829 *l = 2;
1830 return (AS1 (clr,%A0) CR_TAB
1831 AS1 (clr,%B0));
1832 }
1833 else if (src == const1_rtx)
1834 {
1835 *l = 3;
1836 return (AS1 (clr,%A0) CR_TAB
1837 AS1 (clr,%B0) CR_TAB
1838 AS1 (inc,%A0));
1839 }
1840 else if (src == constm1_rtx)
1841 {
1842 	      /* Load the immediate constant -1 into any register.  */
1843 *l = 3;
1844 return (AS1 (clr,%0) CR_TAB
1845 AS1 (dec,%A0) CR_TAB
1846 AS2 (mov,%B0,%A0));
1847 }
1848 else
1849 {
1850 int bit_nr = exact_log2 (INTVAL (src));
1851
1852 if (bit_nr >= 0)
1853 {
1854 *l = 4;
1855 if (!real_l)
1856 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1857 AS1 (clr,%B0) CR_TAB
1858 "set"), operands);
1859 if (!real_l)
1860 avr_output_bld (operands, bit_nr);
1861
1862 return "";
1863 }
1864 }
1865
1866 if ((INTVAL (src) & 0xff) == 0)
1867 {
1868 *l = 5;
1869 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1870 AS1 (clr,%A0) CR_TAB
1871 AS2 (ldi,r31,hi8(%1)) CR_TAB
1872 AS2 (mov,%B0,r31) CR_TAB
1873 AS2 (mov,r31,__tmp_reg__));
1874 }
1875 else if ((INTVAL (src) & 0xff00) == 0)
1876 {
1877 *l = 5;
1878 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1879 AS2 (ldi,r31,lo8(%1)) CR_TAB
1880 AS2 (mov,%A0,r31) CR_TAB
1881 AS1 (clr,%B0) CR_TAB
1882 AS2 (mov,r31,__tmp_reg__));
1883 }
1884 }
1885
1886 /* Last resort, equal to loading from memory. */
1887 *l = 6;
1888 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1889 AS2 (ldi,r31,lo8(%1)) CR_TAB
1890 AS2 (mov,%A0,r31) CR_TAB
1891 AS2 (ldi,r31,hi8(%1)) CR_TAB
1892 AS2 (mov,%B0,r31) CR_TAB
1893 AS2 (mov,r31,__tmp_reg__));
1894 }
1895 else if (GET_CODE (src) == MEM)
1896 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1897 }
1898 else if (GET_CODE (dest) == MEM)
1899 {
1900 const char *templ;
1901
1902 if (src == const0_rtx)
1903 operands[1] = zero_reg_rtx;
1904
1905 templ = out_movhi_mr_r (insn, operands, real_l);
1906
1907 if (!real_l)
1908 output_asm_insn (templ, operands);
1909
1910 operands[1] = src;
1911 return "";
1912 }
1913 fatal_insn ("invalid insn:", insn);
1914 return "";
1915 }
1916
1917 const char *
1918 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1919 {
1920 rtx dest = op[0];
1921 rtx src = op[1];
1922 rtx x = XEXP (src, 0);
1923 int dummy;
1924
1925 if (!l)
1926 l = &dummy;
1927
1928 if (CONSTANT_ADDRESS_P (x))
1929 {
1930 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1931 {
1932 *l = 1;
1933 return AS2 (in,%0,__SREG__);
1934 }
1935 if (optimize > 0 && io_address_operand (x, QImode))
1936 {
1937 *l = 1;
1938 return AS2 (in,%0,%1-0x20);
1939 }
1940 *l = 2;
1941 return AS2 (lds,%0,%1);
1942 }
1943   /* Memory access by reg+disp.  */
1944 else if (GET_CODE (x) == PLUS
1945 && REG_P (XEXP (x,0))
1946 && GET_CODE (XEXP (x,1)) == CONST_INT)
1947 {
1948 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1949 {
1950 int disp = INTVAL (XEXP (x,1));
1951 if (REGNO (XEXP (x,0)) != REG_Y)
1952 fatal_insn ("incorrect insn:",insn);
1953
1954 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1955 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1956 AS2 (ldd,%0,Y+63) CR_TAB
1957 AS2 (sbiw,r28,%o1-63));
1958
1959 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1960 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1961 AS2 (ld,%0,Y) CR_TAB
1962 AS2 (subi,r28,lo8(%o1)) CR_TAB
1963 AS2 (sbci,r29,hi8(%o1)));
1964 }
1965 else if (REGNO (XEXP (x,0)) == REG_X)
1966 {
1967 	  /* This is a paranoid case; LEGITIMIZE_RELOAD_ADDRESS must exclude
1968 	     it, but it can still occur with extreme optimization options.  */
1969 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1970 || reg_unused_after (insn, XEXP (x,0)))
1971 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1972 AS2 (ld,%0,X));
1973
1974 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1975 AS2 (ld,%0,X) CR_TAB
1976 AS2 (sbiw,r26,%o1));
1977 }
1978 *l = 1;
1979 return AS2 (ldd,%0,%1);
1980 }
1981 *l = 1;
1982 return AS2 (ld,%0,%1);
1983 }
1984
1985 const char *
1986 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1987 {
1988 rtx dest = op[0];
1989 rtx src = op[1];
1990 rtx base = XEXP (src, 0);
1991 int reg_dest = true_regnum (dest);
1992 int reg_base = true_regnum (base);
1993 /* "volatile" forces reading low byte first, even if less efficient,
1994 for correct operation with 16-bit I/O registers. */
1995 int mem_volatile_p = MEM_VOLATILE_P (src);
1996 int tmp;
1997
1998 if (!l)
1999 l = &tmp;
2000
2001 if (reg_base > 0)
2002 {
2003 if (reg_dest == reg_base) /* R = (R) */
2004 {
2005 *l = 3;
2006 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2007 AS2 (ld,%B0,%1) CR_TAB
2008 AS2 (mov,%A0,__tmp_reg__));
2009 }
2010 else if (reg_base == REG_X) /* (R26) */
2011 {
2012 if (reg_unused_after (insn, base))
2013 {
2014 *l = 2;
2015 return (AS2 (ld,%A0,X+) CR_TAB
2016 AS2 (ld,%B0,X));
2017 }
2018 *l = 3;
2019 return (AS2 (ld,%A0,X+) CR_TAB
2020 AS2 (ld,%B0,X) CR_TAB
2021 AS2 (sbiw,r26,1));
2022 }
2023 else /* (R) */
2024 {
2025 *l = 2;
2026 return (AS2 (ld,%A0,%1) CR_TAB
2027 AS2 (ldd,%B0,%1+1));
2028 }
2029 }
2030 else if (GET_CODE (base) == PLUS) /* (R + i) */
2031 {
2032 int disp = INTVAL (XEXP (base, 1));
2033 int reg_base = true_regnum (XEXP (base, 0));
2034
2035 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2036 {
2037 if (REGNO (XEXP (base, 0)) != REG_Y)
2038 fatal_insn ("incorrect insn:",insn);
2039
2040 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2041 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2042 AS2 (ldd,%A0,Y+62) CR_TAB
2043 AS2 (ldd,%B0,Y+63) CR_TAB
2044 AS2 (sbiw,r28,%o1-62));
2045
2046 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2047 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2048 AS2 (ld,%A0,Y) CR_TAB
2049 AS2 (ldd,%B0,Y+1) CR_TAB
2050 AS2 (subi,r28,lo8(%o1)) CR_TAB
2051 AS2 (sbci,r29,hi8(%o1)));
2052 }
2053 if (reg_base == REG_X)
2054 {
2055 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2056 it, but this situation still occurs with extreme
2057 optimization options. */
2058
2059 *l = 4;
2060 if (reg_base == reg_dest)
2061 return (AS2 (adiw,r26,%o1) CR_TAB
2062 AS2 (ld,__tmp_reg__,X+) CR_TAB
2063 AS2 (ld,%B0,X) CR_TAB
2064 AS2 (mov,%A0,__tmp_reg__));
2065
2066 return (AS2 (adiw,r26,%o1) CR_TAB
2067 AS2 (ld,%A0,X+) CR_TAB
2068 AS2 (ld,%B0,X) CR_TAB
2069 AS2 (sbiw,r26,%o1+1));
2070 }
2071
2072 if (reg_base == reg_dest)
2073 {
2074 *l = 3;
2075 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2076 AS2 (ldd,%B0,%B1) CR_TAB
2077 AS2 (mov,%A0,__tmp_reg__));
2078 }
2079
2080 *l = 2;
2081 return (AS2 (ldd,%A0,%A1) CR_TAB
2082 AS2 (ldd,%B0,%B1));
2083 }
2084 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2085 {
2086 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2087 fatal_insn ("incorrect insn:", insn);
2088
2089 if (mem_volatile_p)
2090 {
2091 if (REGNO (XEXP (base, 0)) == REG_X)
2092 {
2093 *l = 4;
2094 return (AS2 (sbiw,r26,2) CR_TAB
2095 AS2 (ld,%A0,X+) CR_TAB
2096 AS2 (ld,%B0,X) CR_TAB
2097 AS2 (sbiw,r26,1));
2098 }
2099 else
2100 {
2101 *l = 3;
2102 return (AS2 (sbiw,%r1,2) CR_TAB
2103 AS2 (ld,%A0,%p1) CR_TAB
2104 AS2 (ldd,%B0,%p1+1));
2105 }
2106 }
2107
2108 *l = 2;
2109 return (AS2 (ld,%B0,%1) CR_TAB
2110 AS2 (ld,%A0,%1));
2111 }
2112 else if (GET_CODE (base) == POST_INC) /* (R++) */
2113 {
2114 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2115 fatal_insn ("incorrect insn:", insn);
2116
2117 *l = 2;
2118 return (AS2 (ld,%A0,%1) CR_TAB
2119 AS2 (ld,%B0,%1));
2120 }
2121 else if (CONSTANT_ADDRESS_P (base))
2122 {
2123 if (optimize > 0 && io_address_operand (base, HImode))
2124 {
2125 *l = 2;
2126 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2127 AS2 (in,%B0,%B1-0x20));
2128 }
2129 *l = 4;
2130 return (AS2 (lds,%A0,%A1) CR_TAB
2131 AS2 (lds,%B0,%B1));
2132 }
2133
2134 fatal_insn ("unknown move insn:",insn);
2135 return "";
2136 }
2137
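/* Output asm to load the SImode value at memory address OP[1] into
   register OP[0].  If L is nonzero, store the length of the sequence
   (in words) there.  */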
2138 const char *
2139 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2140 {
2141 rtx dest = op[0];
2142 rtx src = op[1];
2143 rtx base = XEXP (src, 0);
2144 int reg_dest = true_regnum (dest);
2145 int reg_base = true_regnum (base);
2146 int tmp;
2147
2148 if (!l)
2149 l = &tmp;
2150
2151 if (reg_base > 0)
2152 {
2153 if (reg_base == REG_X) /* (R26) */
2154 {
2155 if (reg_dest == REG_X)
2156 /* "ld r26,-X" is undefined */
2157 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2158 AS2 (ld,r29,X) CR_TAB
2159 AS2 (ld,r28,-X) CR_TAB
2160 AS2 (ld,__tmp_reg__,-X) CR_TAB
2161 AS2 (sbiw,r26,1) CR_TAB
2162 AS2 (ld,r26,X) CR_TAB
2163 AS2 (mov,r27,__tmp_reg__));
2164 else if (reg_dest == REG_X - 2)
2165 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2166 AS2 (ld,%B0,X+) CR_TAB
2167 AS2 (ld,__tmp_reg__,X+) CR_TAB
2168 AS2 (ld,%D0,X) CR_TAB
2169 AS2 (mov,%C0,__tmp_reg__));
2170 else if (reg_unused_after (insn, base))
2171 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2172 AS2 (ld,%B0,X+) CR_TAB
2173 AS2 (ld,%C0,X+) CR_TAB
2174 AS2 (ld,%D0,X));
2175 else
2176 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2177 AS2 (ld,%B0,X+) CR_TAB
2178 AS2 (ld,%C0,X+) CR_TAB
2179 AS2 (ld,%D0,X) CR_TAB
2180 AS2 (sbiw,r26,3));
2181 }
2182 else
2183 {
2184 if (reg_dest == reg_base)
2185 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2186 AS2 (ldd,%C0,%1+2) CR_TAB
2187 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2188 AS2 (ld,%A0,%1) CR_TAB
2189 AS2 (mov,%B0,__tmp_reg__));
2190 else if (reg_base == reg_dest + 2)
2191 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2192 AS2 (ldd,%B0,%1+1) CR_TAB
2193 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2194 AS2 (ldd,%D0,%1+3) CR_TAB
2195 AS2 (mov,%C0,__tmp_reg__));
2196 else
2197 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2198 AS2 (ldd,%B0,%1+1) CR_TAB
2199 AS2 (ldd,%C0,%1+2) CR_TAB
2200 AS2 (ldd,%D0,%1+3));
2201 }
2202 }
2203 else if (GET_CODE (base) == PLUS) /* (R + i) */
2204 {
2205 int disp = INTVAL (XEXP (base, 1));
2206
2207 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2208 {
2209 if (REGNO (XEXP (base, 0)) != REG_Y)
2210 fatal_insn ("incorrect insn:",insn);
2211
2212 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2213 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2214 AS2 (ldd,%A0,Y+60) CR_TAB
2215 AS2 (ldd,%B0,Y+61) CR_TAB
2216 AS2 (ldd,%C0,Y+62) CR_TAB
2217 AS2 (ldd,%D0,Y+63) CR_TAB
2218 AS2 (sbiw,r28,%o1-60));
2219
2220 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2221 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2222 AS2 (ld,%A0,Y) CR_TAB
2223 AS2 (ldd,%B0,Y+1) CR_TAB
2224 AS2 (ldd,%C0,Y+2) CR_TAB
2225 AS2 (ldd,%D0,Y+3) CR_TAB
2226 AS2 (subi,r28,lo8(%o1)) CR_TAB
2227 AS2 (sbci,r29,hi8(%o1)));
2228 }
2229
2230 reg_base = true_regnum (XEXP (base, 0));
2231 if (reg_base == REG_X)
2232 {
2233 /* R = (X + d) */
2234 if (reg_dest == REG_X)
2235 {
2236 *l = 7;
2237 /* "ld r26,-X" is undefined */
2238 return (AS2 (adiw,r26,%o1+3) CR_TAB
2239 AS2 (ld,r29,X) CR_TAB
2240 AS2 (ld,r28,-X) CR_TAB
2241 AS2 (ld,__tmp_reg__,-X) CR_TAB
2242 AS2 (sbiw,r26,1) CR_TAB
2243 AS2 (ld,r26,X) CR_TAB
2244 AS2 (mov,r27,__tmp_reg__));
2245 }
2246 *l = 6;
2247 if (reg_dest == REG_X - 2)
2248 return (AS2 (adiw,r26,%o1) CR_TAB
2249 AS2 (ld,r24,X+) CR_TAB
2250 AS2 (ld,r25,X+) CR_TAB
2251 AS2 (ld,__tmp_reg__,X+) CR_TAB
2252 AS2 (ld,r27,X) CR_TAB
2253 AS2 (mov,r26,__tmp_reg__));
2254
2255 return (AS2 (adiw,r26,%o1) CR_TAB
2256 AS2 (ld,%A0,X+) CR_TAB
2257 AS2 (ld,%B0,X+) CR_TAB
2258 AS2 (ld,%C0,X+) CR_TAB
2259 AS2 (ld,%D0,X) CR_TAB
2260 AS2 (sbiw,r26,%o1+3));
2261 }
2262 if (reg_dest == reg_base)
2263 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2264 AS2 (ldd,%C0,%C1) CR_TAB
2265 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2266 AS2 (ldd,%A0,%A1) CR_TAB
2267 AS2 (mov,%B0,__tmp_reg__));
2268 else if (reg_dest == reg_base - 2)
2269 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2270 AS2 (ldd,%B0,%B1) CR_TAB
2271 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2272 AS2 (ldd,%D0,%D1) CR_TAB
2273 AS2 (mov,%C0,__tmp_reg__));
2274 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2275 AS2 (ldd,%B0,%B1) CR_TAB
2276 AS2 (ldd,%C0,%C1) CR_TAB
2277 AS2 (ldd,%D0,%D1));
2278 }
2279 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2280 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2281 AS2 (ld,%C0,%1) CR_TAB
2282 AS2 (ld,%B0,%1) CR_TAB
2283 AS2 (ld,%A0,%1));
2284 else if (GET_CODE (base) == POST_INC) /* (R++) */
2285 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2286 AS2 (ld,%B0,%1) CR_TAB
2287 AS2 (ld,%C0,%1) CR_TAB
2288 AS2 (ld,%D0,%1));
2289 else if (CONSTANT_ADDRESS_P (base))
2290 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2291 AS2 (lds,%B0,%B1) CR_TAB
2292 AS2 (lds,%C0,%C1) CR_TAB
2293 AS2 (lds,%D0,%D1));
2294
2295 fatal_insn ("unknown move insn:",insn);
2296 return "";
2297 }
2298
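/* Output asm to store the SImode value in register OP[1] to memory
   address OP[0].  If L is nonzero, store the length of the sequence
   (in words) there.  */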
2299 const char *
2300 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2301 {
2302 rtx dest = op[0];
2303 rtx src = op[1];
2304 rtx base = XEXP (dest, 0);
2305 int reg_base = true_regnum (base);
2306 int reg_src = true_regnum (src);
2307 int tmp;
2308
2309 if (!l)
2310 l = &tmp;
2311
2312 if (CONSTANT_ADDRESS_P (base))
2313 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2314 AS2 (sts,%B0,%B1) CR_TAB
2315 AS2 (sts,%C0,%C1) CR_TAB
2316 AS2 (sts,%D0,%D1));
2317 if (reg_base > 0) /* (r) */
2318 {
2319 if (reg_base == REG_X) /* (R26) */
2320 {
2321 if (reg_src == REG_X)
2322 {
2323 /* "st X+,r26" is undefined */
2324 if (reg_unused_after (insn, base))
2325 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2326 AS2 (st,X,r26) CR_TAB
2327 AS2 (adiw,r26,1) CR_TAB
2328 AS2 (st,X+,__tmp_reg__) CR_TAB
2329 AS2 (st,X+,r28) CR_TAB
2330 AS2 (st,X,r29));
2331 else
2332 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2333 AS2 (st,X,r26) CR_TAB
2334 AS2 (adiw,r26,1) CR_TAB
2335 AS2 (st,X+,__tmp_reg__) CR_TAB
2336 AS2 (st,X+,r28) CR_TAB
2337 AS2 (st,X,r29) CR_TAB
2338 AS2 (sbiw,r26,3));
2339 }
2340 else if (reg_base == reg_src + 2)
2341 {
2342 if (reg_unused_after (insn, base))
2343 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2344 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2345 AS2 (st,%0+,%A1) CR_TAB
2346 AS2 (st,%0+,%B1) CR_TAB
2347 AS2 (st,%0+,__zero_reg__) CR_TAB
2348 AS2 (st,%0,__tmp_reg__) CR_TAB
2349 AS1 (clr,__zero_reg__));
2350 else
2351 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2352 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2353 AS2 (st,%0+,%A1) CR_TAB
2354 AS2 (st,%0+,%B1) CR_TAB
2355 AS2 (st,%0+,__zero_reg__) CR_TAB
2356 AS2 (st,%0,__tmp_reg__) CR_TAB
2357 AS1 (clr,__zero_reg__) CR_TAB
2358 AS2 (sbiw,r26,3));
2359 }
2360 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2361 AS2 (st,%0+,%B1) CR_TAB
2362 AS2 (st,%0+,%C1) CR_TAB
2363 AS2 (st,%0,%D1) CR_TAB
2364 AS2 (sbiw,r26,3));
2365 }
2366 else
2367 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2368 AS2 (std,%0+1,%B1) CR_TAB
2369 AS2 (std,%0+2,%C1) CR_TAB
2370 AS2 (std,%0+3,%D1));
2371 }
2372 else if (GET_CODE (base) == PLUS) /* (R + i) */
2373 {
2374 int disp = INTVAL (XEXP (base, 1));
2375 reg_base = REGNO (XEXP (base, 0));
2376 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2377 {
2378 if (reg_base != REG_Y)
2379 fatal_insn ("incorrect insn:",insn);
2380
2381 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2382 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2383 AS2 (std,Y+60,%A1) CR_TAB
2384 AS2 (std,Y+61,%B1) CR_TAB
2385 AS2 (std,Y+62,%C1) CR_TAB
2386 AS2 (std,Y+63,%D1) CR_TAB
2387 AS2 (sbiw,r28,%o0-60));
2388
2389 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2390 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2391 AS2 (st,Y,%A1) CR_TAB
2392 AS2 (std,Y+1,%B1) CR_TAB
2393 AS2 (std,Y+2,%C1) CR_TAB
2394 AS2 (std,Y+3,%D1) CR_TAB
2395 AS2 (subi,r28,lo8(%o0)) CR_TAB
2396 AS2 (sbci,r29,hi8(%o0)));
2397 }
2398 if (reg_base == REG_X)
2399 {
2400 /* (X + d) = R */
2401 if (reg_src == REG_X)
2402 {
2403 *l = 9;
2404 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2405 AS2 (mov,__zero_reg__,r27) CR_TAB
2406 AS2 (adiw,r26,%o0) CR_TAB
2407 AS2 (st,X+,__tmp_reg__) CR_TAB
2408 AS2 (st,X+,__zero_reg__) CR_TAB
2409 AS2 (st,X+,r28) CR_TAB
2410 AS2 (st,X,r29) CR_TAB
2411 AS1 (clr,__zero_reg__) CR_TAB
2412 AS2 (sbiw,r26,%o0+3));
2413 }
2414 else if (reg_src == REG_X - 2)
2415 {
2416 *l = 9;
2417 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2418 AS2 (mov,__zero_reg__,r27) CR_TAB
2419 AS2 (adiw,r26,%o0) CR_TAB
2420 AS2 (st,X+,r24) CR_TAB
2421 AS2 (st,X+,r25) CR_TAB
2422 AS2 (st,X+,__tmp_reg__) CR_TAB
2423 AS2 (st,X,__zero_reg__) CR_TAB
2424 AS1 (clr,__zero_reg__) CR_TAB
2425 AS2 (sbiw,r26,%o0+3));
2426 }
2427 *l = 6;
2428 return (AS2 (adiw,r26,%o0) CR_TAB
2429 AS2 (st,X+,%A1) CR_TAB
2430 AS2 (st,X+,%B1) CR_TAB
2431 AS2 (st,X+,%C1) CR_TAB
2432 AS2 (st,X,%D1) CR_TAB
2433 AS2 (sbiw,r26,%o0+3));
2434 }
2435 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2436 AS2 (std,%B0,%B1) CR_TAB
2437 AS2 (std,%C0,%C1) CR_TAB
2438 AS2 (std,%D0,%D1));
2439 }
2440 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2441 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2442 AS2 (st,%0,%C1) CR_TAB
2443 AS2 (st,%0,%B1) CR_TAB
2444 AS2 (st,%0,%A1));
2445 else if (GET_CODE (base) == POST_INC) /* (R++) */
2446 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2447 AS2 (st,%0,%B1) CR_TAB
2448 AS2 (st,%0,%C1) CR_TAB
2449 AS2 (st,%0,%D1));
2450 fatal_insn ("unknown move insn:",insn);
2451 return "";
2452 }
2453
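/* Output asm for an SImode or SFmode move from OPERANDS[1] to
   OPERANDS[0]: register-to-register, constant load, or memory access
   (the latter handled by out_movsi_r_mr and out_movsi_mr_r).  If L is
   nonzero, store the length of the sequence (in words) there.  */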
2454 const char *
2455 output_movsisf(rtx insn, rtx operands[], int *l)
2456 {
2457 int dummy;
2458 rtx dest = operands[0];
2459 rtx src = operands[1];
2460 int *real_l = l;
2461
2462 if (!l)
2463 l = &dummy;
2464
2465 if (register_operand (dest, VOIDmode))
2466 {
2467 if (register_operand (src, VOIDmode)) /* mov r,r */
2468 {
2469 if (true_regnum (dest) > true_regnum (src))
2470 {
2471 if (AVR_HAVE_MOVW)
2472 {
2473 *l = 2;
2474 return (AS2 (movw,%C0,%C1) CR_TAB
2475 AS2 (movw,%A0,%A1));
2476 }
2477 *l = 4;
2478 return (AS2 (mov,%D0,%D1) CR_TAB
2479 AS2 (mov,%C0,%C1) CR_TAB
2480 AS2 (mov,%B0,%B1) CR_TAB
2481 AS2 (mov,%A0,%A1));
2482 }
2483 else
2484 {
2485 if (AVR_HAVE_MOVW)
2486 {
2487 *l = 2;
2488 return (AS2 (movw,%A0,%A1) CR_TAB
2489 AS2 (movw,%C0,%C1));
2490 }
2491 *l = 4;
2492 return (AS2 (mov,%A0,%A1) CR_TAB
2493 AS2 (mov,%B0,%B1) CR_TAB
2494 AS2 (mov,%C0,%C1) CR_TAB
2495 AS2 (mov,%D0,%D1));
2496 }
2497 }
2498 else if (CONSTANT_P (src))
2499 {
2500 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2501 {
2502 *l = 4;
2503 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2504 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2505 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2506 AS2 (ldi,%D0,hhi8(%1)));
2507 }
2508
2509 if (GET_CODE (src) == CONST_INT)
2510 {
2511 const char *const clr_op0 =
2512 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2513 AS1 (clr,%B0) CR_TAB
2514 AS2 (movw,%C0,%A0))
2515 : (AS1 (clr,%A0) CR_TAB
2516 AS1 (clr,%B0) CR_TAB
2517 AS1 (clr,%C0) CR_TAB
2518 AS1 (clr,%D0));
2519
2520 if (src == const0_rtx) /* mov r,L */
2521 {
2522 *l = AVR_HAVE_MOVW ? 3 : 4;
2523 return clr_op0;
2524 }
2525 else if (src == const1_rtx)
2526 {
2527 if (!real_l)
2528 output_asm_insn (clr_op0, operands);
2529 *l = AVR_HAVE_MOVW ? 4 : 5;
2530 return AS1 (inc,%A0);
2531 }
2532 else if (src == constm1_rtx)
2533 {
2534 /* Load the immediate constant -1 into any register. */
2535 if (AVR_HAVE_MOVW)
2536 {
2537 *l = 4;
2538 return (AS1 (clr,%A0) CR_TAB
2539 AS1 (dec,%A0) CR_TAB
2540 AS2 (mov,%B0,%A0) CR_TAB
2541 AS2 (movw,%C0,%A0));
2542 }
2543 *l = 5;
2544 return (AS1 (clr,%A0) CR_TAB
2545 AS1 (dec,%A0) CR_TAB
2546 AS2 (mov,%B0,%A0) CR_TAB
2547 AS2 (mov,%C0,%A0) CR_TAB
2548 AS2 (mov,%D0,%A0));
2549 }
2550 else
2551 {
2552 int bit_nr = exact_log2 (INTVAL (src));
2553
2554 if (bit_nr >= 0)
2555 {
2556 *l = AVR_HAVE_MOVW ? 5 : 6;
2557 if (!real_l)
2558 {
2559 output_asm_insn (clr_op0, operands);
2560 output_asm_insn ("set", operands);
2561 }
2562 if (!real_l)
2563 avr_output_bld (operands, bit_nr);
2564
2565 return "";
2566 }
2567 }
2568 }
2569
2570 /* Last resort, better than loading from memory. */
2571 *l = 10;
2572 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2573 AS2 (ldi,r31,lo8(%1)) CR_TAB
2574 AS2 (mov,%A0,r31) CR_TAB
2575 AS2 (ldi,r31,hi8(%1)) CR_TAB
2576 AS2 (mov,%B0,r31) CR_TAB
2577 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2578 AS2 (mov,%C0,r31) CR_TAB
2579 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2580 AS2 (mov,%D0,r31) CR_TAB
2581 AS2 (mov,r31,__tmp_reg__));
2582 }
2583 else if (GET_CODE (src) == MEM)
2584 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2585 }
2586 else if (GET_CODE (dest) == MEM)
2587 {
2588 const char *templ;
2589
2590 if (src == const0_rtx)
2591 operands[1] = zero_reg_rtx;
2592
2593 templ = out_movsi_mr_r (insn, operands, real_l);
2594
2595 if (!real_l)
2596 output_asm_insn (templ, operands);
2597
2598 operands[1] = src;
2599 return "";
2600 }
2601 fatal_insn ("invalid insn:", insn);
2602 return "";
2603 }
2604
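/* Output asm to store the QImode value in register OP[1] to memory
   address OP[0].  If L is nonzero, store the length of the sequence
   (in words) there.  */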
2605 const char *
2606 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2607 {
2608 rtx dest = op[0];
2609 rtx src = op[1];
2610 rtx x = XEXP (dest, 0);
2611 int dummy;
2612
2613 if (!l)
2614 l = &dummy;
2615
2616 if (CONSTANT_ADDRESS_P (x))
2617 {
2618 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2619 {
2620 *l = 1;
2621 return AS2 (out,__SREG__,%1);
2622 }
2623 if (optimize > 0 && io_address_operand (x, QImode))
2624 {
2625 *l = 1;
2626 return AS2 (out,%0-0x20,%1);
2627 }
2628 *l = 2;
2629 return AS2 (sts,%0,%1);
2630 }
2631 /* memory access by reg+disp */
2632 else if (GET_CODE (x) == PLUS
2633 && REG_P (XEXP (x,0))
2634 && GET_CODE (XEXP (x,1)) == CONST_INT)
2635 {
2636 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2637 {
2638 int disp = INTVAL (XEXP (x,1));
2639 if (REGNO (XEXP (x,0)) != REG_Y)
2640 fatal_insn ("incorrect insn:",insn);
2641
2642 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2643 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2644 AS2 (std,Y+63,%1) CR_TAB
2645 AS2 (sbiw,r28,%o0-63));
2646
2647 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2648 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2649 AS2 (st,Y,%1) CR_TAB
2650 AS2 (subi,r28,lo8(%o0)) CR_TAB
2651 AS2 (sbci,r29,hi8(%o0)));
2652 }
2653 else if (REGNO (XEXP (x,0)) == REG_X)
2654 {
2655 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2656 {
2657 if (reg_unused_after (insn, XEXP (x,0)))
2658 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2659 AS2 (adiw,r26,%o0) CR_TAB
2660 AS2 (st,X,__tmp_reg__));
2661
2662 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2663 AS2 (adiw,r26,%o0) CR_TAB
2664 AS2 (st,X,__tmp_reg__) CR_TAB
2665 AS2 (sbiw,r26,%o0));
2666 }
2667 else
2668 {
2669 if (reg_unused_after (insn, XEXP (x,0)))
2670 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2671 AS2 (st,X,%1));
2672
2673 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2674 AS2 (st,X,%1) CR_TAB
2675 AS2 (sbiw,r26,%o0));
2676 }
2677 }
2678 *l = 1;
2679 return AS2 (std,%0,%1);
2680 }
2681 *l = 1;
2682 return AS2 (st,%0,%1);
2683 }
2684
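/* Output asm to store the HImode value in register pair OP[1] to
   memory address OP[0].  If L is nonzero, store the length of the
   sequence (in words) there.  */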
2685 const char *
2686 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2687 {
2688 rtx dest = op[0];
2689 rtx src = op[1];
2690 rtx base = XEXP (dest, 0);
2691 int reg_base = true_regnum (base);
2692 int reg_src = true_regnum (src);
2693 /* "volatile" forces writing high byte first, even if less efficient,
2694 for correct operation with 16-bit I/O registers. */
2695 int mem_volatile_p = MEM_VOLATILE_P (dest);
2696 int tmp;
2697
2698 if (!l)
2699 l = &tmp;
2700 if (CONSTANT_ADDRESS_P (base))
2701 {
2702 if (optimize > 0 && io_address_operand (base, HImode))
2703 {
2704 *l = 2;
2705 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2706 AS2 (out,%A0-0x20,%A1));
2707 }
2708 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2709 AS2 (sts,%A0,%A1));
2710 }
2711 if (reg_base > 0)
2712 {
2713 if (reg_base == REG_X)
2714 {
2715 if (reg_src == REG_X)
2716 {
2717 /* "st X+,r26" and "st -X,r26" are undefined. */
2718 if (!mem_volatile_p && reg_unused_after (insn, src))
2719 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2720 AS2 (st,X,r26) CR_TAB
2721 AS2 (adiw,r26,1) CR_TAB
2722 AS2 (st,X,__tmp_reg__));
2723 else
2724 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2725 AS2 (adiw,r26,1) CR_TAB
2726 AS2 (st,X,__tmp_reg__) CR_TAB
2727 AS2 (sbiw,r26,1) CR_TAB
2728 AS2 (st,X,r26));
2729 }
2730 else
2731 {
2732 if (!mem_volatile_p && reg_unused_after (insn, base))
2733 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2734 AS2 (st,X,%B1));
2735 else
2736 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2737 AS2 (st,X,%B1) CR_TAB
2738 AS2 (st,-X,%A1));
2739 }
2740 }
2741 else
2742 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2743 AS2 (st,%0,%A1));
2744 }
2745 else if (GET_CODE (base) == PLUS)
2746 {
2747 int disp = INTVAL (XEXP (base, 1));
2748 reg_base = REGNO (XEXP (base, 0));
2749 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2750 {
2751 if (reg_base != REG_Y)
2752 fatal_insn ("incorrect insn:",insn);
2753
2754 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2755 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2756 AS2 (std,Y+63,%B1) CR_TAB
2757 AS2 (std,Y+62,%A1) CR_TAB
2758 AS2 (sbiw,r28,%o0-62));
2759
2760 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2761 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2762 AS2 (std,Y+1,%B1) CR_TAB
2763 AS2 (st,Y,%A1) CR_TAB
2764 AS2 (subi,r28,lo8(%o0)) CR_TAB
2765 AS2 (sbci,r29,hi8(%o0)));
2766 }
2767 if (reg_base == REG_X)
2768 {
2769 /* (X + d) = R */
2770 if (reg_src == REG_X)
2771 {
2772 *l = 7;
2773 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2774 AS2 (mov,__zero_reg__,r27) CR_TAB
2775 AS2 (adiw,r26,%o0+1) CR_TAB
2776 AS2 (st,X,__zero_reg__) CR_TAB
2777 AS2 (st,-X,__tmp_reg__) CR_TAB
2778 AS1 (clr,__zero_reg__) CR_TAB
2779 AS2 (sbiw,r26,%o0));
2780 }
2781 *l = 4;
2782 return (AS2 (adiw,r26,%o0+1) CR_TAB
2783 AS2 (st,X,%B1) CR_TAB
2784 AS2 (st,-X,%A1) CR_TAB
2785 AS2 (sbiw,r26,%o0));
2786 }
2787 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2788 AS2 (std,%A0,%A1));
2789 }
2790 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2791 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2792 AS2 (st,%0,%A1));
2793 else if (GET_CODE (base) == POST_INC) /* (R++) */
2794 {
2795 if (mem_volatile_p)
2796 {
2797 if (REGNO (XEXP (base, 0)) == REG_X)
2798 {
2799 *l = 4;
2800 return (AS2 (adiw,r26,1) CR_TAB
2801 AS2 (st,X,%B1) CR_TAB
2802 AS2 (st,-X,%A1) CR_TAB
2803 AS2 (adiw,r26,2));
2804 }
2805 else
2806 {
2807 *l = 3;
2808 return (AS2 (std,%p0+1,%B1) CR_TAB
2809 AS2 (st,%p0,%A1) CR_TAB
2810 AS2 (adiw,%r0,2));
2811 }
2812 }
2813
2814 *l = 2;
2815 return (AS2 (st,%0,%A1) CR_TAB
2816 AS2 (st,%0,%B1));
2817 }
2818 fatal_insn ("unknown move insn:",insn);
2819 return "";
2820 }
2821
2822 /* Return 1 if a frame pointer is required for the current function. */
2823
2824 int
2825 frame_pointer_required_p (void)
2826 {
2827 return (cfun->calls_alloca
2828 || crtl->args.info.nregs == 0
2829 || get_frame_size () > 0);
2830 }
2831
2832 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2833
2834 static RTX_CODE
2835 compare_condition (rtx insn)
2836 {
2837 rtx next = next_real_insn (insn);
2838 RTX_CODE cond = UNKNOWN;
2839 if (next && GET_CODE (next) == JUMP_INSN)
2840 {
2841 rtx pat = PATTERN (next);
2842 rtx src = SET_SRC (pat);
2843 rtx t = XEXP (src, 0);
2844 cond = GET_CODE (t);
2845 }
2846 return cond;
2847 }
2848
2849 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2850
2851 static int
2852 compare_sign_p (rtx insn)
2853 {
2854 RTX_CODE cond = compare_condition (insn);
2855 return (cond == GE || cond == LT);
2856 }
2857
2858 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2859 that needs to be swapped (GT, GTU, LE, LEU). */
2860
2861 int
2862 compare_diff_p (rtx insn)
2863 {
2864 RTX_CODE cond = compare_condition (insn);
2865 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2866 }
2867
2868 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2869
2870 int
2871 compare_eq_p (rtx insn)
2872 {
2873 RTX_CODE cond = compare_condition (insn);
2874 return (cond == EQ || cond == NE);
2875 }
2876
2877
2878 /* Output test instruction for HImode. */
2879
2880 const char *
2881 out_tsthi (rtx insn, int *l)
2882 {
2883 if (compare_sign_p (insn))
2884 {
2885 if (l) *l = 1;
2886 return AS1 (tst,%B0);
2887 }
2888 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2889 && compare_eq_p (insn))
2890 {
2891 /* Faster than sbiw if we can clobber the operand. */
2892 if (l) *l = 1;
2893 return AS2 (or,%A0,%B0);
2894 }
2895 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2896 {
2897 if (l) *l = 1;
2898 return AS2 (sbiw,%0,0);
2899 }
2900 if (l) *l = 2;
2901 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2902 AS2 (cpc,%B0,__zero_reg__));
2903 }
2904
2905
2906 /* Output test instruction for SImode. */
2907
2908 const char *
2909 out_tstsi (rtx insn, int *l)
2910 {
2911 if (compare_sign_p (insn))
2912 {
2913 if (l) *l = 1;
2914 return AS1 (tst,%D0);
2915 }
2916 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2917 {
2918 if (l) *l = 3;
2919 return (AS2 (sbiw,%A0,0) CR_TAB
2920 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2921 AS2 (cpc,%D0,__zero_reg__));
2922 }
2923 if (l) *l = 4;
2924 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2925 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2926 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2927 AS2 (cpc,%D0,__zero_reg__));
2928 }
2929
2930
2931 /* Generate asm equivalent for various shifts.
2932 Shift count is a CONST_INT, MEM or REG.
2933 This only handles cases that are not already
2934 carefully hand-optimized in ?sh??i3_out. */
2935
2936 void
2937 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2938 int *len, int t_len)
2939 {
2940 rtx op[10];
2941 char str[500];
2942 int second_label = 1;
2943 int saved_in_tmp = 0;
2944 int use_zero_reg = 0;
2945
2946 op[0] = operands[0];
2947 op[1] = operands[1];
2948 op[2] = operands[2];
2949 op[3] = operands[3];
2950 str[0] = 0;
2951
2952 if (len)
2953 *len = 1;
2954
2955 if (GET_CODE (operands[2]) == CONST_INT)
2956 {
2957 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2958 int count = INTVAL (operands[2]);
2959 int max_len = 10; /* If larger than this, always use a loop. */
2960
2961 if (count <= 0)
2962 {
2963 if (len)
2964 *len = 0;
2965 return;
2966 }
2967
2968 if (count < 8 && !scratch)
2969 use_zero_reg = 1;
2970
2971 if (optimize_size)
2972 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2973
2974 if (t_len * count <= max_len)
2975 {
2976 /* Output shifts inline with no loop - faster. */
2977 if (len)
2978 *len = t_len * count;
2979 else
2980 {
2981 while (count-- > 0)
2982 output_asm_insn (templ, op);
2983 }
2984
2985 return;
2986 }
2987
2988 if (scratch)
2989 {
2990 if (!len)
2991 strcat (str, AS2 (ldi,%3,%2));
2992 }
2993 else if (use_zero_reg)
2994 {
2995 /* Hack to save one word: use __zero_reg__ as loop counter.
2996 Set one bit, then shift in a loop until it is 0 again. */
2997
2998 op[3] = zero_reg_rtx;
2999 if (len)
3000 *len = 2;
3001 else
3002 strcat (str, ("set" CR_TAB
3003 AS2 (bld,%3,%2-1)));
3004 }
3005 else
3006 {
3007 /* No scratch register available, use one from LD_REGS (saved in
3008 __tmp_reg__) that doesn't overlap with registers to shift. */
3009
3010 op[3] = gen_rtx_REG (QImode,
3011 ((true_regnum (operands[0]) - 1) & 15) + 16);
3012 op[4] = tmp_reg_rtx;
3013 saved_in_tmp = 1;
3014
3015 if (len)
3016 *len = 3; /* Includes "mov %3,%4" after the loop. */
3017 else
3018 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3019 AS2 (ldi,%3,%2)));
3020 }
3021
3022 second_label = 0;
3023 }
3024 else if (GET_CODE (operands[2]) == MEM)
3025 {
3026 rtx op_mov[10];
3027
3028 op[3] = op_mov[0] = tmp_reg_rtx;
3029 op_mov[1] = op[2];
3030
3031 if (len)
3032 out_movqi_r_mr (insn, op_mov, len);
3033 else
3034 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3035 }
3036 else if (register_operand (operands[2], QImode))
3037 {
3038 if (reg_unused_after (insn, operands[2]))
3039 op[3] = op[2];
3040 else
3041 {
3042 op[3] = tmp_reg_rtx;
3043 if (!len)
3044 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3045 }
3046 }
3047 else
3048 fatal_insn ("bad shift insn:", insn);
3049
3050 if (second_label)
3051 {
3052 if (len)
3053 ++*len;
3054 else
3055 strcat (str, AS1 (rjmp,2f));
3056 }
3057
3058 if (len)
3059 *len += t_len + 2; /* template + dec + brXX */
3060 else
3061 {
3062 strcat (str, "\n1:\t");
3063 strcat (str, templ);
3064 strcat (str, second_label ? "\n2:\t" : "\n\t");
3065 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3066 strcat (str, CR_TAB);
3067 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3068 if (saved_in_tmp)
3069 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3070 output_asm_insn (str, op);
3071 }
3072 }
3073
3074
3075 /* 8bit shift left ((char)x << i) */
3076
3077 const char *
3078 ashlqi3_out (rtx insn, rtx operands[], int *len)
3079 {
3080 if (GET_CODE (operands[2]) == CONST_INT)
3081 {
3082 int k;
3083
3084 if (!len)
3085 len = &k;
3086
3087 switch (INTVAL (operands[2]))
3088 {
3089 default:
3090 if (INTVAL (operands[2]) < 8)
3091 break;
3092
3093 *len = 1;
3094 return AS1 (clr,%0);
3095
3096 case 1:
3097 *len = 1;
3098 return AS1 (lsl,%0);
3099
3100 case 2:
3101 *len = 2;
3102 return (AS1 (lsl,%0) CR_TAB
3103 AS1 (lsl,%0));
3104
3105 case 3:
3106 *len = 3;
3107 return (AS1 (lsl,%0) CR_TAB
3108 AS1 (lsl,%0) CR_TAB
3109 AS1 (lsl,%0));
3110
3111 case 4:
3112 if (test_hard_reg_class (LD_REGS, operands[0]))
3113 {
3114 *len = 2;
3115 return (AS1 (swap,%0) CR_TAB
3116 AS2 (andi,%0,0xf0));
3117 }
3118 *len = 4;
3119 return (AS1 (lsl,%0) CR_TAB
3120 AS1 (lsl,%0) CR_TAB
3121 AS1 (lsl,%0) CR_TAB
3122 AS1 (lsl,%0));
3123
3124 case 5:
3125 if (test_hard_reg_class (LD_REGS, operands[0]))
3126 {
3127 *len = 3;
3128 return (AS1 (swap,%0) CR_TAB
3129 AS1 (lsl,%0) CR_TAB
3130 AS2 (andi,%0,0xe0));
3131 }
3132 *len = 5;
3133 return (AS1 (lsl,%0) CR_TAB
3134 AS1 (lsl,%0) CR_TAB
3135 AS1 (lsl,%0) CR_TAB
3136 AS1 (lsl,%0) CR_TAB
3137 AS1 (lsl,%0));
3138
3139 case 6:
3140 if (test_hard_reg_class (LD_REGS, operands[0]))
3141 {
3142 *len = 4;
3143 return (AS1 (swap,%0) CR_TAB
3144 AS1 (lsl,%0) CR_TAB
3145 AS1 (lsl,%0) CR_TAB
3146 AS2 (andi,%0,0xc0));
3147 }
3148 *len = 6;
3149 return (AS1 (lsl,%0) CR_TAB
3150 AS1 (lsl,%0) CR_TAB
3151 AS1 (lsl,%0) CR_TAB
3152 AS1 (lsl,%0) CR_TAB
3153 AS1 (lsl,%0) CR_TAB
3154 AS1 (lsl,%0));
3155
3156 case 7:
3157 *len = 3;
3158 return (AS1 (ror,%0) CR_TAB
3159 AS1 (clr,%0) CR_TAB
3160 AS1 (ror,%0));
3161 }
3162 }
3163 else if (CONSTANT_P (operands[2]))
3164 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3165
3166 out_shift_with_cnt (AS1 (lsl,%0),
3167 insn, operands, len, 1);
3168 return "";
3169 }
3170
3171
3172 /* 16bit shift left ((short)x << i) */
3173
3174 const char *
3175 ashlhi3_out (rtx insn, rtx operands[], int *len)
3176 {
3177 if (GET_CODE (operands[2]) == CONST_INT)
3178 {
3179 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3180 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3181 int k;
3182 int *t = len;
3183
3184 if (!len)
3185 len = &k;
3186
3187 switch (INTVAL (operands[2]))
3188 {
3189 default:
3190 if (INTVAL (operands[2]) < 16)
3191 break;
3192
3193 *len = 2;
3194 return (AS1 (clr,%B0) CR_TAB
3195 AS1 (clr,%A0));
3196
3197 case 4:
3198 if (optimize_size && scratch)
3199 break; /* 5 */
3200 if (ldi_ok)
3201 {
3202 *len = 6;
3203 return (AS1 (swap,%A0) CR_TAB
3204 AS1 (swap,%B0) CR_TAB
3205 AS2 (andi,%B0,0xf0) CR_TAB
3206 AS2 (eor,%B0,%A0) CR_TAB
3207 AS2 (andi,%A0,0xf0) CR_TAB
3208 AS2 (eor,%B0,%A0));
3209 }
3210 if (scratch)
3211 {
3212 *len = 7;
3213 return (AS1 (swap,%A0) CR_TAB
3214 AS1 (swap,%B0) CR_TAB
3215 AS2 (ldi,%3,0xf0) CR_TAB
3216 AS2 (and,%B0,%3) CR_TAB
3217 AS2 (eor,%B0,%A0) CR_TAB
3218 AS2 (and,%A0,%3) CR_TAB
3219 AS2 (eor,%B0,%A0));
3220 }
3221 break; /* optimize_size ? 6 : 8 */
3222
3223 case 5:
3224 if (optimize_size)
3225 break; /* scratch ? 5 : 6 */
3226 if (ldi_ok)
3227 {
3228 *len = 8;
3229 return (AS1 (lsl,%A0) CR_TAB
3230 AS1 (rol,%B0) CR_TAB
3231 AS1 (swap,%A0) CR_TAB
3232 AS1 (swap,%B0) CR_TAB
3233 AS2 (andi,%B0,0xf0) CR_TAB
3234 AS2 (eor,%B0,%A0) CR_TAB
3235 AS2 (andi,%A0,0xf0) CR_TAB
3236 AS2 (eor,%B0,%A0));
3237 }
3238 if (scratch)
3239 {
3240 *len = 9;
3241 return (AS1 (lsl,%A0) CR_TAB
3242 AS1 (rol,%B0) CR_TAB
3243 AS1 (swap,%A0) CR_TAB
3244 AS1 (swap,%B0) CR_TAB
3245 AS2 (ldi,%3,0xf0) CR_TAB
3246 AS2 (and,%B0,%3) CR_TAB
3247 AS2 (eor,%B0,%A0) CR_TAB
3248 AS2 (and,%A0,%3) CR_TAB
3249 AS2 (eor,%B0,%A0));
3250 }
3251 break; /* 10 */
3252
3253 case 6:
3254 if (optimize_size)
3255 break; /* scratch ? 5 : 6 */
3256 *len = 9;
3257 return (AS1 (clr,__tmp_reg__) CR_TAB
3258 AS1 (lsr,%B0) CR_TAB
3259 AS1 (ror,%A0) CR_TAB
3260 AS1 (ror,__tmp_reg__) CR_TAB
3261 AS1 (lsr,%B0) CR_TAB
3262 AS1 (ror,%A0) CR_TAB
3263 AS1 (ror,__tmp_reg__) CR_TAB
3264 AS2 (mov,%B0,%A0) CR_TAB
3265 AS2 (mov,%A0,__tmp_reg__));
3266
3267 case 7:
3268 *len = 5;
3269 return (AS1 (lsr,%B0) CR_TAB
3270 AS2 (mov,%B0,%A0) CR_TAB
3271 AS1 (clr,%A0) CR_TAB
3272 AS1 (ror,%B0) CR_TAB
3273 AS1 (ror,%A0));
3274
3275 case 8:
3276 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3277 AS1 (clr,%A0));
3278
3279 case 9:
3280 *len = 3;
3281 return (AS2 (mov,%B0,%A0) CR_TAB
3282 AS1 (clr,%A0) CR_TAB
3283 AS1 (lsl,%B0));
3284
3285 case 10:
3286 *len = 4;
3287 return (AS2 (mov,%B0,%A0) CR_TAB
3288 AS1 (clr,%A0) CR_TAB
3289 AS1 (lsl,%B0) CR_TAB
3290 AS1 (lsl,%B0));
3291
3292 case 11:
3293 *len = 5;
3294 return (AS2 (mov,%B0,%A0) CR_TAB
3295 AS1 (clr,%A0) CR_TAB
3296 AS1 (lsl,%B0) CR_TAB
3297 AS1 (lsl,%B0) CR_TAB
3298 AS1 (lsl,%B0));
3299
3300 case 12:
3301 if (ldi_ok)
3302 {
3303 *len = 4;
3304 return (AS2 (mov,%B0,%A0) CR_TAB
3305 AS1 (clr,%A0) CR_TAB
3306 AS1 (swap,%B0) CR_TAB
3307 AS2 (andi,%B0,0xf0));
3308 }
3309 if (scratch)
3310 {
3311 *len = 5;
3312 return (AS2 (mov,%B0,%A0) CR_TAB
3313 AS1 (clr,%A0) CR_TAB
3314 AS1 (swap,%B0) CR_TAB
3315 AS2 (ldi,%3,0xf0) CR_TAB
3316 AS2 (and,%B0,%3));
3317 }
3318 *len = 6;
3319 return (AS2 (mov,%B0,%A0) CR_TAB
3320 AS1 (clr,%A0) CR_TAB
3321 AS1 (lsl,%B0) CR_TAB
3322 AS1 (lsl,%B0) CR_TAB
3323 AS1 (lsl,%B0) CR_TAB
3324 AS1 (lsl,%B0));
3325
3326 case 13:
3327 if (ldi_ok)
3328 {
3329 *len = 5;
3330 return (AS2 (mov,%B0,%A0) CR_TAB
3331 AS1 (clr,%A0) CR_TAB
3332 AS1 (swap,%B0) CR_TAB
3333 AS1 (lsl,%B0) CR_TAB
3334 AS2 (andi,%B0,0xe0));
3335 }
3336 if (AVR_HAVE_MUL && scratch)
3337 {
3338 *len = 5;
3339 return (AS2 (ldi,%3,0x20) CR_TAB
3340 AS2 (mul,%A0,%3) CR_TAB
3341 AS2 (mov,%B0,r0) CR_TAB
3342 AS1 (clr,%A0) CR_TAB
3343 AS1 (clr,__zero_reg__));
3344 }
3345 if (optimize_size && scratch)
3346 break; /* 5 */
3347 if (scratch)
3348 {
3349 *len = 6;
3350 return (AS2 (mov,%B0,%A0) CR_TAB
3351 AS1 (clr,%A0) CR_TAB
3352 AS1 (swap,%B0) CR_TAB
3353 AS1 (lsl,%B0) CR_TAB
3354 AS2 (ldi,%3,0xe0) CR_TAB
3355 AS2 (and,%B0,%3));
3356 }
3357 if (AVR_HAVE_MUL)
3358 {
3359 *len = 6;
3360 return ("set" CR_TAB
3361 AS2 (bld,r1,5) CR_TAB
3362 AS2 (mul,%A0,r1) CR_TAB
3363 AS2 (mov,%B0,r0) CR_TAB
3364 AS1 (clr,%A0) CR_TAB
3365 AS1 (clr,__zero_reg__));
3366 }
3367 *len = 7;
3368 return (AS2 (mov,%B0,%A0) CR_TAB
3369 AS1 (clr,%A0) CR_TAB
3370 AS1 (lsl,%B0) CR_TAB
3371 AS1 (lsl,%B0) CR_TAB
3372 AS1 (lsl,%B0) CR_TAB
3373 AS1 (lsl,%B0) CR_TAB
3374 AS1 (lsl,%B0));
3375
3376 case 14:
3377 if (AVR_HAVE_MUL && ldi_ok)
3378 {
3379 *len = 5;
3380 return (AS2 (ldi,%B0,0x40) CR_TAB
3381 AS2 (mul,%A0,%B0) CR_TAB
3382 AS2 (mov,%B0,r0) CR_TAB
3383 AS1 (clr,%A0) CR_TAB
3384 AS1 (clr,__zero_reg__));
3385 }
3386 if (AVR_HAVE_MUL && scratch)
3387 {
3388 *len = 5;
3389 return (AS2 (ldi,%3,0x40) CR_TAB
3390 AS2 (mul,%A0,%3) CR_TAB
3391 AS2 (mov,%B0,r0) CR_TAB
3392 AS1 (clr,%A0) CR_TAB
3393 AS1 (clr,__zero_reg__));
3394 }
3395 if (optimize_size && ldi_ok)
3396 {
3397 *len = 5;
3398 return (AS2 (mov,%B0,%A0) CR_TAB
3399 AS2 (ldi,%A0,6) "\n1:\t"
3400 AS1 (lsl,%B0) CR_TAB
3401 AS1 (dec,%A0) CR_TAB
3402 AS1 (brne,1b));
3403 }
3404 if (optimize_size && scratch)
3405 break; /* 5 */
3406 *len = 6;
3407 return (AS1 (clr,%B0) CR_TAB
3408 AS1 (lsr,%A0) CR_TAB
3409 AS1 (ror,%B0) CR_TAB
3410 AS1 (lsr,%A0) CR_TAB
3411 AS1 (ror,%B0) CR_TAB
3412 AS1 (clr,%A0));
3413
3414 case 15:
3415 *len = 4;
3416 return (AS1 (clr,%B0) CR_TAB
3417 AS1 (lsr,%A0) CR_TAB
3418 AS1 (ror,%B0) CR_TAB
3419 AS1 (clr,%A0));
3420 }
3421 len = t;
3422 }
3423 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3424 AS1 (rol,%B0)),
3425 insn, operands, len, 2);
3426 return "";
3427 }
3428
3429
3430 /* 32bit shift left ((long)x << i) */
3431
3432 const char *
3433 ashlsi3_out (rtx insn, rtx operands[], int *len)
3434 {
3435 if (GET_CODE (operands[2]) == CONST_INT)
3436 {
3437 int k;
3438 int *t = len;
3439
3440 if (!len)
3441 len = &k;
3442
3443 switch (INTVAL (operands[2]))
3444 {
3445 default:
3446 if (INTVAL (operands[2]) < 32)
3447 break;
3448
3449 if (AVR_HAVE_MOVW)
3450 return *len = 3, (AS1 (clr,%D0) CR_TAB
3451 AS1 (clr,%C0) CR_TAB
3452 AS2 (movw,%A0,%C0));
3453 *len = 4;
3454 return (AS1 (clr,%D0) CR_TAB
3455 AS1 (clr,%C0) CR_TAB
3456 AS1 (clr,%B0) CR_TAB
3457 AS1 (clr,%A0));
3458
3459 case 8:
3460 {
3461 int reg0 = true_regnum (operands[0]);
3462 int reg1 = true_regnum (operands[1]);
3463 *len = 4;
3464 if (reg0 >= reg1)
3465 return (AS2 (mov,%D0,%C1) CR_TAB
3466 AS2 (mov,%C0,%B1) CR_TAB
3467 AS2 (mov,%B0,%A1) CR_TAB
3468 AS1 (clr,%A0));
3469 else
3470 return (AS1 (clr,%A0) CR_TAB
3471 AS2 (mov,%B0,%A1) CR_TAB
3472 AS2 (mov,%C0,%B1) CR_TAB
3473 AS2 (mov,%D0,%C1));
3474 }
3475
3476 case 16:
3477 {
3478 int reg0 = true_regnum (operands[0]);
3479 int reg1 = true_regnum (operands[1]);
3480 if (reg0 + 2 == reg1)
3481 return *len = 2, (AS1 (clr,%B0) CR_TAB
3482 AS1 (clr,%A0));
3483 if (AVR_HAVE_MOVW)
3484 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3485 AS1 (clr,%B0) CR_TAB
3486 AS1 (clr,%A0));
3487 else
3488 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3489 AS2 (mov,%D0,%B1) CR_TAB
3490 AS1 (clr,%B0) CR_TAB
3491 AS1 (clr,%A0));
3492 }
3493
3494 case 24:
3495 *len = 4;
3496 return (AS2 (mov,%D0,%A1) CR_TAB
3497 AS1 (clr,%C0) CR_TAB
3498 AS1 (clr,%B0) CR_TAB
3499 AS1 (clr,%A0));
3500
3501 case 31:
3502 *len = 6;
3503 return (AS1 (clr,%D0) CR_TAB
3504 AS1 (lsr,%A0) CR_TAB
3505 AS1 (ror,%D0) CR_TAB
3506 AS1 (clr,%C0) CR_TAB
3507 AS1 (clr,%B0) CR_TAB
3508 AS1 (clr,%A0));
3509 }
3510 len = t;
3511 }
3512 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3513 AS1 (rol,%B0) CR_TAB
3514 AS1 (rol,%C0) CR_TAB
3515 AS1 (rol,%D0)),
3516 insn, operands, len, 4);
3517 return "";
3518 }
3519
3520 /* 8bit arithmetic shift right ((signed char)x >> i) */
3521
3522 const char *
3523 ashrqi3_out (rtx insn, rtx operands[], int *len)
3524 {
3525 if (GET_CODE (operands[2]) == CONST_INT)
3526 {
3527 int k;
3528
3529 if (!len)
3530 len = &k;
3531
3532 switch (INTVAL (operands[2]))
3533 {
3534 case 1:
3535 *len = 1;
3536 return AS1 (asr,%0);
3537
3538 case 2:
3539 *len = 2;
3540 return (AS1 (asr,%0) CR_TAB
3541 AS1 (asr,%0));
3542
3543 case 3:
3544 *len = 3;
3545 return (AS1 (asr,%0) CR_TAB
3546 AS1 (asr,%0) CR_TAB
3547 AS1 (asr,%0));
3548
3549 case 4:
3550 *len = 4;
3551 return (AS1 (asr,%0) CR_TAB
3552 AS1 (asr,%0) CR_TAB
3553 AS1 (asr,%0) CR_TAB
3554 AS1 (asr,%0));
3555
3556 case 5:
3557 *len = 5;
3558 return (AS1 (asr,%0) CR_TAB
3559 AS1 (asr,%0) CR_TAB
3560 AS1 (asr,%0) CR_TAB
3561 AS1 (asr,%0) CR_TAB
3562 AS1 (asr,%0));
3563
3564 case 6:
3565 *len = 4;
3566 return (AS2 (bst,%0,6) CR_TAB
3567 AS1 (lsl,%0) CR_TAB
3568 AS2 (sbc,%0,%0) CR_TAB
3569 AS2 (bld,%0,0));
3570
3571 default:
3572 if (INTVAL (operands[2]) < 8)
3573 break;
3574
3575 /* fall through */
3576
3577 case 7:
3578 *len = 2;
3579 return (AS1 (lsl,%0) CR_TAB
3580 AS2 (sbc,%0,%0));
3581 }
3582 }
3583 else if (CONSTANT_P (operands[2]))
3584 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3585
3586 out_shift_with_cnt (AS1 (asr,%0),
3587 insn, operands, len, 1);
3588 return "";
3589 }
3590
3591
3592 /* 16bit arithmetic shift right ((signed short)x >> i) */
3593
3594 const char *
3595 ashrhi3_out (rtx insn, rtx operands[], int *len)
3596 {
3597 if (GET_CODE (operands[2]) == CONST_INT)
3598 {
3599 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3600 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3601 int k;
3602 int *t = len;
3603
3604 if (!len)
3605 len = &k;
3606
3607 switch (INTVAL (operands[2]))
3608 {
3609 case 4:
3610 case 5:
3611 /* XXX try to optimize this too? */
3612 break;
3613
3614 case 6:
3615 if (optimize_size)
3616 break; /* scratch ? 5 : 6 */
3617 *len = 8;
3618 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3619 AS2 (mov,%A0,%B0) CR_TAB
3620 AS1 (lsl,__tmp_reg__) CR_TAB
3621 AS1 (rol,%A0) CR_TAB
3622 AS2 (sbc,%B0,%B0) CR_TAB
3623 AS1 (lsl,__tmp_reg__) CR_TAB
3624 AS1 (rol,%A0) CR_TAB
3625 AS1 (rol,%B0));
3626
3627 case 7:
3628 *len = 4;
3629 return (AS1 (lsl,%A0) CR_TAB
3630 AS2 (mov,%A0,%B0) CR_TAB
3631 AS1 (rol,%A0) CR_TAB
3632 AS2 (sbc,%B0,%B0));
3633
3634 case 8:
3635 {
3636 int reg0 = true_regnum (operands[0]);
3637 int reg1 = true_regnum (operands[1]);
3638
3639 if (reg0 == reg1)
3640 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3641 AS1 (lsl,%B0) CR_TAB
3642 AS2 (sbc,%B0,%B0));
3643 else
3644 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3645 AS1 (clr,%B0) CR_TAB
3646 AS2 (sbrc,%A0,7) CR_TAB
3647 AS1 (dec,%B0));
3648 }
3649
3650 case 9:
3651 *len = 4;
3652 return (AS2 (mov,%A0,%B0) CR_TAB
3653 AS1 (lsl,%B0) CR_TAB
3654 AS2 (sbc,%B0,%B0) CR_TAB
3655 AS1 (asr,%A0));
3656
3657 case 10:
3658 *len = 5;
3659 return (AS2 (mov,%A0,%B0) CR_TAB
3660 AS1 (lsl,%B0) CR_TAB
3661 AS2 (sbc,%B0,%B0) CR_TAB
3662 AS1 (asr,%A0) CR_TAB
3663 AS1 (asr,%A0));
3664
3665 case 11:
3666 if (AVR_HAVE_MUL && ldi_ok)
3667 {
3668 *len = 5;
3669 return (AS2 (ldi,%A0,0x20) CR_TAB
3670 AS2 (muls,%B0,%A0) CR_TAB
3671 AS2 (mov,%A0,r1) CR_TAB
3672 AS2 (sbc,%B0,%B0) CR_TAB
3673 AS1 (clr,__zero_reg__));
3674 }
3675 if (optimize_size && scratch)
3676 break; /* 5 */
3677 *len = 6;
3678 return (AS2 (mov,%A0,%B0) CR_TAB
3679 AS1 (lsl,%B0) CR_TAB
3680 AS2 (sbc,%B0,%B0) CR_TAB
3681 AS1 (asr,%A0) CR_TAB
3682 AS1 (asr,%A0) CR_TAB
3683 AS1 (asr,%A0));
3684
3685 case 12:
3686 if (AVR_HAVE_MUL && ldi_ok)
3687 {
3688 *len = 5;
3689 return (AS2 (ldi,%A0,0x10) CR_TAB
3690 AS2 (muls,%B0,%A0) CR_TAB
3691 AS2 (mov,%A0,r1) CR_TAB
3692 AS2 (sbc,%B0,%B0) CR_TAB
3693 AS1 (clr,__zero_reg__));
3694 }
3695 if (optimize_size && scratch)
3696 break; /* 5 */
3697 *len = 7;
3698 return (AS2 (mov,%A0,%B0) CR_TAB
3699 AS1 (lsl,%B0) CR_TAB
3700 AS2 (sbc,%B0,%B0) CR_TAB
3701 AS1 (asr,%A0) CR_TAB
3702 AS1 (asr,%A0) CR_TAB
3703 AS1 (asr,%A0) CR_TAB
3704 AS1 (asr,%A0));
3705
3706 case 13:
3707 if (AVR_HAVE_MUL && ldi_ok)
3708 {
3709 *len = 5;
3710 return (AS2 (ldi,%A0,0x08) CR_TAB
3711 AS2 (muls,%B0,%A0) CR_TAB
3712 AS2 (mov,%A0,r1) CR_TAB
3713 AS2 (sbc,%B0,%B0) CR_TAB
3714 AS1 (clr,__zero_reg__));
3715 }
3716 if (optimize_size)
3717 break; /* scratch ? 5 : 7 */
3718 *len = 8;
3719 return (AS2 (mov,%A0,%B0) CR_TAB
3720 AS1 (lsl,%B0) CR_TAB
3721 AS2 (sbc,%B0,%B0) CR_TAB
3722 AS1 (asr,%A0) CR_TAB
3723 AS1 (asr,%A0) CR_TAB
3724 AS1 (asr,%A0) CR_TAB
3725 AS1 (asr,%A0) CR_TAB
3726 AS1 (asr,%A0));
3727
3728 case 14:
3729 *len = 5;
3730 return (AS1 (lsl,%B0) CR_TAB
3731 AS2 (sbc,%A0,%A0) CR_TAB
3732 AS1 (lsl,%B0) CR_TAB
3733 AS2 (mov,%B0,%A0) CR_TAB
3734 AS1 (rol,%A0));
3735
3736 default:
3737 if (INTVAL (operands[2]) < 16)
3738 break;
3739
3740 /* fall through */
3741
3742 case 15:
3743 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3744 AS2 (sbc,%A0,%A0) CR_TAB
3745 AS2 (mov,%B0,%A0));
3746 }
3747 len = t;
3748 }
3749 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3750 AS1 (ror,%A0)),
3751 insn, operands, len, 2);
3752 return "";
3753 }
3754
3755
3756 /* 32bit arithmetic shift right ((signed long)x >> i) */
3757
3758 const char *
3759 ashrsi3_out (rtx insn, rtx operands[], int *len)
3760 {
3761 if (GET_CODE (operands[2]) == CONST_INT)
3762 {
3763 int k;
3764 int *t = len;
3765
3766 if (!len)
3767 len = &k;
3768
3769 switch (INTVAL (operands[2]))
3770 {
3771 case 8:
3772 {
3773 int reg0 = true_regnum (operands[0]);
3774 int reg1 = true_regnum (operands[1]);
3775 *len=6;
3776 if (reg0 <= reg1)
3777 return (AS2 (mov,%A0,%B1) CR_TAB
3778 AS2 (mov,%B0,%C1) CR_TAB
3779 AS2 (mov,%C0,%D1) CR_TAB
3780 AS1 (clr,%D0) CR_TAB
3781 AS2 (sbrc,%C0,7) CR_TAB
3782 AS1 (dec,%D0));
3783 else
3784 return (AS1 (clr,%D0) CR_TAB
3785 AS2 (sbrc,%D1,7) CR_TAB
3786 AS1 (dec,%D0) CR_TAB
3787 AS2 (mov,%C0,%D1) CR_TAB
3788 AS2 (mov,%B0,%C1) CR_TAB
3789 AS2 (mov,%A0,%B1));
3790 }
3791
3792 case 16:
3793 {
3794 int reg0 = true_regnum (operands[0]);
3795 int reg1 = true_regnum (operands[1]);
3796
3797 if (reg0 == reg1 + 2)
3798 return *len = 4, (AS1 (clr,%D0) CR_TAB
3799 AS2 (sbrc,%B0,7) CR_TAB
3800 AS1 (com,%D0) CR_TAB
3801 AS2 (mov,%C0,%D0));
3802 if (AVR_HAVE_MOVW)
3803 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3804 AS1 (clr,%D0) CR_TAB
3805 AS2 (sbrc,%B0,7) CR_TAB
3806 AS1 (com,%D0) CR_TAB
3807 AS2 (mov,%C0,%D0));
3808 else
3809 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3810 AS2 (mov,%A0,%C1) CR_TAB
3811 AS1 (clr,%D0) CR_TAB
3812 AS2 (sbrc,%B0,7) CR_TAB
3813 AS1 (com,%D0) CR_TAB
3814 AS2 (mov,%C0,%D0));
3815 }
3816
3817 case 24:
3818 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3819 AS1 (clr,%D0) CR_TAB
3820 AS2 (sbrc,%A0,7) CR_TAB
3821 AS1 (com,%D0) CR_TAB
3822 AS2 (mov,%B0,%D0) CR_TAB
3823 AS2 (mov,%C0,%D0));
3824
3825 default:
3826 if (INTVAL (operands[2]) < 32)
3827 break;
3828
3829 /* fall through */
3830
3831 case 31:
3832 if (AVR_HAVE_MOVW)
3833 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3834 AS2 (sbc,%A0,%A0) CR_TAB
3835 AS2 (mov,%B0,%A0) CR_TAB
3836 AS2 (movw,%C0,%A0));
3837 else
3838 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3839 AS2 (sbc,%A0,%A0) CR_TAB
3840 AS2 (mov,%B0,%A0) CR_TAB
3841 AS2 (mov,%C0,%A0) CR_TAB
3842 AS2 (mov,%D0,%A0));
3843 }
3844 len = t;
3845 }
3846 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3847 AS1 (ror,%C0) CR_TAB
3848 AS1 (ror,%B0) CR_TAB
3849 AS1 (ror,%A0)),
3850 insn, operands, len, 4);
3851 return "";
3852 }
3853
3854 /* 8bit logic shift right ((unsigned char)x >> i) */
3855
3856 const char *
3857 lshrqi3_out (rtx insn, rtx operands[], int *len)
3858 {
3859 if (GET_CODE (operands[2]) == CONST_INT)
3860 {
3861 int k;
3862
3863 if (!len)
3864 len = &k;
3865
3866 switch (INTVAL (operands[2]))
3867 {
3868 default:
3869 if (INTVAL (operands[2]) < 8)
3870 break;
3871
3872 *len = 1;
3873 return AS1 (clr,%0);
3874
3875 case 1:
3876 *len = 1;
3877 return AS1 (lsr,%0);
3878
3879 case 2:
3880 *len = 2;
3881 return (AS1 (lsr,%0) CR_TAB
3882 AS1 (lsr,%0));
3883 case 3:
3884 *len = 3;
3885 return (AS1 (lsr,%0) CR_TAB
3886 AS1 (lsr,%0) CR_TAB
3887 AS1 (lsr,%0));
3888
3889 case 4:
3890 if (test_hard_reg_class (LD_REGS, operands[0]))
3891 {
3892 *len=2;
3893 return (AS1 (swap,%0) CR_TAB
3894 AS2 (andi,%0,0x0f));
3895 }
3896 *len = 4;
3897 return (AS1 (lsr,%0) CR_TAB
3898 AS1 (lsr,%0) CR_TAB
3899 AS1 (lsr,%0) CR_TAB
3900 AS1 (lsr,%0));
3901
3902 case 5:
3903 if (test_hard_reg_class (LD_REGS, operands[0]))
3904 {
3905 *len = 3;
3906 return (AS1 (swap,%0) CR_TAB
3907 AS1 (lsr,%0) CR_TAB
3908 AS2 (andi,%0,0x7));
3909 }
3910 *len = 5;
3911 return (AS1 (lsr,%0) CR_TAB
3912 AS1 (lsr,%0) CR_TAB
3913 AS1 (lsr,%0) CR_TAB
3914 AS1 (lsr,%0) CR_TAB
3915 AS1 (lsr,%0));
3916
3917 case 6:
3918 if (test_hard_reg_class (LD_REGS, operands[0]))
3919 {
3920 *len = 4;
3921 return (AS1 (swap,%0) CR_TAB
3922 AS1 (lsr,%0) CR_TAB
3923 AS1 (lsr,%0) CR_TAB
3924 AS2 (andi,%0,0x3));
3925 }
3926 *len = 6;
3927 return (AS1 (lsr,%0) CR_TAB
3928 AS1 (lsr,%0) CR_TAB
3929 AS1 (lsr,%0) CR_TAB
3930 AS1 (lsr,%0) CR_TAB
3931 AS1 (lsr,%0) CR_TAB
3932 AS1 (lsr,%0));
3933
3934 case 7:
3935 *len = 3;
3936 return (AS1 (rol,%0) CR_TAB
3937 AS1 (clr,%0) CR_TAB
3938 AS1 (rol,%0));
3939 }
3940 }
3941 else if (CONSTANT_P (operands[2]))
3942 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3943
3944 out_shift_with_cnt (AS1 (lsr,%0),
3945 insn, operands, len, 1);
3946 return "";
3947 }
3948
3949 /* 16bit logic shift right ((unsigned short)x >> i) */
3950
3951 const char *
3952 lshrhi3_out (rtx insn, rtx operands[], int *len)
3953 {
3954 if (GET_CODE (operands[2]) == CONST_INT)
3955 {
3956 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3957 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3958 int k;
3959 int *t = len;
3960
3961 if (!len)
3962 len = &k;
3963
3964 switch (INTVAL (operands[2]))
3965 {
3966 default:
3967 if (INTVAL (operands[2]) < 16)
3968 break;
3969
3970 *len = 2;
3971 return (AS1 (clr,%B0) CR_TAB
3972 AS1 (clr,%A0));
3973
3974 case 4:
3975 if (optimize_size && scratch)
3976 break; /* 5 */
3977 if (ldi_ok)
3978 {
3979 *len = 6;
3980 return (AS1 (swap,%B0) CR_TAB
3981 AS1 (swap,%A0) CR_TAB
3982 AS2 (andi,%A0,0x0f) CR_TAB
3983 AS2 (eor,%A0,%B0) CR_TAB
3984 AS2 (andi,%B0,0x0f) CR_TAB
3985 AS2 (eor,%A0,%B0));
3986 }
3987 if (scratch)
3988 {
3989 *len = 7;
3990 return (AS1 (swap,%B0) CR_TAB
3991 AS1 (swap,%A0) CR_TAB
3992 AS2 (ldi,%3,0x0f) CR_TAB
3993 AS2 (and,%A0,%3) CR_TAB
3994 AS2 (eor,%A0,%B0) CR_TAB
3995 AS2 (and,%B0,%3) CR_TAB
3996 AS2 (eor,%A0,%B0));
3997 }
3998 break; /* optimize_size ? 6 : 8 */
3999
4000 case 5:
4001 if (optimize_size)
4002 break; /* scratch ? 5 : 6 */
4003 if (ldi_ok)
4004 {
4005 *len = 8;
4006 return (AS1 (lsr,%B0) CR_TAB
4007 AS1 (ror,%A0) CR_TAB
4008 AS1 (swap,%B0) CR_TAB
4009 AS1 (swap,%A0) CR_TAB
4010 AS2 (andi,%A0,0x0f) CR_TAB
4011 AS2 (eor,%A0,%B0) CR_TAB
4012 AS2 (andi,%B0,0x0f) CR_TAB
4013 AS2 (eor,%A0,%B0));
4014 }
4015 if (scratch)
4016 {
4017 *len = 9;
4018 return (AS1 (lsr,%B0) CR_TAB
4019 AS1 (ror,%A0) CR_TAB
4020 AS1 (swap,%B0) CR_TAB
4021 AS1 (swap,%A0) CR_TAB
4022 AS2 (ldi,%3,0x0f) CR_TAB
4023 AS2 (and,%A0,%3) CR_TAB
4024 AS2 (eor,%A0,%B0) CR_TAB
4025 AS2 (and,%B0,%3) CR_TAB
4026 AS2 (eor,%A0,%B0));
4027 }
4028 break; /* 10 */
4029
4030 case 6:
4031 if (optimize_size)
4032 break; /* scratch ? 5 : 6 */
4033 *len = 9;
4034 return (AS1 (clr,__tmp_reg__) CR_TAB
4035 AS1 (lsl,%A0) CR_TAB
4036 AS1 (rol,%B0) CR_TAB
4037 AS1 (rol,__tmp_reg__) CR_TAB
4038 AS1 (lsl,%A0) CR_TAB
4039 AS1 (rol,%B0) CR_TAB
4040 AS1 (rol,__tmp_reg__) CR_TAB
4041 AS2 (mov,%A0,%B0) CR_TAB
4042 AS2 (mov,%B0,__tmp_reg__));
4043
4044 case 7:
4045 *len = 5;
4046 return (AS1 (lsl,%A0) CR_TAB
4047 AS2 (mov,%A0,%B0) CR_TAB
4048 AS1 (rol,%A0) CR_TAB
4049 AS2 (sbc,%B0,%B0) CR_TAB
4050 AS1 (neg,%B0));
4051
4052 case 8:
4053 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4054 AS1 (clr,%B0));
4055
4056 case 9:
4057 *len = 3;
4058 return (AS2 (mov,%A0,%B0) CR_TAB
4059 AS1 (clr,%B0) CR_TAB
4060 AS1 (lsr,%A0));
4061
4062 case 10:
4063 *len = 4;
4064 return (AS2 (mov,%A0,%B0) CR_TAB
4065 AS1 (clr,%B0) CR_TAB
4066 AS1 (lsr,%A0) CR_TAB
4067 AS1 (lsr,%A0));
4068
4069 case 11:
4070 *len = 5;
4071 return (AS2 (mov,%A0,%B0) CR_TAB
4072 AS1 (clr,%B0) CR_TAB
4073 AS1 (lsr,%A0) CR_TAB
4074 AS1 (lsr,%A0) CR_TAB
4075 AS1 (lsr,%A0));
4076
4077 case 12:
4078 if (ldi_ok)
4079 {
4080 *len = 4;
4081 return (AS2 (mov,%A0,%B0) CR_TAB
4082 AS1 (clr,%B0) CR_TAB
4083 AS1 (swap,%A0) CR_TAB
4084 AS2 (andi,%A0,0x0f));
4085 }
4086 if (scratch)
4087 {
4088 *len = 5;
4089 return (AS2 (mov,%A0,%B0) CR_TAB
4090 AS1 (clr,%B0) CR_TAB
4091 AS1 (swap,%A0) CR_TAB
4092 AS2 (ldi,%3,0x0f) CR_TAB
4093 AS2 (and,%A0,%3));
4094 }
4095 *len = 6;
4096 return (AS2 (mov,%A0,%B0) CR_TAB
4097 AS1 (clr,%B0) CR_TAB
4098 AS1 (lsr,%A0) CR_TAB
4099 AS1 (lsr,%A0) CR_TAB
4100 AS1 (lsr,%A0) CR_TAB
4101 AS1 (lsr,%A0));
4102
4103 case 13:
4104 if (ldi_ok)
4105 {
4106 *len = 5;
4107 return (AS2 (mov,%A0,%B0) CR_TAB
4108 AS1 (clr,%B0) CR_TAB
4109 AS1 (swap,%A0) CR_TAB
4110 AS1 (lsr,%A0) CR_TAB
4111 AS2 (andi,%A0,0x07));
4112 }
4113 if (AVR_HAVE_MUL && scratch)
4114 {
4115 *len = 5;
4116 return (AS2 (ldi,%3,0x08) CR_TAB
4117 AS2 (mul,%B0,%3) CR_TAB
4118 AS2 (mov,%A0,r1) CR_TAB
4119 AS1 (clr,%B0) CR_TAB
4120 AS1 (clr,__zero_reg__));
4121 }
4122 if (optimize_size && scratch)
4123 break; /* 5 */
4124 if (scratch)
4125 {
4126 *len = 6;
4127 return (AS2 (mov,%A0,%B0) CR_TAB
4128 AS1 (clr,%B0) CR_TAB
4129 AS1 (swap,%A0) CR_TAB
4130 AS1 (lsr,%A0) CR_TAB
4131 AS2 (ldi,%3,0x07) CR_TAB
4132 AS2 (and,%A0,%3));
4133 }
4134 if (AVR_HAVE_MUL)
4135 {
4136 *len = 6;
4137 return ("set" CR_TAB
4138 AS2 (bld,r1,3) CR_TAB
4139 AS2 (mul,%B0,r1) CR_TAB
4140 AS2 (mov,%A0,r1) CR_TAB
4141 AS1 (clr,%B0) CR_TAB
4142 AS1 (clr,__zero_reg__));
4143 }
4144 *len = 7;
4145 return (AS2 (mov,%A0,%B0) CR_TAB
4146 AS1 (clr,%B0) CR_TAB
4147 AS1 (lsr,%A0) CR_TAB
4148 AS1 (lsr,%A0) CR_TAB
4149 AS1 (lsr,%A0) CR_TAB
4150 AS1 (lsr,%A0) CR_TAB
4151 AS1 (lsr,%A0));
4152
4153 case 14:
4154 if (AVR_HAVE_MUL && ldi_ok)
4155 {
4156 *len = 5;
4157 return (AS2 (ldi,%A0,0x04) CR_TAB
4158 AS2 (mul,%B0,%A0) CR_TAB
4159 AS2 (mov,%A0,r1) CR_TAB
4160 AS1 (clr,%B0) CR_TAB
4161 AS1 (clr,__zero_reg__));
4162 }
4163 if (AVR_HAVE_MUL && scratch)
4164 {
4165 *len = 5;
4166 return (AS2 (ldi,%3,0x04) CR_TAB
4167 AS2 (mul,%B0,%3) CR_TAB
4168 AS2 (mov,%A0,r1) CR_TAB
4169 AS1 (clr,%B0) CR_TAB
4170 AS1 (clr,__zero_reg__));
4171 }
4172 if (optimize_size && ldi_ok)
4173 {
4174 *len = 5;
4175 return (AS2 (mov,%A0,%B0) CR_TAB
4176 AS2 (ldi,%B0,6) "\n1:\t"
4177 AS1 (lsr,%A0) CR_TAB
4178 AS1 (dec,%B0) CR_TAB
4179 AS1 (brne,1b));
4180 }
4181 if (optimize_size && scratch)
4182 break; /* 5 */
4183 *len = 6;
4184 return (AS1 (clr,%A0) CR_TAB
4185 AS1 (lsl,%B0) CR_TAB
4186 AS1 (rol,%A0) CR_TAB
4187 AS1 (lsl,%B0) CR_TAB
4188 AS1 (rol,%A0) CR_TAB
4189 AS1 (clr,%B0));
4190
4191 case 15:
4192 *len = 4;
4193 return (AS1 (clr,%A0) CR_TAB
4194 AS1 (lsl,%B0) CR_TAB
4195 AS1 (rol,%A0) CR_TAB
4196 AS1 (clr,%B0));
4197 }
4198 len = t;
4199 }
4200 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4201 AS1 (ror,%A0)),
4202 insn, operands, len, 2);
4203 return "";
4204 }
4205
4206 /* 32bit logic shift right ((unsigned long)x >> i) */
4207
4208 const char *
4209 lshrsi3_out (rtx insn, rtx operands[], int *len)
4210 {
4211 if (GET_CODE (operands[2]) == CONST_INT)
4212 {
4213 int k;
4214 int *t = len;
4215
4216 if (!len)
4217 len = &k;
4218
4219 switch (INTVAL (operands[2]))
4220 {
4221 default:
4222 if (INTVAL (operands[2]) < 32)
4223 break;
4224
4225 if (AVR_HAVE_MOVW)
4226 return *len = 3, (AS1 (clr,%D0) CR_TAB
4227 AS1 (clr,%C0) CR_TAB
4228 AS2 (movw,%A0,%C0));
4229 *len = 4;
4230 return (AS1 (clr,%D0) CR_TAB
4231 AS1 (clr,%C0) CR_TAB
4232 AS1 (clr,%B0) CR_TAB
4233 AS1 (clr,%A0));
4234
4235 case 8:
4236 {
4237 int reg0 = true_regnum (operands[0]);
4238 int reg1 = true_regnum (operands[1]);
4239 *len = 4;
4240 if (reg0 <= reg1)
4241 return (AS2 (mov,%A0,%B1) CR_TAB
4242 AS2 (mov,%B0,%C1) CR_TAB
4243 AS2 (mov,%C0,%D1) CR_TAB
4244 AS1 (clr,%D0));
4245 else
4246 return (AS1 (clr,%D0) CR_TAB
4247 AS2 (mov,%C0,%D1) CR_TAB
4248 AS2 (mov,%B0,%C1) CR_TAB
4249 AS2 (mov,%A0,%B1));
4250 }
4251
4252 case 16:
4253 {
4254 int reg0 = true_regnum (operands[0]);
4255 int reg1 = true_regnum (operands[1]);
4256
4257 if (reg0 == reg1 + 2)
4258 return *len = 2, (AS1 (clr,%C0) CR_TAB
4259 AS1 (clr,%D0));
4260 if (AVR_HAVE_MOVW)
4261 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4262 AS1 (clr,%C0) CR_TAB
4263 AS1 (clr,%D0));
4264 else
4265 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4266 AS2 (mov,%A0,%C1) CR_TAB
4267 AS1 (clr,%C0) CR_TAB
4268 AS1 (clr,%D0));
4269 }
4270
4271 case 24:
4272 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4273 AS1 (clr,%B0) CR_TAB
4274 AS1 (clr,%C0) CR_TAB
4275 AS1 (clr,%D0));
4276
4277 case 31:
4278 *len = 6;
4279 return (AS1 (clr,%A0) CR_TAB
4280 AS2 (sbrc,%D0,7) CR_TAB
4281 AS1 (inc,%A0) CR_TAB
4282 AS1 (clr,%B0) CR_TAB
4283 AS1 (clr,%C0) CR_TAB
4284 AS1 (clr,%D0));
4285 }
4286 len = t;
4287 }
4288 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4289 AS1 (ror,%C0) CR_TAB
4290 AS1 (ror,%B0) CR_TAB
4291 AS1 (ror,%A0)),
4292 insn, operands, len, 4);
4293 return "";
4294 }
4295
4296 /* Modifies the length assigned to instruction INSN.
4297 LEN is the initially computed length of the insn. */
4298
4299 int
4300 adjust_insn_length (rtx insn, int len)
4301 {
4302 rtx patt = PATTERN (insn);
4303 rtx set;
4304
4305 if (GET_CODE (patt) == SET)
4306 {
4307 rtx op[10];
4308 op[1] = SET_SRC (patt);
4309 op[0] = SET_DEST (patt);
4310 if (general_operand (op[1], VOIDmode)
4311 && general_operand (op[0], VOIDmode))
4312 {
4313 switch (GET_MODE (op[0]))
4314 {
4315 case QImode:
4316 output_movqi (insn, op, &len);
4317 break;
4318 case HImode:
4319 output_movhi (insn, op, &len);
4320 break;
4321 case SImode:
4322 case SFmode:
4323 output_movsisf (insn, op, &len);
4324 break;
4325 default:
4326 break;
4327 }
4328 }
4329 else if (op[0] == cc0_rtx && REG_P (op[1]))
4330 {
4331 switch (GET_MODE (op[1]))
4332 {
4333 case HImode: out_tsthi (insn,&len); break;
4334 case SImode: out_tstsi (insn,&len); break;
4335 default: break;
4336 }
4337 }
4338 else if (GET_CODE (op[1]) == AND)
4339 {
4340 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4341 {
4342 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4343 if (GET_MODE (op[1]) == SImode)
4344 len = (((mask & 0xff) != 0xff)
4345 + ((mask & 0xff00) != 0xff00)
4346 + ((mask & 0xff0000L) != 0xff0000L)
4347 + ((mask & 0xff000000L) != 0xff000000L));
4348 else if (GET_MODE (op[1]) == HImode)
4349 len = (((mask & 0xff) != 0xff)
4350 + ((mask & 0xff00) != 0xff00));
4351 }
4352 }
4353 else if (GET_CODE (op[1]) == IOR)
4354 {
4355 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4356 {
4357 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4358 if (GET_MODE (op[1]) == SImode)
4359 len = (((mask & 0xff) != 0)
4360 + ((mask & 0xff00) != 0)
4361 + ((mask & 0xff0000L) != 0)
4362 + ((mask & 0xff000000L) != 0));
4363 else if (GET_MODE (op[1]) == HImode)
4364 len = (((mask & 0xff) != 0)
4365 + ((mask & 0xff00) != 0));
4366 }
4367 }
4368 }
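/* The remaining cases are handled through the insn's single SET:
   PARALLEL move patterns with a clobbered scratch register, and the
   shift insns.  */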
4369 set = single_set (insn);
4370 if (set)
4371 {
4372 rtx op[10];
4373
4374 op[1] = SET_SRC (set);
4375 op[0] = SET_DEST (set);
4376
4377 if (GET_CODE (patt) == PARALLEL
4378 && general_operand (op[1], VOIDmode)
4379 && general_operand (op[0], VOIDmode))
4380 {
4381 if (XVECLEN (patt, 0) == 2)
4382 op[2] = XVECEXP (patt, 0, 1);
4383
4384 switch (GET_MODE (op[0]))
4385 {
4386 case QImode:
4387 len = 2;
4388 break;
4389 case HImode:
4390 output_reload_inhi (insn, op, &len);
4391 break;
4392 case SImode:
4393 case SFmode:
4394 output_reload_insisf (insn, op, &len);
4395 break;
4396 default:
4397 break;
4398 }
4399 }
4400 else if (GET_CODE (op[1]) == ASHIFT
4401 || GET_CODE (op[1]) == ASHIFTRT
4402 || GET_CODE (op[1]) == LSHIFTRT)
4403 {
4404 rtx ops[10];
4405 ops[0] = op[0];
4406 ops[1] = XEXP (op[1],0);
4407 ops[2] = XEXP (op[1],1);
4408 switch (GET_CODE (op[1]))
4409 {
4410 case ASHIFT:
4411 switch (GET_MODE (op[0]))
4412 {
4413 case QImode: ashlqi3_out (insn,ops,&len); break;
4414 case HImode: ashlhi3_out (insn,ops,&len); break;
4415 case SImode: ashlsi3_out (insn,ops,&len); break;
4416 default: break;
4417 }
4418 break;
4419 case ASHIFTRT:
4420 switch (GET_MODE (op[0]))
4421 {
4422 case QImode: ashrqi3_out (insn,ops,&len); break;
4423 case HImode: ashrhi3_out (insn,ops,&len); break;
4424 case SImode: ashrsi3_out (insn,ops,&len); break;
4425 default: break;
4426 }
4427 break;
4428 case LSHIFTRT:
4429 switch (GET_MODE (op[0]))
4430 {
4431 case QImode: lshrqi3_out (insn,ops,&len); break;
4432 case HImode: lshrhi3_out (insn,ops,&len); break;
4433 case SImode: lshrsi3_out (insn,ops,&len); break;
4434 default: break;
4435 }
4436 break;
4437 default:
4438 break;
4439 }
4440 }
4441 }
4442 return len;
4443 }
4444
4445 /* Return nonzero if register REG is dead after INSN. */
4446
4447 int
4448 reg_unused_after (rtx insn, rtx reg)
4449 {
4450 return (dead_or_set_p (insn, reg)
4451 || (REG_P (reg) && _reg_unused_after (insn, reg)));
4452 }
4453
4454 /* Return nonzero if REG is not used after INSN.
4455 We assume REG is a reload reg, and therefore does
4456 not live past labels. It may live past calls or jumps though. */
4457
4458 int
4459 _reg_unused_after (rtx insn, rtx reg)
4460 {
4461 enum rtx_code code;
4462 rtx set;
4463
4464 /* If the reg is set by this instruction, then it is safe for our
4465 case. Disregard the case where this is a store to memory, since
4466 we are checking a register used in the store address. */
4467 set = single_set (insn);
4468 if (set && GET_CODE (SET_DEST (set)) != MEM
4469 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4470 return 1;
4471
4472 while ((insn = NEXT_INSN (insn)))
4473 {
4474 rtx set;
4475 code = GET_CODE (insn);
4476
4477 #if 0
4478 /* If this is a label that existed before reload, then the register
4479 is dead here. However, if this is a label added by reorg, then
4480 the register may still be live here. We can't tell the difference,
4481 so we just ignore labels completely. */
4482 if (code == CODE_LABEL)
4483 return 1;
4484 /* else */
4485 #endif
4486
4487 if (!INSN_P (insn))
4488 continue;
4489
4490 if (code == JUMP_INSN)
4491 return 0;
4492
4493 /* If this is a sequence, we must handle the insns in it all at once.
4494 We could have, for instance, a call that sets the target register,
4495 and an insn in a delay slot that uses the register. In this case,
4496 we must return 0. */
4497 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4498 {
4499 int i;
4500 int retval = 0;
4501
4502 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4503 {
4504 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4505 rtx set = single_set (this_insn);
4506
4507 if (GET_CODE (this_insn) == CALL_INSN)
4508 code = CALL_INSN;
4509 else if (GET_CODE (this_insn) == JUMP_INSN)
4510 {
4511 if (INSN_ANNULLED_BRANCH_P (this_insn))
4512 return 0;
4513 code = JUMP_INSN;
4514 }
4515
4516 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4517 return 0;
4518 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4519 {
4520 if (GET_CODE (SET_DEST (set)) != MEM)
4521 retval = 1;
4522 else
4523 return 0;
4524 }
4525 if (set == 0
4526 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4527 return 0;
4528 }
4529 if (retval == 1)
4530 return 1;
4531 else if (code == JUMP_INSN)
4532 return 0;
4533 }
4534
4535 if (code == CALL_INSN)
4536 {
4537 rtx tem;
4538 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4539 if (GET_CODE (XEXP (tem, 0)) == USE
4540 && REG_P (XEXP (XEXP (tem, 0), 0))
4541 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4542 return 0;
4543 if (call_used_regs[REGNO (reg)])
4544 return 1;
4545 }
4546
4547 set = single_set (insn);
4548
4549 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4550 return 0;
4551 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4552 return GET_CODE (SET_DEST (set)) != MEM;
4553 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4554 return 0;
4555 }
4556 return 1;
4557 }
4558
4559 /* Target hook for assembling integer objects. The AVR version needs
4560 special handling for references to certain labels. */
4561
4562 static bool
4563 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4564 {
4565 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4566 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4567 || GET_CODE (x) == LABEL_REF))
4568 {
4569 fputs ("\t.word\tgs(", asm_out_file);
4570 output_addr_const (asm_out_file, x);
4571 fputs (")\n", asm_out_file);
4572 return true;
4573 }
4574 return default_assemble_integer (x, size, aligned_p);
4575 }
4576
4577 /* The routine used to output NUL terminated strings. We use a special
4578 version of this for most svr4 targets because doing so makes the
4579 generated assembly code more compact (and thus faster to assemble)
4580 as well as more readable, especially for targets like the i386
4581 (where the only alternative is to output character sequences as
4582 comma separated lists of numbers). */
4583
4584 void
4585 gas_output_limited_string (FILE *file, const char *str)
4586 {
4587 const unsigned char *_limited_str = (const unsigned char *) str;
4588 unsigned ch;
4589 fprintf (file, "%s\"", STRING_ASM_OP);
4590 for (; (ch = *_limited_str); _limited_str++)
4591 {
4592 int escape;
4593 switch (escape = ESCAPES[ch])
4594 {
4595 case 0:
4596 putc (ch, file);
4597 break;
4598 case 1:
4599 fprintf (file, "\\%03o", ch);
4600 break;
4601 default:
4602 putc ('\\', file);
4603 putc (escape, file);
4604 break;
4605 }
4606 }
4607 fprintf (file, "\"\n");
4608 }
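
/* Illustrative example (not part of the original sources; the exact
   directive comes from STRING_ASM_OP and the escapes from the ESCAPES
   table defined earlier in this file):

       gas_output_limited_string (file, "OK");
       =>   .string "OK"

   Non-printable characters would come out as short or octal escapes.  */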
4609
4610 /* The routine used to output sequences of byte values. We use a special
4611 version of this for most svr4 targets because doing so makes the
4612 generated assembly code more compact (and thus faster to assemble)
4613 as well as more readable. Note that if we find subparts of the
4614 character sequence which end with NUL (and which are shorter than
4615 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4616
4617 void
4618 gas_output_ascii (FILE *file, const char *str, size_t length)
4619 {
4620 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4621 const unsigned char *limit = _ascii_bytes + length;
4622 unsigned bytes_in_chunk = 0;
4623 for (; _ascii_bytes < limit; _ascii_bytes++)
4624 {
4625 const unsigned char *p;
4626 if (bytes_in_chunk >= 60)
4627 {
4628 fprintf (file, "\"\n");
4629 bytes_in_chunk = 0;
4630 }
4631 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4632 continue;
4633 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4634 {
4635 if (bytes_in_chunk > 0)
4636 {
4637 fprintf (file, "\"\n");
4638 bytes_in_chunk = 0;
4639 }
4640 gas_output_limited_string (file, (const char*)_ascii_bytes);
4641 _ascii_bytes = p;
4642 }
4643 else
4644 {
4645 int escape;
4646 unsigned ch;
4647 if (bytes_in_chunk == 0)
4648 fprintf (file, "\t.ascii\t\"");
4649 switch (escape = ESCAPES[ch = *_ascii_bytes])
4650 {
4651 case 0:
4652 putc (ch, file);
4653 bytes_in_chunk++;
4654 break;
4655 case 1:
4656 fprintf (file, "\\%03o", ch);
4657 bytes_in_chunk += 4;
4658 break;
4659 default:
4660 putc ('\\', file);
4661 putc (escape, file);
4662 bytes_in_chunk += 2;
4663 break;
4664 }
4665 }
4666 }
4667 if (bytes_in_chunk > 0)
4668 fprintf (file, "\"\n");
4669 }
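
/* Illustrative example (a sketch, not from the original sources): for the
   four bytes "hi\0!" the NUL-terminated prefix is short enough for the
   string path, while the trailing byte falls back to .ascii:

       gas_output_ascii (file, "hi\0!", 4);
       =>   .string "hi"
            .ascii "!"
*/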
4670
4671 /* Return value is nonzero if pseudos that have been
4672 assigned to registers of class CLASS would likely be spilled
4673 because registers of CLASS are needed for spill registers. */
4674
4675 enum reg_class
4676 class_likely_spilled_p (int c)
4677 {
4678 return (c != ALL_REGS && c != ADDW_REGS);
4679 }
4680
4681 /* Valid attributes:
4682 progmem - put data into program memory;
4683 signal - make the function a hardware interrupt handler. After the
4684 function prologue, interrupts remain disabled;
4685 interrupt - make the function a hardware interrupt handler. After the
4686 function prologue, interrupts are enabled;
4687 naked - don't generate a function prologue/epilogue or a `ret' instruction.
4688
4689 Only the `progmem' attribute is valid for a type. */
4690
4691 const struct attribute_spec avr_attribute_table[] =
4692 {
4693 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4694 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4695 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4696 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4697 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4698 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
4699 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
4700 { NULL, 0, 0, false, false, false, NULL }
4701 };
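
/* Illustrative usage from the C side (a sketch, not part of the original
   sources; the vector name "__vector_1" is only an example):

       const char tbl[] __attribute__ ((progmem)) = "data";
       void __vector_1 (void) __attribute__ ((signal));
       void bare (void) __attribute__ ((naked));
*/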
4702
4703 /* Handle a "progmem" attribute; arguments as in
4704 struct attribute_spec.handler. */
4705 static tree
4706 avr_handle_progmem_attribute (tree *node, tree name,
4707 tree args ATTRIBUTE_UNUSED,
4708 int flags ATTRIBUTE_UNUSED,
4709 bool *no_add_attrs)
4710 {
4711 if (DECL_P (*node))
4712 {
4713 if (TREE_CODE (*node) == TYPE_DECL)
4714 {
4715 /* This is really a decl attribute, not a type attribute,
4716 but try to handle it for GCC 3.0 backwards compatibility. */
4717
4718 tree type = TREE_TYPE (*node);
4719 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4720 tree newtype = build_type_attribute_variant (type, attr);
4721
4722 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4723 TREE_TYPE (*node) = newtype;
4724 *no_add_attrs = true;
4725 }
4726 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4727 {
4728 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4729 {
4730 warning (0, "only initialized variables can be placed into "
4731 "program memory area");
4732 *no_add_attrs = true;
4733 }
4734 }
4735 else
4736 {
4737 warning (OPT_Wattributes, "%qs attribute ignored",
4738 IDENTIFIER_POINTER (name));
4739 *no_add_attrs = true;
4740 }
4741 }
4742
4743 return NULL_TREE;
4744 }
4745
4746 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4747 struct attribute_spec.handler. */
4748
4749 static tree
4750 avr_handle_fndecl_attribute (tree *node, tree name,
4751 tree args ATTRIBUTE_UNUSED,
4752 int flags ATTRIBUTE_UNUSED,
4753 bool *no_add_attrs)
4754 {
4755 if (TREE_CODE (*node) != FUNCTION_DECL)
4756 {
4757 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4758 IDENTIFIER_POINTER (name));
4759 *no_add_attrs = true;
4760 }
4761 else
4762 {
4763 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4764 const char *attr = IDENTIFIER_POINTER (name);
4765
4766 /* If the function has the 'signal' or 'interrupt' attribute, test to
4767 make sure that the name of the function is "__vector_NN" so as to
4768 catch when the user misspells the interrupt vector name. */
4769
4770 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4771 {
4772 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4773 {
4774 warning (0, "%qs appears to be a misspelled interrupt handler",
4775 func_name);
4776 }
4777 }
4778 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4779 {
4780 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4781 {
4782 warning (0, "%qs appears to be a misspelled signal handler",
4783 func_name);
4784 }
4785 }
4786 }
4787
4788 return NULL_TREE;
4789 }
4790
4791 static tree
4792 avr_handle_fntype_attribute (tree *node, tree name,
4793 tree args ATTRIBUTE_UNUSED,
4794 int flags ATTRIBUTE_UNUSED,
4795 bool *no_add_attrs)
4796 {
4797 if (TREE_CODE (*node) != FUNCTION_TYPE)
4798 {
4799 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4800 IDENTIFIER_POINTER (name));
4801 *no_add_attrs = true;
4802 }
4803
4804 return NULL_TREE;
4805 }
4806
4807 /* Look for the attribute `progmem' in DECL;
4808 if found, return 1, otherwise 0. */
4809
4810 int
4811 avr_progmem_p (tree decl, tree attributes)
4812 {
4813 tree a;
4814
4815 if (TREE_CODE (decl) != VAR_DECL)
4816 return 0;
4817
4818 if (NULL_TREE
4819 != lookup_attribute ("progmem", attributes))
4820 return 1;
4821
4822 a = decl;
4823 do
4824 a = TREE_TYPE (a);
4825 while (TREE_CODE (a) == ARRAY_TYPE);
4826
4827 if (a == error_mark_node)
4828 return 0;
4829
4830 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4831 return 1;
4832
4833 return 0;
4834 }
4835
4836 /* Add the section attribute if the variable is in progmem. */
4837
4838 static void
4839 avr_insert_attributes (tree node, tree *attributes)
4840 {
4841 if (TREE_CODE (node) == VAR_DECL
4842 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4843 && avr_progmem_p (node, *attributes))
4844 {
4845 static const char dsec[] = ".progmem.data";
4846 *attributes = tree_cons (get_identifier ("section"),
4847 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4848 *attributes);
4849
4850 /* ??? This seems sketchy. Why can't the user declare the
4851 thing const in the first place? */
4852 TREE_READONLY (node) = 1;
4853 }
4854 }
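
/* Example (illustrative, not from the original sources): a declaration like

       static const char msg[] __attribute__ ((progmem)) = "hello";

   receives a section attribute for ".progmem.data" and is marked
   TREE_READONLY by the hook above.  */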
4855
4856 /* A get_unnamed_section callback for switching to progmem_section. */
4857
4858 static void
4859 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4860 {
4861 fprintf (asm_out_file,
4862 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4863 AVR_HAVE_JMP_CALL ? "a" : "ax");
4864 /* Should already be aligned, this is just to be safe if it isn't. */
4865 fprintf (asm_out_file, "\t.p2align 1\n");
4866 }
4867
4868 /* Implement TARGET_ASM_INIT_SECTIONS. */
4869
4870 static void
4871 avr_asm_init_sections (void)
4872 {
4873 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4874 avr_output_progmem_section_asm_op,
4875 NULL);
4876 readonly_data_section = data_section;
4877 }
4878
4879 static unsigned int
4880 avr_section_type_flags (tree decl, const char *name, int reloc)
4881 {
4882 unsigned int flags = default_section_type_flags (decl, name, reloc);
4883
4884 if (strncmp (name, ".noinit", 7) == 0)
4885 {
4886 if (decl && TREE_CODE (decl) == VAR_DECL
4887 && DECL_INITIAL (decl) == NULL_TREE)
4888 flags |= SECTION_BSS; /* @nobits */
4889 else
4890 warning (0, "only uninitialized variables can be placed in the "
4891 ".noinit section");
4892 }
4893
4894 return flags;
4895 }
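
/* Example (illustrative, not from the original sources): an uninitialized
   variable such as

       int state __attribute__ ((section (".noinit")));

   gets SECTION_BSS (@nobits); giving it an initializer triggers the
   warning above.  */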
4896
4897 /* Outputs some appropriate text to go at the start of an assembler
4898 file. */
4899
4900 static void
4901 avr_file_start (void)
4902 {
4903 if (avr_current_arch->asm_only)
4904 error ("MCU %qs supported for assembler only", avr_mcu_name);
4905
4906 default_file_start ();
4907
4908 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4909 fputs ("__SREG__ = 0x3f\n"
4910 "__SP_H__ = 0x3e\n"
4911 "__SP_L__ = 0x3d\n", asm_out_file);
4912
4913 fputs ("__tmp_reg__ = 0\n"
4914 "__zero_reg__ = 1\n", asm_out_file);
4915
4916 /* FIXME: output these only if there is anything in the .data / .bss
4917 sections - some code size could be saved by not linking in the
4918 initialization code from libgcc if one or both sections are empty. */
4919 fputs ("\t.global __do_copy_data\n", asm_out_file);
4920 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4921 }
4922
4923 /* Outputs to the stdio stream FILE some
4924 appropriate text to go at the end of an assembler file. */
4925
4926 static void
4927 avr_file_end (void)
4928 {
4929 }
4930
4931 /* Choose the order in which to allocate hard registers for
4932 pseudo-registers local to a basic block.
4933
4934 Store the desired register order in the array `reg_alloc_order'.
4935 Element 0 should be the register to allocate first; element 1, the
4936 next register; and so on. */
4937
4938 void
4939 order_regs_for_local_alloc (void)
4940 {
4941 unsigned int i;
4942 static const int order_0[] = {
4943 24,25,
4944 18,19,
4945 20,21,
4946 22,23,
4947 30,31,
4948 26,27,
4949 28,29,
4950 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4951 0,1,
4952 32,33,34,35
4953 };
4954 static const int order_1[] = {
4955 18,19,
4956 20,21,
4957 22,23,
4958 24,25,
4959 30,31,
4960 26,27,
4961 28,29,
4962 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4963 0,1,
4964 32,33,34,35
4965 };
4966 static const int order_2[] = {
4967 25,24,
4968 23,22,
4969 21,20,
4970 19,18,
4971 30,31,
4972 26,27,
4973 28,29,
4974 17,16,
4975 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4976 1,0,
4977 32,33,34,35
4978 };
4979
4980 const int *order = (TARGET_ORDER_1 ? order_1 :
4981 TARGET_ORDER_2 ? order_2 :
4982 order_0);
4983 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4984 reg_alloc_order[i] = order[i];
4985 }
4986
4987
4988 /* Mutually recursive subroutine of avr_rtx_costs for calculating the
4989 cost of an RTX operand given its context. X is the rtx of the
4990 operand, MODE is its mode, and OUTER is the rtx_code of this
4991 operand's parent operator. */
4992
4993 static int
4994 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
4995 bool speed)
4996 {
4997 enum rtx_code code = GET_CODE (x);
4998 int total;
4999
5000 switch (code)
5001 {
5002 case REG:
5003 case SUBREG:
5004 return 0;
5005
5006 case CONST_INT:
5007 case CONST_DOUBLE:
5008 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5009
5010 default:
5011 break;
5012 }
5013
5014 total = 0;
5015 avr_rtx_costs (x, code, outer, &total, speed);
5016 return total;
5017 }
5018
5019 /* The AVR backend's rtx_costs function. X is the rtx expression whose cost
5020 is to be calculated. Return true if the complete cost has been
5021 computed, and false if subexpressions should be scanned. In either
5022 case, *TOTAL contains the cost result. */
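/* Worked example (a sketch, not from the original sources): for
   (plus:HI (reg:HI 24) (const_int 5)) the PLUS/HImode case below sets
   *total to COSTS_N_INSNS (1), since the constant lies in the -63..63
   range; a non-constant addend would cost COSTS_N_INSNS (2) plus the
   cost of that operand.  */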
5023
5024 static bool
5025 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
5026 bool speed)
5027 {
5028 enum machine_mode mode = GET_MODE (x);
5029 HOST_WIDE_INT val;
5030
5031 switch (code)
5032 {
5033 case CONST_INT:
5034 case CONST_DOUBLE:
5035 /* Immediate constants are as cheap as registers. */
5036 *total = 0;
5037 return true;
5038
5039 case MEM:
5040 case CONST:
5041 case LABEL_REF:
5042 case SYMBOL_REF:
5043 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5044 return true;
5045
5046 case NEG:
5047 switch (mode)
5048 {
5049 case QImode:
5050 case SFmode:
5051 *total = COSTS_N_INSNS (1);
5052 break;
5053
5054 case HImode:
5055 *total = COSTS_N_INSNS (3);
5056 break;
5057
5058 case SImode:
5059 *total = COSTS_N_INSNS (7);
5060 break;
5061
5062 default:
5063 return false;
5064 }
5065 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5066 return true;
5067
5068 case ABS:
5069 switch (mode)
5070 {
5071 case QImode:
5072 case SFmode:
5073 *total = COSTS_N_INSNS (1);
5074 break;
5075
5076 default:
5077 return false;
5078 }
5079 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5080 return true;
5081
5082 case NOT:
5083 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5084 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5085 return true;
5086
5087 case ZERO_EXTEND:
5088 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5089 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5090 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5091 return true;
5092
5093 case SIGN_EXTEND:
5094 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5095 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5096 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5097 return true;
5098
5099 case PLUS:
5100 switch (mode)
5101 {
5102 case QImode:
5103 *total = COSTS_N_INSNS (1);
5104 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5105 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5106 break;
5107
5108 case HImode:
5109 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5110 {
5111 *total = COSTS_N_INSNS (2);
5112 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5113 }
5114 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5115 *total = COSTS_N_INSNS (1);
5116 else
5117 *total = COSTS_N_INSNS (2);
5118 break;
5119
5120 case SImode:
5121 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5122 {
5123 *total = COSTS_N_INSNS (4);
5124 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5125 }
5126 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5127 *total = COSTS_N_INSNS (1);
5128 else
5129 *total = COSTS_N_INSNS (4);
5130 break;
5131
5132 default:
5133 return false;
5134 }
5135 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5136 return true;
5137
5138 case MINUS:
5139 case AND:
5140 case IOR:
5141 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5142 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5143 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5144 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5145 return true;
5146
5147 case XOR:
5148 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5149 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5150 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5151 return true;
5152
5153 case MULT:
5154 switch (mode)
5155 {
5156 case QImode:
5157 if (AVR_HAVE_MUL)
5158 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5159 else if (!speed)
5160 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5161 else
5162 return false;
5163 break;
5164
5165 case HImode:
5166 if (AVR_HAVE_MUL)
5167 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5168 else if (!speed)
5169 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5170 else
5171 return false;
5172 break;
5173
5174 default:
5175 return false;
5176 }
5177 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5178 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5179 return true;
5180
5181 case DIV:
5182 case MOD:
5183 case UDIV:
5184 case UMOD:
5185 if (!speed)
5186 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5187 else
5188 return false;
5189 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5190 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5191 return true;
5192
5193 case ASHIFT:
5194 switch (mode)
5195 {
5196 case QImode:
5197 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5198 {
5199 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5200 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5201 }
5202 else
5203 {
5204 val = INTVAL (XEXP (x, 1));
5205 if (val == 7)
5206 *total = COSTS_N_INSNS (3);
5207 else if (val >= 0 && val <= 7)
5208 *total = COSTS_N_INSNS (val);
5209 else
5210 *total = COSTS_N_INSNS (1);
5211 }
5212 break;
5213
5214 case HImode:
5215 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5216 {
5217 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5218 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5219 }
5220 else
5221 switch (INTVAL (XEXP (x, 1)))
5222 {
5223 case 0:
5224 *total = 0;
5225 break;
5226 case 1:
5227 case 8:
5228 *total = COSTS_N_INSNS (2);
5229 break;
5230 case 9:
5231 *total = COSTS_N_INSNS (3);
5232 break;
5233 case 2:
5234 case 3:
5235 case 10:
5236 case 15:
5237 *total = COSTS_N_INSNS (4);
5238 break;
5239 case 7:
5240 case 11:
5241 case 12:
5242 *total = COSTS_N_INSNS (5);
5243 break;
5244 case 4:
5245 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5246 break;
5247 case 6:
5248 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5249 break;
5250 case 5:
5251 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5252 break;
5253 default:
5254 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5255 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5256 }
5257 break;
5258
5259 case SImode:
5260 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5261 {
5262 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5263 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5264 }
5265 else
5266 switch (INTVAL (XEXP (x, 1)))
5267 {
5268 case 0:
5269 *total = 0;
5270 break;
5271 case 24:
5272 *total = COSTS_N_INSNS (3);
5273 break;
5274 case 1:
5275 case 8:
5276 case 16:
5277 *total = COSTS_N_INSNS (4);
5278 break;
5279 case 31:
5280 *total = COSTS_N_INSNS (6);
5281 break;
5282 case 2:
5283 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5284 break;
5285 default:
5286 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5287 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5288 }
5289 break;
5290
5291 default:
5292 return false;
5293 }
5294 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5295 return true;
5296
5297 case ASHIFTRT:
5298 switch (mode)
5299 {
5300 case QImode:
5301 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5302 {
5303 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5304 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5305 }
5306 else
5307 {
5308 val = INTVAL (XEXP (x, 1));
5309 if (val == 6)
5310 *total = COSTS_N_INSNS (4);
5311 else if (val == 7)
5312 *total = COSTS_N_INSNS (2);
5313 else if (val >= 0 && val <= 7)
5314 *total = COSTS_N_INSNS (val);
5315 else
5316 *total = COSTS_N_INSNS (1);
5317 }
5318 break;
5319
5320 case HImode:
5321 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5322 {
5323 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5324 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5325 }
5326 else
5327 switch (INTVAL (XEXP (x, 1)))
5328 {
5329 case 0:
5330 *total = 0;
5331 break;
5332 case 1:
5333 *total = COSTS_N_INSNS (2);
5334 break;
5335 case 15:
5336 *total = COSTS_N_INSNS (3);
5337 break;
5338 case 2:
5339 case 7:
5340 case 8:
5341 case 9:
5342 *total = COSTS_N_INSNS (4);
5343 break;
5344 case 10:
5345 case 14:
5346 *total = COSTS_N_INSNS (5);
5347 break;
5348 case 11:
5349 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5350 break;
5351 case 12:
5352 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5353 break;
5354 case 6:
5355 case 13:
5356 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5357 break;
5358 default:
5359 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5360 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5361 }
5362 break;
5363
5364 case SImode:
5365 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5366 {
5367 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5368 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5369 }
5370 else
5371 switch (INTVAL (XEXP (x, 1)))
5372 {
5373 case 0:
5374 *total = 0;
5375 break;
5376 case 1:
5377 *total = COSTS_N_INSNS (4);
5378 break;
5379 case 8:
5380 case 16:
5381 case 24:
5382 *total = COSTS_N_INSNS (6);
5383 break;
5384 case 2:
5385 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5386 break;
5387 case 31:
5388 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5389 break;
5390 default:
5391 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5392 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5393 }
5394 break;
5395
5396 default:
5397 return false;
5398 }
5399 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5400 return true;
5401
5402 case LSHIFTRT:
5403 switch (mode)
5404 {
5405 case QImode:
5406 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5407 {
5408 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5409 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5410 }
5411 else
5412 {
5413 val = INTVAL (XEXP (x, 1));
5414 if (val == 7)
5415 *total = COSTS_N_INSNS (3);
5416 else if (val >= 0 && val <= 7)
5417 *total = COSTS_N_INSNS (val);
5418 else
5419 *total = COSTS_N_INSNS (1);
5420 }
5421 break;
5422
5423 case HImode:
5424 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5425 {
5426 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5427 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5428 }
5429 else
5430 switch (INTVAL (XEXP (x, 1)))
5431 {
5432 case 0:
5433 *total = 0;
5434 break;
5435 case 1:
5436 case 8:
5437 *total = COSTS_N_INSNS (2);
5438 break;
5439 case 9:
5440 *total = COSTS_N_INSNS (3);
5441 break;
5442 case 2:
5443 case 10:
5444 case 15:
5445 *total = COSTS_N_INSNS (4);
5446 break;
5447 case 7:
5448 case 11:
5449 *total = COSTS_N_INSNS (5);
5450 break;
5451 case 3:
5452 case 12:
5453 case 13:
5454 case 14:
5455 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5456 break;
5457 case 4:
5458 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5459 break;
5460 case 5:
5461 case 6:
5462 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5463 break;
5464 default:
5465 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5466 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5467 }
5468 break;
5469
5470 case SImode:
5471 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5472 {
5473 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5474 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5475 }
5476 else
5477 switch (INTVAL (XEXP (x, 1)))
5478 {
5479 case 0:
5480 *total = 0;
5481 break;
5482 case 1:
5483 *total = COSTS_N_INSNS (4);
5484 break;
5485 case 2:
5486 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5487 break;
5488 case 8:
5489 case 16:
5490 case 24:
5491 *total = COSTS_N_INSNS (4);
5492 break;
5493 case 31:
5494 *total = COSTS_N_INSNS (6);
5495 break;
5496 default:
5497 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5498 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5499 }
5500 break;
5501
5502 default:
5503 return false;
5504 }
5505 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5506 return true;
5507
5508 case COMPARE:
5509 switch (GET_MODE (XEXP (x, 0)))
5510 {
5511 case QImode:
5512 *total = COSTS_N_INSNS (1);
5513 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5514 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5515 break;
5516
5517 case HImode:
5518 *total = COSTS_N_INSNS (2);
5519 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5520 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5521 else if (INTVAL (XEXP (x, 1)) != 0)
5522 *total += COSTS_N_INSNS (1);
5523 break;
5524
5525 case SImode:
5526 *total = COSTS_N_INSNS (4);
5527 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5528 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5529 else if (INTVAL (XEXP (x, 1)) != 0)
5530 *total += COSTS_N_INSNS (3);
5531 break;
5532
5533 default:
5534 return false;
5535 }
5536 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5537 return true;
5538
5539 default:
5540 break;
5541 }
5542 return false;
5543 }
5544
5545 /* Calculate the cost of a memory address. */
5546
5547 static int
5548 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5549 {
5550 if (GET_CODE (x) == PLUS
5551 && GET_CODE (XEXP (x,1)) == CONST_INT
5552 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5553 && INTVAL (XEXP (x,1)) >= 61)
5554 return 18;
5555 if (CONSTANT_ADDRESS_P (x))
5556 {
5557 if (optimize > 0 && io_address_operand (x, QImode))
5558 return 2;
5559 return 4;
5560 }
5561 return 4;
5562 }
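
/* Worked example (illustrative, not from the original sources): the address
   (plus:HI (reg:HI 28) (const_int 64)) costs 18 because the displacement is
   at least 61, while the same form with (const_int 10) costs 4.  */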
5563
5564 /* Test for the extra memory constraint 'Q'.
5565 It's a memory address based on the Y or Z pointer with a valid displacement. */
5566
5567 int
5568 extra_constraint_Q (rtx x)
5569 {
5570 if (GET_CODE (XEXP (x,0)) == PLUS
5571 && REG_P (XEXP (XEXP (x,0), 0))
5572 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5573 && (INTVAL (XEXP (XEXP (x,0), 1))
5574 <= MAX_LD_OFFSET (GET_MODE (x))))
5575 {
5576 rtx xx = XEXP (XEXP (x,0), 0);
5577 int regno = REGNO (xx);
5578 if (TARGET_ALL_DEBUG)
5579 {
5580 fprintf (stderr, ("extra_constraint:\n"
5581 "reload_completed: %d\n"
5582 "reload_in_progress: %d\n"),
5583 reload_completed, reload_in_progress);
5584 debug_rtx (x);
5585 }
5586 if (regno >= FIRST_PSEUDO_REGISTER)
5587 return 1; /* allocate pseudos */
5588 else if (regno == REG_Z || regno == REG_Y)
5589 return 1; /* strictly check */
5590 else if (xx == frame_pointer_rtx
5591 || xx == arg_pointer_rtx)
5592 return 1; /* XXX frame & arg pointer checks */
5593 }
5594 return 0;
5595 }
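
/* Example of an address accepted by 'Q' (illustrative, not from the
   original sources):

       (mem:QI (plus:HI (reg:HI 28) (const_int 10)))

   i.e. a Y-based address whose displacement does not exceed
   MAX_LD_OFFSET (QImode) = 63.  */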
5596
5597 /* Convert condition code CONDITION to a condition code valid on the AVR. */
5598
5599 RTX_CODE
5600 avr_normalize_condition (RTX_CODE condition)
5601 {
5602 switch (condition)
5603 {
5604 case GT:
5605 return GE;
5606 case GTU:
5607 return GEU;
5608 case LE:
5609 return LT;
5610 case LEU:
5611 return LTU;
5612 default:
5613 gcc_unreachable ();
5614 }
5615 }
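
/* Worked example (a sketch, not from the original sources): avr_reorg below
   uses this together with bumping the constant operand, e.g. a compare
   against 5 followed by a GT branch becomes a compare against 6 followed by
   a GE branch, since "x > 5" and "x >= 6" are equivalent for integers.  */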
5616
5617 /* This function optimizes conditional jumps. */
5618
5619 static void
5620 avr_reorg (void)
5621 {
5622 rtx insn, pattern;
5623
5624 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5625 {
5626 if (! (GET_CODE (insn) == INSN
5627 || GET_CODE (insn) == CALL_INSN
5628 || GET_CODE (insn) == JUMP_INSN)
5629 || !single_set (insn))
5630 continue;
5631
5632 pattern = PATTERN (insn);
5633
5634 if (GET_CODE (pattern) == PARALLEL)
5635 pattern = XVECEXP (pattern, 0, 0);
5636 if (GET_CODE (pattern) == SET
5637 && SET_DEST (pattern) == cc0_rtx
5638 && compare_diff_p (insn))
5639 {
5640 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5641 {
5642 /* Now we work on the compare insn. */
5643
5644 pattern = SET_SRC (pattern);
5645 if (true_regnum (XEXP (pattern,0)) >= 0
5646 && true_regnum (XEXP (pattern,1)) >= 0 )
5647 {
5648 rtx x = XEXP (pattern,0);
5649 rtx next = next_real_insn (insn);
5650 rtx pat = PATTERN (next);
5651 rtx src = SET_SRC (pat);
5652 rtx t = XEXP (src,0);
5653 PUT_CODE (t, swap_condition (GET_CODE (t)));
5654 XEXP (pattern,0) = XEXP (pattern,1);
5655 XEXP (pattern,1) = x;
5656 INSN_CODE (next) = -1;
5657 }
5658 else if (true_regnum (XEXP (pattern,0)) >= 0
5659 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5660 {
5661 rtx x = XEXP (pattern,1);
5662 rtx next = next_real_insn (insn);
5663 rtx pat = PATTERN (next);
5664 rtx src = SET_SRC (pat);
5665 rtx t = XEXP (src,0);
5666 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5667
5668 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5669 {
5670 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5671 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5672 INSN_CODE (next) = -1;
5673 INSN_CODE (insn) = -1;
5674 }
5675 }
5676 }
5677 else if (true_regnum (SET_SRC (pattern)) >= 0)
5678 {
5679 /* This is a tst insn. */
5680 rtx next = next_real_insn (insn);
5681 rtx pat = PATTERN (next);
5682 rtx src = SET_SRC (pat);
5683 rtx t = XEXP (src,0);
5684
5685 PUT_CODE (t, swap_condition (GET_CODE (t)));
5686 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5687 SET_SRC (pattern));
5688 INSN_CODE (next) = -1;
5689 INSN_CODE (insn) = -1;
5690 }
5691 }
5692 }
5693 }
5694
5695 /* Returns the register number used for the function return value. */
5696
5697 int
5698 avr_ret_register (void)
5699 {
5700 return 24;
5701 }
5702
5703 /* Create an RTX representing the place where a
5704 library function returns a value of mode MODE. */
5705
5706 rtx
5707 avr_libcall_value (enum machine_mode mode)
5708 {
5709 int offs = GET_MODE_SIZE (mode);
5710 if (offs < 2)
5711 offs = 2;
5712 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5713 }
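
/* Example (illustrative, not from the original sources; assumes
   RET_REGISTER is 24, matching avr_ret_register above): an HImode libcall
   value lives in r24/r25, while an SImode value starts at r22
   (24 + 2 - 4) and occupies r22..r25.  */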
5714
5715 /* Create an RTX representing the place where a
5716 function returns a value of data type VALTYPE. */
5717
5718 rtx
5719 avr_function_value (const_tree type,
5720 const_tree func ATTRIBUTE_UNUSED,
5721 bool outgoing ATTRIBUTE_UNUSED)
5722 {
5723 unsigned int offs;
5724
5725 if (TYPE_MODE (type) != BLKmode)
5726 return avr_libcall_value (TYPE_MODE (type));
5727
5728 offs = int_size_in_bytes (type);
5729 if (offs < 2)
5730 offs = 2;
5731 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5732 offs = GET_MODE_SIZE (SImode);
5733 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5734 offs = GET_MODE_SIZE (DImode);
5735
5736 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5737 }
5738
5739 /* Places additional restrictions on the register class to
5740 use when it is necessary to copy value X into a register
5741 in class CLASS. */
5742
5743 enum reg_class
5744 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
5745 {
5746 return rclass;
5747 }
5748
5749 int
5750 test_hard_reg_class (enum reg_class rclass, rtx x)
5751 {
5752 int regno = true_regnum (x);
5753 if (regno < 0)
5754 return 0;
5755
5756 if (TEST_HARD_REG_CLASS (rclass, regno))
5757 return 1;
5758
5759 return 0;
5760 }
5761
5762
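/* Return nonzero if the jump INSN jumps over exactly one minimal-length
   insn, i.e. DEST lies exactly get_attr_length (insn) + 1 address units
   past INSN (a descriptive comment added here; the behaviour follows from
   the address arithmetic below).  */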
5763 int
5764 jump_over_one_insn_p (rtx insn, rtx dest)
5765 {
5766 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5767 ? XEXP (dest, 0)
5768 : dest);
5769 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5770 int dest_addr = INSN_ADDRESSES (uid);
5771 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5772 }
5773
5774 /* Returns 1 if a value of mode MODE can be stored starting with hard
5775 register number REGNO. On the enhanced core, anything larger than
5776 1 byte must start in an even-numbered register for "movw" to work
5777 (this way we don't have to check for odd registers everywhere). */
5778
5779 int
5780 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5781 {
5782 /* Disallow QImode in stack pointer regs. */
5783 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5784 return 0;
5785
5786 /* The only thing that can go into registers r28:r29 is a Pmode value. */
5787 if (regno == REG_Y && mode == Pmode)
5788 return 1;
5789
5790 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5791 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5792 return 0;
5793
5794 if (mode == QImode)
5795 return 1;
5796
5797 /* Modes larger than QImode occupy consecutive registers. */
5798 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5799 return 0;
5800
5801 /* All modes larger than QImode should start in an even register. */
5802 return !(regno & 1);
5803 }
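
/* Examples (illustrative, not from the original sources): HImode may start
   at r24 (even) but not at r25 (odd); QImode is rejected in the stack
   pointer registers; and anything overlapping the frame pointer pair
   r28:r29 is rejected except a Pmode value starting at r28.  */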
5804
5805 const char *
5806 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5807 {
5808 int tmp;
5809 if (!len)
5810 len = &tmp;
5811
5812 if (GET_CODE (operands[1]) == CONST_INT)
5813 {
5814 int val = INTVAL (operands[1]);
5815 if ((val & 0xff) == 0)
5816 {
5817 *len = 3;
5818 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5819 AS2 (ldi,%2,hi8(%1)) CR_TAB
5820 AS2 (mov,%B0,%2));
5821 }
5822 else if ((val & 0xff00) == 0)
5823 {
5824 *len = 3;
5825 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5826 AS2 (mov,%A0,%2) CR_TAB
5827 AS2 (mov,%B0,__zero_reg__));
5828 }
5829 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5830 {
5831 *len = 3;
5832 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5833 AS2 (mov,%A0,%2) CR_TAB
5834 AS2 (mov,%B0,%2));
5835 }
5836 }
5837 *len = 4;
5838 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5839 AS2 (mov,%A0,%2) CR_TAB
5840 AS2 (ldi,%2,hi8(%1)) CR_TAB
5841 AS2 (mov,%B0,%2));
5842 }
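
/* Example (illustrative, not from the original sources): for a constant
   like 0x1200, whose low byte is zero, only three instructions are needed;
   the low byte is copied from __zero_reg__ and only hi8(0x1200) goes
   through the scratch register %2.  */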
5843
5844
5845 const char *
5846 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5847 {
5848 rtx src = operands[1];
5849 int cnst = (GET_CODE (src) == CONST_INT);
5850
5851 if (len)
5852 {
5853 if (cnst)
5854 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5855 + ((INTVAL (src) & 0xff00) != 0)
5856 + ((INTVAL (src) & 0xff0000) != 0)
5857 + ((INTVAL (src) & 0xff000000) != 0);
5858 else
5859 *len = 8;
5860
5861 return "";
5862 }
5863
5864 if (cnst && ((INTVAL (src) & 0xff) == 0))
5865 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5866 else
5867 {
5868 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5869 output_asm_insn (AS2 (mov, %A0, %2), operands);
5870 }
5871 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5872 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5873 else
5874 {
5875 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5876 output_asm_insn (AS2 (mov, %B0, %2), operands);
5877 }
5878 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5879 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5880 else
5881 {
5882 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5883 output_asm_insn (AS2 (mov, %C0, %2), operands);
5884 }
5885 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5886 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5887 else
5888 {
5889 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5890 output_asm_insn (AS2 (mov, %D0, %2), operands);
5891 }
5892 return "";
5893 }
5894
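/* Output a "bld" instruction for bit BIT_NR of the multi-byte operand 0;
   e.g. bit_nr == 10 produces "bld %B0,2" (a descriptive comment added
   here, derived from the substitutions below).  */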
5895 void
5896 avr_output_bld (rtx operands[], int bit_nr)
5897 {
5898 static char s[] = "bld %A0,0";
5899
5900 s[5] = 'A' + (bit_nr >> 3);
5901 s[8] = '0' + (bit_nr & 7);
5902 output_asm_insn (s, operands);
5903 }
5904
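/* Output one jump-table element into the program-memory section: a gs()
   word on devices with jmp/call, an rjmp otherwise (a descriptive comment
   added here, derived from the code below).  */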
5905 void
5906 avr_output_addr_vec_elt (FILE *stream, int value)
5907 {
5908 switch_to_section (progmem_section);
5909 if (AVR_HAVE_JMP_CALL)
5910 fprintf (stream, "\t.word gs(.L%d)\n", value);
5911 else
5912 fprintf (stream, "\trjmp .L%d\n", value);
5913 }
5914
5915 /* Returns true if register REGNO is safe to be allocated as a scratch
5916 register (for a define_peephole2) in the current function. */
5917
5918 bool
5919 avr_hard_regno_scratch_ok (unsigned int regno)
5920 {
5921 /* Interrupt functions can only use registers that have already been saved
5922 by the prologue, even if they would normally be call-clobbered. */
5923
5924 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5925 && !df_regs_ever_live_p (regno))
5926 return false;
5927
5928 return true;
5929 }
5930
5931 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
5932
5933 int
5934 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
5935 unsigned int new_reg)
5936 {
5937 /* Interrupt functions can only use registers that have already been
5938 saved by the prologue, even if they would normally be
5939 call-clobbered. */
5940
5941 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5942 && !df_regs_ever_live_p (new_reg))
5943 return 0;
5944
5945 return 1;
5946 }
5947
5948 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5949 or memory location in the I/O space (QImode only).
5950
5951 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5952 Operand 1: register operand to test, or CONST_INT memory address.
5953 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5954 Operand 3: label to jump to if the test is true. */
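
/* Example of the typical output for a QImode register operand (a sketch,
   not from the original sources; register and label are only examples):
   testing bit 3 of r24 for EQ with a short, non-reversed branch emits

       sbrs r24,3
       rjmp .L5

   so the jump is taken exactly when the bit is zero.  */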
5955
5956 const char *
5957 avr_out_sbxx_branch (rtx insn, rtx operands[])
5958 {
5959 enum rtx_code comp = GET_CODE (operands[0]);
5960 int long_jump = (get_attr_length (insn) >= 4);
5961 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5962
5963 if (comp == GE)
5964 comp = EQ;
5965 else if (comp == LT)
5966 comp = NE;
5967
5968 if (reverse)
5969 comp = reverse_condition (comp);
5970
5971 if (GET_CODE (operands[1]) == CONST_INT)
5972 {
5973 if (INTVAL (operands[1]) < 0x40)
5974 {
5975 if (comp == EQ)
5976 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5977 else
5978 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5979 }
5980 else
5981 {
5982 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5983 if (comp == EQ)
5984 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5985 else
5986 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5987 }
5988 }
5989 else /* GET_CODE (operands[1]) == REG */
5990 {
5991 if (GET_MODE (operands[1]) == QImode)
5992 {
5993 if (comp == EQ)
5994 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5995 else
5996 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5997 }
5998 else /* HImode or SImode */
5999 {
6000 static char buf[] = "sbrc %A1,0";
6001 int bit_nr = exact_log2 (INTVAL (operands[2])
6002 & GET_MODE_MASK (GET_MODE (operands[1])));
6003
6004 buf[3] = (comp == EQ) ? 's' : 'c';
6005 buf[6] = 'A' + (bit_nr >> 3);
6006 buf[9] = '0' + (bit_nr & 7);
6007 output_asm_insn (buf, operands);
6008 }
6009 }
6010
6011 if (long_jump)
6012 return (AS1 (rjmp,.+4) CR_TAB
6013 AS1 (jmp,%3));
6014 if (!reverse)
6015 return AS1 (rjmp,%3);
6016 return "";
6017 }
6018
6019 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6020
6021 static void
6022 avr_asm_out_ctor (rtx symbol, int priority)
6023 {
6024 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6025 default_ctor_section_asm_out_constructor (symbol, priority);
6026 }
6027
6028 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6029
6030 static void
6031 avr_asm_out_dtor (rtx symbol, int priority)
6032 {
6033 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6034 default_dtor_section_asm_out_destructor (symbol, priority);
6035 }
6036
6037 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6038
6039 static bool
6040 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6041 {
6042 if (TYPE_MODE (type) == BLKmode)
6043 {
6044 HOST_WIDE_INT size = int_size_in_bytes (type);
6045 return (size == -1 || size > 8);
6046 }
6047 else
6048 return false;
6049 }
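
/* Example (illustrative, not from the original sources): a 10-byte
   structure is returned in memory, while an 8-byte one is returned in
   registers.  */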
6050
6051 #include "gt-avr.h"