avr.c (avr_base_arch_macro, [...]): Remove variables.
[gcc.git] / gcc / config / avr / avr.c
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "ggc.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "df.h"
47
/* Maximal allowed offset for an address in the LD command */
#define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))

/* Forward declarations of local helpers defined later in this file.  */
static int avr_naked_function_p (tree);
static int interrupt_function_p (tree);
static int signal_function_p (tree);
static int avr_OS_task_function_p (tree);
static int avr_regs_to_save (HARD_REG_SET *);
static int sequent_regs_live (void);
static const char *ptrreg_to_str (int);
static const char *cond_string (enum rtx_code);
static int avr_num_arg_regs (enum machine_mode, tree);

static RTX_CODE compare_condition (rtx insn);
static int compare_sign_p (rtx insn);
static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
const struct attribute_spec avr_attribute_table[];
static bool avr_assemble_integer (rtx, unsigned int, int);
static void avr_file_start (void);
static void avr_file_end (void);
static void avr_asm_function_end_prologue (FILE *);
static void avr_asm_function_begin_epilogue (FILE *);
static void avr_insert_attributes (tree, tree *);
static void avr_asm_init_sections (void);
static unsigned int avr_section_type_flags (tree, const char *, int);

/* Helpers registered in the target vector (targetm) below.  */
static void avr_reorg (void);
static void avr_asm_out_ctor (rtx, int);
static void avr_asm_out_dtor (rtx, int);
static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
static bool avr_rtx_costs (rtx, int, int, int *);
static int avr_address_cost (rtx);
static bool avr_return_in_memory (const_tree, const_tree);
static struct machine_function * avr_init_machine_status (void);
/* Allocate registers from r25 to r8 for parameters for function calls.
   NOTE(review): the value 26 appears to be one past the highest argument
   register (r25) — confirm against init_cumulative_args.  */
#define FIRST_CUM_REG 26

/* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
static GTY(()) rtx tmp_reg_rtx;

/* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
static GTY(()) rtx zero_reg_rtx;

/* AVR register names {"r0", "r1", ..., "r31"} */
static const char *const avr_regnames[] = REGISTER_NAMES;

/* This holds the last insn address; reset at the start of each prologue.  */
static int last_insn_address = 0;

/* Preprocessor macros to define depending on MCU type.  */
const char *avr_extra_arch_macro;

/* Current architecture (points into avr_arch_types[] below).  */
const struct base_arch_s *avr_current_arch;

/* Section used for data placed in program memory (progmem).  */
section *progmem_section;
106
/* Capability table for the supported AVR architecture variants,
   indexed by enum avr_arch below.  The field layout follows
   struct base_arch_s (declared outside this file, presumably avr.h);
   the final member is the __AVR_ARCH__ macro to predefine, or NULL.  */
static const struct base_arch_s avr_arch_types[] = {
  { 1, 0, 0, 0, 0, 0, 0, 0, NULL },  /* unknown device specified */
  { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
  { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
  { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
  { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
  { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
  { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
  { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
  { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
  { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
  { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
};

/* These names are used as the index into the avr_arch_types[] table
   above.  The order must match that table row for row.  */

enum avr_arch
{
  ARCH_UNKNOWN,
  ARCH_AVR1,
  ARCH_AVR2,
  ARCH_AVR25,
  ARCH_AVR3,
  ARCH_AVR31,
  ARCH_AVR35,
  ARCH_AVR4,
  ARCH_AVR5,
  ARCH_AVR51,
  ARCH_AVR6
};
138
/* Description of one supported MCU: its -mmcu= name, the architecture
   it belongs to, and the device macro to predefine (if any).  */
struct mcu_type_s {
  const char *const name;
  int arch;  /* index in avr_arch_types[] */
  /* Must lie outside user's namespace.  NULL == no macro.  */
  const char *const macro;
};

/* List of all known AVR MCU types - if updated, it has to be kept
   in sync in several places (FIXME: is there a better way?):
   - here
   - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
   - t-avr (MULTILIB_MATCHES)
   - gas/config/tc-avr.c
   - avr-libc
   The table is terminated by a NULL-name sentinel whose arch is
   ARCH_UNKNOWN; the lookup in avr_override_options relies on it.  */

static const struct mcu_type_s avr_mcu_types[] = {
  /* Classic, <= 8K.  */
  { "avr2", ARCH_AVR2, NULL },
  { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
  { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
  { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
  { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
  { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
  { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
  { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
  { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
  { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
  { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
  { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
  { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
  /* Classic + MOVW, <= 8K.  */
  { "avr25", ARCH_AVR25, NULL },
  { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
  { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
  { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
  { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
  { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
  { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
  { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
  { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
  { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
  { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
  { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
  { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
  { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
  { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
  { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
  /* Classic, > 8K, <= 64K.  */
  { "avr3", ARCH_AVR3, NULL },
  { "at43usb320", ARCH_AVR3, "__AVR_AT43USB320__" },
  { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
  { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
  /* Classic, == 128K.  */
  { "avr31", ARCH_AVR31, NULL },
  { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
  /* Classic + MOVW + JMP/CALL.  */
  { "avr35", ARCH_AVR35, NULL },
  { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
  { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
  /* Enhanced, <= 8K.  */
  { "avr4", ARCH_AVR4, NULL },
  { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
  { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
  { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
  { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
  { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
  { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
  { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
  { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
  { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
  { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
  { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
  { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
  { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
  /* Enhanced, > 8K, <= 64K.  */
  { "avr5", ARCH_AVR5, NULL },
  { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
  { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
  { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
  { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
  { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
  { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
  { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
  { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
  { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
  { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
  { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
  { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
  { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
  { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
  { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
  { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
  { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
  { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
  { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
  { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
  { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
  { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
  { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
  { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
  { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
  { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
  { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
  { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
  { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
  { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
  { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
  { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
  { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
  { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
  { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
  { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
  { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
  { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
  { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
  { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
  { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
  /* Enhanced, == 128K.  */
  { "avr51", ARCH_AVR51, NULL },
  { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
  { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
  { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
  { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
  { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
  { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
  { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
  /* 3-Byte PC.  */
  { "avr6", ARCH_AVR6, NULL },
  { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
  { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
  /* Assembler only.  */
  { "avr1", ARCH_AVR1, NULL },
  { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
  { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
  { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
  { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
  { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
  { NULL, ARCH_UNKNOWN, NULL }
};

/* Threshold above which a switch is expanded as a jump table; may be
   lowered in avr_override_options when table jumps are enabled.  */
int avr_case_values_threshold = 30000;
280 \f
/* Initialize the GCC target structure.  Each #undef/#define pair below
   overrides one default hook in TARGET_INITIALIZER with the AVR
   implementation defined in this file.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER avr_assemble_integer
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START avr_file_start
#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END avr_file_end

#undef TARGET_ASM_FUNCTION_END_PROLOGUE
#define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
#undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
#define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE avr_attribute_table
#undef TARGET_ASM_FUNCTION_RODATA_SECTION
#define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS avr_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST avr_address_cost
#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG avr_reorg

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY avr_return_in_memory

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

/* The one and only target hook vector for this backend.  */
struct gcc_target targetm = TARGET_INITIALIZER;
325 \f
326 void
327 avr_override_options (void)
328 {
329 const struct mcu_type_s *t;
330
331 flag_delete_null_pointer_checks = 0;
332
333 for (t = avr_mcu_types; t->name; t++)
334 if (strcmp (t->name, avr_mcu_name) == 0)
335 break;
336
337 if (!t->name)
338 {
339 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
340 avr_mcu_name);
341 for (t = avr_mcu_types; t->name; t++)
342 fprintf (stderr," %s\n", t->name);
343 }
344
345 avr_current_arch = &avr_arch_types[t->arch];
346 avr_extra_arch_macro = t->macro;
347
348 if (optimize && !TARGET_NO_TABLEJUMP)
349 avr_case_values_threshold =
350 (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
351
352 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
353 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
354
355 init_machine_status = avr_init_machine_status;
356 }
357
/* return register class from register number.
   Indexed by hard register number 0..33; see avr_regno_reg_class.  */

static const int reg_class_tab[]={
  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
  GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
  GENERAL_REGS, /* r0 - r15 */
  LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
  LD_REGS,                      /* r16 - 23 */
  ADDW_REGS,ADDW_REGS,          /* r24,r25 */
  POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
  POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
  POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
  STACK_REG,STACK_REG           /* SPL,SPH */
};
373
374 /* Function to set up the backend function structure. */
375
376 static struct machine_function *
377 avr_init_machine_status (void)
378 {
379 return ((struct machine_function *)
380 ggc_alloc_cleared (sizeof (struct machine_function)));
381 }
382
383 /* Return register class for register R. */
384
385 enum reg_class
386 avr_regno_reg_class (int r)
387 {
388 if (r <= 33)
389 return reg_class_tab[r];
390 return ALL_REGS;
391 }
392
393 /* Return nonzero if FUNC is a naked function. */
394
395 static int
396 avr_naked_function_p (tree func)
397 {
398 tree a;
399
400 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
401
402 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
403 return a != NULL_TREE;
404 }
405
406 /* Return nonzero if FUNC is an interrupt function as specified
407 by the "interrupt" attribute. */
408
409 static int
410 interrupt_function_p (tree func)
411 {
412 tree a;
413
414 if (TREE_CODE (func) != FUNCTION_DECL)
415 return 0;
416
417 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
418 return a != NULL_TREE;
419 }
420
421 /* Return nonzero if FUNC is a signal function as specified
422 by the "signal" attribute. */
423
424 static int
425 signal_function_p (tree func)
426 {
427 tree a;
428
429 if (TREE_CODE (func) != FUNCTION_DECL)
430 return 0;
431
432 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
433 return a != NULL_TREE;
434 }
435
436 /* Return nonzero if FUNC is a OS_task function. */
437
438 static int
439 avr_OS_task_function_p (tree func)
440 {
441 tree a;
442
443 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
444
445 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
446 return a != NULL_TREE;
447 }
448
/* Return the number of hard registers to push/pop in the prologue/epilogue
   of the current function, and optionally store these registers in SET
   (SET may be NULL when only the count is needed).  */

static int
avr_regs_to_save (HARD_REG_SET *set)
{
  int reg, count;
  int int_or_sig_p = (interrupt_function_p (current_function_decl)
		      || signal_function_p (current_function_decl));

  /* Compute leafness only before reload; afterwards the cached value in
     cfun->machine is reused so prologue and epilogue agree.  */
  if (!reload_completed)
    cfun->machine->is_leaf = leaf_function_p ();

  if (set)
    CLEAR_HARD_REG_SET (*set);
  count = 0;

  /* No need to save any registers if the function never returns or
     has the "OS_task" attribute.  */
  if (TREE_THIS_VOLATILE (current_function_decl)
      || cfun->machine->is_OS_task)
    return 0;

  for (reg = 0; reg < 32; reg++)
    {
      /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
	 any global register variables.  */
      if (fixed_regs[reg])
	continue;

      /* Save a register if (a) this is a non-leaf interrupt/signal
	 handler and the register is call-used, or (b) the register is
	 ever live and must survive (always for handlers, otherwise only
	 call-saved regs), excluding the Y pair when it serves as the
	 frame pointer (it is saved separately).  */
      if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
	  || (df_regs_ever_live_p (reg)
	      && (int_or_sig_p || !call_used_regs[reg])
	      && !(frame_pointer_needed
		   && (reg == REG_Y || reg == (REG_Y+1)))))
	{
	  if (set)
	    SET_HARD_REG_BIT (*set, reg);
	  count++;
	}
    }
  return count;
}
492
493 /* Compute offset between arg_pointer and frame_pointer. */
494
495 int
496 initial_elimination_offset (int from, int to)
497 {
498 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
499 return 0;
500 else
501 {
502 int offset = frame_pointer_needed ? 2 : 0;
503 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
504
505 offset += avr_regs_to_save (NULL);
506 return get_frame_size () + (avr_pc_size) + 1 + offset;
507 }
508 }
509
510 /* Return 1 if the function epilogue is just a single "ret". */
511
512 int
513 avr_simple_epilogue (void)
514 {
515 return (! frame_pointer_needed
516 && get_frame_size () == 0
517 && avr_regs_to_save (NULL) == 0
518 && ! interrupt_function_p (current_function_decl)
519 && ! signal_function_p (current_function_decl)
520 && ! avr_naked_function_p (current_function_decl)
521 && ! TREE_THIS_VOLATILE (current_function_decl));
522 }
523
/* This function checks sequence of live registers.
   It counts, over the call-saved registers r0..r17 plus the Y pair,
   both the total number of live ones (live_seq) and the length of the
   trailing unbroken run (cur_seq).  The result is nonzero only when
   every live register belongs to one contiguous sequence — the shape
   required by the __prologue_saves__/__epilogue_restores__ helpers
   used by the TARGET_CALL_PROLOGUES paths below.  */

static int
sequent_regs_live (void)
{
  int reg;
  int live_seq=0;
  int cur_seq=0;

  for (reg = 0; reg < 18; ++reg)
    {
      if (!call_used_regs[reg])
	{
	  if (df_regs_ever_live_p (reg))
	    {
	      ++live_seq;
	      ++cur_seq;
	    }
	  else
	    cur_seq = 0;	/* gap: the run is broken here */
	}
    }

  if (!frame_pointer_needed)
    {
      /* Y (r28/r29) participates like any other call-saved register.  */
      if (df_regs_ever_live_p (REG_Y))
	{
	  ++live_seq;
	  ++cur_seq;
	}
      else
	cur_seq = 0;

      if (df_regs_ever_live_p (REG_Y+1))
	{
	  ++live_seq;
	  ++cur_seq;
	}
      else
	cur_seq = 0;
    }
  else
    {
      /* With a frame pointer, the Y pair is always saved/restored.  */
      cur_seq += 2;
      live_seq += 2;
    }
  /* Nonzero only when all live registers form a single run.  */
  return (cur_seq == live_seq) ? live_seq : 0;
}
572
573 /* Output function prologue. */
574
575 void
576 expand_prologue (void)
577 {
578 int live_seq;
579 HARD_REG_SET set;
580 int minimize;
581 HOST_WIDE_INT size = get_frame_size();
582 /* Define templates for push instructions. */
583 rtx pushbyte = gen_rtx_MEM (QImode,
584 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
585 rtx pushword = gen_rtx_MEM (HImode,
586 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
587 rtx insn;
588
589 last_insn_address = 0;
590
591 /* Init cfun->machine. */
592 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
593 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
594 cfun->machine->is_signal = signal_function_p (current_function_decl);
595 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
596
597 /* Prologue: naked. */
598 if (cfun->machine->is_naked)
599 {
600 return;
601 }
602
603 avr_regs_to_save (&set);
604 live_seq = sequent_regs_live ();
605 minimize = (TARGET_CALL_PROLOGUES
606 && !cfun->machine->is_interrupt
607 && !cfun->machine->is_signal
608 && !cfun->machine->is_OS_task
609 && live_seq);
610
611 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
612 {
613 if (cfun->machine->is_interrupt)
614 {
615 /* Enable interrupts. */
616 insn = emit_insn (gen_enable_interrupt ());
617 RTX_FRAME_RELATED_P (insn) = 1;
618 }
619
620 /* Push zero reg. */
621 insn = emit_move_insn (pushbyte, zero_reg_rtx);
622 RTX_FRAME_RELATED_P (insn) = 1;
623
624 /* Push tmp reg. */
625 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
626 RTX_FRAME_RELATED_P (insn) = 1;
627
628 /* Push SREG. */
629 insn = emit_move_insn (tmp_reg_rtx,
630 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
631 RTX_FRAME_RELATED_P (insn) = 1;
632 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
633 RTX_FRAME_RELATED_P (insn) = 1;
634
635 /* Push RAMPZ. */
636 if(AVR_HAVE_RAMPZ
637 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
638 {
639 insn = emit_move_insn (tmp_reg_rtx,
640 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
641 RTX_FRAME_RELATED_P (insn) = 1;
642 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
643 RTX_FRAME_RELATED_P (insn) = 1;
644 }
645
646 /* Clear zero reg. */
647 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
648 RTX_FRAME_RELATED_P (insn) = 1;
649
650 /* Prevent any attempt to delete the setting of ZERO_REG! */
651 emit_insn (gen_rtx_USE (VOIDmode, zero_reg_rtx));
652 }
653 if (minimize && (frame_pointer_needed || live_seq > 6))
654 {
655 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
656 gen_int_mode (size, HImode));
657 RTX_FRAME_RELATED_P (insn) = 1;
658
659 insn =
660 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
661 gen_int_mode (size + live_seq, HImode)));
662 RTX_FRAME_RELATED_P (insn) = 1;
663 }
664 else
665 {
666 int reg;
667 for (reg = 0; reg < 32; ++reg)
668 {
669 if (TEST_HARD_REG_BIT (set, reg))
670 {
671 /* Emit push of register to save. */
672 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
673 RTX_FRAME_RELATED_P (insn) = 1;
674 }
675 }
676 if (frame_pointer_needed)
677 {
678 if(!cfun->machine->is_OS_task)
679 {
680 /* Push frame pointer. */
681 insn = emit_move_insn (pushword, frame_pointer_rtx);
682 RTX_FRAME_RELATED_P (insn) = 1;
683 }
684
685 if (!size)
686 {
687 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
688 RTX_FRAME_RELATED_P (insn) = 1;
689 }
690 else
691 {
692 /* Creating a frame can be done by direct manipulation of the
693 stack or via the frame pointer. These two methods are:
694 fp=sp
695 fp-=size
696 sp=fp
697 OR
698 sp-=size
699 fp=sp
700 the optimum method depends on function type, stack and frame size.
701 To avoid a complex logic, both methods are tested and shortest
702 is selected. */
703 rtx myfp;
704 /* First method. */
705 if (TARGET_TINY_STACK)
706 {
707 if (size < -63 || size > 63)
708 warning (0, "large frame pointer change (%d) with -mtiny-stack", size);
709
710 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
711 over 'sbiw' (2 cycles, same size). */
712 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
713 }
714 else
715 {
716 /* Normal sized addition. */
717 myfp = frame_pointer_rtx;
718 }
719 /* Calculate length. */
720 int method1_length;
721 method1_length =
722 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
723 method1_length +=
724 get_attr_length (gen_move_insn (myfp,
725 gen_rtx_PLUS (GET_MODE(myfp), myfp,
726 gen_int_mode (-size,
727 GET_MODE(myfp)))));
728 method1_length +=
729 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
730
731 /* Method 2-Adjust Stack pointer. */
732 int sp_plus_length = 0;
733 if (size <= 6)
734 {
735 sp_plus_length =
736 get_attr_length (gen_move_insn (stack_pointer_rtx,
737 gen_rtx_PLUS (HImode, stack_pointer_rtx,
738 gen_int_mode (-size,
739 HImode))));
740 sp_plus_length +=
741 get_attr_length (gen_move_insn (frame_pointer_rtx, stack_pointer_rtx));
742 }
743 /* Use shortest method. */
744 if (size <= 6 && (sp_plus_length < method1_length))
745 {
746 insn = emit_move_insn (stack_pointer_rtx,
747 gen_rtx_PLUS (HImode, stack_pointer_rtx,
748 gen_int_mode (-size, HImode)));
749 RTX_FRAME_RELATED_P (insn) = 1;
750 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
751 RTX_FRAME_RELATED_P (insn) = 1;
752 }
753 else
754 {
755 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
756 RTX_FRAME_RELATED_P (insn) = 1;
757 insn = emit_move_insn (myfp,
758 gen_rtx_PLUS (GET_MODE(myfp), myfp,
759 gen_int_mode (-size, GET_MODE(myfp))));
760 RTX_FRAME_RELATED_P (insn) = 1;
761 insn = emit_move_insn ( stack_pointer_rtx, frame_pointer_rtx);
762 RTX_FRAME_RELATED_P (insn) = 1;
763 }
764 }
765 }
766 }
767 }
768
769 /* Output summary at end of function prologue. */
770
771 static void
772 avr_asm_function_end_prologue (FILE *file)
773 {
774 if (cfun->machine->is_naked)
775 {
776 fputs ("/* prologue: naked */\n", file);
777 }
778 else
779 {
780 if (cfun->machine->is_interrupt)
781 {
782 fputs ("/* prologue: Interrupt */\n", file);
783 }
784 else if (cfun->machine->is_signal)
785 {
786 fputs ("/* prologue: Signal */\n", file);
787 }
788 else
789 fputs ("/* prologue: function */\n", file);
790 }
791 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
792 get_frame_size());
793 }
794
795
796 /* Implement EPILOGUE_USES. */
797
798 int
799 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
800 {
801 if (reload_completed
802 && cfun->machine
803 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
804 return 1;
805 return 0;
806 }
807
808 /* Output RTL epilogue. */
809
810 void
811 expand_epilogue (void)
812 {
813 int reg;
814 int live_seq;
815 HARD_REG_SET set;
816 int minimize;
817 HOST_WIDE_INT size = get_frame_size();
818
819 /* epilogue: naked */
820 if (cfun->machine->is_naked)
821 {
822 emit_jump_insn (gen_return ());
823 return;
824 }
825
826 avr_regs_to_save (&set);
827 live_seq = sequent_regs_live ();
828 minimize = (TARGET_CALL_PROLOGUES
829 && !cfun->machine->is_interrupt
830 && !cfun->machine->is_signal
831 && !cfun->machine->is_OS_task
832 && live_seq);
833
834 if (minimize && (frame_pointer_needed || live_seq > 4))
835 {
836 if (frame_pointer_needed)
837 {
838 /* Get rid of frame. */
839 emit_move_insn(frame_pointer_rtx,
840 gen_rtx_PLUS (HImode, frame_pointer_rtx,
841 gen_int_mode (size, HImode)));
842 }
843 else
844 {
845 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
846 }
847
848 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
849 }
850 else
851 {
852 if (frame_pointer_needed)
853 {
854 if (size)
855 {
856 /* Try two methods to adjust stack and select shortest. */
857 int fp_plus_length;
858 /* Method 1-Adjust frame pointer. */
859 fp_plus_length =
860 get_attr_length (gen_move_insn (frame_pointer_rtx,
861 gen_rtx_PLUS (HImode, frame_pointer_rtx,
862 gen_int_mode (size,
863 HImode))));
864 /* Copy to stack pointer. */
865 fp_plus_length +=
866 get_attr_length (gen_move_insn (stack_pointer_rtx, frame_pointer_rtx));
867
868 /* Method 2-Adjust Stack pointer. */
869 int sp_plus_length = 0;
870 if (size <= 5)
871 {
872 sp_plus_length =
873 get_attr_length (gen_move_insn (stack_pointer_rtx,
874 gen_rtx_PLUS (HImode, stack_pointer_rtx,
875 gen_int_mode (size,
876 HImode))));
877 }
878 /* Use shortest method. */
879 if (size <= 5 && (sp_plus_length < fp_plus_length))
880 {
881 emit_move_insn (stack_pointer_rtx,
882 gen_rtx_PLUS (HImode, stack_pointer_rtx,
883 gen_int_mode (size, HImode)));
884 }
885 else
886 {
887 emit_move_insn (frame_pointer_rtx,
888 gen_rtx_PLUS (HImode, frame_pointer_rtx,
889 gen_int_mode (size, HImode)));
890 /* Copy to stack pointer. */
891 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
892 }
893 }
894 if(!cfun->machine->is_OS_task)
895 {
896 /* Restore previous frame_pointer. */
897 emit_insn (gen_pophi (frame_pointer_rtx));
898 }
899 }
900 /* Restore used registers. */
901 for (reg = 31; reg >= 0; --reg)
902 {
903 if (TEST_HARD_REG_BIT (set, reg))
904 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
905 }
906 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
907 {
908 /* Restore RAMPZ using tmp reg as scratch. */
909 if(AVR_HAVE_RAMPZ
910 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
911 {
912 emit_insn (gen_popqi (tmp_reg_rtx));
913 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
914 tmp_reg_rtx);
915 }
916
917 /* Restore SREG using tmp reg as scratch. */
918 emit_insn (gen_popqi (tmp_reg_rtx));
919
920 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
921 tmp_reg_rtx);
922
923 /* Restore tmp REG. */
924 emit_insn (gen_popqi (tmp_reg_rtx));
925
926 /* Restore zero REG. */
927 emit_insn (gen_popqi (zero_reg_rtx));
928 }
929
930 emit_jump_insn (gen_return ());
931 }
932 }
933
/* Output summary messages at beginning of function epilogue.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fputs ("/* epilogue start */\n", file);
}
941
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.  The nonzero value returned
   is the register class that can hold the address base (used here as a
   truthy value; callers treat any nonzero result as "legitimate").  */

int
legitimate_address_p (enum machine_mode mode, rtx x, int strict)
{
  enum reg_class r = NO_REGS;

  if (TARGET_ALL_DEBUG)
    {
      fprintf (stderr, "mode: (%s) %s %s %s %s:",
	       GET_MODE_NAME(mode),
	       strict ? "(strict)": "",
	       reload_completed ? "(reload_completed)": "",
	       reload_in_progress ? "(reload_in_progress)": "",
	       reg_renumber ? "(reg_renumber)" : "");
      if (GET_CODE (x) == PLUS
	  && REG_P (XEXP (x, 0))
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
	  && reg_renumber
	  )
	fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
		 true_regnum (XEXP (x, 0)));
      debug_rtx (x);
    }
  /* Plain register base: any pointer register will do.  */
  if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
                    : REG_OK_FOR_BASE_NOSTRICT_P (x)))
    r = POINTER_REGS;
  else if (CONSTANT_ADDRESS_P (x))
    r = ALL_REGS;
  /* Base + constant displacement.  */
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
	   && GET_CODE (XEXP (x, 1)) == CONST_INT
	   && INTVAL (XEXP (x, 1)) >= 0)
    {
      int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
      if (fit)
	{
	  /* Small offsets work with Y or Z as base (LDD/STD range).  */
	  if (! strict
	      || REGNO (XEXP (x,0)) == REG_Y
	      || REGNO (XEXP (x,0)) == REG_Z)
	    r = BASE_POINTER_REGS;
	  if (XEXP (x,0) == frame_pointer_rtx
	      || XEXP (x,0) == arg_pointer_rtx)
	    r = BASE_POINTER_REGS;
	}
      /* Oversized offsets are only tolerated off the frame pointer.  */
      else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
	r = POINTER_Y_REGS;
    }
  /* Pre-decrement / post-increment addressing.  */
  else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
	   && REG_P (XEXP (x, 0))
	   && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
	       : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
    {
      r = POINTER_REGS;
    }
  if (TARGET_ALL_DEBUG)
    {
      /* Debug only; r + '0' is a rough single-character class code.  */
      fprintf (stderr, " ret = %c\n", r + '0');
    }
  return r == NO_REGS ? 0 : (int)r;
}
1006
1007 /* Attempts to replace X with a valid
1008 memory address for an operand of mode MODE */
1009
1010 rtx
1011 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1012 {
1013 x = oldx;
1014 if (TARGET_ALL_DEBUG)
1015 {
1016 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1017 debug_rtx (oldx);
1018 }
1019
1020 if (GET_CODE (oldx) == PLUS
1021 && REG_P (XEXP (oldx,0)))
1022 {
1023 if (REG_P (XEXP (oldx,1)))
1024 x = force_reg (GET_MODE (oldx), oldx);
1025 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1026 {
1027 int offs = INTVAL (XEXP (oldx,1));
1028 if (frame_pointer_rtx != XEXP (oldx,0))
1029 if (offs > MAX_LD_OFFSET (mode))
1030 {
1031 if (TARGET_ALL_DEBUG)
1032 fprintf (stderr, "force_reg (big offset)\n");
1033 x = force_reg (GET_MODE (oldx), oldx);
1034 }
1035 }
1036 }
1037 return x;
1038 }
1039
1040
1041 /* Return a pointer register name as a string. */
1042
1043 static const char *
1044 ptrreg_to_str (int regno)
1045 {
1046 switch (regno)
1047 {
1048 case REG_X: return "X";
1049 case REG_Y: return "Y";
1050 case REG_Z: return "Z";
1051 default:
1052 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1053 }
1054 return NULL;
1055 }
1056
1057 /* Return the condition name as a string.
1058 Used in conditional jump constructing */
1059
1060 static const char *
1061 cond_string (enum rtx_code code)
1062 {
1063 switch (code)
1064 {
1065 case NE:
1066 return "ne";
1067 case EQ:
1068 return "eq";
1069 case GE:
1070 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1071 return "pl";
1072 else
1073 return "ge";
1074 case LT:
1075 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1076 return "mi";
1077 else
1078 return "lt";
1079 case GEU:
1080 return "sh";
1081 case LTU:
1082 return "lo";
1083 default:
1084 gcc_unreachable ();
1085 }
1086 }
1087
1088 /* Output ADDR to FILE as address. */
1089
1090 void
1091 print_operand_address (FILE *file, rtx addr)
1092 {
1093 switch (GET_CODE (addr))
1094 {
1095 case REG:
1096 fprintf (file, ptrreg_to_str (REGNO (addr)));
1097 break;
1098
1099 case PRE_DEC:
1100 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1101 break;
1102
1103 case POST_INC:
1104 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1105 break;
1106
1107 default:
1108 if (CONSTANT_ADDRESS_P (addr)
1109 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1110 || GET_CODE (addr) == LABEL_REF))
1111 {
1112 fprintf (file, "gs(");
1113 output_addr_const (file,addr);
1114 fprintf (file ,")");
1115 }
1116 else
1117 output_addr_const (file, addr);
1118 }
1119 }
1120
1121
1122 /* Output X as assembler operand to file FILE. */
1123
1124 void
1125 print_operand (FILE *file, rtx x, int code)
1126 {
1127 int abcd = 0;
1128
1129 if (code >= 'A' && code <= 'D')
1130 abcd = code - 'A';
1131
1132 if (code == '~')
1133 {
1134 if (!AVR_HAVE_JMP_CALL)
1135 fputc ('r', file);
1136 }
1137 else if (code == '!')
1138 {
1139 if (AVR_HAVE_EIJMP_EICALL)
1140 fputc ('e', file);
1141 }
1142 else if (REG_P (x))
1143 {
1144 if (x == zero_reg_rtx)
1145 fprintf (file, "__zero_reg__");
1146 else
1147 fprintf (file, reg_names[true_regnum (x) + abcd]);
1148 }
1149 else if (GET_CODE (x) == CONST_INT)
1150 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1151 else if (GET_CODE (x) == MEM)
1152 {
1153 rtx addr = XEXP (x,0);
1154
1155 if (CONSTANT_P (addr) && abcd)
1156 {
1157 fputc ('(', file);
1158 output_address (addr);
1159 fprintf (file, ")+%d", abcd);
1160 }
1161 else if (code == 'o')
1162 {
1163 if (GET_CODE (addr) != PLUS)
1164 fatal_insn ("bad address, not (reg+disp):", addr);
1165
1166 print_operand (file, XEXP (addr, 1), 0);
1167 }
1168 else if (code == 'p' || code == 'r')
1169 {
1170 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1171 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1172
1173 if (code == 'p')
1174 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1175 else
1176 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1177 }
1178 else if (GET_CODE (addr) == PLUS)
1179 {
1180 print_operand_address (file, XEXP (addr,0));
1181 if (REGNO (XEXP (addr, 0)) == REG_X)
1182 fatal_insn ("internal compiler error. Bad address:"
1183 ,addr);
1184 fputc ('+', file);
1185 print_operand (file, XEXP (addr,1), code);
1186 }
1187 else
1188 print_operand_address (file, addr);
1189 }
1190 else if (GET_CODE (x) == CONST_DOUBLE)
1191 {
1192 long val;
1193 REAL_VALUE_TYPE rv;
1194 if (GET_MODE (x) != SFmode)
1195 fatal_insn ("internal compiler error. Unknown mode:", x);
1196 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1197 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1198 fprintf (file, "0x%lx", val);
1199 }
1200 else if (code == 'j')
1201 fputs (cond_string (GET_CODE (x)), file);
1202 else if (code == 'k')
1203 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1204 else
1205 print_operand_address (file, x);
1206 }
1207
/* Update the condition code in the INSN.

   Tracks what INSN leaves in the (cc0-style) condition-code status,
   driven by the insn's "cc" attribute from the machine description.
   cc_status describes which flags are valid and for which value, so
   later compare/branch insns can be elided or simplified.  */

void
notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
{
  rtx set;

  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_SET_N:
      /* N is set, but nothing else is reliable: forget everything.  */
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      /* Insn sets Z and N according to its single SET destination;
         the V flag is known clear.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
	{
	  cc_status.flags |= CC_NO_OVERFLOW;
	  cc_status.value1 = SET_DEST (set);
	}
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
	 The V flag may or may not be known but that's ok because
	 alter_cond will change tests to use EQ/NE.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
	{
	  cc_status.value1 = SET_DEST (set);
	  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
	}
      break;

    case CC_COMPARE:
      /* A compare insn: flags describe the SET source (the compare
         expression), not a register value.  */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
	cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;

      /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
      set = single_set (insn);
      if (set)
	{
	  rtx src = SET_SRC (set);

	  if (GET_CODE (src) == ASHIFTRT
	      && GET_MODE (src) == QImode)
	    {
	      rtx x = XEXP (src, 1);

	      /* Those shift sequences do end with a flag-setting
	         instruction on the result, so Z/N are usable after all.  */
	      if (GET_CODE (x) == CONST_INT
		  && INTVAL (x) > 0
		  && INTVAL (x) != 6)
	        {
		  cc_status.value1 = SET_DEST (set);
		  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
	        }
	    }
	}
      break;
    }
}
1282
1283 /* Return maximum number of consecutive registers of
1284 class CLASS needed to hold a value of mode MODE. */
1285
1286 int
1287 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1288 {
1289 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1290 }
1291
1292 /* Choose mode for jump insn:
1293 1 - relative jump in range -63 <= x <= 62 ;
1294 2 - relative jump in range -2046 <= x <= 2045 ;
1295 3 - absolute jump (only for ATmega[16]03). */
1296
1297 int
1298 avr_jump_mode (rtx x, rtx insn)
1299 {
1300 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1301 ? XEXP (x, 0) : x));
1302 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1303 int jump_distance = cur_addr - dest_addr;
1304
1305 if (-63 <= jump_distance && jump_distance <= 62)
1306 return 1;
1307 else if (-2046 <= jump_distance && jump_distance <= 2045)
1308 return 2;
1309 else if (AVR_HAVE_JMP_CALL)
1310 return 3;
1311
1312 return 2;
1313 }
1314
/* Return an AVR conditional jump sequence.
   X is a comparison RTX.
   LEN is a number returned by avr_jump_mode function: 1 means the
   target is in short-branch range, 2 means rjmp range, anything else
   needs an absolute jmp.
   If REVERSE is nonzero then the condition code in X must be reversed.

   GT/GTU/LE/LEU have no single branch instruction on AVR, so they are
   synthesized from breq plus the corresponding GE/LT-style branch;
   for out-of-range targets the condition is inverted to skip over an
   rjmp/jmp.  The signed cases pick brpl/brmi instead of brge/brlt
   when the previous insn left the V flag unusable (only N is valid).  */

const char *
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  switch (cond)
    {
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
			    AS1 (brpl,%0)) :
		len == 2 ? (AS1 (breq,.+4) CR_TAB
			    AS1 (brmi,.+2) CR_TAB
			    AS1 (rjmp,%0)) :
		(AS1 (breq,.+6) CR_TAB
		 AS1 (brmi,.+4) CR_TAB
		 AS1 (jmp,%0)));

      else
	return (len == 1 ? (AS1 (breq,.+2) CR_TAB
			    AS1 (brge,%0)) :
		len == 2 ? (AS1 (breq,.+4) CR_TAB
			    AS1 (brlt,.+2) CR_TAB
			    AS1 (rjmp,%0)) :
		(AS1 (breq,.+6) CR_TAB
		 AS1 (brlt,.+4) CR_TAB
		 AS1 (jmp,%0)));
    case GTU:
      return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                          AS1 (brsh,%0)) :
	      len == 2 ? (AS1 (breq,.+4) CR_TAB
                          AS1 (brlo,.+2) CR_TAB
                          AS1 (rjmp,%0)) :
	      (AS1 (breq,.+6) CR_TAB
	       AS1 (brlo,.+4) CR_TAB
	       AS1 (jmp,%0)));
    case LE:
      /* LE branches to the target on EQ as well, hence breq,%0.  */
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
	return (len == 1 ? (AS1 (breq,%0) CR_TAB
			    AS1 (brmi,%0)) :
		len == 2 ? (AS1 (breq,.+2) CR_TAB
			    AS1 (brpl,.+2) CR_TAB
			    AS1 (rjmp,%0)) :
		(AS1 (breq,.+2) CR_TAB
		 AS1 (brpl,.+4) CR_TAB
		 AS1 (jmp,%0)));
      else
	return (len == 1 ? (AS1 (breq,%0) CR_TAB
			    AS1 (brlt,%0)) :
		len == 2 ? (AS1 (breq,.+2) CR_TAB
			    AS1 (brge,.+2) CR_TAB
			    AS1 (rjmp,%0)) :
		(AS1 (breq,.+2) CR_TAB
		 AS1 (brge,.+4) CR_TAB
		 AS1 (jmp,%0)));
    case LEU:
      return (len == 1 ? (AS1 (breq,%0) CR_TAB
			  AS1 (brlo,%0)) :
	      len == 2 ? (AS1 (breq,.+2) CR_TAB
			  AS1 (brsh,.+2) CR_TAB
			  AS1 (rjmp,%0)) :
	      (AS1 (breq,.+2) CR_TAB
	       AS1 (brsh,.+4) CR_TAB
	       AS1 (jmp,%0)));
    default:
      /* All remaining conditions have a direct branch instruction;
	 %j1/%k1 expand to the (reversed) condition via print_operand.  */
      if (reverse)
	{
	  switch (len)
	    {
	    case 1:
	      return AS1 (br%k1,%0);
	    case 2:
	      return (AS1 (br%j1,.+2) CR_TAB
		      AS1 (rjmp,%0));
	    default:
	      return (AS1 (br%j1,.+4) CR_TAB
		      AS1 (jmp,%0));
	    }
	}
	else
	  {
	    switch (len)
	      {
	      case 1:
		return AS1 (br%j1,%0);
	      case 2:
		return (AS1 (br%k1,.+2) CR_TAB
			AS1 (rjmp,%0));
	      default:
		return (AS1 (br%k1,.+4) CR_TAB
			AS1 (jmp,%0));
	      }
	  }
    }
  /* Not reached: every switch case returns.  */
  return "";
}
1416
1417 /* Predicate function for immediate operand which fits to byte (8bit) */
1418
1419 int
1420 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1421 {
1422 return (GET_CODE (op) == CONST_INT
1423 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1424 }
1425
1426 /* Output all insn addresses and their sizes into the assembly language
1427 output file. This is helpful for debugging whether the length attributes
1428 in the md file are correct.
1429 Output insn cost for next insn. */
1430
1431 void
1432 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1433 int num_operands ATTRIBUTE_UNUSED)
1434 {
1435 int uid = INSN_UID (insn);
1436
1437 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1438 {
1439 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1440 INSN_ADDRESSES (uid),
1441 INSN_ADDRESSES (uid) - last_insn_address,
1442 rtx_cost (PATTERN (insn), INSN));
1443 }
1444 last_insn_address = INSN_ADDRESSES (uid);
1445 }
1446
1447 /* Return 0 if undefined, 1 if always true or always false. */
1448
1449 int
1450 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1451 {
1452 unsigned int max = (mode == QImode ? 0xff :
1453 mode == HImode ? 0xffff :
1454 mode == SImode ? 0xffffffff : 0);
1455 if (max && operator && GET_CODE (x) == CONST_INT)
1456 {
1457 if (unsigned_condition (operator) != operator)
1458 max >>= 1;
1459
1460 if (max != (INTVAL (x) & max)
1461 && INTVAL (x) != 0xff)
1462 return 1;
1463 }
1464 return 0;
1465 }
1466
1467
/* Returns nonzero if REGNO is the number of a hard
   register in which function arguments are sometimes passed.
   On AVR that is the range r8..r25.  */

int
function_arg_regno_p (int r)
{
  return r >= 8 && r <= 25;
}
1476
1477 /* Initializing the variable cum for the state at the beginning
1478 of the argument list. */
1479
1480 void
1481 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1482 tree fndecl ATTRIBUTE_UNUSED)
1483 {
1484 cum->nregs = 18;
1485 cum->regno = FIRST_CUM_REG;
1486 if (!libname && fntype)
1487 {
1488 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1489 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1490 != void_type_node));
1491 if (stdarg)
1492 cum->nregs = 0;
1493 }
1494 }
1495
1496 /* Returns the number of registers to allocate for a function argument. */
1497
1498 static int
1499 avr_num_arg_regs (enum machine_mode mode, tree type)
1500 {
1501 int size;
1502
1503 if (mode == BLKmode)
1504 size = int_size_in_bytes (type);
1505 else
1506 size = GET_MODE_SIZE (mode);
1507
1508 /* Align all function arguments to start in even-numbered registers.
1509 Odd-sized arguments leave holes above them. */
1510
1511 return (size + 1) & ~1;
1512 }
1513
1514 /* Controls whether a function argument is passed
1515 in a register, and which register. */
1516
1517 rtx
1518 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1519 int named ATTRIBUTE_UNUSED)
1520 {
1521 int bytes = avr_num_arg_regs (mode, type);
1522
1523 if (cum->nregs && bytes <= cum->nregs)
1524 return gen_rtx_REG (mode, cum->regno - bytes);
1525
1526 return NULL_RTX;
1527 }
1528
1529 /* Update the summarizer variable CUM to advance past an argument
1530 in the argument list. */
1531
1532 void
1533 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1534 int named ATTRIBUTE_UNUSED)
1535 {
1536 int bytes = avr_num_arg_regs (mode, type);
1537
1538 cum->nregs -= bytes;
1539 cum->regno -= bytes;
1540
1541 if (cum->nregs <= 0)
1542 {
1543 cum->nregs = 0;
1544 cum->regno = FIRST_CUM_REG;
1545 }
1546 }
1547
/***********************************************************************
 Functions for outputting various mov's for a various modes
************************************************************************/

/* Output an assembler template for the QImode move
   OPERANDS[0] = OPERANDS[1] in INSN.  If L is non-null this is a
   length query: only store the instruction count in *L and emit
   nothing; otherwise the instructions are returned/emitted.  */

const char *
output_movqi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;	/* NULL <=> really output code.  */

  if (!l)
    l = &dummy;

  *l = 1;		/* Most cases below are a single instruction.  */

  if (register_operand (dest, QImode))
    {
      if (register_operand (src, QImode)) /* mov r,r */
	{
	  /* Moves to/from the stack pointer are I/O accesses.  */
	  if (test_hard_reg_class (STACK_REG, dest))
	    return AS2 (out,%0,%1);
	  else if (test_hard_reg_class (STACK_REG, src))
	    return AS2 (in,%0,%1);

	  return AS2 (mov,%0,%1);
	}
      else if (CONSTANT_P (src))
	{
	  /* ldi only works on the upper registers (LD_REGS).  */
	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
	    return AS2 (ldi,%0,lo8(%1));

	  if (GET_CODE (src) == CONST_INT)
	    {
	      if (src == const0_rtx) /* mov r,L */
		return AS1 (clr,%0);
	      else if (src == const1_rtx)
		{
		  *l = 2;
		  return (AS1 (clr,%0) CR_TAB
			  AS1 (inc,%0));
		}
	      else if (src == constm1_rtx)
		{
		  /* Immediate constants -1 to any register */
		  *l = 2;
		  return (AS1 (clr,%0) CR_TAB
			  AS1 (dec,%0));
		}
	      else
		{
		  /* A power of two can be built with set + bld.  */
		  int bit_nr = exact_log2 (INTVAL (src));

		  if (bit_nr >= 0)
		    {
		      *l = 3;
		      if (!real_l)
			output_asm_insn ((AS1 (clr,%0) CR_TAB
					  "set"), operands);
		      if (!real_l)
			avr_output_bld (operands, bit_nr);

		      return "";
		    }
		}
	    }

	  /* Last resort, larger than loading from memory.  */
	  *l = 4;
	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
		  AS2 (ldi,r31,lo8(%1)) CR_TAB
		  AS2 (mov,%0,r31) CR_TAB
		  AS2 (mov,r31,__tmp_reg__));
	}
      else if (GET_CODE (src) == MEM)
	return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *template;

      /* Store a zero through the fixed zero register instead of
	 materializing the constant.  */
      if (src == const0_rtx)
	operands[1] = zero_reg_rtx;

      template = out_movqi_mr_r (insn, operands, real_l);

      if (!real_l)
	output_asm_insn (template, operands);

      /* Restore the original operand for any later processing.  */
      operands[1] = src;
    }
  return "";
}
1641
1642
/* Output an assembler template for the HImode move
   OPERANDS[0] = OPERANDS[1] in INSN.  If L is non-null this is a
   length query: only store the instruction count in *L and emit
   nothing; otherwise the instructions are returned/emitted.  */

const char *
output_movhi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;	/* NULL <=> really output code.  */

  if (!l)
    l = &dummy;

  if (register_operand (dest, HImode))
    {
      if (register_operand (src, HImode)) /* mov r,r */
	{
	  /* Writing SP must not let an interrupt observe a half-updated
	     pointer, hence the interrupt-masking variants below.  */
	  if (test_hard_reg_class (STACK_REG, dest))
	    {
	      if (TARGET_TINY_STACK)
		{
		  /* SP is a single byte on tiny-stack devices.  */
		  *l = 1;
		  return AS2 (out,__SP_L__,%A1);
		}
	      /* Use simple load of stack pointer if no interrupts are 
		 used or inside main or signal function prologue where
		 they disabled.  */
	      else if (TARGET_NO_INTERRUPTS 
		       || (reload_completed 
			   && cfun->machine->is_signal 
			   && prologue_epilogue_contains (insn)))
		{
		  *l = 2;
		  return (AS2 (out,__SP_H__,%B1) CR_TAB
			  AS2 (out,__SP_L__,%A1));
		}
	      /* In interrupt prolog we know interrupts are enabled.  */
	      else if (reload_completed 
		       && cfun->machine->is_interrupt
		       && prologue_epilogue_contains (insn))
		{
		  *l = 4;
		  return ("cli" CR_TAB
			  AS2 (out,__SP_H__,%B1) CR_TAB
			  "sei" CR_TAB
			  AS2 (out,__SP_L__,%A1));
		}
	      /* General case: save SREG, disable interrupts around the
		 high-byte write, restore SREG (re-enabling I if set).  */
	      *l = 5;
	      return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
		      "cli" CR_TAB
		      AS2 (out,__SP_H__,%B1) CR_TAB
		      AS2 (out,__SREG__,__tmp_reg__) CR_TAB
		      AS2 (out,__SP_L__,%A1));
	    }
	  else if (test_hard_reg_class (STACK_REG, src))
	    {
	      /* Reading SP needs no locking.  */
	      *l = 2;
	      return (AS2 (in,%A0,__SP_L__) CR_TAB
		      AS2 (in,%B0,__SP_H__));
	    }

	  if (AVR_HAVE_MOVW)
	    {
	      *l = 1;
	      return (AS2 (movw,%0,%1));
	    }
	  else
	    {
	      *l = 2;
	      return (AS2 (mov,%A0,%A1) CR_TAB
		      AS2 (mov,%B0,%B1));
	    }
	}
      else if (CONSTANT_P (src))
	{
	  /* ldi only works on the upper registers (LD_REGS).  */
	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
	    {
	      *l = 2;
	      return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
		      AS2 (ldi,%B0,hi8(%1)));
	    }

	  if (GET_CODE (src) == CONST_INT)
	    {
	      if (src == const0_rtx) /* mov r,L */
		{
		  *l = 2;
		  return (AS1 (clr,%A0) CR_TAB
			  AS1 (clr,%B0));
		}
	      else if (src == const1_rtx)
		{
		  *l = 3;
		  return (AS1 (clr,%A0) CR_TAB
			  AS1 (clr,%B0) CR_TAB
			  AS1 (inc,%A0));
		}
	      else if (src == constm1_rtx)
		{
		  /* Immediate constants -1 to any register */
		  *l = 3;
		  return (AS1 (clr,%0)  CR_TAB
			  AS1 (dec,%A0) CR_TAB
			  AS2 (mov,%B0,%A0));
		}
	      else
		{
		  /* A power of two can be built with set + bld.  */
		  int bit_nr = exact_log2 (INTVAL (src));

		  if (bit_nr >= 0)
		    {
		      *l = 4;
		      if (!real_l)
			output_asm_insn ((AS1 (clr,%A0) CR_TAB
					  AS1 (clr,%B0) CR_TAB
					  "set"), operands);
		      if (!real_l)
			avr_output_bld (operands, bit_nr);

		      return "";
		    }
		}

	      /* One of the bytes is zero: only the other one needs to
		 be materialized (through r31, since DEST is not an
		 LD register here).  */
	      if ((INTVAL (src) & 0xff) == 0)
		{
		  *l = 5;
		  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
			  AS1 (clr,%A0) CR_TAB
			  AS2 (ldi,r31,hi8(%1)) CR_TAB
			  AS2 (mov,%B0,r31) CR_TAB
			  AS2 (mov,r31,__tmp_reg__));
		}
	      else if ((INTVAL (src) & 0xff00) == 0)
		{
		  *l = 5;
		  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
			  AS2 (ldi,r31,lo8(%1)) CR_TAB
			  AS2 (mov,%A0,r31) CR_TAB
			  AS1 (clr,%B0) CR_TAB
			  AS2 (mov,r31,__tmp_reg__));
		}
	    }

	  /* Last resort, equal to loading from memory.  */
	  *l = 6;
	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
		  AS2 (ldi,r31,lo8(%1)) CR_TAB
		  AS2 (mov,%A0,r31) CR_TAB
		  AS2 (ldi,r31,hi8(%1)) CR_TAB
		  AS2 (mov,%B0,r31) CR_TAB
		  AS2 (mov,r31,__tmp_reg__));
	}
      else if (GET_CODE (src) == MEM)
	return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *template;

      /* Store a zero through the fixed zero register instead of
	 materializing the constant.  */
      if (src == const0_rtx)
	operands[1] = zero_reg_rtx;

      template = out_movhi_mr_r (insn, operands, real_l);

      if (!real_l)
	output_asm_insn (template, operands);

      /* Restore the original operand for any later processing.  */
      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
1813
/* Output the assembler for a QImode load OP[0] = OP[1], where OP[1]
   is a MEM and OP[0] a register.  If L is non-null, store the
   instruction count in *L (length-attribute queries); the template
   is always returned.  */

const char *
out_movqi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);	/* The address inside the MEM.  */
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
	{
	  *l = 1;
	  return AS2 (in,%0,__SREG__);
	}
      /* An address in I/O space can use the shorter "in"; the -0x20
	 rebases the data address to the I/O address space.  */
      if (optimize > 0 && io_address_operand (x, QImode))
	{
	  *l = 1;
	  return AS2 (in,%0,%1-0x20);
	}
      *l = 2;
      return AS2 (lds,%0,%1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x,0))
	   && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
	{
	  /* Displacement exceeds the ldd range: only Y is expected
	     here; temporarily adjust the pointer around the access.  */
	  int disp = INTVAL (XEXP (x,1));
	  if (REGNO (XEXP (x,0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
			    AS2 (ldd,%0,Y+63)     CR_TAB
			    AS2 (sbiw,r28,%o1-63));

	  return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
			  AS2 (ld,%0,Y)            CR_TAB
			  AS2 (subi,r28,lo8(%o1))  CR_TAB
			  AS2 (sbci,r29,hi8(%o1)));
	}
      else if (REGNO (XEXP (x,0)) == REG_X)
	{
	  /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
	     it but I have this situation with extremal optimizing options.
	     X has no displacement form; adjust, load, and restore X
	     unless it is dead after this insn.  */
	  if (reg_overlap_mentioned_p (dest, XEXP (x,0))
	      || reg_unused_after (insn, XEXP (x,0)))
	    return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
			    AS2 (ld,%0,X));

	  return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
			  AS2 (ld,%0,X)      CR_TAB
			  AS2 (sbiw,r26,%o1));
	}
      *l = 1;
      return AS2 (ldd,%0,%1);
    }
  *l = 1;
  return AS2 (ld,%0,%1);
}
1881
/* Output the assembler for a HImode load OP[0] = OP[1], where OP[1]
   is a MEM and OP[0] a register.  If L is non-null, store the
   instruction count in *L (length-attribute queries); the template
   is always returned.  */

const char *
out_movhi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers.  */
  int mem_volatile_p = MEM_VOLATILE_P (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_dest == reg_base)         /* R = (R) */
	{
	  /* Destination overlaps the pointer: buffer the low byte in
	     __tmp_reg__ so the pointer survives until the high load.  */
	  *l = 3;
	  return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
		  AS2 (ld,%B0,%1) CR_TAB
		  AS2 (mov,%A0,__tmp_reg__));
	}
      else if (reg_base == REG_X)        /* (R26) */
	{
	  /* X has no displacement form: post-increment, then restore
	     unless X is dead after this insn.  */
	  if (reg_unused_after (insn, base))
	    {
	      *l = 2;
	      return (AS2 (ld,%A0,X+) CR_TAB
		      AS2 (ld,%B0,X));
	    }
	  *l  = 3;
	  return (AS2 (ld,%A0,X+) CR_TAB
		  AS2 (ld,%B0,X) CR_TAB
		  AS2 (sbiw,r26,1));
	}
      else                      /* (R)  */
	{
	  *l = 2;
	  return (AS2 (ld,%A0,%1)    CR_TAB
		  AS2 (ldd,%B0,%1+1));
	}
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));	/* shadows outer reg_base */

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
	{
	  /* Displacement exceeds the ldd range: only Y is expected
	     here; temporarily adjust the pointer around the access.  */
	  if (REGNO (XEXP (base, 0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
			    AS2 (ldd,%A0,Y+62)    CR_TAB
			    AS2 (ldd,%B0,Y+63)    CR_TAB
			    AS2 (sbiw,r28,%o1-62));

	  return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
			  AS2 (ld,%A0,Y)           CR_TAB
			  AS2 (ldd,%B0,Y+1)        CR_TAB
			  AS2 (subi,r28,lo8(%o1))  CR_TAB
			  AS2 (sbci,r29,hi8(%o1)));
	}
      if (reg_base == REG_X)
	{
	  /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
	     it but I have this situation with extremal
	     optimization options.  */

	  *l = 4;
	  if (reg_base == reg_dest)
	    return (AS2 (adiw,r26,%o1)      CR_TAB
		    AS2 (ld,__tmp_reg__,X+) CR_TAB
		    AS2 (ld,%B0,X)          CR_TAB
		    AS2 (mov,%A0,__tmp_reg__));

	  return (AS2 (adiw,r26,%o1) CR_TAB
		  AS2 (ld,%A0,X+)    CR_TAB
		  AS2 (ld,%B0,X)     CR_TAB
		  AS2 (sbiw,r26,%o1+1));
	}

      if (reg_base == reg_dest)
	{
	  /* Destination overlaps the pointer: buffer the low byte.  */
	  *l = 3;
	  return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
		  AS2 (ldd,%B0,%B1)         CR_TAB
		  AS2 (mov,%A0,__tmp_reg__));
	}

      *l = 2;
      return (AS2 (ldd,%A0,%A1) CR_TAB
	      AS2 (ldd,%B0,%B1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
	fatal_insn ("incorrect insn:", insn);

      if (mem_volatile_p)
	{
	  /* Low byte must be read first: rewrite the two pre-decrements
	     as an explicit pointer adjustment plus ascending loads.  */
	  if (REGNO (XEXP (base, 0)) == REG_X)
	    {
	      *l = 4;
	      return (AS2 (sbiw,r26,2)  CR_TAB
		      AS2 (ld,%A0,X+)   CR_TAB
		      AS2 (ld,%B0,X)    CR_TAB
		      AS2 (sbiw,r26,1));
	    }
	  else
	    {
	      *l = 3;
	      return (AS2 (sbiw,%r1,2)   CR_TAB
		      AS2 (ld,%A0,%p1)  CR_TAB
		      AS2 (ldd,%B0,%p1+1));
	    }
	}

      *l = 2;
      return (AS2 (ld,%B0,%1) CR_TAB
	      AS2 (ld,%A0,%1));
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
	fatal_insn ("incorrect insn:", insn);

      *l = 2;
      return (AS2 (ld,%A0,%1)  CR_TAB
	      AS2 (ld,%B0,%1));
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* An address in I/O space can use the shorter "in"; the -0x20
	 rebases the data address to the I/O address space.  */
      if (optimize > 0 && io_address_operand (base, HImode))
	{
	  *l = 2;
	  return (AS2 (in,%A0,%A1-0x20) CR_TAB
		  AS2 (in,%B0,%B1-0x20));
	}
      *l = 4;
      return (AS2 (lds,%A0,%A1) CR_TAB
	      AS2 (lds,%B0,%B1));
    }
  
  fatal_insn ("unknown move insn:",insn);
  return "";
}
2034
/* Output the assembler for a SImode load OP[0] = OP[1], where OP[1]
   is a MEM and OP[0] a register.  If L is non-null, store the
   instruction count in *L (length-attribute queries); the template
   is always returned.  */

const char *
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;
  
  if (reg_base > 0)
    {
      if (reg_base == REG_X)        /* (R26) */
        {
          if (reg_dest == REG_X)
	    /* "ld r26,-X" is undefined */
	    return *l=7, (AS2 (adiw,r26,3)        CR_TAB
			  AS2 (ld,r29,X)          CR_TAB
			  AS2 (ld,r28,-X)         CR_TAB
			  AS2 (ld,__tmp_reg__,-X) CR_TAB
			  AS2 (sbiw,r26,1)        CR_TAB
			  AS2 (ld,r26,X)          CR_TAB
			  AS2 (mov,r27,__tmp_reg__));
          else if (reg_dest == REG_X - 2)
	    /* Destination r24..r27 overlaps X in the top half: buffer
	       the third byte so X survives until the last load.  */
            return *l=5, (AS2 (ld,%A0,X+)          CR_TAB
                          AS2 (ld,%B0,X+)          CR_TAB
                          AS2 (ld,__tmp_reg__,X+)  CR_TAB
                          AS2 (ld,%D0,X)           CR_TAB
                          AS2 (mov,%C0,__tmp_reg__));
          else if (reg_unused_after (insn, base))
            return  *l=4, (AS2 (ld,%A0,X+)  CR_TAB
                           AS2 (ld,%B0,X+) CR_TAB
                           AS2 (ld,%C0,X+) CR_TAB
                           AS2 (ld,%D0,X));
          else
            return  *l=5, (AS2 (ld,%A0,X+)  CR_TAB
                           AS2 (ld,%B0,X+) CR_TAB
                           AS2 (ld,%C0,X+) CR_TAB
                           AS2 (ld,%D0,X)  CR_TAB
                           AS2 (sbiw,r26,3));
        }
      else
        {
	  /* Overlapping cases load around the pointer register,
	     buffering the byte that would clobber it.  */
          if (reg_dest == reg_base)
            return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
                          AS2 (ldd,%C0,%1+2) CR_TAB
                          AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
                          AS2 (ld,%A0,%1)  CR_TAB
                          AS2 (mov,%B0,__tmp_reg__));
          else if (reg_base == reg_dest + 2)
            return *l=5, (AS2 (ld ,%A0,%1)             CR_TAB
                          AS2 (ldd,%B0,%1+1)          CR_TAB
                          AS2 (ldd,__tmp_reg__,%1+2)  CR_TAB
                          AS2 (ldd,%D0,%1+3)          CR_TAB
                          AS2 (mov,%C0,__tmp_reg__));
          else
            return *l=4, (AS2 (ld ,%A0,%1)   CR_TAB
                          AS2 (ldd,%B0,%1+1) CR_TAB
                          AS2 (ldd,%C0,%1+2) CR_TAB
                          AS2 (ldd,%D0,%1+3));
        }
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      
      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
	{
	  /* Displacement exceeds the ldd range: only Y is expected
	     here; temporarily adjust the pointer around the access.  */
	  if (REGNO (XEXP (base, 0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
			    AS2 (ldd,%A0,Y+60)    CR_TAB
			    AS2 (ldd,%B0,Y+61)    CR_TAB
			    AS2 (ldd,%C0,Y+62)    CR_TAB
			    AS2 (ldd,%D0,Y+63)    CR_TAB
			    AS2 (sbiw,r28,%o1-60));

	  return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
			  AS2 (ld,%A0,Y)           CR_TAB
			  AS2 (ldd,%B0,Y+1)        CR_TAB
			  AS2 (ldd,%C0,Y+2)        CR_TAB
			  AS2 (ldd,%D0,Y+3)        CR_TAB
			  AS2 (subi,r28,lo8(%o1))  CR_TAB
			  AS2 (sbci,r29,hi8(%o1)));
	}

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
	{
	  /* R = (X + d) */
	  if (reg_dest == REG_X)
	    {
	      *l = 7;
	      /* "ld r26,-X" is undefined */
	      return (AS2 (adiw,r26,%o1+3)    CR_TAB
		      AS2 (ld,r29,X)          CR_TAB
		      AS2 (ld,r28,-X)         CR_TAB
		      AS2 (ld,__tmp_reg__,-X) CR_TAB
		      AS2 (sbiw,r26,1)        CR_TAB
		      AS2 (ld,r26,X)          CR_TAB
		      AS2 (mov,r27,__tmp_reg__));
	    }
	  *l = 6;
	  if (reg_dest == REG_X - 2)
	    return (AS2 (adiw,r26,%o1)      CR_TAB
		    AS2 (ld,r24,X+)         CR_TAB
		    AS2 (ld,r25,X+)         CR_TAB
		    AS2 (ld,__tmp_reg__,X+) CR_TAB
		    AS2 (ld,r27,X)          CR_TAB
		    AS2 (mov,r26,__tmp_reg__));

	  return (AS2 (adiw,r26,%o1) CR_TAB
		  AS2 (ld,%A0,X+)    CR_TAB
		  AS2 (ld,%B0,X+)    CR_TAB
		  AS2 (ld,%C0,X+)    CR_TAB
		  AS2 (ld,%D0,X)     CR_TAB
		  AS2 (sbiw,r26,%o1+3));
	}
      if (reg_dest == reg_base)
        return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
                      AS2 (ldd,%C0,%C1) CR_TAB
                      AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
                      AS2 (ldd,%A0,%A1) CR_TAB
                      AS2 (mov,%B0,__tmp_reg__));
      else if (reg_dest == reg_base - 2)
        return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
                      AS2 (ldd,%B0,%B1) CR_TAB
                      AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
                      AS2 (ldd,%D0,%D1) CR_TAB
                      AS2 (mov,%C0,__tmp_reg__));
      return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
                    AS2 (ldd,%B0,%B1) CR_TAB
                    AS2 (ldd,%C0,%C1) CR_TAB
                    AS2 (ldd,%D0,%D1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, (AS2 (ld,%D0,%1) CR_TAB
		  AS2 (ld,%C0,%1) CR_TAB
		  AS2 (ld,%B0,%1) CR_TAB
		  AS2 (ld,%A0,%1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, (AS2 (ld,%A0,%1) CR_TAB
		  AS2 (ld,%B0,%1) CR_TAB
		  AS2 (ld,%C0,%1) CR_TAB
		  AS2 (ld,%D0,%1));
  else if (CONSTANT_ADDRESS_P (base))
      return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
		    AS2 (lds,%B0,%B1) CR_TAB
		    AS2 (lds,%C0,%C1) CR_TAB
		    AS2 (lds,%D0,%D1));
    
  fatal_insn ("unknown move insn:",insn);
  return "";
}
2195
2196 const char *
2197 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2198 {
2199 rtx dest = op[0];
2200 rtx src = op[1];
2201 rtx base = XEXP (dest, 0);
2202 int reg_base = true_regnum (base);
2203 int reg_src = true_regnum (src);
2204 int tmp;
2205
2206 if (!l)
2207 l = &tmp;
2208
2209 if (CONSTANT_ADDRESS_P (base))
2210 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2211 AS2 (sts,%B0,%B1) CR_TAB
2212 AS2 (sts,%C0,%C1) CR_TAB
2213 AS2 (sts,%D0,%D1));
2214 if (reg_base > 0) /* (r) */
2215 {
2216 if (reg_base == REG_X) /* (R26) */
2217 {
2218 if (reg_src == REG_X)
2219 {
2220 /* "st X+,r26" is undefined */
2221 if (reg_unused_after (insn, base))
2222 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2223 AS2 (st,X,r26) CR_TAB
2224 AS2 (adiw,r26,1) CR_TAB
2225 AS2 (st,X+,__tmp_reg__) CR_TAB
2226 AS2 (st,X+,r28) CR_TAB
2227 AS2 (st,X,r29));
2228 else
2229 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2230 AS2 (st,X,r26) CR_TAB
2231 AS2 (adiw,r26,1) CR_TAB
2232 AS2 (st,X+,__tmp_reg__) CR_TAB
2233 AS2 (st,X+,r28) CR_TAB
2234 AS2 (st,X,r29) CR_TAB
2235 AS2 (sbiw,r26,3));
2236 }
2237 else if (reg_base == reg_src + 2)
2238 {
2239 if (reg_unused_after (insn, base))
2240 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2241 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2242 AS2 (st,%0+,%A1) CR_TAB
2243 AS2 (st,%0+,%B1) CR_TAB
2244 AS2 (st,%0+,__zero_reg__) CR_TAB
2245 AS2 (st,%0,__tmp_reg__) CR_TAB
2246 AS1 (clr,__zero_reg__));
2247 else
2248 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2249 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2250 AS2 (st,%0+,%A1) CR_TAB
2251 AS2 (st,%0+,%B1) CR_TAB
2252 AS2 (st,%0+,__zero_reg__) CR_TAB
2253 AS2 (st,%0,__tmp_reg__) CR_TAB
2254 AS1 (clr,__zero_reg__) CR_TAB
2255 AS2 (sbiw,r26,3));
2256 }
2257 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2258 AS2 (st,%0+,%B1) CR_TAB
2259 AS2 (st,%0+,%C1) CR_TAB
2260 AS2 (st,%0,%D1) CR_TAB
2261 AS2 (sbiw,r26,3));
2262 }
2263 else
2264 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2265 AS2 (std,%0+1,%B1) CR_TAB
2266 AS2 (std,%0+2,%C1) CR_TAB
2267 AS2 (std,%0+3,%D1));
2268 }
2269 else if (GET_CODE (base) == PLUS) /* (R + i) */
2270 {
2271 int disp = INTVAL (XEXP (base, 1));
2272 reg_base = REGNO (XEXP (base, 0));
2273 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2274 {
2275 if (reg_base != REG_Y)
2276 fatal_insn ("incorrect insn:",insn);
2277
2278 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2279 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2280 AS2 (std,Y+60,%A1) CR_TAB
2281 AS2 (std,Y+61,%B1) CR_TAB
2282 AS2 (std,Y+62,%C1) CR_TAB
2283 AS2 (std,Y+63,%D1) CR_TAB
2284 AS2 (sbiw,r28,%o0-60));
2285
2286 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2287 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2288 AS2 (st,Y,%A1) CR_TAB
2289 AS2 (std,Y+1,%B1) CR_TAB
2290 AS2 (std,Y+2,%C1) CR_TAB
2291 AS2 (std,Y+3,%D1) CR_TAB
2292 AS2 (subi,r28,lo8(%o0)) CR_TAB
2293 AS2 (sbci,r29,hi8(%o0)));
2294 }
2295 if (reg_base == REG_X)
2296 {
2297 /* (X + d) = R */
2298 if (reg_src == REG_X)
2299 {
2300 *l = 9;
2301 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2302 AS2 (mov,__zero_reg__,r27) CR_TAB
2303 AS2 (adiw,r26,%o0) CR_TAB
2304 AS2 (st,X+,__tmp_reg__) CR_TAB
2305 AS2 (st,X+,__zero_reg__) CR_TAB
2306 AS2 (st,X+,r28) CR_TAB
2307 AS2 (st,X,r29) CR_TAB
2308 AS1 (clr,__zero_reg__) CR_TAB
2309 AS2 (sbiw,r26,%o0+3));
2310 }
2311 else if (reg_src == REG_X - 2)
2312 {
2313 *l = 9;
2314 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2315 AS2 (mov,__zero_reg__,r27) CR_TAB
2316 AS2 (adiw,r26,%o0) CR_TAB
2317 AS2 (st,X+,r24) CR_TAB
2318 AS2 (st,X+,r25) CR_TAB
2319 AS2 (st,X+,__tmp_reg__) CR_TAB
2320 AS2 (st,X,__zero_reg__) CR_TAB
2321 AS1 (clr,__zero_reg__) CR_TAB
2322 AS2 (sbiw,r26,%o0+3));
2323 }
2324 *l = 6;
2325 return (AS2 (adiw,r26,%o0) CR_TAB
2326 AS2 (st,X+,%A1) CR_TAB
2327 AS2 (st,X+,%B1) CR_TAB
2328 AS2 (st,X+,%C1) CR_TAB
2329 AS2 (st,X,%D1) CR_TAB
2330 AS2 (sbiw,r26,%o0+3));
2331 }
2332 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2333 AS2 (std,%B0,%B1) CR_TAB
2334 AS2 (std,%C0,%C1) CR_TAB
2335 AS2 (std,%D0,%D1));
2336 }
2337 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2338 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2339 AS2 (st,%0,%C1) CR_TAB
2340 AS2 (st,%0,%B1) CR_TAB
2341 AS2 (st,%0,%A1));
2342 else if (GET_CODE (base) == POST_INC) /* (R++) */
2343 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2344 AS2 (st,%0,%B1) CR_TAB
2345 AS2 (st,%0,%C1) CR_TAB
2346 AS2 (st,%0,%D1));
2347 fatal_insn ("unknown move insn:",insn);
2348 return "";
2349 }
2350
const char *
output_movsisf(rtx insn, rtx operands[], int *l)
{
  /* Output assembler for a 4-byte (SImode/SFmode) move.  OPERANDS[0] is the
     destination, OPERANDS[1] the source; *L (if non-NULL) receives the
     instruction count.  Returns the assembler template string, or "" when
     the code has already been emitted via output_asm_insn.  */
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  /* Let the length-computation code below store through L unconditionally.
     REAL_L remembers whether the caller actually wanted a length; when it
     is NULL we must emit code instead of merely counting.  */
  if (!l)
    l = &dummy;

  if (register_operand (dest, VOIDmode))
    {
      if (register_operand (src, VOIDmode)) /* mov r,r */
	{
	  /* Copy high-to-low or low-to-high depending on register order so
	     an overlapping register pair is not clobbered mid-copy.  */
	  if (true_regnum (dest) > true_regnum (src))
	    {
	      if (AVR_HAVE_MOVW)
		{
		  *l = 2;
		  return (AS2 (movw,%C0,%C1) CR_TAB
			  AS2 (movw,%A0,%A1));
		}
	      *l = 4;
	      return (AS2 (mov,%D0,%D1) CR_TAB
		      AS2 (mov,%C0,%C1) CR_TAB
		      AS2 (mov,%B0,%B1) CR_TAB
		      AS2 (mov,%A0,%A1));
	    }
	  else
	    {
	      if (AVR_HAVE_MOVW)
		{
		  *l = 2;
		  return (AS2 (movw,%A0,%A1) CR_TAB
			  AS2 (movw,%C0,%C1));
		}
	      *l = 4;
	      return (AS2 (mov,%A0,%A1) CR_TAB
		      AS2 (mov,%B0,%B1) CR_TAB
		      AS2 (mov,%C0,%C1) CR_TAB
		      AS2 (mov,%D0,%D1));
	    }
	}
      else if (CONSTANT_P (src))
	{
	  /* LDI only works on the upper registers (LD_REGS).  */
	  if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
	    {
	      *l = 4;
	      return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
		      AS2 (ldi,%B0,hi8(%1)) CR_TAB
		      AS2 (ldi,%C0,hlo8(%1)) CR_TAB
		      AS2 (ldi,%D0,hhi8(%1)));
	    }

	  if (GET_CODE (src) == CONST_INT)
	    {
	      /* Clear all four destination bytes; with MOVW the two cleared
		 low bytes are copied into the high pair in one insn.  */
	      const char *const clr_op0 =
		AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
				 AS1 (clr,%B0) CR_TAB
				 AS2 (movw,%C0,%A0))
			      : (AS1 (clr,%A0) CR_TAB
				 AS1 (clr,%B0) CR_TAB
				 AS1 (clr,%C0) CR_TAB
				 AS1 (clr,%D0));

	      if (src == const0_rtx) /* mov r,L */
		{
		  *l = AVR_HAVE_MOVW ? 3 : 4;
		  return clr_op0;
		}
	      else if (src == const1_rtx)
		{
		  /* Clear then increment the low byte to load 1.  */
		  if (!real_l)
		    output_asm_insn (clr_op0, operands);
		  *l = AVR_HAVE_MOVW ? 4 : 5;
		  return AS1 (inc,%A0);
		}
	      else if (src == constm1_rtx)
		{
		  /* Immediate constants -1 to any register */
		  if (AVR_HAVE_MOVW)
		    {
		      *l = 4;
		      return (AS1 (clr,%A0) CR_TAB
			      AS1 (dec,%A0) CR_TAB
			      AS2 (mov,%B0,%A0) CR_TAB
			      AS2 (movw,%C0,%A0));
		    }
		  *l = 5;
		  return (AS1 (clr,%A0) CR_TAB
			  AS1 (dec,%A0) CR_TAB
			  AS2 (mov,%B0,%A0) CR_TAB
			  AS2 (mov,%C0,%A0) CR_TAB
			  AS2 (mov,%D0,%A0));
		}
	      else
		{
		  /* A power of two can be built with clear + SET + BLD.  */
		  int bit_nr = exact_log2 (INTVAL (src));

		  if (bit_nr >= 0)
		    {
		      *l = AVR_HAVE_MOVW ? 5 : 6;
		      if (!real_l)
			{
			  output_asm_insn (clr_op0, operands);
			  output_asm_insn ("set", operands);
			}
		      if (!real_l)
			avr_output_bld (operands, bit_nr);

		      return "";
		    }
		}
	    }

	  /* Last resort, better than loading from memory.  Borrow r31
	     (an LD_REGS register) via __tmp_reg__ to LDI each byte.  */
	  *l = 10;
	  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
		  AS2 (ldi,r31,lo8(%1)) CR_TAB
		  AS2 (mov,%A0,r31) CR_TAB
		  AS2 (ldi,r31,hi8(%1)) CR_TAB
		  AS2 (mov,%B0,r31) CR_TAB
		  AS2 (ldi,r31,hlo8(%1)) CR_TAB
		  AS2 (mov,%C0,r31) CR_TAB
		  AS2 (ldi,r31,hhi8(%1)) CR_TAB
		  AS2 (mov,%D0,r31) CR_TAB
		  AS2 (mov,r31,__tmp_reg__));
	}
      else if (GET_CODE (src) == MEM)
	return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      /* NOTE(review): `template' is a C++ keyword; would need renaming if
	 this file is ever compiled as C++.  */
      const char *template;

      /* Storing zero: use the always-zero register instead of the source.  */
      if (src == const0_rtx)
	  operands[1] = zero_reg_rtx;

      template = out_movsi_mr_r (insn, operands, real_l);

      if (!real_l)
	output_asm_insn (template, operands);

      /* Restore the caller's operand array.  */
      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
2501
const char *
out_movqi_mr_r (rtx insn, rtx op[], int *l)
{
  /* Output assembler for storing a QImode register (OP[1]) into memory
     (OP[0]).  *L (if non-NULL) receives the instruction count.  */
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      /* The status register gets its own fast OUT form.  */
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
	{
	  *l = 1;
	  return AS2 (out,__SREG__,%1);
	}
      /* Addresses in I/O space can use OUT (address minus the 0x20
	 data-space offset) instead of the two-word STS.  */
      if (optimize > 0 && io_address_operand (x, QImode))
	{
	  *l = 1;
	  return AS2 (out,%0-0x20,%1);
	}
      *l = 2;
      return AS2 (sts,%0,%1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x,0))
	   && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      /* Displacement exceeds the 6-bit STD range: adjust the base.  */
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
	{
	  int disp = INTVAL (XEXP (x,1));
	  /* Only Y is usable as an adjustable frame base here.  */
	  if (REGNO (XEXP (x,0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  /* Slightly over range: bump Y with ADIW, store, restore.  */
	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
			    AS2 (std,Y+63,%1) CR_TAB
			    AS2 (sbiw,r28,%o0-63));

	  /* Far out of range: full 16-bit add/subtract of the offset.  */
	  return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
			  AS2 (st,Y,%1) CR_TAB
			  AS2 (subi,r28,lo8(%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(%o0)));
	}
      else if (REGNO (XEXP (x,0)) == REG_X)
	{
	  /* X has no displacement addressing; adjust X with ADIW/SBIW.
	     If the source overlaps X it must be saved in __tmp_reg__
	     before X is modified.  */
	  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
	    {
	      if (reg_unused_after (insn, XEXP (x,0)))
		return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
				AS2 (adiw,r26,%o0) CR_TAB
				AS2 (st,X,__tmp_reg__));

	      return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
			      AS2 (adiw,r26,%o0) CR_TAB
			      AS2 (st,X,__tmp_reg__) CR_TAB
			      AS2 (sbiw,r26,%o0));
	    }
	  else
	    {
	      /* The SBIW restore can be dropped if X dies here.  */
	      if (reg_unused_after (insn, XEXP (x,0)))
		return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
				AS2 (st,X,%1));

	      return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
			      AS2 (st,X,%1) CR_TAB
			      AS2 (sbiw,r26,%o0));
	    }
	}
      *l = 1;
      return AS2 (std,%0,%1);
    }
  *l = 1;
  return AS2 (st,%0,%1);
}
2581
const char *
out_movhi_mr_r (rtx insn, rtx op[], int *l)
{
  /* Output assembler for storing an HImode register (OP[1]) into memory
     (OP[0]).  *L (if non-NULL) receives the instruction count.  */
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  /* "volatile" forces writing high byte first, even if less efficient,
     for correct operation with 16-bit I/O registers. */
  int mem_volatile_p = MEM_VOLATILE_P (dest);
  int tmp;

  if (!l)
    l = &tmp;
  if (CONSTANT_ADDRESS_P (base))
    {
      /* I/O space: two OUT insns, high byte first (16-bit I/O protocol).  */
      if (optimize > 0 && io_address_operand (base, HImode))
	{
	  *l = 2;
	  return (AS2 (out,%B0-0x20,%B1) CR_TAB
		  AS2 (out,%A0-0x20,%A1));
	}
      return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
		      AS2 (sts,%A0,%A1));
    }
  if (reg_base > 0)
    {
      if (reg_base == REG_X)
	{
	  if (reg_src == REG_X)
	    {
	      /* "st X+,r26" and "st -X,r26" are undefined. */
	      if (!mem_volatile_p && reg_unused_after (insn, src))
		return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (st,X,r26) CR_TAB
			      AS2 (adiw,r26,1) CR_TAB
			      AS2 (st,X,__tmp_reg__));
	      else
		return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (adiw,r26,1) CR_TAB
			      AS2 (st,X,__tmp_reg__) CR_TAB
			      AS2 (sbiw,r26,1) CR_TAB
			      AS2 (st,X,r26));
	    }
	  else
	    {
	      /* Non-volatile and X dead afterwards: cheap post-increment
		 stores; otherwise store high byte first via pre-decrement.  */
	      if (!mem_volatile_p && reg_unused_after (insn, base))
		return *l=2, (AS2 (st,X+,%A1) CR_TAB
			      AS2 (st,X,%B1));
	      else
		return *l=3, (AS2 (adiw,r26,1) CR_TAB
			      AS2 (st,X,%B1) CR_TAB
			      AS2 (st,-X,%A1));
	    }
	}
      else
	/* Y or Z base: STD with displacement, high byte first.  */
	return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
		      AS2 (st,%0,%A1));
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
	{
	  /* Displacement beyond STD's 6-bit range: only Y may be bumped.  */
	  if (reg_base != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
			    AS2 (std,Y+63,%B1) CR_TAB
			    AS2 (std,Y+62,%A1) CR_TAB
			    AS2 (sbiw,r28,%o0-62));

	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
			  AS2 (std,Y+1,%B1) CR_TAB
			  AS2 (st,Y,%A1) CR_TAB
			  AS2 (subi,r28,lo8(%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(%o0)));
	}
      if (reg_base == REG_X)
	{
	  /* (X + d) = R */
	  if (reg_src == REG_X)
	    {
	      /* Source IS the X pair: save both bytes in the fixed
		 temporaries before X is advanced.  */
	      *l = 7;
	      return (AS2 (mov,__tmp_reg__,r26) CR_TAB
		      AS2 (mov,__zero_reg__,r27) CR_TAB
		      AS2 (adiw,r26,%o0+1) CR_TAB
		      AS2 (st,X,__zero_reg__) CR_TAB
		      AS2 (st,-X,__tmp_reg__) CR_TAB
		      AS1 (clr,__zero_reg__) CR_TAB
		      AS2 (sbiw,r26,%o0));
	    }
	  *l = 4;
	  return (AS2 (adiw,r26,%o0+1) CR_TAB
		  AS2 (st,X,%B1) CR_TAB
		  AS2 (st,-X,%A1) CR_TAB
		  AS2 (sbiw,r26,%o0));
	}
      return *l=2, (AS2 (std,%B0,%B1) CR_TAB
		    AS2 (std,%A0,%A1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=2, (AS2 (st,%0,%B1) CR_TAB
		  AS2 (st,%0,%A1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (mem_volatile_p)
	{
	  /* Volatile requires the high byte to be written first, so the
	     post-increment must be simulated manually.  */
	  if (REGNO (XEXP (base, 0)) == REG_X)
	    {
	      *l = 4;
	      return (AS2 (adiw,r26,1) CR_TAB
		      AS2 (st,X,%B1) CR_TAB
		      AS2 (st,-X,%A1) CR_TAB
		      AS2 (adiw,r26,2));
	    }
	  else
	    {
	      /* NOTE(review): relies on the %p0/%r0 operand modifiers to
		 print the bare base register of the address — confirm
		 against print_operand in this file.  */
	      *l = 3;
	      return (AS2 (std,%p0+1,%B1) CR_TAB
		      AS2 (st,%p0,%A1) CR_TAB
		      AS2 (adiw,%r0,2));
	    }
	}

      *l = 2;
      return (AS2 (st,%0,%A1) CR_TAB
	      AS2 (st,%0,%B1));
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
2718
2719 /* Return 1 if frame pointer for current function required. */
2720
2721 int
2722 frame_pointer_required_p (void)
2723 {
2724 return (cfun->calls_alloca
2725 || crtl->args.info.nregs == 0
2726 || get_frame_size () > 0);
2727 }
2728
2729 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2730
2731 static RTX_CODE
2732 compare_condition (rtx insn)
2733 {
2734 rtx next = next_real_insn (insn);
2735 RTX_CODE cond = UNKNOWN;
2736 if (next && GET_CODE (next) == JUMP_INSN)
2737 {
2738 rtx pat = PATTERN (next);
2739 rtx src = SET_SRC (pat);
2740 rtx t = XEXP (src, 0);
2741 cond = GET_CODE (t);
2742 }
2743 return cond;
2744 }
2745
2746 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2747
2748 static int
2749 compare_sign_p (rtx insn)
2750 {
2751 RTX_CODE cond = compare_condition (insn);
2752 return (cond == GE || cond == LT);
2753 }
2754
2755 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2756 that needs to be swapped (GT, GTU, LE, LEU). */
2757
2758 int
2759 compare_diff_p (rtx insn)
2760 {
2761 RTX_CODE cond = compare_condition (insn);
2762 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2763 }
2764
2765 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2766
2767 int
2768 compare_eq_p (rtx insn)
2769 {
2770 RTX_CODE cond = compare_condition (insn);
2771 return (cond == EQ || cond == NE);
2772 }
2773
2774
/* Output test instruction for HImode.  INSN is the compare insn; *L (if
   non-NULL) receives the instruction count.  Picks the cheapest sequence
   based on what the following jump actually tests.  */

const char *
out_tsthi (rtx insn, int *l)
{
  /* Only the sign is tested: a single TST of the high byte suffices.  */
  if (compare_sign_p (insn))
    {
      if (l) *l = 1;
      return AS1 (tst,%B0);
    }
  if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
      && compare_eq_p (insn))
    {
      /* Faster than sbiw if we can clobber the operand. */
      if (l) *l = 1;
      return AS2 (or,%A0,%B0);
    }
  /* SBIW is only encodable on r24/r26/r28/r30 pairs (ADDW_REGS).  */
  if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
    {
      if (l) *l = 1;
      return AS2 (sbiw,%0,0);
    }
  /* General case: compare both bytes against the always-zero register.  */
  if (l) *l = 2;
  return (AS2 (cp,%A0,__zero_reg__) CR_TAB
          AS2 (cpc,%B0,__zero_reg__));
}
2801
2802
/* Output test instruction for SImode.  INSN is the compare insn; *L (if
   non-NULL) receives the instruction count.  */

const char *
out_tstsi (rtx insn, int *l)
{
  /* Only the sign is tested: TST the most significant byte.  */
  if (compare_sign_p (insn))
    {
      if (l) *l = 1;
      return AS1 (tst,%D0);
    }
  /* SBIW covers the low word in one insn on ADDW_REGS pairs.  */
  if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
    {
      if (l) *l = 3;
      return (AS2 (sbiw,%A0,0) CR_TAB
              AS2 (cpc,%C0,__zero_reg__) CR_TAB
              AS2 (cpc,%D0,__zero_reg__));
    }
  /* General case: compare all four bytes against the zero register.  */
  if (l) *l = 4;
  return (AS2 (cp,%A0,__zero_reg__) CR_TAB
          AS2 (cpc,%B0,__zero_reg__) CR_TAB
          AS2 (cpc,%C0,__zero_reg__) CR_TAB
          AS2 (cpc,%D0,__zero_reg__));
}
2826
2827
/* Generate asm equivalent for various shifts.
   Shift count is a CONST_INT, MEM or REG.
   This only handles cases that are not already
   carefully hand-optimized in ?sh??i3_out.

   TEMPLATE is the single-shift asm fragment, repeated COUNT times or
   wrapped in a loop; T_LEN is its length in insns.  When LEN is non-NULL
   only the length is computed into *LEN; otherwise the code is emitted.
   NOTE(review): the parameter name `template' is a C++ keyword and would
   need renaming if this file is ever built as C++.  */

void
out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
		    int *len, int t_len)
{
  rtx op[10];
  char str[500];
  int second_label = 1;     /* Need an entry jump to test the count first.  */
  int saved_in_tmp = 0;     /* Loop counter borrowed via __tmp_reg__.  */
  int use_zero_reg = 0;     /* Loop counter kept in __zero_reg__.  */

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];
  str[0] = 0;

  if (len)
    *len = 1;

  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern means a scratch register is available in %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop.  */

      if (count <= 0)
	{
	  if (len)
	    *len = 0;
	  return;
	}

      if (count < 8 && !scratch)
	use_zero_reg = 1;

      if (optimize_size)
	max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
	{
	  /* Output shifts inline with no loop - faster. */
	  if (len)
	    *len = t_len * count;
	  else
	    {
	      while (count-- > 0)
		output_asm_insn (template, op);
	    }

	  return;
	}

      if (scratch)
	{
	  if (!len)
	    strcat (str, AS2 (ldi,%3,%2));
	}
      else if (use_zero_reg)
	{
	  /* Hack to save one word: use __zero_reg__ as loop counter.
	     Set one bit, then shift in a loop until it is 0 again. */

	  op[3] = zero_reg_rtx;
	  if (len)
	    *len = 2;
	  else
	    strcat (str, ("set" CR_TAB
			  AS2 (bld,%3,%2-1)));
	}
      else
	{
	  /* No scratch register available, use one from LD_REGS (saved in
	     __tmp_reg__) that doesn't overlap with registers to shift. */

	  op[3] = gen_rtx_REG (QImode,
			       ((true_regnum (operands[0]) - 1) & 15) + 16);
	  op[4] = tmp_reg_rtx;
	  saved_in_tmp = 1;

	  if (len)
	    *len = 3;  /* Includes "mov %3,%4" after the loop. */
	  else
	    strcat (str, (AS2 (mov,%4,%3) CR_TAB
			  AS2 (ldi,%3,%2)));
	}

      /* The count is a known positive constant, so the loop body always
	 runs at least once: no entry jump needed.  */
      second_label = 0;
    }
  else if (GET_CODE (operands[2]) == MEM)
    {
      /* Load the shift count from memory into __tmp_reg__.  */
      rtx op_mov[10];

      op[3] = op_mov[0] = tmp_reg_rtx;
      op_mov[1] = op[2];

      if (len)
	out_movqi_r_mr (insn, op_mov, len);
      else
	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
    }
  else if (register_operand (operands[2], QImode))
    {
      /* Count already in a register; copy it only if it lives past INSN.  */
      if (reg_unused_after (insn, operands[2]))
	op[3] = op[2];
      else
	{
	  op[3] = tmp_reg_rtx;
	  if (!len)
	    strcat (str, (AS2 (mov,%3,%2) CR_TAB));
	}
    }
  else
    fatal_insn ("bad shift insn:", insn);

  if (second_label)
    {
      /* Jump to the counter test first, in case the count is zero.  */
      if (len)
	++*len;
      else
	strcat (str, AS1 (rjmp,2f));
    }

  if (len)
    *len += t_len + 2;  /* template + dec + brXX */
  else
    {
      /* Assemble: label 1, shift body, counter decrement (or LSR for the
	 __zero_reg__ trick), conditional branch back, optional counter
	 restore from __tmp_reg__.  */
      strcat (str, "\n1:\t");
      strcat (str, template);
      strcat (str, second_label ? "\n2:\t" : "\n\t");
      strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
      strcat (str, CR_TAB);
      strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
      if (saved_in_tmp)
	strcat (str, (CR_TAB AS2 (mov,%3,%4)));
      output_asm_insn (str, op);
    }
}
2970
2971
/* 8bit shift left ((char)x << i).  OPERANDS[0] is the destination (also
   the shifted value), OPERANDS[2] the shift count; *LEN (if non-NULL)
   receives the instruction count.  */

const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  /* Counts >= 8 shift everything out: result is zero.  */
	  if (INTVAL (operands[2]) < 8)
	    break;

	  *len = 1;
	  return AS1 (clr,%0);

	case 1:
	  *len = 1;
	  return AS1 (lsl,%0);

	case 2:
	  *len = 2;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 3:
	  *len = 3;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 4:
	  /* SWAP exchanges nibbles: <<4 is swap + mask, but ANDI needs
	     an upper (LD_REGS) register.  */
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 2;
	      return (AS1 (swap,%0) CR_TAB
		      AS2 (andi,%0,0xf0));
	    }
	  *len = 4;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 5:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsl,%0) CR_TAB
		      AS2 (andi,%0,0xe0));
	    }
	  *len = 5;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsl,%0) CR_TAB
		      AS1 (lsl,%0) CR_TAB
		      AS2 (andi,%0,0xc0));
	    }
	  *len = 6;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 7:
	  /* <<7: rotate bit 0 into carry, clear, rotate carry into bit 7.  */
	  *len = 3;
	  return (AS1 (ror,%0) CR_TAB
		  AS1 (clr,%0) CR_TAB
		  AS1 (ror,%0));
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  /* Variable or small-constant count: generic loop/unroll helper.  */
  out_shift_with_cnt (AS1 (lsl,%0),
		      insn, operands, len, 1);
  return "";
}
3067
3068
/* 16bit shift left ((short)x << i).  OPERANDS[0] is the destination,
   OPERANDS[2] the shift count, %3 a scratch register when the insn
   pattern is a PARALLEL; *LEN (if non-NULL) receives the instruction
   count.  Cases not hand-optimized here fall through to the generic
   out_shift_with_cnt loop.  */

const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern provides a scratch register as %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      /* ANDI/LDI on the operand itself require an upper register.  */
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  /* Counts >= 16 clear the whole result.  */
	  if (INTVAL (operands[2]) < 16)
	    break;

	  *len = 2;
	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 4:
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (ldi_ok)
	    {
	      /* Nibble-swap both bytes, then merge with the EOR trick.  */
	      *len = 6;
	      return (AS1 (swap,%A0) CR_TAB
		      AS1 (swap,%B0) CR_TAB
		      AS2 (andi,%B0,0xf0) CR_TAB
		      AS2 (eor,%B0,%A0) CR_TAB
		      AS2 (andi,%A0,0xf0) CR_TAB
		      AS2 (eor,%B0,%A0));
	    }
	  if (scratch)
	    {
	      *len = 7;
	      return (AS1 (swap,%A0) CR_TAB
		      AS1 (swap,%B0) CR_TAB
		      AS2 (ldi,%3,0xf0) CR_TAB
		      AS2 (and,%B0,%3) CR_TAB
		      AS2 (eor,%B0,%A0) CR_TAB
		      AS2 (and,%A0,%3) CR_TAB
		      AS2 (eor,%B0,%A0));
	    }
	  break;  /* optimize_size ? 6 : 8 */

	case 5:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  if (ldi_ok)
	    {
	      /* One plain shift, then the <<4 nibble-swap sequence.  */
	      *len = 8;
	      return (AS1 (lsl,%A0) CR_TAB
		      AS1 (rol,%B0) CR_TAB
		      AS1 (swap,%A0) CR_TAB
		      AS1 (swap,%B0) CR_TAB
		      AS2 (andi,%B0,0xf0) CR_TAB
		      AS2 (eor,%B0,%A0) CR_TAB
		      AS2 (andi,%A0,0xf0) CR_TAB
		      AS2 (eor,%B0,%A0));
	    }
	  if (scratch)
	    {
	      *len = 9;
	      return (AS1 (lsl,%A0) CR_TAB
		      AS1 (rol,%B0) CR_TAB
		      AS1 (swap,%A0) CR_TAB
		      AS1 (swap,%B0) CR_TAB
		      AS2 (ldi,%3,0xf0) CR_TAB
		      AS2 (and,%B0,%3) CR_TAB
		      AS2 (eor,%B0,%A0) CR_TAB
		      AS2 (and,%A0,%3) CR_TAB
		      AS2 (eor,%B0,%A0));
	    }
	  break;  /* 10 */

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  /* <<6 computed as two right shifts into __tmp_reg__, then a
	     byte move (i.e. <<8 >>2).  */
	  *len = 9;
	  return (AS1 (clr,__tmp_reg__) CR_TAB
		  AS1 (lsr,%B0) CR_TAB
		  AS1 (ror,%A0) CR_TAB
		  AS1 (ror,__tmp_reg__) CR_TAB
		  AS1 (lsr,%B0) CR_TAB
		  AS1 (ror,%A0) CR_TAB
		  AS1 (ror,__tmp_reg__) CR_TAB
		  AS2 (mov,%B0,%A0) CR_TAB
		  AS2 (mov,%A0,__tmp_reg__));

	case 7:
	  /* <<7 as <<8 >>1 via the carry.  */
	  *len = 5;
	  return (AS1 (lsr,%B0) CR_TAB
		  AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0) CR_TAB
		  AS1 (ror,%B0) CR_TAB
		  AS1 (ror,%A0));

	case 8:
	  /* Whole-byte move.  NOTE(review): %A1 (the source's low byte)
	     while sibling cases use %A0 — confirm operands 0/1 always
	     alias here.  */
	  return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
			    AS1 (clr,%A0));

	case 9:
	  *len = 3;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0));

	case 10:
	  *len = 4;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (lsl,%B0));

	case 11:
	  *len = 5;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (lsl,%B0));

	case 12:
	  /* Byte move plus the <<4 nibble trick on the high byte.  */
	  if (ldi_ok)
	    {
	      *len = 4;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS1 (clr,%A0) CR_TAB
		      AS1 (swap,%B0) CR_TAB
		      AS2 (andi,%B0,0xf0));
	    }
	  if (scratch)
	    {
	      *len = 5;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS1 (clr,%A0) CR_TAB
		      AS1 (swap,%B0) CR_TAB
		      AS2 (ldi,%3,0xf0) CR_TAB
		      AS2 (and,%B0,%3));
	    }
	  *len = 6;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (lsl,%B0));

	case 13:
	  if (ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS1 (clr,%A0) CR_TAB
		      AS1 (swap,%B0) CR_TAB
		      AS1 (lsl,%B0) CR_TAB
		      AS2 (andi,%B0,0xe0));
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      /* Multiply by 0x20 == <<13 into r1:r0; restore r1 (the
		 zero register) afterwards.  */
	      *len = 5;
	      return (AS2 (ldi,%3,0x20) CR_TAB
		      AS2 (mul,%A0,%3) CR_TAB
		      AS2 (mov,%B0,r0) CR_TAB
		      AS1 (clr,%A0) CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (scratch)
	    {
	      *len = 6;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS1 (clr,%A0) CR_TAB
		      AS1 (swap,%B0) CR_TAB
		      AS1 (lsl,%B0) CR_TAB
		      AS2 (ldi,%3,0xe0) CR_TAB
		      AS2 (and,%B0,%3));
	    }
	  if (AVR_HAVE_MUL)
	    {
	      /* Build the 0x20 multiplier in r1 with SET/BLD.  */
	      *len = 6;
	      return ("set" CR_TAB
		      AS2 (bld,r1,5) CR_TAB
		      AS2 (mul,%A0,r1) CR_TAB
		      AS2 (mov,%B0,r0) CR_TAB
		      AS1 (clr,%A0) CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  *len = 7;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (lsl,%B0));

	case 14:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (ldi,%B0,0x40) CR_TAB
		      AS2 (mul,%A0,%B0) CR_TAB
		      AS2 (mov,%B0,r0) CR_TAB
		      AS1 (clr,%A0) CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return (AS2 (ldi,%3,0x40) CR_TAB
		      AS2 (mul,%A0,%3) CR_TAB
		      AS2 (mov,%B0,r0) CR_TAB
		      AS1 (clr,%A0) CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && ldi_ok)
	    {
	      /* Size-optimized inline loop using %A0 as the counter.  */
	      *len = 5;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS2 (ldi,%A0,6) "\n1:\t"
		      AS1 (lsl,%B0) CR_TAB
		      AS1 (dec,%A0) CR_TAB
		      AS1 (brne,1b));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  /* <<14 as <<16 >>2.  */
	  *len = 6;
	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
		  AS1 (ror,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
		  AS1 (ror,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 15:
	  /* <<15: only bit 0 survives, rotated into bit 15.  */
	  *len = 4;
	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
		  AS1 (ror,%B0) CR_TAB
		  AS1 (clr,%A0));
	}
      len = t;
    }
  /* Fall-through cases: generic loop/unroll helper.  */
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
		       AS1 (rol,%B0)),
		      insn, operands, len, 2);
  return "";
}
3325
3326
/* 32bit shift left ((long)x << i).  OPERANDS[0] is the destination,
   OPERANDS[2] the shift count; *LEN (if non-NULL) receives the
   instruction count.  Only whole-byte and extreme counts are
   hand-optimized; the rest go through out_shift_with_cnt.  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  /* Counts >= 32 clear the whole result.  */
	  if (INTVAL (operands[2]) < 32)
	    break;

	  if (AVR_HAVE_MOVW)
	    return *len = 3, (AS1 (clr,%D0) CR_TAB
			      AS1 (clr,%C0) CR_TAB
			      AS2 (movw,%A0,%C0));
	  *len = 4;
	  return (AS1 (clr,%D0) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 8:
	  {
	    /* Shift by one byte: move up, clear the low byte.  Copy
	       direction depends on register order to avoid clobbering
	       an overlapping source.  */
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    if (reg0 >= reg1)
	      return (AS2 (mov,%D0,%C1) CR_TAB
		      AS2 (mov,%C0,%B1) CR_TAB
		      AS2 (mov,%B0,%A1) CR_TAB
		      AS1 (clr,%A0));
	    else
	      return (AS1 (clr,%A0) CR_TAB
		      AS2 (mov,%B0,%A1) CR_TAB
		      AS2 (mov,%C0,%B1) CR_TAB
		      AS2 (mov,%D0,%C1));
	  }

	case 16:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    /* If the source's low word already sits in the destination's
	       high word, just clear the low word.  */
	    if (reg0 + 2 == reg1)
	      return *len = 2, (AS1 (clr,%B0) CR_TAB
				AS1 (clr,%A0));
	    if (AVR_HAVE_MOVW)
	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
				AS1 (clr,%B0) CR_TAB
				AS1 (clr,%A0));
	    else
	      return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
				AS2 (mov,%D0,%B1) CR_TAB
				AS1 (clr,%B0) CR_TAB
				AS1 (clr,%A0));
	  }

	case 24:
	  *len = 4;
	  return (AS2 (mov,%D0,%A1) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 31:
	  /* Only bit 0 survives, rotated into bit 31.  */
	  *len = 6;
	  return (AS1 (clr,%D0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
		  AS1 (ror,%D0) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));
	}
      len = t;
    }
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
		       AS1 (rol,%B0) CR_TAB
		       AS1 (rol,%C0) CR_TAB
		       AS1 (rol,%D0)),
		      insn, operands, len, 4);
  return "";
}
3416
/* 8bit arithmetic shift right ((signed char)x >> i).  OPERANDS[0] is the
   destination, OPERANDS[2] the shift count; *LEN (if non-NULL) receives
   the instruction count.  */

const char *
ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 1:
	  *len = 1;
	  return AS1 (asr,%0);

	case 2:
	  *len = 2;
	  return (AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0));

	case 3:
	  *len = 3;
	  return (AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0));

	case 4:
	  *len = 4;
	  return (AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0));

	case 5:
	  *len = 5;
	  return (AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0) CR_TAB
		  AS1 (asr,%0));

	case 6:
	  /* Save bit 6 in T, smear the sign via LSL+SBC (gives 0x00 or
	     0xff), then deposit bit 6 as the new bit 0.  */
	  *len = 4;
	  return (AS2 (bst,%0,6) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS2 (sbc,%0,%0) CR_TAB
		  AS2 (bld,%0,0));

	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* Counts >= 7 all reduce to smearing the sign bit.  */
	  /* fall through */

	case 7:
	  /* LSL puts the sign in carry; SBC %0,%0 yields 0x00/0xff.  */
	  *len = 2;
	  return (AS1 (lsl,%0) CR_TAB
		  AS2 (sbc,%0,%0));
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  out_shift_with_cnt (AS1 (asr,%0),
		      insn, operands, len, 1);
  return "";
}
3487
3488
/* 16bit arithmetic shift right ((signed short)x >> i).  OPERANDS[0] is
   the destination, OPERANDS[2] the shift count, %3 a scratch register
   when the pattern is a PARALLEL; *LEN (if non-NULL) receives the
   instruction count.  The LSL+SBC idiom smears the sign bit: after
   "lsl rX; sbc rY,rY", rY is 0x00 or 0xff depending on rX's old sign.  */

const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern provides a scratch register as %3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      /* LDI on the operand itself requires an upper register.  */
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	case 4:
	case 5:
	  /* XXX try to optimize this too? */
	  break;

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  /* >>6 computed as <<2 of the 24-bit value {sign, B, A} then
	     taking the top two bytes.  */
	  *len = 8;
	  return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
		  AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (lsl,__tmp_reg__) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (lsl,__tmp_reg__) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS1 (rol,%B0));

	case 7:
	  /* >>7 as <<1 then sign-extend the old high byte.  */
	  *len = 4;
	  return (AS1 (lsl,%A0) CR_TAB
		  AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS2 (sbc,%B0,%B0));

	case 8:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    /* Byte move plus sign extension; two variants depending on
	       whether source and destination coincide.  */
	    if (reg0 == reg1)
	      return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
				AS1 (lsl,%B0) CR_TAB
				AS2 (sbc,%B0,%B0));
	    else
	      return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
				AS1 (clr,%B0) CR_TAB
				AS2 (sbrc,%A0,7) CR_TAB
				AS1 (dec,%B0));
	  }

	case 9:
	  *len = 4;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (asr,%A0));

	case 10:
	  *len = 5;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0));

	case 11:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      /* Signed multiply by 0x20 == arithmetic >>11 via the high
		 product byte in r1; restore the zero register after.  */
	      *len = 5;
	      return (AS2 (ldi,%A0,0x20) CR_TAB
		      AS2 (muls,%B0,%A0) CR_TAB
		      AS2 (mov,%A0,r1) CR_TAB
		      AS2 (sbc,%B0,%B0) CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 6;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0));

	case 12:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (ldi,%A0,0x10) CR_TAB
		      AS2 (muls,%B0,%A0) CR_TAB
		      AS2 (mov,%A0,r1) CR_TAB
		      AS2 (sbc,%B0,%B0) CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  *len = 7;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0));

	case 13:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (ldi,%A0,0x08) CR_TAB
		      AS2 (muls,%B0,%A0) CR_TAB
		      AS2 (mov,%A0,r1) CR_TAB
		      AS2 (sbc,%B0,%B0) CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size)
	    break;  /* scratch ? 5 : 7 */
	  *len = 8;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0) CR_TAB
		  AS1 (asr,%A0));

	case 14:
	  /* >>14: keep only the top two bits, sign-extended.  */
	  *len = 5;
	  return (AS1 (lsl,%B0) CR_TAB
		  AS2 (sbc,%A0,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (rol,%A0));

	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* Counts >= 15 all reduce to smearing the sign bit.  */
	  /* fall through */

	case 15:
	  return *len = 3, (AS1 (lsl,%B0) CR_TAB
			    AS2 (sbc,%A0,%A0) CR_TAB
			    AS2 (mov,%B0,%A0));
	}
      len = t;
    }
  out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
		       AS1 (ror,%A0)),
		      insn, operands, len, 2);
  return "";
}
3651
3652
/* 32bit arithmetic shift right ((signed long)x >> i)

   Output assembler for an SImode arithmetic right shift of
   operands[1] by operands[2] into operands[0].  INSN is the insn
   being output.  If LEN is non-NULL, *LEN is set to the number of
   instructions in the returned template.  Returns the asm template,
   or "" when out_shift_with_cnt already produced the output.  */

const char *
ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;		/* Remember caller's LEN; restored below.  */

      if (!len)
	len = &k;		/* Dummy so "*len = ..." stores are always valid.  */

      switch (INTVAL (operands[2]))
	{
	case 8:
	  /* Byte-aligned shift: move the upper three bytes down one
	     position and fill the top byte with the sign.  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len=6;
	    /* Pick the copy direction that does not clobber a source
	       byte when the two register groups overlap.  */
	    if (reg0 <= reg1)
	      return (AS2 (mov,%A0,%B1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS1 (clr,%D0)     CR_TAB
		      AS2 (sbrc,%C0,7)  CR_TAB
		      AS1 (dec,%D0));
	    else
	      return (AS1 (clr,%D0)     CR_TAB
		      AS2 (sbrc,%D1,7)  CR_TAB
		      AS1 (dec,%D0)     CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%A0,%B1));
	  }

	case 16:
	  /* Word-aligned shift: move the upper word down, then
	     sign-extend it into the upper word of the result.  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    if (reg0 == reg1 + 2)
	      /* Destination already overlays the source's upper word;
		 only the sign extension is needed.  */
	      return *len = 4, (AS1 (clr,%D0)     CR_TAB
				AS2 (sbrc,%B0,7)  CR_TAB
				AS1 (com,%D0)     CR_TAB
				AS2 (mov,%C0,%D0));
	    if (AVR_HAVE_MOVW)
	      return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
				AS1 (clr,%D0)      CR_TAB
				AS2 (sbrc,%B0,7)   CR_TAB
				AS1 (com,%D0)      CR_TAB
				AS2 (mov,%C0,%D0));
	    else
	      return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
				AS2 (mov,%A0,%C1) CR_TAB
				AS1 (clr,%D0)     CR_TAB
				AS2 (sbrc,%B0,7)  CR_TAB
				AS1 (com,%D0)     CR_TAB
				AS2 (mov,%C0,%D0));
	  }

	case 24:
	  /* Move the top byte to the bottom, sign-extend the rest.  */
	  return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
			    AS1 (clr,%D0)     CR_TAB
			    AS2 (sbrc,%A0,7)  CR_TAB
			    AS1 (com,%D0)     CR_TAB
			    AS2 (mov,%B0,%D0) CR_TAB
			    AS2 (mov,%C0,%D0));

	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* fall through */

	case 31:
	  /* Shift by 31 (or more): result is 0 or -1 depending on the
	     sign bit.  lsl moves the sign into carry; sbc then yields
	     0x00 or 0xff which is copied to every byte.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 4, (AS1 (lsl,%D0)     CR_TAB
			      AS2 (sbc,%A0,%A0) CR_TAB
			      AS2 (mov,%B0,%A0) CR_TAB
			      AS2 (movw,%C0,%A0));
	  else
	    return *len = 5, (AS1 (lsl,%D0)     CR_TAB
			      AS2 (sbc,%A0,%A0) CR_TAB
			      AS2 (mov,%B0,%A0) CR_TAB
			      AS2 (mov,%C0,%A0) CR_TAB
			      AS2 (mov,%D0,%A0));
	}
      len = t;			/* Restore caller's LEN (may be NULL).  */
    }
  /* General case: one-bit-at-a-time shift handled by the common
     shift-loop emitter.  */
  out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
		       AS1 (ror,%C0) CR_TAB
		       AS1 (ror,%B0) CR_TAB
		       AS1 (ror,%A0)),
		      insn, operands, len, 4);
  return "";
}
3750
/* 8bit logic shift right ((unsigned char)x >> i)

   Output assembler for a QImode logical right shift of operands[0]
   (in place) by operands[2].  INSN is the insn being output.  If LEN
   is non-NULL, *LEN receives the number of instructions in the
   returned template.  */

const char *
lshrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
	len = &k;		/* Dummy so "*len = ..." is always valid.  */

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* Shift count >= width: result is zero.  */
	  *len = 1;
	  return AS1 (clr,%0);

	case 1:
	  *len = 1;
	  return AS1 (lsr,%0);

	case 2:
	  *len = 2;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));
	case 3:
	  *len = 3;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));

	case 4:
	  /* swap exchanges nibbles; with an andi-capable register the
	     4-bit shift takes 2 insns instead of 4.  */
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len=2;
	      return (AS1 (swap,%0) CR_TAB
		      AS2 (andi,%0,0x0f));
	    }
	  *len = 4;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));

	case 5:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsr,%0)  CR_TAB
		      AS2 (andi,%0,0x7));
	    }
	  *len = 5;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsr,%0)  CR_TAB
		      AS1 (lsr,%0)  CR_TAB
		      AS2 (andi,%0,0x3));
	    }
	  *len = 6;
	  return (AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0) CR_TAB
		  AS1 (lsr,%0));

	case 7:
	  /* Only the top bit survives: rotate it into carry, clear,
	     rotate carry back in as bit 0.  */
	  *len = 3;
	  return (AS1 (rol,%0) CR_TAB
		  AS1 (clr,%0) CR_TAB
		  AS1 (rol,%0));
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  /* Variable or small constant count: common shift-loop emitter.  */
  out_shift_with_cnt (AS1 (lsr,%0),
		      insn, operands, len, 1);
  return "";
}
3845
/* 16bit logic shift right ((unsigned short)x >> i)

   Output assembler for an HImode logical right shift of operands[0]
   (in place) by operands[2].  INSN is the insn being output.  If LEN
   is non-NULL, *LEN receives the number of instructions in the
   returned template.  Breaking out of the switch falls through to the
   generic shift-loop emitter.  */

const char *
lshrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern carries a scratch register as operand 3.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      /* Nonzero if the destination accepts immediate andi/ldi.  */
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;		/* Remember caller's LEN; restored below.  */

      if (!len)
	len = &k;		/* Dummy so "*len = ..." is always valid.  */

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* Shift count >= width: result is zero.  */
	  *len = 2;
	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 4:
	  /* Nibble shift via swap + mask; the eor pair moves the low
	     nibble of the high byte into the high nibble of the low
	     byte.  The comments after "break" give the loop cost the
	     generic emitter would achieve instead.  */
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (ldi_ok)
	    {
	      *len = 6;
	      return (AS1 (swap,%B0)      CR_TAB
		      AS1 (swap,%A0)      CR_TAB
		      AS2 (andi,%A0,0x0f) CR_TAB
		      AS2 (eor,%A0,%B0)   CR_TAB
		      AS2 (andi,%B0,0x0f) CR_TAB
		      AS2 (eor,%A0,%B0));
	    }
	  if (scratch)
	    {
	      *len = 7;
	      return (AS1 (swap,%B0)    CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS2 (ldi,%3,0x0f) CR_TAB
		      AS2 (and,%A0,%3)  CR_TAB
		      AS2 (eor,%A0,%B0) CR_TAB
		      AS2 (and,%B0,%3)  CR_TAB
		      AS2 (eor,%A0,%B0));
	    }
	  break;  /* optimize_size ? 6 : 8 */

	case 5:
	  /* One plain shift, then the 4-bit swap trick.  */
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  if (ldi_ok)
	    {
	      *len = 8;
	      return (AS1 (lsr,%B0)       CR_TAB
		      AS1 (ror,%A0)       CR_TAB
		      AS1 (swap,%B0)      CR_TAB
		      AS1 (swap,%A0)      CR_TAB
		      AS2 (andi,%A0,0x0f) CR_TAB
		      AS2 (eor,%A0,%B0)   CR_TAB
		      AS2 (andi,%B0,0x0f) CR_TAB
		      AS2 (eor,%A0,%B0));
	    }
	  if (scratch)
	    {
	      *len = 9;
	      return (AS1 (lsr,%B0)     CR_TAB
		      AS1 (ror,%A0)     CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS2 (ldi,%3,0x0f) CR_TAB
		      AS2 (and,%A0,%3)  CR_TAB
		      AS2 (eor,%A0,%B0) CR_TAB
		      AS2 (and,%B0,%3)  CR_TAB
		      AS2 (eor,%A0,%B0));
	    }
	  break;  /* 10 */

	case 6:
	  /* Shift left by 2 into __tmp_reg__, then treat as a byte
	     move (16 - 6 = 10 = 8 + 2).  */
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  *len = 9;
	  return (AS1 (clr,__tmp_reg__) CR_TAB
		  AS1 (lsl,%A0)         CR_TAB
		  AS1 (rol,%B0)         CR_TAB
		  AS1 (rol,__tmp_reg__) CR_TAB
		  AS1 (lsl,%A0)         CR_TAB
		  AS1 (rol,%B0)         CR_TAB
		  AS1 (rol,__tmp_reg__) CR_TAB
		  AS2 (mov,%A0,%B0)     CR_TAB
		  AS2 (mov,%B0,__tmp_reg__));

	case 7:
	  /* Shift left once and move bytes down, i.e. 8 - 1.  */
	  *len = 5;
	  return (AS1 (lsl,%A0)     CR_TAB
		  AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (rol,%A0)     CR_TAB
		  AS2 (sbc,%B0,%B0) CR_TAB
		  AS1 (neg,%B0));

	case 8:
	  /* Byte-aligned: plain byte move plus clear.  */
	  return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
			    AS1 (clr,%B0));

	case 9:
	  *len = 3;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0));

	case 10:
	  *len = 4;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0));

	case 11:
	  *len = 5;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0));

	case 12:
	  /* Byte move plus 4-bit swap trick on the low byte.  */
	  if (ldi_ok)
	    {
	      *len = 4;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS2 (andi,%A0,0x0f));
	    }
	  if (scratch)
	    {
	      *len = 5;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS2 (ldi,%3,0x0f) CR_TAB
		      AS2 (and,%A0,%3));
	    }
	  *len = 6;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0));

	case 13:
	  if (ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS1 (lsr,%A0)     CR_TAB
		      AS2 (andi,%A0,0x07));
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      /* x >> 13 == high byte of x * 8; the product's high byte
		 lands in r1, which is __zero_reg__ and must be
		 re-cleared afterwards (see avr_file_start).  */
	      *len = 5;
	      return (AS2 (ldi,%3,0x08) CR_TAB
		      AS2 (mul,%B0,%3)  CR_TAB
		      AS2 (mov,%A0,r1)  CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (scratch)
	    {
	      *len = 6;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS1 (lsr,%A0)     CR_TAB
		      AS2 (ldi,%3,0x07) CR_TAB
		      AS2 (and,%A0,%3));
	    }
	  if (AVR_HAVE_MUL)
	    {
	      /* No scratch: build the constant 8 in r1 via set/bld.  */
	      *len = 6;
	      return ("set"            CR_TAB
		      AS2 (bld,r1,3)   CR_TAB
		      AS2 (mul,%B0,r1) CR_TAB
		      AS2 (mov,%A0,r1) CR_TAB
		      AS1 (clr,%B0)    CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  *len = 7;
	  return (AS2 (mov,%A0,%B0) CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0)     CR_TAB
		  AS1 (lsr,%A0));

	case 14:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      /* x >> 14 == high byte of x * 4.  */
	      *len = 5;
	      return (AS2 (ldi,%A0,0x04) CR_TAB
		      AS2 (mul,%B0,%A0)  CR_TAB
		      AS2 (mov,%A0,r1)   CR_TAB
		      AS1 (clr,%B0)      CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return (AS2 (ldi,%3,0x04) CR_TAB
		      AS2 (mul,%B0,%3)  CR_TAB
		      AS2 (mov,%A0,r1)  CR_TAB
		      AS1 (clr,%B0)     CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && ldi_ok)
	    {
	      /* Compact counted loop: 6 single-bit shifts.  */
	      *len = 5;
	      return (AS2 (mov,%A0,%B0) CR_TAB
		      AS2 (ldi,%B0,6) "\n1:\t"
		      AS1 (lsr,%A0)     CR_TAB
		      AS1 (dec,%B0)     CR_TAB
		      AS1 (brne,1b));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  /* Shift the high byte LEFT by 2 into the low result byte.  */
	  *len = 6;
	  return (AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS1 (clr,%B0));

	case 15:
	  /* Only the top bit survives.  */
	  *len = 4;
	  return (AS1 (clr,%A0) CR_TAB
		  AS1 (lsl,%B0) CR_TAB
		  AS1 (rol,%A0) CR_TAB
		  AS1 (clr,%B0));
	}
      len = t;			/* Restore caller's LEN (may be NULL).  */
    }
  /* General case: one-bit-per-iteration loop.  */
  out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
		       AS1 (ror,%A0)),
		      insn, operands, len, 2);
  return "";
}
4102
/* 32bit logic shift right ((unsigned int)x >> i)

   Output assembler for an SImode logical right shift of operands[1]
   by operands[2] into operands[0].  INSN is the insn being output.
   If LEN is non-NULL, *LEN receives the instruction count of the
   returned template.  */

const char *
lshrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;		/* Remember caller's LEN; restored below.  */

      if (!len)
	len = &k;		/* Dummy so "*len = ..." is always valid.  */

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* Shift count >= width: result is zero.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 3, (AS1 (clr,%D0) CR_TAB
			      AS1 (clr,%C0) CR_TAB
			      AS2 (movw,%A0,%C0));
	  *len = 4;
	  return (AS1 (clr,%D0) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 8:
	  /* Byte-aligned: move bytes down one position, zero the top.  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    /* Copy direction chosen so overlapping registers are not
	       clobbered before being read.  */
	    if (reg0 <= reg1)
	      return (AS2 (mov,%A0,%B1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS1 (clr,%D0));
	    else
	      return (AS1 (clr,%D0)     CR_TAB
		      AS2 (mov,%C0,%D1) CR_TAB
		      AS2 (mov,%B0,%C1) CR_TAB
		      AS2 (mov,%A0,%B1));
	  }

	case 16:
	  /* Word-aligned: move the upper word down, zero the upper word.  */
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);

	    if (reg0 == reg1 + 2)
	      /* Destination already overlays the source's upper word.  */
	      return *len = 2, (AS1 (clr,%C0) CR_TAB
				AS1 (clr,%D0));
	    if (AVR_HAVE_MOVW)
	      return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
				AS1 (clr,%C0)      CR_TAB
				AS1 (clr,%D0));
	    else
	      return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
				AS2 (mov,%A0,%C1) CR_TAB
				AS1 (clr,%C0)     CR_TAB
				AS1 (clr,%D0));
	  }

	case 24:
	  return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
			    AS1 (clr,%B0)     CR_TAB
			    AS1 (clr,%C0)     CR_TAB
			    AS1 (clr,%D0));

	case 31:
	  /* Only the top bit survives: result is 0 or 1.  */
	  *len = 6;
	  return (AS1 (clr,%A0)    CR_TAB
		  AS2 (sbrc,%D0,7) CR_TAB
		  AS1 (inc,%A0)    CR_TAB
		  AS1 (clr,%B0)    CR_TAB
		  AS1 (clr,%C0)    CR_TAB
		  AS1 (clr,%D0));
	}
      len = t;			/* Restore caller's LEN (may be NULL).  */
    }
  /* General case: one-bit-per-iteration loop.  */
  out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
		       AS1 (ror,%C0) CR_TAB
		       AS1 (ror,%B0) CR_TAB
		       AS1 (ror,%A0)),
		      insn, operands, len, 4);
  return "";
}
4192
/* Modifies the length assigned to instruction INSN
   LEN is the initially computed length of the insn.

   Re-runs the various *_out output functions in "length only" mode
   (passing &len so they store the instruction count instead of being
   used for their template) and returns the corrected length.  Insns
   whose shape is not recognized keep the incoming LEN.  */

int
adjust_insn_length (rtx insn, int len)
{
  rtx patt = PATTERN (insn);
  rtx set;

  /* First, plain SET patterns: moves, cc0 tests, and AND/IOR with a
     constant mask whose length depends on which bytes the mask
     actually touches.  */
  if (GET_CODE (patt) == SET)
    {
      rtx op[10];
      op[1] = SET_SRC (patt);
      op[0] = SET_DEST (patt);
      if (general_operand (op[1], VOIDmode)
	  && general_operand (op[0], VOIDmode))
	{
	  switch (GET_MODE (op[0]))
	    {
	    case QImode:
	      output_movqi (insn, op, &len);
	      break;
	    case HImode:
	      output_movhi (insn, op, &len);
	      break;
	    case SImode:
	    case SFmode:
	      output_movsisf (insn, op, &len);
	      break;
	    default:
	      break;
	    }
	}
      else if (op[0] == cc0_rtx && REG_P (op[1]))
	{
	  /* Compare-with-zero (test) insns.  */
	  switch (GET_MODE (op[1]))
	    {
	    case HImode: out_tsthi (insn,&len); break;
	    case SImode: out_tstsi (insn,&len); break;
	    default: break;
	    }
	}
      else if (GET_CODE (op[1]) == AND)
	{
	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
	    {
	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
	      /* One instruction per byte of the mask that is not
		 all-ones (those bytes need no code).  */
	      if (GET_MODE (op[1]) == SImode)
		len = (((mask & 0xff) != 0xff)
		       + ((mask & 0xff00) != 0xff00)
		       + ((mask & 0xff0000L) != 0xff0000L)
		       + ((mask & 0xff000000L) != 0xff000000L));
	      else if (GET_MODE (op[1]) == HImode)
		len = (((mask & 0xff) != 0xff)
		       + ((mask & 0xff00) != 0xff00));
	    }
	}
      else if (GET_CODE (op[1]) == IOR)
	{
	  if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
	    {
	      HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
	      /* One instruction per byte of the mask that is not
		 all-zeros.  */
	      if (GET_MODE (op[1]) == SImode)
		len = (((mask & 0xff) != 0)
		       + ((mask & 0xff00) != 0)
		       + ((mask & 0xff0000L) != 0)
		       + ((mask & 0xff000000L) != 0));
	      else if (GET_MODE (op[1]) == HImode)
		len = (((mask & 0xff) != 0)
		       + ((mask & 0xff00) != 0));
	    }
	}
    }
  /* Second, single-set insns (possibly inside a PARALLEL carrying a
     scratch): constant reloads and shifts by a possibly-constant
     count.  */
  set = single_set (insn);
  if (set)
    {
      rtx op[10];

      op[1] = SET_SRC (set);
      op[0] = SET_DEST (set);

      if (GET_CODE (patt) == PARALLEL
	  && general_operand (op[1], VOIDmode)
	  && general_operand (op[0], VOIDmode))
	{
	  /* The second element of a 2-element PARALLEL is the scratch
	     clobber; make it visible to the output routine as %2.  */
	  if (XVECLEN (patt, 0) == 2)
	    op[2] = XVECEXP (patt, 0, 1);

	  switch (GET_MODE (op[0]))
	    {
	    case QImode:
	      len = 2;
	      break;
	    case HImode:
	      output_reload_inhi (insn, op, &len);
	      break;
	    case SImode:
	    case SFmode:
	      output_reload_insisf (insn, op, &len);
	      break;
	    default:
	      break;
	    }
	}
      else if (GET_CODE (op[1]) == ASHIFT
	  || GET_CODE (op[1]) == ASHIFTRT
	  || GET_CODE (op[1]) == LSHIFTRT)
	{
	  /* Shift insns: rebuild the operand array the shift output
	     functions expect (dest, src, count).  */
	  rtx ops[10];
	  ops[0] = op[0];
	  ops[1] = XEXP (op[1],0);
	  ops[2] = XEXP (op[1],1);
	  switch (GET_CODE (op[1]))
	    {
	    case ASHIFT:
	      switch (GET_MODE (op[0]))
		{
		case QImode: ashlqi3_out (insn,ops,&len); break;
		case HImode: ashlhi3_out (insn,ops,&len); break;
		case SImode: ashlsi3_out (insn,ops,&len); break;
		default: break;
		}
	      break;
	    case ASHIFTRT:
	      switch (GET_MODE (op[0]))
		{
		case QImode: ashrqi3_out (insn,ops,&len); break;
		case HImode: ashrhi3_out (insn,ops,&len); break;
		case SImode: ashrsi3_out (insn,ops,&len); break;
		default: break;
		}
	      break;
	    case LSHIFTRT:
	      switch (GET_MODE (op[0]))
		{
		case QImode: lshrqi3_out (insn,ops,&len); break;
		case HImode: lshrhi3_out (insn,ops,&len); break;
		case SImode: lshrsi3_out (insn,ops,&len); break;
		default: break;
		}
	      break;
	    default:
	      break;
	    }
	}
    }
  return len;
}
4341
4342 /* Return nonzero if register REG dead after INSN. */
4343
4344 int
4345 reg_unused_after (rtx insn, rtx reg)
4346 {
4347 return (dead_or_set_p (insn, reg)
4348 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4349 }
4350
/* Return nonzero if REG is not used after INSN.
   We assume REG is a reload reg, and therefore does
   not live past labels.  It may live past calls or jumps though.  */

int
_reg_unused_after (rtx insn, rtx reg)
{
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  /* Scan forward from INSN until a use, a full redefinition, or the
     end of the insn chain decides the answer.  */
  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
	 if dead here.  However, if this is a label added by reorg, then
	 the register may still be live here.  We can't tell the difference,
	 so we just ignore labels completely.  */
      if (code == CODE_LABEL)
	return 1;
      /* else */
#endif

      if (!INSN_P (insn))
	continue;

      /* A jump means control may leave the scanned region; be
	 conservative.  */
      if (code == JUMP_INSN)
	return 0;

      /* If this is a sequence, we must handle them all at once.
	 We could have for instance a call that sets the target register,
	 and an insn in a delay slot that uses the register.  In this case,
	 we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	{
	  int i;
	  int retval = 0;

	  for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	    {
	      rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
	      rtx set = single_set (this_insn);

	      if (GET_CODE (this_insn) == CALL_INSN)
		code = CALL_INSN;
	      else if (GET_CODE (this_insn) == JUMP_INSN)
		{
		  if (INSN_ANNULLED_BRANCH_P (this_insn))
		    return 0;
		  code = JUMP_INSN;
		}

	      /* Any read of REG inside the sequence means it is live.  */
	      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
		return 0;
	      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
		{
		  if (GET_CODE (SET_DEST (set)) != MEM)
		    retval = 1;	/* Redefined: dead after the sequence.  */
		  else
		    return 0;	/* Used in a store address.  */
		}
	      if (set == 0
		  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
		return 0;
	    }
	  if (retval == 1)
	    return 1;
	  else if (code == JUMP_INSN)
	    return 0;
	}

      if (code == CALL_INSN)
	{
	  /* A register explicitly used by the call keeps REG live;
	     otherwise a call-clobbered REG is dead across the call.  */
	  rtx tem;
	  for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
	    if (GET_CODE (XEXP (tem, 0)) == USE
		&& REG_P (XEXP (XEXP (tem, 0), 0))
		&& reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
	      return 0;
	  if (call_used_regs[REGNO (reg)])
	    return 1;
	}

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
	return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
	return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	return 0;
    }
  /* Fell off the end of the insn chain without seeing a use.  */
  return 1;
}
4455
4456 /* Target hook for assembling integer objects. The AVR version needs
4457 special handling for references to certain labels. */
4458
4459 static bool
4460 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4461 {
4462 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4463 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4464 || GET_CODE (x) == LABEL_REF))
4465 {
4466 fputs ("\t.word\tgs(", asm_out_file);
4467 output_addr_const (asm_out_file, x);
4468 fputs (")\n", asm_out_file);
4469 return true;
4470 }
4471 return default_assemble_integer (x, size, aligned_p);
4472 }
4473
4474 /* The routine used to output NUL terminated strings. We use a special
4475 version of this for most svr4 targets because doing so makes the
4476 generated assembly code more compact (and thus faster to assemble)
4477 as well as more readable, especially for targets like the i386
4478 (where the only alternative is to output character sequences as
4479 comma separated lists of numbers). */
4480
4481 void
4482 gas_output_limited_string(FILE *file, const char *str)
4483 {
4484 const unsigned char *_limited_str = (const unsigned char *) str;
4485 unsigned ch;
4486 fprintf (file, "%s\"", STRING_ASM_OP);
4487 for (; (ch = *_limited_str); _limited_str++)
4488 {
4489 int escape;
4490 switch (escape = ESCAPES[ch])
4491 {
4492 case 0:
4493 putc (ch, file);
4494 break;
4495 case 1:
4496 fprintf (file, "\\%03o", ch);
4497 break;
4498 default:
4499 putc ('\\', file);
4500 putc (escape, file);
4501 break;
4502 }
4503 }
4504 fprintf (file, "\"\n");
4505 }
4506
/* The routine used to output sequences of byte values.  We use a special
   version of this for most svr4 targets because doing so makes the
   generated assembly code more compact (and thus faster to assemble)
   as well as more readable.  Note that if we find subparts of the
   character sequence which end with NUL (and which are shorter than
   STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING.  */

void
gas_output_ascii(FILE *file, const char *str, size_t length)
{
  const unsigned char *_ascii_bytes = (const unsigned char *) str;
  const unsigned char *limit = _ascii_bytes + length;
  unsigned bytes_in_chunk = 0;	/* Output bytes on the current .ascii line.  */
  for (; _ascii_bytes < limit; _ascii_bytes++)
    {
      const unsigned char *p;
      /* Start a fresh .ascii directive when the current line gets long.  */
      if (bytes_in_chunk >= 60)
	{
	  fprintf (file, "\"\n");
	  bytes_in_chunk = 0;
	}
      /* Find the next NUL (or the end of the data).  */
      for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
	continue;
      if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
	{
	  /* A short NUL-terminated run: close any open .ascii chunk
	     and emit it as a .string instead.  */
	  if (bytes_in_chunk > 0)
	    {
	      fprintf (file, "\"\n");
	      bytes_in_chunk = 0;
	    }
	  gas_output_limited_string (file, (const char*)_ascii_bytes);
	  /* Skip past the run; the loop increment consumes the NUL.  */
	  _ascii_bytes = p;
	}
      else
	{
	  int escape;
	  unsigned ch;
	  if (bytes_in_chunk == 0)
	    fprintf (file, "\t.ascii\t\"");
	  /* ESCAPES: 0 = literal byte, 1 = octal escape, else the
	     second character of a two-byte escape.  */
	  switch (escape = ESCAPES[ch = *_ascii_bytes])
	    {
	    case 0:
	      putc (ch, file);
	      bytes_in_chunk++;
	      break;
	    case 1:
	      fprintf (file, "\\%03o", ch);
	      bytes_in_chunk += 4;
	      break;
	    default:
	      putc ('\\', file);
	      putc (escape, file);
	      bytes_in_chunk += 2;
	      break;
	    }
	}
    }
  if (bytes_in_chunk > 0)
    fprintf (file, "\"\n");
}
4567
4568 /* Return value is nonzero if pseudos that have been
4569 assigned to registers of class CLASS would likely be spilled
4570 because registers of CLASS are needed for spill registers. */
4571
4572 enum reg_class
4573 class_likely_spilled_p (int c)
4574 {
4575 return (c != ALL_REGS && c != ADDW_REGS);
4576 }
4577
/* Valid attributes:
   progmem - put data to program memory;
   signal - make a function to be hardware interrupt. After function
   prologue interrupts are disabled;
   interrupt - make a function to be hardware interrupt. After function
   prologue interrupts are enabled;
   naked - don't generate function prologue/epilogue and `ret' command.
   OS_task - handled like naked below (shares avr_handle_fntype_attribute);
   presumably marks an RTOS task — TODO confirm intended semantics.

   Only `progmem' attribute valid for type.  */

const struct attribute_spec avr_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "progmem",   0, 0, false, false, false,  avr_handle_progmem_attribute },
  { "signal",    0, 0, true,  false, false,  avr_handle_fndecl_attribute },
  { "interrupt", 0, 0, true,  false, false,  avr_handle_fndecl_attribute },
  { "naked",     0, 0, false, true,  true,   avr_handle_fntype_attribute },
  { "OS_task",   0, 0, false, true,  true,   avr_handle_fntype_attribute },
  { NULL,        0, 0, false, false, false, NULL }
};
4598
/* Handle a "progmem" attribute; arguments as in
   struct attribute_spec.handler.  Accepts the attribute on
   initialized static/external variables and (for backwards
   compatibility) on typedefs; rejects it elsewhere with a warning
   and sets *NO_ADD_ATTRS so the attribute is dropped.  */
static tree
avr_handle_progmem_attribute (tree *node, tree name,
			      tree args ATTRIBUTE_UNUSED,
			      int flags ATTRIBUTE_UNUSED,
			      bool *no_add_attrs)
{
  if (DECL_P (*node))
    {
      if (TREE_CODE (*node) == TYPE_DECL)
	{
	  /* This is really a decl attribute, not a type attribute,
	     but try to handle it for GCC 3.0 backwards compatibility.  */

	  tree type = TREE_TYPE (*node);
	  tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
	  tree newtype = build_type_attribute_variant (type, attr);

	  /* Attach the attribute to the type instead of the decl.  */
	  TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
	  TREE_TYPE (*node) = newtype;
	  *no_add_attrs = true;
	}
      else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
	{
	  /* progmem data is emitted to a section (see
	     avr_insert_attributes); an uninitialized, non-external
	     variable would have nothing to emit.  */
	  if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
	    {
	      warning (0, "only initialized variables can be placed into "
		       "program memory area");
	      *no_add_attrs = true;
	    }
	}
      else
	{
	  warning (OPT_Wattributes, "%qs attribute ignored",
		   IDENTIFIER_POINTER (name));
	  *no_add_attrs = true;
	}
    }

  return NULL_TREE;
}
4641
4642 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4643 struct attribute_spec.handler. */
4644
4645 static tree
4646 avr_handle_fndecl_attribute (tree *node, tree name,
4647 tree args ATTRIBUTE_UNUSED,
4648 int flags ATTRIBUTE_UNUSED,
4649 bool *no_add_attrs)
4650 {
4651 if (TREE_CODE (*node) != FUNCTION_DECL)
4652 {
4653 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4654 IDENTIFIER_POINTER (name));
4655 *no_add_attrs = true;
4656 }
4657 else
4658 {
4659 const char *func_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (*node));
4660 const char *attr = IDENTIFIER_POINTER (name);
4661
4662 /* If the function has the 'signal' or 'interrupt' attribute, test to
4663 make sure that the name of the function is "__vector_NN" so as to
4664 catch when the user misspells the interrupt vector name. */
4665
4666 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4667 {
4668 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4669 {
4670 warning (0, "%qs appears to be a misspelled interrupt handler",
4671 func_name);
4672 }
4673 }
4674 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4675 {
4676 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4677 {
4678 warning (0, "%qs appears to be a misspelled signal handler",
4679 func_name);
4680 }
4681 }
4682 }
4683
4684 return NULL_TREE;
4685 }
4686
4687 static tree
4688 avr_handle_fntype_attribute (tree *node, tree name,
4689 tree args ATTRIBUTE_UNUSED,
4690 int flags ATTRIBUTE_UNUSED,
4691 bool *no_add_attrs)
4692 {
4693 if (TREE_CODE (*node) != FUNCTION_TYPE)
4694 {
4695 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4696 IDENTIFIER_POINTER (name));
4697 *no_add_attrs = true;
4698 }
4699
4700 return NULL_TREE;
4701 }
4702
4703 /* Look for attribute `progmem' in DECL
4704 if found return 1, otherwise 0. */
4705
4706 int
4707 avr_progmem_p (tree decl, tree attributes)
4708 {
4709 tree a;
4710
4711 if (TREE_CODE (decl) != VAR_DECL)
4712 return 0;
4713
4714 if (NULL_TREE
4715 != lookup_attribute ("progmem", attributes))
4716 return 1;
4717
4718 a=decl;
4719 do
4720 a = TREE_TYPE(a);
4721 while (TREE_CODE (a) == ARRAY_TYPE);
4722
4723 if (a == error_mark_node)
4724 return 0;
4725
4726 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4727 return 1;
4728
4729 return 0;
4730 }
4731
4732 /* Add the section attribute if the variable is in progmem. */
4733
4734 static void
4735 avr_insert_attributes (tree node, tree *attributes)
4736 {
4737 if (TREE_CODE (node) == VAR_DECL
4738 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4739 && avr_progmem_p (node, *attributes))
4740 {
4741 static const char dsec[] = ".progmem.data";
4742 *attributes = tree_cons (get_identifier ("section"),
4743 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4744 *attributes);
4745
4746 /* ??? This seems sketchy. Why can't the user declare the
4747 thing const in the first place? */
4748 TREE_READONLY (node) = 1;
4749 }
4750 }
4751
4752 /* A get_unnamed_section callback for switching to progmem_section. */
4753
4754 static void
4755 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4756 {
4757 fprintf (asm_out_file,
4758 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4759 AVR_HAVE_JMP_CALL ? "a" : "ax");
4760 /* Should already be aligned, this is just to be safe if it isn't. */
4761 fprintf (asm_out_file, "\t.p2align 1\n");
4762 }
4763
4764 /* Implement TARGET_ASM_INIT_SECTIONS. */
4765
4766 static void
4767 avr_asm_init_sections (void)
4768 {
4769 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4770 avr_output_progmem_section_asm_op,
4771 NULL);
4772 readonly_data_section = data_section;
4773 }
4774
4775 static unsigned int
4776 avr_section_type_flags (tree decl, const char *name, int reloc)
4777 {
4778 unsigned int flags = default_section_type_flags (decl, name, reloc);
4779
4780 if (strncmp (name, ".noinit", 7) == 0)
4781 {
4782 if (decl && TREE_CODE (decl) == VAR_DECL
4783 && DECL_INITIAL (decl) == NULL_TREE)
4784 flags |= SECTION_BSS; /* @nobits */
4785 else
4786 warning (0, "only uninitialized variables can be placed in the "
4787 ".noinit section");
4788 }
4789
4790 return flags;
4791 }
4792
/* Outputs some appropriate text to go at the start of an assembler
   file: symbolic names for SREG/SP and the fixed temp/zero registers,
   plus references that pull the data/bss startup code from libgcc.  */

static void
avr_file_start (void)
{
  /* Some devices are assembler-only; refuse to compile C for them.  */
  if (avr_current_arch->asm_only)
    error ("MCU %qs supported for assembler only", avr_mcu_name);

  default_file_start ();

/*  fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
  fputs ("__SREG__ = 0x3f\n"
	 "__SP_H__ = 0x3e\n"
	 "__SP_L__ = 0x3d\n", asm_out_file);

  /* r0 is the scratch register, r1 the always-zero register that the
     asm templates in this file rely on.  */
  fputs ("__tmp_reg__ = 0\n"
	 "__zero_reg__ = 1\n", asm_out_file);

  /* FIXME: output these only if there is anything in the .data / .bss
     sections - some code size could be saved by not linking in the
     initialization code from libgcc if one or both sections are empty.  */
  fputs ("\t.global __do_copy_data\n", asm_out_file);
  fputs ("\t.global __do_clear_bss\n", asm_out_file);
}
4818
/* Outputs to the stdio stream FILE some
   appropriate text to go at the end of an assembler file.  */

static void
avr_file_end (void)
{
  /* Intentionally empty: the AVR port emits no file trailer.  */
}
4826
4827 /* Choose the order in which to allocate hard registers for
4828 pseudo-registers local to a basic block.
4829
4830 Store the desired register order in the array `reg_alloc_order'.
4831 Element 0 should be the register to allocate first; element 1, the
4832 next register; and so on. */
4833
4834 void
4835 order_regs_for_local_alloc (void)
4836 {
4837 unsigned int i;
4838 static const int order_0[] = {
4839 24,25,
4840 18,19,
4841 20,21,
4842 22,23,
4843 30,31,
4844 26,27,
4845 28,29,
4846 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4847 0,1,
4848 32,33,34,35
4849 };
4850 static const int order_1[] = {
4851 18,19,
4852 20,21,
4853 22,23,
4854 24,25,
4855 30,31,
4856 26,27,
4857 28,29,
4858 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4859 0,1,
4860 32,33,34,35
4861 };
4862 static const int order_2[] = {
4863 25,24,
4864 23,22,
4865 21,20,
4866 19,18,
4867 30,31,
4868 26,27,
4869 28,29,
4870 17,16,
4871 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4872 1,0,
4873 32,33,34,35
4874 };
4875
4876 const int *order = (TARGET_ORDER_1 ? order_1 :
4877 TARGET_ORDER_2 ? order_2 :
4878 order_0);
4879 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4880 reg_alloc_order[i] = order[i];
4881 }
4882
4883
4884 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
4885 cost of an RTX operand given its context. X is the rtx of the
4886 operand, MODE is its mode, and OUTER is the rtx_code of this
4887 operand's parent operator. */
4888
4889 static int
4890 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4891 {
4892 enum rtx_code code = GET_CODE (x);
4893 int total;
4894
4895 switch (code)
4896 {
4897 case REG:
4898 case SUBREG:
4899 return 0;
4900
4901 case CONST_INT:
4902 case CONST_DOUBLE:
4903 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
4904
4905 default:
4906 break;
4907 }
4908
4909 total = 0;
4910 avr_rtx_costs (x, code, outer, &total);
4911 return total;
4912 }
4913
/* The AVR backend's rtx_cost function.  X is rtx expression whose cost
   is to be calculated.  Return true if the complete cost has been
   computed, and false if subexpressions should be scanned.  In either
   case, *TOTAL contains the cost result.

   Costs are expressed in COSTS_N_INSNS units, i.e. approximately the
   number of AVR instructions needed; the per-shift-count tables below
   presumably mirror the instruction sequences emitted by the shift
   output routines earlier in this file -- TODO confirm against
   out_shift_with_cnt and friends.  */

static bool
avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
{
  enum machine_mode mode = GET_MODE (x);
  HOST_WIDE_INT val;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Immediate constants are as cheap as registers.  */
      *total = 0;
      return true;

    case MEM:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      /* Memory and symbolic references: one insn per byte of the mode.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      return true;

    case NEG:
      /* Negation: 1 insn for a byte (and for SFmode sign-bit flip),
	 more for multi-byte modes.  */
      switch (mode)
	{
	case QImode:
	case SFmode:
	  *total = COSTS_N_INSNS (1);
	  break;

	case HImode:
	  *total = COSTS_N_INSNS (3);
	  break;

	case SImode:
	  *total = COSTS_N_INSNS (7);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case ABS:
      /* Absolute value is cheap only for single-byte and float modes.  */
      switch (mode)
	{
	case QImode:
	case SFmode:
	  *total = COSTS_N_INSNS (1);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case NOT:
      /* One COM insn per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case ZERO_EXTEND:
      /* One clear insn per newly introduced byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case SIGN_EXTEND:
      /* Sign extension additionally needs to replicate the sign bit,
	 hence the extra 2 insns.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
			      - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case PLUS:
      switch (mode)
	{
	case QImode:
	  *total = COSTS_N_INSNS (1);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	  break;

	case HImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (2);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  /* Small constants fit ADIW/SBIW (one insn).  */
	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (2);
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (4);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
	    *total = COSTS_N_INSNS (1);
	  else
	    *total = COSTS_N_INSNS (4);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case MINUS:
    case AND:
    case IOR:
      /* One insn per byte; constant second operands are folded into
	 the immediate forms and thus not charged.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
      return true;

    case XOR:
      /* No immediate form of EOR exists, so both operands are charged.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
      return true;

    case MULT:
      switch (mode)
	{
	case QImode:
	  if (AVR_HAVE_MUL)
	    *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
	  else if (optimize_size)
	    /* No hardware multiplier: a library call.  */
	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
	  else
	    return false;
	  break;

	case HImode:
	  if (AVR_HAVE_MUL)
	    *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
	  else if (optimize_size)
	    *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
	  else
	    return false;
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
      return true;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      /* Division is always a library call; only priced when
	 optimizing for size.  */
      if (optimize_size)
	*total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      else
	return false;
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
      return true;

    case ASHIFT:
      switch (mode)
	{
	case QImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      /* Variable shift count: a loop.  */
	      *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 7)
		*total = COSTS_N_INSNS (3);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		/* Out-of-range count: result is known zero.  */
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    /* Per-count costs of the specialized HImode shift sequences.  */
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
	      case 8:
		*total = COSTS_N_INSNS (2);
		break;
	      case 9:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 3:
	      case 10:
	      case 15:
		*total = COSTS_N_INSNS (4);
		break;
	      case 7:
	      case 11:
	      case 12:
		*total = COSTS_N_INSNS (5);
		break;
	      case 4:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 8);
		break;
	      case 6:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 9);
		break;
	      case 5:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 10);
		break;
	      default:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 41);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	      }
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 24:
		*total = COSTS_N_INSNS (3);
		break;
	      case 1:
	      case 8:
	      case 16:
		*total = COSTS_N_INSNS (4);
		break;
	      case 31:
		*total = COSTS_N_INSNS (6);
		break;
	      case 2:
		*total = COSTS_N_INSNS (optimize_size ? 7 : 8);
		break;
	      default:
		*total = COSTS_N_INSNS (optimize_size ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case ASHIFTRT:
      switch (mode)
	{
	case QImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 6)
		*total = COSTS_N_INSNS (4);
	      else if (val == 7)
		/* >>7 is just a sign-bit broadcast.  */
		*total = COSTS_N_INSNS (2);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (2);
		break;
	      case 15:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 7:
	      case 8:
	      case 9:
		*total = COSTS_N_INSNS (4);
		break;
	      case 10:
	      case 14:
		*total = COSTS_N_INSNS (5);
		break;
	      case 11:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 6);
		break;
	      case 12:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 7);
		break;
	      case 6:
	      case 13:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 8);
		break;
	      default:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 41);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	      }
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (4);
		break;
	      case 8:
	      case 16:
	      case 24:
		*total = COSTS_N_INSNS (6);
		break;
	      case 2:
		*total = COSTS_N_INSNS (optimize_size ? 7 : 8);
		break;
	      case 31:
		*total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
		break;
	      default:
		*total = COSTS_N_INSNS (optimize_size ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case LSHIFTRT:
      switch (mode)
	{
	case QImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    {
	      val = INTVAL (XEXP (x, 1));
	      if (val == 7)
		*total = COSTS_N_INSNS (3);
	      else if (val >= 0 && val <= 7)
		*total = COSTS_N_INSNS (val);
	      else
		*total = COSTS_N_INSNS (1);
	    }
	  break;

	case HImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
	      case 8:
		*total = COSTS_N_INSNS (2);
		break;
	      case 9:
		*total = COSTS_N_INSNS (3);
		break;
	      case 2:
	      case 10:
	      case 15:
		*total = COSTS_N_INSNS (4);
		break;
	      case 7:
	      case 11:
		*total = COSTS_N_INSNS (5);
		break;
	      case 3:
	      case 12:
	      case 13:
	      case 14:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 6);
		break;
	      case 4:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 7);
		break;
	      case 5:
	      case 6:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 9);
		break;
	      default:
		*total = COSTS_N_INSNS (optimize_size ? 5 : 41);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	      }
	  break;

	case SImode:
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    {
	      *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
	      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	    }
	  else
	    switch (INTVAL (XEXP (x, 1)))
	      {
	      case 0:
		*total = 0;
		break;
	      case 1:
		*total = COSTS_N_INSNS (4);
		break;
	      case 2:
		*total = COSTS_N_INSNS (optimize_size ? 7 : 8);
		break;
	      case 8:
	      case 16:
	      case 24:
		/* Whole-byte moves are cheap for logical right shifts.  */
		*total = COSTS_N_INSNS (4);
		break;
	      case 31:
		*total = COSTS_N_INSNS (6);
		break;
	      default:
		*total = COSTS_N_INSNS (optimize_size ? 7 : 113);
		*total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	      }
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    case COMPARE:
      /* Compares cost one insn per byte; a nonzero constant second
	 operand needs extra insns to materialize in wider modes.  */
      switch (GET_MODE (XEXP (x, 0)))
	{
	case QImode:
	  *total = COSTS_N_INSNS (1);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	  break;

	case HImode:
	  *total = COSTS_N_INSNS (2);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (1);
	  break;

	case SImode:
	  *total = COSTS_N_INSNS (4);
	  if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	    *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
	  else if (INTVAL (XEXP (x, 1)) != 0)
	    *total += COSTS_N_INSNS (3);
	  break;

	default:
	  return false;
	}
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
      return true;

    default:
      break;
    }
  /* Unhandled code: let the caller recurse over the operands.  */
  return false;
}
5438
5439 /* Calculate the cost of a memory address. */
5440
5441 static int
5442 avr_address_cost (rtx x)
5443 {
5444 if (GET_CODE (x) == PLUS
5445 && GET_CODE (XEXP (x,1)) == CONST_INT
5446 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5447 && INTVAL (XEXP (x,1)) >= 61)
5448 return 18;
5449 if (CONSTANT_ADDRESS_P (x))
5450 {
5451 if (optimize > 0 && io_address_operand (x, QImode))
5452 return 2;
5453 return 4;
5454 }
5455 return 4;
5456 }
5457
5458 /* Test for extra memory constraint 'Q'.
5459 It's a memory address based on Y or Z pointer with valid displacement. */
5460
5461 int
5462 extra_constraint_Q (rtx x)
5463 {
5464 if (GET_CODE (XEXP (x,0)) == PLUS
5465 && REG_P (XEXP (XEXP (x,0), 0))
5466 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5467 && (INTVAL (XEXP (XEXP (x,0), 1))
5468 <= MAX_LD_OFFSET (GET_MODE (x))))
5469 {
5470 rtx xx = XEXP (XEXP (x,0), 0);
5471 int regno = REGNO (xx);
5472 if (TARGET_ALL_DEBUG)
5473 {
5474 fprintf (stderr, ("extra_constraint:\n"
5475 "reload_completed: %d\n"
5476 "reload_in_progress: %d\n"),
5477 reload_completed, reload_in_progress);
5478 debug_rtx (x);
5479 }
5480 if (regno >= FIRST_PSEUDO_REGISTER)
5481 return 1; /* allocate pseudos */
5482 else if (regno == REG_Z || regno == REG_Y)
5483 return 1; /* strictly check */
5484 else if (xx == frame_pointer_rtx
5485 || xx == arg_pointer_rtx)
5486 return 1; /* XXX frame & arg pointer checks */
5487 }
5488 return 0;
5489 }
5490
5491 /* Convert condition code CONDITION to the valid AVR condition code. */
5492
5493 RTX_CODE
5494 avr_normalize_condition (RTX_CODE condition)
5495 {
5496 switch (condition)
5497 {
5498 case GT:
5499 return GE;
5500 case GTU:
5501 return GEU;
5502 case LE:
5503 return LT;
5504 case LEU:
5505 return LTU;
5506 default:
5507 gcc_unreachable ();
5508 }
5509 }
5510
/* This function optimizes conditional jumps.  Machine-dependent reorg
   pass (TARGET_MACHINE_DEPENDENT_REORG worker): rewrites cc0-setting
   compare/tst insns in place -- swapping operands or bumping a
   constant -- and patches the condition of the following branch to
   match, so that simpler compare forms can be used.  */

static void
avr_reorg (void)
{
  rtx insn, pattern;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Only look at real insns that are a single SET.  */
      if (! (GET_CODE (insn) == INSN
	     || GET_CODE (insn) == CALL_INSN
	     || GET_CODE (insn) == JUMP_INSN)
	  || !single_set (insn))
	continue;

      pattern = PATTERN (insn);

      /* A compare with clobber appears as a PARALLEL; the SET is
	 element 0.  */
      if (GET_CODE (pattern) == PARALLEL)
	pattern = XVECEXP (pattern, 0, 0);
      if (GET_CODE (pattern) == SET
	  && SET_DEST (pattern) == cc0_rtx
	  && compare_diff_p (insn))
	{
	  if (GET_CODE (SET_SRC (pattern)) == COMPARE)
	    {
	      /* Now we work under compare insn.  */
	      
	      pattern = SET_SRC (pattern);
	      if (true_regnum (XEXP (pattern,0)) >= 0
		  && true_regnum (XEXP (pattern,1)) >= 0 )
		{
		  /* reg-reg compare: swap the operands and reverse the
		     condition of the following branch to match.
		     NOTE(review): next_real_insn is assumed non-NULL
		     and a branch here -- presumably guaranteed by
		     compare_diff_p; confirm.  */
		  rtx x = XEXP (pattern,0);
		  rtx next = next_real_insn (insn);
		  rtx pat = PATTERN (next);
		  rtx src = SET_SRC (pat);
		  rtx t = XEXP (src,0);
		  PUT_CODE (t, swap_condition (GET_CODE (t)));
		  XEXP (pattern,0) = XEXP (pattern,1);
		  XEXP (pattern,1) = x;
		  /* Force re-recognition of the modified branch.  */
		  INSN_CODE (next) = -1;
		}
	      else if (true_regnum (XEXP (pattern,0)) >= 0
		       && GET_CODE (XEXP (pattern,1)) == CONST_INT)
		{
		  /* reg-const compare: if the strict comparison can be
		     turned into a non-strict one against constant+1,
		     do so (see avr_normalize_condition).  */
		  rtx x = XEXP (pattern,1);
		  rtx next = next_real_insn (insn);
		  rtx pat = PATTERN (next);
		  rtx src = SET_SRC (pat);
		  rtx t = XEXP (src,0);
		  enum machine_mode mode = GET_MODE (XEXP (pattern, 0));

		  if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
		    {
		      XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
		      PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
		      INSN_CODE (next) = -1;
		      INSN_CODE (insn) = -1;
		    }
		}
	    }
	  else if (true_regnum (SET_SRC (pattern)) >= 0)
	    {
	      /* This is a tst insn */
	      rtx next = next_real_insn (insn);
	      rtx pat = PATTERN (next);
	      rtx src = SET_SRC (pat);
	      rtx t = XEXP (src,0);

	      /* Turn the tst into a compare against negated value and
		 swap the branch condition accordingly.  */
	      PUT_CODE (t, swap_condition (GET_CODE (t)));
	      SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
					       SET_SRC (pattern));
	      INSN_CODE (next) = -1;
	      INSN_CODE (insn) = -1;
	    }
	}
    }
}
5588
/* Returns the hard register number (r24) used as the base for
   function return values; see avr_libcall_value for how wider
   values are placed relative to it.  */

int
avr_ret_register (void)
{
  return 24;
}
5596
5597 /* Create an RTX representing the place where a
5598 library function returns a value of mode MODE. */
5599
5600 rtx
5601 avr_libcall_value (enum machine_mode mode)
5602 {
5603 int offs = GET_MODE_SIZE (mode);
5604 if (offs < 2)
5605 offs = 2;
5606 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5607 }
5608
5609 /* Create an RTX representing the place where a
5610 function returns a value of data type VALTYPE. */
5611
5612 rtx
5613 avr_function_value (const_tree type, const_tree func ATTRIBUTE_UNUSED)
5614 {
5615 unsigned int offs;
5616
5617 if (TYPE_MODE (type) != BLKmode)
5618 return avr_libcall_value (TYPE_MODE (type));
5619
5620 offs = int_size_in_bytes (type);
5621 if (offs < 2)
5622 offs = 2;
5623 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5624 offs = GET_MODE_SIZE (SImode);
5625 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5626 offs = GET_MODE_SIZE (DImode);
5627
5628 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5629 }
5630
5631 /* Places additional restrictions on the register class to
5632 use when it is necessary to copy value X into a register
5633 in class CLASS. */
5634
5635 enum reg_class
5636 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
5637 {
5638 return class;
5639 }
5640
5641 int
5642 test_hard_reg_class (enum reg_class class, rtx x)
5643 {
5644 int regno = true_regnum (x);
5645 if (regno < 0)
5646 return 0;
5647
5648 if (TEST_HARD_REG_CLASS (class, regno))
5649 return 1;
5650
5651 return 0;
5652 }
5653
5654
5655 int
5656 jump_over_one_insn_p (rtx insn, rtx dest)
5657 {
5658 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5659 ? XEXP (dest, 0)
5660 : dest);
5661 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5662 int dest_addr = INSN_ADDRESSES (uid);
5663 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5664 }
5665
5666 /* Returns 1 if a value of mode MODE can be stored starting with hard
5667 register number REGNO. On the enhanced core, anything larger than
5668 1 byte must start in even numbered register for "movw" to work
5669 (this way we don't have to check for odd registers everywhere). */
5670
5671 int
5672 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5673 {
5674 /* Disallow QImode in stack pointer regs. */
5675 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5676 return 0;
5677
5678 /* The only thing that can go into registers r28:r29 is a Pmode. */
5679 if (regno == REG_Y && mode == Pmode)
5680 return 1;
5681
5682 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5683 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5684 return 0;
5685
5686 if (mode == QImode)
5687 return 1;
5688
5689 /* Modes larger than QImode occupy consecutive registers. */
5690 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5691 return 0;
5692
5693 /* All modes larger than QImode should start in an even register. */
5694 return !(regno & 1);
5695 }
5696
/* Output assembler to load the HImode value operand 1 into register
   operand 0, using operand 2 as a scratch register that LDI can
   target (presumably r16..r31 -- TODO confirm against the insn
   pattern's constraint).  If LEN is non-NULL only store the length
   (in insns) of the sequence there; the special cases avoid one LDI
   when a constant's low or high byte is zero or both bytes are
   equal.  Returns the assembler template string.  */

const char *
output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
{
  int tmp;
  if (!len)
    len = &tmp;

  if (GET_CODE (operands[1]) == CONST_INT)
    {
      int val = INTVAL (operands[1]);
      if ((val & 0xff) == 0)
	{
	  /* Low byte zero: copy __zero_reg__, load only the high byte.  */
	  *len = 3;
	  return (AS2 (mov,%A0,__zero_reg__) CR_TAB
		  AS2 (ldi,%2,hi8(%1))      CR_TAB
		  AS2 (mov,%B0,%2));
	}
      else if ((val & 0xff00) == 0)
	{
	  /* High byte zero: load only the low byte.  */
	  *len = 3;
	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
		  AS2 (mov,%A0,%2)     CR_TAB
		  AS2 (mov,%B0,__zero_reg__));
	}
      else if ((val & 0xff) == ((val & 0xff00) >> 8))
	{
	  /* Both bytes identical: load once, copy twice.  */
	  *len = 3;
	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
		  AS2 (mov,%A0,%2)     CR_TAB
		  AS2 (mov,%B0,%2));
	}
    }
  /* General case: load each byte through the scratch register.  */
  *len = 4;
  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
	  AS2 (mov,%A0,%2)     CR_TAB
	  AS2 (ldi,%2,hi8(%1)) CR_TAB
	  AS2 (mov,%B0,%2));
}
5735
5736
/* Output assembler to load the 32-bit (SImode/SFmode) value operand 1
   into register operand 0, byte by byte, using operand 2 as an
   LDI-capable scratch register.  Zero bytes of a constant source are
   copied from __zero_reg__ instead of being loaded, saving one LDI
   each.  If LEN is non-NULL, only compute the sequence length there
   and emit nothing.  */

const char *
output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
{
  rtx src = operands[1];
  int cnst = (GET_CODE (src) == CONST_INT);

  if (len)
    {
      /* Length only: 1 insn per byte (mov), plus one ldi for every
	 nonzero byte of a constant; 8 insns for a non-constant.  */
      if (cnst)
	*len = 4 + ((INTVAL (src) & 0xff) != 0)
	  + ((INTVAL (src) & 0xff00) != 0)
	  + ((INTVAL (src) & 0xff0000) != 0)
	  + ((INTVAL (src) & 0xff000000) != 0);
      else
	*len = 8;

      return "";
    }

  /* Byte 0 (lo8).  */
  if (cnst && ((INTVAL (src) & 0xff) == 0))
    output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
      output_asm_insn (AS2 (mov, %A0, %2), operands);
    }
  /* Byte 1 (hi8).  */
  if (cnst && ((INTVAL (src) & 0xff00) == 0))
    output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
      output_asm_insn (AS2 (mov, %B0, %2), operands);
    }
  /* Byte 2 (hlo8).  */
  if (cnst && ((INTVAL (src) & 0xff0000) == 0))
    output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
      output_asm_insn (AS2 (mov, %C0, %2), operands);
    }
  /* Byte 3 (hhi8).  */
  if (cnst && ((INTVAL (src) & 0xff000000) == 0))
    output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
      output_asm_insn (AS2 (mov, %D0, %2), operands);
    }
  return "";
}
5786
5787 void
5788 avr_output_bld (rtx operands[], int bit_nr)
5789 {
5790 static char s[] = "bld %A0,0";
5791
5792 s[5] = 'A' + (bit_nr >> 3);
5793 s[8] = '0' + (bit_nr & 7);
5794 output_asm_insn (s, operands);
5795 }
5796
5797 void
5798 avr_output_addr_vec_elt (FILE *stream, int value)
5799 {
5800 switch_to_section (progmem_section);
5801 if (AVR_HAVE_JMP_CALL)
5802 fprintf (stream, "\t.word gs(.L%d)\n", value);
5803 else
5804 fprintf (stream, "\trjmp .L%d\n", value);
5805 }
5806
5807 /* Returns 1 if SCRATCH are safe to be allocated as a scratch
5808 registers (for a define_peephole2) in the current function. */
5809
5810 int
5811 avr_peep2_scratch_safe (rtx scratch)
5812 {
5813 if ((interrupt_function_p (current_function_decl)
5814 || signal_function_p (current_function_decl))
5815 && leaf_function_p ())
5816 {
5817 int first_reg = true_regnum (scratch);
5818 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
5819 int reg;
5820
5821 for (reg = first_reg; reg <= last_reg; reg++)
5822 {
5823 if (!df_regs_ever_live_p (reg))
5824 return 0;
5825 }
5826 }
5827 return 1;
5828 }
5829
/* Output a branch that tests a single bit of a register (QI, HI or SImode)
   or memory location in the I/O space (QImode only).

   Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
   Operand 1: register operand to test, or CONST_INT memory address.
   Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
   Operand 3: label to jump to if the test is true.

   Emits a skip instruction (SBIS/SBIC/SBRS/SBRC) followed by the jump;
   for long jumps, or when the target is only one insn away, the sense
   of the skip is reversed.  */

const char *
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  int long_jump = (get_attr_length (insn) >= 4);
  /* Reverse the test when we will skip over the jump (long form) or
     when the jump itself can be replaced by the skip.  */
  int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* GE/LT on the sign bit are equivalent to EQ/NE on that bit.  */
  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  if (reverse)
    comp = reverse_condition (comp);

  if (GET_CODE (operands[1]) == CONST_INT)
    {
      /* Constant operand: an I/O-space address.  Addresses below 0x40
	 (after the 0x20 memory-map offset) are reachable by SBIS/SBIC.  */
      if (INTVAL (operands[1]) < 0x40)
	{
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
	  else
	    output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
	}
      else
	{
	  /* Higher I/O addresses: read into __tmp_reg__ first and use
	     the register-bit skips.  */
	  output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
	  else
	    output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
	}
    }
  else	/* GET_CODE (operands[1]) == REG */
    {
      if (GET_MODE (operands[1]) == QImode)
	{
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbrs,%1,%2), operands);
	  else
	    output_asm_insn (AS2 (sbrc,%1,%2), operands);
	}
      else  /* HImode or SImode */
	{
	  /* Operand 2 is a single-bit mask; pick the byte (A..D) and
	     bit digit from its log2.  Template patched in place.  */
	  static char buf[] = "sbrc %A1,0";
	  int bit_nr = exact_log2 (INTVAL (operands[2])
				   & GET_MODE_MASK (GET_MODE (operands[1])));

	  buf[3] = (comp == EQ) ? 's' : 'c';
	  buf[6] = 'A' + (bit_nr >> 3);
	  buf[9] = '0' + (bit_nr & 7);
	  output_asm_insn (buf, operands);
	}
    }

  if (long_jump)
    /* Skip over the two-word JMP when the (reversed) test holds.  */
    return (AS1 (rjmp,.+4) CR_TAB
	    AS1 (jmp,%3));
  if (!reverse)
    return AS1 (rjmp,%3);
  /* Reversed short form: the skip instruction already did the jump's
     job by skipping the single target insn.  */
  return "";
}
5900
/* Worker function for TARGET_ASM_CONSTRUCTOR.  Emit SYMBOL into the
   constructor section with PRIORITY, first forcing __do_global_ctors
   (from libgcc) to be linked in so the table is actually walked at
   startup.  */

static void
avr_asm_out_ctor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_ctors\n", asm_out_file);
  default_ctor_section_asm_out_constructor (symbol, priority);
}
5909
/* Worker function for TARGET_ASM_DESTRUCTOR.  Emit SYMBOL into the
   destructor section with PRIORITY, first forcing __do_global_dtors
   (from libgcc) to be linked in so the table is walked at exit.  */

static void
avr_asm_out_dtor (rtx symbol, int priority)
{
  fputs ("\t.global __do_global_dtors\n", asm_out_file);
  default_dtor_section_asm_out_destructor (symbol, priority);
}
5918
5919 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5920
5921 static bool
5922 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5923 {
5924 if (TYPE_MODE (type) == BLKmode)
5925 {
5926 HOST_WIDE_INT size = int_size_in_bytes (type);
5927 return (size == -1 || size > 8);
5928 }
5929 else
5930 return false;
5931 }
5932
5933 #include "gt-avr.h"