1 /* Subroutines for insn-output.c for ATMEL AVR microcontrollers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007
3 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to
20 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
21 Boston, MA 02110-1301, USA. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "reload.h"
36 #include "tree.h"
37 #include "output.h"
38 #include "expr.h"
39 #include "toplev.h"
40 #include "obstack.h"
41 #include "function.h"
42 #include "recog.h"
43 #include "ggc.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "target-def.h"
47
48 /* Maximum allowed offset for an address in the LD instruction.  */
49 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
50
51 static int avr_naked_function_p (tree);
52 static int interrupt_function_p (tree);
53 static int signal_function_p (tree);
54 static int avr_regs_to_save (HARD_REG_SET *);
55 static int sequent_regs_live (void);
56 static const char *ptrreg_to_str (int);
57 static const char *cond_string (enum rtx_code);
58 static int avr_num_arg_regs (enum machine_mode, tree);
59 static int out_adj_frame_ptr (FILE *, int);
60 static int out_set_stack_ptr (FILE *, int, int);
61 static RTX_CODE compare_condition (rtx insn);
62 static int compare_sign_p (rtx insn);
63 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
64 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
65 const struct attribute_spec avr_attribute_table[];
66 static bool avr_assemble_integer (rtx, unsigned int, int);
67 static void avr_file_start (void);
68 static void avr_file_end (void);
69 static void avr_output_function_prologue (FILE *, HOST_WIDE_INT);
70 static void avr_output_function_epilogue (FILE *, HOST_WIDE_INT);
71 static void avr_insert_attributes (tree, tree *);
72 static void avr_asm_init_sections (void);
73 static unsigned int avr_section_type_flags (tree, const char *, int);
74
75 static void avr_reorg (void);
76 static void avr_asm_out_ctor (rtx, int);
77 static void avr_asm_out_dtor (rtx, int);
78 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code);
79 static bool avr_rtx_costs (rtx, int, int, int *);
80 static int avr_address_cost (rtx);
81 static bool avr_return_in_memory (tree, tree);
82
83 /* Allocate registers r25 down to r8 for function call parameters.  */
84 #define FIRST_CUM_REG 26
85
86 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
87 static GTY(()) rtx tmp_reg_rtx;
88
89 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
90 static GTY(()) rtx zero_reg_rtx;
91
92 /* AVR register names {"r0", "r1", ..., "r31"} */
93 static const char *const avr_regnames[] = REGISTER_NAMES;
94
95 /* This holds the last insn address. */
96 static int last_insn_address = 0;
97
98 /* Command count in the compiled file.  */
99 static int commands_in_file;
100
101 /* Commands in the function prologues in the compiled file.  */
102 static int commands_in_prologues;
103
104 /* Commands in the function epilogues in the compiled file.  */
105 static int commands_in_epilogues;
106
107 /* Prologue/Epilogue size in words */
108 static int prologue_size;
109 static int epilogue_size;
110
111 /* Size of all jump tables in the current function, in words. */
112 static int jump_tables_size;
113
114 /* Preprocessor macros to define depending on MCU type. */
115 const char *avr_base_arch_macro;
116 const char *avr_extra_arch_macro;
117
118 section *progmem_section;
119
120 /* More than 8K of program memory: use "call" and "jmp". */
121 int avr_mega_p = 0;
122
123 /* Core has 'MUL*' instructions.  */
124 int avr_have_mul_p = 0;
125
126 /* Assembler only. */
127 int avr_asm_only_p = 0;
128
129 /* Core has 'MOVW' and 'LPM Rx,Z' instructions.  */
130 int avr_have_movw_lpmx_p = 0;
131
132 struct base_arch_s {
133 int asm_only;
134 int have_mul;
135 int mega;
136 int have_movw_lpmx;
137 const char *const macro;
138 };
139
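/* Architecture capabilities, indexed by the 'arch' field of struct
   mcu_type_s below.  Fields follow struct base_arch_s above:
   { asm_only, have_mul, mega, have_movw_lpmx, macro }.  */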
140 static const struct base_arch_s avr_arch_types[] = {
141 { 1, 0, 0, 0, NULL }, /* unknown device specified */
142 { 1, 0, 0, 0, "__AVR_ARCH__=1" },
143 { 0, 0, 0, 0, "__AVR_ARCH__=2" },
144 { 0, 0, 1, 0, "__AVR_ARCH__=3" },
145 { 0, 1, 0, 1, "__AVR_ARCH__=4" },
146 { 0, 1, 1, 1, "__AVR_ARCH__=5" },
147 { 0, 0, 0, 1, "__AVR_ARCH__=25"}
148 };
149
150 struct mcu_type_s {
151 const char *const name;
152 int arch; /* index in avr_arch_types[] */
153 /* Must lie outside user's namespace. NULL == no macro. */
154 const char *const macro;
155 };
156
157 /* List of all known AVR MCU types - if updated, it has to be kept
158 in sync in several places (FIXME: is there a better way?):
159 - here
160 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
161 - t-avr (MULTILIB_MATCHES)
162 - gas/config/tc-avr.c
163 - avr-libc */
164
165 static const struct mcu_type_s avr_mcu_types[] = {
166 /* Classic, <= 8K. */
167 { "avr2", 2, NULL },
168 { "at90s2313", 2, "__AVR_AT90S2313__" },
169 { "at90s2323", 2, "__AVR_AT90S2323__" },
170 { "at90s2333", 2, "__AVR_AT90S2333__" },
171 { "at90s2343", 2, "__AVR_AT90S2343__" },
172 { "attiny22", 2, "__AVR_ATtiny22__" },
173 { "attiny26", 2, "__AVR_ATtiny26__" },
174 { "at90s4414", 2, "__AVR_AT90S4414__" },
175 { "at90s4433", 2, "__AVR_AT90S4433__" },
176 { "at90s4434", 2, "__AVR_AT90S4434__" },
177 { "at90s8515", 2, "__AVR_AT90S8515__" },
178 { "at90c8534", 2, "__AVR_AT90C8534__" },
179 { "at90s8535", 2, "__AVR_AT90S8535__" },
180 /* Classic + MOVW, <= 8K. */
181 { "avr25", 6, NULL },
182 { "attiny13", 6, "__AVR_ATtiny13__" },
183 { "attiny2313", 6, "__AVR_ATtiny2313__" },
184 { "attiny24", 6, "__AVR_ATtiny24__" },
185 { "attiny44", 6, "__AVR_ATtiny44__" },
186 { "attiny84", 6, "__AVR_ATtiny84__" },
187 { "attiny25", 6, "__AVR_ATtiny25__" },
188 { "attiny45", 6, "__AVR_ATtiny45__" },
189 { "attiny85", 6, "__AVR_ATtiny85__" },
190 { "attiny261", 6, "__AVR_ATtiny261__" },
191 { "attiny461", 6, "__AVR_ATtiny461__" },
192 { "attiny861", 6, "__AVR_ATtiny861__" },
193 { "at86rf401", 6, "__AVR_AT86RF401__" },
194 /* Classic, > 8K. */
195 { "avr3", 3, NULL },
196 { "atmega103", 3, "__AVR_ATmega103__" },
197 { "atmega603", 3, "__AVR_ATmega603__" },
198 { "at43usb320", 3, "__AVR_AT43USB320__" },
199 { "at43usb355", 3, "__AVR_AT43USB355__" },
200 { "at76c711", 3, "__AVR_AT76C711__" },
201 /* Enhanced, <= 8K. */
202 { "avr4", 4, NULL },
203 { "atmega8", 4, "__AVR_ATmega8__" },
204 { "atmega48", 4, "__AVR_ATmega48__" },
205 { "atmega88", 4, "__AVR_ATmega88__" },
206 { "atmega8515", 4, "__AVR_ATmega8515__" },
207 { "atmega8535", 4, "__AVR_ATmega8535__" },
208 { "at90pwm1", 4, "__AVR_AT90PWM1__" },
209 { "at90pwm2", 4, "__AVR_AT90PWM2__" },
210 { "at90pwm3", 4, "__AVR_AT90PWM3__" },
211 /* Enhanced, > 8K. */
212 { "avr5", 5, NULL },
213 { "atmega16", 5, "__AVR_ATmega16__" },
214 { "atmega161", 5, "__AVR_ATmega161__" },
215 { "atmega162", 5, "__AVR_ATmega162__" },
216 { "atmega163", 5, "__AVR_ATmega163__" },
217 { "atmega164p",5, "__AVR_ATmega164P__" },
218 { "atmega165", 5, "__AVR_ATmega165__" },
219 { "atmega165p",5, "__AVR_ATmega165P__" },
220 { "atmega168", 5, "__AVR_ATmega168__" },
221 { "atmega169", 5, "__AVR_ATmega169__" },
222 { "atmega169p",5, "__AVR_ATmega169P__" },
223 { "atmega32", 5, "__AVR_ATmega32__" },
224 { "atmega323", 5, "__AVR_ATmega323__" },
225 { "atmega324p",5, "__AVR_ATmega324P__" },
226 { "atmega325", 5, "__AVR_ATmega325__" },
227 { "atmega3250", 5, "__AVR_ATmega3250__" },
228 { "atmega329", 5, "__AVR_ATmega329__" },
229 { "atmega3290", 5, "__AVR_ATmega3290__" },
230 { "atmega406", 5, "__AVR_ATmega406__" },
231 { "atmega64", 5, "__AVR_ATmega64__" },
232 { "atmega640", 5, "__AVR_ATmega640__" },
233 { "atmega644", 5, "__AVR_ATmega644__" },
234 { "atmega644p",5, "__AVR_ATmega644P__" },
235 { "atmega645", 5, "__AVR_ATmega645__" },
236 { "atmega6450", 5, "__AVR_ATmega6450__" },
237 { "atmega649", 5, "__AVR_ATmega649__" },
238 { "atmega6490", 5, "__AVR_ATmega6490__" },
239 { "atmega128", 5, "__AVR_ATmega128__" },
240 { "atmega1280",5, "__AVR_ATmega1280__" },
241 { "atmega1281",5, "__AVR_ATmega1281__" },
242 { "at90can32", 5, "__AVR_AT90CAN32__" },
243 { "at90can64", 5, "__AVR_AT90CAN64__" },
244 { "at90can128", 5, "__AVR_AT90CAN128__" },
245 { "at90usb646", 5, "__AVR_AT90USB646__" },
246 { "at90usb647", 5, "__AVR_AT90USB647__" },
247 { "at90usb1286", 5, "__AVR_AT90USB1286__" },
248 { "at90usb1287", 5, "__AVR_AT90USB1287__" },
249 { "at94k", 5, "__AVR_AT94K__" },
250 /* Assembler only. */
251 { "avr1", 1, NULL },
252 { "at90s1200", 1, "__AVR_AT90S1200__" },
253 { "attiny11", 1, "__AVR_ATtiny11__" },
254 { "attiny12", 1, "__AVR_ATtiny12__" },
255 { "attiny15", 1, "__AVR_ATtiny15__" },
256 { "attiny28", 1, "__AVR_ATtiny28__" },
257 { NULL, 0, NULL }
258 };
259
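/* Minimum number of switch case values for which a jump table is used.
   The default of 30000 effectively disables table jumps; it is lowered
   in avr_override_options when optimizing and table jumps are enabled.  */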
260 int avr_case_values_threshold = 30000;
261 \f
262 /* Initialize the GCC target structure. */
263 #undef TARGET_ASM_ALIGNED_HI_OP
264 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
265 #undef TARGET_ASM_ALIGNED_SI_OP
266 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
267 #undef TARGET_ASM_UNALIGNED_HI_OP
268 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
269 #undef TARGET_ASM_UNALIGNED_SI_OP
270 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
271 #undef TARGET_ASM_INTEGER
272 #define TARGET_ASM_INTEGER avr_assemble_integer
273 #undef TARGET_ASM_FILE_START
274 #define TARGET_ASM_FILE_START avr_file_start
275 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
276 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
277 #undef TARGET_ASM_FILE_END
278 #define TARGET_ASM_FILE_END avr_file_end
279
280 #undef TARGET_ASM_FUNCTION_PROLOGUE
281 #define TARGET_ASM_FUNCTION_PROLOGUE avr_output_function_prologue
282 #undef TARGET_ASM_FUNCTION_EPILOGUE
283 #define TARGET_ASM_FUNCTION_EPILOGUE avr_output_function_epilogue
284 #undef TARGET_ATTRIBUTE_TABLE
285 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
286 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
287 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
288 #undef TARGET_INSERT_ATTRIBUTES
289 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
290 #undef TARGET_SECTION_TYPE_FLAGS
291 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
292 #undef TARGET_RTX_COSTS
293 #define TARGET_RTX_COSTS avr_rtx_costs
294 #undef TARGET_ADDRESS_COST
295 #define TARGET_ADDRESS_COST avr_address_cost
296 #undef TARGET_MACHINE_DEPENDENT_REORG
297 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
298
299 #undef TARGET_RETURN_IN_MEMORY
300 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
301
302 #undef TARGET_STRICT_ARGUMENT_NAMING
303 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
304
305 struct gcc_target targetm = TARGET_INITIALIZER;
306 \f
307 void
308 avr_override_options (void)
309 {
310 const struct mcu_type_s *t;
311 const struct base_arch_s *base;
312
313 for (t = avr_mcu_types; t->name; t++)
314 if (strcmp (t->name, avr_mcu_name) == 0)
315 break;
316
317 if (!t->name)
318 {
319 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
320 avr_mcu_name);
321 for (t = avr_mcu_types; t->name; t++)
322 fprintf (stderr," %s\n", t->name);
323 }
324
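  /* If the MCU was not recognized, T is the terminating entry with arch 0,
     which selects the "unknown device" (assembler only) row above.  */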
325 base = &avr_arch_types[t->arch];
326 avr_asm_only_p = base->asm_only;
327 avr_have_mul_p = base->have_mul;
328 avr_mega_p = base->mega;
329 avr_have_movw_lpmx_p = base->have_movw_lpmx;
330 avr_base_arch_macro = base->macro;
331 avr_extra_arch_macro = t->macro;
332
333 if (optimize && !TARGET_NO_TABLEJUMP)
334 avr_case_values_threshold = (!AVR_MEGA || TARGET_CALL_PROLOGUES) ? 8 : 17;
335
336 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
337 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
338 }
339
340 /* Return register class from register number.  */
341
342 static const int reg_class_tab[]={
343 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
344 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
345 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
346 GENERAL_REGS, /* r0 - r15 */
347 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
348   LD_REGS,                      /* r16 - r23 */
349   ADDW_REGS,ADDW_REGS,          /* r24,r25 */
350   POINTER_X_REGS,POINTER_X_REGS, /* r26,r27 */
351 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
352 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
353 STACK_REG,STACK_REG /* SPL,SPH */
354 };
355
356 /* Return register class for register R. */
357
358 enum reg_class
359 avr_regno_reg_class (int r)
360 {
361 if (r <= 33)
362 return reg_class_tab[r];
363 return ALL_REGS;
364 }
365
366 /* Return nonzero if FUNC is a naked function. */
367
368 static int
369 avr_naked_function_p (tree func)
370 {
371 tree a;
372
373 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
374
375 a = lookup_attribute ("naked", DECL_ATTRIBUTES (func));
376 return a != NULL_TREE;
377 }
378
379 /* Return nonzero if FUNC is an interrupt function as specified
380 by the "interrupt" attribute. */
381
382 static int
383 interrupt_function_p (tree func)
384 {
385 tree a;
386
387 if (TREE_CODE (func) != FUNCTION_DECL)
388 return 0;
389
390 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
391 return a != NULL_TREE;
392 }
393
394 /* Return nonzero if FUNC is a signal function as specified
395 by the "signal" attribute. */
396
397 static int
398 signal_function_p (tree func)
399 {
400 tree a;
401
402 if (TREE_CODE (func) != FUNCTION_DECL)
403 return 0;
404
405 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
406 return a != NULL_TREE;
407 }
408
409 /* Return the number of hard registers to push/pop in the prologue/epilogue
410 of the current function, and optionally store these registers in SET. */
411
412 static int
413 avr_regs_to_save (HARD_REG_SET *set)
414 {
415 int reg, count;
416 int int_or_sig_p = (interrupt_function_p (current_function_decl)
417 || signal_function_p (current_function_decl));
418 int leaf_func_p = leaf_function_p ();
419
420 if (set)
421 CLEAR_HARD_REG_SET (*set);
422 count = 0;
423
424 /* No need to save any registers if the function never returns. */
425 if (TREE_THIS_VOLATILE (current_function_decl))
426 return 0;
427
428 for (reg = 0; reg < 32; reg++)
429 {
430       /* Do not push/pop __tmp_reg__, __zero_reg__, or any global
431          register variables.  */
432 if (fixed_regs[reg])
433 continue;
434
435 if ((int_or_sig_p && !leaf_func_p && call_used_regs[reg])
436 || (regs_ever_live[reg]
437 && (int_or_sig_p || !call_used_regs[reg])
438 && !(frame_pointer_needed
439 && (reg == REG_Y || reg == (REG_Y+1)))))
440 {
441 if (set)
442 SET_HARD_REG_BIT (*set, reg);
443 count++;
444 }
445 }
446 return count;
447 }
448
449 /* Compute offset between arg_pointer and frame_pointer. */
450
451 int
452 initial_elimination_offset (int from, int to)
453 {
454 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
455 return 0;
456 else
457 {
458 int offset = frame_pointer_needed ? 2 : 0;
459
460 offset += avr_regs_to_save (NULL);
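      /* Locals, 2 bytes of return address, and 1 more byte because the
         frame/stack pointer points one byte below the last pushed byte
         (post-decrement push).  */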
461 return get_frame_size () + 2 + 1 + offset;
462 }
463 }
464
465 /* Return 1 if the function epilogue is just a single "ret". */
466
467 int
468 avr_simple_epilogue (void)
469 {
470 return (! frame_pointer_needed
471 && get_frame_size () == 0
472 && avr_regs_to_save (NULL) == 0
473 && ! interrupt_function_p (current_function_decl)
474 && ! signal_function_p (current_function_decl)
475 && ! avr_naked_function_p (current_function_decl)
476 && ! MAIN_NAME_P (DECL_NAME (current_function_decl))
477 && ! TREE_THIS_VOLATILE (current_function_decl));
478 }
479
480 /* Check whether the live call-saved registers form one contiguous
    sequence; return its length if so, 0 otherwise.  */
481
482 static int
483 sequent_regs_live (void)
484 {
485 int reg;
486 int live_seq=0;
487 int cur_seq=0;
488
489 for (reg = 0; reg < 18; ++reg)
490 {
491 if (!call_used_regs[reg])
492 {
493 if (regs_ever_live[reg])
494 {
495 ++live_seq;
496 ++cur_seq;
497 }
498 else
499 cur_seq = 0;
500 }
501 }
502
503 if (!frame_pointer_needed)
504 {
505 if (regs_ever_live[REG_Y])
506 {
507 ++live_seq;
508 ++cur_seq;
509 }
510 else
511 cur_seq = 0;
512
513 if (regs_ever_live[REG_Y+1])
514 {
515 ++live_seq;
516 ++cur_seq;
517 }
518 else
519 cur_seq = 0;
520 }
521 else
522 {
523 cur_seq += 2;
524 live_seq += 2;
525 }
526 return (cur_seq == live_seq) ? live_seq : 0;
527 }
528
529
530 /* Output to FILE the asm instructions to adjust the frame pointer by
531 ADJ (r29:r28 -= ADJ;) which can be positive (prologue) or negative
532 (epilogue). Returns the number of instructions generated. */
533
534 static int
535 out_adj_frame_ptr (FILE *file, int adj)
536 {
537 int size = 0;
538
539 if (adj)
540 {
541 if (TARGET_TINY_STACK)
542 {
543 if (adj < -63 || adj > 63)
544 warning (0, "large frame pointer change (%d) with -mtiny-stack", adj);
545
546 /* The high byte (r29) doesn't change - prefer "subi" (1 cycle)
547 over "sbiw" (2 cycles, same size). */
548
549 fprintf (file, (AS2 (subi, r28, %d) CR_TAB), adj);
550 size++;
551 }
552 else if (adj < -63 || adj > 63)
553 {
554 fprintf (file, (AS2 (subi, r28, lo8(%d)) CR_TAB
555 AS2 (sbci, r29, hi8(%d)) CR_TAB),
556 adj, adj);
557 size += 2;
558 }
559 else if (adj < 0)
560 {
561 fprintf (file, (AS2 (adiw, r28, %d) CR_TAB), -adj);
562 size++;
563 }
564 else
565 {
566 fprintf (file, (AS2 (sbiw, r28, %d) CR_TAB), adj);
567 size++;
568 }
569 }
570 return size;
571 }
572
573
574 /* Output to FILE the asm instructions to copy r29:r28 to SPH:SPL,
575 handling various cases of interrupt enable flag state BEFORE and AFTER
576 (0=disabled, 1=enabled, -1=unknown/unchanged) and target_flags.
577 Returns the number of instructions generated. */
578
579 static int
580 out_set_stack_ptr (FILE *file, int before, int after)
581 {
582 int do_sph, do_cli, do_save, do_sei, lock_sph, size;
583
584 /* The logic here is so that -mno-interrupts actually means
585 "it is safe to write SPH in one instruction, then SPL in the
586 next instruction, without disabling interrupts first".
587 The after != -1 case (interrupt/signal) is not affected. */
588
589 do_sph = !TARGET_TINY_STACK;
590 lock_sph = do_sph && !TARGET_NO_INTERRUPTS;
591 do_cli = (before != 0 && (after == 0 || lock_sph));
592 do_save = (do_cli && before == -1 && after == -1);
593 do_sei = ((do_cli || before != 1) && after == 1);
594 size = 1;
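  /* The final "out __SP_L__,r28" below is always emitted, so SIZE starts at 1.  */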
595
596 if (do_save)
597 {
598 fprintf (file, AS2 (in, __tmp_reg__, __SREG__) CR_TAB);
599 size++;
600 }
601
602 if (do_cli)
603 {
604 fprintf (file, "cli" CR_TAB);
605 size++;
606 }
607
608 /* Do SPH first - maybe this will disable interrupts for one instruction
609 someday (a suggestion has been sent to avr@atmel.com for consideration
610 in future devices - that would make -mno-interrupts always safe). */
611 if (do_sph)
612 {
613 fprintf (file, AS2 (out, __SP_H__, r29) CR_TAB);
614 size++;
615 }
616
617 /* Set/restore the I flag now - interrupts will be really enabled only
618 after the next instruction. This is not clearly documented, but
619 believed to be true for all AVR devices. */
620 if (do_save)
621 {
622 fprintf (file, AS2 (out, __SREG__, __tmp_reg__) CR_TAB);
623 size++;
624 }
625 else if (do_sei)
626 {
627 fprintf (file, "sei" CR_TAB);
628 size++;
629 }
630
631 fprintf (file, AS2 (out, __SP_L__, r28) "\n");
632
633 return size;
634 }
635
636
637 /* Output function prologue. */
638
639 static void
640 avr_output_function_prologue (FILE *file, HOST_WIDE_INT size)
641 {
642 int reg;
643 int interrupt_func_p;
644 int signal_func_p;
645 int main_p;
646 int live_seq;
647 int minimize;
648
649 last_insn_address = 0;
650 jump_tables_size = 0;
651 prologue_size = 0;
652 fprintf (file, "/* prologue: frame size=" HOST_WIDE_INT_PRINT_DEC " */\n",
653 size);
654
655 if (avr_naked_function_p (current_function_decl))
656 {
657 fputs ("/* prologue: naked */\n", file);
658 goto out;
659 }
660
661 interrupt_func_p = interrupt_function_p (current_function_decl);
662 signal_func_p = signal_function_p (current_function_decl);
663 main_p = MAIN_NAME_P (DECL_NAME (current_function_decl));
664 live_seq = sequent_regs_live ();
665 minimize = (TARGET_CALL_PROLOGUES
666 && !interrupt_func_p && !signal_func_p && live_seq);
667
668 if (interrupt_func_p)
669 {
670 fprintf (file,"\tsei\n");
671 ++prologue_size;
672 }
673 if (interrupt_func_p || signal_func_p)
674 {
675 fprintf (file, "\t"
676 AS1 (push,__zero_reg__) CR_TAB
677 AS1 (push,__tmp_reg__) CR_TAB
678 AS2 (in,__tmp_reg__,__SREG__) CR_TAB
679 AS1 (push,__tmp_reg__) CR_TAB
680 AS1 (clr,__zero_reg__) "\n");
681 prologue_size += 5;
682 }
683 if (main_p)
684 {
685 fprintf (file, ("\t"
686 AS1 (ldi,r28) ",lo8(%s - " HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
687 AS1 (ldi,r29) ",hi8(%s - " HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
688 AS2 (out,__SP_H__,r29) CR_TAB
689 AS2 (out,__SP_L__,r28) "\n"),
690 avr_init_stack, size, avr_init_stack, size);
691
692 prologue_size += 4;
693 }
694 else if (minimize && (frame_pointer_needed || live_seq > 6))
695 {
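      /* Load the frame size into X and the return label into Z, then jump
         into the libgcc __prologue_saves__ sequence, skipping the pushes
         for the (18 - live_seq) registers that need not be saved.  */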
696 fprintf (file, ("\t"
697 AS1 (ldi, r26) ",lo8(" HOST_WIDE_INT_PRINT_DEC ")" CR_TAB
698 AS1 (ldi, r27) ",hi8(" HOST_WIDE_INT_PRINT_DEC ")" CR_TAB), size, size);
699
700 fputs ((AS2 (ldi,r30,pm_lo8(1f)) CR_TAB
701 AS2 (ldi,r31,pm_hi8(1f)) CR_TAB), file);
702
703 prologue_size += 4;
704
705 if (AVR_MEGA)
706 {
707 fprintf (file, AS1 (jmp,__prologue_saves__+%d) "\n",
708 (18 - live_seq) * 2);
709 prologue_size += 2;
710 }
711 else
712 {
713 fprintf (file, AS1 (rjmp,__prologue_saves__+%d) "\n",
714 (18 - live_seq) * 2);
715 ++prologue_size;
716 }
717 fputs ("1:\n", file);
718 }
719 else
720 {
721 HARD_REG_SET set;
722
723 prologue_size += avr_regs_to_save (&set);
724 for (reg = 0; reg < 32; ++reg)
725 {
726 if (TEST_HARD_REG_BIT (set, reg))
727 {
728 fprintf (file, "\t" AS1 (push,%s) "\n", avr_regnames[reg]);
729 }
730 }
731 if (frame_pointer_needed)
732 {
733 fprintf (file, "\t"
734 AS1 (push,r28) CR_TAB
735 AS1 (push,r29) CR_TAB
736 AS2 (in,r28,__SP_L__) CR_TAB
737 AS2 (in,r29,__SP_H__) "\n");
738 prologue_size += 4;
739 if (size)
740 {
741 fputs ("\t", file);
742 prologue_size += out_adj_frame_ptr (file, size);
743
744 if (interrupt_func_p)
745 {
746 prologue_size += out_set_stack_ptr (file, 1, 1);
747 }
748 else if (signal_func_p)
749 {
750 prologue_size += out_set_stack_ptr (file, 0, 0);
751 }
752 else
753 {
754 prologue_size += out_set_stack_ptr (file, -1, -1);
755 }
756 }
757 }
758 }
759
760 out:
761 fprintf (file, "/* prologue end (size=%d) */\n", prologue_size);
762 }
763
764 /* Output function epilogue. */
765
766 static void
767 avr_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
768 {
769 int reg;
770 int interrupt_func_p;
771 int signal_func_p;
772 int main_p;
773 int function_size;
774 int live_seq;
775 int minimize;
776 rtx last = get_last_nonnote_insn ();
777
778 function_size = jump_tables_size;
779 if (last)
780 {
781 rtx first = get_first_nonnote_insn ();
782 function_size += (INSN_ADDRESSES (INSN_UID (last)) -
783 INSN_ADDRESSES (INSN_UID (first)));
784 function_size += get_attr_length (last);
785 }
786
787 fprintf (file, "/* epilogue: frame size=" HOST_WIDE_INT_PRINT_DEC " */\n", size);
788 epilogue_size = 0;
789
790 if (avr_naked_function_p (current_function_decl))
791 {
792 fputs ("/* epilogue: naked */\n", file);
793 goto out;
794 }
795
796 if (last && GET_CODE (last) == BARRIER)
797 {
798 fputs ("/* epilogue: noreturn */\n", file);
799 goto out;
800 }
801
802 interrupt_func_p = interrupt_function_p (current_function_decl);
803 signal_func_p = signal_function_p (current_function_decl);
804 main_p = MAIN_NAME_P (DECL_NAME (current_function_decl));
805 live_seq = sequent_regs_live ();
806 minimize = (TARGET_CALL_PROLOGUES
807 && !interrupt_func_p && !signal_func_p && live_seq);
808
809 if (main_p)
810 {
811 /* Return value from main() is already in the correct registers
812 (r25:r24) as the exit() argument. */
813 if (AVR_MEGA)
814 {
815 fputs ("\t" AS1 (jmp,exit) "\n", file);
816 epilogue_size += 2;
817 }
818 else
819 {
820 fputs ("\t" AS1 (rjmp,exit) "\n", file);
821 ++epilogue_size;
822 }
823 }
824 else if (minimize && (frame_pointer_needed || live_seq > 4))
825 {
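      /* Pass LIVE_SEQ in r30 and jump into the libgcc __epilogue_restores__
         sequence, entering past the restores that are not needed.  */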
826 fprintf (file, ("\t" AS2 (ldi, r30, %d) CR_TAB), live_seq);
827 ++epilogue_size;
828 if (frame_pointer_needed)
829 {
830 epilogue_size += out_adj_frame_ptr (file, -size);
831 }
832 else
833 {
834 fprintf (file, (AS2 (in , r28, __SP_L__) CR_TAB
835 AS2 (in , r29, __SP_H__) CR_TAB));
836 epilogue_size += 2;
837 }
838
839 if (AVR_MEGA)
840 {
841 fprintf (file, AS1 (jmp,__epilogue_restores__+%d) "\n",
842 (18 - live_seq) * 2);
843 epilogue_size += 2;
844 }
845 else
846 {
847 fprintf (file, AS1 (rjmp,__epilogue_restores__+%d) "\n",
848 (18 - live_seq) * 2);
849 ++epilogue_size;
850 }
851 }
852 else
853 {
854 HARD_REG_SET set;
855
856 if (frame_pointer_needed)
857 {
858 if (size)
859 {
860 fputs ("\t", file);
861 epilogue_size += out_adj_frame_ptr (file, -size);
862
863 if (interrupt_func_p || signal_func_p)
864 {
865 epilogue_size += out_set_stack_ptr (file, -1, 0);
866 }
867 else
868 {
869 epilogue_size += out_set_stack_ptr (file, -1, -1);
870 }
871 }
872 fprintf (file, "\t"
873 AS1 (pop,r29) CR_TAB
874 AS1 (pop,r28) "\n");
875 epilogue_size += 2;
876 }
877
878 epilogue_size += avr_regs_to_save (&set);
879 for (reg = 31; reg >= 0; --reg)
880 {
881 if (TEST_HARD_REG_BIT (set, reg))
882 {
883 fprintf (file, "\t" AS1 (pop,%s) "\n", avr_regnames[reg]);
884 }
885 }
886
887 if (interrupt_func_p || signal_func_p)
888 {
889 fprintf (file, "\t"
890 AS1 (pop,__tmp_reg__) CR_TAB
891 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
892 AS1 (pop,__tmp_reg__) CR_TAB
893 AS1 (pop,__zero_reg__) "\n");
894 epilogue_size += 4;
895 fprintf (file, "\treti\n");
896 }
897 else
898 fprintf (file, "\tret\n");
899 ++epilogue_size;
900 }
901
902 out:
903 fprintf (file, "/* epilogue end (size=%d) */\n", epilogue_size);
904 fprintf (file, "/* function %s size %d (%d) */\n", current_function_name (),
905 prologue_size + function_size + epilogue_size, function_size);
906 commands_in_file += prologue_size + function_size + epilogue_size;
907 commands_in_prologues += prologue_size;
908 commands_in_epilogues += epilogue_size;
909 }
910
911
912 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
913 machine for a memory operand of mode MODE. */
914
915 int
916 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
917 {
918 enum reg_class r = NO_REGS;
919
920 if (TARGET_ALL_DEBUG)
921 {
922 fprintf (stderr, "mode: (%s) %s %s %s %s:",
923 GET_MODE_NAME(mode),
924 strict ? "(strict)": "",
925 reload_completed ? "(reload_completed)": "",
926 reload_in_progress ? "(reload_in_progress)": "",
927 reg_renumber ? "(reg_renumber)" : "");
928 if (GET_CODE (x) == PLUS
929 && REG_P (XEXP (x, 0))
930 && GET_CODE (XEXP (x, 1)) == CONST_INT
931 && INTVAL (XEXP (x, 1)) >= 0
932 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
933 && reg_renumber
934 )
935 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
936 true_regnum (XEXP (x, 0)));
937 debug_rtx (x);
938 }
939 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
940 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
941 r = POINTER_REGS;
942 else if (CONSTANT_ADDRESS_P (x))
943 r = ALL_REGS;
944 else if (GET_CODE (x) == PLUS
945 && REG_P (XEXP (x, 0))
946 && GET_CODE (XEXP (x, 1)) == CONST_INT
947 && INTVAL (XEXP (x, 1)) >= 0)
948 {
949 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
950 if (fit)
951 {
952 if (! strict
953 || REGNO (XEXP (x,0)) == REG_Y
954 || REGNO (XEXP (x,0)) == REG_Z)
955 r = BASE_POINTER_REGS;
956 if (XEXP (x,0) == frame_pointer_rtx
957 || XEXP (x,0) == arg_pointer_rtx)
958 r = BASE_POINTER_REGS;
959 }
960 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
961 r = POINTER_Y_REGS;
962 }
963 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
964 && REG_P (XEXP (x, 0))
965 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
966 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
967 {
968 r = POINTER_REGS;
969 }
970 if (TARGET_ALL_DEBUG)
971 {
972 fprintf (stderr, " ret = %c\n", r + '0');
973 }
974 return r == NO_REGS ? 0 : (int)r;
975 }
976
977 /* Attempt to replace X with a valid
978    memory address for an operand of mode MODE.  */
979
980 rtx
981 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
982 {
983 x = oldx;
984 if (TARGET_ALL_DEBUG)
985 {
986 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
987 debug_rtx (oldx);
988 }
989
990 if (GET_CODE (oldx) == PLUS
991 && REG_P (XEXP (oldx,0)))
992 {
993 if (REG_P (XEXP (oldx,1)))
994 x = force_reg (GET_MODE (oldx), oldx);
995 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
996 {
997 int offs = INTVAL (XEXP (oldx,1));
998 if (frame_pointer_rtx != XEXP (oldx,0))
999 if (offs > MAX_LD_OFFSET (mode))
1000 {
1001 if (TARGET_ALL_DEBUG)
1002 fprintf (stderr, "force_reg (big offset)\n");
1003 x = force_reg (GET_MODE (oldx), oldx);
1004 }
1005 }
1006 }
1007 return x;
1008 }
1009
1010
1011 /* Return a pointer register name as a string. */
1012
1013 static const char *
1014 ptrreg_to_str (int regno)
1015 {
1016 switch (regno)
1017 {
1018 case REG_X: return "X";
1019 case REG_Y: return "Y";
1020 case REG_Z: return "Z";
1021 default:
1022 gcc_unreachable ();
1023 }
1024 return NULL;
1025 }
1026
1027 /* Return the condition name as a string.
1028    Used when constructing conditional jumps.  */
1029
1030 static const char *
1031 cond_string (enum rtx_code code)
1032 {
1033 switch (code)
1034 {
1035 case NE:
1036 return "ne";
1037 case EQ:
1038 return "eq";
1039 case GE:
1040 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1041 return "pl";
1042 else
1043 return "ge";
1044 case LT:
1045 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1046 return "mi";
1047 else
1048 return "lt";
1049 case GEU:
1050 return "sh";
1051 case LTU:
1052 return "lo";
1053 default:
1054 gcc_unreachable ();
1055 }
1056 }
1057
1058 /* Output ADDR to FILE as an address.  */
1059
1060 void
1061 print_operand_address (FILE *file, rtx addr)
1062 {
1063 switch (GET_CODE (addr))
1064 {
1065 case REG:
1066 fprintf (file, ptrreg_to_str (REGNO (addr)));
1067 break;
1068
1069 case PRE_DEC:
1070 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1071 break;
1072
1073 case POST_INC:
1074 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1075 break;
1076
1077 default:
1078 if (CONSTANT_ADDRESS_P (addr)
1079 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1080 || GET_CODE (addr) == LABEL_REF))
1081 {
1082 fprintf (file, "pm(");
1083 output_addr_const (file,addr);
1084 fprintf (file ,")");
1085 }
1086 else
1087 output_addr_const (file, addr);
1088 }
1089 }
1090
1091
1092 /* Output X as an assembler operand to file FILE.  */
1093
1094 void
1095 print_operand (FILE *file, rtx x, int code)
1096 {
1097 int abcd = 0;
1098
1099 if (code >= 'A' && code <= 'D')
1100 abcd = code - 'A';
1101
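  /* '~': output an 'r' so that a following "jmp"/"call" in the insn
     template becomes "rjmp"/"rcall" on devices that do not use
     jmp/call (!AVR_MEGA).  */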
1102 if (code == '~')
1103 {
1104 if (!AVR_MEGA)
1105 fputc ('r', file);
1106 }
1107 else if (REG_P (x))
1108 {
1109 if (x == zero_reg_rtx)
1110 fprintf (file, "__zero_reg__");
1111 else
1112 fprintf (file, reg_names[true_regnum (x) + abcd]);
1113 }
1114 else if (GET_CODE (x) == CONST_INT)
1115 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1116 else if (GET_CODE (x) == MEM)
1117 {
1118 rtx addr = XEXP (x,0);
1119
1120 if (CONSTANT_P (addr) && abcd)
1121 {
1122 fputc ('(', file);
1123 output_address (addr);
1124 fprintf (file, ")+%d", abcd);
1125 }
1126 else if (code == 'o')
1127 {
1128 if (GET_CODE (addr) != PLUS)
1129 fatal_insn ("bad address, not (reg+disp):", addr);
1130
1131 print_operand (file, XEXP (addr, 1), 0);
1132 }
1133 else if (code == 'p' || code == 'r')
1134 {
1135 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1136 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1137
1138 if (code == 'p')
1139 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1140 else
1141 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1142 }
1143 else if (GET_CODE (addr) == PLUS)
1144 {
1145 print_operand_address (file, XEXP (addr,0));
1146 if (REGNO (XEXP (addr, 0)) == REG_X)
1147 fatal_insn ("internal compiler error. Bad address:"
1148 ,addr);
1149 fputc ('+', file);
1150 print_operand (file, XEXP (addr,1), code);
1151 }
1152 else
1153 print_operand_address (file, addr);
1154 }
1155 else if (GET_CODE (x) == CONST_DOUBLE)
1156 {
1157 long val;
1158 REAL_VALUE_TYPE rv;
1159 if (GET_MODE (x) != SFmode)
1160 fatal_insn ("internal compiler error. Unknown mode:", x);
1161 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1162 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1163 fprintf (file, "0x%lx", val);
1164 }
1165 else if (code == 'j')
1166 fputs (cond_string (GET_CODE (x)), file);
1167 else if (code == 'k')
1168 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1169 else
1170 print_operand_address (file, x);
1171 }
1172
1173 /* Update the condition code in the INSN. */
1174
1175 void
1176 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1177 {
1178 rtx set;
1179
1180 switch (get_attr_cc (insn))
1181 {
1182 case CC_NONE:
1183 /* Insn does not affect CC at all. */
1184 break;
1185
1186 case CC_SET_N:
1187 CC_STATUS_INIT;
1188 break;
1189
1190 case CC_SET_ZN:
1191 set = single_set (insn);
1192 CC_STATUS_INIT;
1193 if (set)
1194 {
1195 cc_status.flags |= CC_NO_OVERFLOW;
1196 cc_status.value1 = SET_DEST (set);
1197 }
1198 break;
1199
1200 case CC_SET_CZN:
1201 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1202 The V flag may or may not be known but that's ok because
1203 alter_cond will change tests to use EQ/NE. */
1204 set = single_set (insn);
1205 CC_STATUS_INIT;
1206 if (set)
1207 {
1208 cc_status.value1 = SET_DEST (set);
1209 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1210 }
1211 break;
1212
1213 case CC_COMPARE:
1214 set = single_set (insn);
1215 CC_STATUS_INIT;
1216 if (set)
1217 cc_status.value1 = SET_SRC (set);
1218 break;
1219
1220 case CC_CLOBBER:
1221 /* Insn doesn't leave CC in a usable state. */
1222 CC_STATUS_INIT;
1223
1224       /* Correct the CC for ashrqi3 when the shift count is a CONST_INT other than 6.  */
1225 set = single_set (insn);
1226 if (set)
1227 {
1228 rtx src = SET_SRC (set);
1229
1230 if (GET_CODE (src) == ASHIFTRT
1231 && GET_MODE (src) == QImode)
1232 {
1233 rtx x = XEXP (src, 1);
1234
1235 if (GET_CODE (x) == CONST_INT
1236 && INTVAL (x) > 0
1237 && INTVAL (x) != 6)
1238 {
1239 cc_status.value1 = SET_DEST (set);
1240 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1241 }
1242 }
1243 }
1244 break;
1245 }
1246 }
1247
1248 /* Return maximum number of consecutive registers of
1249 class CLASS needed to hold a value of mode MODE. */
1250
1251 int
1252 class_max_nregs (enum reg_class class ATTRIBUTE_UNUSED,enum machine_mode mode)
1253 {
1254 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1255 }
1256
1257 /* Choose mode for jump insn:
1258 1 - relative jump in range -63 <= x <= 62 ;
1259 2 - relative jump in range -2046 <= x <= 2045 ;
1260 3 - absolute jump (only for ATmega[16]03). */
1261
1262 int
1263 avr_jump_mode (rtx x, rtx insn)
1264 {
1265 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1266 ? XEXP (x, 0) : x));
1267 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1268 int jump_distance = cur_addr - dest_addr;
1269
1270 if (-63 <= jump_distance && jump_distance <= 62)
1271 return 1;
1272 else if (-2046 <= jump_distance && jump_distance <= 2045)
1273 return 2;
1274 else if (AVR_MEGA)
1275 return 3;
1276
1277 return 2;
1278 }
1279
1280 /* Return AVR conditional branch instructions.
1281    X is a comparison RTX.
1282    LEN is a value returned by the avr_jump_mode function.
1283    If REVERSE is nonzero, the condition code in X must be reversed.  */
1284
1285 const char *
1286 ret_cond_branch (rtx x, int len, int reverse)
1287 {
1288 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1289
1290 switch (cond)
1291 {
1292 case GT:
1293 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1294 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1295 AS1 (brpl,%0)) :
1296 len == 2 ? (AS1 (breq,.+4) CR_TAB
1297 AS1 (brmi,.+2) CR_TAB
1298 AS1 (rjmp,%0)) :
1299 (AS1 (breq,.+6) CR_TAB
1300 AS1 (brmi,.+4) CR_TAB
1301 AS1 (jmp,%0)));
1302
1303 else
1304 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1305 AS1 (brge,%0)) :
1306 len == 2 ? (AS1 (breq,.+4) CR_TAB
1307 AS1 (brlt,.+2) CR_TAB
1308 AS1 (rjmp,%0)) :
1309 (AS1 (breq,.+6) CR_TAB
1310 AS1 (brlt,.+4) CR_TAB
1311 AS1 (jmp,%0)));
1312 case GTU:
1313 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1314 AS1 (brsh,%0)) :
1315 len == 2 ? (AS1 (breq,.+4) CR_TAB
1316 AS1 (brlo,.+2) CR_TAB
1317 AS1 (rjmp,%0)) :
1318 (AS1 (breq,.+6) CR_TAB
1319 AS1 (brlo,.+4) CR_TAB
1320 AS1 (jmp,%0)));
1321 case LE:
1322 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1323 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1324 AS1 (brmi,%0)) :
1325 len == 2 ? (AS1 (breq,.+2) CR_TAB
1326 AS1 (brpl,.+2) CR_TAB
1327 AS1 (rjmp,%0)) :
1328 (AS1 (breq,.+2) CR_TAB
1329 AS1 (brpl,.+4) CR_TAB
1330 AS1 (jmp,%0)));
1331 else
1332 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1333 AS1 (brlt,%0)) :
1334 len == 2 ? (AS1 (breq,.+2) CR_TAB
1335 AS1 (brge,.+2) CR_TAB
1336 AS1 (rjmp,%0)) :
1337 (AS1 (breq,.+2) CR_TAB
1338 AS1 (brge,.+4) CR_TAB
1339 AS1 (jmp,%0)));
1340 case LEU:
1341 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1342 AS1 (brlo,%0)) :
1343 len == 2 ? (AS1 (breq,.+2) CR_TAB
1344 AS1 (brsh,.+2) CR_TAB
1345 AS1 (rjmp,%0)) :
1346 (AS1 (breq,.+2) CR_TAB
1347 AS1 (brsh,.+4) CR_TAB
1348 AS1 (jmp,%0)));
1349 default:
1350 if (reverse)
1351 {
1352 switch (len)
1353 {
1354 case 1:
1355 return AS1 (br%k1,%0);
1356 case 2:
1357 return (AS1 (br%j1,.+2) CR_TAB
1358 AS1 (rjmp,%0));
1359 default:
1360 return (AS1 (br%j1,.+4) CR_TAB
1361 AS1 (jmp,%0));
1362 }
1363 }
1364 else
1365 {
1366 switch (len)
1367 {
1368 case 1:
1369 return AS1 (br%j1,%0);
1370 case 2:
1371 return (AS1 (br%k1,.+2) CR_TAB
1372 AS1 (rjmp,%0));
1373 default:
1374 return (AS1 (br%k1,.+4) CR_TAB
1375 AS1 (jmp,%0));
1376 }
1377 }
1378 }
1379 return "";
1380 }
1381
1382 /* Predicate for an immediate operand that fits in a byte (8 bits).  */
1383
1384 int
1385 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1386 {
1387 return (GET_CODE (op) == CONST_INT
1388 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1389 }
1390
1391 /* Output all insn addresses and their sizes into the assembly language
1392 output file. This is helpful for debugging whether the length attributes
1393 in the md file are correct.
1394 Output insn cost for next insn. */
1395
1396 void
1397 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1398 int num_operands ATTRIBUTE_UNUSED)
1399 {
1400 int uid = INSN_UID (insn);
1401
1402 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1403 {
1404 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1405 INSN_ADDRESSES (uid),
1406 INSN_ADDRESSES (uid) - last_insn_address,
1407 rtx_cost (PATTERN (insn), INSN));
1408 }
1409 last_insn_address = INSN_ADDRESSES (uid);
1410 }
1411
1412 /* Return 0 if undefined, 1 if always true or always false. */
1413
1414 int
1415 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE operator, rtx x)
1416 {
1417 unsigned int max = (mode == QImode ? 0xff :
1418 mode == HImode ? 0xffff :
1419 mode == SImode ? 0xffffffff : 0);
1420 if (max && operator && GET_CODE (x) == CONST_INT)
1421 {
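      /* For a signed comparison only half of the mode's range is usable.  */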
1422 if (unsigned_condition (operator) != operator)
1423 max >>= 1;
1424
1425 if (max != (INTVAL (x) & max)
1426 && INTVAL (x) != 0xff)
1427 return 1;
1428 }
1429 return 0;
1430 }
1431
1432
1433 /* Returns nonzero if REGNO is the number of a hard
1434 register in which function arguments are sometimes passed. */
1435
1436 int
1437 function_arg_regno_p(int r)
1438 {
1439 return (r >= 8 && r <= 25);
1440 }
1441
1442 /* Initialize the variable CUM to the state at the beginning
1443    of the argument list.  */
1444
1445 void
1446 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1447 tree fndecl ATTRIBUTE_UNUSED)
1448 {
1449 cum->nregs = 18;
1450 cum->regno = FIRST_CUM_REG;
1451 if (!libname && fntype)
1452 {
1453 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1454 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1455 != void_type_node));
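      /* A function with a variable argument list receives all its
         arguments on the stack.  */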
1456 if (stdarg)
1457 cum->nregs = 0;
1458 }
1459 }
1460
1461 /* Returns the number of registers to allocate for a function argument. */
1462
1463 static int
1464 avr_num_arg_regs (enum machine_mode mode, tree type)
1465 {
1466 int size;
1467
1468 if (mode == BLKmode)
1469 size = int_size_in_bytes (type);
1470 else
1471 size = GET_MODE_SIZE (mode);
1472
1473 /* Align all function arguments to start in even-numbered registers.
1474 Odd-sized arguments leave holes above them. */
1475
1476 return (size + 1) & ~1;
1477 }
1478
1479 /* Controls whether a function argument is passed
1480 in a register, and which register. */
1481
1482 rtx
1483 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1484 int named ATTRIBUTE_UNUSED)
1485 {
1486 int bytes = avr_num_arg_regs (mode, type);
1487
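  /* Arguments are passed in registers allocated downwards from r25;
     cum->regno - bytes is the lowest register used by this argument.  */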
1488 if (cum->nregs && bytes <= cum->nregs)
1489 return gen_rtx_REG (mode, cum->regno - bytes);
1490
1491 return NULL_RTX;
1492 }
1493
1494 /* Update the summarizer variable CUM to advance past an argument
1495 in the argument list. */
1496
1497 void
1498 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1499 int named ATTRIBUTE_UNUSED)
1500 {
1501 int bytes = avr_num_arg_regs (mode, type);
1502
1503 cum->nregs -= bytes;
1504 cum->regno -= bytes;
1505
1506 if (cum->nregs <= 0)
1507 {
1508 cum->nregs = 0;
1509 cum->regno = FIRST_CUM_REG;
1510 }
1511 }
1512
1513 /***********************************************************************
1514  Functions for outputting various mov's for various modes
1515 ************************************************************************/
1516 const char *
1517 output_movqi (rtx insn, rtx operands[], int *l)
1518 {
1519 int dummy;
1520 rtx dest = operands[0];
1521 rtx src = operands[1];
1522 int *real_l = l;
1523
1524 if (!l)
1525 l = &dummy;
1526
1527 *l = 1;
1528
1529 if (register_operand (dest, QImode))
1530 {
1531 if (register_operand (src, QImode)) /* mov r,r */
1532 {
1533 if (test_hard_reg_class (STACK_REG, dest))
1534 return AS2 (out,%0,%1);
1535 else if (test_hard_reg_class (STACK_REG, src))
1536 return AS2 (in,%0,%1);
1537
1538 return AS2 (mov,%0,%1);
1539 }
1540 else if (CONSTANT_P (src))
1541 {
1542 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1543 return AS2 (ldi,%0,lo8(%1));
1544
1545 if (GET_CODE (src) == CONST_INT)
1546 {
1547 if (src == const0_rtx) /* mov r,L */
1548 return AS1 (clr,%0);
1549 else if (src == const1_rtx)
1550 {
1551 *l = 2;
1552 return (AS1 (clr,%0) CR_TAB
1553 AS1 (inc,%0));
1554 }
1555 else if (src == constm1_rtx)
1556 {
1557               /* Load the immediate constant -1 into any register.  */
1558 *l = 2;
1559 return (AS1 (clr,%0) CR_TAB
1560 AS1 (dec,%0));
1561 }
1562 else
1563 {
1564 int bit_nr = exact_log2 (INTVAL (src));
1565
1566 if (bit_nr >= 0)
1567 {
1568 *l = 3;
1569 if (!real_l)
1570 output_asm_insn ((AS1 (clr,%0) CR_TAB
1571 "set"), operands);
1572 if (!real_l)
1573 avr_output_bld (operands, bit_nr);
1574
1575 return "";
1576 }
1577 }
1578 }
1579
1580 /* Last resort, larger than loading from memory. */
1581 *l = 4;
1582 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1583 AS2 (ldi,r31,lo8(%1)) CR_TAB
1584 AS2 (mov,%0,r31) CR_TAB
1585 AS2 (mov,r31,__tmp_reg__));
1586 }
1587 else if (GET_CODE (src) == MEM)
1588 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1589 }
1590 else if (GET_CODE (dest) == MEM)
1591 {
1592 const char *template;
1593
1594 if (src == const0_rtx)
1595 operands[1] = zero_reg_rtx;
1596
1597 template = out_movqi_mr_r (insn, operands, real_l);
1598
1599 if (!real_l)
1600 output_asm_insn (template, operands);
1601
1602 operands[1] = src;
1603 }
1604 return "";
1605 }
1606
1607
1608 const char *
1609 output_movhi (rtx insn, rtx operands[], int *l)
1610 {
1611 int dummy;
1612 rtx dest = operands[0];
1613 rtx src = operands[1];
1614 int *real_l = l;
1615
1616 if (!l)
1617 l = &dummy;
1618
1619 if (register_operand (dest, HImode))
1620 {
1621 if (register_operand (src, HImode)) /* mov r,r */
1622 {
1623 if (test_hard_reg_class (STACK_REG, dest))
1624 {
1625 if (TARGET_TINY_STACK)
1626 {
1627 *l = 1;
1628 return AS2 (out,__SP_L__,%A1);
1629 }
1630 else if (TARGET_NO_INTERRUPTS)
1631 {
1632 *l = 2;
1633 return (AS2 (out,__SP_H__,%B1) CR_TAB
1634 AS2 (out,__SP_L__,%A1));
1635 }
1636
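          /* Writing SPL and SPH must not be interrupted: save SREG,
             disable interrupts, write SPH, restore SREG (interrupts are
             re-enabled only after the following instruction), then
             write SPL.  */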
1637 *l = 5;
1638 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1639 "cli" CR_TAB
1640 AS2 (out,__SP_H__,%B1) CR_TAB
1641 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1642 AS2 (out,__SP_L__,%A1));
1643 }
1644 else if (test_hard_reg_class (STACK_REG, src))
1645 {
1646 *l = 2;
1647 return (AS2 (in,%A0,__SP_L__) CR_TAB
1648 AS2 (in,%B0,__SP_H__));
1649 }
1650
1651 if (AVR_HAVE_MOVW)
1652 {
1653 *l = 1;
1654 return (AS2 (movw,%0,%1));
1655 }
1656 else
1657 {
1658 *l = 2;
1659 return (AS2 (mov,%A0,%A1) CR_TAB
1660 AS2 (mov,%B0,%B1));
1661 }
1662 }
1663 else if (CONSTANT_P (src))
1664 {
1665 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1666 {
1667 *l = 2;
1668 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1669 AS2 (ldi,%B0,hi8(%1)));
1670 }
1671
1672 if (GET_CODE (src) == CONST_INT)
1673 {
1674 if (src == const0_rtx) /* mov r,L */
1675 {
1676 *l = 2;
1677 return (AS1 (clr,%A0) CR_TAB
1678 AS1 (clr,%B0));
1679 }
1680 else if (src == const1_rtx)
1681 {
1682 *l = 3;
1683 return (AS1 (clr,%A0) CR_TAB
1684 AS1 (clr,%B0) CR_TAB
1685 AS1 (inc,%A0));
1686 }
1687 else if (src == constm1_rtx)
1688 {
1689               /* Load the immediate constant -1 into any register.  */
1690 *l = 3;
1691 return (AS1 (clr,%0) CR_TAB
1692 AS1 (dec,%A0) CR_TAB
1693 AS2 (mov,%B0,%A0));
1694 }
1695 else
1696 {
1697 int bit_nr = exact_log2 (INTVAL (src));
1698
1699 if (bit_nr >= 0)
1700 {
1701 *l = 4;
1702 if (!real_l)
1703 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1704 AS1 (clr,%B0) CR_TAB
1705 "set"), operands);
1706 if (!real_l)
1707 avr_output_bld (operands, bit_nr);
1708
1709 return "";
1710 }
1711 }
1712
1713 if ((INTVAL (src) & 0xff) == 0)
1714 {
1715 *l = 5;
1716 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1717 AS1 (clr,%A0) CR_TAB
1718 AS2 (ldi,r31,hi8(%1)) CR_TAB
1719 AS2 (mov,%B0,r31) CR_TAB
1720 AS2 (mov,r31,__tmp_reg__));
1721 }
1722 else if ((INTVAL (src) & 0xff00) == 0)
1723 {
1724 *l = 5;
1725 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1726 AS2 (ldi,r31,lo8(%1)) CR_TAB
1727 AS2 (mov,%A0,r31) CR_TAB
1728 AS1 (clr,%B0) CR_TAB
1729 AS2 (mov,r31,__tmp_reg__));
1730 }
1731 }
1732
1733 /* Last resort, equal to loading from memory. */
1734 *l = 6;
1735 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1736 AS2 (ldi,r31,lo8(%1)) CR_TAB
1737 AS2 (mov,%A0,r31) CR_TAB
1738 AS2 (ldi,r31,hi8(%1)) CR_TAB
1739 AS2 (mov,%B0,r31) CR_TAB
1740 AS2 (mov,r31,__tmp_reg__));
1741 }
1742 else if (GET_CODE (src) == MEM)
1743 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1744 }
1745 else if (GET_CODE (dest) == MEM)
1746 {
1747 const char *template;
1748
1749 if (src == const0_rtx)
1750 operands[1] = zero_reg_rtx;
1751
1752 template = out_movhi_mr_r (insn, operands, real_l);
1753
1754 if (!real_l)
1755 output_asm_insn (template, operands);
1756
1757 operands[1] = src;
1758 return "";
1759 }
1760 fatal_insn ("invalid insn:", insn);
1761 return "";
1762 }
1763
1764 const char *
1765 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1766 {
1767 rtx dest = op[0];
1768 rtx src = op[1];
1769 rtx x = XEXP (src, 0);
1770 int dummy;
1771
1772 if (!l)
1773 l = &dummy;
1774
1775 if (CONSTANT_ADDRESS_P (x))
1776 {
1777 if (avr_io_address_p (x, 1))
1778 {
1779 *l = 1;
1780 return AS2 (in,%0,%1-0x20);
1781 }
1782 *l = 2;
1783 return AS2 (lds,%0,%1);
1784 }
1785 /* memory access by reg+disp */
1786 else if (GET_CODE (x) == PLUS
1787 && REG_P (XEXP (x,0))
1788 && GET_CODE (XEXP (x,1)) == CONST_INT)
1789 {
1790 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1791 {
1792 int disp = INTVAL (XEXP (x,1));
1793 if (REGNO (XEXP (x,0)) != REG_Y)
1794 fatal_insn ("incorrect insn:",insn);
1795
1796 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1797 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1798 AS2 (ldd,%0,Y+63) CR_TAB
1799 AS2 (sbiw,r28,%o1-63));
1800
1801 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1802 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1803 AS2 (ld,%0,Y) CR_TAB
1804 AS2 (subi,r28,lo8(%o1)) CR_TAB
1805 AS2 (sbci,r29,hi8(%o1)));
1806 }
1807 else if (REGNO (XEXP (x,0)) == REG_X)
1808 {
1809       /* This is a paranoid case; LEGITIMIZE_RELOAD_ADDRESS must exclude
1810          it, but it has been seen with extreme optimization options.  */
1811 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1812 || reg_unused_after (insn, XEXP (x,0)))
1813 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1814 AS2 (ld,%0,X));
1815
1816 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1817 AS2 (ld,%0,X) CR_TAB
1818 AS2 (sbiw,r26,%o1));
1819 }
1820 *l = 1;
1821 return AS2 (ldd,%0,%1);
1822 }
1823 *l = 1;
1824 return AS2 (ld,%0,%1);
1825 }
1826
1827 const char *
1828 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1829 {
1830 rtx dest = op[0];
1831 rtx src = op[1];
1832 rtx base = XEXP (src, 0);
1833 int reg_dest = true_regnum (dest);
1834 int reg_base = true_regnum (base);
1835 /* "volatile" forces reading low byte first, even if less efficient,
1836 for correct operation with 16-bit I/O registers. */
1837 int mem_volatile_p = MEM_VOLATILE_P (src);
1838 int tmp;
1839
1840 if (!l)
1841 l = &tmp;
1842
1843 if (reg_base > 0)
1844 {
1845 if (reg_dest == reg_base) /* R = (R) */
1846 {
1847 *l = 3;
1848 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1849 AS2 (ld,%B0,%1) CR_TAB
1850 AS2 (mov,%A0,__tmp_reg__));
1851 }
1852 else if (reg_base == REG_X) /* (R26) */
1853 {
1854 if (reg_unused_after (insn, base))
1855 {
1856 *l = 2;
1857 return (AS2 (ld,%A0,X+) CR_TAB
1858 AS2 (ld,%B0,X));
1859 }
1860 *l = 3;
1861 return (AS2 (ld,%A0,X+) CR_TAB
1862 AS2 (ld,%B0,X) CR_TAB
1863 AS2 (sbiw,r26,1));
1864 }
1865 else /* (R) */
1866 {
1867 *l = 2;
1868 return (AS2 (ld,%A0,%1) CR_TAB
1869 AS2 (ldd,%B0,%1+1));
1870 }
1871 }
1872 else if (GET_CODE (base) == PLUS) /* (R + i) */
1873 {
1874 int disp = INTVAL (XEXP (base, 1));
1875 int reg_base = true_regnum (XEXP (base, 0));
1876
1877 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1878 {
1879 if (REGNO (XEXP (base, 0)) != REG_Y)
1880 fatal_insn ("incorrect insn:",insn);
1881
1882 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1883 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1884 AS2 (ldd,%A0,Y+62) CR_TAB
1885 AS2 (ldd,%B0,Y+63) CR_TAB
1886 AS2 (sbiw,r28,%o1-62));
1887
1888 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1889 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1890 AS2 (ld,%A0,Y) CR_TAB
1891 AS2 (ldd,%B0,Y+1) CR_TAB
1892 AS2 (subi,r28,lo8(%o1)) CR_TAB
1893 AS2 (sbci,r29,hi8(%o1)));
1894 }
1895 if (reg_base == REG_X)
1896 {
1897           /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
1898              it, but it has been seen with extreme
1899              optimization options.  */
1900
1901 *l = 4;
1902 if (reg_base == reg_dest)
1903 return (AS2 (adiw,r26,%o1) CR_TAB
1904 AS2 (ld,__tmp_reg__,X+) CR_TAB
1905 AS2 (ld,%B0,X) CR_TAB
1906 AS2 (mov,%A0,__tmp_reg__));
1907
1908 return (AS2 (adiw,r26,%o1) CR_TAB
1909 AS2 (ld,%A0,X+) CR_TAB
1910 AS2 (ld,%B0,X) CR_TAB
1911 AS2 (sbiw,r26,%o1+1));
1912 }
1913
1914 if (reg_base == reg_dest)
1915 {
1916 *l = 3;
1917 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1918 AS2 (ldd,%B0,%B1) CR_TAB
1919 AS2 (mov,%A0,__tmp_reg__));
1920 }
1921
1922 *l = 2;
1923 return (AS2 (ldd,%A0,%A1) CR_TAB
1924 AS2 (ldd,%B0,%B1));
1925 }
1926 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
1927 {
1928 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1929 fatal_insn ("incorrect insn:", insn);
1930
1931 if (mem_volatile_p)
1932 {
1933 if (REGNO (XEXP (base, 0)) == REG_X)
1934 {
1935 *l = 4;
1936 return (AS2 (sbiw,r26,2) CR_TAB
1937 AS2 (ld,%A0,X+) CR_TAB
1938 AS2 (ld,%B0,X) CR_TAB
1939 AS2 (sbiw,r26,1));
1940 }
1941 else
1942 {
1943 *l = 3;
1944 return (AS2 (sbiw,%r1,2) CR_TAB
1945 AS2 (ld,%A0,%p1) CR_TAB
1946 AS2 (ldd,%B0,%p1+1));
1947 }
1948 }
1949
1950 *l = 2;
1951 return (AS2 (ld,%B0,%1) CR_TAB
1952 AS2 (ld,%A0,%1));
1953 }
1954 else if (GET_CODE (base) == POST_INC) /* (R++) */
1955 {
1956 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1957 fatal_insn ("incorrect insn:", insn);
1958
1959 *l = 2;
1960 return (AS2 (ld,%A0,%1) CR_TAB
1961 AS2 (ld,%B0,%1));
1962 }
1963 else if (CONSTANT_ADDRESS_P (base))
1964 {
1965 if (avr_io_address_p (base, 2))
1966 {
1967 *l = 2;
1968 return (AS2 (in,%A0,%A1-0x20) CR_TAB
1969 AS2 (in,%B0,%B1-0x20));
1970 }
1971 *l = 4;
1972 return (AS2 (lds,%A0,%A1) CR_TAB
1973 AS2 (lds,%B0,%B1));
1974 }
1975
1976 fatal_insn ("unknown move insn:",insn);
1977 return "";
1978 }
1979
1980 const char *
1981 out_movsi_r_mr (rtx insn, rtx op[], int *l)
1982 {
1983 rtx dest = op[0];
1984 rtx src = op[1];
1985 rtx base = XEXP (src, 0);
1986 int reg_dest = true_regnum (dest);
1987 int reg_base = true_regnum (base);
1988 int tmp;
1989
1990 if (!l)
1991 l = &tmp;
1992
1993 if (reg_base > 0)
1994 {
1995 if (reg_base == REG_X) /* (R26) */
1996 {
1997 if (reg_dest == REG_X)
1998 /* "ld r26,-X" is undefined */
1999 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2000 AS2 (ld,r29,X) CR_TAB
2001 AS2 (ld,r28,-X) CR_TAB
2002 AS2 (ld,__tmp_reg__,-X) CR_TAB
2003 AS2 (sbiw,r26,1) CR_TAB
2004 AS2 (ld,r26,X) CR_TAB
2005 AS2 (mov,r27,__tmp_reg__));
2006 else if (reg_dest == REG_X - 2)
2007 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2008 AS2 (ld,%B0,X+) CR_TAB
2009 AS2 (ld,__tmp_reg__,X+) CR_TAB
2010 AS2 (ld,%D0,X) CR_TAB
2011 AS2 (mov,%C0,__tmp_reg__));
2012 else if (reg_unused_after (insn, base))
2013 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2014 AS2 (ld,%B0,X+) CR_TAB
2015 AS2 (ld,%C0,X+) CR_TAB
2016 AS2 (ld,%D0,X));
2017 else
2018 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2019 AS2 (ld,%B0,X+) CR_TAB
2020 AS2 (ld,%C0,X+) CR_TAB
2021 AS2 (ld,%D0,X) CR_TAB
2022 AS2 (sbiw,r26,3));
2023 }
2024 else
2025 {
2026 if (reg_dest == reg_base)
2027 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2028 AS2 (ldd,%C0,%1+2) CR_TAB
2029 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2030 AS2 (ld,%A0,%1) CR_TAB
2031 AS2 (mov,%B0,__tmp_reg__));
2032 else if (reg_base == reg_dest + 2)
2033 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2034 AS2 (ldd,%B0,%1+1) CR_TAB
2035 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2036 AS2 (ldd,%D0,%1+3) CR_TAB
2037 AS2 (mov,%C0,__tmp_reg__));
2038 else
2039 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2040 AS2 (ldd,%B0,%1+1) CR_TAB
2041 AS2 (ldd,%C0,%1+2) CR_TAB
2042 AS2 (ldd,%D0,%1+3));
2043 }
2044 }
2045 else if (GET_CODE (base) == PLUS) /* (R + i) */
2046 {
2047 int disp = INTVAL (XEXP (base, 1));
2048
2049 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2050 {
2051 if (REGNO (XEXP (base, 0)) != REG_Y)
2052 fatal_insn ("incorrect insn:",insn);
2053
2054 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2055 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2056 AS2 (ldd,%A0,Y+60) CR_TAB
2057 AS2 (ldd,%B0,Y+61) CR_TAB
2058 AS2 (ldd,%C0,Y+62) CR_TAB
2059 AS2 (ldd,%D0,Y+63) CR_TAB
2060 AS2 (sbiw,r28,%o1-60));
2061
2062 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2063 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2064 AS2 (ld,%A0,Y) CR_TAB
2065 AS2 (ldd,%B0,Y+1) CR_TAB
2066 AS2 (ldd,%C0,Y+2) CR_TAB
2067 AS2 (ldd,%D0,Y+3) CR_TAB
2068 AS2 (subi,r28,lo8(%o1)) CR_TAB
2069 AS2 (sbci,r29,hi8(%o1)));
2070 }
2071
2072 reg_base = true_regnum (XEXP (base, 0));
2073 if (reg_base == REG_X)
2074 {
2075 /* R = (X + d) */
2076 if (reg_dest == REG_X)
2077 {
2078 *l = 7;
2079 /* "ld r26,-X" is undefined */
2080 return (AS2 (adiw,r26,%o1+3) CR_TAB
2081 AS2 (ld,r29,X) CR_TAB
2082 AS2 (ld,r28,-X) CR_TAB
2083 AS2 (ld,__tmp_reg__,-X) CR_TAB
2084 AS2 (sbiw,r26,1) CR_TAB
2085 AS2 (ld,r26,X) CR_TAB
2086 AS2 (mov,r27,__tmp_reg__));
2087 }
2088 *l = 6;
2089 if (reg_dest == REG_X - 2)
2090 return (AS2 (adiw,r26,%o1) CR_TAB
2091 AS2 (ld,r24,X+) CR_TAB
2092 AS2 (ld,r25,X+) CR_TAB
2093 AS2 (ld,__tmp_reg__,X+) CR_TAB
2094 AS2 (ld,r27,X) CR_TAB
2095 AS2 (mov,r26,__tmp_reg__));
2096
2097 return (AS2 (adiw,r26,%o1) CR_TAB
2098 AS2 (ld,%A0,X+) CR_TAB
2099 AS2 (ld,%B0,X+) CR_TAB
2100 AS2 (ld,%C0,X+) CR_TAB
2101 AS2 (ld,%D0,X) CR_TAB
2102 AS2 (sbiw,r26,%o1+3));
2103 }
2104 if (reg_dest == reg_base)
2105 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2106 AS2 (ldd,%C0,%C1) CR_TAB
2107 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2108 AS2 (ldd,%A0,%A1) CR_TAB
2109 AS2 (mov,%B0,__tmp_reg__));
2110 else if (reg_dest == reg_base - 2)
2111 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2112 AS2 (ldd,%B0,%B1) CR_TAB
2113 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2114 AS2 (ldd,%D0,%D1) CR_TAB
2115 AS2 (mov,%C0,__tmp_reg__));
2116 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2117 AS2 (ldd,%B0,%B1) CR_TAB
2118 AS2 (ldd,%C0,%C1) CR_TAB
2119 AS2 (ldd,%D0,%D1));
2120 }
2121 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2122 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2123 AS2 (ld,%C0,%1) CR_TAB
2124 AS2 (ld,%B0,%1) CR_TAB
2125 AS2 (ld,%A0,%1));
2126 else if (GET_CODE (base) == POST_INC) /* (R++) */
2127 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2128 AS2 (ld,%B0,%1) CR_TAB
2129 AS2 (ld,%C0,%1) CR_TAB
2130 AS2 (ld,%D0,%1));
2131 else if (CONSTANT_ADDRESS_P (base))
2132 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2133 AS2 (lds,%B0,%B1) CR_TAB
2134 AS2 (lds,%C0,%C1) CR_TAB
2135 AS2 (lds,%D0,%D1));
2136
2137 fatal_insn ("unknown move insn:",insn);
2138 return "";
2139 }
2140
2141 const char *
2142 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2143 {
2144 rtx dest = op[0];
2145 rtx src = op[1];
2146 rtx base = XEXP (dest, 0);
2147 int reg_base = true_regnum (base);
2148 int reg_src = true_regnum (src);
2149 int tmp;
2150
2151 if (!l)
2152 l = &tmp;
2153
2154 if (CONSTANT_ADDRESS_P (base))
2155 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2156 AS2 (sts,%B0,%B1) CR_TAB
2157 AS2 (sts,%C0,%C1) CR_TAB
2158 AS2 (sts,%D0,%D1));
2159 if (reg_base > 0) /* (r) */
2160 {
2161 if (reg_base == REG_X) /* (R26) */
2162 {
2163 if (reg_src == REG_X)
2164 {
2165 /* "st X+,r26" is undefined */
2166 if (reg_unused_after (insn, base))
2167 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2168 AS2 (st,X,r26) CR_TAB
2169 AS2 (adiw,r26,1) CR_TAB
2170 AS2 (st,X+,__tmp_reg__) CR_TAB
2171 AS2 (st,X+,r28) CR_TAB
2172 AS2 (st,X,r29));
2173 else
2174 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2175 AS2 (st,X,r26) CR_TAB
2176 AS2 (adiw,r26,1) CR_TAB
2177 AS2 (st,X+,__tmp_reg__) CR_TAB
2178 AS2 (st,X+,r28) CR_TAB
2179 AS2 (st,X,r29) CR_TAB
2180 AS2 (sbiw,r26,3));
2181 }
2182 else if (reg_base == reg_src + 2)
2183 {
2184 if (reg_unused_after (insn, base))
2185 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2186 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2187 AS2 (st,%0+,%A1) CR_TAB
2188 AS2 (st,%0+,%B1) CR_TAB
2189 AS2 (st,%0+,__zero_reg__) CR_TAB
2190 AS2 (st,%0,__tmp_reg__) CR_TAB
2191 AS1 (clr,__zero_reg__));
2192 else
2193 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2194 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2195 AS2 (st,%0+,%A1) CR_TAB
2196 AS2 (st,%0+,%B1) CR_TAB
2197 AS2 (st,%0+,__zero_reg__) CR_TAB
2198 AS2 (st,%0,__tmp_reg__) CR_TAB
2199 AS1 (clr,__zero_reg__) CR_TAB
2200 AS2 (sbiw,r26,3));
2201 }
2202 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2203 AS2 (st,%0+,%B1) CR_TAB
2204 AS2 (st,%0+,%C1) CR_TAB
2205 AS2 (st,%0,%D1) CR_TAB
2206 AS2 (sbiw,r26,3));
2207 }
2208 else
2209 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2210 AS2 (std,%0+1,%B1) CR_TAB
2211 AS2 (std,%0+2,%C1) CR_TAB
2212 AS2 (std,%0+3,%D1));
2213 }
2214 else if (GET_CODE (base) == PLUS) /* (R + i) */
2215 {
2216 int disp = INTVAL (XEXP (base, 1));
2217 reg_base = REGNO (XEXP (base, 0));
2218 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2219 {
2220 if (reg_base != REG_Y)
2221 fatal_insn ("incorrect insn:",insn);
2222
2223 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2224 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2225 AS2 (std,Y+60,%A1) CR_TAB
2226 AS2 (std,Y+61,%B1) CR_TAB
2227 AS2 (std,Y+62,%C1) CR_TAB
2228 AS2 (std,Y+63,%D1) CR_TAB
2229 AS2 (sbiw,r28,%o0-60));
2230
2231 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2232 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2233 AS2 (st,Y,%A1) CR_TAB
2234 AS2 (std,Y+1,%B1) CR_TAB
2235 AS2 (std,Y+2,%C1) CR_TAB
2236 AS2 (std,Y+3,%D1) CR_TAB
2237 AS2 (subi,r28,lo8(%o0)) CR_TAB
2238 AS2 (sbci,r29,hi8(%o0)));
2239 }
2240 if (reg_base == REG_X)
2241 {
2242 /* (X + d) = R */
2243 if (reg_src == REG_X)
2244 {
2245 *l = 9;
2246 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2247 AS2 (mov,__zero_reg__,r27) CR_TAB
2248 AS2 (adiw,r26,%o0) CR_TAB
2249 AS2 (st,X+,__tmp_reg__) CR_TAB
2250 AS2 (st,X+,__zero_reg__) CR_TAB
2251 AS2 (st,X+,r28) CR_TAB
2252 AS2 (st,X,r29) CR_TAB
2253 AS1 (clr,__zero_reg__) CR_TAB
2254 AS2 (sbiw,r26,%o0+3));
2255 }
2256 else if (reg_src == REG_X - 2)
2257 {
2258 *l = 9;
2259 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2260 AS2 (mov,__zero_reg__,r27) CR_TAB
2261 AS2 (adiw,r26,%o0) CR_TAB
2262 AS2 (st,X+,r24) CR_TAB
2263 AS2 (st,X+,r25) CR_TAB
2264 AS2 (st,X+,__tmp_reg__) CR_TAB
2265 AS2 (st,X,__zero_reg__) CR_TAB
2266 AS1 (clr,__zero_reg__) CR_TAB
2267 AS2 (sbiw,r26,%o0+3));
2268 }
2269 *l = 6;
2270 return (AS2 (adiw,r26,%o0) CR_TAB
2271 AS2 (st,X+,%A1) CR_TAB
2272 AS2 (st,X+,%B1) CR_TAB
2273 AS2 (st,X+,%C1) CR_TAB
2274 AS2 (st,X,%D1) CR_TAB
2275 AS2 (sbiw,r26,%o0+3));
2276 }
2277 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2278 AS2 (std,%B0,%B1) CR_TAB
2279 AS2 (std,%C0,%C1) CR_TAB
2280 AS2 (std,%D0,%D1));
2281 }
2282 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2283 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2284 AS2 (st,%0,%C1) CR_TAB
2285 AS2 (st,%0,%B1) CR_TAB
2286 AS2 (st,%0,%A1));
2287 else if (GET_CODE (base) == POST_INC) /* (R++) */
2288 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2289 AS2 (st,%0,%B1) CR_TAB
2290 AS2 (st,%0,%C1) CR_TAB
2291 AS2 (st,%0,%D1));
2292 fatal_insn ("unknown move insn:",insn);
2293 return "";
2294 }
2295
2296 const char *
2297 output_movsisf (rtx insn, rtx operands[], int *l)
2298 {
2299 int dummy;
2300 rtx dest = operands[0];
2301 rtx src = operands[1];
2302 int *real_l = l;
2303
2304 if (!l)
2305 l = &dummy;
2306
2307 if (register_operand (dest, VOIDmode))
2308 {
2309 if (register_operand (src, VOIDmode)) /* mov r,r */
2310 {
2311 if (true_regnum (dest) > true_regnum (src))
2312 {
2313 if (AVR_HAVE_MOVW)
2314 {
2315 *l = 2;
2316 return (AS2 (movw,%C0,%C1) CR_TAB
2317 AS2 (movw,%A0,%A1));
2318 }
2319 *l = 4;
2320 return (AS2 (mov,%D0,%D1) CR_TAB
2321 AS2 (mov,%C0,%C1) CR_TAB
2322 AS2 (mov,%B0,%B1) CR_TAB
2323 AS2 (mov,%A0,%A1));
2324 }
2325 else
2326 {
2327 if (AVR_HAVE_MOVW)
2328 {
2329 *l = 2;
2330 return (AS2 (movw,%A0,%A1) CR_TAB
2331 AS2 (movw,%C0,%C1));
2332 }
2333 *l = 4;
2334 return (AS2 (mov,%A0,%A1) CR_TAB
2335 AS2 (mov,%B0,%B1) CR_TAB
2336 AS2 (mov,%C0,%C1) CR_TAB
2337 AS2 (mov,%D0,%D1));
2338 }
2339 }
2340 else if (CONSTANT_P (src))
2341 {
2342 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2343 {
2344 *l = 4;
2345 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2346 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2347 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2348 AS2 (ldi,%D0,hhi8(%1)));
2349 }
2350
2351 if (GET_CODE (src) == CONST_INT)
2352 {
2353 const char *const clr_op0 =
2354 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2355 AS1 (clr,%B0) CR_TAB
2356 AS2 (movw,%C0,%A0))
2357 : (AS1 (clr,%A0) CR_TAB
2358 AS1 (clr,%B0) CR_TAB
2359 AS1 (clr,%C0) CR_TAB
2360 AS1 (clr,%D0));
2361
2362 if (src == const0_rtx) /* mov r,L */
2363 {
2364 *l = AVR_HAVE_MOVW ? 3 : 4;
2365 return clr_op0;
2366 }
2367 else if (src == const1_rtx)
2368 {
2369 if (!real_l)
2370 output_asm_insn (clr_op0, operands);
2371 *l = AVR_HAVE_MOVW ? 4 : 5;
2372 return AS1 (inc,%A0);
2373 }
2374 else if (src == constm1_rtx)
2375 {
2376 /* Immediate constants -1 to any register */
2377 if (AVR_HAVE_MOVW)
2378 {
2379 *l = 4;
2380 return (AS1 (clr,%A0) CR_TAB
2381 AS1 (dec,%A0) CR_TAB
2382 AS2 (mov,%B0,%A0) CR_TAB
2383 AS2 (movw,%C0,%A0));
2384 }
2385 *l = 5;
2386 return (AS1 (clr,%A0) CR_TAB
2387 AS1 (dec,%A0) CR_TAB
2388 AS2 (mov,%B0,%A0) CR_TAB
2389 AS2 (mov,%C0,%A0) CR_TAB
2390 AS2 (mov,%D0,%A0));
2391 }
2392 else
2393 {
2394 int bit_nr = exact_log2 (INTVAL (src));
2395
2396 if (bit_nr >= 0)
2397 {
2398 *l = AVR_HAVE_MOVW ? 5 : 6;
2399 if (!real_l)
2400 {
2401 output_asm_insn (clr_op0, operands);
2402 output_asm_insn ("set", operands);
2403 avr_output_bld (operands, bit_nr);
2404 }
2406
2407 return "";
2408 }
2409 }
2410 }
2411
2412 /* Last resort, better than loading from memory. */
2413 *l = 10;
2414 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2415 AS2 (ldi,r31,lo8(%1)) CR_TAB
2416 AS2 (mov,%A0,r31) CR_TAB
2417 AS2 (ldi,r31,hi8(%1)) CR_TAB
2418 AS2 (mov,%B0,r31) CR_TAB
2419 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2420 AS2 (mov,%C0,r31) CR_TAB
2421 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2422 AS2 (mov,%D0,r31) CR_TAB
2423 AS2 (mov,r31,__tmp_reg__));
2424 }
2425 else if (GET_CODE (src) == MEM)
2426 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2427 }
2428 else if (GET_CODE (dest) == MEM)
2429 {
2430 const char *template;
2431
2432 if (src == const0_rtx)
2433 operands[1] = zero_reg_rtx;
2434
2435 template = out_movsi_mr_r (insn, operands, real_l);
2436
2437 if (!real_l)
2438 output_asm_insn (template, operands);
2439
2440 operands[1] = src;
2441 return "";
2442 }
2443 fatal_insn ("invalid insn:", insn);
2444 return "";
2445 }
2446
2447 const char *
2448 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2449 {
2450 rtx dest = op[0];
2451 rtx src = op[1];
2452 rtx x = XEXP (dest, 0);
2453 int dummy;
2454
2455 if (!l)
2456 l = &dummy;
2457
2458 if (CONSTANT_ADDRESS_P (x))
2459 {
2460 if (avr_io_address_p (x, 1))
2461 {
2462 *l = 1;
2463 return AS2 (out,%0-0x20,%1);
2464 }
2465 *l = 2;
2466 return AS2 (sts,%0,%1);
2467 }
2468 /* memory access by reg+disp */
2469 else if (GET_CODE (x) == PLUS
2470 && REG_P (XEXP (x,0))
2471 && GET_CODE (XEXP (x,1)) == CONST_INT)
2472 {
2473 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2474 {
2475 int disp = INTVAL (XEXP (x,1));
2476 if (REGNO (XEXP (x,0)) != REG_Y)
2477 fatal_insn ("incorrect insn:",insn);
2478
2479 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2480 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2481 AS2 (std,Y+63,%1) CR_TAB
2482 AS2 (sbiw,r28,%o0-63));
2483
2484 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2485 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2486 AS2 (st,Y,%1) CR_TAB
2487 AS2 (subi,r28,lo8(%o0)) CR_TAB
2488 AS2 (sbci,r29,hi8(%o0)));
2489 }
2490 else if (REGNO (XEXP (x,0)) == REG_X)
2491 {
2492 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2493 {
2494 if (reg_unused_after (insn, XEXP (x,0)))
2495 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2496 AS2 (adiw,r26,%o0) CR_TAB
2497 AS2 (st,X,__tmp_reg__));
2498
2499 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2500 AS2 (adiw,r26,%o0) CR_TAB
2501 AS2 (st,X,__tmp_reg__) CR_TAB
2502 AS2 (sbiw,r26,%o0));
2503 }
2504 else
2505 {
2506 if (reg_unused_after (insn, XEXP (x,0)))
2507 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2508 AS2 (st,X,%1));
2509
2510 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2511 AS2 (st,X,%1) CR_TAB
2512 AS2 (sbiw,r26,%o0));
2513 }
2514 }
2515 *l = 1;
2516 return AS2 (std,%0,%1);
2517 }
2518 *l = 1;
2519 return AS2 (st,%0,%1);
2520 }
2521
2522 const char *
2523 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2524 {
2525 rtx dest = op[0];
2526 rtx src = op[1];
2527 rtx base = XEXP (dest, 0);
2528 int reg_base = true_regnum (base);
2529 int reg_src = true_regnum (src);
2530 /* "volatile" forces writing high byte first, even if less efficient,
2531 for correct operation with 16-bit I/O registers. */
2532 int mem_volatile_p = MEM_VOLATILE_P (dest);
2533 int tmp;
2534
2535 if (!l)
2536 l = &tmp;
2537 if (CONSTANT_ADDRESS_P (base))
2538 {
2539 if (avr_io_address_p (base, 2))
2540 {
2541 *l = 2;
2542 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2543 AS2 (out,%A0-0x20,%A1));
2544 }
2545 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2546 AS2 (sts,%A0,%A1));
2547 }
2548 if (reg_base > 0)
2549 {
2550 if (reg_base == REG_X)
2551 {
2552 if (reg_src == REG_X)
2553 {
2554 /* "st X+,r26" and "st -X,r26" are undefined. */
2555 if (!mem_volatile_p && reg_unused_after (insn, src))
2556 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2557 AS2 (st,X,r26) CR_TAB
2558 AS2 (adiw,r26,1) CR_TAB
2559 AS2 (st,X,__tmp_reg__));
2560 else
2561 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2562 AS2 (adiw,r26,1) CR_TAB
2563 AS2 (st,X,__tmp_reg__) CR_TAB
2564 AS2 (sbiw,r26,1) CR_TAB
2565 AS2 (st,X,r26));
2566 }
2567 else
2568 {
2569 if (!mem_volatile_p && reg_unused_after (insn, base))
2570 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2571 AS2 (st,X,%B1));
2572 else
2573 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2574 AS2 (st,X,%B1) CR_TAB
2575 AS2 (st,-X,%A1));
2576 }
2577 }
2578 else
2579 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2580 AS2 (st,%0,%A1));
2581 }
2582 else if (GET_CODE (base) == PLUS)
2583 {
2584 int disp = INTVAL (XEXP (base, 1));
2585 reg_base = REGNO (XEXP (base, 0));
2586 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2587 {
2588 if (reg_base != REG_Y)
2589 fatal_insn ("incorrect insn:",insn);
2590
2591 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2592 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2593 AS2 (std,Y+63,%B1) CR_TAB
2594 AS2 (std,Y+62,%A1) CR_TAB
2595 AS2 (sbiw,r28,%o0-62));
2596
2597 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2598 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2599 AS2 (std,Y+1,%B1) CR_TAB
2600 AS2 (st,Y,%A1) CR_TAB
2601 AS2 (subi,r28,lo8(%o0)) CR_TAB
2602 AS2 (sbci,r29,hi8(%o0)));
2603 }
2604 if (reg_base == REG_X)
2605 {
2606 /* (X + d) = R */
2607 if (reg_src == REG_X)
2608 {
2609 *l = 7;
2610 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2611 AS2 (mov,__zero_reg__,r27) CR_TAB
2612 AS2 (adiw,r26,%o0+1) CR_TAB
2613 AS2 (st,X,__zero_reg__) CR_TAB
2614 AS2 (st,-X,__tmp_reg__) CR_TAB
2615 AS1 (clr,__zero_reg__) CR_TAB
2616 AS2 (sbiw,r26,%o0));
2617 }
2618 *l = 4;
2619 return (AS2 (adiw,r26,%o0+1) CR_TAB
2620 AS2 (st,X,%B1) CR_TAB
2621 AS2 (st,-X,%A1) CR_TAB
2622 AS2 (sbiw,r26,%o0));
2623 }
2624 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2625 AS2 (std,%A0,%A1));
2626 }
2627 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2628 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2629 AS2 (st,%0,%A1));
2630 else if (GET_CODE (base) == POST_INC) /* (R++) */
2631 {
2632 if (mem_volatile_p)
2633 {
2634 if (REGNO (XEXP (base, 0)) == REG_X)
2635 {
2636 *l = 4;
2637 return (AS2 (adiw,r26,1) CR_TAB
2638 AS2 (st,X,%B1) CR_TAB
2639 AS2 (st,-X,%A1) CR_TAB
2640 AS2 (adiw,r26,2));
2641 }
2642 else
2643 {
2644 *l = 3;
2645 return (AS2 (std,%p0+1,%B1) CR_TAB
2646 AS2 (st,%p0,%A1) CR_TAB
2647 AS2 (adiw,%r0,2));
2648 }
2649 }
2650
2651 *l = 2;
2652 return (AS2 (st,%0,%A1) CR_TAB
2653 AS2 (st,%0,%B1));
2654 }
2655 fatal_insn ("unknown move insn:",insn);
2656 return "";
2657 }
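
/* Illustrative note (not taken verbatim from any single insn): for a
   volatile HImode store to a constant SRAM address -- say a 16-bit timer
   compare register, used here only as an example, with the source in
   r25:r24 -- out_movhi_mr_r above emits the high byte before the low byte:

       sts  addr+1,r25   ; high byte first
       sts  addr,r24     ; low byte second

   AVR 16-bit I/O registers are typically accessed through a shared
   temporary high-byte latch, so this write order is what the hardware
   expects for an atomic 16-bit update.  */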
2658
2659 /* Return 1 if a frame pointer is required for the current function.  */
2660
2661 int
2662 frame_pointer_required_p (void)
2663 {
2664 return (current_function_calls_alloca
2665 || current_function_args_info.nregs == 0
2666 || get_frame_size () > 0);
2667 }
2668
2669 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2670
2671 static RTX_CODE
2672 compare_condition (rtx insn)
2673 {
2674 rtx next = next_real_insn (insn);
2675 RTX_CODE cond = UNKNOWN;
2676 if (next && GET_CODE (next) == JUMP_INSN)
2677 {
2678 rtx pat = PATTERN (next);
2679 rtx src = SET_SRC (pat);
2680 rtx t = XEXP (src, 0);
2681 cond = GET_CODE (t);
2682 }
2683 return cond;
2684 }
2685
2686 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2687
2688 static int
2689 compare_sign_p (rtx insn)
2690 {
2691 RTX_CODE cond = compare_condition (insn);
2692 return (cond == GE || cond == LT);
2693 }
2694
2695 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2696 that needs to be swapped (GT, GTU, LE, LEU). */
2697
2698 int
2699 compare_diff_p (rtx insn)
2700 {
2701 RTX_CODE cond = compare_condition (insn);
2702 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2703 }
2704
2705 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2706
2707 int
2708 compare_eq_p (rtx insn)
2709 {
2710 RTX_CODE cond = compare_condition (insn);
2711 return (cond == EQ || cond == NE);
2712 }
2713
2714
2715 /* Output test instruction for HImode. */
2716
2717 const char *
2718 out_tsthi (rtx insn, int *l)
2719 {
2720 if (compare_sign_p (insn))
2721 {
2722 if (l) *l = 1;
2723 return AS1 (tst,%B0);
2724 }
2725 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2726 && compare_eq_p (insn))
2727 {
2728 /* Faster than sbiw if we can clobber the operand. */
2729 if (l) *l = 1;
2730 return AS2 (or,%A0,%B0);
2731 }
2732 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2733 {
2734 if (l) *l = 1;
2735 return AS2 (sbiw,%0,0);
2736 }
2737 if (l) *l = 2;
2738 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2739 AS2 (cpc,%B0,__zero_reg__));
2740 }
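
/* Note on the `or %A0,%B0' variant above: the inclusive OR of the two
   bytes is zero exactly when both bytes -- and hence the whole 16-bit
   value -- are zero, so a single OR is a valid EQ/NE test whenever the
   operand may be clobbered.  E.g. with the value 0x0400 in r25:r24,
   `or r24,r25' leaves 0x04, the Z flag stays clear and a `brne' is
   taken.  Unlike `sbiw %0,0' it also works outside ADDW_REGS.  */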
2741
2742
2743 /* Output test instruction for SImode. */
2744
2745 const char *
2746 out_tstsi (rtx insn, int *l)
2747 {
2748 if (compare_sign_p (insn))
2749 {
2750 if (l) *l = 1;
2751 return AS1 (tst,%D0);
2752 }
2753 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2754 {
2755 if (l) *l = 3;
2756 return (AS2 (sbiw,%A0,0) CR_TAB
2757 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2758 AS2 (cpc,%D0,__zero_reg__));
2759 }
2760 if (l) *l = 4;
2761 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2762 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2763 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2764 AS2 (cpc,%D0,__zero_reg__));
2765 }
2766
2767
2768 /* Generate asm equivalent for various shifts.
2769 Shift count is a CONST_INT, MEM or REG.
2770 This only handles cases that are not already
2771 carefully hand-optimized in ?sh??i3_out. */
2772
2773 void
2774 out_shift_with_cnt (const char *template, rtx insn, rtx operands[],
2775 int *len, int t_len)
2776 {
2777 rtx op[10];
2778 char str[500];
2779 int second_label = 1;
2780 int saved_in_tmp = 0;
2781 int use_zero_reg = 0;
2782
2783 op[0] = operands[0];
2784 op[1] = operands[1];
2785 op[2] = operands[2];
2786 op[3] = operands[3];
2787 str[0] = 0;
2788
2789 if (len)
2790 *len = 1;
2791
2792 if (GET_CODE (operands[2]) == CONST_INT)
2793 {
2794 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2795 int count = INTVAL (operands[2]);
2796 int max_len = 10; /* If larger than this, always use a loop. */
2797
2798 if (count <= 0)
2799 {
2800 if (len)
2801 *len = 0;
2802 return;
2803 }
2804
2805 if (count < 8 && !scratch)
2806 use_zero_reg = 1;
2807
2808 if (optimize_size)
2809 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2810
2811 if (t_len * count <= max_len)
2812 {
2813 /* Output shifts inline with no loop - faster. */
2814 if (len)
2815 *len = t_len * count;
2816 else
2817 {
2818 while (count-- > 0)
2819 output_asm_insn (template, op);
2820 }
2821
2822 return;
2823 }
2824
2825 if (scratch)
2826 {
2827 if (!len)
2828 strcat (str, AS2 (ldi,%3,%2));
2829 }
2830 else if (use_zero_reg)
2831 {
2832 /* Hack to save one word: use __zero_reg__ as loop counter.
2833 Set one bit, then shift in a loop until it is 0 again. */
2834
2835 op[3] = zero_reg_rtx;
2836 if (len)
2837 *len = 2;
2838 else
2839 strcat (str, ("set" CR_TAB
2840 AS2 (bld,%3,%2-1)));
2841 }
2842 else
2843 {
2844 /* No scratch register available, use one from LD_REGS (saved in
2845 __tmp_reg__) that doesn't overlap with registers to shift. */
2846
2847 op[3] = gen_rtx_REG (QImode,
2848 ((true_regnum (operands[0]) - 1) & 15) + 16);
2849 op[4] = tmp_reg_rtx;
2850 saved_in_tmp = 1;
2851
2852 if (len)
2853 *len = 3; /* Includes "mov %3,%4" after the loop. */
2854 else
2855 strcat (str, (AS2 (mov,%4,%3) CR_TAB
2856 AS2 (ldi,%3,%2)));
2857 }
2858
2859 second_label = 0;
2860 }
2861 else if (GET_CODE (operands[2]) == MEM)
2862 {
2863 rtx op_mov[10];
2864
2865 op[3] = op_mov[0] = tmp_reg_rtx;
2866 op_mov[1] = op[2];
2867
2868 if (len)
2869 out_movqi_r_mr (insn, op_mov, len);
2870 else
2871 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2872 }
2873 else if (register_operand (operands[2], QImode))
2874 {
2875 if (reg_unused_after (insn, operands[2]))
2876 op[3] = op[2];
2877 else
2878 {
2879 op[3] = tmp_reg_rtx;
2880 if (!len)
2881 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2882 }
2883 }
2884 else
2885 fatal_insn ("bad shift insn:", insn);
2886
2887 if (second_label)
2888 {
2889 if (len)
2890 ++*len;
2891 else
2892 strcat (str, AS1 (rjmp,2f));
2893 }
2894
2895 if (len)
2896 *len += t_len + 2; /* template + dec + brXX */
2897 else
2898 {
2899 strcat (str, "\n1:\t");
2900 strcat (str, template);
2901 strcat (str, second_label ? "\n2:\t" : "\n\t");
2902 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2903 strcat (str, CR_TAB);
2904 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
2905 if (saved_in_tmp)
2906 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2907 output_asm_insn (str, op);
2908 }
2909 }
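
/* Sketch of the __zero_reg__ loop form produced above (illustrative,
   assuming a QImode shift left by the constant 6 compiled with -Os, so
   the inline form is too long and no scratch register is available):

       set                      ; T flag := 1
       bld  __zero_reg__,5      ; __zero_reg__ := 1 << (count - 1)
   1:  lsl  %0                  ; one instance of the shift template
       lsr  __zero_reg__        ; becomes zero after `count' iterations
       brne 1b

   The counter shifts itself back down to zero, so __zero_reg__ needs no
   extra clearing when the loop exits.  */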
2910
2911
2912 /* 8bit shift left ((char)x << i) */
2913
2914 const char *
2915 ashlqi3_out (rtx insn, rtx operands[], int *len)
2916 {
2917 if (GET_CODE (operands[2]) == CONST_INT)
2918 {
2919 int k;
2920
2921 if (!len)
2922 len = &k;
2923
2924 switch (INTVAL (operands[2]))
2925 {
2926 default:
2927 if (INTVAL (operands[2]) < 8)
2928 break;
2929
2930 *len = 1;
2931 return AS1 (clr,%0);
2932
2933 case 1:
2934 *len = 1;
2935 return AS1 (lsl,%0);
2936
2937 case 2:
2938 *len = 2;
2939 return (AS1 (lsl,%0) CR_TAB
2940 AS1 (lsl,%0));
2941
2942 case 3:
2943 *len = 3;
2944 return (AS1 (lsl,%0) CR_TAB
2945 AS1 (lsl,%0) CR_TAB
2946 AS1 (lsl,%0));
2947
2948 case 4:
2949 if (test_hard_reg_class (LD_REGS, operands[0]))
2950 {
2951 *len = 2;
2952 return (AS1 (swap,%0) CR_TAB
2953 AS2 (andi,%0,0xf0));
2954 }
2955 *len = 4;
2956 return (AS1 (lsl,%0) CR_TAB
2957 AS1 (lsl,%0) CR_TAB
2958 AS1 (lsl,%0) CR_TAB
2959 AS1 (lsl,%0));
2960
2961 case 5:
2962 if (test_hard_reg_class (LD_REGS, operands[0]))
2963 {
2964 *len = 3;
2965 return (AS1 (swap,%0) CR_TAB
2966 AS1 (lsl,%0) CR_TAB
2967 AS2 (andi,%0,0xe0));
2968 }
2969 *len = 5;
2970 return (AS1 (lsl,%0) CR_TAB
2971 AS1 (lsl,%0) CR_TAB
2972 AS1 (lsl,%0) CR_TAB
2973 AS1 (lsl,%0) CR_TAB
2974 AS1 (lsl,%0));
2975
2976 case 6:
2977 if (test_hard_reg_class (LD_REGS, operands[0]))
2978 {
2979 *len = 4;
2980 return (AS1 (swap,%0) CR_TAB
2981 AS1 (lsl,%0) CR_TAB
2982 AS1 (lsl,%0) CR_TAB
2983 AS2 (andi,%0,0xc0));
2984 }
2985 *len = 6;
2986 return (AS1 (lsl,%0) CR_TAB
2987 AS1 (lsl,%0) CR_TAB
2988 AS1 (lsl,%0) CR_TAB
2989 AS1 (lsl,%0) CR_TAB
2990 AS1 (lsl,%0) CR_TAB
2991 AS1 (lsl,%0));
2992
2993 case 7:
2994 *len = 3;
2995 return (AS1 (ror,%0) CR_TAB
2996 AS1 (clr,%0) CR_TAB
2997 AS1 (ror,%0));
2998 }
2999 }
3000 else if (CONSTANT_P (operands[2]))
3001 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3002
3003 out_shift_with_cnt (AS1 (lsl,%0),
3004 insn, operands, len, 1);
3005 return "";
3006 }
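
/* Why the `swap' trick above works (illustrative): SWAP exchanges the two
   nibbles of a byte, so for x = 0bABCDWXYZ it yields 0bWXYZABCD; the
   following `andi 0xf0' clears the nibble that wrapped around, leaving
   0bWXYZ0000, i.e. exactly x << 4 in two instructions.  ANDI only accepts
   r16..r31, which is why this form is guarded by the LD_REGS test.  */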
3007
3008
3009 /* 16bit shift left ((short)x << i) */
3010
3011 const char *
3012 ashlhi3_out (rtx insn, rtx operands[], int *len)
3013 {
3014 if (GET_CODE (operands[2]) == CONST_INT)
3015 {
3016 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3017 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3018 int k;
3019 int *t = len;
3020
3021 if (!len)
3022 len = &k;
3023
3024 switch (INTVAL (operands[2]))
3025 {
3026 default:
3027 if (INTVAL (operands[2]) < 16)
3028 break;
3029
3030 *len = 2;
3031 return (AS1 (clr,%B0) CR_TAB
3032 AS1 (clr,%A0));
3033
3034 case 4:
3035 if (optimize_size && scratch)
3036 break; /* 5 */
3037 if (ldi_ok)
3038 {
3039 *len = 6;
3040 return (AS1 (swap,%A0) CR_TAB
3041 AS1 (swap,%B0) CR_TAB
3042 AS2 (andi,%B0,0xf0) CR_TAB
3043 AS2 (eor,%B0,%A0) CR_TAB
3044 AS2 (andi,%A0,0xf0) CR_TAB
3045 AS2 (eor,%B0,%A0));
3046 }
3047 if (scratch)
3048 {
3049 *len = 7;
3050 return (AS1 (swap,%A0) CR_TAB
3051 AS1 (swap,%B0) CR_TAB
3052 AS2 (ldi,%3,0xf0) CR_TAB
3053 AS2 (and,%B0,%3) CR_TAB
3054 AS2 (eor,%B0,%A0) CR_TAB
3055 AS2 (and,%A0,%3) CR_TAB
3056 AS2 (eor,%B0,%A0));
3057 }
3058 break; /* optimize_size ? 6 : 8 */
3059
3060 case 5:
3061 if (optimize_size)
3062 break; /* scratch ? 5 : 6 */
3063 if (ldi_ok)
3064 {
3065 *len = 8;
3066 return (AS1 (lsl,%A0) CR_TAB
3067 AS1 (rol,%B0) CR_TAB
3068 AS1 (swap,%A0) CR_TAB
3069 AS1 (swap,%B0) CR_TAB
3070 AS2 (andi,%B0,0xf0) CR_TAB
3071 AS2 (eor,%B0,%A0) CR_TAB
3072 AS2 (andi,%A0,0xf0) CR_TAB
3073 AS2 (eor,%B0,%A0));
3074 }
3075 if (scratch)
3076 {
3077 *len = 9;
3078 return (AS1 (lsl,%A0) CR_TAB
3079 AS1 (rol,%B0) CR_TAB
3080 AS1 (swap,%A0) CR_TAB
3081 AS1 (swap,%B0) CR_TAB
3082 AS2 (ldi,%3,0xf0) CR_TAB
3083 AS2 (and,%B0,%3) CR_TAB
3084 AS2 (eor,%B0,%A0) CR_TAB
3085 AS2 (and,%A0,%3) CR_TAB
3086 AS2 (eor,%B0,%A0));
3087 }
3088 break; /* 10 */
3089
3090 case 6:
3091 if (optimize_size)
3092 break; /* scratch ? 5 : 6 */
3093 *len = 9;
3094 return (AS1 (clr,__tmp_reg__) CR_TAB
3095 AS1 (lsr,%B0) CR_TAB
3096 AS1 (ror,%A0) CR_TAB
3097 AS1 (ror,__tmp_reg__) CR_TAB
3098 AS1 (lsr,%B0) CR_TAB
3099 AS1 (ror,%A0) CR_TAB
3100 AS1 (ror,__tmp_reg__) CR_TAB
3101 AS2 (mov,%B0,%A0) CR_TAB
3102 AS2 (mov,%A0,__tmp_reg__));
3103
3104 case 7:
3105 *len = 5;
3106 return (AS1 (lsr,%B0) CR_TAB
3107 AS2 (mov,%B0,%A0) CR_TAB
3108 AS1 (clr,%A0) CR_TAB
3109 AS1 (ror,%B0) CR_TAB
3110 AS1 (ror,%A0));
3111
3112 case 8:
3113 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3114 AS1 (clr,%A0));
3115
3116 case 9:
3117 *len = 3;
3118 return (AS2 (mov,%B0,%A0) CR_TAB
3119 AS1 (clr,%A0) CR_TAB
3120 AS1 (lsl,%B0));
3121
3122 case 10:
3123 *len = 4;
3124 return (AS2 (mov,%B0,%A0) CR_TAB
3125 AS1 (clr,%A0) CR_TAB
3126 AS1 (lsl,%B0) CR_TAB
3127 AS1 (lsl,%B0));
3128
3129 case 11:
3130 *len = 5;
3131 return (AS2 (mov,%B0,%A0) CR_TAB
3132 AS1 (clr,%A0) CR_TAB
3133 AS1 (lsl,%B0) CR_TAB
3134 AS1 (lsl,%B0) CR_TAB
3135 AS1 (lsl,%B0));
3136
3137 case 12:
3138 if (ldi_ok)
3139 {
3140 *len = 4;
3141 return (AS2 (mov,%B0,%A0) CR_TAB
3142 AS1 (clr,%A0) CR_TAB
3143 AS1 (swap,%B0) CR_TAB
3144 AS2 (andi,%B0,0xf0));
3145 }
3146 if (scratch)
3147 {
3148 *len = 5;
3149 return (AS2 (mov,%B0,%A0) CR_TAB
3150 AS1 (clr,%A0) CR_TAB
3151 AS1 (swap,%B0) CR_TAB
3152 AS2 (ldi,%3,0xf0) CR_TAB
3153 AS2 (and,%B0,%3));
3154 }
3155 *len = 6;
3156 return (AS2 (mov,%B0,%A0) CR_TAB
3157 AS1 (clr,%A0) CR_TAB
3158 AS1 (lsl,%B0) CR_TAB
3159 AS1 (lsl,%B0) CR_TAB
3160 AS1 (lsl,%B0) CR_TAB
3161 AS1 (lsl,%B0));
3162
3163 case 13:
3164 if (ldi_ok)
3165 {
3166 *len = 5;
3167 return (AS2 (mov,%B0,%A0) CR_TAB
3168 AS1 (clr,%A0) CR_TAB
3169 AS1 (swap,%B0) CR_TAB
3170 AS1 (lsl,%B0) CR_TAB
3171 AS2 (andi,%B0,0xe0));
3172 }
3173 if (AVR_HAVE_MUL && scratch)
3174 {
3175 *len = 5;
3176 return (AS2 (ldi,%3,0x20) CR_TAB
3177 AS2 (mul,%A0,%3) CR_TAB
3178 AS2 (mov,%B0,r0) CR_TAB
3179 AS1 (clr,%A0) CR_TAB
3180 AS1 (clr,__zero_reg__));
3181 }
3182 if (optimize_size && scratch)
3183 break; /* 5 */
3184 if (scratch)
3185 {
3186 *len = 6;
3187 return (AS2 (mov,%B0,%A0) CR_TAB
3188 AS1 (clr,%A0) CR_TAB
3189 AS1 (swap,%B0) CR_TAB
3190 AS1 (lsl,%B0) CR_TAB
3191 AS2 (ldi,%3,0xe0) CR_TAB
3192 AS2 (and,%B0,%3));
3193 }
3194 if (AVR_HAVE_MUL)
3195 {
3196 *len = 6;
3197 return ("set" CR_TAB
3198 AS2 (bld,r1,5) CR_TAB
3199 AS2 (mul,%A0,r1) CR_TAB
3200 AS2 (mov,%B0,r0) CR_TAB
3201 AS1 (clr,%A0) CR_TAB
3202 AS1 (clr,__zero_reg__));
3203 }
3204 *len = 7;
3205 return (AS2 (mov,%B0,%A0) CR_TAB
3206 AS1 (clr,%A0) CR_TAB
3207 AS1 (lsl,%B0) CR_TAB
3208 AS1 (lsl,%B0) CR_TAB
3209 AS1 (lsl,%B0) CR_TAB
3210 AS1 (lsl,%B0) CR_TAB
3211 AS1 (lsl,%B0));
3212
3213 case 14:
3214 if (AVR_HAVE_MUL && ldi_ok)
3215 {
3216 *len = 5;
3217 return (AS2 (ldi,%B0,0x40) CR_TAB
3218 AS2 (mul,%A0,%B0) CR_TAB
3219 AS2 (mov,%B0,r0) CR_TAB
3220 AS1 (clr,%A0) CR_TAB
3221 AS1 (clr,__zero_reg__));
3222 }
3223 if (AVR_HAVE_MUL && scratch)
3224 {
3225 *len = 5;
3226 return (AS2 (ldi,%3,0x40) CR_TAB
3227 AS2 (mul,%A0,%3) CR_TAB
3228 AS2 (mov,%B0,r0) CR_TAB
3229 AS1 (clr,%A0) CR_TAB
3230 AS1 (clr,__zero_reg__));
3231 }
3232 if (optimize_size && ldi_ok)
3233 {
3234 *len = 5;
3235 return (AS2 (mov,%B0,%A0) CR_TAB
3236 AS2 (ldi,%A0,6) "\n1:\t"
3237 AS1 (lsl,%B0) CR_TAB
3238 AS1 (dec,%A0) CR_TAB
3239 AS1 (brne,1b));
3240 }
3241 if (optimize_size && scratch)
3242 break; /* 5 */
3243 *len = 6;
3244 return (AS1 (clr,%B0) CR_TAB
3245 AS1 (lsr,%A0) CR_TAB
3246 AS1 (ror,%B0) CR_TAB
3247 AS1 (lsr,%A0) CR_TAB
3248 AS1 (ror,%B0) CR_TAB
3249 AS1 (clr,%A0));
3250
3251 case 15:
3252 *len = 4;
3253 return (AS1 (clr,%B0) CR_TAB
3254 AS1 (lsr,%A0) CR_TAB
3255 AS1 (ror,%B0) CR_TAB
3256 AS1 (clr,%A0));
3257 }
3258 len = t;
3259 }
3260 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3261 AS1 (rol,%B0)),
3262 insn, operands, len, 2);
3263 return "";
3264 }
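
/* Worked example for the MUL-based cases above (illustrative, using the
   shift-by-13 form): x << 13 on HImode keeps only bits 13..15, so the
   result is %B0 = (%A0 << 5) & 0xff with %A0 = 0.  `mul %A0,0x20'
   computes %A0 * 2^5 as a 16-bit product in r1:r0, whose low byte r0 is
   exactly that value, hence `mov %B0,r0' followed by `clr %A0'.  MUL
   clobbers r1, so __zero_reg__ is cleared again afterwards.  */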
3265
3266
3267 /* 32bit shift left ((long)x << i) */
3268
3269 const char *
3270 ashlsi3_out (rtx insn, rtx operands[], int *len)
3271 {
3272 if (GET_CODE (operands[2]) == CONST_INT)
3273 {
3274 int k;
3275 int *t = len;
3276
3277 if (!len)
3278 len = &k;
3279
3280 switch (INTVAL (operands[2]))
3281 {
3282 default:
3283 if (INTVAL (operands[2]) < 32)
3284 break;
3285
3286 if (AVR_HAVE_MOVW)
3287 return *len = 3, (AS1 (clr,%D0) CR_TAB
3288 AS1 (clr,%C0) CR_TAB
3289 AS2 (movw,%A0,%C0));
3290 *len = 4;
3291 return (AS1 (clr,%D0) CR_TAB
3292 AS1 (clr,%C0) CR_TAB
3293 AS1 (clr,%B0) CR_TAB
3294 AS1 (clr,%A0));
3295
3296 case 8:
3297 {
3298 int reg0 = true_regnum (operands[0]);
3299 int reg1 = true_regnum (operands[1]);
3300 *len = 4;
3301 if (reg0 >= reg1)
3302 return (AS2 (mov,%D0,%C1) CR_TAB
3303 AS2 (mov,%C0,%B1) CR_TAB
3304 AS2 (mov,%B0,%A1) CR_TAB
3305 AS1 (clr,%A0));
3306 else
3307 return (AS1 (clr,%A0) CR_TAB
3308 AS2 (mov,%B0,%A1) CR_TAB
3309 AS2 (mov,%C0,%B1) CR_TAB
3310 AS2 (mov,%D0,%C1));
3311 }
3312
3313 case 16:
3314 {
3315 int reg0 = true_regnum (operands[0]);
3316 int reg1 = true_regnum (operands[1]);
3317 if (reg0 + 2 == reg1)
3318 return *len = 2, (AS1 (clr,%B0) CR_TAB
3319 AS1 (clr,%A0));
3320 if (AVR_HAVE_MOVW)
3321 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3322 AS1 (clr,%B0) CR_TAB
3323 AS1 (clr,%A0));
3324 else
3325 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3326 AS2 (mov,%D0,%B1) CR_TAB
3327 AS1 (clr,%B0) CR_TAB
3328 AS1 (clr,%A0));
3329 }
3330
3331 case 24:
3332 *len = 4;
3333 return (AS2 (mov,%D0,%A1) CR_TAB
3334 AS1 (clr,%C0) CR_TAB
3335 AS1 (clr,%B0) CR_TAB
3336 AS1 (clr,%A0));
3337
3338 case 31:
3339 *len = 6;
3340 return (AS1 (clr,%D0) CR_TAB
3341 AS1 (lsr,%A0) CR_TAB
3342 AS1 (ror,%D0) CR_TAB
3343 AS1 (clr,%C0) CR_TAB
3344 AS1 (clr,%B0) CR_TAB
3345 AS1 (clr,%A0));
3346 }
3347 len = t;
3348 }
3349 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3350 AS1 (rol,%B0) CR_TAB
3351 AS1 (rol,%C0) CR_TAB
3352 AS1 (rol,%D0)),
3353 insn, operands, len, 4);
3354 return "";
3355 }
3356
3357 /* 8bit arithmetic shift right ((signed char)x >> i) */
3358
3359 const char *
3360 ashrqi3_out (rtx insn, rtx operands[], int *len)
3361 {
3362 if (GET_CODE (operands[2]) == CONST_INT)
3363 {
3364 int k;
3365
3366 if (!len)
3367 len = &k;
3368
3369 switch (INTVAL (operands[2]))
3370 {
3371 case 1:
3372 *len = 1;
3373 return AS1 (asr,%0);
3374
3375 case 2:
3376 *len = 2;
3377 return (AS1 (asr,%0) CR_TAB
3378 AS1 (asr,%0));
3379
3380 case 3:
3381 *len = 3;
3382 return (AS1 (asr,%0) CR_TAB
3383 AS1 (asr,%0) CR_TAB
3384 AS1 (asr,%0));
3385
3386 case 4:
3387 *len = 4;
3388 return (AS1 (asr,%0) CR_TAB
3389 AS1 (asr,%0) CR_TAB
3390 AS1 (asr,%0) CR_TAB
3391 AS1 (asr,%0));
3392
3393 case 5:
3394 *len = 5;
3395 return (AS1 (asr,%0) CR_TAB
3396 AS1 (asr,%0) CR_TAB
3397 AS1 (asr,%0) CR_TAB
3398 AS1 (asr,%0) CR_TAB
3399 AS1 (asr,%0));
3400
3401 case 6:
3402 *len = 4;
3403 return (AS2 (bst,%0,6) CR_TAB
3404 AS1 (lsl,%0) CR_TAB
3405 AS2 (sbc,%0,%0) CR_TAB
3406 AS2 (bld,%0,0));
3407
3408 default:
3409 if (INTVAL (operands[2]) < 8)
3410 break;
3411
3412 /* fall through */
3413
3414 case 7:
3415 *len = 2;
3416 return (AS1 (lsl,%0) CR_TAB
3417 AS2 (sbc,%0,%0));
3418 }
3419 }
3420 else if (CONSTANT_P (operands[2]))
3421 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3422
3423 out_shift_with_cnt (AS1 (asr,%0),
3424 insn, operands, len, 1);
3425 return "";
3426 }
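
/* Note on the shift-by-7 form above (illustrative): `lsl %0' moves the
   sign bit into the carry flag, and `sbc %0,%0' then computes
   %0 - %0 - C = -C, i.e. 0x00 for a non-negative input and 0xff for a
   negative one -- exactly (signed char) x >> 7 in two instructions.  */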
3427
3428
3429 /* 16bit arithmetic shift right ((signed short)x >> i) */
3430
3431 const char *
3432 ashrhi3_out (rtx insn, rtx operands[], int *len)
3433 {
3434 if (GET_CODE (operands[2]) == CONST_INT)
3435 {
3436 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3437 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3438 int k;
3439 int *t = len;
3440
3441 if (!len)
3442 len = &k;
3443
3444 switch (INTVAL (operands[2]))
3445 {
3446 case 4:
3447 case 5:
3448 /* XXX try to optimize this too? */
3449 break;
3450
3451 case 6:
3452 if (optimize_size)
3453 break; /* scratch ? 5 : 6 */
3454 *len = 8;
3455 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3456 AS2 (mov,%A0,%B0) CR_TAB
3457 AS1 (lsl,__tmp_reg__) CR_TAB
3458 AS1 (rol,%A0) CR_TAB
3459 AS2 (sbc,%B0,%B0) CR_TAB
3460 AS1 (lsl,__tmp_reg__) CR_TAB
3461 AS1 (rol,%A0) CR_TAB
3462 AS1 (rol,%B0));
3463
3464 case 7:
3465 *len = 4;
3466 return (AS1 (lsl,%A0) CR_TAB
3467 AS2 (mov,%A0,%B0) CR_TAB
3468 AS1 (rol,%A0) CR_TAB
3469 AS2 (sbc,%B0,%B0));
3470
3471 case 8:
3472 {
3473 int reg0 = true_regnum (operands[0]);
3474 int reg1 = true_regnum (operands[1]);
3475
3476 if (reg0 == reg1)
3477 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3478 AS1 (lsl,%B0) CR_TAB
3479 AS2 (sbc,%B0,%B0));
3480 else
3481 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3482 AS1 (clr,%B0) CR_TAB
3483 AS2 (sbrc,%A0,7) CR_TAB
3484 AS1 (dec,%B0));
3485 }
3486
3487 case 9:
3488 *len = 4;
3489 return (AS2 (mov,%A0,%B0) CR_TAB
3490 AS1 (lsl,%B0) CR_TAB
3491 AS2 (sbc,%B0,%B0) CR_TAB
3492 AS1 (asr,%A0));
3493
3494 case 10:
3495 *len = 5;
3496 return (AS2 (mov,%A0,%B0) CR_TAB
3497 AS1 (lsl,%B0) CR_TAB
3498 AS2 (sbc,%B0,%B0) CR_TAB
3499 AS1 (asr,%A0) CR_TAB
3500 AS1 (asr,%A0));
3501
3502 case 11:
3503 if (AVR_HAVE_MUL && ldi_ok)
3504 {
3505 *len = 5;
3506 return (AS2 (ldi,%A0,0x20) CR_TAB
3507 AS2 (muls,%B0,%A0) CR_TAB
3508 AS2 (mov,%A0,r1) CR_TAB
3509 AS2 (sbc,%B0,%B0) CR_TAB
3510 AS1 (clr,__zero_reg__));
3511 }
3512 if (optimize_size && scratch)
3513 break; /* 5 */
3514 *len = 6;
3515 return (AS2 (mov,%A0,%B0) CR_TAB
3516 AS1 (lsl,%B0) CR_TAB
3517 AS2 (sbc,%B0,%B0) CR_TAB
3518 AS1 (asr,%A0) CR_TAB
3519 AS1 (asr,%A0) CR_TAB
3520 AS1 (asr,%A0));
3521
3522 case 12:
3523 if (AVR_HAVE_MUL && ldi_ok)
3524 {
3525 *len = 5;
3526 return (AS2 (ldi,%A0,0x10) CR_TAB
3527 AS2 (muls,%B0,%A0) CR_TAB
3528 AS2 (mov,%A0,r1) CR_TAB
3529 AS2 (sbc,%B0,%B0) CR_TAB
3530 AS1 (clr,__zero_reg__));
3531 }
3532 if (optimize_size && scratch)
3533 break; /* 5 */
3534 *len = 7;
3535 return (AS2 (mov,%A0,%B0) CR_TAB
3536 AS1 (lsl,%B0) CR_TAB
3537 AS2 (sbc,%B0,%B0) CR_TAB
3538 AS1 (asr,%A0) CR_TAB
3539 AS1 (asr,%A0) CR_TAB
3540 AS1 (asr,%A0) CR_TAB
3541 AS1 (asr,%A0));
3542
3543 case 13:
3544 if (AVR_HAVE_MUL && ldi_ok)
3545 {
3546 *len = 5;
3547 return (AS2 (ldi,%A0,0x08) CR_TAB
3548 AS2 (muls,%B0,%A0) CR_TAB
3549 AS2 (mov,%A0,r1) CR_TAB
3550 AS2 (sbc,%B0,%B0) CR_TAB
3551 AS1 (clr,__zero_reg__));
3552 }
3553 if (optimize_size)
3554 break; /* scratch ? 5 : 7 */
3555 *len = 8;
3556 return (AS2 (mov,%A0,%B0) CR_TAB
3557 AS1 (lsl,%B0) CR_TAB
3558 AS2 (sbc,%B0,%B0) CR_TAB
3559 AS1 (asr,%A0) CR_TAB
3560 AS1 (asr,%A0) CR_TAB
3561 AS1 (asr,%A0) CR_TAB
3562 AS1 (asr,%A0) CR_TAB
3563 AS1 (asr,%A0));
3564
3565 case 14:
3566 *len = 5;
3567 return (AS1 (lsl,%B0) CR_TAB
3568 AS2 (sbc,%A0,%A0) CR_TAB
3569 AS1 (lsl,%B0) CR_TAB
3570 AS2 (mov,%B0,%A0) CR_TAB
3571 AS1 (rol,%A0));
3572
3573 default:
3574 if (INTVAL (operands[2]) < 16)
3575 break;
3576
3577 /* fall through */
3578
3579 case 15:
3580 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3581 AS2 (sbc,%A0,%A0) CR_TAB
3582 AS2 (mov,%B0,%A0));
3583 }
3584 len = t;
3585 }
3586 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3587 AS1 (ror,%A0)),
3588 insn, operands, len, 2);
3589 return "";
3590 }
3591
3592
3593 /* 32bit arithmetic shift right ((signed long)x >> i) */
3594
3595 const char *
3596 ashrsi3_out (rtx insn, rtx operands[], int *len)
3597 {
3598 if (GET_CODE (operands[2]) == CONST_INT)
3599 {
3600 int k;
3601 int *t = len;
3602
3603 if (!len)
3604 len = &k;
3605
3606 switch (INTVAL (operands[2]))
3607 {
3608 case 8:
3609 {
3610 int reg0 = true_regnum (operands[0]);
3611 int reg1 = true_regnum (operands[1]);
3612 *len=6;
3613 if (reg0 <= reg1)
3614 return (AS2 (mov,%A0,%B1) CR_TAB
3615 AS2 (mov,%B0,%C1) CR_TAB
3616 AS2 (mov,%C0,%D1) CR_TAB
3617 AS1 (clr,%D0) CR_TAB
3618 AS2 (sbrc,%C0,7) CR_TAB
3619 AS1 (dec,%D0));
3620 else
3621 return (AS1 (clr,%D0) CR_TAB
3622 AS2 (sbrc,%D1,7) CR_TAB
3623 AS1 (dec,%D0) CR_TAB
3624 AS2 (mov,%C0,%D1) CR_TAB
3625 AS2 (mov,%B0,%C1) CR_TAB
3626 AS2 (mov,%A0,%B1));
3627 }
3628
3629 case 16:
3630 {
3631 int reg0 = true_regnum (operands[0]);
3632 int reg1 = true_regnum (operands[1]);
3633
3634 if (reg0 == reg1 + 2)
3635 return *len = 4, (AS1 (clr,%D0) CR_TAB
3636 AS2 (sbrc,%B0,7) CR_TAB
3637 AS1 (com,%D0) CR_TAB
3638 AS2 (mov,%C0,%D0));
3639 if (AVR_HAVE_MOVW)
3640 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3641 AS1 (clr,%D0) CR_TAB
3642 AS2 (sbrc,%B0,7) CR_TAB
3643 AS1 (com,%D0) CR_TAB
3644 AS2 (mov,%C0,%D0));
3645 else
3646 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3647 AS2 (mov,%A0,%C1) CR_TAB
3648 AS1 (clr,%D0) CR_TAB
3649 AS2 (sbrc,%B0,7) CR_TAB
3650 AS1 (com,%D0) CR_TAB
3651 AS2 (mov,%C0,%D0));
3652 }
3653
3654 case 24:
3655 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3656 AS1 (clr,%D0) CR_TAB
3657 AS2 (sbrc,%A0,7) CR_TAB
3658 AS1 (com,%D0) CR_TAB
3659 AS2 (mov,%B0,%D0) CR_TAB
3660 AS2 (mov,%C0,%D0));
3661
3662 default:
3663 if (INTVAL (operands[2]) < 32)
3664 break;
3665
3666 /* fall through */
3667
3668 case 31:
3669 if (AVR_HAVE_MOVW)
3670 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3671 AS2 (sbc,%A0,%A0) CR_TAB
3672 AS2 (mov,%B0,%A0) CR_TAB
3673 AS2 (movw,%C0,%A0));
3674 else
3675 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3676 AS2 (sbc,%A0,%A0) CR_TAB
3677 AS2 (mov,%B0,%A0) CR_TAB
3678 AS2 (mov,%C0,%A0) CR_TAB
3679 AS2 (mov,%D0,%A0));
3680 }
3681 len = t;
3682 }
3683 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3684 AS1 (ror,%C0) CR_TAB
3685 AS1 (ror,%B0) CR_TAB
3686 AS1 (ror,%A0)),
3687 insn, operands, len, 4);
3688 return "";
3689 }
3690
3691 /* 8bit logical shift right ((unsigned char)x >> i) */
3692
3693 const char *
3694 lshrqi3_out (rtx insn, rtx operands[], int *len)
3695 {
3696 if (GET_CODE (operands[2]) == CONST_INT)
3697 {
3698 int k;
3699
3700 if (!len)
3701 len = &k;
3702
3703 switch (INTVAL (operands[2]))
3704 {
3705 default:
3706 if (INTVAL (operands[2]) < 8)
3707 break;
3708
3709 *len = 1;
3710 return AS1 (clr,%0);
3711
3712 case 1:
3713 *len = 1;
3714 return AS1 (lsr,%0);
3715
3716 case 2:
3717 *len = 2;
3718 return (AS1 (lsr,%0) CR_TAB
3719 AS1 (lsr,%0));
3720 case 3:
3721 *len = 3;
3722 return (AS1 (lsr,%0) CR_TAB
3723 AS1 (lsr,%0) CR_TAB
3724 AS1 (lsr,%0));
3725
3726 case 4:
3727 if (test_hard_reg_class (LD_REGS, operands[0]))
3728 {
3729 *len=2;
3730 return (AS1 (swap,%0) CR_TAB
3731 AS2 (andi,%0,0x0f));
3732 }
3733 *len = 4;
3734 return (AS1 (lsr,%0) CR_TAB
3735 AS1 (lsr,%0) CR_TAB
3736 AS1 (lsr,%0) CR_TAB
3737 AS1 (lsr,%0));
3738
3739 case 5:
3740 if (test_hard_reg_class (LD_REGS, operands[0]))
3741 {
3742 *len = 3;
3743 return (AS1 (swap,%0) CR_TAB
3744 AS1 (lsr,%0) CR_TAB
3745 AS2 (andi,%0,0x7));
3746 }
3747 *len = 5;
3748 return (AS1 (lsr,%0) CR_TAB
3749 AS1 (lsr,%0) CR_TAB
3750 AS1 (lsr,%0) CR_TAB
3751 AS1 (lsr,%0) CR_TAB
3752 AS1 (lsr,%0));
3753
3754 case 6:
3755 if (test_hard_reg_class (LD_REGS, operands[0]))
3756 {
3757 *len = 4;
3758 return (AS1 (swap,%0) CR_TAB
3759 AS1 (lsr,%0) CR_TAB
3760 AS1 (lsr,%0) CR_TAB
3761 AS2 (andi,%0,0x3));
3762 }
3763 *len = 6;
3764 return (AS1 (lsr,%0) CR_TAB
3765 AS1 (lsr,%0) CR_TAB
3766 AS1 (lsr,%0) CR_TAB
3767 AS1 (lsr,%0) CR_TAB
3768 AS1 (lsr,%0) CR_TAB
3769 AS1 (lsr,%0));
3770
3771 case 7:
3772 *len = 3;
3773 return (AS1 (rol,%0) CR_TAB
3774 AS1 (clr,%0) CR_TAB
3775 AS1 (rol,%0));
3776 }
3777 }
3778 else if (CONSTANT_P (operands[2]))
3779 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3780
3781 out_shift_with_cnt (AS1 (lsr,%0),
3782 insn, operands, len, 1);
3783 return "";
3784 }
3785
3786 /* 16bit logical shift right ((unsigned short)x >> i) */
3787
3788 const char *
3789 lshrhi3_out (rtx insn, rtx operands[], int *len)
3790 {
3791 if (GET_CODE (operands[2]) == CONST_INT)
3792 {
3793 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3794 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3795 int k;
3796 int *t = len;
3797
3798 if (!len)
3799 len = &k;
3800
3801 switch (INTVAL (operands[2]))
3802 {
3803 default:
3804 if (INTVAL (operands[2]) < 16)
3805 break;
3806
3807 *len = 2;
3808 return (AS1 (clr,%B0) CR_TAB
3809 AS1 (clr,%A0));
3810
3811 case 4:
3812 if (optimize_size && scratch)
3813 break; /* 5 */
3814 if (ldi_ok)
3815 {
3816 *len = 6;
3817 return (AS1 (swap,%B0) CR_TAB
3818 AS1 (swap,%A0) CR_TAB
3819 AS2 (andi,%A0,0x0f) CR_TAB
3820 AS2 (eor,%A0,%B0) CR_TAB
3821 AS2 (andi,%B0,0x0f) CR_TAB
3822 AS2 (eor,%A0,%B0));
3823 }
3824 if (scratch)
3825 {
3826 *len = 7;
3827 return (AS1 (swap,%B0) CR_TAB
3828 AS1 (swap,%A0) CR_TAB
3829 AS2 (ldi,%3,0x0f) CR_TAB
3830 AS2 (and,%A0,%3) CR_TAB
3831 AS2 (eor,%A0,%B0) CR_TAB
3832 AS2 (and,%B0,%3) CR_TAB
3833 AS2 (eor,%A0,%B0));
3834 }
3835 break; /* optimize_size ? 6 : 8 */
3836
3837 case 5:
3838 if (optimize_size)
3839 break; /* scratch ? 5 : 6 */
3840 if (ldi_ok)
3841 {
3842 *len = 8;
3843 return (AS1 (lsr,%B0) CR_TAB
3844 AS1 (ror,%A0) CR_TAB
3845 AS1 (swap,%B0) CR_TAB
3846 AS1 (swap,%A0) CR_TAB
3847 AS2 (andi,%A0,0x0f) CR_TAB
3848 AS2 (eor,%A0,%B0) CR_TAB
3849 AS2 (andi,%B0,0x0f) CR_TAB
3850 AS2 (eor,%A0,%B0));
3851 }
3852 if (scratch)
3853 {
3854 *len = 9;
3855 return (AS1 (lsr,%B0) CR_TAB
3856 AS1 (ror,%A0) CR_TAB
3857 AS1 (swap,%B0) CR_TAB
3858 AS1 (swap,%A0) CR_TAB
3859 AS2 (ldi,%3,0x0f) CR_TAB
3860 AS2 (and,%A0,%3) CR_TAB
3861 AS2 (eor,%A0,%B0) CR_TAB
3862 AS2 (and,%B0,%3) CR_TAB
3863 AS2 (eor,%A0,%B0));
3864 }
3865 break; /* 10 */
3866
3867 case 6:
3868 if (optimize_size)
3869 break; /* scratch ? 5 : 6 */
3870 *len = 9;
3871 return (AS1 (clr,__tmp_reg__) CR_TAB
3872 AS1 (lsl,%A0) CR_TAB
3873 AS1 (rol,%B0) CR_TAB
3874 AS1 (rol,__tmp_reg__) CR_TAB
3875 AS1 (lsl,%A0) CR_TAB
3876 AS1 (rol,%B0) CR_TAB
3877 AS1 (rol,__tmp_reg__) CR_TAB
3878 AS2 (mov,%A0,%B0) CR_TAB
3879 AS2 (mov,%B0,__tmp_reg__));
3880
3881 case 7:
3882 *len = 5;
3883 return (AS1 (lsl,%A0) CR_TAB
3884 AS2 (mov,%A0,%B0) CR_TAB
3885 AS1 (rol,%A0) CR_TAB
3886 AS2 (sbc,%B0,%B0) CR_TAB
3887 AS1 (neg,%B0));
3888
3889 case 8:
3890 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3891 AS1 (clr,%B0));
3892
3893 case 9:
3894 *len = 3;
3895 return (AS2 (mov,%A0,%B0) CR_TAB
3896 AS1 (clr,%B0) CR_TAB
3897 AS1 (lsr,%A0));
3898
3899 case 10:
3900 *len = 4;
3901 return (AS2 (mov,%A0,%B0) CR_TAB
3902 AS1 (clr,%B0) CR_TAB
3903 AS1 (lsr,%A0) CR_TAB
3904 AS1 (lsr,%A0));
3905
3906 case 11:
3907 *len = 5;
3908 return (AS2 (mov,%A0,%B0) CR_TAB
3909 AS1 (clr,%B0) CR_TAB
3910 AS1 (lsr,%A0) CR_TAB
3911 AS1 (lsr,%A0) CR_TAB
3912 AS1 (lsr,%A0));
3913
3914 case 12:
3915 if (ldi_ok)
3916 {
3917 *len = 4;
3918 return (AS2 (mov,%A0,%B0) CR_TAB
3919 AS1 (clr,%B0) CR_TAB
3920 AS1 (swap,%A0) CR_TAB
3921 AS2 (andi,%A0,0x0f));
3922 }
3923 if (scratch)
3924 {
3925 *len = 5;
3926 return (AS2 (mov,%A0,%B0) CR_TAB
3927 AS1 (clr,%B0) CR_TAB
3928 AS1 (swap,%A0) CR_TAB
3929 AS2 (ldi,%3,0x0f) CR_TAB
3930 AS2 (and,%A0,%3));
3931 }
3932 *len = 6;
3933 return (AS2 (mov,%A0,%B0) CR_TAB
3934 AS1 (clr,%B0) CR_TAB
3935 AS1 (lsr,%A0) CR_TAB
3936 AS1 (lsr,%A0) CR_TAB
3937 AS1 (lsr,%A0) CR_TAB
3938 AS1 (lsr,%A0));
3939
3940 case 13:
3941 if (ldi_ok)
3942 {
3943 *len = 5;
3944 return (AS2 (mov,%A0,%B0) CR_TAB
3945 AS1 (clr,%B0) CR_TAB
3946 AS1 (swap,%A0) CR_TAB
3947 AS1 (lsr,%A0) CR_TAB
3948 AS2 (andi,%A0,0x07));
3949 }
3950 if (AVR_HAVE_MUL && scratch)
3951 {
3952 *len = 5;
3953 return (AS2 (ldi,%3,0x08) CR_TAB
3954 AS2 (mul,%B0,%3) CR_TAB
3955 AS2 (mov,%A0,r1) CR_TAB
3956 AS1 (clr,%B0) CR_TAB
3957 AS1 (clr,__zero_reg__));
3958 }
3959 if (optimize_size && scratch)
3960 break; /* 5 */
3961 if (scratch)
3962 {
3963 *len = 6;
3964 return (AS2 (mov,%A0,%B0) CR_TAB
3965 AS1 (clr,%B0) CR_TAB
3966 AS1 (swap,%A0) CR_TAB
3967 AS1 (lsr,%A0) CR_TAB
3968 AS2 (ldi,%3,0x07) CR_TAB
3969 AS2 (and,%A0,%3));
3970 }
3971 if (AVR_HAVE_MUL)
3972 {
3973 *len = 6;
3974 return ("set" CR_TAB
3975 AS2 (bld,r1,3) CR_TAB
3976 AS2 (mul,%B0,r1) CR_TAB
3977 AS2 (mov,%A0,r1) CR_TAB
3978 AS1 (clr,%B0) CR_TAB
3979 AS1 (clr,__zero_reg__));
3980 }
3981 *len = 7;
3982 return (AS2 (mov,%A0,%B0) CR_TAB
3983 AS1 (clr,%B0) CR_TAB
3984 AS1 (lsr,%A0) CR_TAB
3985 AS1 (lsr,%A0) CR_TAB
3986 AS1 (lsr,%A0) CR_TAB
3987 AS1 (lsr,%A0) CR_TAB
3988 AS1 (lsr,%A0));
3989
3990 case 14:
3991 if (AVR_HAVE_MUL && ldi_ok)
3992 {
3993 *len = 5;
3994 return (AS2 (ldi,%A0,0x04) CR_TAB
3995 AS2 (mul,%B0,%A0) CR_TAB
3996 AS2 (mov,%A0,r1) CR_TAB
3997 AS1 (clr,%B0) CR_TAB
3998 AS1 (clr,__zero_reg__));
3999 }
4000 if (AVR_HAVE_MUL && scratch)
4001 {
4002 *len = 5;
4003 return (AS2 (ldi,%3,0x04) CR_TAB
4004 AS2 (mul,%B0,%3) CR_TAB
4005 AS2 (mov,%A0,r1) CR_TAB
4006 AS1 (clr,%B0) CR_TAB
4007 AS1 (clr,__zero_reg__));
4008 }
4009 if (optimize_size && ldi_ok)
4010 {
4011 *len = 5;
4012 return (AS2 (mov,%A0,%B0) CR_TAB
4013 AS2 (ldi,%B0,6) "\n1:\t"
4014 AS1 (lsr,%A0) CR_TAB
4015 AS1 (dec,%B0) CR_TAB
4016 AS1 (brne,1b));
4017 }
4018 if (optimize_size && scratch)
4019 break; /* 5 */
4020 *len = 6;
4021 return (AS1 (clr,%A0) CR_TAB
4022 AS1 (lsl,%B0) CR_TAB
4023 AS1 (rol,%A0) CR_TAB
4024 AS1 (lsl,%B0) CR_TAB
4025 AS1 (rol,%A0) CR_TAB
4026 AS1 (clr,%B0));
4027
4028 case 15:
4029 *len = 4;
4030 return (AS1 (clr,%A0) CR_TAB
4031 AS1 (lsl,%B0) CR_TAB
4032 AS1 (rol,%A0) CR_TAB
4033 AS1 (clr,%B0));
4034 }
4035 len = t;
4036 }
4037 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4038 AS1 (ror,%A0)),
4039 insn, operands, len, 2);
4040 return "";
4041 }
4042
4043 /* 32bit logical shift right ((unsigned long)x >> i) */
4044
4045 const char *
4046 lshrsi3_out (rtx insn, rtx operands[], int *len)
4047 {
4048 if (GET_CODE (operands[2]) == CONST_INT)
4049 {
4050 int k;
4051 int *t = len;
4052
4053 if (!len)
4054 len = &k;
4055
4056 switch (INTVAL (operands[2]))
4057 {
4058 default:
4059 if (INTVAL (operands[2]) < 32)
4060 break;
4061
4062 if (AVR_HAVE_MOVW)
4063 return *len = 3, (AS1 (clr,%D0) CR_TAB
4064 AS1 (clr,%C0) CR_TAB
4065 AS2 (movw,%A0,%C0));
4066 *len = 4;
4067 return (AS1 (clr,%D0) CR_TAB
4068 AS1 (clr,%C0) CR_TAB
4069 AS1 (clr,%B0) CR_TAB
4070 AS1 (clr,%A0));
4071
4072 case 8:
4073 {
4074 int reg0 = true_regnum (operands[0]);
4075 int reg1 = true_regnum (operands[1]);
4076 *len = 4;
4077 if (reg0 <= reg1)
4078 return (AS2 (mov,%A0,%B1) CR_TAB
4079 AS2 (mov,%B0,%C1) CR_TAB
4080 AS2 (mov,%C0,%D1) CR_TAB
4081 AS1 (clr,%D0));
4082 else
4083 return (AS1 (clr,%D0) CR_TAB
4084 AS2 (mov,%C0,%D1) CR_TAB
4085 AS2 (mov,%B0,%C1) CR_TAB
4086 AS2 (mov,%A0,%B1));
4087 }
4088
4089 case 16:
4090 {
4091 int reg0 = true_regnum (operands[0]);
4092 int reg1 = true_regnum (operands[1]);
4093
4094 if (reg0 == reg1 + 2)
4095 return *len = 2, (AS1 (clr,%C0) CR_TAB
4096 AS1 (clr,%D0));
4097 if (AVR_HAVE_MOVW)
4098 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4099 AS1 (clr,%C0) CR_TAB
4100 AS1 (clr,%D0));
4101 else
4102 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4103 AS2 (mov,%A0,%C1) CR_TAB
4104 AS1 (clr,%C0) CR_TAB
4105 AS1 (clr,%D0));
4106 }
4107
4108 case 24:
4109 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4110 AS1 (clr,%B0) CR_TAB
4111 AS1 (clr,%C0) CR_TAB
4112 AS1 (clr,%D0));
4113
4114 case 31:
4115 *len = 6;
4116 return (AS1 (clr,%A0) CR_TAB
4117 AS2 (sbrc,%D0,7) CR_TAB
4118 AS1 (inc,%A0) CR_TAB
4119 AS1 (clr,%B0) CR_TAB
4120 AS1 (clr,%C0) CR_TAB
4121 AS1 (clr,%D0));
4122 }
4123 len = t;
4124 }
4125 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4126 AS1 (ror,%C0) CR_TAB
4127 AS1 (ror,%B0) CR_TAB
4128 AS1 (ror,%A0)),
4129 insn, operands, len, 4);
4130 return "";
4131 }
4132
4133 /* Modifies the length assigned to instruction INSN.  LEN is the
4134 initially computed length of the insn.  Returns the adjusted length. */
4135
4136 int
4137 adjust_insn_length (rtx insn, int len)
4138 {
4139 rtx patt = PATTERN (insn);
4140 rtx set;
4141
4142 if (GET_CODE (patt) == SET)
4143 {
4144 rtx op[10];
4145 op[1] = SET_SRC (patt);
4146 op[0] = SET_DEST (patt);
4147 if (general_operand (op[1], VOIDmode)
4148 && general_operand (op[0], VOIDmode))
4149 {
4150 switch (GET_MODE (op[0]))
4151 {
4152 case QImode:
4153 output_movqi (insn, op, &len);
4154 break;
4155 case HImode:
4156 output_movhi (insn, op, &len);
4157 break;
4158 case SImode:
4159 case SFmode:
4160 output_movsisf (insn, op, &len);
4161 break;
4162 default:
4163 break;
4164 }
4165 }
4166 else if (op[0] == cc0_rtx && REG_P (op[1]))
4167 {
4168 switch (GET_MODE (op[1]))
4169 {
4170 case HImode: out_tsthi (insn,&len); break;
4171 case SImode: out_tstsi (insn,&len); break;
4172 default: break;
4173 }
4174 }
4175 else if (GET_CODE (op[1]) == AND)
4176 {
4177 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4178 {
4179 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4180 if (GET_MODE (op[1]) == SImode)
4181 len = (((mask & 0xff) != 0xff)
4182 + ((mask & 0xff00) != 0xff00)
4183 + ((mask & 0xff0000L) != 0xff0000L)
4184 + ((mask & 0xff000000L) != 0xff000000L));
4185 else if (GET_MODE (op[1]) == HImode)
4186 len = (((mask & 0xff) != 0xff)
4187 + ((mask & 0xff00) != 0xff00));
4188 }
4189 }
4190 else if (GET_CODE (op[1]) == IOR)
4191 {
4192 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4193 {
4194 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4195 if (GET_MODE (op[1]) == SImode)
4196 len = (((mask & 0xff) != 0)
4197 + ((mask & 0xff00) != 0)
4198 + ((mask & 0xff0000L) != 0)
4199 + ((mask & 0xff000000L) != 0));
4200 else if (GET_MODE (op[1]) == HImode)
4201 len = (((mask & 0xff) != 0)
4202 + ((mask & 0xff00) != 0));
4203 }
4204 }
4205 }
4206 set = single_set (insn);
4207 if (set)
4208 {
4209 rtx op[10];
4210
4211 op[1] = SET_SRC (set);
4212 op[0] = SET_DEST (set);
4213
4214 if (GET_CODE (patt) == PARALLEL
4215 && general_operand (op[1], VOIDmode)
4216 && general_operand (op[0], VOIDmode))
4217 {
4218 if (XVECLEN (patt, 0) == 2)
4219 op[2] = XVECEXP (patt, 0, 1);
4220
4221 switch (GET_MODE (op[0]))
4222 {
4223 case QImode:
4224 len = 2;
4225 break;
4226 case HImode:
4227 output_reload_inhi (insn, op, &len);
4228 break;
4229 case SImode:
4230 case SFmode:
4231 output_reload_insisf (insn, op, &len);
4232 break;
4233 default:
4234 break;
4235 }
4236 }
4237 else if (GET_CODE (op[1]) == ASHIFT
4238 || GET_CODE (op[1]) == ASHIFTRT
4239 || GET_CODE (op[1]) == LSHIFTRT)
4240 {
4241 rtx ops[10];
4242 ops[0] = op[0];
4243 ops[1] = XEXP (op[1],0);
4244 ops[2] = XEXP (op[1],1);
4245 switch (GET_CODE (op[1]))
4246 {
4247 case ASHIFT:
4248 switch (GET_MODE (op[0]))
4249 {
4250 case QImode: ashlqi3_out (insn,ops,&len); break;
4251 case HImode: ashlhi3_out (insn,ops,&len); break;
4252 case SImode: ashlsi3_out (insn,ops,&len); break;
4253 default: break;
4254 }
4255 break;
4256 case ASHIFTRT:
4257 switch (GET_MODE (op[0]))
4258 {
4259 case QImode: ashrqi3_out (insn,ops,&len); break;
4260 case HImode: ashrhi3_out (insn,ops,&len); break;
4261 case SImode: ashrsi3_out (insn,ops,&len); break;
4262 default: break;
4263 }
4264 break;
4265 case LSHIFTRT:
4266 switch (GET_MODE (op[0]))
4267 {
4268 case QImode: lshrqi3_out (insn,ops,&len); break;
4269 case HImode: lshrhi3_out (insn,ops,&len); break;
4270 case SImode: lshrsi3_out (insn,ops,&len); break;
4271 default: break;
4272 }
4273 break;
4274 default:
4275 break;
4276 }
4277 }
4278 }
4279 return len;
4280 }
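
/* Worked example for the AND/IOR length estimate above (illustrative):
   for an HImode AND with the constant mask 0xff00, the low byte of the
   mask is not 0xff while the high byte is, so len = 1 -- a single
   instruction clearing the low byte suffices; a mask of 0xffff gives
   len = 0.  For IOR the roles are mirrored: only bytes with a nonzero
   mask byte need an instruction.  */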
4281
4282 /* Return nonzero if register REG is dead after INSN. */
4283
4284 int
4285 reg_unused_after (rtx insn, rtx reg)
4286 {
4287 return (dead_or_set_p (insn, reg)
4288 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4289 }
4290
4291 /* Return nonzero if REG is not used after INSN.
4292 We assume REG is a reload reg, and therefore does
4293 not live past labels. It may live past calls or jumps though. */
4294
4295 int
4296 _reg_unused_after (rtx insn, rtx reg)
4297 {
4298 enum rtx_code code;
4299 rtx set;
4300
4301 /* If the reg is set by this instruction, then it is safe for our
4302 case. Disregard the case where this is a store to memory, since
4303 we are checking a register used in the store address. */
4304 set = single_set (insn);
4305 if (set && GET_CODE (SET_DEST (set)) != MEM
4306 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4307 return 1;
4308
4309 while ((insn = NEXT_INSN (insn)))
4310 {
4311 rtx set;
4312 code = GET_CODE (insn);
4313
4314 #if 0
4315 /* If this is a label that existed before reload, then the register
4316 is dead here. However, if this is a label added by reorg, then
4317 the register may still be live here. We can't tell the difference,
4318 so we just ignore labels completely. */
4319 if (code == CODE_LABEL)
4320 return 1;
4321 /* else */
4322 #endif
4323
4324 if (!INSN_P (insn))
4325 continue;
4326
4327 if (code == JUMP_INSN)
4328 return 0;
4329
4330 /* If this is a sequence, we must handle them all at once.
4331 We could have for instance a call that sets the target register,
4332 and an insn in a delay slot that uses the register. In this case,
4333 we must return 0. */
4334 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4335 {
4336 int i;
4337 int retval = 0;
4338
4339 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4340 {
4341 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4342 rtx set = single_set (this_insn);
4343
4344 if (GET_CODE (this_insn) == CALL_INSN)
4345 code = CALL_INSN;
4346 else if (GET_CODE (this_insn) == JUMP_INSN)
4347 {
4348 if (INSN_ANNULLED_BRANCH_P (this_insn))
4349 return 0;
4350 code = JUMP_INSN;
4351 }
4352
4353 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4354 return 0;
4355 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4356 {
4357 if (GET_CODE (SET_DEST (set)) != MEM)
4358 retval = 1;
4359 else
4360 return 0;
4361 }
4362 if (set == 0
4363 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4364 return 0;
4365 }
4366 if (retval == 1)
4367 return 1;
4368 else if (code == JUMP_INSN)
4369 return 0;
4370 }
4371
4372 if (code == CALL_INSN)
4373 {
4374 rtx tem;
4375 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4376 if (GET_CODE (XEXP (tem, 0)) == USE
4377 && REG_P (XEXP (XEXP (tem, 0), 0))
4378 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4379 return 0;
4380 if (call_used_regs[REGNO (reg)])
4381 return 1;
4382 }
4383
4384 set = single_set (insn);
4385
4386 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4387 return 0;
4388 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4389 return GET_CODE (SET_DEST (set)) != MEM;
4390 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4391 return 0;
4392 }
4393 return 1;
4394 }
4395
4396 /* Target hook for assembling integer objects. The AVR version needs
4397 special handling for references to certain labels. */
4398
4399 static bool
4400 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4401 {
4402 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4403 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4404 || GET_CODE (x) == LABEL_REF))
4405 {
4406 fputs ("\t.word\tpm(", asm_out_file);
4407 output_addr_const (asm_out_file, x);
4408 fputs (")\n", asm_out_file);
4409 return true;
4410 }
4411 return default_assemble_integer (x, size, aligned_p);
4412 }
4413
4414 /* The routine used to output NUL terminated strings. We use a special
4415 version of this for most svr4 targets because doing so makes the
4416 generated assembly code more compact (and thus faster to assemble)
4417 as well as more readable, especially for targets like the i386
4418 (where the only alternative is to output character sequences as
4419 comma separated lists of numbers). */
4420
4421 void
4422 gas_output_limited_string (FILE *file, const char *str)
4423 {
4424 const unsigned char *_limited_str = (const unsigned char *) str;
4425 unsigned ch;
4426 fprintf (file, "%s\"", STRING_ASM_OP);
4427 for (; (ch = *_limited_str); _limited_str++)
4428 {
4429 int escape;
4430 switch (escape = ESCAPES[ch])
4431 {
4432 case 0:
4433 putc (ch, file);
4434 break;
4435 case 1:
4436 fprintf (file, "\\%03o", ch);
4437 break;
4438 default:
4439 putc ('\\', file);
4440 putc (escape, file);
4441 break;
4442 }
4443 }
4444 fprintf (file, "\"\n");
4445 }
4446
4447 /* The routine used to output sequences of byte values. We use a special
4448 version of this for most svr4 targets because doing so makes the
4449 generated assembly code more compact (and thus faster to assemble)
4450 as well as more readable. Note that if we find subparts of the
4451 character sequence which end with NUL (and which are shorter than
4452 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4453
4454 void
4455 gas_output_ascii (FILE *file, const char *str, size_t length)
4456 {
4457 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4458 const unsigned char *limit = _ascii_bytes + length;
4459 unsigned bytes_in_chunk = 0;
4460 for (; _ascii_bytes < limit; _ascii_bytes++)
4461 {
4462 const unsigned char *p;
4463 if (bytes_in_chunk >= 60)
4464 {
4465 fprintf (file, "\"\n");
4466 bytes_in_chunk = 0;
4467 }
4468 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4469 continue;
4470 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4471 {
4472 if (bytes_in_chunk > 0)
4473 {
4474 fprintf (file, "\"\n");
4475 bytes_in_chunk = 0;
4476 }
4477 gas_output_limited_string (file, (char*)_ascii_bytes);
4478 _ascii_bytes = p;
4479 }
4480 else
4481 {
4482 int escape;
4483 unsigned ch;
4484 if (bytes_in_chunk == 0)
4485 fprintf (file, "\t.ascii\t\"");
4486 switch (escape = ESCAPES[ch = *_ascii_bytes])
4487 {
4488 case 0:
4489 putc (ch, file);
4490 bytes_in_chunk++;
4491 break;
4492 case 1:
4493 fprintf (file, "\\%03o", ch);
4494 bytes_in_chunk += 4;
4495 break;
4496 default:
4497 putc ('\\', file);
4498 putc (escape, file);
4499 bytes_in_chunk += 2;
4500 break;
4501 }
4502 }
4503 }
4504 if (bytes_in_chunk > 0)
4505 fprintf (file, "\"\n");
4506 }
4507
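/* For illustration, assuming STRING_ASM_OP is "\t.string\t" as on the usual
   ELF/gas configurations: a five byte initializer holding 'a', 'b', NUL,
   'c', 'd' would be split by gas_output_ascii into a short NUL-terminated
   prefix, emitted through gas_output_limited_string as

       .string "ab"

   and a remainder without a terminating NUL, emitted as

       .ascii "cd"

   Bytes with an entry in the ESCAPES table come out either as a symbolic
   escape or as three-digit octal (\NNN).  */
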
4508 /* Return value is nonzero if pseudos that have been
4509 assigned to registers of class CLASS would likely be spilled
4510 because registers of CLASS are needed for spill registers. */
4511
4512 enum reg_class
4513 class_likely_spilled_p (int c)
4514 {
4515 return (c != ALL_REGS && c != ADDW_REGS);
4516 }
4517
4518 /* Valid attributes:
4519 progmem - put data into program memory;
4520 signal - make the function a hardware interrupt handler; interrupts
4521 stay disabled after the function prologue;
4522 interrupt - make the function a hardware interrupt handler; interrupts
4523 are re-enabled after the function prologue;
4524 naked - don't generate a function prologue/epilogue or a `ret' instruction.
4525 
4526 Only the `progmem' attribute is valid for a type; a usage sketch follows the attribute table below. */
4527
4528 const struct attribute_spec avr_attribute_table[] =
4529 {
4530 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4531 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4532 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4533 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4534 { "naked", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4535 { NULL, 0, 0, false, false, false, NULL }
4536 };
4537
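/* For illustration, a sketch of how these attributes appear in AVR user
   code (the vector numbers are made up; real ones depend on the MCU):

       const char table[] __attribute__ ((progmem)) = "kept in flash";
       void __vector_3 (void) __attribute__ ((signal));
       void __vector_4 (void) __attribute__ ((interrupt));
       void start (void) __attribute__ ((naked));

   TABLE ends up in program memory, __vector_3 runs with interrupts left
   disabled, __vector_4 re-enables them after the prologue, and start gets
   neither prologue, epilogue nor a final ret.  */
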
4538 /* Handle a "progmem" attribute; arguments as in
4539 struct attribute_spec.handler. */
4540 static tree
4541 avr_handle_progmem_attribute (tree *node, tree name,
4542 tree args ATTRIBUTE_UNUSED,
4543 int flags ATTRIBUTE_UNUSED,
4544 bool *no_add_attrs)
4545 {
4546 if (DECL_P (*node))
4547 {
4548 if (TREE_CODE (*node) == TYPE_DECL)
4549 {
4550 /* This is really a decl attribute, not a type attribute,
4551 but try to handle it for GCC 3.0 backwards compatibility. */
4552
4553 tree type = TREE_TYPE (*node);
4554 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4555 tree newtype = build_type_attribute_variant (type, attr);
4556
4557 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4558 TREE_TYPE (*node) = newtype;
4559 *no_add_attrs = true;
4560 }
4561 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4562 {
4563 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4564 {
4565 warning (0, "only initialized variables can be placed into "
4566 "program memory area");
4567 *no_add_attrs = true;
4568 }
4569 }
4570 else
4571 {
4572 warning (OPT_Wattributes, "%qs attribute ignored",
4573 IDENTIFIER_POINTER (name));
4574 *no_add_attrs = true;
4575 }
4576 }
4577
4578 return NULL_TREE;
4579 }
4580
4581 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4582 struct attribute_spec.handler. */
4583
4584 static tree
4585 avr_handle_fndecl_attribute (tree *node, tree name,
4586 tree args ATTRIBUTE_UNUSED,
4587 int flags ATTRIBUTE_UNUSED,
4588 bool *no_add_attrs)
4589 {
4590 if (TREE_CODE (*node) != FUNCTION_DECL)
4591 {
4592 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4593 IDENTIFIER_POINTER (name));
4594 *no_add_attrs = true;
4595 }
4596 else
4597 {
4598 const char *func_name = IDENTIFIER_POINTER (DECL_NAME (*node));
4599 const char *attr = IDENTIFIER_POINTER (name);
4600
4601 /* If the function has the 'signal' or 'interrupt' attribute, test to
4602 make sure that the name of the function is "__vector_NN" so as to
4603 catch when the user misspells the interrupt vector name. */
4604
4605 if (strncmp (attr, "interrupt", strlen ("interrupt")) == 0)
4606 {
4607 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4608 {
4609 warning (0, "%qs appears to be a misspelled interrupt handler",
4610 func_name);
4611 }
4612 }
4613 else if (strncmp (attr, "signal", strlen ("signal")) == 0)
4614 {
4615 if (strncmp (func_name, "__vector", strlen ("__vector")) != 0)
4616 {
4617 warning (0, "%qs appears to be a misspelled signal handler",
4618 func_name);
4619 }
4620 }
4621 }
4622
4623 return NULL_TREE;
4624 }
4625
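/* For illustration: with the name check above, a handler declared as

       void __vector_5 (void) __attribute__ ((signal));

   is accepted silently, while

       void uart_rx_done (void) __attribute__ ((signal));

   still compiles but draws the "appears to be a misspelled signal handler"
   warning, because its name does not start with "__vector".  */
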
4626 /* Look for the attribute `progmem' in DECL or its type;
4627 return 1 if found, 0 otherwise. */
4628
4629 int
4630 avr_progmem_p (tree decl, tree attributes)
4631 {
4632 tree a;
4633
4634 if (TREE_CODE (decl) != VAR_DECL)
4635 return 0;
4636
4637 if (NULL_TREE
4638 != lookup_attribute ("progmem", attributes))
4639 return 1;
4640
4641 a = decl;
4642 do
4643 a = TREE_TYPE (a);
4644 while (TREE_CODE (a) == ARRAY_TYPE);
4645
4646 if (a == error_mark_node)
4647 return 0;
4648
4649 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4650 return 1;
4651
4652 return 0;
4653 }
4654
4655 /* Add the section attribute if the variable is in progmem. */
4656
4657 static void
4658 avr_insert_attributes (tree node, tree *attributes)
4659 {
4660 if (TREE_CODE (node) == VAR_DECL
4661 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4662 && avr_progmem_p (node, *attributes))
4663 {
4664 static const char dsec[] = ".progmem.data";
4665 *attributes = tree_cons (get_identifier ("section"),
4666 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4667 *attributes);
4668
4669 /* ??? This seems sketchy. Why can't the user declare the
4670 thing const in the first place? */
4671 TREE_READONLY (node) = 1;
4672 }
4673 }
4674
4675 /* A get_unnamed_section callback for switching to progmem_section. */
4676
4677 static void
4678 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4679 {
4680 fprintf (asm_out_file,
4681 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4682 AVR_MEGA ? "a" : "ax");
4683 /* Should already be aligned; this is just to be safe if it isn't. */
4684 fprintf (asm_out_file, "\t.p2align 1\n");
4685 }
4686
4687 /* Implement TARGET_ASM_INIT_SECTIONS. */
4688
4689 static void
4690 avr_asm_init_sections (void)
4691 {
4692 progmem_section = get_unnamed_section (AVR_MEGA ? 0 : SECTION_CODE,
4693 avr_output_progmem_section_asm_op,
4694 NULL);
4695 readonly_data_section = data_section;
4696 }
4697
4698 static unsigned int
4699 avr_section_type_flags (tree decl, const char *name, int reloc)
4700 {
4701 unsigned int flags = default_section_type_flags (decl, name, reloc);
4702
4703 if (strncmp (name, ".noinit", 7) == 0)
4704 {
4705 if (decl && TREE_CODE (decl) == VAR_DECL
4706 && DECL_INITIAL (decl) == NULL_TREE)
4707 flags |= SECTION_BSS; /* @nobits */
4708 else
4709 warning (0, "only uninitialized variables can be placed in the "
4710 ".noinit section");
4711 }
4712
4713 return flags;
4714 }
4715
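/* For illustration: with the .noinit handling above,

       int boot_count __attribute__ ((section (".noinit")));

   lands in a @nobits (SECTION_BSS) section that is presumably left alone
   by the startup code, whereas giving the same variable an initializer,
   say "= 1", triggers the "only uninitialized variables ..." warning.  */
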
4716 /* Outputs some appropriate text to go at the start of an assembler
4717 file. */
4718
4719 static void
4720 avr_file_start (void)
4721 {
4722 if (avr_asm_only_p)
4723 error ("MCU %qs supported for assembler only", avr_mcu_name);
4724
4725 default_file_start ();
4726
4727 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4728 fputs ("__SREG__ = 0x3f\n"
4729 "__SP_H__ = 0x3e\n"
4730 "__SP_L__ = 0x3d\n", asm_out_file);
4731
4732 fputs ("__tmp_reg__ = 0\n"
4733 "__zero_reg__ = 1\n", asm_out_file);
4734
4735 /* FIXME: output these only if there is anything in the .data / .bss
4736 sections - some code size could be saved by not linking in the
4737 initialization code from libgcc if one or both sections are empty. */
4738 fputs ("\t.global __do_copy_data\n", asm_out_file);
4739 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4740
4741 commands_in_file = 0;
4742 commands_in_prologues = 0;
4743 commands_in_epilogues = 0;
4744 }
4745
4746 /* Outputs to the stdio stream FILE some
4747 appropriate text to go at the end of an assembler file. */
4748
4749 static void
4750 avr_file_end (void)
4751 {
4752 fputs ("/* File ", asm_out_file);
4753 output_quoted_string (asm_out_file, main_input_filename);
4754 fprintf (asm_out_file,
4755 ": code %4d = 0x%04x (%4d), prologues %3d, epilogues %3d */\n",
4756 commands_in_file,
4757 commands_in_file,
4758 commands_in_file - commands_in_prologues - commands_in_epilogues,
4759 commands_in_prologues, commands_in_epilogues);
4760 }
4761
4762 /* Choose the order in which to allocate hard registers for
4763 pseudo-registers local to a basic block.
4764
4765 Store the desired register order in the array `reg_alloc_order'.
4766 Element 0 should be the register to allocate first; element 1, the
4767 next register; and so on. */
4768
4769 void
4770 order_regs_for_local_alloc (void)
4771 {
4772 unsigned int i;
4773 static const int order_0[] = {
4774 24,25,
4775 18,19,
4776 20,21,
4777 22,23,
4778 30,31,
4779 26,27,
4780 28,29,
4781 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4782 0,1,
4783 32,33,34,35
4784 };
4785 static const int order_1[] = {
4786 18,19,
4787 20,21,
4788 22,23,
4789 24,25,
4790 30,31,
4791 26,27,
4792 28,29,
4793 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4794 0,1,
4795 32,33,34,35
4796 };
4797 static const int order_2[] = {
4798 25,24,
4799 23,22,
4800 21,20,
4801 19,18,
4802 30,31,
4803 26,27,
4804 28,29,
4805 17,16,
4806 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4807 1,0,
4808 32,33,34,35
4809 };
4810
4811 const int *order = (TARGET_ORDER_1 ? order_1 :
4812 TARGET_ORDER_2 ? order_2 :
4813 order_0);
4814 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4815 reg_alloc_order[i] = order[i];
4816 }
4817
4818
4819 /* Mutually recursive subroutine of avr_rtx_costs for calculating the
4820 cost of an RTX operand given its context. X is the rtx of the
4821 operand, MODE is its mode, and OUTER is the rtx_code of this
4822 operand's parent operator. */
4823
4824 static int
4825 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer)
4826 {
4827 enum rtx_code code = GET_CODE (x);
4828 int total;
4829
4830 switch (code)
4831 {
4832 case REG:
4833 case SUBREG:
4834 return 0;
4835
4836 case CONST_INT:
4837 case CONST_DOUBLE:
4838 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
4839
4840 default:
4841 break;
4842 }
4843
4844 total = 0;
4845 avr_rtx_costs (x, code, outer, &total);
4846 return total;
4847 }
4848
4849 /* The AVR backend's rtx_cost function. X is the rtx expression whose cost
4850 is to be calculated. Return true if the complete cost has been
4851 computed, and false if subexpressions should be scanned. In either
4852 case, *TOTAL contains the cost result. */
4853
4854 static bool
4855 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
4856 {
4857 enum machine_mode mode = GET_MODE (x);
4858 HOST_WIDE_INT val;
4859
4860 switch (code)
4861 {
4862 case CONST_INT:
4863 case CONST_DOUBLE:
4864 /* Immediate constants are as cheap as registers. */
4865 *total = 0;
4866 return true;
4867
4868 case MEM:
4869 case CONST:
4870 case LABEL_REF:
4871 case SYMBOL_REF:
4872 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4873 return true;
4874
4875 case NEG:
4876 switch (mode)
4877 {
4878 case QImode:
4879 case SFmode:
4880 *total = COSTS_N_INSNS (1);
4881 break;
4882
4883 case HImode:
4884 *total = COSTS_N_INSNS (3);
4885 break;
4886
4887 case SImode:
4888 *total = COSTS_N_INSNS (7);
4889 break;
4890
4891 default:
4892 return false;
4893 }
4894 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4895 return true;
4896
4897 case ABS:
4898 switch (mode)
4899 {
4900 case QImode:
4901 case SFmode:
4902 *total = COSTS_N_INSNS (1);
4903 break;
4904
4905 default:
4906 return false;
4907 }
4908 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4909 return true;
4910
4911 case NOT:
4912 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4913 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4914 return true;
4915
4916 case ZERO_EXTEND:
4917 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
4918 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4919 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4920 return true;
4921
4922 case SIGN_EXTEND:
4923 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
4924 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4925 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4926 return true;
4927
4928 case PLUS:
4929 switch (mode)
4930 {
4931 case QImode:
4932 *total = COSTS_N_INSNS (1);
4933 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4934 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4935 break;
4936
4937 case HImode:
4938 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4939 {
4940 *total = COSTS_N_INSNS (2);
4941 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4942 }
4943 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4944 *total = COSTS_N_INSNS (1);
4945 else
4946 *total = COSTS_N_INSNS (2);
4947 break;
4948
4949 case SImode:
4950 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4951 {
4952 *total = COSTS_N_INSNS (4);
4953 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4954 }
4955 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4956 *total = COSTS_N_INSNS (1);
4957 else
4958 *total = COSTS_N_INSNS (4);
4959 break;
4960
4961 default:
4962 return false;
4963 }
4964 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4965 return true;
4966
4967 case MINUS:
4968 case AND:
4969 case IOR:
4970 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4971 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4972 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4973 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4974 return true;
4975
4976 case XOR:
4977 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4978 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
4979 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
4980 return true;
4981
4982 case MULT:
4983 switch (mode)
4984 {
4985 case QImode:
4986 if (AVR_HAVE_MUL)
4987 *total = COSTS_N_INSNS (optimize_size ? 3 : 4);
4988 else if (optimize_size)
4989 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
4990 else
4991 return false;
4992 break;

4993 case HImode:
4994 if (AVR_HAVE_MUL)
4995 *total = COSTS_N_INSNS (optimize_size ? 7 : 10);
4996 else if (optimize_size)
4997 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
4998 else
4999 return false;
5000 break;

5001 default:
5002 return false;
5003 }
5004 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5005 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5006 return true;
5007
5008 case DIV:
5009 case MOD:
5010 case UDIV:
5011 case UMOD:
5012 if (optimize_size)
5013 *total = COSTS_N_INSNS (AVR_MEGA ? 2 : 1);
5014 else
5015 return false;
5016 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5017 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5018 return true;
5019
5020 case ASHIFT:
5021 switch (mode)
5022 {
5023 case QImode:
5024 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5025 {
5026 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5027 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5028 }
5029 else
5030 {
5031 val = INTVAL (XEXP (x, 1));
5032 if (val == 7)
5033 *total = COSTS_N_INSNS (3);
5034 else if (val >= 0 && val <= 7)
5035 *total = COSTS_N_INSNS (val);
5036 else
5037 *total = COSTS_N_INSNS (1);
5038 }
5039 break;
5040
5041 case HImode:
5042 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5043 {
5044 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5045 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5046 }
5047 else
5048 switch (INTVAL (XEXP (x, 1)))
5049 {
5050 case 0:
5051 *total = 0;
5052 break;
5053 case 1:
5054 case 8:
5055 *total = COSTS_N_INSNS (2);
5056 break;
5057 case 9:
5058 *total = COSTS_N_INSNS (3);
5059 break;
5060 case 2:
5061 case 3:
5062 case 10:
5063 case 15:
5064 *total = COSTS_N_INSNS (4);
5065 break;
5066 case 7:
5067 case 11:
5068 case 12:
5069 *total = COSTS_N_INSNS (5);
5070 break;
5071 case 4:
5072 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5073 break;
5074 case 6:
5075 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5076 break;
5077 case 5:
5078 *total = COSTS_N_INSNS (optimize_size ? 5 : 10);
5079 break;
5080 default:
5081 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5082 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5083 }
5084 break;
5085
5086 case SImode:
5087 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5088 {
5089 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5090 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5091 }
5092 else
5093 switch (INTVAL (XEXP (x, 1)))
5094 {
5095 case 0:
5096 *total = 0;
5097 break;
5098 case 24:
5099 *total = COSTS_N_INSNS (3);
5100 break;
5101 case 1:
5102 case 8:
5103 case 16:
5104 *total = COSTS_N_INSNS (4);
5105 break;
5106 case 31:
5107 *total = COSTS_N_INSNS (6);
5108 break;
5109 case 2:
5110 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5111 break;
5112 default:
5113 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5114 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5115 }
5116 break;
5117
5118 default:
5119 return false;
5120 }
5121 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5122 return true;
5123
5124 case ASHIFTRT:
5125 switch (mode)
5126 {
5127 case QImode:
5128 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5129 {
5130 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5131 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5132 }
5133 else
5134 {
5135 val = INTVAL (XEXP (x, 1));
5136 if (val == 6)
5137 *total = COSTS_N_INSNS (4);
5138 else if (val == 7)
5139 *total = COSTS_N_INSNS (2);
5140 else if (val >= 0 && val <= 7)
5141 *total = COSTS_N_INSNS (val);
5142 else
5143 *total = COSTS_N_INSNS (1);
5144 }
5145 break;
5146
5147 case HImode:
5148 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5149 {
5150 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5151 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5152 }
5153 else
5154 switch (INTVAL (XEXP (x, 1)))
5155 {
5156 case 0:
5157 *total = 0;
5158 break;
5159 case 1:
5160 *total = COSTS_N_INSNS (2);
5161 break;
5162 case 15:
5163 *total = COSTS_N_INSNS (3);
5164 break;
5165 case 2:
5166 case 7:
5167 case 8:
5168 case 9:
5169 *total = COSTS_N_INSNS (4);
5170 break;
5171 case 10:
5172 case 14:
5173 *total = COSTS_N_INSNS (5);
5174 break;
5175 case 11:
5176 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5177 break;
5178 case 12:
5179 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5180 break;
5181 case 6:
5182 case 13:
5183 *total = COSTS_N_INSNS (optimize_size ? 5 : 8);
5184 break;
5185 default:
5186 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5187 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5188 }
5189 break;
5190
5191 case SImode:
5192 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5193 {
5194 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5195 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5196 }
5197 else
5198 switch (INTVAL (XEXP (x, 1)))
5199 {
5200 case 0:
5201 *total = 0;
5202 break;
5203 case 1:
5204 *total = COSTS_N_INSNS (4);
5205 break;
5206 case 8:
5207 case 16:
5208 case 24:
5209 *total = COSTS_N_INSNS (6);
5210 break;
5211 case 2:
5212 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5213 break;
5214 case 31:
5215 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5216 break;
5217 default:
5218 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5219 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5220 }
5221 break;
5222
5223 default:
5224 return false;
5225 }
5226 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5227 return true;
5228
5229 case LSHIFTRT:
5230 switch (mode)
5231 {
5232 case QImode:
5233 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5234 {
5235 *total = COSTS_N_INSNS (optimize_size ? 4 : 17);
5236 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5237 }
5238 else
5239 {
5240 val = INTVAL (XEXP (x, 1));
5241 if (val == 7)
5242 *total = COSTS_N_INSNS (3);
5243 else if (val >= 0 && val <= 7)
5244 *total = COSTS_N_INSNS (val);
5245 else
5246 *total = COSTS_N_INSNS (1);
5247 }
5248 break;
5249
5250 case HImode:
5251 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5252 {
5253 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5254 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5255 }
5256 else
5257 switch (INTVAL (XEXP (x, 1)))
5258 {
5259 case 0:
5260 *total = 0;
5261 break;
5262 case 1:
5263 case 8:
5264 *total = COSTS_N_INSNS (2);
5265 break;
5266 case 9:
5267 *total = COSTS_N_INSNS (3);
5268 break;
5269 case 2:
5270 case 10:
5271 case 15:
5272 *total = COSTS_N_INSNS (4);
5273 break;
5274 case 7:
5275 case 11:
5276 *total = COSTS_N_INSNS (5);
5277 break;
5278 case 3:
5279 case 12:
5280 case 13:
5281 case 14:
5282 *total = COSTS_N_INSNS (optimize_size ? 5 : 6);
5283 break;
5284 case 4:
5285 *total = COSTS_N_INSNS (optimize_size ? 5 : 7);
5286 break;
5287 case 5:
5288 case 6:
5289 *total = COSTS_N_INSNS (optimize_size ? 5 : 9);
5290 break;
5291 default:
5292 *total = COSTS_N_INSNS (optimize_size ? 5 : 41);
5293 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5294 }
5295 break;
5296
5297 case SImode:
5298 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5299 {
5300 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5301 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5302 }
5303 else
5304 switch (INTVAL (XEXP (x, 1)))
5305 {
5306 case 0:
5307 *total = 0;
5308 break;
5309 case 1:
5310 *total = COSTS_N_INSNS (4);
5311 break;
5312 case 2:
5313 *total = COSTS_N_INSNS (optimize_size ? 7 : 8);
5314 break;
5315 case 8:
5316 case 16:
5317 case 24:
5318 *total = COSTS_N_INSNS (4);
5319 break;
5320 case 31:
5321 *total = COSTS_N_INSNS (6);
5322 break;
5323 default:
5324 *total = COSTS_N_INSNS (optimize_size ? 7 : 113);
5325 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5326 }
5327 break;
5328
5329 default:
5330 return false;
5331 }
5332 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5333 return true;
5334
5335 case COMPARE:
5336 switch (GET_MODE (XEXP (x, 0)))
5337 {
5338 case QImode:
5339 *total = COSTS_N_INSNS (1);
5340 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5341 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5342 break;
5343
5344 case HImode:
5345 *total = COSTS_N_INSNS (2);
5346 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5347 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5348 else if (INTVAL (XEXP (x, 1)) != 0)
5349 *total += COSTS_N_INSNS (1);
5350 break;
5351
5352 case SImode:
5353 *total = COSTS_N_INSNS (4);
5354 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5355 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code);
5356 else if (INTVAL (XEXP (x, 1)) != 0)
5357 *total += COSTS_N_INSNS (3);
5358 break;
5359
5360 default:
5361 return false;
5362 }
5363 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code);
5364 return true;
5365
5366 default:
5367 break;
5368 }
5369 return false;
5370 }
5371
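/* A worked example of the tables above: for a 16-bit addition with an
   out-of-range constant, say (plus:HI (reg:HI 24) (const_int 1000)), the
   PLUS/HImode branch charges COSTS_N_INSNS (2) (roughly a subi/sbci pair),
   the register operand adds nothing, and the hook returns true so the
   operands are not rescanned.  Had the constant been within -63 ... 63 the
   charge would have been COSTS_N_INSNS (1), matching a single adiw/sbiw.  */
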
5372 /* Calculate the cost of a memory address. */
5373
5374 static int
5375 avr_address_cost (rtx x)
5376 {
5377 if (GET_CODE (x) == PLUS
5378 && GET_CODE (XEXP (x,1)) == CONST_INT
5379 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5380 && INTVAL (XEXP (x,1)) >= 61)
5381 return 18;
5382 if (CONSTANT_ADDRESS_P (x))
5383 {
5384 if (avr_io_address_p (x, 1))
5385 return 2;
5386 return 4;
5387 }
5388 return 4;
5389 }
5390
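/* For illustration: a base plus displacement address whose offset is 61 or
   more, e.g. (plus:HI (reg:HI 28) (const_int 62)), is charged 18, presumably
   because the pointer may have to be adjusted around the access; a constant
   address that avr_io_address_p accepts (reachable with in/out) costs 2;
   every other address costs 4.  */
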
5391 /* Test for the extra memory constraint 'Q':
5392 a memory address based on the Y or Z pointer with a valid displacement. */
5393
5394 int
5395 extra_constraint_Q (rtx x)
5396 {
5397 if (GET_CODE (XEXP (x,0)) == PLUS
5398 && REG_P (XEXP (XEXP (x,0), 0))
5399 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5400 && (INTVAL (XEXP (XEXP (x,0), 1))
5401 <= MAX_LD_OFFSET (GET_MODE (x))))
5402 {
5403 rtx xx = XEXP (XEXP (x,0), 0);
5404 int regno = REGNO (xx);
5405 if (TARGET_ALL_DEBUG)
5406 {
5407 fprintf (stderr, ("extra_constraint:\n"
5408 "reload_completed: %d\n"
5409 "reload_in_progress: %d\n"),
5410 reload_completed, reload_in_progress);
5411 debug_rtx (x);
5412 }
5413 if (regno >= FIRST_PSEUDO_REGISTER)
5414 return 1; /* allocate pseudos */
5415 else if (regno == REG_Z || regno == REG_Y)
5416 return 1; /* strictly check */
5417 else if (xx == frame_pointer_rtx
5418 || xx == arg_pointer_rtx)
5419 return 1; /* XXX frame & arg pointer checks */
5420 }
5421 return 0;
5422 }
5423
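/* For illustration: a typical operand satisfying 'Q' is

       (mem:QI (plus:HI (reg:HI 28) (const_int 10)))

   i.e. a small displacement off the Y pointer, which a single ldd/std
   instruction can reach.  */
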
5424 /* Convert condition code CONDITION to the corresponding AVR-friendly
condition; GT/GTU/LE/LEU become GE/GEU/LT/LTU, which is valid once the
comparison constant has been bumped by one (see avr_reorg below). */
5425
5426 RTX_CODE
5427 avr_normalize_condition (RTX_CODE condition)
5428 {
5429 switch (condition)
5430 {
5431 case GT:
5432 return GE;
5433 case GTU:
5434 return GEU;
5435 case LE:
5436 return LT;
5437 case LEU:
5438 return LTU;
5439 default:
5440 gcc_unreachable ();
5441 }
5442 }
5443
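/* For example, the reorg pass below can turn a branch on
   (gt (reg:HI ...) (const_int 5)) into a branch on
   (ge (reg:HI ...) (const_int 6)), which the AVR can test directly
   with a compare followed by brge.  */
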
5444 /* Machine-dependent reorg pass: optimize compare-and-branch sequences by
swapping comparison operands or adjusting constants so that the following
conditional branch can use a condition code the AVR handles directly. */
5445
5446 static void
5447 avr_reorg (void)
5448 {
5449 rtx insn, pattern;
5450
5451 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5452 {
5453 if (! (GET_CODE (insn) == INSN
5454 || GET_CODE (insn) == CALL_INSN
5455 || GET_CODE (insn) == JUMP_INSN)
5456 || !single_set (insn))
5457 continue;
5458
5459 pattern = PATTERN (insn);
5460
5461 if (GET_CODE (pattern) == PARALLEL)
5462 pattern = XVECEXP (pattern, 0, 0);
5463 if (GET_CODE (pattern) == SET
5464 && SET_DEST (pattern) == cc0_rtx
5465 && compare_diff_p (insn))
5466 {
5467 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5468 {
5469 /* Now we are working on a compare insn. */
5470
5471 pattern = SET_SRC (pattern);
5472 if (true_regnum (XEXP (pattern,0)) >= 0
5473 && true_regnum (XEXP (pattern,1)) >= 0 )
5474 {
5475 rtx x = XEXP (pattern,0);
5476 rtx next = next_real_insn (insn);
5477 rtx pat = PATTERN (next);
5478 rtx src = SET_SRC (pat);
5479 rtx t = XEXP (src,0);
5480 PUT_CODE (t, swap_condition (GET_CODE (t)));
5481 XEXP (pattern,0) = XEXP (pattern,1);
5482 XEXP (pattern,1) = x;
5483 INSN_CODE (next) = -1;
5484 }
5485 else if (true_regnum (XEXP (pattern,0)) >= 0
5486 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5487 {
5488 rtx x = XEXP (pattern,1);
5489 rtx next = next_real_insn (insn);
5490 rtx pat = PATTERN (next);
5491 rtx src = SET_SRC (pat);
5492 rtx t = XEXP (src,0);
5493 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5494
5495 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5496 {
5497 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5498 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5499 INSN_CODE (next) = -1;
5500 INSN_CODE (insn) = -1;
5501 }
5502 }
5503 }
5504 else if (true_regnum (SET_SRC (pattern)) >= 0)
5505 {
5506 /* This is a tst insn */
5507 rtx next = next_real_insn (insn);
5508 rtx pat = PATTERN (next);
5509 rtx src = SET_SRC (pat);
5510 rtx t = XEXP (src,0);
5511
5512 PUT_CODE (t, swap_condition (GET_CODE (t)));
5513 SET_SRC (pattern) = gen_rtx_NEG (GET_MODE (SET_SRC (pattern)),
5514 SET_SRC (pattern));
5515 INSN_CODE (next) = -1;
5516 INSN_CODE (insn) = -1;
5517 }
5518 }
5519 }
5520 }
5521
5522 /* Return the register number used for the function return value. */
5523
5524 int
5525 avr_ret_register (void)
5526 {
5527 return 24;
5528 }
5529
5530 /* Create an RTX representing the place where a
5531 library function returns a value of mode MODE. */
5532
5533 rtx
5534 avr_libcall_value (enum machine_mode mode)
5535 {
5536 int offs = GET_MODE_SIZE (mode);
5537 if (offs < 2)
5538 offs = 2;
5539 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5540 }
5541
5542 /* Create an RTX representing the place where a
5543 function returns a value of data type VALTYPE. */
5544
5545 rtx
5546 avr_function_value (tree type, tree func ATTRIBUTE_UNUSED)
5547 {
5548 unsigned int offs;
5549
5550 if (TYPE_MODE (type) != BLKmode)
5551 return avr_libcall_value (TYPE_MODE (type));
5552
5553 offs = int_size_in_bytes (type);
5554 if (offs < 2)
5555 offs = 2;
5556 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5557 offs = GET_MODE_SIZE (SImode);
5558 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5559 offs = GET_MODE_SIZE (DImode);
5560
5561 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5562 }
5563
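/* For illustration, taking RET_REGISTER to be 24 (as avr_ret_register above
   suggests): an 8-bit value comes back in r24, a 16-bit value in r25:r24, a
   32-bit value (or a 3..4 byte aggregate) in r25...r22, and a 5..8 byte
   aggregate in r25...r18.  Larger aggregates are not returned in registers
   at all; see avr_return_in_memory near the end of this file.  */
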
5564 /* Places additional restrictions on the register class to
5565 use when it is necessary to copy value X into a register
5566 in class CLASS. */
5567
5568 enum reg_class
5569 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class class)
5570 {
5571 return class;
5572 }
5573
5574 int
5575 test_hard_reg_class (enum reg_class class, rtx x)
5576 {
5577 int regno = true_regnum (x);
5578 if (regno < 0)
5579 return 0;
5580
5581 if (TEST_HARD_REG_CLASS (class, regno))
5582 return 1;
5583
5584 return 0;
5585 }
5586
5587
5588 int
5589 jump_over_one_insn_p (rtx insn, rtx dest)
5590 {
5591 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5592 ? XEXP (dest, 0)
5593 : dest);
5594 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5595 int dest_addr = INSN_ADDRESSES (uid);
5596 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5597 }
5598
5599 /* Returns 1 if a value of mode MODE can be stored starting with hard
5600 register number REGNO. On the enhanced core, anything larger than
5601 1 byte must start in an even-numbered register for "movw" to work
5602 (this way we don't have to check for odd registers everywhere). */
5603
5604 int
5605 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5606 {
5607 /* The only thing that can go into registers r28:r29 is a Pmode value. */
5608 if (regno == REG_Y && mode == Pmode)
5609 return 1;
5610
5611 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5612 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5613 return 0;
5614
5615 if (mode == QImode)
5616 return 1;
5617
5618 /* Modes larger than QImode occupy consecutive registers. */
5619 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5620 return 0;
5621
5622 /* All modes larger than QImode should start in an even register. */
5623 return !(regno & 1);
5624 }
5625
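/* For illustration: QImode fits in any register except the r28:r29 frame
   pointer pair, which is reserved for a Pmode pointer; HImode is accepted
   in r24 but rejected in r25, since anything wider than a byte must start
   in an even register; and SImode in r26 is rejected because r26...r29
   would overlap r28:r29.  */
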
5626 /* Returns 1 if X is a valid address for an I/O register of size SIZE
5627 (1 or 2). Used for lds/sts -> in/out optimization. Add 0x20 to SIZE
5628 to check for the lower half of I/O space (for cbi/sbi/sbic/sbis). */
5629
5630 int
5631 avr_io_address_p (rtx x, int size)
5632 {
5633 return (optimize > 0 && GET_CODE (x) == CONST_INT
5634 && INTVAL (x) >= 0x20 && INTVAL (x) <= 0x60 - size);
5635 }
5636
5637 const char *
5638 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5639 {
5640 int tmp;
5641 if (!len)
5642 len = &tmp;
5643
5644 if (GET_CODE (operands[1]) == CONST_INT)
5645 {
5646 int val = INTVAL (operands[1]);
5647 if ((val & 0xff) == 0)
5648 {
5649 *len = 3;
5650 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5651 AS2 (ldi,%2,hi8(%1)) CR_TAB
5652 AS2 (mov,%B0,%2));
5653 }
5654 else if ((val & 0xff00) == 0)
5655 {
5656 *len = 3;
5657 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5658 AS2 (mov,%A0,%2) CR_TAB
5659 AS2 (mov,%B0,__zero_reg__));
5660 }
5661 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5662 {
5663 *len = 3;
5664 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5665 AS2 (mov,%A0,%2) CR_TAB
5666 AS2 (mov,%B0,%2));
5667 }
5668 }
5669 *len = 4;
5670 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5671 AS2 (mov,%A0,%2) CR_TAB
5672 AS2 (ldi,%2,hi8(%1)) CR_TAB
5673 AS2 (mov,%B0,%2));
5674 }
5675
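/* For illustration, say the destination is r15:r14, the scratch register
   is r31 and the constant is 0x2100: the low byte is zero, so the first
   special case above applies and roughly the sequence

       mov r14,__zero_reg__
       ldi r31,hi8(0x2100)
       mov r15,r31

   is emitted; a constant with two arbitrary bytes takes the full
   four-instruction ldi/mov/ldi/mov form instead.  */
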
5676
5677 const char *
5678 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5679 {
5680 rtx src = operands[1];
5681 int cnst = (GET_CODE (src) == CONST_INT);
5682
5683 if (len)
5684 {
5685 if (cnst)
5686 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5687 + ((INTVAL (src) & 0xff00) != 0)
5688 + ((INTVAL (src) & 0xff0000) != 0)
5689 + ((INTVAL (src) & 0xff000000) != 0);
5690 else
5691 *len = 8;
5692
5693 return "";
5694 }
5695
5696 if (cnst && ((INTVAL (src) & 0xff) == 0))
5697 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5698 else
5699 {
5700 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5701 output_asm_insn (AS2 (mov, %A0, %2), operands);
5702 }
5703 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5704 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5705 else
5706 {
5707 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5708 output_asm_insn (AS2 (mov, %B0, %2), operands);
5709 }
5710 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5711 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5712 else
5713 {
5714 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5715 output_asm_insn (AS2 (mov, %C0, %2), operands);
5716 }
5717 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5718 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5719 else
5720 {
5721 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5722 output_asm_insn (AS2 (mov, %D0, %2), operands);
5723 }
5724 return "";
5725 }
5726
5727 void
5728 avr_output_bld (rtx operands[], int bit_nr)
5729 {
5730 static char s[] = "bld %A0,0";
5731
5732 s[5] = 'A' + (bit_nr >> 3);
5733 s[8] = '0' + (bit_nr & 7);
5734 output_asm_insn (s, operands);
5735 }
5736
5737 void
5738 avr_output_addr_vec_elt (FILE *stream, int value)
5739 {
5740 switch_to_section (progmem_section);
5741 if (AVR_MEGA)
5742 fprintf (stream, "\t.word pm(.L%d)\n", value);
5743 else
5744 fprintf (stream, "\trjmp .L%d\n", value);
5745
5746 jump_tables_size++;
5747 }
5748
5749 /* Return 1 if SCRATCH is safe to allocate as a scratch
5750 register (for a define_peephole2) in the current function. */
5751
5752 int
5753 avr_peep2_scratch_safe (rtx scratch)
5754 {
5755 if ((interrupt_function_p (current_function_decl)
5756 || signal_function_p (current_function_decl))
5757 && leaf_function_p ())
5758 {
5759 int first_reg = true_regnum (scratch);
5760 int last_reg = first_reg + GET_MODE_SIZE (GET_MODE (scratch)) - 1;
5761 int reg;
5762
5763 for (reg = first_reg; reg <= last_reg; reg++)
5764 {
5765 if (!regs_ever_live[reg])
5766 return 0;
5767 }
5768 }
5769 return 1;
5770 }
5771
5772 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5773 or memory location in the I/O space (QImode only).
5774
5775 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5776 Operand 1: register operand to test, or CONST_INT memory address.
5777 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5778 Operand 3: label to jump to if the test is true. */
5779
5780 const char *
5781 avr_out_sbxx_branch (rtx insn, rtx operands[])
5782 {
5783 enum rtx_code comp = GET_CODE (operands[0]);
5784 int long_jump = (get_attr_length (insn) >= 4);
5785 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5786
5787 if (comp == GE)
5788 comp = EQ;
5789 else if (comp == LT)
5790 comp = NE;
5791
5792 if (reverse)
5793 comp = reverse_condition (comp);
5794
5795 if (GET_CODE (operands[1]) == CONST_INT)
5796 {
5797 if (INTVAL (operands[1]) < 0x40)
5798 {
5799 if (comp == EQ)
5800 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5801 else
5802 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5803 }
5804 else
5805 {
5806 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5807 if (comp == EQ)
5808 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5809 else
5810 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5811 }
5812 }
5813 else /* GET_CODE (operands[1]) == REG */
5814 {
5815 if (GET_MODE (operands[1]) == QImode)
5816 {
5817 if (comp == EQ)
5818 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5819 else
5820 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5821 }
5822 else /* HImode or SImode */
5823 {
5824 static char buf[] = "sbrc %A1,0";
5825 int bit_nr = exact_log2 (INTVAL (operands[2])
5826 & GET_MODE_MASK (GET_MODE (operands[1])));
5827
5828 buf[3] = (comp == EQ) ? 's' : 'c';
5829 buf[6] = 'A' + (bit_nr >> 3);
5830 buf[9] = '0' + (bit_nr & 7);
5831 output_asm_insn (buf, operands);
5832 }
5833 }
5834
5835 if (long_jump)
5836 return (AS1 (rjmp,.+4) CR_TAB
5837 AS1 (jmp,%3));
5838 if (!reverse)
5839 return AS1 (rjmp,%3);
5840 return "";
5841 }
5842
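/* For illustration: for an EQ test of bit 3 of an I/O register whose RAM
   address is 0x36 (operand 1 = const_int 0x36, operand 2 = 3), the code
   above emits an sbis on I/O address 0x36-0x20 followed, in the short
   non-reversed case, by an rjmp to the label, so the jump is taken only
   when the bit reads as zero; bits of ordinary registers are tested with
   sbrs/sbrc instead.  */
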
5843 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
5844
5845 static void
5846 avr_asm_out_ctor (rtx symbol, int priority)
5847 {
5848 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5849 default_ctor_section_asm_out_constructor (symbol, priority);
5850 }
5851
5852 /* Worker function for TARGET_ASM_DESTRUCTOR. */
5853
5854 static void
5855 avr_asm_out_dtor (rtx symbol, int priority)
5856 {
5857 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5858 default_dtor_section_asm_out_destructor (symbol, priority);
5859 }
5860
5861 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5862
5863 static bool
5864 avr_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
5865 {
5866 if (TYPE_MODE (type) == BLKmode)
5867 {
5868 HOST_WIDE_INT size = int_size_in_bytes (type);
5869 return (size == -1 || size > 8);
5870 }
5871 else
5872 return false;
5873 }
5874
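/* For illustration, with the 8-byte cut-off above: an aggregate of up to
   8 bytes, e.g. a struct holding two 4-byte longs, still comes back in
   registers, while a 9-byte or variable-sized aggregate is returned in
   memory through a pointer supplied by the caller.  Scalars never take
   this path, since their mode is not BLKmode.  */
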
5875 #include "gt-avr.h"