1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "ggc.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "params.h"
47 #include "df.h"
48
49 /* Maximal allowed offset for an address in the LD command */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
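/* For instance MAX_LD_OFFSET (HImode) is 62: a 2-byte value at
   displacement 62 is still reachable, e.g. with "ldd r24,Y+62" and
   "ldd r25,Y+63", 63 being the largest displacement LDD can encode
   (the register pair here is only illustrative).  */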
51
52 static int avr_naked_function_p (tree);
53 static int interrupt_function_p (tree);
54 static int signal_function_p (tree);
55 static int avr_OS_task_function_p (tree);
56 static int avr_OS_main_function_p (tree);
57 static int avr_regs_to_save (HARD_REG_SET *);
58 static int get_sequence_length (rtx insns);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code);
62 static int avr_num_arg_regs (enum machine_mode, tree);
63
64 static RTX_CODE compare_condition (rtx insn);
65 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
66 static int compare_sign_p (rtx insn);
67 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
69 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
70 static bool avr_assemble_integer (rtx, unsigned int, int);
71 static void avr_file_start (void);
72 static void avr_file_end (void);
73 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
74 static void avr_asm_function_end_prologue (FILE *);
75 static void avr_asm_function_begin_epilogue (FILE *);
76 static rtx avr_function_value (const_tree, const_tree, bool);
77 static void avr_insert_attributes (tree, tree *);
78 static void avr_asm_init_sections (void);
79 static unsigned int avr_section_type_flags (tree, const char *, int);
80
81 static void avr_reorg (void);
82 static void avr_asm_out_ctor (rtx, int);
83 static void avr_asm_out_dtor (rtx, int);
84 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
85 static bool avr_rtx_costs (rtx, int, int, int *, bool);
86 static int avr_address_cost (rtx, bool);
87 static bool avr_return_in_memory (const_tree, const_tree);
88 static struct machine_function * avr_init_machine_status (void);
89 static rtx avr_builtin_setjmp_frame_value (void);
90 static bool avr_hard_regno_scratch_ok (unsigned int);
91 static unsigned int avr_case_values_threshold (void);
92 static bool avr_frame_pointer_required_p (void);
93
94 /* Allocate registers from r25 to r8 for parameters for function calls. */
95 #define FIRST_CUM_REG 26
96
97 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
98 static GTY(()) rtx tmp_reg_rtx;
99
100 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
101 static GTY(()) rtx zero_reg_rtx;
102
103 /* AVR register names {"r0", "r1", ..., "r31"} */
104 static const char *const avr_regnames[] = REGISTER_NAMES;
105
106 /* This holds the last insn address. */
107 static int last_insn_address = 0;
108
109 /* Preprocessor macros to define depending on MCU type. */
110 const char *avr_extra_arch_macro;
111
112 /* Current architecture. */
113 const struct base_arch_s *avr_current_arch;
114
115 /* Current device. */
116 const struct mcu_type_s *avr_current_device;
117
118 section *progmem_section;
119
120 /* AVR attributes. */
121 static const struct attribute_spec avr_attribute_table[] =
122 {
123 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
124 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
125 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
126 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
127 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
128 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
129 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
130 { NULL, 0, 0, false, false, false, NULL }
131 };
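/* Illustrative user-level uses of these attributes (examples only, not
   part of this file's logic):

       unsigned char table[4] __attribute__ ((progmem)) = {1, 2, 3, 4};
       void __vector_5 (void) __attribute__ ((signal));
       int main (void) __attribute__ ((OS_main));

   The avr-libc ISR macro applies "signal"/"interrupt" to interrupt
   handlers in this way.  */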
132 \f
133 /* Initialize the GCC target structure. */
134 #undef TARGET_ASM_ALIGNED_HI_OP
135 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
136 #undef TARGET_ASM_ALIGNED_SI_OP
137 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
138 #undef TARGET_ASM_UNALIGNED_HI_OP
139 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
140 #undef TARGET_ASM_UNALIGNED_SI_OP
141 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
142 #undef TARGET_ASM_INTEGER
143 #define TARGET_ASM_INTEGER avr_assemble_integer
144 #undef TARGET_ASM_FILE_START
145 #define TARGET_ASM_FILE_START avr_file_start
146 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
147 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
148 #undef TARGET_ASM_FILE_END
149 #define TARGET_ASM_FILE_END avr_file_end
150
151 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
152 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
153 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
154 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
155 #undef TARGET_FUNCTION_VALUE
156 #define TARGET_FUNCTION_VALUE avr_function_value
157 #undef TARGET_ATTRIBUTE_TABLE
158 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
159 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
160 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
161 #undef TARGET_INSERT_ATTRIBUTES
162 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
163 #undef TARGET_SECTION_TYPE_FLAGS
164 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
165 #undef TARGET_RTX_COSTS
166 #define TARGET_RTX_COSTS avr_rtx_costs
167 #undef TARGET_ADDRESS_COST
168 #define TARGET_ADDRESS_COST avr_address_cost
169 #undef TARGET_MACHINE_DEPENDENT_REORG
170 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
171
172 #undef TARGET_LEGITIMIZE_ADDRESS
173 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
174
175 #undef TARGET_RETURN_IN_MEMORY
176 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
177
178 #undef TARGET_STRICT_ARGUMENT_NAMING
179 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
180
181 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
182 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
183
184 #undef TARGET_HARD_REGNO_SCRATCH_OK
185 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
186 #undef TARGET_CASE_VALUES_THRESHOLD
187 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
188
189 #undef TARGET_LEGITIMATE_ADDRESS_P
190 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
191
192 #undef TARGET_FRAME_POINTER_REQUIRED
193 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
194
195 struct gcc_target targetm = TARGET_INITIALIZER;
196 \f
197 void
198 avr_override_options (void)
199 {
200 const struct mcu_type_s *t;
201
202 flag_delete_null_pointer_checks = 0;
203
204 for (t = avr_mcu_types; t->name; t++)
205 if (strcmp (t->name, avr_mcu_name) == 0)
206 break;
207
208 if (!t->name)
209 {
210 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
211 avr_mcu_name);
212 for (t = avr_mcu_types; t->name; t++)
213 fprintf (stderr," %s\n", t->name);
214 }
215
216 avr_current_arch = &avr_arch_types[t->arch];
217 avr_extra_arch_macro = t->macro;
218
219 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
220 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
221
222 init_machine_status = avr_init_machine_status;
223 }
224
225 /* Map register numbers to register classes. */
226
227 static const enum reg_class reg_class_tab[]={
228 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
229 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
230 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
231 GENERAL_REGS, /* r0 - r15 */
232 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
233 LD_REGS, /* r16 - 23 */
234 ADDW_REGS,ADDW_REGS, /* r24,r25 */
235 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
236 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
237 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
238 STACK_REG,STACK_REG /* SPL,SPH */
239 };
240
241 /* Function to set up the backend function structure. */
242
243 static struct machine_function *
244 avr_init_machine_status (void)
245 {
246 return ((struct machine_function *)
247 ggc_alloc_cleared (sizeof (struct machine_function)));
248 }
249
250 /* Return register class for register R. */
251
252 enum reg_class
253 avr_regno_reg_class (int r)
254 {
255 if (r <= 33)
256 return reg_class_tab[r];
257 return ALL_REGS;
258 }
259
260 /* Return nonzero if FUNC is a naked function. */
261
262 static int
263 avr_naked_function_p (tree func)
264 {
265 tree a;
266
267 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
268
269 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
270 return a != NULL_TREE;
271 }
272
273 /* Return nonzero if FUNC is an interrupt function as specified
274 by the "interrupt" attribute. */
275
276 static int
277 interrupt_function_p (tree func)
278 {
279 tree a;
280
281 if (TREE_CODE (func) != FUNCTION_DECL)
282 return 0;
283
284 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
285 return a != NULL_TREE;
286 }
287
288 /* Return nonzero if FUNC is a signal function as specified
289 by the "signal" attribute. */
290
291 static int
292 signal_function_p (tree func)
293 {
294 tree a;
295
296 if (TREE_CODE (func) != FUNCTION_DECL)
297 return 0;
298
299 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
300 return a != NULL_TREE;
301 }
302
303 /* Return nonzero if FUNC is an OS_task function. */
304
305 static int
306 avr_OS_task_function_p (tree func)
307 {
308 tree a;
309
310 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
311
312 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
313 return a != NULL_TREE;
314 }
315
316 /* Return nonzero if FUNC is an OS_main function. */
317
318 static int
319 avr_OS_main_function_p (tree func)
320 {
321 tree a;
322
323 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
324
325 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
326 return a != NULL_TREE;
327 }
328
329 /* Return the number of hard registers to push/pop in the prologue/epilogue
330 of the current function, and optionally store these registers in SET. */
331
332 static int
333 avr_regs_to_save (HARD_REG_SET *set)
334 {
335 int reg, count;
336 int int_or_sig_p = (interrupt_function_p (current_function_decl)
337 || signal_function_p (current_function_decl));
338
339 if (!reload_completed)
340 cfun->machine->is_leaf = leaf_function_p ();
341
342 if (set)
343 CLEAR_HARD_REG_SET (*set);
344 count = 0;
345
346 /* No need to save any registers if the function never returns or
347 has the "OS_task" or "OS_main" attribute. */
348 if (TREE_THIS_VOLATILE (current_function_decl)
349 || cfun->machine->is_OS_task
350 || cfun->machine->is_OS_main)
351 return 0;
352
353 for (reg = 0; reg < 32; reg++)
354 {
355 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
356 any global register variables. */
357 if (fixed_regs[reg])
358 continue;
359
360 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
361 || (df_regs_ever_live_p (reg)
362 && (int_or_sig_p || !call_used_regs[reg])
363 && !(frame_pointer_needed
364 && (reg == REG_Y || reg == (REG_Y+1)))))
365 {
366 if (set)
367 SET_HARD_REG_BIT (*set, reg);
368 count++;
369 }
370 }
371 return count;
372 }
373
374 /* Return true if register FROM can be eliminated via register TO. */
375
376 bool
377 avr_can_eliminate (int from, int to)
378 {
379 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
380 || ((from == FRAME_POINTER_REGNUM
381 || from == FRAME_POINTER_REGNUM + 1)
382 && !frame_pointer_needed));
383 }
384
385 /* Compute offset between arg_pointer and frame_pointer. */
386
387 int
388 avr_initial_elimination_offset (int from, int to)
389 {
390 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
391 return 0;
392 else
393 {
394 int offset = frame_pointer_needed ? 2 : 0;
395 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
396
397 offset += avr_regs_to_save (NULL);
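      /* From the arg pointer to the frame pointer we skip the local
         variables, the saved registers, the saved frame pointer (if any)
         and the return address (2 or 3 bytes); the extra 1 is because
         Y/SP point one byte below the frame.  */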
398 return get_frame_size () + (avr_pc_size) + 1 + offset;
399 }
400 }
401
402 /* The actual start of the frame is virtual_stack_vars_rtx, which is offset
403 from the frame pointer by +STARTING_FRAME_OFFSET.
404 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
405 avoids creating an add/sub of the offset in nonlocal goto and setjmp. */
406
407 rtx avr_builtin_setjmp_frame_value (void)
408 {
409 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
410 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
411 }
412
413 /* Return 1 if the function epilogue is just a single "ret". */
414
415 int
416 avr_simple_epilogue (void)
417 {
418 return (! frame_pointer_needed
419 && get_frame_size () == 0
420 && avr_regs_to_save (NULL) == 0
421 && ! interrupt_function_p (current_function_decl)
422 && ! signal_function_p (current_function_decl)
423 && ! avr_naked_function_p (current_function_decl)
424 && ! TREE_THIS_VOLATILE (current_function_decl));
425 }
426
427 /* Return the length of the contiguous sequence of live call-saved registers, or 0 if the live registers do not form one contiguous sequence. */
428
429 static int
430 sequent_regs_live (void)
431 {
432 int reg;
433 int live_seq=0;
434 int cur_seq=0;
435
436 for (reg = 0; reg < 18; ++reg)
437 {
438 if (!call_used_regs[reg])
439 {
440 if (df_regs_ever_live_p (reg))
441 {
442 ++live_seq;
443 ++cur_seq;
444 }
445 else
446 cur_seq = 0;
447 }
448 }
449
450 if (!frame_pointer_needed)
451 {
452 if (df_regs_ever_live_p (REG_Y))
453 {
454 ++live_seq;
455 ++cur_seq;
456 }
457 else
458 cur_seq = 0;
459
460 if (df_regs_ever_live_p (REG_Y+1))
461 {
462 ++live_seq;
463 ++cur_seq;
464 }
465 else
466 cur_seq = 0;
467 }
468 else
469 {
470 cur_seq += 2;
471 live_seq += 2;
472 }
473 return (cur_seq == live_seq) ? live_seq : 0;
474 }
475
476 /* Return the total length of the sequence of insns INSNS. */
477
478 int
479 get_sequence_length (rtx insns)
480 {
481 rtx insn;
482 int length;
483
484 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
485 length += get_attr_length (insn);
486
487 return length;
488 }
489
490 /* Expand the function prologue as RTL. */
491
492 void
493 expand_prologue (void)
494 {
495 int live_seq;
496 HARD_REG_SET set;
497 int minimize;
498 HOST_WIDE_INT size = get_frame_size();
499 /* Define templates for push instructions. */
500 rtx pushbyte = gen_rtx_MEM (QImode,
501 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
502 rtx pushword = gen_rtx_MEM (HImode,
503 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
504 rtx insn;
505
506 last_insn_address = 0;
507
508 /* Init cfun->machine. */
509 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
510 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
511 cfun->machine->is_signal = signal_function_p (current_function_decl);
512 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
513 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
514
515 /* Prologue: naked. */
516 if (cfun->machine->is_naked)
517 {
518 return;
519 }
520
521 avr_regs_to_save (&set);
522 live_seq = sequent_regs_live ();
523 minimize = (TARGET_CALL_PROLOGUES
524 && !cfun->machine->is_interrupt
525 && !cfun->machine->is_signal
526 && !cfun->machine->is_OS_task
527 && !cfun->machine->is_OS_main
528 && live_seq);
529
530 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
531 {
532 if (cfun->machine->is_interrupt)
533 {
534 /* Enable interrupts. */
535 insn = emit_insn (gen_enable_interrupt ());
536 RTX_FRAME_RELATED_P (insn) = 1;
537 }
538
539 /* Push zero reg. */
540 insn = emit_move_insn (pushbyte, zero_reg_rtx);
541 RTX_FRAME_RELATED_P (insn) = 1;
542
543 /* Push tmp reg. */
544 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
545 RTX_FRAME_RELATED_P (insn) = 1;
546
547 /* Push SREG. */
548 insn = emit_move_insn (tmp_reg_rtx,
549 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
550 RTX_FRAME_RELATED_P (insn) = 1;
551 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
552 RTX_FRAME_RELATED_P (insn) = 1;
553
554 /* Push RAMPZ. */
555 if(AVR_HAVE_RAMPZ
556 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
557 {
558 insn = emit_move_insn (tmp_reg_rtx,
559 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
560 RTX_FRAME_RELATED_P (insn) = 1;
561 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
562 RTX_FRAME_RELATED_P (insn) = 1;
563 }
564
565 /* Clear zero reg. */
566 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
567 RTX_FRAME_RELATED_P (insn) = 1;
568
569 /* Prevent any attempt to delete the setting of ZERO_REG! */
570 emit_use (zero_reg_rtx);
571 }
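  /* -mcall-prologues: instead of pushing registers one by one, load the
     frame size into X and let the out-of-line __prologue_saves__ code
     (provided by libgcc) push the LIVE_SEQ registers and set up the
     frame.  */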
572 if (minimize && (frame_pointer_needed
573 || (AVR_2_BYTE_PC && live_seq > 6)
574 || live_seq > 7))
575 {
576 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
577 gen_int_mode (size, HImode));
578 RTX_FRAME_RELATED_P (insn) = 1;
579
580 insn =
581 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
582 gen_int_mode (size + live_seq, HImode)));
583 RTX_FRAME_RELATED_P (insn) = 1;
584 }
585 else
586 {
587 int reg;
588 for (reg = 0; reg < 32; ++reg)
589 {
590 if (TEST_HARD_REG_BIT (set, reg))
591 {
592 /* Emit push of register to save. */
593 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
594 RTX_FRAME_RELATED_P (insn) = 1;
595 }
596 }
597 if (frame_pointer_needed)
598 {
599 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
600 {
601 /* Push frame pointer. */
602 insn = emit_move_insn (pushword, frame_pointer_rtx);
603 RTX_FRAME_RELATED_P (insn) = 1;
604 }
605
606 if (!size)
607 {
608 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
609 RTX_FRAME_RELATED_P (insn) = 1;
610 }
611 else
612 {
613 /* Creating a frame can be done by direct manipulation of the
614 stack or via the frame pointer. These two methods are:
615 fp=sp
616 fp-=size
617 sp=fp
618 OR
619 sp-=size
620 fp=sp
621 The optimum method depends on function type, stack and frame size.
622 To avoid complex logic, both sequences are generated and the
623 shorter one is selected. */
624 rtx myfp;
625 rtx fp_plus_insns;
626 rtx sp_plus_insns = NULL_RTX;
627
628 if (TARGET_TINY_STACK)
629 {
630 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
631 over 'sbiw' (2 cycles, same size). */
632 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
633 }
634 else
635 {
636 /* Normal sized addition. */
637 myfp = frame_pointer_rtx;
638 }
639
640 /* Method 1-Adjust frame pointer. */
641 start_sequence ();
642
643 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
644 RTX_FRAME_RELATED_P (insn) = 1;
645
646 insn =
647 emit_move_insn (myfp,
648 gen_rtx_PLUS (GET_MODE(myfp), myfp,
649 gen_int_mode (-size,
650 GET_MODE(myfp))));
651 RTX_FRAME_RELATED_P (insn) = 1;
652
653 /* Copy to stack pointer. */
654 if (TARGET_TINY_STACK)
655 {
656 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
657 RTX_FRAME_RELATED_P (insn) = 1;
658 }
659 else if (TARGET_NO_INTERRUPTS
660 || cfun->machine->is_signal
661 || cfun->machine->is_OS_main)
662 {
663 insn =
664 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
665 frame_pointer_rtx));
666 RTX_FRAME_RELATED_P (insn) = 1;
667 }
668 else if (cfun->machine->is_interrupt)
669 {
670 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
671 frame_pointer_rtx));
672 RTX_FRAME_RELATED_P (insn) = 1;
673 }
674 else
675 {
676 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
677 RTX_FRAME_RELATED_P (insn) = 1;
678 }
679
680 fp_plus_insns = get_insns ();
681 end_sequence ();
682
683 /* Method 2-Adjust Stack pointer. */
684 if (size <= 6)
685 {
686 start_sequence ();
687
688 insn =
689 emit_move_insn (stack_pointer_rtx,
690 gen_rtx_PLUS (HImode,
691 stack_pointer_rtx,
692 gen_int_mode (-size,
693 HImode)));
694 RTX_FRAME_RELATED_P (insn) = 1;
695
696 insn =
697 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
698 RTX_FRAME_RELATED_P (insn) = 1;
699
700 sp_plus_insns = get_insns ();
701 end_sequence ();
702 }
703
704 /* Use shortest method. */
705 if (size <= 6 && (get_sequence_length (sp_plus_insns)
706 < get_sequence_length (fp_plus_insns)))
707 emit_insn (sp_plus_insns);
708 else
709 emit_insn (fp_plus_insns);
710 }
711 }
712 }
713 }
714
715 /* Output summary at end of function prologue. */
716
717 static void
718 avr_asm_function_end_prologue (FILE *file)
719 {
720 if (cfun->machine->is_naked)
721 {
722 fputs ("/* prologue: naked */\n", file);
723 }
724 else
725 {
726 if (cfun->machine->is_interrupt)
727 {
728 fputs ("/* prologue: Interrupt */\n", file);
729 }
730 else if (cfun->machine->is_signal)
731 {
732 fputs ("/* prologue: Signal */\n", file);
733 }
734 else
735 fputs ("/* prologue: function */\n", file);
736 }
737 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
738 get_frame_size());
739 }
740
741
742 /* Implement EPILOGUE_USES. */
743
744 int
745 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
746 {
747 if (reload_completed
748 && cfun->machine
749 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
750 return 1;
751 return 0;
752 }
753
754 /* Output RTL epilogue. */
755
756 void
757 expand_epilogue (void)
758 {
759 int reg;
760 int live_seq;
761 HARD_REG_SET set;
762 int minimize;
763 HOST_WIDE_INT size = get_frame_size();
764
765 /* epilogue: naked */
766 if (cfun->machine->is_naked)
767 {
768 emit_jump_insn (gen_return ());
769 return;
770 }
771
772 avr_regs_to_save (&set);
773 live_seq = sequent_regs_live ();
774 minimize = (TARGET_CALL_PROLOGUES
775 && !cfun->machine->is_interrupt
776 && !cfun->machine->is_signal
777 && !cfun->machine->is_OS_task
778 && !cfun->machine->is_OS_main
779 && live_seq);
780
781 if (minimize && (frame_pointer_needed || live_seq > 4))
782 {
783 if (frame_pointer_needed)
784 {
785 /* Get rid of frame. */
786 emit_move_insn(frame_pointer_rtx,
787 gen_rtx_PLUS (HImode, frame_pointer_rtx,
788 gen_int_mode (size, HImode)));
789 }
790 else
791 {
792 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
793 }
794
795 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
796 }
797 else
798 {
799 if (frame_pointer_needed)
800 {
801 if (size)
802 {
803 /* Try two methods to adjust the stack and select the shorter one. */
804 rtx myfp;
805 rtx fp_plus_insns;
806 rtx sp_plus_insns = NULL_RTX;
807
808 if (TARGET_TINY_STACK)
809 {
810 /* The high byte (r29) doesn't change - prefer 'subi'
811 (1 cycle) over 'sbiw' (2 cycles, same size). */
812 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
813 }
814 else
815 {
816 /* Normal sized addition. */
817 myfp = frame_pointer_rtx;
818 }
819
820 /* Method 1-Adjust frame pointer. */
821 start_sequence ();
822
823 emit_move_insn (myfp,
824 gen_rtx_PLUS (HImode, myfp,
825 gen_int_mode (size,
826 GET_MODE(myfp))));
827
828 /* Copy to stack pointer. */
829 if (TARGET_TINY_STACK)
830 {
831 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
832 }
833 else if (TARGET_NO_INTERRUPTS
834 || cfun->machine->is_signal)
835 {
836 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
837 frame_pointer_rtx));
838 }
839 else if (cfun->machine->is_interrupt)
840 {
841 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
842 frame_pointer_rtx));
843 }
844 else
845 {
846 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
847 }
848
849 fp_plus_insns = get_insns ();
850 end_sequence ();
851
852 /* Method 2-Adjust Stack pointer. */
853 if (size <= 5)
854 {
855 start_sequence ();
856
857 emit_move_insn (stack_pointer_rtx,
858 gen_rtx_PLUS (HImode, stack_pointer_rtx,
859 gen_int_mode (size,
860 HImode)));
861
862 sp_plus_insns = get_insns ();
863 end_sequence ();
864 }
865
866 /* Use shortest method. */
867 if (size <= 5 && (get_sequence_length (sp_plus_insns)
868 < get_sequence_length (fp_plus_insns)))
869 emit_insn (sp_plus_insns);
870 else
871 emit_insn (fp_plus_insns);
872 }
873 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
874 {
875 /* Restore previous frame_pointer. */
876 emit_insn (gen_pophi (frame_pointer_rtx));
877 }
878 }
879 /* Restore used registers. */
880 for (reg = 31; reg >= 0; --reg)
881 {
882 if (TEST_HARD_REG_BIT (set, reg))
883 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
884 }
885 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
886 {
887 /* Restore RAMPZ using tmp reg as scratch. */
888 if(AVR_HAVE_RAMPZ
889 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
890 {
891 emit_insn (gen_popqi (tmp_reg_rtx));
892 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
893 tmp_reg_rtx);
894 }
895
896 /* Restore SREG using tmp reg as scratch. */
897 emit_insn (gen_popqi (tmp_reg_rtx));
898
899 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
900 tmp_reg_rtx);
901
902 /* Restore tmp REG. */
903 emit_insn (gen_popqi (tmp_reg_rtx));
904
905 /* Restore zero REG. */
906 emit_insn (gen_popqi (zero_reg_rtx));
907 }
908
909 emit_jump_insn (gen_return ());
910 }
911 }
912
913 /* Output summary messages at beginning of function epilogue. */
914
915 static void
916 avr_asm_function_begin_epilogue (FILE *file)
917 {
918 fprintf (file, "/* epilogue start */\n");
919 }
920
921 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
922 machine for a memory operand of mode MODE. */
923
924 bool
925 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
926 {
927 enum reg_class r = NO_REGS;
928
929 if (TARGET_ALL_DEBUG)
930 {
931 fprintf (stderr, "mode: (%s) %s %s %s %s:",
932 GET_MODE_NAME(mode),
933 strict ? "(strict)": "",
934 reload_completed ? "(reload_completed)": "",
935 reload_in_progress ? "(reload_in_progress)": "",
936 reg_renumber ? "(reg_renumber)" : "");
937 if (GET_CODE (x) == PLUS
938 && REG_P (XEXP (x, 0))
939 && GET_CODE (XEXP (x, 1)) == CONST_INT
940 && INTVAL (XEXP (x, 1)) >= 0
941 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
942 && reg_renumber
943 )
944 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
945 true_regnum (XEXP (x, 0)));
946 debug_rtx (x);
947 }
948 if (!strict && GET_CODE (x) == SUBREG)
949 x = SUBREG_REG (x);
950 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
951 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
952 r = POINTER_REGS;
953 else if (CONSTANT_ADDRESS_P (x))
954 r = ALL_REGS;
955 else if (GET_CODE (x) == PLUS
956 && REG_P (XEXP (x, 0))
957 && GET_CODE (XEXP (x, 1)) == CONST_INT
958 && INTVAL (XEXP (x, 1)) >= 0)
959 {
960 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
961 if (fit)
962 {
963 if (! strict
964 || REGNO (XEXP (x,0)) == REG_X
965 || REGNO (XEXP (x,0)) == REG_Y
966 || REGNO (XEXP (x,0)) == REG_Z)
967 r = BASE_POINTER_REGS;
968 if (XEXP (x,0) == frame_pointer_rtx
969 || XEXP (x,0) == arg_pointer_rtx)
970 r = BASE_POINTER_REGS;
971 }
972 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
973 r = POINTER_Y_REGS;
974 }
975 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
976 && REG_P (XEXP (x, 0))
977 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
978 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
979 {
980 r = POINTER_REGS;
981 }
982 if (TARGET_ALL_DEBUG)
983 {
984 fprintf (stderr, " ret = %c\n", r + '0');
985 }
986 return r == NO_REGS ? 0 : (int)r;
987 }
988
989 /* Attempt to replace X with a valid
990 memory address for an operand of mode MODE. */
991
992 rtx
993 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
994 {
995 x = oldx;
996 if (TARGET_ALL_DEBUG)
997 {
998 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
999 debug_rtx (oldx);
1000 }
1001
1002 if (GET_CODE (oldx) == PLUS
1003 && REG_P (XEXP (oldx,0)))
1004 {
1005 if (REG_P (XEXP (oldx,1)))
1006 x = force_reg (GET_MODE (oldx), oldx);
1007 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1008 {
1009 int offs = INTVAL (XEXP (oldx,1));
1010 if (frame_pointer_rtx != XEXP (oldx,0))
1011 if (offs > MAX_LD_OFFSET (mode))
1012 {
1013 if (TARGET_ALL_DEBUG)
1014 fprintf (stderr, "force_reg (big offset)\n");
1015 x = force_reg (GET_MODE (oldx), oldx);
1016 }
1017 }
1018 }
1019 return x;
1020 }
1021
1022
1023 /* Return a pointer register name as a string. */
1024
1025 static const char *
1026 ptrreg_to_str (int regno)
1027 {
1028 switch (regno)
1029 {
1030 case REG_X: return "X";
1031 case REG_Y: return "Y";
1032 case REG_Z: return "Z";
1033 default:
1034 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1035 }
1036 return NULL;
1037 }
1038
1039 /* Return the condition name as a string.
1040 Used when constructing conditional jumps. */
1041
1042 static const char *
1043 cond_string (enum rtx_code code)
1044 {
1045 switch (code)
1046 {
1047 case NE:
1048 return "ne";
1049 case EQ:
1050 return "eq";
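    /* For GE and LT below: when the V flag is unusable the comparison
       has effectively been reduced to a sign test, so branch on the N
       flag (brpl/brmi) instead of using brge/brlt.  */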
1051 case GE:
1052 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1053 return "pl";
1054 else
1055 return "ge";
1056 case LT:
1057 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1058 return "mi";
1059 else
1060 return "lt";
1061 case GEU:
1062 return "sh";
1063 case LTU:
1064 return "lo";
1065 default:
1066 gcc_unreachable ();
1067 }
1068 }
1069
1070 /* Output ADDR to FILE as address. */
1071
1072 void
1073 print_operand_address (FILE *file, rtx addr)
1074 {
1075 switch (GET_CODE (addr))
1076 {
1077 case REG:
1078 fprintf (file, ptrreg_to_str (REGNO (addr)));
1079 break;
1080
1081 case PRE_DEC:
1082 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1083 break;
1084
1085 case POST_INC:
1086 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1087 break;
1088
1089 default:
1090 if (CONSTANT_ADDRESS_P (addr)
1091 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1092 || GET_CODE (addr) == LABEL_REF))
1093 {
1094 fprintf (file, "gs(");
1095 output_addr_const (file,addr);
1096 fprintf (file ,")");
1097 }
1098 else
1099 output_addr_const (file, addr);
1100 }
1101 }
1102
1103
1104 /* Output X as assembler operand to file FILE. */
1105
1106 void
1107 print_operand (FILE *file, rtx x, int code)
1108 {
1109 int abcd = 0;
1110
1111 if (code >= 'A' && code <= 'D')
1112 abcd = code - 'A';
1113
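  /* '~' prints 'r' on devices without JMP/CALL, so that e.g. "%~call"
     expands to "rcall"; '!' prints 'e' when EIJMP/EICALL are available,
     turning an indirect jump/call into its extended form.  */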
1114 if (code == '~')
1115 {
1116 if (!AVR_HAVE_JMP_CALL)
1117 fputc ('r', file);
1118 }
1119 else if (code == '!')
1120 {
1121 if (AVR_HAVE_EIJMP_EICALL)
1122 fputc ('e', file);
1123 }
1124 else if (REG_P (x))
1125 {
1126 if (x == zero_reg_rtx)
1127 fprintf (file, "__zero_reg__");
1128 else
1129 fprintf (file, reg_names[true_regnum (x) + abcd]);
1130 }
1131 else if (GET_CODE (x) == CONST_INT)
1132 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1133 else if (GET_CODE (x) == MEM)
1134 {
1135 rtx addr = XEXP (x,0);
1136
1137 if (CONSTANT_P (addr) && abcd)
1138 {
1139 fputc ('(', file);
1140 output_address (addr);
1141 fprintf (file, ")+%d", abcd);
1142 }
1143 else if (code == 'o')
1144 {
1145 if (GET_CODE (addr) != PLUS)
1146 fatal_insn ("bad address, not (reg+disp):", addr);
1147
1148 print_operand (file, XEXP (addr, 1), 0);
1149 }
1150 else if (code == 'p' || code == 'r')
1151 {
1152 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1153 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1154
1155 if (code == 'p')
1156 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1157 else
1158 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1159 }
1160 else if (GET_CODE (addr) == PLUS)
1161 {
1162 print_operand_address (file, XEXP (addr,0));
1163 if (REGNO (XEXP (addr, 0)) == REG_X)
1164 fatal_insn ("internal compiler error. Bad address:"
1165 ,addr);
1166 fputc ('+', file);
1167 print_operand (file, XEXP (addr,1), code);
1168 }
1169 else
1170 print_operand_address (file, addr);
1171 }
1172 else if (GET_CODE (x) == CONST_DOUBLE)
1173 {
1174 long val;
1175 REAL_VALUE_TYPE rv;
1176 if (GET_MODE (x) != SFmode)
1177 fatal_insn ("internal compiler error. Unknown mode:", x);
1178 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1179 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1180 fprintf (file, "0x%lx", val);
1181 }
1182 else if (code == 'j')
1183 fputs (cond_string (GET_CODE (x)), file);
1184 else if (code == 'k')
1185 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1186 else
1187 print_operand_address (file, x);
1188 }
1189
1190 /* Update the condition code in the INSN. */
1191
1192 void
1193 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1194 {
1195 rtx set;
1196
1197 switch (get_attr_cc (insn))
1198 {
1199 case CC_NONE:
1200 /* Insn does not affect CC at all. */
1201 break;
1202
1203 case CC_SET_N:
1204 CC_STATUS_INIT;
1205 break;
1206
1207 case CC_SET_ZN:
1208 set = single_set (insn);
1209 CC_STATUS_INIT;
1210 if (set)
1211 {
1212 cc_status.flags |= CC_NO_OVERFLOW;
1213 cc_status.value1 = SET_DEST (set);
1214 }
1215 break;
1216
1217 case CC_SET_CZN:
1218 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1219 The V flag may or may not be known but that's ok because
1220 alter_cond will change tests to use EQ/NE. */
1221 set = single_set (insn);
1222 CC_STATUS_INIT;
1223 if (set)
1224 {
1225 cc_status.value1 = SET_DEST (set);
1226 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1227 }
1228 break;
1229
1230 case CC_COMPARE:
1231 set = single_set (insn);
1232 CC_STATUS_INIT;
1233 if (set)
1234 cc_status.value1 = SET_SRC (set);
1235 break;
1236
1237 case CC_CLOBBER:
1238 /* Insn doesn't leave CC in a usable state. */
1239 CC_STATUS_INIT;
1240
1241 /* Correct CC for an ashrqi3 whose shift count is a CONST_INT > 0 and != 6. */
1242 set = single_set (insn);
1243 if (set)
1244 {
1245 rtx src = SET_SRC (set);
1246
1247 if (GET_CODE (src) == ASHIFTRT
1248 && GET_MODE (src) == QImode)
1249 {
1250 rtx x = XEXP (src, 1);
1251
1252 if (GET_CODE (x) == CONST_INT
1253 && INTVAL (x) > 0
1254 && INTVAL (x) != 6)
1255 {
1256 cc_status.value1 = SET_DEST (set);
1257 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1258 }
1259 }
1260 }
1261 break;
1262 }
1263 }
1264
1265 /* Return maximum number of consecutive registers of
1266 class CLASS needed to hold a value of mode MODE. */
1267
1268 int
1269 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1270 {
1271 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1272 }
1273
1274 /* Choose mode for jump insn:
1275 1 - relative jump in range -63 <= x <= 62 ;
1276 2 - relative jump in range -2046 <= x <= 2045 ;
1277 3 - absolute jump (only for ATmega[16]03). */
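/* The ranges above correspond to the -64..+63 word reach of the BRxx
   conditional branches and the -2048..+2047 word reach of RJMP, each
   with a small safety margin.  */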
1278
1279 int
1280 avr_jump_mode (rtx x, rtx insn)
1281 {
1282 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1283 ? XEXP (x, 0) : x));
1284 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1285 int jump_distance = cur_addr - dest_addr;
1286
1287 if (-63 <= jump_distance && jump_distance <= 62)
1288 return 1;
1289 else if (-2046 <= jump_distance && jump_distance <= 2045)
1290 return 2;
1291 else if (AVR_HAVE_JMP_CALL)
1292 return 3;
1293
1294 return 2;
1295 }
1296
1297 /* Return an AVR conditional jump command.
1298 X is a comparison RTX.
1299 LEN is a number returned by the avr_jump_mode function.
1300 If REVERSE is nonzero, the condition code in X must be reversed. */
1301
1302 const char *
1303 ret_cond_branch (rtx x, int len, int reverse)
1304 {
1305 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1306
1307 switch (cond)
1308 {
1309 case GT:
1310 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1311 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1312 AS1 (brpl,%0)) :
1313 len == 2 ? (AS1 (breq,.+4) CR_TAB
1314 AS1 (brmi,.+2) CR_TAB
1315 AS1 (rjmp,%0)) :
1316 (AS1 (breq,.+6) CR_TAB
1317 AS1 (brmi,.+4) CR_TAB
1318 AS1 (jmp,%0)));
1319
1320 else
1321 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1322 AS1 (brge,%0)) :
1323 len == 2 ? (AS1 (breq,.+4) CR_TAB
1324 AS1 (brlt,.+2) CR_TAB
1325 AS1 (rjmp,%0)) :
1326 (AS1 (breq,.+6) CR_TAB
1327 AS1 (brlt,.+4) CR_TAB
1328 AS1 (jmp,%0)));
1329 case GTU:
1330 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1331 AS1 (brsh,%0)) :
1332 len == 2 ? (AS1 (breq,.+4) CR_TAB
1333 AS1 (brlo,.+2) CR_TAB
1334 AS1 (rjmp,%0)) :
1335 (AS1 (breq,.+6) CR_TAB
1336 AS1 (brlo,.+4) CR_TAB
1337 AS1 (jmp,%0)));
1338 case LE:
1339 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1340 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1341 AS1 (brmi,%0)) :
1342 len == 2 ? (AS1 (breq,.+2) CR_TAB
1343 AS1 (brpl,.+2) CR_TAB
1344 AS1 (rjmp,%0)) :
1345 (AS1 (breq,.+2) CR_TAB
1346 AS1 (brpl,.+4) CR_TAB
1347 AS1 (jmp,%0)));
1348 else
1349 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1350 AS1 (brlt,%0)) :
1351 len == 2 ? (AS1 (breq,.+2) CR_TAB
1352 AS1 (brge,.+2) CR_TAB
1353 AS1 (rjmp,%0)) :
1354 (AS1 (breq,.+2) CR_TAB
1355 AS1 (brge,.+4) CR_TAB
1356 AS1 (jmp,%0)));
1357 case LEU:
1358 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1359 AS1 (brlo,%0)) :
1360 len == 2 ? (AS1 (breq,.+2) CR_TAB
1361 AS1 (brsh,.+2) CR_TAB
1362 AS1 (rjmp,%0)) :
1363 (AS1 (breq,.+2) CR_TAB
1364 AS1 (brsh,.+4) CR_TAB
1365 AS1 (jmp,%0)));
1366 default:
1367 if (reverse)
1368 {
1369 switch (len)
1370 {
1371 case 1:
1372 return AS1 (br%k1,%0);
1373 case 2:
1374 return (AS1 (br%j1,.+2) CR_TAB
1375 AS1 (rjmp,%0));
1376 default:
1377 return (AS1 (br%j1,.+4) CR_TAB
1378 AS1 (jmp,%0));
1379 }
1380 }
1381 else
1382 {
1383 switch (len)
1384 {
1385 case 1:
1386 return AS1 (br%j1,%0);
1387 case 2:
1388 return (AS1 (br%k1,.+2) CR_TAB
1389 AS1 (rjmp,%0));
1390 default:
1391 return (AS1 (br%k1,.+4) CR_TAB
1392 AS1 (jmp,%0));
1393 }
1394 }
1395 }
1396 return "";
1397 }
1398
1399 /* Predicate function for an immediate operand that fits into a byte (8 bits). */
1400
1401 int
1402 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1403 {
1404 return (GET_CODE (op) == CONST_INT
1405 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1406 }
1407
1408 /* Output all insn addresses and their sizes into the assembly language
1409 output file. This is helpful for debugging whether the length attributes
1410 in the md file are correct.
1411 Output insn cost for next insn. */
1412
1413 void
1414 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1415 int num_operands ATTRIBUTE_UNUSED)
1416 {
1417 int uid = INSN_UID (insn);
1418
1419 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1420 {
1421 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1422 INSN_ADDRESSES (uid),
1423 INSN_ADDRESSES (uid) - last_insn_address,
1424 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1425 }
1426 last_insn_address = INSN_ADDRESSES (uid);
1427 }
1428
1429 /* Return 0 if undefined, 1 if always true or always false. */
1430
1431 int
1432 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1433 {
1434 unsigned int max = (mode == QImode ? 0xff :
1435 mode == HImode ? 0xffff :
1436 mode == SImode ? 0xffffffff : 0);
1437 if (max && op && GET_CODE (x) == CONST_INT)
1438 {
1439 if (unsigned_condition (op) != op)
1440 max >>= 1;
1441
1442 if (max != (INTVAL (x) & max)
1443 && INTVAL (x) != 0xff)
1444 return 1;
1445 }
1446 return 0;
1447 }
1448
1449
1450 /* Returns nonzero if REGNO is the number of a hard
1451 register in which function arguments are sometimes passed. */
1452
1453 int
1454 function_arg_regno_p(int r)
1455 {
1456 return (r >= 8 && r <= 25);
1457 }
1458
1459 /* Initialize the variable CUM with the state at the beginning
1460 of the argument list. */
1461
1462 void
1463 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1464 tree fndecl ATTRIBUTE_UNUSED)
1465 {
1466 cum->nregs = 18;
1467 cum->regno = FIRST_CUM_REG;
1468 if (!libname && fntype)
1469 {
1470 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1471 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1472 != void_type_node));
1473 if (stdarg)
1474 cum->nregs = 0;
1475 }
1476 }
1477
1478 /* Returns the number of registers to allocate for a function argument. */
1479
1480 static int
1481 avr_num_arg_regs (enum machine_mode mode, tree type)
1482 {
1483 int size;
1484
1485 if (mode == BLKmode)
1486 size = int_size_in_bytes (type);
1487 else
1488 size = GET_MODE_SIZE (mode);
1489
1490 /* Align all function arguments to start in even-numbered registers.
1491 Odd-sized arguments leave holes above them. */
1492
1493 return (size + 1) & ~1;
1494 }
1495
1496 /* Controls whether a function argument is passed
1497 in a register, and which register. */
1498
1499 rtx
1500 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1501 int named ATTRIBUTE_UNUSED)
1502 {
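  /* Argument registers are allocated downwards from r25: cum->regno is
     one past the highest register still free, so an argument of BYTES
     bytes occupies registers regno-bytes .. regno-1.  */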
1503 int bytes = avr_num_arg_regs (mode, type);
1504
1505 if (cum->nregs && bytes <= cum->nregs)
1506 return gen_rtx_REG (mode, cum->regno - bytes);
1507
1508 return NULL_RTX;
1509 }
1510
1511 /* Update the summarizer variable CUM to advance past an argument
1512 in the argument list. */
1513
1514 void
1515 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1516 int named ATTRIBUTE_UNUSED)
1517 {
1518 int bytes = avr_num_arg_regs (mode, type);
1519
1520 cum->nregs -= bytes;
1521 cum->regno -= bytes;
1522
1523 if (cum->nregs <= 0)
1524 {
1525 cum->nregs = 0;
1526 cum->regno = FIRST_CUM_REG;
1527 }
1528 }
1529
1530 /***********************************************************************
1531 Functions for outputting various mov's for various modes
1532 ************************************************************************/
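/* Each of these routines returns (or directly emits) an assembler
   template; when the int pointer L is non-null it also stores the
   number of output instructions in *L, which is used to compute insn
   length attributes without emitting any code.  */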
1533 const char *
1534 output_movqi (rtx insn, rtx operands[], int *l)
1535 {
1536 int dummy;
1537 rtx dest = operands[0];
1538 rtx src = operands[1];
1539 int *real_l = l;
1540
1541 if (!l)
1542 l = &dummy;
1543
1544 *l = 1;
1545
1546 if (register_operand (dest, QImode))
1547 {
1548 if (register_operand (src, QImode)) /* mov r,r */
1549 {
1550 if (test_hard_reg_class (STACK_REG, dest))
1551 return AS2 (out,%0,%1);
1552 else if (test_hard_reg_class (STACK_REG, src))
1553 return AS2 (in,%0,%1);
1554
1555 return AS2 (mov,%0,%1);
1556 }
1557 else if (CONSTANT_P (src))
1558 {
1559 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1560 return AS2 (ldi,%0,lo8(%1));
1561
1562 if (GET_CODE (src) == CONST_INT)
1563 {
1564 if (src == const0_rtx) /* mov r,L */
1565 return AS1 (clr,%0);
1566 else if (src == const1_rtx)
1567 {
1568 *l = 2;
1569 return (AS1 (clr,%0) CR_TAB
1570 AS1 (inc,%0));
1571 }
1572 else if (src == constm1_rtx)
1573 {
1574 /* Immediate constants -1 to any register */
1575 *l = 2;
1576 return (AS1 (clr,%0) CR_TAB
1577 AS1 (dec,%0));
1578 }
1579 else
1580 {
1581 int bit_nr = exact_log2 (INTVAL (src));
1582
1583 if (bit_nr >= 0)
1584 {
1585 *l = 3;
1586 if (!real_l)
1587 output_asm_insn ((AS1 (clr,%0) CR_TAB
1588 "set"), operands);
1589 if (!real_l)
1590 avr_output_bld (operands, bit_nr);
1591
1592 return "";
1593 }
1594 }
1595 }
1596
1597 /* Last resort, larger than loading from memory. */
1598 *l = 4;
1599 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1600 AS2 (ldi,r31,lo8(%1)) CR_TAB
1601 AS2 (mov,%0,r31) CR_TAB
1602 AS2 (mov,r31,__tmp_reg__));
1603 }
1604 else if (GET_CODE (src) == MEM)
1605 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1606 }
1607 else if (GET_CODE (dest) == MEM)
1608 {
1609 const char *templ;
1610
1611 if (src == const0_rtx)
1612 operands[1] = zero_reg_rtx;
1613
1614 templ = out_movqi_mr_r (insn, operands, real_l);
1615
1616 if (!real_l)
1617 output_asm_insn (templ, operands);
1618
1619 operands[1] = src;
1620 }
1621 return "";
1622 }
1623
1624
1625 const char *
1626 output_movhi (rtx insn, rtx operands[], int *l)
1627 {
1628 int dummy;
1629 rtx dest = operands[0];
1630 rtx src = operands[1];
1631 int *real_l = l;
1632
1633 if (!l)
1634 l = &dummy;
1635
1636 if (register_operand (dest, HImode))
1637 {
1638 if (register_operand (src, HImode)) /* mov r,r */
1639 {
1640 if (test_hard_reg_class (STACK_REG, dest))
1641 {
1642 if (TARGET_TINY_STACK)
1643 return *l = 1, AS2 (out,__SP_L__,%A1);
1644 /* Use simple load of stack pointer if no interrupts are
1645 used. */
1646 else if (TARGET_NO_INTERRUPTS)
1647 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1648 AS2 (out,__SP_L__,%A1));
1649 *l = 5;
1650 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1651 "cli" CR_TAB
1652 AS2 (out,__SP_H__,%B1) CR_TAB
1653 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1654 AS2 (out,__SP_L__,%A1));
1655 }
1656 else if (test_hard_reg_class (STACK_REG, src))
1657 {
1658 *l = 2;
1659 return (AS2 (in,%A0,__SP_L__) CR_TAB
1660 AS2 (in,%B0,__SP_H__));
1661 }
1662
1663 if (AVR_HAVE_MOVW)
1664 {
1665 *l = 1;
1666 return (AS2 (movw,%0,%1));
1667 }
1668 else
1669 {
1670 *l = 2;
1671 return (AS2 (mov,%A0,%A1) CR_TAB
1672 AS2 (mov,%B0,%B1));
1673 }
1674 }
1675 else if (CONSTANT_P (src))
1676 {
1677 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1678 {
1679 *l = 2;
1680 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1681 AS2 (ldi,%B0,hi8(%1)));
1682 }
1683
1684 if (GET_CODE (src) == CONST_INT)
1685 {
1686 if (src == const0_rtx) /* mov r,L */
1687 {
1688 *l = 2;
1689 return (AS1 (clr,%A0) CR_TAB
1690 AS1 (clr,%B0));
1691 }
1692 else if (src == const1_rtx)
1693 {
1694 *l = 3;
1695 return (AS1 (clr,%A0) CR_TAB
1696 AS1 (clr,%B0) CR_TAB
1697 AS1 (inc,%A0));
1698 }
1699 else if (src == constm1_rtx)
1700 {
1701 /* Immediate constants -1 to any register */
1702 *l = 3;
1703 return (AS1 (clr,%0) CR_TAB
1704 AS1 (dec,%A0) CR_TAB
1705 AS2 (mov,%B0,%A0));
1706 }
1707 else
1708 {
1709 int bit_nr = exact_log2 (INTVAL (src));
1710
1711 if (bit_nr >= 0)
1712 {
1713 *l = 4;
1714 if (!real_l)
1715 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1716 AS1 (clr,%B0) CR_TAB
1717 "set"), operands);
1718 if (!real_l)
1719 avr_output_bld (operands, bit_nr);
1720
1721 return "";
1722 }
1723 }
1724
1725 if ((INTVAL (src) & 0xff) == 0)
1726 {
1727 *l = 5;
1728 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1729 AS1 (clr,%A0) CR_TAB
1730 AS2 (ldi,r31,hi8(%1)) CR_TAB
1731 AS2 (mov,%B0,r31) CR_TAB
1732 AS2 (mov,r31,__tmp_reg__));
1733 }
1734 else if ((INTVAL (src) & 0xff00) == 0)
1735 {
1736 *l = 5;
1737 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1738 AS2 (ldi,r31,lo8(%1)) CR_TAB
1739 AS2 (mov,%A0,r31) CR_TAB
1740 AS1 (clr,%B0) CR_TAB
1741 AS2 (mov,r31,__tmp_reg__));
1742 }
1743 }
1744
1745 /* Last resort, equal to loading from memory. */
1746 *l = 6;
1747 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1748 AS2 (ldi,r31,lo8(%1)) CR_TAB
1749 AS2 (mov,%A0,r31) CR_TAB
1750 AS2 (ldi,r31,hi8(%1)) CR_TAB
1751 AS2 (mov,%B0,r31) CR_TAB
1752 AS2 (mov,r31,__tmp_reg__));
1753 }
1754 else if (GET_CODE (src) == MEM)
1755 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1756 }
1757 else if (GET_CODE (dest) == MEM)
1758 {
1759 const char *templ;
1760
1761 if (src == const0_rtx)
1762 operands[1] = zero_reg_rtx;
1763
1764 templ = out_movhi_mr_r (insn, operands, real_l);
1765
1766 if (!real_l)
1767 output_asm_insn (templ, operands);
1768
1769 operands[1] = src;
1770 return "";
1771 }
1772 fatal_insn ("invalid insn:", insn);
1773 return "";
1774 }
1775
1776 const char *
1777 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1778 {
1779 rtx dest = op[0];
1780 rtx src = op[1];
1781 rtx x = XEXP (src, 0);
1782 int dummy;
1783
1784 if (!l)
1785 l = &dummy;
1786
1787 if (CONSTANT_ADDRESS_P (x))
1788 {
1789 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1790 {
1791 *l = 1;
1792 return AS2 (in,%0,__SREG__);
1793 }
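      /* I/O registers are mapped into the data address space at offset
         0x20, so subtracting 0x20 converts the address to the port
         number expected by the IN instruction.  */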
1794 if (optimize > 0 && io_address_operand (x, QImode))
1795 {
1796 *l = 1;
1797 return AS2 (in,%0,%1-0x20);
1798 }
1799 *l = 2;
1800 return AS2 (lds,%0,%1);
1801 }
1802 /* memory access by reg+disp */
1803 else if (GET_CODE (x) == PLUS
1804 && REG_P (XEXP (x,0))
1805 && GET_CODE (XEXP (x,1)) == CONST_INT)
1806 {
1807 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1808 {
1809 int disp = INTVAL (XEXP (x,1));
1810 if (REGNO (XEXP (x,0)) != REG_Y)
1811 fatal_insn ("incorrect insn:",insn);
1812
1813 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1814 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1815 AS2 (ldd,%0,Y+63) CR_TAB
1816 AS2 (sbiw,r28,%o1-63));
1817
1818 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1819 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1820 AS2 (ld,%0,Y) CR_TAB
1821 AS2 (subi,r28,lo8(%o1)) CR_TAB
1822 AS2 (sbci,r29,hi8(%o1)));
1823 }
1824 else if (REGNO (XEXP (x,0)) == REG_X)
1825 {
1826 /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
1827 it, but it can still occur with extreme optimization options. */
1828 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1829 || reg_unused_after (insn, XEXP (x,0)))
1830 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1831 AS2 (ld,%0,X));
1832
1833 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1834 AS2 (ld,%0,X) CR_TAB
1835 AS2 (sbiw,r26,%o1));
1836 }
1837 *l = 1;
1838 return AS2 (ldd,%0,%1);
1839 }
1840 *l = 1;
1841 return AS2 (ld,%0,%1);
1842 }
1843
1844 const char *
1845 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1846 {
1847 rtx dest = op[0];
1848 rtx src = op[1];
1849 rtx base = XEXP (src, 0);
1850 int reg_dest = true_regnum (dest);
1851 int reg_base = true_regnum (base);
1852 /* "volatile" forces reading low byte first, even if less efficient,
1853 for correct operation with 16-bit I/O registers. */
1854 int mem_volatile_p = MEM_VOLATILE_P (src);
1855 int tmp;
1856
1857 if (!l)
1858 l = &tmp;
1859
1860 if (reg_base > 0)
1861 {
1862 if (reg_dest == reg_base) /* R = (R) */
1863 {
1864 *l = 3;
1865 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1866 AS2 (ld,%B0,%1) CR_TAB
1867 AS2 (mov,%A0,__tmp_reg__));
1868 }
1869 else if (reg_base == REG_X) /* (R26) */
1870 {
1871 if (reg_unused_after (insn, base))
1872 {
1873 *l = 2;
1874 return (AS2 (ld,%A0,X+) CR_TAB
1875 AS2 (ld,%B0,X));
1876 }
1877 *l = 3;
1878 return (AS2 (ld,%A0,X+) CR_TAB
1879 AS2 (ld,%B0,X) CR_TAB
1880 AS2 (sbiw,r26,1));
1881 }
1882 else /* (R) */
1883 {
1884 *l = 2;
1885 return (AS2 (ld,%A0,%1) CR_TAB
1886 AS2 (ldd,%B0,%1+1));
1887 }
1888 }
1889 else if (GET_CODE (base) == PLUS) /* (R + i) */
1890 {
1891 int disp = INTVAL (XEXP (base, 1));
1892 int reg_base = true_regnum (XEXP (base, 0));
1893
1894 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1895 {
1896 if (REGNO (XEXP (base, 0)) != REG_Y)
1897 fatal_insn ("incorrect insn:",insn);
1898
1899 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1900 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1901 AS2 (ldd,%A0,Y+62) CR_TAB
1902 AS2 (ldd,%B0,Y+63) CR_TAB
1903 AS2 (sbiw,r28,%o1-62));
1904
1905 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1906 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1907 AS2 (ld,%A0,Y) CR_TAB
1908 AS2 (ldd,%B0,Y+1) CR_TAB
1909 AS2 (subi,r28,lo8(%o1)) CR_TAB
1910 AS2 (sbci,r29,hi8(%o1)));
1911 }
1912 if (reg_base == REG_X)
1913 {
1914 /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
1915 it, but it can still occur with extreme
1916 optimization options. */
1917
1918 *l = 4;
1919 if (reg_base == reg_dest)
1920 return (AS2 (adiw,r26,%o1) CR_TAB
1921 AS2 (ld,__tmp_reg__,X+) CR_TAB
1922 AS2 (ld,%B0,X) CR_TAB
1923 AS2 (mov,%A0,__tmp_reg__));
1924
1925 return (AS2 (adiw,r26,%o1) CR_TAB
1926 AS2 (ld,%A0,X+) CR_TAB
1927 AS2 (ld,%B0,X) CR_TAB
1928 AS2 (sbiw,r26,%o1+1));
1929 }
1930
1931 if (reg_base == reg_dest)
1932 {
1933 *l = 3;
1934 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1935 AS2 (ldd,%B0,%B1) CR_TAB
1936 AS2 (mov,%A0,__tmp_reg__));
1937 }
1938
1939 *l = 2;
1940 return (AS2 (ldd,%A0,%A1) CR_TAB
1941 AS2 (ldd,%B0,%B1));
1942 }
1943 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
1944 {
1945 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1946 fatal_insn ("incorrect insn:", insn);
1947
1948 if (mem_volatile_p)
1949 {
1950 if (REGNO (XEXP (base, 0)) == REG_X)
1951 {
1952 *l = 4;
1953 return (AS2 (sbiw,r26,2) CR_TAB
1954 AS2 (ld,%A0,X+) CR_TAB
1955 AS2 (ld,%B0,X) CR_TAB
1956 AS2 (sbiw,r26,1));
1957 }
1958 else
1959 {
1960 *l = 3;
1961 return (AS2 (sbiw,%r1,2) CR_TAB
1962 AS2 (ld,%A0,%p1) CR_TAB
1963 AS2 (ldd,%B0,%p1+1));
1964 }
1965 }
1966
1967 *l = 2;
1968 return (AS2 (ld,%B0,%1) CR_TAB
1969 AS2 (ld,%A0,%1));
1970 }
1971 else if (GET_CODE (base) == POST_INC) /* (R++) */
1972 {
1973 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1974 fatal_insn ("incorrect insn:", insn);
1975
1976 *l = 2;
1977 return (AS2 (ld,%A0,%1) CR_TAB
1978 AS2 (ld,%B0,%1));
1979 }
1980 else if (CONSTANT_ADDRESS_P (base))
1981 {
1982 if (optimize > 0 && io_address_operand (base, HImode))
1983 {
1984 *l = 2;
1985 return (AS2 (in,%A0,%A1-0x20) CR_TAB
1986 AS2 (in,%B0,%B1-0x20));
1987 }
1988 *l = 4;
1989 return (AS2 (lds,%A0,%A1) CR_TAB
1990 AS2 (lds,%B0,%B1));
1991 }
1992
1993 fatal_insn ("unknown move insn:",insn);
1994 return "";
1995 }
1996
1997 const char *
1998 out_movsi_r_mr (rtx insn, rtx op[], int *l)
1999 {
2000 rtx dest = op[0];
2001 rtx src = op[1];
2002 rtx base = XEXP (src, 0);
2003 int reg_dest = true_regnum (dest);
2004 int reg_base = true_regnum (base);
2005 int tmp;
2006
2007 if (!l)
2008 l = &tmp;
2009
2010 if (reg_base > 0)
2011 {
2012 if (reg_base == REG_X) /* (R26) */
2013 {
2014 if (reg_dest == REG_X)
2015 /* "ld r26,-X" is undefined */
2016 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2017 AS2 (ld,r29,X) CR_TAB
2018 AS2 (ld,r28,-X) CR_TAB
2019 AS2 (ld,__tmp_reg__,-X) CR_TAB
2020 AS2 (sbiw,r26,1) CR_TAB
2021 AS2 (ld,r26,X) CR_TAB
2022 AS2 (mov,r27,__tmp_reg__));
2023 else if (reg_dest == REG_X - 2)
2024 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2025 AS2 (ld,%B0,X+) CR_TAB
2026 AS2 (ld,__tmp_reg__,X+) CR_TAB
2027 AS2 (ld,%D0,X) CR_TAB
2028 AS2 (mov,%C0,__tmp_reg__));
2029 else if (reg_unused_after (insn, base))
2030 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2031 AS2 (ld,%B0,X+) CR_TAB
2032 AS2 (ld,%C0,X+) CR_TAB
2033 AS2 (ld,%D0,X));
2034 else
2035 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2036 AS2 (ld,%B0,X+) CR_TAB
2037 AS2 (ld,%C0,X+) CR_TAB
2038 AS2 (ld,%D0,X) CR_TAB
2039 AS2 (sbiw,r26,3));
2040 }
2041 else
2042 {
2043 if (reg_dest == reg_base)
2044 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2045 AS2 (ldd,%C0,%1+2) CR_TAB
2046 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2047 AS2 (ld,%A0,%1) CR_TAB
2048 AS2 (mov,%B0,__tmp_reg__));
2049 else if (reg_base == reg_dest + 2)
2050 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2051 AS2 (ldd,%B0,%1+1) CR_TAB
2052 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2053 AS2 (ldd,%D0,%1+3) CR_TAB
2054 AS2 (mov,%C0,__tmp_reg__));
2055 else
2056 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2057 AS2 (ldd,%B0,%1+1) CR_TAB
2058 AS2 (ldd,%C0,%1+2) CR_TAB
2059 AS2 (ldd,%D0,%1+3));
2060 }
2061 }
2062 else if (GET_CODE (base) == PLUS) /* (R + i) */
2063 {
2064 int disp = INTVAL (XEXP (base, 1));
2065
2066 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2067 {
2068 if (REGNO (XEXP (base, 0)) != REG_Y)
2069 fatal_insn ("incorrect insn:",insn);
2070
2071 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2072 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2073 AS2 (ldd,%A0,Y+60) CR_TAB
2074 AS2 (ldd,%B0,Y+61) CR_TAB
2075 AS2 (ldd,%C0,Y+62) CR_TAB
2076 AS2 (ldd,%D0,Y+63) CR_TAB
2077 AS2 (sbiw,r28,%o1-60));
2078
2079 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2080 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2081 AS2 (ld,%A0,Y) CR_TAB
2082 AS2 (ldd,%B0,Y+1) CR_TAB
2083 AS2 (ldd,%C0,Y+2) CR_TAB
2084 AS2 (ldd,%D0,Y+3) CR_TAB
2085 AS2 (subi,r28,lo8(%o1)) CR_TAB
2086 AS2 (sbci,r29,hi8(%o1)));
2087 }
2088
2089 reg_base = true_regnum (XEXP (base, 0));
2090 if (reg_base == REG_X)
2091 {
2092 /* R = (X + d) */
2093 if (reg_dest == REG_X)
2094 {
2095 *l = 7;
2096 /* "ld r26,-X" is undefined */
2097 return (AS2 (adiw,r26,%o1+3) CR_TAB
2098 AS2 (ld,r29,X) CR_TAB
2099 AS2 (ld,r28,-X) CR_TAB
2100 AS2 (ld,__tmp_reg__,-X) CR_TAB
2101 AS2 (sbiw,r26,1) CR_TAB
2102 AS2 (ld,r26,X) CR_TAB
2103 AS2 (mov,r27,__tmp_reg__));
2104 }
2105 *l = 6;
2106 if (reg_dest == REG_X - 2)
2107 return (AS2 (adiw,r26,%o1) CR_TAB
2108 AS2 (ld,r24,X+) CR_TAB
2109 AS2 (ld,r25,X+) CR_TAB
2110 AS2 (ld,__tmp_reg__,X+) CR_TAB
2111 AS2 (ld,r27,X) CR_TAB
2112 AS2 (mov,r26,__tmp_reg__));
2113
2114 return (AS2 (adiw,r26,%o1) CR_TAB
2115 AS2 (ld,%A0,X+) CR_TAB
2116 AS2 (ld,%B0,X+) CR_TAB
2117 AS2 (ld,%C0,X+) CR_TAB
2118 AS2 (ld,%D0,X) CR_TAB
2119 AS2 (sbiw,r26,%o1+3));
2120 }
2121 if (reg_dest == reg_base)
2122 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2123 AS2 (ldd,%C0,%C1) CR_TAB
2124 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2125 AS2 (ldd,%A0,%A1) CR_TAB
2126 AS2 (mov,%B0,__tmp_reg__));
2127 else if (reg_dest == reg_base - 2)
2128 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2129 AS2 (ldd,%B0,%B1) CR_TAB
2130 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2131 AS2 (ldd,%D0,%D1) CR_TAB
2132 AS2 (mov,%C0,__tmp_reg__));
2133 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2134 AS2 (ldd,%B0,%B1) CR_TAB
2135 AS2 (ldd,%C0,%C1) CR_TAB
2136 AS2 (ldd,%D0,%D1));
2137 }
2138 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2139 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2140 AS2 (ld,%C0,%1) CR_TAB
2141 AS2 (ld,%B0,%1) CR_TAB
2142 AS2 (ld,%A0,%1));
2143 else if (GET_CODE (base) == POST_INC) /* (R++) */
2144 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2145 AS2 (ld,%B0,%1) CR_TAB
2146 AS2 (ld,%C0,%1) CR_TAB
2147 AS2 (ld,%D0,%1));
2148 else if (CONSTANT_ADDRESS_P (base))
2149 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2150 AS2 (lds,%B0,%B1) CR_TAB
2151 AS2 (lds,%C0,%C1) CR_TAB
2152 AS2 (lds,%D0,%D1));
2153
2154 fatal_insn ("unknown move insn:",insn);
2155 return "";
2156 }
2157
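/* Output instructions for an SImode (4-byte) store of register OP[1]
into memory OP[0].  *L, if passed, receives the instruction count.  */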
2158 const char *
2159 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2160 {
2161 rtx dest = op[0];
2162 rtx src = op[1];
2163 rtx base = XEXP (dest, 0);
2164 int reg_base = true_regnum (base);
2165 int reg_src = true_regnum (src);
2166 int tmp;
2167
2168 if (!l)
2169 l = &tmp;
2170
2171 if (CONSTANT_ADDRESS_P (base))
2172 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2173 AS2 (sts,%B0,%B1) CR_TAB
2174 AS2 (sts,%C0,%C1) CR_TAB
2175 AS2 (sts,%D0,%D1));
2176 if (reg_base > 0) /* (r) */
2177 {
2178 if (reg_base == REG_X) /* (R26) */
2179 {
2180 if (reg_src == REG_X)
2181 {
2182 /* "st X+,r26" is undefined */
2183 if (reg_unused_after (insn, base))
2184 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2185 AS2 (st,X,r26) CR_TAB
2186 AS2 (adiw,r26,1) CR_TAB
2187 AS2 (st,X+,__tmp_reg__) CR_TAB
2188 AS2 (st,X+,r28) CR_TAB
2189 AS2 (st,X,r29));
2190 else
2191 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2192 AS2 (st,X,r26) CR_TAB
2193 AS2 (adiw,r26,1) CR_TAB
2194 AS2 (st,X+,__tmp_reg__) CR_TAB
2195 AS2 (st,X+,r28) CR_TAB
2196 AS2 (st,X,r29) CR_TAB
2197 AS2 (sbiw,r26,3));
2198 }
2199 else if (reg_base == reg_src + 2)
2200 {
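/* The upper half of the source is X itself (bytes C/D in r26/r27):
save them in __zero_reg__ and __tmp_reg__ before X is advanced,
then restore __zero_reg__ to zero.  */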
2201 if (reg_unused_after (insn, base))
2202 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2203 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2204 AS2 (st,%0+,%A1) CR_TAB
2205 AS2 (st,%0+,%B1) CR_TAB
2206 AS2 (st,%0+,__zero_reg__) CR_TAB
2207 AS2 (st,%0,__tmp_reg__) CR_TAB
2208 AS1 (clr,__zero_reg__));
2209 else
2210 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2211 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2212 AS2 (st,%0+,%A1) CR_TAB
2213 AS2 (st,%0+,%B1) CR_TAB
2214 AS2 (st,%0+,__zero_reg__) CR_TAB
2215 AS2 (st,%0,__tmp_reg__) CR_TAB
2216 AS1 (clr,__zero_reg__) CR_TAB
2217 AS2 (sbiw,r26,3));
2218 }
2219 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2220 AS2 (st,%0+,%B1) CR_TAB
2221 AS2 (st,%0+,%C1) CR_TAB
2222 AS2 (st,%0,%D1) CR_TAB
2223 AS2 (sbiw,r26,3));
2224 }
2225 else
2226 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2227 AS2 (std,%0+1,%B1) CR_TAB
2228 AS2 (std,%0+2,%C1) CR_TAB
2229 AS2 (std,%0+3,%D1));
2230 }
2231 else if (GET_CODE (base) == PLUS) /* (R + i) */
2232 {
2233 int disp = INTVAL (XEXP (base, 1));
2234 reg_base = REGNO (XEXP (base, 0));
2235 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2236 {
2237 if (reg_base != REG_Y)
2238 fatal_insn ("incorrect insn:",insn);
2239
2240 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2241 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2242 AS2 (std,Y+60,%A1) CR_TAB
2243 AS2 (std,Y+61,%B1) CR_TAB
2244 AS2 (std,Y+62,%C1) CR_TAB
2245 AS2 (std,Y+63,%D1) CR_TAB
2246 AS2 (sbiw,r28,%o0-60));
2247
2248 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2249 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2250 AS2 (st,Y,%A1) CR_TAB
2251 AS2 (std,Y+1,%B1) CR_TAB
2252 AS2 (std,Y+2,%C1) CR_TAB
2253 AS2 (std,Y+3,%D1) CR_TAB
2254 AS2 (subi,r28,lo8(%o0)) CR_TAB
2255 AS2 (sbci,r29,hi8(%o0)));
2256 }
2257 if (reg_base == REG_X)
2258 {
2259 /* (X + d) = R */
2260 if (reg_src == REG_X)
2261 {
2262 *l = 9;
2263 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2264 AS2 (mov,__zero_reg__,r27) CR_TAB
2265 AS2 (adiw,r26,%o0) CR_TAB
2266 AS2 (st,X+,__tmp_reg__) CR_TAB
2267 AS2 (st,X+,__zero_reg__) CR_TAB
2268 AS2 (st,X+,r28) CR_TAB
2269 AS2 (st,X,r29) CR_TAB
2270 AS1 (clr,__zero_reg__) CR_TAB
2271 AS2 (sbiw,r26,%o0+3));
2272 }
2273 else if (reg_src == REG_X - 2)
2274 {
2275 *l = 9;
2276 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2277 AS2 (mov,__zero_reg__,r27) CR_TAB
2278 AS2 (adiw,r26,%o0) CR_TAB
2279 AS2 (st,X+,r24) CR_TAB
2280 AS2 (st,X+,r25) CR_TAB
2281 AS2 (st,X+,__tmp_reg__) CR_TAB
2282 AS2 (st,X,__zero_reg__) CR_TAB
2283 AS1 (clr,__zero_reg__) CR_TAB
2284 AS2 (sbiw,r26,%o0+3));
2285 }
2286 *l = 6;
2287 return (AS2 (adiw,r26,%o0) CR_TAB
2288 AS2 (st,X+,%A1) CR_TAB
2289 AS2 (st,X+,%B1) CR_TAB
2290 AS2 (st,X+,%C1) CR_TAB
2291 AS2 (st,X,%D1) CR_TAB
2292 AS2 (sbiw,r26,%o0+3));
2293 }
2294 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2295 AS2 (std,%B0,%B1) CR_TAB
2296 AS2 (std,%C0,%C1) CR_TAB
2297 AS2 (std,%D0,%D1));
2298 }
2299 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2300 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2301 AS2 (st,%0,%C1) CR_TAB
2302 AS2 (st,%0,%B1) CR_TAB
2303 AS2 (st,%0,%A1));
2304 else if (GET_CODE (base) == POST_INC) /* (R++) */
2305 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2306 AS2 (st,%0,%B1) CR_TAB
2307 AS2 (st,%0,%C1) CR_TAB
2308 AS2 (st,%0,%D1));
2309 fatal_insn ("unknown move insn:",insn);
2310 return "";
2311 }
2312
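/* Output a 32-bit (SImode or SFmode) move DEST = SRC, dispatching to
the register/register, constant and memory cases handled below.  */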
2313 const char *
2314 output_movsisf(rtx insn, rtx operands[], int *l)
2315 {
2316 int dummy;
2317 rtx dest = operands[0];
2318 rtx src = operands[1];
2319 int *real_l = l;
2320
2321 if (!l)
2322 l = &dummy;
2323
2324 if (register_operand (dest, VOIDmode))
2325 {
2326 if (register_operand (src, VOIDmode)) /* mov r,r */
2327 {
2328 if (true_regnum (dest) > true_regnum (src))
2329 {
2330 if (AVR_HAVE_MOVW)
2331 {
2332 *l = 2;
2333 return (AS2 (movw,%C0,%C1) CR_TAB
2334 AS2 (movw,%A0,%A1));
2335 }
2336 *l = 4;
2337 return (AS2 (mov,%D0,%D1) CR_TAB
2338 AS2 (mov,%C0,%C1) CR_TAB
2339 AS2 (mov,%B0,%B1) CR_TAB
2340 AS2 (mov,%A0,%A1));
2341 }
2342 else
2343 {
2344 if (AVR_HAVE_MOVW)
2345 {
2346 *l = 2;
2347 return (AS2 (movw,%A0,%A1) CR_TAB
2348 AS2 (movw,%C0,%C1));
2349 }
2350 *l = 4;
2351 return (AS2 (mov,%A0,%A1) CR_TAB
2352 AS2 (mov,%B0,%B1) CR_TAB
2353 AS2 (mov,%C0,%C1) CR_TAB
2354 AS2 (mov,%D0,%D1));
2355 }
2356 }
2357 else if (CONSTANT_P (src))
2358 {
2359 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2360 {
2361 *l = 4;
2362 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2363 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2364 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2365 AS2 (ldi,%D0,hhi8(%1)));
2366 }
2367
2368 if (GET_CODE (src) == CONST_INT)
2369 {
2370 const char *const clr_op0 =
2371 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2372 AS1 (clr,%B0) CR_TAB
2373 AS2 (movw,%C0,%A0))
2374 : (AS1 (clr,%A0) CR_TAB
2375 AS1 (clr,%B0) CR_TAB
2376 AS1 (clr,%C0) CR_TAB
2377 AS1 (clr,%D0));
2378
2379 if (src == const0_rtx) /* mov r,L */
2380 {
2381 *l = AVR_HAVE_MOVW ? 3 : 4;
2382 return clr_op0;
2383 }
2384 else if (src == const1_rtx)
2385 {
2386 if (!real_l)
2387 output_asm_insn (clr_op0, operands);
2388 *l = AVR_HAVE_MOVW ? 4 : 5;
2389 return AS1 (inc,%A0);
2390 }
2391 else if (src == constm1_rtx)
2392 {
2393 /* Load the immediate constant -1 into any register.  */
2394 if (AVR_HAVE_MOVW)
2395 {
2396 *l = 4;
2397 return (AS1 (clr,%A0) CR_TAB
2398 AS1 (dec,%A0) CR_TAB
2399 AS2 (mov,%B0,%A0) CR_TAB
2400 AS2 (movw,%C0,%A0));
2401 }
2402 *l = 5;
2403 return (AS1 (clr,%A0) CR_TAB
2404 AS1 (dec,%A0) CR_TAB
2405 AS2 (mov,%B0,%A0) CR_TAB
2406 AS2 (mov,%C0,%A0) CR_TAB
2407 AS2 (mov,%D0,%A0));
2408 }
2409 else
2410 {
2411 int bit_nr = exact_log2 (INTVAL (src));
2412
2413 if (bit_nr >= 0)
2414 {
2415 *l = AVR_HAVE_MOVW ? 5 : 6;
2416 if (!real_l)
2417 {
2418 output_asm_insn (clr_op0, operands);
2419 output_asm_insn ("set", operands);
2420 }
2421 if (!real_l)
2422 avr_output_bld (operands, bit_nr);
2423
2424 return "";
2425 }
2426 }
2427 }
2428
2429 /* Last resort, better than loading from memory. */
2430 *l = 10;
2431 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2432 AS2 (ldi,r31,lo8(%1)) CR_TAB
2433 AS2 (mov,%A0,r31) CR_TAB
2434 AS2 (ldi,r31,hi8(%1)) CR_TAB
2435 AS2 (mov,%B0,r31) CR_TAB
2436 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2437 AS2 (mov,%C0,r31) CR_TAB
2438 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2439 AS2 (mov,%D0,r31) CR_TAB
2440 AS2 (mov,r31,__tmp_reg__));
2441 }
2442 else if (GET_CODE (src) == MEM)
2443 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2444 }
2445 else if (GET_CODE (dest) == MEM)
2446 {
2447 const char *templ;
2448
2449 if (src == const0_rtx)
2450 operands[1] = zero_reg_rtx;
2451
2452 templ = out_movsi_mr_r (insn, operands, real_l);
2453
2454 if (!real_l)
2455 output_asm_insn (templ, operands);
2456
2457 operands[1] = src;
2458 return "";
2459 }
2460 fatal_insn ("invalid insn:", insn);
2461 return "";
2462 }
2463
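/* Output a QImode (single byte) store of register OP[1] to memory OP[0].
An OUT instruction is used for SREG, and for other I/O addresses when
optimizing.  */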
2464 const char *
2465 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2466 {
2467 rtx dest = op[0];
2468 rtx src = op[1];
2469 rtx x = XEXP (dest, 0);
2470 int dummy;
2471
2472 if (!l)
2473 l = &dummy;
2474
2475 if (CONSTANT_ADDRESS_P (x))
2476 {
2477 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2478 {
2479 *l = 1;
2480 return AS2 (out,__SREG__,%1);
2481 }
2482 if (optimize > 0 && io_address_operand (x, QImode))
2483 {
2484 *l = 1;
2485 return AS2 (out,%0-0x20,%1);
2486 }
2487 *l = 2;
2488 return AS2 (sts,%0,%1);
2489 }
2490 /* memory access by reg+disp */
2491 else if (GET_CODE (x) == PLUS
2492 && REG_P (XEXP (x,0))
2493 && GET_CODE (XEXP (x,1)) == CONST_INT)
2494 {
2495 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2496 {
2497 int disp = INTVAL (XEXP (x,1));
2498 if (REGNO (XEXP (x,0)) != REG_Y)
2499 fatal_insn ("incorrect insn:",insn);
2500
2501 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2502 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2503 AS2 (std,Y+63,%1) CR_TAB
2504 AS2 (sbiw,r28,%o0-63));
2505
2506 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2507 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2508 AS2 (st,Y,%1) CR_TAB
2509 AS2 (subi,r28,lo8(%o0)) CR_TAB
2510 AS2 (sbci,r29,hi8(%o0)));
2511 }
2512 else if (REGNO (XEXP (x,0)) == REG_X)
2513 {
2514 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2515 {
2516 if (reg_unused_after (insn, XEXP (x,0)))
2517 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2518 AS2 (adiw,r26,%o0) CR_TAB
2519 AS2 (st,X,__tmp_reg__));
2520
2521 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2522 AS2 (adiw,r26,%o0) CR_TAB
2523 AS2 (st,X,__tmp_reg__) CR_TAB
2524 AS2 (sbiw,r26,%o0));
2525 }
2526 else
2527 {
2528 if (reg_unused_after (insn, XEXP (x,0)))
2529 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2530 AS2 (st,X,%1));
2531
2532 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2533 AS2 (st,X,%1) CR_TAB
2534 AS2 (sbiw,r26,%o0));
2535 }
2536 }
2537 *l = 1;
2538 return AS2 (std,%0,%1);
2539 }
2540 *l = 1;
2541 return AS2 (st,%0,%1);
2542 }
2543
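/* Output an HImode (2-byte) store of register OP[1] to memory OP[0].  */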
2544 const char *
2545 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2546 {
2547 rtx dest = op[0];
2548 rtx src = op[1];
2549 rtx base = XEXP (dest, 0);
2550 int reg_base = true_regnum (base);
2551 int reg_src = true_regnum (src);
2552 /* "volatile" forces writing high byte first, even if less efficient,
2553 for correct operation with 16-bit I/O registers. */
2554 int mem_volatile_p = MEM_VOLATILE_P (dest);
2555 int tmp;
2556
2557 if (!l)
2558 l = &tmp;
2559 if (CONSTANT_ADDRESS_P (base))
2560 {
2561 if (optimize > 0 && io_address_operand (base, HImode))
2562 {
2563 *l = 2;
2564 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2565 AS2 (out,%A0-0x20,%A1));
2566 }
2567 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2568 AS2 (sts,%A0,%A1));
2569 }
2570 if (reg_base > 0)
2571 {
2572 if (reg_base == REG_X)
2573 {
2574 if (reg_src == REG_X)
2575 {
2576 /* "st X+,r26" and "st -X,r26" are undefined. */
2577 if (!mem_volatile_p && reg_unused_after (insn, src))
2578 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2579 AS2 (st,X,r26) CR_TAB
2580 AS2 (adiw,r26,1) CR_TAB
2581 AS2 (st,X,__tmp_reg__));
2582 else
2583 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2584 AS2 (adiw,r26,1) CR_TAB
2585 AS2 (st,X,__tmp_reg__) CR_TAB
2586 AS2 (sbiw,r26,1) CR_TAB
2587 AS2 (st,X,r26));
2588 }
2589 else
2590 {
2591 if (!mem_volatile_p && reg_unused_after (insn, base))
2592 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2593 AS2 (st,X,%B1));
2594 else
2595 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2596 AS2 (st,X,%B1) CR_TAB
2597 AS2 (st,-X,%A1));
2598 }
2599 }
2600 else
2601 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2602 AS2 (st,%0,%A1));
2603 }
2604 else if (GET_CODE (base) == PLUS)
2605 {
2606 int disp = INTVAL (XEXP (base, 1));
2607 reg_base = REGNO (XEXP (base, 0));
2608 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2609 {
2610 if (reg_base != REG_Y)
2611 fatal_insn ("incorrect insn:",insn);
2612
2613 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2614 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2615 AS2 (std,Y+63,%B1) CR_TAB
2616 AS2 (std,Y+62,%A1) CR_TAB
2617 AS2 (sbiw,r28,%o0-62));
2618
2619 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2620 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2621 AS2 (std,Y+1,%B1) CR_TAB
2622 AS2 (st,Y,%A1) CR_TAB
2623 AS2 (subi,r28,lo8(%o0)) CR_TAB
2624 AS2 (sbci,r29,hi8(%o0)));
2625 }
2626 if (reg_base == REG_X)
2627 {
2628 /* (X + d) = R */
2629 if (reg_src == REG_X)
2630 {
2631 *l = 7;
2632 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2633 AS2 (mov,__zero_reg__,r27) CR_TAB
2634 AS2 (adiw,r26,%o0+1) CR_TAB
2635 AS2 (st,X,__zero_reg__) CR_TAB
2636 AS2 (st,-X,__tmp_reg__) CR_TAB
2637 AS1 (clr,__zero_reg__) CR_TAB
2638 AS2 (sbiw,r26,%o0));
2639 }
2640 *l = 4;
2641 return (AS2 (adiw,r26,%o0+1) CR_TAB
2642 AS2 (st,X,%B1) CR_TAB
2643 AS2 (st,-X,%A1) CR_TAB
2644 AS2 (sbiw,r26,%o0));
2645 }
2646 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2647 AS2 (std,%A0,%A1));
2648 }
2649 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2650 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2651 AS2 (st,%0,%A1));
2652 else if (GET_CODE (base) == POST_INC) /* (R++) */
2653 {
2654 if (mem_volatile_p)
2655 {
2656 if (REGNO (XEXP (base, 0)) == REG_X)
2657 {
2658 *l = 4;
2659 return (AS2 (adiw,r26,1) CR_TAB
2660 AS2 (st,X,%B1) CR_TAB
2661 AS2 (st,-X,%A1) CR_TAB
2662 AS2 (adiw,r26,2));
2663 }
2664 else
2665 {
2666 *l = 3;
2667 return (AS2 (std,%p0+1,%B1) CR_TAB
2668 AS2 (st,%p0,%A1) CR_TAB
2669 AS2 (adiw,%r0,2));
2670 }
2671 }
2672
2673 *l = 2;
2674 return (AS2 (st,%0,%A1) CR_TAB
2675 AS2 (st,%0,%B1));
2676 }
2677 fatal_insn ("unknown move insn:",insn);
2678 return "";
2679 }
2680
2681 /* Return 1 if the frame pointer is required for the current function.  */
2682
2683 bool
2684 avr_frame_pointer_required_p (void)
2685 {
2686 return (cfun->calls_alloca
2687 || crtl->args.info.nregs == 0
2688 || get_frame_size () > 0);
2689 }
2690
2691 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2692
2693 static RTX_CODE
2694 compare_condition (rtx insn)
2695 {
2696 rtx next = next_real_insn (insn);
2697 RTX_CODE cond = UNKNOWN;
2698 if (next && GET_CODE (next) == JUMP_INSN)
2699 {
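/* A conditional jump has the form (set (pc) (if_then_else (cond ...) ...));
pick the comparison code out of the IF_THEN_ELSE.  */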
2700 rtx pat = PATTERN (next);
2701 rtx src = SET_SRC (pat);
2702 rtx t = XEXP (src, 0);
2703 cond = GET_CODE (t);
2704 }
2705 return cond;
2706 }
2707
2708 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2709
2710 static int
2711 compare_sign_p (rtx insn)
2712 {
2713 RTX_CODE cond = compare_condition (insn);
2714 return (cond == GE || cond == LT);
2715 }
2716
2717 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2718 that needs to be swapped (GT, GTU, LE, LEU). */
2719
2720 int
2721 compare_diff_p (rtx insn)
2722 {
2723 RTX_CODE cond = compare_condition (insn);
2724 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2725 }
2726
2727 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2728
2729 int
2730 compare_eq_p (rtx insn)
2731 {
2732 RTX_CODE cond = compare_condition (insn);
2733 return (cond == EQ || cond == NE);
2734 }
2735
2736
2737 /* Output test instruction for HImode. */
2738
2739 const char *
2740 out_tsthi (rtx insn, rtx op, int *l)
2741 {
2742 if (compare_sign_p (insn))
2743 {
2744 if (l) *l = 1;
2745 return AS1 (tst,%B0);
2746 }
2747 if (reg_unused_after (insn, op)
2748 && compare_eq_p (insn))
2749 {
2750 /* Faster than sbiw if we can clobber the operand. */
2751 if (l) *l = 1;
2752 return "or %A0,%B0";
2753 }
2754 if (test_hard_reg_class (ADDW_REGS, op))
2755 {
2756 if (l) *l = 1;
2757 return AS2 (sbiw,%0,0);
2758 }
2759 if (l) *l = 2;
2760 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2761 AS2 (cpc,%B0,__zero_reg__));
2762 }
2763
2764
2765 /* Output test instruction for SImode. */
2766
2767 const char *
2768 out_tstsi (rtx insn, rtx op, int *l)
2769 {
2770 if (compare_sign_p (insn))
2771 {
2772 if (l) *l = 1;
2773 return AS1 (tst,%D0);
2774 }
2775 if (test_hard_reg_class (ADDW_REGS, op))
2776 {
2777 if (l) *l = 3;
2778 return (AS2 (sbiw,%A0,0) CR_TAB
2779 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2780 AS2 (cpc,%D0,__zero_reg__));
2781 }
2782 if (l) *l = 4;
2783 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2784 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2785 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2786 AS2 (cpc,%D0,__zero_reg__));
2787 }
2788
2789
2790 /* Generate asm equivalent for various shifts.
2791 Shift count is a CONST_INT, MEM or REG.
2792 This only handles cases that are not already
2793 carefully hand-optimized in ?sh??i3_out. */
2794
2795 void
2796 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2797 int *len, int t_len)
2798 {
2799 rtx op[10];
2800 char str[500];
2801 int second_label = 1;
2802 int saved_in_tmp = 0;
2803 int use_zero_reg = 0;
2804
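/* op[0] = shift destination, op[1] = source, op[2] = shift count,
op[3] = scratch register of a PARALLEL clobber, when present.  */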
2805 op[0] = operands[0];
2806 op[1] = operands[1];
2807 op[2] = operands[2];
2808 op[3] = operands[3];
2809 str[0] = 0;
2810
2811 if (len)
2812 *len = 1;
2813
2814 if (GET_CODE (operands[2]) == CONST_INT)
2815 {
2816 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2817 int count = INTVAL (operands[2]);
2818 int max_len = 10; /* If larger than this, always use a loop. */
2819
2820 if (count <= 0)
2821 {
2822 if (len)
2823 *len = 0;
2824 return;
2825 }
2826
2827 if (count < 8 && !scratch)
2828 use_zero_reg = 1;
2829
2830 if (optimize_size)
2831 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2832
2833 if (t_len * count <= max_len)
2834 {
2835 /* Output shifts inline with no loop - faster. */
2836 if (len)
2837 *len = t_len * count;
2838 else
2839 {
2840 while (count-- > 0)
2841 output_asm_insn (templ, op);
2842 }
2843
2844 return;
2845 }
2846
2847 if (scratch)
2848 {
2849 if (!len)
2850 strcat (str, AS2 (ldi,%3,%2));
2851 }
2852 else if (use_zero_reg)
2853 {
2854 /* Hack to save one word: use __zero_reg__ as loop counter.
2855 Set one bit, then shift in a loop until it is 0 again. */
2856
2857 op[3] = zero_reg_rtx;
2858 if (len)
2859 *len = 2;
2860 else
2861 strcat (str, ("set" CR_TAB
2862 AS2 (bld,%3,%2-1)));
2863 }
2864 else
2865 {
2866 /* No scratch register available, use one from LD_REGS (saved in
2867 __tmp_reg__) that doesn't overlap with registers to shift. */
2868
2869 op[3] = gen_rtx_REG (QImode,
2870 ((true_regnum (operands[0]) - 1) & 15) + 16);
2871 op[4] = tmp_reg_rtx;
2872 saved_in_tmp = 1;
2873
2874 if (len)
2875 *len = 3; /* Includes "mov %3,%4" after the loop. */
2876 else
2877 strcat (str, (AS2 (mov,%4,%3) CR_TAB
2878 AS2 (ldi,%3,%2)));
2879 }
2880
2881 second_label = 0;
2882 }
2883 else if (GET_CODE (operands[2]) == MEM)
2884 {
2885 rtx op_mov[10];
2886
2887 op[3] = op_mov[0] = tmp_reg_rtx;
2888 op_mov[1] = op[2];
2889
2890 if (len)
2891 out_movqi_r_mr (insn, op_mov, len);
2892 else
2893 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2894 }
2895 else if (register_operand (operands[2], QImode))
2896 {
2897 if (reg_unused_after (insn, operands[2]))
2898 op[3] = op[2];
2899 else
2900 {
2901 op[3] = tmp_reg_rtx;
2902 if (!len)
2903 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2904 }
2905 }
2906 else
2907 fatal_insn ("bad shift insn:", insn);
2908
2909 if (second_label)
2910 {
2911 if (len)
2912 ++*len;
2913 else
2914 strcat (str, AS1 (rjmp,2f));
2915 }
2916
2917 if (len)
2918 *len += t_len + 2; /* template + dec + brXX */
2919 else
2920 {
2921 strcat (str, "\n1:\t");
2922 strcat (str, templ);
2923 strcat (str, second_label ? "\n2:\t" : "\n\t");
2924 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2925 strcat (str, CR_TAB);
2926 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
2927 if (saved_in_tmp)
2928 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2929 output_asm_insn (str, op);
2930 }
2931 }
2932
2933
2934 /* 8bit shift left ((char)x << i) */
2935
2936 const char *
2937 ashlqi3_out (rtx insn, rtx operands[], int *len)
2938 {
2939 if (GET_CODE (operands[2]) == CONST_INT)
2940 {
2941 int k;
2942
2943 if (!len)
2944 len = &k;
2945
2946 switch (INTVAL (operands[2]))
2947 {
2948 default:
2949 if (INTVAL (operands[2]) < 8)
2950 break;
2951
2952 *len = 1;
2953 return AS1 (clr,%0);
2954
2955 case 1:
2956 *len = 1;
2957 return AS1 (lsl,%0);
2958
2959 case 2:
2960 *len = 2;
2961 return (AS1 (lsl,%0) CR_TAB
2962 AS1 (lsl,%0));
2963
2964 case 3:
2965 *len = 3;
2966 return (AS1 (lsl,%0) CR_TAB
2967 AS1 (lsl,%0) CR_TAB
2968 AS1 (lsl,%0));
2969
2970 case 4:
2971 if (test_hard_reg_class (LD_REGS, operands[0]))
2972 {
2973 *len = 2;
2974 return (AS1 (swap,%0) CR_TAB
2975 AS2 (andi,%0,0xf0));
2976 }
2977 *len = 4;
2978 return (AS1 (lsl,%0) CR_TAB
2979 AS1 (lsl,%0) CR_TAB
2980 AS1 (lsl,%0) CR_TAB
2981 AS1 (lsl,%0));
2982
2983 case 5:
2984 if (test_hard_reg_class (LD_REGS, operands[0]))
2985 {
2986 *len = 3;
2987 return (AS1 (swap,%0) CR_TAB
2988 AS1 (lsl,%0) CR_TAB
2989 AS2 (andi,%0,0xe0));
2990 }
2991 *len = 5;
2992 return (AS1 (lsl,%0) CR_TAB
2993 AS1 (lsl,%0) CR_TAB
2994 AS1 (lsl,%0) CR_TAB
2995 AS1 (lsl,%0) CR_TAB
2996 AS1 (lsl,%0));
2997
2998 case 6:
2999 if (test_hard_reg_class (LD_REGS, operands[0]))
3000 {
3001 *len = 4;
3002 return (AS1 (swap,%0) CR_TAB
3003 AS1 (lsl,%0) CR_TAB
3004 AS1 (lsl,%0) CR_TAB
3005 AS2 (andi,%0,0xc0));
3006 }
3007 *len = 6;
3008 return (AS1 (lsl,%0) CR_TAB
3009 AS1 (lsl,%0) CR_TAB
3010 AS1 (lsl,%0) CR_TAB
3011 AS1 (lsl,%0) CR_TAB
3012 AS1 (lsl,%0) CR_TAB
3013 AS1 (lsl,%0));
3014
3015 case 7:
3016 *len = 3;
3017 return (AS1 (ror,%0) CR_TAB
3018 AS1 (clr,%0) CR_TAB
3019 AS1 (ror,%0));
3020 }
3021 }
3022 else if (CONSTANT_P (operands[2]))
3023 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3024
3025 out_shift_with_cnt (AS1 (lsl,%0),
3026 insn, operands, len, 1);
3027 return "";
3028 }
3029
3030
3031 /* 16bit shift left ((short)x << i) */
3032
3033 const char *
3034 ashlhi3_out (rtx insn, rtx operands[], int *len)
3035 {
3036 if (GET_CODE (operands[2]) == CONST_INT)
3037 {
3038 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3039 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3040 int k;
3041 int *t = len;
3042
3043 if (!len)
3044 len = &k;
3045
3046 switch (INTVAL (operands[2]))
3047 {
3048 default:
3049 if (INTVAL (operands[2]) < 16)
3050 break;
3051
3052 *len = 2;
3053 return (AS1 (clr,%B0) CR_TAB
3054 AS1 (clr,%A0));
3055
3056 case 4:
3057 if (optimize_size && scratch)
3058 break; /* 5 */
3059 if (ldi_ok)
3060 {
3061 *len = 6;
3062 return (AS1 (swap,%A0) CR_TAB
3063 AS1 (swap,%B0) CR_TAB
3064 AS2 (andi,%B0,0xf0) CR_TAB
3065 AS2 (eor,%B0,%A0) CR_TAB
3066 AS2 (andi,%A0,0xf0) CR_TAB
3067 AS2 (eor,%B0,%A0));
3068 }
3069 if (scratch)
3070 {
3071 *len = 7;
3072 return (AS1 (swap,%A0) CR_TAB
3073 AS1 (swap,%B0) CR_TAB
3074 AS2 (ldi,%3,0xf0) CR_TAB
3075 "and %B0,%3" CR_TAB
3076 AS2 (eor,%B0,%A0) CR_TAB
3077 "and %A0,%3" CR_TAB
3078 AS2 (eor,%B0,%A0));
3079 }
3080 break; /* optimize_size ? 6 : 8 */
3081
3082 case 5:
3083 if (optimize_size)
3084 break; /* scratch ? 5 : 6 */
3085 if (ldi_ok)
3086 {
3087 *len = 8;
3088 return (AS1 (lsl,%A0) CR_TAB
3089 AS1 (rol,%B0) CR_TAB
3090 AS1 (swap,%A0) CR_TAB
3091 AS1 (swap,%B0) CR_TAB
3092 AS2 (andi,%B0,0xf0) CR_TAB
3093 AS2 (eor,%B0,%A0) CR_TAB
3094 AS2 (andi,%A0,0xf0) CR_TAB
3095 AS2 (eor,%B0,%A0));
3096 }
3097 if (scratch)
3098 {
3099 *len = 9;
3100 return (AS1 (lsl,%A0) CR_TAB
3101 AS1 (rol,%B0) CR_TAB
3102 AS1 (swap,%A0) CR_TAB
3103 AS1 (swap,%B0) CR_TAB
3104 AS2 (ldi,%3,0xf0) CR_TAB
3105 "and %B0,%3" CR_TAB
3106 AS2 (eor,%B0,%A0) CR_TAB
3107 "and %A0,%3" CR_TAB
3108 AS2 (eor,%B0,%A0));
3109 }
3110 break; /* 10 */
3111
3112 case 6:
3113 if (optimize_size)
3114 break; /* scratch ? 5 : 6 */
3115 *len = 9;
3116 return (AS1 (clr,__tmp_reg__) CR_TAB
3117 AS1 (lsr,%B0) CR_TAB
3118 AS1 (ror,%A0) CR_TAB
3119 AS1 (ror,__tmp_reg__) CR_TAB
3120 AS1 (lsr,%B0) CR_TAB
3121 AS1 (ror,%A0) CR_TAB
3122 AS1 (ror,__tmp_reg__) CR_TAB
3123 AS2 (mov,%B0,%A0) CR_TAB
3124 AS2 (mov,%A0,__tmp_reg__));
3125
3126 case 7:
3127 *len = 5;
3128 return (AS1 (lsr,%B0) CR_TAB
3129 AS2 (mov,%B0,%A0) CR_TAB
3130 AS1 (clr,%A0) CR_TAB
3131 AS1 (ror,%B0) CR_TAB
3132 AS1 (ror,%A0));
3133
3134 case 8:
3135 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3136 AS1 (clr,%A0));
3137
3138 case 9:
3139 *len = 3;
3140 return (AS2 (mov,%B0,%A0) CR_TAB
3141 AS1 (clr,%A0) CR_TAB
3142 AS1 (lsl,%B0));
3143
3144 case 10:
3145 *len = 4;
3146 return (AS2 (mov,%B0,%A0) CR_TAB
3147 AS1 (clr,%A0) CR_TAB
3148 AS1 (lsl,%B0) CR_TAB
3149 AS1 (lsl,%B0));
3150
3151 case 11:
3152 *len = 5;
3153 return (AS2 (mov,%B0,%A0) CR_TAB
3154 AS1 (clr,%A0) CR_TAB
3155 AS1 (lsl,%B0) CR_TAB
3156 AS1 (lsl,%B0) CR_TAB
3157 AS1 (lsl,%B0));
3158
3159 case 12:
3160 if (ldi_ok)
3161 {
3162 *len = 4;
3163 return (AS2 (mov,%B0,%A0) CR_TAB
3164 AS1 (clr,%A0) CR_TAB
3165 AS1 (swap,%B0) CR_TAB
3166 AS2 (andi,%B0,0xf0));
3167 }
3168 if (scratch)
3169 {
3170 *len = 5;
3171 return (AS2 (mov,%B0,%A0) CR_TAB
3172 AS1 (clr,%A0) CR_TAB
3173 AS1 (swap,%B0) CR_TAB
3174 AS2 (ldi,%3,0xf0) CR_TAB
3175 "and %B0,%3");
3176 }
3177 *len = 6;
3178 return (AS2 (mov,%B0,%A0) CR_TAB
3179 AS1 (clr,%A0) CR_TAB
3180 AS1 (lsl,%B0) CR_TAB
3181 AS1 (lsl,%B0) CR_TAB
3182 AS1 (lsl,%B0) CR_TAB
3183 AS1 (lsl,%B0));
3184
3185 case 13:
3186 if (ldi_ok)
3187 {
3188 *len = 5;
3189 return (AS2 (mov,%B0,%A0) CR_TAB
3190 AS1 (clr,%A0) CR_TAB
3191 AS1 (swap,%B0) CR_TAB
3192 AS1 (lsl,%B0) CR_TAB
3193 AS2 (andi,%B0,0xe0));
3194 }
3195 if (AVR_HAVE_MUL && scratch)
3196 {
3197 *len = 5;
3198 return (AS2 (ldi,%3,0x20) CR_TAB
3199 AS2 (mul,%A0,%3) CR_TAB
3200 AS2 (mov,%B0,r0) CR_TAB
3201 AS1 (clr,%A0) CR_TAB
3202 AS1 (clr,__zero_reg__));
3203 }
3204 if (optimize_size && scratch)
3205 break; /* 5 */
3206 if (scratch)
3207 {
3208 *len = 6;
3209 return (AS2 (mov,%B0,%A0) CR_TAB
3210 AS1 (clr,%A0) CR_TAB
3211 AS1 (swap,%B0) CR_TAB
3212 AS1 (lsl,%B0) CR_TAB
3213 AS2 (ldi,%3,0xe0) CR_TAB
3214 "and %B0,%3");
3215 }
3216 if (AVR_HAVE_MUL)
3217 {
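/* Build the constant 1 << 5 in __zero_reg__ via the T flag, use it as
the multiplier, and clear __zero_reg__ again afterwards.  */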
3218 *len = 6;
3219 return ("set" CR_TAB
3220 AS2 (bld,r1,5) CR_TAB
3221 AS2 (mul,%A0,r1) CR_TAB
3222 AS2 (mov,%B0,r0) CR_TAB
3223 AS1 (clr,%A0) CR_TAB
3224 AS1 (clr,__zero_reg__));
3225 }
3226 *len = 7;
3227 return (AS2 (mov,%B0,%A0) CR_TAB
3228 AS1 (clr,%A0) CR_TAB
3229 AS1 (lsl,%B0) CR_TAB
3230 AS1 (lsl,%B0) CR_TAB
3231 AS1 (lsl,%B0) CR_TAB
3232 AS1 (lsl,%B0) CR_TAB
3233 AS1 (lsl,%B0));
3234
3235 case 14:
3236 if (AVR_HAVE_MUL && ldi_ok)
3237 {
3238 *len = 5;
3239 return (AS2 (ldi,%B0,0x40) CR_TAB
3240 AS2 (mul,%A0,%B0) CR_TAB
3241 AS2 (mov,%B0,r0) CR_TAB
3242 AS1 (clr,%A0) CR_TAB
3243 AS1 (clr,__zero_reg__));
3244 }
3245 if (AVR_HAVE_MUL && scratch)
3246 {
3247 *len = 5;
3248 return (AS2 (ldi,%3,0x40) CR_TAB
3249 AS2 (mul,%A0,%3) CR_TAB
3250 AS2 (mov,%B0,r0) CR_TAB
3251 AS1 (clr,%A0) CR_TAB
3252 AS1 (clr,__zero_reg__));
3253 }
3254 if (optimize_size && ldi_ok)
3255 {
3256 *len = 5;
3257 return (AS2 (mov,%B0,%A0) CR_TAB
3258 AS2 (ldi,%A0,6) "\n1:\t"
3259 AS1 (lsl,%B0) CR_TAB
3260 AS1 (dec,%A0) CR_TAB
3261 AS1 (brne,1b));
3262 }
3263 if (optimize_size && scratch)
3264 break; /* 5 */
3265 *len = 6;
3266 return (AS1 (clr,%B0) CR_TAB
3267 AS1 (lsr,%A0) CR_TAB
3268 AS1 (ror,%B0) CR_TAB
3269 AS1 (lsr,%A0) CR_TAB
3270 AS1 (ror,%B0) CR_TAB
3271 AS1 (clr,%A0));
3272
3273 case 15:
3274 *len = 4;
3275 return (AS1 (clr,%B0) CR_TAB
3276 AS1 (lsr,%A0) CR_TAB
3277 AS1 (ror,%B0) CR_TAB
3278 AS1 (clr,%A0));
3279 }
3280 len = t;
3281 }
3282 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3283 AS1 (rol,%B0)),
3284 insn, operands, len, 2);
3285 return "";
3286 }
3287
3288
3289 /* 32bit shift left ((long)x << i) */
3290
3291 const char *
3292 ashlsi3_out (rtx insn, rtx operands[], int *len)
3293 {
3294 if (GET_CODE (operands[2]) == CONST_INT)
3295 {
3296 int k;
3297 int *t = len;
3298
3299 if (!len)
3300 len = &k;
3301
3302 switch (INTVAL (operands[2]))
3303 {
3304 default:
3305 if (INTVAL (operands[2]) < 32)
3306 break;
3307
3308 if (AVR_HAVE_MOVW)
3309 return *len = 3, (AS1 (clr,%D0) CR_TAB
3310 AS1 (clr,%C0) CR_TAB
3311 AS2 (movw,%A0,%C0));
3312 *len = 4;
3313 return (AS1 (clr,%D0) CR_TAB
3314 AS1 (clr,%C0) CR_TAB
3315 AS1 (clr,%B0) CR_TAB
3316 AS1 (clr,%A0));
3317
3318 case 8:
3319 {
3320 int reg0 = true_regnum (operands[0]);
3321 int reg1 = true_regnum (operands[1]);
3322 *len = 4;
3323 if (reg0 >= reg1)
3324 return (AS2 (mov,%D0,%C1) CR_TAB
3325 AS2 (mov,%C0,%B1) CR_TAB
3326 AS2 (mov,%B0,%A1) CR_TAB
3327 AS1 (clr,%A0));
3328 else
3329 return (AS1 (clr,%A0) CR_TAB
3330 AS2 (mov,%B0,%A1) CR_TAB
3331 AS2 (mov,%C0,%B1) CR_TAB
3332 AS2 (mov,%D0,%C1));
3333 }
3334
3335 case 16:
3336 {
3337 int reg0 = true_regnum (operands[0]);
3338 int reg1 = true_regnum (operands[1]);
3339 if (reg0 + 2 == reg1)
3340 return *len = 2, (AS1 (clr,%B0) CR_TAB
3341 AS1 (clr,%A0));
3342 if (AVR_HAVE_MOVW)
3343 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3344 AS1 (clr,%B0) CR_TAB
3345 AS1 (clr,%A0));
3346 else
3347 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3348 AS2 (mov,%D0,%B1) CR_TAB
3349 AS1 (clr,%B0) CR_TAB
3350 AS1 (clr,%A0));
3351 }
3352
3353 case 24:
3354 *len = 4;
3355 return (AS2 (mov,%D0,%A1) CR_TAB
3356 AS1 (clr,%C0) CR_TAB
3357 AS1 (clr,%B0) CR_TAB
3358 AS1 (clr,%A0));
3359
3360 case 31:
3361 *len = 6;
3362 return (AS1 (clr,%D0) CR_TAB
3363 AS1 (lsr,%A0) CR_TAB
3364 AS1 (ror,%D0) CR_TAB
3365 AS1 (clr,%C0) CR_TAB
3366 AS1 (clr,%B0) CR_TAB
3367 AS1 (clr,%A0));
3368 }
3369 len = t;
3370 }
3371 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3372 AS1 (rol,%B0) CR_TAB
3373 AS1 (rol,%C0) CR_TAB
3374 AS1 (rol,%D0)),
3375 insn, operands, len, 4);
3376 return "";
3377 }
3378
3379 /* 8bit arithmetic shift right ((signed char)x >> i) */
3380
3381 const char *
3382 ashrqi3_out (rtx insn, rtx operands[], int *len)
3383 {
3384 if (GET_CODE (operands[2]) == CONST_INT)
3385 {
3386 int k;
3387
3388 if (!len)
3389 len = &k;
3390
3391 switch (INTVAL (operands[2]))
3392 {
3393 case 1:
3394 *len = 1;
3395 return AS1 (asr,%0);
3396
3397 case 2:
3398 *len = 2;
3399 return (AS1 (asr,%0) CR_TAB
3400 AS1 (asr,%0));
3401
3402 case 3:
3403 *len = 3;
3404 return (AS1 (asr,%0) CR_TAB
3405 AS1 (asr,%0) CR_TAB
3406 AS1 (asr,%0));
3407
3408 case 4:
3409 *len = 4;
3410 return (AS1 (asr,%0) CR_TAB
3411 AS1 (asr,%0) CR_TAB
3412 AS1 (asr,%0) CR_TAB
3413 AS1 (asr,%0));
3414
3415 case 5:
3416 *len = 5;
3417 return (AS1 (asr,%0) CR_TAB
3418 AS1 (asr,%0) CR_TAB
3419 AS1 (asr,%0) CR_TAB
3420 AS1 (asr,%0) CR_TAB
3421 AS1 (asr,%0));
3422
3423 case 6:
3424 *len = 4;
3425 return (AS2 (bst,%0,6) CR_TAB
3426 AS1 (lsl,%0) CR_TAB
3427 AS2 (sbc,%0,%0) CR_TAB
3428 AS2 (bld,%0,0));
3429
3430 default:
3431 if (INTVAL (operands[2]) < 8)
3432 break;
3433
3434 /* fall through */
3435
3436 case 7:
3437 *len = 2;
3438 return (AS1 (lsl,%0) CR_TAB
3439 AS2 (sbc,%0,%0));
3440 }
3441 }
3442 else if (CONSTANT_P (operands[2]))
3443 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3444
3445 out_shift_with_cnt (AS1 (asr,%0),
3446 insn, operands, len, 1);
3447 return "";
3448 }
3449
3450
3451 /* 16bit arithmetic shift right ((signed short)x >> i) */
3452
3453 const char *
3454 ashrhi3_out (rtx insn, rtx operands[], int *len)
3455 {
3456 if (GET_CODE (operands[2]) == CONST_INT)
3457 {
3458 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3459 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3460 int k;
3461 int *t = len;
3462
3463 if (!len)
3464 len = &k;
3465
3466 switch (INTVAL (operands[2]))
3467 {
3468 case 4:
3469 case 5:
3470 /* XXX try to optimize this too? */
3471 break;
3472
3473 case 6:
3474 if (optimize_size)
3475 break; /* scratch ? 5 : 6 */
3476 *len = 8;
3477 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3478 AS2 (mov,%A0,%B0) CR_TAB
3479 AS1 (lsl,__tmp_reg__) CR_TAB
3480 AS1 (rol,%A0) CR_TAB
3481 AS2 (sbc,%B0,%B0) CR_TAB
3482 AS1 (lsl,__tmp_reg__) CR_TAB
3483 AS1 (rol,%A0) CR_TAB
3484 AS1 (rol,%B0));
3485
3486 case 7:
3487 *len = 4;
3488 return (AS1 (lsl,%A0) CR_TAB
3489 AS2 (mov,%A0,%B0) CR_TAB
3490 AS1 (rol,%A0) CR_TAB
3491 AS2 (sbc,%B0,%B0));
3492
3493 case 8:
3494 {
3495 int reg0 = true_regnum (operands[0]);
3496 int reg1 = true_regnum (operands[1]);
3497
3498 if (reg0 == reg1)
3499 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3500 AS1 (lsl,%B0) CR_TAB
3501 AS2 (sbc,%B0,%B0));
3502 else
3503 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3504 AS1 (clr,%B0) CR_TAB
3505 AS2 (sbrc,%A0,7) CR_TAB
3506 AS1 (dec,%B0));
3507 }
3508
3509 case 9:
3510 *len = 4;
3511 return (AS2 (mov,%A0,%B0) CR_TAB
3512 AS1 (lsl,%B0) CR_TAB
3513 AS2 (sbc,%B0,%B0) CR_TAB
3514 AS1 (asr,%A0));
3515
3516 case 10:
3517 *len = 5;
3518 return (AS2 (mov,%A0,%B0) CR_TAB
3519 AS1 (lsl,%B0) CR_TAB
3520 AS2 (sbc,%B0,%B0) CR_TAB
3521 AS1 (asr,%A0) CR_TAB
3522 AS1 (asr,%A0));
3523
3524 case 11:
3525 if (AVR_HAVE_MUL && ldi_ok)
3526 {
3527 *len = 5;
3528 return (AS2 (ldi,%A0,0x20) CR_TAB
3529 AS2 (muls,%B0,%A0) CR_TAB
3530 AS2 (mov,%A0,r1) CR_TAB
3531 AS2 (sbc,%B0,%B0) CR_TAB
3532 AS1 (clr,__zero_reg__));
3533 }
3534 if (optimize_size && scratch)
3535 break; /* 5 */
3536 *len = 6;
3537 return (AS2 (mov,%A0,%B0) CR_TAB
3538 AS1 (lsl,%B0) CR_TAB
3539 AS2 (sbc,%B0,%B0) CR_TAB
3540 AS1 (asr,%A0) CR_TAB
3541 AS1 (asr,%A0) CR_TAB
3542 AS1 (asr,%A0));
3543
3544 case 12:
3545 if (AVR_HAVE_MUL && ldi_ok)
3546 {
3547 *len = 5;
3548 return (AS2 (ldi,%A0,0x10) CR_TAB
3549 AS2 (muls,%B0,%A0) CR_TAB
3550 AS2 (mov,%A0,r1) CR_TAB
3551 AS2 (sbc,%B0,%B0) CR_TAB
3552 AS1 (clr,__zero_reg__));
3553 }
3554 if (optimize_size && scratch)
3555 break; /* 5 */
3556 *len = 7;
3557 return (AS2 (mov,%A0,%B0) CR_TAB
3558 AS1 (lsl,%B0) CR_TAB
3559 AS2 (sbc,%B0,%B0) CR_TAB
3560 AS1 (asr,%A0) CR_TAB
3561 AS1 (asr,%A0) CR_TAB
3562 AS1 (asr,%A0) CR_TAB
3563 AS1 (asr,%A0));
3564
3565 case 13:
3566 if (AVR_HAVE_MUL && ldi_ok)
3567 {
3568 *len = 5;
3569 return (AS2 (ldi,%A0,0x08) CR_TAB
3570 AS2 (muls,%B0,%A0) CR_TAB
3571 AS2 (mov,%A0,r1) CR_TAB
3572 AS2 (sbc,%B0,%B0) CR_TAB
3573 AS1 (clr,__zero_reg__));
3574 }
3575 if (optimize_size)
3576 break; /* scratch ? 5 : 7 */
3577 *len = 8;
3578 return (AS2 (mov,%A0,%B0) CR_TAB
3579 AS1 (lsl,%B0) CR_TAB
3580 AS2 (sbc,%B0,%B0) CR_TAB
3581 AS1 (asr,%A0) CR_TAB
3582 AS1 (asr,%A0) CR_TAB
3583 AS1 (asr,%A0) CR_TAB
3584 AS1 (asr,%A0) CR_TAB
3585 AS1 (asr,%A0));
3586
3587 case 14:
3588 *len = 5;
3589 return (AS1 (lsl,%B0) CR_TAB
3590 AS2 (sbc,%A0,%A0) CR_TAB
3591 AS1 (lsl,%B0) CR_TAB
3592 AS2 (mov,%B0,%A0) CR_TAB
3593 AS1 (rol,%A0));
3594
3595 default:
3596 if (INTVAL (operands[2]) < 16)
3597 break;
3598
3599 /* fall through */
3600
3601 case 15:
3602 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3603 AS2 (sbc,%A0,%A0) CR_TAB
3604 AS2 (mov,%B0,%A0));
3605 }
3606 len = t;
3607 }
3608 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3609 AS1 (ror,%A0)),
3610 insn, operands, len, 2);
3611 return "";
3612 }
3613
3614
3615 /* 32bit arithmetic shift right ((signed long)x >> i) */
3616
3617 const char *
3618 ashrsi3_out (rtx insn, rtx operands[], int *len)
3619 {
3620 if (GET_CODE (operands[2]) == CONST_INT)
3621 {
3622 int k;
3623 int *t = len;
3624
3625 if (!len)
3626 len = &k;
3627
3628 switch (INTVAL (operands[2]))
3629 {
3630 case 8:
3631 {
3632 int reg0 = true_regnum (operands[0]);
3633 int reg1 = true_regnum (operands[1]);
3634 *len=6;
3635 if (reg0 <= reg1)
3636 return (AS2 (mov,%A0,%B1) CR_TAB
3637 AS2 (mov,%B0,%C1) CR_TAB
3638 AS2 (mov,%C0,%D1) CR_TAB
3639 AS1 (clr,%D0) CR_TAB
3640 AS2 (sbrc,%C0,7) CR_TAB
3641 AS1 (dec,%D0));
3642 else
3643 return (AS1 (clr,%D0) CR_TAB
3644 AS2 (sbrc,%D1,7) CR_TAB
3645 AS1 (dec,%D0) CR_TAB
3646 AS2 (mov,%C0,%D1) CR_TAB
3647 AS2 (mov,%B0,%C1) CR_TAB
3648 AS2 (mov,%A0,%B1));
3649 }
3650
3651 case 16:
3652 {
3653 int reg0 = true_regnum (operands[0]);
3654 int reg1 = true_regnum (operands[1]);
3655
3656 if (reg0 == reg1 + 2)
3657 return *len = 4, (AS1 (clr,%D0) CR_TAB
3658 AS2 (sbrc,%B0,7) CR_TAB
3659 AS1 (com,%D0) CR_TAB
3660 AS2 (mov,%C0,%D0));
3661 if (AVR_HAVE_MOVW)
3662 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3663 AS1 (clr,%D0) CR_TAB
3664 AS2 (sbrc,%B0,7) CR_TAB
3665 AS1 (com,%D0) CR_TAB
3666 AS2 (mov,%C0,%D0));
3667 else
3668 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3669 AS2 (mov,%A0,%C1) CR_TAB
3670 AS1 (clr,%D0) CR_TAB
3671 AS2 (sbrc,%B0,7) CR_TAB
3672 AS1 (com,%D0) CR_TAB
3673 AS2 (mov,%C0,%D0));
3674 }
3675
3676 case 24:
3677 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3678 AS1 (clr,%D0) CR_TAB
3679 AS2 (sbrc,%A0,7) CR_TAB
3680 AS1 (com,%D0) CR_TAB
3681 AS2 (mov,%B0,%D0) CR_TAB
3682 AS2 (mov,%C0,%D0));
3683
3684 default:
3685 if (INTVAL (operands[2]) < 32)
3686 break;
3687
3688 /* fall through */
3689
3690 case 31:
3691 if (AVR_HAVE_MOVW)
3692 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3693 AS2 (sbc,%A0,%A0) CR_TAB
3694 AS2 (mov,%B0,%A0) CR_TAB
3695 AS2 (movw,%C0,%A0));
3696 else
3697 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3698 AS2 (sbc,%A0,%A0) CR_TAB
3699 AS2 (mov,%B0,%A0) CR_TAB
3700 AS2 (mov,%C0,%A0) CR_TAB
3701 AS2 (mov,%D0,%A0));
3702 }
3703 len = t;
3704 }
3705 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3706 AS1 (ror,%C0) CR_TAB
3707 AS1 (ror,%B0) CR_TAB
3708 AS1 (ror,%A0)),
3709 insn, operands, len, 4);
3710 return "";
3711 }
3712
3713 /* 8bit logic shift right ((unsigned char)x >> i) */
3714
3715 const char *
3716 lshrqi3_out (rtx insn, rtx operands[], int *len)
3717 {
3718 if (GET_CODE (operands[2]) == CONST_INT)
3719 {
3720 int k;
3721
3722 if (!len)
3723 len = &k;
3724
3725 switch (INTVAL (operands[2]))
3726 {
3727 default:
3728 if (INTVAL (operands[2]) < 8)
3729 break;
3730
3731 *len = 1;
3732 return AS1 (clr,%0);
3733
3734 case 1:
3735 *len = 1;
3736 return AS1 (lsr,%0);
3737
3738 case 2:
3739 *len = 2;
3740 return (AS1 (lsr,%0) CR_TAB
3741 AS1 (lsr,%0));
3742 case 3:
3743 *len = 3;
3744 return (AS1 (lsr,%0) CR_TAB
3745 AS1 (lsr,%0) CR_TAB
3746 AS1 (lsr,%0));
3747
3748 case 4:
3749 if (test_hard_reg_class (LD_REGS, operands[0]))
3750 {
3751 *len=2;
3752 return (AS1 (swap,%0) CR_TAB
3753 AS2 (andi,%0,0x0f));
3754 }
3755 *len = 4;
3756 return (AS1 (lsr,%0) CR_TAB
3757 AS1 (lsr,%0) CR_TAB
3758 AS1 (lsr,%0) CR_TAB
3759 AS1 (lsr,%0));
3760
3761 case 5:
3762 if (test_hard_reg_class (LD_REGS, operands[0]))
3763 {
3764 *len = 3;
3765 return (AS1 (swap,%0) CR_TAB
3766 AS1 (lsr,%0) CR_TAB
3767 AS2 (andi,%0,0x7));
3768 }
3769 *len = 5;
3770 return (AS1 (lsr,%0) CR_TAB
3771 AS1 (lsr,%0) CR_TAB
3772 AS1 (lsr,%0) CR_TAB
3773 AS1 (lsr,%0) CR_TAB
3774 AS1 (lsr,%0));
3775
3776 case 6:
3777 if (test_hard_reg_class (LD_REGS, operands[0]))
3778 {
3779 *len = 4;
3780 return (AS1 (swap,%0) CR_TAB
3781 AS1 (lsr,%0) CR_TAB
3782 AS1 (lsr,%0) CR_TAB
3783 AS2 (andi,%0,0x3));
3784 }
3785 *len = 6;
3786 return (AS1 (lsr,%0) CR_TAB
3787 AS1 (lsr,%0) CR_TAB
3788 AS1 (lsr,%0) CR_TAB
3789 AS1 (lsr,%0) CR_TAB
3790 AS1 (lsr,%0) CR_TAB
3791 AS1 (lsr,%0));
3792
3793 case 7:
3794 *len = 3;
3795 return (AS1 (rol,%0) CR_TAB
3796 AS1 (clr,%0) CR_TAB
3797 AS1 (rol,%0));
3798 }
3799 }
3800 else if (CONSTANT_P (operands[2]))
3801 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3802
3803 out_shift_with_cnt (AS1 (lsr,%0),
3804 insn, operands, len, 1);
3805 return "";
3806 }
3807
3808 /* 16bit logic shift right ((unsigned short)x >> i) */
3809
3810 const char *
3811 lshrhi3_out (rtx insn, rtx operands[], int *len)
3812 {
3813 if (GET_CODE (operands[2]) == CONST_INT)
3814 {
3815 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3816 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3817 int k;
3818 int *t = len;
3819
3820 if (!len)
3821 len = &k;
3822
3823 switch (INTVAL (operands[2]))
3824 {
3825 default:
3826 if (INTVAL (operands[2]) < 16)
3827 break;
3828
3829 *len = 2;
3830 return (AS1 (clr,%B0) CR_TAB
3831 AS1 (clr,%A0));
3832
3833 case 4:
3834 if (optimize_size && scratch)
3835 break; /* 5 */
3836 if (ldi_ok)
3837 {
3838 *len = 6;
3839 return (AS1 (swap,%B0) CR_TAB
3840 AS1 (swap,%A0) CR_TAB
3841 AS2 (andi,%A0,0x0f) CR_TAB
3842 AS2 (eor,%A0,%B0) CR_TAB
3843 AS2 (andi,%B0,0x0f) CR_TAB
3844 AS2 (eor,%A0,%B0));
3845 }
3846 if (scratch)
3847 {
3848 *len = 7;
3849 return (AS1 (swap,%B0) CR_TAB
3850 AS1 (swap,%A0) CR_TAB
3851 AS2 (ldi,%3,0x0f) CR_TAB
3852 "and %A0,%3" CR_TAB
3853 AS2 (eor,%A0,%B0) CR_TAB
3854 "and %B0,%3" CR_TAB
3855 AS2 (eor,%A0,%B0));
3856 }
3857 break; /* optimize_size ? 6 : 8 */
3858
3859 case 5:
3860 if (optimize_size)
3861 break; /* scratch ? 5 : 6 */
3862 if (ldi_ok)
3863 {
3864 *len = 8;
3865 return (AS1 (lsr,%B0) CR_TAB
3866 AS1 (ror,%A0) CR_TAB
3867 AS1 (swap,%B0) CR_TAB
3868 AS1 (swap,%A0) CR_TAB
3869 AS2 (andi,%A0,0x0f) CR_TAB
3870 AS2 (eor,%A0,%B0) CR_TAB
3871 AS2 (andi,%B0,0x0f) CR_TAB
3872 AS2 (eor,%A0,%B0));
3873 }
3874 if (scratch)
3875 {
3876 *len = 9;
3877 return (AS1 (lsr,%B0) CR_TAB
3878 AS1 (ror,%A0) CR_TAB
3879 AS1 (swap,%B0) CR_TAB
3880 AS1 (swap,%A0) CR_TAB
3881 AS2 (ldi,%3,0x0f) CR_TAB
3882 "and %A0,%3" CR_TAB
3883 AS2 (eor,%A0,%B0) CR_TAB
3884 "and %B0,%3" CR_TAB
3885 AS2 (eor,%A0,%B0));
3886 }
3887 break; /* 10 */
3888
3889 case 6:
3890 if (optimize_size)
3891 break; /* scratch ? 5 : 6 */
3892 *len = 9;
3893 return (AS1 (clr,__tmp_reg__) CR_TAB
3894 AS1 (lsl,%A0) CR_TAB
3895 AS1 (rol,%B0) CR_TAB
3896 AS1 (rol,__tmp_reg__) CR_TAB
3897 AS1 (lsl,%A0) CR_TAB
3898 AS1 (rol,%B0) CR_TAB
3899 AS1 (rol,__tmp_reg__) CR_TAB
3900 AS2 (mov,%A0,%B0) CR_TAB
3901 AS2 (mov,%B0,__tmp_reg__));
3902
3903 case 7:
3904 *len = 5;
3905 return (AS1 (lsl,%A0) CR_TAB
3906 AS2 (mov,%A0,%B0) CR_TAB
3907 AS1 (rol,%A0) CR_TAB
3908 AS2 (sbc,%B0,%B0) CR_TAB
3909 AS1 (neg,%B0));
3910
3911 case 8:
3912 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3913 AS1 (clr,%B0));
3914
3915 case 9:
3916 *len = 3;
3917 return (AS2 (mov,%A0,%B0) CR_TAB
3918 AS1 (clr,%B0) CR_TAB
3919 AS1 (lsr,%A0));
3920
3921 case 10:
3922 *len = 4;
3923 return (AS2 (mov,%A0,%B0) CR_TAB
3924 AS1 (clr,%B0) CR_TAB
3925 AS1 (lsr,%A0) CR_TAB
3926 AS1 (lsr,%A0));
3927
3928 case 11:
3929 *len = 5;
3930 return (AS2 (mov,%A0,%B0) CR_TAB
3931 AS1 (clr,%B0) CR_TAB
3932 AS1 (lsr,%A0) CR_TAB
3933 AS1 (lsr,%A0) CR_TAB
3934 AS1 (lsr,%A0));
3935
3936 case 12:
3937 if (ldi_ok)
3938 {
3939 *len = 4;
3940 return (AS2 (mov,%A0,%B0) CR_TAB
3941 AS1 (clr,%B0) CR_TAB
3942 AS1 (swap,%A0) CR_TAB
3943 AS2 (andi,%A0,0x0f));
3944 }
3945 if (scratch)
3946 {
3947 *len = 5;
3948 return (AS2 (mov,%A0,%B0) CR_TAB
3949 AS1 (clr,%B0) CR_TAB
3950 AS1 (swap,%A0) CR_TAB
3951 AS2 (ldi,%3,0x0f) CR_TAB
3952 "and %A0,%3");
3953 }
3954 *len = 6;
3955 return (AS2 (mov,%A0,%B0) CR_TAB
3956 AS1 (clr,%B0) CR_TAB
3957 AS1 (lsr,%A0) CR_TAB
3958 AS1 (lsr,%A0) CR_TAB
3959 AS1 (lsr,%A0) CR_TAB
3960 AS1 (lsr,%A0));
3961
3962 case 13:
3963 if (ldi_ok)
3964 {
3965 *len = 5;
3966 return (AS2 (mov,%A0,%B0) CR_TAB
3967 AS1 (clr,%B0) CR_TAB
3968 AS1 (swap,%A0) CR_TAB
3969 AS1 (lsr,%A0) CR_TAB
3970 AS2 (andi,%A0,0x07));
3971 }
3972 if (AVR_HAVE_MUL && scratch)
3973 {
3974 *len = 5;
3975 return (AS2 (ldi,%3,0x08) CR_TAB
3976 AS2 (mul,%B0,%3) CR_TAB
3977 AS2 (mov,%A0,r1) CR_TAB
3978 AS1 (clr,%B0) CR_TAB
3979 AS1 (clr,__zero_reg__));
3980 }
3981 if (optimize_size && scratch)
3982 break; /* 5 */
3983 if (scratch)
3984 {
3985 *len = 6;
3986 return (AS2 (mov,%A0,%B0) CR_TAB
3987 AS1 (clr,%B0) CR_TAB
3988 AS1 (swap,%A0) CR_TAB
3989 AS1 (lsr,%A0) CR_TAB
3990 AS2 (ldi,%3,0x07) CR_TAB
3991 "and %A0,%3");
3992 }
3993 if (AVR_HAVE_MUL)
3994 {
3995 *len = 6;
3996 return ("set" CR_TAB
3997 AS2 (bld,r1,3) CR_TAB
3998 AS2 (mul,%B0,r1) CR_TAB
3999 AS2 (mov,%A0,r1) CR_TAB
4000 AS1 (clr,%B0) CR_TAB
4001 AS1 (clr,__zero_reg__));
4002 }
4003 *len = 7;
4004 return (AS2 (mov,%A0,%B0) CR_TAB
4005 AS1 (clr,%B0) CR_TAB
4006 AS1 (lsr,%A0) CR_TAB
4007 AS1 (lsr,%A0) CR_TAB
4008 AS1 (lsr,%A0) CR_TAB
4009 AS1 (lsr,%A0) CR_TAB
4010 AS1 (lsr,%A0));
4011
4012 case 14:
4013 if (AVR_HAVE_MUL && ldi_ok)
4014 {
4015 *len = 5;
4016 return (AS2 (ldi,%A0,0x04) CR_TAB
4017 AS2 (mul,%B0,%A0) CR_TAB
4018 AS2 (mov,%A0,r1) CR_TAB
4019 AS1 (clr,%B0) CR_TAB
4020 AS1 (clr,__zero_reg__));
4021 }
4022 if (AVR_HAVE_MUL && scratch)
4023 {
4024 *len = 5;
4025 return (AS2 (ldi,%3,0x04) CR_TAB
4026 AS2 (mul,%B0,%3) CR_TAB
4027 AS2 (mov,%A0,r1) CR_TAB
4028 AS1 (clr,%B0) CR_TAB
4029 AS1 (clr,__zero_reg__));
4030 }
4031 if (optimize_size && ldi_ok)
4032 {
4033 *len = 5;
4034 return (AS2 (mov,%A0,%B0) CR_TAB
4035 AS2 (ldi,%B0,6) "\n1:\t"
4036 AS1 (lsr,%A0) CR_TAB
4037 AS1 (dec,%B0) CR_TAB
4038 AS1 (brne,1b));
4039 }
4040 if (optimize_size && scratch)
4041 break; /* 5 */
4042 *len = 6;
4043 return (AS1 (clr,%A0) CR_TAB
4044 AS1 (lsl,%B0) CR_TAB
4045 AS1 (rol,%A0) CR_TAB
4046 AS1 (lsl,%B0) CR_TAB
4047 AS1 (rol,%A0) CR_TAB
4048 AS1 (clr,%B0));
4049
4050 case 15:
4051 *len = 4;
4052 return (AS1 (clr,%A0) CR_TAB
4053 AS1 (lsl,%B0) CR_TAB
4054 AS1 (rol,%A0) CR_TAB
4055 AS1 (clr,%B0));
4056 }
4057 len = t;
4058 }
4059 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4060 AS1 (ror,%A0)),
4061 insn, operands, len, 2);
4062 return "";
4063 }
4064
4065 /* 32bit logic shift right ((unsigned long)x >> i) */
4066
4067 const char *
4068 lshrsi3_out (rtx insn, rtx operands[], int *len)
4069 {
4070 if (GET_CODE (operands[2]) == CONST_INT)
4071 {
4072 int k;
4073 int *t = len;
4074
4075 if (!len)
4076 len = &k;
4077
4078 switch (INTVAL (operands[2]))
4079 {
4080 default:
4081 if (INTVAL (operands[2]) < 32)
4082 break;
4083
4084 if (AVR_HAVE_MOVW)
4085 return *len = 3, (AS1 (clr,%D0) CR_TAB
4086 AS1 (clr,%C0) CR_TAB
4087 AS2 (movw,%A0,%C0));
4088 *len = 4;
4089 return (AS1 (clr,%D0) CR_TAB
4090 AS1 (clr,%C0) CR_TAB
4091 AS1 (clr,%B0) CR_TAB
4092 AS1 (clr,%A0));
4093
4094 case 8:
4095 {
4096 int reg0 = true_regnum (operands[0]);
4097 int reg1 = true_regnum (operands[1]);
4098 *len = 4;
4099 if (reg0 <= reg1)
4100 return (AS2 (mov,%A0,%B1) CR_TAB
4101 AS2 (mov,%B0,%C1) CR_TAB
4102 AS2 (mov,%C0,%D1) CR_TAB
4103 AS1 (clr,%D0));
4104 else
4105 return (AS1 (clr,%D0) CR_TAB
4106 AS2 (mov,%C0,%D1) CR_TAB
4107 AS2 (mov,%B0,%C1) CR_TAB
4108 AS2 (mov,%A0,%B1));
4109 }
4110
4111 case 16:
4112 {
4113 int reg0 = true_regnum (operands[0]);
4114 int reg1 = true_regnum (operands[1]);
4115
4116 if (reg0 == reg1 + 2)
4117 return *len = 2, (AS1 (clr,%C0) CR_TAB
4118 AS1 (clr,%D0));
4119 if (AVR_HAVE_MOVW)
4120 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4121 AS1 (clr,%C0) CR_TAB
4122 AS1 (clr,%D0));
4123 else
4124 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4125 AS2 (mov,%A0,%C1) CR_TAB
4126 AS1 (clr,%C0) CR_TAB
4127 AS1 (clr,%D0));
4128 }
4129
4130 case 24:
4131 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4132 AS1 (clr,%B0) CR_TAB
4133 AS1 (clr,%C0) CR_TAB
4134 AS1 (clr,%D0));
4135
4136 case 31:
4137 *len = 6;
4138 return (AS1 (clr,%A0) CR_TAB
4139 AS2 (sbrc,%D0,7) CR_TAB
4140 AS1 (inc,%A0) CR_TAB
4141 AS1 (clr,%B0) CR_TAB
4142 AS1 (clr,%C0) CR_TAB
4143 AS1 (clr,%D0));
4144 }
4145 len = t;
4146 }
4147 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4148 AS1 (ror,%C0) CR_TAB
4149 AS1 (ror,%B0) CR_TAB
4150 AS1 (ror,%A0)),
4151 insn, operands, len, 4);
4152 return "";
4153 }
4154
4155 /* Modifies the length assigned to instruction INSN.
4156 LEN is the initially computed length of the insn.  */
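/* This is the worker for the ADJUST_INSN_LENGTH macro: the length is
recomputed by re-running the output functions with a non-NULL length
pointer, which makes them count instructions instead of emitting
assembly.  */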
4157
4158 int
4159 adjust_insn_length (rtx insn, int len)
4160 {
4161 rtx patt = PATTERN (insn);
4162 rtx set;
4163
4164 if (GET_CODE (patt) == SET)
4165 {
4166 rtx op[10];
4167 op[1] = SET_SRC (patt);
4168 op[0] = SET_DEST (patt);
4169 if (general_operand (op[1], VOIDmode)
4170 && general_operand (op[0], VOIDmode))
4171 {
4172 switch (GET_MODE (op[0]))
4173 {
4174 case QImode:
4175 output_movqi (insn, op, &len);
4176 break;
4177 case HImode:
4178 output_movhi (insn, op, &len);
4179 break;
4180 case SImode:
4181 case SFmode:
4182 output_movsisf (insn, op, &len);
4183 break;
4184 default:
4185 break;
4186 }
4187 }
4188 else if (op[0] == cc0_rtx && REG_P (op[1]))
4189 {
4190 switch (GET_MODE (op[1]))
4191 {
4192 case HImode: out_tsthi (insn, op[1], &len); break;
4193 case SImode: out_tstsi (insn, op[1], &len); break;
4194 default: break;
4195 }
4196 }
4197 else if (GET_CODE (op[1]) == AND)
4198 {
4199 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4200 {
4201 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4202 if (GET_MODE (op[1]) == SImode)
4203 len = (((mask & 0xff) != 0xff)
4204 + ((mask & 0xff00) != 0xff00)
4205 + ((mask & 0xff0000L) != 0xff0000L)
4206 + ((mask & 0xff000000L) != 0xff000000L));
4207 else if (GET_MODE (op[1]) == HImode)
4208 len = (((mask & 0xff) != 0xff)
4209 + ((mask & 0xff00) != 0xff00));
4210 }
4211 }
4212 else if (GET_CODE (op[1]) == IOR)
4213 {
4214 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4215 {
4216 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4217 if (GET_MODE (op[1]) == SImode)
4218 len = (((mask & 0xff) != 0)
4219 + ((mask & 0xff00) != 0)
4220 + ((mask & 0xff0000L) != 0)
4221 + ((mask & 0xff000000L) != 0));
4222 else if (GET_MODE (op[1]) == HImode)
4223 len = (((mask & 0xff) != 0)
4224 + ((mask & 0xff00) != 0));
4225 }
4226 }
4227 }
4228 set = single_set (insn);
4229 if (set)
4230 {
4231 rtx op[10];
4232
4233 op[1] = SET_SRC (set);
4234 op[0] = SET_DEST (set);
4235
4236 if (GET_CODE (patt) == PARALLEL
4237 && general_operand (op[1], VOIDmode)
4238 && general_operand (op[0], VOIDmode))
4239 {
4240 if (XVECLEN (patt, 0) == 2)
4241 op[2] = XVECEXP (patt, 0, 1);
4242
4243 switch (GET_MODE (op[0]))
4244 {
4245 case QImode:
4246 len = 2;
4247 break;
4248 case HImode:
4249 output_reload_inhi (insn, op, &len);
4250 break;
4251 case SImode:
4252 case SFmode:
4253 output_reload_insisf (insn, op, &len);
4254 break;
4255 default:
4256 break;
4257 }
4258 }
4259 else if (GET_CODE (op[1]) == ASHIFT
4260 || GET_CODE (op[1]) == ASHIFTRT
4261 || GET_CODE (op[1]) == LSHIFTRT)
4262 {
4263 rtx ops[10];
4264 ops[0] = op[0];
4265 ops[1] = XEXP (op[1],0);
4266 ops[2] = XEXP (op[1],1);
4267 switch (GET_CODE (op[1]))
4268 {
4269 case ASHIFT:
4270 switch (GET_MODE (op[0]))
4271 {
4272 case QImode: ashlqi3_out (insn,ops,&len); break;
4273 case HImode: ashlhi3_out (insn,ops,&len); break;
4274 case SImode: ashlsi3_out (insn,ops,&len); break;
4275 default: break;
4276 }
4277 break;
4278 case ASHIFTRT:
4279 switch (GET_MODE (op[0]))
4280 {
4281 case QImode: ashrqi3_out (insn,ops,&len); break;
4282 case HImode: ashrhi3_out (insn,ops,&len); break;
4283 case SImode: ashrsi3_out (insn,ops,&len); break;
4284 default: break;
4285 }
4286 break;
4287 case LSHIFTRT:
4288 switch (GET_MODE (op[0]))
4289 {
4290 case QImode: lshrqi3_out (insn,ops,&len); break;
4291 case HImode: lshrhi3_out (insn,ops,&len); break;
4292 case SImode: lshrsi3_out (insn,ops,&len); break;
4293 default: break;
4294 }
4295 break;
4296 default:
4297 break;
4298 }
4299 }
4300 }
4301 return len;
4302 }
4303
4304 /* Return nonzero if register REG is dead after INSN.  */
4305
4306 int
4307 reg_unused_after (rtx insn, rtx reg)
4308 {
4309 return (dead_or_set_p (insn, reg)
4310 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4311 }
4312
4313 /* Return nonzero if REG is not used after INSN.
4314 We assume REG is a reload reg, and therefore does
4315 not live past labels. It may live past calls or jumps though. */
4316
4317 int
4318 _reg_unused_after (rtx insn, rtx reg)
4319 {
4320 enum rtx_code code;
4321 rtx set;
4322
4323 /* If the reg is set by this instruction, then it is safe for our
4324 case. Disregard the case where this is a store to memory, since
4325 we are checking a register used in the store address. */
4326 set = single_set (insn);
4327 if (set && GET_CODE (SET_DEST (set)) != MEM
4328 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4329 return 1;
4330
4331 while ((insn = NEXT_INSN (insn)))
4332 {
4333 rtx set;
4334 code = GET_CODE (insn);
4335
4336 #if 0
4337 /* If this is a label that existed before reload, then the register
4338 is dead here.  However, if this is a label added by reorg, then
4339 the register may still be live here. We can't tell the difference,
4340 so we just ignore labels completely. */
4341 if (code == CODE_LABEL)
4342 return 1;
4343 /* else */
4344 #endif
4345
4346 if (!INSN_P (insn))
4347 continue;
4348
4349 if (code == JUMP_INSN)
4350 return 0;
4351
4352 /* If this is a sequence, we must handle them all at once.
4353 We could have for instance a call that sets the target register,
4354 and an insn in a delay slot that uses the register. In this case,
4355 we must return 0. */
4356 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4357 {
4358 int i;
4359 int retval = 0;
4360
4361 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4362 {
4363 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4364 rtx set = single_set (this_insn);
4365
4366 if (GET_CODE (this_insn) == CALL_INSN)
4367 code = CALL_INSN;
4368 else if (GET_CODE (this_insn) == JUMP_INSN)
4369 {
4370 if (INSN_ANNULLED_BRANCH_P (this_insn))
4371 return 0;
4372 code = JUMP_INSN;
4373 }
4374
4375 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4376 return 0;
4377 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4378 {
4379 if (GET_CODE (SET_DEST (set)) != MEM)
4380 retval = 1;
4381 else
4382 return 0;
4383 }
4384 if (set == 0
4385 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4386 return 0;
4387 }
4388 if (retval == 1)
4389 return 1;
4390 else if (code == JUMP_INSN)
4391 return 0;
4392 }
4393
4394 if (code == CALL_INSN)
4395 {
4396 rtx tem;
4397 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4398 if (GET_CODE (XEXP (tem, 0)) == USE
4399 && REG_P (XEXP (XEXP (tem, 0), 0))
4400 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4401 return 0;
4402 if (call_used_regs[REGNO (reg)])
4403 return 1;
4404 }
4405
4406 set = single_set (insn);
4407
4408 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4409 return 0;
4410 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4411 return GET_CODE (SET_DEST (set)) != MEM;
4412 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4413 return 0;
4414 }
4415 return 1;
4416 }
4417
4418 /* Target hook for assembling integer objects. The AVR version needs
4419 special handling for references to certain labels. */
4420
4421 static bool
4422 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4423 {
4424 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4425 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4426 || GET_CODE (x) == LABEL_REF))
4427 {
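/* Code addresses are word addresses on AVR.  gs() makes the linker
emit the word address and, if needed, generate a jump stub for
targets that do not fit into 16 bits.  */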
4428 fputs ("\t.word\tgs(", asm_out_file);
4429 output_addr_const (asm_out_file, x);
4430 fputs (")\n", asm_out_file);
4431 return true;
4432 }
4433 return default_assemble_integer (x, size, aligned_p);
4434 }
4435
4436 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4437
4438 void
4439 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4440 {
4441
4442 /* If the function has the 'signal' or 'interrupt' attribute, test to
4443 make sure that the name of the function is "__vector_NN" so as to
4444 catch when the user misspells the interrupt vector name. */
4445
4446 if (cfun->machine->is_interrupt)
4447 {
4448 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4449 {
4450 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4451 "%qs appears to be a misspelled interrupt handler",
4452 name);
4453 }
4454 }
4455 else if (cfun->machine->is_signal)
4456 {
4457 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4458 {
4459 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4460 "%qs appears to be a misspelled signal handler",
4461 name);
4462 }
4463 }
4464
4465 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4466 ASM_OUTPUT_LABEL (file, name);
4467 }
4468
4469 /* The routine used to output NUL terminated strings. We use a special
4470 version of this for most svr4 targets because doing so makes the
4471 generated assembly code more compact (and thus faster to assemble)
4472 as well as more readable, especially for targets like the i386
4473 (where the only alternative is to output character sequences as
4474 comma separated lists of numbers). */
4475
4476 void
4477 gas_output_limited_string(FILE *file, const char *str)
4478 {
4479 const unsigned char *_limited_str = (const unsigned char *) str;
4480 unsigned ch;
4481 fprintf (file, "%s\"", STRING_ASM_OP);
4482 for (; (ch = *_limited_str); _limited_str++)
4483 {
4484 int escape;
4485 switch (escape = ESCAPES[ch])
4486 {
4487 case 0:
4488 putc (ch, file);
4489 break;
4490 case 1:
4491 fprintf (file, "\\%03o", ch);
4492 break;
4493 default:
4494 putc ('\\', file);
4495 putc (escape, file);
4496 break;
4497 }
4498 }
4499 fprintf (file, "\"\n");
4500 }
4501
4502 /* The routine used to output sequences of byte values. We use a special
4503 version of this for most svr4 targets because doing so makes the
4504 generated assembly code more compact (and thus faster to assemble)
4505 as well as more readable. Note that if we find subparts of the
4506 character sequence which end with NUL (and which are shorter than
4507 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4508
4509 void
4510 gas_output_ascii(FILE *file, const char *str, size_t length)
4511 {
4512 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4513 const unsigned char *limit = _ascii_bytes + length;
4514 unsigned bytes_in_chunk = 0;
4515 for (; _ascii_bytes < limit; _ascii_bytes++)
4516 {
4517 const unsigned char *p;
4518 if (bytes_in_chunk >= 60)
4519 {
4520 fprintf (file, "\"\n");
4521 bytes_in_chunk = 0;
4522 }
4523 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4524 continue;
4525 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4526 {
4527 if (bytes_in_chunk > 0)
4528 {
4529 fprintf (file, "\"\n");
4530 bytes_in_chunk = 0;
4531 }
4532 gas_output_limited_string (file, (const char*)_ascii_bytes);
4533 _ascii_bytes = p;
4534 }
4535 else
4536 {
4537 int escape;
4538 unsigned ch;
4539 if (bytes_in_chunk == 0)
4540 fprintf (file, "\t.ascii\t\"");
4541 switch (escape = ESCAPES[ch = *_ascii_bytes])
4542 {
4543 case 0:
4544 putc (ch, file);
4545 bytes_in_chunk++;
4546 break;
4547 case 1:
4548 fprintf (file, "\\%03o", ch);
4549 bytes_in_chunk += 4;
4550 break;
4551 default:
4552 putc ('\\', file);
4553 putc (escape, file);
4554 bytes_in_chunk += 2;
4555 break;
4556 }
4557 }
4558 }
4559 if (bytes_in_chunk > 0)
4560 fprintf (file, "\"\n");
4561 }
4562
4563 /* Return value is nonzero if pseudos that have been
4564 assigned to registers of class CLASS would likely be spilled
4565 because registers of CLASS are needed for spill registers. */
4566
4567 bool
4568 class_likely_spilled_p (int c)
4569 {
4570 return (c != ALL_REGS && c != ADDW_REGS);
4571 }
4572
4573 /* Valid attributes:
4574 progmem - put data into program memory;
4575 signal - make the function a hardware interrupt handler; interrupts
4576 remain disabled after the function prologue;
4577 interrupt - make the function a hardware interrupt handler; interrupts
4578 are re-enabled after the function prologue;
4579 naked - don't generate a function prologue/epilogue or the `ret' instruction.
4580 
4581 Only the `progmem' attribute is valid for a type. */
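/* Illustrative user-level declarations exercising these attributes (a sketch
   for reference only; `table', `__vector_3', `__vector_4' and `boot' are
   assumed names):

     const char table[] __attribute__ ((progmem)) = "kept in flash";
     void __vector_3 (void) __attribute__ ((signal));     interrupts stay off
     void __vector_4 (void) __attribute__ ((interrupt));  sei in the prologue
     void boot (void) __attribute__ ((naked));            no prologue/epilogue  */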
4582
4583 /* Handle a "progmem" attribute; arguments as in
4584 struct attribute_spec.handler. */
4585 static tree
4586 avr_handle_progmem_attribute (tree *node, tree name,
4587 tree args ATTRIBUTE_UNUSED,
4588 int flags ATTRIBUTE_UNUSED,
4589 bool *no_add_attrs)
4590 {
4591 if (DECL_P (*node))
4592 {
4593 if (TREE_CODE (*node) == TYPE_DECL)
4594 {
4595 /* This is really a decl attribute, not a type attribute,
4596 but try to handle it for GCC 3.0 backwards compatibility. */
4597
4598 tree type = TREE_TYPE (*node);
4599 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4600 tree newtype = build_type_attribute_variant (type, attr);
4601
4602 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4603 TREE_TYPE (*node) = newtype;
4604 *no_add_attrs = true;
4605 }
4606 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4607 {
4608 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4609 {
4610 warning (0, "only initialized variables can be placed into "
4611 "program memory area");
4612 *no_add_attrs = true;
4613 }
4614 }
4615 else
4616 {
4617 warning (OPT_Wattributes, "%qE attribute ignored",
4618 name);
4619 *no_add_attrs = true;
4620 }
4621 }
4622
4623 return NULL_TREE;
4624 }
4625
4626 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4627 struct attribute_spec.handler. */
4628
4629 static tree
4630 avr_handle_fndecl_attribute (tree *node, tree name,
4631 tree args ATTRIBUTE_UNUSED,
4632 int flags ATTRIBUTE_UNUSED,
4633 bool *no_add_attrs)
4634 {
4635 if (TREE_CODE (*node) != FUNCTION_DECL)
4636 {
4637 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4638 name);
4639 *no_add_attrs = true;
4640 }
4641
4642 return NULL_TREE;
4643 }
4644
4645 static tree
4646 avr_handle_fntype_attribute (tree *node, tree name,
4647 tree args ATTRIBUTE_UNUSED,
4648 int flags ATTRIBUTE_UNUSED,
4649 bool *no_add_attrs)
4650 {
4651 if (TREE_CODE (*node) != FUNCTION_TYPE)
4652 {
4653 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4654 name);
4655 *no_add_attrs = true;
4656 }
4657
4658 return NULL_TREE;
4659 }
4660
4661 /* Look for the attribute `progmem' in DECL;
4662 return 1 if it is found, otherwise return 0. */
4663
4664 int
4665 avr_progmem_p (tree decl, tree attributes)
4666 {
4667 tree a;
4668
4669 if (TREE_CODE (decl) != VAR_DECL)
4670 return 0;
4671
4672 if (NULL_TREE
4673 != lookup_attribute ("progmem", attributes))
4674 return 1;
4675
4676 a = decl;
4677 do
4678 a = TREE_TYPE(a);
4679 while (TREE_CODE (a) == ARRAY_TYPE);
4680
4681 if (a == error_mark_node)
4682 return 0;
4683
4684 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4685 return 1;
4686
4687 return 0;
4688 }
4689
4690 /* Add the section attribute if the variable is in progmem. */
4691
4692 static void
4693 avr_insert_attributes (tree node, tree *attributes)
4694 {
4695 if (TREE_CODE (node) == VAR_DECL
4696 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4697 && avr_progmem_p (node, *attributes))
4698 {
4699 static const char dsec[] = ".progmem.data";
4700 *attributes = tree_cons (get_identifier ("section"),
4701 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4702 *attributes);
4703
4704 /* ??? This seems sketchy. Why can't the user declare the
4705 thing const in the first place? */
4706 TREE_READONLY (node) = 1;
4707 }
4708 }
4709
4710 /* A get_unnamed_section callback for switching to progmem_section. */
4711
4712 static void
4713 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4714 {
4715 fprintf (asm_out_file,
4716 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4717 AVR_HAVE_JMP_CALL ? "a" : "ax");
4718 /* It should already be aligned; this is just to be safe if it isn't. */
4719 fprintf (asm_out_file, "\t.p2align 1\n");
4720 }
4721
4722 /* Implement TARGET_ASM_INIT_SECTIONS. */
4723
4724 static void
4725 avr_asm_init_sections (void)
4726 {
4727 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4728 avr_output_progmem_section_asm_op,
4729 NULL);
4730 readonly_data_section = data_section;
4731 }
4732
4733 static unsigned int
4734 avr_section_type_flags (tree decl, const char *name, int reloc)
4735 {
4736 unsigned int flags = default_section_type_flags (decl, name, reloc);
4737
4738 if (strncmp (name, ".noinit", 7) == 0)
4739 {
4740 if (decl && TREE_CODE (decl) == VAR_DECL
4741 && DECL_INITIAL (decl) == NULL_TREE)
4742 flags |= SECTION_BSS; /* @nobits */
4743 else
4744 warning (0, "only uninitialized variables can be placed in the "
4745 ".noinit section");
4746 }
4747
4748 return flags;
4749 }
4750
4751 /* Outputs some appropriate text to go at the start of an assembler
4752 file. */
4753
4754 static void
4755 avr_file_start (void)
4756 {
4757 if (avr_current_arch->asm_only)
4758 error ("MCU %qs supported for assembler only", avr_mcu_name);
4759
4760 default_file_start ();
4761
4762 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4763 fputs ("__SREG__ = 0x3f\n"
4764 "__SP_H__ = 0x3e\n"
4765 "__SP_L__ = 0x3d\n", asm_out_file);
4766
4767 fputs ("__tmp_reg__ = 0\n"
4768 "__zero_reg__ = 1\n", asm_out_file);
4769
4770 /* FIXME: output these only if there is anything in the .data / .bss
4771 sections - some code size could be saved by not linking in the
4772 initialization code from libgcc if one or both sections are empty. */
4773 fputs ("\t.global __do_copy_data\n", asm_out_file);
4774 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4775 }
4776
4777 /* Outputs to the stdio stream FILE some
4778 appropriate text to go at the end of an assembler file. */
4779
4780 static void
4781 avr_file_end (void)
4782 {
4783 }
4784
4785 /* Choose the order in which to allocate hard registers for
4786 pseudo-registers local to a basic block.
4787
4788 Store the desired register order in the array `reg_alloc_order'.
4789 Element 0 should be the register to allocate first; element 1, the
4790 next register; and so on. */
4791
4792 void
4793 order_regs_for_local_alloc (void)
4794 {
4795 unsigned int i;
4796 static const int order_0[] = {
4797 24,25,
4798 18,19,
4799 20,21,
4800 22,23,
4801 30,31,
4802 26,27,
4803 28,29,
4804 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4805 0,1,
4806 32,33,34,35
4807 };
4808 static const int order_1[] = {
4809 18,19,
4810 20,21,
4811 22,23,
4812 24,25,
4813 30,31,
4814 26,27,
4815 28,29,
4816 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4817 0,1,
4818 32,33,34,35
4819 };
4820 static const int order_2[] = {
4821 25,24,
4822 23,22,
4823 21,20,
4824 19,18,
4825 30,31,
4826 26,27,
4827 28,29,
4828 17,16,
4829 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4830 1,0,
4831 32,33,34,35
4832 };
4833
4834 const int *order = (TARGET_ORDER_1 ? order_1 :
4835 TARGET_ORDER_2 ? order_2 :
4836 order_0);
4837 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4838 reg_alloc_order[i] = order[i];
4839 }
4840
4841
4842 /* Mutually recursive subroutine of avr_rtx_costs for calculating the
4843 cost of an RTX operand given its context. X is the rtx of the
4844 operand, MODE is its mode, and OUTER is the rtx_code of this
4845 operand's parent operator. */
4846
4847 static int
4848 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
4849 bool speed)
4850 {
4851 enum rtx_code code = GET_CODE (x);
4852 int total;
4853
4854 switch (code)
4855 {
4856 case REG:
4857 case SUBREG:
4858 return 0;
4859
4860 case CONST_INT:
4861 case CONST_DOUBLE:
4862 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
4863
4864 default:
4865 break;
4866 }
4867
4868 total = 0;
4869 avr_rtx_costs (x, code, outer, &total, speed);
4870 return total;
4871 }
4872
4873 /* The AVR backend's rtx_costs function. X is the rtx expression whose cost
4874 is to be calculated. Return true if the complete cost has been
4875 computed, and false if subexpressions should be scanned. In either
4876 case, *TOTAL contains the cost result. */
4877
4878 static bool
4879 avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
4880 bool speed)
4881 {
4882 enum rtx_code code = (enum rtx_code) codearg;
4883 enum machine_mode mode = GET_MODE (x);
4884 HOST_WIDE_INT val;
4885
4886 switch (code)
4887 {
4888 case CONST_INT:
4889 case CONST_DOUBLE:
4890 /* Immediate constants are as cheap as registers. */
4891 *total = 0;
4892 return true;
4893
4894 case MEM:
4895 case CONST:
4896 case LABEL_REF:
4897 case SYMBOL_REF:
4898 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4899 return true;
4900
4901 case NEG:
4902 switch (mode)
4903 {
4904 case QImode:
4905 case SFmode:
4906 *total = COSTS_N_INSNS (1);
4907 break;
4908
4909 case HImode:
4910 *total = COSTS_N_INSNS (3);
4911 break;
4912
4913 case SImode:
4914 *total = COSTS_N_INSNS (7);
4915 break;
4916
4917 default:
4918 return false;
4919 }
4920 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4921 return true;
4922
4923 case ABS:
4924 switch (mode)
4925 {
4926 case QImode:
4927 case SFmode:
4928 *total = COSTS_N_INSNS (1);
4929 break;
4930
4931 default:
4932 return false;
4933 }
4934 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4935 return true;
4936
4937 case NOT:
4938 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4939 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4940 return true;
4941
4942 case ZERO_EXTEND:
4943 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
4944 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4945 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4946 return true;
4947
4948 case SIGN_EXTEND:
4949 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
4950 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4951 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4952 return true;
4953
4954 case PLUS:
4955 switch (mode)
4956 {
4957 case QImode:
4958 *total = COSTS_N_INSNS (1);
4959 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4960 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
4961 break;
4962
4963 case HImode:
4964 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4965 {
4966 *total = COSTS_N_INSNS (2);
4967 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
4968 }
4969 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4970 *total = COSTS_N_INSNS (1);
4971 else
4972 *total = COSTS_N_INSNS (2);
4973 break;
4974
4975 case SImode:
4976 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4977 {
4978 *total = COSTS_N_INSNS (4);
4979 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
4980 }
4981 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4982 *total = COSTS_N_INSNS (1);
4983 else
4984 *total = COSTS_N_INSNS (4);
4985 break;
4986
4987 default:
4988 return false;
4989 }
4990 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4991 return true;
4992
4993 case MINUS:
4994 case AND:
4995 case IOR:
4996 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4997 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4998 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4999 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5000 return true;
5001
5002 case XOR:
5003 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5004 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5005 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5006 return true;
5007
5008 case MULT:
5009 switch (mode)
5010 {
5011 case QImode:
5012 if (AVR_HAVE_MUL)
5013 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5014 else if (!speed)
5015 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5016 else
5017 return false;
5018 break;
5019
5020 case HImode:
5021 if (AVR_HAVE_MUL)
5022 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5023 else if (!speed)
5024 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5025 else
5026 return false;
5027 break;
5028
5029 default:
5030 return false;
5031 }
5032 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5033 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5034 return true;
5035
5036 case DIV:
5037 case MOD:
5038 case UDIV:
5039 case UMOD:
5040 if (!speed)
5041 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5042 else
5043 return false;
5044 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5045 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5046 return true;
5047
5048 case ROTATE:
5049 switch (mode)
5050 {
5051 case QImode:
5052 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5053 *total = COSTS_N_INSNS (1);
5054
5055 break;
5056
5057 case HImode:
5058 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5059 *total = COSTS_N_INSNS (3);
5060
5061 break;
5062
5063 case SImode:
5064 if (CONST_INT_P (XEXP (x, 1)))
5065 switch (INTVAL (XEXP (x, 1)))
5066 {
5067 case 8:
5068 case 24:
5069 *total = COSTS_N_INSNS (5);
5070 break;
5071 case 16:
5072 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5073 break;
5074 }
5075 break;
5076
5077 default:
5078 return false;
5079 }
5080 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5081 return true;
5082
5083 case ASHIFT:
5084 switch (mode)
5085 {
5086 case QImode:
5087 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5088 {
5089 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5090 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5091 }
5092 else
5093 {
5094 val = INTVAL (XEXP (x, 1));
5095 if (val == 7)
5096 *total = COSTS_N_INSNS (3);
5097 else if (val >= 0 && val <= 7)
5098 *total = COSTS_N_INSNS (val);
5099 else
5100 *total = COSTS_N_INSNS (1);
5101 }
5102 break;
5103
5104 case HImode:
5105 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5106 {
5107 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5108 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5109 }
5110 else
5111 switch (INTVAL (XEXP (x, 1)))
5112 {
5113 case 0:
5114 *total = 0;
5115 break;
5116 case 1:
5117 case 8:
5118 *total = COSTS_N_INSNS (2);
5119 break;
5120 case 9:
5121 *total = COSTS_N_INSNS (3);
5122 break;
5123 case 2:
5124 case 3:
5125 case 10:
5126 case 15:
5127 *total = COSTS_N_INSNS (4);
5128 break;
5129 case 7:
5130 case 11:
5131 case 12:
5132 *total = COSTS_N_INSNS (5);
5133 break;
5134 case 4:
5135 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5136 break;
5137 case 6:
5138 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5139 break;
5140 case 5:
5141 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5142 break;
5143 default:
5144 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5145 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5146 }
5147 break;
5148
5149 case SImode:
5150 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5151 {
5152 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5153 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5154 }
5155 else
5156 switch (INTVAL (XEXP (x, 1)))
5157 {
5158 case 0:
5159 *total = 0;
5160 break;
5161 case 24:
5162 *total = COSTS_N_INSNS (3);
5163 break;
5164 case 1:
5165 case 8:
5166 case 16:
5167 *total = COSTS_N_INSNS (4);
5168 break;
5169 case 31:
5170 *total = COSTS_N_INSNS (6);
5171 break;
5172 case 2:
5173 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5174 break;
5175 default:
5176 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5177 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5178 }
5179 break;
5180
5181 default:
5182 return false;
5183 }
5184 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5185 return true;
5186
5187 case ASHIFTRT:
5188 switch (mode)
5189 {
5190 case QImode:
5191 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5192 {
5193 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5194 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5195 }
5196 else
5197 {
5198 val = INTVAL (XEXP (x, 1));
5199 if (val == 6)
5200 *total = COSTS_N_INSNS (4);
5201 else if (val == 7)
5202 *total = COSTS_N_INSNS (2);
5203 else if (val >= 0 && val <= 7)
5204 *total = COSTS_N_INSNS (val);
5205 else
5206 *total = COSTS_N_INSNS (1);
5207 }
5208 break;
5209
5210 case HImode:
5211 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5212 {
5213 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5214 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5215 }
5216 else
5217 switch (INTVAL (XEXP (x, 1)))
5218 {
5219 case 0:
5220 *total = 0;
5221 break;
5222 case 1:
5223 *total = COSTS_N_INSNS (2);
5224 break;
5225 case 15:
5226 *total = COSTS_N_INSNS (3);
5227 break;
5228 case 2:
5229 case 7:
5230 case 8:
5231 case 9:
5232 *total = COSTS_N_INSNS (4);
5233 break;
5234 case 10:
5235 case 14:
5236 *total = COSTS_N_INSNS (5);
5237 break;
5238 case 11:
5239 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5240 break;
5241 case 12:
5242 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5243 break;
5244 case 6:
5245 case 13:
5246 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5247 break;
5248 default:
5249 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5250 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5251 }
5252 break;
5253
5254 case SImode:
5255 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5256 {
5257 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5258 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5259 }
5260 else
5261 switch (INTVAL (XEXP (x, 1)))
5262 {
5263 case 0:
5264 *total = 0;
5265 break;
5266 case 1:
5267 *total = COSTS_N_INSNS (4);
5268 break;
5269 case 8:
5270 case 16:
5271 case 24:
5272 *total = COSTS_N_INSNS (6);
5273 break;
5274 case 2:
5275 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5276 break;
5277 case 31:
5278 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5279 break;
5280 default:
5281 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5282 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5283 }
5284 break;
5285
5286 default:
5287 return false;
5288 }
5289 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5290 return true;
5291
5292 case LSHIFTRT:
5293 switch (mode)
5294 {
5295 case QImode:
5296 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5297 {
5298 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5299 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5300 }
5301 else
5302 {
5303 val = INTVAL (XEXP (x, 1));
5304 if (val == 7)
5305 *total = COSTS_N_INSNS (3);
5306 else if (val >= 0 && val <= 7)
5307 *total = COSTS_N_INSNS (val);
5308 else
5309 *total = COSTS_N_INSNS (1);
5310 }
5311 break;
5312
5313 case HImode:
5314 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5315 {
5316 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5317 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5318 }
5319 else
5320 switch (INTVAL (XEXP (x, 1)))
5321 {
5322 case 0:
5323 *total = 0;
5324 break;
5325 case 1:
5326 case 8:
5327 *total = COSTS_N_INSNS (2);
5328 break;
5329 case 9:
5330 *total = COSTS_N_INSNS (3);
5331 break;
5332 case 2:
5333 case 10:
5334 case 15:
5335 *total = COSTS_N_INSNS (4);
5336 break;
5337 case 7:
5338 case 11:
5339 *total = COSTS_N_INSNS (5);
5340 break;
5341 case 3:
5342 case 12:
5343 case 13:
5344 case 14:
5345 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5346 break;
5347 case 4:
5348 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5349 break;
5350 case 5:
5351 case 6:
5352 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5353 break;
5354 default:
5355 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5356 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5357 }
5358 break;
5359
5360 case SImode:
5361 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5362 {
5363 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5364 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5365 }
5366 else
5367 switch (INTVAL (XEXP (x, 1)))
5368 {
5369 case 0:
5370 *total = 0;
5371 break;
5372 case 1:
5373 *total = COSTS_N_INSNS (4);
5374 break;
5375 case 2:
5376 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5377 break;
5378 case 8:
5379 case 16:
5380 case 24:
5381 *total = COSTS_N_INSNS (4);
5382 break;
5383 case 31:
5384 *total = COSTS_N_INSNS (6);
5385 break;
5386 default:
5387 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5388 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5389 }
5390 break;
5391
5392 default:
5393 return false;
5394 }
5395 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5396 return true;
5397
5398 case COMPARE:
5399 switch (GET_MODE (XEXP (x, 0)))
5400 {
5401 case QImode:
5402 *total = COSTS_N_INSNS (1);
5403 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5404 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5405 break;
5406
5407 case HImode:
5408 *total = COSTS_N_INSNS (2);
5409 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5410 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5411 else if (INTVAL (XEXP (x, 1)) != 0)
5412 *total += COSTS_N_INSNS (1);
5413 break;
5414
5415 case SImode:
5416 *total = COSTS_N_INSNS (4);
5417 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5418 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5419 else if (INTVAL (XEXP (x, 1)) != 0)
5420 *total += COSTS_N_INSNS (3);
5421 break;
5422
5423 default:
5424 return false;
5425 }
5426 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5427 return true;
5428
5429 default:
5430 break;
5431 }
5432 return false;
5433 }
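/* As a worked example of the hook above: (plus:HI (reg:HI 24) (const_int 5))
   falls into the HImode PLUS case with a constant in [-63, 63], so *total
   becomes COSTS_N_INSNS (1) and the REG operand adds nothing (illustrative;
   register number 24 is an arbitrary choice).  */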
5434
5435 /* Calculate the cost of a memory address. */
5436
5437 static int
5438 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5439 {
5440 if (GET_CODE (x) == PLUS
5441 && GET_CODE (XEXP (x,1)) == CONST_INT
5442 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5443 && INTVAL (XEXP (x,1)) >= 61)
5444 return 18;
5445 if (CONSTANT_ADDRESS_P (x))
5446 {
5447 if (optimize > 0 && io_address_operand (x, QImode))
5448 return 2;
5449 return 4;
5450 }
5451 return 4;
5452 }
5453
5454 /* Test for the extra memory constraint 'Q':
5455 a memory address based on the Y or Z pointer with a valid displacement. */
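/* For instance (illustrative RTL, assuming REG_Y is r28):

     (mem:HI (plus:HI (reg:HI 28) (const_int 2)))

   satisfies 'Q' because the displacement 2 is within MAX_LD_OFFSET (HImode).  */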
5456
5457 int
5458 extra_constraint_Q (rtx x)
5459 {
5460 if (GET_CODE (XEXP (x,0)) == PLUS
5461 && REG_P (XEXP (XEXP (x,0), 0))
5462 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5463 && (INTVAL (XEXP (XEXP (x,0), 1))
5464 <= MAX_LD_OFFSET (GET_MODE (x))))
5465 {
5466 rtx xx = XEXP (XEXP (x,0), 0);
5467 int regno = REGNO (xx);
5468 if (TARGET_ALL_DEBUG)
5469 {
5470 fprintf (stderr, ("extra_constraint:\n"
5471 "reload_completed: %d\n"
5472 "reload_in_progress: %d\n"),
5473 reload_completed, reload_in_progress);
5474 debug_rtx (x);
5475 }
5476 if (regno >= FIRST_PSEUDO_REGISTER)
5477 return 1; /* allocate pseudos */
5478 else if (regno == REG_Z || regno == REG_Y)
5479 return 1; /* strictly check */
5480 else if (xx == frame_pointer_rtx
5481 || xx == arg_pointer_rtx)
5482 return 1; /* XXX frame & arg pointer checks */
5483 }
5484 return 0;
5485 }
5486
5487 /* Convert condition code CONDITION to an equivalent AVR condition code. */
5488
5489 RTX_CODE
5490 avr_normalize_condition (RTX_CODE condition)
5491 {
5492 switch (condition)
5493 {
5494 case GT:
5495 return GE;
5496 case GTU:
5497 return GEU;
5498 case LE:
5499 return LT;
5500 case LEU:
5501 return LTU;
5502 default:
5503 gcc_unreachable ();
5504 }
5505 }
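/* avr_reorg (below) uses this together with a +1 adjustment of the constant
   operand; a sketch of the rewrite it performs:

     compare against (const_int 5) with a GT branch
       becomes
     compare against (const_int 6) with a GE branch

   since the AVR branch instructions directly support >= / < style tests
   but not > / <= ones.  */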
5506
5507 /* This function optimizes conditional jumps. */
5508
5509 static void
5510 avr_reorg (void)
5511 {
5512 rtx insn, pattern;
5513
5514 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5515 {
5516 if (! (GET_CODE (insn) == INSN
5517 || GET_CODE (insn) == CALL_INSN
5518 || GET_CODE (insn) == JUMP_INSN)
5519 || !single_set (insn))
5520 continue;
5521
5522 pattern = PATTERN (insn);
5523
5524 if (GET_CODE (pattern) == PARALLEL)
5525 pattern = XVECEXP (pattern, 0, 0);
5526 if (GET_CODE (pattern) == SET
5527 && SET_DEST (pattern) == cc0_rtx
5528 && compare_diff_p (insn))
5529 {
5530 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5531 {
5532 /* Now we are working on the compare insn. */
5533
5534 pattern = SET_SRC (pattern);
5535 if (true_regnum (XEXP (pattern,0)) >= 0
5536 && true_regnum (XEXP (pattern,1)) >= 0 )
5537 {
5538 rtx x = XEXP (pattern,0);
5539 rtx next = next_real_insn (insn);
5540 rtx pat = PATTERN (next);
5541 rtx src = SET_SRC (pat);
5542 rtx t = XEXP (src,0);
5543 PUT_CODE (t, swap_condition (GET_CODE (t)));
5544 XEXP (pattern,0) = XEXP (pattern,1);
5545 XEXP (pattern,1) = x;
5546 INSN_CODE (next) = -1;
5547 }
5548 else if (true_regnum (XEXP (pattern, 0)) >= 0
5549 && XEXP (pattern, 1) == const0_rtx)
5550 {
5551 /* This is a tst insn; we can reverse it. */
5552 rtx next = next_real_insn (insn);
5553 rtx pat = PATTERN (next);
5554 rtx src = SET_SRC (pat);
5555 rtx t = XEXP (src,0);
5556
5557 PUT_CODE (t, swap_condition (GET_CODE (t)));
5558 XEXP (pattern, 1) = XEXP (pattern, 0);
5559 XEXP (pattern, 0) = const0_rtx;
5560 INSN_CODE (next) = -1;
5561 INSN_CODE (insn) = -1;
5562 }
5563 else if (true_regnum (XEXP (pattern,0)) >= 0
5564 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5565 {
5566 rtx x = XEXP (pattern,1);
5567 rtx next = next_real_insn (insn);
5568 rtx pat = PATTERN (next);
5569 rtx src = SET_SRC (pat);
5570 rtx t = XEXP (src,0);
5571 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5572
5573 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5574 {
5575 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5576 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5577 INSN_CODE (next) = -1;
5578 INSN_CODE (insn) = -1;
5579 }
5580 }
5581 }
5582 }
5583 }
5584 }
5585
5586 /* Return the register number used for the function return value. */
5587
5588 int
5589 avr_ret_register (void)
5590 {
5591 return 24;
5592 }
5593
5594 /* Create an RTX representing the place where a
5595 library function returns a value of mode MODE. */
5596
5597 rtx
5598 avr_libcall_value (enum machine_mode mode)
5599 {
5600 int offs = GET_MODE_SIZE (mode);
5601 if (offs < 2)
5602 offs = 2;
5603 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5604 }
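/* Assuming RET_REGISTER evaluates to 24 (cf. avr_ret_register above), the
   formula places, for example, a QI/HImode result in r24, an SImode result
   starting at r22 and a DImode result starting at r18 (illustrative mapping
   derived from the code above).  */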
5605
5606 /* Create an RTX representing the place where a
5607 function returns a value of data type VALTYPE. */
5608
5609 rtx
5610 avr_function_value (const_tree type,
5611 const_tree func ATTRIBUTE_UNUSED,
5612 bool outgoing ATTRIBUTE_UNUSED)
5613 {
5614 unsigned int offs;
5615
5616 if (TYPE_MODE (type) != BLKmode)
5617 return avr_libcall_value (TYPE_MODE (type));
5618
5619 offs = int_size_in_bytes (type);
5620 if (offs < 2)
5621 offs = 2;
5622 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5623 offs = GET_MODE_SIZE (SImode);
5624 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5625 offs = GET_MODE_SIZE (DImode);
5626
5627 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5628 }
5629
5630 /* Places additional restrictions on the register class to
5631 use when it is necessary to copy value X into a register
5632 in class CLASS. */
5633
5634 enum reg_class
5635 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
5636 {
5637 return rclass;
5638 }
5639
5640 int
5641 test_hard_reg_class (enum reg_class rclass, rtx x)
5642 {
5643 int regno = true_regnum (x);
5644 if (regno < 0)
5645 return 0;
5646
5647 if (TEST_HARD_REG_CLASS (rclass, regno))
5648 return 1;
5649
5650 return 0;
5651 }
5652
5653
5654 int
5655 jump_over_one_insn_p (rtx insn, rtx dest)
5656 {
5657 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5658 ? XEXP (dest, 0)
5659 : dest);
5660 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5661 int dest_addr = INSN_ADDRESSES (uid);
5662 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5663 }
5664
5665 /* Return 1 if a value of mode MODE can be stored starting at hard
5666 register number REGNO. On the enhanced core, anything larger than
5667 one byte must start in an even-numbered register for "movw" to work
5668 (this way we don't have to check for odd registers everywhere). */
5669
5670 int
5671 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5672 {
5673 /* Disallow QImode in stack pointer regs. */
5674 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5675 return 0;
5676
5677 /* The only thing that can go into registers r28:r29 is a Pmode value. */
5678 if (regno == REG_Y && mode == Pmode)
5679 return 1;
5680
5681 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5682 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5683 return 0;
5684
5685 if (mode == QImode)
5686 return 1;
5687
5688 /* Modes larger than QImode occupy consecutive registers. */
5689 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5690 return 0;
5691
5692 /* All modes larger than QImode should start in an even register. */
5693 return !(regno & 1);
5694 }
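/* An illustrative consequence of the rules above: an HImode value may start
   at r24 (even) but not at r25 (odd), QImode is never given one of the stack
   pointer bytes, and a multi-byte value overlapping r28:r29 is rejected
   unless it is exactly a Pmode pointer held there.  */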
5695
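/* Output code that loads the HImode operand 1 (typically a constant) into
   register operand 0, using operand 2 as a scratch register; constants whose
   low byte is zero, whose high byte is zero, or whose two bytes are equal get
   shorter sequences.  If LEN is non-null, the instruction count is stored
   there.  */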
5696 const char *
5697 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5698 {
5699 int tmp;
5700 if (!len)
5701 len = &tmp;
5702
5703 if (GET_CODE (operands[1]) == CONST_INT)
5704 {
5705 int val = INTVAL (operands[1]);
5706 if ((val & 0xff) == 0)
5707 {
5708 *len = 3;
5709 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5710 AS2 (ldi,%2,hi8(%1)) CR_TAB
5711 AS2 (mov,%B0,%2));
5712 }
5713 else if ((val & 0xff00) == 0)
5714 {
5715 *len = 3;
5716 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5717 AS2 (mov,%A0,%2) CR_TAB
5718 AS2 (mov,%B0,__zero_reg__));
5719 }
5720 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5721 {
5722 *len = 3;
5723 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5724 AS2 (mov,%A0,%2) CR_TAB
5725 AS2 (mov,%B0,%2));
5726 }
5727 }
5728 *len = 4;
5729 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5730 AS2 (mov,%A0,%2) CR_TAB
5731 AS2 (ldi,%2,hi8(%1)) CR_TAB
5732 AS2 (mov,%B0,%2));
5733 }
5734
5735
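/* Output code that loads the SImode/SFmode operand 1 into register operand 0,
   using operand 2 as a scratch register and __zero_reg__ for constant bytes
   that are zero.  When LEN is non-null, only the instruction count is
   computed and stored; no code is emitted.  */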
5736 const char *
5737 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5738 {
5739 rtx src = operands[1];
5740 int cnst = (GET_CODE (src) == CONST_INT);
5741
5742 if (len)
5743 {
5744 if (cnst)
5745 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5746 + ((INTVAL (src) & 0xff00) != 0)
5747 + ((INTVAL (src) & 0xff0000) != 0)
5748 + ((INTVAL (src) & 0xff000000) != 0);
5749 else
5750 *len = 8;
5751
5752 return "";
5753 }
5754
5755 if (cnst && ((INTVAL (src) & 0xff) == 0))
5756 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5757 else
5758 {
5759 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5760 output_asm_insn (AS2 (mov, %A0, %2), operands);
5761 }
5762 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5763 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5764 else
5765 {
5766 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5767 output_asm_insn (AS2 (mov, %B0, %2), operands);
5768 }
5769 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5770 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5771 else
5772 {
5773 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5774 output_asm_insn (AS2 (mov, %C0, %2), operands);
5775 }
5776 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5777 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5778 else
5779 {
5780 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5781 output_asm_insn (AS2 (mov, %D0, %2), operands);
5782 }
5783 return "";
5784 }
5785
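/* Output a `bld' instruction that copies the T flag into bit BIT_NR of
   operand 0; the byte selector (%A0, %B0, ...) and the bit position are
   patched into the template string below.  */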
5786 void
5787 avr_output_bld (rtx operands[], int bit_nr)
5788 {
5789 static char s[] = "bld %A0,0";
5790
5791 s[5] = 'A' + (bit_nr >> 3);
5792 s[8] = '0' + (bit_nr & 7);
5793 output_asm_insn (s, operands);
5794 }
5795
5796 void
5797 avr_output_addr_vec_elt (FILE *stream, int value)
5798 {
5799 switch_to_section (progmem_section);
5800 if (AVR_HAVE_JMP_CALL)
5801 fprintf (stream, "\t.word gs(.L%d)\n", value);
5802 else
5803 fprintf (stream, "\trjmp .L%d\n", value);
5804 }
5805
5806 /* Return true if register REGNO is safe to allocate as a scratch
5807 register (for a define_peephole2) in the current function. */
5808
5809 bool
5810 avr_hard_regno_scratch_ok (unsigned int regno)
5811 {
5812 /* Interrupt functions can only use registers that have already been saved
5813 by the prologue, even if they would normally be call-clobbered. */
5814
5815 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5816 && !df_regs_ever_live_p (regno))
5817 return false;
5818
5819 return true;
5820 }
5821
5822 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
5823
5824 int
5825 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
5826 unsigned int new_reg)
5827 {
5828 /* Interrupt functions can only use registers that have already been
5829 saved by the prologue, even if they would normally be
5830 call-clobbered. */
5831
5832 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5833 && !df_regs_ever_live_p (new_reg))
5834 return 0;
5835
5836 return 1;
5837 }
5838
5839 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5840 or memory location in the I/O space (QImode only).
5841
5842 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5843 Operand 1: register operand to test, or CONST_INT memory address.
5844 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5845 Operand 3: label to jump to if the test is true. */
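/* A sketch of typical output for a short branch on an I/O bit with an EQ
   comparison (illustrative operand values):

       sbis <io-address>,<bit>   skip the jump if the bit is set
       rjmp <label>              taken when the bit is zero
 */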
5846
5847 const char *
5848 avr_out_sbxx_branch (rtx insn, rtx operands[])
5849 {
5850 enum rtx_code comp = GET_CODE (operands[0]);
5851 int long_jump = (get_attr_length (insn) >= 4);
5852 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5853
5854 if (comp == GE)
5855 comp = EQ;
5856 else if (comp == LT)
5857 comp = NE;
5858
5859 if (reverse)
5860 comp = reverse_condition (comp);
5861
5862 if (GET_CODE (operands[1]) == CONST_INT)
5863 {
5864 if (INTVAL (operands[1]) < 0x40)
5865 {
5866 if (comp == EQ)
5867 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5868 else
5869 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5870 }
5871 else
5872 {
5873 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5874 if (comp == EQ)
5875 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5876 else
5877 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5878 }
5879 }
5880 else /* GET_CODE (operands[1]) == REG */
5881 {
5882 if (GET_MODE (operands[1]) == QImode)
5883 {
5884 if (comp == EQ)
5885 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5886 else
5887 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5888 }
5889 else /* HImode or SImode */
5890 {
5891 static char buf[] = "sbrc %A1,0";
5892 int bit_nr = exact_log2 (INTVAL (operands[2])
5893 & GET_MODE_MASK (GET_MODE (operands[1])));
5894
5895 buf[3] = (comp == EQ) ? 's' : 'c';
5896 buf[6] = 'A' + (bit_nr >> 3);
5897 buf[9] = '0' + (bit_nr & 7);
5898 output_asm_insn (buf, operands);
5899 }
5900 }
5901
5902 if (long_jump)
5903 return (AS1 (rjmp,.+4) CR_TAB
5904 AS1 (jmp,%3));
5905 if (!reverse)
5906 return AS1 (rjmp,%3);
5907 return "";
5908 }
5909
5910 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
5911
5912 static void
5913 avr_asm_out_ctor (rtx symbol, int priority)
5914 {
5915 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5916 default_ctor_section_asm_out_constructor (symbol, priority);
5917 }
5918
5919 /* Worker function for TARGET_ASM_DESTRUCTOR. */
5920
5921 static void
5922 avr_asm_out_dtor (rtx symbol, int priority)
5923 {
5924 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5925 default_dtor_section_asm_out_destructor (symbol, priority);
5926 }
5927
5928 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5929
5930 static bool
5931 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5932 {
5933 if (TYPE_MODE (type) == BLKmode)
5934 {
5935 HOST_WIDE_INT size = int_size_in_bytes (type);
5936 return (size == -1 || size > 8);
5937 }
5938 else
5939 return false;
5940 }
5941
5942 /* Worker function for CASE_VALUES_THRESHOLD. */
5943
5944 unsigned int avr_case_values_threshold (void)
5945 {
5946 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
5947 }
5948
5949 #include "gt-avr.h"