1 /* Subroutines for insn-output.c for ATMEL AVR microcontrollers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "ggc.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "params.h"
47 #include "df.h"
48
49 /* Maximal allowed offset for an address in the LD command */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
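/* Rationale: LDD/STD accept displacements of 0..63, and the highest byte of a
   multi-byte access lands at offset + size - 1, so the offset itself may be
   at most 64 - size (e.g. 62 for HImode, 60 for SImode).  */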
51
52 static int avr_naked_function_p (tree);
53 static int interrupt_function_p (tree);
54 static int signal_function_p (tree);
55 static int avr_OS_task_function_p (tree);
56 static int avr_OS_main_function_p (tree);
57 static int avr_regs_to_save (HARD_REG_SET *);
58 static int get_sequence_length (rtx insns);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code);
62 static int avr_num_arg_regs (enum machine_mode, tree);
63
64 static RTX_CODE compare_condition (rtx insn);
65 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
66 static int compare_sign_p (rtx insn);
67 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
69 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
70 EXPORTED_CONST struct attribute_spec avr_attribute_table[];
71 static bool avr_assemble_integer (rtx, unsigned int, int);
72 static void avr_file_start (void);
73 static void avr_file_end (void);
74 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
75 static void avr_asm_function_end_prologue (FILE *);
76 static void avr_asm_function_begin_epilogue (FILE *);
77 static rtx avr_function_value (const_tree, const_tree, bool);
78 static void avr_insert_attributes (tree, tree *);
79 static void avr_asm_init_sections (void);
80 static unsigned int avr_section_type_flags (tree, const char *, int);
81
82 static void avr_reorg (void);
83 static void avr_asm_out_ctor (rtx, int);
84 static void avr_asm_out_dtor (rtx, int);
85 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
86 static bool avr_rtx_costs (rtx, int, int, int *, bool);
87 static int avr_address_cost (rtx, bool);
88 static bool avr_return_in_memory (const_tree, const_tree);
89 static struct machine_function * avr_init_machine_status (void);
90 static rtx avr_builtin_setjmp_frame_value (void);
91 static bool avr_hard_regno_scratch_ok (unsigned int);
92 static unsigned int avr_case_values_threshold (void);
93
94 /* Allocate registers r25 down to r8 for function call parameters. */
95 #define FIRST_CUM_REG 26
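/* Argument registers are assigned downwards from here: the first two-byte
   argument ends up in r24/r25, the next in r22/r23, and so on down to r8
   (see function_arg and function_arg_advance below).  */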
96
97 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
98 static GTY(()) rtx tmp_reg_rtx;
99
100 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
101 static GTY(()) rtx zero_reg_rtx;
102
103 /* AVR register names {"r0", "r1", ..., "r31"} */
104 static const char *const avr_regnames[] = REGISTER_NAMES;
105
106 /* This holds the last insn address. */
107 static int last_insn_address = 0;
108
109 /* Preprocessor macros to define depending on MCU type. */
110 const char *avr_extra_arch_macro;
111
112 /* Current architecture. */
113 const struct base_arch_s *avr_current_arch;
114
115 /* Current device. */
116 const struct mcu_type_s *avr_current_device;
117
118 section *progmem_section;
119
120 \f
121 /* Initialize the GCC target structure. */
122 #undef TARGET_ASM_ALIGNED_HI_OP
123 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
124 #undef TARGET_ASM_ALIGNED_SI_OP
125 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
126 #undef TARGET_ASM_UNALIGNED_HI_OP
127 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
128 #undef TARGET_ASM_UNALIGNED_SI_OP
129 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
130 #undef TARGET_ASM_INTEGER
131 #define TARGET_ASM_INTEGER avr_assemble_integer
132 #undef TARGET_ASM_FILE_START
133 #define TARGET_ASM_FILE_START avr_file_start
134 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
135 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
136 #undef TARGET_ASM_FILE_END
137 #define TARGET_ASM_FILE_END avr_file_end
138
139 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
140 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
141 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
142 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
143 #undef TARGET_FUNCTION_VALUE
144 #define TARGET_FUNCTION_VALUE avr_function_value
145 #undef TARGET_ATTRIBUTE_TABLE
146 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
147 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
148 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
149 #undef TARGET_INSERT_ATTRIBUTES
150 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
151 #undef TARGET_SECTION_TYPE_FLAGS
152 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
153 #undef TARGET_RTX_COSTS
154 #define TARGET_RTX_COSTS avr_rtx_costs
155 #undef TARGET_ADDRESS_COST
156 #define TARGET_ADDRESS_COST avr_address_cost
157 #undef TARGET_MACHINE_DEPENDENT_REORG
158 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
159
160 #undef TARGET_LEGITIMIZE_ADDRESS
161 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
162
163 #undef TARGET_RETURN_IN_MEMORY
164 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
165
166 #undef TARGET_STRICT_ARGUMENT_NAMING
167 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
168
169 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
170 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
171
172 #undef TARGET_HARD_REGNO_SCRATCH_OK
173 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
174 #undef TARGET_CASE_VALUES_THRESHOLD
175 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
176
177 #undef TARGET_LEGITIMATE_ADDRESS_P
178 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
179
180 struct gcc_target targetm = TARGET_INITIALIZER;
181 \f
182 void
183 avr_override_options (void)
184 {
185 const struct mcu_type_s *t;
186
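/* Address 0 is a valid data address on AVR (the register file and I/O space
   are memory mapped), so null pointer checks must not be optimized away.  */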
187 flag_delete_null_pointer_checks = 0;
188
189 for (t = avr_mcu_types; t->name; t++)
190 if (strcmp (t->name, avr_mcu_name) == 0)
191 break;
192
193 if (!t->name)
194 {
195 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
196 avr_mcu_name);
197 for (t = avr_mcu_types; t->name; t++)
198 fprintf (stderr," %s\n", t->name);
199 }
200
201 avr_current_arch = &avr_arch_types[t->arch];
202 avr_extra_arch_macro = t->macro;
203
204 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
205 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
206
207 init_machine_status = avr_init_machine_status;
208 }
209
210 /* Return the register class for a given register number. */
211
212 static const int reg_class_tab[]={
213 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
214 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
215 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
216 GENERAL_REGS, /* r0 - r15 */
217 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
218 LD_REGS, /* r16 - 23 */
219 ADDW_REGS,ADDW_REGS, /* r24,r25 */
220 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
221 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
222 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
223 STACK_REG,STACK_REG /* SPL,SPH */
224 };
225
226 /* Function to set up the backend function structure. */
227
228 static struct machine_function *
229 avr_init_machine_status (void)
230 {
231 return ((struct machine_function *)
232 ggc_alloc_cleared (sizeof (struct machine_function)));
233 }
234
235 /* Return register class for register R. */
236
237 enum reg_class
238 avr_regno_reg_class (int r)
239 {
240 if (r <= 33)
241 return reg_class_tab[r];
242 return ALL_REGS;
243 }
244
245 /* Return nonzero if FUNC is a naked function. */
246
247 static int
248 avr_naked_function_p (tree func)
249 {
250 tree a;
251
252 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
253
254 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
255 return a != NULL_TREE;
256 }
257
258 /* Return nonzero if FUNC is an interrupt function as specified
259 by the "interrupt" attribute. */
260
261 static int
262 interrupt_function_p (tree func)
263 {
264 tree a;
265
266 if (TREE_CODE (func) != FUNCTION_DECL)
267 return 0;
268
269 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
270 return a != NULL_TREE;
271 }
272
273 /* Return nonzero if FUNC is a signal function as specified
274 by the "signal" attribute. */
275
276 static int
277 signal_function_p (tree func)
278 {
279 tree a;
280
281 if (TREE_CODE (func) != FUNCTION_DECL)
282 return 0;
283
284 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
285 return a != NULL_TREE;
286 }
287
288 /* Return nonzero if FUNC is an OS_task function. */
289
290 static int
291 avr_OS_task_function_p (tree func)
292 {
293 tree a;
294
295 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
296
297 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
298 return a != NULL_TREE;
299 }
300
301 /* Return nonzero if FUNC is an OS_main function. */
302
303 static int
304 avr_OS_main_function_p (tree func)
305 {
306 tree a;
307
308 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
309
310 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
311 return a != NULL_TREE;
312 }
313
314 /* Return the number of hard registers to push/pop in the prologue/epilogue
315 of the current function, and optionally store these registers in SET. */
316
317 static int
318 avr_regs_to_save (HARD_REG_SET *set)
319 {
320 int reg, count;
321 int int_or_sig_p = (interrupt_function_p (current_function_decl)
322 || signal_function_p (current_function_decl));
323
324 if (!reload_completed)
325 cfun->machine->is_leaf = leaf_function_p ();
326
327 if (set)
328 CLEAR_HARD_REG_SET (*set);
329 count = 0;
330
331 /* No need to save any registers if the function never returns or
332 has the "OS_task" or "OS_main" attribute. */
333 if (TREE_THIS_VOLATILE (current_function_decl)
334 || cfun->machine->is_OS_task
335 || cfun->machine->is_OS_main)
336 return 0;
337
338 for (reg = 0; reg < 32; reg++)
339 {
340 /* Do not push/pop __tmp_reg__ or __zero_reg__, nor
341 any global register variables. */
342 if (fixed_regs[reg])
343 continue;
344
345 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
346 || (df_regs_ever_live_p (reg)
347 && (int_or_sig_p || !call_used_regs[reg])
348 && !(frame_pointer_needed
349 && (reg == REG_Y || reg == (REG_Y+1)))))
350 {
351 if (set)
352 SET_HARD_REG_BIT (*set, reg);
353 count++;
354 }
355 }
356 return count;
357 }
358
359 /* Return true if register FROM can be eliminated via register TO. */
360
361 bool
362 avr_can_eliminate (int from, int to)
363 {
364 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
365 || ((from == FRAME_POINTER_REGNUM
366 || from == FRAME_POINTER_REGNUM + 1)
367 && !frame_pointer_needed));
368 }
369
370 /* Compute offset between arg_pointer and frame_pointer. */
371
372 int
373 avr_initial_elimination_offset (int from, int to)
374 {
375 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
376 return 0;
377 else
378 {
379 int offset = frame_pointer_needed ? 2 : 0;
380 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
381
382 offset += avr_regs_to_save (NULL);
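/* The distance from the arg pointer to the frame pointer consists of the
   local frame, the return address (2 bytes, or 3 on devices with
   EIJMP/EICALL), one byte because SP points just below the last pushed
   byte, the saved registers, and two more bytes when the frame pointer
   itself is saved.  */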
383 return get_frame_size () + (avr_pc_size) + 1 + offset;
384 }
385 }
386
387 /* The actual start of the frame is virtual_stack_vars_rtx, which is offset
388 from the frame pointer by +STARTING_FRAME_OFFSET.
389 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
390 avoids creating an add/sub of the offset in nonlocal goto and setjmp. */
391
392 rtx avr_builtin_setjmp_frame_value (void)
393 {
394 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
395 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
396 }
397
398 /* Return 1 if the function epilogue is just a single "ret". */
399
400 int
401 avr_simple_epilogue (void)
402 {
403 return (! frame_pointer_needed
404 && get_frame_size () == 0
405 && avr_regs_to_save (NULL) == 0
406 && ! interrupt_function_p (current_function_decl)
407 && ! signal_function_p (current_function_decl)
408 && ! avr_naked_function_p (current_function_decl)
409 && ! TREE_THIS_VOLATILE (current_function_decl));
410 }
411
412 /* Return the number of live call-saved registers if they form one contiguous sequence, otherwise return 0. */
413
414 static int
415 sequent_regs_live (void)
416 {
417 int reg;
418 int live_seq=0;
419 int cur_seq=0;
420
421 for (reg = 0; reg < 18; ++reg)
422 {
423 if (!call_used_regs[reg])
424 {
425 if (df_regs_ever_live_p (reg))
426 {
427 ++live_seq;
428 ++cur_seq;
429 }
430 else
431 cur_seq = 0;
432 }
433 }
434
435 if (!frame_pointer_needed)
436 {
437 if (df_regs_ever_live_p (REG_Y))
438 {
439 ++live_seq;
440 ++cur_seq;
441 }
442 else
443 cur_seq = 0;
444
445 if (df_regs_ever_live_p (REG_Y+1))
446 {
447 ++live_seq;
448 ++cur_seq;
449 }
450 else
451 cur_seq = 0;
452 }
453 else
454 {
455 cur_seq += 2;
456 live_seq += 2;
457 }
458 return (cur_seq == live_seq) ? live_seq : 0;
459 }
460
461 /* Return the total length (sum of length attributes) of the insn sequence INSNS. */
462
463 int
464 get_sequence_length (rtx insns)
465 {
466 rtx insn;
467 int length;
468
469 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
470 length += get_attr_length (insn);
471
472 return length;
473 }
474
475 /* Expand the RTL prologue of the current function. */
476
477 void
478 expand_prologue (void)
479 {
480 int live_seq;
481 HARD_REG_SET set;
482 int minimize;
483 HOST_WIDE_INT size = get_frame_size();
484 /* Define templates for push instructions. */
485 rtx pushbyte = gen_rtx_MEM (QImode,
486 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
487 rtx pushword = gen_rtx_MEM (HImode,
488 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
489 rtx insn;
490
491 last_insn_address = 0;
492
493 /* Init cfun->machine. */
494 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
495 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
496 cfun->machine->is_signal = signal_function_p (current_function_decl);
497 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
498 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
499
500 /* Prologue: naked. */
501 if (cfun->machine->is_naked)
502 {
503 return;
504 }
505
506 avr_regs_to_save (&set);
507 live_seq = sequent_regs_live ();
508 minimize = (TARGET_CALL_PROLOGUES
509 && !cfun->machine->is_interrupt
510 && !cfun->machine->is_signal
511 && !cfun->machine->is_OS_task
512 && !cfun->machine->is_OS_main
513 && live_seq);
514
515 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
516 {
517 if (cfun->machine->is_interrupt)
518 {
519 /* Enable interrupts. */
520 insn = emit_insn (gen_enable_interrupt ());
521 RTX_FRAME_RELATED_P (insn) = 1;
522 }
523
524 /* Push zero reg. */
525 insn = emit_move_insn (pushbyte, zero_reg_rtx);
526 RTX_FRAME_RELATED_P (insn) = 1;
527
528 /* Push tmp reg. */
529 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
530 RTX_FRAME_RELATED_P (insn) = 1;
531
532 /* Push SREG. */
533 insn = emit_move_insn (tmp_reg_rtx,
534 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
535 RTX_FRAME_RELATED_P (insn) = 1;
536 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
537 RTX_FRAME_RELATED_P (insn) = 1;
538
539 /* Push RAMPZ. */
540 if(AVR_HAVE_RAMPZ
541 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
542 {
543 insn = emit_move_insn (tmp_reg_rtx,
544 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
545 RTX_FRAME_RELATED_P (insn) = 1;
546 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
547 RTX_FRAME_RELATED_P (insn) = 1;
548 }
549
550 /* Clear zero reg. */
551 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
552 RTX_FRAME_RELATED_P (insn) = 1;
553
554 /* Prevent any attempt to delete the setting of ZERO_REG! */
555 emit_use (zero_reg_rtx);
556 }
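/* With -mcall-prologues, saving through the __prologue_saves__ library
   helper is only worthwhile if a frame pointer is needed or enough
   consecutive registers are live (more than 6 with a 2-byte PC, more
   than 7 otherwise).  */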
557 if (minimize && (frame_pointer_needed
558 || (AVR_2_BYTE_PC && live_seq > 6)
559 || live_seq > 7))
560 {
561 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
562 gen_int_mode (size, HImode));
563 RTX_FRAME_RELATED_P (insn) = 1;
564
565 insn =
566 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
567 gen_int_mode (size + live_seq, HImode)));
568 RTX_FRAME_RELATED_P (insn) = 1;
569 }
570 else
571 {
572 int reg;
573 for (reg = 0; reg < 32; ++reg)
574 {
575 if (TEST_HARD_REG_BIT (set, reg))
576 {
577 /* Emit push of register to save. */
578 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
579 RTX_FRAME_RELATED_P (insn) = 1;
580 }
581 }
582 if (frame_pointer_needed)
583 {
584 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
585 {
586 /* Push frame pointer. */
587 insn = emit_move_insn (pushword, frame_pointer_rtx);
588 RTX_FRAME_RELATED_P (insn) = 1;
589 }
590
591 if (!size)
592 {
593 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
594 RTX_FRAME_RELATED_P (insn) = 1;
595 }
596 else
597 {
598 /* Creating a frame can be done by direct manipulation of the
599 stack or via the frame pointer. These two methods are:
600 fp=sp
601 fp-=size
602 sp=fp
603 OR
604 sp-=size
605 fp=sp
606 The optimum method depends on function type, stack and frame size.
607 To avoid complex logic, both methods are tried and the shorter
608 sequence is selected. */
609 rtx myfp;
610 rtx fp_plus_insns;
611 rtx sp_plus_insns = NULL_RTX;
612
613 if (TARGET_TINY_STACK)
614 {
615 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
616 over 'sbiw' (2 cycles, same size). */
617 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
618 }
619 else
620 {
621 /* Normal sized addition. */
622 myfp = frame_pointer_rtx;
623 }
624
625 /* Method 1: adjust the frame pointer. */
626 start_sequence ();
627
628 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
629 RTX_FRAME_RELATED_P (insn) = 1;
630
631 insn =
632 emit_move_insn (myfp,
633 gen_rtx_PLUS (GET_MODE(myfp), myfp,
634 gen_int_mode (-size,
635 GET_MODE(myfp))));
636 RTX_FRAME_RELATED_P (insn) = 1;
637
638 /* Copy to stack pointer. */
639 if (TARGET_TINY_STACK)
640 {
641 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
642 RTX_FRAME_RELATED_P (insn) = 1;
643 }
644 else if (TARGET_NO_INTERRUPTS
645 || cfun->machine->is_signal
646 || cfun->machine->is_OS_main)
647 {
648 insn =
649 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
650 frame_pointer_rtx));
651 RTX_FRAME_RELATED_P (insn) = 1;
652 }
653 else if (cfun->machine->is_interrupt)
654 {
655 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
656 frame_pointer_rtx));
657 RTX_FRAME_RELATED_P (insn) = 1;
658 }
659 else
660 {
661 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
662 RTX_FRAME_RELATED_P (insn) = 1;
663 }
664
665 fp_plus_insns = get_insns ();
666 end_sequence ();
667
668 /* Method 2: adjust the stack pointer. */
669 if (size <= 6)
670 {
671 start_sequence ();
672
673 insn =
674 emit_move_insn (stack_pointer_rtx,
675 gen_rtx_PLUS (HImode,
676 stack_pointer_rtx,
677 gen_int_mode (-size,
678 HImode)));
679 RTX_FRAME_RELATED_P (insn) = 1;
680
681 insn =
682 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
683 RTX_FRAME_RELATED_P (insn) = 1;
684
685 sp_plus_insns = get_insns ();
686 end_sequence ();
687 }
688
689 /* Use shortest method. */
690 if (size <= 6 && (get_sequence_length (sp_plus_insns)
691 < get_sequence_length (fp_plus_insns)))
692 emit_insn (sp_plus_insns);
693 else
694 emit_insn (fp_plus_insns);
695 }
696 }
697 }
698 }
699
700 /* Output summary at end of function prologue. */
701
702 static void
703 avr_asm_function_end_prologue (FILE *file)
704 {
705 if (cfun->machine->is_naked)
706 {
707 fputs ("/* prologue: naked */\n", file);
708 }
709 else
710 {
711 if (cfun->machine->is_interrupt)
712 {
713 fputs ("/* prologue: Interrupt */\n", file);
714 }
715 else if (cfun->machine->is_signal)
716 {
717 fputs ("/* prologue: Signal */\n", file);
718 }
719 else
720 fputs ("/* prologue: function */\n", file);
721 }
722 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
723 get_frame_size());
724 }
725
726
727 /* Implement EPILOGUE_USES. */
728
729 int
730 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
731 {
732 if (reload_completed
733 && cfun->machine
734 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
735 return 1;
736 return 0;
737 }
738
739 /* Expand the RTL epilogue of the current function. */
740
741 void
742 expand_epilogue (void)
743 {
744 int reg;
745 int live_seq;
746 HARD_REG_SET set;
747 int minimize;
748 HOST_WIDE_INT size = get_frame_size();
749
750 /* epilogue: naked */
751 if (cfun->machine->is_naked)
752 {
753 emit_jump_insn (gen_return ());
754 return;
755 }
756
757 avr_regs_to_save (&set);
758 live_seq = sequent_regs_live ();
759 minimize = (TARGET_CALL_PROLOGUES
760 && !cfun->machine->is_interrupt
761 && !cfun->machine->is_signal
762 && !cfun->machine->is_OS_task
763 && !cfun->machine->is_OS_main
764 && live_seq);
765
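/* Similarly, the __epilogue_restores__ library helper is used only when a
   frame pointer is needed or more than 4 consecutive registers were saved.  */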
766 if (minimize && (frame_pointer_needed || live_seq > 4))
767 {
768 if (frame_pointer_needed)
769 {
770 /* Get rid of frame. */
771 emit_move_insn(frame_pointer_rtx,
772 gen_rtx_PLUS (HImode, frame_pointer_rtx,
773 gen_int_mode (size, HImode)));
774 }
775 else
776 {
777 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
778 }
779
780 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
781 }
782 else
783 {
784 if (frame_pointer_needed)
785 {
786 if (size)
787 {
788 /* Try two methods to adjust the stack and select the shorter one. */
789 rtx myfp;
790 rtx fp_plus_insns;
791 rtx sp_plus_insns = NULL_RTX;
792
793 if (TARGET_TINY_STACK)
794 {
795 /* The high byte (r29) doesn't change - prefer 'subi'
796 (1 cycle) over 'sbiw' (2 cycles, same size). */
797 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
798 }
799 else
800 {
801 /* Normal sized addition. */
802 myfp = frame_pointer_rtx;
803 }
804
805 /* Method 1: adjust the frame pointer. */
806 start_sequence ();
807
808 emit_move_insn (myfp,
809 gen_rtx_PLUS (HImode, myfp,
810 gen_int_mode (size,
811 GET_MODE(myfp))));
812
813 /* Copy to stack pointer. */
814 if (TARGET_TINY_STACK)
815 {
816 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
817 }
818 else if (TARGET_NO_INTERRUPTS
819 || cfun->machine->is_signal)
820 {
821 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
822 frame_pointer_rtx));
823 }
824 else if (cfun->machine->is_interrupt)
825 {
826 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
827 frame_pointer_rtx));
828 }
829 else
830 {
831 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
832 }
833
834 fp_plus_insns = get_insns ();
835 end_sequence ();
836
837 /* Method 2: adjust the stack pointer. */
838 if (size <= 5)
839 {
840 start_sequence ();
841
842 emit_move_insn (stack_pointer_rtx,
843 gen_rtx_PLUS (HImode, stack_pointer_rtx,
844 gen_int_mode (size,
845 HImode)));
846
847 sp_plus_insns = get_insns ();
848 end_sequence ();
849 }
850
851 /* Use shortest method. */
852 if (size <= 5 && (get_sequence_length (sp_plus_insns)
853 < get_sequence_length (fp_plus_insns)))
854 emit_insn (sp_plus_insns);
855 else
856 emit_insn (fp_plus_insns);
857 }
858 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
859 {
860 /* Restore previous frame_pointer. */
861 emit_insn (gen_pophi (frame_pointer_rtx));
862 }
863 }
864 /* Restore used registers. */
865 for (reg = 31; reg >= 0; --reg)
866 {
867 if (TEST_HARD_REG_BIT (set, reg))
868 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
869 }
870 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
871 {
872 /* Restore RAMPZ using tmp reg as scratch. */
873 if(AVR_HAVE_RAMPZ
874 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
875 {
876 emit_insn (gen_popqi (tmp_reg_rtx));
877 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
878 tmp_reg_rtx);
879 }
880
881 /* Restore SREG using tmp reg as scratch. */
882 emit_insn (gen_popqi (tmp_reg_rtx));
883
884 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
885 tmp_reg_rtx);
886
887 /* Restore tmp REG. */
888 emit_insn (gen_popqi (tmp_reg_rtx));
889
890 /* Restore zero REG. */
891 emit_insn (gen_popqi (zero_reg_rtx));
892 }
893
894 emit_jump_insn (gen_return ());
895 }
896 }
897
898 /* Output summary messages at beginning of function epilogue. */
899
900 static void
901 avr_asm_function_begin_epilogue (FILE *file)
902 {
903 fprintf (file, "/* epilogue start */\n");
904 }
905
906 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
907 machine for a memory operand of mode MODE. */
908
909 bool
910 avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
911 {
912 enum reg_class r = NO_REGS;
913
914 if (TARGET_ALL_DEBUG)
915 {
916 fprintf (stderr, "mode: (%s) %s %s %s %s:",
917 GET_MODE_NAME(mode),
918 strict ? "(strict)": "",
919 reload_completed ? "(reload_completed)": "",
920 reload_in_progress ? "(reload_in_progress)": "",
921 reg_renumber ? "(reg_renumber)" : "");
922 if (GET_CODE (x) == PLUS
923 && REG_P (XEXP (x, 0))
924 && GET_CODE (XEXP (x, 1)) == CONST_INT
925 && INTVAL (XEXP (x, 1)) >= 0
926 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
927 && reg_renumber
928 )
929 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
930 true_regnum (XEXP (x, 0)));
931 debug_rtx (x);
932 }
933 if (!strict && GET_CODE (x) == SUBREG)
934 x = SUBREG_REG (x);
935 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
936 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
937 r = POINTER_REGS;
938 else if (CONSTANT_ADDRESS_P (x))
939 r = ALL_REGS;
940 else if (GET_CODE (x) == PLUS
941 && REG_P (XEXP (x, 0))
942 && GET_CODE (XEXP (x, 1)) == CONST_INT
943 && INTVAL (XEXP (x, 1)) >= 0)
944 {
945 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
946 if (fit)
947 {
948 if (! strict
949 || REGNO (XEXP (x,0)) == REG_X
950 || REGNO (XEXP (x,0)) == REG_Y
951 || REGNO (XEXP (x,0)) == REG_Z)
952 r = BASE_POINTER_REGS;
953 if (XEXP (x,0) == frame_pointer_rtx
954 || XEXP (x,0) == arg_pointer_rtx)
955 r = BASE_POINTER_REGS;
956 }
957 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
958 r = POINTER_Y_REGS;
959 }
960 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
961 && REG_P (XEXP (x, 0))
962 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
963 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
964 {
965 r = POINTER_REGS;
966 }
967 if (TARGET_ALL_DEBUG)
968 {
969 fprintf (stderr, " ret = %c\n", r + '0');
970 }
971 return r == NO_REGS ? 0 : (int)r;
972 }
973
974 /* Attempt to replace X with a valid
975 memory address for an operand of mode MODE. */
976
977 rtx
978 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
979 {
980 x = oldx;
981 if (TARGET_ALL_DEBUG)
982 {
983 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
984 debug_rtx (oldx);
985 }
986
987 if (GET_CODE (oldx) == PLUS
988 && REG_P (XEXP (oldx,0)))
989 {
990 if (REG_P (XEXP (oldx,1)))
991 x = force_reg (GET_MODE (oldx), oldx);
992 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
993 {
994 int offs = INTVAL (XEXP (oldx,1));
995 if (frame_pointer_rtx != XEXP (oldx,0))
996 if (offs > MAX_LD_OFFSET (mode))
997 {
998 if (TARGET_ALL_DEBUG)
999 fprintf (stderr, "force_reg (big offset)\n");
1000 x = force_reg (GET_MODE (oldx), oldx);
1001 }
1002 }
1003 }
1004 return x;
1005 }
1006
1007
1008 /* Return a pointer register name as a string. */
1009
1010 static const char *
1011 ptrreg_to_str (int regno)
1012 {
1013 switch (regno)
1014 {
1015 case REG_X: return "X";
1016 case REG_Y: return "Y";
1017 case REG_Z: return "Z";
1018 default:
1019 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1020 }
1021 return NULL;
1022 }
1023
1024 /* Return the condition name as a string.
1025 Used when constructing conditional jumps. */
1026
1027 static const char *
1028 cond_string (enum rtx_code code)
1029 {
1030 switch (code)
1031 {
1032 case NE:
1033 return "ne";
1034 case EQ:
1035 return "eq";
1036 case GE:
1037 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1038 return "pl";
1039 else
1040 return "ge";
1041 case LT:
1042 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1043 return "mi";
1044 else
1045 return "lt";
1046 case GEU:
1047 return "sh";
1048 case LTU:
1049 return "lo";
1050 default:
1051 gcc_unreachable ();
1052 }
1053 }
1054
1055 /* Output ADDR to FILE as an address. */
1056
1057 void
1058 print_operand_address (FILE *file, rtx addr)
1059 {
1060 switch (GET_CODE (addr))
1061 {
1062 case REG:
1063 fprintf (file, ptrreg_to_str (REGNO (addr)));
1064 break;
1065
1066 case PRE_DEC:
1067 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1068 break;
1069
1070 case POST_INC:
1071 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1072 break;
1073
1074 default:
1075 if (CONSTANT_ADDRESS_P (addr)
1076 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1077 || GET_CODE (addr) == LABEL_REF))
1078 {
1079 fprintf (file, "gs(");
1080 output_addr_const (file,addr);
1081 fprintf (file ,")");
1082 }
1083 else
1084 output_addr_const (file, addr);
1085 }
1086 }
1087
1088
1089 /* Output X as assembler operand to file FILE. */
1090
1091 void
1092 print_operand (FILE *file, rtx x, int code)
1093 {
1094 int abcd = 0;
1095
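/* Codes 'A'..'D' select byte 0..3 of a multi-byte operand (register number,
   constant value, or memory address offset).  */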
1096 if (code >= 'A' && code <= 'D')
1097 abcd = code - 'A';
1098
1099 if (code == '~')
1100 {
1101 if (!AVR_HAVE_JMP_CALL)
1102 fputc ('r', file);
1103 }
1104 else if (code == '!')
1105 {
1106 if (AVR_HAVE_EIJMP_EICALL)
1107 fputc ('e', file);
1108 }
1109 else if (REG_P (x))
1110 {
1111 if (x == zero_reg_rtx)
1112 fprintf (file, "__zero_reg__");
1113 else
1114 fprintf (file, reg_names[true_regnum (x) + abcd]);
1115 }
1116 else if (GET_CODE (x) == CONST_INT)
1117 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1118 else if (GET_CODE (x) == MEM)
1119 {
1120 rtx addr = XEXP (x,0);
1121
1122 if (CONSTANT_P (addr) && abcd)
1123 {
1124 fputc ('(', file);
1125 output_address (addr);
1126 fprintf (file, ")+%d", abcd);
1127 }
1128 else if (code == 'o')
1129 {
1130 if (GET_CODE (addr) != PLUS)
1131 fatal_insn ("bad address, not (reg+disp):", addr);
1132
1133 print_operand (file, XEXP (addr, 1), 0);
1134 }
1135 else if (code == 'p' || code == 'r')
1136 {
1137 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1138 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1139
1140 if (code == 'p')
1141 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1142 else
1143 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1144 }
1145 else if (GET_CODE (addr) == PLUS)
1146 {
1147 print_operand_address (file, XEXP (addr,0));
1148 if (REGNO (XEXP (addr, 0)) == REG_X)
1149 fatal_insn ("internal compiler error. Bad address:"
1150 ,addr);
1151 fputc ('+', file);
1152 print_operand (file, XEXP (addr,1), code);
1153 }
1154 else
1155 print_operand_address (file, addr);
1156 }
1157 else if (GET_CODE (x) == CONST_DOUBLE)
1158 {
1159 long val;
1160 REAL_VALUE_TYPE rv;
1161 if (GET_MODE (x) != SFmode)
1162 fatal_insn ("internal compiler error. Unknown mode:", x);
1163 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1164 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1165 fprintf (file, "0x%lx", val);
1166 }
1167 else if (code == 'j')
1168 fputs (cond_string (GET_CODE (x)), file);
1169 else if (code == 'k')
1170 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1171 else
1172 print_operand_address (file, x);
1173 }
1174
1175 /* Update the condition code in the INSN. */
1176
1177 void
1178 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1179 {
1180 rtx set;
1181
1182 switch (get_attr_cc (insn))
1183 {
1184 case CC_NONE:
1185 /* Insn does not affect CC at all. */
1186 break;
1187
1188 case CC_SET_N:
1189 CC_STATUS_INIT;
1190 break;
1191
1192 case CC_SET_ZN:
1193 set = single_set (insn);
1194 CC_STATUS_INIT;
1195 if (set)
1196 {
1197 cc_status.flags |= CC_NO_OVERFLOW;
1198 cc_status.value1 = SET_DEST (set);
1199 }
1200 break;
1201
1202 case CC_SET_CZN:
1203 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1204 The V flag may or may not be known but that's ok because
1205 alter_cond will change tests to use EQ/NE. */
1206 set = single_set (insn);
1207 CC_STATUS_INIT;
1208 if (set)
1209 {
1210 cc_status.value1 = SET_DEST (set);
1211 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1212 }
1213 break;
1214
1215 case CC_COMPARE:
1216 set = single_set (insn);
1217 CC_STATUS_INIT;
1218 if (set)
1219 cc_status.value1 = SET_SRC (set);
1220 break;
1221
1222 case CC_CLOBBER:
1223 /* Insn doesn't leave CC in a usable state. */
1224 CC_STATUS_INIT;
1225
1226 /* Correct CC for ashrqi3 when the shift count is a CONST_INT other than 6. */
1227 set = single_set (insn);
1228 if (set)
1229 {
1230 rtx src = SET_SRC (set);
1231
1232 if (GET_CODE (src) == ASHIFTRT
1233 && GET_MODE (src) == QImode)
1234 {
1235 rtx x = XEXP (src, 1);
1236
1237 if (GET_CODE (x) == CONST_INT
1238 && INTVAL (x) > 0
1239 && INTVAL (x) != 6)
1240 {
1241 cc_status.value1 = SET_DEST (set);
1242 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1243 }
1244 }
1245 }
1246 break;
1247 }
1248 }
1249
1250 /* Return maximum number of consecutive registers of
1251 class CLASS needed to hold a value of mode MODE. */
1252
1253 int
1254 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1255 {
1256 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1257 }
1258
1259 /* Choose the mode for a jump insn:
1260 1 - relative jump in range -63 <= x <= 62;
1261 2 - relative jump in range -2046 <= x <= 2045;
1262 3 - absolute jump (only for devices with JMP/CALL). */
1263
1264 int
1265 avr_jump_mode (rtx x, rtx insn)
1266 {
1267 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1268 ? XEXP (x, 0) : x));
1269 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1270 int jump_distance = cur_addr - dest_addr;
1271
1272 if (-63 <= jump_distance && jump_distance <= 62)
1273 return 1;
1274 else if (-2046 <= jump_distance && jump_distance <= 2045)
1275 return 2;
1276 else if (AVR_HAVE_JMP_CALL)
1277 return 3;
1278
1279 return 2;
1280 }
1281
1282 /* Return an AVR conditional jump command.
1283 X is a comparison RTX.
1284 LEN is the length code returned by avr_jump_mode.
1285 If REVERSE is nonzero, the condition code in X must be reversed. */
1286
1287 const char *
1288 ret_cond_branch (rtx x, int len, int reverse)
1289 {
1290 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1291
1292 switch (cond)
1293 {
1294 case GT:
1295 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1296 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1297 AS1 (brpl,%0)) :
1298 len == 2 ? (AS1 (breq,.+4) CR_TAB
1299 AS1 (brmi,.+2) CR_TAB
1300 AS1 (rjmp,%0)) :
1301 (AS1 (breq,.+6) CR_TAB
1302 AS1 (brmi,.+4) CR_TAB
1303 AS1 (jmp,%0)));
1304
1305 else
1306 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1307 AS1 (brge,%0)) :
1308 len == 2 ? (AS1 (breq,.+4) CR_TAB
1309 AS1 (brlt,.+2) CR_TAB
1310 AS1 (rjmp,%0)) :
1311 (AS1 (breq,.+6) CR_TAB
1312 AS1 (brlt,.+4) CR_TAB
1313 AS1 (jmp,%0)));
1314 case GTU:
1315 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1316 AS1 (brsh,%0)) :
1317 len == 2 ? (AS1 (breq,.+4) CR_TAB
1318 AS1 (brlo,.+2) CR_TAB
1319 AS1 (rjmp,%0)) :
1320 (AS1 (breq,.+6) CR_TAB
1321 AS1 (brlo,.+4) CR_TAB
1322 AS1 (jmp,%0)));
1323 case LE:
1324 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1325 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1326 AS1 (brmi,%0)) :
1327 len == 2 ? (AS1 (breq,.+2) CR_TAB
1328 AS1 (brpl,.+2) CR_TAB
1329 AS1 (rjmp,%0)) :
1330 (AS1 (breq,.+2) CR_TAB
1331 AS1 (brpl,.+4) CR_TAB
1332 AS1 (jmp,%0)));
1333 else
1334 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1335 AS1 (brlt,%0)) :
1336 len == 2 ? (AS1 (breq,.+2) CR_TAB
1337 AS1 (brge,.+2) CR_TAB
1338 AS1 (rjmp,%0)) :
1339 (AS1 (breq,.+2) CR_TAB
1340 AS1 (brge,.+4) CR_TAB
1341 AS1 (jmp,%0)));
1342 case LEU:
1343 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1344 AS1 (brlo,%0)) :
1345 len == 2 ? (AS1 (breq,.+2) CR_TAB
1346 AS1 (brsh,.+2) CR_TAB
1347 AS1 (rjmp,%0)) :
1348 (AS1 (breq,.+2) CR_TAB
1349 AS1 (brsh,.+4) CR_TAB
1350 AS1 (jmp,%0)));
1351 default:
1352 if (reverse)
1353 {
1354 switch (len)
1355 {
1356 case 1:
1357 return AS1 (br%k1,%0);
1358 case 2:
1359 return (AS1 (br%j1,.+2) CR_TAB
1360 AS1 (rjmp,%0));
1361 default:
1362 return (AS1 (br%j1,.+4) CR_TAB
1363 AS1 (jmp,%0));
1364 }
1365 }
1366 else
1367 {
1368 switch (len)
1369 {
1370 case 1:
1371 return AS1 (br%j1,%0);
1372 case 2:
1373 return (AS1 (br%k1,.+2) CR_TAB
1374 AS1 (rjmp,%0));
1375 default:
1376 return (AS1 (br%k1,.+4) CR_TAB
1377 AS1 (jmp,%0));
1378 }
1379 }
1380 }
1381 return "";
1382 }
1383
1384 /* Predicate for an immediate operand that fits in a byte (8 bits). */
1385
1386 int
1387 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1388 {
1389 return (GET_CODE (op) == CONST_INT
1390 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1391 }
1392
1393 /* Output all insn addresses and their sizes into the assembly language
1394 output file. This is helpful for debugging whether the length attributes
1395 in the md file are correct.
1396 Output insn cost for next insn. */
1397
1398 void
1399 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1400 int num_operands ATTRIBUTE_UNUSED)
1401 {
1402 int uid = INSN_UID (insn);
1403
1404 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1405 {
1406 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1407 INSN_ADDRESSES (uid),
1408 INSN_ADDRESSES (uid) - last_insn_address,
1409 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1410 }
1411 last_insn_address = INSN_ADDRESSES (uid);
1412 }
1413
1414 /* Return 0 if undefined, 1 if always true or always false. */
1415
1416 int
1417 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1418 {
1419 unsigned int max = (mode == QImode ? 0xff :
1420 mode == HImode ? 0xffff :
1421 mode == SImode ? 0xffffffff : 0);
1422 if (max && op && GET_CODE (x) == CONST_INT)
1423 {
1424 if (unsigned_condition (op) != op)
1425 max >>= 1;
1426
1427 if (max != (INTVAL (x) & max)
1428 && INTVAL (x) != 0xff)
1429 return 1;
1430 }
1431 return 0;
1432 }
1433
1434
1435 /* Return nonzero if R is the number of a hard
1436 register in which function arguments are sometimes passed. */
1437
1438 int
1439 function_arg_regno_p(int r)
1440 {
1441 return (r >= 8 && r <= 25);
1442 }
1443
1444 /* Initialize the variable CUM to the state at the beginning
1445 of the argument list. */
1446
1447 void
1448 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1449 tree fndecl ATTRIBUTE_UNUSED)
1450 {
1451 cum->nregs = 18;
1452 cum->regno = FIRST_CUM_REG;
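/* A variadic function (prototype whose argument list does not end in void)
   gets no register arguments at all; everything is passed on the stack.  */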
1453 if (!libname && fntype)
1454 {
1455 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1456 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1457 != void_type_node));
1458 if (stdarg)
1459 cum->nregs = 0;
1460 }
1461 }
1462
1463 /* Returns the number of registers to allocate for a function argument. */
1464
1465 static int
1466 avr_num_arg_regs (enum machine_mode mode, tree type)
1467 {
1468 int size;
1469
1470 if (mode == BLKmode)
1471 size = int_size_in_bytes (type);
1472 else
1473 size = GET_MODE_SIZE (mode);
1474
1475 /* Align all function arguments to start in even-numbered registers.
1476 Odd-sized arguments leave holes above them. */
1477
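/* Round up to an even number of bytes, e.g. a 3-byte argument reserves
   4 bytes (two register pairs).  */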
1478 return (size + 1) & ~1;
1479 }
1480
1481 /* Decide whether a function argument is passed
1482 in a register, and if so, in which register. */
1483
1484 rtx
1485 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1486 int named ATTRIBUTE_UNUSED)
1487 {
1488 int bytes = avr_num_arg_regs (mode, type);
1489
1490 if (cum->nregs && bytes <= cum->nregs)
1491 return gen_rtx_REG (mode, cum->regno - bytes);
1492
1493 return NULL_RTX;
1494 }
1495
1496 /* Update the summarizer variable CUM to advance past an argument
1497 in the argument list. */
1498
1499 void
1500 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1501 int named ATTRIBUTE_UNUSED)
1502 {
1503 int bytes = avr_num_arg_regs (mode, type);
1504
1505 cum->nregs -= bytes;
1506 cum->regno -= bytes;
1507
1508 if (cum->nregs <= 0)
1509 {
1510 cum->nregs = 0;
1511 cum->regno = FIRST_CUM_REG;
1512 }
1513 }
1514
1515 /***********************************************************************
1516 Functions for outputting move instructions for various modes
1517 ************************************************************************/
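/* Common convention for the output_mov* / out_mov* routines below: if the
   optional parameter L is non-NULL it receives the number of instructions
   in the selected sequence (used by the insn "length" attribute), and the
   routines then avoid emitting anything via output_asm_insn.  */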
1518 const char *
1519 output_movqi (rtx insn, rtx operands[], int *l)
1520 {
1521 int dummy;
1522 rtx dest = operands[0];
1523 rtx src = operands[1];
1524 int *real_l = l;
1525
1526 if (!l)
1527 l = &dummy;
1528
1529 *l = 1;
1530
1531 if (register_operand (dest, QImode))
1532 {
1533 if (register_operand (src, QImode)) /* mov r,r */
1534 {
1535 if (test_hard_reg_class (STACK_REG, dest))
1536 return AS2 (out,%0,%1);
1537 else if (test_hard_reg_class (STACK_REG, src))
1538 return AS2 (in,%0,%1);
1539
1540 return AS2 (mov,%0,%1);
1541 }
1542 else if (CONSTANT_P (src))
1543 {
1544 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1545 return AS2 (ldi,%0,lo8(%1));
1546
1547 if (GET_CODE (src) == CONST_INT)
1548 {
1549 if (src == const0_rtx) /* mov r,L */
1550 return AS1 (clr,%0);
1551 else if (src == const1_rtx)
1552 {
1553 *l = 2;
1554 return (AS1 (clr,%0) CR_TAB
1555 AS1 (inc,%0));
1556 }
1557 else if (src == constm1_rtx)
1558 {
1559 /* Load the immediate constant -1 into any register. */
1560 *l = 2;
1561 return (AS1 (clr,%0) CR_TAB
1562 AS1 (dec,%0));
1563 }
1564 else
1565 {
1566 int bit_nr = exact_log2 (INTVAL (src));
1567
1568 if (bit_nr >= 0)
1569 {
1570 *l = 3;
1571 if (!real_l)
1572 output_asm_insn ((AS1 (clr,%0) CR_TAB
1573 "set"), operands);
1574 if (!real_l)
1575 avr_output_bld (operands, bit_nr);
1576
1577 return "";
1578 }
1579 }
1580 }
1581
1582 /* Last resort, larger than loading from memory. */
1583 *l = 4;
1584 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1585 AS2 (ldi,r31,lo8(%1)) CR_TAB
1586 AS2 (mov,%0,r31) CR_TAB
1587 AS2 (mov,r31,__tmp_reg__));
1588 }
1589 else if (GET_CODE (src) == MEM)
1590 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1591 }
1592 else if (GET_CODE (dest) == MEM)
1593 {
1594 const char *templ;
1595
1596 if (src == const0_rtx)
1597 operands[1] = zero_reg_rtx;
1598
1599 templ = out_movqi_mr_r (insn, operands, real_l);
1600
1601 if (!real_l)
1602 output_asm_insn (templ, operands);
1603
1604 operands[1] = src;
1605 }
1606 return "";
1607 }
1608
1609
1610 const char *
1611 output_movhi (rtx insn, rtx operands[], int *l)
1612 {
1613 int dummy;
1614 rtx dest = operands[0];
1615 rtx src = operands[1];
1616 int *real_l = l;
1617
1618 if (!l)
1619 l = &dummy;
1620
1621 if (register_operand (dest, HImode))
1622 {
1623 if (register_operand (src, HImode)) /* mov r,r */
1624 {
1625 if (test_hard_reg_class (STACK_REG, dest))
1626 {
1627 if (TARGET_TINY_STACK)
1628 return *l = 1, AS2 (out,__SP_L__,%A1);
1629 /* Use simple load of stack pointer if no interrupts are
1630 used. */
1631 else if (TARGET_NO_INTERRUPTS)
1632 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1633 AS2 (out,__SP_L__,%A1));
1634 *l = 5;
1635 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1636 "cli" CR_TAB
1637 AS2 (out,__SP_H__,%B1) CR_TAB
1638 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1639 AS2 (out,__SP_L__,%A1));
1640 }
1641 else if (test_hard_reg_class (STACK_REG, src))
1642 {
1643 *l = 2;
1644 return (AS2 (in,%A0,__SP_L__) CR_TAB
1645 AS2 (in,%B0,__SP_H__));
1646 }
1647
1648 if (AVR_HAVE_MOVW)
1649 {
1650 *l = 1;
1651 return (AS2 (movw,%0,%1));
1652 }
1653 else
1654 {
1655 *l = 2;
1656 return (AS2 (mov,%A0,%A1) CR_TAB
1657 AS2 (mov,%B0,%B1));
1658 }
1659 }
1660 else if (CONSTANT_P (src))
1661 {
1662 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1663 {
1664 *l = 2;
1665 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1666 AS2 (ldi,%B0,hi8(%1)));
1667 }
1668
1669 if (GET_CODE (src) == CONST_INT)
1670 {
1671 if (src == const0_rtx) /* mov r,L */
1672 {
1673 *l = 2;
1674 return (AS1 (clr,%A0) CR_TAB
1675 AS1 (clr,%B0));
1676 }
1677 else if (src == const1_rtx)
1678 {
1679 *l = 3;
1680 return (AS1 (clr,%A0) CR_TAB
1681 AS1 (clr,%B0) CR_TAB
1682 AS1 (inc,%A0));
1683 }
1684 else if (src == constm1_rtx)
1685 {
1686 /* Load the immediate constant -1 into any register. */
1687 *l = 3;
1688 return (AS1 (clr,%0) CR_TAB
1689 AS1 (dec,%A0) CR_TAB
1690 AS2 (mov,%B0,%A0));
1691 }
1692 else
1693 {
1694 int bit_nr = exact_log2 (INTVAL (src));
1695
1696 if (bit_nr >= 0)
1697 {
1698 *l = 4;
1699 if (!real_l)
1700 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1701 AS1 (clr,%B0) CR_TAB
1702 "set"), operands);
1703 if (!real_l)
1704 avr_output_bld (operands, bit_nr);
1705
1706 return "";
1707 }
1708 }
1709
1710 if ((INTVAL (src) & 0xff) == 0)
1711 {
1712 *l = 5;
1713 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1714 AS1 (clr,%A0) CR_TAB
1715 AS2 (ldi,r31,hi8(%1)) CR_TAB
1716 AS2 (mov,%B0,r31) CR_TAB
1717 AS2 (mov,r31,__tmp_reg__));
1718 }
1719 else if ((INTVAL (src) & 0xff00) == 0)
1720 {
1721 *l = 5;
1722 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1723 AS2 (ldi,r31,lo8(%1)) CR_TAB
1724 AS2 (mov,%A0,r31) CR_TAB
1725 AS1 (clr,%B0) CR_TAB
1726 AS2 (mov,r31,__tmp_reg__));
1727 }
1728 }
1729
1730 /* Last resort, equal to loading from memory. */
1731 *l = 6;
1732 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1733 AS2 (ldi,r31,lo8(%1)) CR_TAB
1734 AS2 (mov,%A0,r31) CR_TAB
1735 AS2 (ldi,r31,hi8(%1)) CR_TAB
1736 AS2 (mov,%B0,r31) CR_TAB
1737 AS2 (mov,r31,__tmp_reg__));
1738 }
1739 else if (GET_CODE (src) == MEM)
1740 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1741 }
1742 else if (GET_CODE (dest) == MEM)
1743 {
1744 const char *templ;
1745
1746 if (src == const0_rtx)
1747 operands[1] = zero_reg_rtx;
1748
1749 templ = out_movhi_mr_r (insn, operands, real_l);
1750
1751 if (!real_l)
1752 output_asm_insn (templ, operands);
1753
1754 operands[1] = src;
1755 return "";
1756 }
1757 fatal_insn ("invalid insn:", insn);
1758 return "";
1759 }
1760
1761 const char *
1762 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1763 {
1764 rtx dest = op[0];
1765 rtx src = op[1];
1766 rtx x = XEXP (src, 0);
1767 int dummy;
1768
1769 if (!l)
1770 l = &dummy;
1771
1772 if (CONSTANT_ADDRESS_P (x))
1773 {
1774 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1775 {
1776 *l = 1;
1777 return AS2 (in,%0,__SREG__);
1778 }
1779 if (optimize > 0 && io_address_operand (x, QImode))
1780 {
1781 *l = 1;
1782 return AS2 (in,%0,%1-0x20);
1783 }
1784 *l = 2;
1785 return AS2 (lds,%0,%1);
1786 }
1787 /* Memory access via reg+displacement. */
1788 else if (GET_CODE (x) == PLUS
1789 && REG_P (XEXP (x,0))
1790 && GET_CODE (XEXP (x,1)) == CONST_INT)
1791 {
1792 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1793 {
1794 int disp = INTVAL (XEXP (x,1));
1795 if (REGNO (XEXP (x,0)) != REG_Y)
1796 fatal_insn ("incorrect insn:",insn);
1797
1798 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1799 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1800 AS2 (ldd,%0,Y+63) CR_TAB
1801 AS2 (sbiw,r28,%o1-63));
1802
1803 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1804 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1805 AS2 (ld,%0,Y) CR_TAB
1806 AS2 (subi,r28,lo8(%o1)) CR_TAB
1807 AS2 (sbci,r29,hi8(%o1)));
1808 }
1809 else if (REGNO (XEXP (x,0)) == REG_X)
1810 {
1811 /* This is a paranoid case; LEGITIMIZE_RELOAD_ADDRESS must exclude
1812 it, but it still occurs with extreme optimization options. */
1813 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
1814 || reg_unused_after (insn, XEXP (x,0)))
1815 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
1816 AS2 (ld,%0,X));
1817
1818 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
1819 AS2 (ld,%0,X) CR_TAB
1820 AS2 (sbiw,r26,%o1));
1821 }
1822 *l = 1;
1823 return AS2 (ldd,%0,%1);
1824 }
1825 *l = 1;
1826 return AS2 (ld,%0,%1);
1827 }
1828
1829 const char *
1830 out_movhi_r_mr (rtx insn, rtx op[], int *l)
1831 {
1832 rtx dest = op[0];
1833 rtx src = op[1];
1834 rtx base = XEXP (src, 0);
1835 int reg_dest = true_regnum (dest);
1836 int reg_base = true_regnum (base);
1837 /* "volatile" forces reading low byte first, even if less efficient,
1838 for correct operation with 16-bit I/O registers. */
1839 int mem_volatile_p = MEM_VOLATILE_P (src);
1840 int tmp;
1841
1842 if (!l)
1843 l = &tmp;
1844
1845 if (reg_base > 0)
1846 {
1847 if (reg_dest == reg_base) /* R = (R) */
1848 {
1849 *l = 3;
1850 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
1851 AS2 (ld,%B0,%1) CR_TAB
1852 AS2 (mov,%A0,__tmp_reg__));
1853 }
1854 else if (reg_base == REG_X) /* (R26) */
1855 {
1856 if (reg_unused_after (insn, base))
1857 {
1858 *l = 2;
1859 return (AS2 (ld,%A0,X+) CR_TAB
1860 AS2 (ld,%B0,X));
1861 }
1862 *l = 3;
1863 return (AS2 (ld,%A0,X+) CR_TAB
1864 AS2 (ld,%B0,X) CR_TAB
1865 AS2 (sbiw,r26,1));
1866 }
1867 else /* (R) */
1868 {
1869 *l = 2;
1870 return (AS2 (ld,%A0,%1) CR_TAB
1871 AS2 (ldd,%B0,%1+1));
1872 }
1873 }
1874 else if (GET_CODE (base) == PLUS) /* (R + i) */
1875 {
1876 int disp = INTVAL (XEXP (base, 1));
1877 int reg_base = true_regnum (XEXP (base, 0));
1878
1879 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
1880 {
1881 if (REGNO (XEXP (base, 0)) != REG_Y)
1882 fatal_insn ("incorrect insn:",insn);
1883
1884 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1885 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
1886 AS2 (ldd,%A0,Y+62) CR_TAB
1887 AS2 (ldd,%B0,Y+63) CR_TAB
1888 AS2 (sbiw,r28,%o1-62));
1889
1890 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1891 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1892 AS2 (ld,%A0,Y) CR_TAB
1893 AS2 (ldd,%B0,Y+1) CR_TAB
1894 AS2 (subi,r28,lo8(%o1)) CR_TAB
1895 AS2 (sbci,r29,hi8(%o1)));
1896 }
1897 if (reg_base == REG_X)
1898 {
1899 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
1900 it, but it still occurs with extreme
1901 optimization options. */
1902
1903 *l = 4;
1904 if (reg_base == reg_dest)
1905 return (AS2 (adiw,r26,%o1) CR_TAB
1906 AS2 (ld,__tmp_reg__,X+) CR_TAB
1907 AS2 (ld,%B0,X) CR_TAB
1908 AS2 (mov,%A0,__tmp_reg__));
1909
1910 return (AS2 (adiw,r26,%o1) CR_TAB
1911 AS2 (ld,%A0,X+) CR_TAB
1912 AS2 (ld,%B0,X) CR_TAB
1913 AS2 (sbiw,r26,%o1+1));
1914 }
1915
1916 if (reg_base == reg_dest)
1917 {
1918 *l = 3;
1919 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
1920 AS2 (ldd,%B0,%B1) CR_TAB
1921 AS2 (mov,%A0,__tmp_reg__));
1922 }
1923
1924 *l = 2;
1925 return (AS2 (ldd,%A0,%A1) CR_TAB
1926 AS2 (ldd,%B0,%B1));
1927 }
1928 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
1929 {
1930 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1931 fatal_insn ("incorrect insn:", insn);
1932
1933 if (mem_volatile_p)
1934 {
1935 if (REGNO (XEXP (base, 0)) == REG_X)
1936 {
1937 *l = 4;
1938 return (AS2 (sbiw,r26,2) CR_TAB
1939 AS2 (ld,%A0,X+) CR_TAB
1940 AS2 (ld,%B0,X) CR_TAB
1941 AS2 (sbiw,r26,1));
1942 }
1943 else
1944 {
1945 *l = 3;
1946 return (AS2 (sbiw,%r1,2) CR_TAB
1947 AS2 (ld,%A0,%p1) CR_TAB
1948 AS2 (ldd,%B0,%p1+1));
1949 }
1950 }
1951
1952 *l = 2;
1953 return (AS2 (ld,%B0,%1) CR_TAB
1954 AS2 (ld,%A0,%1));
1955 }
1956 else if (GET_CODE (base) == POST_INC) /* (R++) */
1957 {
1958 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
1959 fatal_insn ("incorrect insn:", insn);
1960
1961 *l = 2;
1962 return (AS2 (ld,%A0,%1) CR_TAB
1963 AS2 (ld,%B0,%1));
1964 }
1965 else if (CONSTANT_ADDRESS_P (base))
1966 {
1967 if (optimize > 0 && io_address_operand (base, HImode))
1968 {
1969 *l = 2;
1970 return (AS2 (in,%A0,%A1-0x20) CR_TAB
1971 AS2 (in,%B0,%B1-0x20));
1972 }
1973 *l = 4;
1974 return (AS2 (lds,%A0,%A1) CR_TAB
1975 AS2 (lds,%B0,%B1));
1976 }
1977
1978 fatal_insn ("unknown move insn:",insn);
1979 return "";
1980 }
1981
1982 const char *
1983 out_movsi_r_mr (rtx insn, rtx op[], int *l)
1984 {
1985 rtx dest = op[0];
1986 rtx src = op[1];
1987 rtx base = XEXP (src, 0);
1988 int reg_dest = true_regnum (dest);
1989 int reg_base = true_regnum (base);
1990 int tmp;
1991
1992 if (!l)
1993 l = &tmp;
1994
1995 if (reg_base > 0)
1996 {
1997 if (reg_base == REG_X) /* (R26) */
1998 {
1999 if (reg_dest == REG_X)
2000 /* "ld r26,-X" is undefined */
2001 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2002 AS2 (ld,r29,X) CR_TAB
2003 AS2 (ld,r28,-X) CR_TAB
2004 AS2 (ld,__tmp_reg__,-X) CR_TAB
2005 AS2 (sbiw,r26,1) CR_TAB
2006 AS2 (ld,r26,X) CR_TAB
2007 AS2 (mov,r27,__tmp_reg__));
2008 else if (reg_dest == REG_X - 2)
2009 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2010 AS2 (ld,%B0,X+) CR_TAB
2011 AS2 (ld,__tmp_reg__,X+) CR_TAB
2012 AS2 (ld,%D0,X) CR_TAB
2013 AS2 (mov,%C0,__tmp_reg__));
2014 else if (reg_unused_after (insn, base))
2015 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2016 AS2 (ld,%B0,X+) CR_TAB
2017 AS2 (ld,%C0,X+) CR_TAB
2018 AS2 (ld,%D0,X));
2019 else
2020 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2021 AS2 (ld,%B0,X+) CR_TAB
2022 AS2 (ld,%C0,X+) CR_TAB
2023 AS2 (ld,%D0,X) CR_TAB
2024 AS2 (sbiw,r26,3));
2025 }
2026 else
2027 {
2028 if (reg_dest == reg_base)
2029 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2030 AS2 (ldd,%C0,%1+2) CR_TAB
2031 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2032 AS2 (ld,%A0,%1) CR_TAB
2033 AS2 (mov,%B0,__tmp_reg__));
2034 else if (reg_base == reg_dest + 2)
2035 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2036 AS2 (ldd,%B0,%1+1) CR_TAB
2037 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2038 AS2 (ldd,%D0,%1+3) CR_TAB
2039 AS2 (mov,%C0,__tmp_reg__));
2040 else
2041 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2042 AS2 (ldd,%B0,%1+1) CR_TAB
2043 AS2 (ldd,%C0,%1+2) CR_TAB
2044 AS2 (ldd,%D0,%1+3));
2045 }
2046 }
2047 else if (GET_CODE (base) == PLUS) /* (R + i) */
2048 {
2049 int disp = INTVAL (XEXP (base, 1));
2050
2051 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2052 {
2053 if (REGNO (XEXP (base, 0)) != REG_Y)
2054 fatal_insn ("incorrect insn:",insn);
2055
2056 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2057 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2058 AS2 (ldd,%A0,Y+60) CR_TAB
2059 AS2 (ldd,%B0,Y+61) CR_TAB
2060 AS2 (ldd,%C0,Y+62) CR_TAB
2061 AS2 (ldd,%D0,Y+63) CR_TAB
2062 AS2 (sbiw,r28,%o1-60));
2063
2064 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2065 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2066 AS2 (ld,%A0,Y) CR_TAB
2067 AS2 (ldd,%B0,Y+1) CR_TAB
2068 AS2 (ldd,%C0,Y+2) CR_TAB
2069 AS2 (ldd,%D0,Y+3) CR_TAB
2070 AS2 (subi,r28,lo8(%o1)) CR_TAB
2071 AS2 (sbci,r29,hi8(%o1)));
2072 }
2073
2074 reg_base = true_regnum (XEXP (base, 0));
2075 if (reg_base == REG_X)
2076 {
2077 /* R = (X + d) */
2078 if (reg_dest == REG_X)
2079 {
2080 *l = 7;
2081 /* "ld r26,-X" is undefined */
2082 return (AS2 (adiw,r26,%o1+3) CR_TAB
2083 AS2 (ld,r29,X) CR_TAB
2084 AS2 (ld,r28,-X) CR_TAB
2085 AS2 (ld,__tmp_reg__,-X) CR_TAB
2086 AS2 (sbiw,r26,1) CR_TAB
2087 AS2 (ld,r26,X) CR_TAB
2088 AS2 (mov,r27,__tmp_reg__));
2089 }
2090 *l = 6;
2091 if (reg_dest == REG_X - 2)
2092 return (AS2 (adiw,r26,%o1) CR_TAB
2093 AS2 (ld,r24,X+) CR_TAB
2094 AS2 (ld,r25,X+) CR_TAB
2095 AS2 (ld,__tmp_reg__,X+) CR_TAB
2096 AS2 (ld,r27,X) CR_TAB
2097 AS2 (mov,r26,__tmp_reg__));
2098
2099 return (AS2 (adiw,r26,%o1) CR_TAB
2100 AS2 (ld,%A0,X+) CR_TAB
2101 AS2 (ld,%B0,X+) CR_TAB
2102 AS2 (ld,%C0,X+) CR_TAB
2103 AS2 (ld,%D0,X) CR_TAB
2104 AS2 (sbiw,r26,%o1+3));
2105 }
2106 if (reg_dest == reg_base)
2107 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2108 AS2 (ldd,%C0,%C1) CR_TAB
2109 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2110 AS2 (ldd,%A0,%A1) CR_TAB
2111 AS2 (mov,%B0,__tmp_reg__));
2112 else if (reg_dest == reg_base - 2)
2113 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2114 AS2 (ldd,%B0,%B1) CR_TAB
2115 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2116 AS2 (ldd,%D0,%D1) CR_TAB
2117 AS2 (mov,%C0,__tmp_reg__));
2118 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2119 AS2 (ldd,%B0,%B1) CR_TAB
2120 AS2 (ldd,%C0,%C1) CR_TAB
2121 AS2 (ldd,%D0,%D1));
2122 }
2123 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2124 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2125 AS2 (ld,%C0,%1) CR_TAB
2126 AS2 (ld,%B0,%1) CR_TAB
2127 AS2 (ld,%A0,%1));
2128 else if (GET_CODE (base) == POST_INC) /* (R++) */
2129 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2130 AS2 (ld,%B0,%1) CR_TAB
2131 AS2 (ld,%C0,%1) CR_TAB
2132 AS2 (ld,%D0,%1));
2133 else if (CONSTANT_ADDRESS_P (base))
2134 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2135 AS2 (lds,%B0,%B1) CR_TAB
2136 AS2 (lds,%C0,%C1) CR_TAB
2137 AS2 (lds,%D0,%D1));
2138
2139 fatal_insn ("unknown move insn:",insn);
2140 return "";
2141 }
2142
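/* Output code to store the 4-byte register OP[1] to the memory
   destination OP[0] of INSN.  If L is non-NULL, also store the number
   of output instructions in *L.  Return the assembler template.  */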
2143 const char *
2144 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2145 {
2146 rtx dest = op[0];
2147 rtx src = op[1];
2148 rtx base = XEXP (dest, 0);
2149 int reg_base = true_regnum (base);
2150 int reg_src = true_regnum (src);
2151 int tmp;
2152
2153 if (!l)
2154 l = &tmp;
2155
2156 if (CONSTANT_ADDRESS_P (base))
2157 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2158 AS2 (sts,%B0,%B1) CR_TAB
2159 AS2 (sts,%C0,%C1) CR_TAB
2160 AS2 (sts,%D0,%D1));
2161 if (reg_base > 0) /* (r) */
2162 {
2163 if (reg_base == REG_X) /* (R26) */
2164 {
2165 if (reg_src == REG_X)
2166 {
2167 /* "st X+,r26" is undefined */
2168 if (reg_unused_after (insn, base))
2169 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2170 AS2 (st,X,r26) CR_TAB
2171 AS2 (adiw,r26,1) CR_TAB
2172 AS2 (st,X+,__tmp_reg__) CR_TAB
2173 AS2 (st,X+,r28) CR_TAB
2174 AS2 (st,X,r29));
2175 else
2176 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2177 AS2 (st,X,r26) CR_TAB
2178 AS2 (adiw,r26,1) CR_TAB
2179 AS2 (st,X+,__tmp_reg__) CR_TAB
2180 AS2 (st,X+,r28) CR_TAB
2181 AS2 (st,X,r29) CR_TAB
2182 AS2 (sbiw,r26,3));
2183 }
2184 else if (reg_base == reg_src + 2)
2185 {
2186 if (reg_unused_after (insn, base))
2187 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2188 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2189 AS2 (st,%0+,%A1) CR_TAB
2190 AS2 (st,%0+,%B1) CR_TAB
2191 AS2 (st,%0+,__zero_reg__) CR_TAB
2192 AS2 (st,%0,__tmp_reg__) CR_TAB
2193 AS1 (clr,__zero_reg__));
2194 else
2195 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2196 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2197 AS2 (st,%0+,%A1) CR_TAB
2198 AS2 (st,%0+,%B1) CR_TAB
2199 AS2 (st,%0+,__zero_reg__) CR_TAB
2200 AS2 (st,%0,__tmp_reg__) CR_TAB
2201 AS1 (clr,__zero_reg__) CR_TAB
2202 AS2 (sbiw,r26,3));
2203 }
2204 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2205 AS2 (st,%0+,%B1) CR_TAB
2206 AS2 (st,%0+,%C1) CR_TAB
2207 AS2 (st,%0,%D1) CR_TAB
2208 AS2 (sbiw,r26,3));
2209 }
2210 else
2211 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2212 AS2 (std,%0+1,%B1) CR_TAB
2213 AS2 (std,%0+2,%C1) CR_TAB
2214 AS2 (std,%0+3,%D1));
2215 }
2216 else if (GET_CODE (base) == PLUS) /* (R + i) */
2217 {
2218 int disp = INTVAL (XEXP (base, 1));
2219 reg_base = REGNO (XEXP (base, 0));
2220 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2221 {
2222 if (reg_base != REG_Y)
2223 fatal_insn ("incorrect insn:",insn);
2224
2225 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2226 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2227 AS2 (std,Y+60,%A1) CR_TAB
2228 AS2 (std,Y+61,%B1) CR_TAB
2229 AS2 (std,Y+62,%C1) CR_TAB
2230 AS2 (std,Y+63,%D1) CR_TAB
2231 AS2 (sbiw,r28,%o0-60));
2232
2233 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2234 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2235 AS2 (st,Y,%A1) CR_TAB
2236 AS2 (std,Y+1,%B1) CR_TAB
2237 AS2 (std,Y+2,%C1) CR_TAB
2238 AS2 (std,Y+3,%D1) CR_TAB
2239 AS2 (subi,r28,lo8(%o0)) CR_TAB
2240 AS2 (sbci,r29,hi8(%o0)));
2241 }
2242 if (reg_base == REG_X)
2243 {
2244 /* (X + d) = R */
2245 if (reg_src == REG_X)
2246 {
2247 *l = 9;
2248 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2249 AS2 (mov,__zero_reg__,r27) CR_TAB
2250 AS2 (adiw,r26,%o0) CR_TAB
2251 AS2 (st,X+,__tmp_reg__) CR_TAB
2252 AS2 (st,X+,__zero_reg__) CR_TAB
2253 AS2 (st,X+,r28) CR_TAB
2254 AS2 (st,X,r29) CR_TAB
2255 AS1 (clr,__zero_reg__) CR_TAB
2256 AS2 (sbiw,r26,%o0+3));
2257 }
2258 else if (reg_src == REG_X - 2)
2259 {
2260 *l = 9;
2261 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2262 AS2 (mov,__zero_reg__,r27) CR_TAB
2263 AS2 (adiw,r26,%o0) CR_TAB
2264 AS2 (st,X+,r24) CR_TAB
2265 AS2 (st,X+,r25) CR_TAB
2266 AS2 (st,X+,__tmp_reg__) CR_TAB
2267 AS2 (st,X,__zero_reg__) CR_TAB
2268 AS1 (clr,__zero_reg__) CR_TAB
2269 AS2 (sbiw,r26,%o0+3));
2270 }
2271 *l = 6;
2272 return (AS2 (adiw,r26,%o0) CR_TAB
2273 AS2 (st,X+,%A1) CR_TAB
2274 AS2 (st,X+,%B1) CR_TAB
2275 AS2 (st,X+,%C1) CR_TAB
2276 AS2 (st,X,%D1) CR_TAB
2277 AS2 (sbiw,r26,%o0+3));
2278 }
2279 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2280 AS2 (std,%B0,%B1) CR_TAB
2281 AS2 (std,%C0,%C1) CR_TAB
2282 AS2 (std,%D0,%D1));
2283 }
2284 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2285 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2286 AS2 (st,%0,%C1) CR_TAB
2287 AS2 (st,%0,%B1) CR_TAB
2288 AS2 (st,%0,%A1));
2289 else if (GET_CODE (base) == POST_INC) /* (R++) */
2290 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2291 AS2 (st,%0,%B1) CR_TAB
2292 AS2 (st,%0,%C1) CR_TAB
2293 AS2 (st,%0,%D1));
2294 fatal_insn ("unknown move insn:",insn);
2295 return "";
2296 }
2297
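/* Output a 4-byte (SImode or SFmode) move for INSN, with OPERANDS[0]
   as destination and OPERANDS[1] as source; register, constant and
   memory operands are handled.  If L is non-NULL, also store the
   instruction count in *L.  Return the assembler template.  */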
2298 const char *
2299 output_movsisf(rtx insn, rtx operands[], int *l)
2300 {
2301 int dummy;
2302 rtx dest = operands[0];
2303 rtx src = operands[1];
2304 int *real_l = l;
2305
2306 if (!l)
2307 l = &dummy;
2308
2309 if (register_operand (dest, VOIDmode))
2310 {
2311 if (register_operand (src, VOIDmode)) /* mov r,r */
2312 {
2313 if (true_regnum (dest) > true_regnum (src))
2314 {
2315 if (AVR_HAVE_MOVW)
2316 {
2317 *l = 2;
2318 return (AS2 (movw,%C0,%C1) CR_TAB
2319 AS2 (movw,%A0,%A1));
2320 }
2321 *l = 4;
2322 return (AS2 (mov,%D0,%D1) CR_TAB
2323 AS2 (mov,%C0,%C1) CR_TAB
2324 AS2 (mov,%B0,%B1) CR_TAB
2325 AS2 (mov,%A0,%A1));
2326 }
2327 else
2328 {
2329 if (AVR_HAVE_MOVW)
2330 {
2331 *l = 2;
2332 return (AS2 (movw,%A0,%A1) CR_TAB
2333 AS2 (movw,%C0,%C1));
2334 }
2335 *l = 4;
2336 return (AS2 (mov,%A0,%A1) CR_TAB
2337 AS2 (mov,%B0,%B1) CR_TAB
2338 AS2 (mov,%C0,%C1) CR_TAB
2339 AS2 (mov,%D0,%D1));
2340 }
2341 }
2342 else if (CONSTANT_P (src))
2343 {
2344 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2345 {
2346 *l = 4;
2347 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2348 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2349 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2350 AS2 (ldi,%D0,hhi8(%1)));
2351 }
2352
2353 if (GET_CODE (src) == CONST_INT)
2354 {
2355 const char *const clr_op0 =
2356 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2357 AS1 (clr,%B0) CR_TAB
2358 AS2 (movw,%C0,%A0))
2359 : (AS1 (clr,%A0) CR_TAB
2360 AS1 (clr,%B0) CR_TAB
2361 AS1 (clr,%C0) CR_TAB
2362 AS1 (clr,%D0));
2363
2364 if (src == const0_rtx) /* mov r,L */
2365 {
2366 *l = AVR_HAVE_MOVW ? 3 : 4;
2367 return clr_op0;
2368 }
2369 else if (src == const1_rtx)
2370 {
2371 if (!real_l)
2372 output_asm_insn (clr_op0, operands);
2373 *l = AVR_HAVE_MOVW ? 4 : 5;
2374 return AS1 (inc,%A0);
2375 }
2376 else if (src == constm1_rtx)
2377 {
2378 /* Load the immediate constant -1 into any register. */
2379 if (AVR_HAVE_MOVW)
2380 {
2381 *l = 4;
2382 return (AS1 (clr,%A0) CR_TAB
2383 AS1 (dec,%A0) CR_TAB
2384 AS2 (mov,%B0,%A0) CR_TAB
2385 AS2 (movw,%C0,%A0));
2386 }
2387 *l = 5;
2388 return (AS1 (clr,%A0) CR_TAB
2389 AS1 (dec,%A0) CR_TAB
2390 AS2 (mov,%B0,%A0) CR_TAB
2391 AS2 (mov,%C0,%A0) CR_TAB
2392 AS2 (mov,%D0,%A0));
2393 }
2394 else
2395 {
2396 int bit_nr = exact_log2 (INTVAL (src));
2397
2398 if (bit_nr >= 0)
2399 {
2400 *l = AVR_HAVE_MOVW ? 5 : 6;
2401 if (!real_l)
2402 {
2403 output_asm_insn (clr_op0, operands);
2404 output_asm_insn ("set", operands);
2405 }
2406 if (!real_l)
2407 avr_output_bld (operands, bit_nr);
2408
2409 return "";
2410 }
2411 }
2412 }
2413
2414 /* Last resort, better than loading from memory. */
2415 *l = 10;
2416 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2417 AS2 (ldi,r31,lo8(%1)) CR_TAB
2418 AS2 (mov,%A0,r31) CR_TAB
2419 AS2 (ldi,r31,hi8(%1)) CR_TAB
2420 AS2 (mov,%B0,r31) CR_TAB
2421 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2422 AS2 (mov,%C0,r31) CR_TAB
2423 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2424 AS2 (mov,%D0,r31) CR_TAB
2425 AS2 (mov,r31,__tmp_reg__));
2426 }
2427 else if (GET_CODE (src) == MEM)
2428 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2429 }
2430 else if (GET_CODE (dest) == MEM)
2431 {
2432 const char *templ;
2433
2434 if (src == const0_rtx)
2435 operands[1] = zero_reg_rtx;
2436
2437 templ = out_movsi_mr_r (insn, operands, real_l);
2438
2439 if (!real_l)
2440 output_asm_insn (templ, operands);
2441
2442 operands[1] = src;
2443 return "";
2444 }
2445 fatal_insn ("invalid insn:", insn);
2446 return "";
2447 }
2448
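/* Output code to store the single byte OP[1] to the memory destination
   OP[0] of INSN.  If L is non-NULL, also store the instruction count
   in *L.  Return the assembler template.  */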
2449 const char *
2450 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2451 {
2452 rtx dest = op[0];
2453 rtx src = op[1];
2454 rtx x = XEXP (dest, 0);
2455 int dummy;
2456
2457 if (!l)
2458 l = &dummy;
2459
2460 if (CONSTANT_ADDRESS_P (x))
2461 {
2462 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2463 {
2464 *l = 1;
2465 return AS2 (out,__SREG__,%1);
2466 }
2467 if (optimize > 0 && io_address_operand (x, QImode))
2468 {
2469 *l = 1;
2470 return AS2 (out,%0-0x20,%1);
2471 }
2472 *l = 2;
2473 return AS2 (sts,%0,%1);
2474 }
2475 /* memory access by reg+disp */
2476 else if (GET_CODE (x) == PLUS
2477 && REG_P (XEXP (x,0))
2478 && GET_CODE (XEXP (x,1)) == CONST_INT)
2479 {
2480 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2481 {
2482 int disp = INTVAL (XEXP (x,1));
2483 if (REGNO (XEXP (x,0)) != REG_Y)
2484 fatal_insn ("incorrect insn:",insn);
2485
2486 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2487 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2488 AS2 (std,Y+63,%1) CR_TAB
2489 AS2 (sbiw,r28,%o0-63));
2490
2491 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2492 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2493 AS2 (st,Y,%1) CR_TAB
2494 AS2 (subi,r28,lo8(%o0)) CR_TAB
2495 AS2 (sbci,r29,hi8(%o0)));
2496 }
2497 else if (REGNO (XEXP (x,0)) == REG_X)
2498 {
2499 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2500 {
2501 if (reg_unused_after (insn, XEXP (x,0)))
2502 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2503 AS2 (adiw,r26,%o0) CR_TAB
2504 AS2 (st,X,__tmp_reg__));
2505
2506 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2507 AS2 (adiw,r26,%o0) CR_TAB
2508 AS2 (st,X,__tmp_reg__) CR_TAB
2509 AS2 (sbiw,r26,%o0));
2510 }
2511 else
2512 {
2513 if (reg_unused_after (insn, XEXP (x,0)))
2514 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2515 AS2 (st,X,%1));
2516
2517 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2518 AS2 (st,X,%1) CR_TAB
2519 AS2 (sbiw,r26,%o0));
2520 }
2521 }
2522 *l = 1;
2523 return AS2 (std,%0,%1);
2524 }
2525 *l = 1;
2526 return AS2 (st,%0,%1);
2527 }
2528
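/* Output code to store the 2-byte register OP[1] to the memory
   destination OP[0] of INSN.  If L is non-NULL, also store the
   instruction count in *L.  Return the assembler template.  */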
2529 const char *
2530 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2531 {
2532 rtx dest = op[0];
2533 rtx src = op[1];
2534 rtx base = XEXP (dest, 0);
2535 int reg_base = true_regnum (base);
2536 int reg_src = true_regnum (src);
2537 /* "volatile" forces writing high byte first, even if less efficient,
2538 for correct operation with 16-bit I/O registers. */
2539 int mem_volatile_p = MEM_VOLATILE_P (dest);
2540 int tmp;
2541
2542 if (!l)
2543 l = &tmp;
2544 if (CONSTANT_ADDRESS_P (base))
2545 {
2546 if (optimize > 0 && io_address_operand (base, HImode))
2547 {
2548 *l = 2;
2549 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2550 AS2 (out,%A0-0x20,%A1));
2551 }
2552 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2553 AS2 (sts,%A0,%A1));
2554 }
2555 if (reg_base > 0)
2556 {
2557 if (reg_base == REG_X)
2558 {
2559 if (reg_src == REG_X)
2560 {
2561 /* "st X+,r26" and "st -X,r26" are undefined. */
2562 if (!mem_volatile_p && reg_unused_after (insn, src))
2563 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2564 AS2 (st,X,r26) CR_TAB
2565 AS2 (adiw,r26,1) CR_TAB
2566 AS2 (st,X,__tmp_reg__));
2567 else
2568 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2569 AS2 (adiw,r26,1) CR_TAB
2570 AS2 (st,X,__tmp_reg__) CR_TAB
2571 AS2 (sbiw,r26,1) CR_TAB
2572 AS2 (st,X,r26));
2573 }
2574 else
2575 {
2576 if (!mem_volatile_p && reg_unused_after (insn, base))
2577 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2578 AS2 (st,X,%B1));
2579 else
2580 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2581 AS2 (st,X,%B1) CR_TAB
2582 AS2 (st,-X,%A1));
2583 }
2584 }
2585 else
2586 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2587 AS2 (st,%0,%A1));
2588 }
2589 else if (GET_CODE (base) == PLUS)
2590 {
2591 int disp = INTVAL (XEXP (base, 1));
2592 reg_base = REGNO (XEXP (base, 0));
2593 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2594 {
2595 if (reg_base != REG_Y)
2596 fatal_insn ("incorrect insn:",insn);
2597
2598 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2599 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2600 AS2 (std,Y+63,%B1) CR_TAB
2601 AS2 (std,Y+62,%A1) CR_TAB
2602 AS2 (sbiw,r28,%o0-62));
2603
2604 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2605 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2606 AS2 (std,Y+1,%B1) CR_TAB
2607 AS2 (st,Y,%A1) CR_TAB
2608 AS2 (subi,r28,lo8(%o0)) CR_TAB
2609 AS2 (sbci,r29,hi8(%o0)));
2610 }
2611 if (reg_base == REG_X)
2612 {
2613 /* (X + d) = R */
2614 if (reg_src == REG_X)
2615 {
2616 *l = 7;
2617 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2618 AS2 (mov,__zero_reg__,r27) CR_TAB
2619 AS2 (adiw,r26,%o0+1) CR_TAB
2620 AS2 (st,X,__zero_reg__) CR_TAB
2621 AS2 (st,-X,__tmp_reg__) CR_TAB
2622 AS1 (clr,__zero_reg__) CR_TAB
2623 AS2 (sbiw,r26,%o0));
2624 }
2625 *l = 4;
2626 return (AS2 (adiw,r26,%o0+1) CR_TAB
2627 AS2 (st,X,%B1) CR_TAB
2628 AS2 (st,-X,%A1) CR_TAB
2629 AS2 (sbiw,r26,%o0));
2630 }
2631 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2632 AS2 (std,%A0,%A1));
2633 }
2634 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2635 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2636 AS2 (st,%0,%A1));
2637 else if (GET_CODE (base) == POST_INC) /* (R++) */
2638 {
2639 if (mem_volatile_p)
2640 {
2641 if (REGNO (XEXP (base, 0)) == REG_X)
2642 {
2643 *l = 4;
2644 return (AS2 (adiw,r26,1) CR_TAB
2645 AS2 (st,X,%B1) CR_TAB
2646 AS2 (st,-X,%A1) CR_TAB
2647 AS2 (adiw,r26,2));
2648 }
2649 else
2650 {
2651 *l = 3;
2652 return (AS2 (std,%p0+1,%B1) CR_TAB
2653 AS2 (st,%p0,%A1) CR_TAB
2654 AS2 (adiw,%r0,2));
2655 }
2656 }
2657
2658 *l = 2;
2659 return (AS2 (st,%0,%A1) CR_TAB
2660 AS2 (st,%0,%B1));
2661 }
2662 fatal_insn ("unknown move insn:",insn);
2663 return "";
2664 }
2665
2666 /* Return 1 if a frame pointer is required for the current function. */
2667
2668 bool
2669 avr_frame_pointer_required_p (void)
2670 {
2671 return (cfun->calls_alloca
2672 || crtl->args.info.nregs == 0
2673 || get_frame_size () > 0);
2674 }
2675
2676 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2677
2678 static RTX_CODE
2679 compare_condition (rtx insn)
2680 {
2681 rtx next = next_real_insn (insn);
2682 RTX_CODE cond = UNKNOWN;
2683 if (next && GET_CODE (next) == JUMP_INSN)
2684 {
2685 rtx pat = PATTERN (next);
2686 rtx src = SET_SRC (pat);
2687 rtx t = XEXP (src, 0);
2688 cond = GET_CODE (t);
2689 }
2690 return cond;
2691 }
2692
2693 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2694
2695 static int
2696 compare_sign_p (rtx insn)
2697 {
2698 RTX_CODE cond = compare_condition (insn);
2699 return (cond == GE || cond == LT);
2700 }
2701
2702 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2703 that needs to be swapped (GT, GTU, LE, LEU). */
2704
2705 int
2706 compare_diff_p (rtx insn)
2707 {
2708 RTX_CODE cond = compare_condition (insn);
2709 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2710 }
2711
2712 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2713
2714 int
2715 compare_eq_p (rtx insn)
2716 {
2717 RTX_CODE cond = compare_condition (insn);
2718 return (cond == EQ || cond == NE);
2719 }
2720
2721
2722 /* Output test instruction for HImode. */
2723
2724 const char *
2725 out_tsthi (rtx insn, rtx op, int *l)
2726 {
2727 if (compare_sign_p (insn))
2728 {
2729 if (l) *l = 1;
2730 return AS1 (tst,%B0);
2731 }
2732 if (reg_unused_after (insn, op)
2733 && compare_eq_p (insn))
2734 {
2735 /* Faster than sbiw if we can clobber the operand. */
2736 if (l) *l = 1;
2737 return AS2 (or,%A0,%B0);
2738 }
2739 if (test_hard_reg_class (ADDW_REGS, op))
2740 {
2741 if (l) *l = 1;
2742 return AS2 (sbiw,%0,0);
2743 }
2744 if (l) *l = 2;
2745 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2746 AS2 (cpc,%B0,__zero_reg__));
2747 }
2748
2749
2750 /* Output test instruction for SImode. */
2751
2752 const char *
2753 out_tstsi (rtx insn, rtx op, int *l)
2754 {
2755 if (compare_sign_p (insn))
2756 {
2757 if (l) *l = 1;
2758 return AS1 (tst,%D0);
2759 }
2760 if (test_hard_reg_class (ADDW_REGS, op))
2761 {
2762 if (l) *l = 3;
2763 return (AS2 (sbiw,%A0,0) CR_TAB
2764 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2765 AS2 (cpc,%D0,__zero_reg__));
2766 }
2767 if (l) *l = 4;
2768 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2769 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2770 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2771 AS2 (cpc,%D0,__zero_reg__));
2772 }
2773
2774
2775 /* Generate asm equivalent for various shifts.
2776 Shift count is a CONST_INT, MEM or REG.
2777 This only handles cases that are not already
2778 carefully hand-optimized in ?sh??i3_out. */
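
/* A rough sketch of the loop this function emits (counter set-up and
   the optional restore of __tmp_reg__ are omitted; %3 is the counter):

       rjmp 2f          ; only when the count is not a CONST_INT
   1:  <TEMPL>          ; shift by one bit
   2:  dec %3           ; "lsr %3" when __zero_reg__ counts down a bit
       brpl 1b          ; "brne 1b" when there is no second label  */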
2779
2780 void
2781 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2782 int *len, int t_len)
2783 {
2784 rtx op[10];
2785 char str[500];
2786 int second_label = 1;
2787 int saved_in_tmp = 0;
2788 int use_zero_reg = 0;
2789
2790 op[0] = operands[0];
2791 op[1] = operands[1];
2792 op[2] = operands[2];
2793 op[3] = operands[3];
2794 str[0] = 0;
2795
2796 if (len)
2797 *len = 1;
2798
2799 if (GET_CODE (operands[2]) == CONST_INT)
2800 {
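  /* A PARALLEL pattern means a scratch QI register is available as
     operand 3.  */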
2801 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2802 int count = INTVAL (operands[2]);
2803 int max_len = 10; /* If larger than this, always use a loop. */
2804
2805 if (count <= 0)
2806 {
2807 if (len)
2808 *len = 0;
2809 return;
2810 }
2811
2812 if (count < 8 && !scratch)
2813 use_zero_reg = 1;
2814
2815 if (optimize_size)
2816 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
2817
2818 if (t_len * count <= max_len)
2819 {
2820 /* Output shifts inline with no loop - faster. */
2821 if (len)
2822 *len = t_len * count;
2823 else
2824 {
2825 while (count-- > 0)
2826 output_asm_insn (templ, op);
2827 }
2828
2829 return;
2830 }
2831
2832 if (scratch)
2833 {
2834 if (!len)
2835 strcat (str, AS2 (ldi,%3,%2));
2836 }
2837 else if (use_zero_reg)
2838 {
2839 /* Hack to save one word: use __zero_reg__ as loop counter.
2840 Set one bit, then shift in a loop until it is 0 again. */
2841
2842 op[3] = zero_reg_rtx;
2843 if (len)
2844 *len = 2;
2845 else
2846 strcat (str, ("set" CR_TAB
2847 AS2 (bld,%3,%2-1)));
2848 }
2849 else
2850 {
2851 /* No scratch register available, use one from LD_REGS (saved in
2852 __tmp_reg__) that doesn't overlap with registers to shift. */
2853
2854 op[3] = gen_rtx_REG (QImode,
2855 ((true_regnum (operands[0]) - 1) & 15) + 16);
2856 op[4] = tmp_reg_rtx;
2857 saved_in_tmp = 1;
2858
2859 if (len)
2860 *len = 3; /* Includes "mov %3,%4" after the loop. */
2861 else
2862 strcat (str, (AS2 (mov,%4,%3) CR_TAB
2863 AS2 (ldi,%3,%2)));
2864 }
2865
2866 second_label = 0;
2867 }
2868 else if (GET_CODE (operands[2]) == MEM)
2869 {
2870 rtx op_mov[10];
2871
2872 op[3] = op_mov[0] = tmp_reg_rtx;
2873 op_mov[1] = op[2];
2874
2875 if (len)
2876 out_movqi_r_mr (insn, op_mov, len);
2877 else
2878 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
2879 }
2880 else if (register_operand (operands[2], QImode))
2881 {
2882 if (reg_unused_after (insn, operands[2]))
2883 op[3] = op[2];
2884 else
2885 {
2886 op[3] = tmp_reg_rtx;
2887 if (!len)
2888 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
2889 }
2890 }
2891 else
2892 fatal_insn ("bad shift insn:", insn);
2893
2894 if (second_label)
2895 {
2896 if (len)
2897 ++*len;
2898 else
2899 strcat (str, AS1 (rjmp,2f));
2900 }
2901
2902 if (len)
2903 *len += t_len + 2; /* template + dec + brXX */
2904 else
2905 {
2906 strcat (str, "\n1:\t");
2907 strcat (str, templ);
2908 strcat (str, second_label ? "\n2:\t" : "\n\t");
2909 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
2910 strcat (str, CR_TAB);
2911 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
2912 if (saved_in_tmp)
2913 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
2914 output_asm_insn (str, op);
2915 }
2916 }
2917
2918
2919 /* 8bit shift left ((char)x << i) */
2920
2921 const char *
2922 ashlqi3_out (rtx insn, rtx operands[], int *len)
2923 {
2924 if (GET_CODE (operands[2]) == CONST_INT)
2925 {
2926 int k;
2927
2928 if (!len)
2929 len = &k;
2930
2931 switch (INTVAL (operands[2]))
2932 {
2933 default:
2934 if (INTVAL (operands[2]) < 8)
2935 break;
2936
2937 *len = 1;
2938 return AS1 (clr,%0);
2939
2940 case 1:
2941 *len = 1;
2942 return AS1 (lsl,%0);
2943
2944 case 2:
2945 *len = 2;
2946 return (AS1 (lsl,%0) CR_TAB
2947 AS1 (lsl,%0));
2948
2949 case 3:
2950 *len = 3;
2951 return (AS1 (lsl,%0) CR_TAB
2952 AS1 (lsl,%0) CR_TAB
2953 AS1 (lsl,%0));
2954
2955 case 4:
2956 if (test_hard_reg_class (LD_REGS, operands[0]))
2957 {
2958 *len = 2;
2959 return (AS1 (swap,%0) CR_TAB
2960 AS2 (andi,%0,0xf0));
2961 }
2962 *len = 4;
2963 return (AS1 (lsl,%0) CR_TAB
2964 AS1 (lsl,%0) CR_TAB
2965 AS1 (lsl,%0) CR_TAB
2966 AS1 (lsl,%0));
2967
2968 case 5:
2969 if (test_hard_reg_class (LD_REGS, operands[0]))
2970 {
2971 *len = 3;
2972 return (AS1 (swap,%0) CR_TAB
2973 AS1 (lsl,%0) CR_TAB
2974 AS2 (andi,%0,0xe0));
2975 }
2976 *len = 5;
2977 return (AS1 (lsl,%0) CR_TAB
2978 AS1 (lsl,%0) CR_TAB
2979 AS1 (lsl,%0) CR_TAB
2980 AS1 (lsl,%0) CR_TAB
2981 AS1 (lsl,%0));
2982
2983 case 6:
2984 if (test_hard_reg_class (LD_REGS, operands[0]))
2985 {
2986 *len = 4;
2987 return (AS1 (swap,%0) CR_TAB
2988 AS1 (lsl,%0) CR_TAB
2989 AS1 (lsl,%0) CR_TAB
2990 AS2 (andi,%0,0xc0));
2991 }
2992 *len = 6;
2993 return (AS1 (lsl,%0) CR_TAB
2994 AS1 (lsl,%0) CR_TAB
2995 AS1 (lsl,%0) CR_TAB
2996 AS1 (lsl,%0) CR_TAB
2997 AS1 (lsl,%0) CR_TAB
2998 AS1 (lsl,%0));
2999
3000 case 7:
3001 *len = 3;
3002 return (AS1 (ror,%0) CR_TAB
3003 AS1 (clr,%0) CR_TAB
3004 AS1 (ror,%0));
3005 }
3006 }
3007 else if (CONSTANT_P (operands[2]))
3008 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3009
3010 out_shift_with_cnt (AS1 (lsl,%0),
3011 insn, operands, len, 1);
3012 return "";
3013 }
3014
3015
3016 /* 16bit shift left ((short)x << i) */
3017
3018 const char *
3019 ashlhi3_out (rtx insn, rtx operands[], int *len)
3020 {
3021 if (GET_CODE (operands[2]) == CONST_INT)
3022 {
3023 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3024 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3025 int k;
3026 int *t = len;
3027
3028 if (!len)
3029 len = &k;
3030
3031 switch (INTVAL (operands[2]))
3032 {
3033 default:
3034 if (INTVAL (operands[2]) < 16)
3035 break;
3036
3037 *len = 2;
3038 return (AS1 (clr,%B0) CR_TAB
3039 AS1 (clr,%A0));
3040
3041 case 4:
3042 if (optimize_size && scratch)
3043 break; /* 5 */
3044 if (ldi_ok)
3045 {
3046 *len = 6;
3047 return (AS1 (swap,%A0) CR_TAB
3048 AS1 (swap,%B0) CR_TAB
3049 AS2 (andi,%B0,0xf0) CR_TAB
3050 AS2 (eor,%B0,%A0) CR_TAB
3051 AS2 (andi,%A0,0xf0) CR_TAB
3052 AS2 (eor,%B0,%A0));
3053 }
3054 if (scratch)
3055 {
3056 *len = 7;
3057 return (AS1 (swap,%A0) CR_TAB
3058 AS1 (swap,%B0) CR_TAB
3059 AS2 (ldi,%3,0xf0) CR_TAB
3060 AS2 (and,%B0,%3) CR_TAB
3061 AS2 (eor,%B0,%A0) CR_TAB
3062 AS2 (and,%A0,%3) CR_TAB
3063 AS2 (eor,%B0,%A0));
3064 }
3065 break; /* optimize_size ? 6 : 8 */
3066
3067 case 5:
3068 if (optimize_size)
3069 break; /* scratch ? 5 : 6 */
3070 if (ldi_ok)
3071 {
3072 *len = 8;
3073 return (AS1 (lsl,%A0) CR_TAB
3074 AS1 (rol,%B0) CR_TAB
3075 AS1 (swap,%A0) CR_TAB
3076 AS1 (swap,%B0) CR_TAB
3077 AS2 (andi,%B0,0xf0) CR_TAB
3078 AS2 (eor,%B0,%A0) CR_TAB
3079 AS2 (andi,%A0,0xf0) CR_TAB
3080 AS2 (eor,%B0,%A0));
3081 }
3082 if (scratch)
3083 {
3084 *len = 9;
3085 return (AS1 (lsl,%A0) CR_TAB
3086 AS1 (rol,%B0) CR_TAB
3087 AS1 (swap,%A0) CR_TAB
3088 AS1 (swap,%B0) CR_TAB
3089 AS2 (ldi,%3,0xf0) CR_TAB
3090 AS2 (and,%B0,%3) CR_TAB
3091 AS2 (eor,%B0,%A0) CR_TAB
3092 AS2 (and,%A0,%3) CR_TAB
3093 AS2 (eor,%B0,%A0));
3094 }
3095 break; /* 10 */
3096
3097 case 6:
3098 if (optimize_size)
3099 break; /* scratch ? 5 : 6 */
3100 *len = 9;
3101 return (AS1 (clr,__tmp_reg__) CR_TAB
3102 AS1 (lsr,%B0) CR_TAB
3103 AS1 (ror,%A0) CR_TAB
3104 AS1 (ror,__tmp_reg__) CR_TAB
3105 AS1 (lsr,%B0) CR_TAB
3106 AS1 (ror,%A0) CR_TAB
3107 AS1 (ror,__tmp_reg__) CR_TAB
3108 AS2 (mov,%B0,%A0) CR_TAB
3109 AS2 (mov,%A0,__tmp_reg__));
3110
3111 case 7:
3112 *len = 5;
3113 return (AS1 (lsr,%B0) CR_TAB
3114 AS2 (mov,%B0,%A0) CR_TAB
3115 AS1 (clr,%A0) CR_TAB
3116 AS1 (ror,%B0) CR_TAB
3117 AS1 (ror,%A0));
3118
3119 case 8:
3120 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3121 AS1 (clr,%A0));
3122
3123 case 9:
3124 *len = 3;
3125 return (AS2 (mov,%B0,%A0) CR_TAB
3126 AS1 (clr,%A0) CR_TAB
3127 AS1 (lsl,%B0));
3128
3129 case 10:
3130 *len = 4;
3131 return (AS2 (mov,%B0,%A0) CR_TAB
3132 AS1 (clr,%A0) CR_TAB
3133 AS1 (lsl,%B0) CR_TAB
3134 AS1 (lsl,%B0));
3135
3136 case 11:
3137 *len = 5;
3138 return (AS2 (mov,%B0,%A0) CR_TAB
3139 AS1 (clr,%A0) CR_TAB
3140 AS1 (lsl,%B0) CR_TAB
3141 AS1 (lsl,%B0) CR_TAB
3142 AS1 (lsl,%B0));
3143
3144 case 12:
3145 if (ldi_ok)
3146 {
3147 *len = 4;
3148 return (AS2 (mov,%B0,%A0) CR_TAB
3149 AS1 (clr,%A0) CR_TAB
3150 AS1 (swap,%B0) CR_TAB
3151 AS2 (andi,%B0,0xf0));
3152 }
3153 if (scratch)
3154 {
3155 *len = 5;
3156 return (AS2 (mov,%B0,%A0) CR_TAB
3157 AS1 (clr,%A0) CR_TAB
3158 AS1 (swap,%B0) CR_TAB
3159 AS2 (ldi,%3,0xf0) CR_TAB
3160 AS2 (and,%B0,%3));
3161 }
3162 *len = 6;
3163 return (AS2 (mov,%B0,%A0) CR_TAB
3164 AS1 (clr,%A0) CR_TAB
3165 AS1 (lsl,%B0) CR_TAB
3166 AS1 (lsl,%B0) CR_TAB
3167 AS1 (lsl,%B0) CR_TAB
3168 AS1 (lsl,%B0));
3169
3170 case 13:
3171 if (ldi_ok)
3172 {
3173 *len = 5;
3174 return (AS2 (mov,%B0,%A0) CR_TAB
3175 AS1 (clr,%A0) CR_TAB
3176 AS1 (swap,%B0) CR_TAB
3177 AS1 (lsl,%B0) CR_TAB
3178 AS2 (andi,%B0,0xe0));
3179 }
3180 if (AVR_HAVE_MUL && scratch)
3181 {
3182 *len = 5;
3183 return (AS2 (ldi,%3,0x20) CR_TAB
3184 AS2 (mul,%A0,%3) CR_TAB
3185 AS2 (mov,%B0,r0) CR_TAB
3186 AS1 (clr,%A0) CR_TAB
3187 AS1 (clr,__zero_reg__));
3188 }
3189 if (optimize_size && scratch)
3190 break; /* 5 */
3191 if (scratch)
3192 {
3193 *len = 6;
3194 return (AS2 (mov,%B0,%A0) CR_TAB
3195 AS1 (clr,%A0) CR_TAB
3196 AS1 (swap,%B0) CR_TAB
3197 AS1 (lsl,%B0) CR_TAB
3198 AS2 (ldi,%3,0xe0) CR_TAB
3199 AS2 (and,%B0,%3));
3200 }
3201 if (AVR_HAVE_MUL)
3202 {
3203 *len = 6;
3204 return ("set" CR_TAB
3205 AS2 (bld,r1,5) CR_TAB
3206 AS2 (mul,%A0,r1) CR_TAB
3207 AS2 (mov,%B0,r0) CR_TAB
3208 AS1 (clr,%A0) CR_TAB
3209 AS1 (clr,__zero_reg__));
3210 }
3211 *len = 7;
3212 return (AS2 (mov,%B0,%A0) CR_TAB
3213 AS1 (clr,%A0) CR_TAB
3214 AS1 (lsl,%B0) CR_TAB
3215 AS1 (lsl,%B0) CR_TAB
3216 AS1 (lsl,%B0) CR_TAB
3217 AS1 (lsl,%B0) CR_TAB
3218 AS1 (lsl,%B0));
3219
3220 case 14:
3221 if (AVR_HAVE_MUL && ldi_ok)
3222 {
3223 *len = 5;
3224 return (AS2 (ldi,%B0,0x40) CR_TAB
3225 AS2 (mul,%A0,%B0) CR_TAB
3226 AS2 (mov,%B0,r0) CR_TAB
3227 AS1 (clr,%A0) CR_TAB
3228 AS1 (clr,__zero_reg__));
3229 }
3230 if (AVR_HAVE_MUL && scratch)
3231 {
3232 *len = 5;
3233 return (AS2 (ldi,%3,0x40) CR_TAB
3234 AS2 (mul,%A0,%3) CR_TAB
3235 AS2 (mov,%B0,r0) CR_TAB
3236 AS1 (clr,%A0) CR_TAB
3237 AS1 (clr,__zero_reg__));
3238 }
3239 if (optimize_size && ldi_ok)
3240 {
3241 *len = 5;
3242 return (AS2 (mov,%B0,%A0) CR_TAB
3243 AS2 (ldi,%A0,6) "\n1:\t"
3244 AS1 (lsl,%B0) CR_TAB
3245 AS1 (dec,%A0) CR_TAB
3246 AS1 (brne,1b));
3247 }
3248 if (optimize_size && scratch)
3249 break; /* 5 */
3250 *len = 6;
3251 return (AS1 (clr,%B0) CR_TAB
3252 AS1 (lsr,%A0) CR_TAB
3253 AS1 (ror,%B0) CR_TAB
3254 AS1 (lsr,%A0) CR_TAB
3255 AS1 (ror,%B0) CR_TAB
3256 AS1 (clr,%A0));
3257
3258 case 15:
3259 *len = 4;
3260 return (AS1 (clr,%B0) CR_TAB
3261 AS1 (lsr,%A0) CR_TAB
3262 AS1 (ror,%B0) CR_TAB
3263 AS1 (clr,%A0));
3264 }
3265 len = t;
3266 }
3267 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3268 AS1 (rol,%B0)),
3269 insn, operands, len, 2);
3270 return "";
3271 }
3272
3273
3274 /* 32bit shift left ((long)x << i) */
3275
3276 const char *
3277 ashlsi3_out (rtx insn, rtx operands[], int *len)
3278 {
3279 if (GET_CODE (operands[2]) == CONST_INT)
3280 {
3281 int k;
3282 int *t = len;
3283
3284 if (!len)
3285 len = &k;
3286
3287 switch (INTVAL (operands[2]))
3288 {
3289 default:
3290 if (INTVAL (operands[2]) < 32)
3291 break;
3292
3293 if (AVR_HAVE_MOVW)
3294 return *len = 3, (AS1 (clr,%D0) CR_TAB
3295 AS1 (clr,%C0) CR_TAB
3296 AS2 (movw,%A0,%C0));
3297 *len = 4;
3298 return (AS1 (clr,%D0) CR_TAB
3299 AS1 (clr,%C0) CR_TAB
3300 AS1 (clr,%B0) CR_TAB
3301 AS1 (clr,%A0));
3302
3303 case 8:
3304 {
3305 int reg0 = true_regnum (operands[0]);
3306 int reg1 = true_regnum (operands[1]);
3307 *len = 4;
3308 if (reg0 >= reg1)
3309 return (AS2 (mov,%D0,%C1) CR_TAB
3310 AS2 (mov,%C0,%B1) CR_TAB
3311 AS2 (mov,%B0,%A1) CR_TAB
3312 AS1 (clr,%A0));
3313 else
3314 return (AS1 (clr,%A0) CR_TAB
3315 AS2 (mov,%B0,%A1) CR_TAB
3316 AS2 (mov,%C0,%B1) CR_TAB
3317 AS2 (mov,%D0,%C1));
3318 }
3319
3320 case 16:
3321 {
3322 int reg0 = true_regnum (operands[0]);
3323 int reg1 = true_regnum (operands[1]);
3324 if (reg0 + 2 == reg1)
3325 return *len = 2, (AS1 (clr,%B0) CR_TAB
3326 AS1 (clr,%A0));
3327 if (AVR_HAVE_MOVW)
3328 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3329 AS1 (clr,%B0) CR_TAB
3330 AS1 (clr,%A0));
3331 else
3332 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3333 AS2 (mov,%D0,%B1) CR_TAB
3334 AS1 (clr,%B0) CR_TAB
3335 AS1 (clr,%A0));
3336 }
3337
3338 case 24:
3339 *len = 4;
3340 return (AS2 (mov,%D0,%A1) CR_TAB
3341 AS1 (clr,%C0) CR_TAB
3342 AS1 (clr,%B0) CR_TAB
3343 AS1 (clr,%A0));
3344
3345 case 31:
3346 *len = 6;
3347 return (AS1 (clr,%D0) CR_TAB
3348 AS1 (lsr,%A0) CR_TAB
3349 AS1 (ror,%D0) CR_TAB
3350 AS1 (clr,%C0) CR_TAB
3351 AS1 (clr,%B0) CR_TAB
3352 AS1 (clr,%A0));
3353 }
3354 len = t;
3355 }
3356 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3357 AS1 (rol,%B0) CR_TAB
3358 AS1 (rol,%C0) CR_TAB
3359 AS1 (rol,%D0)),
3360 insn, operands, len, 4);
3361 return "";
3362 }
3363
3364 /* 8bit arithmetic shift right ((signed char)x >> i) */
3365
3366 const char *
3367 ashrqi3_out (rtx insn, rtx operands[], int *len)
3368 {
3369 if (GET_CODE (operands[2]) == CONST_INT)
3370 {
3371 int k;
3372
3373 if (!len)
3374 len = &k;
3375
3376 switch (INTVAL (operands[2]))
3377 {
3378 case 1:
3379 *len = 1;
3380 return AS1 (asr,%0);
3381
3382 case 2:
3383 *len = 2;
3384 return (AS1 (asr,%0) CR_TAB
3385 AS1 (asr,%0));
3386
3387 case 3:
3388 *len = 3;
3389 return (AS1 (asr,%0) CR_TAB
3390 AS1 (asr,%0) CR_TAB
3391 AS1 (asr,%0));
3392
3393 case 4:
3394 *len = 4;
3395 return (AS1 (asr,%0) CR_TAB
3396 AS1 (asr,%0) CR_TAB
3397 AS1 (asr,%0) CR_TAB
3398 AS1 (asr,%0));
3399
3400 case 5:
3401 *len = 5;
3402 return (AS1 (asr,%0) CR_TAB
3403 AS1 (asr,%0) CR_TAB
3404 AS1 (asr,%0) CR_TAB
3405 AS1 (asr,%0) CR_TAB
3406 AS1 (asr,%0));
3407
3408 case 6:
3409 *len = 4;
3410 return (AS2 (bst,%0,6) CR_TAB
3411 AS1 (lsl,%0) CR_TAB
3412 AS2 (sbc,%0,%0) CR_TAB
3413 AS2 (bld,%0,0));
3414
3415 default:
3416 if (INTVAL (operands[2]) < 8)
3417 break;
3418
3419 /* fall through */
3420
3421 case 7:
3422 *len = 2;
3423 return (AS1 (lsl,%0) CR_TAB
3424 AS2 (sbc,%0,%0));
3425 }
3426 }
3427 else if (CONSTANT_P (operands[2]))
3428 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3429
3430 out_shift_with_cnt (AS1 (asr,%0),
3431 insn, operands, len, 1);
3432 return "";
3433 }
3434
3435
3436 /* 16bit arithmetic shift right ((signed short)x >> i) */
3437
3438 const char *
3439 ashrhi3_out (rtx insn, rtx operands[], int *len)
3440 {
3441 if (GET_CODE (operands[2]) == CONST_INT)
3442 {
3443 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3444 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3445 int k;
3446 int *t = len;
3447
3448 if (!len)
3449 len = &k;
3450
3451 switch (INTVAL (operands[2]))
3452 {
3453 case 4:
3454 case 5:
3455 /* XXX try to optimize this too? */
3456 break;
3457
3458 case 6:
3459 if (optimize_size)
3460 break; /* scratch ? 5 : 6 */
3461 *len = 8;
3462 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3463 AS2 (mov,%A0,%B0) CR_TAB
3464 AS1 (lsl,__tmp_reg__) CR_TAB
3465 AS1 (rol,%A0) CR_TAB
3466 AS2 (sbc,%B0,%B0) CR_TAB
3467 AS1 (lsl,__tmp_reg__) CR_TAB
3468 AS1 (rol,%A0) CR_TAB
3469 AS1 (rol,%B0));
3470
3471 case 7:
3472 *len = 4;
3473 return (AS1 (lsl,%A0) CR_TAB
3474 AS2 (mov,%A0,%B0) CR_TAB
3475 AS1 (rol,%A0) CR_TAB
3476 AS2 (sbc,%B0,%B0));
3477
3478 case 8:
3479 {
3480 int reg0 = true_regnum (operands[0]);
3481 int reg1 = true_regnum (operands[1]);
3482
3483 if (reg0 == reg1)
3484 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3485 AS1 (lsl,%B0) CR_TAB
3486 AS2 (sbc,%B0,%B0));
3487 else
3488 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3489 AS1 (clr,%B0) CR_TAB
3490 AS2 (sbrc,%A0,7) CR_TAB
3491 AS1 (dec,%B0));
3492 }
3493
3494 case 9:
3495 *len = 4;
3496 return (AS2 (mov,%A0,%B0) CR_TAB
3497 AS1 (lsl,%B0) CR_TAB
3498 AS2 (sbc,%B0,%B0) CR_TAB
3499 AS1 (asr,%A0));
3500
3501 case 10:
3502 *len = 5;
3503 return (AS2 (mov,%A0,%B0) CR_TAB
3504 AS1 (lsl,%B0) CR_TAB
3505 AS2 (sbc,%B0,%B0) CR_TAB
3506 AS1 (asr,%A0) CR_TAB
3507 AS1 (asr,%A0));
3508
3509 case 11:
3510 if (AVR_HAVE_MUL && ldi_ok)
3511 {
3512 *len = 5;
3513 return (AS2 (ldi,%A0,0x20) CR_TAB
3514 AS2 (muls,%B0,%A0) CR_TAB
3515 AS2 (mov,%A0,r1) CR_TAB
3516 AS2 (sbc,%B0,%B0) CR_TAB
3517 AS1 (clr,__zero_reg__));
3518 }
3519 if (optimize_size && scratch)
3520 break; /* 5 */
3521 *len = 6;
3522 return (AS2 (mov,%A0,%B0) CR_TAB
3523 AS1 (lsl,%B0) CR_TAB
3524 AS2 (sbc,%B0,%B0) CR_TAB
3525 AS1 (asr,%A0) CR_TAB
3526 AS1 (asr,%A0) CR_TAB
3527 AS1 (asr,%A0));
3528
3529 case 12:
3530 if (AVR_HAVE_MUL && ldi_ok)
3531 {
3532 *len = 5;
3533 return (AS2 (ldi,%A0,0x10) CR_TAB
3534 AS2 (muls,%B0,%A0) CR_TAB
3535 AS2 (mov,%A0,r1) CR_TAB
3536 AS2 (sbc,%B0,%B0) CR_TAB
3537 AS1 (clr,__zero_reg__));
3538 }
3539 if (optimize_size && scratch)
3540 break; /* 5 */
3541 *len = 7;
3542 return (AS2 (mov,%A0,%B0) CR_TAB
3543 AS1 (lsl,%B0) CR_TAB
3544 AS2 (sbc,%B0,%B0) CR_TAB
3545 AS1 (asr,%A0) CR_TAB
3546 AS1 (asr,%A0) CR_TAB
3547 AS1 (asr,%A0) CR_TAB
3548 AS1 (asr,%A0));
3549
3550 case 13:
3551 if (AVR_HAVE_MUL && ldi_ok)
3552 {
3553 *len = 5;
3554 return (AS2 (ldi,%A0,0x08) CR_TAB
3555 AS2 (muls,%B0,%A0) CR_TAB
3556 AS2 (mov,%A0,r1) CR_TAB
3557 AS2 (sbc,%B0,%B0) CR_TAB
3558 AS1 (clr,__zero_reg__));
3559 }
3560 if (optimize_size)
3561 break; /* scratch ? 5 : 7 */
3562 *len = 8;
3563 return (AS2 (mov,%A0,%B0) CR_TAB
3564 AS1 (lsl,%B0) CR_TAB
3565 AS2 (sbc,%B0,%B0) CR_TAB
3566 AS1 (asr,%A0) CR_TAB
3567 AS1 (asr,%A0) CR_TAB
3568 AS1 (asr,%A0) CR_TAB
3569 AS1 (asr,%A0) CR_TAB
3570 AS1 (asr,%A0));
3571
3572 case 14:
3573 *len = 5;
3574 return (AS1 (lsl,%B0) CR_TAB
3575 AS2 (sbc,%A0,%A0) CR_TAB
3576 AS1 (lsl,%B0) CR_TAB
3577 AS2 (mov,%B0,%A0) CR_TAB
3578 AS1 (rol,%A0));
3579
3580 default:
3581 if (INTVAL (operands[2]) < 16)
3582 break;
3583
3584 /* fall through */
3585
3586 case 15:
3587 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3588 AS2 (sbc,%A0,%A0) CR_TAB
3589 AS2 (mov,%B0,%A0));
3590 }
3591 len = t;
3592 }
3593 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3594 AS1 (ror,%A0)),
3595 insn, operands, len, 2);
3596 return "";
3597 }
3598
3599
3600 /* 32bit arithmetic shift right ((signed long)x >> i) */
3601
3602 const char *
3603 ashrsi3_out (rtx insn, rtx operands[], int *len)
3604 {
3605 if (GET_CODE (operands[2]) == CONST_INT)
3606 {
3607 int k;
3608 int *t = len;
3609
3610 if (!len)
3611 len = &k;
3612
3613 switch (INTVAL (operands[2]))
3614 {
3615 case 8:
3616 {
3617 int reg0 = true_regnum (operands[0]);
3618 int reg1 = true_regnum (operands[1]);
3619 *len=6;
3620 if (reg0 <= reg1)
3621 return (AS2 (mov,%A0,%B1) CR_TAB
3622 AS2 (mov,%B0,%C1) CR_TAB
3623 AS2 (mov,%C0,%D1) CR_TAB
3624 AS1 (clr,%D0) CR_TAB
3625 AS2 (sbrc,%C0,7) CR_TAB
3626 AS1 (dec,%D0));
3627 else
3628 return (AS1 (clr,%D0) CR_TAB
3629 AS2 (sbrc,%D1,7) CR_TAB
3630 AS1 (dec,%D0) CR_TAB
3631 AS2 (mov,%C0,%D1) CR_TAB
3632 AS2 (mov,%B0,%C1) CR_TAB
3633 AS2 (mov,%A0,%B1));
3634 }
3635
3636 case 16:
3637 {
3638 int reg0 = true_regnum (operands[0]);
3639 int reg1 = true_regnum (operands[1]);
3640
3641 if (reg0 == reg1 + 2)
3642 return *len = 4, (AS1 (clr,%D0) CR_TAB
3643 AS2 (sbrc,%B0,7) CR_TAB
3644 AS1 (com,%D0) CR_TAB
3645 AS2 (mov,%C0,%D0));
3646 if (AVR_HAVE_MOVW)
3647 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3648 AS1 (clr,%D0) CR_TAB
3649 AS2 (sbrc,%B0,7) CR_TAB
3650 AS1 (com,%D0) CR_TAB
3651 AS2 (mov,%C0,%D0));
3652 else
3653 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3654 AS2 (mov,%A0,%C1) CR_TAB
3655 AS1 (clr,%D0) CR_TAB
3656 AS2 (sbrc,%B0,7) CR_TAB
3657 AS1 (com,%D0) CR_TAB
3658 AS2 (mov,%C0,%D0));
3659 }
3660
3661 case 24:
3662 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3663 AS1 (clr,%D0) CR_TAB
3664 AS2 (sbrc,%A0,7) CR_TAB
3665 AS1 (com,%D0) CR_TAB
3666 AS2 (mov,%B0,%D0) CR_TAB
3667 AS2 (mov,%C0,%D0));
3668
3669 default:
3670 if (INTVAL (operands[2]) < 32)
3671 break;
3672
3673 /* fall through */
3674
3675 case 31:
3676 if (AVR_HAVE_MOVW)
3677 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3678 AS2 (sbc,%A0,%A0) CR_TAB
3679 AS2 (mov,%B0,%A0) CR_TAB
3680 AS2 (movw,%C0,%A0));
3681 else
3682 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3683 AS2 (sbc,%A0,%A0) CR_TAB
3684 AS2 (mov,%B0,%A0) CR_TAB
3685 AS2 (mov,%C0,%A0) CR_TAB
3686 AS2 (mov,%D0,%A0));
3687 }
3688 len = t;
3689 }
3690 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3691 AS1 (ror,%C0) CR_TAB
3692 AS1 (ror,%B0) CR_TAB
3693 AS1 (ror,%A0)),
3694 insn, operands, len, 4);
3695 return "";
3696 }
3697
3698 /* 8bit logical shift right ((unsigned char)x >> i) */
3699
3700 const char *
3701 lshrqi3_out (rtx insn, rtx operands[], int *len)
3702 {
3703 if (GET_CODE (operands[2]) == CONST_INT)
3704 {
3705 int k;
3706
3707 if (!len)
3708 len = &k;
3709
3710 switch (INTVAL (operands[2]))
3711 {
3712 default:
3713 if (INTVAL (operands[2]) < 8)
3714 break;
3715
3716 *len = 1;
3717 return AS1 (clr,%0);
3718
3719 case 1:
3720 *len = 1;
3721 return AS1 (lsr,%0);
3722
3723 case 2:
3724 *len = 2;
3725 return (AS1 (lsr,%0) CR_TAB
3726 AS1 (lsr,%0));
3727 case 3:
3728 *len = 3;
3729 return (AS1 (lsr,%0) CR_TAB
3730 AS1 (lsr,%0) CR_TAB
3731 AS1 (lsr,%0));
3732
3733 case 4:
3734 if (test_hard_reg_class (LD_REGS, operands[0]))
3735 {
3736 *len=2;
3737 return (AS1 (swap,%0) CR_TAB
3738 AS2 (andi,%0,0x0f));
3739 }
3740 *len = 4;
3741 return (AS1 (lsr,%0) CR_TAB
3742 AS1 (lsr,%0) CR_TAB
3743 AS1 (lsr,%0) CR_TAB
3744 AS1 (lsr,%0));
3745
3746 case 5:
3747 if (test_hard_reg_class (LD_REGS, operands[0]))
3748 {
3749 *len = 3;
3750 return (AS1 (swap,%0) CR_TAB
3751 AS1 (lsr,%0) CR_TAB
3752 AS2 (andi,%0,0x7));
3753 }
3754 *len = 5;
3755 return (AS1 (lsr,%0) CR_TAB
3756 AS1 (lsr,%0) CR_TAB
3757 AS1 (lsr,%0) CR_TAB
3758 AS1 (lsr,%0) CR_TAB
3759 AS1 (lsr,%0));
3760
3761 case 6:
3762 if (test_hard_reg_class (LD_REGS, operands[0]))
3763 {
3764 *len = 4;
3765 return (AS1 (swap,%0) CR_TAB
3766 AS1 (lsr,%0) CR_TAB
3767 AS1 (lsr,%0) CR_TAB
3768 AS2 (andi,%0,0x3));
3769 }
3770 *len = 6;
3771 return (AS1 (lsr,%0) CR_TAB
3772 AS1 (lsr,%0) CR_TAB
3773 AS1 (lsr,%0) CR_TAB
3774 AS1 (lsr,%0) CR_TAB
3775 AS1 (lsr,%0) CR_TAB
3776 AS1 (lsr,%0));
3777
3778 case 7:
3779 *len = 3;
3780 return (AS1 (rol,%0) CR_TAB
3781 AS1 (clr,%0) CR_TAB
3782 AS1 (rol,%0));
3783 }
3784 }
3785 else if (CONSTANT_P (operands[2]))
3786 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3787
3788 out_shift_with_cnt (AS1 (lsr,%0),
3789 insn, operands, len, 1);
3790 return "";
3791 }
3792
3793 /* 16bit logical shift right ((unsigned short)x >> i) */
3794
3795 const char *
3796 lshrhi3_out (rtx insn, rtx operands[], int *len)
3797 {
3798 if (GET_CODE (operands[2]) == CONST_INT)
3799 {
3800 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3801 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3802 int k;
3803 int *t = len;
3804
3805 if (!len)
3806 len = &k;
3807
3808 switch (INTVAL (operands[2]))
3809 {
3810 default:
3811 if (INTVAL (operands[2]) < 16)
3812 break;
3813
3814 *len = 2;
3815 return (AS1 (clr,%B0) CR_TAB
3816 AS1 (clr,%A0));
3817
3818 case 4:
3819 if (optimize_size && scratch)
3820 break; /* 5 */
3821 if (ldi_ok)
3822 {
3823 *len = 6;
3824 return (AS1 (swap,%B0) CR_TAB
3825 AS1 (swap,%A0) CR_TAB
3826 AS2 (andi,%A0,0x0f) CR_TAB
3827 AS2 (eor,%A0,%B0) CR_TAB
3828 AS2 (andi,%B0,0x0f) CR_TAB
3829 AS2 (eor,%A0,%B0));
3830 }
3831 if (scratch)
3832 {
3833 *len = 7;
3834 return (AS1 (swap,%B0) CR_TAB
3835 AS1 (swap,%A0) CR_TAB
3836 AS2 (ldi,%3,0x0f) CR_TAB
3837 AS2 (and,%A0,%3) CR_TAB
3838 AS2 (eor,%A0,%B0) CR_TAB
3839 AS2 (and,%B0,%3) CR_TAB
3840 AS2 (eor,%A0,%B0));
3841 }
3842 break; /* optimize_size ? 6 : 8 */
3843
3844 case 5:
3845 if (optimize_size)
3846 break; /* scratch ? 5 : 6 */
3847 if (ldi_ok)
3848 {
3849 *len = 8;
3850 return (AS1 (lsr,%B0) CR_TAB
3851 AS1 (ror,%A0) CR_TAB
3852 AS1 (swap,%B0) CR_TAB
3853 AS1 (swap,%A0) CR_TAB
3854 AS2 (andi,%A0,0x0f) CR_TAB
3855 AS2 (eor,%A0,%B0) CR_TAB
3856 AS2 (andi,%B0,0x0f) CR_TAB
3857 AS2 (eor,%A0,%B0));
3858 }
3859 if (scratch)
3860 {
3861 *len = 9;
3862 return (AS1 (lsr,%B0) CR_TAB
3863 AS1 (ror,%A0) CR_TAB
3864 AS1 (swap,%B0) CR_TAB
3865 AS1 (swap,%A0) CR_TAB
3866 AS2 (ldi,%3,0x0f) CR_TAB
3867 AS2 (and,%A0,%3) CR_TAB
3868 AS2 (eor,%A0,%B0) CR_TAB
3869 AS2 (and,%B0,%3) CR_TAB
3870 AS2 (eor,%A0,%B0));
3871 }
3872 break; /* 10 */
3873
3874 case 6:
3875 if (optimize_size)
3876 break; /* scratch ? 5 : 6 */
3877 *len = 9;
3878 return (AS1 (clr,__tmp_reg__) CR_TAB
3879 AS1 (lsl,%A0) CR_TAB
3880 AS1 (rol,%B0) CR_TAB
3881 AS1 (rol,__tmp_reg__) CR_TAB
3882 AS1 (lsl,%A0) CR_TAB
3883 AS1 (rol,%B0) CR_TAB
3884 AS1 (rol,__tmp_reg__) CR_TAB
3885 AS2 (mov,%A0,%B0) CR_TAB
3886 AS2 (mov,%B0,__tmp_reg__));
3887
3888 case 7:
3889 *len = 5;
3890 return (AS1 (lsl,%A0) CR_TAB
3891 AS2 (mov,%A0,%B0) CR_TAB
3892 AS1 (rol,%A0) CR_TAB
3893 AS2 (sbc,%B0,%B0) CR_TAB
3894 AS1 (neg,%B0));
3895
3896 case 8:
3897 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
3898 AS1 (clr,%B0));
3899
3900 case 9:
3901 *len = 3;
3902 return (AS2 (mov,%A0,%B0) CR_TAB
3903 AS1 (clr,%B0) CR_TAB
3904 AS1 (lsr,%A0));
3905
3906 case 10:
3907 *len = 4;
3908 return (AS2 (mov,%A0,%B0) CR_TAB
3909 AS1 (clr,%B0) CR_TAB
3910 AS1 (lsr,%A0) CR_TAB
3911 AS1 (lsr,%A0));
3912
3913 case 11:
3914 *len = 5;
3915 return (AS2 (mov,%A0,%B0) CR_TAB
3916 AS1 (clr,%B0) CR_TAB
3917 AS1 (lsr,%A0) CR_TAB
3918 AS1 (lsr,%A0) CR_TAB
3919 AS1 (lsr,%A0));
3920
3921 case 12:
3922 if (ldi_ok)
3923 {
3924 *len = 4;
3925 return (AS2 (mov,%A0,%B0) CR_TAB
3926 AS1 (clr,%B0) CR_TAB
3927 AS1 (swap,%A0) CR_TAB
3928 AS2 (andi,%A0,0x0f));
3929 }
3930 if (scratch)
3931 {
3932 *len = 5;
3933 return (AS2 (mov,%A0,%B0) CR_TAB
3934 AS1 (clr,%B0) CR_TAB
3935 AS1 (swap,%A0) CR_TAB
3936 AS2 (ldi,%3,0x0f) CR_TAB
3937 AS2 (and,%A0,%3));
3938 }
3939 *len = 6;
3940 return (AS2 (mov,%A0,%B0) CR_TAB
3941 AS1 (clr,%B0) CR_TAB
3942 AS1 (lsr,%A0) CR_TAB
3943 AS1 (lsr,%A0) CR_TAB
3944 AS1 (lsr,%A0) CR_TAB
3945 AS1 (lsr,%A0));
3946
3947 case 13:
3948 if (ldi_ok)
3949 {
3950 *len = 5;
3951 return (AS2 (mov,%A0,%B0) CR_TAB
3952 AS1 (clr,%B0) CR_TAB
3953 AS1 (swap,%A0) CR_TAB
3954 AS1 (lsr,%A0) CR_TAB
3955 AS2 (andi,%A0,0x07));
3956 }
3957 if (AVR_HAVE_MUL && scratch)
3958 {
3959 *len = 5;
3960 return (AS2 (ldi,%3,0x08) CR_TAB
3961 AS2 (mul,%B0,%3) CR_TAB
3962 AS2 (mov,%A0,r1) CR_TAB
3963 AS1 (clr,%B0) CR_TAB
3964 AS1 (clr,__zero_reg__));
3965 }
3966 if (optimize_size && scratch)
3967 break; /* 5 */
3968 if (scratch)
3969 {
3970 *len = 6;
3971 return (AS2 (mov,%A0,%B0) CR_TAB
3972 AS1 (clr,%B0) CR_TAB
3973 AS1 (swap,%A0) CR_TAB
3974 AS1 (lsr,%A0) CR_TAB
3975 AS2 (ldi,%3,0x07) CR_TAB
3976 AS2 (and,%A0,%3));
3977 }
3978 if (AVR_HAVE_MUL)
3979 {
3980 *len = 6;
3981 return ("set" CR_TAB
3982 AS2 (bld,r1,3) CR_TAB
3983 AS2 (mul,%B0,r1) CR_TAB
3984 AS2 (mov,%A0,r1) CR_TAB
3985 AS1 (clr,%B0) CR_TAB
3986 AS1 (clr,__zero_reg__));
3987 }
3988 *len = 7;
3989 return (AS2 (mov,%A0,%B0) CR_TAB
3990 AS1 (clr,%B0) CR_TAB
3991 AS1 (lsr,%A0) CR_TAB
3992 AS1 (lsr,%A0) CR_TAB
3993 AS1 (lsr,%A0) CR_TAB
3994 AS1 (lsr,%A0) CR_TAB
3995 AS1 (lsr,%A0));
3996
3997 case 14:
3998 if (AVR_HAVE_MUL && ldi_ok)
3999 {
4000 *len = 5;
4001 return (AS2 (ldi,%A0,0x04) CR_TAB
4002 AS2 (mul,%B0,%A0) CR_TAB
4003 AS2 (mov,%A0,r1) CR_TAB
4004 AS1 (clr,%B0) CR_TAB
4005 AS1 (clr,__zero_reg__));
4006 }
4007 if (AVR_HAVE_MUL && scratch)
4008 {
4009 *len = 5;
4010 return (AS2 (ldi,%3,0x04) CR_TAB
4011 AS2 (mul,%B0,%3) CR_TAB
4012 AS2 (mov,%A0,r1) CR_TAB
4013 AS1 (clr,%B0) CR_TAB
4014 AS1 (clr,__zero_reg__));
4015 }
4016 if (optimize_size && ldi_ok)
4017 {
4018 *len = 5;
4019 return (AS2 (mov,%A0,%B0) CR_TAB
4020 AS2 (ldi,%B0,6) "\n1:\t"
4021 AS1 (lsr,%A0) CR_TAB
4022 AS1 (dec,%B0) CR_TAB
4023 AS1 (brne,1b));
4024 }
4025 if (optimize_size && scratch)
4026 break; /* 5 */
4027 *len = 6;
4028 return (AS1 (clr,%A0) CR_TAB
4029 AS1 (lsl,%B0) CR_TAB
4030 AS1 (rol,%A0) CR_TAB
4031 AS1 (lsl,%B0) CR_TAB
4032 AS1 (rol,%A0) CR_TAB
4033 AS1 (clr,%B0));
4034
4035 case 15:
4036 *len = 4;
4037 return (AS1 (clr,%A0) CR_TAB
4038 AS1 (lsl,%B0) CR_TAB
4039 AS1 (rol,%A0) CR_TAB
4040 AS1 (clr,%B0));
4041 }
4042 len = t;
4043 }
4044 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4045 AS1 (ror,%A0)),
4046 insn, operands, len, 2);
4047 return "";
4048 }
4049
4050 /* 32bit logical shift right ((unsigned long)x >> i) */
4051
4052 const char *
4053 lshrsi3_out (rtx insn, rtx operands[], int *len)
4054 {
4055 if (GET_CODE (operands[2]) == CONST_INT)
4056 {
4057 int k;
4058 int *t = len;
4059
4060 if (!len)
4061 len = &k;
4062
4063 switch (INTVAL (operands[2]))
4064 {
4065 default:
4066 if (INTVAL (operands[2]) < 32)
4067 break;
4068
4069 if (AVR_HAVE_MOVW)
4070 return *len = 3, (AS1 (clr,%D0) CR_TAB
4071 AS1 (clr,%C0) CR_TAB
4072 AS2 (movw,%A0,%C0));
4073 *len = 4;
4074 return (AS1 (clr,%D0) CR_TAB
4075 AS1 (clr,%C0) CR_TAB
4076 AS1 (clr,%B0) CR_TAB
4077 AS1 (clr,%A0));
4078
4079 case 8:
4080 {
4081 int reg0 = true_regnum (operands[0]);
4082 int reg1 = true_regnum (operands[1]);
4083 *len = 4;
4084 if (reg0 <= reg1)
4085 return (AS2 (mov,%A0,%B1) CR_TAB
4086 AS2 (mov,%B0,%C1) CR_TAB
4087 AS2 (mov,%C0,%D1) CR_TAB
4088 AS1 (clr,%D0));
4089 else
4090 return (AS1 (clr,%D0) CR_TAB
4091 AS2 (mov,%C0,%D1) CR_TAB
4092 AS2 (mov,%B0,%C1) CR_TAB
4093 AS2 (mov,%A0,%B1));
4094 }
4095
4096 case 16:
4097 {
4098 int reg0 = true_regnum (operands[0]);
4099 int reg1 = true_regnum (operands[1]);
4100
4101 if (reg0 == reg1 + 2)
4102 return *len = 2, (AS1 (clr,%C0) CR_TAB
4103 AS1 (clr,%D0));
4104 if (AVR_HAVE_MOVW)
4105 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4106 AS1 (clr,%C0) CR_TAB
4107 AS1 (clr,%D0));
4108 else
4109 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4110 AS2 (mov,%A0,%C1) CR_TAB
4111 AS1 (clr,%C0) CR_TAB
4112 AS1 (clr,%D0));
4113 }
4114
4115 case 24:
4116 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4117 AS1 (clr,%B0) CR_TAB
4118 AS1 (clr,%C0) CR_TAB
4119 AS1 (clr,%D0));
4120
4121 case 31:
4122 *len = 6;
4123 return (AS1 (clr,%A0) CR_TAB
4124 AS2 (sbrc,%D0,7) CR_TAB
4125 AS1 (inc,%A0) CR_TAB
4126 AS1 (clr,%B0) CR_TAB
4127 AS1 (clr,%C0) CR_TAB
4128 AS1 (clr,%D0));
4129 }
4130 len = t;
4131 }
4132 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4133 AS1 (ror,%C0) CR_TAB
4134 AS1 (ror,%B0) CR_TAB
4135 AS1 (ror,%A0)),
4136 insn, operands, len, 4);
4137 return "";
4138 }
4139
4140 /* Modify the length assigned to instruction INSN.  LEN is the
4141    initially computed length of the insn; return the adjusted length. */
4142
4143 int
4144 adjust_insn_length (rtx insn, int len)
4145 {
4146 rtx patt = PATTERN (insn);
4147 rtx set;
4148
4149 if (GET_CODE (patt) == SET)
4150 {
4151 rtx op[10];
4152 op[1] = SET_SRC (patt);
4153 op[0] = SET_DEST (patt);
4154 if (general_operand (op[1], VOIDmode)
4155 && general_operand (op[0], VOIDmode))
4156 {
4157 switch (GET_MODE (op[0]))
4158 {
4159 case QImode:
4160 output_movqi (insn, op, &len);
4161 break;
4162 case HImode:
4163 output_movhi (insn, op, &len);
4164 break;
4165 case SImode:
4166 case SFmode:
4167 output_movsisf (insn, op, &len);
4168 break;
4169 default:
4170 break;
4171 }
4172 }
4173 else if (op[0] == cc0_rtx && REG_P (op[1]))
4174 {
4175 switch (GET_MODE (op[1]))
4176 {
4177 case HImode: out_tsthi (insn, op[1], &len); break;
4178 case SImode: out_tstsi (insn, op[1], &len); break;
4179 default: break;
4180 }
4181 }
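      /* For AND/IOR with a constant, one instruction is needed for each
         byte of the mask that actually changes the operand; a mask byte
         of 0xff (for AND) or 0x00 (for IOR) needs no code.  */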
4182 else if (GET_CODE (op[1]) == AND)
4183 {
4184 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4185 {
4186 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4187 if (GET_MODE (op[1]) == SImode)
4188 len = (((mask & 0xff) != 0xff)
4189 + ((mask & 0xff00) != 0xff00)
4190 + ((mask & 0xff0000L) != 0xff0000L)
4191 + ((mask & 0xff000000L) != 0xff000000L));
4192 else if (GET_MODE (op[1]) == HImode)
4193 len = (((mask & 0xff) != 0xff)
4194 + ((mask & 0xff00) != 0xff00));
4195 }
4196 }
4197 else if (GET_CODE (op[1]) == IOR)
4198 {
4199 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4200 {
4201 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4202 if (GET_MODE (op[1]) == SImode)
4203 len = (((mask & 0xff) != 0)
4204 + ((mask & 0xff00) != 0)
4205 + ((mask & 0xff0000L) != 0)
4206 + ((mask & 0xff000000L) != 0));
4207 else if (GET_MODE (op[1]) == HImode)
4208 len = (((mask & 0xff) != 0)
4209 + ((mask & 0xff00) != 0));
4210 }
4211 }
4212 }
4213 set = single_set (insn);
4214 if (set)
4215 {
4216 rtx op[10];
4217
4218 op[1] = SET_SRC (set);
4219 op[0] = SET_DEST (set);
4220
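      /* A PARALLEL of a SET plus a clobber comes from the reload_in
         patterns; the second element clobbers the QI scratch register
         used while loading the constant.  */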
4221 if (GET_CODE (patt) == PARALLEL
4222 && general_operand (op[1], VOIDmode)
4223 && general_operand (op[0], VOIDmode))
4224 {
4225 if (XVECLEN (patt, 0) == 2)
4226 op[2] = XVECEXP (patt, 0, 1);
4227
4228 switch (GET_MODE (op[0]))
4229 {
4230 case QImode:
4231 len = 2;
4232 break;
4233 case HImode:
4234 output_reload_inhi (insn, op, &len);
4235 break;
4236 case SImode:
4237 case SFmode:
4238 output_reload_insisf (insn, op, &len);
4239 break;
4240 default:
4241 break;
4242 }
4243 }
4244 else if (GET_CODE (op[1]) == ASHIFT
4245 || GET_CODE (op[1]) == ASHIFTRT
4246 || GET_CODE (op[1]) == LSHIFTRT)
4247 {
4248 rtx ops[10];
4249 ops[0] = op[0];
4250 ops[1] = XEXP (op[1],0);
4251 ops[2] = XEXP (op[1],1);
4252 switch (GET_CODE (op[1]))
4253 {
4254 case ASHIFT:
4255 switch (GET_MODE (op[0]))
4256 {
4257 case QImode: ashlqi3_out (insn,ops,&len); break;
4258 case HImode: ashlhi3_out (insn,ops,&len); break;
4259 case SImode: ashlsi3_out (insn,ops,&len); break;
4260 default: break;
4261 }
4262 break;
4263 case ASHIFTRT:
4264 switch (GET_MODE (op[0]))
4265 {
4266 case QImode: ashrqi3_out (insn,ops,&len); break;
4267 case HImode: ashrhi3_out (insn,ops,&len); break;
4268 case SImode: ashrsi3_out (insn,ops,&len); break;
4269 default: break;
4270 }
4271 break;
4272 case LSHIFTRT:
4273 switch (GET_MODE (op[0]))
4274 {
4275 case QImode: lshrqi3_out (insn,ops,&len); break;
4276 case HImode: lshrhi3_out (insn,ops,&len); break;
4277 case SImode: lshrsi3_out (insn,ops,&len); break;
4278 default: break;
4279 }
4280 break;
4281 default:
4282 break;
4283 }
4284 }
4285 }
4286 return len;
4287 }
4288
4289 /* Return nonzero if register REG is dead after INSN. */
4290
4291 int
4292 reg_unused_after (rtx insn, rtx reg)
4293 {
4294 return (dead_or_set_p (insn, reg)
4295 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4296 }
4297
4298 /* Return nonzero if REG is not used after INSN.
4299 We assume REG is a reload reg, and therefore does
4300 not live past labels. It may live past calls or jumps though. */
4301
4302 int
4303 _reg_unused_after (rtx insn, rtx reg)
4304 {
4305 enum rtx_code code;
4306 rtx set;
4307
4308 /* If the reg is set by this instruction, then it is safe for our
4309 case. Disregard the case where this is a store to memory, since
4310 we are checking a register used in the store address. */
4311 set = single_set (insn);
4312 if (set && GET_CODE (SET_DEST (set)) != MEM
4313 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4314 return 1;
4315
4316 while ((insn = NEXT_INSN (insn)))
4317 {
4318 rtx set;
4319 code = GET_CODE (insn);
4320
4321 #if 0
4322 /* If this is a label that existed before reload, then the register
4323 is dead here. However, if this is a label added by reorg, then
4324 the register may still be live here. We can't tell the difference,
4325 so we just ignore labels completely. */
4326 if (code == CODE_LABEL)
4327 return 1;
4328 /* else */
4329 #endif
4330
4331 if (!INSN_P (insn))
4332 continue;
4333
4334 if (code == JUMP_INSN)
4335 return 0;
4336
4337 /* If this is a sequence, we must handle them all at once.
4338 We could have for instance a call that sets the target register,
4339 and an insn in a delay slot that uses the register. In this case,
4340 we must return 0. */
4341 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4342 {
4343 int i;
4344 int retval = 0;
4345
4346 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4347 {
4348 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4349 rtx set = single_set (this_insn);
4350
4351 if (GET_CODE (this_insn) == CALL_INSN)
4352 code = CALL_INSN;
4353 else if (GET_CODE (this_insn) == JUMP_INSN)
4354 {
4355 if (INSN_ANNULLED_BRANCH_P (this_insn))
4356 return 0;
4357 code = JUMP_INSN;
4358 }
4359
4360 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4361 return 0;
4362 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4363 {
4364 if (GET_CODE (SET_DEST (set)) != MEM)
4365 retval = 1;
4366 else
4367 return 0;
4368 }
4369 if (set == 0
4370 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4371 return 0;
4372 }
4373 if (retval == 1)
4374 return 1;
4375 else if (code == JUMP_INSN)
4376 return 0;
4377 }
4378
4379 if (code == CALL_INSN)
4380 {
4381 rtx tem;
4382 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4383 if (GET_CODE (XEXP (tem, 0)) == USE
4384 && REG_P (XEXP (XEXP (tem, 0), 0))
4385 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4386 return 0;
4387 if (call_used_regs[REGNO (reg)])
4388 return 1;
4389 }
4390
4391 set = single_set (insn);
4392
4393 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4394 return 0;
4395 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4396 return GET_CODE (SET_DEST (set)) != MEM;
4397 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4398 return 0;
4399 }
4400 return 1;
4401 }
4402
4403 /* Target hook for assembling integer objects. The AVR version needs
4404 special handling for references to certain labels. */
4405
4406 static bool
4407 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4408 {
4409 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4410 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4411 || GET_CODE (x) == LABEL_REF))
4412 {
4413 fputs ("\t.word\tgs(", asm_out_file);
4414 output_addr_const (asm_out_file, x);
4415 fputs (")\n", asm_out_file);
4416 return true;
4417 }
4418 return default_assemble_integer (x, size, aligned_p);
4419 }
4420
4421 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4422
4423 void
4424 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4425 {
4426
4427 /* If the function has the 'signal' or 'interrupt' attribute, test to
4428 make sure that the name of the function is "__vector_NN" so as to
4429 catch when the user misspells the interrupt vector name. */
4430
4431 if (cfun->machine->is_interrupt)
4432 {
4433 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4434 {
4435 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4436 "%qs appears to be a misspelled interrupt handler",
4437 name);
4438 }
4439 }
4440 else if (cfun->machine->is_signal)
4441 {
4442 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4443 {
4444 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4445 "%qs appears to be a misspelled signal handler",
4446 name);
4447 }
4448 }
4449
4450 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4451 ASM_OUTPUT_LABEL (file, name);
4452 }
4453
4454 /* The routine used to output NUL-terminated strings.  We use a special
4455 version of this for most svr4 targets because doing so makes the
4456 generated assembly code more compact (and thus faster to assemble)
4457 as well as more readable, especially for targets like the i386
4458 (where the only alternative is to output character sequences as
4459 comma separated lists of numbers). */
4460
4461 void
4462 gas_output_limited_string(FILE *file, const char *str)
4463 {
4464 const unsigned char *_limited_str = (const unsigned char *) str;
4465 unsigned ch;
4466 fprintf (file, "%s\"", STRING_ASM_OP);
4467 for (; (ch = *_limited_str); _limited_str++)
4468 {
4469 int escape;
4470 switch (escape = ESCAPES[ch])
4471 {
4472 case 0:
4473 putc (ch, file);
4474 break;
4475 case 1:
4476 fprintf (file, "\\%03o", ch);
4477 break;
4478 default:
4479 putc ('\\', file);
4480 putc (escape, file);
4481 break;
4482 }
4483 }
4484 fprintf (file, "\"\n");
4485 }
4486
4487 /* The routine used to output sequences of byte values. We use a special
4488 version of this for most svr4 targets because doing so makes the
4489 generated assembly code more compact (and thus faster to assemble)
4490 as well as more readable. Note that if we find subparts of the
4491 character sequence which end with NUL (and which are shorter than
4492 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4493
4494 void
4495 gas_output_ascii(FILE *file, const char *str, size_t length)
4496 {
4497 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4498 const unsigned char *limit = _ascii_bytes + length;
4499 unsigned bytes_in_chunk = 0;
4500 for (; _ascii_bytes < limit; _ascii_bytes++)
4501 {
4502 const unsigned char *p;
4503 if (bytes_in_chunk >= 60)
4504 {
4505 fprintf (file, "\"\n");
4506 bytes_in_chunk = 0;
4507 }
4508 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4509 continue;
4510 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4511 {
4512 if (bytes_in_chunk > 0)
4513 {
4514 fprintf (file, "\"\n");
4515 bytes_in_chunk = 0;
4516 }
4517 gas_output_limited_string (file, (const char*)_ascii_bytes);
4518 _ascii_bytes = p;
4519 }
4520 else
4521 {
4522 int escape;
4523 unsigned ch;
4524 if (bytes_in_chunk == 0)
4525 fprintf (file, "\t.ascii\t\"");
4526 switch (escape = ESCAPES[ch = *_ascii_bytes])
4527 {
4528 case 0:
4529 putc (ch, file);
4530 bytes_in_chunk++;
4531 break;
4532 case 1:
4533 fprintf (file, "\\%03o", ch);
4534 bytes_in_chunk += 4;
4535 break;
4536 default:
4537 putc ('\\', file);
4538 putc (escape, file);
4539 bytes_in_chunk += 2;
4540 break;
4541 }
4542 }
4543 }
4544 if (bytes_in_chunk > 0)
4545 fprintf (file, "\"\n");
4546 }
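
/* Worked example (illustrative, assuming STRING_ASM_OP is a .string-style
   directive that supplies the terminating NUL): for the five bytes
   "OK\0!x" the first scan finds the NUL at offset 2, which is within
   STRING_LIMIT, so "OK" goes out through gas_output_limited_string; the
   remaining "!x" has no NUL before the end of the buffer and is therefore
   emitted byte by byte as

     .ascii "!x"

   with no terminator added. */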
4547
4548 /* Return value is nonzero if pseudos that have been
4549 assigned to registers of class CLASS would likely be spilled
4550 because registers of CLASS are needed for spill registers. */
4551
4552 enum reg_class
4553 class_likely_spilled_p (int c)
4554 {
4555 return (c != ALL_REGS && c != ADDW_REGS);
4556 }
4557
4558 /* Valid attributes:
4559 progmem - put data into program memory;
4560 signal - make the function a hardware interrupt handler; interrupts
4561 remain disabled after the function prologue;
4562 interrupt - make the function a hardware interrupt handler; interrupts
4563 are re-enabled by the function prologue;
4564 naked - don't generate a function prologue/epilogue or `ret' instruction.
4565
4566 Only the `progmem' attribute is valid for types.  (A usage sketch follows the table below.) */
4567
4568 const struct attribute_spec avr_attribute_table[] =
4569 {
4570 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4571 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4572 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4573 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4574 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4575 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
4576 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
4577 { NULL, 0, 0, false, false, false, NULL }
4578 };
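
/* Usage sketch (not original port code; identifiers are illustrative):

     const char msg[] __attribute__ ((progmem)) = "hello";
     void __vector_1 (void) __attribute__ ((signal));
     void start (void) __attribute__ ((naked));
     int worker (int) __attribute__ ((OS_task));

   `signal' and `interrupt' must be applied to a function declaration
   (decl_req), while `naked', `OS_task' and `OS_main' attach to the
   function type (type_req / fn_type_req), matching the columns of the
   table above. */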
4579
4580 /* Handle a "progmem" attribute; arguments as in
4581 struct attribute_spec.handler. */
4582 static tree
4583 avr_handle_progmem_attribute (tree *node, tree name,
4584 tree args ATTRIBUTE_UNUSED,
4585 int flags ATTRIBUTE_UNUSED,
4586 bool *no_add_attrs)
4587 {
4588 if (DECL_P (*node))
4589 {
4590 if (TREE_CODE (*node) == TYPE_DECL)
4591 {
4592 /* This is really a decl attribute, not a type attribute,
4593 but try to handle it for GCC 3.0 backwards compatibility. */
4594
4595 tree type = TREE_TYPE (*node);
4596 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4597 tree newtype = build_type_attribute_variant (type, attr);
4598
4599 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4600 TREE_TYPE (*node) = newtype;
4601 *no_add_attrs = true;
4602 }
4603 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4604 {
4605 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4606 {
4607 warning (0, "only initialized variables can be placed into "
4608 "program memory area");
4609 *no_add_attrs = true;
4610 }
4611 }
4612 else
4613 {
4614 warning (OPT_Wattributes, "%qE attribute ignored",
4615 name);
4616 *no_add_attrs = true;
4617 }
4618 }
4619
4620 return NULL_TREE;
4621 }
4622
4623 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4624 struct attribute_spec.handler. */
4625
4626 static tree
4627 avr_handle_fndecl_attribute (tree *node, tree name,
4628 tree args ATTRIBUTE_UNUSED,
4629 int flags ATTRIBUTE_UNUSED,
4630 bool *no_add_attrs)
4631 {
4632 if (TREE_CODE (*node) != FUNCTION_DECL)
4633 {
4634 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4635 name);
4636 *no_add_attrs = true;
4637 }
4638
4639 return NULL_TREE;
4640 }
4641
4642 static tree
4643 avr_handle_fntype_attribute (tree *node, tree name,
4644 tree args ATTRIBUTE_UNUSED,
4645 int flags ATTRIBUTE_UNUSED,
4646 bool *no_add_attrs)
4647 {
4648 if (TREE_CODE (*node) != FUNCTION_TYPE)
4649 {
4650 warning (OPT_Wattributes, "%qE attribute only applies to functions",
4651 name);
4652 *no_add_attrs = true;
4653 }
4654
4655 return NULL_TREE;
4656 }
4657
4658 /* Look for attribute `progmem' in DECL
4659 if found return 1, otherwise 0. */
4660
4661 int
4662 avr_progmem_p (tree decl, tree attributes)
4663 {
4664 tree a;
4665
4666 if (TREE_CODE (decl) != VAR_DECL)
4667 return 0;
4668
4669 if (NULL_TREE
4670 != lookup_attribute ("progmem", attributes))
4671 return 1;
4672
4673 a = decl;
4674 do
4675 a = TREE_TYPE (a);
4676 while (TREE_CODE (a) == ARRAY_TYPE);
4677
4678 if (a == error_mark_node)
4679 return 0;
4680
4681 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4682 return 1;
4683
4684 return 0;
4685 }
4686
4687 /* Add the section attribute if the variable is in progmem. */
4688
4689 static void
4690 avr_insert_attributes (tree node, tree *attributes)
4691 {
4692 if (TREE_CODE (node) == VAR_DECL
4693 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4694 && avr_progmem_p (node, *attributes))
4695 {
4696 static const char dsec[] = ".progmem.data";
4697 *attributes = tree_cons (get_identifier ("section"),
4698 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4699 *attributes);
4700
4701 /* ??? This seems sketchy. Why can't the user declare the
4702 thing const in the first place? */
4703 TREE_READONLY (node) = 1;
4704 }
4705 }
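
/* Illustrative sketch (not original port code): for a declaration such as

     const unsigned char table[] __attribute__ ((progmem)) = { 1, 2, 3 };

   the hook above behaves as if the user had also written
   __attribute__ ((section (".progmem.data"))) and marks the decl
   TREE_READONLY, so the initialized data ends up in the .progmem.data
   section in flash. */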
4706
4707 /* A get_unnamed_section callback for switching to progmem_section. */
4708
4709 static void
4710 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4711 {
4712 fprintf (asm_out_file,
4713 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4714 AVR_HAVE_JMP_CALL ? "a" : "ax");
4715 /* Should already be aligned; this is just to be safe if it isn't. */
4716 fprintf (asm_out_file, "\t.p2align 1\n");
4717 }
4718
4719 /* Implement TARGET_ASM_INIT_SECTIONS. */
4720
4721 static void
4722 avr_asm_init_sections (void)
4723 {
4724 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4725 avr_output_progmem_section_asm_op,
4726 NULL);
4727 readonly_data_section = data_section;
4728 }
4729
4730 static unsigned int
4731 avr_section_type_flags (tree decl, const char *name, int reloc)
4732 {
4733 unsigned int flags = default_section_type_flags (decl, name, reloc);
4734
4735 if (strncmp (name, ".noinit", 7) == 0)
4736 {
4737 if (decl && TREE_CODE (decl) == VAR_DECL
4738 && DECL_INITIAL (decl) == NULL_TREE)
4739 flags |= SECTION_BSS; /* @nobits */
4740 else
4741 warning (0, "only uninitialized variables can be placed in the "
4742 ".noinit section");
4743 }
4744
4745 return flags;
4746 }
4747
4748 /* Outputs some appropriate text to go at the start of an assembler
4749 file. */
4750
4751 static void
4752 avr_file_start (void)
4753 {
4754 if (avr_current_arch->asm_only)
4755 error ("MCU %qs supported for assembler only", avr_mcu_name);
4756
4757 default_file_start ();
4758
4759 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4760 fputs ("__SREG__ = 0x3f\n"
4761 "__SP_H__ = 0x3e\n"
4762 "__SP_L__ = 0x3d\n", asm_out_file);
4763
4764 fputs ("__tmp_reg__ = 0\n"
4765 "__zero_reg__ = 1\n", asm_out_file);
4766
4767 /* FIXME: output these only if there is anything in the .data / .bss
4768 sections - some code size could be saved by not linking in the
4769 initialization code from libgcc if one or both sections are empty. */
4770 fputs ("\t.global __do_copy_data\n", asm_out_file);
4771 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4772 }
4773
4774 /* Outputs some appropriate text to go at the end of an assembler
4775 file (currently nothing). */
4776
4777 static void
4778 avr_file_end (void)
4779 {
4780 }
4781
4782 /* Choose the order in which to allocate hard registers for
4783 pseudo-registers local to a basic block.
4784
4785 Store the desired register order in the array `reg_alloc_order'.
4786 Element 0 should be the register to allocate first; element 1, the
4787 next register; and so on. */
4788
4789 void
4790 order_regs_for_local_alloc (void)
4791 {
4792 unsigned int i;
4793 static const int order_0[] = {
4794 24,25,
4795 18,19,
4796 20,21,
4797 22,23,
4798 30,31,
4799 26,27,
4800 28,29,
4801 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4802 0,1,
4803 32,33,34,35
4804 };
4805 static const int order_1[] = {
4806 18,19,
4807 20,21,
4808 22,23,
4809 24,25,
4810 30,31,
4811 26,27,
4812 28,29,
4813 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4814 0,1,
4815 32,33,34,35
4816 };
4817 static const int order_2[] = {
4818 25,24,
4819 23,22,
4820 21,20,
4821 19,18,
4822 30,31,
4823 26,27,
4824 28,29,
4825 17,16,
4826 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4827 1,0,
4828 32,33,34,35
4829 };
4830
4831 const int *order = (TARGET_ORDER_1 ? order_1 :
4832 TARGET_ORDER_2 ? order_2 :
4833 order_0);
4834 for (i=0; i < ARRAY_SIZE (order_0); ++i)
4835 reg_alloc_order[i] = order[i];
4836 }
4837
4838
4839 /* Mutually recursive subroutine of avr_rtx_costs for calculating the
4840 cost of an RTX operand given its context. X is the rtx of the
4841 operand, MODE is its mode, and OUTER is the rtx_code of this
4842 operand's parent operator. */
4843
4844 static int
4845 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
4846 bool speed)
4847 {
4848 enum rtx_code code = GET_CODE (x);
4849 int total;
4850
4851 switch (code)
4852 {
4853 case REG:
4854 case SUBREG:
4855 return 0;
4856
4857 case CONST_INT:
4858 case CONST_DOUBLE:
4859 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
4860
4861 default:
4862 break;
4863 }
4864
4865 total = 0;
4866 avr_rtx_costs (x, code, outer, &total, speed);
4867 return total;
4868 }
4869
4870 /* The AVR backend's rtx_costs function. X is the rtx expression whose cost
4871 is to be calculated. Return true if the complete cost has been
4872 computed, and false if subexpressions should be scanned. In either
4873 case, *TOTAL contains the cost result. */
4874
4875 static bool
4876 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
4877 bool speed)
4878 {
4879 enum machine_mode mode = GET_MODE (x);
4880 HOST_WIDE_INT val;
4881
4882 switch (code)
4883 {
4884 case CONST_INT:
4885 case CONST_DOUBLE:
4886 /* Immediate constants are as cheap as registers. */
4887 *total = 0;
4888 return true;
4889
4890 case MEM:
4891 case CONST:
4892 case LABEL_REF:
4893 case SYMBOL_REF:
4894 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4895 return true;
4896
4897 case NEG:
4898 switch (mode)
4899 {
4900 case QImode:
4901 case SFmode:
4902 *total = COSTS_N_INSNS (1);
4903 break;
4904
4905 case HImode:
4906 *total = COSTS_N_INSNS (3);
4907 break;
4908
4909 case SImode:
4910 *total = COSTS_N_INSNS (7);
4911 break;
4912
4913 default:
4914 return false;
4915 }
4916 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4917 return true;
4918
4919 case ABS:
4920 switch (mode)
4921 {
4922 case QImode:
4923 case SFmode:
4924 *total = COSTS_N_INSNS (1);
4925 break;
4926
4927 default:
4928 return false;
4929 }
4930 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4931 return true;
4932
4933 case NOT:
4934 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4935 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4936 return true;
4937
4938 case ZERO_EXTEND:
4939 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
4940 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4941 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4942 return true;
4943
4944 case SIGN_EXTEND:
4945 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
4946 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
4947 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4948 return true;
4949
4950 case PLUS:
4951 switch (mode)
4952 {
4953 case QImode:
4954 *total = COSTS_N_INSNS (1);
4955 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4956 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
4957 break;
4958
4959 case HImode:
4960 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4961 {
4962 *total = COSTS_N_INSNS (2);
4963 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
4964 }
4965 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4966 *total = COSTS_N_INSNS (1);
4967 else
4968 *total = COSTS_N_INSNS (2);
4969 break;
4970
4971 case SImode:
4972 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4973 {
4974 *total = COSTS_N_INSNS (4);
4975 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
4976 }
4977 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
4978 *total = COSTS_N_INSNS (1);
4979 else
4980 *total = COSTS_N_INSNS (4);
4981 break;
4982
4983 default:
4984 return false;
4985 }
4986 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4987 return true;
4988
4989 case MINUS:
4990 case AND:
4991 case IOR:
4992 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
4993 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
4994 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4995 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
4996 return true;
4997
4998 case XOR:
4999 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5000 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5001 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5002 return true;
5003
5004 case MULT:
5005 switch (mode)
5006 {
5007 case QImode:
5008 if (AVR_HAVE_MUL)
5009 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5010 else if (!speed)
5011 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5012 else
5013 return false;
5014 break;
5015
5016 case HImode:
5017 if (AVR_HAVE_MUL)
5018 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5019 else if (!speed)
5020 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5021 else
5022 return false;
5023 break;
5024
5025 default:
5026 return false;
5027 }
5028 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5029 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5030 return true;
5031
5032 case DIV:
5033 case MOD:
5034 case UDIV:
5035 case UMOD:
5036 if (!speed)
5037 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5038 else
5039 return false;
5040 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5041 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5042 return true;
5043
5044 case ROTATE:
5045 switch (mode)
5046 {
5047 case QImode:
5048 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5049 *total = COSTS_N_INSNS (1);
5050
5051 break;
5052
5053 case HImode:
5054 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5055 *total = COSTS_N_INSNS (3);
5056
5057 break;
5058
5059 case SImode:
5060 if (CONST_INT_P (XEXP (x, 1)))
5061 switch (INTVAL (XEXP (x, 1)))
5062 {
5063 case 8:
5064 case 24:
5065 *total = COSTS_N_INSNS (5);
5066 break;
5067 case 16:
5068 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5069 break;
5070 }
5071 break;
5072
5073 default:
5074 return false;
5075 }
5076 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5077 return true;
5078
5079 case ASHIFT:
5080 switch (mode)
5081 {
5082 case QImode:
5083 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5084 {
5085 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5086 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5087 }
5088 else
5089 {
5090 val = INTVAL (XEXP (x, 1));
5091 if (val == 7)
5092 *total = COSTS_N_INSNS (3);
5093 else if (val >= 0 && val <= 7)
5094 *total = COSTS_N_INSNS (val);
5095 else
5096 *total = COSTS_N_INSNS (1);
5097 }
5098 break;
5099
5100 case HImode:
5101 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5102 {
5103 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5104 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5105 }
5106 else
5107 switch (INTVAL (XEXP (x, 1)))
5108 {
5109 case 0:
5110 *total = 0;
5111 break;
5112 case 1:
5113 case 8:
5114 *total = COSTS_N_INSNS (2);
5115 break;
5116 case 9:
5117 *total = COSTS_N_INSNS (3);
5118 break;
5119 case 2:
5120 case 3:
5121 case 10:
5122 case 15:
5123 *total = COSTS_N_INSNS (4);
5124 break;
5125 case 7:
5126 case 11:
5127 case 12:
5128 *total = COSTS_N_INSNS (5);
5129 break;
5130 case 4:
5131 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5132 break;
5133 case 6:
5134 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5135 break;
5136 case 5:
5137 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5138 break;
5139 default:
5140 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5141 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5142 }
5143 break;
5144
5145 case SImode:
5146 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5147 {
5148 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5149 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5150 }
5151 else
5152 switch (INTVAL (XEXP (x, 1)))
5153 {
5154 case 0:
5155 *total = 0;
5156 break;
5157 case 24:
5158 *total = COSTS_N_INSNS (3);
5159 break;
5160 case 1:
5161 case 8:
5162 case 16:
5163 *total = COSTS_N_INSNS (4);
5164 break;
5165 case 31:
5166 *total = COSTS_N_INSNS (6);
5167 break;
5168 case 2:
5169 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5170 break;
5171 default:
5172 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5173 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5174 }
5175 break;
5176
5177 default:
5178 return false;
5179 }
5180 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5181 return true;
5182
5183 case ASHIFTRT:
5184 switch (mode)
5185 {
5186 case QImode:
5187 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5188 {
5189 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5190 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5191 }
5192 else
5193 {
5194 val = INTVAL (XEXP (x, 1));
5195 if (val == 6)
5196 *total = COSTS_N_INSNS (4);
5197 else if (val == 7)
5198 *total = COSTS_N_INSNS (2);
5199 else if (val >= 0 && val <= 7)
5200 *total = COSTS_N_INSNS (val);
5201 else
5202 *total = COSTS_N_INSNS (1);
5203 }
5204 break;
5205
5206 case HImode:
5207 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5208 {
5209 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5210 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5211 }
5212 else
5213 switch (INTVAL (XEXP (x, 1)))
5214 {
5215 case 0:
5216 *total = 0;
5217 break;
5218 case 1:
5219 *total = COSTS_N_INSNS (2);
5220 break;
5221 case 15:
5222 *total = COSTS_N_INSNS (3);
5223 break;
5224 case 2:
5225 case 7:
5226 case 8:
5227 case 9:
5228 *total = COSTS_N_INSNS (4);
5229 break;
5230 case 10:
5231 case 14:
5232 *total = COSTS_N_INSNS (5);
5233 break;
5234 case 11:
5235 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5236 break;
5237 case 12:
5238 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5239 break;
5240 case 6:
5241 case 13:
5242 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5243 break;
5244 default:
5245 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5246 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5247 }
5248 break;
5249
5250 case SImode:
5251 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5252 {
5253 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5254 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5255 }
5256 else
5257 switch (INTVAL (XEXP (x, 1)))
5258 {
5259 case 0:
5260 *total = 0;
5261 break;
5262 case 1:
5263 *total = COSTS_N_INSNS (4);
5264 break;
5265 case 8:
5266 case 16:
5267 case 24:
5268 *total = COSTS_N_INSNS (6);
5269 break;
5270 case 2:
5271 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5272 break;
5273 case 31:
5274 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5275 break;
5276 default:
5277 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5278 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5279 }
5280 break;
5281
5282 default:
5283 return false;
5284 }
5285 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5286 return true;
5287
5288 case LSHIFTRT:
5289 switch (mode)
5290 {
5291 case QImode:
5292 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5293 {
5294 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5295 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5296 }
5297 else
5298 {
5299 val = INTVAL (XEXP (x, 1));
5300 if (val == 7)
5301 *total = COSTS_N_INSNS (3);
5302 else if (val >= 0 && val <= 7)
5303 *total = COSTS_N_INSNS (val);
5304 else
5305 *total = COSTS_N_INSNS (1);
5306 }
5307 break;
5308
5309 case HImode:
5310 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5311 {
5312 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5313 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5314 }
5315 else
5316 switch (INTVAL (XEXP (x, 1)))
5317 {
5318 case 0:
5319 *total = 0;
5320 break;
5321 case 1:
5322 case 8:
5323 *total = COSTS_N_INSNS (2);
5324 break;
5325 case 9:
5326 *total = COSTS_N_INSNS (3);
5327 break;
5328 case 2:
5329 case 10:
5330 case 15:
5331 *total = COSTS_N_INSNS (4);
5332 break;
5333 case 7:
5334 case 11:
5335 *total = COSTS_N_INSNS (5);
5336 break;
5337 case 3:
5338 case 12:
5339 case 13:
5340 case 14:
5341 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5342 break;
5343 case 4:
5344 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5345 break;
5346 case 5:
5347 case 6:
5348 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5349 break;
5350 default:
5351 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5352 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5353 }
5354 break;
5355
5356 case SImode:
5357 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5358 {
5359 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5360 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5361 }
5362 else
5363 switch (INTVAL (XEXP (x, 1)))
5364 {
5365 case 0:
5366 *total = 0;
5367 break;
5368 case 1:
5369 *total = COSTS_N_INSNS (4);
5370 break;
5371 case 2:
5372 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5373 break;
5374 case 8:
5375 case 16:
5376 case 24:
5377 *total = COSTS_N_INSNS (4);
5378 break;
5379 case 31:
5380 *total = COSTS_N_INSNS (6);
5381 break;
5382 default:
5383 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5384 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5385 }
5386 break;
5387
5388 default:
5389 return false;
5390 }
5391 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5392 return true;
5393
5394 case COMPARE:
5395 switch (GET_MODE (XEXP (x, 0)))
5396 {
5397 case QImode:
5398 *total = COSTS_N_INSNS (1);
5399 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5400 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5401 break;
5402
5403 case HImode:
5404 *total = COSTS_N_INSNS (2);
5405 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5406 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5407 else if (INTVAL (XEXP (x, 1)) != 0)
5408 *total += COSTS_N_INSNS (1);
5409 break;
5410
5411 case SImode:
5412 *total = COSTS_N_INSNS (4);
5413 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5414 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5415 else if (INTVAL (XEXP (x, 1)) != 0)
5416 *total += COSTS_N_INSNS (3);
5417 break;
5418
5419 default:
5420 return false;
5421 }
5422 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5423 return true;
5424
5425 default:
5426 break;
5427 }
5428 return false;
5429 }
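
/* Worked example (illustrative): for (plus:HI (reg:HI 24) (const_int 10))
   the PLUS/HImode case above sees a CONST_INT in [-63, 63] and sets *total
   to COSTS_N_INSNS (1); avr_operand_rtx_cost then adds 0 for the REG
   operand, so the whole expression is costed at one instruction. */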
5430
5431 /* Calculate the cost of a memory address. */
5432
5433 static int
5434 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5435 {
5436 if (GET_CODE (x) == PLUS
5437 && GET_CODE (XEXP (x,1)) == CONST_INT
5438 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5439 && INTVAL (XEXP (x,1)) >= 61)
5440 return 18;
5441 if (CONSTANT_ADDRESS_P (x))
5442 {
5443 if (optimize > 0 && io_address_operand (x, QImode))
5444 return 2;
5445 return 4;
5446 }
5447 return 4;
5448 }
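
/* Examples (illustrative): (plus (reg Y) (const_int 62)) is costed at 18,
   presumably because multi-byte accesses at displacements of 61 and up run
   past the 0..63 LDD/STD range and need pointer adjustment; a constant
   address accepted by io_address_operand costs 2 when optimizing (in/out
   instructions apply), and any other address costs 4. */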
5449
5450 /* Test for the extra memory constraint 'Q':
5451 a memory address based on the Y or Z pointer with a valid displacement. */
5452
5453 int
5454 extra_constraint_Q (rtx x)
5455 {
5456 if (GET_CODE (XEXP (x,0)) == PLUS
5457 && REG_P (XEXP (XEXP (x,0), 0))
5458 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5459 && (INTVAL (XEXP (XEXP (x,0), 1))
5460 <= MAX_LD_OFFSET (GET_MODE (x))))
5461 {
5462 rtx xx = XEXP (XEXP (x,0), 0);
5463 int regno = REGNO (xx);
5464 if (TARGET_ALL_DEBUG)
5465 {
5466 fprintf (stderr, ("extra_constraint:\n"
5467 "reload_completed: %d\n"
5468 "reload_in_progress: %d\n"),
5469 reload_completed, reload_in_progress);
5470 debug_rtx (x);
5471 }
5472 if (regno >= FIRST_PSEUDO_REGISTER)
5473 return 1; /* allocate pseudos */
5474 else if (regno == REG_Z || regno == REG_Y)
5475 return 1; /* strictly check */
5476 else if (xx == frame_pointer_rtx
5477 || xx == arg_pointer_rtx)
5478 return 1; /* XXX frame & arg pointer checks */
5479 }
5480 return 0;
5481 }
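
/* Example (illustrative): a MEM such as

     (mem:QI (plus:HI (reg:HI 28) (const_int 10)))

   satisfies 'Q', because r28 is REG_Y and 10 <= MAX_LD_OFFSET (QImode) = 63;
   the same address formed from r26 (the X pointer) would be rejected. */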
5482
5483 /* Convert condition code CONDITION to the valid AVR condition code. */
5484
5485 RTX_CODE
5486 avr_normalize_condition (RTX_CODE condition)
5487 {
5488 switch (condition)
5489 {
5490 case GT:
5491 return GE;
5492 case GTU:
5493 return GEU;
5494 case LE:
5495 return LT;
5496 case LEU:
5497 return LTU;
5498 default:
5499 gcc_unreachable ();
5500 }
5501 }
5502
5503 /* This function optimizes conditional jumps. */
5504
5505 static void
5506 avr_reorg (void)
5507 {
5508 rtx insn, pattern;
5509
5510 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5511 {
5512 if (! (GET_CODE (insn) == INSN
5513 || GET_CODE (insn) == CALL_INSN
5514 || GET_CODE (insn) == JUMP_INSN)
5515 || !single_set (insn))
5516 continue;
5517
5518 pattern = PATTERN (insn);
5519
5520 if (GET_CODE (pattern) == PARALLEL)
5521 pattern = XVECEXP (pattern, 0, 0);
5522 if (GET_CODE (pattern) == SET
5523 && SET_DEST (pattern) == cc0_rtx
5524 && compare_diff_p (insn))
5525 {
5526 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5527 {
5528 /* We are now looking at a compare insn. */
5529
5530 pattern = SET_SRC (pattern);
5531 if (true_regnum (XEXP (pattern,0)) >= 0
5532 && true_regnum (XEXP (pattern,1)) >= 0 )
5533 {
5534 rtx x = XEXP (pattern,0);
5535 rtx next = next_real_insn (insn);
5536 rtx pat = PATTERN (next);
5537 rtx src = SET_SRC (pat);
5538 rtx t = XEXP (src,0);
5539 PUT_CODE (t, swap_condition (GET_CODE (t)));
5540 XEXP (pattern,0) = XEXP (pattern,1);
5541 XEXP (pattern,1) = x;
5542 INSN_CODE (next) = -1;
5543 }
5544 else if (true_regnum (XEXP (pattern, 0)) >= 0
5545 && XEXP (pattern, 1) == const0_rtx)
5546 {
5547 /* This is a tst insn; we can reverse it. */
5548 rtx next = next_real_insn (insn);
5549 rtx pat = PATTERN (next);
5550 rtx src = SET_SRC (pat);
5551 rtx t = XEXP (src,0);
5552
5553 PUT_CODE (t, swap_condition (GET_CODE (t)));
5554 XEXP (pattern, 1) = XEXP (pattern, 0);
5555 XEXP (pattern, 0) = const0_rtx;
5556 INSN_CODE (next) = -1;
5557 INSN_CODE (insn) = -1;
5558 }
5559 else if (true_regnum (XEXP (pattern,0)) >= 0
5560 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5561 {
5562 rtx x = XEXP (pattern,1);
5563 rtx next = next_real_insn (insn);
5564 rtx pat = PATTERN (next);
5565 rtx src = SET_SRC (pat);
5566 rtx t = XEXP (src,0);
5567 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5568
5569 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5570 {
5571 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5572 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5573 INSN_CODE (next) = -1;
5574 INSN_CODE (insn) = -1;
5575 }
5576 }
5577 }
5578 }
5579 }
5580 }
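
/* Example of the last rewrite above (illustrative): a compare such as

     (set (cc0) (compare (reg:QI 24) (const_int 5)))

   followed by a branch on GT is turned, when avr_simplify_comparison_p
   allows it, into a compare against (const_int 6) with the branch condition
   normalized to GE, since x > 5 and x >= 6 are equivalent and GE/GEU/LT/LTU
   map directly onto the AVR branch instructions. */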
5581
5582 /* Return the register number used for the function return value. */
5583
5584 int
5585 avr_ret_register (void)
5586 {
5587 return 24;
5588 }
5589
5590 /* Create an RTX representing the place where a
5591 library function returns a value of mode MODE. */
5592
5593 rtx
5594 avr_libcall_value (enum machine_mode mode)
5595 {
5596 int offs = GET_MODE_SIZE (mode);
5597 if (offs < 2)
5598 offs = 2;
5599 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5600 }
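
/* Worked example (illustrative): avr_ret_register gives 24, so with
   RET_REGISTER == 24 an HImode result (offs == 2) starts in r24
   (r25:r24), an SImode result (offs == 4) starts in r22 (r25..r22),
   and a QImode result is padded to offs == 2 and returned in r24. */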
5601
5602 /* Create an RTX representing the place where a
5603 function returns a value of data type VALTYPE. */
5604
5605 rtx
5606 avr_function_value (const_tree type,
5607 const_tree func ATTRIBUTE_UNUSED,
5608 bool outgoing ATTRIBUTE_UNUSED)
5609 {
5610 unsigned int offs;
5611
5612 if (TYPE_MODE (type) != BLKmode)
5613 return avr_libcall_value (TYPE_MODE (type));
5614
5615 offs = int_size_in_bytes (type);
5616 if (offs < 2)
5617 offs = 2;
5618 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5619 offs = GET_MODE_SIZE (SImode);
5620 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5621 offs = GET_MODE_SIZE (DImode);
5622
5623 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5624 }
5625
5626 /* Places additional restrictions on the register class to
5627 use when it is necessary to copy value X into a register
5628 in class CLASS. */
5629
5630 enum reg_class
5631 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
5632 {
5633 return rclass;
5634 }
5635
5636 int
5637 test_hard_reg_class (enum reg_class rclass, rtx x)
5638 {
5639 int regno = true_regnum (x);
5640 if (regno < 0)
5641 return 0;
5642
5643 if (TEST_HARD_REG_CLASS (rclass, regno))
5644 return 1;
5645
5646 return 0;
5647 }
5648
5649
5650 int
5651 jump_over_one_insn_p (rtx insn, rtx dest)
5652 {
5653 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5654 ? XEXP (dest, 0)
5655 : dest);
5656 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5657 int dest_addr = INSN_ADDRESSES (uid);
5658 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5659 }
5660
5661 /* Returns 1 if a value of mode MODE can be stored starting with hard
5662 register number REGNO. On the enhanced core, anything larger than
5663 1 byte must start in an even-numbered register for "movw" to work
5664 (this way we don't have to check for odd registers everywhere). */
5665
5666 int
5667 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5668 {
5669 /* Disallow QImode in stack pointer regs. */
5670 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5671 return 0;
5672
5673 /* The only thing that can go into registers r28:r29 is a Pmode value. */
5674 if (regno == REG_Y && mode == Pmode)
5675 return 1;
5676
5677 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5678 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5679 return 0;
5680
5681 if (mode == QImode)
5682 return 1;
5683
5684 /* Modes larger than QImode occupy consecutive registers. */
5685 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5686 return 0;
5687
5688 /* All modes larger than QImode should start in an even register. */
5689 return !(regno & 1);
5690 }
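
/* Examples (illustrative): HImode starting in r24 is allowed (even
   register), HImode starting in r25 is not; and apart from Pmode in r28
   (the Y frame pointer), no mode may touch r28:r29 at all, so even QImode
   in r28 or r29 is rejected by the span check above. */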
5691
5692 const char *
5693 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5694 {
5695 int tmp;
5696 if (!len)
5697 len = &tmp;
5698
5699 if (GET_CODE (operands[1]) == CONST_INT)
5700 {
5701 int val = INTVAL (operands[1]);
5702 if ((val & 0xff) == 0)
5703 {
5704 *len = 3;
5705 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5706 AS2 (ldi,%2,hi8(%1)) CR_TAB
5707 AS2 (mov,%B0,%2));
5708 }
5709 else if ((val & 0xff00) == 0)
5710 {
5711 *len = 3;
5712 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5713 AS2 (mov,%A0,%2) CR_TAB
5714 AS2 (mov,%B0,__zero_reg__));
5715 }
5716 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5717 {
5718 *len = 3;
5719 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5720 AS2 (mov,%A0,%2) CR_TAB
5721 AS2 (mov,%B0,%2));
5722 }
5723 }
5724 *len = 4;
5725 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5726 AS2 (mov,%A0,%2) CR_TAB
5727 AS2 (ldi,%2,hi8(%1)) CR_TAB
5728 AS2 (mov,%B0,%2));
5729 }
5730
5731
5732 const char *
5733 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5734 {
5735 rtx src = operands[1];
5736 int cnst = (GET_CODE (src) == CONST_INT);
5737
5738 if (len)
5739 {
5740 if (cnst)
5741 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5742 + ((INTVAL (src) & 0xff00) != 0)
5743 + ((INTVAL (src) & 0xff0000) != 0)
5744 + ((INTVAL (src) & 0xff000000) != 0);
5745 else
5746 *len = 8;
5747
5748 return "";
5749 }
5750
5751 if (cnst && ((INTVAL (src) & 0xff) == 0))
5752 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5753 else
5754 {
5755 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5756 output_asm_insn (AS2 (mov, %A0, %2), operands);
5757 }
5758 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5759 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5760 else
5761 {
5762 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5763 output_asm_insn (AS2 (mov, %B0, %2), operands);
5764 }
5765 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5766 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5767 else
5768 {
5769 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5770 output_asm_insn (AS2 (mov, %C0, %2), operands);
5771 }
5772 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5773 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5774 else
5775 {
5776 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5777 output_asm_insn (AS2 (mov, %D0, %2), operands);
5778 }
5779 return "";
5780 }
5781
5782 void
5783 avr_output_bld (rtx operands[], int bit_nr)
5784 {
5785 static char s[] = "bld %A0,0";
5786
5787 s[5] = 'A' + (bit_nr >> 3);
5788 s[8] = '0' + (bit_nr & 7);
5789 output_asm_insn (s, operands);
5790 }
5791
5792 void
5793 avr_output_addr_vec_elt (FILE *stream, int value)
5794 {
5795 switch_to_section (progmem_section);
5796 if (AVR_HAVE_JMP_CALL)
5797 fprintf (stream, "\t.word gs(.L%d)\n", value);
5798 else
5799 fprintf (stream, "\trjmp .L%d\n", value);
5800 }
5801
5802 /* Return true if hard register REGNO is safe to allocate as a scratch
5803 register (for a define_peephole2) in the current function. */
5804
5805 bool
5806 avr_hard_regno_scratch_ok (unsigned int regno)
5807 {
5808 /* Interrupt functions can only use registers that have already been saved
5809 by the prologue, even if they would normally be call-clobbered. */
5810
5811 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5812 && !df_regs_ever_live_p (regno))
5813 return false;
5814
5815 return true;
5816 }
5817
5818 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
5819
5820 int
5821 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
5822 unsigned int new_reg)
5823 {
5824 /* Interrupt functions can only use registers that have already been
5825 saved by the prologue, even if they would normally be
5826 call-clobbered. */
5827
5828 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
5829 && !df_regs_ever_live_p (new_reg))
5830 return 0;
5831
5832 return 1;
5833 }
5834
5835 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
5836 or a memory location in the I/O space (QImode only).
5837
5838 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
5839 Operand 1: register operand to test, or CONST_INT memory address.
5840 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
5841 Operand 3: label to jump to if the test is true. */
5842
5843 const char *
5844 avr_out_sbxx_branch (rtx insn, rtx operands[])
5845 {
5846 enum rtx_code comp = GET_CODE (operands[0]);
5847 int long_jump = (get_attr_length (insn) >= 4);
5848 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
5849
5850 if (comp == GE)
5851 comp = EQ;
5852 else if (comp == LT)
5853 comp = NE;
5854
5855 if (reverse)
5856 comp = reverse_condition (comp);
5857
5858 if (GET_CODE (operands[1]) == CONST_INT)
5859 {
5860 if (INTVAL (operands[1]) < 0x40)
5861 {
5862 if (comp == EQ)
5863 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
5864 else
5865 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
5866 }
5867 else
5868 {
5869 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
5870 if (comp == EQ)
5871 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
5872 else
5873 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
5874 }
5875 }
5876 else /* GET_CODE (operands[1]) == REG */
5877 {
5878 if (GET_MODE (operands[1]) == QImode)
5879 {
5880 if (comp == EQ)
5881 output_asm_insn (AS2 (sbrs,%1,%2), operands);
5882 else
5883 output_asm_insn (AS2 (sbrc,%1,%2), operands);
5884 }
5885 else /* HImode or SImode */
5886 {
5887 static char buf[] = "sbrc %A1,0";
5888 int bit_nr = exact_log2 (INTVAL (operands[2])
5889 & GET_MODE_MASK (GET_MODE (operands[1])));
5890
5891 buf[3] = (comp == EQ) ? 's' : 'c';
5892 buf[6] = 'A' + (bit_nr >> 3);
5893 buf[9] = '0' + (bit_nr & 7);
5894 output_asm_insn (buf, operands);
5895 }
5896 }
5897
5898 if (long_jump)
5899 return (AS1 (rjmp,.+4) CR_TAB
5900 AS1 (jmp,%3));
5901 if (!reverse)
5902 return AS1 (rjmp,%3);
5903 return "";
5904 }
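
/* Example (illustrative): for a QImode I/O location given as a CONST_INT
   address below 0x40, bit number in operand 2, an EQ comparison and a
   short, non-reversed jump, the code above emits

     AS2 (sbis,%1-0x20,%2)
     AS1 (rjmp,%3)

   i.e. "skip the rjmp if the I/O bit is set", so the jump to the label is
   taken only when the tested bit is zero. */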
5905
5906 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
5907
5908 static void
5909 avr_asm_out_ctor (rtx symbol, int priority)
5910 {
5911 fputs ("\t.global __do_global_ctors\n", asm_out_file);
5912 default_ctor_section_asm_out_constructor (symbol, priority);
5913 }
5914
5915 /* Worker function for TARGET_ASM_DESTRUCTOR. */
5916
5917 static void
5918 avr_asm_out_dtor (rtx symbol, int priority)
5919 {
5920 fputs ("\t.global __do_global_dtors\n", asm_out_file);
5921 default_dtor_section_asm_out_destructor (symbol, priority);
5922 }
5923
5924 /* Worker function for TARGET_RETURN_IN_MEMORY. */
5925
5926 static bool
5927 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
5928 {
5929 if (TYPE_MODE (type) == BLKmode)
5930 {
5931 HOST_WIDE_INT size = int_size_in_bytes (type);
5932 return (size == -1 || size > 8);
5933 }
5934 else
5935 return false;
5936 }
5937
5938 /* Worker function for CASE_VALUES_THRESHOLD. */
5939
5940 unsigned int avr_case_values_threshold (void)
5941 {
5942 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
5943 }
5944
5945 #include "gt-avr.h"