2b1aaf097465c275b9b6820de454a262737d10fd
[gcc.git] / gcc / config / avr / avr.c
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (chertykov@gmail.com)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "insn-attr.h"
32 #include "insn-codes.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "diagnostic-core.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "optabs.h"
43 #include "ggc.h"
44 #include "langhooks.h"
45 #include "tm_p.h"
46 #include "target.h"
47 #include "target-def.h"
48 #include "params.h"
49 #include "df.h"
50
51 /* Maximal allowed offset for an address in the LD command */
52 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
53
54 static void avr_option_override (void);
55 static int avr_naked_function_p (tree);
56 static int interrupt_function_p (tree);
57 static int signal_function_p (tree);
58 static int avr_OS_task_function_p (tree);
59 static int avr_OS_main_function_p (tree);
60 static int avr_regs_to_save (HARD_REG_SET *);
61 static int get_sequence_length (rtx insns);
62 static int sequent_regs_live (void);
63 static const char *ptrreg_to_str (int);
64 static const char *cond_string (enum rtx_code);
65 static int avr_num_arg_regs (enum machine_mode, const_tree);
66
67 static RTX_CODE compare_condition (rtx insn);
68 static rtx avr_legitimize_address (rtx, rtx, enum machine_mode);
69 static int compare_sign_p (rtx insn);
70 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
71 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
72 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
73 static bool avr_assemble_integer (rtx, unsigned int, int);
74 static void avr_file_start (void);
75 static void avr_file_end (void);
76 static bool avr_legitimate_address_p (enum machine_mode, rtx, bool);
77 static void avr_asm_function_end_prologue (FILE *);
78 static void avr_asm_function_begin_epilogue (FILE *);
79 static bool avr_cannot_modify_jumps_p (void);
80 static rtx avr_function_value (const_tree, const_tree, bool);
81 static rtx avr_libcall_value (enum machine_mode, const_rtx);
82 static bool avr_function_value_regno_p (const unsigned int);
83 static void avr_insert_attributes (tree, tree *);
84 static void avr_asm_init_sections (void);
85 static unsigned int avr_section_type_flags (tree, const char *, int);
86
87 static void avr_reorg (void);
88 static void avr_asm_out_ctor (rtx, int);
89 static void avr_asm_out_dtor (rtx, int);
90 static int avr_register_move_cost (enum machine_mode, reg_class_t, reg_class_t);
91 static int avr_memory_move_cost (enum machine_mode, reg_class_t, bool);
92 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
93 static bool avr_rtx_costs (rtx, int, int, int *, bool);
94 static int avr_address_cost (rtx, bool);
95 static bool avr_return_in_memory (const_tree, const_tree);
96 static struct machine_function * avr_init_machine_status (void);
97 static void avr_init_builtins (void);
98 static rtx avr_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
99 static rtx avr_builtin_setjmp_frame_value (void);
100 static bool avr_hard_regno_scratch_ok (unsigned int);
101 static unsigned int avr_case_values_threshold (void);
102 static bool avr_frame_pointer_required_p (void);
103 static bool avr_can_eliminate (const int, const int);
104 static bool avr_class_likely_spilled_p (reg_class_t c);
105 static rtx avr_function_arg (cumulative_args_t , enum machine_mode,
106 const_tree, bool);
107 static void avr_function_arg_advance (cumulative_args_t, enum machine_mode,
108 const_tree, bool);
109 static bool avr_function_ok_for_sibcall (tree, tree);
110 static void avr_asm_named_section (const char *name, unsigned int flags, tree decl);
111
112 /* Allocate registers from r25 to r8 for parameters for function calls. */
/* NOTE(review): value is 26, one past r25 -- argument registers are
   allocated downward from r25, so the cumulative count starts at 26
   and the register used is FIRST_CUM_REG - size; confirm against
   the INIT_CUMULATIVE_ARGS / function_arg code.  */
113 #define FIRST_CUM_REG 26
114 
115 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
116 static GTY(()) rtx tmp_reg_rtx;
117 
118 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
119 static GTY(()) rtx zero_reg_rtx;
120 
121 /* AVR register names {"r0", "r1", ..., "r31"} */
122 static const char *const avr_regnames[] = REGISTER_NAMES;
123 
124 /* Preprocessor macros to define depending on MCU type. */
125 const char *avr_extra_arch_macro;
126 
127 /* Current architecture. */
128 const struct base_arch_s *avr_current_arch;
129 
130 /* Current device. */
131 const struct mcu_type_s *avr_current_device;
132 
/* Section used for data placed in program memory ("progmem"). */
133 section *progmem_section;
134 
135 /* To track if code will use .bss and/or .data. */
136 bool avr_need_clear_bss_p = false;
137 bool avr_need_copy_data_p = false;
138 
139 /* AVR attributes.  Table is terminated by the all-NULL sentinel entry. */
140 static const struct attribute_spec avr_attribute_table[] =
141 {
142 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
143 affects_type_identity } */
144 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute,
145 false },
146 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute,
147 false },
148 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute,
149 false },
150 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute,
151 false },
152 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute,
153 false },
154 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute,
155 false },
156 { NULL, 0, 0, false, false, false, NULL, false }
157 };
158 \f
159 /* Initialize the GCC target structure. */
160 #undef TARGET_ASM_ALIGNED_HI_OP
161 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
162 #undef TARGET_ASM_ALIGNED_SI_OP
163 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
164 #undef TARGET_ASM_UNALIGNED_HI_OP
165 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
166 #undef TARGET_ASM_UNALIGNED_SI_OP
167 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
168 #undef TARGET_ASM_INTEGER
169 #define TARGET_ASM_INTEGER avr_assemble_integer
170 #undef TARGET_ASM_FILE_START
171 #define TARGET_ASM_FILE_START avr_file_start
172 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
173 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
174 #undef TARGET_ASM_FILE_END
175 #define TARGET_ASM_FILE_END avr_file_end
176
177 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
178 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
179 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
180 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
181
182 #undef TARGET_FUNCTION_VALUE
183 #define TARGET_FUNCTION_VALUE avr_function_value
184 #undef TARGET_LIBCALL_VALUE
185 #define TARGET_LIBCALL_VALUE avr_libcall_value
186 #undef TARGET_FUNCTION_VALUE_REGNO_P
187 #define TARGET_FUNCTION_VALUE_REGNO_P avr_function_value_regno_p
188
189 #undef TARGET_ATTRIBUTE_TABLE
190 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
191 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
192 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
193 #undef TARGET_INSERT_ATTRIBUTES
194 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
195 #undef TARGET_SECTION_TYPE_FLAGS
196 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
197
198 /* `TARGET_ASM_NAMED_SECTION' must be defined in avr.h. */
199
200 #undef TARGET_ASM_INIT_SECTIONS
201 #define TARGET_ASM_INIT_SECTIONS avr_asm_init_sections
202
203 #undef TARGET_REGISTER_MOVE_COST
204 #define TARGET_REGISTER_MOVE_COST avr_register_move_cost
205 #undef TARGET_MEMORY_MOVE_COST
206 #define TARGET_MEMORY_MOVE_COST avr_memory_move_cost
207 #undef TARGET_RTX_COSTS
208 #define TARGET_RTX_COSTS avr_rtx_costs
209 #undef TARGET_ADDRESS_COST
210 #define TARGET_ADDRESS_COST avr_address_cost
211 #undef TARGET_MACHINE_DEPENDENT_REORG
212 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
213 #undef TARGET_FUNCTION_ARG
214 #define TARGET_FUNCTION_ARG avr_function_arg
215 #undef TARGET_FUNCTION_ARG_ADVANCE
216 #define TARGET_FUNCTION_ARG_ADVANCE avr_function_arg_advance
217
218 #undef TARGET_LEGITIMIZE_ADDRESS
219 #define TARGET_LEGITIMIZE_ADDRESS avr_legitimize_address
220
221 #undef TARGET_RETURN_IN_MEMORY
222 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
223
224 #undef TARGET_STRICT_ARGUMENT_NAMING
225 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
226
227 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
228 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
229
230 #undef TARGET_HARD_REGNO_SCRATCH_OK
231 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
232 #undef TARGET_CASE_VALUES_THRESHOLD
233 #define TARGET_CASE_VALUES_THRESHOLD avr_case_values_threshold
234
235 #undef TARGET_LEGITIMATE_ADDRESS_P
236 #define TARGET_LEGITIMATE_ADDRESS_P avr_legitimate_address_p
237
238 #undef TARGET_FRAME_POINTER_REQUIRED
239 #define TARGET_FRAME_POINTER_REQUIRED avr_frame_pointer_required_p
240 #undef TARGET_CAN_ELIMINATE
241 #define TARGET_CAN_ELIMINATE avr_can_eliminate
242
243 #undef TARGET_CLASS_LIKELY_SPILLED_P
244 #define TARGET_CLASS_LIKELY_SPILLED_P avr_class_likely_spilled_p
245
246 #undef TARGET_OPTION_OVERRIDE
247 #define TARGET_OPTION_OVERRIDE avr_option_override
248
249 #undef TARGET_CANNOT_MODIFY_JUMPS_P
250 #define TARGET_CANNOT_MODIFY_JUMPS_P avr_cannot_modify_jumps_p
251
252 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
253 #define TARGET_FUNCTION_OK_FOR_SIBCALL avr_function_ok_for_sibcall
254
255 #undef TARGET_INIT_BUILTINS
256 #define TARGET_INIT_BUILTINS avr_init_builtins
257
258 #undef TARGET_EXPAND_BUILTIN
259 #define TARGET_EXPAND_BUILTIN avr_expand_builtin
260
261
262 struct gcc_target targetm = TARGET_INITIALIZER;
263 \f
/* Implement TARGET_OPTION_OVERRIDE.  Select the current device and
   architecture from the -mmcu selection, build the cached tmp/zero
   register RTXen, and install the machine_function allocator.  */
264 static void
265 avr_option_override (void)
266 {
/* NOTE(review): presumably disabled because address 0 can be a valid
   data address on AVR -- confirm.  */
267 flag_delete_null_pointer_checks = 0;
268 
/* Device must be resolved first: the arch and macro below derive from it. */
269 avr_current_device = &avr_mcu_types[avr_mcu_index];
270 avr_current_arch = &avr_arch_types[avr_current_device->arch];
271 avr_extra_arch_macro = avr_current_device->macro;
272 
273 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
274 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
275 
276 init_machine_status = avr_init_machine_status;
277 }
278
279 /* return register class from register number. */
/* Indexed by hard register number 0..33 (r0-r31 plus SPL/SPH);
   consumed by avr_regno_reg_class below.  */
280 
281 static const enum reg_class reg_class_tab[]={
282 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
283 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
284 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
285 GENERAL_REGS, /* r0 - r15 */
286 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
287 LD_REGS, /* r16 - 23 */
288 ADDW_REGS,ADDW_REGS, /* r24,r25 */
289 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
290 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
291 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
292 STACK_REG,STACK_REG /* SPL,SPH */
293 };
294
295 /* Function to set up the backend function structure. */
/* Returns a zero-initialized, GC-managed machine_function; installed
   as init_machine_status by avr_option_override.  */
296 
297 static struct machine_function *
298 avr_init_machine_status (void)
299 {
300 return ggc_alloc_cleared_machine_function ();
301 }
302
303 /* Return register class for register R. */
304
305 enum reg_class
306 avr_regno_reg_class (int r)
307 {
308 if (r <= 33)
309 return reg_class_tab[r];
310 return ALL_REGS;
311 }
312
313 /* A helper for the subsequent function attribute used to dig for
314 attribute 'name' in a FUNCTION_DECL or FUNCTION_TYPE */
315
316 static inline int
317 avr_lookup_function_attribute1 (const_tree func, const char *name)
318 {
319 if (FUNCTION_DECL == TREE_CODE (func))
320 {
321 if (NULL_TREE != lookup_attribute (name, DECL_ATTRIBUTES (func)))
322 {
323 return true;
324 }
325
326 func = TREE_TYPE (func);
327 }
328
329 gcc_assert (TREE_CODE (func) == FUNCTION_TYPE
330 || TREE_CODE (func) == METHOD_TYPE);
331
332 return NULL_TREE != lookup_attribute (name, TYPE_ATTRIBUTES (func));
333 }
334
335 /* Return nonzero if FUNC is a naked function. */
336 
337 static int
338 avr_naked_function_p (tree func)
339 {
340 return avr_lookup_function_attribute1 (func, "naked");
341 }
342 
343 /* Return nonzero if FUNC is an interrupt function as specified
344 by the "interrupt" attribute. */
345 
346 static int
347 interrupt_function_p (tree func)
348 {
349 return avr_lookup_function_attribute1 (func, "interrupt");
350 }
351 
352 /* Return nonzero if FUNC is a signal function as specified
353 by the "signal" attribute. */
354 
355 static int
356 signal_function_p (tree func)
357 {
358 return avr_lookup_function_attribute1 (func, "signal");
359 }
360 
361 /* Return nonzero if FUNC is an OS_task function. */
362 
363 static int
364 avr_OS_task_function_p (tree func)
365 {
366 return avr_lookup_function_attribute1 (func, "OS_task");
367 }
368 
369 /* Return nonzero if FUNC is an OS_main function. */
370 
371 static int
372 avr_OS_main_function_p (tree func)
373 {
374 return avr_lookup_function_attribute1 (func, "OS_main");
375 }
376
377 /* Return the number of hard registers to push/pop in the prologue/epilogue
378 of the current function, and optionally store these registers in SET. */
379 
380 static int
381 avr_regs_to_save (HARD_REG_SET *set)
382 {
383 int reg, count;
384 int int_or_sig_p = (interrupt_function_p (current_function_decl)
385 || signal_function_p (current_function_decl));
386 
387 if (set)
388 CLEAR_HARD_REG_SET (*set);
389 count = 0;
390 
391 /* No need to save any registers if the function never returns or
392 has "OS_task" or "OS_main" attribute. */
393 if (TREE_THIS_VOLATILE (current_function_decl)
394 || cfun->machine->is_OS_task
395 || cfun->machine->is_OS_main)
396 return 0;
397 
398 for (reg = 0; reg < 32; reg++)
399 {
400 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
401 any global register variables. */
402 if (fixed_regs[reg])
403 continue;
404 
/* Interrupt/signal handlers in non-leaf functions must also preserve
   call-used registers.  r28/r29 are excluded when the frame pointer
   is needed because the prologue saves that pair separately.  */
405 if ((int_or_sig_p && !current_function_is_leaf && call_used_regs[reg])
406 || (df_regs_ever_live_p (reg)
407 && (int_or_sig_p || !call_used_regs[reg])
408 && !(frame_pointer_needed
409 && (reg == REG_Y || reg == (REG_Y+1)))))
410 {
411 if (set)
412 SET_HARD_REG_BIT (*set, reg);
413 count++;
414 }
415 }
416 return count;
417 }
418
419 /* Return true if register FROM can be eliminated via register TO. */
420
421 bool
422 avr_can_eliminate (const int from, const int to)
423 {
424 return ((from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
425 || ((from == FRAME_POINTER_REGNUM
426 || from == FRAME_POINTER_REGNUM + 1)
427 && !frame_pointer_needed));
428 }
429
430 /* Compute offset between arg_pointer and frame_pointer. */
431
432 int
433 avr_initial_elimination_offset (int from, int to)
434 {
435 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
436 return 0;
437 else
438 {
439 int offset = frame_pointer_needed ? 2 : 0;
440 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
441
442 offset += avr_regs_to_save (NULL);
443 return get_frame_size () + (avr_pc_size) + 1 + offset;
444 }
445 }
446
447 /* Actual start of frame is virtual_stack_vars_rtx this is offset from
448 frame pointer by +STARTING_FRAME_OFFSET.
449 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
450 avoids creating add/sub of offset in nonlocal goto and setjmp. */
/* Implements TARGET_BUILTIN_SETJMP_FRAME_VALUE (hooked above).  */
451 
452 rtx avr_builtin_setjmp_frame_value (void)
453 {
454 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
455 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
456 }
457
458 /* Return contents of MEM at frame pointer + stack size + 1 (+2 if 3 byte PC).
459 This is return address of function. */
/* COUNT must be 0 (only the current frame is supported); TEM is the
   base address to offset from.  The symbolic offset .L__stack_usage is
   emitted by avr_asm_function_end_prologue.  */
460 rtx
461 avr_return_addr_rtx (int count, rtx tem)
462 {
463 rtx r;
464 
465 /* Can only return this functions return address. Others not supported. */
466 if (count)
467 return NULL;
468 
469 if (AVR_3_BYTE_PC)
470 {
/* Skip the high PC byte: only 2 bytes of the 3-byte address are read. */
471 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+2");
472 warning (0, "'builtin_return_address' contains only 2 bytes of address");
473 }
474 else
475 r = gen_rtx_SYMBOL_REF (Pmode, ".L__stack_usage+1");
476 
477 r = gen_rtx_PLUS (Pmode, tem, r);
478 r = gen_frame_mem (Pmode, memory_address (Pmode, r));
/* NOTE(review): the 8-bit rotate appears to byte-swap the address as
   stored on the stack -- confirm against the call/return convention.  */
479 r = gen_rtx_ROTATE (HImode, r, GEN_INT (8));
480 return r;
481 }
482
483 /* Return 1 if the function epilogue is just a single "ret". */
484
485 int
486 avr_simple_epilogue (void)
487 {
488 return (! frame_pointer_needed
489 && get_frame_size () == 0
490 && avr_regs_to_save (NULL) == 0
491 && ! interrupt_function_p (current_function_decl)
492 && ! signal_function_p (current_function_decl)
493 && ! avr_naked_function_p (current_function_decl)
494 && ! TREE_THIS_VOLATILE (current_function_decl));
495 }
496
497 /* This function checks sequence of live registers. */
/* Count the live call-saved registers among r0-r17 plus r28/r29.
   live_seq is the total number of such live registers; cur_seq is the
   length of the trailing contiguous run of live registers.  Return
   live_seq when ALL live registers form one contiguous run (so the
   __prologue_saves__/-mcall-prologues scheme applies), else 0.  */
498 
499 static int
500 sequent_regs_live (void)
501 {
502 int reg;
503 int live_seq=0;
504 int cur_seq=0;
505 
506 for (reg = 0; reg < 18; ++reg)
507 {
508 if (!call_used_regs[reg])
509 {
510 if (df_regs_ever_live_p (reg))
511 {
512 ++live_seq;
513 ++cur_seq;
514 }
515 else
516 cur_seq = 0;
517 }
518 }
519 
520 if (!frame_pointer_needed)
521 {
522 if (df_regs_ever_live_p (REG_Y))
523 {
524 ++live_seq;
525 ++cur_seq;
526 }
527 else
528 cur_seq = 0;
529 
530 if (df_regs_ever_live_p (REG_Y+1))
531 {
532 ++live_seq;
533 ++cur_seq;
534 }
535 else
536 cur_seq = 0;
537 }
538 else
539 {
/* With a frame pointer, r28/r29 are saved unconditionally.  */
540 cur_seq += 2;
541 live_seq += 2;
542 }
543 return (cur_seq == live_seq) ? live_seq : 0;
544 }
545
546 /* Obtain the length sequence of insns. */
547
548 int
549 get_sequence_length (rtx insns)
550 {
551 rtx insn;
552 int length;
553
554 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
555 length += get_attr_length (insn);
556
557 return length;
558 }
559
560 /* Implement INCOMING_RETURN_ADDR_RTX. */
561 
562 rtx
563 avr_incoming_return_addr_rtx (void)
564 {
565 /* The return address is at the top of the stack. Note that the push
566 was via post-decrement, which means the actual address is off by one. */
/* NOTE(review): HImode -- on 3-byte-PC devices only the low 16 bits
   are exposed here; confirm against avr_return_addr_rtx's warning.  */
567 return gen_frame_mem (HImode, plus_constant (stack_pointer_rtx, 1));
568 }
569
570 /* Helper for expand_prologue. Emit a push of a byte register. */
571
572 static void
573 emit_push_byte (unsigned regno, bool frame_related_p)
574 {
575 rtx mem, reg, insn;
576
577 mem = gen_rtx_POST_DEC (HImode, stack_pointer_rtx);
578 mem = gen_frame_mem (QImode, mem);
579 reg = gen_rtx_REG (QImode, regno);
580
581 insn = emit_insn (gen_rtx_SET (VOIDmode, mem, reg));
582 if (frame_related_p)
583 RTX_FRAME_RELATED_P (insn) = 1;
584
585 cfun->machine->stack_usage++;
586 }
587
588
589 /* Output function prologue. */
/* Emit the RTL function prologue.  Strategy depends on the function
   kind: naked emits nothing; interrupt/signal additionally save
   zero/tmp regs, SREG and (maybe) RAMPZ; with -mcall-prologues
   ("minimize") the library __prologue_saves__ helper is called when
   profitable; otherwise live registers are pushed individually and
   the frame pointer/frame is set up, choosing the shorter of two
   frame-creation sequences.  */
590 
591 void
592 expand_prologue (void)
593 {
594 int live_seq;
595 HARD_REG_SET set;
596 int minimize;
597 HOST_WIDE_INT size = get_frame_size();
598 rtx insn;
599 
600 /* Init cfun->machine. */
601 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
602 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
603 cfun->machine->is_signal = signal_function_p (current_function_decl);
604 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
605 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
606 cfun->machine->stack_usage = 0;
607 
608 /* Prologue: naked. */
609 if (cfun->machine->is_naked)
610 {
611 return;
612 }
613 
614 avr_regs_to_save (&set);
615 live_seq = sequent_regs_live ();
616 minimize = (TARGET_CALL_PROLOGUES
617 && !cfun->machine->is_interrupt
618 && !cfun->machine->is_signal
619 && !cfun->machine->is_OS_task
620 && !cfun->machine->is_OS_main
621 && live_seq);
622 
623 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
624 {
625 /* Enable interrupts. */
626 if (cfun->machine->is_interrupt)
627 emit_insn (gen_enable_interrupt ());
628 
629 /* Push zero reg. */
630 emit_push_byte (ZERO_REGNO, true);
631 
632 /* Push tmp reg. */
633 emit_push_byte (TMP_REGNO, true);
634 
635 /* Push SREG. */
636 /* ??? There's no dwarf2 column reserved for SREG. */
637 emit_move_insn (tmp_reg_rtx, gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
638 emit_push_byte (TMP_REGNO, false);
639 
640 /* Push RAMPZ. */
641 /* ??? There's no dwarf2 column reserved for RAMPZ. */
642 if (AVR_HAVE_RAMPZ
643 && TEST_HARD_REG_BIT (set, REG_Z)
644 && TEST_HARD_REG_BIT (set, REG_Z + 1))
645 {
646 emit_move_insn (tmp_reg_rtx,
647 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
648 emit_push_byte (TMP_REGNO, false);
649 }
650 
651 /* Clear zero reg. */
652 emit_move_insn (zero_reg_rtx, const0_rtx);
653 
654 /* Prevent any attempt to delete the setting of ZERO_REG! */
655 emit_use (zero_reg_rtx);
656 }
/* The minimize path calls the __prologue_saves__ library sequence when
   it is shorter than individual pushes.  */
657 if (minimize && (frame_pointer_needed
658 || (AVR_2_BYTE_PC && live_seq > 6)
659 || live_seq > 7))
660 {
661 int first_reg, reg, offset;
662 
663 emit_move_insn (gen_rtx_REG (HImode, REG_X),
664 gen_int_mode (size, HImode))
665 
666 insn = emit_insn (gen_call_prologue_saves
667 (gen_int_mode (live_seq, HImode),
668 gen_int_mode (size + live_seq, HImode)));
669 RTX_FRAME_RELATED_P (insn) = 1;
670 
671 /* Describe the effect of the unspec_volatile call to prologue_saves.
672 Note that this formulation assumes that add_reg_note pushes the
673 notes to the front. Thus we build them in the reverse order of
674 how we want dwarf2out to process them. */
675 
676 /* The function does always set frame_pointer_rtx, but whether that
677 is going to be permanent in the function is frame_pointer_needed. */
678 add_reg_note (insn, REG_CFA_ADJUST_CFA,
679 gen_rtx_SET (VOIDmode,
680 (frame_pointer_needed
681 ? frame_pointer_rtx : stack_pointer_rtx),
682 plus_constant (stack_pointer_rtx,
683 -(size + live_seq))));
684 
685 /* Note that live_seq always contains r28+r29, but the other
686 registers to be saved are all below 18. */
687 first_reg = 18 - (live_seq - 2);
688 
689 for (reg = 29, offset = -live_seq + 1;
690 reg >= first_reg;
691 reg = (reg == 28 ? 17 : reg - 1), ++offset)
692 {
693 rtx m, r;
694 
695 m = gen_rtx_MEM (QImode, plus_constant (stack_pointer_rtx, offset));
696 r = gen_rtx_REG (QImode, reg);
697 add_reg_note (insn, REG_CFA_OFFSET, gen_rtx_SET (VOIDmode, m, r));
698 }
699 
700 cfun->machine->stack_usage += size + live_seq;
701 }
702 else
703 {
704 int reg;
705 for (reg = 0; reg < 32; ++reg)
706 if (TEST_HARD_REG_BIT (set, reg))
707 emit_push_byte (reg, true);
708 
709 if (frame_pointer_needed)
710 {
711 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
712 {
713 /* Push frame pointer. Always be consistent about the
714 ordering of pushes -- epilogue_restores expects the
715 register pair to be pushed low byte first. */
716 emit_push_byte (REG_Y, true);
717 emit_push_byte (REG_Y + 1, true);
718 }
719 
720 if (!size)
721 {
722 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
723 RTX_FRAME_RELATED_P (insn) = 1;
724 }
725 else
726 {
727 /* Creating a frame can be done by direct manipulation of the
728 stack or via the frame pointer. These two methods are:
729 fp=sp
730 fp-=size
731 sp=fp
732 OR
733 sp-=size
734 fp=sp
735 the optimum method depends on function type, stack and frame size.
736 To avoid a complex logic, both methods are tested and shortest
737 is selected. */
738 rtx myfp;
739 rtx fp_plus_insns;
740 
741 if (AVR_HAVE_8BIT_SP)
742 {
743 /* The high byte (r29) doesn't change. Prefer 'subi'
744 (1 cycle) over 'sbiw' (2 cycles, same size). */
745 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
746 }
747 else
748 {
749 /* Normal sized addition. */
750 myfp = frame_pointer_rtx;
751 }
752 
753 /* Method 1-Adjust frame pointer. */
754 start_sequence ();
755 
756 /* Normally the dwarf2out frame-related-expr interpreter does
757 not expect to have the CFA change once the frame pointer is
758 set up. Thus we avoid marking the move insn below and
759 instead indicate that the entire operation is complete after
760 the frame pointer subtraction is done. */
761 
762 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
763 
764 insn = emit_move_insn (myfp, plus_constant (myfp, -size));
765 RTX_FRAME_RELATED_P (insn) = 1;
766 add_reg_note (insn, REG_CFA_ADJUST_CFA,
767 gen_rtx_SET (VOIDmode, frame_pointer_rtx,
768 plus_constant (stack_pointer_rtx,
769 -size)));
770 
771 /* Copy to stack pointer. Note that since we've already
772 changed the CFA to the frame pointer this operation
773 need not be annotated at all. */
774 if (AVR_HAVE_8BIT_SP)
775 {
776 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
777 }
778 else if (TARGET_NO_INTERRUPTS
779 || cfun->machine->is_signal
780 || cfun->machine->is_OS_main)
781 {
782 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
783 frame_pointer_rtx));
784 }
785 else if (cfun->machine->is_interrupt)
786 {
787 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
788 frame_pointer_rtx));
789 }
790 else
791 {
792 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
793 }
794 
795 fp_plus_insns = get_insns ();
796 end_sequence ();
797 
798 /* Method 2-Adjust Stack pointer. */
799 if (size <= 6)
800 {
801 rtx sp_plus_insns;
802 
803 start_sequence ();
804 
/* 'insn' briefly holds the SP-size expression before being reused
   for the emitted move.  */
805 insn = plus_constant (stack_pointer_rtx, -size);
806 insn = emit_move_insn (stack_pointer_rtx, insn);
807 RTX_FRAME_RELATED_P (insn) = 1;
808 
809 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
810 RTX_FRAME_RELATED_P (insn) = 1;
811 
812 sp_plus_insns = get_insns ();
813 end_sequence ();
814 
815 /* Use shortest method. */
816 if (get_sequence_length (sp_plus_insns)
817 < get_sequence_length (fp_plus_insns))
818 emit_insn (sp_plus_insns);
819 else
820 emit_insn (fp_plus_insns);
821 }
822 else
823 emit_insn (fp_plus_insns);
824 
825 cfun->machine->stack_usage += size;
826 }
827 }
828 }
829 
830 if (flag_stack_usage_info)
831 current_function_static_stack_size = cfun->machine->stack_usage;
832 }
833
834 /* Output summary at end of function prologue. */
835
836 static void
837 avr_asm_function_end_prologue (FILE *file)
838 {
839 if (cfun->machine->is_naked)
840 {
841 fputs ("/* prologue: naked */\n", file);
842 }
843 else
844 {
845 if (cfun->machine->is_interrupt)
846 {
847 fputs ("/* prologue: Interrupt */\n", file);
848 }
849 else if (cfun->machine->is_signal)
850 {
851 fputs ("/* prologue: Signal */\n", file);
852 }
853 else
854 fputs ("/* prologue: function */\n", file);
855 }
856 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
857 get_frame_size());
858 fprintf (file, "/* stack size = %d */\n",
859 cfun->machine->stack_usage);
860 /* Create symbol stack offset here so all functions have it. Add 1 to stack
861 usage for offset so that SP + .L__stack_offset = return address. */
862 fprintf (file, ".L__stack_usage = %d\n", cfun->machine->stack_usage);
863 }
864
865
866 /* Implement EPILOGUE_USES. */
867
868 int
869 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
870 {
871 if (reload_completed
872 && cfun->machine
873 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
874 return 1;
875 return 0;
876 }
877
878 /* Helper for expand_epilogue. Emit a pop of a byte register. */
879
880 static void
881 emit_pop_byte (unsigned regno)
882 {
883 rtx mem, reg;
884
885 mem = gen_rtx_PRE_INC (HImode, stack_pointer_rtx);
886 mem = gen_frame_mem (QImode, mem);
887 reg = gen_rtx_REG (QImode, regno);
888
889 emit_insn (gen_rtx_SET (VOIDmode, reg, mem));
890 }
891
892 /* Output RTL epilogue. */
/* Emit the RTL function epilogue, mirroring expand_prologue: restore
   registers in reverse push order, tear down the frame (choosing the
   shorter of two sequences), restore RAMPZ/SREG/tmp/zero for
   interrupt/signal handlers, and emit the final return unless this is
   a sibcall epilogue (SIBCALL_P).  */
893 
894 void
895 expand_epilogue (bool sibcall_p)
896 {
897 int reg;
898 int live_seq;
899 HARD_REG_SET set;
900 int minimize;
901 HOST_WIDE_INT size = get_frame_size();
902 
903 /* epilogue: naked */
904 if (cfun->machine->is_naked)
905 {
906 gcc_assert (!sibcall_p);
907 
908 emit_jump_insn (gen_return ());
909 return;
910 }
911 
912 avr_regs_to_save (&set);
913 live_seq = sequent_regs_live ();
914 minimize = (TARGET_CALL_PROLOGUES
915 && !cfun->machine->is_interrupt
916 && !cfun->machine->is_signal
917 && !cfun->machine->is_OS_task
918 && !cfun->machine->is_OS_main
919 && live_seq);
920 
/* Counterpart of the __prologue_saves__ path: restore via the
   __epilogue_restores__ library sequence.  */
921 if (minimize && (frame_pointer_needed || live_seq > 4))
922 {
923 if (frame_pointer_needed)
924 {
925 /* Get rid of frame. */
926 emit_move_insn(frame_pointer_rtx,
927 gen_rtx_PLUS (HImode, frame_pointer_rtx,
928 gen_int_mode (size, HImode)));
929 }
930 else
931 {
932 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
933 }
934 
935 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
936 }
937 else
938 {
939 if (frame_pointer_needed)
940 {
941 if (size)
942 {
943 /* Try two methods to adjust stack and select shortest. */
944 rtx myfp;
945 rtx fp_plus_insns;
946 
947 if (AVR_HAVE_8BIT_SP)
948 {
949 /* The high byte (r29) doesn't change - prefer 'subi'
950 (1 cycle) over 'sbiw' (2 cycles, same size). */
951 myfp = gen_rtx_REG (QImode, FRAME_POINTER_REGNUM);
952 }
953 else
954 {
955 /* Normal sized addition. */
956 myfp = frame_pointer_rtx;
957 }
958 
959 /* Method 1-Adjust frame pointer. */
960 start_sequence ();
961 
962 emit_move_insn (myfp, plus_constant (myfp, size));
963 
964 /* Copy to stack pointer. */
965 if (AVR_HAVE_8BIT_SP)
966 {
967 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
968 }
969 else if (TARGET_NO_INTERRUPTS
970 || cfun->machine->is_signal)
971 {
972 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
973 frame_pointer_rtx));
974 }
975 else if (cfun->machine->is_interrupt)
976 {
977 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
978 frame_pointer_rtx));
979 }
980 else
981 {
982 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
983 }
984 
985 fp_plus_insns = get_insns ();
986 end_sequence ();
987 
988 /* Method 2-Adjust Stack pointer. */
989 if (size <= 5)
990 {
991 rtx sp_plus_insns;
992 
993 start_sequence ();
994 
995 emit_move_insn (stack_pointer_rtx,
996 plus_constant (stack_pointer_rtx, size));
997 
998 sp_plus_insns = get_insns ();
999 end_sequence ();
1000 
1001 /* Use shortest method. */
1002 if (get_sequence_length (sp_plus_insns)
1003 < get_sequence_length (fp_plus_insns))
1004 emit_insn (sp_plus_insns);
1005 else
1006 emit_insn (fp_plus_insns);
1007 }
1008 else
1009 emit_insn (fp_plus_insns);
1010 }
1011 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1012 {
1013 /* Restore previous frame_pointer. See expand_prologue for
1014 rationale for not using pophi. */
1015 emit_pop_byte (REG_Y + 1);
1016 emit_pop_byte (REG_Y);
1017 }
1018 }
1019 
1020 /* Restore used registers. */
1021 for (reg = 31; reg >= 0; --reg)
1022 if (TEST_HARD_REG_BIT (set, reg))
1023 emit_pop_byte (reg);
1024 
1025 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1026 {
1027 /* Restore RAMPZ using tmp reg as scratch. */
1028 if (AVR_HAVE_RAMPZ
1029 && TEST_HARD_REG_BIT (set, REG_Z)
1030 && TEST_HARD_REG_BIT (set, REG_Z + 1))
1031 {
1032 emit_pop_byte (TMP_REGNO);
1033 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)),
1034 tmp_reg_rtx);
1035 }
1036 
1037 /* Restore SREG using tmp reg as scratch. */
1038 emit_pop_byte (TMP_REGNO);
1039 
1040 emit_move_insn (gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)),
1041 tmp_reg_rtx);
1042 
1043 /* Restore tmp REG. */
1044 emit_pop_byte (TMP_REGNO);
1045 
1046 /* Restore zero REG. */
1047 emit_pop_byte (ZERO_REGNO);
1048 }
1049 
1050 if (!sibcall_p)
1051 emit_jump_insn (gen_return ());
1052 }
1053 }
1054
/* Implement TARGET_ASM_FUNCTION_BEGIN_EPILOGUE: mark where the
   epilogue starts in the assembly listing.  */

static void
avr_asm_function_begin_epilogue (FILE *file)
{
  fputs ("/* epilogue start */\n", file);
}
1062
1063
1064 /* Implement TARGET_CANNOT_MODIFY_JUMPS_P.  Forbid jump modification
   after reload in naked functions.  */
1065 
1066 static bool
1067 avr_cannot_modify_jumps_p (void)
1068 {
1069 
1070 /* Naked Functions must not have any instructions after
1071 their epilogue, see PR42240 */
1072 
1073 if (reload_completed
1074 && cfun->machine
1075 && cfun->machine->is_naked)
1076 {
1077 return true;
1078 }
1079 
1080 return false;
1081 }
1082
1083
/* Return nonzero if X (an RTX) is a legitimate memory address on the target
   machine for a memory operand of mode MODE.

   STRICT is true once hard register constraints must be honored (during
   and after reload).  The classification proceeds in priority order:
   plain base register, constant address, (base + const displacement)
   within the ldd range, and pre-decrement/post-increment addressing.
   The local R records the register class the address requires; NO_REGS
   means the address is not legitimate.  */

bool
avr_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
{
  enum reg_class r = NO_REGS;

  if (TARGET_ALL_DEBUG)
    {
      fprintf (stderr, "mode: (%s) %s %s %s %s:",
               GET_MODE_NAME(mode),
               strict ? "(strict)": "",
               reload_completed ? "(reload_completed)": "",
               reload_in_progress ? "(reload_in_progress)": "",
               reg_renumber ? "(reg_renumber)" : "");
      if (GET_CODE (x) == PLUS
          && REG_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
          && reg_renumber
          )
        fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
                 true_regnum (XEXP (x, 0)));
      debug_rtx (x);
    }
  /* Before reload a paradoxical SUBREG of a register is still a
     register for addressing purposes.  */
  if (!strict && GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);
  if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
                    : REG_OK_FOR_BASE_NOSTRICT_P (x)))
    r = POINTER_REGS;
  else if (CONSTANT_ADDRESS_P (x))
    r = ALL_REGS;
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && GET_CODE (XEXP (x, 1)) == CONST_INT
           && INTVAL (XEXP (x, 1)) >= 0)
    {
      /* (base + disp): only Y and Z support the ldd displacement form,
         and the displacement is limited to MAX_LD_OFFSET.  */
      int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
      if (fit)
        {
          if (! strict
              || REGNO (XEXP (x,0)) == REG_X
              || REGNO (XEXP (x,0)) == REG_Y
              || REGNO (XEXP (x,0)) == REG_Z)
            r = BASE_POINTER_REGS;
          if (XEXP (x,0) == frame_pointer_rtx
              || XEXP (x,0) == arg_pointer_rtx)
            r = BASE_POINTER_REGS;
        }
      else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
        /* Out-of-range frame accesses are still representable via Y
           after adjusting the pointer.  */
        r = POINTER_Y_REGS;
    }
  else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
           && REG_P (XEXP (x, 0))
           && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
               : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
    {
      r = POINTER_REGS;
    }
  if (TARGET_ALL_DEBUG)
    {
      fprintf (stderr, " ret = %c\n", r + '0');
    }
  return r == NO_REGS ? 0 : (int)r;
}
1151
1152 /* Attempts to replace X with a valid
1153 memory address for an operand of mode MODE */
1154
1155 rtx
1156 avr_legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1157 {
1158 x = oldx;
1159 if (TARGET_ALL_DEBUG)
1160 {
1161 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1162 debug_rtx (oldx);
1163 }
1164
1165 if (GET_CODE (oldx) == PLUS
1166 && REG_P (XEXP (oldx,0)))
1167 {
1168 if (REG_P (XEXP (oldx,1)))
1169 x = force_reg (GET_MODE (oldx), oldx);
1170 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1171 {
1172 int offs = INTVAL (XEXP (oldx,1));
1173 if (frame_pointer_rtx != XEXP (oldx,0))
1174 if (offs > MAX_LD_OFFSET (mode))
1175 {
1176 if (TARGET_ALL_DEBUG)
1177 fprintf (stderr, "force_reg (big offset)\n");
1178 x = force_reg (GET_MODE (oldx), oldx);
1179 }
1180 }
1181 }
1182 return x;
1183 }
1184
1185
1186 /* Return a pointer register name as a string. */
1187
1188 static const char *
1189 ptrreg_to_str (int regno)
1190 {
1191 switch (regno)
1192 {
1193 case REG_X: return "X";
1194 case REG_Y: return "Y";
1195 case REG_Z: return "Z";
1196 default:
1197 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1198 }
1199 return NULL;
1200 }
1201
1202 /* Return the condition name as a string.
1203 Used in conditional jump constructing */
1204
1205 static const char *
1206 cond_string (enum rtx_code code)
1207 {
1208 switch (code)
1209 {
1210 case NE:
1211 return "ne";
1212 case EQ:
1213 return "eq";
1214 case GE:
1215 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1216 return "pl";
1217 else
1218 return "ge";
1219 case LT:
1220 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1221 return "mi";
1222 else
1223 return "lt";
1224 case GEU:
1225 return "sh";
1226 case LTU:
1227 return "lo";
1228 default:
1229 gcc_unreachable ();
1230 }
1231 }
1232
1233 /* Output ADDR to FILE as address. */
1234
1235 void
1236 print_operand_address (FILE *file, rtx addr)
1237 {
1238 switch (GET_CODE (addr))
1239 {
1240 case REG:
1241 fprintf (file, ptrreg_to_str (REGNO (addr)));
1242 break;
1243
1244 case PRE_DEC:
1245 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1246 break;
1247
1248 case POST_INC:
1249 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1250 break;
1251
1252 default:
1253 if (CONSTANT_ADDRESS_P (addr)
1254 && text_segment_operand (addr, VOIDmode))
1255 {
1256 rtx x = addr;
1257 if (GET_CODE (x) == CONST)
1258 x = XEXP (x, 0);
1259 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x,1)) == CONST_INT)
1260 {
1261 /* Assembler gs() will implant word address. Make offset
1262 a byte offset inside gs() for assembler. This is
1263 needed because the more logical (constant+gs(sym)) is not
1264 accepted by gas. For 128K and lower devices this is ok. For
1265 large devices it will create a Trampoline to offset from symbol
1266 which may not be what the user really wanted. */
1267 fprintf (file, "gs(");
1268 output_addr_const (file, XEXP (x,0));
1269 fprintf (file,"+" HOST_WIDE_INT_PRINT_DEC ")", 2 * INTVAL (XEXP (x,1)));
1270 if (AVR_3_BYTE_PC)
1271 if (warning (0, "pointer offset from symbol maybe incorrect"))
1272 {
1273 output_addr_const (stderr, addr);
1274 fprintf(stderr,"\n");
1275 }
1276 }
1277 else
1278 {
1279 fprintf (file, "gs(");
1280 output_addr_const (file, addr);
1281 fprintf (file, ")");
1282 }
1283 }
1284 else
1285 output_addr_const (file, addr);
1286 }
1287 }
1288
1289
1290 /* Output X as assembler operand to file FILE. */
1291
1292 void
1293 print_operand (FILE *file, rtx x, int code)
1294 {
1295 int abcd = 0;
1296
1297 if (code >= 'A' && code <= 'D')
1298 abcd = code - 'A';
1299
1300 if (code == '~')
1301 {
1302 if (!AVR_HAVE_JMP_CALL)
1303 fputc ('r', file);
1304 }
1305 else if (code == '!')
1306 {
1307 if (AVR_HAVE_EIJMP_EICALL)
1308 fputc ('e', file);
1309 }
1310 else if (REG_P (x))
1311 {
1312 if (x == zero_reg_rtx)
1313 fprintf (file, "__zero_reg__");
1314 else
1315 fprintf (file, reg_names[true_regnum (x) + abcd]);
1316 }
1317 else if (GET_CODE (x) == CONST_INT)
1318 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1319 else if (GET_CODE (x) == MEM)
1320 {
1321 rtx addr = XEXP (x,0);
1322 if (code == 'm')
1323 {
1324 if (!CONSTANT_P (addr))
1325 fatal_insn ("bad address, not a constant):", addr);
1326 /* Assembler template with m-code is data - not progmem section */
1327 if (text_segment_operand (addr, VOIDmode))
1328 if (warning ( 0, "accessing data memory with program memory address"))
1329 {
1330 output_addr_const (stderr, addr);
1331 fprintf(stderr,"\n");
1332 }
1333 output_addr_const (file, addr);
1334 }
1335 else if (code == 'o')
1336 {
1337 if (GET_CODE (addr) != PLUS)
1338 fatal_insn ("bad address, not (reg+disp):", addr);
1339
1340 print_operand (file, XEXP (addr, 1), 0);
1341 }
1342 else if (code == 'p' || code == 'r')
1343 {
1344 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1345 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1346
1347 if (code == 'p')
1348 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1349 else
1350 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1351 }
1352 else if (GET_CODE (addr) == PLUS)
1353 {
1354 print_operand_address (file, XEXP (addr,0));
1355 if (REGNO (XEXP (addr, 0)) == REG_X)
1356 fatal_insn ("internal compiler error. Bad address:"
1357 ,addr);
1358 fputc ('+', file);
1359 print_operand (file, XEXP (addr,1), code);
1360 }
1361 else
1362 print_operand_address (file, addr);
1363 }
1364 else if (code == 'x')
1365 {
1366 /* Constant progmem address - like used in jmp or call */
1367 if (0 == text_segment_operand (x, VOIDmode))
1368 if (warning ( 0, "accessing program memory with data memory address"))
1369 {
1370 output_addr_const (stderr, x);
1371 fprintf(stderr,"\n");
1372 }
1373 /* Use normal symbol for direct address no linker trampoline needed */
1374 output_addr_const (file, x);
1375 }
1376 else if (GET_CODE (x) == CONST_DOUBLE)
1377 {
1378 long val;
1379 REAL_VALUE_TYPE rv;
1380 if (GET_MODE (x) != SFmode)
1381 fatal_insn ("internal compiler error. Unknown mode:", x);
1382 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1383 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1384 fprintf (file, "0x%lx", val);
1385 }
1386 else if (code == 'j')
1387 fputs (cond_string (GET_CODE (x)), file);
1388 else if (code == 'k')
1389 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1390 else
1391 print_operand_address (file, x);
1392 }
1393
/* Update the condition code in the INSN.

   Records in the global cc_status which flags INSN leaves in a known
   state, so a following conditional branch can reuse them instead of
   emitting a fresh compare.  Dispatch is on the insn's "cc" attribute.
   BODY is unused; the pattern is taken from INSN.  */

void
notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
{
  rtx set;

  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all. */
      break;

    case CC_SET_N:
      CC_STATUS_INIT;
      break;

    case CC_SET_ZN:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          /* Z and N reflect the destination; V is known clear.  */
          cc_status.flags |= CC_NO_OVERFLOW;
          cc_status.value1 = SET_DEST (set);
        }
      break;

    case CC_SET_CZN:
      /* Insn sets the Z,N,C flags of CC to recog_operand[0].
         The V flag may or may not be known but that's ok because
         alter_cond will change tests to use EQ/NE. */
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        {
          cc_status.value1 = SET_DEST (set);
          cc_status.flags |= CC_OVERFLOW_UNUSABLE;
        }
      break;

    case CC_COMPARE:
      set = single_set (insn);
      CC_STATUS_INIT;
      if (set)
        cc_status.value1 = SET_SRC (set);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state. */
      CC_STATUS_INIT;

      /* Correct CC for the ashrqi3 with the shift count as CONST_INT != 6 */
      set = single_set (insn);
      if (set)
        {
          rtx src = SET_SRC (set);

          if (GET_CODE (src) == ASHIFTRT
              && GET_MODE (src) == QImode)
            {
              rtx x = XEXP (src, 1);

              /* NOTE(review): presumably the asm emitted for these
                 shift counts ends with an instruction whose Z/N
                 describe the result, while count 6 uses a different
                 sequence -- confirm against the ashrqi3 output code.  */
              if (GET_CODE (x) == CONST_INT
                  && INTVAL (x) > 0
                  && INTVAL (x) != 6)
                {
                  cc_status.value1 = SET_DEST (set);
                  cc_status.flags |= CC_OVERFLOW_UNUSABLE;
                }
            }
        }
      break;
    }
}
1468
1469 /* Return maximum number of consecutive registers of
1470 class CLASS needed to hold a value of mode MODE. */
1471
1472 int
1473 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1474 {
1475 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1476 }
1477
1478 /* Choose mode for jump insn:
1479 1 - relative jump in range -63 <= x <= 62 ;
1480 2 - relative jump in range -2046 <= x <= 2045 ;
1481 3 - absolute jump (only for ATmega[16]03). */
1482
1483 int
1484 avr_jump_mode (rtx x, rtx insn)
1485 {
1486 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_CODE (x) == LABEL_REF
1487 ? XEXP (x, 0) : x));
1488 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1489 int jump_distance = cur_addr - dest_addr;
1490
1491 if (-63 <= jump_distance && jump_distance <= 62)
1492 return 1;
1493 else if (-2046 <= jump_distance && jump_distance <= 2045)
1494 return 2;
1495 else if (AVR_HAVE_JMP_CALL)
1496 return 3;
1497
1498 return 2;
1499 }
1500
/* Return an AVR condition jump command sequence.
   X is a comparison RTX.
   LEN is a number returned by avr_jump_mode function:
     1 - target in short-branch range,
     2 - branch around an rjmp,
     3 - branch around an absolute jmp.
   If REVERSE nonzero then condition code in X must be reversed.

   GT/GTU/LE/LEU have no single branch instruction on AVR, so they are
   synthesized from breq plus a signed/unsigned branch; when the
   previous CC setter left V unusable, the signed variants fall back to
   sign-bit tests (brpl/brmi).  */

const char *
ret_cond_branch (rtx x, int len, int reverse)
{
  RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);

  switch (cond)
    {
      /* GT == not-equal AND not-less: skip on equal, then branch on
         "greater or equal".  */
    case GT:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brpl,%0)) :
                len == 2 ? (AS1 (breq,.+4) CR_TAB
                            AS1 (brmi,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+6) CR_TAB
                 AS1 (brmi,.+4) CR_TAB
                 AS1 (jmp,%0)));

      else
        return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brge,%0)) :
                len == 2 ? (AS1 (breq,.+4) CR_TAB
                            AS1 (brlt,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+6) CR_TAB
                 AS1 (brlt,.+4) CR_TAB
                 AS1 (jmp,%0)));
    case GTU:
      return (len == 1 ? (AS1 (breq,.+2) CR_TAB
                          AS1 (brsh,%0)) :
              len == 2 ? (AS1 (breq,.+4) CR_TAB
                          AS1 (brlo,.+2) CR_TAB
                          AS1 (rjmp,%0)) :
              (AS1 (breq,.+6) CR_TAB
               AS1 (brlo,.+4) CR_TAB
               AS1 (jmp,%0)));
      /* LE == equal OR less: branch directly on equal, then on less.  */
    case LE:
      if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
        return (len == 1 ? (AS1 (breq,%0) CR_TAB
                            AS1 (brmi,%0)) :
                len == 2 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brpl,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+2) CR_TAB
                 AS1 (brpl,.+4) CR_TAB
                 AS1 (jmp,%0)));
      else
        return (len == 1 ? (AS1 (breq,%0) CR_TAB
                            AS1 (brlt,%0)) :
                len == 2 ? (AS1 (breq,.+2) CR_TAB
                            AS1 (brge,.+2) CR_TAB
                            AS1 (rjmp,%0)) :
                (AS1 (breq,.+2) CR_TAB
                 AS1 (brge,.+4) CR_TAB
                 AS1 (jmp,%0)));
    case LEU:
      return (len == 1 ? (AS1 (breq,%0) CR_TAB
                          AS1 (brlo,%0)) :
              len == 2 ? (AS1 (breq,.+2) CR_TAB
                          AS1 (brsh,.+2) CR_TAB
                          AS1 (rjmp,%0)) :
              (AS1 (breq,.+2) CR_TAB
               AS1 (brsh,.+4) CR_TAB
               AS1 (jmp,%0)));
    default:
      /* Conditions with a direct branch instruction: use %j1/%k1 to
         print the (reversed) condition suffix.  */
      if (reverse)
        {
          switch (len)
            {
            case 1:
              return AS1 (br%k1,%0);
            case 2:
              return (AS1 (br%j1,.+2) CR_TAB
                      AS1 (rjmp,%0));
            default:
              return (AS1 (br%j1,.+4) CR_TAB
                      AS1 (jmp,%0));
            }
        }
      else
        {
          switch (len)
            {
            case 1:
              return AS1 (br%j1,%0);
            case 2:
              return (AS1 (br%k1,.+2) CR_TAB
                      AS1 (rjmp,%0));
            default:
              return (AS1 (br%k1,.+4) CR_TAB
                      AS1 (jmp,%0));
            }
        }
    }
  return "";
}
1602
1603 /* Predicate function for immediate operand which fits to byte (8bit) */
1604
1605 int
1606 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1607 {
1608 return (GET_CODE (op) == CONST_INT
1609 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1610 }
1611
1612 /* Output insn cost for next insn. */
1613
1614 void
1615 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1616 int num_operands ATTRIBUTE_UNUSED)
1617 {
1618 if (TARGET_ALL_DEBUG)
1619 {
1620 fprintf (asm_out_file, "/* DEBUG: cost = %d. */\n",
1621 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1622 }
1623 }
1624
/* Return 0 if undefined, 1 if always true or always false.

   MODE is the mode of the comparison operands, OP the comparison code
   and X its constant operand.  Returns 1 when X lies outside the range
   representable by the (signed or unsigned) comparison, so the result
   is known at compile time.  */

int
avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
{
  unsigned int max = (mode == QImode ? 0xff :
                      mode == HImode ? 0xffff :
                      mode == SImode ? 0xffffffff : 0);
  if (max && op && GET_CODE (x) == CONST_INT)
    {
      /* Signed comparisons only cover half the unsigned range.  */
      if (unsigned_condition (op) != op)
        max >>= 1;

      /* NOTE(review): the blanket "INTVAL (x) != 0xff" exemption also
         applies to HImode/SImode -- presumably intended only to keep
         QImode comparisons against 0xff alive; confirm intent.  */
      if (max != (INTVAL (x) & max)
          && INTVAL (x) != 0xff)
        return 1;
    }
  return 0;
}
1644
1645
/* Returns nonzero if R is the number of a hard register (r8..r25) in
   which function arguments are sometimes passed.  */

int
function_arg_regno_p(int r)
{
  return r >= 8 && r <= 25;
}
1654
/* Initializing the variable cum for the state at the beginning
   of the argument list.

   Up to 18 bytes of arguments are passed in registers, allocated
   downwards starting at FIRST_CUM_REG (nregs/regno are counted in
   bytes, see avr_function_arg_advance).  Varargs functions called
   directly (LIBNAME is NULL) pass everything on the stack.  */

void
init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
                      tree fndecl ATTRIBUTE_UNUSED)
{
  cum->nregs = 18;
  cum->regno = FIRST_CUM_REG;
  if (!libname && stdarg_p (fntype))
    cum->nregs = 0;

  /* Assume the callee may be tail called */

  cfun->machine->sibcall_fails = 0;
}
1671
1672 /* Returns the number of registers to allocate for a function argument. */
1673
1674 static int
1675 avr_num_arg_regs (enum machine_mode mode, const_tree type)
1676 {
1677 int size;
1678
1679 if (mode == BLKmode)
1680 size = int_size_in_bytes (type);
1681 else
1682 size = GET_MODE_SIZE (mode);
1683
1684 /* Align all function arguments to start in even-numbered registers.
1685 Odd-sized arguments leave holes above them. */
1686
1687 return (size + 1) & ~1;
1688 }
1689
1690 /* Controls whether a function argument is passed
1691 in a register, and which register. */
1692
1693 static rtx
1694 avr_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1695 const_tree type, bool named ATTRIBUTE_UNUSED)
1696 {
1697 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1698 int bytes = avr_num_arg_regs (mode, type);
1699
1700 if (cum->nregs && bytes <= cum->nregs)
1701 return gen_rtx_REG (mode, cum->regno - bytes);
1702
1703 return NULL_RTX;
1704 }
1705
/* Update the summarizer variable CUM to advance past an argument
   in the argument list.  Also records whether the argument lands in a
   call-saved register (which inhibits tail calls) and diagnoses
   arguments that would need a user-fixed register.  */

static void
avr_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
                          const_tree type, bool named ATTRIBUTE_UNUSED)
{
  CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
  int bytes = avr_num_arg_regs (mode, type);

  /* Registers are handed out downwards, byte-counted.  */
  cum->nregs -= bytes;
  cum->regno -= bytes;

  /* A parameter is being passed in a call-saved register. As the original
     contents of these regs has to be restored before leaving the function,
     a function must not pass arguments in call-saved regs in order to get
     tail-called. */

  if (cum->regno >= 8
      && cum->nregs >= 0
      && !call_used_regs[cum->regno])
    {
      /* FIXME: We ship info on failing tail-call in struct machine_function.
         This uses internals of calls.c:expand_call() and the way args_so_far
         is used. targetm.function_ok_for_sibcall() needs to be extended to
         pass &args_so_far, too. At present, CUMULATIVE_ARGS is target
         dependent so that such an extension is not wanted. */

      cfun->machine->sibcall_fails = 1;
    }

  /* Test if all registers needed by the ABI are actually available. If the
     user has fixed a GPR needed to pass an argument, an (implicit) function
     call would clobber that fixed register. See PR45099 for an example. */

  if (cum->regno >= 8
      && cum->nregs >= 0)
    {
      int regno;

      for (regno = cum->regno; regno < cum->regno + bytes; regno++)
        if (fixed_regs[regno])
          error ("Register %s is needed to pass a parameter but is fixed",
                 reg_names[regno]);
    }

  /* Once the register supply is exhausted, all further arguments go on
     the stack; reset for sub-sequence bookkeeping.  */
  if (cum->nregs <= 0)
    {
      cum->nregs = 0;
      cum->regno = FIRST_CUM_REG;
    }
}
1758
/* Implement `TARGET_FUNCTION_OK_FOR_SIBCALL' */
/* Decide whether we can make a sibling call to a function.  DECL is the
   declaration of the function being targeted by the call and EXP is the
   CALL_EXPR representing the call.

   Returns false when caller and callee would need incompatible
   epilogues (interrupt/signal/naked/OS_task/OS_main attributes) or
   when argument passing already ruled out a tail call.  */

static bool
avr_function_ok_for_sibcall (tree decl_callee, tree exp_callee)
{
  tree fntype_callee;

  /* Tail-calling must fail if callee-saved regs are used to pass
     function args.  We must not tail-call when `epilogue_restores'
     is used.  Unfortunately, we cannot tell at this point if that
     actually will happen or not, and we cannot step back from
     tail-calling. Thus, we inhibit tail-calling with -mcall-prologues. */

  if (cfun->machine->sibcall_fails
      || TARGET_CALL_PROLOGUES)
    {
      return false;
    }

  fntype_callee = TREE_TYPE (CALL_EXPR_FN (exp_callee));

  if (decl_callee)
    {
      decl_callee = TREE_TYPE (decl_callee);
    }
  else
    {
      /* Indirect call: strip pointer/array levels until we reach the
         function or method type itself.  */
      decl_callee = fntype_callee;

      while (FUNCTION_TYPE != TREE_CODE (decl_callee)
             && METHOD_TYPE != TREE_CODE (decl_callee))
        {
          decl_callee = TREE_TYPE (decl_callee);
        }
    }

  /* Ensure that caller and callee have compatible epilogues */

  if (interrupt_function_p (current_function_decl)
      || signal_function_p (current_function_decl)
      || avr_naked_function_p (decl_callee)
      || avr_naked_function_p (current_function_decl)
      /* FIXME: For OS_task and OS_main, we are over-conservative.
         This is due to missing documentation of these attributes
         and what they actually should do and should not do. */
      || (avr_OS_task_function_p (decl_callee)
          != avr_OS_task_function_p (current_function_decl))
      || (avr_OS_main_function_p (decl_callee)
          != avr_OS_main_function_p (current_function_decl)))
    {
      return false;
    }

  return true;
}
1817
/***********************************************************************
  Functions for outputting various mov's for a various modes
************************************************************************/

/* Output assembler code to move the QImode value OPERANDS[1] into
   OPERANDS[0].  If L is non-NULL, only store the number of output
   instructions in *L (length computation); otherwise the template is
   returned (or emitted via output_asm_insn for the multi-part cases).  */
const char *
output_movqi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (!l)
    l = &dummy;

  *l = 1;

  if (register_operand (dest, QImode))
    {
      if (register_operand (src, QImode)) /* mov r,r */
        {
          /* The stack pointer lives in I/O space, so use in/out.  */
          if (test_hard_reg_class (STACK_REG, dest))
            return AS2 (out,%0,%1);
          else if (test_hard_reg_class (STACK_REG, src))
            return AS2 (in,%0,%1);

          return AS2 (mov,%0,%1);
        }
      else if (CONSTANT_P (src))
        {
          if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
            return AS2 (ldi,%0,lo8(%1));

          /* Destination is not an ldi-capable register (r16..r31):
             synthesize the constant without ldi where possible.  */
          if (GET_CODE (src) == CONST_INT)
            {
              if (src == const0_rtx) /* mov r,L */
                return AS1 (clr,%0);
              else if (src == const1_rtx)
                {
                  *l = 2;
                  return (AS1 (clr,%0) CR_TAB
                          AS1 (inc,%0));
                }
              else if (src == constm1_rtx)
                {
                  /* Immediate constants -1 to any register */
                  *l = 2;
                  return (AS1 (clr,%0) CR_TAB
                          AS1 (dec,%0));
                }
              else
                {
                  int bit_nr = exact_log2 (INTVAL (src));

                  /* Single-bit constant: clear, set T, store bit.  */
                  if (bit_nr >= 0)
                    {
                      *l = 3;
                      if (!real_l)
                        output_asm_insn ((AS1 (clr,%0) CR_TAB
                                          "set"), operands);
                      if (!real_l)
                        avr_output_bld (operands, bit_nr);

                      return "";
                    }
                }
            }

          /* Last resort, larger than loading from memory.  Bounce the
             constant through r31 (an ldi-capable register).  */
          *l = 4;
          return (AS2 (mov,__tmp_reg__,r31) CR_TAB
                  AS2 (ldi,r31,lo8(%1)) CR_TAB
                  AS2 (mov,%0,r31) CR_TAB
                  AS2 (mov,r31,__tmp_reg__));
        }
      else if (GET_CODE (src) == MEM)
        return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *templ;

      /* Store a zero via the dedicated zero register.  */
      if (src == const0_rtx)
        operands[1] = zero_reg_rtx;

      templ = out_movqi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (templ, operands);

      operands[1] = src;
    }
  return "";
}
1911
1912
/* Output assembler code to move the HImode value OPERANDS[1] into
   OPERANDS[0].  If L is non-NULL, only store the number of output
   instructions in *L (length computation); otherwise the template is
   returned (or emitted via output_asm_insn for the multi-part cases).  */
const char *
output_movhi (rtx insn, rtx operands[], int *l)
{
  int dummy;
  rtx dest = operands[0];
  rtx src = operands[1];
  int *real_l = l;

  if (!l)
    l = &dummy;

  if (register_operand (dest, HImode))
    {
      if (register_operand (src, HImode)) /* mov r,r */
        {
          if (test_hard_reg_class (STACK_REG, dest))
            {
              if (AVR_HAVE_8BIT_SP)
                return *l = 1, AS2 (out,__SP_L__,%A1);
              /* Use simple load of stack pointer if no interrupts are
                 used. */
              else if (TARGET_NO_INTERRUPTS)
                return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
                                AS2 (out,__SP_L__,%A1));
              /* Otherwise the two SP writes must be atomic: disable
                 interrupts around them, restoring SREG in between.  */
              *l = 5;
              return (AS2 (in,__tmp_reg__,__SREG__)  CR_TAB
                      "cli"                          CR_TAB
                      AS2 (out,__SP_H__,%B1)         CR_TAB
                      AS2 (out,__SREG__,__tmp_reg__) CR_TAB
                      AS2 (out,__SP_L__,%A1));
            }
          else if (test_hard_reg_class (STACK_REG, src))
            {
              *l = 2;
              return (AS2 (in,%A0,__SP_L__) CR_TAB
                      AS2 (in,%B0,__SP_H__));
            }

          if (AVR_HAVE_MOVW)
            {
              *l = 1;
              return (AS2 (movw,%0,%1));
            }
          else
            {
              *l = 2;
              return (AS2 (mov,%A0,%A1) CR_TAB
                      AS2 (mov,%B0,%B1));
            }
        }
      else if (CONSTANT_P (src))
        {
          if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
            {
              *l = 2;
              return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
                      AS2 (ldi,%B0,hi8(%1)));
            }

          /* Destination cannot use ldi: synthesize the constant.  */
          if (GET_CODE (src) == CONST_INT)
            {
              if (src == const0_rtx) /* mov r,L */
                {
                  *l = 2;
                  return (AS1 (clr,%A0) CR_TAB
                          AS1 (clr,%B0));
                }
              else if (src == const1_rtx)
                {
                  *l = 3;
                  return (AS1 (clr,%A0) CR_TAB
                          AS1 (clr,%B0) CR_TAB
                          AS1 (inc,%A0));
                }
              else if (src == constm1_rtx)
                {
                  /* Immediate constants -1 to any register */
                  *l = 3;
                  return (AS1 (clr,%0)  CR_TAB
                          AS1 (dec,%A0) CR_TAB
                          AS2 (mov,%B0,%A0));
                }
              else
                {
                  int bit_nr = exact_log2 (INTVAL (src));

                  /* Single-bit constant: clear both bytes, set T,
                     store the bit.  */
                  if (bit_nr >= 0)
                    {
                      *l = 4;
                      if (!real_l)
                        output_asm_insn ((AS1 (clr,%A0) CR_TAB
                                          AS1 (clr,%B0) CR_TAB
                                          "set"), operands);
                      if (!real_l)
                        avr_output_bld (operands, bit_nr);

                      return "";
                    }
                }

              /* Only one byte is nonzero: bounce it through r31.  */
              if ((INTVAL (src) & 0xff) == 0)
                {
                  *l = 5;
                  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
                          AS1 (clr,%A0) CR_TAB
                          AS2 (ldi,r31,hi8(%1)) CR_TAB
                          AS2 (mov,%B0,r31) CR_TAB
                          AS2 (mov,r31,__tmp_reg__));
                }
              else if ((INTVAL (src) & 0xff00) == 0)
                {
                  *l = 5;
                  return (AS2 (mov,__tmp_reg__,r31) CR_TAB
                          AS2 (ldi,r31,lo8(%1)) CR_TAB
                          AS2 (mov,%A0,r31) CR_TAB
                          AS1 (clr,%B0) CR_TAB
                          AS2 (mov,r31,__tmp_reg__));
                }
            }

          /* Last resort, equal to loading from memory.  */
          *l = 6;
          return (AS2 (mov,__tmp_reg__,r31) CR_TAB
                  AS2 (ldi,r31,lo8(%1)) CR_TAB
                  AS2 (mov,%A0,r31) CR_TAB
                  AS2 (ldi,r31,hi8(%1)) CR_TAB
                  AS2 (mov,%B0,r31) CR_TAB
                  AS2 (mov,r31,__tmp_reg__));
        }
      else if (GET_CODE (src) == MEM)
        return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
    }
  else if (GET_CODE (dest) == MEM)
    {
      const char *templ;

      /* Store a zero via the dedicated zero register.  */
      if (src == const0_rtx)
        operands[1] = zero_reg_rtx;

      templ = out_movhi_mr_r (insn, operands, real_l);

      if (!real_l)
        output_asm_insn (templ, operands);

      operands[1] = src;
      return "";
    }
  fatal_insn ("invalid insn:", insn);
  return "";
}
2063
/* Output assembler for a QImode load: memory OP[1] into register OP[0].
   If L is non-NULL, store the number of output instructions in *L.
   Returns the assembler template.  */
const char *
out_movqi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (src, 0);
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      /* SREG and low I/O addresses can be read with "in".  */
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
        {
          *l = 1;
          return AS2 (in,%0,__SREG__);
        }
      if (optimize > 0 && io_address_operand (x, QImode))
        {
          *l = 1;
          return AS2 (in,%0,%m1-0x20);
        }
      *l = 2;
      return AS2 (lds,%0,%m1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x,0))
           && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      /* Displacement beyond the ldd range: adjust Y temporarily.  */
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
        {
          int disp = INTVAL (XEXP (x,1));
          if (REGNO (XEXP (x,0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
                            AS2 (ldd,%0,Y+63)     CR_TAB
                            AS2 (sbiw,r28,%o1-63));

          return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o1)) CR_TAB
                          AS2 (ld,%0,Y)            CR_TAB
                          AS2 (subi,r28,lo8(%o1))  CR_TAB
                          AS2 (sbci,r29,hi8(%o1)));
        }
      else if (REGNO (XEXP (x,0)) == REG_X)
        {
          /* This is a paranoid case LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal optimizing options.
             X has no displacement form, so adiw/sbiw around the load;
             the restore can be dropped when X is dead afterwards.  */
          if (reg_overlap_mentioned_p (dest, XEXP (x,0))
              || reg_unused_after (insn, XEXP (x,0)))
            return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
                            AS2 (ld,%0,X));

          return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
                          AS2 (ld,%0,X)      CR_TAB
                          AS2 (sbiw,r26,%o1));
        }
      *l = 1;
      return AS2 (ldd,%0,%1);
    }
  *l = 1;
  return AS2 (ld,%0,%1);
}
2131
/* Output assembler for a HImode load: memory OP[1] into register OP[0].
   If L is non-NULL, store the number of output instructions in *L.
   Returns the assembler template.  */
const char *
out_movhi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  /* "volatile" forces reading low byte first, even if less efficient,
     for correct operation with 16-bit I/O registers. */
  int mem_volatile_p = MEM_VOLATILE_P (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_dest == reg_base)         /* R = (R) */
        {
          /* Destination overlaps the pointer: keep the low byte in
             __tmp_reg__ until the pointer has been read through.  */
          *l = 3;
          return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
                  AS2 (ld,%B0,%1) CR_TAB
                  AS2 (mov,%A0,__tmp_reg__));
        }
      else if (reg_base == REG_X)        /* (R26) */
        {
          /* X has no displacement mode: post-increment, then restore
             X unless it is dead after this insn.  */
          if (reg_unused_after (insn, base))
            {
              *l = 2;
              return (AS2 (ld,%A0,X+) CR_TAB
                      AS2 (ld,%B0,X));
            }
          *l  = 3;
          return (AS2 (ld,%A0,X+) CR_TAB
                  AS2 (ld,%B0,X) CR_TAB
                  AS2 (sbiw,r26,1));
        }
      else                      /* (R)  */
        {
          *l = 2;
          return (AS2 (ld,%A0,%1)    CR_TAB
                  AS2 (ldd,%B0,%1+1));
        }
    }
  else if (GET_CODE (base) == PLUS)      /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      int reg_base = true_regnum (XEXP (base, 0));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
        {
          /* Displacement too large for ldd: temporarily adjust Y.  */
          if (REGNO (XEXP (base, 0)) != REG_Y)
            fatal_insn ("incorrect insn:",insn);

          if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
            return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
                            AS2 (ldd,%A0,Y+62)    CR_TAB
                            AS2 (ldd,%B0,Y+63)    CR_TAB
                            AS2 (sbiw,r28,%o1-62));

          return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
                          AS2 (sbci,r29,hi8(-%o1)) CR_TAB
                          AS2 (ld,%A0,Y)           CR_TAB
                          AS2 (ldd,%B0,Y+1)        CR_TAB
                          AS2 (subi,r28,lo8(%o1))  CR_TAB
                          AS2 (sbci,r29,hi8(%o1)));
        }
      if (reg_base == REG_X)
        {
          /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
             it but I have this situation with extremal
             optimization options.  */

          *l = 4;
          if (reg_base == reg_dest)
            return (AS2 (adiw,r26,%o1)      CR_TAB
                    AS2 (ld,__tmp_reg__,X+) CR_TAB
                    AS2 (ld,%B0,X)          CR_TAB
                    AS2 (mov,%A0,__tmp_reg__));

          return (AS2 (adiw,r26,%o1) CR_TAB
                  AS2 (ld,%A0,X+)    CR_TAB
                  AS2 (ld,%B0,X)     CR_TAB
                  AS2 (sbiw,r26,%o1+1));
        }

      if (reg_base == reg_dest)
        {
          /* Destination overlaps the base register: buffer the low
             byte in __tmp_reg__.  */
          *l = 3;
          return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
                  AS2 (ldd,%B0,%B1)         CR_TAB
                  AS2 (mov,%A0,__tmp_reg__));
        }

      *l = 2;
      return (AS2 (ldd,%A0,%A1) CR_TAB
              AS2 (ldd,%B0,%B1));
    }
  else if (GET_CODE (base) == PRE_DEC)  /* (--R) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      if (mem_volatile_p)
        {
          /* Volatile: must read the low byte first, so decrement the
             pointer by two up front and read upwards.  */
          if (REGNO (XEXP (base, 0)) == REG_X)
            {
              *l = 4;
              return (AS2 (sbiw,r26,2)  CR_TAB
                      AS2 (ld,%A0,X+)   CR_TAB
                      AS2 (ld,%B0,X)    CR_TAB
                      AS2 (sbiw,r26,1));
            }
          else
            {
              *l = 3;
              return (AS2 (sbiw,%r1,2)   CR_TAB
                      AS2 (ld,%A0,%p1)  CR_TAB
                      AS2 (ldd,%B0,%p1+1));
            }
        }

      *l = 2;
      return (AS2 (ld,%B0,%1) CR_TAB
              AS2 (ld,%A0,%1));
    }
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
        fatal_insn ("incorrect insn:", insn);

      *l = 2;
      return (AS2 (ld,%A0,%1)  CR_TAB
              AS2 (ld,%B0,%1));
    }
  else if (CONSTANT_ADDRESS_P (base))
    {
      /* Low I/O addresses are cheaper via "in".  */
      if (optimize > 0 && io_address_operand (base, HImode))
        {
          *l = 2;
          return (AS2 (in,%A0,%m1-0x20) CR_TAB
                  AS2 (in,%B0,%m1+1-0x20));
        }
      *l = 4;
      return (AS2 (lds,%A0,%m1) CR_TAB
              AS2 (lds,%B0,%m1+1));
    }

  fatal_insn ("unknown move insn:",insn);
  return "";
}
2284
/* Output the assembler template for a 32-bit (SImode) load from memory.
   OP[0] is the destination register, OP[1] the memory source; BASE is
   the source address.  The number of instructions in the returned
   template is stored through L (a local dummy is substituted when L is
   NULL, so callers may pass NULL when they only want the template).  */

const char *
out_movsi_r_mr (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (src, 0);
  int reg_dest = true_regnum (dest);
  int reg_base = true_regnum (base);
  int tmp;

  if (!l)
    l = &tmp;

  if (reg_base > 0)
    {
      if (reg_base == REG_X)        /* (R26) */
	{
	  if (reg_dest == REG_X)
	    /* "ld r26,-X" is undefined; load the upper bytes first and
	       work downwards, keeping r27 in __tmp_reg__ until r26 no
	       longer serves as the pointer.  */
	    return *l=7, (AS2 (adiw,r26,3)        CR_TAB
			  AS2 (ld,r29,X)          CR_TAB
			  AS2 (ld,r28,-X)         CR_TAB
			  AS2 (ld,__tmp_reg__,-X) CR_TAB
			  AS2 (sbiw,r26,1)        CR_TAB
			  AS2 (ld,r26,X)          CR_TAB
			  AS2 (mov,r27,__tmp_reg__));
	  else if (reg_dest == REG_X - 2)
	    /* Destination r24..r27 overlaps the X pointer (r26/r27):
	       buffer byte C in __tmp_reg__ so X stays intact until the
	       final load.  */
	    return *l=5, (AS2 (ld,%A0,X+)          CR_TAB
			  AS2 (ld,%B0,X+)          CR_TAB
			  AS2 (ld,__tmp_reg__,X+)  CR_TAB
			  AS2 (ld,%D0,X)           CR_TAB
			  AS2 (mov,%C0,__tmp_reg__));
	  else if (reg_unused_after (insn, base))
	    /* X is dead after this insn: no need to restore it.  */
	    return *l=4, (AS2 (ld,%A0,X+) CR_TAB
			  AS2 (ld,%B0,X+) CR_TAB
			  AS2 (ld,%C0,X+) CR_TAB
			  AS2 (ld,%D0,X));
	  else
	    return *l=5, (AS2 (ld,%A0,X+) CR_TAB
			  AS2 (ld,%B0,X+) CR_TAB
			  AS2 (ld,%C0,X+) CR_TAB
			  AS2 (ld,%D0,X)  CR_TAB
			  AS2 (sbiw,r26,3));
	}
      else
	{
	  if (reg_dest == reg_base)
	    /* Load top-down, buffering byte B, so the base register is
	       not overwritten before the last load uses it.  */
	    return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
			  AS2 (ldd,%C0,%1+2) CR_TAB
			  AS2 (ldd,__tmp_reg__,%1+1)  CR_TAB
			  AS2 (ld,%A0,%1)  CR_TAB
			  AS2 (mov,%B0,__tmp_reg__));
	  else if (reg_base == reg_dest + 2)
	    /* Base overlaps bytes C/D of the destination: buffer C.  */
	    return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
			  AS2 (ldd,%B0,%1+1) CR_TAB
			  AS2 (ldd,__tmp_reg__,%1+2)  CR_TAB
			  AS2 (ldd,%D0,%1+3)  CR_TAB
			  AS2 (mov,%C0,__tmp_reg__));
	  else
	    return *l=4, (AS2 (ld ,%A0,%1)   CR_TAB
			  AS2 (ldd,%B0,%1+1) CR_TAB
			  AS2 (ldd,%C0,%1+2) CR_TAB
			  AS2 (ldd,%D0,%1+3));
	}
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));

      if (disp > MAX_LD_OFFSET (GET_MODE (src)))
	{
	  /* Displacement too large for ldd: only Y can be adjusted
	     temporarily here.  */
	  if (REGNO (XEXP (base, 0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
	    return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
			    AS2 (ldd,%A0,Y+60)    CR_TAB
			    AS2 (ldd,%B0,Y+61)    CR_TAB
			    AS2 (ldd,%C0,Y+62)    CR_TAB
			    AS2 (ldd,%D0,Y+63)    CR_TAB
			    AS2 (sbiw,r28,%o1-60));

	  /* Offset beyond adiw's 6-bit range: add/subtract it with
	     subi/sbci on r28/r29 around the loads.  */
	  return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o1)) CR_TAB
			  AS2 (ld,%A0,Y)           CR_TAB
			  AS2 (ldd,%B0,Y+1)        CR_TAB
			  AS2 (ldd,%C0,Y+2)        CR_TAB
			  AS2 (ldd,%D0,Y+3)        CR_TAB
			  AS2 (subi,r28,lo8(%o1))  CR_TAB
			  AS2 (sbci,r29,hi8(%o1)));
	}

      reg_base = true_regnum (XEXP (base, 0));
      if (reg_base == REG_X)
	{
	  /* R = (X + d) */
	  if (reg_dest == REG_X)
	    {
	      *l = 7;
	      /* "ld r26,-X" is undefined */
	      return (AS2 (adiw,r26,%o1+3)    CR_TAB
		      AS2 (ld,r29,X)          CR_TAB
		      AS2 (ld,r28,-X)         CR_TAB
		      AS2 (ld,__tmp_reg__,-X) CR_TAB
		      AS2 (sbiw,r26,1)        CR_TAB
		      AS2 (ld,r26,X)          CR_TAB
		      AS2 (mov,r27,__tmp_reg__));
	    }
	  *l = 6;
	  if (reg_dest == REG_X - 2)
	    /* Destination r24..r27 overlaps X: buffer the byte that
	       would clobber r26 while X is still needed.  */
	    return (AS2 (adiw,r26,%o1)      CR_TAB
		    AS2 (ld,r24,X+)         CR_TAB
		    AS2 (ld,r25,X+)         CR_TAB
		    AS2 (ld,__tmp_reg__,X+) CR_TAB
		    AS2 (ld,r27,X)          CR_TAB
		    AS2 (mov,r26,__tmp_reg__));

	  return (AS2 (adiw,r26,%o1) CR_TAB
		  AS2 (ld,%A0,X+)    CR_TAB
		  AS2 (ld,%B0,X+)    CR_TAB
		  AS2 (ld,%C0,X+)    CR_TAB
		  AS2 (ld,%D0,X)     CR_TAB
		  AS2 (sbiw,r26,%o1+3));
	}
      if (reg_dest == reg_base)
	return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
		      AS2 (ldd,%C0,%C1) CR_TAB
		      AS2 (ldd,__tmp_reg__,%B1)  CR_TAB
		      AS2 (ldd,%A0,%A1) CR_TAB
		      AS2 (mov,%B0,__tmp_reg__));
      else if (reg_dest == reg_base - 2)
	return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
		      AS2 (ldd,%B0,%B1) CR_TAB
		      AS2 (ldd,__tmp_reg__,%C1)  CR_TAB
		      AS2 (ldd,%D0,%D1) CR_TAB
		      AS2 (mov,%C0,__tmp_reg__));
      return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
		    AS2 (ldd,%B0,%B1) CR_TAB
		    AS2 (ldd,%C0,%C1) CR_TAB
		    AS2 (ldd,%D0,%D1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    /* Pre-decrement: bytes come out highest-address first.  */
    return *l=4, (AS2 (ld,%D0,%1) CR_TAB
		  AS2 (ld,%C0,%1) CR_TAB
		  AS2 (ld,%B0,%1) CR_TAB
		  AS2 (ld,%A0,%1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, (AS2 (ld,%A0,%1) CR_TAB
		  AS2 (ld,%B0,%1) CR_TAB
		  AS2 (ld,%C0,%1) CR_TAB
		  AS2 (ld,%D0,%1));
  else if (CONSTANT_ADDRESS_P (base))
    return *l=8, (AS2 (lds,%A0,%m1)   CR_TAB
		  AS2 (lds,%B0,%m1+1) CR_TAB
		  AS2 (lds,%C0,%m1+2) CR_TAB
		  AS2 (lds,%D0,%m1+3));

  fatal_insn ("unknown move insn:",insn);
  return "";
}
2445
/* Output the assembler template for a 32-bit (SImode) store to memory.
   OP[0] is the memory destination, OP[1] the source register.  The
   instruction count of the returned template is stored through L
   (a local dummy is used when L is NULL).  */

const char *
out_movsi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  int tmp;

  if (!l)
    l = &tmp;

  if (CONSTANT_ADDRESS_P (base))
    return *l=8,(AS2 (sts,%m0,%A1) CR_TAB
		 AS2 (sts,%m0+1,%B1) CR_TAB
		 AS2 (sts,%m0+2,%C1) CR_TAB
		 AS2 (sts,%m0+3,%D1));
  if (reg_base > 0)                 /* (r) */
    {
      if (reg_base == REG_X)                /* (R26) */
	{
	  if (reg_src == REG_X)
	    {
	      /* "st X+,r26" is undefined: save r27 in __tmp_reg__ and
		 store the X pair's own value byte by byte.  */
	      if (reg_unused_after (insn, base))
		return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (st,X,r26)            CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X+,__tmp_reg__)   CR_TAB
			      AS2 (st,X+,r28)           CR_TAB
			      AS2 (st,X,r29));
	      else
		return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (st,X,r26)            CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X+,__tmp_reg__)   CR_TAB
			      AS2 (st,X+,r28)           CR_TAB
			      AS2 (st,X,r29)            CR_TAB
			      AS2 (sbiw,r26,3));
	    }
	  else if (reg_base == reg_src + 2)
	    {
	      /* Source bytes C/D live in r26/r27 (the pointer itself):
		 buffer them in __zero_reg__/__tmp_reg__ first, then
		 restore __zero_reg__ to 0 afterwards.  */
	      if (reg_unused_after (insn, base))
		return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
			      AS2 (mov,__tmp_reg__,%D1) CR_TAB
			      AS2 (st,%0+,%A1) CR_TAB
			      AS2 (st,%0+,%B1) CR_TAB
			      AS2 (st,%0+,__zero_reg__)  CR_TAB
			      AS2 (st,%0,__tmp_reg__)   CR_TAB
			      AS1 (clr,__zero_reg__));
	      else
		return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
			      AS2 (mov,__tmp_reg__,%D1) CR_TAB
			      AS2 (st,%0+,%A1) CR_TAB
			      AS2 (st,%0+,%B1) CR_TAB
			      AS2 (st,%0+,__zero_reg__)  CR_TAB
			      AS2 (st,%0,__tmp_reg__)   CR_TAB
			      AS1 (clr,__zero_reg__)     CR_TAB
			      AS2 (sbiw,r26,3));
	    }
	  return *l=5, (AS2 (st,%0+,%A1)  CR_TAB
			AS2 (st,%0+,%B1) CR_TAB
			AS2 (st,%0+,%C1) CR_TAB
			AS2 (st,%0,%D1)  CR_TAB
			AS2 (sbiw,r26,3));
	}
      else
	return *l=4, (AS2 (st,%0,%A1)    CR_TAB
		      AS2 (std,%0+1,%B1) CR_TAB
		      AS2 (std,%0+2,%C1) CR_TAB
		      AS2 (std,%0+3,%D1));
    }
  else if (GET_CODE (base) == PLUS) /* (R + i) */
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
	{
	  /* Displacement too large for std: only Y may be adjusted.  */
	  if (reg_base != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
			    AS2 (std,Y+60,%A1)    CR_TAB
			    AS2 (std,Y+61,%B1)    CR_TAB
			    AS2 (std,Y+62,%C1)    CR_TAB
			    AS2 (std,Y+63,%D1)    CR_TAB
			    AS2 (sbiw,r28,%o0-60));

	  return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
			  AS2 (st,Y,%A1)           CR_TAB
			  AS2 (std,Y+1,%B1)        CR_TAB
			  AS2 (std,Y+2,%C1)        CR_TAB
			  AS2 (std,Y+3,%D1)        CR_TAB
			  AS2 (subi,r28,lo8(%o0))  CR_TAB
			  AS2 (sbci,r29,hi8(%o0)));
	}
      if (reg_base == REG_X)
	{
	  /* (X + d) = R */
	  if (reg_src == REG_X)
	    {
	      /* Source is the X pair itself: snapshot r26/r27 before
		 the pointer is advanced, restore __zero_reg__ after.  */
	      *l = 9;
	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
		      AS2 (mov,__zero_reg__,r27) CR_TAB
		      AS2 (adiw,r26,%o0)         CR_TAB
		      AS2 (st,X+,__tmp_reg__)    CR_TAB
		      AS2 (st,X+,__zero_reg__)   CR_TAB
		      AS2 (st,X+,r28)            CR_TAB
		      AS2 (st,X,r29)             CR_TAB
		      AS1 (clr,__zero_reg__)     CR_TAB
		      AS2 (sbiw,r26,%o0+3));
	    }
	  else if (reg_src == REG_X - 2)
	    {
	      /* Source r24..r27 overlaps X in its upper half.  */
	      *l = 9;
	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
		      AS2 (mov,__zero_reg__,r27) CR_TAB
		      AS2 (adiw,r26,%o0)         CR_TAB
		      AS2 (st,X+,r24)            CR_TAB
		      AS2 (st,X+,r25)            CR_TAB
		      AS2 (st,X+,__tmp_reg__)    CR_TAB
		      AS2 (st,X,__zero_reg__)    CR_TAB
		      AS1 (clr,__zero_reg__)     CR_TAB
		      AS2 (sbiw,r26,%o0+3));
	    }
	  *l = 6;
	  return (AS2 (adiw,r26,%o0) CR_TAB
		  AS2 (st,X+,%A1)    CR_TAB
		  AS2 (st,X+,%B1)    CR_TAB
		  AS2 (st,X+,%C1)    CR_TAB
		  AS2 (st,X,%D1)     CR_TAB
		  AS2 (sbiw,r26,%o0+3));
	}
      return *l=4, (AS2 (std,%A0,%A1) CR_TAB
		    AS2 (std,%B0,%B1) CR_TAB
		    AS2 (std,%C0,%C1) CR_TAB
		    AS2 (std,%D0,%D1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=4, (AS2 (st,%0,%D1) CR_TAB
		  AS2 (st,%0,%C1) CR_TAB
		  AS2 (st,%0,%B1) CR_TAB
		  AS2 (st,%0,%A1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    return *l=4, (AS2 (st,%0,%A1)  CR_TAB
		  AS2 (st,%0,%B1) CR_TAB
		  AS2 (st,%0,%C1) CR_TAB
		  AS2 (st,%0,%D1));
  fatal_insn ("unknown move insn:",insn);
  return "";
}
2600
2601 const char *
2602 output_movsisf(rtx insn, rtx operands[], int *l)
2603 {
2604 int dummy;
2605 rtx dest = operands[0];
2606 rtx src = operands[1];
2607 int *real_l = l;
2608
2609 if (!l)
2610 l = &dummy;
2611
2612 if (register_operand (dest, VOIDmode))
2613 {
2614 if (register_operand (src, VOIDmode)) /* mov r,r */
2615 {
2616 if (true_regnum (dest) > true_regnum (src))
2617 {
2618 if (AVR_HAVE_MOVW)
2619 {
2620 *l = 2;
2621 return (AS2 (movw,%C0,%C1) CR_TAB
2622 AS2 (movw,%A0,%A1));
2623 }
2624 *l = 4;
2625 return (AS2 (mov,%D0,%D1) CR_TAB
2626 AS2 (mov,%C0,%C1) CR_TAB
2627 AS2 (mov,%B0,%B1) CR_TAB
2628 AS2 (mov,%A0,%A1));
2629 }
2630 else
2631 {
2632 if (AVR_HAVE_MOVW)
2633 {
2634 *l = 2;
2635 return (AS2 (movw,%A0,%A1) CR_TAB
2636 AS2 (movw,%C0,%C1));
2637 }
2638 *l = 4;
2639 return (AS2 (mov,%A0,%A1) CR_TAB
2640 AS2 (mov,%B0,%B1) CR_TAB
2641 AS2 (mov,%C0,%C1) CR_TAB
2642 AS2 (mov,%D0,%D1));
2643 }
2644 }
2645 else if (CONSTANT_P (src))
2646 {
2647 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2648 {
2649 *l = 4;
2650 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2651 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2652 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2653 AS2 (ldi,%D0,hhi8(%1)));
2654 }
2655
2656 if (GET_CODE (src) == CONST_INT)
2657 {
2658 const char *const clr_op0 =
2659 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2660 AS1 (clr,%B0) CR_TAB
2661 AS2 (movw,%C0,%A0))
2662 : (AS1 (clr,%A0) CR_TAB
2663 AS1 (clr,%B0) CR_TAB
2664 AS1 (clr,%C0) CR_TAB
2665 AS1 (clr,%D0));
2666
2667 if (src == const0_rtx) /* mov r,L */
2668 {
2669 *l = AVR_HAVE_MOVW ? 3 : 4;
2670 return clr_op0;
2671 }
2672 else if (src == const1_rtx)
2673 {
2674 if (!real_l)
2675 output_asm_insn (clr_op0, operands);
2676 *l = AVR_HAVE_MOVW ? 4 : 5;
2677 return AS1 (inc,%A0);
2678 }
2679 else if (src == constm1_rtx)
2680 {
2681 /* Immediate constants -1 to any register */
2682 if (AVR_HAVE_MOVW)
2683 {
2684 *l = 4;
2685 return (AS1 (clr,%A0) CR_TAB
2686 AS1 (dec,%A0) CR_TAB
2687 AS2 (mov,%B0,%A0) CR_TAB
2688 AS2 (movw,%C0,%A0));
2689 }
2690 *l = 5;
2691 return (AS1 (clr,%A0) CR_TAB
2692 AS1 (dec,%A0) CR_TAB
2693 AS2 (mov,%B0,%A0) CR_TAB
2694 AS2 (mov,%C0,%A0) CR_TAB
2695 AS2 (mov,%D0,%A0));
2696 }
2697 else
2698 {
2699 int bit_nr = exact_log2 (INTVAL (src));
2700
2701 if (bit_nr >= 0)
2702 {
2703 *l = AVR_HAVE_MOVW ? 5 : 6;
2704 if (!real_l)
2705 {
2706 output_asm_insn (clr_op0, operands);
2707 output_asm_insn ("set", operands);
2708 }
2709 if (!real_l)
2710 avr_output_bld (operands, bit_nr);
2711
2712 return "";
2713 }
2714 }
2715 }
2716
2717 /* Last resort, better than loading from memory. */
2718 *l = 10;
2719 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2720 AS2 (ldi,r31,lo8(%1)) CR_TAB
2721 AS2 (mov,%A0,r31) CR_TAB
2722 AS2 (ldi,r31,hi8(%1)) CR_TAB
2723 AS2 (mov,%B0,r31) CR_TAB
2724 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2725 AS2 (mov,%C0,r31) CR_TAB
2726 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2727 AS2 (mov,%D0,r31) CR_TAB
2728 AS2 (mov,r31,__tmp_reg__));
2729 }
2730 else if (GET_CODE (src) == MEM)
2731 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2732 }
2733 else if (GET_CODE (dest) == MEM)
2734 {
2735 const char *templ;
2736
2737 if (src == const0_rtx)
2738 operands[1] = zero_reg_rtx;
2739
2740 templ = out_movsi_mr_r (insn, operands, real_l);
2741
2742 if (!real_l)
2743 output_asm_insn (templ, operands);
2744
2745 operands[1] = src;
2746 return "";
2747 }
2748 fatal_insn ("invalid insn:", insn);
2749 return "";
2750 }
2751
/* Output the assembler template for an 8-bit (QImode) store to memory.
   OP[0] is the memory destination, OP[1] the source register; L (if
   non-NULL) receives the instruction count.  */

const char *
out_movqi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx x = XEXP (dest, 0);
  int dummy;

  if (!l)
    l = &dummy;

  if (CONSTANT_ADDRESS_P (x))
    {
      /* SREG gets a dedicated "out" so asm listings stay readable.  */
      if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
	{
	  *l = 1;
	  return AS2 (out,__SREG__,%1);
	}
      /* Addresses in I/O space can use the shorter "out".  */
      if (optimize > 0 && io_address_operand (x, QImode))
	{
	  *l = 1;
	  return AS2 (out,%m0-0x20,%1);
	}
      *l = 2;
      return AS2 (sts,%m0,%1);
    }
  /* memory access by reg+disp */
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x,0))
	   && GET_CODE (XEXP (x,1)) == CONST_INT)
    {
      /* NOTE(review): for QImode this ">= 63" test is equivalent to
	 "disp > MAX_LD_OFFSET" used by the wider-mode routines, since
	 GET_MODE_SIZE is 1 here.  */
      if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
	{
	  int disp = INTVAL (XEXP (x,1));
	  /* Only Y may be adjusted for out-of-range displacements.  */
	  if (REGNO (XEXP (x,0)) != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
			    AS2 (std,Y+63,%1)     CR_TAB
			    AS2 (sbiw,r28,%o0-63));

	  return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
			  AS2 (st,Y,%1)            CR_TAB
			  AS2 (subi,r28,lo8(%o0))  CR_TAB
			  AS2 (sbci,r29,hi8(%o0)));
	}
      else if (REGNO (XEXP (x,0)) == REG_X)
	{
	  /* X has no displacement form; adjust the pointer around the
	     store, buffering the source if it overlaps X.  */
	  if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
	    {
	      if (reg_unused_after (insn, XEXP (x,0)))
		return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
				AS2 (adiw,r26,%o0)       CR_TAB
				AS2 (st,X,__tmp_reg__));

	      return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
			      AS2 (adiw,r26,%o0)       CR_TAB
			      AS2 (st,X,__tmp_reg__)   CR_TAB
			      AS2 (sbiw,r26,%o0));
	    }
	  else
	    {
	      if (reg_unused_after (insn, XEXP (x,0)))
		return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
				AS2 (st,X,%1));

	      return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
			      AS2 (st,X,%1)      CR_TAB
			      AS2 (sbiw,r26,%o0));
	    }
	}
      *l = 1;
      return AS2 (std,%0,%1);
    }
  *l = 1;
  return AS2 (st,%0,%1);
}
2831
/* Output the assembler template for a 16-bit (HImode) store to memory.
   OP[0] is the memory destination, OP[1] the source register; L (if
   non-NULL) receives the instruction count.  */

const char *
out_movhi_mr_r (rtx insn, rtx op[], int *l)
{
  rtx dest = op[0];
  rtx src = op[1];
  rtx base = XEXP (dest, 0);
  int reg_base = true_regnum (base);
  int reg_src = true_regnum (src);
  /* "volatile" forces writing high byte first, even if less efficient,
     for correct operation with 16-bit I/O registers. */
  int mem_volatile_p = MEM_VOLATILE_P (dest);
  int tmp;

  if (!l)
    l = &tmp;
  if (CONSTANT_ADDRESS_P (base))
    {
      if (optimize > 0 && io_address_operand (base, HImode))
	{
	  *l = 2;
	  return (AS2 (out,%m0+1-0x20,%B1) CR_TAB
		  AS2 (out,%m0-0x20,%A1));
	}
      return *l = 4, (AS2 (sts,%m0+1,%B1) CR_TAB
		      AS2 (sts,%m0,%A1));
    }
  if (reg_base > 0)
    {
      if (reg_base == REG_X)
	{
	  if (reg_src == REG_X)
	    {
	      /* "st X+,r26" and "st -X,r26" are undefined. */
	      if (!mem_volatile_p && reg_unused_after (insn, src))
		return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (st,X,r26)            CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X,__tmp_reg__));
	      else
		/* Volatile (or X still live): high byte first.  */
		return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
			      AS2 (adiw,r26,1)          CR_TAB
			      AS2 (st,X,__tmp_reg__)    CR_TAB
			      AS2 (sbiw,r26,1)          CR_TAB
			      AS2 (st,X,r26));
	    }
	  else
	    {
	      if (!mem_volatile_p && reg_unused_after (insn, base))
		return *l=2, (AS2 (st,X+,%A1) CR_TAB
			      AS2 (st,X,%B1));
	      else
		return *l=3, (AS2 (adiw,r26,1) CR_TAB
			      AS2 (st,X,%B1)   CR_TAB
			      AS2 (st,-X,%A1));
	    }
	}
      else
	return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
		      AS2 (st,%0,%A1));
    }
  else if (GET_CODE (base) == PLUS)
    {
      int disp = INTVAL (XEXP (base, 1));
      reg_base = REGNO (XEXP (base, 0));
      if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
	{
	  /* Only Y may be adjusted for out-of-range displacements.  */
	  if (reg_base != REG_Y)
	    fatal_insn ("incorrect insn:",insn);

	  if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
	    return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
			    AS2 (std,Y+63,%B1)    CR_TAB
			    AS2 (std,Y+62,%A1)    CR_TAB
			    AS2 (sbiw,r28,%o0-62));

	  return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
			  AS2 (sbci,r29,hi8(-%o0)) CR_TAB
			  AS2 (std,Y+1,%B1)        CR_TAB
			  AS2 (st,Y,%A1)           CR_TAB
			  AS2 (subi,r28,lo8(%o0))  CR_TAB
			  AS2 (sbci,r29,hi8(%o0)));
	}
      if (reg_base == REG_X)
	{
	  /* (X + d) = R */
	  if (reg_src == REG_X)
	    {
	      /* Source is the X pair itself: snapshot it first.  */
	      *l = 7;
	      return (AS2 (mov,__tmp_reg__,r26)  CR_TAB
		      AS2 (mov,__zero_reg__,r27) CR_TAB
		      AS2 (adiw,r26,%o0+1)       CR_TAB
		      AS2 (st,X,__zero_reg__)    CR_TAB
		      AS2 (st,-X,__tmp_reg__)    CR_TAB
		      AS1 (clr,__zero_reg__)     CR_TAB
		      AS2 (sbiw,r26,%o0));
	    }
	  *l = 4;
	  return (AS2 (adiw,r26,%o0+1) CR_TAB
		  AS2 (st,X,%B1)       CR_TAB
		  AS2 (st,-X,%A1)      CR_TAB
		  AS2 (sbiw,r26,%o0));
	}
      return *l=2, (AS2 (std,%B0,%B1) CR_TAB
		    AS2 (std,%A0,%A1));
    }
  else if (GET_CODE (base) == PRE_DEC) /* (--R) */
    return *l=2, (AS2 (st,%0,%B1) CR_TAB
		  AS2 (st,%0,%A1));
  else if (GET_CODE (base) == POST_INC) /* (R++) */
    {
      if (mem_volatile_p)
	{
	  /* Keep the high-byte-first order even for post-increment by
	     storing via explicit pointer adjustment.  */
	  if (REGNO (XEXP (base, 0)) == REG_X)
	    {
	      *l = 4;
	      return (AS2 (adiw,r26,1)  CR_TAB
		      AS2 (st,X,%B1)    CR_TAB
		      AS2 (st,-X,%A1)   CR_TAB
		      AS2 (adiw,r26,2));
	    }
	  else
	    {
	      *l = 3;
	      return (AS2 (std,%p0+1,%B1) CR_TAB
		      AS2 (st,%p0,%A1)    CR_TAB
		      AS2 (adiw,%r0,2));
	    }
	}

      *l = 2;
      return (AS2 (st,%0,%A1)  CR_TAB
	      AS2 (st,%0,%B1));
    }
  fatal_insn ("unknown move insn:",insn);
  return "";
}
2968
2969 /* Return 1 if frame pointer for current function required. */
2970
2971 bool
2972 avr_frame_pointer_required_p (void)
2973 {
2974 return (cfun->calls_alloca
2975 || crtl->args.info.nregs == 0
2976 || get_frame_size () > 0);
2977 }
2978
2979 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2980
2981 static RTX_CODE
2982 compare_condition (rtx insn)
2983 {
2984 rtx next = next_real_insn (insn);
2985 RTX_CODE cond = UNKNOWN;
2986 if (next && GET_CODE (next) == JUMP_INSN)
2987 {
2988 rtx pat = PATTERN (next);
2989 rtx src = SET_SRC (pat);
2990 rtx t = XEXP (src, 0);
2991 cond = GET_CODE (t);
2992 }
2993 return cond;
2994 }
2995
2996 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2997
2998 static int
2999 compare_sign_p (rtx insn)
3000 {
3001 RTX_CODE cond = compare_condition (insn);
3002 return (cond == GE || cond == LT);
3003 }
3004
3005 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
3006 that needs to be swapped (GT, GTU, LE, LEU). */
3007
3008 int
3009 compare_diff_p (rtx insn)
3010 {
3011 RTX_CODE cond = compare_condition (insn);
3012 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
3013 }
3014
3015 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
3016
3017 int
3018 compare_eq_p (rtx insn)
3019 {
3020 RTX_CODE cond = compare_condition (insn);
3021 return (cond == EQ || cond == NE);
3022 }
3023
3024
/* Output a compare-against-zero for HImode operand OP, picking the
   cheapest form that is valid for the following conditional jump.
   L (if non-NULL) receives the instruction count.  */

const char *
out_tsthi (rtx insn, rtx op, int *l)
{
  /* Sign-only test (GE/LT): inspecting the high byte suffices.  */
  if (compare_sign_p (insn))
    {
      if (l) *l = 1;
      return AS1 (tst,%B0);
    }
  if (reg_unused_after (insn, op)
      && compare_eq_p (insn))
    {
      /* Faster than sbiw if we can clobber the operand. */
      if (l) *l = 1;
      return "or %A0,%B0";
    }
  /* sbiw with zero immediate works only on r24..r30 pairs.  */
  if (test_hard_reg_class (ADDW_REGS, op))
    {
      if (l) *l = 1;
      return AS2 (sbiw,%0,0);
    }
  /* General case: byte-wise compare against __zero_reg__.  */
  if (l) *l = 2;
  return (AS2 (cp,%A0,__zero_reg__) CR_TAB
          AS2 (cpc,%B0,__zero_reg__));
}
3051
3052
/* Output a compare-against-zero for SImode operand OP, picking the
   cheapest form valid for the following conditional jump.  L (if
   non-NULL) receives the instruction count.  */

const char *
out_tstsi (rtx insn, rtx op, int *l)
{
  /* Sign-only test (GE/LT): inspecting the top byte suffices.  */
  if (compare_sign_p (insn))
    {
      if (l) *l = 1;
      return AS1 (tst,%D0);
    }
  /* Start with sbiw when the low pair is in r24..r30.  */
  if (test_hard_reg_class (ADDW_REGS, op))
    {
      if (l) *l = 3;
      return (AS2 (sbiw,%A0,0) CR_TAB
              AS2 (cpc,%C0,__zero_reg__) CR_TAB
              AS2 (cpc,%D0,__zero_reg__));
    }
  /* General case: byte-wise compare against __zero_reg__.  */
  if (l) *l = 4;
  return (AS2 (cp,%A0,__zero_reg__) CR_TAB
          AS2 (cpc,%B0,__zero_reg__) CR_TAB
          AS2 (cpc,%C0,__zero_reg__) CR_TAB
          AS2 (cpc,%D0,__zero_reg__));
}
3076
3077
/* Generate asm equivalent for various shifts.
   Shift count is a CONST_INT, MEM or REG.
   This only handles cases that are not already
   carefully hand-optimized in ?sh??i3_out.

   TEMPL is the asm template for a single shift step; T_LEN is the
   number of instructions it expands to.  When LEN is non-NULL the
   routine only computes the instruction count; otherwise the code is
   emitted through output_asm_insn.  */

void
out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
		    int *len, int t_len)
{
  rtx op[10];
  /* NOTE(review): str is built with unchecked strcat; 500 bytes is a
     fixed upper bound assumed sufficient for all templates.  */
  char str[500];
  int second_label = 1;
  int saved_in_tmp = 0;
  int use_zero_reg = 0;

  op[0] = operands[0];
  op[1] = operands[1];
  op[2] = operands[2];
  op[3] = operands[3];
  str[0] = 0;

  if (len)
    *len = 1;

  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern means a scratch register (operand 3) is
	 available.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int count = INTVAL (operands[2]);
      int max_len = 10;  /* If larger than this, always use a loop. */

      if (count <= 0)
	{
	  /* Shift by zero (or negative): nothing to emit.  */
	  if (len)
	    *len = 0;
	  return;
	}

      if (count < 8 && !scratch)
	use_zero_reg = 1;

      if (optimize_size)
	max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));

      if (t_len * count <= max_len)
	{
	  /* Output shifts inline with no loop - faster. */
	  if (len)
	    *len = t_len * count;
	  else
	    {
	      while (count-- > 0)
		output_asm_insn (templ, op);
	    }

	  return;
	}

      if (scratch)
	{
	  /* Loop counter in the scratch register.  */
	  if (!len)
	    strcat (str, AS2 (ldi,%3,%2));
	}
      else if (use_zero_reg)
	{
	  /* Hack to save one word: use __zero_reg__ as loop counter.
	     Set one bit, then shift in a loop until it is 0 again. */

	  op[3] = zero_reg_rtx;
	  if (len)
	    *len = 2;
	  else
	    strcat (str, ("set" CR_TAB
			  AS2 (bld,%3,%2-1)));
	}
      else
	{
	  /* No scratch register available, use one from LD_REGS (saved in
	     __tmp_reg__) that doesn't overlap with registers to shift. */

	  op[3] = gen_rtx_REG (QImode,
			   ((true_regnum (operands[0]) - 1) & 15) + 16);
	  op[4] = tmp_reg_rtx;
	  saved_in_tmp = 1;

	  if (len)
	    *len = 3;  /* Includes "mov %3,%4" after the loop. */
	  else
	    strcat (str, (AS2 (mov,%4,%3) CR_TAB
			  AS2 (ldi,%3,%2)));
	}

      /* Constant count: counter preloaded, so jump straight into the
	 loop body.  */
      second_label = 0;
    }
  else if (GET_CODE (operands[2]) == MEM)
    {
      /* Count comes from memory: load it into __tmp_reg__ first.  */
      rtx op_mov[10];

      op[3] = op_mov[0] = tmp_reg_rtx;
      op_mov[1] = op[2];

      if (len)
	out_movqi_r_mr (insn, op_mov, len);
      else
	output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
    }
  else if (register_operand (operands[2], QImode))
    {
      if (reg_unused_after (insn, operands[2]))
	op[3] = op[2];
      else
	{
	  op[3] = tmp_reg_rtx;
	  if (!len)
	    strcat (str, (AS2 (mov,%3,%2) CR_TAB));
	}
    }
  else
    fatal_insn ("bad shift insn:", insn);

  if (second_label)
    {
      /* Variable count may be zero: test it before the first shift.  */
      if (len)
	++*len;
      else
	strcat (str, AS1 (rjmp,2f));
    }

  if (len)
    *len += t_len + 2;  /* template + dec + brXX */
  else
    {
      strcat (str, "\n1:\t");
      strcat (str, templ);
      strcat (str, second_label ? "\n2:\t" : "\n\t");
      strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
      strcat (str, CR_TAB);
      strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
      if (saved_in_tmp)
	strcat (str, (CR_TAB AS2 (mov,%3,%4)));
      output_asm_insn (str, op);
    }
}
3220
3221
/* 8bit shift left ((char)x << i)

   OPERANDS[0] is the destination/source register, OPERANDS[2] the shift
   count.  Constant counts are expanded to hand-picked sequences; other
   counts fall through to the generic loop in out_shift_with_cnt.
   LEN (if non-NULL) receives the instruction count.  */

const char *
ashlqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 8)
	    break;

	  /* Shift of 8 or more: the result is just zero.  */
	  *len = 1;
	  return AS1 (clr,%0);

	case 1:
	  *len = 1;
	  return AS1 (lsl,%0);

	case 2:
	  *len = 2;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 3:
	  *len = 3;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 4:
	  /* swap + mask needs andi, hence an LD_REGS destination.  */
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 2;
	      return (AS1 (swap,%0) CR_TAB
		      AS2 (andi,%0,0xf0));
	    }
	  *len = 4;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 5:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 3;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsl,%0)  CR_TAB
		      AS2 (andi,%0,0xe0));
	    }
	  *len = 5;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 6:
	  if (test_hard_reg_class (LD_REGS, operands[0]))
	    {
	      *len = 4;
	      return (AS1 (swap,%0) CR_TAB
		      AS1 (lsl,%0)  CR_TAB
		      AS1 (lsl,%0)  CR_TAB
		      AS2 (andi,%0,0xc0));
	    }
	  *len = 6;
	  return (AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0) CR_TAB
		  AS1 (lsl,%0));

	case 7:
	  /* Rotate bit 0 into carry, clear, rotate carry into bit 7.  */
	  *len = 3;
	  return (AS1 (ror,%0) CR_TAB
		  AS1 (clr,%0) CR_TAB
		  AS1 (ror,%0));
	}
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error.  Incorrect shift:", insn);

  /* Non-constant count: generic one-bit-per-iteration loop.  */
  out_shift_with_cnt (AS1 (lsl,%0),
		      insn, operands, len, 1);
  return "";
}
3317
3318
/* 16bit shift left ((short)x << i)

   OPERANDS[0] is the destination/source register pair, OPERANDS[2]
   the shift count; operand 3 is a scratch register when the insn
   pattern is a PARALLEL.  Constant counts get hand-picked sequences;
   the break statements fall through to the generic loop, with the
   comments after them noting the resulting loop length.  LEN (if
   non-NULL) receives the instruction count.  */

const char *
ashlhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 16)
	    break;

	  /* Shift of 16 or more: result is zero.  */
	  *len = 2;
	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 4:
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (ldi_ok)
	    {
	      /* Nibble-swap both bytes, then merge with the eor
		 exchange trick.  */
	      *len = 6;
	      return (AS1 (swap,%A0)      CR_TAB
		      AS1 (swap,%B0)      CR_TAB
		      AS2 (andi,%B0,0xf0) CR_TAB
		      AS2 (eor,%B0,%A0)   CR_TAB
		      AS2 (andi,%A0,0xf0) CR_TAB
		      AS2 (eor,%B0,%A0));
	    }
	  if (scratch)
	    {
	      *len = 7;
	      return (AS1 (swap,%A0)    CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS2 (ldi,%3,0xf0) CR_TAB
		      "and %B0,%3"      CR_TAB
		      AS2 (eor,%B0,%A0) CR_TAB
		      "and %A0,%3"      CR_TAB
		      AS2 (eor,%B0,%A0));
	    }
	  break;  /* optimize_size ? 6 : 8 */

	case 5:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  if (ldi_ok)
	    {
	      /* One explicit shift, then the case-4 nibble sequence.  */
	      *len = 8;
	      return (AS1 (lsl,%A0)       CR_TAB
		      AS1 (rol,%B0)       CR_TAB
		      AS1 (swap,%A0)      CR_TAB
		      AS1 (swap,%B0)      CR_TAB
		      AS2 (andi,%B0,0xf0) CR_TAB
		      AS2 (eor,%B0,%A0)   CR_TAB
		      AS2 (andi,%A0,0xf0) CR_TAB
		      AS2 (eor,%B0,%A0));
	    }
	  if (scratch)
	    {
	      *len = 9;
	      return (AS1 (lsl,%A0)     CR_TAB
		      AS1 (rol,%B0)     CR_TAB
		      AS1 (swap,%A0)    CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS2 (ldi,%3,0xf0) CR_TAB
		      "and %B0,%3"      CR_TAB
		      AS2 (eor,%B0,%A0) CR_TAB
		      "and %A0,%3"      CR_TAB
		      AS2 (eor,%B0,%A0));
	    }
	  break;  /* 10 */

	case 6:
	  if (optimize_size)
	    break;  /* scratch ? 5 : 6 */
	  /* Implement << 6 as >> 2 into a third byte, then move up.  */
	  *len = 9;
	  return (AS1 (clr,__tmp_reg__) CR_TAB
		  AS1 (lsr,%B0)         CR_TAB
		  AS1 (ror,%A0)         CR_TAB
		  AS1 (ror,__tmp_reg__) CR_TAB
		  AS1 (lsr,%B0)         CR_TAB
		  AS1 (ror,%A0)         CR_TAB
		  AS1 (ror,__tmp_reg__) CR_TAB
		  AS2 (mov,%B0,%A0)     CR_TAB
		  AS2 (mov,%A0,__tmp_reg__));

	case 7:
	  *len = 5;
	  return (AS1 (lsr,%B0)     CR_TAB
		  AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0)     CR_TAB
		  AS1 (ror,%B0)     CR_TAB
		  AS1 (ror,%A0));

	case 8:
	  /* Whole-byte shift: move the source low byte up.  */
	  return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
			    AS1 (clr,%A0));

	case 9:
	  *len = 3;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0)     CR_TAB
		  AS1 (lsl,%B0));

	case 10:
	  *len = 4;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0));

	case 11:
	  *len = 5;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0));

	case 12:
	  if (ldi_ok)
	    {
	      *len = 4;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS1 (clr,%A0)     CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS2 (andi,%B0,0xf0));
	    }
	  if (scratch)
	    {
	      *len = 5;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS1 (clr,%A0)     CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS2 (ldi,%3,0xf0) CR_TAB
		      "and %B0,%3");
	    }
	  *len = 6;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0));

	case 13:
	  if (ldi_ok)
	    {
	      *len = 5;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS1 (clr,%A0)     CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS1 (lsl,%B0)     CR_TAB
		      AS2 (andi,%B0,0xe0));
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      /* Multiply by 0x20 (= 1 << 5) via the hardware
		 multiplier; r0/r1 are clobbered, so __zero_reg__ is
		 cleared again afterwards.  */
	      *len = 5;
	      return (AS2 (ldi,%3,0x20) CR_TAB
		      AS2 (mul,%A0,%3)  CR_TAB
		      AS2 (mov,%B0,r0)  CR_TAB
		      AS1 (clr,%A0)     CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  if (scratch)
	    {
	      *len = 6;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS1 (clr,%A0)     CR_TAB
		      AS1 (swap,%B0)    CR_TAB
		      AS1 (lsl,%B0)     CR_TAB
		      AS2 (ldi,%3,0xe0) CR_TAB
		      "and %B0,%3");
	    }
	  if (AVR_HAVE_MUL)
	    {
	      /* Build the 0x20 multiplier in r1 with set + bld.  */
	      *len = 6;
	      return ("set"            CR_TAB
		      AS2 (bld,r1,5)   CR_TAB
		      AS2 (mul,%A0,r1) CR_TAB
		      AS2 (mov,%B0,r0) CR_TAB
		      AS1 (clr,%A0)    CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  *len = 7;
	  return (AS2 (mov,%B0,%A0) CR_TAB
		  AS1 (clr,%A0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0)     CR_TAB
		  AS1 (lsl,%B0));

	case 14:
	  if (AVR_HAVE_MUL && ldi_ok)
	    {
	      /* Multiply by 0x40 (= 1 << 6).  */
	      *len = 5;
	      return (AS2 (ldi,%B0,0x40) CR_TAB
		      AS2 (mul,%A0,%B0)  CR_TAB
		      AS2 (mov,%B0,r0)   CR_TAB
		      AS1 (clr,%A0)      CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (AVR_HAVE_MUL && scratch)
	    {
	      *len = 5;
	      return (AS2 (ldi,%3,0x40) CR_TAB
		      AS2 (mul,%A0,%3)  CR_TAB
		      AS2 (mov,%B0,r0)  CR_TAB
		      AS1 (clr,%A0)     CR_TAB
		      AS1 (clr,__zero_reg__));
	    }
	  if (optimize_size && ldi_ok)
	    {
	      /* Small loop shifting the high byte six times.  */
	      *len = 5;
	      return (AS2 (mov,%B0,%A0) CR_TAB
		      AS2 (ldi,%A0,6) "\n1:\t"
		      AS1 (lsl,%B0)     CR_TAB
		      AS1 (dec,%A0)     CR_TAB
		      AS1 (brne,1b));
	    }
	  if (optimize_size && scratch)
	    break;  /* 5 */
	  /* Implement << 14 as >> 2 into the high byte.  */
	  *len = 6;
	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
		  AS1 (ror,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
		  AS1 (ror,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 15:
	  /* Only bit 0 survives, rotated into bit 15.  */
	  *len = 4;
	  return (AS1 (clr,%B0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
		  AS1 (ror,%B0) CR_TAB
		  AS1 (clr,%A0));
	}
      /* Fell out of the switch: restore the caller's LEN pointer so
	 out_shift_with_cnt sees the original (possibly NULL) value.  */
      len = t;
    }
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
		       AS1 (rol,%B0)),
		       insn, operands, len, 2);
  return "";
}
3575
3576
/* 32bit shift left ((long)x << i)

   OPERANDS[0] is the destination register quad, OPERANDS[2] the shift
   count.  Whole-byte constant counts (8/16/24) and a few extremes are
   expanded to hand-picked sequences; everything else falls through to
   the generic loop.  LEN (if non-NULL) receives the instruction count.  */

const char *
ashlsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
	len = &k;

      switch (INTVAL (operands[2]))
	{
	default:
	  if (INTVAL (operands[2]) < 32)
	    break;

	  /* Shift of 32 or more: result is zero.  */
	  if (AVR_HAVE_MOVW)
	    return *len = 3, (AS1 (clr,%D0) CR_TAB
			      AS1 (clr,%C0) CR_TAB
			      AS2 (movw,%A0,%C0));
	  *len = 4;
	  return (AS1 (clr,%D0) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));

	case 8:
	  {
	    /* Byte-move: order depends on how dest/src overlap.  */
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    *len = 4;
	    if (reg0 >= reg1)
	      return (AS2 (mov,%D0,%C1) CR_TAB
		      AS2 (mov,%C0,%B1) CR_TAB
		      AS2 (mov,%B0,%A1) CR_TAB
		      AS1 (clr,%A0));
	    else
	      return (AS1 (clr,%A0)     CR_TAB
		      AS2 (mov,%B0,%A1) CR_TAB
		      AS2 (mov,%C0,%B1) CR_TAB
		      AS2 (mov,%D0,%C1));
	  }

	case 16:
	  {
	    int reg0 = true_regnum (operands[0]);
	    int reg1 = true_regnum (operands[1]);
	    /* If dest's high pair IS src's low pair, only clear.  */
	    if (reg0 + 2 == reg1)
	      return *len = 2, (AS1 (clr,%B0) CR_TAB
				AS1 (clr,%A0));
	    if (AVR_HAVE_MOVW)
	      return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
				AS1 (clr,%B0)      CR_TAB
				AS1 (clr,%A0));
	    else
	      return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
				AS2 (mov,%D0,%B1) CR_TAB
				AS1 (clr,%B0)     CR_TAB
				AS1 (clr,%A0));
	  }

	case 24:
	  *len = 4;
	  return (AS2 (mov,%D0,%A1) CR_TAB
		  AS1 (clr,%C0)     CR_TAB
		  AS1 (clr,%B0)     CR_TAB
		  AS1 (clr,%A0));

	case 31:
	  /* Only bit 0 survives, rotated into bit 31.  */
	  *len = 6;
	  return (AS1 (clr,%D0) CR_TAB
		  AS1 (lsr,%A0) CR_TAB
		  AS1 (ror,%D0) CR_TAB
		  AS1 (clr,%C0) CR_TAB
		  AS1 (clr,%B0) CR_TAB
		  AS1 (clr,%A0));
	}
      /* Restore the caller's LEN pointer (possibly NULL) for the
	 generic fallback.  */
      len = t;
    }
  out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
		       AS1 (rol,%B0) CR_TAB
		       AS1 (rol,%C0) CR_TAB
		       AS1 (rol,%D0)),
		       insn, operands, len, 4);
  return "";
}
3666
3667 /* 8bit arithmetic shift right ((signed char)x >> i) */
3668
/* Output assembler for an 8-bit (QImode) arithmetic shift right.
   OPERANDS: %0 = destination (also source), %2 = shift count.
   If LEN is non-NULL, store the sequence length there instead.  */

const char *
ashrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 1:
          *len = 1;
          return AS1 (asr,%0);

        case 2:
          *len = 2;
          return (AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0));

        case 3:
          *len = 3;
          return (AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0));

        case 4:
          *len = 4;
          return (AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0));

        case 5:
          *len = 5;
          return (AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0) CR_TAB
                  AS1 (asr,%0));

        case 6:
          /* Save bit 6 in T, smear the sign bit through the whole byte
             with lsl + sbc, then restore bit 6 as the new bit 0.  */
          *len = 4;
          return (AS2 (bst,%0,6) CR_TAB
                  AS1 (lsl,%0) CR_TAB
                  AS2 (sbc,%0,%0) CR_TAB
                  AS2 (bld,%0,0));

        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* fall through */

        case 7:
          /* Shift count >= 7: every result bit is a copy of the sign.  */
          *len = 2;
          return (AS1 (lsl,%0) CR_TAB
                  AS2 (sbc,%0,%0));
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  /* Variable shift count: emit the generic one-bit-per-iteration loop.  */
  out_shift_with_cnt (AS1 (asr,%0),
                      insn, operands, len, 1);
  return "";
}
3737
3738
3739 /* 16bit arithmetic shift right ((signed short)x >> i) */
3740
/* Output assembler for a 16-bit (HImode) arithmetic shift right.
   OPERANDS: %0 = destination, %1 = source, %2 = shift count,
   %3 = scratch register when the insn is a PARALLEL.
   If LEN is non-NULL, store the sequence length there instead.  */

const char *
ashrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* A PARALLEL pattern indicates a scratch register %3 is available;
         ldi_ok tells whether %0 is in LD_REGS (can use ldi/andi).  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 4:
        case 5:
          /* XXX try to optimize this too? */
          break;

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          *len = 8;
          return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
                  AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,__tmp_reg__) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (lsl,__tmp_reg__) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS1 (rol,%B0));

        case 7:
          *len = 4;
          return (AS1 (lsl,%A0) CR_TAB
                  AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS2 (sbc,%B0,%B0));

        case 8:
          {
            /* Byte shift: high byte moves down; high byte becomes the
               sign extension (sbc after lsl, or sbrc/dec).  */
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1)
              return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
                                AS1 (lsl,%B0) CR_TAB
                                AS2 (sbc,%B0,%B0));
            else
              return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
                                AS1 (clr,%B0) CR_TAB
                                AS2 (sbrc,%A0,7) CR_TAB
                                AS1 (dec,%B0));
          }

        case 9:
          *len = 4;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0));

        case 10:
          *len = 5;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0));

        case 11:
          /* muls by 0x20 performs a signed shift-left by 5 of the high
             byte, i.e. an arithmetic right shift of the word by 11.  */
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return (AS2 (ldi,%A0,0x20) CR_TAB
                      AS2 (muls,%B0,%A0) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS2 (sbc,%B0,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0));

        case 12:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return (AS2 (ldi,%A0,0x10) CR_TAB
                      AS2 (muls,%B0,%A0) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS2 (sbc,%B0,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 7;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0));

        case 13:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return (AS2 (ldi,%A0,0x08) CR_TAB
                      AS2 (muls,%B0,%A0) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS2 (sbc,%B0,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size)
            break;  /* scratch ? 5 : 7 */
          *len = 8;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0) CR_TAB
                  AS1 (asr,%A0));

        case 14:
          *len = 5;
          return (AS1 (lsl,%B0) CR_TAB
                  AS2 (sbc,%A0,%A0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS2 (mov,%B0,%A0) CR_TAB
                  AS1 (rol,%A0));

        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* fall through */

        case 15:
          /* Shift >= 15: smear the sign through both bytes.  */
          return *len = 3, (AS1 (lsl,%B0) CR_TAB
                            AS2 (sbc,%A0,%A0) CR_TAB
                            AS2 (mov,%B0,%A0));
        }
      len = t;
    }
  out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
                       AS1 (ror,%A0)),
                      insn, operands, len, 2);
  return "";
}
3901
3902
3903 /* 32bit arithmetic shift right ((signed long)x >> i) */
3904
/* Output assembler for a 32-bit (SImode) arithmetic shift right.
   OPERANDS: %0 = destination, %1 = source, %2 = shift count.
   If LEN is non-NULL, store the sequence length there instead.  */

const char *
ashrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        case 8:
          {
            /* Byte shift with sign extension into the high byte; copy
               order depends on register overlap.  */
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len=6;
            if (reg0 <= reg1)
              return (AS2 (mov,%A0,%B1) CR_TAB
                      AS2 (mov,%B0,%C1) CR_TAB
                      AS2 (mov,%C0,%D1) CR_TAB
                      AS1 (clr,%D0)     CR_TAB
                      AS2 (sbrc,%C0,7)  CR_TAB
                      AS1 (dec,%D0));
            else
              return (AS1 (clr,%D0)     CR_TAB
                      AS2 (sbrc,%D1,7)  CR_TAB
                      AS1 (dec,%D0)     CR_TAB
                      AS2 (mov,%C0,%D1) CR_TAB
                      AS2 (mov,%B0,%C1) CR_TAB
                      AS2 (mov,%A0,%B1));
          }

        case 16:
          {
            /* Two-byte shift: high word moves down, then sign-extend
               (com sets %D0 to 0xff when bit 7 of %B0 is set).  */
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1 + 2)
              return *len = 4, (AS1 (clr,%D0)     CR_TAB
                                AS2 (sbrc,%B0,7)  CR_TAB
                                AS1 (com,%D0)     CR_TAB
                                AS2 (mov,%C0,%D0));
            if (AVR_HAVE_MOVW)
              return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
                                AS1 (clr,%D0)      CR_TAB
                                AS2 (sbrc,%B0,7)   CR_TAB
                                AS1 (com,%D0)      CR_TAB
                                AS2 (mov,%C0,%D0));
            else
              return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
                                AS2 (mov,%A0,%C1) CR_TAB
                                AS1 (clr,%D0)     CR_TAB
                                AS2 (sbrc,%B0,7)  CR_TAB
                                AS1 (com,%D0)     CR_TAB
                                AS2 (mov,%C0,%D0));
          }

        case 24:
          return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
                            AS1 (clr,%D0)     CR_TAB
                            AS2 (sbrc,%A0,7)  CR_TAB
                            AS1 (com,%D0)     CR_TAB
                            AS2 (mov,%B0,%D0) CR_TAB
                            AS2 (mov,%C0,%D0));

        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* fall through */

        case 31:
          /* Shift >= 31: every result bit is a copy of the sign bit.  */
          if (AVR_HAVE_MOVW)
            return *len = 4, (AS1 (lsl,%D0)     CR_TAB
                              AS2 (sbc,%A0,%A0) CR_TAB
                              AS2 (mov,%B0,%A0) CR_TAB
                              AS2 (movw,%C0,%A0));
          else
            return *len = 5, (AS1 (lsl,%D0)     CR_TAB
                              AS2 (sbc,%A0,%A0) CR_TAB
                              AS2 (mov,%B0,%A0) CR_TAB
                              AS2 (mov,%C0,%A0) CR_TAB
                              AS2 (mov,%D0,%A0));
        }
      len = t;
    }
  out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
                       AS1 (ror,%C0) CR_TAB
                       AS1 (ror,%B0) CR_TAB
                       AS1 (ror,%A0)),
                      insn, operands, len, 4);
  return "";
}
4000
4001 /* 8bit logic shift right ((unsigned char)x >> i) */
4002
/* Output assembler for an 8-bit (QImode) logical shift right.
   OPERANDS: %0 = destination (also source), %2 = shift count.
   If LEN is non-NULL, store the sequence length there instead.  */

const char *
lshrqi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 8)
            break;

          /* Shift count >= 8: result is zero.  */
          *len = 1;
          return AS1 (clr,%0);

        case 1:
          *len = 1;
          return AS1 (lsr,%0);

        case 2:
          *len = 2;
          return (AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0));
        case 3:
          *len = 3;
          return (AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0));

        case 4:
          /* swap exchanges nibbles; mask the high nibble away.  Only
             possible when %0 accepts immediate operands (LD_REGS).  */
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len=2;
              return (AS1 (swap,%0) CR_TAB
                      AS2 (andi,%0,0x0f));
            }
          *len = 4;
          return (AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0));

        case 5:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 3;
              return (AS1 (swap,%0) CR_TAB
                      AS1 (lsr,%0) CR_TAB
                      AS2 (andi,%0,0x7));
            }
          *len = 5;
          return (AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0));

        case 6:
          if (test_hard_reg_class (LD_REGS, operands[0]))
            {
              *len = 4;
              return (AS1 (swap,%0) CR_TAB
                      AS1 (lsr,%0) CR_TAB
                      AS1 (lsr,%0) CR_TAB
                      AS2 (andi,%0,0x3));
            }
          *len = 6;
          return (AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0) CR_TAB
                  AS1 (lsr,%0));

        case 7:
          /* Move bit 7 into carry, clear, then rotate carry into bit 0.  */
          *len = 3;
          return (AS1 (rol,%0) CR_TAB
                  AS1 (clr,%0) CR_TAB
                  AS1 (rol,%0));
        }
    }
  else if (CONSTANT_P (operands[2]))
    fatal_insn ("internal compiler error. Incorrect shift:", insn);

  out_shift_with_cnt (AS1 (lsr,%0),
                      insn, operands, len, 1);
  return "";
}
4095
4096 /* 16bit logic shift right ((unsigned short)x >> i) */
4097
/* Output assembler for a 16-bit (HImode) logical shift right.
   OPERANDS: %0 = destination, %1 = source, %2 = shift count,
   %3 = scratch register when the insn pattern is a PARALLEL.
   If LEN is non-NULL, store the sequence length there instead.  */

const char *
lshrhi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      /* PARALLEL means a scratch register %3 is available; ldi_ok tells
         whether %0 is in LD_REGS and thus accepts ldi/andi.  */
      int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
      int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 16)
            break;

          /* Shift count >= 16: result is zero.  */
          *len = 2;
          return (AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%A0));

        case 4:
          if (optimize_size && scratch)
            break;  /* 5 */
          if (ldi_ok)
            {
              /* Nibble-swap both bytes and recombine with the eor trick.  */
              *len = 6;
              return (AS1 (swap,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS2 (andi,%A0,0x0f) CR_TAB
                      AS2 (eor,%A0,%B0) CR_TAB
                      AS2 (andi,%B0,0x0f) CR_TAB
                      AS2 (eor,%A0,%B0));
            }
          if (scratch)
            {
              *len = 7;
              return (AS1 (swap,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS2 (ldi,%3,0x0f) CR_TAB
                      "and %A0,%3" CR_TAB
                      AS2 (eor,%A0,%B0) CR_TAB
                      "and %B0,%3" CR_TAB
                      AS2 (eor,%A0,%B0));
            }
          break;  /* optimize_size ? 6 : 8 */

        case 5:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          if (ldi_ok)
            {
              *len = 8;
              return (AS1 (lsr,%B0) CR_TAB
                      AS1 (ror,%A0) CR_TAB
                      AS1 (swap,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS2 (andi,%A0,0x0f) CR_TAB
                      AS2 (eor,%A0,%B0) CR_TAB
                      AS2 (andi,%B0,0x0f) CR_TAB
                      AS2 (eor,%A0,%B0));
            }
          if (scratch)
            {
              *len = 9;
              return (AS1 (lsr,%B0) CR_TAB
                      AS1 (ror,%A0) CR_TAB
                      AS1 (swap,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS2 (ldi,%3,0x0f) CR_TAB
                      "and %A0,%3" CR_TAB
                      AS2 (eor,%A0,%B0) CR_TAB
                      "and %B0,%3" CR_TAB
                      AS2 (eor,%A0,%B0));
            }
          break;  /* 10 */

        case 6:
          if (optimize_size)
            break;  /* scratch ? 5 : 6 */
          /* Shift LEFT by 2 into __tmp_reg__, then move bytes down,
             which amounts to a logical right shift by 6.  */
          *len = 9;
          return (AS1 (clr,__tmp_reg__) CR_TAB
                  AS1 (lsl,%A0) CR_TAB
                  AS1 (rol,%B0) CR_TAB
                  AS1 (rol,__tmp_reg__) CR_TAB
                  AS1 (lsl,%A0) CR_TAB
                  AS1 (rol,%B0) CR_TAB
                  AS1 (rol,__tmp_reg__) CR_TAB
                  AS2 (mov,%A0,%B0) CR_TAB
                  AS2 (mov,%B0,__tmp_reg__));

        case 7:
          *len = 5;
          return (AS1 (lsl,%A0) CR_TAB
                  AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS2 (sbc,%B0,%B0) CR_TAB
                  AS1 (neg,%B0));

        case 8:
          /* Byte shift: high byte down, high byte cleared.  */
          return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
                            AS1 (clr,%B0));

        case 9:
          *len = 3;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0));

        case 10:
          *len = 4;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0));

        case 11:
          *len = 5;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0));

        case 12:
          if (ldi_ok)
            {
              *len = 4;
              return (AS2 (mov,%A0,%B0) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS2 (andi,%A0,0x0f));
            }
          if (scratch)
            {
              *len = 5;
              return (AS2 (mov,%A0,%B0) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS2 (ldi,%3,0x0f) CR_TAB
                      "and %A0,%3");
            }
          *len = 6;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0));

        case 13:
          if (ldi_ok)
            {
              *len = 5;
              return (AS2 (mov,%A0,%B0) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS1 (lsr,%A0) CR_TAB
                      AS2 (andi,%A0,0x07));
            }
          if (AVR_HAVE_MUL && scratch)
            {
              /* mul by 0x08 shifts left by 3, so the high result byte in
                 r1 holds the source shifted right by 5; combined with the
                 byte move this gives a shift right by 13.  */
              *len = 5;
              return (AS2 (ldi,%3,0x08) CR_TAB
                      AS2 (mul,%B0,%3) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          if (scratch)
            {
              *len = 6;
              return (AS2 (mov,%A0,%B0) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (swap,%A0) CR_TAB
                      AS1 (lsr,%A0) CR_TAB
                      AS2 (ldi,%3,0x07) CR_TAB
                      "and %A0,%3");
            }
          if (AVR_HAVE_MUL)
            {
              /* Build the 0x08 multiplier in r1 via T flag, since no
                 immediate-capable register is available.  */
              *len = 6;
              return ("set" CR_TAB
                      AS2 (bld,r1,3) CR_TAB
                      AS2 (mul,%B0,r1) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          *len = 7;
          return (AS2 (mov,%A0,%B0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0) CR_TAB
                  AS1 (lsr,%A0));

        case 14:
          if (AVR_HAVE_MUL && ldi_ok)
            {
              *len = 5;
              return (AS2 (ldi,%A0,0x04) CR_TAB
                      AS2 (mul,%B0,%A0) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (AVR_HAVE_MUL && scratch)
            {
              *len = 5;
              return (AS2 (ldi,%3,0x04) CR_TAB
                      AS2 (mul,%B0,%3) CR_TAB
                      AS2 (mov,%A0,r1) CR_TAB
                      AS1 (clr,%B0) CR_TAB
                      AS1 (clr,__zero_reg__));
            }
          if (optimize_size && ldi_ok)
            {
              /* Small counted loop: 1b is a local assembler label.  */
              *len = 5;
              return (AS2 (mov,%A0,%B0) CR_TAB
                      AS2 (ldi,%B0,6) "\n1:\t"
                      AS1 (lsr,%A0) CR_TAB
                      AS1 (dec,%B0) CR_TAB
                      AS1 (brne,1b));
            }
          if (optimize_size && scratch)
            break;  /* 5 */
          *len = 6;
          return (AS1 (clr,%A0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS1 (clr,%B0));

        case 15:
          /* Only bit 15 survives; rotate it through carry into bit 0.  */
          *len = 4;
          return (AS1 (clr,%A0) CR_TAB
                  AS1 (lsl,%B0) CR_TAB
                  AS1 (rol,%A0) CR_TAB
                  AS1 (clr,%B0));
        }
      len = t;
    }
  out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
                       AS1 (ror,%A0)),
                      insn, operands, len, 2);
  return "";
}
4352
/* 32bit logic shift right ((unsigned long)x >> i) */
4354
/* Output assembler for a 32-bit (SImode) logical shift right.
   OPERANDS: %0 = destination, %1 = source, %2 = shift count.
   If LEN is non-NULL, store the sequence length there instead.  */

const char *
lshrsi3_out (rtx insn, rtx operands[], int *len)
{
  if (GET_CODE (operands[2]) == CONST_INT)
    {
      int k;
      int *t = len;

      if (!len)
        len = &k;

      switch (INTVAL (operands[2]))
        {
        default:
          if (INTVAL (operands[2]) < 32)
            break;

          /* Shift count >= 32: result is zero.  */
          if (AVR_HAVE_MOVW)
            return *len = 3, (AS1 (clr,%D0) CR_TAB
                              AS1 (clr,%C0) CR_TAB
                              AS2 (movw,%A0,%C0));
          *len = 4;
          return (AS1 (clr,%D0) CR_TAB
                  AS1 (clr,%C0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%A0));

        case 8:
          {
            /* Byte shift: move bytes down; copy order depends on
               register overlap.  */
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);
            *len = 4;
            if (reg0 <= reg1)
              return (AS2 (mov,%A0,%B1) CR_TAB
                      AS2 (mov,%B0,%C1) CR_TAB
                      AS2 (mov,%C0,%D1) CR_TAB
                      AS1 (clr,%D0));
            else
              return (AS1 (clr,%D0) CR_TAB
                      AS2 (mov,%C0,%D1) CR_TAB
                      AS2 (mov,%B0,%C1) CR_TAB
                      AS2 (mov,%A0,%B1));
          }

        case 16:
          {
            /* Two-byte shift: high word down, high word cleared.  */
            int reg0 = true_regnum (operands[0]);
            int reg1 = true_regnum (operands[1]);

            if (reg0 == reg1 + 2)
              return *len = 2, (AS1 (clr,%C0) CR_TAB
                                AS1 (clr,%D0));
            if (AVR_HAVE_MOVW)
              return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
                                AS1 (clr,%C0) CR_TAB
                                AS1 (clr,%D0));
            else
              return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
                                AS2 (mov,%A0,%C1) CR_TAB
                                AS1 (clr,%C0) CR_TAB
                                AS1 (clr,%D0));
          }

        case 24:
          return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
                            AS1 (clr,%B0) CR_TAB
                            AS1 (clr,%C0) CR_TAB
                            AS1 (clr,%D0));

        case 31:
          /* Only bit 31 survives: test it with sbrc and set bit 0.  */
          *len = 6;
          return (AS1 (clr,%A0) CR_TAB
                  AS2 (sbrc,%D0,7) CR_TAB
                  AS1 (inc,%A0) CR_TAB
                  AS1 (clr,%B0) CR_TAB
                  AS1 (clr,%C0) CR_TAB
                  AS1 (clr,%D0));
        }
      len = t;
    }
  out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
                       AS1 (ror,%C0) CR_TAB
                       AS1 (ror,%B0) CR_TAB
                       AS1 (ror,%A0)),
                      insn, operands, len, 4);
  return "";
}
4442
4443 /* Create RTL split patterns for byte sized rotate expressions. This
4444 produces a series of move instructions and considers overlap situations.
4445 Overlapping non-HImode operands need a scratch register. */
4446
bool
avr_rotate_bytes (rtx operands[])
{
  /* OPERANDS: [0] = destination, [1] = source, [2] = rotate count in
     bits (a CONST_INT), [3] = scratch register.  Emits a series of
     subreg moves implementing the rotate; returns true on success.  */
  int i, j;
  enum machine_mode mode = GET_MODE (operands[0]);
  bool overlapped = reg_overlap_mentioned_p (operands[0], operands[1]);
  bool same_reg = rtx_equal_p (operands[0], operands[1]);
  int num = INTVAL (operands[2]);
  rtx scratch = operands[3];
  /* Work out if byte or word move is needed.  Odd byte rotates need QImode.
     Word move if no scratch is needed, otherwise use size of scratch.  */
  enum machine_mode move_mode = QImode;
  int move_size, offset, size;

  if (num & 0xf)
    move_mode = QImode;
  else if ((mode == SImode && !same_reg) || !overlapped)
    move_mode = HImode;
  else
    move_mode = GET_MODE (scratch);

  /* Force DI rotate to use QI moves since other DI moves are currently split
     into QI moves so forward propagation works better.  */
  if (mode == DImode)
    move_mode = QImode;
  /* Make scratch smaller if needed.  */
  if (GET_MODE (scratch) == HImode && move_mode == QImode)
    scratch = simplify_gen_subreg (move_mode, scratch, HImode, 0);

  move_size = GET_MODE_SIZE (move_mode);
  /* Number of bytes/words to rotate.  */
  offset = (num >> 3) / move_size;
  /* Number of moves needed.  */
  size = GET_MODE_SIZE (mode) / move_size;
  /* Himode byte swap is special case to avoid a scratch register.  */
  if (mode == HImode && same_reg)
    {
      /* HImode byte swap, using xor.  This is as quick as using scratch.  */
      rtx src, dst;
      src = simplify_gen_subreg (move_mode, operands[1], mode, 0);
      dst = simplify_gen_subreg (move_mode, operands[0], mode, 1);
      if (!rtx_equal_p (dst, src))
        {
          /* Classic three-xor swap of the two bytes in place.  */
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
          emit_move_insn (src, gen_rtx_XOR (QImode, src, dst));
          emit_move_insn (dst, gen_rtx_XOR (QImode, dst, src));
        }
    }
  else
    {
#define MAX_SIZE 8 /* GET_MODE_SIZE (DImode) / GET_MODE_SIZE (QImode) */
      /* Create linked list of moves to determine move order.  */
      struct {
        rtx src, dst;
        int links;  /* Index of the move this one must wait for, or -1.  */
      } move[MAX_SIZE + 8];
      int blocked, moves;

      gcc_assert (size <= MAX_SIZE);
      /* Generate list of subreg moves.  */
      for (i = 0; i < size; i++)
        {
          int from = i;
          int to = (from + offset) % size;
          move[i].src = simplify_gen_subreg (move_mode, operands[1],
                                             mode, from * move_size);
          move[i].dst = simplify_gen_subreg (move_mode, operands[0],
                                             mode, to * move_size);
          move[i].links = -1;
        }
      /* Mark dependence where a dst of one move is the src of another move.
         The first move is a conflict as it must wait until second is
         performed.  We ignore moves to self - we catch this later.  */
      if (overlapped)
        for (i = 0; i < size; i++)
          if (reg_overlap_mentioned_p (move[i].dst, operands[1]))
            for (j = 0; j < size; j++)
              if (j != i && rtx_equal_p (move[j].src, move[i].dst))
                {
                  /* The dst of move i is the src of move j.  */
                  move[i].links = j;
                  break;
                }

      blocked = -1;
      moves = 0;
      /* Go through move list and perform non-conflicting moves.  As each
         non-overlapping move is made, it may remove other conflicts
         so the process is repeated until no conflicts remain.  */
      do
        {
          blocked = -1;
          moves = 0;
          /* Emit move where dst is not also a src or we have used that
             src already.  */
          for (i = 0; i < size; i++)
            if (move[i].src != NULL_RTX)
              {
                if (move[i].links == -1
                    || move[move[i].links].src == NULL_RTX)
                  {
                    moves++;
                    /* Ignore NOP moves to self.  */
                    if (!rtx_equal_p (move[i].dst, move[i].src))
                      emit_move_insn (move[i].dst, move[i].src);

                    /* Remove conflict from list.  */
                    move[i].src = NULL_RTX;
                  }
                else
                  blocked = i;
              }

          /* Check for deadlock.  This is when no moves occurred and we have
             at least one blocked move.  */
          if (moves == 0 && blocked != -1)
            {
              /* Need to use scratch register to break deadlock.
                 Add move to put dst of blocked move into scratch.
                 When this move occurs, it will break chain deadlock.
                 The scratch register is substituted for real move.  */

              move[size].src = move[blocked].dst;
              move[size].dst = scratch;
              /* Scratch move is never blocked.  */
              move[size].links = -1;
              /* Make sure we have valid link.  */
              gcc_assert (move[blocked].links != -1);
              /* Replace src of blocking move with scratch reg.  */
              move[move[blocked].links].src = scratch;
              /* Make dependent on scratch move occurring.  */
              move[blocked].links = size;
              size=size+1;
            }
        }
      while (blocked != -1);
    }
  return true;
}
4586
4587 /* Modifies the length assigned to instruction INSN
4588 LEN is the initially computed length of the insn. */
4589
int
adjust_insn_length (rtx insn, int len)
{
  /* Refine the length LEN initially computed for INSN by re-running the
     relevant output functions in length-only mode (passing &len), or by
     counting the bytes actually touched for masked AND/IOR.  Returns the
     corrected length.  */
  rtx patt = PATTERN (insn);
  rtx set;

  if (GET_CODE (patt) == SET)
    {
      rtx op[10];
      op[1] = SET_SRC (patt);
      op[0] = SET_DEST (patt);
      if (general_operand (op[1], VOIDmode)
          && general_operand (op[0], VOIDmode))
        {
          /* Plain move: ask the mode-specific move emitter for its length.  */
          switch (GET_MODE (op[0]))
            {
            case QImode:
              output_movqi (insn, op, &len);
              break;
            case HImode:
              output_movhi (insn, op, &len);
              break;
            case SImode:
            case SFmode:
              output_movsisf (insn, op, &len);
              break;
            default:
              break;
            }
        }
      else if (op[0] == cc0_rtx && REG_P (op[1]))
        {
          /* Compare/test against zero (sets cc0).  */
          switch (GET_MODE (op[1]))
            {
            case HImode: out_tsthi (insn, op[1], &len); break;
            case SImode: out_tstsi (insn, op[1], &len); break;
            default: break;
            }
        }
      else if (GET_CODE (op[1]) == AND)
        {
          if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
            {
              HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
              /* One andi per byte whose mask is not all-ones (bytes
                 with mask 0xff need no instruction).  */
              if (GET_MODE (op[1]) == SImode)
                len = (((mask & 0xff) != 0xff)
                       + ((mask & 0xff00) != 0xff00)
                       + ((mask & 0xff0000L) != 0xff0000L)
                       + ((mask & 0xff000000L) != 0xff000000L));
              else if (GET_MODE (op[1]) == HImode)
                len = (((mask & 0xff) != 0xff)
                       + ((mask & 0xff00) != 0xff00));
            }
        }
      else if (GET_CODE (op[1]) == IOR)
        {
          if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
            {
              HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
              /* One ori per byte whose mask is nonzero.  */
              if (GET_MODE (op[1]) == SImode)
                len = (((mask & 0xff) != 0)
                       + ((mask & 0xff00) != 0)
                       + ((mask & 0xff0000L) != 0)
                       + ((mask & 0xff000000L) != 0));
              else if (GET_MODE (op[1]) == HImode)
                len = (((mask & 0xff) != 0)
                       + ((mask & 0xff00) != 0));
            }
        }
    }
  set = single_set (insn);
  if (set)
    {
      rtx op[10];

      op[1] = SET_SRC (set);
      op[0] = SET_DEST (set);

      if (GET_CODE (patt) == PARALLEL
          && general_operand (op[1], VOIDmode)
          && general_operand (op[0], VOIDmode))
        {
          /* Reload pattern with clobbered scratch as second element.  */
          if (XVECLEN (patt, 0) == 2)
            op[2] = XVECEXP (patt, 0, 1);

          switch (GET_MODE (op[0]))
            {
            case QImode:
              len = 2;
              break;
            case HImode:
              output_reload_inhi (insn, op, &len);
              break;
            case SImode:
            case SFmode:
              output_reload_insisf (insn, op, &len);
              break;
            default:
              break;
            }
        }
      else if (GET_CODE (op[1]) == ASHIFT
               || GET_CODE (op[1]) == ASHIFTRT
               || GET_CODE (op[1]) == LSHIFTRT)
        {
          /* Shift insn: re-run the matching shift output function in
             length-only mode to get the exact sequence length.  */
          rtx ops[10];
          ops[0] = op[0];
          ops[1] = XEXP (op[1],0);
          ops[2] = XEXP (op[1],1);
          switch (GET_CODE (op[1]))
            {
            case ASHIFT:
              switch (GET_MODE (op[0]))
                {
                case QImode: ashlqi3_out (insn,ops,&len); break;
                case HImode: ashlhi3_out (insn,ops,&len); break;
                case SImode: ashlsi3_out (insn,ops,&len); break;
                default: break;
                }
              break;
            case ASHIFTRT:
              switch (GET_MODE (op[0]))
                {
                case QImode: ashrqi3_out (insn,ops,&len); break;
                case HImode: ashrhi3_out (insn,ops,&len); break;
                case SImode: ashrsi3_out (insn,ops,&len); break;
                default: break;
                }
              break;
            case LSHIFTRT:
              switch (GET_MODE (op[0]))
                {
                case QImode: lshrqi3_out (insn,ops,&len); break;
                case HImode: lshrhi3_out (insn,ops,&len); break;
                case SImode: lshrsi3_out (insn,ops,&len); break;
                default: break;
                }
              break;
            default:
              break;
            }
        }
    }
  return len;
}
4735
4736 /* Return nonzero if register REG dead after INSN. */
4737
4738 int
4739 reg_unused_after (rtx insn, rtx reg)
4740 {
4741 return (dead_or_set_p (insn, reg)
4742 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4743 }
4744
4745 /* Return nonzero if REG is not used after INSN.
4746 We assume REG is a reload reg, and therefore does
4747 not live past labels. It may live past calls or jumps though. */
4748
int
_reg_unused_after (rtx insn, rtx reg)
{
  /* Scan forward from INSN and return 1 iff REG is provably not used
     again before it is fully overwritten, a jump is reached, or the
     stream ends.  REG is assumed to be a reload reg and therefore
     dead at labels; calls are handled via call_used_regs.  */
  enum rtx_code code;
  rtx set;

  /* If the reg is set by this instruction, then it is safe for our
     case.  Disregard the case where this is a store to memory, since
     we are checking a register used in the store address.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) != MEM
      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
    return 1;

  while ((insn = NEXT_INSN (insn)))
    {
      rtx set;
      code = GET_CODE (insn);

#if 0
      /* If this is a label that existed before reload, then the register
         if dead here.  However, if this is a label added by reorg, then
         the register may still be live here.  We can't tell the difference,
         so we just ignore labels completely.  */
      if (code == CODE_LABEL)
        return 1;
      /* else */
#endif

      if (!INSN_P (insn))
        continue;

      /* Conservatively assume REG may be live along any jump target.  */
      if (code == JUMP_INSN)
        return 0;

      /* If this is a sequence, we must handle them all at once.
         We could have for instance a call that sets the target register,
         and an insn in a delay slot that uses the register.  In this case,
         we must return 0.  */
      else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
        {
          int i;
          int retval = 0;

          for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
            {
              rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
              rtx set = single_set (this_insn);

              if (GET_CODE (this_insn) == CALL_INSN)
                code = CALL_INSN;
              else if (GET_CODE (this_insn) == JUMP_INSN)
                {
                  if (INSN_ANNULLED_BRANCH_P (this_insn))
                    return 0;
                  code = JUMP_INSN;
                }

              if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
                return 0;
              if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
                {
                  if (GET_CODE (SET_DEST (set)) != MEM)
                    retval = 1;
                  else
                    return 0;
                }
              if (set == 0
                  && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
                return 0;
            }
          if (retval == 1)
            return 1;
          else if (code == JUMP_INSN)
            return 0;
        }

      if (code == CALL_INSN)
        {
          /* REG may be an argument register used by the callee.  */
          rtx tem;
          for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
            if (GET_CODE (XEXP (tem, 0)) == USE
                && REG_P (XEXP (XEXP (tem, 0), 0))
                && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
              return 0;
          /* A call clobbers all call-used registers.  */
          if (call_used_regs[REGNO (reg)])
            return 1;
        }

      set = single_set (insn);

      if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
        return 0;
      if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
        return GET_CODE (SET_DEST (set)) != MEM;
      if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
        return 0;
    }
  return 1;
}
4849
4850 /* Target hook for assembling integer objects. The AVR version needs
4851 special handling for references to certain labels. */
4852
4853 static bool
4854 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4855 {
4856 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4857 && text_segment_operand (x, VOIDmode) )
4858 {
4859 fputs ("\t.word\tgs(", asm_out_file);
4860 output_addr_const (asm_out_file, x);
4861 fputs (")\n", asm_out_file);
4862 return true;
4863 }
4864 return default_assemble_integer (x, size, aligned_p);
4865 }
4866
4867 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4868
4869 void
4870 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4871 {
4872
4873 /* If the function has the 'signal' or 'interrupt' attribute, test to
4874 make sure that the name of the function is "__vector_NN" so as to
4875 catch when the user misspells the interrupt vector name. */
4876
4877 if (cfun->machine->is_interrupt)
4878 {
4879 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4880 {
4881 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4882 "%qs appears to be a misspelled interrupt handler",
4883 name);
4884 }
4885 }
4886 else if (cfun->machine->is_signal)
4887 {
4888 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4889 {
4890 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4891 "%qs appears to be a misspelled signal handler",
4892 name);
4893 }
4894 }
4895
4896 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4897 ASM_OUTPUT_LABEL (file, name);
4898 }
4899
4900 /* The routine used to output NUL terminated strings. We use a special
4901 version of this for most svr4 targets because doing so makes the
4902 generated assembly code more compact (and thus faster to assemble)
4903 as well as more readable, especially for targets like the i386
4904 (where the only alternative is to output character sequences as
4905 comma separated lists of numbers). */
4906
4907 void
4908 gas_output_limited_string(FILE *file, const char *str)
4909 {
4910 const unsigned char *_limited_str = (const unsigned char *) str;
4911 unsigned ch;
4912 fprintf (file, "%s\"", STRING_ASM_OP);
4913 for (; (ch = *_limited_str); _limited_str++)
4914 {
4915 int escape;
4916 switch (escape = ESCAPES[ch])
4917 {
4918 case 0:
4919 putc (ch, file);
4920 break;
4921 case 1:
4922 fprintf (file, "\\%03o", ch);
4923 break;
4924 default:
4925 putc ('\\', file);
4926 putc (escape, file);
4927 break;
4928 }
4929 }
4930 fprintf (file, "\"\n");
4931 }
4932
/* The routine used to output sequences of byte values.  We use a special
   version of this for most svr4 targets because doing so makes the
   generated assembly code more compact (and thus faster to assemble)
   as well as more readable.  Note that if we find subparts of the
   character sequence which end with NUL (and which are shorter than
   STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING.  */

void
gas_output_ascii(FILE *file, const char *str, size_t length)
{
  /* Walk LENGTH raw bytes of STR, emitting them either via
     gas_output_limited_string (for short NUL-terminated runs) or as
     .ascii chunks of roughly 60 output characters per line.  */
  const unsigned char *_ascii_bytes = (const unsigned char *) str;
  const unsigned char *limit = _ascii_bytes + length;
  unsigned bytes_in_chunk = 0;  /* output chars on the open .ascii line */
  for (; _ascii_bytes < limit; _ascii_bytes++)
    {
      const unsigned char *p;
      /* Close an over-long .ascii line before emitting the next byte.  */
      if (bytes_in_chunk >= 60)
        {
          fprintf (file, "\"\n");
          bytes_in_chunk = 0;
        }
      /* Scan ahead for the next NUL within the remaining bytes.  */
      for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
        continue;
      if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
        {
          /* Short NUL-terminated run found: flush any open .ascii
             chunk, then emit the run as a compact string directive.  */
          if (bytes_in_chunk > 0)
            {
              fprintf (file, "\"\n");
              bytes_in_chunk = 0;
            }
          gas_output_limited_string (file, (const char*)_ascii_bytes);
          /* Skip the run; the loop increment steps past its NUL.  */
          _ascii_bytes = p;
        }
      else
        {
          int escape;
          unsigned ch;
          if (bytes_in_chunk == 0)
            fprintf (file, "\t.ascii\t\"");
          switch (escape = ESCAPES[ch = *_ascii_bytes])
            {
            case 0:
              /* Printable: one output character.  */
              putc (ch, file);
              bytes_in_chunk++;
              break;
            case 1:
              /* Octal escape "\NNN": four output characters.  */
              fprintf (file, "\\%03o", ch);
              bytes_in_chunk += 4;
              break;
            default:
              /* Simple backslash escape: two output characters.  */
              putc ('\\', file);
              putc (escape, file);
              bytes_in_chunk += 2;
              break;
            }
        }
    }
  if (bytes_in_chunk > 0)
    fprintf (file, "\"\n");
}
4993
4994 /* Return value is nonzero if pseudos that have been
4995 assigned to registers of class CLASS would likely be spilled
4996 because registers of CLASS are needed for spill registers. */
4997
4998 static bool
4999 avr_class_likely_spilled_p (reg_class_t c)
5000 {
5001 return (c != ALL_REGS && c != ADDW_REGS);
5002 }
5003
5004 /* Valid attributes:
5005 progmem - put data to program memory;
5006 signal - make a function to be hardware interrupt. After function
5007 prologue interrupts are disabled;
5008 interrupt - make a function to be hardware interrupt. After function
5009 prologue interrupts are enabled;
5010 naked - don't generate function prologue/epilogue and `ret' command.
5011
5012 Only `progmem' attribute valid for type. */
5013
5014 /* Handle a "progmem" attribute; arguments as in
5015 struct attribute_spec.handler. */
5016 static tree
5017 avr_handle_progmem_attribute (tree *node, tree name,
5018 tree args ATTRIBUTE_UNUSED,
5019 int flags ATTRIBUTE_UNUSED,
5020 bool *no_add_attrs)
5021 {
5022 if (DECL_P (*node))
5023 {
5024 if (TREE_CODE (*node) == TYPE_DECL)
5025 {
5026 /* This is really a decl attribute, not a type attribute,
5027 but try to handle it for GCC 3.0 backwards compatibility. */
5028
5029 tree type = TREE_TYPE (*node);
5030 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
5031 tree newtype = build_type_attribute_variant (type, attr);
5032
5033 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
5034 TREE_TYPE (*node) = newtype;
5035 *no_add_attrs = true;
5036 }
5037 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
5038 {
5039 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
5040 {
5041 warning (0, "only initialized variables can be placed into "
5042 "program memory area");
5043 *no_add_attrs = true;
5044 }
5045 }
5046 else
5047 {
5048 warning (OPT_Wattributes, "%qE attribute ignored",
5049 name);
5050 *no_add_attrs = true;
5051 }
5052 }
5053
5054 return NULL_TREE;
5055 }
5056
5057 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
5058 struct attribute_spec.handler. */
5059
5060 static tree
5061 avr_handle_fndecl_attribute (tree *node, tree name,
5062 tree args ATTRIBUTE_UNUSED,
5063 int flags ATTRIBUTE_UNUSED,
5064 bool *no_add_attrs)
5065 {
5066 if (TREE_CODE (*node) != FUNCTION_DECL)
5067 {
5068 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5069 name);
5070 *no_add_attrs = true;
5071 }
5072
5073 return NULL_TREE;
5074 }
5075
5076 static tree
5077 avr_handle_fntype_attribute (tree *node, tree name,
5078 tree args ATTRIBUTE_UNUSED,
5079 int flags ATTRIBUTE_UNUSED,
5080 bool *no_add_attrs)
5081 {
5082 if (TREE_CODE (*node) != FUNCTION_TYPE)
5083 {
5084 warning (OPT_Wattributes, "%qE attribute only applies to functions",
5085 name);
5086 *no_add_attrs = true;
5087 }
5088
5089 return NULL_TREE;
5090 }
5091
5092 /* Look for attribute `progmem' in DECL
5093 if found return 1, otherwise 0. */
5094
5095 int
5096 avr_progmem_p (tree decl, tree attributes)
5097 {
5098 tree a;
5099
5100 if (TREE_CODE (decl) != VAR_DECL)
5101 return 0;
5102
5103 if (NULL_TREE
5104 != lookup_attribute ("progmem", attributes))
5105 return 1;
5106
5107 a=decl;
5108 do
5109 a = TREE_TYPE(a);
5110 while (TREE_CODE (a) == ARRAY_TYPE);
5111
5112 if (a == error_mark_node)
5113 return 0;
5114
5115 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
5116 return 1;
5117
5118 return 0;
5119 }
5120
5121 /* Add the section attribute if the variable is in progmem. */
5122
5123 static void
5124 avr_insert_attributes (tree node, tree *attributes)
5125 {
5126 if (TREE_CODE (node) == VAR_DECL
5127 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
5128 && avr_progmem_p (node, *attributes))
5129 {
5130 if (TREE_READONLY (node))
5131 {
5132 static const char dsec[] = ".progmem.data";
5133
5134 *attributes = tree_cons (get_identifier ("section"),
5135 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
5136 *attributes);
5137 }
5138 else
5139 {
5140 error ("variable %q+D must be const in order to be put into"
5141 " read-only section by means of %<__attribute__((progmem))%>",
5142 node);
5143 }
5144 }
5145 }
5146
5147 /* A get_unnamed_section callback for switching to progmem_section. */
5148
5149 static void
5150 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
5151 {
5152 fprintf (asm_out_file,
5153 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
5154 AVR_HAVE_JMP_CALL ? "a" : "ax");
5155 /* Should already be aligned, this is just to be safe if it isn't. */
5156 fprintf (asm_out_file, "\t.p2align 1\n");
5157 }
5158
5159
5160 /* Implement `ASM_OUTPUT_ALIGNED_DECL_LOCAL'. */
5161 /* Implement `ASM_OUTPUT_ALIGNED_DECL_COMMON'. */
5162 /* Track need of __do_clear_bss. */
5163
5164 void
5165 avr_asm_output_aligned_decl_common (FILE * stream, const_tree decl ATTRIBUTE_UNUSED,
5166 const char *name, unsigned HOST_WIDE_INT size,
5167 unsigned int align, bool local_p)
5168 {
5169 avr_need_clear_bss_p = true;
5170
5171 if (local_p)
5172 {
5173 fputs ("\t.local\t", stream);
5174 assemble_name (stream, name);
5175 fputs ("\n", stream);
5176 }
5177
5178 fputs ("\t.comm\t", stream);
5179 assemble_name (stream, name);
5180 fprintf (stream,
5181 "," HOST_WIDE_INT_PRINT_UNSIGNED ",%u\n",
5182 size, align / BITS_PER_UNIT);
5183 }
5184
5185
5186 /* Unnamed section callback for data_section
5187 to track need of __do_copy_data. */
5188
5189 static void
5190 avr_output_data_section_asm_op (const void *data)
5191 {
5192 avr_need_copy_data_p = true;
5193
5194 /* Dispatch to default. */
5195 output_section_asm_op (data);
5196 }
5197
5198
5199 /* Unnamed section callback for bss_section
5200 to track need of __do_clear_bss. */
5201
5202 static void
5203 avr_output_bss_section_asm_op (const void *data)
5204 {
5205 avr_need_clear_bss_p = true;
5206
5207 /* Dispatch to default. */
5208 output_section_asm_op (data);
5209 }
5210
5211
/* Implement `TARGET_ASM_INIT_SECTIONS'.  */

static void
avr_asm_init_sections (void)
{
  /* Create the section for switch/jump tables in program memory.  The
     SECTION_CODE flag is set only on devices without JMP/CALL.  */
  progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
                                         avr_output_progmem_section_asm_op,
                                         NULL);
  /* Read-only data is routed into .data (readonly_data_section simply
     aliases data_section).  */
  readonly_data_section = data_section;

  /* Replace the default callbacks so that switching to .data / .bss
     also records the need for __do_copy_data resp. __do_clear_bss.  */
  data_section->unnamed.callback = avr_output_data_section_asm_op;
  bss_section->unnamed.callback = avr_output_bss_section_asm_op;
}
5225
5226
5227 /* Implement `TARGET_ASM_NAMED_SECTION'. */
5228 /* Track need of __do_clear_bss, __do_copy_data for named sections. */
5229
5230 void
5231 avr_asm_named_section (const char *name, unsigned int flags, tree decl)
5232 {
5233 if (!avr_need_copy_data_p)
5234 avr_need_copy_data_p = (0 == strncmp (name, ".data", 5)
5235 || 0 == strncmp (name, ".rodata", 7)
5236 || 0 == strncmp (name, ".gnu.linkonce.d", 15));
5237
5238 if (!avr_need_clear_bss_p)
5239 avr_need_clear_bss_p = (0 == strncmp (name, ".bss", 4));
5240
5241 default_elf_asm_named_section (name, flags, decl);
5242 }
5243
5244 static unsigned int
5245 avr_section_type_flags (tree decl, const char *name, int reloc)
5246 {
5247 unsigned int flags = default_section_type_flags (decl, name, reloc);
5248
5249 if (strncmp (name, ".noinit", 7) == 0)
5250 {
5251 if (decl && TREE_CODE (decl) == VAR_DECL
5252 && DECL_INITIAL (decl) == NULL_TREE)
5253 flags |= SECTION_BSS; /* @nobits */
5254 else
5255 warning (0, "only uninitialized variables can be placed in the "
5256 ".noinit section");
5257 }
5258
5259 return flags;
5260 }
5261
5262
5263 /* Implement `TARGET_ASM_FILE_START'. */
5264 /* Outputs some appropriate text to go at the start of an assembler
5265 file. */
5266
5267 static void
5268 avr_file_start (void)
5269 {
5270 if (avr_current_arch->asm_only)
5271 error ("MCU %qs supported for assembler only", avr_current_device->name);
5272
5273 default_file_start ();
5274
5275 /* fprintf (asm_out_file, "\t.arch %s\n", avr_current_device->name);*/
5276 fputs ("__SREG__ = 0x3f\n"
5277 "__SP_H__ = 0x3e\n"
5278 "__SP_L__ = 0x3d\n", asm_out_file);
5279
5280 fputs ("__tmp_reg__ = 0\n"
5281 "__zero_reg__ = 1\n", asm_out_file);
5282 }
5283
5284
5285 /* Implement `TARGET_ASM_FILE_END'. */
5286 /* Outputs to the stdio stream FILE some
5287 appropriate text to go at the end of an assembler file. */
5288
5289 static void
5290 avr_file_end (void)
5291 {
5292 /* Output these only if there is anything in the
5293 .data* / .rodata* / .gnu.linkonce.* resp. .bss*
5294 input section(s) - some code size can be saved by not
5295 linking in the initialization code from libgcc if resp.
5296 sections are empty. */
5297
5298 if (avr_need_copy_data_p)
5299 fputs (".global __do_copy_data\n", asm_out_file);
5300
5301 if (avr_need_clear_bss_p)
5302 fputs (".global __do_clear_bss\n", asm_out_file);
5303 }
5304
/* Choose the order in which to allocate hard registers for
   pseudo-registers local to a basic block.

   Store the desired register order in the array `reg_alloc_order'.
   Element 0 should be the register to allocate first; element 1, the
   next register; and so on.

   NOTE(review): entries 32..35 are presumably the fake frame/arg
   pointer registers declared elsewhere in this backend -- confirm
   against the register definitions.  */

void
order_regs_for_local_alloc (void)
{
  unsigned int i;
  /* Default order.  */
  static const int order_0[] = {
    24,25,
    18,19,
    20,21,
    22,23,
    30,31,
    26,27,
    28,29,
    17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
    0,1,
    32,33,34,35
  };
  /* Order selected by -morder1.  */
  static const int order_1[] = {
    18,19,
    20,21,
    22,23,
    24,25,
    30,31,
    26,27,
    28,29,
    17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
    0,1,
    32,33,34,35
  };
  /* Order selected by -morder2.  */
  static const int order_2[] = {
    25,24,
    23,22,
    21,20,
    19,18,
    30,31,
    26,27,
    28,29,
    17,16,
    15,14,13,12,11,10,9,8,7,6,5,4,3,2,
    1,0,
    32,33,34,35
  };

  const int *order = (TARGET_ORDER_1 ? order_1 :
                      TARGET_ORDER_2 ? order_2 :
                      order_0);
  /* All three tables have the same length, so sizing by order_0 is fine.  */
  for (i=0; i < ARRAY_SIZE (order_0); ++i)
      reg_alloc_order[i] = order[i];
}
5360
5361
5362 /* Implement `TARGET_REGISTER_MOVE_COST' */
5363
5364 static int
5365 avr_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
5366 reg_class_t from, reg_class_t to)
5367 {
5368 return (from == STACK_REG ? 6
5369 : to == STACK_REG ? 12
5370 : 2);
5371 }
5372
5373
5374 /* Implement `TARGET_MEMORY_MOVE_COST' */
5375
5376 static int
5377 avr_memory_move_cost (enum machine_mode mode, reg_class_t rclass ATTRIBUTE_UNUSED,
5378 bool in ATTRIBUTE_UNUSED)
5379 {
5380 return (mode == QImode ? 2
5381 : mode == HImode ? 4
5382 : mode == SImode ? 8
5383 : mode == SFmode ? 8
5384 : 16);
5385 }
5386
5387
5388 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5389 cost of an RTX operand given its context. X is the rtx of the
5390 operand, MODE is its mode, and OUTER is the rtx_code of this
5391 operand's parent operator. */
5392
5393 static int
5394 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5395 bool speed)
5396 {
5397 enum rtx_code code = GET_CODE (x);
5398 int total;
5399
5400 switch (code)
5401 {
5402 case REG:
5403 case SUBREG:
5404 return 0;
5405
5406 case CONST_INT:
5407 case CONST_DOUBLE:
5408 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5409
5410 default:
5411 break;
5412 }
5413
5414 total = 0;
5415 avr_rtx_costs (x, code, outer, &total, speed);
5416 return total;
5417 }
5418
/* The AVR backend's rtx_cost function.  X is rtx expression whose cost
   is to be calculated.  Return true if the complete cost has been
   computed, and false if subexpressions should be scanned.  In either
   case, *TOTAL contains the cost result.

   Costs are expressed in COSTS_N_INSNS units; many entries differ
   between size optimization (!SPEED) and speed optimization, because
   some operations are open-coded at -O2 but become library calls or
   loops at -Os.  The magic per-shift-count numbers below mirror the
   instruction sequences the output routines actually emit.  */

static bool
avr_rtx_costs (rtx x, int codearg, int outer_code ATTRIBUTE_UNUSED, int *total,
               bool speed)
{
  enum rtx_code code = (enum rtx_code) codearg;
  enum machine_mode mode = GET_MODE (x);
  HOST_WIDE_INT val;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Immediate constants are as cheap as registers.  */
      *total = 0;
      return true;

    case MEM:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      /* Memory accesses and symbolic operands: one insn per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      return true;

    case NEG:
      switch (mode)
        {
        case QImode:
        case SFmode:
          *total = COSTS_N_INSNS (1);
          break;

        case HImode:
          *total = COSTS_N_INSNS (3);
          break;

        case SImode:
          *total = COSTS_N_INSNS (7);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case ABS:
      switch (mode)
        {
        case QImode:
        case SFmode:
          *total = COSTS_N_INSNS (1);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case NOT:
      /* One COM per byte.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case ZERO_EXTEND:
      /* One insn per byte added by the extension.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case SIGN_EXTEND:
      /* Like ZERO_EXTEND plus two insns to materialize the sign.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
                              - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case PLUS:
      switch (mode)
        {
        case QImode:
          *total = COSTS_N_INSNS (1);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
          break;

        case HImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (2);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
            }
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
            /* Small immediate, single-insn add.  */
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (2);
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (4);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
            }
          else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
            *total = COSTS_N_INSNS (1);
          else
            *total = COSTS_N_INSNS (4);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case MINUS:
    case AND:
    case IOR:
      /* One insn per byte; a constant second operand is free.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
          *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      return true;

    case XOR:
      /* Unlike AND/IOR, a constant XOR operand is never free.  */
      *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      return true;

    case MULT:
      switch (mode)
        {
        case QImode:
          if (AVR_HAVE_MUL)
            *total = COSTS_N_INSNS (!speed ? 3 : 4);
          else if (!speed)
            /* Library call: RCALL or CALL depending on the device.  */
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            return false;
          break;

        case HImode:
          if (AVR_HAVE_MUL)
            *total = COSTS_N_INSNS (!speed ? 7 : 10);
          else if (!speed)
            *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
          else
            return false;
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      return true;

    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      /* Always a library call; only its call cost is modelled for -Os.  */
      if (!speed)
        *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
      else
        return false;
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
      return true;

    case ROTATE:
      /* Only the byte/word-swap style rotate counts are costed here;
         other counts leave *TOTAL at its incoming value.  */
      switch (mode)
        {
        case QImode:
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
            *total = COSTS_N_INSNS (1);

          break;

        case HImode:
          if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
            *total = COSTS_N_INSNS (3);

          break;

        case SImode:
          if (CONST_INT_P (XEXP (x, 1)))
            switch (INTVAL (XEXP (x, 1)))
              {
              case 8:
              case 24:
                *total = COSTS_N_INSNS (5);
                break;
              case 16:
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
                break;
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case ASHIFT:
      switch (mode)
        {
        case QImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              /* Variable shift count: loop at -Os, worst case at -O2.  */
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 7)
                *total = COSTS_N_INSNS (3);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
                *total = COSTS_N_INSNS (2);
                break;
              case 9:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 3:
              case 10:
              case 15:
                *total = COSTS_N_INSNS (4);
                break;
              case 7:
              case 11:
              case 12:
                *total = COSTS_N_INSNS (5);
                break;
              case 4:
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
                break;
              case 6:
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
                break;
              case 5:
                *total = COSTS_N_INSNS (!speed ? 5 : 10);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
              }
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 24:
                *total = COSTS_N_INSNS (3);
                break;
              case 1:
              case 8:
              case 16:
                *total = COSTS_N_INSNS (4);
                break;
              case 31:
                *total = COSTS_N_INSNS (6);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case ASHIFTRT:
      switch (mode)
        {
        case QImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 6)
                *total = COSTS_N_INSNS (4);
              else if (val == 7)
                *total = COSTS_N_INSNS (2);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (2);
                break;
              case 15:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 7:
              case 8:
              case 9:
                *total = COSTS_N_INSNS (4);
                break;
              case 10:
              case 14:
                *total = COSTS_N_INSNS (5);
                break;
              case 11:
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
                break;
              case 12:
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
                break;
              case 6:
              case 13:
                *total = COSTS_N_INSNS (!speed ? 5 : 8);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
              }
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (4);
                break;
              case 8:
              case 16:
              case 24:
                *total = COSTS_N_INSNS (6);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              case 31:
                *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case LSHIFTRT:
      switch (mode)
        {
        case QImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 4 : 17);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
            }
          else
            {
              val = INTVAL (XEXP (x, 1));
              if (val == 7)
                *total = COSTS_N_INSNS (3);
              else if (val >= 0 && val <= 7)
                *total = COSTS_N_INSNS (val);
              else
                *total = COSTS_N_INSNS (1);
            }
          break;

        case HImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 5 : 41);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
              case 8:
                *total = COSTS_N_INSNS (2);
                break;
              case 9:
                *total = COSTS_N_INSNS (3);
                break;
              case 2:
              case 10:
              case 15:
                *total = COSTS_N_INSNS (4);
                break;
              case 7:
              case 11:
                *total = COSTS_N_INSNS (5);
                break;
              case 3:
              case 12:
              case 13:
              case 14:
                *total = COSTS_N_INSNS (!speed ? 5 : 6);
                break;
              case 4:
                *total = COSTS_N_INSNS (!speed ? 5 : 7);
                break;
              case 5:
              case 6:
                *total = COSTS_N_INSNS (!speed ? 5 : 9);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 5 : 41);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
              }
          break;

        case SImode:
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            {
              *total = COSTS_N_INSNS (!speed ? 7 : 113);
              *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
            }
          else
            switch (INTVAL (XEXP (x, 1)))
              {
              case 0:
                *total = 0;
                break;
              case 1:
                *total = COSTS_N_INSNS (4);
                break;
              case 2:
                *total = COSTS_N_INSNS (!speed ? 7 : 8);
                break;
              case 8:
              case 16:
              case 24:
                *total = COSTS_N_INSNS (4);
                break;
              case 31:
                *total = COSTS_N_INSNS (6);
                break;
              default:
                *total = COSTS_N_INSNS (!speed ? 7 : 113);
                *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
              }
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    case COMPARE:
      /* Note: the mode of interest is that of operand 0, since the
         COMPARE itself has CCmode/VOIDmode.  Comparison against a
         nonzero constant costs extra insns in HI/SI mode.  */
      switch (GET_MODE (XEXP (x, 0)))
        {
        case QImode:
          *total = COSTS_N_INSNS (1);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
          break;

        case HImode:
          *total = COSTS_N_INSNS (2);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
          else if (INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (1);
          break;

        case SImode:
          *total = COSTS_N_INSNS (4);
          if (GET_CODE (XEXP (x, 1)) != CONST_INT)
            *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
          else if (INTVAL (XEXP (x, 1)) != 0)
            *total += COSTS_N_INSNS (3);
          break;

        default:
          return false;
        }
      *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
      return true;

    default:
      break;
    }
  return false;
}
5980
5981 /* Calculate the cost of a memory address. */
5982
5983 static int
5984 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5985 {
5986 if (GET_CODE (x) == PLUS
5987 && GET_CODE (XEXP (x,1)) == CONST_INT
5988 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5989 && INTVAL (XEXP (x,1)) >= 61)
5990 return 18;
5991 if (CONSTANT_ADDRESS_P (x))
5992 {
5993 if (optimize > 0 && io_address_operand (x, QImode))
5994 return 2;
5995 return 4;
5996 }
5997 return 4;
5998 }
5999
/* Test for extra memory constraint 'Q'.
   It's a memory address based on Y or Z pointer with valid displacement.
   X is the MEM rtx; returns 1 when the constraint matches, else 0.  */

int
extra_constraint_Q (rtx x)
{
  /* Accept (mem (plus (reg) (const_int))) with the displacement within
     MAX_LD_OFFSET for the access mode.  */
  if (GET_CODE (XEXP (x,0)) == PLUS
      && REG_P (XEXP (XEXP (x,0), 0))
      && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
      && (INTVAL (XEXP (XEXP (x,0), 1))
          <= MAX_LD_OFFSET (GET_MODE (x))))
    {
      rtx xx = XEXP (XEXP (x,0), 0);
      int regno = REGNO (xx);
      if (TARGET_ALL_DEBUG)
        {
          fprintf (stderr, ("extra_constraint:\n"
                            "reload_completed: %d\n"
                            "reload_in_progress: %d\n"),
                   reload_completed, reload_in_progress);
          debug_rtx (x);
        }
      /* Before reload, any pseudo may still be allocated to Y or Z.  */
      if (regno >= FIRST_PSEUDO_REGISTER)
        return 1;               /* allocate pseudos */
      else if (regno == REG_Z || regno == REG_Y)
        return 1;               /* strictly check */
      else if (xx == frame_pointer_rtx
               || xx == arg_pointer_rtx)
        return 1;               /* XXX frame & arg pointer checks */
    }
  return 0;
}
6032
6033 /* Convert condition code CONDITION to the valid AVR condition code. */
6034
6035 RTX_CODE
6036 avr_normalize_condition (RTX_CODE condition)
6037 {
6038 switch (condition)
6039 {
6040 case GT:
6041 return GE;
6042 case GTU:
6043 return GEU;
6044 case LE:
6045 return LT;
6046 case LEU:
6047 return LTU;
6048 default:
6049 gcc_unreachable ();
6050 }
6051 }
6052
/* This function optimizes conditional jumps.  It rewrites cc0-setting
   compare insns (and patches the conditional branch that consumes
   them) into forms the AVR patterns handle better: swapping reg-reg
   compare operands, reversing tst insns, and turning strict integer
   comparisons into non-strict ones where legal.  */

static void
avr_reorg (void)
{
  rtx insn, pattern;
  
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Only single-SET real insns are of interest.  */
      if (! (GET_CODE (insn) == INSN
             || GET_CODE (insn) == CALL_INSN
             || GET_CODE (insn) == JUMP_INSN)
          || !single_set (insn))
        continue;

      pattern = PATTERN (insn);

      if (GET_CODE (pattern) == PARALLEL)
        pattern = XVECEXP (pattern, 0, 0);
      if (GET_CODE (pattern) == SET
          && SET_DEST (pattern) == cc0_rtx
          && compare_diff_p (insn))
        {
          if (GET_CODE (SET_SRC (pattern)) == COMPARE)
            {
              /* Now we work under compare insn.  NOTE(review): all
                 three cases below assume the next real insn is the
                 conditional branch consuming this compare -- confirm
                 that compare_diff_p guarantees this.  */
              
              pattern = SET_SRC (pattern);
              if (true_regnum (XEXP (pattern,0)) >= 0
                  && true_regnum (XEXP (pattern,1)) >= 0 )
                {
                  /* Reg-reg compare: swap the operands and invert the
                     condition in the following branch accordingly.  */
                  rtx x = XEXP (pattern,0);
                  rtx next = next_real_insn (insn);
                  rtx pat = PATTERN (next);
                  rtx src = SET_SRC (pat);
                  rtx t = XEXP (src,0);
                  PUT_CODE (t, swap_condition (GET_CODE (t)));
                  XEXP (pattern,0) = XEXP (pattern,1);
                  XEXP (pattern,1) = x;
                  INSN_CODE (next) = -1;
                }
              else if (true_regnum (XEXP (pattern, 0)) >= 0
                       && XEXP (pattern, 1) == const0_rtx)
                {
                  /* This is a tst insn, we can reverse it.  */
                  rtx next = next_real_insn (insn);
                  rtx pat = PATTERN (next);
                  rtx src = SET_SRC (pat);
                  rtx t = XEXP (src,0);
    
                  PUT_CODE (t, swap_condition (GET_CODE (t)));
                  XEXP (pattern, 1) = XEXP (pattern, 0);
                  XEXP (pattern, 0) = const0_rtx;
                  /* Both the compare and the branch were rewritten, so
                     force re-recognition of both.  */
                  INSN_CODE (next) = -1;
                  INSN_CODE (insn) = -1;
                }
              else if (true_regnum (XEXP (pattern,0)) >= 0
                       && GET_CODE (XEXP (pattern,1)) == CONST_INT)
                {
                  /* Reg-constant compare: if legal, bump the constant
                     by one and relax the strict condition (GT -> GE
                     etc.) in the consuming branch.  */
                  rtx x = XEXP (pattern,1);
                  rtx next = next_real_insn (insn);
                  rtx pat = PATTERN (next);
                  rtx src = SET_SRC (pat);
                  rtx t = XEXP (src,0);
                  enum machine_mode mode = GET_MODE (XEXP (pattern, 0));

                  if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
                    {
                      XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
                      PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
                      INSN_CODE (next) = -1;
                      INSN_CODE (insn) = -1;
                    }
                }
            }
        }
    }
}
6131
/* Returns register number for function return value.  Hard register
   24 holds (the low part of) function return values; callers add an
   offset to locate wider values.  */

static inline unsigned int
avr_ret_register (void)
{
  const unsigned int return_regno = 24;

  return return_regno;
}
6139
6140 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
6141
6142 static bool
6143 avr_function_value_regno_p (const unsigned int regno)
6144 {
6145 return (regno == avr_ret_register ());
6146 }
6147
6148 /* Create an RTX representing the place where a
6149 library function returns a value of mode MODE. */
6150
6151 static rtx
6152 avr_libcall_value (enum machine_mode mode,
6153 const_rtx func ATTRIBUTE_UNUSED)
6154 {
6155 int offs = GET_MODE_SIZE (mode);
6156 if (offs < 2)
6157 offs = 2;
6158 return gen_rtx_REG (mode, avr_ret_register () + 2 - offs);
6159 }
6160
6161 /* Create an RTX representing the place where a
6162 function returns a value of data type VALTYPE. */
6163
6164 static rtx
6165 avr_function_value (const_tree type,
6166 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
6167 bool outgoing ATTRIBUTE_UNUSED)
6168 {
6169 unsigned int offs;
6170
6171 if (TYPE_MODE (type) != BLKmode)
6172 return avr_libcall_value (TYPE_MODE (type), NULL_RTX);
6173
6174 offs = int_size_in_bytes (type);
6175 if (offs < 2)
6176 offs = 2;
6177 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
6178 offs = GET_MODE_SIZE (SImode);
6179 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
6180 offs = GET_MODE_SIZE (DImode);
6181
6182 return gen_rtx_REG (BLKmode, avr_ret_register () + 2 - offs);
6183 }
6184
6185 int
6186 test_hard_reg_class (enum reg_class rclass, rtx x)
6187 {
6188 int regno = true_regnum (x);
6189 if (regno < 0)
6190 return 0;
6191
6192 if (TEST_HARD_REG_CLASS (rclass, regno))
6193 return 1;
6194
6195 return 0;
6196 }
6197
6198
6199 int
6200 jump_over_one_insn_p (rtx insn, rtx dest)
6201 {
6202 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
6203 ? XEXP (dest, 0)
6204 : dest);
6205 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
6206 int dest_addr = INSN_ADDRESSES (uid);
6207 return dest_addr - jump_addr == get_attr_length (insn) + 1;
6208 }
6209
6210 /* Returns 1 if a value of mode MODE can be stored starting with hard
6211 register number REGNO. On the enhanced core, anything larger than
6212 1 byte must start in even numbered register for "movw" to work
6213 (this way we don't have to check for odd registers everywhere). */
6214
int
avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
{
  /* Disallow QImode in stack pointer regs (SPL/SPH).  */
  if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
    return 0;

  /* The only thing that can go into registers r28:r29 is a Pmode. */
  if (regno == REG_Y && mode == Pmode)
    return 1;

  /* Otherwise disallow all regno/mode combinations that span r28:r29,
     so the frame pointer pair never gets partially clobbered.  */
  if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
    return 0;

  /* Single bytes may live in any remaining register.  */
  if (mode == QImode)
    return 1;

  /* Modes larger than QImode occupy consecutive registers;
     they must not run past the last hard register.  */
  if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
    return 0;

  /* All modes larger than QImode should start in an even register
     so that "movw" can be used on the enhanced core (see the comment
     preceding this function).  */
  return !(regno & 1);
}
6240
/* Output asm to load a 16-bit value into register pair %0 (operand 0)
   using the scratch register %2 (operand 2); %1 is the source operand.
   For CONST_INT sources, shorter sequences are emitted when a byte is
   zero (copy __zero_reg__) or when both bytes are equal (reuse one LDI).
   If LEN is non-NULL it receives the instruction count.  */

const char *
output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
{
  int tmp;
  if (!len)
    len = &tmp;

  if (GET_CODE (operands[1]) == CONST_INT)
    {
      int val = INTVAL (operands[1]);
      if ((val & 0xff) == 0)
	{
	  /* Low byte is zero: only the high byte needs an LDI.  */
	  *len = 3;
	  return (AS2 (mov,%A0,__zero_reg__) CR_TAB
		  AS2 (ldi,%2,hi8(%1)) CR_TAB
		  AS2 (mov,%B0,%2));
	}
      else if ((val & 0xff00) == 0)
	{
	  /* High byte is zero: only the low byte needs an LDI.  */
	  *len = 3;
	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
		  AS2 (mov,%A0,%2) CR_TAB
		  AS2 (mov,%B0,__zero_reg__));
	}
      else if ((val & 0xff) == ((val & 0xff00) >> 8))
	{
	  /* Both bytes identical: one LDI feeds both halves.  */
	  *len = 3;
	  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
		  AS2 (mov,%A0,%2) CR_TAB
		  AS2 (mov,%B0,%2));
	}
    }
  /* General case: load each byte through the scratch register.  */
  *len = 4;
  return (AS2 (ldi,%2,lo8(%1)) CR_TAB
	  AS2 (mov,%A0,%2) CR_TAB
	  AS2 (ldi,%2,hi8(%1)) CR_TAB
	  AS2 (mov,%B0,%2));
}
6279
6280
/* Output asm to load a 32-bit value into register quad %0 via scratch
   register %2.  When LEN is non-NULL, only the instruction count is
   computed (4 LDI/MOV pairs minus one MOV per constant byte that is
   zero) and nothing is emitted.  Otherwise each byte is emitted:
   zero bytes of a constant are copied from __zero_reg__, the rest are
   loaded with LDI into %2 and moved into place.  */

const char *
output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
{
  rtx src = operands[1];
  int cnst = (GET_CODE (src) == CONST_INT);

  if (len)
    {
      /* Length-only query: 8 insns worst case; each zero byte of a
	 constant saves the LDI of its LDI/MOV pair.  */
      if (cnst)
	*len = 4 + ((INTVAL (src) & 0xff) != 0)
	  + ((INTVAL (src) & 0xff00) != 0)
	  + ((INTVAL (src) & 0xff0000) != 0)
	  + ((INTVAL (src) & 0xff000000) != 0);
      else
	*len = 8;

      return "";
    }

  if (cnst && ((INTVAL (src) & 0xff) == 0))
    output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
      output_asm_insn (AS2 (mov, %A0, %2), operands);
    }
  if (cnst && ((INTVAL (src) & 0xff00) == 0))
    output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
      output_asm_insn (AS2 (mov, %B0, %2), operands);
    }
  if (cnst && ((INTVAL (src) & 0xff0000) == 0))
    output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
      output_asm_insn (AS2 (mov, %C0, %2), operands);
    }
  if (cnst && ((INTVAL (src) & 0xff000000) == 0))
    output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
  else
    {
      output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
      output_asm_insn (AS2 (mov, %D0, %2), operands);
    }
  return "";
}
6330
6331 void
6332 avr_output_bld (rtx operands[], int bit_nr)
6333 {
6334 static char s[] = "bld %A0,0";
6335
6336 s[5] = 'A' + (bit_nr >> 3);
6337 s[8] = '0' + (bit_nr & 7);
6338 output_asm_insn (s, operands);
6339 }
6340
6341 void
6342 avr_output_addr_vec_elt (FILE *stream, int value)
6343 {
6344 switch_to_section (progmem_section);
6345 if (AVR_HAVE_JMP_CALL)
6346 fprintf (stream, "\t.word gs(.L%d)\n", value);
6347 else
6348 fprintf (stream, "\trjmp .L%d\n", value);
6349 }
6350
6351 /* Returns true if SCRATCH are safe to be allocated as a scratch
6352 registers (for a define_peephole2) in the current function. */
6353
6354 bool
6355 avr_hard_regno_scratch_ok (unsigned int regno)
6356 {
6357 /* Interrupt functions can only use registers that have already been saved
6358 by the prologue, even if they would normally be call-clobbered. */
6359
6360 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6361 && !df_regs_ever_live_p (regno))
6362 return false;
6363
6364 return true;
6365 }
6366
6367 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6368
6369 int
6370 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6371 unsigned int new_reg)
6372 {
6373 /* Interrupt functions can only use registers that have already been
6374 saved by the prologue, even if they would normally be
6375 call-clobbered. */
6376
6377 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6378 && !df_regs_ever_live_p (new_reg))
6379 return 0;
6380
6381 return 1;
6382 }
6383
6384 /* Output a branch that tests a single bit of a register (QI, HI, SI or DImode)
6385 or memory location in the I/O space (QImode only).
6386
6387 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6388 Operand 1: register operand to test, or CONST_INT memory address.
6389 Operand 2: bit number.
6390 Operand 3: label to jump to if the test is true. */
6391
const char *
avr_out_sbxx_branch (rtx insn, rtx operands[])
{
  enum rtx_code comp = GET_CODE (operands[0]);
  int long_jump = (get_attr_length (insn) >= 4);
  int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);

  /* GE/LT on the tested bit reduce to equality against zero.  */
  if (comp == GE)
    comp = EQ;
  else if (comp == LT)
    comp = NE;

  /* When the branch is emitted as a skip-over, invert the sense.  */
  if (reverse)
    comp = reverse_condition (comp);

  if (GET_CODE (operands[1]) == CONST_INT)
    {
      /* Memory-mapped I/O operand; %m1-0x20 converts the memory
	 address to an I/O address.  */
      if (INTVAL (operands[1]) < 0x40)
	{
	  /* Low I/O addresses are reachable by SBIS/SBIC directly.  */
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbis,%m1-0x20,%2), operands);
	  else
	    output_asm_insn (AS2 (sbic,%m1-0x20,%2), operands);
	}
      else
	{
	  /* Higher I/O addresses: read into __tmp_reg__ first, then
	     use the register skip insns SBRS/SBRC.  */
	  output_asm_insn (AS2 (in,__tmp_reg__,%m1-0x20), operands);
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
	  else
	    output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
	}
    }
  else  /* GET_CODE (operands[1]) == REG */
    {
      if (GET_MODE (operands[1]) == QImode)
	{
	  if (comp == EQ)
	    output_asm_insn (AS2 (sbrs,%1,%2), operands);
	  else
	    output_asm_insn (AS2 (sbrc,%1,%2), operands);
	}
      else  /* HImode or SImode */
	{
	  /* Patch the template in place: 's'/'c' selects skip-if-set
	     vs skip-if-clear, 'A'+n the byte, '0'+n the bit.  */
	  static char buf[] = "sbrc %A1,0";
	  int bit_nr = INTVAL (operands[2]);
	  buf[3] = (comp == EQ) ? 's' : 'c';
	  buf[6] = 'A' + (bit_nr >> 3);
	  buf[9] = '0' + (bit_nr & 7);
	  output_asm_insn (buf, operands);
	}
    }

  if (long_jump)
    /* Skip the 2-word JMP when the condition fails.  */
    return (AS1 (rjmp,.+4) CR_TAB
	    AS1 (jmp,%x3));
  if (!reverse)
    return AS1 (rjmp,%x3);
  /* Reversed short branch: the skip insn above does all the work.  */
  return "";
}
6452
6453 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6454
6455 static void
6456 avr_asm_out_ctor (rtx symbol, int priority)
6457 {
6458 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6459 default_ctor_section_asm_out_constructor (symbol, priority);
6460 }
6461
6462 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6463
6464 static void
6465 avr_asm_out_dtor (rtx symbol, int priority)
6466 {
6467 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6468 default_dtor_section_asm_out_destructor (symbol, priority);
6469 }
6470
6471 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6472
6473 static bool
6474 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6475 {
6476 if (TYPE_MODE (type) == BLKmode)
6477 {
6478 HOST_WIDE_INT size = int_size_in_bytes (type);
6479 return (size == -1 || size > 8);
6480 }
6481 else
6482 return false;
6483 }
6484
6485 /* Worker function for CASE_VALUES_THRESHOLD. */
6486
6487 unsigned int avr_case_values_threshold (void)
6488 {
6489 return (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
6490 }
6491
6492 /* Helper for __builtin_avr_delay_cycles */
6493
static void
avr_expand_delay_cycles (rtx operands0)
{
  /* OPERANDS0 is a CONST_INT holding the requested number of cycles.
     Greedily cover the count with the widest applicable delay loop,
     then fall through to narrower loops and finally single NOPs for
     the remainder.  */
  unsigned HOST_WIDE_INT cycles = UINTVAL (operands0);
  unsigned HOST_WIDE_INT cycles_used;
  unsigned HOST_WIDE_INT loop_count;

  /* 32-bit loop: consumes (loop_count - 1) * 6 + 9 cycles.  */
  if (IN_RANGE (cycles, 83886082, 0xFFFFFFFF))
    {
      loop_count = ((cycles - 9) / 6) + 1;
      cycles_used = ((loop_count - 1) * 6) + 9;
      emit_insn (gen_delay_cycles_4 (gen_int_mode (loop_count, SImode)));
      cycles -= cycles_used;
    }

  /* 24-bit loop: consumes (loop_count - 1) * 5 + 7 cycles,
     counter capped at 0xFFFFFF.  */
  if (IN_RANGE (cycles, 262145, 83886081))
    {
      loop_count = ((cycles - 7) / 5) + 1;
      if (loop_count > 0xFFFFFF)
	loop_count = 0xFFFFFF;
      cycles_used = ((loop_count - 1) * 5) + 7;
      emit_insn (gen_delay_cycles_3 (gen_int_mode (loop_count, SImode)));
      cycles -= cycles_used;
    }

  /* 16-bit loop: consumes (loop_count - 1) * 4 + 5 cycles,
     counter capped at 0xFFFF.  */
  if (IN_RANGE (cycles, 768, 262144))
    {
      loop_count = ((cycles - 5) / 4) + 1;
      if (loop_count > 0xFFFF)
	loop_count = 0xFFFF;
      cycles_used = ((loop_count - 1) * 4) + 5;
      emit_insn (gen_delay_cycles_2 (gen_int_mode (loop_count, HImode)));
      cycles -= cycles_used;
    }

  /* 8-bit loop: consumes loop_count * 3 cycles, counter capped
     at 255.  */
  if (IN_RANGE (cycles, 6, 767))
    {
      loop_count = cycles / 3;
      if (loop_count > 255)
	loop_count = 255;
      cycles_used = loop_count * 3;
      emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, QImode)));
      cycles -= cycles_used;
    }

  /* Remaining cycles: 2-cycle and 1-cycle NOP insns.  */
  while (cycles >= 2)
    {
      emit_insn (gen_nopv (GEN_INT(2)));
      cycles -= 2;
    }

  if (cycles == 1)
    {
      emit_insn (gen_nopv (GEN_INT(1)));
      cycles--;
    }
}
6551
6552 /* IDs for all the AVR builtins. */
6553
/* Identifiers for the AVR built-ins registered by avr_init_builtins;
   DECL_FUNCTION_CODE of a built-in's decl holds one of these values.  */
enum avr_builtin_id
  {
    AVR_BUILTIN_NOP,
    AVR_BUILTIN_SEI,
    AVR_BUILTIN_CLI,
    AVR_BUILTIN_WDR,
    AVR_BUILTIN_SLEEP,
    AVR_BUILTIN_SWAP,
    AVR_BUILTIN_FMUL,
    AVR_BUILTIN_FMULS,
    AVR_BUILTIN_FMULSU,
    AVR_BUILTIN_DELAY_CYCLES
  };
6567
/* Register built-in NAME with function type TYPE and id CODE in the
   machine-dependent (BUILT_IN_MD) built-in space.  */
#define DEF_BUILTIN(NAME, TYPE, CODE)                                   \
  do                                                                    \
    {                                                                   \
      add_builtin_function ((NAME), (TYPE), (CODE), BUILT_IN_MD,        \
                            NULL, NULL_TREE);                           \
    } while (0)
6574
6575
6576 /* Implement `TARGET_INIT_BUILTINS' */
6577 /* Set up all builtin functions for this target. */
6578
6579 static void
6580 avr_init_builtins (void)
6581 {
6582 tree void_ftype_void
6583 = build_function_type_list (void_type_node, NULL_TREE);
6584 tree uchar_ftype_uchar
6585 = build_function_type_list (unsigned_char_type_node,
6586 unsigned_char_type_node,
6587 NULL_TREE);
6588 tree uint_ftype_uchar_uchar
6589 = build_function_type_list (unsigned_type_node,
6590 unsigned_char_type_node,
6591 unsigned_char_type_node,
6592 NULL_TREE);
6593 tree int_ftype_char_char
6594 = build_function_type_list (integer_type_node,
6595 char_type_node,
6596 char_type_node,
6597 NULL_TREE);
6598 tree int_ftype_char_uchar
6599 = build_function_type_list (integer_type_node,
6600 char_type_node,
6601 unsigned_char_type_node,
6602 NULL_TREE);
6603 tree void_ftype_ulong
6604 = build_function_type_list (void_type_node,
6605 long_unsigned_type_node,
6606 NULL_TREE);
6607
6608 DEF_BUILTIN ("__builtin_avr_nop", void_ftype_void, AVR_BUILTIN_NOP);
6609 DEF_BUILTIN ("__builtin_avr_sei", void_ftype_void, AVR_BUILTIN_SEI);
6610 DEF_BUILTIN ("__builtin_avr_cli", void_ftype_void, AVR_BUILTIN_CLI);
6611 DEF_BUILTIN ("__builtin_avr_wdr", void_ftype_void, AVR_BUILTIN_WDR);
6612 DEF_BUILTIN ("__builtin_avr_sleep", void_ftype_void, AVR_BUILTIN_SLEEP);
6613 DEF_BUILTIN ("__builtin_avr_swap", uchar_ftype_uchar, AVR_BUILTIN_SWAP);
6614 DEF_BUILTIN ("__builtin_avr_delay_cycles", void_ftype_ulong,
6615 AVR_BUILTIN_DELAY_CYCLES);
6616
6617 if (AVR_HAVE_MUL)
6618 {
6619 /* FIXME: If !AVR_HAVE_MUL, make respective functions available
6620 in libgcc. For fmul and fmuls this is straight forward with
6621 upcoming fixed point support. */
6622
6623 DEF_BUILTIN ("__builtin_avr_fmul", uint_ftype_uchar_uchar,
6624 AVR_BUILTIN_FMUL);
6625 DEF_BUILTIN ("__builtin_avr_fmuls", int_ftype_char_char,
6626 AVR_BUILTIN_FMULS);
6627 DEF_BUILTIN ("__builtin_avr_fmulsu", int_ftype_char_uchar,
6628 AVR_BUILTIN_FMULSU);
6629 }
6630 }
6631
6632 #undef DEF_BUILTIN
6633
/* Maps a built-in id to the insn pattern that implements it; used by
   the expander tables below.  */
struct avr_builtin_description
{
  const enum insn_code icode;   /* insn pattern expanding the built-in */
  const char *const name;       /* user-visible built-in name */
  const enum avr_builtin_id id; /* matching avr_builtin_id value */
};
6640
/* Built-ins taking one operand; expanded by avr_expand_unop_builtin.  */
static const struct avr_builtin_description
bdesc_1arg[] =
  {
    { CODE_FOR_rotlqi3_4, "__builtin_avr_swap", AVR_BUILTIN_SWAP }
  };
6646
/* Built-ins taking two operands; expanded by avr_expand_binop_builtin.  */
static const struct avr_builtin_description
bdesc_2arg[] =
  {
    { CODE_FOR_fmul, "__builtin_avr_fmul", AVR_BUILTIN_FMUL },
    { CODE_FOR_fmuls, "__builtin_avr_fmuls", AVR_BUILTIN_FMULS },
    { CODE_FOR_fmulsu, "__builtin_avr_fmulsu", AVR_BUILTIN_FMULSU }
  };
6654
6655 /* Subroutine of avr_expand_builtin to take care of unop insns. */
6656
6657 static rtx
6658 avr_expand_unop_builtin (enum insn_code icode, tree exp,
6659 rtx target)
6660 {
6661 rtx pat;
6662 tree arg0 = CALL_EXPR_ARG (exp, 0);
6663 rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6664 enum machine_mode op0mode = GET_MODE (op0);
6665 enum machine_mode tmode = insn_data[icode].operand[0].mode;
6666 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
6667
6668 if (! target
6669 || GET_MODE (target) != tmode
6670 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
6671 {
6672 target = gen_reg_rtx (tmode);
6673 }
6674
6675 if (op0mode == SImode && mode0 == HImode)
6676 {
6677 op0mode = HImode;
6678 op0 = gen_lowpart (HImode, op0);
6679 }
6680
6681 gcc_assert (op0mode == mode0 || op0mode == VOIDmode);
6682
6683 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
6684 op0 = copy_to_mode_reg (mode0, op0);
6685
6686 pat = GEN_FCN (icode) (target, op0);
6687 if (! pat)
6688 return 0;
6689
6690 emit_insn (pat);
6691
6692 return target;
6693 }
6694
6695
6696 /* Subroutine of avr_expand_builtin to take care of binop insns. */
6697
/* Expand a two-operand built-in call EXP through insn pattern ICODE,
   placing the result in TARGET if it is suitable (otherwise a fresh
   register).  Returns the result rtx, or 0 if the pattern could not
   be generated.  */
static rtx
avr_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  rtx op1 = expand_expr (arg1, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  enum machine_mode op0mode = GET_MODE (op0);
  enum machine_mode op1mode = GET_MODE (op1);
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  enum machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Get a fresh target register if the supplied one is missing or
     unsuitable for the insn's result operand.  */
  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    {
      target = gen_reg_rtx (tmode);
    }

  /* Narrow SImode (or mode-less constant) inputs to the HImode the
     insn expects.  */
  if ((op0mode == SImode || op0mode == VOIDmode) && mode0 == HImode)
    {
      op0mode = HImode;
      op0 = gen_lowpart (HImode, op0);
    }

  if ((op1mode == SImode || op1mode == VOIDmode) && mode1 == HImode)
    {
      op1mode = HImode;
      op1 = gen_lowpart (HImode, op1);
    }

  /* In case the insn wants input operands in modes different from
     the result, abort.  */

  gcc_assert ((op0mode == mode0 || op0mode == VOIDmode)
	      && (op1mode == mode1 || op1mode == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);

  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);

  if (! pat)
    return 0;

  emit_insn (pat);
  return target;
}
6751
6752
6753 /* Expand an expression EXP that calls a built-in function,
6754 with result going to TARGET if that's convenient
6755 (and in mode MODE if that's convenient).
6756 SUBTARGET may be used as the target for computing one of EXP's operands.
6757 IGNORE is nonzero if the value is to be ignored. */
6758
6759 static rtx
6760 avr_expand_builtin (tree exp, rtx target,
6761 rtx subtarget ATTRIBUTE_UNUSED,
6762 enum machine_mode mode ATTRIBUTE_UNUSED,
6763 int ignore ATTRIBUTE_UNUSED)
6764 {
6765 size_t i;
6766 const struct avr_builtin_description *d;
6767 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6768 unsigned int id = DECL_FUNCTION_CODE (fndecl);
6769 tree arg0;
6770 rtx op0;
6771
6772 switch (id)
6773 {
6774 case AVR_BUILTIN_NOP:
6775 emit_insn (gen_nopv (GEN_INT(1)));
6776 return 0;
6777
6778 case AVR_BUILTIN_SEI:
6779 emit_insn (gen_enable_interrupt ());
6780 return 0;
6781
6782 case AVR_BUILTIN_CLI:
6783 emit_insn (gen_disable_interrupt ());
6784 return 0;
6785
6786 case AVR_BUILTIN_WDR:
6787 emit_insn (gen_wdr ());
6788 return 0;
6789
6790 case AVR_BUILTIN_SLEEP:
6791 emit_insn (gen_sleep ());
6792 return 0;
6793
6794 case AVR_BUILTIN_DELAY_CYCLES:
6795 {
6796 arg0 = CALL_EXPR_ARG (exp, 0);
6797 op0 = expand_expr (arg0, NULL_RTX, VOIDmode, EXPAND_NORMAL);
6798
6799 if (! CONST_INT_P (op0))
6800 error ("__builtin_avr_delay_cycles expects a compile time integer constant.");
6801
6802 avr_expand_delay_cycles (op0);
6803 return 0;
6804 }
6805 }
6806
6807 for (i = 0, d = bdesc_1arg; i < ARRAY_SIZE (bdesc_1arg); i++, d++)
6808 if (d->id == id)
6809 return avr_expand_unop_builtin (d->icode, exp, target);
6810
6811 for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
6812 if (d->id == id)
6813 return avr_expand_binop_builtin (d->icode, exp, target);
6814
6815 gcc_unreachable ();
6816 }
6817
6818
6819 #include "gt-avr.h"