1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published
9 by the Free Software Foundation; either version 3, or (at your
10 option) any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
15 License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "conditions.h"
31 #include "output.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "recog.h"
37 #include "diagnostic-core.h"
38 #include "toplev.h"
39 #include "ggc.h"
40 #include "integrate.h"
41 #include "df.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "target-def.h"
45 #include "tm-constrs.h"
46
47 /* Array of valid operand punctuation characters. */
48 static char m32r_punct_chars[256];
49
50 /* Selected code model. */
51 enum m32r_model m32r_model = M32R_MODEL_DEFAULT;
52
53 /* Selected SDA support. */
54 enum m32r_sdata m32r_sdata = M32R_SDATA_DEFAULT;
55
56 /* Machine-specific symbol_ref flags. */
57 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
58 #define SYMBOL_REF_MODEL(X) \
59 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
60
61 /* For string literals, etc. */
62 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
63
 64 /* Forward declarations. */
65 static bool m32r_handle_option (size_t, const char *, int);
66 static void m32r_option_override (void);
67 static void init_reg_tables (void);
68 static void block_move_call (rtx, rtx, rtx);
69 static int m32r_is_insn (rtx);
70 static rtx m32r_legitimize_address (rtx, rtx, enum machine_mode);
71 static bool m32r_mode_dependent_address_p (const_rtx);
72 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
73 static void m32r_print_operand (FILE *, rtx, int);
74 static void m32r_print_operand_address (FILE *, rtx);
75 static bool m32r_print_operand_punct_valid_p (unsigned char code);
76 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
77 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
78
79 static void m32r_file_start (void);
80
81 static int m32r_adjust_priority (rtx, int);
82 static int m32r_issue_rate (void);
83
84 static void m32r_encode_section_info (tree, rtx, int);
85 static bool m32r_in_small_data_p (const_tree);
86 static bool m32r_return_in_memory (const_tree, const_tree);
87 static rtx m32r_function_value (const_tree, const_tree, bool);
88 static rtx m32r_libcall_value (enum machine_mode, const_rtx);
89 static bool m32r_function_value_regno_p (const unsigned int);
90 static void m32r_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
91 tree, int *, int);
92 static void init_idents (void);
93 static bool m32r_rtx_costs (rtx, int, int, int *, bool speed);
94 static int m32r_memory_move_cost (enum machine_mode, reg_class_t, bool);
95 static bool m32r_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
96 const_tree, bool);
97 static int m32r_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
98 tree, bool);
99 static rtx m32r_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
100 const_tree, bool);
101 static void m32r_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
102 const_tree, bool);
103 static bool m32r_can_eliminate (const int, const int);
104 static void m32r_trampoline_init (rtx, tree, rtx);
105 \f
106 /* M32R specific attributes. */
107
108 static const struct attribute_spec m32r_attribute_table[] =
109 {
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
111 { "interrupt", 0, 0, true, false, false, NULL },
112 { "model", 1, 1, true, false, false, m32r_handle_model_attribute },
113 { NULL, 0, 0, false, false, false, NULL }
114 };
115 \f
116 /* Initialize the GCC target structure. */
117 #undef TARGET_ATTRIBUTE_TABLE
118 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
119
120 #undef TARGET_LEGITIMIZE_ADDRESS
121 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
122 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
123 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
124
125 #undef TARGET_ASM_ALIGNED_HI_OP
126 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
127 #undef TARGET_ASM_ALIGNED_SI_OP
128 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
129
130 #undef TARGET_PRINT_OPERAND
131 #define TARGET_PRINT_OPERAND m32r_print_operand
132 #undef TARGET_PRINT_OPERAND_ADDRESS
133 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
134 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
135 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
136
137 #undef TARGET_ASM_FUNCTION_PROLOGUE
138 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
139 #undef TARGET_ASM_FUNCTION_EPILOGUE
140 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
141
142 #undef TARGET_ASM_FILE_START
143 #define TARGET_ASM_FILE_START m32r_file_start
144
145 #undef TARGET_SCHED_ADJUST_PRIORITY
146 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
147 #undef TARGET_SCHED_ISSUE_RATE
148 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
149
150 #undef TARGET_DEFAULT_TARGET_FLAGS
151 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_CPU_DEFAULT
152 #undef TARGET_HANDLE_OPTION
153 #define TARGET_HANDLE_OPTION m32r_handle_option
154 #undef TARGET_OPTION_OVERRIDE
155 #define TARGET_OPTION_OVERRIDE m32r_option_override
156
157 #undef TARGET_ENCODE_SECTION_INFO
158 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
159 #undef TARGET_IN_SMALL_DATA_P
160 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
161
162
 163 #undef TARGET_MEMORY_MOVE_COST
 164 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
165 #undef TARGET_RTX_COSTS
166 #define TARGET_RTX_COSTS m32r_rtx_costs
167 #undef TARGET_ADDRESS_COST
168 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
169
170 #undef TARGET_PROMOTE_PROTOTYPES
171 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
172 #undef TARGET_RETURN_IN_MEMORY
173 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
174
175 #undef TARGET_FUNCTION_VALUE
176 #define TARGET_FUNCTION_VALUE m32r_function_value
177 #undef TARGET_LIBCALL_VALUE
178 #define TARGET_LIBCALL_VALUE m32r_libcall_value
179 #undef TARGET_FUNCTION_VALUE_REGNO_P
180 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
181
182 #undef TARGET_SETUP_INCOMING_VARARGS
183 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
184 #undef TARGET_MUST_PASS_IN_STACK
185 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
186 #undef TARGET_PASS_BY_REFERENCE
187 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
188 #undef TARGET_ARG_PARTIAL_BYTES
189 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
190 #undef TARGET_FUNCTION_ARG
191 #define TARGET_FUNCTION_ARG m32r_function_arg
192 #undef TARGET_FUNCTION_ARG_ADVANCE
193 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
194
195 #undef TARGET_CAN_ELIMINATE
196 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
197
198 #undef TARGET_TRAMPOLINE_INIT
199 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
200
201 struct gcc_target targetm = TARGET_INITIALIZER;
202 \f
203 /* Implement TARGET_HANDLE_OPTION. */
204
205 static bool
206 m32r_handle_option (size_t code, const char *arg, int value)
207 {
208 switch (code)
209 {
210 case OPT_G:
211 g_switch_value = value;
212 g_switch_set = true;
213 return true;
214
215 case OPT_m32r:
216 target_flags &= ~(MASK_M32R2 | MASK_M32RX);
217 return true;
218
219 case OPT_mmodel_:
220 if (strcmp (arg, "small") == 0)
221 m32r_model = M32R_MODEL_SMALL;
222 else if (strcmp (arg, "medium") == 0)
223 m32r_model = M32R_MODEL_MEDIUM;
224 else if (strcmp (arg, "large") == 0)
225 m32r_model = M32R_MODEL_LARGE;
226 else
227 return false;
228 return true;
229
230 case OPT_msdata_:
231 if (strcmp (arg, "none") == 0)
232 m32r_sdata = M32R_SDATA_NONE;
233 else if (strcmp (arg, "sdata") == 0)
234 m32r_sdata = M32R_SDATA_SDATA;
235 else if (strcmp (arg, "use") == 0)
236 m32r_sdata = M32R_SDATA_USE;
237 else
238 return false;
239 return true;
240
241 case OPT_mno_flush_func:
242 m32r_cache_flush_func = NULL;
243 return true;
244
245 case OPT_mflush_trap_:
246 return value <= 15;
247
248 case OPT_mno_flush_trap:
249 m32r_cache_flush_trap = -1;
250 return true;
251
252 default:
253 return true;
254 }
255 }
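/* Editorial aid, not part of the original source: the cases above
   correspond to command-line spellings such as
       -G <n>                       (small-data size threshold)
       -m32r                        (baseline M32R, clearing M32RX/M32R2)
       -mmodel=small|medium|large
       -msdata=none|sdata|use
       -mno-flush-func
       -mflush-trap=<n>             (accepted for n <= 15)
       -mno-flush-trap
   The authoritative spellings live in m32r.opt; this list is only a
   reader's summary of the switch above.  */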
256
257 /* Called by m32r_option_override to initialize various things. */
258
259 void
260 m32r_init (void)
261 {
262 init_reg_tables ();
263
264 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
265 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
266 m32r_punct_chars['#'] = 1;
267 m32r_punct_chars['@'] = 1; /* ??? no longer used */
268
269 /* Provide default value if not specified. */
270 if (!g_switch_set)
271 g_switch_value = SDATA_DEFAULT_SIZE;
272 }
273
274 static void
275 m32r_option_override (void)
276 {
277 /* These need to be done at start up.
278 It's convenient to do them here. */
279 m32r_init ();
280 SUBTARGET_OVERRIDE_OPTIONS;
281 }
282
283 /* Vectors to keep interesting information about registers where it can easily
 284 be got. We used to use the actual mode value as the bit number, but there
 285 are (or may be) more than 32 modes now. Instead we use two tables: one
286 indexed by hard register number, and one indexed by mode. */
287
288 /* The purpose of m32r_mode_class is to shrink the range of modes so that
289 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
290 mapped into one m32r_mode_class mode. */
291
292 enum m32r_mode_class
293 {
294 C_MODE,
295 S_MODE, D_MODE, T_MODE, O_MODE,
296 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
297 };
298
299 /* Modes for condition codes. */
300 #define C_MODES (1 << (int) C_MODE)
301
302 /* Modes for single-word and smaller quantities. */
303 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
304
305 /* Modes for double-word and smaller quantities. */
 306 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << (int) DF_MODE))
307
308 /* Modes for quad-word and smaller quantities. */
309 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
310
311 /* Modes for accumulators. */
312 #define A_MODES (1 << (int) A_MODE)
313
 314 /* Value is 1 if register/mode pair is acceptable on the M32R. */
315
316 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
317 {
318 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
319 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
320 S_MODES, C_MODES, A_MODES, A_MODES
321 };
322
323 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
324
325 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
326
327 static void
328 init_reg_tables (void)
329 {
330 int i;
331
332 for (i = 0; i < NUM_MACHINE_MODES; i++)
333 {
334 switch (GET_MODE_CLASS (i))
335 {
336 case MODE_INT:
337 case MODE_PARTIAL_INT:
338 case MODE_COMPLEX_INT:
339 if (GET_MODE_SIZE (i) <= 4)
340 m32r_mode_class[i] = 1 << (int) S_MODE;
341 else if (GET_MODE_SIZE (i) == 8)
342 m32r_mode_class[i] = 1 << (int) D_MODE;
343 else if (GET_MODE_SIZE (i) == 16)
344 m32r_mode_class[i] = 1 << (int) T_MODE;
345 else if (GET_MODE_SIZE (i) == 32)
346 m32r_mode_class[i] = 1 << (int) O_MODE;
347 else
348 m32r_mode_class[i] = 0;
349 break;
350 case MODE_FLOAT:
351 case MODE_COMPLEX_FLOAT:
352 if (GET_MODE_SIZE (i) <= 4)
353 m32r_mode_class[i] = 1 << (int) SF_MODE;
354 else if (GET_MODE_SIZE (i) == 8)
355 m32r_mode_class[i] = 1 << (int) DF_MODE;
356 else if (GET_MODE_SIZE (i) == 16)
357 m32r_mode_class[i] = 1 << (int) TF_MODE;
358 else if (GET_MODE_SIZE (i) == 32)
359 m32r_mode_class[i] = 1 << (int) OF_MODE;
360 else
361 m32r_mode_class[i] = 0;
362 break;
363 case MODE_CC:
364 m32r_mode_class[i] = 1 << (int) C_MODE;
365 break;
366 default:
367 m32r_mode_class[i] = 0;
368 break;
369 }
370 }
371
372 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
373 {
374 if (GPR_P (i))
375 m32r_regno_reg_class[i] = GENERAL_REGS;
376 else if (i == ARG_POINTER_REGNUM)
377 m32r_regno_reg_class[i] = GENERAL_REGS;
378 else
379 m32r_regno_reg_class[i] = NO_REGS;
380 }
381 }
382 \f
383 /* M32R specific attribute support.
384
385 interrupt - for interrupt functions
386
387 model - select code model used to access object
388
389 small: addresses use 24 bits, use bl to make calls
390 medium: addresses use 32 bits, use bl to make calls
391 large: addresses use 32 bits, use seth/add3/jl to make calls
392
393 Grep for MODEL in m32r.h for more info. */
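/* Editorial example, not part of the original source: the attribute is
   attached to an object or function declaration, e.g.

       extern int lookup_table[] __attribute__ ((model ("small")));
       void slow_path (void) __attribute__ ((model ("large")));

   The double-underscore spellings __small__, __medium__ and __large__
   are also accepted, as checked by m32r_handle_model_attribute below.  */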
394
395 static tree small_ident1;
396 static tree small_ident2;
397 static tree medium_ident1;
398 static tree medium_ident2;
399 static tree large_ident1;
400 static tree large_ident2;
401
402 static void
403 init_idents (void)
404 {
405 if (small_ident1 == 0)
406 {
407 small_ident1 = get_identifier ("small");
408 small_ident2 = get_identifier ("__small__");
409 medium_ident1 = get_identifier ("medium");
410 medium_ident2 = get_identifier ("__medium__");
411 large_ident1 = get_identifier ("large");
412 large_ident2 = get_identifier ("__large__");
413 }
414 }
415
 416 /* Handle a "model" attribute; arguments as in
417 struct attribute_spec.handler. */
418 static tree
419 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
420 tree args, int flags ATTRIBUTE_UNUSED,
421 bool *no_add_attrs)
422 {
423 tree arg;
424
425 init_idents ();
426 arg = TREE_VALUE (args);
427
428 if (arg != small_ident1
429 && arg != small_ident2
430 && arg != medium_ident1
431 && arg != medium_ident2
432 && arg != large_ident1
433 && arg != large_ident2)
434 {
435 warning (OPT_Wattributes, "invalid argument of %qs attribute",
436 IDENTIFIER_POINTER (name));
437 *no_add_attrs = true;
438 }
439
440 return NULL_TREE;
441 }
442 \f
443 /* Encode section information of DECL, which is either a VAR_DECL,
444 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
445
446 For the M32R we want to record:
447
448 - whether the object lives in .sdata/.sbss.
449 - what code model should be used to access the object
450 */
451
452 static void
453 m32r_encode_section_info (tree decl, rtx rtl, int first)
454 {
455 int extra_flags = 0;
456 tree model_attr;
457 enum m32r_model model;
458
459 default_encode_section_info (decl, rtl, first);
460
461 if (!DECL_P (decl))
462 return;
463
464 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
465 if (model_attr)
466 {
467 tree id;
468
469 init_idents ();
470
471 id = TREE_VALUE (TREE_VALUE (model_attr));
472
473 if (id == small_ident1 || id == small_ident2)
474 model = M32R_MODEL_SMALL;
475 else if (id == medium_ident1 || id == medium_ident2)
476 model = M32R_MODEL_MEDIUM;
477 else if (id == large_ident1 || id == large_ident2)
478 model = M32R_MODEL_LARGE;
479 else
480 gcc_unreachable (); /* shouldn't happen */
481 }
482 else
483 {
484 if (TARGET_MODEL_SMALL)
485 model = M32R_MODEL_SMALL;
486 else if (TARGET_MODEL_MEDIUM)
487 model = M32R_MODEL_MEDIUM;
488 else if (TARGET_MODEL_LARGE)
489 model = M32R_MODEL_LARGE;
490 else
491 gcc_unreachable (); /* shouldn't happen */
492 }
493 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
494
495 if (extra_flags)
496 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
497 }
498
499 /* Only mark the object as being small data area addressable if
500 it hasn't been explicitly marked with a code model.
501
502 The user can explicitly put an object in the small data area with the
503 section attribute. If the object is in sdata/sbss and marked with a
504 code model do both [put the object in .sdata and mark it as being
505 addressed with a specific code model - don't mark it as being addressed
506 with an SDA reloc though]. This is ok and might be useful at times. If
507 the object doesn't fit the linker will give an error. */
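/* Editorial example, not part of the original source: with -msdata=use
   in effect, given

       int counter;
       char big_buffer[4096];
       int pinned __attribute__ ((section (".sdata")));

   m32r_in_small_data_p below treats "pinned" as small-data addressable
   because of its explicit section, treats "counter" the same way as long
   as its size is within the -G threshold, and rejects "big_buffer" once
   it exceeds that threshold.  */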
508
509 static bool
510 m32r_in_small_data_p (const_tree decl)
511 {
512 const_tree section;
513
514 if (TREE_CODE (decl) != VAR_DECL)
515 return false;
516
517 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
518 return false;
519
520 section = DECL_SECTION_NAME (decl);
521 if (section)
522 {
523 const char *const name = TREE_STRING_POINTER (section);
524 if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0)
525 return true;
526 }
527 else
528 {
529 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
530 {
531 int size = int_size_in_bytes (TREE_TYPE (decl));
532
533 if (size > 0 && (unsigned HOST_WIDE_INT) size <= g_switch_value)
534 return true;
535 }
536 }
537
538 return false;
539 }
540
541 /* Do anything needed before RTL is emitted for each function. */
542
543 void
544 m32r_init_expanders (void)
545 {
546 /* ??? At one point there was code here. The function is left in
547 to make it easy to experiment. */
548 }
549 \f
550 int
551 call_operand (rtx op, enum machine_mode mode)
552 {
553 if (!MEM_P (op))
554 return 0;
555 op = XEXP (op, 0);
556 return call_address_operand (op, mode);
557 }
558
559 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
560
561 int
562 small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
563 {
564 if (! TARGET_SDATA_USE)
565 return 0;
566
567 if (GET_CODE (op) == SYMBOL_REF)
568 return SYMBOL_REF_SMALL_P (op);
569
570 if (GET_CODE (op) == CONST
571 && GET_CODE (XEXP (op, 0)) == PLUS
572 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
573 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
574 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
575
576 return 0;
577 }
578
579 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
580
581 int
582 addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
583 {
584 rtx sym;
585
586 if (flag_pic)
587 return 0;
588
589 if (GET_CODE (op) == LABEL_REF)
590 return TARGET_ADDR24;
591
592 if (GET_CODE (op) == SYMBOL_REF)
593 sym = op;
594 else if (GET_CODE (op) == CONST
595 && GET_CODE (XEXP (op, 0)) == PLUS
596 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
597 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
598 sym = XEXP (XEXP (op, 0), 0);
599 else
600 return 0;
601
602 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
603 return 1;
604
605 if (TARGET_ADDR24
606 && (CONSTANT_POOL_ADDRESS_P (sym)
607 || LIT_NAME_P (XSTR (sym, 0))))
608 return 1;
609
610 return 0;
611 }
612
613 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
614
615 int
616 addr32_operand (rtx op, enum machine_mode mode)
617 {
618 rtx sym;
619
620 if (GET_CODE (op) == LABEL_REF)
621 return TARGET_ADDR32;
622
623 if (GET_CODE (op) == SYMBOL_REF)
624 sym = op;
625 else if (GET_CODE (op) == CONST
626 && GET_CODE (XEXP (op, 0)) == PLUS
627 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
628 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
629 && ! flag_pic)
630 sym = XEXP (XEXP (op, 0), 0);
631 else
632 return 0;
633
634 return (! addr24_operand (sym, mode)
635 && ! small_data_operand (sym, mode));
636 }
637
638 /* Return 1 if OP is a function that can be called with the `bl' insn. */
639
640 int
641 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
642 {
643 if (flag_pic)
644 return 1;
645
646 if (GET_CODE (op) == SYMBOL_REF)
647 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
648
649 return TARGET_CALL26;
650 }
651
652 /* Return 1 if OP is a DImode const we want to handle inline.
653 This must match the code in the movdi pattern.
654 It is used by the 'G' CONST_DOUBLE_OK_FOR_LETTER. */
655
656 int
657 easy_di_const (rtx op)
658 {
659 rtx high_rtx, low_rtx;
660 HOST_WIDE_INT high, low;
661
662 split_double (op, &high_rtx, &low_rtx);
663 high = INTVAL (high_rtx);
664 low = INTVAL (low_rtx);
665 /* Pick constants loadable with 2 16-bit `ldi' insns. */
666 if (high >= -128 && high <= 127
667 && low >= -128 && low <= 127)
668 return 1;
669 return 0;
670 }
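/* Editorial worked example: the DImode constant 0x0000000500000003
   splits into high = 5 and low = 3; both values fit the signed 8-bit
   range tested above, so the movdi pattern can build the constant with
   two 16-bit `ldi' insns instead of loading it from memory.  */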
671
672 /* Return 1 if OP is a DFmode const we want to handle inline.
673 This must match the code in the movdf pattern.
674 It is used by the 'H' CONST_DOUBLE_OK_FOR_LETTER. */
675
676 int
677 easy_df_const (rtx op)
678 {
679 REAL_VALUE_TYPE r;
680 long l[2];
681
682 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
683 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
684 if (l[0] == 0 && l[1] == 0)
685 return 1;
686 if ((l[0] & 0xffff) == 0 && l[1] == 0)
687 return 1;
688 return 0;
689 }
690
691 /* Return 1 if OP is (mem (reg ...)).
692 This is used in insn length calcs. */
693
694 int
695 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
696 {
697 return MEM_P (op) && REG_P (XEXP (op, 0));
698 }
699
700 /* Return nonzero if TYPE must be passed by indirect reference. */
701
702 static bool
703 m32r_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
704 enum machine_mode mode, const_tree type,
705 bool named ATTRIBUTE_UNUSED)
706 {
707 int size;
708
709 if (type)
710 size = int_size_in_bytes (type);
711 else
712 size = GET_MODE_SIZE (mode);
713
714 return (size < 0 || size > 8);
715 }
716 \f
717 /* Comparisons. */
718
719 /* X and Y are two things to compare using CODE. Emit the compare insn and
720 return the rtx for compare [arg0 of the if_then_else].
721 If need_compare is true then the comparison insn must be generated, rather
722 than being subsumed into the following branch instruction. */
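/* Editorial note on the mapping below: for CODE == LE the table selects
   compare_code = LT, branch_code = EQ and must_swap = 1.  When the
   comparison is finally emitted the operands are swapped so the hardware
   computes y < x, and the branch tests the condition bit being clear,
   since x <= y is equivalent to !(y < x).  */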
723
724 rtx
725 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
726 {
727 enum rtx_code compare_code;
728 enum rtx_code branch_code;
729 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
730 int must_swap = 0;
731
732 switch (code)
733 {
734 case EQ: compare_code = EQ; branch_code = NE; break;
735 case NE: compare_code = EQ; branch_code = EQ; break;
736 case LT: compare_code = LT; branch_code = NE; break;
737 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
738 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
739 case GE: compare_code = LT; branch_code = EQ; break;
740 case LTU: compare_code = LTU; branch_code = NE; break;
741 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
742 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
743 case GEU: compare_code = LTU; branch_code = EQ; break;
744
745 default:
746 gcc_unreachable ();
747 }
748
749 if (need_compare)
750 {
751 switch (compare_code)
752 {
753 case EQ:
754 if (satisfies_constraint_P (y) /* Reg equal to small const. */
755 && y != const0_rtx)
756 {
757 rtx tmp = gen_reg_rtx (SImode);
758
759 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
760 x = tmp;
761 y = const0_rtx;
762 }
763 else if (CONSTANT_P (y)) /* Reg equal to const. */
764 {
765 rtx tmp = force_reg (GET_MODE (x), y);
766 y = tmp;
767 }
768
769 if (register_operand (y, SImode) /* Reg equal to reg. */
770 || y == const0_rtx) /* Reg equal to zero. */
771 {
772 emit_insn (gen_cmp_eqsi_insn (x, y));
773
774 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
775 }
776 break;
777
778 case LT:
779 if (register_operand (y, SImode)
780 || satisfies_constraint_P (y))
781 {
782 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
783
784 switch (code)
785 {
786 case LT:
787 emit_insn (gen_cmp_ltsi_insn (x, y));
788 code = EQ;
789 break;
790 case LE:
791 if (y == const0_rtx)
792 tmp = const1_rtx;
793 else
794 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
795 emit_insn (gen_cmp_ltsi_insn (x, tmp));
796 code = EQ;
797 break;
798 case GT:
799 if (CONST_INT_P (y))
800 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
801 else
802 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
803 emit_insn (gen_cmp_ltsi_insn (x, tmp));
804 code = NE;
805 break;
806 case GE:
807 emit_insn (gen_cmp_ltsi_insn (x, y));
808 code = NE;
809 break;
810 default:
811 gcc_unreachable ();
812 }
813
814 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
815 }
816 break;
817
818 case LTU:
819 if (register_operand (y, SImode)
820 || satisfies_constraint_P (y))
821 {
822 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
823
824 switch (code)
825 {
826 case LTU:
827 emit_insn (gen_cmp_ltusi_insn (x, y));
828 code = EQ;
829 break;
830 case LEU:
831 if (y == const0_rtx)
832 tmp = const1_rtx;
833 else
834 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
835 emit_insn (gen_cmp_ltusi_insn (x, tmp));
836 code = EQ;
837 break;
838 case GTU:
839 if (CONST_INT_P (y))
840 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
841 else
842 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
843 emit_insn (gen_cmp_ltusi_insn (x, tmp));
844 code = NE;
845 break;
846 case GEU:
847 emit_insn (gen_cmp_ltusi_insn (x, y));
848 code = NE;
849 break;
850 default:
851 gcc_unreachable ();
852 }
853
854 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
855 }
856 break;
857
858 default:
859 gcc_unreachable ();
860 }
861 }
862 else
863 {
864 /* Reg/reg equal comparison. */
865 if (compare_code == EQ
866 && register_operand (y, SImode))
867 return gen_rtx_fmt_ee (code, CCmode, x, y);
868
869 /* Reg/zero signed comparison. */
870 if ((compare_code == EQ || compare_code == LT)
871 && y == const0_rtx)
872 return gen_rtx_fmt_ee (code, CCmode, x, y);
873
874 /* Reg/smallconst equal comparison. */
875 if (compare_code == EQ
876 && satisfies_constraint_P (y))
877 {
878 rtx tmp = gen_reg_rtx (SImode);
879
880 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
881 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
882 }
883
884 /* Reg/const equal comparison. */
885 if (compare_code == EQ
886 && CONSTANT_P (y))
887 {
888 rtx tmp = force_reg (GET_MODE (x), y);
889
890 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
891 }
892 }
893
894 if (CONSTANT_P (y))
895 {
896 if (must_swap)
897 y = force_reg (GET_MODE (x), y);
898 else
899 {
900 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
901
902 if (! ok_const)
903 y = force_reg (GET_MODE (x), y);
904 }
905 }
906
907 switch (compare_code)
908 {
909 case EQ :
910 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
911 break;
912 case LT :
913 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
914 break;
915 case LTU :
916 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
917 break;
918
919 default:
920 gcc_unreachable ();
921 }
922
923 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
924 }
925
926 bool
927 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
928 {
929 enum machine_mode mode = GET_MODE (op0);
930
931 gcc_assert (mode == SImode);
932 switch (code)
933 {
934 case EQ:
935 if (!register_operand (op1, mode))
936 op1 = force_reg (mode, op1);
937
938 if (TARGET_M32RX || TARGET_M32R2)
939 {
940 if (!reg_or_zero_operand (op2, mode))
941 op2 = force_reg (mode, op2);
942
943 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
944 return true;
945 }
946 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
947 {
948 emit_insn (gen_seq_zero_insn (op0, op1));
949 return true;
950 }
951
952 if (!reg_or_eq_int16_operand (op2, mode))
953 op2 = force_reg (mode, op2);
954
955 emit_insn (gen_seq_insn (op0, op1, op2));
956 return true;
957
958 case NE:
959 if (!CONST_INT_P (op2)
960 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
961 {
962 rtx reg;
963
964 if (reload_completed || reload_in_progress)
965 return false;
966
967 reg = gen_reg_rtx (SImode);
968 emit_insn (gen_xorsi3 (reg, op1, op2));
969 op1 = reg;
970
971 if (!register_operand (op1, mode))
972 op1 = force_reg (mode, op1);
973
974 emit_insn (gen_sne_zero_insn (op0, op1));
975 return true;
976 }
977 return false;
978
979 case LT:
980 case GT:
981 if (code == GT)
982 {
983 rtx tmp = op2;
984 op2 = op1;
985 op1 = tmp;
986 code = LT;
987 }
988
989 if (!register_operand (op1, mode))
990 op1 = force_reg (mode, op1);
991
992 if (!reg_or_int16_operand (op2, mode))
993 op2 = force_reg (mode, op2);
994
995 emit_insn (gen_slt_insn (op0, op1, op2));
996 return true;
997
998 case LTU:
999 case GTU:
1000 if (code == GTU)
1001 {
1002 rtx tmp = op2;
1003 op2 = op1;
1004 op1 = tmp;
1005 code = LTU;
1006 }
1007
1008 if (!register_operand (op1, mode))
1009 op1 = force_reg (mode, op1);
1010
1011 if (!reg_or_int16_operand (op2, mode))
1012 op2 = force_reg (mode, op2);
1013
1014 emit_insn (gen_sltu_insn (op0, op1, op2));
1015 return true;
1016
1017 case GE:
1018 case GEU:
1019 if (!register_operand (op1, mode))
1020 op1 = force_reg (mode, op1);
1021
1022 if (!reg_or_int16_operand (op2, mode))
1023 op2 = force_reg (mode, op2);
1024
1025 if (code == GE)
1026 emit_insn (gen_sge_insn (op0, op1, op2));
1027 else
1028 emit_insn (gen_sgeu_insn (op0, op1, op2));
1029 return true;
1030
1031 case LE:
1032 case LEU:
1033 if (!register_operand (op1, mode))
1034 op1 = force_reg (mode, op1);
1035
1036 if (CONST_INT_P (op2))
1037 {
1038 HOST_WIDE_INT value = INTVAL (op2);
1039 if (value >= 2147483647)
1040 {
1041 emit_move_insn (op0, const1_rtx);
1042 return true;
1043 }
1044
1045 op2 = GEN_INT (value + 1);
1046 if (value < -32768 || value >= 32767)
1047 op2 = force_reg (mode, op2);
1048
1049 if (code == LEU)
1050 emit_insn (gen_sltu_insn (op0, op1, op2));
1051 else
1052 emit_insn (gen_slt_insn (op0, op1, op2));
1053 return true;
1054 }
1055
1056 if (!register_operand (op2, mode))
1057 op2 = force_reg (mode, op2);
1058
1059 if (code == LEU)
1060 emit_insn (gen_sleu_insn (op0, op1, op2));
1061 else
1062 emit_insn (gen_sle_insn (op0, op1, op2));
1063 return true;
1064
1065 default:
1066 gcc_unreachable ();
1067 }
1068 }
1069
1070 \f
1071 /* Split a 2 word move (DI or DF) into component parts. */
1072
1073 rtx
1074 gen_split_move_double (rtx operands[])
1075 {
1076 enum machine_mode mode = GET_MODE (operands[0]);
1077 rtx dest = operands[0];
1078 rtx src = operands[1];
1079 rtx val;
1080
1081 /* We might have (SUBREG (MEM)) here, so just get rid of the
1082 subregs to make this code simpler. It is safe to call
1083 alter_subreg any time after reload. */
1084 if (GET_CODE (dest) == SUBREG)
1085 alter_subreg (&dest);
1086 if (GET_CODE (src) == SUBREG)
1087 alter_subreg (&src);
1088
1089 start_sequence ();
1090 if (REG_P (dest))
1091 {
1092 int dregno = REGNO (dest);
1093
1094 /* Reg = reg. */
1095 if (REG_P (src))
1096 {
1097 int sregno = REGNO (src);
1098
1099 int reverse = (dregno == sregno + 1);
1100
1101 /* We normally copy the low-numbered register first. However, if
 1102 the first register of operand 0 is the same as the second register of
1103 operand 1, we must copy in the opposite order. */
1104 emit_insn (gen_rtx_SET (VOIDmode,
1105 operand_subword (dest, reverse, TRUE, mode),
1106 operand_subword (src, reverse, TRUE, mode)));
1107
1108 emit_insn (gen_rtx_SET (VOIDmode,
1109 operand_subword (dest, !reverse, TRUE, mode),
1110 operand_subword (src, !reverse, TRUE, mode)));
1111 }
1112
1113 /* Reg = constant. */
1114 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1115 {
1116 rtx words[2];
1117 split_double (src, &words[0], &words[1]);
1118 emit_insn (gen_rtx_SET (VOIDmode,
1119 operand_subword (dest, 0, TRUE, mode),
1120 words[0]));
1121
1122 emit_insn (gen_rtx_SET (VOIDmode,
1123 operand_subword (dest, 1, TRUE, mode),
1124 words[1]));
1125 }
1126
1127 /* Reg = mem. */
1128 else if (MEM_P (src))
1129 {
1130 /* If the high-address word is used in the address, we must load it
1131 last. Otherwise, load it first. */
1132 int reverse
1133 = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);
1134
1135 /* We used to optimize loads from single registers as
1136
1137 ld r1,r3+; ld r2,r3
1138
1139 if r3 were not used subsequently. However, the REG_NOTES aren't
1140 propagated correctly by the reload phase, and it can cause bad
1141 code to be generated. We could still try:
1142
1143 ld r1,r3+; ld r2,r3; addi r3,-4
1144
1145 which saves 2 bytes and doesn't force longword alignment. */
1146 emit_insn (gen_rtx_SET (VOIDmode,
1147 operand_subword (dest, reverse, TRUE, mode),
1148 adjust_address (src, SImode,
1149 reverse * UNITS_PER_WORD)));
1150
1151 emit_insn (gen_rtx_SET (VOIDmode,
1152 operand_subword (dest, !reverse, TRUE, mode),
1153 adjust_address (src, SImode,
1154 !reverse * UNITS_PER_WORD)));
1155 }
1156 else
1157 gcc_unreachable ();
1158 }
1159
1160 /* Mem = reg. */
 1161 /* We used to optimize stores to single registers as
1162
1163 st r1,r3; st r2,+r3
1164
1165 if r3 were not used subsequently. However, the REG_NOTES aren't
1166 propagated correctly by the reload phase, and it can cause bad
1167 code to be generated. We could still try:
1168
1169 st r1,r3; st r2,+r3; addi r3,-4
1170
1171 which saves 2 bytes and doesn't force longword alignment. */
1172 else if (MEM_P (dest) && REG_P (src))
1173 {
1174 emit_insn (gen_rtx_SET (VOIDmode,
1175 adjust_address (dest, SImode, 0),
1176 operand_subword (src, 0, TRUE, mode)));
1177
1178 emit_insn (gen_rtx_SET (VOIDmode,
1179 adjust_address (dest, SImode, UNITS_PER_WORD),
1180 operand_subword (src, 1, TRUE, mode)));
1181 }
1182
1183 else
1184 gcc_unreachable ();
1185
1186 val = get_insns ();
1187 end_sequence ();
1188 return val;
1189 }
1190
1191 \f
1192 static int
1193 m32r_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1194 tree type, bool named ATTRIBUTE_UNUSED)
1195 {
1196 int words;
1197 unsigned int size =
1198 (((mode == BLKmode && type)
1199 ? (unsigned int) int_size_in_bytes (type)
1200 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1201 / UNITS_PER_WORD;
1202
1203 if (*cum >= M32R_MAX_PARM_REGS)
1204 words = 0;
1205 else if (*cum + size > M32R_MAX_PARM_REGS)
1206 words = (*cum + size) - M32R_MAX_PARM_REGS;
1207 else
1208 words = 0;
1209
1210 return words * UNITS_PER_WORD;
1211 }
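/* Editorial worked example (assuming M32R_MAX_PARM_REGS is 4 and
   UNITS_PER_WORD is 4, the usual values for this port): with *cum == 2
   and an argument needing three words, size = 3, so
   words = (2 + 3) - 4 = 1 and the function returns 4 bytes; arguments
   that fit entirely in the remaining registers, or that start after all
   argument registers are used, return 0.  */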
1212
1213 /* The ROUND_ADVANCE* macros are local to this file. */
1214 /* Round SIZE up to a word boundary. */
1215 #define ROUND_ADVANCE(SIZE) \
1216 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1217
1218 /* Round arg MODE/TYPE up to the next word boundary. */
1219 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1220 ((MODE) == BLKmode \
1221 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1222 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1223
1224 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1225 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1226
1227 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1228 a reg. This includes arguments that have to be passed by reference as the
1229 pointer to them is passed in a reg if one is available (and that is what
1230 we're given).
1231 This macro is only used in this file. */
1232 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1233 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
1234
1235 /* Determine where to put an argument to a function.
1236 Value is zero to push the argument on the stack,
1237 or a hard register in which to store the argument.
1238
1239 MODE is the argument's machine mode.
1240 TYPE is the data type of the argument (as a tree).
1241 This is null for libcalls where that information may
1242 not be available.
1243 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1244 the preceding args and about the function being called.
1245 NAMED is nonzero if this argument is a named parameter
1246 (otherwise it is an extra parameter matching an ellipsis). */
1247 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1248 and the rest are pushed. */
1249
1250 static rtx
1251 m32r_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1252 const_tree type, bool named ATTRIBUTE_UNUSED)
1253 {
1254 return (PASS_IN_REG_P (*cum, mode, type)
1255 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1256 : NULL_RTX);
1257 }
1258
1259 /* Update the data in CUM to advance over an argument
1260 of mode MODE and data type TYPE.
1261 (TYPE is null for libcalls where that information may not be available.) */
1262
1263 static void
1264 m32r_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1265 const_tree type, bool named ATTRIBUTE_UNUSED)
1266 {
1267 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1268 + ROUND_ADVANCE_ARG (mode, type));
1269 }
1270
1271 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1272
1273 static bool
1274 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1275 {
1276 return m32r_pass_by_reference (NULL, TYPE_MODE (type), type, false);
1277 }
1278
1279 /* Worker function for TARGET_FUNCTION_VALUE. */
1280
1281 static rtx
1282 m32r_function_value (const_tree valtype,
1283 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1284 bool outgoing ATTRIBUTE_UNUSED)
1285 {
1286 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1287 }
1288
1289 /* Worker function for TARGET_LIBCALL_VALUE. */
1290
1291 static rtx
1292 m32r_libcall_value (enum machine_mode mode,
1293 const_rtx fun ATTRIBUTE_UNUSED)
1294 {
1295 return gen_rtx_REG (mode, 0);
1296 }
1297
1298 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1299
1300 ??? What about r1 in DI/DF values. */
1301
1302 static bool
1303 m32r_function_value_regno_p (const unsigned int regno)
1304 {
1305 return (regno == 0);
1306 }
1307
1308 /* Do any needed setup for a variadic function. For the M32R, we must
1309 create a register parameter block, and then copy any anonymous arguments
1310 in registers to memory.
1311
1312 CUM has not been updated for the last named argument which has type TYPE
1313 and mode MODE, and we rely on this fact. */
1314
1315 static void
1316 m32r_setup_incoming_varargs (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1317 tree type, int *pretend_size, int no_rtl)
1318 {
1319 int first_anon_arg;
1320
1321 if (no_rtl)
1322 return;
1323
1324 /* All BLKmode values are passed by reference. */
1325 gcc_assert (mode != BLKmode);
1326
1327 first_anon_arg = (ROUND_ADVANCE_CUM (*cum, mode, type)
1328 + ROUND_ADVANCE_ARG (mode, type));
1329
1330 if (first_anon_arg < M32R_MAX_PARM_REGS)
1331 {
1332 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1333 int first_reg_offset = first_anon_arg;
1334 /* Size in words to "pretend" allocate. */
1335 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1336 rtx regblock;
1337
1338 regblock = gen_frame_mem (BLKmode,
1339 plus_constant (arg_pointer_rtx,
1340 FIRST_PARM_OFFSET (0)));
1341 set_mem_alias_set (regblock, get_varargs_alias_set ());
1342 move_block_from_reg (first_reg_offset, regblock, size);
1343
1344 *pretend_size = (size * UNITS_PER_WORD);
1345 }
1346 }
1347
1348 \f
 1349 /* Return true if INSN is a real instruction, i.e. not a USE, CLOBBER or jump table. */
1350
1351 static int
1352 m32r_is_insn (rtx insn)
1353 {
1354 return (NONDEBUG_INSN_P (insn)
1355 && GET_CODE (PATTERN (insn)) != USE
1356 && GET_CODE (PATTERN (insn)) != CLOBBER
1357 && GET_CODE (PATTERN (insn)) != ADDR_VEC);
1358 }
1359
1360 /* Increase the priority of long instructions so that the
1361 short instructions are scheduled ahead of the long ones. */
1362
1363 static int
1364 m32r_adjust_priority (rtx insn, int priority)
1365 {
1366 if (m32r_is_insn (insn)
1367 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1368 priority <<= 3;
1369
1370 return priority;
1371 }
1372
1373 \f
1374 /* Indicate how many instructions can be issued at the same time.
1375 This is sort of a lie. The m32r can issue only 1 long insn at
1376 once, but it can issue 2 short insns. The default therefore is
1377 set at 2, but this can be overridden by the command line option
1378 -missue-rate=1. */
1379
1380 static int
1381 m32r_issue_rate (void)
1382 {
1383 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1384 }
1385 \f
1386 /* Cost functions. */
1387
 1388 /* Implement TARGET_MEMORY_MOVE_COST.
1389
1390 Memory is 3 times as expensive as registers.
1391 ??? Is that the right way to look at it? */
1392
1393 static int
1394 m32r_memory_move_cost (enum machine_mode mode,
1395 reg_class_t rclass ATTRIBUTE_UNUSED,
1396 bool in ATTRIBUTE_UNUSED)
1397 {
1398 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1399 return 6;
1400 else
1401 return 12;
1402 }
1403
1404 static bool
1405 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
1406 bool speed ATTRIBUTE_UNUSED)
1407 {
1408 switch (code)
1409 {
1410 /* Small integers are as cheap as registers. 4 byte values can be
1411 fetched as immediate constants - let's give that the cost of an
1412 extra insn. */
1413 case CONST_INT:
1414 if (INT16_P (INTVAL (x)))
1415 {
1416 *total = 0;
1417 return true;
1418 }
1419 /* FALLTHRU */
1420
1421 case CONST:
1422 case LABEL_REF:
1423 case SYMBOL_REF:
1424 *total = COSTS_N_INSNS (1);
1425 return true;
1426
1427 case CONST_DOUBLE:
1428 {
1429 rtx high, low;
1430
1431 split_double (x, &high, &low);
1432 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1433 + !INT16_P (INTVAL (low)));
1434 return true;
1435 }
1436
1437 case MULT:
1438 *total = COSTS_N_INSNS (3);
1439 return true;
1440
1441 case DIV:
1442 case UDIV:
1443 case MOD:
1444 case UMOD:
1445 *total = COSTS_N_INSNS (10);
1446 return true;
1447
1448 default:
1449 return false;
1450 }
1451 }
1452 \f
1453 /* Type of function DECL.
1454
1455 The result is cached. To reset the cache at the end of a function,
1456 call with DECL = NULL_TREE. */
1457
1458 enum m32r_function_type
1459 m32r_compute_function_type (tree decl)
1460 {
1461 /* Cached value. */
1462 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1463 /* Last function we were called for. */
1464 static tree last_fn = NULL_TREE;
1465
1466 /* Resetting the cached value? */
1467 if (decl == NULL_TREE)
1468 {
1469 fn_type = M32R_FUNCTION_UNKNOWN;
1470 last_fn = NULL_TREE;
1471 return fn_type;
1472 }
1473
1474 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1475 return fn_type;
1476
1477 /* Compute function type. */
1478 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1479 ? M32R_FUNCTION_INTERRUPT
1480 : M32R_FUNCTION_NORMAL);
1481
1482 last_fn = decl;
1483 return fn_type;
1484 }
1485 \f/* Function prologue/epilogue handlers. */
1486
1487 /* M32R stack frames look like:
1488
1489 Before call After call
1490 +-----------------------+ +-----------------------+
1491 | | | |
1492 high | local variables, | | local variables, |
1493 mem | reg save area, etc. | | reg save area, etc. |
1494 | | | |
1495 +-----------------------+ +-----------------------+
1496 | | | |
1497 | arguments on stack. | | arguments on stack. |
1498 | | | |
1499 SP+0->+-----------------------+ +-----------------------+
1500 | reg parm save area, |
1501 | only created for |
1502 | variable argument |
1503 | functions |
1504 +-----------------------+
1505 | previous frame ptr |
1506 +-----------------------+
1507 | |
1508 | register save area |
1509 | |
1510 +-----------------------+
1511 | return address |
1512 +-----------------------+
1513 | |
1514 | local variables |
1515 | |
1516 +-----------------------+
1517 | |
1518 | alloca allocations |
1519 | |
1520 +-----------------------+
1521 | |
1522 low | arguments on stack |
1523 memory | |
1524 SP+0->+-----------------------+
1525
1526 Notes:
1527 1) The "reg parm save area" does not exist for non variable argument fns.
1528 2) The "reg parm save area" can be eliminated completely if we saved regs
1529 containing anonymous args separately but that complicates things too
1530 much (so it's not done).
1531 3) The return address is saved after the register save area so as to have as
1532 many insns as possible between the restoration of `lr' and the `jmp lr'. */
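/* Editorial worked example (assuming UNITS_PER_WORD is 4, 4-byte stack
   alignment and FIRST_PARM_OFFSET (0) == 0, the usual configuration for
   this port): a non-varargs function with 20 bytes of locals, no
   outgoing stack arguments and only `lr' to save gets var_size = 20,
   reg_size = 4 and total_size = 24; m32r_compute_frame_size below
   returns 24 and records the breakdown in current_frame_info.  */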
1533
1534 /* Structure to be filled in by m32r_compute_frame_size with register
1535 save masks, and offsets for the current function. */
1536 struct m32r_frame_info
1537 {
1538 unsigned int total_size; /* # bytes that the entire frame takes up. */
1539 unsigned int extra_size; /* # bytes of extra stuff. */
1540 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1541 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1542 unsigned int reg_size; /* # bytes needed to store regs. */
1543 unsigned int var_size; /* # bytes that variables take up. */
1544 unsigned int gmask; /* Mask of saved gp registers. */
1545 unsigned int save_fp; /* Nonzero if fp must be saved. */
1546 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1547 int initialized; /* Nonzero if frame size already calculated. */
1548 };
1549
1550 /* Current frame information calculated by m32r_compute_frame_size. */
1551 static struct m32r_frame_info current_frame_info;
1552
1553 /* Zero structure to initialize current_frame_info. */
1554 static struct m32r_frame_info zero_frame_info;
1555
1556 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1557 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1558
1559 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1560 The return address and frame pointer are treated separately.
1561 Don't consider them here. */
1562 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1563 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1564 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1565
1566 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1567 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1568
1569 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1570 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1571
1572 /* Return the bytes needed to compute the frame pointer from the current
1573 stack pointer.
1574
1575 SIZE is the size needed for local variables. */
1576
1577 unsigned int
1578 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1579 {
1580 unsigned int regno;
1581 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1582 unsigned int reg_size, frame_size;
1583 unsigned int gmask;
1584 enum m32r_function_type fn_type;
1585 int interrupt_p;
1586 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1587 | crtl->profile);
1588
1589 var_size = M32R_STACK_ALIGN (size);
1590 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1591 pretend_size = crtl->args.pretend_args_size;
1592 extra_size = FIRST_PARM_OFFSET (0);
1593 total_size = extra_size + pretend_size + args_size + var_size;
1594 reg_size = 0;
1595 gmask = 0;
1596
1597 /* See if this is an interrupt handler. Call used registers must be saved
1598 for them too. */
1599 fn_type = m32r_compute_function_type (current_function_decl);
1600 interrupt_p = M32R_INTERRUPT_P (fn_type);
1601
1602 /* Calculate space needed for registers. */
1603 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1604 {
1605 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1606 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1607 {
1608 reg_size += UNITS_PER_WORD;
1609 gmask |= 1 << regno;
1610 }
1611 }
1612
1613 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1614 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1615
1616 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1617 * UNITS_PER_WORD);
1618 total_size += reg_size;
1619
1620 /* ??? Not sure this is necessary, and I don't think the epilogue
1621 handler will do the right thing if this changes total_size. */
1622 total_size = M32R_STACK_ALIGN (total_size);
1623
1624 frame_size = total_size - (pretend_size + reg_size);
1625
1626 /* Save computed information. */
1627 current_frame_info.total_size = total_size;
1628 current_frame_info.extra_size = extra_size;
1629 current_frame_info.pretend_size = pretend_size;
1630 current_frame_info.var_size = var_size;
1631 current_frame_info.args_size = args_size;
1632 current_frame_info.reg_size = reg_size;
1633 current_frame_info.gmask = gmask;
1634 current_frame_info.initialized = reload_completed;
1635
1636 /* Ok, we're done. */
1637 return total_size;
1638 }
1639
1640 /* Worker function for TARGET_CAN_ELIMINATE. */
1641
1642 bool
1643 m32r_can_eliminate (const int from, const int to)
1644 {
1645 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1646 ? ! frame_pointer_needed
1647 : true);
1648 }
1649
1650 \f
1651 /* The table we use to reference PIC data. */
1652 static rtx global_offset_table;
1653
1654 static void
1655 m32r_reload_lr (rtx sp, int size)
1656 {
1657 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1658
1659 if (size == 0)
1660 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1661 else if (size < 32768)
1662 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1663 gen_rtx_PLUS (Pmode, sp,
1664 GEN_INT (size)))));
1665 else
1666 {
1667 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1668
1669 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1670 emit_insn (gen_addsi3 (tmp, tmp, sp));
1671 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1672 }
1673
1674 emit_use (lr);
1675 }
1676
1677 void
1678 m32r_load_pic_register (void)
1679 {
1680 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1681 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1682 GEN_INT (TARGET_MODEL_SMALL)));
1683
1684 /* Need to emit this whether or not we obey regdecls,
1685 since setjmp/longjmp can cause life info to screw up. */
1686 emit_use (pic_offset_table_rtx);
1687 }
1688
1689 /* Expand the m32r prologue as a series of insns. */
1690
1691 void
1692 m32r_expand_prologue (void)
1693 {
1694 int regno;
1695 int frame_size;
1696 unsigned int gmask;
1697 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1698 | crtl->profile);
1699
1700 if (! current_frame_info.initialized)
1701 m32r_compute_frame_size (get_frame_size ());
1702
1703 gmask = current_frame_info.gmask;
1704
1705 /* These cases shouldn't happen. Catch them now. */
1706 gcc_assert (current_frame_info.total_size || !gmask);
1707
1708 /* Allocate space for register arguments if this is a variadic function. */
1709 if (current_frame_info.pretend_size != 0)
1710 {
1711 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1712 the wrong result on a 64-bit host. */
1713 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1714 emit_insn (gen_addsi3 (stack_pointer_rtx,
1715 stack_pointer_rtx,
1716 GEN_INT (-pretend_size)));
1717 }
1718
1719 /* Save any registers we need to and set up fp. */
1720 if (current_frame_info.save_fp)
1721 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1722
1723 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1724
1725 /* Save any needed call-saved regs (and call-used if this is an
1726 interrupt handler). */
1727 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1728 {
1729 if ((gmask & (1 << regno)) != 0)
1730 emit_insn (gen_movsi_push (stack_pointer_rtx,
1731 gen_rtx_REG (Pmode, regno)));
1732 }
1733
1734 if (current_frame_info.save_lr)
1735 emit_insn (gen_movsi_push (stack_pointer_rtx,
1736 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1737
1738 /* Allocate the stack frame. */
1739 frame_size = (current_frame_info.total_size
1740 - (current_frame_info.pretend_size
1741 + current_frame_info.reg_size));
1742
1743 if (frame_size == 0)
1744 ; /* Nothing to do. */
1745 else if (frame_size <= 32768)
1746 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1747 GEN_INT (-frame_size)));
1748 else
1749 {
1750 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1751
1752 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1753 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1754 }
1755
1756 if (frame_pointer_needed)
1757 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1758
1759 if (crtl->profile)
1760 /* Push lr for mcount (form_pc, x). */
1761 emit_insn (gen_movsi_push (stack_pointer_rtx,
1762 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1763
1764 if (pic_reg_used)
1765 {
1766 m32r_load_pic_register ();
1767 m32r_reload_lr (stack_pointer_rtx,
1768 (crtl->profile ? 0 : frame_size));
1769 }
1770
1771 if (crtl->profile && !pic_reg_used)
1772 emit_insn (gen_blockage ());
1773 }
1774
1775 \f
1776 /* Set up the stack and frame pointer (if desired) for the function.
1777 Note, if this is changed, you need to mirror the changes in
1778 m32r_compute_frame_size which calculates the prolog size. */
1779
1780 static void
1781 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1782 {
1783 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1784
1785 /* If this is an interrupt handler, mark it as such. */
1786 if (M32R_INTERRUPT_P (fn_type))
1787 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1788
1789 if (! current_frame_info.initialized)
1790 m32r_compute_frame_size (size);
1791
1792 /* This is only for the human reader. */
1793 fprintf (file,
1794 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1795 ASM_COMMENT_START,
1796 current_frame_info.var_size,
1797 current_frame_info.reg_size / 4,
1798 current_frame_info.args_size,
1799 current_frame_info.extra_size);
1800 }
1801 \f
1802 /* Output RTL to pop register REGNO from the stack. */
1803
1804 static void
1805 pop (int regno)
1806 {
1807 rtx x;
1808
1809 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1810 stack_pointer_rtx));
1811 add_reg_note (x, REG_INC, stack_pointer_rtx);
1812 }
1813
1814 /* Expand the m32r epilogue as a series of insns. */
1815
1816 void
1817 m32r_expand_epilogue (void)
1818 {
1819 int regno;
1820 int noepilogue = FALSE;
1821 int total_size;
1822
1823 gcc_assert (current_frame_info.initialized);
1824 total_size = current_frame_info.total_size;
1825
1826 if (total_size == 0)
1827 {
1828 rtx insn = get_last_insn ();
1829
1830 /* If the last insn was a BARRIER, we don't have to write any code
1831 because a jump (aka return) was put there. */
1832 if (insn && NOTE_P (insn))
1833 insn = prev_nonnote_insn (insn);
1834 if (insn && BARRIER_P (insn))
1835 noepilogue = TRUE;
1836 }
1837
1838 if (!noepilogue)
1839 {
1840 unsigned int var_size = current_frame_info.var_size;
1841 unsigned int args_size = current_frame_info.args_size;
1842 unsigned int gmask = current_frame_info.gmask;
1843 int can_trust_sp_p = !cfun->calls_alloca;
1844
1845 if (flag_exceptions)
1846 emit_insn (gen_blockage ());
1847
1848 /* The first thing to do is point the sp at the bottom of the register
1849 save area. */
1850 if (can_trust_sp_p)
1851 {
1852 unsigned int reg_offset = var_size + args_size;
1853
1854 if (reg_offset == 0)
1855 ; /* Nothing to do. */
1856 else if (reg_offset < 32768)
1857 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1858 GEN_INT (reg_offset)));
1859 else
1860 {
1861 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1862
1863 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1864 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1865 tmp));
1866 }
1867 }
1868 else if (frame_pointer_needed)
1869 {
1870 unsigned int reg_offset = var_size + args_size;
1871
1872 if (reg_offset == 0)
1873 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1874 else if (reg_offset < 32768)
1875 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1876 GEN_INT (reg_offset)));
1877 else
1878 {
1879 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1880
1881 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1882 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1883 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1884 tmp));
1885 }
1886 }
1887 else
1888 gcc_unreachable ();
1889
1890 if (current_frame_info.save_lr)
1891 pop (RETURN_ADDR_REGNUM);
1892
1893 /* Restore any saved registers, in reverse order of course. */
1894 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1895 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1896 {
1897 if ((gmask & (1L << regno)) != 0)
1898 pop (regno);
1899 }
1900
1901 if (current_frame_info.save_fp)
1902 pop (FRAME_POINTER_REGNUM);
1903
1904 /* Remove varargs area if present. */
1905 if (current_frame_info.pretend_size != 0)
1906 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1907 GEN_INT (current_frame_info.pretend_size)));
1908
1909 emit_insn (gen_blockage ());
1910 }
1911 }
1912
1913 /* Do any necessary cleanup after a function to restore stack, frame,
1914 and regs. */
1915
1916 static void
1917 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1918 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1919 {
1920 /* Reset state info for each function. */
1921 current_frame_info = zero_frame_info;
1922 m32r_compute_function_type (NULL_TREE);
1923 }
1924 \f
1925 /* Return nonzero if this function is known to have a null or
1926 single-instruction epilogue.  */
1927
1928 int
1929 direct_return (void)
1930 {
1931 if (!reload_completed)
1932 return FALSE;
1933
1934 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1935 return FALSE;
1936
1937 if (! current_frame_info.initialized)
1938 m32r_compute_frame_size (get_frame_size ());
1939
1940 return current_frame_info.total_size == 0;
1941 }
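
/* Illustrative sketch, not from the original sources: after reload, a
   hypothetical leaf function like this needs no frame and saves no
   registers, so direct_return above is true and the "return" pattern
   in m32r.md (which is presumed to test it) can emit a bare return
   jump.  */
#if 0
int
identity (int x)
{
  return x;
}
#endif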
1942
1943 \f
1944 /* PIC. */
1945
1946 int
1947 m32r_legitimate_pic_operand_p (rtx x)
1948 {
1949 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1950 return 0;
1951
1952 if (GET_CODE (x) == CONST
1953 && GET_CODE (XEXP (x, 0)) == PLUS
1954 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1955 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1956 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1957 return 0;
1958
1959 return 1;
1960 }
1961
1962 rtx
1963 m32r_legitimize_pic_address (rtx orig, rtx reg)
1964 {
1965 #ifdef DEBUG_PIC
1966 printf ("m32r_legitimize_pic_address()\n");
1967 #endif
1968
1969 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1970 {
1971 rtx pic_ref, address;
1972 rtx insn;
1973 int subregs = 0;
1974
1975 if (reg == 0)
1976 {
1977 gcc_assert (!reload_in_progress && !reload_completed);
1978 reg = gen_reg_rtx (Pmode);
1979
1980 subregs = 1;
1981 }
1982
1983 if (subregs)
1984 address = gen_reg_rtx (Pmode);
1985 else
1986 address = reg;
1987
1988 crtl->uses_pic_offset_table = 1;
1989
1990 if (GET_CODE (orig) == LABEL_REF
1991 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1992 {
1993 emit_insn (gen_gotoff_load_addr (reg, orig));
1994 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1995 return reg;
1996 }
1997
1998 emit_insn (gen_pic_load_addr (address, orig));
1999
2000 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
2001 pic_ref = gen_const_mem (Pmode, address);
2002 insn = emit_move_insn (reg, pic_ref);
2003 #if 0
2004 /* Put a REG_EQUAL note on this insn, so that it can be optimized
2005 by loop. */
2006 set_unique_reg_note (insn, REG_EQUAL, orig);
2007 #endif
2008 return reg;
2009 }
2010 else if (GET_CODE (orig) == CONST)
2011 {
2012 rtx base, offset;
2013
2014 if (GET_CODE (XEXP (orig, 0)) == PLUS
2015 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
2016 return orig;
2017
2018 if (reg == 0)
2019 {
2020 gcc_assert (!reload_in_progress && !reload_completed);
2021 reg = gen_reg_rtx (Pmode);
2022 }
2023
2024 if (GET_CODE (XEXP (orig, 0)) == PLUS)
2025 {
2026 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
2027 if (base == reg)
2028 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
2029 else
2030 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
2031 }
2032 else
2033 return orig;
2034
2035 if (CONST_INT_P (offset))
2036 {
2037 if (INT16_P (INTVAL (offset)))
2038 return plus_constant (base, INTVAL (offset));
2039 else
2040 {
2041 gcc_assert (! reload_in_progress && ! reload_completed);
2042 offset = force_reg (Pmode, offset);
2043 }
2044 }
2045
2046 return gen_rtx_PLUS (Pmode, base, offset);
2047 }
2048
2049 return orig;
2050 }
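
/* Illustrative sketch, not from the original sources: compiled with
   -fPIC and default visibility, the hypothetical 'exported' variable
   is not known to bind locally, so its address is loaded through the
   GOT (gen_pic_load_addr plus a memory load above), while the static
   'file_local' satisfies SYMBOL_REF_LOCAL_P and only needs the
   cheaper GOTOFF computation (gen_gotoff_load_addr plus an add).  */
#if 0
extern int exported;
static int file_local;

int
read_both (void)
{
  return exported + file_local;
}
#endif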
2051
2052 static rtx
2053 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2054 enum machine_mode mode ATTRIBUTE_UNUSED)
2055 {
2056 if (flag_pic)
2057 return m32r_legitimize_pic_address (x, NULL_RTX);
2058 else
2059 return x;
2060 }
2061
2062 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2063
2064 static bool
2065 m32r_mode_dependent_address_p (const_rtx addr)
2066 {
2067 if (GET_CODE (addr) == LO_SUM)
2068 return true;
2069
2070 return false;
2071 }
2072 \f
2073 /* Nested function support. */
2074
2075 /* Emit RTL insns to initialize the variable parts of a trampoline.
2076 FNADDR is an RTX for the address of the function's pure code.
2077 CXT is an RTX for the static chain value for the function. */
2078
2079 void
2080 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2081 rtx fnaddr ATTRIBUTE_UNUSED,
2082 rtx cxt ATTRIBUTE_UNUSED)
2083 {
2084 }
2085 \f
2086 static void
2087 m32r_file_start (void)
2088 {
2089 default_file_start ();
2090
2091 if (flag_verbose_asm)
2092 fprintf (asm_out_file,
2093 "%s M32R/D special options: -G " HOST_WIDE_INT_PRINT_UNSIGNED "\n",
2094 ASM_COMMENT_START, g_switch_value);
2095
2096 if (TARGET_LITTLE_ENDIAN)
2097 fprintf (asm_out_file, "\t.little\n");
2098 }
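
/* Illustrative sketch, not from the original sources: a hypothetical
   4-byte object that is no larger than the -G threshold echoed into
   the assembler comment above is placed in the small data area and
   can then be addressed with the sda() relocation used elsewhere in
   this file.  */
#if 0
int small_counter;
#endif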
2099 \f
2100 /* Print operand X (an rtx) in assembler syntax to file FILE.
2101 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2102 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2103
2104 static void
2105 m32r_print_operand (FILE * file, rtx x, int code)
2106 {
2107 rtx addr;
2108
2109 switch (code)
2110 {
2111 /* The 's' and 'p' codes are used by m32r_output_block_move() to
2112 indicate pre-increment 's'tores and 'p'ost-increment loads.  */
2113 case 's':
2114 if (REG_P (x))
2115 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2116 else
2117 output_operand_lossage ("invalid operand to %%s code");
2118 return;
2119
2120 case 'p':
2121 if (REG_P (x))
2122 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2123 else
2124 output_operand_lossage ("invalid operand to %%p code");
2125 return;
2126
2127 case 'R' :
2128 /* Write second word of DImode or DFmode reference,
2129 register or memory. */
2130 if (REG_P (x))
2131 fputs (reg_names[REGNO (x)+1], file);
2132 else if (MEM_P (x))
2133 {
2134 fprintf (file, "@(");
2135 /* Handle possible auto-increment. Since it is pre-increment and
2136 we have already done it, we can just use an offset of four. */
2137 /* ??? This is taken from rs6000.c I think. I don't think it is
2138 currently necessary, but keep it around. */
2139 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2140 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2141 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
2142 else
2143 output_address (plus_constant (XEXP (x, 0), 4));
2144 fputc (')', file);
2145 }
2146 else
2147 output_operand_lossage ("invalid operand to %%R code");
2148 return;
2149
2150 case 'H' : /* High word. */
2151 case 'L' : /* Low word. */
2152 if (REG_P (x))
2153 {
2154 /* L = least significant word, H = most significant word. */
2155 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2156 fputs (reg_names[REGNO (x)], file);
2157 else
2158 fputs (reg_names[REGNO (x)+1], file);
2159 }
2160 else if (CONST_INT_P (x)
2161 || GET_CODE (x) == CONST_DOUBLE)
2162 {
2163 rtx first, second;
2164
2165 split_double (x, &first, &second);
2166 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2167 code == 'L' ? INTVAL (first) : INTVAL (second));
2168 }
2169 else
2170 output_operand_lossage ("invalid operand to %%H/%%L code");
2171 return;
2172
2173 case 'A' :
2174 {
2175 char str[30];
2176
2177 if (GET_CODE (x) != CONST_DOUBLE
2178 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2179 fatal_insn ("bad insn for 'A'", x);
2180
2181 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2182 fprintf (file, "%s", str);
2183 return;
2184 }
2185
2186 case 'B' : /* Bottom half. */
2187 case 'T' : /* Top half. */
2188 /* Output the argument to a `seth' insn (sets the Top half-word).
2189 For constants output arguments to a seth/or3 pair to set Top and
2190 Bottom halves. For symbols output arguments to a seth/add3 pair to
2191 set Top and Bottom halves. The difference exists because for
2192 constants seth/or3 is more readable but for symbols we need to use
2193 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
2194 switch (GET_CODE (x))
2195 {
2196 case CONST_INT :
2197 case CONST_DOUBLE :
2198 {
2199 rtx first, second;
2200
2201 split_double (x, &first, &second);
2202 x = WORDS_BIG_ENDIAN ? second : first;
2203 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2204 (code == 'B'
2205 ? INTVAL (x) & 0xffff
2206 : (INTVAL (x) >> 16) & 0xffff));
2207 }
2208 return;
2209 case CONST :
2210 case SYMBOL_REF :
2211 if (code == 'B'
2212 && small_data_operand (x, VOIDmode))
2213 {
2214 fputs ("sda(", file);
2215 output_addr_const (file, x);
2216 fputc (')', file);
2217 return;
2218 }
2219 /* fall through */
2220 case LABEL_REF :
2221 fputs (code == 'T' ? "shigh(" : "low(", file);
2222 output_addr_const (file, x);
2223 fputc (')', file);
2224 return;
2225 default :
2226 output_operand_lossage ("invalid operand to %%T/%%B code");
2227 return;
2228 }
2229 break;
2230
2231 case 'U' :
2232 /* ??? wip */
2233 /* Output a load/store with update indicator if appropriate. */
2234 if (MEM_P (x))
2235 {
2236 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2237 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2238 fputs (".a", file);
2239 }
2240 else
2241 output_operand_lossage ("invalid operand to %%U code");
2242 return;
2243
2244 case 'N' :
2245 /* Print a constant value negated. */
2246 if (CONST_INT_P (x))
2247 output_addr_const (file, GEN_INT (- INTVAL (x)));
2248 else
2249 output_operand_lossage ("invalid operand to %%N code");
2250 return;
2251
2252 case 'X' :
2253 /* Print a const_int in hex. Used in comments. */
2254 if (CONST_INT_P (x))
2255 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2256 return;
2257
2258 case '#' :
2259 fputs (IMMEDIATE_PREFIX, file);
2260 return;
2261
2262 case 0 :
2263 /* Do nothing special. */
2264 break;
2265
2266 default :
2267 /* Unknown flag. */
2268 output_operand_lossage ("invalid operand output code");
2269 }
2270
2271 switch (GET_CODE (x))
2272 {
2273 case REG :
2274 fputs (reg_names[REGNO (x)], file);
2275 break;
2276
2277 case MEM :
2278 addr = XEXP (x, 0);
2279 if (GET_CODE (addr) == PRE_INC)
2280 {
2281 if (!REG_P (XEXP (addr, 0)))
2282 fatal_insn ("pre-increment address is not a register", x);
2283
2284 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2285 }
2286 else if (GET_CODE (addr) == PRE_DEC)
2287 {
2288 if (!REG_P (XEXP (addr, 0)))
2289 fatal_insn ("pre-decrement address is not a register", x);
2290
2291 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2292 }
2293 else if (GET_CODE (addr) == POST_INC)
2294 {
2295 if (!REG_P (XEXP (addr, 0)))
2296 fatal_insn ("post-increment address is not a register", x);
2297
2298 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2299 }
2300 else
2301 {
2302 fputs ("@(", file);
2303 output_address (XEXP (x, 0));
2304 fputc (')', file);
2305 }
2306 break;
2307
2308 case CONST_DOUBLE :
2309 /* We handle SFmode constants here as output_addr_const doesn't. */
2310 if (GET_MODE (x) == SFmode)
2311 {
2312 REAL_VALUE_TYPE d;
2313 long l;
2314
2315 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2316 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2317 fprintf (file, "0x%08lx", l);
2318 break;
2319 }
2320
2321 /* Fall through. Let output_addr_const deal with it. */
2322
2323 default :
2324 output_addr_const (file, x);
2325 break;
2326 }
2327 }
2328
2329 /* Print a memory address as an operand to reference that memory location. */
2330
2331 static void
2332 m32r_print_operand_address (FILE * file, rtx addr)
2333 {
2334 rtx base;
2335 rtx index = 0;
2336 int offset = 0;
2337
2338 switch (GET_CODE (addr))
2339 {
2340 case REG :
2341 fputs (reg_names[REGNO (addr)], file);
2342 break;
2343
2344 case PLUS :
2345 if (CONST_INT_P (XEXP (addr, 0)))
2346 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2347 else if (CONST_INT_P (XEXP (addr, 1)))
2348 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2349 else
2350 base = XEXP (addr, 0), index = XEXP (addr, 1);
2351 if (REG_P (base))
2352 {
2353 /* Print the offset first (if present) to conform to the manual. */
2354 if (index == 0)
2355 {
2356 if (offset != 0)
2357 fprintf (file, "%d,", offset);
2358 fputs (reg_names[REGNO (base)], file);
2359 }
2360 /* The chip doesn't support this, but it is left in for generality.  */
2361 else if (REG_P (index))
2362 fprintf (file, "%s,%s",
2363 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2364 /* Not sure this can happen, but leave in for now. */
2365 else if (GET_CODE (index) == SYMBOL_REF)
2366 {
2367 output_addr_const (file, index);
2368 fputc (',', file);
2369 fputs (reg_names[REGNO (base)], file);
2370 }
2371 else
2372 fatal_insn ("bad address", addr);
2373 }
2374 else if (GET_CODE (base) == LO_SUM)
2375 {
2376 gcc_assert (!index && REG_P (XEXP (base, 0)));
2377 if (small_data_operand (XEXP (base, 1), VOIDmode))
2378 fputs ("sda(", file);
2379 else
2380 fputs ("low(", file);
2381 output_addr_const (file, plus_constant (XEXP (base, 1), offset));
2382 fputs ("),", file);
2383 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2384 }
2385 else
2386 fatal_insn ("bad address", addr);
2387 break;
2388
2389 case LO_SUM :
2390 if (!REG_P (XEXP (addr, 0)))
2391 fatal_insn ("lo_sum not of register", addr);
2392 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2393 fputs ("sda(", file);
2394 else
2395 fputs ("low(", file);
2396 output_addr_const (file, XEXP (addr, 1));
2397 fputs ("),", file);
2398 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2399 break;
2400
2401 case PRE_INC : /* Assume SImode. */
2402 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2403 break;
2404
2405 case PRE_DEC : /* Assume SImode. */
2406 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2407 break;
2408
2409 case POST_INC : /* Assume SImode. */
2410 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2411 break;
2412
2413 default :
2414 output_addr_const (file, addr);
2415 break;
2416 }
2417 }
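
/* Informal summary (added commentary, not from the original sources)
   of the address syntaxes produced above:

     reg                 ->  rN
     reg + const         ->  const,rN       (just rN when const is 0)
     reg + reg           ->  rB,rI          (not supported by the chip)
     lo_sum (reg, sym)   ->  low(sym),rN    or sda(sym),rN for small data
     pre_inc             ->  +rN
     pre_dec             ->  -rN
     post_inc            ->  rN+

   The surrounding "@(...)", "@+", "@-" or "@...+" for memory operands
   comes from the MEM case of m32r_print_operand.  */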
2418
2419 static bool
2420 m32r_print_operand_punct_valid_p (unsigned char code)
2421 {
2422 return m32r_punct_chars[code];
2423 }
2424
2425 /* Return true if the operands are the constants 0 and 1. */
2426
2427 int
2428 zero_and_one (rtx operand1, rtx operand2)
2429 {
2430 return
2431 CONST_INT_P (operand1)
2432 && CONST_INT_P (operand2)
2433 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2434 || ((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2435 }
2436
2437 /* Generate the correct assembler code to handle the conditional loading of a
2438 value into a register. It is known that the operands satisfy the
2439 conditional_move_operand() function above.  The destination is operands[0].
2440 The condition is operands[1].  The 'true' value is operands[2] and the
2441 'false' value is operands[3].  */
2442
2443 char *
2444 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2445 {
2446 static char buffer [100];
2447 const char * dest = reg_names [REGNO (operands [0])];
2448
2449 buffer [0] = 0;
2450
2451 /* Destination must be a register. */
2452 gcc_assert (REG_P (operands [0]));
2453 gcc_assert (conditional_move_operand (operands [2], SImode));
2454 gcc_assert (conditional_move_operand (operands [3], SImode));
2455
2456 /* Check to see if the test is reversed. */
2457 if (GET_CODE (operands [1]) == NE)
2458 {
2459 rtx tmp = operands [2];
2460 operands [2] = operands [3];
2461 operands [3] = tmp;
2462 }
2463
2464 sprintf (buffer, "mvfc %s, cbr", dest);
2465
2466 /* If the true value was '0' then we need to invert the results of the move. */
2467 if (INTVAL (operands [2]) == 0)
2468 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2469 dest, dest);
2470
2471 return buffer;
2472 }
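
/* Illustrative sketch, not from the original sources: a hypothetical
   0/1-valued conditional of this shape is a candidate for the
   conditional-move pattern that ends up in emit_cond_move above;
   whether it actually matches depends on how the comparison itself
   is expanded.  */
#if 0
int
flag_from_compare (int a, int b)
{
  return (a < b) ? 1 : 0;
}
#endif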
2473
2474 /* Returns true if the registers contained in the two
2475 rtl expressions are different. */
2476
2477 int
2478 m32r_not_same_reg (rtx a, rtx b)
2479 {
2480 int reg_a = -1;
2481 int reg_b = -2;
2482
2483 while (GET_CODE (a) == SUBREG)
2484 a = SUBREG_REG (a);
2485
2486 if (REG_P (a))
2487 reg_a = REGNO (a);
2488
2489 while (GET_CODE (b) == SUBREG)
2490 b = SUBREG_REG (b);
2491
2492 if (REG_P (b))
2493 reg_b = REGNO (b);
2494
2495 return reg_a != reg_b;
2496 }
2497
2498 \f
2499 rtx
2500 m32r_function_symbol (const char *name)
2501 {
2502 int extra_flags = 0;
2503 enum m32r_model model;
2504 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2505
2506 if (TARGET_MODEL_SMALL)
2507 model = M32R_MODEL_SMALL;
2508 else if (TARGET_MODEL_MEDIUM)
2509 model = M32R_MODEL_MEDIUM;
2510 else if (TARGET_MODEL_LARGE)
2511 model = M32R_MODEL_LARGE;
2512 else
2513 gcc_unreachable (); /* Shouldn't happen. */
2514 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2515
2516 if (extra_flags)
2517 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2518
2519 return sym;
2520 }
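
/* Illustrative sketch, not from the original sources: the code model
   recorded in the SYMBOL_REF flags above normally follows the
   -mmodel= option, but (per the GCC manual) the M32R "model"
   attribute can request it per declaration, as in these hypothetical
   prototypes.  */
#if 0
void far_away (void) __attribute__ ((model ("large")));
void nearby (void) __attribute__ ((model ("small")));
#endif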
2521
2522 /* Use a library function to move some bytes. */
2523
2524 static void
2525 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2526 {
2527 /* We want to pass the size as Pmode, which will normally be SImode
2528 but will be DImode if we are using 64-bit longs and pointers. */
2529 if (GET_MODE (bytes_rtx) != VOIDmode
2530 && GET_MODE (bytes_rtx) != Pmode)
2531 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2532
2533 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2534 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2535 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2536 TYPE_UNSIGNED (sizetype)),
2537 TYPE_MODE (sizetype));
2538 }
2539
2540 /* Expand string/block move operations.
2541
2542 operands[0] is the pointer to the destination.
2543 operands[1] is the pointer to the source.
2544 operands[2] is the number of bytes to move.
2545 operands[3] is the alignment.
2546
2547 Returns 1 upon success, 0 otherwise. */
2548
2549 int
2550 m32r_expand_block_move (rtx operands[])
2551 {
2552 rtx orig_dst = operands[0];
2553 rtx orig_src = operands[1];
2554 rtx bytes_rtx = operands[2];
2555 rtx align_rtx = operands[3];
2556 int constp = CONST_INT_P (bytes_rtx);
2557 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2558 int align = INTVAL (align_rtx);
2559 int leftover;
2560 rtx src_reg;
2561 rtx dst_reg;
2562
2563 if (constp && bytes <= 0)
2564 return 1;
2565
2566 /* Move the address into scratch registers. */
2567 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2568 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2569
2570 if (align > UNITS_PER_WORD)
2571 align = UNITS_PER_WORD;
2572
2573 /* If we prefer size over speed, always use a function call.
2574 If we do not know the size, use a function call.
2575 If the blocks are not word aligned, use a function call. */
2576 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2577 {
2578 block_move_call (dst_reg, src_reg, bytes_rtx);
2579 return 0;
2580 }
2581
2582 leftover = bytes % MAX_MOVE_BYTES;
2583 bytes -= leftover;
2584
2585 /* If necessary, generate a loop to handle the bulk of the copy. */
2586 if (bytes)
2587 {
2588 rtx label = NULL_RTX;
2589 rtx final_src = NULL_RTX;
2590 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2591 rtx rounded_total = GEN_INT (bytes);
2592 rtx new_dst_reg = gen_reg_rtx (SImode);
2593 rtx new_src_reg = gen_reg_rtx (SImode);
2594
2595 /* If we are going to have to perform this loop more than
2596 once, then generate a label and compute the address the
2597 source register will contain upon completion of the final
2598 iteration. */
2599 if (bytes > MAX_MOVE_BYTES)
2600 {
2601 final_src = gen_reg_rtx (Pmode);
2602
2603 if (INT16_P(bytes))
2604 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2605 else
2606 {
2607 emit_insn (gen_movsi (final_src, rounded_total));
2608 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2609 }
2610
2611 label = gen_label_rtx ();
2612 emit_label (label);
2613 }
2614
2615 /* It is known that m32r_output_block_move() will update src_reg to point
2616 to the word after the end of the source block, and dst_reg to point
2617 to the last word of the destination block, provided that the block
2618 is MAX_MOVE_BYTES long. */
2619 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2620 new_dst_reg, new_src_reg));
2621 emit_move_insn (dst_reg, new_dst_reg);
2622 emit_move_insn (src_reg, new_src_reg);
2623 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2624
2625 if (bytes > MAX_MOVE_BYTES)
2626 {
2627 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2628 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2629 }
2630 }
2631
2632 if (leftover)
2633 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2634 gen_reg_rtx (SImode),
2635 gen_reg_rtx (SImode)));
2636 return 1;
2637 }
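
/* Illustrative sketch, not from the original sources: the first
   hypothetical copy below has a constant, word-aligned size, so the
   expander above can emit inline load/store groups (unless optimizing
   for size); the second has a run-time size, so it goes through
   block_move_call and hence a library memcpy.  */
#if 0
#include <string.h>

struct aligned_block { int words[8]; };

void
copy_fixed (struct aligned_block *dst, const struct aligned_block *src)
{
  *dst = *src;
}

void
copy_variable (char *dst, const char *src, unsigned int n)
{
  memcpy (dst, src, n);
}
#endif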
2638
2639 \f
2640 /* Emit load/stores for a small constant word aligned block_move.
2641
2642 operands[0] is the memory address of the destination.
2643 operands[1] is the memory address of the source.
2644 operands[2] is the number of bytes to move.
2645 operands[5] is a temp register.
2646 operands[6] is a temp register.  */
2647
2648 void
2649 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2650 {
2651 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2652 int first_time;
2653 int got_extra = 0;
2654
2655 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2656
2657 /* We do not have a post-increment store available, so the first set of
2658 stores are done without any increment, then the remaining ones can use
2659 the pre-increment addressing mode.
2660
2661 Note: m32r_expand_block_move() also relies upon this behavior when building
2662 loops to copy large blocks. */
2663 first_time = 1;
2664
2665 while (bytes > 0)
2666 {
2667 if (bytes >= 8)
2668 {
2669 if (first_time)
2670 {
2671 output_asm_insn ("ld\t%5, %p1", operands);
2672 output_asm_insn ("ld\t%6, %p1", operands);
2673 output_asm_insn ("st\t%5, @%0", operands);
2674 output_asm_insn ("st\t%6, %s0", operands);
2675 }
2676 else
2677 {
2678 output_asm_insn ("ld\t%5, %p1", operands);
2679 output_asm_insn ("ld\t%6, %p1", operands);
2680 output_asm_insn ("st\t%5, %s0", operands);
2681 output_asm_insn ("st\t%6, %s0", operands);
2682 }
2683
2684 bytes -= 8;
2685 }
2686 else if (bytes >= 4)
2687 {
2688 if (bytes > 4)
2689 got_extra = 1;
2690
2691 output_asm_insn ("ld\t%5, %p1", operands);
2692
2693 if (got_extra)
2694 output_asm_insn ("ld\t%6, %p1", operands);
2695
2696 if (first_time)
2697 output_asm_insn ("st\t%5, @%0", operands);
2698 else
2699 output_asm_insn ("st\t%5, %s0", operands);
2700
2701 bytes -= 4;
2702 }
2703 else
2704 {
2705 /* Get the entire next word, even though we do not want all of it.
2706 This saves us from doing several smaller loads, and we assume that
2707 we cannot cause a page fault when at least part of the word is in
2708 valid memory [since we don't get called if things aren't properly
2709 aligned]. */
2710 int dst_offset = first_time ? 0 : 4;
2711 /* The amount of increment we have to make to the
2712 destination pointer. */
2713 int dst_inc_amount = dst_offset + bytes - 4;
2714 /* The same for the source pointer. */
2715 int src_inc_amount = bytes;
2716 int last_shift;
2717 rtx my_operands[3];
2718
2719 /* If got_extra is true then we have already loaded
2720 the next word as part of loading and storing the previous word. */
2721 if (! got_extra)
2722 output_asm_insn ("ld\t%6, @%1", operands);
2723
2724 if (bytes >= 2)
2725 {
2726 bytes -= 2;
2727
2728 output_asm_insn ("sra3\t%5, %6, #16", operands);
2729 my_operands[0] = operands[5];
2730 my_operands[1] = GEN_INT (dst_offset);
2731 my_operands[2] = operands[0];
2732 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2733
2734 /* If there is a byte left to store then increment the
2735 destination address and shift the contents of the source
2736 register down by 8 bits.  We cannot do the address
2737 increment in the store-halfword instruction, because it does
2738 not have an auto-increment mode.  */
2739 if (bytes > 0) /* assert (bytes == 1) */
2740 {
2741 dst_offset += 2;
2742 last_shift = 8;
2743 }
2744 }
2745 else
2746 last_shift = 24;
2747
2748 if (bytes > 0)
2749 {
2750 my_operands[0] = operands[6];
2751 my_operands[1] = GEN_INT (last_shift);
2752 output_asm_insn ("srai\t%0, #%1", my_operands);
2753 my_operands[0] = operands[6];
2754 my_operands[1] = GEN_INT (dst_offset);
2755 my_operands[2] = operands[0];
2756 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2757 }
2758
2759 /* Update the destination pointer if needed. We have to do
2760 this so that the pattern matches what we output in this
2761 function. */
2762 if (dst_inc_amount
2763 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2764 {
2765 my_operands[0] = operands[0];
2766 my_operands[1] = GEN_INT (dst_inc_amount);
2767 output_asm_insn ("addi\t%0, #%1", my_operands);
2768 }
2769
2770 /* Update the source pointer if needed. We have to do this
2771 so that the pattern matches what we output in this
2772 function. */
2773 if (src_inc_amount
2774 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2775 {
2776 my_operands[0] = operands[1];
2777 my_operands[1] = GEN_INT (src_inc_amount);
2778 output_asm_insn ("addi\t%0, #%1", my_operands);
2779 }
2780
2781 bytes = 0;
2782 }
2783
2784 first_time = 0;
2785 }
2786 }
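
/* Minimal C model of the tail handling above (added commentary, not
   from the original sources; big-endian byte order assumed, matching
   the shift counts used by the sra3/sth and srai/stb sequences): the
   last partial word is loaded whole, its top halfword is stored with
   a 16-bit store, and any remaining byte with an 8-bit store.  */
#if 0
#include <stdint.h>

static void
store_tail_big_endian (uint8_t *dst, uint32_t word, int bytes)
{
  int shift = 24;

  if (bytes >= 2)
    {
      dst[0] = (uint8_t) (word >> 24);
      dst[1] = (uint8_t) (word >> 16);
      dst += 2;
      bytes -= 2;
      shift = 8;
    }

  if (bytes > 0)
    dst[0] = (uint8_t) (word >> shift);
}
#endif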
2787
2788 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2789
2790 int
2791 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2792 unsigned int new_reg)
2793 {
2794 /* Interrupt routines can't clobber any register that isn't already used. */
2795 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2796 && !df_regs_ever_live_p (new_reg))
2797 return 0;
2798
2799 return 1;
2800 }
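
/* Illustrative sketch, not from the original sources: in a
   hypothetical handler like this, marked with the M32R "interrupt"
   attribute, the register renamer may only pick registers that the
   handler already uses, which is what the df_regs_ever_live_p check
   above enforces.  */
#if 0
void timer_isr (void) __attribute__ ((interrupt));

volatile int ticks;

void
timer_isr (void)
{
  ticks++;
}
#endif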
2801
2802 rtx
2803 m32r_return_addr (int count)
2804 {
2805 if (count != 0)
2806 return const0_rtx;
2807
2808 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2809 }
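
/* Illustrative sketch, not from the original sources: assuming the
   usual RETURN_ADDR_RTX wiring in m32r.h, __builtin_return_address (0)
   reaches m32r_return_addr above and reads the incoming lr value,
   while any non-zero count simply yields a null pointer.  */
#if 0
void *
my_return_address (void)
{
  return __builtin_return_address (0);
}
#endif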
2810
2811 static void
2812 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2813 {
2814 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2815 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2816 0x017e8e17 : 0x178e7e01, SImode));
2817 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2818 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2819 0x0c00ae86 : 0x86ae000c, SImode));
2820 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2821 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2822 0xe627871e : 0x1e8727e6, SImode));
2823 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2824 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2825 0xc616c626 : 0x26c61fc6, SImode));
2826 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2827 chain_value);
2828 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2829 XEXP (DECL_RTL (fndecl), 0));
2830
2831 if (m32r_cache_flush_trap >= 0)
2832 emit_insn (gen_flush_icache
2833 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2834 gen_int_mode (m32r_cache_flush_trap, SImode)));
2835 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2836 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2837 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2838 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2839 GEN_INT (3), SImode);
2840 }
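
/* Illustrative sketch, not from the original sources: taking the
   address of a GNU C nested function forces a stack trampoline, which
   m32r_trampoline_init above finishes by storing the static chain at
   offset 16 and the target address at offset 20, then flushing the
   instruction cache over the template.  */
#if 0
extern void call_it (int (*fn) (int));

void
uses_trampoline (int bias)
{
  int add_bias (int x) { return x + bias; }

  call_it (add_bias);
}
#endif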