gcc/config/m32r/m32r.c
1 /* Subroutines used for code generation on the Renesas M32R cpu.
2 Copyright (C) 1996-2014 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "stor-layout.h"
26 #include "varasm.h"
27 #include "stringpool.h"
28 #include "calls.h"
29 #include "rtl.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "output.h"
35 #include "dbxout.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "expr.h"
39 #include "function.h"
40 #include "recog.h"
41 #include "diagnostic-core.h"
42 #include "ggc.h"
43 #include "df.h"
44 #include "tm_p.h"
45 #include "target.h"
46 #include "target-def.h"
47 #include "tm-constrs.h"
48 #include "opts.h"
49 #include "builtins.h"
50
51 /* Array of valid operand punctuation characters. */
52 static char m32r_punct_chars[256];
53
54 /* Machine-specific symbol_ref flags. */
55 #define SYMBOL_FLAG_MODEL_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
56 #define SYMBOL_REF_MODEL(X) \
57 ((enum m32r_model) ((SYMBOL_REF_FLAGS (X) >> SYMBOL_FLAG_MODEL_SHIFT) & 3))
58
59 /* For string literals, etc. */
60 #define LIT_NAME_P(NAME) ((NAME)[0] == '*' && (NAME)[1] == '.')
61
62 /* Forward declaration. */
63 static void m32r_option_override (void);
64 static void init_reg_tables (void);
65 static void block_move_call (rtx, rtx, rtx);
66 static int m32r_is_insn (rtx);
67 static bool m32r_legitimate_address_p (enum machine_mode, rtx, bool);
68 static rtx m32r_legitimize_address (rtx, rtx, enum machine_mode);
69 static bool m32r_mode_dependent_address_p (const_rtx, addr_space_t);
70 static tree m32r_handle_model_attribute (tree *, tree, tree, int, bool *);
71 static void m32r_print_operand (FILE *, rtx, int);
72 static void m32r_print_operand_address (FILE *, rtx);
73 static bool m32r_print_operand_punct_valid_p (unsigned char code);
74 static void m32r_output_function_prologue (FILE *, HOST_WIDE_INT);
75 static void m32r_output_function_epilogue (FILE *, HOST_WIDE_INT);
76
77 static void m32r_file_start (void);
78
79 static int m32r_adjust_priority (rtx, int);
80 static int m32r_issue_rate (void);
81
82 static void m32r_encode_section_info (tree, rtx, int);
83 static bool m32r_in_small_data_p (const_tree);
84 static bool m32r_return_in_memory (const_tree, const_tree);
85 static rtx m32r_function_value (const_tree, const_tree, bool);
86 static rtx m32r_libcall_value (enum machine_mode, const_rtx);
87 static bool m32r_function_value_regno_p (const unsigned int);
88 static void m32r_setup_incoming_varargs (cumulative_args_t, enum machine_mode,
89 tree, int *, int);
90 static void init_idents (void);
91 static bool m32r_rtx_costs (rtx, int, int, int, int *, bool speed);
92 static int m32r_memory_move_cost (enum machine_mode, reg_class_t, bool);
93 static bool m32r_pass_by_reference (cumulative_args_t, enum machine_mode,
94 const_tree, bool);
95 static int m32r_arg_partial_bytes (cumulative_args_t, enum machine_mode,
96 tree, bool);
97 static rtx m32r_function_arg (cumulative_args_t, enum machine_mode,
98 const_tree, bool);
99 static void m32r_function_arg_advance (cumulative_args_t, enum machine_mode,
100 const_tree, bool);
101 static bool m32r_can_eliminate (const int, const int);
102 static void m32r_conditional_register_usage (void);
103 static void m32r_trampoline_init (rtx, tree, rtx);
104 static bool m32r_legitimate_constant_p (enum machine_mode, rtx);
105 \f
106 /* M32R specific attributes. */
107
108 static const struct attribute_spec m32r_attribute_table[] =
109 {
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
111 affects_type_identity } */
112 { "interrupt", 0, 0, true, false, false, NULL, false },
113 { "model", 1, 1, true, false, false, m32r_handle_model_attribute,
114 false },
115 { NULL, 0, 0, false, false, false, NULL, false }
116 };
117 \f
118 /* Initialize the GCC target structure. */
119 #undef TARGET_ATTRIBUTE_TABLE
120 #define TARGET_ATTRIBUTE_TABLE m32r_attribute_table
121
122 #undef TARGET_LEGITIMATE_ADDRESS_P
123 #define TARGET_LEGITIMATE_ADDRESS_P m32r_legitimate_address_p
124 #undef TARGET_LEGITIMIZE_ADDRESS
125 #define TARGET_LEGITIMIZE_ADDRESS m32r_legitimize_address
126 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
127 #define TARGET_MODE_DEPENDENT_ADDRESS_P m32r_mode_dependent_address_p
128
129 #undef TARGET_ASM_ALIGNED_HI_OP
130 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
131 #undef TARGET_ASM_ALIGNED_SI_OP
132 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
133
134 #undef TARGET_PRINT_OPERAND
135 #define TARGET_PRINT_OPERAND m32r_print_operand
136 #undef TARGET_PRINT_OPERAND_ADDRESS
137 #define TARGET_PRINT_OPERAND_ADDRESS m32r_print_operand_address
138 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
139 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P m32r_print_operand_punct_valid_p
140
141 #undef TARGET_ASM_FUNCTION_PROLOGUE
142 #define TARGET_ASM_FUNCTION_PROLOGUE m32r_output_function_prologue
143 #undef TARGET_ASM_FUNCTION_EPILOGUE
144 #define TARGET_ASM_FUNCTION_EPILOGUE m32r_output_function_epilogue
145
146 #undef TARGET_ASM_FILE_START
147 #define TARGET_ASM_FILE_START m32r_file_start
148
149 #undef TARGET_SCHED_ADJUST_PRIORITY
150 #define TARGET_SCHED_ADJUST_PRIORITY m32r_adjust_priority
151 #undef TARGET_SCHED_ISSUE_RATE
152 #define TARGET_SCHED_ISSUE_RATE m32r_issue_rate
153
154 #undef TARGET_OPTION_OVERRIDE
155 #define TARGET_OPTION_OVERRIDE m32r_option_override
156
157 #undef TARGET_ENCODE_SECTION_INFO
158 #define TARGET_ENCODE_SECTION_INFO m32r_encode_section_info
159 #undef TARGET_IN_SMALL_DATA_P
160 #define TARGET_IN_SMALL_DATA_P m32r_in_small_data_p
161
162
163 #undef TARGET_MEMORY_MOVE_COST
164 #define TARGET_MEMORY_MOVE_COST m32r_memory_move_cost
165 #undef TARGET_RTX_COSTS
166 #define TARGET_RTX_COSTS m32r_rtx_costs
167 #undef TARGET_ADDRESS_COST
168 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
169
170 #undef TARGET_PROMOTE_PROTOTYPES
171 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
172 #undef TARGET_RETURN_IN_MEMORY
173 #define TARGET_RETURN_IN_MEMORY m32r_return_in_memory
174
175 #undef TARGET_FUNCTION_VALUE
176 #define TARGET_FUNCTION_VALUE m32r_function_value
177 #undef TARGET_LIBCALL_VALUE
178 #define TARGET_LIBCALL_VALUE m32r_libcall_value
179 #undef TARGET_FUNCTION_VALUE_REGNO_P
180 #define TARGET_FUNCTION_VALUE_REGNO_P m32r_function_value_regno_p
181
182 #undef TARGET_SETUP_INCOMING_VARARGS
183 #define TARGET_SETUP_INCOMING_VARARGS m32r_setup_incoming_varargs
184 #undef TARGET_MUST_PASS_IN_STACK
185 #define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
186 #undef TARGET_PASS_BY_REFERENCE
187 #define TARGET_PASS_BY_REFERENCE m32r_pass_by_reference
188 #undef TARGET_ARG_PARTIAL_BYTES
189 #define TARGET_ARG_PARTIAL_BYTES m32r_arg_partial_bytes
190 #undef TARGET_FUNCTION_ARG
191 #define TARGET_FUNCTION_ARG m32r_function_arg
192 #undef TARGET_FUNCTION_ARG_ADVANCE
193 #define TARGET_FUNCTION_ARG_ADVANCE m32r_function_arg_advance
194
195 #undef TARGET_CAN_ELIMINATE
196 #define TARGET_CAN_ELIMINATE m32r_can_eliminate
197
198 #undef TARGET_CONDITIONAL_REGISTER_USAGE
199 #define TARGET_CONDITIONAL_REGISTER_USAGE m32r_conditional_register_usage
200
201 #undef TARGET_TRAMPOLINE_INIT
202 #define TARGET_TRAMPOLINE_INIT m32r_trampoline_init
203
204 #undef TARGET_LEGITIMATE_CONSTANT_P
205 #define TARGET_LEGITIMATE_CONSTANT_P m32r_legitimate_constant_p
206
207 struct gcc_target targetm = TARGET_INITIALIZER;
208 \f
209 /* Called by m32r_option_override to initialize various things. */
210
211 void
212 m32r_init (void)
213 {
214 init_reg_tables ();
215
216 /* Initialize array for TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
217 memset (m32r_punct_chars, 0, sizeof (m32r_punct_chars));
218 m32r_punct_chars['#'] = 1;
219 m32r_punct_chars['@'] = 1; /* ??? no longer used */
220
221 /* Provide default value if not specified. */
222 if (!global_options_set.x_g_switch_value)
223 g_switch_value = SDATA_DEFAULT_SIZE;
224 }
225
226 static void
227 m32r_option_override (void)
228 {
229 /* These need to be done at start up.
230 It's convenient to do them here. */
231 m32r_init ();
232 SUBTARGET_OVERRIDE_OPTIONS;
233 }
234
235 /* Vectors to keep interesting information about registers where it can easily
236 be got. We used to use the actual mode value as the bit number, but there
237 are (or may be) more than 32 modes now. Instead we use two tables: one
238 indexed by hard register number, and one indexed by mode. */
239
240 /* The purpose of m32r_mode_class is to shrink the range of modes so that
241 they all fit (as bit numbers) in a 32-bit word (again). Each real mode is
242 mapped into one m32r_mode_class mode. */
243
244 enum m32r_mode_class
245 {
246 C_MODE,
247 S_MODE, D_MODE, T_MODE, O_MODE,
248 SF_MODE, DF_MODE, TF_MODE, OF_MODE, A_MODE
249 };
250
251 /* Modes for condition codes. */
252 #define C_MODES (1 << (int) C_MODE)
253
254 /* Modes for single-word and smaller quantities. */
255 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
256
257 /* Modes for double-word and smaller quantities. */
258 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))
259
260 /* Modes for quad-word and smaller quantities. */
261 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
262
263 /* Modes for accumulators. */
264 #define A_MODES (1 << (int) A_MODE)
265
266 /* Value is 1 if register/mode pair is acceptable on the M32R. */
267
268 const unsigned int m32r_hard_regno_mode_ok[FIRST_PSEUDO_REGISTER] =
269 {
270 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
271 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, S_MODES, S_MODES, S_MODES,
272 S_MODES, C_MODES, A_MODES, A_MODES
273 };
274
275 unsigned int m32r_mode_class [NUM_MACHINE_MODES];
276
277 enum reg_class m32r_regno_reg_class[FIRST_PSEUDO_REGISTER];
278
279 static void
280 init_reg_tables (void)
281 {
282 int i;
283
284 for (i = 0; i < NUM_MACHINE_MODES; i++)
285 {
286 enum machine_mode m = (enum machine_mode) i;
287
288 switch (GET_MODE_CLASS (m))
289 {
290 case MODE_INT:
291 case MODE_PARTIAL_INT:
292 case MODE_COMPLEX_INT:
293 if (GET_MODE_SIZE (m) <= 4)
294 m32r_mode_class[i] = 1 << (int) S_MODE;
295 else if (GET_MODE_SIZE (m) == 8)
296 m32r_mode_class[i] = 1 << (int) D_MODE;
297 else if (GET_MODE_SIZE (m) == 16)
298 m32r_mode_class[i] = 1 << (int) T_MODE;
299 else if (GET_MODE_SIZE (m) == 32)
300 m32r_mode_class[i] = 1 << (int) O_MODE;
301 else
302 m32r_mode_class[i] = 0;
303 break;
304 case MODE_FLOAT:
305 case MODE_COMPLEX_FLOAT:
306 if (GET_MODE_SIZE (m) <= 4)
307 m32r_mode_class[i] = 1 << (int) SF_MODE;
308 else if (GET_MODE_SIZE (m) == 8)
309 m32r_mode_class[i] = 1 << (int) DF_MODE;
310 else if (GET_MODE_SIZE (m) == 16)
311 m32r_mode_class[i] = 1 << (int) TF_MODE;
312 else if (GET_MODE_SIZE (m) == 32)
313 m32r_mode_class[i] = 1 << (int) OF_MODE;
314 else
315 m32r_mode_class[i] = 0;
316 break;
317 case MODE_CC:
318 m32r_mode_class[i] = 1 << (int) C_MODE;
319 break;
320 default:
321 m32r_mode_class[i] = 0;
322 break;
323 }
324 }
325
326 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
327 {
328 if (GPR_P (i))
329 m32r_regno_reg_class[i] = GENERAL_REGS;
330 else if (i == ARG_POINTER_REGNUM)
331 m32r_regno_reg_class[i] = GENERAL_REGS;
332 else
333 m32r_regno_reg_class[i] = NO_REGS;
334 }
335 }
336 \f
337 /* M32R specific attribute support.
338
339 interrupt - for interrupt functions
340
341 model - select code model used to access object
342
343 small: addresses use 24 bits, use bl to make calls
344 medium: addresses use 32 bits, use bl to make calls
345 large: addresses use 32 bits, use seth/add3/jl to make calls
346
347 Grep for MODEL in m32r.h for more info. */
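/* For example (the argument must be one of the identifiers registered in
   init_idents below, with or without the surrounding underscores):

       int x __attribute__ ((model (small)));
       void f (void) __attribute__ ((model (large)));

   X is then reachable with 24-bit addresses, while calls to F use the
   seth/add3/jl sequence described above.  */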
348
349 static tree small_ident1;
350 static tree small_ident2;
351 static tree medium_ident1;
352 static tree medium_ident2;
353 static tree large_ident1;
354 static tree large_ident2;
355
356 static void
357 init_idents (void)
358 {
359 if (small_ident1 == 0)
360 {
361 small_ident1 = get_identifier ("small");
362 small_ident2 = get_identifier ("__small__");
363 medium_ident1 = get_identifier ("medium");
364 medium_ident2 = get_identifier ("__medium__");
365 large_ident1 = get_identifier ("large");
366 large_ident2 = get_identifier ("__large__");
367 }
368 }
369
370 /* Handle a "model" attribute; arguments as in
371 struct attribute_spec.handler. */
372 static tree
373 m32r_handle_model_attribute (tree *node ATTRIBUTE_UNUSED, tree name,
374 tree args, int flags ATTRIBUTE_UNUSED,
375 bool *no_add_attrs)
376 {
377 tree arg;
378
379 init_idents ();
380 arg = TREE_VALUE (args);
381
382 if (arg != small_ident1
383 && arg != small_ident2
384 && arg != medium_ident1
385 && arg != medium_ident2
386 && arg != large_ident1
387 && arg != large_ident2)
388 {
389 warning (OPT_Wattributes, "invalid argument of %qs attribute",
390 IDENTIFIER_POINTER (name));
391 *no_add_attrs = true;
392 }
393
394 return NULL_TREE;
395 }
396 \f
397 /* Encode section information of DECL, which is either a VAR_DECL,
398 FUNCTION_DECL, STRING_CST, CONSTRUCTOR, or ???.
399
400 For the M32R we want to record:
401
402 - whether the object lives in .sdata/.sbss.
403 - what code model should be used to access the object
404 */
405
406 static void
407 m32r_encode_section_info (tree decl, rtx rtl, int first)
408 {
409 int extra_flags = 0;
410 tree model_attr;
411 enum m32r_model model;
412
413 default_encode_section_info (decl, rtl, first);
414
415 if (!DECL_P (decl))
416 return;
417
418 model_attr = lookup_attribute ("model", DECL_ATTRIBUTES (decl));
419 if (model_attr)
420 {
421 tree id;
422
423 init_idents ();
424
425 id = TREE_VALUE (TREE_VALUE (model_attr));
426
427 if (id == small_ident1 || id == small_ident2)
428 model = M32R_MODEL_SMALL;
429 else if (id == medium_ident1 || id == medium_ident2)
430 model = M32R_MODEL_MEDIUM;
431 else if (id == large_ident1 || id == large_ident2)
432 model = M32R_MODEL_LARGE;
433 else
434 gcc_unreachable (); /* shouldn't happen */
435 }
436 else
437 {
438 if (TARGET_MODEL_SMALL)
439 model = M32R_MODEL_SMALL;
440 else if (TARGET_MODEL_MEDIUM)
441 model = M32R_MODEL_MEDIUM;
442 else if (TARGET_MODEL_LARGE)
443 model = M32R_MODEL_LARGE;
444 else
445 gcc_unreachable (); /* shouldn't happen */
446 }
447 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
448
449 if (extra_flags)
450 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= extra_flags;
451 }
452
453 /* Only mark the object as being small data area addressable if
454 it hasn't been explicitly marked with a code model.
455
456 The user can explicitly put an object in the small data area with the
457 section attribute. If the object is in sdata/sbss and marked with a
458 code model do both [put the object in .sdata and mark it as being
459 addressed with a specific code model - don't mark it as being addressed
460 with an SDA reloc though]. This is ok and might be useful at times. If
461 the object doesn't fit, the linker will give an error. */
462
463 static bool
464 m32r_in_small_data_p (const_tree decl)
465 {
466 const_tree section;
467
468 if (TREE_CODE (decl) != VAR_DECL)
469 return false;
470
471 if (lookup_attribute ("model", DECL_ATTRIBUTES (decl)))
472 return false;
473
474 section = DECL_SECTION_NAME (decl);
475 if (section)
476 {
477 const char *const name = TREE_STRING_POINTER (section);
478 if (strcmp (name, ".sdata") == 0 || strcmp (name, ".sbss") == 0)
479 return true;
480 }
481 else
482 {
483 if (! TREE_READONLY (decl) && ! TARGET_SDATA_NONE)
484 {
485 int size = int_size_in_bytes (TREE_TYPE (decl));
486
487 if (size > 0 && size <= g_switch_value)
488 return true;
489 }
490 }
491
492 return false;
493 }
494
495 /* Do anything needed before RTL is emitted for each function. */
496
497 void
498 m32r_init_expanders (void)
499 {
500 /* ??? At one point there was code here. The function is left in
501 to make it easy to experiment. */
502 }
503 \f
504 int
505 call_operand (rtx op, enum machine_mode mode)
506 {
507 if (!MEM_P (op))
508 return 0;
509 op = XEXP (op, 0);
510 return call_address_operand (op, mode);
511 }
512
513 /* Return 1 if OP is a reference to an object in .sdata/.sbss. */
514
515 int
516 small_data_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
517 {
518 if (! TARGET_SDATA_USE)
519 return 0;
520
521 if (GET_CODE (op) == SYMBOL_REF)
522 return SYMBOL_REF_SMALL_P (op);
523
524 if (GET_CODE (op) == CONST
525 && GET_CODE (XEXP (op, 0)) == PLUS
526 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
527 && satisfies_constraint_J (XEXP (XEXP (op, 0), 1)))
528 return SYMBOL_REF_SMALL_P (XEXP (XEXP (op, 0), 0));
529
530 return 0;
531 }
532
533 /* Return 1 if OP is a symbol that can use 24-bit addressing. */
534
535 int
536 addr24_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
537 {
538 rtx sym;
539
540 if (flag_pic)
541 return 0;
542
543 if (GET_CODE (op) == LABEL_REF)
544 return TARGET_ADDR24;
545
546 if (GET_CODE (op) == SYMBOL_REF)
547 sym = op;
548 else if (GET_CODE (op) == CONST
549 && GET_CODE (XEXP (op, 0)) == PLUS
550 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
551 && satisfies_constraint_M (XEXP (XEXP (op, 0), 1)))
552 sym = XEXP (XEXP (op, 0), 0);
553 else
554 return 0;
555
556 if (SYMBOL_REF_MODEL (sym) == M32R_MODEL_SMALL)
557 return 1;
558
559 if (TARGET_ADDR24
560 && (CONSTANT_POOL_ADDRESS_P (sym)
561 || LIT_NAME_P (XSTR (sym, 0))))
562 return 1;
563
564 return 0;
565 }
566
567 /* Return 1 if OP is a symbol that needs 32-bit addressing. */
568
569 int
570 addr32_operand (rtx op, enum machine_mode mode)
571 {
572 rtx sym;
573
574 if (GET_CODE (op) == LABEL_REF)
575 return TARGET_ADDR32;
576
577 if (GET_CODE (op) == SYMBOL_REF)
578 sym = op;
579 else if (GET_CODE (op) == CONST
580 && GET_CODE (XEXP (op, 0)) == PLUS
581 && GET_CODE (XEXP (XEXP (op, 0), 0)) == SYMBOL_REF
582 && CONST_INT_P (XEXP (XEXP (op, 0), 1))
583 && ! flag_pic)
584 sym = XEXP (XEXP (op, 0), 0);
585 else
586 return 0;
587
588 return (! addr24_operand (sym, mode)
589 && ! small_data_operand (sym, mode));
590 }
591
592 /* Return 1 if OP is a function that can be called with the `bl' insn. */
593
594 int
595 call26_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
596 {
597 if (flag_pic)
598 return 1;
599
600 if (GET_CODE (op) == SYMBOL_REF)
601 return SYMBOL_REF_MODEL (op) != M32R_MODEL_LARGE;
602
603 return TARGET_CALL26;
604 }
605
606 /* Return 1 if OP is a DImode const we want to handle inline.
607 This must match the code in the movdi pattern.
608 It is used by the 'G' constraint. */
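/* For example, (const_int 0x0000000500000007) splits into the words 5 and 7,
   both of which fit in 8 signed bits, so it can be built with two short
   `ldi' insns; a constant either of whose words falls outside [-128, 127]
   cannot.  */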
609
610 int
611 easy_di_const (rtx op)
612 {
613 rtx high_rtx, low_rtx;
614 HOST_WIDE_INT high, low;
615
616 split_double (op, &high_rtx, &low_rtx);
617 high = INTVAL (high_rtx);
618 low = INTVAL (low_rtx);
619 /* Pick constants loadable with 2 16-bit `ldi' insns. */
620 if (high >= -128 && high <= 127
621 && low >= -128 && low <= 127)
622 return 1;
623 return 0;
624 }
625
626 /* Return 1 if OP is a DFmode const we want to handle inline.
627 This must match the code in the movdf pattern.
628 It is used by the 'H' constraint. */
629
630 int
631 easy_df_const (rtx op)
632 {
633 REAL_VALUE_TYPE r;
634 long l[2];
635
636 REAL_VALUE_FROM_CONST_DOUBLE (r, op);
637 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
638 if (l[0] == 0 && l[1] == 0)
639 return 1;
640 if ((l[0] & 0xffff) == 0 && l[1] == 0)
641 return 1;
642 return 0;
643 }
644
645 /* Return 1 if OP is (mem (reg ...)).
646 This is used in insn length calcs. */
647
648 int
649 memreg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
650 {
651 return MEM_P (op) && REG_P (XEXP (op, 0));
652 }
653
654 /* Return nonzero if TYPE must be passed by indirect reference. */
655
656 static bool
657 m32r_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
658 enum machine_mode mode, const_tree type,
659 bool named ATTRIBUTE_UNUSED)
660 {
661 int size;
662
663 if (type)
664 size = int_size_in_bytes (type);
665 else
666 size = GET_MODE_SIZE (mode);
667
668 return (size < 0 || size > 8);
669 }
670 \f
671 /* Comparisons. */
672
673 /* X and Y are two things to compare using CODE. Emit the compare insn and
674 return the rtx for compare [arg0 of the if_then_else].
675 If need_compare is true then the comparison insn must be generated, rather
676 than being subsumed into the following branch instruction. */
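/* LE, for instance, is not generated directly: the table below maps it to
   an LT compare with the operands swapped and an EQ test of the condition
   bit, so `x <= y' is computed as the condition bit from `y < x' being
   clear.  */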
677
678 rtx
679 gen_compare (enum rtx_code code, rtx x, rtx y, int need_compare)
680 {
681 enum rtx_code compare_code;
682 enum rtx_code branch_code;
683 rtx cc_reg = gen_rtx_REG (CCmode, CARRY_REGNUM);
684 int must_swap = 0;
685
686 switch (code)
687 {
688 case EQ: compare_code = EQ; branch_code = NE; break;
689 case NE: compare_code = EQ; branch_code = EQ; break;
690 case LT: compare_code = LT; branch_code = NE; break;
691 case LE: compare_code = LT; branch_code = EQ; must_swap = 1; break;
692 case GT: compare_code = LT; branch_code = NE; must_swap = 1; break;
693 case GE: compare_code = LT; branch_code = EQ; break;
694 case LTU: compare_code = LTU; branch_code = NE; break;
695 case LEU: compare_code = LTU; branch_code = EQ; must_swap = 1; break;
696 case GTU: compare_code = LTU; branch_code = NE; must_swap = 1; break;
697 case GEU: compare_code = LTU; branch_code = EQ; break;
698
699 default:
700 gcc_unreachable ();
701 }
702
703 if (need_compare)
704 {
705 switch (compare_code)
706 {
707 case EQ:
708 if (satisfies_constraint_P (y) /* Reg equal to small const. */
709 && y != const0_rtx)
710 {
711 rtx tmp = gen_reg_rtx (SImode);
712
713 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
714 x = tmp;
715 y = const0_rtx;
716 }
717 else if (CONSTANT_P (y)) /* Reg equal to const. */
718 {
719 rtx tmp = force_reg (GET_MODE (x), y);
720 y = tmp;
721 }
722
723 if (register_operand (y, SImode) /* Reg equal to reg. */
724 || y == const0_rtx) /* Reg equal to zero. */
725 {
726 emit_insn (gen_cmp_eqsi_insn (x, y));
727
728 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
729 }
730 break;
731
732 case LT:
733 if (register_operand (y, SImode)
734 || satisfies_constraint_P (y))
735 {
736 rtx tmp = gen_reg_rtx (SImode); /* Reg compared to reg. */
737
738 switch (code)
739 {
740 case LT:
741 emit_insn (gen_cmp_ltsi_insn (x, y));
742 code = EQ;
743 break;
744 case LE:
745 if (y == const0_rtx)
746 tmp = const1_rtx;
747 else
748 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
749 emit_insn (gen_cmp_ltsi_insn (x, tmp));
750 code = EQ;
751 break;
752 case GT:
753 if (CONST_INT_P (y))
754 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
755 else
756 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
757 emit_insn (gen_cmp_ltsi_insn (x, tmp));
758 code = NE;
759 break;
760 case GE:
761 emit_insn (gen_cmp_ltsi_insn (x, y));
762 code = NE;
763 break;
764 default:
765 gcc_unreachable ();
766 }
767
768 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
769 }
770 break;
771
772 case LTU:
773 if (register_operand (y, SImode)
774 || satisfies_constraint_P (y))
775 {
776 rtx tmp = gen_reg_rtx (SImode); /* Reg (unsigned) compared to reg. */
777
778 switch (code)
779 {
780 case LTU:
781 emit_insn (gen_cmp_ltusi_insn (x, y));
782 code = EQ;
783 break;
784 case LEU:
785 if (y == const0_rtx)
786 tmp = const1_rtx;
787 else
788 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
789 emit_insn (gen_cmp_ltusi_insn (x, tmp));
790 code = EQ;
791 break;
792 case GTU:
793 if (CONST_INT_P (y))
794 tmp = gen_rtx_PLUS (SImode, y, const1_rtx);
795 else
796 emit_insn (gen_addsi3 (tmp, y, constm1_rtx));
797 emit_insn (gen_cmp_ltusi_insn (x, tmp));
798 code = NE;
799 break;
800 case GEU:
801 emit_insn (gen_cmp_ltusi_insn (x, y));
802 code = NE;
803 break;
804 default:
805 gcc_unreachable ();
806 }
807
808 return gen_rtx_fmt_ee (code, CCmode, cc_reg, const0_rtx);
809 }
810 break;
811
812 default:
813 gcc_unreachable ();
814 }
815 }
816 else
817 {
818 /* Reg/reg equal comparison. */
819 if (compare_code == EQ
820 && register_operand (y, SImode))
821 return gen_rtx_fmt_ee (code, CCmode, x, y);
822
823 /* Reg/zero signed comparison. */
824 if ((compare_code == EQ || compare_code == LT)
825 && y == const0_rtx)
826 return gen_rtx_fmt_ee (code, CCmode, x, y);
827
828 /* Reg/smallconst equal comparison. */
829 if (compare_code == EQ
830 && satisfies_constraint_P (y))
831 {
832 rtx tmp = gen_reg_rtx (SImode);
833
834 emit_insn (gen_addsi3 (tmp, x, GEN_INT (-INTVAL (y))));
835 return gen_rtx_fmt_ee (code, CCmode, tmp, const0_rtx);
836 }
837
838 /* Reg/const equal comparison. */
839 if (compare_code == EQ
840 && CONSTANT_P (y))
841 {
842 rtx tmp = force_reg (GET_MODE (x), y);
843
844 return gen_rtx_fmt_ee (code, CCmode, x, tmp);
845 }
846 }
847
848 if (CONSTANT_P (y))
849 {
850 if (must_swap)
851 y = force_reg (GET_MODE (x), y);
852 else
853 {
854 int ok_const = reg_or_int16_operand (y, GET_MODE (y));
855
856 if (! ok_const)
857 y = force_reg (GET_MODE (x), y);
858 }
859 }
860
861 switch (compare_code)
862 {
863 case EQ :
864 emit_insn (gen_cmp_eqsi_insn (must_swap ? y : x, must_swap ? x : y));
865 break;
866 case LT :
867 emit_insn (gen_cmp_ltsi_insn (must_swap ? y : x, must_swap ? x : y));
868 break;
869 case LTU :
870 emit_insn (gen_cmp_ltusi_insn (must_swap ? y : x, must_swap ? x : y));
871 break;
872
873 default:
874 gcc_unreachable ();
875 }
876
877 return gen_rtx_fmt_ee (branch_code, VOIDmode, cc_reg, CONST0_RTX (CCmode));
878 }
879
880 bool
881 gen_cond_store (enum rtx_code code, rtx op0, rtx op1, rtx op2)
882 {
883 enum machine_mode mode = GET_MODE (op0);
884
885 gcc_assert (mode == SImode);
886 switch (code)
887 {
888 case EQ:
889 if (!register_operand (op1, mode))
890 op1 = force_reg (mode, op1);
891
892 if (TARGET_M32RX || TARGET_M32R2)
893 {
894 if (!reg_or_zero_operand (op2, mode))
895 op2 = force_reg (mode, op2);
896
897 emit_insn (gen_seq_insn_m32rx (op0, op1, op2));
898 return true;
899 }
900 if (CONST_INT_P (op2) && INTVAL (op2) == 0)
901 {
902 emit_insn (gen_seq_zero_insn (op0, op1));
903 return true;
904 }
905
906 if (!reg_or_eq_int16_operand (op2, mode))
907 op2 = force_reg (mode, op2);
908
909 emit_insn (gen_seq_insn (op0, op1, op2));
910 return true;
911
912 case NE:
913 if (!CONST_INT_P (op2)
914 || (INTVAL (op2) != 0 && satisfies_constraint_K (op2)))
915 {
916 rtx reg;
917
918 if (reload_completed || reload_in_progress)
919 return false;
920
921 reg = gen_reg_rtx (SImode);
922 emit_insn (gen_xorsi3 (reg, op1, op2));
923 op1 = reg;
924
925 if (!register_operand (op1, mode))
926 op1 = force_reg (mode, op1);
927
928 emit_insn (gen_sne_zero_insn (op0, op1));
929 return true;
930 }
931 return false;
932
933 case LT:
934 case GT:
935 if (code == GT)
936 {
937 rtx tmp = op2;
938 op2 = op1;
939 op1 = tmp;
940 code = LT;
941 }
942
943 if (!register_operand (op1, mode))
944 op1 = force_reg (mode, op1);
945
946 if (!reg_or_int16_operand (op2, mode))
947 op2 = force_reg (mode, op2);
948
949 emit_insn (gen_slt_insn (op0, op1, op2));
950 return true;
951
952 case LTU:
953 case GTU:
954 if (code == GTU)
955 {
956 rtx tmp = op2;
957 op2 = op1;
958 op1 = tmp;
959 code = LTU;
960 }
961
962 if (!register_operand (op1, mode))
963 op1 = force_reg (mode, op1);
964
965 if (!reg_or_int16_operand (op2, mode))
966 op2 = force_reg (mode, op2);
967
968 emit_insn (gen_sltu_insn (op0, op1, op2));
969 return true;
970
971 case GE:
972 case GEU:
973 if (!register_operand (op1, mode))
974 op1 = force_reg (mode, op1);
975
976 if (!reg_or_int16_operand (op2, mode))
977 op2 = force_reg (mode, op2);
978
979 if (code == GE)
980 emit_insn (gen_sge_insn (op0, op1, op2));
981 else
982 emit_insn (gen_sgeu_insn (op0, op1, op2));
983 return true;
984
985 case LE:
986 case LEU:
987 if (!register_operand (op1, mode))
988 op1 = force_reg (mode, op1);
989
990 if (CONST_INT_P (op2))
991 {
992 HOST_WIDE_INT value = INTVAL (op2);
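	  /* `x <= C' is handled as `x < C + 1': the first check catches the
	     case where C + 1 would overflow a signed 32-bit value, and a
	     C + 1 that does not fit in 16 bits is forced into a register
	     below.  */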
993 if (value >= 2147483647)
994 {
995 emit_move_insn (op0, const1_rtx);
996 return true;
997 }
998
999 op2 = GEN_INT (value + 1);
1000 if (value < -32768 || value >= 32767)
1001 op2 = force_reg (mode, op2);
1002
1003 if (code == LEU)
1004 emit_insn (gen_sltu_insn (op0, op1, op2));
1005 else
1006 emit_insn (gen_slt_insn (op0, op1, op2));
1007 return true;
1008 }
1009
1010 if (!register_operand (op2, mode))
1011 op2 = force_reg (mode, op2);
1012
1013 if (code == LEU)
1014 emit_insn (gen_sleu_insn (op0, op1, op2));
1015 else
1016 emit_insn (gen_sle_insn (op0, op1, op2));
1017 return true;
1018
1019 default:
1020 gcc_unreachable ();
1021 }
1022 }
1023
1024 \f
1025 /* Split a 2 word move (DI or DF) into component parts. */
1026
1027 rtx
1028 gen_split_move_double (rtx operands[])
1029 {
1030 enum machine_mode mode = GET_MODE (operands[0]);
1031 rtx dest = operands[0];
1032 rtx src = operands[1];
1033 rtx val;
1034
1035 /* We might have (SUBREG (MEM)) here, so just get rid of the
1036 subregs to make this code simpler. It is safe to call
1037 alter_subreg any time after reload. */
1038 if (GET_CODE (dest) == SUBREG)
1039 alter_subreg (&dest, true);
1040 if (GET_CODE (src) == SUBREG)
1041 alter_subreg (&src, true);
1042
1043 start_sequence ();
1044 if (REG_P (dest))
1045 {
1046 int dregno = REGNO (dest);
1047
1048 /* Reg = reg. */
1049 if (REG_P (src))
1050 {
1051 int sregno = REGNO (src);
1052
1053 int reverse = (dregno == sregno + 1);
1054
1055 /* We normally copy the low-numbered register first. However, if
1056 the first register of operand 0 is the same as the second register of
1057 operand 1, we must copy in the opposite order. */
1058 emit_insn (gen_rtx_SET (VOIDmode,
1059 operand_subword (dest, reverse, TRUE, mode),
1060 operand_subword (src, reverse, TRUE, mode)));
1061
1062 emit_insn (gen_rtx_SET (VOIDmode,
1063 operand_subword (dest, !reverse, TRUE, mode),
1064 operand_subword (src, !reverse, TRUE, mode)));
1065 }
1066
1067 /* Reg = constant. */
1068 else if (CONST_INT_P (src) || GET_CODE (src) == CONST_DOUBLE)
1069 {
1070 rtx words[2];
1071 split_double (src, &words[0], &words[1]);
1072 emit_insn (gen_rtx_SET (VOIDmode,
1073 operand_subword (dest, 0, TRUE, mode),
1074 words[0]));
1075
1076 emit_insn (gen_rtx_SET (VOIDmode,
1077 operand_subword (dest, 1, TRUE, mode),
1078 words[1]));
1079 }
1080
1081 /* Reg = mem. */
1082 else if (MEM_P (src))
1083 {
1084 /* If the high-address word is used in the address, we must load it
1085 last. Otherwise, load it first. */
1086 int reverse
1087 = (refers_to_regno_p (dregno, dregno + 1, XEXP (src, 0), 0) != 0);
1088
1089 /* We used to optimize loads from single registers as
1090
1091 ld r1,r3+; ld r2,r3
1092
1093 if r3 were not used subsequently. However, the REG_NOTES aren't
1094 propagated correctly by the reload phase, and it can cause bad
1095 code to be generated. We could still try:
1096
1097 ld r1,r3+; ld r2,r3; addi r3,-4
1098
1099 which saves 2 bytes and doesn't force longword alignment. */
1100 emit_insn (gen_rtx_SET (VOIDmode,
1101 operand_subword (dest, reverse, TRUE, mode),
1102 adjust_address (src, SImode,
1103 reverse * UNITS_PER_WORD)));
1104
1105 emit_insn (gen_rtx_SET (VOIDmode,
1106 operand_subword (dest, !reverse, TRUE, mode),
1107 adjust_address (src, SImode,
1108 !reverse * UNITS_PER_WORD)));
1109 }
1110 else
1111 gcc_unreachable ();
1112 }
1113
1114 /* Mem = reg. */
1115 /* We used to optimize stores through single registers as
1116
1117 st r1,r3; st r2,+r3
1118
1119 if r3 were not used subsequently. However, the REG_NOTES aren't
1120 propagated correctly by the reload phase, and it can cause bad
1121 code to be generated. We could still try:
1122
1123 st r1,r3; st r2,+r3; addi r3,-4
1124
1125 which saves 2 bytes and doesn't force longword alignment. */
1126 else if (MEM_P (dest) && REG_P (src))
1127 {
1128 emit_insn (gen_rtx_SET (VOIDmode,
1129 adjust_address (dest, SImode, 0),
1130 operand_subword (src, 0, TRUE, mode)));
1131
1132 emit_insn (gen_rtx_SET (VOIDmode,
1133 adjust_address (dest, SImode, UNITS_PER_WORD),
1134 operand_subword (src, 1, TRUE, mode)));
1135 }
1136
1137 else
1138 gcc_unreachable ();
1139
1140 val = get_insns ();
1141 end_sequence ();
1142 return val;
1143 }
1144
1145 \f
1146 static int
1147 m32r_arg_partial_bytes (cumulative_args_t cum_v, enum machine_mode mode,
1148 tree type, bool named ATTRIBUTE_UNUSED)
1149 {
1150 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1151
1152 int words;
1153 unsigned int size =
1154 (((mode == BLKmode && type)
1155 ? (unsigned int) int_size_in_bytes (type)
1156 : GET_MODE_SIZE (mode)) + UNITS_PER_WORD - 1)
1157 / UNITS_PER_WORD;
1158
1159 if (*cum >= M32R_MAX_PARM_REGS)
1160 words = 0;
1161 else if (*cum + size > M32R_MAX_PARM_REGS)
1162 words = (*cum + size) - M32R_MAX_PARM_REGS;
1163 else
1164 words = 0;
1165
1166 return words * UNITS_PER_WORD;
1167 }
1168
1169 /* The ROUND_ADVANCE* macros are local to this file. */
1170 /* Round SIZE up to a word boundary. */
1171 #define ROUND_ADVANCE(SIZE) \
1172 (((SIZE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1173
1174 /* Round arg MODE/TYPE up to the next word boundary. */
1175 #define ROUND_ADVANCE_ARG(MODE, TYPE) \
1176 ((MODE) == BLKmode \
1177 ? ROUND_ADVANCE ((unsigned int) int_size_in_bytes (TYPE)) \
1178 : ROUND_ADVANCE ((unsigned int) GET_MODE_SIZE (MODE)))
1179
1180 /* Round CUM up to the necessary point for argument MODE/TYPE. */
1181 #define ROUND_ADVANCE_CUM(CUM, MODE, TYPE) (CUM)
1182
1183 /* Return boolean indicating arg of type TYPE and mode MODE will be passed in
1184 a reg. This includes arguments that have to be passed by reference as the
1185 pointer to them is passed in a reg if one is available (and that is what
1186 we're given).
1187 This macro is only used in this file. */
1188 #define PASS_IN_REG_P(CUM, MODE, TYPE) \
1189 (ROUND_ADVANCE_CUM ((CUM), (MODE), (TYPE)) < M32R_MAX_PARM_REGS)
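/* For example, with the M32R's 4-byte words a 6-byte BLKmode argument has
   ROUND_ADVANCE_ARG equal to 2, so it advances CUM by two words and is
   passed in registers only while CUM is still below M32R_MAX_PARM_REGS.  */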
1190
1191 /* Determine where to put an argument to a function.
1192 Value is zero to push the argument on the stack,
1193 or a hard register in which to store the argument.
1194
1195 MODE is the argument's machine mode.
1196 TYPE is the data type of the argument (as a tree).
1197 This is null for libcalls where that information may
1198 not be available.
1199 CUM is a variable of type CUMULATIVE_ARGS which gives info about
1200 the preceding args and about the function being called.
1201 NAMED is nonzero if this argument is a named parameter
1202 (otherwise it is an extra parameter matching an ellipsis). */
1203 /* On the M32R the first M32R_MAX_PARM_REGS args are normally in registers
1204 and the rest are pushed. */
1205
1206 static rtx
1207 m32r_function_arg (cumulative_args_t cum_v, enum machine_mode mode,
1208 const_tree type ATTRIBUTE_UNUSED,
1209 bool named ATTRIBUTE_UNUSED)
1210 {
1211 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1212
1213 return (PASS_IN_REG_P (*cum, mode, type)
1214 ? gen_rtx_REG (mode, ROUND_ADVANCE_CUM (*cum, mode, type))
1215 : NULL_RTX);
1216 }
1217
1218 /* Update the data in CUM to advance over an argument
1219 of mode MODE and data type TYPE.
1220 (TYPE is null for libcalls where that information may not be available.) */
1221
1222 static void
1223 m32r_function_arg_advance (cumulative_args_t cum_v, enum machine_mode mode,
1224 const_tree type, bool named ATTRIBUTE_UNUSED)
1225 {
1226 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
1227
1228 *cum = (ROUND_ADVANCE_CUM (*cum, mode, type)
1229 + ROUND_ADVANCE_ARG (mode, type));
1230 }
1231
1232 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1233
1234 static bool
1235 m32r_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1236 {
1237 cumulative_args_t dummy = pack_cumulative_args (NULL);
1238
1239 return m32r_pass_by_reference (dummy, TYPE_MODE (type), type, false);
1240 }
1241
1242 /* Worker function for TARGET_FUNCTION_VALUE. */
1243
1244 static rtx
1245 m32r_function_value (const_tree valtype,
1246 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
1247 bool outgoing ATTRIBUTE_UNUSED)
1248 {
1249 return gen_rtx_REG (TYPE_MODE (valtype), 0);
1250 }
1251
1252 /* Worker function for TARGET_LIBCALL_VALUE. */
1253
1254 static rtx
1255 m32r_libcall_value (enum machine_mode mode,
1256 const_rtx fun ATTRIBUTE_UNUSED)
1257 {
1258 return gen_rtx_REG (mode, 0);
1259 }
1260
1261 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.
1262
1263 ??? What about r1 in DI/DF values. */
1264
1265 static bool
1266 m32r_function_value_regno_p (const unsigned int regno)
1267 {
1268 return (regno == 0);
1269 }
1270
1271 /* Do any needed setup for a variadic function. For the M32R, we must
1272 create a register parameter block, and then copy any anonymous arguments
1273 in registers to memory.
1274
1275 CUM has not been updated for the last named argument which has type TYPE
1276 and mode MODE, and we rely on this fact. */
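/* For example, for `int f (int a, ...)' the single named argument leaves
   first_anon_arg equal to 1, so argument registers 1 through
   M32R_MAX_PARM_REGS - 1 are copied into the register parameter block
   and *pretend_size covers that many words.  */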
1277
1278 static void
1279 m32r_setup_incoming_varargs (cumulative_args_t cum, enum machine_mode mode,
1280 tree type, int *pretend_size, int no_rtl)
1281 {
1282 int first_anon_arg;
1283
1284 if (no_rtl)
1285 return;
1286
1287 /* All BLKmode values are passed by reference. */
1288 gcc_assert (mode != BLKmode);
1289
1290 first_anon_arg = (ROUND_ADVANCE_CUM (*get_cumulative_args (cum), mode, type)
1291 + ROUND_ADVANCE_ARG (mode, type));
1292
1293 if (first_anon_arg < M32R_MAX_PARM_REGS)
1294 {
1295 /* Note that first_reg_offset < M32R_MAX_PARM_REGS. */
1296 int first_reg_offset = first_anon_arg;
1297 /* Size in words to "pretend" allocate. */
1298 int size = M32R_MAX_PARM_REGS - first_reg_offset;
1299 rtx regblock;
1300
1301 regblock = gen_frame_mem (BLKmode,
1302 plus_constant (Pmode, arg_pointer_rtx,
1303 FIRST_PARM_OFFSET (0)));
1304 set_mem_alias_set (regblock, get_varargs_alias_set ());
1305 move_block_from_reg (first_reg_offset, regblock, size);
1306
1307 *pretend_size = (size * UNITS_PER_WORD);
1308 }
1309 }
1310
1311 \f
1312 /* Return true if INSN is a real instruction-bearing insn. */
1313
1314 static int
1315 m32r_is_insn (rtx insn)
1316 {
1317 return (NONDEBUG_INSN_P (insn)
1318 && GET_CODE (PATTERN (insn)) != USE
1319 && GET_CODE (PATTERN (insn)) != CLOBBER);
1320 }
1321
1322 /* Increase the priority of long instructions so that the
1323 short instructions are scheduled ahead of the long ones. */
1324
1325 static int
1326 m32r_adjust_priority (rtx insn, int priority)
1327 {
1328 if (m32r_is_insn (insn)
1329 && get_attr_insn_size (insn) != INSN_SIZE_SHORT)
1330 priority <<= 3;
1331
1332 return priority;
1333 }
1334
1335 \f
1336 /* Indicate how many instructions can be issued at the same time.
1337 This is sort of a lie. The m32r can issue only 1 long insn at
1338 once, but it can issue 2 short insns. The default therefore is
1339 set at 2, but this can be overridden by the command line option
1340 -missue-rate=1. */
1341
1342 static int
1343 m32r_issue_rate (void)
1344 {
1345 return ((TARGET_LOW_ISSUE_RATE) ? 1 : 2);
1346 }
1347 \f
1348 /* Cost functions. */
1349 /* Memory is 3 times as expensive as registers.
1350 ??? Is that the right way to look at it? */
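/* The 6 and 12 below are presumably relative to the default register-move
   cost of 2, which gives the 3x ratio above for single-word accesses and
   6x for wider ones.  */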
1351
1352 static int
1353 m32r_memory_move_cost (enum machine_mode mode,
1354 reg_class_t rclass ATTRIBUTE_UNUSED,
1355 bool in ATTRIBUTE_UNUSED)
1356 {
1357 if (GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
1358 return 6;
1359 else
1360 return 12;
1361 }
1362
1363 static bool
1364 m32r_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
1365 int opno ATTRIBUTE_UNUSED, int *total,
1366 bool speed ATTRIBUTE_UNUSED)
1367 {
1368 switch (code)
1369 {
1370 /* Small integers are as cheap as registers. 4 byte values can be
1371 fetched as immediate constants - let's give that the cost of an
1372 extra insn. */
1373 case CONST_INT:
1374 if (INT16_P (INTVAL (x)))
1375 {
1376 *total = 0;
1377 return true;
1378 }
1379 /* FALLTHRU */
1380
1381 case CONST:
1382 case LABEL_REF:
1383 case SYMBOL_REF:
1384 *total = COSTS_N_INSNS (1);
1385 return true;
1386
1387 case CONST_DOUBLE:
1388 {
1389 rtx high, low;
1390
1391 split_double (x, &high, &low);
1392 *total = COSTS_N_INSNS (!INT16_P (INTVAL (high))
1393 + !INT16_P (INTVAL (low)));
1394 return true;
1395 }
1396
1397 case MULT:
1398 *total = COSTS_N_INSNS (3);
1399 return true;
1400
1401 case DIV:
1402 case UDIV:
1403 case MOD:
1404 case UMOD:
1405 *total = COSTS_N_INSNS (10);
1406 return true;
1407
1408 default:
1409 return false;
1410 }
1411 }
1412 \f
1413 /* Type of function DECL.
1414
1415 The result is cached. To reset the cache at the end of a function,
1416 call with DECL = NULL_TREE. */
1417
1418 enum m32r_function_type
1419 m32r_compute_function_type (tree decl)
1420 {
1421 /* Cached value. */
1422 static enum m32r_function_type fn_type = M32R_FUNCTION_UNKNOWN;
1423 /* Last function we were called for. */
1424 static tree last_fn = NULL_TREE;
1425
1426 /* Resetting the cached value? */
1427 if (decl == NULL_TREE)
1428 {
1429 fn_type = M32R_FUNCTION_UNKNOWN;
1430 last_fn = NULL_TREE;
1431 return fn_type;
1432 }
1433
1434 if (decl == last_fn && fn_type != M32R_FUNCTION_UNKNOWN)
1435 return fn_type;
1436
1437 /* Compute function type. */
1438 fn_type = (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE
1439 ? M32R_FUNCTION_INTERRUPT
1440 : M32R_FUNCTION_NORMAL);
1441
1442 last_fn = decl;
1443 return fn_type;
1444 }
1445 \f/* Function prologue/epilogue handlers. */
1446
1447 /* M32R stack frames look like:
1448
1449 Before call After call
1450 +-----------------------+ +-----------------------+
1451 | | | |
1452 high | local variables, | | local variables, |
1453 mem | reg save area, etc. | | reg save area, etc. |
1454 | | | |
1455 +-----------------------+ +-----------------------+
1456 | | | |
1457 | arguments on stack. | | arguments on stack. |
1458 | | | |
1459 SP+0->+-----------------------+ +-----------------------+
1460 | reg parm save area, |
1461 | only created for |
1462 | variable argument |
1463 | functions |
1464 +-----------------------+
1465 | previous frame ptr |
1466 +-----------------------+
1467 | |
1468 | register save area |
1469 | |
1470 +-----------------------+
1471 | return address |
1472 +-----------------------+
1473 | |
1474 | local variables |
1475 | |
1476 +-----------------------+
1477 | |
1478 | alloca allocations |
1479 | |
1480 +-----------------------+
1481 | |
1482 low | arguments on stack |
1483 memory | |
1484 SP+0->+-----------------------+
1485
1486 Notes:
1487 1) The "reg parm save area" does not exist for non-variable-argument fns.
1488 2) The "reg parm save area" can be eliminated completely if we saved regs
1489 containing anonymous args separately but that complicates things too
1490 much (so it's not done).
1491 3) The return address is saved after the register save area so as to have as
1492 many insns as possible between the restoration of `lr' and the `jmp lr'. */
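/* As a concrete example (assuming FIRST_PARM_OFFSET (0) is 0): a
   non-variadic function with 8 bytes of locals, no outgoing stack
   arguments, and which must save fp and lr has var_size = 8, reg_size = 8
   and everything else 0, so m32r_compute_frame_size returns 16.  */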
1493
1494 /* Structure to be filled in by m32r_compute_frame_size with register
1495 save masks, and offsets for the current function. */
1496 struct m32r_frame_info
1497 {
1498 unsigned int total_size; /* # bytes that the entire frame takes up. */
1499 unsigned int extra_size; /* # bytes of extra stuff. */
1500 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
1501 unsigned int args_size; /* # bytes that outgoing arguments take up. */
1502 unsigned int reg_size; /* # bytes needed to store regs. */
1503 unsigned int var_size; /* # bytes that variables take up. */
1504 unsigned int gmask; /* Mask of saved gp registers. */
1505 unsigned int save_fp; /* Nonzero if fp must be saved. */
1506 unsigned int save_lr; /* Nonzero if lr (return addr) must be saved. */
1507 int initialized; /* Nonzero if frame size already calculated. */
1508 };
1509
1510 /* Current frame information calculated by m32r_compute_frame_size. */
1511 static struct m32r_frame_info current_frame_info;
1512
1513 /* Zero structure to initialize current_frame_info. */
1514 static struct m32r_frame_info zero_frame_info;
1515
1516 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1517 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1518
1519 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1520 The return address and frame pointer are treated separately.
1521 Don't consider them here. */
1522 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1523 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1524 && (df_regs_ever_live_p (regno) && (!call_really_used_regs[regno] || interrupt_p)))
1525
1526 #define MUST_SAVE_FRAME_POINTER (df_regs_ever_live_p (FRAME_POINTER_REGNUM))
1527 #define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM) || crtl->profile)
1528
1529 #define SHORT_INSN_SIZE 2 /* Size of small instructions. */
1530 #define LONG_INSN_SIZE 4 /* Size of long instructions. */
1531
1532 /* Return the bytes needed to compute the frame pointer from the current
1533 stack pointer.
1534
1535 SIZE is the size needed for local variables. */
1536
1537 unsigned int
1538 m32r_compute_frame_size (int size) /* # of var. bytes allocated. */
1539 {
1540 unsigned int regno;
1541 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1542 unsigned int reg_size;
1543 unsigned int gmask;
1544 enum m32r_function_type fn_type;
1545 int interrupt_p;
1546 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1547 | crtl->profile);
1548
1549 var_size = M32R_STACK_ALIGN (size);
1550 args_size = M32R_STACK_ALIGN (crtl->outgoing_args_size);
1551 pretend_size = crtl->args.pretend_args_size;
1552 extra_size = FIRST_PARM_OFFSET (0);
1553 total_size = extra_size + pretend_size + args_size + var_size;
1554 reg_size = 0;
1555 gmask = 0;
1556
1557 /* See if this is an interrupt handler. Call used registers must be saved
1558 for them too. */
1559 fn_type = m32r_compute_function_type (current_function_decl);
1560 interrupt_p = M32R_INTERRUPT_P (fn_type);
1561
1562 /* Calculate space needed for registers. */
1563 for (regno = 0; regno < M32R_MAX_INT_REGS; regno++)
1564 {
1565 if (MUST_SAVE_REGISTER (regno, interrupt_p)
1566 || (regno == PIC_OFFSET_TABLE_REGNUM && pic_reg_used))
1567 {
1568 reg_size += UNITS_PER_WORD;
1569 gmask |= 1 << regno;
1570 }
1571 }
1572
1573 current_frame_info.save_fp = MUST_SAVE_FRAME_POINTER;
1574 current_frame_info.save_lr = MUST_SAVE_RETURN_ADDR || pic_reg_used;
1575
1576 reg_size += ((current_frame_info.save_fp + current_frame_info.save_lr)
1577 * UNITS_PER_WORD);
1578 total_size += reg_size;
1579
1580 /* ??? Not sure this is necessary, and I don't think the epilogue
1581 handler will do the right thing if this changes total_size. */
1582 total_size = M32R_STACK_ALIGN (total_size);
1583
1584 /* frame_size = total_size - (pretend_size + reg_size); */
1585
1586 /* Save computed information. */
1587 current_frame_info.total_size = total_size;
1588 current_frame_info.extra_size = extra_size;
1589 current_frame_info.pretend_size = pretend_size;
1590 current_frame_info.var_size = var_size;
1591 current_frame_info.args_size = args_size;
1592 current_frame_info.reg_size = reg_size;
1593 current_frame_info.gmask = gmask;
1594 current_frame_info.initialized = reload_completed;
1595
1596 /* Ok, we're done. */
1597 return total_size;
1598 }
1599
1600 /* Worker function for TARGET_CAN_ELIMINATE. */
1601
1602 bool
1603 m32r_can_eliminate (const int from, const int to)
1604 {
1605 return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
1606 ? ! frame_pointer_needed
1607 : true);
1608 }
1609
1610 \f
1611 /* The table we use to reference PIC data. */
1612 static rtx global_offset_table;
1613
1614 static void
1615 m32r_reload_lr (rtx sp, int size)
1616 {
1617 rtx lr = gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM);
1618
1619 if (size == 0)
1620 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, sp)));
1621 else if (size < 32768)
1622 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode,
1623 gen_rtx_PLUS (Pmode, sp,
1624 GEN_INT (size)))));
1625 else
1626 {
1627 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1628
1629 emit_insn (gen_movsi (tmp, GEN_INT (size)));
1630 emit_insn (gen_addsi3 (tmp, tmp, sp));
1631 emit_insn (gen_movsi (lr, gen_frame_mem (Pmode, tmp)));
1632 }
1633
1634 emit_use (lr);
1635 }
1636
1637 void
1638 m32r_load_pic_register (void)
1639 {
1640 global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
1641 emit_insn (gen_get_pc (pic_offset_table_rtx, global_offset_table,
1642 GEN_INT (TARGET_MODEL_SMALL)));
1643
1644 /* Need to emit this whether or not we obey regdecls,
1645 since setjmp/longjmp can cause life info to screw up. */
1646 emit_use (pic_offset_table_rtx);
1647 }
1648
1649 /* Expand the m32r prologue as a series of insns. */
1650
1651 void
1652 m32r_expand_prologue (void)
1653 {
1654 int regno;
1655 int frame_size;
1656 unsigned int gmask;
1657 int pic_reg_used = flag_pic && (crtl->uses_pic_offset_table
1658 | crtl->profile);
1659
1660 if (! current_frame_info.initialized)
1661 m32r_compute_frame_size (get_frame_size ());
1662
1663 gmask = current_frame_info.gmask;
1664
1665 /* These cases shouldn't happen. Catch them now. */
1666 gcc_assert (current_frame_info.total_size || !gmask);
1667
1668 /* Allocate space for register arguments if this is a variadic function. */
1669 if (current_frame_info.pretend_size != 0)
1670 {
1671 /* Use a HOST_WIDE_INT temporary, since negating an unsigned int gives
1672 the wrong result on a 64-bit host. */
1673 HOST_WIDE_INT pretend_size = current_frame_info.pretend_size;
1674 emit_insn (gen_addsi3 (stack_pointer_rtx,
1675 stack_pointer_rtx,
1676 GEN_INT (-pretend_size)));
1677 }
1678
1679 /* Save any registers we need to and set up fp. */
1680 if (current_frame_info.save_fp)
1681 emit_insn (gen_movsi_push (stack_pointer_rtx, frame_pointer_rtx));
1682
1683 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1684
1685 /* Save any needed call-saved regs (and call-used if this is an
1686 interrupt handler). */
1687 for (regno = 0; regno <= M32R_MAX_INT_REGS; ++regno)
1688 {
1689 if ((gmask & (1 << regno)) != 0)
1690 emit_insn (gen_movsi_push (stack_pointer_rtx,
1691 gen_rtx_REG (Pmode, regno)));
1692 }
1693
1694 if (current_frame_info.save_lr)
1695 emit_insn (gen_movsi_push (stack_pointer_rtx,
1696 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1697
1698 /* Allocate the stack frame. */
1699 frame_size = (current_frame_info.total_size
1700 - (current_frame_info.pretend_size
1701 + current_frame_info.reg_size));
1702
1703 if (frame_size == 0)
1704 ; /* Nothing to do. */
1705 else if (frame_size <= 32768)
1706 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1707 GEN_INT (-frame_size)));
1708 else
1709 {
1710 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1711
1712 emit_insn (gen_movsi (tmp, GEN_INT (frame_size)));
1713 emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, tmp));
1714 }
1715
1716 if (frame_pointer_needed)
1717 emit_insn (gen_movsi (frame_pointer_rtx, stack_pointer_rtx));
1718
1719 if (crtl->profile)
1720 /* Push lr for mcount (form_pc, x). */
1721 emit_insn (gen_movsi_push (stack_pointer_rtx,
1722 gen_rtx_REG (Pmode, RETURN_ADDR_REGNUM)));
1723
1724 if (pic_reg_used)
1725 {
1726 m32r_load_pic_register ();
1727 m32r_reload_lr (stack_pointer_rtx,
1728 (crtl->profile ? 0 : frame_size));
1729 }
1730
1731 if (crtl->profile && !pic_reg_used)
1732 emit_insn (gen_blockage ());
1733 }
1734
1735 \f
1736 /* Set up the stack and frame pointer (if desired) for the function.
1737 Note, if this is changed, you need to mirror the changes in
1738 m32r_compute_frame_size which calculates the prolog size. */
1739
1740 static void
1741 m32r_output_function_prologue (FILE * file, HOST_WIDE_INT size)
1742 {
1743 enum m32r_function_type fn_type = m32r_compute_function_type (current_function_decl);
1744
1745 /* If this is an interrupt handler, mark it as such. */
1746 if (M32R_INTERRUPT_P (fn_type))
1747 fprintf (file, "\t%s interrupt handler\n", ASM_COMMENT_START);
1748
1749 if (! current_frame_info.initialized)
1750 m32r_compute_frame_size (size);
1751
1752 /* This is only for the human reader. */
1753 fprintf (file,
1754 "\t%s PROLOGUE, vars= %d, regs= %d, args= %d, extra= %d\n",
1755 ASM_COMMENT_START,
1756 current_frame_info.var_size,
1757 current_frame_info.reg_size / 4,
1758 current_frame_info.args_size,
1759 current_frame_info.extra_size);
1760 }
1761 \f
1762 /* Output RTL to pop register REGNO from the stack. */
1763
1764 static void
1765 pop (int regno)
1766 {
1767 rtx x;
1768
1769 x = emit_insn (gen_movsi_pop (gen_rtx_REG (Pmode, regno),
1770 stack_pointer_rtx));
1771 add_reg_note (x, REG_INC, stack_pointer_rtx);
1772 }
1773
1774 /* Expand the m32r epilogue as a series of insns. */
1775
1776 void
1777 m32r_expand_epilogue (void)
1778 {
1779 int regno;
1780 int noepilogue = FALSE;
1781 int total_size;
1782
1783 gcc_assert (current_frame_info.initialized);
1784 total_size = current_frame_info.total_size;
1785
1786 if (total_size == 0)
1787 {
1788 rtx insn = get_last_insn ();
1789
1790 /* If the last insn was a BARRIER, we don't have to write any code
1791 because a jump (aka return) was put there. */
1792 if (insn && NOTE_P (insn))
1793 insn = prev_nonnote_insn (insn);
1794 if (insn && BARRIER_P (insn))
1795 noepilogue = TRUE;
1796 }
1797
1798 if (!noepilogue)
1799 {
1800 unsigned int var_size = current_frame_info.var_size;
1801 unsigned int args_size = current_frame_info.args_size;
1802 unsigned int gmask = current_frame_info.gmask;
1803 int can_trust_sp_p = !cfun->calls_alloca;
1804
1805 if (flag_exceptions)
1806 emit_insn (gen_blockage ());
1807
1808 /* The first thing to do is point the sp at the bottom of the register
1809 save area. */
1810 if (can_trust_sp_p)
1811 {
1812 unsigned int reg_offset = var_size + args_size;
1813
1814 if (reg_offset == 0)
1815 ; /* Nothing to do. */
1816 else if (reg_offset < 32768)
1817 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1818 GEN_INT (reg_offset)));
1819 else
1820 {
1821 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1822
1823 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1824 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1825 tmp));
1826 }
1827 }
1828 else if (frame_pointer_needed)
1829 {
1830 unsigned int reg_offset = var_size + args_size;
1831
1832 if (reg_offset == 0)
1833 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1834 else if (reg_offset < 32768)
1835 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_rtx,
1836 GEN_INT (reg_offset)));
1837 else
1838 {
1839 rtx tmp = gen_rtx_REG (Pmode, PROLOGUE_TMP_REGNUM);
1840
1841 emit_insn (gen_movsi (tmp, GEN_INT (reg_offset)));
1842 emit_insn (gen_movsi (stack_pointer_rtx, frame_pointer_rtx));
1843 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1844 tmp));
1845 }
1846 }
1847 else
1848 gcc_unreachable ();
1849
1850 if (current_frame_info.save_lr)
1851 pop (RETURN_ADDR_REGNUM);
1852
1853 /* Restore any saved registers, in reverse order of course. */
1854 gmask &= ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK);
1855 for (regno = M32R_MAX_INT_REGS - 1; regno >= 0; --regno)
1856 {
1857 if ((gmask & (1L << regno)) != 0)
1858 pop (regno);
1859 }
1860
1861 if (current_frame_info.save_fp)
1862 pop (FRAME_POINTER_REGNUM);
1863
1864 /* Remove varargs area if present. */
1865 if (current_frame_info.pretend_size != 0)
1866 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
1867 GEN_INT (current_frame_info.pretend_size)));
1868
1869 emit_insn (gen_blockage ());
1870 }
1871 }
1872
1873 /* Do any necessary cleanup after a function to restore stack, frame,
1874 and regs. */
1875
1876 static void
1877 m32r_output_function_epilogue (FILE * file ATTRIBUTE_UNUSED,
1878 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
1879 {
1880 /* Reset state info for each function. */
1881 current_frame_info = zero_frame_info;
1882 m32r_compute_function_type (NULL_TREE);
1883 }
1884 \f
1885 /* Return nonzero if this function is known to have a null or 1 instruction
1886 epilogue. */
1887
1888 int
1889 direct_return (void)
1890 {
1891 if (!reload_completed)
1892 return FALSE;
1893
1894 if (M32R_INTERRUPT_P (m32r_compute_function_type (current_function_decl)))
1895 return FALSE;
1896
1897 if (! current_frame_info.initialized)
1898 m32r_compute_frame_size (get_frame_size ());
1899
1900 return current_frame_info.total_size == 0;
1901 }
1902
1903 \f
1904 /* PIC. */
1905
1906 int
1907 m32r_legitimate_pic_operand_p (rtx x)
1908 {
1909 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1910 return 0;
1911
1912 if (GET_CODE (x) == CONST
1913 && GET_CODE (XEXP (x, 0)) == PLUS
1914 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1915 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
1916 && (CONST_INT_P (XEXP (XEXP (x, 0), 1))))
1917 return 0;
1918
1919 return 1;
1920 }
1921
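/* Convert ORIG, a SYMBOL_REF, LABEL_REF or CONST, into a form usable
   under -fPIC.  REG, if non-null, is the register to load the result
   into; otherwise a fresh pseudo is used.  Roughly (the exact templates
   live in m32r.md), a local symbol is reached GOT-relative:

	<load #sym@GOTOFF into REG>		(gotoff_load_addr)
	add	REG,r12				; r12 holds the GOT base

   while a global symbol goes through its GOT slot:

	<load #sym@GOT into ADDRESS>		(pic_load_addr)
	add	ADDRESS,r12
	ld	REG,@ADDRESS			; fetch the address from the GOT

   This sketch assumes r12 is the PIC register on this port.  */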
1922 rtx
1923 m32r_legitimize_pic_address (rtx orig, rtx reg)
1924 {
1925 #ifdef DEBUG_PIC
1926 printf ("m32r_legitimize_pic_address()\n");
1927 #endif
1928
1929 if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
1930 {
1931 rtx pic_ref, address;
1932 int subregs = 0;
1933
1934 if (reg == 0)
1935 {
1936 gcc_assert (!reload_in_progress && !reload_completed);
1937 reg = gen_reg_rtx (Pmode);
1938
1939 subregs = 1;
1940 }
1941
1942 if (subregs)
1943 address = gen_reg_rtx (Pmode);
1944 else
1945 address = reg;
1946
1947 crtl->uses_pic_offset_table = 1;
1948
1949 if (GET_CODE (orig) == LABEL_REF
1950 || (GET_CODE (orig) == SYMBOL_REF && SYMBOL_REF_LOCAL_P (orig)))
1951 {
1952 emit_insn (gen_gotoff_load_addr (reg, orig));
1953 emit_insn (gen_addsi3 (reg, reg, pic_offset_table_rtx));
1954 return reg;
1955 }
1956
1957 emit_insn (gen_pic_load_addr (address, orig));
1958
1959 emit_insn (gen_addsi3 (address, address, pic_offset_table_rtx));
1960 pic_ref = gen_const_mem (Pmode, address);
1961 emit_move_insn (reg, pic_ref);
1962 return reg;
1963 }
1964 else if (GET_CODE (orig) == CONST)
1965 {
1966 rtx base, offset;
1967
1968 if (GET_CODE (XEXP (orig, 0)) == PLUS
1969 && XEXP (XEXP (orig, 0), 1) == pic_offset_table_rtx)
1970 return orig;
1971
1972 if (reg == 0)
1973 {
1974 gcc_assert (!reload_in_progress && !reload_completed);
1975 reg = gen_reg_rtx (Pmode);
1976 }
1977
1978 if (GET_CODE (XEXP (orig, 0)) == PLUS)
1979 {
1980 base = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 0), reg);
1981 if (base == reg)
1982 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), NULL_RTX);
1983 else
1984 offset = m32r_legitimize_pic_address (XEXP (XEXP (orig, 0), 1), reg);
1985 }
1986 else
1987 return orig;
1988
1989 if (CONST_INT_P (offset))
1990 {
1991 if (INT16_P (INTVAL (offset)))
1992 return plus_constant (Pmode, base, INTVAL (offset));
1993 else
1994 {
1995 gcc_assert (! reload_in_progress && ! reload_completed);
1996 offset = force_reg (Pmode, offset);
1997 }
1998 }
1999
2000 return gen_rtx_PLUS (Pmode, base, offset);
2001 }
2002
2003 return orig;
2004 }
2005
2006 static rtx
2007 m32r_legitimize_address (rtx x, rtx orig_x ATTRIBUTE_UNUSED,
2008 enum machine_mode mode ATTRIBUTE_UNUSED)
2009 {
2010 if (flag_pic)
2011 return m32r_legitimize_pic_address (x, NULL_RTX);
2012 else
2013 return x;
2014 }
2015
2016 /* Worker function for TARGET_MODE_DEPENDENT_ADDRESS_P. */
2017
2018 static bool
2019 m32r_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
2020 {
2021 if (GET_CODE (addr) == LO_SUM)
2022 return true;
2023
2024 return false;
2025 }
2026 \f
2027 /* Nested function support. */
2028
2029 /* Emit RTL insns to initialize the variable parts of a trampoline.
2030 FNADDR is an RTX for the address of the function's pure code.
2031 CXT is an RTX for the static chain value for the function. */
2032
2033 void
2034 m32r_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
2035 rtx fnaddr ATTRIBUTE_UNUSED,
2036 rtx cxt ATTRIBUTE_UNUSED)
2037 {
2038 }
2039 \f
2040 static void
2041 m32r_file_start (void)
2042 {
2043 default_file_start ();
2044
2045 if (flag_verbose_asm)
2046 fprintf (asm_out_file,
2047 "%s M32R/D special options: -G %d\n",
2048 ASM_COMMENT_START, g_switch_value);
2049
2050 if (TARGET_LITTLE_ENDIAN)
2051 fprintf (asm_out_file, "\t.little\n");
2052 }
2053 \f
2054 /* Print operand X (an rtx) in assembler syntax to file FILE.
2055 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
2056 For `%' followed by punctuation, CODE is the punctuation and X is null. */
2057
2058 static void
2059 m32r_print_operand (FILE * file, rtx x, int code)
2060 {
2061 rtx addr;
2062
2063 switch (code)
2064 {
2065 /* The 's' and 'p' codes are used by m32r_output_block_move() to
2066 indicate pre-increment 's'tores and 'p'ost-increment loads. */
2067 case 's':
2068 if (REG_P (x))
2069 fprintf (file, "@+%s", reg_names [REGNO (x)]);
2070 else
2071 output_operand_lossage ("invalid operand to %%s code");
2072 return;
2073
2074 case 'p':
2075 if (REG_P (x))
2076 fprintf (file, "@%s+", reg_names [REGNO (x)]);
2077 else
2078 output_operand_lossage ("invalid operand to %%p code");
2079 return;
2080
2081 case 'R' :
2082 /* Write second word of DImode or DFmode reference,
2083 register or memory. */
2084 if (REG_P (x))
2085 fputs (reg_names[REGNO (x)+1], file);
2086 else if (MEM_P (x))
2087 {
2088 fprintf (file, "@(");
2089 /* Handle possible auto-increment. Since it is pre-increment and
2090 we have already done it, we can just use an offset of four. */
2091 /* ??? This is taken from rs6000.c I think. I don't think it is
2092 currently necessary, but keep it around. */
2093 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2094 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2095 output_address (plus_constant (Pmode, XEXP (XEXP (x, 0), 0), 4));
2096 else
2097 output_address (plus_constant (Pmode, XEXP (x, 0), 4));
2098 fputc (')', file);
2099 }
2100 else
2101 output_operand_lossage ("invalid operand to %%R code");
2102 return;
2103
2104 case 'H' : /* High word. */
2105 case 'L' : /* Low word. */
2106 if (REG_P (x))
2107 {
2108 /* L = least significant word, H = most significant word. */
2109 if ((WORDS_BIG_ENDIAN != 0) ^ (code == 'L'))
2110 fputs (reg_names[REGNO (x)], file);
2111 else
2112 fputs (reg_names[REGNO (x)+1], file);
2113 }
2114 else if (CONST_INT_P (x)
2115 || GET_CODE (x) == CONST_DOUBLE)
2116 {
2117 rtx first, second;
2118
2119 split_double (x, &first, &second);
2120 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2121 code == 'L' ? INTVAL (first) : INTVAL (second));
2122 }
2123 else
2124 output_operand_lossage ("invalid operand to %%H/%%L code");
2125 return;
2126
2127 case 'A' :
2128 {
2129 char str[30];
2130
2131 if (GET_CODE (x) != CONST_DOUBLE
2132 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
2133 fatal_insn ("bad insn for 'A'", x);
2134
2135 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
2136 fprintf (file, "%s", str);
2137 return;
2138 }
2139
2140 case 'B' : /* Bottom half. */
2141 case 'T' : /* Top half. */
2142 /* Output the argument to a `seth' insn (sets the Top half-word).
2143 For constants output arguments to a seth/or3 pair to set Top and
2144 Bottom halves. For symbols output arguments to a seth/add3 pair to
2145 set Top and Bottom halves. The difference exists because for
2146 constants seth/or3 is more readable but for symbols we need to use
2147 the same scheme as `ld' and `st' insns (16-bit addend is signed). */
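/* For example (illustrative only), %T/%B on the constant 0x12345678
   give the raw halves for a seth/or3 pair:

	seth	rN,#0x1234			; %T
	or3	rN,rN,#0x5678			; %B

   while on a symbol they give shigh()/low() (or sda() for small data):

	seth	rN,#shigh(sym)
	add3	rN,rN,#low(sym)  */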
2148 switch (GET_CODE (x))
2149 {
2150 case CONST_INT :
2151 case CONST_DOUBLE :
2152 {
2153 rtx first, second;
2154
2155 split_double (x, &first, &second);
2156 x = WORDS_BIG_ENDIAN ? second : first;
2157 fprintf (file, HOST_WIDE_INT_PRINT_HEX,
2158 (code == 'B'
2159 ? INTVAL (x) & 0xffff
2160 : (INTVAL (x) >> 16) & 0xffff));
2161 }
2162 return;
2163 case CONST :
2164 case SYMBOL_REF :
2165 if (code == 'B'
2166 && small_data_operand (x, VOIDmode))
2167 {
2168 fputs ("sda(", file);
2169 output_addr_const (file, x);
2170 fputc (')', file);
2171 return;
2172 }
2173 /* fall through */
2174 case LABEL_REF :
2175 fputs (code == 'T' ? "shigh(" : "low(", file);
2176 output_addr_const (file, x);
2177 fputc (')', file);
2178 return;
2179 default :
2180 output_operand_lossage ("invalid operand to %%T/%%B code");
2181 return;
2182 }
2183 break;
2184
2185 case 'U' :
2186 /* ??? wip */
2187 /* Output a load/store with update indicator if appropriate. */
2188 if (MEM_P (x))
2189 {
2190 if (GET_CODE (XEXP (x, 0)) == PRE_INC
2191 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
2192 fputs (".a", file);
2193 }
2194 else
2195 output_operand_lossage ("invalid operand to %%U code");
2196 return;
2197
2198 case 'N' :
2199 /* Print a constant value negated. */
2200 if (CONST_INT_P (x))
2201 output_addr_const (file, GEN_INT (- INTVAL (x)));
2202 else
2203 output_operand_lossage ("invalid operand to %%N code");
2204 return;
2205
2206 case 'X' :
2207 /* Print a const_int in hex. Used in comments. */
2208 if (CONST_INT_P (x))
2209 fprintf (file, HOST_WIDE_INT_PRINT_HEX, INTVAL (x));
2210 return;
2211
2212 case '#' :
2213 fputs (IMMEDIATE_PREFIX, file);
2214 return;
2215
2216 case 0 :
2217 /* Do nothing special. */
2218 break;
2219
2220 default :
2221 /* Unknown flag. */
2222 output_operand_lossage ("invalid operand output code");
2223 }
2224
2225 switch (GET_CODE (x))
2226 {
2227 case REG :
2228 fputs (reg_names[REGNO (x)], file);
2229 break;
2230
2231 case MEM :
2232 addr = XEXP (x, 0);
2233 if (GET_CODE (addr) == PRE_INC)
2234 {
2235 if (!REG_P (XEXP (addr, 0)))
2236 fatal_insn ("pre-increment address is not a register", x);
2237
2238 fprintf (file, "@+%s", reg_names[REGNO (XEXP (addr, 0))]);
2239 }
2240 else if (GET_CODE (addr) == PRE_DEC)
2241 {
2242 if (!REG_P (XEXP (addr, 0)))
2243 fatal_insn ("pre-decrement address is not a register", x);
2244
2245 fprintf (file, "@-%s", reg_names[REGNO (XEXP (addr, 0))]);
2246 }
2247 else if (GET_CODE (addr) == POST_INC)
2248 {
2249 if (!REG_P (XEXP (addr, 0)))
2250 fatal_insn ("post-increment address is not a register", x);
2251
2252 fprintf (file, "@%s+", reg_names[REGNO (XEXP (addr, 0))]);
2253 }
2254 else
2255 {
2256 fputs ("@(", file);
2257 output_address (XEXP (x, 0));
2258 fputc (')', file);
2259 }
2260 break;
2261
2262 case CONST_DOUBLE :
2263 /* We handle SFmode constants here as output_addr_const doesn't. */
2264 if (GET_MODE (x) == SFmode)
2265 {
2266 REAL_VALUE_TYPE d;
2267 long l;
2268
2269 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2270 REAL_VALUE_TO_TARGET_SINGLE (d, l);
2271 fprintf (file, "0x%08lx", l);
2272 break;
2273 }
2274
2275 /* Fall through. Let output_addr_const deal with it. */
2276
2277 default :
2278 output_addr_const (file, x);
2279 break;
2280 }
2281 }
2282
2283 /* Print a memory address as an operand to reference that memory location. */
2284
2285 static void
2286 m32r_print_operand_address (FILE * file, rtx addr)
2287 {
2288 rtx base;
2289 rtx index = 0;
2290 int offset = 0;
2291
2292 switch (GET_CODE (addr))
2293 {
2294 case REG :
2295 fputs (reg_names[REGNO (addr)], file);
2296 break;
2297
2298 case PLUS :
2299 if (CONST_INT_P (XEXP (addr, 0)))
2300 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
2301 else if (CONST_INT_P (XEXP (addr, 1)))
2302 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
2303 else
2304 base = XEXP (addr, 0), index = XEXP (addr, 1);
2305 if (REG_P (base))
2306 {
2307 /* Print the offset first (if present) to conform to the manual. */
2308 if (index == 0)
2309 {
2310 if (offset != 0)
2311 fprintf (file, "%d,", offset);
2312 fputs (reg_names[REGNO (base)], file);
2313 }
2314 /* The chip doesn't support this, but left in for generality. */
2315 else if (REG_P (index))
2316 fprintf (file, "%s,%s",
2317 reg_names[REGNO (base)], reg_names[REGNO (index)]);
2318 /* Not sure this can happen, but leave in for now. */
2319 else if (GET_CODE (index) == SYMBOL_REF)
2320 {
2321 output_addr_const (file, index);
2322 fputc (',', file);
2323 fputs (reg_names[REGNO (base)], file);
2324 }
2325 else
2326 fatal_insn ("bad address", addr);
2327 }
2328 else if (GET_CODE (base) == LO_SUM)
2329 {
2330 gcc_assert (!index && REG_P (XEXP (base, 0)));
2331 if (small_data_operand (XEXP (base, 1), VOIDmode))
2332 fputs ("sda(", file);
2333 else
2334 fputs ("low(", file);
2335 output_addr_const (file, plus_constant (Pmode, XEXP (base, 1),
2336 offset));
2337 fputs ("),", file);
2338 fputs (reg_names[REGNO (XEXP (base, 0))], file);
2339 }
2340 else
2341 fatal_insn ("bad address", addr);
2342 break;
2343
2344 case LO_SUM :
2345 if (!REG_P (XEXP (addr, 0)))
2346 fatal_insn ("lo_sum not of register", addr);
2347 if (small_data_operand (XEXP (addr, 1), VOIDmode))
2348 fputs ("sda(", file);
2349 else
2350 fputs ("low(", file);
2351 output_addr_const (file, XEXP (addr, 1));
2352 fputs ("),", file);
2353 fputs (reg_names[REGNO (XEXP (addr, 0))], file);
2354 break;
2355
2356 case PRE_INC : /* Assume SImode. */
2357 fprintf (file, "+%s", reg_names[REGNO (XEXP (addr, 0))]);
2358 break;
2359
2360 case PRE_DEC : /* Assume SImode. */
2361 fprintf (file, "-%s", reg_names[REGNO (XEXP (addr, 0))]);
2362 break;
2363
2364 case POST_INC : /* Assume SImode. */
2365 fprintf (file, "%s+", reg_names[REGNO (XEXP (addr, 0))]);
2366 break;
2367
2368 default :
2369 output_addr_const (file, addr);
2370 break;
2371 }
2372 }
2373
2374 static bool
2375 m32r_print_operand_punct_valid_p (unsigned char code)
2376 {
2377 return m32r_punct_chars[code];
2378 }
2379
2380 /* Return true if the operands are the constants 0 and 1. */
2381
2382 int
2383 zero_and_one (rtx operand1, rtx operand2)
2384 {
2385 return
2386 CONST_INT_P (operand1)
2387 && CONST_INT_P (operand2)
2388 && ( ((INTVAL (operand1) == 0) && (INTVAL (operand2) == 1))
2389 || ((INTVAL (operand1) == 1) && (INTVAL (operand2) == 0)));
2390 }
2391
2392 /* Generate the correct assembler code to handle the conditional loading of a
2393 value into a register. It is known that the operands satisfy the
2394 conditional_move_operand() function above. The destination is operand[0].
2395 The condition is operand [1]. The 'true' value is operand [2] and the
2396 'false' value is operand [3]. */
2397
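/* For illustration, a conditional move of 1/0 selected by an EQ test
   comes out roughly as:

	mvfc	r4, cbr			; copy the condition bit into r4

   and when the constants are the other way around (true value 0) an
   extra invert is appended:

	mvfc	r4, cbr
	xor3	r4, r4, #1  */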
2398 char *
2399 emit_cond_move (rtx * operands, rtx insn ATTRIBUTE_UNUSED)
2400 {
2401 static char buffer [100];
2402 const char * dest = reg_names [REGNO (operands [0])];
2403
2404 buffer [0] = 0;
2405
2406 /* Destination must be a register. */
2407 gcc_assert (REG_P (operands [0]));
2408 gcc_assert (conditional_move_operand (operands [2], SImode));
2409 gcc_assert (conditional_move_operand (operands [3], SImode));
2410
2411 /* Check to see if the test is reversed. */
2412 if (GET_CODE (operands [1]) == NE)
2413 {
2414 rtx tmp = operands [2];
2415 operands [2] = operands [3];
2416 operands [3] = tmp;
2417 }
2418
2419 sprintf (buffer, "mvfc %s, cbr", dest);
2420
2421 /* If the true value was '0' then we need to invert the results of the move. */
2422 if (INTVAL (operands [2]) == 0)
2423 sprintf (buffer + strlen (buffer), "\n\txor3 %s, %s, #1",
2424 dest, dest);
2425
2426 return buffer;
2427 }
2428
2429 /* Returns true if the registers contained in the two
2430 rtl expressions are different. */
2431
2432 int
2433 m32r_not_same_reg (rtx a, rtx b)
2434 {
2435 int reg_a = -1;
2436 int reg_b = -2;
2437
2438 while (GET_CODE (a) == SUBREG)
2439 a = SUBREG_REG (a);
2440
2441 if (REG_P (a))
2442 reg_a = REGNO (a);
2443
2444 while (GET_CODE (b) == SUBREG)
2445 b = SUBREG_REG (b);
2446
2447 if (REG_P (b))
2448 reg_b = REGNO (b);
2449
2450 return reg_a != reg_b;
2451 }
2452
2453 \f
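/* Build a SYMBOL_REF for the function NAME and record the code model
   selected on the command line (presumably via -mmodel=small/medium/large)
   in its flags, so callers can later recover it with SYMBOL_REF_MODEL.  */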
2454 rtx
2455 m32r_function_symbol (const char *name)
2456 {
2457 int extra_flags = 0;
2458 enum m32r_model model;
2459 rtx sym = gen_rtx_SYMBOL_REF (Pmode, name);
2460
2461 if (TARGET_MODEL_SMALL)
2462 model = M32R_MODEL_SMALL;
2463 else if (TARGET_MODEL_MEDIUM)
2464 model = M32R_MODEL_MEDIUM;
2465 else if (TARGET_MODEL_LARGE)
2466 model = M32R_MODEL_LARGE;
2467 else
2468 gcc_unreachable (); /* Shouldn't happen. */
2469 extra_flags |= model << SYMBOL_FLAG_MODEL_SHIFT;
2470
2471 if (extra_flags)
2472 SYMBOL_REF_FLAGS (sym) |= extra_flags;
2473
2474 return sym;
2475 }
2476
2477 /* Use a library function to move some bytes. */
2478
2479 static void
2480 block_move_call (rtx dest_reg, rtx src_reg, rtx bytes_rtx)
2481 {
2482 /* We want to pass the size as Pmode, which will normally be SImode
2483 but will be DImode if we are using 64-bit longs and pointers. */
2484 if (GET_MODE (bytes_rtx) != VOIDmode
2485 && GET_MODE (bytes_rtx) != Pmode)
2486 bytes_rtx = convert_to_mode (Pmode, bytes_rtx, 1);
2487
2488 emit_library_call (m32r_function_symbol ("memcpy"), LCT_NORMAL,
2489 VOIDmode, 3, dest_reg, Pmode, src_reg, Pmode,
2490 convert_to_mode (TYPE_MODE (sizetype), bytes_rtx,
2491 TYPE_UNSIGNED (sizetype)),
2492 TYPE_MODE (sizetype));
2493 }
2494
2495 /* Expand string/block move operations.
2496
2497 operands[0] is the pointer to the destination.
2498 operands[1] is the pointer to the source.
2499 operands[2] is the number of bytes to move.
2500 operands[3] is the alignment.
2501
2502 Returns 1 upon success, 0 otherwise. */
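
/* For illustration, if MAX_MOVE_BYTES were 32, a 100-byte, word-aligned,
   constant-size copy would be split into a three-iteration inline loop
   moving 96 bytes followed by a 4-byte tail, while an unaligned or
   unknown-size copy simply calls memcpy via block_move_call().  */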
2503
2504 int
2505 m32r_expand_block_move (rtx operands[])
2506 {
2507 rtx orig_dst = operands[0];
2508 rtx orig_src = operands[1];
2509 rtx bytes_rtx = operands[2];
2510 rtx align_rtx = operands[3];
2511 int constp = CONST_INT_P (bytes_rtx);
2512 HOST_WIDE_INT bytes = constp ? INTVAL (bytes_rtx) : 0;
2513 int align = INTVAL (align_rtx);
2514 int leftover;
2515 rtx src_reg;
2516 rtx dst_reg;
2517
2518 if (constp && bytes <= 0)
2519 return 1;
2520
2521 /* Move the address into scratch registers. */
2522 dst_reg = copy_addr_to_reg (XEXP (orig_dst, 0));
2523 src_reg = copy_addr_to_reg (XEXP (orig_src, 0));
2524
2525 if (align > UNITS_PER_WORD)
2526 align = UNITS_PER_WORD;
2527
2528 /* If we prefer size over speed, always use a function call.
2529 If we do not know the size, use a function call.
2530 If the blocks are not word aligned, use a function call. */
2531 if (optimize_size || ! constp || align != UNITS_PER_WORD)
2532 {
2533 block_move_call (dst_reg, src_reg, bytes_rtx);
2534 return 0;
2535 }
2536
2537 leftover = bytes % MAX_MOVE_BYTES;
2538 bytes -= leftover;
2539
2540 /* If necessary, generate a loop to handle the bulk of the copy. */
2541 if (bytes)
2542 {
2543 rtx label = NULL_RTX;
2544 rtx final_src = NULL_RTX;
2545 rtx at_a_time = GEN_INT (MAX_MOVE_BYTES);
2546 rtx rounded_total = GEN_INT (bytes);
2547 rtx new_dst_reg = gen_reg_rtx (SImode);
2548 rtx new_src_reg = gen_reg_rtx (SImode);
2549
2550 /* If we are going to have to perform this loop more than
2551 once, then generate a label and compute the address the
2552 source register will contain upon completion of the final
2553 iteration. */
2554 if (bytes > MAX_MOVE_BYTES)
2555 {
2556 final_src = gen_reg_rtx (Pmode);
2557
2558 if (INT16_P (bytes))
2559 emit_insn (gen_addsi3 (final_src, src_reg, rounded_total));
2560 else
2561 {
2562 emit_insn (gen_movsi (final_src, rounded_total));
2563 emit_insn (gen_addsi3 (final_src, final_src, src_reg));
2564 }
2565
2566 label = gen_label_rtx ();
2567 emit_label (label);
2568 }
2569
2570 /* It is known that m32r_output_block_move() will update src_reg to point
2571 to the word after the end of the source block, and dst_reg to point
2572 to the last word of the destination block, provided that the block
2573 is MAX_MOVE_BYTES long. */
2574 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, at_a_time,
2575 new_dst_reg, new_src_reg));
2576 emit_move_insn (dst_reg, new_dst_reg);
2577 emit_move_insn (src_reg, new_src_reg);
2578 emit_insn (gen_addsi3 (dst_reg, dst_reg, GEN_INT (4)));
2579
2580 if (bytes > MAX_MOVE_BYTES)
2581 {
2582 rtx test = gen_rtx_NE (VOIDmode, src_reg, final_src);
2583 emit_jump_insn (gen_cbranchsi4 (test, src_reg, final_src, label));
2584 }
2585 }
2586
2587 if (leftover)
2588 emit_insn (gen_movmemsi_internal (dst_reg, src_reg, GEN_INT (leftover),
2589 gen_reg_rtx (SImode),
2590 gen_reg_rtx (SImode)));
2591 return 1;
2592 }
2593
2594 \f
2595 /* Emit loads/stores for a small, constant-size, word-aligned block move.
2596
2597 operands[0] is the memory address of the destination.
2598 operands[1] is the memory address of the source.
2599 operands[2] is the number of bytes to move.
2600 operands[3] and operands[4] return the updated destination and source pointers.
2601 operands[5] and operands[6] are temporary (scratch) registers. */
2602
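/* For illustration (register numbers arbitrary: r0/r1 stand for the
   destination/source pointers, r5/r6 for the temporaries), a 12-byte
   copy comes out roughly as:

	ld	r5,@r1+			; post-increment loads
	ld	r6,@r1+
	st	r5,@r0			; very first store: no increment
	st	r6,@+r0			; later stores pre-increment
	ld	r5,@r1+
	st	r5,@+r0  */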
2603 void
2604 m32r_output_block_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[])
2605 {
2606 HOST_WIDE_INT bytes = INTVAL (operands[2]);
2607 int first_time;
2608 int got_extra = 0;
2609
2610 gcc_assert (bytes >= 1 && bytes <= MAX_MOVE_BYTES);
2611
2612 /* We do not have a post-increment store available, so the first set of
2613 stores is done without any increment; the remaining ones can then use
2614 the pre-increment addressing mode.
2615 
2616 Note: m32r_expand_block_move() also relies upon this behavior when
2617 building loops to copy large blocks. */
2618 first_time = 1;
2619
2620 while (bytes > 0)
2621 {
2622 if (bytes >= 8)
2623 {
2624 if (first_time)
2625 {
2626 output_asm_insn ("ld\t%5, %p1", operands);
2627 output_asm_insn ("ld\t%6, %p1", operands);
2628 output_asm_insn ("st\t%5, @%0", operands);
2629 output_asm_insn ("st\t%6, %s0", operands);
2630 }
2631 else
2632 {
2633 output_asm_insn ("ld\t%5, %p1", operands);
2634 output_asm_insn ("ld\t%6, %p1", operands);
2635 output_asm_insn ("st\t%5, %s0", operands);
2636 output_asm_insn ("st\t%6, %s0", operands);
2637 }
2638
2639 bytes -= 8;
2640 }
2641 else if (bytes >= 4)
2642 {
2643 if (bytes > 4)
2644 got_extra = 1;
2645
2646 output_asm_insn ("ld\t%5, %p1", operands);
2647
2648 if (got_extra)
2649 output_asm_insn ("ld\t%6, %p1", operands);
2650
2651 if (first_time)
2652 output_asm_insn ("st\t%5, @%0", operands);
2653 else
2654 output_asm_insn ("st\t%5, %s0", operands);
2655
2656 bytes -= 4;
2657 }
2658 else
2659 {
2660 /* Get the entire next word, even though we do not want all of it.
2661 This saves us from doing several smaller loads, and we assume that
2662 we cannot cause a page fault when at least part of the word is in
2663 valid memory [since we don't get called if things aren't properly
2664 aligned]. */
2665 int dst_offset = first_time ? 0 : 4;
2666 /* The amount of increment we have to make to the
2667 destination pointer. */
2668 int dst_inc_amount = dst_offset + bytes - 4;
2669 /* The same for the source pointer. */
2670 int src_inc_amount = bytes;
2671 int last_shift;
2672 rtx my_operands[3];
2673
2674 /* If got_extra is true then we have already loaded
2675 the next word as part of loading and storing the previous word. */
2676 if (! got_extra)
2677 output_asm_insn ("ld\t%6, @%1", operands);
2678
2679 if (bytes >= 2)
2680 {
2681 bytes -= 2;
2682
2683 output_asm_insn ("sra3\t%5, %6, #16", operands);
2684 my_operands[0] = operands[5];
2685 my_operands[1] = GEN_INT (dst_offset);
2686 my_operands[2] = operands[0];
2687 output_asm_insn ("sth\t%0, @(%1,%2)", my_operands);
2688
2689 /* If there is a byte left to store then increment the
2690 destination address and shift the contents of the source
2691 register down by 8 bits. We could not do the address
2692 increment in the store half word instruction, because it does
2693 not have an auto increment mode. */
2694 if (bytes > 0) /* assert (bytes == 1) */
2695 {
2696 dst_offset += 2;
2697 last_shift = 8;
2698 }
2699 }
2700 else
2701 last_shift = 24;
2702
2703 if (bytes > 0)
2704 {
2705 my_operands[0] = operands[6];
2706 my_operands[1] = GEN_INT (last_shift);
2707 output_asm_insn ("srai\t%0, #%1", my_operands);
2708 my_operands[0] = operands[6];
2709 my_operands[1] = GEN_INT (dst_offset);
2710 my_operands[2] = operands[0];
2711 output_asm_insn ("stb\t%0, @(%1,%2)", my_operands);
2712 }
2713
2714 /* Update the destination pointer if needed. We have to do
2715 this so that the pattern matches what we output in this
2716 function. */
2717 if (dst_inc_amount
2718 && !find_reg_note (insn, REG_UNUSED, operands[0]))
2719 {
2720 my_operands[0] = operands[0];
2721 my_operands[1] = GEN_INT (dst_inc_amount);
2722 output_asm_insn ("addi\t%0, #%1", my_operands);
2723 }
2724
2725 /* Update the source pointer if needed. We have to do this
2726 so that the pattern matches what we output in this
2727 function. */
2728 if (src_inc_amount
2729 && !find_reg_note (insn, REG_UNUSED, operands[1]))
2730 {
2731 my_operands[0] = operands[1];
2732 my_operands[1] = GEN_INT (src_inc_amount);
2733 output_asm_insn ("addi\t%0, #%1", my_operands);
2734 }
2735
2736 bytes = 0;
2737 }
2738
2739 first_time = 0;
2740 }
2741 }
2742
2743 /* Return true if using NEW_REG in place of OLD_REG is ok. */
2744
2745 int
2746 m32r_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
2747 unsigned int new_reg)
2748 {
2749 /* Interrupt routines can't clobber any register that isn't already used. */
2750 if (lookup_attribute ("interrupt", DECL_ATTRIBUTES (current_function_decl))
2751 && !df_regs_ever_live_p (new_reg))
2752 return 0;
2753
2754 return 1;
2755 }
2756
2757 rtx
2758 m32r_return_addr (int count)
2759 {
2760 if (count != 0)
2761 return const0_rtx;
2762
2763 return get_hard_reg_initial_val (Pmode, RETURN_ADDR_REGNUM);
2764 }
2765
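/* Fill in the variable parts of a trampoline: the first four words hold
   an endianness-specific code template, the word at offset 16 receives
   the static chain value and the word at offset 20 the address of the
   nested function.  The instruction cache is then flushed, either via
   the configured trap number or by calling the cache-flush library
   function.  */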
2766 static void
2767 m32r_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
2768 {
2769 emit_move_insn (adjust_address (m_tramp, SImode, 0),
2770 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2771 0x017e8e17 : 0x178e7e01, SImode));
2772 emit_move_insn (adjust_address (m_tramp, SImode, 4),
2773 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2774 0x0c00ae86 : 0x86ae000c, SImode));
2775 emit_move_insn (adjust_address (m_tramp, SImode, 8),
2776 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2777 0xe627871e : 0x1e8727e6, SImode));
2778 emit_move_insn (adjust_address (m_tramp, SImode, 12),
2779 gen_int_mode (TARGET_LITTLE_ENDIAN ?
2780 0xc616c626 : 0x26c61fc6, SImode));
2781 emit_move_insn (adjust_address (m_tramp, SImode, 16),
2782 chain_value);
2783 emit_move_insn (adjust_address (m_tramp, SImode, 20),
2784 XEXP (DECL_RTL (fndecl), 0));
2785
2786 if (m32r_cache_flush_trap >= 0)
2787 emit_insn (gen_flush_icache
2788 (validize_mem (adjust_address (m_tramp, SImode, 0)),
2789 gen_int_mode (m32r_cache_flush_trap, SImode)));
2790 else if (m32r_cache_flush_func && m32r_cache_flush_func[0])
2791 emit_library_call (m32r_function_symbol (m32r_cache_flush_func),
2792 LCT_NORMAL, VOIDmode, 3, XEXP (m_tramp, 0), Pmode,
2793 gen_int_mode (TRAMPOLINE_SIZE, SImode), SImode,
2794 GEN_INT (3), SImode);
2795 }
2796
2797 /* True if X is a reg that can be used as a base reg. */
2798
2799 static bool
2800 m32r_rtx_ok_for_base_p (const_rtx x, bool strict)
2801 {
2802 if (! REG_P (x))
2803 return false;
2804
2805 if (strict)
2806 {
2807 if (GPR_P (REGNO (x)))
2808 return true;
2809 }
2810 else
2811 {
2812 if (GPR_P (REGNO (x))
2813 || REGNO (x) == ARG_POINTER_REGNUM
2814 || ! HARD_REGISTER_P (x))
2815 return true;
2816 }
2817
2818 return false;
2819 }
2820
2821 static inline bool
2822 m32r_rtx_ok_for_offset_p (const_rtx x)
2823 {
2824 return (CONST_INT_P (x) && INT16_P (INTVAL (x)));
2825 }
2826
2827 static inline bool
2828 m32r_legitimate_offset_addres_p (enum machine_mode mode ATTRIBUTE_UNUSED,
2829 const_rtx x, bool strict)
2830 {
2831 if (GET_CODE (x) == PLUS
2832 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2833 && m32r_rtx_ok_for_offset_p (XEXP (x, 1)))
2834 return true;
2835
2836 return false;
2837 }
2838
2839 /* For LO_SUM addresses, do not allow them if the MODE is > 1 word,
2840 since more than one instruction will be required. */
2841
2842 static inline bool
2843 m32r_legitimate_lo_sum_addres_p (enum machine_mode mode, const_rtx x,
2844 bool strict)
2845 {
2846 if (GET_CODE (x) == LO_SUM
2847 && (mode != BLKmode && GET_MODE_SIZE (mode) <= UNITS_PER_WORD)
2848 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict)
2849 && CONSTANT_P (XEXP (x, 1)))
2850 return true;
2851
2852 return false;
2853 }
2854
2855 /* Is this a load and increment operation. */
2856
2857 static inline bool
2858 m32r_load_postinc_p (enum machine_mode mode, const_rtx x, bool strict)
2859 {
2860 if ((mode == SImode || mode == SFmode)
2861 && GET_CODE (x) == POST_INC
2862 && REG_P (XEXP (x, 0))
2863 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2864 return true;
2865
2866 return false;
2867 }
2868
2869 /* Is this an increment/decrement and store operation. */
2870
2871 static inline bool
2872 m32r_store_preinc_predec_p (enum machine_mode mode, const_rtx x, bool strict)
2873 {
2874 if ((mode == SImode || mode == SFmode)
2875 && (GET_CODE (x) == PRE_INC || GET_CODE (x) == PRE_DEC)
2876 && REG_P (XEXP (x, 0))
2877 && m32r_rtx_ok_for_base_p (XEXP (x, 0), strict))
2878 return true;
2879
2880 return false;
2881 }
2882
2883 /* Implement TARGET_LEGITIMATE_ADDRESS_P. */
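
/* The forms accepted by the helper predicates above correspond roughly
   to these assembler operands:

	@rN			plain base register
	@(disp16,rN)		base plus signed 16-bit displacement
	@(low(sym),rN)		LO_SUM, modes no wider than a word
	@rN+			post-increment load, SImode/SFmode
	@+rN, @-rN		pre-increment/pre-decrement store,
				SImode/SFmode  */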
2884
2885 static bool
2886 m32r_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
2887 {
2888 if (m32r_rtx_ok_for_base_p (x, strict)
2889 || m32r_legitimate_offset_addres_p (mode, x, strict)
2890 || m32r_legitimate_lo_sum_addres_p (mode, x, strict)
2891 || m32r_load_postinc_p (mode, x, strict)
2892 || m32r_store_preinc_predec_p (mode, x, strict))
2893 return true;
2894
2895 return false;
2896 }
2897
2898 static void
2899 m32r_conditional_register_usage (void)
2900 {
2901 if (flag_pic)
2902 {
2903 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2904 call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
2905 }
2906 }
2907
2908 /* Implement TARGET_LEGITIMATE_CONSTANT_P
2909
2910 We don't allow (plus symbol large-constant) as the relocations can't
2911 describe it. INTVAL > 32767 handles both 16-bit and 24-bit relocations.
2912 We allow all CONST_DOUBLE's as the md file patterns will force the
2913 constant to memory if they can't handle them. */
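
/* For example, an address constant such as "sym + 4" is accepted, while
   "sym + 0x10000" is rejected here (its addend cannot be expressed by
   the 16-bit or 24-bit relocations) and is computed at run time
   instead.  */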
2914
2915 static bool
2916 m32r_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2917 {
2918 return !(GET_CODE (x) == CONST
2919 && GET_CODE (XEXP (x, 0)) == PLUS
2920 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2921 || GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)
2922 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
2923 && UINTVAL (XEXP (XEXP (x, 0), 1)) > 32767);
2924 }