eed829972443866d1646566bf20a56ca76c92a9f
[gcc.git] / gcc / config / mn10300 / mn10300.c
1 /* Subroutines for insn-output.c for Matsushita MN10300 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
3 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "recog.h"
37 #include "reload.h"
38 #include "expr.h"
39 #include "optabs.h"
40 #include "function.h"
41 #include "obstack.h"
42 #include "toplev.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46
/* This is used by GOTaddr2picreg to uniquely identify
   UNSPEC_INT_LABELs.  */
int mn10300_unspec_int_label_counter;

/* This is used in the am33_2.0-linux-gnu port, in which global symbol
   names are not prefixed by underscores, to tell whether to prefix a
   label with a plus sign or not, so that the assembler can tell
   symbol names from register names.  */
int mn10300_protect_label;

/* The selected processor.  */
enum processor_type mn10300_processor = PROCESSOR_DEFAULT;

/* The size of the callee register save area.  Right now we save everything
   on entry since it costs us nothing in code size.  It does cost us from a
   speed standpoint, so we want to optimize this sooner or later.  */
/* NOTE(review): regs 2,3,6,7 are 4 bytes each; regs 14-17 are the
   extended registers, saved as a single 16-byte block if any one of
   them is live (they cannot be stored individually -- see
   mn10300_get_live_callee_saved_regs).  Presumably 2/3 are data regs
   and 6/7 address regs -- confirm against the register numbering in
   mn10300.h.  */
#define REG_SAVE_BYTES (4 * df_regs_ever_live_p (2) \
			+ 4 * df_regs_ever_live_p (3) \
			+ 4 * df_regs_ever_live_p (6) \
			+ 4 * df_regs_ever_live_p (7) \
			+ 16 * (df_regs_ever_live_p (14) || df_regs_ever_live_p (15) \
				|| df_regs_ever_live_p (16) || df_regs_ever_live_p (17)))
69
70
71 static bool mn10300_handle_option (size_t, const char *, int);
72 static int mn10300_address_cost_1 (rtx, int *);
73 static int mn10300_address_cost (rtx, bool);
74 static bool mn10300_rtx_costs (rtx, int, int, int *, bool);
75 static void mn10300_file_start (void);
76 static bool mn10300_return_in_memory (const_tree, const_tree);
77 static rtx mn10300_builtin_saveregs (void);
78 static void mn10300_va_start (tree, rtx);
79 static bool mn10300_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
80 const_tree, bool);
81 static int mn10300_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
82 tree, bool);
83 \f
84 /* Initialize the GCC target structure. */
85 #undef TARGET_ASM_ALIGNED_HI_OP
86 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
87
88 #undef TARGET_RTX_COSTS
89 #define TARGET_RTX_COSTS mn10300_rtx_costs
90 #undef TARGET_ADDRESS_COST
91 #define TARGET_ADDRESS_COST mn10300_address_cost
92
93 #undef TARGET_ASM_FILE_START
94 #define TARGET_ASM_FILE_START mn10300_file_start
95 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
96 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
97
98 #undef TARGET_DEFAULT_TARGET_FLAGS
99 #define TARGET_DEFAULT_TARGET_FLAGS MASK_MULT_BUG | MASK_PTR_A0D0
100 #undef TARGET_HANDLE_OPTION
101 #define TARGET_HANDLE_OPTION mn10300_handle_option
102
103 #undef TARGET_ENCODE_SECTION_INFO
104 #define TARGET_ENCODE_SECTION_INFO mn10300_encode_section_info
105
106 #undef TARGET_PROMOTE_PROTOTYPES
107 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
108 #undef TARGET_RETURN_IN_MEMORY
109 #define TARGET_RETURN_IN_MEMORY mn10300_return_in_memory
110 #undef TARGET_PASS_BY_REFERENCE
111 #define TARGET_PASS_BY_REFERENCE mn10300_pass_by_reference
112 #undef TARGET_CALLEE_COPIES
113 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
114 #undef TARGET_ARG_PARTIAL_BYTES
115 #define TARGET_ARG_PARTIAL_BYTES mn10300_arg_partial_bytes
116
117 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
118 #define TARGET_EXPAND_BUILTIN_SAVEREGS mn10300_builtin_saveregs
119 #undef TARGET_EXPAND_BUILTIN_VA_START
120 #define TARGET_EXPAND_BUILTIN_VA_START mn10300_va_start
121
122 static void mn10300_encode_section_info (tree, rtx, int);
123 struct gcc_target targetm = TARGET_INITIALIZER;
124 \f
125 /* Implement TARGET_HANDLE_OPTION. */
126
127 static bool
128 mn10300_handle_option (size_t code,
129 const char *arg ATTRIBUTE_UNUSED,
130 int value)
131 {
132 switch (code)
133 {
134 case OPT_mam33:
135 mn10300_processor = value ? PROCESSOR_AM33 : PROCESSOR_MN10300;
136 return true;
137 case OPT_mam33_2:
138 mn10300_processor = (value
139 ? PROCESSOR_AM33_2
140 : MIN (PROCESSOR_AM33, PROCESSOR_DEFAULT));
141 return true;
142 default:
143 return true;
144 }
145 }
146
147 /* Implement OVERRIDE_OPTIONS. */
148
149 void
150 mn10300_override_options (void)
151 {
152 if (TARGET_AM33)
153 target_flags &= ~MASK_MULT_BUG;
154 }
155
156 static void
157 mn10300_file_start (void)
158 {
159 default_file_start ();
160
161 if (TARGET_AM33_2)
162 fprintf (asm_out_file, "\t.am33_2\n");
163 else if (TARGET_AM33)
164 fprintf (asm_out_file, "\t.am33\n");
165 }
166 \f
167
168 /* Print operand X using operand code CODE to assembly language output file
169 FILE. */
170
171 void
172 print_operand (FILE *file, rtx x, int code)
173 {
174 switch (code)
175 {
176 case 'b':
177 case 'B':
178 if (cc_status.mdep.fpCC)
179 {
180 switch (code == 'b' ? GET_CODE (x)
181 : reverse_condition_maybe_unordered (GET_CODE (x)))
182 {
183 case NE:
184 fprintf (file, "ne");
185 break;
186 case EQ:
187 fprintf (file, "eq");
188 break;
189 case GE:
190 fprintf (file, "ge");
191 break;
192 case GT:
193 fprintf (file, "gt");
194 break;
195 case LE:
196 fprintf (file, "le");
197 break;
198 case LT:
199 fprintf (file, "lt");
200 break;
201 case ORDERED:
202 fprintf (file, "lge");
203 break;
204 case UNORDERED:
205 fprintf (file, "uo");
206 break;
207 case LTGT:
208 fprintf (file, "lg");
209 break;
210 case UNEQ:
211 fprintf (file, "ue");
212 break;
213 case UNGE:
214 fprintf (file, "uge");
215 break;
216 case UNGT:
217 fprintf (file, "ug");
218 break;
219 case UNLE:
220 fprintf (file, "ule");
221 break;
222 case UNLT:
223 fprintf (file, "ul");
224 break;
225 default:
226 gcc_unreachable ();
227 }
228 break;
229 }
230 /* These are normal and reversed branches. */
231 switch (code == 'b' ? GET_CODE (x) : reverse_condition (GET_CODE (x)))
232 {
233 case NE:
234 fprintf (file, "ne");
235 break;
236 case EQ:
237 fprintf (file, "eq");
238 break;
239 case GE:
240 fprintf (file, "ge");
241 break;
242 case GT:
243 fprintf (file, "gt");
244 break;
245 case LE:
246 fprintf (file, "le");
247 break;
248 case LT:
249 fprintf (file, "lt");
250 break;
251 case GEU:
252 fprintf (file, "cc");
253 break;
254 case GTU:
255 fprintf (file, "hi");
256 break;
257 case LEU:
258 fprintf (file, "ls");
259 break;
260 case LTU:
261 fprintf (file, "cs");
262 break;
263 default:
264 gcc_unreachable ();
265 }
266 break;
267 case 'C':
268 /* This is used for the operand to a call instruction;
269 if it's a REG, enclose it in parens, else output
270 the operand normally. */
271 if (GET_CODE (x) == REG)
272 {
273 fputc ('(', file);
274 print_operand (file, x, 0);
275 fputc (')', file);
276 }
277 else
278 print_operand (file, x, 0);
279 break;
280
281 case 'D':
282 switch (GET_CODE (x))
283 {
284 case MEM:
285 fputc ('(', file);
286 output_address (XEXP (x, 0));
287 fputc (')', file);
288 break;
289
290 case REG:
291 fprintf (file, "fd%d", REGNO (x) - 18);
292 break;
293
294 default:
295 gcc_unreachable ();
296 }
297 break;
298
299 /* These are the least significant word in a 64bit value. */
300 case 'L':
301 switch (GET_CODE (x))
302 {
303 case MEM:
304 fputc ('(', file);
305 output_address (XEXP (x, 0));
306 fputc (')', file);
307 break;
308
309 case REG:
310 fprintf (file, "%s", reg_names[REGNO (x)]);
311 break;
312
313 case SUBREG:
314 fprintf (file, "%s", reg_names[subreg_regno (x)]);
315 break;
316
317 case CONST_DOUBLE:
318 {
319 long val[2];
320 REAL_VALUE_TYPE rv;
321
322 switch (GET_MODE (x))
323 {
324 case DFmode:
325 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
326 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
327 fprintf (file, "0x%lx", val[0]);
328 break;;
329 case SFmode:
330 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
331 REAL_VALUE_TO_TARGET_SINGLE (rv, val[0]);
332 fprintf (file, "0x%lx", val[0]);
333 break;;
334 case VOIDmode:
335 case DImode:
336 print_operand_address (file,
337 GEN_INT (CONST_DOUBLE_LOW (x)));
338 break;
339 default:
340 break;
341 }
342 break;
343 }
344
345 case CONST_INT:
346 {
347 rtx low, high;
348 split_double (x, &low, &high);
349 fprintf (file, "%ld", (long)INTVAL (low));
350 break;
351 }
352
353 default:
354 gcc_unreachable ();
355 }
356 break;
357
358 /* Similarly, but for the most significant word. */
359 case 'H':
360 switch (GET_CODE (x))
361 {
362 case MEM:
363 fputc ('(', file);
364 x = adjust_address (x, SImode, 4);
365 output_address (XEXP (x, 0));
366 fputc (')', file);
367 break;
368
369 case REG:
370 fprintf (file, "%s", reg_names[REGNO (x) + 1]);
371 break;
372
373 case SUBREG:
374 fprintf (file, "%s", reg_names[subreg_regno (x) + 1]);
375 break;
376
377 case CONST_DOUBLE:
378 {
379 long val[2];
380 REAL_VALUE_TYPE rv;
381
382 switch (GET_MODE (x))
383 {
384 case DFmode:
385 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
386 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
387 fprintf (file, "0x%lx", val[1]);
388 break;;
389 case SFmode:
390 gcc_unreachable ();
391 case VOIDmode:
392 case DImode:
393 print_operand_address (file,
394 GEN_INT (CONST_DOUBLE_HIGH (x)));
395 break;
396 default:
397 break;
398 }
399 break;
400 }
401
402 case CONST_INT:
403 {
404 rtx low, high;
405 split_double (x, &low, &high);
406 fprintf (file, "%ld", (long)INTVAL (high));
407 break;
408 }
409
410 default:
411 gcc_unreachable ();
412 }
413 break;
414
415 case 'A':
416 fputc ('(', file);
417 if (GET_CODE (XEXP (x, 0)) == REG)
418 output_address (gen_rtx_PLUS (SImode, XEXP (x, 0), const0_rtx));
419 else
420 output_address (XEXP (x, 0));
421 fputc (')', file);
422 break;
423
424 case 'N':
425 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
426 fprintf (file, "%d", (int)((~INTVAL (x)) & 0xff));
427 break;
428
429 case 'U':
430 gcc_assert (INTVAL (x) >= -128 && INTVAL (x) <= 255);
431 fprintf (file, "%d", (int)(INTVAL (x) & 0xff));
432 break;
433
434 /* For shift counts. The hardware ignores the upper bits of
435 any immediate, but the assembler will flag an out of range
436 shift count as an error. So we mask off the high bits
437 of the immediate here. */
438 case 'S':
439 if (GET_CODE (x) == CONST_INT)
440 {
441 fprintf (file, "%d", (int)(INTVAL (x) & 0x1f));
442 break;
443 }
444 /* FALL THROUGH */
445
446 default:
447 switch (GET_CODE (x))
448 {
449 case MEM:
450 fputc ('(', file);
451 output_address (XEXP (x, 0));
452 fputc (')', file);
453 break;
454
455 case PLUS:
456 output_address (x);
457 break;
458
459 case REG:
460 fprintf (file, "%s", reg_names[REGNO (x)]);
461 break;
462
463 case SUBREG:
464 fprintf (file, "%s", reg_names[subreg_regno (x)]);
465 break;
466
467 /* This will only be single precision.... */
468 case CONST_DOUBLE:
469 {
470 unsigned long val;
471 REAL_VALUE_TYPE rv;
472
473 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
474 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
475 fprintf (file, "0x%lx", val);
476 break;
477 }
478
479 case CONST_INT:
480 case SYMBOL_REF:
481 case CONST:
482 case LABEL_REF:
483 case CODE_LABEL:
484 case UNSPEC:
485 print_operand_address (file, x);
486 break;
487 default:
488 gcc_unreachable ();
489 }
490 break;
491 }
492 }
493
494 /* Output assembly language output for the address ADDR to FILE. */
495
496 void
497 print_operand_address (FILE *file, rtx addr)
498 {
499 switch (GET_CODE (addr))
500 {
501 case POST_INC:
502 print_operand_address (file, XEXP (addr, 0));
503 fputc ('+', file);
504 break;
505 case REG:
506 print_operand (file, addr, 0);
507 break;
508 case PLUS:
509 {
510 rtx base, index;
511 if (REG_P (XEXP (addr, 0))
512 && REG_OK_FOR_BASE_P (XEXP (addr, 0)))
513 base = XEXP (addr, 0), index = XEXP (addr, 1);
514 else if (REG_P (XEXP (addr, 1))
515 && REG_OK_FOR_BASE_P (XEXP (addr, 1)))
516 base = XEXP (addr, 1), index = XEXP (addr, 0);
517 else
518 gcc_unreachable ();
519 print_operand (file, index, 0);
520 fputc (',', file);
521 print_operand (file, base, 0);;
522 break;
523 }
524 case SYMBOL_REF:
525 output_addr_const (file, addr);
526 break;
527 default:
528 output_addr_const (file, addr);
529 break;
530 }
531 }
532
533 /* Count the number of FP registers that have to be saved. */
534 static int
535 fp_regs_to_save (void)
536 {
537 int i, n = 0;
538
539 if (! TARGET_AM33_2)
540 return 0;
541
542 for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
543 if (df_regs_ever_live_p (i) && ! call_used_regs[i])
544 ++n;
545
546 return n;
547 }
548
549 /* Print a set of registers in the format required by "movm" and "ret".
550 Register K is saved if bit K of MASK is set. The data and address
551 registers can be stored individually, but the extended registers cannot.
552 We assume that the mask already takes that into account. For instance,
553 bits 14 to 17 must have the same value. */
554
555 void
556 mn10300_print_reg_list (FILE *file, int mask)
557 {
558 int need_comma;
559 int i;
560
561 need_comma = 0;
562 fputc ('[', file);
563
564 for (i = 0; i < FIRST_EXTENDED_REGNUM; i++)
565 if ((mask & (1 << i)) != 0)
566 {
567 if (need_comma)
568 fputc (',', file);
569 fputs (reg_names [i], file);
570 need_comma = 1;
571 }
572
573 if ((mask & 0x3c000) != 0)
574 {
575 gcc_assert ((mask & 0x3c000) == 0x3c000);
576 if (need_comma)
577 fputc (',', file);
578 fputs ("exreg1", file);
579 need_comma = 1;
580 }
581
582 fputc (']', file);
583 }
584
585 int
586 can_use_return_insn (void)
587 {
588 /* size includes the fixed stack space needed for function calls. */
589 int size = get_frame_size () + crtl->outgoing_args_size;
590
591 /* And space for the return pointer. */
592 size += crtl->outgoing_args_size ? 4 : 0;
593
594 return (reload_completed
595 && size == 0
596 && !df_regs_ever_live_p (2)
597 && !df_regs_ever_live_p (3)
598 && !df_regs_ever_live_p (6)
599 && !df_regs_ever_live_p (7)
600 && !df_regs_ever_live_p (14)
601 && !df_regs_ever_live_p (15)
602 && !df_regs_ever_live_p (16)
603 && !df_regs_ever_live_p (17)
604 && fp_regs_to_save () == 0
605 && !frame_pointer_needed);
606 }
607
608 /* Returns the set of live, callee-saved registers as a bitmask. The
609 callee-saved extended registers cannot be stored individually, so
610 all of them will be included in the mask if any one of them is used. */
611
612 int
613 mn10300_get_live_callee_saved_regs (void)
614 {
615 int mask;
616 int i;
617
618 mask = 0;
619 for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
620 if (df_regs_ever_live_p (i) && ! call_used_regs[i])
621 mask |= (1 << i);
622 if ((mask & 0x3c000) != 0)
623 mask |= 0x3c000;
624
625 return mask;
626 }
627
628 /* Generate an instruction that pushes several registers onto the stack.
629 Register K will be saved if bit K in MASK is set. The function does
630 nothing if MASK is zero.
631
632 To be compatible with the "movm" instruction, the lowest-numbered
633 register must be stored in the lowest slot. If MASK is the set
634 { R1,...,RN }, where R1...RN are ordered least first, the generated
635 instruction will have the form:
636
637 (parallel
638 (set (reg:SI 9) (plus:SI (reg:SI 9) (const_int -N*4)))
639 (set (mem:SI (plus:SI (reg:SI 9)
640 (const_int -1*4)))
641 (reg:SI RN))
642 ...
643 (set (mem:SI (plus:SI (reg:SI 9)
644 (const_int -N*4)))
645 (reg:SI R1))) */
646
647 void
648 mn10300_gen_multiple_store (int mask)
649 {
650 if (mask != 0)
651 {
652 int i;
653 int count;
654 rtx par;
655 int pari;
656
657 /* Count how many registers need to be saved. */
658 count = 0;
659 for (i = 0; i <= LAST_EXTENDED_REGNUM; i++)
660 if ((mask & (1 << i)) != 0)
661 count += 1;
662
663 /* We need one PARALLEL element to update the stack pointer and
664 an additional element for each register that is stored. */
665 par = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count + 1));
666
667 /* Create the instruction that updates the stack pointer. */
668 XVECEXP (par, 0, 0)
669 = gen_rtx_SET (SImode,
670 stack_pointer_rtx,
671 gen_rtx_PLUS (SImode,
672 stack_pointer_rtx,
673 GEN_INT (-count * 4)));
674
675 /* Create each store. */
676 pari = 1;
677 for (i = LAST_EXTENDED_REGNUM; i >= 0; i--)
678 if ((mask & (1 << i)) != 0)
679 {
680 rtx address = gen_rtx_PLUS (SImode,
681 stack_pointer_rtx,
682 GEN_INT (-pari * 4));
683 XVECEXP(par, 0, pari)
684 = gen_rtx_SET (VOIDmode,
685 gen_rtx_MEM (SImode, address),
686 gen_rtx_REG (SImode, i));
687 pari += 1;
688 }
689
690 par = emit_insn (par);
691 RTX_FRAME_RELATED_P (par) = 1;
692 }
693 }
694
/* Expand the function prologue: store the live callee-saved general
   registers, store any live call-saved FP registers (AM33/2.0 only,
   choosing the cheapest of several addressing strategies), set up the
   frame pointer if needed, and allocate the stack frame.  */

void
expand_prologue (void)
{
  HOST_WIDE_INT size;

  /* SIZE includes the fixed stack space needed for function calls.  */
  size = get_frame_size () + crtl->outgoing_args_size;
  size += (crtl->outgoing_args_size ? 4 : 0);

  /* If we use any of the callee-saved registers, save them now.  */
  mn10300_gen_multiple_store (mn10300_get_live_callee_saved_regs ());

  if (TARGET_AM33_2 && fp_regs_to_save ())
    {
      int num_regs_to_save = fp_regs_to_save (), i;
      HOST_WIDE_INT xsize;
      enum { save_sp_merge,
	     save_sp_no_merge,
	     save_sp_partial_merge,
	     save_a0_merge,
	     save_a0_no_merge } strategy;
      unsigned int strategy_size = (unsigned)-1, this_strategy_size;
      rtx reg;
      rtx insn;

      /* We have several different strategies to save FP registers.
	 We can store them using SP offsets, which is beneficial if
	 there are just a few registers to save, or we can use `a0' in
	 post-increment mode (`a0' is the only call-clobbered address
	 register that is never used to pass information to a
	 function).  Furthermore, if we don't need a frame pointer, we
	 can merge the two SP adds into a single one, but this isn't
	 always beneficial; sometimes we can just split the two adds
	 so that we don't exceed a 16-bit constant size.  The code
	 below will select which strategy to use, so as to generate
	 smallest code.  Ties are broken in favor or shorter sequences
	 (in terms of number of instructions).  */

/* Byte size of an "add S,ax" instruction for immediate S.  */
#define SIZE_ADD_AX(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
			: (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 2)
/* Byte size of an "add S,sp" instruction for immediate S.  */
#define SIZE_ADD_SP(S) ((((S) >= (1 << 15)) || ((S) < -(1 << 15))) ? 6 \
			: (((S) >= (1 << 7)) || ((S) < -(1 << 7))) ? 4 : 3)
/* Total byte size of N fmov insns at SP offsets starting at S, where
   an fmov costs SIZE1 bytes once the offset reaches limit L and SIZE2
   bytes below it; ELSE when every offset is below the limit.  */
#define SIZE_FMOV_LIMIT(S,N,L,SIZE1,SIZE2,ELSE) \
  (((S) >= (L)) ? (SIZE1) * (N) \
   : ((S) + 4 * (N) >= (L)) ? (((L) - (S)) / 4 * (SIZE2) \
			       + ((S) + 4 * (N) - (L)) / 4 * (SIZE1)) \
   : (ELSE))
#define SIZE_FMOV_SP_(S,N) \
  (SIZE_FMOV_LIMIT ((S), (N), (1 << 24), 7, 6, \
		    SIZE_FMOV_LIMIT ((S), (N), (1 << 8), 6, 4, \
				     (S) ? 4 * (N) : 3 + 4 * ((N) - 1))))
#define SIZE_FMOV_SP(S,N) (SIZE_FMOV_SP_ ((unsigned HOST_WIDE_INT)(S), (N)))

      /* Consider alternative save_sp_merge only if we don't need the
	 frame pointer and size is nonzero.  */
      if (! frame_pointer_needed && size)
	{
	  /* Insn: add -(size + 4 * num_regs_to_save), sp.  */
	  this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
	  /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
	  this_strategy_size += SIZE_FMOV_SP (size, num_regs_to_save);

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_sp_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Consider alternative save_sp_no_merge unconditionally.  */
      /* Insn: add -4 * num_regs_to_save, sp.  */
      this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
      /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
      this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
      if (size)
	{
	  /* Insn: add -size, sp.  */
	  this_strategy_size += SIZE_ADD_SP (-size);
	}

      /* NOTE: since this alternative is evaluated unconditionally and
	 STRATEGY_SIZE starts at UINT_MAX, STRATEGY is guaranteed to be
	 initialized before the switches below read it.  */
      if (this_strategy_size < strategy_size)
	{
	  strategy = save_sp_no_merge;
	  strategy_size = this_strategy_size;
	}

      /* Consider alternative save_sp_partial_merge only if we don't
	 need a frame pointer and size is reasonably large.  */
      if (! frame_pointer_needed && size + 4 * num_regs_to_save > 128)
	{
	  /* Insn: add -128, sp.  */
	  this_strategy_size = SIZE_ADD_SP (-128);
	  /* Insn: fmov fs#, (##, sp), for each fs# to be saved.  */
	  this_strategy_size += SIZE_FMOV_SP (128 - 4 * num_regs_to_save,
					      num_regs_to_save);
	  if (size)
	    {
	      /* Insn: add 128-size, sp.  */
	      this_strategy_size += SIZE_ADD_SP (128 - size);
	    }

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_sp_partial_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Consider alternative save_a0_merge only if we don't need a
	 frame pointer, size is nonzero and the user hasn't
	 changed the calling conventions of a0.  */
      if (! frame_pointer_needed && size
	  && call_used_regs[FIRST_ADDRESS_REGNUM]
	  && ! fixed_regs[FIRST_ADDRESS_REGNUM])
	{
	  /* Insn: add -(size + 4 * num_regs_to_save), sp.  */
	  this_strategy_size = SIZE_ADD_SP (-(size + 4 * num_regs_to_save));
	  /* Insn: mov sp, a0.  */
	  this_strategy_size++;
	  if (size)
	    {
	      /* Insn: add size, a0.  */
	      this_strategy_size += SIZE_ADD_AX (size);
	    }
	  /* Insn: fmov fs#, (a0+), for each fs# to be saved.  */
	  this_strategy_size += 3 * num_regs_to_save;

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_a0_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Consider alternative save_a0_no_merge if the user hasn't
	 changed the calling conventions of a0.  */
      if (call_used_regs[FIRST_ADDRESS_REGNUM]
	  && ! fixed_regs[FIRST_ADDRESS_REGNUM])
	{
	  /* Insn: add -4 * num_regs_to_save, sp.  */
	  this_strategy_size = SIZE_ADD_SP (-4 * num_regs_to_save);
	  /* Insn: mov sp, a0.  */
	  this_strategy_size++;
	  /* Insn: fmov fs#, (a0+), for each fs# to be saved.  */
	  this_strategy_size += 3 * num_regs_to_save;
	  if (size)
	    {
	      /* Insn: add -size, sp.  */
	      this_strategy_size += SIZE_ADD_SP (-size);
	    }

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = save_a0_no_merge;
	      strategy_size = this_strategy_size;
	    }
	}

      /* Emit the initial SP add, common to all strategies.  */
      switch (strategy)
	{
	case save_sp_no_merge:
	case save_a0_no_merge:
	  emit_insn (gen_addsi3 (stack_pointer_rtx,
				 stack_pointer_rtx,
				 GEN_INT (-4 * num_regs_to_save)));
	  xsize = 0;
	  break;

	case save_sp_partial_merge:
	  emit_insn (gen_addsi3 (stack_pointer_rtx,
				 stack_pointer_rtx,
				 GEN_INT (-128)));
	  xsize = 128 - 4 * num_regs_to_save;
	  size -= xsize;
	  break;

	case save_sp_merge:
	case save_a0_merge:
	  emit_insn (gen_addsi3 (stack_pointer_rtx,
				 stack_pointer_rtx,
				 GEN_INT (-(size + 4 * num_regs_to_save))));
	  /* We'll have to adjust FP register saves according to the
	     frame size.  */
	  xsize = size;
	  /* Since we've already created the stack frame, don't do it
	     again at the end of the function.  */
	  size = 0;
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Now prepare register a0, if we have decided to use it.  */
      switch (strategy)
	{
	case save_sp_merge:
	case save_sp_no_merge:
	case save_sp_partial_merge:
	  reg = 0;
	  break;

	case save_a0_merge:
	case save_a0_no_merge:
	  reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM);
	  emit_insn (gen_movsi (reg, stack_pointer_rtx));
	  if (xsize)
	    emit_insn (gen_addsi3 (reg, reg, GEN_INT (xsize)));
	  reg = gen_rtx_POST_INC (SImode, reg);
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Now actually save the FP registers.  */
      for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
	if (df_regs_ever_live_p (i) && ! call_used_regs[i])
	  {
	    rtx addr;

	    if (reg)
	      addr = reg;
	    else
	      {
		/* If we aren't using `a0', use an SP offset.  */
		if (xsize)
		  {
		    addr = gen_rtx_PLUS (SImode,
					 stack_pointer_rtx,
					 GEN_INT (xsize));
		  }
		else
		  addr = stack_pointer_rtx;

		xsize += 4;
	      }

	    insn = emit_insn (gen_movsi (gen_rtx_MEM (SImode, addr),
					 gen_rtx_REG (SImode, i)));

	    RTX_FRAME_RELATED_P (insn) = 1;
	  }
    }

  /* Now put the frame pointer into the frame pointer register.  */
  if (frame_pointer_needed)
    emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);

  /* Allocate stack for this frame.  */
  if (size)
    emit_insn (gen_addsi3 (stack_pointer_rtx,
			   stack_pointer_rtx,
			   GEN_INT (-size)));
  if (flag_pic && df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM))
    emit_insn (gen_GOTaddr2picreg ());
}
953
/* Expand the function epilogue: restore the call-saved FP registers
   (AM33/2.0 only, again picking the cheapest addressing strategy),
   cut back the stack, and emit the return insn that also restores
   any callee-saved general registers.  */

void
expand_epilogue (void)
{
  HOST_WIDE_INT size;

  /* SIZE includes the fixed stack space needed for function calls.  */
  size = get_frame_size () + crtl->outgoing_args_size;
  size += (crtl->outgoing_args_size ? 4 : 0);

  if (TARGET_AM33_2 && fp_regs_to_save ())
    {
      int num_regs_to_save = fp_regs_to_save (), i;
      rtx reg = 0;

      /* We have several options to restore FP registers.  We could
	 load them from SP offsets, but, if there are enough FP
	 registers to restore, we win if we use a post-increment
	 addressing mode.  */

      /* If we have a frame pointer, it's the best option, because we
	 already know it has the value we want.  */
      if (frame_pointer_needed)
	reg = gen_rtx_REG (SImode, FRAME_POINTER_REGNUM);
      /* Otherwise, we may use `a1', since it's call-clobbered and
	 it's never used for return values.  But only do so if it's
	 smaller than using SP offsets.  */
      else
	{
	  enum { restore_sp_post_adjust,
		 restore_sp_pre_adjust,
		 restore_sp_partial_adjust,
		 restore_a1 } strategy;
	  unsigned int this_strategy_size, strategy_size = (unsigned)-1;

	  /* Consider using sp offsets before adjusting sp.  */
	  /* Insn: fmov (##,sp),fs#, for each fs# to be restored.  */
	  this_strategy_size = SIZE_FMOV_SP (size, num_regs_to_save);
	  /* If size is too large, we'll have to adjust SP with an
	     add.  */
	  if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
	    {
	      /* Insn: add size + 4 * num_regs_to_save, sp.  */
	      this_strategy_size += SIZE_ADD_SP (size + 4 * num_regs_to_save);
	    }
	  /* If we don't have to restore any non-FP registers,
	     we'll be able to save one byte by using rets.  */
	  if (! REG_SAVE_BYTES)
	    this_strategy_size--;

	  /* NOTE: this alternative is evaluated unconditionally and
	     STRATEGY_SIZE starts at UINT_MAX, so STRATEGY is always
	     initialized before the switch below reads it.  */
	  if (this_strategy_size < strategy_size)
	    {
	      strategy = restore_sp_post_adjust;
	      strategy_size = this_strategy_size;
	    }

	  /* Consider using sp offsets after adjusting sp.  */
	  /* Insn: add size, sp.  */
	  this_strategy_size = SIZE_ADD_SP (size);
	  /* Insn: fmov (##,sp),fs#, for each fs# to be restored.  */
	  this_strategy_size += SIZE_FMOV_SP (0, num_regs_to_save);
	  /* We're going to use ret to release the FP registers
	     save area, so, no savings.  */

	  if (this_strategy_size < strategy_size)
	    {
	      strategy = restore_sp_pre_adjust;
	      strategy_size = this_strategy_size;
	    }

	  /* Consider using sp offsets after partially adjusting sp.
	     When size is close to 32Kb, we may be able to adjust SP
	     with an imm16 add instruction while still using fmov
	     (d8,sp).  */
	  if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
	    {
	      /* Insn: add size + 4 * num_regs_to_save
			+ REG_SAVE_BYTES - 252,sp.  */
	      this_strategy_size = SIZE_ADD_SP (size + 4 * num_regs_to_save
						+ REG_SAVE_BYTES - 252);
	      /* Insn: fmov (##,sp),fs#, for each fs# to be restored.  */
	      this_strategy_size += SIZE_FMOV_SP (252 - REG_SAVE_BYTES
						  - 4 * num_regs_to_save,
						  num_regs_to_save);
	      /* We're going to use ret to release the FP registers
		 save area, so, no savings.  */

	      if (this_strategy_size < strategy_size)
		{
		  strategy = restore_sp_partial_adjust;
		  strategy_size = this_strategy_size;
		}
	    }

	  /* Consider using a1 in post-increment mode, as long as the
	     user hasn't changed the calling conventions of a1.  */
	  if (call_used_regs[FIRST_ADDRESS_REGNUM+1]
	      && ! fixed_regs[FIRST_ADDRESS_REGNUM+1])
	    {
	      /* Insn: mov sp,a1.  */
	      this_strategy_size = 1;
	      if (size)
		{
		  /* Insn: add size,a1.  */
		  this_strategy_size += SIZE_ADD_AX (size);
		}
	      /* Insn: fmov (a1+),fs#, for each fs# to be restored.  */
	      this_strategy_size += 3 * num_regs_to_save;
	      /* If size is large enough, we may be able to save a
		 couple of bytes.  */
	      if (size + 4 * num_regs_to_save + REG_SAVE_BYTES > 255)
		{
		  /* Insn: mov a1,sp.  */
		  this_strategy_size += 2;
		}
	      /* If we don't have to restore any non-FP registers,
		 we'll be able to save one byte by using rets.  */
	      if (! REG_SAVE_BYTES)
		this_strategy_size--;

	      if (this_strategy_size < strategy_size)
		{
		  strategy = restore_a1;
		  strategy_size = this_strategy_size;
		}
	    }

	  switch (strategy)
	    {
	    case restore_sp_post_adjust:
	      break;

	    case restore_sp_pre_adjust:
	      emit_insn (gen_addsi3 (stack_pointer_rtx,
				     stack_pointer_rtx,
				     GEN_INT (size)));
	      size = 0;
	      break;

	    case restore_sp_partial_adjust:
	      emit_insn (gen_addsi3 (stack_pointer_rtx,
				     stack_pointer_rtx,
				     GEN_INT (size + 4 * num_regs_to_save
					      + REG_SAVE_BYTES - 252)));
	      size = 252 - REG_SAVE_BYTES - 4 * num_regs_to_save;
	      break;

	    case restore_a1:
	      reg = gen_rtx_REG (SImode, FIRST_ADDRESS_REGNUM + 1);
	      emit_insn (gen_movsi (reg, stack_pointer_rtx));
	      if (size)
		emit_insn (gen_addsi3 (reg, reg, GEN_INT (size)));
	      break;

	    default:
	      gcc_unreachable ();
	    }
	}

      /* Adjust the selected register, if any, for post-increment.  */
      if (reg)
	reg = gen_rtx_POST_INC (SImode, reg);

      for (i = FIRST_FP_REGNUM; i <= LAST_FP_REGNUM; ++i)
	if (df_regs_ever_live_p (i) && ! call_used_regs[i])
	  {
	    rtx addr;

	    if (reg)
	      addr = reg;
	    else if (size)
	      {
		/* If we aren't using a post-increment register, use an
		   SP offset.  */
		addr = gen_rtx_PLUS (SImode,
				     stack_pointer_rtx,
				     GEN_INT (size));
	      }
	    else
	      addr = stack_pointer_rtx;

	    size += 4;

	    emit_insn (gen_movsi (gen_rtx_REG (SImode, i),
				  gen_rtx_MEM (SImode, addr)));
	  }

      /* If we were using the restore_a1 strategy and the number of
	 bytes to be released won't fit in the `ret' byte, copy `a1'
	 to `sp', to avoid having to use `add' to adjust it.  */
      if (! frame_pointer_needed && reg && size + REG_SAVE_BYTES > 255)
	{
	  emit_move_insn (stack_pointer_rtx, XEXP (reg, 0));
	  size = 0;
	}
    }

  /* Maybe cut back the stack, except for the register save area.

     If the frame pointer exists, then use the frame pointer to
     cut back the stack.

     If the stack size + register save area is more than 255 bytes,
     then the stack must be cut back here since the size + register
     save size is too big for a ret/retf instruction.

     Else leave it alone, it will be cut back as part of the
     ret/retf instruction, or there wasn't any stack to begin with.

     Under no circumstances should the register save area be
     deallocated here, that would leave a window where an interrupt
     could occur and trash the register save area.  */
  if (frame_pointer_needed)
    {
      emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
      size = 0;
    }
  else if (size + REG_SAVE_BYTES > 255)
    {
      emit_insn (gen_addsi3 (stack_pointer_rtx,
			     stack_pointer_rtx,
			     GEN_INT (size)));
      size = 0;
    }

  /* Adjust the stack and restore callee-saved registers, if any.  */
  if (size || df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
      || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
      || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
      || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
      || frame_pointer_needed)
    emit_jump_insn (gen_return_internal_regs
		    (GEN_INT (size + REG_SAVE_BYTES)));
  else
    emit_jump_insn (gen_return_internal ());
}
1189
/* Track the effect of INSN (whose pattern is BODY) on the condition
   codes, updating the global cc_status accordingly.  The insn's CC
   attribute selects one of a few fixed behaviors.  */

void
notice_update_cc (rtx body, rtx insn)
{
  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all; cc_status stays valid.  */
      break;

    case CC_NONE_0HIT:
      /* Insn does not change CC, but the 0'th operand has been changed;
	 invalidate any cached flag value that mentioned it.  */
      if (cc_status.value1 != 0
	  && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
	cc_status.value1 = 0;
      break;

    case CC_SET_ZN:
      /* Insn sets the Z,N flags of CC to recog_data.operand[0].
	 V,C are unusable.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_NO_CARRY | CC_OVERFLOW_UNUSABLE;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_SET_ZNV:
      /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
	 C is unusable.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_COMPARE:
      /* The insn is a compare instruction; remember what was compared.
	 An SFmode comparison sets the FP condition codes instead.  */
      CC_STATUS_INIT;
      cc_status.value1 = SET_SRC (body);
      if (GET_CODE (cc_status.value1) == COMPARE
	  && GET_MODE (XEXP (cc_status.value1, 0)) == SFmode)
	cc_status.mdep.fpCC = 1;
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state; forget everything.  */
      CC_STATUS_INIT;
      break;

    default:
      gcc_unreachable ();
    }
}
1242
/* Recognize the PARALLEL rtx generated by mn10300_gen_multiple_store().
   This function is for MATCH_PARALLEL and so assumes OP is known to be
   parallel.  If OP is a multiple store, return a mask indicating which
   registers it saves.  Return 0 otherwise.  */

int
store_multiple_operation (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  int count;		/* Number of elements in the PARALLEL.  */
  int mask;		/* Accumulated set of stored registers.  */
  int i;
  unsigned int last;	/* Smallest register number stored so far.  */
  rtx elt;

  count = XVECLEN (op, 0);
  /* A multiple store needs the SP adjustment plus at least one push.  */
  if (count < 2)
    return 0;

  /* Check that first instruction has the form (set (sp) (plus A B)) */
  elt = XVECEXP (op, 0, 0);
  if (GET_CODE (elt) != SET
      || GET_CODE (SET_DEST (elt)) != REG
      || REGNO (SET_DEST (elt)) != STACK_POINTER_REGNUM
      || GET_CODE (SET_SRC (elt)) != PLUS)
    return 0;

  /* Check that A is the stack pointer and B is the expected stack size.
     For OP to match, each subsequent instruction should push a word onto
     the stack.  We therefore expect the first instruction to create
     COUNT-1 stack slots.  */
  elt = SET_SRC (elt);
  if (GET_CODE (XEXP (elt, 0)) != REG
      || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
      || GET_CODE (XEXP (elt, 1)) != CONST_INT
      || INTVAL (XEXP (elt, 1)) != -(count - 1) * 4)
    return 0;

  /* Now go through the rest of the vector elements.  They must be
     ordered so that the first instruction stores the highest-numbered
     register to the highest stack slot and that subsequent instructions
     store a lower-numbered register to the slot below.

     LAST keeps track of the smallest-numbered register stored so far.
     MASK is the set of stored registers.  */
  last = LAST_EXTENDED_REGNUM + 1;
  mask = 0;
  for (i = 1; i < count; i++)
    {
      /* Check that element i is a (set (mem M) R) and that R is valid.
	 The `>= last' test also enforces strictly decreasing register
	 numbers, so each register can appear at most once.  */
      elt = XVECEXP (op, 0, i);
      if (GET_CODE (elt) != SET
	  || GET_CODE (SET_DEST (elt)) != MEM
	  || GET_CODE (SET_SRC (elt)) != REG
	  || REGNO (SET_SRC (elt)) >= last)
	return 0;

      /* R was OK, so provisionally add it to MASK.  We return 0 in any
	 case if the rest of the instruction has a flaw.  */
      last = REGNO (SET_SRC (elt));
      mask |= (1 << last);

      /* Check that M has the form (plus (sp) (const_int -I*4)) */
      elt = XEXP (SET_DEST (elt), 0);
      if (GET_CODE (elt) != PLUS
	  || GET_CODE (XEXP (elt, 0)) != REG
	  || REGNO (XEXP (elt, 0)) != STACK_POINTER_REGNUM
	  || GET_CODE (XEXP (elt, 1)) != CONST_INT
	  || INTVAL (XEXP (elt, 1)) != -i * 4)
	return 0;
    }

  /* All or none of the callee-saved extended registers must be in the
     set; 0x3c000 covers register numbers 14-17.  */
  if ((mask & 0x3c000) != 0
      && (mask & 0x3c000) != 0x3c000)
    return 0;

  return mask;
}
1321
/* What (if any) secondary registers are needed to move IN with mode
   MODE into a register in register class RCLASS.

   We might be able to simplify this.  */
enum reg_class
mn10300_secondary_reload_class (enum reg_class rclass, enum machine_mode mode,
				rtx in)
{
  rtx inner = in;

  /* Strip off any SUBREG expressions from IN.  Basically we want
     to know if IN is a pseudo or (subreg (pseudo)) as those can
     turn into MEMs during reload.  */
  while (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  /* Memory loads less than a full word wide can't have an
     address or stack pointer destination.  They must use
     a data register as an intermediate register.  Pseudos are
     included since they may be spilled to memory by reload.  */
  if ((GET_CODE (in) == MEM
       || (GET_CODE (inner) == REG
	   && REGNO (inner) >= FIRST_PSEUDO_REGISTER))
      && (mode == QImode || mode == HImode)
      && (rclass == ADDRESS_REGS || rclass == SP_REGS
	  || rclass == SP_OR_ADDRESS_REGS))
    {
      /* On the AM33 an extended register may also serve as the
	 intermediate.  */
      if (TARGET_AM33)
	return DATA_OR_EXTENDED_REGS;
      return DATA_REGS;
    }

  /* We can't directly load sp + const_int into a data register;
     we must use an address register as an intermediate.  */
  if (rclass != SP_REGS
      && rclass != ADDRESS_REGS
      && rclass != SP_OR_ADDRESS_REGS
      && rclass != SP_OR_EXTENDED_REGS
      && rclass != ADDRESS_OR_EXTENDED_REGS
      && rclass != SP_OR_ADDRESS_OR_EXTENDED_REGS
      && (in == stack_pointer_rtx
	  || (GET_CODE (in) == PLUS
	      && (XEXP (in, 0) == stack_pointer_rtx
		  || XEXP (in, 1) == stack_pointer_rtx))))
    return ADDRESS_REGS;

  /* Any other stack-pointer-relative PLUS still needs a general
     register as an intermediate.  */
  if (GET_CODE (in) == PLUS
      && (XEXP (in, 0) == stack_pointer_rtx
	  || XEXP (in, 1) == stack_pointer_rtx))
    return GENERAL_REGS;

  if (TARGET_AM33_2
      && rclass == FP_REGS)
    {
      /* We can't load directly into an FP register from a
	 constant address.  */
      if (GET_CODE (in) == MEM
	  && CONSTANT_ADDRESS_P (XEXP (in, 0)))
	return (TARGET_AM33 ? DATA_OR_EXTENDED_REGS : DATA_REGS);

      /* Handle case where a pseudo may not get a hard register
	 but has an equivalent memory location defined.  */
      if (GET_CODE (inner) == REG
	  && REGNO (inner) >= FIRST_PSEUDO_REGISTER
	  && reg_equiv_mem [REGNO (inner)]
	  && CONSTANT_ADDRESS_P (XEXP (reg_equiv_mem [REGNO (inner)], 0)))
	return (TARGET_AM33 ? DATA_OR_EXTENDED_REGS : DATA_REGS);
    }

  /* Otherwise assume no secondary reloads are needed.  */
  return NO_REGS;
}
1393
1394 int
1395 initial_offset (int from, int to)
1396 {
1397 /* The difference between the argument pointer and the frame pointer
1398 is the size of the callee register save area. */
1399 if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
1400 {
1401 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1402 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1403 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1404 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1405 || fp_regs_to_save ()
1406 || frame_pointer_needed)
1407 return REG_SAVE_BYTES
1408 + 4 * fp_regs_to_save ();
1409 else
1410 return 0;
1411 }
1412
1413 /* The difference between the argument pointer and the stack pointer is
1414 the sum of the size of this function's frame, the callee register save
1415 area, and the fixed stack space needed for function calls (if any). */
1416 if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1417 {
1418 if (df_regs_ever_live_p (2) || df_regs_ever_live_p (3)
1419 || df_regs_ever_live_p (6) || df_regs_ever_live_p (7)
1420 || df_regs_ever_live_p (14) || df_regs_ever_live_p (15)
1421 || df_regs_ever_live_p (16) || df_regs_ever_live_p (17)
1422 || fp_regs_to_save ()
1423 || frame_pointer_needed)
1424 return (get_frame_size () + REG_SAVE_BYTES
1425 + 4 * fp_regs_to_save ()
1426 + (crtl->outgoing_args_size
1427 ? crtl->outgoing_args_size + 4 : 0));
1428 else
1429 return (get_frame_size ()
1430 + (crtl->outgoing_args_size
1431 ? crtl->outgoing_args_size + 4 : 0));
1432 }
1433
1434 /* The difference between the frame pointer and stack pointer is the sum
1435 of the size of this function's frame and the fixed stack space needed
1436 for function calls (if any). */
1437 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
1438 return (get_frame_size ()
1439 + (crtl->outgoing_args_size
1440 ? crtl->outgoing_args_size + 4 : 0));
1441
1442 gcc_unreachable ();
1443 }
1444
1445 /* Worker function for TARGET_RETURN_IN_MEMORY. */
1446
1447 static bool
1448 mn10300_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
1449 {
1450 /* Return values > 8 bytes in length in memory. */
1451 return (int_size_in_bytes (type) > 8
1452 || int_size_in_bytes (type) == 0
1453 || TYPE_MODE (type) == BLKmode);
1454 }
1455
/* Flush the argument registers to the stack for a stdarg function;
   return the new argument pointer.  */
static rtx
mn10300_builtin_saveregs (void)
{
  rtx offset, mem;
  tree fntype = TREE_TYPE (current_function_decl);
  /* ARGADJ is UNITS_PER_WORD unless the function has a prototype
     whose last listed argument is not `void' -- presumably stepping
     past the final named argument's slot; confirm against the ABI.  */
  int argadj = ((!(TYPE_ARG_TYPES (fntype) != 0
		   && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
		       != void_type_node)))
		? UNITS_PER_WORD : 0);
  alias_set_type set = get_varargs_alias_set ();

  /* Compute the offset from the internal arg pointer at which the
     anonymous arguments start.  */
  if (argadj)
    offset = plus_constant (crtl->args.arg_offset_rtx, argadj);
  else
    offset = crtl->args.arg_offset_rtx;

  /* Spill argument registers 0 and 1 into the first two words at the
     internal argument pointer, tagging both stores with the varargs
     alias set.  */
  mem = gen_rtx_MEM (SImode, crtl->args.internal_arg_pointer);
  set_mem_alias_set (mem, set);
  emit_move_insn (mem, gen_rtx_REG (SImode, 0));

  mem = gen_rtx_MEM (SImode,
		     plus_constant (crtl->args.internal_arg_pointer, 4));
  set_mem_alias_set (mem, set);
  emit_move_insn (mem, gen_rtx_REG (SImode, 1));

  /* The new argument pointer is the internal arg pointer plus OFFSET,
     copied into a fresh pseudo.  */
  return copy_to_reg (expand_binop (Pmode, add_optab,
				    crtl->args.internal_arg_pointer,
				    offset, 0, 0, OPTAB_LIB_WIDEN));
}
1487
/* Implement `va_start'.  The incoming NEXTARG is discarded; we
   recompute the argument pointer by flushing the argument registers
   to the stack via expand_builtin_saveregs.  */
static void
mn10300_va_start (tree valist, rtx nextarg)
{
  nextarg = expand_builtin_saveregs ();
  std_expand_builtin_va_start (valist, nextarg);
}
1494
1495 /* Return true when a parameter should be passed by reference. */
1496
1497 static bool
1498 mn10300_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
1499 enum machine_mode mode, const_tree type,
1500 bool named ATTRIBUTE_UNUSED)
1501 {
1502 unsigned HOST_WIDE_INT size;
1503
1504 if (type)
1505 size = int_size_in_bytes (type);
1506 else
1507 size = GET_MODE_SIZE (mode);
1508
1509 return (size > 8 || size == 0);
1510 }
1511
1512 /* Return an RTX to represent where a value with mode MODE will be returned
1513 from a function. If the result is 0, the argument is pushed. */
1514
1515 rtx
1516 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1517 tree type, int named ATTRIBUTE_UNUSED)
1518 {
1519 rtx result = 0;
1520 int size, align;
1521
1522 /* We only support using 2 data registers as argument registers. */
1523 int nregs = 2;
1524
1525 /* Figure out the size of the object to be passed. */
1526 if (mode == BLKmode)
1527 size = int_size_in_bytes (type);
1528 else
1529 size = GET_MODE_SIZE (mode);
1530
1531 /* Figure out the alignment of the object to be passed. */
1532 align = size;
1533
1534 cum->nbytes = (cum->nbytes + 3) & ~3;
1535
1536 /* Don't pass this arg via a register if all the argument registers
1537 are used up. */
1538 if (cum->nbytes > nregs * UNITS_PER_WORD)
1539 return 0;
1540
1541 /* Don't pass this arg via a register if it would be split between
1542 registers and memory. */
1543 if (type == NULL_TREE
1544 && cum->nbytes + size > nregs * UNITS_PER_WORD)
1545 return 0;
1546
1547 switch (cum->nbytes / UNITS_PER_WORD)
1548 {
1549 case 0:
1550 result = gen_rtx_REG (mode, 0);
1551 break;
1552 case 1:
1553 result = gen_rtx_REG (mode, 1);
1554 break;
1555 default:
1556 result = 0;
1557 }
1558
1559 return result;
1560 }
1561
1562 /* Return the number of bytes of registers to use for an argument passed
1563 partially in registers and partially in memory. */
1564
1565 static int
1566 mn10300_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode,
1567 tree type, bool named ATTRIBUTE_UNUSED)
1568 {
1569 int size, align;
1570
1571 /* We only support using 2 data registers as argument registers. */
1572 int nregs = 2;
1573
1574 /* Figure out the size of the object to be passed. */
1575 if (mode == BLKmode)
1576 size = int_size_in_bytes (type);
1577 else
1578 size = GET_MODE_SIZE (mode);
1579
1580 /* Figure out the alignment of the object to be passed. */
1581 align = size;
1582
1583 cum->nbytes = (cum->nbytes + 3) & ~3;
1584
1585 /* Don't pass this arg via a register if all the argument registers
1586 are used up. */
1587 if (cum->nbytes > nregs * UNITS_PER_WORD)
1588 return 0;
1589
1590 if (cum->nbytes + size <= nregs * UNITS_PER_WORD)
1591 return 0;
1592
1593 /* Don't pass this arg via a register if it would be split between
1594 registers and memory. */
1595 if (type == NULL_TREE
1596 && cum->nbytes + size > nregs * UNITS_PER_WORD)
1597 return 0;
1598
1599 return nregs * UNITS_PER_WORD - cum->nbytes;
1600 }
1601
/* Return the location of the function's value.  This will be either
   $d0 for integer functions, $a0 for pointers, or a PARALLEL of both
   $d0 and $a0 if the -mreturn-pointer-on-do flag is set.  Note that
   we only return the PARALLEL for outgoing values; we do not want
   callers relying on this extra copy.  */

rtx
mn10300_function_value (const_tree valtype, const_tree func, int outgoing)
{
  rtx rv;
  enum machine_mode mode = TYPE_MODE (valtype);

  /* Non-pointer values always come back in the first data register.  */
  if (! POINTER_TYPE_P (valtype))
    return gen_rtx_REG (mode, FIRST_DATA_REGNUM);
  /* Pointers come back in the first address register alone unless we
     are describing the outgoing location with A0/D0 duplication
     enabled and the function does not return a struct.  */
  else if (! TARGET_PTR_A0D0 || ! outgoing
	   || cfun->returns_struct)
    return gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM);

  /* Outgoing pointer value: a PARALLEL placing the value in both the
     address register and the data register, each at offset 0.  */
  rv = gen_rtx_PARALLEL (mode, rtvec_alloc (2));
  XVECEXP (rv, 0, 0)
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_REG (mode, FIRST_ADDRESS_REGNUM),
			 GEN_INT (0));

  XVECEXP (rv, 0, 1)
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_REG (mode, FIRST_DATA_REGNUM),
			 GEN_INT (0));
  return rv;
}
1632
/* Output a tst insn: a comparison of OPERAND against zero.  INSN is
   the insn being output.  If an earlier insn left zero in a suitable
   register, emit a register-register compare instead, which saves a
   byte; otherwise fall back to comparing against immediate zero.  */
const char *
output_tst (rtx operand, rtx insn)
{
  rtx temp;
  int past_call = 0;

  /* We can save a byte if we can find a register which has the value
     zero in it.  */
  temp = PREV_INSN (insn);
  while (optimize && temp)
    {
      rtx set;

      /* We allow the search to go through call insns.  We record
	 the fact that we've past a CALL_INSN and reject matches which
	 use call clobbered registers.  */
      if (GET_CODE (temp) == CODE_LABEL
	  || GET_CODE (temp) == JUMP_INSN
	  || GET_CODE (temp) == BARRIER)
	break;

      if (GET_CODE (temp) == CALL_INSN)
	past_call = 1;

      if (GET_CODE (temp) == NOTE)
	{
	  temp = PREV_INSN (temp);
	  continue;
	}

      /* It must be an insn, see if it is a simple set. */
      set = single_set (temp);
      if (!set)
	{
	  temp = PREV_INSN (temp);
	  continue;
	}

      /* Are we setting a data register to zero (this does not win for
	 address registers)?

	 If it's a call clobbered register, have we past a call?

	 Make sure the register we find isn't the same as ourself;
	 the mn10300 can't encode that.

	 ??? reg_set_between_p return nonzero anytime we pass a CALL_INSN
	 so the code to detect calls here isn't doing anything useful.  */
      if (REG_P (SET_DEST (set))
	  && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
	  && !reg_set_between_p (SET_DEST (set), temp, insn)
	  && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
	      == REGNO_REG_CLASS (REGNO (operand)))
	  && REGNO_REG_CLASS (REGNO (SET_DEST (set))) != EXTENDED_REGS
	  && REGNO (SET_DEST (set)) != REGNO (operand)
	  && (!past_call
	      || !call_used_regs[REGNO (SET_DEST (set))]))
	{
	  rtx xoperands[2];
	  xoperands[0] = operand;
	  xoperands[1] = SET_DEST (set);

	  output_asm_insn ("cmp %1,%0", xoperands);
	  return "";
	}

      /* NOTE(review): this branch appears unreachable -- it requires
	 the destination's class to differ from OPERAND's class (which
	 must be EXTENDED_REGS) while also being EXTENDED_REGS itself.
	 Possibly the inequality test was not intended; confirm before
	 relying on this path.  */
      if (REGNO_REG_CLASS (REGNO (operand)) == EXTENDED_REGS
	  && REG_P (SET_DEST (set))
	  && SET_SRC (set) == CONST0_RTX (GET_MODE (SET_DEST (set)))
	  && !reg_set_between_p (SET_DEST (set), temp, insn)
	  && (REGNO_REG_CLASS (REGNO (SET_DEST (set)))
	      != REGNO_REG_CLASS (REGNO (operand)))
	  && REGNO_REG_CLASS (REGNO (SET_DEST (set))) == EXTENDED_REGS
	  && REGNO (SET_DEST (set)) != REGNO (operand)
	  && (!past_call
	      || !call_used_regs[REGNO (SET_DEST (set))]))
	{
	  rtx xoperands[2];
	  xoperands[0] = operand;
	  xoperands[1] = SET_DEST (set);

	  output_asm_insn ("cmp %1,%0", xoperands);
	  return "";
	}
      temp = PREV_INSN (temp);
    }
  /* No known-zero register found; compare against immediate zero.  */
  return "cmp 0,%0";
}
1722
1723 int
1724 impossible_plus_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1725 {
1726 if (GET_CODE (op) != PLUS)
1727 return 0;
1728
1729 if (XEXP (op, 0) == stack_pointer_rtx
1730 || XEXP (op, 1) == stack_pointer_rtx)
1731 return 1;
1732
1733 return 0;
1734 }
1735
/* Similarly, but when using a zero_extract pattern for a btst where
   the source operand might end up in memory.  Return nonzero if the
   mask of LEN consecutive bits starting at bit BIT fits entirely
   within one aligned byte of a 32-bit word.  */
int
mask_ok_for_mem_btst (int len, int bit)
{
  unsigned int mask = 0;

  /* Build the mask of LEN consecutive bits starting at BIT.  Shift an
     unsigned constant so that reaching bit 31 is well defined (a
     signed `1 << 31' would overflow).  */
  while (len > 0)
    {
      mask |= (1U << bit);
      bit++;
      len--;
    }

  /* MASK must fit into an 8 bit value.  */
  return (((mask & 0xff) == mask)
	  || ((mask & 0xff00) == mask)
	  || ((mask & 0xff0000) == mask)
	  || ((mask & 0xff000000) == mask));
}
1756
1757 /* Return 1 if X contains a symbolic expression. We know these
1758 expressions will have one of a few well defined forms, so
1759 we need only check those forms. */
1760 int
1761 symbolic_operand (register rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1762 {
1763 switch (GET_CODE (op))
1764 {
1765 case SYMBOL_REF:
1766 case LABEL_REF:
1767 return 1;
1768 case CONST:
1769 op = XEXP (op, 0);
1770 return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
1771 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
1772 && GET_CODE (XEXP (op, 1)) == CONST_INT);
1773 default:
1774 return 0;
1775 }
1776 }
1777
/* Try machine dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new valid address.
   This macro is used in only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   MODE and WIN are passed so that this macro can use
   GO_IF_LEGITIMATE_ADDRESS.

   Normally it is always safe for this macro to do nothing.  It exists to
   recognize opportunities to optimize the output.

   But on a few ports with segmented architectures and indexed addressing
   (mn10300, hppa) it is used to rewrite certain problematical addresses.  */
rtx
legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
		    enum machine_mode mode ATTRIBUTE_UNUSED)
{
  /* NOTE(review): OLDX is marked ATTRIBUTE_UNUSED yet it -- not X --
     is what gets legitimized here; confirm this is intentional.  */
  if (flag_pic && ! legitimate_pic_operand_p (x))
    x = legitimize_pic_address (oldx, NULL_RTX);

  /* Uh-oh.  We might have an address for x[n-100000].  This needs
     special handling to avoid creating an indexed memory address
     with x-100000 as the base.  */
  if (GET_CODE (x) == PLUS
      && symbolic_operand (XEXP (x, 1), VOIDmode))
    {
      /* Ugly.  We modify things here so that the address offset specified
	 by the index expression is computed first, then added to x to form
	 the entire address.  */

      rtx regx1, regy1, regy2, y;

      /* Strip off any CONST.  */
      y = XEXP (x, 1);
      if (GET_CODE (y) == CONST)
	y = XEXP (y, 0);

      if (GET_CODE (y) == PLUS || GET_CODE (y) == MINUS)
	{
	  /* Force the pieces into registers and rebuild the address as
	     (base OP const-part) + symbolic-part, so the constant
	     combines with the base first.  */
	  regx1 = force_reg (Pmode, force_operand (XEXP (x, 0), 0));
	  regy1 = force_reg (Pmode, force_operand (XEXP (y, 0), 0));
	  regy2 = force_reg (Pmode, force_operand (XEXP (y, 1), 0));
	  regx1 = force_reg (Pmode,
			     gen_rtx_fmt_ee (GET_CODE (y), Pmode, regx1, regy2));
	  return force_reg (Pmode, gen_rtx_PLUS (Pmode, regx1, regy1));
	}
    }
  return x;
}
1829
1830 /* Convert a non-PIC address in `orig' to a PIC address using @GOT or
1831 @GOTOFF in `reg'. */
1832 rtx
1833 legitimize_pic_address (rtx orig, rtx reg)
1834 {
1835 if (GET_CODE (orig) == LABEL_REF
1836 || (GET_CODE (orig) == SYMBOL_REF
1837 && (CONSTANT_POOL_ADDRESS_P (orig)
1838 || ! MN10300_GLOBAL_P (orig))))
1839 {
1840 if (reg == 0)
1841 reg = gen_reg_rtx (Pmode);
1842
1843 emit_insn (gen_symGOTOFF2reg (reg, orig));
1844 return reg;
1845 }
1846 else if (GET_CODE (orig) == SYMBOL_REF)
1847 {
1848 if (reg == 0)
1849 reg = gen_reg_rtx (Pmode);
1850
1851 emit_insn (gen_symGOT2reg (reg, orig));
1852 return reg;
1853 }
1854 return orig;
1855 }
1856
1857 /* Return zero if X references a SYMBOL_REF or LABEL_REF whose symbol
1858 isn't protected by a PIC unspec; nonzero otherwise. */
1859 int
1860 legitimate_pic_operand_p (rtx x)
1861 {
1862 register const char *fmt;
1863 register int i;
1864
1865 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
1866 return 0;
1867
1868 if (GET_CODE (x) == UNSPEC
1869 && (XINT (x, 1) == UNSPEC_PIC
1870 || XINT (x, 1) == UNSPEC_GOT
1871 || XINT (x, 1) == UNSPEC_GOTOFF
1872 || XINT (x, 1) == UNSPEC_PLT
1873 || XINT (x, 1) == UNSPEC_GOTSYM_OFF))
1874 return 1;
1875
1876 fmt = GET_RTX_FORMAT (GET_CODE (x));
1877 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
1878 {
1879 if (fmt[i] == 'E')
1880 {
1881 register int j;
1882
1883 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1884 if (! legitimate_pic_operand_p (XVECEXP (x, i, j)))
1885 return 0;
1886 }
1887 else if (fmt[i] == 'e' && ! legitimate_pic_operand_p (XEXP (x, i)))
1888 return 0;
1889 }
1890
1891 return 1;
1892 }
1893
/* Return TRUE if the address X, taken from a (MEM:MODE X) rtx, is
   legitimate, and FALSE otherwise.  STRICT selects strict checking
   of base registers.  */
bool
legitimate_address_p (enum machine_mode mode, rtx x, int strict)
{
  /* Constant addresses are fine, except under PIC where they must
     additionally be protected by a PIC unspec.  */
  if (CONSTANT_ADDRESS_P (x)
      && (! flag_pic || legitimate_pic_operand_p (x)))
    return TRUE;

  /* A plain base register.  */
  if (RTX_OK_FOR_BASE_P (x, strict))
    return TRUE;

  /* AM33 allows post-increment addressing for SI, SF and HI modes.  */
  if (TARGET_AM33
      && GET_CODE (x) == POST_INC
      && RTX_OK_FOR_BASE_P (XEXP (x, 0), strict)
      && (mode == SImode || mode == SFmode || mode == HImode))
    return TRUE;

  /* Base register plus displacement; either operand may be the base.  */
  if (GET_CODE (x) == PLUS)
    {
      rtx base = 0, index = 0;

      if (REG_P (XEXP (x, 0))
	  && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 0)), strict))
	{
	  base = XEXP (x, 0);
	  index = XEXP (x, 1);
	}

      /* If both operands are valid bases, this second test wins and
	 INDEX becomes a REG, which the checks below then reject.  */
      if (REG_P (XEXP (x, 1))
	  && REGNO_STRICT_OK_FOR_BASE_P (REGNO (XEXP (x, 1)), strict))
	{
	  base = XEXP (x, 1);
	  index = XEXP (x, 0);
	}

      if (base != 0 && index != 0)
	{
	  /* A literal integer displacement is always acceptable.  */
	  if (GET_CODE (index) == CONST_INT)
	    return TRUE;
	  /* A symbolic displacement is allowed when it is not a
	     compound PLUS and, under PIC, only when PIC-safe.  */
	  if (GET_CODE (index) == CONST
	      && GET_CODE (XEXP (index, 0)) != PLUS
	      && (! flag_pic
		  || legitimate_pic_operand_p (index)))
	    return TRUE;
	}
    }

  return FALSE;
}
1944
/* Helper for mn10300_address_cost: return a relative cost for the
   address (sub)expression X.  *UNSIG is set to 1 once a component
   (the stack pointer or a ZERO_EXTEND) implies that an accompanying
   constant is treated as unsigned, which widens the cheap constant
   ranges in the CONST_INT case below.  */
static int
mn10300_address_cost_1 (rtx x, int *unsig)
{
  switch (GET_CODE (x))
    {
    case REG:
      /* Rank register classes by how cheaply they serve in an
	 address: sp is free, address registers are cheapest after
	 that, then data/extended/FP registers, with NO_REGS worst.  */
      switch (REGNO_REG_CLASS (REGNO (x)))
	{
	case SP_REGS:
	  *unsig = 1;
	  return 0;

	case ADDRESS_REGS:
	  return 1;

	case DATA_REGS:
	case EXTENDED_REGS:
	case FP_REGS:
	  return 3;

	case NO_REGS:
	  return 5;

	default:
	  gcc_unreachable ();
	}

    case PLUS:
    case MINUS:
    case ASHIFT:
    case AND:
    case IOR:
      /* A composite address costs the sum of its two parts.  */
      return (mn10300_address_cost_1 (XEXP (x, 0), unsig)
	      + mn10300_address_cost_1 (XEXP (x, 1), unsig));

    case EXPR_LIST:
    case SUBREG:
    case MEM:
      /* Look through the wrapper and cost the inner address afresh.  */
      return mn10300_address_cost (XEXP (x, 0), !optimize_size);

    case ZERO_EXTEND:
      *unsig = 1;
      return mn10300_address_cost_1 (XEXP (x, 0), unsig);

    case CONST_INT:
      /* Cost reflects the displacement encoding width: zero is free;
	 then 8, 16 or 24 bit values (signed unless *UNSIG says
	 otherwise) get progressively more expensive; anything wider
	 costs the most.  */
      if (INTVAL (x) == 0)
	return 0;
      if (INTVAL (x) + (*unsig ? 0 : 0x80) < 0x100)
	return 1;
      if (INTVAL (x) + (*unsig ? 0 : 0x8000) < 0x10000)
	return 3;
      if (INTVAL (x) + (*unsig ? 0 : 0x800000) < 0x1000000)
	return 5;
      return 7;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Symbolic addresses always need a full-width encoding.  */
      return 8;

    default:
      gcc_unreachable ();

    }
}
2010
2011 static int
2012 mn10300_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
2013 {
2014 int s = 0;
2015 return mn10300_address_cost_1 (x, &s);
2016 }
2017
2018 static bool
2019 mn10300_rtx_costs (rtx x, int code, int outer_code, int *total, bool speed ATTRIBUTE_UNUSED)
2020 {
2021 switch (code)
2022 {
2023 case CONST_INT:
2024 /* Zeros are extremely cheap. */
2025 if (INTVAL (x) == 0 && outer_code == SET)
2026 *total = 0;
2027 /* If it fits in 8 bits, then it's still relatively cheap. */
2028 else if (INT_8_BITS (INTVAL (x)))
2029 *total = 1;
2030 /* This is the "base" cost, includes constants where either the
2031 upper or lower 16bits are all zeros. */
2032 else if (INT_16_BITS (INTVAL (x))
2033 || (INTVAL (x) & 0xffff) == 0
2034 || (INTVAL (x) & 0xffff0000) == 0)
2035 *total = 2;
2036 else
2037 *total = 4;
2038 return true;
2039
2040 case CONST:
2041 case LABEL_REF:
2042 case SYMBOL_REF:
2043 /* These are more costly than a CONST_INT, but we can relax them,
2044 so they're less costly than a CONST_DOUBLE. */
2045 *total = 6;
2046 return true;
2047
2048 case CONST_DOUBLE:
2049 /* We don't optimize CONST_DOUBLEs well nor do we relax them well,
2050 so their cost is very high. */
2051 *total = 8;
2052 return true;
2053
2054 /* ??? This probably needs more work. */
2055 case MOD:
2056 case DIV:
2057 case MULT:
2058 *total = 8;
2059 return true;
2060
2061 default:
2062 return false;
2063 }
2064 }
2065
2066 /* Check whether a constant used to initialize a DImode or DFmode can
2067 use a clr instruction. The code here must be kept in sync with
2068 movdf and movdi. */
2069
2070 bool
2071 mn10300_wide_const_load_uses_clr (rtx operands[2])
2072 {
2073 long val[2];
2074
2075 if (GET_CODE (operands[0]) != REG
2076 || REGNO_REG_CLASS (REGNO (operands[0])) != DATA_REGS)
2077 return false;
2078
2079 switch (GET_CODE (operands[1]))
2080 {
2081 case CONST_INT:
2082 {
2083 rtx low, high;
2084 split_double (operands[1], &low, &high);
2085 val[0] = INTVAL (low);
2086 val[1] = INTVAL (high);
2087 }
2088 break;
2089
2090 case CONST_DOUBLE:
2091 if (GET_MODE (operands[1]) == DFmode)
2092 {
2093 REAL_VALUE_TYPE rv;
2094
2095 REAL_VALUE_FROM_CONST_DOUBLE (rv, operands[1]);
2096 REAL_VALUE_TO_TARGET_DOUBLE (rv, val);
2097 }
2098 else if (GET_MODE (operands[1]) == VOIDmode
2099 || GET_MODE (operands[1]) == DImode)
2100 {
2101 val[0] = CONST_DOUBLE_LOW (operands[1]);
2102 val[1] = CONST_DOUBLE_HIGH (operands[1]);
2103 }
2104 break;
2105
2106 default:
2107 return false;
2108 }
2109
2110 return val[0] == 0 || val[1] == 0;
2111 }
2112 /* If using PIC, mark a SYMBOL_REF for a non-global symbol so that we
2113 may access it using GOTOFF instead of GOT. */
2114
2115 static void
2116 mn10300_encode_section_info (tree decl, rtx rtl, int first ATTRIBUTE_UNUSED)
2117 {
2118 rtx symbol;
2119
2120 if (GET_CODE (rtl) != MEM)
2121 return;
2122 symbol = XEXP (rtl, 0);
2123 if (GET_CODE (symbol) != SYMBOL_REF)
2124 return;
2125
2126 if (flag_pic)
2127 SYMBOL_REF_FLAG (symbol) = (*targetm.binds_local_p) (decl);
2128 }