v850-modes.def (CCZ, CCNZ): Add new modes.
[gcc.git] / gcc / config / v850 / v850.c
1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996-2018 Free Software Foundation, Inc.
3 Contributed by Jeff Law (law@cygnus.com).
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #define IN_TARGET_CODE 1
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "df.h"
31 #include "memmodel.h"
32 #include "tm_p.h"
33 #include "stringpool.h"
34 #include "attribs.h"
35 #include "insn-config.h"
36 #include "regs.h"
37 #include "emit-rtl.h"
38 #include "recog.h"
39 #include "diagnostic-core.h"
40 #include "stor-layout.h"
41 #include "varasm.h"
42 #include "calls.h"
43 #include "conditions.h"
44 #include "output.h"
45 #include "insn-attr.h"
46 #include "expr.h"
47 #include "cfgrtl.h"
48 #include "builtins.h"
49
50 /* This file should be included last. */
51 #include "target-def.h"
52
53 #ifndef streq
54 #define streq(a,b) (strcmp (a, b) == 0)
55 #endif
56
/* Forward declaration: the operand printer and the address printer
   call each other.  */
static void v850_print_operand_address (FILE *, machine_mode, rtx);

/* Names of the various data areas used on the v850.  */
const char * GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
const char * GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];

/* Track the current data area set by the data area pragma (which
   can be nested).  Tested by check_default_data_area.  */
data_area_stack_element * data_area_stack = NULL;

/* True if we don't need to check any more if the current
   function is an interrupt handler.  */
static int v850_interrupt_cache_p = FALSE;

/* Operands of the pending comparison; v850_gen_float_compare reads
   the mode of v850_compare_op0.  */
rtx v850_compare_op0, v850_compare_op1;

/* Whether current function is an interrupt handler.  */
static int v850_interrupt_p = FALSE;

/* Sections for the read-only / tiny / zero data areas, created on
   first use (GC-rooted).  */
static GTY(()) section * rosdata_section;
static GTY(()) section * rozdata_section;
static GTY(()) section * tdata_section;
static GTY(()) section * zdata_section;
static GTY(()) section * zbss_section;
81 \f
82 /* We use this to wrap all emitted insns in the prologue. */
83 static rtx
84 F (rtx x)
85 {
86 if (GET_CODE (x) != CLOBBER)
87 RTX_FRAME_RELATED_P (x) = 1;
88 return x;
89 }
90
91 /* Mark all the subexpressions of the PARALLEL rtx PAR as
92 frame-related. Return PAR.
93
94 dwarf2out.c:dwarf2out_frame_debug_expr ignores sub-expressions of a
95 PARALLEL rtx other than the first if they do not have the
96 FRAME_RELATED flag set on them. */
97
98 static rtx
99 v850_all_frame_related (rtx par)
100 {
101 int len = XVECLEN (par, 0);
102 int i;
103
104 gcc_assert (GET_CODE (par) == PARALLEL);
105 for (i = 0; i < len; i++)
106 F (XVECEXP (par, 0, i));
107
108 return par;
109 }
110
111 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
112 Specify whether to pass the argument by reference. */
113
114 static bool
115 v850_pass_by_reference (cumulative_args_t cum ATTRIBUTE_UNUSED,
116 machine_mode mode, const_tree type,
117 bool named ATTRIBUTE_UNUSED)
118 {
119 unsigned HOST_WIDE_INT size;
120
121 if (!TARGET_GCC_ABI)
122 return 0;
123
124 if (type)
125 size = int_size_in_bytes (type);
126 else
127 size = GET_MODE_SIZE (mode);
128
129 return size > 8;
130 }
131
132 /* Return an RTX to represent where an argument with mode MODE
133 and type TYPE will be passed to a function. If the result
134 is NULL_RTX, the argument will be pushed. */
135
136 static rtx
137 v850_function_arg (cumulative_args_t cum_v, machine_mode mode,
138 const_tree type, bool named)
139 {
140 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
141 rtx result = NULL_RTX;
142 int size, align;
143
144 if (!named)
145 return NULL_RTX;
146
147 if (mode == BLKmode)
148 size = int_size_in_bytes (type);
149 else
150 size = GET_MODE_SIZE (mode);
151
152 size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);
153
154 if (size < 1)
155 {
156 /* Once we have stopped using argument registers, do not start up again. */
157 cum->nbytes = 4 * UNITS_PER_WORD;
158 return NULL_RTX;
159 }
160
161 if (!TARGET_GCC_ABI)
162 align = UNITS_PER_WORD;
163 else if (size <= UNITS_PER_WORD && type)
164 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
165 else
166 align = size;
167
168 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
169
170 if (cum->nbytes > 4 * UNITS_PER_WORD)
171 return NULL_RTX;
172
173 if (type == NULL_TREE
174 && cum->nbytes + size > 4 * UNITS_PER_WORD)
175 return NULL_RTX;
176
177 switch (cum->nbytes / UNITS_PER_WORD)
178 {
179 case 0:
180 result = gen_rtx_REG (mode, 6);
181 break;
182 case 1:
183 result = gen_rtx_REG (mode, 7);
184 break;
185 case 2:
186 result = gen_rtx_REG (mode, 8);
187 break;
188 case 3:
189 result = gen_rtx_REG (mode, 9);
190 break;
191 default:
192 result = NULL_RTX;
193 }
194
195 return result;
196 }
197
198 /* Return the number of bytes which must be put into registers
199 for values which are part in registers and part in memory. */
200 static int
201 v850_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
202 tree type, bool named)
203 {
204 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
205 int size, align;
206
207 if (!named)
208 return 0;
209
210 if (mode == BLKmode)
211 size = int_size_in_bytes (type);
212 else
213 size = GET_MODE_SIZE (mode);
214
215 if (size < 1)
216 size = 1;
217
218 if (!TARGET_GCC_ABI)
219 align = UNITS_PER_WORD;
220 else if (type)
221 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
222 else
223 align = size;
224
225 cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);
226
227 if (cum->nbytes > 4 * UNITS_PER_WORD)
228 return 0;
229
230 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
231 return 0;
232
233 if (type == NULL_TREE
234 && cum->nbytes + size > 4 * UNITS_PER_WORD)
235 return 0;
236
237 return 4 * UNITS_PER_WORD - cum->nbytes;
238 }
239
240 /* Update the data in CUM to advance over an argument
241 of mode MODE and data type TYPE.
242 (TYPE is null for libcalls where that information may not be available.) */
243
244 static void
245 v850_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
246 const_tree type, bool named ATTRIBUTE_UNUSED)
247 {
248 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
249
250 if (!TARGET_GCC_ABI)
251 cum->nbytes += (((mode != BLKmode
252 ? GET_MODE_SIZE (mode)
253 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1)
254 & -UNITS_PER_WORD);
255 else
256 cum->nbytes += (((type && int_size_in_bytes (type) > 8
257 ? GET_MODE_SIZE (Pmode)
258 : (mode != BLKmode
259 ? GET_MODE_SIZE (mode)
260 : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
261 & -UNITS_PER_WORD);
262 }
263
/* Return the high and low words of a CONST_DOUBLE X via *P_HIGH and
   *P_LOW.  Handles DFmode, SFmode and integral (VOIDmode/DImode)
   CONST_DOUBLEs; any other rtx is a fatal error.  */

static void
const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
{
  if (GET_CODE (x) == CONST_DOUBLE)
    {
      long t[2];

      switch (GET_MODE (x))
	{
	case E_DFmode:
	  REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (x), t);
	  *p_high = t[1];	/* since v850 is little endian */
	  *p_low = t[0];	/* high is second word */
	  return;

	case E_SFmode:
	  /* A single float occupies only the high word; the low word
	     is zeroed.  */
	  REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), *p_high);
	  *p_low = 0;
	  return;

	case E_VOIDmode:
	case E_DImode:
	  /* Integral CONST_DOUBLEs store their two words directly.  */
	  *p_high = CONST_DOUBLE_HIGH (x);
	  *p_low = CONST_DOUBLE_LOW (x);
	  return;

	default:
	  break;
	}
    }

  fatal_insn ("const_double_split got a bad insn:", x);
}
299
300 \f
301 /* Return the cost of the rtx R with code CODE. */
302
303 static int
304 const_costs_int (HOST_WIDE_INT value, int zero_cost)
305 {
306 if (CONST_OK_FOR_I (value))
307 return zero_cost;
308 else if (CONST_OK_FOR_J (value))
309 return 1;
310 else if (CONST_OK_FOR_K (value))
311 return 2;
312 else
313 return 4;
314 }
315
316 static int
317 const_costs (rtx r, enum rtx_code c)
318 {
319 HOST_WIDE_INT high, low;
320
321 switch (c)
322 {
323 case CONST_INT:
324 return const_costs_int (INTVAL (r), 0);
325
326 case CONST_DOUBLE:
327 const_double_split (r, &high, &low);
328 if (GET_MODE (r) == SFmode)
329 return const_costs_int (high, 1);
330 else
331 return const_costs_int (high, 1) + const_costs_int (low, 1);
332
333 case SYMBOL_REF:
334 case LABEL_REF:
335 case CONST:
336 return 2;
337
338 case HIGH:
339 return 1;
340
341 default:
342 return 4;
343 }
344 }
345
346 static bool
347 v850_rtx_costs (rtx x, machine_mode mode, int outer_code,
348 int opno ATTRIBUTE_UNUSED, int *total, bool speed)
349 {
350 enum rtx_code code = GET_CODE (x);
351
352 switch (code)
353 {
354 case CONST_INT:
355 case CONST_DOUBLE:
356 case CONST:
357 case SYMBOL_REF:
358 case LABEL_REF:
359 *total = COSTS_N_INSNS (const_costs (x, code));
360 return true;
361
362 case MOD:
363 case DIV:
364 case UMOD:
365 case UDIV:
366 if (TARGET_V850E && !speed)
367 *total = 6;
368 else
369 *total = 60;
370 return true;
371
372 case MULT:
373 if (TARGET_V850E
374 && (mode == SImode || mode == HImode || mode == QImode))
375 {
376 if (GET_CODE (XEXP (x, 1)) == REG)
377 *total = 4;
378 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
379 {
380 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
381 *total = 6;
382 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
383 *total = 10;
384 }
385 }
386 else
387 *total = 20;
388 return true;
389
390 case ZERO_EXTRACT:
391 if (outer_code == COMPARE)
392 *total = 0;
393 return false;
394
395 default:
396 return false;
397 }
398 }
399 \f
/* Print operand X using operand code CODE to assembly language output file
   FILE.  The codes are:

     'b'/'B'  condition name for a comparison ('B' reversed);
     'c'/'C'  like 'b'/'B' but using the z/nz spellings for EQ/NE;
              'c' also prints bare symbols (used for .vtinherit);
     'F'/'G'  high/low word of a CONST_DOUBLE (or sign word of a CONST_INT);
     'L'      low 16 bits of a constant;
     'M'      log2 of a constant;
     'O'      data-area offset keyword (zdaoff/sdaoff/tdaoff);
     'P'      symbol carrying a data-area relocation;
     'Q'      data-area base register name (r0/gp/ep);
     'R'      second word of a double-word value;
     'S'/'T'  's' prefix when a short EP load/store can be used
              ('T' for unsigned loads);
     'W'      mode suffix (.b/.h/.w);
     '.'      register r0;
     'z'      register, or r0 for a zero constant;
     default  print X according to its rtx code.  */

static void
v850_print_operand (FILE * file, rtx x, int code)
{
  HOST_WIDE_INT high, low;

  switch (code)
    {
    case 'c':
      /* We use 'c' operands with symbols for .vtinherit.  */
      if (GET_CODE (x) == SYMBOL_REF)
	{
	  output_addr_const(file, x);
	  break;
	}
      /* Fall through.  */
    case 'b':
    case 'B':
    case 'C':
      /* 'B' and 'C' print the reversed condition.  */
      switch ((code == 'B' || code == 'C')
	      ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
	{
	case NE:
	  if (code == 'c' || code == 'C')
	    fprintf (file, "nz");
	  else
	    fprintf (file, "ne");
	  break;
	case EQ:
	  if (code == 'c' || code == 'C')
	    fprintf (file, "z");
	  else
	    fprintf (file, "e");
	  break;
	case GE:
	  fprintf (file, "ge");
	  break;
	case GT:
	  fprintf (file, "gt");
	  break;
	case LE:
	  fprintf (file, "le");
	  break;
	case LT:
	  fprintf (file, "lt");
	  break;
	case GEU:
	  fprintf (file, "nl");
	  break;
	case GTU:
	  fprintf (file, "h");
	  break;
	case LEU:
	  fprintf (file, "nh");
	  break;
	case LTU:
	  fprintf (file, "l");
	  break;
	default:
	  gcc_unreachable ();
	}
      break;
    case 'F':			/* High word of CONST_DOUBLE.  */
      switch (GET_CODE (x))
	{
	case CONST_INT:
	  /* The high word of a CONST_INT is its sign extension.  */
	  fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
	  break;

	case CONST_DOUBLE:
	  const_double_split (x, &high, &low);
	  fprintf (file, "%ld", (long) high);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    case 'G':			/* Low word of CONST_DOUBLE.  */
      switch (GET_CODE (x))
	{
	case CONST_INT:
	  fprintf (file, "%ld", (long) INTVAL (x));
	  break;

	case CONST_DOUBLE:
	  const_double_split (x, &high, &low);
	  fprintf (file, "%ld", (long) low);
	  break;

	default:
	  gcc_unreachable ();
	}
      break;
    case 'L':
      fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
      break;
    case 'M':
      fprintf (file, "%d", exact_log2 (INTVAL (x)));
      break;
    case 'O':
      gcc_assert (special_symbolref_operand (x, VOIDmode));

      if (GET_CODE (x) == CONST)
	x = XEXP (XEXP (x, 0), 0);
      else
	gcc_assert (GET_CODE (x) == SYMBOL_REF);

      if (SYMBOL_REF_ZDA_P (x))
	fprintf (file, "zdaoff");
      else if (SYMBOL_REF_SDA_P (x))
	fprintf (file, "sdaoff");
      else if (SYMBOL_REF_TDA_P (x))
	fprintf (file, "tdaoff");
      else
	gcc_unreachable ();
      break;
    case 'P':
      gcc_assert (special_symbolref_operand (x, VOIDmode));
      output_addr_const (file, x);
      break;
    case 'Q':
      gcc_assert (special_symbolref_operand (x, VOIDmode));

      if (GET_CODE (x) == CONST)
	x = XEXP (XEXP (x, 0), 0);
      else
	gcc_assert (GET_CODE (x) == SYMBOL_REF);

      /* Base register matching the symbol's data area.  */
      if (SYMBOL_REF_ZDA_P (x))
	fprintf (file, "r0");
      else if (SYMBOL_REF_SDA_P (x))
	fprintf (file, "gp");
      else if (SYMBOL_REF_TDA_P (x))
	fprintf (file, "ep");
      else
	gcc_unreachable ();
      break;
    case 'R':		/* 2nd word of a double.  */
      switch (GET_CODE (x))
	{
	case REG:
	  /* reg_names entries contain no '%', so the non-literal
	     format here is safe.  */
	  fprintf (file, reg_names[REGNO (x) + 1]);
	  break;
	case MEM:
	  {
	    machine_mode mode = GET_MODE (x);
	    x = XEXP (adjust_address (x, SImode, 4), 0);
	    v850_print_operand_address (file, mode, x);
	    /* A bare constant address is relative to r0.  */
	    if (GET_CODE (x) == CONST_INT)
	      fprintf (file, "[r0]");
	  }
	  break;

	case CONST_INT:
	  {
	    unsigned HOST_WIDE_INT v = INTVAL (x);

	    /* Trickery to avoid problems with shifting
	       32-bits at a time on a 32-bit host.  */
	    v = v >> 16;
	    v = v >> 16;
	    fprintf (file, HOST_WIDE_INT_PRINT_HEX, v);
	    break;
	  }

	case CONST_DOUBLE:
	  fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_HIGH (x));
	  break;

	default:
	  debug_rtx (x);
	  gcc_unreachable ();
	}
      break;
    case 'S':
      {
	/* If it's a reference to a TDA variable, use sst/sld vs. st/ld.  */
	if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
	  fputs ("s", file);

	break;
      }
    case 'T':
      {
	/* Like an 'S' operand above, but for unsigned loads only.  */
	if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
	  fputs ("s", file);

	break;
      }
    case 'W':			/* Print the instruction suffix.  */
      switch (GET_MODE (x))
	{
	default:
	  gcc_unreachable ();

	case E_QImode: fputs (".b", file); break;
	case E_HImode: fputs (".h", file); break;
	case E_SImode: fputs (".w", file); break;
	case E_SFmode: fputs (".w", file); break;
	}
      break;
    case '.':			/* Register r0.  */
      fputs (reg_names[0], file);
      break;
    case 'z':			/* Reg or zero.  */
      if (REG_P (x))
	fputs (reg_names[REGNO (x)], file);
      else if ((GET_MODE(x) == SImode
		|| GET_MODE(x) == DFmode
		|| GET_MODE(x) == SFmode)
	       && x == CONST0_RTX(GET_MODE(x)))
	fputs (reg_names[0], file);
      else
	{
	  gcc_assert (x == const0_rtx);
	  fputs (reg_names[0], file);
	}
      break;
    default:
      switch (GET_CODE (x))
	{
	case MEM:
	  /* A constant address is printed as offset from r0.  */
	  if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	    output_address (GET_MODE (x),
			    gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
					  XEXP (x, 0)));
	  else
	    output_address (GET_MODE (x), XEXP (x, 0));
	  break;

	case REG:
	  fputs (reg_names[REGNO (x)], file);
	  break;
	case SUBREG:
	  fputs (reg_names[subreg_regno (x)], file);
	  break;
	case CONST_DOUBLE:
	  fprintf (file, HOST_WIDE_INT_PRINT_HEX, CONST_DOUBLE_LOW (x));
	  break;

	case CONST_INT:
	case SYMBOL_REF:
	case CONST:
	case LABEL_REF:
	case CODE_LABEL:
	  v850_print_operand_address (file, VOIDmode, x);
	  break;
	default:
	  gcc_unreachable ();
	}
      break;

    }
}
658
659 \f
/* Output assembly language output for the address ADDR to FILE.
   Addresses are printed in the v850's "offset[base]" syntax; symbols
   in one of the special data areas are wrapped in the matching
   zdaoff/sdaoff/tdaoff relocation with the corresponding base
   register.  */

static void
v850_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* A bare register is an access at offset 0.  */
      fprintf (file, "0[");
      v850_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;
    case LO_SUM:
      if (GET_CODE (XEXP (addr, 0)) == REG)
	{
	  /* reg,foo */
	  fprintf (file, "lo(");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, ")[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      break;
    case PLUS:
      if (GET_CODE (XEXP (addr, 0)) == REG
	  || GET_CODE (XEXP (addr, 0)) == SUBREG)
	{
	  /* reg,foo */
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, "[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      else
	{
	  /* Symbolic base: print as a simple sum.  */
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "+");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	}
      break;
    case SYMBOL_REF:
      {
	const char *off_name = NULL;
	const char *reg_name = NULL;

	/* Pick the relocation keyword and base register for the
	   symbol's data area, if any.  */
	if (SYMBOL_REF_ZDA_P (addr))
	  {
	    off_name = "zdaoff";
	    reg_name = "r0";
	  }
	else if (SYMBOL_REF_SDA_P (addr))
	  {
	    off_name = "sdaoff";
	    reg_name = "gp";
	  }
	else if (SYMBOL_REF_TDA_P (addr))
	  {
	    off_name = "tdaoff";
	    reg_name = "ep";
	  }

	if (off_name)
	  fprintf (file, "%s(", off_name);
	output_addr_const (file, addr);
	if (reg_name)
	  fprintf (file, ")[%s]", reg_name);
      }
      break;
    case CONST:
      if (special_symbolref_operand (addr, VOIDmode))
	{
	  rtx x = XEXP (XEXP (addr, 0), 0);
	  const char *off_name;
	  const char *reg_name;

	  if (SYMBOL_REF_ZDA_P (x))
	    {
	      off_name = "zdaoff";
	      reg_name = "r0";
	    }
	  else if (SYMBOL_REF_SDA_P (x))
	    {
	      off_name = "sdaoff";
	      reg_name = "gp";
	    }
	  else if (SYMBOL_REF_TDA_P (x))
	    {
	      off_name = "tdaoff";
	      reg_name = "ep";
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "%s(", off_name);
	  output_addr_const (file, addr);
	  fprintf (file, ")[%s]", reg_name);
	}
      else
	output_addr_const (file, addr);
      break;
    default:
      output_addr_const (file, addr);
      break;
    }
}
765
/* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P.  The only
   punctuation operand code is '.', which prints register r0.  */

static bool
v850_print_operand_punct_valid_p (unsigned char code)
{
  return (code == '.');
}
771
772 /* When assemble_integer is used to emit the offsets for a switch
773 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
774 output_addr_const will normally barf at this, but it is OK to omit
775 the truncate and just emit the difference of the two labels. The
776 .hword directive will automatically handle the truncation for us.
777
778 Returns true if rtx was handled, false otherwise. */
779
780 static bool
781 v850_output_addr_const_extra (FILE * file, rtx x)
782 {
783 if (GET_CODE (x) != TRUNCATE)
784 return false;
785
786 x = XEXP (x, 0);
787
788 /* We must also handle the case where the switch table was passed a
789 constant value and so has been collapsed. In this case the first
790 label will have been deleted. In such a case it is OK to emit
791 nothing, since the table will not be used.
792 (cf gcc.c-torture/compile/990801-1.c). */
793 if (GET_CODE (x) == MINUS
794 && GET_CODE (XEXP (x, 0)) == LABEL_REF)
795 {
796 rtx_code_label *label
797 = dyn_cast<rtx_code_label *> (XEXP (XEXP (x, 0), 0));
798 if (label && label->deleted ())
799 return true;
800 }
801
802 output_addr_const (file, x);
803 return true;
804 }
805 \f
/* Return appropriate code to load up a 1, 2, or 4 integer/floating
   point value.  OPERANDS[0] is the destination, OPERANDS[1] the
   source; the returned string is an assembler template for the move.
   Unrepresentable operand combinations are a fatal error.  */

const char *
output_move_single (rtx * operands)
{
  rtx dst = operands[0];
  rtx src = operands[1];

  if (REG_P (dst))
    {
      if (REG_P (src))
	return "mov %1,%0";

      else if (GET_CODE (src) == CONST_INT)
	{
	  HOST_WIDE_INT value = INTVAL (src);

	  if (CONST_OK_FOR_J (value))		/* Signed 5-bit immediate.  */
	    return "mov %1,%0";

	  else if (CONST_OK_FOR_K (value))	/* Signed 16-bit immediate.  */
	    return "movea %1,%.,%0";

	  else if (CONST_OK_FOR_L (value))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E_UP)
	    return "mov %1,%0";
	  else
	    /* Two-insn sequence: high half then low half.  */
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
	{
	  HOST_WIDE_INT high, low;

	  /* An SFmode constant fits in the high word (see
	     const_double_split), so it is loaded like an integer.  */
	  const_double_split (src, &high, &low);

	  if (CONST_OK_FOR_J (high))		/* Signed 5-bit immediate.  */
	    return "mov %F1,%0";

	  else if (CONST_OK_FOR_K (high))	/* Signed 16-bit immediate.  */
	    return "movea %F1,%.,%0";

	  else if (CONST_OK_FOR_L (high))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%F1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E_UP)
	    return "mov %F1,%0";

	  else
	    return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
	}

      else if (GET_CODE (src) == MEM)
	/* %S1 adds the short-load prefix when EP addressing applies.  */
	return "%S1ld%W1 %1,%0";

      else if (special_symbolref_operand (src, VOIDmode))
	/* Data-area symbol: offset from the area's base register.  */
	return "movea %O1(%P1),%Q1,%0";

      else if (GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == CONST)
	{
	  if (TARGET_V850E_UP)
	    return "mov hilo(%1),%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == HIGH)
	return "movhi hi(%1),%.,%0";

      else if (GET_CODE (src) == LO_SUM)
	{
	  operands[2] = XEXP (src, 0);
	  operands[3] = XEXP (src, 1);
	  return "movea lo(%3),%2,%0";
	}
    }

  else if (GET_CODE (dst) == MEM)
    {
      if (REG_P (src))
	return "%S0st%W0 %1,%0";

      else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	/* Store zero directly from r0 (%.).  */
	return "%S0st%W0 %.,%0";

      else if (GET_CODE (src) == CONST_DOUBLE
	       && CONST0_RTX (GET_MODE (dst)) == src)
	return "%S0st%W0 %.,%0";
    }

  fatal_insn ("output_move_single:", gen_rtx_SET (dst, src));
  return "";
}
906
907 machine_mode
908 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
909 {
910 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
911 {
912 switch (cond)
913 {
914 case LE:
915 return CC_FPU_LEmode;
916 case GE:
917 return CC_FPU_GEmode;
918 case LT:
919 return CC_FPU_LTmode;
920 case GT:
921 return CC_FPU_GTmode;
922 case EQ:
923 return CC_FPU_EQmode;
924 case NE:
925 return CC_FPU_NEmode;
926 default:
927 gcc_unreachable ();
928 }
929 }
930 return CCmode;
931 }
932
933 machine_mode
934 v850_gen_float_compare (enum rtx_code cond, machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
935 {
936 if (GET_MODE (op0) == DFmode)
937 {
938 switch (cond)
939 {
940 case LE:
941 emit_insn (gen_cmpdf_le_insn (op0, op1));
942 break;
943 case GE:
944 emit_insn (gen_cmpdf_ge_insn (op0, op1));
945 break;
946 case LT:
947 emit_insn (gen_cmpdf_lt_insn (op0, op1));
948 break;
949 case GT:
950 emit_insn (gen_cmpdf_gt_insn (op0, op1));
951 break;
952 case NE:
953 /* Note: There is no NE comparison operator. So we
954 perform an EQ comparison and invert the branch.
955 See v850_float_nz_comparison for how this is done. */
956 case EQ:
957 emit_insn (gen_cmpdf_eq_insn (op0, op1));
958 break;
959 default:
960 gcc_unreachable ();
961 }
962 }
963 else if (GET_MODE (v850_compare_op0) == SFmode)
964 {
965 switch (cond)
966 {
967 case LE:
968 emit_insn (gen_cmpsf_le_insn(op0, op1));
969 break;
970 case GE:
971 emit_insn (gen_cmpsf_ge_insn(op0, op1));
972 break;
973 case LT:
974 emit_insn (gen_cmpsf_lt_insn(op0, op1));
975 break;
976 case GT:
977 emit_insn (gen_cmpsf_gt_insn(op0, op1));
978 break;
979 case NE:
980 /* Note: There is no NE comparison operator. So we
981 perform an EQ comparison and invert the branch.
982 See v850_float_nz_comparison for how this is done. */
983 case EQ:
984 emit_insn (gen_cmpsf_eq_insn(op0, op1));
985 break;
986 default:
987 gcc_unreachable ();
988 }
989 }
990 else
991 gcc_unreachable ();
992
993 return v850_select_cc_mode (cond, op0, op1);
994 }
995
996 rtx
997 v850_gen_compare (enum rtx_code cond, machine_mode mode, rtx op0, rtx op1)
998 {
999 if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
1000 {
1001 emit_insn (gen_cmpsi_insn (op0, op1));
1002 return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
1003 }
1004 else
1005 {
1006 rtx cc_reg;
1007 mode = v850_gen_float_compare (cond, mode, op0, op1);
1008 cc_reg = gen_rtx_REG (mode, CC_REGNUM);
1009 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));
1010
1011 return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
1012 }
1013 }
1014
1015 /* Return maximum offset supported for a short EP memory reference of mode
1016 MODE and signedness UNSIGNEDP. */
1017
1018 static int
1019 ep_memory_offset (machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
1020 {
1021 int max_offset = 0;
1022
1023 switch (mode)
1024 {
1025 case E_QImode:
1026 if (TARGET_SMALL_SLD)
1027 max_offset = (1 << 4);
1028 else if ((TARGET_V850E_UP)
1029 && unsignedp)
1030 max_offset = (1 << 4);
1031 else
1032 max_offset = (1 << 7);
1033 break;
1034
1035 case E_HImode:
1036 if (TARGET_SMALL_SLD)
1037 max_offset = (1 << 5);
1038 else if ((TARGET_V850E_UP)
1039 && unsignedp)
1040 max_offset = (1 << 5);
1041 else
1042 max_offset = (1 << 8);
1043 break;
1044
1045 case E_SImode:
1046 case E_SFmode:
1047 max_offset = (1 << 8);
1048 break;
1049
1050 default:
1051 break;
1052 }
1053
1054 return max_offset;
1055 }
1056
1057 /* Return true if OP is a valid short EP memory reference */
1058
1059 int
1060 ep_memory_operand (rtx op, machine_mode mode, int unsigned_load)
1061 {
1062 rtx addr, op0, op1;
1063 int max_offset;
1064 int mask;
1065
1066 /* If we are not using the EP register on a per-function basis
1067 then do not allow this optimization at all. This is to
1068 prevent the use of the SLD/SST instructions which cannot be
1069 guaranteed to work properly due to a hardware bug. */
1070 if (!TARGET_EP)
1071 return FALSE;
1072
1073 if (GET_CODE (op) != MEM)
1074 return FALSE;
1075
1076 max_offset = ep_memory_offset (mode, unsigned_load);
1077
1078 mask = GET_MODE_SIZE (mode) - 1;
1079
1080 addr = XEXP (op, 0);
1081 if (GET_CODE (addr) == CONST)
1082 addr = XEXP (addr, 0);
1083
1084 switch (GET_CODE (addr))
1085 {
1086 default:
1087 break;
1088
1089 case SYMBOL_REF:
1090 return SYMBOL_REF_TDA_P (addr);
1091
1092 case REG:
1093 return REGNO (addr) == EP_REGNUM;
1094
1095 case PLUS:
1096 op0 = XEXP (addr, 0);
1097 op1 = XEXP (addr, 1);
1098 if (GET_CODE (op1) == CONST_INT
1099 && INTVAL (op1) < max_offset
1100 && INTVAL (op1) >= 0
1101 && (INTVAL (op1) & mask) == 0)
1102 {
1103 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1104 return TRUE;
1105
1106 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1107 return TRUE;
1108 }
1109 break;
1110 }
1111
1112 return FALSE;
1113 }
1114 \f
/* Substitute memory references involving a pointer, to use the ep pointer,
   taking care to save and preserve the ep.  Rewrites the insns between
   FIRST_INSN and LAST_INSN so that the USES references through register
   REGNO go through EP instead; *P_R1 and *P_EP cache the r1/ep rtxes
   across calls (created on first use).  */

static void
substitute_ep_register (rtx_insn *first_insn,
			rtx_insn *last_insn,
			int uses,
			int regno,
			rtx * p_r1,
			rtx * p_ep)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx_insn *insn;

  if (!*p_r1)
    {
      /* r1 is used as the save slot for ep, so it is now live.  */
      df_set_regs_ever_live (1, true);
      *p_r1 = gen_rtx_REG (Pmode, 1);
      *p_ep = gen_rtx_REG (Pmode, 30);
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "\
Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
	     2 * (uses - 3), uses, reg_names[regno],
	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
	     INSN_UID (first_insn), INSN_UID (last_insn));

  if (NOTE_P (first_insn))
    first_insn = next_nonnote_insn (first_insn);

  last_insn = next_nonnote_insn (last_insn);
  for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
    {
      if (NONJUMP_INSN_P (insn))
	{
	  rtx pattern = single_set (insn);

	  /* Replace the memory references.  */
	  if (pattern)
	    {
	      rtx *p_mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* Locate the single MEM in the pattern; mem-to-mem
		 moves are left alone.  */
	      if (GET_CODE (SET_DEST (pattern)) == MEM
		  && GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = (rtx *)0;

	      else if (GET_CODE (SET_DEST (pattern)) == MEM)
		p_mem = &SET_DEST (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = &SET_SRC (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		p_mem = &XEXP (SET_SRC (pattern), 0);

	      else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		{
		  p_mem = &XEXP (SET_SRC (pattern), 0);
		  unsignedp = TRUE;
		}
	      else
		p_mem = (rtx *)0;

	      if (p_mem)
		{
		  rtx addr = XEXP (*p_mem, 0);

		  /* (mem (reg REGNO)) becomes (mem ep); a reg+const
		     address is rewritten when the constant fits the
		     short displacement range for the access mode.  */
		  if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
		    *p_mem = change_address (*p_mem, VOIDmode, *p_ep);

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && REGNO (XEXP (addr, 0)) == (unsigned) regno
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (*p_mem),
						   unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    *p_mem = change_address (*p_mem, VOIDmode,
					     gen_rtx_PLUS (Pmode,
							   *p_ep,
							   XEXP (addr, 1)));
		}
	    }
	}
    }

  /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
  insn = prev_nonnote_insn (first_insn);
  if (insn && NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SET
      && SET_DEST (PATTERN (insn)) == *p_ep
      && SET_SRC (PATTERN (insn)) == *p_r1)
    delete_insn (insn);
  else
    emit_insn_before (gen_rtx_SET (*p_r1, *p_ep), first_insn);

  /* Save ep in r1, point ep at the new base register, and restore it
     after the last rewritten insn.  */
  emit_insn_before (gen_rtx_SET (*p_ep, reg), first_insn);
  emit_insn_before (gen_rtx_SET (*p_ep, *p_r1), last_insn);
}
1220
1221 \f
1222 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1223 the -mep mode to copy heavily used pointers to ep to use the implicit
1224 addressing. */
1225
static void
v850_reorg (void)
{
  /* Per hard register: how many short-form memory references used this
     register as a base address in the current basic block, and the first
     and last insns of that run (the range substitute_ep_register will
     rewrite).  */
  struct
  {
    int uses;
    rtx_insn *first_insn;
    rtx_insn *last_insn;
  }
  regs[FIRST_PSEUDO_REGISTER];

  int i;
  int use_ep = FALSE;		/* Nonzero once ep itself is live as a base.  */
  rtx r1 = NULL_RTX;		/* Cached (reg r1), built lazily by callee.  */
  rtx ep = NULL_RTX;		/* Cached (reg ep), built lazily by callee.  */
  rtx_insn *insn;
  rtx pattern;

  /* If not ep mode, just return now.  */
  if (!TARGET_EP)
    return;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      regs[i].uses = 0;
      regs[i].first_insn = NULL;
      regs[i].last_insn = NULL;
    }

  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	/* End of basic block: anything that is not a NOTE or a plain INSN
	   (labels, jumps, calls, barriers) terminates the tracking window.
	   Commit the best candidate seen so far, then reset.  */
	default:
	  if (!use_ep)
	    {
	      int max_uses = -1;
	      int max_regno = -1;

	      /* Pick the register with the most short-form uses.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		{
		  if (max_uses < regs[i].uses)
		    {
		      max_uses = regs[i].uses;
		      max_regno = i;
		    }
		}

	      /* Only worthwhile if it pays for the copy into ep
		 (more than 3 uses).  */
	      if (max_uses > 3)
		substitute_ep_register (regs[max_regno].first_insn,
					regs[max_regno].last_insn,
					max_uses, max_regno, &r1, &ep);
	    }

	  use_ep = FALSE;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    {
	      regs[i].uses = 0;
	      regs[i].first_insn = NULL;
	      regs[i].last_insn = NULL;
	    }
	  break;

	case NOTE:
	  break;

	case INSN:
	  pattern = single_set (insn);

	  /* See if there are any memory references we can shorten.  */
	  if (pattern)
	    {
	      rtx src = SET_SRC (pattern);
	      rtx dest = SET_DEST (pattern);
	      rtx mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* We might have (SUBREG (MEM)) here, so just get rid of the
		 subregs to make this code simpler.  */
	      if (GET_CODE (dest) == SUBREG
		  && (GET_CODE (SUBREG_REG (dest)) == MEM
		      || GET_CODE (SUBREG_REG (dest)) == REG))
		alter_subreg (&dest, false);
	      if (GET_CODE (src) == SUBREG
		  && (GET_CODE (SUBREG_REG (src)) == MEM
		      || GET_CODE (SUBREG_REG (src)) == REG))
		alter_subreg (&src, false);

	      /* Locate the single memory operand, if any.  A mem-to-mem
		 move has no single candidate, so it is skipped.  */
	      if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
		mem = NULL_RTX;

	      else if (GET_CODE (dest) == MEM)
		mem = dest;

	      else if (GET_CODE (src) == MEM)
		mem = src;

	      else if (GET_CODE (src) == SIGN_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		mem = XEXP (src, 0);

	      else if (GET_CODE (src) == ZERO_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		{
		  mem = XEXP (src, 0);
		  unsignedp = TRUE;
		}
	      else
		mem = NULL_RTX;

	      /* Already an ep-relative reference: ep is in use here.  */
	      if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
		use_ep = TRUE;

	      else if (!use_ep && mem
		       && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
		{
		  rtx addr = XEXP (mem, 0);
		  int regno = -1;
		  int short_p;

		  /* (reg) or (plus (reg) (small const)) addresses would
		     fit the short ep-relative form; count them.  */
		  if (GET_CODE (addr) == REG)
		    {
		      short_p = TRUE;
		      regno = REGNO (addr);
		    }

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (mem), unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    {
		      short_p = TRUE;
		      regno = REGNO (XEXP (addr, 0));
		    }

		  else
		    short_p = FALSE;

		  if (short_p)
		    {
		      regs[regno].uses++;
		      regs[regno].last_insn = insn;
		      if (!regs[regno].first_insn)
			regs[regno].first_insn = insn;
		    }
		}

	      /* Loading up a register in the basic block zaps any savings
		 for the register.  */
	      if (GET_CODE (dest) == REG)
		{
		  int regno;
		  int endregno;

		  regno = REGNO (dest);
		  endregno = END_REGNO (dest);

		  if (!use_ep)
		    {
		      /* See if we can use the pointer before this
			 modification.  */
		      int max_uses = -1;
		      int max_regno = -1;

		      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			{
			  if (max_uses < regs[i].uses)
			    {
			      max_uses = regs[i].uses;
			      max_regno = i;
			    }
			}

		      /* Commit only if the best candidate is one of the
			 registers being overwritten right now.  */
		      if (max_uses > 3
			  && max_regno >= regno
			  && max_regno < endregno)
			{
			  substitute_ep_register (regs[max_regno].first_insn,
						  regs[max_regno].last_insn,
						  max_uses, max_regno, &r1,
						  &ep);

			  /* Since we made a substitution, zap all remembered
			     registers.  */
			  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			    {
			      regs[i].uses = 0;
			      regs[i].first_insn = NULL;
			      regs[i].last_insn = NULL;
			    }
			}
		    }

		  /* The overwritten registers start fresh.  */
		  for (i = regno; i < endregno; i++)
		    {
		      regs[i].uses = 0;
		      regs[i].first_insn = NULL;
		      regs[i].last_insn = NULL;
		    }
		}
	    }
	}
    }
}
1434
1435 /* # of registers saved by the interrupt handler. */
1436 #define INTERRUPT_FIXED_NUM 5
1437
1438 /* # of bytes for registers saved by the interrupt handler. */
1439 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1440
1441 /* # of words saved for other registers. */
1442 #define INTERRUPT_ALL_SAVE_NUM \
1443 (30 - INTERRUPT_FIXED_NUM)
1444
1445 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1446
/* Return the number of bytes of stack needed to save the call-saved
   registers of the current function.  If P_REG_SAVED is non-NULL, also
   store there a bitmask with bit N set for each register N that must be
   saved/restored by the prologue and epilogue (registers with fixed uses
   in interrupt handlers contribute to the size but not to the mask).  */

int
compute_register_save_size (long * p_reg_saved)
{
  int size = 0;
  int i;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  /* If the link pointer is ever live, this function makes calls, so an
     interrupt handler must preserve every register.  */
  int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
  long reg_saved = 0;

  /* Count space for the register saves.  */
  if (interrupt_handler)
    {
      for (i = 0; i <= 31; i++)
	switch (i)
	  {
	  default:
	    if (df_regs_ever_live_p (i) || call_p)
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	    break;

	  /* We don't save/restore r0 or the stack pointer.  */
	  case 0:
	  case STACK_POINTER_REGNUM:
	    break;

	  /* For registers with fixed use, we save them, set them to the
	     appropriate value, and then restore them.
	     These registers are handled specially, so don't list them
	     on the list of registers to save in the prologue.  */
	  case 1:		/* temp used to hold ep */
	  case 4:		/* gp */
	  case 10:		/* temp used to call interrupt save/restore */
	  case 11:		/* temp used to call interrupt save/restore (long call) */
	  case EP_REGNUM:	/* ep */
	    size += 4;
	    break;
	  }
    }
  else
    {
      /* Find the first register that needs to be saved.  */
      for (i = 0; i <= 31; i++)
	if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					|| i == LINK_POINTER_REGNUM))
	  break;

      /* If it is possible that an out-of-line helper function might be
	 used to generate the prologue for the current function, then we
	 need to cover the possibility that such a helper function will
	 be used, despite the fact that there might be gaps in the list of
	 registers that need to be saved.  To detect this we note that the
	 helper functions always push at least register r29 (provided
	 that the function is not an interrupt handler).  */

      if (TARGET_PROLOG_FUNCTION
	  && (i == 2 || ((i >= 20) && (i < 30))))
	{
	  if (i == 2)
	    {
	      size += 4;
	      reg_saved |= 1L << i;

	      /* r2 is handled separately; the helper range starts at r20.  */
	      i = 20;
	    }

	  /* Helper functions save all registers between the starting
	     register and the last register, regardless of whether they
	     are actually used by the function or not.  */
	  for (; i <= 29; i++)
	    {
	      size += 4;
	      reg_saved |= 1L << i;
	    }

	  if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
	    {
	      size += 4;
	      reg_saved |= 1L << LINK_POINTER_REGNUM;
	    }
	}
      else
	{
	  /* No helper: save exactly the live call-saved registers.  */
	  for (; i <= 31; i++)
	    if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					    || i == LINK_POINTER_REGNUM))
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	}
    }

  if (p_reg_saved)
    *p_reg_saved = reg_saved;

  return size;
}
1547
1548 /* Typical stack layout should looks like this after the function's prologue:
1549
1550 | |
1551 -- ^
1552 | | \ |
1553 | | arguments saved | Increasing
1554 | | on the stack | addresses
1555 PARENT arg pointer -> | | /
1556 -------------------------- ---- -------------------
1557 | | - space for argument split between regs & stack
1558 --
1559 CHILD | | \ <-- (return address here)
1560 | | other call
1561 | | saved registers
1562 | | /
1563 --
1564 frame pointer -> | | \ ___
1565 | | local |
1566 | | variables |f
1567 | | / |r
1568 -- |a
1569 | | \ |m
1570 | | outgoing |e
1571 | | arguments | | Decreasing
1572 (hard) frame pointer | | / | | addresses
1573 and stack pointer -> | | / _|_ |
1574 -------------------------- ---- ------------------ V */
1575
1576 int
1577 compute_frame_size (poly_int64 size, long * p_reg_saved)
1578 {
1579 return (size
1580 + compute_register_save_size (p_reg_saved)
1581 + crtl->outgoing_args_size);
1582 }
1583
1584 static int
1585 use_prolog_function (int num_save, int frame_size)
1586 {
1587 int alloc_stack = (4 * num_save);
1588 int unalloc_stack = frame_size - alloc_stack;
1589 int save_func_len, restore_func_len;
1590 int save_normal_len, restore_normal_len;
1591
1592 if (! TARGET_DISABLE_CALLT)
1593 save_func_len = restore_func_len = 2;
1594 else
1595 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
1596
1597 if (unalloc_stack)
1598 {
1599 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1600 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1601 }
1602
1603 /* See if we would have used ep to save the stack. */
1604 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1605 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1606 else
1607 save_normal_len = restore_normal_len = 4 * num_save;
1608
1609 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1610 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1611
1612 /* Don't bother checking if we don't actually save any space.
1613 This happens for instance if one register is saved and additional
1614 stack space is allocated. */
1615 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
1616 }
1617
1618 static void
1619 increment_stack (signed int amount, bool in_prologue)
1620 {
1621 rtx inc;
1622
1623 if (amount == 0)
1624 return;
1625
1626 inc = GEN_INT (amount);
1627
1628 if (! CONST_OK_FOR_K (amount))
1629 {
1630 rtx reg = gen_rtx_REG (Pmode, 12);
1631
1632 inc = emit_move_insn (reg, inc);
1633 if (in_prologue)
1634 F (inc);
1635 inc = reg;
1636 }
1637
1638 inc = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, inc));
1639 if (in_prologue)
1640 F (inc);
1641 }
1642
/* Emit RTL for the current function's prologue: save interrupt context
   if needed, push call-saved registers (via an out-of-line helper when
   profitable), allocate the stack frame, and set up the frame pointer.  */

void
expand_prologue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  unsigned int actual_fsize;
  unsigned int init_stack_alloc = 0;
  rtx save_regs[32];
  rtx save_all;
  unsigned int num_save;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  long reg_saved = 0;

  actual_fsize = compute_frame_size (size, &reg_saved);

  if (flag_stack_usage_info)
    current_function_static_stack_size = actual_fsize;

  /* Save/setup global registers for interrupt functions right now.  */
  if (interrupt_handler)
    {
      if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	emit_insn (gen_callt_save_interrupt ());
      else
	emit_insn (gen_save_interrupt ());

      /* The interrupt save insns above already account for this space.  */
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;

      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;

      /* Interrupt functions are not passed arguments, so no need to
	 allocate space for split structure arguments.  */
      gcc_assert (crtl->args.pretend_args_size == 0);
    }

  /* Identify all of the saved registers.  */
  num_save = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	save_regs[num_save++] = gen_rtx_REG (Pmode, i);
    }

  /* Space for "split" structure arguments is allocated separately from
     the rest of the frame (and always by inline code).  */
  if (crtl->args.pretend_args_size)
    {
      if (num_save == 0)
	{
	  increment_stack (- (actual_fsize + crtl->args.pretend_args_size), true);
	  actual_fsize = 0;
	}
      else
	increment_stack (- crtl->args.pretend_args_size, true);
    }

  /* See if we have an insn that allocates stack space and saves the particular
     registers we want to.  Note that the helpers won't
     allocate additional space for registers GCC saves to complete a
     "split" structure argument.  */
  save_all = NULL_RTX;
  if (TARGET_PROLOG_FUNCTION
      && !crtl->args.pretend_args_size
      && num_save > 0)
    {
      if (use_prolog_function (num_save, actual_fsize))
	{
	  int alloc_stack = 4 * num_save;
	  int offset = 0;

	  /* Build a PARALLEL describing the helper's effect: one stack
	     adjustment, one store per register, plus clobbers for the
	     temporaries the jarl-based helpers use (r10, and r11 for
	     long calls).  */
	  save_all = gen_rtx_PARALLEL
	    (VOIDmode,
	     rtvec_alloc (num_save + 1
			  + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));

	  XVECEXP (save_all, 0, 0)
	    = gen_rtx_SET (stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT(-alloc_stack)));
	  for (i = 0; i < num_save; i++)
	    {
	      offset -= 4;
	      XVECEXP (save_all, 0, i+1)
		= gen_rtx_SET (gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))),
			       save_regs[i]);
	    }

	  if (TARGET_DISABLE_CALLT)
	    {
	      XVECEXP (save_all, 0, num_save + 1)
		= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));

	      if (TARGET_LONG_CALLS)
		XVECEXP (save_all, 0, num_save + 2)
		  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
	    }

	  v850_all_frame_related (save_all);

	  /* Only use the PARALLEL if it actually matches a pattern;
	     otherwise fall back to the one-by-one path below.  */
	  code = recog (save_all, NULL, NULL);
	  if (code >= 0)
	    {
	      rtx insn = emit_insn (save_all);
	      INSN_CODE (insn) = code;
	      actual_fsize -= alloc_stack;

	    }
	  else
	    save_all = NULL_RTX;
	}
    }

  /* If no prolog save function is available, store the registers the old
     fashioned way (one by one).  */
  if (!save_all)
    {
      /* Special case interrupt functions that save all registers for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	    emit_insn (gen_callt_save_all_interrupt ());
	  else
	    emit_insn (gen_save_all_interrupt ());
	}
      else
	{
	  int offset;
	  /* If the stack is too big, allocate it in chunks so we can do the
	     register saves.  We use the register save size so we use the ep
	     register.  */
	  if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	    init_stack_alloc = compute_register_save_size (NULL);
	  else
	    init_stack_alloc = actual_fsize;

	  /* Save registers at the beginning of the stack frame.  */
	  offset = init_stack_alloc - 4;

	  if (init_stack_alloc)
	    increment_stack (- (signed) init_stack_alloc, true);

	  /* Save the return pointer first.  */
	  if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
	    {
	      F (emit_move_insn (gen_rtx_MEM (SImode,
					      plus_constant (Pmode,
							     stack_pointer_rtx,
							     offset)),
				 save_regs[--num_save]));
	      offset -= 4;
	    }

	  for (i = 0; i < num_save; i++)
	    {
	      F (emit_move_insn (gen_rtx_MEM (SImode,
					      plus_constant (Pmode,
							     stack_pointer_rtx,
							     offset)),
				 save_regs[i]));
	      offset -= 4;
	    }
	}
    }

  /* Allocate the rest of the stack that was not allocated above (either it is
     > 32K or we just called a function to save the registers and needed more
     stack.  */
  if (actual_fsize > init_stack_alloc)
    increment_stack (init_stack_alloc - actual_fsize, true);

  /* If we need a frame pointer, set it up now.  */
  if (frame_pointer_needed)
    F (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
}
1821 \f
1822
/* Emit RTL for the current function's epilogue: undo the frame
   allocation, restore the saved registers (via an out-of-line helper
   when profitable), and emit the appropriate return insn.  Mirrors
   expand_prologue.  */

void
expand_epilogue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  long reg_saved = 0;
  int actual_fsize = compute_frame_size (size, &reg_saved);
  rtx restore_regs[32];
  rtx restore_all;
  unsigned int num_restore;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);

  /* Eliminate the initial stack stored by interrupt functions.  */
  if (interrupt_handler)
    {
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Cut off any dynamic stack created.  */
  if (frame_pointer_needed)
    emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);

  /* Identify all of the saved registers.  */
  num_restore = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
    }

  /* See if we have an insn that restores the particular registers we
     want to.  */
  restore_all = NULL_RTX;

  if (TARGET_PROLOG_FUNCTION
      && num_restore > 0
      && !crtl->args.pretend_args_size
      && !interrupt_handler)
    {
      int alloc_stack = (4 * num_restore);

      /* Don't bother checking if we don't actually save any space.  */
      if (use_prolog_function (num_restore, actual_fsize))
	{
	  int offset;
	  /* Build a PARALLEL describing the restore helper: a return,
	     one stack adjustment, and one load per register.  */
	  restore_all = gen_rtx_PARALLEL (VOIDmode,
					  rtvec_alloc (num_restore + 2));
	  XVECEXP (restore_all, 0, 0) = ret_rtx;
	  XVECEXP (restore_all, 0, 1)
	    = gen_rtx_SET (stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT (alloc_stack)));

	  offset = alloc_stack - 4;
	  for (i = 0; i < num_restore; i++)
	    {
	      XVECEXP (restore_all, 0, i+2)
		= gen_rtx_SET (restore_regs[i],
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))));
	      offset -= 4;
	    }

	  /* Only use the PARALLEL if it matches a pattern; otherwise
	     fall back to the one-by-one path below.  */
	  code = recog (restore_all, NULL, NULL);

	  if (code >= 0)
	    {
	      rtx insn;

	      /* Release the part of the frame the helper doesn't.  */
	      actual_fsize -= alloc_stack;
	      increment_stack (actual_fsize, false);

	      insn = emit_jump_insn (restore_all);
	      INSN_CODE (insn) = code;
	    }
	  else
	    restore_all = NULL_RTX;
	}
    }

  /* If no epilogue save function is available, restore the registers the
     old fashioned way (one by one).  */
  if (!restore_all)
    {
      unsigned int init_stack_free;

      /* If the stack is large, we need to cut it down in 2 pieces.  */
      if (interrupt_handler)
	init_stack_free = 0;
      else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	init_stack_free = 4 * num_restore;
      else
	init_stack_free = (signed) actual_fsize;

      /* Deallocate the rest of the stack if it is > 32K.  */
      if ((unsigned int) actual_fsize > init_stack_free)
	increment_stack (actual_fsize - init_stack_free, false);

      /* Special case interrupt functions that save all registers
	 for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT)
	    emit_insn (gen_callt_restore_all_interrupt ());
	  else
	    emit_insn (gen_restore_all_interrupt ());
	}
      else
	{
	  /* Restore registers from the beginning of the stack frame.  */
	  int offset = init_stack_free - 4;

	  /* Restore the return pointer first.  */
	  if (num_restore > 0
	      && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (restore_regs[--num_restore],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));
	      offset -= 4;
	    }

	  for (i = 0; i < num_restore; i++)
	    {
	      emit_move_insn (restore_regs[i],
			      gen_rtx_MEM (SImode,
					   plus_constant (Pmode,
							  stack_pointer_rtx,
							  offset)));

	      /* Keep the restore from being deleted as dead.  */
	      emit_use (restore_regs[i]);
	      offset -= 4;
	    }

	  /* Cut back the remainder of the stack.  */
	  increment_stack (init_stack_free + crtl->args.pretend_args_size,
			   false);
	}

      /* And return or use reti for interrupt handlers.  */
      if (interrupt_handler)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E_UP))
	    emit_insn (gen_callt_return_interrupt ());
	  else
	    emit_jump_insn (gen_return_interrupt ());
	}
      else if (actual_fsize)
	emit_jump_insn (gen_return_internal ());
      else
	emit_jump_insn (gen_return_simple ());
    }

  /* Invalidate the cached interrupt-function answer; the next function
     must recompute it.  */
  v850_interrupt_cache_p = FALSE;
  v850_interrupt_p = FALSE;
}
1987
1988 /* Retrieve the data area that has been chosen for the given decl. */
1989
1990 v850_data_area
1991 v850_get_data_area (tree decl)
1992 {
1993 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
1994 return DATA_AREA_SDA;
1995
1996 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
1997 return DATA_AREA_TDA;
1998
1999 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2000 return DATA_AREA_ZDA;
2001
2002 return DATA_AREA_NORMAL;
2003 }
2004
2005 /* Store the indicated data area in the decl's attributes. */
2006
2007 static void
2008 v850_set_data_area (tree decl, v850_data_area data_area)
2009 {
2010 tree name;
2011
2012 switch (data_area)
2013 {
2014 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2015 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2016 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2017 default:
2018 return;
2019 }
2020
2021 DECL_ATTRIBUTES (decl) = tree_cons
2022 (name, NULL, DECL_ATTRIBUTES (decl));
2023 }
2024 \f
2025 /* Handle an "interrupt" attribute; arguments as in
2026 struct attribute_spec.handler. */
2027 static tree
2028 v850_handle_interrupt_attribute (tree *node, tree name,
2029 tree args ATTRIBUTE_UNUSED,
2030 int flags ATTRIBUTE_UNUSED,
2031 bool * no_add_attrs)
2032 {
2033 if (TREE_CODE (*node) != FUNCTION_DECL)
2034 {
2035 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2036 name);
2037 *no_add_attrs = true;
2038 }
2039
2040 return NULL_TREE;
2041 }
2042
2043 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2044 struct attribute_spec.handler. */
2045 static tree
2046 v850_handle_data_area_attribute (tree *node, tree name,
2047 tree args ATTRIBUTE_UNUSED,
2048 int flags ATTRIBUTE_UNUSED,
2049 bool * no_add_attrs)
2050 {
2051 v850_data_area data_area;
2052 v850_data_area area;
2053 tree decl = *node;
2054
2055 /* Implement data area attribute. */
2056 if (is_attribute_p ("sda", name))
2057 data_area = DATA_AREA_SDA;
2058 else if (is_attribute_p ("tda", name))
2059 data_area = DATA_AREA_TDA;
2060 else if (is_attribute_p ("zda", name))
2061 data_area = DATA_AREA_ZDA;
2062 else
2063 gcc_unreachable ();
2064
2065 switch (TREE_CODE (decl))
2066 {
2067 case VAR_DECL:
2068 if (current_function_decl != NULL_TREE)
2069 {
2070 error_at (DECL_SOURCE_LOCATION (decl),
2071 "data area attributes cannot be specified for "
2072 "local variables");
2073 *no_add_attrs = true;
2074 }
2075
2076 /* FALLTHRU */
2077
2078 case FUNCTION_DECL:
2079 area = v850_get_data_area (decl);
2080 if (area != DATA_AREA_NORMAL && data_area != area)
2081 {
2082 error ("data area of %q+D conflicts with previous declaration",
2083 decl);
2084 *no_add_attrs = true;
2085 }
2086 break;
2087
2088 default:
2089 break;
2090 }
2091
2092 return NULL_TREE;
2093 }
2094
2095 \f
2096 /* Return nonzero if FUNC is an interrupt function as specified
2097 by the "interrupt" attribute. */
2098
2099 int
2100 v850_interrupt_function_p (tree func)
2101 {
2102 tree a;
2103 int ret = 0;
2104
2105 if (v850_interrupt_cache_p)
2106 return v850_interrupt_p;
2107
2108 if (TREE_CODE (func) != FUNCTION_DECL)
2109 return 0;
2110
2111 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2112 if (a != NULL_TREE)
2113 ret = 1;
2114
2115 else
2116 {
2117 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2118 ret = a != NULL_TREE;
2119 }
2120
2121 /* Its not safe to trust global variables until after function inlining has
2122 been done. */
2123 if (reload_completed | reload_in_progress)
2124 v850_interrupt_p = ret;
2125
2126 return ret;
2127 }
2128
2129 \f
/* Propagate DECL's data area choice into SYMBOL's SYMBOL_REF flags so
   later address legitimization can see it.  If DECL has no explicit
   data area attribute, one may first be inferred from its section name
   or from the -m{zda,sda,tda}= size thresholds.  */

static void
v850_encode_data_area (tree decl, rtx symbol)
{
  int flags;

  /* Map explicit sections into the appropriate attribute.  */
  if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
    {
      if (DECL_SECTION_NAME (decl))
	{
	  const char *name = DECL_SECTION_NAME (decl);

	  if (streq (name, ".zdata") || streq (name, ".zbss"))
	    v850_set_data_area (decl, DATA_AREA_ZDA);

	  else if (streq (name, ".sdata") || streq (name, ".sbss"))
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (streq (name, ".tdata"))
	    v850_set_data_area (decl, DATA_AREA_TDA);
	}

      /* If no attribute, support -m{zda,sda,tda}=n */
      else
	{
	  int size = int_size_in_bytes (TREE_TYPE (decl));
	  /* Unknown or zero size: leave the decl in the normal area.  */
	  if (size <= 0)
	    ;

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
	    v850_set_data_area (decl, DATA_AREA_TDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
	    v850_set_data_area (decl, DATA_AREA_ZDA);
	}

      /* Nothing was inferred: no flags to set.  */
      if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
	return;
    }

  flags = SYMBOL_REF_FLAGS (symbol);
  switch (v850_get_data_area (decl))
    {
    case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
    case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
    case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
    default: gcc_unreachable ();
    }
  SYMBOL_REF_FLAGS (symbol) = flags;
}
2183
2184 static void
2185 v850_encode_section_info (tree decl, rtx rtl, int first)
2186 {
2187 default_encode_section_info (decl, rtl, first);
2188
2189 if (TREE_CODE (decl) == VAR_DECL
2190 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2191 v850_encode_data_area (decl, XEXP (rtl, 0));
2192 }
2193
2194 /* Construct a JR instruction to a routine that will perform the equivalent of
2195 the RTL passed in as an argument. This RTL is a function epilogue that
2196 pops registers off the stack and possibly releases some extra stack space
2197 as well. The code has already verified that the RTL matches these
2198 requirements. */
2199
char *
construct_restore_jr (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  /* Returned to the caller; overwritten on each call.  XXX fixed size.  */
  static char buff [100];

  /* OP must contain at least a return, a stack adjustment and one
     register restore.  */
  if (count <= 2)
    {
      error ("bogus JR construction: %d", count);
      return NULL;
    }

  /* Work out how many bytes to pop off the stack before retrieving
     registers.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);

  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));

  /* Each pop will remove 4 bytes from the stack....  */
  stack_bytes -= (count - 2) * 4;

  /* The out-of-line __return helpers only release the register save
     area itself, so any extra stack adjustment is a bug in the caller.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_DEST (vector_element));
    }

  /* Scan for the first register to pop.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to pop.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JR anyway.  We will
     be popping more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__return_%s", reg_names [first]);
      else
	sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);

      /* The helper may be out of jr range: build its address in r6 and
	 jump indirectly.  */
      sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jr __return_%s", reg_names [first]);
      else
	sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
    }

  return buff;
}
2298
2299
2300 /* Construct a JARL instruction to a routine that will perform the equivalent
2301 of the RTL passed as a parameter. This RTL is a function prologue that
2302 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2303 some stack space as well. The code has already verified that the RTL
2304 matches these requirements. */
char *
construct_save_jarl (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  /* Returned to the caller; overwritten on each call.  XXX fixed size.  */
  static char buff [100];

  /* OP must contain a stack adjustment, at least one register store and
     the clobber(s) of the call temporaries.  */
  if (count <= (TARGET_LONG_CALLS ? 3 : 2))
    {
      error ("bogus JARL construction: %d", count);
      return NULL;
    }

  /* Paranoia.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);

  /* Work out how many bytes to push onto the stack after storing the
     registers.  */
  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));

  /* Each push will put 4 bytes from the stack....  */
  stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;

  /* The out-of-line __save helpers only allocate the register save
     area itself, so any extra stack adjustment is a bug in the caller.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_SRC (vector_element));
    }

  /* Scan for the first register to push.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to push.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JARL anyway.  We will
     be pushing more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__save_%s", reg_names [first]);
      else
	sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);

      /* The helper may be out of jarl range: build its address in r11
	 and jump indirectly, manufacturing the return address in r10.  */
      if (TARGET_V850E3V5_UP)
	sprintf (buff, "mov hilo(%s), r11\n\tjarl [r11], r10", name);
      else
	sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
		 name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
      else
	sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
		 reg_names [last]);
    }

  return buff;
}
2409
2410 /* A version of asm_output_aligned_bss() that copes with the special
2411 data areas of the v850. */
2412 void
2413 v850_output_aligned_bss (FILE * file,
2414 tree decl,
2415 const char * name,
2416 unsigned HOST_WIDE_INT size,
2417 int align)
2418 {
2419 switch (v850_get_data_area (decl))
2420 {
2421 case DATA_AREA_ZDA:
2422 switch_to_section (zbss_section);
2423 break;
2424
2425 case DATA_AREA_SDA:
2426 switch_to_section (sbss_section);
2427 break;
2428
2429 case DATA_AREA_TDA:
2430 switch_to_section (tdata_section);
2431 break;
2432
2433 default:
2434 switch_to_section (bss_section);
2435 break;
2436 }
2437
2438 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2439 #ifdef ASM_DECLARE_OBJECT_NAME
2440 last_assemble_variable_decl = decl;
2441 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2442 #else
2443 /* Standard thing is just output label for the object. */
2444 ASM_OUTPUT_LABEL (file, name);
2445 #endif /* ASM_DECLARE_OBJECT_NAME */
2446 ASM_OUTPUT_SKIP (file, size ? size : 1);
2447 }
2448
2449 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2450 void
2451 v850_output_common (FILE * file,
2452 tree decl,
2453 const char * name,
2454 int size,
2455 int align)
2456 {
2457 if (decl == NULL_TREE)
2458 {
2459 fprintf (file, "%s", COMMON_ASM_OP);
2460 }
2461 else
2462 {
2463 switch (v850_get_data_area (decl))
2464 {
2465 case DATA_AREA_ZDA:
2466 fprintf (file, "%s", ZCOMMON_ASM_OP);
2467 break;
2468
2469 case DATA_AREA_SDA:
2470 fprintf (file, "%s", SCOMMON_ASM_OP);
2471 break;
2472
2473 case DATA_AREA_TDA:
2474 fprintf (file, "%s", TCOMMON_ASM_OP);
2475 break;
2476
2477 default:
2478 fprintf (file, "%s", COMMON_ASM_OP);
2479 break;
2480 }
2481 }
2482
2483 assemble_name (file, name);
2484 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2485 }
2486
2487 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2488 void
2489 v850_output_local (FILE * file,
2490 tree decl,
2491 const char * name,
2492 int size,
2493 int align)
2494 {
2495 fprintf (file, "%s", LOCAL_ASM_OP);
2496 assemble_name (file, name);
2497 fprintf (file, "\n");
2498
2499 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2500 }
2501
/* Add data area to the given declaration if a ghs data area pragma is
   currently in effect (#pragma ghs startXXX/endXXX).  */
static void
v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
{
  /* Apply the data area selected by an active pragma, but only to
     file-scope variable/constant declarations that do not already
     have an explicit data area.  */
  if (data_area_stack
      && data_area_stack->data_area
      && current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
      && v850_get_data_area (decl) == DATA_AREA_NORMAL)
    v850_set_data_area (decl, data_area_stack->data_area);

  /* Initialize the default names of the v850 specific sections,
     if this has not been done before.  */

  if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
    {
      GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
	= ".sdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
	= ".rosdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
	= ".tdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
	= ".zdata";

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
	= ".rozdata";
    }

  /* For file-scope objects and functions that are defined here (or are
     tentative definitions) and have no explicit section, choose a GHS
     section kind and possibly attach a section attribute for it.  */
  if (current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL
	  || TREE_CODE (decl) == CONST_DECL
	  || TREE_CODE (decl) == FUNCTION_DECL)
      && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
      && !DECL_SECTION_NAME (decl))
    {
      enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
      const char * chosen_section;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	kind = GHS_SECTION_KIND_TEXT;
      else
	{
	  /* First choose a section kind based on the data area of the decl. */
	  switch (v850_get_data_area (decl))
	    {
	    default:
	      gcc_unreachable ();

	    case DATA_AREA_SDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROSDATA
		      : GHS_SECTION_KIND_SDATA);
	      break;

	    case DATA_AREA_TDA:
	      kind = GHS_SECTION_KIND_TDATA;
	      break;

	    case DATA_AREA_ZDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROZDATA
		      : GHS_SECTION_KIND_ZDATA);
	      break;

	    case DATA_AREA_NORMAL:	/* default data area */
	      if (TREE_READONLY (decl))
		kind = GHS_SECTION_KIND_RODATA;
	      else if (DECL_INITIAL (decl))
		kind = GHS_SECTION_KIND_DATA;
	      else
		kind = GHS_SECTION_KIND_BSS;
	    }
	}

      /* Now, if the section kind has been explicitly renamed,
	 then attach a section attribute. */
      chosen_section = GHS_current_section_names [(int) kind];

      /* Otherwise, if this kind of section needs an explicit section
	 attribute, then also attach one. */
      if (chosen_section == NULL)
	chosen_section = GHS_default_section_names [(int) kind];

      if (chosen_section)
	{
	  /* Only set the section name if specified by a pragma, because
	     otherwise it will force those variables to get allocated storage
	     in this module, rather than by the linker. */
	  set_decl_section_name (decl, chosen_section);
	}
    }
}
2599
2600 /* Construct a DISPOSE instruction that is the equivalent of
2601 the given RTX. We have already verified that this should
2602 be possible. */
2603
2604 char *
2605 construct_dispose_instruction (rtx op)
2606 {
2607 int count = XVECLEN (op, 0);
2608 int stack_bytes;
2609 unsigned long int mask;
2610 int i;
2611 static char buff[ 100 ]; /* XXX */
2612 int use_callt = 0;
2613
2614 if (count <= 2)
2615 {
2616 error ("bogus DISPOSE construction: %d", count);
2617 return NULL;
2618 }
2619
2620 /* Work out how many bytes to pop off the
2621 stack before retrieving registers. */
2622 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2623 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2624 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2625
2626 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2627
2628 /* Each pop will remove 4 bytes from the stack.... */
2629 stack_bytes -= (count - 2) * 4;
2630
2631 /* Make sure that the amount we are popping
2632 will fit into the DISPOSE instruction. */
2633 if (stack_bytes > 128)
2634 {
2635 error ("too much stack space to dispose of: %d", stack_bytes);
2636 return NULL;
2637 }
2638
2639 /* Now compute the bit mask of registers to push. */
2640 mask = 0;
2641
2642 for (i = 2; i < count; i++)
2643 {
2644 rtx vector_element = XVECEXP (op, 0, i);
2645
2646 gcc_assert (GET_CODE (vector_element) == SET);
2647 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2648 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2649 SImode));
2650
2651 if (REGNO (SET_DEST (vector_element)) == 2)
2652 use_callt = 1;
2653 else
2654 mask |= 1 << REGNO (SET_DEST (vector_element));
2655 }
2656
2657 if (! TARGET_DISABLE_CALLT
2658 && (use_callt || stack_bytes == 0))
2659 {
2660 if (use_callt)
2661 {
2662 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2663 return buff;
2664 }
2665 else
2666 {
2667 for (i = 20; i < 32; i++)
2668 if (mask & (1 << i))
2669 break;
2670
2671 if (i == 31)
2672 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2673 else
2674 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2675 i, (mask & (1 << 31)) ? "31c" : "29");
2676 }
2677 }
2678 else
2679 {
2680 static char regs [100]; /* XXX */
2681 int done_one;
2682
2683 /* Generate the DISPOSE instruction. Note we could just issue the
2684 bit mask as a number as the assembler can cope with this, but for
2685 the sake of our readers we turn it into a textual description. */
2686 regs[0] = 0;
2687 done_one = 0;
2688
2689 for (i = 20; i < 32; i++)
2690 {
2691 if (mask & (1 << i))
2692 {
2693 int first;
2694
2695 if (done_one)
2696 strcat (regs, ", ");
2697 else
2698 done_one = 1;
2699
2700 first = i;
2701 strcat (regs, reg_names[ first ]);
2702
2703 for (i++; i < 32; i++)
2704 if ((mask & (1 << i)) == 0)
2705 break;
2706
2707 if (i > first + 1)
2708 {
2709 strcat (regs, " - ");
2710 strcat (regs, reg_names[ i - 1 ] );
2711 }
2712 }
2713 }
2714
2715 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2716 }
2717
2718 return buff;
2719 }
2720
2721 /* Construct a PREPARE instruction that is the equivalent of
2722 the given RTL. We have already verified that this should
2723 be possible. */
2724
2725 char *
2726 construct_prepare_instruction (rtx op)
2727 {
2728 int count;
2729 int stack_bytes;
2730 unsigned long int mask;
2731 int i;
2732 static char buff[ 100 ]; /* XXX */
2733 int use_callt = 0;
2734
2735 if (XVECLEN (op, 0) <= 1)
2736 {
2737 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2738 return NULL;
2739 }
2740
2741 /* Work out how many bytes to push onto
2742 the stack after storing the registers. */
2743 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2744 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2745 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2746
2747 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2748
2749
2750 /* Make sure that the amount we are popping
2751 will fit into the DISPOSE instruction. */
2752 if (stack_bytes < -128)
2753 {
2754 error ("too much stack space to prepare: %d", stack_bytes);
2755 return NULL;
2756 }
2757
2758 /* Now compute the bit mask of registers to push. */
2759 count = 0;
2760 mask = 0;
2761 for (i = 1; i < XVECLEN (op, 0); i++)
2762 {
2763 rtx vector_element = XVECEXP (op, 0, i);
2764
2765 if (GET_CODE (vector_element) == CLOBBER)
2766 continue;
2767
2768 gcc_assert (GET_CODE (vector_element) == SET);
2769 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2770 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2771 SImode));
2772
2773 if (REGNO (SET_SRC (vector_element)) == 2)
2774 use_callt = 1;
2775 else
2776 mask |= 1 << REGNO (SET_SRC (vector_element));
2777 count++;
2778 }
2779
2780 stack_bytes += count * 4;
2781
2782 if ((! TARGET_DISABLE_CALLT)
2783 && (use_callt || stack_bytes == 0))
2784 {
2785 if (use_callt)
2786 {
2787 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2788 return buff;
2789 }
2790
2791 for (i = 20; i < 32; i++)
2792 if (mask & (1 << i))
2793 break;
2794
2795 if (i == 31)
2796 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2797 else
2798 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2799 i, (mask & (1 << 31)) ? "31c" : "29");
2800 }
2801 else
2802 {
2803 static char regs [100]; /* XXX */
2804 int done_one;
2805
2806
2807 /* Generate the PREPARE instruction. Note we could just issue the
2808 bit mask as a number as the assembler can cope with this, but for
2809 the sake of our readers we turn it into a textual description. */
2810 regs[0] = 0;
2811 done_one = 0;
2812
2813 for (i = 20; i < 32; i++)
2814 {
2815 if (mask & (1 << i))
2816 {
2817 int first;
2818
2819 if (done_one)
2820 strcat (regs, ", ");
2821 else
2822 done_one = 1;
2823
2824 first = i;
2825 strcat (regs, reg_names[ first ]);
2826
2827 for (i++; i < 32; i++)
2828 if ((mask & (1 << i)) == 0)
2829 break;
2830
2831 if (i > first + 1)
2832 {
2833 strcat (regs, " - ");
2834 strcat (regs, reg_names[ i - 1 ] );
2835 }
2836 }
2837 }
2838
2839 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2840 }
2841
2842 return buff;
2843 }
2844
2845 /* Return an RTX indicating where the return address to the
2846 calling function can be found. */
2847
2848 rtx
2849 v850_return_addr (int count)
2850 {
2851 if (count != 0)
2852 return const0_rtx;
2853
2854 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2855 }
2856 \f
/* Implement TARGET_ASM_INIT_SECTIONS.

   Create the v850-specific output sections: read-only (flag "a")
   variants of the small and zero data areas, and writable ("aw")
   tiny/zero data and zero-bss sections.  */

static void
v850_asm_init_sections (void)
{
  rosdata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .rosdata,\"a\"");

  rozdata_section
    = get_unnamed_section (0, output_section_asm_op,
			   "\t.section .rozdata,\"a\"");

  tdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .tdata,\"aw\"");

  zdata_section
    = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
			   "\t.section .zdata,\"aw\"");

  /* Uninitialized data in the zero data area.  */
  zbss_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
			   output_section_asm_op,
			   "\t.section .zbss,\"aw\"");
}
2883
2884 static section *
2885 v850_select_section (tree exp,
2886 int reloc ATTRIBUTE_UNUSED,
2887 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2888 {
2889 if (TREE_CODE (exp) == VAR_DECL)
2890 {
2891 int is_const;
2892 if (!TREE_READONLY (exp)
2893 || TREE_SIDE_EFFECTS (exp)
2894 || !DECL_INITIAL (exp)
2895 || (DECL_INITIAL (exp) != error_mark_node
2896 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2897 is_const = FALSE;
2898 else
2899 is_const = TRUE;
2900
2901 switch (v850_get_data_area (exp))
2902 {
2903 case DATA_AREA_ZDA:
2904 return is_const ? rozdata_section : zdata_section;
2905
2906 case DATA_AREA_TDA:
2907 return tdata_section;
2908
2909 case DATA_AREA_SDA:
2910 return is_const ? rosdata_section : sdata_section;
2911
2912 default:
2913 return is_const ? readonly_data_section : data_section;
2914 }
2915 }
2916 return readonly_data_section;
2917 }
2918 \f
2919 /* Worker function for TARGET_FUNCTION_VALUE_REGNO_P. */
2920
2921 static bool
2922 v850_function_value_regno_p (const unsigned int regno)
2923 {
2924 return (regno == RV_REGNUM);
2925 }
2926
2927 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2928
2929 static bool
2930 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2931 {
2932 /* Return values > 8 bytes in length in memory. */
2933 return int_size_in_bytes (type) > 8
2934 || TYPE_MODE (type) == BLKmode
2935 /* With the rh850 ABI return all aggregates in memory. */
2936 || ((! TARGET_GCC_ABI) && AGGREGATE_TYPE_P (type))
2937 ;
2938 }
2939
2940 /* Worker function for TARGET_FUNCTION_VALUE. */
2941
2942 static rtx
2943 v850_function_value (const_tree valtype,
2944 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
2945 bool outgoing ATTRIBUTE_UNUSED)
2946 {
2947 return gen_rtx_REG (TYPE_MODE (valtype), RV_REGNUM);
2948 }
2949
2950 /* Implement TARGET_LIBCALL_VALUE. */
2951
2952 static rtx
2953 v850_libcall_value (machine_mode mode,
2954 const_rtx func ATTRIBUTE_UNUSED)
2955 {
2956 return gen_rtx_REG (mode, RV_REGNUM);
2957 }
2958
2959 \f
2960 /* Worker function for TARGET_CAN_ELIMINATE. */
2961
2962 static bool
2963 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
2964 {
2965 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
2966 }
2967
/* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.

   If TARGET_APP_REGS is not defined then add r2 and r5 to
   the pool of fixed registers.  See PR 14505.  */

static void
v850_conditional_register_usage (void)
{
  /* NOTE(review): this hook only ever clears the fixed bits, so r2 and
     r5 are presumably marked fixed in the default register tables and
     released here when TARGET_APP_REGS is set; r2 is kept call-saved
     (call_used 0) while r5 becomes call-clobbered (call_used 1) --
     confirm against the register tables in v850.h.  */
  if (TARGET_APP_REGS)
    {
     fixed_regs[2] = 0;  call_used_regs[2] = 0;
     fixed_regs[5] = 0;  call_used_regs[5] = 1;
    }
}
2982 \f
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.

   Emit the fixed code part of the trampoline; the two trailing
   .long slots are filled in by v850_trampoline_init.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  /* One write of the whole template; the emitted bytes are identical
     to issuing each line separately.  */
  fputs ("\tjarl .+4,r12\n"
	 "\tld.w 12[r12],r20\n"
	 "\tld.w 16[r12],r12\n"
	 "\tjmp [r12]\n"
	 "\tnop\n"
	 "\t.long 0\n"
	 "\t.long 0\n", f);
}
2996
2997 /* Worker function for TARGET_TRAMPOLINE_INIT. */
2998
2999 static void
3000 v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
3001 {
3002 rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);
3003
3004 emit_block_move (m_tramp, assemble_trampoline_template (),
3005 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3006
3007 mem = adjust_address (m_tramp, SImode, 16);
3008 emit_move_insn (mem, chain_value);
3009 mem = adjust_address (m_tramp, SImode, 20);
3010 emit_move_insn (mem, fnaddr);
3011 }
3012
3013 static int
3014 v850_issue_rate (void)
3015 {
3016 return (TARGET_V850E2_UP ? 2 : 1);
3017 }
3018
3019 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3020
3021 static bool
3022 v850_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3023 {
3024 return (GET_CODE (x) == CONST_DOUBLE
3025 || !(GET_CODE (x) == CONST
3026 && GET_CODE (XEXP (x, 0)) == PLUS
3027 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3028 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3029 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
3030 }
3031
3032 /* Helper function for `v850_legitimate_address_p'. */
3033
3034 static bool
3035 v850_reg_ok_for_base_p (const_rtx reg, bool strict_p)
3036 {
3037 if (strict_p)
3038 {
3039 return REGNO_OK_FOR_BASE_P (REGNO (reg));
3040 } else {
3041 return true;
3042 }
3043 }
3044
3045 /* Accept either REG or SUBREG where a register is valid. */
3046
3047 static bool
3048 v850_rtx_ok_for_base_p (const_rtx x, bool strict_p)
3049 {
3050 return ((REG_P (x) && v850_reg_ok_for_base_p (x, strict_p))
3051 || (SUBREG_P (x) && REG_P (SUBREG_REG (x))
3052 && v850_reg_ok_for_base_p (SUBREG_REG (x), strict_p)));
3053 }
3054
/* Implement TARGET_LEGITIMATE_ADDRESS_P.  */

static bool
v850_legitimate_address_p (machine_mode mode, rtx x, bool strict_p,
			   addr_space_t as ATTRIBUTE_UNUSED)
{
  gcc_assert (ADDR_SPACE_GENERIC_P (as));

  /* A bare base register (REG or SUBREG of REG).  */
  if (v850_rtx_ok_for_base_p (x, strict_p))
    return true;
  /* A constant address, suitably aligned for the access size.
     NOTE(review): INTVAL is applied to whatever CONSTANT_ADDRESS_P
     accepts, so this presumably relies on that macro matching only
     CONST_INTs on this target -- confirm before reusing.  */
  if (CONSTANT_ADDRESS_P (x)
      && (mode == QImode || INTVAL (x) % 2 == 0)
      && (GET_MODE_SIZE (mode) <= 4 || INTVAL (x) % 4 == 0))
    return true;
  /* (lo_sum reg const): a CONST_INT low part must additionally be
     even (except for byte access) and satisfy the 'K' constraint.  */
  if (GET_CODE (x) == LO_SUM
      && REG_P (XEXP (x, 0))
      && v850_reg_ok_for_base_p (XEXP (x, 0), strict_p)
      && CONSTANT_P (XEXP (x, 1))
      && (!CONST_INT_P (XEXP (x, 1))
	  || ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
	      && constraint_satisfied_p (XEXP (x, 1), CONSTRAINT_K)))
      && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode))
    return true;
  /* Symbols recognised by special_symbolref_operand, for accesses of
     at most word size.  */
  if (special_symbolref_operand (x, mode)
      && (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (word_mode)))
    return true;
  /* (plus base offset): the offset plus the mode's full extent must
     still fall within the 'K' range.  */
  if (GET_CODE (x) == PLUS
      && v850_rtx_ok_for_base_p (XEXP (x, 0), strict_p)
      && constraint_satisfied_p (XEXP (x,1), CONSTRAINT_K)
      && ((mode == QImode || INTVAL (XEXP (x, 1)) % 2 == 0)
	  && CONST_OK_FOR_K (INTVAL (XEXP (x, 1))
			     + (GET_MODE_NUNITS (mode) * UNITS_PER_WORD))))
    return true;

  return false;
}
3091
3092 static int
3093 v850_memory_move_cost (machine_mode mode,
3094 reg_class_t reg_class ATTRIBUTE_UNUSED,
3095 bool in)
3096 {
3097 switch (GET_MODE_SIZE (mode))
3098 {
3099 case 0:
3100 return in ? 24 : 8;
3101 case 1:
3102 case 2:
3103 case 3:
3104 case 4:
3105 return in ? 6 : 2;
3106 default:
3107 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3108 }
3109 }
3110
3111 int
3112 v850_adjust_insn_length (rtx_insn *insn, int length)
3113 {
3114 if (TARGET_V850E3V5_UP)
3115 {
3116 if (CALL_P (insn))
3117 {
3118 if (TARGET_LONG_CALLS)
3119 {
3120 /* call_internal_long, call_value_internal_long. */
3121 if (length == 8)
3122 length = 4;
3123 if (length == 16)
3124 length = 10;
3125 }
3126 else
3127 {
3128 /* call_internal_short, call_value_internal_short. */
3129 if (length == 8)
3130 length = 4;
3131 }
3132 }
3133 }
3134 return length;
3135 }
3136 \f
/* V850 specific attributes.  */

static const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  /* "interrupt_handler" and "interrupt" are two spellings of the same
     attribute -- both are dispatched to the same handler.  */
  { "interrupt_handler", 0, 0, true, false, false, false,
    v850_handle_interrupt_attribute, NULL },
  { "interrupt", 0, 0, true, false, false, false,
    v850_handle_interrupt_attribute, NULL },
  /* Data area attributes: place a declaration in the small ("sda"),
     tiny ("tda") or zero ("zda") data area.  */
  { "sda", 0, 0, true, false, false, false,
    v850_handle_data_area_attribute, NULL },
  { "tda", 0, 0, true, false, false, false,
    v850_handle_data_area_attribute, NULL },
  { "zda", 0, 0, true, false, false, false,
    v850_handle_data_area_attribute, NULL },
  /* Table terminator.  */
  { NULL, 0, 0, false, false, false, false, NULL, NULL }
};
3155 \f
3156 static void
3157 v850_option_override (void)
3158 {
3159 if (flag_exceptions || flag_non_call_exceptions)
3160 flag_omit_frame_pointer = 0;
3161
3162 /* The RH850 ABI does not (currently) support the use of the CALLT instruction. */
3163 if (! TARGET_GCC_ABI)
3164 target_flags |= MASK_DISABLE_CALLT;
3165 }
3166 \f
3167 const char *
3168 v850_gen_movdi (rtx * operands)
3169 {
3170 if (REG_P (operands[0]))
3171 {
3172 if (REG_P (operands[1]))
3173 {
3174 if (REGNO (operands[0]) == (REGNO (operands[1]) - 1))
3175 return "mov %1, %0; mov %R1, %R0";
3176
3177 return "mov %R1, %R0; mov %1, %0";
3178 }
3179
3180 if (MEM_P (operands[1]))
3181 {
3182 if (REGNO (operands[0]) & 1)
3183 /* Use two load word instructions to synthesise a load double. */
3184 return "ld.w %1, %0 ; ld.w %R1, %R0" ;
3185
3186 return "ld.dw %1, %0";
3187 }
3188
3189 return "mov %1, %0; mov %R1, %R0";
3190 }
3191
3192 gcc_assert (REG_P (operands[1]));
3193
3194 if (REGNO (operands[1]) & 1)
3195 /* Use two store word instructions to synthesise a store double. */
3196 return "st.w %1, %0 ; st.w %R1, %R0 ";
3197
3198 return "st.dw %1, %0";
3199 }
3200
3201 /* Implement TARGET_HARD_REGNO_MODE_OK. */
3202
3203 static bool
3204 v850_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
3205 {
3206 return GET_MODE_SIZE (mode) <= 4 || ((regno & 1) == 0 && regno != 0);
3207 }
3208
3209 /* Implement TARGET_MODES_TIEABLE_P. */
3210
3211 static bool
3212 v850_modes_tieable_p (machine_mode mode1, machine_mode mode2)
3213 {
3214 return (mode1 == mode2
3215 || (GET_MODE_SIZE (mode1) <= 4 && GET_MODE_SIZE (mode2) <= 4));
3216 }
3217 \f
/* Initialize the GCC target structure.  Each pair below overrides a
   default target hook with the v850-specific implementation defined
   in this file (or a generic hook_* helper).  */

/* Option handling and costs.  */
#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE v850_option_override

#undef TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

/* Assembly output.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attributes and section placement.  */
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them. */
#undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

/* Costs, reorg and scheduling.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Calling conventions.  */
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE v850_libcall_value

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

/* Trampolines.  */
#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

/* Constants, addresses and register/mode queries.  */
#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
#define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P v850_legitimate_address_p

#undef TARGET_CAN_USE_DOLOOP_P
#define TARGET_CAN_USE_DOLOOP_P can_use_doloop_if_innermost

#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK v850_hard_regno_mode_ok

#undef TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P v850_modes_tieable_p

/* Instantiate the target hook vector from the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-v850.h"