config.gcc: Unify V850 architecture options and add support for newer V850 architectures.
[gcc.git] / gcc / config / v850 / v850.c
1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "output.h"
33 #include "insn-attr.h"
34 #include "flags.h"
35 #include "recog.h"
36 #include "expr.h"
37 #include "function.h"
38 #include "diagnostic-core.h"
39 #include "ggc.h"
40 #include "integrate.h"
41 #include "tm_p.h"
42 #include "target.h"
43 #include "target-def.h"
44 #include "df.h"
45 #include "opts.h"
46
47 #ifndef streq
48 #define streq(a,b) (strcmp (a, b) == 0)
49 #endif
50
/* Forward declaration; defined later in this file but used by
   v850_print_operand.  */
static void v850_print_operand_address (FILE *, rtx);

/* Information about the various small memory areas.  */
/* Physical upper bounds of each small memory area, indexed by
   enum small_memory_type.  A -m{tda,sda,zda}= value larger than the
   corresponding entry is rejected in v850_handle_memory_option.  */
static const int small_memory_physical_max[(int) SMALL_MEMORY_max] =
{
  256,
  65536,
  32768,
};

/* Names of the various data areas used on the v850.  */
tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];

/* Track the current data area set by the data area pragma (which
   can be nested).  Tested by check_default_data_area.  */
data_area_stack_element * data_area_stack = NULL;

/* True if we don't need to check any more if the current
   function is an interrupt handler.  */
static int v850_interrupt_cache_p = FALSE;

/* Operands of the pending comparison.  NOTE(review): v850_compare_op0
   is read in v850_gen_float_compare below - presumably set by the
   compare expanders in the machine description; confirm.  */
rtx v850_compare_op0, v850_compare_op1;

/* Whether current function is an interrupt handler.  */
static int v850_interrupt_p = FALSE;

/* Sections for the various v850 small data areas.  */
static GTY(()) section * rosdata_section;
static GTY(()) section * rozdata_section;
static GTY(()) section * tdata_section;
static GTY(()) section * zdata_section;
static GTY(()) section * zbss_section;
83 \f
84 /* Set the maximum size of small memory area TYPE to the value given
85 by SIZE in structure OPTS (option text OPT passed at location LOC). */
86
87 static void
88 v850_handle_memory_option (enum small_memory_type type,
89 struct gcc_options *opts, const char *opt,
90 int size, location_t loc)
91 {
92 if (size > small_memory_physical_max[type])
93 error_at (loc, "value passed in %qs is too large", opt);
94 else
95 opts->x_small_memory_max[type] = size;
96 }
97
/* Implement TARGET_HANDLE_OPTION.  */

static bool
v850_handle_option (struct gcc_options *opts,
		    struct gcc_options *opts_set ATTRIBUTE_UNUSED,
		    const struct cl_decoded_option *decoded,
		    location_t loc)
{
  size_t code = decoded->opt_index;
  int value = decoded->value;

  switch (code)
    {
    case OPT_mspace:
      /* -mspace implies both -mep and -mprolog-function.  */
      opts->x_target_flags |= MASK_EP | MASK_PROLOG_FUNCTION;
      return true;

    case OPT_mv850:
      /* MASK_CPU ^ MASK_V850 is the set of CPU-selection bits other
	 than the one being chosen; clearing them selects this CPU
	 while leaving its own bit untouched.  Same pattern below.  */
      opts->x_target_flags &= ~(MASK_CPU ^ MASK_V850);
      return true;

    case OPT_mv850e:
    case OPT_mv850e1:
    case OPT_mv850es:
      /* The v850e1 and v850es variants share the v850e mask.  */
      opts->x_target_flags &= ~(MASK_CPU ^ MASK_V850E);
      return true;

    case OPT_mv850e2:
      opts->x_target_flags &= ~(MASK_CPU ^ MASK_V850E2);
      return true;

    case OPT_mv850e2v3:
      opts->x_target_flags &= ~(MASK_CPU ^ MASK_V850E2V3);
      return true;

    case OPT_mtda_:
      /* -mtda=N, -msda=N, -mzda=N set the maximum object size for the
	 corresponding small memory area; see v850_handle_memory_option.  */
      v850_handle_memory_option (SMALL_MEMORY_TDA, opts,
				 decoded->orig_option_with_args_text,
				 value, loc);
      return true;

    case OPT_msda_:
      v850_handle_memory_option (SMALL_MEMORY_SDA, opts,
				 decoded->orig_option_with_args_text,
				 value, loc);
      return true;

    case OPT_mzda_:
      v850_handle_memory_option (SMALL_MEMORY_ZDA, opts,
				 decoded->orig_option_with_args_text,
				 value, loc);
      return true;

    default:
      /* All other options are handled by the generic machinery.  */
      return true;
    }
}
155
/* Implement TARGET_OPTION_OPTIMIZATION_TABLE.  */

static const struct default_options v850_option_optimization_table[] =
{
  /* Frame pointer elimination is enabled at -O1 and above.  */
  { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
  /* Note - we no longer enable MASK_EP when optimizing.  This is
     because of a hardware bug which stops the SLD and SST instructions
     from correctly detecting some hazards.  If the user is sure that
     their hardware is fixed or that their program will not encounter
     the conditions that trigger the bug then they can enable -mep by
     hand.  */
  { OPT_LEVELS_1_PLUS, OPT_mprolog_function, NULL, 1 },
  /* Terminating entry.  */
  { OPT_LEVELS_NONE, 0, NULL, 0 }
};
170
171 /* Handle the TARGET_PASS_BY_REFERENCE target hook.
172 Specify whether to pass the argument by reference. */
173
174 static bool
175 v850_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
176 enum machine_mode mode, const_tree type,
177 bool named ATTRIBUTE_UNUSED)
178 {
179 unsigned HOST_WIDE_INT size;
180
181 if (type)
182 size = int_size_in_bytes (type);
183 else
184 size = GET_MODE_SIZE (mode);
185
186 return size > 8;
187 }
188
189 /* Implementing the Varargs Macros. */
190
191 static bool
192 v850_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
193 {
194 return !TARGET_GHS ? true : false;
195 }
196
/* Return an RTX to represent where an argument with mode MODE
   and type TYPE will be passed to a function.  If the result
   is NULL_RTX, the argument will be pushed.  */

static rtx
v850_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
		   const_tree type, bool named)
{
  rtx result = NULL_RTX;
  int size, align;

  /* Unnamed (variadic) arguments are never passed in registers.  */
  if (!named)
    return NULL_RTX;

  if (mode == BLKmode)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  /* Round the size up to a whole number of words.  */
  size = (size + UNITS_PER_WORD -1) & ~(UNITS_PER_WORD -1);

  if (size < 1)
    {
      /* Once we have stopped using argument registers, do not start up again.  */
      cum->nbytes = 4 * UNITS_PER_WORD;
      return NULL_RTX;
    }

  /* Word-or-smaller arguments use their type alignment; larger ones
     are aligned to their own (rounded) size.  */
  if (size <= UNITS_PER_WORD && type)
    align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  else
    align = size;

  /* Align the starting byte offset of this argument.  */
  cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);

  /* Only the first four words of arguments can go in registers.  */
  if (cum->nbytes > 4 * UNITS_PER_WORD)
    return NULL_RTX;

  if (type == NULL_TREE
      && cum->nbytes + size > 4 * UNITS_PER_WORD)
    return NULL_RTX;

  /* Argument registers are r6 through r9, chosen by word offset.  */
  switch (cum->nbytes / UNITS_PER_WORD)
    {
    case 0:
      result = gen_rtx_REG (mode, 6);
      break;
    case 1:
      result = gen_rtx_REG (mode, 7);
      break;
    case 2:
      result = gen_rtx_REG (mode, 8);
      break;
    case 3:
      result = gen_rtx_REG (mode, 9);
      break;
    default:
      result = NULL_RTX;
    }

  return result;
}
259
/* Return the number of bytes which must be put into registers
   for values which are part in registers and part in memory.  */
static int
v850_arg_partial_bytes (CUMULATIVE_ARGS * cum, enum machine_mode mode,
			tree type, bool named)
{
  int size, align;

  /* Under the GHS ABI unnamed arguments never occupy registers.  */
  if (TARGET_GHS && !named)
    return 0;

  if (mode == BLKmode)
    size = int_size_in_bytes (type);
  else
    size = GET_MODE_SIZE (mode);

  if (size < 1)
    size = 1;

  if (type)
    align = TYPE_ALIGN (type) / BITS_PER_UNIT;
  else
    align = size;

  /* Align the starting byte offset of this argument.  */
  cum->nbytes = (cum->nbytes + align - 1) & ~ (align - 1);

  /* Already past the four argument-register words: fully in memory.  */
  if (cum->nbytes > 4 * UNITS_PER_WORD)
    return 0;

  /* Fits entirely within the argument registers.  */
  if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
    return 0;

  if (type == NULL_TREE
      && cum->nbytes + size > 4 * UNITS_PER_WORD)
    return 0;

  /* The argument straddles the boundary: the leading part, up to the
     end of the register area, goes in registers.  */
  return 4 * UNITS_PER_WORD - cum->nbytes;
}
298
/* Update the data in CUM to advance over an argument
   of mode MODE and data type TYPE.
   (TYPE is null for libcalls where that information may not be available.)  */

static void
v850_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
			   const_tree type, bool named ATTRIBUTE_UNUSED)
{
  /* Values wider than 8 bytes are passed by reference (see
     v850_pass_by_reference), so they advance CUM only by the size of
     a pointer.  Everything else advances by its own size, rounded up
     to a whole number of words.  */
  cum->nbytes += (((type && int_size_in_bytes (type) > 8
		    ? GET_MODE_SIZE (Pmode)
		    : (mode != BLKmode
		       ? GET_MODE_SIZE (mode)
		       : int_size_in_bytes (type))) + UNITS_PER_WORD - 1)
		  & -UNITS_PER_WORD);
}
314
/* Return the high and low words of a CONST_DOUBLE X in *P_HIGH and
   *P_LOW.  Calls fatal_insn if X is not a CONST_DOUBLE of DFmode,
   SFmode, DImode or VOIDmode.  */

static void
const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
{
  if (GET_CODE (x) == CONST_DOUBLE)
    {
      long t[2];
      REAL_VALUE_TYPE rv;

      switch (GET_MODE (x))
	{
	case DFmode:
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
	  *p_high = t[1];	/* since v850 is little endian */
	  *p_low = t[0];	/* high is second word */
	  return;

	case SFmode:
	  /* A single float fits in one word, returned in *P_HIGH;
	     the low word is unused and cleared.  */
	  REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
	  REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
	  *p_low = 0;
	  return;

	case VOIDmode:
	case DImode:
	  /* Integral CONST_DOUBLE: words are stored directly.  */
	  *p_high = CONST_DOUBLE_HIGH (x);
	  *p_low = CONST_DOUBLE_LOW (x);
	  return;

	default:
	  break;
	}
    }

  fatal_insn ("const_double_split got a bad insn:", x);
}
353
354 \f
355 /* Return the cost of the rtx R with code CODE. */
356
357 static int
358 const_costs_int (HOST_WIDE_INT value, int zero_cost)
359 {
360 if (CONST_OK_FOR_I (value))
361 return zero_cost;
362 else if (CONST_OK_FOR_J (value))
363 return 1;
364 else if (CONST_OK_FOR_K (value))
365 return 2;
366 else
367 return 4;
368 }
369
/* Return the cost of the constant rtx R, whose code is C.  */

static int
const_costs (rtx r, enum rtx_code c)
{
  HOST_WIDE_INT high, low;

  switch (c)
    {
    case CONST_INT:
      return const_costs_int (INTVAL (r), 0);

    case CONST_DOUBLE:
      /* An SFmode constant fits in a single word (stored in HIGH by
	 const_double_split); wider constants cost both halves.  */
      const_double_split (r, &high, &low);
      if (GET_MODE (r) == SFmode)
	return const_costs_int (high, 1);
      else
	return const_costs_int (high, 1) + const_costs_int (low, 1);

    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
      return 2;

    case HIGH:
      return 1;

    default:
      return 4;
    }
}
399
400 static bool
401 v850_rtx_costs (rtx x,
402 int codearg,
403 int outer_code ATTRIBUTE_UNUSED,
404 int * total, bool speed)
405 {
406 enum rtx_code code = (enum rtx_code) codearg;
407
408 switch (code)
409 {
410 case CONST_INT:
411 case CONST_DOUBLE:
412 case CONST:
413 case SYMBOL_REF:
414 case LABEL_REF:
415 *total = COSTS_N_INSNS (const_costs (x, code));
416 return true;
417
418 case MOD:
419 case DIV:
420 case UMOD:
421 case UDIV:
422 if (TARGET_V850E && !speed)
423 *total = 6;
424 else
425 *total = 60;
426 return true;
427
428 case MULT:
429 if (TARGET_V850E
430 && ( GET_MODE (x) == SImode
431 || GET_MODE (x) == HImode
432 || GET_MODE (x) == QImode))
433 {
434 if (GET_CODE (XEXP (x, 1)) == REG)
435 *total = 4;
436 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
437 {
438 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
439 *total = 6;
440 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
441 *total = 10;
442 }
443 }
444 else
445 *total = 20;
446 return true;
447
448 case ZERO_EXTRACT:
449 if (outer_code == COMPARE)
450 *total = 0;
451 return false;
452
453 default:
454 return false;
455 }
456 }
457 \f
458 /* Print operand X using operand code CODE to assembly language output file
459 FILE. */
460
461 static void
462 v850_print_operand (FILE * file, rtx x, int code)
463 {
464 HOST_WIDE_INT high, low;
465
466 switch (code)
467 {
468 case 'c':
469 /* We use 'c' operands with symbols for .vtinherit */
470 if (GET_CODE (x) == SYMBOL_REF)
471 {
472 output_addr_const(file, x);
473 break;
474 }
475 /* fall through */
476 case 'b':
477 case 'B':
478 case 'C':
479 switch ((code == 'B' || code == 'C')
480 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
481 {
482 case NE:
483 if (code == 'c' || code == 'C')
484 fprintf (file, "nz");
485 else
486 fprintf (file, "ne");
487 break;
488 case EQ:
489 if (code == 'c' || code == 'C')
490 fprintf (file, "z");
491 else
492 fprintf (file, "e");
493 break;
494 case GE:
495 fprintf (file, "ge");
496 break;
497 case GT:
498 fprintf (file, "gt");
499 break;
500 case LE:
501 fprintf (file, "le");
502 break;
503 case LT:
504 fprintf (file, "lt");
505 break;
506 case GEU:
507 fprintf (file, "nl");
508 break;
509 case GTU:
510 fprintf (file, "h");
511 break;
512 case LEU:
513 fprintf (file, "nh");
514 break;
515 case LTU:
516 fprintf (file, "l");
517 break;
518 default:
519 gcc_unreachable ();
520 }
521 break;
522 case 'F': /* high word of CONST_DOUBLE */
523 switch (GET_CODE (x))
524 {
525 case CONST_INT:
526 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
527 break;
528
529 case CONST_DOUBLE:
530 const_double_split (x, &high, &low);
531 fprintf (file, "%ld", (long) high);
532 break;
533
534 default:
535 gcc_unreachable ();
536 }
537 break;
538 case 'G': /* low word of CONST_DOUBLE */
539 switch (GET_CODE (x))
540 {
541 case CONST_INT:
542 fprintf (file, "%ld", (long) INTVAL (x));
543 break;
544
545 case CONST_DOUBLE:
546 const_double_split (x, &high, &low);
547 fprintf (file, "%ld", (long) low);
548 break;
549
550 default:
551 gcc_unreachable ();
552 }
553 break;
554 case 'L':
555 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
556 break;
557 case 'M':
558 fprintf (file, "%d", exact_log2 (INTVAL (x)));
559 break;
560 case 'O':
561 gcc_assert (special_symbolref_operand (x, VOIDmode));
562
563 if (GET_CODE (x) == CONST)
564 x = XEXP (XEXP (x, 0), 0);
565 else
566 gcc_assert (GET_CODE (x) == SYMBOL_REF);
567
568 if (SYMBOL_REF_ZDA_P (x))
569 fprintf (file, "zdaoff");
570 else if (SYMBOL_REF_SDA_P (x))
571 fprintf (file, "sdaoff");
572 else if (SYMBOL_REF_TDA_P (x))
573 fprintf (file, "tdaoff");
574 else
575 gcc_unreachable ();
576 break;
577 case 'P':
578 gcc_assert (special_symbolref_operand (x, VOIDmode));
579 output_addr_const (file, x);
580 break;
581 case 'Q':
582 gcc_assert (special_symbolref_operand (x, VOIDmode));
583
584 if (GET_CODE (x) == CONST)
585 x = XEXP (XEXP (x, 0), 0);
586 else
587 gcc_assert (GET_CODE (x) == SYMBOL_REF);
588
589 if (SYMBOL_REF_ZDA_P (x))
590 fprintf (file, "r0");
591 else if (SYMBOL_REF_SDA_P (x))
592 fprintf (file, "gp");
593 else if (SYMBOL_REF_TDA_P (x))
594 fprintf (file, "ep");
595 else
596 gcc_unreachable ();
597 break;
598 case 'R': /* 2nd word of a double. */
599 switch (GET_CODE (x))
600 {
601 case REG:
602 fprintf (file, reg_names[REGNO (x) + 1]);
603 break;
604 case MEM:
605 x = XEXP (adjust_address (x, SImode, 4), 0);
606 v850_print_operand_address (file, x);
607 if (GET_CODE (x) == CONST_INT)
608 fprintf (file, "[r0]");
609 break;
610
611 default:
612 break;
613 }
614 break;
615 case 'S':
616 {
617 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
618 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
619 fputs ("s", file);
620
621 break;
622 }
623 case 'T':
624 {
625 /* Like an 'S' operand above, but for unsigned loads only. */
626 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
627 fputs ("s", file);
628
629 break;
630 }
631 case 'W': /* print the instruction suffix */
632 switch (GET_MODE (x))
633 {
634 default:
635 gcc_unreachable ();
636
637 case QImode: fputs (".b", file); break;
638 case HImode: fputs (".h", file); break;
639 case SImode: fputs (".w", file); break;
640 case SFmode: fputs (".w", file); break;
641 }
642 break;
643 case '.': /* register r0 */
644 fputs (reg_names[0], file);
645 break;
646 case 'z': /* reg or zero */
647 if (GET_CODE (x) == REG)
648 fputs (reg_names[REGNO (x)], file);
649 else if ((GET_MODE(x) == SImode
650 || GET_MODE(x) == DFmode
651 || GET_MODE(x) == SFmode)
652 && x == CONST0_RTX(GET_MODE(x)))
653 fputs (reg_names[0], file);
654 else
655 {
656 gcc_assert (x == const0_rtx);
657 fputs (reg_names[0], file);
658 }
659 break;
660 default:
661 switch (GET_CODE (x))
662 {
663 case MEM:
664 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
665 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
666 XEXP (x, 0)));
667 else
668 output_address (XEXP (x, 0));
669 break;
670
671 case REG:
672 fputs (reg_names[REGNO (x)], file);
673 break;
674 case SUBREG:
675 fputs (reg_names[subreg_regno (x)], file);
676 break;
677 case CONST_INT:
678 case SYMBOL_REF:
679 case CONST:
680 case LABEL_REF:
681 case CODE_LABEL:
682 v850_print_operand_address (file, x);
683 break;
684 default:
685 gcc_unreachable ();
686 }
687 break;
688
689 }
690 }
691
692 \f
/* Output assembly language output for the address ADDR to FILE.
   Implements TARGET_PRINT_OPERAND_ADDRESS.  */

static void
v850_print_operand_address (FILE * file, rtx addr)
{
  switch (GET_CODE (addr))
    {
    case REG:
      /* A bare register is printed as a zero-offset reference.  */
      fprintf (file, "0[");
      v850_print_operand (file, addr, 0);
      fprintf (file, "]");
      break;
    case LO_SUM:
      if (GET_CODE (XEXP (addr, 0)) == REG)
	{
	  /* reg,foo */
	  fprintf (file, "lo(");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, ")[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      /* NOTE(review): a LO_SUM whose first operand is not a REG prints
	 nothing - presumably such addresses never reach here; confirm.  */
      break;
    case PLUS:
      if (GET_CODE (XEXP (addr, 0)) == REG
	  || GET_CODE (XEXP (addr, 0)) == SUBREG)
	{
	  /* reg,foo */
	  v850_print_operand (file, XEXP (addr, 1), 0);
	  fprintf (file, "[");
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "]");
	}
      else
	{
	  v850_print_operand (file, XEXP (addr, 0), 0);
	  fprintf (file, "+");
	  v850_print_operand (file, XEXP (addr, 1), 0);
	}
      break;
    case SYMBOL_REF:
      {
	/* Symbols in one of the small data areas get an explicit
	   offset directive and base register.  */
	const char *off_name = NULL;
	const char *reg_name = NULL;

	if (SYMBOL_REF_ZDA_P (addr))
	  {
	    off_name = "zdaoff";
	    reg_name = "r0";
	  }
	else if (SYMBOL_REF_SDA_P (addr))
	  {
	    off_name = "sdaoff";
	    reg_name = "gp";
	  }
	else if (SYMBOL_REF_TDA_P (addr))
	  {
	    off_name = "tdaoff";
	    reg_name = "ep";
	  }

	if (off_name)
	  fprintf (file, "%s(", off_name);
	output_addr_const (file, addr);
	if (reg_name)
	  fprintf (file, ")[%s]", reg_name);
      }
      break;
    case CONST:
      if (special_symbolref_operand (addr, VOIDmode))
	{
	  /* (const (plus (symbol_ref) (const_int))) for a small-data
	     symbol: same treatment as the SYMBOL_REF case above.  */
	  rtx x = XEXP (XEXP (addr, 0), 0);
	  const char *off_name;
	  const char *reg_name;

	  if (SYMBOL_REF_ZDA_P (x))
	    {
	      off_name = "zdaoff";
	      reg_name = "r0";
	    }
	  else if (SYMBOL_REF_SDA_P (x))
	    {
	      off_name = "sdaoff";
	      reg_name = "gp";
	    }
	  else if (SYMBOL_REF_TDA_P (x))
	    {
	      off_name = "tdaoff";
	      reg_name = "ep";
	    }
	  else
	    gcc_unreachable ();

	  fprintf (file, "%s(", off_name);
	  output_addr_const (file, addr);
	  fprintf (file, ")[%s]", reg_name);
	}
      else
	output_addr_const (file, addr);
      break;
    default:
      output_addr_const (file, addr);
      break;
    }
}
798
/* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P.  The only punctuation
   operand code that v850_print_operand understands is '.' (register r0).  */
static bool
v850_print_operand_punct_valid_p (unsigned char code)
{
  switch (code)
    {
    case '.':
      return true;
    default:
      return false;
    }
}
804
805 /* When assemble_integer is used to emit the offsets for a switch
806 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
807 output_addr_const will normally barf at this, but it is OK to omit
808 the truncate and just emit the difference of the two labels. The
809 .hword directive will automatically handle the truncation for us.
810
811 Returns true if rtx was handled, false otherwise. */
812
static bool
v850_output_addr_const_extra (FILE * file, rtx x)
{
  /* Only the (truncate ...) form described above is handled here.  */
  if (GET_CODE (x) != TRUNCATE)
    return false;

  /* Drop the TRUNCATE and emit just the label difference.  */
  x = XEXP (x, 0);

  /* We must also handle the case where the switch table was passed a
     constant value and so has been collapsed.  In this case the first
     label will have been deleted.  In such a case it is OK to emit
     nothing, since the table will not be used.
     (cf gcc.c-torture/compile/990801-1.c).  */
  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 0)) == LABEL_REF
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
      && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
    return true;

  output_addr_const (file, x);
  return true;
}
835 \f
/* Return appropriate code to load up a 1, 2, or 4 integer/floating
   point value.  OPERANDS[0] is the destination, OPERANDS[1] the
   source; the returned string is an assembler template (calls
   fatal_insn on an unrecognized combination).  */

const char *
output_move_single (rtx * operands)
{
  rtx dst = operands[0];
  rtx src = operands[1];

  if (REG_P (dst))
    {
      if (REG_P (src))
	return "mov %1,%0";

      else if (GET_CODE (src) == CONST_INT)
	{
	  HOST_WIDE_INT value = INTVAL (src);

	  if (CONST_OK_FOR_J (value))		/* Signed 5-bit immediate.  */
	    return "mov %1,%0";

	  else if (CONST_OK_FOR_K (value))	/* Signed 16-bit immediate.  */
	    return "movea %1,%.,%0";

	  else if (CONST_OK_FOR_L (value))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%1),%.,%0";

	  /* A random constant.  Newer cores accept a 32-bit immediate
	     in a single mov; older ones need a movhi/movea pair.  */
	  else if (TARGET_V850E || TARGET_V850E2_ALL)
	    return "mov %1,%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
	{
	  HOST_WIDE_INT high, low;

	  /* An SFmode constant fits in one word (HIGH); same immediate
	     strategy as the CONST_INT case above.  */
	  const_double_split (src, &high, &low);

	  if (CONST_OK_FOR_J (high))		/* Signed 5-bit immediate.  */
	    return "mov %F1,%0";

	  else if (CONST_OK_FOR_K (high))	/* Signed 16-bit immediate.  */
	    return "movea %F1,%.,%0";

	  else if (CONST_OK_FOR_L (high))	/* Upper 16 bits were set.  */
	    return "movhi hi0(%F1),%.,%0";

	  /* A random constant.  */
	  else if (TARGET_V850E || TARGET_V850E2_ALL)
	    return "mov %F1,%0";

	  else
	    return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
	}

      else if (GET_CODE (src) == MEM)
	return "%S1ld%W1 %1,%0";

      else if (special_symbolref_operand (src, VOIDmode))
	return "movea %O1(%P1),%Q1,%0";

      else if (GET_CODE (src) == LABEL_REF
	       || GET_CODE (src) == SYMBOL_REF
	       || GET_CODE (src) == CONST)
	{
	  if (TARGET_V850E || TARGET_V850E2_ALL)
	    return "mov hilo(%1),%0";
	  else
	    return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
	}

      else if (GET_CODE (src) == HIGH)
	return "movhi hi(%1),%.,%0";

      else if (GET_CODE (src) == LO_SUM)
	{
	  operands[2] = XEXP (src, 0);
	  operands[3] = XEXP (src, 1);
	  return "movea lo(%3),%2,%0";
	}
    }

  else if (GET_CODE (dst) == MEM)
    {
      if (REG_P (src))
	return "%S0st%W0 %1,%0";

      /* Zero can be stored directly from r0.  */
      else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
	return "%S0st%W0 %.,%0";

      else if (GET_CODE (src) == CONST_DOUBLE
	       && CONST0_RTX (GET_MODE (dst)) == src)
	return "%S0st%W0 %.,%0";
    }

  fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
  return "";
}
936
937 /* Generate comparison code. */
938 int
939 v850_float_z_comparison_operator (rtx op, enum machine_mode mode)
940 {
941 enum rtx_code code = GET_CODE (op);
942
943 if (GET_RTX_CLASS (code) != RTX_COMPARE
944 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
945 return 0;
946
947 if (mode != GET_MODE (op) && mode != VOIDmode)
948 return 0;
949
950 if ((GET_CODE (XEXP (op, 0)) != REG
951 || REGNO (XEXP (op, 0)) != CC_REGNUM)
952 || XEXP (op, 1) != const0_rtx)
953 return 0;
954
955 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LTmode)
956 return code == LT;
957 if (GET_MODE (XEXP (op, 0)) == CC_FPU_LEmode)
958 return code == LE;
959 if (GET_MODE (XEXP (op, 0)) == CC_FPU_EQmode)
960 return code == EQ;
961
962 return 0;
963 }
964
965 int
966 v850_float_nz_comparison_operator (rtx op, enum machine_mode mode)
967 {
968 enum rtx_code code = GET_CODE (op);
969
970 if (GET_RTX_CLASS (code) != RTX_COMPARE
971 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
972 return 0;
973
974 if (mode != GET_MODE (op) && mode != VOIDmode)
975 return 0;
976
977 if ((GET_CODE (XEXP (op, 0)) != REG
978 || REGNO (XEXP (op, 0)) != CC_REGNUM)
979 || XEXP (op, 1) != const0_rtx)
980 return 0;
981
982 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GTmode)
983 return code == GT;
984 if (GET_MODE (XEXP (op, 0)) == CC_FPU_GEmode)
985 return code == GE;
986 if (GET_MODE (XEXP (op, 0)) == CC_FPU_NEmode)
987 return code == NE;
988
989 return 0;
990 }
991
992 enum machine_mode
993 v850_select_cc_mode (enum rtx_code cond, rtx op0, rtx op1 ATTRIBUTE_UNUSED)
994 {
995 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_FLOAT)
996 {
997 switch (cond)
998 {
999 case LE:
1000 return CC_FPU_LEmode;
1001 case GE:
1002 return CC_FPU_GEmode;
1003 case LT:
1004 return CC_FPU_LTmode;
1005 case GT:
1006 return CC_FPU_GTmode;
1007 case EQ:
1008 return CC_FPU_EQmode;
1009 case NE:
1010 return CC_FPU_NEmode;
1011 default:
1012 abort ();
1013 }
1014 }
1015 return CCmode;
1016 }
1017
1018 enum machine_mode
1019 v850_gen_float_compare (enum rtx_code cond, enum machine_mode mode ATTRIBUTE_UNUSED, rtx op0, rtx op1)
1020 {
1021 if (GET_MODE(op0) == DFmode)
1022 {
1023 switch (cond)
1024 {
1025 case LE:
1026 emit_insn (gen_cmpdf_le_insn (op0, op1));
1027 break;
1028 case GE:
1029 emit_insn (gen_cmpdf_ge_insn (op0, op1));
1030 break;
1031 case LT:
1032 emit_insn (gen_cmpdf_lt_insn (op0, op1));
1033 break;
1034 case GT:
1035 emit_insn (gen_cmpdf_gt_insn (op0, op1));
1036 break;
1037 case EQ:
1038 emit_insn (gen_cmpdf_eq_insn (op0, op1));
1039 break;
1040 case NE:
1041 emit_insn (gen_cmpdf_ne_insn (op0, op1));
1042 break;
1043 default:
1044 abort ();
1045 }
1046 }
1047 else if (GET_MODE(v850_compare_op0) == SFmode)
1048 {
1049 switch (cond)
1050 {
1051 case LE:
1052 emit_insn (gen_cmpsf_le_insn(op0, op1));
1053 break;
1054 case GE:
1055 emit_insn (gen_cmpsf_ge_insn(op0, op1));
1056 break;
1057 case LT:
1058 emit_insn (gen_cmpsf_lt_insn(op0, op1));
1059 break;
1060 case GT:
1061 emit_insn (gen_cmpsf_gt_insn(op0, op1));
1062 break;
1063 case EQ:
1064 emit_insn (gen_cmpsf_eq_insn(op0, op1));
1065 break;
1066 case NE:
1067 emit_insn (gen_cmpsf_ne_insn(op0, op1));
1068 break;
1069 default:
1070 abort ();
1071 }
1072 }
1073 else
1074 {
1075 abort ();
1076 }
1077
1078 return v850_select_cc_mode (cond, op0, op1);
1079 }
1080
/* Emit a comparison of OP0 and OP1 and return an rtx (CODE applied to
   the condition-code register versus zero) suitable for a conditional
   branch.  MODE is the mode of the comparison rtx returned.  */

rtx
v850_gen_compare (enum rtx_code cond, enum machine_mode mode, rtx op0, rtx op1)
{
  if (GET_MODE_CLASS(GET_MODE (op0)) != MODE_FLOAT)
    {
      /* Integer comparison: emit a plain compare; the result is read
	 from the condition-code register.  */
      emit_insn (gen_cmpsi_insn (op0, op1));
      return gen_rtx_fmt_ee (cond, mode, gen_rtx_REG(CCmode, CC_REGNUM), const0_rtx);
    }
  else
    {
      /* Floating point: run the FPU compare, then copy the FPU flag
	 register into the condition-code register.  */
      rtx cc_reg;
      mode = v850_gen_float_compare (cond, mode, op0, op1);
      cc_reg = gen_rtx_REG (mode, CC_REGNUM);
      emit_insn (gen_rtx_SET(mode, cc_reg, gen_rtx_REG (mode, FCC_REGNUM)));

      return gen_rtx_fmt_ee (cond, mode, cc_reg, const0_rtx);
    }
}
1099
1100 /* Return maximum offset supported for a short EP memory reference of mode
1101 MODE and signedness UNSIGNEDP. */
1102
1103 static int
1104 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
1105 {
1106 int max_offset = 0;
1107
1108 switch (mode)
1109 {
1110 case QImode:
1111 if (TARGET_SMALL_SLD)
1112 max_offset = (1 << 4);
1113 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1114 && unsignedp)
1115 max_offset = (1 << 4);
1116 else
1117 max_offset = (1 << 7);
1118 break;
1119
1120 case HImode:
1121 if (TARGET_SMALL_SLD)
1122 max_offset = (1 << 5);
1123 else if ((TARGET_V850E || TARGET_V850E2_ALL)
1124 && unsignedp)
1125 max_offset = (1 << 5);
1126 else
1127 max_offset = (1 << 8);
1128 break;
1129
1130 case SImode:
1131 case SFmode:
1132 max_offset = (1 << 8);
1133 break;
1134
1135 default:
1136 break;
1137 }
1138
1139 return max_offset;
1140 }
1141
/* Return true if OP is a valid short EP memory reference of mode MODE.
   UNSIGNED_LOAD is passed through to ep_memory_offset to select the
   offset limit.  */

int
ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
{
  rtx addr, op0, op1;
  int max_offset;
  int mask;

  /* If we are not using the EP register on a per-function basis
     then do not allow this optimization at all.  This is to
     prevent the use of the SLD/SST instructions which cannot be
     guaranteed to work properly due to a hardware bug.  */
  if (!TARGET_EP)
    return FALSE;

  if (GET_CODE (op) != MEM)
    return FALSE;

  max_offset = ep_memory_offset (mode, unsigned_load);

  /* Offsets must be aligned to the access size.  */
  mask = GET_MODE_SIZE (mode) - 1;

  addr = XEXP (op, 0);
  if (GET_CODE (addr) == CONST)
    addr = XEXP (addr, 0);

  switch (GET_CODE (addr))
    {
    default:
      break;

    case SYMBOL_REF:
      /* Bare TDA symbols are addressed relative to EP.  */
      return SYMBOL_REF_TDA_P (addr);

    case REG:
      return REGNO (addr) == EP_REGNUM;

    case PLUS:
      /* EP (or a TDA symbol) plus a small, aligned, non-negative
	 constant offset.  */
      op0 = XEXP (addr, 0);
      op1 = XEXP (addr, 1);
      if (GET_CODE (op1) == CONST_INT
	  && INTVAL (op1) < max_offset
	  && INTVAL (op1) >= 0
	  && (INTVAL (op1) & mask) == 0)
	{
	  if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
	    return TRUE;

	  if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
	    return TRUE;
	}
      break;
    }

  return FALSE;
}
1199 \f
/* Substitute memory references involving a pointer, to use the ep pointer,
   taking care to save and preserve the ep.  REGNO is the pointer register
   being replaced; its USES references lie between FIRST_INSN and
   LAST_INSN.  *P_R1 and *P_EP cache rtxes for r1 and ep, created lazily
   on the first call.  */

static void
substitute_ep_register (rtx first_insn,
			rtx last_insn,
			int uses,
			int regno,
			rtx * p_r1,
			rtx * p_ep)
{
  rtx reg = gen_rtx_REG (Pmode, regno);
  rtx insn;

  if (!*p_r1)
    {
      /* r1 is used as a scratch to hold the saved ep value.  */
      df_set_regs_ever_live (1, true);
      *p_r1 = gen_rtx_REG (Pmode, 1);
      *p_ep = gen_rtx_REG (Pmode, 30);
    }

  if (TARGET_DEBUG)
    fprintf (stderr, "\
Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
	     2 * (uses - 3), uses, reg_names[regno],
	     IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
	     INSN_UID (first_insn), INSN_UID (last_insn));

  if (GET_CODE (first_insn) == NOTE)
    first_insn = next_nonnote_insn (first_insn);

  last_insn = next_nonnote_insn (last_insn);
  for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == INSN)
	{
	  rtx pattern = single_set (insn);

	  /* Replace the memory references.  */
	  if (pattern)
	    {
	      rtx *p_mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* Locate the (single) MEM in the set, if any; a
		 mem-to-mem set is left untouched.  */
	      if (GET_CODE (SET_DEST (pattern)) == MEM
		  && GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = (rtx *)0;

	      else if (GET_CODE (SET_DEST (pattern)) == MEM)
		p_mem = &SET_DEST (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == MEM)
		p_mem = &SET_SRC (pattern);

	      else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		p_mem = &XEXP (SET_SRC (pattern), 0);

	      else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
		       && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
		{
		  p_mem = &XEXP (SET_SRC (pattern), 0);
		  unsignedp = TRUE;
		}
	      else
		p_mem = (rtx *)0;

	      if (p_mem)
		{
		  rtx addr = XEXP (*p_mem, 0);

		  /* (mem (reg REGNO)) becomes (mem (reg ep)).  */
		  if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
		    *p_mem = change_address (*p_mem, VOIDmode, *p_ep);

		  /* (mem (plus (reg REGNO) const)) becomes
		     (mem (plus ep const)) when the offset fits a
		     short EP reference.  */
		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && REGNO (XEXP (addr, 0)) == (unsigned) regno
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (*p_mem),
						   unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    *p_mem = change_address (*p_mem, VOIDmode,
					     gen_rtx_PLUS (Pmode,
							   *p_ep,
							   XEXP (addr, 1)));
		}
	    }
	}
    }

  /* Optimize back to back cases of ep <- r1 & r1 <- ep.  */
  insn = prev_nonnote_insn (first_insn);
  if (insn && GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SET
      && SET_DEST (PATTERN (insn)) == *p_ep
      && SET_SRC (PATTERN (insn)) == *p_r1)
    delete_insn (insn);
  else
    emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);

  /* Save ep into r1, load the pointer into ep for the run of insns,
     and restore ep from r1 afterwards.  */
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
  emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
}
1305
1306 \f
1307 /* TARGET_MACHINE_DEPENDENT_REORG. On the 850, we use it to implement
1308 the -mep mode to copy heavily used pointers to ep to use the implicit
1309 addressing. */
1310
static void
v850_reorg (void)
{
  /* Per hard register: the number of shortenable memory references in
     the current basic block that use it as a base address, and the
     first/last insns spanning those uses.  */
  struct
  {
    int uses;
    rtx first_insn;
    rtx last_insn;
  }
  regs[FIRST_PSEUDO_REGISTER];

  int i;
  int use_ep = FALSE;	/* Nonzero while ep already holds a live pointer.  */
  rtx r1 = NULL_RTX;	/* Cached r1 rtx, filled in by substitute_ep_register.  */
  rtx ep = NULL_RTX;	/* Cached ep rtx, likewise.  */
  rtx insn;
  rtx pattern;

  /* If not ep mode, just return now.  */
  if (!TARGET_EP)
    return;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      regs[i].uses = 0;
      regs[i].first_insn = NULL_RTX;
      regs[i].last_insn = NULL_RTX;
    }

  /* Walk the whole insn stream, tracking candidate base registers per
     basic block and substituting ep for the best one when it pays off
     (more than 3 uses -- see the max_uses > 3 tests below).  */
  for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	/* End of basic block: decide whether the best candidate earned
	   an ep substitution, then reset all per-block state.  */
	default:
	  if (!use_ep)
	    {
	      int max_uses = -1;
	      int max_regno = -1;

	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		{
		  if (max_uses < regs[i].uses)
		    {
		      max_uses = regs[i].uses;
		      max_regno = i;
		    }
		}

	      if (max_uses > 3)
		substitute_ep_register (regs[max_regno].first_insn,
					regs[max_regno].last_insn,
					max_uses, max_regno, &r1, &ep);
	    }

	  use_ep = FALSE;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    {
	      regs[i].uses = 0;
	      regs[i].first_insn = NULL_RTX;
	      regs[i].last_insn = NULL_RTX;
	    }
	  break;

	case NOTE:
	  break;

	case INSN:
	  pattern = single_set (insn);

	  /* See if there are any memory references we can shorten.  */
	  if (pattern)
	    {
	      rtx src = SET_SRC (pattern);
	      rtx dest = SET_DEST (pattern);
	      rtx mem;
	      /* Memory operands are signed by default.  */
	      int unsignedp = FALSE;

	      /* We might have (SUBREG (MEM)) here, so just get rid of the
		 subregs to make this code simpler.  */
	      if (GET_CODE (dest) == SUBREG
		  && (GET_CODE (SUBREG_REG (dest)) == MEM
		      || GET_CODE (SUBREG_REG (dest)) == REG))
		alter_subreg (&dest);
	      if (GET_CODE (src) == SUBREG
		  && (GET_CODE (SUBREG_REG (src)) == MEM
		      || GET_CODE (SUBREG_REG (src)) == REG))
		alter_subreg (&src);

	      /* Find the single memory operand, if any.  A mem-to-mem
		 move has no single candidate, so it is skipped.  */
	      if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
		mem = NULL_RTX;

	      else if (GET_CODE (dest) == MEM)
		mem = dest;

	      else if (GET_CODE (src) == MEM)
		mem = src;

	      else if (GET_CODE (src) == SIGN_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		mem = XEXP (src, 0);

	      else if (GET_CODE (src) == ZERO_EXTEND
		       && GET_CODE (XEXP (src, 0)) == MEM)
		{
		  mem = XEXP (src, 0);
		  unsignedp = TRUE;
		}
	      else
		mem = NULL_RTX;

	      if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
		use_ep = TRUE;

	      else if (!use_ep && mem
		       && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
		{
		  rtx addr = XEXP (mem, 0);
		  int regno = -1;
		  int short_p;

		  /* A (reg) or in-range (plus (reg) (const_int)) address
		     could be shortened if its base were moved to ep.  */
		  if (GET_CODE (addr) == REG)
		    {
		      short_p = TRUE;
		      regno = REGNO (addr);
		    }

		  else if (GET_CODE (addr) == PLUS
			   && GET_CODE (XEXP (addr, 0)) == REG
			   && GET_CODE (XEXP (addr, 1)) == CONST_INT
			   && ((INTVAL (XEXP (addr, 1)))
			       < ep_memory_offset (GET_MODE (mem), unsignedp))
			   && ((INTVAL (XEXP (addr, 1))) >= 0))
		    {
		      short_p = TRUE;
		      regno = REGNO (XEXP (addr, 0));
		    }

		  else
		    short_p = FALSE;

		  if (short_p)
		    {
		      regs[regno].uses++;
		      regs[regno].last_insn = insn;
		      if (!regs[regno].first_insn)
			regs[regno].first_insn = insn;
		    }
		}

	      /* Loading up a register in the basic block zaps any savings
		 for the register.  */
	      if (GET_CODE (dest) == REG)
		{
		  enum machine_mode mode = GET_MODE (dest);
		  int regno;
		  int endregno;

		  regno = REGNO (dest);
		  endregno = regno + HARD_REGNO_NREGS (regno, mode);

		  if (!use_ep)
		    {
		      /* See if we can use the pointer before this
			 modification.  */
		      int max_uses = -1;
		      int max_regno = -1;

		      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			{
			  if (max_uses < regs[i].uses)
			    {
			      max_uses = regs[i].uses;
			      max_regno = i;
			    }
			}

		      if (max_uses > 3
			  && max_regno >= regno
			  && max_regno < endregno)
			{
			  substitute_ep_register (regs[max_regno].first_insn,
						  regs[max_regno].last_insn,
						  max_uses, max_regno, &r1,
						  &ep);

			  /* Since we made a substitution, zap all remembered
			     registers.  */
			  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
			    {
			      regs[i].uses = 0;
			      regs[i].first_insn = NULL_RTX;
			      regs[i].last_insn = NULL_RTX;
			    }
			}
		    }

		  /* Forget any uses of the registers being overwritten.  */
		  for (i = regno; i < endregno; i++)
		    {
		      regs[i].uses = 0;
		      regs[i].first_insn = NULL_RTX;
		      regs[i].last_insn = NULL_RTX;
		    }
		}
	    }
	}
    }
}
1520
/* # of registers saved unconditionally by the interrupt handler entry
   sequence (the fixed-use registers r1, r4, r10, r11 and ep -- see
   compute_register_save_size).  */
#define INTERRUPT_FIXED_NUM 5

/* # of bytes for registers saved by the interrupt handler.  */
#define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)

/* # of words saved for other registers.  */
#define INTERRUPT_ALL_SAVE_NUM \
  (30 - INTERRUPT_FIXED_NUM)

/* # of bytes for the other registers saved by save_all_interrupt.  */
#define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
1532
/* Return the number of bytes of stack needed to save registers for the
   current function, and, if P_REG_SAVED is non-NULL, store there a bit
   mask of the registers the prologue/epilogue must save and restore.
   Interrupt handlers follow different rules from ordinary functions.  */
int
compute_register_save_size (long * p_reg_saved)
{
  int size = 0;
  int i;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
  long reg_saved = 0;

  /* Count the return pointer if we need to save it.  */
  if (crtl->profile && !call_p)
    {
      df_set_regs_ever_live (LINK_POINTER_REGNUM, true);
      call_p = 1;
    }

  /* Count space for the register saves.  */
  if (interrupt_handler)
    {
      for (i = 0; i <= 31; i++)
	switch (i)
	  {
	  default:
	    if (df_regs_ever_live_p (i) || call_p)
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	    break;

	    /* We don't save/restore r0 or the stack pointer.  */
	  case 0:
	  case STACK_POINTER_REGNUM:
	    break;

	    /* For registers with fixed use, we save them, set them to the
	       appropriate value, and then restore them.
	       These registers are handled specially, so don't list them
	       on the list of registers to save in the prologue: they add
	       to SIZE but not to REG_SAVED.  */
	  case 1:		/* temp used to hold ep */
	  case 4:		/* gp */
	  case 10:		/* temp used to call interrupt save/restore */
	  case 11:		/* temp used to call interrupt save/restore (long call) */
	  case EP_REGNUM:	/* ep */
	    size += 4;
	    break;
	  }
    }
  else
    {
      /* Find the first register that needs to be saved.  */
      for (i = 0; i <= 31; i++)
	if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					|| i == LINK_POINTER_REGNUM))
	  break;

      /* If it is possible that an out-of-line helper function might be
	 used to generate the prologue for the current function, then we
	 need to cover the possibility that such a helper function will
	 be used, despite the fact that there might be gaps in the list of
	 registers that need to be saved.  To detect this we note that the
	 helper functions always push at least register r29 (provided
	 that the function is not an interrupt handler).  */

      if (TARGET_PROLOG_FUNCTION
          && (i == 2 || ((i >= 20) && (i < 30))))
	{
	  if (i == 2)
	    {
	      size += 4;
	      reg_saved |= 1L << i;

	      /* The helpers start their contiguous range at r20.  */
	      i = 20;
	    }

	  /* Helper functions save all registers between the starting
	     register and the last register, regardless of whether they
	     are actually used by the function or not.  */
	  for (; i <= 29; i++)
	    {
	      size += 4;
	      reg_saved |= 1L << i;
	    }

	  if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
	    {
	      size += 4;
	      reg_saved |= 1L << LINK_POINTER_REGNUM;
	    }
	}
      else
	{
	  /* No helper: save exactly the live call-saved registers,
	     plus the link pointer if it is live.  */
	  for (; i <= 31; i++)
	    if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
					    || i == LINK_POINTER_REGNUM))
	      {
		size += 4;
		reg_saved |= 1L << i;
	      }
	}
    }

  if (p_reg_saved)
    *p_reg_saved = reg_saved;

  return size;
}
1640
1641 int
1642 compute_frame_size (int size, long * p_reg_saved)
1643 {
1644 return (size
1645 + compute_register_save_size (p_reg_saved)
1646 + crtl->outgoing_args_size);
1647 }
1648
1649 static int
1650 use_prolog_function (int num_save, int frame_size)
1651 {
1652 int alloc_stack = (4 * num_save);
1653 int unalloc_stack = frame_size - alloc_stack;
1654 int save_func_len, restore_func_len;
1655 int save_normal_len, restore_normal_len;
1656
1657 if (! TARGET_DISABLE_CALLT)
1658 save_func_len = restore_func_len = 2;
1659 else
1660 save_func_len = restore_func_len = TARGET_LONG_CALLS ? (4+4+4+2+2) : 4;
1661
1662 if (unalloc_stack)
1663 {
1664 save_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1665 restore_func_len += CONST_OK_FOR_J (-unalloc_stack) ? 2 : 4;
1666 }
1667
1668 /* See if we would have used ep to save the stack. */
1669 if (TARGET_EP && num_save > 3 && (unsigned)frame_size < 255)
1670 save_normal_len = restore_normal_len = (3 * 2) + (2 * num_save);
1671 else
1672 save_normal_len = restore_normal_len = 4 * num_save;
1673
1674 save_normal_len += CONST_OK_FOR_J (-frame_size) ? 2 : 4;
1675 restore_normal_len += (CONST_OK_FOR_J (frame_size) ? 2 : 4) + 2;
1676
1677 /* Don't bother checking if we don't actually save any space.
1678 This happens for instance if one register is saved and additional
1679 stack space is allocated. */
1680 return ((save_func_len + restore_func_len) < (save_normal_len + restore_normal_len));
1681 }
1682
/* Emit the RTL for the current function's prologue: save live
   registers (via an out-of-line helper when profitable), allocate the
   stack frame, and set up the frame pointer if one is needed.  */
void
expand_prologue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  unsigned int actual_fsize;
  unsigned int init_stack_alloc = 0;
  rtx save_regs[32];
  rtx save_all;
  unsigned int num_save;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);
  long reg_saved = 0;

  actual_fsize = compute_frame_size (size, &reg_saved);

  /* Save/setup global registers for interrupt functions right now.  */
  if (interrupt_handler)
    {
      if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
	emit_insn (gen_callt_save_interrupt ());
      else
	emit_insn (gen_save_interrupt ());

      /* The fixed registers were just saved above, so do not count
	 their space again below.  */
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;

      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Identify all of the saved registers.  */
  num_save = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	save_regs[num_save++] = gen_rtx_REG (Pmode, i);
    }

  /* See if we have an insn that allocates stack space and saves the
     particular registers we want to.  */
  save_all = NULL_RTX;
  if (TARGET_PROLOG_FUNCTION && num_save > 0)
    {
      if (use_prolog_function (num_save, actual_fsize))
	{
	  int alloc_stack = 4 * num_save;
	  int offset = 0;

	  /* Build a PARALLEL of the stack adjustment, the register
	     stores, and the registers clobbered by the JARL helper.  */
	  save_all = gen_rtx_PARALLEL
	    (VOIDmode,
	     rtvec_alloc (num_save + 1
			  + (TARGET_DISABLE_CALLT ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));

	  XVECEXP (save_all, 0, 0)
	    = gen_rtx_SET (VOIDmode,
			   stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT(-alloc_stack)));
	  for (i = 0; i < num_save; i++)
	    {
	      offset -= 4;
	      XVECEXP (save_all, 0, i+1)
		= gen_rtx_SET (VOIDmode,
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))),
			       save_regs[i]);
	    }

	  if (TARGET_DISABLE_CALLT)
	    {
	      /* The JARL form of the helper clobbers r10 (and r11 when
		 the call sequence is long).  */
	      XVECEXP (save_all, 0, num_save + 1)
		= gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));

	      if (TARGET_LONG_CALLS)
		XVECEXP (save_all, 0, num_save + 2)
		  = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
	    }

	  /* Only use the PARALLEL if it matches a known save pattern.  */
	  code = recog (save_all, NULL_RTX, NULL);
	  if (code >= 0)
	    {
	      rtx insn = emit_insn (save_all);
	      INSN_CODE (insn) = code;
	      actual_fsize -= alloc_stack;

	    }
	  else
	    save_all = NULL_RTX;
	}
    }

  /* If no prolog save function is available, store the registers the old
     fashioned way (one by one).  */
  if (!save_all)
    {
      /* Special case interrupt functions that save all registers for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
	    emit_insn (gen_callt_save_all_interrupt ());
	  else
	    emit_insn (gen_save_all_interrupt ());
	}
      else
	{
	  int offset;
	  /* If the stack is too big, allocate it in chunks so we can do the
	     register saves.  We use the register save size so we use the ep
	     register.  */
	  if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	    init_stack_alloc = compute_register_save_size (NULL);
	  else
	    init_stack_alloc = actual_fsize;

	  /* Save registers at the beginning of the stack frame.  */
	  offset = init_stack_alloc - 4;

	  if (init_stack_alloc)
	    emit_insn (gen_addsi3 (stack_pointer_rtx,
				   stack_pointer_rtx,
				   GEN_INT (- (signed) init_stack_alloc)));

	  /* Save the return pointer first.  */
	  if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (gen_rtx_MEM (SImode,
					   plus_constant (stack_pointer_rtx,
							  offset)),
			      save_regs[--num_save]);
	      offset -= 4;
	    }

	  for (i = 0; i < num_save; i++)
	    {
	      emit_move_insn (gen_rtx_MEM (SImode,
					   plus_constant (stack_pointer_rtx,
							  offset)),
			      save_regs[i]);
	      offset -= 4;
	    }
	}
    }

  /* Allocate the rest of the stack that was not allocated above (either it
     is > 32K or we just called a function to save the registers and needed
     more stack).  */
  if (actual_fsize > init_stack_alloc)
    {
      int diff = actual_fsize - init_stack_alloc;
      if (CONST_OK_FOR_K (-diff))
	emit_insn (gen_addsi3 (stack_pointer_rtx,
			       stack_pointer_rtx,
			       GEN_INT (-diff)));
      else
	{
	  /* The adjustment does not fit in an immediate; go through a
	     scratch register (r12).  */
	  rtx reg = gen_rtx_REG (Pmode, 12);
	  emit_move_insn (reg, GEN_INT (-diff));
	  emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
	}
    }

  /* If we need a frame pointer, set it up now.  */
  if (frame_pointer_needed)
    emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
}
1851 \f
1852
/* Emit the RTL for the current function's epilogue: deallocate the
   frame, restore saved registers (via an out-of-line helper when
   profitable), and emit the return.  Mirrors expand_prologue.  */
void
expand_epilogue (void)
{
  unsigned int i;
  unsigned int size = get_frame_size ();
  long reg_saved = 0;
  int actual_fsize = compute_frame_size (size, &reg_saved);
  rtx restore_regs[32];
  rtx restore_all;
  unsigned int num_restore;
  int code;
  int interrupt_handler = v850_interrupt_function_p (current_function_decl);

  /* Eliminate the initial stack stored by interrupt functions.  */
  if (interrupt_handler)
    {
      actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
      if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
    }

  /* Cut off any dynamic stack created.  */
  if (frame_pointer_needed)
    emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);

  /* Identify all of the saved registers.  */
  num_restore = 0;
  for (i = 1; i < 32; i++)
    {
      if (((1L << i) & reg_saved) != 0)
	restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
    }

  /* See if we have an insn that restores the particular registers we
     want to.  */
  restore_all = NULL_RTX;

  if (TARGET_PROLOG_FUNCTION
      && num_restore > 0
      && !interrupt_handler)
    {
      int alloc_stack = (4 * num_restore);

      /* Don't bother checking if we don't actually save any space.  */
      if (use_prolog_function (num_restore, actual_fsize))
	{
	  int offset;
	  /* Build a PARALLEL of the return, the stack adjustment and
	     the register loads, for recog to match as one insn.  */
	  restore_all = gen_rtx_PARALLEL (VOIDmode,
					  rtvec_alloc (num_restore + 2));
	  XVECEXP (restore_all, 0, 0) = ret_rtx;
	  XVECEXP (restore_all, 0, 1)
	    = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
			   gen_rtx_PLUS (Pmode,
					 stack_pointer_rtx,
					 GEN_INT (alloc_stack)));

	  offset = alloc_stack - 4;
	  for (i = 0; i < num_restore; i++)
	    {
	      XVECEXP (restore_all, 0, i+2)
		= gen_rtx_SET (VOIDmode,
			       restore_regs[i],
			       gen_rtx_MEM (Pmode,
					    gen_rtx_PLUS (Pmode,
							  stack_pointer_rtx,
							  GEN_INT(offset))));
	      offset -= 4;
	    }

	  code = recog (restore_all, NULL_RTX, NULL);

	  if (code >= 0)
	    {
	      rtx insn;

	      /* Release everything except the register save area
		 first; the matched insn pops the rest.  */
	      actual_fsize -= alloc_stack;
	      if (actual_fsize)
		{
		  if (CONST_OK_FOR_K (actual_fsize))
		    emit_insn (gen_addsi3 (stack_pointer_rtx,
					   stack_pointer_rtx,
					   GEN_INT (actual_fsize)));
		  else
		    {
		      /* Adjustment too big for an immediate; use the
			 r12 scratch register.  */
		      rtx reg = gen_rtx_REG (Pmode, 12);
		      emit_move_insn (reg, GEN_INT (actual_fsize));
		      emit_insn (gen_addsi3 (stack_pointer_rtx,
					     stack_pointer_rtx,
					     reg));
		    }
		}

	      insn = emit_jump_insn (restore_all);
	      INSN_CODE (insn) = code;

	    }
	  else
	    restore_all = NULL_RTX;
	}
    }

  /* If no epilogue save function is available, restore the registers the
     old fashioned way (one by one).  */
  if (!restore_all)
    {
      unsigned int init_stack_free;

      /* If the stack is large, we need to cut it down in 2 pieces.  */
      if (interrupt_handler)
	init_stack_free = 0;
      else if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
	init_stack_free = 4 * num_restore;
      else
	init_stack_free = (signed) actual_fsize;

      /* Deallocate the rest of the stack if it is > 32K.  */
      if ((unsigned int) actual_fsize > init_stack_free)
	{
	  int diff;

	  diff = actual_fsize - init_stack_free;

	  if (CONST_OK_FOR_K (diff))
	    emit_insn (gen_addsi3 (stack_pointer_rtx,
				   stack_pointer_rtx,
				   GEN_INT (diff)));
	  else
	    {
	      rtx reg = gen_rtx_REG (Pmode, 12);
	      emit_move_insn (reg, GEN_INT (diff));
	      emit_insn (gen_addsi3 (stack_pointer_rtx,
				     stack_pointer_rtx,
				     reg));
	    }
	}

      /* Special case interrupt functions that save all registers
	 for a call.  */
      if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
	{
	  if (! TARGET_DISABLE_CALLT)
	    emit_insn (gen_callt_restore_all_interrupt ());
	  else
	    emit_insn (gen_restore_all_interrupt ());
	}
      else
	{
	  /* Restore registers from the beginning of the stack frame.  */
	  int offset = init_stack_free - 4;

	  /* Restore the return pointer first.  */
	  if (num_restore > 0
	      && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
	    {
	      emit_move_insn (restore_regs[--num_restore],
			      gen_rtx_MEM (SImode,
					   plus_constant (stack_pointer_rtx,
							  offset)));
	      offset -= 4;
	    }

	  for (i = 0; i < num_restore; i++)
	    {
	      emit_move_insn (restore_regs[i],
			      gen_rtx_MEM (SImode,
					   plus_constant (stack_pointer_rtx,
							  offset)));

	      /* Keep the restore from being deleted as dead before the
		 stack adjustment below.  */
	      emit_use (restore_regs[i]);
	      offset -= 4;
	    }

	  /* Cut back the remainder of the stack.  */
	  if (init_stack_free)
	    emit_insn (gen_addsi3 (stack_pointer_rtx,
				   stack_pointer_rtx,
				   GEN_INT (init_stack_free)));
	}

      /* And return or use reti for interrupt handlers.  */
      if (interrupt_handler)
	{
	  if (! TARGET_DISABLE_CALLT && (TARGET_V850E || TARGET_V850E2_ALL))
	    emit_insn (gen_callt_return_interrupt ());
	  else
	    emit_jump_insn (gen_return_interrupt ());
	}
      else if (actual_fsize)
	emit_jump_insn (gen_return_internal ());
      else
	emit_jump_insn (gen_return_simple ());
    }

  /* Invalidate the cached interrupt-ness of the (now finished) function.  */
  v850_interrupt_cache_p = FALSE;
  v850_interrupt_p = FALSE;
}
2049
2050 /* Update the condition code from the insn. */
/* Update the global cc_status to reflect the effect of INSN (whose
   pattern is BODY), as classified by the insn's "cc" attribute.  Used
   by the cc0 machinery to elide redundant compare instructions.  */
void
notice_update_cc (rtx body, rtx insn)
{
  switch (get_attr_cc (insn))
    {
    case CC_NONE:
      /* Insn does not affect CC at all.  */
      break;

    case CC_NONE_0HIT:
      /* Insn does not change CC, but the 0'th operand has been changed.  */
      if (cc_status.value1 != 0
	  && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
	cc_status.value1 = 0;	/* Cached value was overwritten; forget it.  */
      break;

    case CC_SET_ZN:
      /* Insn sets the Z,N flags of CC to recog_data.operand[0].
	 V,C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_SET_ZNV:
      /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
	 C is in an unusable state.  */
      CC_STATUS_INIT;
      cc_status.flags |= CC_NO_CARRY;
      cc_status.value1 = recog_data.operand[0];
      break;

    case CC_COMPARE:
      /* The insn is a compare instruction.  */
      CC_STATUS_INIT;
      cc_status.value1 = SET_SRC (body);
      break;

    case CC_CLOBBER:
      /* Insn doesn't leave CC in a usable state.  */
      CC_STATUS_INIT;
      break;

    default:
      break;
    }
}
2098
2099 /* Retrieve the data area that has been chosen for the given decl. */
2100
2101 v850_data_area
2102 v850_get_data_area (tree decl)
2103 {
2104 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2105 return DATA_AREA_SDA;
2106
2107 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2108 return DATA_AREA_TDA;
2109
2110 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2111 return DATA_AREA_ZDA;
2112
2113 return DATA_AREA_NORMAL;
2114 }
2115
2116 /* Store the indicated data area in the decl's attributes. */
2117
2118 static void
2119 v850_set_data_area (tree decl, v850_data_area data_area)
2120 {
2121 tree name;
2122
2123 switch (data_area)
2124 {
2125 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2126 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2127 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2128 default:
2129 return;
2130 }
2131
2132 DECL_ATTRIBUTES (decl) = tree_cons
2133 (name, NULL, DECL_ATTRIBUTES (decl));
2134 }
2135 \f
2136 /* Handle an "interrupt" attribute; arguments as in
2137 struct attribute_spec.handler. */
2138 static tree
2139 v850_handle_interrupt_attribute (tree * node,
2140 tree name,
2141 tree args ATTRIBUTE_UNUSED,
2142 int flags ATTRIBUTE_UNUSED,
2143 bool * no_add_attrs)
2144 {
2145 if (TREE_CODE (*node) != FUNCTION_DECL)
2146 {
2147 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2148 name);
2149 *no_add_attrs = true;
2150 }
2151
2152 return NULL_TREE;
2153 }
2154
2155 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2156 struct attribute_spec.handler. */
static tree
v850_handle_data_area_attribute (tree* node,
				 tree name,
				 tree args ATTRIBUTE_UNUSED,
				 int flags ATTRIBUTE_UNUSED,
				 bool * no_add_attrs)
{
  v850_data_area data_area;
  v850_data_area area;
  tree decl = *node;

  /* Implement data area attribute: map NAME to its area.  */
  if (is_attribute_p ("sda", name))
    data_area = DATA_AREA_SDA;
  else if (is_attribute_p ("tda", name))
    data_area = DATA_AREA_TDA;
  else if (is_attribute_p ("zda", name))
    data_area = DATA_AREA_ZDA;
  else
    gcc_unreachable ();

  switch (TREE_CODE (decl))
    {
    case VAR_DECL:
      /* Reject the attribute on function-local variables.  */
      if (current_function_decl != NULL_TREE)
	{
	  error_at (DECL_SOURCE_LOCATION (decl),
		    "data area attributes cannot be specified for "
		    "local variables");
	  *no_add_attrs = true;
	}

      /* Drop through -- the conflict check below applies to both
	 variables and functions.  */

    case FUNCTION_DECL:
      /* Reject a second, different, data area attribute on DECL.  */
      area = v850_get_data_area (decl);
      if (area != DATA_AREA_NORMAL && data_area != area)
	{
	  error ("data area of %q+D conflicts with previous declaration",
		 decl);
	  *no_add_attrs = true;
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
2207
2208 \f
2209 /* Return nonzero if FUNC is an interrupt function as specified
2210 by the "interrupt" attribute. */
2211
2212 int
2213 v850_interrupt_function_p (tree func)
2214 {
2215 tree a;
2216 int ret = 0;
2217
2218 if (v850_interrupt_cache_p)
2219 return v850_interrupt_p;
2220
2221 if (TREE_CODE (func) != FUNCTION_DECL)
2222 return 0;
2223
2224 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2225 if (a != NULL_TREE)
2226 ret = 1;
2227
2228 else
2229 {
2230 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2231 ret = a != NULL_TREE;
2232 }
2233
2234 /* Its not safe to trust global variables until after function inlining has
2235 been done. */
2236 if (reload_completed | reload_in_progress)
2237 v850_interrupt_p = ret;
2238
2239 return ret;
2240 }
2241
2242 \f
/* Choose a small data area for DECL if it does not already have one --
   from its explicit section name, or from its size relative to the
   -m{zda,sda,tda}= limits -- and record the choice in SYMBOL's flags.  */
static void
v850_encode_data_area (tree decl, rtx symbol)
{
  int flags;

  /* Map explicit sections into the appropriate attribute.  */
  if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
    {
      if (DECL_SECTION_NAME (decl))
	{
	  const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));

	  if (streq (name, ".zdata") || streq (name, ".zbss"))
	    v850_set_data_area (decl, DATA_AREA_ZDA);

	  else if (streq (name, ".sdata") || streq (name, ".sbss"))
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (streq (name, ".tdata"))
	    v850_set_data_area (decl, DATA_AREA_TDA);
	}

      /* If no attribute, support -m{zda,sda,tda}=n: pick the smallest
	 area whose size limit admits the object.  */
      else
	{
	  int size = int_size_in_bytes (TREE_TYPE (decl));
	  if (size <= 0)
	    ;			/* Unknown/zero size: leave in normal data.  */

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_TDA])
	    v850_set_data_area (decl, DATA_AREA_TDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_SDA])
	    v850_set_data_area (decl, DATA_AREA_SDA);

	  else if (size <= small_memory_max [(int) SMALL_MEMORY_ZDA])
	    v850_set_data_area (decl, DATA_AREA_ZDA);
	}

      /* Still no data area: nothing to encode.  */
      if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
	return;
    }

  /* Mirror the chosen area into the SYMBOL_REF flags so later code can
     test the symbol without re-examining the decl.  */
  flags = SYMBOL_REF_FLAGS (symbol);
  switch (v850_get_data_area (decl))
    {
    case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
    case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
    case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
    default: gcc_unreachable ();
    }
  SYMBOL_REF_FLAGS (symbol) = flags;
}
2296
2297 static void
2298 v850_encode_section_info (tree decl, rtx rtl, int first)
2299 {
2300 default_encode_section_info (decl, rtl, first);
2301
2302 if (TREE_CODE (decl) == VAR_DECL
2303 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2304 v850_encode_data_area (decl, XEXP (rtl, 0));
2305 }
2306
2307 /* Construct a JR instruction to a routine that will perform the equivalent of
2308 the RTL passed in as an argument. This RTL is a function epilogue that
2309 pops registers off the stack and possibly releases some extra stack space
2310 as well. The code has already verified that the RTL matches these
2311 requirements. */
2312
char *
construct_restore_jr (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX -- static buffer returned to the caller;
			     not reentrant, but final is single-threaded.  */

  /* Element 0 is the return, element 1 the stack adjustment; at least
     one register restore must follow.  */
  if (count <= 2)
    {
      error ("bogus JR construction: %d", count);
      return NULL;
    }

  /* Work out how many bytes to pop off the stack before retrieving
     registers.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);

  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));

  /* Each pop will remove 4 bytes from the stack....  */
  stack_bytes -= (count - 2) * 4;

  /* Make sure the stack adjustment covers exactly the registers being
     popped -- the __return_* helpers release no additional stack.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 2; i < count; i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_DEST (vector_element));
    }

  /* Scan for the first register to pop.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to pop.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JR anyway.  We will
     be popping more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__return_%s", reg_names [first]);
      else
	sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);

      /* The helper may be out of range of a JR, so load its address
	 into r6 and jump indirectly.  */
      sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jr __return_%s", reg_names [first]);
      else
	sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
    }

  return buff;
}
2411
2412
2413 /* Construct a JARL instruction to a routine that will perform the equivalent
2414 of the RTL passed as a parameter. This RTL is a function prologue that
2415 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2416 some stack space as well. The code has already verified that the RTL
2417 matches these requirements. */
char *
construct_save_jarl (rtx op)
{
  int count = XVECLEN (op, 0);
  int stack_bytes;
  unsigned long int mask;
  unsigned long int first;
  unsigned long int last;
  int i;
  static char buff [100]; /* XXX -- static buffer returned to the caller;
			     not reentrant, but final is single-threaded.  */

  /* Element 0 is the stack adjustment; at least one register store
     (plus the JARL clobbers under -mlong-calls) must follow.  */
  if (count <= (TARGET_LONG_CALLS ? 3 : 2))
    {
      error ("bogus JARL construction: %d", count);
      return NULL;
    }

  /* Paranoia.  */
  gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
  gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
  gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);

  /* Work out how many bytes to push onto the stack after storing the
     registers.  */
  stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));

  /* Each push will put 4 bytes onto the stack....  */
  stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;

  /* Make sure the stack adjustment covers exactly the registers being
     pushed -- the __save_* helpers allocate no additional stack.  */
  if (stack_bytes != 0)
    {
      error ("bad amount of stack space removal: %d", stack_bytes);
      return NULL;
    }

  /* Now compute the bit mask of registers to push.  */
  mask = 0;
  for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
    {
      rtx vector_element = XVECEXP (op, 0, i);

      gcc_assert (GET_CODE (vector_element) == SET);
      gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
      gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
					       SImode));

      mask |= 1 << REGNO (SET_SRC (vector_element));
    }

  /* Scan for the first register to push.  */
  for (first = 0; first < 32; first++)
    {
      if (mask & (1 << first))
	break;
    }

  gcc_assert (first < 32);

  /* Discover the last register to push.  */
  if (mask & (1 << LINK_POINTER_REGNUM))
    {
      last = LINK_POINTER_REGNUM;
    }
  else
    {
      gcc_assert (!stack_bytes);
      gcc_assert (mask & (1 << 29));

      last = 29;
    }

  /* Note, it is possible to have gaps in the register mask.
     We ignore this here, and generate a JARL anyway.  We will
     be pushing more registers than is strictly necessary, but
     it does save code space.  */

  if (TARGET_LONG_CALLS)
    {
      char name[40];

      if (first == last)
	sprintf (name, "__save_%s", reg_names [first]);
      else
	sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);

      /* The helper may be out of range of a JARL: compute the return
	 address in r10 by hand and jump through r11.  */
      sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
	       name, name);
    }
  else
    {
      if (first == last)
	sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
      else
	sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
		 reg_names [last]);
    }

  return buff;
}
2519
2520 extern tree last_assemble_variable_decl;
2521 extern int size_directive_output;
2522
2523 /* A version of asm_output_aligned_bss() that copes with the special
2524 data areas of the v850. */
2525 void
2526 v850_output_aligned_bss (FILE * file,
2527 tree decl,
2528 const char * name,
2529 unsigned HOST_WIDE_INT size,
2530 int align)
2531 {
2532 switch (v850_get_data_area (decl))
2533 {
2534 case DATA_AREA_ZDA:
2535 switch_to_section (zbss_section);
2536 break;
2537
2538 case DATA_AREA_SDA:
2539 switch_to_section (sbss_section);
2540 break;
2541
2542 case DATA_AREA_TDA:
2543 switch_to_section (tdata_section);
2544
2545 default:
2546 switch_to_section (bss_section);
2547 break;
2548 }
2549
2550 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2551 #ifdef ASM_DECLARE_OBJECT_NAME
2552 last_assemble_variable_decl = decl;
2553 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2554 #else
2555 /* Standard thing is just output label for the object. */
2556 ASM_OUTPUT_LABEL (file, name);
2557 #endif /* ASM_DECLARE_OBJECT_NAME */
2558 ASM_OUTPUT_SKIP (file, size ? size : 1);
2559 }
2560
2561 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2562 void
2563 v850_output_common (FILE * file,
2564 tree decl,
2565 const char * name,
2566 int size,
2567 int align)
2568 {
2569 if (decl == NULL_TREE)
2570 {
2571 fprintf (file, "%s", COMMON_ASM_OP);
2572 }
2573 else
2574 {
2575 switch (v850_get_data_area (decl))
2576 {
2577 case DATA_AREA_ZDA:
2578 fprintf (file, "%s", ZCOMMON_ASM_OP);
2579 break;
2580
2581 case DATA_AREA_SDA:
2582 fprintf (file, "%s", SCOMMON_ASM_OP);
2583 break;
2584
2585 case DATA_AREA_TDA:
2586 fprintf (file, "%s", TCOMMON_ASM_OP);
2587 break;
2588
2589 default:
2590 fprintf (file, "%s", COMMON_ASM_OP);
2591 break;
2592 }
2593 }
2594
2595 assemble_name (file, name);
2596 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2597 }
2598
2599 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2600 void
2601 v850_output_local (FILE * file,
2602 tree decl,
2603 const char * name,
2604 int size,
2605 int align)
2606 {
2607 fprintf (file, "%s", LOCAL_ASM_OP);
2608 assemble_name (file, name);
2609 fprintf (file, "\n");
2610
2611 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2612 }
2613
/* Implement TARGET_INSERT_ATTRIBUTES.  Add a data area to the given
   declaration if a ghs data area pragma is currently in effect
   (#pragma ghs startXXX/endXXX), and assign a default section name to
   DECL when its section kind has been renamed by pragma.  */
static void
v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
{
  /* Apply the pragma's data area only to file-scope variables/constants
     that do not already carry an explicit data area.  */
  if (data_area_stack
      && data_area_stack->data_area
      && current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
      && v850_get_data_area (decl) == DATA_AREA_NORMAL)
    v850_set_data_area (decl, data_area_stack->data_area);

  /* Initialize the default names of the v850 specific sections,
     if this has not been done before.  */

  if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
    {
      GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
	= build_string (sizeof (".sdata")-1, ".sdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
	= build_string (sizeof (".rosdata")-1, ".rosdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
	= build_string (sizeof (".tdata")-1, ".tdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
	= build_string (sizeof (".zdata")-1, ".zdata");

      GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
	= build_string (sizeof (".rozdata")-1, ".rozdata");
    }

  /* Only consider file-scope definitions (or externs with initializers)
     that have no explicit section name yet.  */
  if (current_function_decl == NULL_TREE
      && (TREE_CODE (decl) == VAR_DECL
	  || TREE_CODE (decl) == CONST_DECL
	  || TREE_CODE (decl) == FUNCTION_DECL)
      && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
      && !DECL_SECTION_NAME (decl))
    {
      enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
      tree chosen_section;

      if (TREE_CODE (decl) == FUNCTION_DECL)
	kind = GHS_SECTION_KIND_TEXT;
      else
	{
	  /* First choose a section kind based on the data area of the decl. */
	  switch (v850_get_data_area (decl))
	    {
	    default:
	      gcc_unreachable ();

	    case DATA_AREA_SDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROSDATA
		      : GHS_SECTION_KIND_SDATA);
	      break;

	    case DATA_AREA_TDA:
	      kind = GHS_SECTION_KIND_TDATA;
	      break;

	    case DATA_AREA_ZDA:
	      kind = ((TREE_READONLY (decl))
		      ? GHS_SECTION_KIND_ROZDATA
		      : GHS_SECTION_KIND_ZDATA);
	      break;

	    case DATA_AREA_NORMAL:		 /* default data area */
	      if (TREE_READONLY (decl))
		kind = GHS_SECTION_KIND_RODATA;
	      else if (DECL_INITIAL (decl))
		kind = GHS_SECTION_KIND_DATA;
	      else
		kind = GHS_SECTION_KIND_BSS;
	    }
	}

      /* Now, if the section kind has been explicitly renamed,
         then attach a section attribute.  */
      chosen_section = GHS_current_section_names [(int) kind];

      /* Otherwise, if this kind of section needs an explicit section
         attribute, then also attach one.  */
      if (chosen_section == NULL)
        chosen_section = GHS_default_section_names [(int) kind];

      if (chosen_section)
	{
	  /* Only set the section name if specified by a pragma, because
	     otherwise it will force those variables to get allocated storage
	     in this module, rather than by the linker.  */
	  DECL_SECTION_NAME (decl) = chosen_section;
	}
    }
}
2711
2712 /* Construct a DISPOSE instruction that is the equivalent of
2713 the given RTX. We have already verified that this should
2714 be possible. */
2715
2716 char *
2717 construct_dispose_instruction (rtx op)
2718 {
2719 int count = XVECLEN (op, 0);
2720 int stack_bytes;
2721 unsigned long int mask;
2722 int i;
2723 static char buff[ 100 ]; /* XXX */
2724 int use_callt = 0;
2725
2726 if (count <= 2)
2727 {
2728 error ("bogus DISPOSE construction: %d", count);
2729 return NULL;
2730 }
2731
2732 /* Work out how many bytes to pop off the
2733 stack before retrieving registers. */
2734 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2735 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2736 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2737
2738 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2739
2740 /* Each pop will remove 4 bytes from the stack.... */
2741 stack_bytes -= (count - 2) * 4;
2742
2743 /* Make sure that the amount we are popping
2744 will fit into the DISPOSE instruction. */
2745 if (stack_bytes > 128)
2746 {
2747 error ("too much stack space to dispose of: %d", stack_bytes);
2748 return NULL;
2749 }
2750
2751 /* Now compute the bit mask of registers to push. */
2752 mask = 0;
2753
2754 for (i = 2; i < count; i++)
2755 {
2756 rtx vector_element = XVECEXP (op, 0, i);
2757
2758 gcc_assert (GET_CODE (vector_element) == SET);
2759 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2760 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2761 SImode));
2762
2763 if (REGNO (SET_DEST (vector_element)) == 2)
2764 use_callt = 1;
2765 else
2766 mask |= 1 << REGNO (SET_DEST (vector_element));
2767 }
2768
2769 if (! TARGET_DISABLE_CALLT
2770 && (use_callt || stack_bytes == 0))
2771 {
2772 if (use_callt)
2773 {
2774 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2775 return buff;
2776 }
2777 else
2778 {
2779 for (i = 20; i < 32; i++)
2780 if (mask & (1 << i))
2781 break;
2782
2783 if (i == 31)
2784 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2785 else
2786 sprintf (buff, "callt ctoff(__callt_return_r%d_r%s)",
2787 i, (mask & (1 << 31)) ? "31c" : "29");
2788 }
2789 }
2790 else
2791 {
2792 static char regs [100]; /* XXX */
2793 int done_one;
2794
2795 /* Generate the DISPOSE instruction. Note we could just issue the
2796 bit mask as a number as the assembler can cope with this, but for
2797 the sake of our readers we turn it into a textual description. */
2798 regs[0] = 0;
2799 done_one = 0;
2800
2801 for (i = 20; i < 32; i++)
2802 {
2803 if (mask & (1 << i))
2804 {
2805 int first;
2806
2807 if (done_one)
2808 strcat (regs, ", ");
2809 else
2810 done_one = 1;
2811
2812 first = i;
2813 strcat (regs, reg_names[ first ]);
2814
2815 for (i++; i < 32; i++)
2816 if ((mask & (1 << i)) == 0)
2817 break;
2818
2819 if (i > first + 1)
2820 {
2821 strcat (regs, " - ");
2822 strcat (regs, reg_names[ i - 1 ] );
2823 }
2824 }
2825 }
2826
2827 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2828 }
2829
2830 return buff;
2831 }
2832
2833 /* Construct a PREPARE instruction that is the equivalent of
2834 the given RTL. We have already verified that this should
2835 be possible. */
2836
2837 char *
2838 construct_prepare_instruction (rtx op)
2839 {
2840 int count;
2841 int stack_bytes;
2842 unsigned long int mask;
2843 int i;
2844 static char buff[ 100 ]; /* XXX */
2845 int use_callt = 0;
2846
2847 if (XVECLEN (op, 0) <= 1)
2848 {
2849 error ("bogus PREPEARE construction: %d", XVECLEN (op, 0));
2850 return NULL;
2851 }
2852
2853 /* Work out how many bytes to push onto
2854 the stack after storing the registers. */
2855 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2856 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2857 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2858
2859 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2860
2861
2862 /* Make sure that the amount we are popping
2863 will fit into the DISPOSE instruction. */
2864 if (stack_bytes < -128)
2865 {
2866 error ("too much stack space to prepare: %d", stack_bytes);
2867 return NULL;
2868 }
2869
2870 /* Now compute the bit mask of registers to push. */
2871 count = 0;
2872 mask = 0;
2873 for (i = 1; i < XVECLEN (op, 0); i++)
2874 {
2875 rtx vector_element = XVECEXP (op, 0, i);
2876
2877 if (GET_CODE (vector_element) == CLOBBER)
2878 continue;
2879
2880 gcc_assert (GET_CODE (vector_element) == SET);
2881 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2882 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2883 SImode));
2884
2885 if (REGNO (SET_SRC (vector_element)) == 2)
2886 use_callt = 1;
2887 else
2888 mask |= 1 << REGNO (SET_SRC (vector_element));
2889 count++;
2890 }
2891
2892 stack_bytes += count * 4;
2893
2894 if ((! TARGET_DISABLE_CALLT)
2895 && (use_callt || stack_bytes == 0))
2896 {
2897 if (use_callt)
2898 {
2899 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2900 return buff;
2901 }
2902
2903 for (i = 20; i < 32; i++)
2904 if (mask & (1 << i))
2905 break;
2906
2907 if (i == 31)
2908 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2909 else
2910 sprintf (buff, "callt ctoff(__callt_save_r%d_r%s)",
2911 i, (mask & (1 << 31)) ? "31c" : "29");
2912 }
2913 else
2914 {
2915 static char regs [100]; /* XXX */
2916 int done_one;
2917
2918
2919 /* Generate the PREPARE instruction. Note we could just issue the
2920 bit mask as a number as the assembler can cope with this, but for
2921 the sake of our readers we turn it into a textual description. */
2922 regs[0] = 0;
2923 done_one = 0;
2924
2925 for (i = 20; i < 32; i++)
2926 {
2927 if (mask & (1 << i))
2928 {
2929 int first;
2930
2931 if (done_one)
2932 strcat (regs, ", ");
2933 else
2934 done_one = 1;
2935
2936 first = i;
2937 strcat (regs, reg_names[ first ]);
2938
2939 for (i++; i < 32; i++)
2940 if ((mask & (1 << i)) == 0)
2941 break;
2942
2943 if (i > first + 1)
2944 {
2945 strcat (regs, " - ");
2946 strcat (regs, reg_names[ i - 1 ] );
2947 }
2948 }
2949 }
2950
2951 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2952 }
2953
2954 return buff;
2955 }
2956
2957 /* Return an RTX indicating where the return address to the
2958 calling function can be found. */
2959
2960 rtx
2961 v850_return_addr (int count)
2962 {
2963 if (count != 0)
2964 return const0_rtx;
2965
2966 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2967 }
2968 \f
2969 /* Implement TARGET_ASM_INIT_SECTIONS. */
2970
2971 static void
2972 v850_asm_init_sections (void)
2973 {
2974 rosdata_section
2975 = get_unnamed_section (0, output_section_asm_op,
2976 "\t.section .rosdata,\"a\"");
2977
2978 rozdata_section
2979 = get_unnamed_section (0, output_section_asm_op,
2980 "\t.section .rozdata,\"a\"");
2981
2982 tdata_section
2983 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2984 "\t.section .tdata,\"aw\"");
2985
2986 zdata_section
2987 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2988 "\t.section .zdata,\"aw\"");
2989
2990 zbss_section
2991 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2992 output_section_asm_op,
2993 "\t.section .zbss,\"aw\"");
2994 }
2995
2996 static section *
2997 v850_select_section (tree exp,
2998 int reloc ATTRIBUTE_UNUSED,
2999 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
3000 {
3001 if (TREE_CODE (exp) == VAR_DECL)
3002 {
3003 int is_const;
3004 if (!TREE_READONLY (exp)
3005 || TREE_SIDE_EFFECTS (exp)
3006 || !DECL_INITIAL (exp)
3007 || (DECL_INITIAL (exp) != error_mark_node
3008 && !TREE_CONSTANT (DECL_INITIAL (exp))))
3009 is_const = FALSE;
3010 else
3011 is_const = TRUE;
3012
3013 switch (v850_get_data_area (exp))
3014 {
3015 case DATA_AREA_ZDA:
3016 return is_const ? rozdata_section : zdata_section;
3017
3018 case DATA_AREA_TDA:
3019 return tdata_section;
3020
3021 case DATA_AREA_SDA:
3022 return is_const ? rosdata_section : sdata_section;
3023
3024 default:
3025 return is_const ? readonly_data_section : data_section;
3026 }
3027 }
3028 return readonly_data_section;
3029 }
3030 \f
/* Worker function for TARGET_FUNCTION_VALUE_REGNO_P.  r10 is the only
   register used to carry function return values.  */

static bool
v850_function_value_regno_p (const unsigned int regno)
{
  if (regno == 10)
    return true;

  return false;
}
3038
3039 /* Worker function for TARGET_RETURN_IN_MEMORY. */
3040
3041 static bool
3042 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
3043 {
3044 /* Return values > 8 bytes in length in memory. */
3045 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
3046 }
3047
3048 /* Worker function for TARGET_FUNCTION_VALUE. */
3049
3050 static rtx
3051 v850_function_value (const_tree valtype,
3052 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
3053 bool outgoing ATTRIBUTE_UNUSED)
3054 {
3055 return gen_rtx_REG (TYPE_MODE (valtype), 10);
3056 }
3057
3058 \f
3059 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
3060
3061 static void
3062 v850_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
3063 enum machine_mode mode ATTRIBUTE_UNUSED,
3064 tree type ATTRIBUTE_UNUSED,
3065 int *pretend_arg_size ATTRIBUTE_UNUSED,
3066 int second_time ATTRIBUTE_UNUSED)
3067 {
3068 ca->anonymous_args = (!TARGET_GHS ? 1 : 0);
3069 }
3070
3071 /* Worker function for TARGET_CAN_ELIMINATE. */
3072
3073 static bool
3074 v850_can_eliminate (const int from ATTRIBUTE_UNUSED, const int to)
3075 {
3076 return (to == STACK_POINTER_REGNUM ? ! frame_pointer_needed : true);
3077 }
3078
3079 /* Worker function for TARGET_CONDITIONAL_REGISTER_USAGE.
3080
3081 If TARGET_APP_REGS is not defined then add r2 and r5 to
3082 the pool of fixed registers. See PR 14505. */
3083
3084 static void
3085 v850_conditional_register_usage (void)
3086 {
3087 if (TARGET_APP_REGS)
3088 {
3089 fixed_regs[2] = 0; call_used_regs[2] = 0;
3090 fixed_regs[5] = 0; call_used_regs[5] = 1;
3091 }
3092 }
3093 \f
/* Worker function for TARGET_ASM_TRAMPOLINE_TEMPLATE.  Emit the fixed
   instruction sequence of the trampoline; the two trailing .long slots
   are patched at runtime by v850_trampoline_init.  */

static void
v850_asm_trampoline_template (FILE *f)
{
  static const char * const lines[] =
    {
      "\tjarl .+4,r12\n",
      "\tld.w 12[r12],r20\n",
      "\tld.w 16[r12],r12\n",
      "\tjmp [r12]\n",
      "\tnop\n",
      "\t.long 0\n",
      "\t.long 0\n",
    };
  size_t i;

  for (i = 0; i < sizeof lines / sizeof lines[0]; i++)
    fputs (lines[i], f);
}
3107
/* Worker function for TARGET_TRAMPOLINE_INIT.  Copy the trampoline
   template into M_TRAMP and store CHAIN_VALUE and FNDECL's address
   into the two .long slots at byte offsets 16 and 20.  The template's
   "jarl .+4,r12" leaves r12 pointing at offset 4, so its 12[r12] and
   16[r12] loads read exactly these two slots.  */

static void
v850_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
{
  rtx mem, fnaddr = XEXP (DECL_RTL (fndecl), 0);

  emit_block_move (m_tramp, assemble_trampoline_template (),
		   GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);

  /* Static chain value, loaded into r20 by the template.  */
  mem = adjust_address (m_tramp, SImode, 16);
  emit_move_insn (mem, chain_value);
  /* Target function address, loaded into r12 and jumped through.  */
  mem = adjust_address (m_tramp, SImode, 20);
  emit_move_insn (mem, fnaddr);
}
3123
3124 static int
3125 v850_issue_rate (void)
3126 {
3127 return (TARGET_V850E2_ALL? 2 : 1);
3128 }
3129
3130 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
3131
3132 static bool
3133 v850_legitimate_constant_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3134 {
3135 return (GET_CODE (x) == CONST_DOUBLE
3136 || !(GET_CODE (x) == CONST
3137 && GET_CODE (XEXP (x, 0)) == PLUS
3138 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
3139 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3140 && !CONST_OK_FOR_K (INTVAL (XEXP (XEXP (x, 0), 1)))));
3141 }
3142
3143 static int
3144 v850_memory_move_cost (enum machine_mode mode, bool in)
3145 {
3146 switch (GET_MODE_SIZE (mode))
3147 {
3148 case 0:
3149 return in ? 24 : 8;
3150 case 1:
3151 case 2:
3152 case 3:
3153 case 4:
3154 return in ? 6 : 2;
3155 default:
3156 return (GET_MODE_SIZE (mode) / 2) * (in ? 3 : 1);
3157 }
3158 }
3159 \f
/* V850 specific attributes.  */

static const struct attribute_spec v850_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  /* Mark a function as an interrupt handler; "interrupt" is an alias
     for "interrupt_handler".  */
  { "interrupt_handler", 0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  { "interrupt",         0, 0, true,  false, false,
    v850_handle_interrupt_attribute, false },
  /* Place a variable in the small, tiny or zero data area.  */
  { "sda", 0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "tda", 0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  { "zda", 0, 0, true,  false, false,
    v850_handle_data_area_attribute, false },
  /* Table terminator.  */
  { NULL,  0, 0, false, false, false, NULL, false }
};
3178 \f
/* Initialize the GCC target structure.  */

/* Costs.  */
#undef  TARGET_MEMORY_MOVE_COST
#define TARGET_MEMORY_MOVE_COST v850_memory_move_cost

/* Assembly output.  */
#undef  TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"

#undef  TARGET_PRINT_OPERAND
#define TARGET_PRINT_OPERAND v850_print_operand
#undef  TARGET_PRINT_OPERAND_ADDRESS
#define TARGET_PRINT_OPERAND_ADDRESS v850_print_operand_address
#undef  TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P v850_print_operand_punct_valid_p

#undef  TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
#define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA v850_output_addr_const_extra

/* Attributes and sections.  */
#undef  TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE v850_attribute_table

#undef  TARGET_INSERT_ATTRIBUTES
#define TARGET_INSERT_ATTRIBUTES v850_insert_attributes

#undef  TARGET_ASM_SELECT_SECTION
#define TARGET_ASM_SELECT_SECTION  v850_select_section

/* The assembler supports switchable .bss sections, but
   v850_select_section doesn't yet make use of them.  */
#undef  TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
#define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false

#undef  TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO v850_encode_section_info

#undef  TARGET_ASM_FILE_START_FILE_DIRECTIVE
#define TARGET_ASM_FILE_START_FILE_DIRECTIVE true

/* Option handling.  */
#undef  TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS (MASK_DEFAULT | MASK_APP_REGS)
#undef  TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION v850_handle_option

#undef  TARGET_RTX_COSTS
#define TARGET_RTX_COSTS v850_rtx_costs

#undef  TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hook_int_rtx_bool_0

#undef  TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG v850_reorg

#undef  TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE v850_issue_rate

/* Calling conventions.  */
#undef  TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P v850_function_value_regno_p
#undef  TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE v850_function_value

#undef  TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef  TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY v850_return_in_memory

#undef  TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE v850_pass_by_reference

#undef  TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef  TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs

#undef  TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes

#undef  TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG v850_function_arg

#undef  TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE v850_function_arg_advance

#undef  TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE v850_can_eliminate

#undef  TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE v850_conditional_register_usage

/* Trampolines for nested functions.  */
#undef  TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE v850_asm_trampoline_template
#undef  TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT v850_trampoline_init

#undef  TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING v850_strict_argument_naming

#undef  TARGET_OPTION_OPTIMIZATION_TABLE
#define TARGET_OPTION_OPTIMIZATION_TABLE v850_option_optimization_table

#undef  TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P v850_legitimate_constant_p

struct gcc_target targetm = TARGET_INITIALIZER;

#include "gt-v850.h"