/* Xstormy16 target functions.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by Red Hat, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "conditions.h"
#include "insn-flags.h"
#include "output.h"
#include "insn-attr.h"
#include "flags.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "obstack.h"
#include "tree.h"
#include "expr.h"
#include "optabs.h"
#include "except.h"
#include "function.h"
#include "target.h"
#include "target-def.h"
#include "tm_p.h"
#include "langhooks.h"
#include "gimple.h"
#include "df.h"
#include "ggc.h"

static rtx emit_addhi3_postreload (rtx, rtx, rtx);
static void xstormy16_asm_out_constructor (rtx, int);
static void xstormy16_asm_out_destructor (rtx, int);
static void xstormy16_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
                                           HOST_WIDE_INT, tree);

static void xstormy16_init_builtins (void);
static rtx xstormy16_expand_builtin (tree, rtx, rtx, enum machine_mode, int);
static bool xstormy16_rtx_costs (rtx, int, int, int *, bool);
static int xstormy16_address_cost (rtx, bool);
static bool xstormy16_return_in_memory (const_tree, const_tree);

static GTY(()) section *bss100_section;

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
xstormy16_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED,
                     int *total, bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
    case CONST_INT:
      if (INTVAL (x) < 16 && INTVAL (x) >= 0)
        *total = COSTS_N_INSNS (1) / 2;
      else if (INTVAL (x) < 256 && INTVAL (x) >= 0)
        *total = COSTS_N_INSNS (1);
      else
        *total = COSTS_N_INSNS (2);
      return true;

    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      *total = COSTS_N_INSNS (2);
      return true;

    case MULT:
      *total = COSTS_N_INSNS (35 + 6);
      return true;
    case DIV:
      *total = COSTS_N_INSNS (51 - 6);
      return true;

    default:
      return false;
    }
}

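/* Compute the cost of an address.  The weights below are heuristic
   rather than cycle-accurate: a constant address is cheapest, a base
   register plus a constant offset is the most expensive form, and
   anything else (typically a bare register) falls in between.  */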
static int
xstormy16_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
{
  return (CONST_INT_P (x) ? 2
          : GET_CODE (x) == PLUS ? 7
          : 5);
}

/* Branches are handled as follows:

   1. HImode compare-and-branches.  The machine supports these
      natively, so the appropriate pattern is emitted directly.

   2. SImode EQ and NE.  These are emitted as pairs of HImode
      compare-and-branches.

   3. SImode LT, GE, LTU and GEU.  These are emitted as a sequence
      of a SImode subtract followed by a branch (not a compare-and-branch),
      like this:
      sub
      sbc
      blt

   4. SImode GT, LE, GTU, LEU.  These are emitted as a sequence like:
      sub
      sbc
      blt
      or
      bne.  */

/* Emit a branch of kind CODE to location LOC.  */

void
xstormy16_emit_cbranch (enum rtx_code code, rtx op0, rtx op1, rtx loc)
{
  rtx condition_rtx, loc_ref, branch, cy_clobber;
  rtvec vec;
  enum machine_mode mode;

  mode = GET_MODE (op0);
  gcc_assert (mode == HImode || mode == SImode);

  if (mode == SImode
      && (code == GT || code == LE || code == GTU || code == LEU))
    {
      int unsigned_p = (code == GTU || code == LEU);
      int gt_p = (code == GT || code == GTU);
      rtx lab = NULL_RTX;

      if (gt_p)
        lab = gen_label_rtx ();
      xstormy16_emit_cbranch (unsigned_p ? LTU : LT, op0, op1, gt_p ? lab : loc);
      /* This should be generated as a comparison against the temporary
         created by the previous insn, but reload can't handle that.  */
      xstormy16_emit_cbranch (gt_p ? NE : EQ, op0, op1, loc);
      if (gt_p)
        emit_label (lab);
      return;
    }
  else if (mode == SImode
           && (code == NE || code == EQ)
           && op1 != const0_rtx)
    {
      rtx op0_word, op1_word;
      rtx lab = NULL_RTX;
      int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
      int i;

      if (code == EQ)
        lab = gen_label_rtx ();

      for (i = 0; i < num_words - 1; i++)
        {
          op0_word = simplify_gen_subreg (word_mode, op0, mode,
                                          i * UNITS_PER_WORD);
          op1_word = simplify_gen_subreg (word_mode, op1, mode,
                                          i * UNITS_PER_WORD);
          xstormy16_emit_cbranch (NE, op0_word, op1_word, code == EQ ? lab : loc);
        }
      op0_word = simplify_gen_subreg (word_mode, op0, mode,
                                      i * UNITS_PER_WORD);
      op1_word = simplify_gen_subreg (word_mode, op1, mode,
                                      i * UNITS_PER_WORD);
      xstormy16_emit_cbranch (code, op0_word, op1_word, loc);

      if (code == EQ)
        emit_label (lab);
      return;
    }

  /* We can't allow reload to try to generate any reload after a branch,
     so when some register must match we must make the temporary ourselves.  */
  if (mode != HImode)
    {
      rtx tmp;
      tmp = gen_reg_rtx (mode);
      emit_move_insn (tmp, op0);
      op0 = tmp;
    }

  condition_rtx = gen_rtx_fmt_ee (code, mode, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  branch = gen_rtx_SET (VOIDmode, pc_rtx,
                        gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
                                              loc_ref, pc_rtx));

  cy_clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

  if (mode == HImode)
    vec = gen_rtvec (2, branch, cy_clobber);
  else if (code == NE || code == EQ)
    vec = gen_rtvec (2, branch, gen_rtx_CLOBBER (VOIDmode, op0));
  else
    {
      rtx sub;
#if 0
      sub = gen_rtx_SET (VOIDmode, op0, gen_rtx_MINUS (SImode, op0, op1));
#else
      sub = gen_rtx_CLOBBER (SImode, op0);
#endif
      vec = gen_rtvec (3, branch, sub, cy_clobber);
    }

  emit_jump_insn (gen_rtx_PARALLEL (VOIDmode, vec));
}

/* Take a SImode conditional branch, one of GT/LE/GTU/LEU, and split
   the arithmetic operation.  Most of the work is done by
   xstormy16_expand_arith.  */

void
xstormy16_split_cbranch (enum machine_mode mode, rtx label, rtx comparison,
                         rtx dest)
{
  rtx op0 = XEXP (comparison, 0);
  rtx op1 = XEXP (comparison, 1);
  rtx seq, last_insn;
  rtx compare;

  start_sequence ();
  xstormy16_expand_arith (mode, COMPARE, dest, op0, op1);
  seq = get_insns ();
  end_sequence ();

  gcc_assert (INSN_P (seq));

  last_insn = seq;
  while (NEXT_INSN (last_insn) != NULL_RTX)
    last_insn = NEXT_INSN (last_insn);

  compare = SET_SRC (XVECEXP (PATTERN (last_insn), 0, 0));
  PUT_CODE (XEXP (compare, 0), GET_CODE (comparison));
  XEXP (compare, 1) = gen_rtx_LABEL_REF (VOIDmode, label);
  emit_insn (seq);
}


/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label.

   OP is the conditional expression, or NULL for branch-always.

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_hi (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  int need_longbranch = (op != NULL_RTX
                         ? get_attr_length (insn) == 8
                         : get_attr_length (insn) == 4);
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  const char *operands;
  enum rtx_code code;

  if (! op)
    {
      if (need_longbranch)
        ccode = "jmpf";
      else
        ccode = "br";
      sprintf (string, "%s %s", ccode, label);
      return string;
    }

  code = GET_CODE (op);

  if (! REG_P (XEXP (op, 0)))
    {
      code = swap_condition (code);
      operands = "%3,%2";
    }
  else
    operands = "%2,%3";

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:   ccode = "z";   break;
    case NE:   ccode = "nz";  break;
    case GE:   ccode = "ge";  break;
    case LT:   ccode = "lt";  break;
    case GT:   ccode = "gt";  break;
    case LE:   ccode = "le";  break;
    case GEU:  ccode = "nc";  break;
    case LTU:  ccode = "c";   break;
    case GTU:  ccode = "hi";  break;
    case LEU:  ccode = "ls";  break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    templ = "b%s %s,.+8 | jmpf %s";
  else
    templ = "b%s %s,%s";
  sprintf (string, templ, ccode, operands, label);

  return string;
}

/* Return the string to output a conditional branch to LABEL, which is
   the operand number of the label, but suitable for the tail of a
   SImode branch.

   OP is the conditional expression (OP is never NULL_RTX).

   REVERSED is nonzero if we should reverse the sense of the comparison.

   INSN is the insn.  */

char *
xstormy16_output_cbranch_si (rtx op, const char *label, int reversed, rtx insn)
{
  static char string[64];
  int need_longbranch = get_attr_length (insn) >= 8;
  int really_reversed = reversed ^ need_longbranch;
  const char *ccode;
  const char *templ;
  char prevop[16];
  enum rtx_code code;

  code = GET_CODE (op);

  /* Work out which way this really branches.  */
  if (really_reversed)
    code = reverse_condition (code);

  switch (code)
    {
    case EQ:   ccode = "z";   break;
    case NE:   ccode = "nz";  break;
    case GE:   ccode = "ge";  break;
    case LT:   ccode = "lt";  break;
    case GEU:  ccode = "nc";  break;
    case LTU:  ccode = "c";   break;

    /* The missing codes above should never be generated.  */
    default:
      gcc_unreachable ();
    }

  switch (code)
    {
    case EQ: case NE:
      {
        int regnum;

        gcc_assert (REG_P (XEXP (op, 0)));

        regnum = REGNO (XEXP (op, 0));
        sprintf (prevop, "or %s,%s", reg_names[regnum], reg_names[regnum+1]);
      }
      break;

    case GE: case LT: case GEU: case LTU:
      strcpy (prevop, "sbc %2,%3");
      break;

    default:
      gcc_unreachable ();
    }

  if (need_longbranch)
    templ = "%s | b%s .+6 | jmpf %s";
  else
    templ = "%s | b%s %s";
  sprintf (string, templ, prevop, ccode, label);

  return string;
}
\f
/* Many machines have some registers that cannot be copied directly to or from
   memory or even from other types of registers.  An example is the `MQ'
   register, which on most machines, can only be copied to or from general
   registers, but not memory.  Some machines allow copying all registers to and
   from memory, but require a scratch register for stores to some memory
   locations (e.g., those with symbolic address on the RT, and those with
   certain symbolic address on the SPARC when compiling PIC).  In some cases,
   both an intermediate and a scratch register are required.

   You should define these macros to indicate to the reload phase that it may
   need to allocate at least one register for a reload in addition to the
   register to contain the data.  Specifically, if copying X to a register
   RCLASS in MODE requires an intermediate register, you should define
   `SECONDARY_INPUT_RELOAD_CLASS' to return the largest register class all of
   whose registers can be used as intermediate registers or scratch registers.

   If copying a register RCLASS in MODE to X requires an intermediate or scratch
   register, `SECONDARY_OUTPUT_RELOAD_CLASS' should be defined to return the
   largest register class required.  If the requirements for input and output
   reloads are the same, the macro `SECONDARY_RELOAD_CLASS' should be used
   instead of defining both macros identically.

   The values returned by these macros are often `GENERAL_REGS'.  Return
   `NO_REGS' if no spare register is needed; i.e., if X can be directly copied
   to or from a register of RCLASS in MODE without requiring a scratch register.
   Do not define this macro if it would always return `NO_REGS'.

   If a scratch register is required (either with or without an intermediate
   register), you should define patterns for `reload_inM' or `reload_outM', as
   required.  These patterns, which will normally be implemented with a
   `define_expand', should be similar to the `movM' patterns, except that
   operand 2 is the scratch register.

   Define constraints for the reload register and scratch register that contain
   a single register class.  If the original reload register (whose class is
   RCLASS) can meet the constraint given in the pattern, the value returned by
   these macros is used for the class of the scratch register.  Otherwise, two
   additional reload registers are required.  Their classes are obtained from
   the constraints in the insn pattern.

   X might be a pseudo-register or a `subreg' of a pseudo-register, which could
   either be in a hard register or in memory.  Use `true_regnum' to find out;
   it will return -1 if the pseudo is in memory and the hard register number if
   it is in a register.

   These macros should not be used in the case where a particular class of
   registers can only be copied to memory and not to another class of
   registers.  In that case, secondary reload registers are not needed and
   would not be helpful.  Instead, a stack location must be used to perform the
   copy and the `movM' pattern should use memory as an intermediate storage.
   This case often occurs between floating-point and general registers.  */

enum reg_class
xstormy16_secondary_reload_class (enum reg_class rclass,
                                  enum machine_mode mode ATTRIBUTE_UNUSED,
                                  rtx x)
{
  /* This chip has the interesting property that only the first eight
     registers can be moved to/from memory.  */
  if ((MEM_P (x)
       || ((GET_CODE (x) == SUBREG || REG_P (x))
           && (true_regnum (x) == -1
               || true_regnum (x) >= FIRST_PSEUDO_REGISTER)))
      && ! reg_class_subset_p (rclass, EIGHT_REGS))
    return EIGHT_REGS;

  return NO_REGS;
}

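/* Prefer to reload memory operands into EIGHT_REGS rather than
   GENERAL_REGS, since only the first eight registers can be moved to
   or from memory (see xstormy16_secondary_reload_class above).  */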
enum reg_class
xstormy16_preferred_reload_class (rtx x, enum reg_class rclass)
{
  if (rclass == GENERAL_REGS && MEM_P (x))
    return EIGHT_REGS;

  return rclass;
}

/* Predicate for symbols and addresses that reflect special 8-bit
   addressing.  */

int
xstormy16_below100_symbol (rtx x,
                           enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
    x = XEXP (x, 0);

  if (GET_CODE (x) == SYMBOL_REF)
    return (SYMBOL_REF_FLAGS (x) & SYMBOL_FLAG_XSTORMY16_BELOW100) != 0;

  if (CONST_INT_P (x))
    {
      HOST_WIDE_INT i = INTVAL (x);

      if ((i >= 0x0000 && i <= 0x00ff)
          || (i >= 0x7f00 && i <= 0x7fff))
        return 1;
    }
  return 0;
}

/* Likewise, but only for non-volatile MEMs, for patterns where the
   MEM will get split into smaller sized accesses.  */

int
xstormy16_splittable_below100_operand (rtx x, enum machine_mode mode)
{
  if (MEM_P (x) && MEM_VOLATILE_P (x))
    return 0;
  return xstormy16_below100_operand (x, mode);
}

/* Expand an 8-bit IOR.  This either detects the one case we can
   actually do, or uses a 16-bit IOR.  */

void
xstormy16_expand_iorqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  if (xstormy16_onebit_set_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
        in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
        out = gen_reg_rtx (QImode);
      emit_insn (gen_iorqi3_internal (out, in, val));
      if (out != operands[0])
        emit_move_insn (operands[0], out);
      return;
    }

  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_iorhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}

/* Expand an 8-bit AND.  This either detects the one case we can
   actually do, or uses a 16-bit AND.  */

void
xstormy16_expand_andqi3 (rtx *operands)
{
  rtx in, out, outsub, val;

  out = operands[0];
  in = operands[1];
  val = operands[2];

  if (xstormy16_onebit_clr_operand (val, QImode))
    {
      if (!xstormy16_below100_or_register (in, QImode))
        in = copy_to_mode_reg (QImode, in);
      if (!xstormy16_below100_or_register (out, QImode))
        out = gen_reg_rtx (QImode);
      emit_insn (gen_andqi3_internal (out, in, val));
      if (out != operands[0])
        emit_move_insn (operands[0], out);
      return;
    }

  if (! REG_P (in))
    in = copy_to_mode_reg (QImode, in);

  if (! REG_P (val) && ! CONST_INT_P (val))
    val = copy_to_mode_reg (QImode, val);

  if (! REG_P (out))
    out = gen_reg_rtx (QImode);

  in = simplify_gen_subreg (HImode, in, QImode, 0);
  outsub = simplify_gen_subreg (HImode, out, QImode, 0);

  if (! CONST_INT_P (val))
    val = simplify_gen_subreg (HImode, val, QImode, 0);

  emit_insn (gen_andhi3 (outsub, in, val));

  if (out != operands[0])
    emit_move_insn (operands[0], out);
}

#define LEGITIMATE_ADDRESS_INTEGER_P(X, OFFSET)                         \
  (CONST_INT_P (X)                                                      \
   && (unsigned HOST_WIDE_INT) (INTVAL (X) + (OFFSET) + 2048) < 4096)

#define LEGITIMATE_ADDRESS_CONST_INT_P(X, OFFSET)                       \
  (CONST_INT_P (X)                                                      \
   && INTVAL (X) + (OFFSET) >= 0                                        \
   && INTVAL (X) + (OFFSET) < 0x8000                                    \
   && (INTVAL (X) + (OFFSET) < 0x100 || INTVAL (X) + (OFFSET) >= 0x7F00))

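/* Return true if X is a legitimate address.  Judging from the checks
   below, that means: an absolute constant accepted by
   LEGITIMATE_ADDRESS_CONST_INT_P above; a base register, optionally
   with a signed 12-bit offset or a PRE_MODIFY, POST_INC or PRE_DEC
   side effect; or a below-100 symbol or address.  */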
bool
xstormy16_legitimate_address_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                                rtx x, bool strict)
{
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0))
    return true;

  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0))
    {
      x = XEXP (x, 0);
      /* PR 31232: Do not allow INT+INT as an address.  */
      if (CONST_INT_P (x))
        return false;
    }

  if ((GET_CODE (x) == PRE_MODIFY && CONST_INT_P (XEXP (XEXP (x, 1), 1)))
      || GET_CODE (x) == POST_INC
      || GET_CODE (x) == PRE_DEC)
    x = XEXP (x, 0);

  if (REG_P (x)
      && REGNO_OK_FOR_BASE_P (REGNO (x))
      && (! strict || REGNO (x) < FIRST_PSEUDO_REGISTER))
    return true;

  if (xstormy16_below100_symbol (x, mode))
    return true;

  return false;
}

/* Return nonzero if memory address X (an RTX) can have different
   meanings depending on the machine mode of the memory reference it
   is used for or if the address is valid for some modes but not
   others.

   Autoincrement and autodecrement addresses typically have mode-dependent
   effects because the amount of the increment or decrement is the size of the
   operand being addressed.  Some machines have other mode-dependent addresses.
   Many RISC machines have no mode-dependent addresses.

   You may assume that ADDR is a valid address for the machine.

   On this chip, this is true if the address is valid with an offset
   of 0 but not of 6, because in that case it cannot be used as an
   address for DImode or DFmode, or if the address is a post-increment
   or pre-decrement address.  */

int
xstormy16_mode_dependent_address_p (rtx x)
{
  if (LEGITIMATE_ADDRESS_CONST_INT_P (x, 0)
      && ! LEGITIMATE_ADDRESS_CONST_INT_P (x, 6))
    return 1;

  if (GET_CODE (x) == PLUS
      && LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 0)
      && ! LEGITIMATE_ADDRESS_INTEGER_P (XEXP (x, 1), 6))
    return 1;

  if (GET_CODE (x) == PLUS)
    x = XEXP (x, 0);

  /* Auto-increment addresses are now treated generically in recog.c.  */
  return 0;
}

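/* Predicate for memory operands whose address contains no PLUS,
   i.e. no base-plus-offset form; the name suggests these are the
   ones that fit the short load/store encoding.  */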
int
short_memory_operand (rtx x, enum machine_mode mode)
{
  if (! memory_operand (x, mode))
    return 0;
  return (GET_CODE (XEXP (x, 0)) != PLUS);
}

/* Splitter for the 'move' patterns, for modes not directly implemented
   by hardware.  Emit insns to copy a value of mode MODE from SRC to
   DEST.

   This function is only called when reload_completed.  */

void
xstormy16_split_move (enum machine_mode mode, rtx dest, rtx src)
{
  int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
  int direction, end, i;
  int src_modifies = 0;
  int dest_modifies = 0;
  int src_volatile = 0;
  int dest_volatile = 0;
  rtx mem_operand;
  rtx auto_inc_reg_rtx = NULL_RTX;

  /* Check initial conditions.  */
  gcc_assert (reload_completed
              && mode != QImode && mode != HImode
              && nonimmediate_operand (dest, mode)
              && general_operand (src, mode));

  /* This case is not supported below, and shouldn't be generated.  */
  gcc_assert (! MEM_P (dest) || ! MEM_P (src));

  /* This case is very very bad after reload, so trap it now.  */
  gcc_assert (GET_CODE (dest) != SUBREG && GET_CODE (src) != SUBREG);

  /* The general idea is to copy by words, offsetting the source and
     destination.  Normally the least-significant word will be copied
     first, but for pre-dec operations it's better to copy the
     most-significant word first.  Only one operand can be a pre-dec
     or post-inc operand.

     It's also possible that the copy overlaps so that the direction
     must be reversed.  */
  direction = 1;

  if (MEM_P (dest))
    {
      mem_operand = XEXP (dest, 0);
      dest_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
        auto_inc_reg_rtx = XEXP (mem_operand, 0);
      dest_volatile = MEM_VOLATILE_P (dest);
      if (dest_volatile)
        {
          dest = copy_rtx (dest);
          MEM_VOLATILE_P (dest) = 0;
        }
    }
  else if (MEM_P (src))
    {
      mem_operand = XEXP (src, 0);
      src_modifies = side_effects_p (mem_operand);
      if (auto_inc_p (mem_operand))
        auto_inc_reg_rtx = XEXP (mem_operand, 0);
      src_volatile = MEM_VOLATILE_P (src);
      if (src_volatile)
        {
          src = copy_rtx (src);
          MEM_VOLATILE_P (src) = 0;
        }
    }
  else
    mem_operand = NULL_RTX;

  if (mem_operand == NULL_RTX)
    {
      if (REG_P (src)
          && REG_P (dest)
          && reg_overlap_mentioned_p (dest, src)
          && REGNO (dest) > REGNO (src))
        direction = -1;
    }
  else if (GET_CODE (mem_operand) == PRE_DEC
           || (GET_CODE (mem_operand) == PLUS
               && GET_CODE (XEXP (mem_operand, 0)) == PRE_DEC))
    direction = -1;
  else if (MEM_P (src) && reg_overlap_mentioned_p (dest, src))
    {
      int regno;

      gcc_assert (REG_P (dest));
      regno = REGNO (dest);

      gcc_assert (refers_to_regno_p (regno, regno + num_words,
                                     mem_operand, 0));

      if (refers_to_regno_p (regno, regno + 1, mem_operand, 0))
        direction = -1;
      else if (refers_to_regno_p (regno + num_words - 1, regno + num_words,
                                  mem_operand, 0))
        direction = 1;
      else
        /* This means something like
           (set (reg:DI r0) (mem:DI (reg:HI r1)))
           which we'd need to support by doing the set of the second word
           last.  */
        gcc_unreachable ();
    }

  end = direction < 0 ? -1 : num_words;
  for (i = direction < 0 ? num_words - 1 : 0; i != end; i += direction)
    {
      rtx w_src, w_dest, insn;

      if (src_modifies)
        w_src = gen_rtx_MEM (word_mode, mem_operand);
      else
        w_src = simplify_gen_subreg (word_mode, src, mode, i * UNITS_PER_WORD);
      if (src_volatile)
        MEM_VOLATILE_P (w_src) = 1;
      if (dest_modifies)
        w_dest = gen_rtx_MEM (word_mode, mem_operand);
      else
        w_dest = simplify_gen_subreg (word_mode, dest, mode,
                                      i * UNITS_PER_WORD);
      if (dest_volatile)
        MEM_VOLATILE_P (w_dest) = 1;

      /* The simplify_subreg calls must always be able to simplify.  */
      gcc_assert (GET_CODE (w_src) != SUBREG
                  && GET_CODE (w_dest) != SUBREG);

      insn = emit_insn (gen_rtx_SET (VOIDmode, w_dest, w_src));
      if (auto_inc_reg_rtx)
        REG_NOTES (insn) = alloc_EXPR_LIST (REG_INC,
                                            auto_inc_reg_rtx,
                                            REG_NOTES (insn));
    }
}

/* Expander for the 'move' patterns.  Emit insns to copy a value of
   mode MODE from SRC to DEST.  */

void
xstormy16_expand_move (enum machine_mode mode, rtx dest, rtx src)
{
  if (MEM_P (dest) && (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (dest, 0);
      rtx dest_reg = XEXP (pmv, 0);
      rtx dest_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, dest_reg, dest_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

      dest = gen_rtx_MEM (mode, dest_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }
  else if (MEM_P (src) && (GET_CODE (XEXP (src, 0)) == PRE_MODIFY))
    {
      rtx pmv = XEXP (src, 0);
      rtx src_reg = XEXP (pmv, 0);
      rtx src_mod = XEXP (pmv, 1);
      rtx set = gen_rtx_SET (Pmode, src_reg, src_mod);
      rtx clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));

      src = gen_rtx_MEM (mode, src_reg);
      emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
    }

  /* There are only limited immediate-to-memory move instructions.  */
  if (! reload_in_progress
      && ! reload_completed
      && MEM_P (dest)
      && (! CONST_INT_P (XEXP (dest, 0))
          || ! xstormy16_legitimate_address_p (mode, XEXP (dest, 0), 0))
      && ! xstormy16_below100_operand (dest, mode)
      && ! REG_P (src)
      && GET_CODE (src) != SUBREG)
    src = copy_to_mode_reg (mode, src);

  /* Don't emit something we would immediately split.  */
  if (reload_completed
      && mode != HImode && mode != QImode)
    {
      xstormy16_split_move (mode, dest, src);
      return;
    }

  emit_insn (gen_rtx_SET (VOIDmode, dest, src));
}
\f
/* Stack Layout:

   The stack is laid out as follows:

   SP->
   FP->   Local variables
          Register save area (up to 4 words)
          Argument register save area for stdarg (NUM_ARGUMENT_REGISTERS words)

   AP->   Return address (two words)
          9th procedure parameter word
          10th procedure parameter word
          ...
          last procedure parameter word

   The frame pointer location is tuned to make it most likely that all
   parameters and local variables can be accessed using a load-indexed
   instruction.  */

/* A structure to describe the layout.  */
struct xstormy16_stack_layout
{
  /* Size of the topmost three items on the stack.  */
  int locals_size;
  int register_save_size;
  int stdarg_save_size;
  /* Sum of the above items.  */
  int frame_size;
  /* Various offsets.  */
  int first_local_minus_ap;
  int sp_minus_fp;
  int fp_minus_ap;
};

/* Does REGNO need to be saved?  */
#define REG_NEEDS_SAVE(REGNUM, IFUN)                                    \
  ((df_regs_ever_live_p (REGNUM) && ! call_used_regs[REGNUM])           \
   || (IFUN && ! fixed_regs[REGNUM] && call_used_regs[REGNUM]           \
       && (REGNUM != CARRY_REGNUM)                                      \
       && (df_regs_ever_live_p (REGNUM) || ! current_function_is_leaf)))

/* Compute the stack layout.  */

struct xstormy16_stack_layout
xstormy16_compute_stack_layout (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  layout.locals_size = get_frame_size ();

  layout.register_save_size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      layout.register_save_size += UNITS_PER_WORD;

  if (cfun->stdarg)
    layout.stdarg_save_size = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;
  else
    layout.stdarg_save_size = 0;

  layout.frame_size = (layout.locals_size
                       + layout.register_save_size
                       + layout.stdarg_save_size);

  if (crtl->args.size <= 2048 && crtl->args.size != -1)
    {
      if (layout.frame_size - INCOMING_FRAME_SP_OFFSET
          + crtl->args.size <= 2048)
        layout.fp_minus_ap = layout.frame_size - INCOMING_FRAME_SP_OFFSET;
      else
        layout.fp_minus_ap = 2048 - crtl->args.size;
    }
  else
    layout.fp_minus_ap = (layout.stdarg_save_size
                          + layout.register_save_size
                          - INCOMING_FRAME_SP_OFFSET);
  layout.sp_minus_fp = (layout.frame_size - INCOMING_FRAME_SP_OFFSET
                        - layout.fp_minus_ap);
  layout.first_local_minus_ap = layout.sp_minus_fp - layout.locals_size;
  return layout;
}

/* Worker function for TARGET_CAN_ELIMINATE.  */

static bool
xstormy16_can_eliminate (const int from, const int to)
{
  return (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM
          ? ! frame_pointer_needed
          : true);
}

/* Determine how all the special registers get eliminated.  */

int
xstormy16_initial_elimination_offset (int from, int to)
{
  struct xstormy16_stack_layout layout;
  int result;

  layout = xstormy16_compute_stack_layout ();

  if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    result = layout.sp_minus_fp - layout.locals_size;
  else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    result = - layout.locals_size;
  else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
    result = - layout.fp_minus_ap;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    result = - (layout.sp_minus_fp + layout.fp_minus_ap);
  else
    gcc_unreachable ();

  return result;
}

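/* Emit a HImode addition written out explicitly as a PARALLEL of the
   SET and a clobber of the carry register (presumably the shape the
   machine description's addhi3 pattern has); the prologue and
   epilogue code below uses this to adjust SP and FP after reload.
   Returns the emitted insn so that the caller can mark it
   frame-related.  */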
static rtx
emit_addhi3_postreload (rtx dest, rtx src0, rtx src1)
{
  rtx set, clobber, insn;

  set = gen_rtx_SET (VOIDmode, dest, gen_rtx_PLUS (HImode, src0, src1));
  clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
  insn = emit_insn (gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set, clobber)));
  return insn;
}

/* Called after register allocation to add any instructions needed for
   the prologue.  Using a prologue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_PROLOGUE macro,
   since it allows the scheduler to intermix instructions with the
   saves of the caller saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.

   Also any insns generated here should have RTX_FRAME_RELATED_P(insn) = 1
   so that the debug info generation code can handle them properly.  */

void
xstormy16_expand_prologue (void)
{
  struct xstormy16_stack_layout layout;
  int regno;
  rtx insn;
  rtx mem_push_rtx;
  const int ifun = xstormy16_interrupt_function_p ();

  mem_push_rtx = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
  mem_push_rtx = gen_rtx_MEM (HImode, mem_push_rtx);

  layout = xstormy16_compute_stack_layout ();

  if (layout.locals_size >= 32768)
    error ("local variable memory requirements exceed capacity");

  /* Save the argument registers if necessary.  */
  if (layout.stdarg_save_size)
    for (regno = FIRST_ARGUMENT_REGISTER;
         regno < FIRST_ARGUMENT_REGISTER + NUM_ARGUMENT_REGISTERS;
         regno++)
      {
        rtx dwarf;
        rtx reg = gen_rtx_REG (HImode, regno);

        insn = emit_move_insn (mem_push_rtx, reg);
        RTX_FRAME_RELATED_P (insn) = 1;

        dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

        XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
                                             gen_rtx_MEM (Pmode, stack_pointer_rtx),
                                             reg);
        XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
                                             plus_constant (stack_pointer_rtx,
                                                            GET_MODE_SIZE (Pmode)));
        add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* Push each of the registers to save.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (REG_NEEDS_SAVE (regno, ifun))
      {
        rtx dwarf;
        rtx reg = gen_rtx_REG (HImode, regno);

        insn = emit_move_insn (mem_push_rtx, reg);
        RTX_FRAME_RELATED_P (insn) = 1;

        dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));

        XVECEXP (dwarf, 0, 0) = gen_rtx_SET (VOIDmode,
                                             gen_rtx_MEM (Pmode, stack_pointer_rtx),
                                             reg);
        XVECEXP (dwarf, 0, 1) = gen_rtx_SET (Pmode, stack_pointer_rtx,
                                             plus_constant (stack_pointer_rtx,
                                                            GET_MODE_SIZE (Pmode)));
        add_reg_note (insn, REG_FRAME_RELATED_EXPR, dwarf);
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 0)) = 1;
        RTX_FRAME_RELATED_P (XVECEXP (dwarf, 0, 1)) = 1;
      }

  /* It's just possible that the SP here might be what we need for
     the new FP...  */
  if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Allocate space for local variables.  */
  if (layout.locals_size)
    {
      insn = emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                                     GEN_INT (layout.locals_size));
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Set up the frame pointer, if required.  */
  if (frame_pointer_needed && layout.sp_minus_fp != layout.locals_size)
    {
      insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;

      if (layout.sp_minus_fp)
        {
          insn = emit_addhi3_postreload (hard_frame_pointer_rtx,
                                         hard_frame_pointer_rtx,
                                         GEN_INT (- layout.sp_minus_fp));
          RTX_FRAME_RELATED_P (insn) = 1;
        }
    }
}

/* Do we need an epilogue at all?  */

int
direct_return (void)
{
  return (reload_completed
          && xstormy16_compute_stack_layout ().frame_size == 0
          && ! xstormy16_interrupt_function_p ());
}

/* Called after register allocation to add any instructions needed for
   the epilogue.  Using an epilogue insn is favored compared to putting
   all of the instructions in the TARGET_ASM_FUNCTION_EPILOGUE macro,
   since it allows the scheduler to intermix instructions with the
   restores of the call-saved registers.  In some cases, it might be
   necessary to emit a barrier instruction as the last insn to prevent
   such scheduling.  */

void
xstormy16_expand_epilogue (void)
{
  struct xstormy16_stack_layout layout;
  rtx mem_pop_rtx;
  int regno;
  const int ifun = xstormy16_interrupt_function_p ();

  mem_pop_rtx = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  mem_pop_rtx = gen_rtx_MEM (HImode, mem_pop_rtx);

  layout = xstormy16_compute_stack_layout ();

  /* Pop the stack for the locals.  */
  if (layout.locals_size)
    {
      if (frame_pointer_needed && layout.sp_minus_fp == layout.locals_size)
        emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
      else
        emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                                GEN_INT (- layout.locals_size));
    }

  /* Restore any call-saved registers.  */
  for (regno = FIRST_PSEUDO_REGISTER - 1; regno >= 0; regno--)
    if (REG_NEEDS_SAVE (regno, ifun))
      emit_move_insn (gen_rtx_REG (HImode, regno), mem_pop_rtx);

  /* Pop the stack for the stdarg save area.  */
  if (layout.stdarg_save_size)
    emit_addhi3_postreload (stack_pointer_rtx, stack_pointer_rtx,
                            GEN_INT (- layout.stdarg_save_size));

  /* Return.  */
  if (ifun)
    emit_jump_insn (gen_return_internal_interrupt ());
  else
    emit_jump_insn (gen_return_internal ());
}

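/* Presumably the worker for EPILOGUE_USES: registers that the
   epilogue restores (in particular call-used registers saved in an
   interrupt function) are read by the epilogue, so report them as
   used to keep flow analysis from deleting the restores as dead.  */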
int
xstormy16_epilogue_uses (int regno)
{
  if (reload_completed && call_used_regs[regno])
    {
      const int ifun = xstormy16_interrupt_function_p ();
      return REG_NEEDS_SAVE (regno, ifun);
    }
  return 0;
}

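/* Profiling is not implemented for this target: emit a sorry ()
   diagnostic instead of generating profiling code.  */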
void
xstormy16_function_profiler (void)
{
  sorry ("function_profiler support");
}
\f
/* Update CUM to advance past an argument in the argument list.  The
   values MODE, TYPE and NAMED describe that argument.  Once this is
   done, the variable CUM is suitable for analyzing the *following*
   argument with `TARGET_FUNCTION_ARG', etc.

   This function need not do anything if the argument in question was
   passed on the stack.  The compiler knows how to track the amount of
   stack space used for arguments without any special help.  However,
   it makes life easier for xstormy16_build_va_list if it does update
   the word count.  */

static void
xstormy16_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                                const_tree type, bool named ATTRIBUTE_UNUSED)
{
  /* If an argument would otherwise be passed partially in registers,
     and partially on the stack, the whole of it is passed on the
     stack.  */
  if (*cum < NUM_ARGUMENT_REGISTERS
      && *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
    *cum = NUM_ARGUMENT_REGISTERS;

  *cum += XSTORMY16_WORD_SIZE (type, mode);
}

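/* Return the register in which to pass an argument, or NULL_RTX if
   it must go on the stack, either because its type must always be
   passed in memory or because it does not fit entirely in the
   remaining argument registers (arguments are never split between
   registers and the stack; see xstormy16_function_arg_advance
   above).  */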
static rtx
xstormy16_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                        const_tree type, bool named ATTRIBUTE_UNUSED)
{
  if (mode == VOIDmode)
    return const0_rtx;
  if (targetm.calls.must_pass_in_stack (mode, type)
      || *cum + XSTORMY16_WORD_SIZE (type, mode) > NUM_ARGUMENT_REGISTERS)
    return NULL_RTX;
  return gen_rtx_REG (mode, *cum + FIRST_ARGUMENT_REGISTER);
}

/* Build the va_list type.

   For this chip, va_list is a record containing a counter and a pointer.
   The counter is of type 'int' and indicates how many bytes
   have been used to date.  The pointer indicates the stack position
   for arguments that have not been passed in registers.
   To keep the layout nice, the pointer is first in the structure.  */

static tree
xstormy16_build_builtin_va_list (void)
{
  tree f_1, f_2, record, type_decl;

  record = (*lang_hooks.types.make_type) (RECORD_TYPE);
  type_decl = build_decl (BUILTINS_LOCATION,
                          TYPE_DECL, get_identifier ("__va_list_tag"), record);

  f_1 = build_decl (BUILTINS_LOCATION,
                    FIELD_DECL, get_identifier ("base"),
                    ptr_type_node);
  f_2 = build_decl (BUILTINS_LOCATION,
                    FIELD_DECL, get_identifier ("count"),
                    unsigned_type_node);

  DECL_FIELD_CONTEXT (f_1) = record;
  DECL_FIELD_CONTEXT (f_2) = record;

  TYPE_STUB_DECL (record) = type_decl;
  TYPE_NAME (record) = type_decl;
  TYPE_FIELDS (record) = f_1;
  DECL_CHAIN (f_1) = f_2;

  layout_type (record);

  return record;
}

/* Implement the stdarg/varargs va_start macro.  STDARG_P is nonzero if this
   is stdarg.h instead of varargs.h.  VALIST is the tree of the va_list
   variable to initialize.  NEXTARG is the machine independent notion of the
   'next' argument after the variable arguments.  */

static void
xstormy16_expand_builtin_va_start (tree valist, rtx nextarg ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree t, u;

  if (xstormy16_interrupt_function_p ())
    error ("cannot use va_start in interrupt function");

  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
                  NULL_TREE);

  t = make_tree (TREE_TYPE (base), virtual_incoming_args_rtx);
  u = build_int_cst (NULL_TREE, - INCOMING_FRAME_SP_OFFSET);
  u = fold_convert (TREE_TYPE (count), u);
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), t, u);
  t = build2 (MODIFY_EXPR, TREE_TYPE (base), base, t);
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  t = build2 (MODIFY_EXPR, TREE_TYPE (count), count,
              build_int_cst (NULL_TREE,
                             crtl->args.info * UNITS_PER_WORD));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}

/* Implement the stdarg/varargs va_arg macro.  VALIST is the variable
   of type va_list as a tree, TYPE is the type passed to va_arg.
   Note:  This algorithm is documented in stormy-abi.  */

static tree
xstormy16_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
                                gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree f_base, f_count;
  tree base, count;
  tree count_tmp, addr, t;
  tree lab_gotaddr, lab_fromstack;
  int size, size_of_reg_args, must_stack;
  tree size_tree;

  f_base = TYPE_FIELDS (va_list_type_node);
  f_count = DECL_CHAIN (f_base);

  base = build3 (COMPONENT_REF, TREE_TYPE (f_base), valist, f_base, NULL_TREE);
  count = build3 (COMPONENT_REF, TREE_TYPE (f_count), valist, f_count,
                  NULL_TREE);

  must_stack = targetm.calls.must_pass_in_stack (TYPE_MODE (type), type);
  size_tree = round_up (size_in_bytes (type), UNITS_PER_WORD);
  gimplify_expr (&size_tree, pre_p, NULL, is_gimple_val, fb_rvalue);

  size_of_reg_args = NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD;

  count_tmp = get_initialized_tmp_var (count, pre_p, NULL);
  lab_gotaddr = create_artificial_label (UNKNOWN_LOCATION);
  lab_fromstack = create_artificial_label (UNKNOWN_LOCATION);
  addr = create_tmp_var (ptr_type_node, NULL);

  if (!must_stack)
    {
      tree r;

      t = fold_convert (TREE_TYPE (count), size_tree);
      t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
      r = fold_convert (TREE_TYPE (count), size_int (size_of_reg_args));
      t = build2 (GT_EXPR, boolean_type_node, t, r);
      t = build3 (COND_EXPR, void_type_node, t,
                  build1 (GOTO_EXPR, void_type_node, lab_fromstack),
                  NULL_TREE);
      gimplify_and_add (t, pre_p);

      t = build2 (POINTER_PLUS_EXPR, ptr_type_node, base, count_tmp);
      gimplify_assign (addr, t, pre_p);

      t = build1 (GOTO_EXPR, void_type_node, lab_gotaddr);
      gimplify_and_add (t, pre_p);

      t = build1 (LABEL_EXPR, void_type_node, lab_fromstack);
      gimplify_and_add (t, pre_p);
    }

  /* Arguments larger than a word might need to skip over some
     registers, since arguments are either passed entirely in
     registers or entirely on the stack.  */
  size = PUSH_ROUNDING (int_size_in_bytes (type));
  if (size > 2 || size < 0 || must_stack)
    {
      tree r, u;

      r = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD);
      u = build2 (MODIFY_EXPR, TREE_TYPE (count_tmp), count_tmp, r);

      t = fold_convert (TREE_TYPE (count), r);
      t = build2 (GE_EXPR, boolean_type_node, count_tmp, t);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, u);
      gimplify_and_add (t, pre_p);
    }

  t = size_int (NUM_ARGUMENT_REGISTERS * UNITS_PER_WORD
                + INCOMING_FRAME_SP_OFFSET);
  t = fold_convert (TREE_TYPE (count), t);
  t = build2 (MINUS_EXPR, TREE_TYPE (count), count_tmp, t);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), t,
              fold_convert (TREE_TYPE (count), size_tree));
  t = fold_convert (TREE_TYPE (t), fold (t));
  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base), base, t);
  gimplify_assign (addr, t, pre_p);

  t = build1 (LABEL_EXPR, void_type_node, lab_gotaddr);
  gimplify_and_add (t, pre_p);

  t = fold_convert (TREE_TYPE (count), size_tree);
  t = build2 (PLUS_EXPR, TREE_TYPE (count), count_tmp, t);
  gimplify_assign (count, t, pre_p);

  addr = fold_convert (build_pointer_type (type), addr);
  return build_va_arg_indirect_ref (addr);
}

/* Worker function for TARGET_TRAMPOLINE_INIT.  */

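/* Judging from the stores below, the trampoline is four HImode
   words: the word 0x3130 | STATIC_CHAIN_REGNUM followed by the
   static chain value (presumably a move of the chain value into the
   static chain register), then 0x0200 | (low byte of the target
   address) followed by the high bits of the address (presumably a
   jmpf to the target function).  The exact instruction encodings are
   inferred from this code, not taken from the ISA manual.  */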
static void
xstormy16_trampoline_init (rtx m_tramp, tree fndecl, rtx static_chain)
{
  rtx temp = gen_reg_rtx (HImode);
  rtx reg_fnaddr = gen_reg_rtx (HImode);
  rtx reg_addr, reg_addr_mem;

  reg_addr = copy_to_reg (XEXP (m_tramp, 0));
  reg_addr_mem = adjust_automodify_address (m_tramp, HImode, reg_addr, 0);

  emit_move_insn (temp, GEN_INT (0x3130 | STATIC_CHAIN_REGNUM));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_move_insn (temp, static_chain);
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_move_insn (reg_fnaddr, XEXP (DECL_RTL (fndecl), 0));
  emit_move_insn (temp, reg_fnaddr);
  emit_insn (gen_andhi3 (temp, temp, GEN_INT (0xFF)));
  emit_insn (gen_iorhi3 (temp, temp, GEN_INT (0x0200)));
  emit_move_insn (reg_addr_mem, temp);
  emit_insn (gen_addhi3 (reg_addr, reg_addr, const2_rtx));
  reg_addr_mem = adjust_automodify_address (reg_addr_mem, VOIDmode, NULL, 2);

  emit_insn (gen_lshrhi3 (reg_fnaddr, reg_fnaddr, GEN_INT (8)));
  emit_move_insn (reg_addr_mem, reg_fnaddr);
}

/* Worker function for FUNCTION_VALUE.  */

rtx
xstormy16_function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED)
{
  enum machine_mode mode;
  mode = TYPE_MODE (valtype);
  PROMOTE_MODE (mode, 0, valtype);
  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}

/* A C compound statement that outputs the assembler code for a thunk function,
   used to implement C++ virtual function calls with multiple inheritance.  The
   thunk acts as a wrapper around a virtual function, adjusting the implicit
   object parameter before handing control off to the real function.

   First, emit code to add the integer DELTA to the location that contains the
   incoming first argument.  Assume that this argument contains a pointer, and
   is the one used to pass the `this' pointer in C++.  This is the incoming
   argument *before* the function prologue, e.g. `%o0' on a sparc.  The
   addition must preserve the values of all other incoming arguments.

   After the addition, emit code to jump to FUNCTION, which is a
   `FUNCTION_DECL'.  This is a direct pure jump, not a call, and does not touch
   the return address.  Hence returning from FUNCTION will return to whoever
   called the current `thunk'.

   The effect must be as if @var{function} had been called directly
   with the adjusted first argument.  This macro is responsible for
   emitting all of the code for a thunk function;
   TARGET_ASM_FUNCTION_PROLOGUE and TARGET_ASM_FUNCTION_EPILOGUE are
   not invoked.

   The THUNK_FNDECL is redundant.  (DELTA and FUNCTION have already been
   extracted from it.)  It might possibly be useful on some targets, but
   probably not.  */

static void
xstormy16_asm_output_mi_thunk (FILE *file,
                               tree thunk_fndecl ATTRIBUTE_UNUSED,
                               HOST_WIDE_INT delta,
                               HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
                               tree function)
{
  int regnum = FIRST_ARGUMENT_REGISTER;

  /* There might be a hidden first argument for a returned structure.  */
  if (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function))
    regnum += 1;

  fprintf (file, "\tadd %s,#0x%x\n", reg_names[regnum], (int) delta & 0xFFFF);
  fputs ("\tjmpf ", file);
  assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  putc ('\n', file);
}

/* The purpose of this function is to override the default behavior of
   BSS objects.  Normally, they go into .bss or .sbss via ".common"
   directives, but we need to override that and put them in
   .bss_below100.  We can't just use a section override (like we do
   for .data_below100), because that makes them initialized rather
   than uninitialized.  */

void
xstormy16_asm_output_aligned_common (FILE *stream,
                                     tree decl,
                                     const char *name,
                                     int size,
                                     int align,
                                     int global)
{
  rtx mem = decl == NULL_TREE ? NULL_RTX : DECL_RTL (decl);
  rtx symbol;

  if (mem != NULL_RTX
      && MEM_P (mem)
      && GET_CODE (symbol = XEXP (mem, 0)) == SYMBOL_REF
      && SYMBOL_REF_FLAGS (symbol) & SYMBOL_FLAG_XSTORMY16_BELOW100)
    {
      const char *name2;
      int p2align = 0;

      switch_to_section (bss100_section);

      while (align > 8)
        {
          align /= 2;
          p2align ++;
        }

      name2 = default_strip_name_encoding (name);
      if (global)
        fprintf (stream, "\t.globl\t%s\n", name2);
      if (p2align)
        fprintf (stream, "\t.p2align %d\n", p2align);
      fprintf (stream, "\t.type\t%s, @object\n", name2);
      fprintf (stream, "\t.size\t%s, %d\n", name2, size);
      fprintf (stream, "%s:\n\t.space\t%d\n", name2, size);
      return;
    }

  if (!global)
    {
      fprintf (stream, "\t.local\t");
      assemble_name (stream, name);
      fprintf (stream, "\n");
    }
  fprintf (stream, "\t.comm\t");
  assemble_name (stream, name);
  fprintf (stream, ",%u,%u\n", size, align / BITS_PER_UNIT);
}

/* Implement TARGET_ASM_INIT_SECTIONS.  */

static void
xstormy16_asm_init_sections (void)
{
  bss100_section
    = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
                           output_section_asm_op,
                           "\t.section \".bss_below100\",\"aw\",@nobits");
}

/* Mark symbols with the "below100" attribute so that we can use the
   special addressing modes for them.  */

static void
xstormy16_encode_section_info (tree decl, rtx r, int first)
{
  default_encode_section_info (decl, r, first);

  if (TREE_CODE (decl) == VAR_DECL
      && (lookup_attribute ("below100", DECL_ATTRIBUTES (decl))
          || lookup_attribute ("BELOW100", DECL_ATTRIBUTES (decl))))
    {
      rtx symbol = XEXP (r, 0);

      gcc_assert (GET_CODE (symbol) == SYMBOL_REF);
      SYMBOL_REF_FLAGS (symbol) |= SYMBOL_FLAG_XSTORMY16_BELOW100;
    }
}

#undef TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR xstormy16_asm_out_constructor
#undef TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR xstormy16_asm_out_destructor

/* Output constructors and destructors.  Just like
   default_named_section_asm_out_* but don't set the sections writable.  */

static void
xstormy16_asm_out_destructor (rtx symbol, int priority)
{
  const char *section = ".dtors";
  char buf[16];

  /* ??? This only works reliably with the GNU linker.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".dtors.%.5u",
               /* Invert the numbering so the linker puts us in the proper
                  order; constructors are run from right to left, and the
                  linker sorts in increasing order.  */
               MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, 0, NULL));
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}

static void
xstormy16_asm_out_constructor (rtx symbol, int priority)
{
  const char *section = ".ctors";
  char buf[16];

  /* ??? This only works reliably with the GNU linker.  */
  if (priority != DEFAULT_INIT_PRIORITY)
    {
      sprintf (buf, ".ctors.%.5u",
               /* Invert the numbering so the linker puts us in the proper
                  order; constructors are run from right to left, and the
                  linker sorts in increasing order.  */
               MAX_INIT_PRIORITY - priority);
      section = buf;
    }

  switch_to_section (get_section (section, 0, NULL));
  assemble_align (POINTER_SIZE);
  assemble_integer (symbol, POINTER_SIZE / BITS_PER_UNIT, POINTER_SIZE, 1);
}
\f
/* Print a memory address as an operand to reference that memory location.  */

void
xstormy16_print_operand_address (FILE *file, rtx address)
{
  HOST_WIDE_INT offset;
  int pre_dec, post_inc;

  /* There are a few easy cases.  */
  if (CONST_INT_P (address))
    {
      fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (address) & 0xFFFF);
      return;
    }

  if (CONSTANT_P (address) || LABEL_P (address))
    {
      output_addr_const (file, address);
      return;
    }

  /* Otherwise, it's hopefully something of the form
     (plus:HI (pre_dec:HI (reg:HI ...)) (const_int ...)).  */
  if (GET_CODE (address) == PLUS)
    {
      gcc_assert (CONST_INT_P (XEXP (address, 1)));
      offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }
  else
    offset = 0;

  pre_dec = (GET_CODE (address) == PRE_DEC);
  post_inc = (GET_CODE (address) == POST_INC);
  if (pre_dec || post_inc)
    address = XEXP (address, 0);

  gcc_assert (REG_P (address));

  fputc ('(', file);
  if (pre_dec)
    fputs ("--", file);
  fputs (reg_names [REGNO (address)], file);
  if (post_inc)
    fputs ("++", file);
  if (offset != 0)
    fprintf (file, "," HOST_WIDE_INT_PRINT_DEC, offset);
  fputc (')', file);
}

/* Print an operand to an assembler instruction.  */

void
xstormy16_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case 'B':
      /* There is either one bit set, or one bit clear, in X.
         Print it preceded by '#'.  */
      {
        static int bits_set[8] = { 0, 1, 1, 2, 1, 2, 2, 3 };
        HOST_WIDE_INT xx = 1;
        HOST_WIDE_INT l;

        if (CONST_INT_P (x))
          xx = INTVAL (x);
        else
          output_operand_lossage ("'B' operand is not constant");

        /* GCC sign-extends masks with the MSB set, so we have to
           detect all the cases that differ only in sign extension
           beyond the bits we care about.  Normally, the predicates
           and constraints ensure that we have the right values.  This
           works correctly for valid masks.  */
        if (bits_set[xx & 7] <= 1)
          {
            /* Remove sign extension bits.  */
            if ((~xx & ~(HOST_WIDE_INT)0xff) == 0)
              xx &= 0xff;
            else if ((~xx & ~(HOST_WIDE_INT)0xffff) == 0)
              xx &= 0xffff;
            l = exact_log2 (xx);
          }
        else
          {
            /* Add sign extension bits.  */
            if ((xx & ~(HOST_WIDE_INT)0xff) == 0)
              xx |= ~(HOST_WIDE_INT)0xff;
            else if ((xx & ~(HOST_WIDE_INT)0xffff) == 0)
              xx |= ~(HOST_WIDE_INT)0xffff;
            l = exact_log2 (~xx);
          }

        if (l == -1)
          output_operand_lossage ("'B' operand has multiple bits set");

        fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, l);
        return;
      }

    case 'C':
      /* Print the symbol without a surrounding @fptr().  */
      if (GET_CODE (x) == SYMBOL_REF)
        assemble_name (file, XSTR (x, 0));
      else if (LABEL_P (x))
        output_asm_label (x);
      else
        xstormy16_print_operand_address (file, x);
      return;

    case 'o':
    case 'O':
      /* Print the immediate operand less one, preceded by '#'.
         For 'O', negate it first.  */
      {
        HOST_WIDE_INT xx = 0;

        if (CONST_INT_P (x))
          xx = INTVAL (x);
        else
          output_operand_lossage ("'o' operand is not constant");

        if (code == 'O')
          xx = -xx;

        fprintf (file, IMMEDIATE_PREFIX HOST_WIDE_INT_PRINT_DEC, xx - 1);
        return;
      }

    case 'b':
      /* Print the shift mask for bp/bn.  */
      {
        HOST_WIDE_INT xx = 1;
        HOST_WIDE_INT l;

        if (CONST_INT_P (x))
          xx = INTVAL (x);
        else
          output_operand_lossage ("'b' operand is not constant");
1766
1767 l = 7 - xx;
1768
1769 fputs (IMMEDIATE_PREFIX, file);
1770 fprintf (file, HOST_WIDE_INT_PRINT_DEC, l);
1771 return;
1772 }
1773
1774 case 0:
1775 /* Handled below. */
1776 break;
1777
1778 default:
1779 output_operand_lossage ("xstormy16_print_operand: unknown code");
1780 return;
1781 }
1782
1783 switch (GET_CODE (x))
1784 {
1785 case REG:
1786 fputs (reg_names [REGNO (x)], file);
1787 break;
1788
1789 case MEM:
1790 xstormy16_print_operand_address (file, XEXP (x, 0));
1791 break;
1792
1793 default:
1794 /* Some kind of constant or label; an immediate operand,
1795 so prefix it with '#' for the assembler. */
1796 fputs (IMMEDIATE_PREFIX, file);
1797 output_addr_const (file, x);
1798 break;
1799 }
1800
1801 return;
1802 }
1803 \f
1804 /* Expander for the `casesi' pattern.
1805 INDEX is the index of the switch statement.
1806 LOWER_BOUND is a CONST_INT that is the value of INDEX corresponding
1807 to the first table entry.
1808 RANGE is the number of table entries.
1809 TABLE is an ADDR_VEC that is the jump table.
1810 DEFAULT_LABEL is the address to branch to if INDEX is outside the
1811 range LOWER_BOUND to LOWER_BOUND + RANGE - 1. */
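
/* A sketch of the expansion in pseudo-C (the real sequence is
   emitted as RTL via the generators used below):

   index -= lower_bound;
   if ((unsigned) index > range) goto default_label;
   index <<= 2;            (scale by the 4-byte 'jmpf' table entries)
   goto table[index];      (via tablejump_pcrel)  */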
1812
1813 void
1814 xstormy16_expand_casesi (rtx index, rtx lower_bound, rtx range,
1815 rtx table, rtx default_label)
1816 {
1817 HOST_WIDE_INT range_i = INTVAL (range);
1818 rtx int_index;
1819
1820 /* This code uses 'br', so it can deal only with tables of size up to
1821 8192 entries. */
1822 if (range_i >= 8192)
1823 sorry ("switch statement with %lu entries is too large",
1824 (unsigned long) range_i);
1825
1826 index = expand_binop (SImode, sub_optab, index, lower_bound, NULL_RTX, 0,
1827 OPTAB_LIB_WIDEN);
1828 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, SImode, 1,
1829 default_label);
1830 int_index = gen_lowpart_common (HImode, index);
1831 emit_insn (gen_ashlhi3 (int_index, int_index, const2_rtx));
1832 emit_jump_insn (gen_tablejump_pcrel (int_index, table));
1833 }
1834
1835 /* Output an ADDR_VEC. It is output as a sequence of 'jmpf'
1836 instructions, without label or alignment or any other special
1837 constructs. We know that the previous instruction will be the
1838 `tablejump_pcrel' output above.
1839
1840 TODO: it might be nice to output 'br' instructions if they could
1841 all reach. */
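
/* For example, a four-entry table is output as

   jmpf .L4
   jmpf .L5
   jmpf .L6
   jmpf .L7

   (label names here are illustrative). */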
1842
1843 void
1844 xstormy16_output_addr_vec (FILE *file, rtx label ATTRIBUTE_UNUSED, rtx table)
1845 {
1846 int vlen, idx;
1847
1848 switch_to_section (current_function_section ());
1849
1850 vlen = XVECLEN (table, 0);
1851 for (idx = 0; idx < vlen; idx++)
1852 {
1853 fputs ("\tjmpf ", file);
1854 output_asm_label (XEXP (XVECEXP (table, 0, idx), 0));
1855 fputc ('\n', file);
1856 }
1857 }
1858 \f
1859 /* Expander for the `call' patterns.
1860 RETVAL is the RTL for the return register or NULL for void functions.
1861 DEST is the function to call, expressed as a MEM.
1862 COUNTER is ignored. */
1863
1864 void
1865 xstormy16_expand_call (rtx retval, rtx dest, rtx counter)
1866 {
1867 rtx call, temp;
1868 enum machine_mode mode;
1869
1870 gcc_assert (MEM_P (dest));
1871 dest = XEXP (dest, 0);
1872
1873 if (! CONSTANT_P (dest) && ! REG_P (dest))
1874 dest = force_reg (Pmode, dest);
1875
1876 if (retval == NULL)
1877 mode = VOIDmode;
1878 else
1879 mode = GET_MODE (retval);
1880
1881 call = gen_rtx_CALL (mode, gen_rtx_MEM (FUNCTION_MODE, dest),
1882 counter);
1883 if (retval)
1884 call = gen_rtx_SET (VOIDmode, retval, call);
1885
1886 if (! CONSTANT_P (dest))
1887 {
1888 temp = gen_reg_rtx (HImode);
1889 emit_move_insn (temp, const0_rtx);
1890 }
1891 else
1892 temp = const0_rtx;
1893
1894 call = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, call,
1895 gen_rtx_USE (VOIDmode, temp)));
1896 emit_call_insn (call);
1897 }
1898 \f
1899 /* Expanders for multiword computational operations. */
1900
1901 /* Expander for arithmetic operations; emit insns to compute
1902
1903 (set DEST (CODE:MODE SRC0 SRC1))
1904
1905 When CODE is COMPARE, a branch template is generated
1906 (this saves duplicating code in xstormy16_split_cbranch). */
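
/* For example, an SImode PLUS is emitted as an addchi4 on the low
   word followed by an addchi5 on the high word (which, judging by
   the carry handling below, adds in the carry); the MINUS/COMPARE
   path likewise chains through the carry register. */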
1907
1908 void
1909 xstormy16_expand_arith (enum machine_mode mode, enum rtx_code code,
1910 rtx dest, rtx src0, rtx src1)
1911 {
1912 int num_words = GET_MODE_BITSIZE (mode) / BITS_PER_WORD;
1913 int i;
1914 int firstloop = 1;
1915
1916 if (code == NEG)
1917 emit_move_insn (src0, const0_rtx);
1918
1919 for (i = 0; i < num_words; i++)
1920 {
1921 rtx w_src0, w_src1, w_dest;
1922 rtx insn;
1923
1924 w_src0 = simplify_gen_subreg (word_mode, src0, mode,
1925 i * UNITS_PER_WORD);
1926 w_src1 = simplify_gen_subreg (word_mode, src1, mode, i * UNITS_PER_WORD);
1927 w_dest = simplify_gen_subreg (word_mode, dest, mode, i * UNITS_PER_WORD);
1928
1929 switch (code)
1930 {
1931 case PLUS:
1932 if (firstloop
1933 && CONST_INT_P (w_src1)
1934 && INTVAL (w_src1) == 0)
1935 continue;
1936
1937 if (firstloop)
1938 insn = gen_addchi4 (w_dest, w_src0, w_src1);
1939 else
1940 insn = gen_addchi5 (w_dest, w_src0, w_src1);
1941 break;
1942
1943 case NEG:
1944 case MINUS:
1945 case COMPARE:
1946 if (code == COMPARE && i == num_words - 1)
1947 {
1948 rtx branch, sub, clobber, sub_1;
1949
1950 sub_1 = gen_rtx_MINUS (HImode, w_src0,
1951 gen_rtx_ZERO_EXTEND (HImode, gen_rtx_REG (BImode, CARRY_REGNUM)));
1952 sub = gen_rtx_SET (VOIDmode, w_dest,
1953 gen_rtx_MINUS (HImode, sub_1, w_src1));
1954 clobber = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (BImode, CARRY_REGNUM));
1955 branch = gen_rtx_SET (VOIDmode, pc_rtx,
1956 gen_rtx_IF_THEN_ELSE (VOIDmode,
1957 gen_rtx_EQ (HImode,
1958 sub_1,
1959 w_src1),
1960 pc_rtx,
1961 pc_rtx));
1962 insn = gen_rtx_PARALLEL (VOIDmode,
1963 gen_rtvec (3, branch, sub, clobber));
1964 }
1965 else if (firstloop
1966 && code != COMPARE
1967 && CONST_INT_P (w_src1)
1968 && INTVAL (w_src1) == 0)
1969 continue;
1970 else if (firstloop)
1971 insn = gen_subchi4 (w_dest, w_src0, w_src1);
1972 else
1973 insn = gen_subchi5 (w_dest, w_src0, w_src1);
1974 break;
1975
1976 case IOR:
1977 case XOR:
1978 case AND:
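/* IOR/XOR with 0 and AND with -1 leave the word unchanged, so no
insn is needed; note -(code == AND) is -1 for AND and 0 otherwise. */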
1979 if (CONST_INT_P (w_src1)
1980 && INTVAL (w_src1) == -(code == AND))
1981 continue;
1982
1983 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_fmt_ee (code, mode,
1984 w_src0, w_src1));
1985 break;
1986
1987 case NOT:
1988 insn = gen_rtx_SET (VOIDmode, w_dest, gen_rtx_NOT (mode, w_src0));
1989 break;
1990
1991 default:
1992 gcc_unreachable ();
1993 }
1994
1995 firstloop = 0;
1996 emit (insn);
1997 }
1998
1999 /* If we emit nothing, try_split() will think we failed. So emit
2000 something that does nothing and can be optimized away. */
2001 if (firstloop)
2002 emit (gen_nop ());
2003 }
2004
2005 /* The shift operations are split at output time for constant values;
2006 variable-width shifts get handed off to a library routine.
2007
2008 Generate an output string to do (set X (CODE:MODE X SIZE_R))
2009 SIZE_R will be a CONST_INT, X will be a hard register. */
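
/* For example (illustrative register assignment: X is the pair
   r2/r3 and TEMP is r4), an ASHIFT by 4 produces
   "mov r4,r2 | shl r2,#4 | shl r3,#4 | shr r4,#12 | or r3,r4". */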
2010
2011 const char *
2012 xstormy16_output_shift (enum machine_mode mode, enum rtx_code code,
2013 rtx x, rtx size_r, rtx temp)
2014 {
2015 HOST_WIDE_INT size;
2016 const char *r0, *r1, *rt;
2017 static char r[64];
2018
2019 gcc_assert (CONST_INT_P (size_r)
2020 && REG_P (x)
2021 && mode == SImode);
2022
2023 size = INTVAL (size_r) & (GET_MODE_BITSIZE (mode) - 1);
2024
2025 if (size == 0)
2026 return "";
2027
2028 r0 = reg_names [REGNO (x)];
2029 r1 = reg_names [REGNO (x) + 1];
2030
2031 /* For shifts of size 1, we can use the rotate instructions. */
2032 if (size == 1)
2033 {
2034 switch (code)
2035 {
2036 case ASHIFT:
2037 sprintf (r, "shl %s,#1 | rlc %s,#1", r0, r1);
2038 break;
2039 case ASHIFTRT:
2040 sprintf (r, "asr %s,#1 | rrc %s,#1", r1, r0);
2041 break;
2042 case LSHIFTRT:
2043 sprintf (r, "shr %s,#1 | rrc %s,#1", r1, r0);
2044 break;
2045 default:
2046 gcc_unreachable ();
2047 }
2048 return r;
2049 }
2050
2051 /* For large shifts, there are easy special cases. */
2052 if (size == 16)
2053 {
2054 switch (code)
2055 {
2056 case ASHIFT:
2057 sprintf (r, "mov %s,%s | mov %s,#0", r1, r0, r0);
2058 break;
2059 case ASHIFTRT:
2060 sprintf (r, "mov %s,%s | asr %s,#15", r0, r1, r1);
2061 break;
2062 case LSHIFTRT:
2063 sprintf (r, "mov %s,%s | mov %s,#0", r0, r1, r1);
2064 break;
2065 default:
2066 gcc_unreachable ();
2067 }
2068 return r;
2069 }
2070 if (size > 16)
2071 {
2072 switch (code)
2073 {
2074 case ASHIFT:
2075 sprintf (r, "mov %s,%s | mov %s,#0 | shl %s,#%d",
2076 r1, r0, r0, r1, (int) size - 16);
2077 break;
2078 case ASHIFTRT:
2079 sprintf (r, "mov %s,%s | asr %s,#15 | asr %s,#%d",
2080 r0, r1, r1, r0, (int) size - 16);
2081 break;
2082 case LSHIFTRT:
2083 sprintf (r, "mov %s,%s | mov %s,#0 | shr %s,#%d",
2084 r0, r1, r1, r0, (int) size - 16);
2085 break;
2086 default:
2087 gcc_unreachable ();
2088 }
2089 return r;
2090 }
2091
2092 /* For the rest, we have to do more work. In particular, we
2093 need a temporary. */
2094 rt = reg_names [REGNO (temp)];
2095 switch (code)
2096 {
2097 case ASHIFT:
2098 sprintf (r,
2099 "mov %s,%s | shl %s,#%d | shl %s,#%d | shr %s,#%d | or %s,%s",
2100 rt, r0, r0, (int) size, r1, (int) size, rt, (int) (16 - size),
2101 r1, rt);
2102 break;
2103 case ASHIFTRT:
2104 sprintf (r,
2105 "mov %s,%s | asr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2106 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2107 r0, rt);
2108 break;
2109 case LSHIFTRT:
2110 sprintf (r,
2111 "mov %s,%s | shr %s,#%d | shr %s,#%d | shl %s,#%d | or %s,%s",
2112 rt, r1, r1, (int) size, r0, (int) size, rt, (int) (16 - size),
2113 r0, rt);
2114 break;
2115 default:
2116 gcc_unreachable ();
2117 }
2118 return r;
2119 }
2120 \f
2121 /* Attribute handling. */
2122
2123 /* Return nonzero if the function is an interrupt function. */
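
/* For example, a handler declared as
     void isr (void) __attribute__ ((interrupt));
   is recognized here via the attribute list of its function type. */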
2124
2125 int
2126 xstormy16_interrupt_function_p (void)
2127 {
2128 tree attributes;
2129
2130 /* The dwarf2 mechanism asks for INCOMING_FRAME_SP_OFFSET before
2131 any functions are declared, which is demonstrably wrong, but
2132 it is worked around here. FIXME. */
2133 if (!cfun)
2134 return 0;
2135
2136 attributes = TYPE_ATTRIBUTES (TREE_TYPE (current_function_decl));
2137 return lookup_attribute ("interrupt", attributes) != NULL_TREE;
2138 }
2139
2140 #undef TARGET_ATTRIBUTE_TABLE
2141 #define TARGET_ATTRIBUTE_TABLE xstormy16_attribute_table
2142
2143 static tree xstormy16_handle_interrupt_attribute
2144 (tree *, tree, tree, int, bool *);
2145 static tree xstormy16_handle_below100_attribute
2146 (tree *, tree, tree, int, bool *);
2147
2148 static const struct attribute_spec xstormy16_attribute_table[] =
2149 {
2150 /* name, min_len, max_len, decl_req, type_req, fn_type_req, handler. */
2151 { "interrupt", 0, 0, false, true, true, xstormy16_handle_interrupt_attribute },
2152 { "BELOW100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
2153 { "below100", 0, 0, false, false, false, xstormy16_handle_below100_attribute },
2154 { NULL, 0, 0, false, false, false, NULL }
2155 };
2156
2157 /* Handle an "interrupt" attribute;
2158 arguments as in struct attribute_spec.handler. */
2159
2160 static tree
2161 xstormy16_handle_interrupt_attribute (tree *node, tree name,
2162 tree args ATTRIBUTE_UNUSED,
2163 int flags ATTRIBUTE_UNUSED,
2164 bool *no_add_attrs)
2165 {
2166 if (TREE_CODE (*node) != FUNCTION_TYPE)
2167 {
2168 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2169 name);
2170 *no_add_attrs = true;
2171 }
2172
2173 return NULL_TREE;
2174 }
2175
2176 /* Handle a "below100" attribute;
2177 arguments as in struct attribute_spec.handler. */
2178
2179 static tree
2180 xstormy16_handle_below100_attribute (tree *node,
2181 tree name ATTRIBUTE_UNUSED,
2182 tree args ATTRIBUTE_UNUSED,
2183 int flags ATTRIBUTE_UNUSED,
2184 bool *no_add_attrs)
2185 {
2186 if (TREE_CODE (*node) != VAR_DECL
2187 && TREE_CODE (*node) != POINTER_TYPE
2188 && TREE_CODE (*node) != TYPE_DECL)
2189 {
2190 warning (OPT_Wattributes,
2191 "%<__BELOW100__%> attribute only applies to variables");
2192 *no_add_attrs = true;
2193 }
2194 else if (args == NULL_TREE && TREE_CODE (*node) == VAR_DECL)
2195 {
2196 if (! (TREE_PUBLIC (*node) || TREE_STATIC (*node)))
2197 {
2198 warning (OPT_Wattributes, "%<__BELOW100__%> attribute not allowed "
2199 "with auto storage class");
2200 *no_add_attrs = true;
2201 }
2202 }
2203
2204 return NULL_TREE;
2205 }
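
/* For example:

     static volatile unsigned char status __attribute__ ((BELOW100));

   places `status' in the below-100 area (see the .bss_below100
   handling below), making it eligible for the short below-100
   addressing forms and the bn/bp bit branches built in combine_bnp. */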
2206 \f
2207 #undef TARGET_INIT_BUILTINS
2208 #define TARGET_INIT_BUILTINS xstormy16_init_builtins
2209 #undef TARGET_EXPAND_BUILTIN
2210 #define TARGET_EXPAND_BUILTIN xstormy16_expand_builtin
2211
2212 static struct
2213 {
2214 const char * name;
2215 int md_code;
2216 const char * arg_ops; /* 0..9, t for temp register, r for return value. */
2217 const char * arg_types; /* s=short,l=long, upper case for unsigned. */
2218 }
2219 s16builtins[] =
2220 {
2221 { "__sdivlh", CODE_FOR_sdivlh, "rt01", "sls" },
2222 { "__smodlh", CODE_FOR_sdivlh, "tr01", "sls" },
2223 { "__udivlh", CODE_FOR_udivlh, "rt01", "SLS" },
2224 { "__umodlh", CODE_FOR_udivlh, "tr01", "SLS" },
2225 { NULL, 0, NULL, NULL }
2226 };
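
/* For example, the entries above expose the divide patterns to C:
   per the arg_types strings, __sdivlh has the prototype
   `short __sdivlh (long, short)' and __udivlh
   `unsigned short __udivlh (unsigned long, unsigned short)'.
   The swapped `r'/`t' in arg_ops selects which of the insn's two
   outputs (quotient or remainder) becomes the return value, so
   __smodlh/__umodlh reuse the same patterns for the remainder. */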
2227
2228 static void
2229 xstormy16_init_builtins (void)
2230 {
2231 tree args, ret_type, arg;
2232 int i, a;
2233
2234 ret_type = void_type_node;
2235
2236 for (i = 0; s16builtins[i].name; i++)
2237 {
2238 args = void_list_node;
2239 for (a = strlen (s16builtins[i].arg_types) - 1; a >= 0; a--)
2240 {
2241 switch (s16builtins[i].arg_types[a])
2242 {
2243 case 's': arg = short_integer_type_node; break;
2244 case 'S': arg = short_unsigned_type_node; break;
2245 case 'l': arg = long_integer_type_node; break;
2246 case 'L': arg = long_unsigned_type_node; break;
2247 default: gcc_unreachable ();
2248 }
2249 if (a == 0)
2250 ret_type = arg;
2251 else
2252 args = tree_cons (NULL_TREE, arg, args);
2253 }
2254 add_builtin_function (s16builtins[i].name,
2255 build_function_type (ret_type, args),
2256 i, BUILT_IN_MD, NULL, NULL);
2257 }
2258 }
2259
2260 static rtx
2261 xstormy16_expand_builtin (tree exp, rtx target,
2262 rtx subtarget ATTRIBUTE_UNUSED,
2263 enum machine_mode mode ATTRIBUTE_UNUSED,
2264 int ignore ATTRIBUTE_UNUSED)
2265 {
2266 rtx op[10], args[10], pat, copyto[10], retval = 0;
2267 tree fndecl, argtree;
2268 int i, a, o, code;
2269
2270 fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
2271 argtree = TREE_OPERAND (exp, 1);
2272 i = DECL_FUNCTION_CODE (fndecl);
2273 code = s16builtins[i].md_code;
2274
2275 for (a = 0; a < 10 && argtree; a++)
2276 {
2277 args[a] = expand_normal (TREE_VALUE (argtree));
2278 argtree = TREE_CHAIN (argtree);
2279 }
2280
2281 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2282 {
2283 char ao = s16builtins[i].arg_ops[o];
2284 char c = insn_data[code].operand[o].constraint[0];
2285 enum machine_mode omode;
2286
2287 copyto[o] = 0;
2288
2289 omode = (enum machine_mode) insn_data[code].operand[o].mode;
2290 if (ao == 'r')
2291 op[o] = target ? target : gen_reg_rtx (omode);
2292 else if (ao == 't')
2293 op[o] = gen_reg_rtx (omode);
2294 else
2295 op[o] = args[(int) hex_value (ao)];
2296
2297 if (! (*insn_data[code].operand[o].predicate) (op[o], GET_MODE (op[o])))
2298 {
2299 if (c == '+' || c == '=')
2300 {
2301 copyto[o] = op[o];
2302 op[o] = gen_reg_rtx (omode);
2303 }
2304 else
2305 op[o] = copy_to_mode_reg (omode, op[o]);
2306 }
2307
2308 if (ao == 'r')
2309 retval = op[o];
2310 }
2311
2312 pat = GEN_FCN (code) (op[0], op[1], op[2], op[3], op[4],
2313 op[5], op[6], op[7], op[8], op[9]);
2314 emit_insn (pat);
2315
2316 for (o = 0; s16builtins[i].arg_ops[o]; o++)
2317 if (copyto[o])
2318 {
2319 emit_move_insn (copyto[o], op[o]);
2320 if (op[o] == retval)
2321 retval = copyto[o];
2322 }
2323
2324 return retval;
2325 }
2326 \f
2327 /* Look for combinations of insns that can be converted to BN or BP
2328 opcodes. This is, unfortunately, too complex to do with MD
2329 patterns. */
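
/* A sketch of the kind of sequence this looks for (illustrative):

     (set (reg:HI r) (mem:HI <below-100 address>))      ; load
     (set (reg:HI r) (and:HI (reg:HI r) (const_int 8))) ; one-bit mask
     ... branch if (ne (reg:HI r) (const_int 0)) ...

   which can be replaced by a single bn/bp branch testing bit 3 of
   the memory byte directly. */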
2330
2331 static void
2332 combine_bnp (rtx insn)
2333 {
2334 int insn_code, regno, need_extend;
2335 unsigned int mask;
2336 rtx cond, reg, and_insn, load, qireg, mem;
2337 enum machine_mode load_mode = QImode;
2338 enum machine_mode and_mode = QImode;
2339 rtx shift = NULL_RTX;
2340
2341 insn_code = recog_memoized (insn);
2342 if (insn_code != CODE_FOR_cbranchhi
2343 && insn_code != CODE_FOR_cbranchhi_neg)
2344 return;
2345
2346 cond = XVECEXP (PATTERN (insn), 0, 0); /* set */
2347 cond = XEXP (cond, 1); /* if */
2348 cond = XEXP (cond, 0); /* cond */
2349 switch (GET_CODE (cond))
2350 {
2351 case NE:
2352 case EQ:
2353 need_extend = 0;
2354 break;
2355 case LT:
2356 case GE:
2357 need_extend = 1;
2358 break;
2359 default:
2360 return;
2361 }
2362
2363 reg = XEXP (cond, 0);
2364 if (! REG_P (reg))
2365 return;
2366 regno = REGNO (reg);
2367 if (XEXP (cond, 1) != const0_rtx)
2368 return;
2369 if (! find_regno_note (insn, REG_DEAD, regno))
2370 return;
2371 qireg = gen_rtx_REG (QImode, regno);
2372
2373 if (need_extend)
2374 {
2375 /* LT and GE conditionals should have a sign extend before
2376 them. */
2377 for (and_insn = prev_real_insn (insn); and_insn;
2378 and_insn = prev_real_insn (and_insn))
2379 {
2380 int and_code = recog_memoized (and_insn);
2381
2382 if (and_code == CODE_FOR_extendqihi2
2383 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2384 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), qireg))
2385 break;
2386
2387 if (and_code == CODE_FOR_movhi_internal
2388 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg))
2389 {
2390 /* This is for testing bit 15. */
2391 and_insn = insn;
2392 break;
2393 }
2394
2395 if (reg_mentioned_p (reg, and_insn))
2396 return;
2397
2398 if (GET_CODE (and_insn) != NOTE
2399 && GET_CODE (and_insn) != INSN)
2400 return;
2401 }
2402 }
2403 else
2404 {
2405 /* EQ and NE conditionals have an AND before them. */
2406 for (and_insn = prev_real_insn (insn); and_insn;
2407 and_insn = prev_real_insn (and_insn))
2408 {
2409 if (recog_memoized (and_insn) == CODE_FOR_andhi3
2410 && rtx_equal_p (SET_DEST (PATTERN (and_insn)), reg)
2411 && rtx_equal_p (XEXP (SET_SRC (PATTERN (and_insn)), 0), reg))
2412 break;
2413
2414 if (reg_mentioned_p (reg, and_insn))
2415 return;
2416
2417 if (GET_CODE (and_insn) != NOTE
2418 && GET_CODE (and_insn) != INSN)
2419 return;
2420 }
2421
2422 if (and_insn)
2423 {
2424 /* Some mis-optimizations by GCC can generate a RIGHT-SHIFT
2425 followed by an AND like this:
2426
2427 (parallel [(set (reg:HI r7) (lshiftrt:HI (reg:HI r7) (const_int 3)))
2428 (clobber (reg:BI carry))])
2429
2430 (set (reg:HI r7) (and:HI (reg:HI r7) (const_int 1)))
2431
2432 Attempt to detect this here. */
2433 for (shift = prev_real_insn (and_insn); shift;
2434 shift = prev_real_insn (shift))
2435 {
2436 if (recog_memoized (shift) == CODE_FOR_lshrhi3
2437 && rtx_equal_p (SET_DEST (XVECEXP (PATTERN (shift), 0, 0)), reg)
2438 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 0), reg))
2439 break;
2440
2441 if (reg_mentioned_p (reg, shift)
2442 || (GET_CODE (shift) != NOTE
2443 && GET_CODE (shift) != INSN))
2444 {
2445 shift = NULL_RTX;
2446 break;
2447 }
2448 }
2449 }
2450 }
2451 if (!and_insn)
2452 return;
2453
2454 for (load = shift ? prev_real_insn (shift) : prev_real_insn (and_insn);
2455 load;
2456 load = prev_real_insn (load))
2457 {
2458 int load_code = recog_memoized (load);
2459
2460 if (load_code == CODE_FOR_movhi_internal
2461 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2462 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), HImode)
2463 && ! MEM_VOLATILE_P (SET_SRC (PATTERN (load))))
2464 {
2465 load_mode = HImode;
2466 break;
2467 }
2468
2469 if (load_code == CODE_FOR_movqi_internal
2470 && rtx_equal_p (SET_DEST (PATTERN (load)), qireg)
2471 && xstormy16_below100_operand (SET_SRC (PATTERN (load)), QImode))
2472 {
2473 load_mode = QImode;
2474 break;
2475 }
2476
2477 if (load_code == CODE_FOR_zero_extendqihi2
2478 && rtx_equal_p (SET_DEST (PATTERN (load)), reg)
2479 && xstormy16_below100_operand (XEXP (SET_SRC (PATTERN (load)), 0), QImode))
2480 {
2481 load_mode = QImode;
2482 and_mode = HImode;
2483 break;
2484 }
2485
2486 if (reg_mentioned_p (reg, load))
2487 return;
2488
2489 if (GET_CODE (load) != NOTE
2490 && GET_CODE (load) != INSN)
2491 return;
2492 }
2493 if (!load)
2494 return;
2495
2496 mem = SET_SRC (PATTERN (load));
2497
2498 if (need_extend)
2499 {
2500 mask = (load_mode == HImode) ? 0x8000 : 0x80;
2501
2502 /* If the mem includes a zero-extend operation and we are
2503 going to generate a sign-extend operation then move the
2504 mem inside the zero-extend. */
2505 if (GET_CODE (mem) == ZERO_EXTEND)
2506 mem = XEXP (mem, 0);
2507 }
2508 else
2509 {
2510 if (!xstormy16_onebit_set_operand (XEXP (SET_SRC (PATTERN (and_insn)), 1),
2511 load_mode))
2512 return;
2513
2514 mask = (int) INTVAL (XEXP (SET_SRC (PATTERN (and_insn)), 1));
2515
2516 if (shift)
2517 mask <<= INTVAL (XEXP (SET_SRC (XVECEXP (PATTERN (shift), 0, 0)), 1));
2518 }
2519
2520 if (load_mode == HImode)
2521 {
2522 rtx addr = XEXP (mem, 0);
2523
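/* If only high-byte bits are set, test the byte at ADDR + 1
instead; on this little-endian target that byte holds bits 8-15. */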
2524 if (! (mask & 0xff))
2525 {
2526 addr = plus_constant (addr, 1);
2527 mask >>= 8;
2528 }
2529 mem = gen_rtx_MEM (QImode, addr);
2530 }
2531
2532 if (need_extend)
2533 XEXP (cond, 0) = gen_rtx_SIGN_EXTEND (HImode, mem);
2534 else
2535 XEXP (cond, 0) = gen_rtx_AND (and_mode, mem, GEN_INT (mask));
2536
2537 INSN_CODE (insn) = -1;
2538 delete_insn (load);
2539
2540 if (and_insn != insn)
2541 delete_insn (and_insn);
2542
2543 if (shift != NULL_RTX)
2544 delete_insn (shift);
2545 }
2546
2547 static void
2548 xstormy16_reorg (void)
2549 {
2550 rtx insn;
2551
2552 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2553 {
2554 if (! JUMP_P (insn))
2555 continue;
2556 combine_bnp (insn);
2557 }
2558 }
2559 \f
2560 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2561
2562 static bool
2563 xstormy16_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2564 {
2565 const HOST_WIDE_INT size = int_size_in_bytes (type);
2566 return (size == -1 || size > UNITS_PER_WORD * NUM_ARGUMENT_REGISTERS);
2567 }
2568 \f
2569 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
2570 static const struct default_options xstormy16_option_optimization_table[] =
2571 {
2572 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
2573 { OPT_LEVELS_NONE, 0, NULL, 0 }
2574 };
2575 \f
2576 #undef TARGET_ASM_ALIGNED_HI_OP
2577 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
2578 #undef TARGET_ASM_ALIGNED_SI_OP
2579 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
2580 #undef TARGET_ENCODE_SECTION_INFO
2581 #define TARGET_ENCODE_SECTION_INFO xstormy16_encode_section_info
2582
2583 /* Select_section doesn't handle .bss_below100. */
2584 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
2585 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
2586
2587 #undef TARGET_ASM_OUTPUT_MI_THUNK
2588 #define TARGET_ASM_OUTPUT_MI_THUNK xstormy16_asm_output_mi_thunk
2589 #undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
2590 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall
2591
2592 #undef TARGET_RTX_COSTS
2593 #define TARGET_RTX_COSTS xstormy16_rtx_costs
2594 #undef TARGET_ADDRESS_COST
2595 #define TARGET_ADDRESS_COST xstormy16_address_cost
2596
2597 #undef TARGET_BUILD_BUILTIN_VA_LIST
2598 #define TARGET_BUILD_BUILTIN_VA_LIST xstormy16_build_builtin_va_list
2599 #undef TARGET_EXPAND_BUILTIN_VA_START
2600 #define TARGET_EXPAND_BUILTIN_VA_START xstormy16_expand_builtin_va_start
2601 #undef TARGET_GIMPLIFY_VA_ARG_EXPR
2602 #define TARGET_GIMPLIFY_VA_ARG_EXPR xstormy16_gimplify_va_arg_expr
2603
2604 #undef TARGET_PROMOTE_FUNCTION_MODE
2605 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
2606 #undef TARGET_PROMOTE_PROTOTYPES
2607 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
2608
2609 #undef TARGET_FUNCTION_ARG
2610 #define TARGET_FUNCTION_ARG xstormy16_function_arg
2611 #undef TARGET_FUNCTION_ARG_ADVANCE
2612 #define TARGET_FUNCTION_ARG_ADVANCE xstormy16_function_arg_advance
2613
2614 #undef TARGET_RETURN_IN_MEMORY
2615 #define TARGET_RETURN_IN_MEMORY xstormy16_return_in_memory
2616
2617 #undef TARGET_MACHINE_DEPENDENT_REORG
2618 #define TARGET_MACHINE_DEPENDENT_REORG xstormy16_reorg
2619
2620 #undef TARGET_LEGITIMATE_ADDRESS_P
2621 #define TARGET_LEGITIMATE_ADDRESS_P xstormy16_legitimate_address_p
2622
2623 #undef TARGET_CAN_ELIMINATE
2624 #define TARGET_CAN_ELIMINATE xstormy16_can_eliminate
2625
2626 #undef TARGET_TRAMPOLINE_INIT
2627 #define TARGET_TRAMPOLINE_INIT xstormy16_trampoline_init
2628
2629 #undef TARGET_OPTION_OPTIMIZATION_TABLE
2630 #define TARGET_OPTION_OPTIMIZATION_TABLE xstormy16_option_optimization_table
2631
2632 struct gcc_target targetm = TARGET_INITIALIZER;
2633
2634 #include "gt-stormy16.h"