1 /* Subroutines used for code generation on the Argonaut ARC cpu.
2 Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002
3 Free Software Foundation, Inc.
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 /* ??? This is an old port, and is undoubtedly suffering from bit rot. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "real.h"
33 #include "insn-config.h"
34 #include "conditions.h"
35 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h"
38 #include "function.h"
39 #include "expr.h"
40 #include "recog.h"
41 #include "toplev.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "target-def.h"
45
46 /* Which cpu we're compiling for (NULL means the base cpu, otherwise an extension cpu). */
47 const char *arc_cpu_string;
48 int arc_cpu_type;
49
50 /* Name of mangle string to add to symbols to separate code compiled for each
51 cpu (or NULL). */
52 const char *arc_mangle_cpu;
53
54 /* Save the operands last given to a compare for use when we
55 generate a scc or bcc insn. */
56 rtx arc_compare_op0, arc_compare_op1;
57
58 /* Name of text, data, and rodata sections, as specified on command line.
59 Selected by -m{text,data,rodata} flags. */
60 const char *arc_text_string = ARC_DEFAULT_TEXT_SECTION;
61 const char *arc_data_string = ARC_DEFAULT_DATA_SECTION;
62 const char *arc_rodata_string = ARC_DEFAULT_RODATA_SECTION;
63
64 /* Name of text, data, and rodata sections used in varasm.c. */
65 const char *arc_text_section;
66 const char *arc_data_section;
67 const char *arc_rodata_section;
68
69 /* Array of valid operand punctuation characters. */
70 char arc_punct_chars[256];
71
72 /* Variables used by arc_final_prescan_insn to implement conditional
73 execution. */
74 static int arc_ccfsm_state;
75 static int arc_ccfsm_current_cc;
76 static rtx arc_ccfsm_target_insn;
77 static int arc_ccfsm_target_label;
78
79 /* The maximum number of insns skipped which will be conditionalised if
80 possible. */
81 #define MAX_INSNS_SKIPPED 3
82
83 /* A nop is needed between a 4 byte insn that sets the condition codes and
84 a branch that uses them (the same isn't true for an 8 byte insn that sets
85 the condition codes). Set by arc_final_prescan_insn. Used by
86 arc_print_operand. */
87 static int last_insn_set_cc_p;
88 static int current_insn_set_cc_p;
89 static void record_cc_ref PARAMS ((rtx));
90 static void arc_init_reg_tables PARAMS ((void));
91 static int get_arc_condition_code PARAMS ((rtx));
92 const struct attribute_spec arc_attribute_table[];
93 static tree arc_handle_interrupt_attribute PARAMS ((tree *, tree, tree, int, bool *));
94 static bool arc_assemble_integer PARAMS ((rtx, unsigned int, int));
95 static void arc_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
96 static void arc_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
97 static void arc_encode_section_info PARAMS ((tree, int));
98 static void arc_internal_label PARAMS ((FILE *, const char *, unsigned long));
99 \f
100 /* Initialize the GCC target structure. */
101 #undef TARGET_ASM_ALIGNED_HI_OP
102 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
103 #undef TARGET_ASM_ALIGNED_SI_OP
104 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
105 #undef TARGET_ASM_INTEGER
106 #define TARGET_ASM_INTEGER arc_assemble_integer
107
108 #undef TARGET_ASM_FUNCTION_PROLOGUE
109 #define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
110 #undef TARGET_ASM_FUNCTION_EPILOGUE
111 #define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
112 #undef TARGET_ATTRIBUTE_TABLE
113 #define TARGET_ATTRIBUTE_TABLE arc_attribute_table
114 #undef TARGET_ENCODE_SECTION_INFO
115 #define TARGET_ENCODE_SECTION_INFO arc_encode_section_info
116 #undef TARGET_ASM_INTERNAL_LABEL
117 #define TARGET_ASM_INTERNAL_LABEL arc_internal_label
118
119 struct gcc_target targetm = TARGET_INITIALIZER;
120 \f
121 /* Called by OVERRIDE_OPTIONS to initialize various things. */
122
123 void
124 arc_init ()
125 {
126 char *tmp;
127
128 if (arc_cpu_string == 0
129 || !strcmp (arc_cpu_string, "base"))
130 {
131 /* Ensure we have a printable value for the .cpu pseudo-op. */
132 arc_cpu_string = "base";
133 arc_cpu_type = 0;
134 arc_mangle_cpu = NULL;
135 }
136 else if (ARC_EXTENSION_CPU (arc_cpu_string))
137 ; /* nothing to do */
138 else
139 {
140 error ("bad value (%s) for -mcpu switch", arc_cpu_string);
141 arc_cpu_string = "base";
142 arc_cpu_type = 0;
143 arc_mangle_cpu = NULL;
144 }
145
146 /* Set the pseudo-ops for the various standard sections. */
147 arc_text_section = tmp = xmalloc (strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
148 sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
149 arc_data_section = tmp = xmalloc (strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
150 sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
151 arc_rodata_section = tmp = xmalloc (strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
152 sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);
153
154 arc_init_reg_tables ();
155
156 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
157 memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
158 arc_punct_chars['#'] = 1;
159 arc_punct_chars['*'] = 1;
160 arc_punct_chars['?'] = 1;
161 arc_punct_chars['!'] = 1;
162 arc_punct_chars['~'] = 1;
163 }
164 \f
165 /* The condition codes of the ARC, and the inverse function. */
166 static const char *const arc_condition_codes[] =
167 {
168 "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
169 "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
170 };
171
172 #define ARC_INVERSE_CONDITION_CODE(X) ((X) ^ 1)
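/* Illustrative note (not in the original source): the table above pairs each
   condition with its inverse at the adjacent index, so XOR-ing the index with
   1 flips the test.  For example "eq" (index 2) <-> "ne" (3), "c" (6) <->
   "nc" (7), "gt" (10) <-> "le" (11), "hi" (14) <-> "ls" (15).  */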
173
174 /* Returns the index of the ARC condition code string in
175 `arc_condition_codes'. COMPARISON should be an rtx like
176 `(eq (...) (...))'. */
177
178 static int
179 get_arc_condition_code (comparison)
180 rtx comparison;
181 {
182 switch (GET_CODE (comparison))
183 {
184 case EQ : return 2;
185 case NE : return 3;
186 case GT : return 10;
187 case LE : return 11;
188 case GE : return 12;
189 case LT : return 13;
190 case GTU : return 14;
191 case LEU : return 15;
192 case LTU : return 6;
193 case GEU : return 7;
194 default : abort ();
195 }
196 /*NOTREACHED*/
197 return (42);
198 }
199
200 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
201 return the mode to be used for the comparison. */
202
203 enum machine_mode
204 arc_select_cc_mode (op, x, y)
205 enum rtx_code op;
206 rtx x, y ATTRIBUTE_UNUSED;
207 {
208 switch (op)
209 {
210 case EQ :
211 case NE :
212 return CCZNmode;
213 default :
214 switch (GET_CODE (x))
215 {
216 case AND :
217 case IOR :
218 case XOR :
219 case SIGN_EXTEND :
220 case ZERO_EXTEND :
221 return CCZNmode;
222 case ASHIFT :
223 case ASHIFTRT :
224 case LSHIFTRT :
225 return CCZNCmode;
226 default:
227 break;
228 }
229 }
230 return CCmode;
231 }
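/* Worked examples for arc_select_cc_mode (illustrative, derived from the code
   above): an EQ or NE test always gets CCZNmode; any comparison of the result
   of an AND, IOR, XOR or an extension also gets CCZNmode; a comparison of the
   result of a shift gets CCZNCmode, since the carry bit is meaningful there;
   anything else, e.g. a signed `(lt (reg) (reg))', falls back to CCmode.  */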
232 \f
233 /* Vectors to keep interesting information about registers where it can easily
234 be got. We used to use the actual mode value as the bit number, but there
235 are (or may be) more than 32 modes now. Instead we use two tables: one
236 indexed by hard register number, and one indexed by mode. */
237
238 /* The purpose of arc_mode_class is to shrink the range of modes so that
239 they all fit (as bit numbers) in a 32 bit word (again). Each real mode is
240 mapped into one arc_mode_class mode. */
241
242 enum arc_mode_class {
243 C_MODE,
244 S_MODE, D_MODE, T_MODE, O_MODE,
245 SF_MODE, DF_MODE, TF_MODE, OF_MODE
246 };
247
248 /* Modes for condition codes. */
249 #define C_MODES (1 << (int) C_MODE)
250
251 /* Modes for single-word and smaller quantities. */
252 #define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))
253
254 /* Modes for double-word and smaller quantities. */
255 #define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << (int) DF_MODE))
256
257 /* Modes for quad-word and smaller quantities. */
258 #define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
259
260 /* Value is 1 if register/mode pair is acceptable on arc. */
261
262 const unsigned int arc_hard_regno_mode_ok[] = {
263 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
264 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
265 T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
266 D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
267
268 /* ??? Leave these as S_MODES for now. */
269 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
270 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
271 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
272 S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
273 };
274
275 unsigned int arc_mode_class [NUM_MACHINE_MODES];
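/* Illustrative sketch (an assumption about how arc.h consumes these tables,
   not code from this file): a mode is allowed in a hard register when the
   register's entry above has the bit for that mode's class set, roughly

       #define HARD_REGNO_MODE_OK(REGNO, MODE) \
         (arc_hard_regno_mode_ok[REGNO] & arc_mode_class[MODE])

   so e.g. r0 (a T_MODES entry) accepts up to quad-word values, while the
   last entry (the condition code register) only accepts C_MODE.  */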
276
277 enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
278
279 static void
280 arc_init_reg_tables ()
281 {
282 int i;
283
284 for (i = 0; i < NUM_MACHINE_MODES; i++)
285 {
286 switch (GET_MODE_CLASS (i))
287 {
288 case MODE_INT:
289 case MODE_PARTIAL_INT:
290 case MODE_COMPLEX_INT:
291 if (GET_MODE_SIZE (i) <= 4)
292 arc_mode_class[i] = 1 << (int) S_MODE;
293 else if (GET_MODE_SIZE (i) == 8)
294 arc_mode_class[i] = 1 << (int) D_MODE;
295 else if (GET_MODE_SIZE (i) == 16)
296 arc_mode_class[i] = 1 << (int) T_MODE;
297 else if (GET_MODE_SIZE (i) == 32)
298 arc_mode_class[i] = 1 << (int) O_MODE;
299 else
300 arc_mode_class[i] = 0;
301 break;
302 case MODE_FLOAT:
303 case MODE_COMPLEX_FLOAT:
304 if (GET_MODE_SIZE (i) <= 4)
305 arc_mode_class[i] = 1 << (int) SF_MODE;
306 else if (GET_MODE_SIZE (i) == 8)
307 arc_mode_class[i] = 1 << (int) DF_MODE;
308 else if (GET_MODE_SIZE (i) == 16)
309 arc_mode_class[i] = 1 << (int) TF_MODE;
310 else if (GET_MODE_SIZE (i) == 32)
311 arc_mode_class[i] = 1 << (int) OF_MODE;
312 else
313 arc_mode_class[i] = 0;
314 break;
315 case MODE_CC:
316 default:
317 /* mode_class hasn't been initialized yet for EXTRA_CC_MODES, so
318 we must explicitly check for them here. */
319 if (i == (int) CCmode || i == (int) CCZNmode || i == (int) CCZNCmode)
320 arc_mode_class[i] = 1 << (int) C_MODE;
321 else
322 arc_mode_class[i] = 0;
323 break;
324 }
325 }
326
327 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
328 {
329 if (i < 60)
330 arc_regno_reg_class[i] = GENERAL_REGS;
331 else if (i == 60)
332 arc_regno_reg_class[i] = LPCOUNT_REG;
333 else if (i == 61)
334 arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
335 else
336 arc_regno_reg_class[i] = NO_REGS;
337 }
338 }
339 \f
340 /* ARC specific attribute support.
341
342 The ARC has these attributes:
343 interrupt - for interrupt functions
344 */
345
346 const struct attribute_spec arc_attribute_table[] =
347 {
348 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
349 { "interrupt", 1, 1, true, false, false, arc_handle_interrupt_attribute },
350 { NULL, 0, 0, false, false, false, NULL }
351 };
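/* Example usage (illustrative only, not part of the original file): a user
   marks an interrupt handler with

       void handler (void) __attribute__ ((interrupt ("ilink1")));

   Any argument other than "ilink1" or "ilink2" draws one of the warnings
   below and the attribute is dropped via *no_add_attrs.  */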
352
353 /* Handle an "interrupt" attribute; arguments as in
354 struct attribute_spec.handler. */
355 static tree
356 arc_handle_interrupt_attribute (node, name, args, flags, no_add_attrs)
357 tree *node ATTRIBUTE_UNUSED;
358 tree name;
359 tree args;
360 int flags ATTRIBUTE_UNUSED;
361 bool *no_add_attrs;
362 {
363 tree value = TREE_VALUE (args);
364
365 if (TREE_CODE (value) != STRING_CST)
366 {
367 warning ("argument of `%s' attribute is not a string constant",
368 IDENTIFIER_POINTER (name));
369 *no_add_attrs = true;
370 }
371 else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
372 && strcmp (TREE_STRING_POINTER (value), "ilink2"))
373 {
374 warning ("argument of `%s' attribute is not \"ilink1\" or \"ilink2\"",
375 IDENTIFIER_POINTER (name));
376 *no_add_attrs = true;
377 }
378
379 return NULL_TREE;
380 }
381
382 \f
383 /* Acceptable arguments to the call insn. */
384
385 int
386 call_address_operand (op, mode)
387 rtx op;
388 enum machine_mode mode;
389 {
390 return (symbolic_operand (op, mode)
391 || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
392 || (GET_CODE (op) == REG));
393 }
394
395 int
396 call_operand (op, mode)
397 rtx op;
398 enum machine_mode mode;
399 {
400 if (GET_CODE (op) != MEM)
401 return 0;
402 op = XEXP (op, 0);
403 return call_address_operand (op, mode);
404 }
405
406 /* Returns 1 if OP is a symbol reference. */
407
408 int
409 symbolic_operand (op, mode)
410 rtx op;
411 enum machine_mode mode ATTRIBUTE_UNUSED;
412 {
413 switch (GET_CODE (op))
414 {
415 case SYMBOL_REF:
416 case LABEL_REF:
417 case CONST :
418 return 1;
419 default:
420 return 0;
421 }
422 }
423
424 /* Return truth value of statement that OP is a symbolic memory
425 operand of mode MODE. */
426
427 int
428 symbolic_memory_operand (op, mode)
429 rtx op;
430 enum machine_mode mode ATTRIBUTE_UNUSED;
431 {
432 if (GET_CODE (op) == SUBREG)
433 op = SUBREG_REG (op);
434 if (GET_CODE (op) != MEM)
435 return 0;
436 op = XEXP (op, 0);
437 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
438 || GET_CODE (op) == LABEL_REF);
439 }
440
441 /* Return true if OP is a short immediate (shimm) value. */
442
443 int
444 short_immediate_operand (op, mode)
445 rtx op;
446 enum machine_mode mode ATTRIBUTE_UNUSED;
447 {
448 if (GET_CODE (op) != CONST_INT)
449 return 0;
450 return SMALL_INT (INTVAL (op));
451 }
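/* Illustrative note (assuming SMALL_INT in arc.h accepts the usual 9-bit
   signed shimm range, -256..255): constants such as 100, 255 or -256 qualify
   as shimm operands here, while 256 or -257 must be carried as a limm and
   are rejected.  */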
452
453 /* Return true if OP will require a long immediate (limm) value.
454 This is currently only used when calculating length attributes. */
455
456 int
457 long_immediate_operand (op, mode)
458 rtx op;
459 enum machine_mode mode ATTRIBUTE_UNUSED;
460 {
461 switch (GET_CODE (op))
462 {
463 case SYMBOL_REF :
464 case LABEL_REF :
465 case CONST :
466 return 1;
467 case CONST_INT :
468 return !SMALL_INT (INTVAL (op));
469 case CONST_DOUBLE :
470 /* These can happen because large unsigned 32 bit constants are
471 represented this way (the multiplication patterns can cause these
472 to be generated). They also occur for SFmode values. */
473 return 1;
474 default:
475 break;
476 }
477 return 0;
478 }
479
480 /* Return true if OP is a MEM that when used as a load or store address will
481 require an 8 byte insn.
482 Load and store instructions don't allow the same possibilities but they're
483 similar enough that this one function will do.
484 This is currently only used when calculating length attributes. */
485
486 int
487 long_immediate_loadstore_operand (op, mode)
488 rtx op;
489 enum machine_mode mode ATTRIBUTE_UNUSED;
490 {
491 if (GET_CODE (op) != MEM)
492 return 0;
493
494 op = XEXP (op, 0);
495 switch (GET_CODE (op))
496 {
497 case SYMBOL_REF :
498 case LABEL_REF :
499 case CONST :
500 return 1;
501 case CONST_INT :
502 /* This must be handled as "st c,[limm]". Ditto for load.
503 Technically, the assembler could translate some possibilities to
504 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
505 assume that it does. */
506 return 1;
507 case CONST_DOUBLE :
508 /* These can happen because large unsigned 32 bit constants are
509 represented this way (the multiplication patterns can cause these
510 to be generated). They also occur for SFmode values. */
511 return 1;
512 case REG :
513 return 0;
514 case PLUS :
515 if (GET_CODE (XEXP (op, 1)) == CONST_INT
516 && !SMALL_INT (INTVAL (XEXP (op, 1))))
517 return 1;
518 return 0;
519 default:
520 break;
521 }
522 return 0;
523 }
524
525 /* Return true if OP is an acceptable argument for a single word
526 move source. */
527
528 int
529 move_src_operand (op, mode)
530 rtx op;
531 enum machine_mode mode;
532 {
533 switch (GET_CODE (op))
534 {
535 case SYMBOL_REF :
536 case LABEL_REF :
537 case CONST :
538 return 1;
539 case CONST_INT :
540 return (LARGE_INT (INTVAL (op)));
541 case CONST_DOUBLE :
542 /* We can handle DImode integer constants in SImode if the value
543 (signed or unsigned) will fit in 32 bits. This is needed because
544 large unsigned 32 bit constants are represented as CONST_DOUBLEs. */
545 if (mode == SImode)
546 return arc_double_limm_p (op);
547 /* We can handle 32 bit floating point constants. */
548 if (mode == SFmode)
549 return GET_MODE (op) == SFmode;
550 return 0;
551 case REG :
552 return register_operand (op, mode);
553 case SUBREG :
554 /* (subreg (mem ...) ...) can occur here if the inner part was once a
555 pseudo-reg and is now a stack slot. */
556 if (GET_CODE (SUBREG_REG (op)) == MEM)
557 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
558 else
559 return register_operand (op, mode);
560 case MEM :
561 return address_operand (XEXP (op, 0), mode);
562 default :
563 return 0;
564 }
565 }
566
567 /* Return true if OP is an acceptable argument for a double word
568 move source. */
569
570 int
571 move_double_src_operand (op, mode)
572 rtx op;
573 enum machine_mode mode;
574 {
575 switch (GET_CODE (op))
576 {
577 case REG :
578 return register_operand (op, mode);
579 case SUBREG :
580 /* (subreg (mem ...) ...) can occur here if the inner part was once a
581 pseudo-reg and is now a stack slot. */
582 if (GET_CODE (SUBREG_REG (op)) == MEM)
583 return move_double_src_operand (SUBREG_REG (op), mode);
584 else
585 return register_operand (op, mode);
586 case MEM :
587 /* Disallow auto inc/dec for now. */
588 if (GET_CODE (XEXP (op, 0)) == PRE_DEC
589 || GET_CODE (XEXP (op, 0)) == PRE_INC)
590 return 0;
591 return address_operand (XEXP (op, 0), mode);
592 case CONST_INT :
593 case CONST_DOUBLE :
594 return 1;
595 default :
596 return 0;
597 }
598 }
599
600 /* Return true if OP is an acceptable argument for a move destination. */
601
602 int
603 move_dest_operand (op, mode)
604 rtx op;
605 enum machine_mode mode;
606 {
607 switch (GET_CODE (op))
608 {
609 case REG :
610 return register_operand (op, mode);
611 case SUBREG :
612 /* (subreg (mem ...) ...) can occur here if the inner part was once a
613 pseudo-reg and is now a stack slot. */
614 if (GET_CODE (SUBREG_REG (op)) == MEM)
615 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
616 else
617 return register_operand (op, mode);
618 case MEM :
619 return address_operand (XEXP (op, 0), mode);
620 default :
621 return 0;
622 }
623 }
624
625 /* Return true if OP is valid load with update operand. */
626
627 int
628 load_update_operand (op, mode)
629 rtx op;
630 enum machine_mode mode;
631 {
632 if (GET_CODE (op) != MEM
633 || GET_MODE (op) != mode)
634 return 0;
635 op = XEXP (op, 0);
636 if (GET_CODE (op) != PLUS
637 || GET_MODE (op) != Pmode
638 || !register_operand (XEXP (op, 0), Pmode)
639 || !nonmemory_operand (XEXP (op, 1), Pmode))
640 return 0;
641 return 1;
642 }
643
644 /* Return true if OP is valid store with update operand. */
645
646 int
647 store_update_operand (op, mode)
648 rtx op;
649 enum machine_mode mode;
650 {
651 if (GET_CODE (op) != MEM
652 || GET_MODE (op) != mode)
653 return 0;
654 op = XEXP (op, 0);
655 if (GET_CODE (op) != PLUS
656 || GET_MODE (op) != Pmode
657 || !register_operand (XEXP (op, 0), Pmode)
658 || !(GET_CODE (XEXP (op, 1)) == CONST_INT
659 && SMALL_INT (INTVAL (XEXP (op, 1)))))
660 return 0;
661 return 1;
662 }
663
664 /* Return true if OP is a non-volatile non-immediate operand.
665 Volatile memory refs require a special "cache-bypass" instruction
666 and only the standard movXX patterns are set up to handle them. */
667
668 int
669 nonvol_nonimm_operand (op, mode)
670 rtx op;
671 enum machine_mode mode;
672 {
673 if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
674 return 0;
675 return nonimmediate_operand (op, mode);
676 }
677
678 /* Accept integer operands in the range -0x80000000..0x7fffffff. We have
679 to check the range carefully since this predicate is used in DImode
680 contexts. */
681
682 int
683 const_sint32_operand (op, mode)
684 rtx op;
685 enum machine_mode mode ATTRIBUTE_UNUSED;
686 {
687 /* All allowed constants will fit a CONST_INT. */
688 return (GET_CODE (op) == CONST_INT
689 && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
690 }
691
692 /* Accept integer operands in the range 0..0xffffffff. We have to check the
693 range carefully since this predicate is used in DImode contexts. Also, we
694 need some extra crud to make it work when hosted on 64-bit machines. */
695
696 int
697 const_uint32_operand (op, mode)
698 rtx op;
699 enum machine_mode mode ATTRIBUTE_UNUSED;
700 {
701 #if HOST_BITS_PER_WIDE_INT > 32
702 /* All allowed constants will fit a CONST_INT. */
703 return (GET_CODE (op) == CONST_INT
704 && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
705 #else
706 return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
707 || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
708 #endif
709 }
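/* Illustrative note: on a host with HOST_BITS_PER_WIDE_INT > 32 the value
   0xffffffff is a plain CONST_INT and is accepted by the first test above.
   On a 32-bit host the same value cannot be a non-negative CONST_INT, so it
   arrives as a CONST_DOUBLE with CONST_DOUBLE_LOW == -1 and
   CONST_DOUBLE_HIGH == 0, which the second test accepts.  */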
710
711 /* Return 1 if OP is a comparison operator valid for the mode of CC.
712 This allows the use of MATCH_OPERATOR to recognize all the branch insns.
713
714 Some insns only set a few bits in the condition code. So only allow those
715 comparisons that use the bits that are valid. */
716
717 int
718 proper_comparison_operator (op, mode)
719 rtx op;
720 enum machine_mode mode ATTRIBUTE_UNUSED;
721 {
722 enum rtx_code code = GET_CODE (op);
723
724 if (GET_RTX_CLASS (code) != '<')
725 return 0;
726
727 if (GET_MODE (XEXP (op, 0)) == CCZNmode)
728 return (code == EQ || code == NE);
729 if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
730 return (code == EQ || code == NE
731 || code == LTU || code == GEU || code == GTU || code == LEU);
732 return 1;
733 }
734 \f
735 /* Misc. utilities. */
736
737 /* X and Y are two things to compare using CODE. Emit the compare insn and
738 return the rtx for the cc reg in the proper mode. */
739
740 rtx
741 gen_compare_reg (code, x, y)
742 enum rtx_code code;
743 rtx x, y;
744 {
745 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
746 rtx cc_reg;
747
748 cc_reg = gen_rtx_REG (mode, 61);
749
750 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
751 gen_rtx_COMPARE (mode, x, y)));
752
753 return cc_reg;
754 }
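/* Illustrative example of what gen_compare_reg emits: for
   gen_compare_reg (GTU, a, b) with A and B in registers, SELECT_CC_MODE
   yields CCmode, so the insn emitted is

       (set (reg:CC 61) (compare:CC (a) (b)))

   and the function returns (reg:CC 61) for use in a following branch or
   scc pattern.  */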
755
756 /* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
757 We assume the value can be either signed or unsigned. */
758
759 int
760 arc_double_limm_p (value)
761 rtx value;
762 {
763 HOST_WIDE_INT low, high;
764
765 if (GET_CODE (value) != CONST_DOUBLE)
766 abort ();
767
768 low = CONST_DOUBLE_LOW (value);
769 high = CONST_DOUBLE_HIGH (value);
770
771 if (low & 0x80000000)
772 {
773 return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
774 || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
775 == - (unsigned HOST_WIDE_INT) 0x80000000)
776 && high == -1));
777 }
778 else
779 {
780 return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
781 }
782 }
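/* Illustrative examples for arc_double_limm_p: a CONST_DOUBLE holding
   0xffffffff (low = 0xffffffff, high = 0) fits as an unsigned limm, and one
   holding -1 (low = -1, high = -1) fits as a signed limm, so both return
   nonzero.  A value such as 0x100000000 (high = 1) cannot be expressed in
   four bytes and returns zero.  */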
783 \f
784 /* Do any needed setup for a variadic function. For the ARC, we must
785 create a register parameter block, and then copy any anonymous arguments
786 in registers to memory.
787
788 CUM has not been updated for the last named argument which has type TYPE
789 and mode MODE, and we rely on this fact.
790
791 We do things a little weird here. We're supposed to only allocate space
792 for the anonymous arguments. However we need to keep the stack eight byte
793 aligned. So we round the space up if necessary, and leave it to va_start
794 to compensate. */
795
796 void
797 arc_setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
798 CUMULATIVE_ARGS *cum;
799 enum machine_mode mode;
800 tree type ATTRIBUTE_UNUSED;
801 int *pretend_size;
802 int no_rtl;
803 {
804 int first_anon_arg;
805
806 /* All BLKmode values are passed by reference. */
807 if (mode == BLKmode)
808 abort ();
809
810 first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
811 / UNITS_PER_WORD);
812
813 if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
814 {
815 /* Note that first_reg_offset < MAX_ARC_PARM_REGS. */
816 int first_reg_offset = first_anon_arg;
817 /* Size in words to "pretend" allocate. */
818 int size = MAX_ARC_PARM_REGS - first_reg_offset;
819 /* Extra slop to keep stack eight byte aligned. */
820 int align_slop = size & 1;
821 rtx regblock;
822
823 regblock = gen_rtx_MEM (BLKmode,
824 plus_constant (arg_pointer_rtx,
825 FIRST_PARM_OFFSET (0)
826 + align_slop * UNITS_PER_WORD));
827 set_mem_alias_set (regblock, get_varargs_alias_set ());
828 set_mem_align (regblock, BITS_PER_WORD);
829 move_block_from_reg (first_reg_offset, regblock,
830 MAX_ARC_PARM_REGS - first_reg_offset,
831 ((MAX_ARC_PARM_REGS - first_reg_offset)
832 * UNITS_PER_WORD));
833
834 *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
835 * UNITS_PER_WORD);
836 }
837 }
838 \f
839 /* Cost functions. */
840
841 /* Provide the costs of an addressing mode that contains ADDR.
842 If ADDR is not a valid address, its cost is irrelevant. */
843
844 int
845 arc_address_cost (addr)
846 rtx addr;
847 {
848 switch (GET_CODE (addr))
849 {
850 case REG :
851 /* This is handled in the macro that calls us.
852 It's here for documentation. */
853 return 1;
854
855 case LABEL_REF :
856 case SYMBOL_REF :
857 case CONST :
858 return 2;
859
860 case PLUS :
861 {
862 register rtx plus0 = XEXP (addr, 0);
863 register rtx plus1 = XEXP (addr, 1);
864
865 if (GET_CODE (plus0) != REG)
866 break;
867
868 switch (GET_CODE (plus1))
869 {
870 case CONST_INT :
871 return SMALL_INT (INTVAL (plus1)) ? 1 : 2;
872 case CONST :
873 case SYMBOL_REF :
874 case LABEL_REF :
875 return 2;
876 default:
877 break;
878 }
879 break;
880 }
881 default:
882 break;
883 }
884
885 return 4;
886 }
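/* Cost summary (illustrative, derived from the code above): a bare register
   is 1; a symbol, label or constant address is 2; register + shimm offset is
   1 while register + limm offset is 2; register + symbol is 2; anything else
   costs 4.  */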
887 \f
888 /* Function prologue/epilogue handlers. */
889
890 /* ARC stack frames look like:
891
892 Before call After call
893 +-----------------------+ +-----------------------+
894 | | | |
895 high | local variables, | | local variables, |
896 mem | reg save area, etc. | | reg save area, etc. |
897 | | | |
898 +-----------------------+ +-----------------------+
899 | | | |
900 | arguments on stack. | | arguments on stack. |
901 | | | |
902 SP+16->+-----------------------+FP+48->+-----------------------+
903 | 4 word save area for | | reg parm save area, |
904 | return addr, prev %fp | | only created for |
905 SP+0->+-----------------------+ | variable argument |
906 | functions |
907 FP+16->+-----------------------+
908 | 4 word save area for |
909 | return addr, prev %fp |
910 FP+0->+-----------------------+
911 | |
912 | local variables |
913 | |
914 +-----------------------+
915 | |
916 | register save area |
917 | |
918 +-----------------------+
919 | |
920 | alloca allocations |
921 | |
922 +-----------------------+
923 | |
924 | arguments on stack |
925 | |
926 SP+16->+-----------------------+
927 low | 4 word save area for |
928 memory | return addr, prev %fp |
929 SP+0->+-----------------------+
930
931 Notes:
932 1) The "reg parm save area" does not exist for non variable argument fns.
933 The "reg parm save area" can be eliminated completely if we created our
934 own va-arc.h, but that has tradeoffs as well (so it's not done). */
935
936 /* Structure to be filled in by arc_compute_frame_size with register
937 save masks, and offsets for the current function. */
938 struct arc_frame_info
939 {
940 unsigned int total_size; /* # bytes that the entire frame takes up. */
941 unsigned int extra_size; /* # bytes of extra stuff. */
942 unsigned int pretend_size; /* # bytes we push and pretend caller did. */
943 unsigned int args_size; /* # bytes that outgoing arguments take up. */
944 unsigned int reg_size; /* # bytes needed to store regs. */
945 unsigned int var_size; /* # bytes that variables take up. */
946 unsigned int reg_offset; /* Offset from new sp to store regs. */
947 unsigned int gmask; /* Mask of saved gp registers. */
948 int initialized; /* Nonzero if frame size already calculated. */
949 };
950
951 /* Current frame information calculated by arc_compute_frame_size. */
952 static struct arc_frame_info current_frame_info;
953
954 /* Zero structure to initialize current_frame_info. */
955 static struct arc_frame_info zero_frame_info;
956
957 /* Type of function DECL.
958
959 The result is cached. To reset the cache at the end of a function,
960 call with DECL = NULL_TREE. */
961
962 enum arc_function_type
963 arc_compute_function_type (decl)
964 tree decl;
965 {
966 tree a;
967 /* Cached value. */
968 static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
969 /* Last function we were called for. */
970 static tree last_fn = NULL_TREE;
971
972 /* Resetting the cached value? */
973 if (decl == NULL_TREE)
974 {
975 fn_type = ARC_FUNCTION_UNKNOWN;
976 last_fn = NULL_TREE;
977 return fn_type;
978 }
979
980 if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
981 return fn_type;
982
983 /* Assume we have a normal function (not an interrupt handler). */
984 fn_type = ARC_FUNCTION_NORMAL;
985
986 /* Now see if this is an interrupt handler. */
987 for (a = DECL_ATTRIBUTES (current_function_decl);
988 a;
989 a = TREE_CHAIN (a))
990 {
991 tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);
992
993 if (name == get_identifier ("__interrupt__")
994 && list_length (args) == 1
995 && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
996 {
997 tree value = TREE_VALUE (args);
998
999 if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
1000 fn_type = ARC_FUNCTION_ILINK1;
1001 else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
1002 fn_type = ARC_FUNCTION_ILINK2;
1003 else
1004 abort ();
1005 break;
1006 }
1007 }
1008
1009 last_fn = decl;
1010 return fn_type;
1011 }
1012
1013 #define ILINK1_REGNUM 29
1014 #define ILINK2_REGNUM 30
1015 #define RETURN_ADDR_REGNUM 31
1016 #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
1017 #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))
1018
1019 /* Tell prologue and epilogue if register REGNO should be saved / restored.
1020 The return address and frame pointer are treated separately.
1021 Don't consider them here. */
1022 #define MUST_SAVE_REGISTER(regno, interrupt_p) \
1023 ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
1024 && (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p)))
1025
1026 #define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM])
1027
1028 /* Return the bytes needed to compute the frame pointer from the current
1029 stack pointer.
1030
1031 SIZE is the size needed for local variables. */
1032
1033 unsigned int
1034 arc_compute_frame_size (size)
1035 int size; /* # of var. bytes allocated. */
1036 {
1037 int regno;
1038 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1039 unsigned int reg_size, reg_offset;
1040 unsigned int gmask;
1041 enum arc_function_type fn_type;
1042 int interrupt_p;
1043
1044 var_size = size;
1045 args_size = current_function_outgoing_args_size;
1046 pretend_size = current_function_pretend_args_size;
1047 extra_size = FIRST_PARM_OFFSET (0);
1048 total_size = extra_size + pretend_size + args_size + var_size;
1049 reg_offset = FIRST_PARM_OFFSET(0) + current_function_outgoing_args_size;
1050 reg_size = 0;
1051 gmask = 0;
1052
1053 /* See if this is an interrupt handler. Call used registers must be saved
1054 for them too. */
1055 fn_type = arc_compute_function_type (current_function_decl);
1056 interrupt_p = ARC_INTERRUPT_P (fn_type);
1057
1058 /* Calculate space needed for registers.
1059 ??? We ignore the extension registers for now. */
1060
1061 for (regno = 0; regno <= 31; regno++)
1062 {
1063 if (MUST_SAVE_REGISTER (regno, interrupt_p))
1064 {
1065 reg_size += UNITS_PER_WORD;
1066 gmask |= 1 << regno;
1067 }
1068 }
1069
1070 total_size += reg_size;
1071
1072 /* If the only space to allocate is the fp/blink save area this is an
1073 empty frame. However, if we'll be making a function call we need to
1074 allocate a stack frame for our callee's fp/blink save area. */
1075 if (total_size == extra_size
1076 && !MUST_SAVE_RETURN_ADDR)
1077 total_size = extra_size = 0;
1078
1079 total_size = ARC_STACK_ALIGN (total_size);
1080
1081 /* Save computed information. */
1082 current_frame_info.total_size = total_size;
1083 current_frame_info.extra_size = extra_size;
1084 current_frame_info.pretend_size = pretend_size;
1085 current_frame_info.var_size = var_size;
1086 current_frame_info.args_size = args_size;
1087 current_frame_info.reg_size = reg_size;
1088 current_frame_info.reg_offset = reg_offset;
1089 current_frame_info.gmask = gmask;
1090 current_frame_info.initialized = reload_completed;
1091
1092 /* Ok, we're done. */
1093 return total_size;
1094 }
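/* Worked example (illustrative; assumes FIRST_PARM_OFFSET (0) is the 16-byte
   four-word save area shown in the frame diagram above and that
   ARC_STACK_ALIGN rounds to 8 bytes): a non-interrupt function with 8 bytes
   of locals, no outgoing arguments, no pretend args, that must save r13 and
   r14 gets

       extra_size = 16, var_size = 8, reg_size = 8, gmask = (1<<13)|(1<<14)
       total_size = 16 + 8 + 8 = 32   (already 8-byte aligned)
       reg_offset = 16

   so the prologue allocates 32 bytes and stores the two registers at sp+16
   and sp+20.  */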
1095 \f
1096 /* Common code to save/restore registers. */
1097
1098 void
1099 arc_save_restore (file, base_reg, offset, gmask, op)
1100 FILE *file;
1101 const char *base_reg;
1102 unsigned int offset;
1103 unsigned int gmask;
1104 const char *op;
1105 {
1106 int regno;
1107
1108 if (gmask == 0)
1109 return;
1110
1111 for (regno = 0; regno <= 31; regno++)
1112 {
1113 if ((gmask & (1L << regno)) != 0)
1114 {
1115 fprintf (file, "\t%s %s,[%s,%d]\n",
1116 op, reg_names[regno], base_reg, offset);
1117 offset += UNITS_PER_WORD;
1118 }
1119 }
1120 }
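/* Example output (illustrative): arc_save_restore (file, "sp", 16,
   (1 << 13) | (1 << 14), "st") emits

       st r13,[sp,16]
       st r14,[sp,20]

   and the matching "ld" call in the epilogue restores the same registers in
   the same order.  */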
1121 \f
1122 /* Target hook to assemble an integer object. The ARC version needs to
1123 emit a special directive for references to labels and function
1124 symbols. */
1125
1126 static bool
1127 arc_assemble_integer (x, size, aligned_p)
1128 rtx x;
1129 unsigned int size;
1130 int aligned_p;
1131 {
1132 if (size == UNITS_PER_WORD && aligned_p
1133 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FLAG (x))
1134 || GET_CODE (x) == LABEL_REF))
1135 {
1136 fputs ("\t.word\t%st(", asm_out_file);
1137 output_addr_const (asm_out_file, x);
1138 fputs (")\n", asm_out_file);
1139 return true;
1140 }
1141 return default_assemble_integer (x, size, aligned_p);
1142 }
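/* Example (illustrative): for a word-sized, aligned reference to a function
   symbol `foo' (SYMBOL_REF_FLAG set, presumably by arc_encode_section_info),
   this hook emits

       .word %st(foo)

   instead of the default .word directive.  */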
1143 \f
1144 /* Set up the stack and frame pointer (if desired) for the function. */
1145
1146 static void
1147 arc_output_function_prologue (file, size)
1148 FILE *file;
1149 HOST_WIDE_INT size;
1150 {
1151 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1152 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1153 unsigned int gmask = current_frame_info.gmask;
1154 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1155
1156 /* If this is an interrupt handler, set up our stack frame.
1157 ??? Optimize later. */
1158 if (ARC_INTERRUPT_P (fn_type))
1159 {
1160 fprintf (file, "\t%s interrupt handler\n",
1161 ASM_COMMENT_START);
1162 fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
1163 }
1164
1165 /* This is only for the human reader. */
1166 fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
1167 ASM_COMMENT_START, ASM_COMMENT_START,
1168 current_frame_info.var_size,
1169 current_frame_info.reg_size / 4,
1170 current_frame_info.args_size,
1171 current_frame_info.extra_size);
1172
1173 size = ARC_STACK_ALIGN (size);
1174 size = (! current_frame_info.initialized
1175 ? arc_compute_frame_size (size)
1176 : current_frame_info.total_size);
1177
1178 /* These cases shouldn't happen. Catch them now. */
1179 if (size == 0 && gmask)
1180 abort ();
1181
1182 /* Allocate space for register arguments if this is a variadic function. */
1183 if (current_frame_info.pretend_size != 0)
1184 fprintf (file, "\tsub %s,%s,%d\n",
1185 sp_str, sp_str, current_frame_info.pretend_size);
1186
1187 /* The home-grown ABI says link register is saved first. */
1188 if (MUST_SAVE_RETURN_ADDR)
1189 fprintf (file, "\tst %s,[%s,%d]\n",
1190 reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);
1191
1192 /* Set up the previous frame pointer next (if we need to). */
1193 if (frame_pointer_needed)
1194 {
1195 fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
1196 fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
1197 }
1198
1199 /* ??? We don't handle the case where the saved regs are more than 252
1200 bytes away from sp. This can be handled by decrementing sp once, saving
1201 the regs, and then decrementing it again. The epilogue doesn't have this
1202 problem as the `ld' insn takes reg+limm values (though it would be more
1203 efficient to avoid reg+limm). */
1204
1205 /* Allocate the stack frame. */
1206 if (size - current_frame_info.pretend_size > 0)
1207 fprintf (file, "\tsub %s,%s,%d\n",
1208 sp_str, sp_str, size - current_frame_info.pretend_size);
1209
1210 /* Save any needed call-saved regs (and call-used if this is an
1211 interrupt handler). */
1212 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1213 /* The zeroing of these two bits is unnecessary,
1214 but leave this in for clarity. */
1215 gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1216 "st");
1217
1218 fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
1219 }
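/* Illustrative prologue for the worked example after arc_compute_frame_size
   above (frame pointer needed, blink live, r13/r14 saved, 32-byte frame);
   register names are whatever reg_names[] yields, shown here as blink/fp/sp:

       st blink,[sp,4]
       st fp,[sp]
       mov fp,sp
       sub sp,sp,32
       st r13,[sp,16]
       st r14,[sp,20]
   */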
1220 \f
1221 /* Do any necessary cleanup after a function to restore stack, frame,
1222 and regs. */
1223
1224 static void
1225 arc_output_function_epilogue (file, size)
1226 FILE *file;
1227 HOST_WIDE_INT size;
1228 {
1229 rtx epilogue_delay = current_function_epilogue_delay_list;
1230 int noepilogue = FALSE;
1231 enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);
1232
1233 /* This is only for the human reader. */
1234 fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);
1235
1236 size = ARC_STACK_ALIGN (size);
1237 size = (!current_frame_info.initialized
1238 ? arc_compute_frame_size (size)
1239 : current_frame_info.total_size);
1240
1241 if (size == 0 && epilogue_delay == 0)
1242 {
1243 rtx insn = get_last_insn ();
1244
1245 /* If the last insn was a BARRIER, we don't have to write any code
1246 because a jump (aka return) was put there. */
1247 if (GET_CODE (insn) == NOTE)
1248 insn = prev_nonnote_insn (insn);
1249 if (insn && GET_CODE (insn) == BARRIER)
1250 noepilogue = TRUE;
1251 }
1252
1253 if (!noepilogue)
1254 {
1255 unsigned int pretend_size = current_frame_info.pretend_size;
1256 unsigned int frame_size = size - pretend_size;
1257 int restored, fp_restored_p;
1258 int can_trust_sp_p = !current_function_calls_alloca;
1259 const char *sp_str = reg_names[STACK_POINTER_REGNUM];
1260 const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
1261
1262 /* ??? There are lots of optimizations that can be done here.
1263 EG: Use fp to restore regs if it's closer.
1264 Maybe in time we'll do them all. For now, always restore regs from
1265 sp, but don't restore sp if we don't have to. */
1266
1267 if (!can_trust_sp_p)
1268 {
1269 if (!frame_pointer_needed)
1270 abort ();
1271 fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
1272 sp_str, fp_str, frame_size, ASM_COMMENT_START);
1273 }
1274
1275 /* Restore any saved registers. */
1276 arc_save_restore (file, sp_str, current_frame_info.reg_offset,
1277 /* The zeroing of these two bits is unnecessary,
1278 but leave this in for clarity. */
1279 current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
1280 "ld");
1281
1282 if (MUST_SAVE_RETURN_ADDR)
1283 fprintf (file, "\tld %s,[%s,%d]\n",
1284 reg_names[RETURN_ADDR_REGNUM],
1285 frame_pointer_needed ? fp_str : sp_str,
1286 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));
1287
1288 /* Keep track of how much of the stack pointer we've restored.
1289 It makes the following a lot more readable. */
1290 restored = 0;
1291 fp_restored_p = 0;
1292
1293 /* We try to emit the epilogue delay slot insn right after the load
1294 of the return address register so that it can execute with the
1295 stack intact. Secondly, loads are delayed. */
1296 /* ??? If stack intactness is important, always emit now. */
1297 if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
1298 {
1299 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1);
1300 epilogue_delay = NULL_RTX;
1301 }
1302
1303 if (frame_pointer_needed)
1304 {
1305 /* Try to restore the frame pointer in the delay slot. We can't,
1306 however, if any of these is true. */
1307 if (epilogue_delay != NULL_RTX
1308 || !SMALL_INT (frame_size)
1309 || pretend_size
1310 || ARC_INTERRUPT_P (fn_type))
1311 {
1312 /* Note that we restore fp and sp here! */
1313 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1314 restored += frame_size;
1315 fp_restored_p = 1;
1316 }
1317 }
1318 else if (!SMALL_INT (size /* frame_size + pretend_size */)
1319 || ARC_INTERRUPT_P (fn_type))
1320 {
1321 fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
1322 restored += frame_size;
1323 }
1324
1325 /* These must be done before the return insn because the delay slot
1326 does the final stack restore. */
1327 if (ARC_INTERRUPT_P (fn_type))
1328 {
1329 if (epilogue_delay)
1330 {
1331 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1);
1332 }
1333 }
1334
1335 /* Emit the return instruction. */
1336 {
1337 static const int regs[4] = {
1338 0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
1339 };
1340 fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
1341 }
1342
1343 /* If the only register saved is the return address, we need a
1344 nop, unless we have an instruction to put into it. Otherwise
1345 we don't since reloading multiple registers doesn't reference
1346 the register being loaded. */
1347
1348 if (ARC_INTERRUPT_P (fn_type))
1349 fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
1350 else if (epilogue_delay != NULL_RTX)
1351 {
1352 if (frame_pointer_needed && !fp_restored_p)
1353 abort ();
1354 if (restored < size)
1355 abort ();
1356 final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1);
1357 }
1358 else if (frame_pointer_needed && !fp_restored_p)
1359 {
1360 if (!SMALL_INT (frame_size))
1361 abort ();
1362 /* Note that we restore fp and sp here! */
1363 fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
1364 }
1365 else if (restored < size)
1366 {
1367 if (!SMALL_INT (size - restored))
1368 abort ();
1369 fprintf (file, "\tadd %s,%s,%d\n",
1370 sp_str, sp_str, size - restored);
1371 }
1372 else
1373 fprintf (file, "\tnop\n");
1374 }
1375
1376 /* Reset state info for each function. */
1377 current_frame_info = zero_frame_info;
1378 arc_compute_function_type (NULL_TREE);
1379 }
1380 \f
1381 /* Define the number of delay slots needed for the function epilogue.
1382
1383 Interrupt handlers can't have any epilogue delay slots (it's always needed
1384 for something else, I think). For normal functions, we have to worry about
1385 using call-saved regs as they'll be restored before the delay slot insn.
1386 Functions with non-empty frames already have enough choices for the epilogue
1387 delay slot so for now we only consider functions with empty frames. */
1388
1389 int
1390 arc_delay_slots_for_epilogue ()
1391 {
1392 if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
1393 return 0;
1394 if (!current_frame_info.initialized)
1395 (void) arc_compute_frame_size (get_frame_size ());
1396 if (current_frame_info.total_size == 0)
1397 return 1;
1398 return 0;
1399 }
1400
1401 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
1402 Any single length instruction which doesn't reference the stack or frame
1403 pointer or any call-saved register is OK. SLOT will always be 0. */
1404
1405 int
1406 arc_eligible_for_epilogue_delay (trial, slot)
1407 rtx trial;
1408 int slot;
1409 {
1410 if (slot != 0)
1411 abort ();
1412
1413 if (get_attr_length (trial) == 1
1414 /* If registers were saved, presumably there's more than enough
1415 possibilities for the delay slot. The alternative is something
1416 more complicated (of course, if we expanded the epilogue as rtl
1417 this problem would go away). */
1418 /* ??? Note that this will always be true since only functions with
1419 empty frames have epilogue delay slots. See
1420 arc_delay_slots_for_epilogue. */
1421 && current_frame_info.gmask == 0
1422 && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
1423 && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
1424 return 1;
1425 return 0;
1426 }
1427 \f
1428 /* PIC */
1429
1430 /* Emit special PIC prologues and epilogues. */
1431
1432 void
1433 arc_finalize_pic ()
1434 {
1435 /* nothing to do */
1436 }
1437 \f
1438 /* Return true if OP is a shift operator. */
1439
1440 int
1441 shift_operator (op, mode)
1442 rtx op;
1443 enum machine_mode mode ATTRIBUTE_UNUSED;
1444 {
1445 switch (GET_CODE (op))
1446 {
1447 case ASHIFTRT:
1448 case LSHIFTRT:
1449 case ASHIFT:
1450 return 1;
1451 default:
1452 return 0;
1453 }
1454 }
1455
1456 /* Output the assembler code for doing a shift.
1457 We go to a bit of trouble to generate efficient code as the ARC only has
1458 single bit shifts. This is taken from the h8300 port. We only have one
1459 mode of shifting and can't access individual bytes like the h8300 can, so
1460 this is greatly simplified (at the expense of not generating hyper-
1461 efficient code).
1462
1463 This function is not used if the variable shift insns are present. */
1464
1465 /* ??? We assume the output operand is the same as operand 1.
1466 This can be optimized (deleted) in the case of 1 bit shifts. */
1467 /* ??? We use the loop register here. We don't use it elsewhere (yet) and
1468 using it here will give us a chance to play with it. */
1469
1470 const char *
1471 output_shift (operands)
1472 rtx *operands;
1473 {
1474 rtx shift = operands[3];
1475 enum machine_mode mode = GET_MODE (shift);
1476 enum rtx_code code = GET_CODE (shift);
1477 const char *shift_one;
1478
1479 if (mode != SImode)
1480 abort ();
1481
1482 switch (code)
1483 {
1484 case ASHIFT: shift_one = "asl %0,%0"; break;
1485 case ASHIFTRT: shift_one = "asr %0,%0"; break;
1486 case LSHIFTRT: shift_one = "lsr %0,%0"; break;
1487 default: abort ();
1488 }
1489
1490 if (GET_CODE (operands[2]) != CONST_INT)
1491 {
1492 if (optimize)
1493 output_asm_insn ("mov lp_count,%2", operands);
1494 else
1495 output_asm_insn ("mov %4,%2", operands);
1496 goto shiftloop;
1497 }
1498 else
1499 {
1500 int n = INTVAL (operands[2]);
1501
1502 /* If the count is negative, make it 0. */
1503 if (n < 0)
1504 n = 0;
1505 /* If the count is too big, truncate it.
1506 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
1507 do the intuitive thing. */
1508 else if (n > GET_MODE_BITSIZE (mode))
1509 n = GET_MODE_BITSIZE (mode);
1510
1511 /* First see if we can do them inline. */
1512 if (n <= 8)
1513 {
1514 while (--n >= 0)
1515 output_asm_insn (shift_one, operands);
1516 }
1517 /* See if we can use a rotate/and. */
1518 else if (n == BITS_PER_WORD - 1)
1519 {
1520 switch (code)
1521 {
1522 case ASHIFT :
1523 output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
1524 break;
1525 case ASHIFTRT :
1526 /* The ARC doesn't have a rol insn. Use something else. */
1527 output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
1528 break;
1529 case LSHIFTRT :
1530 /* The ARC doesn't have a rol insn. Use something else. */
1531 output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
1532 break;
1533 default:
1534 break;
1535 }
1536 }
1537 /* Must loop. */
1538 else
1539 {
1540 char buf[100];
1541
1542 if (optimize)
1543 output_asm_insn ("mov lp_count,%c2", operands);
1544 else
1545 output_asm_insn ("mov %4,%c2", operands);
1546 shiftloop:
1547 if (optimize)
1548 {
1549 if (flag_pic)
1550 sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
1551 ASM_COMMENT_START);
1552 else
1553 sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
1554 ASM_COMMENT_START);
1555 output_asm_insn (buf, operands);
1556 output_asm_insn ("sr %4,[lp_start]", operands);
1557 output_asm_insn ("add %4,%4,1", operands);
1558 output_asm_insn ("sr %4,[lp_end]", operands);
1559 output_asm_insn ("nop\n\tnop", operands);
1560 if (flag_pic)
1561 fprintf (asm_out_file, "\t%s single insn loop\n",
1562 ASM_COMMENT_START);
1563 else
1564 fprintf (asm_out_file, "1:\t%s single insn loop\n",
1565 ASM_COMMENT_START);
1566 output_asm_insn (shift_one, operands);
1567 }
1568 else
1569 {
1570 fprintf (asm_out_file, "1:\t%s begin shift loop\n",
1571 ASM_COMMENT_START);
1572 output_asm_insn ("sub.f %4,%4,1", operands);
1573 output_asm_insn ("nop", operands);
1574 output_asm_insn ("bn.nd 2f", operands);
1575 output_asm_insn (shift_one, operands);
1576 output_asm_insn ("b.nd 1b", operands);
1577 fprintf (asm_out_file, "2:\t%s end shift loop\n",
1578 ASM_COMMENT_START);
1579 }
1580 }
1581 }
1582
1583 return "";
1584 }
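/* Illustrative outputs: a constant shift count of 3 is emitted inline as
   three single-bit shifts (e.g. "asl %0,%0" three times); a logical right
   shift by BITS_PER_WORD - 1 becomes

       asl.f 0,%0
       adc %0,0,0

   which copies the old sign bit into bit 0; larger or variable counts fall
   through to the lp_count / shiftloop code above.  */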
1585 \f
1586 /* Nested function support. */
1587
1588 /* Emit RTL insns to initialize the variable parts of a trampoline.
1589 FNADDR is an RTX for the address of the function's pure code.
1590 CXT is an RTX for the static chain value for the function. */
1591
1592 void
1593 arc_initialize_trampoline (tramp, fnaddr, cxt)
1594 rtx tramp ATTRIBUTE_UNUSED, fnaddr ATTRIBUTE_UNUSED, cxt ATTRIBUTE_UNUSED;
1595 {
1596 }
1597 \f
1598 /* Set the cpu type and print out other fancy things,
1599 at the top of the file. */
1600
1601 void
1602 arc_asm_file_start (file)
1603 FILE *file;
1604 {
1605 fprintf (file, "\t.cpu %s\n", arc_cpu_string);
1606 }
1607 \f
1608 /* Print operand X (an rtx) in assembler syntax to file FILE.
1609 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
1610 For `%' followed by punctuation, CODE is the punctuation and X is null. */
1611
1612 void
1613 arc_print_operand (file, x, code)
1614 FILE *file;
1615 rtx x;
1616 int code;
1617 {
1618 switch (code)
1619 {
1620 case '#' :
1621 /* Conditional branches. For now these are equivalent. */
1622 case '*' :
1623 /* Unconditional branches. Output the appropriate delay slot suffix. */
1624 if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
1625 {
1626 /* There's nothing in the delay slot. */
1627 fputs (".nd", file);
1628 }
1629 else
1630 {
1631 rtx jump = XVECEXP (final_sequence, 0, 0);
1632 rtx delay = XVECEXP (final_sequence, 0, 1);
1633 if (INSN_ANNULLED_BRANCH_P (jump))
1634 fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
1635 else
1636 fputs (".d", file);
1637 }
1638 return;
1639 case '?' : /* with leading "." */
1640 case '!' : /* without leading "." */
1641 /* This insn can be conditionally executed. See if the ccfsm machinery
1642 says it should be conditionalized. */
1643 if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
1644 {
1645 /* Is this insn in a delay slot? */
1646 if (final_sequence && XVECLEN (final_sequence, 0) == 2)
1647 {
1648 rtx insn = XVECEXP (final_sequence, 0, 1);
1649
1650 /* If the insn is annulled and is from the target path, we need
1651 to inverse the condition test. */
1652 if (INSN_ANNULLED_BRANCH_P (insn))
1653 {
1654 if (INSN_FROM_TARGET_P (insn))
1655 fprintf (file, "%s%s",
1656 code == '?' ? "." : "",
1657 arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
1658 else
1659 fprintf (file, "%s%s",
1660 code == '?' ? "." : "",
1661 arc_condition_codes[arc_ccfsm_current_cc]);
1662 }
1663 else
1664 {
1665 /* This insn is executed for either path, so don't
1666 conditionalize it at all. */
1667 ; /* nothing to do */
1668 }
1669 }
1670 else
1671 {
1672 /* This insn isn't in a delay slot. */
1673 fprintf (file, "%s%s",
1674 code == '?' ? "." : "",
1675 arc_condition_codes[arc_ccfsm_current_cc]);
1676 }
1677 }
1678 return;
1679 case '~' :
1680 /* Output a nop if we're between a set of the condition codes,
1681 and a conditional branch. */
1682 if (last_insn_set_cc_p)
1683 fputs ("nop\n\t", file);
1684 return;
1685 case 'd' :
1686 fputs (arc_condition_codes[get_arc_condition_code (x)], file);
1687 return;
1688 case 'D' :
1689 fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
1690 (get_arc_condition_code (x))],
1691 file);
1692 return;
1693 case 'R' :
1694 /* Write second word of DImode or DFmode reference,
1695 register or memory. */
1696 if (GET_CODE (x) == REG)
1697 fputs (reg_names[REGNO (x)+1], file);
1698 else if (GET_CODE (x) == MEM)
1699 {
1700 fputc ('[', file);
1701 /* Handle possible auto-increment. Since it is pre-increment and
1702 we have already done it, we can just use an offset of four. */
1703 /* ??? This is taken from rs6000.c I think. I don't think it is
1704 currently necessary, but keep it around. */
1705 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1706 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1707 output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
1708 else
1709 output_address (plus_constant (XEXP (x, 0), 4));
1710 fputc (']', file);
1711 }
1712 else
1713 output_operand_lossage ("invalid operand to %%R code");
1714 return;
1715 case 'S' :
1716 if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FLAG (x))
1717 || GET_CODE (x) == LABEL_REF)
1718 {
1719 fprintf (file, "%%st(");
1720 output_addr_const (file, x);
1721 fprintf (file, ")");
1722 return;
1723 }
1724 break;
1725 case 'H' :
1726 case 'L' :
1727 if (GET_CODE (x) == REG)
1728 {
1729 /* L = least significant word, H = most significant word */
1730 if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
1731 fputs (reg_names[REGNO (x)], file);
1732 else
1733 fputs (reg_names[REGNO (x)+1], file);
1734 }
1735 else if (GET_CODE (x) == CONST_INT
1736 || GET_CODE (x) == CONST_DOUBLE)
1737 {
1738 rtx first, second;
1739
1740 split_double (x, &first, &second);
1741 fprintf (file, "0x%08lx",
1742 (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
1743 }
1744 else
1745 output_operand_lossage ("invalid operand to %%H/%%L code");
1746 return;
1747 case 'A' :
1748 {
1749 char str[30];
1750
1751 if (GET_CODE (x) != CONST_DOUBLE
1752 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
1753 abort ();
1754
1755 real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
1756 fprintf (file, "%s", str);
1757 return;
1758 }
1759 case 'U' :
1760 /* Output a load/store with update indicator if appropriate. */
1761 if (GET_CODE (x) == MEM)
1762 {
1763 if (GET_CODE (XEXP (x, 0)) == PRE_INC
1764 || GET_CODE (XEXP (x, 0)) == PRE_DEC)
1765 fputs (".a", file);
1766 }
1767 else
1768 output_operand_lossage ("invalid operand to %%U code");
1769 return;
1770 case 'V' :
1771 /* Output cache bypass indicator for a load/store insn. Volatile memory
1772 refs are defined to use the cache bypass mechanism. */
1773 if (GET_CODE (x) == MEM)
1774 {
1775 if (MEM_VOLATILE_P (x))
1776 fputs (".di", file);
1777 }
1778 else
1779 output_operand_lossage ("invalid operand to %%V code");
1780 return;
1781 case 0 :
1782 /* Do nothing special. */
1783 break;
1784 default :
1785 /* Unknown flag. */
1786 output_operand_lossage ("invalid operand output code");
1787 }
1788
1789 switch (GET_CODE (x))
1790 {
1791 case REG :
1792 fputs (reg_names[REGNO (x)], file);
1793 break;
1794 case MEM :
1795 fputc ('[', file);
1796 if (GET_CODE (XEXP (x, 0)) == PRE_INC)
1797 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1798 GET_MODE_SIZE (GET_MODE (x))));
1799 else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
1800 output_address (plus_constant (XEXP (XEXP (x, 0), 0),
1801 - GET_MODE_SIZE (GET_MODE (x))));
1802 else
1803 output_address (XEXP (x, 0));
1804 fputc (']', file);
1805 break;
1806 case CONST_DOUBLE :
1807 /* We handle SFmode constants here as output_addr_const doesn't. */
1808 if (GET_MODE (x) == SFmode)
1809 {
1810 REAL_VALUE_TYPE d;
1811 long l;
1812
1813 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
1814 REAL_VALUE_TO_TARGET_SINGLE (d, l);
1815 fprintf (file, "0x%08lx", l);
1816 break;
1817 }
1818 /* Fall through. Let output_addr_const deal with it. */
1819 default :
1820 output_addr_const (file, x);
1821 break;
1822 }
1823 }
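/* Illustrative examples for the operand codes above, assuming a
   little-endian target with a DImode value in r2/r3: %0 prints "r2",
   %R0 and %H0 print "r3", and %L0 prints "r2".  For a 64-bit constant,
   %L0 and %H0 print the low and high 32-bit halves as 0x%08lx values.  */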
1824
1825 /* Print a memory address as an operand to reference that memory location. */
1826
1827 void
1828 arc_print_operand_address (file, addr)
1829 FILE *file;
1830 rtx addr;
1831 {
1832 register rtx base, index = 0;
1833 int offset = 0;
1834
1835 switch (GET_CODE (addr))
1836 {
1837 case REG :
1838 fputs (reg_names[REGNO (addr)], file);
1839 break;
1840 case SYMBOL_REF :
1841 if (/*???*/ 0 && SYMBOL_REF_FLAG (addr))
1842 {
1843 fprintf (file, "%%st(");
1844 output_addr_const (file, addr);
1845 fprintf (file, ")");
1846 }
1847 else
1848 output_addr_const (file, addr);
1849 break;
1850 case PLUS :
1851 if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
1852 offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
1853 else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
1854 offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
1855 else
1856 base = XEXP (addr, 0), index = XEXP (addr, 1);
1857 if (GET_CODE (base) != REG)
1858 abort ();
1859 fputs (reg_names[REGNO (base)], file);
1860 if (index == 0)
1861 {
1862 if (offset != 0)
1863 fprintf (file, ",%d", offset);
1864 }
1865 else if (GET_CODE (index) == REG)
1866 fprintf (file, ",%s", reg_names[REGNO (index)]);
1867 else if (GET_CODE (index) == SYMBOL_REF)
1868 fputc (',', file), output_addr_const (file, index);
1869 else
1870 abort ();
1871 break;
1872 case PRE_INC :
1873 case PRE_DEC :
1874 /* We shouldn't get here, as we've lost the mode of the memory object
1875 (which says how much to inc/dec by). */
1876 abort ();
1877 break;
1878 default :
1879 output_addr_const (file, addr);
1880 break;
1881 }
1882 }
1883
1884 /* Update compare/branch separation marker. */
1885
1886 static void
1887 record_cc_ref (insn)
1888 rtx insn;
1889 {
1890 last_insn_set_cc_p = current_insn_set_cc_p;
1891
1892 switch (get_attr_cond (insn))
1893 {
1894 case COND_SET :
1895 case COND_SET_ZN :
1896 case COND_SET_ZNC :
1897 if (get_attr_length (insn) == 1)
1898 current_insn_set_cc_p = 1;
1899 else
1900 current_insn_set_cc_p = 0;
1901 break;
1902 default :
1903 current_insn_set_cc_p = 0;
1904 break;
1905 }
1906 }
1907 \f
1908 /* Conditional execution support.
1909
1910 This is based on the ARM port but for now is much simpler.
1911
1912 A finite state machine takes care of noticing whether or not instructions
1913 can be conditionally executed, and thus decreases execution time and code
1914 size by deleting branch instructions. The fsm is controlled by
1915 final_prescan_insn, and controls the actions of PRINT_OPERAND. The patterns
1916 in the .md file for the branch insns also have a hand in this. */
1917
1918 /* The states of the fsm controlling condition codes are:
1919 0: normal, do nothing special
1920 1: don't output this insn
1921 2: don't output this insn
1922 3: make insns conditional
1923 4: make insns conditional
1924
1925 State transitions (state->state by whom, under what condition):
1926 0 -> 1 final_prescan_insn, if insn is conditional branch
1927 0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
1928 1 -> 3 branch patterns, after having not output the conditional branch
1929 2 -> 4 branch patterns, after having not output the conditional branch
1930 3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
1931 (the target label has CODE_LABEL_NUMBER equal to
1932 arc_ccfsm_target_label).
1933 4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
1934
1935 If the jump clobbers the conditions then we use states 2 and 4.
1936
1937 A similar thing can be done with conditional return insns.
1938
1939 We also handle separating branches from sets of the condition code.
1940 This is done here because knowledge of the ccfsm state is required, as
1941 we may not be outputting the branch. */
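/* As a concrete sketch (assuming the conditional-execution suffix syntax
   accepted by the ARC assembler; the real templates are in arc.md), a
   branch-over-one-insn sequence such as

        cmp r0,0
        bne .L1
        mov r1,1
   .L1:

   can instead be emitted as

        cmp r0,0
        mov.eq r1,1

   once the fsm has suppressed the branch (state 1) and the skipped insn is
   printed with the inverse of the branch's condition (state 3). */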
1942
1943 void
1944 arc_final_prescan_insn (insn, opvec, noperands)
1945 rtx insn;
1946 rtx *opvec ATTRIBUTE_UNUSED;
1947 int noperands ATTRIBUTE_UNUSED;
1948 {
1949 /* BODY will hold the body of INSN. */
1950 register rtx body = PATTERN (insn);
1951
1952 /* This will be 1 if trying to repeat the trick (i.e., do the `else' part of
1953 an if/then/else), and things need to be reversed. */
1954 int reverse = 0;
1955
1956 /* If we start with a return insn, we only succeed if we find another one. */
1957 int seeking_return = 0;
1958
1959 /* START_INSN will hold the insn from which we start looking. This is the
1960 first insn after the following code_label if REVERSE is true. */
1961 rtx start_insn = insn;
1962
1963 /* Update compare/branch separation marker. */
1964 record_cc_ref (insn);
1965
1966 /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
1967 We can't do this in macro FINAL_PRESCAN_INSN because it's called from
1968 final_scan_insn, which has `optimize' as a local. */
1969 if (optimize < 2 || TARGET_NO_COND_EXEC)
1970 return;
1971
1972 /* If in state 4, check if the target branch is reached, in order to
1973 change back to state 0. */
1974 if (arc_ccfsm_state == 4)
1975 {
1976 if (insn == arc_ccfsm_target_insn)
1977 {
1978 arc_ccfsm_target_insn = NULL;
1979 arc_ccfsm_state = 0;
1980 }
1981 return;
1982 }
1983
1984 /* If in state 3, it is possible to repeat the trick if this insn is an
1985 unconditional branch to a label, immediately following this branch
1986 is the previous target label (which is only used once), and the label this
1987 branch jumps to is not too far off. In other words, "we've done the
1988 `then' part, see if we can do the `else' part." */
1989 if (arc_ccfsm_state == 3)
1990 {
1991 if (simplejump_p (insn))
1992 {
1993 start_insn = next_nonnote_insn (start_insn);
1994 if (GET_CODE (start_insn) == BARRIER)
1995 {
1996 /* ??? Isn't this always a barrier? */
1997 start_insn = next_nonnote_insn (start_insn);
1998 }
1999 if (GET_CODE (start_insn) == CODE_LABEL
2000 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2001 && LABEL_NUSES (start_insn) == 1)
2002 reverse = TRUE;
2003 else
2004 return;
2005 }
2006 else if (GET_CODE (body) == RETURN)
2007 {
2008 start_insn = next_nonnote_insn (start_insn);
2009 if (GET_CODE (start_insn) == BARRIER)
2010 start_insn = next_nonnote_insn (start_insn);
2011 if (GET_CODE (start_insn) == CODE_LABEL
2012 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2013 && LABEL_NUSES (start_insn) == 1)
2014 {
2015 reverse = TRUE;
2016 seeking_return = 1;
2017 }
2018 else
2019 return;
2020 }
2021 else
2022 return;
2023 }
2024
2025 if (GET_CODE (insn) != JUMP_INSN)
2026 return;
2027
2028 /* This jump might be paralleled with a clobber of the condition codes;
2029 the jump should always come first. */
2030 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2031 body = XVECEXP (body, 0, 0);
2032
2033 if (reverse
2034 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2035 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2036 {
2037 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2038 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2039 int then_not_else = TRUE;
2040 /* Nonzero if next insn must be the target label. */
2041 int next_must_be_target_label_p;
2042 rtx this_insn = start_insn, label = 0;
2043
2044 /* Register the insn jumped to. */
2045 if (reverse)
2046 {
2047 if (!seeking_return)
2048 label = XEXP (SET_SRC (body), 0);
2049 }
2050 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2051 label = XEXP (XEXP (SET_SRC (body), 1), 0);
2052 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2053 {
2054 label = XEXP (XEXP (SET_SRC (body), 2), 0);
2055 then_not_else = FALSE;
2056 }
2057 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2058 seeking_return = 1;
2059 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2060 {
2061 seeking_return = 1;
2062 then_not_else = FALSE;
2063 }
2064 else
2065 abort ();
2066
2067 /* See how many insns this branch skips, and what kind of insns. If all
2068 insns are okay, and the label or unconditional branch to the same
2069 label is not too far away, succeed. */
2070 for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
2071 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2072 insns_skipped++)
2073 {
2074 rtx scanbody;
2075
2076 this_insn = next_nonnote_insn (this_insn);
2077 if (!this_insn)
2078 break;
2079
2080 if (next_must_be_target_label_p)
2081 {
2082 if (GET_CODE (this_insn) == BARRIER)
2083 continue;
2084 if (GET_CODE (this_insn) == CODE_LABEL
2085 && this_insn == label)
2086 {
2087 arc_ccfsm_state = 1;
2088 succeed = TRUE;
2089 }
2090 else
2091 fail = TRUE;
2092 break;
2093 }
2094
2095 scanbody = PATTERN (this_insn);
2096
2097 switch (GET_CODE (this_insn))
2098 {
2099 case CODE_LABEL:
2100 /* Succeed if it is the target label, otherwise fail since
2101 control falls in from somewhere else. */
2102 if (this_insn == label)
2103 {
2104 arc_ccfsm_state = 1;
2105 succeed = TRUE;
2106 }
2107 else
2108 fail = TRUE;
2109 break;
2110
2111 case BARRIER:
2112 /* Succeed if the following insn is the target label.
2113 Otherwise fail.
2114 If return insns are used then the last insn in a function
2115 will be a barrier. */
2116 next_must_be_target_label_p = TRUE;
2117 break;
2118
2119 case CALL_INSN:
2120 /* We can handle a call insn if there are no insns after it,
2121 i.e., the next "insn" is the target label. We don't have to
2122 worry about delay slots, as such insns are SEQUENCEs inside
2123 INSNs. ??? It is possible to handle such insns though. */
2124 if (get_attr_cond (this_insn) == COND_CANUSE)
2125 next_must_be_target_label_p = TRUE;
2126 else
2127 fail = TRUE;
2128 break;
2129
2130 case JUMP_INSN:
2131 /* If this is an unconditional branch to the same label, succeed.
2132 If it is to another label, do nothing. If it is conditional,
2133 fail. */
2134 /* ??? Probably, the tests for the SET and the PC are unnecessary. */
2135
2136 if (GET_CODE (scanbody) == SET
2137 && GET_CODE (SET_DEST (scanbody)) == PC)
2138 {
2139 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2140 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2141 {
2142 arc_ccfsm_state = 2;
2143 succeed = TRUE;
2144 }
2145 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2146 fail = TRUE;
2147 }
2148 else if (GET_CODE (scanbody) == RETURN
2149 && seeking_return)
2150 {
2151 arc_ccfsm_state = 2;
2152 succeed = TRUE;
2153 }
2154 else if (GET_CODE (scanbody) == PARALLEL)
2155 {
2156 if (get_attr_cond (this_insn) != COND_CANUSE)
2157 fail = TRUE;
2158 }
2159 break;
2160
2161 case INSN:
2162 /* We can only do this with insns that can use the condition
2163 codes (and don't set them). */
2164 if (GET_CODE (scanbody) == SET
2165 || GET_CODE (scanbody) == PARALLEL)
2166 {
2167 if (get_attr_cond (this_insn) != COND_CANUSE)
2168 fail = TRUE;
2169 }
2170 /* We can't handle other insns like sequences. */
2171 else
2172 fail = TRUE;
2173 break;
2174
2175 default:
2176 break;
2177 }
2178 }
2179
2180 if (succeed)
2181 {
2182 if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
2183 arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
2184 else if (seeking_return || arc_ccfsm_state == 2)
2185 {
2186 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2187 {
2188 this_insn = next_nonnote_insn (this_insn);
2189 if (this_insn && (GET_CODE (this_insn) == BARRIER
2190 || GET_CODE (this_insn) == CODE_LABEL))
2191 abort ();
2192 }
2193 if (!this_insn)
2194 {
2195 /* Oh dear! We ran off the end; give up. */
2196 extract_insn_cached (insn);
2197 arc_ccfsm_state = 0;
2198 arc_ccfsm_target_insn = NULL;
2199 return;
2200 }
2201 arc_ccfsm_target_insn = this_insn;
2202 }
2203 else
2204 abort ();
2205
2206 /* If REVERSE is true, ARC_CCFSM_CURRENT_CC needs to be inverted from
2207 what it was. */
2208 if (!reverse)
2209 arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
2210 0));
2211
2212 if (reverse || then_not_else)
2213 arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
2214 }
2215
2216 /* Restore recog_data. Getting the attributes of other insns can
2217 destroy this array, but final.c assumes that it remains intact
2218 across this call. */
2219 extract_insn_cached (insn);
2220 }
2221 }
2222
2223 /* Record that we are currently outputting label NUM with prefix PREFIX.
2224 If it's the label we're looking for, reset the ccfsm machinery.
2225
2226 Called from (*targetm.asm_out.internal_label). */
2227
2228 void
2229 arc_ccfsm_at_label (prefix, num)
2230 const char *prefix;
2231 int num;
2232 {
2233 if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
2234 && !strcmp (prefix, "L"))
2235 {
2236 arc_ccfsm_state = 0;
2237 arc_ccfsm_target_insn = NULL_RTX;
2238 }
2239 }
2240
2241 /* See if the current insn, which is a conditional branch, is to be
2242 deleted. */
2243
2244 int
2245 arc_ccfsm_branch_deleted_p ()
2246 {
2247 if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
2248 return 1;
2249 return 0;
2250 }
2251
2252 /* Record that a branch isn't output because subsequent insns can be
2253 conditionalized. */
2254
2255 void
2256 arc_ccfsm_record_branch_deleted ()
2257 {
2258 /* Indicate we're conditionalizing insns now. */
2259 arc_ccfsm_state += 2;
2260
2261 /* If the next insn is a subroutine call, we still need a nop between the
2262 cc setter and user. We need to undo the effect of calling record_cc_ref
2263 for the just deleted branch. */
2264 current_insn_set_cc_p = last_insn_set_cc_p;
2265 }
2266 \f
2267 void
2268 arc_va_start (valist, nextarg)
2269 tree valist;
2270 rtx nextarg;
2271 {
2272 /* See arc_setup_incoming_varargs for reasons for this oddity. */
2273 if (current_function_args_info < 8
2274 && (current_function_args_info & 1))
2275 nextarg = plus_constant (nextarg, UNITS_PER_WORD);
2276
2277 std_expand_builtin_va_start (valist, nextarg);
2278 }
2279
2280 rtx
2281 arc_va_arg (valist, type)
2282 tree valist, type;
2283 {
2284 rtx addr_rtx;
2285 tree addr, incr;
2286 tree type_ptr = build_pointer_type (type);
2287
2288 /* All aggregates are passed by reference. All scalar types larger
2289 than 8 bytes are passed by reference. */
2290
2291 if (AGGREGATE_TYPE_P (type) || int_size_in_bytes (type) > 8)
2292 {
2293 tree type_ptr_ptr = build_pointer_type (type_ptr);
2294
2295 addr = build (INDIRECT_REF, type_ptr,
2296 build (NOP_EXPR, type_ptr_ptr, valist));
2297
2298 incr = build (PLUS_EXPR, TREE_TYPE (valist),
2299 valist, build_int_2 (UNITS_PER_WORD, 0));
2300 }
2301 else
2302 {
2303 HOST_WIDE_INT align, rounded_size;
2304
2305 /* Compute the rounded size of the type. */
2306 align = PARM_BOUNDARY / BITS_PER_UNIT;
2307 rounded_size = (((TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT
2308 + align - 1) / align) * align);
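/* For example, with a 32 bit PARM_BOUNDARY (so ALIGN is 4), a 6 byte
   scalar rounds up to a ROUNDED_SIZE of 8, while a 4 byte int stays
   at 4. */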
2309
2310 /* Align 8 byte operands. */
2311 addr = valist;
2312 if (TYPE_ALIGN (type) > BITS_PER_WORD)
2313 {
2314 /* AP = (TYPE *)(((int)AP + 7) & -8) */
2315
2316 addr = build (NOP_EXPR, integer_type_node, valist);
2317 addr = fold (build (PLUS_EXPR, integer_type_node, addr,
2318 build_int_2 (7, 0)));
2319 addr = fold (build (BIT_AND_EXPR, integer_type_node, addr,
2320 build_int_2 (-8, 0)));
2321 addr = fold (build (NOP_EXPR, TREE_TYPE (valist), addr));
2322 }
2323
2324 /* The increment is always rounded_size past the aligned pointer. */
2325 incr = fold (build (PLUS_EXPR, TREE_TYPE (addr), addr,
2326 build_int_2 (rounded_size, 0)));
2327
2328 /* Adjust the pointer in big-endian mode. */
2329 if (BYTES_BIG_ENDIAN)
2330 {
2331 HOST_WIDE_INT adj;
2332 adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
2333 if (rounded_size > align)
2334 adj = rounded_size;
2335
2336 addr = fold (build (PLUS_EXPR, TREE_TYPE (addr), addr,
2337 build_int_2 (rounded_size - adj, 0)));
2338 }
2339 }
2340
2341 /* Evaluate the data address. */
2342 addr_rtx = expand_expr (addr, NULL_RTX, Pmode, EXPAND_NORMAL);
2343 addr_rtx = copy_to_reg (addr_rtx);
2344
2345 /* Compute new value for AP. */
2346 incr = build (MODIFY_EXPR, TREE_TYPE (valist), valist, incr);
2347 TREE_SIDE_EFFECTS (incr) = 1;
2348 expand_expr (incr, const0_rtx, VOIDmode, EXPAND_NORMAL);
2349
2350 return addr_rtx;
2351 }
2352
2353 /* On the ARC, function addresses are not the same as normal addresses.
2354 Branch to absolute address insns take an address that is right-shifted
2355 by 2. We encode the fact that we have a function here, and then emit a
2356 special assembler op when outputting the address. */
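/* The intent is that the address of a function `foo' comes out as
   "%st(foo)" -- the %st() assembler operator emitting the value shifted
   right by 2 (cf. the currently disabled SYMBOL_REF case in
   arc_print_operand_address above); a sketch only, as the exact call and
   branch templates live in arc.md. */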
2357
2358 static void
2359 arc_encode_section_info (decl, first)
2360 tree decl;
2361 int first ATTRIBUTE_UNUSED;
2362 {
2363 if (TREE_CODE (decl) == FUNCTION_DECL)
2364 SYMBOL_REF_FLAG (XEXP (DECL_RTL (decl), 0)) = 1;
2365 }
2366
2367 /* This is how to output a definition of an internal numbered label where
2368 PREFIX is the class of label and LABELNO is the number within the class. */
2369
2370 static void
2371 arc_internal_label (stream, prefix, labelno)
2372 FILE *stream;
2373 const char *prefix;
2374 unsigned long labelno;
2375 {
2376 arc_ccfsm_at_label (prefix, labelno);
2377 default_internal_label (stream, prefix, labelno);
2378 }