827cc1e6f0b699d556ba10c1ed0aa6c6bdb6fbd9
[gcc.git] / gcc / config / arc / arc.c
1 /* Subroutines used for code generation on the Argonaut ARC cpu.
2 Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
3 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* ??? This is an old port, and is undoubtedly suffering from bit rot. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "real.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "function.h"
38 #include "expr.h"
39 #include "recog.h"
40 #include "toplev.h"
41 #include "tm_p.h"
42 #include "target.h"
43 #include "target-def.h"
44
/* Which cpu we're compiling for.  */
int arc_cpu_type;

/* Name of mangle string to add to symbols to separate code compiled for each
   cpu (or NULL).  */
const char *arc_mangle_cpu;

/* Save the operands last given to a compare for use when we
   generate a scc or bcc insn.  */
rtx arc_compare_op0, arc_compare_op1;

/* Name of text, data, and rodata sections used in varasm.c.
   Allocated and formatted once by arc_init.  */
const char *arc_text_section;
const char *arc_data_section;
const char *arc_rodata_section;

/* Array of valid operand punctuation characters.
   Filled in by arc_init; indexed by character code.  */
char arc_punct_chars[256];

/* Variables used by arc_final_prescan_insn to implement conditional
   execution.  */
static int arc_ccfsm_state;
static int arc_ccfsm_current_cc;
static rtx arc_ccfsm_target_insn;
static int arc_ccfsm_target_label;

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
#define MAX_INSNS_SKIPPED 3

/* A nop is needed between a 4 byte insn that sets the condition codes and
   a branch that uses them (the same isn't true for an 8 byte insn that sets
   the condition codes).  Set by arc_final_prescan_insn.  Used by
   arc_print_operand.  */
static int last_insn_set_cc_p;
static int current_insn_set_cc_p;

/* Forward declarations for the static functions defined below.  */
static bool arc_handle_option (size_t, const char *, int);
static void record_cc_ref (rtx);
static void arc_init_reg_tables (void);
static int get_arc_condition_code (rtx);
const struct attribute_spec arc_attribute_table[];
static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
static bool arc_assemble_integer (rtx, unsigned int, int);
static void arc_output_function_prologue (FILE *, HOST_WIDE_INT);
static void arc_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void arc_file_start (void);
static void arc_internal_label (FILE *, const char *, unsigned long);
static void arc_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
					tree, int *, int);
static bool arc_rtx_costs (rtx, int, int, int *);
static int arc_address_cost (rtx);
static void arc_external_libcall (rtx);
static bool arc_return_in_memory (tree, tree);
static bool arc_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
				   tree, bool);
100 \f
/* Initialize the GCC target structure.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arc_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START arc_file_start
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arc_attribute_table
#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arc_internal_label
#undef TARGET_ASM_EXTERNAL_LIBCALL
#define TARGET_ASM_EXTERNAL_LIBCALL arc_external_libcall

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION arc_handle_option

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arc_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arc_address_cost

/* These three hooks return true unconditionally.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY arc_return_in_memory
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE arc_pass_by_reference
#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS arc_setup_incoming_varargs

/* The single instance of the target vector, built from the macros above.  */
struct gcc_target targetm = TARGET_INITIALIZER;
148 \f
149 /* Implement TARGET_HANDLE_OPTION. */
150
151 static bool
152 arc_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
153 {
154 switch (code)
155 {
156 case OPT_mcpu_:
157 return strcmp (arg, "base") == 0 || ARC_EXTENSION_CPU (arg);
158
159 default:
160 return true;
161 }
162 }
163
/* Called by OVERRIDE_OPTIONS to initialize various things.  */

void
arc_init (void)
{
  char *tmp;

  /* Set the pseudo-ops for the various standard sections.
     NOTE(review): sizeof (ARC_SECTION_FORMAT) already includes the
     terminating NUL if it is a string literal, so the "+ 1" is just
     slack -- confirm against the macro's definition.  */
  arc_text_section = tmp = xmalloc (strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
  sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
  arc_data_section = tmp = xmalloc (strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
  sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
  arc_rodata_section = tmp = xmalloc (strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
  sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);

  arc_init_reg_tables ();

  /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P.  */
  memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
  arc_punct_chars['#'] = 1;
  arc_punct_chars['*'] = 1;
  arc_punct_chars['?'] = 1;
  arc_punct_chars['!'] = 1;
  arc_punct_chars['~'] = 1;
}
189 \f
/* The condition codes of the ARC, and the inverse function.
   Codes are laid out so that each condition and its inverse differ only
   in the low bit, which is what ARC_INVERSE_CONDITION_CODE flips.  */
static const char *const arc_condition_codes[] =
{
  "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
  "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
};

#define ARC_INVERSE_CONDITION_CODE(X)  ((X) ^ 1)
198
199 /* Returns the index of the ARC condition code string in
200 `arc_condition_codes'. COMPARISON should be an rtx like
201 `(eq (...) (...))'. */
202
203 static int
204 get_arc_condition_code (rtx comparison)
205 {
206 switch (GET_CODE (comparison))
207 {
208 case EQ : return 2;
209 case NE : return 3;
210 case GT : return 10;
211 case LE : return 11;
212 case GE : return 12;
213 case LT : return 13;
214 case GTU : return 14;
215 case LEU : return 15;
216 case LTU : return 6;
217 case GEU : return 7;
218 default : gcc_unreachable ();
219 }
220 /*NOTREACHED*/
221 return (42);
222 }
223
224 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
225 return the mode to be used for the comparison. */
226
227 enum machine_mode
228 arc_select_cc_mode (enum rtx_code op,
229 rtx x ATTRIBUTE_UNUSED,
230 rtx y ATTRIBUTE_UNUSED)
231 {
232 switch (op)
233 {
234 case EQ :
235 case NE :
236 return CCZNmode;
237 default :
238 switch (GET_CODE (x))
239 {
240 case AND :
241 case IOR :
242 case XOR :
243 case SIGN_EXTEND :
244 case ZERO_EXTEND :
245 return CCZNmode;
246 case ASHIFT :
247 case ASHIFTRT :
248 case LSHIFTRT :
249 return CCZNCmode;
250 default:
251 break;
252 }
253 }
254 return CCmode;
255 }
256 \f
/* Vectors to keep interesting information about registers where it can easily
   be got.  We use to use the actual mode value as the bit number, but there
   is (or may be) more than 32 modes now.  Instead we use two tables: one
   indexed by hard register number, and one indexed by mode.  */

/* The purpose of arc_mode_class is to shrink the range of modes so that
   they all fit (as bit numbers) in a 32-bit word (again).  Each real mode is
   mapped into one arc_mode_class mode.  */

enum arc_mode_class {
  C_MODE,
  S_MODE, D_MODE, T_MODE, O_MODE,
  SF_MODE, DF_MODE, TF_MODE, OF_MODE
};

/* Modes for condition codes.  */
#define C_MODES (1 << (int) C_MODE)

/* Modes for single-word and smaller quantities.  */
#define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))

/* Modes for double-word and smaller quantities.
   NOTE(review): the DF_MODE term is missing the (int) cast its siblings
   have; harmless, but inconsistent.  */
#define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << DF_MODE))

/* Modes for quad-word and smaller quantities.  */
#define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))

/* Value is 1 if register/mode pair is acceptable on arc.  */

const unsigned int arc_hard_regno_mode_ok[] = {
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
  D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,

  /* ??? Leave these as S_MODES for now.  */
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
};

/* Machine mode -> arc_mode_class bit; filled in by arc_init_reg_tables.  */
unsigned int arc_mode_class [NUM_MACHINE_MODES];

/* Hard register number -> register class; filled in by
   arc_init_reg_tables.  */
enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
302
303 static void
304 arc_init_reg_tables (void)
305 {
306 int i;
307
308 for (i = 0; i < NUM_MACHINE_MODES; i++)
309 {
310 switch (GET_MODE_CLASS (i))
311 {
312 case MODE_INT:
313 case MODE_PARTIAL_INT:
314 case MODE_COMPLEX_INT:
315 if (GET_MODE_SIZE (i) <= 4)
316 arc_mode_class[i] = 1 << (int) S_MODE;
317 else if (GET_MODE_SIZE (i) == 8)
318 arc_mode_class[i] = 1 << (int) D_MODE;
319 else if (GET_MODE_SIZE (i) == 16)
320 arc_mode_class[i] = 1 << (int) T_MODE;
321 else if (GET_MODE_SIZE (i) == 32)
322 arc_mode_class[i] = 1 << (int) O_MODE;
323 else
324 arc_mode_class[i] = 0;
325 break;
326 case MODE_FLOAT:
327 case MODE_COMPLEX_FLOAT:
328 if (GET_MODE_SIZE (i) <= 4)
329 arc_mode_class[i] = 1 << (int) SF_MODE;
330 else if (GET_MODE_SIZE (i) == 8)
331 arc_mode_class[i] = 1 << (int) DF_MODE;
332 else if (GET_MODE_SIZE (i) == 16)
333 arc_mode_class[i] = 1 << (int) TF_MODE;
334 else if (GET_MODE_SIZE (i) == 32)
335 arc_mode_class[i] = 1 << (int) OF_MODE;
336 else
337 arc_mode_class[i] = 0;
338 break;
339 case MODE_CC:
340 arc_mode_class[i] = 1 << (int) C_MODE;
341 break;
342 default:
343 arc_mode_class[i] = 0;
344 break;
345 }
346 }
347
348 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
349 {
350 if (i < 60)
351 arc_regno_reg_class[i] = GENERAL_REGS;
352 else if (i == 60)
353 arc_regno_reg_class[i] = LPCOUNT_REG;
354 else if (i == 61)
355 arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
356 else
357 arc_regno_reg_class[i] = NO_REGS;
358 }
359 }
360 \f
/* ARC specific attribute support.

   The ARC has these attributes:
   interrupt - for interrupt functions
*/

const struct attribute_spec arc_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt", 1, 1, true, false, false, arc_handle_interrupt_attribute },
  /* Table terminator.  */
  { NULL, 0, 0, false, false, false, NULL }
};
373
374 /* Handle an "interrupt" attribute; arguments as in
375 struct attribute_spec.handler. */
376 static tree
377 arc_handle_interrupt_attribute (tree *node ATTRIBUTE_UNUSED,
378 tree name,
379 tree args,
380 int flags ATTRIBUTE_UNUSED,
381 bool *no_add_attrs)
382 {
383 tree value = TREE_VALUE (args);
384
385 if (TREE_CODE (value) != STRING_CST)
386 {
387 warning (OPT_Wattributes,
388 "argument of %qs attribute is not a string constant",
389 IDENTIFIER_POINTER (name));
390 *no_add_attrs = true;
391 }
392 else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
393 && strcmp (TREE_STRING_POINTER (value), "ilink2"))
394 {
395 warning (OPT_Wattributes,
396 "argument of %qs attribute is not \"ilink1\" or \"ilink2\"",
397 IDENTIFIER_POINTER (name));
398 *no_add_attrs = true;
399 }
400
401 return NULL_TREE;
402 }
403
404 \f
405 /* Acceptable arguments to the call insn. */
406
407 int
408 call_address_operand (rtx op, enum machine_mode mode)
409 {
410 return (symbolic_operand (op, mode)
411 || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
412 || (GET_CODE (op) == REG));
413 }
414
415 int
416 call_operand (rtx op, enum machine_mode mode)
417 {
418 if (GET_CODE (op) != MEM)
419 return 0;
420 op = XEXP (op, 0);
421 return call_address_operand (op, mode);
422 }
423
424 /* Returns 1 if OP is a symbol reference. */
425
426 int
427 symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
428 {
429 switch (GET_CODE (op))
430 {
431 case SYMBOL_REF:
432 case LABEL_REF:
433 case CONST :
434 return 1;
435 default:
436 return 0;
437 }
438 }
439
440 /* Return truth value of statement that OP is a symbolic memory
441 operand of mode MODE. */
442
443 int
444 symbolic_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
445 {
446 if (GET_CODE (op) == SUBREG)
447 op = SUBREG_REG (op);
448 if (GET_CODE (op) != MEM)
449 return 0;
450 op = XEXP (op, 0);
451 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
452 || GET_CODE (op) == LABEL_REF);
453 }
454
455 /* Return true if OP is a short immediate (shimm) value. */
456
457 int
458 short_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
459 {
460 if (GET_CODE (op) != CONST_INT)
461 return 0;
462 return SMALL_INT (INTVAL (op));
463 }
464
465 /* Return true if OP will require a long immediate (limm) value.
466 This is currently only used when calculating length attributes. */
467
468 int
469 long_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
470 {
471 switch (GET_CODE (op))
472 {
473 case SYMBOL_REF :
474 case LABEL_REF :
475 case CONST :
476 return 1;
477 case CONST_INT :
478 return !SMALL_INT (INTVAL (op));
479 case CONST_DOUBLE :
480 /* These can happen because large unsigned 32-bit constants are
481 represented this way (the multiplication patterns can cause these
482 to be generated). They also occur for SFmode values. */
483 return 1;
484 default:
485 break;
486 }
487 return 0;
488 }
489
/* Return true if OP is a MEM that when used as a load or store address will
   require an 8 byte insn.
   Load and store instructions don't allow the same possibilities but they're
   similar enough that this one function will do.
   This is currently only used when calculating length attributes.  */

int
long_immediate_loadstore_operand (rtx op,
				  enum machine_mode mode ATTRIBUTE_UNUSED)
{
  if (GET_CODE (op) != MEM)
    return 0;

  /* Classify by the address inside the MEM.  */
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case SYMBOL_REF :
    case LABEL_REF :
    case CONST :
      return 1;
    case CONST_INT :
      /* This must be handled as "st c,[limm]".  Ditto for load.
	 Technically, the assembler could translate some possibilities to
	 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
	 assume that it does.  */
      return 1;
    case CONST_DOUBLE :
      /* These can happen because large unsigned 32-bit constants are
	 represented this way (the multiplication patterns can cause these
	 to be generated).  They also occur for SFmode values.  */
      return 1;
    case REG :
      /* Plain register-indirect never needs a limm.  */
      return 0;
    case PLUS :
      /* reg + offset needs a limm only when the offset is not a shimm.  */
      if (GET_CODE (XEXP (op, 1)) == CONST_INT
	  && !SMALL_INT (INTVAL (XEXP (op, 1))))
	return 1;
      return 0;
    default:
      break;
    }
  return 0;
}
533
/* Return true if OP is an acceptable argument for a single word
   move source.  */

int
move_src_operand (rtx op, enum machine_mode mode)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF :
    case LABEL_REF :
    case CONST :
      return 1;
    case CONST_INT :
      /* NOTE(review): presumably LARGE_INT tests that the value fits a
	 32-bit limm -- confirm against the macro's definition.  */
      return (LARGE_INT (INTVAL (op)));
    case CONST_DOUBLE :
      /* We can handle DImode integer constants in SImode if the value
	 (signed or unsigned) will fit in 32 bits.  This is needed because
	 large unsigned 32-bit constants are represented as CONST_DOUBLEs.  */
      if (mode == SImode)
	return arc_double_limm_p (op);
      /* We can handle 32-bit floating point constants.  */
      if (mode == SFmode)
	return GET_MODE (op) == SFmode;
      return 0;
    case REG :
      return register_operand (op, mode);
    case SUBREG :
      /* (subreg (mem ...) ...) can occur here if the inner part was once a
	 pseudo-reg and is now a stack slot.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM)
	return address_operand (XEXP (SUBREG_REG (op), 0), mode);
      else
	return register_operand (op, mode);
    case MEM :
      return address_operand (XEXP (op, 0), mode);
    default :
      return 0;
    }
}
573
574 /* Return true if OP is an acceptable argument for a double word
575 move source. */
576
577 int
578 move_double_src_operand (rtx op, enum machine_mode mode)
579 {
580 switch (GET_CODE (op))
581 {
582 case REG :
583 return register_operand (op, mode);
584 case SUBREG :
585 /* (subreg (mem ...) ...) can occur here if the inner part was once a
586 pseudo-reg and is now a stack slot. */
587 if (GET_CODE (SUBREG_REG (op)) == MEM)
588 return move_double_src_operand (SUBREG_REG (op), mode);
589 else
590 return register_operand (op, mode);
591 case MEM :
592 /* Disallow auto inc/dec for now. */
593 if (GET_CODE (XEXP (op, 0)) == PRE_DEC
594 || GET_CODE (XEXP (op, 0)) == PRE_INC)
595 return 0;
596 return address_operand (XEXP (op, 0), mode);
597 case CONST_INT :
598 case CONST_DOUBLE :
599 return 1;
600 default :
601 return 0;
602 }
603 }
604
605 /* Return true if OP is an acceptable argument for a move destination. */
606
607 int
608 move_dest_operand (rtx op, enum machine_mode mode)
609 {
610 switch (GET_CODE (op))
611 {
612 case REG :
613 return register_operand (op, mode);
614 case SUBREG :
615 /* (subreg (mem ...) ...) can occur here if the inner part was once a
616 pseudo-reg and is now a stack slot. */
617 if (GET_CODE (SUBREG_REG (op)) == MEM)
618 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
619 else
620 return register_operand (op, mode);
621 case MEM :
622 return address_operand (XEXP (op, 0), mode);
623 default :
624 return 0;
625 }
626 }
627
628 /* Return true if OP is valid load with update operand. */
629
630 int
631 load_update_operand (rtx op, enum machine_mode mode)
632 {
633 if (GET_CODE (op) != MEM
634 || GET_MODE (op) != mode)
635 return 0;
636 op = XEXP (op, 0);
637 if (GET_CODE (op) != PLUS
638 || GET_MODE (op) != Pmode
639 || !register_operand (XEXP (op, 0), Pmode)
640 || !nonmemory_operand (XEXP (op, 1), Pmode))
641 return 0;
642 return 1;
643 }
644
645 /* Return true if OP is valid store with update operand. */
646
647 int
648 store_update_operand (rtx op, enum machine_mode mode)
649 {
650 if (GET_CODE (op) != MEM
651 || GET_MODE (op) != mode)
652 return 0;
653 op = XEXP (op, 0);
654 if (GET_CODE (op) != PLUS
655 || GET_MODE (op) != Pmode
656 || !register_operand (XEXP (op, 0), Pmode)
657 || !(GET_CODE (XEXP (op, 1)) == CONST_INT
658 && SMALL_INT (INTVAL (XEXP (op, 1)))))
659 return 0;
660 return 1;
661 }
662
663 /* Return true if OP is a non-volatile non-immediate operand.
664 Volatile memory refs require a special "cache-bypass" instruction
665 and only the standard movXX patterns are set up to handle them. */
666
667 int
668 nonvol_nonimm_operand (rtx op, enum machine_mode mode)
669 {
670 if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
671 return 0;
672 return nonimmediate_operand (op, mode);
673 }
674
675 /* Accept integer operands in the range -0x80000000..0x7fffffff. We have
676 to check the range carefully since this predicate is used in DImode
677 contexts. */
678
679 int
680 const_sint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
681 {
682 /* All allowed constants will fit a CONST_INT. */
683 return (GET_CODE (op) == CONST_INT
684 && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
685 }
686
/* Accept integer operands in the range 0..0xffffffff.  We have to check the
   range carefully since this predicate is used in DImode contexts.  Also, we
   need some extra crud to make it work when hosted on 64-bit machines.  */

int
const_uint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
#if HOST_BITS_PER_WIDE_INT > 32
  /* All allowed constants will fit a CONST_INT.  */
  return (GET_CODE (op) == CONST_INT
	  && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
#else
  /* On a 32-bit host, values in 0x80000000..0xffffffff are represented
     as a CONST_DOUBLE with a zero high word.  */
  return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
	  || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
#endif
}
703
704 /* Return 1 if OP is a comparison operator valid for the mode of CC.
705 This allows the use of MATCH_OPERATOR to recognize all the branch insns.
706
707 Some insns only set a few bits in the condition code. So only allow those
708 comparisons that use the bits that are valid. */
709
710 int
711 proper_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
712 {
713 enum rtx_code code;
714 if (!COMPARISON_P (op))
715 return 0;
716
717 code = GET_CODE (op);
718 if (GET_MODE (XEXP (op, 0)) == CCZNmode)
719 return (code == EQ || code == NE);
720 if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
721 return (code == EQ || code == NE
722 || code == LTU || code == GEU || code == GTU || code == LEU);
723 return 1;
724 }
725 \f
726 /* Misc. utilities. */
727
728 /* X and Y are two things to compare using CODE. Emit the compare insn and
729 return the rtx for the cc reg in the proper mode. */
730
731 rtx
732 gen_compare_reg (enum rtx_code code, rtx x, rtx y)
733 {
734 enum machine_mode mode = SELECT_CC_MODE (code, x, y);
735 rtx cc_reg;
736
737 cc_reg = gen_rtx_REG (mode, 61);
738
739 emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
740 gen_rtx_COMPARE (mode, x, y)));
741
742 return cc_reg;
743 }
744
/* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
   We assume the value can be either signed or unsigned.  */

int
arc_double_limm_p (rtx value)
{
  HOST_WIDE_INT low, high;

  gcc_assert (GET_CODE (value) == CONST_DOUBLE);

  low = CONST_DOUBLE_LOW (value);
  high = CONST_DOUBLE_HIGH (value);

  if (low & 0x80000000)
    {
      /* Bit 31 set: accept either an unsigned 32-bit value (high word
	 zero) or a sign-extended negative 32-bit value (bits 31 and up
	 of LOW all set, and HIGH all ones).  */
      return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
	      || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
		   == - (unsigned HOST_WIDE_INT) 0x80000000)
		  && high == -1));
    }
  else
    {
      /* Bit 31 clear: a non-negative value fits iff the high word is
	 zero.  */
      return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
    }
}
770 \f
771 /* Do any needed setup for a variadic function. For the ARC, we must
772 create a register parameter block, and then copy any anonymous arguments
773 in registers to memory.
774
775 CUM has not been updated for the last named argument which has type TYPE
776 and mode MODE, and we rely on this fact.
777
778 We do things a little weird here. We're supposed to only allocate space
779 for the anonymous arguments. However we need to keep the stack eight byte
780 aligned. So we round the space up if necessary, and leave it to va_start
781 to compensate. */
782
static void
arc_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			    enum machine_mode mode,
			    tree type ATTRIBUTE_UNUSED,
			    int *pretend_size,
			    int no_rtl)
{
  int first_anon_arg;

  /* All BLKmode values are passed by reference.  */
  gcc_assert (mode != BLKmode);

  /* Index of the first anonymous register argument: the words already
     consumed (*CUM) plus the words occupied by the last named argument
     (MODE), rounded up.  */
  first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
			   / UNITS_PER_WORD);

  if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
    {
      /* Note that first_reg_offset < MAX_ARC_PARM_REGS.  */
      int first_reg_offset = first_anon_arg;
      /* Size in words to "pretend" allocate.  */
      int size = MAX_ARC_PARM_REGS - first_reg_offset;
      /* Extra slop to keep stack eight byte aligned.  */
      int align_slop = size & 1;
      rtx regblock;

      /* Spill the remaining parameter registers to the register parm
	 save area just above the incoming args.  */
      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)
					     + align_slop * UNITS_PER_WORD));
      set_mem_alias_set (regblock, get_varargs_alias_set ());
      set_mem_align (regblock, BITS_PER_WORD);
      move_block_from_reg (first_reg_offset, regblock,
			   MAX_ARC_PARM_REGS - first_reg_offset);

      *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
		       * UNITS_PER_WORD);
    }
}
821 \f
822 /* Cost functions. */
823
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
arc_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total)
{
  switch (code)
    {
      /* Small integers are as cheap as registers.  4 byte values can
	 be fetched as immediate constants - let's give that the cost
	 of an extra insn.  */
    case CONST_INT:
      if (SMALL_INT (INTVAL (x)))
	{
	  *total = 0;
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (1);
      return true;

    case CONST_DOUBLE:
      {
	/* Cost one insn per half that needs a limm.  */
	rtx high, low;
	split_double (x, &high, &low);
	*total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
				+ !SMALL_INT (INTVAL (low)));
	return true;
      }

      /* Encourage synth_mult to find a synthetic multiply when reasonable.
	 If we need more than 12 insns to do a multiply, then go out-of-line,
	 since the call overhead will be < 10% of the cost of the multiply.  */
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (TARGET_SHIFTER)
	*total = COSTS_N_INSNS (1);
      else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	*total = COSTS_N_INSNS (16);
      else
	/* Without a barrel shifter, a shift by N costs N single-bit
	   shift insns.  */
	*total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
      /* Return false so the operands are still scanned.  */
      return false;

    default:
      return false;
    }
}
877
878
879 /* Provide the costs of an addressing mode that contains ADDR.
880 If ADDR is not a valid address, its cost is irrelevant. */
881
882 static int
883 arc_address_cost (rtx addr)
884 {
885 switch (GET_CODE (addr))
886 {
887 case REG :
888 return 1;
889
890 case LABEL_REF :
891 case SYMBOL_REF :
892 case CONST :
893 return 2;
894
895 case PLUS :
896 {
897 register rtx plus0 = XEXP (addr, 0);
898 register rtx plus1 = XEXP (addr, 1);
899
900 if (GET_CODE (plus0) != REG)
901 break;
902
903 switch (GET_CODE (plus1))
904 {
905 case CONST_INT :
906 return SMALL_INT (plus1) ? 1 : 2;
907 case CONST :
908 case SYMBOL_REF :
909 case LABEL_REF :
910 return 2;
911 default:
912 break;
913 }
914 break;
915 }
916 default:
917 break;
918 }
919
920 return 4;
921 }
922 \f
923 /* Function prologue/epilogue handlers. */
924
925 /* ARC stack frames look like:
926
927 Before call After call
928 +-----------------------+ +-----------------------+
929 | | | |
930 high | local variables, | | local variables, |
931 mem | reg save area, etc. | | reg save area, etc. |
932 | | | |
933 +-----------------------+ +-----------------------+
934 | | | |
935 | arguments on stack. | | arguments on stack. |
936 | | | |
937 SP+16->+-----------------------+FP+48->+-----------------------+
938 | 4 word save area for | | reg parm save area, |
939 | return addr, prev %fp | | only created for |
940 SP+0->+-----------------------+ | variable argument |
941 | functions |
942 FP+16->+-----------------------+
943 | 4 word save area for |
944 | return addr, prev %fp |
945 FP+0->+-----------------------+
946 | |
947 | local variables |
948 | |
949 +-----------------------+
950 | |
951 | register save area |
952 | |
953 +-----------------------+
954 | |
955 | alloca allocations |
956 | |
957 +-----------------------+
958 | |
959 | arguments on stack |
960 | |
961 SP+16->+-----------------------+
962 low | 4 word save area for |
963 memory | return addr, prev %fp |
964 SP+0->+-----------------------+
965
966 Notes:
967 1) The "reg parm save area" does not exist for non variable argument fns.
968 The "reg parm save area" can be eliminated completely if we created our
969 own va-arc.h, but that has tradeoffs as well (so it's not done). */
970
/* Structure to be filled in by arc_compute_frame_size with register
   save masks, and offsets for the current function.  */
struct arc_frame_info
{
  unsigned int total_size;	/* # bytes that the entire frame takes up.  */
  unsigned int extra_size;	/* # bytes of extra stuff.  */
  unsigned int pretend_size;	/* # bytes we push and pretend caller did.  */
  unsigned int args_size;	/* # bytes that outgoing arguments take up.  */
  unsigned int reg_size;	/* # bytes needed to store regs.  */
  unsigned int var_size;	/* # bytes that variables take up.  */
  unsigned int reg_offset;	/* Offset from new sp to store regs.  */
  unsigned int gmask;		/* Mask of saved gp registers.  */
  int initialized;		/* Nonzero if frame size already calculated.  */
};

/* Current frame information calculated by arc_compute_frame_size.  */
static struct arc_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info.  */
static struct arc_frame_info zero_frame_info;
991
992 /* Type of function DECL.
993
994 The result is cached. To reset the cache at the end of a function,
995 call with DECL = NULL_TREE. */
996
997 enum arc_function_type
998 arc_compute_function_type (tree decl)
999 {
1000 tree a;
1001 /* Cached value. */
1002 static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
1003 /* Last function we were called for. */
1004 static tree last_fn = NULL_TREE;
1005
1006 /* Resetting the cached value? */
1007 if (decl == NULL_TREE)
1008 {
1009 fn_type = ARC_FUNCTION_UNKNOWN;
1010 last_fn = NULL_TREE;
1011 return fn_type;
1012 }
1013
1014 if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
1015 return fn_type;
1016
1017 /* Assume we have a normal function (not an interrupt handler). */
1018 fn_type = ARC_FUNCTION_NORMAL;
1019
1020 /* Now see if this is an interrupt handler. */
1021 for (a = DECL_ATTRIBUTES (current_function_decl);
1022 a;
1023 a = TREE_CHAIN (a))
1024 {
1025 tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);
1026
1027 if (name == get_identifier ("__interrupt__")
1028 && list_length (args) == 1
1029 && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
1030 {
1031 tree value = TREE_VALUE (args);
1032
1033 if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
1034 fn_type = ARC_FUNCTION_ILINK1;
1035 else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
1036 fn_type = ARC_FUNCTION_ILINK2;
1037 else
1038 gcc_unreachable ();
1039 break;
1040 }
1041 }
1042
1043 last_fn = decl;
1044 return fn_type;
1045 }
1046
/* Hard register numbers of the interrupt link registers and the normal
   return address register, and the corresponding save-mask bits.  */
#define ILINK1_REGNUM 29
#define ILINK2_REGNUM 30
#define RETURN_ADDR_REGNUM 31
#define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
#define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))

/* Tell prologue and epilogue if register REGNO should be saved / restored.
   The return address and frame pointer are treated separately.
   Don't consider them here.  */
#define MUST_SAVE_REGISTER(regno, interrupt_p) \
((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
 && (df_regs_ever_live_p (regno) && (!call_used_regs[regno] || interrupt_p)))

#define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM))
1061
1062 /* Return the bytes needed to compute the frame pointer from the current
1063 stack pointer.
1064
1065 SIZE is the size needed for local variables. */
1066
1067 unsigned int
1068 arc_compute_frame_size (int size /* # of var. bytes allocated. */)
1069 {
1070 int regno;
1071 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1072 unsigned int reg_size, reg_offset;
1073 unsigned int gmask;
1074 enum arc_function_type fn_type;
1075 int interrupt_p;
1076
1077 var_size = size;
1078 args_size = current_function_outgoing_args_size;
1079 pretend_size = current_function_pretend_args_size;
1080 extra_size = FIRST_PARM_OFFSET (0);
1081 total_size = extra_size + pretend_size + args_size + var_size;
1082 reg_offset = FIRST_PARM_OFFSET(0) + current_function_outgoing_args_size;
1083 reg_size = 0;
1084 gmask = 0;
1085
1086 /* See if this is an interrupt handler. Call used registers must be saved
1087 for them too. */
1088 fn_type = arc_compute_function_type (current_function_decl);
1089 interrupt_p = ARC_INTERRUPT_P (fn_type);
1090
1091 /* Calculate space needed for registers.
1092 ??? We ignore the extension registers for now. */
1093
1094 for (regno = 0; regno <= 31; regno++)
1095 {
1096 if (MUST_SAVE_REGISTER (regno, interrupt_p))
1097 {
1098 reg_size += UNITS_PER_WORD;
1099 gmask |= 1 << regno;
1100 }
1101 }
1102
1103 total_size += reg_size;
1104
1105 /* If the only space to allocate is the fp/blink save area this is an
1106 empty frame. However, if we'll be making a function call we need to
1107 allocate a stack frame for our callee's fp/blink save area. */
1108 if (total_size == extra_size
1109 && !MUST_SAVE_RETURN_ADDR)
1110 total_size = extra_size = 0;
1111
1112 total_size = ARC_STACK_ALIGN (total_size);
1113
1114 /* Save computed information. */
1115 current_frame_info.total_size = total_size;
1116 current_frame_info.extra_size = extra_size;
1117 current_frame_info.pretend_size = pretend_size;
1118 current_frame_info.var_size = var_size;
1119 current_frame_info.args_size = args_size;
1120 current_frame_info.reg_size = reg_size;
1121 current_frame_info.reg_offset = reg_offset;
1122 current_frame_info.gmask = gmask;
1123 current_frame_info.initialized = reload_completed;
1124
1125 /* Ok, we're done. */
1126 return total_size;
1127 }
1128 \f
1129 /* Common code to save/restore registers. */
1130
1131 void
1132 arc_save_restore (FILE *file,
1133 const char *base_reg,
1134 unsigned int offset,
1135 unsigned int gmask,
1136 const char *op)
1137 {
1138 int regno;
1139
1140 if (gmask == 0)
1141 return;
1142
1143 for (regno = 0; regno <= 31; regno++)
1144 {
1145 if ((gmask & (1L << regno)) != 0)
1146 {
1147 fprintf (file, "\t%s %s,[%s,%d]\n",
1148 op, reg_names[regno], base_reg, offset);
1149 offset += UNITS_PER_WORD;
1150 }
1151 }
1152 }
1153 \f
1154 /* Target hook to assemble an integer object. The ARC version needs to
1155 emit a special directive for references to labels and function
1156 symbols. */
1157
1158 static bool
1159 arc_assemble_integer (rtx x, unsigned int size, int aligned_p)
1160 {
1161 if (size == UNITS_PER_WORD && aligned_p
1162 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1163 || GET_CODE (x) == LABEL_REF))
1164 {
1165 fputs ("\t.word\t%st(", asm_out_file);
1166 output_addr_const (asm_out_file, x);
1167 fputs (")\n", asm_out_file);
1168 return true;
1169 }
1170 return default_assemble_integer (x, size, aligned_p);
1171 }
1172 \f
/* Set up the stack and frame pointer (if desired) for the function.
   Emits the prologue as assembly text directly to FILE; SIZE is the
   local-variable byte count handed down by the middle end.  */

static void
arc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
{
  const char *sp_str = reg_names[STACK_POINTER_REGNUM];
  const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
  unsigned int gmask = current_frame_info.gmask;
  enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);

  /* If this is an interrupt handler, set up our stack frame.
     ??? Optimize later.  */
  if (ARC_INTERRUPT_P (fn_type))
    {
      fprintf (file, "\t%s interrupt handler\n",
	       ASM_COMMENT_START);
      /* Reserve 16 bytes; the matching "add sp,sp,16" is emitted by the
	 epilogue before the final return.  */
      fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
    }

  /* This is only for the human reader.  */
  fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
	   ASM_COMMENT_START, ASM_COMMENT_START,
	   current_frame_info.var_size,
	   current_frame_info.reg_size / 4,
	   current_frame_info.args_size,
	   current_frame_info.extra_size);

  /* Use the cached frame layout if arc_compute_frame_size already ran;
     otherwise compute it now.  */
  size = ARC_STACK_ALIGN (size);
  size = (! current_frame_info.initialized
	   ? arc_compute_frame_size (size)
	   : current_frame_info.total_size);

  /* These cases shouldn't happen.  Catch them now.  */
  gcc_assert (size || !gmask);

  /* Allocate space for register arguments if this is a variadic function.  */
  if (current_frame_info.pretend_size != 0)
    fprintf (file, "\tsub %s,%s,%d\n",
	     sp_str, sp_str, current_frame_info.pretend_size);

  /* The home-grown ABI says link register is saved first.  */
  if (MUST_SAVE_RETURN_ADDR)
    fprintf (file, "\tst %s,[%s,%d]\n",
	     reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);

  /* Set up the previous frame pointer next (if we need to).  */
  if (frame_pointer_needed)
    {
      fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
      fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
    }

  /* ??? We don't handle the case where the saved regs are more than 252
     bytes away from sp.  This can be handled by decrementing sp once, saving
     the regs, and then decrementing it again.  The epilogue doesn't have this
     problem as the `ld' insn takes reg+limm values (though it would be more
     efficient to avoid reg+limm).  */

  /* Allocate the stack frame.  */
  if (size - current_frame_info.pretend_size > 0)
    fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
	     sp_str, sp_str, size - current_frame_info.pretend_size);

  /* Save any needed call-saved regs (and call-used if this is an
     interrupt handler).  */
  arc_save_restore (file, sp_str, current_frame_info.reg_offset,
		    /* The zeroing of these two bits is unnecessary,
		       but leave this in for clarity.  */
		    gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
		    "st");

  fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
}
1246 \f
/* Do any necessary cleanup after a function to restore stack, frame,
   and regs.  Emits the epilogue as assembly text to FILE; SIZE is the
   local-variable byte count.  Also resets the per-function caches
   (current_frame_info and the arc_compute_function_type cache).  */

static void
arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
{
  rtx epilogue_delay = current_function_epilogue_delay_list;
  int noepilogue = FALSE;
  enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);

  /* This is only for the human reader.  */
  fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);

  /* Use the cached frame size when available, else compute it.  */
  size = ARC_STACK_ALIGN (size);
  size = (!current_frame_info.initialized
	   ? arc_compute_frame_size (size)
	   : current_frame_info.total_size);

  if (size == 0 && epilogue_delay == 0)
    {
      rtx insn = get_last_insn ();

      /* If the last insn was a BARRIER, we don't have to write any code
	 because a jump (aka return) was put there.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn && GET_CODE (insn) == BARRIER)
	noepilogue = TRUE;
    }

  if (!noepilogue)
    {
      unsigned int pretend_size = current_frame_info.pretend_size;
      unsigned int frame_size = size - pretend_size;
      /* restored tracks how many bytes of stack have already been popped;
	 fp_restored_p whether the saved frame pointer was reloaded.  */
      int restored, fp_restored_p;
      int can_trust_sp_p = !current_function_calls_alloca;
      const char *sp_str = reg_names[STACK_POINTER_REGNUM];
      const char *fp_str = reg_names[FRAME_POINTER_REGNUM];

      /* ??? There are lots of optimizations that can be done here.
	 EG: Use fp to restore regs if it's closer.
	 Maybe in time we'll do them all.  For now, always restore regs from
	 sp, but don't restore sp if we don't have to.  */

      if (!can_trust_sp_p)
	{
	  /* alloca may have moved sp; recompute it from fp.  */
	  gcc_assert (frame_pointer_needed);
	  fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
		   sp_str, fp_str, frame_size, ASM_COMMENT_START);
	}

      /* Restore any saved registers.  */
      arc_save_restore (file, sp_str, current_frame_info.reg_offset,
			/* The zeroing of these two bits is unnecessary,
			   but leave this in for clarity.  */
			current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
			"ld");

      /* Reload the return address from its slot (offset mirrors where the
	 prologue stored it).  */
      if (MUST_SAVE_RETURN_ADDR)
	fprintf (file, "\tld %s,[%s,%d]\n",
		 reg_names[RETURN_ADDR_REGNUM],
		 frame_pointer_needed ? fp_str : sp_str,
		 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));

      /* Keep track of how much of the stack pointer we've restored.
	 It makes the following a lot more readable.  */
      restored = 0;
      fp_restored_p = 0;

      /* We try to emit the epilogue delay slot insn right after the load
	 of the return address register so that it can execute with the
	 stack intact.  Secondly, loads are delayed.  */
      /* ??? If stack intactness is important, always emit now.  */
      if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
	{
	  final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
	  epilogue_delay = NULL_RTX;
	}

      if (frame_pointer_needed)
	{
	  /* Try to restore the frame pointer in the delay slot.  We can't,
	     however, if any of these is true.  */
	  if (epilogue_delay != NULL_RTX
	      || !SMALL_INT (frame_size)
	      || pretend_size
	      || ARC_INTERRUPT_P (fn_type))
	    {
	      /* Note that we restore fp and sp here!  */
	      fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
	      restored += frame_size;
	      fp_restored_p = 1;
	    }
	}
      else if (!SMALL_INT (size /* frame_size + pretend_size */)
	       || ARC_INTERRUPT_P (fn_type))
	{
	  fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
	  restored += frame_size;
	}

      /* These must be done before the return insn because the delay slot
	 does the final stack restore.  */
      if (ARC_INTERRUPT_P (fn_type))
	{
	  if (epilogue_delay)
	    {
	      final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
	    }
	}

      /* Emit the return instruction.  */
      {
	/* Indexed by enum arc_function_type: register to jump through on
	   return (return address for normal functions, ilink1/ilink2 for
	   the corresponding interrupt handlers; slot 0 is unused).  */
	static const int regs[4] = {
	  0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
	};

	/* Update the flags, if returning from an interrupt handler.  */
	if (ARC_INTERRUPT_P (fn_type))
	  fprintf (file, "\tj.d.f %s\n", reg_names[regs[fn_type]]);
	else
	  fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
      }

      /* Fill the return insn's delay slot.  */
      /* If the only register saved is the return address, we need a
	 nop, unless we have an instruction to put into it.  Otherwise
	 we don't since reloading multiple registers doesn't reference
	 the register being loaded.  */

      if (ARC_INTERRUPT_P (fn_type))
	/* Pop the 16-byte area the prologue reserved for the handler.  */
	fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
      else if (epilogue_delay != NULL_RTX)
	{
	  gcc_assert (!frame_pointer_needed || fp_restored_p);
	  gcc_assert (restored >= size);
	  final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
	}
      else if (frame_pointer_needed && !fp_restored_p)
	{
	  gcc_assert (SMALL_INT (frame_size));
	  /* Note that we restore fp and sp here!  */
	  fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
	}
      else if (restored < size)
	{
	  gcc_assert (SMALL_INT (size - restored));
	  fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
		   sp_str, sp_str, size - restored);
	}
      else
	fprintf (file, "\tnop\n");
    }

  /* Reset state info for each function.  */
  current_frame_info = zero_frame_info;
  arc_compute_function_type (NULL_TREE);
}
1404 \f
1405 /* Define the number of delay slots needed for the function epilogue.
1406
1407 Interrupt handlers can't have any epilogue delay slots (it's always needed
1408 for something else, I think). For normal functions, we have to worry about
1409 using call-saved regs as they'll be restored before the delay slot insn.
1410 Functions with non-empty frames already have enough choices for the epilogue
1411 delay slot so for now we only consider functions with empty frames. */
1412
1413 int
1414 arc_delay_slots_for_epilogue (void)
1415 {
1416 if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
1417 return 0;
1418 if (!current_frame_info.initialized)
1419 (void) arc_compute_frame_size (get_frame_size ());
1420 if (current_frame_info.total_size == 0)
1421 return 1;
1422 return 0;
1423 }
1424
1425 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
1426 Any single length instruction which doesn't reference the stack or frame
1427 pointer or any call-saved register is OK. SLOT will always be 0. */
1428
1429 int
1430 arc_eligible_for_epilogue_delay (rtx trial, int slot)
1431 {
1432 gcc_assert (!slot);
1433
1434 if (get_attr_length (trial) == 1
1435 /* If registers where saved, presumably there's more than enough
1436 possibilities for the delay slot. The alternative is something
1437 more complicated (of course, if we expanded the epilogue as rtl
1438 this problem would go away). */
1439 /* ??? Note that this will always be true since only functions with
1440 empty frames have epilogue delay slots. See
1441 arc_delay_slots_for_epilogue. */
1442 && current_frame_info.gmask == 0
1443 && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
1444 && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
1445 return 1;
1446 return 0;
1447 }
1448 \f
1449 /* Return true if OP is a shift operator. */
1450
1451 int
1452 shift_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1453 {
1454 switch (GET_CODE (op))
1455 {
1456 case ASHIFTRT:
1457 case LSHIFTRT:
1458 case ASHIFT:
1459 return 1;
1460 default:
1461 return 0;
1462 }
1463 }
1464
/* Output the assembler code for doing a shift.
   We go to a bit of trouble to generate efficient code as the ARC only has
   single bit shifts.  This is taken from the h8300 port.  We only have one
   mode of shifting and can't access individual bytes like the h8300 can, so
   this is greatly simplified (at the expense of not generating hyper-
   efficient code).

   This function is not used if the variable shift insns are present.  */

/* ??? We assume the output operand is the same as operand 1.
   This can be optimized (deleted) in the case of 1 bit shifts.  */
/* ??? We use the loop register here.  We don't use it elsewhere (yet) and
   using it here will give us a chance to play with it.  */

/* Operands: %0 = destination (== source), %2 = shift count, %3 = the
   shift rtx (gives the shift code and mode), %4 = scratch register.
   Returns "" since all output is emitted here directly.  */

const char *
output_shift (rtx *operands)
{
  rtx shift = operands[3];
  enum machine_mode mode = GET_MODE (shift);
  enum rtx_code code = GET_CODE (shift);
  const char *shift_one;

  gcc_assert (mode == SImode);

  /* Pick the single-bit shift template for this shift code.  */
  switch (code)
    {
    case ASHIFT:   shift_one = "asl %0,%0"; break;
    case ASHIFTRT: shift_one = "asr %0,%0"; break;
    case LSHIFTRT: shift_one = "lsr %0,%0"; break;
    default:       gcc_unreachable ();
    }

  if (GET_CODE (operands[2]) != CONST_INT)
    {
      /* Variable shift count: set up the loop counter, then fall into
	 the shared loop emitter below via `goto shiftloop'.  */
      if (optimize)
	{
	  /* Test the count and skip the loop entirely if it is zero.  */
	  output_asm_insn ("sub.f 0,%2,0", operands);
	  output_asm_insn ("mov lp_count,%2", operands);
	  output_asm_insn ("bz 2f", operands);
	}
      else
	output_asm_insn ("mov %4,%2", operands);
      goto shiftloop;
    }
  else
    {
      int n = INTVAL (operands[2]);

      /* If the count is negative, make it 0.  */
      if (n < 0)
	n = 0;
      /* If the count is too big, truncate it.
	 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
	 do the intuitive thing.  */
      else if (n > GET_MODE_BITSIZE (mode))
	n = GET_MODE_BITSIZE (mode);

      /* First see if we can do them inline.  */
      if (n <= 8)
	{
	  /* Emit N single-bit shifts back to back.  */
	  while (--n >= 0)
	    output_asm_insn (shift_one, operands);
	}
      /* See if we can use a rotate/and.  */
      else if (n == BITS_PER_WORD - 1)
	{
	  switch (code)
	    {
	    case ASHIFT :
	      /* Keep only bit 0, then rotate it into the sign position.  */
	      output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
	      break;
	    case ASHIFTRT :
	      /* The ARC doesn't have a rol insn.  Use something else.
		 Shift the sign bit into carry, then subtract-with-carry
		 to replicate it across the word.  */
	      output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
	      break;
	    case LSHIFTRT :
	      /* The ARC doesn't have a rol insn.  Use something else.
		 Shift the sign bit into carry, then add-with-carry to
		 leave just that bit.  */
	      output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
	      break;
	    default:
	      break;
	    }
	}
      /* Must loop.  */
      else
	{
	  char buf[100];

	  if (optimize)
	    output_asm_insn ("mov lp_count,%c2", operands);
	  else
	    output_asm_insn ("mov %4,%c2", operands);
	shiftloop:
	  /* Shared loop emitter; also entered from the variable-count
	     path above (lp_count or %4 already holds the count).  */
	  if (optimize)
	    {
	      /* Use the hardware zero-overhead loop (lp_start/lp_end)
		 around the single-bit shift.  */
	      if (flag_pic)
		sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
			 ASM_COMMENT_START);
	      else
		sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
			 ASM_COMMENT_START);
	      output_asm_insn (buf, operands);
	      output_asm_insn ("sr %4,[lp_start]", operands);
	      output_asm_insn ("add %4,%4,1", operands);
	      output_asm_insn ("sr %4,[lp_end]", operands);
	      output_asm_insn ("nop\n\tnop", operands);
	      if (flag_pic)
		fprintf (asm_out_file, "\t%s single insn loop\n",
			 ASM_COMMENT_START);
	      else
		fprintf (asm_out_file, "1:\t%s single insn loop\n",
			 ASM_COMMENT_START);
	      output_asm_insn (shift_one, operands);
	      fprintf (asm_out_file, "2:\t%s end single insn loop\n",
		       ASM_COMMENT_START);
	    }
	  else
	    {
	      /* Plain counted loop using %4 as the counter.  */
	      fprintf (asm_out_file, "1:\t%s begin shift loop\n",
		       ASM_COMMENT_START);
	      output_asm_insn ("sub.f %4,%4,1", operands);
	      output_asm_insn ("nop", operands);
	      output_asm_insn ("bn.nd 2f", operands);
	      output_asm_insn (shift_one, operands);
	      output_asm_insn ("b.nd 1b", operands);
	      fprintf (asm_out_file, "2:\t%s end shift loop\n",
		       ASM_COMMENT_START);
	    }
	}
    }

  return "";
}
1598 \f
1599 /* Nested function support. */
1600
/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.

   The body is intentionally empty (all parameters are ATTRIBUTE_UNUSED);
   this port emits no trampoline initialization code.  */

void
arc_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
			   rtx fnaddr ATTRIBUTE_UNUSED,
			   rtx cxt ATTRIBUTE_UNUSED)
{
}
1611 \f
/* Set the cpu type and print out other fancy things,
   at the top of the file.  */

static void
arc_file_start (void)
{
  /* Emit the standard file-start boilerplate, then a .cpu directive
     naming the CPU selected on the command line (arc_cpu_string).  */
  default_file_start ();
  fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
}
1621 \f
/* Print operand X (an rtx) in assembler syntax to file FILE.
   CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
   For `%' followed by punctuation, CODE is the punctuation and X is null.

   Recognized codes: '#' '*' (branch delay-slot suffix), '?' '!'
   (conditional-execution suffix), '~' (nop separator), 'd' 'D'
   (condition code / inverse), 'R' (second word of a double-word
   reference), 'S' (%st() wrapper), 'H' 'L' (high/low word), 'A'
   (FP constant as decimal), 'U' (update suffix), 'V' (cache-bypass
   suffix).  Anything unrecognized is an operand lossage.  */

void
arc_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case '#' :
      /* Conditional branches.  For now these are equivalent.  */
    case '*' :
      /* Unconditional branches.  Output the appropriate delay slot suffix.  */
      if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
	{
	  /* There's nothing in the delay slot.  */
	  fputs (".nd", file);
	}
      else
	{
	  rtx jump = XVECEXP (final_sequence, 0, 0);
	  rtx delay = XVECEXP (final_sequence, 0, 1);
	  /* .jd = execute delay insn only when the branch is taken,
	     .nd = not at all, .d = always.  */
	  if (INSN_ANNULLED_BRANCH_P (jump))
	    fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
	  else
	    fputs (".d", file);
	}
      return;
    case '?' : /* with leading "." */
    case '!' : /* without leading "." */
      /* This insn can be conditionally executed.  See if the ccfsm machinery
	 says it should be conditionalized.  */
      if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
	{
	  /* Is this insn in a delay slot?  */
	  if (final_sequence && XVECLEN (final_sequence, 0) == 2)
	    {
	      rtx insn = XVECEXP (final_sequence, 0, 1);

	      /* If the insn is annulled and is from the target path, we need
		 to inverse the condition test.  */
	      if (INSN_ANNULLED_BRANCH_P (insn))
		{
		  if (INSN_FROM_TARGET_P (insn))
		    fprintf (file, "%s%s",
			     code == '?' ? "." : "",
			     arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
		  else
		    fprintf (file, "%s%s",
			     code == '?' ? "." : "",
			     arc_condition_codes[arc_ccfsm_current_cc]);
		}
	      else
		{
		  /* This insn is executed for either path, so don't
		     conditionalize it at all.  */
		  ; /* nothing to do */
		}
	    }
	  else
	    {
	      /* This insn isn't in a delay slot.  */
	      fprintf (file, "%s%s",
		       code == '?' ? "." : "",
		       arc_condition_codes[arc_ccfsm_current_cc]);
	    }
	}
      return;
    case '~' :
      /* Output a nop if we're between a set of the condition codes,
	 and a conditional branch.  */
      if (last_insn_set_cc_p)
	fputs ("nop\n\t", file);
      return;
    case 'd' :
      /* Condition-code mnemonic for comparison X.  */
      fputs (arc_condition_codes[get_arc_condition_code (x)], file);
      return;
    case 'D' :
      /* Inverted condition-code mnemonic for comparison X.  */
      fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
				 (get_arc_condition_code (x))],
	     file);
      return;
    case 'R' :
      /* Write second word of DImode or DFmode reference,
	 register or memory.  */
      if (GET_CODE (x) == REG)
	fputs (reg_names[REGNO (x)+1], file);
      else if (GET_CODE (x) == MEM)
	{
	  fputc ('[', file);
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of four.  */
	  /* ??? This is taken from rs6000.c I think.  I don't think it is
	     currently necessary, but keep it around.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
	  else
	    output_address (plus_constant (XEXP (x, 0), 4));
	  fputc (']', file);
	}
      else
	output_operand_lossage ("invalid operand to %%R code");
      return;
    case 'S' :
      /* Wrap a function symbol or label reference in %st(...).  */
      if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
	  || GET_CODE (x) == LABEL_REF)
	{
	  fprintf (file, "%%st(");
	  output_addr_const (file, x);
	  fprintf (file, ")");
	  return;
	}
      /* Otherwise fall through to the default output below.  */
      break;
    case 'H' :
    case 'L' :
      if (GET_CODE (x) == REG)
	{
	  /* L = least significant word, H = most significant word */
	  if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
	    fputs (reg_names[REGNO (x)], file);
	  else
	    fputs (reg_names[REGNO (x)+1], file);
	}
      else if (GET_CODE (x) == CONST_INT
	       || GET_CODE (x) == CONST_DOUBLE)
	{
	  rtx first, second;

	  split_double (x, &first, &second);
	  fprintf (file, "0x%08lx",
		   (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
	}
      else
	output_operand_lossage ("invalid operand to %%H/%%L code");
      return;
    case 'A' :
      /* Floating-point constant printed as a decimal string.  */
      {
	char str[30];

	gcc_assert (GET_CODE (x) == CONST_DOUBLE
		    && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT);

	real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
	fprintf (file, "%s", str);
	return;
      }
    case 'U' :
      /* Output a load/store with update indicator if appropriate.  */
      if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fputs (".a", file);
	}
      else
	output_operand_lossage ("invalid operand to %%U code");
      return;
    case 'V' :
      /* Output cache bypass indicator for a load/store insn.  Volatile memory
	 refs are defined to use the cache bypass mechanism.  */
      if (GET_CODE (x) == MEM)
	{
	  if (MEM_VOLATILE_P (x))
	    fputs (".di", file);
	}
      else
	output_operand_lossage ("invalid operand to %%V code");
      return;
    case 0 :
      /* Do nothing special.  */
      break;
    default :
      /* Unknown flag.  */
      output_operand_lossage ("invalid operand output code");
    }

  /* Default output for the operand itself (reached for CODE 0 and for
     the fall-through from 'S').  */
  switch (GET_CODE (x))
    {
    case REG :
      fputs (reg_names[REGNO (x)], file);
      break;
    case MEM :
      fputc ('[', file);
      /* Pre-inc/dec addresses are printed with the post-update offset
	 folded in.  */
      if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	output_address (plus_constant (XEXP (XEXP (x, 0), 0),
				       GET_MODE_SIZE (GET_MODE (x))));
      else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	output_address (plus_constant (XEXP (XEXP (x, 0), 0),
				       - GET_MODE_SIZE (GET_MODE (x))));
      else
	output_address (XEXP (x, 0));
      fputc (']', file);
      break;
    case CONST_DOUBLE :
      /* We handle SFmode constants here as output_addr_const doesn't.  */
      if (GET_MODE (x) == SFmode)
	{
	  REAL_VALUE_TYPE d;
	  long l;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  REAL_VALUE_TO_TARGET_SINGLE (d, l);
	  fprintf (file, "0x%08lx", l);
	  break;
	}
      /* Fall through.  Let output_addr_const deal with it.  */
    default :
      output_addr_const (file, x);
      break;
    }
}
1834
/* Print a memory address as an operand to reference that memory location.
   Handles plain registers, symbols, reg+offset / reg+reg / reg+symbol
   sums, and rejects pre-inc/dec (which must be handled by the caller,
   where the access mode is still known).  */

void
arc_print_operand_address (FILE *file, rtx addr)
{
  register rtx base, index = 0;
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case REG :
      fputs (reg_names[REGNO (addr)], file);
      break;
    case SYMBOL_REF :
      /* The %st() branch is disabled (note the `0 &&'); symbols are
	 printed plainly.  */
      if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
	{
	  fprintf (file, "%%st(");
	  output_addr_const (file, addr);
	  fprintf (file, ")");
	}
      else
	output_addr_const (file, addr);
      break;
    case PLUS :
      /* Split the sum into BASE plus either a constant OFFSET or an
	 INDEX (register or symbol).  */
      if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
	offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
	offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
      else
	base = XEXP (addr, 0), index = XEXP (addr, 1);
      gcc_assert (GET_CODE (base) == REG);
      fputs (reg_names[REGNO (base)], file);
      if (index == 0)
	{
	  /* A zero offset is omitted entirely.  */
	  if (offset != 0)
	    fprintf (file, ",%d", offset);
	}
      else
	{
	  switch (GET_CODE (index))
	    {
	    case REG:
	      fprintf (file, ",%s", reg_names[REGNO (index)]);
	      break;
	    case SYMBOL_REF:
	      fputc (',', file), output_addr_const (file, index);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}
      break;
    case PRE_INC :
    case PRE_DEC :
      /* We shouldn't get here as we've lost the mode of the memory object
	 (which says how much to inc/dec by.  */
      gcc_unreachable ();
      break;
    default :
      output_addr_const (file, addr);
      break;
    }
}
1898
1899 /* Update compare/branch separation marker. */
1900
1901 static void
1902 record_cc_ref (rtx insn)
1903 {
1904 last_insn_set_cc_p = current_insn_set_cc_p;
1905
1906 switch (get_attr_cond (insn))
1907 {
1908 case COND_SET :
1909 case COND_SET_ZN :
1910 case COND_SET_ZNC :
1911 if (get_attr_length (insn) == 1)
1912 current_insn_set_cc_p = 1;
1913 else
1914 current_insn_set_cc_p = 0;
1915 break;
1916 default :
1917 current_insn_set_cc_p = 0;
1918 break;
1919 }
1920 }
1921 \f
1922 /* Conditional execution support.
1923
1924 This is based on the ARM port but for now is much simpler.
1925
1926 A finite state machine takes care of noticing whether or not instructions
1927 can be conditionally executed, and thus decrease execution time and code
1928 size by deleting branch instructions. The fsm is controlled by
1929 final_prescan_insn, and controls the actions of PRINT_OPERAND. The patterns
1930 in the .md file for the branch insns also have a hand in this. */
1931
1932 /* The state of the fsm controlling condition codes are:
1933 0: normal, do nothing special
1934 1: don't output this insn
1935 2: don't output this insn
1936 3: make insns conditional
1937 4: make insns conditional
1938
1939 State transitions (state->state by whom, under what condition):
1940 0 -> 1 final_prescan_insn, if insn is conditional branch
1941 0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
1942 1 -> 3 branch patterns, after having not output the conditional branch
1943 2 -> 4 branch patterns, after having not output the conditional branch
1944 3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
1945 (the target label has CODE_LABEL_NUMBER equal to
1946 arc_ccfsm_target_label).
1947 4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
1948
1949 If the jump clobbers the conditions then we use states 2 and 4.
1950
1951 A similar thing can be done with conditional return insns.
1952
1953 We also handle separating branches from sets of the condition code.
1954 This is done here because knowledge of the ccfsm state is required,
1955 we may not be outputting the branch. */
1956
1957 void
1958 arc_final_prescan_insn (rtx insn,
1959 rtx *opvec ATTRIBUTE_UNUSED,
1960 int noperands ATTRIBUTE_UNUSED)
1961 {
1962 /* BODY will hold the body of INSN. */
1963 register rtx body = PATTERN (insn);
1964
1965 /* This will be 1 if trying to repeat the trick (i.e.: do the `else' part of
1966 an if/then/else), and things need to be reversed. */
1967 int reverse = 0;
1968
1969 /* If we start with a return insn, we only succeed if we find another one. */
1970 int seeking_return = 0;
1971
1972 /* START_INSN will hold the insn from where we start looking. This is the
1973 first insn after the following code_label if REVERSE is true. */
1974 rtx start_insn = insn;
1975
1976 /* Update compare/branch separation marker. */
1977 record_cc_ref (insn);
1978
1979 /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
1980 We can't do this in macro FINAL_PRESCAN_INSN because its called from
1981 final_scan_insn which has `optimize' as a local. */
1982 if (optimize < 2 || TARGET_NO_COND_EXEC)
1983 return;
1984
1985 /* If in state 4, check if the target branch is reached, in order to
1986 change back to state 0. */
1987 if (arc_ccfsm_state == 4)
1988 {
1989 if (insn == arc_ccfsm_target_insn)
1990 {
1991 arc_ccfsm_target_insn = NULL;
1992 arc_ccfsm_state = 0;
1993 }
1994 return;
1995 }
1996
1997 /* If in state 3, it is possible to repeat the trick, if this insn is an
1998 unconditional branch to a label, and immediately following this branch
1999 is the previous target label which is only used once, and the label this
2000 branch jumps to is not too far off. Or in other words "we've done the
2001 `then' part, see if we can do the `else' part." */
2002 if (arc_ccfsm_state == 3)
2003 {
2004 if (simplejump_p (insn))
2005 {
2006 start_insn = next_nonnote_insn (start_insn);
2007 if (GET_CODE (start_insn) == BARRIER)
2008 {
2009 /* ??? Isn't this always a barrier? */
2010 start_insn = next_nonnote_insn (start_insn);
2011 }
2012 if (GET_CODE (start_insn) == CODE_LABEL
2013 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2014 && LABEL_NUSES (start_insn) == 1)
2015 reverse = TRUE;
2016 else
2017 return;
2018 }
2019 else if (GET_CODE (body) == RETURN)
2020 {
2021 start_insn = next_nonnote_insn (start_insn);
2022 if (GET_CODE (start_insn) == BARRIER)
2023 start_insn = next_nonnote_insn (start_insn);
2024 if (GET_CODE (start_insn) == CODE_LABEL
2025 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2026 && LABEL_NUSES (start_insn) == 1)
2027 {
2028 reverse = TRUE;
2029 seeking_return = 1;
2030 }
2031 else
2032 return;
2033 }
2034 else
2035 return;
2036 }
2037
2038 if (GET_CODE (insn) != JUMP_INSN)
2039 return;
2040
2041 /* This jump might be paralleled with a clobber of the condition codes,
2042 the jump should always come first. */
2043 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2044 body = XVECEXP (body, 0, 0);
2045
2046 if (reverse
2047 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2048 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2049 {
2050 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2051 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2052 int then_not_else = TRUE;
2053 /* Nonzero if next insn must be the target label. */
2054 int next_must_be_target_label_p;
2055 rtx this_insn = start_insn, label = 0;
2056
2057 /* Register the insn jumped to. */
2058 if (reverse)
2059 {
2060 if (!seeking_return)
2061 label = XEXP (SET_SRC (body), 0);
2062 }
2063 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2064 label = XEXP (XEXP (SET_SRC (body), 1), 0);
2065 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2066 {
2067 label = XEXP (XEXP (SET_SRC (body), 2), 0);
2068 then_not_else = FALSE;
2069 }
2070 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2071 seeking_return = 1;
2072 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2073 {
2074 seeking_return = 1;
2075 then_not_else = FALSE;
2076 }
2077 else
2078 gcc_unreachable ();
2079
2080 /* See how many insns this branch skips, and what kind of insns. If all
2081 insns are okay, and the label or unconditional branch to the same
2082 label is not too far away, succeed. */
2083 for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
2084 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2085 insns_skipped++)
2086 {
2087 rtx scanbody;
2088
2089 this_insn = next_nonnote_insn (this_insn);
2090 if (!this_insn)
2091 break;
2092
2093 if (next_must_be_target_label_p)
2094 {
2095 if (GET_CODE (this_insn) == BARRIER)
2096 continue;
2097 if (GET_CODE (this_insn) == CODE_LABEL
2098 && this_insn == label)
2099 {
2100 arc_ccfsm_state = 1;
2101 succeed = TRUE;
2102 }
2103 else
2104 fail = TRUE;
2105 break;
2106 }
2107
2108 scanbody = PATTERN (this_insn);
2109
2110 switch (GET_CODE (this_insn))
2111 {
2112 case CODE_LABEL:
2113 /* Succeed if it is the target label, otherwise fail since
2114 control falls in from somewhere else. */
2115 if (this_insn == label)
2116 {
2117 arc_ccfsm_state = 1;
2118 succeed = TRUE;
2119 }
2120 else
2121 fail = TRUE;
2122 break;
2123
2124 case BARRIER:
2125 /* Succeed if the following insn is the target label.
2126 Otherwise fail.
2127 If return insns are used then the last insn in a function
2128 will be a barrier. */
2129 next_must_be_target_label_p = TRUE;
2130 break;
2131
2132 case CALL_INSN:
2133 /* Can handle a call insn if there are no insns after it.
2134 IE: The next "insn" is the target label. We don't have to
2135 worry about delay slots as such insns are SEQUENCE's inside
2136 INSN's. ??? It is possible to handle such insns though. */
2137 if (get_attr_cond (this_insn) == COND_CANUSE)
2138 next_must_be_target_label_p = TRUE;
2139 else
2140 fail = TRUE;
2141 break;
2142
2143 case JUMP_INSN:
2144 /* If this is an unconditional branch to the same label, succeed.
2145 If it is to another label, do nothing. If it is conditional,
2146 fail. */
2147 /* ??? Probably, the test for the SET and the PC are unnecessary. */
2148
2149 if (GET_CODE (scanbody) == SET
2150 && GET_CODE (SET_DEST (scanbody)) == PC)
2151 {
2152 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2153 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2154 {
2155 arc_ccfsm_state = 2;
2156 succeed = TRUE;
2157 }
2158 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2159 fail = TRUE;
2160 }
2161 else if (GET_CODE (scanbody) == RETURN
2162 && seeking_return)
2163 {
2164 arc_ccfsm_state = 2;
2165 succeed = TRUE;
2166 }
2167 else if (GET_CODE (scanbody) == PARALLEL)
2168 {
2169 if (get_attr_cond (this_insn) != COND_CANUSE)
2170 fail = TRUE;
2171 }
2172 break;
2173
2174 case INSN:
2175 /* We can only do this with insns that can use the condition
2176 codes (and don't set them). */
2177 if (GET_CODE (scanbody) == SET
2178 || GET_CODE (scanbody) == PARALLEL)
2179 {
2180 if (get_attr_cond (this_insn) != COND_CANUSE)
2181 fail = TRUE;
2182 }
2183 /* We can't handle other insns like sequences. */
2184 else
2185 fail = TRUE;
2186 break;
2187
2188 default:
2189 break;
2190 }
2191 }
2192
2193 if (succeed)
2194 {
2195 if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
2196 arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
2197 else
2198 {
2199 gcc_assert (seeking_return || arc_ccfsm_state == 2);
2200 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2201 {
2202 this_insn = next_nonnote_insn (this_insn);
2203 gcc_assert (!this_insn
2204 || (GET_CODE (this_insn) != BARRIER
2205 && GET_CODE (this_insn) != CODE_LABEL));
2206 }
2207 if (!this_insn)
2208 {
2209 /* Oh dear! we ran off the end, give up. */
2210 extract_insn_cached (insn);
2211 arc_ccfsm_state = 0;
2212 arc_ccfsm_target_insn = NULL;
2213 return;
2214 }
2215 arc_ccfsm_target_insn = this_insn;
2216 }
2217
2218 /* If REVERSE is true, ARM_CURRENT_CC needs to be inverted from
2219 what it was. */
2220 if (!reverse)
2221 arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
2222 0));
2223
2224 if (reverse || then_not_else)
2225 arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
2226 }
2227
2228 /* Restore recog_data. Getting the attributes of other insns can
2229 destroy this array, but final.c assumes that it remains intact
2230 across this call. */
2231 extract_insn_cached (insn);
2232 }
2233 }
2234
2235 /* Record that we are currently outputting label NUM with prefix PREFIX.
2236 It it's the label we're looking for, reset the ccfsm machinery.
2237
2238 Called from (*targetm.asm_out.internal_label). */
2239
2240 void
2241 arc_ccfsm_at_label (const char *prefix, int num)
2242 {
2243 if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
2244 && !strcmp (prefix, "L"))
2245 {
2246 arc_ccfsm_state = 0;
2247 arc_ccfsm_target_insn = NULL_RTX;
2248 }
2249 }
2250
2251 /* See if the current insn, which is a conditional branch, is to be
2252 deleted. */
2253
2254 int
2255 arc_ccfsm_branch_deleted_p (void)
2256 {
2257 if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
2258 return 1;
2259 return 0;
2260 }
2261
2262 /* Record a branch isn't output because subsequent insns can be
2263 conditionalized. */
2264
2265 void
2266 arc_ccfsm_record_branch_deleted (void)
2267 {
2268 /* Indicate we're conditionalizing insns now. */
2269 arc_ccfsm_state += 2;
2270
2271 /* If the next insn is a subroutine call, we still need a nop between the
2272 cc setter and user. We need to undo the effect of calling record_cc_ref
2273 for the just deleted branch. */
2274 current_insn_set_cc_p = last_insn_set_cc_p;
2275 }
2276 \f
2277 void
2278 arc_va_start (tree valist, rtx nextarg)
2279 {
2280 /* See arc_setup_incoming_varargs for reasons for this oddity. */
2281 if (current_function_args_info < 8
2282 && (current_function_args_info & 1))
2283 nextarg = plus_constant (nextarg, UNITS_PER_WORD);
2284
2285 std_expand_builtin_va_start (valist, nextarg);
2286 }
2287
/* This is how to output a definition of an internal numbered label where
   PREFIX is the class of label and NUM is the number within the class.

   Implements (*targetm.asm_out.internal_label): first lets the ccfsm
   machinery notice when its pending target label is emitted (see
   arc_ccfsm_at_label), then defers to the default implementation to
   actually print the label.  */

static void
arc_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
{
  /* Note the implicit unsigned long -> int narrowing of LABELNO to match
     arc_ccfsm_at_label's signature.  */
  arc_ccfsm_at_label (prefix, labelno);
  default_internal_label (stream, prefix, labelno);
}
2297
/* Worker function for TARGET_ASM_EXTERNAL_LIBCALL.

   Currently a no-op: the cpu-specific libgcc renaming below is disabled.  */

static void
arc_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
#if 0
  /* On the ARC we want to have libgcc's for multiple cpus in one binary.
     We can't use `assemble_name' here as that will call ASM_OUTPUT_LABELREF
     and we'll get another suffix added on if -mmangle-cpu.

     NOTE(review): this disabled code is stale -- FILE, SYMREF and
     arc_mangle_suffix are not defined here; it would need rewriting in
     terms of `fun' and asm_out_file before it could be re-enabled.  */
  if (TARGET_MANGLE_CPU_LIBGCC)
    {
      fprintf (FILE, "\t.rename\t_%s, _%s%s\n",
	       XSTR (SYMREF, 0), XSTR (SYMREF, 0),
	       arc_mangle_suffix);
    }
#endif
}
2315
2316 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2317
2318 static bool
2319 arc_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
2320 {
2321 if (AGGREGATE_TYPE_P (type))
2322 return true;
2323 else
2324 {
2325 HOST_WIDE_INT size = int_size_in_bytes (type);
2326 return (size == -1 || size > 8);
2327 }
2328 }
2329
2330 /* For ARC, All aggregates and arguments greater than 8 bytes are
2331 passed by reference. */
2332
2333 static bool
2334 arc_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
2335 enum machine_mode mode, tree type,
2336 bool named ATTRIBUTE_UNUSED)
2337 {
2338 unsigned HOST_WIDE_INT size;
2339
2340 if (type)
2341 {
2342 if (AGGREGATE_TYPE_P (type))
2343 return true;
2344 size = int_size_in_bytes (type);
2345 }
2346 else
2347 size = GET_MODE_SIZE (mode);
2348
2349 return size > 8;
2350 }