Merge cond-optab branch.
[gcc.git] / gcc / config / arc / arc.c
1 /* Subroutines used for code generation on the Argonaut ARC cpu.
2 Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
3 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 /* ??? This is an old port, and is undoubtedly suffering from bit rot. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "rtl.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "real.h"
32 #include "insn-config.h"
33 #include "conditions.h"
34 #include "output.h"
35 #include "insn-attr.h"
36 #include "flags.h"
37 #include "function.h"
38 #include "expr.h"
39 #include "recog.h"
40 #include "toplev.h"
41 #include "tm_p.h"
42 #include "target.h"
43 #include "target-def.h"
44
/* Which cpu we're compiling for (set from -mcpu=).  */
int arc_cpu_type;

/* Name of mangle string to add to symbols to separate code compiled for each
   cpu (or NULL).  */
const char *arc_mangle_cpu;

/* Name of text, data, and rodata sections used in varasm.c.
   Built at run time in arc_init from ARC_SECTION_FORMAT.  */
const char *arc_text_section;
const char *arc_data_section;
const char *arc_rodata_section;

/* Array of valid operand punctuation characters.
   Indexed by character; nonzero entries are set up in arc_init and
   consulted by PRINT_OPERAND_PUNCT_VALID_P.  */
char arc_punct_chars[256];

/* Variables used by arc_final_prescan_insn to implement conditional
   execution (the ccfsm state machine).  */
static int arc_ccfsm_state;
static int arc_ccfsm_current_cc;
static rtx arc_ccfsm_target_insn;
static int arc_ccfsm_target_label;

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
#define MAX_INSNS_SKIPPED 3

/* A nop is needed between a 4 byte insn that sets the condition codes and
   a branch that uses them (the same isn't true for an 8 byte insn that sets
   the condition codes).  Set by arc_final_prescan_insn.  Used by
   arc_print_operand.  */
static int last_insn_set_cc_p;
static int current_insn_set_cc_p;

/* Forward declarations of local helpers and target-hook
   implementations defined later in this file.  */
static bool arc_handle_option (size_t, const char *, int);
static void record_cc_ref (rtx);
static void arc_init_reg_tables (void);
static int get_arc_condition_code (rtx);
const struct attribute_spec arc_attribute_table[];
static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
static bool arc_assemble_integer (rtx, unsigned int, int);
static void arc_output_function_prologue (FILE *, HOST_WIDE_INT);
static void arc_output_function_epilogue (FILE *, HOST_WIDE_INT);
static void arc_file_start (void);
static void arc_internal_label (FILE *, const char *, unsigned long);
static void arc_va_start (tree, rtx);
static void arc_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
					tree, int *, int);
static bool arc_rtx_costs (rtx, int, int, int *, bool);
static int arc_address_cost (rtx, bool);
static void arc_external_libcall (rtx);
static bool arc_return_in_memory (const_tree, const_tree);
static bool arc_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
				   const_tree, bool);
97 \f
/* Initialize the GCC target structure.  */

/* Assembler directives for integer output.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arc_assemble_integer

/* Function entry/exit and assembly-output hooks.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START arc_file_start
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arc_attribute_table
#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arc_internal_label
#undef TARGET_ASM_EXTERNAL_LIBCALL
#define TARGET_ASM_EXTERNAL_LIBCALL arc_external_libcall

#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION arc_handle_option

/* Cost model hooks.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arc_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arc_address_cost

/* Argument/return promotion: always promote.  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true
#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

/* Calling-convention hooks.  */
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY arc_return_in_memory
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE arc_pass_by_reference
#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true

#undef TARGET_SETUP_INCOMING_VARARGS
#define TARGET_SETUP_INCOMING_VARARGS arc_setup_incoming_varargs

#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START arc_va_start

struct gcc_target targetm = TARGET_INITIALIZER;
148 \f
149 /* Implement TARGET_HANDLE_OPTION. */
150
151 static bool
152 arc_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
153 {
154 switch (code)
155 {
156 case OPT_mcpu_:
157 return strcmp (arg, "base") == 0 || ARC_EXTENSION_CPU (arg);
158
159 default:
160 return true;
161 }
162 }
163
164 /* Called by OVERRIDE_OPTIONS to initialize various things. */
165
166 void
167 arc_init (void)
168 {
169 char *tmp;
170
171 /* Set the pseudo-ops for the various standard sections. */
172 arc_text_section = tmp = XNEWVEC (char, strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
173 sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
174 arc_data_section = tmp = XNEWVEC (char, strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
175 sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
176 arc_rodata_section = tmp = XNEWVEC (char, strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
177 sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);
178
179 arc_init_reg_tables ();
180
181 /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P. */
182 memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
183 arc_punct_chars['#'] = 1;
184 arc_punct_chars['*'] = 1;
185 arc_punct_chars['?'] = 1;
186 arc_punct_chars['!'] = 1;
187 arc_punct_chars['~'] = 1;
188 }
189 \f
/* The condition codes of the ARC, and the inverse function.
   Indexed by the value returned from get_arc_condition_code; entries
   that are 0 have no usable inverse-pair partner.  Adjacent pairs are
   inverses of each other, which is what makes the XOR trick below work.  */
static const char *const arc_condition_codes[] =
{
  "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
  "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
};

/* Invert a condition-code index: codes are laid out in inverse pairs,
   so flipping the low bit selects the opposite condition.  */
#define ARC_INVERSE_CONDITION_CODE(X)  ((X) ^ 1)
198
199 /* Returns the index of the ARC condition code string in
200 `arc_condition_codes'. COMPARISON should be an rtx like
201 `(eq (...) (...))'. */
202
203 static int
204 get_arc_condition_code (rtx comparison)
205 {
206 switch (GET_CODE (comparison))
207 {
208 case EQ : return 2;
209 case NE : return 3;
210 case GT : return 10;
211 case LE : return 11;
212 case GE : return 12;
213 case LT : return 13;
214 case GTU : return 14;
215 case LEU : return 15;
216 case LTU : return 6;
217 case GEU : return 7;
218 default : gcc_unreachable ();
219 }
220 /*NOTREACHED*/
221 return (42);
222 }
223
224 /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE,
225 return the mode to be used for the comparison. */
226
227 enum machine_mode
228 arc_select_cc_mode (enum rtx_code op,
229 rtx x ATTRIBUTE_UNUSED,
230 rtx y ATTRIBUTE_UNUSED)
231 {
232 switch (op)
233 {
234 case EQ :
235 case NE :
236 return CCZNmode;
237 default :
238 switch (GET_CODE (x))
239 {
240 case AND :
241 case IOR :
242 case XOR :
243 case SIGN_EXTEND :
244 case ZERO_EXTEND :
245 return CCZNmode;
246 case ASHIFT :
247 case ASHIFTRT :
248 case LSHIFTRT :
249 return CCZNCmode;
250 default:
251 break;
252 }
253 }
254 return CCmode;
255 }
256 \f
/* Vectors to keep interesting information about registers where it can easily
   be got.  We use to use the actual mode value as the bit number, but there
   is (or may be) more than 32 modes now.  Instead we use two tables: one
   indexed by hard register number, and one indexed by mode.  */

/* The purpose of arc_mode_class is to shrink the range of modes so that
   they all fit (as bit numbers) in a 32-bit word (again).  Each real mode is
   mapped into one arc_mode_class mode.  */

enum arc_mode_class {
  C_MODE,
  S_MODE, D_MODE, T_MODE, O_MODE,
  SF_MODE, DF_MODE, TF_MODE, OF_MODE
};

/* Modes for condition codes.  */
#define C_MODES (1 << (int) C_MODE)

/* Modes for single-word and smaller quantities.  */
#define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))

/* Modes for double-word and smaller quantities.
   (Fixed: DF_MODE is now cast to int like every other mode bit.)  */
#define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << (int) DF_MODE))

/* Modes for quad-word and smaller quantities.  */
#define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
283
/* Value is 1 if register/mode pair is acceptable on arc.
   Indexed by hard register number; each entry is a mask of the
   arc_mode_class bits allowed in that register.  Regs 0-22 take up to
   quad-word values, 23-24 up to double-word, the rest single-word only;
   the last register (61) holds only condition codes.  */

const unsigned int arc_hard_regno_mode_ok[] = {
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
  D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,

  /* ??? Leave these as S_MODES for now.  */
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
};

/* Map from machine mode to arc_mode_class bit; filled in by
   arc_init_reg_tables.  */
unsigned int arc_mode_class [NUM_MACHINE_MODES];

/* Map from hard register number to register class; filled in by
   arc_init_reg_tables.  */
enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
302
303 static void
304 arc_init_reg_tables (void)
305 {
306 int i;
307
308 for (i = 0; i < NUM_MACHINE_MODES; i++)
309 {
310 switch (GET_MODE_CLASS (i))
311 {
312 case MODE_INT:
313 case MODE_PARTIAL_INT:
314 case MODE_COMPLEX_INT:
315 if (GET_MODE_SIZE (i) <= 4)
316 arc_mode_class[i] = 1 << (int) S_MODE;
317 else if (GET_MODE_SIZE (i) == 8)
318 arc_mode_class[i] = 1 << (int) D_MODE;
319 else if (GET_MODE_SIZE (i) == 16)
320 arc_mode_class[i] = 1 << (int) T_MODE;
321 else if (GET_MODE_SIZE (i) == 32)
322 arc_mode_class[i] = 1 << (int) O_MODE;
323 else
324 arc_mode_class[i] = 0;
325 break;
326 case MODE_FLOAT:
327 case MODE_COMPLEX_FLOAT:
328 if (GET_MODE_SIZE (i) <= 4)
329 arc_mode_class[i] = 1 << (int) SF_MODE;
330 else if (GET_MODE_SIZE (i) == 8)
331 arc_mode_class[i] = 1 << (int) DF_MODE;
332 else if (GET_MODE_SIZE (i) == 16)
333 arc_mode_class[i] = 1 << (int) TF_MODE;
334 else if (GET_MODE_SIZE (i) == 32)
335 arc_mode_class[i] = 1 << (int) OF_MODE;
336 else
337 arc_mode_class[i] = 0;
338 break;
339 case MODE_CC:
340 arc_mode_class[i] = 1 << (int) C_MODE;
341 break;
342 default:
343 arc_mode_class[i] = 0;
344 break;
345 }
346 }
347
348 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
349 {
350 if (i < 60)
351 arc_regno_reg_class[i] = GENERAL_REGS;
352 else if (i == 60)
353 arc_regno_reg_class[i] = LPCOUNT_REG;
354 else if (i == 61)
355 arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
356 else
357 arc_regno_reg_class[i] = NO_REGS;
358 }
359 }
360 \f
/* ARC specific attribute support.

   The ARC has these attributes:
   interrupt - for interrupt functions.  Takes exactly one string
	       argument ("ilink1" or "ilink2"); validated by
	       arc_handle_interrupt_attribute.
*/

const struct attribute_spec arc_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt", 1, 1, true, false, false, arc_handle_interrupt_attribute },
  /* Sentinel entry terminating the table.  */
  { NULL, 0, 0, false, false, false, NULL }
};
373
374 /* Handle an "interrupt" attribute; arguments as in
375 struct attribute_spec.handler. */
376 static tree
377 arc_handle_interrupt_attribute (tree *node ATTRIBUTE_UNUSED,
378 tree name,
379 tree args,
380 int flags ATTRIBUTE_UNUSED,
381 bool *no_add_attrs)
382 {
383 tree value = TREE_VALUE (args);
384
385 if (TREE_CODE (value) != STRING_CST)
386 {
387 warning (OPT_Wattributes,
388 "argument of %qE attribute is not a string constant",
389 name);
390 *no_add_attrs = true;
391 }
392 else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
393 && strcmp (TREE_STRING_POINTER (value), "ilink2"))
394 {
395 warning (OPT_Wattributes,
396 "argument of %qE attribute is not \"ilink1\" or \"ilink2\"",
397 name);
398 *no_add_attrs = true;
399 }
400
401 return NULL_TREE;
402 }
403
404 \f
405 /* Acceptable arguments to the call insn. */
406
407 int
408 call_address_operand (rtx op, enum machine_mode mode)
409 {
410 return (symbolic_operand (op, mode)
411 || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op))
412 || (GET_CODE (op) == REG));
413 }
414
415 int
416 call_operand (rtx op, enum machine_mode mode)
417 {
418 if (GET_CODE (op) != MEM)
419 return 0;
420 op = XEXP (op, 0);
421 return call_address_operand (op, mode);
422 }
423
424 /* Returns 1 if OP is a symbol reference. */
425
426 int
427 symbolic_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
428 {
429 switch (GET_CODE (op))
430 {
431 case SYMBOL_REF:
432 case LABEL_REF:
433 case CONST :
434 return 1;
435 default:
436 return 0;
437 }
438 }
439
440 /* Return truth value of statement that OP is a symbolic memory
441 operand of mode MODE. */
442
443 int
444 symbolic_memory_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
445 {
446 if (GET_CODE (op) == SUBREG)
447 op = SUBREG_REG (op);
448 if (GET_CODE (op) != MEM)
449 return 0;
450 op = XEXP (op, 0);
451 return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST
452 || GET_CODE (op) == LABEL_REF);
453 }
454
455 /* Return true if OP is a short immediate (shimm) value. */
456
457 int
458 short_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
459 {
460 if (GET_CODE (op) != CONST_INT)
461 return 0;
462 return SMALL_INT (INTVAL (op));
463 }
464
465 /* Return true if OP will require a long immediate (limm) value.
466 This is currently only used when calculating length attributes. */
467
468 int
469 long_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
470 {
471 switch (GET_CODE (op))
472 {
473 case SYMBOL_REF :
474 case LABEL_REF :
475 case CONST :
476 return 1;
477 case CONST_INT :
478 return !SMALL_INT (INTVAL (op));
479 case CONST_DOUBLE :
480 /* These can happen because large unsigned 32-bit constants are
481 represented this way (the multiplication patterns can cause these
482 to be generated). They also occur for SFmode values. */
483 return 1;
484 default:
485 break;
486 }
487 return 0;
488 }
489
490 /* Return true if OP is a MEM that when used as a load or store address will
491 require an 8 byte insn.
492 Load and store instructions don't allow the same possibilities but they're
493 similar enough that this one function will do.
494 This is currently only used when calculating length attributes. */
495
496 int
497 long_immediate_loadstore_operand (rtx op,
498 enum machine_mode mode ATTRIBUTE_UNUSED)
499 {
500 if (GET_CODE (op) != MEM)
501 return 0;
502
503 op = XEXP (op, 0);
504 switch (GET_CODE (op))
505 {
506 case SYMBOL_REF :
507 case LABEL_REF :
508 case CONST :
509 return 1;
510 case CONST_INT :
511 /* This must be handled as "st c,[limm]". Ditto for load.
512 Technically, the assembler could translate some possibilities to
513 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
514 assume that it does. */
515 return 1;
516 case CONST_DOUBLE :
517 /* These can happen because large unsigned 32-bit constants are
518 represented this way (the multiplication patterns can cause these
519 to be generated). They also occur for SFmode values. */
520 return 1;
521 case REG :
522 return 0;
523 case PLUS :
524 if (GET_CODE (XEXP (op, 1)) == CONST_INT
525 && !SMALL_INT (INTVAL (XEXP (op, 1))))
526 return 1;
527 return 0;
528 default:
529 break;
530 }
531 return 0;
532 }
533
/* Return true if OP is an acceptable argument for a single word
   move source.  */

int
move_src_operand (rtx op, enum machine_mode mode)
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF :
    case LABEL_REF :
    case CONST :
      return 1;
    case CONST_INT :
      /* LARGE_INT presumably accepts anything representable in 32 bits
	 (defined in arc.h) — TODO confirm.  */
      return (LARGE_INT (INTVAL (op)));
    case CONST_DOUBLE :
      /* We can handle DImode integer constants in SImode if the value
	 (signed or unsigned) will fit in 32 bits.  This is needed because
	 large unsigned 32-bit constants are represented as CONST_DOUBLEs.  */
      if (mode == SImode)
	return arc_double_limm_p (op);
      /* We can handle 32-bit floating point constants.  */
      if (mode == SFmode)
	return GET_MODE (op) == SFmode;
      return 0;
    case REG :
      return register_operand (op, mode);
    case SUBREG :
      /* (subreg (mem ...) ...) can occur here if the inner part was once a
	 pseudo-reg and is now a stack slot.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM)
	return address_operand (XEXP (SUBREG_REG (op), 0), mode);
      else
	return register_operand (op, mode);
    case MEM :
      return address_operand (XEXP (op, 0), mode);
    default :
      return 0;
    }
}
573
574 /* Return true if OP is an acceptable argument for a double word
575 move source. */
576
577 int
578 move_double_src_operand (rtx op, enum machine_mode mode)
579 {
580 switch (GET_CODE (op))
581 {
582 case REG :
583 return register_operand (op, mode);
584 case SUBREG :
585 /* (subreg (mem ...) ...) can occur here if the inner part was once a
586 pseudo-reg and is now a stack slot. */
587 if (GET_CODE (SUBREG_REG (op)) == MEM)
588 return move_double_src_operand (SUBREG_REG (op), mode);
589 else
590 return register_operand (op, mode);
591 case MEM :
592 /* Disallow auto inc/dec for now. */
593 if (GET_CODE (XEXP (op, 0)) == PRE_DEC
594 || GET_CODE (XEXP (op, 0)) == PRE_INC)
595 return 0;
596 return address_operand (XEXP (op, 0), mode);
597 case CONST_INT :
598 case CONST_DOUBLE :
599 return 1;
600 default :
601 return 0;
602 }
603 }
604
605 /* Return true if OP is an acceptable argument for a move destination. */
606
607 int
608 move_dest_operand (rtx op, enum machine_mode mode)
609 {
610 switch (GET_CODE (op))
611 {
612 case REG :
613 return register_operand (op, mode);
614 case SUBREG :
615 /* (subreg (mem ...) ...) can occur here if the inner part was once a
616 pseudo-reg and is now a stack slot. */
617 if (GET_CODE (SUBREG_REG (op)) == MEM)
618 return address_operand (XEXP (SUBREG_REG (op), 0), mode);
619 else
620 return register_operand (op, mode);
621 case MEM :
622 return address_operand (XEXP (op, 0), mode);
623 default :
624 return 0;
625 }
626 }
627
628 /* Return true if OP is valid load with update operand. */
629
630 int
631 load_update_operand (rtx op, enum machine_mode mode)
632 {
633 if (GET_CODE (op) != MEM
634 || GET_MODE (op) != mode)
635 return 0;
636 op = XEXP (op, 0);
637 if (GET_CODE (op) != PLUS
638 || GET_MODE (op) != Pmode
639 || !register_operand (XEXP (op, 0), Pmode)
640 || !nonmemory_operand (XEXP (op, 1), Pmode))
641 return 0;
642 return 1;
643 }
644
645 /* Return true if OP is valid store with update operand. */
646
647 int
648 store_update_operand (rtx op, enum machine_mode mode)
649 {
650 if (GET_CODE (op) != MEM
651 || GET_MODE (op) != mode)
652 return 0;
653 op = XEXP (op, 0);
654 if (GET_CODE (op) != PLUS
655 || GET_MODE (op) != Pmode
656 || !register_operand (XEXP (op, 0), Pmode)
657 || !(GET_CODE (XEXP (op, 1)) == CONST_INT
658 && SMALL_INT (INTVAL (XEXP (op, 1)))))
659 return 0;
660 return 1;
661 }
662
663 /* Return true if OP is a non-volatile non-immediate operand.
664 Volatile memory refs require a special "cache-bypass" instruction
665 and only the standard movXX patterns are set up to handle them. */
666
667 int
668 nonvol_nonimm_operand (rtx op, enum machine_mode mode)
669 {
670 if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
671 return 0;
672 return nonimmediate_operand (op, mode);
673 }
674
675 /* Accept integer operands in the range -0x80000000..0x7fffffff. We have
676 to check the range carefully since this predicate is used in DImode
677 contexts. */
678
679 int
680 const_sint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
681 {
682 /* All allowed constants will fit a CONST_INT. */
683 return (GET_CODE (op) == CONST_INT
684 && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff));
685 }
686
/* Accept integer operands in the range 0..0xffffffff.  We have to check the
   range carefully since this predicate is used in DImode contexts.  Also, we
   need some extra crud to make it work when hosted on 64-bit machines.  */

int
const_uint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
#if HOST_BITS_PER_WIDE_INT > 32
  /* All allowed constants will fit a CONST_INT.  */
  return (GET_CODE (op) == CONST_INT
	  && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
#else
  /* On a 32-bit host, values with bit 31 set are CONST_DOUBLEs with a
     zero high word; non-negative CONST_INTs cover the rest.  */
  return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
	  || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
#endif
}
703
704 /* Return 1 if OP is a comparison operator valid for the mode of CC.
705 This allows the use of MATCH_OPERATOR to recognize all the branch insns.
706
707 Some insns only set a few bits in the condition code. So only allow those
708 comparisons that use the bits that are valid. */
709
710 int
711 proper_comparison_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
712 {
713 enum rtx_code code;
714 if (!COMPARISON_P (op))
715 return 0;
716
717 code = GET_CODE (op);
718 if (GET_MODE (XEXP (op, 0)) == CCZNmode)
719 return (code == EQ || code == NE);
720 if (GET_MODE (XEXP (op, 0)) == CCZNCmode)
721 return (code == EQ || code == NE
722 || code == LTU || code == GEU || code == GTU || code == LEU);
723 return 1;
724 }
725 \f
/* Misc. utilities.  */

/* X and Y are two things to compare using CODE.  Return the rtx
   for the cc reg in the proper mode.  */

rtx
gen_compare_reg (enum rtx_code code, rtx x, rtx y)
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  /* 61 is the condition-code register (cf. arc_init_reg_tables, which
     classifies regno 61 as the CC_REG).  */
  return gen_rtx_REG (mode, 61);
}
737
/* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
   We assume the value can be either signed or unsigned.  */

int
arc_double_limm_p (rtx value)
{
  HOST_WIDE_INT low, high;

  gcc_assert (GET_CODE (value) == CONST_DOUBLE);

  low = CONST_DOUBLE_LOW (value);
  high = CONST_DOUBLE_HIGH (value);

  if (low & 0x80000000)
    {
      /* Bit 31 of the low word set: acceptable either as an unsigned
	 value 0x80000000..0xffffffff with a zero high word, or as a
	 sign-extended negative value (all bits of LOW above bit 31 set
	 and HIGH == -1).  */
      return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
	      || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
		   == - (unsigned HOST_WIDE_INT) 0x80000000)
		  && high == -1));
    }
  else
    {
      /* Bit 31 clear: fits iff the value is non-negative, at most
	 0x7fffffff, with no bits in the high word.  */
      return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
    }
}
763 \f
/* Do any needed setup for a variadic function.  For the ARC, we must
   create a register parameter block, and then copy any anonymous arguments
   in registers to memory.

   CUM has not been updated for the last named argument which has type TYPE
   and mode MODE, and we rely on this fact.

   We do things a little weird here.  We're supposed to only allocate space
   for the anonymous arguments.  However we need to keep the stack eight byte
   aligned.  So we round the space up if necessary, and leave it to va_start
   to compensate.  */

static void
arc_setup_incoming_varargs (CUMULATIVE_ARGS *cum,
			    enum machine_mode mode,
			    tree type ATTRIBUTE_UNUSED,
			    int *pretend_size,
			    int no_rtl)
{
  int first_anon_arg;

  /* All BLKmode values are passed by reference.  */
  gcc_assert (mode != BLKmode);

  /* Index of the first parameter register holding an anonymous argument:
     *CUM words already used, plus the words of the last named argument.  */
  first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
			   / UNITS_PER_WORD);

  if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
    {
      /* Note that first_reg_offset < MAX_ARC_PARM_REGS.  */
      int first_reg_offset = first_anon_arg;
      /* Size in words to "pretend" allocate.  */
      int size = MAX_ARC_PARM_REGS - first_reg_offset;
      /* Extra slop to keep stack eight byte aligned.  */
      int align_slop = size & 1;
      rtx regblock;

      /* Build a BLKmode MEM covering the save area and dump the
	 anonymous parameter registers into it.  */
      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)
					     + align_slop * UNITS_PER_WORD));
      set_mem_alias_set (regblock, get_varargs_alias_set ());
      set_mem_align (regblock, BITS_PER_WORD);
      move_block_from_reg (first_reg_offset, regblock,
			   MAX_ARC_PARM_REGS - first_reg_offset);

      *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
		       * UNITS_PER_WORD);
    }
}
814 \f
/* Cost functions.  */

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
arc_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
	       bool speed ATTRIBUTE_UNUSED)
{
  switch (code)
    {
      /* Small integers are as cheap as registers.  4 byte values can
	 be fetched as immediate constants - let's give that the cost
	 of an extra insn.  */
    case CONST_INT:
      if (SMALL_INT (INTVAL (x)))
	{
	  *total = 0;
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (1);
      return true;

    case CONST_DOUBLE:
      {
	/* Cost one insn per half that does not fit in a shimm.  */
	rtx high, low;
	split_double (x, &high, &low);
	*total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
				+ !SMALL_INT (INTVAL (low)));
	return true;
      }

    /* Encourage synth_mult to find a synthetic multiply when reasonable.
       If we need more than 12 insns to do a multiply, then go out-of-line,
       since the call overhead will be < 10% of the cost of the multiply.  */
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (TARGET_SHIFTER)
	*total = COSTS_N_INSNS (1);
      else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	/* Variable shift without a barrel shifter: a full loop.  */
	*total = COSTS_N_INSNS (16);
      else
	/* Single-bit shifts: one insn per bit of shift count.  */
	*total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
      return false;

    default:
      return false;
    }
}
871
872
873 /* Provide the costs of an addressing mode that contains ADDR.
874 If ADDR is not a valid address, its cost is irrelevant. */
875
876 static int
877 arc_address_cost (rtx addr, bool speed ATTRIBUTE_UNUSED)
878 {
879 switch (GET_CODE (addr))
880 {
881 case REG :
882 return 1;
883
884 case LABEL_REF :
885 case SYMBOL_REF :
886 case CONST :
887 return 2;
888
889 case PLUS :
890 {
891 register rtx plus0 = XEXP (addr, 0);
892 register rtx plus1 = XEXP (addr, 1);
893
894 if (GET_CODE (plus0) != REG)
895 break;
896
897 switch (GET_CODE (plus1))
898 {
899 case CONST_INT :
900 return SMALL_INT (plus1) ? 1 : 2;
901 case CONST :
902 case SYMBOL_REF :
903 case LABEL_REF :
904 return 2;
905 default:
906 break;
907 }
908 break;
909 }
910 default:
911 break;
912 }
913
914 return 4;
915 }
916 \f
917 /* Function prologue/epilogue handlers. */
918
919 /* ARC stack frames look like:
920
921 Before call After call
922 +-----------------------+ +-----------------------+
923 | | | |
924 high | local variables, | | local variables, |
925 mem | reg save area, etc. | | reg save area, etc. |
926 | | | |
927 +-----------------------+ +-----------------------+
928 | | | |
929 | arguments on stack. | | arguments on stack. |
930 | | | |
931 SP+16->+-----------------------+FP+48->+-----------------------+
932 | 4 word save area for | | reg parm save area, |
933 | return addr, prev %fp | | only created for |
934 SP+0->+-----------------------+ | variable argument |
935 | functions |
936 FP+16->+-----------------------+
937 | 4 word save area for |
938 | return addr, prev %fp |
939 FP+0->+-----------------------+
940 | |
941 | local variables |
942 | |
943 +-----------------------+
944 | |
945 | register save area |
946 | |
947 +-----------------------+
948 | |
949 | alloca allocations |
950 | |
951 +-----------------------+
952 | |
953 | arguments on stack |
954 | |
955 SP+16->+-----------------------+
956 low | 4 word save area for |
957 memory | return addr, prev %fp |
958 SP+0->+-----------------------+
959
960 Notes:
961 1) The "reg parm save area" does not exist for non variable argument fns.
962 The "reg parm save area" can be eliminated completely if we created our
963 own va-arc.h, but that has tradeoffs as well (so it's not done). */
964
/* Structure to be filled in by arc_compute_frame_size with register
   save masks, and offsets for the current function.  */
struct arc_frame_info
{
  unsigned int total_size;	/* # bytes that the entire frame takes up.  */
  unsigned int extra_size;	/* # bytes of extra stuff.  */
  unsigned int pretend_size;	/* # bytes we push and pretend caller did.  */
  unsigned int args_size;	/* # bytes that outgoing arguments take up.  */
  unsigned int reg_size;	/* # bytes needed to store regs.  */
  unsigned int var_size;	/* # bytes that variables take up.  */
  unsigned int reg_offset;	/* Offset from new sp to store regs.  */
  unsigned int gmask;		/* Mask of saved gp registers.  */
  int initialized;		/* Nonzero if frame size already calculated.  */
};

/* Current frame information calculated by arc_compute_frame_size.  */
static struct arc_frame_info current_frame_info;

/* Zero structure to initialize current_frame_info
   (copied over it at the end of each function).  */
static struct arc_frame_info zero_frame_info;
985
986 /* Type of function DECL.
987
988 The result is cached. To reset the cache at the end of a function,
989 call with DECL = NULL_TREE. */
990
991 enum arc_function_type
992 arc_compute_function_type (tree decl)
993 {
994 tree a;
995 /* Cached value. */
996 static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN;
997 /* Last function we were called for. */
998 static tree last_fn = NULL_TREE;
999
1000 /* Resetting the cached value? */
1001 if (decl == NULL_TREE)
1002 {
1003 fn_type = ARC_FUNCTION_UNKNOWN;
1004 last_fn = NULL_TREE;
1005 return fn_type;
1006 }
1007
1008 if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN)
1009 return fn_type;
1010
1011 /* Assume we have a normal function (not an interrupt handler). */
1012 fn_type = ARC_FUNCTION_NORMAL;
1013
1014 /* Now see if this is an interrupt handler. */
1015 for (a = DECL_ATTRIBUTES (current_function_decl);
1016 a;
1017 a = TREE_CHAIN (a))
1018 {
1019 tree name = TREE_PURPOSE (a), args = TREE_VALUE (a);
1020
1021 if (name == get_identifier ("__interrupt__")
1022 && list_length (args) == 1
1023 && TREE_CODE (TREE_VALUE (args)) == STRING_CST)
1024 {
1025 tree value = TREE_VALUE (args);
1026
1027 if (!strcmp (TREE_STRING_POINTER (value), "ilink1"))
1028 fn_type = ARC_FUNCTION_ILINK1;
1029 else if (!strcmp (TREE_STRING_POINTER (value), "ilink2"))
1030 fn_type = ARC_FUNCTION_ILINK2;
1031 else
1032 gcc_unreachable ();
1033 break;
1034 }
1035 }
1036
1037 last_fn = decl;
1038 return fn_type;
1039 }
1040
/* Hard register numbers: ilink1/ilink2 are presumably the interrupt
   link registers (cf. the "interrupt" attribute arguments) — confirm
   against arc.h; blink (regno 31) holds the return address.  */
#define ILINK1_REGNUM 29
#define ILINK2_REGNUM 30
#define RETURN_ADDR_REGNUM 31
#define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM))
#define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM))

/* Tell prologue and epilogue if register REGNO should be saved / restored.
   The return address and frame pointer are treated separately.
   Don't consider them here.  Call-used registers are saved too when
   compiling an interrupt handler.  */
#define MUST_SAVE_REGISTER(regno, interrupt_p) \
((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \
 && (df_regs_ever_live_p (regno) && (!call_used_regs[regno] || interrupt_p)))

/* Save blink iff it is actually used in this function.  */
#define MUST_SAVE_RETURN_ADDR (df_regs_ever_live_p (RETURN_ADDR_REGNUM))
1055
1056 /* Return the bytes needed to compute the frame pointer from the current
1057 stack pointer.
1058
1059 SIZE is the size needed for local variables. */
1060
1061 unsigned int
1062 arc_compute_frame_size (int size /* # of var. bytes allocated. */)
1063 {
1064 int regno;
1065 unsigned int total_size, var_size, args_size, pretend_size, extra_size;
1066 unsigned int reg_size, reg_offset;
1067 unsigned int gmask;
1068 enum arc_function_type fn_type;
1069 int interrupt_p;
1070
1071 var_size = size;
1072 args_size = crtl->outgoing_args_size;
1073 pretend_size = crtl->args.pretend_args_size;
1074 extra_size = FIRST_PARM_OFFSET (0);
1075 total_size = extra_size + pretend_size + args_size + var_size;
1076 reg_offset = FIRST_PARM_OFFSET(0) + crtl->outgoing_args_size;
1077 reg_size = 0;
1078 gmask = 0;
1079
1080 /* See if this is an interrupt handler. Call used registers must be saved
1081 for them too. */
1082 fn_type = arc_compute_function_type (current_function_decl);
1083 interrupt_p = ARC_INTERRUPT_P (fn_type);
1084
1085 /* Calculate space needed for registers.
1086 ??? We ignore the extension registers for now. */
1087
1088 for (regno = 0; regno <= 31; regno++)
1089 {
1090 if (MUST_SAVE_REGISTER (regno, interrupt_p))
1091 {
1092 reg_size += UNITS_PER_WORD;
1093 gmask |= 1 << regno;
1094 }
1095 }
1096
1097 total_size += reg_size;
1098
1099 /* If the only space to allocate is the fp/blink save area this is an
1100 empty frame. However, if we'll be making a function call we need to
1101 allocate a stack frame for our callee's fp/blink save area. */
1102 if (total_size == extra_size
1103 && !MUST_SAVE_RETURN_ADDR)
1104 total_size = extra_size = 0;
1105
1106 total_size = ARC_STACK_ALIGN (total_size);
1107
1108 /* Save computed information. */
1109 current_frame_info.total_size = total_size;
1110 current_frame_info.extra_size = extra_size;
1111 current_frame_info.pretend_size = pretend_size;
1112 current_frame_info.var_size = var_size;
1113 current_frame_info.args_size = args_size;
1114 current_frame_info.reg_size = reg_size;
1115 current_frame_info.reg_offset = reg_offset;
1116 current_frame_info.gmask = gmask;
1117 current_frame_info.initialized = reload_completed;
1118
1119 /* Ok, we're done. */
1120 return total_size;
1121 }
1122 \f
1123 /* Common code to save/restore registers. */
1124
1125 void
1126 arc_save_restore (FILE *file,
1127 const char *base_reg,
1128 unsigned int offset,
1129 unsigned int gmask,
1130 const char *op)
1131 {
1132 int regno;
1133
1134 if (gmask == 0)
1135 return;
1136
1137 for (regno = 0; regno <= 31; regno++)
1138 {
1139 if ((gmask & (1L << regno)) != 0)
1140 {
1141 fprintf (file, "\t%s %s,[%s,%d]\n",
1142 op, reg_names[regno], base_reg, offset);
1143 offset += UNITS_PER_WORD;
1144 }
1145 }
1146 }
1147 \f
1148 /* Target hook to assemble an integer object. The ARC version needs to
1149 emit a special directive for references to labels and function
1150 symbols. */
1151
1152 static bool
1153 arc_assemble_integer (rtx x, unsigned int size, int aligned_p)
1154 {
1155 if (size == UNITS_PER_WORD && aligned_p
1156 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
1157 || GET_CODE (x) == LABEL_REF))
1158 {
1159 fputs ("\t.word\t%st(", asm_out_file);
1160 output_addr_const (asm_out_file, x);
1161 fputs (")\n", asm_out_file);
1162 return true;
1163 }
1164 return default_assemble_integer (x, size, aligned_p);
1165 }
1166 \f
/* Set up the stack and frame pointer (if desired) for the function.

   Writes the prologue as assembly text directly to FILE.  SIZE is the
   local-variable size from the middle end; the authoritative layout
   comes from arc_compute_frame_size / current_frame_info.  */

static void
arc_output_function_prologue (FILE *file, HOST_WIDE_INT size)
{
  const char *sp_str = reg_names[STACK_POINTER_REGNUM];
  const char *fp_str = reg_names[FRAME_POINTER_REGNUM];
  unsigned int gmask = current_frame_info.gmask;
  enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);

  /* If this is an interrupt handler, set up our stack frame.
     ??? Optimize later.  */
  if (ARC_INTERRUPT_P (fn_type))
    {
      fprintf (file, "\t%s interrupt handler\n",
	       ASM_COMMENT_START);
      /* Reserve 16 bytes; the epilogue emits the matching
	 "add sp,sp,16".  */
      fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str);
    }

  /* This is only for the human reader.  */
  fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n",
	   ASM_COMMENT_START, ASM_COMMENT_START,
	   current_frame_info.var_size,
	   current_frame_info.reg_size / 4,
	   current_frame_info.args_size,
	   current_frame_info.extra_size);

  /* Use the cached frame layout if arc_compute_frame_size already ran.  */
  size = ARC_STACK_ALIGN (size);
  size = (! current_frame_info.initialized
	   ? arc_compute_frame_size (size)
	   : current_frame_info.total_size);

  /* These cases shouldn't happen.  Catch them now.  */
  gcc_assert (size || !gmask);

  /* Allocate space for register arguments if this is a variadic function.  */
  if (current_frame_info.pretend_size != 0)
    fprintf (file, "\tsub %s,%s,%d\n",
	     sp_str, sp_str, current_frame_info.pretend_size);

  /* The home-grown ABI says link register is saved first.  */
  if (MUST_SAVE_RETURN_ADDR)
    fprintf (file, "\tst %s,[%s,%d]\n",
	     reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD);

  /* Set up the previous frame pointer next (if we need to).  */
  if (frame_pointer_needed)
    {
      fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str);
      fprintf (file, "\tmov %s,%s\n", fp_str, sp_str);
    }

  /* ??? We don't handle the case where the saved regs are more than 252
     bytes away from sp.  This can be handled by decrementing sp once, saving
     the regs, and then decrementing it again.  The epilogue doesn't have this
     problem as the `ld' insn takes reg+limm values (though it would be more
     efficient to avoid reg+limm).  */

  /* Allocate the stack frame.  */
  if (size - current_frame_info.pretend_size > 0)
    fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
	     sp_str, sp_str, size - current_frame_info.pretend_size);

  /* Save any needed call-saved regs (and call-used if this is an
     interrupt handler).  */
  arc_save_restore (file, sp_str, current_frame_info.reg_offset,
		    /* The zeroing of these two bits is unnecessary,
		       but leave this in for clarity.  */
		    gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
		    "st");

  fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START);
}
1240 \f
/* Do any necessary cleanup after a function to restore stack, frame,
   and regs.

   Writes the epilogue as assembly text directly to FILE, then resets
   the cached frame info and function type for the next function.  */

static void
arc_output_function_epilogue (FILE *file, HOST_WIDE_INT size)
{
  rtx epilogue_delay = crtl->epilogue_delay_list;
  int noepilogue = FALSE;
  enum arc_function_type fn_type = arc_compute_function_type (current_function_decl);

  /* This is only for the human reader.  */
  fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START);

  /* Use the cached frame layout when available.  */
  size = ARC_STACK_ALIGN (size);
  size = (!current_frame_info.initialized
	   ? arc_compute_frame_size (size)
	   : current_frame_info.total_size);

  if (size == 0 && epilogue_delay == 0)
    {
      rtx insn = get_last_insn ();

      /* If the last insn was a BARRIER, we don't have to write any code
	 because a jump (aka return) was put there.  */
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      if (insn && GET_CODE (insn) == BARRIER)
	noepilogue = TRUE;
    }

  if (!noepilogue)
    {
      unsigned int pretend_size = current_frame_info.pretend_size;
      unsigned int frame_size = size - pretend_size;
      int restored, fp_restored_p;
      int can_trust_sp_p = !cfun->calls_alloca;
      const char *sp_str = reg_names[STACK_POINTER_REGNUM];
      const char *fp_str = reg_names[FRAME_POINTER_REGNUM];

      /* ??? There are lots of optimizations that can be done here.
	 EG: Use fp to restore regs if it's closer.
	 Maybe in time we'll do them all.  For now, always restore regs from
	 sp, but don't restore sp if we don't have to.  */

      if (!can_trust_sp_p)
	{
	  /* alloca may have moved sp; recompute it from fp.  */
	  gcc_assert (frame_pointer_needed);
	  fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n",
		   sp_str, fp_str, frame_size, ASM_COMMENT_START);
	}

      /* Restore any saved registers.  */
      arc_save_restore (file, sp_str, current_frame_info.reg_offset,
			/* The zeroing of these two bits is unnecessary,
			   but leave this in for clarity.  */
			current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK),
			"ld");

      if (MUST_SAVE_RETURN_ADDR)
	fprintf (file, "\tld %s,[%s,%d]\n",
		 reg_names[RETURN_ADDR_REGNUM],
		 frame_pointer_needed ? fp_str : sp_str,
		 UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size));

      /* Keep track of how much of the stack pointer we've restored.
	 It makes the following a lot more readable.  */
      restored = 0;
      fp_restored_p = 0;

      /* We try to emit the epilogue delay slot insn right after the load
	 of the return address register so that it can execute with the
	 stack intact.  Secondly, loads are delayed.  */
      /* ??? If stack intactness is important, always emit now.  */
      if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX)
	{
	  final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
	  epilogue_delay = NULL_RTX;
	}

      if (frame_pointer_needed)
	{
	  /* Try to restore the frame pointer in the delay slot.  We can't,
	     however, if any of these is true.  */
	  if (epilogue_delay != NULL_RTX
	      || !SMALL_INT (frame_size)
	      || pretend_size
	      || ARC_INTERRUPT_P (fn_type))
	    {
	      /* Note that we restore fp and sp here!  */
	      fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
	      restored += frame_size;
	      fp_restored_p = 1;
	    }
	}
      else if (!SMALL_INT (size /* frame_size + pretend_size */)
	       || ARC_INTERRUPT_P (fn_type))
	{
	  fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size);
	  restored += frame_size;
	}

      /* These must be done before the return insn because the delay slot
	 does the final stack restore.  */
      if (ARC_INTERRUPT_P (fn_type))
	{
	  if (epilogue_delay)
	    {
	      final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
	    }
	}

      /* Emit the return instruction.  */
      {
	/* Indexed by arc_function_type; entry 0 is unused since
	   ARC_FUNCTION_UNKNOWN never reaches here.  */
	static const int regs[4] = {
	  0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM
	};

	/* Update the flags, if returning from an interrupt handler.  */
	if (ARC_INTERRUPT_P (fn_type))
	  fprintf (file, "\tj.d.f %s\n", reg_names[regs[fn_type]]);
	else
	  fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]);
      }

      /* If the only register saved is the return address, we need a
	 nop, unless we have an instruction to put into it.  Otherwise
	 we don't since reloading multiple registers doesn't reference
	 the register being loaded.  */

      if (ARC_INTERRUPT_P (fn_type))
	/* Pop the 16 bytes reserved by the prologue, in the delay slot.  */
	fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str);
      else if (epilogue_delay != NULL_RTX)
	{
	  gcc_assert (!frame_pointer_needed || fp_restored_p);
	  gcc_assert (restored >= size);
	  final_scan_insn (XEXP (epilogue_delay, 0), file, 1, 1, NULL);
	}
      else if (frame_pointer_needed && !fp_restored_p)
	{
	  gcc_assert (SMALL_INT (frame_size));
	  /* Note that we restore fp and sp here!  */
	  fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size);
	}
      else if (restored < size)
	{
	  gcc_assert (SMALL_INT (size - restored));
	  fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n",
		   sp_str, sp_str, size - restored);
	}
      else
	fprintf (file, "\tnop\n");
    }

  /* Reset state info for each function.  */
  current_frame_info = zero_frame_info;
  arc_compute_function_type (NULL_TREE);
}
1398 \f
1399 /* Define the number of delay slots needed for the function epilogue.
1400
1401 Interrupt handlers can't have any epilogue delay slots (it's always needed
1402 for something else, I think). For normal functions, we have to worry about
1403 using call-saved regs as they'll be restored before the delay slot insn.
1404 Functions with non-empty frames already have enough choices for the epilogue
1405 delay slot so for now we only consider functions with empty frames. */
1406
1407 int
1408 arc_delay_slots_for_epilogue (void)
1409 {
1410 if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL)
1411 return 0;
1412 if (!current_frame_info.initialized)
1413 (void) arc_compute_frame_size (get_frame_size ());
1414 if (current_frame_info.total_size == 0)
1415 return 1;
1416 return 0;
1417 }
1418
1419 /* Return true if TRIAL is a valid insn for the epilogue delay slot.
1420 Any single length instruction which doesn't reference the stack or frame
1421 pointer or any call-saved register is OK. SLOT will always be 0. */
1422
1423 int
1424 arc_eligible_for_epilogue_delay (rtx trial, int slot)
1425 {
1426 gcc_assert (!slot);
1427
1428 if (get_attr_length (trial) == 1
1429 /* If registers where saved, presumably there's more than enough
1430 possibilities for the delay slot. The alternative is something
1431 more complicated (of course, if we expanded the epilogue as rtl
1432 this problem would go away). */
1433 /* ??? Note that this will always be true since only functions with
1434 empty frames have epilogue delay slots. See
1435 arc_delay_slots_for_epilogue. */
1436 && current_frame_info.gmask == 0
1437 && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial))
1438 && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial)))
1439 return 1;
1440 return 0;
1441 }
1442 \f
1443 /* Return true if OP is a shift operator. */
1444
1445 int
1446 shift_operator (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1447 {
1448 switch (GET_CODE (op))
1449 {
1450 case ASHIFTRT:
1451 case LSHIFTRT:
1452 case ASHIFT:
1453 return 1;
1454 default:
1455 return 0;
1456 }
1457 }
1458
/* Output the assembler code for doing a shift.
   We go to a bit of trouble to generate efficient code as the ARC only has
   single bit shifts.  This is taken from the h8300 port.  We only have one
   mode of shifting and can't access individual bytes like the h8300 can, so
   this is greatly simplified (at the expense of not generating hyper-
   efficient code).

   This function is not used if the variable shift insns are present.  */

/* ??? We assume the output operand is the same as operand 1.
   This can be optimized (deleted) in the case of 1 bit shifts.  */
/* ??? We use the loop register here.  We don't use it elsewhere (yet) and
   using it here will give us a chance to play with it.  */

const char *
output_shift (rtx *operands)
{
  /* operands[0]/[1]: value being shifted (assumed identical, see above);
     operands[2]: shift count; operands[3]: the shift rtx itself;
     operands[4]: scratch register used for the loop counter.  */
  rtx shift = operands[3];
  enum machine_mode mode = GET_MODE (shift);
  enum rtx_code code = GET_CODE (shift);
  const char *shift_one;

  gcc_assert (mode == SImode);

  switch (code)
    {
    case ASHIFT: shift_one = "asl %0,%0"; break;
    case ASHIFTRT: shift_one = "asr %0,%0"; break;
    case LSHIFTRT: shift_one = "lsr %0,%0"; break;
    default: gcc_unreachable ();
    }

  if (GET_CODE (operands[2]) != CONST_INT)
    {
      /* Variable shift count: set up the loop counter, then share the
	 loop-emitting code below via the goto.  */
      if (optimize)
	{
	  /* Set flags from the count so a zero count skips the loop.  */
	  output_asm_insn ("sub.f 0,%2,0", operands);
	  output_asm_insn ("mov lp_count,%2", operands);
	  output_asm_insn ("bz 2f", operands);
	}
      else
	output_asm_insn ("mov %4,%2", operands);
      /* NB: jumps into the constant-count branch below.  */
      goto shiftloop;
    }
  else
    {
      int n = INTVAL (operands[2]);

      /* If the count is negative, make it 0.  */
      if (n < 0)
	n = 0;
      /* If the count is too big, truncate it.
	 ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to
	 do the intuitive thing.  */
      else if (n > GET_MODE_BITSIZE (mode))
	n = GET_MODE_BITSIZE (mode);

      /* First see if we can do them inline.  */
      if (n <= 8)
	{
	  while (--n >= 0)
	    output_asm_insn (shift_one, operands);
	}
      /* See if we can use a rotate/and.  */
      else if (n == BITS_PER_WORD - 1)
	{
	  switch (code)
	    {
	    case ASHIFT :
	      output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands);
	      break;
	    case ASHIFTRT :
	      /* The ARC doesn't have a rol insn.  Use something else.  */
	      output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands);
	      break;
	    case LSHIFTRT :
	      /* The ARC doesn't have a rol insn.  Use something else.  */
	      output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands);
	      break;
	    default:
	      break;
	    }
	}
      /* Must loop.  */
      else
	{
	  char buf[100];

	  if (optimize)
	    output_asm_insn ("mov lp_count,%c2", operands);
	  else
	    output_asm_insn ("mov %4,%c2", operands);
	shiftloop:
	  if (optimize)
	    {
	      /* Zero-overhead loop: program lp_start/lp_end around the
		 single shift insn.  */
	      if (flag_pic)
		sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start",
			 ASM_COMMENT_START);
	      else
		sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2",
			 ASM_COMMENT_START);
	      output_asm_insn (buf, operands);
	      output_asm_insn ("sr %4,[lp_start]", operands);
	      output_asm_insn ("add %4,%4,1", operands);
	      output_asm_insn ("sr %4,[lp_end]", operands);
	      output_asm_insn ("nop\n\tnop", operands);
	      if (flag_pic)
		fprintf (asm_out_file, "\t%s single insn loop\n",
			 ASM_COMMENT_START);
	      else
		fprintf (asm_out_file, "1:\t%s single insn loop\n",
			 ASM_COMMENT_START);
	      output_asm_insn (shift_one, operands);
	      fprintf (asm_out_file, "2:\t%s end single insn loop\n",
		       ASM_COMMENT_START);
	    }
	  else
	    {
	      /* Plain decrement-and-branch loop.  */
	      fprintf (asm_out_file, "1:\t%s begin shift loop\n",
		       ASM_COMMENT_START);
	      output_asm_insn ("sub.f %4,%4,1", operands);
	      output_asm_insn ("nop", operands);
	      output_asm_insn ("bn.nd 2f", operands);
	      output_asm_insn (shift_one, operands);
	      output_asm_insn ("b.nd 1b", operands);
	      fprintf (asm_out_file, "2:\t%s end shift loop\n",
		       ASM_COMMENT_START);
	    }
	}
    }

  return "";
}
1592 \f
/* Nested function support.  */

/* Emit RTL insns to initialize the variable parts of a trampoline.
   FNADDR is an RTX for the address of the function's pure code.
   CXT is an RTX for the static chain value for the function.

   This port leaves the hook empty: no trampoline initialization is
   emitted, so nested functions requiring a static chain are not
   supported here.  */

void
arc_initialize_trampoline (rtx tramp ATTRIBUTE_UNUSED,
			   rtx fnaddr ATTRIBUTE_UNUSED,
			   rtx cxt ATTRIBUTE_UNUSED)
{
}
1605 \f
1606 /* Set the cpu type and print out other fancy things,
1607 at the top of the file. */
1608
1609 static void
1610 arc_file_start (void)
1611 {
1612 default_file_start ();
1613 fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
1614 }
1615 \f
/* Print operand X (an rtx) in assembler syntax to file FILE.
   CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
   For `%' followed by punctuation, CODE is the punctuation and X is null.

   Codes handled here: `#'/`*' delay-slot suffixes, `?'/`!' conditional
   execution, `~' compare/branch separation nop, `d'/`D' condition codes,
   `R' second word, `S' %st() symbols, `H'/`L' high/low words, `A' FP
   constants, `U' update suffix, `V' cache-bypass suffix.  */

void
arc_print_operand (FILE *file, rtx x, int code)
{
  switch (code)
    {
    case '#' :
      /* Conditional branches.  For now these are equivalent.  */
    case '*' :
      /* Unconditional branches.  Output the appropriate delay slot suffix.  */
      if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
	{
	  /* There's nothing in the delay slot.  */
	  fputs (".nd", file);
	}
      else
	{
	  rtx jump = XVECEXP (final_sequence, 0, 0);
	  rtx delay = XVECEXP (final_sequence, 0, 1);
	  if (INSN_ANNULLED_BRANCH_P (jump))
	    fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
	  else
	    fputs (".d", file);
	}
      return;
    case '?' : /* with leading "." */
    case '!' : /* without leading "." */
      /* This insn can be conditionally executed.  See if the ccfsm machinery
	 says it should be conditionalized.  */
      if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
	{
	  /* Is this insn in a delay slot?  */
	  if (final_sequence && XVECLEN (final_sequence, 0) == 2)
	    {
	      rtx insn = XVECEXP (final_sequence, 0, 1);

	      /* If the insn is annulled and is from the target path, we need
		 to inverse the condition test.  */
	      if (INSN_ANNULLED_BRANCH_P (insn))
		{
		  if (INSN_FROM_TARGET_P (insn))
		    fprintf (file, "%s%s",
			     code == '?' ? "." : "",
			     arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
		  else
		    fprintf (file, "%s%s",
			     code == '?' ? "." : "",
			     arc_condition_codes[arc_ccfsm_current_cc]);
		}
	      else
		{
		  /* This insn is executed for either path, so don't
		     conditionalize it at all.  */
		  ; /* nothing to do */
		}
	    }
	  else
	    {
	      /* This insn isn't in a delay slot.  */
	      fprintf (file, "%s%s",
		       code == '?' ? "." : "",
		       arc_condition_codes[arc_ccfsm_current_cc]);
	    }
	}
      return;
    case '~' :
      /* Output a nop if we're between a set of the condition codes,
	 and a conditional branch.  */
      if (last_insn_set_cc_p)
	fputs ("nop\n\t", file);
      return;
    case 'd' :
      /* Condition code mnemonic for comparison X.  */
      fputs (arc_condition_codes[get_arc_condition_code (x)], file);
      return;
    case 'D' :
      /* Inverted condition code mnemonic for comparison X.  */
      fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
				 (get_arc_condition_code (x))],
	     file);
      return;
    case 'R' :
      /* Write second word of DImode or DFmode reference,
	 register or memory.  */
      if (GET_CODE (x) == REG)
	fputs (reg_names[REGNO (x)+1], file);
      else if (GET_CODE (x) == MEM)
	{
	  fputc ('[', file);
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of four.  */
	  /* ??? This is taken from rs6000.c I think.  I don't think it is
	     currently necessary, but keep it around.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
	  else
	    output_address (plus_constant (XEXP (x, 0), 4));
	  fputc (']', file);
	}
      else
	output_operand_lossage ("invalid operand to %%R code");
      return;
    case 'S' :
      /* Function symbols and labels get the %st() operator; anything
	 else falls through to the generic printing below.  */
      if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
	  || GET_CODE (x) == LABEL_REF)
	{
	  fprintf (file, "%%st(");
	  output_addr_const (file, x);
	  fprintf (file, ")");
	  return;
	}
      break;
    case 'H' :
    case 'L' :
      if (GET_CODE (x) == REG)
	{
	  /* L = least significant word, H = most significant word */
	  if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
	    fputs (reg_names[REGNO (x)], file);
	  else
	    fputs (reg_names[REGNO (x)+1], file);
	}
      else if (GET_CODE (x) == CONST_INT
	       || GET_CODE (x) == CONST_DOUBLE)
	{
	  rtx first, second;

	  split_double (x, &first, &second);
	  fprintf (file, "0x%08lx",
		   (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
	}
      else
	output_operand_lossage ("invalid operand to %%H/%%L code");
      return;
    case 'A' :
      {
	/* Decimal rendering of a floating-point constant.  */
	char str[30];

	gcc_assert (GET_CODE (x) == CONST_DOUBLE
		    && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT);

	real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
	fprintf (file, "%s", str);
	return;
      }
    case 'U' :
      /* Output a load/store with update indicator if appropriate.  */
      if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fputs (".a", file);
	}
      else
	output_operand_lossage ("invalid operand to %%U code");
      return;
    case 'V' :
      /* Output cache bypass indicator for a load/store insn.  Volatile memory
	 refs are defined to use the cache bypass mechanism.  */
      if (GET_CODE (x) == MEM)
	{
	  if (MEM_VOLATILE_P (x))
	    fputs (".di", file);
	}
      else
	output_operand_lossage ("invalid operand to %%V code");
      return;
    case 0 :
      /* Do nothing special.  */
      break;
    default :
      /* Unknown flag.  */
      output_operand_lossage ("invalid operand output code");
    }

  /* Generic printing, reached when CODE is 0 or a `break' above.  */
  switch (GET_CODE (x))
    {
    case REG :
      fputs (reg_names[REGNO (x)], file);
      break;
    case MEM :
      fputc ('[', file);
      if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	output_address (plus_constant (XEXP (XEXP (x, 0), 0),
				       GET_MODE_SIZE (GET_MODE (x))));
      else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	output_address (plus_constant (XEXP (XEXP (x, 0), 0),
				       - GET_MODE_SIZE (GET_MODE (x))));
      else
	output_address (XEXP (x, 0));
      fputc (']', file);
      break;
    case CONST_DOUBLE :
      /* We handle SFmode constants here as output_addr_const doesn't.  */
      if (GET_MODE (x) == SFmode)
	{
	  REAL_VALUE_TYPE d;
	  long l;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  REAL_VALUE_TO_TARGET_SINGLE (d, l);
	  fprintf (file, "0x%08lx", l);
	  break;
	}
      /* Fall through.  Let output_addr_const deal with it.  */
    default :
      output_addr_const (file, x);
      break;
    }
}
1828
/* Print a memory address as an operand to reference that memory location.

   Handles plain registers, symbols, and PLUS addresses of the forms
   reg+const, reg+reg, and reg+symbol.  */

void
arc_print_operand_address (FILE *file, rtx addr)
{
  register rtx base, index = 0;
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case REG :
      fputs (reg_names[REGNO (addr)], file);
      break;
    case SYMBOL_REF :
      /* NB: the %st() branch is disabled (condition is `0 && ...'), so
	 symbols always go through output_addr_const.  */
      if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
	{
	  fprintf (file, "%%st(");
	  output_addr_const (file, addr);
	  fprintf (file, ")");
	}
      else
	output_addr_const (file, addr);
      break;
    case PLUS :
      /* Split into a base register plus either a constant offset or an
	 index (register or symbol).  */
      if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
	offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
	offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
      else
	base = XEXP (addr, 0), index = XEXP (addr, 1);
      gcc_assert (GET_CODE (base) == REG);
      fputs (reg_names[REGNO (base)], file);
      if (index == 0)
	{
	  if (offset != 0)
	    fprintf (file, ",%d", offset);
	}
      else
	{
	  switch (GET_CODE (index))
	    {
	    case REG:
	      fprintf (file, ",%s", reg_names[REGNO (index)]);
	      break;
	    case SYMBOL_REF:
	      fputc (',', file), output_addr_const (file, index);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	}
      break;
    case PRE_INC :
    case PRE_DEC :
      /* We shouldn't get here as we've lost the mode of the memory object
	 (which says how much to inc/dec by).  */
      gcc_unreachable ();
      break;
    default :
      output_addr_const (file, addr);
      break;
    }
}
1892
1893 /* Update compare/branch separation marker. */
1894
1895 static void
1896 record_cc_ref (rtx insn)
1897 {
1898 last_insn_set_cc_p = current_insn_set_cc_p;
1899
1900 switch (get_attr_cond (insn))
1901 {
1902 case COND_SET :
1903 case COND_SET_ZN :
1904 case COND_SET_ZNC :
1905 if (get_attr_length (insn) == 1)
1906 current_insn_set_cc_p = 1;
1907 else
1908 current_insn_set_cc_p = 0;
1909 break;
1910 default :
1911 current_insn_set_cc_p = 0;
1912 break;
1913 }
1914 }
1915 \f
1916 /* Conditional execution support.
1917
1918 This is based on the ARM port but for now is much simpler.
1919
1920 A finite state machine takes care of noticing whether or not instructions
1921 can be conditionally executed, and thus decrease execution time and code
1922 size by deleting branch instructions. The fsm is controlled by
1923 final_prescan_insn, and controls the actions of PRINT_OPERAND. The patterns
1924 in the .md file for the branch insns also have a hand in this. */
1925
/* The states of the fsm controlling condition codes are:
1927 0: normal, do nothing special
1928 1: don't output this insn
1929 2: don't output this insn
1930 3: make insns conditional
1931 4: make insns conditional
1932
1933 State transitions (state->state by whom, under what condition):
1934 0 -> 1 final_prescan_insn, if insn is conditional branch
1935 0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
1936 1 -> 3 branch patterns, after having not output the conditional branch
1937 2 -> 4 branch patterns, after having not output the conditional branch
1938 3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
1939 (the target label has CODE_LABEL_NUMBER equal to
1940 arc_ccfsm_target_label).
1941 4 -> 0 final_prescan_insn, if `target' unconditional branch is reached
1942
1943 If the jump clobbers the conditions then we use states 2 and 4.
1944
1945 A similar thing can be done with conditional return insns.
1946
1947 We also handle separating branches from sets of the condition code.
1948 This is done here because knowledge of the ccfsm state is required,
1949 we may not be outputting the branch. */
1950
1951 void
1952 arc_final_prescan_insn (rtx insn,
1953 rtx *opvec ATTRIBUTE_UNUSED,
1954 int noperands ATTRIBUTE_UNUSED)
1955 {
1956 /* BODY will hold the body of INSN. */
1957 register rtx body = PATTERN (insn);
1958
1959 /* This will be 1 if trying to repeat the trick (i.e.: do the `else' part of
1960 an if/then/else), and things need to be reversed. */
1961 int reverse = 0;
1962
1963 /* If we start with a return insn, we only succeed if we find another one. */
1964 int seeking_return = 0;
1965
1966 /* START_INSN will hold the insn from where we start looking. This is the
1967 first insn after the following code_label if REVERSE is true. */
1968 rtx start_insn = insn;
1969
1970 /* Update compare/branch separation marker. */
1971 record_cc_ref (insn);
1972
1973 /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
1974 We can't do this in macro FINAL_PRESCAN_INSN because its called from
1975 final_scan_insn which has `optimize' as a local. */
1976 if (optimize < 2 || TARGET_NO_COND_EXEC)
1977 return;
1978
1979 /* If in state 4, check if the target branch is reached, in order to
1980 change back to state 0. */
1981 if (arc_ccfsm_state == 4)
1982 {
1983 if (insn == arc_ccfsm_target_insn)
1984 {
1985 arc_ccfsm_target_insn = NULL;
1986 arc_ccfsm_state = 0;
1987 }
1988 return;
1989 }
1990
1991 /* If in state 3, it is possible to repeat the trick, if this insn is an
1992 unconditional branch to a label, and immediately following this branch
1993 is the previous target label which is only used once, and the label this
1994 branch jumps to is not too far off. Or in other words "we've done the
1995 `then' part, see if we can do the `else' part." */
1996 if (arc_ccfsm_state == 3)
1997 {
1998 if (simplejump_p (insn))
1999 {
2000 start_insn = next_nonnote_insn (start_insn);
2001 if (GET_CODE (start_insn) == BARRIER)
2002 {
2003 /* ??? Isn't this always a barrier? */
2004 start_insn = next_nonnote_insn (start_insn);
2005 }
2006 if (GET_CODE (start_insn) == CODE_LABEL
2007 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2008 && LABEL_NUSES (start_insn) == 1)
2009 reverse = TRUE;
2010 else
2011 return;
2012 }
2013 else if (GET_CODE (body) == RETURN)
2014 {
2015 start_insn = next_nonnote_insn (start_insn);
2016 if (GET_CODE (start_insn) == BARRIER)
2017 start_insn = next_nonnote_insn (start_insn);
2018 if (GET_CODE (start_insn) == CODE_LABEL
2019 && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
2020 && LABEL_NUSES (start_insn) == 1)
2021 {
2022 reverse = TRUE;
2023 seeking_return = 1;
2024 }
2025 else
2026 return;
2027 }
2028 else
2029 return;
2030 }
2031
2032 if (GET_CODE (insn) != JUMP_INSN)
2033 return;
2034
2035 /* This jump might be paralleled with a clobber of the condition codes,
2036 the jump should always come first. */
2037 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
2038 body = XVECEXP (body, 0, 0);
2039
2040 if (reverse
2041 || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
2042 && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
2043 {
2044 int insns_skipped = 0, fail = FALSE, succeed = FALSE;
2045 /* Flag which part of the IF_THEN_ELSE is the LABEL_REF. */
2046 int then_not_else = TRUE;
2047 /* Nonzero if next insn must be the target label. */
2048 int next_must_be_target_label_p;
2049 rtx this_insn = start_insn, label = 0;
2050
2051 /* Register the insn jumped to. */
2052 if (reverse)
2053 {
2054 if (!seeking_return)
2055 label = XEXP (SET_SRC (body), 0);
2056 }
2057 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
2058 label = XEXP (XEXP (SET_SRC (body), 1), 0);
2059 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
2060 {
2061 label = XEXP (XEXP (SET_SRC (body), 2), 0);
2062 then_not_else = FALSE;
2063 }
2064 else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
2065 seeking_return = 1;
2066 else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
2067 {
2068 seeking_return = 1;
2069 then_not_else = FALSE;
2070 }
2071 else
2072 gcc_unreachable ();
2073
2074 /* See how many insns this branch skips, and what kind of insns. If all
2075 insns are okay, and the label or unconditional branch to the same
2076 label is not too far away, succeed. */
2077 for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
2078 !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
2079 insns_skipped++)
2080 {
2081 rtx scanbody;
2082
2083 this_insn = next_nonnote_insn (this_insn);
2084 if (!this_insn)
2085 break;
2086
2087 if (next_must_be_target_label_p)
2088 {
2089 if (GET_CODE (this_insn) == BARRIER)
2090 continue;
2091 if (GET_CODE (this_insn) == CODE_LABEL
2092 && this_insn == label)
2093 {
2094 arc_ccfsm_state = 1;
2095 succeed = TRUE;
2096 }
2097 else
2098 fail = TRUE;
2099 break;
2100 }
2101
2102 scanbody = PATTERN (this_insn);
2103
2104 switch (GET_CODE (this_insn))
2105 {
2106 case CODE_LABEL:
2107 /* Succeed if it is the target label, otherwise fail since
2108 control falls in from somewhere else. */
2109 if (this_insn == label)
2110 {
2111 arc_ccfsm_state = 1;
2112 succeed = TRUE;
2113 }
2114 else
2115 fail = TRUE;
2116 break;
2117
2118 case BARRIER:
2119 /* Succeed if the following insn is the target label.
2120 Otherwise fail.
2121 If return insns are used then the last insn in a function
2122 will be a barrier. */
2123 next_must_be_target_label_p = TRUE;
2124 break;
2125
2126 case CALL_INSN:
2127 /* Can handle a call insn if there are no insns after it.
2128 IE: The next "insn" is the target label. We don't have to
2129 worry about delay slots as such insns are SEQUENCE's inside
2130 INSN's. ??? It is possible to handle such insns though. */
2131 if (get_attr_cond (this_insn) == COND_CANUSE)
2132 next_must_be_target_label_p = TRUE;
2133 else
2134 fail = TRUE;
2135 break;
2136
2137 case JUMP_INSN:
2138 /* If this is an unconditional branch to the same label, succeed.
2139 If it is to another label, do nothing. If it is conditional,
2140 fail. */
2141 /* ??? Probably, the test for the SET and the PC are unnecessary. */
2142
2143 if (GET_CODE (scanbody) == SET
2144 && GET_CODE (SET_DEST (scanbody)) == PC)
2145 {
2146 if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
2147 && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
2148 {
2149 arc_ccfsm_state = 2;
2150 succeed = TRUE;
2151 }
2152 else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
2153 fail = TRUE;
2154 }
2155 else if (GET_CODE (scanbody) == RETURN
2156 && seeking_return)
2157 {
2158 arc_ccfsm_state = 2;
2159 succeed = TRUE;
2160 }
2161 else if (GET_CODE (scanbody) == PARALLEL)
2162 {
2163 if (get_attr_cond (this_insn) != COND_CANUSE)
2164 fail = TRUE;
2165 }
2166 break;
2167
2168 case INSN:
2169 /* We can only do this with insns that can use the condition
2170 codes (and don't set them). */
2171 if (GET_CODE (scanbody) == SET
2172 || GET_CODE (scanbody) == PARALLEL)
2173 {
2174 if (get_attr_cond (this_insn) != COND_CANUSE)
2175 fail = TRUE;
2176 }
2177 /* We can't handle other insns like sequences. */
2178 else
2179 fail = TRUE;
2180 break;
2181
2182 default:
2183 break;
2184 }
2185 }
2186
2187 if (succeed)
2188 {
2189 if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
2190 arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
2191 else
2192 {
2193 gcc_assert (seeking_return || arc_ccfsm_state == 2);
2194 while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
2195 {
2196 this_insn = next_nonnote_insn (this_insn);
2197 gcc_assert (!this_insn
2198 || (GET_CODE (this_insn) != BARRIER
2199 && GET_CODE (this_insn) != CODE_LABEL));
2200 }
2201 if (!this_insn)
2202 {
2203 /* Oh dear! we ran off the end, give up. */
2204 extract_insn_cached (insn);
2205 arc_ccfsm_state = 0;
2206 arc_ccfsm_target_insn = NULL;
2207 return;
2208 }
2209 arc_ccfsm_target_insn = this_insn;
2210 }
2211
	  /* If REVERSE is true, ARC_CCFSM_CURRENT_CC needs to be inverted from
	     what it was.  */
2214 if (!reverse)
2215 arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
2216 0));
2217
2218 if (reverse || then_not_else)
2219 arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
2220 }
2221
2222 /* Restore recog_data. Getting the attributes of other insns can
2223 destroy this array, but final.c assumes that it remains intact
2224 across this call. */
2225 extract_insn_cached (insn);
2226 }
2227 }
2228
/* Record that we are currently outputting label NUM with prefix PREFIX.
   If it's the label we're looking for, reset the ccfsm machinery.

   Called from (*targetm.asm_out.internal_label).  */
2233
2234 void
2235 arc_ccfsm_at_label (const char *prefix, int num)
2236 {
2237 if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num
2238 && !strcmp (prefix, "L"))
2239 {
2240 arc_ccfsm_state = 0;
2241 arc_ccfsm_target_insn = NULL_RTX;
2242 }
2243 }
2244
2245 /* See if the current insn, which is a conditional branch, is to be
2246 deleted. */
2247
2248 int
2249 arc_ccfsm_branch_deleted_p (void)
2250 {
2251 if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2)
2252 return 1;
2253 return 0;
2254 }
2255
/* Record that a branch is not output because subsequent insns can be
   conditionalized.  */
2258
2259 void
2260 arc_ccfsm_record_branch_deleted (void)
2261 {
2262 /* Indicate we're conditionalizing insns now. */
2263 arc_ccfsm_state += 2;
2264
2265 /* If the next insn is a subroutine call, we still need a nop between the
2266 cc setter and user. We need to undo the effect of calling record_cc_ref
2267 for the just deleted branch. */
2268 current_insn_set_cc_p = last_insn_set_cc_p;
2269 }
2270 \f
2271 static void
2272 arc_va_start (tree valist, rtx nextarg)
2273 {
2274 /* See arc_setup_incoming_varargs for reasons for this oddity. */
2275 if (crtl->args.info < 8
2276 && (crtl->args.info & 1))
2277 nextarg = plus_constant (nextarg, UNITS_PER_WORD);
2278
2279 std_expand_builtin_va_start (valist, nextarg);
2280 }
2281
2282 /* This is how to output a definition of an internal numbered label where
2283 PREFIX is the class of label and NUM is the number within the class. */
2284
static void
arc_internal_label (FILE *stream, const char *prefix, unsigned long labelno)
{
  /* Notify the ccfsm machinery that this label is being emitted, so it
     can reset itself if this is the target label it is waiting for.  */
  arc_ccfsm_at_label (prefix, labelno);
  default_internal_label (stream, prefix, labelno);
}
2291
2292 /* Worker function for TARGET_ASM_EXTERNAL_LIBCALL. */
2293
static void
arc_external_libcall (rtx fun ATTRIBUTE_UNUSED)
{
  /* Intentionally a no-op; the renaming logic below is disabled.  */
#if 0
  /* On the ARC we want to have libgcc's for multiple cpus in one binary.
     We can't use `assemble_name' here as that will call ASM_OUTPUT_LABELREF
     and we'll get another suffix added on if -mmangle-cpu.
     NOTE(review): this disabled code looks stale -- `FILE', `SYMREF' and
     `arc_mangle_suffix' are not defined in this scope, so it would not
     compile if re-enabled as-is.  */
  if (TARGET_MANGLE_CPU_LIBGCC)
    {
      fprintf (FILE, "\t.rename\t_%s, _%s%s\n",
	       XSTR (SYMREF, 0), XSTR (SYMREF, 0),
	       arc_mangle_suffix);
    }
#endif
}
2309
2310 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2311
2312 static bool
2313 arc_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2314 {
2315 if (AGGREGATE_TYPE_P (type))
2316 return true;
2317 else
2318 {
2319 HOST_WIDE_INT size = int_size_in_bytes (type);
2320 return (size == -1 || size > 8);
2321 }
2322 }
2323
/* For ARC, all aggregates and arguments greater than 8 bytes are
   passed by reference.  */
2326
2327 static bool
2328 arc_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED,
2329 enum machine_mode mode, const_tree type,
2330 bool named ATTRIBUTE_UNUSED)
2331 {
2332 unsigned HOST_WIDE_INT size;
2333
2334 if (type)
2335 {
2336 if (AGGREGATE_TYPE_P (type))
2337 return true;
2338 size = int_size_in_bytes (type);
2339 }
2340 else
2341 size = GET_MODE_SIZE (mode);
2342
2343 return size > 8;
2344 }