static char arc_cpu_name[10] = "";
static const char *arc_cpu_string = arc_cpu_name;
+typedef struct GTY (()) _arc_jli_section
+{
+ const char *name;
+ struct _arc_jli_section *next;
+} arc_jli_section;
+
+static arc_jli_section *arc_jli_sections = NULL;
+
/* Track which regs are set fixed/call saved/call used from the command line.  */
HARD_REG_SET overrideregs;
static tree arc_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
static tree arc_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
+static tree arc_handle_jli_attribute (tree *, tree, tree, int, bool *);
+
+/* Initialized arc_attribute_table to NULL since arc does not have any
+ machine specific supported attributes.  */
/* Functions whose prologue and epilogue are not generated
   by the compiler.  */
{ "naked", 0, 0, true, false, false, false, arc_handle_fndecl_attribute,
NULL },
+ /* Function calls made using the JLI instruction.  The pointer into
+ the JLI table is resolved later; see the usage example after this
+ table.  */
+ { "jli_always", 0, 0, false, true, true, false, NULL, NULL },
+ /* Function calls made using the JLI instruction.  The pointer into
+ the JLI table is given as an input parameter.  */
+ { "jli_fixed", 1, 1, false, true, true, false, arc_handle_jli_attribute,
+ NULL },
{ NULL, 0, 0, false, false, false, false, NULL, NULL }
};
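+
+/* Example use of the JLI attributes (illustrative only; foo and bar
+   are hypothetical functions):
+
+     void foo (void) __attribute__ ((jli_always));
+     void bar (void) __attribute__ ((jli_fixed (4)));
+
+   Calls to foo go through a compiler-generated __jli.foo table entry
+   (emitted by arc_file_end below), while calls to bar use entry 4 of
+   a user-provided JLI table and no entry is generated.  */
+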
static int arc_comp_type_attributes (const_tree, const_tree);
'd'
'D'
'R': Second word
- 'S'
+ 'S': JLI call target (fixed table index or __jli label)
'B': Branch comparison operand - suppress sda reference
'H': Most significant word
'L': Least significant word
output_operand_lossage ("invalid operand to %%R code");
return;
case 'S' :
- /* FIXME: remove %S option. */
- break;
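+ /* Print a JLI call target: the fixed JLI table index for functions
+ carrying the jli_fixed attribute, otherwise the __jli.<symbol>
+ table entry label.  */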
+ if (GET_CODE (x) == SYMBOL_REF
+ && arc_is_jli_call_p (x))
+ {
+ if (SYMBOL_REF_DECL (x))
+ {
+ tree attrs = (TREE_TYPE (SYMBOL_REF_DECL (x)) != error_mark_node
+ ? TYPE_ATTRIBUTES (TREE_TYPE (SYMBOL_REF_DECL (x)))
+ : NULL_TREE);
+ if (lookup_attribute ("jli_fixed", attrs))
+ {
+ fprintf (file, "%ld\t; @",
+ TREE_INT_CST_LOW (TREE_VALUE (TREE_VALUE (attrs))));
+ assemble_name (file, XSTR (x, 0));
+ return;
+ }
+ }
+ fprintf (file, "@__jli.");
+ assemble_name (file, XSTR (x, 0));
+ return;
+ }
+ break;
case 'B' /* Branch or other LIMM ref - must not use sda references. */ :
if (CONSTANT_P (x))
{
fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string);
}
+/* Implement TARGET_ASM_FILE_END.  Output the JLI table entries
+ collected during compilation to the assembly output file.  */
+
+void
+arc_file_end (void)
+{
+ arc_jli_section *sec = arc_jli_sections;
+
+ while (sec != NULL)
+ {
+ fprintf (asm_out_file, "\n");
+ fprintf (asm_out_file, "# JLI entry for function ");
+ assemble_name (asm_out_file, sec->name);
+ fprintf (asm_out_file, "\n\t.section .jlitab, \"axG\", @progbits, "
+ ".jlitab.");
+ assemble_name (asm_out_file, sec->name);
+ fprintf (asm_out_file,", comdat\n");
+
+ fprintf (asm_out_file, "\t.align\t4\n");
+ fprintf (asm_out_file, "__jli.");
+ assemble_name (asm_out_file, sec->name);
+ fprintf (asm_out_file, ":\n\t.weak __jli.");
+ assemble_name (asm_out_file, sec->name);
+ fprintf (asm_out_file, "\n\tb\t@");
+ assemble_name (asm_out_file, sec->name);
+ fprintf (asm_out_file, "\n");
+ sec = sec->next;
+ }
+ file_end_indicate_exec_stack ();
+}
+
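+/* For each function recorded in arc_jli_sections, arc_file_end above
+   emits roughly the following (foo is a hypothetical function name):
+
+       # JLI entry for function foo
+       .section .jlitab, "axG", @progbits, .jlitab.foo, comdat
+       .align 4
+   __jli.foo:
+       .weak __jli.foo
+       b @foo
+
+   i.e. a weak, comdat branch stub collected into the .jlitab section.  */
+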
/* Cost functions. */
/* Compute a (partial) cost for rtx X. Return true if the complete
/* Return true if it is ok to make a tail-call to DECL. */
static bool
-arc_function_ok_for_sibcall (tree decl ATTRIBUTE_UNUSED,
+arc_function_ok_for_sibcall (tree decl,
tree exp ATTRIBUTE_UNUSED)
{
+ tree attrs = NULL_TREE;
+
/* Never tailcall from an ISR routine - it needs a special exit sequence. */
if (ARC_INTERRUPT_P (arc_compute_function_type (cfun)))
return false;
+ if (decl)
+ {
+ attrs = TYPE_ATTRIBUTES (TREE_TYPE (decl));
+
+ if (lookup_attribute ("jli_always", attrs))
+ return false;
+ if (lookup_attribute ("jli_fixed", attrs))
+ return false;
+ }
+
/* Everything else is ok. */
return true;
}
reorg_loops (true, &arc_doloop_hooks);
}
+/* Record the function symbol PAT so that a JLI table entry is emitted
+ for it at the end of the assembly file.  */
+
+static void
+arc_add_jli_section (rtx pat)
+{
+ const char *name;
+ tree attrs;
+ arc_jli_section *sec = arc_jli_sections, *new_section;
+ tree decl;
+
+ if (!pat)
+ return;
+
+ decl = SYMBOL_REF_DECL (pat);
+
+ if (decl)
+ {
+ /* For fixed locations do not generate the JLI table entry; it
+ must be provided by the user, e.g. in a separate assembly file.  */
+ attrs = TYPE_ATTRIBUTES (TREE_TYPE (decl));
+ if (lookup_attribute ("jli_fixed", attrs))
+ return;
+ }
+
+ name = XSTR (pat, 0);
+
+ /* Don't insert the same symbol twice. */
+ while (sec != NULL)
+ {
+ if (strcmp (name, sec->name) == 0)
+ return;
+ sec = sec->next;
+ }
+
+ /* New name, insert it. */
+ new_section = (arc_jli_section *) xmalloc (sizeof (arc_jli_section));
+ gcc_assert (new_section != NULL);
+ new_section->name = name;
+ new_section->next = arc_jli_sections;
+ arc_jli_sections = new_section;
+}
+
+/* Scan all calls and add symbols to be emitted in the jli section if
+ needed. */
+
+static void
+jli_call_scan (void)
+{
+ rtx_insn *insn;
+
+ for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
+ {
+ if (!CALL_P (insn))
+ continue;
+
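+ /* Call insns here are PARALLELs, possibly wrapped in a COND_EXEC,
+ whose first element is either the CALL itself or a SET of the
+ return value from the CALL; unwrap them to reach the call address.  */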
+ rtx pat = PATTERN (insn);
+ if (GET_CODE (pat) == COND_EXEC)
+ pat = COND_EXEC_CODE (pat);
+ pat = XVECEXP (pat, 0, 0);
+ if (GET_CODE (pat) == SET)
+ pat = SET_SRC (pat);
+
+ pat = XEXP (XEXP (pat, 0), 0);
+ if (GET_CODE (pat) == SYMBOL_REF
+ && arc_is_jli_call_p (pat))
+ arc_add_jli_section (pat);
+ }
+}
+
static int arc_reorg_in_progress = 0;
/* ARC's machine specific reorg function. */
arc_reorg_loops ();
workaround_arc_anomaly ();
+ jli_call_scan ();
/* FIXME: should anticipate ccfsm action, generate special patterns for
to-be-deleted branches that have no delay slot and have at least the
/* Try to emit a 16 bit opcode with long immediate. */
ret = 6;
if (short_p && match)
- ADDSI_OUTPUT1 ("add%? %0,%1,%S2");
+ ADDSI_OUTPUT1 ("add%? %0,%1,%2");
/* We have to use a 32 bit opcode, and with a long immediate. */
ret = 8;
- ADDSI_OUTPUT1 (intval < 0 ? "sub%? %0,%1,%n2" : "add%? %0,%1,%S2");
+ ADDSI_OUTPUT1 (intval < 0 ? "sub%? %0,%1,%n2" : "add%? %0,%1,%2");
}
/* Emit code for a commutative_cond_exec instruction with OPERANDS.
return gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
}
+/* Return true if a JLI call should be generated for a call from
+ the current function to the function referenced by the symbol PAT.  */
+
+bool
+arc_is_jli_call_p (rtx pat)
+{
+ tree attrs;
+ tree decl = SYMBOL_REF_DECL (pat);
+
+ /* If it is not a well defined public function then return false. */
+ if (!decl || !SYMBOL_REF_FUNCTION_P (pat) || !TREE_PUBLIC (decl))
+ return false;
+
+ attrs = TYPE_ATTRIBUTES (TREE_TYPE (decl));
+ if (lookup_attribute ("jli_always", attrs))
+ return true;
+
+ if (lookup_attribute ("jli_fixed", attrs))
+ return true;
+
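+ /* Without a JLI attribute, still use the JLI table when the user
+ has requested that all calls go through it (TARGET_JLI_ALWAYS).  */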
+ return TARGET_JLI_ALWAYS;
+}
+
+/* Handle a "jli" attribute; arguments as in struct
+ attribute_spec.handler.  */
+
+static tree
+arc_handle_jli_attribute (tree *node ATTRIBUTE_UNUSED,
+ tree name, tree args, int,
+ bool *no_add_attrs)
+{
+ if (!TARGET_V2)
+ {
+ warning (OPT_Wattributes,
+ "%qE attribute only valid for ARCv2 architecture",
+ name);
+ *no_add_attrs = true;
+ }
+
+ if (args == NULL_TREE)
+ {
+ warning (OPT_Wattributes,
+ "argument of %qE attribute is missing",
+ name);
+ *no_add_attrs = true;
+ }
+ else
+ {
+ if (TREE_CODE (TREE_VALUE (args)) == NON_LVALUE_EXPR)
+ TREE_VALUE (args) = TREE_OPERAND (TREE_VALUE (args), 0);
+ tree arg = TREE_VALUE (args);
+ if (TREE_CODE (arg) != INTEGER_CST)
+ {
+ warning (0, "%qE attribute allows only an integer constant argument",
+ name);
+ *no_add_attrs = true;
+ }
+ /* FIXME! add range check. TREE_INT_CST_LOW (arg) */
+ }
+ return NULL_TREE;
+}
+
/* Implement TARGET_USE_ANCHORS_FOR_SYMBOL_P. We don't want to use
anchors for small data: the GP register acts as an anchor in that
case. We also don't want to use them for PC-relative accesses,
mov%? %0,%1
mov%? %0,%1
mov%? %0,%1
- mov%? %0,%S1
+ mov%? %0,%1
ldb%? %0,%1%&
stb%? %1,%0%&
ldb%? %0,%1%&
mov%? %0,%1
mov%? %0,%1
mov%? %0,%1
- mov%? %0,%S1%&
- mov%? %0,%S1
- mov%? %0,%S1
+ mov%? %0,%1%&
+ mov%? %0,%1
+ mov%? %0,%1
ld%_%? %0,%1%&
st%_%? %1,%0%&
xld%_%U1 %0,%1
xst%_%U0 %1,%0
st%_%U0%V0 %1,%0
st%_%U0%V0 %1,%0
- st%_%U0%V0 %S1,%0
- st%_%U0%V0 %S1,%0"
+ st%_%U0%V0 %1,%0
+ st%_%U0%V0 %1,%0"
[(set_attr "type" "move,move,move,move,move,move,move,move,move,move,move,load,store,load,load,store,store,store,store,store")
(set_attr "iscompact" "maybe,maybe,maybe,true,true,false,false,false,maybe_limm,maybe_limm,false,true,true,false,false,false,false,false,false,false")
(set_attr "predicable" "yes,no,yes,no,no,yes,no,yes,yes,yes,yes,no,no,no,no,no,no,no,no,no")
movh.cl %0,%L1>>16 ;9
* return INTVAL (operands[1]) & 0xffffff ? \"movbi.cl %0,%1 >> %p1,%p1,8;10\" : \"movbi.cl %0,%L1 >> 24,24,8;10\";
mov%? %0,%1 ;11
- add %0,%S1 ;12
+ add %0,%1 ;12
add %0,pcl,%1@pcl ;13
mov%? %0,%1 ;14
mov%? %0,%1 ;15
(set (match_operand:SI 0 "register_operand" "=w,w,w")
(match_dup 1))]
""
- "mov%?.f %0,%S1"
+ "mov%?.f %0,%1"
; splitting to 'tst' allows short insns and combination into brcc.
"reload_completed && operands_match_p (operands[0], operands[1])"
[(set (match_dup 2) (match_dup 3))]
(set (match_operand:SI 0 "dest_reg_operand" "=r,r")
(plus:SI (match_dup 1) (match_dup 2)))]
""
- "ldb.a%V4 %3,[%0,%S2]"
+ "ldb.a%V4 %3,[%0,%2]"
[(set_attr "type" "load,load")
(set_attr "length" "4,8")])
(set (match_operand:SI 0 "dest_reg_operand" "=r,r")
(plus:SI (match_dup 1) (match_dup 2)))]
""
- "ldb.a%V4 %3,[%0,%S2]"
+ "ldb.a%V4 %3,[%0,%2]"
[(set_attr "type" "load,load")
(set_attr "length" "4,8")])
(set (match_operand:SI 0 "dest_reg_operand" "=r,r")
(plus:SI (match_dup 1) (match_dup 2)))]
""
- "ldb.x.a%V4 %3,[%0,%S2]"
+ "ldb.x.a%V4 %3,[%0,%2]"
[(set_attr "type" "load,load")
(set_attr "length" "4,8")])
(set (match_operand:SI 0 "dest_reg_operand" "=w,w")
(plus:SI (match_dup 1) (match_dup 2)))]
""
- "ld%_.a%V4 %3,[%0,%S2]"
+ "ld%_.a%V4 %3,[%0,%2]"
[(set_attr "type" "load,load")
(set_attr "length" "4,8")])
(set (match_operand:SI 0 "dest_reg_operand" "=r,r")
(plus:SI (match_dup 1) (match_dup 2)))]
""
- "ld%_.a%V4 %3,[%0,%S2]"
+ "ld%_.a%V4 %3,[%0,%2]"
[(set_attr "type" "load,load")
(set_attr "length" "4,8")])
(set (match_operand:SI 0 "dest_reg_operand" "=w,w")
(plus:SI (match_dup 1) (match_dup 2)))]
""
- "ld%_.x.a%V4 %3,[%0,%S2]"
+ "ld%_.x.a%V4 %3,[%0,%2]"
[(set_attr "type" "load,load")
(set_attr "length" "4,8")])
(set (match_operand:SI 0 "dest_reg_operand" "=w,w")
(plus:SI (match_dup 1) (match_dup 2)))]
""
- "ld.a%V4 %3,[%0,%S2]"
+ "ld.a%V4 %3,[%0,%2]"
[(set_attr "type" "load,load")
(set_attr "length" "4,8")])
(set (match_operand:SI 0 "dest_reg_operand" "=w,w")
(plus:SI (match_dup 1) (match_dup 2)))]
""
- "ld.a%V4 %3,[%0,%S2]"
+ "ld.a%V4 %3,[%0,%2]"
[(set_attr "type" "load,load")
(set_attr "length" "4,8")])
&& rtx_equal_p (operands[1], constm1_rtx)
&& GET_CODE (operands[3]) == LTU)
return "sbc.cs %0,%0,%0";
- return "mov.%d3 %0,%S1";
+ return "mov.%d3 %0,%1";
}
[(set_attr "type" "cmove,cmove")
(set_attr "length" "4,8")])
"@
bset%? %0,%1,%2 ;;peep2, constr 1
bset %0,%1,%2 ;;peep2, constr 2
- bset %0,%S1,%2 ;;peep2, constr 3"
+ bset %0,%1,%2 ;;peep2, constr 3"
[(set_attr "length" "4,4,8")
(set_attr "predicable" "yes,no,no")
(set_attr "cond" "canuse,nocond,nocond")]
"@
bxor%? %0,%1,%2
bxor %0,%1,%2
- bxor %0,%S1,%2"
+ bxor %0,%1,%2"
[(set_attr "length" "4,4,8")
(set_attr "predicable" "yes,no,no")
(set_attr "cond" "canuse,nocond,nocond")]
"@
bclr%? %0,%1,%2
bclr %0,%1,%2
- bclr %0,%S1,%2"
+ bclr %0,%1,%2"
[(set_attr "length" "4,4,8")
(set_attr "predicable" "yes,no,no")
(set_attr "cond" "canuse,nocond,nocond")]
(const_int -1))))]
""
"@
- bmsk%? %0,%S1,%2
+ bmsk%? %0,%1,%2
bmsk %0,%1,%2
- bmsk %0,%S1,%2"
+ bmsk %0,%1,%2"
[(set_attr "length" "4,4,8")
(set_attr "predicable" "yes,no,no")
(set_attr "cond" "canuse,nocond,nocond")]
bic%? %0, %2, %1%& ;;constraint 0
bic%? %0,%2,%1 ;;constraint 1
bic %0,%2,%1 ;;constraint 2, FIXME: will it ever get generated ???
- bic%? %0,%2,%S1 ;;constraint 3, FIXME: will it ever get generated ???
+ bic%? %0,%2,%1 ;;constraint 3, FIXME: will it ever get generated ???
bic %0,%2,%1 ;;constraint 4
- bic %0,%2,%S1 ;;constraint 5, FIXME: will it ever get generated ???
- bic %0,%S2,%1 ;;constraint 6"
+ bic %0,%2,%1 ;;constraint 5, FIXME: will it ever get generated ???
+ bic %0,%2,%1 ;;constraint 6"
[(set_attr "length" "*,4,4,8,4,8,8")
(set_attr "iscompact" "maybe, false, false, false, false, false, false")
(set_attr "predicable" "no,yes,no,yes,no,no,no")
(compare:CC_C (match_operand:SI 0 "register_operand" "Rcqq,Rcqq, h, c,Rcqq, c")
(match_operand:SI 1 "nonmemory_operand" "cO, hO,Cm1,cI, Cal,Cal")))]
""
- "cmp%? %0,%S1%&"
+ "cmp%? %0,%1%&"
[(set_attr "type" "compare")
(set_attr "iscompact" "true,true,true,false,true_limm,false")
(set_attr "cond" "set")
* current_insn_predicate = 0; return \"mov%?.ne %0,%1\";
* current_insn_predicate = 0; return \"mov%?.ne %0,%1\";
mov.ne %0,%1
- mov.ne %0,%S1"
+ mov.ne %0,%1"
[(set_attr "type" "cmove")
(set_attr "iscompact" "true,true,true_limm,false,false")
(set_attr "length" "2,2,6,4,8")
(set (match_operand:SI 0 "dest_reg_operand" "=w,w")
(match_operand:SI 1 "nonmemory_operand" "LRac,?Cal")))]
""
- "mov.%d3 %0,%S1"
+ "mov.%d3 %0,%1"
[(set_attr "type" "cmove")
(set_attr "length" "4,8")])
}
")
-
; Rcq, which is used in alternative 0, checks for conditional execution.
; At instruction output time, if it doesn't match and we end up with
; alternative 1 ("q"), that means that we can't use the short form.
(define_insn "*call_i"
[(call (mem:SI (match_operand:SI 0
- "call_address_operand" "Rcq,q,c,Cbp,Cbr,L,I,Cal"))
+ "call_address_operand" "Rcq,q,c,Cji,Cbp,Cbr,L,I,Cal"))
(match_operand 1 "" ""))
(clobber (reg:SI 31))]
""
jl%!%* [%0]%&
jl%!%* [%0]%&
jl%!%* [%0]
+ jli_s %S0
bl%!%* %P0
bl%!%* %P0
- jl%!%* %S0
- jl%* %S0
- jl%! %S0"
- [(set_attr "type" "call,call,call,call,call,call,call,call_no_delay_slot")
- (set_attr "iscompact" "maybe,false,*,*,*,*,*,*")
- (set_attr "predicable" "no,no,yes,yes,no,yes,no,yes")
- (set_attr "length" "*,*,4,4,4,4,4,8")])
+ jl%!%* %0
+ jl%* %0
+ jl%! %0"
+ [(set_attr "type" "call,call,call,call_no_delay_slot,call,call,call,call,call_no_delay_slot")
+ (set_attr "iscompact" "maybe,false,*,true,*,*,*,*,*")
+ (set_attr "predicable" "no,no,yes,no,yes,no,yes,no,yes")
+ (set_attr "length" "*,*,4,2,4,4,4,4,8")])
(define_expand "call_value"
;; operand 2 is stack_size_rtx
XEXP (operands[1], 0) = force_reg (Pmode, callee);
}")
-
; Rcq, which is used in alternative 0, checks for conditional execution.
; At instruction output time, if it doesn't match and we end up with
; alternative 1 ("q"), that means that we can't use the short form.
(define_insn "*call_value_i"
- [(set (match_operand 0 "dest_reg_operand" "=Rcq,q,w, w, w,w,w, w")
+ [(set (match_operand 0 "dest_reg_operand" "=Rcq,q,w, w, w, w,w,w, w")
(call (mem:SI (match_operand:SI 1
- "call_address_operand" "Rcq,q,c,Cbp,Cbr,L,I,Cal"))
+ "call_address_operand" "Rcq,q,c,Cji,Cbp,Cbr,L,I,Cal"))
(match_operand 2 "" "")))
(clobber (reg:SI 31))]
""
jl%!%* [%1]%&
jl%!%* [%1]%&
jl%!%* [%1]
+ jli_s %S1
bl%!%* %P1;1
bl%!%* %P1;1
- jl%!%* %S1
- jl%* %S1
- jl%! %S1"
- [(set_attr "type" "call,call,call,call,call,call,call,call_no_delay_slot")
- (set_attr "iscompact" "maybe,false,*,*,*,*,*,*")
- (set_attr "predicable" "no,no,yes,yes,no,yes,no,yes")
- (set_attr "length" "*,*,4,4,4,4,4,8")])
+ jl%!%* %1
+ jl%* %1
+ jl%! %1"
+ [(set_attr "type" "call,call,call,call_no_delay_slot,call,call,call,call,call_no_delay_slot")
+ (set_attr "iscompact" "maybe,false,*,true,*,*,*,*,*")
+ (set_attr "predicable" "no,no,yes,no,yes,no,yes,no,yes")
+ (set_attr "length" "*,*,4,2,4,4,4,4,8")])
; There is a bl_s instruction (16 bit opcode branch-and-link), but we can't
; use it for lack of inter-procedural branch shortening.
"TARGET_NORM"
"@
norm \t%0, %1
- norm \t%0, %S1"
+ norm \t%0, %1"
[(set_attr "length" "4,8")
(set_attr "type" "two_cycle_core,two_cycle_core")])
"TARGET_NORM"
"@
norm.f\t%0, %1
- norm.f\t%0, %S1"
+ norm.f\t%0, %1"
[(set_attr "length" "4,8")
(set_attr "type" "two_cycle_core,two_cycle_core")])
"TARGET_NORM"
"@
norm%_ \t%0, %1
- norm%_ \t%0, %S1"
+ norm%_ \t%0, %1"
[(set_attr "length" "4,8")
(set_attr "type" "two_cycle_core,two_cycle_core")])
"TARGET_SWAP"
"@
swap \t%0, %1
- swap \t%0, %S1
+ swap \t%0, %1
swap \t%0, %1"
[(set_attr "length" "4,8,4")
(set_attr "type" "two_cycle_core,two_cycle_core,two_cycle_core")])
"TARGET_ARC700 || TARGET_EA_SET"
"@
divaw \t%0, %1, %2
- divaw \t%0, %S1, %2
- divaw \t%0, %1, %S2"
+ divaw \t%0, %1, %2
+ divaw \t%0, %1, %2"
[(set_attr "length" "4,8,8")
(set_attr "type" "divaw,divaw,divaw")])
"@
flag%? %0
flag %0
- flag%? %S0"
+ flag%? %0"
[(set_attr "length" "4,4,8")
(set_attr "type" "misc,misc,misc")
(set_attr "predicable" "yes,no,yes")
(match_operand:SI 1 "general_operand" "Ir,I,HCal,r")]
VUNSPEC_ARC_SR)]
""
- "sr\t%S0, [%1]"
+ "sr\t%0, [%1]"
[(set_attr "length" "8,4,8,4")
(set_attr "type" "sr,sr,sr,sr")])
;; ??? Should this use arc_output_libcall and set is_sfunc?
(define_insn "*millicode_thunk_st"
[(match_parallel 0 "millicode_store_operation"
- [(set (mem:SI (reg:SI SP_REG)) (reg:SI 13))])]
+ [(set (mem:SI (reg:SI SP_REG)) (reg:SI 13))])]
""
{
output_asm_insn ("bl%* __st_r13_to_%0",
(define_insn "*millicode_thunk_ld"
[(match_parallel 0 "millicode_load_clob_operation"
- [(set (reg:SI 13) (mem:SI (reg:SI SP_REG)))])]
+ [(set (reg:SI 13) (mem:SI (reg:SI SP_REG)))])]
""
{
output_asm_insn ("bl%* __ld_r13_to_%0",
; the sibthunk restores blink, so we use the return rtx.
(define_insn "*millicode_sibthunk_ld"
[(match_parallel 0 "millicode_load_operation"
- [(return)
- (set (reg:SI SP_REG) (plus:SI (reg:SI SP_REG) (reg:SI 12)))
- (set (reg:SI 13) (mem:SI (reg:SI SP_REG)))])]
+ [(return)
+ (set (reg:SI SP_REG) (plus:SI (reg:SI SP_REG) (reg:SI 12)))
+ (set (reg:SI 13) (mem:SI (reg:SI SP_REG)))])]
""
{
output_asm_insn ("b%* __ld_r13_to_%0_ret",
"@
kflag%? %0
kflag %0
- kflag%? %S0"
+ kflag%? %0"
[(set_attr "length" "4,4,8")
(set_attr "type" "misc,misc,misc")
(set_attr "predicable" "yes,no,yes")
"TARGET_NORM && TARGET_V2"
"@
ffs \t%0, %1
- ffs \t%0, %S1"
+ ffs \t%0, %1"
[(set_attr "length" "4,8")
(set_attr "type" "two_cycle_core,two_cycle_core")])
"TARGET_NORM && TARGET_V2"
"@
ffs.f\t%0, %1
- ffs.f\t%0, %S1"
+ ffs.f\t%0, %1"
[(set_attr "length" "4,8")
(set_attr "type" "two_cycle_core,two_cycle_core")])
"TARGET_NORM && TARGET_V2"
"@
fls \t%0, %1
- fls \t%0, %S1"
+ fls \t%0, %1"
[(set_attr "length" "4,8")
(set_attr "type" "two_cycle_core,two_cycle_core")])