1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright 1991, 1993, 1994, 1995, 1996, 1996, 1997, 1998, 1999, 2000,
3 ;; 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 ;; Free Software Foundation, Inc.
5 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
6 ;; and Martin Simmons (@harleqn.co.uk).
7 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
9 ;; This file is part of GCC.
11 ;; GCC is free software; you can redistribute it and/or modify it
12 ;; under the terms of the GNU General Public License as published
13 ;; by the Free Software Foundation; either version 3, or (at your
14 ;; option) any later version.
16 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
17 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
18 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
19 ;; License for more details.
21 ;; You should have received a copy of the GNU General Public License
22 ;; along with GCC; see the file COPYING3. If not see
23 ;; <http://www.gnu.org/licenses/>.
25 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
28 ;;---------------------------------------------------------------------------
33 [(R0_REGNUM 0) ; First CORE register
34 (IP_REGNUM 12) ; Scratch register
35 (SP_REGNUM 13) ; Stack pointer
36 (LR_REGNUM 14) ; Return address register
37 (PC_REGNUM 15) ; Program counter
38 (CC_REGNUM 24) ; Condition code pseudo register
39 (LAST_ARM_REGNUM 15) ;
40 (FPA_F0_REGNUM 16) ; FIRST_FPA_REGNUM
41 (FPA_F7_REGNUM 23) ; LAST_FPA_REGNUM
44 ;; 3rd operand to select_dominance_cc_mode
53 ;; Note: sin and cos are no-longer used.
54 ;; Unspec constants for Neon are defined in neon.md.
57 [(UNSPEC_SIN 0) ; `sin' operation (MODE_FLOAT):
58 ; operand 0 is the result,
59 ; operand 1 the parameter.
60 (UNPSEC_COS 1) ; `cos' operation (MODE_FLOAT):
61 ; operand 0 is the result,
62 ; operand 1 the parameter.
63 (UNSPEC_PUSH_MULT 2) ; `push multiple' operation:
64 ; operand 0 is the first register,
65 ; subsequent registers are in parallel (use ...)
67 (UNSPEC_PIC_SYM 3) ; A symbol that has been treated properly for pic
68 ; usage, that is, we will add the pic_register
69 ; value to it before trying to dereference it.
70 (UNSPEC_PIC_BASE 4) ; Adding the PC value to the offset to the
71 ; GLOBAL_OFFSET_TABLE. The operation is fully
72 ; described by the RTL but must be wrapped to
73 ; prevent combine from trying to rip it apart.
74 (UNSPEC_PRLG_STK 5) ; A special barrier that prevents frame accesses
75 ; being scheduled before the stack adjustment insn.
76 (UNSPEC_PROLOGUE_USE 6) ; As USE insns are not meaningful after reload,
77 ; this unspec is used to prevent the deletion of
78 ; instructions setting registers for EH handling
79 ; and stack frame generation. Operand 0 is the
81 (UNSPEC_CHECK_ARCH 7); Set CCs to indicate 26-bit or 32-bit mode.
82 (UNSPEC_WSHUFH 8) ; Used by the intrinsic form of the iWMMXt WSHUFH instruction.
83 (UNSPEC_WACC 9) ; Used by the intrinsic form of the iWMMXt WACC instruction.
84 (UNSPEC_TMOVMSK 10) ; Used by the intrinsic form of the iWMMXt TMOVMSK instruction.
85 (UNSPEC_WSAD 11) ; Used by the intrinsic form of the iWMMXt WSAD instruction.
86 (UNSPEC_WSADZ 12) ; Used by the intrinsic form of the iWMMXt WSADZ instruction.
87 (UNSPEC_WMACS 13) ; Used by the intrinsic form of the iWMMXt WMACS instruction.
88 (UNSPEC_WMACU 14) ; Used by the intrinsic form of the iWMMXt WMACU instruction.
89 (UNSPEC_WMACSZ 15) ; Used by the intrinsic form of the iWMMXt WMACSZ instruction.
90 (UNSPEC_WMACUZ 16) ; Used by the intrinsic form of the iWMMXt WMACUZ instruction.
91 (UNSPEC_CLRDI 17) ; Used by the intrinsic form of the iWMMXt CLRDI instruction.
92 (UNSPEC_WMADDS 18) ; Used by the intrinsic form of the iWMMXt WMADDS instruction.
93 (UNSPEC_WMADDU 19) ; Used by the intrinsic form of the iWMMXt WMADDU instruction.
94 (UNSPEC_TLS 20) ; A symbol that has been treated properly for TLS usage.
95 (UNSPEC_PIC_LABEL 21) ; A label used for PIC access that does not appear in the
97 (UNSPEC_STACK_ALIGN 22) ; Doubleword aligned stack pointer. Used to
98 ; generate correct unwind information.
99 (UNSPEC_PIC_OFFSET 23) ; A symbolic 12-bit OFFSET that has been treated
100 ; correctly for PIC usage.
104 ;; UNSPEC_VOLATILE Usage:
107 [(VUNSPEC_BLOCKAGE 0) ; `blockage' insn to prevent scheduling across an
109 (VUNSPEC_EPILOGUE 1) ; `epilogue' insn, used to represent any part of the
110 ; instruction epilogue sequence that isn't expanded
111 ; into normal RTL. Used for both normal and sibcall
113 (VUNSPEC_ALIGN 2) ; `align' insn. Used at the head of a minipool table
114 ; for inlined constants.
115 (VUNSPEC_POOL_END 3) ; `end-of-table'. Used to mark the end of a minipool
117 (VUNSPEC_POOL_1 4) ; `pool-entry(1)'. An entry in the constant pool for
119 (VUNSPEC_POOL_2 5) ; `pool-entry(2)'. An entry in the constant pool for
121 (VUNSPEC_POOL_4 6) ; `pool-entry(4)'. An entry in the constant pool for
123 (VUNSPEC_POOL_8 7) ; `pool-entry(8)'. An entry in the constant pool for
125 (VUNSPEC_POOL_16 8) ; `pool-entry(16)'. An entry in the constant pool for
127 (VUNSPEC_TMRC 9) ; Used by the iWMMXt TMRC instruction.
128 (VUNSPEC_TMCR 10) ; Used by the iWMMXt TMCR instruction.
129 (VUNSPEC_ALIGN8 11) ; 8-byte alignment version of VUNSPEC_ALIGN
130 (VUNSPEC_WCMP_EQ 12) ; Used by the iWMMXt WCMPEQ instructions
131 (VUNSPEC_WCMP_GTU 13) ; Used by the iWMMXt WCMPGTU instructions
132 (VUNSPEC_WCMP_GT 14) ; Used by the iwMMXT WCMPGT instructions
133 (VUNSPEC_EH_RETURN 20); Use to override the return address for exception
138 ;;---------------------------------------------------------------------------
141 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
142 ; generating ARM code. This is used to control the length of some insn
143 ; patterns that share the same RTL in both ARM and Thumb code.
144 (define_attr "is_thumb" "no,yes" (const (symbol_ref "thumb_code")))
146 ; IS_STRONGARM is set to 'yes' when compiling for StrongARM, it affects
147 ; scheduling decisions for the load unit and the multiplier.
148 (define_attr "is_strongarm" "no,yes" (const (symbol_ref "arm_tune_strongarm")))
150 ; IS_XSCALE is set to 'yes' when compiling for XScale.
151 (define_attr "is_xscale" "no,yes" (const (symbol_ref "arm_tune_xscale")))
153 ;; Operand number of an input operand that is shifted. Zero if the
154 ;; given instruction does not shift one of its input operands.
155 (define_attr "shift" "" (const_int 0))
157 ; Floating Point Unit. If we only have floating point emulation, then there
158 ; is no point in scheduling the floating point insns. (Well, for best
159 ; performance we should try and group them together).
160 (define_attr "fpu" "none,fpa,fpe2,fpe3,maverick,vfp"
161 (const (symbol_ref "arm_fpu_attr")))
163 ; LENGTH of an instruction (in bytes)
164 (define_attr "length" "" (const_int 4))
166 ; POOL_RANGE is how far away from a constant pool entry that this insn
167 ; can be placed. If the distance is zero, then this insn will never
168 ; reference the pool.
169 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
170 ; before its address.
171 (define_attr "pool_range" "" (const_int 0))
172 (define_attr "neg_pool_range" "" (const_int 0))
174 ; An assembler sequence may clobber the condition codes without us knowing.
175 ; If such an insn references the pool, then we have no way of knowing how,
176 ; so use the most conservative value for pool_range.
177 (define_asm_attributes
178 [(set_attr "conds" "clob")
179 (set_attr "length" "4")
180 (set_attr "pool_range" "250")])
182 ;; The instruction used to implement a particular pattern. This
183 ;; information is used by pipeline descriptions to provide accurate
184 ;; scheduling information.
187 "mov,mvn,smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals,smlawy,smuad,smuadx,smlad,smladx,smusd,smusdx,smlsd,smlsdx,smmul,smmulr,smmla,umaal,smlald,smlsld,clz,mrs,msr,xtab,sdiv,udiv,other"
188 (const_string "other"))
190 ; TYPE attribute is used to detect floating point instructions which, if
191 ; running on a co-processor can run in parallel with other, basic instructions
192 ; If write-buffer scheduling is enabled then it can also be used in the
193 ; scheduling of writes.
195 ; Classification of each insn
196 ; alu any alu instruction that doesn't hit memory or fp
197 ; regs or have a shifted source operand
198 ; alu_shift any data instruction that doesn't hit memory or fp
199 ; regs, but has a source operand shifted by a constant
200 ; alu_shift_reg any data instruction that doesn't hit memory or fp
201 ; regs, but has a source operand shifted by a register value
202 ; mult a multiply instruction
203 ; block blockage insn, this blocks all functional units
204 ; float a floating point arithmetic operation (subject to expansion)
205 ; fdivd DFmode floating point division
206 ; fdivs SFmode floating point division
207 ; fmul Floating point multiply
208 ; ffmul Fast floating point multiply
209 ; farith Floating point arithmetic (4 cycle)
210 ; ffarith Fast floating point arithmetic (2 cycle)
211 ; float_em a floating point arithmetic operation that is normally emulated
212 ; even on a machine with an fpa.
213 ; f_load a floating point load from memory
214 ; f_store a floating point store to memory
215 ; f_load[sd] single/double load from memory
216 ; f_store[sd] single/double store to memory
217 ; f_flag a transfer of co-processor flags to the CPSR
218 ; f_mem_r a transfer of a floating point register to a real reg via mem
219 ; r_mem_f the reverse of f_mem_r
220 ; f_2_r fast transfer float to arm (no memory needed)
221 ; r_2_f fast transfer arm to float
222 ; f_cvt convert floating<->integral
224 ; call a subroutine call
225 ; load_byte load byte(s) from memory to arm registers
226 ; load1 load 1 word from memory to arm registers
227 ; load2 load 2 words from memory to arm registers
228 ; load3 load 3 words from memory to arm registers
229 ; load4 load 4 words from memory to arm registers
230 ; store store 1 word to memory from arm registers
231 ; store2 store 2 words
232 ; store3 store 3 words
233 ; store4 store 4 (or more) words
234 ; Additions for Cirrus Maverick co-processor:
235 ; mav_farith Floating point arithmetic (4 cycle)
236 ; mav_dmult Double multiplies (7 cycle)
240 "alu,alu_shift,alu_shift_reg,mult,block,float,fdivx,fdivd,fdivs,fmul,fmuls,fmuld,fmacs,fmacd,ffmul,farith,ffarith,f_flag,float_em,f_load,f_store,f_loads,f_loadd,f_stores,f_stored,f_mem_r,r_mem_f,f_2_r,r_2_f,f_cvt,branch,call,load_byte,load1,load2,load3,load4,store1,store2,store3,store4,mav_farith,mav_dmult"
242 (eq_attr "insn" "smulxy,smlaxy,smlalxy,smulwy,smlawx,mul,muls,mla,mlas,umull,umulls,umlal,umlals,smull,smulls,smlal,smlals")
243 (const_string "mult")
244 (const_string "alu")))
246 ; Load scheduling, set from the arm_ld_sched variable
247 ; initialized by arm_override_options()
248 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
250 ; condition codes: this one is used by final_prescan_insn to speed up
251 ; conditionalizing instructions. It saves having to scan the rtl to see if
252 ; it uses or alters the condition codes.
254 ; USE means that the condition codes are used by the insn in the process of
255 ; outputting code, this means (at present) that we can't use the insn in
258 ; SET means that the purpose of the insn is to set the condition codes in a
259 ; well defined manner.
261 ; CLOB means that the condition codes are altered in an undefined manner, if
262 ; they are altered at all
264 ; JUMP_CLOB is used when the condition cannot be represented by a single
265 ; instruction (UNEQ and LTGT). These cannot be predicated.
267 ; NOCOND means that the condition codes are neither altered nor affect the
268 ; output of this insn
270 (define_attr "conds" "use,set,clob,jump_clob,nocond"
271 (if_then_else (eq_attr "type" "call")
272 (const_string "clob")
273 (const_string "nocond")))
275 ; Predicable means that the insn can be conditionally executed based on
276 ; an automatically added predicate (additional patterns are generated by
277 ; gen...). We default to 'no' because no Thumb patterns match this rule
278 ; and not all ARM patterns do.
279 (define_attr "predicable" "no,yes" (const_string "no"))
281 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
282 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
283 ; suffer blockages enough to warrant modelling this (and it can adversely
284 ; affect the schedule).
285 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
287 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
288 ; to stall the processor. Used with model_wbuf above.
289 (define_attr "write_conflict" "no,yes"
290 (if_then_else (eq_attr "type"
291 "block,float_em,f_load,f_store,f_mem_r,r_mem_f,call,load1")
293 (const_string "no")))
295 ; Classify the insns into those that take one cycle and those that take more
296 ; than one on the main cpu execution unit.
297 (define_attr "core_cycles" "single,multi"
298 (if_then_else (eq_attr "type"
299 "alu,alu_shift,float,fdivx,fdivd,fdivs,fmul,ffmul,farith,ffarith")
300 (const_string "single")
301 (const_string "multi")))
303 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
304 ;; distant label. Only applicable to Thumb code.
305 (define_attr "far_jump" "yes,no" (const_string "no"))
308 ;; The number of machine instructions this pattern expands to.
309 ;; Used for Thumb-2 conditional execution.
310 (define_attr "ce_count" "" (const_int 1))
312 ;;---------------------------------------------------------------------------
315 ; A list of modes that are exactly 64 bits in size. We use this to expand
316 ; some splits that are the same for all modes when operating on ARM
318 (define_mode_iterator ANY64 [DI DF V8QI V4HI V2SI V2SF])
320 ;;---------------------------------------------------------------------------
323 (include "predicates.md")
324 (include "constraints.md")
326 ;;---------------------------------------------------------------------------
327 ;; Pipeline descriptions
329 ;; Processor type. This is created automatically from arm-cores.def.
330 (include "arm-tune.md")
332 ;; True if the generic scheduling description should be used.
334 (define_attr "generic_sched" "yes,no"
336 (eq_attr "tune" "arm926ejs,arm1020e,arm1026ejs,arm1136js,arm1136jfs,cortexa8,cortexr4")
338 (const_string "yes"))))
340 (define_attr "generic_vfp" "yes,no"
342 (and (eq_attr "fpu" "vfp")
343 (eq_attr "tune" "!arm1020e,arm1022e,cortexa8"))
345 (const_string "no"))))
347 (include "arm-generic.md")
348 (include "arm926ejs.md")
349 (include "arm1020e.md")
350 (include "arm1026ejs.md")
351 (include "arm1136jfs.md")
352 (include "cortex-a8.md")
353 (include "cortex-r4.md")
357 ;;---------------------------------------------------------------------------
362 ;; Note: For DImode insns, there is normally no reason why operands should
363 ;; not be in the same register, what we don't want is for something being
364 ;; written to partially overlap something that is an input.
365 ;; Cirrus 64bit additions should not be split because we have a native
366 ;; 64bit addition instructions.
368 (define_expand "adddi3"
370 [(set (match_operand:DI 0 "s_register_operand" "")
371 (plus:DI (match_operand:DI 1 "s_register_operand" "")
372 (match_operand:DI 2 "s_register_operand" "")))
373 (clobber (reg:CC CC_REGNUM))])]
376 if (TARGET_HARD_FLOAT && TARGET_MAVERICK)
378 if (!cirrus_fp_register (operands[0], DImode))
379 operands[0] = force_reg (DImode, operands[0]);
380 if (!cirrus_fp_register (operands[1], DImode))
381 operands[1] = force_reg (DImode, operands[1]);
382 emit_insn (gen_cirrus_adddi3 (operands[0], operands[1], operands[2]));
388 if (GET_CODE (operands[1]) != REG)
389 operands[1] = force_reg (SImode, operands[1]);
390 if (GET_CODE (operands[2]) != REG)
391 operands[2] = force_reg (SImode, operands[2]);
396 (define_insn "*thumb1_adddi3"
397 [(set (match_operand:DI 0 "register_operand" "=l")
398 (plus:DI (match_operand:DI 1 "register_operand" "%0")
399 (match_operand:DI 2 "register_operand" "l")))
400 (clobber (reg:CC CC_REGNUM))
403 "add\\t%Q0, %Q0, %Q2\;adc\\t%R0, %R0, %R2"
404 [(set_attr "length" "4")]
407 (define_insn_and_split "*arm_adddi3"
408 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
409 (plus:DI (match_operand:DI 1 "s_register_operand" "%0, 0")
410 (match_operand:DI 2 "s_register_operand" "r, 0")))
411 (clobber (reg:CC CC_REGNUM))]
412 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
414 "TARGET_32BIT && reload_completed"
415 [(parallel [(set (reg:CC_C CC_REGNUM)
416 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
418 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
419 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
420 (plus:SI (match_dup 4) (match_dup 5))))]
423 operands[3] = gen_highpart (SImode, operands[0]);
424 operands[0] = gen_lowpart (SImode, operands[0]);
425 operands[4] = gen_highpart (SImode, operands[1]);
426 operands[1] = gen_lowpart (SImode, operands[1]);
427 operands[5] = gen_highpart (SImode, operands[2]);
428 operands[2] = gen_lowpart (SImode, operands[2]);
430 [(set_attr "conds" "clob")
431 (set_attr "length" "8")]
434 (define_insn_and_split "*adddi_sesidi_di"
435 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
436 (plus:DI (sign_extend:DI
437 (match_operand:SI 2 "s_register_operand" "r,r"))
438 (match_operand:DI 1 "s_register_operand" "r,0")))
439 (clobber (reg:CC CC_REGNUM))]
440 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
442 "TARGET_32BIT && reload_completed"
443 [(parallel [(set (reg:CC_C CC_REGNUM)
444 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
446 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
447 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
448 (plus:SI (ashiftrt:SI (match_dup 2)
453 operands[3] = gen_highpart (SImode, operands[0]);
454 operands[0] = gen_lowpart (SImode, operands[0]);
455 operands[4] = gen_highpart (SImode, operands[1]);
456 operands[1] = gen_lowpart (SImode, operands[1]);
457 operands[2] = gen_lowpart (SImode, operands[2]);
459 [(set_attr "conds" "clob")
460 (set_attr "length" "8")]
463 (define_insn_and_split "*adddi_zesidi_di"
464 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
465 (plus:DI (zero_extend:DI
466 (match_operand:SI 2 "s_register_operand" "r,r"))
467 (match_operand:DI 1 "s_register_operand" "r,0")))
468 (clobber (reg:CC CC_REGNUM))]
469 "TARGET_32BIT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)"
471 "TARGET_32BIT && reload_completed"
472 [(parallel [(set (reg:CC_C CC_REGNUM)
473 (compare:CC_C (plus:SI (match_dup 1) (match_dup 2))
475 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])
476 (set (match_dup 3) (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
477 (plus:SI (match_dup 4) (const_int 0))))]
480 operands[3] = gen_highpart (SImode, operands[0]);
481 operands[0] = gen_lowpart (SImode, operands[0]);
482 operands[4] = gen_highpart (SImode, operands[1]);
483 operands[1] = gen_lowpart (SImode, operands[1]);
484 operands[2] = gen_lowpart (SImode, operands[2]);
486 [(set_attr "conds" "clob")
487 (set_attr "length" "8")]
490 (define_expand "addsi3"
491 [(set (match_operand:SI 0 "s_register_operand" "")
492 (plus:SI (match_operand:SI 1 "s_register_operand" "")
493 (match_operand:SI 2 "reg_or_int_operand" "")))]
496 if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
498 arm_split_constant (PLUS, SImode, NULL_RTX,
499 INTVAL (operands[2]), operands[0], operands[1],
500 optimize && can_create_pseudo_p ());
506 ; If there is a scratch available, this will be faster than synthesizing the
509 [(match_scratch:SI 3 "r")
510 (set (match_operand:SI 0 "arm_general_register_operand" "")
511 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
512 (match_operand:SI 2 "const_int_operand" "")))]
514 !(const_ok_for_arm (INTVAL (operands[2]))
515 || const_ok_for_arm (-INTVAL (operands[2])))
516 && const_ok_for_arm (~INTVAL (operands[2]))"
517 [(set (match_dup 3) (match_dup 2))
518 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
522 (define_insn_and_split "*arm_addsi3"
523 [(set (match_operand:SI 0 "s_register_operand" "=r, !k,r, !k,r")
524 (plus:SI (match_operand:SI 1 "s_register_operand" "%rk,!k,rk,!k,rk")
525 (match_operand:SI 2 "reg_or_int_operand" "rI, rI,L, L,?n")))]
534 GET_CODE (operands[2]) == CONST_INT
535 && !(const_ok_for_arm (INTVAL (operands[2]))
536 || const_ok_for_arm (-INTVAL (operands[2])))"
537 [(clobber (const_int 0))]
539 arm_split_constant (PLUS, SImode, curr_insn,
540 INTVAL (operands[2]), operands[0],
544 [(set_attr "length" "4,4,4,4,16")
545 (set_attr "predicable" "yes")]
548 ;; Register group 'k' is a single register group containing only the stack
549 ;; register. Trying to reload it will always fail catastrophically,
550 ;; so never allow those alternatives to match if reloading is needed.
552 (define_insn "*thumb1_addsi3"
553 [(set (match_operand:SI 0 "register_operand" "=l,l,l,*rk,*hk,l,!k")
554 (plus:SI (match_operand:SI 1 "register_operand" "%0,0,l,*0,*0,!k,!k")
555 (match_operand:SI 2 "nonmemory_operand" "I,J,lL,*hk,*rk,!M,!O")))]
558 static const char * const asms[] =
560 \"add\\t%0, %0, %2\",
561 \"sub\\t%0, %0, #%n2\",
562 \"add\\t%0, %1, %2\",
563 \"add\\t%0, %0, %2\",
564 \"add\\t%0, %0, %2\",
565 \"add\\t%0, %1, %2\",
568 if ((which_alternative == 2 || which_alternative == 6)
569 && GET_CODE (operands[2]) == CONST_INT
570 && INTVAL (operands[2]) < 0)
571 return \"sub\\t%0, %1, #%n2\";
572 return asms[which_alternative];
574 [(set_attr "length" "2")]
577 ;; Reloading and elimination of the frame pointer can
578 ;; sometimes cause this optimization to be missed.
580 [(set (match_operand:SI 0 "arm_general_register_operand" "")
581 (match_operand:SI 1 "const_int_operand" ""))
583 (plus:SI (match_dup 0) (reg:SI SP_REGNUM)))]
585 && (unsigned HOST_WIDE_INT) (INTVAL (operands[1])) < 1024
586 && (INTVAL (operands[1]) & 3) == 0"
587 [(set (match_dup 0) (plus:SI (reg:SI SP_REGNUM) (match_dup 1)))]
591 ;; ??? Make Thumb-2 variants which prefer low regs
592 (define_insn "*addsi3_compare0"
593 [(set (reg:CC_NOOV CC_REGNUM)
595 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r")
596 (match_operand:SI 2 "arm_add_operand" "rI,L"))
598 (set (match_operand:SI 0 "s_register_operand" "=r,r")
599 (plus:SI (match_dup 1) (match_dup 2)))]
603 sub%.\\t%0, %1, #%n2"
604 [(set_attr "conds" "set")]
607 (define_insn "*addsi3_compare0_scratch"
608 [(set (reg:CC_NOOV CC_REGNUM)
610 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r")
611 (match_operand:SI 1 "arm_add_operand" "rI,L"))
617 [(set_attr "conds" "set")]
620 (define_insn "*compare_negsi_si"
621 [(set (reg:CC_Z CC_REGNUM)
623 (neg:SI (match_operand:SI 0 "s_register_operand" "r"))
624 (match_operand:SI 1 "s_register_operand" "r")))]
627 [(set_attr "conds" "set")]
630 ;; This is the canonicalization of addsi3_compare0_for_combiner when the
631 ;; addend is a constant.
632 (define_insn "*cmpsi2_addneg"
633 [(set (reg:CC CC_REGNUM)
635 (match_operand:SI 1 "s_register_operand" "r,r")
636 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
637 (set (match_operand:SI 0 "s_register_operand" "=r,r")
638 (plus:SI (match_dup 1)
639 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
640 "TARGET_32BIT && INTVAL (operands[2]) == -INTVAL (operands[3])"
643 add%.\\t%0, %1, #%n2"
644 [(set_attr "conds" "set")]
647 ;; Convert the sequence
649 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
653 ;; bcs dest ((unsigned)rn >= 1)
654 ;; similarly for the beq variant using bcc.
655 ;; This is a common looping idiom (while (n--))
657 [(set (match_operand:SI 0 "arm_general_register_operand" "")
658 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
660 (set (match_operand 2 "cc_register" "")
661 (compare (match_dup 0) (const_int -1)))
663 (if_then_else (match_operator 3 "equality_operator"
664 [(match_dup 2) (const_int 0)])
665 (match_operand 4 "" "")
666 (match_operand 5 "" "")))]
667 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
671 (match_dup 1) (const_int 1)))
672 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
674 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
677 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
678 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
681 operands[2], const0_rtx);"
684 ;; The next four insns work because they compare the result with one of
685 ;; the operands, and we know that the use of the condition code is
686 ;; either GEU or LTU, so we can use the carry flag from the addition
687 ;; instead of doing the compare a second time.
688 (define_insn "*addsi3_compare_op1"
689 [(set (reg:CC_C CC_REGNUM)
691 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
692 (match_operand:SI 2 "arm_add_operand" "rI,L"))
694 (set (match_operand:SI 0 "s_register_operand" "=r,r")
695 (plus:SI (match_dup 1) (match_dup 2)))]
699 sub%.\\t%0, %1, #%n2"
700 [(set_attr "conds" "set")]
703 (define_insn "*addsi3_compare_op2"
704 [(set (reg:CC_C CC_REGNUM)
706 (plus:SI (match_operand:SI 1 "s_register_operand" "r,r")
707 (match_operand:SI 2 "arm_add_operand" "rI,L"))
709 (set (match_operand:SI 0 "s_register_operand" "=r,r")
710 (plus:SI (match_dup 1) (match_dup 2)))]
714 sub%.\\t%0, %1, #%n2"
715 [(set_attr "conds" "set")]
718 (define_insn "*compare_addsi2_op0"
719 [(set (reg:CC_C CC_REGNUM)
721 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
722 (match_operand:SI 1 "arm_add_operand" "rI,L"))
728 [(set_attr "conds" "set")]
731 (define_insn "*compare_addsi2_op1"
732 [(set (reg:CC_C CC_REGNUM)
734 (plus:SI (match_operand:SI 0 "s_register_operand" "r,r")
735 (match_operand:SI 1 "arm_add_operand" "rI,L"))
741 [(set_attr "conds" "set")]
744 (define_insn "*addsi3_carryin"
745 [(set (match_operand:SI 0 "s_register_operand" "=r")
746 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
747 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
748 (match_operand:SI 2 "arm_rhs_operand" "rI"))))]
751 [(set_attr "conds" "use")]
754 (define_insn "*addsi3_carryin_shift"
755 [(set (match_operand:SI 0 "s_register_operand" "=r")
756 (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
758 (match_operator:SI 2 "shift_operator"
759 [(match_operand:SI 3 "s_register_operand" "r")
760 (match_operand:SI 4 "reg_or_int_operand" "rM")])
761 (match_operand:SI 1 "s_register_operand" "r"))))]
763 "adc%?\\t%0, %1, %3%S2"
764 [(set_attr "conds" "use")
765 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
766 (const_string "alu_shift")
767 (const_string "alu_shift_reg")))]
770 (define_insn "*addsi3_carryin_alt1"
771 [(set (match_operand:SI 0 "s_register_operand" "=r")
772 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "r")
773 (match_operand:SI 2 "arm_rhs_operand" "rI"))
774 (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))))]
777 [(set_attr "conds" "use")]
780 (define_insn "*addsi3_carryin_alt2"
781 [(set (match_operand:SI 0 "s_register_operand" "=r")
782 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
783 (match_operand:SI 1 "s_register_operand" "r"))
784 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
787 [(set_attr "conds" "use")]
790 (define_insn "*addsi3_carryin_alt3"
791 [(set (match_operand:SI 0 "s_register_operand" "=r")
792 (plus:SI (plus:SI (ltu:SI (reg:CC_C CC_REGNUM) (const_int 0))
793 (match_operand:SI 2 "arm_rhs_operand" "rI"))
794 (match_operand:SI 1 "s_register_operand" "r")))]
797 [(set_attr "conds" "use")]
800 (define_expand "incscc"
801 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
802 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
803 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
804 (match_operand:SI 1 "s_register_operand" "0,?r")))]
809 (define_insn "*arm_incscc"
810 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
811 (plus:SI (match_operator:SI 2 "arm_comparison_operator"
812 [(match_operand:CC 3 "cc_register" "") (const_int 0)])
813 (match_operand:SI 1 "s_register_operand" "0,?r")))]
817 mov%D2\\t%0, %1\;add%d2\\t%0, %1, #1"
818 [(set_attr "conds" "use")
819 (set_attr "length" "4,8")]
822 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
824 [(set (match_operand:SI 0 "s_register_operand" "")
825 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
826 (match_operand:SI 2 "s_register_operand" ""))
828 (clobber (match_operand:SI 3 "s_register_operand" ""))]
830 [(set (match_dup 3) (match_dup 1))
831 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
833 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
836 (define_expand "addsf3"
837 [(set (match_operand:SF 0 "s_register_operand" "")
838 (plus:SF (match_operand:SF 1 "s_register_operand" "")
839 (match_operand:SF 2 "arm_float_add_operand" "")))]
840 "TARGET_32BIT && TARGET_HARD_FLOAT"
843 && !cirrus_fp_register (operands[2], SFmode))
844 operands[2] = force_reg (SFmode, operands[2]);
847 (define_expand "adddf3"
848 [(set (match_operand:DF 0 "s_register_operand" "")
849 (plus:DF (match_operand:DF 1 "s_register_operand" "")
850 (match_operand:DF 2 "arm_float_add_operand" "")))]
851 "TARGET_32BIT && TARGET_HARD_FLOAT"
854 && !cirrus_fp_register (operands[2], DFmode))
855 operands[2] = force_reg (DFmode, operands[2]);
858 (define_expand "subdi3"
860 [(set (match_operand:DI 0 "s_register_operand" "")
861 (minus:DI (match_operand:DI 1 "s_register_operand" "")
862 (match_operand:DI 2 "s_register_operand" "")))
863 (clobber (reg:CC CC_REGNUM))])]
866 if (TARGET_HARD_FLOAT && TARGET_MAVERICK
868 && cirrus_fp_register (operands[0], DImode)
869 && cirrus_fp_register (operands[1], DImode))
871 emit_insn (gen_cirrus_subdi3 (operands[0], operands[1], operands[2]));
877 if (GET_CODE (operands[1]) != REG)
878 operands[1] = force_reg (SImode, operands[1]);
879 if (GET_CODE (operands[2]) != REG)
880 operands[2] = force_reg (SImode, operands[2]);
885 (define_insn "*arm_subdi3"
886 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r,&r")
887 (minus:DI (match_operand:DI 1 "s_register_operand" "0,r,0")
888 (match_operand:DI 2 "s_register_operand" "r,0,0")))
889 (clobber (reg:CC CC_REGNUM))]
891 "subs\\t%Q0, %Q1, %Q2\;sbc\\t%R0, %R1, %R2"
892 [(set_attr "conds" "clob")
893 (set_attr "length" "8")]
896 (define_insn "*thumb_subdi3"
897 [(set (match_operand:DI 0 "register_operand" "=l")
898 (minus:DI (match_operand:DI 1 "register_operand" "0")
899 (match_operand:DI 2 "register_operand" "l")))
900 (clobber (reg:CC CC_REGNUM))]
902 "sub\\t%Q0, %Q0, %Q2\;sbc\\t%R0, %R0, %R2"
903 [(set_attr "length" "4")]
906 (define_insn "*subdi_di_zesidi"
907 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
908 (minus:DI (match_operand:DI 1 "s_register_operand" "?r,0")
910 (match_operand:SI 2 "s_register_operand" "r,r"))))
911 (clobber (reg:CC CC_REGNUM))]
913 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, #0"
914 [(set_attr "conds" "clob")
915 (set_attr "length" "8")]
918 (define_insn "*subdi_di_sesidi"
919 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
920 (minus:DI (match_operand:DI 1 "s_register_operand" "r,0")
922 (match_operand:SI 2 "s_register_operand" "r,r"))))
923 (clobber (reg:CC CC_REGNUM))]
925 "subs\\t%Q0, %Q1, %2\;sbc\\t%R0, %R1, %2, asr #31"
926 [(set_attr "conds" "clob")
927 (set_attr "length" "8")]
930 (define_insn "*subdi_zesidi_di"
931 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
932 (minus:DI (zero_extend:DI
933 (match_operand:SI 2 "s_register_operand" "r,r"))
934 (match_operand:DI 1 "s_register_operand" "?r,0")))
935 (clobber (reg:CC CC_REGNUM))]
937 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, #0"
938 [(set_attr "conds" "clob")
939 (set_attr "length" "8")]
942 (define_insn "*subdi_sesidi_di"
943 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
944 (minus:DI (sign_extend:DI
945 (match_operand:SI 2 "s_register_operand" "r,r"))
946 (match_operand:DI 1 "s_register_operand" "?r,0")))
947 (clobber (reg:CC CC_REGNUM))]
949 "rsbs\\t%Q0, %Q1, %2\;rsc\\t%R0, %R1, %2, asr #31"
950 [(set_attr "conds" "clob")
951 (set_attr "length" "8")]
954 (define_insn "*subdi_zesidi_zesidi"
955 [(set (match_operand:DI 0 "s_register_operand" "=r")
956 (minus:DI (zero_extend:DI
957 (match_operand:SI 1 "s_register_operand" "r"))
959 (match_operand:SI 2 "s_register_operand" "r"))))
960 (clobber (reg:CC CC_REGNUM))]
962 "subs\\t%Q0, %1, %2\;sbc\\t%R0, %1, %1"
963 [(set_attr "conds" "clob")
964 (set_attr "length" "8")]
967 (define_expand "subsi3"
968 [(set (match_operand:SI 0 "s_register_operand" "")
969 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "")
970 (match_operand:SI 2 "s_register_operand" "")))]
973 if (GET_CODE (operands[1]) == CONST_INT)
977 arm_split_constant (MINUS, SImode, NULL_RTX,
978 INTVAL (operands[1]), operands[0],
979 operands[2], optimize && can_create_pseudo_p ());
982 else /* TARGET_THUMB1 */
983 operands[1] = force_reg (SImode, operands[1]);
988 (define_insn "*thumb1_subsi3_insn"
989 [(set (match_operand:SI 0 "register_operand" "=l")
990 (minus:SI (match_operand:SI 1 "register_operand" "l")
991 (match_operand:SI 2 "register_operand" "l")))]
994 [(set_attr "length" "2")]
997 ; ??? Check Thumb-2 split length
998 (define_insn_and_split "*arm_subsi3_insn"
999 [(set (match_operand:SI 0 "s_register_operand" "=r,rk,r")
1000 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "rI,!k,?n")
1001 (match_operand:SI 2 "s_register_operand" "r, r, r")))]
1008 && GET_CODE (operands[1]) == CONST_INT
1009 && !const_ok_for_arm (INTVAL (operands[1]))"
1010 [(clobber (const_int 0))]
1012 arm_split_constant (MINUS, SImode, curr_insn,
1013 INTVAL (operands[1]), operands[0], operands[2], 0);
1016 [(set_attr "length" "4,4,16")
1017 (set_attr "predicable" "yes")]
1021 [(match_scratch:SI 3 "r")
1022 (set (match_operand:SI 0 "arm_general_register_operand" "")
1023 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1024 (match_operand:SI 2 "arm_general_register_operand" "")))]
1026 && !const_ok_for_arm (INTVAL (operands[1]))
1027 && const_ok_for_arm (~INTVAL (operands[1]))"
1028 [(set (match_dup 3) (match_dup 1))
1029 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
1033 (define_insn "*subsi3_compare0"
1034 [(set (reg:CC_NOOV CC_REGNUM)
1036 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,I")
1037 (match_operand:SI 2 "arm_rhs_operand" "rI,r"))
1039 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1040 (minus:SI (match_dup 1) (match_dup 2)))]
1045 [(set_attr "conds" "set")]
1048 (define_expand "decscc"
1049 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1050 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1051 (match_operator:SI 2 "arm_comparison_operator"
1052 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1057 (define_insn "*arm_decscc"
1058 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1059 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
1060 (match_operator:SI 2 "arm_comparison_operator"
1061 [(match_operand 3 "cc_register" "") (const_int 0)])))]
1065 mov%D2\\t%0, %1\;sub%d2\\t%0, %1, #1"
1066 [(set_attr "conds" "use")
1067 (set_attr "length" "*,8")]
1070 (define_expand "subsf3"
1071 [(set (match_operand:SF 0 "s_register_operand" "")
1072 (minus:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1073 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1074 "TARGET_32BIT && TARGET_HARD_FLOAT"
1076 if (TARGET_MAVERICK)
1078 if (!cirrus_fp_register (operands[1], SFmode))
1079 operands[1] = force_reg (SFmode, operands[1]);
1080 if (!cirrus_fp_register (operands[2], SFmode))
1081 operands[2] = force_reg (SFmode, operands[2]);
1085 (define_expand "subdf3"
1086 [(set (match_operand:DF 0 "s_register_operand" "")
1087 (minus:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1088 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1089 "TARGET_32BIT && TARGET_HARD_FLOAT"
1091 if (TARGET_MAVERICK)
1093 if (!cirrus_fp_register (operands[1], DFmode))
1094 operands[1] = force_reg (DFmode, operands[1]);
1095 if (!cirrus_fp_register (operands[2], DFmode))
1096 operands[2] = force_reg (DFmode, operands[2]);
1101 ;; Multiplication insns
1103 (define_expand "mulsi3"
1104 [(set (match_operand:SI 0 "s_register_operand" "")
1105 (mult:SI (match_operand:SI 2 "s_register_operand" "")
1106 (match_operand:SI 1 "s_register_operand" "")))]
1111 ;; Use `&' and then `0' to prevent the operands 0 and 1 being the same
1112 (define_insn "*arm_mulsi3"
1113 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1114 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
1115 (match_operand:SI 1 "s_register_operand" "%?r,0")))]
1116 "TARGET_32BIT && !arm_arch6"
1117 "mul%?\\t%0, %2, %1"
1118 [(set_attr "insn" "mul")
1119 (set_attr "predicable" "yes")]
1122 (define_insn "*arm_mulsi3_v6"
1123 [(set (match_operand:SI 0 "s_register_operand" "=r")
1124 (mult:SI (match_operand:SI 1 "s_register_operand" "r")
1125 (match_operand:SI 2 "s_register_operand" "r")))]
1126 "TARGET_32BIT && arm_arch6"
1127 "mul%?\\t%0, %1, %2"
1128 [(set_attr "insn" "mul")
1129 (set_attr "predicable" "yes")]
1132 ; Unfortunately with the Thumb the '&'/'0' trick can fails when operands
1133 ; 1 and 2; are the same, because reload will make operand 0 match
1134 ; operand 1 without realizing that this conflicts with operand 2. We fix
1135 ; this by adding another alternative to match this case, and then `reload'
1136 ; it ourselves. This alternative must come first.
1137 (define_insn "*thumb_mulsi3"
1138 [(set (match_operand:SI 0 "register_operand" "=&l,&l,&l")
1139 (mult:SI (match_operand:SI 1 "register_operand" "%l,*h,0")
1140 (match_operand:SI 2 "register_operand" "l,l,l")))]
1141 "TARGET_THUMB1 && !arm_arch6"
1143 if (which_alternative < 2)
1144 return \"mov\\t%0, %1\;mul\\t%0, %2\";
1146 return \"mul\\t%0, %2\";
1148 [(set_attr "length" "4,4,2")
1149 (set_attr "insn" "mul")]
1152 (define_insn "*thumb_mulsi3_v6"
1153 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
1154 (mult:SI (match_operand:SI 1 "register_operand" "0,l,0")
1155 (match_operand:SI 2 "register_operand" "l,0,0")))]
1156 "TARGET_THUMB1 && arm_arch6"
1161 [(set_attr "length" "2")
1162 (set_attr "insn" "mul")]
1165 (define_insn "*mulsi3_compare0"
1166 [(set (reg:CC_NOOV CC_REGNUM)
1167 (compare:CC_NOOV (mult:SI
1168 (match_operand:SI 2 "s_register_operand" "r,r")
1169 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1171 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1172 (mult:SI (match_dup 2) (match_dup 1)))]
1173 "TARGET_ARM && !arm_arch6"
1174 "mul%.\\t%0, %2, %1"
1175 [(set_attr "conds" "set")
1176 (set_attr "insn" "muls")]
1179 (define_insn "*mulsi3_compare0_v6"
1180 [(set (reg:CC_NOOV CC_REGNUM)
1181 (compare:CC_NOOV (mult:SI
1182 (match_operand:SI 2 "s_register_operand" "r")
1183 (match_operand:SI 1 "s_register_operand" "r"))
1185 (set (match_operand:SI 0 "s_register_operand" "=r")
1186 (mult:SI (match_dup 2) (match_dup 1)))]
1187 "TARGET_ARM && arm_arch6 && optimize_size"
1188 "mul%.\\t%0, %2, %1"
1189 [(set_attr "conds" "set")
1190 (set_attr "insn" "muls")]
1193 (define_insn "*mulsi_compare0_scratch"
1194 [(set (reg:CC_NOOV CC_REGNUM)
1195 (compare:CC_NOOV (mult:SI
1196 (match_operand:SI 2 "s_register_operand" "r,r")
1197 (match_operand:SI 1 "s_register_operand" "%?r,0"))
1199 (clobber (match_scratch:SI 0 "=&r,&r"))]
1200 "TARGET_ARM && !arm_arch6"
1201 "mul%.\\t%0, %2, %1"
1202 [(set_attr "conds" "set")
1203 (set_attr "insn" "muls")]
1206 (define_insn "*mulsi_compare0_scratch_v6"
1207 [(set (reg:CC_NOOV CC_REGNUM)
1208 (compare:CC_NOOV (mult:SI
1209 (match_operand:SI 2 "s_register_operand" "r")
1210 (match_operand:SI 1 "s_register_operand" "r"))
1212 (clobber (match_scratch:SI 0 "=r"))]
1213 "TARGET_ARM && arm_arch6 && optimize_size"
1214 "mul%.\\t%0, %2, %1"
1215 [(set_attr "conds" "set")
1216 (set_attr "insn" "muls")]
1219 ;; Unnamed templates to match MLA instruction.
1221 (define_insn "*mulsi3addsi"
1222 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1224 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1225 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1226 (match_operand:SI 3 "s_register_operand" "?r,r,0,0")))]
1227 "TARGET_32BIT && !arm_arch6"
1228 "mla%?\\t%0, %2, %1, %3"
1229 [(set_attr "insn" "mla")
1230 (set_attr "predicable" "yes")]
1233 (define_insn "*mulsi3addsi_v6"
1234 [(set (match_operand:SI 0 "s_register_operand" "=r")
1236 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1237 (match_operand:SI 1 "s_register_operand" "r"))
1238 (match_operand:SI 3 "s_register_operand" "r")))]
1239 "TARGET_32BIT && arm_arch6"
1240 "mla%?\\t%0, %2, %1, %3"
1241 [(set_attr "insn" "mla")
1242 (set_attr "predicable" "yes")]
1245 (define_insn "*mulsi3addsi_compare0"
1246 [(set (reg:CC_NOOV CC_REGNUM)
1249 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1250 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1251 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1253 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
1254 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1256 "TARGET_ARM && arm_arch6"
1257 "mla%.\\t%0, %2, %1, %3"
1258 [(set_attr "conds" "set")
1259 (set_attr "insn" "mlas")]
1262 (define_insn "*mulsi3addsi_compare0_v6"
1263 [(set (reg:CC_NOOV CC_REGNUM)
1266 (match_operand:SI 2 "s_register_operand" "r")
1267 (match_operand:SI 1 "s_register_operand" "r"))
1268 (match_operand:SI 3 "s_register_operand" "r"))
1270 (set (match_operand:SI 0 "s_register_operand" "=r")
1271 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
1273 "TARGET_ARM && arm_arch6 && optimize_size"
1274 "mla%.\\t%0, %2, %1, %3"
1275 [(set_attr "conds" "set")
1276 (set_attr "insn" "mlas")]
1279 (define_insn "*mulsi3addsi_compare0_scratch"
1280 [(set (reg:CC_NOOV CC_REGNUM)
1283 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
1284 (match_operand:SI 1 "s_register_operand" "%r,0,r,0"))
1285 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
1287 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
1288 "TARGET_ARM && !arm_arch6"
1289 "mla%.\\t%0, %2, %1, %3"
1290 [(set_attr "conds" "set")
1291 (set_attr "insn" "mlas")]
1294 (define_insn "*mulsi3addsi_compare0_scratch_v6"
1295 [(set (reg:CC_NOOV CC_REGNUM)
1298 (match_operand:SI 2 "s_register_operand" "r")
1299 (match_operand:SI 1 "s_register_operand" "r"))
1300 (match_operand:SI 3 "s_register_operand" "r"))
1302 (clobber (match_scratch:SI 0 "=r"))]
1303 "TARGET_ARM && arm_arch6 && optimize_size"
1304 "mla%.\\t%0, %2, %1, %3"
1305 [(set_attr "conds" "set")
1306 (set_attr "insn" "mlas")]
1309 (define_insn "*mulsi3subsi"
1310 [(set (match_operand:SI 0 "s_register_operand" "=r")
1312 (match_operand:SI 3 "s_register_operand" "r")
1313 (mult:SI (match_operand:SI 2 "s_register_operand" "r")
1314 (match_operand:SI 1 "s_register_operand" "r"))))]
1315 "TARGET_32BIT && arm_arch_thumb2"
1316 "mls%?\\t%0, %2, %1, %3"
1317 [(set_attr "insn" "mla")
1318 (set_attr "predicable" "yes")]
1321 ;; Unnamed template to match long long multiply-accumulate (smlal)
1323 (define_insn "*mulsidi3adddi"
1324 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1327 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1328 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1329 (match_operand:DI 1 "s_register_operand" "0")))]
1330 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1331 "smlal%?\\t%Q0, %R0, %3, %2"
1332 [(set_attr "insn" "smlal")
1333 (set_attr "predicable" "yes")]
1336 (define_insn "*mulsidi3adddi_v6"
1337 [(set (match_operand:DI 0 "s_register_operand" "=r")
1340 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1341 (sign_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1342 (match_operand:DI 1 "s_register_operand" "0")))]
1343 "TARGET_32BIT && arm_arch6"
1344 "smlal%?\\t%Q0, %R0, %3, %2"
1345 [(set_attr "insn" "smlal")
1346 (set_attr "predicable" "yes")]
1349 ;; 32x32->64 widening multiply.
1350 ;; As with mulsi3, the only difference between the v3-5 and v6+
1351 ;; versions of these patterns is the requirement that the output not
1352 ;; overlap the inputs, but that still means we have to have a named
1353 ;; expander and two different starred insns.
1355 (define_expand "mulsidi3"
1356 [(set (match_operand:DI 0 "s_register_operand" "")
1358 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1359 (sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1360 "TARGET_32BIT && arm_arch3m"
1364 (define_insn "*mulsidi3_nov6"
1365 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1367 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1368 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1369 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1370 "smull%?\\t%Q0, %R0, %1, %2"
1371 [(set_attr "insn" "smull")
1372 (set_attr "predicable" "yes")]
1375 (define_insn "*mulsidi3_v6"
1376 [(set (match_operand:DI 0 "s_register_operand" "=r")
1378 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1379 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1380 "TARGET_32BIT && arm_arch6"
1381 "smull%?\\t%Q0, %R0, %1, %2"
1382 [(set_attr "insn" "smull")
1383 (set_attr "predicable" "yes")]
1386 (define_expand "umulsidi3"
1387 [(set (match_operand:DI 0 "s_register_operand" "")
1389 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1390 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))))]
1391 "TARGET_32BIT && arm_arch3m"
1395 (define_insn "*umulsidi3_nov6"
1396 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1398 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r"))
1399 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1400 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1401 "umull%?\\t%Q0, %R0, %1, %2"
1402 [(set_attr "insn" "umull")
1403 (set_attr "predicable" "yes")]
1406 (define_insn "*umulsidi3_v6"
1407 [(set (match_operand:DI 0 "s_register_operand" "=r")
1409 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1410 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))))]
1411 "TARGET_32BIT && arm_arch6"
1412 "umull%?\\t%Q0, %R0, %1, %2"
1413 [(set_attr "insn" "umull")
1414 (set_attr "predicable" "yes")]
1417 ;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
1419 (define_insn "*umulsidi3adddi"
1420 [(set (match_operand:DI 0 "s_register_operand" "=&r")
1423 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "%r"))
1424 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1425 (match_operand:DI 1 "s_register_operand" "0")))]
1426 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1427 "umlal%?\\t%Q0, %R0, %3, %2"
1428 [(set_attr "insn" "umlal")
1429 (set_attr "predicable" "yes")]
1432 (define_insn "*umulsidi3adddi_v6"
1433 [(set (match_operand:DI 0 "s_register_operand" "=r")
1436 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r"))
1437 (zero_extend:DI (match_operand:SI 3 "s_register_operand" "r")))
1438 (match_operand:DI 1 "s_register_operand" "0")))]
1439 "TARGET_32BIT && arm_arch6"
1440 "umlal%?\\t%Q0, %R0, %3, %2"
1441 [(set_attr "insn" "umlal")
1442 (set_attr "predicable" "yes")]
1445 (define_expand "smulsi3_highpart"
1447 [(set (match_operand:SI 0 "s_register_operand" "")
1451 (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1452 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1454 (clobber (match_scratch:SI 3 ""))])]
1455 "TARGET_32BIT && arm_arch3m"
1459 (define_insn "*smulsi3_highpart_nov6"
1460 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1464 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1465 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1467 (clobber (match_scratch:SI 3 "=&r,&r"))]
1468 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1469 "smull%?\\t%3, %0, %2, %1"
1470 [(set_attr "insn" "smull")
1471 (set_attr "predicable" "yes")]
1474 (define_insn "*smulsi3_highpart_v6"
1475 [(set (match_operand:SI 0 "s_register_operand" "=r")
1479 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1480 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1482 (clobber (match_scratch:SI 3 "=r"))]
1483 "TARGET_32BIT && arm_arch6"
1484 "smull%?\\t%3, %0, %2, %1"
1485 [(set_attr "insn" "smull")
1486 (set_attr "predicable" "yes")]
1489 (define_expand "umulsi3_highpart"
1491 [(set (match_operand:SI 0 "s_register_operand" "")
1495 (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
1496 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
1498 (clobber (match_scratch:SI 3 ""))])]
1499 "TARGET_32BIT && arm_arch3m"
1503 (define_insn "*umulsi3_highpart_nov6"
1504 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r")
1508 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%r,0"))
1509 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r,r")))
1511 (clobber (match_scratch:SI 3 "=&r,&r"))]
1512 "TARGET_32BIT && arm_arch3m && !arm_arch6"
1513 "umull%?\\t%3, %0, %2, %1"
1514 [(set_attr "insn" "umull")
1515 (set_attr "predicable" "yes")]
1518 (define_insn "*umulsi3_highpart_v6"
1519 [(set (match_operand:SI 0 "s_register_operand" "=r")
1523 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r"))
1524 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1526 (clobber (match_scratch:SI 3 "=r"))]
1527 "TARGET_32BIT && arm_arch6"
1528 "umull%?\\t%3, %0, %2, %1"
1529 [(set_attr "insn" "umull")
1530 (set_attr "predicable" "yes")]
1533 (define_insn "mulhisi3"
1534 [(set (match_operand:SI 0 "s_register_operand" "=r")
1535 (mult:SI (sign_extend:SI
1536 (match_operand:HI 1 "s_register_operand" "%r"))
1538 (match_operand:HI 2 "s_register_operand" "r"))))]
1539 "TARGET_DSP_MULTIPLY"
1540 "smulbb%?\\t%0, %1, %2"
1541 [(set_attr "insn" "smulxy")
1542 (set_attr "predicable" "yes")]
1545 (define_insn "*mulhisi3tb"
1546 [(set (match_operand:SI 0 "s_register_operand" "=r")
1547 (mult:SI (ashiftrt:SI
1548 (match_operand:SI 1 "s_register_operand" "r")
1551 (match_operand:HI 2 "s_register_operand" "r"))))]
1552 "TARGET_DSP_MULTIPLY"
1553 "smultb%?\\t%0, %1, %2"
1554 [(set_attr "insn" "smulxy")
1555 (set_attr "predicable" "yes")]
1558 (define_insn "*mulhisi3bt"
1559 [(set (match_operand:SI 0 "s_register_operand" "=r")
1560 (mult:SI (sign_extend:SI
1561 (match_operand:HI 1 "s_register_operand" "r"))
1563 (match_operand:SI 2 "s_register_operand" "r")
1565 "TARGET_DSP_MULTIPLY"
1566 "smulbt%?\\t%0, %1, %2"
1567 [(set_attr "insn" "smulxy")
1568 (set_attr "predicable" "yes")]
1571 (define_insn "*mulhisi3tt"
1572 [(set (match_operand:SI 0 "s_register_operand" "=r")
1573 (mult:SI (ashiftrt:SI
1574 (match_operand:SI 1 "s_register_operand" "r")
1577 (match_operand:SI 2 "s_register_operand" "r")
1579 "TARGET_DSP_MULTIPLY"
1580 "smultt%?\\t%0, %1, %2"
1581 [(set_attr "insn" "smulxy")
1582 (set_attr "predicable" "yes")]
1585 (define_insn "*mulhisi3addsi"
1586 [(set (match_operand:SI 0 "s_register_operand" "=r")
1587 (plus:SI (match_operand:SI 1 "s_register_operand" "r")
1588 (mult:SI (sign_extend:SI
1589 (match_operand:HI 2 "s_register_operand" "%r"))
1591 (match_operand:HI 3 "s_register_operand" "r")))))]
1592 "TARGET_DSP_MULTIPLY"
1593 "smlabb%?\\t%0, %2, %3, %1"
1594 [(set_attr "insn" "smlaxy")
1595 (set_attr "predicable" "yes")]
1598 (define_insn "*mulhidi3adddi"
1599 [(set (match_operand:DI 0 "s_register_operand" "=r")
1601 (match_operand:DI 1 "s_register_operand" "0")
1602 (mult:DI (sign_extend:DI
1603 (match_operand:HI 2 "s_register_operand" "%r"))
1605 (match_operand:HI 3 "s_register_operand" "r")))))]
1606 "TARGET_DSP_MULTIPLY"
1607 "smlalbb%?\\t%Q0, %R0, %2, %3"
1608 [(set_attr "insn" "smlalxy")
1609 (set_attr "predicable" "yes")])
1611 (define_expand "mulsf3"
1612 [(set (match_operand:SF 0 "s_register_operand" "")
1613 (mult:SF (match_operand:SF 1 "s_register_operand" "")
1614 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1615 "TARGET_32BIT && TARGET_HARD_FLOAT"
1618 && !cirrus_fp_register (operands[2], SFmode))
1619 operands[2] = force_reg (SFmode, operands[2]);
1622 (define_expand "muldf3"
1623 [(set (match_operand:DF 0 "s_register_operand" "")
1624 (mult:DF (match_operand:DF 1 "s_register_operand" "")
1625 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1626 "TARGET_32BIT && TARGET_HARD_FLOAT"
1629 && !cirrus_fp_register (operands[2], DFmode))
1630 operands[2] = force_reg (DFmode, operands[2]);
1635 (define_expand "divsf3"
1636 [(set (match_operand:SF 0 "s_register_operand" "")
1637 (div:SF (match_operand:SF 1 "arm_float_rhs_operand" "")
1638 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1639 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1642 (define_expand "divdf3"
1643 [(set (match_operand:DF 0 "s_register_operand" "")
1644 (div:DF (match_operand:DF 1 "arm_float_rhs_operand" "")
1645 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1646 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
1651 (define_expand "modsf3"
1652 [(set (match_operand:SF 0 "s_register_operand" "")
1653 (mod:SF (match_operand:SF 1 "s_register_operand" "")
1654 (match_operand:SF 2 "arm_float_rhs_operand" "")))]
1655 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1658 (define_expand "moddf3"
1659 [(set (match_operand:DF 0 "s_register_operand" "")
1660 (mod:DF (match_operand:DF 1 "s_register_operand" "")
1661 (match_operand:DF 2 "arm_float_rhs_operand" "")))]
1662 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
1665 ;; Boolean and,ior,xor insns
1667 ;; Split up double word logical operations
1669 ;; Split up simple DImode logical operations. Simply perform the logical
1670 ;; operation on the upper and lower halves of the registers.
1672 [(set (match_operand:DI 0 "s_register_operand" "")
1673 (match_operator:DI 6 "logical_binary_operator"
1674 [(match_operand:DI 1 "s_register_operand" "")
1675 (match_operand:DI 2 "s_register_operand" "")]))]
1676 "TARGET_32BIT && reload_completed
1677 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
1678 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1679 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
1682 operands[3] = gen_highpart (SImode, operands[0]);
1683 operands[0] = gen_lowpart (SImode, operands[0]);
1684 operands[4] = gen_highpart (SImode, operands[1]);
1685 operands[1] = gen_lowpart (SImode, operands[1]);
1686 operands[5] = gen_highpart (SImode, operands[2]);
1687 operands[2] = gen_lowpart (SImode, operands[2]);
1692 [(set (match_operand:DI 0 "s_register_operand" "")
1693 (match_operator:DI 6 "logical_binary_operator"
1694 [(sign_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1695 (match_operand:DI 1 "s_register_operand" "")]))]
1696 "TARGET_32BIT && reload_completed"
1697 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
1698 (set (match_dup 3) (match_op_dup:SI 6
1699 [(ashiftrt:SI (match_dup 2) (const_int 31))
1703 operands[3] = gen_highpart (SImode, operands[0]);
1704 operands[0] = gen_lowpart (SImode, operands[0]);
1705 operands[4] = gen_highpart (SImode, operands[1]);
1706 operands[1] = gen_lowpart (SImode, operands[1]);
1707 operands[5] = gen_highpart (SImode, operands[2]);
1708 operands[2] = gen_lowpart (SImode, operands[2]);
1712 ;; The zero extend of operand 2 means we can just copy the high part of
1713 ;; operand1 into operand0.
1715 [(set (match_operand:DI 0 "s_register_operand" "")
1717 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1718 (match_operand:DI 1 "s_register_operand" "")))]
1719 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1720 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
1721 (set (match_dup 3) (match_dup 4))]
1724 operands[4] = gen_highpart (SImode, operands[1]);
1725 operands[3] = gen_highpart (SImode, operands[0]);
1726 operands[0] = gen_lowpart (SImode, operands[0]);
1727 operands[1] = gen_lowpart (SImode, operands[1]);
1731 ;; The zero extend of operand 2 means we can just copy the high part of
1732 ;; operand1 into operand0.
1734 [(set (match_operand:DI 0 "s_register_operand" "")
1736 (zero_extend:DI (match_operand:SI 2 "s_register_operand" ""))
1737 (match_operand:DI 1 "s_register_operand" "")))]
1738 "TARGET_32BIT && operands[0] != operands[1] && reload_completed"
1739 [(set (match_dup 0) (xor:SI (match_dup 1) (match_dup 2)))
1740 (set (match_dup 3) (match_dup 4))]
1743 operands[4] = gen_highpart (SImode, operands[1]);
1744 operands[3] = gen_highpart (SImode, operands[0]);
1745 operands[0] = gen_lowpart (SImode, operands[0]);
1746 operands[1] = gen_lowpart (SImode, operands[1]);
1750 (define_insn "anddi3"
1751 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1752 (and:DI (match_operand:DI 1 "s_register_operand" "%0,r")
1753 (match_operand:DI 2 "s_register_operand" "r,r")))]
1754 "TARGET_32BIT && ! TARGET_IWMMXT"
1756 [(set_attr "length" "8")]
1759 (define_insn_and_split "*anddi_zesidi_di"
1760 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1761 (and:DI (zero_extend:DI
1762 (match_operand:SI 2 "s_register_operand" "r,r"))
1763 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1766 "TARGET_32BIT && reload_completed"
1767 ; The zero extend of operand 2 clears the high word of the output
1769 [(set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))
1770 (set (match_dup 3) (const_int 0))]
1773 operands[3] = gen_highpart (SImode, operands[0]);
1774 operands[0] = gen_lowpart (SImode, operands[0]);
1775 operands[1] = gen_lowpart (SImode, operands[1]);
1777 [(set_attr "length" "8")]
1780 (define_insn "*anddi_sesdi_di"
1781 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
1782 (and:DI (sign_extend:DI
1783 (match_operand:SI 2 "s_register_operand" "r,r"))
1784 (match_operand:DI 1 "s_register_operand" "?r,0")))]
1787 [(set_attr "length" "8")]
1790 (define_expand "andsi3"
1791 [(set (match_operand:SI 0 "s_register_operand" "")
1792 (and:SI (match_operand:SI 1 "s_register_operand" "")
1793 (match_operand:SI 2 "reg_or_int_operand" "")))]
1798 if (GET_CODE (operands[2]) == CONST_INT)
1800 arm_split_constant (AND, SImode, NULL_RTX,
1801 INTVAL (operands[2]), operands[0],
1802 operands[1], optimize && can_create_pseudo_p ());
1807 else /* TARGET_THUMB1 */
1809 if (GET_CODE (operands[2]) != CONST_INT)
1810 operands[2] = force_reg (SImode, operands[2]);
1815 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
1817 operands[2] = force_reg (SImode,
1818 GEN_INT (~INTVAL (operands[2])));
1820 emit_insn (gen_bicsi3 (operands[0], operands[2], operands[1]));
1825 for (i = 9; i <= 31; i++)
1827 if ((((HOST_WIDE_INT) 1) << i) - 1 == INTVAL (operands[2]))
1829 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
1833 else if ((((HOST_WIDE_INT) 1) << i) - 1
1834 == ~INTVAL (operands[2]))
1836 rtx shift = GEN_INT (i);
1837 rtx reg = gen_reg_rtx (SImode);
1839 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
1840 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
1846 operands[2] = force_reg (SImode, operands[2]);
1852 ; ??? Check split length for Thumb-2
1853 (define_insn_and_split "*arm_andsi3_insn"
1854 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1855 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
1856 (match_operand:SI 2 "reg_or_int_operand" "rI,K,?n")))]
1860 bic%?\\t%0, %1, #%B2
1863 && GET_CODE (operands[2]) == CONST_INT
1864 && !(const_ok_for_arm (INTVAL (operands[2]))
1865 || const_ok_for_arm (~INTVAL (operands[2])))"
1866 [(clobber (const_int 0))]
1868 arm_split_constant (AND, SImode, curr_insn,
1869 INTVAL (operands[2]), operands[0], operands[1], 0);
1872 [(set_attr "length" "4,4,16")
1873 (set_attr "predicable" "yes")]
1876 (define_insn "*thumb1_andsi3_insn"
1877 [(set (match_operand:SI 0 "register_operand" "=l")
1878 (and:SI (match_operand:SI 1 "register_operand" "%0")
1879 (match_operand:SI 2 "register_operand" "l")))]
1882 [(set_attr "length" "2")]
1885 (define_insn "*andsi3_compare0"
1886 [(set (reg:CC_NOOV CC_REGNUM)
1888 (and:SI (match_operand:SI 1 "s_register_operand" "r,r")
1889 (match_operand:SI 2 "arm_not_operand" "rI,K"))
1891 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1892 (and:SI (match_dup 1) (match_dup 2)))]
1896 bic%.\\t%0, %1, #%B2"
1897 [(set_attr "conds" "set")]
1900 (define_insn "*andsi3_compare0_scratch"
1901 [(set (reg:CC_NOOV CC_REGNUM)
1903 (and:SI (match_operand:SI 0 "s_register_operand" "r,r")
1904 (match_operand:SI 1 "arm_not_operand" "rI,K"))
1906 (clobber (match_scratch:SI 2 "=X,r"))]
1910 bic%.\\t%2, %0, #%B1"
1911 [(set_attr "conds" "set")]
1914 (define_insn "*zeroextractsi_compare0_scratch"
1915 [(set (reg:CC_NOOV CC_REGNUM)
1916 (compare:CC_NOOV (zero_extract:SI
1917 (match_operand:SI 0 "s_register_operand" "r")
1918 (match_operand 1 "const_int_operand" "n")
1919 (match_operand 2 "const_int_operand" "n"))
1922 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
1923 && INTVAL (operands[1]) > 0
1924 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
1925 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
1927 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
1928 << INTVAL (operands[2]));
1929 output_asm_insn (\"tst%?\\t%0, %1\", operands);
1932 [(set_attr "conds" "set")]
1935 (define_insn_and_split "*ne_zeroextractsi"
1936 [(set (match_operand:SI 0 "s_register_operand" "=r")
1937 (ne:SI (zero_extract:SI
1938 (match_operand:SI 1 "s_register_operand" "r")
1939 (match_operand:SI 2 "const_int_operand" "n")
1940 (match_operand:SI 3 "const_int_operand" "n"))
1942 (clobber (reg:CC CC_REGNUM))]
1944 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1945 && INTVAL (operands[2]) > 0
1946 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1947 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1950 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
1951 && INTVAL (operands[2]) > 0
1952 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
1953 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
1954 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1955 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
1957 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
1959 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1960 (match_dup 0) (const_int 1)))]
1962 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
1963 << INTVAL (operands[3]));
1965 [(set_attr "conds" "clob")
1966 (set (attr "length")
1967 (if_then_else (eq_attr "is_thumb" "yes")
1972 (define_insn_and_split "*ne_zeroextractsi_shifted"
1973 [(set (match_operand:SI 0 "s_register_operand" "=r")
1974 (ne:SI (zero_extract:SI
1975 (match_operand:SI 1 "s_register_operand" "r")
1976 (match_operand:SI 2 "const_int_operand" "n")
1979 (clobber (reg:CC CC_REGNUM))]
1983 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
1984 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
1986 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
1988 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
1989 (match_dup 0) (const_int 1)))]
1991 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
1993 [(set_attr "conds" "clob")
1994 (set_attr "length" "8")]
1997 (define_insn_and_split "*ite_ne_zeroextractsi"
1998 [(set (match_operand:SI 0 "s_register_operand" "=r")
1999 (if_then_else:SI (ne (zero_extract:SI
2000 (match_operand:SI 1 "s_register_operand" "r")
2001 (match_operand:SI 2 "const_int_operand" "n")
2002 (match_operand:SI 3 "const_int_operand" "n"))
2004 (match_operand:SI 4 "arm_not_operand" "rIK")
2006 (clobber (reg:CC CC_REGNUM))]
2008 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2009 && INTVAL (operands[2]) > 0
2010 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2011 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2012 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2015 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2016 && INTVAL (operands[2]) > 0
2017 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2018 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2019 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2020 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2021 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2023 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2025 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2026 (match_dup 0) (match_dup 4)))]
2028 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2029 << INTVAL (operands[3]));
2031 [(set_attr "conds" "clob")
2032 (set_attr "length" "8")]
2035 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2036 [(set (match_operand:SI 0 "s_register_operand" "=r")
2037 (if_then_else:SI (ne (zero_extract:SI
2038 (match_operand:SI 1 "s_register_operand" "r")
2039 (match_operand:SI 2 "const_int_operand" "n")
2042 (match_operand:SI 3 "arm_not_operand" "rIK")
2044 (clobber (reg:CC CC_REGNUM))]
2045 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2047 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2048 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2049 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2051 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2053 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2054 (match_dup 0) (match_dup 3)))]
2056 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2058 [(set_attr "conds" "clob")
2059 (set_attr "length" "8")]
2063 [(set (match_operand:SI 0 "s_register_operand" "")
2064 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "")
2065 (match_operand:SI 2 "const_int_operand" "")
2066 (match_operand:SI 3 "const_int_operand" "")))
2067 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2069 [(set (match_dup 4) (ashift:SI (match_dup 1) (match_dup 2)))
2070 (set (match_dup 0) (lshiftrt:SI (match_dup 4) (match_dup 3)))]
2072 HOST_WIDE_INT temp = INTVAL (operands[2]);
2074 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2075 operands[3] = GEN_INT (32 - temp);
2079 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
2081 [(set (match_operand:SI 0 "s_register_operand" "")
2082 (match_operator:SI 1 "shiftable_operator"
2083 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2084 (match_operand:SI 3 "const_int_operand" "")
2085 (match_operand:SI 4 "const_int_operand" ""))
2086 (match_operand:SI 5 "s_register_operand" "")]))
2087 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2089 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2092 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2095 HOST_WIDE_INT temp = INTVAL (operands[3]);
2097 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2098 operands[4] = GEN_INT (32 - temp);
2103 [(set (match_operand:SI 0 "s_register_operand" "")
2104 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "")
2105 (match_operand:SI 2 "const_int_operand" "")
2106 (match_operand:SI 3 "const_int_operand" "")))]
2108 [(set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))
2109 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (match_dup 3)))]
2111 HOST_WIDE_INT temp = INTVAL (operands[2]);
2113 operands[2] = GEN_INT (32 - temp - INTVAL (operands[3]));
2114 operands[3] = GEN_INT (32 - temp);
2119 [(set (match_operand:SI 0 "s_register_operand" "")
2120 (match_operator:SI 1 "shiftable_operator"
2121 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2122 (match_operand:SI 3 "const_int_operand" "")
2123 (match_operand:SI 4 "const_int_operand" ""))
2124 (match_operand:SI 5 "s_register_operand" "")]))
2125 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2127 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2130 [(ashiftrt:SI (match_dup 6) (match_dup 4))
2133 HOST_WIDE_INT temp = INTVAL (operands[3]);
2135 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2136 operands[4] = GEN_INT (32 - temp);
2140 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
2141 ;;; represented by the bitfield, then this will produce incorrect results.
2142 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
2143 ;;; which have a real bit-field insert instruction, the truncation happens
2144 ;;; in the bit-field insert instruction itself. Since arm does not have a
2145 ;;; bit-field insert instruction, we would have to emit code here to truncate
2146 ;;; the value before we insert. This loses some of the advantage of having
2147 ;;; this insv pattern, so this pattern needs to be reevalutated.
2149 (define_expand "insv"
2150 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "")
2151 (match_operand:SI 1 "general_operand" "")
2152 (match_operand:SI 2 "general_operand" ""))
2153 (match_operand:SI 3 "reg_or_int_operand" ""))]
2154 "TARGET_ARM || arm_arch_thumb2"
2157 int start_bit = INTVAL (operands[2]);
2158 int width = INTVAL (operands[1]);
2159 HOST_WIDE_INT mask = (((HOST_WIDE_INT)1) << width) - 1;
2160 rtx target, subtarget;
2162 if (arm_arch_thumb2)
2164 bool use_bfi = TRUE;
2166 if (GET_CODE (operands[3]) == CONST_INT)
2168 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
2172 emit_insn (gen_insv_zero (operands[0], operands[1],
2177 /* See if the set can be done with a single orr instruction. */
2178 if (val == mask && const_ok_for_arm (val << start_bit))
2184 if (GET_CODE (operands[3]) != REG)
2185 operands[3] = force_reg (SImode, operands[3]);
2187 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
2193 target = operands[0];
2194 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
2195 subreg as the final target. */
2196 if (GET_CODE (target) == SUBREG)
2198 subtarget = gen_reg_rtx (SImode);
2199 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
2200 < GET_MODE_SIZE (SImode))
2201 target = SUBREG_REG (target);
2206 if (GET_CODE (operands[3]) == CONST_INT)
2208 /* Since we are inserting a known constant, we may be able to
2209 reduce the number of bits that we have to clear so that
2210 the mask becomes simple. */
2211 /* ??? This code does not check to see if the new mask is actually
2212 simpler. It may not be. */
2213 rtx op1 = gen_reg_rtx (SImode);
2214 /* ??? Truncate operand3 to fit in the bitfield. See comment before
2215 start of this pattern. */
2216 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
2217 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
2219 emit_insn (gen_andsi3 (op1, operands[0],
2220 gen_int_mode (~mask2, SImode)));
2221 emit_insn (gen_iorsi3 (subtarget, op1,
2222 gen_int_mode (op3_value << start_bit, SImode)));
2224 else if (start_bit == 0
2225 && !(const_ok_for_arm (mask)
2226 || const_ok_for_arm (~mask)))
2228 /* A Trick, since we are setting the bottom bits in the word,
2229 we can shift operand[3] up, operand[0] down, OR them together
2230 and rotate the result back again. This takes 3 insns, and
2231 the third might be mergeable into another op. */
2232 /* The shift up copes with the possibility that operand[3] is
2233 wider than the bitfield. */
2234 rtx op0 = gen_reg_rtx (SImode);
2235 rtx op1 = gen_reg_rtx (SImode);
2237 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2238 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
2239 emit_insn (gen_iorsi3 (op1, op1, op0));
2240 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
2242 else if ((width + start_bit == 32)
2243 && !(const_ok_for_arm (mask)
2244 || const_ok_for_arm (~mask)))
2246 /* Similar trick, but slightly less efficient. */
2248 rtx op0 = gen_reg_rtx (SImode);
2249 rtx op1 = gen_reg_rtx (SImode);
2251 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
2252 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
2253 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
2254 emit_insn (gen_iorsi3 (subtarget, op1, op0));
2258 rtx op0 = gen_int_mode (mask, SImode);
2259 rtx op1 = gen_reg_rtx (SImode);
2260 rtx op2 = gen_reg_rtx (SImode);
2262 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
2264 rtx tmp = gen_reg_rtx (SImode);
2266 emit_insn (gen_movsi (tmp, op0));
2270 /* Mask out any bits in operand[3] that are not needed. */
2271 emit_insn (gen_andsi3 (op1, operands[3], op0));
2273 if (GET_CODE (op0) == CONST_INT
2274 && (const_ok_for_arm (mask << start_bit)
2275 || const_ok_for_arm (~(mask << start_bit))))
2277 op0 = gen_int_mode (~(mask << start_bit), SImode);
2278 emit_insn (gen_andsi3 (op2, operands[0], op0));
2282 if (GET_CODE (op0) == CONST_INT)
2284 rtx tmp = gen_reg_rtx (SImode);
2286 emit_insn (gen_movsi (tmp, op0));
2291 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
2293 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
2297 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
2299 emit_insn (gen_iorsi3 (subtarget, op1, op2));
2302 if (subtarget != target)
2304 /* If TARGET is still a SUBREG, then it must be wider than a word,
2305 so we must be careful only to set the subword we were asked to. */
2306 if (GET_CODE (target) == SUBREG)
2307 emit_move_insn (target, subtarget);
2309 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
2316 (define_insn "insv_zero"
2317 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2318 (match_operand:SI 1 "const_int_operand" "M")
2319 (match_operand:SI 2 "const_int_operand" "M"))
2323 [(set_attr "length" "4")
2324 (set_attr "predicable" "yes")]
2327 (define_insn "insv_t2"
2328 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
2329 (match_operand:SI 1 "const_int_operand" "M")
2330 (match_operand:SI 2 "const_int_operand" "M"))
2331 (match_operand:SI 3 "s_register_operand" "r"))]
2333 "bfi%?\t%0, %3, %2, %1"
2334 [(set_attr "length" "4")
2335 (set_attr "predicable" "yes")]
2338 ; constants for op 2 will never be given to these patterns.
2339 (define_insn_and_split "*anddi_notdi_di"
2340 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2341 (and:DI (not:DI (match_operand:DI 1 "s_register_operand" "r,0"))
2342 (match_operand:DI 2 "s_register_operand" "0,r")))]
2345 "TARGET_32BIT && reload_completed && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2346 [(set (match_dup 0) (and:SI (not:SI (match_dup 1)) (match_dup 2)))
2347 (set (match_dup 3) (and:SI (not:SI (match_dup 4)) (match_dup 5)))]
2350 operands[3] = gen_highpart (SImode, operands[0]);
2351 operands[0] = gen_lowpart (SImode, operands[0]);
2352 operands[4] = gen_highpart (SImode, operands[1]);
2353 operands[1] = gen_lowpart (SImode, operands[1]);
2354 operands[5] = gen_highpart (SImode, operands[2]);
2355 operands[2] = gen_lowpart (SImode, operands[2]);
2357 [(set_attr "length" "8")
2358 (set_attr "predicable" "yes")]
2361 (define_insn_and_split "*anddi_notzesidi_di"
2362 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2363 (and:DI (not:DI (zero_extend:DI
2364 (match_operand:SI 2 "s_register_operand" "r,r")))
2365 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2368 bic%?\\t%Q0, %Q1, %2
2370 ; (not (zero_extend ...)) allows us to just copy the high word from
2371 ; operand1 to operand0.
2374 && operands[0] != operands[1]"
2375 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2376 (set (match_dup 3) (match_dup 4))]
2379 operands[3] = gen_highpart (SImode, operands[0]);
2380 operands[0] = gen_lowpart (SImode, operands[0]);
2381 operands[4] = gen_highpart (SImode, operands[1]);
2382 operands[1] = gen_lowpart (SImode, operands[1]);
2384 [(set_attr "length" "4,8")
2385 (set_attr "predicable" "yes")]
2388 (define_insn_and_split "*anddi_notsesidi_di"
2389 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2390 (and:DI (not:DI (sign_extend:DI
2391 (match_operand:SI 2 "s_register_operand" "r,r")))
2392 (match_operand:DI 1 "s_register_operand" "0,r")))]
2395 "TARGET_32BIT && reload_completed"
2396 [(set (match_dup 0) (and:SI (not:SI (match_dup 2)) (match_dup 1)))
2397 (set (match_dup 3) (and:SI (not:SI
2398 (ashiftrt:SI (match_dup 2) (const_int 31)))
2402 operands[3] = gen_highpart (SImode, operands[0]);
2403 operands[0] = gen_lowpart (SImode, operands[0]);
2404 operands[4] = gen_highpart (SImode, operands[1]);
2405 operands[1] = gen_lowpart (SImode, operands[1]);
2407 [(set_attr "length" "8")
2408 (set_attr "predicable" "yes")]
2411 (define_insn "andsi_notsi_si"
2412 [(set (match_operand:SI 0 "s_register_operand" "=r")
2413 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2414 (match_operand:SI 1 "s_register_operand" "r")))]
2416 "bic%?\\t%0, %1, %2"
2417 [(set_attr "predicable" "yes")]
2420 (define_insn "bicsi3"
2421 [(set (match_operand:SI 0 "register_operand" "=l")
2422 (and:SI (not:SI (match_operand:SI 1 "register_operand" "l"))
2423 (match_operand:SI 2 "register_operand" "0")))]
2426 [(set_attr "length" "2")]
2429 (define_insn "andsi_not_shiftsi_si"
2430 [(set (match_operand:SI 0 "s_register_operand" "=r")
2431 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
2432 [(match_operand:SI 2 "s_register_operand" "r")
2433 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
2434 (match_operand:SI 1 "s_register_operand" "r")))]
2436 "bic%?\\t%0, %1, %2%S4"
2437 [(set_attr "predicable" "yes")
2438 (set_attr "shift" "2")
2439 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
2440 (const_string "alu_shift")
2441 (const_string "alu_shift_reg")))]
2444 (define_insn "*andsi_notsi_si_compare0"
2445 [(set (reg:CC_NOOV CC_REGNUM)
2447 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2448 (match_operand:SI 1 "s_register_operand" "r"))
2450 (set (match_operand:SI 0 "s_register_operand" "=r")
2451 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
2453 "bic%.\\t%0, %1, %2"
2454 [(set_attr "conds" "set")]
2457 (define_insn "*andsi_notsi_si_compare0_scratch"
2458 [(set (reg:CC_NOOV CC_REGNUM)
2460 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2461 (match_operand:SI 1 "s_register_operand" "r"))
2463 (clobber (match_scratch:SI 0 "=r"))]
2465 "bic%.\\t%0, %1, %2"
2466 [(set_attr "conds" "set")]
2469 (define_insn "iordi3"
2470 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2471 (ior:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2472 (match_operand:DI 2 "s_register_operand" "r,r")))]
2473 "TARGET_32BIT && ! TARGET_IWMMXT"
2475 [(set_attr "length" "8")
2476 (set_attr "predicable" "yes")]
2479 (define_insn "*iordi_zesidi_di"
2480 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2481 (ior:DI (zero_extend:DI
2482 (match_operand:SI 2 "s_register_operand" "r,r"))
2483 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2486 orr%?\\t%Q0, %Q1, %2
2488 [(set_attr "length" "4,8")
2489 (set_attr "predicable" "yes")]
2492 (define_insn "*iordi_sesidi_di"
2493 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2494 (ior:DI (sign_extend:DI
2495 (match_operand:SI 2 "s_register_operand" "r,r"))
2496 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2499 [(set_attr "length" "8")
2500 (set_attr "predicable" "yes")]
2503 (define_expand "iorsi3"
2504 [(set (match_operand:SI 0 "s_register_operand" "")
2505 (ior:SI (match_operand:SI 1 "s_register_operand" "")
2506 (match_operand:SI 2 "reg_or_int_operand" "")))]
2509 if (GET_CODE (operands[2]) == CONST_INT)
2513 arm_split_constant (IOR, SImode, NULL_RTX,
2514 INTVAL (operands[2]), operands[0], operands[1],
2515 optimize && can_create_pseudo_p ());
2518 else /* TARGET_THUMB1 */
2519 operands [2] = force_reg (SImode, operands [2]);
2524 (define_insn_and_split "*arm_iorsi3"
2525 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2526 (ior:SI (match_operand:SI 1 "s_register_operand" "r,r")
2527 (match_operand:SI 2 "reg_or_int_operand" "rI,?n")))]
2533 && GET_CODE (operands[2]) == CONST_INT
2534 && !const_ok_for_arm (INTVAL (operands[2]))"
2535 [(clobber (const_int 0))]
2537 arm_split_constant (IOR, SImode, curr_insn,
2538 INTVAL (operands[2]), operands[0], operands[1], 0);
2541 [(set_attr "length" "4,16")
2542 (set_attr "predicable" "yes")]
2545 (define_insn "*thumb1_iorsi3"
2546 [(set (match_operand:SI 0 "register_operand" "=l")
2547 (ior:SI (match_operand:SI 1 "register_operand" "%0")
2548 (match_operand:SI 2 "register_operand" "l")))]
2551 [(set_attr "length" "2")]
2555 [(match_scratch:SI 3 "r")
2556 (set (match_operand:SI 0 "arm_general_register_operand" "")
2557 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
2558 (match_operand:SI 2 "const_int_operand" "")))]
2560 && !const_ok_for_arm (INTVAL (operands[2]))
2561 && const_ok_for_arm (~INTVAL (operands[2]))"
2562 [(set (match_dup 3) (match_dup 2))
2563 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
2567 (define_insn "*iorsi3_compare0"
2568 [(set (reg:CC_NOOV CC_REGNUM)
2569 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2570 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2572 (set (match_operand:SI 0 "s_register_operand" "=r")
2573 (ior:SI (match_dup 1) (match_dup 2)))]
2575 "orr%.\\t%0, %1, %2"
2576 [(set_attr "conds" "set")]
2579 (define_insn "*iorsi3_compare0_scratch"
2580 [(set (reg:CC_NOOV CC_REGNUM)
2581 (compare:CC_NOOV (ior:SI (match_operand:SI 1 "s_register_operand" "%r")
2582 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2584 (clobber (match_scratch:SI 0 "=r"))]
2586 "orr%.\\t%0, %1, %2"
2587 [(set_attr "conds" "set")]
2590 (define_insn "xordi3"
2591 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2592 (xor:DI (match_operand:DI 1 "s_register_operand" "%0,r")
2593 (match_operand:DI 2 "s_register_operand" "r,r")))]
2594 "TARGET_32BIT && !TARGET_IWMMXT"
2596 [(set_attr "length" "8")
2597 (set_attr "predicable" "yes")]
2600 (define_insn "*xordi_zesidi_di"
2601 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2602 (xor:DI (zero_extend:DI
2603 (match_operand:SI 2 "s_register_operand" "r,r"))
2604 (match_operand:DI 1 "s_register_operand" "0,?r")))]
2607 eor%?\\t%Q0, %Q1, %2
2609 [(set_attr "length" "4,8")
2610 (set_attr "predicable" "yes")]
2613 (define_insn "*xordi_sesidi_di"
2614 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
2615 (xor:DI (sign_extend:DI
2616 (match_operand:SI 2 "s_register_operand" "r,r"))
2617 (match_operand:DI 1 "s_register_operand" "?r,0")))]
2620 [(set_attr "length" "8")
2621 (set_attr "predicable" "yes")]
2624 (define_expand "xorsi3"
2625 [(set (match_operand:SI 0 "s_register_operand" "")
2626 (xor:SI (match_operand:SI 1 "s_register_operand" "")
2627 (match_operand:SI 2 "arm_rhs_operand" "")))]
2630 if (GET_CODE (operands[2]) == CONST_INT)
2631 operands[2] = force_reg (SImode, operands[2]);
2635 (define_insn "*arm_xorsi3"
2636 [(set (match_operand:SI 0 "s_register_operand" "=r")
2637 (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2638 (match_operand:SI 2 "arm_rhs_operand" "rI")))]
2640 "eor%?\\t%0, %1, %2"
2641 [(set_attr "predicable" "yes")]
2644 (define_insn "*thumb1_xorsi3"
2645 [(set (match_operand:SI 0 "register_operand" "=l")
2646 (xor:SI (match_operand:SI 1 "register_operand" "%0")
2647 (match_operand:SI 2 "register_operand" "l")))]
2650 [(set_attr "length" "2")]
2653 (define_insn "*xorsi3_compare0"
2654 [(set (reg:CC_NOOV CC_REGNUM)
2655 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r")
2656 (match_operand:SI 2 "arm_rhs_operand" "rI"))
2658 (set (match_operand:SI 0 "s_register_operand" "=r")
2659 (xor:SI (match_dup 1) (match_dup 2)))]
2661 "eor%.\\t%0, %1, %2"
2662 [(set_attr "conds" "set")]
2665 (define_insn "*xorsi3_compare0_scratch"
2666 [(set (reg:CC_NOOV CC_REGNUM)
2667 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r")
2668 (match_operand:SI 1 "arm_rhs_operand" "rI"))
2672 [(set_attr "conds" "set")]
2675 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
2676 ; (NOT D) we can sometimes merge the final NOT into one of the following
2680 [(set (match_operand:SI 0 "s_register_operand" "")
2681 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
2682 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
2683 (match_operand:SI 3 "arm_rhs_operand" "")))
2684 (clobber (match_operand:SI 4 "s_register_operand" ""))]
2686 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
2687 (not:SI (match_dup 3))))
2688 (set (match_dup 0) (not:SI (match_dup 4)))]
2692 (define_insn "*andsi_iorsi3_notsi"
2693 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
2694 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "r,r,0")
2695 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
2696 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
2698 "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
2699 [(set_attr "length" "8")
2700 (set_attr "ce_count" "2")
2701 (set_attr "predicable" "yes")]
2704 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
2705 ; insns are available?
2707 [(set (match_operand:SI 0 "s_register_operand" "")
2708 (match_operator:SI 1 "logical_binary_operator"
2709 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2710 (match_operand:SI 3 "const_int_operand" "")
2711 (match_operand:SI 4 "const_int_operand" ""))
2712 (match_operator:SI 9 "logical_binary_operator"
2713 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2714 (match_operand:SI 6 "const_int_operand" ""))
2715 (match_operand:SI 7 "s_register_operand" "")])]))
2716 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2718 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2719 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2722 [(ashift:SI (match_dup 2) (match_dup 4))
2726 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2729 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2733 [(set (match_operand:SI 0 "s_register_operand" "")
2734 (match_operator:SI 1 "logical_binary_operator"
2735 [(match_operator:SI 9 "logical_binary_operator"
2736 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2737 (match_operand:SI 6 "const_int_operand" ""))
2738 (match_operand:SI 7 "s_register_operand" "")])
2739 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2740 (match_operand:SI 3 "const_int_operand" "")
2741 (match_operand:SI 4 "const_int_operand" ""))]))
2742 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2744 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2745 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2748 [(ashift:SI (match_dup 2) (match_dup 4))
2752 [(lshiftrt:SI (match_dup 8) (match_dup 6))
2755 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2759 [(set (match_operand:SI 0 "s_register_operand" "")
2760 (match_operator:SI 1 "logical_binary_operator"
2761 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2762 (match_operand:SI 3 "const_int_operand" "")
2763 (match_operand:SI 4 "const_int_operand" ""))
2764 (match_operator:SI 9 "logical_binary_operator"
2765 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2766 (match_operand:SI 6 "const_int_operand" ""))
2767 (match_operand:SI 7 "s_register_operand" "")])]))
2768 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2770 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2771 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2774 [(ashift:SI (match_dup 2) (match_dup 4))
2778 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2781 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2785 [(set (match_operand:SI 0 "s_register_operand" "")
2786 (match_operator:SI 1 "logical_binary_operator"
2787 [(match_operator:SI 9 "logical_binary_operator"
2788 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
2789 (match_operand:SI 6 "const_int_operand" ""))
2790 (match_operand:SI 7 "s_register_operand" "")])
2791 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
2792 (match_operand:SI 3 "const_int_operand" "")
2793 (match_operand:SI 4 "const_int_operand" ""))]))
2794 (clobber (match_operand:SI 8 "s_register_operand" ""))]
2796 && GET_CODE (operands[1]) == GET_CODE (operands[9])
2797 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
2800 [(ashift:SI (match_dup 2) (match_dup 4))
2804 [(ashiftrt:SI (match_dup 8) (match_dup 6))
2807 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
2811 ;; Minimum and maximum insns
2813 (define_expand "smaxsi3"
2815 (set (match_operand:SI 0 "s_register_operand" "")
2816 (smax:SI (match_operand:SI 1 "s_register_operand" "")
2817 (match_operand:SI 2 "arm_rhs_operand" "")))
2818 (clobber (reg:CC CC_REGNUM))])]
2821 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
2823 /* No need for a clobber of the condition code register here. */
2824 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2825 gen_rtx_SMAX (SImode, operands[1],
2831 (define_insn "*smax_0"
2832 [(set (match_operand:SI 0 "s_register_operand" "=r")
2833 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2836 "bic%?\\t%0, %1, %1, asr #31"
2837 [(set_attr "predicable" "yes")]
2840 (define_insn "*smax_m1"
2841 [(set (match_operand:SI 0 "s_register_operand" "=r")
2842 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
2845 "orr%?\\t%0, %1, %1, asr #31"
2846 [(set_attr "predicable" "yes")]
2849 (define_insn "*arm_smax_insn"
2850 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2851 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2852 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2853 (clobber (reg:CC CC_REGNUM))]
2856 cmp\\t%1, %2\;movlt\\t%0, %2
2857 cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
2858 [(set_attr "conds" "clob")
2859 (set_attr "length" "8,12")]
2862 (define_expand "sminsi3"
2864 (set (match_operand:SI 0 "s_register_operand" "")
2865 (smin:SI (match_operand:SI 1 "s_register_operand" "")
2866 (match_operand:SI 2 "arm_rhs_operand" "")))
2867 (clobber (reg:CC CC_REGNUM))])]
2870 if (operands[2] == const0_rtx)
2872 /* No need for a clobber of the condition code register here. */
2873 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
2874 gen_rtx_SMIN (SImode, operands[1],
2880 (define_insn "*smin_0"
2881 [(set (match_operand:SI 0 "s_register_operand" "=r")
2882 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
2885 "and%?\\t%0, %1, %1, asr #31"
2886 [(set_attr "predicable" "yes")]
2889 (define_insn "*arm_smin_insn"
2890 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2891 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
2892 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
2893 (clobber (reg:CC CC_REGNUM))]
2896 cmp\\t%1, %2\;movge\\t%0, %2
2897 cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
2898 [(set_attr "conds" "clob")
2899 (set_attr "length" "8,12")]
2902 (define_expand "umaxsi3"
2904 (set (match_operand:SI 0 "s_register_operand" "")
2905 (umax:SI (match_operand:SI 1 "s_register_operand" "")
2906 (match_operand:SI 2 "arm_rhs_operand" "")))
2907 (clobber (reg:CC CC_REGNUM))])]
2912 (define_insn "*arm_umaxsi3"
2913 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2914 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2915 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2916 (clobber (reg:CC CC_REGNUM))]
2919 cmp\\t%1, %2\;movcc\\t%0, %2
2920 cmp\\t%1, %2\;movcs\\t%0, %1
2921 cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
2922 [(set_attr "conds" "clob")
2923 (set_attr "length" "8,8,12")]
2926 (define_expand "uminsi3"
2928 (set (match_operand:SI 0 "s_register_operand" "")
2929 (umin:SI (match_operand:SI 1 "s_register_operand" "")
2930 (match_operand:SI 2 "arm_rhs_operand" "")))
2931 (clobber (reg:CC CC_REGNUM))])]
2936 (define_insn "*arm_uminsi3"
2937 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2938 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
2939 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
2940 (clobber (reg:CC CC_REGNUM))]
2943 cmp\\t%1, %2\;movcs\\t%0, %2
2944 cmp\\t%1, %2\;movcc\\t%0, %1
2945 cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
2946 [(set_attr "conds" "clob")
2947 (set_attr "length" "8,8,12")]
2950 (define_insn "*store_minmaxsi"
2951 [(set (match_operand:SI 0 "memory_operand" "=m")
2952 (match_operator:SI 3 "minmax_operator"
2953 [(match_operand:SI 1 "s_register_operand" "r")
2954 (match_operand:SI 2 "s_register_operand" "r")]))
2955 (clobber (reg:CC CC_REGNUM))]
2958 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
2959 operands[1], operands[2]);
2960 output_asm_insn (\"cmp\\t%1, %2\", operands);
2962 output_asm_insn (\"ite\t%d3\", operands);
2963 output_asm_insn (\"str%d3\\t%1, %0\", operands);
2964 output_asm_insn (\"str%D3\\t%2, %0\", operands);
2967 [(set_attr "conds" "clob")
2968 (set (attr "length")
2969 (if_then_else (eq_attr "is_thumb" "yes")
2972 (set_attr "type" "store1")]
2975 ; Reject the frame pointer in operand[1], since reloading this after
2976 ; it has been eliminated can cause carnage.
2977 (define_insn "*minmax_arithsi"
2978 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
2979 (match_operator:SI 4 "shiftable_operator"
2980 [(match_operator:SI 5 "minmax_operator"
2981 [(match_operand:SI 2 "s_register_operand" "r,r")
2982 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
2983 (match_operand:SI 1 "s_register_operand" "0,?r")]))
2984 (clobber (reg:CC CC_REGNUM))]
2985 "TARGET_32BIT && !arm_eliminable_register (operands[1])"
2988 enum rtx_code code = GET_CODE (operands[4]);
2991 if (which_alternative != 0 || operands[3] != const0_rtx
2992 || (code != PLUS && code != MINUS && code != IOR && code != XOR))
2997 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
2998 operands[2], operands[3]);
2999 output_asm_insn (\"cmp\\t%2, %3\", operands);
3003 output_asm_insn (\"ite\\t%d5\", operands);
3005 output_asm_insn (\"it\\t%d5\", operands);
3007 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3009 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3012 [(set_attr "conds" "clob")
3013 (set (attr "length")
3014 (if_then_else (eq_attr "is_thumb" "yes")
3020 ;; Shift and rotation insns
3022 (define_expand "ashldi3"
3023 [(set (match_operand:DI 0 "s_register_operand" "")
3024 (ashift:DI (match_operand:DI 1 "s_register_operand" "")
3025 (match_operand:SI 2 "reg_or_int_operand" "")))]
3028 if (GET_CODE (operands[2]) == CONST_INT)
3030 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3032 emit_insn (gen_arm_ashldi3_1bit (operands[0], operands[1]));
3035 /* Ideally we shouldn't fail here if we could know that operands[1]
3036 ends up already living in an iwmmxt register. Otherwise it's
3037 cheaper to have the alternate code being generated than moving
3038 values to iwmmxt regs and back. */
3041 else if (!TARGET_REALLY_IWMMXT && !(TARGET_HARD_FLOAT && TARGET_MAVERICK))
3046 (define_insn "arm_ashldi3_1bit"
3047 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3048 (ashift:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3050 (clobber (reg:CC CC_REGNUM))]
3052 "movs\\t%Q0, %Q1, asl #1\;adc\\t%R0, %R1, %R1"
3053 [(set_attr "conds" "clob")
3054 (set_attr "length" "8")]
3057 (define_expand "ashlsi3"
3058 [(set (match_operand:SI 0 "s_register_operand" "")
3059 (ashift:SI (match_operand:SI 1 "s_register_operand" "")
3060 (match_operand:SI 2 "arm_rhs_operand" "")))]
3063 if (GET_CODE (operands[2]) == CONST_INT
3064 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3066 emit_insn (gen_movsi (operands[0], const0_rtx));
3072 (define_insn "*thumb1_ashlsi3"
3073 [(set (match_operand:SI 0 "register_operand" "=l,l")
3074 (ashift:SI (match_operand:SI 1 "register_operand" "l,0")
3075 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3078 [(set_attr "length" "2")]
3081 (define_expand "ashrdi3"
3082 [(set (match_operand:DI 0 "s_register_operand" "")
3083 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3084 (match_operand:SI 2 "reg_or_int_operand" "")))]
3087 if (GET_CODE (operands[2]) == CONST_INT)
3089 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3091 emit_insn (gen_arm_ashrdi3_1bit (operands[0], operands[1]));
3094 /* Ideally we shouldn't fail here if we could know that operands[1]
3095 ends up already living in an iwmmxt register. Otherwise it's
3096 cheaper to have the alternate code being generated than moving
3097 values to iwmmxt regs and back. */
3100 else if (!TARGET_REALLY_IWMMXT)
3105 (define_insn "arm_ashrdi3_1bit"
3106 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3107 (ashiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3109 (clobber (reg:CC CC_REGNUM))]
3111 "movs\\t%R0, %R1, asr #1\;mov\\t%Q0, %Q1, rrx"
3112 [(set_attr "conds" "clob")
3113 (set_attr "length" "8")]
3116 (define_expand "ashrsi3"
3117 [(set (match_operand:SI 0 "s_register_operand" "")
3118 (ashiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3119 (match_operand:SI 2 "arm_rhs_operand" "")))]
3122 if (GET_CODE (operands[2]) == CONST_INT
3123 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3124 operands[2] = GEN_INT (31);
3128 (define_insn "*thumb1_ashrsi3"
3129 [(set (match_operand:SI 0 "register_operand" "=l,l")
3130 (ashiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3131 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3134 [(set_attr "length" "2")]
3137 (define_expand "lshrdi3"
3138 [(set (match_operand:DI 0 "s_register_operand" "")
3139 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "")
3140 (match_operand:SI 2 "reg_or_int_operand" "")))]
3143 if (GET_CODE (operands[2]) == CONST_INT)
3145 if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
3147 emit_insn (gen_arm_lshrdi3_1bit (operands[0], operands[1]));
3150 /* Ideally we shouldn't fail here if we could know that operands[1]
3151 ends up already living in an iwmmxt register. Otherwise it's
3152 cheaper to have the alternate code being generated than moving
3153 values to iwmmxt regs and back. */
3156 else if (!TARGET_REALLY_IWMMXT)
3161 (define_insn "arm_lshrdi3_1bit"
3162 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3163 (lshiftrt:DI (match_operand:DI 1 "s_register_operand" "?r,0")
3165 (clobber (reg:CC CC_REGNUM))]
3167 "movs\\t%R0, %R1, lsr #1\;mov\\t%Q0, %Q1, rrx"
3168 [(set_attr "conds" "clob")
3169 (set_attr "length" "8")]
3172 (define_expand "lshrsi3"
3173 [(set (match_operand:SI 0 "s_register_operand" "")
3174 (lshiftrt:SI (match_operand:SI 1 "s_register_operand" "")
3175 (match_operand:SI 2 "arm_rhs_operand" "")))]
3178 if (GET_CODE (operands[2]) == CONST_INT
3179 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3181 emit_insn (gen_movsi (operands[0], const0_rtx));
3187 (define_insn "*thumb1_lshrsi3"
3188 [(set (match_operand:SI 0 "register_operand" "=l,l")
3189 (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,0")
3190 (match_operand:SI 2 "nonmemory_operand" "N,l")))]
3193 [(set_attr "length" "2")]
3196 (define_expand "rotlsi3"
3197 [(set (match_operand:SI 0 "s_register_operand" "")
3198 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3199 (match_operand:SI 2 "reg_or_int_operand" "")))]
3202 if (GET_CODE (operands[2]) == CONST_INT)
3203 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
3206 rtx reg = gen_reg_rtx (SImode);
3207 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
3213 (define_expand "rotrsi3"
3214 [(set (match_operand:SI 0 "s_register_operand" "")
3215 (rotatert:SI (match_operand:SI 1 "s_register_operand" "")
3216 (match_operand:SI 2 "arm_rhs_operand" "")))]
3221 if (GET_CODE (operands[2]) == CONST_INT
3222 && ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
3223 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
3225 else /* TARGET_THUMB1 */
3227 if (GET_CODE (operands [2]) == CONST_INT)
3228 operands [2] = force_reg (SImode, operands[2]);
3233 (define_insn "*thumb1_rotrsi3"
3234 [(set (match_operand:SI 0 "register_operand" "=l")
3235 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
3236 (match_operand:SI 2 "register_operand" "l")))]
3239 [(set_attr "length" "2")]
3242 (define_insn "*arm_shiftsi3"
3243 [(set (match_operand:SI 0 "s_register_operand" "=r")
3244 (match_operator:SI 3 "shift_operator"
3245 [(match_operand:SI 1 "s_register_operand" "r")
3246 (match_operand:SI 2 "reg_or_int_operand" "rM")]))]
3248 "* return arm_output_shift(operands, 0);"
3249 [(set_attr "predicable" "yes")
3250 (set_attr "shift" "1")
3251 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3252 (const_string "alu_shift")
3253 (const_string "alu_shift_reg")))]
3256 (define_insn "*shiftsi3_compare0"
3257 [(set (reg:CC_NOOV CC_REGNUM)
3258 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3259 [(match_operand:SI 1 "s_register_operand" "r")
3260 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3262 (set (match_operand:SI 0 "s_register_operand" "=r")
3263 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
3265 "* return arm_output_shift(operands, 1);"
3266 [(set_attr "conds" "set")
3267 (set_attr "shift" "1")
3268 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3269 (const_string "alu_shift")
3270 (const_string "alu_shift_reg")))]
3273 (define_insn "*shiftsi3_compare0_scratch"
3274 [(set (reg:CC_NOOV CC_REGNUM)
3275 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
3276 [(match_operand:SI 1 "s_register_operand" "r")
3277 (match_operand:SI 2 "arm_rhs_operand" "rM")])
3279 (clobber (match_scratch:SI 0 "=r"))]
3281 "* return arm_output_shift(operands, 1);"
3282 [(set_attr "conds" "set")
3283 (set_attr "shift" "1")]
3286 (define_insn "*arm_notsi_shiftsi"
3287 [(set (match_operand:SI 0 "s_register_operand" "=r")
3288 (not:SI (match_operator:SI 3 "shift_operator"
3289 [(match_operand:SI 1 "s_register_operand" "r")
3290 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
3293 [(set_attr "predicable" "yes")
3294 (set_attr "shift" "1")
3295 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3296 (const_string "alu_shift")
3297 (const_string "alu_shift_reg")))]
3300 (define_insn "*arm_notsi_shiftsi_compare0"
3301 [(set (reg:CC_NOOV CC_REGNUM)
3302 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3303 [(match_operand:SI 1 "s_register_operand" "r")
3304 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3306 (set (match_operand:SI 0 "s_register_operand" "=r")
3307 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
3310 [(set_attr "conds" "set")
3311 (set_attr "shift" "1")
3312 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3313 (const_string "alu_shift")
3314 (const_string "alu_shift_reg")))]
3317 (define_insn "*arm_not_shiftsi_compare0_scratch"
3318 [(set (reg:CC_NOOV CC_REGNUM)
3319 (compare:CC_NOOV (not:SI (match_operator:SI 3 "shift_operator"
3320 [(match_operand:SI 1 "s_register_operand" "r")
3321 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3323 (clobber (match_scratch:SI 0 "=r"))]
3326 [(set_attr "conds" "set")
3327 (set_attr "shift" "1")
3328 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3329 (const_string "alu_shift")
3330 (const_string "alu_shift_reg")))]
3333 ;; We don't really have extzv, but defining this using shifts helps
3334 ;; to reduce register pressure later on.
3336 (define_expand "extzv"
3338 (ashift:SI (match_operand:SI 1 "register_operand" "")
3339 (match_operand:SI 2 "const_int_operand" "")))
3340 (set (match_operand:SI 0 "register_operand" "")
3341 (lshiftrt:SI (match_dup 4)
3342 (match_operand:SI 3 "const_int_operand" "")))]
3343 "TARGET_THUMB1 || arm_arch_thumb2"
3346 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
3347 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
3349 if (arm_arch_thumb2)
3351 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
3356 operands[3] = GEN_INT (rshift);
3360 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
3364 operands[2] = GEN_INT (lshift);
3365 operands[4] = gen_reg_rtx (SImode);
3370 [(set (match_operand:SI 0 "s_register_operand" "=r")
3371 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3372 (match_operand:SI 2 "const_int_operand" "M")
3373 (match_operand:SI 3 "const_int_operand" "M")))]
3375 "sbfx%?\t%0, %1, %3, %2"
3376 [(set_attr "length" "4")
3377 (set_attr "predicable" "yes")]
3380 (define_insn "extzv_t2"
3381 [(set (match_operand:SI 0 "s_register_operand" "=r")
3382 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
3383 (match_operand:SI 2 "const_int_operand" "M")
3384 (match_operand:SI 3 "const_int_operand" "M")))]
3386 "ubfx%?\t%0, %1, %3, %2"
3387 [(set_attr "length" "4")
3388 (set_attr "predicable" "yes")]
3392 ;; Unary arithmetic insns
3394 (define_expand "negdi2"
3396 [(set (match_operand:DI 0 "s_register_operand" "")
3397 (neg:DI (match_operand:DI 1 "s_register_operand" "")))
3398 (clobber (reg:CC CC_REGNUM))])]
3403 if (GET_CODE (operands[1]) != REG)
3404 operands[1] = force_reg (SImode, operands[1]);
3409 ;; The constraints here are to prevent a *partial* overlap (where %Q0 == %R1).
3410 ;; The second alternative is to allow the common case of a *full* overlap.
3411 (define_insn "*arm_negdi2"
3412 [(set (match_operand:DI 0 "s_register_operand" "=&r,r")
3413 (neg:DI (match_operand:DI 1 "s_register_operand" "?r,0")))
3414 (clobber (reg:CC CC_REGNUM))]
3416 "rsbs\\t%Q0, %Q1, #0\;rsc\\t%R0, %R1, #0"
3417 [(set_attr "conds" "clob")
3418 (set_attr "length" "8")]
3421 (define_insn "*thumb1_negdi2"
3422 [(set (match_operand:DI 0 "register_operand" "=&l")
3423 (neg:DI (match_operand:DI 1 "register_operand" "l")))
3424 (clobber (reg:CC CC_REGNUM))]
3426 "mov\\t%R0, #0\;neg\\t%Q0, %Q1\;sbc\\t%R0, %R1"
3427 [(set_attr "length" "6")]
3430 (define_expand "negsi2"
3431 [(set (match_operand:SI 0 "s_register_operand" "")
3432 (neg:SI (match_operand:SI 1 "s_register_operand" "")))]
3437 (define_insn "*arm_negsi2"
3438 [(set (match_operand:SI 0 "s_register_operand" "=r")
3439 (neg:SI (match_operand:SI 1 "s_register_operand" "r")))]
3441 "rsb%?\\t%0, %1, #0"
3442 [(set_attr "predicable" "yes")]
3445 (define_insn "*thumb1_negsi2"
3446 [(set (match_operand:SI 0 "register_operand" "=l")
3447 (neg:SI (match_operand:SI 1 "register_operand" "l")))]
3450 [(set_attr "length" "2")]
3453 (define_expand "negsf2"
3454 [(set (match_operand:SF 0 "s_register_operand" "")
3455 (neg:SF (match_operand:SF 1 "s_register_operand" "")))]
3456 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3460 (define_expand "negdf2"
3461 [(set (match_operand:DF 0 "s_register_operand" "")
3462 (neg:DF (match_operand:DF 1 "s_register_operand" "")))]
3463 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3466 ;; abssi2 doesn't really clobber the condition codes if a different register
3467 ;; is being set. To keep things simple, assume during rtl manipulations that
3468 ;; it does, but tell the final scan operator the truth. Similarly for
3471 (define_expand "abssi2"
3473 [(set (match_operand:SI 0 "s_register_operand" "")
3474 (abs:SI (match_operand:SI 1 "s_register_operand" "")))
3475 (clobber (match_dup 2))])]
3479 operands[2] = gen_rtx_SCRATCH (SImode);
3481 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
3484 (define_insn "*arm_abssi2"
3485 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3486 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
3487 (clobber (reg:CC CC_REGNUM))]
3490 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
3491 eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31"
3492 [(set_attr "conds" "clob,*")
3493 (set_attr "shift" "1")
3494 ;; predicable can't be set based on the variant, so left as no
3495 (set_attr "length" "8")]
3498 (define_insn_and_split "*thumb1_abssi2"
3499 [(set (match_operand:SI 0 "s_register_operand" "=l")
3500 (abs:SI (match_operand:SI 1 "s_register_operand" "l")))
3501 (clobber (match_scratch:SI 2 "=&l"))]
3504 "TARGET_THUMB1 && reload_completed"
3505 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3506 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))
3507 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3509 [(set_attr "length" "6")]
3512 (define_insn "*arm_neg_abssi2"
3513 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
3514 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
3515 (clobber (reg:CC CC_REGNUM))]
3518 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
3519 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31"
3520 [(set_attr "conds" "clob,*")
3521 (set_attr "shift" "1")
3522 ;; predicable can't be set based on the variant, so left as no
3523 (set_attr "length" "8")]
3526 (define_insn_and_split "*thumb1_neg_abssi2"
3527 [(set (match_operand:SI 0 "s_register_operand" "=l")
3528 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "l"))))
3529 (clobber (match_scratch:SI 2 "=&l"))]
3532 "TARGET_THUMB1 && reload_completed"
3533 [(set (match_dup 2) (ashiftrt:SI (match_dup 1) (const_int 31)))
3534 (set (match_dup 0) (minus:SI (match_dup 2) (match_dup 1)))
3535 (set (match_dup 0) (xor:SI (match_dup 0) (match_dup 2)))]
3537 [(set_attr "length" "6")]
3540 (define_expand "abssf2"
3541 [(set (match_operand:SF 0 "s_register_operand" "")
3542 (abs:SF (match_operand:SF 1 "s_register_operand" "")))]
3543 "TARGET_32BIT && TARGET_HARD_FLOAT"
3546 (define_expand "absdf2"
3547 [(set (match_operand:DF 0 "s_register_operand" "")
3548 (abs:DF (match_operand:DF 1 "s_register_operand" "")))]
3549 "TARGET_32BIT && TARGET_HARD_FLOAT"
3552 (define_expand "sqrtsf2"
3553 [(set (match_operand:SF 0 "s_register_operand" "")
3554 (sqrt:SF (match_operand:SF 1 "s_register_operand" "")))]
3555 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3558 (define_expand "sqrtdf2"
3559 [(set (match_operand:DF 0 "s_register_operand" "")
3560 (sqrt:DF (match_operand:DF 1 "s_register_operand" "")))]
3561 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
3564 (define_insn_and_split "one_cmpldi2"
3565 [(set (match_operand:DI 0 "s_register_operand" "=&r,&r")
3566 (not:DI (match_operand:DI 1 "s_register_operand" "?r,0")))]
3569 "TARGET_32BIT && reload_completed"
3570 [(set (match_dup 0) (not:SI (match_dup 1)))
3571 (set (match_dup 2) (not:SI (match_dup 3)))]
3574 operands[2] = gen_highpart (SImode, operands[0]);
3575 operands[0] = gen_lowpart (SImode, operands[0]);
3576 operands[3] = gen_highpart (SImode, operands[1]);
3577 operands[1] = gen_lowpart (SImode, operands[1]);
3579 [(set_attr "length" "8")
3580 (set_attr "predicable" "yes")]
3583 (define_expand "one_cmplsi2"
3584 [(set (match_operand:SI 0 "s_register_operand" "")
3585 (not:SI (match_operand:SI 1 "s_register_operand" "")))]
3590 (define_insn "*arm_one_cmplsi2"
3591 [(set (match_operand:SI 0 "s_register_operand" "=r")
3592 (not:SI (match_operand:SI 1 "s_register_operand" "r")))]
3595 [(set_attr "predicable" "yes")]
3598 (define_insn "*thumb1_one_cmplsi2"
3599 [(set (match_operand:SI 0 "register_operand" "=l")
3600 (not:SI (match_operand:SI 1 "register_operand" "l")))]
3603 [(set_attr "length" "2")]
3606 (define_insn "*notsi_compare0"
3607 [(set (reg:CC_NOOV CC_REGNUM)
3608 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3610 (set (match_operand:SI 0 "s_register_operand" "=r")
3611 (not:SI (match_dup 1)))]
3614 [(set_attr "conds" "set")]
3617 (define_insn "*notsi_compare0_scratch"
3618 [(set (reg:CC_NOOV CC_REGNUM)
3619 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
3621 (clobber (match_scratch:SI 0 "=r"))]
3624 [(set_attr "conds" "set")]
3627 ;; Fixed <--> Floating conversion insns
3629 (define_expand "floatsisf2"
3630 [(set (match_operand:SF 0 "s_register_operand" "")
3631 (float:SF (match_operand:SI 1 "s_register_operand" "")))]
3632 "TARGET_32BIT && TARGET_HARD_FLOAT"
3634 if (TARGET_MAVERICK)
3636 emit_insn (gen_cirrus_floatsisf2 (operands[0], operands[1]));
3641 (define_expand "floatsidf2"
3642 [(set (match_operand:DF 0 "s_register_operand" "")
3643 (float:DF (match_operand:SI 1 "s_register_operand" "")))]
3644 "TARGET_32BIT && TARGET_HARD_FLOAT"
3646 if (TARGET_MAVERICK)
3648 emit_insn (gen_cirrus_floatsidf2 (operands[0], operands[1]));
3653 (define_expand "fix_truncsfsi2"
3654 [(set (match_operand:SI 0 "s_register_operand" "")
3655 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand" ""))))]
3656 "TARGET_32BIT && TARGET_HARD_FLOAT"
3658 if (TARGET_MAVERICK)
3660 if (!cirrus_fp_register (operands[0], SImode))
3661 operands[0] = force_reg (SImode, operands[0]);
3662 if (!cirrus_fp_register (operands[1], SFmode))
3663 operands[1] = force_reg (SFmode, operands[0]);
3664 emit_insn (gen_cirrus_truncsfsi2 (operands[0], operands[1]));
3669 (define_expand "fix_truncdfsi2"
3670 [(set (match_operand:SI 0 "s_register_operand" "")
3671 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand" ""))))]
3672 "TARGET_32BIT && TARGET_HARD_FLOAT"
3674 if (TARGET_MAVERICK)
3676 if (!cirrus_fp_register (operands[1], DFmode))
3677 operands[1] = force_reg (DFmode, operands[0]);
3678 emit_insn (gen_cirrus_truncdfsi2 (operands[0], operands[1]));
3685 (define_expand "truncdfsf2"
3686 [(set (match_operand:SF 0 "s_register_operand" "")
3688 (match_operand:DF 1 "s_register_operand" "")))]
3689 "TARGET_32BIT && TARGET_HARD_FLOAT"
3693 ;; Zero and sign extension instructions.
3695 (define_expand "zero_extendsidi2"
3696 [(set (match_operand:DI 0 "s_register_operand" "")
3697 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3702 (define_insn "*arm_zero_extendsidi2"
3703 [(set (match_operand:DI 0 "s_register_operand" "=r")
3704 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3707 if (REGNO (operands[1])
3708 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3709 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3710 return \"mov%?\\t%R0, #0\";
3712 [(set_attr "length" "8")
3713 (set_attr "predicable" "yes")]
3716 (define_expand "zero_extendqidi2"
3717 [(set (match_operand:DI 0 "s_register_operand" "")
3718 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "")))]
3723 (define_insn "*arm_zero_extendqidi2"
3724 [(set (match_operand:DI 0 "s_register_operand" "=r,r")
3725 (zero_extend:DI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3728 and%?\\t%Q0, %1, #255\;mov%?\\t%R0, #0
3729 ldr%(b%)\\t%Q0, %1\;mov%?\\t%R0, #0"
3730 [(set_attr "length" "8")
3731 (set_attr "predicable" "yes")
3732 (set_attr "type" "*,load_byte")
3733 (set_attr "pool_range" "*,4092")
3734 (set_attr "neg_pool_range" "*,4084")]
3737 (define_expand "extendsidi2"
3738 [(set (match_operand:DI 0 "s_register_operand" "")
3739 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "")))]
3744 (define_insn "*arm_extendsidi2"
3745 [(set (match_operand:DI 0 "s_register_operand" "=r")
3746 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))]
3749 if (REGNO (operands[1])
3750 != REGNO (operands[0]) + (WORDS_BIG_ENDIAN ? 1 : 0))
3751 output_asm_insn (\"mov%?\\t%Q0, %1\", operands);
3752 return \"mov%?\\t%R0, %Q0, asr #31\";
3754 [(set_attr "length" "8")
3755 (set_attr "shift" "1")
3756 (set_attr "predicable" "yes")]
3759 (define_expand "zero_extendhisi2"
3761 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
3763 (set (match_operand:SI 0 "s_register_operand" "")
3764 (lshiftrt:SI (match_dup 2) (const_int 16)))]
3768 if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
3770 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3771 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3775 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
3777 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
3781 if (!s_register_operand (operands[1], HImode))
3782 operands[1] = copy_to_mode_reg (HImode, operands[1]);
3786 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
3787 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
3791 operands[1] = gen_lowpart (SImode, operands[1]);
3792 operands[2] = gen_reg_rtx (SImode);
3796 (define_insn "*thumb1_zero_extendhisi2"
3797 [(set (match_operand:SI 0 "register_operand" "=l")
3798 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3799 "TARGET_THUMB1 && !arm_arch6"
3801 rtx mem = XEXP (operands[1], 0);
3803 if (GET_CODE (mem) == CONST)
3804 mem = XEXP (mem, 0);
3806 if (GET_CODE (mem) == LABEL_REF)
3807 return \"ldr\\t%0, %1\";
3809 if (GET_CODE (mem) == PLUS)
3811 rtx a = XEXP (mem, 0);
3812 rtx b = XEXP (mem, 1);
3814 /* This can happen due to bugs in reload. */
3815 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3818 ops[0] = operands[0];
3821 output_asm_insn (\"mov %0, %1\", ops);
3823 XEXP (mem, 0) = operands[0];
3826 else if ( GET_CODE (a) == LABEL_REF
3827 && GET_CODE (b) == CONST_INT)
3828 return \"ldr\\t%0, %1\";
3831 return \"ldrh\\t%0, %1\";
3833 [(set_attr "length" "4")
3834 (set_attr "type" "load_byte")
3835 (set_attr "pool_range" "60")]
3838 (define_insn "*thumb1_zero_extendhisi2_v6"
3839 [(set (match_operand:SI 0 "register_operand" "=l,l")
3840 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))]
3841 "TARGET_THUMB1 && arm_arch6"
3845 if (which_alternative == 0)
3846 return \"uxth\\t%0, %1\";
3848 mem = XEXP (operands[1], 0);
3850 if (GET_CODE (mem) == CONST)
3851 mem = XEXP (mem, 0);
3853 if (GET_CODE (mem) == LABEL_REF)
3854 return \"ldr\\t%0, %1\";
3856 if (GET_CODE (mem) == PLUS)
3858 rtx a = XEXP (mem, 0);
3859 rtx b = XEXP (mem, 1);
3861 /* This can happen due to bugs in reload. */
3862 if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
3865 ops[0] = operands[0];
3868 output_asm_insn (\"mov %0, %1\", ops);
3870 XEXP (mem, 0) = operands[0];
3873 else if ( GET_CODE (a) == LABEL_REF
3874 && GET_CODE (b) == CONST_INT)
3875 return \"ldr\\t%0, %1\";
3878 return \"ldrh\\t%0, %1\";
3880 [(set_attr "length" "2,4")
3881 (set_attr "type" "alu_shift,load_byte")
3882 (set_attr "pool_range" "*,60")]
3885 (define_insn "*arm_zero_extendhisi2"
3886 [(set (match_operand:SI 0 "s_register_operand" "=r")
3887 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
3888 "TARGET_ARM && arm_arch4 && !arm_arch6"
3890 [(set_attr "type" "load_byte")
3891 (set_attr "predicable" "yes")
3892 (set_attr "pool_range" "256")
3893 (set_attr "neg_pool_range" "244")]
3896 (define_insn "*arm_zero_extendhisi2_v6"
3897 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3898 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
3899 "TARGET_ARM && arm_arch6"
3903 [(set_attr "type" "alu_shift,load_byte")
3904 (set_attr "predicable" "yes")
3905 (set_attr "pool_range" "*,256")
3906 (set_attr "neg_pool_range" "*,244")]
3909 (define_insn "*arm_zero_extendhisi2addsi"
3910 [(set (match_operand:SI 0 "s_register_operand" "=r")
3911 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
3912 (match_operand:SI 2 "s_register_operand" "r")))]
3914 "uxtah%?\\t%0, %2, %1"
3915 [(set_attr "type" "alu_shift")
3916 (set_attr "predicable" "yes")]
3919 (define_expand "zero_extendqisi2"
3920 [(set (match_operand:SI 0 "s_register_operand" "")
3921 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
3924 if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
3928 emit_insn (gen_andsi3 (operands[0],
3929 gen_lowpart (SImode, operands[1]),
3932 else /* TARGET_THUMB */
3934 rtx temp = gen_reg_rtx (SImode);
3937 operands[1] = copy_to_mode_reg (QImode, operands[1]);
3938 operands[1] = gen_lowpart (SImode, operands[1]);
3941 ops[1] = operands[1];
3942 ops[2] = GEN_INT (24);
3944 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3945 gen_rtx_ASHIFT (SImode, ops[1], ops[2])));
3947 ops[0] = operands[0];
3949 ops[2] = GEN_INT (24);
3951 emit_insn (gen_rtx_SET (VOIDmode, ops[0],
3952 gen_rtx_LSHIFTRT (SImode, ops[1], ops[2])));
3959 (define_insn "*thumb1_zero_extendqisi2"
3960 [(set (match_operand:SI 0 "register_operand" "=l")
3961 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3962 "TARGET_THUMB1 && !arm_arch6"
3964 [(set_attr "length" "2")
3965 (set_attr "type" "load_byte")
3966 (set_attr "pool_range" "32")]
3969 (define_insn "*thumb1_zero_extendqisi2_v6"
3970 [(set (match_operand:SI 0 "register_operand" "=l,l")
3971 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,m")))]
3972 "TARGET_THUMB1 && arm_arch6"
3976 [(set_attr "length" "2,2")
3977 (set_attr "type" "alu_shift,load_byte")
3978 (set_attr "pool_range" "*,32")]
3981 (define_insn "*arm_zero_extendqisi2"
3982 [(set (match_operand:SI 0 "s_register_operand" "=r")
3983 (zero_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
3984 "TARGET_ARM && !arm_arch6"
3985 "ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3986 [(set_attr "type" "load_byte")
3987 (set_attr "predicable" "yes")
3988 (set_attr "pool_range" "4096")
3989 (set_attr "neg_pool_range" "4084")]
3992 (define_insn "*arm_zero_extendqisi2_v6"
3993 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3994 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
3995 "TARGET_ARM && arm_arch6"
3998 ldr%(b%)\\t%0, %1\\t%@ zero_extendqisi2"
3999 [(set_attr "type" "alu_shift,load_byte")
4000 (set_attr "predicable" "yes")
4001 (set_attr "pool_range" "*,4096")
4002 (set_attr "neg_pool_range" "*,4084")]
4005 (define_insn "*arm_zero_extendqisi2addsi"
4006 [(set (match_operand:SI 0 "s_register_operand" "=r")
4007 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4008 (match_operand:SI 2 "s_register_operand" "r")))]
4010 "uxtab%?\\t%0, %2, %1"
4011 [(set_attr "predicable" "yes")
4012 (set_attr "insn" "xtab")
4013 (set_attr "type" "alu_shift")]
4017 [(set (match_operand:SI 0 "s_register_operand" "")
4018 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
4019 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4020 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
4021 [(set (match_dup 2) (match_dup 1))
4022 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4027 [(set (match_operand:SI 0 "s_register_operand" "")
4028 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
4029 (clobber (match_operand:SI 2 "s_register_operand" ""))]
4030 "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
4031 [(set (match_dup 2) (match_dup 1))
4032 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
4036 (define_insn "*compareqi_eq0"
4037 [(set (reg:CC_Z CC_REGNUM)
4038 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
4042 [(set_attr "conds" "set")]
4045 (define_expand "extendhisi2"
4047 (ashift:SI (match_operand:HI 1 "nonimmediate_operand" "")
4049 (set (match_operand:SI 0 "s_register_operand" "")
4050 (ashiftrt:SI (match_dup 2)
4055 if (GET_CODE (operands[1]) == MEM)
4059 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4064 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4065 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4070 if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
4072 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
4076 if (!s_register_operand (operands[1], HImode))
4077 operands[1] = copy_to_mode_reg (HImode, operands[1]);
4082 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
4084 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4085 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4090 operands[1] = gen_lowpart (SImode, operands[1]);
4091 operands[2] = gen_reg_rtx (SImode);
4095 (define_insn "thumb1_extendhisi2"
4096 [(set (match_operand:SI 0 "register_operand" "=l")
4097 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))
4098 (clobber (match_scratch:SI 2 "=&l"))]
4099 "TARGET_THUMB1 && !arm_arch6"
4103 rtx mem = XEXP (operands[1], 0);
4105 /* This code used to try to use 'V', and fix the address only if it was
4106 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4107 range of QImode offsets, and offsettable_address_p does a QImode
4110 if (GET_CODE (mem) == CONST)
4111 mem = XEXP (mem, 0);
4113 if (GET_CODE (mem) == LABEL_REF)
4114 return \"ldr\\t%0, %1\";
4116 if (GET_CODE (mem) == PLUS)
4118 rtx a = XEXP (mem, 0);
4119 rtx b = XEXP (mem, 1);
4121 if (GET_CODE (a) == LABEL_REF
4122 && GET_CODE (b) == CONST_INT)
4123 return \"ldr\\t%0, %1\";
4125 if (GET_CODE (b) == REG)
4126 return \"ldrsh\\t%0, %1\";
4134 ops[2] = const0_rtx;
4137 gcc_assert (GET_CODE (ops[1]) == REG);
4139 ops[0] = operands[0];
4140 ops[3] = operands[2];
4141 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4144 [(set_attr "length" "4")
4145 (set_attr "type" "load_byte")
4146 (set_attr "pool_range" "1020")]
4149 ;; We used to have an early-clobber on the scratch register here.
4150 ;; However, there's a bug somewhere in reload which means that this
4151 ;; can be partially ignored during spill allocation if the memory
4152 ;; address also needs reloading; this causes us to die later on when
4153 ;; we try to verify the operands. Fortunately, we don't really need
4154 ;; the early-clobber: we can always use operand 0 if operand 2
4155 ;; overlaps the address.
4156 (define_insn "*thumb1_extendhisi2_insn_v6"
4157 [(set (match_operand:SI 0 "register_operand" "=l,l")
4158 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "l,m")))
4159 (clobber (match_scratch:SI 2 "=X,l"))]
4160 "TARGET_THUMB1 && arm_arch6"
4166 if (which_alternative == 0)
4167 return \"sxth\\t%0, %1\";
4169 mem = XEXP (operands[1], 0);
4171 /* This code used to try to use 'V', and fix the address only if it was
4172 offsettable, but this fails for e.g. REG+48 because 48 is outside the
4173 range of QImode offsets, and offsettable_address_p does a QImode
4176 if (GET_CODE (mem) == CONST)
4177 mem = XEXP (mem, 0);
4179 if (GET_CODE (mem) == LABEL_REF)
4180 return \"ldr\\t%0, %1\";
4182 if (GET_CODE (mem) == PLUS)
4184 rtx a = XEXP (mem, 0);
4185 rtx b = XEXP (mem, 1);
4187 if (GET_CODE (a) == LABEL_REF
4188 && GET_CODE (b) == CONST_INT)
4189 return \"ldr\\t%0, %1\";
4191 if (GET_CODE (b) == REG)
4192 return \"ldrsh\\t%0, %1\";
4200 ops[2] = const0_rtx;
4203 gcc_assert (GET_CODE (ops[1]) == REG);
4205 ops[0] = operands[0];
4206 if (reg_mentioned_p (operands[2], ops[1]))
4209 ops[3] = operands[2];
4210 output_asm_insn (\"mov\\t%3, %2\;ldrsh\\t%0, [%1, %3]\", ops);
4213 [(set_attr "length" "2,4")
4214 (set_attr "type" "alu_shift,load_byte")
4215 (set_attr "pool_range" "*,1020")]
4218 ;; This pattern will only be used when ldsh is not available
4219 (define_expand "extendhisi2_mem"
4220 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
4222 (zero_extend:SI (match_dup 7)))
4223 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
4224 (set (match_operand:SI 0 "" "")
4225 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
4230 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
4232 mem1 = change_address (operands[1], QImode, addr);
4233 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
4234 operands[0] = gen_lowpart (SImode, operands[0]);
4236 operands[2] = gen_reg_rtx (SImode);
4237 operands[3] = gen_reg_rtx (SImode);
4238 operands[6] = gen_reg_rtx (SImode);
4241 if (BYTES_BIG_ENDIAN)
4243 operands[4] = operands[2];
4244 operands[5] = operands[3];
4248 operands[4] = operands[3];
4249 operands[5] = operands[2];
4254 (define_insn "*arm_extendhisi2"
4255 [(set (match_operand:SI 0 "s_register_operand" "=r")
4256 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
4257 "TARGET_ARM && arm_arch4 && !arm_arch6"
4258 "ldr%(sh%)\\t%0, %1"
4259 [(set_attr "type" "load_byte")
4260 (set_attr "predicable" "yes")
4261 (set_attr "pool_range" "256")
4262 (set_attr "neg_pool_range" "244")]
4265 ;; ??? Check Thumb-2 pool range
4266 (define_insn "*arm_extendhisi2_v6"
4267 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4268 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
4269 "TARGET_32BIT && arm_arch6"
4273 [(set_attr "type" "alu_shift,load_byte")
4274 (set_attr "predicable" "yes")
4275 (set_attr "pool_range" "*,256")
4276 (set_attr "neg_pool_range" "*,244")]
4279 (define_insn "*arm_extendhisi2addsi"
4280 [(set (match_operand:SI 0 "s_register_operand" "=r")
4281 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
4282 (match_operand:SI 2 "s_register_operand" "r")))]
4284 "sxtah%?\\t%0, %2, %1"
4287 (define_expand "extendqihi2"
4289 (ashift:SI (match_operand:QI 1 "general_operand" "")
4291 (set (match_operand:HI 0 "s_register_operand" "")
4292 (ashiftrt:SI (match_dup 2)
4297 if (arm_arch4 && GET_CODE (operands[1]) == MEM)
4299 emit_insn (gen_rtx_SET (VOIDmode,
4301 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
4304 if (!s_register_operand (operands[1], QImode))
4305 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4306 operands[0] = gen_lowpart (SImode, operands[0]);
4307 operands[1] = gen_lowpart (SImode, operands[1]);
4308 operands[2] = gen_reg_rtx (SImode);
4312 (define_insn "*arm_extendqihi_insn"
4313 [(set (match_operand:HI 0 "s_register_operand" "=r")
4314 (sign_extend:HI (match_operand:QI 1 "memory_operand" "Uq")))]
4315 "TARGET_ARM && arm_arch4"
4316 "ldr%(sb%)\\t%0, %1"
4317 [(set_attr "type" "load_byte")
4318 (set_attr "predicable" "yes")
4319 (set_attr "pool_range" "256")
4320 (set_attr "neg_pool_range" "244")]
4323 (define_expand "extendqisi2"
4325 (ashift:SI (match_operand:QI 1 "general_operand" "")
4327 (set (match_operand:SI 0 "s_register_operand" "")
4328 (ashiftrt:SI (match_dup 2)
4333 if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
4335 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4336 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4340 if (!s_register_operand (operands[1], QImode))
4341 operands[1] = copy_to_mode_reg (QImode, operands[1]);
4345 emit_insn (gen_rtx_SET (VOIDmode, operands[0],
4346 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
4350 operands[1] = gen_lowpart (SImode, operands[1]);
4351 operands[2] = gen_reg_rtx (SImode);
4355 (define_insn "*arm_extendqisi"
4356 [(set (match_operand:SI 0 "s_register_operand" "=r")
4357 (sign_extend:SI (match_operand:QI 1 "memory_operand" "Uq")))]
4358 "TARGET_ARM && arm_arch4 && !arm_arch6"
4359 "ldr%(sb%)\\t%0, %1"
4360 [(set_attr "type" "load_byte")
4361 (set_attr "predicable" "yes")
4362 (set_attr "pool_range" "256")
4363 (set_attr "neg_pool_range" "244")]
4366 (define_insn "*arm_extendqisi_v6"
4367 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4368 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uq")))]
4369 "TARGET_ARM && arm_arch6"
4373 [(set_attr "type" "alu_shift,load_byte")
4374 (set_attr "predicable" "yes")
4375 (set_attr "pool_range" "*,256")
4376 (set_attr "neg_pool_range" "*,244")]
4379 (define_insn "*arm_extendqisi2addsi"
4380 [(set (match_operand:SI 0 "s_register_operand" "=r")
4381 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
4382 (match_operand:SI 2 "s_register_operand" "r")))]
4384 "sxtab%?\\t%0, %2, %1"
4385 [(set_attr "type" "alu_shift")
4386 (set_attr "insn" "xtab")
4387 (set_attr "predicable" "yes")]
4390 (define_insn "*thumb1_extendqisi2"
4391 [(set (match_operand:SI 0 "register_operand" "=l,l")
4392 (sign_extend:SI (match_operand:QI 1 "memory_operand" "V,m")))]
4393 "TARGET_THUMB1 && !arm_arch6"
4397 rtx mem = XEXP (operands[1], 0);
4399 if (GET_CODE (mem) == CONST)
4400 mem = XEXP (mem, 0);
4402 if (GET_CODE (mem) == LABEL_REF)
4403 return \"ldr\\t%0, %1\";
4405 if (GET_CODE (mem) == PLUS
4406 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4407 return \"ldr\\t%0, %1\";
4409 if (which_alternative == 0)
4410 return \"ldrsb\\t%0, %1\";
4412 ops[0] = operands[0];
4414 if (GET_CODE (mem) == PLUS)
4416 rtx a = XEXP (mem, 0);
4417 rtx b = XEXP (mem, 1);
4422 if (GET_CODE (a) == REG)
4424 if (GET_CODE (b) == REG)
4425 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4426 else if (REGNO (a) == REGNO (ops[0]))
4428 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4429 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4430 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4433 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4437 gcc_assert (GET_CODE (b) == REG);
4438 if (REGNO (b) == REGNO (ops[0]))
4440 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4441 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4442 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4445 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4448 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4450 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4451 output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
4452 output_asm_insn (\"asr\\t%0, %0, #24\", ops);
4457 ops[2] = const0_rtx;
4459 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4463 [(set_attr "length" "2,6")
4464 (set_attr "type" "load_byte,load_byte")
4465 (set_attr "pool_range" "32,32")]
4468 (define_insn "*thumb1_extendqisi2_v6"
4469 [(set (match_operand:SI 0 "register_operand" "=l,l,l")
4470 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "l,V,m")))]
4471 "TARGET_THUMB1 && arm_arch6"
4477 if (which_alternative == 0)
4478 return \"sxtb\\t%0, %1\";
4480 mem = XEXP (operands[1], 0);
4482 if (GET_CODE (mem) == CONST)
4483 mem = XEXP (mem, 0);
4485 if (GET_CODE (mem) == LABEL_REF)
4486 return \"ldr\\t%0, %1\";
4488 if (GET_CODE (mem) == PLUS
4489 && GET_CODE (XEXP (mem, 0)) == LABEL_REF)
4490 return \"ldr\\t%0, %1\";
4492 if (which_alternative == 0)
4493 return \"ldrsb\\t%0, %1\";
4495 ops[0] = operands[0];
4497 if (GET_CODE (mem) == PLUS)
4499 rtx a = XEXP (mem, 0);
4500 rtx b = XEXP (mem, 1);
4505 if (GET_CODE (a) == REG)
4507 if (GET_CODE (b) == REG)
4508 output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
4509 else if (REGNO (a) == REGNO (ops[0]))
4511 output_asm_insn (\"ldrb\\t%0, [%1, %2]\", ops);
4512 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4515 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4519 gcc_assert (GET_CODE (b) == REG);
4520 if (REGNO (b) == REGNO (ops[0]))
4522 output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
4523 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4526 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4529 else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
4531 output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
4532 output_asm_insn (\"sxtb\\t%0, %0\", ops);
4537 ops[2] = const0_rtx;
4539 output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
4543 [(set_attr "length" "2,2,4")
4544 (set_attr "type" "alu_shift,load_byte,load_byte")
4545 (set_attr "pool_range" "*,32,32")]
4548 (define_expand "extendsfdf2"
4549 [(set (match_operand:DF 0 "s_register_operand" "")
4550 (float_extend:DF (match_operand:SF 1 "s_register_operand" "")))]
4551 "TARGET_32BIT && TARGET_HARD_FLOAT"
4555 ;; Move insns (including loads and stores)
4557 ;; XXX Just some ideas about movti.
4558 ;; I don't think these are a good idea on the arm, there just aren't enough
4560 ;;(define_expand "loadti"
4561 ;; [(set (match_operand:TI 0 "s_register_operand" "")
4562 ;; (mem:TI (match_operand:SI 1 "address_operand" "")))]
4565 ;;(define_expand "storeti"
4566 ;; [(set (mem:TI (match_operand:TI 0 "address_operand" ""))
4567 ;; (match_operand:TI 1 "s_register_operand" ""))]
4570 ;;(define_expand "movti"
4571 ;; [(set (match_operand:TI 0 "general_operand" "")
4572 ;; (match_operand:TI 1 "general_operand" ""))]
4578 ;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
4579 ;; operands[1] = copy_to_reg (operands[1]);
4580 ;; if (GET_CODE (operands[0]) == MEM)
4581 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
4582 ;; else if (GET_CODE (operands[1]) == MEM)
4583 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
4587 ;; emit_insn (insn);
4591 ;; Recognize garbage generated above.
4594 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
4595 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
4599 ;; register mem = (which_alternative < 3);
4600 ;; register const char *template;
4602 ;; operands[mem] = XEXP (operands[mem], 0);
4603 ;; switch (which_alternative)
4605 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
4606 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
4607 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
4608 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
4609 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
4610 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
4612 ;; output_asm_insn (template, operands);
4616 (define_expand "movdi"
4617 [(set (match_operand:DI 0 "general_operand" "")
4618 (match_operand:DI 1 "general_operand" ""))]
4621 if (can_create_pseudo_p ())
4623 if (GET_CODE (operands[0]) != REG)
4624 operands[1] = force_reg (DImode, operands[1]);
4629 (define_insn "*arm_movdi"
4630 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
4631 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
4633 && !(TARGET_HARD_FLOAT && (TARGET_MAVERICK || TARGET_VFP))
4635 && ( register_operand (operands[0], DImode)
4636 || register_operand (operands[1], DImode))"
4638 switch (which_alternative)
4645 return output_move_double (operands);
4648 [(set_attr "length" "8,12,16,8,8")
4649 (set_attr "type" "*,*,*,load2,store2")
4650 (set_attr "pool_range" "*,*,*,1020,*")
4651 (set_attr "neg_pool_range" "*,*,*,1008,*")]
4655 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4656 (match_operand:ANY64 1 "const_double_operand" ""))]
4659 && (arm_const_double_inline_cost (operands[1])
4660 <= ((optimize_size || arm_ld_sched) ? 3 : 4))"
4663 arm_split_constant (SET, SImode, curr_insn,
4664 INTVAL (gen_lowpart (SImode, operands[1])),
4665 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
4666 arm_split_constant (SET, SImode, curr_insn,
4667 INTVAL (gen_highpart_mode (SImode,
4668 GET_MODE (operands[0]),
4670 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
4675 ; If optimizing for size, or if we have load delay slots, then
4676 ; we want to split the constant into two separate operations.
4677 ; In both cases this may split a trivial part into a single data op
4678 ; leaving a single complex constant to load. We can also get longer
4679 ; offsets in a LDR which means we get better chances of sharing the pool
4680 ; entries. Finally, we can normally do a better job of scheduling
4681 ; LDR instructions than we can with LDM.
4682 ; This pattern will only match if the one above did not.
4684 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4685 (match_operand:ANY64 1 "const_double_operand" ""))]
4686 "TARGET_ARM && reload_completed
4687 && arm_const_double_by_parts (operands[1])"
4688 [(set (match_dup 0) (match_dup 1))
4689 (set (match_dup 2) (match_dup 3))]
4691 operands[2] = gen_highpart (SImode, operands[0]);
4692 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
4694 operands[0] = gen_lowpart (SImode, operands[0]);
4695 operands[1] = gen_lowpart (SImode, operands[1]);
4700 [(set (match_operand:ANY64 0 "arm_general_register_operand" "")
4701 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
4702 "TARGET_EITHER && reload_completed"
4703 [(set (match_dup 0) (match_dup 1))
4704 (set (match_dup 2) (match_dup 3))]
4706 operands[2] = gen_highpart (SImode, operands[0]);
4707 operands[3] = gen_highpart (SImode, operands[1]);
4708 operands[0] = gen_lowpart (SImode, operands[0]);
4709 operands[1] = gen_lowpart (SImode, operands[1]);
4711 /* Handle a partial overlap. */
4712 if (rtx_equal_p (operands[0], operands[3]))
4714 rtx tmp0 = operands[0];
4715 rtx tmp1 = operands[1];
4717 operands[0] = operands[2];
4718 operands[1] = operands[3];
4725 ;; We can't actually do base+index doubleword loads if the index and
4726 ;; destination overlap. Split here so that we at least have chance to
4729 [(set (match_operand:DI 0 "s_register_operand" "")
4730 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
4731 (match_operand:SI 2 "s_register_operand" ""))))]
4733 && reg_overlap_mentioned_p (operands[0], operands[1])
4734 && reg_overlap_mentioned_p (operands[0], operands[2])"
4736 (plus:SI (match_dup 1)
4739 (mem:DI (match_dup 4)))]
4741 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
4745 ;;; ??? This should have alternatives for constants.
4746 ;;; ??? This was originally identical to the movdf_insn pattern.
4747 ;;; ??? The 'i' constraint looks funny, but it should always be replaced by
4748 ;;; thumb_reorg with a memory reference.
4749 (define_insn "*thumb1_movdi_insn"
4750 [(set (match_operand:DI 0 "nonimmediate_operand" "=l,l,l,l,>,l, m,*r")
4751 (match_operand:DI 1 "general_operand" "l, I,J,>,l,mi,l,*r"))]
4753 && !(TARGET_HARD_FLOAT && TARGET_MAVERICK)
4754 && ( register_operand (operands[0], DImode)
4755 || register_operand (operands[1], DImode))"
4758 switch (which_alternative)
4762 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4763 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
4764 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
4766 return \"mov\\t%Q0, %1\;mov\\t%R0, #0\";
4768 operands[1] = GEN_INT (- INTVAL (operands[1]));
4769 return \"mov\\t%Q0, %1\;neg\\t%Q0, %Q0\;asr\\t%R0, %Q0, #31\";
4771 return \"ldmia\\t%1, {%0, %H0}\";
4773 return \"stmia\\t%0, {%1, %H1}\";
4775 return thumb_load_double_from_address (operands);
4777 operands[2] = gen_rtx_MEM (SImode,
4778 plus_constant (XEXP (operands[0], 0), 4));
4779 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
4782 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
4783 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
4784 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
4787 [(set_attr "length" "4,4,6,2,2,6,4,4")
4788 (set_attr "type" "*,*,*,load2,store2,load2,store2,*")
4789 (set_attr "pool_range" "*,*,*,*,*,1020,*,*")]
4792 (define_expand "movsi"
4793 [(set (match_operand:SI 0 "general_operand" "")
4794 (match_operand:SI 1 "general_operand" ""))]
4798 rtx base, offset, tmp;
4802 /* Everything except mem = const or mem = mem can be done easily. */
4803 if (GET_CODE (operands[0]) == MEM)
4804 operands[1] = force_reg (SImode, operands[1]);
4805 if (arm_general_register_operand (operands[0], SImode)
4806 && GET_CODE (operands[1]) == CONST_INT
4807 && !(const_ok_for_arm (INTVAL (operands[1]))
4808 || const_ok_for_arm (~INTVAL (operands[1]))))
4810 arm_split_constant (SET, SImode, NULL_RTX,
4811 INTVAL (operands[1]), operands[0], NULL_RTX,
4812 optimize && can_create_pseudo_p ());
4816 else /* TARGET_THUMB1... */
4818 if (can_create_pseudo_p ())
4820 if (GET_CODE (operands[0]) != REG)
4821 operands[1] = force_reg (SImode, operands[1]);
4825 if (ARM_OFFSETS_MUST_BE_WITHIN_SECTIONS_P)
4827 split_const (operands[1], &base, &offset);
4828 if (GET_CODE (base) == SYMBOL_REF
4829 && !offset_within_block_p (base, INTVAL (offset)))
4831 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
4832 emit_move_insn (tmp, base);
4833 emit_insn (gen_addsi3 (operands[0], tmp, offset));
4838 /* Recognize the case where operand[1] is a reference to thread-local
4839 data and load its address to a register. */
4840 if (arm_tls_referenced_p (operands[1]))
4842 rtx tmp = operands[1];
4845 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
4847 addend = XEXP (XEXP (tmp, 0), 1);
4848 tmp = XEXP (XEXP (tmp, 0), 0);
4851 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
4852 gcc_assert (SYMBOL_REF_TLS_MODEL (tmp) != 0);
4854 tmp = legitimize_tls_address (tmp,
4855 !can_create_pseudo_p () ? operands[0] : 0);
4858 tmp = gen_rtx_PLUS (SImode, tmp, addend);
4859 tmp = force_operand (tmp, operands[0]);
4864 && (CONSTANT_P (operands[1])
4865 || symbol_mentioned_p (operands[1])
4866 || label_mentioned_p (operands[1])))
4867 operands[1] = legitimize_pic_address (operands[1], SImode,
4868 (!can_create_pseudo_p ()
4875 (define_insn "*arm_movsi_insn"
4876 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
4877 (match_operand:SI 1 "general_operand" "rk, I,K,N,mi,rk"))]
4878 "TARGET_ARM && ! TARGET_IWMMXT
4879 && !(TARGET_HARD_FLOAT && TARGET_VFP)
4880 && ( register_operand (operands[0], SImode)
4881 || register_operand (operands[1], SImode))"
4889 [(set_attr "type" "*,*,*,*,load1,store1")
4890 (set_attr "predicable" "yes")
4891 (set_attr "pool_range" "*,*,*,*,4096,*")
4892 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
4896 [(set (match_operand:SI 0 "arm_general_register_operand" "")
4897 (match_operand:SI 1 "const_int_operand" ""))]
4899 && (!(const_ok_for_arm (INTVAL (operands[1]))
4900 || const_ok_for_arm (~INTVAL (operands[1]))))"
4901 [(clobber (const_int 0))]
4903 arm_split_constant (SET, SImode, NULL_RTX,
4904 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
4909 (define_insn "*thumb1_movsi_insn"
4910 [(set (match_operand:SI 0 "nonimmediate_operand" "=l,l,l,l,l,>,l, m,*lhk")
4911 (match_operand:SI 1 "general_operand" "l, I,J,K,>,l,mi,l,*lhk"))]
4913 && ( register_operand (operands[0], SImode)
4914 || register_operand (operands[1], SImode))"
4925 [(set_attr "length" "2,2,4,4,2,2,2,2,2")
4926 (set_attr "type" "*,*,*,*,load1,store1,load1,store1,*")
4927 (set_attr "pool_range" "*,*,*,*,*,*,1020,*,*")]
4931 [(set (match_operand:SI 0 "register_operand" "")
4932 (match_operand:SI 1 "const_int_operand" ""))]
4933 "TARGET_THUMB1 && satisfies_constraint_J (operands[1])"
4934 [(set (match_dup 0) (match_dup 1))
4935 (set (match_dup 0) (neg:SI (match_dup 0)))]
4936 "operands[1] = GEN_INT (- INTVAL (operands[1]));"
4940 [(set (match_operand:SI 0 "register_operand" "")
4941 (match_operand:SI 1 "const_int_operand" ""))]
4942 "TARGET_THUMB1 && satisfies_constraint_K (operands[1])"
4943 [(set (match_dup 0) (match_dup 1))
4944 (set (match_dup 0) (ashift:SI (match_dup 0) (match_dup 2)))]
4947 unsigned HOST_WIDE_INT val = INTVAL (operands[1]);
4948 unsigned HOST_WIDE_INT mask = 0xff;
4951 for (i = 0; i < 25; i++)
4952 if ((val & (mask << i)) == val)
4955 /* Shouldn't happen, but we don't want to split if the shift is zero. */
4959 operands[1] = GEN_INT (val >> i);
4960 operands[2] = GEN_INT (i);
4964 ;; When generating pic, we need to load the symbol offset into a register.
4965 ;; So that the optimizer does not confuse this with a normal symbol load
4966 ;; we use an unspec. The offset will be loaded from a constant pool entry,
4967 ;; since that is the only type of relocation we can use.
4969 ;; The rather odd constraints on the following are to force reload to leave
4970 ;; the insn alone, and to force the minipool generation pass to then move
4971 ;; the GOT symbol to memory.
4973 (define_insn "pic_load_addr_arm"
4974 [(set (match_operand:SI 0 "s_register_operand" "=r")
4975 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4976 "TARGET_ARM && flag_pic"
4978 [(set_attr "type" "load1")
4979 (set (attr "pool_range") (const_int 4096))
4980 (set (attr "neg_pool_range") (const_int 4084))]
4983 (define_insn "pic_load_addr_thumb1"
4984 [(set (match_operand:SI 0 "s_register_operand" "=l")
4985 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
4986 "TARGET_THUMB1 && flag_pic"
4988 [(set_attr "type" "load1")
4989 (set (attr "pool_range") (const_int 1024))]
4992 (define_insn "pic_add_dot_plus_four"
4993 [(set (match_operand:SI 0 "register_operand" "=r")
4994 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "0")
4995 (const (plus:SI (pc) (const_int 4))))
4996 (match_operand 2 "" "")]
5000 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5001 INTVAL (operands[2]));
5002 return \"add\\t%0, %|pc\";
5004 [(set_attr "length" "2")]
5007 (define_insn "pic_add_dot_plus_eight"
5008 [(set (match_operand:SI 0 "register_operand" "=r")
5009 (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
5010 (const (plus:SI (pc) (const_int 8))))
5011 (match_operand 2 "" "")]
5015 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5016 INTVAL (operands[2]));
5017 return \"add%?\\t%0, %|pc, %1\";
5019 [(set_attr "predicable" "yes")]
5022 (define_insn "tls_load_dot_plus_eight"
5023 [(set (match_operand:SI 0 "register_operand" "+r")
5024 (mem:SI (unspec:SI [(plus:SI (match_operand:SI 1 "register_operand" "r")
5025 (const (plus:SI (pc) (const_int 8))))
5026 (match_operand 2 "" "")]
5030 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
5031 INTVAL (operands[2]));
5032 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
5034 [(set_attr "predicable" "yes")]
5037 ;; PIC references to local variables can generate pic_add_dot_plus_eight
5038 ;; followed by a load. These sequences can be crunched down to
5039 ;; tls_load_dot_plus_eight by a peephole.
5042 [(parallel [(set (match_operand:SI 0 "register_operand" "")
5043 (unspec:SI [(plus:SI (match_operand:SI 3 "register_operand" "")
5044 (const (plus:SI (pc) (const_int 8))))]
5046 (use (label_ref (match_operand 1 "" "")))])
5047 (set (match_operand:SI 2 "register_operand" "") (mem:SI (match_dup 0)))]
5048 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
5049 [(parallel [(set (match_dup 2)
5050 (mem:SI (unspec:SI [(plus:SI (match_dup 3)
5051 (const (plus:SI (pc) (const_int 8))))]
5053 (use (label_ref (match_dup 1)))])]
5057 (define_insn "pic_offset_arm"
5058 [(set (match_operand:SI 0 "register_operand" "=r")
5059 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
5060 (unspec:SI [(match_operand:SI 2 "" "X")]
5061 UNSPEC_PIC_OFFSET))))]
5062 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
5063 "ldr%?\\t%0, [%1,%2]"
5064 [(set_attr "type" "load1")]
5067 (define_expand "builtin_setjmp_receiver"
5068 [(label_ref (match_operand 0 "" ""))]
5072 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
5074 if (arm_pic_register != INVALID_REGNUM)
5075 arm_load_pic_register (1UL << 3);
5079 ;; If copying one reg to another we can set the condition codes according to
5080 ;; its value. Such a move is common after a return from subroutine and the
5081 ;; result is being tested against zero.
5083 (define_insn "*movsi_compare0"
5084 [(set (reg:CC CC_REGNUM)
5085 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
5087 (set (match_operand:SI 0 "s_register_operand" "=r,r")
5093 [(set_attr "conds" "set")]
5096 ;; Subroutine to store a half word from a register into memory.
5097 ;; Operand 0 is the source register (HImode)
5098 ;; Operand 1 is the destination address in a register (SImode)
5100 ;; In both this routine and the next, we must be careful not to spill
5101 ;; a memory address of reg+large_const into a separate PLUS insn, since this
5102 ;; can generate unrecognizable rtl.
5104 (define_expand "storehi"
5105 [;; store the low byte
5106 (set (match_operand 1 "" "") (match_dup 3))
5107 ;; extract the high byte
5109 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5110 ;; store the high byte
5111 (set (match_dup 4) (match_dup 5))]
5115 rtx op1 = operands[1];
5116 rtx addr = XEXP (op1, 0);
5117 enum rtx_code code = GET_CODE (addr);
5119 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5121 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
5123 operands[4] = adjust_address (op1, QImode, 1);
5124 operands[1] = adjust_address (operands[1], QImode, 0);
5125 operands[3] = gen_lowpart (QImode, operands[0]);
5126 operands[0] = gen_lowpart (SImode, operands[0]);
5127 operands[2] = gen_reg_rtx (SImode);
5128 operands[5] = gen_lowpart (QImode, operands[2]);
5132 (define_expand "storehi_bigend"
5133 [(set (match_dup 4) (match_dup 3))
5135 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
5136 (set (match_operand 1 "" "") (match_dup 5))]
5140 rtx op1 = operands[1];
5141 rtx addr = XEXP (op1, 0);
5142 enum rtx_code code = GET_CODE (addr);
5144 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5146 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
5148 operands[4] = adjust_address (op1, QImode, 1);
5149 operands[1] = adjust_address (operands[1], QImode, 0);
5150 operands[3] = gen_lowpart (QImode, operands[0]);
5151 operands[0] = gen_lowpart (SImode, operands[0]);
5152 operands[2] = gen_reg_rtx (SImode);
5153 operands[5] = gen_lowpart (QImode, operands[2]);
5157 ;; Subroutine to store a half word integer constant into memory.
5158 (define_expand "storeinthi"
5159 [(set (match_operand 0 "" "")
5160 (match_operand 1 "" ""))
5161 (set (match_dup 3) (match_dup 2))]
5165 HOST_WIDE_INT value = INTVAL (operands[1]);
5166 rtx addr = XEXP (operands[0], 0);
5167 rtx op0 = operands[0];
5168 enum rtx_code code = GET_CODE (addr);
5170 if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
5172 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
5174 operands[1] = gen_reg_rtx (SImode);
5175 if (BYTES_BIG_ENDIAN)
5177 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
5178 if ((value & 255) == ((value >> 8) & 255))
5179 operands[2] = operands[1];
5182 operands[2] = gen_reg_rtx (SImode);
5183 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
5188 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
5189 if ((value & 255) == ((value >> 8) & 255))
5190 operands[2] = operands[1];
5193 operands[2] = gen_reg_rtx (SImode);
5194 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
5198 operands[3] = adjust_address (op0, QImode, 1);
5199 operands[0] = adjust_address (operands[0], QImode, 0);
5200 operands[2] = gen_lowpart (QImode, operands[2]);
5201 operands[1] = gen_lowpart (QImode, operands[1]);
5205 (define_expand "storehi_single_op"
5206 [(set (match_operand:HI 0 "memory_operand" "")
5207 (match_operand:HI 1 "general_operand" ""))]
5208 "TARGET_32BIT && arm_arch4"
5210 if (!s_register_operand (operands[1], HImode))
5211 operands[1] = copy_to_mode_reg (HImode, operands[1]);
5215 (define_expand "movhi"
5216 [(set (match_operand:HI 0 "general_operand" "")
5217 (match_operand:HI 1 "general_operand" ""))]
5222 if (can_create_pseudo_p ())
5224 if (GET_CODE (operands[0]) == MEM)
5228 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
5231 if (GET_CODE (operands[1]) == CONST_INT)
5232 emit_insn (gen_storeinthi (operands[0], operands[1]));
5235 if (GET_CODE (operands[1]) == MEM)
5236 operands[1] = force_reg (HImode, operands[1]);
5237 if (BYTES_BIG_ENDIAN)
5238 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
5240 emit_insn (gen_storehi (operands[1], operands[0]));
5244 /* Sign extend a constant, and keep it in an SImode reg. */
5245 else if (GET_CODE (operands[1]) == CONST_INT)
5247 rtx reg = gen_reg_rtx (SImode);
5248 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5250 /* If the constant is already valid, leave it alone. */
5251 if (!const_ok_for_arm (val))
5253 /* If setting all the top bits will make the constant
5254 loadable in a single instruction, then set them.
5255 Otherwise, sign extend the number. */
5257 if (const_ok_for_arm (~(val | ~0xffff)))
5259 else if (val & 0x8000)
5263 emit_insn (gen_movsi (reg, GEN_INT (val)));
5264 operands[1] = gen_lowpart (HImode, reg);
5266 else if (arm_arch4 && optimize && can_create_pseudo_p ()
5267 && GET_CODE (operands[1]) == MEM)
5269 rtx reg = gen_reg_rtx (SImode);
5271 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5272 operands[1] = gen_lowpart (HImode, reg);
5274 else if (!arm_arch4)
5276 if (GET_CODE (operands[1]) == MEM)
5279 rtx offset = const0_rtx;
5280 rtx reg = gen_reg_rtx (SImode);
5282 if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
5283 || (GET_CODE (base) == PLUS
5284 && (GET_CODE (offset = XEXP (base, 1))
5286 && ((INTVAL(offset) & 1) != 1)
5287 && GET_CODE (base = XEXP (base, 0)) == REG))
5288 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
5292 new_rtx = widen_memory_access (operands[1], SImode,
5293 ((INTVAL (offset) & ~3)
5294 - INTVAL (offset)));
5295 emit_insn (gen_movsi (reg, new_rtx));
5296 if (((INTVAL (offset) & 2) != 0)
5297 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
5299 rtx reg2 = gen_reg_rtx (SImode);
5301 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
5306 emit_insn (gen_movhi_bytes (reg, operands[1]));
5308 operands[1] = gen_lowpart (HImode, reg);
5312 /* Handle loading a large integer during reload. */
5313 else if (GET_CODE (operands[1]) == CONST_INT
5314 && !const_ok_for_arm (INTVAL (operands[1]))
5315 && !const_ok_for_arm (~INTVAL (operands[1])))
5317 /* Writing a constant to memory needs a scratch, which should
5318 be handled with SECONDARY_RELOADs. */
5319 gcc_assert (GET_CODE (operands[0]) == REG);
5321 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5322 emit_insn (gen_movsi (operands[0], operands[1]));
5326 else if (TARGET_THUMB2)
5328 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
5329 if (can_create_pseudo_p ())
5331 if (GET_CODE (operands[0]) != REG)
5332 operands[1] = force_reg (HImode, operands[1]);
5333 /* Zero extend a constant, and keep it in an SImode reg. */
5334 else if (GET_CODE (operands[1]) == CONST_INT)
5336 rtx reg = gen_reg_rtx (SImode);
5337 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
5339 emit_insn (gen_movsi (reg, GEN_INT (val)));
5340 operands[1] = gen_lowpart (HImode, reg);
5344 else /* TARGET_THUMB1 */
5346 if (can_create_pseudo_p ())
5348 if (GET_CODE (operands[1]) == CONST_INT)
5350 rtx reg = gen_reg_rtx (SImode);
5352 emit_insn (gen_movsi (reg, operands[1]));
5353 operands[1] = gen_lowpart (HImode, reg);
5356 /* ??? We shouldn't really get invalid addresses here, but this can
5357 happen if we are passed a SP (never OK for HImode/QImode) or
5358 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5359 HImode/QImode) relative address. */
5360 /* ??? This should perhaps be fixed elsewhere, for instance, in
5361 fixup_stack_1, by checking for other kinds of invalid addresses,
5362 e.g. a bare reference to a virtual register. This may confuse the
5363 alpha though, which must handle this case differently. */
5364 if (GET_CODE (operands[0]) == MEM
5365 && !memory_address_p (GET_MODE (operands[0]),
5366 XEXP (operands[0], 0)))
5368 = replace_equiv_address (operands[0],
5369 copy_to_reg (XEXP (operands[0], 0)));
5371 if (GET_CODE (operands[1]) == MEM
5372 && !memory_address_p (GET_MODE (operands[1]),
5373 XEXP (operands[1], 0)))
5375 = replace_equiv_address (operands[1],
5376 copy_to_reg (XEXP (operands[1], 0)));
5378 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5380 rtx reg = gen_reg_rtx (SImode);
5382 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
5383 operands[1] = gen_lowpart (HImode, reg);
5386 if (GET_CODE (operands[0]) == MEM)
5387 operands[1] = force_reg (HImode, operands[1]);
5389 else if (GET_CODE (operands[1]) == CONST_INT
5390 && !satisfies_constraint_I (operands[1]))
5392 /* Handle loading a large integer during reload. */
5394 /* Writing a constant to memory needs a scratch, which should
5395 be handled with SECONDARY_RELOADs. */
5396 gcc_assert (GET_CODE (operands[0]) == REG);
5398 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5399 emit_insn (gen_movsi (operands[0], operands[1]));
5406 (define_insn "*thumb1_movhi_insn"
5407 [(set (match_operand:HI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5408 (match_operand:HI 1 "general_operand" "l,m,l,*h,*r,I"))]
5410 && ( register_operand (operands[0], HImode)
5411 || register_operand (operands[1], HImode))"
5413 switch (which_alternative)
5415 case 0: return \"add %0, %1, #0\";
5416 case 2: return \"strh %1, %0\";
5417 case 3: return \"mov %0, %1\";
5418 case 4: return \"mov %0, %1\";
5419 case 5: return \"mov %0, %1\";
5420 default: gcc_unreachable ();
5422 /* The stack pointer can end up being taken as an index register.
5423 Catch this case here and deal with it. */
5424 if (GET_CODE (XEXP (operands[1], 0)) == PLUS
5425 && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
5426 && REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
5429 ops[0] = operands[0];
5430 ops[1] = XEXP (XEXP (operands[1], 0), 0);
5432 output_asm_insn (\"mov %0, %1\", ops);
5434 XEXP (XEXP (operands[1], 0), 0) = operands[0];
5437 return \"ldrh %0, %1\";
5439 [(set_attr "length" "2,4,2,2,2,2")
5440 (set_attr "type" "*,load1,store1,*,*,*")]
5444 (define_expand "movhi_bytes"
5445 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5447 (zero_extend:SI (match_dup 6)))
5448 (set (match_operand:SI 0 "" "")
5449 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
5454 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5456 mem1 = change_address (operands[1], QImode, addr);
5457 mem2 = change_address (operands[1], QImode, plus_constant (addr, 1));
5458 operands[0] = gen_lowpart (SImode, operands[0]);
5460 operands[2] = gen_reg_rtx (SImode);
5461 operands[3] = gen_reg_rtx (SImode);
5464 if (BYTES_BIG_ENDIAN)
5466 operands[4] = operands[2];
5467 operands[5] = operands[3];
5471 operands[4] = operands[3];
5472 operands[5] = operands[2];
5477 (define_expand "movhi_bigend"
5479 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand" "") 0)
5482 (ashiftrt:SI (match_dup 2) (const_int 16)))
5483 (set (match_operand:HI 0 "s_register_operand" "")
5487 operands[2] = gen_reg_rtx (SImode);
5488 operands[3] = gen_reg_rtx (SImode);
5489 operands[4] = gen_lowpart (HImode, operands[3]);
5493 ;; Pattern to recognize insn generated default case above
5494 (define_insn "*movhi_insn_arch4"
5495 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
5496 (match_operand:HI 1 "general_operand" "rI,K,r,m"))]
5499 && (GET_CODE (operands[1]) != CONST_INT
5500 || const_ok_for_arm (INTVAL (operands[1]))
5501 || const_ok_for_arm (~INTVAL (operands[1])))"
5503 mov%?\\t%0, %1\\t%@ movhi
5504 mvn%?\\t%0, #%B1\\t%@ movhi
5505 str%(h%)\\t%1, %0\\t%@ movhi
5506 ldr%(h%)\\t%0, %1\\t%@ movhi"
5507 [(set_attr "type" "*,*,store1,load1")
5508 (set_attr "predicable" "yes")
5509 (set_attr "pool_range" "*,*,*,256")
5510 (set_attr "neg_pool_range" "*,*,*,244")]
5513 (define_insn "*movhi_bytes"
5514 [(set (match_operand:HI 0 "s_register_operand" "=r,r")
5515 (match_operand:HI 1 "arm_rhs_operand" "rI,K"))]
5518 mov%?\\t%0, %1\\t%@ movhi
5519 mvn%?\\t%0, #%B1\\t%@ movhi"
5520 [(set_attr "predicable" "yes")]
5523 (define_expand "thumb_movhi_clobber"
5524 [(set (match_operand:HI 0 "memory_operand" "")
5525 (match_operand:HI 1 "register_operand" ""))
5526 (clobber (match_operand:DI 2 "register_operand" ""))]
5529 if (strict_memory_address_p (HImode, XEXP (operands[0], 0))
5530 && REGNO (operands[1]) <= LAST_LO_REGNUM)
5532 emit_insn (gen_movhi (operands[0], operands[1]));
5535 /* XXX Fixme, need to handle other cases here as well. */
5540 ;; We use a DImode scratch because we may occasionally need an additional
5541 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
5542 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
5543 (define_expand "reload_outhi"
5544 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
5545 (match_operand:HI 1 "s_register_operand" "r")
5546 (match_operand:DI 2 "s_register_operand" "=&l")])]
5549 arm_reload_out_hi (operands);
5551 thumb_reload_out_hi (operands);
5556 (define_expand "reload_inhi"
5557 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
5558 (match_operand:HI 1 "arm_reload_memory_operand" "o")
5559 (match_operand:DI 2 "s_register_operand" "=&r")])]
5563 arm_reload_in_hi (operands);
5565 thumb_reload_out_hi (operands);
5569 (define_expand "movqi"
5570 [(set (match_operand:QI 0 "general_operand" "")
5571 (match_operand:QI 1 "general_operand" ""))]
5574 /* Everything except mem = const or mem = mem can be done easily */
5576 if (can_create_pseudo_p ())
5578 if (GET_CODE (operands[1]) == CONST_INT)
5580 rtx reg = gen_reg_rtx (SImode);
5582 emit_insn (gen_movsi (reg, operands[1]));
5583 operands[1] = gen_lowpart (QImode, reg);
5588 /* ??? We shouldn't really get invalid addresses here, but this can
5589 happen if we are passed a SP (never OK for HImode/QImode) or
5590 virtual register (rejected by GO_IF_LEGITIMATE_ADDRESS for
5591 HImode/QImode) relative address. */
5592 /* ??? This should perhaps be fixed elsewhere, for instance, in
5593 fixup_stack_1, by checking for other kinds of invalid addresses,
5594 e.g. a bare reference to a virtual register. This may confuse the
5595 alpha though, which must handle this case differently. */
5596 if (GET_CODE (operands[0]) == MEM
5597 && !memory_address_p (GET_MODE (operands[0]),
5598 XEXP (operands[0], 0)))
5600 = replace_equiv_address (operands[0],
5601 copy_to_reg (XEXP (operands[0], 0)));
5602 if (GET_CODE (operands[1]) == MEM
5603 && !memory_address_p (GET_MODE (operands[1]),
5604 XEXP (operands[1], 0)))
5606 = replace_equiv_address (operands[1],
5607 copy_to_reg (XEXP (operands[1], 0)));
5610 if (GET_CODE (operands[1]) == MEM && optimize > 0)
5612 rtx reg = gen_reg_rtx (SImode);
5614 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
5615 operands[1] = gen_lowpart (QImode, reg);
5618 if (GET_CODE (operands[0]) == MEM)
5619 operands[1] = force_reg (QImode, operands[1]);
5621 else if (TARGET_THUMB
5622 && GET_CODE (operands[1]) == CONST_INT
5623 && !satisfies_constraint_I (operands[1]))
5625 /* Handle loading a large integer during reload. */
5627 /* Writing a constant to memory needs a scratch, which should
5628 be handled with SECONDARY_RELOADs. */
5629 gcc_assert (GET_CODE (operands[0]) == REG);
5631 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
5632 emit_insn (gen_movsi (operands[0], operands[1]));
5639 (define_insn "*arm_movqi_insn"
5640 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,m")
5641 (match_operand:QI 1 "general_operand" "rI,K,m,r"))]
5643 && ( register_operand (operands[0], QImode)
5644 || register_operand (operands[1], QImode))"
5650 [(set_attr "type" "*,*,load1,store1")
5651 (set_attr "predicable" "yes")]
5654 (define_insn "*thumb1_movqi_insn"
5655 [(set (match_operand:QI 0 "nonimmediate_operand" "=l,l,m,*r,*h,l")
5656 (match_operand:QI 1 "general_operand" "l, m,l,*h,*r,I"))]
5658 && ( register_operand (operands[0], QImode)
5659 || register_operand (operands[1], QImode))"
5667 [(set_attr "length" "2")
5668 (set_attr "type" "*,load1,store1,*,*,*")
5669 (set_attr "pool_range" "*,32,*,*,*,*")]
5672 (define_expand "movsf"
5673 [(set (match_operand:SF 0 "general_operand" "")
5674 (match_operand:SF 1 "general_operand" ""))]
5679 if (GET_CODE (operands[0]) == MEM)
5680 operands[1] = force_reg (SFmode, operands[1]);
5682 else /* TARGET_THUMB1 */
5684 if (can_create_pseudo_p ())
5686 if (GET_CODE (operands[0]) != REG)
5687 operands[1] = force_reg (SFmode, operands[1]);
5693 ;; Transform a floating-point move of a constant into a core register into
5694 ;; an SImode operation.
5696 [(set (match_operand:SF 0 "arm_general_register_operand" "")
5697 (match_operand:SF 1 "immediate_operand" ""))]
5700 && GET_CODE (operands[1]) == CONST_DOUBLE"
5701 [(set (match_dup 2) (match_dup 3))]
5703 operands[2] = gen_lowpart (SImode, operands[0]);
5704 operands[3] = gen_lowpart (SImode, operands[1]);
5705 if (operands[2] == 0 || operands[3] == 0)
5710 (define_insn "*arm_movsf_soft_insn"
5711 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
5712 (match_operand:SF 1 "general_operand" "r,mE,r"))]
5714 && TARGET_SOFT_FLOAT
5715 && (GET_CODE (operands[0]) != MEM
5716 || register_operand (operands[1], SFmode))"
5719 ldr%?\\t%0, %1\\t%@ float
5720 str%?\\t%1, %0\\t%@ float"
5721 [(set_attr "length" "4,4,4")
5722 (set_attr "predicable" "yes")
5723 (set_attr "type" "*,load1,store1")
5724 (set_attr "pool_range" "*,4096,*")
5725 (set_attr "neg_pool_range" "*,4084,*")]
5728 ;;; ??? This should have alternatives for constants.
5729 (define_insn "*thumb1_movsf_insn"
5730 [(set (match_operand:SF 0 "nonimmediate_operand" "=l,l,>,l, m,*r,*h")
5731 (match_operand:SF 1 "general_operand" "l, >,l,mF,l,*h,*r"))]
5733 && ( register_operand (operands[0], SFmode)
5734 || register_operand (operands[1], SFmode))"
5743 [(set_attr "length" "2")
5744 (set_attr "type" "*,load1,store1,load1,store1,*,*")
5745 (set_attr "pool_range" "*,*,*,1020,*,*,*")]
5748 (define_expand "movdf"
5749 [(set (match_operand:DF 0 "general_operand" "")
5750 (match_operand:DF 1 "general_operand" ""))]
5755 if (GET_CODE (operands[0]) == MEM)
5756 operands[1] = force_reg (DFmode, operands[1]);
5758 else /* TARGET_THUMB */
5760 if (can_create_pseudo_p ())
5762 if (GET_CODE (operands[0]) != REG)
5763 operands[1] = force_reg (DFmode, operands[1]);
5769 ;; Reloading a df mode value stored in integer regs to memory can require a
5771 (define_expand "reload_outdf"
5772 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
5773 (match_operand:DF 1 "s_register_operand" "r")
5774 (match_operand:SI 2 "s_register_operand" "=&r")]
5778 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
5781 operands[2] = XEXP (operands[0], 0);
5782 else if (code == POST_INC || code == PRE_DEC)
5784 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
5785 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
5786 emit_insn (gen_movdi (operands[0], operands[1]));
5789 else if (code == PRE_INC)
5791 rtx reg = XEXP (XEXP (operands[0], 0), 0);
5793 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
5796 else if (code == POST_DEC)
5797 operands[2] = XEXP (XEXP (operands[0], 0), 0);
5799 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
5800 XEXP (XEXP (operands[0], 0), 1)));
5802 emit_insn (gen_rtx_SET (VOIDmode,
5803 replace_equiv_address (operands[0], operands[2]),
5806 if (code == POST_DEC)
5807 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
5813 (define_insn "*movdf_soft_insn"
5814 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
5815 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
5816 "TARGET_ARM && TARGET_SOFT_FLOAT
5817 && ( register_operand (operands[0], DFmode)
5818 || register_operand (operands[1], DFmode))"
5820 switch (which_alternative)
5827 return output_move_double (operands);
5830 [(set_attr "length" "8,12,16,8,8")
5831 (set_attr "type" "*,*,*,load2,store2")
5832 (set_attr "pool_range" "1020")
5833 (set_attr "neg_pool_range" "1008")]
5836 ;;; ??? This should have alternatives for constants.
5837 ;;; ??? This was originally identical to the movdi_insn pattern.
5838 ;;; ??? The 'F' constraint looks funny, but it should always be replaced by
5839 ;;; thumb_reorg with a memory reference.
5840 (define_insn "*thumb_movdf_insn"
5841 [(set (match_operand:DF 0 "nonimmediate_operand" "=l,l,>,l, m,*r")
5842 (match_operand:DF 1 "general_operand" "l, >,l,mF,l,*r"))]
5844 && ( register_operand (operands[0], DFmode)
5845 || register_operand (operands[1], DFmode))"
5847 switch (which_alternative)
5851 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5852 return \"add\\t%0, %1, #0\;add\\t%H0, %H1, #0\";
5853 return \"add\\t%H0, %H1, #0\;add\\t%0, %1, #0\";
5855 return \"ldmia\\t%1, {%0, %H0}\";
5857 return \"stmia\\t%0, {%1, %H1}\";
5859 return thumb_load_double_from_address (operands);
5861 operands[2] = gen_rtx_MEM (SImode,
5862 plus_constant (XEXP (operands[0], 0), 4));
5863 output_asm_insn (\"str\\t%1, %0\;str\\t%H1, %2\", operands);
5866 if (REGNO (operands[1]) == REGNO (operands[0]) + 1)
5867 return \"mov\\t%0, %1\;mov\\t%H0, %H1\";
5868 return \"mov\\t%H0, %H1\;mov\\t%0, %1\";
5871 [(set_attr "length" "4,2,2,6,4,4")
5872 (set_attr "type" "*,load2,store2,load2,store2,*")
5873 (set_attr "pool_range" "*,*,*,1020,*,*")]
5876 (define_expand "movxf"
5877 [(set (match_operand:XF 0 "general_operand" "")
5878 (match_operand:XF 1 "general_operand" ""))]
5879 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
5881 if (GET_CODE (operands[0]) == MEM)
5882 operands[1] = force_reg (XFmode, operands[1]);
5888 ;; load- and store-multiple insns
5889 ;; The arm can load/store any set of registers, provided that they are in
5890 ;; ascending order; but that is beyond GCC so stick with what it knows.
5892 (define_expand "load_multiple"
5893 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
5894 (match_operand:SI 1 "" ""))
5895 (use (match_operand:SI 2 "" ""))])]
5898 HOST_WIDE_INT offset = 0;
5900 /* Support only fixed point registers. */
5901 if (GET_CODE (operands[2]) != CONST_INT
5902 || INTVAL (operands[2]) > 14
5903 || INTVAL (operands[2]) < 2
5904 || GET_CODE (operands[1]) != MEM
5905 || GET_CODE (operands[0]) != REG
5906 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
5907 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
5911 = arm_gen_load_multiple (REGNO (operands[0]), INTVAL (operands[2]),
5912 force_reg (SImode, XEXP (operands[1], 0)),
5913 TRUE, FALSE, operands[1], &offset);
5916 ;; Load multiple with write-back
5918 (define_insn "*ldmsi_postinc4"
5919 [(match_parallel 0 "load_multiple_operation"
5920 [(set (match_operand:SI 1 "s_register_operand" "=r")
5921 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5923 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5924 (mem:SI (match_dup 2)))
5925 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5926 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5927 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5928 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5929 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5930 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5931 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
5932 "ldm%(ia%)\\t%1!, {%3, %4, %5, %6}"
5933 [(set_attr "type" "load4")
5934 (set_attr "predicable" "yes")]
5937 (define_insn "*ldmsi_postinc4_thumb1"
5938 [(match_parallel 0 "load_multiple_operation"
5939 [(set (match_operand:SI 1 "s_register_operand" "=l")
5940 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5942 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5943 (mem:SI (match_dup 2)))
5944 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5945 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5946 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5947 (mem:SI (plus:SI (match_dup 2) (const_int 8))))
5948 (set (match_operand:SI 6 "arm_hard_register_operand" "")
5949 (mem:SI (plus:SI (match_dup 2) (const_int 12))))])]
5950 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
5951 "ldmia\\t%1!, {%3, %4, %5, %6}"
5952 [(set_attr "type" "load4")]
5955 (define_insn "*ldmsi_postinc3"
5956 [(match_parallel 0 "load_multiple_operation"
5957 [(set (match_operand:SI 1 "s_register_operand" "=r")
5958 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5960 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5961 (mem:SI (match_dup 2)))
5962 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5963 (mem:SI (plus:SI (match_dup 2) (const_int 4))))
5964 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5965 (mem:SI (plus:SI (match_dup 2) (const_int 8))))])]
5966 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
5967 "ldm%(ia%)\\t%1!, {%3, %4, %5}"
5968 [(set_attr "type" "load3")
5969 (set_attr "predicable" "yes")]
5972 (define_insn "*ldmsi_postinc2"
5973 [(match_parallel 0 "load_multiple_operation"
5974 [(set (match_operand:SI 1 "s_register_operand" "=r")
5975 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
5977 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5978 (mem:SI (match_dup 2)))
5979 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5980 (mem:SI (plus:SI (match_dup 2) (const_int 4))))])]
5981 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
5982 "ldm%(ia%)\\t%1!, {%3, %4}"
5983 [(set_attr "type" "load2")
5984 (set_attr "predicable" "yes")]
5987 ;; Ordinary load multiple
5989 (define_insn "*ldmsi4"
5990 [(match_parallel 0 "load_multiple_operation"
5991 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
5992 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
5993 (set (match_operand:SI 3 "arm_hard_register_operand" "")
5994 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
5995 (set (match_operand:SI 4 "arm_hard_register_operand" "")
5996 (mem:SI (plus:SI (match_dup 1) (const_int 8))))
5997 (set (match_operand:SI 5 "arm_hard_register_operand" "")
5998 (mem:SI (plus:SI (match_dup 1) (const_int 12))))])]
5999 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6000 "ldm%(ia%)\\t%1, {%2, %3, %4, %5}"
6001 [(set_attr "type" "load4")
6002 (set_attr "predicable" "yes")]
6005 (define_insn "*ldmsi3"
6006 [(match_parallel 0 "load_multiple_operation"
6007 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6008 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6009 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6010 (mem:SI (plus:SI (match_dup 1) (const_int 4))))
6011 (set (match_operand:SI 4 "arm_hard_register_operand" "")
6012 (mem:SI (plus:SI (match_dup 1) (const_int 8))))])]
6013 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6014 "ldm%(ia%)\\t%1, {%2, %3, %4}"
6015 [(set_attr "type" "load3")
6016 (set_attr "predicable" "yes")]
6019 (define_insn "*ldmsi2"
6020 [(match_parallel 0 "load_multiple_operation"
6021 [(set (match_operand:SI 2 "arm_hard_register_operand" "")
6022 (mem:SI (match_operand:SI 1 "s_register_operand" "r")))
6023 (set (match_operand:SI 3 "arm_hard_register_operand" "")
6024 (mem:SI (plus:SI (match_dup 1) (const_int 4))))])]
6025 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6026 "ldm%(ia%)\\t%1, {%2, %3}"
6027 [(set_attr "type" "load2")
6028 (set_attr "predicable" "yes")]
6031 (define_expand "store_multiple"
6032 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
6033 (match_operand:SI 1 "" ""))
6034 (use (match_operand:SI 2 "" ""))])]
6037 HOST_WIDE_INT offset = 0;
6039 /* Support only fixed point registers. */
6040 if (GET_CODE (operands[2]) != CONST_INT
6041 || INTVAL (operands[2]) > 14
6042 || INTVAL (operands[2]) < 2
6043 || GET_CODE (operands[1]) != REG
6044 || GET_CODE (operands[0]) != MEM
6045 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
6046 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
6050 = arm_gen_store_multiple (REGNO (operands[1]), INTVAL (operands[2]),
6051 force_reg (SImode, XEXP (operands[0], 0)),
6052 TRUE, FALSE, operands[0], &offset);
6055 ;; Store multiple with write-back
6057 (define_insn "*stmsi_postinc4"
6058 [(match_parallel 0 "store_multiple_operation"
6059 [(set (match_operand:SI 1 "s_register_operand" "=r")
6060 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6062 (set (mem:SI (match_dup 2))
6063 (match_operand:SI 3 "arm_hard_register_operand" ""))
6064 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6065 (match_operand:SI 4 "arm_hard_register_operand" ""))
6066 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6067 (match_operand:SI 5 "arm_hard_register_operand" ""))
6068 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6069 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6070 "TARGET_32BIT && XVECLEN (operands[0], 0) == 5"
6071 "stm%(ia%)\\t%1!, {%3, %4, %5, %6}"
6072 [(set_attr "predicable" "yes")
6073 (set_attr "type" "store4")]
6076 (define_insn "*stmsi_postinc4_thumb1"
6077 [(match_parallel 0 "store_multiple_operation"
6078 [(set (match_operand:SI 1 "s_register_operand" "=l")
6079 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6081 (set (mem:SI (match_dup 2))
6082 (match_operand:SI 3 "arm_hard_register_operand" ""))
6083 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6084 (match_operand:SI 4 "arm_hard_register_operand" ""))
6085 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6086 (match_operand:SI 5 "arm_hard_register_operand" ""))
6087 (set (mem:SI (plus:SI (match_dup 2) (const_int 12)))
6088 (match_operand:SI 6 "arm_hard_register_operand" ""))])]
6089 "TARGET_THUMB1 && XVECLEN (operands[0], 0) == 5"
6090 "stmia\\t%1!, {%3, %4, %5, %6}"
6091 [(set_attr "type" "store4")]
6094 (define_insn "*stmsi_postinc3"
6095 [(match_parallel 0 "store_multiple_operation"
6096 [(set (match_operand:SI 1 "s_register_operand" "=r")
6097 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6099 (set (mem:SI (match_dup 2))
6100 (match_operand:SI 3 "arm_hard_register_operand" ""))
6101 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6102 (match_operand:SI 4 "arm_hard_register_operand" ""))
6103 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6104 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6105 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6106 "stm%(ia%)\\t%1!, {%3, %4, %5}"
6107 [(set_attr "predicable" "yes")
6108 (set_attr "type" "store3")]
6111 (define_insn "*stmsi_postinc2"
6112 [(match_parallel 0 "store_multiple_operation"
6113 [(set (match_operand:SI 1 "s_register_operand" "=r")
6114 (plus:SI (match_operand:SI 2 "s_register_operand" "1")
6116 (set (mem:SI (match_dup 2))
6117 (match_operand:SI 3 "arm_hard_register_operand" ""))
6118 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6119 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6120 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6121 "stm%(ia%)\\t%1!, {%3, %4}"
6122 [(set_attr "predicable" "yes")
6123 (set_attr "type" "store2")]
6126 ;; Ordinary store multiple
6128 (define_insn "*stmsi4"
6129 [(match_parallel 0 "store_multiple_operation"
6130 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6131 (match_operand:SI 2 "arm_hard_register_operand" ""))
6132 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6133 (match_operand:SI 3 "arm_hard_register_operand" ""))
6134 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6135 (match_operand:SI 4 "arm_hard_register_operand" ""))
6136 (set (mem:SI (plus:SI (match_dup 1) (const_int 12)))
6137 (match_operand:SI 5 "arm_hard_register_operand" ""))])]
6138 "TARGET_32BIT && XVECLEN (operands[0], 0) == 4"
6139 "stm%(ia%)\\t%1, {%2, %3, %4, %5}"
6140 [(set_attr "predicable" "yes")
6141 (set_attr "type" "store4")]
6144 (define_insn "*stmsi3"
6145 [(match_parallel 0 "store_multiple_operation"
6146 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6147 (match_operand:SI 2 "arm_hard_register_operand" ""))
6148 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6149 (match_operand:SI 3 "arm_hard_register_operand" ""))
6150 (set (mem:SI (plus:SI (match_dup 1) (const_int 8)))
6151 (match_operand:SI 4 "arm_hard_register_operand" ""))])]
6152 "TARGET_32BIT && XVECLEN (operands[0], 0) == 3"
6153 "stm%(ia%)\\t%1, {%2, %3, %4}"
6154 [(set_attr "predicable" "yes")
6155 (set_attr "type" "store3")]
6158 (define_insn "*stmsi2"
6159 [(match_parallel 0 "store_multiple_operation"
6160 [(set (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
6161 (match_operand:SI 2 "arm_hard_register_operand" ""))
6162 (set (mem:SI (plus:SI (match_dup 1) (const_int 4)))
6163 (match_operand:SI 3 "arm_hard_register_operand" ""))])]
6164 "TARGET_32BIT && XVECLEN (operands[0], 0) == 2"
6165 "stm%(ia%)\\t%1, {%2, %3}"
6166 [(set_attr "predicable" "yes")
6167 (set_attr "type" "store2")]
6170 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
6171 ;; We could let this apply for blocks of less than this, but it clobbers so
6172 ;; many registers that there is then probably a better way.
6174 (define_expand "movmemqi"
6175 [(match_operand:BLK 0 "general_operand" "")
6176 (match_operand:BLK 1 "general_operand" "")
6177 (match_operand:SI 2 "const_int_operand" "")
6178 (match_operand:SI 3 "const_int_operand" "")]
6183 if (arm_gen_movmemqi (operands))
6187 else /* TARGET_THUMB1 */
6189 if ( INTVAL (operands[3]) != 4
6190 || INTVAL (operands[2]) > 48)
6193 thumb_expand_movmemqi (operands);
6199 ;; Thumb block-move insns
6201 (define_insn "movmem12b"
6202 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6203 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6204 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6205 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6206 (set (mem:SI (plus:SI (match_dup 2) (const_int 8)))
6207 (mem:SI (plus:SI (match_dup 3) (const_int 8))))
6208 (set (match_operand:SI 0 "register_operand" "=l")
6209 (plus:SI (match_dup 2) (const_int 12)))
6210 (set (match_operand:SI 1 "register_operand" "=l")
6211 (plus:SI (match_dup 3) (const_int 12)))
6212 (clobber (match_scratch:SI 4 "=&l"))
6213 (clobber (match_scratch:SI 5 "=&l"))
6214 (clobber (match_scratch:SI 6 "=&l"))]
6216 "* return thumb_output_move_mem_multiple (3, operands);"
6217 [(set_attr "length" "4")
6218 ; This isn't entirely accurate... It loads as well, but in terms of
6219 ; scheduling the following insn it is better to consider it as a store
6220 (set_attr "type" "store3")]
6223 (define_insn "movmem8b"
6224 [(set (mem:SI (match_operand:SI 2 "register_operand" "0"))
6225 (mem:SI (match_operand:SI 3 "register_operand" "1")))
6226 (set (mem:SI (plus:SI (match_dup 2) (const_int 4)))
6227 (mem:SI (plus:SI (match_dup 3) (const_int 4))))
6228 (set (match_operand:SI 0 "register_operand" "=l")
6229 (plus:SI (match_dup 2) (const_int 8)))
6230 (set (match_operand:SI 1 "register_operand" "=l")
6231 (plus:SI (match_dup 3) (const_int 8)))
6232 (clobber (match_scratch:SI 4 "=&l"))
6233 (clobber (match_scratch:SI 5 "=&l"))]
6235 "* return thumb_output_move_mem_multiple (2, operands);"
6236 [(set_attr "length" "4")
6237 ; This isn't entirely accurate... It loads as well, but in terms of
6238 ; scheduling the following insn it is better to consider it as a store
6239 (set_attr "type" "store2")]
6244 ;; Compare & branch insns
6245 ;; The range calculations are based as follows:
6246 ;; For forward branches, the address calculation returns the address of
6247 ;; the next instruction. This is 2 beyond the branch instruction.
6248 ;; For backward branches, the address calculation returns the address of
6249 ;; the first instruction in this pattern (cmp). This is 2 before the branch
6250 ;; instruction for the shortest sequence, and 4 before the branch instruction
6251 ;; if we have to jump around an unconditional branch.
6252 ;; To the basic branch range the PC offset must be added (this is +4).
6253 ;; So for forward branches we have
6254 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
6255 ;; And for backward branches we have
6256 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
6258 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
6259 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
6261 (define_expand "cbranchsi4"
6262 [(set (pc) (if_then_else
6263 (match_operator 0 "arm_comparison_operator"
6264 [(match_operand:SI 1 "s_register_operand" "")
6265 (match_operand:SI 2 "nonmemory_operand" "")])
6266 (label_ref (match_operand 3 "" ""))
6270 if (thumb1_cmpneg_operand (operands[2], SImode))
6272 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
6273 operands[3], operands[0]));
6276 if (!thumb1_cmp_operand (operands[2], SImode))
6277 operands[2] = force_reg (SImode, operands[2]);
6280 (define_insn "*cbranchsi4_insn"
6281 [(set (pc) (if_then_else
6282 (match_operator 0 "arm_comparison_operator"
6283 [(match_operand:SI 1 "s_register_operand" "l,*h")
6284 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r")])
6285 (label_ref (match_operand 3 "" ""))
6289 output_asm_insn (\"cmp\\t%1, %2\", operands);
6291 switch (get_attr_length (insn))
6293 case 4: return \"b%d0\\t%l3\";
6294 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6295 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6298 [(set (attr "far_jump")
6300 (eq_attr "length" "8")
6301 (const_string "yes")
6302 (const_string "no")))
6303 (set (attr "length")
6305 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6306 (le (minus (match_dup 3) (pc)) (const_int 256)))
6309 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6310 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6315 (define_insn "cbranchsi4_scratch"
6316 [(set (pc) (if_then_else
6317 (match_operator 4 "arm_comparison_operator"
6318 [(match_operand:SI 1 "s_register_operand" "l,0")
6319 (match_operand:SI 2 "thumb1_cmpneg_operand" "L,J")])
6320 (label_ref (match_operand 3 "" ""))
6322 (clobber (match_scratch:SI 0 "=l,l"))]
6325 output_asm_insn (\"add\\t%0, %1, #%n2\", operands);
6327 switch (get_attr_length (insn))
6329 case 4: return \"b%d4\\t%l3\";
6330 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6331 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6334 [(set (attr "far_jump")
6336 (eq_attr "length" "8")
6337 (const_string "yes")
6338 (const_string "no")))
6339 (set (attr "length")
6341 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6342 (le (minus (match_dup 3) (pc)) (const_int 256)))
6345 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6346 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6350 (define_insn "*movsi_cbranchsi4"
6353 (match_operator 3 "arm_comparison_operator"
6354 [(match_operand:SI 1 "s_register_operand" "0,l,l,l")
6356 (label_ref (match_operand 2 "" ""))
6358 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*h,*m")
6362 if (which_alternative == 0)
6363 output_asm_insn (\"cmp\t%0, #0\", operands);
6364 else if (which_alternative == 1)
6365 output_asm_insn (\"sub\t%0, %1, #0\", operands);
6368 output_asm_insn (\"cmp\t%1, #0\", operands);
6369 if (which_alternative == 2)
6370 output_asm_insn (\"mov\t%0, %1\", operands);
6372 output_asm_insn (\"str\t%1, %0\", operands);
6374 switch (get_attr_length (insn) - ((which_alternative > 1) ? 2 : 0))
6376 case 4: return \"b%d3\\t%l2\";
6377 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6378 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6381 [(set (attr "far_jump")
6383 (ior (and (gt (symbol_ref ("which_alternative"))
6385 (eq_attr "length" "8"))
6386 (eq_attr "length" "10"))
6387 (const_string "yes")
6388 (const_string "no")))
6389 (set (attr "length")
6391 (le (symbol_ref ("which_alternative"))
6394 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6395 (le (minus (match_dup 2) (pc)) (const_int 256)))
6398 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6399 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6403 (and (ge (minus (match_dup 2) (pc)) (const_int -248))
6404 (le (minus (match_dup 2) (pc)) (const_int 256)))
6407 (and (ge (minus (match_dup 2) (pc)) (const_int -2038))
6408 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6413 (define_insn "*negated_cbranchsi4"
6416 (match_operator 0 "equality_operator"
6417 [(match_operand:SI 1 "s_register_operand" "l")
6418 (neg:SI (match_operand:SI 2 "s_register_operand" "l"))])
6419 (label_ref (match_operand 3 "" ""))
6423 output_asm_insn (\"cmn\\t%1, %2\", operands);
6424 switch (get_attr_length (insn))
6426 case 4: return \"b%d0\\t%l3\";
6427 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6428 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6431 [(set (attr "far_jump")
6433 (eq_attr "length" "8")
6434 (const_string "yes")
6435 (const_string "no")))
6436 (set (attr "length")
6438 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6439 (le (minus (match_dup 3) (pc)) (const_int 256)))
6442 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6443 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6448 (define_insn "*tbit_cbranch"
6451 (match_operator 0 "equality_operator"
6452 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6454 (match_operand:SI 2 "const_int_operand" "i"))
6456 (label_ref (match_operand 3 "" ""))
6458 (clobber (match_scratch:SI 4 "=l"))]
6463 op[0] = operands[4];
6464 op[1] = operands[1];
6465 op[2] = GEN_INT (32 - 1 - INTVAL (operands[2]));
6467 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6468 switch (get_attr_length (insn))
6470 case 4: return \"b%d0\\t%l3\";
6471 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6472 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6475 [(set (attr "far_jump")
6477 (eq_attr "length" "8")
6478 (const_string "yes")
6479 (const_string "no")))
6480 (set (attr "length")
6482 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6483 (le (minus (match_dup 3) (pc)) (const_int 256)))
6486 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6487 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6492 (define_insn "*tlobits_cbranch"
6495 (match_operator 0 "equality_operator"
6496 [(zero_extract:SI (match_operand:SI 1 "s_register_operand" "l")
6497 (match_operand:SI 2 "const_int_operand" "i")
6500 (label_ref (match_operand 3 "" ""))
6502 (clobber (match_scratch:SI 4 "=l"))]
6507 op[0] = operands[4];
6508 op[1] = operands[1];
6509 op[2] = GEN_INT (32 - INTVAL (operands[2]));
6511 output_asm_insn (\"lsl\\t%0, %1, %2\", op);
6512 switch (get_attr_length (insn))
6514 case 4: return \"b%d0\\t%l3\";
6515 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6516 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6519 [(set (attr "far_jump")
6521 (eq_attr "length" "8")
6522 (const_string "yes")
6523 (const_string "no")))
6524 (set (attr "length")
6526 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6527 (le (minus (match_dup 3) (pc)) (const_int 256)))
6530 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6531 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6536 (define_insn "*tstsi3_cbranch"
6539 (match_operator 3 "equality_operator"
6540 [(and:SI (match_operand:SI 0 "s_register_operand" "%l")
6541 (match_operand:SI 1 "s_register_operand" "l"))
6543 (label_ref (match_operand 2 "" ""))
6548 output_asm_insn (\"tst\\t%0, %1\", operands);
6549 switch (get_attr_length (insn))
6551 case 4: return \"b%d3\\t%l2\";
6552 case 6: return \"b%D3\\t.LCB%=\;b\\t%l2\\t%@long jump\\n.LCB%=:\";
6553 default: return \"b%D3\\t.LCB%=\;bl\\t%l2\\t%@far jump\\n.LCB%=:\";
6556 [(set (attr "far_jump")
6558 (eq_attr "length" "8")
6559 (const_string "yes")
6560 (const_string "no")))
6561 (set (attr "length")
6563 (and (ge (minus (match_dup 2) (pc)) (const_int -250))
6564 (le (minus (match_dup 2) (pc)) (const_int 256)))
6567 (and (ge (minus (match_dup 2) (pc)) (const_int -2040))
6568 (le (minus (match_dup 2) (pc)) (const_int 2048)))
6573 (define_insn "*andsi3_cbranch"
6576 (match_operator 5 "equality_operator"
6577 [(and:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6578 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6580 (label_ref (match_operand 4 "" ""))
6582 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6583 (and:SI (match_dup 2) (match_dup 3)))
6584 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6588 if (which_alternative == 0)
6589 output_asm_insn (\"and\\t%0, %3\", operands);
6590 else if (which_alternative == 1)
6592 output_asm_insn (\"and\\t%1, %3\", operands);
6593 output_asm_insn (\"mov\\t%0, %1\", operands);
6597 output_asm_insn (\"and\\t%1, %3\", operands);
6598 output_asm_insn (\"str\\t%1, %0\", operands);
6601 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6603 case 4: return \"b%d5\\t%l4\";
6604 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6605 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6608 [(set (attr "far_jump")
6610 (ior (and (eq (symbol_ref ("which_alternative"))
6612 (eq_attr "length" "8"))
6613 (eq_attr "length" "10"))
6614 (const_string "yes")
6615 (const_string "no")))
6616 (set (attr "length")
6618 (eq (symbol_ref ("which_alternative"))
6621 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6622 (le (minus (match_dup 4) (pc)) (const_int 256)))
6625 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6626 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6630 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6631 (le (minus (match_dup 4) (pc)) (const_int 256)))
6634 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6635 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6640 (define_insn "*orrsi3_cbranch_scratch"
6643 (match_operator 4 "equality_operator"
6644 [(ior:SI (match_operand:SI 1 "s_register_operand" "%0")
6645 (match_operand:SI 2 "s_register_operand" "l"))
6647 (label_ref (match_operand 3 "" ""))
6649 (clobber (match_scratch:SI 0 "=l"))]
6653 output_asm_insn (\"orr\\t%0, %2\", operands);
6654 switch (get_attr_length (insn))
6656 case 4: return \"b%d4\\t%l3\";
6657 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6658 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6661 [(set (attr "far_jump")
6663 (eq_attr "length" "8")
6664 (const_string "yes")
6665 (const_string "no")))
6666 (set (attr "length")
6668 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6669 (le (minus (match_dup 3) (pc)) (const_int 256)))
6672 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6673 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6678 (define_insn "*orrsi3_cbranch"
6681 (match_operator 5 "equality_operator"
6682 [(ior:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6683 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6685 (label_ref (match_operand 4 "" ""))
6687 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6688 (ior:SI (match_dup 2) (match_dup 3)))
6689 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6693 if (which_alternative == 0)
6694 output_asm_insn (\"orr\\t%0, %3\", operands);
6695 else if (which_alternative == 1)
6697 output_asm_insn (\"orr\\t%1, %3\", operands);
6698 output_asm_insn (\"mov\\t%0, %1\", operands);
6702 output_asm_insn (\"orr\\t%1, %3\", operands);
6703 output_asm_insn (\"str\\t%1, %0\", operands);
6706 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6708 case 4: return \"b%d5\\t%l4\";
6709 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6710 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6713 [(set (attr "far_jump")
6715 (ior (and (eq (symbol_ref ("which_alternative"))
6717 (eq_attr "length" "8"))
6718 (eq_attr "length" "10"))
6719 (const_string "yes")
6720 (const_string "no")))
6721 (set (attr "length")
6723 (eq (symbol_ref ("which_alternative"))
6726 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6727 (le (minus (match_dup 4) (pc)) (const_int 256)))
6730 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6731 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6735 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6736 (le (minus (match_dup 4) (pc)) (const_int 256)))
6739 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6740 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6745 (define_insn "*xorsi3_cbranch_scratch"
6748 (match_operator 4 "equality_operator"
6749 [(xor:SI (match_operand:SI 1 "s_register_operand" "%0")
6750 (match_operand:SI 2 "s_register_operand" "l"))
6752 (label_ref (match_operand 3 "" ""))
6754 (clobber (match_scratch:SI 0 "=l"))]
6758 output_asm_insn (\"eor\\t%0, %2\", operands);
6759 switch (get_attr_length (insn))
6761 case 4: return \"b%d4\\t%l3\";
6762 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6763 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6766 [(set (attr "far_jump")
6768 (eq_attr "length" "8")
6769 (const_string "yes")
6770 (const_string "no")))
6771 (set (attr "length")
6773 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6774 (le (minus (match_dup 3) (pc)) (const_int 256)))
6777 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6778 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6783 (define_insn "*xorsi3_cbranch"
6786 (match_operator 5 "equality_operator"
6787 [(xor:SI (match_operand:SI 2 "s_register_operand" "%0,1,1,1")
6788 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
6790 (label_ref (match_operand 4 "" ""))
6792 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6793 (xor:SI (match_dup 2) (match_dup 3)))
6794 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6798 if (which_alternative == 0)
6799 output_asm_insn (\"eor\\t%0, %3\", operands);
6800 else if (which_alternative == 1)
6802 output_asm_insn (\"eor\\t%1, %3\", operands);
6803 output_asm_insn (\"mov\\t%0, %1\", operands);
6807 output_asm_insn (\"eor\\t%1, %3\", operands);
6808 output_asm_insn (\"str\\t%1, %0\", operands);
6811 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6813 case 4: return \"b%d5\\t%l4\";
6814 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6815 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6818 [(set (attr "far_jump")
6820 (ior (and (eq (symbol_ref ("which_alternative"))
6822 (eq_attr "length" "8"))
6823 (eq_attr "length" "10"))
6824 (const_string "yes")
6825 (const_string "no")))
6826 (set (attr "length")
6828 (eq (symbol_ref ("which_alternative"))
6831 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6832 (le (minus (match_dup 4) (pc)) (const_int 256)))
6835 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6836 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6840 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6841 (le (minus (match_dup 4) (pc)) (const_int 256)))
6844 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6845 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6850 (define_insn "*bicsi3_cbranch_scratch"
6853 (match_operator 4 "equality_operator"
6854 [(and:SI (not:SI (match_operand:SI 2 "s_register_operand" "l"))
6855 (match_operand:SI 1 "s_register_operand" "0"))
6857 (label_ref (match_operand 3 "" ""))
6859 (clobber (match_scratch:SI 0 "=l"))]
6863 output_asm_insn (\"bic\\t%0, %2\", operands);
6864 switch (get_attr_length (insn))
6866 case 4: return \"b%d4\\t%l3\";
6867 case 6: return \"b%D4\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
6868 default: return \"b%D4\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
6871 [(set (attr "far_jump")
6873 (eq_attr "length" "8")
6874 (const_string "yes")
6875 (const_string "no")))
6876 (set (attr "length")
6878 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
6879 (le (minus (match_dup 3) (pc)) (const_int 256)))
6882 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
6883 (le (minus (match_dup 3) (pc)) (const_int 2048)))
6888 (define_insn "*bicsi3_cbranch"
6891 (match_operator 5 "equality_operator"
6892 [(and:SI (not:SI (match_operand:SI 3 "s_register_operand" "l,l,l,l,l"))
6893 (match_operand:SI 2 "s_register_operand" "0,1,1,1,1"))
6895 (label_ref (match_operand 4 "" ""))
6897 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=!l,l,*?h,*?m,*?m")
6898 (and:SI (not:SI (match_dup 3)) (match_dup 2)))
6899 (clobber (match_scratch:SI 1 "=X,l,l,&l,&l"))]
6903 if (which_alternative == 0)
6904 output_asm_insn (\"bic\\t%0, %3\", operands);
6905 else if (which_alternative <= 2)
6907 output_asm_insn (\"bic\\t%1, %3\", operands);
6908 /* It's ok if OP0 is a lo-reg, even though the mov will set the
6909 conditions again, since we're only testing for equality. */
6910 output_asm_insn (\"mov\\t%0, %1\", operands);
6914 output_asm_insn (\"bic\\t%1, %3\", operands);
6915 output_asm_insn (\"str\\t%1, %0\", operands);
6918 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6920 case 4: return \"b%d5\\t%l4\";
6921 case 6: return \"b%D5\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
6922 default: return \"b%D5\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
6925 [(set (attr "far_jump")
6927 (ior (and (eq (symbol_ref ("which_alternative"))
6929 (eq_attr "length" "8"))
6930 (eq_attr "length" "10"))
6931 (const_string "yes")
6932 (const_string "no")))
6933 (set (attr "length")
6935 (eq (symbol_ref ("which_alternative"))
6938 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
6939 (le (minus (match_dup 4) (pc)) (const_int 256)))
6942 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
6943 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6947 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
6948 (le (minus (match_dup 4) (pc)) (const_int 256)))
6951 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
6952 (le (minus (match_dup 4) (pc)) (const_int 2048)))
6957 (define_insn "*cbranchne_decr1"
6959 (if_then_else (match_operator 3 "equality_operator"
6960 [(match_operand:SI 2 "s_register_operand" "l,l,1,l")
6962 (label_ref (match_operand 4 "" ""))
6964 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
6965 (plus:SI (match_dup 2) (const_int -1)))
6966 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
6971 cond[0] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
6973 VOIDmode, operands[2], const1_rtx);
6974 cond[1] = operands[4];
6976 if (which_alternative == 0)
6977 output_asm_insn (\"sub\\t%0, %2, #1\", operands);
6978 else if (which_alternative == 1)
6980 /* We must provide an alternative for a hi reg because reload
6981 cannot handle output reloads on a jump instruction, but we
6982 can't subtract into that. Fortunately a mov from lo to hi
6983 does not clobber the condition codes. */
6984 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6985 output_asm_insn (\"mov\\t%0, %1\", operands);
6989 /* Similarly, but the target is memory. */
6990 output_asm_insn (\"sub\\t%1, %2, #1\", operands);
6991 output_asm_insn (\"str\\t%1, %0\", operands);
6994 switch (get_attr_length (insn) - (which_alternative ? 2 : 0))
6997 output_asm_insn (\"b%d0\\t%l1\", cond);
7000 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7001 return \"b\\t%l4\\t%@long jump\\n.LCB%=:\";
7003 output_asm_insn (\"b%D0\\t.LCB%=\", cond);
7004 return \"bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7008 [(set (attr "far_jump")
7010 (ior (and (eq (symbol_ref ("which_alternative"))
7012 (eq_attr "length" "8"))
7013 (eq_attr "length" "10"))
7014 (const_string "yes")
7015 (const_string "no")))
7016 (set_attr_alternative "length"
7020 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7021 (le (minus (match_dup 4) (pc)) (const_int 256)))
7024 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7025 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7030 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7031 (le (minus (match_dup 4) (pc)) (const_int 256)))
7034 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7035 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7040 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7041 (le (minus (match_dup 4) (pc)) (const_int 256)))
7044 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7045 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7050 (and (ge (minus (match_dup 4) (pc)) (const_int -248))
7051 (le (minus (match_dup 4) (pc)) (const_int 256)))
7054 (and (ge (minus (match_dup 4) (pc)) (const_int -2038))
7055 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7060 (define_insn "*addsi3_cbranch"
7063 (match_operator 4 "comparison_operator"
7065 (match_operand:SI 2 "s_register_operand" "%l,0,*0,1,1,1")
7066 (match_operand:SI 3 "reg_or_int_operand" "lL,IJ,*r,lIJ,lIJ,lIJ"))
7068 (label_ref (match_operand 5 "" ""))
7071 (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,l,*!h,*?h,*?m,*?m")
7072 (plus:SI (match_dup 2) (match_dup 3)))
7073 (clobber (match_scratch:SI 1 "=X,X,X,l,&l,&l"))]
7075 && (GET_CODE (operands[4]) == EQ
7076 || GET_CODE (operands[4]) == NE
7077 || GET_CODE (operands[4]) == GE
7078 || GET_CODE (operands[4]) == LT)"
7084 cond[0] = (which_alternative < 3) ? operands[0] : operands[1];
7085 cond[1] = operands[2];
7086 cond[2] = operands[3];
7088 if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
7089 output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
7091 output_asm_insn (\"add\\t%0, %1, %2\", cond);
7093 if (which_alternative >= 3
7094 && which_alternative < 4)
7095 output_asm_insn (\"mov\\t%0, %1\", operands);
7096 else if (which_alternative >= 4)
7097 output_asm_insn (\"str\\t%1, %0\", operands);
7099 switch (get_attr_length (insn) - ((which_alternative >= 3) ? 2 : 0))
7102 return \"b%d4\\t%l5\";
7104 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7106 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7110 [(set (attr "far_jump")
7112 (ior (and (lt (symbol_ref ("which_alternative"))
7114 (eq_attr "length" "8"))
7115 (eq_attr "length" "10"))
7116 (const_string "yes")
7117 (const_string "no")))
7118 (set (attr "length")
7120 (lt (symbol_ref ("which_alternative"))
7123 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7124 (le (minus (match_dup 5) (pc)) (const_int 256)))
7127 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7128 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7132 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7133 (le (minus (match_dup 5) (pc)) (const_int 256)))
7136 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7137 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7142 (define_insn "*addsi3_cbranch_scratch"
7145 (match_operator 3 "comparison_operator"
7147 (match_operand:SI 1 "s_register_operand" "%l,l,l,0")
7148 (match_operand:SI 2 "reg_or_int_operand" "J,l,L,IJ"))
7150 (label_ref (match_operand 4 "" ""))
7152 (clobber (match_scratch:SI 0 "=X,X,l,l"))]
7154 && (GET_CODE (operands[3]) == EQ
7155 || GET_CODE (operands[3]) == NE
7156 || GET_CODE (operands[3]) == GE
7157 || GET_CODE (operands[3]) == LT)"
7160 switch (which_alternative)
7163 output_asm_insn (\"cmp\t%1, #%n2\", operands);
7166 output_asm_insn (\"cmn\t%1, %2\", operands);
7169 if (INTVAL (operands[2]) < 0)
7170 output_asm_insn (\"sub\t%0, %1, %2\", operands);
7172 output_asm_insn (\"add\t%0, %1, %2\", operands);
7175 if (INTVAL (operands[2]) < 0)
7176 output_asm_insn (\"sub\t%0, %0, %2\", operands);
7178 output_asm_insn (\"add\t%0, %0, %2\", operands);
7182 switch (get_attr_length (insn))
7185 return \"b%d3\\t%l4\";
7187 return \"b%D3\\t.LCB%=\;b\\t%l4\\t%@long jump\\n.LCB%=:\";
7189 return \"b%D3\\t.LCB%=\;bl\\t%l4\\t%@far jump\\n.LCB%=:\";
7193 [(set (attr "far_jump")
7195 (eq_attr "length" "8")
7196 (const_string "yes")
7197 (const_string "no")))
7198 (set (attr "length")
7200 (and (ge (minus (match_dup 4) (pc)) (const_int -250))
7201 (le (minus (match_dup 4) (pc)) (const_int 256)))
7204 (and (ge (minus (match_dup 4) (pc)) (const_int -2040))
7205 (le (minus (match_dup 4) (pc)) (const_int 2048)))
7210 (define_insn "*subsi3_cbranch"
7213 (match_operator 4 "comparison_operator"
7215 (match_operand:SI 2 "s_register_operand" "l,l,1,l")
7216 (match_operand:SI 3 "s_register_operand" "l,l,l,l"))
7218 (label_ref (match_operand 5 "" ""))
7220 (set (match_operand:SI 0 "thumb_cbrch_target_operand" "=l,*?h,*?m,*?m")
7221 (minus:SI (match_dup 2) (match_dup 3)))
7222 (clobber (match_scratch:SI 1 "=X,l,&l,&l"))]
7224 && (GET_CODE (operands[4]) == EQ
7225 || GET_CODE (operands[4]) == NE
7226 || GET_CODE (operands[4]) == GE
7227 || GET_CODE (operands[4]) == LT)"
7230 if (which_alternative == 0)
7231 output_asm_insn (\"sub\\t%0, %2, %3\", operands);
7232 else if (which_alternative == 1)
7234 /* We must provide an alternative for a hi reg because reload
7235 cannot handle output reloads on a jump instruction, but we
7236 can't subtract into that. Fortunately a mov from lo to hi
7237 does not clobber the condition codes. */
7238 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7239 output_asm_insn (\"mov\\t%0, %1\", operands);
7243 /* Similarly, but the target is memory. */
7244 output_asm_insn (\"sub\\t%1, %2, %3\", operands);
7245 output_asm_insn (\"str\\t%1, %0\", operands);
7248 switch (get_attr_length (insn) - ((which_alternative != 0) ? 2 : 0))
7251 return \"b%d4\\t%l5\";
7253 return \"b%D4\\t.LCB%=\;b\\t%l5\\t%@long jump\\n.LCB%=:\";
7255 return \"b%D4\\t.LCB%=\;bl\\t%l5\\t%@far jump\\n.LCB%=:\";
7259 [(set (attr "far_jump")
7261 (ior (and (eq (symbol_ref ("which_alternative"))
7263 (eq_attr "length" "8"))
7264 (eq_attr "length" "10"))
7265 (const_string "yes")
7266 (const_string "no")))
7267 (set (attr "length")
7269 (eq (symbol_ref ("which_alternative"))
7272 (and (ge (minus (match_dup 5) (pc)) (const_int -250))
7273 (le (minus (match_dup 5) (pc)) (const_int 256)))
7276 (and (ge (minus (match_dup 5) (pc)) (const_int -2040))
7277 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7281 (and (ge (minus (match_dup 5) (pc)) (const_int -248))
7282 (le (minus (match_dup 5) (pc)) (const_int 256)))
7285 (and (ge (minus (match_dup 5) (pc)) (const_int -2038))
7286 (le (minus (match_dup 5) (pc)) (const_int 2048)))
7291 (define_insn "*subsi3_cbranch_scratch"
7294 (match_operator 0 "arm_comparison_operator"
7295 [(minus:SI (match_operand:SI 1 "register_operand" "l")
7296 (match_operand:SI 2 "nonmemory_operand" "l"))
7298 (label_ref (match_operand 3 "" ""))
7301 && (GET_CODE (operands[0]) == EQ
7302 || GET_CODE (operands[0]) == NE
7303 || GET_CODE (operands[0]) == GE
7304 || GET_CODE (operands[0]) == LT)"
7306 output_asm_insn (\"cmp\\t%1, %2\", operands);
7307 switch (get_attr_length (insn))
7309 case 4: return \"b%d0\\t%l3\";
7310 case 6: return \"b%D0\\t.LCB%=\;b\\t%l3\\t%@long jump\\n.LCB%=:\";
7311 default: return \"b%D0\\t.LCB%=\;bl\\t%l3\\t%@far jump\\n.LCB%=:\";
7314 [(set (attr "far_jump")
7316 (eq_attr "length" "8")
7317 (const_string "yes")
7318 (const_string "no")))
7319 (set (attr "length")
7321 (and (ge (minus (match_dup 3) (pc)) (const_int -250))
7322 (le (minus (match_dup 3) (pc)) (const_int 256)))
7325 (and (ge (minus (match_dup 3) (pc)) (const_int -2040))
7326 (le (minus (match_dup 3) (pc)) (const_int 2048)))
7331 ;; Comparison and test insns
7333 (define_expand "cmpsi"
7334 [(match_operand:SI 0 "s_register_operand" "")
7335 (match_operand:SI 1 "arm_add_operand" "")]
7338 arm_compare_op0 = operands[0];
7339 arm_compare_op1 = operands[1];
7344 (define_expand "cmpsf"
7345 [(match_operand:SF 0 "s_register_operand" "")
7346 (match_operand:SF 1 "arm_float_compare_operand" "")]
7347 "TARGET_32BIT && TARGET_HARD_FLOAT"
7349 arm_compare_op0 = operands[0];
7350 arm_compare_op1 = operands[1];
7355 (define_expand "cmpdf"
7356 [(match_operand:DF 0 "s_register_operand" "")
7357 (match_operand:DF 1 "arm_float_compare_operand" "")]
7358 "TARGET_32BIT && TARGET_HARD_FLOAT"
7360 arm_compare_op0 = operands[0];
7361 arm_compare_op1 = operands[1];
7366 (define_insn "*arm_cmpsi_insn"
7367 [(set (reg:CC CC_REGNUM)
7368 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r")
7369 (match_operand:SI 1 "arm_add_operand" "rI,L")))]
7374 [(set_attr "conds" "set")]
7377 (define_insn "*arm_cmpsi_shiftsi"
7378 [(set (reg:CC CC_REGNUM)
7379 (compare:CC (match_operand:SI 0 "s_register_operand" "r")
7380 (match_operator:SI 3 "shift_operator"
7381 [(match_operand:SI 1 "s_register_operand" "r")
7382 (match_operand:SI 2 "arm_rhs_operand" "rM")])))]
7385 [(set_attr "conds" "set")
7386 (set_attr "shift" "1")
7387 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7388 (const_string "alu_shift")
7389 (const_string "alu_shift_reg")))]
7392 (define_insn "*arm_cmpsi_shiftsi_swp"
7393 [(set (reg:CC_SWP CC_REGNUM)
7394 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7395 [(match_operand:SI 1 "s_register_operand" "r")
7396 (match_operand:SI 2 "reg_or_int_operand" "rM")])
7397 (match_operand:SI 0 "s_register_operand" "r")))]
7400 [(set_attr "conds" "set")
7401 (set_attr "shift" "1")
7402 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
7403 (const_string "alu_shift")
7404 (const_string "alu_shift_reg")))]
7407 (define_insn "*arm_cmpsi_negshiftsi_si"
7408 [(set (reg:CC_Z CC_REGNUM)
7410 (neg:SI (match_operator:SI 1 "shift_operator"
7411 [(match_operand:SI 2 "s_register_operand" "r")
7412 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7413 (match_operand:SI 0 "s_register_operand" "r")))]
7416 [(set_attr "conds" "set")
7417 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7418 (const_string "alu_shift")
7419 (const_string "alu_shift_reg")))]
7422 ;; Cirrus SF compare instruction
7423 (define_insn "*cirrus_cmpsf"
7424 [(set (reg:CCFP CC_REGNUM)
7425 (compare:CCFP (match_operand:SF 0 "cirrus_fp_register" "v")
7426 (match_operand:SF 1 "cirrus_fp_register" "v")))]
7427 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7428 "cfcmps%?\\tr15, %V0, %V1"
7429 [(set_attr "type" "mav_farith")
7430 (set_attr "cirrus" "compare")]
7433 ;; Cirrus DF compare instruction
7434 (define_insn "*cirrus_cmpdf"
7435 [(set (reg:CCFP CC_REGNUM)
7436 (compare:CCFP (match_operand:DF 0 "cirrus_fp_register" "v")
7437 (match_operand:DF 1 "cirrus_fp_register" "v")))]
7438 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7439 "cfcmpd%?\\tr15, %V0, %V1"
7440 [(set_attr "type" "mav_farith")
7441 (set_attr "cirrus" "compare")]
7444 ;; Cirrus DI compare instruction
7445 (define_expand "cmpdi"
7446 [(match_operand:DI 0 "cirrus_fp_register" "")
7447 (match_operand:DI 1 "cirrus_fp_register" "")]
7448 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7450 arm_compare_op0 = operands[0];
7451 arm_compare_op1 = operands[1];
7455 (define_insn "*cirrus_cmpdi"
7456 [(set (reg:CC CC_REGNUM)
7457 (compare:CC (match_operand:DI 0 "cirrus_fp_register" "v")
7458 (match_operand:DI 1 "cirrus_fp_register" "v")))]
7459 "TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK"
7460 "cfcmp64%?\\tr15, %V0, %V1"
7461 [(set_attr "type" "mav_farith")
7462 (set_attr "cirrus" "compare")]
7465 ; This insn allows redundant compares to be removed by cse, nothing should
7466 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7467 ; is deleted later on. The match_dup will match the mode here, so that
7468 ; mode changes of the condition codes aren't lost by this even though we don't
7469 ; specify what they are.
7471 (define_insn "*deleted_compare"
7472 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7474 "\\t%@ deleted compare"
7475 [(set_attr "conds" "set")
7476 (set_attr "length" "0")]
7480 ;; Conditional branch insns
7482 (define_expand "beq"
7484 (if_then_else (eq (match_dup 1) (const_int 0))
7485 (label_ref (match_operand 0 "" ""))
7488 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7491 (define_expand "bne"
7493 (if_then_else (ne (match_dup 1) (const_int 0))
7494 (label_ref (match_operand 0 "" ""))
7497 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7500 (define_expand "bgt"
7502 (if_then_else (gt (match_dup 1) (const_int 0))
7503 (label_ref (match_operand 0 "" ""))
7506 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7509 (define_expand "ble"
7511 (if_then_else (le (match_dup 1) (const_int 0))
7512 (label_ref (match_operand 0 "" ""))
7515 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7518 (define_expand "bge"
7520 (if_then_else (ge (match_dup 1) (const_int 0))
7521 (label_ref (match_operand 0 "" ""))
7524 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7527 (define_expand "blt"
7529 (if_then_else (lt (match_dup 1) (const_int 0))
7530 (label_ref (match_operand 0 "" ""))
7533 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7536 (define_expand "bgtu"
7538 (if_then_else (gtu (match_dup 1) (const_int 0))
7539 (label_ref (match_operand 0 "" ""))
7542 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7545 (define_expand "bleu"
7547 (if_then_else (leu (match_dup 1) (const_int 0))
7548 (label_ref (match_operand 0 "" ""))
7551 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7554 (define_expand "bgeu"
7556 (if_then_else (geu (match_dup 1) (const_int 0))
7557 (label_ref (match_operand 0 "" ""))
7560 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7563 (define_expand "bltu"
7565 (if_then_else (ltu (match_dup 1) (const_int 0))
7566 (label_ref (match_operand 0 "" ""))
7569 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7572 (define_expand "bunordered"
7574 (if_then_else (unordered (match_dup 1) (const_int 0))
7575 (label_ref (match_operand 0 "" ""))
7577 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7578 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7582 (define_expand "bordered"
7584 (if_then_else (ordered (match_dup 1) (const_int 0))
7585 (label_ref (match_operand 0 "" ""))
7587 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7588 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7592 (define_expand "bungt"
7594 (if_then_else (ungt (match_dup 1) (const_int 0))
7595 (label_ref (match_operand 0 "" ""))
7597 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7598 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0, arm_compare_op1);"
7601 (define_expand "bunlt"
7603 (if_then_else (unlt (match_dup 1) (const_int 0))
7604 (label_ref (match_operand 0 "" ""))
7606 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7607 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0, arm_compare_op1);"
7610 (define_expand "bunge"
7612 (if_then_else (unge (match_dup 1) (const_int 0))
7613 (label_ref (match_operand 0 "" ""))
7615 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7616 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0, arm_compare_op1);"
7619 (define_expand "bunle"
7621 (if_then_else (unle (match_dup 1) (const_int 0))
7622 (label_ref (match_operand 0 "" ""))
7624 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7625 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0, arm_compare_op1);"
7628 ;; The following two patterns need two branch instructions, since there is
7629 ;; no single instruction that will handle all cases.
7630 (define_expand "buneq"
7632 (if_then_else (uneq (match_dup 1) (const_int 0))
7633 (label_ref (match_operand 0 "" ""))
7635 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7636 "operands[1] = arm_gen_compare_reg (UNEQ, arm_compare_op0, arm_compare_op1);"
7639 (define_expand "bltgt"
7641 (if_then_else (ltgt (match_dup 1) (const_int 0))
7642 (label_ref (match_operand 0 "" ""))
7644 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7645 "operands[1] = arm_gen_compare_reg (LTGT, arm_compare_op0, arm_compare_op1);"
7649 ;; Patterns to match conditional branch insns.
7652 ; Special pattern to match UNEQ.
7653 (define_insn "*arm_buneq"
7655 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7656 (label_ref (match_operand 0 "" ""))
7658 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7660 gcc_assert (!arm_ccfsm_state);
7662 return \"bvs\\t%l0\;beq\\t%l0\";
7664 [(set_attr "conds" "jump_clob")
7665 (set_attr "length" "8")]
7668 ; Special pattern to match LTGT.
7669 (define_insn "*arm_bltgt"
7671 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7672 (label_ref (match_operand 0 "" ""))
7674 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7676 gcc_assert (!arm_ccfsm_state);
7678 return \"bmi\\t%l0\;bgt\\t%l0\";
7680 [(set_attr "conds" "jump_clob")
7681 (set_attr "length" "8")]
7684 (define_insn "*arm_cond_branch"
7686 (if_then_else (match_operator 1 "arm_comparison_operator"
7687 [(match_operand 2 "cc_register" "") (const_int 0)])
7688 (label_ref (match_operand 0 "" ""))
7692 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7694 arm_ccfsm_state += 2;
7697 return \"b%d1\\t%l0\";
7699 [(set_attr "conds" "use")
7700 (set_attr "type" "branch")]
7703 ; Special pattern to match reversed UNEQ.
7704 (define_insn "*arm_buneq_reversed"
7706 (if_then_else (uneq (match_operand 1 "cc_register" "") (const_int 0))
7708 (label_ref (match_operand 0 "" ""))))]
7709 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7711 gcc_assert (!arm_ccfsm_state);
7713 return \"bmi\\t%l0\;bgt\\t%l0\";
7715 [(set_attr "conds" "jump_clob")
7716 (set_attr "length" "8")]
7719 ; Special pattern to match reversed LTGT.
7720 (define_insn "*arm_bltgt_reversed"
7722 (if_then_else (ltgt (match_operand 1 "cc_register" "") (const_int 0))
7724 (label_ref (match_operand 0 "" ""))))]
7725 "TARGET_ARM && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7727 gcc_assert (!arm_ccfsm_state);
7729 return \"bvs\\t%l0\;beq\\t%l0\";
7731 [(set_attr "conds" "jump_clob")
7732 (set_attr "length" "8")]
7735 (define_insn "*arm_cond_branch_reversed"
7737 (if_then_else (match_operator 1 "arm_comparison_operator"
7738 [(match_operand 2 "cc_register" "") (const_int 0)])
7740 (label_ref (match_operand 0 "" ""))))]
7743 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7745 arm_ccfsm_state += 2;
7748 return \"b%D1\\t%l0\";
7750 [(set_attr "conds" "use")
7751 (set_attr "type" "branch")]
7758 (define_expand "seq"
7759 [(set (match_operand:SI 0 "s_register_operand" "")
7760 (eq:SI (match_dup 1) (const_int 0)))]
7762 "operands[1] = arm_gen_compare_reg (EQ, arm_compare_op0, arm_compare_op1);"
7765 (define_expand "sne"
7766 [(set (match_operand:SI 0 "s_register_operand" "")
7767 (ne:SI (match_dup 1) (const_int 0)))]
7769 "operands[1] = arm_gen_compare_reg (NE, arm_compare_op0, arm_compare_op1);"
7772 (define_expand "sgt"
7773 [(set (match_operand:SI 0 "s_register_operand" "")
7774 (gt:SI (match_dup 1) (const_int 0)))]
7776 "operands[1] = arm_gen_compare_reg (GT, arm_compare_op0, arm_compare_op1);"
7779 (define_expand "sle"
7780 [(set (match_operand:SI 0 "s_register_operand" "")
7781 (le:SI (match_dup 1) (const_int 0)))]
7783 "operands[1] = arm_gen_compare_reg (LE, arm_compare_op0, arm_compare_op1);"
7786 (define_expand "sge"
7787 [(set (match_operand:SI 0 "s_register_operand" "")
7788 (ge:SI (match_dup 1) (const_int 0)))]
7790 "operands[1] = arm_gen_compare_reg (GE, arm_compare_op0, arm_compare_op1);"
7793 (define_expand "slt"
7794 [(set (match_operand:SI 0 "s_register_operand" "")
7795 (lt:SI (match_dup 1) (const_int 0)))]
7797 "operands[1] = arm_gen_compare_reg (LT, arm_compare_op0, arm_compare_op1);"
7800 (define_expand "sgtu"
7801 [(set (match_operand:SI 0 "s_register_operand" "")
7802 (gtu:SI (match_dup 1) (const_int 0)))]
7804 "operands[1] = arm_gen_compare_reg (GTU, arm_compare_op0, arm_compare_op1);"
7807 (define_expand "sleu"
7808 [(set (match_operand:SI 0 "s_register_operand" "")
7809 (leu:SI (match_dup 1) (const_int 0)))]
7811 "operands[1] = arm_gen_compare_reg (LEU, arm_compare_op0, arm_compare_op1);"
7814 (define_expand "sgeu"
7815 [(set (match_operand:SI 0 "s_register_operand" "")
7816 (geu:SI (match_dup 1) (const_int 0)))]
7818 "operands[1] = arm_gen_compare_reg (GEU, arm_compare_op0, arm_compare_op1);"
7821 (define_expand "sltu"
7822 [(set (match_operand:SI 0 "s_register_operand" "")
7823 (ltu:SI (match_dup 1) (const_int 0)))]
7825 "operands[1] = arm_gen_compare_reg (LTU, arm_compare_op0, arm_compare_op1);"
7828 (define_expand "sunordered"
7829 [(set (match_operand:SI 0 "s_register_operand" "")
7830 (unordered:SI (match_dup 1) (const_int 0)))]
7831 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7832 "operands[1] = arm_gen_compare_reg (UNORDERED, arm_compare_op0,
7836 (define_expand "sordered"
7837 [(set (match_operand:SI 0 "s_register_operand" "")
7838 (ordered:SI (match_dup 1) (const_int 0)))]
7839 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7840 "operands[1] = arm_gen_compare_reg (ORDERED, arm_compare_op0,
7844 (define_expand "sungt"
7845 [(set (match_operand:SI 0 "s_register_operand" "")
7846 (ungt:SI (match_dup 1) (const_int 0)))]
7847 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7848 "operands[1] = arm_gen_compare_reg (UNGT, arm_compare_op0,
7852 (define_expand "sunge"
7853 [(set (match_operand:SI 0 "s_register_operand" "")
7854 (unge:SI (match_dup 1) (const_int 0)))]
7855 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7856 "operands[1] = arm_gen_compare_reg (UNGE, arm_compare_op0,
7860 (define_expand "sunlt"
7861 [(set (match_operand:SI 0 "s_register_operand" "")
7862 (unlt:SI (match_dup 1) (const_int 0)))]
7863 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7864 "operands[1] = arm_gen_compare_reg (UNLT, arm_compare_op0,
7868 (define_expand "sunle"
7869 [(set (match_operand:SI 0 "s_register_operand" "")
7870 (unle:SI (match_dup 1) (const_int 0)))]
7871 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7872 "operands[1] = arm_gen_compare_reg (UNLE, arm_compare_op0,
7876 ;;; DO NOT add patterns for SUNEQ or SLTGT, these can't be represented with
7877 ;;; simple ARM instructions.
7879 ; (define_expand "suneq"
7880 ; [(set (match_operand:SI 0 "s_register_operand" "")
7881 ; (uneq:SI (match_dup 1) (const_int 0)))]
7882 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7883 ; "gcc_unreachable ();"
7886 ; (define_expand "sltgt"
7887 ; [(set (match_operand:SI 0 "s_register_operand" "")
7888 ; (ltgt:SI (match_dup 1) (const_int 0)))]
7889 ; "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
7890 ; "gcc_unreachable ();"
7893 (define_insn "*mov_scc"
7894 [(set (match_operand:SI 0 "s_register_operand" "=r")
7895 (match_operator:SI 1 "arm_comparison_operator"
7896 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7898 "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7899 [(set_attr "conds" "use")
7900 (set_attr "length" "8")]
7903 (define_insn "*mov_negscc"
7904 [(set (match_operand:SI 0 "s_register_operand" "=r")
7905 (neg:SI (match_operator:SI 1 "arm_comparison_operator"
7906 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7908 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7909 [(set_attr "conds" "use")
7910 (set_attr "length" "8")]
7913 (define_insn "*mov_notscc"
7914 [(set (match_operand:SI 0 "s_register_operand" "=r")
7915 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7916 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7918 "mov%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7919 [(set_attr "conds" "use")
7920 (set_attr "length" "8")]
7923 (define_expand "cstoresi4"
7924 [(set (match_operand:SI 0 "s_register_operand" "")
7925 (match_operator:SI 1 "arm_comparison_operator"
7926 [(match_operand:SI 2 "s_register_operand" "")
7927 (match_operand:SI 3 "reg_or_int_operand" "")]))]
7930 rtx op3, scratch, scratch2;
7932 if (operands[3] == const0_rtx)
7934 switch (GET_CODE (operands[1]))
7937 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7941 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
7945 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7946 NULL_RTX, 0, OPTAB_WIDEN);
7947 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7948 NULL_RTX, 0, OPTAB_WIDEN);
7949 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7950 operands[0], 1, OPTAB_WIDEN);
7954 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7956 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7957 NULL_RTX, 1, OPTAB_WIDEN);
7961 scratch = expand_binop (SImode, ashr_optab, operands[2],
7962 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7963 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7964 NULL_RTX, 0, OPTAB_WIDEN);
7965 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7969 /* LT is handled by generic code. No need for unsigned with 0. */
7976 switch (GET_CODE (operands[1]))
7979 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7980 NULL_RTX, 0, OPTAB_WIDEN);
7981 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
7985 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7986 NULL_RTX, 0, OPTAB_WIDEN);
7987 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
7991 op3 = force_reg (SImode, operands[3]);
7993 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7994 NULL_RTX, 1, OPTAB_WIDEN);
7995 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7996 NULL_RTX, 0, OPTAB_WIDEN);
7997 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8003 if (!thumb1_cmp_operand (op3, SImode))
8004 op3 = force_reg (SImode, op3);
8005 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
8006 NULL_RTX, 0, OPTAB_WIDEN);
8007 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
8008 NULL_RTX, 1, OPTAB_WIDEN);
8009 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
8014 op3 = force_reg (SImode, operands[3]);
8015 scratch = force_reg (SImode, const0_rtx);
8016 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8022 if (!thumb1_cmp_operand (op3, SImode))
8023 op3 = force_reg (SImode, op3);
8024 scratch = force_reg (SImode, const0_rtx);
8025 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
8031 if (!thumb1_cmp_operand (op3, SImode))
8032 op3 = force_reg (SImode, op3);
8033 scratch = gen_reg_rtx (SImode);
8034 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, operands[2], op3));
8035 emit_insn (gen_negsi2 (operands[0], scratch));
8039 op3 = force_reg (SImode, operands[3]);
8040 scratch = gen_reg_rtx (SImode);
8041 emit_insn (gen_cstoresi_nltu_thumb1 (scratch, op3, operands[2]));
8042 emit_insn (gen_negsi2 (operands[0], scratch));
8045 /* No good sequences for GT, LT. */
8052 (define_expand "cstoresi_eq0_thumb1"
8054 [(set (match_operand:SI 0 "s_register_operand" "")
8055 (eq:SI (match_operand:SI 1 "s_register_operand" "")
8057 (clobber (match_dup:SI 2))])]
8059 "operands[2] = gen_reg_rtx (SImode);"
8062 (define_expand "cstoresi_ne0_thumb1"
8064 [(set (match_operand:SI 0 "s_register_operand" "")
8065 (ne:SI (match_operand:SI 1 "s_register_operand" "")
8067 (clobber (match_dup:SI 2))])]
8069 "operands[2] = gen_reg_rtx (SImode);"
8072 (define_insn "*cstoresi_eq0_thumb1_insn"
8073 [(set (match_operand:SI 0 "s_register_operand" "=&l,l")
8074 (eq:SI (match_operand:SI 1 "s_register_operand" "l,0")
8076 (clobber (match_operand:SI 2 "s_register_operand" "=X,l"))]
8079 neg\\t%0, %1\;adc\\t%0, %0, %1
8080 neg\\t%2, %1\;adc\\t%0, %1, %2"
8081 [(set_attr "length" "4")]
8084 (define_insn "*cstoresi_ne0_thumb1_insn"
8085 [(set (match_operand:SI 0 "s_register_operand" "=l")
8086 (ne:SI (match_operand:SI 1 "s_register_operand" "0")
8088 (clobber (match_operand:SI 2 "s_register_operand" "=l"))]
8090 "sub\\t%2, %1, #1\;sbc\\t%0, %1, %2"
8091 [(set_attr "length" "4")]
8094 (define_insn "cstoresi_nltu_thumb1"
8095 [(set (match_operand:SI 0 "s_register_operand" "=l,l")
8096 (neg:SI (gtu:SI (match_operand:SI 1 "s_register_operand" "l,*h")
8097 (match_operand:SI 2 "thumb1_cmp_operand" "lI*h,*r"))))]
8099 "cmp\\t%1, %2\;sbc\\t%0, %0, %0"
8100 [(set_attr "length" "4")]
8103 ;; Used as part of the expansion of thumb les sequence.
8104 (define_insn "thumb1_addsi3_addgeu"
8105 [(set (match_operand:SI 0 "s_register_operand" "=l")
8106 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%0")
8107 (match_operand:SI 2 "s_register_operand" "l"))
8108 (geu:SI (match_operand:SI 3 "s_register_operand" "l")
8109 (match_operand:SI 4 "thumb1_cmp_operand" "lI"))))]
8111 "cmp\\t%3, %4\;adc\\t%0, %1, %2"
8112 [(set_attr "length" "4")]
8116 ;; Conditional move insns
8118 (define_expand "movsicc"
8119 [(set (match_operand:SI 0 "s_register_operand" "")
8120 (if_then_else:SI (match_operand 1 "arm_comparison_operator" "")
8121 (match_operand:SI 2 "arm_not_operand" "")
8122 (match_operand:SI 3 "arm_not_operand" "")))]
8126 enum rtx_code code = GET_CODE (operands[1]);
8129 if (code == UNEQ || code == LTGT)
8132 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8133 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8137 (define_expand "movsfcc"
8138 [(set (match_operand:SF 0 "s_register_operand" "")
8139 (if_then_else:SF (match_operand 1 "arm_comparison_operator" "")
8140 (match_operand:SF 2 "s_register_operand" "")
8141 (match_operand:SF 3 "nonmemory_operand" "")))]
8145 enum rtx_code code = GET_CODE (operands[1]);
8148 if (code == UNEQ || code == LTGT)
8151 /* When compiling for SOFT_FLOAT, ensure both arms are in registers.
8152 Otherwise, ensure it is a valid FP add operand */
8153 if ((!(TARGET_HARD_FLOAT && TARGET_FPA))
8154 || (!arm_float_add_operand (operands[3], SFmode)))
8155 operands[3] = force_reg (SFmode, operands[3]);
8157 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8158 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8162 (define_expand "movdfcc"
8163 [(set (match_operand:DF 0 "s_register_operand" "")
8164 (if_then_else:DF (match_operand 1 "arm_comparison_operator" "")
8165 (match_operand:DF 2 "s_register_operand" "")
8166 (match_operand:DF 3 "arm_float_add_operand" "")))]
8167 "TARGET_32BIT && TARGET_HARD_FLOAT && (TARGET_FPA || TARGET_VFP)"
8170 enum rtx_code code = GET_CODE (operands[1]);
8173 if (code == UNEQ || code == LTGT)
8176 ccreg = arm_gen_compare_reg (code, arm_compare_op0, arm_compare_op1);
8177 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
8181 (define_insn "*movsicc_insn"
8182 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
8184 (match_operator 3 "arm_comparison_operator"
8185 [(match_operand 4 "cc_register" "") (const_int 0)])
8186 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
8187 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
8194 mov%d3\\t%0, %1\;mov%D3\\t%0, %2
8195 mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
8196 mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
8197 mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
8198 [(set_attr "length" "4,4,4,4,8,8,8,8")
8199 (set_attr "conds" "use")]
8202 (define_insn "*movsfcc_soft_insn"
8203 [(set (match_operand:SF 0 "s_register_operand" "=r,r")
8204 (if_then_else:SF (match_operator 3 "arm_comparison_operator"
8205 [(match_operand 4 "cc_register" "") (const_int 0)])
8206 (match_operand:SF 1 "s_register_operand" "0,r")
8207 (match_operand:SF 2 "s_register_operand" "r,0")))]
8208 "TARGET_ARM && TARGET_SOFT_FLOAT"
8212 [(set_attr "conds" "use")]
8216 ;; Jump and linkage insns
8218 (define_expand "jump"
8220 (label_ref (match_operand 0 "" "")))]
8225 (define_insn "*arm_jump"
8227 (label_ref (match_operand 0 "" "")))]
8231 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
8233 arm_ccfsm_state += 2;
8236 return \"b%?\\t%l0\";
8239 [(set_attr "predicable" "yes")]
8242 (define_insn "*thumb_jump"
8244 (label_ref (match_operand 0 "" "")))]
8247 if (get_attr_length (insn) == 2)
8249 return \"bl\\t%l0\\t%@ far jump\";
8251 [(set (attr "far_jump")
8253 (eq_attr "length" "4")
8254 (const_string "yes")
8255 (const_string "no")))
8256 (set (attr "length")
8258 (and (ge (minus (match_dup 0) (pc)) (const_int -2044))
8259 (le (minus (match_dup 0) (pc)) (const_int 2048)))
8264 (define_expand "call"
8265 [(parallel [(call (match_operand 0 "memory_operand" "")
8266 (match_operand 1 "general_operand" ""))
8267 (use (match_operand 2 "" ""))
8268 (clobber (reg:SI LR_REGNUM))])]
8274 /* In an untyped call, we can get NULL for operand 2. */
8275 if (operands[2] == NULL_RTX)
8276 operands[2] = const0_rtx;
8278 /* Decide if we should generate indirect calls by loading the
8279 32-bit address of the callee into a register before performing the
8281 callee = XEXP (operands[0], 0);
8282 if (GET_CODE (callee) == SYMBOL_REF
8283 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8285 XEXP (operands[0], 0) = force_reg (Pmode, callee);
8287 pat = gen_call_internal (operands[0], operands[1], operands[2]);
8288 arm_emit_call_insn (pat, XEXP (operands[0], 0));
8293 (define_expand "call_internal"
8294 [(parallel [(call (match_operand 0 "memory_operand" "")
8295 (match_operand 1 "general_operand" ""))
8296 (use (match_operand 2 "" ""))
8297 (clobber (reg:SI LR_REGNUM))])])
8299 (define_insn "*call_reg_armv5"
8300 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8301 (match_operand 1 "" ""))
8302 (use (match_operand 2 "" ""))
8303 (clobber (reg:SI LR_REGNUM))]
8304 "TARGET_ARM && arm_arch5"
8306 [(set_attr "type" "call")]
8309 (define_insn "*call_reg_arm"
8310 [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
8311 (match_operand 1 "" ""))
8312 (use (match_operand 2 "" ""))
8313 (clobber (reg:SI LR_REGNUM))]
8314 "TARGET_ARM && !arm_arch5"
8316 return output_call (operands);
8318 ;; length is worst case, normally it is only two
8319 [(set_attr "length" "12")
8320 (set_attr "type" "call")]
8323 (define_insn "*call_mem"
8324 [(call (mem:SI (match_operand:SI 0 "call_memory_operand" "m"))
8325 (match_operand 1 "" ""))
8326 (use (match_operand 2 "" ""))
8327 (clobber (reg:SI LR_REGNUM))]
8330 return output_call_mem (operands);
8332 [(set_attr "length" "12")
8333 (set_attr "type" "call")]
8336 (define_insn "*call_reg_thumb1_v5"
8337 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8338 (match_operand 1 "" ""))
8339 (use (match_operand 2 "" ""))
8340 (clobber (reg:SI LR_REGNUM))]
8341 "TARGET_THUMB1 && arm_arch5"
8343 [(set_attr "length" "2")
8344 (set_attr "type" "call")]
8347 (define_insn "*call_reg_thumb1"
8348 [(call (mem:SI (match_operand:SI 0 "register_operand" "l*r"))
8349 (match_operand 1 "" ""))
8350 (use (match_operand 2 "" ""))
8351 (clobber (reg:SI LR_REGNUM))]
8352 "TARGET_THUMB1 && !arm_arch5"
8355 if (!TARGET_CALLER_INTERWORKING)
8356 return thumb_call_via_reg (operands[0]);
8357 else if (operands[1] == const0_rtx)
8358 return \"bl\\t%__interwork_call_via_%0\";
8359 else if (frame_pointer_needed)
8360 return \"bl\\t%__interwork_r7_call_via_%0\";
8362 return \"bl\\t%__interwork_r11_call_via_%0\";
8364 [(set_attr "type" "call")]
8367 (define_expand "call_value"
8368 [(parallel [(set (match_operand 0 "" "")
8369 (call (match_operand 1 "memory_operand" "")
8370 (match_operand 2 "general_operand" "")))
8371 (use (match_operand 3 "" ""))
8372 (clobber (reg:SI LR_REGNUM))])]
8378 /* In an untyped call, we can get NULL for operand 2. */
8379 if (operands[3] == 0)
8380 operands[3] = const0_rtx;
8382 /* Decide if we should generate indirect calls by loading the
8383 32-bit address of the callee into a register before performing the
8385 callee = XEXP (operands[1], 0);
8386 if (GET_CODE (callee) == SYMBOL_REF
8387 ? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8389 XEXP (operands[1], 0) = force_reg (Pmode, callee);
8391 pat = gen_call_value_internal (operands[0], operands[1],
8392 operands[2], operands[3]);
8393 arm_emit_call_insn (pat, XEXP (operands[1], 0));
8398 (define_expand "call_value_internal"
8399 [(parallel [(set (match_operand 0 "" "")
8400 (call (match_operand 1 "memory_operand" "")
8401 (match_operand 2 "general_operand" "")))
8402 (use (match_operand 3 "" ""))
8403 (clobber (reg:SI LR_REGNUM))])])
8405 (define_insn "*call_value_reg_armv5"
8406 [(set (match_operand 0 "" "")
8407 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8408 (match_operand 2 "" "")))
8409 (use (match_operand 3 "" ""))
8410 (clobber (reg:SI LR_REGNUM))]
8411 "TARGET_ARM && arm_arch5"
8413 [(set_attr "type" "call")]
8416 (define_insn "*call_value_reg_arm"
8417 [(set (match_operand 0 "" "")
8418 (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8419 (match_operand 2 "" "")))
8420 (use (match_operand 3 "" ""))
8421 (clobber (reg:SI LR_REGNUM))]
8422 "TARGET_ARM && !arm_arch5"
8424 return output_call (&operands[1]);
8426 [(set_attr "length" "12")
8427 (set_attr "type" "call")]
8430 (define_insn "*call_value_mem"
8431 [(set (match_operand 0 "" "")
8432 (call (mem:SI (match_operand:SI 1 "call_memory_operand" "m"))
8433 (match_operand 2 "" "")))
8434 (use (match_operand 3 "" ""))
8435 (clobber (reg:SI LR_REGNUM))]
8436 "TARGET_ARM && (!CONSTANT_ADDRESS_P (XEXP (operands[1], 0)))"
8438 return output_call_mem (&operands[1]);
8440 [(set_attr "length" "12")
8441 (set_attr "type" "call")]
8444 (define_insn "*call_value_reg_thumb1_v5"
8445 [(set (match_operand 0 "" "")
8446 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8447 (match_operand 2 "" "")))
8448 (use (match_operand 3 "" ""))
8449 (clobber (reg:SI LR_REGNUM))]
8450 "TARGET_THUMB1 && arm_arch5"
8452 [(set_attr "length" "2")
8453 (set_attr "type" "call")]
8456 (define_insn "*call_value_reg_thumb1"
8457 [(set (match_operand 0 "" "")
8458 (call (mem:SI (match_operand:SI 1 "register_operand" "l*r"))
8459 (match_operand 2 "" "")))
8460 (use (match_operand 3 "" ""))
8461 (clobber (reg:SI LR_REGNUM))]
8462 "TARGET_THUMB1 && !arm_arch5"
8465 if (!TARGET_CALLER_INTERWORKING)
8466 return thumb_call_via_reg (operands[1]);
8467 else if (operands[2] == const0_rtx)
8468 return \"bl\\t%__interwork_call_via_%1\";
8469 else if (frame_pointer_needed)
8470 return \"bl\\t%__interwork_r7_call_via_%1\";
8472 return \"bl\\t%__interwork_r11_call_via_%1\";
8474 [(set_attr "type" "call")]
8477 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8478 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
8480 (define_insn "*call_symbol"
8481 [(call (mem:SI (match_operand:SI 0 "" ""))
8482 (match_operand 1 "" ""))
8483 (use (match_operand 2 "" ""))
8484 (clobber (reg:SI LR_REGNUM))]
8486 && (GET_CODE (operands[0]) == SYMBOL_REF)
8487 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8490 return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8492 [(set_attr "type" "call")]
8495 (define_insn "*call_value_symbol"
8496 [(set (match_operand 0 "" "")
8497 (call (mem:SI (match_operand:SI 1 "" ""))
8498 (match_operand:SI 2 "" "")))
8499 (use (match_operand 3 "" ""))
8500 (clobber (reg:SI LR_REGNUM))]
8502 && (GET_CODE (operands[1]) == SYMBOL_REF)
8503 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8506 return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8508 [(set_attr "type" "call")]
8511 (define_insn "*call_insn"
8512 [(call (mem:SI (match_operand:SI 0 "" ""))
8513 (match_operand:SI 1 "" ""))
8514 (use (match_operand 2 "" ""))
8515 (clobber (reg:SI LR_REGNUM))]
8517 && GET_CODE (operands[0]) == SYMBOL_REF
8518 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8520 [(set_attr "length" "4")
8521 (set_attr "type" "call")]
8524 (define_insn "*call_value_insn"
8525 [(set (match_operand 0 "" "")
8526 (call (mem:SI (match_operand 1 "" ""))
8527 (match_operand 2 "" "")))
8528 (use (match_operand 3 "" ""))
8529 (clobber (reg:SI LR_REGNUM))]
8531 && GET_CODE (operands[1]) == SYMBOL_REF
8532 && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8534 [(set_attr "length" "4")
8535 (set_attr "type" "call")]
8538 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
8539 (define_expand "sibcall"
8540 [(parallel [(call (match_operand 0 "memory_operand" "")
8541 (match_operand 1 "general_operand" ""))
8543 (use (match_operand 2 "" ""))])]
8547 if (operands[2] == NULL_RTX)
8548 operands[2] = const0_rtx;
8552 (define_expand "sibcall_value"
8553 [(parallel [(set (match_operand 0 "" "")
8554 (call (match_operand 1 "memory_operand" "")
8555 (match_operand 2 "general_operand" "")))
8557 (use (match_operand 3 "" ""))])]
8561 if (operands[3] == NULL_RTX)
8562 operands[3] = const0_rtx;
8566 (define_insn "*sibcall_insn"
8567 [(call (mem:SI (match_operand:SI 0 "" "X"))
8568 (match_operand 1 "" ""))
8570 (use (match_operand 2 "" ""))]
8571 "TARGET_ARM && GET_CODE (operands[0]) == SYMBOL_REF"
8573 return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8575 [(set_attr "type" "call")]
8578 (define_insn "*sibcall_value_insn"
8579 [(set (match_operand 0 "" "")
8580 (call (mem:SI (match_operand:SI 1 "" "X"))
8581 (match_operand 2 "" "")))
8583 (use (match_operand 3 "" ""))]
8584 "TARGET_ARM && GET_CODE (operands[1]) == SYMBOL_REF"
8586 return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8588 [(set_attr "type" "call")]
8591 ;; Often the return insn will be the same as loading from memory, so set attr
8592 (define_insn "return"
8594 "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8597 if (arm_ccfsm_state == 2)
8599 arm_ccfsm_state += 2;
8602 return output_return_instruction (const_true_rtx, TRUE, FALSE);
8604 [(set_attr "type" "load1")
8605 (set_attr "length" "12")
8606 (set_attr "predicable" "yes")]
8609 (define_insn "*cond_return"
8611 (if_then_else (match_operator 0 "arm_comparison_operator"
8612 [(match_operand 1 "cc_register" "") (const_int 0)])
8615 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8618 if (arm_ccfsm_state == 2)
8620 arm_ccfsm_state += 2;
8623 return output_return_instruction (operands[0], TRUE, FALSE);
8625 [(set_attr "conds" "use")
8626 (set_attr "length" "12")
8627 (set_attr "type" "load1")]
8630 (define_insn "*cond_return_inverted"
8632 (if_then_else (match_operator 0 "arm_comparison_operator"
8633 [(match_operand 1 "cc_register" "") (const_int 0)])
8636 "TARGET_ARM && USE_RETURN_INSN (TRUE)"
8639 if (arm_ccfsm_state == 2)
8641 arm_ccfsm_state += 2;
8644 return output_return_instruction (operands[0], TRUE, TRUE);
8646 [(set_attr "conds" "use")
8647 (set_attr "length" "12")
8648 (set_attr "type" "load1")]
8651 ;; Generate a sequence of instructions to determine if the processor is
8652 ;; in 26-bit or 32-bit mode, and return the appropriate return address
8655 (define_expand "return_addr_mask"
8657 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8659 (set (match_operand:SI 0 "s_register_operand" "")
8660 (if_then_else:SI (eq (match_dup 1) (const_int 0))
8662 (const_int 67108860)))] ; 0x03fffffc
8665 operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
8668 (define_insn "*check_arch2"
8669 [(set (match_operand:CC_NOOV 0 "cc_register" "")
8670 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8673 "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8674 [(set_attr "length" "8")
8675 (set_attr "conds" "set")]
8678 ;; Call subroutine returning any type.
8680 (define_expand "untyped_call"
8681 [(parallel [(call (match_operand 0 "" "")
8683 (match_operand 1 "" "")
8684 (match_operand 2 "" "")])]
8689 rtx par = gen_rtx_PARALLEL (VOIDmode,
8690 rtvec_alloc (XVECLEN (operands[2], 0)));
8691 rtx addr = gen_reg_rtx (Pmode);
8695 emit_move_insn (addr, XEXP (operands[1], 0));
8696 mem = change_address (operands[1], BLKmode, addr);
8698 for (i = 0; i < XVECLEN (operands[2], 0); i++)
8700 rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8702 /* Default code only uses r0 as a return value, but we could
8703 be using anything up to 4 registers. */
8704 if (REGNO (src) == R0_REGNUM)
8705 src = gen_rtx_REG (TImode, R0_REGNUM);
8707 XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8709 size += GET_MODE_SIZE (GET_MODE (src));
8712 emit_call_insn (GEN_CALL_VALUE (par, operands[0], const0_rtx, NULL,
8717 for (i = 0; i < XVECLEN (par, 0); i++)
8719 HOST_WIDE_INT offset = 0;
8720 rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8723 emit_move_insn (addr, plus_constant (addr, size));
8725 mem = change_address (mem, GET_MODE (reg), NULL);
8726 if (REGNO (reg) == R0_REGNUM)
8728 /* On thumb we have to use a write-back instruction. */
8729 emit_insn (arm_gen_store_multiple (R0_REGNUM, 4, addr, TRUE,
8730 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8731 size = TARGET_ARM ? 16 : 0;
8735 emit_move_insn (mem, reg);
8736 size = GET_MODE_SIZE (GET_MODE (reg));
8740 /* The optimizer does not know that the call sets the function value
8741 registers we stored in the result block. We avoid problems by
8742 claiming that all hard registers are used and clobbered at this
8744 emit_insn (gen_blockage ());
8750 (define_expand "untyped_return"
8751 [(match_operand:BLK 0 "memory_operand" "")
8752 (match_operand 1 "" "")]
8757 rtx addr = gen_reg_rtx (Pmode);
8761 emit_move_insn (addr, XEXP (operands[0], 0));
8762 mem = change_address (operands[0], BLKmode, addr);
8764 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8766 HOST_WIDE_INT offset = 0;
8767 rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8770 emit_move_insn (addr, plus_constant (addr, size));
8772 mem = change_address (mem, GET_MODE (reg), NULL);
8773 if (REGNO (reg) == R0_REGNUM)
8775 /* On thumb we have to use a write-back instruction. */
8776 emit_insn (arm_gen_load_multiple (R0_REGNUM, 4, addr, TRUE,
8777 TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8778 size = TARGET_ARM ? 16 : 0;
8782 emit_move_insn (reg, mem);
8783 size = GET_MODE_SIZE (GET_MODE (reg));
8787 /* Emit USE insns before the return. */
8788 for (i = 0; i < XVECLEN (operands[1], 0); i++)
8789 emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8791 /* Construct the return. */
8792 expand_naked_return ();
8798 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8799 ;; all of memory. This blocks insns from being moved across this point.
8801 (define_insn "blockage"
8802 [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8805 [(set_attr "length" "0")
8806 (set_attr "type" "block")]
8809 (define_expand "casesi"
8810 [(match_operand:SI 0 "s_register_operand" "") ; index to jump on
8811 (match_operand:SI 1 "const_int_operand" "") ; lower bound
8812 (match_operand:SI 2 "const_int_operand" "") ; total range
8813 (match_operand:SI 3 "" "") ; table label
8814 (match_operand:SI 4 "" "")] ; Out of range label
8819 if (operands[1] != const0_rtx)
8821 reg = gen_reg_rtx (SImode);
8823 emit_insn (gen_addsi3 (reg, operands[0],
8824 GEN_INT (-INTVAL (operands[1]))));
8828 if (!const_ok_for_arm (INTVAL (operands[2])))
8829 operands[2] = force_reg (SImode, operands[2]);
8833 emit_jump_insn (gen_arm_casesi_internal (operands[0], operands[2],
8834 operands[3], operands[4]));
8838 emit_jump_insn (gen_thumb2_casesi_internal_pic (operands[0],
8839 operands[2], operands[3], operands[4]));
8843 emit_jump_insn (gen_thumb2_casesi_internal (operands[0], operands[2],
8844 operands[3], operands[4]));
8850 ;; The USE in this pattern is needed to tell flow analysis that this is
8851 ;; a CASESI insn. It has no other purpose.
8852 (define_insn "arm_casesi_internal"
8853 [(parallel [(set (pc)
8855 (leu (match_operand:SI 0 "s_register_operand" "r")
8856 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8857 (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8858 (label_ref (match_operand 2 "" ""))))
8859 (label_ref (match_operand 3 "" ""))))
8860 (clobber (reg:CC CC_REGNUM))
8861 (use (label_ref (match_dup 2)))])]
8865 return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8866 return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8868 [(set_attr "conds" "clob")
8869 (set_attr "length" "12")]
8872 (define_expand "indirect_jump"
8874 (match_operand:SI 0 "s_register_operand" ""))]
8877 /* Thumb-2 doesn't have mov pc, reg. Explicitly set the low bit of the
8878 address and use bx. */
8882 tmp = gen_reg_rtx (SImode);
8883 emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8889 ;; NB Never uses BX.
8890 (define_insn "*arm_indirect_jump"
8892 (match_operand:SI 0 "s_register_operand" "r"))]
8894 "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8895 [(set_attr "predicable" "yes")]
8898 (define_insn "*load_indirect_jump"
8900 (match_operand:SI 0 "memory_operand" "m"))]
8902 "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8903 [(set_attr "type" "load1")
8904 (set_attr "pool_range" "4096")
8905 (set_attr "neg_pool_range" "4084")
8906 (set_attr "predicable" "yes")]
8909 ;; NB Never uses BX.
8910 (define_insn "*thumb1_indirect_jump"
8912 (match_operand:SI 0 "register_operand" "l*r"))]
8915 [(set_attr "conds" "clob")
8916 (set_attr "length" "2")]
8926 if (TARGET_UNIFIED_ASM)
8929 return \"mov%?\\t%|r0, %|r0\\t%@ nop\";
8930 return \"mov\\tr8, r8\";
8932 [(set (attr "length")
8933 (if_then_else (eq_attr "is_thumb" "yes")
8939 ;; Patterns to allow combination of arithmetic, cond code and shifts
8941 (define_insn "*arith_shiftsi"
8942 [(set (match_operand:SI 0 "s_register_operand" "=r")
8943 (match_operator:SI 1 "shiftable_operator"
8944 [(match_operator:SI 3 "shift_operator"
8945 [(match_operand:SI 4 "s_register_operand" "r")
8946 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8947 (match_operand:SI 2 "s_register_operand" "r")]))]
8949 "%i1%?\\t%0, %2, %4%S3"
8950 [(set_attr "predicable" "yes")
8951 (set_attr "shift" "4")
8952 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8953 (const_string "alu_shift")
8954 (const_string "alu_shift_reg")))]
8958 [(set (match_operand:SI 0 "s_register_operand" "")
8959 (match_operator:SI 1 "shiftable_operator"
8960 [(match_operator:SI 2 "shiftable_operator"
8961 [(match_operator:SI 3 "shift_operator"
8962 [(match_operand:SI 4 "s_register_operand" "")
8963 (match_operand:SI 5 "reg_or_int_operand" "")])
8964 (match_operand:SI 6 "s_register_operand" "")])
8965 (match_operand:SI 7 "arm_rhs_operand" "")]))
8966 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8969 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8972 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
8975 (define_insn "*arith_shiftsi_compare0"
8976 [(set (reg:CC_NOOV CC_REGNUM)
8977 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8978 [(match_operator:SI 3 "shift_operator"
8979 [(match_operand:SI 4 "s_register_operand" "r")
8980 (match_operand:SI 5 "reg_or_int_operand" "rI")])
8981 (match_operand:SI 2 "s_register_operand" "r")])
8983 (set (match_operand:SI 0 "s_register_operand" "=r")
8984 (match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8987 "%i1%.\\t%0, %2, %4%S3"
8988 [(set_attr "conds" "set")
8989 (set_attr "shift" "4")
8990 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
8991 (const_string "alu_shift")
8992 (const_string "alu_shift_reg")))]
8995 (define_insn "*arith_shiftsi_compare0_scratch"
8996 [(set (reg:CC_NOOV CC_REGNUM)
8997 (compare:CC_NOOV (match_operator:SI 1 "shiftable_operator"
8998 [(match_operator:SI 3 "shift_operator"
8999 [(match_operand:SI 4 "s_register_operand" "r")
9000 (match_operand:SI 5 "reg_or_int_operand" "rI")])
9001 (match_operand:SI 2 "s_register_operand" "r")])
9003 (clobber (match_scratch:SI 0 "=r"))]
9005 "%i1%.\\t%0, %2, %4%S3"
9006 [(set_attr "conds" "set")
9007 (set_attr "shift" "4")
9008 (set (attr "type") (if_then_else (match_operand 5 "const_int_operand" "")
9009 (const_string "alu_shift")
9010 (const_string "alu_shift_reg")))]
9013 (define_insn "*sub_shiftsi"
9014 [(set (match_operand:SI 0 "s_register_operand" "=r")
9015 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9016 (match_operator:SI 2 "shift_operator"
9017 [(match_operand:SI 3 "s_register_operand" "r")
9018 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
9020 "sub%?\\t%0, %1, %3%S2"
9021 [(set_attr "predicable" "yes")
9022 (set_attr "shift" "3")
9023 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9024 (const_string "alu_shift")
9025 (const_string "alu_shift_reg")))]
9028 (define_insn "*sub_shiftsi_compare0"
9029 [(set (reg:CC_NOOV CC_REGNUM)
9031 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9032 (match_operator:SI 2 "shift_operator"
9033 [(match_operand:SI 3 "s_register_operand" "r")
9034 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9036 (set (match_operand:SI 0 "s_register_operand" "=r")
9037 (minus:SI (match_dup 1) (match_op_dup 2 [(match_dup 3)
9040 "sub%.\\t%0, %1, %3%S2"
9041 [(set_attr "conds" "set")
9042 (set_attr "shift" "3")
9043 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9044 (const_string "alu_shift")
9045 (const_string "alu_shift_reg")))]
9048 (define_insn "*sub_shiftsi_compare0_scratch"
9049 [(set (reg:CC_NOOV CC_REGNUM)
9051 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
9052 (match_operator:SI 2 "shift_operator"
9053 [(match_operand:SI 3 "s_register_operand" "r")
9054 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
9056 (clobber (match_scratch:SI 0 "=r"))]
9058 "sub%.\\t%0, %1, %3%S2"
9059 [(set_attr "conds" "set")
9060 (set_attr "shift" "3")
9061 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
9062 (const_string "alu_shift")
9063 (const_string "alu_shift_reg")))]
9068 (define_insn "*and_scc"
9069 [(set (match_operand:SI 0 "s_register_operand" "=r")
9070 (and:SI (match_operator:SI 1 "arm_comparison_operator"
9071 [(match_operand 3 "cc_register" "") (const_int 0)])
9072 (match_operand:SI 2 "s_register_operand" "r")))]
9074 "mov%D1\\t%0, #0\;and%d1\\t%0, %2, #1"
9075 [(set_attr "conds" "use")
9076 (set_attr "length" "8")]
9079 (define_insn "*ior_scc"
9080 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9081 (ior:SI (match_operator:SI 2 "arm_comparison_operator"
9082 [(match_operand 3 "cc_register" "") (const_int 0)])
9083 (match_operand:SI 1 "s_register_operand" "0,?r")))]
9087 mov%D2\\t%0, %1\;orr%d2\\t%0, %1, #1"
9088 [(set_attr "conds" "use")
9089 (set_attr "length" "4,8")]
9092 (define_insn "*compare_scc"
9093 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9094 (match_operator:SI 1 "arm_comparison_operator"
9095 [(match_operand:SI 2 "s_register_operand" "r,r")
9096 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9097 (clobber (reg:CC CC_REGNUM))]
9100 if (operands[3] == const0_rtx)
9102 if (GET_CODE (operands[1]) == LT)
9103 return \"mov\\t%0, %2, lsr #31\";
9105 if (GET_CODE (operands[1]) == GE)
9106 return \"mvn\\t%0, %2\;mov\\t%0, %0, lsr #31\";
9108 if (GET_CODE (operands[1]) == EQ)
9109 return \"rsbs\\t%0, %2, #1\;movcc\\t%0, #0\";
9112 if (GET_CODE (operands[1]) == NE)
9114 if (which_alternative == 1)
9115 return \"adds\\t%0, %2, #%n3\;movne\\t%0, #1\";
9116 return \"subs\\t%0, %2, %3\;movne\\t%0, #1\";
9118 if (which_alternative == 1)
9119 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9121 output_asm_insn (\"cmp\\t%2, %3\", operands);
9122 return \"mov%D1\\t%0, #0\;mov%d1\\t%0, #1\";
9124 [(set_attr "conds" "clob")
9125 (set_attr "length" "12")]
9128 (define_insn "*cond_move"
9129 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9130 (if_then_else:SI (match_operator 3 "equality_operator"
9131 [(match_operator 4 "arm_comparison_operator"
9132 [(match_operand 5 "cc_register" "") (const_int 0)])
9134 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9135 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9138 if (GET_CODE (operands[3]) == NE)
9140 if (which_alternative != 1)
9141 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9142 if (which_alternative != 0)
9143 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9146 if (which_alternative != 0)
9147 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9148 if (which_alternative != 1)
9149 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9152 [(set_attr "conds" "use")
9153 (set_attr "length" "4,4,8")]
9156 (define_insn "*cond_arith"
9157 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9158 (match_operator:SI 5 "shiftable_operator"
9159 [(match_operator:SI 4 "arm_comparison_operator"
9160 [(match_operand:SI 2 "s_register_operand" "r,r")
9161 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9162 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9163 (clobber (reg:CC CC_REGNUM))]
9166 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9167 return \"%i5\\t%0, %1, %2, lsr #31\";
9169 output_asm_insn (\"cmp\\t%2, %3\", operands);
9170 if (GET_CODE (operands[5]) == AND)
9171 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9172 else if (GET_CODE (operands[5]) == MINUS)
9173 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9174 else if (which_alternative != 0)
9175 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9176 return \"%i5%d4\\t%0, %1, #1\";
9178 [(set_attr "conds" "clob")
9179 (set_attr "length" "12")]
9182 (define_insn "*cond_sub"
9183 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9184 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9185 (match_operator:SI 4 "arm_comparison_operator"
9186 [(match_operand:SI 2 "s_register_operand" "r,r")
9187 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9188 (clobber (reg:CC CC_REGNUM))]
9191 output_asm_insn (\"cmp\\t%2, %3\", operands);
9192 if (which_alternative != 0)
9193 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9194 return \"sub%d4\\t%0, %1, #1\";
9196 [(set_attr "conds" "clob")
9197 (set_attr "length" "8,12")]
9200 ;; ??? Is it worth using these conditional patterns in Thumb-2 mode?
9201 (define_insn "*cmp_ite0"
9202 [(set (match_operand 6 "dominant_cc_register" "")
9205 (match_operator 4 "arm_comparison_operator"
9206 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9207 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9208 (match_operator:SI 5 "arm_comparison_operator"
9209 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9210 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9216 static const char * const opcodes[4][2] =
9218 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9219 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9220 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9221 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9222 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9223 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9224 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9225 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9228 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9230 return opcodes[which_alternative][swap];
9232 [(set_attr "conds" "set")
9233 (set_attr "length" "8")]
9236 (define_insn "*cmp_ite1"
9237 [(set (match_operand 6 "dominant_cc_register" "")
9240 (match_operator 4 "arm_comparison_operator"
9241 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9242 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9243 (match_operator:SI 5 "arm_comparison_operator"
9244 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9245 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")])
9251 static const char * const opcodes[4][2] =
9253 {\"cmp\\t%0, %1\;cmp%d4\\t%2, %3\",
9254 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9255 {\"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\",
9256 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9257 {\"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\",
9258 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9259 {\"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\",
9260 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9263 comparison_dominates_p (GET_CODE (operands[5]),
9264 reverse_condition (GET_CODE (operands[4])));
9266 return opcodes[which_alternative][swap];
9268 [(set_attr "conds" "set")
9269 (set_attr "length" "8")]
9272 (define_insn "*cmp_and"
9273 [(set (match_operand 6 "dominant_cc_register" "")
9276 (match_operator 4 "arm_comparison_operator"
9277 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9278 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9279 (match_operator:SI 5 "arm_comparison_operator"
9280 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9281 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9286 static const char *const opcodes[4][2] =
9288 {\"cmp\\t%2, %3\;cmp%d5\\t%0, %1\",
9289 \"cmp\\t%0, %1\;cmp%d4\\t%2, %3\"},
9290 {\"cmp\\t%2, %3\;cmn%d5\\t%0, #%n1\",
9291 \"cmn\\t%0, #%n1\;cmp%d4\\t%2, %3\"},
9292 {\"cmn\\t%2, #%n3\;cmp%d5\\t%0, %1\",
9293 \"cmp\\t%0, %1\;cmn%d4\\t%2, #%n3\"},
9294 {\"cmn\\t%2, #%n3\;cmn%d5\\t%0, #%n1\",
9295 \"cmn\\t%0, #%n1\;cmn%d4\\t%2, #%n3\"}
9298 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9300 return opcodes[which_alternative][swap];
9302 [(set_attr "conds" "set")
9303 (set_attr "predicable" "no")
9304 (set_attr "length" "8")]
9307 (define_insn "*cmp_ior"
9308 [(set (match_operand 6 "dominant_cc_register" "")
9311 (match_operator 4 "arm_comparison_operator"
9312 [(match_operand:SI 0 "s_register_operand" "r,r,r,r")
9313 (match_operand:SI 1 "arm_add_operand" "rI,L,rI,L")])
9314 (match_operator:SI 5 "arm_comparison_operator"
9315 [(match_operand:SI 2 "s_register_operand" "r,r,r,r")
9316 (match_operand:SI 3 "arm_add_operand" "rI,rI,L,L")]))
9321 static const char *const opcodes[4][2] =
9323 {\"cmp\\t%0, %1\;cmp%D4\\t%2, %3\",
9324 \"cmp\\t%2, %3\;cmp%D5\\t%0, %1\"},
9325 {\"cmn\\t%0, #%n1\;cmp%D4\\t%2, %3\",
9326 \"cmp\\t%2, %3\;cmn%D5\\t%0, #%n1\"},
9327 {\"cmp\\t%0, %1\;cmn%D4\\t%2, #%n3\",
9328 \"cmn\\t%2, #%n3\;cmp%D5\\t%0, %1\"},
9329 {\"cmn\\t%0, #%n1\;cmn%D4\\t%2, #%n3\",
9330 \"cmn\\t%2, #%n3\;cmn%D5\\t%0, #%n1\"}
9333 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9335 return opcodes[which_alternative][swap];
9338 [(set_attr "conds" "set")
9339 (set_attr "length" "8")]
9342 (define_insn_and_split "*ior_scc_scc"
9343 [(set (match_operand:SI 0 "s_register_operand" "=r")
9344 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9345 [(match_operand:SI 1 "s_register_operand" "r")
9346 (match_operand:SI 2 "arm_add_operand" "rIL")])
9347 (match_operator:SI 6 "arm_comparison_operator"
9348 [(match_operand:SI 4 "s_register_operand" "r")
9349 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9350 (clobber (reg:CC CC_REGNUM))]
9352 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9355 "TARGET_ARM && reload_completed"
9359 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9360 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9362 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9364 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9367 [(set_attr "conds" "clob")
9368 (set_attr "length" "16")])
9370 ; If the above pattern is followed by a CMP insn, then the compare is
9371 ; redundant, since we can rework the conditional instruction that follows.
9372 (define_insn_and_split "*ior_scc_scc_cmp"
9373 [(set (match_operand 0 "dominant_cc_register" "")
9374 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9375 [(match_operand:SI 1 "s_register_operand" "r")
9376 (match_operand:SI 2 "arm_add_operand" "rIL")])
9377 (match_operator:SI 6 "arm_comparison_operator"
9378 [(match_operand:SI 4 "s_register_operand" "r")
9379 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9381 (set (match_operand:SI 7 "s_register_operand" "=r")
9382 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9383 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9386 "TARGET_ARM && reload_completed"
9390 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9391 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9393 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9395 [(set_attr "conds" "set")
9396 (set_attr "length" "16")])
9398 (define_insn_and_split "*and_scc_scc"
9399 [(set (match_operand:SI 0 "s_register_operand" "=r")
9400 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9401 [(match_operand:SI 1 "s_register_operand" "r")
9402 (match_operand:SI 2 "arm_add_operand" "rIL")])
9403 (match_operator:SI 6 "arm_comparison_operator"
9404 [(match_operand:SI 4 "s_register_operand" "r")
9405 (match_operand:SI 5 "arm_add_operand" "rIL")])))
9406 (clobber (reg:CC CC_REGNUM))]
9408 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9411 "TARGET_ARM && reload_completed
9412 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9417 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9418 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9420 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9422 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9425 [(set_attr "conds" "clob")
9426 (set_attr "length" "16")])
9428 ; If the above pattern is followed by a CMP insn, then the compare is
9429 ; redundant, since we can rework the conditional instruction that follows.
9430 (define_insn_and_split "*and_scc_scc_cmp"
9431 [(set (match_operand 0 "dominant_cc_register" "")
9432 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9433 [(match_operand:SI 1 "s_register_operand" "r")
9434 (match_operand:SI 2 "arm_add_operand" "rIL")])
9435 (match_operator:SI 6 "arm_comparison_operator"
9436 [(match_operand:SI 4 "s_register_operand" "r")
9437 (match_operand:SI 5 "arm_add_operand" "rIL")]))
9439 (set (match_operand:SI 7 "s_register_operand" "=r")
9440 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9441 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9444 "TARGET_ARM && reload_completed"
9448 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9449 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9451 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9453 [(set_attr "conds" "set")
9454 (set_attr "length" "16")])
9456 ;; If there is no dominance in the comparison, then we can still save an
9457 ;; instruction in the AND case, since we can know that the second compare
9458 ;; need only zero the value if false (if true, then the value is already
9460 (define_insn_and_split "*and_scc_scc_nodom"
9461 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
9462 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9463 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9464 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9465 (match_operator:SI 6 "arm_comparison_operator"
9466 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9467 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9468 (clobber (reg:CC CC_REGNUM))]
9470 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9473 "TARGET_ARM && reload_completed"
9474 [(parallel [(set (match_dup 0)
9475 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9476 (clobber (reg:CC CC_REGNUM))])
9477 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9479 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9482 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9483 operands[4], operands[5]),
9485 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9487 [(set_attr "conds" "clob")
9488 (set_attr "length" "20")])
9491 [(set (reg:CC_NOOV CC_REGNUM)
9492 (compare:CC_NOOV (ior:SI
9493 (and:SI (match_operand:SI 0 "s_register_operand" "")
9495 (match_operator:SI 1 "comparison_operator"
9496 [(match_operand:SI 2 "s_register_operand" "")
9497 (match_operand:SI 3 "arm_add_operand" "")]))
9499 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9502 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9504 (set (reg:CC_NOOV CC_REGNUM)
9505 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9510 [(set (reg:CC_NOOV CC_REGNUM)
9511 (compare:CC_NOOV (ior:SI
9512 (match_operator:SI 1 "comparison_operator"
9513 [(match_operand:SI 2 "s_register_operand" "")
9514 (match_operand:SI 3 "arm_add_operand" "")])
9515 (and:SI (match_operand:SI 0 "s_register_operand" "")
9518 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9521 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9523 (set (reg:CC_NOOV CC_REGNUM)
9524 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9527 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
9529 (define_insn "*negscc"
9530 [(set (match_operand:SI 0 "s_register_operand" "=r")
9531 (neg:SI (match_operator 3 "arm_comparison_operator"
9532 [(match_operand:SI 1 "s_register_operand" "r")
9533 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9534 (clobber (reg:CC CC_REGNUM))]
9537 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9538 return \"mov\\t%0, %1, asr #31\";
9540 if (GET_CODE (operands[3]) == NE)
9541 return \"subs\\t%0, %1, %2\;mvnne\\t%0, #0\";
9543 output_asm_insn (\"cmp\\t%1, %2\", operands);
9544 output_asm_insn (\"mov%D3\\t%0, #0\", operands);
9545 return \"mvn%d3\\t%0, #0\";
9547 [(set_attr "conds" "clob")
9548 (set_attr "length" "12")]
9551 (define_insn "movcond"
9552 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9554 (match_operator 5 "arm_comparison_operator"
9555 [(match_operand:SI 3 "s_register_operand" "r,r,r")
9556 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
9557 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9558 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
9559 (clobber (reg:CC CC_REGNUM))]
9562 if (GET_CODE (operands[5]) == LT
9563 && (operands[4] == const0_rtx))
9565 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9567 if (operands[2] == const0_rtx)
9568 return \"and\\t%0, %1, %3, asr #31\";
9569 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
9571 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9573 if (operands[1] == const0_rtx)
9574 return \"bic\\t%0, %2, %3, asr #31\";
9575 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
9577 /* The only case that falls through to here is when both ops 1 & 2
9581 if (GET_CODE (operands[5]) == GE
9582 && (operands[4] == const0_rtx))
9584 if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
9586 if (operands[2] == const0_rtx)
9587 return \"bic\\t%0, %1, %3, asr #31\";
9588 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
9590 else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
9592 if (operands[1] == const0_rtx)
9593 return \"and\\t%0, %2, %3, asr #31\";
9594 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
9596 /* The only case that falls through to here is when both ops 1 & 2
9599 if (GET_CODE (operands[4]) == CONST_INT
9600 && !const_ok_for_arm (INTVAL (operands[4])))
9601 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
9603 output_asm_insn (\"cmp\\t%3, %4\", operands);
9604 if (which_alternative != 0)
9605 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
9606 if (which_alternative != 1)
9607 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
9610 [(set_attr "conds" "clob")
9611 (set_attr "length" "8,8,12")]
9614 ;; ??? The patterns below need checking for Thumb-2 usefulness.
9616 (define_insn "*ifcompare_plus_move"
9617 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9618 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9619 [(match_operand:SI 4 "s_register_operand" "r,r")
9620 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9622 (match_operand:SI 2 "s_register_operand" "r,r")
9623 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
9624 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9625 (clobber (reg:CC CC_REGNUM))]
9628 [(set_attr "conds" "clob")
9629 (set_attr "length" "8,12")]
9632 (define_insn "*if_plus_move"
9633 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9635 (match_operator 4 "arm_comparison_operator"
9636 [(match_operand 5 "cc_register" "") (const_int 0)])
9638 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9639 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
9640 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
9644 sub%d4\\t%0, %2, #%n3
9645 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
9646 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
9647 [(set_attr "conds" "use")
9648 (set_attr "length" "4,4,8,8")
9649 (set_attr "type" "*,*,*,*")]
9652 (define_insn "*ifcompare_move_plus"
9653 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9654 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9655 [(match_operand:SI 4 "s_register_operand" "r,r")
9656 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9657 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9659 (match_operand:SI 2 "s_register_operand" "r,r")
9660 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
9661 (clobber (reg:CC CC_REGNUM))]
9664 [(set_attr "conds" "clob")
9665 (set_attr "length" "8,12")]
9668 (define_insn "*if_move_plus"
9669 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
9671 (match_operator 4 "arm_comparison_operator"
9672 [(match_operand 5 "cc_register" "") (const_int 0)])
9673 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
9675 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
9676 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
9680 sub%D4\\t%0, %2, #%n3
9681 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
9682 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
9683 [(set_attr "conds" "use")
9684 (set_attr "length" "4,4,8,8")
9685 (set_attr "type" "*,*,*,*")]
9688 (define_insn "*ifcompare_arith_arith"
9689 [(set (match_operand:SI 0 "s_register_operand" "=r")
9690 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
9691 [(match_operand:SI 5 "s_register_operand" "r")
9692 (match_operand:SI 6 "arm_add_operand" "rIL")])
9693 (match_operator:SI 8 "shiftable_operator"
9694 [(match_operand:SI 1 "s_register_operand" "r")
9695 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9696 (match_operator:SI 7 "shiftable_operator"
9697 [(match_operand:SI 3 "s_register_operand" "r")
9698 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
9699 (clobber (reg:CC CC_REGNUM))]
9702 [(set_attr "conds" "clob")
9703 (set_attr "length" "12")]
9706 (define_insn "*if_arith_arith"
9707 [(set (match_operand:SI 0 "s_register_operand" "=r")
9708 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
9709 [(match_operand 8 "cc_register" "") (const_int 0)])
9710 (match_operator:SI 6 "shiftable_operator"
9711 [(match_operand:SI 1 "s_register_operand" "r")
9712 (match_operand:SI 2 "arm_rhs_operand" "rI")])
9713 (match_operator:SI 7 "shiftable_operator"
9714 [(match_operand:SI 3 "s_register_operand" "r")
9715 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
9717 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
9718 [(set_attr "conds" "use")
9719 (set_attr "length" "8")]
9722 (define_insn "*ifcompare_arith_move"
9723 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9724 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9725 [(match_operand:SI 2 "s_register_operand" "r,r")
9726 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
9727 (match_operator:SI 7 "shiftable_operator"
9728 [(match_operand:SI 4 "s_register_operand" "r,r")
9729 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
9730 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
9731 (clobber (reg:CC CC_REGNUM))]
9734 /* If we have an operation where (op x 0) is the identity operation and
9735 the conditional operator is LT or GE and we are comparing against zero and
9736 everything is in registers then we can do this in two instructions. */
9737 if (operands[3] == const0_rtx
9738 && GET_CODE (operands[7]) != AND
9739 && GET_CODE (operands[5]) == REG
9740 && GET_CODE (operands[1]) == REG
9741 && REGNO (operands[1]) == REGNO (operands[4])
9742 && REGNO (operands[4]) != REGNO (operands[0]))
9744 if (GET_CODE (operands[6]) == LT)
9745 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9746 else if (GET_CODE (operands[6]) == GE)
9747 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
9749 if (GET_CODE (operands[3]) == CONST_INT
9750 && !const_ok_for_arm (INTVAL (operands[3])))
9751 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
9753 output_asm_insn (\"cmp\\t%2, %3\", operands);
9754 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
9755 if (which_alternative != 0)
9756 return \"mov%D6\\t%0, %1\";
9759 [(set_attr "conds" "clob")
9760 (set_attr "length" "8,12")]
9763 (define_insn "*if_arith_move"
9764 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9765 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
9766 [(match_operand 6 "cc_register" "") (const_int 0)])
9767 (match_operator:SI 5 "shiftable_operator"
9768 [(match_operand:SI 2 "s_register_operand" "r,r")
9769 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9770 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
9774 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
9775 [(set_attr "conds" "use")
9776 (set_attr "length" "4,8")
9777 (set_attr "type" "*,*")]
9780 (define_insn "*ifcompare_move_arith"
9781 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9782 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
9783 [(match_operand:SI 4 "s_register_operand" "r,r")
9784 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9785 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9786 (match_operator:SI 7 "shiftable_operator"
9787 [(match_operand:SI 2 "s_register_operand" "r,r")
9788 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9789 (clobber (reg:CC CC_REGNUM))]
9792 /* If we have an operation where (op x 0) is the identity operation and
9793 the conditional operator is LT or GE and we are comparing against zero and
9794 everything is in registers then we can do this in two instructions */
9795 if (operands[5] == const0_rtx
9796 && GET_CODE (operands[7]) != AND
9797 && GET_CODE (operands[3]) == REG
9798 && GET_CODE (operands[1]) == REG
9799 && REGNO (operands[1]) == REGNO (operands[2])
9800 && REGNO (operands[2]) != REGNO (operands[0]))
9802 if (GET_CODE (operands[6]) == GE)
9803 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9804 else if (GET_CODE (operands[6]) == LT)
9805 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
9808 if (GET_CODE (operands[5]) == CONST_INT
9809 && !const_ok_for_arm (INTVAL (operands[5])))
9810 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
9812 output_asm_insn (\"cmp\\t%4, %5\", operands);
9814 if (which_alternative != 0)
9815 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
9816 return \"%I7%D6\\t%0, %2, %3\";
9818 [(set_attr "conds" "clob")
9819 (set_attr "length" "8,12")]
9822 (define_insn "*if_move_arith"
9823 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9825 (match_operator 4 "arm_comparison_operator"
9826 [(match_operand 6 "cc_register" "") (const_int 0)])
9827 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
9828 (match_operator:SI 5 "shiftable_operator"
9829 [(match_operand:SI 2 "s_register_operand" "r,r")
9830 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
9834 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
9835 [(set_attr "conds" "use")
9836 (set_attr "length" "4,8")
9837 (set_attr "type" "*,*")]
9840 (define_insn "*ifcompare_move_not"
9841 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9843 (match_operator 5 "arm_comparison_operator"
9844 [(match_operand:SI 3 "s_register_operand" "r,r")
9845 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9846 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9848 (match_operand:SI 2 "s_register_operand" "r,r"))))
9849 (clobber (reg:CC CC_REGNUM))]
9852 [(set_attr "conds" "clob")
9853 (set_attr "length" "8,12")]
9856 (define_insn "*if_move_not"
9857 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9859 (match_operator 4 "arm_comparison_operator"
9860 [(match_operand 3 "cc_register" "") (const_int 0)])
9861 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9862 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
9866 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
9867 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
9868 [(set_attr "conds" "use")
9869 (set_attr "length" "4,8,8")]
9872 (define_insn "*ifcompare_not_move"
9873 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9875 (match_operator 5 "arm_comparison_operator"
9876 [(match_operand:SI 3 "s_register_operand" "r,r")
9877 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
9879 (match_operand:SI 2 "s_register_operand" "r,r"))
9880 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9881 (clobber (reg:CC CC_REGNUM))]
9884 [(set_attr "conds" "clob")
9885 (set_attr "length" "8,12")]
9888 (define_insn "*if_not_move"
9889 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9891 (match_operator 4 "arm_comparison_operator"
9892 [(match_operand 3 "cc_register" "") (const_int 0)])
9893 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
9894 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9898 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
9899 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
9900 [(set_attr "conds" "use")
9901 (set_attr "length" "4,8,8")]
9904 (define_insn "*ifcompare_shift_move"
9905 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9907 (match_operator 6 "arm_comparison_operator"
9908 [(match_operand:SI 4 "s_register_operand" "r,r")
9909 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9910 (match_operator:SI 7 "shift_operator"
9911 [(match_operand:SI 2 "s_register_operand" "r,r")
9912 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
9913 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
9914 (clobber (reg:CC CC_REGNUM))]
9917 [(set_attr "conds" "clob")
9918 (set_attr "length" "8,12")]
9921 (define_insn "*if_shift_move"
9922 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9924 (match_operator 5 "arm_comparison_operator"
9925 [(match_operand 6 "cc_register" "") (const_int 0)])
9926 (match_operator:SI 4 "shift_operator"
9927 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9928 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
9929 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
9933 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
9934 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
9935 [(set_attr "conds" "use")
9936 (set_attr "shift" "2")
9937 (set_attr "length" "4,8,8")
9938 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9939 (const_string "alu_shift")
9940 (const_string "alu_shift_reg")))]
9943 (define_insn "*ifcompare_move_shift"
9944 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9946 (match_operator 6 "arm_comparison_operator"
9947 [(match_operand:SI 4 "s_register_operand" "r,r")
9948 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
9949 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
9950 (match_operator:SI 7 "shift_operator"
9951 [(match_operand:SI 2 "s_register_operand" "r,r")
9952 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
9953 (clobber (reg:CC CC_REGNUM))]
9956 [(set_attr "conds" "clob")
9957 (set_attr "length" "8,12")]
9960 (define_insn "*if_move_shift"
9961 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9963 (match_operator 5 "arm_comparison_operator"
9964 [(match_operand 6 "cc_register" "") (const_int 0)])
9965 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
9966 (match_operator:SI 4 "shift_operator"
9967 [(match_operand:SI 2 "s_register_operand" "r,r,r")
9968 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
9972 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
9973 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
9974 [(set_attr "conds" "use")
9975 (set_attr "shift" "2")
9976 (set_attr "length" "4,8,8")
9977 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
9978 (const_string "alu_shift")
9979 (const_string "alu_shift_reg")))]
9982 (define_insn "*ifcompare_shift_shift"
9983 [(set (match_operand:SI 0 "s_register_operand" "=r")
9985 (match_operator 7 "arm_comparison_operator"
9986 [(match_operand:SI 5 "s_register_operand" "r")
9987 (match_operand:SI 6 "arm_add_operand" "rIL")])
9988 (match_operator:SI 8 "shift_operator"
9989 [(match_operand:SI 1 "s_register_operand" "r")
9990 (match_operand:SI 2 "arm_rhs_operand" "rM")])
9991 (match_operator:SI 9 "shift_operator"
9992 [(match_operand:SI 3 "s_register_operand" "r")
9993 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
9994 (clobber (reg:CC CC_REGNUM))]
9997 [(set_attr "conds" "clob")
9998 (set_attr "length" "12")]
10001 (define_insn "*if_shift_shift"
10002 [(set (match_operand:SI 0 "s_register_operand" "=r")
10004 (match_operator 5 "arm_comparison_operator"
10005 [(match_operand 8 "cc_register" "") (const_int 0)])
10006 (match_operator:SI 6 "shift_operator"
10007 [(match_operand:SI 1 "s_register_operand" "r")
10008 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10009 (match_operator:SI 7 "shift_operator"
10010 [(match_operand:SI 3 "s_register_operand" "r")
10011 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10013 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10014 [(set_attr "conds" "use")
10015 (set_attr "shift" "1")
10016 (set_attr "length" "8")
10017 (set (attr "type") (if_then_else
10018 (and (match_operand 2 "const_int_operand" "")
10019 (match_operand 4 "const_int_operand" ""))
10020 (const_string "alu_shift")
10021 (const_string "alu_shift_reg")))]
10024 (define_insn "*ifcompare_not_arith"
10025 [(set (match_operand:SI 0 "s_register_operand" "=r")
10027 (match_operator 6 "arm_comparison_operator"
10028 [(match_operand:SI 4 "s_register_operand" "r")
10029 (match_operand:SI 5 "arm_add_operand" "rIL")])
10030 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10031 (match_operator:SI 7 "shiftable_operator"
10032 [(match_operand:SI 2 "s_register_operand" "r")
10033 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10034 (clobber (reg:CC CC_REGNUM))]
10037 [(set_attr "conds" "clob")
10038 (set_attr "length" "12")]
10041 (define_insn "*if_not_arith"
10042 [(set (match_operand:SI 0 "s_register_operand" "=r")
10044 (match_operator 5 "arm_comparison_operator"
10045 [(match_operand 4 "cc_register" "") (const_int 0)])
10046 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10047 (match_operator:SI 6 "shiftable_operator"
10048 [(match_operand:SI 2 "s_register_operand" "r")
10049 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10051 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10052 [(set_attr "conds" "use")
10053 (set_attr "length" "8")]
10056 (define_insn "*ifcompare_arith_not"
10057 [(set (match_operand:SI 0 "s_register_operand" "=r")
10059 (match_operator 6 "arm_comparison_operator"
10060 [(match_operand:SI 4 "s_register_operand" "r")
10061 (match_operand:SI 5 "arm_add_operand" "rIL")])
10062 (match_operator:SI 7 "shiftable_operator"
10063 [(match_operand:SI 2 "s_register_operand" "r")
10064 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10065 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10066 (clobber (reg:CC CC_REGNUM))]
10069 [(set_attr "conds" "clob")
10070 (set_attr "length" "12")]
10073 (define_insn "*if_arith_not"
10074 [(set (match_operand:SI 0 "s_register_operand" "=r")
10076 (match_operator 5 "arm_comparison_operator"
10077 [(match_operand 4 "cc_register" "") (const_int 0)])
10078 (match_operator:SI 6 "shiftable_operator"
10079 [(match_operand:SI 2 "s_register_operand" "r")
10080 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10081 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10083 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10084 [(set_attr "conds" "use")
10085 (set_attr "length" "8")]
10088 (define_insn "*ifcompare_neg_move"
10089 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10091 (match_operator 5 "arm_comparison_operator"
10092 [(match_operand:SI 3 "s_register_operand" "r,r")
10093 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10094 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10095 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10096 (clobber (reg:CC CC_REGNUM))]
10099 [(set_attr "conds" "clob")
10100 (set_attr "length" "8,12")]
10103 (define_insn "*if_neg_move"
10104 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10106 (match_operator 4 "arm_comparison_operator"
10107 [(match_operand 3 "cc_register" "") (const_int 0)])
10108 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10109 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10112 rsb%d4\\t%0, %2, #0
10113 mov%D4\\t%0, %1\;rsb%d4\\t%0, %2, #0
10114 mvn%D4\\t%0, #%B1\;rsb%d4\\t%0, %2, #0"
10115 [(set_attr "conds" "use")
10116 (set_attr "length" "4,8,8")]
10119 (define_insn "*ifcompare_move_neg"
10120 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10122 (match_operator 5 "arm_comparison_operator"
10123 [(match_operand:SI 3 "s_register_operand" "r,r")
10124 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10125 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10126 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10127 (clobber (reg:CC CC_REGNUM))]
10130 [(set_attr "conds" "clob")
10131 (set_attr "length" "8,12")]
10134 (define_insn "*if_move_neg"
10135 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10137 (match_operator 4 "arm_comparison_operator"
10138 [(match_operand 3 "cc_register" "") (const_int 0)])
10139 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10140 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10143 rsb%D4\\t%0, %2, #0
10144 mov%d4\\t%0, %1\;rsb%D4\\t%0, %2, #0
10145 mvn%d4\\t%0, #%B1\;rsb%D4\\t%0, %2, #0"
10146 [(set_attr "conds" "use")
10147 (set_attr "length" "4,8,8")]
10150 (define_insn "*arith_adjacentmem"
10151 [(set (match_operand:SI 0 "s_register_operand" "=r")
10152 (match_operator:SI 1 "shiftable_operator"
10153 [(match_operand:SI 2 "memory_operand" "m")
10154 (match_operand:SI 3 "memory_operand" "m")]))
10155 (clobber (match_scratch:SI 4 "=r"))]
10156 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10162 HOST_WIDE_INT val1 = 0, val2 = 0;
10164 if (REGNO (operands[0]) > REGNO (operands[4]))
10166 ldm[1] = operands[4];
10167 ldm[2] = operands[0];
10171 ldm[1] = operands[0];
10172 ldm[2] = operands[4];
10175 base_reg = XEXP (operands[2], 0);
10177 if (!REG_P (base_reg))
10179 val1 = INTVAL (XEXP (base_reg, 1));
10180 base_reg = XEXP (base_reg, 0);
10183 if (!REG_P (XEXP (operands[3], 0)))
10184 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10186 arith[0] = operands[0];
10187 arith[3] = operands[1];
10201 if (val1 !=0 && val2 != 0)
10205 if (val1 == 4 || val2 == 4)
10206 /* Other val must be 8, since we know they are adjacent and neither
10208 output_asm_insn (\"ldm%(ib%)\\t%0, {%1, %2}\", ldm);
10209 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10211 ldm[0] = ops[0] = operands[4];
10213 ops[2] = GEN_INT (val1);
10214 output_add_immediate (ops);
10216 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10218 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10222 /* Offset is out of range for a single add, so use two ldr. */
10225 ops[2] = GEN_INT (val1);
10226 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10228 ops[2] = GEN_INT (val2);
10229 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10232 else if (val1 != 0)
10235 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10237 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10242 output_asm_insn (\"ldm%(ia%)\\t%0, {%1, %2}\", ldm);
10244 output_asm_insn (\"ldm%(da%)\\t%0, {%1, %2}\", ldm);
10246 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10249 [(set_attr "length" "12")
10250 (set_attr "predicable" "yes")
10251 (set_attr "type" "load1")]
10254 ; This pattern is never tried by combine, so do it as a peephole
10257 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10258 (match_operand:SI 1 "arm_general_register_operand" ""))
10259 (set (reg:CC CC_REGNUM)
10260 (compare:CC (match_dup 1) (const_int 0)))]
10262 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10263 (set (match_dup 0) (match_dup 1))])]
10267 ; Peepholes to spot possible load- and store-multiples, if the ordering is
10268 ; reversed, check that the memory references aren't volatile.
10271 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10272 (match_operand:SI 4 "memory_operand" "m"))
10273 (set (match_operand:SI 1 "s_register_operand" "=rk")
10274 (match_operand:SI 5 "memory_operand" "m"))
10275 (set (match_operand:SI 2 "s_register_operand" "=rk")
10276 (match_operand:SI 6 "memory_operand" "m"))
10277 (set (match_operand:SI 3 "s_register_operand" "=rk")
10278 (match_operand:SI 7 "memory_operand" "m"))]
10279 "TARGET_ARM && load_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10281 return emit_ldm_seq (operands, 4);
10286 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10287 (match_operand:SI 3 "memory_operand" "m"))
10288 (set (match_operand:SI 1 "s_register_operand" "=rk")
10289 (match_operand:SI 4 "memory_operand" "m"))
10290 (set (match_operand:SI 2 "s_register_operand" "=rk")
10291 (match_operand:SI 5 "memory_operand" "m"))]
10292 "TARGET_ARM && load_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10294 return emit_ldm_seq (operands, 3);
10299 [(set (match_operand:SI 0 "s_register_operand" "=rk")
10300 (match_operand:SI 2 "memory_operand" "m"))
10301 (set (match_operand:SI 1 "s_register_operand" "=rk")
10302 (match_operand:SI 3 "memory_operand" "m"))]
10303 "TARGET_ARM && load_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10305 return emit_ldm_seq (operands, 2);
10310 [(set (match_operand:SI 4 "memory_operand" "=m")
10311 (match_operand:SI 0 "s_register_operand" "rk"))
10312 (set (match_operand:SI 5 "memory_operand" "=m")
10313 (match_operand:SI 1 "s_register_operand" "rk"))
10314 (set (match_operand:SI 6 "memory_operand" "=m")
10315 (match_operand:SI 2 "s_register_operand" "rk"))
10316 (set (match_operand:SI 7 "memory_operand" "=m")
10317 (match_operand:SI 3 "s_register_operand" "rk"))]
10318 "TARGET_ARM && store_multiple_sequence (operands, 4, NULL, NULL, NULL)"
10320 return emit_stm_seq (operands, 4);
10325 [(set (match_operand:SI 3 "memory_operand" "=m")
10326 (match_operand:SI 0 "s_register_operand" "rk"))
10327 (set (match_operand:SI 4 "memory_operand" "=m")
10328 (match_operand:SI 1 "s_register_operand" "rk"))
10329 (set (match_operand:SI 5 "memory_operand" "=m")
10330 (match_operand:SI 2 "s_register_operand" "rk"))]
10331 "TARGET_ARM && store_multiple_sequence (operands, 3, NULL, NULL, NULL)"
10333 return emit_stm_seq (operands, 3);
10338 [(set (match_operand:SI 2 "memory_operand" "=m")
10339 (match_operand:SI 0 "s_register_operand" "rk"))
10340 (set (match_operand:SI 3 "memory_operand" "=m")
10341 (match_operand:SI 1 "s_register_operand" "rk"))]
10342 "TARGET_ARM && store_multiple_sequence (operands, 2, NULL, NULL, NULL)"
10344 return emit_stm_seq (operands, 2);
10349 [(set (match_operand:SI 0 "s_register_operand" "")
10350 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10352 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10353 [(match_operand:SI 3 "s_register_operand" "")
10354 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10355 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10357 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10358 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10363 ;; This split can be used because CC_Z mode implies that the following
10364 ;; branch will be an equality, or an unsigned inequality, so the sign
10365 ;; extension is not needed.
10368 [(set (reg:CC_Z CC_REGNUM)
10370 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10372 (match_operand 1 "const_int_operand" "")))
10373 (clobber (match_scratch:SI 2 ""))]
10375 && (((unsigned HOST_WIDE_INT) INTVAL (operands[1]))
10376 == (((unsigned HOST_WIDE_INT) INTVAL (operands[1])) >> 24) << 24)"
10377 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10378 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10380 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10383 ;; ??? Check the patterns above for Thumb-2 usefulness
10385 (define_expand "prologue"
10386 [(clobber (const_int 0))]
10389 arm_expand_prologue ();
10391 thumb1_expand_prologue ();
10396 (define_expand "epilogue"
10397 [(clobber (const_int 0))]
10400 if (crtl->calls_eh_return)
10401 emit_insn (gen_prologue_use (gen_rtx_REG (Pmode, 2)));
10403 thumb1_expand_epilogue ();
10404 else if (USE_RETURN_INSN (FALSE))
10406 emit_jump_insn (gen_return ());
10409 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10411 gen_rtx_RETURN (VOIDmode)),
10412 VUNSPEC_EPILOGUE));
10417 ;; Note - although unspec_volatile's USE all hard registers,
10418 ;; USEs are ignored after relaod has completed. Thus we need
10419 ;; to add an unspec of the link register to ensure that flow
10420 ;; does not think that it is unused by the sibcall branch that
10421 ;; will replace the standard function epilogue.
10422 (define_insn "sibcall_epilogue"
10423 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_PROLOGUE_USE)
10424 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10427 if (use_return_insn (FALSE, next_nonnote_insn (insn)))
10428 return output_return_instruction (const_true_rtx, FALSE, FALSE);
10429 return arm_output_epilogue (next_nonnote_insn (insn));
10431 ;; Length is absolute worst case
10432 [(set_attr "length" "44")
10433 (set_attr "type" "block")
10434 ;; We don't clobber the conditions, but the potential length of this
10435 ;; operation is sufficient to make conditionalizing the sequence
10436 ;; unlikely to be profitable.
10437 (set_attr "conds" "clob")]
10440 (define_insn "*epilogue_insns"
10441 [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
10445 return arm_output_epilogue (NULL);
10446 else /* TARGET_THUMB1 */
10447 return thumb_unexpanded_epilogue ();
10449 ; Length is absolute worst case
10450 [(set_attr "length" "44")
10451 (set_attr "type" "block")
10452 ;; We don't clobber the conditions, but the potential length of this
10453 ;; operation is sufficient to make conditionalizing the sequence
10454 ;; unlikely to be profitable.
10455 (set_attr "conds" "clob")]
10458 (define_expand "eh_epilogue"
10459 [(use (match_operand:SI 0 "register_operand" ""))
10460 (use (match_operand:SI 1 "register_operand" ""))
10461 (use (match_operand:SI 2 "register_operand" ""))]
10465 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10466 if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
10468 rtx ra = gen_rtx_REG (Pmode, 2);
10470 emit_move_insn (ra, operands[2]);
10473 /* This is a hack -- we may have crystalized the function type too
10475 cfun->machine->func_type = 0;
10479 ;; This split is only used during output to reduce the number of patterns
10480 ;; that need assembler instructions adding to them. We allowed the setting
10481 ;; of the conditions to be implicit during rtl generation so that
10482 ;; the conditional compare patterns would work. However this conflicts to
10483 ;; some extent with the conditional data operations, so we have to split them
10486 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10487 ;; conditional execution sufficient?
10490 [(set (match_operand:SI 0 "s_register_operand" "")
10491 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10492 [(match_operand 2 "" "") (match_operand 3 "" "")])
10494 (match_operand 4 "" "")))
10495 (clobber (reg:CC CC_REGNUM))]
10496 "TARGET_ARM && reload_completed"
10497 [(set (match_dup 5) (match_dup 6))
10498 (cond_exec (match_dup 7)
10499 (set (match_dup 0) (match_dup 4)))]
10502 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10503 operands[2], operands[3]);
10504 enum rtx_code rc = GET_CODE (operands[1]);
10506 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10507 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10508 if (mode == CCFPmode || mode == CCFPEmode)
10509 rc = reverse_condition_maybe_unordered (rc);
10511 rc = reverse_condition (rc);
10513 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10518 [(set (match_operand:SI 0 "s_register_operand" "")
10519 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10520 [(match_operand 2 "" "") (match_operand 3 "" "")])
10521 (match_operand 4 "" "")
10523 (clobber (reg:CC CC_REGNUM))]
10524 "TARGET_ARM && reload_completed"
10525 [(set (match_dup 5) (match_dup 6))
10526 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10527 (set (match_dup 0) (match_dup 4)))]
10530 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10531 operands[2], operands[3]);
10533 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10534 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10539 [(set (match_operand:SI 0 "s_register_operand" "")
10540 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10541 [(match_operand 2 "" "") (match_operand 3 "" "")])
10542 (match_operand 4 "" "")
10543 (match_operand 5 "" "")))
10544 (clobber (reg:CC CC_REGNUM))]
10545 "TARGET_ARM && reload_completed"
10546 [(set (match_dup 6) (match_dup 7))
10547 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10548 (set (match_dup 0) (match_dup 4)))
10549 (cond_exec (match_dup 8)
10550 (set (match_dup 0) (match_dup 5)))]
10553 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10554 operands[2], operands[3]);
10555 enum rtx_code rc = GET_CODE (operands[1]);
10557 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10558 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10559 if (mode == CCFPmode || mode == CCFPEmode)
10560 rc = reverse_condition_maybe_unordered (rc);
10562 rc = reverse_condition (rc);
10564 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10569 [(set (match_operand:SI 0 "s_register_operand" "")
10570 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10571 [(match_operand:SI 2 "s_register_operand" "")
10572 (match_operand:SI 3 "arm_add_operand" "")])
10573 (match_operand:SI 4 "arm_rhs_operand" "")
10575 (match_operand:SI 5 "s_register_operand" ""))))
10576 (clobber (reg:CC CC_REGNUM))]
10577 "TARGET_ARM && reload_completed"
10578 [(set (match_dup 6) (match_dup 7))
10579 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10580 (set (match_dup 0) (match_dup 4)))
10581 (cond_exec (match_dup 8)
10582 (set (match_dup 0) (not:SI (match_dup 5))))]
10585 enum machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10586 operands[2], operands[3]);
10587 enum rtx_code rc = GET_CODE (operands[1]);
10589 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10590 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10591 if (mode == CCFPmode || mode == CCFPEmode)
10592 rc = reverse_condition_maybe_unordered (rc);
10594 rc = reverse_condition (rc);
10596 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10600 (define_insn "*cond_move_not"
10601 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10602 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10603 [(match_operand 3 "cc_register" "") (const_int 0)])
10604 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10606 (match_operand:SI 2 "s_register_operand" "r,r"))))]
10610 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
10611 [(set_attr "conds" "use")
10612 (set_attr "length" "4,8")]
10615 ;; The next two patterns occur when an AND operation is followed by a
10616 ;; scc insn sequence
10618 (define_insn "*sign_extract_onebit"
10619 [(set (match_operand:SI 0 "s_register_operand" "=r")
10620 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10622 (match_operand:SI 2 "const_int_operand" "n")))
10623 (clobber (reg:CC CC_REGNUM))]
10626 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10627 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
10628 return \"mvnne\\t%0, #0\";
10630 [(set_attr "conds" "clob")
10631 (set_attr "length" "8")]
10634 (define_insn "*not_signextract_onebit"
10635 [(set (match_operand:SI 0 "s_register_operand" "=r")
10637 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
10639 (match_operand:SI 2 "const_int_operand" "n"))))
10640 (clobber (reg:CC CC_REGNUM))]
10643 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
10644 output_asm_insn (\"tst\\t%1, %2\", operands);
10645 output_asm_insn (\"mvneq\\t%0, #0\", operands);
10646 return \"movne\\t%0, #0\";
10648 [(set_attr "conds" "clob")
10649 (set_attr "length" "12")]
10651 ;; ??? The above patterns need auditing for Thumb-2
10653 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
10654 ;; expressions. For simplicity, the first register is also in the unspec
10656 (define_insn "*push_multi"
10657 [(match_parallel 2 "multi_register_push"
10658 [(set (match_operand:BLK 0 "memory_operand" "=m")
10659 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "r")]
10660 UNSPEC_PUSH_MULT))])]
10664 int num_saves = XVECLEN (operands[2], 0);
10666 /* For the StrongARM at least it is faster to
10667 use STR to store only a single register.
10668 In Thumb mode always use push, and the assembler will pick
10669 something appropriate. */
10670 if (num_saves == 1 && TARGET_ARM)
10671 output_asm_insn (\"str\\t%1, [%m0, #-4]!\", operands);
10678 strcpy (pattern, \"stmfd\\t%m0!, {%1\");
10680 strcpy (pattern, \"push\\t{%1\");
10682 for (i = 1; i < num_saves; i++)
10684 strcat (pattern, \", %|\");
10686 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
10689 strcat (pattern, \"}\");
10690 output_asm_insn (pattern, operands);
10695 [(set_attr "type" "store4")]
10698 (define_insn "stack_tie"
10699 [(set (mem:BLK (scratch))
10700 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
10701 (match_operand:SI 1 "s_register_operand" "rk")]
10705 [(set_attr "length" "0")]
10708 ;; Similarly for the floating point registers
10709 (define_insn "*push_fp_multi"
10710 [(match_parallel 2 "multi_register_push"
10711 [(set (match_operand:BLK 0 "memory_operand" "=m")
10712 (unspec:BLK [(match_operand:XF 1 "f_register_operand" "f")]
10713 UNSPEC_PUSH_MULT))])]
10714 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
10719 sprintf (pattern, \"sfmfd\\t%%1, %d, [%%m0]!\", XVECLEN (operands[2], 0));
10720 output_asm_insn (pattern, operands);
10723 [(set_attr "type" "f_store")]
10726 ;; Special patterns for dealing with the constant pool
10728 (define_insn "align_4"
10729 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
10732 assemble_align (32);
10737 (define_insn "align_8"
10738 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
10741 assemble_align (64);
10746 (define_insn "consttable_end"
10747 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
10750 making_const_table = FALSE;
10755 (define_insn "consttable_1"
10756 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
10759 making_const_table = TRUE;
10760 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
10761 assemble_zeros (3);
10764 [(set_attr "length" "4")]
10767 (define_insn "consttable_2"
10768 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
10771 making_const_table = TRUE;
10772 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
10773 assemble_zeros (2);
10776 [(set_attr "length" "4")]
10779 (define_insn "consttable_4"
10780 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
10784 making_const_table = TRUE;
10785 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10790 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10791 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10795 assemble_integer (operands[0], 4, BITS_PER_WORD, 1);
10800 [(set_attr "length" "4")]
10803 (define_insn "consttable_8"
10804 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
10808 making_const_table = TRUE;
10809 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10814 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10815 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10819 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
10824 [(set_attr "length" "8")]
10827 (define_insn "consttable_16"
10828 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
10832 making_const_table = TRUE;
10833 switch (GET_MODE_CLASS (GET_MODE (operands[0])))
10838 REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
10839 assemble_real (r, GET_MODE (operands[0]), BITS_PER_WORD);
10843 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
10848 [(set_attr "length" "16")]
10851 ;; Miscellaneous Thumb patterns
10853 (define_expand "tablejump"
10854 [(parallel [(set (pc) (match_operand:SI 0 "register_operand" ""))
10855 (use (label_ref (match_operand 1 "" "")))])]
10860 /* Hopefully, CSE will eliminate this copy. */
10861 rtx reg1 = copy_addr_to_reg (gen_rtx_LABEL_REF (Pmode, operands[1]));
10862 rtx reg2 = gen_reg_rtx (SImode);
10864 emit_insn (gen_addsi3 (reg2, operands[0], reg1));
10865 operands[0] = reg2;
10870 ;; NB never uses BX.
10871 (define_insn "*thumb1_tablejump"
10872 [(set (pc) (match_operand:SI 0 "register_operand" "l*r"))
10873 (use (label_ref (match_operand 1 "" "")))]
10876 [(set_attr "length" "2")]
10879 ;; V5 Instructions,
10881 (define_insn "clzsi2"
10882 [(set (match_operand:SI 0 "s_register_operand" "=r")
10883 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
10884 "TARGET_32BIT && arm_arch5"
10886 [(set_attr "predicable" "yes")
10887 (set_attr "insn" "clz")])
10889 ;; V5E instructions.
10891 (define_insn "prefetch"
10892 [(prefetch (match_operand:SI 0 "address_operand" "p")
10893 (match_operand:SI 1 "" "")
10894 (match_operand:SI 2 "" ""))]
10895 "TARGET_32BIT && arm_arch5e"
10898 ;; General predication pattern
10901 [(match_operator 0 "arm_comparison_operator"
10902 [(match_operand 1 "cc_register" "")
10908 (define_insn "prologue_use"
10909 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_PROLOGUE_USE)]
10911 "%@ %0 needed for prologue"
10915 ;; Patterns for exception handling
10917 (define_expand "eh_return"
10918 [(use (match_operand 0 "general_operand" ""))]
10923 emit_insn (gen_arm_eh_return (operands[0]));
10925 emit_insn (gen_thumb_eh_return (operands[0]));
10930 ;; We can't expand this before we know where the link register is stored.
10931 (define_insn_and_split "arm_eh_return"
10932 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
10934 (clobber (match_scratch:SI 1 "=&r"))]
10937 "&& reload_completed"
10941 arm_set_return_address (operands[0], operands[1]);
10946 (define_insn_and_split "thumb_eh_return"
10947 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "l")]
10949 (clobber (match_scratch:SI 1 "=&l"))]
10952 "&& reload_completed"
10956 thumb_set_return_address (operands[0], operands[1]);
10964 (define_insn "load_tp_hard"
10965 [(set (match_operand:SI 0 "register_operand" "=r")
10966 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
10968 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
10969 [(set_attr "predicable" "yes")]
10972 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
10973 (define_insn "load_tp_soft"
10974 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
10975 (clobber (reg:SI LR_REGNUM))
10976 (clobber (reg:SI IP_REGNUM))
10977 (clobber (reg:CC CC_REGNUM))]
10979 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
10980 [(set_attr "conds" "clob")]
10983 ;; Load the FPA co-processor patterns
10985 ;; Load the Maverick co-processor patterns
10986 (include "cirrus.md")
10987 ;; Vector bits common to IWMMXT and Neon
10988 (include "vec-common.md")
10989 ;; Load the Intel Wireless Multimedia Extension patterns
10990 (include "iwmmxt.md")
10991 ;; Load the VFP co-processor patterns
10993 ;; Thumb-2 patterns
10994 (include "thumb2.md")
10996 (include "neon.md")